treewide: migrate to multi-crate layout
Signed-off-by: NotAShelf <raf@notashelf.dev> Change-Id: I11a2103f3530f07409177404577b90136a6a6964
This commit is contained in:
parent
f655b133d4
commit
d445b1814a
68 changed files with 247 additions and 72 deletions
47
crates/pakker-cli/Cargo.toml
Normal file
47
crates/pakker-cli/Cargo.toml
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
[package]
|
||||
name = "pakker-cli"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "CLI library for Pakker"
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
rust-version.workspace = true
|
||||
readme = "../../docs/README.md"
|
||||
|
||||
[dependencies]
|
||||
pakker-core.workspace = true
|
||||
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
clap.workspace = true
|
||||
comfy-table.workspace = true
|
||||
dialoguer.workspace = true
|
||||
env_logger.workspace = true
|
||||
futures.workspace = true
|
||||
git2.workspace = true
|
||||
glob.workspace = true
|
||||
indicatif.workspace = true
|
||||
keyring.workspace = true
|
||||
keyring-core.workspace = true
|
||||
libc.workspace = true
|
||||
log.workspace = true
|
||||
md-5.workspace = true
|
||||
rand.workspace = true
|
||||
regex.workspace = true
|
||||
reqwest.workspace = true
|
||||
semver.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
sha1.workspace = true
|
||||
sha2.workspace = true
|
||||
strsim.workspace = true
|
||||
tempfile.workspace = true
|
||||
textwrap.workspace = true
|
||||
thiserror.workspace = true
|
||||
tokio.workspace = true
|
||||
walkdir.workspace = true
|
||||
yansi.workspace = true
|
||||
zip.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
625
crates/pakker-cli/src/cli.rs
Normal file
625
crates/pakker-cli/src/cli.rs
Normal file
|
|
@ -0,0 +1,625 @@
|
|||
pub mod commands;
|
||||
|
||||
use clap::{Args, Parser, Subcommand};
|
||||
|
||||
use crate::model::{
|
||||
enums::{ProjectSide, ProjectType, UpdateStrategy},
|
||||
fork::RefType,
|
||||
};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[clap(name = "pakker")]
|
||||
#[clap(about = "A multiplatform modpack manager for Minecraft", long_about = None)]
|
||||
pub struct Cli {
|
||||
/// Enable verbose output (-v for info, -vv for debug, -vvv for trace)
|
||||
#[clap(short, long, action = clap::ArgAction::Count)]
|
||||
pub verbose: u8,
|
||||
|
||||
/// Skip all confirmation prompts (assume yes)
|
||||
#[clap(short, long, global = true)]
|
||||
pub yes: bool,
|
||||
|
||||
#[clap(subcommand)]
|
||||
pub command: Commands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum Commands {
|
||||
/// Initialize a new modpack project
|
||||
Init(InitArgs),
|
||||
|
||||
/// Import an existing modpack
|
||||
Import(ImportArgs),
|
||||
|
||||
/// Add projects to the modpack
|
||||
Add(AddArgs),
|
||||
|
||||
/// Add projects with explicit platform specification (non-interactive)
|
||||
#[clap(name = "add-prj", alias = "prj")]
|
||||
AddPrj(AddPrjArgs),
|
||||
|
||||
/// Remove projects from the modpack
|
||||
Rm(RmArgs),
|
||||
|
||||
/// Update projects
|
||||
Update(UpdateArgs),
|
||||
|
||||
/// List projects in the modpack
|
||||
Ls(LsArgs),
|
||||
|
||||
/// Set project properties
|
||||
Set(SetArgs),
|
||||
|
||||
/// Link projects together
|
||||
Link(LinkArgs),
|
||||
|
||||
/// Unlink projects
|
||||
Unlink(UnlinkArgs),
|
||||
|
||||
/// Show differences between local and remote
|
||||
Diff(DiffArgs),
|
||||
|
||||
/// Fetch project files
|
||||
Fetch(FetchArgs),
|
||||
|
||||
/// Sync projects (fetch + update)
|
||||
Sync(SyncArgs),
|
||||
|
||||
/// Export modpack
|
||||
Export(ExportArgs),
|
||||
|
||||
/// Manage remote repositories
|
||||
Remote(RemoteArgs),
|
||||
|
||||
/// Update modpack from remote Git repository
|
||||
RemoteUpdate(RemoteUpdateArgs),
|
||||
|
||||
/// Check for available updates
|
||||
Status(StatusArgs),
|
||||
|
||||
/// Inspect project details
|
||||
Inspect(InspectArgs),
|
||||
|
||||
/// Manage API credentials
|
||||
Credentials(CredentialsArgs),
|
||||
|
||||
/// Configure modpack properties
|
||||
Cfg(Box<CfgArgs>),
|
||||
|
||||
/// Manage fork configuration
|
||||
Fork(ForkArgs),
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct InitArgs {
|
||||
/// Modpack name
|
||||
#[clap(short, long)]
|
||||
pub name: Option<String>,
|
||||
|
||||
/// Modpack version
|
||||
#[clap(short = 'V', long)]
|
||||
pub version: Option<String>,
|
||||
|
||||
/// Target platform
|
||||
#[clap(short, long)]
|
||||
pub target: Option<String>,
|
||||
|
||||
/// Minecraft versions (space-separated)
|
||||
#[clap(short, long = "mc-versions", value_delimiter = ' ', num_args = 1..)]
|
||||
pub mc_versions: Option<Vec<String>>,
|
||||
|
||||
/// Mod loaders (format: name=version, can be specified multiple times)
|
||||
#[clap(short, long = "loaders", value_delimiter = ',')]
|
||||
pub loaders: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct ImportArgs {
|
||||
/// Path to modpack file
|
||||
pub file: String,
|
||||
|
||||
/// Resolve dependencies
|
||||
#[clap(short = 'D', long = "deps")]
|
||||
pub deps: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct AddArgs {
|
||||
/// Project identifiers to add
|
||||
#[clap(required = true)]
|
||||
pub inputs: Vec<String>,
|
||||
|
||||
/// Project type (mod, resourcepack, shader, datapack, world)
|
||||
#[clap(short = 't', long = "type")]
|
||||
pub project_type: Option<ProjectType>,
|
||||
|
||||
/// Skip resolving dependencies
|
||||
#[clap(short = 'D', long)]
|
||||
pub no_deps: bool,
|
||||
|
||||
/// Update if already exists
|
||||
#[clap(short, long)]
|
||||
pub update: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct AddPrjArgs {
|
||||
/// `CurseForge` project slug or ID (optional file ID: `slug#file_id`)
|
||||
#[clap(long = "cf", alias = "curseforge")]
|
||||
pub curseforge: Option<String>,
|
||||
|
||||
/// Modrinth project slug or ID (optional file ID: `slug#file_id`)
|
||||
#[clap(long = "mr", alias = "modrinth")]
|
||||
pub modrinth: Option<String>,
|
||||
|
||||
/// GitHub repository (format: owner/repo or owner/repo#tag)
|
||||
#[clap(long = "gh", alias = "github")]
|
||||
pub github: Option<String>,
|
||||
|
||||
/// Project type (mod, resourcepack, shader, datapack, world)
|
||||
#[clap(short = 't', long = "type")]
|
||||
pub project_type: Option<ProjectType>,
|
||||
|
||||
/// Project side (client, server, both)
|
||||
#[clap(long)]
|
||||
pub side: Option<ProjectSide>,
|
||||
|
||||
/// Update strategy (latest, none)
|
||||
#[clap(long)]
|
||||
pub strategy: Option<UpdateStrategy>,
|
||||
|
||||
/// Redistributable flag
|
||||
#[clap(long)]
|
||||
pub redistributable: Option<bool>,
|
||||
|
||||
/// Subpath for project file placement
|
||||
#[clap(long)]
|
||||
pub subpath: Option<String>,
|
||||
|
||||
/// Project aliases (can be specified multiple times)
|
||||
#[clap(long = "alias")]
|
||||
pub aliases: Vec<String>,
|
||||
|
||||
/// Export flag (whether to include in exports)
|
||||
#[clap(long)]
|
||||
pub export: Option<bool>,
|
||||
|
||||
/// Skip resolving dependencies
|
||||
#[clap(short = 'D', long = "no-deps")]
|
||||
pub no_deps: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct RmArgs {
|
||||
/// Project identifiers to remove
|
||||
#[clap(required = true)]
|
||||
pub inputs: Vec<String>,
|
||||
|
||||
/// Remove all projects
|
||||
#[clap(short = 'a', long)]
|
||||
pub all: bool,
|
||||
|
||||
/// Skip removing dependent projects
|
||||
#[clap(short = 'D', long = "no-deps")]
|
||||
pub no_deps: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct UpdateArgs {
|
||||
/// Projects to update (empty = all)
|
||||
#[arg(value_name = "PROJECT")]
|
||||
pub inputs: Vec<String>,
|
||||
|
||||
/// Update all projects
|
||||
#[arg(short, long)]
|
||||
pub all: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct LsArgs {
|
||||
/// Show detailed information
|
||||
#[clap(short, long)]
|
||||
pub detailed: bool,
|
||||
|
||||
/// Add update information for projects
|
||||
#[clap(short = 'c', long = "check-updates")]
|
||||
pub check_updates: bool,
|
||||
|
||||
/// Maximum length for project names
|
||||
#[clap(long = "name-max-length")]
|
||||
pub name_max_length: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct SetArgs {
|
||||
/// Project identifier (optional for lockfile properties)
|
||||
pub input: Option<String>,
|
||||
|
||||
/// Project type
|
||||
#[clap(long)]
|
||||
pub r#type: Option<String>,
|
||||
|
||||
/// Project side (client/server/both)
|
||||
#[clap(long)]
|
||||
pub side: Option<String>,
|
||||
|
||||
/// Update strategy (latest/none)
|
||||
#[clap(long)]
|
||||
pub strategy: Option<String>,
|
||||
|
||||
/// Redistributable flag
|
||||
#[clap(long)]
|
||||
pub redistributable: Option<bool>,
|
||||
|
||||
/// Change the target of the pack (curseforge, modrinth, multiplatform)
|
||||
#[clap(short = 't', long)]
|
||||
pub target: Option<String>,
|
||||
|
||||
/// Change the minecraft versions (comma-separated)
|
||||
#[clap(short = 'v', long)]
|
||||
pub mc_versions: Option<String>,
|
||||
|
||||
/// Change the mod loaders (format: name=version,name=version)
|
||||
#[clap(short = 'l', long)]
|
||||
pub loaders: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct LinkArgs {
|
||||
/// Source project
|
||||
pub from: String,
|
||||
|
||||
/// Target project
|
||||
pub to: String,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct UnlinkArgs {
|
||||
/// Source project
|
||||
pub from: String,
|
||||
|
||||
/// Target project
|
||||
pub to: String,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct DiffArgs {
|
||||
/// Path to old lockfile
|
||||
pub old_lockfile: String,
|
||||
|
||||
/// Path to current lockfile (optional, defaults to pakku-lock.json)
|
||||
pub current_lockfile: Option<String>,
|
||||
|
||||
/// Export markdown diff
|
||||
#[clap(long)]
|
||||
pub markdown_diff: Option<String>,
|
||||
|
||||
/// Export markdown (formatted)
|
||||
#[clap(long)]
|
||||
pub markdown: Option<String>,
|
||||
|
||||
/// Verbose output (show file changes)
|
||||
#[clap(short, long)]
|
||||
pub verbose: bool,
|
||||
|
||||
/// Header size for markdown (0-5)
|
||||
#[clap(short = 'H', long, default_value = "2")]
|
||||
pub header_size: usize,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct FetchArgs {
|
||||
/// Timeout for waiting on conflicting operations (seconds)
|
||||
#[clap(short, long)]
|
||||
pub timeout: Option<u64>,
|
||||
|
||||
/// Number of retry attempts for failed downloads
|
||||
#[clap(short = 'r', long, default_value = "2")]
|
||||
pub retry: u32,
|
||||
|
||||
/// Move unknown files to shelf instead of deleting
|
||||
#[clap(long)]
|
||||
pub shelve: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct SyncArgs {
|
||||
/// Sync additions only
|
||||
#[clap(short = 'A', long)]
|
||||
pub additions: bool,
|
||||
|
||||
/// Sync removals only
|
||||
#[clap(short = 'R', long)]
|
||||
pub removals: bool,
|
||||
|
||||
/// Sync updates only (apply pending updates)
|
||||
#[clap(short = 'U', long)]
|
||||
pub updates: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct ExportArgs {
|
||||
/// Export profile (curseforge, modrinth, serverpack)
|
||||
/// If not specified, all profiles will be exported
|
||||
#[clap(short, long)]
|
||||
pub profile: Option<String>,
|
||||
|
||||
/// Output directory
|
||||
#[clap(short, long)]
|
||||
pub output: Option<String>,
|
||||
|
||||
/// Use Pakker-compatible output layout (build/<profile>/...)
|
||||
/// Default is Pakker layout (exports/...)
|
||||
#[clap(long)]
|
||||
pub pakker_layout: bool,
|
||||
|
||||
/// Show file IO errors during export
|
||||
#[clap(long = "show-io-errors")]
|
||||
pub show_io_errors: bool,
|
||||
|
||||
/// Export modpack without server content
|
||||
/// Modrinth: exclude server-overrides and SERVER mods
|
||||
/// `ServerPack`: skip export
|
||||
#[clap(long = "no-server")]
|
||||
pub no_server: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct RemoteArgs {
|
||||
/// Git URL to install from (if empty, shows status)
|
||||
pub url: Option<String>,
|
||||
|
||||
/// Branch to checkout (instead of remote's HEAD)
|
||||
#[clap(short, long)]
|
||||
pub branch: Option<String>,
|
||||
|
||||
/// Install server pack
|
||||
#[clap(short = 'S', long)]
|
||||
pub server_pack: bool,
|
||||
|
||||
/// Retry count for downloads
|
||||
#[clap(short, long, default_value = "2")]
|
||||
pub retry: u32,
|
||||
|
||||
/// Remove remote from modpack
|
||||
#[clap(long = "rm", long = "remove")]
|
||||
pub remove: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct RemoteUpdateArgs {
|
||||
/// Branch to checkout instead of remote's HEAD
|
||||
#[clap(short, long)]
|
||||
pub branch: Option<String>,
|
||||
|
||||
/// Install server pack instead of full modpack
|
||||
#[clap(short, long)]
|
||||
pub server_pack: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct StatusArgs {
|
||||
/// Check updates in parallel
|
||||
#[clap(short, long)]
|
||||
pub parallel: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct InspectArgs {
|
||||
/// Project identifiers to inspect
|
||||
#[clap(required = true)]
|
||||
pub projects: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct CredentialsArgs {
|
||||
/// Delete stored credentials (defaults to deleting both file and keyring)
|
||||
#[clap(short, long)]
|
||||
pub delete: bool,
|
||||
|
||||
/// Delete credentials file (~/.pakku/credentials)
|
||||
#[clap(long)]
|
||||
pub delete_file: bool,
|
||||
|
||||
/// Delete credentials from keyring (service: pakker)
|
||||
#[clap(long)]
|
||||
pub delete_keyring: bool,
|
||||
|
||||
#[clap(subcommand)]
|
||||
pub subcommand: Option<CredentialsSubcommand>,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum CredentialsSubcommand {
|
||||
/// Set API credentials
|
||||
Set(CredentialsSetArgs),
|
||||
/// Test stored API credentials
|
||||
Test,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct CredentialsSetArgs {
|
||||
/// `CurseForge` API key
|
||||
#[clap(long)]
|
||||
pub cf_api_key: Option<String>,
|
||||
|
||||
/// Modrinth API token
|
||||
#[clap(long)]
|
||||
pub modrinth_token: Option<String>,
|
||||
|
||||
/// GitHub access token
|
||||
#[clap(long)]
|
||||
pub gh_access_token: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct CfgArgs {
|
||||
/// Modpack name
|
||||
#[clap(long)]
|
||||
pub name: Option<String>,
|
||||
|
||||
/// Modpack version
|
||||
#[clap(long)]
|
||||
pub version: Option<String>,
|
||||
|
||||
/// Modpack description
|
||||
#[clap(long)]
|
||||
pub description: Option<String>,
|
||||
|
||||
/// Modpack author
|
||||
#[clap(long)]
|
||||
pub author: Option<String>,
|
||||
|
||||
/// Path for mods
|
||||
#[clap(long)]
|
||||
pub mods_path: Option<String>,
|
||||
|
||||
/// Path for resource packs
|
||||
#[clap(long)]
|
||||
pub resource_packs_path: Option<String>,
|
||||
|
||||
/// Path for data packs
|
||||
#[clap(long)]
|
||||
pub data_packs_path: Option<String>,
|
||||
|
||||
/// Path for worlds
|
||||
#[clap(long)]
|
||||
pub worlds_path: Option<String>,
|
||||
|
||||
/// Path for shaders
|
||||
#[clap(long)]
|
||||
pub shaders_path: Option<String>,
|
||||
|
||||
#[clap(subcommand)]
|
||||
pub subcommand: Option<CfgSubcommand>,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum CfgSubcommand {
|
||||
/// Configure per-project settings
|
||||
Prj(CfgPrjArgs),
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct CfgPrjArgs {
|
||||
/// Project identifier
|
||||
pub project: String,
|
||||
|
||||
/// Project type
|
||||
#[clap(long)]
|
||||
pub r#type: Option<String>,
|
||||
|
||||
/// Project side (client/server/both)
|
||||
#[clap(long)]
|
||||
pub side: Option<String>,
|
||||
|
||||
/// Update strategy (latest/none)
|
||||
#[clap(long)]
|
||||
pub update_strategy: Option<String>,
|
||||
|
||||
/// Redistributable flag
|
||||
#[clap(long)]
|
||||
pub redistributable: Option<bool>,
|
||||
|
||||
/// Subpath for project
|
||||
#[clap(long)]
|
||||
pub subpath: Option<String>,
|
||||
|
||||
/// Add alias
|
||||
#[clap(long)]
|
||||
pub add_alias: Option<String>,
|
||||
|
||||
/// Remove alias
|
||||
#[clap(long)]
|
||||
pub remove_alias: Option<String>,
|
||||
|
||||
/// Export flag
|
||||
#[clap(long)]
|
||||
pub export: Option<bool>,
|
||||
}
|
||||
|
||||
/// Fork subcommand arguments
|
||||
#[derive(Debug, Args)]
|
||||
#[command(args_conflicts_with_subcommands = true)]
|
||||
pub struct ForkArgs {
|
||||
#[clap(subcommand)]
|
||||
pub subcommand: ForkSubcommand,
|
||||
}
|
||||
|
||||
#[derive(Debug, Subcommand)]
|
||||
pub enum ForkSubcommand {
|
||||
/// Initialize fork from parent repository
|
||||
Init {
|
||||
/// Git URL of parent repository
|
||||
#[clap(long, conflicts_with = "from_path")]
|
||||
git_url: Option<String>,
|
||||
|
||||
/// Use current repository as parent
|
||||
#[clap(long, conflicts_with = "from_path")]
|
||||
from_current: bool,
|
||||
|
||||
/// Use an already-cloned repository as parent (path to worktree or .git)
|
||||
#[clap(long, value_parser, conflicts_with_all = &["git_url", "from_current"])]
|
||||
from_path: Option<String>,
|
||||
|
||||
/// Branch/tag/commit to track
|
||||
#[clap(long)]
|
||||
ref_name: Option<String>,
|
||||
|
||||
/// Type of ref (branch/tag/commit)
|
||||
#[clap(long, value_enum)]
|
||||
ref_type: Option<RefType>,
|
||||
|
||||
/// Remote name
|
||||
#[clap(long, default_value = "origin")]
|
||||
remote: Option<String>,
|
||||
},
|
||||
|
||||
/// Update fork configuration
|
||||
Set {
|
||||
/// New git URL (optional)
|
||||
#[clap(long)]
|
||||
git_url: Option<String>,
|
||||
|
||||
/// Branch/tag/commit to track
|
||||
#[clap(long)]
|
||||
ref_name: String,
|
||||
|
||||
/// Type of ref (branch/tag/commit)
|
||||
#[clap(long, value_enum)]
|
||||
ref_type: Option<RefType>,
|
||||
|
||||
/// Remote name
|
||||
#[clap(long)]
|
||||
remote: Option<String>,
|
||||
},
|
||||
|
||||
/// Show fork configuration
|
||||
Show,
|
||||
|
||||
/// Remove fork configuration
|
||||
Unset,
|
||||
|
||||
/// Sync with parent repository
|
||||
Sync,
|
||||
|
||||
/// Promote projects to parent (legacy)
|
||||
Promote {
|
||||
/// Project identifiers to promote
|
||||
projects: Vec<String>,
|
||||
},
|
||||
|
||||
/// Exclude parent projects from the merged export
|
||||
Exclude {
|
||||
/// Project slugs to exclude from the parent
|
||||
#[clap(required = true)]
|
||||
projects: Vec<String>,
|
||||
},
|
||||
|
||||
/// Re-include previously excluded parent projects
|
||||
Include {
|
||||
/// Project slugs to stop excluding from the parent
|
||||
#[clap(required = true)]
|
||||
projects: Vec<String>,
|
||||
},
|
||||
}
|
||||
395
crates/pakker-cli/src/cli/commands/add.rs
Normal file
395
crates/pakker-cli/src/cli/commands/add.rs
Normal file
|
|
@ -0,0 +1,395 @@
|
|||
use std::{collections::HashMap, time::Duration};
|
||||
|
||||
use crate::{
|
||||
error::{MultiError, PakkerError, Result},
|
||||
http,
|
||||
model::{
|
||||
Config,
|
||||
LockFile,
|
||||
PakkerCredentialsFile,
|
||||
Project,
|
||||
Target,
|
||||
credentials::ResolvedCredentials,
|
||||
set_keyring_secret,
|
||||
},
|
||||
platform::create_platform,
|
||||
resolver::DependencyResolver,
|
||||
ui_utils::prompt_curseforge_api_key,
|
||||
};
|
||||
|
||||
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
|
||||
lockfile.loaders.keys().cloned().collect()
|
||||
}
|
||||
|
||||
fn needs_curseforge(target: Option<&Target>) -> bool {
|
||||
matches!(
|
||||
target,
|
||||
Some(Target::CurseForge) | Some(Target::Multiplatform)
|
||||
)
|
||||
}
|
||||
|
||||
async fn ensure_curseforge_credentials() -> Result<bool> {
|
||||
let creds = ResolvedCredentials::load();
|
||||
if creds.curseforge_api_key().is_some() {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
if let Some(key) = prompt_curseforge_api_key(false)? {
|
||||
// Verify the key before saving
|
||||
let client = http::create_http_client();
|
||||
let response = client
|
||||
.get("https://api.curseforge.com/v1/mods/238222")
|
||||
.header("x-api-key", &key)
|
||||
.timeout(Duration::from_secs(10))
|
||||
.send()
|
||||
.await;
|
||||
|
||||
match response {
|
||||
Ok(resp) if resp.status().is_success() => {
|
||||
let mut creds_file = PakkerCredentialsFile::load()?;
|
||||
set_keyring_secret("curseforge_api_key", &key)?;
|
||||
creds_file.curseforge_api_key = Some(key.clone());
|
||||
creds_file.save()?;
|
||||
println!("CurseForge API key verified and saved.");
|
||||
Ok(true)
|
||||
},
|
||||
Ok(resp) => {
|
||||
println!(
|
||||
"Warning: CurseForge API key verification failed (HTTP {}).",
|
||||
resp.status()
|
||||
);
|
||||
if crate::ui_utils::prompt_yes_no(
|
||||
"Save this key anyway?",
|
||||
false,
|
||||
false,
|
||||
)? {
|
||||
let mut creds_file = PakkerCredentialsFile::load()?;
|
||||
set_keyring_secret("curseforge_api_key", &key)?;
|
||||
creds_file.curseforge_api_key = Some(key);
|
||||
creds_file.save()?;
|
||||
Ok(true)
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
println!("Warning: Could not verify CurseForge API key: {e}");
|
||||
if crate::ui_utils::prompt_yes_no(
|
||||
"Save this key anyway?",
|
||||
false,
|
||||
false,
|
||||
)? {
|
||||
let mut creds_file = PakkerCredentialsFile::load()?;
|
||||
set_keyring_secret("curseforge_api_key", &key)?;
|
||||
creds_file.curseforge_api_key = Some(key);
|
||||
creds_file.save()?;
|
||||
Ok(true)
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
},
|
||||
}
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_all_platforms()
|
||||
-> HashMap<String, Box<dyn crate::platform::PlatformClient>> {
|
||||
let mut platforms = HashMap::new();
|
||||
|
||||
let credentials = ResolvedCredentials::load();
|
||||
let curseforge_key = credentials.curseforge_api_key().map(String::from);
|
||||
|
||||
if let Ok(platform) = create_platform("multiplatform", curseforge_key) {
|
||||
platforms.insert("multiplatform".to_owned(), platform);
|
||||
} else if let Ok(platform) = create_platform("modrinth", None) {
|
||||
platforms.insert("modrinth".to_owned(), platform);
|
||||
}
|
||||
|
||||
platforms
|
||||
}
|
||||
|
||||
async fn resolve_input(
|
||||
input: &str,
|
||||
platforms: &HashMap<String, Box<dyn crate::platform::PlatformClient>>,
|
||||
lockfile: &LockFile,
|
||||
) -> Result<Project> {
|
||||
let mut projects = Vec::new();
|
||||
|
||||
for (platform_name, client) in platforms {
|
||||
match client
|
||||
.request_project_with_files(
|
||||
input,
|
||||
&lockfile.mc_versions,
|
||||
&get_loaders(lockfile),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(project) => {
|
||||
log::debug!("Resolved '{input}' on {platform_name}");
|
||||
projects.push(project);
|
||||
},
|
||||
Err(e) => {
|
||||
log::debug!("Could not resolve '{input}' on {platform_name}: {e}");
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if projects.is_empty() {
|
||||
return Err(PakkerError::ProjectNotFound(input.to_string()));
|
||||
}
|
||||
|
||||
if projects.len() == 1 {
|
||||
return Ok(projects.remove(0));
|
||||
}
|
||||
|
||||
let mut merged = projects.remove(0);
|
||||
for project in projects {
|
||||
merged.merge(project);
|
||||
}
|
||||
Ok(merged)
|
||||
}
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use crate::{cli::AddArgs, model::fork::LocalConfig};
|
||||
|
||||
#[expect(
|
||||
clippy::future_not_send,
|
||||
reason = "not required to be Send; only called from single-threaded context"
|
||||
)]
|
||||
pub async fn execute(
|
||||
args: AddArgs,
|
||||
global_yes: bool,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<()> {
|
||||
let skip_prompts = global_yes;
|
||||
log::info!("Adding projects: {:?}", args.inputs);
|
||||
|
||||
// Load lockfile
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
|
||||
// Check if lockfile exists (try both pakker-lock.json and pakku-lock.json)
|
||||
let lockfile_exists =
|
||||
lockfile_path.exists() || lockfile_dir.join("pakku-lock.json").exists();
|
||||
|
||||
if !lockfile_exists {
|
||||
// Try to load config from both pakker.json and pakku.json
|
||||
let local_config = LocalConfig::load(config_dir).or_else(|_| {
|
||||
let legacy_config_path = config_dir.join("pakku.json");
|
||||
if legacy_config_path.exists() {
|
||||
LocalConfig::load(&config_dir.join("pakku.json"))
|
||||
} else {
|
||||
Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"No pakker.json found",
|
||||
)))
|
||||
}
|
||||
})?;
|
||||
|
||||
if local_config.has_parent() {
|
||||
log::info!("Creating minimal fork lockfile with parent metadata...");
|
||||
|
||||
// Check for parent lockfile (try both pakker-lock.json and
|
||||
// pakku-lock.json)
|
||||
let parent_paths = [
|
||||
lockfile_dir.join(".pakku/parent/pakker-lock.json"),
|
||||
lockfile_dir.join(".pakku/parent/pakku-lock.json"),
|
||||
];
|
||||
|
||||
let parent_found = parent_paths.iter().any(|path| path.exists());
|
||||
if !parent_found {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"Fork configured but parent lockfile not found at \
|
||||
.pakku/parent/pakker-lock.json or .pakku/parent/pakku-lock.json",
|
||||
)));
|
||||
}
|
||||
|
||||
// Load parent lockfile to get metadata
|
||||
let parent_lock_path = parent_paths
|
||||
.iter()
|
||||
.find(|path| path.exists())
|
||||
.ok_or_else(|| {
|
||||
PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"Parent lockfile not found at expected paths",
|
||||
))
|
||||
})?;
|
||||
let parent_lockfile = LockFile::load_with_validation(
|
||||
parent_lock_path.parent().ok_or_else(|| {
|
||||
PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"Parent lockfile path has no parent directory",
|
||||
))
|
||||
})?,
|
||||
false,
|
||||
)?;
|
||||
|
||||
let minimal_lockfile = LockFile {
|
||||
target: parent_lockfile.target,
|
||||
mc_versions: parent_lockfile.mc_versions,
|
||||
loaders: parent_lockfile.loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
minimal_lockfile.save_without_validation(lockfile_dir)?;
|
||||
} else {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"pakker-lock.json not found and no fork configured. Run 'pakker init' \
|
||||
first.",
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
let mut lockfile = LockFile::load_with_validation(lockfile_dir, false)?;
|
||||
|
||||
// Prompt for missing CurseForge credentials when needed
|
||||
if needs_curseforge(lockfile.target.as_ref()) && !skip_prompts {
|
||||
match ensure_curseforge_credentials().await {
|
||||
Ok(true) => {},
|
||||
Ok(false) => {
|
||||
log::warn!(
|
||||
"CurseForge credentials not configured. CurseForge-only projects \
|
||||
may not resolve."
|
||||
);
|
||||
},
|
||||
Err(e) => {
|
||||
log::warn!("Failed to set up CurseForge credentials: {e}");
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Load config if available
|
||||
let _config = Config::load(config_dir).ok();
|
||||
|
||||
// Create platforms
|
||||
let platforms = create_all_platforms();
|
||||
|
||||
let mut new_projects = Vec::new();
|
||||
let mut errors = MultiError::new();
|
||||
|
||||
// Resolve each input
|
||||
for input in &args.inputs {
|
||||
let project = match resolve_input(input, &platforms, &lockfile).await {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
// Collect error but continue with other inputs
|
||||
log::warn!("Failed to resolve '{input}': {e}");
|
||||
errors.push(e);
|
||||
continue;
|
||||
},
|
||||
};
|
||||
|
||||
// Check if already exists by matching platform IDs (not pakku_id which is
|
||||
// random)
|
||||
let already_exists = lockfile.projects.iter().any(|p| {
|
||||
// Check if any platform ID matches
|
||||
project.id.iter().any(|(platform, id)| {
|
||||
p.id
|
||||
.get(platform)
|
||||
.is_some_and(|existing_id| existing_id == id)
|
||||
})
|
||||
});
|
||||
|
||||
if already_exists {
|
||||
if args.update {
|
||||
log::info!("Updating existing project: {}", project.get_name());
|
||||
// Find and replace the existing project
|
||||
if let Some(pos) = lockfile.projects.iter().position(|p| {
|
||||
project.id.iter().any(|(platform, id)| {
|
||||
p.id
|
||||
.get(platform)
|
||||
.is_some_and(|existing_id| existing_id == id)
|
||||
})
|
||||
}) {
|
||||
lockfile.projects[pos] = project;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
log::info!("Project already exists: {}", project.get_name());
|
||||
continue;
|
||||
}
|
||||
|
||||
// Prompt for confirmation unless --yes flag is set
|
||||
if !skip_prompts {
|
||||
let prompt_msg = format!("Add project '{}'?", project.get_name());
|
||||
if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, skip_prompts)? {
|
||||
log::info!("Skipping project: {}", project.get_name());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
new_projects.push(project);
|
||||
}
|
||||
|
||||
// Resolve dependencies unless --no-deps is specified
|
||||
if !args.no_deps {
|
||||
log::info!("Resolving dependencies...");
|
||||
|
||||
let mut resolver = DependencyResolver::new();
|
||||
let mut all_new_projects = new_projects.clone();
|
||||
|
||||
for project in &mut new_projects {
|
||||
let deps = resolver.resolve(project, &mut lockfile, &platforms).await?;
|
||||
|
||||
for dep in deps {
|
||||
if !lockfile.projects.iter().any(|p| p.pakku_id == dep.pakku_id)
|
||||
&& !all_new_projects.iter().any(|p| p.pakku_id == dep.pakku_id)
|
||||
{
|
||||
// Prompt user for confirmation unless --yes flag is set
|
||||
if !skip_prompts {
|
||||
let prompt_msg = format!(
|
||||
"Add dependency '{}' required by '{}'?",
|
||||
dep.get_name(),
|
||||
project.get_name()
|
||||
);
|
||||
if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, skip_prompts)?
|
||||
{
|
||||
log::info!("Skipping dependency: {}", dep.get_name());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Adding dependency: {}", dep.get_name());
|
||||
all_new_projects.push(dep);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
new_projects = all_new_projects;
|
||||
}
|
||||
|
||||
// Track count before moving
|
||||
let added_count = new_projects.len();
|
||||
|
||||
// Add projects to lockfile (updates already handled above)
|
||||
for project in new_projects {
|
||||
lockfile.add_project(project);
|
||||
}
|
||||
|
||||
// Save lockfile
|
||||
lockfile.save(lockfile_dir)?;
|
||||
|
||||
log::info!("Successfully added {added_count} project(s)");
|
||||
|
||||
// Return aggregated errors if any occurred
|
||||
if !errors.is_empty() {
|
||||
let error_count = errors.len();
|
||||
log::warn!(
|
||||
"{error_count} project(s) failed to resolve (see warnings above)"
|
||||
);
|
||||
// Return success if at least some projects were added, otherwise return
|
||||
// errors
|
||||
if added_count == 0 && args.inputs.len() == error_count {
|
||||
return errors.into_result(());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
404
crates/pakker-cli/src/cli/commands/add_prj.rs
Normal file
404
crates/pakker-cli/src/cli/commands/add_prj.rs
Normal file
|
|
@ -0,0 +1,404 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{
|
||||
Config,
|
||||
LockFile,
|
||||
Project,
|
||||
credentials::ResolvedCredentials,
|
||||
enums::{ProjectSide, ProjectType, UpdateStrategy},
|
||||
},
|
||||
platform::create_platform,
|
||||
resolver::DependencyResolver,
|
||||
ui_utils::prompt_curseforge_api_key,
|
||||
};
|
||||
|
||||
/// Split a project argument of the form `"input"` or `"input#file_id"`
/// into the project identifier and the optional file ID.
///
/// Splits on the first `'#'` only, so anything after it (including further
/// `'#'` characters) is treated as part of the file ID.
fn parse_common_arg(input: &str) -> (String, Option<String>) {
    match input.split_once('#') {
        Some((project, file)) => (project.to_owned(), Some(file.to_owned())),
        None => (input.to_owned(), None),
    }
}
|
||||
|
||||
/// Parse a GitHub argument (owner/repo with optional tag)
|
||||
/// Format: "owner/repo" or "owner/repo#tag"
|
||||
fn parse_github_arg(input: &str) -> Result<(String, String, Option<String>)> {
|
||||
let (repo_part, tag) = if let Some((r, t)) = input.split_once('#') {
|
||||
(r, Some(t.to_string()))
|
||||
} else {
|
||||
(input, None)
|
||||
};
|
||||
|
||||
if let Some((owner, repo)) = repo_part.split_once('/') {
|
||||
Ok((owner.to_string(), repo.to_string(), tag))
|
||||
} else {
|
||||
Err(PakkerError::InvalidInput(format!(
|
||||
"Invalid GitHub format '{input}'. Expected: owner/repo or owner/repo#tag"
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
|
||||
lockfile.loaders.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Handler for the explicit-platform `add` command: fetch a project from
/// each platform the user named (`--cf`, `--mr`, `--gh`), merge the
/// per-platform results into a single `Project`, apply user-specified
/// property overrides, write it into the lockfile, and (unless `--no-deps`)
/// resolve and add its dependencies.
///
/// # Arguments
/// * `cf_arg` / `mr_arg` / `gh_arg` - platform inputs; `cf`/`mr` use the
///   `"slug"` or `"slug#file_id"` form, `gh` uses `"owner/repo[#tag]"`.
///   At least one must be `Some`.
/// * `project_type` .. `export` - optional overrides applied to the merged
///   project after fetching.
/// * `no_deps` - skip dependency resolution entirely.
/// * `yes` - suppress all interactive prompts (assume confirmation).
/// * `lockfile_path` / `config_path` - file paths; their parent directories
///   are what `LockFile::load` / `Config::load` actually receive.
///
/// # Errors
/// Returns `InvalidInput` when no platform flag was given or the GitHub
/// argument is malformed, `ProjectNotFound` when a platform lookup fails,
/// and `FileSelectionError` when an explicit file ID / tag matches no file.
#[expect(
    clippy::future_not_send,
    reason = "not required to be Send; only called from single-threaded context"
)]
#[expect(
    clippy::too_many_arguments,
    reason = "CLI command handler maps directly from clap args"
)]
pub async fn execute(
    cf_arg: Option<String>,
    mr_arg: Option<String>,
    gh_arg: Option<String>,
    project_type: Option<ProjectType>,
    project_side: Option<ProjectSide>,
    update_strategy: Option<UpdateStrategy>,
    redistributable: Option<bool>,
    subpath: Option<String>,
    aliases: Vec<String>,
    export: Option<bool>,
    no_deps: bool,
    yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // At least one platform must be specified
    if cf_arg.is_none() && mr_arg.is_none() && gh_arg.is_none() {
        return Err(PakkerError::InvalidInput(
            "At least one platform must be specified (--cf, --mr, or --gh)"
                .to_string(),
        ));
    }

    log::info!("Adding project with explicit platform specification");

    // Load lockfile
    // Both load functions take the containing directory, not the file path.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;

    // Load config if available (currently unused beyond validating that it
    // parses; failures are deliberately ignored via `.ok()`)
    let _config = Config::load(config_dir).ok();

    // Get MC versions and loaders from lockfile — these constrain which
    // platform files are considered compatible.
    let mc_versions = &lockfile.mc_versions;
    let loaders = get_loaders(&lockfile);

    // Fetch projects from each specified platform
    let mut projects_to_merge: Vec<Project> = Vec::new();

    // CurseForge
    if let Some(cf_input) = cf_arg {
        log::info!("Fetching from CurseForge: {cf_input}");
        let (input, file_id) = parse_common_arg(&cf_input);

        let credentials = ResolvedCredentials::load();
        let mut cf_api_key = credentials.curseforge_api_key().map(String::from);

        // Prompt for missing CurseForge credentials (skipped in --yes mode,
        // where the request proceeds without a key)
        if cf_api_key.is_none() && !yes {
            if let Some(key) = prompt_curseforge_api_key(false)? {
                cf_api_key = Some(key);
            }
        }

        let platform = create_platform("curseforge", cf_api_key)?;

        let mut project = platform
            .request_project_with_files(&input, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!(
                    "CurseForge project '{input}': {e}"
                ))
            })?;

        // If file_id specified, filter to that file
        if let Some(fid) = file_id {
            project.files.retain(|f| f.id == fid);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "File ID '{fid}' not found for CurseForge project '{input}'"
                )));
            }
        }

        projects_to_merge.push(project);
    }

    // Modrinth (no credentials required)
    if let Some(mr_input) = mr_arg {
        log::info!("Fetching from Modrinth: {mr_input}");
        let (input, file_id) = parse_common_arg(&mr_input);

        let platform = create_platform("modrinth", None)?;

        let mut project = platform
            .request_project_with_files(&input, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!("Modrinth project '{input}': {e}"))
            })?;

        // If file_id specified, filter to that file
        if let Some(fid) = file_id {
            project.files.retain(|f| f.id == fid);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "File ID '{fid}' not found for Modrinth project '{input}'"
                )));
            }
        }

        projects_to_merge.push(project);
    }

    // GitHub
    if let Some(gh_input) = gh_arg {
        log::info!("Fetching from GitHub: {gh_input}");
        let (owner, repo, tag) = parse_github_arg(&gh_input)?;

        // NOTE(review): GitHub auth is taken from the GITHUB_TOKEN env var
        // here, unlike create_all_platforms() below which uses the resolved
        // credential store — presumably intentional, but worth confirming.
        let gh_token = std::env::var("GITHUB_TOKEN").ok();
        let platform = create_platform("github", gh_token)?;

        let repo_path = format!("{owner}/{repo}");
        let mut project = platform
            .request_project_with_files(&repo_path, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!(
                    "GitHub repository '{owner}/{repo}': {e}"
                ))
            })?;

        // If tag specified, filter to that tag (tags are stored as file ids)
        if let Some(t) = tag {
            project.files.retain(|f| f.id == t);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "Tag '{t}' not found for GitHub repository '{owner}/{repo}'"
                )));
            }
        }

        projects_to_merge.push(project);
    }

    // Merge all fetched projects into one
    if projects_to_merge.is_empty() {
        return Err(PakkerError::ProjectNotFound(
            "No projects could be fetched from specified platforms".to_string(),
        ));
    }

    let mut combined_project = projects_to_merge.remove(0);
    for project in projects_to_merge {
        combined_project.merge(project);
    }

    // Apply user-specified properties (CLI flags override fetched values)
    if let Some(pt) = project_type {
        combined_project.r#type = pt;
    }
    if let Some(ps) = project_side {
        combined_project.side = ps;
    }
    if let Some(us) = update_strategy {
        combined_project.update_strategy = us;
    }
    if let Some(r) = redistributable {
        combined_project.redistributable = r;
    }
    if let Some(sp) = subpath {
        combined_project.subpath = Some(sp);
    }
    if let Some(e) = export {
        combined_project.export = e;
    }

    // Add aliases
    for alias in aliases {
        combined_project.aliases.insert(alias);
    }

    // Check if project already exists: any shared (platform, id) pair counts
    // as a match.
    let existing_pos = lockfile.projects.iter().position(|p| {
        // Check if any platform ID matches
        combined_project.id.iter().any(|(platform, id)| {
            p.id
                .get(platform)
                .is_some_and(|existing_id| existing_id == id)
        })
    });

    let project_name = combined_project.get_name();

    if let Some(pos) = existing_pos {
        let existing_project = &lockfile.projects[pos];
        let existing_name = existing_project.get_name();

        if !yes {
            let prompt_msg = format!(
                "Project '{existing_name}' already exists. Replace with \
                 '{project_name}'?"
            );
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, false, yes)? {
                log::info!("Operation cancelled by user");
                return Ok(());
            }
        }

        log::info!("Replacing existing project: {existing_name}");
        lockfile.projects[pos] = combined_project.clone();
        println!("✓ Replaced '{existing_name}' with '{project_name}'");
    } else {
        if !yes {
            let prompt_msg = format!("Add project '{project_name}'?");
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, yes)? {
                log::info!("Operation cancelled by user");
                return Ok(());
            }
        }

        // NOTE(review): a clone is stored; the resolver below mutates the
        // local `combined_project`, and those mutations are NOT reflected in
        // the lockfile entry — confirm this is intended.
        lockfile.add_project(combined_project.clone());
        println!("✓ Added '{project_name}'");
    }

    // Resolve dependencies unless --no-deps is specified
    if !no_deps {
        log::info!("Resolving dependencies...");

        let platforms = create_all_platforms();
        let mut resolver = DependencyResolver::new();

        let deps = resolver
            .resolve(&mut combined_project, &mut lockfile, &platforms)
            .await?;

        for dep in deps {
            // Skip if already in lockfile
            if lockfile.projects.iter().any(|p| {
                dep.id.iter().any(|(platform, id)| {
                    p.id
                        .get(platform)
                        .is_some_and(|existing_id| existing_id == id)
                })
            }) {
                continue;
            }

            let dep_name = dep.get_name();

            // Prompt user for confirmation unless --yes flag is set
            if !yes {
                let prompt_msg =
                    format!("Add dependency '{dep_name}' required by '{project_name}'?");
                if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, yes)? {
                    log::info!("Skipping dependency: {dep_name}");
                    continue;
                }
            }

            log::info!("Adding dependency: {dep_name}");
            lockfile.add_project(dep);
            println!("  ✓ Added dependency '{dep_name}'");
        }
    }

    // Save lockfile
    lockfile.save(lockfile_dir)?;

    log::info!("Successfully completed add-prj operation");

    Ok(())
}
|
||||
|
||||
fn create_all_platforms()
|
||||
-> HashMap<String, Box<dyn crate::platform::PlatformClient>> {
|
||||
let mut platforms = HashMap::new();
|
||||
|
||||
let credentials = ResolvedCredentials::load();
|
||||
let curseforge_key = credentials.curseforge_api_key().map(String::from);
|
||||
let github_token = credentials.github_access_token().map(String::from);
|
||||
|
||||
if let Ok(platform) = create_platform("multiplatform", curseforge_key) {
|
||||
platforms.insert("multiplatform".to_string(), platform);
|
||||
} else if let Ok(platform) = create_platform("modrinth", None) {
|
||||
platforms.insert("modrinth".to_string(), platform);
|
||||
}
|
||||
if let Ok(platform) = create_platform("github", github_token) {
|
||||
platforms.insert("github".to_string(), platform);
|
||||
}
|
||||
|
||||
platforms
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn common_arg_plain_slug_has_no_file_id() {
        let parsed = parse_common_arg("fabric-api");
        assert_eq!(parsed, ("fabric-api".to_string(), None));
    }

    #[test]
    fn common_arg_hash_suffix_becomes_file_id() {
        let parsed = parse_common_arg("fabric-api#12345");
        assert_eq!(
            parsed,
            ("fabric-api".to_string(), Some("12345".to_string()))
        );
    }

    #[test]
    fn github_arg_splits_owner_and_repo() {
        let (owner, repo, tag) =
            parse_github_arg("FabricMC/fabric").expect("valid input");
        assert_eq!((owner.as_str(), repo.as_str()), ("FabricMC", "fabric"));
        assert!(tag.is_none());
    }

    #[test]
    fn github_arg_captures_tag() {
        let (owner, repo, tag) =
            parse_github_arg("FabricMC/fabric#v0.15.0").expect("valid input");
        assert_eq!((owner.as_str(), repo.as_str()), ("FabricMC", "fabric"));
        assert_eq!(tag.as_deref(), Some("v0.15.0"));
    }

    #[test]
    fn github_arg_without_slash_is_rejected() {
        let err = parse_github_arg("invalid-format")
            .expect_err("missing '/' must be an error");
        assert!(err.to_string().contains("Invalid GitHub format"));
    }

    // Documents current behavior: a trailing '/' yields an empty repo name
    // rather than an error.
    #[test]
    fn github_arg_accepts_empty_repo_segment() {
        let (owner, repo, tag) =
            parse_github_arg("FabricMC/").expect("currently accepted");
        assert_eq!(owner, "FabricMC");
        assert!(repo.is_empty());
        assert!(tag.is_none());
    }
}
|
||||
147
crates/pakker-cli/src/cli/commands/cfg.rs
Normal file
147
crates/pakker-cli/src/cli/commands/cfg.rs
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
use std::path::Path;
|
||||
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::Result,
|
||||
model::config::Config,
|
||||
ui_utils::prompt_input_optional,
|
||||
};
|
||||
|
||||
/// Handler for the `cfg` command: update top-level modpack configuration
/// fields (name, version, description, author) and per-project-type output
/// paths, then persist the config.
///
/// Behavior:
/// * Any flag that was passed is applied directly and echoed to stdout.
/// * If NO flag was passed, the command drops into an interactive mode and
///   prompts for name/version/description/author (paths are not prompted).
/// * If still nothing changed after the interactive pass, nothing is saved.
///
/// # Errors
/// Propagates failures from `Config::load` and `Config::save`.
#[expect(
    clippy::too_many_arguments,
    reason = "CLI command handler maps directly from clap args"
)]
pub fn execute(
    config_path: &Path,
    name: Option<String>,
    version: Option<String>,
    description: Option<String>,
    author: Option<String>,
    mods_path: Option<String>,
    resource_packs_path: Option<String>,
    data_packs_path: Option<String>,
    worlds_path: Option<String>,
    shaders_path: Option<String>,
) -> Result<()> {
    // Config::load expects the containing directory, not the file path.
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut config = Config::load(config_dir)?;
    // True when any CLI flag supplied a value; decides whether to enter
    // interactive mode below.
    let mut changed = name.is_some()
        || version.is_some()
        || description.is_some()
        || author.is_some()
        || mods_path.is_some()
        || resource_packs_path.is_some()
        || data_packs_path.is_some()
        || worlds_path.is_some()
        || shaders_path.is_some();

    // Modpack properties
    if let Some(new_name) = name {
        config.name.clone_from(&new_name);
        println!("{}", format!("✓ 'name' set to '{new_name}'").green());
    }

    if let Some(new_version) = version {
        config.version.clone_from(&new_version);
        println!("{}", format!("✓ 'version' set to '{new_version}'").green());
    }

    if let Some(new_description) = description {
        config.description = Some(new_description.clone());
        println!(
            "{}",
            format!("✓ 'description' set to '{new_description}'").green()
        );
    }

    if let Some(new_author) = author {
        config.author = Some(new_author.clone());
        println!("{}", format!("✓ 'author' set to '{new_author}'").green());
    }

    // Project type paths — keyed by the project-type slug used elsewhere in
    // the config ("mod", "resource-pack", "data-pack", "world", "shader").
    if let Some(path) = mods_path {
        config.paths.insert("mod".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.mod' set to '{path}'").green());
    }

    if let Some(path) = resource_packs_path {
        config
            .paths
            .insert("resource-pack".to_string(), path.clone());
        println!(
            "{}",
            format!("✓ 'paths.resource-pack' set to '{path}'").green()
        );
    }

    if let Some(path) = data_packs_path {
        config.paths.insert("data-pack".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.data-pack' set to '{path}'").green());
    }

    if let Some(path) = worlds_path {
        config.paths.insert("world".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.world' set to '{path}'").green());
    }

    if let Some(path) = shaders_path {
        config.paths.insert("shader".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.shader' set to '{path}'").green());
    }

    if !changed {
        // Interactive mode: prompt for values if none were specified.
        // Prompt errors are ignored (`if let Ok(...)`), treating a failed
        // prompt the same as a skipped one.
        println!(
            "{}",
            "No changes specified. Enter values interactively (press Enter to skip):"
                .yellow()
        );
        println!();

        // Prompt for each configurable field
        if let Ok(Some(new_name)) = prompt_input_optional("  Name") {
            config.name.clone_from(&new_name);
            println!("{}", format!("  ✓ 'name' set to '{new_name}'").green());
            changed = true;
        }

        if let Ok(Some(new_version)) = prompt_input_optional("  Version") {
            config.version.clone_from(&new_version);
            println!(
                "{}",
                format!("  ✓ 'version' set to '{new_version}'").green()
            );
            changed = true;
        }

        if let Ok(Some(new_description)) = prompt_input_optional("  Description") {
            config.description = Some(new_description.clone());
            println!(
                "{}",
                format!("  ✓ 'description' set to '{new_description}'").green()
            );
            changed = true;
        }

        if let Ok(Some(new_author)) = prompt_input_optional("  Author") {
            config.author = Some(new_author.clone());
            println!("{}", format!("  ✓ 'author' set to '{new_author}'").green());
            changed = true;
        }

        if !changed {
            println!();
            println!("{}", "No changes made.".dim());
            return Ok(());
        }
    }

    // Config::save expects directory path, not file path
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    config.save(config_dir)?;
    println!("\n{}", "Configuration updated successfully".green().bold());

    Ok(())
}
|
||||
204
crates/pakker-cli/src/cli/commands/cfg_prj.rs
Normal file
204
crates/pakker-cli/src/cli/commands/cfg_prj.rs
Normal file
|
|
@ -0,0 +1,204 @@
|
|||
use std::path::Path;
|
||||
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{
|
||||
config::Config,
|
||||
enums::{ProjectSide, ProjectType, UpdateStrategy},
|
||||
lockfile::LockFile,
|
||||
},
|
||||
};
|
||||
|
||||
/// Handler for the `cfg-prj` command: edit the per-project configuration
/// overrides (type, side, update strategy, redistributable, subpath,
/// aliases, export flag) for a single project and persist the config.
///
/// The project is looked up in the lockfile by, in order: pakku ID, then
/// slug on any platform, then display name on any platform (both
/// case-insensitive). The project's `pakku_id` keys its entry in the config.
///
/// # Errors
/// * `ProjectNotFound` — no lockfile entry matches `project`.
/// * `InvalidProject` — the entry has no `pakku_id`, or a string flag
///   (`type`, `side`, `update_strategy`) has an unrecognized value.
/// * Plus any `Config`/`LockFile` load/save failure.
#[expect(
    clippy::too_many_arguments,
    reason = "CLI command handler maps directly from clap args"
)]
pub fn execute(
    config_path: &Path,
    lockfile_path: &Path,
    project: &str,
    r#type: Option<&str>,
    side: Option<&str>,
    update_strategy: Option<&str>,
    redistributable: Option<bool>,
    subpath: Option<String>,
    add_alias: Option<String>,
    remove_alias: Option<String>,
    export: Option<bool>,
) -> Result<()> {
    // Both loaders take the containing directory, not the file path.
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut config = Config::load(config_dir)?;
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let lockfile = LockFile::load(lockfile_dir)?;

    // Find the project in lockfile to get its pakku_id
    // Try multiple lookup strategies: pakku_id first, then slug, then name
    let found_project = lockfile
        .get_project(project)
        .or_else(|| {
            // Try to find by slug on any platform
            lockfile
                .projects
                .iter()
                .find(|p| p.slug.values().any(|s| s.eq_ignore_ascii_case(project)))
        })
        .or_else(|| {
            // Try to find by name on any platform
            lockfile
                .projects
                .iter()
                .find(|p| p.name.values().any(|n| n.eq_ignore_ascii_case(project)))
        })
        .ok_or_else(|| PakkerError::ProjectNotFound(project.to_string()))?;

    let pakku_id = found_project.pakku_id.as_ref().ok_or_else(|| {
        PakkerError::InvalidProject("Project has no pakku_id".to_string())
    })?;

    // Get or create project config (a fresh default if none exists yet)
    let mut project_config = config
        .get_project_config(pakku_id)
        .cloned()
        .unwrap_or_default();

    // Captured BEFORE any parsing so we can bail with a hint when no flag
    // was given at all.
    let changed = r#type.is_some()
        || side.is_some()
        || update_strategy.is_some()
        || redistributable.is_some()
        || subpath.is_some()
        || add_alias.is_some()
        || remove_alias.is_some()
        || export.is_some();

    // Flag values are matched case-insensitively (uppercased first).
    if let Some(type_str) = r#type {
        let parsed_type = match type_str.to_uppercase().as_str() {
            "MOD" => ProjectType::Mod,
            "RESOURCE_PACK" | "RESOURCEPACK" => ProjectType::ResourcePack,
            "DATA_PACK" | "DATAPACK" => ProjectType::DataPack,
            "SHADER" => ProjectType::Shader,
            "WORLD" => ProjectType::World,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid type: {type_str}"
                )));
            },
        };
        project_config.r#type = Some(parsed_type);
        println!(
            "{}",
            format!("✓ 'type' set to '{parsed_type:?}' for '{pakku_id}'").green()
        );
    }

    if let Some(side_str) = side {
        let parsed_side = match side_str.to_uppercase().as_str() {
            "CLIENT" => ProjectSide::Client,
            "SERVER" => ProjectSide::Server,
            "BOTH" => ProjectSide::Both,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid side: {side_str}"
                )));
            },
        };
        project_config.side = Some(parsed_side);
        println!(
            "{}",
            format!("✓ 'side' set to '{parsed_side:?}' for '{pakku_id}'").green()
        );
    }

    if let Some(strategy_str) = update_strategy {
        let parsed_strategy = match strategy_str.to_uppercase().as_str() {
            "LATEST" => UpdateStrategy::Latest,
            "NONE" => UpdateStrategy::None,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid update strategy: {strategy_str}"
                )));
            },
        };
        project_config.update_strategy = Some(parsed_strategy);
        println!(
            "{}",
            format!(
                "✓ 'updateStrategy' set to '{parsed_strategy:?}' for '{pakku_id}'"
            )
            .green()
        );
    }

    if let Some(new_redistributable) = redistributable {
        project_config.redistributable = Some(new_redistributable);
        println!(
            "{}",
            format!(
                "✓ 'redistributable' set to '{new_redistributable}' for '{pakku_id}'"
            )
            .green()
        );
    }

    if let Some(new_subpath) = subpath {
        project_config.subpath = Some(new_subpath.clone());
        println!(
            "{}",
            format!("✓ 'subpath' set to '{new_subpath}' for '{pakku_id}'").green()
        );
    }

    // Duplicate aliases are silently ignored (no message printed).
    if let Some(alias_to_add) = add_alias {
        let mut aliases = project_config.aliases.clone().unwrap_or_default();
        if !aliases.contains(&alias_to_add) {
            aliases.push(alias_to_add.clone());
            project_config.aliases = Some(aliases);
            println!(
                "{}",
                format!("✓ Added alias '{alias_to_add}' for '{pakku_id}'").green()
            );
        }
    }

    // Removing from a project with no alias list is a no-op; removal of a
    // non-existent alias still prints the success message.
    if let Some(alias_to_remove) = remove_alias
        && let Some(mut aliases) = project_config.aliases.clone()
    {
        aliases.retain(|a| a != &alias_to_remove);
        project_config.aliases = Some(aliases);
        println!(
            "{}",
            format!("✓ Removed alias '{alias_to_remove}' from '{pakku_id}'").green()
        );
    }

    if let Some(new_export) = export {
        project_config.export = Some(new_export);
        println!(
            "{}",
            format!("✓ 'export' set to '{new_export}' for '{pakku_id}'").green()
        );
    }

    if !changed {
        eprintln!(
            "{}",
            "No changes specified. Use --help for options.".yellow()
        );
        return Ok(());
    }

    config.set_project_config(pakku_id.clone(), project_config);
    // Config::save expects directory path, not file path
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    config.save(config_dir)?;

    println!(
        "\n{}",
        format!("Project configuration updated for '{pakku_id}'")
            .green()
            .bold()
    );

    Ok(())
}
|
||||
112
crates/pakker-cli/src/cli/commands/credentials.rs
Normal file
112
crates/pakker-cli/src/cli/commands/credentials.rs
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::Result,
|
||||
model::{
|
||||
PakkerCredentialsFile,
|
||||
credentials::{CredentialsSource, ResolvedCredentials},
|
||||
},
|
||||
};
|
||||
|
||||
/// Handler for the `credentials` command (list / delete modes).
///
/// With any delete flag set, removes Pakker-managed credential storage and
/// returns; otherwise prints the currently resolved credentials (masked)
/// with their source and the credentials-file path.
///
/// # Arguments
/// * `delete` - delete BOTH the Pakker credentials file and keyring entries.
/// * `delete_file` - delete only the Pakker credentials file.
/// * `delete_keyring` - delete only the keyring entries.
///
/// # Errors
/// Propagates failures from credential-file deletion, keyring deletion, and
/// path resolution.
pub fn execute(
    delete: bool,
    delete_file: bool,
    delete_keyring: bool,
) -> Result<()> {
    let delete_effective = delete || delete_file || delete_keyring;

    if delete_effective {
        // Pakker must never delete or modify Pakku's credentials file
        // (~/.pakku/credentials). Deletion here only affects Pakker-managed
        // storage (keyring + Pakker-owned file).
        let delete_keyring = delete_keyring || delete;
        let delete_pakker_file = delete_file || delete;

        if delete_pakker_file {
            PakkerCredentialsFile::delete()?;
        }
        if delete_keyring {
            ResolvedCredentials::delete_keyring()?;
        }

        println!("Credentials deleted.");
        return Ok(());
    }

    // List mode: resolve credentials from all sources (env / keyring / file).
    let creds = ResolvedCredentials::load();

    let has_any = creds.curseforge_api_key().is_some()
        || creds.modrinth_token().is_some()
        || creds.github_access_token().is_some();

    if !has_any {
        println!("{}", "No credentials stored".yellow());
        println!("\nUse 'pakker credentials set' to add credentials");
        return Ok(());
    }

    println!("{}", "Stored Credentials:".cyan().bold());
    println!();

    // Each entry is printed masked, together with where it was resolved from.
    print_credential(
        "CurseForge API Key",
        creds.curseforge_api_key(),
        creds.curseforge_source(),
    );

    print_credential(
        "Modrinth Token",
        creds.modrinth_token(),
        creds.modrinth_source(),
    );

    print_credential(
        "GitHub Access Token",
        creds.github_access_token(),
        creds.github_source(),
    );

    println!();
    println!(
        "{}",
        format!(
            "Credentials file: {}",
            PakkerCredentialsFile::get_path()?.display()
        )
        .cyan()
    );

    Ok(())
}
|
||||
|
||||
fn print_credential(
|
||||
label: &str,
|
||||
value: Option<&str>,
|
||||
source: Option<CredentialsSource>,
|
||||
) {
|
||||
if let Some(v) = value {
|
||||
let masked = mask_key(v);
|
||||
let source = source.map_or("unknown", source_label);
|
||||
println!(" {} {} ({})", format!("{label}:").yellow(), masked, source);
|
||||
}
|
||||
}
|
||||
|
||||
const fn source_label(source: CredentialsSource) -> &'static str {
|
||||
match source {
|
||||
CredentialsSource::Env => "env",
|
||||
CredentialsSource::Keyring => "keyring",
|
||||
CredentialsSource::PakkerFile => "pakker-file",
|
||||
}
|
||||
}
|
||||
|
||||
/// Mask a secret for display: keep the first 8 and last 4 characters and
/// replace everything between with `'*'`. Secrets of 12 characters or
/// fewer are fully masked.
///
/// Counts and slices by *characters*, not bytes: the previous byte-indexed
/// version (`&key[..8]`, `&key[key.len() - 4..]`) could panic on a
/// non-char-boundary for multi-byte UTF-8 input, and sized the mask by
/// byte length. ASCII behavior is unchanged.
fn mask_key(key: &str) -> String {
    let char_count = key.chars().count();
    if char_count <= 12 {
        return "*".repeat(char_count);
    }

    let start: String = key.chars().take(8).collect();
    let end: String = key.chars().skip(char_count - 4).collect();
    let middle = "*".repeat(char_count - 12);

    format!("{start}{middle}{end}")
}
|
||||
219
crates/pakker-cli/src/cli/commands/credentials_set.rs
Normal file
219
crates/pakker-cli/src/cli/commands/credentials_set.rs
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
use std::{io::Write, time::Duration};
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
http,
|
||||
model::{PakkerCredentialsFile, set_keyring_secret},
|
||||
ui_utils::{prompt_secret, prompt_yes_no},
|
||||
};
|
||||
|
||||
/// Handler for the `credentials set` command: collect credentials (from CLI
/// flags or, when none were given, interactive secret prompts), verify each
/// one against its platform API, then store them in the OS keyring AND the
/// Pakker credentials file.
///
/// A credential that fails verification is dropped unless the user
/// explicitly confirms saving it anyway. Values are trimmed before storage;
/// a value that trims to empty is silently not stored.
///
/// # Errors
/// Propagates prompt failures, keyring write failures, and credentials-file
/// load/save failures. Verification *network* errors do not abort the
/// command — they only trigger the "save anyway?" prompt.
pub async fn execute(
    curseforge_api_key: Option<String>,
    modrinth_token: Option<String>,
    github_access_token: Option<String>,
) -> Result<()> {
    let mut cf_key = curseforge_api_key;
    let mut mr_token = modrinth_token;
    let mut gh_token = github_access_token;

    let any_cli_args =
        cf_key.is_some() || mr_token.is_some() || gh_token.is_some();

    // Enter interactive mode when no CLI args provided
    if !any_cli_args {
        println!("No credentials provided via command line.");
        println!();

        if let Some(key) =
            prompt_secret("CurseForge API key (press Enter to skip)")?
        {
            cf_key = Some(key);
        }

        if let Some(token) = prompt_secret("Modrinth token (press Enter to skip)")?
        {
            mr_token = Some(token);
        }

        if let Some(token) =
            prompt_secret("GitHub access token (press Enter to skip)")?
        {
            gh_token = Some(token);
        }
    }

    let updated_any =
        cf_key.is_some() || mr_token.is_some() || gh_token.is_some();

    if !updated_any {
        println!("No credentials to save.");
        return Ok(());
    }

    // Verify credentials before saving; `verified` collects the platform
    // names that passed, for the summary line at the end.
    let client = http::create_http_client();
    let mut verified = Vec::new();

    if let Some(ref key) = cf_key {
        // `print!` + flush so the "valid"/"failed" suffix lands on the same
        // line once the async check completes.
        print!("Verifying CurseForge API key... ");
        std::io::stdout().flush().ok();
        match verify_curseforge(&client, key).await {
            Ok(()) => {
                println!("valid");
                verified.push("CurseForge");
            },
            Err(e) => {
                println!("failed ({e})");
                if !prompt_yes_no(
                    "CurseForge key appears invalid. Save anyway?",
                    false,
                    false,
                )? {
                    cf_key = None;
                }
            },
        }
    }

    if let Some(ref token) = mr_token {
        print!("Verifying Modrinth token... ");
        std::io::stdout().flush().ok();
        match verify_modrinth(&client, token).await {
            Ok(()) => {
                println!("valid");
                verified.push("Modrinth");
            },
            Err(e) => {
                println!("failed ({e})");
                if !prompt_yes_no(
                    "Modrinth token appears invalid. Save anyway?",
                    false,
                    false,
                )? {
                    mr_token = None;
                }
            },
        }
    }

    if let Some(ref token) = gh_token {
        print!("Verifying GitHub access token... ");
        std::io::stdout().flush().ok();
        match verify_github(&client, token).await {
            Ok(()) => {
                println!("valid");
                verified.push("GitHub");
            },
            Err(e) => {
                println!("failed ({e})");
                if !prompt_yes_no(
                    "GitHub token appears invalid. Save anyway?",
                    false,
                    false,
                )? {
                    gh_token = None;
                }
            },
        }
    }

    // Persist: each surviving credential goes to the keyring AND the
    // in-memory file model, which is saved once at the end.
    let mut creds = PakkerCredentialsFile::load()?;

    if let Some(key) = cf_key {
        let key = key.trim().to_string();
        if !key.is_empty() {
            set_keyring_secret("curseforge_api_key", &key)?;
            creds.curseforge_api_key = Some(key);
        }
    }

    if let Some(token) = mr_token {
        let token = token.trim().to_string();
        if !token.is_empty() {
            set_keyring_secret("modrinth_token", &token)?;
            creds.modrinth_token = Some(token);
        }
    }

    if let Some(token) = gh_token {
        let token = token.trim().to_string();
        if !token.is_empty() {
            set_keyring_secret("github_access_token", &token)?;
            creds.github_access_token = Some(token);
        }
    }

    creds.save()?;

    println!();
    if verified.is_empty() {
        println!("Credentials saved (unverified).");
    } else {
        println!("Credentials saved and verified: {}", verified.join(", "));
    }
    println!(
        "Credentials file: {}",
        PakkerCredentialsFile::get_path()?.display()
    );
    println!("Keyring service: pakker");

    Ok(())
}
|
||||
|
||||
async fn verify_curseforge(
|
||||
client: &reqwest::Client,
|
||||
api_key: &str,
|
||||
) -> Result<()> {
|
||||
let response = client
|
||||
.get("https://api.curseforge.com/v1/mods/238222")
|
||||
.header("x-api-key", api_key)
|
||||
.timeout(Duration::from_secs(10))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(PakkerError::PlatformApiError(format!(
|
||||
"HTTP {}",
|
||||
response.status()
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
async fn verify_modrinth(client: &reqwest::Client, token: &str) -> Result<()> {
|
||||
let response = client
|
||||
.get("https://api.modrinth.com/v2/user")
|
||||
.header("Authorization", token)
|
||||
.timeout(Duration::from_secs(10))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(PakkerError::PlatformApiError(format!(
|
||||
"HTTP {}",
|
||||
response.status()
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
async fn verify_github(client: &reqwest::Client, token: &str) -> Result<()> {
|
||||
let response = client
|
||||
.get("https://api.github.com/user")
|
||||
.header("Authorization", format!("Bearer {token}"))
|
||||
.header("User-Agent", "pakker")
|
||||
.timeout(Duration::from_secs(10))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(PakkerError::PlatformApiError(format!(
|
||||
"HTTP {}",
|
||||
response.status()
|
||||
)))
|
||||
}
|
||||
}
|
||||
151
crates/pakker-cli/src/cli/commands/credentials_test.rs
Normal file
151
crates/pakker-cli/src/cli/commands/credentials_test.rs
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{error::Result, http, model::credentials::ResolvedCredentials};
|
||||
|
||||
pub async fn execute() -> Result<()> {
|
||||
let creds = ResolvedCredentials::load();
|
||||
let client = http::create_http_client();
|
||||
|
||||
let spinner = ProgressBar::new_spinner();
|
||||
spinner.set_style(
|
||||
ProgressStyle::default_spinner()
|
||||
.template("{spinner:.green} {msg}")
|
||||
.expect("spinner template is valid"),
|
||||
);
|
||||
spinner.enable_steady_tick(Duration::from_millis(80));
|
||||
spinner.set_message("Testing credentials...");
|
||||
|
||||
let mut all_valid = true;
|
||||
let mut results = Vec::new();
|
||||
|
||||
// Test CurseForge
|
||||
if let Some(key) = creds.curseforge_api_key() {
|
||||
spinner.set_message("Testing CurseForge API key...");
|
||||
match test_curseforge(&client, key).await {
|
||||
Ok(()) => results.push(("CurseForge API Key", true, None)),
|
||||
Err(e) => {
|
||||
results.push(("CurseForge API Key", false, Some(e.to_string())));
|
||||
all_valid = false;
|
||||
},
|
||||
}
|
||||
} else {
|
||||
results.push(("CurseForge API Key", false, None));
|
||||
}
|
||||
|
||||
// Test Modrinth
|
||||
if let Some(token) = creds.modrinth_token() {
|
||||
spinner.set_message("Testing Modrinth token...");
|
||||
match test_modrinth(&client, token).await {
|
||||
Ok(()) => results.push(("Modrinth Token", true, None)),
|
||||
Err(e) => {
|
||||
results.push(("Modrinth Token", false, Some(e.to_string())));
|
||||
all_valid = false;
|
||||
},
|
||||
}
|
||||
} else {
|
||||
results.push(("Modrinth Token", false, None));
|
||||
}
|
||||
|
||||
// Test GitHub
|
||||
if let Some(token) = creds.github_access_token() {
|
||||
spinner.set_message("Testing GitHub access token...");
|
||||
match test_github(&client, token).await {
|
||||
Ok(()) => results.push(("GitHub Access Token", true, None)),
|
||||
Err(e) => {
|
||||
results.push(("GitHub Access Token", false, Some(e.to_string())));
|
||||
all_valid = false;
|
||||
},
|
||||
}
|
||||
} else {
|
||||
results.push(("GitHub Access Token", false, None));
|
||||
}
|
||||
|
||||
spinner.finish_and_clear();
|
||||
|
||||
println!("{}", "Credential Test Results:".cyan().bold());
|
||||
println!();
|
||||
|
||||
for (name, valid, error) in results {
|
||||
if let Some(err) = error {
|
||||
println!(
|
||||
" {} {} ({err})",
|
||||
format!("{name}:").yellow(),
|
||||
"invalid".red()
|
||||
);
|
||||
} else if valid {
|
||||
println!(" {} {}", format!("{name}:").yellow(), "valid".green());
|
||||
} else {
|
||||
println!(" {} {}", format!("{name}:").yellow(), "not configured");
|
||||
}
|
||||
}
|
||||
|
||||
println!();
|
||||
|
||||
if all_valid {
|
||||
println!("{}", "All configured credentials are valid.".green());
|
||||
} else {
|
||||
println!("{}", "Some credentials are invalid or expired.".red());
|
||||
println!("Use 'pakker credentials set' to update them.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn test_curseforge(
|
||||
client: &reqwest::Client,
|
||||
api_key: &str,
|
||||
) -> Result<()> {
|
||||
// Use a well-known mod (JEI) to verify key works for mod lookups
|
||||
let response = client
|
||||
.get("https://api.curseforge.com/v1/mods/238222")
|
||||
.header("x-api-key", api_key)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(crate::error::PakkerError::PlatformApiError(format!(
|
||||
"HTTP {}",
|
||||
response.status()
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
async fn test_modrinth(client: &reqwest::Client, token: &str) -> Result<()> {
|
||||
let response = client
|
||||
.get("https://api.modrinth.com/v2/user")
|
||||
.header("Authorization", token)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(crate::error::PakkerError::PlatformApiError(format!(
|
||||
"HTTP {}",
|
||||
response.status()
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
async fn test_github(client: &reqwest::Client, token: &str) -> Result<()> {
|
||||
let response = client
|
||||
.get("https://api.github.com/user")
|
||||
.header("Authorization", format!("Bearer {token}"))
|
||||
.header("User-Agent", "pakker")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(crate::error::PakkerError::PlatformApiError(format!(
|
||||
"HTTP {}",
|
||||
response.status()
|
||||
)))
|
||||
}
|
||||
}
|
||||
470
crates/pakker-cli/src/cli/commands/diff.rs
Normal file
470
crates/pakker-cli/src/cli/commands/diff.rs
Normal file
|
|
@ -0,0 +1,470 @@
|
|||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt::Write,
|
||||
fs,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
use crate::{cli::DiffArgs, error::Result, model::LockFile};
|
||||
|
||||
/// Kind of difference detected for a single project between two lockfiles.
#[derive(Debug)]
enum ChangeType {
    // Present only in the current lockfile.
    Added,
    // Present only in the old lockfile.
    Removed,
    // Present in both lockfiles, but the primary file name differs.
    Updated,
}
|
||||
|
||||
/// A single project-level difference between two lockfiles, as rendered by
/// the terminal and markdown writers.
#[derive(Debug)]
struct ProjectChange {
    // Display name — first entry of the project's name map, empty if none.
    name: String,
    // Whether the project was added, removed, or updated.
    change_type: ChangeType,
    // Primary file name in the old lockfile, when known.
    old_file: Option<String>,
    // Primary file name in the current lockfile, when known.
    new_file: Option<String>,
}
|
||||
|
||||
pub fn execute(args: &DiffArgs, _lockfile_path: &Path) -> Result<()> {
|
||||
log::info!("Comparing lockfiles");
|
||||
|
||||
// Load old lockfile
|
||||
let old_path = Path::new(&args.old_lockfile);
|
||||
let old_dir = old_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
let old_lockfile = LockFile::load(old_dir)?;
|
||||
|
||||
// Load current lockfile
|
||||
let current_path = args
|
||||
.current_lockfile
|
||||
.as_ref()
|
||||
.map_or_else(|| Path::new("pakku-lock.json"), Path::new);
|
||||
let current_dir = current_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
let current_lockfile = LockFile::load(current_dir)?;
|
||||
|
||||
// Compare metadata
|
||||
let mut changes = Vec::new();
|
||||
|
||||
// Check MC versions
|
||||
let old_mc: HashSet<_> = old_lockfile.mc_versions.iter().collect();
|
||||
let new_mc: HashSet<_> = current_lockfile.mc_versions.iter().collect();
|
||||
let mc_added: Vec<_> = new_mc.difference(&old_mc).collect();
|
||||
let mc_removed: Vec<_> = old_mc.difference(&new_mc).collect();
|
||||
|
||||
// Check loaders
|
||||
let old_loaders = &old_lockfile.loaders;
|
||||
let new_loaders = ¤t_lockfile.loaders;
|
||||
|
||||
// Compare projects
|
||||
let old_projects: HashMap<_, _> = old_lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.map(|p| (&p.pakku_id, p))
|
||||
.collect();
|
||||
let new_projects: HashMap<_, _> = current_lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.map(|p| (&p.pakku_id, p))
|
||||
.collect();
|
||||
|
||||
// Find added, removed, updated projects
|
||||
for (id, new_proj) in &new_projects {
|
||||
if !old_projects.contains_key(id) {
|
||||
changes.push(ProjectChange {
|
||||
name: new_proj.name.values().next().cloned().unwrap_or_default(),
|
||||
change_type: ChangeType::Added,
|
||||
old_file: None,
|
||||
new_file: new_proj.files.first().map(|f| f.file_name.clone()),
|
||||
});
|
||||
} else if let Some(old_proj) = old_projects.get(id) {
|
||||
let old_file_name = old_proj.files.first().map(|f| &f.file_name);
|
||||
let new_file_name = new_proj.files.first().map(|f| &f.file_name);
|
||||
|
||||
if old_file_name != new_file_name {
|
||||
changes.push(ProjectChange {
|
||||
name: new_proj
|
||||
.name
|
||||
.values()
|
||||
.next()
|
||||
.cloned()
|
||||
.unwrap_or_default(),
|
||||
change_type: ChangeType::Updated,
|
||||
old_file: old_file_name.cloned(),
|
||||
new_file: new_file_name.cloned(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (id, old_proj) in &old_projects {
|
||||
if !new_projects.contains_key(id) {
|
||||
changes.push(ProjectChange {
|
||||
name: old_proj.name.values().next().cloned().unwrap_or_default(),
|
||||
change_type: ChangeType::Removed,
|
||||
old_file: old_proj.files.first().map(|f| f.file_name.clone()),
|
||||
new_file: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Output results
|
||||
if let Some(path) = &args.markdown_diff {
|
||||
write_markdown_diff(
|
||||
path,
|
||||
&old_lockfile,
|
||||
¤t_lockfile,
|
||||
&changes,
|
||||
&mc_added,
|
||||
&mc_removed,
|
||||
old_loaders,
|
||||
new_loaders,
|
||||
args.verbose,
|
||||
args.header_size,
|
||||
)?;
|
||||
} else if let Some(path) = &args.markdown {
|
||||
write_markdown(
|
||||
path,
|
||||
&old_lockfile,
|
||||
¤t_lockfile,
|
||||
&changes,
|
||||
&mc_added,
|
||||
&mc_removed,
|
||||
old_loaders,
|
||||
new_loaders,
|
||||
args.verbose,
|
||||
args.header_size,
|
||||
)?;
|
||||
} else {
|
||||
print_terminal_diff(
|
||||
&old_lockfile,
|
||||
¤t_lockfile,
|
||||
&changes,
|
||||
&mc_added,
|
||||
&mc_removed,
|
||||
old_loaders,
|
||||
new_loaders,
|
||||
args.verbose,
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[expect(
|
||||
clippy::too_many_arguments,
|
||||
reason = "diff formatting requires all display parameters"
|
||||
)]
|
||||
fn print_terminal_diff(
|
||||
old: &LockFile,
|
||||
new: &LockFile,
|
||||
changes: &[ProjectChange],
|
||||
mc_added: &[&&String],
|
||||
mc_removed: &[&&String],
|
||||
old_loaders: &HashMap<String, String>,
|
||||
new_loaders: &HashMap<String, String>,
|
||||
verbose: bool,
|
||||
) {
|
||||
println!("## Lockfile Comparison\n");
|
||||
|
||||
// Target
|
||||
if old.target != new.target {
|
||||
println!("Target: {:?} -> {:?}", old.target, new.target);
|
||||
}
|
||||
|
||||
// MC versions
|
||||
if !mc_removed.is_empty() || !mc_added.is_empty() {
|
||||
println!("Minecraft Versions:");
|
||||
for v in mc_removed {
|
||||
println!(" - {v}");
|
||||
}
|
||||
for v in mc_added {
|
||||
println!(" + {v}");
|
||||
}
|
||||
}
|
||||
|
||||
// Loaders
|
||||
let mut loader_changes = false;
|
||||
for (name, old_ver) in old_loaders {
|
||||
if let Some(new_ver) = new_loaders.get(name) {
|
||||
if old_ver != new_ver {
|
||||
if !loader_changes {
|
||||
println!("\nLoaders:");
|
||||
loader_changes = true;
|
||||
}
|
||||
println!(" ~ {name}: {old_ver} -> {new_ver}");
|
||||
}
|
||||
} else {
|
||||
if !loader_changes {
|
||||
println!("\nLoaders:");
|
||||
loader_changes = true;
|
||||
}
|
||||
println!(" - {name}: {old_ver}");
|
||||
}
|
||||
}
|
||||
for (name, new_ver) in new_loaders {
|
||||
if !old_loaders.contains_key(name) {
|
||||
if !loader_changes {
|
||||
println!("\nLoaders:");
|
||||
loader_changes = true;
|
||||
}
|
||||
println!(" + {name}: {new_ver}");
|
||||
}
|
||||
}
|
||||
|
||||
// Projects
|
||||
if !changes.is_empty() {
|
||||
println!("\nProjects:");
|
||||
for change in changes {
|
||||
match change.change_type {
|
||||
ChangeType::Added => {
|
||||
print!(" + {}", change.name);
|
||||
if verbose && let Some(file) = &change.new_file {
|
||||
print!(" ({file})");
|
||||
}
|
||||
println!();
|
||||
},
|
||||
ChangeType::Removed => {
|
||||
print!(" - {}", change.name);
|
||||
if verbose && let Some(file) = &change.old_file {
|
||||
print!(" ({file})");
|
||||
}
|
||||
println!();
|
||||
},
|
||||
ChangeType::Updated => {
|
||||
print!(" ~ {}", change.name);
|
||||
if verbose
|
||||
&& let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
|
||||
{
|
||||
print!(" ({old} -> {new})");
|
||||
}
|
||||
println!();
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if mc_removed.is_empty()
|
||||
&& mc_added.is_empty()
|
||||
&& !loader_changes
|
||||
&& changes.is_empty()
|
||||
{
|
||||
println!("✓ No differences found");
|
||||
}
|
||||
}
|
||||
|
||||
#[expect(
|
||||
clippy::too_many_arguments,
|
||||
reason = "diff markdown writer requires all context parameters"
|
||||
)]
|
||||
fn write_markdown_diff(
|
||||
path: &str,
|
||||
old: &LockFile,
|
||||
new: &LockFile,
|
||||
changes: &[ProjectChange],
|
||||
mc_added: &[&&String],
|
||||
mc_removed: &[&&String],
|
||||
old_loaders: &HashMap<String, String>,
|
||||
new_loaders: &HashMap<String, String>,
|
||||
verbose: bool,
|
||||
_header_size: usize,
|
||||
) -> Result<()> {
|
||||
let mut content = String::new();
|
||||
content.push_str("```diff\n");
|
||||
|
||||
// Metadata changes
|
||||
if old.target != new.target {
|
||||
let _ = writeln!(content, "- Target: {:?}", old.target);
|
||||
let _ = writeln!(content, "+ Target: {:?}", new.target);
|
||||
}
|
||||
|
||||
if !mc_removed.is_empty() || !mc_added.is_empty() {
|
||||
content.push_str("\nMinecraft Versions:\n");
|
||||
for v in mc_removed {
|
||||
let _ = writeln!(content, "- {v}");
|
||||
}
|
||||
for v in mc_added {
|
||||
let _ = writeln!(content, "+ {v}");
|
||||
}
|
||||
}
|
||||
|
||||
// Loaders
|
||||
for (name, old_ver) in old_loaders {
|
||||
if let Some(new_ver) = new_loaders.get(name) {
|
||||
if old_ver != new_ver {
|
||||
let _ = writeln!(content, "- {name}: {old_ver}");
|
||||
let _ = writeln!(content, "+ {name}: {new_ver}");
|
||||
}
|
||||
} else {
|
||||
let _ = writeln!(content, "- {name}: {old_ver}");
|
||||
}
|
||||
}
|
||||
for (name, new_ver) in new_loaders {
|
||||
if !old_loaders.contains_key(name) {
|
||||
let _ = writeln!(content, "+ {name}: {new_ver}");
|
||||
}
|
||||
}
|
||||
|
||||
// Projects
|
||||
if !changes.is_empty() {
|
||||
content.push_str("\nProjects:\n");
|
||||
for change in changes {
|
||||
match change.change_type {
|
||||
ChangeType::Added => {
|
||||
let _ = write!(content, "+ {}", change.name);
|
||||
if verbose && let Some(file) = &change.new_file {
|
||||
let _ = write!(content, " ({file})");
|
||||
}
|
||||
content.push('\n');
|
||||
},
|
||||
ChangeType::Removed => {
|
||||
let _ = write!(content, "- {}", change.name);
|
||||
if verbose && let Some(file) = &change.old_file {
|
||||
let _ = write!(content, " ({file})");
|
||||
}
|
||||
content.push('\n');
|
||||
},
|
||||
ChangeType::Updated => {
|
||||
if verbose {
|
||||
if let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
|
||||
{
|
||||
let _ = writeln!(content, "- {} ({})", change.name, old);
|
||||
let _ = writeln!(content, "+ {} ({})", change.name, new);
|
||||
}
|
||||
} else {
|
||||
let _ = writeln!(content, "~ {}", change.name);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
content.push_str("```\n");
|
||||
fs::write(path, content)?;
|
||||
println!("Diff exported to {path}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[expect(
|
||||
clippy::too_many_arguments,
|
||||
reason = "diff markdown writer requires all context parameters"
|
||||
)]
|
||||
fn write_markdown(
|
||||
path: &str,
|
||||
old: &LockFile,
|
||||
new: &LockFile,
|
||||
changes: &[ProjectChange],
|
||||
mc_added: &[&&String],
|
||||
mc_removed: &[&&String],
|
||||
old_loaders: &HashMap<String, String>,
|
||||
new_loaders: &HashMap<String, String>,
|
||||
verbose: bool,
|
||||
header_size: usize,
|
||||
) -> Result<()> {
|
||||
let header = "#".repeat(header_size.min(5));
|
||||
let mut content = String::new();
|
||||
|
||||
let _ = write!(content, "{header} Lockfile Comparison\n\n");
|
||||
|
||||
// Target
|
||||
if old.target != new.target {
|
||||
let _ = write!(
|
||||
content,
|
||||
"**Target:** {:?} → {:?}\n\n",
|
||||
old.target, new.target
|
||||
);
|
||||
}
|
||||
|
||||
// MC versions
|
||||
if !mc_removed.is_empty() || !mc_added.is_empty() {
|
||||
let _ = write!(content, "{header} Minecraft Versions\n\n");
|
||||
for v in mc_removed {
|
||||
let _ = writeln!(content, "- ~~{v}~~");
|
||||
}
|
||||
for v in mc_added {
|
||||
let _ = writeln!(content, "- **{v}** (new)");
|
||||
}
|
||||
content.push('\n');
|
||||
}
|
||||
|
||||
// Loaders
|
||||
let mut has_loader_changes = false;
|
||||
let mut loader_content = String::new();
|
||||
for (name, old_ver) in old_loaders {
|
||||
if let Some(new_ver) = new_loaders.get(name) {
|
||||
if old_ver != new_ver {
|
||||
has_loader_changes = true;
|
||||
let _ = writeln!(loader_content, "- **{name}:** {old_ver} → {new_ver}");
|
||||
}
|
||||
} else {
|
||||
has_loader_changes = true;
|
||||
let _ = writeln!(loader_content, "- ~~{name}: {old_ver}~~");
|
||||
}
|
||||
}
|
||||
for (name, new_ver) in new_loaders {
|
||||
if !old_loaders.contains_key(name) {
|
||||
has_loader_changes = true;
|
||||
let _ = writeln!(loader_content, "- **{name}: {new_ver}** (new)");
|
||||
}
|
||||
}
|
||||
if has_loader_changes {
|
||||
let _ = write!(content, "{header} Loaders\n\n");
|
||||
content.push_str(&loader_content);
|
||||
content.push('\n');
|
||||
}
|
||||
|
||||
// Projects
|
||||
if !changes.is_empty() {
|
||||
let _ = write!(content, "{header} Projects\n\n");
|
||||
|
||||
let added: Vec<_> = changes
|
||||
.iter()
|
||||
.filter(|c| matches!(c.change_type, ChangeType::Added))
|
||||
.collect();
|
||||
let removed: Vec<_> = changes
|
||||
.iter()
|
||||
.filter(|c| matches!(c.change_type, ChangeType::Removed))
|
||||
.collect();
|
||||
let updated: Vec<_> = changes
|
||||
.iter()
|
||||
.filter(|c| matches!(c.change_type, ChangeType::Updated))
|
||||
.collect();
|
||||
|
||||
if !added.is_empty() {
|
||||
let _ = write!(content, "{}# Added ({})\n\n", header, added.len());
|
||||
for change in added {
|
||||
let _ = write!(content, "- **{}**", change.name);
|
||||
if verbose && let Some(file) = &change.new_file {
|
||||
let _ = write!(content, " ({file})");
|
||||
}
|
||||
content.push('\n');
|
||||
}
|
||||
content.push('\n');
|
||||
}
|
||||
|
||||
if !removed.is_empty() {
|
||||
let _ = write!(content, "{}# Removed ({})\n\n", header, removed.len());
|
||||
for change in removed {
|
||||
let _ = write!(content, "- ~~{}~~", change.name);
|
||||
if verbose && let Some(file) = &change.old_file {
|
||||
let _ = write!(content, " ({file})");
|
||||
}
|
||||
content.push('\n');
|
||||
}
|
||||
content.push('\n');
|
||||
}
|
||||
|
||||
if !updated.is_empty() {
|
||||
let _ = write!(content, "{}# Updated ({})\n\n", header, updated.len());
|
||||
for change in updated {
|
||||
let _ = write!(content, "- **{}**", change.name);
|
||||
if verbose
|
||||
&& let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
|
||||
{
|
||||
let _ = write!(content, " ({old} → {new})");
|
||||
}
|
||||
content.push('\n');
|
||||
}
|
||||
content.push('\n');
|
||||
}
|
||||
}
|
||||
|
||||
fs::write(path, content)?;
|
||||
println!("Diff exported to {path}");
|
||||
Ok(())
|
||||
}
|
||||
316
crates/pakker-cli/src/cli/commands/export.rs
Normal file
316
crates/pakker-cli/src/cli/commands/export.rs
Normal file
|
|
@ -0,0 +1,316 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::ExportArgs,
|
||||
error::{PakkerError, Result},
|
||||
export::Exporter,
|
||||
ipc::{IpcCoordinator, OperationType},
|
||||
model::{Config, LockFile, fork::LocalConfig},
|
||||
utils::hash::compute_sha256_bytes,
|
||||
};
|
||||
|
||||
#[expect(clippy::future_not_send, reason = "not required to be Send")]
|
||||
pub async fn execute(
|
||||
args: ExportArgs,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<()> {
|
||||
if let Some(ref profile) = args.profile {
|
||||
log::info!("Exporting with profile: {profile}");
|
||||
} else {
|
||||
log::info!("Exporting all profiles");
|
||||
}
|
||||
|
||||
// Handle --no-server flag
|
||||
if args.no_server {
|
||||
log::info!("Server content will be excluded from export");
|
||||
}
|
||||
|
||||
// Handle --show-io-errors flag
|
||||
let show_io_errors = args.show_io_errors;
|
||||
if show_io_errors {
|
||||
log::info!("IO errors will be shown during export");
|
||||
}
|
||||
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
|
||||
// IPC coordination - prevent concurrent operations on the same modpack
|
||||
let ipc = IpcCoordinator::new(config_dir)?;
|
||||
let ipc_timeout = std::time::Duration::from_secs(60);
|
||||
|
||||
// Check for conflicting export operations
|
||||
let conflicting = ipc.get_running_operations(OperationType::Export);
|
||||
if !conflicting.is_empty() {
|
||||
log::info!(
|
||||
"Waiting for conflicting operations to complete: {:?}",
|
||||
conflicting
|
||||
.iter()
|
||||
.map(|op| (op.id.clone(), op.pid))
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
ipc
|
||||
.wait_for_conflicts(OperationType::Export, ipc_timeout)
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Register this export operation
|
||||
let _op_guard = ipc.register_operation(OperationType::Export)?;
|
||||
|
||||
// Load config to check for fork configuration
|
||||
let config = Config::load(config_dir)?;
|
||||
let local_config = LocalConfig::load(config_dir).ok();
|
||||
|
||||
// Check if this is a fork with parent
|
||||
let lockfile = if let Some(local_cfg) = &local_config {
|
||||
if local_cfg.parent.is_some() {
|
||||
log::info!("Fork detected - merging parent and local lockfiles");
|
||||
|
||||
// Try parent's lockfile
|
||||
let parent_paths = [".pakku/parent", ".pakker/parent"];
|
||||
let mut parent_lockfile_path = None;
|
||||
let mut lockfile_name = "pakku-lock.json";
|
||||
|
||||
for parent_dir in &parent_paths {
|
||||
// Try pakker-lock.json first
|
||||
let check_path = Path::new(parent_dir).join("pakker-lock.json");
|
||||
if check_path.exists() {
|
||||
parent_lockfile_path = Some(parent_dir);
|
||||
lockfile_name = "pakker-lock.json";
|
||||
break;
|
||||
}
|
||||
// Fall back to pakku-lock.json
|
||||
let check_path = Path::new(parent_dir).join("pakku-lock.json");
|
||||
if check_path.exists() {
|
||||
parent_lockfile_path = Some(parent_dir);
|
||||
lockfile_name = "pakku-lock.json";
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent_dir) = parent_lockfile_path {
|
||||
// Load parent lockfile
|
||||
let parent_lockfile = LockFile::load(Path::new(parent_dir))?;
|
||||
|
||||
// Verify parent lockfile hash for integrity
|
||||
if let Some(stored_hash) = &local_cfg.parent_lock_hash {
|
||||
let parent_lock_path = Path::new(parent_dir).join(lockfile_name);
|
||||
let parent_lock_content = std::fs::read(&parent_lock_path)?;
|
||||
let computed_hash = compute_sha256_bytes(&parent_lock_content);
|
||||
|
||||
if &computed_hash != stored_hash {
|
||||
log::warn!(
|
||||
"Parent lockfile hash mismatch - parent may have changed since \
|
||||
last sync"
|
||||
);
|
||||
log::warn!("Expected: {stored_hash}, Got: {computed_hash}");
|
||||
}
|
||||
}
|
||||
|
||||
// Load local lockfile if it exists
|
||||
if lockfile_path.exists() {
|
||||
log::info!("Merging parent lockfile with local overrides");
|
||||
let local_lockfile =
|
||||
LockFile::load_with_validation(lockfile_dir, false)?;
|
||||
|
||||
// Merge: start with parent, override with local
|
||||
merge_lockfiles(parent_lockfile, &local_lockfile, local_cfg)
|
||||
} else {
|
||||
log::info!("No local lockfile - using parent lockfile");
|
||||
parent_lockfile
|
||||
}
|
||||
} else {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"Fork configured but parent lockfile not found",
|
||||
)));
|
||||
}
|
||||
} else {
|
||||
// No fork, use local lockfile
|
||||
if lockfile_path.exists() {
|
||||
LockFile::load(lockfile_dir)?
|
||||
} else {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"No lockfile found",
|
||||
)));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// No local config, try local lockfile or fall back to parent
|
||||
if lockfile_path.exists() {
|
||||
LockFile::load(lockfile_dir)?
|
||||
} else {
|
||||
// Try parent's lockfile as fallback
|
||||
let parent_paths = [".pakku/parent", ".pakker/parent"];
|
||||
let mut parent_lockfile = None;
|
||||
let mut lockfile_name = "pakku-lock.json";
|
||||
|
||||
for parent_dir in &parent_paths {
|
||||
// Try pakker-lock.json first
|
||||
let lockfile_path_check =
|
||||
Path::new(parent_dir).join("pakker-lock.json");
|
||||
if lockfile_path_check.exists() {
|
||||
parent_lockfile = Some(parent_dir);
|
||||
lockfile_name = "pakker-lock.json";
|
||||
break;
|
||||
}
|
||||
// Fall back to pakku-lock.json
|
||||
let lockfile_path_check = Path::new(parent_dir).join("pakku-lock.json");
|
||||
if lockfile_path_check.exists() {
|
||||
parent_lockfile = Some(parent_dir);
|
||||
lockfile_name = "pakku-lock.json";
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
match parent_lockfile {
|
||||
Some(parent_dir) => {
|
||||
log::info!(
|
||||
"Using parent's lockfile ({lockfile_name}) from {parent_dir}"
|
||||
);
|
||||
LockFile::load(Path::new(parent_dir))?
|
||||
},
|
||||
None => {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"No lockfile found (neither local nor parent's)",
|
||||
)));
|
||||
},
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Determine output path
|
||||
let output_path = if args.pakker_layout {
|
||||
"build"
|
||||
} else {
|
||||
args.output.as_deref().unwrap_or("exports")
|
||||
};
|
||||
|
||||
// Create exporter
|
||||
let exporter = Exporter::new(".");
|
||||
|
||||
// Export based on profile argument
|
||||
if let Some(profile_name) = args.profile {
|
||||
// Single profile export (backwards compatible)
|
||||
let output_file = exporter
|
||||
.export(&profile_name, &lockfile, &config, Path::new(output_path))
|
||||
.await?;
|
||||
|
||||
println!("Export complete: {}", output_file.display());
|
||||
} else {
|
||||
// Multi-profile export (Pakker-compatible default behavior)
|
||||
let output_files = exporter
|
||||
.export_all_profiles(&lockfile, &config, Path::new(output_path))
|
||||
.await?;
|
||||
|
||||
println!("\nExported {} files:", output_files.len());
|
||||
for output_file in output_files {
|
||||
println!(" - {}", output_file.display());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Merges parent lockfile with local lockfile
|
||||
/// Parent projects are used as base, local projects override parent projects
|
||||
/// with same slug
|
||||
fn merge_lockfiles(
|
||||
parent: LockFile,
|
||||
local: &LockFile,
|
||||
local_config: &LocalConfig,
|
||||
) -> LockFile {
|
||||
let mut merged = LockFile {
|
||||
target: parent.target, // Use parent target
|
||||
mc_versions: parent.mc_versions, // Use parent MC versions
|
||||
loaders: parent.loaders, // Use parent loaders
|
||||
projects: Vec::new(),
|
||||
lockfile_version: parent.lockfile_version,
|
||||
};
|
||||
|
||||
// Collect local project slugs for override detection
|
||||
let mut local_slugs = std::collections::HashSet::new();
|
||||
for project in &local.projects {
|
||||
for slug in project.slug.values() {
|
||||
local_slugs.insert(slug.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Collect excluded slugs from local config
|
||||
let excluded: std::collections::HashSet<_> =
|
||||
local_config.excludes.iter().collect();
|
||||
|
||||
// Add parent projects that are NOT overridden by local and NOT excluded
|
||||
let mut parent_kept = 0usize;
|
||||
|
||||
for parent_project in &parent.projects {
|
||||
let is_overridden = parent_project
|
||||
.slug
|
||||
.values()
|
||||
.any(|slug| local_slugs.contains(slug));
|
||||
|
||||
let is_excluded = parent_project
|
||||
.slug
|
||||
.values()
|
||||
.any(|slug| excluded.contains(slug))
|
||||
|| parent_project
|
||||
.name
|
||||
.values()
|
||||
.any(|name| excluded.contains(name));
|
||||
|
||||
if is_overridden || is_excluded {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut project = parent_project.clone();
|
||||
|
||||
// Apply local config attribute overrides (side, type, etc.)
|
||||
for (key, local_proj_cfg) in &local_config.projects {
|
||||
let matches = project.slug.values().any(|s| s == key)
|
||||
|| project.name.values().any(|n| n == key)
|
||||
|| project.pakku_id.as_ref() == Some(key);
|
||||
|
||||
if matches {
|
||||
if let Some(t) = local_proj_cfg.r#type {
|
||||
project.r#type = t;
|
||||
}
|
||||
if let Some(s) = local_proj_cfg.side {
|
||||
project.side = s;
|
||||
}
|
||||
if let Some(us) = local_proj_cfg.update_strategy {
|
||||
project.update_strategy = us;
|
||||
}
|
||||
if let Some(r) = local_proj_cfg.redistributable {
|
||||
project.redistributable = r;
|
||||
}
|
||||
if let Some(ref sp) = local_proj_cfg.subpath {
|
||||
project.subpath = Some(sp.clone());
|
||||
}
|
||||
if let Some(ref aliases) = local_proj_cfg.aliases {
|
||||
project.aliases = aliases.iter().cloned().collect();
|
||||
}
|
||||
if let Some(e) = local_proj_cfg.export {
|
||||
project.export = e;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
merged.projects.push(project);
|
||||
parent_kept += 1;
|
||||
}
|
||||
|
||||
// Add all local projects
|
||||
merged.projects.extend(local.projects.clone());
|
||||
|
||||
println!(
|
||||
"Merged fork: {} parent + {} local = {} total projects",
|
||||
parent_kept,
|
||||
local.projects.len(),
|
||||
merged.projects.len()
|
||||
);
|
||||
|
||||
merged
|
||||
}
|
||||
51
crates/pakker-cli/src/cli/commands/fetch.rs
Normal file
51
crates/pakker-cli/src/cli/commands/fetch.rs
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::{
|
||||
cli::FetchArgs,
|
||||
error::Result,
|
||||
fetch::Fetcher,
|
||||
ipc::{IpcCoordinator, OperationGuard, OperationType},
|
||||
model::{Config, LockFile},
|
||||
};
|
||||
|
||||
pub async fn execute(
|
||||
args: FetchArgs,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<()> {
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
|
||||
let lockfile = LockFile::load(lockfile_dir)?;
|
||||
let config = Config::load(config_dir)?;
|
||||
|
||||
// Create IPC coordinator for this modpack
|
||||
let working_dir = PathBuf::from(".");
|
||||
let coordinator = IpcCoordinator::new(&working_dir)?;
|
||||
|
||||
// Check for conflicting operations
|
||||
if coordinator.has_running_operation(OperationType::Fetch) {
|
||||
// Wait for conflicting operations to complete with timeout
|
||||
let timeout = std::time::Duration::from_secs(args.timeout.unwrap_or(300));
|
||||
coordinator
|
||||
.wait_for_conflicts(OperationType::Fetch, timeout)
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Register this fetch operation
|
||||
let operation_id = coordinator.register_operation(OperationType::Fetch)?;
|
||||
let _guard = OperationGuard::new(coordinator, operation_id);
|
||||
|
||||
// Create fetcher with shelve option
|
||||
let fetcher = Fetcher::new(".")
|
||||
.with_shelve(args.shelve)
|
||||
.with_retry(args.retry);
|
||||
|
||||
// Fetch all projects (progress indicators handled in fetch.rs)
|
||||
fetcher.fetch_all(&lockfile, &config).await?;
|
||||
|
||||
println!("Fetch complete");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
899
crates/pakker-cli/src/cli/commands/fork.rs
Normal file
899
crates/pakker-cli/src/cli/commands/fork.rs
Normal file
|
|
@ -0,0 +1,899 @@
|
|||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fs,
|
||||
io::Write,
|
||||
path::Path,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
|
||||
use crate::{
|
||||
cli::ForkArgs,
|
||||
error::PakkerError,
|
||||
git::{self, VcsType},
|
||||
model::{
|
||||
LockFile,
|
||||
fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content},
|
||||
},
|
||||
};
|
||||
|
||||
const PAKKU_DIR: &str = ".pakku";
const PARENT_DIR_NAME: &str = "parent";

/// Relative path of the cached parent checkout (`.pakku/parent`).
fn parent_dir() -> String {
  [PAKKU_DIR, PARENT_DIR_NAME].join("/")
}
|
||||
|
||||
/// Main entry point for fork commands
|
||||
pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> {
|
||||
match &args.subcommand {
|
||||
crate::cli::ForkSubcommand::Init {
|
||||
git_url,
|
||||
from_current,
|
||||
from_path,
|
||||
ref_name,
|
||||
ref_type,
|
||||
remote,
|
||||
} => {
|
||||
execute_init(
|
||||
git_url.clone(),
|
||||
*from_current,
|
||||
from_path.clone(),
|
||||
ref_name.clone(),
|
||||
*ref_type,
|
||||
remote.clone(),
|
||||
)
|
||||
},
|
||||
crate::cli::ForkSubcommand::Set {
|
||||
git_url,
|
||||
ref_name,
|
||||
ref_type,
|
||||
remote,
|
||||
} => {
|
||||
execute_set(git_url.clone(), ref_name.clone(), *ref_type, remote.clone())
|
||||
},
|
||||
crate::cli::ForkSubcommand::Show => execute_show(),
|
||||
crate::cli::ForkSubcommand::Unset => execute_unset(),
|
||||
crate::cli::ForkSubcommand::Sync => execute_sync(),
|
||||
crate::cli::ForkSubcommand::Promote { projects } => {
|
||||
execute_promote(projects)
|
||||
},
|
||||
crate::cli::ForkSubcommand::Exclude { projects } => {
|
||||
execute_exclude(projects)
|
||||
},
|
||||
crate::cli::ForkSubcommand::Include { projects } => {
|
||||
execute_include(projects)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_git_url(url: &str) -> Result<(), PakkerError> {
|
||||
// Allow network URLs, SSH-style URLs, or local filesystem paths (tests use
|
||||
// local bare repos)
|
||||
if url.starts_with("https://")
|
||||
|| url.starts_with("git@")
|
||||
|| url.starts_with("ssh://")
|
||||
|| url.starts_with("file://")
|
||||
|| url.starts_with('/')
|
||||
{
|
||||
Ok(())
|
||||
} else {
|
||||
Err(PakkerError::Fork(format!(
|
||||
"Invalid git URL: {url}. Expected https://, git@, ssh://, file://, or \
|
||||
absolute filesystem path."
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
fn execute_init(
|
||||
git_url: Option<String>,
|
||||
from_current: bool,
|
||||
from_path: Option<String>,
|
||||
ref_name: Option<String>,
|
||||
ref_type: Option<RefType>,
|
||||
remote: Option<String>,
|
||||
) -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
|
||||
// Validate that pakker.json exists for fork operations
|
||||
let pakker_json_path = config_dir.join("pakker.json");
|
||||
let pakku_json_path = config_dir.join("pakku.json");
|
||||
|
||||
if !pakker_json_path.exists() && pakku_json_path.exists() {
|
||||
return Err(PakkerError::Fork(
|
||||
"Forking is a pakker-specific feature and requires pakker.json. \nFound \
|
||||
pakku.json but not pakker.json. Please migrate to pakker.json to use \
|
||||
fork functionality.\nYou can convert your pakku.json to pakker.json by \
|
||||
renaming the file."
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut local_config = LocalConfig::load(config_dir).unwrap_or_default();
|
||||
|
||||
// Check if parent already configured
|
||||
if local_config.parent.is_some()
|
||||
&& let Some(parent) = &local_config.parent
|
||||
{
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Parent already configured: {}",
|
||||
parent.id
|
||||
)));
|
||||
}
|
||||
|
||||
// Resolve defaults early to avoid shadowing/confusion
|
||||
let resolved_remote = remote.unwrap_or_else(|| "origin".to_string());
|
||||
let resolved_ref = ref_name.unwrap_or_else(|| "main".to_string());
|
||||
|
||||
// Parent path (where we keep the cloned parent)
|
||||
let parent_path_str = parent_dir();
|
||||
|
||||
// Branch: from_current, from_path, or git_url
|
||||
let mut cloned_from_local = false;
|
||||
let url = if from_current {
|
||||
// Detect git URL from current directory
|
||||
if !git::is_git_repository(config_dir) {
|
||||
return Err(PakkerError::Fork(
|
||||
"Not a git repository. Use --git-url or run 'git init' first."
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
git::get_remote_url(config_dir, &resolved_remote)?
|
||||
} else if let Some(fp) = from_path {
|
||||
// Use provided local path as source; infer upstream remote from it
|
||||
let path = Path::new(&fp);
|
||||
if !git::is_git_repository(path) {
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Provided path is not a git repository: {}",
|
||||
path.display()
|
||||
)));
|
||||
}
|
||||
|
||||
// Infer upstream remote URL from the existing local clone
|
||||
let upstream_url = git::get_primary_remote_url(path)?;
|
||||
|
||||
// Reject file:// or non-network remotes
|
||||
validate_git_url(&upstream_url)?;
|
||||
|
||||
// Ensure working tree is clean
|
||||
let vcs_type = git::detect_vcs_type(path);
|
||||
if git::repo_has_uncommitted_changes(path)? {
|
||||
let error_msg = match vcs_type {
|
||||
VcsType::Git => {
|
||||
"Local repository at --from-path has uncommitted changes. Commit or \
|
||||
stash them before proceeding."
|
||||
},
|
||||
VcsType::Jujutsu => {
|
||||
"Local repository at --from-path has uncommitted changes. Run 'jj \
|
||||
commit' to save changes before proceeding."
|
||||
},
|
||||
VcsType::None => {
|
||||
"Local repository at --from-path has uncommitted changes. Please \
|
||||
clean the directory before proceeding."
|
||||
},
|
||||
};
|
||||
return Err(PakkerError::Fork(error_msg.to_string()));
|
||||
}
|
||||
|
||||
// VCS-specific validation
|
||||
match vcs_type {
|
||||
VcsType::Git => {
|
||||
// Attempt lightweight fetch of remote refs to refresh remote tracking
|
||||
match git::fetch_remote_light(path, &resolved_remote, &resolved_ref) {
|
||||
Ok(()) => println!("Fetched remote refs for verification"),
|
||||
Err(e) => {
|
||||
log::warn!("Lightweight fetch from upstream failed: {e}");
|
||||
println!(
|
||||
"Warning: could not perform lightweight fetch from upstream. \
|
||||
Proceeding with local clone; subsequent sync may require \
|
||||
network."
|
||||
);
|
||||
},
|
||||
}
|
||||
|
||||
// Compare local ref vs remote ref
|
||||
let remote_ref = format!("{resolved_remote}/{resolved_ref}");
|
||||
match git::ahead_behind(path, &resolved_ref, &remote_ref) {
|
||||
Ok((ahead, _behind)) => {
|
||||
if ahead > 0 {
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Local repository at {} has {} commits not present on \
|
||||
upstream {}. Push or use --git-url if you intend to use an \
|
||||
upstream that contains these commits.",
|
||||
path.display(),
|
||||
ahead,
|
||||
upstream_url
|
||||
)));
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
log::warn!("Could not compute ahead/behind: {e}");
|
||||
},
|
||||
}
|
||||
},
|
||||
VcsType::Jujutsu => {
|
||||
// For jujutsu, we skip git-specific remote validation since jj has
|
||||
// different synchronization patterns
|
||||
println!(
|
||||
"Warning: Skipping remote validation for jujutsu repository. Ensure \
|
||||
your jj repo is in sync with remote before proceeding."
|
||||
);
|
||||
|
||||
// Check if there are any changes that haven't been pushed to the remote
|
||||
if let Ok(output) = std::process::Command::new("jj")
|
||||
.args(["log", "--limit", "1", "--template", ""])
|
||||
.current_dir(path)
|
||||
.output()
|
||||
&& !output.stdout.is_empty()
|
||||
{
|
||||
println!(
|
||||
"Note: Jujutsu repository detected. Make sure to run 'jj git \
|
||||
push' to sync changes with remote if needed."
|
||||
);
|
||||
}
|
||||
},
|
||||
VcsType::None => {
|
||||
// No VCS-specific validation needed
|
||||
},
|
||||
}
|
||||
|
||||
// Compute parent lock/config hashes for reproducibility
|
||||
let parent_lock_path = if path.join("pakker-lock.json").exists() {
|
||||
path.join("pakker-lock.json")
|
||||
} else {
|
||||
path.join("pakku-lock.json")
|
||||
};
|
||||
|
||||
if parent_lock_path.exists() {
|
||||
let lock_content =
|
||||
fs::read_to_string(&parent_lock_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
|
||||
})?;
|
||||
let lock_hash = hash_content(&lock_content);
|
||||
local_config.parent_lock_hash = Some(lock_hash);
|
||||
}
|
||||
|
||||
let parent_config_path = if path.join("pakker.json").exists() {
|
||||
path.join("pakker.json")
|
||||
} else {
|
||||
path.join("pakku.json")
|
||||
};
|
||||
|
||||
if parent_config_path.exists() {
|
||||
let config_content =
|
||||
fs::read_to_string(&parent_config_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to read parent config: {e}"))
|
||||
})?;
|
||||
let config_hash = hash_content(&config_content);
|
||||
local_config.parent_config_hash = Some(config_hash);
|
||||
}
|
||||
|
||||
// Now clone from the local path into .pakku/parent, this avoids
|
||||
// re-downloading objects
|
||||
let parent_path = Path::new(&parent_path_str);
|
||||
|
||||
let spinner = ProgressBar::new_spinner();
|
||||
spinner.set_style(
|
||||
ProgressStyle::default_spinner()
|
||||
.template("{spinner:.green} {msg}")
|
||||
.expect("spinner template is valid"),
|
||||
);
|
||||
spinner.enable_steady_tick(Duration::from_millis(80));
|
||||
spinner.set_message(format!(
|
||||
"Cloning parent repository from local path {}...",
|
||||
path.display()
|
||||
));
|
||||
git::clone_repository(&fp, parent_path, &resolved_ref, None)?;
|
||||
spinner.finish_and_clear();
|
||||
|
||||
// Ensure the cloned repo's origin is set to the upstream URL (not the local
|
||||
// path)
|
||||
git::set_remote_url(parent_path, &resolved_remote, &upstream_url)?;
|
||||
|
||||
// Mark that we've already cloned from local
|
||||
cloned_from_local = true;
|
||||
|
||||
// We will persist upstream_url as the canonical parent id
|
||||
upstream_url
|
||||
} else if let Some(url) = git_url {
|
||||
url
|
||||
} else {
|
||||
return Err(PakkerError::Fork(
|
||||
"Either --git-url, --from-current or --from-path must be specified"
|
||||
.to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
let parent_path = Path::new(&parent_path_str);
|
||||
|
||||
// If we did not already clone from local, perform network clone and checks
|
||||
if cloned_from_local {
|
||||
println!(
|
||||
"Parent repository was cloned from local path; skipping network clone."
|
||||
);
|
||||
} else {
|
||||
// Check if parent directory already exists and is not empty
|
||||
if parent_path.exists() {
|
||||
let is_empty = parent_path
|
||||
.read_dir()
|
||||
.map(|mut entries| entries.next().is_none())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !is_empty {
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Directory not empty: {}",
|
||||
parent_path.display()
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
let spinner = ProgressBar::new_spinner();
|
||||
spinner.set_style(
|
||||
ProgressStyle::default_spinner()
|
||||
.template("{spinner:.green} {msg}")
|
||||
.expect("spinner template is valid"),
|
||||
);
|
||||
spinner.enable_steady_tick(Duration::from_millis(80));
|
||||
spinner.set_message(format!(
|
||||
"Cloning parent repository: {url} ({resolved_ref})"
|
||||
));
|
||||
git::clone_repository(&url, parent_path, &resolved_ref, None)?;
|
||||
spinner.finish_and_clear();
|
||||
}
|
||||
|
||||
let commit_sha = git::get_commit_sha(parent_path, &resolved_ref)?;
|
||||
|
||||
// Detect ref type if not specified
|
||||
let resolved_ref_type = if let Some(rt) = ref_type {
|
||||
rt
|
||||
} else {
|
||||
git::resolve_ref_type(parent_path, &resolved_ref)?
|
||||
};
|
||||
|
||||
let parent_config = ParentConfig {
|
||||
type_: "git".to_string(),
|
||||
id: url.clone(),
|
||||
version: Some(commit_sha[..8].to_string()),
|
||||
ref_: resolved_ref.clone(),
|
||||
ref_type: resolved_ref_type,
|
||||
remote_name: resolved_remote,
|
||||
};
|
||||
|
||||
local_config.parent = Some(parent_config);
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
// Add .pakku/parent to .gitignore
|
||||
add_to_gitignore()?;
|
||||
|
||||
println!();
|
||||
println!("✓ Fork initialized successfully");
|
||||
println!(" Parent: {url}");
|
||||
println!(" Ref: {} ({})", resolved_ref, match resolved_ref_type {
|
||||
RefType::Branch => "branch",
|
||||
RefType::Tag => "tag",
|
||||
RefType::Commit => "commit",
|
||||
});
|
||||
println!(" Commit: {}", &commit_sha[..8]);
|
||||
println!();
|
||||
println!("Run 'pakku fork sync' to sync with parent.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn execute_set(
|
||||
git_url: Option<String>,
|
||||
ref_name: String,
|
||||
ref_type: Option<RefType>,
|
||||
remote: Option<String>,
|
||||
) -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let mut local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
let Some(mut parent) = local_config.parent else {
|
||||
return Err(PakkerError::Fork(
|
||||
"No parent configured. Run 'pakku fork init' first.".to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
if let Some(url) = git_url {
|
||||
validate_git_url(&url)?;
|
||||
parent.id = url;
|
||||
}
|
||||
|
||||
parent.ref_ = ref_name;
|
||||
|
||||
if let Some(rt) = ref_type {
|
||||
parent.ref_type = rt;
|
||||
}
|
||||
|
||||
if let Some(remote_name) = remote {
|
||||
parent.remote_name = remote_name;
|
||||
}
|
||||
|
||||
local_config.parent = Some(parent.clone());
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
println!("✓ Fork configuration updated");
|
||||
println!(" Parent: {}", parent.id);
|
||||
println!(" Ref: {} ({})", parent.ref_, match parent.ref_type {
|
||||
RefType::Branch => "branch",
|
||||
RefType::Tag => "tag",
|
||||
RefType::Commit => "commit",
|
||||
});
|
||||
println!();
|
||||
println!("Run 'pakku fork sync' to sync with new configuration.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Print the current fork configuration: parent URL/ref/remote, last synced
/// commit, and any per-project overrides. Read-only; never mutates config.
fn execute_show() -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let local_config = LocalConfig::load(config_dir)?;

  if let Some(parent) = local_config.parent {
    println!("Fork Configuration:");
    println!("  Parent URL: {}", parent.id);
    println!("  Type: {}", match parent.ref_type {
      RefType::Branch => "branch",
      RefType::Tag => "tag",
      RefType::Commit => "commit",
    });
    println!("  Ref: {}", parent.ref_);
    println!("  Remote: {}", parent.remote_name);

    // `version` is set by `fork sync`; absent until the first sync.
    if let Some(version) = parent.version {
      println!("  Last synced commit: {version}");
    } else {
      println!("  Last synced commit: never synced");
    }

    // Per-project overrides, one line each with optional detail list.
    if !local_config.projects.is_empty() {
      println!();
      println!("Project Overrides ({}):", local_config.projects.len());
      for (slug, proj_config) in &local_config.projects {
        print!("  - {slug}");
        let mut details = Vec::new();
        if let Some(version) = &proj_config.version {
          details.push(format!("version={version}"));
        }
        if let Some(side) = &proj_config.side {
          details.push(format!("side={side}"));
        }
        if let Some(strategy) = &proj_config.update_strategy {
          details.push(format!("updateStrategy={strategy}"));
        }
        if !details.is_empty() {
          print!(" ({})", details.join(", "));
        }
        // Terminate the line built up with `print!` above.
        println!();
      }
    }
  } else {
    println!("No fork configured.");
    println!("Run 'pakku fork init' to initialize a fork.");
  }

  Ok(())
}
|
||||
|
||||
fn execute_unset() -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let mut local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
if local_config.parent.is_none() {
|
||||
println!("No fork configured.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Prompt for confirmation
|
||||
print!("Are you sure you want to remove fork configuration? [y/N] ");
|
||||
std::io::stdout().flush().map_err(PakkerError::IoError)?;
|
||||
|
||||
let mut input = String::new();
|
||||
std::io::stdin()
|
||||
.read_line(&mut input)
|
||||
.map_err(PakkerError::IoError)?;
|
||||
|
||||
if !input.trim().eq_ignore_ascii_case("y") {
|
||||
println!("Cancelled.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Remove parent directory
|
||||
let parent_path_str = parent_dir();
|
||||
let parent_path = Path::new(&parent_path_str);
|
||||
if parent_path.exists() {
|
||||
fs::remove_dir_all(parent_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to remove parent directory: {e}"))
|
||||
})?;
|
||||
}
|
||||
|
||||
// Clear parent configuration
|
||||
local_config.parent = None;
|
||||
local_config.parent_lock_hash = None;
|
||||
local_config.parent_config_hash = None;
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
println!("✓ Fork configuration removed");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Snapshot parent lockfile as slug → first file name map
|
||||
fn snapshot_parent_projects(
|
||||
parent_path: &Path,
|
||||
) -> HashMap<String, Option<String>> {
|
||||
let lockfile_path = if parent_path.join("pakker-lock.json").exists() {
|
||||
parent_path.join("pakker-lock.json")
|
||||
} else {
|
||||
parent_path.join("pakku-lock.json")
|
||||
};
|
||||
|
||||
if !lockfile_path.exists() {
|
||||
return HashMap::new();
|
||||
}
|
||||
|
||||
match LockFile::load_with_validation(parent_path, false) {
|
||||
Ok(lf) => {
|
||||
lf.projects
|
||||
.iter()
|
||||
.map(|p| {
|
||||
let slug = p
|
||||
.slug
|
||||
.values()
|
||||
.next()
|
||||
.cloned()
|
||||
.or_else(|| p.name.values().next().cloned())
|
||||
.unwrap_or_default();
|
||||
let file = p.files.first().map(|f| f.file_name.clone());
|
||||
(slug, file)
|
||||
})
|
||||
.collect()
|
||||
},
|
||||
Err(_) => HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Sync the cached parent checkout with its upstream: fetch/reset (or clone
/// if missing), refresh the stored lock/config hashes, record the new
/// commit, and print a slug-level diff of parent project changes.
fn execute_sync() -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let mut local_config = LocalConfig::load(config_dir)?;

  let parent = local_config.parent.as_ref().ok_or_else(|| {
    PakkerError::Fork(
      "No parent configured. Run 'pakku fork init' first.".to_string(),
    )
  })?;

  let parent_path_str = parent_dir();
  let parent_path = Path::new(&parent_path_str);

  // Snapshot before update (slug → first file name), used for the diff.
  let before = snapshot_parent_projects(parent_path);

  // Update the checkout in place if it exists, otherwise clone fresh.
  if parent_path.exists() {
    println!("Fetching parent updates...");
    git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?;
    git::reset_to_ref(parent_path, &parent.remote_name, &parent.ref_)?;
  } else {
    println!("Parent repository not found. Cloning...");
    git::clone_repository(&parent.id, parent_path, &parent.ref_, None)?;
  }

  let commit_sha = git::get_commit_sha(parent_path, &parent.ref_)?;

  // Snapshot after update
  let after = snapshot_parent_projects(parent_path);

  let mut integrity = None;

  // Try pakker files first, fall back to pakku files
  let parent_lock_path = if parent_path.join("pakker-lock.json").exists() {
    parent_path.join("pakker-lock.json")
  } else {
    parent_path.join("pakku-lock.json")
  };

  let parent_config_path = if parent_path.join("pakker.json").exists() {
    parent_path.join("pakker.json")
  } else {
    parent_path.join("pakku.json")
  };

  // Refresh stored hashes only when the parent actually has a lockfile;
  // a missing parent config hashes as the empty string.
  if parent_lock_path.exists() {
    let lock_content = fs::read_to_string(&parent_lock_path).map_err(|e| {
      PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
    })?;

    let lock_hash = hash_content(&lock_content);
    local_config.parent_lock_hash = Some(lock_hash);

    let config_content = if parent_config_path.exists() {
      fs::read_to_string(&parent_config_path).map_err(|e| {
        PakkerError::Fork(format!("Failed to read parent config: {e}"))
      })?
    } else {
      String::new()
    };

    let config_hash = hash_content(&config_content);
    local_config.parent_config_hash = Some(config_hash);

    integrity = Some(ForkIntegrity::new(
      local_config.parent_lock_hash.clone().unwrap_or_default(),
      commit_sha.clone(),
      local_config.parent_config_hash.clone().unwrap_or_default(),
    ));
  }

  // Integrity record is currently only logged, not persisted here.
  if let Some(ref integrity_data) = integrity {
    log::info!(
      "Parent integrity verified at timestamp {}",
      integrity_data.verified_at
    );
  }

  // Record the short SHA of the synced commit.
  // NOTE(review): assumes a full-length SHA; `[..8]` panics on shorter
  // strings — confirm `get_commit_sha` never returns abbreviated hashes.
  if let Some(parent) = local_config.parent.as_mut() {
    parent.version = Some(commit_sha[..8].to_string());
  }

  local_config.save(config_dir)?;

  println!();
  println!("✓ Parent sync complete");
  println!("  Commit: {}", &commit_sha[..8]);

  // Print diff of parent changes (added / removed / file-version changes).
  let before_keys: HashSet<_> = before.keys().collect();
  let after_keys: HashSet<_> = after.keys().collect();
  let added: Vec<_> = after_keys.difference(&before_keys).collect();
  let removed: Vec<_> = before_keys.difference(&after_keys).collect();
  let mut updated: Vec<(&String, &Option<String>, &Option<String>)> =
    Vec::new();

  // A slug present on both sides counts as updated when its first file
  // name changed.
  for slug in before_keys.intersection(&after_keys) {
    if before[*slug] != after[*slug] {
      updated.push((slug, &before[*slug], &after[*slug]));
    }
  }

  if added.is_empty() && removed.is_empty() && updated.is_empty() {
    println!("  No changes in parent projects.");
  } else {
    println!();
    println!("  Parent project changes:");
    // Sort each category for stable, readable output.
    let mut added: Vec<_> = added;
    added.sort();
    for slug in added {
      let file = after[*slug].as_deref().unwrap_or("?");
      println!("    + {slug} ({file})");
    }
    let mut removed: Vec<_> = removed;
    removed.sort();
    for slug in removed {
      let file = before[*slug].as_deref().unwrap_or("?");
      println!("    - {slug} ({file})");
    }
    updated.sort_by_key(|(slug, ..)| *slug);
    for (slug, old_file, new_file) in updated {
      let old = old_file.as_deref().unwrap_or("?");
      let new = new_file.as_deref().unwrap_or("?");
      println!("    ~ {slug}: {old} → {new}");
    }
  }

  println!();
  println!("Run 'pakku export' to merge changes from parent.");

  Ok(())
}
|
||||
|
||||
/// Copy the named projects from the parent lockfile into the local
/// lockfile, making them locally managed overrides.
///
/// Fails if no parent is configured, no projects were named, the parent
/// checkout is missing, or any named project is absent from the parent
/// lockfile (in which case nothing is saved).
fn execute_promote(projects: &[String]) -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let local_config = LocalConfig::load(config_dir)?;

  if local_config.parent.is_none() {
    return Err(PakkerError::Fork(
      "No parent configured. Run 'pakku fork init' first.".to_string(),
    ));
  }

  if projects.is_empty() {
    return Err(PakkerError::Fork(
      "No projects specified. Usage: pakku fork promote <project>..."
        .to_string(),
    ));
  }

  // Load parent lockfile
  let parent_path_str = parent_dir();
  let parent_path = Path::new(&parent_path_str);
  if !parent_path.exists() {
    return Err(PakkerError::Fork(
      "Parent directory not found. Run 'pakku fork sync' first.".to_string(),
    ));
  }

  let parent_lockfile = LockFile::load_with_validation(parent_path, false)
    .map_err(|e| {
      PakkerError::Fork(format!("Failed to load parent lockfile: {e}"))
    })?;

  // Load or create local lockfile (pakker name preferred over pakku).
  let lockfile_path = if config_dir.join("pakker-lock.json").exists() {
    config_dir.join("pakker-lock.json")
  } else {
    config_dir.join("pakku-lock.json")
  };
  let mut local_lockfile = if lockfile_path.exists() {
    LockFile::load_with_validation(config_dir, false).map_err(|e| {
      PakkerError::Fork(format!("Failed to load local lockfile: {e}"))
    })?
  } else {
    // Bootstrap from parent metadata
    LockFile {
      target: parent_lockfile.target,
      mc_versions: parent_lockfile.mc_versions.clone(),
      loaders: parent_lockfile.loaders.clone(),
      projects: Vec::new(),
      lockfile_version: parent_lockfile.lockfile_version,
    }
  };

  // Track which requested projects we found
  let mut promoted = Vec::new();
  let mut not_found = Vec::new();

  for project_arg in projects {
    // Match by any slug, any display name, or the pakku id.
    let found = parent_lockfile.projects.iter().find(|p| {
      p.slug.values().any(|s| s == project_arg)
        || p.name.values().any(|n| n == project_arg)
        || p.pakku_id.as_deref() == Some(project_arg)
    });

    if let Some(project) = found {
      // Skip if already in local lockfile (compared by shared slug).
      let already_local = local_lockfile.projects.iter().any(|lp| {
        lp.slug
          .values()
          .any(|s| project.slug.values().any(|ps| s == ps))
      });

      if already_local {
        println!("  ~ {project_arg}: already in local lockfile, skipping");
        continue;
      }

      local_lockfile.add_project(project.clone());
      promoted.push(project_arg);
    } else {
      not_found.push(project_arg);
    }
  }

  // Any unknown name aborts the whole operation before saving.
  if !not_found.is_empty() {
    for name in &not_found {
      eprintln!("  ! {name}: not found in parent lockfile");
    }
    return Err(PakkerError::Fork(format!(
      "{} project(s) not found in parent lockfile",
      not_found.len()
    )));
  }

  if promoted.is_empty() {
    println!("No projects promoted (all already in local lockfile).");
    return Ok(());
  }

  local_lockfile.save(config_dir)?;

  println!("Promoted {} project(s) to local lockfile:", promoted.len());
  for name in &promoted {
    println!("  + {name}");
  }
  println!();
  println!(
    "These projects are now locally managed and will override the parent."
  );

  Ok(())
}
|
||||
|
||||
fn execute_exclude(projects: &[String]) -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let mut local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
if local_config.parent.is_none() {
|
||||
return Err(PakkerError::Fork(
|
||||
"No parent configured. Run 'pakku fork init' first.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut added = Vec::new();
|
||||
for slug in projects {
|
||||
if local_config.excludes.contains(slug) {
|
||||
println!(" ~ {slug}: already excluded");
|
||||
} else {
|
||||
local_config.excludes.push(slug.clone());
|
||||
added.push(slug);
|
||||
}
|
||||
}
|
||||
local_config.excludes.sort();
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
if !added.is_empty() {
|
||||
println!("Excluded {} project(s) from parent:", added.len());
|
||||
for slug in &added {
|
||||
println!(" - {slug}");
|
||||
}
|
||||
println!();
|
||||
println!("These parent projects will be omitted from exports.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn execute_include(projects: &[String]) -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let mut local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
if local_config.parent.is_none() {
|
||||
return Err(PakkerError::Fork(
|
||||
"No parent configured. Run 'pakku fork init' first.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut removed = Vec::new();
|
||||
for slug in projects {
|
||||
if let Some(pos) = local_config.excludes.iter().position(|s| s == slug) {
|
||||
local_config.excludes.remove(pos);
|
||||
removed.push(slug);
|
||||
} else {
|
||||
println!(" ~ {slug}: not in excludes list");
|
||||
}
|
||||
}
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
if !removed.is_empty() {
|
||||
println!("Re-included {} project(s) from parent:", removed.len());
|
||||
for slug in &removed {
|
||||
println!(" + {slug}");
|
||||
}
|
||||
println!();
|
||||
println!("These parent projects will be included in exports again.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn add_to_gitignore() -> Result<(), PakkerError> {
|
||||
let gitignore_path = Path::new(".gitignore");
|
||||
let parent_dir = parent_dir();
|
||||
|
||||
// Check if .gitignore exists and already contains the entry
|
||||
if gitignore_path.exists() {
|
||||
let content = fs::read_to_string(gitignore_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to read .gitignore: {e}"))
|
||||
})?;
|
||||
|
||||
if content.lines().any(|line| line.trim() == parent_dir) {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
// Append to .gitignore
|
||||
let mut file = fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(gitignore_path)
|
||||
.map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to open .gitignore: {e}"))
|
||||
})?;
|
||||
|
||||
writeln!(file, "{parent_dir}").map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
415
crates/pakker-cli/src/cli/commands/import.rs
Normal file
415
crates/pakker-cli/src/cli/commands/import.rs
Normal file
|
|
@ -0,0 +1,415 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use crate::{
|
||||
cli::ImportArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::{Config, LockFile, Target},
|
||||
ui_utils::prompt_yes_no,
|
||||
};
|
||||
|
||||
/// Import a modpack archive (Modrinth `.mrpack` or CurseForge zip) and
/// generate the local lockfile/config from it.
///
/// Prompts before overwriting an existing pakku-lock.json / pakku.json
/// unless `global_yes` is set. Format is detected by probing the archive
/// for its index file, not by extension.
pub async fn execute(
  args: ImportArgs,
  global_yes: bool,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  let skip_prompts = global_yes;
  log::info!("Importing modpack from {}", args.file);
  log::info!(
    "Dependency resolution: {}",
    if args.deps { "enabled" } else { "disabled" }
  );

  let path = Path::new(&args.file);

  if !path.exists() {
    return Err(PakkerError::FileNotFound(
      path.to_string_lossy().to_string(),
    ));
  }

  // Check if lockfile or config already exist; confirm before overwriting.
  if (lockfile_path.exists() || config_path.exists()) && !skip_prompts {
    let msg = if lockfile_path.exists() && config_path.exists() {
      "Both pakku-lock.json and pakku.json exist. Importing will overwrite \
       them. Continue?"
    } else if lockfile_path.exists() {
      "pakku-lock.json exists. Importing will overwrite it. Continue?"
    } else {
      "pakku.json exists. Importing will overwrite it. Continue?"
    };

    if !prompt_yes_no(msg, false, skip_prompts)? {
      log::info!("Import cancelled by user");
      return Ok(());
    }
  }

  // Detect format by checking file contents
  let file = std::fs::File::open(path)?;
  let mut archive = zip::ZipArchive::new(file)?;

  // Downstream loaders expect directories, not file paths.
  let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
  let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

  // Drop the archive handle before handing the path to the importer,
  // which reopens the file itself.
  if archive.by_name("modrinth.index.json").is_ok() {
    drop(archive);
    import_modrinth(path, lockfile_dir, config_dir).await
  } else if archive.by_name("manifest.json").is_ok() {
    drop(archive);
    import_curseforge(path, lockfile_dir, config_dir).await
  } else {
    Err(PakkerError::InvalidImportFile(
      "Unknown pack format".to_string(),
    ))
  }
}
|
||||
|
||||
/// Imports a Modrinth `.mrpack` archive at `path`: parses
/// `modrinth.index.json`, resolves each listed project via the Modrinth
/// platform client, writes a fresh lockfile/config into `lockfile_dir` /
/// `config_dir`, and extracts any bundled `overrides/` entries.
///
/// Projects that fail to resolve or select a file are logged and skipped;
/// they do not abort the import.
///
/// # Errors
/// Fails if the archive cannot be opened, the index is missing or not valid
/// JSON, the platform client cannot be created, or a filesystem write fails.
async fn import_modrinth(
    path: &Path,
    lockfile_dir: &Path,
    config_dir: &Path,
) -> Result<()> {
    use std::{fs::File, io::Read};

    use zip::ZipArchive;

    use crate::platform::create_platform;

    let file = File::open(path)?;
    let mut archive = ZipArchive::new(file)?;

    // Read the index inside a scope so the borrow on `archive` ends before
    // the override-extraction loop below re-borrows it.
    let index_content = {
        let mut index_file = archive.by_name("modrinth.index.json")?;
        let mut content = String::new();
        index_file.read_to_string(&mut content)?;
        content
    };

    let index: serde_json::Value = serde_json::from_str(&index_content)?;

    // Create lockfile
    let mc_version = index["dependencies"]["minecraft"]
        .as_str()
        .unwrap_or("1.20.1")
        .to_string();

    // Loader detection checks fabric-loader first, then forge, falling back
    // to fabric/latest. NOTE(review): quilt and neoforge dependency keys are
    // not handled — confirm whether packs using them should be supported.
    let loader = index["dependencies"]["fabric-loader"].as_str().map_or_else(
        || {
            index["dependencies"]["forge"].as_str().map_or_else(
                || ("fabric".to_string(), "latest".to_string()),
                |forge| ("forge".to_string(), forge.to_string()),
            )
        },
        |fabric| ("fabric".to_string(), fabric.to_string()),
    );

    let mut loaders = std::collections::HashMap::new();
    loaders.insert(loader.0.clone(), loader.1);

    let mut lockfile = LockFile {
        target: Some(Target::Modrinth),
        mc_versions: vec![mc_version.clone()],
        loaders: loaders.clone(),
        projects: Vec::new(),
        // NOTE(review): init.rs writes lockfile_version 2 — confirm whether
        // imported lockfiles should also be version 2.
        lockfile_version: 1,
    };

    // Import projects from files list
    if let Some(files) = index["files"].as_array() {
        log::info!("Importing {} projects from modpack", files.len());

        // Create platform client
        let creds = crate::model::credentials::ResolvedCredentials::load();
        let platform = create_platform(
            "modrinth",
            creds.modrinth_token().map(std::string::ToString::to_string),
        )?;

        for file_entry in files {
            // The id is taken as the second-to-last path segment of the first
            // download URL. NOTE(review): for the Modrinth CDN layout
            // (…/data/<project>/versions/<version>/<file>) that segment is the
            // *version* id, not the project id — confirm the platform client
            // accepts it.
            if let Some(project_id) = file_entry["downloads"]
                .as_array()
                .and_then(|downloads| downloads.first())
                .and_then(|url| url.as_str())
                .and_then(|url| url.split('/').rev().nth(1))
            {
                log::info!("Fetching project: {project_id}");
                match platform
                    .request_project_with_files(
                        project_id,
                        &lockfile.mc_versions,
                        std::slice::from_ref(&loader.0),
                    )
                    .await
                {
                    Ok(mut project) => {
                        // Select best file
                        if let Err(e) = project.select_file(
                            &lockfile.mc_versions,
                            std::slice::from_ref(&loader.0),
                            None, // Use default (1 file) during import
                        ) {
                            log::warn!(
                                "Failed to select file for {}: {}",
                                project.get_name(),
                                e
                            );
                            continue;
                        }
                        lockfile.add_project(project);
                    },
                    Err(e) => {
                        log::warn!("Failed to fetch project {project_id}: {e}");
                    },
                }
            }
        }
    }

    // Create config
    let config = Config {
        name: index["name"]
            .as_str()
            .unwrap_or("Imported Pack")
            .to_string(),
        version: index["versionId"]
            .as_str()
            .unwrap_or("1.0.0")
            .to_string(),
        description: index["summary"]
            .as_str()
            .map(std::string::ToString::to_string),
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: HashMap::default(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };

    // Save files using provided paths
    lockfile.save(lockfile_dir)?;
    config.save(config_dir)?;

    log::info!("Imported {} projects", lockfile.projects.len());

    // Extract overrides. `enclosed_name()` rejects entries that would escape
    // the archive root (zip-slip protection).
    // NOTE(review): extracted entries are written relative to the current
    // working directory, not to `config_dir` — confirm this is intended.
    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        let outpath = file.enclosed_name().ok_or_else(|| {
            PakkerError::InternalError("Invalid file path in archive".to_string())
        })?;

        if outpath.starts_with("overrides/") {
            let Some(target) = outpath.strip_prefix("overrides/").ok() else {
                continue;
            };

            if file.is_dir() {
                std::fs::create_dir_all(target)?;
            } else {
                if let Some(parent) = target.parent() {
                    std::fs::create_dir_all(parent)?;
                }
                let mut outfile = File::create(target)?;
                std::io::copy(&mut file, &mut outfile)?;
            }
        }
    }

    Ok(())
}
|
||||
|
||||
/// Imports a CurseForge modpack zip at `path`: parses `manifest.json`,
/// resolves each listed project by `projectID` (pinning the exact `fileID`
/// when it can be found), writes a fresh lockfile/config into
/// `lockfile_dir` / `config_dir`, and extracts the pack's override folder.
///
/// Projects that fail to resolve or select a file are logged and skipped;
/// they do not abort the import.
///
/// # Errors
/// Fails if the archive cannot be opened, the manifest is missing/invalid,
/// `modLoaders` or the loader id is absent, the platform client cannot be
/// created, or a filesystem write fails.
async fn import_curseforge(
    path: &Path,
    lockfile_dir: &Path,
    config_dir: &Path,
) -> Result<()> {
    use std::{fs::File, io::Read};

    use zip::ZipArchive;

    use crate::platform::create_platform;

    let file = File::open(path)?;
    let mut archive = ZipArchive::new(file)?;

    // Scope the manifest read so `archive` can be re-borrowed for the
    // override-extraction loop below.
    let manifest_content = {
        let mut manifest_file = archive.by_name("manifest.json")?;
        let mut content = String::new();
        manifest_file.read_to_string(&mut content)?;
        content
    };

    let manifest: serde_json::Value = serde_json::from_str(&manifest_content)?;

    // Create lockfile
    let mc_version = manifest["minecraft"]["version"]
        .as_str()
        .unwrap_or("1.20.1")
        .to_string();

    let mod_loaders =
        manifest["minecraft"]["modLoaders"]
            .as_array()
            .ok_or_else(|| {
                PakkerError::InvalidImportFile("Missing modLoaders".to_string())
            })?;

    // Only the first loader entry is used; CurseForge ids look like
    // "forge-47.2.0" — assumes a single '-' separates name and version.
    // NOTE(review): a version containing '-' would be truncated by split —
    // confirm against real manifests.
    let loader_info = mod_loaders
        .first()
        .and_then(|l| l["id"].as_str())
        .ok_or_else(|| {
            PakkerError::InvalidImportFile("Missing loader id".to_string())
        })?;

    let parts: Vec<&str> = loader_info.split('-').collect();
    let loader_name = (*parts.first().unwrap_or(&"fabric")).to_string();
    let loader_version = (*parts.get(1).unwrap_or(&"latest")).to_string();

    let mut loaders = std::collections::HashMap::new();
    loaders.insert(loader_name, loader_version);

    let mut lockfile = LockFile {
        target: Some(Target::CurseForge),
        mc_versions: vec![mc_version.clone()],
        loaders: loaders.clone(),
        projects: Vec::new(),
        // NOTE(review): init.rs writes lockfile_version 2 — confirm whether
        // imported lockfiles should also be version 2.
        lockfile_version: 1,
    };

    // Import projects from files list
    if let Some(files) = manifest["files"].as_array() {
        log::info!("Importing {} projects from modpack", files.len());

        // Create platform client.
        // NOTE(review): unlike import_modrinth this reads only the
        // CURSEFORGE_TOKEN env var and does not consult ResolvedCredentials —
        // confirm whether that asymmetry is intended.
        let curseforge_token = std::env::var("CURSEFORGE_TOKEN").ok();
        let platform = create_platform("curseforge", curseforge_token)?;

        for file_entry in files {
            if let Some(project_id) = file_entry["projectID"].as_u64() {
                let project_id_str = project_id.to_string();
                log::info!("Fetching project: {project_id_str}");

                match platform
                    .request_project_with_files(
                        &project_id_str,
                        &lockfile.mc_versions,
                        &loaders.keys().cloned().collect::<Vec<_>>(),
                    )
                    .await
                {
                    Ok(mut project) => {
                        // Try to select the specific file if fileID is provided
                        if let Some(file_id) = file_entry["fileID"].as_u64() {
                            let file_id_str = file_id.to_string();
                            // Try to find the file with matching ID
                            if let Some(file) =
                                project.files.iter().find(|f| f.id == file_id_str).cloned()
                            {
                                // Pin exactly the manifest's file.
                                project.files = vec![file];
                            } else {
                                log::warn!(
                                    "Could not find file {} for project {}, selecting best match",
                                    file_id,
                                    project.get_name()
                                );
                                if let Err(e) = project.select_file(
                                    &lockfile.mc_versions,
                                    &loaders.keys().cloned().collect::<Vec<_>>(),
                                    None, // Use default (1 file) during import
                                ) {
                                    log::warn!(
                                        "Failed to select file for {}: {}",
                                        project.get_name(),
                                        e
                                    );
                                    continue;
                                }
                            }
                        } else {
                            // No specific file ID, select best match
                            if let Err(e) = project.select_file(
                                &lockfile.mc_versions,
                                &loaders.keys().cloned().collect::<Vec<_>>(),
                                None, // Use default (1 file) during import
                            ) {
                                log::warn!(
                                    "Failed to select file for {}: {}",
                                    project.get_name(),
                                    e
                                );
                                continue;
                            }
                        }
                        lockfile.add_project(project);
                    },
                    Err(e) => {
                        log::warn!("Failed to fetch project {project_id_str}: {e}");
                    },
                }
            }
        }
    }

    // Create config
    let config = Config {
        name: manifest["name"]
            .as_str()
            .unwrap_or("Imported Pack")
            .to_string(),
        version: manifest["version"]
            .as_str()
            .unwrap_or("1.0.0")
            .to_string(),
        description: None,
        author: manifest["author"]
            .as_str()
            .map(std::string::ToString::to_string),
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: HashMap::default(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };

    // Save files using provided paths
    lockfile.save(lockfile_dir)?;
    config.save(config_dir)?;

    log::info!("Imported {} projects", lockfile.projects.len());

    // Extract overrides. The manifest may name a custom override folder.
    // `enclosed_name()` rejects entries escaping the archive (zip-slip).
    // NOTE(review): entries are written relative to the current working
    // directory, not `config_dir` — confirm this is intended.
    let overrides_prefix = manifest["overrides"].as_str().unwrap_or("overrides");

    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        let outpath = file.enclosed_name().ok_or_else(|| {
            PakkerError::InternalError("Invalid file path in archive".to_string())
        })?;

        if outpath.starts_with(overrides_prefix) {
            let Some(target) = outpath.strip_prefix(overrides_prefix).ok() else {
                continue;
            };

            if file.is_dir() {
                std::fs::create_dir_all(target)?;
            } else {
                if let Some(parent) = target.parent() {
                    std::fs::create_dir_all(parent)?;
                }
                let mut outfile = File::create(target)?;
                std::io::copy(&mut file, &mut outfile)?;
            }
        }
    }

    Ok(())
}
|
||||
196
crates/pakker-cli/src/cli/commands/init.rs
Normal file
196
crates/pakker-cli/src/cli/commands/init.rs
Normal file
|
|
@ -0,0 +1,196 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use crate::{
|
||||
cli::InitArgs,
|
||||
error::PakkerError,
|
||||
model::{Config, LockFile, ResolvedCredentials, Target},
|
||||
ui_utils::{
|
||||
prompt_curseforge_api_key,
|
||||
prompt_input,
|
||||
prompt_select,
|
||||
prompt_yes_no,
|
||||
},
|
||||
};
|
||||
|
||||
/// `pakker init`: creates a new lockfile and config.
///
/// Values not given via CLI flags are prompted for interactively, unless
/// `global_yes` (`--yes`) is set, in which case defaults are used.
/// NOTE(review): interactivity is keyed solely on `args.name` being absent —
/// giving only `--name` silently defaults every other field; confirm this is
/// the intended UX.
///
/// # Errors
/// Returns `AlreadyExists` if a lockfile is present, `InvalidInput` on bad
/// target names or failed prompts, and propagates save errors.
pub fn execute(
    args: InitArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    let skip_prompts = global_yes;

    // Refuse to clobber an existing lockfile; config overwrite is allowed.
    if lockfile_path.exists() {
        return Err(PakkerError::AlreadyExists(
            "Lock file already exists".into(),
        ));
    }

    // Interactive mode: prompt for values not provided via CLI and --yes not set
    let is_interactive = !skip_prompts && args.name.is_none();

    // Get modpack name
    let name = if let Some(name) = args.name.clone() {
        name
    } else if is_interactive {
        prompt_input("Modpack name", Some("My Modpack"))
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?
    } else {
        "My Modpack".to_string()
    };

    // Get modpack version
    let version = if let Some(version) = args.version.clone() {
        version
    } else if is_interactive {
        prompt_input("Version", Some("1.0.0"))
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?
    } else {
        "1.0.0".to_string()
    };

    // Get target platform
    let target = if let Some(target) = args.target.clone() {
        target
    } else if is_interactive {
        let targets = ["multiplatform", "curseforge", "modrinth"];
        let idx = prompt_select("Target platform", &targets)
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
        targets[idx].to_string()
    } else {
        "multiplatform".to_string()
    };

    // Validate the target string; only CLI-supplied values can reach `_`.
    let target_enum = match target.as_str() {
        "curseforge" => Target::CurseForge,
        "modrinth" => Target::Modrinth,
        "multiplatform" => Target::Multiplatform,
        _ => {
            return Err(PakkerError::InvalidInput(format!(
                "Invalid target: {target}"
            )));
        },
    };

    // Get Minecraft versions (supports multiple)
    let mc_versions = if let Some(versions) = args.mc_versions.clone() {
        versions
    } else if is_interactive {
        let input =
            prompt_input("Minecraft versions (space-separated)", Some("1.20.1"))
                .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
        input.split_whitespace().map(String::from).collect()
    } else {
        vec!["1.20.1".to_string()]
    };

    // Get mod loaders (supports multiple in name=version format)
    let loaders: HashMap<String, String> = if let Some(loader_strs) = args.loaders
    {
        let mut map = HashMap::new();
        for loader_str in loader_strs {
            // splitn(2, '=') keeps any '=' inside the version intact.
            let parts: Vec<&str> = loader_str.splitn(2, '=').collect();
            if parts.len() == 2 {
                map.insert(parts[0].to_string(), parts[1].to_string());
            } else {
                // If no version specified, use "latest"
                map.insert(loader_str, "latest".to_string());
            }
        }
        map
    } else if is_interactive {
        let loader_options = ["fabric", "forge", "neoforge", "quilt"];
        let idx = prompt_select("Mod loader", &loader_options)
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
        let loader = loader_options[idx].to_string();

        let loader_version = prompt_input("Loader version", Some("latest"))
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;

        let mut map = HashMap::new();
        map.insert(loader, loader_version);
        map
    } else {
        let mut map = HashMap::new();
        map.insert("fabric".to_string(), "latest".to_string());
        map
    };

    let lockfile = LockFile {
        target: Some(target_enum),
        mc_versions,
        loaders,
        projects: Vec::new(),
        // NOTE(review): the importers in cli.rs write lockfile_version 1 —
        // confirm which version is canonical.
        lockfile_version: 2,
    };

    // Save expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    lockfile.save(lockfile_dir)?;

    let config = Config {
        name: name.clone(),
        version: version.clone(),
        description: None,
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };

    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    config.save(config_dir)?;

    // Summary of what was created.
    println!("Initialized new modpack '{name}' v{version}");
    println!(" Target: {target}");
    println!(" Minecraft: {}", lockfile.mc_versions.join(", "));
    println!(
        " Loaders: {}",
        lockfile
            .loaders
            .iter()
            .map(|(k, v)| format!("{k}={v}"))
            .collect::<Vec<_>>()
            .join(", ")
    );

    // Check if CurseForge API key is needed and prompt if interactive
    if is_interactive && (target == "curseforge" || target == "multiplatform") {
        let credentials = ResolvedCredentials::load();
        let has_cf_key = credentials.curseforge_api_key().is_some();

        if !has_cf_key {
            println!();
            if prompt_yes_no(
                "Would you like to set up CurseForge API key now?",
                true,
                skip_prompts,
            )
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?
                && let Ok(Some(api_key)) = prompt_curseforge_api_key(skip_prompts)
            {
                // Save to credentials file.
                // Falls back to a relative ./.pakku dir when HOME is unset.
                let creds_path = std::env::var("HOME").map_or_else(
                    |_| Path::new(".pakku").to_path_buf(),
                    |h| Path::new(&h).join(".pakku"),
                );

                // Best-effort: key setup failures must not fail `init`.
                std::fs::create_dir_all(&creds_path).ok();

                let creds_file = creds_path.join("credentials");
                let content =
                    format!("# Pakku/Pakker credentials\nCURSEFORGE_API_KEY={api_key}\n");
                if std::fs::write(&creds_file, content).is_ok() {
                    println!("CurseForge API key saved to ~/.pakku/credentials");
                }
            }
        }
    }

    Ok(())
}
|
||||
603
crates/pakker-cli/src/cli/commands/inspect.rs
Normal file
603
crates/pakker-cli/src/cli/commands/inspect.rs
Normal file
|
|
@ -0,0 +1,603 @@
|
|||
use std::{collections::HashSet, path::Path};
|
||||
|
||||
use comfy_table::{Cell, Color, ContentArrangement, Table, presets};
|
||||
use strsim::levenshtein;
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::Result,
|
||||
model::{Config, LockFile, Project, ProjectFile},
|
||||
};
|
||||
|
||||
/// `pakker inspect`: prints a detailed report for each named project.
///
/// Each query is matched against slugs, names, pakku ids, and aliases;
/// misses print an error plus fuzzy "did you mean" suggestions.
///
/// # Errors
/// Propagates lockfile/config load errors, and returns `ProjectNotFound`
/// when *none* of the requested projects were found.
pub fn execute(
    projects: &[String],
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

    let lockfile = LockFile::load(lockfile_dir)?;
    // Loaded only to validate that a config exists alongside the lockfile.
    let _config = Config::load(config_dir)?;

    let mut found_any = false;
    let total_projects = projects.len();

    for (idx, project_input) in projects.iter().enumerate() {
        if let Some(project) = find_project(&lockfile, project_input) {
            display_project_inspection(project, &lockfile)?;
            found_any = true;

            // Add separator between projects (but not after the last one)
            if idx < total_projects - 1 {
                let width = 80; // Default terminal width
                println!("{}", "─".repeat(width));
                println!();
            }
        } else {
            eprintln!(
                "{}: {}",
                "Error".red(),
                format!("Project '{project_input}' not found in lockfile.").red()
            );

            // Suggest similar projects
            if let Some(suggestions) =
                find_similar_projects(&lockfile, project_input, 5)
            {
                eprintln!();
                eprintln!("{}", "Did you mean one of these?".yellow());
                for suggestion in suggestions {
                    eprintln!(" - {}", suggestion.cyan());
                }
            }
            eprintln!();
        }
    }

    // All-misses is an error; partial misses are only warnings above.
    if !found_any && !projects.is_empty() {
        return Err(crate::error::PakkerError::ProjectNotFound(
            "No projects found".to_string(),
        ));
    }

    Ok(())
}
|
||||
|
||||
fn find_project<'a>(
|
||||
lockfile: &'a LockFile,
|
||||
query: &str,
|
||||
) -> Option<&'a Project> {
|
||||
lockfile.projects.iter().find(|p| project_matches(p, query))
|
||||
}
|
||||
|
||||
fn project_matches(project: &Project, query: &str) -> bool {
|
||||
// Check slugs
|
||||
for slug in project.slug.values() {
|
||||
if slug.eq_ignore_ascii_case(query) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Check names
|
||||
for name in project.name.values() {
|
||||
if name.eq_ignore_ascii_case(query) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Check pakku_id
|
||||
if let Some(ref pakku_id) = project.pakku_id
|
||||
&& pakku_id.eq_ignore_ascii_case(query)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check aliases
|
||||
for alias in &project.aliases {
|
||||
if alias.eq_ignore_ascii_case(query) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn find_similar_projects(
|
||||
lockfile: &LockFile,
|
||||
query: &str,
|
||||
max_results: usize,
|
||||
) -> Option<Vec<String>> {
|
||||
// Calculate similarity scores for all projects
|
||||
let mut candidates: Vec<(String, usize)> = lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.flat_map(|p| {
|
||||
let mut scores = Vec::new();
|
||||
|
||||
// Check slug similarity
|
||||
for slug in p.slug.values() {
|
||||
let distance = levenshtein(slug, query);
|
||||
if distance <= 3 {
|
||||
scores.push((slug.clone(), distance));
|
||||
}
|
||||
}
|
||||
|
||||
// Check name similarity (case-insensitive)
|
||||
for name in p.name.values() {
|
||||
let distance = levenshtein(&name.to_lowercase(), &query.to_lowercase());
|
||||
if distance <= 3 {
|
||||
scores.push((name.clone(), distance));
|
||||
}
|
||||
}
|
||||
|
||||
// Check aliases
|
||||
for alias in &p.aliases {
|
||||
let distance = levenshtein(alias, query);
|
||||
if distance <= 3 {
|
||||
scores.push((alias.clone(), distance));
|
||||
}
|
||||
}
|
||||
|
||||
scores
|
||||
})
|
||||
.collect();
|
||||
|
||||
if candidates.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Sort by distance (closest first)
|
||||
candidates.sort_by_key(|(_, dist)| *dist);
|
||||
|
||||
// Deduplicate and take top N
|
||||
let mut seen = HashSet::new();
|
||||
let suggestions: Vec<String> = candidates
|
||||
.into_iter()
|
||||
.filter_map(|(name, _)| {
|
||||
if seen.insert(name.clone()) {
|
||||
Some(name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.take(max_results)
|
||||
.collect();
|
||||
|
||||
Some(suggestions)
|
||||
}
|
||||
|
||||
/// Prints the full inspection report for one project, in fixed order:
/// header panel, file list, properties, and dependency tree, each separated
/// by a blank line.
fn display_project_inspection(
    project: &Project,
    lockfile: &LockFile,
) -> Result<()> {
    // Display project header panel
    display_project_header(project);

    // Display project files
    println!();
    display_project_files(&project.files, project);

    // Display properties
    println!();
    display_properties(project);

    // Display dependency tree (the only fallible section)
    println!();
    display_dependencies(project, lockfile)?;

    println!();

    Ok(())
}
|
||||
|
||||
/// Prints the two-row header panel: project name, then a metadata line with
/// slug, source platform, type, and side. Uses the first entry of each
/// per-platform map (iteration order of the map is unspecified).
fn display_project_header(project: &Project) {
    let name = get_project_name(project);
    let default_slug = String::from("N/A");
    let slug = project.slug.values().next().unwrap_or(&default_slug);

    // Create header table with comfy-table
    let mut table = Table::new();
    table
        .load_preset(presets::UTF8_FULL)
        .set_content_arrangement(ContentArrangement::Dynamic);

    // Title row with name
    table.add_row(vec![
        Cell::new(name)
            .fg(Color::Cyan)
            .set_alignment(comfy_table::CellAlignment::Left),
    ]);

    // Second row with slug, type, side
    let metadata = format!(
        "{} ({}) • {} • {}",
        slug,
        project.id.keys().next().map_or("unknown", String::as_str),
        // Debug-render the enums and lowercase for display.
        format!("{:?}", project.r#type).to_lowercase(),
        format!("{:?}", project.side).to_lowercase()
    );
    table.add_row(vec![
        Cell::new(metadata)
            .fg(Color::DarkGrey)
            .set_alignment(comfy_table::CellAlignment::Left),
    ]);

    println!("{table}");
}
|
||||
|
||||
/// Prints one table per project file: a (possibly hyperlinked) identifier
/// line, publish date, optional plain site URL, and truncated hashes.
/// The first file in the slice is styled green and tagged "current".
fn display_project_files(files: &[ProjectFile], project: &Project) {
    if files.is_empty() {
        println!("{}", "No files available".yellow());
        return;
    }

    println!("{}", "Project Files".cyan().bold());

    for (idx, file) in files.iter().enumerate() {
        let mut table = Table::new();
        table
            .load_preset(presets::UTF8_FULL)
            .set_content_arrangement(ContentArrangement::Dynamic);

        // Mark the first file as "current"
        let status = if idx == 0 { "current" } else { "" };
        let status_text = if status.is_empty() {
            String::new()
        } else {
            format!(" {status}")
        };

        // File path line with optional site URL
        let file_path = format!("{}={}", file.file_type, file.file_name);
        let file_display = file.get_site_url(project).map_or_else(
            || format!("{file_path}:{status_text}"),
            |site_url| {
                // Create hyperlink for the file (OSC 8 style via ui_utils)
                let hyperlink = crate::ui_utils::hyperlink(&site_url, &file_path);
                format!("{hyperlink}:{status_text}")
            },
        );

        table.add_row(vec![Cell::new(file_display).fg(if idx == 0 {
            Color::Green
        } else {
            Color::White
        })]);

        // Date published
        table.add_row(vec![Cell::new(&file.date_published).fg(Color::DarkGrey)]);

        // Show site URL if available (for non-hyperlink terminals)
        if let Some(site_url) = file.get_site_url(project) {
            table
                .add_row(vec![Cell::new(format!("URL: {site_url}")).fg(Color::Blue)]);
        }

        // Empty line
        table.add_row(vec![Cell::new("")]);

        // Hashes (truncated to first/last 16 chars when longer than 32).
        // Byte-index slicing is safe here for ASCII hex digests; would panic
        // on multi-byte UTF-8 — assumes hashes are always hex. TODO confirm.
        if !file.hashes.is_empty() {
            for (hash_type, hash_value) in &file.hashes {
                let display_hash = if hash_value.len() > 32 {
                    format!(
                        "{}...{}",
                        &hash_value[..16],
                        &hash_value[hash_value.len() - 16..]
                    )
                } else {
                    hash_value.clone()
                };
                table.add_row(vec![
                    Cell::new(format!("{hash_type}={display_hash}")).fg(Color::DarkGrey),
                ]);
            }
        }

        println!("{table}");
        println!();
    }
}
|
||||
|
||||
fn display_properties(project: &Project) {
|
||||
println!("{}", "Properties".cyan().bold());
|
||||
|
||||
println!(
|
||||
" {}={}",
|
||||
"type".yellow(),
|
||||
format!("{:?}", project.r#type).to_lowercase()
|
||||
);
|
||||
println!(
|
||||
" {}={}",
|
||||
"side".yellow(),
|
||||
format!("{:?}", project.side).to_lowercase()
|
||||
);
|
||||
println!(
|
||||
" {}={}",
|
||||
"update_strategy".yellow(),
|
||||
format!("{:?}", project.update_strategy).to_lowercase()
|
||||
);
|
||||
println!(
|
||||
" {}={}",
|
||||
"redistributable".yellow(),
|
||||
project.redistributable
|
||||
);
|
||||
|
||||
if let Some(subpath) = &project.subpath {
|
||||
println!(" {}={}", "subpath".yellow(), subpath);
|
||||
}
|
||||
|
||||
if !project.aliases.is_empty() {
|
||||
let aliases: Vec<_> = project.aliases.iter().cloned().collect();
|
||||
println!(" {}={}", "aliases".yellow(), aliases.join(", "));
|
||||
}
|
||||
}
|
||||
|
||||
fn display_dependencies(project: &Project, lockfile: &LockFile) -> Result<()> {
|
||||
println!("{}", "Dependencies".cyan().bold());
|
||||
|
||||
// Collect all dependencies from all files
|
||||
let mut all_deps = HashSet::new();
|
||||
for file in &project.files {
|
||||
for dep in &file.required_dependencies {
|
||||
all_deps.insert(dep.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if all_deps.is_empty() {
|
||||
println!(" {}", "No dependencies".dim());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Display dependency tree
|
||||
let mut visited = HashSet::new();
|
||||
for dep_id in all_deps {
|
||||
display_dependency_tree(&dep_id, lockfile, 1, &mut visited)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Recursively prints one dependency line (indented by `depth`) and then its
/// own required dependencies, up to a hard depth cap of 5.
///
/// `visited` holds display names already printed; a repeat is flagged as
/// "(circular)" and recursion stops. NOTE(review): cycle detection is keyed
/// on the display *name*, not the id — two distinct projects sharing a name
/// would be conflated; confirm acceptable.
fn display_dependency_tree(
    dep_id: &str,
    lockfile: &LockFile,
    depth: usize,
    visited: &mut HashSet<String>,
) -> Result<()> {
    let indent = " ".repeat(depth);
    // NOTE(review): "└─" at depth 1 and "├─" deeper is the inverse of the
    // usual tree-drawing convention — confirm the glyph choice is intended.
    let tree_char = if depth == 1 { "└─" } else { "├─" };

    // Find the project in lockfile by any platform id, slug, or pakku id.
    let project = lockfile.projects.iter().find(|p| {
        // Check if any ID matches
        p.id.values().any(|id| id == dep_id)
            || p.slug.values().any(|slug| slug == dep_id)
            || p.pakku_id.as_ref() == Some(&dep_id.to_string())
    });

    if let Some(proj) = project {
        let name = get_project_name(proj);

        // Check for circular dependency
        if visited.contains(&name) {
            println!("{}{} {} {}", indent, tree_char, name, "(circular)".red());
            return Ok(());
        }

        println!("{}{} {} (required)", indent, tree_char, name.green());
        visited.insert(name);

        // Recursively display nested dependencies (limit depth to avoid infinite
        // loops)
        if depth < 5 {
            for file in &proj.files {
                for nested_dep in &file.required_dependencies {
                    display_dependency_tree(nested_dep, lockfile, depth + 1, visited)?;
                }
            }
        }
    } else {
        // Dependency not found in lockfile
        println!(
            "{}{} {} {}",
            indent,
            tree_char,
            dep_id,
            "(not in lockfile)".yellow()
        );
    }

    Ok(())
}
|
||||
|
||||
fn get_project_name(project: &Project) -> String {
|
||||
project
|
||||
.name
|
||||
.values()
|
||||
.next()
|
||||
.or_else(|| project.slug.values().next())
|
||||
.cloned()
|
||||
.unwrap_or_else(|| "Unknown".to_string())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::*;
|
||||
use crate::model::enums::{
|
||||
ProjectSide,
|
||||
ProjectType,
|
||||
ReleaseType,
|
||||
UpdateStrategy,
|
||||
};
|
||||
|
||||
/// Test fixture: builds a minimal `Project` with the given pakku id, slug,
/// and name, all registered under the "modrinth" platform key, with no
/// files and default-ish metadata.
fn create_test_project(pakku_id: &str, slug: &str, name: &str) -> Project {
    let mut slug_map = HashMap::new();
    slug_map.insert("modrinth".to_string(), slug.to_string());

    let mut name_map = HashMap::new();
    name_map.insert("modrinth".to_string(), name.to_string());

    // The platform id is set to the pakku id for simplicity in tests.
    let mut id_map = HashMap::new();
    id_map.insert("modrinth".to_string(), pakku_id.to_string());

    Project {
        pakku_id: Some(pakku_id.to_string()),
        pakku_links: HashSet::new(),
        r#type: ProjectType::Mod,
        side: ProjectSide::Both,
        slug: slug_map,
        name: name_map,
        id: id_map,
        update_strategy: UpdateStrategy::Latest,
        redistributable: true,
        subpath: None,
        aliases: HashSet::new(),
        export: true,
        files: vec![],
    }
}
|
||||
|
||||
/// Test fixture: builds a Modrinth-targeted lockfile (MC 1.20.1,
/// fabric 0.15.0) and adds the given projects through `add_project` so any
/// insertion-time bookkeeping runs as in production.
fn create_test_lockfile(projects: Vec<Project>) -> LockFile {
    use crate::model::enums::Target;
    let mut loaders = HashMap::new();
    loaders.insert("fabric".to_string(), "0.15.0".to_string());

    let mut lockfile = LockFile {
        target: Some(Target::Modrinth),
        mc_versions: vec!["1.20.1".to_string()],
        loaders,
        projects: Vec::new(),
        lockfile_version: 1,
    };

    for project in projects {
        lockfile.add_project(project);
    }

    lockfile
}
|
||||
|
||||
/// A project must be findable by its platform slug.
#[test]
fn test_find_project_by_slug() {
    let lockfile = create_test_lockfile(vec![create_test_project(
        "test-id", "test-slug", "Test Mod",
    )]);

    let found = find_project(&lockfile, "test-slug")
        .expect("project should be found by its slug");
    assert_eq!(found.pakku_id, Some("test-id".to_string()));
}
|
||||
|
||||
/// Name lookup is case-insensitive.
#[test]
fn test_find_project_by_name() {
    let lockfile = create_test_lockfile(vec![create_test_project(
        "test-id", "test-slug", "Test Mod",
    )]);

    let found = find_project(&lockfile, "test mod")
        .expect("lower-cased name should still match");
    assert_eq!(found.pakku_id, Some("test-id".to_string()));
}
|
||||
|
||||
/// A project must be findable by its pakku id.
#[test]
fn test_find_project_by_pakku_id() {
    let lockfile = create_test_lockfile(vec![create_test_project(
        "test-id", "test-slug", "Test Mod",
    )]);

    let found = find_project(&lockfile, "test-id")
        .expect("project should be found by pakku id");
    assert_eq!(found.pakku_id, Some("test-id".to_string()));
}
|
||||
|
||||
/// A query matching nothing yields `None`.
#[test]
fn test_find_project_not_found() {
    let lockfile = create_test_lockfile(vec![create_test_project(
        "test-id", "test-slug", "Test Mod",
    )]);

    assert!(find_project(&lockfile, "nonexistent").is_none());
}
|
||||
|
||||
/// A one-character typo should still surface the intended slug.
#[test]
fn test_fuzzy_matching_close_match() {
    let lockfile = create_test_lockfile(vec![
        create_test_project("id1", "fabric-api", "Fabric API"),
        create_test_project("id2", "sodium", "Sodium"),
    ]);

    // Typo: "fabrc-api" should suggest "fabric-api"
    let suggestions = find_similar_projects(&lockfile, "fabrc-api", 5)
        .expect("a close typo should produce suggestions");
    assert!(!suggestions.is_empty());
    assert!(suggestions.contains(&"fabric-api".to_string()));
}
|
||||
|
||||
#[test]
fn test_fuzzy_matching_no_match() {
    // A wildly different query (edit distance > 3) yields no suggestions:
    // either None or an empty list is acceptable.
    let lockfile = create_test_lockfile(vec![create_test_project(
        "test-id", "test-slug", "Test Mod",
    )]);

    let suggestions =
        find_similar_projects(&lockfile, "completely-different-xyz", 5);
    assert!(suggestions.map_or(true, |s| s.is_empty()));
}
|
||||
|
||||
#[test]
fn test_project_matches_alias() {
    // Aliases participate in matching alongside slug/name/id.
    let mut project = create_test_project("test-id", "test-slug", "Test Mod");
    project.aliases.insert("test-alias".to_string());

    assert!(project_matches(&project, "test-alias"));
}
|
||||
|
||||
#[test]
fn test_circular_dependency_detection() {
    // Conceptual check: two projects whose files require each other must not
    // send the dependency-tree renderer into infinite recursion.
    let mut project1 = create_test_project("dep1", "dep1-slug", "Dependency 1");
    let mut project2 = create_test_project("dep2", "dep2-slug", "Dependency 2");

    // Small factory so the two mutually-dependent files share their
    // boilerplate fields.
    let make_file = |id: &str, parent: &str, dep: &str, jar: &str| ProjectFile {
        file_type: "modrinth".to_string(),
        file_name: jar.to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: format!("https://example.com/{jar}"),
        id: id.to_string(),
        parent_id: parent.to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![dep.to_string()],
        size: 1000,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    // dep1's file requires dep2, and dep2's file requires dep1 — a cycle.
    project1.files.push(make_file("file1", "dep1", "dep2", "dep1.jar"));
    project2.files.push(make_file("file2", "dep2", "dep1", "dep2.jar"));

    let lockfile = create_test_lockfile(vec![project1, project2]);

    // The visited set must break the dep1 -> dep2 -> dep1 cycle gracefully.
    let mut visited = HashSet::new();
    let result = display_dependency_tree("dep1", &lockfile, 1, &mut visited);
    assert!(result.is_ok());
}
|
||||
}
|
||||
47
crates/pakker-cli/src/cli/commands/link.rs
Normal file
47
crates/pakker-cli/src/cli/commands/link.rs
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::LinkArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::LockFile,
|
||||
};
|
||||
|
||||
pub fn execute(args: &LinkArgs, lockfile_path: &Path) -> Result<()> {
|
||||
log::info!("Linking {} -> {}", args.from, args.to);
|
||||
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
let mut lockfile = LockFile::load(lockfile_dir)?;
|
||||
|
||||
// Find projects
|
||||
let from_project = lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.find(|p| p.matches_input(&args.from))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?;
|
||||
let from_id = from_project.pakku_id.clone().ok_or_else(|| {
|
||||
PakkerError::InvalidProject("From project has no pakku_id".to_string())
|
||||
})?;
|
||||
|
||||
let to_project = lockfile
|
||||
.projects
|
||||
.iter_mut()
|
||||
.find(|p| p.matches_input(&args.to))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;
|
||||
|
||||
// Check if link already exists
|
||||
if to_project.pakku_links.contains(&from_id) {
|
||||
log::info!("Link already exists");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Add link
|
||||
to_project.pakku_links.insert(from_id);
|
||||
|
||||
// Save lockfile
|
||||
lockfile.save(lockfile_dir)?;
|
||||
|
||||
log::info!("Successfully linked projects");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
94
crates/pakker-cli/src/cli/commands/ls.rs
Normal file
94
crates/pakker-cli/src/cli/commands/ls.rs
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{cli::LsArgs, error::Result, model::LockFile};
|
||||
|
||||
/// Truncate a name to fit within `max_len` characters, adding "..." if
/// truncated (the ellipsis counts toward the limit).
///
/// Operates on `char` boundaries: the previous implementation compared and
/// sliced by *bytes* (`name.len()`, `&name[..max_len - 3]`), which panics
/// when the cut lands inside a multi-byte UTF-8 character — easy to hit
/// with non-ASCII mod names.
fn truncate_name(name: &str, max_len: usize) -> String {
    if name.chars().count() <= max_len {
        name.to_string()
    } else if max_len > 3 {
        // Leave room for the three-dot ellipsis.
        let head: String = name.chars().take(max_len - 3).collect();
        format!("{head}...")
    } else {
        // Too narrow for an ellipsis; hard-cut at the limit.
        name.chars().take(max_len).collect()
    }
}
|
||||
|
||||
/// List installed projects from the lockfile.
///
/// Plain mode prints one aligned line per project; `--detailed` adds type,
/// side, first-file info, dependency count, and per-provider file names when
/// a cross-provider version mismatch is detected.
pub fn execute(args: &LsArgs, lockfile_path: &Path) -> Result<()> {
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let lockfile = LockFile::load(lockfile_dir)?;

    if lockfile.projects.is_empty() {
        println!("No projects installed");
        return Ok(());
    }

    println!("Installed projects ({}):", lockfile.projects.len());
    println!();

    // Calculate max name length for alignment: an explicit CLI override
    // wins; otherwise the longest project name, capped at 50 columns
    // (defaulting to 20 for the empty case).
    let max_name_len = args.name_max_length.unwrap_or_else(|| {
        lockfile
            .projects
            .iter()
            .map(|p| p.get_name().len())
            .max()
            .unwrap_or(20)
            .min(50)
    });

    for project in &lockfile.projects {
        // Check for version mismatch across providers
        let version_warning = if project.versions_match_across_providers() {
            ""
        } else {
            // Use the detailed check_version_mismatch for logging
            if let Some(mismatch_detail) = project.check_version_mismatch() {
                log::warn!("{mismatch_detail}");
            }
            " [!] versions do not match across providers"
        };

        if args.detailed {
            let id = project.pakku_id.as_deref().unwrap_or("unknown");
            let name = truncate_name(&project.get_name(), max_name_len);
            println!(" {name} ({id}){version_warning}");
            println!(" Type: {:?}", project.r#type);
            println!(" Side: {:?}", project.side);

            // Only the first file's metadata is shown in detail view.
            if let Some(file) = project.files.first() {
                println!(" File: {}", file.file_name);
                println!(
                    " Version: {} ({})",
                    file.release_type, file.date_published
                );
            }

            // Show version details if there's a mismatch
            if !version_warning.is_empty() {
                println!(" Provider versions:");
                for file in &project.files {
                    println!(" {}: {}", file.file_type, file.file_name);
                }
            }

            if !project.pakku_links.is_empty() {
                println!(" Dependencies: {}", project.pakku_links.len());
            }

            println!();
        } else {
            let name = truncate_name(&project.get_name(), max_name_len);
            let file_info = project
                .files
                .first()
                .map(|f| format!(" ({})", f.file_name))
                .unwrap_or_default();

            println!(" {name}{file_info}{version_warning}");
        }
    }

    Ok(())
}
|
||||
24
crates/pakker-cli/src/cli/commands/mod.rs
Normal file
24
crates/pakker-cli/src/cli/commands/mod.rs
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
pub mod add;
|
||||
pub mod add_prj;
|
||||
pub mod cfg;
|
||||
pub mod cfg_prj;
|
||||
pub mod credentials;
|
||||
pub mod credentials_set;
|
||||
pub mod credentials_test;
|
||||
pub mod diff;
|
||||
pub mod export;
|
||||
pub mod fetch;
|
||||
pub mod fork;
|
||||
pub mod import;
|
||||
pub mod init;
|
||||
pub mod inspect;
|
||||
pub mod link;
|
||||
pub mod ls;
|
||||
pub mod remote;
|
||||
pub mod remote_update;
|
||||
pub mod rm;
|
||||
pub mod set;
|
||||
pub mod status;
|
||||
pub mod sync;
|
||||
pub mod unlink;
|
||||
pub mod update;
|
||||
151
crates/pakker-cli/src/cli/commands/remote.rs
Normal file
151
crates/pakker-cli/src/cli/commands/remote.rs
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
cli::RemoteArgs,
|
||||
error::{PakkerError, Result},
|
||||
fetch::Fetcher,
|
||||
git,
|
||||
model::{config::Config, lockfile::LockFile},
|
||||
};
|
||||
|
||||
const REMOTE_DIR: &str = ".pakku-remote";
|
||||
|
||||
/// Install (or refresh) a modpack from a remote Git repository.
///
/// Behavior by flags:
/// - `--remove`: delete the local `.pakku-remote` clone and return.
/// - no URL: print the current remote status and return.
/// - URL given: clone the repo (or fetch + reset an existing clone),
///   copy its lockfile/config into the working directory, fetch all
///   project files, and sync override directories.
pub async fn execute(args: RemoteArgs) -> Result<()> {
    let remote_path = PathBuf::from(REMOTE_DIR);

    // Handle --remove flag
    if args.remove {
        if remote_path.exists() {
            fs::remove_dir_all(&remote_path)?;
            log::info!("Removed remote from modpack");
        } else {
            log::warn!("No remote configured");
        }
        return Ok(());
    }

    // If no URL provided, show status
    if args.url.is_none() {
        show_remote_status(&remote_path);
        return Ok(());
    }

    let url = args
        .url
        .ok_or_else(|| PakkerError::InvalidInput("URL is required".to_string()))?;
    log::info!("Installing modpack from: {url}");

    // Clone or update repository; the ref defaults to the remote HEAD
    // when no branch was requested.
    if remote_path.exists() {
        log::info!("Remote directory exists, updating...");
        let remote_name = "origin";
        let ref_name = args.branch.as_deref().unwrap_or("HEAD");

        git::fetch_updates(&remote_path, remote_name, ref_name, None)?;
        git::reset_to_ref(&remote_path, remote_name, ref_name)?;
    } else {
        log::info!("Cloning repository...");
        let ref_name = args.branch.as_deref().unwrap_or("HEAD");
        git::clone_repository(&url, &remote_path, ref_name, None)?;
    }

    // Load lockfile and config from remote; a missing lockfile means the
    // repository is not a pakku modpack.
    let remote_lockfile_path = remote_path.join("pakku-lock.json");
    if !remote_lockfile_path.exists() {
        return Err(PakkerError::ConfigError(
            "Remote repository does not contain pakku-lock.json".to_string(),
        ));
    }

    let remote_lockfile = LockFile::load(&remote_path)?;
    // Config is optional — failure to load is treated as "no config".
    let remote_config = Config::load(&remote_path).ok();

    // Copy lockfile to current directory
    let current_lockfile_path = PathBuf::from("pakku-lock.json");
    fs::copy(&remote_lockfile_path, &current_lockfile_path)?;
    log::info!("Copied lockfile from remote");

    // Copy config if exists
    if remote_config.is_some() {
        let remote_config_path = remote_path.join("pakku.json");
        let current_config_path = PathBuf::from("pakku.json");
        if remote_config_path.exists() {
            fs::copy(&remote_config_path, &current_config_path)?;
            log::info!("Copied config from remote");
        }
    }

    // Fetch project files
    log::info!("Fetching project files...");
    let fetcher = Fetcher::new(&remote_path);
    fetcher
        .fetch_all(&remote_lockfile, &remote_config.unwrap_or_default())
        .await?;

    // Sync overrides
    sync_overrides(&remote_path, args.server_pack)?;

    log::info!("Successfully installed modpack from remote");
    Ok(())
}
|
||||
|
||||
fn show_remote_status(remote_path: &Path) {
|
||||
if !remote_path.exists() {
|
||||
println!("No remote configured");
|
||||
return;
|
||||
}
|
||||
|
||||
println!("Remote status:");
|
||||
println!(" Directory: {}", remote_path.display());
|
||||
|
||||
if git::is_git_repository(remote_path) {
|
||||
if let Ok(url) = git::get_remote_url(remote_path, "origin") {
|
||||
println!(" URL: {url}");
|
||||
}
|
||||
if let Ok(sha) = git::get_current_commit_sha(remote_path, None) {
|
||||
println!(" Commit: {}", &sha[..8]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn sync_overrides(remote_path: &Path, server_pack: bool) -> Result<()> {
|
||||
let override_dirs = if server_pack {
|
||||
vec!["overrides", "server_overrides"]
|
||||
} else {
|
||||
vec!["overrides", "client_overrides"]
|
||||
};
|
||||
|
||||
for dir_name in override_dirs {
|
||||
let src_dir = remote_path.join(dir_name);
|
||||
if src_dir.exists() && src_dir.is_dir() {
|
||||
log::info!("Syncing {dir_name} directory...");
|
||||
copy_dir_recursive(&src_dir, Path::new("."))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> {
|
||||
if !dst.exists() {
|
||||
fs::create_dir_all(dst)?;
|
||||
}
|
||||
|
||||
for entry in fs::read_dir(src)? {
|
||||
let entry = entry?;
|
||||
let src_path = entry.path();
|
||||
let file_name = entry.file_name();
|
||||
let dst_path = dst.join(file_name);
|
||||
|
||||
if src_path.is_dir() {
|
||||
copy_dir_recursive(&src_path, &dst_path)?;
|
||||
} else {
|
||||
fs::copy(&src_path, &dst_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
121
crates/pakker-cli/src/cli/commands/remote_update.rs
Normal file
121
crates/pakker-cli/src/cli/commands/remote_update.rs
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::{cli::RemoteUpdateArgs, error::PakkerError, git, model::Config};
|
||||
|
||||
/// Update modpack from remote Git repository
///
/// This command updates the current modpack from its remote Git repository.
/// It fetches the latest changes from the remote and syncs overrides.
/// Refuses to run inside a modpack directory (one containing
/// `pakku-lock.json`) — the regular `update` command applies there.
/// The `.pakku-remote` clone is deleted once the sync completes.
pub fn execute(args: &RemoteUpdateArgs) -> Result<(), PakkerError> {
    // Check if lockfile exists in current directory - if it does, we're in a
    // modpack directory and should not update remote (use regular update
    // instead)
    let lockfile_path = PathBuf::from("pakku-lock.json");
    if lockfile_path.exists() {
        return Err(PakkerError::InvalidInput(
            "Cannot update remote from a modpack directory. Use 'update' command \
             instead."
                .to_string(),
        ));
    }

    // Remote directory for the cloned modpack
    let remote_dir = PathBuf::from(".pakku-remote");

    // Check if remote directory exists
    if !remote_dir.exists() {
        return Err(PakkerError::RemoteNotFound(
            "No remote found. Use 'remote' command to install a modpack first."
                .to_string(),
        ));
    }

    // Fetch updates from remote repository (branch defaults to remote HEAD)
    println!("Updating remote repository...");
    let remote_name = "origin";
    let ref_name = args.branch.as_deref().unwrap_or("HEAD");
    git::fetch_updates(&remote_dir, remote_name, ref_name, None)?;

    // Read remote lockfile
    let remote_lockfile_path = remote_dir.join("pakku-lock.json");
    if !remote_lockfile_path.exists() {
        return Err(PakkerError::FileNotFound(
            "Remote lockfile not found".to_string(),
        ));
    }

    // Read remote config if it exists.
    // NOTE(review): `Config::load` is handed the *file* path here, while
    // other call sites (e.g. `set`, `status`) pass a directory — confirm
    // which form `Config::load` expects.
    let remote_config_path = remote_dir.join("pakku.json");
    let _remote_config = if remote_config_path.exists() {
        match Config::load(&remote_config_path) {
            Ok(config) => Some(config),
            Err(e) => {
                // Best-effort: a bad config only produces a warning.
                eprintln!("Warning: Could not read remote config: {e}");
                None
            },
        }
    } else {
        None
    };

    // Sync overrides from remote directory
    println!("Syncing overrides...");
    sync_overrides(&remote_dir)?;

    // Clean up remote directory
    std::fs::remove_dir_all(&remote_dir)?;

    println!("Remote modpack updated successfully.");

    Ok(())
}
|
||||
|
||||
/// Sync override files from remote directory to current directory
///
/// Reads the override directory list from the remote `pakku.json` and copies
/// each listed directory into the working directory. Silently succeeds when
/// the remote has no config or declares no overrides.
fn sync_overrides(remote_dir: &Path) -> Result<(), PakkerError> {
    // NOTE(review): as in `execute` above, `Config::load` receives a file
    // path here rather than a directory — TODO confirm against Config::load.
    let remote_config_path = remote_dir.join("pakku.json");
    if !remote_config_path.exists() {
        return Ok(());
    }

    let config = Config::load(&remote_config_path)?;

    // Get override directories from config
    let overrides = config.overrides;
    if overrides.is_empty() {
        return Ok(());
    }

    for override_path in overrides {
        let source = remote_dir.join(&override_path);
        let dest = PathBuf::from(&override_path);

        // Entries missing from the remote are skipped without error.
        if source.exists() {
            // Copy override directory
            copy_directory(&source, &dest)?;
            println!(" Synced: {override_path}");
        }
    }

    Ok(())
}
|
||||
|
||||
/// Recursively copy a directory
|
||||
fn copy_directory(src: &PathBuf, dest: &PathBuf) -> Result<(), PakkerError> {
|
||||
if !dest.exists() {
|
||||
std::fs::create_dir_all(dest)?;
|
||||
}
|
||||
|
||||
for entry in std::fs::read_dir(src)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
let dest_path = dest.join(entry.file_name());
|
||||
|
||||
if path.is_dir() {
|
||||
copy_directory(&path, &dest_path)?;
|
||||
} else {
|
||||
std::fs::copy(&path, &dest_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
160
crates/pakker-cli/src/cli/commands/rm.rs
Normal file
160
crates/pakker-cli/src/cli/commands/rm.rs
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::RmArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::LockFile,
|
||||
ui_utils::{prompt_typo_suggestion, prompt_yes_no},
|
||||
};
|
||||
|
||||
/// Remove one or more projects from the lockfile.
///
/// Inputs are matched against pakku_id, slug, name (case-insensitive), and
/// aliases; unknown inputs get a fuzzy "did you mean" prompt. Unless
/// `global_yes` is set, the resolved list is shown and confirmed before
/// anything is removed. `pakku_links` entries pointing at removed projects
/// are pruned from the remaining projects, then the lockfile is saved.
pub fn execute(
    args: &RmArgs,
    global_yes: bool,
    lockfile_path: &Path,
    _config_path: &Path,
) -> Result<()> {
    let skip_prompts = global_yes;
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;

    // Determine which projects to remove: with --all, every project's
    // pakku_id (falling back to its first slug); otherwise the CLI inputs.
    let inputs: Vec<String> = if args.all {
        log::info!("Removing all projects from lockfile");
        lockfile
            .projects
            .iter()
            .filter_map(|p| {
                p.pakku_id
                    .clone()
                    .or_else(|| p.slug.values().next().cloned())
            })
            .collect()
    } else {
        args.inputs.clone()
    };

    if inputs.is_empty() {
        return if args.all {
            Err(PakkerError::ProjectNotFound(
                "No projects found in lockfile".to_string(),
            ))
        } else {
            Err(PakkerError::ProjectNotFound(
                "No projects specified".to_string(),
            ))
        };
    }

    log::info!("Removing projects: {inputs:?}");

    let mut removed_count = 0;
    let mut removed_ids = Vec::new();
    let mut projects_to_remove = Vec::new();

    // Collect all known project identifiers for typo suggestions
    let all_slugs: Vec<String> = lockfile
        .projects
        .iter()
        .flat_map(|p| {
            let mut ids = Vec::new();
            if let Some(ref pakku_id) = p.pakku_id {
                ids.push(pakku_id.clone());
            }
            ids.extend(p.slug.values().cloned());
            ids.extend(p.name.values().cloned());
            ids.extend(p.aliases.iter().cloned());
            ids
        })
        .collect();

    // First, identify all projects to remove; unmatched inputs may be
    // replaced by an accepted typo suggestion (skipped entirely for --all).
    let mut resolved_inputs = Vec::new();
    for input in &inputs {
        // Find project by various identifiers
        if lockfile.projects.iter().any(|p| {
            p.pakku_id.as_deref() == Some(input)
                || p.slug.values().any(|s| s == input)
                || p.name.values().any(|n| n.eq_ignore_ascii_case(input))
                || p.aliases.contains(input)
        }) {
            resolved_inputs.push(input.clone());
        } else if !args.all {
            // Try typo suggestion
            if let Ok(Some(suggestion)) =
                prompt_typo_suggestion(input, &all_slugs, skip_prompts)
            {
                log::info!("Using suggested project: {suggestion}");
                resolved_inputs.push(suggestion);
            } else {
                log::warn!("Project not found: {input}");
            }
        }
    }

    // Now find the actual projects from resolved inputs (display names for
    // the confirmation prompt below)
    for input in &resolved_inputs {
        if let Some(project) = lockfile.projects.iter().find(|p| {
            p.pakku_id.as_deref() == Some(input)
                || p.slug.values().any(|s| s == input)
                || p.name.values().any(|n| n.eq_ignore_ascii_case(input))
                || p.aliases.contains(input)
        }) {
            projects_to_remove.push(project.get_name());
        }
    }

    // Replace inputs with resolved_inputs for actual removal
    let inputs = resolved_inputs;

    if projects_to_remove.is_empty() {
        return Err(PakkerError::ProjectNotFound(
            "None of the specified projects found".to_string(),
        ));
    }

    // Ask for confirmation unless --yes flag is provided or --all with no
    // projects
    if !skip_prompts {
        println!("The following projects will be removed:");
        for name in &projects_to_remove {
            println!(" - {name}");
        }

        if !prompt_yes_no("Do you want to continue?", false, skip_prompts)? {
            println!("Removal cancelled.");
            return Ok(());
        }
    }

    // Now actually remove the projects
    for input in &inputs {
        if let Some(pos) = lockfile.projects.iter().position(|p| {
            p.pakku_id.as_deref() == Some(input)
                || p.slug.values().any(|s| s == input)
                || p.name.values().any(|n| n.eq_ignore_ascii_case(input))
                || p.aliases.contains(input)
        }) {
            let project = lockfile.projects.remove(pos);
            log::info!("Removed: {}", project.get_name());
            // Track pakku_ids so dangling links can be pruned afterwards.
            if let Some(pakku_id) = project.pakku_id.clone() {
                removed_ids.push(pakku_id);
            }
            removed_count += 1;
        }
    }

    // Clean up pakku_links from all remaining projects
    for project in &mut lockfile.projects {
        project
            .pakku_links
            .retain(|link| !removed_ids.contains(link));
    }

    // Save lockfile
    lockfile.save(lockfile_dir)?;

    log::info!("Successfully removed {removed_count} project(s)");

    Ok(())
}
|
||||
156
crates/pakker-cli/src/cli/commands/set.rs
Normal file
156
crates/pakker-cli/src/cli/commands/set.rs
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
use std::{collections::HashMap, path::Path, str::FromStr};
|
||||
|
||||
use crate::{
|
||||
cli::SetArgs,
|
||||
error::PakkerError,
|
||||
model::{Config, LockFile, ProjectSide, ProjectType, Target, UpdateStrategy},
|
||||
};
|
||||
|
||||
/// Set lockfile-level or per-project properties.
///
/// If any of `--target`, `--mc-versions`, or `--loaders` is given, the
/// lockfile itself is modified (emitting compatibility warnings for existing
/// projects); otherwise a project identified by `input` has its
/// type/side/update-strategy/redistributable flag changed. Errors when
/// neither mode applies.
pub fn execute(
    args: &SetArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;

    // Check if we're modifying lockfile properties or project properties
    let is_lockfile_modification = args.target.is_some()
        || args.mc_versions.is_some()
        || args.loaders.is_some();

    if is_lockfile_modification {
        // Modify lockfile properties
        if let Some(target_str) = &args.target {
            let target = Target::from_str(target_str).map_err(|e| {
                PakkerError::InvalidInput(format!("Invalid target: {e}"))
            })?;
            lockfile.target = Some(target);
            println!("Set target to: {target:?}");
        }

        if let Some(mc_versions_str) = &args.mc_versions {
            // Comma-separated list; empty segments are dropped.
            let mc_versions: Vec<String> = mc_versions_str
                .split(',')
                .map(|s| s.trim().to_string())
                .filter(|s| !s.is_empty())
                .collect();

            if mc_versions.is_empty() {
                return Err(PakkerError::InvalidInput(
                    "At least one Minecraft version is required".to_string(),
                ));
            }

            // Validate that all projects are compatible with new MC versions
            // (warn only — the change is applied regardless).
            for project in &lockfile.projects {
                let compatible = project
                    .files
                    .iter()
                    .any(|file| file.mc_versions.iter().any(|v| mc_versions.contains(v)));
                if !compatible {
                    eprintln!(
                        "Warning: Project '{}' has no files compatible with new MC \
                         versions",
                        project.get_name()
                    );
                }
            }

            lockfile.mc_versions.clone_from(&mc_versions);
            println!("Set Minecraft versions to: {mc_versions:?}");
        }

        if let Some(loaders_str) = &args.loaders {
            // Loaders arrive as comma-separated "name=version" pairs.
            let mut loaders: HashMap<String, String> = HashMap::new();

            for pair in loaders_str.split(',') {
                let parts: Vec<&str> = pair.split('=').collect();
                if parts.len() != 2 {
                    return Err(PakkerError::InvalidInput(format!(
                        "Invalid loader format '{pair}'. Expected 'name=version'"
                    )));
                }
                loaders
                    .insert(parts[0].trim().to_string(), parts[1].trim().to_string());
            }

            if loaders.is_empty() {
                return Err(PakkerError::InvalidInput(
                    "At least one loader is required".to_string(),
                ));
            }

            let loader_names: Vec<String> = loaders.keys().cloned().collect();

            // Validate that all projects are compatible with new loaders;
            // files with an empty loader list are treated as universal.
            for project in &lockfile.projects {
                let compatible = project.files.iter().any(|file| {
                    file.loaders.is_empty()
                        || file.loaders.iter().any(|l| loader_names.contains(l))
                });
                if !compatible {
                    eprintln!(
                        "Warning: Project '{}' has no files compatible with new loaders",
                        project.get_name()
                    );
                }
            }

            lockfile.loaders.clone_from(&loaders);
            println!("Set loaders to: {loaders:?}");
        }

        lockfile.save(lockfile_dir)?;
        println!("Lockfile properties updated successfully");
    } else if let Some(input) = &args.input {
        // Modify project properties; the block scopes the mutable borrow of
        // `lockfile` so it can be saved afterwards.
        let project_name = {
            let project = lockfile
                .projects
                .iter_mut()
                .find(|p| p.matches_input(input))
                .ok_or_else(|| PakkerError::ProjectNotFound(input.clone()))?;

            if let Some(type_str) = &args.r#type {
                let project_type =
                    ProjectType::from_str(type_str).map_err(PakkerError::InvalidInput)?;
                project.r#type = project_type;
            }

            if let Some(side_str) = &args.side {
                let side =
                    ProjectSide::from_str(side_str).map_err(PakkerError::InvalidInput)?;
                project.side = side;
            }

            if let Some(strategy_str) = &args.strategy {
                let strategy = UpdateStrategy::from_str(strategy_str)
                    .map_err(PakkerError::InvalidInput)?;
                project.update_strategy = strategy;
            }

            if let Some(redistributable) = args.redistributable {
                project.redistributable = redistributable;
            }

            project.get_name()
        };

        // NOTE(review): `config` is loaded and saved but never modified in
        // this branch — confirm whether the save is intentional (e.g. file
        // normalization) or dead weight.
        lockfile.save(lockfile_dir)?;
        config.save(config_dir)?;

        println!("Updated project: {project_name}");
    } else {
        return Err(PakkerError::InvalidInput(
            "Either provide a project identifier or lockfile properties to modify"
                .to_string(),
        ));
    }

    Ok(())
}
|
||||
400
crates/pakker-cli/src/cli/commands/status.rs
Normal file
400
crates/pakker-cli/src/cli/commands/status.rs
Normal file
|
|
@ -0,0 +1,400 @@
|
|||
use std::{collections::HashMap, path::Path, sync::Arc};
|
||||
|
||||
use futures::stream::{FuturesUnordered, StreamExt};
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use tokio::sync::Semaphore;
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::{ErrorSeverity, Result},
|
||||
model::{Config, LockFile, Project},
|
||||
platform::create_platform,
|
||||
};
|
||||
|
||||
/// Show modpack status and check every project for available updates.
///
/// Prints modpack metadata, runs the update check (sequentially or in
/// parallel per the `parallel` flag), displays results, splits failures
/// into warnings vs. errors by message content, and finally offers to run
/// the `update` command when updates were found.
pub async fn execute(
    parallel: bool,
    skip_prompts: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // Both loaders expect the containing directory, not the file path.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

    let lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;

    // Display modpack metadata
    display_modpack_info(&lockfile, &config);
    println!();

    // Check for updates (sequential or parallel)
    let (updates, errors) = if parallel {
        check_updates_parallel(&lockfile).await?
    } else {
        check_updates_sequential(&lockfile).await?
    };

    // Display results
    display_update_results(&updates);

    // Display errors if any, categorized by severity
    if !errors.is_empty() {
        println!();

        // Categorize errors by severity — classification is heuristic,
        // based on substrings of the error message.
        let (warnings, errors_only): (Vec<_>, Vec<_>) =
            errors.iter().partition(|(_, err)| {
                // Network errors and "not found" are warnings (non-fatal)
                err.contains("Failed to check") || err.contains("not found")
            });

        // Display warnings (ErrorSeverity::Warning)
        if !warnings.is_empty() {
            let severity = ErrorSeverity::Warning;
            println!("{}", format_severity_header(severity, "Warnings"));
            for (project, error) in &warnings {
                println!(" - {}: {}", project.yellow(), error.dim());
            }
        }

        // Display errors (ErrorSeverity::Error)
        if !errors_only.is_empty() {
            let severity = ErrorSeverity::Error;
            println!("{}", format_severity_header(severity, "Errors"));
            for (project, error) in &errors_only {
                println!(" - {}: {}", project.yellow(), error.red());
            }
        }

        // Log info level summary
        log::info!(
            "Update check completed with {} warning(s) and {} error(s)",
            warnings.len(),
            errors_only.len()
        );
    }

    // Prompt to update if there are updates available
    if !updates.is_empty() {
        println!();
        if crate::ui_utils::prompt_yes_no("Update now?", false, skip_prompts)? {
            // Call update command programmatically (update all projects)
            let update_args = crate::cli::UpdateArgs {
                inputs: vec![],
                all: true,
            };
            crate::cli::commands::update::execute(
                update_args,
                true, // Auto-yes for status command
                lockfile_path,
                config_path,
            )
            .await?;
        }
    }

    Ok(())
}
|
||||
|
||||
/// Print a short summary of the modpack: name/version/author from the
/// config, then MC versions, loaders, and target platform from the lockfile.
fn display_modpack_info(lockfile: &LockFile, config: &Config) {
    println!(
        "Managing {} modpack, version {}, by {}",
        config.name.cyan(),
        config.version.cyan(),
        config.author.as_deref().unwrap_or("Unknown").cyan()
    );

    let mc_versions = lockfile.mc_versions.join(", ");
    // Render each loader as "name-version" and join them for display.
    let loaders_str = lockfile
        .loaders
        .iter()
        .map(|(name, ver)| format!("{name}-{ver}"))
        .collect::<Vec<_>>()
        .join(", ");

    println!(
        "on Minecraft version {}, loader {}, targeting platform {:?}.",
        mc_versions.cyan(),
        loaders_str.cyan(),
        lockfile.target
    );
}
|
||||
|
||||
/// A project with at least one newer file available, collected during the
/// update check for later display.
#[derive(Debug)]
struct ProjectUpdate {
    // Per-key slug map copied from the project (presumably platform -> slug
    // — confirm at the construction site).
    slug: HashMap<String, String>,
    // Display name of the project.
    name: String,
    // Project type as a display string.
    project_type: String,
    // Project side as a display string.
    side: String,
    // One entry per platform file that has a newer version available.
    file_updates: Vec<FileUpdate>,
}
|
||||
|
||||
/// A single file-level update: the platform it comes from and the old/new
/// file names, used when rendering the update summary.
#[derive(Debug)]
struct FileUpdate {
    // Source platform identifier (e.g. provider name).
    platform: String,
    // File name currently recorded in the lockfile.
    old_filename: String,
    // File name of the newer available version.
    new_filename: String,
}
|
||||
|
||||
/// Check every project in the lockfile for updates, one at a time, driving
/// a progress bar. Returns the discovered updates plus a list of
/// `(project name, error message)` pairs for projects whose check failed.
#[expect(
    clippy::expect_used,
    reason = "progress bar template is a string literal and is always valid"
)]
async fn check_updates_sequential(
    lockfile: &LockFile,
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
    let total = lockfile.projects.len();
    let mut updates = Vec::new();
    let mut errors = Vec::new();

    // Create progress bar
    let pb = ProgressBar::new(total as u64);
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );
    pb.set_message("Checking for updates...");

    for project in &lockfile.projects {
        // Display name: an arbitrary entry from the name map (HashMap
        // iteration order), or a placeholder for unnamed projects.
        let project_name = project
            .name
            .values()
            .next()
            .cloned()
            .unwrap_or_else(|| "Unknown".to_string());
        pb.set_message(format!("Checking {project_name}..."));

        match check_project_update(project, lockfile).await {
            Ok(update_opt) => {
                // `None` means the project is already up to date.
                if let Some(update) = update_opt {
                    updates.push(update);
                }
            },
            Err(e) => {
                // Failures are collected, not propagated — the check
                // continues for the remaining projects.
                errors.push((project_name.clone(), e.to_string()));
            },
        }

        pb.inc(1);
    }

    pb.finish_with_message(format!("Checked {total} projects"));
    println!(); // Add blank line after progress bar

    Ok((updates, errors))
}
|
||||
|
||||
/// Check every project in the lockfile for updates concurrently.
///
/// Up to 10 checks run at once (bounded by a semaphore); results are
/// aggregated into the same `(updates, errors)` pair as the sequential
/// variant. Completion order is not the lockfile order.
#[expect(
    clippy::expect_used,
    reason = "progress bar template and semaphore acquire are infallible in \
              this context"
)]
async fn check_updates_parallel(
    lockfile: &LockFile,
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
    let total = lockfile.projects.len();
    // Cap in-flight platform requests to avoid hammering the APIs.
    let semaphore = Arc::new(Semaphore::new(10));
    let mut futures = FuturesUnordered::new();

    // Create progress bar (Arc so the spawned futures can tick it)
    let pb = Arc::new(ProgressBar::new(total as u64));
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );
    pb.set_message("Checking for updates (parallel)...");

    for project in &lockfile.projects {
        // Each future owns its own copies; clones are needed because the
        // futures may outlive this iteration's borrows.
        let project = project.clone();
        let sem = semaphore.clone();
        let pb_clone = pb.clone();
        let lockfile_clone = lockfile.clone();

        futures.push(async move {
            // Permit is held for the duration of the request.
            let _permit = sem.acquire().await.expect("semaphore closed unexpectedly");
            let result = check_project_update(&project, &lockfile_clone).await;
            pb_clone.inc(1);
            (project, result)
        });
    }

    let mut updates = Vec::new();
    let mut errors = Vec::new();

    // Drain results in completion order.
    while let Some((project, result)) = futures.next().await {
        match result {
            Ok(update_opt) => {
                if let Some(update) = update_opt {
                    updates.push(update);
                }
            },
            Err(e) => {
                // Recover a display name for the error report.
                let project_name = project
                    .name
                    .values()
                    .next()
                    .cloned()
                    .unwrap_or_else(|| "Unknown".to_string());
                errors.push((project_name, e.to_string()));
            },
        }
    }

    pb.finish_with_message(format!("Checked {total} projects"));
    println!(); // Add blank line after progress bar

    Ok((updates, errors))
}
|
||||
|
||||
async fn check_project_update(
|
||||
project: &Project,
|
||||
lockfile: &LockFile,
|
||||
) -> Result<Option<ProjectUpdate>> {
|
||||
// Get primary slug
|
||||
let slug = project
|
||||
.slug
|
||||
.values()
|
||||
.next()
|
||||
.ok_or_else(|| {
|
||||
crate::error::PakkerError::InvalidProject("No slug found".to_string())
|
||||
})?
|
||||
.clone();
|
||||
|
||||
// Try each platform in project
|
||||
for platform_name in project.id.keys() {
|
||||
let api_key = get_api_key(platform_name);
|
||||
let Ok(platform) = create_platform(platform_name, api_key) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();
|
||||
|
||||
if let Ok(updated_project) = platform
|
||||
.request_project_with_files(&slug, &lockfile.mc_versions, &loaders)
|
||||
.await
|
||||
{
|
||||
// Compare files to detect updates
|
||||
let file_updates = detect_file_updates(project, &updated_project);
|
||||
|
||||
if !file_updates.is_empty() {
|
||||
return Ok(Some(ProjectUpdate {
|
||||
slug: project.slug.clone(),
|
||||
name: project.name.values().next().cloned().unwrap_or_default(),
|
||||
project_type: format!("{:?}", project.r#type),
|
||||
side: format!("{:?}", project.side),
|
||||
file_updates,
|
||||
}));
|
||||
}
|
||||
|
||||
return Ok(None); // No updates
|
||||
}
|
||||
}
|
||||
|
||||
Err(crate::error::PakkerError::PlatformApiError(
|
||||
"Failed to check for updates on any platform".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
fn detect_file_updates(
|
||||
current: &Project,
|
||||
updated: &Project,
|
||||
) -> Vec<FileUpdate> {
|
||||
let mut updates = Vec::new();
|
||||
|
||||
for old_file in ¤t.files {
|
||||
if let Some(new_file) = updated
|
||||
.files
|
||||
.iter()
|
||||
.find(|f| f.file_type == old_file.file_type)
|
||||
{
|
||||
// Check if file ID changed (indicates update)
|
||||
if new_file.id != old_file.id {
|
||||
updates.push(FileUpdate {
|
||||
platform: old_file.file_type.clone(),
|
||||
old_filename: old_file.file_name.clone(),
|
||||
new_filename: new_file.file_name.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updates
|
||||
}
|
||||
|
||||
/// Pretty-print detected updates to stdout.
///
/// Each project is shown with a (possibly hyperlinked) name followed by
/// one indented line per changed file. An empty slice prints a success
/// message instead.
fn display_update_results(updates: &[ProjectUpdate]) {
    if updates.is_empty() {
        println!("{}", "✓ All projects are up to date".green());
        return;
    }

    println!();
    println!("{}", "📦 Updates Available:".cyan().bold());
    println!();

    for update in updates {
        // Create hyperlink for project name using ui_utils; platforms
        // without a known URL scheme get no link.
        let project_url = if let Some((platform, slug)) = update.slug.iter().next()
        {
            match platform.as_str() {
                "modrinth" => crate::ui_utils::modrinth_project_url(slug),
                "curseforge" => crate::ui_utils::curseforge_project_url(slug),
                _ => String::new(),
            }
        } else {
            String::new()
        };

        if project_url.is_empty() {
            // Plain name when no project page URL is available.
            println!(
                "{} ({}, {})",
                update.name.yellow(),
                update.project_type,
                update.side
            );
        } else {
            // OSC-8 style terminal hyperlink around the colored name.
            let hyperlinked = crate::ui_utils::hyperlink(
                &project_url,
                &update.name.yellow().to_string(),
            );
            println!("{} ({}, {})", hyperlinked, update.project_type, update.side);
        }

        // One line per changed file: old -> new.
        for file_update in &update.file_updates {
            println!(
                " • {}: {} → {}",
                file_update.platform.cyan(),
                file_update.old_filename.dim(),
                file_update.new_filename.green()
            );
        }

        println!();
    }

    // Trailing summary line.
    println!(
        "{}",
        format!("{} project(s) need updates", updates.len()).yellow()
    );
}
|
||||
|
||||
/// Read the API token for `platform` from the environment, if set.
///
/// Unknown platform names yield `None`.
fn get_api_key(platform: &str) -> Option<String> {
    // Map the platform name to its environment variable, then read it.
    let var_name = match platform {
        "modrinth" => "MODRINTH_TOKEN",
        "curseforge" => "CURSEFORGE_API_KEY",
        _ => return None,
    };
    std::env::var(var_name).ok()
}
|
||||
|
||||
/// Format severity header with appropriate color
|
||||
fn format_severity_header(severity: ErrorSeverity, label: &str) -> String {
|
||||
match severity {
|
||||
ErrorSeverity::Error => format!("{label}:").red().to_string(),
|
||||
ErrorSeverity::Warning => format!("{label}:").yellow().to_string(),
|
||||
ErrorSeverity::Info => format!("{label}:").cyan().to_string(),
|
||||
}
|
||||
}
|
||||
309
crates/pakker-cli/src/cli/commands/sync.rs
Normal file
309
crates/pakker-cli/src/cli/commands/sync.rs
Normal file
|
|
@ -0,0 +1,309 @@
|
|||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
|
||||
use crate::{
|
||||
cli::SyncArgs,
|
||||
error::{PakkerError, Result},
|
||||
fetch::Fetcher,
|
||||
model::{Config, LockFile},
|
||||
platform::{CurseForgePlatform, ModrinthPlatform, PlatformClient},
|
||||
};
|
||||
|
||||
/// A difference between the lockfile and the files on disk.
enum SyncChange {
    Addition(PathBuf, String), // (file_path, project_name)
    Removal(String),           // project_pakku_id
}
|
||||
|
||||
/// Synchronize the lockfile with the files present on disk.
///
/// Detects additions (jar files on disk that the lockfile doesn't know)
/// and removals (lockfile entries whose files are gone), prompts the
/// user for each change (skippable via `global_yes`), saves the
/// lockfile, then fetches any missing files.
///
/// `--additions` / `--removals` restrict processing to one kind of
/// change; with neither flag, both are processed.
#[expect(
    clippy::expect_used,
    reason = "spinner template is a string literal and is always valid"
)]
pub async fn execute(
    args: SyncArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    log::info!("Synchronizing with lockfile");

    // `load` expects the directory containing the file, not the file itself.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;

    // Detect changes
    let changes = detect_changes(&lockfile, &config);

    if changes.is_empty() {
        println!("✓ Everything is in sync");
        return Ok(());
    }

    // Filter changes based on flags
    let mut additions = Vec::new();
    let mut removals = Vec::new();

    for change in changes {
        match change {
            SyncChange::Addition(path, name) => additions.push((path, name)),
            SyncChange::Removal(id) => removals.push(id),
        }
    }

    // Apply filters: neither flag set means "process everything".
    let no_filter = !args.additions && !args.removals;

    let spinner = ProgressBar::new_spinner();
    spinner.set_style(
        ProgressStyle::default_spinner()
            .template("{spinner:.green} {msg}")
            .expect("spinner template is valid"),
    );

    if no_filter || args.additions {
        let mut file_hashes = Vec::new();

        // Hash every accepted file up front so they can be resolved in
        // a single batched platform request below.
        for (file_path, _) in &additions {
            spinner
                .set_message(format!("Processing addition: {}", file_path.display()));
            if crate::ui_utils::prompt_yes_no(
                &format!("Add {} to lockfile?", file_path.display()),
                false,
                global_yes,
            )? && let Ok(file_data) = fs::read(file_path)
            {
                use sha1::Digest;
                let mut hasher = sha1::Sha1::new();
                hasher.update(&file_data);
                let hash =
                    crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
                file_hashes.push(FileHash {
                    path: file_path.clone(),
                    hash,
                });
            }
        }

        if !file_hashes.is_empty() {
            // Keep a copy so we can retry one-by-one if the batch fails.
            let fallback_hashes = file_hashes.clone();
            let result = add_files_batch(&mut lockfile, file_hashes).await;
            if let Err(e) = result {
                log::warn!(
                    "Batch lookup failed, falling back to individual lookups: {e}"
                );
                for fh in fallback_hashes {
                    // Individual failures are logged, not fatal.
                    if let Err(e) =
                        add_file_to_lockfile(&mut lockfile, &fh.path, &config).await
                    {
                        log::warn!("Failed to add {}: {}", fh.path.display(), e);
                    }
                }
            }
        }
    }

    if no_filter || args.removals {
        for pakku_id in &removals {
            if let Some(project) = lockfile
                .projects
                .iter()
                .find(|p| p.pakku_id.as_ref() == Some(pakku_id))
            {
                // Prefer the display name; fall back to the pakku id.
                let name = project
                    .name
                    .values()
                    .next()
                    .map(std::string::String::as_str)
                    .or(project.pakku_id.as_deref())
                    .unwrap_or("unknown");
                spinner.set_message(format!("Processing removal: {name}"));
                if crate::ui_utils::prompt_yes_no(
                    &format!("Remove {name} from lockfile?"),
                    false,
                    global_yes,
                )? {
                    lockfile
                        .remove_project(pakku_id)
                        .ok_or_else(|| PakkerError::ProjectNotFound(pakku_id.clone()))?;
                }
            }
        }
    }

    spinner.finish_and_clear();

    // Save changes
    lockfile.save(lockfile_dir)?;

    // Fetch missing files
    // NOTE(review): the fetcher is rooted at the current working
    // directory rather than the lockfile's directory — confirm this is
    // intentional.
    let fetcher = Fetcher::new(".");
    fetcher.sync(&lockfile, &config).await?;

    println!("✓ Sync complete");
    Ok(())
}
|
||||
|
||||
fn detect_changes(lockfile: &LockFile, config: &Config) -> Vec<SyncChange> {
|
||||
let mut changes = Vec::new();
|
||||
|
||||
// Get paths for each project type
|
||||
let paths = config.paths.clone();
|
||||
let mods_path = paths
|
||||
.get("mods")
|
||||
.map_or("mods", std::string::String::as_str);
|
||||
|
||||
// Build map of lockfile projects by file path
|
||||
let mut lockfile_files: HashMap<PathBuf, String> = HashMap::new();
|
||||
for project in &lockfile.projects {
|
||||
for file in &project.files {
|
||||
let file_path = PathBuf::from(mods_path).join(&file.file_name);
|
||||
if let Some(ref pakku_id) = project.pakku_id {
|
||||
lockfile_files.insert(file_path, pakku_id.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Scan filesystem for additions
|
||||
if let Ok(entries) = fs::read_dir(mods_path) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.is_file()
|
||||
&& let Some(ext) = path.extension()
|
||||
&& ext == "jar"
|
||||
&& !lockfile_files.contains_key(&path)
|
||||
{
|
||||
let name = path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().to_string())
|
||||
.unwrap_or_default();
|
||||
changes.push(SyncChange::Addition(path, name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for removals (projects in lockfile but files missing)
|
||||
let filesystem_files: HashSet<_> = fs::read_dir(mods_path).map_or_else(
|
||||
|_| HashSet::new(),
|
||||
|entries| {
|
||||
entries
|
||||
.flatten()
|
||||
.map(|e| e.path())
|
||||
.filter(|p| p.is_file())
|
||||
.collect()
|
||||
},
|
||||
);
|
||||
|
||||
for (lockfile_path, pakku_id) in &lockfile_files {
|
||||
if !filesystem_files.contains(lockfile_path) {
|
||||
changes.push(SyncChange::Removal(pakku_id.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
changes
|
||||
}
|
||||
|
||||
/// Identify a loose file by its SHA-1 hash and add it to the lockfile.
///
/// Tries Modrinth first, then CurseForge; files neither platform
/// recognizes are skipped with a warning rather than an error.
///
/// # Errors
///
/// Only I/O errors from reading the file are propagated.
async fn add_file_to_lockfile(
    lockfile: &mut LockFile,
    file_path: &Path,
    _config: &Config,
) -> Result<()> {
    use sha1::Digest;

    // Try to identify the file by hash lookup
    let modrinth = ModrinthPlatform::new();
    let curseforge = CurseForgePlatform::new(None);

    // Compute file hash
    let file_data = fs::read(file_path)?;
    let mut hasher = sha1::Sha1::new();
    hasher.update(&file_data);
    let hash = crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());

    // Try Modrinth first (SHA-1 hash)
    if let Ok(Some(project)) = modrinth.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from Modrinth)", file_path.display());
        return Ok(());
    }

    // Try CurseForge (Murmur2 hash computed from file)
    // NOTE(review): this passes the SHA-1 hex digest to CurseForge, but
    // CurseForge fingerprints are Murmur2 — confirm `lookup_by_hash`
    // computes/accepts the right hash kind.
    if let Ok(Some(project)) = curseforge.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from CurseForge)", file_path.display());
        return Ok(());
    }

    println!("⚠ Could not identify {}, skipping", file_path.display());
    Ok(())
}
|
||||
|
||||
/// A local file path paired with its SHA-1 hex digest, used for batched
/// platform hash lookups.
#[derive(Clone)]
struct FileHash {
    path: PathBuf,
    hash: String,
}
|
||||
|
||||
async fn add_files_batch(
|
||||
lockfile: &mut LockFile,
|
||||
file_hashes: Vec<FileHash>,
|
||||
) -> Result<()> {
|
||||
if file_hashes.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let modrinth = ModrinthPlatform::new();
|
||||
|
||||
let hashes: Vec<String> =
|
||||
file_hashes.iter().map(|fh| fh.hash.clone()).collect();
|
||||
|
||||
let projects = modrinth
|
||||
.request_projects_from_hashes(&hashes, "sha1")
|
||||
.await?;
|
||||
|
||||
let mut matched_indices: std::collections::HashSet<usize> =
|
||||
std::collections::HashSet::new();
|
||||
let mut added_pakku_ids: std::collections::HashSet<String> =
|
||||
std::collections::HashSet::new();
|
||||
|
||||
for project in &projects {
|
||||
let pakku_id = match &project.pakku_id {
|
||||
Some(id) => id.clone(),
|
||||
None => continue,
|
||||
};
|
||||
if added_pakku_ids.contains(&pakku_id) {
|
||||
continue;
|
||||
}
|
||||
for file_info in &project.files {
|
||||
for (idx, fh) in file_hashes.iter().enumerate() {
|
||||
if !matched_indices.contains(&idx)
|
||||
&& file_info
|
||||
.hashes
|
||||
.get("sha1")
|
||||
.map(std::string::String::as_str)
|
||||
== Some(&fh.hash)
|
||||
{
|
||||
lockfile.add_project(project.clone());
|
||||
added_pakku_ids.insert(pakku_id.clone());
|
||||
matched_indices.insert(idx);
|
||||
println!("✓ Added {} (from Modrinth)", fh.path.display());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (idx, fh) in file_hashes.iter().enumerate() {
|
||||
if matched_indices.contains(&idx) {
|
||||
continue;
|
||||
}
|
||||
println!("⚠ Could not identify {}, skipping", fh.path.display());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
41
crates/pakker-cli/src/cli/commands/unlink.rs
Normal file
41
crates/pakker-cli/src/cli/commands/unlink.rs
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::UnlinkArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::LockFile,
|
||||
};
|
||||
|
||||
pub fn execute(args: &UnlinkArgs, lockfile_path: &Path) -> Result<()> {
|
||||
log::info!("Unlinking {} -> {}", args.from, args.to);
|
||||
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
|
||||
let mut lockfile = LockFile::load(lockfile_dir)?;
|
||||
|
||||
// Find projects
|
||||
let from_project = lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.find(|p| p.matches_input(&args.from))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?;
|
||||
let from_id = from_project.pakku_id.clone().ok_or_else(|| {
|
||||
PakkerError::InvalidProject("From project has no pakku_id".to_string())
|
||||
})?;
|
||||
|
||||
let to_project = lockfile
|
||||
.projects
|
||||
.iter_mut()
|
||||
.find(|p| p.matches_input(&args.to))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;
|
||||
|
||||
// Remove link
|
||||
to_project.pakku_links.remove(&from_id);
|
||||
|
||||
// Save lockfile
|
||||
lockfile.save(lockfile_dir)?;
|
||||
|
||||
log::info!("Successfully unlinked projects");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
265
crates/pakker-cli/src/cli/commands/update.rs
Normal file
265
crates/pakker-cli/src/cli/commands/update.rs
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
use std::path::Path;
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
|
||||
use crate::{
|
||||
cli::UpdateArgs,
|
||||
error::{MultiError, PakkerError},
|
||||
model::{Config, LockFile, UpdateStrategy},
|
||||
ui_utils::{prompt_select, prompt_typo_suggestion, prompt_yes_no},
|
||||
utils::FlexVer,
|
||||
};
|
||||
|
||||
/// Update the selected lockfile projects to their latest matching files.
///
/// With no inputs, every project is considered; otherwise each input is
/// resolved (with typo suggestions) to a lockfile entry. Pinned projects
/// (`UpdateStrategy::None`) are skipped. Unless `--yes`/`--all` is set,
/// each update is confirmed interactively and a specific version can be
/// chosen. The lockfile is saved afterwards; the run fails only when
/// *every* selected project errored.
#[expect(
    clippy::expect_used,
    reason = "progress bar template is a string literal and is always valid"
)]
pub async fn execute(
    args: UpdateArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    let skip_prompts = global_yes;
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;
    let _config = Config::load(config_dir)?;

    // Create platforms
    let platforms = super::add::create_all_platforms();

    // Collect all known project identifiers for typo suggestions
    let all_slugs: Vec<String> = lockfile
        .projects
        .iter()
        .flat_map(|p| {
            let mut ids = Vec::new();
            if let Some(ref pakku_id) = p.pakku_id {
                ids.push(pakku_id.clone());
            }
            ids.extend(p.slug.values().cloned());
            ids.extend(p.name.values().cloned());
            ids.extend(p.aliases.iter().cloned());
            ids
        })
        .collect();

    // Resolve the user's inputs to lockfile indices; empty input means
    // "all projects".
    let project_indices: Vec<_> = if args.inputs.is_empty() {
        (0..lockfile.projects.len()).collect()
    } else {
        let mut indices = Vec::new();
        for input in &args.inputs {
            if let Some((idx, _)) = lockfile
                .projects
                .iter()
                .enumerate()
                .find(|(_, p)| p.matches_input(input))
            {
                indices.push(idx);
            } else {
                // Try typo suggestion
                if let Ok(Some(suggestion)) =
                    prompt_typo_suggestion(input, &all_slugs, skip_prompts)
                    && let Some((idx, _)) = lockfile
                        .projects
                        .iter()
                        .enumerate()
                        .find(|(_, p)| p.matches_input(&suggestion))
                {
                    log::info!("Using suggested project: {suggestion}");
                    indices.push(idx);
                    continue;
                }
                // Neither the input nor a suggestion resolved — abort.
                return Err(PakkerError::ProjectNotFound(input.clone()));
            }
        }
        indices
    };

    // Capture count before consuming the iterator
    let total_projects = project_indices.len();

    // Create progress bar
    let pb = ProgressBar::new(total_projects as u64);
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );

    let mut skipped_pinned = 0;
    let mut update_errors = MultiError::new();

    for idx in project_indices {
        let old_project = &lockfile.projects[idx];

        // Skip projects with UpdateStrategy::None (pinned)
        if old_project.update_strategy == UpdateStrategy::None {
            pb.println(format!(
                " {} - Skipped (update strategy: NONE)",
                old_project.get_name()
            ));
            skipped_pinned += 1;
            pb.inc(1);
            continue;
        }

        pb.set_message(format!("Updating {}...", old_project.get_name()));

        // NOTE(review): an arbitrary slug from the per-platform map is
        // used for every platform request — confirm slugs never differ
        // between platforms for the same project.
        let slug = old_project
            .slug
            .values()
            .next()
            .ok_or_else(|| PakkerError::InvalidProject("No slug found".into()))?;

        // Find updated project from one of the platforms
        let mut updated_project = None;
        for platform in platforms.values() {
            if let Ok(project) = platform
                .request_project_with_files(
                    slug,
                    &lockfile.mc_versions,
                    &lockfile.loaders.keys().cloned().collect::<Vec<_>>(),
                )
                .await
            {
                updated_project = Some(project);
                break;
            }
        }

        if updated_project.is_none() {
            // Failed to fetch update info from any platform
            update_errors.push(PakkerError::PlatformApiError(format!(
                "Failed to check updates for '{}'",
                old_project.get_name()
            )));
            pb.inc(1);
            continue;
        }

        if let Some(mut updated_project) = updated_project
            && !updated_project.files.is_empty()
            && let Some(old_file) = lockfile.projects[idx].files.first()
        {
            // Sort files by FlexVer if that strategy is set
            if old_project.update_strategy == UpdateStrategy::FlexVer {
                updated_project.files.sort_by(|a, b| {
                    // Use FlexVer for comparison - b.cmp(a) gives descending order
                    // (newest first)
                    FlexVer(&b.file_name).cmp(&FlexVer(&a.file_name))
                });
            }

            // Clone data needed for comparisons to avoid borrow issues
            let first_file = updated_project
                .files
                .first()
                .ok_or_else(|| PakkerError::InvalidProject("No files found".into()))?;
            let new_file_id = first_file.id.clone();
            let new_file_name = first_file.file_name.clone();
            let old_file_name = old_file.file_name.clone();
            let project_name = old_project.get_name();

            if new_file_id == old_file.id {
                pb.println(format!(" {project_name} - Already up to date"));
            } else {
                // Interactive confirmation and version selection if not using --yes
                // flag
                let mut should_update = skip_prompts || args.all;
                let mut selected_idx: Option<usize> = None;

                if !skip_prompts && !args.all {
                    // Suspend the bar so prompts render cleanly.
                    pb.suspend(|| {
                        // First, confirm the update
                        let prompt_msg = format!(
                            "Update '{project_name}' from {old_file_name} to \
                             {new_file_name}?"
                        );
                        should_update =
                            prompt_yes_no(&prompt_msg, true, skip_prompts).unwrap_or(false);

                        // If confirmed and multiple versions available, offer selection
                        if should_update && updated_project.files.len() > 1 {
                            let choices: Vec<String> = updated_project
                                .files
                                .iter()
                                .map(|f| format!("{} ({})", f.file_name, f.id))
                                .collect();

                            let choice_refs: Vec<&str> =
                                choices.iter().map(std::string::String::as_str).collect();

                            if let Ok(idx) = prompt_select(
                                &format!("Select version for {project_name}:"),
                                &choice_refs,
                            ) {
                                selected_idx = Some(idx);
                            }
                        }
                    });
                }

                // Apply file selection outside the closure
                // (this `idx` shadows the loop index only inside the block)
                if let Some(idx) = selected_idx
                    && idx > 0
                {
                    updated_project.files.swap(0, idx);
                }

                if should_update {
                    let selected_file =
                        updated_project.files.first().ok_or_else(|| {
                            PakkerError::InvalidProject(
                                "No files found after selection".into(),
                            )
                        })?;
                    pb.println(format!(
                        " {} -> {}",
                        old_file_name, selected_file.file_name
                    ));
                    // Replace the lockfile entry wholesale.
                    lockfile.projects[idx] = updated_project;
                } else {
                    pb.println(format!(" {project_name} - Skipped by user"));
                }
            }
        }
        pb.inc(1);
    }

    if skipped_pinned > 0 {
        pb.finish_with_message(format!(
            "Update complete ({skipped_pinned} pinned projects skipped)"
        ));
    } else {
        pb.finish_with_message("Update complete");
    }
    lockfile.save(lockfile_dir)?;

    // Report any errors that occurred during updates
    if !update_errors.is_empty() {
        let error_list = update_errors.errors();
        log::warn!(
            "{} project(s) encountered errors during update check",
            error_list.len()
        );
        for err in error_list {
            log::warn!(" - {err}");
        }

        // Extend with any additional collected errors and check if we should fail
        let all_errors = update_errors.into_errors();
        if all_errors.len() == total_projects {
            // All projects failed - return error
            let mut multi = MultiError::new();
            multi.extend(all_errors);
            return multi.into_result(());
        }
    }

    Ok(())
}
|
||||
259
crates/pakker-cli/src/cli/tests.rs
Normal file
259
crates/pakker-cli/src/cli/tests.rs
Normal file
|
|
@ -0,0 +1,259 @@
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::{fs, path::PathBuf};
|
||||
|
||||
use tempfile::TempDir;
|
||||
|
||||
use crate::{
|
||||
cli::{ExportArgs, ImportArgs, RmArgs},
|
||||
model::config::Config,
|
||||
};
|
||||
|
||||
// Argument-parsing tests for `rm`.
// NOTE(review): `parse_from` implies `RmArgs` derives `clap::Parser`;
// the literal "rm" token here is consumed as the subcommand-style name.

#[test]
fn test_rm_args_parsing_all_flag() {
    // --all with no positional inputs.
    let args = RmArgs::parse_from(&["pakker", "rm", "--all"]);
    assert!(args.all);
    assert!(args.inputs.is_empty());
}

#[test]
fn test_rm_args_parsing_multiple_inputs() {
    // Positional inputs are collected in order; --all stays false.
    let args = RmArgs::parse_from(&["pakker", "rm", "mod1", "mod2", "mod3"]);
    assert!(!args.all);
    assert_eq!(args.inputs, vec!["mod1", "mod2", "mod3"]);
}

#[test]
fn test_rm_args_parsing_all_with_yes() {
    // --all and --yes can be combined.
    let args = RmArgs::parse_from(&["pakker", "rm", "--all", "--yes"]);
    assert!(args.all);
    assert!(args.yes);
    assert!(args.inputs.is_empty());
}

#[test]
fn test_rm_args_parsing_with_inputs_and_yes() {
    // A positional input together with --yes.
    let args = RmArgs::parse_from(&["pakker", "rm", "mod1", "--yes"]);
    assert!(!args.all);
    assert!(args.yes);
    assert_eq!(args.inputs, vec!["mod1"]);
}
|
||||
|
||||
// Argument-parsing tests for `import`.

#[test]
fn test_import_args_parsing_deps_flag() {
    // Long-form --deps flag plus the pack file positional.
    let args =
        ImportArgs::parse_from(&["pakker", "import", "--deps", "pack.zip"]);
    assert!(args.deps);
    assert_eq!(args.file, "pack.zip");
}

#[test]
fn test_import_args_parsing_no_deps_default() {
    // deps defaults to false when the flag is absent.
    let args = ImportArgs::parse_from(&["pakker", "import", "pack.zip"]);
    assert!(!args.deps);
    assert_eq!(args.file, "pack.zip");
}

#[test]
fn test_import_args_parsing_deps_with_yes() {
    // --deps and --yes combined.
    let args = ImportArgs::parse_from(&[
        "pakker", "import", "--deps", "--yes", "pack.zip",
    ]);
    assert!(args.deps);
    assert!(args.yes);
    assert_eq!(args.file, "pack.zip");
}

#[test]
fn test_import_args_parsing_short_deps_flag() {
    // Short alias -D maps to deps.
    let args = ImportArgs::parse_from(&["pakker", "import", "-D", "pack.zip"]);
    assert!(args.deps);
    assert_eq!(args.file, "pack.zip");
}
|
||||
|
||||
// Argument-parsing tests for `export`.

#[test]
fn test_export_args_parsing_show_io_errors() {
    // --show-io-errors alone; --no-server stays false.
    let args =
        ExportArgs::parse_from(&["pakker", "export", "--show-io-errors"]);
    assert!(args.show_io_errors);
    assert!(!args.no_server);
}

#[test]
fn test_export_args_parsing_no_server() {
    // --no-server alone; --show-io-errors stays false.
    let args = ExportArgs::parse_from(&["pakker", "export", "--no-server"]);
    assert!(args.no_server);
    assert!(!args.show_io_errors);
}

#[test]
fn test_export_args_parsing_both_flags() {
    // Both boolean flags plus a --profile value.
    let args = ExportArgs::parse_from(&[
        "pakker",
        "export",
        "--show-io-errors",
        "--no-server",
        "--profile",
        "modrinth",
    ]);
    assert!(args.show_io_errors);
    assert!(args.no_server);
    assert_eq!(args.profile, Some("modrinth".to_string()));
}

#[test]
fn test_export_args_parsing_with_output() {
    // --output and --profile both take values.
    let args = ExportArgs::parse_from(&[
        "pakker",
        "export",
        "--output",
        "/tmp/export",
        "--profile",
        "curseforge",
    ]);
    assert_eq!(args.output, Some("/tmp/export".to_string()));
    assert_eq!(args.profile, Some("curseforge".to_string()));
}

#[test]
fn test_export_args_parsing_pakker_layout() {
    // --pakker-layout boolean flag.
    let args = ExportArgs::parse_from(&["pakker", "export", "--pakker-layout"]);
    assert!(args.pakker_layout);
}
|
||||
|
||||
#[test]
fn test_config_with_export_server_side_projects_to_client_true() {
    // Minimal config with the export flag explicitly enabled.
    let config = Config {
        name: "test-pack".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: std::collections::HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: Some(true),
        // Required by the struct literal: the `without_...` sibling test
        // initializes this field, so the struct declares it and omitting
        // it here would not compile.
        file_count_preference: None,
    };
    assert_eq!(config.export_server_side_projects_to_client, Some(true));
}
|
||||
|
||||
#[test]
fn test_config_with_export_server_side_projects_to_client_false() {
    // Minimal config with the export flag explicitly disabled.
    let config = Config {
        name: "test-pack".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: std::collections::HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: Some(false),
        // Required by the struct literal: the `without_...` sibling test
        // initializes this field, so the struct declares it and omitting
        // it here would not compile.
        file_count_preference: None,
    };
    assert_eq!(config.export_server_side_projects_to_client, Some(false));
}
|
||||
|
||||
#[test]
fn test_config_without_export_server_side_projects_to_client() {
    // Minimal config leaving the export flag unset.
    let config = Config {
        name: "test-pack".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: std::collections::HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };
    assert!(config.export_server_side_projects_to_client.is_none());
}
|
||||
|
||||
#[test]
fn test_config_serialization_with_export_server_side() {
    // Serialize a fully-populated Config and check that the camelCase
    // key appears, then round-trip it back through serde_json.
    let config = Config {
        name: "test-pack".to_string(),
        version: "1.0.0".to_string(),
        description: Some("A test modpack".to_string()),
        author: Some("Test Author".to_string()),
        overrides: vec!["overrides".to_string()],
        server_overrides: Some(vec![
            "server-overrides".to_string(),
        ]),
        client_overrides: Some(vec![
            "client-overrides".to_string(),
        ]),
        paths: std::collections::HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: Some(true),
        // Fix: sibling Config literals initialize this field; omitting a
        // declared field makes this literal non-exhaustive.
        file_count_preference: None,
    };

    let json = serde_json::to_string_pretty(&config).unwrap();
    assert!(json.contains("exportServerSideProjectsToClient"));
    assert!(json.contains("true"));

    let deserialized: Config = serde_json::from_str(&json).unwrap();
    assert_eq!(
        deserialized.export_server_side_projects_to_client,
        Some(true)
    );
}
|
||||
|
||||
#[test]
fn test_config_serialization_without_export_server_side() {
    // `None` must be skipped on serialize and deserialize back to `None`.
    let config = Config {
        name: String::from("test-pack"),
        version: String::from("1.0.0"),
        description: None,
        author: None,
        overrides: vec![String::from("overrides")],
        server_overrides: None,
        client_overrides: None,
        paths: std::collections::HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };

    let json = serde_json::to_string_pretty(&config).unwrap();
    assert!(!json.contains("exportServerSideProjectsToClient"));

    let round_trip: Config = serde_json::from_str(&json).unwrap();
    assert!(round_trip.export_server_side_projects_to_client.is_none());
}
|
||||
|
||||
#[test]
fn test_config_default_has_no_export_server_side() {
    // Default construction must leave the flag unset.
    assert!(
        Config::default()
            .export_server_side_projects_to_client
            .is_none()
    );
}
|
||||
|
||||
#[test]
fn test_export_args_all_flags_together() {
    // All export flags combined must parse independently of one another.
    let argv = [
        "pakker",
        "export",
        "--profile",
        "modrinth",
        "--output",
        "/tmp/out",
        "--pakker-layout",
        "--show-io-errors",
        "--no-server",
    ];
    let args = ExportArgs::parse_from(argv);
    assert_eq!(args.profile.as_deref(), Some("modrinth"));
    assert_eq!(args.output.as_deref(), Some("/tmp/out"));
    assert!(args.pakker_layout);
    assert!(args.show_io_errors);
    assert!(args.no_server);
}
|
||||
}
|
||||
233
crates/pakker-cli/src/lib.rs
Normal file
233
crates/pakker-cli/src/lib.rs
Normal file
|
|
@ -0,0 +1,233 @@
|
|||
#![expect(
|
||||
clippy::multiple_crate_versions,
|
||||
reason = "transitive dependency version conflicts from upstream crates"
|
||||
)]
|
||||
#![expect(
|
||||
clippy::cargo_common_metadata,
|
||||
reason = "license and repository not yet configured"
|
||||
)]
|
||||
|
||||
use std::{env, path::PathBuf};
|
||||
|
||||
use clap::Parser;
|
||||
|
||||
pub mod cli;
|
||||
|
||||
pub use pakker_core::{
|
||||
error, export, fetch, git, http, ipc, model, platform, rate_limiter, resolver,
|
||||
ui_utils, utils,
|
||||
};
|
||||
|
||||
use cli::{Cli, Commands};
|
||||
use error::PakkerError;
|
||||
|
||||
/// Walk upward from the current directory until a `pakker-lock.json`
/// is found; returns that directory, or `None` when the filesystem
/// root is reached without a match (or the cwd is unavailable).
fn find_working_directory() -> Option<PathBuf> {
    let cwd = env::current_dir().ok()?;
    cwd.ancestors()
        .find(|dir| dir.join("pakker-lock.json").exists())
        .map(|dir| dir.to_path_buf())
}
|
||||
|
||||
/// CLI entry point: parse arguments, set up logging and the keyring,
/// locate the pack root, then dispatch to the subcommand handler.
///
/// # Errors
///
/// Propagates any `PakkerError` returned by the selected subcommand.
pub async fn run() -> Result<(), PakkerError> {
    let cli = Cli::parse();

    // -v flags accumulate; map the count onto env_logger filter levels.
    let log_level = match cli.verbose {
        0 => "warn",
        1 => "info",
        2 => "debug",
        _ => "trace",
    };

    // RUST_LOG (if set) still overrides the verbosity-derived default.
    env_logger::Builder::from_env(
        env_logger::Env::default().default_filter_or(log_level),
    )
    .format_timestamp(None)
    .format_module_path(false)
    .init();

    // Best-effort: a failing keyring backend is logged, not fatal.
    if let Err(e) = keyring::use_native_store(false) {
        log::warn!("Failed to initialize platform keyring store: {e}");
    }

    // Resolve the pack root by searching upward for pakker-lock.json;
    // fall back to the current directory when none exists yet (e.g. `init`).
    let working_dir =
        find_working_directory().unwrap_or_else(|| PathBuf::from("."));
    let lockfile_path = working_dir.join("pakker-lock.json");
    let config_path = working_dir.join("pakker.json");
    // Global --yes applies to every subcommand that prompts.
    let global_yes = cli.yes;

    // Each arm forwards to its command module; async commands are awaited
    // here so the handlers themselves stay plain async fns.
    match cli.command {
        Commands::Init(args) => {
            cli::commands::init::execute(
                args,
                global_yes,
                &lockfile_path,
                &config_path,
            )
        },
        Commands::Import(args) => {
            cli::commands::import::execute(
                args,
                global_yes,
                &lockfile_path,
                &config_path,
            )
            .await
        },
        Commands::Add(args) => {
            cli::commands::add::execute(
                args,
                global_yes,
                &lockfile_path,
                &config_path,
            )
            .await
        },
        Commands::AddPrj(args) => {
            cli::commands::add_prj::execute(
                args.curseforge,
                args.modrinth,
                args.github,
                args.project_type,
                args.side,
                args.strategy,
                args.redistributable,
                args.subpath,
                args.aliases,
                args.export,
                args.no_deps,
                global_yes,
                &lockfile_path,
                &config_path,
            )
            .await
        },
        Commands::Rm(args) => {
            cli::commands::rm::execute(
                &args,
                global_yes,
                &lockfile_path,
                &config_path,
            )
        },
        Commands::Update(args) => {
            cli::commands::update::execute(
                args,
                global_yes,
                &lockfile_path,
                &config_path,
            )
            .await
        },
        Commands::Ls(args) => cli::commands::ls::execute(&args, &lockfile_path),
        Commands::Set(args) => {
            cli::commands::set::execute(&args, &lockfile_path, &config_path)
        },
        Commands::Link(args) => cli::commands::link::execute(&args, &lockfile_path),
        Commands::Unlink(args) => {
            cli::commands::unlink::execute(&args, &lockfile_path)
        },
        Commands::Diff(args) => cli::commands::diff::execute(&args, &lockfile_path),
        Commands::Fetch(args) => {
            cli::commands::fetch::execute(args, &lockfile_path, &config_path).await
        },
        Commands::Sync(args) => {
            cli::commands::sync::execute(
                args,
                global_yes,
                &lockfile_path,
                &config_path,
            )
            .await
        },
        Commands::Export(args) => {
            cli::commands::export::execute(args, &lockfile_path, &config_path).await
        },
        Commands::Remote(args) => cli::commands::remote::execute(args).await,
        Commands::RemoteUpdate(args) => {
            cli::commands::remote_update::execute(&args)
        },
        Commands::Status(args) => {
            cli::commands::status::execute(
                args.parallel,
                global_yes,
                &lockfile_path,
                &config_path,
            )
            .await
        },
        Commands::Inspect(args) => {
            cli::commands::inspect::execute(
                &args.projects,
                &lockfile_path,
                &config_path,
            )
        },
        // `credentials` has optional sub-subcommands; bare invocation
        // falls through to the management/delete handler.
        Commands::Credentials(args) => {
            match args.subcommand {
                Some(cli::CredentialsSubcommand::Set(set_args)) => {
                    cli::commands::credentials_set::execute(
                        set_args.cf_api_key,
                        set_args.modrinth_token,
                        set_args.gh_access_token,
                    )
                    .await
                },
                Some(cli::CredentialsSubcommand::Test) => {
                    cli::commands::credentials_test::execute().await
                },
                None => {
                    cli::commands::credentials::execute(
                        args.delete,
                        args.delete_file,
                        args.delete_keyring,
                    )
                },
            }
        },
        // `cfg` likewise: `cfg prj` edits a single project, bare `cfg`
        // edits the pack-level configuration fields.
        Commands::Cfg(args) => {
            match args.subcommand {
                Some(cli::CfgSubcommand::Prj(prj_args)) => {
                    cli::commands::cfg_prj::execute(
                        &config_path,
                        &lockfile_path,
                        &prj_args.project,
                        prj_args.r#type.as_deref(),
                        prj_args.side.as_deref(),
                        prj_args.update_strategy.as_deref(),
                        prj_args.redistributable,
                        prj_args.subpath,
                        prj_args.add_alias,
                        prj_args.remove_alias,
                        prj_args.export,
                    )
                },
                None => {
                    cli::commands::cfg::execute(
                        &config_path,
                        args.name,
                        args.version,
                        args.description,
                        args.author,
                        args.mods_path,
                        args.resource_packs_path,
                        args.data_packs_path,
                        args.worlds_path,
                        args.shaders_path,
                    )
                },
            }
        },
        Commands::Fork(args) => {
            cli::commands::fork::execute(&args)?;
            Ok(())
        },
    }
}
|
||||
47
crates/pakker-core/Cargo.toml
Normal file
47
crates/pakker-core/Cargo.toml
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
# Manifest for the pakker-core crate: the platform-agnostic library
# shared by pakker-cli and any other frontends. Shared metadata and all
# dependency versions are inherited from the workspace root manifest.
[package]
name = "pakker-core"
version.workspace = true
edition.workspace = true
description = "Core library for Pakker"
keywords.workspace = true
categories.workspace = true
rust-version.workspace = true
readme = "../../docs/README.md"

# Every dependency is pinned once at the workspace level.
[dependencies]
anyhow.workspace = true
async-trait.workspace = true
comfy-table.workspace = true
dialoguer.workspace = true
futures.workspace = true
git2.workspace = true
glob.workspace = true
indicatif.workspace = true
keyring.workspace = true
keyring-core.workspace = true
libc.workspace = true
log.workspace = true
md-5.workspace = true
rand.workspace = true
regex.workspace = true
reqwest.workspace = true
semver.workspace = true
serde.workspace = true
serde_json.workspace = true
sha1.workspace = true
sha2.workspace = true
strsim.workspace = true
tempfile.workspace = true
textwrap.workspace = true
thiserror.workspace = true
tokio.workspace = true
walkdir.workspace = true
yansi.workspace = true
zip.workspace = true

# Test-only dependencies (HTTP mocking and temp dirs).
# NOTE(review): tempfile is already a regular dependency above, so this
# dev-dependencies entry is redundant -- confirm and consider dropping it.
[dev-dependencies]
mockito.workspace = true
tempfile.workspace = true

# Lint levels are configured once for the whole workspace.
[lints]
workspace = true
|
||||
292
crates/pakker-core/src/error.rs
Normal file
292
crates/pakker-core/src/error.rs
Normal file
|
|
@ -0,0 +1,292 @@
|
|||
use std::fmt::Write;
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
pub type Result<T> = std::result::Result<T, PakkerError>;
|
||||
|
||||
/// Severity level for errors
///
/// Classifies how a reported problem should be treated by callers; the
/// `Default` derive makes [`ErrorSeverity::Error`] the default level.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ErrorSeverity {
    /// Fatal error - operation cannot continue
    #[default]
    Error,

    /// Warning - operation can continue but may have issues
    Warning,

    /// Info - informational message
    Info,
}
|
||||
|
||||
/// Container for multiple errors that occurred during an operation
///
/// Lets a batch operation accumulate every failure and report them all
/// at once (via `PakkerError::Multiple`) instead of aborting on the first.
#[derive(Debug)]
pub struct MultiError {
    // Accumulated errors, kept in the order they were recorded.
    errors: Vec<PakkerError>,
}
|
||||
|
||||
impl MultiError {
|
||||
pub const fn new() -> Self {
|
||||
Self { errors: Vec::new() }
|
||||
}
|
||||
|
||||
pub fn push(&mut self, error: PakkerError) {
|
||||
self.errors.push(error);
|
||||
}
|
||||
|
||||
pub fn extend(&mut self, errors: impl IntoIterator<Item = PakkerError>) {
|
||||
self.errors.extend(errors);
|
||||
}
|
||||
|
||||
pub const fn is_empty(&self) -> bool {
|
||||
self.errors.is_empty()
|
||||
}
|
||||
|
||||
pub const fn len(&self) -> usize {
|
||||
self.errors.len()
|
||||
}
|
||||
|
||||
pub fn into_result<T>(self, success_value: T) -> Result<T> {
|
||||
if self.is_empty() {
|
||||
Ok(success_value)
|
||||
} else {
|
||||
Err(PakkerError::Multiple(self.errors))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn errors(&self) -> &[PakkerError] {
|
||||
&self.errors
|
||||
}
|
||||
|
||||
pub fn into_errors(self) -> Vec<PakkerError> {
|
||||
self.errors
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for MultiError {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<PakkerError> for MultiError {
    /// Collect a stream of errors directly into a `MultiError`.
    fn from_iter<I: IntoIterator<Item = PakkerError>>(source: I) -> Self {
        let errors: Vec<PakkerError> = source.into_iter().collect();
        Self { errors }
    }
}
|
||||
|
||||
/// Unified error type for all pakker operations.
///
/// `Display` strings come from the `#[error(..)]` attributes via
/// `thiserror`; `#[from]` variants enable `?` conversion from the
/// underlying error types.
#[derive(Error, Debug)]
pub enum PakkerError {
    // Network errors
    #[error("Network request failed: {0}")]
    NetworkError(#[from] reqwest::Error),

    #[error("Platform API error: {0}")]
    PlatformApiError(String),

    // Validation errors
    #[error("Invalid lock file: {0}")]
    InvalidLockFile(String),

    #[error("Invalid config file: {0}")]
    InvalidConfigFile(String),

    #[error("Project not found: {0}")]
    ProjectNotFound(String),

    #[error("File selection error: {0}")]
    FileSelectionError(String),

    #[error("File not found: {0}")]
    FileNotFound(String),

    // Conflict errors
    #[error("Circular dependency detected: {0}")]
    CircularDependency(String),

    // File I/O errors
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("Serialization error: {0}")]
    SerializationError(#[from] serde_json::Error),

    // Raised when a downloaded file's checksum differs from the lockfile.
    #[error("Hash mismatch for file {file}: expected {expected}, got {actual}")]
    HashMismatch {
        file: String,
        expected: String,
        actual: String,
    },

    #[error("Download failed: {0}")]
    DownloadFailed(String),

    // Export errors
    #[error("Export failed: {0}")]
    ExportFailed(String),

    #[error("Invalid export profile: {0}")]
    InvalidExportProfile(String),

    // General errors
    #[error("Configuration error: {0}")]
    ConfigError(String),

    #[error("Internal error: {0}")]
    InternalError(String),

    #[error("Already exists: {0}")]
    AlreadyExists(String),

    #[error("Invalid input: {0}")]
    InvalidInput(String),

    #[error("Invalid project: {0}")]
    InvalidProject(String),

    #[error("Invalid import file: {0}")]
    InvalidImportFile(String),

    #[error("Zip error: {0}")]
    ZipError(#[from] zip::result::ZipError),

    // Git and Fork errors
    #[error("Git error: {0}")]
    GitError(String),

    #[error("Remote not found: {0}")]
    RemoteNotFound(String),

    #[error("Fork error: {0}")]
    Fork(String),

    #[error("Invalid hash: {0}")]
    InvalidHash(String),

    #[error("Invalid response: {0}")]
    InvalidResponse(String),

    #[error("IPC error: {0}")]
    IpcError(String),

    // Aggregate of several errors; Display delegates to the helper so a
    // single wrapped error prints without the "N errors occurred" header.
    #[error("{}", format_multiple_errors(.0))]
    Multiple(Vec<Self>),
}
|
||||
|
||||
fn format_multiple_errors(errors: &[PakkerError]) -> String {
|
||||
if errors.len() == 1 {
|
||||
return errors[0].to_string();
|
||||
}
|
||||
|
||||
let mut msg = format!("{} errors occurred:\n", errors.len());
|
||||
for (idx, error) in errors.iter().enumerate() {
|
||||
let _ = writeln!(msg, " {}. {}", idx + 1, error);
|
||||
}
|
||||
msg
|
||||
}
|
||||
|
||||
// Flatten libgit2 errors to their display message; the structured git2
// error codes are not preserved.
impl From<git2::Error> for PakkerError {
    fn from(err: git2::Error) -> Self {
        Self::GitError(err.to_string())
    }
}
|
||||
|
||||
// Flatten IPC-layer errors to their display message for the top-level
// error type.
impl From<crate::ipc::IpcError> for PakkerError {
    fn from(err: crate::ipc::IpcError) -> Self {
        Self::IpcError(err.to_string())
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // A fresh accumulator reports empty / length zero.
    #[test]
    fn test_multi_error_empty() {
        let multi = MultiError::new();
        assert!(multi.is_empty());
        assert_eq!(multi.len(), 0);
    }

    // push() appends and updates len().
    #[test]
    fn test_multi_error_push() {
        let mut multi = MultiError::new();
        multi.push(PakkerError::ProjectNotFound("mod1".to_string()));
        multi.push(PakkerError::ProjectNotFound("mod2".to_string()));

        assert!(!multi.is_empty());
        assert_eq!(multi.len(), 2);
    }

    // No errors recorded -> into_result yields Ok(success_value).
    #[test]
    fn test_multi_error_into_result_empty() {
        let multi = MultiError::new();
        let result: Result<i32> = multi.into_result(42);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 42);
    }

    // Any recorded error -> into_result yields Err.
    #[test]
    fn test_multi_error_into_result_with_errors() {
        let mut multi = MultiError::new();
        multi.push(PakkerError::ProjectNotFound("mod1".to_string()));

        let result: Result<i32> = multi.into_result(42);
        assert!(result.is_err());
    }

    // FromIterator collects a Vec of errors into a MultiError.
    #[test]
    fn test_multi_error_from_iterator() {
        let errors = vec![
            PakkerError::ProjectNotFound("mod1".to_string()),
            PakkerError::ProjectNotFound("mod2".to_string()),
        ];
        let multi: MultiError = errors.into_iter().collect();
        assert_eq!(multi.len(), 2);
    }

    // extend() appends every error from the iterator.
    #[test]
    fn test_multi_error_extend() {
        let mut multi = MultiError::new();
        multi.push(PakkerError::ProjectNotFound("mod1".to_string()));

        let more_errors = vec![
            PakkerError::ProjectNotFound("mod2".to_string()),
            PakkerError::ProjectNotFound("mod3".to_string()),
        ];
        multi.extend(more_errors);

        assert_eq!(multi.len(), 3);
    }

    // Two or more wrapped errors produce the numbered-summary format.
    #[test]
    fn test_multiple_errors_formatting() {
        let errors = vec![
            PakkerError::ProjectNotFound("mod1".to_string()),
            PakkerError::ProjectNotFound("mod2".to_string()),
        ];
        let error = PakkerError::Multiple(errors);
        let msg = error.to_string();

        assert!(msg.contains("2 errors occurred"));
        assert!(msg.contains("mod1"));
        assert!(msg.contains("mod2"));
    }

    #[test]
    fn test_single_multiple_error_formatting() {
        let errors = vec![PakkerError::ProjectNotFound("mod1".to_string())];
        let error = PakkerError::Multiple(errors);
        let msg = error.to_string();

        // Single error should just display the error itself
        assert!(msg.contains("mod1"));
        assert!(!msg.contains("errors occurred"));
    }

    // The #[default] attribute pins Error as the default severity.
    #[test]
    fn test_error_severity_default() {
        assert_eq!(ErrorSeverity::default(), ErrorSeverity::Error);
    }
}
|
||||
270
crates/pakker-core/src/export.rs
Normal file
270
crates/pakker-core/src/export.rs
Normal file
|
|
@ -0,0 +1,270 @@
|
|||
mod profile_config;
|
||||
mod profiles;
|
||||
mod rules;
|
||||
|
||||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
pub use profile_config::ProfileConfig;
|
||||
pub use profiles::{ExportProfile, create_profile};
|
||||
pub use rules::{Effect, Rule, RuleContext};
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{Config, LockFile},
|
||||
};
|
||||
|
||||
/// Drives modpack export for one or more profiles.
pub struct Exporter {
    // Root of the pack checkout; rule effects resolve paths against it.
    base_path: PathBuf,
}
|
||||
|
||||
impl Exporter {
    /// Create an exporter rooted at `base_path` (the pack checkout root).
    pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
        Self {
            base_path: base_path.as_ref().to_path_buf(),
        }
    }

    /// Export all default profiles (curseforge, modrinth, serverpack).
    ///
    /// In multi-profile mode we try each profile independently.
    /// - Profiles that can't run due to missing required credentials are skipped.
    /// - Profiles that fail for other reasons are recorded and reported.
    ///
    /// Returns successfully exported files. If any profile failed (non-skip),
    /// returns an error after attempting all profiles.
    #[expect(clippy::future_not_send, reason = "not required to be Send")]
    pub async fn export_all_profiles(
        &self,
        lockfile: &LockFile,
        config: &Config,
        output_path: &Path,
    ) -> Result<Vec<PathBuf>> {
        let profiles = vec!["curseforge", "modrinth", "serverpack"];
        let mut output_files = Vec::new();
        // (profile name, error message) for every non-auth failure.
        let mut failures: Vec<(String, String)> = Vec::new();

        println!("Exporting {} profiles...", profiles.len());

        for profile_name in profiles {
            match self
                .export(profile_name, lockfile, config, output_path)
                .await
            {
                Ok(output_file) => output_files.push(output_file),
                Err(err) => {
                    // Missing credentials skip the profile rather than
                    // counting as a failure.
                    if Self::is_auth_error(&err) {
                        eprintln!(
                            "{profile_name} export skipped (authentication required)"
                        );
                        continue;
                    }

                    eprintln!("{profile_name} export failed: {err}");
                    failures.push((profile_name.to_string(), err.to_string()));
                },
            }
        }

        if !failures.is_empty() {
            return Err(PakkerError::ExportFailed(format!(
                "{} profile(s) failed",
                failures.len()
            )));
        }

        // Possible when every profile was auth-skipped.
        if output_files.is_empty() {
            return Err(PakkerError::ExportFailed(
                "No export profiles produced an output file".to_string(),
            ));
        }

        println!("All profiles exported successfully.");
        Ok(output_files)
    }

    /// Heuristically classify `err` as an authentication problem by
    /// matching substrings of `InternalError` messages.
    fn is_auth_error(err: &PakkerError) -> bool {
        // Auth/token/API-key issues should not abort multi-profile export as a
        // whole. We detect these by messages emitted from the downloader.
        match err {
            PakkerError::InternalError(msg) => {
                msg.contains("authentication error")
                    || msg.contains("unauthorized")
                    || msg.contains("forbidden")
            },
            _ => false,
        }
    }

    /// Export modpack using specified profile
    #[expect(clippy::future_not_send, reason = "not required to be Send")]
    #[expect(
        clippy::expect_used,
        reason = "spinner template string is a literal and always valid"
    )]
    pub async fn export(
        &self,
        profile_name: &str,
        lockfile: &LockFile,
        config: &Config,
        output_path: &Path,
    ) -> Result<PathBuf> {
        let spinner = ProgressBar::new_spinner();
        spinner.set_style(
            ProgressStyle::default_spinner()
                .template("{spinner:.cyan} {msg}")
                .expect("spinner template is valid"),
        );
        spinner.set_message(format!("Preparing {profile_name} export..."));

        spinner.enable_steady_tick(std::time::Duration::from_millis(80));
        let spinner = &spinner;

        // Get export profile
        let profile = create_profile(profile_name)?;

        log::info!(
            "Exporting with profile: {} ({})",
            profile_name,
            profile.name()
        );

        // Load profile-specific configuration if available, otherwise use defaults
        let profile_config = config
            .export_profiles
            .as_ref()
            .and_then(|profiles| profiles.get(profile_name))
            .cloned()
            .or_else(|| {
                // Use defaults based on profile name
                match profile_name {
                    "curseforge" => Some(ProfileConfig::curseforge_default()),
                    "modrinth" => Some(ProfileConfig::modrinth_default()),
                    "serverpack" => Some(ProfileConfig::serverpack_default()),
                    _ => None,
                }
            });

        // Create temporary export directory
        let temp_dir = tempfile::tempdir()?;
        let export_dir = temp_dir.path();

        // Build rule context
        let context = RuleContext {
            lockfile: lockfile.clone(),
            config: config.clone(),
            profile_config,
            export_path: export_dir.to_path_buf(),
            base_path: self.base_path.clone(),
            ui: Some(spinner.clone()),
        };

        spinner.set_message("Collecting export rules...");
        // Apply rules and collect effects
        let mut effects = Vec::new();
        for rule in profile.rules() {
            if rule.matches(&context) {
                effects.extend(rule.effects());
            }
        }

        // Execute effects with descriptive messages
        for effect in &effects {
            let effect_name = effect.name();
            spinner.set_message(format!("Exporting: {effect_name}..."));
            effect.execute(&context).await?;
        }

        spinner.set_message("Creating archive...");
        // Package export
        let output_file =
            Self::package_export(export_dir, output_path, profile_name, config)?;

        // Cleanup
        drop(temp_dir);

        spinner.finish_and_clear();
        println!("Exported to: {}", output_file.display());
        Ok(output_file)
    }

    /// Package export directory into final format
    ///
    /// Builds a zip (or `.mrpack` for Modrinth) named
    /// `<name>-<version>-<profile>.<ext>` under the output directory.
    fn package_export(
        export_dir: &Path,
        output_path: &Path,
        profile_name: &str,
        config: &Config,
    ) -> Result<PathBuf> {
        // Pakku layout support: if output_path ends with "build" (set by CLI),
        // create build/<profile>/.
        let profile_output_path =
            if output_path.file_name().and_then(|n| n.to_str()) == Some("build") {
                output_path.join(profile_name)
            } else {
                output_path.to_path_buf()
            };

        fs::create_dir_all(&profile_output_path)?;

        // Use .mrpack extension for Modrinth, .zip for others
        let extension = if profile_name == "modrinth" {
            "mrpack"
        } else {
            "zip"
        };
        let output_file = profile_output_path.join(format!(
            "{}-{}-{}.{}",
            config.name, config.version, profile_name, extension
        ));

        // Create zip archive
        let file = fs::File::create(&output_file)?;
        let mut zip = zip::ZipWriter::new(file);

        let options = zip::write::FileOptions::default()
            .compression_method(zip::CompressionMethod::Deflated)
            .unix_permissions(0o755);

        // Add all files from export directory
        Self::add_directory_to_zip(&mut zip, export_dir, export_dir, options)?;

        zip.finish()?;

        Ok(output_file)
    }

    /// Recursively add directory to zip
    ///
    /// Entry names are made relative to `base_path` so the archive root
    /// mirrors the export directory layout.
    fn add_directory_to_zip(
        zip: &mut zip::ZipWriter<fs::File>,
        base_path: &Path,
        current_path: &Path,
        options: zip::write::SimpleFileOptions,
    ) -> Result<()> {
        for entry in fs::read_dir(current_path)? {
            let entry = entry?;
            let path = entry.path();
            let relative_path = path
                .strip_prefix(base_path)
                .map_err(|e| PakkerError::InternalError(e.to_string()))?;

            if path.is_file() {
                zip.start_file(relative_path.to_string_lossy().to_string(), options)?;
                // NOTE(review): reads each file fully into memory; fine for
                // mod-sized files, confirm for very large overrides.
                let content = fs::read(&path)?;
                zip.write_all(&content)?;
            } else if path.is_dir() {
                zip.add_directory(
                    relative_path.to_string_lossy().to_string(),
                    options,
                )?;
                Self::add_directory_to_zip(zip, base_path, &path, options)?;
            }
        }

        Ok(())
    }
}
|
||||
|
||||
use std::io::Write;
|
||||
66
crates/pakker-core/src/export/cache.rs
Normal file
66
crates/pakker-core/src/export/cache.rs
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
use crate::error::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
struct CacheEntry {
|
||||
hash: String,
|
||||
path: PathBuf,
|
||||
}
|
||||
|
||||
/// On-disk index of previously exported artifacts, keyed by a
/// caller-chosen string.
pub struct ExportCache {
    // Directory holding `export-cache.json`.
    cache_dir: PathBuf,
    // In-memory view of the index; persisted by `save`.
    entries: HashMap<String, CacheEntry>,
}
|
||||
|
||||
impl ExportCache {
    /// Open a cache rooted at `cache_dir`.
    ///
    /// A missing or unreadable index file is treated as an empty cache
    /// rather than an error (best-effort load).
    pub fn new(cache_dir: PathBuf) -> Self {
        let entries = Self::load_cache(&cache_dir).unwrap_or_default();

        Self { cache_dir, entries }
    }

    /// Read `export-cache.json` from `cache_dir` into a map.
    ///
    /// Returns an empty map when the file does not exist; propagates
    /// I/O and JSON errors otherwise.
    fn load_cache(cache_dir: &Path) -> Result<HashMap<String, CacheEntry>> {
        let cache_file = cache_dir.join("export-cache.json");

        if !cache_file.exists() {
            return Ok(HashMap::new());
        }

        let content = fs::read_to_string(cache_file)?;
        let entries = serde_json::from_str(&content)?;

        Ok(entries)
    }

    /// Look up a cached entry by key.
    pub fn get(&self, key: &str) -> Option<&CacheEntry> {
        self.entries.get(key)
    }

    /// Insert or replace the entry stored under `key`.
    pub fn put(&mut self, key: String, hash: String, path: PathBuf) {
        self.entries.insert(key, CacheEntry { hash, path });
    }

    /// Persist the in-memory index to `export-cache.json`, creating
    /// `cache_dir` if needed.
    pub fn save(&self) -> Result<()> {
        fs::create_dir_all(&self.cache_dir)?;

        let cache_file = self.cache_dir.join("export-cache.json");
        let content = serde_json::to_string_pretty(&self.entries)?;

        fs::write(cache_file, content)?;

        Ok(())
    }

    /// Drop all in-memory entries and delete the entire cache directory
    /// from disk.
    ///
    /// NOTE(review): `remove_dir_all` removes everything under
    /// `cache_dir`, not just the index file -- confirm nothing else is
    /// stored there by callers.
    pub fn clear(&mut self) -> Result<()> {
        self.entries.clear();

        if self.cache_dir.exists() {
            fs::remove_dir_all(&self.cache_dir)?;
        }

        Ok(())
    }
}
|
||||
172
crates/pakker-core/src/export/profile_config.rs
Normal file
172
crates/pakker-core/src/export/profile_config.rs
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Profile-specific export configuration
///
/// Every field is optional: `None` means "fall back to the global
/// config / profile default". Serialized in camelCase; `None` fields
/// are omitted from the JSON entirely.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ProfileConfig {
    /// Custom override paths for this profile
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overrides: Option<Vec<String>>,

    /// Custom server override paths for this profile
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_overrides: Option<Vec<String>>,

    /// Custom client override paths for this profile
    #[serde(skip_serializing_if = "Option::is_none")]
    pub client_overrides: Option<Vec<String>>,

    /// Platform filter - only include projects available on this platform
    #[serde(skip_serializing_if = "Option::is_none")]
    pub filter_platform: Option<String>,

    /// Include non-redistributable projects (default: false for `CurseForge`,
    /// true for others)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_non_redistributable: Option<bool>,

    /// Include client-only mods in server exports (default: false)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include_client_only: Option<bool>,

    /// Custom project-specific settings for this profile
    #[serde(skip_serializing_if = "Option::is_none")]
    pub project_overrides: Option<HashMap<String, ProjectOverride>>,
}
|
||||
|
||||
/// Project-specific overrides for a profile
///
/// Both fields are optional; `None` defers to the project's own settings.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ProjectOverride {
    /// Whether to export this project in this profile
    #[serde(skip_serializing_if = "Option::is_none")]
    pub export: Option<bool>,

    /// Custom subpath for this project in this profile
    #[serde(skip_serializing_if = "Option::is_none")]
    pub subpath: Option<String>,
}
|
||||
|
||||
impl ProfileConfig {
|
||||
/// Get effective override paths, falling back to global config
|
||||
pub fn get_overrides<'a>(
|
||||
&'a self,
|
||||
global_overrides: &'a [String],
|
||||
) -> &'a [String] {
|
||||
self.overrides.as_deref().unwrap_or(global_overrides)
|
||||
}
|
||||
|
||||
/// Get effective server override paths, falling back to global config
|
||||
pub fn get_server_overrides<'a>(
|
||||
&'a self,
|
||||
global_server_overrides: Option<&'a Vec<String>>,
|
||||
) -> Option<&'a [String]> {
|
||||
self
|
||||
.server_overrides
|
||||
.as_deref()
|
||||
.or_else(|| global_server_overrides.map(std::vec::Vec::as_slice))
|
||||
}
|
||||
|
||||
/// Get effective client override paths, falling back to global config
|
||||
pub fn get_client_overrides<'a>(
|
||||
&'a self,
|
||||
global_client_overrides: Option<&'a Vec<String>>,
|
||||
) -> Option<&'a [String]> {
|
||||
self
|
||||
.client_overrides
|
||||
.as_deref()
|
||||
.or_else(|| global_client_overrides.map(std::vec::Vec::as_slice))
|
||||
}
|
||||
|
||||
/// Get default config for `CurseForge` profile
|
||||
pub fn curseforge_default() -> Self {
|
||||
Self {
|
||||
filter_platform: Some("curseforge".to_string()),
|
||||
include_non_redistributable: Some(false),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get default config for Modrinth profile
|
||||
pub fn modrinth_default() -> Self {
|
||||
Self {
|
||||
filter_platform: Some("modrinth".to_string()),
|
||||
include_non_redistributable: Some(true),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get default config for `ServerPack` profile
|
||||
pub fn serverpack_default() -> Self {
|
||||
Self {
|
||||
include_client_only: Some(false),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_default_profile_config() {
        let config = ProfileConfig::default();
        assert!(config.overrides.is_none());
        assert!(config.filter_platform.is_none());
    }

    #[test]
    fn test_curseforge_default() {
        let config = ProfileConfig::curseforge_default();
        assert_eq!(config.filter_platform, Some("curseforge".to_string()));
        assert_eq!(config.include_non_redistributable, Some(false));
    }

    #[test]
    fn test_modrinth_default() {
        let config = ProfileConfig::modrinth_default();
        assert_eq!(config.filter_platform, Some("modrinth".to_string()));
        assert_eq!(config.include_non_redistributable, Some(true));
    }

    #[test]
    fn test_serverpack_default() {
        let config = ProfileConfig::serverpack_default();
        assert_eq!(config.include_client_only, Some(false));
    }

    #[test]
    fn test_get_overrides_with_custom() {
        // Profile-local overrides must win over the global list.
        let config = ProfileConfig {
            overrides: Some(vec!["custom-overrides".to_string()]),
            ..ProfileConfig::default()
        };

        let global = vec!["overrides".to_string()];
        assert_eq!(config.get_overrides(&global), &["custom-overrides"]);
    }

    #[test]
    fn test_get_overrides_fallback_to_global() {
        let config = ProfileConfig::default();
        let global = vec!["overrides".to_string()];
        assert_eq!(config.get_overrides(&global), &["overrides"]);
    }

    #[test]
    fn test_serialization() {
        // Round-trip through JSON must preserve the set fields.
        let config = ProfileConfig {
            filter_platform: Some("modrinth".to_string()),
            include_non_redistributable: Some(true),
            ..ProfileConfig::default()
        };

        let json = serde_json::to_string(&config).unwrap();
        let deserialized: ProfileConfig = serde_json::from_str(&json).unwrap();

        assert_eq!(deserialized.filter_platform, config.filter_platform);
        assert_eq!(
            deserialized.include_non_redistributable,
            config.include_non_redistributable
        );
    }
}
|
||||
88
crates/pakker-core/src/export/profiles.rs
Normal file
88
crates/pakker-core/src/export/profiles.rs
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
use super::rules::Rule;
|
||||
use crate::error::{PakkerError, Result};
|
||||
|
||||
/// A named export target bundling the rule pipeline used to produce it.
pub trait ExportProfile {
    /// Machine-readable profile name (e.g. "curseforge").
    fn name(&self) -> &str;
    /// Rules for this profile, in declaration order.
    fn rules(&self) -> Vec<Box<dyn Rule>>;
}
|
||||
|
||||
/// Implements [`ExportProfile`] for a unit struct with a static name and rule
/// list.
///
/// Each rule entry is an expression evaluated with `super::rules::*` glob-
/// imported into scope and wrapped in `Box::new(...)`, supporting both bare
/// unit struct names and constructor calls with arguments.
///
/// # Example
///
/// ```ignore
/// export_profile! {
///     MyProfile => "my-profile" {
///         SomeRule,
///         AnotherRule::new("arg"),
///     }
/// }
/// ```
macro_rules! export_profile {
    ($struct:ident => $name:literal { $($rule:expr),* $(,)? }) => {
        pub struct $struct;

        impl ExportProfile for $struct {
            fn name(&self) -> &'static str {
                $name
            }

            fn rules(&self) -> Vec<Box<dyn Rule>> {
                // Glob import so rule entries can name rules without a path.
                use super::rules::*;
                vec![
                    $(Box::new($rule)),*
                ]
            }
        }
    };
}
|
||||
|
||||
// CurseForge packs: platform-filtered, with missing projects bundled as
// overrides and non-redistributable projects filtered out.
export_profile! {
    CurseForgeProfile => "curseforge" {
        CopyProjectFilesRule,
        FilterByPlatformRule,
        MissingProjectsAsOverridesRule::new("curseforge"),
        CopyOverridesRule,
        CopyClientOverridesRule,
        FilterServerOnlyRule,
        GenerateManifestRule::curseforge(),
        FilterNonRedistributableRule,
        TextReplacementRule
    }
}

// Modrinth packs: same pipeline shape as CurseForge, but no
// redistribution filter and a Modrinth manifest.
export_profile! {
    ModrinthProfile => "modrinth" {
        CopyProjectFilesRule,
        FilterByPlatformRule,
        MissingProjectsAsOverridesRule::new("modrinth"),
        CopyOverridesRule,
        CopyClientOverridesRule,
        FilterServerOnlyRule,
        GenerateManifestRule::modrinth(),
        TextReplacementRule
    }
}

// Server packs: server overrides only, client-only projects dropped,
// no manifest generation.
export_profile! {
    ServerPackProfile => "serverpack" {
        CopyProjectFilesRule,
        CopyServerOverridesRule,
        FilterClientOnlyRule,
        TextReplacementRule
    }
}
|
||||
|
||||
pub fn create_profile(name: &str) -> Result<Box<dyn ExportProfile>> {
|
||||
match name {
|
||||
"curseforge" => Ok(Box::new(CurseForgeProfile)),
|
||||
"modrinth" => Ok(Box::new(ModrinthProfile)),
|
||||
"serverpack" => Ok(Box::new(ServerPackProfile)),
|
||||
_ => Err(PakkerError::InvalidExportProfile(name.to_string())),
|
||||
}
|
||||
}
|
||||
1545
crates/pakker-core/src/export/rules.rs
Normal file
1545
crates/pakker-core/src/export/rules.rs
Normal file
File diff suppressed because it is too large
Load diff
488
crates/pakker-core/src/fetch.rs
Normal file
488
crates/pakker-core/src/fetch.rs
Normal file
|
|
@ -0,0 +1,488 @@
|
|||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use futures::future::join_all;
|
||||
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
|
||||
use reqwest::Client;
|
||||
use tokio::sync::Semaphore;
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{Config, LockFile, Project, ProjectFile, UpdateStrategy},
|
||||
utils::{FlexVer, verify_hash},
|
||||
};
|
||||
|
||||
/// Maximum number of concurrent downloads
const MAX_CONCURRENT_DOWNLOADS: usize = 8;

/// Downloads project files described by a lockfile into a pack directory.
pub struct Fetcher {
    // Reused HTTP client for all downloads.
    client: Client,
    // Root of the pack; all target paths are resolved beneath it.
    base_path: PathBuf,
    // When true, unknown jar files are moved to `.pakker-shelf/` instead of
    // being deleted.
    shelve: bool,
    // Number of retries *after* the first failed download attempt.
    retry_count: u32,
}
|
||||
|
||||
impl Fetcher {
|
||||
pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
base_path: base_path.as_ref().to_path_buf(),
|
||||
shelve: false,
|
||||
retry_count: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub const fn with_shelve(mut self, shelve: bool) -> Self {
|
||||
self.shelve = shelve;
|
||||
self
|
||||
}
|
||||
|
||||
pub const fn with_retry(mut self, retry_count: u32) -> Self {
|
||||
self.retry_count = retry_count;
|
||||
self
|
||||
}
|
||||
|
||||
pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> {
|
||||
self.fetch_all(lockfile, config).await
|
||||
}
|
||||
|
||||
/// Fetch all project files according to lockfile with parallel downloads
|
||||
#[expect(
|
||||
clippy::expect_used,
|
||||
reason = "progress bar template string is a literal and always valid"
|
||||
)]
|
||||
pub async fn fetch_all(
|
||||
&self,
|
||||
lockfile: &LockFile,
|
||||
config: &Config,
|
||||
) -> Result<()> {
|
||||
let exportable_projects: Vec<_> =
|
||||
lockfile.projects.iter().filter(|p| p.export).collect();
|
||||
|
||||
let total = exportable_projects.len();
|
||||
|
||||
if total == 0 {
|
||||
log::info!("No projects to fetch");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Set up multi-progress for parallel download tracking
|
||||
let multi_progress = MultiProgress::new();
|
||||
let overall_bar = multi_progress.add(ProgressBar::new(total as u64));
|
||||
overall_bar.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
|
||||
.expect("progress bar template is valid")
|
||||
.progress_chars("#>-"),
|
||||
);
|
||||
overall_bar.set_message("Fetching projects...");
|
||||
|
||||
// Use a semaphore to limit concurrent downloads
|
||||
let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT_DOWNLOADS));
|
||||
|
||||
// Prepare download tasks
|
||||
let client = &self.client;
|
||||
let base_path = &self.base_path;
|
||||
let download_tasks: Vec<_> = exportable_projects
|
||||
.iter()
|
||||
.map(|project| {
|
||||
let semaphore = Arc::clone(&semaphore);
|
||||
let client = client.clone();
|
||||
let base_path = base_path.clone();
|
||||
let project = (*project).clone();
|
||||
let overall_bar = overall_bar.clone();
|
||||
|
||||
async move {
|
||||
// Acquire semaphore permit to limit concurrency
|
||||
let _permit = semaphore.acquire().await.map_err(|_| {
|
||||
PakkerError::InternalError("Semaphore acquisition failed".into())
|
||||
})?;
|
||||
|
||||
let name = project.get_name();
|
||||
|
||||
let fetcher = Self {
|
||||
client,
|
||||
base_path,
|
||||
shelve: false, // Shelving happens at sync level, not per-project
|
||||
retry_count: 0,
|
||||
};
|
||||
|
||||
let result = fetcher.fetch_project(&project, lockfile, config).await;
|
||||
|
||||
// Update progress bar
|
||||
overall_bar.inc(1);
|
||||
|
||||
match &result {
|
||||
Ok(()) => {
|
||||
log::debug!("Successfully fetched: {name}");
|
||||
},
|
||||
Err(e) => {
|
||||
log::error!("Failed to fetch {name}: {e}");
|
||||
},
|
||||
}
|
||||
|
||||
result.map(|()| name)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Execute all downloads in parallel (limited by semaphore)
|
||||
let results = join_all(download_tasks).await;
|
||||
|
||||
overall_bar.finish_with_message("All projects fetched");
|
||||
|
||||
// Collect and report errors
|
||||
let mut errors = Vec::new();
|
||||
let mut success_count = 0;
|
||||
|
||||
for result in results {
|
||||
match result {
|
||||
Ok(_) => success_count += 1,
|
||||
Err(e) => errors.push(e),
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Fetch complete: {success_count}/{total} successful");
|
||||
|
||||
if !errors.is_empty() {
|
||||
// Return the first error, but log all of them
|
||||
for (idx, error) in errors.iter().enumerate() {
|
||||
log::error!("Download error {}: {}", idx + 1, error);
|
||||
}
|
||||
return Err(errors.remove(0));
|
||||
}
|
||||
|
||||
// Handle unknown files (shelve or delete)
|
||||
self.handle_unknown_files(lockfile, config)?;
|
||||
|
||||
// Sync overrides
|
||||
self.sync_overrides(config)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Handle unknown project files that aren't in the lockfile.
|
||||
/// If shelve is true, moves them to a shelf directory.
|
||||
/// Otherwise, deletes them.
|
||||
fn handle_unknown_files(
|
||||
&self,
|
||||
lockfile: &LockFile,
|
||||
config: &Config,
|
||||
) -> Result<()> {
|
||||
// Collect all expected file names from lockfile
|
||||
let expected_files: std::collections::HashSet<String> = lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.filter(|p| p.export)
|
||||
.filter_map(|p| p.files.first().map(|f| f.file_name.clone()))
|
||||
.collect();
|
||||
|
||||
// Check each project type directory
|
||||
let project_dirs = [
|
||||
(
|
||||
"mod",
|
||||
Self::get_default_path(crate::model::ProjectType::Mod),
|
||||
),
|
||||
(
|
||||
"resource-pack",
|
||||
Self::get_default_path(crate::model::ProjectType::ResourcePack),
|
||||
),
|
||||
(
|
||||
"shader",
|
||||
Self::get_default_path(crate::model::ProjectType::Shader),
|
||||
),
|
||||
(
|
||||
"data-pack",
|
||||
Self::get_default_path(crate::model::ProjectType::DataPack),
|
||||
),
|
||||
(
|
||||
"world",
|
||||
Self::get_default_path(crate::model::ProjectType::World),
|
||||
),
|
||||
];
|
||||
|
||||
// Also check custom paths from config
|
||||
let mut dirs_to_check: Vec<PathBuf> = project_dirs
|
||||
.iter()
|
||||
.map(|(_, dir)| self.base_path.join(dir))
|
||||
.collect();
|
||||
|
||||
for custom_path in config.paths.values() {
|
||||
dirs_to_check.push(self.base_path.join(custom_path));
|
||||
}
|
||||
|
||||
let shelf_dir = self.base_path.join(".pakker-shelf");
|
||||
let mut shelved_count = 0;
|
||||
let mut deleted_count = 0;
|
||||
|
||||
for dir in dirs_to_check {
|
||||
if !dir.exists() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Ok(entries) = fs::read_dir(&dir) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if !path.is_file() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let file_name = match path.file_name().and_then(|n| n.to_str()) {
|
||||
Some(name) => name.to_string(),
|
||||
None => continue,
|
||||
};
|
||||
|
||||
// Skip if file is expected
|
||||
if expected_files.contains(&file_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip non-jar files (might be configs, etc.)
|
||||
if !std::path::Path::new(&file_name)
|
||||
.extension()
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("jar"))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if self.shelve {
|
||||
// Move to shelf
|
||||
fs::create_dir_all(&shelf_dir)?;
|
||||
let shelf_path = shelf_dir.join(&file_name);
|
||||
fs::rename(&path, &shelf_path)?;
|
||||
log::info!("Shelved unknown file: {file_name} -> .pakker-shelf/");
|
||||
shelved_count += 1;
|
||||
} else {
|
||||
// Delete unknown file
|
||||
fs::remove_file(&path)?;
|
||||
log::info!("Deleted unknown file: {file_name}");
|
||||
deleted_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if shelved_count > 0 {
|
||||
log::info!("Shelved {shelved_count} unknown file(s) to .pakker-shelf/");
|
||||
}
|
||||
if deleted_count > 0 {
|
||||
log::info!("Deleted {deleted_count} unknown file(s)");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Fetch files for a single project
|
||||
pub async fn fetch_project(
|
||||
&self,
|
||||
project: &Project,
|
||||
lockfile: &LockFile,
|
||||
config: &Config,
|
||||
) -> Result<()> {
|
||||
// Select the best file for this project
|
||||
let file = Self::select_best_file(project, lockfile)?;
|
||||
|
||||
// Determine target path
|
||||
let target_path = self.get_target_path(project, file, config);
|
||||
|
||||
// Check if file already exists and is valid
|
||||
if target_path.exists()
|
||||
&& let Some((algo, expected_hash)) = file.hashes.iter().next()
|
||||
&& verify_hash(&target_path, algo, expected_hash)?
|
||||
{
|
||||
log::info!("File already exists and is valid: {}", file.file_name);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Download file
|
||||
log::info!("Downloading: {}", file.file_name);
|
||||
self.download_file(&file.url, &target_path).await?;
|
||||
|
||||
// Verify hash
|
||||
if let Some((algo, expected_hash)) = file.hashes.iter().next()
|
||||
&& !verify_hash(&target_path, algo, expected_hash)?
|
||||
{
|
||||
fs::remove_file(&target_path)?;
|
||||
return Err(PakkerError::HashMismatch {
|
||||
file: file.file_name.clone(),
|
||||
expected: expected_hash.clone(),
|
||||
actual: "mismatch".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
log::info!("Successfully downloaded: {}", file.file_name);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Select the best file for a project based on constraints
|
||||
#[expect(
|
||||
clippy::expect_used,
|
||||
reason = "compatible_files is checked to be non-empty above"
|
||||
)]
|
||||
fn select_best_file<'a>(
|
||||
project: &'a Project,
|
||||
lockfile: &LockFile,
|
||||
) -> Result<&'a ProjectFile> {
|
||||
let compatible_files: Vec<&ProjectFile> = project
|
||||
.files
|
||||
.iter()
|
||||
.filter(|f| {
|
||||
f.is_compatible(&lockfile.mc_versions, &lockfile.get_loader_names())
|
||||
})
|
||||
.collect();
|
||||
|
||||
if compatible_files.is_empty() {
|
||||
return Err(PakkerError::FileNotFound(format!(
|
||||
"No compatible files for project: {:?}",
|
||||
project.name.values().next()
|
||||
)));
|
||||
}
|
||||
|
||||
// Select best file based on update strategy
|
||||
let best = if project.update_strategy == UpdateStrategy::FlexVer {
|
||||
let mut sorted: Vec<_> = compatible_files.iter().collect();
|
||||
sorted.sort_by(|a, b| FlexVer(&b.file_name).cmp(&FlexVer(&a.file_name)));
|
||||
*sorted
|
||||
.first()
|
||||
.expect("compatible_files is non-empty, checked above")
|
||||
} else {
|
||||
// Prefer release over beta over alpha, then by date published
|
||||
compatible_files
|
||||
.iter()
|
||||
.max_by_key(|f| {
|
||||
let type_priority = match f.release_type {
|
||||
crate::model::ReleaseType::Release => 3,
|
||||
crate::model::ReleaseType::Beta => 2,
|
||||
crate::model::ReleaseType::Alpha => 1,
|
||||
};
|
||||
(type_priority, &f.date_published)
|
||||
})
|
||||
.expect("compatible_files is non-empty, checked above")
|
||||
};
|
||||
|
||||
Ok(best)
|
||||
}
|
||||
|
||||
/// Get target path for a project file
|
||||
fn get_target_path(
|
||||
&self,
|
||||
project: &Project,
|
||||
file: &ProjectFile,
|
||||
config: &Config,
|
||||
) -> PathBuf {
|
||||
let mut path = self.base_path.clone();
|
||||
|
||||
// Check for custom path in config
|
||||
if let Some(custom_path) = config.paths.get(&project.r#type.to_string()) {
|
||||
path.push(custom_path);
|
||||
} else {
|
||||
// Default path based on project type
|
||||
path.push(Self::get_default_path(project.r#type));
|
||||
}
|
||||
|
||||
// Add subpath if specified
|
||||
if let Some(subpath) = &project.subpath {
|
||||
path.push(subpath);
|
||||
}
|
||||
|
||||
path.push(&file.file_name);
|
||||
path
|
||||
}
|
||||
|
||||
/// Get default path for project type
|
||||
const fn get_default_path(
|
||||
project_type: crate::model::ProjectType,
|
||||
) -> &'static str {
|
||||
match project_type {
|
||||
crate::model::ProjectType::Mod => "mods",
|
||||
crate::model::ProjectType::ResourcePack => "resourcepacks",
|
||||
crate::model::ProjectType::DataPack => "datapacks",
|
||||
crate::model::ProjectType::Shader => "shaderpacks",
|
||||
crate::model::ProjectType::World => "saves",
|
||||
}
|
||||
}
|
||||
|
||||
/// Download a file from URL to target path with retry
|
||||
async fn download_file(&self, url: &str, target_path: &Path) -> Result<()> {
|
||||
// Create parent directory
|
||||
if let Some(parent) = target_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let max_attempts = self.retry_count.saturating_add(1);
|
||||
|
||||
for attempt in 0..max_attempts {
|
||||
match self.download_single_attempt(url, target_path).await {
|
||||
Ok(()) => return Ok(()),
|
||||
Err(_e) if attempt < self.retry_count => {
|
||||
log::warn!(
|
||||
"Download attempt {}/{} failed for {}, retrying...",
|
||||
attempt + 1,
|
||||
max_attempts,
|
||||
url
|
||||
);
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
|
||||
},
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn download_single_attempt(
|
||||
&self,
|
||||
url: &str,
|
||||
target_path: &Path,
|
||||
) -> Result<()> {
|
||||
let response = self.client.get(url).send().await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(PakkerError::DownloadFailed(url.to_string()));
|
||||
}
|
||||
|
||||
let bytes = response.bytes().await?;
|
||||
|
||||
let temp_path = target_path.with_extension("tmp");
|
||||
fs::write(&temp_path, bytes)?;
|
||||
fs::rename(temp_path, target_path)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sync override directories
|
||||
fn sync_overrides(&self, config: &Config) -> Result<()> {
|
||||
for override_path in &config.overrides {
|
||||
let source = self.base_path.join(override_path);
|
||||
if !source.exists() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Copy override files to target locations
|
||||
Self::copy_recursive(&source, &self.base_path)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Copy directory recursively
|
||||
fn copy_recursive(source: &Path, dest: &Path) -> Result<()> {
|
||||
if source.is_file() {
|
||||
fs::copy(source, dest)?;
|
||||
} else if source.is_dir() {
|
||||
fs::create_dir_all(dest)?;
|
||||
for entry in fs::read_dir(source)? {
|
||||
let entry = entry?;
|
||||
let target = dest.join(entry.file_name());
|
||||
Self::copy_recursive(&entry.path(), &target)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
612
crates/pakker-core/src/git/mod.rs
Normal file
612
crates/pakker-core/src/git/mod.rs
Normal file
|
|
@ -0,0 +1,612 @@
|
|||
use std::path::Path;
|
||||
|
||||
use git2::{
|
||||
Cred,
|
||||
FetchOptions,
|
||||
Oid,
|
||||
RemoteCallbacks,
|
||||
Repository,
|
||||
ResetType,
|
||||
build::RepoBuilder,
|
||||
};
|
||||
|
||||
use crate::error::{PakkerError, Result};
|
||||
|
||||
/// Optional transfer-progress hook, invoked with
/// `(received_objects, total_objects, Some(received_bytes))`.
type ProgressCallback =
    Option<Box<dyn FnMut(usize, usize, Option<usize>) + 'static>>;
|
||||
|
||||
/// Check if a directory is a Git repository
/// (i.e. libgit2 can open it; any open error is treated as "not a repo").
pub fn is_git_repository<P: AsRef<Path>>(path: P) -> bool {
    Repository::open(path).is_ok()
}
|
||||
|
||||
/// Get the URL of a remote
|
||||
pub fn get_remote_url<P: AsRef<Path>>(
|
||||
path: P,
|
||||
remote_name: &str,
|
||||
) -> Result<String> {
|
||||
let repo = Repository::open(path)?;
|
||||
let remote = repo.find_remote(remote_name).map_err(|e| {
|
||||
PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}"))
|
||||
})?;
|
||||
|
||||
remote
|
||||
.url()
|
||||
.ok_or_else(|| {
|
||||
PakkerError::GitError("Remote URL is not valid UTF-8".to_string())
|
||||
})
|
||||
.map(std::string::ToString::to_string)
|
||||
}
|
||||
|
||||
pub fn get_current_commit_sha<P: AsRef<Path>>(
|
||||
path: P,
|
||||
ref_name: Option<&str>,
|
||||
) -> Result<String> {
|
||||
let repo = Repository::open(path)?;
|
||||
|
||||
let commit = if let Some(ref_name) = ref_name {
|
||||
// Try the ref in several forms: bare name, local branch, remote tracking
|
||||
let candidates = [
|
||||
ref_name.to_string(),
|
||||
format!("refs/heads/{ref_name}"),
|
||||
format!("refs/remotes/origin/{ref_name}"),
|
||||
];
|
||||
let obj = candidates
|
||||
.iter()
|
||||
.find_map(|candidate| repo.revparse_single(candidate).ok())
|
||||
.ok_or_else(|| {
|
||||
PakkerError::GitError(format!("revspec '{ref_name}' not found"))
|
||||
})?;
|
||||
obj.peel_to_commit()?
|
||||
} else {
|
||||
let head = repo.head()?;
|
||||
head.peel_to_commit()?
|
||||
};
|
||||
|
||||
Ok(commit.id().to_string())
|
||||
}
|
||||
|
||||
/// Get the commit SHA for a specific ref (alias for compatibility)
///
/// Thin wrapper over [`get_current_commit_sha`] with a mandatory ref name.
pub fn get_commit_sha<P: AsRef<Path>>(
    path: P,
    ref_name: &str,
) -> Result<String> {
    get_current_commit_sha(path, Some(ref_name))
}
|
||||
|
||||
/// Clone a Git repository
///
/// Authenticates via the SSH agent. `progress_callback`, when provided,
/// receives `(received_objects, total_objects, Some(received_bytes))`.
///
/// # Errors
///
/// Fails when `target_path` exists and is non-empty, when the clone itself
/// fails, or when the post-clone HEAD adjustment for `ref_name` fails.
pub fn clone_repository<P: AsRef<Path>>(
    url: &str,
    target_path: P,
    ref_name: &str,
    progress_callback: ProgressCallback,
) -> Result<Repository> {
    let target_path = target_path.as_ref();

    // Check if target directory exists and is not empty
    if target_path.exists() {
        let is_empty = target_path.read_dir()?.next().is_none();
        if !is_empty {
            return Err(PakkerError::GitError(format!(
                "Target directory is not empty: {}",
                target_path.display()
            )));
        }
    }

    let mut callbacks = RemoteCallbacks::new();

    // Setup SSH key authentication
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        let username = username_from_url.unwrap_or("git");
        Cred::ssh_key_from_agent(username)
    });

    // Setup progress callback if provided
    if let Some(mut progress_fn) = progress_callback {
        callbacks.transfer_progress(move |stats| {
            progress_fn(
                stats.received_objects(),
                stats.total_objects(),
                Some(stats.received_bytes()),
            );
            // Returning true tells libgit2 to continue the transfer.
            true
        });
    }

    let mut fetch_options = FetchOptions::new();
    fetch_options.remote_callbacks(callbacks);

    let mut builder = RepoBuilder::new();
    builder.fetch_options(fetch_options);

    // Perform the clone. Avoid forcing a branch at clone time because some
    // local repositories (or bare repos) may not expose the exact remote
    // tracking refs that libgit2 expects. We'll attempt to set the desired
    // ref after cloning when possible.
    let repo = builder.clone(url, target_path).map_err(|e| {
        PakkerError::GitError(format!("Failed to clone repository '{url}': {e}"))
    })?;

    // If a branch/ref name was requested, try to make HEAD point to it.
    // Prefer local branch refs (refs/heads/*), then tags, then raw rev-parse.
    let branch_ref = format!("refs/heads/{ref_name}");
    if repo.find_reference(&branch_ref).is_ok() {
        repo.set_head(&branch_ref).map_err(|e| {
            PakkerError::GitError(format!(
                "Cloned repository but failed to set HEAD to {branch_ref}: {e}"
            ))
        })?;
    } else if let Ok(obj) = repo.revparse_single(ref_name) {
        // Create a detached HEAD pointing to the commit/tag
        let commit = obj.peel_to_commit().map_err(|e| {
            PakkerError::GitError(format!(
                "Resolved ref '{ref_name}' but could not peel to commit: {e}"
            ))
        })?;
        repo.set_head_detached(commit.id()).map_err(|e| {
            PakkerError::GitError(format!(
                "Cloned repository but failed to set detached HEAD to {ref_name}: {e}"
            ))
        })?;
    }
    // NOTE(review): when `ref_name` resolves to neither a branch nor any
    // rev-parsable object, the clone succeeds on the default HEAD — confirm
    // this silent fallback is intended.

    Ok(repo)
}
|
||||
|
||||
/// Fetch updates from a remote
///
/// Fetches only `ref_name` from `remote_name`, authenticating via the SSH
/// agent; `progress_callback` receives
/// `(received_objects, total_objects, Some(received_bytes))`.
///
/// # Errors
///
/// Fails when the repository cannot be opened, the remote does not exist,
/// or the fetch itself fails.
pub fn fetch_updates<P: AsRef<Path>>(
    path: P,
    remote_name: &str,
    ref_name: &str,
    progress_callback: ProgressCallback,
) -> Result<()> {
    let repo = Repository::open(path)?;
    let mut remote = repo.find_remote(remote_name).map_err(|e| {
        PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}"))
    })?;

    let mut callbacks = RemoteCallbacks::new();

    // Setup SSH key authentication
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        let username = username_from_url.unwrap_or("git");
        Cred::ssh_key_from_agent(username)
    });

    // Setup progress callback if provided
    if let Some(mut progress_fn) = progress_callback {
        callbacks.transfer_progress(move |stats| {
            progress_fn(
                stats.received_objects(),
                stats.total_objects(),
                Some(stats.received_bytes()),
            );
            // Returning true tells libgit2 to continue the transfer.
            true
        });
    }

    let mut fetch_options = FetchOptions::new();
    fetch_options.remote_callbacks(callbacks);

    remote
        .fetch(&[ref_name], Some(&mut fetch_options), None)
        .map_err(|e| {
            PakkerError::GitError(format!("Failed to fetch updates: {e}"))
        })?;

    Ok(())
}
|
||||
|
||||
/// Hard reset to a specific ref (like git reset --hard)
|
||||
pub fn reset_to_ref<P: AsRef<Path>>(
|
||||
path: P,
|
||||
remote_name: &str,
|
||||
ref_name: &str,
|
||||
) -> Result<()> {
|
||||
let repo = Repository::open(path)?;
|
||||
|
||||
// Construct the full ref path (e.g., "origin/main")
|
||||
let full_ref = format!("{remote_name}/{ref_name}");
|
||||
|
||||
let obj = repo.revparse_single(&full_ref).map_err(|e| {
|
||||
PakkerError::GitError(format!("Failed to find ref '{full_ref}': {e}"))
|
||||
})?;
|
||||
|
||||
let commit = obj.peel_to_commit().map_err(|e| {
|
||||
PakkerError::GitError(format!("Failed to resolve ref to commit: {e}"))
|
||||
})?;
|
||||
|
||||
repo
|
||||
.reset(commit.as_object(), ResetType::Hard, None)
|
||||
.map_err(|e| {
|
||||
PakkerError::GitError(format!("Failed to reset to ref: {e}"))
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Determine the ref type (branch, tag, or commit)
///
/// Checks, in order: local branch, remote tracking branch (origin), tag,
/// then anything rev-parsable (classified as a commit). The order matters:
/// a name that is both a branch and a tag resolves as a branch.
pub fn resolve_ref_type<P: AsRef<Path>>(
    path: P,
    ref_name: &str,
) -> Result<crate::model::fork::RefType> {
    let repo = Repository::open(path)?;

    // Check if it's a local branch
    if repo.find_branch(ref_name, git2::BranchType::Local).is_ok() {
        return Ok(crate::model::fork::RefType::Branch);
    }

    // Check remote tracking branches (e.g. origin/main after a fresh clone)
    let remote_branch_name = format!("origin/{ref_name}");
    if repo
        .find_branch(&remote_branch_name, git2::BranchType::Remote)
        .is_ok()
        || repo
            .find_reference(&format!("refs/remotes/origin/{ref_name}"))
            .is_ok()
    {
        return Ok(crate::model::fork::RefType::Branch);
    }

    // Check if it's a tag
    let tag_ref = format!("refs/tags/{ref_name}");
    if repo.find_reference(&tag_ref).is_ok() {
        return Ok(crate::model::fork::RefType::Tag);
    }

    // Try to resolve as commit SHA
    let candidates = [
        ref_name.to_string(),
        format!("refs/heads/{ref_name}"),
        format!("refs/remotes/origin/{ref_name}"),
    ];
    if candidates.iter().any(|c| repo.revparse_single(c).is_ok()) {
        return Ok(crate::model::fork::RefType::Commit);
    }

    Err(PakkerError::GitError(format!(
        "Could not resolve ref '{ref_name}' as branch, tag, or commit"
    )))
}
|
||||
|
||||
/// Get the primary remote URL for a repository at path. Prefer 'origin',
|
||||
/// otherwise first remote with a URL.
|
||||
pub fn get_primary_remote_url<P: AsRef<Path>>(path: P) -> Result<String> {
|
||||
let repo = Repository::open(path)?;
|
||||
if let Ok(remote) = repo.find_remote("origin")
|
||||
&& let Some(url) = remote.url()
|
||||
{
|
||||
return Ok(url.to_string());
|
||||
}
|
||||
|
||||
// Fallback: first remote with a URL
|
||||
if let Ok(remotes) = repo.remotes() {
|
||||
for name in remotes.iter().flatten() {
|
||||
if let Ok(remote) = repo.find_remote(name)
|
||||
&& let Some(url) = remote.url()
|
||||
{
|
||||
return Ok(url.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(PakkerError::GitError(
|
||||
"No remote with a valid URL found on repository".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
/// Version-control system detected for a directory
/// (see [`detect_vcs_type`]; jujutsu takes priority over git).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum VcsType {
    Git,
    Jujutsu,
    /// No recognized VCS found.
    None,
}
|
||||
|
||||
/// Detect the VCS type for a given path
|
||||
pub fn detect_vcs_type<P: AsRef<Path>>(path: P) -> VcsType {
|
||||
let path = path.as_ref();
|
||||
|
||||
// Check for jujutsu first (higher priority)
|
||||
if let Ok(output) = std::process::Command::new("jj")
|
||||
.args(["root"])
|
||||
.current_dir(path)
|
||||
.output()
|
||||
&& output.status.success()
|
||||
{
|
||||
return VcsType::Jujutsu;
|
||||
}
|
||||
|
||||
// Check for git
|
||||
if let Ok(output) = std::process::Command::new("git")
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.current_dir(path)
|
||||
.output()
|
||||
&& output.status.success()
|
||||
{
|
||||
return VcsType::Git;
|
||||
}
|
||||
|
||||
VcsType::None
|
||||
}
|
||||
|
||||
/// Check whether the repository has uncommitted changes (working tree or index)
|
||||
pub fn repo_has_uncommitted_changes<P: AsRef<Path>>(path: P) -> Result<bool> {
|
||||
let vcs_type = detect_vcs_type(&path);
|
||||
|
||||
match vcs_type {
|
||||
VcsType::Git => {
|
||||
let repo = Repository::open(path)?;
|
||||
let statuses = repo.statuses(None)?;
|
||||
for entry in statuses.iter() {
|
||||
let s = entry.status();
|
||||
// Consider any change in index or working tree as uncommitted
|
||||
if !(s.is_empty()) {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
},
|
||||
VcsType::Jujutsu => {
|
||||
// Use jj status to check for changes - look for "The working copy has no
|
||||
// changes"
|
||||
let output = std::process::Command::new("jj")
|
||||
.args(["status"])
|
||||
.current_dir(path)
|
||||
.output()
|
||||
.map_err(|e| {
|
||||
PakkerError::GitError(format!("Failed to run jj status: {e}"))
|
||||
})?;
|
||||
|
||||
let output_str = String::from_utf8_lossy(&output.stdout);
|
||||
// Check if the output indicates no changes
|
||||
Ok(!output_str.contains("The working copy has no changes"))
|
||||
},
|
||||
VcsType::None => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
/// Attempt a lightweight fetch of a single ref from the named remote into the
/// repository at path
///
/// Authentication is SSH-agent only, using the URL's username when present
/// ("git" as fallback).
///
/// # Errors
///
/// `GitError` when the remote does not exist or the fetch fails.
pub fn fetch_remote_light<P: AsRef<Path>>(
  path: P,
  remote_name: &str,
  ref_name: &str,
) -> Result<()> {
  let repo = Repository::open(path)?;
  let mut remote = repo.find_remote(remote_name).map_err(|e| {
    PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}"))
  })?;

  // Credential callback: hand libgit2 a key from the running SSH agent.
  let mut callbacks = RemoteCallbacks::new();
  callbacks.credentials(|_url, username_from_url, _allowed_types| {
    let username = username_from_url.unwrap_or("git");
    Cred::ssh_key_from_agent(username)
  });

  let mut fetch_options = FetchOptions::new();
  fetch_options.remote_callbacks(callbacks);

  // Build a refspec that attempts to fetch the branch into the remote-tracking
  // namespace. A fully-qualified ref ("refs/…") is passed through unchanged.
  let fetch_refspec = if ref_name.starts_with("refs/") {
    ref_name.to_string()
  } else {
    format!("refs/heads/{ref_name}:refs/remotes/{remote_name}/{ref_name}")
  };

  remote
    .fetch(&[&fetch_refspec], Some(&mut fetch_options), None)
    .map_err(|e| {
      PakkerError::GitError(format!("Failed lightweight fetch: {e}"))
    })?;

  Ok(())
}
|
||||
|
||||
/// Resolve a ref name to an Oid (commit)
|
||||
pub fn get_ref_oid<P: AsRef<Path>>(path: P, ref_name: &str) -> Result<Oid> {
|
||||
let repo = Repository::open(path)?;
|
||||
let obj = repo.revparse_single(ref_name).map_err(|e| {
|
||||
PakkerError::GitError(format!("Failed to resolve ref '{ref_name}': {e}"))
|
||||
})?;
|
||||
let commit = obj.peel_to_commit().map_err(|e| {
|
||||
PakkerError::GitError(format!(
|
||||
"Failed to peel ref '{ref_name}' to commit: {e}"
|
||||
))
|
||||
})?;
|
||||
Ok(commit.id())
|
||||
}
|
||||
|
||||
/// Count commits reachable from `oid` in `repo`
|
||||
fn count_commits(repo: &Repository, oid: Oid) -> Result<usize> {
|
||||
let mut revwalk = repo.revwalk().map_err(|e| {
|
||||
PakkerError::GitError(format!(
|
||||
"Failed to create revwalk for counting commits: {e}"
|
||||
))
|
||||
})?;
|
||||
revwalk.push(oid).map_err(|e| {
|
||||
PakkerError::GitError(format!(
|
||||
"Failed to start revwalk from oid {oid}: {e}"
|
||||
))
|
||||
})?;
|
||||
let mut count = 0usize;
|
||||
for _ in revwalk {
|
||||
count += 1;
|
||||
}
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
/// Compute how many commits `local_ref` is ahead/behind `remote_ref`
///
/// Returns `(ahead, behind)`. A missing local ref is a hard error; a missing
/// remote ref is treated as an empty remote, so every commit reachable from
/// `local_ref` counts as "ahead" and `behind` is 0.
pub fn ahead_behind<P: AsRef<Path>>(
  path: P,
  local_ref: &str,
  remote_ref: &str,
) -> Result<(usize, usize)> {
  let repo = Repository::open(&path)?;

  // Try to resolve local OID
  let local_oid = match get_ref_oid(&path, local_ref) {
    Ok(oid) => oid,
    Err(e) => {
      return Err(PakkerError::GitError(format!(
        "Local ref not found '{local_ref}': {e}"
      )));
    },
  };

  // Try to resolve remote OID. If remote ref is missing, consider remote empty
  // and count all commits in local as "ahead".
  if let Ok(remote_oid) = get_ref_oid(&path, remote_ref) {
    let (ahead, behind) = repo
      .graph_ahead_behind(local_oid, remote_oid)
      .map_err(|e| {
        PakkerError::GitError(format!("Failed to compute ahead/behind: {e}"))
      })?;
    Ok((ahead, behind))
  } else {
    // Remote ref missing, count commits reachable from local
    let ahead_count = count_commits(&repo, local_oid)?;
    Ok((ahead_count, 0))
  }
}
|
||||
|
||||
/// Set the URL for a remote in the repository
|
||||
pub fn set_remote_url<P: AsRef<Path>>(
|
||||
path: P,
|
||||
remote_name: &str,
|
||||
url: &str,
|
||||
) -> Result<()> {
|
||||
let repo = Repository::open(path)?;
|
||||
repo.remote_set_url(remote_name, url).map_err(|e| {
|
||||
PakkerError::GitError(format!("Failed to set remote URL: {e}"))
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use std::{fs::File, io::Write};

  use git2::{Repository, Signature};
  use tempfile::tempdir;

  use super::*;

  /// Create a bare repository to act as the "remote" side of push/fetch
  /// and clone tests.
  fn init_bare_repo(path: &std::path::Path) -> Repository {
    Repository::init_bare(path).expect("init bare")
  }

  /// Create a working repository with one file committed, a branch named
  /// `branch` pointing at that commit, and HEAD set to the branch.
  fn init_repo_with_commit(
    path: &std::path::Path,
    file_name: &str,
    content: &str,
    branch: &str,
  ) -> Repository {
    let repo = Repository::init(path).expect("init repo");
    let sig = Signature::now("Test", "test@example.com").unwrap();
    let mut index = repo.index().unwrap();
    let file_path = path.join(file_name);
    let mut f = File::create(&file_path).unwrap();
    writeln!(f, "{content}").unwrap();
    // Close the file before staging it.
    drop(f);
    index.add_path(std::path::Path::new(file_name)).unwrap();
    let tree_id = index.write_tree().unwrap();
    // limit the scope of tree to avoid borrow while moving repo
    {
      let tree = repo.find_tree(tree_id).unwrap();
      let _commit_id = repo
        .commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])
        .unwrap();
    }
    // Create branch pointing at HEAD and set HEAD
    let head_oid = repo.refname_to_id("HEAD").unwrap();
    repo
      .branch(branch, &repo.find_commit(head_oid).unwrap(), true)
      .unwrap();
    repo.set_head(&format!("refs/heads/{branch}")).unwrap();
    repo
  }

  #[test]
  fn test_is_git_repository_and_remote_url() {
    let tmp = tempdir().unwrap();
    let repo_path = tmp.path().join("repo");
    let _repo = init_repo_with_commit(&repo_path, "a.txt", "hello", "master");
    assert!(is_git_repository(&repo_path));
  }

  #[test]
  fn test_fetch_remote_light_and_ahead_behind() {
    let tmp = tempdir().unwrap();
    let bare_path = tmp.path().join("bare.git");
    let _bare = init_bare_repo(&bare_path);

    let work_path = tmp.path().join("work");
    let repo = init_repo_with_commit(&work_path, "a.txt", "hello", "master");

    // Add bare remote and push
    repo.remote("origin", bare_path.to_str().unwrap()).unwrap();
    let mut remote = repo.find_remote("origin").unwrap();
    remote.connect(git2::Direction::Push).unwrap();
    remote
      .push(&["refs/heads/master:refs/heads/master"], None)
      .unwrap();

    // Ensure bare HEAD points to master
    let bare_repo = Repository::open(&bare_path).unwrap();
    bare_repo.set_head("refs/heads/master").unwrap();

    // Now test fetch_remote_light against the work repo (fetch from origin into
    // work should succeed)
    assert!(fetch_remote_light(&work_path, "origin", "master").is_ok());

    // Test ahead_behind with remote tracking ref; local and remote are in
    // sync after the push + fetch, so both counts are zero.
    let (ahead, behind) = ahead_behind(
      &work_path,
      "refs/heads/master",
      "refs/remotes/origin/master",
    )
    .unwrap();
    assert_eq!(ahead, 0);
    assert_eq!(behind, 0);
  }

  #[test]
  fn test_clone_repository_and_origin_rewrite_integration() {
    let tmp = tempdir().unwrap();
    let bare_path = tmp.path().join("upstream.git");
    let _bare = init_bare_repo(&bare_path);

    let work_path = tmp.path().join("workrepo");
    let repo = init_repo_with_commit(&work_path, "b.txt", "hello2", "master");

    // Add remote upstream and push
    repo.remote("origin", bare_path.to_str().unwrap()).unwrap();
    let mut remote = repo.find_remote("origin").unwrap();
    remote.connect(git2::Direction::Push).unwrap();
    remote
      .push(&["refs/heads/master:refs/heads/master"], None)
      .unwrap();
    let bare_repo = Repository::open(&bare_path).unwrap();
    bare_repo.set_head("refs/heads/master").unwrap();

    // Now clone from the local path into a new dir
    let clone_target = tmp.path().join("clone_target");
    let _cloned = clone_repository(
      bare_path.to_str().unwrap(),
      &clone_target,
      "master",
      None,
    )
    .expect("clone");

    // After cloning from a local path, simulate rewriting origin to the
    // upstream network URL
    set_remote_url(&clone_target, "origin", "https://example.com/upstream.git")
      .unwrap();
    let url = get_remote_url(&clone_target, "origin").unwrap();
    assert_eq!(url, "https://example.com/upstream.git");
  }
}
|
||||
30
crates/pakker-core/src/http.rs
Normal file
30
crates/pakker-core/src/http.rs
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use reqwest::Client;
|
||||
|
||||
/// Create HTTP client with optimized settings for API requests.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the HTTP client cannot be built, which should only happen in
|
||||
/// extreme cases like OOM or broken TLS configuration.
|
||||
#[expect(
|
||||
clippy::expect_used,
|
||||
reason = "HTTP client build failure is unrecoverable - only fails under \
|
||||
extreme system resource exhaustion"
|
||||
)]
|
||||
pub fn create_http_client() -> Client {
|
||||
Client::builder()
|
||||
.pool_max_idle_per_host(10)
|
||||
.pool_idle_timeout(Duration::from_secs(30))
|
||||
.tcp_keepalive(Duration::from_secs(60))
|
||||
.tcp_nodelay(true)
|
||||
.connect_timeout(Duration::from_secs(15))
|
||||
.timeout(Duration::from_secs(30))
|
||||
.user_agent("Pakker/0.1.0")
|
||||
.build()
|
||||
.expect(
|
||||
"Failed to build HTTP client - this should never happen unless system \
|
||||
resources are exhausted",
|
||||
)
|
||||
}
|
||||
1343
crates/pakker-core/src/ipc.rs
Normal file
1343
crates/pakker-core/src/ipc.rs
Normal file
File diff suppressed because it is too large
Load diff
21
crates/pakker-core/src/lib.rs
Normal file
21
crates/pakker-core/src/lib.rs
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
//! Crate root for `pakker-core`: declares the public module tree shared by
//! the Pakker binaries.
#![expect(
  clippy::multiple_crate_versions,
  reason = "transitive dependency version conflicts from upstream crates"
)]
#![expect(
  clippy::cargo_common_metadata,
  reason = "license and repository not yet configured"
)]

pub mod error; // `PakkerError` and the crate-wide `Result` alias
pub mod export;
pub mod fetch;
pub mod git; // git2/jj helpers: clone, fetch, ahead/behind, remote URLs
pub mod http; // shared reqwest `Client` construction
pub mod ipc;
pub mod model; // config, credentials, enums, lockfile, project types
pub mod platform;
pub mod rate_limiter;
pub mod resolver;
pub mod ui_utils;
pub mod utils;
||||
23
crates/pakker-core/src/model.rs
Normal file
23
crates/pakker-core/src/model.rs
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
//! Data-model module tree with a flat re-export surface so callers can write
//! `crate::model::Config` etc. without naming submodules.
pub mod config;
pub mod credentials;
pub mod enums;
pub mod fork;
pub mod lockfile;
pub mod r#override;
pub mod project;

// Convenience re-exports of the most commonly used model items.
pub use config::Config;
pub use credentials::{
  PakkerCredentialsFile,
  ResolvedCredentials,
  set_keyring_secret,
};
pub use enums::{
  ProjectSide,
  ProjectType,
  ReleaseType,
  Target,
  UpdateStrategy,
};
pub use lockfile::LockFile;
pub use project::{Project, ProjectFile};
||||
244
crates/pakker-core/src/model/config.rs
Normal file
244
crates/pakker-core/src/model/config.rs
Normal file
|
|
@ -0,0 +1,244 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::enums::{ProjectSide, ProjectType, UpdateStrategy};
|
||||
use crate::error::{PakkerError, Result};
|
||||
|
||||
// File name of the pack configuration, resolved relative to the pack root.
const CONFIG_NAME: &str = "pakker.json";

/// Untagged wrapper letting `Config::load` accept either Pakker's native
/// layout or a pakku-style file nested under a top-level `pakku` key.
/// Serde tries variants in declaration order, so the native layout wins
/// whenever both would match.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ConfigWrapper {
  Pakker(Config),
  Pakku { pakku: PakkerWrappedConfig },
}

/// Payload of a pakku-compatible config file (`{"pakku": {...}}`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PakkerWrappedConfig {
  /// Upstream pack this one was forked from, if any.
  pub parent: Option<ParentConfig>,
  /// Hash of the parent's lock file (empty string when absent).
  #[serde(default)]
  pub parent_lock_hash: String,
  /// Opaque patch entries kept as raw JSON — not interpreted here.
  #[serde(default)]
  pub patches: Vec<serde_json::Value>,
  /// Per-project configuration keyed by project id.
  #[serde(default)]
  pub projects: HashMap<String, ProjectConfig>,
}

/// Reference to the upstream ("parent") pack in a pakku-style config.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParentConfig {
  /// Parent identifier — presumably a repo URL or path: `Config::load`
  /// derives the pack name from its last `/`-segment, minus `.git`.
  pub id: String,
  /// Ref the fork tracks (raw identifier since `ref` is a keyword).
  pub r#ref: String,
  /// Kind of ref ("branch"/"tag"/"commit") — TODO confirm values.
  pub ref_type: String,
  /// Name of the git remote pointing at the parent.
  pub remote_name: String,
  /// Serialized under the JSON key `type` (reserved-ish name in Rust).
  #[serde(rename = "type")]
  pub type_: String,
  /// Parent pack version; copied into the converted `Config`.
  pub version: String,
}
|
||||
|
||||
/// Native Pakker pack configuration, persisted as `pakker.json` with
/// camelCase keys.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
  /// Pack name; must be non-empty (enforced by `validate`).
  pub name: String,
  /// Pack version string.
  pub version: String,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub description: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub author: Option<String>,
  /// Override directories; `Default` uses `["overrides"]`.
  #[serde(default)]
  pub overrides: Vec<String>,
  /// Server-only override directories.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub server_overrides: Option<Vec<String>>,
  /// Client-only override directories.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub client_overrides: Option<Vec<String>>,
  /// Extra key → path mappings; semantics defined by consumers elsewhere.
  #[serde(default)]
  pub paths: HashMap<String, String>,
  /// Per-project settings keyed by project id (`None` when never set).
  #[serde(default)]
  pub projects: Option<HashMap<String, ProjectConfig>>,
  /// Named export profiles.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub export_profiles: Option<HashMap<String, crate::export::ProfileConfig>>,
  // Explicit rename: camelCase derivation alone would not produce this key.
  #[serde(
    skip_serializing_if = "Option::is_none",
    rename = "exportServerSideProjectsToClient"
  )]
  pub export_server_side_projects_to_client: Option<bool>,
  /// Number of files to select per project (defaults to 1)
  #[serde(skip_serializing_if = "Option::is_none")]
  pub file_count_preference: Option<usize>,
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
name: String::new(),
|
||||
version: String::new(),
|
||||
description: None,
|
||||
author: None,
|
||||
overrides: vec!["overrides".to_string()],
|
||||
server_overrides: None,
|
||||
client_overrides: None,
|
||||
paths: HashMap::new(),
|
||||
projects: Some(HashMap::new()),
|
||||
export_profiles: None,
|
||||
export_server_side_projects_to_client: None,
|
||||
file_count_preference: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Per-project overrides stored under `Config::projects`. Every field is
/// optional so absent keys fall back to defaults decided by consumers.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ProjectConfig {
  /// Project kind; serialized under the JSON key `type`.
  #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
  pub r#type: Option<ProjectType>,
  /// Which side(s) the project applies to (client/server/both).
  #[serde(skip_serializing_if = "Option::is_none")]
  pub side: Option<ProjectSide>,
  /// How updates are chosen for this project.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub update_strategy: Option<UpdateStrategy>,
  /// Presumably whether the project may be bundled in exports — TODO confirm.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub redistributable: Option<bool>,
  /// Install subpath — assumed relative to the pack root; verify callers.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub subpath: Option<String>,
  /// Alternative names the project can be addressed by.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub aliases: Option<Vec<String>>,
  /// Whether to include the project when exporting.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub export: Option<bool>,
}
|
||||
|
||||
impl Config {
  /// Load `pakker.json` from `path` (a directory, not the file itself).
  ///
  /// Two layouts are accepted via the untagged [`ConfigWrapper`]:
  /// - native Pakker config → validated and returned as-is;
  /// - pakku-style `{"pakku": {...}}` → converted: the name comes from the
  ///   parent id's last `/`-segment (minus trailing `.git`), the version
  ///   from the parent entry, both falling back to "unknown"; only
  ///   `projects` carries data over, the rest gets defaults.
  ///
  /// # Errors
  ///
  /// `IoError` if the file cannot be read; `InvalidConfigFile` if it parses
  /// as neither layout or fails validation.
  pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
    let path = path.as_ref().join(CONFIG_NAME);
    let content =
      std::fs::read_to_string(&path).map_err(PakkerError::IoError)?;

    match serde_json::from_str::<ConfigWrapper>(&content) {
      Ok(ConfigWrapper::Pakker(config)) => {
        config.validate()?;
        Ok(config)
      },
      Ok(ConfigWrapper::Pakku { pakku }) => {
        // Derive a usable name from the parent id (presumably a repo URL
        // or path — TODO confirm against real pakku files).
        let name = pakku.parent.as_ref().map_or_else(
          || "unknown".to_string(),
          |p| {
            p.id
              .split('/')
              .next_back()
              .unwrap_or(&p.id)
              .trim_end_matches(".git")
              .to_string()
          },
        );

        let version = pakku
          .parent
          .as_ref()
          .map_or_else(|| "unknown".to_string(), |p| p.version.clone());

        // Remaining fields mirror `Config::default`; no validation here —
        // a parent-less pakku file legitimately yields name "unknown".
        Ok(Self {
          name,
          version,
          description: None,
          author: None,
          overrides: vec!["overrides".to_string()],
          server_overrides: None,
          client_overrides: None,
          paths: HashMap::new(),
          projects: Some(pakku.projects),
          export_profiles: None,
          export_server_side_projects_to_client: None,
          file_count_preference: None,
        })
      },
      Err(e) => Err(PakkerError::InvalidConfigFile(e.to_string())),
    }
  }

  /// Validate and write `pakker.json` under `path` atomically: serialize to
  /// a `.tmp` sibling, then rename over the target.
  pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
    self.validate()?;
    let path = path.as_ref().join(CONFIG_NAME);
    let temp_path = path.with_extension("tmp");
    let content = serde_json::to_string_pretty(self)
      .map_err(PakkerError::SerializationError)?;
    std::fs::write(&temp_path, content)?;
    std::fs::rename(temp_path, path)?;
    Ok(())
  }

  /// Minimal invariant check: the pack name must be non-empty.
  pub fn validate(&self) -> Result<()> {
    if self.name.is_empty() {
      return Err(PakkerError::InvalidConfigFile(
        "Config name cannot be empty".to_string(),
      ));
    }
    Ok(())
  }

  /// Look up the per-project config for `project_id`, if any.
  pub fn get_project_config(&self, project_id: &str) -> Option<&ProjectConfig> {
    self.projects.as_ref()?.get(project_id)
  }

  /// Insert or replace the per-project config for `project_id`,
  /// initializing the map when it is still `None`.
  pub fn set_project_config(
    &mut self,
    project_id: String,
    project_config: ProjectConfig,
  ) {
    let projects = self.projects.get_or_insert_with(HashMap::new);
    projects.insert(project_id, project_config);
  }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use super::*;

  /// A hand-built `Config` literal exercises every field explicitly.
  #[test]
  fn test_config_new() {
    let config = Config {
      name: "test-pack".to_string(),
      version: "1.0.0".to_string(),
      description: None,
      author: None,
      overrides: vec!["overrides".to_string()],
      server_overrides: None,
      client_overrides: None,
      paths: HashMap::new(),
      projects: None,
      export_profiles: None,
      export_server_side_projects_to_client: None,
      file_count_preference: None,
    };
    assert_eq!(config.name, "test-pack");
    assert_eq!(config.version, "1.0.0");
    assert_eq!(config.overrides, vec!["overrides"]);
    assert!(config.projects.is_none());
  }

  /// JSON round-trip must preserve all populated fields.
  #[test]
  fn test_config_serialization() {
    let mut config = Config {
      name: "test-pack".to_string(),
      version: "1.0.0".to_string(),
      description: None,
      author: None,
      overrides: vec!["overrides".to_string()],
      server_overrides: None,
      client_overrides: None,
      paths: HashMap::new(),
      projects: None,
      export_profiles: None,
      export_server_side_projects_to_client: None,
      file_count_preference: None,
    };
    config.description = Some("A test modpack".to_string());
    config.author = Some("Test Author".to_string());

    let json = serde_json::to_string(&config).unwrap();
    let deserialized: Config = serde_json::from_str(&json).unwrap();
    assert_eq!(deserialized.name, "test-pack");
    assert_eq!(deserialized.version, "1.0.0");
    assert_eq!(deserialized.description, Some("A test modpack".to_string()));
    assert_eq!(deserialized.author, Some("Test Author".to_string()));
  }
}
|
||||
347
crates/pakker-core/src/model/credentials.rs
Normal file
347
crates/pakker-core/src/model/credentials.rs
Normal file
|
|
@ -0,0 +1,347 @@
|
|||
use std::{fs, path::PathBuf};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::error::{PakkerError, Result};
|
||||
|
||||
/// On-disk layout of Pakker's own credentials file
/// (`~/.config/pakker/credentials.json`); every secret is optional and
/// omitted from serialization when absent.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PakkerCredentialsFile {
  /// CurseForge API key.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub curseforge_api_key: Option<String>,

  /// Modrinth token.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub modrinth_token: Option<String>,

  /// GitHub access token.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub github_access_token: Option<String>,
}
|
||||
|
||||
/// Legacy pakku credentials file (`~/.pakku/credentials`). Note it carries
/// no Modrinth field — only CurseForge and GitHub secrets exist there.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PakkerCompatCredentialsFile {
  /// CurseForge API key.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub curseforge_api_key: Option<String>,

  /// GitHub access token.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub github_access_token: Option<String>,
}
|
||||
|
||||
fn home_dir() -> Result<PathBuf> {
|
||||
let home = std::env::var("HOME")
|
||||
.or_else(|_| std::env::var("USERPROFILE"))
|
||||
.map_err(|_| {
|
||||
PakkerError::InternalError(
|
||||
"Could not determine home directory".to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(PathBuf::from(home))
|
||||
}
|
||||
|
||||
impl PakkerCredentialsFile {
  /// Pakker-owned credentials path: ~/.config/pakker/credentials.json
  pub fn get_path() -> Result<PathBuf> {
    Ok(
      home_dir()?
        .join(".config")
        .join("pakker")
        .join("credentials.json"),
    )
  }

  /// Load the credentials file. A missing file yields the empty default
  /// (all fields `None`) rather than an error.
  pub fn load() -> Result<Self> {
    let path = Self::get_path()?;
    if !path.exists() {
      return Ok(Self::default());
    }

    let content = fs::read_to_string(&path).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to read Pakker credentials file: {e}"
      ))
    })?;

    serde_json::from_str(&content).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to parse Pakker credentials file: {e}"
      ))
    })
  }

  /// Persist the credentials atomically: ensure the config directory
  /// exists, write to a `.tmp` sibling, then rename over the target.
  pub fn save(&self) -> Result<()> {
    let path = Self::get_path()?;

    if let Some(parent) = path.parent() {
      fs::create_dir_all(parent).map_err(|e| {
        PakkerError::InternalError(format!(
          "Failed to create pakker config directory: {e}"
        ))
      })?;
    }

    let content = serde_json::to_string_pretty(self).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to serialize credentials: {e}"
      ))
    })?;

    let temp_path = path.with_extension("tmp");
    fs::write(&temp_path, content).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to write credentials file: {e}"
      ))
    })?;

    // Rename makes the update atomic on the same filesystem.
    fs::rename(&temp_path, &path).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to save credentials file: {e}"
      ))
    })?;

    Ok(())
  }

  /// Delete the credentials file; a missing file is a no-op.
  pub fn delete() -> Result<()> {
    let path = Self::get_path()?;
    if path.exists() {
      fs::remove_file(&path).map_err(|e| {
        PakkerError::InternalError(format!(
          "Failed to delete Pakker credentials file: {e}"
        ))
      })?;
    }
    Ok(())
  }
}
|
||||
|
||||
impl PakkerCompatCredentialsFile {
  /// Pakku credentials path: ~/.pakku/credentials
  /// Read-only: Pakker must never delete or modify this file.
  pub fn get_path() -> Result<PathBuf> {
    Ok(home_dir()?.join(".pakku").join("credentials"))
  }

  /// Load legacy credentials. A missing file yields the empty default.
  /// Accepts either JSON or `.env`-style `KEY=VALUE` lines.
  pub fn load() -> Result<Self> {
    let path = Self::get_path()?;
    if !path.exists() {
      return Ok(Self::default());
    }

    let content = fs::read_to_string(&path).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to read Pakku credentials file: {e}"
      ))
    })?;

    // Try JSON first (for compatibility with future format changes)
    if let Ok(parsed) = serde_json::from_str::<Self>(&content) {
      return Ok(parsed);
    }

    // Fall back to .env-style key=value parsing (format written by pakker init)
    let mut curseforge_api_key = None;
    let mut github_access_token = None;

    for line in content.lines() {
      let line = line.trim();
      // Skip blank lines and '#' comments.
      if line.is_empty() || line.starts_with('#') {
        continue;
      }
      if let Some((key, value)) = line.split_once('=') {
        let key = key.trim();
        let value = value.trim().to_string();
        match key {
          "CURSEFORGE_API_KEY" => curseforge_api_key = Some(value),
          "GITHUB_TOKEN" => github_access_token = Some(value),
          // Unknown keys are ignored deliberately.
          _ => {},
        }
      }
    }

    Ok(Self {
      curseforge_api_key,
      github_access_token,
    })
  }
}
|
||||
|
||||
/// Where a resolved secret came from, in resolution precedence order.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CredentialsSource {
  /// Environment variable.
  Env,
  /// OS keyring (service "pakker").
  Keyring,
  /// Credentials file. NOTE(review): values read from the legacy pakku
  /// file are also reported as `PakkerFile` (see `resolve_secret`) —
  /// consider a distinct variant if the distinction ever matters.
  PakkerFile,
}
|
||||
|
||||
/// Secrets resolved from all sources, each paired with the source that
/// supplied it. Fields are private; use the accessor methods.
#[derive(Debug, Clone, Default)]
pub struct ResolvedCredentials {
  // (value, source) pairs; `None` when the secret was found nowhere.
  curseforge_api_key: Option<(String, CredentialsSource)>,
  modrinth_token: Option<(String, CredentialsSource)>,
  github_access_token: Option<(String, CredentialsSource)>,
}
|
||||
|
||||
impl ResolvedCredentials {
  /// Resolve all secrets with precedence env → keyring → Pakker file →
  /// legacy pakku file. File read failures are swallowed (`.ok()`), so a
  /// broken credentials file behaves like a missing one.
  pub fn load() -> Self {
    let pakker_file = PakkerCredentialsFile::load().ok();
    let pakku_file = PakkerCompatCredentialsFile::load().ok();

    Self {
      // CurseForge also honors the legacy CURSEFORGE_API_KEY env var.
      curseforge_api_key: resolve_secret_with_fallback(
        "PAKKER_CURSEFORGE_API_KEY",
        "CURSEFORGE_API_KEY",
        "curseforge_api_key",
        pakker_file
          .as_ref()
          .and_then(|f| f.curseforge_api_key.clone()),
        pakku_file
          .as_ref()
          .and_then(|f| f.curseforge_api_key.clone()),
      ),
      // Modrinth has no legacy env var and no field in the pakku file.
      modrinth_token: resolve_secret(
        "PAKKER_MODRINTH_TOKEN",
        "modrinth_token",
        pakker_file.as_ref().and_then(|f| f.modrinth_token.clone()),
        None,
      ),
      // GitHub also honors the legacy GITHUB_TOKEN env var.
      github_access_token: resolve_secret_with_fallback(
        "PAKKER_GITHUB_TOKEN",
        "GITHUB_TOKEN",
        "github_access_token",
        pakker_file
          .as_ref()
          .and_then(|f| f.github_access_token.clone()),
        pakku_file
          .as_ref()
          .and_then(|f| f.github_access_token.clone()),
      ),
    }
  }

  /// Resolved CurseForge API key, if any.
  pub fn curseforge_api_key(&self) -> Option<&str> {
    self.curseforge_api_key.as_ref().map(|(v, _)| v.as_str())
  }

  /// Resolved Modrinth token, if any.
  pub fn modrinth_token(&self) -> Option<&str> {
    self.modrinth_token.as_ref().map(|(v, _)| v.as_str())
  }

  /// Resolved GitHub access token, if any.
  pub fn github_access_token(&self) -> Option<&str> {
    self.github_access_token.as_ref().map(|(v, _)| v.as_str())
  }

  /// Source the CurseForge key was resolved from, if resolved.
  pub fn curseforge_source(&self) -> Option<CredentialsSource> {
    self.curseforge_api_key.as_ref().map(|(_, s)| *s)
  }

  /// Source the Modrinth token was resolved from, if resolved.
  pub fn modrinth_source(&self) -> Option<CredentialsSource> {
    self.modrinth_token.as_ref().map(|(_, s)| *s)
  }

  /// Source the GitHub token was resolved from, if resolved.
  pub fn github_source(&self) -> Option<CredentialsSource> {
    self.github_access_token.as_ref().map(|(_, s)| *s)
  }

  /// Remove all three pakker entries from the OS keyring; entries that do
  /// not exist are not an error (see `delete_keyring_secret`).
  pub fn delete_keyring() -> Result<()> {
    delete_keyring_secret("curseforge_api_key")?;
    delete_keyring_secret("modrinth_token")?;
    delete_keyring_secret("github_access_token")?;
    Ok(())
  }
}
|
||||
|
||||
fn resolve_secret(
|
||||
env_key: &str,
|
||||
keyring_entry: &str,
|
||||
pakker_file_value: Option<String>,
|
||||
pakku_file_value: Option<String>,
|
||||
) -> Option<(String, CredentialsSource)> {
|
||||
if let Ok(v) = std::env::var(env_key)
|
||||
&& !v.trim().is_empty()
|
||||
{
|
||||
return Some((v.trim().to_string(), CredentialsSource::Env));
|
||||
}
|
||||
|
||||
if let Ok(v) = get_keyring_secret(keyring_entry)
|
||||
&& !v.trim().is_empty()
|
||||
{
|
||||
return Some((v.trim().to_string(), CredentialsSource::Keyring));
|
||||
}
|
||||
|
||||
if let Some(v) = pakker_file_value.filter(|v| !v.trim().is_empty()) {
|
||||
return Some((v, CredentialsSource::PakkerFile));
|
||||
}
|
||||
|
||||
pakku_file_value
|
||||
.filter(|v| !v.trim().is_empty())
|
||||
.map(|v| (v, CredentialsSource::PakkerFile))
|
||||
}
|
||||
|
||||
fn resolve_secret_with_fallback(
|
||||
env_key: &str,
|
||||
fallback_env_key: &str,
|
||||
keyring_entry: &str,
|
||||
pakker_file_value: Option<String>,
|
||||
pakku_file_value: Option<String>,
|
||||
) -> Option<(String, CredentialsSource)> {
|
||||
if let Ok(v) = std::env::var(env_key)
|
||||
&& !v.trim().is_empty()
|
||||
{
|
||||
return Some((v.trim().to_string(), CredentialsSource::Env));
|
||||
}
|
||||
|
||||
if let Ok(v) = std::env::var(fallback_env_key)
|
||||
&& !v.trim().is_empty()
|
||||
{
|
||||
return Some((v.trim().to_string(), CredentialsSource::Env));
|
||||
}
|
||||
|
||||
if let Ok(v) = get_keyring_secret(keyring_entry)
|
||||
&& !v.trim().is_empty()
|
||||
{
|
||||
return Some((v.trim().to_string(), CredentialsSource::Keyring));
|
||||
}
|
||||
|
||||
if let Some(v) = pakker_file_value.filter(|v| !v.trim().is_empty()) {
|
||||
return Some((v, CredentialsSource::PakkerFile));
|
||||
}
|
||||
|
||||
pakku_file_value
|
||||
.filter(|v| !v.trim().is_empty())
|
||||
.map(|v| (v, CredentialsSource::PakkerFile))
|
||||
}
|
||||
|
||||
/// Read `entry` from the OS keyring under the "pakker" service, returning
/// the raw keyring error so callers can distinguish "missing" from failure.
fn get_keyring_secret(
  entry: &str,
) -> std::result::Result<String, keyring_core::Error> {
  let e = keyring_core::Entry::new("pakker", entry)?;
  e.get_password()
}
|
||||
|
||||
/// Store `value` in the OS keyring under the "pakker" service at `entry`.
///
/// # Errors
///
/// `InternalError` when the keyring entry cannot be opened or written.
pub fn set_keyring_secret(entry: &str, value: &str) -> Result<()> {
  let e = keyring_core::Entry::new("pakker", entry).map_err(|e| {
    PakkerError::InternalError(format!(
      "Failed to access keyring entry {entry}: {e}"
    ))
  })?;

  e.set_password(value).map_err(|e| {
    PakkerError::InternalError(format!(
      "Failed to store secret in keyring entry {entry}: {e}"
    ))
  })
}
|
||||
|
||||
/// Delete `entry` from the "pakker" keyring service. Deletion is
/// idempotent: a missing entry (`NoEntry`) counts as success.
fn delete_keyring_secret(entry: &str) -> Result<()> {
  let e = keyring_core::Entry::new("pakker", entry).map_err(|e| {
    PakkerError::InternalError(format!(
      "Failed to access keyring entry {entry}: {e}"
    ))
  })?;

  match e.delete_credential() {
    // Already-absent entries are treated as deleted.
    Ok(()) | Err(keyring_core::Error::NoEntry) => Ok(()),
    Err(e) => {
      Err(PakkerError::InternalError(format!(
        "Failed to delete keyring entry {entry}: {e}"
      )))
    },
  }
}
|
||||
164
crates/pakker-core/src/model/enums.rs
Normal file
164
crates/pakker-core/src/model/enums.rs
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
use std::{fmt, str::FromStr};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Kind of a pack project. Serialized in SCREAMING_SNAKE_CASE: every
/// variant carries an explicit rename, so the `rename_all = "UPPERCASE"`
/// container attribute is effectively overridden on each one (kept for
/// safety should a variant ever lack its own rename).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum ProjectType {
  #[serde(rename = "MOD")]
  Mod,
  #[serde(rename = "RESOURCE_PACK")]
  ResourcePack,
  #[serde(rename = "DATA_PACK")]
  DataPack,
  #[serde(rename = "SHADER")]
  Shader,
  #[serde(rename = "WORLD")]
  World,
}
|
||||
|
||||
impl fmt::Display for ProjectType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Mod => write!(f, "mod"),
|
||||
Self::ResourcePack => write!(f, "resource-pack"),
|
||||
Self::DataPack => write!(f, "data-pack"),
|
||||
Self::Shader => write!(f, "shader"),
|
||||
Self::World => write!(f, "world"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ProjectType {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"mod" => Ok(Self::Mod),
|
||||
"resource-pack" | "resourcepack" => Ok(Self::ResourcePack),
|
||||
"data-pack" | "datapack" => Ok(Self::DataPack),
|
||||
"shader" => Ok(Self::Shader),
|
||||
"world" => Ok(Self::World),
|
||||
_ => Err(format!("Invalid project type: {s}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum ProjectSide {
|
||||
#[serde(rename = "CLIENT")]
|
||||
Client,
|
||||
#[serde(rename = "SERVER")]
|
||||
Server,
|
||||
#[serde(rename = "BOTH")]
|
||||
Both,
|
||||
}
|
||||
|
||||
impl FromStr for ProjectSide {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"client" => Ok(Self::Client),
|
||||
"server" => Ok(Self::Server),
|
||||
"both" => Ok(Self::Both),
|
||||
_ => Err(format!("Invalid project side: {s}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ProjectSide {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Client => write!(f, "CLIENT"),
|
||||
Self::Server => write!(f, "SERVER"),
|
||||
Self::Both => write!(f, "BOTH"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum UpdateStrategy {
|
||||
#[serde(rename = "LATEST")]
|
||||
Latest,
|
||||
#[serde(rename = "FLEXVER")]
|
||||
FlexVer,
|
||||
#[serde(rename = "NONE")]
|
||||
None,
|
||||
}
|
||||
|
||||
impl FromStr for UpdateStrategy {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_uppercase().as_str() {
|
||||
"LATEST" => Ok(Self::Latest),
|
||||
"FLEXVER" => Ok(Self::FlexVer),
|
||||
"NONE" => Ok(Self::None),
|
||||
_ => Err(format!("Invalid update strategy: {s}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for UpdateStrategy {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Latest => write!(f, "LATEST"),
|
||||
Self::FlexVer => write!(f, "FLEXVER"),
|
||||
Self::None => write!(f, "NONE"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize,
|
||||
)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ReleaseType {
|
||||
// Order matters: Release < Beta < Alpha for sorting (we want Release first)
|
||||
// But we want reverse order, so we'll use reverse() or handle in comparison
|
||||
Release,
|
||||
Beta,
|
||||
Alpha,
|
||||
}
|
||||
|
||||
impl FromStr for ReleaseType {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"release" => Ok(Self::Release),
|
||||
"beta" => Ok(Self::Beta),
|
||||
"alpha" => Ok(Self::Alpha),
|
||||
_ => Err(format!("Invalid release type: {s}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ReleaseType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Release => write!(f, "release"),
|
||||
Self::Beta => write!(f, "beta"),
|
||||
Self::Alpha => write!(f, "alpha"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Target {
|
||||
CurseForge,
|
||||
Modrinth,
|
||||
Multiplatform,
|
||||
}
|
||||
|
||||
impl std::str::FromStr for Target {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"curseforge" => Ok(Self::CurseForge),
|
||||
"modrinth" => Ok(Self::Modrinth),
|
||||
"multiplatform" => Ok(Self::Multiplatform),
|
||||
_ => Err(format!("Invalid target: {s}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
489
crates/pakker-core/src/model/fork.rs
Normal file
489
crates/pakker-core/src/model/fork.rs
Normal file
|
|
@ -0,0 +1,489 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sha2::{Sha256, digest::Digest};
|
||||
|
||||
use crate::model::enums::{ProjectSide, ProjectType, UpdateStrategy};
|
||||
|
||||
/// Fork integrity verification data
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ForkIntegrity {
|
||||
/// SHA256 hash of the parent lockfile content
|
||||
pub lockfile_hash: String,
|
||||
/// Git commit SHA of the parent
|
||||
pub commit_sha: String,
|
||||
/// Hash of the parent config (pakku.json)
|
||||
pub config_hash: String,
|
||||
/// Timestamp of verification
|
||||
pub verified_at: u64,
|
||||
}
|
||||
|
||||
impl ForkIntegrity {
|
||||
pub fn new(
|
||||
lockfile_hash: String,
|
||||
commit_sha: String,
|
||||
config_hash: String,
|
||||
) -> Self {
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
let verified_at = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs();
|
||||
|
||||
Self {
|
||||
lockfile_hash,
|
||||
commit_sha,
|
||||
config_hash,
|
||||
verified_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Compute SHA256 hash of content
|
||||
pub fn hash_content(content: &str) -> String {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(content.as_bytes());
|
||||
crate::utils::hash::hash_to_hex(hasher.finalize().as_slice())
|
||||
}
|
||||
|
||||
/// Reference type for Git operations
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||
pub enum RefType {
|
||||
Branch,
|
||||
Tag,
|
||||
Commit,
|
||||
}
|
||||
|
||||
impl std::str::FromStr for RefType {
|
||||
type Err = String;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"branch" => Ok(Self::Branch),
|
||||
"tag" => Ok(Self::Tag),
|
||||
"commit" => Ok(Self::Commit),
|
||||
_ => Err(format!("Invalid ref type: {s}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for RefType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Branch => write!(f, "branch"),
|
||||
Self::Tag => write!(f, "tag"),
|
||||
Self::Commit => write!(f, "commit"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parent configuration for fork management
///
/// Describes the upstream ("parent") pack a fork tracks. Serialized in
/// camelCase; `type` and `ref` are renamed because they are Rust keywords.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParentConfig {
    // Source kind of the parent repository.
    #[serde(rename = "type")]
    pub type_: String, // Always "git" for now
    // Location the parent is fetched from.
    pub id: String, // Git URL
    // Resolved commit; `None` until the parent has been resolved, and
    // omitted from serialization while unset.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<String>, // Current commit SHA
    // Name of the tracked reference; its kind is given by `ref_type`.
    #[serde(rename = "ref")]
    pub ref_: String, // Branch/tag/commit name
    // Whether `ref_` names a branch, tag, or commit.
    pub ref_type: RefType,
    // Git remote the parent is fetched from; deserialization falls back
    // to `default_remote_name()` when the field is absent.
    #[serde(default = "default_remote_name")]
    pub remote_name: String,
}

// Default remote used when `remoteName` is missing from serialized config.
fn default_remote_name() -> String {
    "origin".to_string()
}
|
||||
|
||||
/// Local project configuration for overrides
///
/// Per-project override entry; every field is optional and only present
/// fields override the parent's values. Unset fields are omitted from
/// serialization entirely.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LocalProjectConfig {
    // Pinned project version override.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    // Project kind override (raw identifier because `type` is a keyword).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub r#type: Option<ProjectType>,
    // Client/server/both side override.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub side: Option<ProjectSide>,
    // Update-strategy override.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub update_strategy: Option<UpdateStrategy>,
    // Redistribution-permission override.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub redistributable: Option<bool>,
    // Subdirectory override for where the project's files are placed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub subpath: Option<String>,
    // Alternative slugs/names this project may be referred to by.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub aliases: Option<Vec<String>>,
    // Whether the project is included in exports.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub export: Option<bool>,
}
|
||||
|
||||
/// Local configuration stored in pakku.json under "pakku" section
///
/// Fork-local state: the tracked parent, per-project overrides, the
/// hashes of the parent state last synced against, local patches, and
/// exclusions from the merged export.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct LocalConfig {
    // Upstream pack this fork tracks; `None` for a standalone pack.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent: Option<ParentConfig>,
    // Per-project overrides keyed by project slug.
    #[serde(default)]
    pub projects: HashMap<String, LocalProjectConfig>,
    // Hash of the parent lockfile at the last sync, for drift detection.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent_lock_hash: Option<String>,
    // Hash of the parent config at the last sync, for drift detection.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent_config_hash: Option<String>,
    // Patch file names applied on top of the parent.
    #[serde(default)]
    pub patches: Vec<String>,
    /// Slugs of parent projects to exclude from the merged export
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub excludes: Vec<String>,
}
|
||||
|
||||
impl LocalConfig {
|
||||
pub const fn has_parent(&self) -> bool {
|
||||
self.parent.is_some()
|
||||
}
|
||||
|
||||
/// Load `LocalConfig` from pakker.json's "pakku" section (with fallback to
|
||||
/// pakku.json)
|
||||
pub fn load(
|
||||
dir: &std::path::Path,
|
||||
) -> Result<Self, crate::error::PakkerError> {
|
||||
use std::fs;
|
||||
|
||||
use crate::error::PakkerError;
|
||||
|
||||
// Try pakker.json first, then fall back to pakku.json
|
||||
let config_path = if dir.join("pakker.json").exists() {
|
||||
dir.join("pakker.json")
|
||||
} else {
|
||||
dir.join("pakku.json")
|
||||
};
|
||||
|
||||
if !config_path.exists() {
|
||||
return Ok(Self::default());
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&config_path).map_err(|e| {
|
||||
PakkerError::InvalidConfigFile(format!(
|
||||
"Failed to read {}: {}",
|
||||
config_path.display(),
|
||||
e
|
||||
))
|
||||
})?;
|
||||
|
||||
let json_value: serde_json::Value = serde_json::from_str(&content)
|
||||
.map_err(|e| {
|
||||
PakkerError::InvalidConfigFile(format!(
|
||||
"Failed to parse pakku.json: {e}"
|
||||
))
|
||||
})?;
|
||||
|
||||
// Extract "pakku" section if it exists
|
||||
if let Some(pakku_section) = json_value.get("pakku") {
|
||||
let local_config: Self = serde_json::from_value(pakku_section.clone())
|
||||
.map_err(|e| {
|
||||
PakkerError::InvalidConfigFile(format!(
|
||||
"Failed to parse pakku section: {e}"
|
||||
))
|
||||
})?;
|
||||
Ok(local_config)
|
||||
} else {
|
||||
Ok(Self::default())
|
||||
}
|
||||
}
|
||||
|
||||
/// Save `LocalConfig` to pakku.json's "pakku" section
|
||||
pub fn save(
|
||||
&self,
|
||||
dir: &std::path::Path,
|
||||
) -> Result<(), crate::error::PakkerError> {
|
||||
use std::fs;
|
||||
|
||||
use crate::error::PakkerError;
|
||||
|
||||
let config_path = dir.join("pakker.json");
|
||||
|
||||
// Read existing pakku.json
|
||||
let mut json_value: serde_json::Value = if config_path.exists() {
|
||||
let content = fs::read_to_string(&config_path).map_err(|e| {
|
||||
PakkerError::InvalidConfigFile(format!(
|
||||
"Failed to read {}: {}",
|
||||
config_path.display(),
|
||||
e
|
||||
))
|
||||
})?;
|
||||
serde_json::from_str(&content).map_err(|e| {
|
||||
PakkerError::InvalidConfigFile(format!(
|
||||
"Failed to parse pakku.json: {e}"
|
||||
))
|
||||
})?
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
};
|
||||
|
||||
// Update or create "pakku" section
|
||||
let local_config_json =
|
||||
serde_json::to_value(self).map_err(PakkerError::SerializationError)?;
|
||||
|
||||
json_value["pakku"] = local_config_json;
|
||||
|
||||
// Write back to file
|
||||
let content = serde_json::to_string_pretty(&json_value)
|
||||
.map_err(PakkerError::SerializationError)?;
|
||||
|
||||
fs::write(&config_path, content).map_err(|e| {
|
||||
PakkerError::IoError(std::io::Error::other(format!(
|
||||
"Failed to write {}: {}",
|
||||
config_path.display(),
|
||||
e
|
||||
)))
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Empty `LocalConfig` base — removes the six repeated struct
    /// literals the tests previously carried.
    fn empty_config() -> LocalConfig {
        LocalConfig {
            parent: None,
            projects: HashMap::new(),
            parent_lock_hash: None,
            parent_config_hash: None,
            patches: vec![],
            excludes: vec![],
        }
    }

    /// Git `ParentConfig` pointing at `id`, tracking `ref_` of kind
    /// `ref_type`, fetched from remote `remote_name`.
    fn git_parent(
        id: &str,
        ref_: &str,
        ref_type: RefType,
        remote_name: &str,
    ) -> ParentConfig {
        ParentConfig {
            type_: "git".to_string(),
            id: id.to_string(),
            version: None,
            ref_: ref_.to_string(),
            ref_type,
            remote_name: remote_name.to_string(),
        }
    }

    /// All-`None` project override, for struct-update syntax in tests.
    fn empty_project() -> LocalProjectConfig {
        LocalProjectConfig {
            version: None,
            r#type: None,
            side: None,
            update_strategy: None,
            redistributable: None,
            subpath: None,
            aliases: None,
            export: None,
        }
    }

    #[test]
    fn test_ref_type_serde_serialization() {
        // RefType serializes to SCREAMING_SNAKE_CASE
        assert_eq!(
            serde_json::to_string(&RefType::Branch).unwrap(),
            "\"BRANCH\""
        );
        assert_eq!(serde_json::to_string(&RefType::Tag).unwrap(), "\"TAG\"");
        assert_eq!(
            serde_json::to_string(&RefType::Commit).unwrap(),
            "\"COMMIT\""
        );
    }

    #[test]
    fn test_ref_type_clap_value_enum() {
        // NOTE(review): RefType carries no clap ValueEnum derive in this
        // file — this only checks the variant count. Confirm whether the
        // clap integration lives elsewhere or the test name is stale.
        let values: Vec<RefType> =
            vec![RefType::Branch, RefType::Tag, RefType::Commit];
        assert_eq!(values.len(), 3);
    }

    #[test]
    fn test_parent_config_new() {
        let config = git_parent(
            "https://github.com/example/repo",
            "main",
            RefType::Branch,
            "upstream",
        );

        assert_eq!(config.type_, "git");
        assert_eq!(config.id, "https://github.com/example/repo");
        assert_eq!(config.version, None);
        assert_eq!(config.ref_, "main");
        assert_eq!(config.ref_type, RefType::Branch);
        assert_eq!(config.remote_name, "upstream");
    }

    #[test]
    fn test_parent_config_default_remote() {
        let config = git_parent(
            "https://github.com/example/repo",
            "main",
            RefType::Branch,
            "origin",
        );
        assert_eq!(config.remote_name, "origin");
    }

    #[test]
    fn test_parent_config_serde_roundtrip() {
        let mut original = git_parent(
            "https://github.com/example/repo.git",
            "v1.0.0",
            RefType::Tag,
            "origin",
        );
        original.version = Some("abc123def456".to_string());

        let json = serde_json::to_string(&original).unwrap();
        let deserialized: ParentConfig = serde_json::from_str(&json).unwrap();

        assert_eq!(deserialized.type_, original.type_);
        assert_eq!(deserialized.id, original.id);
        assert_eq!(deserialized.version, original.version);
        assert_eq!(deserialized.ref_, original.ref_);
        assert_eq!(deserialized.ref_type, original.ref_type);
        assert_eq!(deserialized.remote_name, original.remote_name);
    }

    #[test]
    fn test_local_project_config_default() {
        let config = empty_project();
        assert_eq!(config.version, None);
        assert_eq!(config.side, None);
        assert_eq!(config.update_strategy, None);
        assert_eq!(config.redistributable, None);
    }

    #[test]
    fn test_local_config_default() {
        let config = empty_config();
        assert!(config.parent.is_none());
        assert!(config.projects.is_empty());
        assert!(config.patches.is_empty());
    }

    #[test]
    fn test_local_config_has_parent_false() {
        assert!(!empty_config().has_parent());
    }

    #[test]
    fn test_local_config_has_parent_true() {
        let mut config = empty_config();
        config.parent = Some(git_parent(
            "https://github.com/example/repo",
            "main",
            RefType::Branch,
            "origin",
        ));
        assert!(config.has_parent());
    }

    #[test]
    fn test_local_config_projects_insertion() {
        let mut config = empty_config();
        config
            .projects
            .insert("sodium".to_string(), LocalProjectConfig {
                version: Some("0.5.0".to_string()),
                side: Some(ProjectSide::Both),
                update_strategy: Some(UpdateStrategy::Latest),
                redistributable: Some(true),
                ..empty_project()
            });

        assert_eq!(config.projects.len(), 1);
        let project = config.projects.get("sodium").unwrap();
        assert_eq!(project.version, Some("0.5.0".to_string()));
        assert_eq!(project.side, Some(ProjectSide::Both));
    }

    #[test]
    fn test_local_config_patches() {
        let mut config = empty_config();
        config.patches.push("custom.patch".to_string());
        config.patches.push("bugfix.patch".to_string());

        assert_eq!(config.patches.len(), 2);
        assert_eq!(config.patches[0], "custom.patch");
    }

    #[test]
    fn test_local_config_serde_roundtrip() {
        let mut config = empty_config();
        let mut parent = git_parent(
            "https://github.com/example/repo.git",
            "develop",
            RefType::Branch,
            "origin",
        );
        parent.version = Some("def456".to_string());
        config.parent = Some(parent);
        config
            .projects
            .insert("test-mod".to_string(), LocalProjectConfig {
                version: Some("1.0.0".to_string()),
                side: Some(ProjectSide::Client),
                redistributable: Some(false),
                ..empty_project()
            });
        config.patches.push("test.patch".to_string());
        config.parent_lock_hash = Some("hash123".to_string());

        let json = serde_json::to_string(&config).unwrap();
        let deserialized: LocalConfig = serde_json::from_str(&json).unwrap();

        assert!(deserialized.parent.is_some());
        let parent = deserialized.parent.unwrap();
        assert_eq!(parent.id, "https://github.com/example/repo.git");
        assert_eq!(parent.ref_, "develop");
        assert_eq!(parent.ref_type, RefType::Branch);
        assert_eq!(parent.version, Some("def456".to_string()));

        assert_eq!(deserialized.projects.len(), 1);
        assert!(deserialized.projects.contains_key("test-mod"));
        assert_eq!(deserialized.patches.len(), 1);
        assert_eq!(deserialized.parent_lock_hash, Some("hash123".to_string()));
    }
}
|
||||
760
crates/pakker-core/src/model/lockfile.rs
Normal file
760
crates/pakker-core/src/model/lockfile.rs
Normal file
|
|
@ -0,0 +1,760 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::{enums::Target, project::Project};
|
||||
use crate::error::{PakkerError, Result};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use tempfile::TempDir;
|
||||
|
||||
use super::*;
|
||||
use crate::model::enums::{ProjectSide, ProjectType, UpdateStrategy};
|
||||
|
||||
fn create_test_project(pakku_id: &str, slug: &str) -> Project {
|
||||
use std::collections::HashSet;
|
||||
let mut name_map = HashMap::new();
|
||||
name_map.insert("modrinth".to_string(), slug.to_string());
|
||||
|
||||
let mut id_map = HashMap::new();
|
||||
id_map.insert("modrinth".to_string(), pakku_id.to_string());
|
||||
|
||||
let slug_map = name_map.clone();
|
||||
|
||||
Project {
|
||||
pakku_id: Some(pakku_id.to_string()),
|
||||
pakku_links: HashSet::new(),
|
||||
r#type: ProjectType::Mod,
|
||||
side: ProjectSide::Both,
|
||||
slug: slug_map,
|
||||
name: name_map,
|
||||
id: id_map,
|
||||
update_strategy: UpdateStrategy::Latest,
|
||||
redistributable: true,
|
||||
subpath: None,
|
||||
aliases: HashSet::new(),
|
||||
export: true,
|
||||
files: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_new() {
|
||||
let target = Target::Modrinth;
|
||||
let mc_versions = vec!["1.20.1".to_string()];
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let lockfile = LockFile {
|
||||
target: Some(target),
|
||||
mc_versions: mc_versions.clone(),
|
||||
loaders: loaders.clone(),
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
assert_eq!(lockfile.target, Some(target));
|
||||
assert_eq!(lockfile.mc_versions, mc_versions);
|
||||
assert_eq!(lockfile.loaders, loaders);
|
||||
assert_eq!(lockfile.projects.len(), 0);
|
||||
assert_eq!(lockfile.lockfile_version, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_serialization() {
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let mut lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
lockfile.add_project(create_test_project("test-id", "test-slug"));
|
||||
|
||||
let found = lockfile.get_project("test-id");
|
||||
assert!(found.is_some());
|
||||
assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
|
||||
|
||||
let not_found = lockfile.get_project("nonexistent");
|
||||
assert!(not_found.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_find_project_by_platform_id() {
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
let mut lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
lockfile.add_project(create_test_project("platform-123", "test-slug"));
|
||||
|
||||
let found =
|
||||
lockfile.find_project_by_platform_id("modrinth", "platform-123");
|
||||
assert!(found.is_some());
|
||||
assert_eq!(
|
||||
found.unwrap().id.get("modrinth"),
|
||||
Some(&"platform-123".to_string())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_get_loader_names() {
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
loaders.insert("forge".to_string(), "47.1.0".to_string());
|
||||
|
||||
let lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
let mut loader_names = lockfile.get_loader_names();
|
||||
loader_names.sort();
|
||||
|
||||
assert_eq!(loader_names, vec!["fabric", "forge"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_save_and_load() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let lockfile_path = temp_dir.path();
|
||||
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let mut lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
lockfile.add_project(create_test_project("test-mod", "test-slug"));
|
||||
|
||||
lockfile.save(lockfile_path).unwrap();
|
||||
|
||||
let loaded = LockFile::load(lockfile_path).unwrap();
|
||||
|
||||
assert_eq!(loaded.target, lockfile.target);
|
||||
assert_eq!(loaded.mc_versions, lockfile.mc_versions);
|
||||
assert_eq!(loaded.projects.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_compatibility_with_pakku() {
|
||||
// Test that we can parse a Pakku-generated lockfile
|
||||
let pakku_json = r#"{
|
||||
"target": "modrinth",
|
||||
"mc_versions": ["1.20.1"],
|
||||
"loaders": {"fabric": "0.15.0"},
|
||||
"projects": [
|
||||
{
|
||||
"pakku_id": "fabric-api",
|
||||
"type": "MOD",
|
||||
"side": "BOTH",
|
||||
"slug": {
|
||||
"modrinth": "fabric-api"
|
||||
},
|
||||
"name": {
|
||||
"modrinth": "Fabric API"
|
||||
},
|
||||
"id": {
|
||||
"modrinth": "P7dR8mSH"
|
||||
},
|
||||
"updateStrategy": "LATEST",
|
||||
"redistributable": true,
|
||||
"files": [],
|
||||
"pakku_links": []
|
||||
}
|
||||
],
|
||||
"lockfile_version": 1
|
||||
}"#;
|
||||
|
||||
let lockfile: LockFile = serde_json::from_str(pakku_json).unwrap();
|
||||
assert_eq!(lockfile.target, Some(Target::Modrinth));
|
||||
assert_eq!(lockfile.mc_versions, vec!["1.20.1"]);
|
||||
assert_eq!(lockfile.projects.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_validation_invalid_version() {
|
||||
// Test that lockfile with wrong version fails validation
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let lockfile_path = temp_dir.path().join("pakku-lock.json");
|
||||
|
||||
let invalid_json = r#"{
|
||||
"target": "modrinth",
|
||||
"mc_versions": ["1.20.1"],
|
||||
"loaders": {"fabric": "0.15.0"},
|
||||
"projects": [],
|
||||
"lockfile_version": 999
|
||||
}"#;
|
||||
|
||||
std::fs::write(&lockfile_path, invalid_json).unwrap();
|
||||
|
||||
let result = LockFile::load(temp_dir.path());
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_validation_duplicate_pakku_ids() {
|
||||
// Test that lockfile with duplicate pakku_ids fails validation
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let mut lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
lockfile.add_project(create_test_project("duplicate-id", "slug1"));
|
||||
lockfile.add_project(create_test_project("duplicate-id", "slug2"));
|
||||
|
||||
let result = lockfile.validate();
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_atomic_write() {
|
||||
// Test that save uses atomic write (temp file + rename)
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
lockfile.save(temp_dir.path()).unwrap();
|
||||
|
||||
// Temp file should not exist after save
|
||||
let temp_path = temp_dir.path().join("pakku-lock.tmp");
|
||||
assert!(!temp_path.exists());
|
||||
|
||||
// Actual file should exist
|
||||
let lockfile_path = temp_dir.path().join("pakku-lock.json");
|
||||
assert!(lockfile_path.exists());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_sort_projects() {
|
||||
// Test that projects are sorted alphabetically by name
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let mut lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
lockfile.add_project(create_test_project("zebra-mod", "zebra"));
|
||||
lockfile.add_project(create_test_project("alpha-mod", "alpha"));
|
||||
lockfile.add_project(create_test_project("middle-mod", "middle"));
|
||||
|
||||
lockfile.sort_projects();
|
||||
|
||||
assert_eq!(lockfile.projects[0].pakku_id, Some("alpha-mod".to_string()));
|
||||
assert_eq!(
|
||||
lockfile.projects[1].pakku_id,
|
||||
Some("middle-mod".to_string())
|
||||
);
|
||||
assert_eq!(lockfile.projects[2].pakku_id, Some("zebra-mod".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_find_project_mut() {
|
||||
// Test mutable project lookup
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let mut lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
lockfile.add_project(create_test_project("test-id", "test-slug"));
|
||||
|
||||
// Modify through mutable reference
|
||||
if let Some(project) = lockfile.find_project_mut("test-id") {
|
||||
project.redistributable = false;
|
||||
}
|
||||
|
||||
let found = lockfile.get_project("test-id").unwrap();
|
||||
assert!(!found.redistributable);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_multiple_loaders() {
|
||||
// Test lockfile with multiple loaders
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
loaders.insert("forge".to_string(), "47.1.0".to_string());
|
||||
loaders.insert("quilt".to_string(), "0.20.0".to_string());
|
||||
|
||||
let lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: vec!["1.20.1".to_string()],
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
let loader_names = lockfile.get_loader_names();
|
||||
assert_eq!(loader_names.len(), 3);
|
||||
assert!(loader_names.contains(&"fabric".to_string()));
|
||||
assert!(loader_names.contains(&"forge".to_string()));
|
||||
assert!(loader_names.contains(&"quilt".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lockfile_multiple_mc_versions() {
|
||||
// Test lockfile with multiple Minecraft versions
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert("fabric".to_string(), "0.15.0".to_string());
|
||||
|
||||
let mc_versions = vec![
|
||||
"1.20.1".to_string(),
|
||||
"1.20.2".to_string(),
|
||||
"1.20.4".to_string(),
|
||||
];
|
||||
|
||||
let lockfile = LockFile {
|
||||
target: Some(Target::Modrinth),
|
||||
mc_versions: mc_versions.clone(),
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
assert_eq!(lockfile.mc_versions, mc_versions);
|
||||
}
|
||||
|
||||
#[test]
fn test_lockfile_roundtrip_preserves_data() {
  // Saving then reloading must preserve every field, modulo the version
  // migration that load() performs.
  let temp_dir = TempDir::new().unwrap();

  let loaders: HashMap<String, String> =
    [("fabric", "0.15.0"), ("forge", "47.1.0")]
      .into_iter()
      .map(|(name, version)| (name.to_string(), version.to_string()))
      .collect();

  let mc_versions = vec!["1.20.1".to_string(), "1.20.4".to_string()];

  let mut lockfile = LockFile {
    target: Some(Target::Modrinth),
    mc_versions: mc_versions.clone(),
    loaders: loaders.clone(),
    projects: Vec::new(),
    lockfile_version: 1,
  };
  lockfile.add_project(create_test_project("mod1", "slug1"));
  lockfile.add_project(create_test_project("mod2", "slug2"));

  lockfile.save(temp_dir.path()).unwrap();
  let loaded = LockFile::load(temp_dir.path()).unwrap();

  assert_eq!(loaded.target, Some(Target::Modrinth));
  assert_eq!(loaded.mc_versions, mc_versions);
  assert_eq!(loaded.loaders, loaders);
  assert_eq!(loaded.projects.len(), 2);
  // Lockfile should be migrated from v1 to v2 on load
  assert_eq!(loaded.lockfile_version, 2);
}
|
||||
|
||||
#[test]
fn test_lockfile_remove_nonexistent_project() {
  // Removing an unknown pakku ID is a no-op that yields None.
  let mut loaders = HashMap::new();
  loaders.insert("fabric".to_string(), "0.15.0".to_string());

  let mut lockfile = LockFile {
    target: Some(Target::Modrinth),
    mc_versions: vec!["1.20.1".to_string()],
    loaders,
    projects: Vec::new(),
    lockfile_version: 1,
  };

  assert!(lockfile.remove_project("nonexistent-id").is_none());
}
|
||||
|
||||
#[test]
fn test_lockfile_empty_projects_list() {
  // An empty project list is valid: validation only requires mc_versions
  // and loaders to be non-empty.
  let mut loaders = HashMap::new();
  loaders.insert("fabric".to_string(), "0.15.0".to_string());

  let lockfile = LockFile {
    target: Some(Target::Modrinth),
    mc_versions: vec!["1.20.1".to_string()],
    loaders,
    projects: Vec::new(),
    lockfile_version: 1,
  };

  assert_eq!(lockfile.projects.len(), 0);
  assert!(lockfile.validate().is_ok());
}
|
||||
|
||||
#[test]
fn test_lockfile_migration_v1_to_v2() {
  // A v1 lockfile on disk must be migrated to v2 on load, and the migrated
  // result must be written back so subsequent loads already see v2.
  // (The unused `loaders` HashMap from the original test was removed.)
  let temp_dir = TempDir::new().unwrap();

  // Create a v1 lockfile manually
  let v1_content = r#"{
  "target": "modrinth",
  "mc_versions": ["1.20.1"],
  "loaders": {"fabric": "0.15.0"},
  "projects": [],
  "lockfile_version": 1
}"#;

  let lockfile_path = temp_dir.path().join("pakku-lock.json");
  std::fs::write(&lockfile_path, v1_content).unwrap();

  // Load should trigger migration
  let loaded = LockFile::load(temp_dir.path()).unwrap();
  assert_eq!(loaded.lockfile_version, 2);

  // Verify the migrated file was saved: a second load must also see v2
  let reloaded = LockFile::load(temp_dir.path()).unwrap();
  assert_eq!(reloaded.lockfile_version, 2);
}
|
||||
|
||||
#[test]
fn test_lockfile_migration_preserves_projects() {
  // Migrating a v1 lockfile must carry every project across unchanged.
  let temp_dir = TempDir::new().unwrap();

  // Hand-written v1 lockfile with one project (enum values use the
  // serialized casing the deserializer expects).
  let v1_content = r#"{
  "target": "modrinth",
  "mc_versions": ["1.20.1"],
  "loaders": {"fabric": "0.15.0"},
  "projects": [
    {
      "pakku_id": "test-id-1",
      "type": "MOD",
      "side": "BOTH",
      "name": {"modrinth": "Test Mod"},
      "slug": {"modrinth": "test-mod"},
      "id": {"modrinth": "abc123"},
      "files": [],
      "pakku_links": [],
      "aliases": [],
      "update_strategy": "LATEST",
      "redistributable": true,
      "export": true
    }
  ],
  "lockfile_version": 1
}"#;

  std::fs::write(temp_dir.path().join("pakku-lock.json"), v1_content)
    .unwrap();

  let loaded = LockFile::load(temp_dir.path()).unwrap();
  assert_eq!(loaded.lockfile_version, 2);
  assert_eq!(loaded.projects.len(), 1);
  assert_eq!(loaded.projects[0].pakku_id, Some("test-id-1".to_string()));
}
|
||||
|
||||
#[test]
fn test_lockfile_rejects_future_version() {
  // A lockfile written by a newer Pakker (version > LOCKFILE_VERSION)
  // must be rejected with an explanatory error, not silently downgraded.
  let temp_dir = TempDir::new().unwrap();

  let future_content = r#"{
  "target": "modrinth",
  "mc_versions": ["1.20.1"],
  "loaders": {"fabric": "0.15.0"},
  "projects": [],
  "lockfile_version": 999
}"#;

  std::fs::write(temp_dir.path().join("pakku-lock.json"), future_content)
    .unwrap();

  let result = LockFile::load(temp_dir.path());
  assert!(result.is_err());
  let message = result.unwrap_err().to_string();
  assert!(message.contains("newer than supported"));
}
|
||||
|
||||
#[test]
fn test_lockfile_pretty_json_format() {
  // save() uses serde_json::to_string_pretty, so the on-disk file should
  // contain newlines and indentation rather than one long line.
  let temp_dir = TempDir::new().unwrap();

  let mut loaders = HashMap::new();
  loaders.insert("fabric".to_string(), "0.15.0".to_string());

  let lockfile = LockFile {
    target: Some(Target::Modrinth),
    mc_versions: vec!["1.20.1".to_string()],
    loaders,
    projects: Vec::new(),
    lockfile_version: 1,
  };
  lockfile.save(temp_dir.path()).unwrap();

  let written =
    std::fs::read_to_string(temp_dir.path().join("pakku-lock.json")).unwrap();

  // Pretty-printed JSON should have newlines and indentation
  assert!(written.contains('\n'));
  assert!(written.contains("  "));
}
|
||||
|
||||
#[test]
fn test_lockfile_missing_file() {
  // Loading from a directory with no pakku-lock.json must fail cleanly.
  let temp_dir = TempDir::new().unwrap();
  let missing_dir = temp_dir.path().join("nonexistent");

  assert!(LockFile::load(&missing_dir).is_err());
}
|
||||
|
||||
#[test]
fn test_lockfile_corrupted_json() {
  // Malformed JSON must surface as an error rather than a panic.
  let temp_dir = TempDir::new().unwrap();
  std::fs::write(
    temp_dir.path().join("pakku-lock.json"),
    "not valid json {[}",
  )
  .unwrap();

  assert!(LockFile::load(temp_dir.path()).is_err());
}
|
||||
}
|
||||
|
||||
/// Current lockfile version - bump this when making breaking changes.
/// Files with a smaller version are migrated on load (see `LockFile::migrate`);
/// files with a larger version are rejected by `LockFile::validate`.
const LOCKFILE_VERSION: u32 = 2;

/// File name of the lockfile inside a pack directory.
const LOCKFILE_NAME: &str = "pakku-lock.json";
|
||||
|
||||
/// On-disk lock state of a pack: the resolved projects plus the environment
/// (Minecraft versions and loaders) they were resolved against.
/// Serialized as pretty-printed JSON in `pakku-lock.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LockFile {
  /// Distribution target platform, omitted from JSON when unset.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub target: Option<Target>,
  /// Targeted Minecraft versions; must be non-empty (enforced by `validate`).
  pub mc_versions: Vec<String>,
  /// Loader name -> loader version; must be non-empty (enforced by `validate`).
  pub loaders: HashMap<String, String>,
  /// Resolved projects in this pack.
  pub projects: Vec<Project>,
  /// Schema version. Defaults to 0 for legacy files that lack the field,
  /// which triggers migration on load.
  #[serde(default)]
  pub lockfile_version: u32,
}
|
||||
|
||||
impl LockFile {
|
||||
pub fn get_project(&self, pakku_id: &str) -> Option<&Project> {
|
||||
self
|
||||
.projects
|
||||
.iter()
|
||||
.find(|p| p.pakku_id.as_deref() == Some(pakku_id))
|
||||
}
|
||||
|
||||
pub fn get_loader_names(&self) -> Vec<String> {
|
||||
self.loaders.keys().cloned().collect()
|
||||
}
|
||||
|
||||
pub fn remove_project(&mut self, pakku_id: &str) -> Option<Project> {
|
||||
if let Some(pos) = self
|
||||
.projects
|
||||
.iter()
|
||||
.position(|p| p.pakku_id.as_deref() == Some(pakku_id))
|
||||
{
|
||||
Some(self.projects.remove(pos))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_project_mut(&mut self, pakku_id: &str) -> Option<&mut Project> {
|
||||
self
|
||||
.projects
|
||||
.iter_mut()
|
||||
.find(|p| p.pakku_id.as_deref() == Some(pakku_id))
|
||||
}
|
||||
|
||||
pub fn find_project_by_platform_id(
|
||||
&self,
|
||||
platform: &str,
|
||||
id: &str,
|
||||
) -> Option<&Project> {
|
||||
self
|
||||
.projects
|
||||
.iter()
|
||||
.find(|p| p.id.get(platform).is_some_and(|pid| pid == id))
|
||||
}
|
||||
}
|
||||
|
||||
impl LockFile {
|
||||
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
|
||||
Self::load_with_validation(path, true)
|
||||
}
|
||||
|
||||
pub fn load_with_validation<P: AsRef<Path>>(
|
||||
path: P,
|
||||
validate: bool,
|
||||
) -> Result<Self> {
|
||||
let path_ref = path.as_ref();
|
||||
let lockfile_path = path_ref.join(LOCKFILE_NAME);
|
||||
let content =
|
||||
std::fs::read_to_string(&lockfile_path).map_err(PakkerError::IoError)?;
|
||||
|
||||
let mut lockfile: Self = serde_json::from_str(&content)
|
||||
.map_err(|e| PakkerError::InvalidLockFile(e.to_string()))?;
|
||||
|
||||
// Check if migration is needed
|
||||
if lockfile.lockfile_version < LOCKFILE_VERSION {
|
||||
lockfile = lockfile.migrate();
|
||||
// Save migrated lockfile
|
||||
lockfile.save_without_validation(path_ref)?;
|
||||
log::info!(
|
||||
"Migrated lockfile from version {} to {}",
|
||||
lockfile.lockfile_version,
|
||||
LOCKFILE_VERSION
|
||||
);
|
||||
}
|
||||
|
||||
if validate {
|
||||
lockfile.validate()?;
|
||||
}
|
||||
lockfile.sort_projects();
|
||||
|
||||
Ok(lockfile)
|
||||
}
|
||||
|
||||
/// Migrate lockfile from older version to current version
|
||||
fn migrate(mut self) -> Self {
|
||||
// Migration from v0 (pakku format, no explicit version) to v1
|
||||
if self.lockfile_version == 0 {
|
||||
log::info!("Migrating lockfile from v0 (pakku format) to v1...");
|
||||
self.lockfile_version = 1;
|
||||
}
|
||||
|
||||
// Migration from v1 to v2
|
||||
if self.lockfile_version == 1 {
|
||||
log::info!("Migrating lockfile from v1 to v2...");
|
||||
|
||||
// v2 changes:
|
||||
// - Projects now have explicit export field (defaults to true)
|
||||
// - Side detection is more granular
|
||||
for project in &mut self.projects {
|
||||
// Ensure export field is set (v1 didn't always have it)
|
||||
// Already has a default in Project, but be explicit
|
||||
if !project.export {
|
||||
project.export = true;
|
||||
}
|
||||
}
|
||||
|
||||
self.lockfile_version = 2;
|
||||
}
|
||||
|
||||
// Future migrations would go here:
|
||||
// if self.lockfile_version == 2 {
|
||||
// // migrate v2 -> v3
|
||||
// self.lockfile_version = 3;
|
||||
// }
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
|
||||
self.validate()?;
|
||||
let path = path.as_ref().join(LOCKFILE_NAME);
|
||||
let content = serde_json::to_string_pretty(self)
|
||||
.map_err(PakkerError::SerializationError)?;
|
||||
std::fs::write(&path, content).map_err(PakkerError::IoError)
|
||||
}
|
||||
|
||||
pub fn save_without_validation<P: AsRef<Path>>(&self, path: P) -> Result<()> {
|
||||
let path = path.as_ref().join(LOCKFILE_NAME);
|
||||
let content = serde_json::to_string_pretty(self)
|
||||
.map_err(PakkerError::SerializationError)?;
|
||||
std::fs::write(&path, content).map_err(PakkerError::IoError)
|
||||
}
|
||||
|
||||
pub fn validate(&self) -> Result<()> {
|
||||
if self.lockfile_version > LOCKFILE_VERSION {
|
||||
return Err(PakkerError::InvalidLockFile(format!(
|
||||
"Lockfile version {} is newer than supported version {}. Please \
|
||||
upgrade Pakker.",
|
||||
self.lockfile_version, LOCKFILE_VERSION
|
||||
)));
|
||||
}
|
||||
if self.mc_versions.is_empty() {
|
||||
return Err(PakkerError::InvalidLockFile(
|
||||
"At least one Minecraft version is required".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
if self.loaders.is_empty() {
|
||||
return Err(PakkerError::InvalidLockFile(
|
||||
"At least one loader is required".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Check for unique pakku IDs
|
||||
let mut seen_ids = std::collections::HashSet::new();
|
||||
for project in &self.projects {
|
||||
if let Some(ref pakku_id) = project.pakku_id
|
||||
&& !seen_ids.insert(pakku_id)
|
||||
{
|
||||
return Err(PakkerError::InvalidLockFile(format!(
|
||||
"Duplicate pakku ID: {pakku_id}"
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn sort_projects(&mut self) {
|
||||
self.projects.sort_by(|a, b| {
|
||||
a.get_name()
|
||||
.to_lowercase()
|
||||
.cmp(&b.get_name().to_lowercase())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn add_project(&mut self, project: Project) {
|
||||
self.projects.push(project);
|
||||
self.projects.sort_by_key(super::project::Project::get_name);
|
||||
}
|
||||
}
|
||||
1
crates/pakker-core/src/model/override.rs
Normal file
1
crates/pakker-core/src/model/override.rs
Normal file
|
|
@ -0,0 +1 @@
|
|||
|
||||
959
crates/pakker-core/src/model/project.rs
Normal file
959
crates/pakker-core/src/model/project.rs
Normal file
|
|
@ -0,0 +1,959 @@
|
|||
use std::collections::{HashMap, HashSet};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::enums::{ProjectSide, ProjectType, ReleaseType, UpdateStrategy};
|
||||
use crate::error::{PakkerError, Result};
|
||||
|
||||
/// A resolved pack entry (mod, resource pack, etc.) that may exist on
/// multiple platforms; per-platform identity lives in the `slug`/`name`/`id`
/// maps, keyed by platform name (e.g. "modrinth").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Project {
  /// Pakker-internal identifier; omitted from JSON when unset.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub pakku_id: Option<String>,
  /// pakku IDs of linked projects (e.g. dependencies) —
  /// TODO confirm exact linking semantics against callers.
  #[serde(skip_serializing_if = "HashSet::is_empty", default)]
  pub pakku_links: HashSet<String>,
  /// Project kind; serialized under the JSON key "type".
  #[serde(rename = "type")]
  pub r#type: ProjectType,
  /// Which side(s) this project runs on; defaults to Both when absent.
  #[serde(default = "default_side")]
  pub side: ProjectSide,
  /// Platform name -> URL slug.
  pub slug: HashMap<String, String>,
  /// Platform name -> display name.
  pub name: HashMap<String, String>,
  /// Platform name -> platform-specific project ID.
  pub id: HashMap<String, String>,
  /// How updates are chosen; the default (Latest) is not serialized.
  #[serde(
    default = "default_update_strategy",
    skip_serializing_if = "is_default_update_strategy"
  )]
  pub update_strategy: UpdateStrategy,
  /// Whether the project may be redistributed; the default (true) is not
  /// serialized.
  #[serde(
    default = "default_redistributable",
    skip_serializing_if = "is_default_redistributable"
  )]
  pub redistributable: bool,
  /// Optional subdirectory override for where files are placed.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub subpath: Option<String>,
  /// Extra user-supplied names that match this project in lookups.
  #[serde(default, skip_serializing_if = "HashSet::is_empty")]
  pub aliases: HashSet<String>,
  /// Whether the project is included in exports; the default (true) is not
  /// serialized.
  #[serde(
    default = "default_export",
    skip_serializing_if = "is_default_export"
  )]
  pub export: bool,
  /// Resolved downloadable files for this project.
  pub files: Vec<ProjectFile>,
}
|
||||
|
||||
// Serde `default = "..."` helpers: the values used when a field is absent
// from the JSON being deserialized.

/// Projects are exported by default.
const fn default_export() -> bool {
  true
}

/// Projects apply to both client and server by default.
const fn default_side() -> ProjectSide {
  ProjectSide::Both
}

/// Projects track the latest version by default.
const fn default_update_strategy() -> UpdateStrategy {
  UpdateStrategy::Latest
}

/// Projects are assumed redistributable by default.
const fn default_redistributable() -> bool {
  true
}
|
||||
|
||||
// Serde `skip_serializing_if = "..."` predicates: a field equal to its
// default is omitted from the serialized JSON to keep lockfiles compact.

/// True when the strategy is the default (`Latest`).
#[expect(
  clippy::trivially_copy_pass_by_ref,
  reason = "required by serde skip_serializing_if which expects fn(&T) -> bool"
)]
const fn is_default_update_strategy(strategy: &UpdateStrategy) -> bool {
  matches!(strategy, UpdateStrategy::Latest)
}

/// True when redistributable is the default (`true`).
#[expect(
  clippy::trivially_copy_pass_by_ref,
  reason = "required by serde skip_serializing_if which expects fn(&T) -> bool"
)]
const fn is_default_redistributable(redistributable: &bool) -> bool {
  *redistributable
}

/// True when export is the default (`true`).
#[expect(
  clippy::trivially_copy_pass_by_ref,
  reason = "required by serde skip_serializing_if which expects fn(&T) -> bool"
)]
const fn is_default_export(export: &bool) -> bool {
  *export
}
|
||||
|
||||
impl Project {
  /// Create a project with the given identity and all other fields at their
  /// defaults (empty maps/sets, exportable, redistributable, Latest updates).
  pub fn new(pakku_id: String, typ: ProjectType, side: ProjectSide) -> Self {
    Self {
      pakku_id: Some(pakku_id),
      pakku_links: HashSet::new(),
      r#type: typ,
      side,
      slug: HashMap::new(),
      name: HashMap::new(),
      id: HashMap::new(),
      update_strategy: UpdateStrategy::Latest,
      redistributable: true,
      subpath: None,
      aliases: HashSet::new(),
      export: true,
      files: Vec::new(),
    }
  }

  /// The platform-specific ID for `platform`, if this project exists there.
  pub fn get_platform_id(&self, platform: &str) -> Option<&String> {
    self.id.get(platform)
  }

  /// Best-effort display name: any platform name, else the pakku ID, else
  /// the literal "unknown".
  ///
  /// NOTE(review): `HashMap` iteration order is unspecified, so when the
  /// project has names on multiple platforms, which one is returned is
  /// arbitrary and may differ between runs.
  pub fn get_name(&self) -> String {
    self
      .name
      .values()
      .next()
      .map(std::borrow::ToOwned::to_owned)
      .or_else(|| self.pakku_id.as_ref().map(std::borrow::ToOwned::to_owned))
      .unwrap_or_else(|| "unknown".to_string())
  }

  /// Whether user input identifies this project. Matches the pakku ID,
  /// any slug, any name (case-insensitive, ASCII only), any platform ID,
  /// or any alias — all other comparisons are case-sensitive.
  pub fn matches_input(&self, input: &str) -> bool {
    // Check pakku_id
    if let Some(ref pakku_id) = self.pakku_id
      && pakku_id == input
    {
      return true;
    }

    // Check slugs
    if self.slug.values().any(|s| s == input) {
      return true;
    }

    // Check names (case-insensitive)
    if self.name.values().any(|n| n.eq_ignore_ascii_case(input)) {
      return true;
    }

    // Check IDs
    if self.id.values().any(|i| i == input) {
      return true;
    }

    // Check aliases
    if self.aliases.contains(input) {
      return true;
    }

    false
  }

  /// Register this project's identity on an additional platform,
  /// overwriting any existing entry for that platform.
  pub fn add_platform(
    &mut self,
    platform: String,
    id: String,
    slug: String,
    name: String,
  ) {
    self.id.insert(platform.clone(), id);
    self.slug.insert(platform.clone(), slug);
    self.name.insert(platform, name);
  }

  /// Merge `other` into `self` in place. Existing platform entries win
  /// (`or_insert`); links and aliases are unioned; files are appended only
  /// when their `id` is not already present. Unlike `merged`, this performs
  /// no type/link compatibility checks.
  pub fn merge(&mut self, other: Self) {
    // Merge platform identifiers
    for (platform, id) in other.id {
      self.id.entry(platform).or_insert(id);
    }
    for (platform, slug) in other.slug {
      self.slug.entry(platform).or_insert(slug);
    }
    for (platform, name) in other.name {
      self.name.entry(platform).or_insert(name);
    }

    // Merge pakku links
    self.pakku_links.extend(other.pakku_links);

    // Merge files
    for file in other.files {
      if !self.files.iter().any(|f| f.id == file.id) {
        self.files.push(file);
      }
    }

    // Merge aliases
    self.aliases.extend(other.aliases);
  }

  /// Merge this project with another, returning a new combined project.
  /// Like Pakku's `Project.plus()`, this is a pure operation that doesn't
  /// modify either project.
  ///
  /// `self` takes precedence for platform entries, pakku_id, subpath and
  /// update strategy; redistributable is the AND of both sides; side keeps
  /// the non-default (`!= Both`) value.
  ///
  /// # Errors
  /// Returns `PakkerError::InvalidProject` if the projects have different types
  /// or conflicting `pakku_links`.
  pub fn merged(&self, other: Self) -> Result<Self> {
    if self.r#type != other.r#type {
      return Err(PakkerError::InvalidProject(format!(
        "Cannot merge projects of different types: {:?} vs {:?}",
        self.r#type, other.r#type
      )));
    }

    // Links must be absent on `other` or identical on both sides.
    if !other.pakku_links.is_empty() && self.pakku_links != other.pakku_links {
      return Err(PakkerError::InvalidProject(
        "Cannot merge projects with conflicting pakku_links".to_string(),
      ));
    }

    // Prefer non-default side
    let side = if self.side == ProjectSide::Both {
      other.side
    } else {
      self.side
    };

    let mut id = self.id.clone();
    for (platform, other_id) in other.id {
      id.entry(platform).or_insert(other_id);
    }

    let mut slug = self.slug.clone();
    for (platform, other_slug) in other.slug {
      slug.entry(platform).or_insert(other_slug);
    }

    let mut name = self.name.clone();
    for (platform, other_name) in other.name {
      name.entry(platform).or_insert(other_name);
    }

    // Deduplicate files by their platform file ID.
    let mut files = self.files.clone();
    for file in other.files {
      if !files.iter().any(|f| f.id == file.id) {
        files.push(file);
      }
    }

    let mut aliases = self.aliases.clone();
    aliases.extend(other.aliases);

    Ok(Self {
      pakku_id: self.pakku_id.clone(),
      pakku_links: self.pakku_links.clone(),
      r#type: self.r#type,
      side,
      slug,
      name,
      id,
      update_strategy: self.update_strategy,
      // Only redistributable if BOTH halves are.
      redistributable: self.redistributable && other.redistributable,
      subpath: self.subpath.clone().or_else(|| other.subpath.clone()),
      aliases,
      // Equivalent to `self.export || other.export`.
      export: if self.export {
        self.export
      } else {
        other.export
      },
      files,
    })
  }

  /// Check if versions match across all providers.
  /// Returns true if all provider files have the same version/file,
  /// or if there's only one provider.
  ///
  /// NOTE(review): versions are parsed heuristically from file names
  /// (trailing `-x.y.z.jar` or `_x.y.z.jar`); names that don't parse as
  /// semver are silently skipped, which can make mismatched providers
  /// report as matching. Only each provider's FIRST file is compared.
  pub fn versions_match_across_providers(&self) -> bool {
    if self.files.len() <= 1 {
      return true;
    }

    // Group files by provider (using parent_id as proxy)
    let mut versions_by_provider: HashMap<String, Vec<&str>> = HashMap::new();
    for file in &self.files {
      // Extract provider from file type or use parent_id
      let provider = &file.file_type;
      versions_by_provider
        .entry(provider.clone())
        .or_default()
        .push(&file.file_name);
    }

    // If only one provider, versions match
    if versions_by_provider.len() <= 1 {
      return true;
    }

    // Compare semantic versions extracted from file names
    let parse_version = |name: &str| {
      // Try to extract version from patterns like "mod-1.0.0.jar" or
      // "mod_v1.0.0"
      let version_str = name
        .rsplit_once('-')
        .and_then(|(_, v)| v.strip_suffix(".jar"))
        .or_else(|| {
          name
            .rsplit_once('_')
            .and_then(|(_, v)| v.strip_suffix(".jar"))
        })
        .unwrap_or(name);
      semver::Version::parse(version_str).ok()
    };

    let versions: Vec<_> = versions_by_provider
      .values()
      .filter_map(|files| files.first().copied().and_then(parse_version))
      .collect();

    // All versions should be the same
    versions.windows(2).all(|w| w[0] == w[1])
  }

  /// Check if versions do NOT match across providers.
  /// Returns Some with details if there's a mismatch, None if versions match.
  /// The message lists every file as `provider=file_name`.
  pub fn check_version_mismatch(&self) -> Option<String> {
    if self.versions_match_across_providers() {
      return None;
    }

    // Collect version info by provider
    let mut provider_versions: Vec<(String, String)> = Vec::new();
    for file in &self.files {
      provider_versions.push((file.file_type.clone(), file.file_name.clone()));
    }

    Some(format!(
      "Version mismatch for {}: {}",
      self.get_name(),
      provider_versions
        .iter()
        .map(|(p, v)| format!("{p}={v}"))
        .collect::<Vec<_>>()
        .join(", ")
    ))
  }

  /// Narrow `self.files` down to the best `file_count` (default 1) files
  /// compatible with the given Minecraft versions and loaders.
  ///
  /// Selection order relies on `ReleaseType`'s `Ord` — assumed
  /// Release < Beta < Alpha per the comment below; confirm against the
  /// enum definition. Ties break by `date_published`, newest first
  /// (lexicographic string comparison — assumes ISO-8601 timestamps).
  ///
  /// # Errors
  /// Returns `FileSelectionError` when no file is compatible.
  pub fn select_file(
    &mut self,
    mc_versions: &[String],
    loaders: &[String],
    file_count: Option<usize>,
  ) -> crate::error::Result<()> {
    // Filter compatible files
    let compatible_files: Vec<_> = self
      .files
      .iter()
      .filter(|f| f.is_compatible(mc_versions, loaders))
      .collect();

    if compatible_files.is_empty() {
      return Err(crate::error::PakkerError::FileSelectionError(format!(
        "No compatible files found for {}",
        self.get_name()
      )));
    }

    // Sort by release type (Release < Beta < Alpha) and date (newest first)
    let mut sorted_files = compatible_files.clone();
    sorted_files.sort_by(|a, b| {
      a.release_type
        .cmp(&b.release_type)
        .then_with(|| b.date_published.cmp(&a.date_published))
    });

    // Keep the specified number of files (default to 1 if not specified)
    let count = file_count.unwrap_or(1);
    self.files = sorted_files.into_iter().take(count).cloned().collect();

    Ok(())
  }
}
|
||||
|
||||
/// A single downloadable artifact belonging to a [`Project`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectFile {
  /// Provider/kind tag; serialized under the JSON key "type".
  /// Used by `get_site_url` to pick a platform ("modrinth", "curseforge",
  /// "github").
  #[serde(rename = "type")]
  pub file_type: String,
  /// File name as published (e.g. "mod-1.0.0.jar").
  pub file_name: String,
  /// Minecraft versions this file supports; empty when unknown.
  #[serde(default)]
  pub mc_versions: Vec<String>,
  /// Loaders this file supports; empty means loader-agnostic
  /// (see `is_compatible`).
  #[serde(default)]
  pub loaders: Vec<String>,
  /// Release channel (release/beta/alpha).
  pub release_type: ReleaseType,
  /// Direct download URL.
  pub url: String,
  /// Platform-specific file/version ID.
  pub id: String,
  /// ID of the owning project on the platform (for GitHub this holds
  /// "owner/repo" — see `get_site_url`).
  pub parent_id: String,
  /// Hash algorithm name -> digest for integrity checks.
  pub hashes: HashMap<String, String>,
  /// IDs of projects this file requires.
  #[serde(default)]
  pub required_dependencies: Vec<String>,
  /// File size in bytes.
  pub size: u64,
  /// Publication timestamp — assumed ISO-8601 string; TODO confirm,
  /// since `select_file` compares these lexicographically.
  pub date_published: String,
}
|
||||
|
||||
impl ProjectFile {
  /// Whether this file can be used with the requested Minecraft versions
  /// and loaders.
  ///
  /// A file is compatible when at least one of its `mc_versions` is
  /// requested AND its loaders are empty (loader-agnostic), overlap the
  /// requested loaders, or include one of the always-accepted special
  /// targets in `VALID_LOADERS`.
  ///
  /// NOTE(review): the `VALID_LOADERS` branch accepts the file regardless
  /// of the requested loaders — confirm that is intended for mixed
  /// loader lists like ["fabric", "forge-only-loader"].
  pub fn is_compatible(
    &self,
    mc_versions: &[String],
    loaders: &[String],
  ) -> bool {
    // Loader-like tags that are not real mod loaders but always acceptable.
    const VALID_LOADERS: &[&str] =
      &["minecraft", "iris", "optifine", "datapack"];

    let mc_compatible =
      self.mc_versions.iter().any(|v| mc_versions.contains(v));

    // Accept files with empty loaders, OR loaders matching request, OR valid
    // special loaders
    let loader_compatible = self.loaders.is_empty()
      || self.loaders.iter().any(|l| loaders.contains(l))
      || self
        .loaders
        .iter()
        .any(|l| VALID_LOADERS.contains(&l.as_str()));

    mc_compatible && loader_compatible
  }

  /// Generate a viewable URL for this file based on its provider.
  /// Returns None if the URL cannot be determined.
  pub fn get_site_url(&self, project: &Project) -> Option<String> {
    // Determine provider from file type
    match self.file_type.as_str() {
      "modrinth" => {
        // Format: https://modrinth.com/mod/{slug}/version/{file_id}
        let slug = project.slug.get("modrinth")?;
        Some(format!(
          "https://modrinth.com/mod/{}/version/{}",
          slug, self.id
        ))
      },
      "curseforge" => {
        // Format: https://www.curseforge.com/minecraft/mc-mods/{slug}/files/{file_id}
        let slug = project.slug.get("curseforge")?;
        Some(format!(
          "https://www.curseforge.com/minecraft/mc-mods/{}/files/{}",
          slug, self.id
        ))
      },
      "github" => {
        // Format: https://github.com/{owner}/{repo}/releases/tag/{tag}
        // parent_id contains owner/repo, id contains the tag/version
        Some(format!(
          "https://github.com/{}/releases/tag/{}",
          self.parent_id, self.id
        ))
      },
      _ => None,
    }
  }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
fn test_project_new() {
  // A freshly constructed project carries the given identity and starts
  // with empty links and files.
  let project =
    Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);

  assert_eq!(project.pakku_id, Some("test-id".to_string()));
  assert_eq!(project.r#type, ProjectType::Mod);
  assert_eq!(project.side, ProjectSide::Both);
  assert!(project.pakku_links.is_empty() && project.files.is_empty());
}
|
||||
|
||||
#[test]
fn test_project_serialization() {
  // JSON round-trip must preserve identity fields and platform maps.
  let mut project =
    Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
  for (map, value) in [
    (&mut project.slug, "test-slug"),
    (&mut project.name, "Test Mod"),
    (&mut project.id, "abc123"),
  ] {
    map.insert("modrinth".to_string(), value.to_string());
  }

  let json = serde_json::to_string(&project).unwrap();
  let roundtripped: Project = serde_json::from_str(&json).unwrap();

  assert_eq!(roundtripped.pakku_id, project.pakku_id);
  assert_eq!(roundtripped.r#type, project.r#type);
  assert_eq!(roundtripped.side, project.side);
  assert_eq!(
    roundtripped.slug.get("modrinth"),
    Some(&"test-slug".to_string())
  );
}
|
||||
|
||||
#[test]
fn test_project_file_is_compatible_with_empty_loaders() {
  // A file declaring no loaders is loader-agnostic and must be accepted
  // for any requested loader, provided the MC version matches.
  let file = ProjectFile {
    file_type: "mod".to_string(),
    file_name: "test.jar".to_string(),
    mc_versions: vec!["1.20.1".to_string()],
    loaders: Vec::new(),
    release_type: ReleaseType::Release,
    url: "https://example.com/test.jar".to_string(),
    id: "file123".to_string(),
    parent_id: "mod123".to_string(),
    hashes: HashMap::new(),
    required_dependencies: Vec::new(),
    size: 1024,
    date_published: "2024-01-01T00:00:00Z".to_string(),
  };

  let wanted_mc = ["1.20.1".to_string()];
  let wanted_loaders = ["fabric".to_string()];
  assert!(file.is_compatible(&wanted_mc, &wanted_loaders));
}
|
||||
|
||||
#[test]
fn test_project_file_is_compatible_with_matching_loaders() {
  // A file whose loader list overlaps the requested loaders is compatible.
  let file = ProjectFile {
    file_type: "mod".to_string(),
    file_name: "test.jar".to_string(),
    mc_versions: vec!["1.20.1".to_string()],
    loaders: vec!["fabric".to_string()],
    release_type: ReleaseType::Release,
    url: "https://example.com/test.jar".to_string(),
    id: "file123".to_string(),
    parent_id: "mod123".to_string(),
    hashes: HashMap::new(),
    required_dependencies: Vec::new(),
    size: 1024,
    date_published: "2024-01-01T00:00:00Z".to_string(),
  };

  let wanted_mc = ["1.20.1".to_string()];
  let wanted_loaders = ["fabric".to_string()];
  assert!(file.is_compatible(&wanted_mc, &wanted_loaders));
}
|
||||
|
||||
#[test]
fn test_project_file_is_compatible_with_valid_loaders() {
  // The special always-accepted loader tags must pass even when they do
  // not overlap the requested loaders.
  let wanted_mc = vec!["1.20.1".to_string()];
  let wanted_loaders = vec!["fabric".to_string()];

  for loader in ["minecraft", "iris", "optifine", "datapack"] {
    let file = ProjectFile {
      file_type: "mod".to_string(),
      file_name: "test.jar".to_string(),
      mc_versions: wanted_mc.clone(),
      loaders: vec![loader.to_string()],
      release_type: ReleaseType::Release,
      url: "https://example.com/test.jar".to_string(),
      id: "file123".to_string(),
      parent_id: "mod123".to_string(),
      hashes: HashMap::new(),
      required_dependencies: Vec::new(),
      size: 1024,
      date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    assert!(
      file.is_compatible(&wanted_mc, &wanted_loaders),
      "Failed for valid loader: {loader}"
    );
  }
}
|
||||
|
||||
#[test]
fn test_project_file_incompatible() {
  // Neither the MC version nor the loader matches: must be rejected.
  let file = ProjectFile {
    file_type: "mod".to_string(),
    file_name: "test.jar".to_string(),
    mc_versions: vec!["1.19.4".to_string()],
    loaders: vec!["forge".to_string()],
    release_type: ReleaseType::Release,
    url: "https://example.com/test.jar".to_string(),
    id: "file123".to_string(),
    parent_id: "mod123".to_string(),
    hashes: HashMap::new(),
    required_dependencies: Vec::new(),
    size: 1024,
    date_published: "2024-01-01T00:00:00Z".to_string(),
  };

  let wanted_mc = ["1.20.1".to_string()];
  let wanted_loaders = ["fabric".to_string()];
  assert!(!file.is_compatible(&wanted_mc, &wanted_loaders));
}
|
||||
|
||||
#[test]
fn test_project_select_file() {
  // select_file must succeed when at least one compatible file exists.
  let mut project =
    Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);

  // Local factory keeps the two fixture files in sync except where they
  // intentionally differ (name, channel, date, id).
  let make_file =
    |name: &str, channel: ReleaseType, date: &str, id: &str| ProjectFile {
      file_type: "mod".to_string(),
      file_name: name.to_string(),
      mc_versions: vec!["1.20.1".to_string()],
      loaders: vec!["fabric".to_string()],
      release_type: channel,
      url: format!("https://example.com/{name}"),
      id: id.to_string(),
      parent_id: "mod123".to_string(),
      hashes: HashMap::new(),
      required_dependencies: Vec::new(),
      size: 1024,
      date_published: date.to_string(),
    };

  project.files.push(make_file(
    "alpha.jar",
    ReleaseType::Alpha,
    "2024-01-03T00:00:00Z",
    "file1",
  ));
  project.files.push(make_file(
    "release.jar",
    ReleaseType::Release,
    "2024-01-01T00:00:00Z",
    "file2",
  ));

  let wanted_mc = vec!["1.20.1".to_string()];
  let wanted_loaders = vec!["fabric".to_string()];
  assert!(project.select_file(&wanted_mc, &wanted_loaders, None).is_ok());
}
|
||||
|
||||
#[test]
fn test_versions_match_across_providers_single_file() {
    // A single provider file can never conflict with itself, so both the
    // match predicate and the mismatch report agree.
    let mut project =
        Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);

    let only_file = ProjectFile {
        id: "file1".to_string(),
        parent_id: "mod123".to_string(),
        file_type: "modrinth".to_string(),
        file_name: "test-1.0.0.jar".to_string(),
        url: "https://example.com/test.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    };
    project.files.push(only_file);

    assert!(project.versions_match_across_providers());
    assert!(project.check_version_mismatch().is_none());
}
|
||||
|
||||
#[test]
fn test_versions_match_across_providers_same_file() {
    // Two providers serving the same file name (same version) must be
    // reported as matching, even though ids/urls/parent ids differ.
    let mut project =
        Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);

    // Same file name from different providers
    project.files.push(ProjectFile {
        file_type: "modrinth".to_string(),
        file_name: "test-1.0.0.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: "https://modrinth.com/test.jar".to_string(),
        id: "mr-file1".to_string(),
        parent_id: "mod123".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    });

    project.files.push(ProjectFile {
        file_type: "curseforge".to_string(),
        file_name: "test-1.0.0.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: "https://curseforge.com/test.jar".to_string(),
        id: "cf-file1".to_string(),
        parent_id: "mod456".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    });

    assert!(project.versions_match_across_providers());
}
|
||||
|
||||
#[test]
fn test_versions_mismatch_across_providers() {
    // Different file names (versions 1.0.0 vs 0.9.0) across providers must
    // trip the mismatch detector and produce a human-readable report.
    let mut project =
        Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);

    // Name is needed so the mismatch message can reference the project.
    project
        .name
        .insert("test".to_string(), "Test Mod".to_string());

    // Different file names from different providers
    project.files.push(ProjectFile {
        file_type: "modrinth".to_string(),
        file_name: "test-1.0.0.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: "https://modrinth.com/test.jar".to_string(),
        id: "mr-file1".to_string(),
        parent_id: "mod123".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    });

    project.files.push(ProjectFile {
        file_type: "curseforge".to_string(),
        file_name: "test-0.9.0.jar".to_string(), // Different version
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: "https://curseforge.com/test.jar".to_string(),
        id: "cf-file1".to_string(),
        parent_id: "mod456".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    });

    assert!(!project.versions_match_across_providers());
    let mismatch = project.check_version_mismatch();
    assert!(mismatch.is_some());
    let msg = mismatch.unwrap();
    assert!(msg.contains("Version mismatch"));
}
|
||||
|
||||
#[test]
fn test_get_site_url_modrinth() {
    // A modrinth-typed file resolves to a modrinth.com URL containing both
    // the project slug and the version/file id.
    let mut project =
        Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
    project
        .slug
        .insert("modrinth".to_string(), "sodium".to_string());

    let file = ProjectFile {
        id: "abc123".to_string(),
        parent_id: "sodium".to_string(),
        file_type: "modrinth".to_string(),
        file_name: "sodium-1.0.0.jar".to_string(),
        url: "https://modrinth.com/sodium.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    let url = file.get_site_url(&project);
    assert!(url.is_some());
    let url = url.unwrap();
    for needle in ["modrinth.com", "sodium", "abc123"] {
        assert!(url.contains(needle));
    }
}
|
||||
|
||||
#[test]
fn test_get_site_url_curseforge() {
    // A curseforge-typed file resolves to a curseforge.com URL containing
    // both the project slug and the numeric file id.
    let mut project =
        Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
    project
        .slug
        .insert("curseforge".to_string(), "jei".to_string());

    let file = ProjectFile {
        file_type: "curseforge".to_string(),
        file_name: "jei-1.0.0.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["forge".to_string()],
        release_type: ReleaseType::Release,
        url: "https://curseforge.com/jei.jar".to_string(),
        id: "12345".to_string(),
        parent_id: "jei".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    let url = file.get_site_url(&project);
    assert!(url.is_some());
    let url = url.unwrap();
    assert!(url.contains("curseforge.com"));
    assert!(url.contains("jei"));
    assert!(url.contains("12345"));
}
|
||||
|
||||
#[test]
fn test_get_site_url_github() {
    // GitHub files derive the site URL from the file's own parent_id
    // ("owner/repo") and release tag id — no project slug entry is needed.
    let project =
        Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);

    let file = ProjectFile {
        file_type: "github".to_string(),
        file_name: "mod-1.0.0.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: "https://github.com/owner/repo/releases/download/v1.0.0/mod.jar"
            .to_string(),
        id: "v1.0.0".to_string(),
        parent_id: "owner/repo".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    let url = file.get_site_url(&project);
    assert!(url.is_some());
    let url = url.unwrap();
    assert!(url.contains("github.com"));
    assert!(url.contains("owner/repo"));
    assert!(url.contains("v1.0.0"));
}
|
||||
|
||||
#[test]
fn test_get_site_url_unknown_type() {
    // An unrecognized file_type has no site-URL scheme, so the lookup
    // yields None rather than guessing.
    let project =
        Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);

    let mystery_file = ProjectFile {
        id: "123".to_string(),
        parent_id: "mod".to_string(),
        file_type: "unknown".to_string(),
        file_name: "mod.jar".to_string(),
        url: "https://example.com/mod.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    assert!(mystery_file.get_site_url(&project).is_none());
}
|
||||
|
||||
#[test]
fn test_merged_different_types_returns_error() {
    // Merging a Mod with a ResourcePack is a type conflict and must fail.
    let mut mod_project =
        Project::new("id1".to_string(), ProjectType::Mod, ProjectSide::Both);
    mod_project
        .name
        .insert("modrinth".to_string(), "Mod1".to_string());

    let mut pack_project = Project::new(
        "id2".to_string(),
        ProjectType::ResourcePack,
        ProjectSide::Both,
    );
    pack_project
        .name
        .insert("modrinth".to_string(), "RP1".to_string());

    assert!(mod_project.merged(pack_project).is_err());
}
|
||||
|
||||
#[test]
fn test_merged_combines_ids_and_slugs() {
    // Merging projects known on different platforms unions their
    // per-platform id and slug maps.
    let mut modrinth_side =
        Project::new("id1".to_string(), ProjectType::Mod, ProjectSide::Both);
    modrinth_side.add_platform(
        "modrinth".to_string(),
        "mr1".to_string(),
        "mod1".to_string(),
        "Mod 1".to_string(),
    );

    let mut curseforge_side =
        Project::new("id2".to_string(), ProjectType::Mod, ProjectSide::Both);
    curseforge_side.add_platform(
        "curseforge".to_string(),
        "cf1".to_string(),
        "mod1".to_string(),
        "Mod 1".to_string(),
    );

    let merged = modrinth_side.merged(curseforge_side).unwrap();

    // Both platform ids and both slugs survive the merge.
    assert_eq!(merged.id.get("modrinth"), Some(&"mr1".to_string()));
    assert_eq!(merged.slug.get("modrinth"), Some(&"mod1".to_string()));
    assert_eq!(merged.id.get("curseforge"), Some(&"cf1".to_string()));
    assert_eq!(merged.slug.get("curseforge"), Some(&"mod1".to_string()));
}
|
||||
|
||||
#[test]
fn test_merged_prefers_non_both_side() {
    // Regardless of merge direction, a specific side (Client) wins over
    // the generic Both.
    let client_only =
        Project::new("id1".to_string(), ProjectType::Mod, ProjectSide::Client);
    let either_side =
        Project::new("id2".to_string(), ProjectType::Mod, ProjectSide::Both);

    let merged = client_only.merged(either_side.clone()).unwrap();
    assert_eq!(merged.side, ProjectSide::Client);

    let merged_reversed = either_side.merged(client_only).unwrap();
    assert_eq!(merged_reversed.side, ProjectSide::Client);
}
|
||||
|
||||
#[test]
fn test_merged_preserves_pakku_id() {
    // The receiver's id becomes the merged project's pakku_id.
    let receiver =
        Project::new("id1".to_string(), ProjectType::Mod, ProjectSide::Both);
    let other =
        Project::new("id2".to_string(), ProjectType::Mod, ProjectSide::Both);

    let merged = receiver.merged(other).unwrap();
    assert_eq!(merged.pakku_id, Some("id1".to_string()));
}
|
||||
|
||||
#[test]
fn test_merged_deduplicates_files() {
    // Two projects carrying the *same* file (identical provider, name and
    // id) must end up with a single file entry after merging.
    let mut p1 =
        Project::new("id1".to_string(), ProjectType::Mod, ProjectSide::Both);
    p1.files.push(ProjectFile {
        file_type: "modrinth".to_string(),
        file_name: "mod-1.0.0.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: "https://example.com/mod.jar".to_string(),
        id: "file1".to_string(),
        parent_id: "mod123".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    });

    let mut p2 =
        Project::new("id2".to_string(), ProjectType::Mod, ProjectSide::Both);
    p2.files.push(ProjectFile {
        file_type: "modrinth".to_string(),
        file_name: "mod-1.0.0.jar".to_string(),
        mc_versions: vec!["1.20.1".to_string()],
        loaders: vec!["fabric".to_string()],
        release_type: ReleaseType::Release,
        url: "https://example.com/mod.jar".to_string(),
        id: "file1".to_string(),
        parent_id: "mod123".to_string(),
        hashes: HashMap::new(),
        required_dependencies: vec![],
        size: 1024,
        date_published: "2024-01-01T00:00:00Z".to_string(),
    });

    let merged = p1.merged(p2).unwrap();
    assert_eq!(merged.files.len(), 1);
}
|
||||
}
|
||||
150
crates/pakker-core/src/platform.rs
Normal file
150
crates/pakker-core/src/platform.rs
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
mod curseforge;
|
||||
mod github;
|
||||
mod modrinth;
|
||||
mod multiplatform;
|
||||
mod traits;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use curseforge::CurseForgePlatform;
|
||||
pub use github::GitHubPlatform;
|
||||
pub use modrinth::ModrinthPlatform;
|
||||
pub use multiplatform::MultiplatformPlatform;
|
||||
pub use traits::PlatformClient;
|
||||
|
||||
use crate::{error::Result, http, rate_limiter::RateLimiter};
|
||||
|
||||
// Process-wide HTTP client, shared by every platform instance so connection
// pools are reused. Built lazily on first access.
static HTTP_CLIENT: std::sync::LazyLock<Arc<reqwest::Client>> =
    std::sync::LazyLock::new(|| Arc::new(http::create_http_client()));

// Process-wide rate limiter shared by all platforms; `None` selects the
// limiter's default configuration.
static RATE_LIMITER: std::sync::LazyLock<Arc<RateLimiter>> =
    std::sync::LazyLock::new(|| Arc::new(RateLimiter::new(None)));
|
||||
|
||||
pub fn get_http_client() -> Arc<reqwest::Client> {
|
||||
HTTP_CLIENT.clone()
|
||||
}
|
||||
|
||||
/// Construct a platform client for `platform` ("modrinth", "curseforge",
/// "github", or "multiplatform"), wrapped in the shared rate limiter.
///
/// # Errors
/// Returns a `ConfigError` for unknown platform names (via `create_client`).
pub fn create_platform(
    platform: &str,
    api_key: Option<String>,
) -> Result<Box<dyn PlatformClient>> {
    let client = create_client(platform, api_key)?;
    let platform_name = platform.to_string();
    // Every request goes through RateLimitedPlatform so per-platform
    // throttling is applied uniformly.
    Ok(Box::new(RateLimitedPlatform {
        platform: client,
        rate_limiter: RATE_LIMITER.clone(),
        platform_name,
    }))
}
|
||||
|
||||
/// Instantiate the raw (un-rate-limited) client for a platform name.
///
/// All clients share the process-wide HTTP client; "multiplatform" composes
/// a CurseForge and a Modrinth client behind one facade.
///
/// # Errors
/// `ConfigError` when `platform` is not a recognized name.
fn create_client(
    platform: &str,
    api_key: Option<String>,
) -> Result<Box<dyn PlatformClient>> {
    match platform {
        "modrinth" => {
            Ok(Box::new(ModrinthPlatform::with_client(get_http_client())))
        },
        "curseforge" => {
            Ok(Box::new(CurseForgePlatform::with_client(
                get_http_client(),
                api_key,
            )))
        },
        "github" => {
            Ok(Box::new(GitHubPlatform::with_client(
                &get_http_client(),
                api_key,
            )))
        },
        "multiplatform" => {
            // NOTE: the api_key is only forwarded to CurseForge; Modrinth
            // needs no key.
            let cf = CurseForgePlatform::with_client(get_http_client(), api_key);
            let mr = ModrinthPlatform::with_client(get_http_client());
            Ok(Box::new(MultiplatformPlatform::new(
                Arc::new(cf),
                Arc::new(mr),
            )))
        },
        _ => {
            Err(crate::error::PakkerError::ConfigError(format!(
                "Unknown platform: {platform}"
            )))
        },
    }
}
|
||||
|
||||
/// Decorator that throttles every call to an inner `PlatformClient` through
/// the shared `RateLimiter`, keyed by platform name.
struct RateLimitedPlatform {
    // The wrapped platform implementation all calls are delegated to.
    platform: Box<dyn PlatformClient>,
    // Shared limiter; `wait_for(platform_name)` is awaited before each call.
    rate_limiter: Arc<RateLimiter>,
    // Key used to look up this platform's rate budget in the limiter.
    platform_name: String,
}
|
||||
|
||||
// Pure delegation: each method awaits the rate limiter for this platform,
// then forwards the call unchanged to the wrapped client. Keep every method
// in lockstep with the `PlatformClient` trait.
#[async_trait::async_trait]
impl PlatformClient for RateLimitedPlatform {
    async fn request_project(
        &self,
        identifier: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<crate::model::Project> {
        self.rate_limiter.wait_for(&self.platform_name).await;
        self
            .platform
            .request_project(identifier, mc_versions, loaders)
            .await
    }

    async fn request_project_files(
        &self,
        project_id: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Vec<crate::model::ProjectFile>> {
        self.rate_limiter.wait_for(&self.platform_name).await;
        self
            .platform
            .request_project_files(project_id, mc_versions, loaders)
            .await
    }

    async fn request_project_with_files(
        &self,
        identifier: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<crate::model::Project> {
        self.rate_limiter.wait_for(&self.platform_name).await;
        self
            .platform
            .request_project_with_files(identifier, mc_versions, loaders)
            .await
    }

    async fn lookup_by_hash(
        &self,
        hash: &str,
    ) -> Result<Option<crate::model::Project>> {
        self.rate_limiter.wait_for(&self.platform_name).await;
        self.platform.lookup_by_hash(hash).await
    }

    async fn request_project_from_slug(
        &self,
        slug: &str,
    ) -> Result<Option<crate::model::Project>> {
        self.rate_limiter.wait_for(&self.platform_name).await;
        self.platform.request_project_from_slug(slug).await
    }

    async fn request_projects_from_hashes(
        &self,
        hashes: &[String],
        algorithm: &str,
    ) -> Result<Vec<crate::model::Project>> {
        // NOTE(review): a batch call consumes only one rate-limit slot even
        // though the inner impl may fan out to many requests — confirm
        // that's intended.
        self.rate_limiter.wait_for(&self.platform_name).await;
        self
            .platform
            .request_projects_from_hashes(hashes, algorithm)
            .await
    }
}
|
||||
694
crates/pakker-core/src/platform/curseforge.rs
Normal file
694
crates/pakker-core/src/platform/curseforge.rs
Normal file
|
|
@ -0,0 +1,694 @@
|
|||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::traits::PlatformClient;
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType},
|
||||
utils::generate_pakku_id,
|
||||
};
|
||||
|
||||
/// Base URL for all `CurseForge` v1 REST endpoints.
const CURSEFORGE_API_BASE: &str = "https://api.curseforge.com/v1";
/// `CurseForge` game version type ID for loader versions (e.g., "fabric",
/// "forge")
const LOADER_VERSION_TYPE_ID: i32 = 68441;
/// `CurseForge` relation type ID for "required dependency" (mod embeds or
/// requires another mod)
const DEPENDENCY_RELATION_TYPE_REQUIRED: u32 = 3;
|
||||
|
||||
/// `PlatformClient` implementation backed by the `CurseForge` v1 API.
pub struct CurseForgePlatform {
    // Shared HTTP client (usually the process-wide one from the platform
    // module).
    client: Arc<Client>,
    // API key; required at request time — `get_headers` errors without one.
    api_key: Option<String>,
}
|
||||
|
||||
impl CurseForgePlatform {
|
||||
pub fn new(api_key: Option<String>) -> Self {
|
||||
Self {
|
||||
client: Arc::new(Client::new()),
|
||||
api_key,
|
||||
}
|
||||
}
|
||||
|
||||
pub const fn with_client(
|
||||
client: Arc<Client>,
|
||||
api_key: Option<String>,
|
||||
) -> Self {
|
||||
Self { client, api_key }
|
||||
}
|
||||
|
||||
fn get_headers(&self) -> Result<reqwest::header::HeaderMap> {
|
||||
let mut headers = reqwest::header::HeaderMap::new();
|
||||
|
||||
if let Some(api_key) = &self.api_key {
|
||||
headers.insert(
|
||||
"x-api-key",
|
||||
reqwest::header::HeaderValue::from_str(api_key).map_err(|_| {
|
||||
PakkerError::ConfigError("Invalid API key".to_string())
|
||||
})?,
|
||||
);
|
||||
} else {
|
||||
return Err(PakkerError::ConfigError(
|
||||
"CurseForge API key required".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(headers)
|
||||
}
|
||||
|
||||
/// Map a non-success HTTP status to the appropriate `PakkerError`.
|
||||
fn map_http_error(status: reqwest::StatusCode, context: &str) -> PakkerError {
|
||||
match status.as_u16() {
|
||||
404 => PakkerError::ProjectNotFound(context.to_string()),
|
||||
401 | 403 => {
|
||||
PakkerError::ConfigError(format!(
|
||||
"CurseForge API authentication failed ({}). Check your API key.",
|
||||
status
|
||||
))
|
||||
},
|
||||
_ => {
|
||||
PakkerError::PlatformApiError(format!(
|
||||
"CurseForge API returned {} for {}",
|
||||
status, context
|
||||
))
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const fn map_class_id(class_id: u32) -> ProjectType {
|
||||
match class_id {
|
||||
12 => ProjectType::ResourcePack,
|
||||
6945 => ProjectType::DataPack,
|
||||
6552 => ProjectType::Shader,
|
||||
17 => ProjectType::World,
|
||||
_ => ProjectType::Mod,
|
||||
}
|
||||
}
|
||||
|
||||
const fn map_release_type(release_type: u32) -> ReleaseType {
|
||||
match release_type {
|
||||
2 => ReleaseType::Beta,
|
||||
3 => ReleaseType::Alpha,
|
||||
_ => ReleaseType::Release,
|
||||
}
|
||||
}
|
||||
|
||||
/// Determine project side based on `CurseForge` categories.
|
||||
/// `CurseForge` doesn't have explicit client/server fields like Modrinth,
|
||||
/// so we infer from category names and IDs.
|
||||
fn detect_side_from_categories(
|
||||
categories: &[CurseForgeCategory],
|
||||
) -> ProjectSide {
|
||||
// Known client-only category indicators (slugs and partial name matches)
|
||||
const CLIENT_INDICATORS: &[&str] = &[
|
||||
"client",
|
||||
"hud",
|
||||
"gui",
|
||||
"cosmetic",
|
||||
"shader",
|
||||
"optifine",
|
||||
"resource-pack",
|
||||
"texture",
|
||||
"minimap",
|
||||
"tooltip",
|
||||
"inventory",
|
||||
"quality-of-life", // Often client-side QoL
|
||||
];
|
||||
|
||||
// Known server-only category indicators
|
||||
const SERVER_INDICATORS: &[&str] = &[
|
||||
"server-utility",
|
||||
"bukkit",
|
||||
"spigot",
|
||||
"paper",
|
||||
"admin-tools",
|
||||
"anti-grief",
|
||||
"economy",
|
||||
"permissions",
|
||||
"chat",
|
||||
];
|
||||
|
||||
let mut client_score = 0;
|
||||
let mut server_score = 0;
|
||||
|
||||
for category in categories {
|
||||
let slug_lower = category.slug.to_lowercase();
|
||||
let name_lower = category.name.to_lowercase();
|
||||
|
||||
for indicator in CLIENT_INDICATORS {
|
||||
if slug_lower.contains(indicator) || name_lower.contains(indicator) {
|
||||
client_score += 1;
|
||||
}
|
||||
}
|
||||
|
||||
for indicator in SERVER_INDICATORS {
|
||||
if slug_lower.contains(indicator) || name_lower.contains(indicator) {
|
||||
server_score += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only assign a specific side if there's clear indication
|
||||
// and not conflicting signals
|
||||
if client_score > 0 && server_score == 0 {
|
||||
ProjectSide::Client
|
||||
} else if server_score > 0 && client_score == 0 {
|
||||
ProjectSide::Server
|
||||
} else {
|
||||
// Default to Both - works on both client and server
|
||||
ProjectSide::Both
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_project(cf_project: CurseForgeProject) -> Project {
|
||||
let pakku_id = generate_pakku_id();
|
||||
let project_type = Self::map_class_id(cf_project.class_id.unwrap_or(6));
|
||||
|
||||
// Detect side from categories
|
||||
let side = Self::detect_side_from_categories(&cf_project.categories);
|
||||
|
||||
let mut project = Project::new(pakku_id, project_type, side);
|
||||
|
||||
project.add_platform(
|
||||
"curseforge".to_string(),
|
||||
cf_project.id.to_string(),
|
||||
cf_project.slug.clone(),
|
||||
cf_project.name,
|
||||
);
|
||||
|
||||
project.redistributable = false;
|
||||
project
|
||||
}
|
||||
|
||||
fn convert_file(cf_file: CurseForgeFile, project_id: &str) -> ProjectFile {
|
||||
let mut hashes = HashMap::new();
|
||||
|
||||
for hash in cf_file.hashes {
|
||||
hashes.insert(hash.algo.to_lowercase(), hash.value.clone());
|
||||
}
|
||||
|
||||
let mc_versions: Vec<String> = cf_file.game_versions.clone();
|
||||
|
||||
// Extract loaders from sortableGameVersions with LOADER_VERSION_TYPE_ID
|
||||
let loaders: Vec<String> = cf_file
|
||||
.sortable_game_versions
|
||||
.iter()
|
||||
.filter(|v| v.game_version_type_id == Some(LOADER_VERSION_TYPE_ID))
|
||||
.map(|v| v.game_version_name.to_lowercase())
|
||||
.collect();
|
||||
|
||||
ProjectFile {
|
||||
file_type: "mod".to_string(),
|
||||
file_name: cf_file.file_name.clone(),
|
||||
mc_versions,
|
||||
loaders,
|
||||
release_type: Self::map_release_type(cf_file.release_type.unwrap_or(1)),
|
||||
url: cf_file.download_url.clone().unwrap_or_else(|| {
|
||||
format!(
|
||||
"https://edge.forgecdn.net/files/{}/{}/{}",
|
||||
cf_file.id / 1000,
|
||||
cf_file.id % 1000,
|
||||
cf_file.file_name
|
||||
)
|
||||
}),
|
||||
id: cf_file.id.to_string(),
|
||||
parent_id: project_id.to_string(),
|
||||
hashes,
|
||||
required_dependencies: cf_file
|
||||
.dependencies
|
||||
.iter()
|
||||
.filter(|d| d.relation_type == DEPENDENCY_RELATION_TYPE_REQUIRED)
|
||||
.map(|d| d.mod_id.to_string())
|
||||
.collect(),
|
||||
size: cf_file.file_length,
|
||||
date_published: cf_file.file_date.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn search_project_by_slug(
|
||||
&self,
|
||||
slug: &str,
|
||||
) -> Result<CurseForgeProject> {
|
||||
let url = format!(
|
||||
"{CURSEFORGE_API_BASE}/mods/search?gameId=432&pageSize=1&sortField=6&\
|
||||
sortOrder=desc&slug={slug}"
|
||||
);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.headers(self.get_headers()?)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(Self::map_http_error(response.status(), slug));
|
||||
}
|
||||
|
||||
let result: CurseForgeSearchResponse = response.json().await?;
|
||||
|
||||
log::debug!(
|
||||
"CurseForge search for '{slug}' returned {} results (slugs: {:?})",
|
||||
result.data.len(),
|
||||
result.data.iter().map(|p| &p.slug).collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
let project = result
|
||||
.data
|
||||
.iter()
|
||||
.find(|p| p.slug == slug)
|
||||
.cloned()
|
||||
.or_else(|| result.data.first().cloned());
|
||||
|
||||
project.ok_or_else(|| PakkerError::ProjectNotFound(slug.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
impl PlatformClient for CurseForgePlatform {
    // Fetch a project by numeric mod ID (fast path) or slug (search
    // fallback). Version/loader filters are ignored here; they only apply
    // to file requests.
    async fn request_project(
        &self,
        identifier: &str,
        _mc_versions: &[String],
        _loaders: &[String],
    ) -> Result<Project> {
        if let Ok(mod_id) = identifier.parse::<u32>() {
            let url = format!("{CURSEFORGE_API_BASE}/mods/{mod_id}");

            let response = self
                .client
                .get(&url)
                .headers(self.get_headers()?)
                .send()
                .await?;

            if response.status().is_success() {
                let result: CurseForgeProjectResponse = response.json().await?;
                return Ok(Self::convert_project(result.data));
            }
            // Numeric IDs are not retried as slugs; a failed lookup is final.
            return Err(Self::map_http_error(response.status(), identifier));
        }

        let cf_project = self.search_project_by_slug(identifier).await?;
        Ok(Self::convert_project(cf_project))
    }

    // List a project's files, pushing MC-version and loader filters into
    // the API query so filtering happens server-side.
    async fn request_project_files(
        &self,
        project_id: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Vec<ProjectFile>> {
        let mut url = format!("{CURSEFORGE_API_BASE}/mods/{project_id}/files");

        // Add query parameters for server-side filtering (Pakku-compatible)
        let mut query_params = Vec::new();

        // Add gameVersionTypeId for each MC version (requires lookup)
        if !mc_versions.is_empty() {
            // Fetch game version type IDs
            // Add MC version gameVersionTypeId = 73250 for Minecraft versions
            for mc_version in mc_versions {
                query_params.push(("gameVersion", mc_version.clone()));
            }
            query_params.push(("gameVersionTypeId", "73250".to_string()));
        }

        // Add mod loader types
        if !loaders.is_empty() {
            let loader_str = loaders.join(",");
            query_params.push(("modLoaderTypes", loader_str));
        }

        // NOTE(review): values are not URL-encoded here; assumes versions
        // and loader names contain no reserved characters.
        if !query_params.is_empty() {
            let query_string = query_params
                .iter()
                .map(|(k, v)| format!("{k}={v}"))
                .collect::<Vec<_>>()
                .join("&");
            url = format!("{url}?{query_string}");
        }

        let response = self
            .client
            .get(&url)
            .headers(self.get_headers()?)
            .send()
            .await?;

        if !response.status().is_success() {
            return Err(Self::map_http_error(response.status(), project_id));
        }

        let result: CurseForgeFilesResponse = response.json().await?;

        let files: Vec<ProjectFile> = result
            .data
            .into_iter()
            .map(|f| Self::convert_file(f, project_id))
            .collect();

        Ok(files)
    }

    // Convenience: fetch the project, then attach its (filtered) files.
    async fn request_project_with_files(
        &self,
        identifier: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Project> {
        let mut project = self
            .request_project(identifier, mc_versions, loaders)
            .await?;
        // The file listing endpoint needs the numeric CurseForge ID, which
        // the project lookup recorded under the "curseforge" platform key.
        let project_id = project
            .get_platform_id("curseforge")
            .ok_or_else(|| {
                PakkerError::InternalError("Missing curseforge ID".to_string())
            })?
            .clone();

        let files = self
            .request_project_files(&project_id, mc_versions, loaders)
            .await?;
        project.files = files;

        Ok(project)
    }

    // Resolve a single file fingerprint to its project; `Ok(None)` when no
    // exact match exists.
    async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
        // CurseForge uses Murmur2 hash for file fingerprints
        let fingerprint = hash
            .parse::<u32>()
            .map_err(|_| PakkerError::InvalidHash(hash.to_string()))?;

        let url = format!("{CURSEFORGE_API_BASE}/fingerprints");
        let response = self
            .client
            .post(&url)
            .headers(self.get_headers()?)
            .json(&serde_json::json!({
                "fingerprints": [fingerprint]
            }))
            .send()
            .await?;

        if !response.status().is_success() {
            return Err(Self::map_http_error(response.status(), "lookup_by_hash"));
        }

        let response_data: serde_json::Value = response.json().await?;

        // Only the first exact match is considered.
        if let Some(matches) = response_data["data"]["exactMatches"].as_array()
            && let Some(first_match) = matches.first()
            && let Some(file) = first_match["file"].as_object()
        {
            let mod_id = file["modId"]
                .as_u64()
                .ok_or_else(|| {
                    PakkerError::InvalidResponse("Missing modId".to_string())
                })?
                .to_string();

            return self
                .request_project_with_files(&mod_id, &[], &[])
                .await
                .map(Some);
        }

        Ok(None)
    }

    // Slug lookup that distinguishes "not found" (Ok(None)) from real
    // API/transport errors.
    async fn request_project_from_slug(
        &self,
        slug: &str,
    ) -> Result<Option<Project>> {
        // Try to fetch project by slug using search API
        match self.search_project_by_slug(slug).await {
            Ok(cf_project) => Ok(Some(Self::convert_project(cf_project))),
            Err(PakkerError::ProjectNotFound(_)) => Ok(None),
            Err(e) => Err(e),
        }
    }

    /// Uses `CurseForge`'s `/fingerprints/432` endpoint to resolve projects by
    /// their hashes in batch.
    // Hashes must be Murmur2 fingerprints; non-numeric entries and the
    // `_algorithm` argument are ignored. After the batch fingerprint call,
    // each distinct mod still costs one project+files request (N+1 pattern).
    async fn request_projects_from_hashes(
        &self,
        hashes: &[String],
        _algorithm: &str,
    ) -> Result<Vec<Project>> {
        #[derive(Serialize)]
        struct FingerprintRequest {
            fingerprints: Vec<u32>,
        }

        if hashes.is_empty() {
            return Ok(Vec::new());
        }

        let fingerprints: Vec<u32> = hashes
            .iter()
            .filter_map(|h| h.parse::<u32>().ok())
            .collect();

        if fingerprints.is_empty() {
            return Ok(Vec::new());
        }

        let url = format!("{CURSEFORGE_API_BASE}/fingerprints/432");
        let response = self
            .client
            .post(&url)
            .headers(self.get_headers()?)
            .json(&FingerprintRequest {
                fingerprints: fingerprints.clone(),
            })
            .send()
            .await?;

        if !response.status().is_success() {
            return Err(PakkerError::PlatformApiError(format!(
                "CurseForge batch API error: {}",
                response.status()
            )));
        }

        let response_data: serde_json::Value = response.json().await?;

        let matches = response_data["data"]["exactMatches"]
            .as_array()
            .cloned()
            .unwrap_or_default();

        let mut projects = Vec::new();
        // Deduplicate mods that match multiple fingerprints.
        let mut seen_ids = std::collections::HashSet::new();

        for m in matches {
            if let Some(file) = m["file"].as_object()
                && let Some(mod_id) = file["modId"].as_u64()
            {
                let mod_id_str = mod_id.to_string();
                if seen_ids.contains(&mod_id_str) {
                    continue;
                }
                seen_ids.insert(mod_id_str.clone());

                // Failures on individual mods are silently skipped —
                // best-effort batch semantics.
                if let Ok(project) =
                    self.request_project_with_files(&mod_id_str, &[], &[]).await
                {
                    projects.push(project);
                }
            }
        }

        Ok(projects)
    }
}
|
||||
|
||||
// CurseForge API models

/// Subset of a CurseForge "mod" object as returned by the v1 API.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeProject {
    id: u32,
    name: String,
    /// URL-friendly identifier used for slug-based lookups.
    slug: String,
    /// Top-level class ID (e.g. 6 = mod, 12 = resource pack, per the
    /// `map_class_id` tests below); absent for some entries.
    #[serde(rename = "classId")]
    class_id: Option<u32>,
    /// Categories feed the client/server side-detection heuristics.
    #[serde(default)]
    categories: Vec<CurseForgeCategory>,
}
|
||||
|
||||
/// A CurseForge category attached to a project; name and slug are matched
/// against keyword lists when detecting the project side.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeCategory {
    id: u32,
    name: String,
    slug: String,
}
|
||||
|
||||
/// One entry of a file's `sortableGameVersions` list.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeGameVersion {
    #[serde(rename = "gameVersionName")]
    game_version_name: String,
    /// NOTE(review): appears to distinguish game versions from loader
    /// entries — confirm against the CurseForge API docs.
    #[serde(rename = "gameVersionTypeId")]
    game_version_type_id: Option<i32>,
}
|
||||
|
||||
/// A single file hash with its algorithm identifier as reported by the API.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeHash {
    algo: String,
    value: String,
}
|
||||
|
||||
/// Dependency edge from a file to another mod.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeDependency {
    #[serde(rename = "modId")]
    mod_id: u32,
    /// Raw relation code (required/optional/etc.) from the API.
    #[serde(rename = "relationType")]
    relation_type: u32,
}
|
||||
|
||||
/// Envelope wrapping a single project response (`{ "data": ... }`).
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeProjectResponse {
    data: CurseForgeProject,
}
|
||||
|
||||
/// Subset of a CurseForge file object — one downloadable artifact of a mod.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeFile {
    id: u32,
    #[serde(rename = "fileName")]
    file_name: String,
    /// May be absent in API responses, hence `Option`.
    #[serde(rename = "downloadUrl")]
    download_url: Option<String>,
    /// NOTE(review): appears to mix game versions and loader names as plain
    /// strings — verify against the API before relying on it.
    #[serde(rename = "gameVersions")]
    game_versions: Vec<String>,
    #[serde(rename = "sortableGameVersions")]
    sortable_game_versions: Vec<CurseForgeGameVersion>,
    /// 1 = release, 2 = beta, 3 = alpha (see the `map_release_type` tests).
    #[serde(rename = "releaseType")]
    release_type: Option<u32>,
    /// File size in bytes.
    #[serde(rename = "fileLength")]
    file_length: u64,
    #[serde(rename = "fileDate")]
    file_date: String,
    hashes: Vec<CurseForgeHash>,
    dependencies: Vec<CurseForgeDependency>,
}
|
||||
|
||||
/// Envelope wrapping a list-of-files response (`{ "data": [...] }`).
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeFilesResponse {
    data: Vec<CurseForgeFile>,
}
|
||||
|
||||
/// Envelope wrapping a search response (`{ "data": [...] }`).
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeSearchResponse {
    data: Vec<CurseForgeProject>,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a minimal category fixture for the side-detection tests.
    fn make_category(id: u32, name: &str, slug: &str) -> CurseForgeCategory {
        CurseForgeCategory {
            id,
            name: name.to_string(),
            slug: slug.to_string(),
        }
    }

    #[test]
    fn test_detect_side_client_only() {
        // HUD mod should be client-only
        let categories = vec![
            make_category(1, "HUD Mods", "hud"),
            make_category(2, "Fabric", "fabric"),
        ];
        let side = CurseForgePlatform::detect_side_from_categories(&categories);
        assert_eq!(side, ProjectSide::Client);
    }

    #[test]
    fn test_detect_side_server_only() {
        // Server utility should be server-only
        let categories = vec![
            make_category(1, "Server Utility", "server-utility"),
            make_category(2, "Bukkit Plugins", "bukkit"),
        ];
        let side = CurseForgePlatform::detect_side_from_categories(&categories);
        assert_eq!(side, ProjectSide::Server);
    }

    #[test]
    fn test_detect_side_both() {
        // Generic mod categories should be both
        let categories = vec![
            make_category(1, "Technology", "technology"),
            make_category(2, "Fabric", "fabric"),
        ];
        let side = CurseForgePlatform::detect_side_from_categories(&categories);
        assert_eq!(side, ProjectSide::Both);
    }

    #[test]
    fn test_detect_side_conflicting_signals() {
        // Mixed categories should default to both
        let categories = vec![
            make_category(1, "Client HUD", "client-hud"),
            make_category(2, "Server Utility", "server-utility"),
        ];
        let side = CurseForgePlatform::detect_side_from_categories(&categories);
        assert_eq!(side, ProjectSide::Both);
    }

    #[test]
    fn test_detect_side_empty_categories() {
        // No categories means no signal: fall back to both sides.
        let categories = vec![];
        let side = CurseForgePlatform::detect_side_from_categories(&categories);
        assert_eq!(side, ProjectSide::Both);
    }

    #[test]
    fn test_detect_side_gui_client() {
        // GUI-related categories imply client-only.
        let categories =
            vec![make_category(1, "GUI Enhancement", "gui-enhancement")];
        let side = CurseForgePlatform::detect_side_from_categories(&categories);
        assert_eq!(side, ProjectSide::Client);
    }

    #[test]
    fn test_detect_side_permissions_server() {
        // Permission management is a server-side concern.
        let categories = vec![make_category(1, "Permissions", "permissions")];
        let side = CurseForgePlatform::detect_side_from_categories(&categories);
        assert_eq!(side, ProjectSide::Server);
    }

    #[test]
    fn test_map_class_id() {
        // Known CurseForge class IDs map to concrete project types;
        // anything unrecognized falls back to Mod.
        assert_eq!(CurseForgePlatform::map_class_id(6), ProjectType::Mod);
        assert_eq!(
            CurseForgePlatform::map_class_id(12),
            ProjectType::ResourcePack
        );
        assert_eq!(
            CurseForgePlatform::map_class_id(6945),
            ProjectType::DataPack
        );
        assert_eq!(CurseForgePlatform::map_class_id(6552), ProjectType::Shader);
        assert_eq!(CurseForgePlatform::map_class_id(17), ProjectType::World);
        assert_eq!(CurseForgePlatform::map_class_id(9999), ProjectType::Mod); // Unknown
    }

    #[test]
    fn test_map_release_type() {
        // 1 = release, 2 = beta, 3 = alpha; unknown codes default to release.
        assert_eq!(
            CurseForgePlatform::map_release_type(1),
            ReleaseType::Release
        );
        assert_eq!(CurseForgePlatform::map_release_type(2), ReleaseType::Beta);
        assert_eq!(CurseForgePlatform::map_release_type(3), ReleaseType::Alpha);
        assert_eq!(
            CurseForgePlatform::map_release_type(99),
            ReleaseType::Release
        ); // Unknown
    }
}
|
||||
608
crates/pakker-core/src/platform/github.rs
Normal file
608
crates/pakker-core/src/platform/github.rs
Normal file
|
|
@ -0,0 +1,608 @@
|
|||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, LazyLock},
|
||||
};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::traits::PlatformClient;
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType},
|
||||
utils::generate_pakku_id,
|
||||
};
|
||||
|
||||
/// Base URL of the GitHub REST API.
const GITHUB_API_BASE: &str = "https://api.github.com";
|
||||
|
||||
/// Platform client backed by the GitHub REST API (releases + code search).
pub struct GitHubPlatform {
    client: Client,
    /// Optional bearer token sent in the `Authorization` header when set.
    token: Option<String>,
}
|
||||
|
||||
impl GitHubPlatform {
    /// Creates a platform client that reuses an existing shared HTTP client,
    /// optionally carrying a GitHub token.
    pub fn with_client(client: &Arc<Client>, token: Option<String>) -> Self {
        Self {
            client: (**client).clone(),
            token,
        }
    }

    /// Builds the header set sent with every GitHub API request: a static
    /// `User-Agent` plus, when a token is configured, a bearer
    /// `Authorization` header.
    ///
    /// Fails with `ConfigError` if the token contains characters that are
    /// not valid in an HTTP header value.
    fn get_headers(&self) -> Result<reqwest::header::HeaderMap> {
        let mut headers = reqwest::header::HeaderMap::new();
        headers.insert(
            reqwest::header::USER_AGENT,
            reqwest::header::HeaderValue::from_static("Pakker"),
        );

        if let Some(token) = &self.token {
            headers.insert(
                reqwest::header::AUTHORIZATION,
                reqwest::header::HeaderValue::from_str(&format!("Bearer {token}"))
                    .map_err(|_| {
                        PakkerError::ConfigError("Invalid GitHub token".to_string())
                    })?,
            );
        }

        Ok(headers)
    }

    /// Splits a repository identifier into `(owner, repo)`.
    ///
    /// Extra path segments after `owner/repo` are ignored; a trailing
    /// `.git` suffix is stripped.
    fn parse_repo_identifier(identifier: &str) -> Result<(String, String)> {
        // Expected formats:
        // - "owner/repo"
        // - "github:owner/repo"
        // - "https://github.com/owner/repo"

        let identifier = identifier
            .trim_start_matches("github:")
            .trim_start_matches("https://github.com/")
            .trim_start_matches("http://github.com/")
            .trim_end_matches(".git");

        let parts: Vec<&str> = identifier.split('/').collect();
        if parts.len() >= 2 {
            Ok((parts[0].to_string(), parts[1].to_string()))
        } else {
            Err(PakkerError::InvalidInput(format!(
                "Invalid GitHub repository identifier: {identifier}"
            )))
        }
    }

    /// Builds a `Project` from a GitHub release.
    ///
    /// GitHub carries no type/side metadata, so the project defaults to
    /// `Mod` / `Both`; the platform ID and slug are both `owner/repo`, and
    /// the display name falls back to the repo name when the release is
    /// unnamed. The release's assets are not attached here.
    fn convert_release(
        owner: &str,
        repo: &str,
        release: GitHubRelease,
    ) -> Project {
        let pakku_id = generate_pakku_id();
        let mut project =
            Project::new(pakku_id, ProjectType::Mod, ProjectSide::Both);

        let repo_full = format!("{owner}/{repo}");
        project.add_platform(
            "github".to_string(),
            repo_full.clone(),
            repo_full,
            release.name.unwrap_or_else(|| repo.to_string()),
        );

        project
    }
}
|
||||
|
||||
#[expect(clippy::expect_used, reason = "regex literal is always valid")]
// Captures a dotted version ("1.20" or "1.20.4") in group 1, requiring that
// it is not embedded inside a longer dotted/numeric run on either side.
static MC_VERSION_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?:^|[^\d.])(\d+\.\d+(?:\.\d+)?)(?:[^\d]|$)")
        .expect("MC_VERSION_RE pattern is valid")
});
|
||||
|
||||
// Helper functions for extracting metadata from GitHub releases
|
||||
fn extract_mc_versions(tag: &str, asset_name: &str) -> Vec<String> {
|
||||
let re = &*MC_VERSION_RE;
|
||||
let mut versions = Vec::new();
|
||||
|
||||
log::debug!("Extracting MC versions from tag='{tag}', asset='{asset_name}'");
|
||||
|
||||
for text in &[tag, asset_name] {
|
||||
for cap in re.captures_iter(text) {
|
||||
if let Some(version) = cap.get(1) {
|
||||
let v = version.as_str().to_string();
|
||||
if !versions.contains(&v) {
|
||||
log::debug!(" Found MC version: {v}");
|
||||
versions.push(v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::debug!("Extracted MC versions: {versions:?}");
|
||||
versions
|
||||
}
|
||||
|
||||
fn extract_loaders(tag: &str, asset_name: &str) -> Vec<String> {
|
||||
let mut loaders = Vec::new();
|
||||
let text = format!("{} {}", tag.to_lowercase(), asset_name.to_lowercase());
|
||||
|
||||
log::debug!("Extracting loaders from: '{text}'");
|
||||
|
||||
if text.contains("fabric") {
|
||||
log::debug!(" Found loader: fabric");
|
||||
loaders.push("fabric".to_string());
|
||||
}
|
||||
if text.contains("forge") && !text.contains("neoforge") {
|
||||
log::debug!(" Found loader: forge");
|
||||
loaders.push("forge".to_string());
|
||||
}
|
||||
if text.contains("neoforge") {
|
||||
log::debug!(" Found loader: neoforge");
|
||||
loaders.push("neoforge".to_string());
|
||||
}
|
||||
if text.contains("quilt") {
|
||||
log::debug!(" Found loader: quilt");
|
||||
loaders.push("quilt".to_string());
|
||||
}
|
||||
|
||||
log::debug!("Extracted loaders: {loaders:?}");
|
||||
loaders
|
||||
}
|
||||
|
||||
fn detect_project_type(asset_name: &str, repo_name: &str) -> ProjectType {
|
||||
let name_lower = asset_name.to_lowercase();
|
||||
let repo_lower = repo_name.to_lowercase();
|
||||
|
||||
// Check for resourcepack indicators
|
||||
if name_lower.contains("resourcepack")
|
||||
|| name_lower.contains("resource-pack")
|
||||
|| name_lower.contains("texture")
|
||||
|| repo_lower.contains("resourcepack")
|
||||
|| repo_lower.contains("texture")
|
||||
{
|
||||
return ProjectType::ResourcePack;
|
||||
}
|
||||
|
||||
// Check for datapack indicators
|
||||
if name_lower.contains("datapack")
|
||||
|| name_lower.contains("data-pack")
|
||||
|| repo_lower.contains("datapack")
|
||||
{
|
||||
return ProjectType::DataPack;
|
||||
}
|
||||
|
||||
// Check for shader indicators
|
||||
if name_lower.contains("shader") || repo_lower.contains("shader") {
|
||||
return ProjectType::Shader;
|
||||
}
|
||||
|
||||
// Check for world/save indicators
|
||||
if name_lower.contains("world")
|
||||
|| name_lower.contains("save")
|
||||
|| repo_lower.contains("world")
|
||||
{
|
||||
return ProjectType::World;
|
||||
}
|
||||
|
||||
// Default to mod for .jar files
|
||||
ProjectType::Mod
|
||||
}
|
||||
|
||||
impl GitHubPlatform {
    /// Converts one GitHub release asset into a `ProjectFile`.
    ///
    /// MC versions, loaders, and project type are inferred heuristically
    /// from the tag and asset name. GitHub release assets carry no hash
    /// metadata, so `hashes` stays empty; dependencies are unknown and
    /// left empty as well. Pre-releases map to `Beta`, everything else to
    /// `Release`.
    fn convert_asset(
        asset: &GitHubAsset,
        release: &GitHubRelease,
        repo_id: &str,
        repo_name: &str,
    ) -> ProjectFile {
        let hashes = HashMap::new();

        // Extract MC versions and loaders from tag and asset name
        let mc_versions = extract_mc_versions(&release.tag_name, &asset.name);
        let loaders = extract_loaders(&release.tag_name, &asset.name);

        // Detect project type from asset name and repo
        let file_type = match detect_project_type(&asset.name, repo_name) {
            ProjectType::Mod => "mod",
            ProjectType::ResourcePack => "resourcepack",
            ProjectType::DataPack => "datapack",
            ProjectType::Shader => "shader",
            ProjectType::World => "world",
        };

        ProjectFile {
            file_type: file_type.to_string(),
            file_name: asset.name.clone(),
            mc_versions,
            loaders,
            release_type: if release.prerelease {
                ReleaseType::Beta
            } else {
                ReleaseType::Release
            },
            url: asset.browser_download_url.clone(),
            id: asset.id.to_string(),
            parent_id: repo_id.to_string(),
            hashes,
            required_dependencies: vec![],
            size: asset.size,
            date_published: release.published_at.clone().unwrap_or_default(),
        }
    }

    /// Fetches the repository's latest (non-draft, non-prerelease) release
    /// via `GET /repos/{owner}/{repo}/releases/latest`.
    ///
    /// Any non-success status is reported as `ProjectNotFound`.
    async fn get_latest_release(
        &self,
        owner: &str,
        repo: &str,
    ) -> Result<GitHubRelease> {
        let url = format!("{GITHUB_API_BASE}/repos/{owner}/{repo}/releases/latest");

        let response = self
            .client
            .get(&url)
            .headers(self.get_headers()?)
            .send()
            .await?;

        if !response.status().is_success() {
            return Err(PakkerError::ProjectNotFound(format!("{owner}/{repo}")));
        }

        let release: GitHubRelease = response.json().await?;
        Ok(release)
    }

    /// Fetches the repository's releases via
    /// `GET /repos/{owner}/{repo}/releases`.
    ///
    /// NOTE(review): this reads a single page only — repos with more
    /// releases than GitHub's default page size will be truncated. Confirm
    /// whether pagination is needed.
    async fn get_all_releases(
        &self,
        owner: &str,
        repo: &str,
    ) -> Result<Vec<GitHubRelease>> {
        let url = format!("{GITHUB_API_BASE}/repos/{owner}/{repo}/releases");

        let response = self
            .client
            .get(&url)
            .headers(self.get_headers()?)
            .send()
            .await?;

        if !response.status().is_success() {
            return Err(PakkerError::ProjectNotFound(format!("{owner}/{repo}")));
        }

        let releases: Vec<GitHubRelease> = response.json().await?;
        Ok(releases)
    }
}
|
||||
|
||||
#[async_trait]
impl PlatformClient for GitHubPlatform {
    /// Resolves `identifier` to a project built from the repo's latest
    /// release. MC-version/loader filters are not applicable to GitHub and
    /// are ignored.
    async fn request_project(
        &self,
        identifier: &str,
        _mc_versions: &[String],
        _loaders: &[String],
    ) -> Result<Project> {
        let (owner, repo) = Self::parse_repo_identifier(identifier)?;
        let release = self.get_latest_release(&owner, &repo).await?;
        Ok(Self::convert_release(&owner, &repo, release))
    }

    /// Lists `.jar`/`.zip` assets across all of the repo's releases as
    /// project files. Filters are ignored (no structured metadata on
    /// GitHub).
    async fn request_project_files(
        &self,
        project_id: &str,
        _mc_versions: &[String],
        _loaders: &[String],
    ) -> Result<Vec<ProjectFile>> {
        let (owner, repo) = Self::parse_repo_identifier(project_id)?;
        let releases = self.get_all_releases(&owner, &repo).await?;

        let mut files = Vec::new();

        for release in releases {
            for asset in &release.assets {
                // Filter for .jar files (mods) or .zip files (modpacks)
                if std::path::Path::new(&asset.name)
                    .extension()
                    .is_some_and(|ext| ext.eq_ignore_ascii_case("jar"))
                    || std::path::Path::new(&asset.name)
                        .extension()
                        .is_some_and(|ext| ext.eq_ignore_ascii_case("zip"))
                {
                    // NOTE(review): the raw `project_id` (possibly a full
                    // URL) becomes each file's parent_id, not the
                    // normalized "owner/repo" — confirm this is intended.
                    let file = Self::convert_asset(asset, &release, project_id, &repo);
                    files.push(file);
                }
            }
        }

        Ok(files)
    }

    /// Fetches the project and attaches all of its release files.
    async fn request_project_with_files(
        &self,
        identifier: &str,
        _mc_versions: &[String],
        _loaders: &[String],
    ) -> Result<Project> {
        let mut project = self
            .request_project(identifier, _mc_versions, _loaders)
            .await?;

        let project_id = project
            .get_platform_id("github")
            .ok_or_else(|| {
                PakkerError::InternalError("Missing github ID".to_string())
            })?
            .clone();

        let files = self
            .request_project_files(&project_id, _mc_versions, _loaders)
            .await?;

        project.files = files;

        Ok(project)
    }

    /// Best-effort hash lookup via GitHub's code-search API.
    ///
    /// Every failure mode (network error, rate limit, non-success status,
    /// parse failure, no match) is swallowed into `Ok(None)` so that hash
    /// resolution can fall through to other platforms.
    async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
        log::debug!("GitHub lookup_by_hash: searching for hash={hash}");

        // GitHub Code Search API: search for files containing the hash
        // Note: This is rate-limited (10 req/min without auth, 30 req/min with
        // auth)
        let url = format!("{GITHUB_API_BASE}/search/code?q={hash}+in:file");
        log::debug!("GitHub search URL: {url}");

        let response = match self
            .client
            .get(&url)
            .headers(self.get_headers()?)
            .send()
            .await
        {
            Ok(resp) => {
                log::debug!("GitHub search response status: {}", resp.status());
                resp
            },
            Err(e) => {
                log::warn!("GitHub hash lookup failed: {e}");
                return Ok(None);
            },
        };

        // Handle rate limiting gracefully
        if response.status().as_u16() == 403 {
            log::warn!("GitHub API rate limit exceeded for hash lookup");
            return Ok(None);
        }

        if !response.status().is_success() {
            log::debug!(
                "GitHub search returned non-success status: {}",
                response.status()
            );
            return Ok(None);
        }

        let search_result: GitHubCodeSearchResult = match response.json().await {
            Ok(result) => result,
            Err(e) => {
                log::warn!("Failed to parse GitHub search result: {e}");
                return Ok(None);
            },
        };

        log::debug!("GitHub search found {} items", search_result.items.len());

        // If we found matches, try to extract repo info from first result
        if let Some(item) = search_result.items.first() {
            let repo_full = item.repository.full_name.clone();
            log::info!("GitHub hash lookup found match in repo: {repo_full}");

            // Try to get the latest release for this repo
            match self.request_project(&repo_full, &[], &[]).await {
                Ok(project) => {
                    log::info!("GitHub hash lookup succeeded for {repo_full}");
                    Ok(Some(project))
                },
                Err(e) => {
                    log::warn!("Failed to fetch project for {repo_full}: {e}");
                    Ok(None)
                },
            }
        } else {
            log::debug!("GitHub hash lookup found no matches");
            Ok(None)
        }
    }

    /// For GitHub, a "slug" is just an `owner/repo` identifier; delegates
    /// to `request_project`, mapping not-found to `Ok(None)`.
    async fn request_project_from_slug(
        &self,
        slug: &str,
    ) -> Result<Option<Project>> {
        match self.request_project(slug, &[], &[]).await {
            Ok(project) => Ok(Some(project)),
            Err(PakkerError::ProjectNotFound(_)) => Ok(None),
            Err(e) => Err(e),
        }
    }

    /// GitHub does not support hash-based batch lookup. Returns an empty list.
    async fn request_projects_from_hashes(
        &self,
        hashes: &[String],
        algorithm: &str,
    ) -> Result<Vec<Project>> {
        log::debug!(
            "GitHub does not support batch hash lookup ({} hashes, algorithm={})",
            hashes.len(),
            algorithm
        );
        Ok(Vec::new())
    }
}
|
||||
|
||||
// GitHub API models

/// Subset of a GitHub release object.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct GitHubRelease {
    id: u64,
    /// Git tag; mined for MC versions and loader names.
    tag_name: String,
    /// Human-readable release title; may be absent.
    name: Option<String>,
    /// Pre-releases map to `ReleaseType::Beta` during conversion.
    prerelease: bool,
    published_at: Option<String>,
    assets: Vec<GitHubAsset>,
}
|
||||
|
||||
/// A downloadable file attached to a GitHub release.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct GitHubAsset {
    id: u64,
    name: String,
    browser_download_url: String,
    /// Size in bytes.
    size: u64,
}
|
||||
|
||||
/// Top-level payload of a `GET /search/code` response.
#[derive(Debug, Deserialize)]
struct GitHubCodeSearchResult {
    items: Vec<GitHubCodeSearchItem>,
}
|
||||
|
||||
/// One code-search hit; only the owning repository is of interest.
#[derive(Debug, Deserialize)]
struct GitHubCodeSearchItem {
    repository: GitHubRepository,
}
|
||||
|
||||
/// Minimal repository reference ("owner/repo").
#[derive(Debug, Deserialize)]
struct GitHubRepository {
    full_name: String,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_repo_identifier() {
        // All supported identifier spellings normalize to (owner, repo).
        let cases = vec![
            ("owner/repo", ("owner", "repo")),
            ("github:owner/repo", ("owner", "repo")),
            ("https://github.com/owner/repo", ("owner", "repo")),
            ("https://github.com/owner/repo.git", ("owner", "repo")),
        ];

        for (input, (expected_owner, expected_repo)) in cases {
            let (owner, repo) = GitHubPlatform::parse_repo_identifier(input).unwrap();
            assert_eq!(owner, expected_owner);
            assert_eq!(repo, expected_repo);
        }
    }

    #[test]
    fn test_parse_repo_identifier_invalid() {
        // A bare name without a slash cannot yield (owner, repo).
        let result = GitHubPlatform::parse_repo_identifier("invalid");
        assert!(result.is_err());
    }

    #[test]
    fn test_extract_mc_versions() {
        // (tag, asset_name, expected candidate versions in first-seen order)
        let cases = vec![
            ("1.20.4-forge-1.0.0", "", vec!["1.20.4", "1.0.0"]),
            ("fabric-1.21-1.0.0", "", vec!["1.21"]),
            ("mc1.20.4", "", vec!["1.20.4"]),
            ("1.20.1-1.20.2", "", vec!["1.20.1"]),
            ("mymod-1.0.0", "", vec!["1.0.0"]),
            ("mc1.20.4-v1.0.0", "", vec!["1.20.4", "1.0.0"]),
            ("v1.0.0", "mymod-1.20.4.jar", vec!["1.0.0", "1.20.4"]),
            ("1.20.1-47.1.0", "", vec!["1.20.1"]),
            ("v0.5.1+1.20.1", "", vec!["0.5.1"]),
            ("1.20.4-1.0.0+fabric", "", vec!["1.20.4"]),
            ("mc1.19.2-v2.1.3", "", vec!["1.19.2", "2.1.3"]),
            ("1.20-Snapshot", "", vec!["1.20"]),
            ("v3.0.0-beta.2+mc1.20.4", "", vec!["3.0.0", "1.20.4"]),
            ("1.16.5-1.0", "", vec!["1.16.5"]),
            ("forge-1.20.1-47.2.0", "", vec!["1.20.1"]),
            ("1.20.2-neoforge-20.2.59", "", vec!["1.20.2", "20.2.59"]),
            ("release-1.20.1", "", vec!["1.20.1"]),
            ("1.19.4_v2.5.0", "", vec!["1.19.4", "2.5.0"]),
            ("MC1.18.2-v1.0.0", "", vec!["1.18.2", "1.0.0"]),
            ("1.20.1-forge-v1.2.3", "", vec!["1.20.1", "1.2.3"]),
            ("Minecraft_1.19.2-v0.8.1", "", vec!["1.19.2", "0.8.1"]),
            ("build-1.20.4-2.1.0", "", vec!["1.20.4"]),
            ("1.20.x-1.5.0", "", vec!["1.20", "1.5.0"]),
            ("1.12.2-14.23.5.2859", "", vec!["1.12.2"]),
        ];

        for (tag, asset, expected) in cases {
            let result = extract_mc_versions(tag, asset);
            assert_eq!(result, expected, "Failed for tag: {tag}, asset: {asset}");
        }
    }

    #[test]
    fn test_extract_loaders() {
        let cases = vec![
            ("1.20.4-forge-1.0.0", "", vec!["forge"]),
            ("fabric-1.21-1.0.0", "", vec!["fabric"]),
            ("1.20.1-neoforge", "", vec!["neoforge"]),
            ("quilt-1.20.4", "", vec!["quilt"]),
            ("mymod-1.0.0", "", vec![]),
            // fabric before forge: fixed detection order
            ("1.20.4-forge-fabric", "", vec!["fabric", "forge"]),
            ("v1.0.0", "mymod-fabric-1.20.4.jar", vec!["fabric"]),
            // Real-world patterns
            ("1.20.1-forge-47.1.0", "", vec!["forge"]),
            ("fabric-api-0.92.0+1.20.4", "", vec!["fabric"]),
            ("1.19.2-neoforge-20.2.59", "", vec!["neoforge"]),
            ("quilt-loader-0.23.0", "", vec!["quilt"]),
            ("1.20.4-Fabric-1.0.0", "", vec!["fabric"]), // Capitalized
            ("forge-1.20.1", "", vec!["forge"]),
            ("v1.0.0-fabric", "", vec!["fabric"]),
            ("1.18.2-forge+fabric", "", vec!["fabric", "forge"]), // Both loaders
            ("NeoForge-1.20.2", "", vec!["neoforge"]), // Capitalized NeoForge
            ("1.12.2-forge-14.23.5.2859", "", vec!["forge"]), // Old format
        ];

        for (tag, asset, expected) in cases {
            let result = extract_loaders(tag, asset);
            assert_eq!(result, expected, "Failed for tag: {tag}, asset: {asset}");
        }
    }

    #[test]
    fn test_detect_project_type() {
        let cases = vec![
            ("mymod.jar", "mymod", crate::model::ProjectType::Mod),
            (
                "texture-pack.zip",
                "texture",
                crate::model::ProjectType::ResourcePack,
            ),
            (
                "resourcepack.zip",
                "resources",
                crate::model::ProjectType::ResourcePack,
            ),
            (
                "datapack.zip",
                "data-stuff",
                crate::model::ProjectType::DataPack,
            ),
            (
                "shader.zip",
                "awesome-shaders",
                crate::model::ProjectType::Shader,
            ),
            ("world.zip", "my-world", crate::model::ProjectType::World),
            ("save.zip", "survival", crate::model::ProjectType::World),
            ("unknown.zip", "stuff", crate::model::ProjectType::Mod),
        ];

        for (filename, repo_name, expected) in cases {
            let result = detect_project_type(filename, repo_name);
            // Fixed: interpolate the failing filename (previously the
            // literal text "(unknown)" was printed instead).
            assert_eq!(
                result, expected,
                "Failed for filename: {filename}, repo: {repo_name}"
            );
        }
    }
}
|
||||
505
crates/pakker-core/src/platform/modrinth.rs
Normal file
505
crates/pakker-core/src/platform/modrinth.rs
Normal file
|
|
@ -0,0 +1,505 @@
|
|||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::traits::PlatformClient;
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType},
|
||||
utils::generate_pakku_id,
|
||||
};
|
||||
|
||||
/// Base URL of the Modrinth REST API, version 2.
const MODRINTH_API_BASE: &str = "https://api.modrinth.com/v2";
|
||||
|
||||
/// Platform client backed by the Modrinth v2 API (no auth required).
pub struct ModrinthPlatform {
    client: Arc<Client>,
}
|
||||
|
||||
impl ModrinthPlatform {
    /// Creates a client with its own HTTP connection pool.
    // NOTE(review): consider also implementing `Default` delegating here
    // (clippy::new_without_default).
    pub fn new() -> Self {
        Self {
            client: Arc::new(Client::new()),
        }
    }

    /// Creates a client that shares an existing HTTP connection pool.
    pub const fn with_client(client: Arc<Client>) -> Self {
        Self { client }
    }

    /// Fetches and converts a single project from a fully-built API URL.
    /// Any non-success status maps to `ProjectNotFound`.
    async fn request_project_url(&self, url: &str) -> Result<Project> {
        let response = self.client.get(url).send().await?;
        if !response.status().is_success() {
            return Err(PakkerError::ProjectNotFound(url.to_string()));
        }
        let mr_project: ModrinthProject = response.json().await?;
        Ok(Self::convert_project(mr_project))
    }

    /// Fetches a project's version list from a fully-built API URL and
    /// converts each version into a `ProjectFile`.
    ///
    /// The parent project ID is recovered from the URL itself: segment 4 of
    /// "https://api.modrinth.com/v2/project/{id}/version" (0-indexed after
    /// splitting on '/').
    async fn request_project_files_url(
        &self,
        url: &str,
    ) -> Result<Vec<ProjectFile>> {
        let response = self.client.get(url).send().await?;
        if !response.status().is_success() {
            return Err(PakkerError::ProjectNotFound(url.to_string()));
        }
        let mr_versions: Vec<ModrinthVersion> = response.json().await?;
        let project_id = url
            .split('/')
            .nth(4)
            .ok_or_else(|| {
                PakkerError::InvalidResponse(
                    "Cannot parse project ID from URL".to_string(),
                )
            })?
            .to_string();
        Ok(
            mr_versions
                .iter()
                .map(|v| Self::convert_version(v, &project_id))
                .collect(),
        )
    }

    /// Looks up a version by file hash via a fully-built API URL, then
    /// fetches the owning project with all its files.
    ///
    /// 404 means "unknown hash" and yields `Ok(None)`; other non-success
    /// statuses are surfaced as `PlatformApiError`.
    async fn lookup_by_hash_url(&self, url: &str) -> Result<Option<Project>> {
        let response = self.client.get(url).send().await?;
        if response.status().as_u16() == 404 {
            return Ok(None);
        }
        if !response.status().is_success() {
            return Err(PakkerError::PlatformApiError(format!(
                "Modrinth API error: {}",
                response.status()
            )));
        }
        let version_data: serde_json::Value = response.json().await?;
        let project_id = version_data["project_id"].as_str().ok_or_else(|| {
            PakkerError::InvalidResponse("Missing project_id".to_string())
        })?;
        self
            .request_project_with_files(project_id, &[], &[])
            .await
            .map(Some)
    }

    /// Maps Modrinth's `project_type` string to our enum; unknown strings
    /// default to `Mod`.
    fn map_project_type(type_str: &str) -> ProjectType {
        match type_str {
            "resourcepack" => ProjectType::ResourcePack,
            "datapack" => ProjectType::DataPack,
            "shader" => ProjectType::Shader,
            _ => ProjectType::Mod,
        }
    }

    /// Maps client/server support flags to a side; anything ambiguous
    /// (both or neither) is treated as `Both`.
    const fn map_side(client: bool, server: bool) -> ProjectSide {
        match (client, server) {
            (true, false) => ProjectSide::Client,
            (false, true) => ProjectSide::Server,
            _ => ProjectSide::Both,
        }
    }

    /// Maps Modrinth's `version_type` string; unknown strings default to
    /// `Release`.
    fn map_release_type(type_str: &str) -> ReleaseType {
        match type_str {
            "beta" => ReleaseType::Beta,
            "alpha" => ReleaseType::Alpha,
            _ => ReleaseType::Release,
        }
    }

    /// Builds a `Project` (without files) from a Modrinth project payload.
    /// A side counts as supported unless explicitly "unsupported".
    fn convert_project(mr_project: ModrinthProject) -> Project {
        let pakku_id = generate_pakku_id();
        let mut project = Project::new(
            pakku_id,
            Self::map_project_type(&mr_project.project_type),
            Self::map_side(
                mr_project.client_side != "unsupported",
                mr_project.server_side != "unsupported",
            ),
        );

        project.add_platform(
            "modrinth".to_string(),
            mr_project.id.clone(),
            mr_project.slug.clone(),
            mr_project.title,
        );

        project
    }

    #[expect(
        clippy::expect_used,
        reason = "Modrinth API guarantees every version has at least one file"
    )]
    /// Converts one Modrinth version into a `ProjectFile`, using the
    /// version's primary file (falling back to the first file listed).
    ///
    /// NOTE(review): `file_type` is hard-coded to "mod" even for resource
    /// packs/shaders — confirm whether it should reflect the project type.
    fn convert_version(
        mr_version: &ModrinthVersion,
        project_id: &str,
    ) -> ProjectFile {
        let mut hashes = HashMap::new();

        // Get primary file
        let primary_file = mr_version
            .files
            .iter()
            .find(|f| f.primary)
            .or_else(|| mr_version.files.first())
            .expect("Version must have at least one file");

        for (algo, hash) in &primary_file.hashes {
            hashes.insert(algo.clone(), hash.clone());
        }

        ProjectFile {
            file_type: "mod".to_string(),
            file_name: primary_file.filename.clone(),
            mc_versions: mr_version.game_versions.clone(),
            loaders: mr_version.loaders.clone(),
            release_type: Self::map_release_type(&mr_version.version_type),
            url: primary_file.url.clone(),
            id: mr_version.id.clone(),
            parent_id: project_id.to_string(),
            hashes,
            // Only hard "required" dependencies with a concrete project ID
            // are kept.
            required_dependencies: mr_version
                .dependencies
                .iter()
                .filter(|d| d.dependency_type == "required")
                .filter_map(|d| d.project_id.clone())
                .collect(),
            size: primary_file.size,
            date_published: mr_version.date_published.clone(),
        }
    }
}
|
||||
|
||||
#[async_trait]
impl PlatformClient for ModrinthPlatform {
    /// Fetches one project by id or slug via `GET /project/{identifier}`.
    ///
    /// Version/loader filters do not apply to this endpoint and are ignored.
    async fn request_project(
        &self,
        identifier: &str,
        _mc_versions: &[String],
        _loaders: &[String],
    ) -> Result<Project> {
        let url = format!("{MODRINTH_API_BASE}/project/{identifier}");
        self.request_project_url(&url).await
    }

    /// Lists a project's versions via `GET /project/{id}/version`, optionally
    /// filtered by game versions and loaders.
    async fn request_project_files(
        &self,
        project_id: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Vec<ProjectFile>> {
        let mut url = format!("{MODRINTH_API_BASE}/project/{project_id}/version");

        // Modrinth expects these filters as JSON-style arrays of quoted
        // strings embedded in the query string, e.g.
        // game_versions=["1.20.1","1.20.4"].
        let mut params = vec![];
        if !mc_versions.is_empty() {
            params.push(format!(
                "game_versions=[{}]",
                mc_versions
                    .iter()
                    .map(|v| format!("\"{v}\""))
                    .collect::<Vec<_>>()
                    .join(",")
            ));
        }
        if !loaders.is_empty() {
            params.push(format!(
                "loaders=[{}]",
                loaders
                    .iter()
                    .map(|l| format!("\"{l}\""))
                    .collect::<Vec<_>>()
                    .join(",")
            ));
        }

        if !params.is_empty() {
            url.push('?');
            url.push_str(&params.join("&"));
        }

        self.request_project_files_url(&url).await
    }

    /// Fetches the project and then its version files in two sequential
    /// requests, attaching the files before returning.
    async fn request_project_with_files(
        &self,
        identifier: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Project> {
        let mut project = self
            .request_project(identifier, mc_versions, loaders)
            .await?;
        // The file listing needs the canonical Modrinth id, which
        // request_project stored on the project it returned.
        let project_id = project
            .get_platform_id("modrinth")
            .ok_or_else(|| {
                PakkerError::InternalError("Missing modrinth ID".to_string())
            })?
            .clone();

        let files = self
            .request_project_files(&project_id, mc_versions, loaders)
            .await?;
        project.files = files;

        Ok(project)
    }

    /// Resolves a project from a single file hash.
    async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
        // Modrinth uses SHA-1 hash for file lookups
        let url = format!("{MODRINTH_API_BASE}/version_file/{hash}");
        self.lookup_by_hash_url(&url).await
    }

    /// Fetches a project by slug; a 404 maps to `Ok(None)`, any other
    /// non-success status becomes a `PlatformApiError`.
    async fn request_project_from_slug(
        &self,
        slug: &str,
    ) -> Result<Option<Project>> {
        let url = format!("{MODRINTH_API_BASE}/project/{slug}");
        let response = self.client.get(&url).send().await?;

        // "No such slug" is an expected outcome, not an error.
        if response.status().as_u16() == 404 {
            return Ok(None);
        }

        if !response.status().is_success() {
            return Err(PakkerError::PlatformApiError(format!(
                "Modrinth API error: {}",
                response.status()
            )));
        }

        let mr_project: ModrinthProject = response.json().await?;
        Ok(Some(Self::convert_project(mr_project)))
    }

    /// Uses Modrinth's `/v2/version_files` endpoint to resolve projects by
    /// their hashes in batch.
    ///
    /// One POST resolves all hashes to version records; the full projects
    /// are then fetched one by one, deduplicated by project id. Individual
    /// project-fetch failures are silently skipped.
    async fn request_projects_from_hashes(
        &self,
        hashes: &[String],
        algorithm: &str,
    ) -> Result<Vec<Project>> {
        // Request/response shapes are local to this endpoint only.
        #[derive(Serialize)]
        struct HashBatchRequest<'a> {
            hashes: &'a [String],
            algorithm: &'a str,
        }

        #[derive(Debug, Deserialize)]
        struct HashBatchResponse {
            project_id: String,
        }

        if hashes.is_empty() {
            return Ok(Vec::new());
        }

        let url = format!("{MODRINTH_API_BASE}/version_files");
        let response = self
            .client
            .post(&url)
            .json(&HashBatchRequest { hashes, algorithm })
            .send()
            .await?;

        if !response.status().is_success() {
            return Err(PakkerError::PlatformApiError(format!(
                "Modrinth batch API error: {}",
                response.status()
            )));
        }

        // Response is a map from the queried hash to its version record.
        let versions_map: std::collections::HashMap<String, HashBatchResponse> =
            response.json().await?;

        let mut projects = Vec::new();
        // Several hashes can map to the same project; fetch each project once.
        let mut seen_project_ids = std::collections::HashSet::new();

        for version in versions_map.values() {
            if seen_project_ids.contains(&version.project_id) {
                continue;
            }
            seen_project_ids.insert(version.project_id.clone());

            // Best-effort: a project that fails to load is dropped from the
            // result rather than failing the whole batch.
            if let Ok(project) = self
                .request_project_with_files(&version.project_id, &[], &[])
                .await
            {
                projects.push(project);
            }
        }

        Ok(projects)
    }
}
|
||||
|
||||
// Modrinth API models
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
struct ModrinthProject {
|
||||
id: String,
|
||||
slug: String,
|
||||
title: String,
|
||||
#[serde(rename = "project_type")]
|
||||
project_type: String,
|
||||
client_side: String,
|
||||
server_side: String,
|
||||
}
|
||||
|
||||
/// One entry of the Modrinth `GET /project/{id}/version` response.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ModrinthVersion {
    /// Version id (distinct from the owning project's id).
    id: String,
    /// Id of the project this version belongs to.
    project_id: String,
    /// Display name of the version.
    name: String,
    /// Semver-ish version string chosen by the author.
    version_number: String,
    /// Minecraft versions this version supports.
    game_versions: Vec<String>,
    /// Release channel string (mapped via `map_release_type`).
    version_type: String,
    /// Mod loaders this version supports.
    loaders: Vec<String>,
    /// Publication timestamp as returned by the API.
    date_published: String,
    /// Downloadable artifacts; at least one is expected (see
    /// `convert_version`).
    files: Vec<ModrinthFile>,
    /// Declared dependencies on other projects.
    dependencies: Vec<ModrinthDependency>,
}
|
||||
|
||||
/// A single downloadable artifact attached to a Modrinth version.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ModrinthFile {
    /// Digests keyed by algorithm name (e.g. "sha1").
    hashes: HashMap<String, String>,
    /// Direct download URL.
    url: String,
    /// File name on disk.
    filename: String,
    /// Whether this is the version's primary artifact.
    primary: bool,
    /// File size in bytes.
    size: u64,
}
|
||||
|
||||
/// A dependency declaration on a Modrinth version.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ModrinthDependency {
    /// Target project id; the API may omit it (e.g. file-only dependencies),
    /// in which case the dependency is skipped during conversion.
    project_id: Option<String>,
    /// Dependency kind; only "required" entries are propagated.
    dependency_type: String,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use reqwest::Client;

    use super::*;

    // Test-only constructor that bypasses any production client setup.
    impl ModrinthPlatform {
        fn with_raw_client(client: Client) -> Self {
            Self {
                client: Arc::new(client),
            }
        }
    }

    /// Spins up a mockito server and a platform backed by a plain reqwest
    /// client. The ServerGuard must be kept alive for the mocks to serve.
    async fn create_platform_with_mock()
    -> (ModrinthPlatform, mockito::ServerGuard) {
        let server = mockito::Server::new_async().await;
        let client = Client::new();
        let platform = ModrinthPlatform::with_raw_client(client);
        (platform, server)
    }

    // Happy path: a well-formed project payload round-trips into a Project
    // carrying a "modrinth" platform id.
    #[tokio::test]
    async fn test_request_project_success() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/project/test-mod", server.url());

        let _mock = server
            .mock("GET", "/project/test-mod")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(
                r#"{
                "id": "abc123",
                "slug": "test-mod",
                "title": "Test Mod",
                "project_type": "mod",
                "client_side": "required",
                "server_side": "required"
            }"#,
            )
            .create();

        let result = platform.request_project_url(&url).await;

        assert!(result.is_ok());
        let project = result.unwrap();
        assert!(project.get_platform_id("modrinth").is_some());
    }

    // A 404 from the project endpoint surfaces as an error (unlike the
    // slug-based lookup, which maps 404 to None).
    #[tokio::test]
    async fn test_request_project_not_found() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/project/nonexistent", server.url());

        let _mock = server
            .mock("GET", "/project/nonexistent")
            .with_status(404)
            .create();

        let result = platform.request_project_url(&url).await;

        assert!(result.is_err());
    }

    // A single version with one primary file converts into one ProjectFile
    // keeping the original file name.
    #[tokio::test]
    async fn test_request_project_files() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/project/abc123/version", server.url());

        let _mock = server
            .mock("GET", "/project/abc123/version")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(
                r#"[
                {
                    "id": "v1",
                    "project_id": "abc123",
                    "name": "Test Mod v1.0.0",
                    "version_number": "1.0.0",
                    "game_versions": ["1.20.1"],
                    "version_type": "release",
                    "loaders": ["fabric"],
                    "date_published": "2024-01-01T00:00:00Z",
                    "files": [{
                        "hashes": {"sha1": "abc123def456"},
                        "url": "https://example.com/mod.jar",
                        "filename": "test-mod-1.0.0.jar",
                        "primary": true,
                        "size": 1024
                    }],
                    "dependencies": []
                }
            ]"#,
            )
            .create();

        let result = platform.request_project_files_url(&url).await;

        assert!(result.is_ok());
        let files = result.unwrap();
        assert_eq!(files.len(), 1);
        assert_eq!(files[0].file_name, "test-mod-1.0.0.jar");
    }

    // An unknown hash (404) is "no match", not an error.
    #[tokio::test]
    async fn test_lookup_by_hash_not_found() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/version_file/unknownhash123", server.url());

        let _mock = server
            .mock("GET", "/version_file/unknownhash123")
            .with_status(404)
            .create();

        let result = platform.lookup_by_hash_url(&url).await;

        assert!(result.is_ok());
        assert!(result.unwrap().is_none());
    }
}
|
||||
442
crates/pakker-core/src/platform/multiplatform.rs
Normal file
442
crates/pakker-core/src/platform/multiplatform.rs
Normal file
|
|
@ -0,0 +1,442 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
use super::traits::PlatformClient;
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{Project, ProjectFile},
|
||||
};
|
||||
|
||||
/// Multiplatform platform client that aggregates `CurseForge` and Modrinth.
/// It attempts to resolve projects on both platforms and cross-references
/// them via slugs when a project exists on only one platform.
pub struct MultiplatformPlatform {
    /// CurseForge backend consulted in parallel with Modrinth.
    curseforge: Arc<dyn PlatformClient>,
    /// Modrinth backend consulted in parallel with CurseForge.
    modrinth: Arc<dyn PlatformClient>,
}
|
||||
|
||||
impl MultiplatformPlatform {
|
||||
pub fn new(
|
||||
curseforge: Arc<dyn PlatformClient>,
|
||||
modrinth: Arc<dyn PlatformClient>,
|
||||
) -> Self {
|
||||
Self {
|
||||
curseforge,
|
||||
modrinth,
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to fetch a project, returning Ok(None) for "not found" errors.
|
||||
async fn try_request_project(
|
||||
&self,
|
||||
client: &Arc<dyn PlatformClient>,
|
||||
identifier: &str,
|
||||
) -> Result<Option<Project>> {
|
||||
match client.request_project(identifier, &[], &[]).await {
|
||||
Ok(project) => Ok(Some(project)),
|
||||
Err(e) => {
|
||||
let is_not_found = matches!(
|
||||
e,
|
||||
PakkerError::ProjectNotFound(_)
|
||||
| PakkerError::InvalidResponse(_)
|
||||
| PakkerError::ConfigError(_)
|
||||
);
|
||||
if is_not_found { Ok(None) } else { Err(e) }
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
impl PlatformClient for MultiplatformPlatform {
    /// Resolves `identifier` on both platforms concurrently, then merges.
    ///
    /// Error precedence: a real error is only propagated when NEITHER
    /// platform produced a project; the CurseForge error wins over the
    /// Modrinth one when both errored.
    async fn request_project(
        &self,
        identifier: &str,
        _mc_versions: &[String],
        _loaders: &[String],
    ) -> Result<Project> {
        // Try both platforms in parallel
        let cf_future = self.try_request_project(&self.curseforge, identifier);
        let mr_future = self.try_request_project(&self.modrinth, identifier);

        let (cf_result, mr_result) = tokio::join!(cf_future, mr_future);

        // Use whichever platform succeeds. Only propagate errors when BOTH fail.
        let mut cf_project = None;
        let mut mr_project = None;
        let mut first_error = None;

        match cf_result {
            Ok(Some(p)) => {
                log::debug!("Multiplatform: found '{identifier}' on CurseForge");
                cf_project = Some(p);
            },
            Ok(None) => {
                log::debug!("Multiplatform: '{identifier}' not found on CurseForge");
            },
            Err(e) => {
                log::debug!("Multiplatform: CurseForge error for '{identifier}': {e}");
                first_error = Some(e);
            },
        }

        match mr_result {
            Ok(Some(p)) => {
                log::debug!("Multiplatform: found '{identifier}' on Modrinth");
                mr_project = Some(p);
            },
            Ok(None) => {
                log::debug!("Multiplatform: '{identifier}' not found on Modrinth");
            },
            Err(e) => {
                log::debug!("Multiplatform: Modrinth error for '{identifier}': {e}");
                if first_error.is_none() {
                    first_error = Some(e);
                }
            },
        }

        if cf_project.is_none() && mr_project.is_none() {
            if let Some(e) = first_error {
                return Err(e);
            }
            return Err(PakkerError::ProjectNotFound(identifier.to_string()));
        }

        // Cross-reference: when the project exists on only one platform,
        // try the other platform using that platform's own slug. Modrinth
        // projects store their slug under "modrinth"; CurseForge under
        // "curseforge". Many mods share the same slug across platforms.
        // Slug-lookup failures are deliberately ignored (best-effort).
        if cf_project.is_none()
            && let Some(ref mr) = mr_project
            && let Some(mr_slug) = mr.slug.get("modrinth")
            && let Ok(Some(cf)) =
                self.curseforge.request_project_from_slug(mr_slug).await
        {
            cf_project = Some(cf);
        }
        if mr_project.is_none()
            && let Some(ref cf) = cf_project
            && let Some(cf_slug) = cf.slug.get("curseforge")
            && let Ok(Some(mr)) =
                self.modrinth.request_project_from_slug(cf_slug).await
        {
            mr_project = Some(mr);
        }

        // Merge projects or return whichever was found
        let combined = match (cf_project, mr_project) {
            (Some(cf), Some(mr)) => cf.merged(mr)?,
            (Some(cf), None) => cf,
            (None, Some(mr)) => mr,
            (None, None) => unreachable!("handled above"),
        };

        Ok(combined)
    }

    /// Files are obtained by resolving the whole project first; see
    /// `request_project_with_files`.
    async fn request_project_files(
        &self,
        project_id: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Vec<ProjectFile>> {
        // Multiplatform doesn't directly support files - use
        // request_project_with_files
        let project = self
            .request_project_with_files(project_id, mc_versions, loaders)
            .await?;
        Ok(project.files)
    }

    /// Resolves the merged project, then fetches file lists from both
    /// platforms concurrently and concatenates them (CurseForge first).
    async fn request_project_with_files(
        &self,
        identifier: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Project> {
        // First get the combined project from both platforms
        let mut project = self
            .request_project(identifier, mc_versions, loaders)
            .await?;

        // Now fetch files from both platforms in parallel
        let cf_project_id = project.id.get("curseforge").cloned();
        let mr_project_id = project.id.get("modrinth").cloned();

        // Each future yields an empty list when the project has no id on
        // that platform; a genuine fetch error still propagates via `?`.
        let cf_files_future = async {
            if let Some(ref id) = cf_project_id {
                self
                    .curseforge
                    .request_project_files(id, mc_versions, loaders)
                    .await
            } else {
                Ok(Vec::new())
            }
        };

        let mr_files_future = async {
            if let Some(ref id) = mr_project_id {
                self
                    .modrinth
                    .request_project_files(id, mc_versions, loaders)
                    .await
            } else {
                Ok(Vec::new())
            }
        };

        let (cf_files, mr_files) = tokio::join!(cf_files_future, mr_files_future);

        let mut all_files = cf_files?;
        all_files.extend(mr_files?);

        project.files = all_files;
        Ok(project)
    }

    /// Hash lookup on both platforms; merges when both match, and
    /// propagates the first error encountered.
    async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
        // Try both platforms in parallel
        let cf_future = self.curseforge.lookup_by_hash(hash);
        let mr_future = self.modrinth.lookup_by_hash(hash);

        let (cf_result, mr_result) = tokio::join!(cf_future, mr_future);

        match (cf_result?, mr_result?) {
            (Some(cf), Some(mr)) => cf.merged(mr).map(Some),
            (Some(project), None) | (None, Some(project)) => Ok(Some(project)),
            (None, None) => Ok(None),
        }
    }

    /// Slug lookup on both platforms; unlike `lookup_by_hash`, any error
    /// from either platform fails the whole call.
    async fn request_project_from_slug(
        &self,
        slug: &str,
    ) -> Result<Option<Project>> {
        let cf_future = self.curseforge.request_project_from_slug(slug);
        let mr_future = self.modrinth.request_project_from_slug(slug);

        let (cf_result, mr_result) = tokio::join!(cf_future, mr_future);

        match (cf_result, mr_result) {
            (Ok(Some(cf)), Ok(Some(mr))) => cf.merged(mr).map(Some),
            (Ok(Some(project)), Ok(None)) | (Ok(None), Ok(Some(project))) => {
                Ok(Some(project))
            },
            (Ok(None), Ok(None)) => Ok(None),
            (Err(e), _) | (_, Err(e)) => Err(e),
        }
    }

    /// Delegates to both `CurseForge` and Modrinth in parallel, then deduplicates
    /// results.
    async fn request_projects_from_hashes(
        &self,
        hashes: &[String],
        algorithm: &str,
    ) -> Result<Vec<Project>> {
        let cf_future = self
            .curseforge
            .request_projects_from_hashes(hashes, algorithm);
        let mr_future = self
            .modrinth
            .request_projects_from_hashes(hashes, algorithm);

        let (cf_projects, mr_projects) = tokio::join!(cf_future, mr_future);

        let mut all_projects = cf_projects?;
        for mr_project in mr_projects? {
            // NOTE(review): `Option == Option` compares None == None as
            // equal, so two projects that BOTH lack e.g. a curseforge id
            // would be treated as duplicates here — confirm this is the
            // intended dedup semantics.
            if !all_projects.iter().any(|p| {
                p.id.get("modrinth") == mr_project.id.get("modrinth")
                    || p.id.get("curseforge") == mr_project.id.get("curseforge")
            }) {
                all_projects.push(mr_project);
            }
        }

        Ok(all_projects)
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::{collections::HashMap, sync::Arc};

    use async_trait::async_trait;

    use super::*;
    use crate::{
        error::{PakkerError, Result},
        model::{Project, ProjectFile, ProjectSide, ProjectType},
    };

    /// In-memory PlatformClient stub. `projects` answers id lookups,
    /// `slug_map` answers slug lookups; everything else is a no-op.
    struct MockPlatform {
        projects: HashMap<String, Project>,
        slug_map: HashMap<String, Project>,
    }

    impl MockPlatform {
        fn new() -> Self {
            Self {
                projects: HashMap::new(),
                slug_map: HashMap::new(),
            }
        }

        // Builder-style: register a project reachable by id lookup.
        fn with_project(mut self, id: &str, project: Project) -> Self {
            self.projects.insert(id.to_string(), project);
            self
        }

        // Builder-style: register a project reachable only by slug lookup.
        fn with_slug(mut self, slug: &str, project: Project) -> Self {
            self.slug_map.insert(slug.to_string(), project);
            self
        }
    }

    #[async_trait]
    impl PlatformClient for MockPlatform {
        async fn request_project(
            &self,
            project_id: &str,
            _mc_versions: &[String],
            _loaders: &[String],
        ) -> Result<Project> {
            // Missing entries map to ProjectNotFound, which the
            // multiplatform client treats as "absent on this platform".
            self
                .projects
                .get(project_id)
                .cloned()
                .ok_or_else(|| PakkerError::ProjectNotFound(project_id.to_string()))
        }

        async fn request_project_files(
            &self,
            _project_id: &str,
            _mc_versions: &[String],
            _loaders: &[String],
        ) -> Result<Vec<ProjectFile>> {
            Ok(vec![])
        }

        async fn request_project_with_files(
            &self,
            project_id: &str,
            mc_versions: &[String],
            loaders: &[String],
        ) -> Result<Project> {
            self.request_project(project_id, mc_versions, loaders).await
        }

        async fn lookup_by_hash(&self, _hash: &str) -> Result<Option<Project>> {
            Ok(None)
        }

        async fn request_project_from_slug(
            &self,
            slug: &str,
        ) -> Result<Option<Project>> {
            Ok(self.slug_map.get(slug).cloned())
        }

        async fn request_projects_from_hashes(
            &self,
            _hashes: &[String],
            _algorithm: &str,
        ) -> Result<Vec<Project>> {
            Ok(vec![])
        }
    }

    /// Builds a minimal Project whose id/slug/name maps all carry a single
    /// entry for the given platform.
    fn make_project(platform: &str, id: &str, slug: &str) -> Project {
        let mut project =
            Project::new(slug.to_string(), ProjectType::Mod, ProjectSide::Both);
        project.id.insert(platform.to_string(), id.to_string());
        project.slug.insert(platform.to_string(), slug.to_string());
        project.name.insert(platform.to_string(), slug.to_string());
        project
    }

    // Found on Modrinth only; CurseForge side is filled in via slug lookup.
    #[tokio::test]
    async fn test_cross_reference_modrinth_to_curseforge() {
        let mr_project = make_project("modrinth", "mr-abc", "sodium");
        let cf_project = make_project("curseforge", "12345", "sodium");

        let modrinth =
            Arc::new(MockPlatform::new().with_project("sodium", mr_project.clone()));
        let curseforge =
            Arc::new(MockPlatform::new().with_slug("sodium", cf_project.clone()));

        let platform = MultiplatformPlatform::new(curseforge, modrinth);
        let result = platform.request_project("sodium", &[], &[]).await.unwrap();

        assert_eq!(result.id.get("modrinth"), Some(&"mr-abc".to_string()));
        assert_eq!(result.id.get("curseforge"), Some(&"12345".to_string()));
    }

    // Mirror case: found on CurseForge only, Modrinth filled in via slug.
    #[tokio::test]
    async fn test_cross_reference_curseforge_to_modrinth() {
        let cf_project = make_project("curseforge", "12345", "sodium");
        let mr_project = make_project("modrinth", "mr-abc", "sodium");

        let modrinth =
            Arc::new(MockPlatform::new().with_slug("sodium", mr_project.clone()));
        let curseforge =
            Arc::new(MockPlatform::new().with_project("sodium", cf_project.clone()));

        let platform = MultiplatformPlatform::new(curseforge, modrinth);
        let result = platform.request_project("sodium", &[], &[]).await.unwrap();

        assert_eq!(result.id.get("curseforge"), Some(&"12345".to_string()));
        assert_eq!(result.id.get("modrinth"), Some(&"mr-abc".to_string()));
    }

    // Found on both: the merged project carries both platform ids.
    #[tokio::test]
    async fn test_found_on_both_platforms_merged() {
        let cf_project = make_project("curseforge", "12345", "sodium");
        let mr_project = make_project("modrinth", "mr-abc", "sodium");

        let modrinth =
            Arc::new(MockPlatform::new().with_project("sodium", mr_project));
        let curseforge =
            Arc::new(MockPlatform::new().with_project("sodium", cf_project));

        let platform = MultiplatformPlatform::new(curseforge, modrinth);
        let result = platform.request_project("sodium", &[], &[]).await.unwrap();

        assert_eq!(result.id.get("curseforge"), Some(&"12345".to_string()));
        assert_eq!(result.id.get("modrinth"), Some(&"mr-abc".to_string()));
    }

    // Absent on both platforms: surfaces as ProjectNotFound.
    #[tokio::test]
    async fn test_not_found_on_either_platform() {
        let modrinth = Arc::new(MockPlatform::new());
        let curseforge = Arc::new(MockPlatform::new());

        let platform = MultiplatformPlatform::new(curseforge, modrinth);
        let result = platform.request_project("nonexistent", &[], &[]).await;

        assert!(matches!(result, Err(PakkerError::ProjectNotFound(_))));
    }

    // A failed slug cross-reference degrades gracefully to a single-platform
    // result rather than an error.
    #[tokio::test]
    async fn test_no_cross_reference_when_slug_absent() {
        // CurseForge returns a project, but slug lookup on Modrinth finds nothing
        let cf_project = make_project("curseforge", "12345", "rare-mod");

        let modrinth = Arc::new(MockPlatform::new());
        let curseforge =
            Arc::new(MockPlatform::new().with_project("rare-mod", cf_project));

        let platform = MultiplatformPlatform::new(curseforge, modrinth);
        let result = platform
            .request_project("rare-mod", &[], &[])
            .await
            .unwrap();

        assert_eq!(result.id.get("curseforge"), Some(&"12345".to_string()));
        assert!(result.id.get("modrinth").is_none());
    }
}
|
||||
53
crates/pakker-core/src/platform/traits.rs
Normal file
53
crates/pakker-core/src/platform/traits.rs
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
use async_trait::async_trait;
|
||||
|
||||
use crate::{error::Result, model::Project};
|
||||
|
||||
/// Common interface implemented by every mod-distribution platform backend
/// (Modrinth, `CurseForge`, the multiplatform aggregator, ...).
#[async_trait]
pub trait PlatformClient: Send + Sync {
    /// Request a single project by identifier.
    ///
    /// `mc_versions` and `loaders` act as filters; implementations that do
    /// not support project-level filtering may ignore them.
    async fn request_project(
        &self,
        project_id: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Project>;

    /// Request files for a project, optionally filtered by game versions
    /// and loaders.
    async fn request_project_files(
        &self,
        project_id: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Vec<crate::model::ProjectFile>>;

    /// Request a project with its files populated in one call.
    async fn request_project_with_files(
        &self,
        project_id: &str,
        mc_versions: &[String],
        loaders: &[String],
    ) -> Result<Project>;

    /// Resolve a project from a single file hash; `Ok(None)` means the
    /// hash is unknown to this platform.
    async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>>;

    /// Request a project using its platform-specific slug.
    /// This is used by Multiplatform to cross-reference projects between
    /// platforms. `Ok(None)` means the slug does not exist on this platform.
    async fn request_project_from_slug(
        &self,
        slug: &str,
    ) -> Result<Option<Project>>;

    /// Request multiple projects by their hashes (Modrinth) or bytes
    /// (`CurseForge`).
    ///
    /// # Returns
    ///
    /// A list of projects found. Platforms that do not support hash-based
    /// lookup return an empty list.
    async fn request_projects_from_hashes(
        &self,
        hashes: &[String],
        algorithm: &str,
    ) -> Result<Vec<Project>>;
}
|
||||
119
crates/pakker-core/src/rate_limiter.rs
Normal file
119
crates/pakker-core/src/rate_limiter.rs
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
use std::{
|
||||
collections::HashMap,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
use crate::error::Result;
|
||||
|
||||
/// Cheaply-clonable handle to a shared sliding-window rate limiter.
/// Clones share the same internal state via `Arc`.
#[derive(Clone)]
pub struct RateLimiter {
    // Shared mutable state guarded by an async (tokio) mutex so it can be
    // held across await points safely.
    inner: Arc<Mutex<RateLimiterInner>>,
}
|
||||
|
||||
/// Mutable limiter state: per-platform request timestamps plus the budgets.
struct RateLimiterInner {
    // Timestamps of recent requests keyed by platform name; entries older
    // than one minute are pruned on each acquire.
    requests: HashMap<String, Vec<Instant>>,
    config: RateLimitConfig,
}
|
||||
|
||||
/// Per-platform request budgets consumed by the rate limiter.
///
/// Each platform has a sustained rate (requests per minute) and a burst
/// allowance; platforms without a dedicated entry use the `default_*` pair.
#[derive(Clone, Debug)]
pub struct RateLimitConfig {
    /// Sustained Modrinth rate, in requests per minute.
    pub modrinth_requests_per_min: u32,
    /// Modrinth burst allowance.
    pub modrinth_burst: u32,
    /// Sustained CurseForge rate, in requests per minute.
    pub curseforge_requests_per_min: u32,
    /// CurseForge burst allowance.
    pub curseforge_burst: u32,
    /// Sustained GitHub rate, in requests per minute.
    pub github_requests_per_min: u32,
    /// GitHub burst allowance.
    pub github_burst: u32,
    /// Fallback sustained rate for unrecognized platforms.
    pub default_requests_per_min: u32,
    /// Fallback burst allowance for unrecognized platforms.
    pub default_burst: u32,
}

impl Default for RateLimitConfig {
    /// Conservative defaults: Modrinth 100/min (burst 10), CurseForge
    /// 60/min (burst 5), GitHub 50/min (burst 5), others 30/min (burst 3).
    fn default() -> Self {
        RateLimitConfig {
            modrinth_requests_per_min: 100,
            modrinth_burst: 10,
            curseforge_requests_per_min: 60,
            curseforge_burst: 5,
            github_requests_per_min: 50,
            github_burst: 5,
            default_requests_per_min: 30,
            default_burst: 3,
        }
    }
}
|
||||
|
||||
impl RateLimiter {
    /// Creates a limiter with the given budgets, or the built-in defaults
    /// when `config` is `None`.
    pub fn new(config: Option<RateLimitConfig>) -> Self {
        Self {
            inner: Arc::new(Mutex::new(RateLimiterInner {
                requests: HashMap::new(),
                config: config.unwrap_or_default(),
            })),
        }
    }

    /// Waits (sleeping as needed) until a request slot for `platform` is
    /// available, then records the request and returns.
    ///
    /// Platform names are matched case-insensitively; unknown names fall
    /// back to the default budget. The current implementation always
    /// returns `Ok(())` once a slot is obtained.
    pub async fn acquire(&self, platform: &str) -> Result<()> {
        // Resolve this platform's budget once, under a short-lived lock.
        let (rate, burst) = {
            let inner = self.inner.lock().await;
            match platform.to_lowercase().as_str() {
                "modrinth" => {
                    (
                        inner.config.modrinth_requests_per_min,
                        inner.config.modrinth_burst,
                    )
                },
                "curseforge" => {
                    (
                        inner.config.curseforge_requests_per_min,
                        inner.config.curseforge_burst,
                    )
                },
                "github" => {
                    (
                        inner.config.github_requests_per_min,
                        inner.config.github_burst,
                    )
                },
                _ => {
                    (
                        inner.config.default_requests_per_min,
                        inner.config.default_burst,
                    )
                },
            }
        };

        // Minimum spacing between requests at the sustained rate;
        // rate.max(1) guards against a zero-rate division.
        let interval = Duration::from_secs(60) / rate.max(1);

        loop {
            let mut inner = self.inner.lock().await;
            let now = Instant::now();
            let platform_requests =
                inner.requests.entry(platform.to_owned()).or_default();

            // Drop timestamps outside the one-minute sliding window.
            platform_requests
                .retain(|t| now.duration_since(*t) < Duration::from_secs(60));

            // Once the burst allowance is used up, pace requests by waiting
            // until `interval` has elapsed since the oldest recorded one.
            if platform_requests.len() >= burst as usize
                && let Some(oldest) = platform_requests.first()
            {
                let wait_time = interval.saturating_sub(now.duration_since(*oldest));
                if wait_time > Duration::ZERO {
                    // Release the lock before sleeping so other tasks
                    // (and platforms) are not blocked while we wait.
                    drop(inner);
                    tokio::time::sleep(wait_time).await;
                    continue;
                }
            }

            platform_requests.push(Instant::now());
            return Ok(());
        }
    }

    /// Convenience wrapper around [`RateLimiter::acquire`] that discards
    /// the result (acquire currently cannot fail).
    pub async fn wait_for(&self, platform: &str) {
        let _ = self.acquire(platform).await;
    }
}
|
||||
154
crates/pakker-core/src/resolver.rs
Normal file
154
crates/pakker-core/src/resolver.rs
Normal file
|
|
@ -0,0 +1,154 @@
|
|||
use std::collections::{HashMap, HashSet};
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{LockFile, Project},
|
||||
platform::PlatformClient,
|
||||
};
|
||||
|
||||
/// Recursively resolves a project's required dependencies across platforms.
pub struct DependencyResolver {
    // Pakku ids of projects whose dependency subtrees were fully resolved.
    // NOTE(review): this set is written in `resolve` but no read of it is
    // visible in this file — confirm it is consumed elsewhere.
    visited: HashSet<String>,
    // Current resolution chain (root -> current), used for cycle detection.
    path: Vec<String>,
}
|
||||
|
||||
impl DependencyResolver {
    /// Creates a resolver with empty visited/path bookkeeping.
    pub fn new() -> Self {
        Self {
            visited: HashSet::new(),
            path: Vec::new(),
        }
    }

    /// Recursively resolves the required dependencies of `project`,
    /// returning every newly-fetched dependency project (not including
    /// `project` itself). Link edges (`pakku_links`) are recorded on both
    /// sides of each dependency relation.
    ///
    /// The future is boxed and pinned because the function recurses
    /// through an `async` block (async fns cannot be directly recursive).
    ///
    /// # Errors
    ///
    /// Returns `CircularDependency` when the current resolution path
    /// already contains this project, and `ProjectNotFound` (via
    /// `fetch_dependency`) when a dependency exists on no platform.
    pub fn resolve<'a>(
        &'a mut self,
        project: &'a mut Project,
        lockfile: &'a mut LockFile,
        platforms: &'a HashMap<String, Box<dyn PlatformClient>>,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<Project>>> + 'a>,
    > {
        Box::pin(async move {
            let mut resolved = Vec::new();

            // Guard clauses: skip projects already locked, detect cycles,
            // and bail out entirely for projects without a pakku id.
            if let Some(ref pakku_id) = project.pakku_id {
                if lockfile.get_project(pakku_id).is_some() {
                    log::debug!("Project already in lockfile: {}", project.get_name());
                    return Ok(resolved);
                }
                if self.path.contains(pakku_id) {
                    let cycle_path = self.path.join(" -> ");
                    return Err(PakkerError::CircularDependency(format!(
                        "{cycle_path} -> {pakku_id}"
                    )));
                }
                self.path.push(pakku_id.clone());
            } else {
                return Ok(resolved);
            }

            // Collect the union of required dependency ids across all files,
            // deduplicated via a set.
            let mut dependencies_set: HashSet<String> = HashSet::new();
            for file in &project.files {
                for dep_id in &file.required_dependencies {
                    dependencies_set.insert(dep_id.clone());
                }
            }
            let dependencies: Vec<String> = dependencies_set.into_iter().collect();

            // NOTE(review): any `?` below returns without popping
            // `self.path`, leaving a stale entry on the cycle-detection
            // stack — confirm callers discard the resolver on error.
            for dep_id in dependencies {
                // A dependency already in the lockfile (under any platform's
                // id) is only linked, not fetched again.
                let existing_pakku_id = lockfile
                    .find_project_by_platform_id("modrinth", &dep_id)
                    .or_else(|| {
                        lockfile.find_project_by_platform_id("curseforge", &dep_id)
                    })
                    .or_else(|| lockfile.find_project_by_platform_id("github", &dep_id))
                    .map(|p| p.pakku_id.clone());

                if let Some(Some(existing_id)) = existing_pakku_id {
                    if let Some(ref my_id) = project.pakku_id {
                        // Record the bidirectional link between this project
                        // and the already-locked dependency.
                        project.pakku_links.insert(existing_id.clone());
                        if let Some(existing_mut) = lockfile.find_project_mut(&existing_id)
                        {
                            existing_mut.pakku_links.insert(my_id.clone());
                        }
                    }
                    continue;
                }

                // Unknown dependency: fetch it from the configured platforms.
                let mut dep_project =
                    self.fetch_dependency(&dep_id, lockfile, platforms).await?;

                // Link both directions (project <-> dependency).
                if let (Some(dep_id), Some(my_id)) =
                    (&dep_project.pakku_id, &project.pakku_id)
                {
                    project.pakku_links.insert(dep_id.clone());
                    dep_project.pakku_links.insert(my_id.clone());
                }

                // Recurse into the dependency's own dependencies.
                let mut sub_deps =
                    self.resolve(&mut dep_project, lockfile, platforms).await?;

                resolved.push(dep_project);
                resolved.append(&mut sub_deps);
            }

            // Mark this project resolved and unwind the cycle-detection path.
            if let Some(ref pakku_id) = project.pakku_id {
                self.visited.insert(pakku_id.clone());
            }
            self.path.pop();

            Ok(resolved)
        })
    }

    /// Queries every configured platform for `dep_id` and merges the
    /// results into a single project. Per-platform failures are logged and
    /// skipped; only "found nowhere" is an error.
    #[expect(
        clippy::expect_used,
        reason = "projects.len() == 1 is checked directly above"
    )]
    async fn fetch_dependency(
        &self,
        dep_id: &str,
        lockfile: &LockFile,
        platforms: &HashMap<String, Box<dyn PlatformClient>>,
    ) -> Result<Project> {
        let mut projects = Vec::new();

        // Platforms are queried sequentially using the lockfile's version
        // and loader filters.
        for (platform_name, client) in platforms {
            match client
                .request_project_with_files(
                    dep_id,
                    &lockfile.mc_versions,
                    &lockfile.get_loader_names(),
                )
                .await
            {
                Ok(project) => {
                    log::info!("Found dependency {dep_id} on {platform_name}");
                    projects.push(project);
                },
                Err(e) => {
                    log::debug!("Could not find {dep_id} on {platform_name}: {e}");
                },
            }
        }

        if projects.is_empty() {
            return Err(PakkerError::ProjectNotFound(dep_id.to_string()));
        }

        if projects.len() == 1 {
            Ok(projects.into_iter().next().expect("length is exactly 1"))
        } else {
            // Found on several platforms: fold them into the first result.
            let mut merged = projects.remove(0);
            for project in projects {
                merged.merge(project);
            }
            Ok(merged)
        }
    }
}
|
||||
|
||||
/// `Default` simply delegates to [`DependencyResolver::new`], so a resolver
/// can be created via `DependencyResolver::default()` or struct-update syntax.
impl Default for DependencyResolver {
    fn default() -> Self {
        Self::new()
    }
}
|
||||
263
crates/pakker-core/src/ui_utils.rs
Normal file
263
crates/pakker-core/src/ui_utils.rs
Normal file
|
|
@ -0,0 +1,263 @@
|
|||
// UI utility functions for terminal formatting and interactive prompts
|
||||
|
||||
use std::io;
|
||||
|
||||
use dialoguer::{Confirm, Input, Select, theme::ColorfulTheme};
|
||||
|
||||
/// Wraps `text` in an OSC 8 terminal hyperlink pointing at `url`.
/// Emitted byte layout: `\x1b]8;;<URL>\x1b\\<TEXT>\x1b]8;;\x1b\\`.
pub fn hyperlink(url: &str, text: &str) -> String {
    let mut link = String::with_capacity(url.len() + text.len() + 14);
    link.push_str("\x1b]8;;");
    link.push_str(url);
    link.push_str("\x1b\\");
    link.push_str(text);
    link.push_str("\x1b]8;;\x1b\\");
    link
}
|
||||
|
||||
/// Prompts user with a yes/no question
|
||||
/// Returns true for yes, false for no
|
||||
/// If `skip_prompts` is true, returns the default value without prompting
|
||||
pub fn prompt_yes_no(
|
||||
question: &str,
|
||||
default: bool,
|
||||
skip_prompts: bool,
|
||||
) -> io::Result<bool> {
|
||||
if skip_prompts {
|
||||
return Ok(default);
|
||||
}
|
||||
|
||||
Confirm::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt(question)
|
||||
.default(default)
|
||||
.interact()
|
||||
.map_err(io::Error::other)
|
||||
}
|
||||
|
||||
/// Prompts user to select from a list of options
|
||||
/// Returns the index of the selected option
|
||||
pub fn prompt_select(question: &str, options: &[&str]) -> io::Result<usize> {
|
||||
Select::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt(question)
|
||||
.items(options)
|
||||
.default(0)
|
||||
.interact()
|
||||
.map_err(io::Error::other)
|
||||
}
|
||||
|
||||
/// Builds the public Modrinth page URL for a mod slug.
pub fn modrinth_project_url(slug: &str) -> String {
    let mut url = String::from("https://modrinth.com/mod/");
    url.push_str(slug);
    url
}
|
||||
|
||||
/// Builds the public `CurseForge` page URL for a project identifier.
pub fn curseforge_project_url(project_id: &str) -> String {
    let mut url = String::from("https://www.curseforge.com/minecraft/mc-mods/");
    url.push_str(project_id);
    url
}
|
||||
|
||||
/// Calculate the Levenshtein edit distance between two strings.
///
/// Comparison is case-insensitive and operates on Unicode scalar values.
/// Uses a rolling two-row dynamic-programming table, so memory is
/// O(len(b)) instead of the previous O(len(a) * len(b)) full matrix
/// (which also needed a `needless_range_loop` allow).
fn levenshtein_distance(a: &str, b: &str) -> usize {
    let a_chars: Vec<char> = a.to_lowercase().chars().collect();
    let b_chars: Vec<char> = b.to_lowercase().chars().collect();

    // Distance to/from the empty string is just the other string's length.
    if a_chars.is_empty() {
        return b_chars.len();
    }
    if b_chars.is_empty() {
        return a_chars.len();
    }

    // prev[j] = distance between a[..i] and b[..j] for the previous row i.
    let mut prev: Vec<usize> = (0..=b_chars.len()).collect();
    let mut curr = vec![0usize; b_chars.len() + 1];

    for (i, &ac) in a_chars.iter().enumerate() {
        curr[0] = i + 1; // deleting all i+1 leading chars of `a`
        for (j, &bc) in b_chars.iter().enumerate() {
            let cost = usize::from(ac != bc);
            curr[j + 1] = (prev[j + 1] + 1) // deletion
                .min(curr[j] + 1) // insertion
                .min(prev[j] + cost); // substitution
        }
        std::mem::swap(&mut prev, &mut curr);
    }

    prev[b_chars.len()]
}
|
||||
|
||||
/// Find similar strings to the input using Levenshtein distance.
|
||||
/// Returns suggestions sorted by similarity (most similar first).
|
||||
/// Only returns suggestions with distance <= `max_distance`.
|
||||
pub fn suggest_similar<'a>(
|
||||
input: &str,
|
||||
candidates: &'a [String],
|
||||
max_distance: usize,
|
||||
) -> Vec<&'a str> {
|
||||
let mut scored: Vec<(&str, usize)> = candidates
|
||||
.iter()
|
||||
.map(|c| (c.as_str(), levenshtein_distance(input, c)))
|
||||
.filter(|(_, dist)| *dist <= max_distance && *dist > 0)
|
||||
.collect();
|
||||
|
||||
scored.sort_by_key(|(_, dist)| *dist);
|
||||
scored.into_iter().map(|(s, _)| s).collect()
|
||||
}
|
||||
|
||||
/// Prompt user if they meant a similar project name.
|
||||
/// Returns `Some(suggested_name)` if user confirms, None otherwise.
|
||||
/// If `skip_prompts` is true, automatically accepts the first suggestion.
|
||||
pub fn prompt_typo_suggestion(
|
||||
input: &str,
|
||||
candidates: &[String],
|
||||
skip_prompts: bool,
|
||||
) -> io::Result<Option<String>> {
|
||||
// Use a max distance based on input length for reasonable suggestions
|
||||
let max_distance = (input.len() / 2).clamp(2, 4);
|
||||
let suggestions = suggest_similar(input, candidates, max_distance);
|
||||
|
||||
if let Some(first_suggestion) = suggestions.first()
|
||||
&& prompt_yes_no(
|
||||
&format!("Did you mean '{first_suggestion}'?"),
|
||||
true,
|
||||
skip_prompts,
|
||||
)?
|
||||
{
|
||||
return Ok(Some((*first_suggestion).to_string()));
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Prompt for text input with optional default value
|
||||
pub fn prompt_input(prompt: &str, default: Option<&str>) -> io::Result<String> {
|
||||
let theme = ColorfulTheme::default();
|
||||
let mut input = Input::<String>::with_theme(&theme).with_prompt(prompt);
|
||||
|
||||
if let Some(def) = default {
|
||||
input = input.default(def.to_string());
|
||||
}
|
||||
|
||||
input.interact_text().map_err(io::Error::other)
|
||||
}
|
||||
|
||||
/// Prompt for text input, returning None if empty
|
||||
pub fn prompt_input_optional(prompt: &str) -> io::Result<Option<String>> {
|
||||
let input: String = Input::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt(prompt)
|
||||
.allow_empty(true)
|
||||
.interact_text()
|
||||
.map_err(io::Error::other)?;
|
||||
|
||||
if input.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(input))
|
||||
}
|
||||
}
|
||||
|
||||
/// Prompt for `CurseForge` API key when authentication fails.
|
||||
/// Returns the API key if provided, None if cancelled.
|
||||
/// If `skip_prompts` is true, returns None immediately.
|
||||
pub fn prompt_curseforge_api_key(
|
||||
skip_prompts: bool,
|
||||
) -> io::Result<Option<String>> {
|
||||
use dialoguer::Password;
|
||||
|
||||
if skip_prompts {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
println!();
|
||||
println!("CurseForge API key is required but not configured.");
|
||||
println!("Get your API key from: https://console.curseforge.com/");
|
||||
println!();
|
||||
|
||||
if !prompt_yes_no("Would you like to enter your API key now?", true, false)? {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let key: String = Password::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt("CurseForge API key")
|
||||
.interact()
|
||||
.map_err(io::Error::other)?;
|
||||
|
||||
if key.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(key))
|
||||
}
|
||||
}
|
||||
|
||||
/// Prompt for a generic secret/token using a secure password input.
|
||||
/// Returns the secret if provided, None if empty or cancelled.
|
||||
pub fn prompt_secret(prompt: &str) -> io::Result<Option<String>> {
|
||||
use dialoguer::Password;
|
||||
|
||||
let secret: String = Password::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt(prompt)
|
||||
.interact()
|
||||
.map_err(io::Error::other)?;
|
||||
|
||||
if secret.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(secret))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Only checks that the URL and label survive into the escape sequence;
    // the exact OSC 8 byte layout is documented on `hyperlink` itself.
    #[test]
    fn test_hyperlink() {
        let result = hyperlink("https://example.com", "Example");
        assert!(result.contains("https://example.com"));
        assert!(result.contains("Example"));
    }

    #[test]
    fn test_modrinth_url() {
        let url = modrinth_project_url("sodium");
        assert_eq!(url, "https://modrinth.com/mod/sodium");
    }

    // Textbook distances plus empty-string and case-insensitivity edges.
    #[test]
    fn test_levenshtein_distance() {
        assert_eq!(levenshtein_distance("kitten", "sitting"), 3);
        assert_eq!(levenshtein_distance("saturday", "sunday"), 3);
        assert_eq!(levenshtein_distance("", "abc"), 3);
        assert_eq!(levenshtein_distance("abc", ""), 3);
        assert_eq!(levenshtein_distance("abc", "abc"), 0);
        assert_eq!(levenshtein_distance("ABC", "abc"), 0); // case insensitive
    }

    #[test]
    fn test_suggest_similar() {
        let candidates = vec![
            "sodium".to_string(),
            "lithium".to_string(),
            "phosphor".to_string(),
            "iris".to_string(),
            "fabric-api".to_string(),
        ];

        // Close typo should be suggested
        let suggestions = suggest_similar("sodim", &candidates, 2);
        assert!(!suggestions.is_empty());
        assert_eq!(suggestions[0], "sodium");

        // Complete mismatch should return empty
        let suggestions = suggest_similar("xyz123", &candidates, 2);
        assert!(suggestions.is_empty());

        // Exact match returns empty (distance 0 filtered out)
        let suggestions = suggest_similar("sodium", &candidates, 2);
        assert!(suggestions.is_empty());
    }
}
|
||||
323
crates/pakker-core/src/utils/flexver.rs
Normal file
323
crates/pakker-core/src/utils/flexver.rs
Normal file
|
|
@ -0,0 +1,323 @@
|
|||
// FlexVer - Flexible Version Comparison
|
||||
//
|
||||
// This implementation is based on the original implementation of the
|
||||
// `flexver-rs` crate, which no longer appears to be maintained.
|
||||
//
|
||||
// See:
|
||||
// <https://git.sleeping.town/exa/FlexVer/src/branch/trunk/rust/src/lib.rs>
|
||||
//
|
||||
// This implementation provides semver-like version comparison with support for:
|
||||
//
|
||||
// - Flexible version string parsing (not strict semver)
|
||||
// - Pre-release handling (parts starting with `-`)
|
||||
// - Build metadata stripping (parts after `+`)
|
||||
// - Numerical vs lexical comparison
|
||||
|
||||
use std::{
|
||||
cmp::Ordering::{self, Equal, Greater, Less},
|
||||
collections::VecDeque,
|
||||
};
|
||||
|
||||
/// Type of version component for sorting purposes.
///
/// `compare` orders two `Numerical` components by their `i64` values; any
/// other pairing falls back to comparing the stored string forms.
#[derive(Debug, Clone, PartialEq)]
enum SortingType {
    /// A numeric component with both i64 value and string representation
    Numerical(i64, String),

    /// A lexical string component
    Lexical(String),

    /// A semver pre-release component (starting with `-`)
    SemverPrerelease(String),
}
|
||||
|
||||
impl SortingType {
|
||||
fn into_string(self) -> String {
|
||||
match self {
|
||||
Self::Numerical(_, s) | Self::Lexical(s) | Self::SemverPrerelease(s) => s,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// True when `s` is a semver pre-release marker: a leading `-` followed by
/// at least one more byte (a lone "-" does not qualify).
fn is_semver_prerelease(s: &str) -> bool {
    s.strip_prefix('-').is_some_and(|rest| !rest.is_empty())
}
|
||||
|
||||
/// Decompose a version string into its component parts.
///
/// The string is scanned left to right and split into runs: maximal digit
/// runs become `Numerical` (falling back to `Lexical` when the digits do
/// not fit in `i64`), `-`-prefixed runs become `SemverPrerelease`, and
/// everything else becomes `Lexical`. Build metadata after a `+` is
/// discarded before scanning.
fn decompose(str_in: &str) -> VecDeque<SortingType> {
    use SortingType::{Lexical, Numerical, SemverPrerelease};

    /// Decide whether the run accumulated in `current` ends at the next
    /// character `c` (`None` = end of input). Returns `Some(component)`
    /// when the run is complete, or `None` to keep accumulating.
    fn handle_split(
        current: &str,
        c: Option<&char>,
        currently_numeric: bool,
    ) -> Option<SortingType> {
        let numeric = c.is_some_and(char::is_ascii_digit);

        if currently_numeric {
            // Still inside a digit run: keep accumulating while digits last.
            if numeric {
                return None;
            }
            // Digit run ended. An i64 overflow demotes the run to a lexical
            // component instead of failing.
            return Some(current.parse::<i64>().map_or_else(
                |_| Lexical(current.to_owned()),
                |n| Numerical(n, current.to_owned()),
            ));
        }

        // A non-numeric run only ends at a digit, a '-', or end of input.
        if !(numeric || c == Some(&'-') || c.is_none()) {
            return None;
        }

        if is_semver_prerelease(current) {
            if c == Some(&'-') {
                // Pre-releases can have multiple dashes
                None
            } else {
                Some(SemverPrerelease(current.to_owned()))
            }
        } else {
            Some(Lexical(current.to_owned()))
        }
    }

    if str_in.is_empty() {
        return VecDeque::new();
    }

    // Strip build metadata (after `+`)
    let s = if let Some((left, _)) = str_in.split_once('+') {
        left
    } else {
        str_in
    };

    let mut out: VecDeque<SortingType> = VecDeque::new();
    let mut current = String::new();

    let mut currently_numeric = s.starts_with(|c: char| c.is_ascii_digit());
    // A leading '-' would make the very first split fire on an empty
    // `current`; `skip` swallows that one spurious split so the dash stays
    // attached to the first component. NOTE(review): inferred from the loop
    // below — verify against upstream FlexVer behavior.
    let mut skip = s.starts_with('-');

    for c in s.chars() {
        if let Some(part) = handle_split(&current, Some(&c), currently_numeric) {
            if skip {
                skip = false;
            } else {
                out.push_back(part);
                current.clear();
                currently_numeric = c.is_ascii_digit();
            }
        }
        // `current` is only appended AFTER the split decision: `c` belongs
        // to the next run when a split just happened.
        current.push(c);
    }

    // Flush the final run (end-of-input signalled by `c = None`).
    if let Some(part) = handle_split(&current, None, currently_numeric) {
        out.push_back(part);
    }

    out
}
|
||||
|
||||
/// Compare two version strings using `FlexVer` rules.
|
||||
///
|
||||
/// Returns:
|
||||
/// - `Ordering::Less` if `a` < `b`
|
||||
/// - `Ordering::Equal` if `a` == `b`
|
||||
/// - `Ordering::Greater` if `a` > `b`
|
||||
///
|
||||
/// This matches the behavior of flexver-java:
|
||||
/// - "1.0.0" > "1.0.0-beta" (release > pre-release)
|
||||
/// - "1.0.0-beta" < "1.0.0+build123" (pre-release < build metadata)
|
||||
#[expect(
|
||||
clippy::unreachable,
|
||||
reason = "the VersionComparisonIterator never yields (None, None)"
|
||||
)]
|
||||
pub fn compare(left: &str, right: &str) -> Ordering {
|
||||
let iter = VersionComparisonIterator {
|
||||
left: decompose(left),
|
||||
right: decompose(right),
|
||||
};
|
||||
|
||||
for next in iter {
|
||||
use SortingType::{Numerical, SemverPrerelease};
|
||||
|
||||
let current = match next {
|
||||
// Left ran out first
|
||||
(Some(l), None) => {
|
||||
if let SemverPrerelease(_) = l {
|
||||
Less
|
||||
} else {
|
||||
Greater
|
||||
}
|
||||
},
|
||||
// Right ran out first
|
||||
(None, Some(r)) => {
|
||||
if let SemverPrerelease(_) = r {
|
||||
Greater
|
||||
} else {
|
||||
Less
|
||||
}
|
||||
},
|
||||
// Both have components
|
||||
(Some(l), Some(r)) => {
|
||||
match (l, r) {
|
||||
(Numerical(l, _), Numerical(r, _)) => l.cmp(&r),
|
||||
(l, r) => l.into_string().cmp(&r.into_string()),
|
||||
}
|
||||
},
|
||||
(None, None) => unreachable!(),
|
||||
};
|
||||
|
||||
if current != Equal {
|
||||
return current;
|
||||
}
|
||||
}
|
||||
|
||||
Equal
|
||||
}
|
||||
|
||||
/// Version comparison iterator that yields pairs of components.
///
/// Each step pops one component from each side; a side that has run out
/// contributes `None`, and iteration stops once both sides are exhausted —
/// so `(None, None)` is never produced as an item (see the `Iterator` impl).
#[derive(Debug)]
struct VersionComparisonIterator {
    left: VecDeque<SortingType>,
    right: VecDeque<SortingType>,
}
|
||||
|
||||
impl Iterator for VersionComparisonIterator {
|
||||
type Item = (Option<SortingType>, Option<SortingType>);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let item = (self.left.pop_front(), self.right.pop_front());
|
||||
if item == (None, None) {
|
||||
None
|
||||
} else {
|
||||
Some(item)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `FlexVer` type for use with standard library traits
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct FlexVer<'a>(pub &'a str);
|
||||
|
||||
impl PartialEq for FlexVer<'_> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
compare(self.0, other.0) == Equal
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for FlexVer<'_> {}
|
||||
|
||||
impl PartialOrd for FlexVer<'_> {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for FlexVer<'_> {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
compare(self.0, other.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::cmp::Ordering;

    use super::*;

    /// Thin alias to keep the assertions below compact.
    fn cmp(a: &str, b: &str) -> Ordering {
        compare(a, b)
    }

    #[test]
    fn test_basic_release_comparison() {
        assert_eq!(cmp("1.0.0", "1.0.0"), Ordering::Equal);
        assert_eq!(cmp("1.0.0", "1.0.1"), Ordering::Less);
        assert_eq!(cmp("1.0.1", "1.0.0"), Ordering::Greater);
        assert_eq!(cmp("1.0.0", "1.1.0"), Ordering::Less);
        assert_eq!(cmp("1.1.0", "1.0.0"), Ordering::Greater);
        assert_eq!(cmp("2.0.0", "1.9.9"), Ordering::Greater);
    }

    #[test]
    fn test_prerelease_comparison() {
        // Release > pre-release
        assert_eq!(cmp("1.0.0", "1.0.0-beta"), Ordering::Greater);
        assert_eq!(cmp("1.0.0-beta", "1.0.0"), Ordering::Less);

        // Pre-release with tilde
        assert_eq!(cmp("1.0.0~1", "1.0.0~2"), Ordering::Less);
        assert_eq!(cmp("1.0.0~2", "1.0.0~1"), Ordering::Greater);
        assert_eq!(cmp("1.0.0~1", "1.0.0~1"), Ordering::Equal);
    }

    #[test]
    fn test_prerelease_with_tilde_vs_alpha() {
        // In FlexVer, "~" is not treated as a pre-release marker
        // Only "-" followed by text marks a pre-release
        // So "1.0.0~1" is compared lexicographically vs "1.0.0-beta"
        // Since '~' (ASCII 126) > '-' (ASCII 45), "~1" > "-beta"
        assert_eq!(cmp("1.0.0~1", "1.0.0-beta"), Ordering::Greater);
        assert_eq!(cmp("1.0.0-beta", "1.0.0~1"), Ordering::Less);
    }

    #[test]
    fn test_build_metadata() {
        // Build metadata with + is stripped for comparison
        assert_eq!(cmp("1.0.0+build", "1.0.0"), Ordering::Equal);
        assert_eq!(cmp("1.0.0+build", "1.0.0-alpha"), Ordering::Greater);
    }

    #[test]
    fn test_with_file_extensions() {
        // File extensions should be handled by string comparison
        assert_eq!(cmp("mod-1.0.0.jar", "mod-1.0.0.jar"), Ordering::Equal);
        assert!(cmp("mod-1.0.0.jar", "mod-1.0.1.jar").is_lt());
        assert!(cmp("mod-1.0.1.jar", "mod-1.0.0.jar").is_gt());
    }

    #[test]
    fn test_complex_versions() {
        // Simple version comparison
        assert!(cmp("sodium-1.0.0", "sodium-1.0.1").is_lt());
        assert!(cmp("sodium-1.0.1", "sodium-1.0.0").is_gt());

        // File extensions are NOT stripped - they're part of the version string
        // "sodium-1.0.0.jar" < "sodium-1.0.0~1.jar" because '.' (46) < '~' (126)
        assert!(cmp("sodium-1.0.0.jar", "sodium-1.0.0~1.jar").is_lt());
        assert!(cmp("sodium-1.0.0~1.jar", "sodium-1.0.0.jar").is_gt());

        assert!(cmp("fabric-0.15.0.1", "fabric-0.15.0.2").is_lt());
    }

    // FlexVer implements Ord, so std's min/max work directly on wrappers.
    #[test]
    fn test_min_max() {
        assert_eq!(FlexVer("1.0.0").min(FlexVer("1.0.0")), FlexVer("1.0.0"));
        assert_eq!(FlexVer("a1.2.6").min(FlexVer("b1.7.3")), FlexVer("a1.2.6"));
        assert_eq!(FlexVer("b1.7.3").max(FlexVer("a1.2.6")), FlexVer("b1.7.3"));
    }

    /// Antisymmetry check: compare(a, b) must be the inverse of compare(b, a).
    #[test]
    fn test_commutative() {
        // If a > b, then b < a
        let pairs = vec![
            ("1.0.0", "1.0.1"),
            ("1.0.0-beta", "1.0.0"),
            ("1.0.0~1", "1.0.0~2"),
        ];

        for (a, b) in pairs {
            let ordering = compare(a, b);
            let inverse = match ordering {
                Ordering::Less => Ordering::Greater,
                Ordering::Greater => Ordering::Less,
                Ordering::Equal => Ordering::Equal,
            };
            assert_eq!(
                compare(b, a),
                inverse,
                "Commutativity violation: {a} vs {b}"
            );
        }
    }
}
|
||||
169
crates/pakker-core/src/utils/hash.rs
Normal file
169
crates/pakker-core/src/utils/hash.rs
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
use std::{
|
||||
fs::File,
|
||||
io::{BufReader, Read},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
use md5::{Digest as Md5Digest, Md5};
|
||||
use sha1::Sha1;
|
||||
use sha2::{Sha256, Sha512};
|
||||
|
||||
use crate::error::{PakkerError, Result};
|
||||
|
||||
/// Render raw digest bytes as a lowercase hexadecimal string.
pub fn hash_to_hex(hash: impl AsRef<[u8]>) -> String {
    use std::fmt::Write;
    let bytes = hash.as_ref();
    bytes.iter().fold(
        String::with_capacity(bytes.len() * 2),
        |mut hex, byte| {
            // Writing to a String cannot fail; the result is ignored.
            let _ = write!(hex, "{byte:02x}");
            hex
        },
    )
}
|
||||
|
||||
/// Compute SHA1 hash of a file
|
||||
pub fn compute_sha1<P: AsRef<Path>>(path: P) -> Result<String> {
|
||||
let file = File::open(path)?;
|
||||
let mut reader = BufReader::new(file);
|
||||
let mut hasher = Sha1::new();
|
||||
let mut buffer = [0; 8192];
|
||||
|
||||
loop {
|
||||
let n = reader.read(&mut buffer)?;
|
||||
if n == 0 {
|
||||
break;
|
||||
}
|
||||
hasher.update(&buffer[..n]);
|
||||
}
|
||||
|
||||
Ok(hash_to_hex(hasher.finalize().as_slice()))
|
||||
}
|
||||
|
||||
/// Compute SHA256 hash of a file
|
||||
pub fn compute_sha256<P: AsRef<Path>>(path: P) -> Result<String> {
|
||||
let file = File::open(path)?;
|
||||
let mut reader = BufReader::new(file);
|
||||
let mut hasher = Sha256::new();
|
||||
let mut buffer = [0; 8192];
|
||||
|
||||
loop {
|
||||
let n = reader.read(&mut buffer)?;
|
||||
if n == 0 {
|
||||
break;
|
||||
}
|
||||
hasher.update(&buffer[..n]);
|
||||
}
|
||||
|
||||
Ok(hash_to_hex(hasher.finalize().as_slice()))
|
||||
}
|
||||
|
||||
/// Compute SHA256 hash of byte data
|
||||
pub fn compute_sha256_bytes(data: &[u8]) -> String {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(data);
|
||||
hash_to_hex(hasher.finalize().as_slice())
|
||||
}
|
||||
|
||||
/// Compute SHA512 hash of a file
|
||||
pub fn compute_sha512<P: AsRef<Path>>(path: P) -> Result<String> {
|
||||
let file = File::open(path)?;
|
||||
let mut reader = BufReader::new(file);
|
||||
let mut hasher = Sha512::new();
|
||||
let mut buffer = [0; 8192];
|
||||
|
||||
loop {
|
||||
let n = reader.read(&mut buffer)?;
|
||||
if n == 0 {
|
||||
break;
|
||||
}
|
||||
hasher.update(&buffer[..n]);
|
||||
}
|
||||
|
||||
Ok(hash_to_hex(hasher.finalize().as_slice()))
|
||||
}
|
||||
|
||||
/// Compute MD5 hash of a file
|
||||
pub fn compute_md5<P: AsRef<Path>>(path: P) -> Result<String> {
|
||||
let file = File::open(path)?;
|
||||
let mut reader = BufReader::new(file);
|
||||
let mut hasher = Md5::new();
|
||||
let mut buffer = [0; 8192];
|
||||
|
||||
loop {
|
||||
let n = reader.read(&mut buffer)?;
|
||||
if n == 0 {
|
||||
break;
|
||||
}
|
||||
hasher.update(&buffer[..n]);
|
||||
}
|
||||
|
||||
let hash = hasher.finalize();
|
||||
let mut hex = String::with_capacity(hash.len() * 2);
|
||||
for byte in hash {
|
||||
let _ = std::fmt::write(&mut hex, format_args!("{byte:02x}"));
|
||||
}
|
||||
Ok(hex)
|
||||
}
|
||||
|
||||
/// Verify a file's hash against expected value
|
||||
pub fn verify_hash<P: AsRef<Path>>(
|
||||
path: P,
|
||||
algorithm: &str,
|
||||
expected: &str,
|
||||
) -> Result<bool> {
|
||||
let path = path.as_ref();
|
||||
let actual = match algorithm {
|
||||
"sha1" => compute_sha1(path)?,
|
||||
"sha256" => compute_sha256(path)?,
|
||||
"sha512" => compute_sha512(path)?,
|
||||
"md5" => compute_md5(path)?,
|
||||
_ => {
|
||||
return Err(PakkerError::InternalError(format!(
|
||||
"Unknown hash algorithm: {algorithm}"
|
||||
)));
|
||||
},
|
||||
};
|
||||
|
||||
Ok(actual.eq_ignore_ascii_case(expected))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_sha256_bytes_deterministic() {
        let data = b"test data";
        let hash1 = compute_sha256_bytes(data);
        let hash2 = compute_sha256_bytes(data);
        assert_eq!(hash1, hash2, "SHA256 must be deterministic");
    }

    // 32 digest bytes -> 64 hex characters, all lowercase hex digits.
    #[test]
    fn test_sha256_bytes_format() {
        let data = b"hello";
        let hash = compute_sha256_bytes(data);
        assert_eq!(hash.len(), 64, "SHA256 hex should be 64 characters");
        assert!(
            hash.chars().all(|c| c.is_ascii_hexdigit()),
            "SHA256 should only contain hex digits"
        );
    }

    // Well-known SHA256 digest of the empty input.
    #[test]
    fn test_sha256_bytes_empty() {
        let hash = compute_sha256_bytes(b"");
        assert_eq!(
            hash,
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        );
    }

    #[test]
    fn test_sha256_bytes_known_value() {
        // SHA256 of "hello" in hex
        let expected =
            "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824";
        let hash = compute_sha256_bytes(b"hello");
        assert_eq!(hash, expected);
    }
}
|
||||
35
crates/pakker-core/src/utils/id.rs
Normal file
35
crates/pakker-core/src/utils/id.rs
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
use rand::RngExt;
|
||||
|
||||
const CHARSET: &[u8] =
|
||||
b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
const ID_LENGTH: usize = 16;
|
||||
|
||||
/// Generate a random 16-character alphanumeric pakku ID
|
||||
pub fn generate_pakku_id() -> String {
|
||||
let mut rng = rand::rng();
|
||||
(0..ID_LENGTH)
|
||||
.map(|_| {
|
||||
let idx = rng.random_range(0..CHARSET.len());
|
||||
CHARSET[idx] as char
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Checks length and character class only; the exact output is random.
    #[test]
    fn test_generate_pakku_id() {
        let id = generate_pakku_id();
        assert_eq!(id.len(), ID_LENGTH);
        assert!(id.chars().all(char::is_alphanumeric));
    }

    // NOTE(review): probabilistic — a collision across 62^16 possible IDs
    // is astronomically unlikely but not strictly impossible.
    #[test]
    fn test_unique_ids() {
        let id1 = generate_pakku_id();
        let id2 = generate_pakku_id();
        assert_ne!(id1, id2);
    }
}
|
||||
7
crates/pakker-core/src/utils/mod.rs
Normal file
7
crates/pakker-core/src/utils/mod.rs
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
pub mod flexver;
|
||||
pub mod hash;
|
||||
pub mod id;
|
||||
|
||||
pub use flexver::FlexVer;
|
||||
pub use hash::verify_hash;
|
||||
pub use id::generate_pakku_id;
|
||||
Loading…
Add table
Add a link
Reference in a new issue