initial commit
Signed-off-by: NotAShelf <raf@notashelf.dev> Change-Id: Ife1391ed23a1e7f388b1b5eca90b9ea76a6a6964
This commit is contained in:
commit
ef28bdaeb4
63 changed files with 17292 additions and 0 deletions
227
src/cli/commands/add.rs
Normal file
227
src/cli/commands/add.rs
Normal file
|
|
@ -0,0 +1,227 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{Config, LockFile, Project},
|
||||
platform::create_platform,
|
||||
resolver::DependencyResolver,
|
||||
};
|
||||
|
||||
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
|
||||
lockfile.loaders.keys().cloned().collect()
|
||||
}
|
||||
|
||||
pub fn create_all_platforms()
|
||||
-> Result<HashMap<String, Box<dyn crate::platform::PlatformClient>>> {
|
||||
let mut platforms = HashMap::new();
|
||||
|
||||
if let Ok(platform) = create_platform("modrinth", None) {
|
||||
platforms.insert("modrinth".to_string(), platform);
|
||||
}
|
||||
if let Ok(platform) =
|
||||
create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok())
|
||||
{
|
||||
platforms.insert("curseforge".to_string(), platform);
|
||||
}
|
||||
|
||||
Ok(platforms)
|
||||
}
|
||||
|
||||
async fn resolve_input(
|
||||
input: &str,
|
||||
platforms: &HashMap<String, Box<dyn crate::platform::PlatformClient>>,
|
||||
lockfile: &LockFile,
|
||||
) -> Result<Project> {
|
||||
for platform in platforms.values() {
|
||||
if let Ok(project) = platform
|
||||
.request_project_with_files(
|
||||
input,
|
||||
&lockfile.mc_versions,
|
||||
&get_loaders(lockfile),
|
||||
)
|
||||
.await
|
||||
{
|
||||
return Ok(project);
|
||||
}
|
||||
}
|
||||
|
||||
Err(PakkerError::ProjectNotFound(input.to_string()))
|
||||
}
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use crate::{cli::AddArgs, model::fork::LocalConfig};
|
||||
|
||||
/// Entry point for `pakker add`: resolve each requested project, optionally
/// resolve its dependencies (with per-dependency confirmation), and persist
/// the updated lockfile.
///
/// If no lockfile exists yet but a fork parent is configured, a minimal
/// lockfile seeded from the parent's metadata is created first.
///
/// # Errors
/// Fails when no lockfile/fork setup is found, a project cannot be resolved
/// on any platform, or loading/saving the lockfile fails.
pub async fn execute(
    args: AddArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    log::info!("Adding projects: {:?}", args.inputs);

    // Load lockfile
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));

    // Check if lockfile exists (try both pakker-lock.json and pakku-lock.json)
    let lockfile_exists =
        lockfile_path.exists() || lockfile_dir.join("pakku-lock.json").exists();

    if !lockfile_exists {
        // Try to load config from both pakker.json and pakku.json
        let local_config = LocalConfig::load(config_dir).or_else(|_| {
            let legacy_config_path = config_dir.join("pakku.json");
            if legacy_config_path.exists() {
                LocalConfig::load(&config_dir.join("pakku.json"))
            } else {
                Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "No pakker.json found",
                )))
            }
        })?;

        if local_config.has_parent() {
            log::info!("Creating minimal fork lockfile with parent metadata...");

            // Check for parent lockfile (try both pakker-lock.json and
            // pakku-lock.json)
            let parent_paths = [
                lockfile_dir.join(".pakku/parent/pakker-lock.json"),
                lockfile_dir.join(".pakku/parent/pakku-lock.json"),
            ];

            let parent_found = parent_paths.iter().any(|path| path.exists());
            if !parent_found {
                return Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "Fork configured but parent lockfile not found at \
                     .pakku/parent/pakker-lock.json or .pakku/parent/pakku-lock.json",
                )));
            }

            // Load parent lockfile to get metadata
            let parent_lockfile = parent_paths
                .iter()
                .find(|path| path.exists())
                .and_then(|path| LockFile::load(path.parent().unwrap()).ok())
                .ok_or_else(|| {
                    PakkerError::IoError(std::io::Error::new(
                        std::io::ErrorKind::NotFound,
                        "Failed to load parent lockfile metadata",
                    ))
                })?;

            // Seed a fork lockfile: inherit target/versions/loaders from the
            // parent but start with an empty project list.
            let minimal_lockfile = LockFile {
                target: parent_lockfile.target,
                mc_versions: parent_lockfile.mc_versions,
                loaders: parent_lockfile.loaders,
                projects: Vec::new(),
                lockfile_version: 1,
            };
            minimal_lockfile.save_without_validation(lockfile_dir)?;
        } else {
            return Err(PakkerError::IoError(std::io::Error::new(
                std::io::ErrorKind::NotFound,
                "pakker-lock.json not found and no fork configured. Run 'pakker init' \
                 first.",
            )));
        }
    }

    let mut lockfile = LockFile::load_with_validation(lockfile_dir, false)?;

    // Load config if available
    let _config = Config::load(config_dir).ok();

    // Create platforms
    let platforms = create_all_platforms()?;

    let mut new_projects = Vec::new();

    // Resolve each input
    for input in &args.inputs {
        let project = resolve_input(input, &platforms, &lockfile).await?;

        // Check if already exists by matching platform IDs (not pakku_id which is
        // random)
        let already_exists = lockfile.projects.iter().any(|p| {
            // Check if any platform ID matches
            project.id.iter().any(|(platform, id)| {
                p.id
                    .get(platform)
                    .is_some_and(|existing_id| existing_id == id)
            })
        });

        if already_exists {
            if args.update {
                log::info!("Updating existing project: {}", project.get_name());
                // Find and replace the existing project
                if let Some(pos) = lockfile.projects.iter().position(|p| {
                    project.id.iter().any(|(platform, id)| {
                        p.id
                            .get(platform)
                            .is_some_and(|existing_id| existing_id == id)
                    })
                }) {
                    lockfile.projects[pos] = project;
                }
                continue;
            }
            log::info!("Project already exists: {}", project.get_name());
            continue;
        }

        new_projects.push(project);
    }

    // Resolve dependencies unless --no-deps is specified
    if !args.no_deps {
        log::info!("Resolving dependencies...");

        let mut resolver = DependencyResolver::new();
        // all_new_projects accumulates inputs + accepted dependencies; it is
        // also used below to avoid re-adding a dep that another input pulled in.
        let mut all_new_projects = new_projects.clone();

        for project in &mut new_projects {
            let deps = resolver.resolve(project, &mut lockfile, &platforms).await?;

            for dep in deps {
                if !lockfile.projects.iter().any(|p| p.pakku_id == dep.pakku_id)
                    && !all_new_projects.iter().any(|p| p.pakku_id == dep.pakku_id)
                {
                    // Prompt user for confirmation unless --yes flag is set
                    if !args.yes {
                        let prompt_msg = format!(
                            "Add dependency '{}' required by '{}'?",
                            dep.get_name(),
                            project.get_name()
                        );
                        if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? {
                            log::info!("Skipping dependency: {}", dep.get_name());
                            continue;
                        }
                    }

                    log::info!("Adding dependency: {}", dep.get_name());
                    all_new_projects.push(dep);
                }
            }
        }

        new_projects = all_new_projects;
    }

    // Add projects to lockfile (updates already handled above)
    for project in new_projects {
        lockfile.add_project(project);
    }

    // Save lockfile
    lockfile.save(lockfile_dir)?;

    // NOTE(review): this reports the number of *requested* inputs, which may
    // exceed the number actually added (skipped/already-existing projects are
    // still counted) — confirm intended.
    log::info!("Successfully added {} project(s)", args.inputs.len());

    Ok(())
}
|
||||
386
src/cli/commands/add_prj.rs
Normal file
386
src/cli/commands/add_prj.rs
Normal file
|
|
@ -0,0 +1,386 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{
|
||||
Config,
|
||||
LockFile,
|
||||
Project,
|
||||
enums::{ProjectSide, ProjectType, UpdateStrategy},
|
||||
},
|
||||
platform::create_platform,
|
||||
resolver::DependencyResolver,
|
||||
};
|
||||
|
||||
/// Split a project argument into its slug/ID part and an optional file ID.
///
/// Accepted shapes: `"input"` or `"input#file_id"`. Everything after the
/// first `#` (including any further `#`s) is treated as the file ID.
fn parse_common_arg(input: &str) -> (String, Option<String>) {
    match input.split_once('#') {
        Some((project, file)) => (project.to_owned(), Some(file.to_owned())),
        None => (input.to_owned(), None),
    }
}
|
||||
|
||||
/// Parse a GitHub argument (owner/repo with optional tag)
|
||||
/// Format: "owner/repo" or "owner/repo#tag"
|
||||
fn parse_github_arg(input: &str) -> Result<(String, String, Option<String>)> {
|
||||
let (repo_part, tag) = if let Some((r, t)) = input.split_once('#') {
|
||||
(r, Some(t.to_string()))
|
||||
} else {
|
||||
(input, None)
|
||||
};
|
||||
|
||||
if let Some((owner, repo)) = repo_part.split_once('/') {
|
||||
Ok((owner.to_string(), repo.to_string(), tag))
|
||||
} else {
|
||||
Err(PakkerError::InvalidInput(format!(
|
||||
"Invalid GitHub format '{input}'. Expected: owner/repo or owner/repo#tag"
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
|
||||
lockfile.loaders.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Entry point for `pakker add-prj`: fetch a project explicitly from one or
/// more named platforms (`--cf`, `--mr`, `--gh`), merge the results into a
/// single project, apply user-specified overrides, and add or replace it in
/// the lockfile (with confirmation prompts unless `yes` is set). Dependencies
/// are then resolved unless `no_deps` is set.
///
/// NOTE(review): this signature has 14 parameters — a candidate for an
/// options struct in a follow-up.
///
/// # Errors
/// Fails when no platform flag is given, a platform fetch or file/tag filter
/// finds nothing, or lockfile I/O fails.
pub async fn execute(
    cf_arg: Option<String>,
    mr_arg: Option<String>,
    gh_arg: Option<String>,
    project_type: Option<ProjectType>,
    project_side: Option<ProjectSide>,
    update_strategy: Option<UpdateStrategy>,
    redistributable: Option<bool>,
    subpath: Option<String>,
    aliases: Vec<String>,
    export: Option<bool>,
    no_deps: bool,
    yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // At least one platform must be specified
    if cf_arg.is_none() && mr_arg.is_none() && gh_arg.is_none() {
        return Err(PakkerError::InvalidInput(
            "At least one platform must be specified (--cf, --mr, or --gh)"
                .to_string(),
        ));
    }

    log::info!("Adding project with explicit platform specification");

    // Load lockfile
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;

    // Load config if available
    let _config = Config::load(config_dir).ok();

    // Get MC versions and loaders from lockfile
    let mc_versions = &lockfile.mc_versions;
    let loaders = get_loaders(&lockfile);

    // Fetch projects from each specified platform
    let mut projects_to_merge: Vec<Project> = Vec::new();

    // CurseForge
    if let Some(cf_input) = cf_arg {
        log::info!("Fetching from CurseForge: {cf_input}");
        let (input, file_id) = parse_common_arg(&cf_input);

        let cf_api_key = std::env::var("CURSEFORGE_API_KEY").ok();
        let platform = create_platform("curseforge", cf_api_key)?;

        let mut project = platform
            .request_project_with_files(&input, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!(
                    "CurseForge project '{input}': {e}"
                ))
            })?;

        // If file_id specified, filter to that file
        if let Some(fid) = file_id {
            project.files.retain(|f| f.id == fid);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "File ID '{fid}' not found for CurseForge project '{input}'"
                )));
            }
        }

        projects_to_merge.push(project);
    }

    // Modrinth
    if let Some(mr_input) = mr_arg {
        log::info!("Fetching from Modrinth: {mr_input}");
        let (input, file_id) = parse_common_arg(&mr_input);

        let platform = create_platform("modrinth", None)?;

        let mut project = platform
            .request_project_with_files(&input, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!("Modrinth project '{input}': {e}"))
            })?;

        // If file_id specified, filter to that file
        if let Some(fid) = file_id {
            project.files.retain(|f| f.id == fid);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "File ID '{fid}' not found for Modrinth project '{input}'"
                )));
            }
        }

        projects_to_merge.push(project);
    }

    // GitHub
    if let Some(gh_input) = gh_arg {
        log::info!("Fetching from GitHub: {gh_input}");
        let (owner, repo, tag) = parse_github_arg(&gh_input)?;

        let gh_token = std::env::var("GITHUB_TOKEN").ok();
        let platform = create_platform("github", gh_token)?;

        let repo_path = format!("{owner}/{repo}");
        let mut project = platform
            .request_project_with_files(&repo_path, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!(
                    "GitHub repository '{owner}/{repo}': {e}"
                ))
            })?;

        // If tag specified, filter to that tag
        // (GitHub "files" are releases, so a file id is the release tag)
        if let Some(t) = tag {
            project.files.retain(|f| f.id == t);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "Tag '{t}' not found for GitHub repository '{owner}/{repo}'"
                )));
            }
        }

        projects_to_merge.push(project);
    }

    // Merge all fetched projects into one
    if projects_to_merge.is_empty() {
        return Err(PakkerError::ProjectNotFound(
            "No projects could be fetched from specified platforms".to_string(),
        ));
    }

    // First fetched project is the base; the rest are merged into it.
    let mut combined_project = projects_to_merge.remove(0);
    for project in projects_to_merge {
        combined_project.merge(project);
    }

    // Apply user-specified properties
    if let Some(pt) = project_type {
        combined_project.r#type = pt;
    }
    if let Some(ps) = project_side {
        combined_project.side = ps;
    }
    if let Some(us) = update_strategy {
        combined_project.update_strategy = us;
    }
    if let Some(r) = redistributable {
        combined_project.redistributable = r;
    }
    if let Some(sp) = subpath {
        combined_project.subpath = Some(sp);
    }
    if let Some(e) = export {
        combined_project.export = e;
    }

    // Add aliases
    for alias in aliases {
        combined_project.aliases.insert(alias);
    }

    // Check if project already exists
    let existing_pos = lockfile.projects.iter().position(|p| {
        // Check if any platform ID matches
        combined_project.id.iter().any(|(platform, id)| {
            p.id
                .get(platform)
                .is_some_and(|existing_id| existing_id == id)
        })
    });

    let project_name = combined_project.get_name();

    if let Some(pos) = existing_pos {
        let existing_project = &lockfile.projects[pos];
        let existing_name = existing_project.get_name();

        // Replacing an existing entry defaults to "no" at the prompt.
        if !yes {
            let prompt_msg = format!(
                "Project '{existing_name}' already exists. Replace with \
                 '{project_name}'?"
            );
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, false)? {
                log::info!("Operation cancelled by user");
                return Ok(());
            }
        }

        log::info!("Replacing existing project: {existing_name}");
        lockfile.projects[pos] = combined_project.clone();
        println!("✓ Replaced '{existing_name}' with '{project_name}'");
    } else {
        // Adding a new entry defaults to "yes" at the prompt.
        if !yes {
            let prompt_msg = format!("Add project '{project_name}'?");
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? {
                log::info!("Operation cancelled by user");
                return Ok(());
            }
        }

        lockfile.add_project(combined_project.clone());
        println!("✓ Added '{project_name}'");
    }

    // Resolve dependencies unless --no-deps is specified
    if !no_deps {
        log::info!("Resolving dependencies...");

        let platforms = create_all_platforms()?;
        let mut resolver = DependencyResolver::new();

        let deps = resolver
            .resolve(&mut combined_project, &mut lockfile, &platforms)
            .await?;

        for dep in deps {
            // Skip if already in lockfile
            if lockfile.projects.iter().any(|p| {
                dep.id.iter().any(|(platform, id)| {
                    p.id
                        .get(platform)
                        .is_some_and(|existing_id| existing_id == id)
                })
            }) {
                continue;
            }

            let dep_name = dep.get_name();

            // Prompt user for confirmation unless --yes flag is set
            if !yes {
                let prompt_msg =
                    format!("Add dependency '{dep_name}' required by '{project_name}'?");
                if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? {
                    log::info!("Skipping dependency: {dep_name}");
                    continue;
                }
            }

            log::info!("Adding dependency: {dep_name}");
            lockfile.add_project(dep);
            println!("  ✓ Added dependency '{dep_name}'");
        }
    }

    // Save lockfile
    lockfile.save(lockfile_dir)?;

    log::info!("Successfully completed add-prj operation");

    Ok(())
}
|
||||
|
||||
fn create_all_platforms()
|
||||
-> Result<HashMap<String, Box<dyn crate::platform::PlatformClient>>> {
|
||||
let mut platforms = HashMap::new();
|
||||
|
||||
if let Ok(platform) = create_platform("modrinth", None) {
|
||||
platforms.insert("modrinth".to_string(), platform);
|
||||
}
|
||||
if let Ok(platform) =
|
||||
create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok())
|
||||
{
|
||||
platforms.insert("curseforge".to_string(), platform);
|
||||
}
|
||||
if let Ok(platform) =
|
||||
create_platform("github", std::env::var("GITHUB_TOKEN").ok())
|
||||
{
|
||||
platforms.insert("github".to_string(), platform);
|
||||
}
|
||||
|
||||
Ok(platforms)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for the pure argument-parsing helpers above.
    use super::*;

    #[test]
    fn test_parse_common_arg_without_file_id() {
        let (input, file_id) = parse_common_arg("fabric-api");
        assert_eq!(input, "fabric-api");
        assert_eq!(file_id, None);
    }

    #[test]
    fn test_parse_common_arg_with_file_id() {
        let (input, file_id) = parse_common_arg("fabric-api#12345");
        assert_eq!(input, "fabric-api");
        assert_eq!(file_id, Some("12345".to_string()));
    }

    #[test]
    fn test_parse_github_arg_owner_repo() {
        let result = parse_github_arg("FabricMC/fabric");
        assert!(result.is_ok());
        let (owner, repo, tag) = result.unwrap();
        assert_eq!(owner, "FabricMC");
        assert_eq!(repo, "fabric");
        assert_eq!(tag, None);
    }

    #[test]
    fn test_parse_github_arg_with_tag() {
        let result = parse_github_arg("FabricMC/fabric#v0.15.0");
        assert!(result.is_ok());
        let (owner, repo, tag) = result.unwrap();
        assert_eq!(owner, "FabricMC");
        assert_eq!(repo, "fabric");
        assert_eq!(tag, Some("v0.15.0".to_string()));
    }

    #[test]
    fn test_parse_github_arg_invalid() {
        // No '/' at all is the only rejected shape.
        let result = parse_github_arg("invalid-format");
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Invalid GitHub format")
        );
    }

    #[test]
    fn test_parse_github_arg_missing_repo() {
        // Documents deliberate leniency: "owner/" parses with an empty repo
        // segment rather than erroring.
        let result = parse_github_arg("FabricMC/");
        assert!(result.is_ok());
        let (owner, repo, tag) = result.unwrap();
        assert_eq!(owner, "FabricMC");
        assert_eq!(repo, "");
        assert_eq!(tag, None);
    }
}
|
||||
101
src/cli/commands/cfg.rs
Normal file
101
src/cli/commands/cfg.rs
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
use std::path::Path;
|
||||
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{error::Result, model::config::Config};
|
||||
|
||||
pub fn execute(
|
||||
config_path: &Path,
|
||||
name: Option<String>,
|
||||
version: Option<String>,
|
||||
description: Option<String>,
|
||||
author: Option<String>,
|
||||
mods_path: Option<String>,
|
||||
resource_packs_path: Option<String>,
|
||||
data_packs_path: Option<String>,
|
||||
worlds_path: Option<String>,
|
||||
shaders_path: Option<String>,
|
||||
) -> Result<()> {
|
||||
let config_dir = config_path.parent().unwrap_or(Path::new("."));
|
||||
let mut config = Config::load(config_dir)?;
|
||||
let mut changed = false;
|
||||
|
||||
// Modpack properties
|
||||
if let Some(new_name) = name {
|
||||
config.name = new_name.clone();
|
||||
println!("{}", format!("✓ 'name' set to '{new_name}'").green());
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if let Some(new_version) = version {
|
||||
config.version = new_version.clone();
|
||||
println!("{}", format!("✓ 'version' set to '{new_version}'").green());
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if let Some(new_description) = description {
|
||||
config.description = Some(new_description.clone());
|
||||
println!(
|
||||
"{}",
|
||||
format!("✓ 'description' set to '{new_description}'").green()
|
||||
);
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if let Some(new_author) = author {
|
||||
config.author = Some(new_author.clone());
|
||||
println!("{}", format!("✓ 'author' set to '{new_author}'").green());
|
||||
changed = true;
|
||||
}
|
||||
|
||||
// Project type paths
|
||||
if let Some(path) = mods_path {
|
||||
config.paths.insert("mod".to_string(), path.clone());
|
||||
println!("{}", format!("✓ 'paths.mod' set to '{path}'").green());
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if let Some(path) = resource_packs_path {
|
||||
config
|
||||
.paths
|
||||
.insert("resource-pack".to_string(), path.clone());
|
||||
println!(
|
||||
"{}",
|
||||
format!("✓ 'paths.resource-pack' set to '{path}'").green()
|
||||
);
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if let Some(path) = data_packs_path {
|
||||
config.paths.insert("data-pack".to_string(), path.clone());
|
||||
println!("{}", format!("✓ 'paths.data-pack' set to '{path}'").green());
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if let Some(path) = worlds_path {
|
||||
config.paths.insert("world".to_string(), path.clone());
|
||||
println!("{}", format!("✓ 'paths.world' set to '{path}'").green());
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if let Some(path) = shaders_path {
|
||||
config.paths.insert("shader".to_string(), path.clone());
|
||||
println!("{}", format!("✓ 'paths.shader' set to '{path}'").green());
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if !changed {
|
||||
eprintln!(
|
||||
"{}",
|
||||
"No changes specified. Use --help for options.".yellow()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Config::save expects directory path, not file path
|
||||
let config_dir = config_path.parent().unwrap_or(Path::new("."));
|
||||
config.save(config_dir)?;
|
||||
println!("\n{}", "Configuration updated successfully".green().bold());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
201
src/cli/commands/cfg_prj.rs
Normal file
201
src/cli/commands/cfg_prj.rs
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
use std::path::Path;
|
||||
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{
|
||||
config::Config,
|
||||
enums::{ProjectSide, ProjectType, UpdateStrategy},
|
||||
lockfile::LockFile,
|
||||
},
|
||||
};
|
||||
|
||||
/// Entry point for `pakker cfg-prj`: locate a project in the lockfile (by
/// pakku_id, then slug, then name — all case-insensitive for the latter two),
/// apply any provided per-project overrides to its config entry, and save.
///
/// # Errors
/// Fails when config/lockfile loading fails, the project cannot be found or
/// has no `pakku_id`, or an enum-valued flag has an unrecognized value.
pub fn execute(
    config_path: &Path,
    lockfile_path: &Path,
    project: String,
    r#type: Option<&str>,
    side: Option<&str>,
    update_strategy: Option<&str>,
    redistributable: Option<bool>,
    subpath: Option<String>,
    add_alias: Option<String>,
    remove_alias: Option<String>,
    export: Option<bool>,
) -> Result<()> {
    let config_dir = config_path.parent().unwrap_or(Path::new("."));
    let mut config = Config::load(config_dir)?;
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let lockfile = LockFile::load(lockfile_dir)?;

    // Find the project in lockfile to get its pakku_id
    // Try multiple lookup strategies: pakku_id first, then slug, then name
    let found_project = lockfile
        .find_project(&project)
        .or_else(|| {
            // Try to find by slug on any platform
            lockfile
                .projects
                .iter()
                .find(|p| p.slug.values().any(|s| s.eq_ignore_ascii_case(&project)))
        })
        .or_else(|| {
            // Try to find by name on any platform
            lockfile
                .projects
                .iter()
                .find(|p| p.name.values().any(|n| n.eq_ignore_ascii_case(&project)))
        })
        .ok_or_else(|| PakkerError::ProjectNotFound(project.clone()))?;

    let pakku_id = found_project.pakku_id.as_ref().ok_or_else(|| {
        PakkerError::InvalidProject("Project has no pakku_id".to_string())
    })?;

    // Get or create project config
    let mut project_config = config
        .get_project_config(pakku_id)
        .cloned()
        .unwrap_or_default();

    let mut changed = false;

    // --type: accept a few spelling variants, uppercased for comparison.
    if let Some(type_str) = r#type {
        let parsed_type = match type_str.to_uppercase().as_str() {
            "MOD" => ProjectType::Mod,
            "RESOURCE_PACK" | "RESOURCEPACK" => ProjectType::ResourcePack,
            "DATA_PACK" | "DATAPACK" => ProjectType::DataPack,
            "SHADER" => ProjectType::Shader,
            "WORLD" => ProjectType::World,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid type: {type_str}"
                )));
            },
        };
        project_config.r#type = Some(parsed_type);
        println!(
            "{}",
            format!("✓ 'type' set to '{parsed_type:?}' for '{pakku_id}'").green()
        );
        changed = true;
    }

    if let Some(side_str) = side {
        let parsed_side = match side_str.to_uppercase().as_str() {
            "CLIENT" => ProjectSide::Client,
            "SERVER" => ProjectSide::Server,
            "BOTH" => ProjectSide::Both,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid side: {side_str}"
                )));
            },
        };
        project_config.side = Some(parsed_side);
        println!(
            "{}",
            format!("✓ 'side' set to '{parsed_side:?}' for '{pakku_id}'").green()
        );
        changed = true;
    }

    if let Some(strategy_str) = update_strategy {
        let parsed_strategy = match strategy_str.to_uppercase().as_str() {
            "LATEST" => UpdateStrategy::Latest,
            "NONE" => UpdateStrategy::None,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid update strategy: {strategy_str}"
                )));
            },
        };
        project_config.update_strategy = Some(parsed_strategy);
        println!(
            "{}",
            format!(
                "✓ 'updateStrategy' set to '{parsed_strategy:?}' for '{pakku_id}'"
            )
            .green()
        );
        changed = true;
    }

    if let Some(new_redistributable) = redistributable {
        project_config.redistributable = Some(new_redistributable);
        println!(
            "{}",
            format!(
                "✓ 'redistributable' set to '{new_redistributable}' for '{pakku_id}'"
            )
            .green()
        );
        changed = true;
    }

    if let Some(new_subpath) = subpath {
        project_config.subpath = Some(new_subpath.clone());
        println!(
            "{}",
            format!("✓ 'subpath' set to '{new_subpath}' for '{pakku_id}'").green()
        );
        changed = true;
    }

    // Adding an alias is a no-op (no message, no save trigger) when the alias
    // is already present.
    if let Some(alias_to_add) = add_alias {
        let mut aliases = project_config.aliases.clone().unwrap_or_default();
        if !aliases.contains(&alias_to_add) {
            aliases.push(alias_to_add.clone());
            project_config.aliases = Some(aliases);
            println!(
                "{}",
                format!("✓ Added alias '{alias_to_add}' for '{pakku_id}'").green()
            );
            changed = true;
        }
    }

    // NOTE(review): removal reports success (and marks `changed`) even when
    // the alias was not present in the list — confirm intended.
    if let Some(alias_to_remove) = remove_alias
        && let Some(mut aliases) = project_config.aliases.clone()
    {
        aliases.retain(|a| a != &alias_to_remove);
        project_config.aliases = Some(aliases);
        println!(
            "{}",
            format!("✓ Removed alias '{alias_to_remove}' from '{pakku_id}'").green()
        );
        changed = true;
    }

    if let Some(new_export) = export {
        project_config.export = Some(new_export);
        println!(
            "{}",
            format!("✓ 'export' set to '{new_export}' for '{pakku_id}'").green()
        );
        changed = true;
    }

    if !changed {
        eprintln!(
            "{}",
            "No changes specified. Use --help for options.".yellow()
        );
        return Ok(());
    }

    config.set_project_config(pakku_id.clone(), project_config);
    // Config::save expects directory path, not file path
    let config_dir = config_path.parent().unwrap_or(Path::new("."));
    config.save(config_dir)?;

    println!(
        "\n{}",
        format!("Project configuration updated for '{pakku_id}'")
            .green()
            .bold()
    );

    Ok(())
}
|
||||
112
src/cli/commands/credentials.rs
Normal file
112
src/cli/commands/credentials.rs
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::Result,
|
||||
model::{
|
||||
PakkerCredentialsFile,
|
||||
credentials::{CredentialsSource, ResolvedCredentials},
|
||||
},
|
||||
};
|
||||
|
||||
/// Entry point for `pakker credentials`: with any delete flag set, remove the
/// selected Pakker-managed credential stores; otherwise list the stored
/// credentials (masked) together with where each one came from.
///
/// Flag semantics: `delete` implies both `delete_file` and `delete_keyring`.
///
/// # Errors
/// Fails when deleting a store, loading credentials, or resolving the
/// credentials file path fails.
pub fn execute(
    delete: bool,
    delete_file: bool,
    delete_keyring: bool,
) -> Result<()> {
    let delete_effective = delete || delete_file || delete_keyring;

    if delete_effective {
        // Pakker must never delete or modify Pakku's credentials file
        // (~/.pakku/credentials). Deletion here only affects Pakker-managed
        // storage (keyring + Pakker-owned file).
        // Note: these shadow the parameters, widening each flag with `delete`.
        let delete_keyring = delete_keyring || delete;
        let delete_pakker_file = delete_file || delete;

        if delete_pakker_file {
            PakkerCredentialsFile::delete()?;
        }
        if delete_keyring {
            ResolvedCredentials::delete_keyring()?;
        }

        println!("Credentials deleted.");
        return Ok(());
    }

    let creds = ResolvedCredentials::load()?;

    let has_any = creds.curseforge_api_key().is_some()
        || creds.modrinth_token().is_some()
        || creds.github_access_token().is_some();

    if !has_any {
        println!("{}", "No credentials stored".yellow());
        println!("\nUse 'pakker credentials set' to add credentials");
        return Ok(());
    }

    println!("{}", "Stored Credentials:".cyan().bold());
    println!();

    // One masked line per credential; unset ones are skipped by the helper.
    print_credential(
        "CurseForge API Key",
        creds.curseforge_api_key(),
        creds.curseforge_source(),
    );

    print_credential(
        "Modrinth Token",
        creds.modrinth_token(),
        creds.modrinth_source(),
    );

    print_credential(
        "GitHub Access Token",
        creds.github_access_token(),
        creds.github_source(),
    );

    println!();
    println!(
        "{}",
        format!(
            "Credentials file: {}",
            PakkerCredentialsFile::get_path()?.display()
        )
        .cyan()
    );

    Ok(())
}
|
||||
|
||||
fn print_credential(
|
||||
label: &str,
|
||||
value: Option<&str>,
|
||||
source: Option<CredentialsSource>,
|
||||
) {
|
||||
if let Some(v) = value {
|
||||
let masked = mask_key(v);
|
||||
let source = source.map_or("unknown", source_label);
|
||||
println!(" {} {} ({})", format!("{label}:").yellow(), masked, source);
|
||||
}
|
||||
}
|
||||
|
||||
const fn source_label(source: CredentialsSource) -> &'static str {
|
||||
match source {
|
||||
CredentialsSource::Env => "env",
|
||||
CredentialsSource::Keyring => "keyring",
|
||||
CredentialsSource::PakkerFile => "pakker-file",
|
||||
}
|
||||
}
|
||||
|
||||
/// Masks a secret for display: keys of 12 characters or fewer are fully
/// starred; longer keys keep the first 8 and last 4 characters with stars
/// in between.
///
/// Operates on characters rather than bytes so that non-ASCII input cannot
/// panic on a UTF-8 char-boundary slice (the previous `&key[..8]` /
/// `&key[len - 4..]` indexing would).
fn mask_key(key: &str) -> String {
  let char_count = key.chars().count();
  if char_count <= 12 {
    return "*".repeat(char_count);
  }

  let start: String = key.chars().take(8).collect();
  let end: String = key.chars().skip(char_count - 4).collect();
  let middle = "*".repeat(char_count - 12);

  format!("{start}{middle}{end}")
}
|
||||
74
src/cli/commands/credentials_set.rs
Normal file
74
src/cli/commands/credentials_set.rs
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
use crate::{
|
||||
error::{PakkerError, Result},
|
||||
model::{PakkerCredentialsFile, set_keyring_secret},
|
||||
};
|
||||
|
||||
pub fn execute(
|
||||
curseforge_api_key: Option<String>,
|
||||
modrinth_token: Option<String>,
|
||||
github_access_token: Option<String>,
|
||||
) -> Result<()> {
|
||||
let mut creds = PakkerCredentialsFile::load()?;
|
||||
let mut updated_any = false;
|
||||
|
||||
if let Some(key) = curseforge_api_key {
|
||||
let key = key.trim().to_string();
|
||||
if key.is_empty() {
|
||||
return Err(PakkerError::InternalError(
|
||||
"CurseForge API key cannot be empty".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
println!("Setting CurseForge API key...");
|
||||
set_keyring_secret("curseforge_api_key", &key)?;
|
||||
creds.curseforge_api_key = Some(key);
|
||||
updated_any = true;
|
||||
}
|
||||
|
||||
if let Some(token) = modrinth_token {
|
||||
let token = token.trim().to_string();
|
||||
if token.is_empty() {
|
||||
return Err(PakkerError::InternalError(
|
||||
"Modrinth token cannot be empty".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
println!("Setting Modrinth token...");
|
||||
set_keyring_secret("modrinth_token", &token)?;
|
||||
creds.modrinth_token = Some(token);
|
||||
updated_any = true;
|
||||
}
|
||||
|
||||
if let Some(token) = github_access_token {
|
||||
let token = token.trim().to_string();
|
||||
if token.is_empty() {
|
||||
return Err(PakkerError::InternalError(
|
||||
"GitHub access token cannot be empty".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
println!("Setting GitHub access token...");
|
||||
set_keyring_secret("github_access_token", &token)?;
|
||||
creds.github_access_token = Some(token);
|
||||
updated_any = true;
|
||||
}
|
||||
|
||||
if !updated_any {
|
||||
println!(
|
||||
"No credentials provided. Use --cf-api-key, --modrinth-token, or \
|
||||
--gh-access-token."
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
creds.save()?;
|
||||
|
||||
println!("Credentials saved.");
|
||||
println!(
|
||||
"Credentials file: {}",
|
||||
PakkerCredentialsFile::get_path()?.display()
|
||||
);
|
||||
println!("Keyring service: pakker");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
457
src/cli/commands/diff.rs
Normal file
457
src/cli/commands/diff.rs
Normal file
|
|
@ -0,0 +1,457 @@
|
|||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fs,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
use crate::{cli::DiffArgs, error::Result, model::LockFile};
|
||||
|
||||
/// Kind of difference detected for a single project between two lockfiles.
#[derive(Debug)]
enum ChangeType {
  // Present only in the new lockfile.
  Added,
  // Present only in the old lockfile.
  Removed,
  // Present in both, but with a different primary file.
  Updated,
}
|
||||
|
||||
/// One project-level difference between two lockfiles, as produced by the
/// diff command and rendered by the terminal/markdown writers.
#[derive(Debug)]
struct ProjectChange {
  // Human-readable project name used in reports.
  name: String,
  // Whether the project was added, removed, or updated.
  change_type: ChangeType,
  // File name recorded in the old lockfile, when one exists.
  old_file: Option<String>,
  // File name recorded in the new lockfile, when one exists.
  new_file: Option<String>,
}
|
||||
|
||||
/// Compares two lockfiles and reports the differences in target, Minecraft
/// versions, loaders, and projects.
///
/// Output goes to a diff-fenced markdown file (`--markdown-diff`), a plain
/// markdown file (`--markdown`), or the terminal (default).
///
/// # Errors
/// Fails when either lockfile cannot be loaded or an output file cannot be
/// written.
pub fn execute(args: DiffArgs, _lockfile_path: &Path) -> Result<()> {
  log::info!("Comparing lockfiles");

  // Load old lockfile.
  // NOTE(review): only the *directory* of `args.old_lockfile` is handed to
  // `LockFile::load`; the file-name component appears unused — confirm
  // `load` picks the intended file inside that directory.
  let old_path = Path::new(&args.old_lockfile);
  let old_dir = old_path.parent().unwrap_or(Path::new("."));
  let old_lockfile = LockFile::load(old_dir)?;

  // Load current lockfile (same directory-only caveat as above).
  let current_path = args
    .current_lockfile
    .as_ref()
    .map_or(Path::new("pakku-lock.json"), Path::new);
  let current_dir = current_path.parent().unwrap_or(Path::new("."));
  let current_lockfile = LockFile::load(current_dir)?;

  // Compare metadata
  let mut changes = Vec::new();

  // Check MC versions; set difference yields `&&String`, which is why the
  // writer functions take `&[&&String]`.
  let old_mc: HashSet<_> = old_lockfile.mc_versions.iter().collect();
  let new_mc: HashSet<_> = current_lockfile.mc_versions.iter().collect();
  let mc_added: Vec<_> = new_mc.difference(&old_mc).collect();
  let mc_removed: Vec<_> = old_mc.difference(&new_mc).collect();

  // Check loaders
  let old_loaders = &old_lockfile.loaders;
  let new_loaders = &current_lockfile.loaders;

  // Compare projects, keyed by pakku_id.
  let old_projects: HashMap<_, _> = old_lockfile
    .projects
    .iter()
    .map(|p| (&p.pakku_id, p))
    .collect();
  let new_projects: HashMap<_, _> = current_lockfile
    .projects
    .iter()
    .map(|p| (&p.pakku_id, p))
    .collect();

  // Find added, removed, updated projects. "Updated" means the first file's
  // name changed between the two lockfiles.
  for (id, new_proj) in &new_projects {
    if !old_projects.contains_key(id) {
      changes.push(ProjectChange {
        name: new_proj.name.values().next().cloned().unwrap_or_default(),
        change_type: ChangeType::Added,
        old_file: None,
        new_file: new_proj.files.first().map(|f| f.file_name.clone()),
      });
    } else if let Some(old_proj) = old_projects.get(id) {
      let old_file_name = old_proj.files.first().map(|f| &f.file_name);
      let new_file_name = new_proj.files.first().map(|f| &f.file_name);

      if old_file_name != new_file_name {
        changes.push(ProjectChange {
          name: new_proj
            .name
            .values()
            .next()
            .cloned()
            .unwrap_or_default(),
          change_type: ChangeType::Updated,
          old_file: old_file_name.cloned(),
          new_file: new_file_name.cloned(),
        });
      }
    }
  }

  // Projects present only in the old lockfile were removed.
  for (id, old_proj) in &old_projects {
    if !new_projects.contains_key(id) {
      changes.push(ProjectChange {
        name: old_proj.name.values().next().cloned().unwrap_or_default(),
        change_type: ChangeType::Removed,
        old_file: old_proj.files.first().map(|f| f.file_name.clone()),
        new_file: None,
      });
    }
  }

  // Output results; --markdown-diff takes precedence over --markdown.
  if let Some(path) = &args.markdown_diff {
    write_markdown_diff(
      path,
      &old_lockfile,
      &current_lockfile,
      &changes,
      &mc_added,
      &mc_removed,
      old_loaders,
      new_loaders,
      args.verbose,
      args.header_size,
    )?;
  } else if let Some(path) = &args.markdown {
    write_markdown(
      path,
      &old_lockfile,
      &current_lockfile,
      &changes,
      &mc_added,
      &mc_removed,
      old_loaders,
      new_loaders,
      args.verbose,
      args.header_size,
    )?;
  } else {
    print_terminal_diff(
      &old_lockfile,
      &current_lockfile,
      &changes,
      &mc_added,
      &mc_removed,
      old_loaders,
      new_loaders,
      args.verbose,
    );
  }

  Ok(())
}
|
||||
|
||||
/// Prints a human-readable lockfile diff to stdout.
///
/// Sections (target, MC versions, loaders, projects) are printed only when
/// they contain changes; `verbose` adds file names to project lines.
fn print_terminal_diff(
  old: &LockFile,
  new: &LockFile,
  changes: &[ProjectChange],
  mc_added: &[&&String],
  mc_removed: &[&&String],
  old_loaders: &HashMap<String, String>,
  new_loaders: &HashMap<String, String>,
  verbose: bool,
) {
  println!("## Lockfile Comparison\n");

  // Target
  if old.target != new.target {
    println!("Target: {:?} -> {:?}", old.target, new.target);
  }

  // MC versions
  if !mc_removed.is_empty() || !mc_added.is_empty() {
    println!("Minecraft Versions:");
    for v in mc_removed {
      println!(" - {v}");
    }
    for v in mc_added {
      println!(" + {v}");
    }
  }

  // Loaders; the "Loaders:" heading is emitted lazily before the first
  // change so an unchanged set produces no heading.
  let mut loader_changes = false;
  for (name, old_ver) in old_loaders {
    if let Some(new_ver) = new_loaders.get(name) {
      if old_ver != new_ver {
        if !loader_changes {
          println!("\nLoaders:");
          loader_changes = true;
        }
        println!(" ~ {name}: {old_ver} -> {new_ver}");
      }
    } else {
      if !loader_changes {
        println!("\nLoaders:");
        loader_changes = true;
      }
      println!(" - {name}: {old_ver}");
    }
  }
  for (name, new_ver) in new_loaders {
    if !old_loaders.contains_key(name) {
      if !loader_changes {
        println!("\nLoaders:");
        loader_changes = true;
      }
      println!(" + {name}: {new_ver}");
    }
  }

  // Projects
  if !changes.is_empty() {
    println!("\nProjects:");
    for change in changes {
      match change.change_type {
        ChangeType::Added => {
          print!(" + {}", change.name);
          if verbose && let Some(file) = &change.new_file {
            print!(" ({file})");
          }
          println!();
        },
        ChangeType::Removed => {
          print!(" - {}", change.name);
          if verbose && let Some(file) = &change.old_file {
            print!(" ({file})");
          }
          println!();
        },
        ChangeType::Updated => {
          print!(" ~ {}", change.name);
          if verbose
            && let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
          {
            print!(" ({old} -> {new})");
          }
          println!();
        },
      }
    }
  }

  // Only when every section was empty do we report "no differences".
  if mc_removed.is_empty()
    && mc_added.is_empty()
    && !loader_changes
    && changes.is_empty()
  {
    println!("✓ No differences found");
  }
}
|
||||
|
||||
/// Writes the lockfile diff to `path` as a ```diff fenced code block.
///
/// `_header_size` is unused in this format (diff fences have no headers).
/// Note: an Updated change with `verbose` set emits nothing when either
/// file name is missing.
///
/// # Errors
/// Fails when the output file cannot be written.
fn write_markdown_diff(
  path: &str,
  old: &LockFile,
  new: &LockFile,
  changes: &[ProjectChange],
  mc_added: &[&&String],
  mc_removed: &[&&String],
  old_loaders: &HashMap<String, String>,
  new_loaders: &HashMap<String, String>,
  verbose: bool,
  _header_size: usize,
) -> Result<()> {
  let mut content = String::new();
  content.push_str("```diff\n");

  // Metadata changes
  if old.target != new.target {
    content.push_str(&format!("- Target: {:?}\n", old.target));
    content.push_str(&format!("+ Target: {:?}\n", new.target));
  }

  if !mc_removed.is_empty() || !mc_added.is_empty() {
    content.push_str("\nMinecraft Versions:\n");
    for v in mc_removed {
      content.push_str(&format!("- {v}\n"));
    }
    for v in mc_added {
      content.push_str(&format!("+ {v}\n"));
    }
  }

  // Loaders: changed versions become a -/+ pair, removed loaders a single
  // "-" line, new loaders a single "+" line.
  for (name, old_ver) in old_loaders {
    if let Some(new_ver) = new_loaders.get(name) {
      if old_ver != new_ver {
        content.push_str(&format!("- {name}: {old_ver}\n"));
        content.push_str(&format!("+ {name}: {new_ver}\n"));
      }
    } else {
      content.push_str(&format!("- {name}: {old_ver}\n"));
    }
  }
  for (name, new_ver) in new_loaders {
    if !old_loaders.contains_key(name) {
      content.push_str(&format!("+ {name}: {new_ver}\n"));
    }
  }

  // Projects
  if !changes.is_empty() {
    content.push_str("\nProjects:\n");
    for change in changes {
      match change.change_type {
        ChangeType::Added => {
          content.push_str(&format!("+ {}", change.name));
          if verbose && let Some(file) = &change.new_file {
            content.push_str(&format!(" ({file})"));
          }
          content.push('\n');
        },
        ChangeType::Removed => {
          content.push_str(&format!("- {}", change.name));
          if verbose && let Some(file) = &change.old_file {
            content.push_str(&format!(" ({file})"));
          }
          content.push('\n');
        },
        ChangeType::Updated => {
          if verbose {
            if let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
            {
              content.push_str(&format!("- {} ({})\n", change.name, old));
              content.push_str(&format!("+ {} ({})\n", change.name, new));
            }
          } else {
            content.push_str(&format!("~ {}\n", change.name));
          }
        },
      }
    }
  }

  content.push_str("```\n");
  fs::write(path, content)?;
  println!("Diff exported to {path}");
  Ok(())
}
|
||||
|
||||
/// Writes the lockfile diff to `path` as styled markdown.
///
/// `header_size` controls the number of `#` characters in section headings
/// (capped at 5; subsections get one extra `#`).
///
/// # Errors
/// Fails when the output file cannot be written.
fn write_markdown(
  path: &str,
  old: &LockFile,
  new: &LockFile,
  changes: &[ProjectChange],
  mc_added: &[&&String],
  mc_removed: &[&&String],
  old_loaders: &HashMap<String, String>,
  new_loaders: &HashMap<String, String>,
  verbose: bool,
  header_size: usize,
) -> Result<()> {
  let header = "#".repeat(header_size.min(5));
  let mut content = String::new();

  content.push_str(&format!("{header} Lockfile Comparison\n\n"));

  // Target
  if old.target != new.target {
    content.push_str(&format!(
      "**Target:** {:?} → {:?}\n\n",
      old.target, new.target
    ));
  }

  // MC versions: removed are struck through, added are bold + "(new)".
  if !mc_removed.is_empty() || !mc_added.is_empty() {
    content.push_str(&format!("{header} Minecraft Versions\n\n"));
    for v in mc_removed {
      content.push_str(&format!("- ~~{v}~~\n"));
    }
    for v in mc_added {
      content.push_str(&format!("- **{v}** (new)\n"));
    }
    content.push('\n');
  }

  // Loaders are buffered separately so the section heading is only emitted
  // when at least one loader actually changed.
  let mut has_loader_changes = false;
  let mut loader_content = String::new();
  for (name, old_ver) in old_loaders {
    if let Some(new_ver) = new_loaders.get(name) {
      if old_ver != new_ver {
        has_loader_changes = true;
        loader_content
          .push_str(&format!("- **{name}:** {old_ver} → {new_ver}\n"));
      }
    } else {
      has_loader_changes = true;
      loader_content.push_str(&format!("- ~~{name}: {old_ver}~~\n"));
    }
  }
  for (name, new_ver) in new_loaders {
    if !old_loaders.contains_key(name) {
      has_loader_changes = true;
      loader_content.push_str(&format!("- **{name}: {new_ver}** (new)\n"));
    }
  }
  if has_loader_changes {
    content.push_str(&format!("{header} Loaders\n\n"));
    content.push_str(&loader_content);
    content.push('\n');
  }

  // Projects, grouped by change kind into Added / Removed / Updated
  // subsections (one extra `#` on each subheading).
  if !changes.is_empty() {
    content.push_str(&format!("{header} Projects\n\n"));

    let added: Vec<_> = changes
      .iter()
      .filter(|c| matches!(c.change_type, ChangeType::Added))
      .collect();
    let removed: Vec<_> = changes
      .iter()
      .filter(|c| matches!(c.change_type, ChangeType::Removed))
      .collect();
    let updated: Vec<_> = changes
      .iter()
      .filter(|c| matches!(c.change_type, ChangeType::Updated))
      .collect();

    if !added.is_empty() {
      content.push_str(&format!("{}# Added ({})\n\n", header, added.len()));
      for change in added {
        content.push_str(&format!("- **{}**", change.name));
        if verbose && let Some(file) = &change.new_file {
          content.push_str(&format!(" ({file})"));
        }
        content.push('\n');
      }
      content.push('\n');
    }

    if !removed.is_empty() {
      content.push_str(&format!("{}# Removed ({})\n\n", header, removed.len()));
      for change in removed {
        content.push_str(&format!("- ~~{}~~", change.name));
        if verbose && let Some(file) = &change.old_file {
          content.push_str(&format!(" ({file})"));
        }
        content.push('\n');
      }
      content.push('\n');
    }

    if !updated.is_empty() {
      content.push_str(&format!("{}# Updated ({})\n\n", header, updated.len()));
      for change in updated {
        content.push_str(&format!("- **{}**", change.name));
        if verbose
          && let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
        {
          content.push_str(&format!(" ({old} → {new})"));
        }
        content.push('\n');
      }
      content.push('\n');
    }
  }

  fs::write(path, content)?;
  println!("Diff exported to {path}");
  Ok(())
}
|
||||
291
src/cli/commands/export.rs
Normal file
291
src/cli/commands/export.rs
Normal file
|
|
@ -0,0 +1,291 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::ExportArgs,
|
||||
error::{PakkerError, Result},
|
||||
export::Exporter,
|
||||
ipc::{IpcCoordinator, OperationType},
|
||||
model::{Config, LockFile, fork::LocalConfig},
|
||||
utils::hash::compute_sha256_bytes,
|
||||
};
|
||||
|
||||
pub async fn execute(
|
||||
args: ExportArgs,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<()> {
|
||||
if let Some(ref profile) = args.profile {
|
||||
log::info!("Exporting with profile: {profile}");
|
||||
} else {
|
||||
log::info!("Exporting all profiles");
|
||||
}
|
||||
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let config_dir = config_path.parent().unwrap_or(Path::new("."));
|
||||
|
||||
// IPC coordination - prevent concurrent operations on the same modpack
|
||||
let ipc = IpcCoordinator::new(&config_dir.to_path_buf())?;
|
||||
let ipc_timeout = std::time::Duration::from_secs(60);
|
||||
|
||||
// Check for conflicting export operations
|
||||
let conflicting = ipc.get_running_operations(OperationType::Export);
|
||||
if !conflicting.is_empty() {
|
||||
log::info!(
|
||||
"Waiting for conflicting operations to complete: {:?}",
|
||||
conflicting
|
||||
.iter()
|
||||
.map(|op| (op.id.clone(), op.pid))
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
ipc
|
||||
.wait_for_conflicts(OperationType::Export, ipc_timeout)
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Register this export operation
|
||||
let _op_guard = ipc.register_operation(OperationType::Export)?;
|
||||
|
||||
// Load config to check for fork configuration
|
||||
let config = Config::load(config_dir)?;
|
||||
let local_config = LocalConfig::load(config_dir).ok();
|
||||
|
||||
// Check if this is a fork with parent
|
||||
let lockfile = if let Some(local_cfg) = &local_config {
|
||||
if local_cfg.parent.is_some() {
|
||||
log::info!("Fork detected - merging parent and local lockfiles");
|
||||
|
||||
// Try parent's lockfile
|
||||
let parent_paths = [".pakku/parent", ".pakker/parent"];
|
||||
let mut parent_lockfile_path = None;
|
||||
let mut lockfile_name = "pakku-lock.json";
|
||||
|
||||
for parent_dir in &parent_paths {
|
||||
// Try pakker-lock.json first
|
||||
let check_path = Path::new(parent_dir).join("pakker-lock.json");
|
||||
if check_path.exists() {
|
||||
parent_lockfile_path = Some(parent_dir);
|
||||
lockfile_name = "pakker-lock.json";
|
||||
break;
|
||||
}
|
||||
// Fall back to pakku-lock.json
|
||||
let check_path = Path::new(parent_dir).join("pakku-lock.json");
|
||||
if check_path.exists() {
|
||||
parent_lockfile_path = Some(parent_dir);
|
||||
lockfile_name = "pakku-lock.json";
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent_dir) = parent_lockfile_path {
|
||||
// Load parent lockfile
|
||||
let parent_lockfile = LockFile::load(Path::new(parent_dir))?;
|
||||
|
||||
// Verify parent lockfile hash for integrity
|
||||
if let Some(stored_hash) = &local_cfg.parent_lock_hash {
|
||||
let parent_lock_path = Path::new(parent_dir).join(lockfile_name);
|
||||
let parent_lock_content = std::fs::read(&parent_lock_path)?;
|
||||
let computed_hash = compute_sha256_bytes(&parent_lock_content);
|
||||
|
||||
if &computed_hash != stored_hash {
|
||||
log::warn!(
|
||||
"Parent lockfile hash mismatch - parent may have changed since \
|
||||
last sync"
|
||||
);
|
||||
log::warn!("Expected: {stored_hash}, Got: {computed_hash}");
|
||||
}
|
||||
}
|
||||
|
||||
// Load local lockfile if it exists
|
||||
if lockfile_path.exists() {
|
||||
log::info!("Merging parent lockfile with local overrides");
|
||||
let local_lockfile =
|
||||
LockFile::load_with_validation(lockfile_dir, false)?;
|
||||
|
||||
// Merge: start with parent, override with local
|
||||
merge_lockfiles(parent_lockfile, local_lockfile, local_cfg)?
|
||||
} else {
|
||||
log::info!("No local lockfile - using parent lockfile");
|
||||
parent_lockfile
|
||||
}
|
||||
} else {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"Fork configured but parent lockfile not found",
|
||||
)));
|
||||
}
|
||||
} else {
|
||||
// No fork, use local lockfile
|
||||
if lockfile_path.exists() {
|
||||
LockFile::load(lockfile_dir)?
|
||||
} else {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"No lockfile found",
|
||||
)));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// No local config, try local lockfile or fall back to parent
|
||||
if lockfile_path.exists() {
|
||||
LockFile::load(lockfile_dir)?
|
||||
} else {
|
||||
// Try parent's lockfile as fallback
|
||||
let parent_paths = [".pakku/parent", ".pakker/parent"];
|
||||
let mut parent_lockfile = None;
|
||||
let mut lockfile_name = "pakku-lock.json";
|
||||
|
||||
for parent_dir in &parent_paths {
|
||||
// Try pakker-lock.json first
|
||||
let lockfile_path_check =
|
||||
Path::new(parent_dir).join("pakker-lock.json");
|
||||
if lockfile_path_check.exists() {
|
||||
parent_lockfile = Some(parent_dir);
|
||||
lockfile_name = "pakker-lock.json";
|
||||
break;
|
||||
}
|
||||
// Fall back to pakku-lock.json
|
||||
let lockfile_path_check = Path::new(parent_dir).join("pakku-lock.json");
|
||||
if lockfile_path_check.exists() {
|
||||
parent_lockfile = Some(parent_dir);
|
||||
lockfile_name = "pakku-lock.json";
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
match parent_lockfile {
|
||||
Some(parent_dir) => {
|
||||
log::info!(
|
||||
"Using parent's lockfile ({lockfile_name}) from {parent_dir}"
|
||||
);
|
||||
LockFile::load(Path::new(parent_dir))?
|
||||
},
|
||||
None => {
|
||||
return Err(PakkerError::IoError(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"No lockfile found (neither local nor parent's)",
|
||||
)));
|
||||
},
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Determine output path
|
||||
let output_path = if args.pakker_layout {
|
||||
"build"
|
||||
} else {
|
||||
args.output.as_deref().unwrap_or("exports")
|
||||
};
|
||||
|
||||
// Create exporter
|
||||
let mut exporter = Exporter::new(".");
|
||||
|
||||
// Export based on profile argument
|
||||
if let Some(profile_name) = args.profile {
|
||||
// Single profile export (backwards compatible)
|
||||
let output_file = exporter
|
||||
.export(&profile_name, &lockfile, &config, Path::new(output_path))
|
||||
.await?;
|
||||
|
||||
println!("Export complete: {output_file:?}");
|
||||
} else {
|
||||
// Multi-profile export (Pakker-compatible default behavior)
|
||||
let output_files = exporter
|
||||
.export_all_profiles(&lockfile, &config, Path::new(output_path))
|
||||
.await?;
|
||||
|
||||
println!("\nExported {} files:", output_files.len());
|
||||
for output_file in output_files {
|
||||
println!(" - {output_file:?}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Merges parent lockfile with local lockfile
|
||||
/// Parent projects are used as base, local projects override parent projects
|
||||
/// with same slug
|
||||
fn merge_lockfiles(
|
||||
parent: LockFile,
|
||||
local: LockFile,
|
||||
local_config: &LocalConfig,
|
||||
) -> Result<LockFile> {
|
||||
let mut merged = LockFile {
|
||||
target: parent.target, // Use parent target
|
||||
mc_versions: parent.mc_versions, // Use parent MC versions
|
||||
loaders: parent.loaders, // Use parent loaders
|
||||
projects: Vec::new(),
|
||||
lockfile_version: parent.lockfile_version,
|
||||
};
|
||||
|
||||
// Collect local project slugs for override detection
|
||||
let mut local_slugs = std::collections::HashSet::new();
|
||||
for project in &local.projects {
|
||||
// Add all slugs from all platforms
|
||||
for slug in project.slug.values() {
|
||||
local_slugs.insert(slug.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Add parent projects that are NOT overridden by local
|
||||
let parent_projects_count = parent.projects.len();
|
||||
|
||||
for parent_project in &parent.projects {
|
||||
let is_overridden = parent_project
|
||||
.slug
|
||||
.values()
|
||||
.any(|slug| local_slugs.contains(slug));
|
||||
|
||||
if !is_overridden {
|
||||
// Check if project has local config overrides
|
||||
let mut project = parent_project.clone();
|
||||
|
||||
// Apply local config overrides if they exist
|
||||
for (key, local_proj_cfg) in &local_config.projects {
|
||||
// Match by slug, name, or pakku_id
|
||||
let matches = project.slug.values().any(|s| s == key)
|
||||
|| project.name.values().any(|n| n == key)
|
||||
|| project.pakku_id.as_ref() == Some(key);
|
||||
|
||||
if matches {
|
||||
if let Some(t) = local_proj_cfg.r#type {
|
||||
project.r#type = t;
|
||||
}
|
||||
if let Some(s) = local_proj_cfg.side {
|
||||
project.side = s;
|
||||
}
|
||||
if let Some(us) = local_proj_cfg.update_strategy {
|
||||
project.update_strategy = us;
|
||||
}
|
||||
if let Some(r) = local_proj_cfg.redistributable {
|
||||
project.redistributable = r;
|
||||
}
|
||||
if let Some(ref sp) = local_proj_cfg.subpath {
|
||||
project.subpath = Some(sp.clone());
|
||||
}
|
||||
if let Some(ref aliases) = local_proj_cfg.aliases {
|
||||
project.aliases = aliases.iter().cloned().collect();
|
||||
}
|
||||
if let Some(e) = local_proj_cfg.export {
|
||||
project.export = e;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
merged.projects.push(project);
|
||||
}
|
||||
}
|
||||
|
||||
// Add all local projects
|
||||
merged.projects.extend(local.projects.clone());
|
||||
|
||||
println!(
|
||||
"Merged fork: {} parent projects + {} local projects = {} total projects",
|
||||
parent_projects_count - local_config.projects.len(),
|
||||
local.projects.len(),
|
||||
merged.projects.len()
|
||||
);
|
||||
|
||||
Ok(merged)
|
||||
}
|
||||
49
src/cli/commands/fetch.rs
Normal file
49
src/cli/commands/fetch.rs
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::{
|
||||
cli::FetchArgs,
|
||||
error::Result,
|
||||
fetch::Fetcher,
|
||||
ipc::{IpcCoordinator, OperationGuard, OperationType},
|
||||
model::{Config, LockFile},
|
||||
};
|
||||
|
||||
pub async fn execute(
|
||||
args: FetchArgs,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<()> {
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let config_dir = config_path.parent().unwrap_or(Path::new("."));
|
||||
|
||||
let lockfile = LockFile::load(lockfile_dir)?;
|
||||
let config = Config::load(config_dir)?;
|
||||
|
||||
// Create IPC coordinator for this modpack
|
||||
let working_dir = PathBuf::from(".");
|
||||
let coordinator = IpcCoordinator::new(&working_dir)?;
|
||||
|
||||
// Check for conflicting operations
|
||||
if coordinator.has_running_operation(OperationType::Fetch) {
|
||||
// Wait for conflicting operations to complete with timeout
|
||||
let timeout = std::time::Duration::from_secs(args.timeout.unwrap_or(300));
|
||||
coordinator
|
||||
.wait_for_conflicts(OperationType::Fetch, timeout)
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Register this fetch operation
|
||||
let operation_id = coordinator.register_operation(OperationType::Fetch)?;
|
||||
let _guard = OperationGuard::new(coordinator, operation_id);
|
||||
|
||||
// Create fetcher
|
||||
let fetcher = Fetcher::new(".");
|
||||
|
||||
// Fetch all projects (progress indicators handled in fetch.rs)
|
||||
fetcher.fetch_all(&lockfile, &config).await?;
|
||||
|
||||
println!("Fetch complete");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
677
src/cli/commands/fork.rs
Normal file
677
src/cli/commands/fork.rs
Normal file
|
|
@ -0,0 +1,677 @@
|
|||
use std::{fs, io::Write, path::Path};
|
||||
|
||||
use crate::{
|
||||
cli::ForkArgs,
|
||||
error::PakkerError,
|
||||
git::{self, VcsType},
|
||||
model::{
|
||||
config::Config,
|
||||
fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content},
|
||||
},
|
||||
};
|
||||
|
||||
const PAKKU_DIR: &str = ".pakku";
|
||||
const PARENT_DIR_NAME: &str = "parent";
|
||||
|
||||
fn parent_dir() -> String {
|
||||
format!("{PAKKU_DIR}/{PARENT_DIR_NAME}")
|
||||
}
|
||||
|
||||
/// Main entry point for fork commands
|
||||
pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> {
|
||||
match &args.subcommand {
|
||||
crate::cli::ForkSubcommand::Init {
|
||||
git_url,
|
||||
from_current,
|
||||
from_path,
|
||||
ref_name,
|
||||
ref_type,
|
||||
remote,
|
||||
} => {
|
||||
execute_init(
|
||||
git_url.clone(),
|
||||
*from_current,
|
||||
from_path.clone(),
|
||||
ref_name.clone(),
|
||||
*ref_type,
|
||||
remote.clone(),
|
||||
)
|
||||
},
|
||||
crate::cli::ForkSubcommand::Set {
|
||||
git_url,
|
||||
ref_name,
|
||||
ref_type,
|
||||
remote,
|
||||
} => {
|
||||
execute_set(git_url.clone(), ref_name.clone(), *ref_type, remote.clone())
|
||||
},
|
||||
crate::cli::ForkSubcommand::Show => execute_show(),
|
||||
crate::cli::ForkSubcommand::Unset => execute_unset(),
|
||||
crate::cli::ForkSubcommand::Sync => execute_sync(),
|
||||
crate::cli::ForkSubcommand::Promote { projects } => {
|
||||
execute_promote(projects.clone())
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_git_url(url: &str) -> Result<(), PakkerError> {
|
||||
// Allow network URLs, SSH-style URLs, or local filesystem paths (tests use
|
||||
// local bare repos)
|
||||
if url.starts_with("https://")
|
||||
|| url.starts_with("git@")
|
||||
|| url.starts_with("ssh://")
|
||||
|| url.starts_with("file://")
|
||||
|| url.starts_with('/')
|
||||
{
|
||||
Ok(())
|
||||
} else {
|
||||
Err(PakkerError::Fork(format!(
|
||||
"Invalid git URL: {url}. Expected https://, git@, ssh://, file://, or \
|
||||
absolute filesystem path."
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
fn execute_init(
|
||||
git_url: Option<String>,
|
||||
from_current: bool,
|
||||
from_path: Option<String>,
|
||||
ref_name: Option<String>,
|
||||
ref_type: Option<RefType>,
|
||||
remote: Option<String>,
|
||||
) -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
|
||||
// Validate that pakker.json exists for fork operations
|
||||
let pakker_json_path = config_dir.join("pakker.json");
|
||||
let pakku_json_path = config_dir.join("pakku.json");
|
||||
|
||||
if !pakker_json_path.exists() && pakku_json_path.exists() {
|
||||
return Err(PakkerError::Fork(
|
||||
"Forking is a pakker-specific feature and requires pakker.json. \nFound \
|
||||
pakku.json but not pakker.json. Please migrate to pakker.json to use \
|
||||
fork functionality.\nYou can convert your pakku.json to pakker.json by \
|
||||
renaming the file."
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut local_config = LocalConfig::load(config_dir).unwrap_or_default();
|
||||
|
||||
// Check if parent already configured
|
||||
if local_config.parent.is_some()
|
||||
&& let Some(parent) = &local_config.parent
|
||||
{
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Parent already configured: {}",
|
||||
parent.id
|
||||
)));
|
||||
}
|
||||
|
||||
// Resolve defaults early to avoid shadowing/confusion
|
||||
let resolved_remote = remote.unwrap_or_else(|| "origin".to_string());
|
||||
let resolved_ref = ref_name.unwrap_or_else(|| "main".to_string());
|
||||
|
||||
// Parent path (where we keep the cloned parent)
|
||||
let parent_path_str = parent_dir();
|
||||
|
||||
// Branch: from_current, from_path, or git_url
|
||||
let mut cloned_from_local = false;
|
||||
let url = if from_current {
|
||||
// Detect git URL from current directory
|
||||
if !git::is_git_repository(config_dir) {
|
||||
return Err(PakkerError::Fork(
|
||||
"Not a git repository. Use --git-url or run 'git init' first."
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
git::get_remote_url(config_dir, &resolved_remote)?
|
||||
} else if let Some(fp) = from_path {
|
||||
// Use provided local path as source; infer upstream remote from it
|
||||
let path = Path::new(&fp);
|
||||
if !git::is_git_repository(path) {
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Provided path is not a git repository: {}",
|
||||
path.display()
|
||||
)));
|
||||
}
|
||||
|
||||
// Infer upstream remote URL from the existing local clone
|
||||
let upstream_url = git::get_primary_remote_url(path)?;
|
||||
|
||||
// Reject file:// or non-network remotes
|
||||
validate_git_url(&upstream_url)?;
|
||||
|
||||
// Ensure working tree is clean
|
||||
let vcs_type = git::detect_vcs_type(path);
|
||||
if git::repo_has_uncommitted_changes(path)? {
|
||||
let error_msg = match vcs_type {
|
||||
VcsType::Git => {
|
||||
"Local repository at --from-path has uncommitted changes. Commit or \
|
||||
stash them before proceeding."
|
||||
},
|
||||
VcsType::Jujutsu => {
|
||||
"Local repository at --from-path has uncommitted changes. Run 'jj \
|
||||
commit' to save changes before proceeding."
|
||||
},
|
||||
VcsType::None => {
|
||||
"Local repository at --from-path has uncommitted changes. Please \
|
||||
clean the directory before proceeding."
|
||||
},
|
||||
};
|
||||
return Err(PakkerError::Fork(error_msg.to_string()));
|
||||
}
|
||||
|
||||
// VCS-specific validation
|
||||
match vcs_type {
|
||||
VcsType::Git => {
|
||||
// Attempt lightweight fetch of remote refs to refresh remote tracking
|
||||
match git::fetch_remote_light(path, &resolved_remote, &resolved_ref) {
|
||||
Ok(()) => println!("Fetched remote refs for verification"),
|
||||
Err(e) => {
|
||||
log::warn!("Lightweight fetch from upstream failed: {e}");
|
||||
println!(
|
||||
"Warning: could not perform lightweight fetch from upstream. \
|
||||
Proceeding with local clone; subsequent sync may require \
|
||||
network."
|
||||
);
|
||||
},
|
||||
}
|
||||
|
||||
// Compare local ref vs remote ref
|
||||
let remote_ref = format!("{resolved_remote}/{resolved_ref}");
|
||||
match git::ahead_behind(path, &resolved_ref, &remote_ref) {
|
||||
Ok((ahead, _behind)) => {
|
||||
if ahead > 0 {
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Local repository at {} has {} commits not present on \
|
||||
upstream {}. Push or use --git-url if you intend to use an \
|
||||
upstream that contains these commits.",
|
||||
path.display(),
|
||||
ahead,
|
||||
upstream_url
|
||||
)));
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
log::warn!("Could not compute ahead/behind: {e}");
|
||||
},
|
||||
}
|
||||
},
|
||||
VcsType::Jujutsu => {
|
||||
// For jujutsu, we skip git-specific remote validation since jj has
|
||||
// different synchronization patterns
|
||||
println!(
|
||||
"Warning: Skipping remote validation for jujutsu repository. Ensure \
|
||||
your jj repo is in sync with remote before proceeding."
|
||||
);
|
||||
|
||||
// Check if there are any changes that haven't been pushed to the remote
|
||||
if let Ok(output) = std::process::Command::new("jj")
|
||||
.args(["log", "--limit", "1", "--template", ""])
|
||||
.current_dir(path)
|
||||
.output()
|
||||
{
|
||||
if !output.stdout.is_empty() {
|
||||
println!(
|
||||
"Note: Jujutsu repository detected. Make sure to run 'jj git \
|
||||
push' to sync changes with remote if needed."
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
VcsType::None => {
|
||||
// No VCS-specific validation needed
|
||||
},
|
||||
}
|
||||
|
||||
// Compute parent lock/config hashes for reproducibility
|
||||
let parent_lock_path = if path.join("pakker-lock.json").exists() {
|
||||
path.join("pakker-lock.json")
|
||||
} else {
|
||||
path.join("pakku-lock.json")
|
||||
};
|
||||
|
||||
if parent_lock_path.exists() {
|
||||
let lock_content =
|
||||
fs::read_to_string(&parent_lock_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
|
||||
})?;
|
||||
let lock_hash = hash_content(&lock_content);
|
||||
local_config.parent_lock_hash = Some(lock_hash);
|
||||
}
|
||||
|
||||
let parent_config_path = if path.join("pakker.json").exists() {
|
||||
path.join("pakker.json")
|
||||
} else {
|
||||
path.join("pakku.json")
|
||||
};
|
||||
|
||||
if parent_config_path.exists() {
|
||||
let config_content =
|
||||
fs::read_to_string(&parent_config_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to read parent config: {e}"))
|
||||
})?;
|
||||
let config_hash = hash_content(&config_content);
|
||||
local_config.parent_config_hash = Some(config_hash);
|
||||
}
|
||||
|
||||
// Now clone from the local path into .pakku/parent — this avoids
|
||||
// re-downloading objects
|
||||
let parent_path = Path::new(&parent_path_str);
|
||||
|
||||
println!(
|
||||
"Cloning parent repository from local path {}...",
|
||||
path.display()
|
||||
);
|
||||
git::clone_repository(&fp, parent_path, &resolved_ref, None)?;
|
||||
|
||||
// Ensure the cloned repo's origin is set to the upstream URL (not the local
|
||||
// path)
|
||||
git::set_remote_url(parent_path, &resolved_remote, &upstream_url)?;
|
||||
|
||||
// Mark that we've already cloned from local
|
||||
cloned_from_local = true;
|
||||
|
||||
// We will persist upstream_url as the canonical parent id
|
||||
upstream_url
|
||||
} else if let Some(url) = git_url {
|
||||
url
|
||||
} else {
|
||||
return Err(PakkerError::Fork(
|
||||
"Either --git-url, --from-current or --from-path must be specified"
|
||||
.to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
let parent_path = Path::new(&parent_path_str);
|
||||
|
||||
// If we did not already clone from local, perform network clone and checks
|
||||
if cloned_from_local {
|
||||
println!(
|
||||
"Parent repository was cloned from local path; skipping network clone."
|
||||
);
|
||||
} else {
|
||||
// Check if parent directory already exists and is not empty
|
||||
if parent_path.exists() {
|
||||
let is_empty = parent_path
|
||||
.read_dir()
|
||||
.map(|mut entries| entries.next().is_none())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !is_empty {
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Directory not empty: {}",
|
||||
parent_path.display()
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
println!("Cloning parent repository...");
|
||||
println!(" URL: {url}");
|
||||
println!(" Ref: {resolved_ref}");
|
||||
|
||||
git::clone_repository(&url, parent_path, &resolved_ref, None)?;
|
||||
}
|
||||
|
||||
let commit_sha = git::get_commit_sha(parent_path, &resolved_ref)?;
|
||||
|
||||
// Detect ref type if not specified
|
||||
let resolved_ref_type = if let Some(rt) = ref_type {
|
||||
rt
|
||||
} else {
|
||||
git::resolve_ref_type(parent_path, &resolved_ref)?
|
||||
};
|
||||
|
||||
let parent_config = ParentConfig {
|
||||
type_: "git".to_string(),
|
||||
id: url.clone(),
|
||||
version: Some(commit_sha[..8].to_string()),
|
||||
ref_: resolved_ref.clone(),
|
||||
ref_type: resolved_ref_type,
|
||||
remote_name: resolved_remote,
|
||||
};
|
||||
|
||||
local_config.parent = Some(parent_config);
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
// Add .pakku/parent to .gitignore
|
||||
add_to_gitignore()?;
|
||||
|
||||
println!();
|
||||
println!("✓ Fork initialized successfully");
|
||||
println!(" Parent: {url}");
|
||||
println!(" Ref: {} ({})", resolved_ref, match resolved_ref_type {
|
||||
RefType::Branch => "branch",
|
||||
RefType::Tag => "tag",
|
||||
RefType::Commit => "commit",
|
||||
});
|
||||
println!(" Commit: {}", &commit_sha[..8]);
|
||||
println!();
|
||||
println!("Run 'pakku fork sync' to sync with parent.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn execute_set(
|
||||
git_url: Option<String>,
|
||||
ref_name: String,
|
||||
ref_type: Option<RefType>,
|
||||
remote: Option<String>,
|
||||
) -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let mut local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
if local_config.parent.is_none() {
|
||||
return Err(PakkerError::Fork(
|
||||
"No parent configured. Run 'pakku fork init' first.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut parent = local_config.parent.unwrap();
|
||||
|
||||
if let Some(url) = git_url {
|
||||
validate_git_url(&url)?;
|
||||
parent.id = url;
|
||||
}
|
||||
|
||||
parent.ref_ = ref_name;
|
||||
|
||||
if let Some(rt) = ref_type {
|
||||
parent.ref_type = rt;
|
||||
}
|
||||
|
||||
if let Some(remote_name) = remote {
|
||||
parent.remote_name = remote_name;
|
||||
}
|
||||
|
||||
local_config.parent = Some(parent.clone());
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
println!("✓ Fork configuration updated");
|
||||
println!(" Parent: {}", parent.id);
|
||||
println!(" Ref: {} ({})", parent.ref_, match parent.ref_type {
|
||||
RefType::Branch => "branch",
|
||||
RefType::Tag => "tag",
|
||||
RefType::Commit => "commit",
|
||||
});
|
||||
println!();
|
||||
println!("Run 'pakku fork sync' to sync with new configuration.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn execute_show() -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
if let Some(parent) = local_config.parent {
|
||||
println!("Fork Configuration:");
|
||||
println!(" Parent URL: {}", parent.id);
|
||||
println!(" Type: {}", match parent.ref_type {
|
||||
RefType::Branch => "branch",
|
||||
RefType::Tag => "tag",
|
||||
RefType::Commit => "commit",
|
||||
});
|
||||
println!(" Ref: {}", parent.ref_);
|
||||
println!(" Remote: {}", parent.remote_name);
|
||||
|
||||
if let Some(version) = parent.version {
|
||||
println!(" Last synced commit: {version}");
|
||||
} else {
|
||||
println!(" Last synced commit: never synced");
|
||||
}
|
||||
|
||||
if !local_config.projects.is_empty() {
|
||||
println!();
|
||||
println!("Project Overrides ({}):", local_config.projects.len());
|
||||
for (slug, proj_config) in &local_config.projects {
|
||||
print!(" - {slug}");
|
||||
let mut details = Vec::new();
|
||||
if let Some(version) = &proj_config.version {
|
||||
details.push(format!("version={version}"));
|
||||
}
|
||||
if let Some(side) = &proj_config.side {
|
||||
details.push(format!("side={side}"));
|
||||
}
|
||||
if let Some(strategy) = &proj_config.update_strategy {
|
||||
details.push(format!("updateStrategy={strategy}"));
|
||||
}
|
||||
if !details.is_empty() {
|
||||
print!(" ({})", details.join(", "));
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
println!("No fork configured.");
|
||||
println!("Run 'pakku fork init' to initialize a fork.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn execute_unset() -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let mut local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
if local_config.parent.is_none() {
|
||||
println!("No fork configured.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Prompt for confirmation
|
||||
print!("Are you sure you want to remove fork configuration? [y/N] ");
|
||||
std::io::stdout().flush().unwrap();
|
||||
|
||||
let mut input = String::new();
|
||||
std::io::stdin().read_line(&mut input).unwrap();
|
||||
|
||||
if !input.trim().eq_ignore_ascii_case("y") {
|
||||
println!("Cancelled.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Remove parent directory
|
||||
let parent_path_str = parent_dir();
|
||||
let parent_path = Path::new(&parent_path_str);
|
||||
if parent_path.exists() {
|
||||
fs::remove_dir_all(parent_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to remove parent directory: {e}"))
|
||||
})?;
|
||||
}
|
||||
|
||||
// Clear parent configuration
|
||||
local_config.parent = None;
|
||||
local_config.parent_lock_hash = None;
|
||||
local_config.parent_config_hash = None;
|
||||
local_config.save(config_dir)?;
|
||||
|
||||
println!("✓ Fork configuration removed");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Synchronize the cached parent clone with its upstream.
///
/// Fetches/resets the existing clone (or clones it fresh), records the new
/// commit SHA on the parent config, and updates the stored hashes of the
/// parent's lock and config files, warning when they changed since the last
/// sync. Errors with `PakkerError::Fork` when no parent is configured.
fn execute_sync() -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;

    let parent = local_config.parent.as_ref().ok_or_else(|| {
        PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        )
    })?;

    let parent_path_str = parent_dir();
    let parent_path = Path::new(&parent_path_str);

    // Existing clone: fetch and hard-sync to the configured ref.
    // Missing clone: re-clone from the recorded parent URL.
    if parent_path.exists() {
        println!("Fetching parent updates...");
        git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?;
        git::reset_to_ref(parent_path, &parent.remote_name, &parent.ref_)?;
    } else {
        println!("Parent repository not found. Cloning...");
        git::clone_repository(&parent.id, parent_path, &parent.ref_, None)?;
    }

    let commit_sha = git::get_commit_sha(parent_path, &parent.ref_)?;

    let mut integrity = None;

    // Try pakker files first, fall back to pakku files
    let parent_lock_path = if parent_path.join("pakker-lock.json").exists() {
        parent_path.join("pakker-lock.json")
    } else {
        parent_path.join("pakku-lock.json")
    };

    let parent_config_path = if parent_path.join("pakker.json").exists() {
        parent_path.join("pakker.json")
    } else {
        parent_path.join("pakku.json")
    };

    // NOTE(review): the config hash (and integrity record) is only refreshed
    // when the parent has a lock file; a parent with only a config file is
    // skipped — confirm this is intentional.
    if parent_lock_path.exists() {
        let lock_content = fs::read_to_string(&parent_lock_path).map_err(|e| {
            PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
        })?;

        let lock_hash = hash_content(&lock_content);

        // Warn (but do not fail) when the parent lock changed since last sync.
        if let Some(prev_hash) = &local_config.parent_lock_hash
            && prev_hash != &lock_hash
        {
            log::warn!("Parent lock file has changed since last sync");
            log::warn!(" Previous hash: {prev_hash}");
            log::warn!(" Current hash: {lock_hash}");
        }

        local_config.parent_lock_hash = Some(lock_hash);

        // Hash of empty string is used when the parent has no config file.
        let config_content = if parent_config_path.exists() {
            fs::read_to_string(&parent_config_path).map_err(|e| {
                PakkerError::Fork(format!("Failed to read parent config: {e}"))
            })?
        } else {
            String::new()
        };

        let config_hash = hash_content(&config_content);

        if let Some(prev_hash) = &local_config.parent_config_hash
            && prev_hash != &config_hash
        {
            log::warn!("Parent config file has changed since last sync");
            log::warn!(" Previous hash: {prev_hash}");
            log::warn!(" Current hash: {config_hash}");
        }

        local_config.parent_config_hash = Some(config_hash);

        integrity = Some(ForkIntegrity::new(
            local_config.parent_lock_hash.clone().unwrap_or_default(),
            commit_sha.clone(),
            local_config.parent_config_hash.clone().unwrap_or_default(),
        ));
    }

    // Integrity record is currently informational only (logged, not stored).
    if let Some(ref integrity_data) = integrity {
        log::info!(
            "Parent integrity verified at timestamp {}",
            integrity_data.verified_at
        );
    }

    // Record the short SHA as the last-synced version.
    // NOTE(review): `commit_sha[..8]` panics if the SHA is shorter than
    // 8 bytes — presumably guaranteed by `get_commit_sha`; confirm.
    if let Some(parent) = local_config.parent.as_mut() {
        parent.version = Some(commit_sha[..8].to_string());
    }

    local_config.save(config_dir)?;

    println!();
    println!("✓ Parent sync complete");
    println!(" Commit: {}", &commit_sha[..8]);
    println!();
    println!("Run 'pakku export' to merge changes from parent.");

    Ok(())
}
|
||||
|
||||
fn execute_promote(projects: Vec<String>) -> Result<(), PakkerError> {
|
||||
let config_dir = Path::new(".");
|
||||
let local_config = LocalConfig::load(config_dir)?;
|
||||
|
||||
if local_config.parent.is_none() {
|
||||
return Err(PakkerError::Fork(
|
||||
"No parent configured. Run 'pakku fork init' first.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
if projects.is_empty() {
|
||||
return Err(PakkerError::Fork(
|
||||
"No projects specified. Usage: pakku fork promote <project>..."
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Load current config
|
||||
let config = Config::load(config_dir)?;
|
||||
|
||||
// Verify all projects exist
|
||||
for project_arg in &projects {
|
||||
let found = config
|
||||
.projects
|
||||
.as_ref()
|
||||
.and_then(|projs| projs.get(project_arg))
|
||||
.is_some();
|
||||
|
||||
if !found {
|
||||
return Err(PakkerError::Fork(format!(
|
||||
"Project not found: {project_arg}"
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
println!("Note: In the current architecture, projects in pakku.json are");
|
||||
println!("automatically merged with parent projects during export.");
|
||||
println!();
|
||||
println!("The following projects are already in pakku.json:");
|
||||
for project in &projects {
|
||||
println!(" - {project}");
|
||||
}
|
||||
println!();
|
||||
println!("These will be included in exports automatically.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn add_to_gitignore() -> Result<(), PakkerError> {
|
||||
let gitignore_path = Path::new(".gitignore");
|
||||
let parent_dir = parent_dir();
|
||||
|
||||
// Check if .gitignore exists and already contains the entry
|
||||
if gitignore_path.exists() {
|
||||
let content = fs::read_to_string(gitignore_path).map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to read .gitignore: {e}"))
|
||||
})?;
|
||||
|
||||
if content.lines().any(|line| line.trim() == parent_dir) {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
// Append to .gitignore
|
||||
let mut file = fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(gitignore_path)
|
||||
.map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to open .gitignore: {e}"))
|
||||
})?;
|
||||
|
||||
writeln!(file, "{parent_dir}").map_err(|e| {
|
||||
PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
395
src/cli/commands/import.rs
Normal file
395
src/cli/commands/import.rs
Normal file
|
|
@ -0,0 +1,395 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::ImportArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::{Config, LockFile, Target},
|
||||
ui_utils::prompt_yes_no,
|
||||
};
|
||||
|
||||
pub async fn execute(
|
||||
args: ImportArgs,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<()> {
|
||||
log::info!("Importing modpack from {}", args.file);
|
||||
|
||||
let path = Path::new(&args.file);
|
||||
|
||||
if !path.exists() {
|
||||
return Err(PakkerError::FileNotFound(
|
||||
path.to_string_lossy().to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Check if lockfile or config already exist
|
||||
if (lockfile_path.exists() || config_path.exists()) && !args.yes {
|
||||
let msg = if lockfile_path.exists() && config_path.exists() {
|
||||
"Both pakku-lock.json and pakku.json exist. Importing will overwrite \
|
||||
them. Continue?"
|
||||
} else if lockfile_path.exists() {
|
||||
"pakku-lock.json exists. Importing will overwrite it. Continue?"
|
||||
} else {
|
||||
"pakku.json exists. Importing will overwrite it. Continue?"
|
||||
};
|
||||
|
||||
if !prompt_yes_no(msg, false)? {
|
||||
log::info!("Import cancelled by user");
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
// Detect format by checking file contents
|
||||
let file = std::fs::File::open(path)?;
|
||||
let mut archive = zip::ZipArchive::new(file)?;
|
||||
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let config_dir = config_path.parent().unwrap_or(Path::new("."));
|
||||
|
||||
if archive.by_name("modrinth.index.json").is_ok() {
|
||||
drop(archive);
|
||||
import_modrinth(path, lockfile_dir, config_dir).await
|
||||
} else if archive.by_name("manifest.json").is_ok() {
|
||||
drop(archive);
|
||||
import_curseforge(path, lockfile_dir, config_dir).await
|
||||
} else {
|
||||
Err(PakkerError::InvalidImportFile(
|
||||
"Unknown pack format".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Import a Modrinth-format (`.mrpack`) modpack archive.
///
/// Reads `modrinth.index.json`, builds a `LockFile` and `Config` from it,
/// re-resolves each listed project via the Modrinth API, saves both files
/// into the given directories, and extracts the pack's `overrides/` tree.
async fn import_modrinth(
    path: &Path,
    lockfile_dir: &Path,
    config_dir: &Path,
) -> Result<()> {
    use std::{fs::File, io::Read};

    use zip::ZipArchive;

    use crate::platform::create_platform;

    let file = File::open(path)?;
    let mut archive = ZipArchive::new(file)?;

    // Read the index in a scoped block so the borrow on `archive` ends
    // before we iterate entries for override extraction below.
    let index_content = {
        let mut index_file = archive.by_name("modrinth.index.json")?;
        let mut content = String::new();
        index_file.read_to_string(&mut content)?;
        content
    };

    let index: serde_json::Value = serde_json::from_str(&index_content)?;

    // Create lockfile; missing MC version falls back to a default.
    let mc_version = index["dependencies"]["minecraft"]
        .as_str()
        .unwrap_or("1.20.1")
        .to_string();

    // Loader detection order: fabric first, then forge, else default fabric.
    // NOTE(review): quilt/neoforge dependencies are not checked — confirm
    // whether packs using those loaders should be supported here.
    let loader =
        if let Some(fabric) = index["dependencies"]["fabric-loader"].as_str() {
            ("fabric".to_string(), fabric.to_string())
        } else if let Some(forge) = index["dependencies"]["forge"].as_str() {
            ("forge".to_string(), forge.to_string())
        } else {
            ("fabric".to_string(), "latest".to_string())
        };

    let mut loaders = std::collections::HashMap::new();
    loaders.insert(loader.0.clone(), loader.1);

    let mut lockfile = LockFile {
        target: Some(Target::Modrinth),
        mc_versions: vec![mc_version.clone()],
        loaders: loaders.clone(),
        projects: Vec::new(),
        lockfile_version: 1,
    };

    // Import projects from files list
    if let Some(files) = index["files"].as_array() {
        log::info!("Importing {} projects from modpack", files.len());

        // Create platform client (token is optional; best-effort load).
        let creds = crate::model::credentials::ResolvedCredentials::load().ok();
        let platform = create_platform(
            "modrinth",
            creds
                .as_ref()
                .and_then(|c| c.modrinth_token().map(std::string::ToString::to_string)),
        )?;

        for file_entry in files {
            // The project id is taken as the second-to-last path segment of
            // the first download URL (CDN URLs are .../data/<id>/versions/...).
            // NOTE(review): this relies on Modrinth CDN URL layout — confirm.
            if let Some(project_id) = file_entry["downloads"]
                .as_array()
                .and_then(|downloads| downloads.first())
                .and_then(|url| url.as_str())
                .and_then(|url| url.split('/').rev().nth(1))
            {
                log::info!("Fetching project: {project_id}");
                match platform
                    .request_project_with_files(project_id, &lockfile.mc_versions, &[
                        loader.0.clone(),
                    ])
                    .await
                {
                    Ok(mut project) => {
                        // Select best file; skip the project on failure
                        // rather than aborting the whole import.
                        if let Err(e) =
                            project.select_file(&lockfile.mc_versions, &[loader.0.clone()])
                        {
                            log::warn!(
                                "Failed to select file for {}: {}",
                                project.get_name(),
                                e
                            );
                            continue;
                        }
                        lockfile.add_project(project);
                    },
                    Err(e) => {
                        // Fetch failures are non-fatal; the project is skipped.
                        log::warn!("Failed to fetch project {project_id}: {e}");
                    },
                }
            }
        }
    }

    // Create config from index metadata (defaults for missing fields).
    let config = Config {
        name: index["name"]
            .as_str()
            .unwrap_or("Imported Pack")
            .to_string(),
        version: index["versionId"]
            .as_str()
            .unwrap_or("1.0.0")
            .to_string(),
        description: index["summary"]
            .as_str()
            .map(std::string::ToString::to_string),
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: Default::default(),
        projects: None,
        export_profiles: None,
    };

    // Save files using provided paths
    lockfile.save(lockfile_dir)?;
    config.save(config_dir)?;

    log::info!("Imported {} projects", lockfile.projects.len());

    // Extract overrides. `enclosed_name` rejects entries that would escape
    // the extraction root (zip-slip protection).
    // NOTE(review): extraction targets are relative to the current working
    // directory, not config_dir — confirm this matches the intended layout.
    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        let outpath = file.enclosed_name().ok_or_else(|| {
            PakkerError::InternalError("Invalid file path in archive".to_string())
        })?;

        if outpath.starts_with("overrides/") {
            let target = outpath.strip_prefix("overrides/").unwrap();

            if file.is_dir() {
                std::fs::create_dir_all(target)?;
            } else {
                if let Some(parent) = target.parent() {
                    std::fs::create_dir_all(parent)?;
                }
                let mut outfile = File::create(target)?;
                std::io::copy(&mut file, &mut outfile)?;
            }
        }
    }

    Ok(())
}
|
||||
|
||||
async fn import_curseforge(
|
||||
path: &Path,
|
||||
lockfile_dir: &Path,
|
||||
config_dir: &Path,
|
||||
) -> Result<()> {
|
||||
use std::{fs::File, io::Read};
|
||||
|
||||
use zip::ZipArchive;
|
||||
|
||||
let file = File::open(path)?;
|
||||
let mut archive = ZipArchive::new(file)?;
|
||||
|
||||
let manifest_content = {
|
||||
let mut manifest_file = archive.by_name("manifest.json")?;
|
||||
let mut content = String::new();
|
||||
manifest_file.read_to_string(&mut content)?;
|
||||
content
|
||||
};
|
||||
|
||||
let manifest: serde_json::Value = serde_json::from_str(&manifest_content)?;
|
||||
|
||||
// Create lockfile
|
||||
let mc_version = manifest["minecraft"]["version"]
|
||||
.as_str()
|
||||
.unwrap_or("1.20.1")
|
||||
.to_string();
|
||||
|
||||
let mod_loaders =
|
||||
manifest["minecraft"]["modLoaders"]
|
||||
.as_array()
|
||||
.ok_or_else(|| {
|
||||
PakkerError::InvalidImportFile("Missing modLoaders".to_string())
|
||||
})?;
|
||||
|
||||
let loader_info = mod_loaders
|
||||
.first()
|
||||
.and_then(|l| l["id"].as_str())
|
||||
.ok_or_else(|| {
|
||||
PakkerError::InvalidImportFile("Missing loader id".to_string())
|
||||
})?;
|
||||
|
||||
let parts: Vec<&str> = loader_info.split('-').collect();
|
||||
let loader_name = (*parts.first().unwrap_or(&"fabric")).to_string();
|
||||
let loader_version = (*parts.get(1).unwrap_or(&"latest")).to_string();
|
||||
|
||||
let mut loaders = std::collections::HashMap::new();
|
||||
loaders.insert(loader_name, loader_version);
|
||||
|
||||
let mut lockfile = LockFile {
|
||||
target: Some(Target::CurseForge),
|
||||
mc_versions: vec![mc_version.clone()],
|
||||
loaders: loaders.clone(),
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
// Import projects from files list
|
||||
if let Some(files) = manifest["files"].as_array() {
|
||||
log::info!("Importing {} projects from modpack", files.len());
|
||||
|
||||
// Create platform client
|
||||
use crate::platform::create_platform;
|
||||
let curseforge_token = std::env::var("CURSEFORGE_TOKEN").ok();
|
||||
let platform = create_platform("curseforge", curseforge_token)?;
|
||||
|
||||
for file_entry in files {
|
||||
if let Some(project_id) = file_entry["projectID"].as_u64() {
|
||||
let project_id_str = project_id.to_string();
|
||||
log::info!("Fetching project: {project_id_str}");
|
||||
|
||||
match platform
|
||||
.request_project_with_files(
|
||||
&project_id_str,
|
||||
&lockfile.mc_versions,
|
||||
&loaders.keys().cloned().collect::<Vec<_>>(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(mut project) => {
|
||||
// Try to select the specific file if fileID is provided
|
||||
if let Some(file_id) = file_entry["fileID"].as_u64() {
|
||||
let file_id_str = file_id.to_string();
|
||||
// Try to find the file with matching ID
|
||||
if let Some(file) =
|
||||
project.files.iter().find(|f| f.id == file_id_str).cloned()
|
||||
{
|
||||
project.files = vec![file];
|
||||
} else {
|
||||
log::warn!(
|
||||
"Could not find file {} for project {}, selecting best match",
|
||||
file_id,
|
||||
project.get_name()
|
||||
);
|
||||
if let Err(e) = project.select_file(
|
||||
&lockfile.mc_versions,
|
||||
&loaders.keys().cloned().collect::<Vec<_>>(),
|
||||
) {
|
||||
log::warn!(
|
||||
"Failed to select file for {}: {}",
|
||||
project.get_name(),
|
||||
e
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// No specific file ID, select best match
|
||||
if let Err(e) = project.select_file(
|
||||
&lockfile.mc_versions,
|
||||
&loaders.keys().cloned().collect::<Vec<_>>(),
|
||||
) {
|
||||
log::warn!(
|
||||
"Failed to select file for {}: {}",
|
||||
project.get_name(),
|
||||
e
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
lockfile.add_project(project);
|
||||
},
|
||||
Err(e) => {
|
||||
log::warn!("Failed to fetch project {project_id_str}: {e}");
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create config
|
||||
let config = Config {
|
||||
name: manifest["name"]
|
||||
.as_str()
|
||||
.unwrap_or("Imported Pack")
|
||||
.to_string(),
|
||||
version: manifest["version"]
|
||||
.as_str()
|
||||
.unwrap_or("1.0.0")
|
||||
.to_string(),
|
||||
description: None,
|
||||
author: manifest["author"]
|
||||
.as_str()
|
||||
.map(std::string::ToString::to_string),
|
||||
overrides: vec!["overrides".to_string()],
|
||||
server_overrides: None,
|
||||
client_overrides: None,
|
||||
paths: Default::default(),
|
||||
projects: None,
|
||||
export_profiles: None,
|
||||
};
|
||||
|
||||
// Save files using provided paths
|
||||
lockfile.save(lockfile_dir)?;
|
||||
config.save(config_dir)?;
|
||||
|
||||
log::info!("Imported {} projects", lockfile.projects.len());
|
||||
|
||||
// Extract overrides
|
||||
let overrides_prefix = manifest["overrides"].as_str().unwrap_or("overrides");
|
||||
|
||||
for i in 0..archive.len() {
|
||||
let mut file = archive.by_index(i)?;
|
||||
let outpath = file.enclosed_name().ok_or_else(|| {
|
||||
PakkerError::InternalError("Invalid file path in archive".to_string())
|
||||
})?;
|
||||
|
||||
if outpath.starts_with(overrides_prefix) {
|
||||
let target = outpath.strip_prefix(overrides_prefix).unwrap();
|
||||
|
||||
if file.is_dir() {
|
||||
std::fs::create_dir_all(target)?;
|
||||
} else {
|
||||
if let Some(parent) = target.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let mut outfile = File::create(target)?;
|
||||
std::io::copy(&mut file, &mut outfile)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
67
src/cli/commands/init.rs
Normal file
67
src/cli/commands/init.rs
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use crate::{
|
||||
cli::InitArgs,
|
||||
error::PakkerError,
|
||||
model::{Config, LockFile, Target},
|
||||
};
|
||||
|
||||
pub async fn execute(
|
||||
args: InitArgs,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<(), PakkerError> {
|
||||
if lockfile_path.exists() {
|
||||
return Err(PakkerError::AlreadyExists(
|
||||
"Lock file already exists".into(),
|
||||
));
|
||||
}
|
||||
|
||||
let target = args.target.as_str();
|
||||
let target_enum = match target {
|
||||
"curseforge" => Target::CurseForge,
|
||||
"modrinth" => Target::Modrinth,
|
||||
"multiplatform" => Target::Multiplatform,
|
||||
_ => {
|
||||
return Err(PakkerError::InvalidInput(format!(
|
||||
"Invalid target: {target}"
|
||||
)));
|
||||
},
|
||||
};
|
||||
|
||||
let mc_versions = vec![args.mc_version];
|
||||
|
||||
let mut loaders = HashMap::new();
|
||||
loaders.insert(args.loader, args.loader_version);
|
||||
|
||||
let lockfile = LockFile {
|
||||
target: Some(target_enum),
|
||||
mc_versions,
|
||||
loaders,
|
||||
projects: Vec::new(),
|
||||
lockfile_version: 1,
|
||||
};
|
||||
|
||||
// Save expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
lockfile.save(lockfile_dir)?;
|
||||
|
||||
let config = Config {
|
||||
name: args.name.unwrap_or_else(|| "My Modpack".to_string()),
|
||||
version: args.version.unwrap_or_else(|| "1.0.0".to_string()),
|
||||
description: None,
|
||||
author: None,
|
||||
overrides: vec!["overrides".to_string()],
|
||||
server_overrides: None,
|
||||
client_overrides: None,
|
||||
paths: HashMap::new(),
|
||||
projects: None,
|
||||
export_profiles: None,
|
||||
};
|
||||
|
||||
let config_dir = config_path.parent().unwrap_or(Path::new("."));
|
||||
config.save(config_dir)?;
|
||||
|
||||
println!("Initialized new modpack with target: {target}");
|
||||
Ok(())
|
||||
}
|
||||
596
src/cli/commands/inspect.rs
Normal file
596
src/cli/commands/inspect.rs
Normal file
|
|
@ -0,0 +1,596 @@
|
|||
use std::{collections::HashSet, path::Path};
|
||||
|
||||
use comfy_table::{Cell, Color, ContentArrangement, Table, presets};
|
||||
use strsim::levenshtein;
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::Result,
|
||||
model::{Config, LockFile, Project, ProjectFile},
|
||||
};
|
||||
|
||||
pub async fn execute(
|
||||
projects: Vec<String>,
|
||||
lockfile_path: &Path,
|
||||
config_path: &Path,
|
||||
) -> Result<()> {
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let config_dir = config_path.parent().unwrap_or(Path::new("."));
|
||||
|
||||
let lockfile = LockFile::load(lockfile_dir)?;
|
||||
let _config = Config::load(config_dir)?;
|
||||
|
||||
let mut found_any = false;
|
||||
let total_projects = projects.len();
|
||||
|
||||
for (idx, project_input) in projects.iter().enumerate() {
|
||||
if let Some(project) = find_project(&lockfile, project_input) {
|
||||
display_project_inspection(project, &lockfile)?;
|
||||
found_any = true;
|
||||
|
||||
// Add separator between projects (but not after the last one)
|
||||
if idx < total_projects - 1 {
|
||||
let width = 80; // Default terminal width
|
||||
println!("{}", "─".repeat(width));
|
||||
println!();
|
||||
}
|
||||
} else {
|
||||
eprintln!(
|
||||
"{}: {}",
|
||||
"Error".red(),
|
||||
format!("Project '{project_input}' not found in lockfile.").red()
|
||||
);
|
||||
|
||||
// Suggest similar projects
|
||||
if let Some(suggestions) =
|
||||
find_similar_projects(&lockfile, project_input, 5)
|
||||
{
|
||||
eprintln!();
|
||||
eprintln!("{}", "Did you mean one of these?".yellow());
|
||||
for suggestion in suggestions {
|
||||
eprintln!(" - {}", suggestion.cyan());
|
||||
}
|
||||
}
|
||||
eprintln!();
|
||||
}
|
||||
}
|
||||
|
||||
if !found_any && !projects.is_empty() {
|
||||
return Err(crate::error::PakkerError::ProjectNotFound(
|
||||
"No projects found".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn find_project<'a>(
|
||||
lockfile: &'a LockFile,
|
||||
query: &str,
|
||||
) -> Option<&'a Project> {
|
||||
lockfile.projects.iter().find(|p| project_matches(p, query))
|
||||
}
|
||||
|
||||
fn project_matches(project: &Project, query: &str) -> bool {
|
||||
// Check slugs
|
||||
for slug in project.slug.values() {
|
||||
if slug.eq_ignore_ascii_case(query) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Check names
|
||||
for name in project.name.values() {
|
||||
if name.eq_ignore_ascii_case(query) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Check pakku_id
|
||||
if let Some(ref pakku_id) = project.pakku_id
|
||||
&& pakku_id.eq_ignore_ascii_case(query)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check aliases
|
||||
for alias in &project.aliases {
|
||||
if alias.eq_ignore_ascii_case(query) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn find_similar_projects(
|
||||
lockfile: &LockFile,
|
||||
query: &str,
|
||||
max_results: usize,
|
||||
) -> Option<Vec<String>> {
|
||||
// Calculate similarity scores for all projects
|
||||
let mut candidates: Vec<(String, usize)> = lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.flat_map(|p| {
|
||||
let mut scores = Vec::new();
|
||||
|
||||
// Check slug similarity
|
||||
for slug in p.slug.values() {
|
||||
let distance = levenshtein(slug, query);
|
||||
if distance <= 3 {
|
||||
scores.push((slug.clone(), distance));
|
||||
}
|
||||
}
|
||||
|
||||
// Check name similarity (case-insensitive)
|
||||
for name in p.name.values() {
|
||||
let distance = levenshtein(&name.to_lowercase(), &query.to_lowercase());
|
||||
if distance <= 3 {
|
||||
scores.push((name.clone(), distance));
|
||||
}
|
||||
}
|
||||
|
||||
// Check aliases
|
||||
for alias in &p.aliases {
|
||||
let distance = levenshtein(alias, query);
|
||||
if distance <= 3 {
|
||||
scores.push((alias.clone(), distance));
|
||||
}
|
||||
}
|
||||
|
||||
scores
|
||||
})
|
||||
.collect();
|
||||
|
||||
if candidates.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Sort by distance (closest first)
|
||||
candidates.sort_by_key(|(_, dist)| *dist);
|
||||
|
||||
// Deduplicate and take top N
|
||||
let mut seen = HashSet::new();
|
||||
let suggestions: Vec<String> = candidates
|
||||
.into_iter()
|
||||
.filter_map(|(name, _)| {
|
||||
if seen.insert(name.clone()) {
|
||||
Some(name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.take(max_results)
|
||||
.collect();
|
||||
|
||||
Some(suggestions)
|
||||
}
|
||||
|
||||
/// Render the full inspection view for one project: header panel, file
/// list, properties and dependency tree, each separated by a blank line.
fn display_project_inspection(
  project: &Project,
  lockfile: &LockFile,
) -> Result<()> {
  // Display project header panel
  display_project_header(project)?;

  // Display project files
  println!();
  display_project_files(&project.files)?;

  // Display properties
  println!();
  display_properties(project)?;

  // Display dependency tree
  println!();
  display_dependencies(project, lockfile)?;

  println!();

  Ok(())
}
|
||||
|
||||
fn display_project_header(project: &Project) -> Result<()> {
|
||||
let name = get_project_name(project);
|
||||
let default_slug = String::from("N/A");
|
||||
let slug = project.slug.values().next().unwrap_or(&default_slug);
|
||||
|
||||
// Create header table with comfy-table
|
||||
let mut table = Table::new();
|
||||
table
|
||||
.load_preset(presets::UTF8_FULL)
|
||||
.set_content_arrangement(ContentArrangement::Dynamic);
|
||||
|
||||
// Title row with name
|
||||
table.add_row(vec![
|
||||
Cell::new(name)
|
||||
.fg(Color::Cyan)
|
||||
.set_alignment(comfy_table::CellAlignment::Left),
|
||||
]);
|
||||
|
||||
// Second row with slug, type, side
|
||||
let metadata = format!(
|
||||
"{} ({}) • {} • {}",
|
||||
slug,
|
||||
project.id.keys().next().unwrap_or(&"unknown".to_string()),
|
||||
format!("{:?}", project.r#type).to_lowercase(),
|
||||
format!("{:?}", project.side).to_lowercase()
|
||||
);
|
||||
table.add_row(vec![
|
||||
Cell::new(metadata)
|
||||
.fg(Color::DarkGrey)
|
||||
.set_alignment(comfy_table::CellAlignment::Left),
|
||||
]);
|
||||
|
||||
println!("{table}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn display_project_files(files: &[ProjectFile]) -> Result<()> {
|
||||
if files.is_empty() {
|
||||
println!("{}", "No files available".yellow());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("{}", "Project Files".cyan().bold());
|
||||
|
||||
for (idx, file) in files.iter().enumerate() {
|
||||
let mut table = Table::new();
|
||||
table
|
||||
.load_preset(presets::UTF8_FULL)
|
||||
.set_content_arrangement(ContentArrangement::Dynamic);
|
||||
|
||||
// Mark the first file as "current"
|
||||
let status = if idx == 0 { "current" } else { "" };
|
||||
let status_text = if status.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!(" {status}")
|
||||
};
|
||||
|
||||
// File path line
|
||||
let file_path = format!("{}={}", file.file_type, file.file_name);
|
||||
table.add_row(vec![
|
||||
Cell::new(format!("{file_path}:{status_text}")).fg(if idx == 0 {
|
||||
Color::Green
|
||||
} else {
|
||||
Color::White
|
||||
}),
|
||||
]);
|
||||
|
||||
// Date published
|
||||
table.add_row(vec![Cell::new(&file.date_published).fg(Color::DarkGrey)]);
|
||||
|
||||
// Empty line
|
||||
table.add_row(vec![Cell::new("")]);
|
||||
|
||||
// Hashes (truncated)
|
||||
if !file.hashes.is_empty() {
|
||||
for (hash_type, hash_value) in &file.hashes {
|
||||
let display_hash = if hash_value.len() > 32 {
|
||||
format!(
|
||||
"{}...{}",
|
||||
&hash_value[..16],
|
||||
&hash_value[hash_value.len() - 16..]
|
||||
)
|
||||
} else {
|
||||
hash_value.clone()
|
||||
};
|
||||
table.add_row(vec![
|
||||
Cell::new(format!("{hash_type}={display_hash}")).fg(Color::DarkGrey),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
println!("{table}");
|
||||
println!();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn display_properties(project: &Project) -> Result<()> {
|
||||
println!("{}", "Properties".cyan().bold());
|
||||
|
||||
println!(
|
||||
" {}={}",
|
||||
"type".yellow(),
|
||||
format!("{:?}", project.r#type).to_lowercase()
|
||||
);
|
||||
println!(
|
||||
" {}={}",
|
||||
"side".yellow(),
|
||||
format!("{:?}", project.side).to_lowercase()
|
||||
);
|
||||
println!(
|
||||
" {}={}",
|
||||
"update_strategy".yellow(),
|
||||
format!("{:?}", project.update_strategy).to_lowercase()
|
||||
);
|
||||
println!(
|
||||
" {}={}",
|
||||
"redistributable".yellow(),
|
||||
project.redistributable
|
||||
);
|
||||
|
||||
if let Some(subpath) = &project.subpath {
|
||||
println!(" {}={}", "subpath".yellow(), subpath);
|
||||
}
|
||||
|
||||
if !project.aliases.is_empty() {
|
||||
let aliases: Vec<_> = project.aliases.iter().cloned().collect();
|
||||
println!(" {}={}", "aliases".yellow(), aliases.join(", "));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn display_dependencies(project: &Project, lockfile: &LockFile) -> Result<()> {
|
||||
println!("{}", "Dependencies".cyan().bold());
|
||||
|
||||
// Collect all dependencies from all files
|
||||
let mut all_deps = HashSet::new();
|
||||
for file in &project.files {
|
||||
for dep in &file.required_dependencies {
|
||||
all_deps.insert(dep.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if all_deps.is_empty() {
|
||||
println!(" {}", "No dependencies".dim());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Display dependency tree
|
||||
let mut visited = HashSet::new();
|
||||
for dep_id in all_deps {
|
||||
display_dependency_tree(&dep_id, lockfile, 1, &mut visited)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Print one dependency line (and, up to depth 5, its transitive
/// dependencies) as an indented tree.
///
/// `visited` records project names already printed on this walk so that
/// circular chains are cut off and flagged with "(circular)" instead of
/// recursing forever. Dependencies that cannot be resolved against the
/// lockfile are printed with a "(not in lockfile)" marker.
fn display_dependency_tree(
  dep_id: &str,
  lockfile: &LockFile,
  depth: usize,
  visited: &mut HashSet<String>,
) -> Result<()> {
  let indent = " ".repeat(depth);
  // Root-level entries use the corner glyph, nested ones the tee.
  let tree_char = if depth == 1 { "└─" } else { "├─" };

  // Find the project in lockfile
  let project = lockfile.projects.iter().find(|p| {
    // Check if any ID matches — platform ids, slugs, or the pakku id.
    p.id.values().any(|id| id == dep_id)
      || p.slug.values().any(|slug| slug == dep_id)
      || p.pakku_id.as_ref() == Some(&dep_id.to_string())
  });

  if let Some(proj) = project {
    let name = get_project_name(proj);

    // Check for circular dependency
    if visited.contains(&name) {
      println!("{}{} {} {}", indent, tree_char, name, "(circular)".red());
      return Ok(());
    }

    println!("{}{} {} (required)", indent, tree_char, name.green());
    visited.insert(name);

    // Recursively display nested dependencies (limit depth to avoid infinite
    // loops)
    if depth < 5 {
      for file in &proj.files {
        for nested_dep in &file.required_dependencies {
          display_dependency_tree(nested_dep, lockfile, depth + 1, visited)?;
        }
      }
    }
  } else {
    // Dependency not found in lockfile
    println!(
      "{}{} {} {}",
      indent,
      tree_char,
      dep_id,
      "(not in lockfile)".yellow()
    );
  }

  Ok(())
}
|
||||
|
||||
fn get_project_name(project: &Project) -> String {
|
||||
project
|
||||
.name
|
||||
.values()
|
||||
.next()
|
||||
.or_else(|| project.slug.values().next())
|
||||
.cloned()
|
||||
.unwrap_or_else(|| "Unknown".to_string())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use std::collections::HashMap;

  use super::*;
  use crate::model::enums::{
    ProjectSide,
    ProjectType,
    ReleaseType,
    UpdateStrategy,
  };

  // Builds a minimal single-platform ("modrinth") mod project with the
  // given identifiers and no files.
  fn create_test_project(pakku_id: &str, slug: &str, name: &str) -> Project {
    let mut slug_map = HashMap::new();
    slug_map.insert("modrinth".to_string(), slug.to_string());

    let mut name_map = HashMap::new();
    name_map.insert("modrinth".to_string(), name.to_string());

    let mut id_map = HashMap::new();
    id_map.insert("modrinth".to_string(), pakku_id.to_string());

    Project {
      pakku_id: Some(pakku_id.to_string()),
      pakku_links: HashSet::new(),
      r#type: ProjectType::Mod,
      side: ProjectSide::Both,
      slug: slug_map,
      name: name_map,
      id: id_map,
      update_strategy: UpdateStrategy::Latest,
      redistributable: true,
      subpath: None,
      aliases: HashSet::new(),
      export: true,
      files: vec![],
    }
  }

  // Builds a Modrinth-targeted lockfile (MC 1.20.1, fabric 0.15.0)
  // containing the given projects.
  fn create_test_lockfile(projects: Vec<Project>) -> LockFile {
    use crate::model::enums::Target;
    let mut loaders = HashMap::new();
    loaders.insert("fabric".to_string(), "0.15.0".to_string());

    let mut lockfile = LockFile {
      target: Some(Target::Modrinth),
      mc_versions: vec!["1.20.1".to_string()],
      loaders,
      projects: Vec::new(),
      lockfile_version: 1,
    };

    for project in projects {
      lockfile.add_project(project);
    }

    lockfile
  }

  #[test]
  fn test_find_project_by_slug() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);

    let found = find_project(&lockfile, "test-slug");
    assert!(found.is_some());
    assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
  }

  #[test]
  fn test_find_project_by_name() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);

    let found = find_project(&lockfile, "test mod"); // Case-insensitive
    assert!(found.is_some());
    assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
  }

  #[test]
  fn test_find_project_by_pakku_id() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);

    let found = find_project(&lockfile, "test-id");
    assert!(found.is_some());
    assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
  }

  #[test]
  fn test_find_project_not_found() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);

    let found = find_project(&lockfile, "nonexistent");
    assert!(found.is_none());
  }

  #[test]
  fn test_fuzzy_matching_close_match() {
    let project1 = create_test_project("id1", "fabric-api", "Fabric API");
    let project2 = create_test_project("id2", "sodium", "Sodium");
    let lockfile = create_test_lockfile(vec![project1, project2]);

    // Typo: "fabrc-api" should suggest "fabric-api"
    let suggestions = find_similar_projects(&lockfile, "fabrc-api", 5);
    assert!(suggestions.is_some());
    let suggestions = suggestions.unwrap();
    assert!(!suggestions.is_empty());
    assert!(suggestions.contains(&"fabric-api".to_string()));
  }

  #[test]
  fn test_fuzzy_matching_no_match() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);

    // Very different query, should have no suggestions (distance > 3)
    let suggestions =
      find_similar_projects(&lockfile, "completely-different-xyz", 5);
    assert!(suggestions.is_none() || suggestions.unwrap().is_empty());
  }

  #[test]
  fn test_project_matches_alias() {
    let mut project = create_test_project("test-id", "test-slug", "Test Mod");
    project.aliases.insert("test-alias".to_string());

    assert!(project_matches(&project, "test-alias"));
  }

  #[test]
  fn test_circular_dependency_detection() {
    // This is a conceptual test - in practice, we'd need to set up files with
    // dependencies
    let mut project1 = create_test_project("dep1", "dep1-slug", "Dependency 1");
    let mut project2 = create_test_project("dep2", "dep2-slug", "Dependency 2");

    // Create files with circular dependencies: dep1 -> dep2 -> dep1.
    let file1 = ProjectFile {
      file_type: "modrinth".to_string(),
      file_name: "dep1.jar".to_string(),
      mc_versions: vec!["1.20.1".to_string()],
      loaders: vec!["fabric".to_string()],
      release_type: ReleaseType::Release,
      url: "https://example.com/dep1.jar".to_string(),
      id: "file1".to_string(),
      parent_id: "dep1".to_string(),
      hashes: HashMap::new(),
      required_dependencies: vec!["dep2".to_string()],
      size: 1000,
      date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    let file2 = ProjectFile {
      file_type: "modrinth".to_string(),
      file_name: "dep2.jar".to_string(),
      mc_versions: vec!["1.20.1".to_string()],
      loaders: vec!["fabric".to_string()],
      release_type: ReleaseType::Release,
      url: "https://example.com/dep2.jar".to_string(),
      id: "file2".to_string(),
      parent_id: "dep2".to_string(),
      hashes: HashMap::new(),
      required_dependencies: vec!["dep1".to_string()],
      size: 1000,
      date_published: "2024-01-01T00:00:00Z".to_string(),
    };

    project1.files.push(file1);
    project2.files.push(file2);

    let lockfile = create_test_lockfile(vec![project1, project2]);

    // Test that display_dependency_tree handles circular deps gracefully
    // (terminates and returns Ok rather than recursing forever).
    let mut visited = HashSet::new();
    let result = display_dependency_tree("dep1", &lockfile, 1, &mut visited);
    assert!(result.is_ok());
  }
}
|
||||
47
src/cli/commands/link.rs
Normal file
47
src/cli/commands/link.rs
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::LinkArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::LockFile,
|
||||
};
|
||||
|
||||
pub fn execute(args: LinkArgs, lockfile_path: &Path) -> Result<()> {
|
||||
log::info!("Linking {} -> {}", args.from, args.to);
|
||||
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let mut lockfile = LockFile::load(lockfile_dir)?;
|
||||
|
||||
// Find projects
|
||||
let from_project = lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.find(|p| p.matches_input(&args.from))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?;
|
||||
let from_id = from_project.pakku_id.clone().ok_or_else(|| {
|
||||
PakkerError::InvalidProject("From project has no pakku_id".to_string())
|
||||
})?;
|
||||
|
||||
let to_project = lockfile
|
||||
.projects
|
||||
.iter_mut()
|
||||
.find(|p| p.matches_input(&args.to))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;
|
||||
|
||||
// Check if link already exists
|
||||
if to_project.pakku_links.contains(&from_id) {
|
||||
log::info!("Link already exists");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Add link
|
||||
to_project.pakku_links.insert(from_id);
|
||||
|
||||
// Save lockfile
|
||||
lockfile.save(lockfile_dir)?;
|
||||
|
||||
log::info!("Successfully linked projects");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
50
src/cli/commands/ls.rs
Normal file
50
src/cli/commands/ls.rs
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{cli::LsArgs, error::Result, model::LockFile};
|
||||
|
||||
pub fn execute(args: LsArgs, lockfile_path: &Path) -> Result<()> {
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let lockfile = LockFile::load(lockfile_dir)?;
|
||||
|
||||
if lockfile.projects.is_empty() {
|
||||
println!("No projects installed");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("Installed projects ({}):", lockfile.projects.len());
|
||||
println!();
|
||||
|
||||
for project in &lockfile.projects {
|
||||
if args.detailed {
|
||||
let id = project.pakku_id.as_deref().unwrap_or("unknown");
|
||||
println!(" {} ({})", project.get_name(), id);
|
||||
println!(" Type: {:?}", project.r#type);
|
||||
println!(" Side: {:?}", project.side);
|
||||
|
||||
if let Some(file) = project.files.first() {
|
||||
println!(" File: {}", file.file_name);
|
||||
println!(
|
||||
" Version: {} ({})",
|
||||
file.release_type, file.date_published
|
||||
);
|
||||
}
|
||||
|
||||
if !project.pakku_links.is_empty() {
|
||||
println!(" Dependencies: {}", project.pakku_links.len());
|
||||
}
|
||||
|
||||
println!();
|
||||
} else {
|
||||
let file_info = project
|
||||
.files
|
||||
.first()
|
||||
.map(|f| format!(" ({})", f.file_name))
|
||||
.unwrap_or_default();
|
||||
|
||||
println!(" {}{}", project.get_name(), file_info);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
23
src/cli/commands/mod.rs
Normal file
23
src/cli/commands/mod.rs
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
//! CLI subcommand implementations.
//!
//! Each module exposes an `execute` entry point that is dispatched from
//! the argument parser.

pub mod add;
pub mod add_prj;
pub mod cfg;
pub mod cfg_prj;
pub mod credentials;
pub mod credentials_set;
pub mod diff;
pub mod export;
pub mod fetch;
pub mod fork;
pub mod import;
pub mod init;
pub mod inspect;
pub mod link;
pub mod ls;
pub mod remote;
pub mod remote_update;
pub mod rm;
pub mod set;
pub mod status;
pub mod sync;
pub mod unlink;
pub mod update;
|
||||
151
src/cli/commands/remote.rs
Normal file
151
src/cli/commands/remote.rs
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
cli::RemoteArgs,
|
||||
error::{PakkerError, Result},
|
||||
fetch::Fetcher,
|
||||
git,
|
||||
model::{config::Config, lockfile::LockFile},
|
||||
};
|
||||
|
||||
const REMOTE_DIR: &str = ".pakku-remote";
|
||||
|
||||
/// Install or refresh a modpack from a remote Git repository.
///
/// Behavior depends on the flags:
/// - `--remove`: delete the local `.pakku-remote` checkout and return.
/// - no URL: print the current remote status and return.
/// - URL given: clone the repository into `.pakku-remote` (or
///   fetch+reset if it already exists), copy its lockfile/config into
///   the working directory, download all project files, then sync
///   override directories.
pub async fn execute(args: RemoteArgs) -> Result<()> {
  let remote_path = PathBuf::from(REMOTE_DIR);

  // Handle --remove flag
  if args.remove {
    if remote_path.exists() {
      fs::remove_dir_all(&remote_path)?;
      log::info!("Removed remote from modpack");
    } else {
      log::warn!("No remote configured");
    }
    return Ok(());
  }

  // If no URL provided, show status
  if args.url.is_none() {
    show_remote_status(&remote_path)?;
    return Ok(());
  }

  let url = args.url.unwrap();
  log::info!("Installing modpack from: {url}");

  // Clone or update repository. An existing checkout is fast-forwarded
  // (fetch + hard reset) rather than re-cloned.
  if remote_path.exists() {
    log::info!("Remote directory exists, updating...");
    let remote_name = "origin";
    let ref_name = args.branch.as_deref().unwrap_or("HEAD");

    git::fetch_updates(&remote_path, remote_name, ref_name, None)?;
    git::reset_to_ref(&remote_path, remote_name, ref_name)?;
  } else {
    log::info!("Cloning repository...");
    let ref_name = args.branch.as_deref().unwrap_or("HEAD");
    git::clone_repository(&url, &remote_path, ref_name, None)?;
  }

  // Load lockfile and config from remote. A missing lockfile means the
  // repository is not a pakku modpack.
  let remote_lockfile_path = remote_path.join("pakku-lock.json");
  if !remote_lockfile_path.exists() {
    return Err(PakkerError::ConfigError(
      "Remote repository does not contain pakku-lock.json".to_string(),
    ));
  }

  let remote_lockfile = LockFile::load(&remote_path)?;
  // Config is optional; failures are tolerated (`.ok()`).
  let remote_config = Config::load(&remote_path).ok();

  // Copy lockfile to current directory
  let current_lockfile_path = PathBuf::from("pakku-lock.json");
  fs::copy(&remote_lockfile_path, &current_lockfile_path)?;
  log::info!("Copied lockfile from remote");

  // Copy config if exists
  if remote_config.is_some() {
    let remote_config_path = remote_path.join("pakku.json");
    let current_config_path = PathBuf::from("pakku.json");
    if remote_config_path.exists() {
      fs::copy(&remote_config_path, &current_config_path)?;
      log::info!("Copied config from remote");
    }
  }

  // Fetch project files
  log::info!("Fetching project files...");
  let fetcher = Fetcher::new(&remote_path);
  fetcher
    .fetch_all(&remote_lockfile, &remote_config.unwrap_or_default())
    .await?;

  // Sync overrides
  sync_overrides(&remote_path, args.server_pack)?;

  log::info!("Successfully installed modpack from remote");
  Ok(())
}
|
||||
|
||||
fn show_remote_status(remote_path: &Path) -> Result<()> {
|
||||
if !remote_path.exists() {
|
||||
println!("No remote configured");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("Remote status:");
|
||||
println!(" Directory: {}", remote_path.display());
|
||||
|
||||
if git::is_git_repository(remote_path) {
|
||||
if let Ok(url) = git::get_remote_url(remote_path, "origin") {
|
||||
println!(" URL: {url}");
|
||||
}
|
||||
if let Ok(sha) = git::get_current_commit_sha(remote_path, None) {
|
||||
println!(" Commit: {}", &sha[..8]);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn sync_overrides(remote_path: &Path, server_pack: bool) -> Result<()> {
|
||||
let override_dirs = if server_pack {
|
||||
vec!["overrides", "server_overrides"]
|
||||
} else {
|
||||
vec!["overrides", "client_overrides"]
|
||||
};
|
||||
|
||||
for dir_name in override_dirs {
|
||||
let src_dir = remote_path.join(dir_name);
|
||||
if src_dir.exists() && src_dir.is_dir() {
|
||||
log::info!("Syncing {dir_name} directory...");
|
||||
copy_dir_recursive(&src_dir, Path::new("."))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> {
|
||||
if !dst.exists() {
|
||||
fs::create_dir_all(dst)?;
|
||||
}
|
||||
|
||||
for entry in fs::read_dir(src)? {
|
||||
let entry = entry?;
|
||||
let src_path = entry.path();
|
||||
let file_name = entry.file_name();
|
||||
let dst_path = dst.join(file_name);
|
||||
|
||||
if src_path.is_dir() {
|
||||
copy_dir_recursive(&src_path, &dst_path)?;
|
||||
} else {
|
||||
fs::copy(&src_path, &dst_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
121
src/cli/commands/remote_update.rs
Normal file
121
src/cli/commands/remote_update.rs
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use crate::{cli::RemoteUpdateArgs, error::PakkerError, git, model::Config};
|
||||
|
||||
/// Update modpack from remote Git repository
|
||||
///
|
||||
/// This command updates the current modpack from its remote Git repository.
|
||||
/// It fetches the latest changes from the remote and syncs overrides.
|
||||
pub async fn execute(args: RemoteUpdateArgs) -> Result<(), PakkerError> {
|
||||
// Check if lockfile exists in current directory - if it does, we're in a
|
||||
// modpack directory and should not update remote (use regular update
|
||||
// instead)
|
||||
let lockfile_path = PathBuf::from("pakku-lock.json");
|
||||
if lockfile_path.exists() {
|
||||
return Err(PakkerError::InvalidInput(
|
||||
"Cannot update remote from a modpack directory. Use 'update' command \
|
||||
instead."
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Remote directory for the cloned modpack
|
||||
let remote_dir = PathBuf::from(".pakku-remote");
|
||||
|
||||
// Check if remote directory exists
|
||||
if !remote_dir.exists() {
|
||||
return Err(PakkerError::RemoteNotFound(
|
||||
"No remote found. Use 'remote' command to install a modpack first."
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Fetch updates from remote repository
|
||||
println!("Updating remote repository...");
|
||||
let remote_name = "origin";
|
||||
let ref_name = args.branch.as_deref().unwrap_or("HEAD");
|
||||
git::fetch_updates(&remote_dir, remote_name, ref_name, None)?;
|
||||
|
||||
// Read remote lockfile
|
||||
let remote_lockfile_path = remote_dir.join("pakku-lock.json");
|
||||
if !remote_lockfile_path.exists() {
|
||||
return Err(PakkerError::FileNotFound(
|
||||
"Remote lockfile not found".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Read remote config if it exists
|
||||
let remote_config_path = remote_dir.join("pakku.json");
|
||||
let _remote_config = if remote_config_path.exists() {
|
||||
match Config::load(&remote_config_path) {
|
||||
Ok(config) => Some(config),
|
||||
Err(e) => {
|
||||
eprintln!("Warning: Could not read remote config: {e}");
|
||||
None
|
||||
},
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Sync overrides from remote directory
|
||||
println!("Syncing overrides...");
|
||||
sync_overrides(&remote_dir).await?;
|
||||
|
||||
// Clean up remote directory
|
||||
std::fs::remove_dir_all(&remote_dir)?;
|
||||
|
||||
println!("Remote modpack updated successfully.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sync override files from remote directory to current directory
|
||||
async fn sync_overrides(remote_dir: &PathBuf) -> Result<(), PakkerError> {
|
||||
let remote_config_path = remote_dir.join("pakku.json");
|
||||
if !remote_config_path.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let config = Config::load(&remote_config_path)?;
|
||||
|
||||
// Get override directories from config
|
||||
let overrides = config.overrides;
|
||||
if overrides.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for override_path in overrides {
|
||||
let source = remote_dir.join(&override_path);
|
||||
let dest = PathBuf::from(&override_path);
|
||||
|
||||
if source.exists() {
|
||||
// Copy override directory
|
||||
copy_directory(&source, &dest)?;
|
||||
println!(" Synced: {override_path}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Recursively copy a directory
|
||||
fn copy_directory(src: &PathBuf, dest: &PathBuf) -> Result<(), PakkerError> {
|
||||
if !dest.exists() {
|
||||
std::fs::create_dir_all(dest)?;
|
||||
}
|
||||
|
||||
for entry in std::fs::read_dir(src)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
let dest_path = dest.join(entry.file_name());
|
||||
|
||||
if path.is_dir() {
|
||||
copy_directory(&path, &dest_path)?;
|
||||
} else {
|
||||
std::fs::copy(&path, &dest_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
89
src/cli/commands/rm.rs
Normal file
89
src/cli/commands/rm.rs
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::RmArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::LockFile,
|
||||
ui_utils::prompt_yes_no,
|
||||
};
|
||||
|
||||
pub async fn execute(
|
||||
args: RmArgs,
|
||||
lockfile_path: &Path,
|
||||
_config_path: &Path,
|
||||
) -> Result<()> {
|
||||
log::info!("Removing projects: {:?}", args.inputs);
|
||||
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let mut lockfile = LockFile::load(lockfile_dir)?;
|
||||
|
||||
let mut removed_count = 0;
|
||||
let mut removed_ids = Vec::new();
|
||||
let mut projects_to_remove = Vec::new();
|
||||
|
||||
// First, identify all projects to remove
|
||||
for input in &args.inputs {
|
||||
// Find project by various identifiers
|
||||
if let Some(project) = lockfile.projects.iter().find(|p| {
|
||||
p.pakku_id.as_deref() == Some(input)
|
||||
|| p.slug.values().any(|s| s == input)
|
||||
|| p.name.values().any(|n| n.eq_ignore_ascii_case(input))
|
||||
|| p.aliases.contains(input)
|
||||
}) {
|
||||
projects_to_remove.push(project.get_name());
|
||||
} else {
|
||||
log::warn!("Project not found: {input}");
|
||||
}
|
||||
}
|
||||
|
||||
if projects_to_remove.is_empty() {
|
||||
return Err(PakkerError::ProjectNotFound(
|
||||
"None of the specified projects found".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Ask for confirmation unless --yes flag is provided
|
||||
if !args.yes {
|
||||
println!("The following projects will be removed:");
|
||||
for name in &projects_to_remove {
|
||||
println!(" - {name}");
|
||||
}
|
||||
|
||||
if !prompt_yes_no("Do you want to continue?", false)? {
|
||||
println!("Removal cancelled.");
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
// Now actually remove the projects
|
||||
for input in &args.inputs {
|
||||
if let Some(pos) = lockfile.projects.iter().position(|p| {
|
||||
p.pakku_id.as_deref() == Some(input)
|
||||
|| p.slug.values().any(|s| s == input)
|
||||
|| p.name.values().any(|n| n.eq_ignore_ascii_case(input))
|
||||
|| p.aliases.contains(input)
|
||||
}) {
|
||||
let project = lockfile.projects.remove(pos);
|
||||
log::info!("Removed: {}", project.get_name());
|
||||
if let Some(pakku_id) = project.pakku_id.clone() {
|
||||
removed_ids.push(pakku_id);
|
||||
}
|
||||
removed_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up pakku_links from all remaining projects
|
||||
for project in &mut lockfile.projects {
|
||||
project
|
||||
.pakku_links
|
||||
.retain(|link| !removed_ids.contains(link));
|
||||
}
|
||||
|
||||
// Save lockfile
|
||||
lockfile.save(lockfile_dir)?;
|
||||
|
||||
log::info!("Successfully removed {removed_count} project(s)");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
156
src/cli/commands/set.rs
Normal file
156
src/cli/commands/set.rs
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
use std::{collections::HashMap, path::Path, str::FromStr};
|
||||
|
||||
use crate::{
|
||||
cli::SetArgs,
|
||||
error::PakkerError,
|
||||
model::{Config, LockFile, ProjectSide, ProjectType, Target, UpdateStrategy},
|
||||
};
|
||||
|
||||
/// Apply `set` modifications: either lockfile-level properties (target,
/// Minecraft versions, loaders) or per-project properties (type, side,
/// update strategy, redistributable), depending on which arguments were
/// supplied. Lockfile properties take precedence when both are present.
///
/// # Errors
///
/// Returns `PakkerError::InvalidInput` for malformed values or when
/// neither a project identifier nor any lockfile property is given, and
/// `PakkerError::ProjectNotFound` when the given identifier matches no
/// project. Lockfile/config load and save errors are propagated.
pub async fn execute(
    args: SetArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;

    // Check if we're modifying lockfile properties or project properties
    let is_lockfile_modification = args.target.is_some()
        || args.mc_versions.is_some()
        || args.loaders.is_some();

    if is_lockfile_modification {
        // Modify lockfile properties
        if let Some(target_str) = &args.target {
            let target = Target::from_str(target_str).map_err(|e| {
                PakkerError::InvalidInput(format!("Invalid target: {e}"))
            })?;
            lockfile.target = Some(target);
            println!("Set target to: {target:?}");
        }

        if let Some(mc_versions_str) = &args.mc_versions {
            // Comma-separated list; entries are trimmed and empties dropped.
            let mc_versions: Vec<String> = mc_versions_str
                .split(',')
                .map(|s| s.trim().to_string())
                .filter(|s| !s.is_empty())
                .collect();

            if mc_versions.is_empty() {
                return Err(PakkerError::InvalidInput(
                    "At least one Minecraft version is required".to_string(),
                ));
            }

            // Validate that all projects are compatible with new MC versions.
            // Incompatibility only warns; the change is still applied.
            for project in &lockfile.projects {
                let compatible = project
                    .files
                    .iter()
                    .any(|file| file.mc_versions.iter().any(|v| mc_versions.contains(v)));
                if !compatible {
                    eprintln!(
                        "Warning: Project '{}' has no files compatible with new MC \
                         versions",
                        project.get_name()
                    );
                }
            }

            lockfile.mc_versions = mc_versions.clone();
            println!("Set Minecraft versions to: {mc_versions:?}");
        }

        if let Some(loaders_str) = &args.loaders {
            // Comma-separated "name=version" pairs.
            let mut loaders: HashMap<String, String> = HashMap::new();

            for pair in loaders_str.split(',') {
                let parts: Vec<&str> = pair.split('=').collect();
                if parts.len() != 2 {
                    return Err(PakkerError::InvalidInput(format!(
                        "Invalid loader format '{pair}'. Expected 'name=version'"
                    )));
                }
                loaders
                    .insert(parts[0].trim().to_string(), parts[1].trim().to_string());
            }

            if loaders.is_empty() {
                return Err(PakkerError::InvalidInput(
                    "At least one loader is required".to_string(),
                ));
            }

            let loader_names: Vec<String> = loaders.keys().cloned().collect();

            // Validate that all projects are compatible with new loaders.
            // A file with an empty loader list is treated as loader-agnostic.
            for project in &lockfile.projects {
                let compatible = project.files.iter().any(|file| {
                    file.loaders.is_empty()
                        || file.loaders.iter().any(|l| loader_names.contains(l))
                });
                if !compatible {
                    eprintln!(
                        "Warning: Project '{}' has no files compatible with new loaders",
                        project.get_name()
                    );
                }
            }

            lockfile.loaders = loaders.clone();
            println!("Set loaders to: {loaders:?}");
        }

        lockfile.save(lockfile_dir)?;
        println!("Lockfile properties updated successfully");
    } else if let Some(input) = &args.input {
        // Modify project properties.
        // Block scope so the mutable borrow of `lockfile.projects` ends
        // before `lockfile.save` below.
        let project_name = {
            let project = lockfile
                .projects
                .iter_mut()
                .find(|p| p.matches_input(input))
                .ok_or_else(|| PakkerError::ProjectNotFound(input.clone()))?;

            if let Some(type_str) = &args.r#type {
                let project_type =
                    ProjectType::from_str(type_str).map_err(PakkerError::InvalidInput)?;
                project.r#type = project_type;
            }

            if let Some(side_str) = &args.side {
                let side =
                    ProjectSide::from_str(side_str).map_err(PakkerError::InvalidInput)?;
                project.side = side;
            }

            if let Some(strategy_str) = &args.strategy {
                let strategy = UpdateStrategy::from_str(strategy_str)
                    .map_err(PakkerError::InvalidInput)?;
                project.update_strategy = strategy;
            }

            if let Some(redistributable) = args.redistributable {
                project.redistributable = redistributable;
            }

            project.get_name()
        };

        lockfile.save(lockfile_dir)?;
        // NOTE(review): `config` is loaded but never modified here, so this
        // save rewrites the config file unchanged — confirm it is intended.
        config.save(config_dir)?;

        println!("Updated project: {project_name}");
    } else {
        return Err(PakkerError::InvalidInput(
            "Either provide a project identifier or lockfile properties to modify"
                .to_string(),
        ));
    }

    Ok(())
}
|
||||
370
src/cli/commands/status.rs
Normal file
370
src/cli/commands/status.rs
Normal file
|
|
@ -0,0 +1,370 @@
|
|||
use std::{collections::HashMap, path::Path, sync::Arc};
|
||||
|
||||
use futures::stream::{FuturesUnordered, StreamExt};
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use tokio::sync::Semaphore;
|
||||
use yansi::Paint;
|
||||
|
||||
use crate::{
|
||||
error::Result,
|
||||
model::{Config, LockFile, Project},
|
||||
platform::create_platform,
|
||||
};
|
||||
|
||||
/// Show modpack status: print pack metadata, check every project for
/// newer files (sequentially or with bounded parallelism), report the
/// results and any per-project errors, and optionally hand off to the
/// `update` command if the user accepts.
///
/// # Errors
///
/// Propagates lockfile/config load errors, prompt I/O errors, and any
/// error from the delegated `update::execute` run.
pub async fn execute(
    parallel: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // `load` expects the containing directory, not the file path itself.
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));

    let lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;

    // Display modpack metadata
    display_modpack_info(&lockfile, &config);
    println!();

    // Check for updates (sequential or parallel)
    let (updates, errors) = if parallel {
        check_updates_parallel(&lockfile).await?
    } else {
        check_updates_sequential(&lockfile).await?
    };

    // Display results
    display_update_results(&updates);

    // Display errors if any — these are per-project failures collected
    // during the check, not fatal errors.
    if !errors.is_empty() {
        println!();
        println!("{}", "Errors encountered:".red());
        for (project, error) in &errors {
            println!(" - {}: {}", project.yellow(), error.red());
        }
    }

    // Prompt to update if there are updates available
    if !updates.is_empty() {
        println!();
        if crate::ui_utils::prompt_yes_no("Update now?", false)? {
            // Call update command programmatically (update all projects);
            // empty `inputs` means "every project", and `yes: true`
            // suppresses the interactive version picker.
            let update_args = crate::cli::UpdateArgs {
                inputs: vec![],
                yes: true, // Auto-yes for status command
            };
            crate::cli::commands::update::execute(
                update_args,
                lockfile_path,
                config_path,
            )
            .await?;
        }
    }

    Ok(())
}
|
||||
|
||||
/// Print the modpack header: pack name, version and author from the
/// config, then the lockfile's Minecraft versions, loaders
/// ("name-version" pairs) and target platform.
fn display_modpack_info(lockfile: &LockFile, config: &Config) {
    let author = config.author.as_deref().unwrap_or("Unknown");
    println!(
        "Managing {} modpack, version {}, by {}",
        config.name.cyan(),
        config.version.cyan(),
        author.cyan()
    );

    let versions_text = lockfile.mc_versions.join(", ");
    let loaders_text = lockfile
        .loaders
        .iter()
        .map(|(loader, version)| format!("{loader}-{version}"))
        .collect::<Vec<_>>()
        .join(", ");

    println!(
        "on Minecraft version {}, loader {}, targeting platform {:?}.",
        versions_text.cyan(),
        loaders_text.cyan(),
        lockfile.target
    );
}
|
||||
|
||||
/// An available update for one project, as collected by the status check.
#[derive(Debug)]
struct ProjectUpdate {
    /// Platform name -> project slug, copied from the lockfile project.
    slug: HashMap<String, String>,
    /// Display name (first entry of the project's name map, or empty).
    name: String,
    /// Debug-formatted project type string.
    project_type: String,
    /// Debug-formatted project side string.
    side: String,
    /// One entry per file with a newer version available.
    file_updates: Vec<FileUpdate>,
}
|
||||
|
||||
/// A single file-level change detected between the lockfile and a platform.
#[derive(Debug)]
struct FileUpdate {
    /// NOTE(review): `detect_file_updates` fills this from the file's
    /// `file_type`, not a platform name — the field name is misleading;
    /// confirm which is intended before renaming.
    platform: String,
    /// File name currently recorded in the lockfile.
    old_filename: String,
    /// File name of the newer version found on the platform.
    new_filename: String,
}
|
||||
|
||||
async fn check_updates_sequential(
|
||||
lockfile: &LockFile,
|
||||
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
|
||||
let total = lockfile.projects.len();
|
||||
let mut updates = Vec::new();
|
||||
let mut errors = Vec::new();
|
||||
|
||||
// Create progress bar
|
||||
let pb = ProgressBar::new(total as u64);
|
||||
pb.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
|
||||
.unwrap()
|
||||
.progress_chars("#>-"),
|
||||
);
|
||||
pb.set_message("Checking for updates...");
|
||||
|
||||
for project in &lockfile.projects {
|
||||
let project_name = project
|
||||
.name
|
||||
.values()
|
||||
.next()
|
||||
.unwrap_or(&"Unknown".to_string())
|
||||
.clone();
|
||||
pb.set_message(format!("Checking {project_name}..."));
|
||||
|
||||
match check_project_update(project, lockfile).await {
|
||||
Ok(update_opt) => {
|
||||
if let Some(update) = update_opt {
|
||||
updates.push(update);
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
errors.push((project_name.clone(), e.to_string()));
|
||||
},
|
||||
}
|
||||
|
||||
pb.inc(1);
|
||||
}
|
||||
|
||||
pb.finish_with_message(format!("Checked {total} projects"));
|
||||
println!(); // Add blank line after progress bar
|
||||
|
||||
Ok((updates, errors))
|
||||
}
|
||||
|
||||
async fn check_updates_parallel(
|
||||
lockfile: &LockFile,
|
||||
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
|
||||
let total = lockfile.projects.len();
|
||||
let semaphore = Arc::new(Semaphore::new(10));
|
||||
let mut futures = FuturesUnordered::new();
|
||||
|
||||
// Create progress bar
|
||||
let pb = Arc::new(ProgressBar::new(total as u64));
|
||||
pb.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
|
||||
.unwrap()
|
||||
.progress_chars("#>-"),
|
||||
);
|
||||
pb.set_message("Checking for updates (parallel)...");
|
||||
|
||||
for project in &lockfile.projects {
|
||||
let project = project.clone();
|
||||
let sem = semaphore.clone();
|
||||
let pb_clone = pb.clone();
|
||||
let lockfile_clone = lockfile.clone();
|
||||
|
||||
futures.push(async move {
|
||||
let _permit = sem.acquire().await.unwrap();
|
||||
let result = check_project_update(&project, &lockfile_clone).await;
|
||||
pb_clone.inc(1);
|
||||
(project, result)
|
||||
});
|
||||
}
|
||||
|
||||
let mut updates = Vec::new();
|
||||
let mut errors = Vec::new();
|
||||
|
||||
while let Some((project, result)) = futures.next().await {
|
||||
match result {
|
||||
Ok(update_opt) => {
|
||||
if let Some(update) = update_opt {
|
||||
updates.push(update);
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
let project_name = project
|
||||
.name
|
||||
.values()
|
||||
.next()
|
||||
.unwrap_or(&"Unknown".to_string())
|
||||
.clone();
|
||||
errors.push((project_name, e.to_string()));
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pb.finish_with_message(format!("Checked {total} projects"));
|
||||
println!(); // Add blank line after progress bar
|
||||
|
||||
Ok((updates, errors))
|
||||
}
|
||||
|
||||
/// Query each platform the project is known on (keys of `project.id`)
/// for a version matching the lockfile's MC versions and loaders, and
/// return the file-level differences, if any. The first platform that
/// answers decides the result; platform construction or request
/// failures fall through to the next platform.
///
/// # Errors
///
/// `InvalidProject` when the project has no slug at all;
/// `PlatformApiError` when every platform attempt failed.
async fn check_project_update(
    project: &Project,
    lockfile: &LockFile,
) -> Result<Option<ProjectUpdate>> {
    // Get primary slug.
    // NOTE(review): this takes the *first* slug in the map and reuses it
    // for every platform tried below — if slugs differ per platform,
    // the wrong slug may be queried; confirm against `Project::slug`'s
    // invariants.
    let slug = project
        .slug
        .values()
        .next()
        .ok_or_else(|| {
            crate::error::PakkerError::InvalidProject("No slug found".to_string())
        })?
        .clone();

    // Try each platform in project
    for platform_name in project.id.keys() {
        let api_key = get_api_key(platform_name);
        let platform = match create_platform(platform_name, api_key) {
            Ok(p) => p,
            // Unknown/unconstructible platform: skip it.
            Err(_) => continue,
        };

        let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();

        match platform
            .request_project_with_files(&slug, &lockfile.mc_versions, &loaders)
            .await
        {
            Ok(updated_project) => {
                // Compare files to detect updates
                let file_updates = detect_file_updates(project, &updated_project);

                if !file_updates.is_empty() {
                    return Ok(Some(ProjectUpdate {
                        slug: project.slug.clone(),
                        name: project.name.values().next().cloned().unwrap_or_default(),
                        project_type: format!("{:?}", project.r#type),
                        side: format!("{:?}", project.side),
                        file_updates,
                    }));
                }

                return Ok(None); // No updates
            },
            Err(_) => {
                // Try next platform
                continue;
            },
        }
    }

    // No platform produced an answer.
    Err(crate::error::PakkerError::PlatformApiError(
        "Failed to check for updates on any platform".to_string(),
    ))
}
|
||||
|
||||
fn detect_file_updates(
|
||||
current: &Project,
|
||||
updated: &Project,
|
||||
) -> Vec<FileUpdate> {
|
||||
let mut updates = Vec::new();
|
||||
|
||||
for old_file in ¤t.files {
|
||||
if let Some(new_file) = updated
|
||||
.files
|
||||
.iter()
|
||||
.find(|f| f.file_type == old_file.file_type)
|
||||
{
|
||||
// Check if file ID changed (indicates update)
|
||||
if new_file.id != old_file.id {
|
||||
updates.push(FileUpdate {
|
||||
platform: old_file.file_type.clone(),
|
||||
old_filename: old_file.file_name.clone(),
|
||||
new_filename: new_file.file_name.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updates
|
||||
}
|
||||
|
||||
fn display_update_results(updates: &[ProjectUpdate]) {
|
||||
if updates.is_empty() {
|
||||
println!("{}", "✓ All projects are up to date".green());
|
||||
return;
|
||||
}
|
||||
|
||||
println!();
|
||||
println!("{}", "📦 Updates Available:".cyan().bold());
|
||||
println!();
|
||||
|
||||
for update in updates {
|
||||
// Create hyperlink for project name using ui_utils
|
||||
let project_url = if let Some((platform, slug)) = update.slug.iter().next()
|
||||
{
|
||||
match platform.as_str() {
|
||||
"modrinth" => crate::ui_utils::modrinth_project_url(slug),
|
||||
"curseforge" => crate::ui_utils::curseforge_project_url(slug),
|
||||
_ => String::new(),
|
||||
}
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
if project_url.is_empty() {
|
||||
println!(
|
||||
"{} ({}, {})",
|
||||
update.name.yellow(),
|
||||
update.project_type,
|
||||
update.side
|
||||
);
|
||||
} else {
|
||||
let hyperlinked = crate::ui_utils::hyperlink(
|
||||
&project_url,
|
||||
&update.name.yellow().to_string(),
|
||||
);
|
||||
println!("{} ({}, {})", hyperlinked, update.project_type, update.side);
|
||||
}
|
||||
|
||||
for file_update in &update.file_updates {
|
||||
println!(
|
||||
" • {}: {} → {}",
|
||||
file_update.platform.cyan(),
|
||||
file_update.old_filename.dim(),
|
||||
file_update.new_filename.green()
|
||||
);
|
||||
}
|
||||
|
||||
println!();
|
||||
}
|
||||
|
||||
println!(
|
||||
"{}",
|
||||
format!("{} project(s) need updates", updates.len()).yellow()
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn get_project_display_name(project: &Project) -> String {
|
||||
project
|
||||
.name
|
||||
.values()
|
||||
.next()
|
||||
.or_else(|| project.slug.values().next())
|
||||
.cloned()
|
||||
.unwrap_or_else(|| "Unknown".to_string())
|
||||
}
|
||||
|
||||
/// Look up the API key for a platform from its environment variable,
/// if the variable is set. Unknown platform names yield `None`.
fn get_api_key(platform: &str) -> Option<String> {
    let var_name = match platform {
        "modrinth" => "MODRINTH_TOKEN",
        "curseforge" => "CURSEFORGE_API_KEY",
        _ => return None,
    };
    std::env::var(var_name).ok()
}
|
||||
216
src/cli/commands/sync.rs
Normal file
216
src/cli/commands/sync.rs
Normal file
|
|
@ -0,0 +1,216 @@
|
|||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fs,
|
||||
io::{self, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
|
||||
use crate::{
|
||||
cli::SyncArgs,
|
||||
error::{PakkerError, Result},
|
||||
fetch::Fetcher,
|
||||
model::{Config, LockFile},
|
||||
platform::{CurseForgePlatform, ModrinthPlatform, PlatformClient},
|
||||
};
|
||||
|
||||
/// A single difference between the lockfile and the on-disk mods
/// directory, as produced by `detect_changes`.
enum SyncChange {
    /// A `.jar` present on disk but not tracked by the lockfile:
    /// (file_path, file_name as a string).
    Addition(PathBuf, String),
    /// A tracked project whose file is missing on disk: the project's
    /// pakku id.
    Removal(String),
}
|
||||
|
||||
/// Reconcile the lockfile with the on-disk mods directory: detect
/// additions (untracked jars) and removals (tracked files gone from
/// disk), confirm each with the user — optionally filtered by the
/// `--additions` / `--removals` flags — apply them, save the lockfile,
/// and finally let the fetcher download anything still missing.
///
/// # Errors
///
/// Propagates lockfile/config load and save errors, prompt I/O errors,
/// hash-lookup errors from `add_file_to_lockfile`, and fetcher errors.
pub async fn execute(
    args: SyncArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    log::info!("Synchronizing with lockfile");

    // `load` expects the containing directory, not the file path itself.
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;

    // Detect changes
    let changes = detect_changes(&lockfile, &config)?;

    if changes.is_empty() {
        println!("✓ Everything is in sync");
        return Ok(());
    }

    // Split the change list by kind so the flag filters below can act
    // on each group independently.
    let mut additions = Vec::new();
    let mut removals = Vec::new();

    for change in changes {
        match change {
            SyncChange::Addition(path, name) => additions.push((path, name)),
            SyncChange::Removal(id) => removals.push(id),
        }
    }

    // Apply filters: with neither flag set, both groups are processed.
    let no_filter = !args.additions && !args.removals;

    let spinner = ProgressBar::new_spinner();
    spinner.set_style(
        ProgressStyle::default_spinner()
            .template("{spinner:.green} {msg}")
            .unwrap(),
    );

    if no_filter || args.additions {
        for (file_path, _) in &additions {
            spinner
                .set_message(format!("Processing addition: {}", file_path.display()));
            if prompt_user(&format!("Add {} to lockfile?", file_path.display()))? {
                add_file_to_lockfile(&mut lockfile, file_path, &config).await?;
            }
        }
    }

    if no_filter || args.removals {
        for pakku_id in &removals {
            // Re-find the project to build a display name; the immutable
            // borrow ends before `remove_project` needs `&mut lockfile`.
            if let Some(project) = lockfile
                .projects
                .iter()
                .find(|p| p.pakku_id.as_ref() == Some(pakku_id))
            {
                let name = project
                    .name
                    .values()
                    .next()
                    .map(std::string::String::as_str)
                    .or(project.pakku_id.as_deref())
                    .unwrap_or("unknown");
                spinner.set_message(format!("Processing removal: {name}"));
                if prompt_user(&format!("Remove {name} from lockfile?"))? {
                    lockfile
                        .remove_project(pakku_id)
                        .ok_or_else(|| PakkerError::ProjectNotFound(pakku_id.clone()))?;
                }
            }
        }
    }

    spinner.finish_and_clear();

    // Save changes
    lockfile.save(lockfile_dir)?;

    // Fetch missing files.
    // NOTE(review): the fetcher is rooted at the current working
    // directory (".") rather than `lockfile_dir` — confirm this is the
    // intended base for downloads.
    let fetcher = Fetcher::new(".");
    fetcher.sync(&lockfile, &config).await?;

    println!("✓ Sync complete");
    Ok(())
}
|
||||
|
||||
fn detect_changes(
|
||||
lockfile: &LockFile,
|
||||
config: &Config,
|
||||
) -> Result<Vec<SyncChange>> {
|
||||
let mut changes = Vec::new();
|
||||
|
||||
// Get paths for each project type
|
||||
let paths = config.paths.clone();
|
||||
let mods_path = paths
|
||||
.get("mods")
|
||||
.map_or("mods", std::string::String::as_str);
|
||||
|
||||
// Build map of lockfile projects by file path
|
||||
let mut lockfile_files: HashMap<PathBuf, String> = HashMap::new();
|
||||
for project in &lockfile.projects {
|
||||
for file in &project.files {
|
||||
let file_path = PathBuf::from(mods_path).join(&file.file_name);
|
||||
if let Some(ref pakku_id) = project.pakku_id {
|
||||
lockfile_files.insert(file_path, pakku_id.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Scan filesystem for additions
|
||||
if let Ok(entries) = fs::read_dir(mods_path) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.is_file()
|
||||
&& let Some(ext) = path.extension()
|
||||
&& ext == "jar"
|
||||
&& !lockfile_files.contains_key(&path)
|
||||
{
|
||||
let name = path.file_name().unwrap().to_string_lossy().to_string();
|
||||
changes.push(SyncChange::Addition(path, name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for removals (projects in lockfile but files missing)
|
||||
let filesystem_files: HashSet<_> =
|
||||
if let Ok(entries) = fs::read_dir(mods_path) {
|
||||
entries
|
||||
.flatten()
|
||||
.map(|e| e.path())
|
||||
.filter(|p| p.is_file())
|
||||
.collect()
|
||||
} else {
|
||||
HashSet::new()
|
||||
};
|
||||
|
||||
for (lockfile_path, pakku_id) in &lockfile_files {
|
||||
if !filesystem_files.contains(lockfile_path) {
|
||||
changes.push(SyncChange::Removal(pakku_id.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(changes)
|
||||
}
|
||||
|
||||
/// Try to identify an on-disk jar by content hash and, on success, add
/// the resolved project to the lockfile. Modrinth is queried first,
/// then CurseForge; an unidentifiable file is reported and skipped
/// without returning an error.
///
/// # Errors
///
/// Only file-read failures propagate; failed platform lookups fall
/// through to the next attempt or the "skipping" message.
async fn add_file_to_lockfile(
    lockfile: &mut LockFile,
    file_path: &Path,
    _config: &Config,
) -> Result<()> {
    // Try to identify the file by hash lookup
    let _modrinth = ModrinthPlatform::new();
    let curseforge = CurseForgePlatform::new(None);

    // Compute file hash
    let file_data = fs::read(file_path)?;
    // Compute SHA-1 hash from file bytes
    use sha1::Digest;
    let mut hasher = sha1::Sha1::new();
    hasher.update(&file_data);
    let hash = format!("{:x}", hasher.finalize());

    // Try Modrinth first (SHA-1 hash)
    if let Ok(Some(project)) = _modrinth.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from Modrinth)", file_path.display());
        return Ok(());
    }

    // Try CurseForge (Murmur2 hash computed from file)
    // NOTE(review): the value passed here is the SHA-1 digest computed
    // above, while the comment (and CurseForge's fingerprint API) call
    // for a Murmur2 hash — this lookup likely never matches; verify and
    // compute the correct fingerprint.
    if let Ok(Some(project)) = curseforge.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from CurseForge)", file_path.display());
        return Ok(());
    }

    println!("⚠ Could not identify {}, skipping", file_path.display());
    Ok(())
}
|
||||
|
||||
/// Print `message` with a "[y/N]" suffix and read one line from stdin.
/// Returns `Ok(true)` only for a "y"/"Y" answer; anything else,
/// including empty input, counts as "no".
fn prompt_user(message: &str) -> Result<bool> {
    print!("{message} [y/N] ");
    io::stdout().flush().map_err(PakkerError::IoError)?;

    let mut answer = String::new();
    io::stdin()
        .read_line(&mut answer)
        .map_err(PakkerError::IoError)?;

    let answer = answer.trim();
    Ok(answer.eq_ignore_ascii_case("y"))
}
|
||||
41
src/cli/commands/unlink.rs
Normal file
41
src/cli/commands/unlink.rs
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
use std::path::Path;
|
||||
|
||||
use crate::{
|
||||
cli::UnlinkArgs,
|
||||
error::{PakkerError, Result},
|
||||
model::LockFile,
|
||||
};
|
||||
|
||||
pub fn execute(args: UnlinkArgs, lockfile_path: &Path) -> Result<()> {
|
||||
log::info!("Unlinking {} -> {}", args.from, args.to);
|
||||
|
||||
// Load expects directory path, so get parent directory
|
||||
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
|
||||
let mut lockfile = LockFile::load(lockfile_dir)?;
|
||||
|
||||
// Find projects
|
||||
let from_project = lockfile
|
||||
.projects
|
||||
.iter()
|
||||
.find(|p| p.matches_input(&args.from))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?;
|
||||
let from_id = from_project.pakku_id.clone().ok_or_else(|| {
|
||||
PakkerError::InvalidProject("From project has no pakku_id".to_string())
|
||||
})?;
|
||||
|
||||
let to_project = lockfile
|
||||
.projects
|
||||
.iter_mut()
|
||||
.find(|p| p.matches_input(&args.to))
|
||||
.ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;
|
||||
|
||||
// Remove link
|
||||
to_project.pakku_links.remove(&from_id);
|
||||
|
||||
// Save lockfile
|
||||
lockfile.save(lockfile_dir)?;
|
||||
|
||||
log::info!("Successfully unlinked projects");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
140
src/cli/commands/update.rs
Normal file
140
src/cli/commands/update.rs
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
|
||||
use crate::{
|
||||
cli::UpdateArgs,
|
||||
error::PakkerError,
|
||||
model::{Config, LockFile},
|
||||
platform::create_platform,
|
||||
ui_utils::prompt_select,
|
||||
};
|
||||
|
||||
/// Update lockfile projects to the newest matching files.
///
/// With empty `args.inputs` every project is updated; otherwise each
/// input must match a project (by `matches_input`) or the command
/// fails. For each project the first platform that answers provides the
/// candidate files; when several files are available and `--yes` was
/// not given, the user picks one interactively. The chosen project data
/// replaces the lockfile entry and the lockfile is saved at the end.
///
/// # Errors
///
/// `ProjectNotFound` for an input matching nothing, `InvalidProject`
/// for a project without a slug; lockfile load/save errors propagate.
pub async fn execute(
    args: UpdateArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;
    let _config = Config::load(config_dir)?;

    // Create platforms; a platform that fails to construct is simply
    // left out and skipped during lookup.
    let mut platforms = HashMap::new();
    if let Ok(platform) = create_platform("modrinth", None) {
        platforms.insert("modrinth".to_string(), platform);
    }
    if let Ok(platform) =
        create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok())
    {
        platforms.insert("curseforge".to_string(), platform);
    }

    // Indices into `lockfile.projects` to update: all of them, or the
    // ones matching the given inputs (each input must match).
    let project_indices: Vec<_> = if args.inputs.is_empty() {
        (0..lockfile.projects.len()).collect()
    } else {
        let mut indices = Vec::new();
        for input in &args.inputs {
            if let Some((idx, _)) = lockfile
                .projects
                .iter()
                .enumerate()
                .find(|(_, p)| p.matches_input(input))
            {
                indices.push(idx);
            } else {
                return Err(PakkerError::ProjectNotFound(input.clone()));
            }
        }
        indices
    };

    // Create progress bar
    let pb = ProgressBar::new(project_indices.len() as u64);
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .unwrap()
            .progress_chars("#>-"),
    );

    for idx in project_indices {
        let old_project = &lockfile.projects[idx];
        pb.set_message(format!("Updating {}...", old_project.get_name()));

        // NOTE(review): uses the first slug in the map for every
        // platform queried below — confirm slugs do not differ per
        // platform.
        let slug = old_project
            .slug
            .values()
            .next()
            .ok_or_else(|| PakkerError::InvalidProject("No slug found".into()))?;

        // Find updated project from one of the platforms; first success wins.
        let mut updated_project = None;
        for platform in platforms.values() {
            if let Ok(project) = platform
                .request_project_with_files(
                    slug,
                    &lockfile.mc_versions,
                    &lockfile.loaders.keys().cloned().collect::<Vec<_>>(),
                )
                .await
            {
                updated_project = Some(project);
                break;
            }
        }

        // Only proceed when a platform answered with at least one file
        // and the current lockfile entry has a file to compare against.
        if let Some(mut updated_project) = updated_project
            && !updated_project.files.is_empty()
            && let Some(old_file) = lockfile.projects[idx].files.first()
        {
            let new_file = updated_project.files.first().unwrap();

            if new_file.id == old_file.id {
                pb.println(format!(
                    " {} - Already up to date",
                    old_project.get_name()
                ));
            } else {
                // Interactive version selection if not using --yes flag.
                // `suspend` pauses the bar so the prompt renders cleanly.
                if !args.yes && updated_project.files.len() > 1 {
                    pb.suspend(|| {
                        let choices: Vec<String> = updated_project
                            .files
                            .iter()
                            .map(|f| format!("{} ({})", f.file_name, f.id))
                            .collect();

                        let choice_refs: Vec<&str> =
                            choices.iter().map(std::string::String::as_str).collect();

                        if let Ok(selected_idx) = prompt_select(
                            &format!("Select version for {}:", old_project.get_name()),
                            &choice_refs,
                        ) {
                            // Move selected file to front — downstream code
                            // treats files[0] as the active file.
                            if selected_idx > 0 {
                                updated_project.files.swap(0, selected_idx);
                            }
                        }
                    });
                }

                let selected_file = updated_project.files.first().unwrap();
                pb.println(format!(
                    " {} -> {}",
                    old_file.file_name, selected_file.file_name
                ));
                // Replace the whole lockfile entry with the fresh data.
                lockfile.projects[idx] = updated_project;
            }
        }
        pb.inc(1);
    }

    pb.finish_with_message("Update complete");
    lockfile.save(lockfile_dir)?;
    Ok(())
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue