pakker/src/cli/commands/export.rs
NotAShelf 5385c0f4ed
cli: wire shelve flag; more clippy fixes
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I694da71afe93bcb33687ff7d8e75f04f6a6a6964
2026-02-19 00:22:42 +03:00

302 lines
9.3 KiB
Rust

use std::path::Path;
use crate::{
cli::ExportArgs,
error::{PakkerError, Result},
export::Exporter,
ipc::{IpcCoordinator, OperationType},
model::{Config, LockFile, fork::LocalConfig},
utils::hash::compute_sha256_bytes,
};
/// Executes the `export` command.
///
/// Resolves the effective lockfile — plain local, fork-merged (parent +
/// local overrides), or the parent's lockfile as a fallback — coordinates
/// with other running pakker processes over IPC, and delegates packaging to
/// [`Exporter`].
///
/// # Errors
///
/// Returns an error when no lockfile can be located, when waiting on a
/// conflicting IPC operation times out, or when loading/merging/exporting
/// fails.
pub async fn execute(
    args: ExportArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    if let Some(ref profile) = args.profile {
        log::info!("Exporting with profile: {profile}");
    } else {
        log::info!("Exporting all profiles");
    }
    // Handle --no-server flag
    if args.no_server {
        log::info!("Server content will be excluded from export");
    }
    // Handle --show-io-errors flag (informational logging only here; the
    // exporter reads the flag from its own configuration — TODO confirm)
    if args.show_io_errors {
        log::info!("IO errors will be shown during export");
    }
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));
    // IPC coordination - prevent concurrent operations on the same modpack
    let ipc = IpcCoordinator::new(config_dir)?;
    let ipc_timeout = std::time::Duration::from_secs(60);
    // Check for conflicting export operations and wait for them to finish
    // before registering ours.
    let conflicting = ipc.get_running_operations(OperationType::Export);
    if !conflicting.is_empty() {
        log::info!(
            "Waiting for conflicting operations to complete: {:?}",
            conflicting
                .iter()
                .map(|op| (op.id.clone(), op.pid))
                .collect::<Vec<_>>()
        );
        ipc
            .wait_for_conflicts(OperationType::Export, ipc_timeout)
            .await?;
    }
    // Register this export operation; the guard deregisters on drop.
    let _op_guard = ipc.register_operation(OperationType::Export)?;
    // Load config to check for fork configuration
    let config = Config::load(config_dir)?;
    let local_config = LocalConfig::load(config_dir).ok();
    // Resolve the effective lockfile.
    let lockfile = if let Some(local_cfg) = &local_config {
        if local_cfg.parent.is_some() {
            log::info!("Fork detected - merging parent and local lockfiles");
            if let Some((parent_dir, lockfile_name)) = find_parent_lockfile() {
                // Load parent lockfile
                let parent_lockfile = LockFile::load(Path::new(parent_dir))?;
                // Verify parent lockfile hash for integrity; a mismatch is a
                // warning, not an error, so exports still work after the
                // parent changes.
                if let Some(stored_hash) = &local_cfg.parent_lock_hash {
                    let parent_lock_path = Path::new(parent_dir).join(lockfile_name);
                    let parent_lock_content = std::fs::read(&parent_lock_path)?;
                    let computed_hash = compute_sha256_bytes(&parent_lock_content);
                    if &computed_hash != stored_hash {
                        log::warn!(
                            "Parent lockfile hash mismatch - parent may have changed since \
                            last sync"
                        );
                        log::warn!("Expected: {stored_hash}, Got: {computed_hash}");
                    }
                }
                // Load local lockfile if it exists
                if lockfile_path.exists() {
                    log::info!("Merging parent lockfile with local overrides");
                    let local_lockfile =
                        LockFile::load_with_validation(lockfile_dir, false)?;
                    // Merge: start with parent, override with local
                    merge_lockfiles(parent_lockfile, local_lockfile, local_cfg)?
                } else {
                    log::info!("No local lockfile - using parent lockfile");
                    parent_lockfile
                }
            } else {
                return Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "Fork configured but parent lockfile not found",
                )));
            }
        } else if lockfile_path.exists() {
            // No fork configured - use the local lockfile.
            LockFile::load(lockfile_dir)?
        } else {
            return Err(PakkerError::IoError(std::io::Error::new(
                std::io::ErrorKind::NotFound,
                "No lockfile found",
            )));
        }
    } else if lockfile_path.exists() {
        // No local config - a local lockfile takes priority.
        LockFile::load(lockfile_dir)?
    } else {
        // No local lockfile either; fall back to a parent's lockfile.
        match find_parent_lockfile() {
            Some((parent_dir, lockfile_name)) => {
                log::info!(
                    "Using parent's lockfile ({lockfile_name}) from {parent_dir}"
                );
                LockFile::load(Path::new(parent_dir))?
            },
            None => {
                return Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "No lockfile found (neither local nor parent's)",
                )));
            },
        }
    };
    // Determine output path: --pakker-layout forces "build"; otherwise honor
    // --output, defaulting to "exports".
    let output_path = if args.pakker_layout {
        "build"
    } else {
        args.output.as_deref().unwrap_or("exports")
    };
    // Create exporter rooted at the current working directory.
    let mut exporter = Exporter::new(".");
    // Export based on profile argument
    if let Some(profile_name) = args.profile {
        // Single profile export (backwards compatible)
        let output_file = exporter
            .export(&profile_name, &lockfile, &config, Path::new(output_path))
            .await?;
        println!("Export complete: {output_file:?}");
    } else {
        // Multi-profile export (Pakker-compatible default behavior)
        let output_files = exporter
            .export_all_profiles(&lockfile, &config, Path::new(output_path))
            .await?;
        println!("\nExported {} files:", output_files.len());
        for output_file in output_files {
            println!(" - {output_file:?}");
        }
    }
    Ok(())
}

/// Probes the known parent checkout directories for a lockfile.
///
/// Checks `.pakku/parent` before `.pakker/parent`, and within each directory
/// prefers `pakker-lock.json` over the legacy `pakku-lock.json` — the same
/// precedence the probing previously duplicated at two call sites.
///
/// Returns `(parent_dir, lockfile_name)` for the first existing candidate,
/// or `None` when no parent lockfile can be found.
fn find_parent_lockfile() -> Option<(&'static str, &'static str)> {
    const PARENT_DIRS: [&str; 2] = [".pakku/parent", ".pakker/parent"];
    const LOCKFILE_NAMES: [&str; 2] = ["pakker-lock.json", "pakku-lock.json"];
    for dir in PARENT_DIRS {
        for name in LOCKFILE_NAMES {
            if Path::new(dir).join(name).exists() {
                return Some((dir, name));
            }
        }
    }
    None
}
/// Merges parent lockfile with local lockfile
/// Parent projects are used as base, local projects override parent projects
/// with same slug
/// Merges parent lockfile with local lockfile.
///
/// Parent projects form the base: any parent project that shares a slug (on
/// any platform) with a local project is dropped in favor of the local one;
/// surviving parent projects have per-project overrides from `local_config`
/// applied. All local projects are then appended. The merged lockfile keeps
/// the parent's target, MC versions, loaders, and lockfile version.
///
/// # Errors
///
/// Currently infallible, but returns `Result` to stay uniform with the other
/// lockfile operations at its call site.
fn merge_lockfiles(
    parent: LockFile,
    local: LockFile,
    local_config: &LocalConfig,
) -> Result<LockFile> {
    let mut merged = LockFile {
        target: parent.target, // Use parent target
        mc_versions: parent.mc_versions, // Use parent MC versions
        loaders: parent.loaders, // Use parent loaders
        projects: Vec::new(),
        lockfile_version: parent.lockfile_version,
    };
    // Collect every slug (across all platforms) present in the local lockfile
    // so overridden parent projects can be detected.
    let local_slugs: std::collections::HashSet<_> = local
        .projects
        .iter()
        .flat_map(|project| project.slug.values().cloned())
        .collect();
    // Keep parent projects that are NOT overridden by local, counting how
    // many survive (previously the summary subtracted the number of local
    // *config* entries, which was both wrong and could underflow usize).
    let mut retained_parents = 0usize;
    for parent_project in &parent.projects {
        let is_overridden = parent_project
            .slug
            .values()
            .any(|slug| local_slugs.contains(slug));
        if is_overridden {
            continue;
        }
        let mut project = parent_project.clone();
        // Apply local config overrides, matched by slug, name, or pakku id;
        // only the first matching config entry is applied.
        for (key, local_proj_cfg) in &local_config.projects {
            let matches = project.slug.values().any(|s| s == key)
                || project.name.values().any(|n| n == key)
                || project.pakku_id.as_ref() == Some(key);
            if matches {
                if let Some(t) = local_proj_cfg.r#type {
                    project.r#type = t;
                }
                if let Some(s) = local_proj_cfg.side {
                    project.side = s;
                }
                if let Some(us) = local_proj_cfg.update_strategy {
                    project.update_strategy = us;
                }
                if let Some(r) = local_proj_cfg.redistributable {
                    project.redistributable = r;
                }
                if let Some(ref sp) = local_proj_cfg.subpath {
                    project.subpath = Some(sp.clone());
                }
                if let Some(ref aliases) = local_proj_cfg.aliases {
                    project.aliases = aliases.iter().cloned().collect();
                }
                if let Some(e) = local_proj_cfg.export {
                    project.export = e;
                }
                break;
            }
        }
        retained_parents += 1;
        merged.projects.push(project);
    }
    // Append all local projects. `local` is owned, so moving its projects
    // avoids the previous redundant clone; capture the count first.
    let local_count = local.projects.len();
    merged.projects.extend(local.projects);
    println!(
        "Merged fork: {} parent projects + {} local projects = {} total projects",
        retained_parents,
        local_count,
        merged.projects.len()
    );
    Ok(merged)
}