pakker/crates/pakker-cli/src/cli/commands/fork.rs
NotAShelf 616916cd48
cli: colorize output in various commands
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I612086bad607a92e4ac1c1f09b41534d6a6a6964
2026-05-03 03:44:56 +03:00

900 lines
26 KiB
Rust

use std::{
collections::{HashMap, HashSet},
fs,
io::Write,
path::Path,
time::Duration,
};
use indicatif::{ProgressBar, ProgressStyle};
use yansi::Paint;
use crate::{
cli::ForkArgs,
error::PakkerError,
git::{self, VcsType},
model::{
LockFile,
fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content},
},
};
/// Directory where pakku/pakker keeps fork-related state.
const PAKKU_DIR: &str = ".pakku";
/// Subdirectory of [`PAKKU_DIR`] that holds the cloned parent repository.
const PARENT_DIR_NAME: &str = "parent";

/// Relative path of the parent clone directory, i.e. `.pakku/parent`.
fn parent_dir() -> String {
    [PAKKU_DIR, PARENT_DIR_NAME].join("/")
}
/// Main entry point for fork commands
pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> {
match &args.subcommand {
crate::cli::ForkSubcommand::Init {
git_url,
from_current,
from_path,
ref_name,
ref_type,
remote,
} => {
execute_init(
git_url.clone(),
*from_current,
from_path.clone(),
ref_name.clone(),
*ref_type,
remote.clone(),
)
},
crate::cli::ForkSubcommand::Set {
git_url,
ref_name,
ref_type,
remote,
} => {
execute_set(git_url.clone(), ref_name.clone(), *ref_type, remote.clone())
},
crate::cli::ForkSubcommand::Show => execute_show(),
crate::cli::ForkSubcommand::Unset => execute_unset(),
crate::cli::ForkSubcommand::Sync => execute_sync(),
crate::cli::ForkSubcommand::Promote { projects } => {
execute_promote(projects)
},
crate::cli::ForkSubcommand::Exclude { projects } => {
execute_exclude(projects)
},
crate::cli::ForkSubcommand::Include { projects } => {
execute_include(projects)
},
}
}
/// Validate that `url` looks like a usable parent source: a network URL
/// (`https://`, `ssh://`), an SSH-style remote (`git@…`), a `file://` URL, or
/// an absolute filesystem path (tests use local bare repos).
fn validate_git_url(url: &str) -> Result<(), PakkerError> {
    const ALLOWED_PREFIXES: [&str; 4] = ["https://", "git@", "ssh://", "file://"];
    let accepted = url.starts_with('/')
        || ALLOWED_PREFIXES.iter().any(|prefix| url.starts_with(prefix));
    if accepted {
        Ok(())
    } else {
        Err(PakkerError::Fork(format!(
            "Invalid git URL: {url}. Expected https://, git@, ssh://, file://, or \
             absolute filesystem path."
        )))
    }
}
fn execute_init(
git_url: Option<String>,
from_current: bool,
from_path: Option<String>,
ref_name: Option<String>,
ref_type: Option<RefType>,
remote: Option<String>,
) -> Result<(), PakkerError> {
let config_dir = Path::new(".");
// Validate that pakker.json exists for fork operations
let pakker_json_path = config_dir.join("pakker.json");
let pakku_json_path = config_dir.join("pakku.json");
if !pakker_json_path.exists() && pakku_json_path.exists() {
return Err(PakkerError::Fork(
"Forking is a pakker-specific feature and requires pakker.json. \nFound \
pakku.json but not pakker.json. Please migrate to pakker.json to use \
fork functionality.\nYou can convert your pakku.json to pakker.json by \
renaming the file."
.to_string(),
));
}
let mut local_config = LocalConfig::load(config_dir).unwrap_or_default();
// Check if parent already configured
if local_config.parent.is_some()
&& let Some(parent) = &local_config.parent
{
return Err(PakkerError::Fork(format!(
"Parent already configured: {}",
parent.id
)));
}
// Resolve defaults early to avoid shadowing/confusion
let resolved_remote = remote.unwrap_or_else(|| "origin".to_string());
let resolved_ref = ref_name.unwrap_or_else(|| "main".to_string());
// Parent path (where we keep the cloned parent)
let parent_path_str = parent_dir();
// Branch: from_current, from_path, or git_url
let mut cloned_from_local = false;
let url = if from_current {
// Detect git URL from current directory
if !git::is_git_repository(config_dir) {
return Err(PakkerError::Fork(
"Not a git repository. Use --git-url or run 'git init' first."
.to_string(),
));
}
git::get_remote_url(config_dir, &resolved_remote)?
} else if let Some(fp) = from_path {
// Use provided local path as source; infer upstream remote from it
let path = Path::new(&fp);
if !git::is_git_repository(path) {
return Err(PakkerError::Fork(format!(
"Provided path is not a git repository: {}",
path.display()
)));
}
// Infer upstream remote URL from the existing local clone
let upstream_url = git::get_primary_remote_url(path)?;
// Reject file:// or non-network remotes
validate_git_url(&upstream_url)?;
// Ensure working tree is clean
let vcs_type = git::detect_vcs_type(path);
if git::repo_has_uncommitted_changes(path)? {
let error_msg = match vcs_type {
VcsType::Git => {
"Local repository at --from-path has uncommitted changes. Commit or \
stash them before proceeding."
},
VcsType::Jujutsu => {
"Local repository at --from-path has uncommitted changes. Run 'jj \
commit' to save changes before proceeding."
},
VcsType::None => {
"Local repository at --from-path has uncommitted changes. Please \
clean the directory before proceeding."
},
};
return Err(PakkerError::Fork(error_msg.to_string()));
}
// VCS-specific validation
match vcs_type {
VcsType::Git => {
// Attempt lightweight fetch of remote refs to refresh remote tracking
match git::fetch_remote_light(path, &resolved_remote, &resolved_ref) {
Ok(()) => println!("Fetched remote refs for verification"),
Err(e) => {
log::warn!("Lightweight fetch from upstream failed: {e}");
println!(
"Warning: could not perform lightweight fetch from upstream. \
Proceeding with local clone; subsequent sync may require \
network."
);
},
}
// Compare local ref vs remote ref
let remote_ref = format!("{resolved_remote}/{resolved_ref}");
match git::ahead_behind(path, &resolved_ref, &remote_ref) {
Ok((ahead, _behind)) => {
if ahead > 0 {
return Err(PakkerError::Fork(format!(
"Local repository at {} has {} commits not present on \
upstream {}. Push or use --git-url if you intend to use an \
upstream that contains these commits.",
path.display(),
ahead,
upstream_url
)));
}
},
Err(e) => {
log::warn!("Could not compute ahead/behind: {e}");
},
}
},
VcsType::Jujutsu => {
// For jujutsu, we skip git-specific remote validation since jj has
// different synchronization patterns
println!(
"Warning: Skipping remote validation for jujutsu repository. Ensure \
your jj repo is in sync with remote before proceeding."
);
// Check if there are any changes that haven't been pushed to the remote
if let Ok(output) = std::process::Command::new("jj")
.args(["log", "--limit", "1", "--template", ""])
.current_dir(path)
.output()
&& !output.stdout.is_empty()
{
println!(
"Note: Jujutsu repository detected. Make sure to run 'jj git \
push' to sync changes with remote if needed."
);
}
},
VcsType::None => {
// No VCS-specific validation needed
},
}
// Compute parent lock/config hashes for reproducibility
let parent_lock_path = if path.join("pakker-lock.json").exists() {
path.join("pakker-lock.json")
} else {
path.join("pakku-lock.json")
};
if parent_lock_path.exists() {
let lock_content =
fs::read_to_string(&parent_lock_path).map_err(|e| {
PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
})?;
let lock_hash = hash_content(&lock_content);
local_config.parent_lock_hash = Some(lock_hash);
}
let parent_config_path = if path.join("pakker.json").exists() {
path.join("pakker.json")
} else {
path.join("pakku.json")
};
if parent_config_path.exists() {
let config_content =
fs::read_to_string(&parent_config_path).map_err(|e| {
PakkerError::Fork(format!("Failed to read parent config: {e}"))
})?;
let config_hash = hash_content(&config_content);
local_config.parent_config_hash = Some(config_hash);
}
// Now clone from the local path into .pakku/parent, this avoids
// re-downloading objects
let parent_path = Path::new(&parent_path_str);
let spinner = ProgressBar::new_spinner();
spinner.set_style(
ProgressStyle::default_spinner()
.template("{spinner:.green} {msg}")
.expect("spinner template is valid"),
);
spinner.enable_steady_tick(Duration::from_millis(80));
spinner.set_message(format!(
"Cloning parent repository from local path {}...",
path.display()
));
git::clone_repository(&fp, parent_path, &resolved_ref, None)?;
spinner.finish_and_clear();
// Ensure the cloned repo's origin is set to the upstream URL (not the local
// path)
git::set_remote_url(parent_path, &resolved_remote, &upstream_url)?;
// Mark that we've already cloned from local
cloned_from_local = true;
// We will persist upstream_url as the canonical parent id
upstream_url
} else if let Some(url) = git_url {
url
} else {
return Err(PakkerError::Fork(
"Either --git-url, --from-current or --from-path must be specified"
.to_string(),
));
};
let parent_path = Path::new(&parent_path_str);
// If we did not already clone from local, perform network clone and checks
if cloned_from_local {
println!(
"Parent repository was cloned from local path; skipping network clone."
);
} else {
// Check if parent directory already exists and is not empty
if parent_path.exists() {
let is_empty = parent_path
.read_dir()
.map(|mut entries| entries.next().is_none())
.unwrap_or(false);
if !is_empty {
return Err(PakkerError::Fork(format!(
"Directory not empty: {}",
parent_path.display()
)));
}
}
let spinner = ProgressBar::new_spinner();
spinner.set_style(
ProgressStyle::default_spinner()
.template("{spinner:.green} {msg}")
.expect("spinner template is valid"),
);
spinner.enable_steady_tick(Duration::from_millis(80));
spinner.set_message(format!(
"Cloning parent repository: {url} ({resolved_ref})"
));
git::clone_repository(&url, parent_path, &resolved_ref, None)?;
spinner.finish_and_clear();
}
let commit_sha = git::get_commit_sha(parent_path, &resolved_ref)?;
// Detect ref type if not specified
let resolved_ref_type = if let Some(rt) = ref_type {
rt
} else {
git::resolve_ref_type(parent_path, &resolved_ref)?
};
let parent_config = ParentConfig {
type_: "git".to_string(),
id: url.clone(),
version: Some(commit_sha[..8].to_string()),
ref_: resolved_ref.clone(),
ref_type: resolved_ref_type,
remote_name: resolved_remote,
};
local_config.parent = Some(parent_config);
local_config.save(config_dir)?;
// Add .pakku/parent to .gitignore
add_to_gitignore()?;
println!();
println!("{}", "✓ Fork initialized successfully".green());
println!(" Parent: {url}");
println!(" Ref: {} ({})", resolved_ref, match resolved_ref_type {
RefType::Branch => "branch",
RefType::Tag => "tag",
RefType::Commit => "commit",
});
println!(" Commit: {}", &commit_sha[..8]);
println!();
println!("Run 'pakku fork sync' to sync with parent.");
Ok(())
}
/// Update the existing fork configuration in place.
///
/// `ref_name` is always replaced; the parent URL, ref type, and remote name
/// are only updated when provided. The new configuration is saved before any
/// success output is printed.
///
/// # Errors
/// Returns `PakkerError::Fork` when no parent is configured, the new URL is
/// invalid, or loading/saving the local config fails.
fn execute_set(
    git_url: Option<String>,
    ref_name: String,
    ref_type: Option<RefType>,
    remote: Option<String>,
) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    let Some(mut parent) = local_config.parent else {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    };
    if let Some(url) = git_url {
        // Reject malformed URLs before mutating the configuration.
        validate_git_url(&url)?;
        parent.id = url;
    }
    parent.ref_ = ref_name;
    if let Some(rt) = ref_type {
        parent.ref_type = rt;
    }
    if let Some(remote_name) = remote {
        parent.remote_name = remote_name;
    }
    // Move (rather than clone) the updated parent back into the config and
    // persist it before reporting success; print via a borrow afterwards.
    local_config.parent = Some(parent);
    local_config.save(config_dir)?;
    let parent = local_config
        .parent
        .as_ref()
        .expect("parent was just set above");
    println!("{}", "✓ Fork configuration updated".green());
    println!(" Parent: {}", parent.id);
    println!(" Ref: {} ({})", parent.ref_, match parent.ref_type {
        RefType::Branch => "branch",
        RefType::Tag => "tag",
        RefType::Commit => "commit",
    });
    println!();
    println!("Run 'pakku fork sync' to sync with new configuration.");
    Ok(())
}
/// Print the current fork configuration and any per-project overrides, or a
/// hint on how to initialize a fork when none is configured.
fn execute_show() -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let local_config = LocalConfig::load(config_dir)?;
    let Some(parent) = local_config.parent else {
        println!("No fork configured.");
        println!("Run 'pakku fork init' to initialize a fork.");
        return Ok(());
    };
    println!("Fork Configuration:");
    println!(" Parent URL: {}", parent.id);
    let type_label = match parent.ref_type {
        RefType::Branch => "branch",
        RefType::Tag => "tag",
        RefType::Commit => "commit",
    };
    println!(" Type: {type_label}");
    println!(" Ref: {}", parent.ref_);
    println!(" Remote: {}", parent.remote_name);
    match parent.version {
        Some(version) => println!(" Last synced commit: {version}"),
        None => println!(" Last synced commit: never synced"),
    }
    if !local_config.projects.is_empty() {
        println!();
        println!("Project Overrides ({}):", local_config.projects.len());
        for (slug, proj_config) in &local_config.projects {
            // Collect the optional override fields as "key=value" fragments.
            let details: Vec<String> = [
                proj_config
                    .version
                    .as_ref()
                    .map(|version| format!("version={version}")),
                proj_config.side.as_ref().map(|side| format!("side={side}")),
                proj_config
                    .update_strategy
                    .as_ref()
                    .map(|strategy| format!("updateStrategy={strategy}")),
            ]
            .into_iter()
            .flatten()
            .collect();
            if details.is_empty() {
                println!(" - {slug}");
            } else {
                println!(" - {slug} ({})", details.join(", "));
            }
        }
    }
    Ok(())
}
/// Remove the fork configuration after interactive confirmation, deleting the
/// cached parent clone and clearing all parent-related config state.
fn execute_unset() -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        println!("No fork configured.");
        return Ok(());
    }
    // Ask before destroying configuration and the cached parent clone.
    print!("Are you sure you want to remove fork configuration? [y/N] ");
    std::io::stdout().flush().map_err(PakkerError::IoError)?;
    let mut answer = String::new();
    std::io::stdin()
        .read_line(&mut answer)
        .map_err(PakkerError::IoError)?;
    let confirmed = answer.trim().eq_ignore_ascii_case("y");
    if !confirmed {
        println!("Cancelled.");
        return Ok(());
    }
    // Delete the cached parent clone, if present.
    let parent_location = parent_dir();
    let parent_path = Path::new(&parent_location);
    if parent_path.exists() {
        fs::remove_dir_all(parent_path).map_err(|e| {
            PakkerError::Fork(format!("Failed to remove parent directory: {e}"))
        })?;
    }
    // Drop all parent-related state from the local config and persist.
    local_config.parent = None;
    local_config.parent_lock_hash = None;
    local_config.parent_config_hash = None;
    local_config.save(config_dir)?;
    println!("{}", "✓ Fork configuration removed".green());
    Ok(())
}
/// Snapshot the parent lockfile as a map from project slug to the name of its
/// first file (if any). Returns an empty map when no lockfile exists or when
/// it fails to load — the sync diff then simply shows no baseline.
fn snapshot_parent_projects(
    parent_path: &Path,
) -> HashMap<String, Option<String>> {
    // Prefer the pakker lockfile; fall back to the pakku one.
    let mut lockfile_path = parent_path.join("pakker-lock.json");
    if !lockfile_path.exists() {
        lockfile_path = parent_path.join("pakku-lock.json");
    }
    if !lockfile_path.exists() {
        return HashMap::new();
    }
    let Ok(lockfile) = LockFile::load_with_validation(parent_path, false) else {
        return HashMap::new();
    };
    let mut snapshot = HashMap::new();
    for project in &lockfile.projects {
        // Best-effort identifier: first slug, else first name, else empty.
        let slug = project
            .slug
            .values()
            .next()
            .or_else(|| project.name.values().next())
            .cloned()
            .unwrap_or_default();
        let first_file = project.files.first().map(|f| f.file_name.clone());
        snapshot.insert(slug, first_file);
    }
    snapshot
}
/// Sync the local parent clone in `.pakku/parent` with its upstream.
///
/// Fetches (or clones, if the directory is missing) the configured parent
/// ref, re-hashes the parent lock/config files, stores the new parent commit
/// in the local config, and prints a slug-level diff of parent project
/// changes (added / removed / updated-first-file).
///
/// # Errors
/// Returns `PakkerError::Fork` when no parent is configured or when git/file
/// operations fail.
fn execute_sync() -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    // A configured parent is a precondition for syncing.
    let parent = local_config.parent.as_ref().ok_or_else(|| {
        PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        )
    })?;
    let parent_path_str = parent_dir();
    let parent_path = Path::new(&parent_path_str);
    // Snapshot the parent's project set BEFORE updating, to diff against later.
    let before = snapshot_parent_projects(parent_path);
    if parent_path.exists() {
        println!("Fetching parent updates...");
        git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?;
        // Hard-align the local clone with the remote ref.
        git::reset_to_ref(parent_path, &parent.remote_name, &parent.ref_)?;
    } else {
        // Parent clone was deleted (or never created): recreate it.
        println!("Parent repository not found. Cloning...");
        git::clone_repository(&parent.id, parent_path, &parent.ref_, None)?;
    }
    let commit_sha = git::get_commit_sha(parent_path, &parent.ref_)?;
    // Snapshot the parent's project set AFTER updating.
    let after = snapshot_parent_projects(parent_path);
    let mut integrity = None;
    // Re-hash parent lock/config for reproducibility checks.
    // Try pakker files first, fall back to pakku files.
    let parent_lock_path = if parent_path.join("pakker-lock.json").exists() {
        parent_path.join("pakker-lock.json")
    } else {
        parent_path.join("pakku-lock.json")
    };
    let parent_config_path = if parent_path.join("pakker.json").exists() {
        parent_path.join("pakker.json")
    } else {
        parent_path.join("pakku.json")
    };
    if parent_lock_path.exists() {
        let lock_content = fs::read_to_string(&parent_lock_path).map_err(|e| {
            PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
        })?;
        let lock_hash = hash_content(&lock_content);
        local_config.parent_lock_hash = Some(lock_hash);
        // A missing parent config hashes as the empty string.
        let config_content = if parent_config_path.exists() {
            fs::read_to_string(&parent_config_path).map_err(|e| {
                PakkerError::Fork(format!("Failed to read parent config: {e}"))
            })?
        } else {
            String::new()
        };
        let config_hash = hash_content(&config_content);
        local_config.parent_config_hash = Some(config_hash);
        integrity = Some(ForkIntegrity::new(
            local_config.parent_lock_hash.clone().unwrap_or_default(),
            commit_sha.clone(),
            local_config.parent_config_hash.clone().unwrap_or_default(),
        ));
    }
    // NOTE(review): `integrity` is currently only logged here; presumably it
    // is intended for later verification — confirm against ForkIntegrity's
    // other call sites.
    if let Some(ref integrity_data) = integrity {
        log::info!(
            "Parent integrity verified at timestamp {}",
            integrity_data.verified_at
        );
    }
    // Record the newly synced commit (short form) on the parent config.
    if let Some(parent) = local_config.parent.as_mut() {
        parent.version = Some(commit_sha[..8].to_string());
    }
    local_config.save(config_dir)?;
    println!();
    println!("{}", "✓ Parent sync complete".green());
    println!(" Commit: {}", &commit_sha[..8]);
    // Print diff of parent changes: set-difference on slugs for add/remove,
    // first-file comparison for updates.
    let before_keys: HashSet<_> = before.keys().collect();
    let after_keys: HashSet<_> = after.keys().collect();
    let added: Vec<_> = after_keys.difference(&before_keys).collect();
    let removed: Vec<_> = before_keys.difference(&after_keys).collect();
    let mut updated: Vec<(&String, &Option<String>, &Option<String>)> =
        Vec::new();
    for slug in before_keys.intersection(&after_keys) {
        if before[*slug] != after[*slug] {
            updated.push((slug, &before[*slug], &after[*slug]));
        }
    }
    if added.is_empty() && removed.is_empty() && updated.is_empty() {
        println!(" No changes in parent projects.");
    } else {
        println!();
        println!(" Parent project changes:");
        // Sort each group for deterministic output.
        let mut added: Vec<_> = added;
        added.sort();
        for slug in added {
            let file = after[*slug].as_deref().unwrap_or("?");
            println!(" + {slug} ({file})");
        }
        let mut removed: Vec<_> = removed;
        removed.sort();
        for slug in removed {
            let file = before[*slug].as_deref().unwrap_or("?");
            println!(" - {slug} ({file})");
        }
        updated.sort_by_key(|(slug, ..)| *slug);
        for (slug, old_file, new_file) in updated {
            let old = old_file.as_deref().unwrap_or("?");
            let new = new_file.as_deref().unwrap_or("?");
            println!(" {} {slug}: {old} → {new}", "~".yellow());
        }
    }
    println!();
    println!("Run 'pakku export' to merge changes from parent.");
    Ok(())
}
/// Promote projects from the parent lockfile into the local lockfile so they
/// become locally managed and override the parent.
///
/// Each entry in `projects` may match a parent project by slug, name, or
/// pakku id. The local lockfile is only saved when every requested project
/// was found (all-or-nothing) and at least one was actually promoted.
///
/// # Errors
/// Returns `PakkerError::Fork` when no parent is configured, no projects were
/// given, the parent clone is missing, a lockfile fails to load, or any
/// requested project is absent from the parent lockfile.
fn execute_promote(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }
    if projects.is_empty() {
        return Err(PakkerError::Fork(
            "No projects specified. Usage: pakku fork promote <project>..."
                .to_string(),
        ));
    }
    // Load parent lockfile from the cached clone.
    let parent_path_str = parent_dir();
    let parent_path = Path::new(&parent_path_str);
    if !parent_path.exists() {
        return Err(PakkerError::Fork(
            "Parent directory not found. Run 'pakku fork sync' first.".to_string(),
        ));
    }
    let parent_lockfile = LockFile::load_with_validation(parent_path, false)
        .map_err(|e| {
            PakkerError::Fork(format!("Failed to load parent lockfile: {e}"))
        })?;
    // Load or create local lockfile. The path is only probed for existence;
    // load_with_validation resolves the actual file from config_dir itself.
    let lockfile_path = if config_dir.join("pakker-lock.json").exists() {
        config_dir.join("pakker-lock.json")
    } else {
        config_dir.join("pakku-lock.json")
    };
    let mut local_lockfile = if lockfile_path.exists() {
        LockFile::load_with_validation(config_dir, false).map_err(|e| {
            PakkerError::Fork(format!("Failed to load local lockfile: {e}"))
        })?
    } else {
        // Bootstrap from parent metadata. NOTE: `target` is moved out of
        // parent_lockfile here (partial move); only `.projects` (and other
        // untouched fields) may be used afterwards.
        LockFile {
            target: parent_lockfile.target,
            mc_versions: parent_lockfile.mc_versions.clone(),
            loaders: parent_lockfile.loaders.clone(),
            projects: Vec::new(),
            lockfile_version: parent_lockfile.lockfile_version,
        }
    };
    // Track which requested projects we found.
    let mut promoted = Vec::new();
    let mut not_found = Vec::new();
    for project_arg in projects {
        // Match by any slug value, any name value, or the pakku id.
        let found = parent_lockfile.projects.iter().find(|p| {
            p.slug.values().any(|s| s == project_arg)
                || p.name.values().any(|n| n == project_arg)
                || p.pakku_id.as_deref() == Some(project_arg)
        });
        if let Some(project) = found {
            // Skip if already in local lockfile (any overlapping slug counts).
            let already_local = local_lockfile.projects.iter().any(|lp| {
                lp.slug
                    .values()
                    .any(|s| project.slug.values().any(|ps| s == ps))
            });
            if already_local {
                println!(" ~ {project_arg}: already in local lockfile, skipping");
                continue;
            }
            local_lockfile.add_project(project.clone());
            promoted.push(project_arg);
        } else {
            not_found.push(project_arg);
        }
    }
    // All-or-nothing: if anything was missing, report and bail without saving.
    if !not_found.is_empty() {
        for name in &not_found {
            eprintln!(" ! {name}: not found in parent lockfile");
        }
        return Err(PakkerError::Fork(format!(
            "{} project(s) not found in parent lockfile",
            not_found.len()
        )));
    }
    if promoted.is_empty() {
        println!("No projects promoted (all already in local lockfile).");
        return Ok(());
    }
    local_lockfile.save(config_dir)?;
    println!("Promoted {} project(s) to local lockfile:", promoted.len());
    for name in &promoted {
        println!(" + {name}");
    }
    println!();
    println!(
        "These projects are now locally managed and will override the parent."
    );
    Ok(())
}
/// Add projects to the excludes list so they are omitted from exports.
/// Duplicates (already-excluded slugs) are reported and skipped.
fn execute_exclude(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }
    let mut newly_excluded = Vec::new();
    for slug in projects {
        // Skip slugs that are already excluded (covers duplicates within
        // this invocation too, since each addition is pushed immediately).
        if local_config.excludes.contains(slug) {
            println!(" ~ {slug}: already excluded");
            continue;
        }
        local_config.excludes.push(slug.clone());
        newly_excluded.push(slug);
    }
    // Keep the persisted list in a stable, sorted order.
    local_config.excludes.sort();
    local_config.save(config_dir)?;
    if newly_excluded.is_empty() {
        return Ok(());
    }
    println!("Excluded {} project(s) from parent:", newly_excluded.len());
    for slug in &newly_excluded {
        println!(" - {slug}");
    }
    println!();
    println!("These parent projects will be omitted from exports.");
    Ok(())
}
/// Remove projects from the excludes list so exports pick them up again.
/// Slugs not currently excluded are reported and left untouched.
fn execute_include(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }
    let mut reinstated = Vec::new();
    for slug in projects {
        match local_config.excludes.iter().position(|s| s == slug) {
            Some(index) => {
                local_config.excludes.remove(index);
                reinstated.push(slug);
            },
            None => println!(" ~ {slug}: not in excludes list"),
        }
    }
    local_config.save(config_dir)?;
    if !reinstated.is_empty() {
        println!("Re-included {} project(s) from parent:", reinstated.len());
        for slug in &reinstated {
            println!(" + {slug}");
        }
        println!();
        println!("These parent projects will be included in exports again.");
    }
    Ok(())
}
/// Ensure the parent clone directory (`.pakku/parent`) is listed in
/// `.gitignore`, creating the file or appending an entry as needed. The entry
/// is skipped when an identical (trimmed) line already exists.
///
/// Fix: if the existing `.gitignore` does not end with a newline, appending
/// directly would glue the entry onto the last line (e.g. `foo.pakku/parent`),
/// so a separating newline is written first in that case.
///
/// # Errors
/// Returns `PakkerError::Fork` when reading, opening, or writing the file fails.
fn add_to_gitignore() -> Result<(), PakkerError> {
    let gitignore_path = Path::new(".gitignore");
    let parent_dir = parent_dir();
    // Track whether the existing file ends mid-line.
    let mut needs_leading_newline = false;
    if gitignore_path.exists() {
        let content = fs::read_to_string(gitignore_path).map_err(|e| {
            PakkerError::Fork(format!("Failed to read .gitignore: {e}"))
        })?;
        // Already present: nothing to do.
        if content.lines().any(|line| line.trim() == parent_dir) {
            return Ok(());
        }
        needs_leading_newline = !content.is_empty() && !content.ends_with('\n');
    }
    // Append to .gitignore, creating it if necessary.
    let mut file = fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(gitignore_path)
        .map_err(|e| {
            PakkerError::Fork(format!("Failed to open .gitignore: {e}"))
        })?;
    if needs_leading_newline {
        writeln!(file).map_err(|e| {
            PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
        })?;
    }
    writeln!(file, "{parent_dir}").map_err(|e| {
        PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
    })?;
    Ok(())
}