fork: implement promote, sync diff, exclude/include, and merge with excludes

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I2d10e3f970784e84192cbca10caffe296a6a6964
This commit is contained in:
raf 2026-04-21 23:34:11 +03:00
commit 45d5f7e99b
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
4 changed files with 315 additions and 78 deletions

View file

@ -606,4 +606,18 @@ pub enum ForkSubcommand {
/// Project identifiers to promote
projects: Vec<String>,
},
/// Exclude parent projects from the merged export
Exclude {
/// Project slugs to exclude from the parent
#[clap(required = true)]
projects: Vec<String>,
},
/// Re-include previously excluded parent projects
Include {
/// Project slugs to stop excluding from the parent
#[clap(required = true)]
projects: Vec<String>,
},
}

View file

@ -233,14 +233,17 @@ fn merge_lockfiles(
// Collect local project slugs for override detection
let mut local_slugs = std::collections::HashSet::new();
for project in &local.projects {
// Add all slugs from all platforms
for slug in project.slug.values() {
local_slugs.insert(slug.clone());
}
}
// Add parent projects that are NOT overridden by local
let parent_projects_count = parent.projects.len();
// Collect excluded slugs from local config
let excluded: std::collections::HashSet<_> =
local_config.excludes.iter().collect();
// Add parent projects that are NOT overridden by local and NOT excluded
let mut parent_kept = 0usize;
for parent_project in &parent.projects {
let is_overridden = parent_project
@ -248,53 +251,63 @@ fn merge_lockfiles(
.values()
.any(|slug| local_slugs.contains(slug));
if !is_overridden {
// Check if project has local config overrides
let mut project = parent_project.clone();
let is_excluded = parent_project
.slug
.values()
.any(|slug| excluded.contains(slug))
|| parent_project
.name
.values()
.any(|name| excluded.contains(name));
// Apply local config overrides if they exist
for (key, local_proj_cfg) in &local_config.projects {
// Match by slug, name, or pakku_id
let matches = project.slug.values().any(|s| s == key)
|| project.name.values().any(|n| n == key)
|| project.pakku_id.as_ref() == Some(key);
if matches {
if let Some(t) = local_proj_cfg.r#type {
project.r#type = t;
}
if let Some(s) = local_proj_cfg.side {
project.side = s;
}
if let Some(us) = local_proj_cfg.update_strategy {
project.update_strategy = us;
}
if let Some(r) = local_proj_cfg.redistributable {
project.redistributable = r;
}
if let Some(ref sp) = local_proj_cfg.subpath {
project.subpath = Some(sp.clone());
}
if let Some(ref aliases) = local_proj_cfg.aliases {
project.aliases = aliases.iter().cloned().collect();
}
if let Some(e) = local_proj_cfg.export {
project.export = e;
}
break;
}
}
merged.projects.push(project);
if is_overridden || is_excluded {
continue;
}
let mut project = parent_project.clone();
// Apply local config attribute overrides (side, type, etc.)
for (key, local_proj_cfg) in &local_config.projects {
let matches = project.slug.values().any(|s| s == key)
|| project.name.values().any(|n| n == key)
|| project.pakku_id.as_ref() == Some(key);
if matches {
if let Some(t) = local_proj_cfg.r#type {
project.r#type = t;
}
if let Some(s) = local_proj_cfg.side {
project.side = s;
}
if let Some(us) = local_proj_cfg.update_strategy {
project.update_strategy = us;
}
if let Some(r) = local_proj_cfg.redistributable {
project.redistributable = r;
}
if let Some(ref sp) = local_proj_cfg.subpath {
project.subpath = Some(sp.clone());
}
if let Some(ref aliases) = local_proj_cfg.aliases {
project.aliases = aliases.iter().cloned().collect();
}
if let Some(e) = local_proj_cfg.export {
project.export = e;
}
break;
}
}
merged.projects.push(project);
parent_kept += 1;
}
// Add all local projects
merged.projects.extend(local.projects.clone());
println!(
"Merged fork: {} parent projects + {} local projects = {} total projects",
parent_projects_count - local_config.projects.len(),
"Merged fork: {} parent + {} local = {} total projects",
parent_kept,
local.projects.len(),
merged.projects.len()
);

View file

@ -1,11 +1,16 @@
use std::{fs, io::Write, path::Path};
use std::{
collections::{HashMap, HashSet},
fs,
io::Write,
path::Path,
};
use crate::{
cli::ForkArgs,
error::PakkerError,
git::{self, VcsType},
model::{
config::Config,
LockFile,
fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content},
},
};
@ -51,6 +56,12 @@ pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> {
crate::cli::ForkSubcommand::Promote { projects } => {
execute_promote(projects)
},
crate::cli::ForkSubcommand::Exclude { projects } => {
execute_exclude(projects)
},
crate::cli::ForkSubcommand::Include { projects } => {
execute_include(projects)
},
}
}
@ -491,6 +502,41 @@ fn execute_unset() -> Result<(), PakkerError> {
Ok(())
}
/// Snapshot the parent lockfile as a slug → first file name map.
///
/// Used to diff the parent's project set before/after a sync. Returns an
/// empty map when no lockfile exists or it fails to parse: the diff is
/// best-effort and must never abort the sync itself.
fn snapshot_parent_projects(
    parent_path: &Path,
) -> HashMap<String, Option<String>> {
    // Prefer the pakker lockfile; fall back to the legacy pakku name.
    let pakker = parent_path.join("pakker-lock.json");
    let lockfile_path = if pakker.exists() {
        pakker
    } else {
        parent_path.join("pakku-lock.json")
    };
    if !lockfile_path.exists() {
        return HashMap::new();
    }
    // Parse failures are swallowed on purpose: an unreadable parent
    // lockfile simply yields "no snapshot".
    let Ok(lockfile) = LockFile::load_with_validation(parent_path, false)
    else {
        return HashMap::new();
    };
    let mut snapshot = HashMap::new();
    for project in &lockfile.projects {
        // Key by the first slug, falling back to the first display name.
        // NOTE(review): projects with neither slug nor name all collapse
        // onto the "" key — confirm that is acceptable for diffing.
        let key = project
            .slug
            .values()
            .next()
            .cloned()
            .or_else(|| project.name.values().next().cloned())
            .unwrap_or_default();
        let first_file = project.files.first().map(|f| f.file_name.clone());
        snapshot.insert(key, first_file);
    }
    snapshot
}
fn execute_sync() -> Result<(), PakkerError> {
let config_dir = Path::new(".");
let mut local_config = LocalConfig::load(config_dir)?;
@ -504,6 +550,9 @@ fn execute_sync() -> Result<(), PakkerError> {
let parent_path_str = parent_dir();
let parent_path = Path::new(&parent_path_str);
// Snapshot before update
let before = snapshot_parent_projects(parent_path);
if parent_path.exists() {
println!("Fetching parent updates...");
git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?;
@ -515,6 +564,9 @@ fn execute_sync() -> Result<(), PakkerError> {
let commit_sha = git::get_commit_sha(parent_path, &parent.ref_)?;
// Snapshot after update
let after = snapshot_parent_projects(parent_path);
let mut integrity = None;
// Try pakker files first, fall back to pakku files
@ -536,15 +588,6 @@ fn execute_sync() -> Result<(), PakkerError> {
})?;
let lock_hash = hash_content(&lock_content);
if let Some(prev_hash) = &local_config.parent_lock_hash
&& prev_hash != &lock_hash
{
log::warn!("Parent lock file has changed since last sync");
log::warn!(" Previous hash: {prev_hash}");
log::warn!(" Current hash: {lock_hash}");
}
local_config.parent_lock_hash = Some(lock_hash);
let config_content = if parent_config_path.exists() {
@ -556,15 +599,6 @@ fn execute_sync() -> Result<(), PakkerError> {
};
let config_hash = hash_content(&config_content);
if let Some(prev_hash) = &local_config.parent_config_hash
&& prev_hash != &config_hash
{
log::warn!("Parent config file has changed since last sync");
log::warn!(" Previous hash: {prev_hash}");
log::warn!(" Current hash: {config_hash}");
}
local_config.parent_config_hash = Some(config_hash);
integrity = Some(ForkIntegrity::new(
@ -590,6 +624,46 @@ fn execute_sync() -> Result<(), PakkerError> {
println!();
println!("✓ Parent sync complete");
println!(" Commit: {}", &commit_sha[..8]);
// Print diff of parent changes
let before_keys: HashSet<_> = before.keys().collect();
let after_keys: HashSet<_> = after.keys().collect();
let added: Vec<_> = after_keys.difference(&before_keys).collect();
let removed: Vec<_> = before_keys.difference(&after_keys).collect();
let mut updated: Vec<(&String, &Option<String>, &Option<String>)> =
Vec::new();
for slug in before_keys.intersection(&after_keys) {
if before[*slug] != after[*slug] {
updated.push((slug, &before[*slug], &after[*slug]));
}
}
if added.is_empty() && removed.is_empty() && updated.is_empty() {
println!(" No changes in parent projects.");
} else {
println!();
println!(" Parent project changes:");
let mut added: Vec<_> = added;
added.sort();
for slug in added {
let file = after[*slug].as_deref().unwrap_or("?");
println!(" + {slug} ({file})");
}
let mut removed: Vec<_> = removed;
removed.sort();
for slug in removed {
let file = before[*slug].as_deref().unwrap_or("?");
println!(" - {slug} ({file})");
}
updated.sort_by_key(|(slug, ..)| *slug);
for (slug, old_file, new_file) in updated {
let old = old_file.as_deref().unwrap_or("?");
let new = new_file.as_deref().unwrap_or("?");
println!(" ~ {slug}: {old} → {new}");
}
}
println!();
println!("Run 'pakku export' to merge changes from parent.");
@ -613,33 +687,160 @@ fn execute_promote(projects: &[String]) -> Result<(), PakkerError> {
));
}
// Load current config
let config = Config::load(config_dir)?;
// Load parent lockfile
let parent_path_str = parent_dir();
let parent_path = Path::new(&parent_path_str);
if !parent_path.exists() {
return Err(PakkerError::Fork(
"Parent directory not found. Run 'pakku fork sync' first.".to_string(),
));
}
let parent_lockfile = LockFile::load_with_validation(parent_path, false)
.map_err(|e| {
PakkerError::Fork(format!("Failed to load parent lockfile: {e}"))
})?;
// Load or create local lockfile
let lockfile_path = config_dir.join("pakku-lock.json");
let mut local_lockfile = if lockfile_path.exists() {
LockFile::load_with_validation(config_dir, false).map_err(|e| {
PakkerError::Fork(format!("Failed to load local lockfile: {e}"))
})?
} else {
// Bootstrap from parent metadata
LockFile {
target: parent_lockfile.target,
mc_versions: parent_lockfile.mc_versions.clone(),
loaders: parent_lockfile.loaders.clone(),
projects: Vec::new(),
lockfile_version: parent_lockfile.lockfile_version,
}
};
// Track which requested projects we found
let mut promoted = Vec::new();
let mut not_found = Vec::new();
// Verify all projects exist
for project_arg in projects {
let found = config
.projects
.as_ref()
.and_then(|projs| projs.get(project_arg))
.is_some();
let found = parent_lockfile.projects.iter().find(|p| {
p.slug.values().any(|s| s == project_arg)
|| p.name.values().any(|n| n == project_arg)
|| p.pakku_id.as_deref() == Some(project_arg)
});
if !found {
return Err(PakkerError::Fork(format!(
"Project not found: {project_arg}"
)));
if let Some(project) = found {
// Skip if already in local lockfile
let already_local = local_lockfile.projects.iter().any(|lp| {
lp.slug
.values()
.any(|s| project.slug.values().any(|ps| s == ps))
});
if already_local {
println!(" ~ {project_arg}: already in local lockfile, skipping");
continue;
}
local_lockfile.add_project(project.clone());
promoted.push(project_arg);
} else {
not_found.push(project_arg);
}
}
println!("Note: In the current architecture, projects in pakku.json are");
println!("automatically merged with parent projects during export.");
println!();
println!("The following projects are already in pakku.json:");
for project in projects {
println!(" - {project}");
if !not_found.is_empty() {
for name in &not_found {
eprintln!(" ! {name}: not found in parent lockfile");
}
return Err(PakkerError::Fork(format!(
"{} project(s) not found in parent lockfile",
not_found.len()
)));
}
if promoted.is_empty() {
println!("No projects promoted (all already in local lockfile).");
return Ok(());
}
local_lockfile.save(config_dir)?;
println!("Promoted {} project(s) to local lockfile:", promoted.len());
for name in &promoted {
println!(" + {name}");
}
println!();
println!("These will be included in exports automatically.");
println!(
"These projects are now locally managed and will override the parent."
);
Ok(())
}
/// Add the given parent project slugs to the local exclude list so they
/// are omitted from merged exports. Slugs already excluded are reported
/// and skipped; the updated config is persisted to disk.
fn execute_exclude(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;

    // Excluding only makes sense once a parent fork is configured.
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }

    let mut newly_excluded = Vec::new();
    for slug in projects {
        // `contains` also catches the same slug repeated on the CLI,
        // since earlier iterations have already pushed it.
        match local_config.excludes.contains(slug) {
            true => println!(" ~ {slug}: already excluded"),
            false => {
                local_config.excludes.push(slug.clone());
                newly_excluded.push(slug);
            },
        }
    }

    // Keep the exclude list sorted for stable, diff-friendly configs,
    // then persist unconditionally.
    local_config.excludes.sort();
    local_config.save(config_dir)?;

    if !newly_excluded.is_empty() {
        println!("Excluded {} project(s) from parent:", newly_excluded.len());
        for slug in &newly_excluded {
            println!(" - {slug}");
        }
        println!();
        println!("These parent projects will be omitted from exports.");
    }
    Ok(())
}
/// Remove the given slugs from the local exclude list so the matching
/// parent projects appear in merged exports again. Slugs that were not
/// excluded are reported and skipped; the config is persisted to disk.
fn execute_include(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;

    // Including only makes sense once a parent fork is configured.
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }

    let mut reincluded = Vec::new();
    for slug in projects {
        match local_config.excludes.iter().position(|s| s == slug) {
            Some(idx) => {
                local_config.excludes.remove(idx);
                reincluded.push(slug);
            },
            None => println!(" ~ {slug}: not in excludes list"),
        }
    }
    local_config.save(config_dir)?;

    if !reincluded.is_empty() {
        println!("Re-included {} project(s) from parent:", reincluded.len());
        for slug in &reincluded {
            println!(" + {slug}");
        }
        println!();
        println!("These parent projects will be included in exports again.");
    }
    Ok(())
}

View file

@ -135,6 +135,9 @@ pub struct LocalConfig {
pub parent_config_hash: Option<String>,
#[serde(default)]
pub patches: Vec<String>,
/// Slugs of parent projects to exclude from the merged export
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub excludes: Vec<String>,
}
impl LocalConfig {
@ -347,6 +350,7 @@ mod tests {
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
excludes: vec![],
};
assert!(config.parent.is_none());
assert!(config.projects.is_empty());
@ -361,6 +365,7 @@ mod tests {
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
excludes: vec![],
};
assert!(!config.has_parent());
}
@ -373,6 +378,7 @@ mod tests {
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
excludes: vec![],
};
config.parent = Some(ParentConfig {
type_: "git".to_string(),
@ -393,6 +399,7 @@ mod tests {
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
excludes: vec![],
};
config
.projects
@ -421,6 +428,7 @@ mod tests {
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
excludes: vec![],
};
config.patches.push("custom.patch".to_string());
config.patches.push("bugfix.patch".to_string());
@ -437,6 +445,7 @@ mod tests {
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
excludes: vec![],
};
config.parent = Some(ParentConfig {
type_: "git".to_string(),