pakker/crates/pakker-cli/src/cli/commands/sync.rs
NotAShelf d445b1814a
treewide: migrate to multi-crate layout
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I11a2103f3530f07409177404577b90136a6a6964
2026-05-03 03:44:54 +03:00

309 lines
8.1 KiB
Rust

use std::{
collections::{HashMap, HashSet},
fs,
path::{Path, PathBuf},
};
use indicatif::{ProgressBar, ProgressStyle};
use crate::{
cli::SyncArgs,
error::{PakkerError, Result},
fetch::Fetcher,
model::{Config, LockFile},
platform::{CurseForgePlatform, ModrinthPlatform, PlatformClient},
};
/// A single difference detected between the lockfile and the on-disk
/// mods directory.
enum SyncChange {
    /// A file present on disk but absent from the lockfile:
    /// (file_path, project_name derived from the file name).
    Addition(PathBuf, String),
    /// A lockfile project whose file is missing on disk: project_pakku_id.
    Removal(String),
}
/// Synchronizes the lockfile with the on-disk mods directory.
///
/// Detects files present on disk but missing from the lockfile (additions)
/// and lockfile entries whose files are missing from disk (removals),
/// prompts the user for each change (auto-confirmed when `global_yes`),
/// applies the confirmed changes, saves the lockfile, and finally fetches
/// any missing files.
///
/// `args.additions` / `args.removals` restrict processing to one kind of
/// change; when neither flag is set, both kinds are processed.
///
/// # Errors
///
/// Returns an error if the lockfile or config cannot be loaded or saved,
/// if a prompt fails, if a confirmed removal targets a project id not in
/// the lockfile, or if the final fetch fails.
#[expect(
    clippy::expect_used,
    reason = "spinner template is a string literal and is always valid"
)]
pub async fn execute(
    args: SyncArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    log::info!("Synchronizing with lockfile");
    // Load both files relative to their parent directories; fall back to the
    // current directory when the given paths have no parent component.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;
    // Detect changes
    let changes = detect_changes(&lockfile, &config);
    if changes.is_empty() {
        println!("✓ Everything is in sync");
        return Ok(());
    }
    // Partition detected changes so the --additions / --removals filters
    // below can act on each group independently.
    let mut additions = Vec::new();
    let mut removals = Vec::new();
    for change in changes {
        match change {
            SyncChange::Addition(path, name) => additions.push((path, name)),
            SyncChange::Removal(id) => removals.push(id),
        }
    }
    // Apply filters: neither flag set means "process everything".
    let no_filter = !args.additions && !args.removals;
    let spinner = ProgressBar::new_spinner();
    spinner.set_style(
        ProgressStyle::default_spinner()
            .template("{spinner:.green} {msg}")
            .expect("spinner template is valid"),
    );
    if no_filter || args.additions {
        // Hash each confirmed file up front so all of them can be resolved
        // with a single batched Modrinth lookup below.
        let mut file_hashes = Vec::new();
        for (file_path, _) in &additions {
            spinner
                .set_message(format!("Processing addition: {}", file_path.display()));
            // A file that fails to read is silently skipped here; it simply
            // never enters the batch lookup.
            if crate::ui_utils::prompt_yes_no(
                &format!("Add {} to lockfile?", file_path.display()),
                false,
                global_yes,
            )? && let Ok(file_data) = fs::read(file_path)
            {
                use sha1::Digest;
                let mut hasher = sha1::Sha1::new();
                hasher.update(&file_data);
                let hash =
                    crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
                file_hashes.push(FileHash {
                    path: file_path.clone(),
                    hash,
                });
            }
        }
        if !file_hashes.is_empty() {
            // Keep a copy so the per-file fallback can still run if the
            // batched lookup fails as a whole.
            let fallback_hashes = file_hashes.clone();
            let result = add_files_batch(&mut lockfile, file_hashes).await;
            if let Err(e) = result {
                log::warn!(
                    "Batch lookup failed, falling back to individual lookups: {e}"
                );
                for fh in fallback_hashes {
                    if let Err(e) =
                        add_file_to_lockfile(&mut lockfile, &fh.path, &config).await
                    {
                        log::warn!("Failed to add {}: {}", fh.path.display(), e);
                    }
                }
            }
        }
    }
    if no_filter || args.removals {
        for pakku_id in &removals {
            if let Some(project) = lockfile
                .projects
                .iter()
                .find(|p| p.pakku_id.as_ref() == Some(pakku_id))
            {
                // Prefer a human-readable name for the prompt; fall back to
                // the pakku id, then to a placeholder.
                let name = project
                    .name
                    .values()
                    .next()
                    .map(std::string::String::as_str)
                    .or(project.pakku_id.as_deref())
                    .unwrap_or("unknown");
                spinner.set_message(format!("Processing removal: {name}"));
                if crate::ui_utils::prompt_yes_no(
                    &format!("Remove {name} from lockfile?"),
                    false,
                    global_yes,
                )? {
                    // The project was just found above, so removal should
                    // always succeed; the error path guards the invariant.
                    lockfile
                        .remove_project(pakku_id)
                        .ok_or_else(|| PakkerError::ProjectNotFound(pakku_id.clone()))?;
                }
            }
        }
    }
    spinner.finish_and_clear();
    // Save changes
    lockfile.save(lockfile_dir)?;
    // Fetch missing files.
    // NOTE(review): the fetcher is rooted at the current working directory,
    // not at lockfile_dir — confirm this is intentional when the CLI is run
    // from outside the pack directory.
    let fetcher = Fetcher::new(".");
    fetcher.sync(&lockfile, &config).await?;
    println!("✓ Sync complete");
    Ok(())
}
/// Compares the lockfile against the mods directory and returns the set of
/// differences: additions (untracked `.jar` files on disk) followed by
/// removals (lockfile projects whose files are missing on disk).
///
/// A missing or unreadable mods directory is treated as empty, so every
/// lockfile entry is then reported as a removal.
fn detect_changes(lockfile: &LockFile, config: &Config) -> Vec<SyncChange> {
    let mut changes = Vec::new();
    // Resolve the mods directory from config, defaulting to "mods".
    // (Borrow directly from `config` — no need to clone the whole map.)
    let mods_path = config
        .paths
        .get("mods")
        .map_or("mods", std::string::String::as_str);
    // Map each expected file path to its owning project's pakku id.
    let mut lockfile_files: HashMap<PathBuf, String> = HashMap::new();
    for project in &lockfile.projects {
        for file in &project.files {
            let file_path = PathBuf::from(mods_path).join(&file.file_name);
            if let Some(ref pakku_id) = project.pakku_id {
                lockfile_files.insert(file_path, pakku_id.clone());
            }
        }
    }
    // Scan the directory exactly once. Reading it twice (one pass for
    // additions, another for removals) risked the two views disagreeing if
    // the directory changed between the reads.
    let filesystem_files: Vec<PathBuf> = fs::read_dir(mods_path).map_or_else(
        |_| Vec::new(),
        |entries| {
            entries
                .flatten()
                .map(|e| e.path())
                .filter(|p| p.is_file())
                .collect()
        },
    );
    // Membership index for the removal check below; built before the
    // additions pass consumes the Vec.
    let on_disk: HashSet<PathBuf> = filesystem_files.iter().cloned().collect();
    // Additions: .jar files on disk the lockfile does not know about.
    for path in filesystem_files {
        if path.extension().is_some_and(|ext| ext == "jar")
            && !lockfile_files.contains_key(&path)
        {
            let name = path
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_default();
            changes.push(SyncChange::Addition(path, name));
        }
    }
    // Removals: lockfile entries whose file is missing from disk.
    for (lockfile_path, pakku_id) in &lockfile_files {
        if !on_disk.contains(lockfile_path) {
            changes.push(SyncChange::Removal(pakku_id.clone()));
        }
    }
    changes
}
/// Identifies a single local file by content hash and, on success, adds the
/// resolved project to the lockfile.
///
/// Tries Modrinth first, then CurseForge. A file that cannot be identified
/// on either platform is reported and skipped — that is not an error.
///
/// # Errors
///
/// Returns an error only if the file cannot be read.
async fn add_file_to_lockfile(
    lockfile: &mut LockFile,
    file_path: &Path,
    _config: &Config,
) -> Result<()> {
    use sha1::Digest;
    // Try to identify the file by hash lookup
    let modrinth = ModrinthPlatform::new();
    let curseforge = CurseForgePlatform::new(None);
    // Compute file hash
    let file_data = fs::read(file_path)?;
    let mut hasher = sha1::Sha1::new();
    hasher.update(&file_data);
    let hash = crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
    // Try Modrinth first (SHA-1 hash)
    if let Ok(Some(project)) = modrinth.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from Modrinth)", file_path.display());
        return Ok(());
    }
    // NOTE(review): this passes the SHA-1 hash to CurseForge, but CurseForge
    // fingerprint lookups conventionally use Murmur2 — confirm that
    // `lookup_by_hash` accepts SHA-1, or compute the Murmur2 fingerprint
    // from `file_data` here instead.
    if let Ok(Some(project)) = curseforge.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from CurseForge)", file_path.display());
        return Ok(());
    }
    println!("⚠ Could not identify {}, skipping", file_path.display());
    Ok(())
}
/// A local file path paired with the hex-encoded SHA-1 digest of its
/// contents, used to batch platform hash lookups.
#[derive(Clone)]
struct FileHash {
    // Path to the file on disk, as discovered by the mods-directory scan.
    path: PathBuf,
    // Hex-encoded SHA-1 digest of the file contents.
    hash: String,
}
async fn add_files_batch(
lockfile: &mut LockFile,
file_hashes: Vec<FileHash>,
) -> Result<()> {
if file_hashes.is_empty() {
return Ok(());
}
let modrinth = ModrinthPlatform::new();
let hashes: Vec<String> =
file_hashes.iter().map(|fh| fh.hash.clone()).collect();
let projects = modrinth
.request_projects_from_hashes(&hashes, "sha1")
.await?;
let mut matched_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut added_pakku_ids: std::collections::HashSet<String> =
std::collections::HashSet::new();
for project in &projects {
let pakku_id = match &project.pakku_id {
Some(id) => id.clone(),
None => continue,
};
if added_pakku_ids.contains(&pakku_id) {
continue;
}
for file_info in &project.files {
for (idx, fh) in file_hashes.iter().enumerate() {
if !matched_indices.contains(&idx)
&& file_info
.hashes
.get("sha1")
.map(std::string::String::as_str)
== Some(&fh.hash)
{
lockfile.add_project(project.clone());
added_pakku_ids.insert(pakku_id.clone());
matched_indices.insert(idx);
println!("✓ Added {} (from Modrinth)", fh.path.display());
break;
}
}
}
}
for (idx, fh) in file_hashes.iter().enumerate() {
if matched_indices.contains(&idx) {
continue;
}
println!("⚠ Could not identify {}, skipping", fh.path.display());
}
Ok(())
}