sync: batch file identification via hash lookup
Signed-off-by: NotAShelf <raf@notashelf.dev> Change-Id: I85d3f1265cad1996340ac98ac9ee1f7e6a6a6964
This commit is contained in:
parent
0048a1cd73
commit
838ba82790
5 changed files with 261 additions and 1 deletions
|
|
@ -63,6 +63,8 @@ pub async fn execute(
|
|||
);
|
||||
|
||||
if no_filter || args.additions {
|
||||
let mut file_hashes = Vec::new();
|
||||
|
||||
for (file_path, _) in &additions {
|
||||
spinner
|
||||
.set_message(format!("Processing addition: {}", file_path.display()));
|
||||
|
|
@ -71,7 +73,34 @@ pub async fn execute(
|
|||
false,
|
||||
global_yes,
|
||||
)? {
|
||||
add_file_to_lockfile(&mut lockfile, file_path, &config).await?;
|
||||
if let Ok(file_data) = fs::read(file_path) {
|
||||
use sha1::Digest;
|
||||
let mut hasher = sha1::Sha1::new();
|
||||
hasher.update(&file_data);
|
||||
let hash = format!("{:x}", hasher.finalize());
|
||||
file_hashes.push(FileHash {
|
||||
path: file_path.clone(),
|
||||
hash,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !file_hashes.is_empty() {
|
||||
let fallback_hashes = file_hashes.clone();
|
||||
let result = add_files_batch(&mut lockfile, file_hashes).await;
|
||||
if let Err(e) = result {
|
||||
log::warn!(
|
||||
"Batch lookup failed, falling back to individual lookups: {}",
|
||||
e
|
||||
);
|
||||
for fh in fallback_hashes {
|
||||
if let Err(e) =
|
||||
add_file_to_lockfile(&mut lockfile, &fh.path, &config).await
|
||||
{
|
||||
log::warn!("Failed to add {}: {}", fh.path.display(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -210,3 +239,64 @@ async fn add_file_to_lockfile(
|
|||
println!("⚠ Could not identify {}, skipping", file_path.display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// A local file path paired with the SHA-1 digest of its contents,
/// used to identify the file on remote platforms via batch hash lookup.
#[derive(Clone)]
struct FileHash {
    // Path of the file on disk, as collected by the caller.
    path: PathBuf,
    // Lowercase hex SHA-1 of the file contents (produced via `format!("{:x}", …)`).
    hash: String,
}
|
||||
|
||||
async fn add_files_batch(
|
||||
lockfile: &mut LockFile,
|
||||
file_hashes: Vec<FileHash>,
|
||||
) -> Result<()> {
|
||||
if file_hashes.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let modrinth = ModrinthPlatform::new();
|
||||
|
||||
let hashes: Vec<String> =
|
||||
file_hashes.iter().map(|fh| fh.hash.clone()).collect();
|
||||
|
||||
let projects = modrinth
|
||||
.request_projects_from_hashes(&hashes, "sha1")
|
||||
.await?;
|
||||
|
||||
let mut matched_indices: std::collections::HashSet<usize> =
|
||||
std::collections::HashSet::new();
|
||||
let mut added_pakku_ids: std::collections::HashSet<String> =
|
||||
std::collections::HashSet::new();
|
||||
|
||||
for project in &projects {
|
||||
let pakku_id = match &project.pakku_id {
|
||||
Some(id) => id.clone(),
|
||||
None => continue,
|
||||
};
|
||||
if added_pakku_ids.contains(&pakku_id) {
|
||||
continue;
|
||||
}
|
||||
for file_info in &project.files {
|
||||
for (idx, fh) in file_hashes.iter().enumerate() {
|
||||
if !matched_indices.contains(&idx)
|
||||
&& file_info.hashes.get("sha1").map(|s| s.as_str()) == Some(&fh.hash)
|
||||
{
|
||||
lockfile.add_project(project.clone());
|
||||
added_pakku_ids.insert(pakku_id.clone());
|
||||
matched_indices.insert(idx);
|
||||
println!("✓ Added {} (from Modrinth)", fh.path.display());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (idx, fh) in file_hashes.iter().enumerate() {
|
||||
if matched_indices.contains(&idx) {
|
||||
continue;
|
||||
}
|
||||
println!("⚠ Could not identify {}, skipping", fh.path.display());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue