various: clean up multiplatform mod resolution; add lockfile management

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If1fed3ad9f9273266ae6e0e24d57b6996a6a6964
This commit is contained in:
raf 2026-05-03 21:21:16 +03:00
commit da15ebf9bd
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
14 changed files with 818 additions and 141 deletions

View file

@ -1,4 +1,7 @@
use std::{collections::HashMap, path::Path};
use std::{
collections::{HashMap, HashSet},
path::Path,
};
use serde::{Deserialize, Serialize};
@ -656,6 +659,7 @@ impl LockFile {
lockfile.validate()?;
}
lockfile.sort_projects();
lockfile.deduplicate_projects();
Ok(lockfile)
}
@ -754,7 +758,84 @@ impl LockFile {
}
/// Add `project` to the lockfile. If an already-tracked project shares at
/// least one slug with it, the two are treated as the same mod and the new
/// data is folded into the stored entry instead (existing platform data
/// wins; files are deduplicated by file name). The project list is kept
/// sorted by display name.
pub fn add_project(&mut self, project: Project) {
    // Locate a stored project whose slug set overlaps the incoming one;
    // an overlapping slug means both entries describe the same mod.
    let collision = self.projects.iter().position(|candidate| {
        candidate
            .slug
            .values()
            .any(|known| project.slug.values().any(|incoming| incoming == known))
    });
    match collision {
        Some(idx) => {
            let existing = &mut self.projects[idx];
            // Fold the newcomer in, never overwriting platform data that
            // is already present on the stored entry.
            for (platform, slug) in &project.slug {
                existing
                    .slug
                    .entry(platform.clone())
                    .or_insert_with(|| slug.clone());
            }
            for (platform, name) in &project.name {
                existing
                    .name
                    .entry(platform.clone())
                    .or_insert_with(|| name.clone());
            }
            for (platform, id) in &project.id {
                existing
                    .id
                    .entry(platform.clone())
                    .or_insert_with(|| id.clone());
            }
            // Files are deduplicated by their on-disk name.
            for file in &project.files {
                let tracked = existing
                    .files
                    .iter()
                    .any(|known| known.file_name == file.file_name);
                if !tracked {
                    existing.files.push(file.clone());
                }
            }
            log::debug!(
                "Merged duplicate project '{}' into existing entry",
                project.get_name()
            );
        }
        None => self.projects.push(project),
    }
    // Either branch may have changed the list — re-establish name order.
    self.projects.sort_by_key(super::project::Project::get_name);
}
/// Remove duplicate projects that share overlapping slugs.
/// When duplicates are found, files from the duplicate are merged into
/// the kept project. This handles lockfiles that were corrupted before
/// `add_project` enforced slug uniqueness.
pub fn deduplicate_projects(&mut self) {
    // Maps every slug seen so far to the index (in `unique`) of the kept
    // project that owns it. One map replaces the previous HashSet +
    // HashMap pair, which tracked the same set of keys twice.
    let mut slug_to_idx: HashMap<String, usize> = HashMap::new();
    let mut unique: Vec<Project> = Vec::with_capacity(self.projects.len());
    for project in self.projects.drain(..) {
        let duplicate_slug = project
            .slug
            .values()
            .find(|s| slug_to_idx.contains_key(*s));
        if let Some(dup_slug) = duplicate_slug {
            log::debug!(
                "Removed duplicate project '{}' (slug collision: {dup_slug})",
                project.get_name()
            );
            let existing_idx = slug_to_idx[dup_slug];
            if let Some(existing) = unique.get_mut(existing_idx) {
                // Keep any files the retained entry doesn't already track
                // (deduplicated by file name, matching `add_project`).
                for file in &project.files {
                    if !existing.files.iter().any(|f| f.file_name == file.file_name) {
                        existing.files.push(file.clone());
                    }
                }
            }
            // Also register the duplicate's remaining slugs against the
            // kept entry, so a later project overlapping only those slugs
            // is still deduplicated instead of slipping through.
            for slug in project.slug.values() {
                slug_to_idx.entry(slug.clone()).or_insert(existing_idx);
            }
        } else {
            for slug in project.slug.values() {
                slug_to_idx.insert(slug.clone(), unique.len());
            }
            unique.push(project);
        }
    }
    self.projects = unique;
}
}

View file

@ -156,32 +156,6 @@ impl Project {
self.name.insert(platform, name);
}
/// Fold `other` into `self` in place. Platform-keyed data (ids, slugs,
/// names) already present on `self` is kept; only entries for platforms
/// `self` is missing are adopted from `other`. Files are appended unless a
/// file with the same id is already tracked; pakku links and aliases are
/// simply concatenated.
pub fn merge(&mut self, other: Self) {
    // Links and aliases carry no uniqueness constraint here — append all.
    self.pakku_links.extend(other.pakku_links);
    self.aliases.extend(other.aliases);
    // Per-platform maps: `self`'s entries take precedence.
    for (platform, value) in other.id {
        self.id.entry(platform).or_insert(value);
    }
    for (platform, value) in other.slug {
        self.slug.entry(platform).or_insert(value);
    }
    for (platform, value) in other.name {
        self.name.entry(platform).or_insert(value);
    }
    // Files are deduplicated by id.
    for incoming in other.files {
        let already_tracked = self.files.iter().any(|f| f.id == incoming.id);
        if !already_tracked {
            self.files.push(incoming);
        }
    }
}
/// Merge this project with another, returning a new combined project.
/// Like Pakku's `Project.plus()`, this is a pure operation that doesn't
/// modify either project.

View file

@ -322,7 +322,9 @@ impl PlatformClient for CurseForgePlatform {
query_params.push(("modLoaderTypes", loader_str));
}
if !query_params.is_empty() {
let has_filters = !query_params.is_empty();
if has_filters {
let query_string = query_params
.iter()
.map(|(k, v)| format!("{k}={v}"))
@ -350,6 +352,30 @@ impl PlatformClient for CurseForgePlatform {
.map(|f| Self::convert_file(f, project_id))
.collect();
// If server-side filters eliminated all results, retry without them
if files.is_empty() && has_filters {
let bare_url = format!("{CURSEFORGE_API_BASE}/mods/{project_id}/files");
let response = self
.client
.get(&bare_url)
.headers(self.get_headers()?)
.send()
.await?;
if !response.status().is_success() {
return Err(Self::map_http_error(response.status(), project_id));
}
let result: CurseForgeFilesResponse = response.json().await?;
return Ok(
result
.data
.into_iter()
.map(|f| Self::convert_file(f, project_id))
.collect(),
);
}
Ok(files)
}

View file

@ -222,7 +222,16 @@ impl PlatformClient for ModrinthPlatform {
url.push_str(&params.join("&"));
}
self.request_project_files_url(&url).await
let files = self.request_project_files_url(&url).await?;
// If server-side filters eliminated all results, retry without them
if files.is_empty() && !params.is_empty() {
let bare_url =
format!("{MODRINTH_API_BASE}/project/{project_id}/version");
return self.request_project_files_url(&bare_url).await;
}
Ok(files)
}
async fn request_project_with_files(

View file

@ -140,7 +140,7 @@ impl DependencyResolver {
} else {
let mut merged = projects.remove(0);
for project in projects {
merged.merge(project);
merged = merged.merged(project)?;
}
Ok(merged)
}