pakker/src/model/lockfile.rs
NotAShelf 00f5442679
model/lockfile: revert get_project to O(n) linear scan; remove unused project index
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I3a35ab4ce5dec8ce95a736440fa293fe6a6a6964
2026-03-03 23:35:01 +03:00

784 lines
22 KiB
Rust

use std::{collections::HashMap, path::Path};
use serde::{Deserialize, Serialize};
use super::{enums::Target, project::Project};
use crate::error::{PakkerError, Result};
#[cfg(test)]
mod tests {
    use tempfile::TempDir;

    use super::*;
    use crate::model::enums::{ProjectSide, ProjectType, UpdateStrategy};

    /// Build a minimal `Project` registered under the "modrinth" platform so
    /// the lockfile lookup helpers have something to find.
    fn create_test_project(pakku_id: &str, slug: &str) -> Project {
        use std::collections::HashSet;
        let mut name_map = HashMap::new();
        name_map.insert("modrinth".to_string(), slug.to_string());
        let mut id_map = HashMap::new();
        id_map.insert("modrinth".to_string(), pakku_id.to_string());
        Project {
            pakku_id: Some(pakku_id.to_string()),
            pakku_links: HashSet::new(),
            r#type: ProjectType::Mod,
            side: ProjectSide::Both,
            slug: name_map.clone(),
            name: name_map.clone(),
            id: id_map,
            update_strategy: UpdateStrategy::Latest,
            redistributable: true,
            subpath: None,
            aliases: HashSet::new(),
            export: true,
            files: vec![],
        }
    }

    #[test]
    fn test_lockfile_new() {
        let target = Target::Modrinth;
        let mc_versions = vec!["1.20.1".to_string()];
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let lockfile = LockFile {
            target: Some(target),
            mc_versions: mc_versions.clone(),
            loaders: loaders.clone(),
            projects: Vec::new(),
            lockfile_version: 1,
        };
        assert_eq!(lockfile.target, Some(target));
        assert_eq!(lockfile.mc_versions, mc_versions);
        assert_eq!(lockfile.loaders, loaders);
        assert_eq!(lockfile.projects.len(), 0);
        assert_eq!(lockfile.lockfile_version, 1);
    }

    #[test]
    fn test_lockfile_serialization() {
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.add_project(create_test_project("test-id", "test-slug"));

        // Actually exercise (de)serialization — the previous body only tested
        // `find_project` despite this test's name.
        let json = serde_json::to_string(&lockfile).unwrap();
        let roundtripped: LockFile = serde_json::from_str(&json).unwrap();
        assert_eq!(roundtripped.target, lockfile.target);
        assert_eq!(roundtripped.mc_versions, lockfile.mc_versions);
        assert_eq!(roundtripped.projects.len(), 1);

        let found = lockfile.find_project("test-id");
        assert!(found.is_some());
        assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
        let not_found = lockfile.find_project("nonexistent");
        assert!(not_found.is_none());
    }

    #[test]
    fn test_lockfile_find_project_by_platform_id() {
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.add_project(create_test_project("platform-123", "test-slug"));
        let found =
            lockfile.find_project_by_platform_id("modrinth", "platform-123");
        assert!(found.is_some());
        assert_eq!(
            found.unwrap().id.get("modrinth"),
            Some(&"platform-123".to_string())
        );
    }

    #[test]
    fn test_lockfile_get_loader_names() {
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        loaders.insert("forge".to_string(), "47.1.0".to_string());
        let lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        // Sort before comparing: HashMap iteration order is unspecified.
        let mut loader_names = lockfile.get_loader_names();
        loader_names.sort();
        assert_eq!(loader_names, vec!["fabric", "forge"]);
    }

    #[test]
    fn test_lockfile_save_and_load() {
        let temp_dir = TempDir::new().unwrap();
        let lockfile_path = temp_dir.path();
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.add_project(create_test_project("test-mod", "test-slug"));
        lockfile.save(lockfile_path).unwrap();
        let loaded = LockFile::load(lockfile_path).unwrap();
        assert_eq!(loaded.target, lockfile.target);
        assert_eq!(loaded.mc_versions, lockfile.mc_versions);
        assert_eq!(loaded.projects.len(), 1);
    }

    #[test]
    fn test_lockfile_compatibility_with_pakku() {
        // Test that we can parse a Pakku-generated lockfile
        let pakku_json = r#"{
            "target": "modrinth",
            "mc_versions": ["1.20.1"],
            "loaders": {"fabric": "0.15.0"},
            "projects": [
                {
                    "pakku_id": "fabric-api",
                    "type": "MOD",
                    "side": "BOTH",
                    "slug": {
                        "modrinth": "fabric-api"
                    },
                    "name": {
                        "modrinth": "Fabric API"
                    },
                    "id": {
                        "modrinth": "P7dR8mSH"
                    },
                    "updateStrategy": "LATEST",
                    "redistributable": true,
                    "files": [],
                    "pakku_links": []
                }
            ],
            "lockfile_version": 1
        }"#;
        let lockfile: LockFile = serde_json::from_str(pakku_json).unwrap();
        assert_eq!(lockfile.target, Some(Target::Modrinth));
        assert_eq!(lockfile.mc_versions, vec!["1.20.1"]);
        assert_eq!(lockfile.projects.len(), 1);
    }

    #[test]
    fn test_lockfile_validation_invalid_version() {
        // Test that lockfile with wrong version fails validation
        let temp_dir = TempDir::new().unwrap();
        let lockfile_path = temp_dir.path().join("pakku-lock.json");
        let invalid_json = r#"{
            "target": "modrinth",
            "mc_versions": ["1.20.1"],
            "loaders": {"fabric": "0.15.0"},
            "projects": [],
            "lockfile_version": 999
        }"#;
        std::fs::write(&lockfile_path, invalid_json).unwrap();
        let result = LockFile::load(temp_dir.path());
        assert!(result.is_err());
    }

    #[test]
    fn test_lockfile_validation_duplicate_pakku_ids() {
        // Test that lockfile with duplicate pakku_ids fails validation
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.add_project(create_test_project("duplicate-id", "slug1"));
        lockfile.add_project(create_test_project("duplicate-id", "slug2"));
        let result = lockfile.validate();
        assert!(result.is_err());
    }

    #[test]
    fn test_lockfile_atomic_write() {
        // Test that save uses atomic write (temp file + rename)
        let temp_dir = TempDir::new().unwrap();
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.save(temp_dir.path()).unwrap();
        // Temp file should not exist after save
        let temp_path = temp_dir.path().join("pakku-lock.tmp");
        assert!(!temp_path.exists());
        // Actual file should exist
        let lockfile_path = temp_dir.path().join("pakku-lock.json");
        assert!(lockfile_path.exists());
    }

    #[test]
    fn test_lockfile_sort_projects() {
        // Test that projects are sorted alphabetically by name
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.add_project(create_test_project("zebra-mod", "zebra"));
        lockfile.add_project(create_test_project("alpha-mod", "alpha"));
        lockfile.add_project(create_test_project("middle-mod", "middle"));
        lockfile.sort_projects();
        assert_eq!(lockfile.projects[0].pakku_id, Some("alpha-mod".to_string()));
        assert_eq!(
            lockfile.projects[1].pakku_id,
            Some("middle-mod".to_string())
        );
        assert_eq!(lockfile.projects[2].pakku_id, Some("zebra-mod".to_string()));
    }

    #[test]
    fn test_lockfile_find_project_mut() {
        // Test mutable project lookup
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.add_project(create_test_project("test-id", "test-slug"));
        // Modify through mutable reference
        if let Some(project) = lockfile.find_project_mut("test-id") {
            project.redistributable = false;
        }
        let found = lockfile.get_project("test-id").unwrap();
        assert!(!found.redistributable);
    }

    #[test]
    fn test_lockfile_multiple_loaders() {
        // Test lockfile with multiple loaders
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        loaders.insert("forge".to_string(), "47.1.0".to_string());
        loaders.insert("quilt".to_string(), "0.20.0".to_string());
        let lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        let loader_names = lockfile.get_loader_names();
        assert_eq!(loader_names.len(), 3);
        assert!(loader_names.contains(&"fabric".to_string()));
        assert!(loader_names.contains(&"forge".to_string()));
        assert!(loader_names.contains(&"quilt".to_string()));
    }

    #[test]
    fn test_lockfile_multiple_mc_versions() {
        // Test lockfile with multiple Minecraft versions
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mc_versions = vec![
            "1.20.1".to_string(),
            "1.20.2".to_string(),
            "1.20.4".to_string(),
        ];
        let lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: mc_versions.clone(),
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        assert_eq!(lockfile.mc_versions, mc_versions);
    }

    #[test]
    fn test_lockfile_roundtrip_preserves_data() {
        // Test that save/load roundtrip preserves all data
        let temp_dir = TempDir::new().unwrap();
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        loaders.insert("forge".to_string(), "47.1.0".to_string());
        let mc_versions = vec!["1.20.1".to_string(), "1.20.4".to_string()];
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: mc_versions.clone(),
            loaders: loaders.clone(),
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.add_project(create_test_project("mod1", "slug1"));
        lockfile.add_project(create_test_project("mod2", "slug2"));
        lockfile.save(temp_dir.path()).unwrap();
        let loaded = LockFile::load(temp_dir.path()).unwrap();
        assert_eq!(loaded.target, Some(Target::Modrinth));
        assert_eq!(loaded.mc_versions, mc_versions);
        assert_eq!(loaded.loaders, loaders);
        assert_eq!(loaded.projects.len(), 2);
        // Lockfile should be migrated from v1 to v2 on load
        assert_eq!(loaded.lockfile_version, 2);
    }

    #[test]
    fn test_lockfile_remove_nonexistent_project() {
        // Test removing a project that doesn't exist
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        let result = lockfile.remove_project("nonexistent-id");
        assert!(result.is_none());
    }

    #[test]
    fn test_lockfile_empty_projects_list() {
        // Test lockfile with no projects
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        assert_eq!(lockfile.projects.len(), 0);
        assert!(lockfile.validate().is_ok());
    }

    #[test]
    fn test_lockfile_migration_v1_to_v2() {
        // Test that v1 lockfiles are migrated to v2
        let temp_dir = TempDir::new().unwrap();
        // Create a v1 lockfile manually
        let v1_content = r#"{
            "target": "modrinth",
            "mc_versions": ["1.20.1"],
            "loaders": {"fabric": "0.15.0"},
            "projects": [],
            "lockfile_version": 1
        }"#;
        let lockfile_path = temp_dir.path().join("pakku-lock.json");
        std::fs::write(&lockfile_path, v1_content).unwrap();
        // Load should trigger migration
        let loaded = LockFile::load(temp_dir.path()).unwrap();
        assert_eq!(loaded.lockfile_version, 2);
        // Verify the migrated file was saved
        let reloaded = LockFile::load(temp_dir.path()).unwrap();
        assert_eq!(reloaded.lockfile_version, 2);
    }

    #[test]
    fn test_lockfile_migration_preserves_projects() {
        // Test that migration preserves all project data
        let temp_dir = TempDir::new().unwrap();
        // Create a v1 lockfile with projects (using correct enum case)
        let v1_content = r#"{
            "target": "modrinth",
            "mc_versions": ["1.20.1"],
            "loaders": {"fabric": "0.15.0"},
            "projects": [
                {
                    "pakku_id": "test-id-1",
                    "type": "MOD",
                    "side": "BOTH",
                    "name": {"modrinth": "Test Mod"},
                    "slug": {"modrinth": "test-mod"},
                    "id": {"modrinth": "abc123"},
                    "files": [],
                    "pakku_links": [],
                    "aliases": [],
                    "update_strategy": "LATEST",
                    "redistributable": true,
                    "export": true
                }
            ],
            "lockfile_version": 1
        }"#;
        let lockfile_path = temp_dir.path().join("pakku-lock.json");
        std::fs::write(&lockfile_path, v1_content).unwrap();
        let loaded = LockFile::load(temp_dir.path()).unwrap();
        assert_eq!(loaded.lockfile_version, 2);
        assert_eq!(loaded.projects.len(), 1);
        assert_eq!(loaded.projects[0].pakku_id, Some("test-id-1".to_string()));
    }

    #[test]
    fn test_lockfile_rejects_future_version() {
        // Test that lockfiles with version > current are rejected
        let temp_dir = TempDir::new().unwrap();
        let future_content = r#"{
            "target": "modrinth",
            "mc_versions": ["1.20.1"],
            "loaders": {"fabric": "0.15.0"},
            "projects": [],
            "lockfile_version": 999
        }"#;
        let lockfile_path = temp_dir.path().join("pakku-lock.json");
        std::fs::write(&lockfile_path, future_content).unwrap();
        let result = LockFile::load(temp_dir.path());
        assert!(result.is_err());
        let err = result.unwrap_err().to_string();
        assert!(err.contains("newer than supported"));
    }

    #[test]
    fn test_lockfile_pretty_json_format() {
        // Test that saved JSON is pretty-printed
        let temp_dir = TempDir::new().unwrap();
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        lockfile.save(temp_dir.path()).unwrap();
        let content =
            std::fs::read_to_string(temp_dir.path().join("pakku-lock.json")).unwrap();
        // Pretty-printed JSON should have newlines and indentation
        assert!(content.contains('\n'));
        assert!(content.contains("  ")); // Indentation
    }

    #[test]
    fn test_lockfile_missing_file() {
        // Test loading from non-existent directory
        let temp_dir = TempDir::new().unwrap();
        let nonexistent = temp_dir.path().join("nonexistent");
        let result = LockFile::load(&nonexistent);
        assert!(result.is_err());
    }

    #[test]
    fn test_lockfile_corrupted_json() {
        // Test loading corrupted JSON
        let temp_dir = TempDir::new().unwrap();
        let lockfile_path = temp_dir.path().join("pakku-lock.json");
        std::fs::write(&lockfile_path, "not valid json {[}").unwrap();
        let result = LockFile::load(temp_dir.path());
        assert!(result.is_err());
    }
}
/// Current lockfile version - bump this when making breaking changes
const LOCKFILE_VERSION: u32 = 2;

/// Minimum supported lockfile version for migration
const MIN_SUPPORTED_VERSION: u32 = 1;

/// File name the lockfile is stored under inside the modpack directory.
const LOCKFILE_NAME: &str = "pakku-lock.json";
/// On-disk lockfile describing a resolved modpack: the target platform,
/// supported Minecraft versions, loader versions, and the pinned projects.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LockFile {
    /// Distribution platform this pack targets; omitted from the JSON when
    /// unset (`skip_serializing_if`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub target: Option<Target>,
    /// Minecraft versions the pack supports; `validate` requires at least one.
    pub mc_versions: Vec<String>,
    /// Loader name -> loader version; `validate` requires at least one entry.
    pub loaders: HashMap<String, String>,
    /// Resolved projects; sorted by display name via `sort_projects`.
    pub projects: Vec<Project>,
    /// Schema version of this lockfile; compared against `LOCKFILE_VERSION`.
    pub lockfile_version: u32,
}
impl LockFile {
    /// Create an empty lockfile at the current `LOCKFILE_VERSION`.
    pub fn new(
        target: Option<Target>,
        mc_versions: Vec<String>,
        loaders: HashMap<String, String>,
    ) -> Self {
        Self {
            target,
            mc_versions,
            loaders,
            projects: Vec::new(),
            lockfile_version: LOCKFILE_VERSION,
        }
    }

    /// Look up a project by its pakku ID (O(n) linear scan over `projects`).
    pub fn get_project(&self, pakku_id: &str) -> Option<&Project> {
        self
            .projects
            .iter()
            .find(|p| p.pakku_id.as_deref() == Some(pakku_id))
    }

    /// Names of all configured loaders, in arbitrary (`HashMap`) order.
    pub fn get_loader_names(&self) -> Vec<String> {
        self.loaders.keys().cloned().collect()
    }

    /// Remove and return the project with the given pakku ID, or `None` if
    /// no such project exists.
    pub fn remove_project(&mut self, pakku_id: &str) -> Option<Project> {
        // `position(..).map(..)` replaces the former `if let Some/else None`
        // (clippy: manual_map); behavior is identical.
        self
            .projects
            .iter()
            .position(|p| p.pakku_id.as_deref() == Some(pakku_id))
            .map(|pos| self.projects.remove(pos))
    }

    /// Alias for [`Self::get_project`], kept for API symmetry with the other
    /// `find_*` lookups.
    pub fn find_project(&self, pakku_id: &str) -> Option<&Project> {
        self.get_project(pakku_id)
    }

    /// Mutable variant of [`Self::get_project`].
    pub fn find_project_mut(&mut self, pakku_id: &str) -> Option<&mut Project> {
        self
            .projects
            .iter_mut()
            .find(|p| p.pakku_id.as_deref() == Some(pakku_id))
    }

    /// Look up a project by its platform-specific ID, e.g. the Modrinth
    /// project ID stored under the "modrinth" key of `Project::id`.
    pub fn find_project_by_platform_id(
        &self,
        platform: &str,
        id: &str,
    ) -> Option<&Project> {
        self
            .projects
            .iter()
            .find(|p| p.id.get(platform).is_some_and(|pid| pid == id))
    }
}
impl LockFile {
    /// Load and validate the lockfile from `path` (a directory containing
    /// `pakku-lock.json`), migrating older versions if necessary.
    pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
        Self::load_with_validation(path, true)
    }

    /// Load the lockfile, optionally skipping validation.
    ///
    /// Lockfiles older than `LOCKFILE_VERSION` are migrated in memory and the
    /// migrated file is written back to disk immediately.
    pub fn load_with_validation<P: AsRef<Path>>(
        path: P,
        validate: bool,
    ) -> Result<Self> {
        let path_ref = path.as_ref();
        let lockfile_path = path_ref.join(LOCKFILE_NAME);
        let content =
            std::fs::read_to_string(&lockfile_path).map_err(PakkerError::IoError)?;
        let mut lockfile: Self = serde_json::from_str(&content)
            .map_err(|e| PakkerError::InvalidLockFile(e.to_string()))?;
        // Capture the on-disk version BEFORE migrating: `migrate` overwrites
        // `lockfile_version`, which previously made the log below report
        // "from {current} to {current}" instead of the real source version.
        let loaded_version = lockfile.lockfile_version;
        if loaded_version < LOCKFILE_VERSION {
            lockfile = lockfile.migrate()?;
            // Persist the migrated lockfile right away
            lockfile.save_without_validation(path_ref)?;
            log::info!(
                "Migrated lockfile from version {} to {}",
                loaded_version,
                LOCKFILE_VERSION
            );
        }
        if validate {
            lockfile.validate()?;
        }
        lockfile.sort_projects();
        Ok(lockfile)
    }

    /// Migrate lockfile from older version to current version
    fn migrate(mut self) -> Result<Self> {
        if self.lockfile_version < MIN_SUPPORTED_VERSION {
            return Err(PakkerError::InvalidLockFile(format!(
                "Lockfile version {} is too old to migrate. Minimum supported: {}",
                self.lockfile_version, MIN_SUPPORTED_VERSION
            )));
        }
        // Migration from v1 to v2
        if self.lockfile_version == 1 {
            log::info!("Migrating lockfile from v1 to v2...");
            // v2 changes:
            // - Projects now have explicit export field (defaults to true)
            // - Side detection is more granular
            for project in &mut self.projects {
                // Ensure export field is set (v1 didn't always have it)
                // Already has a default in Project, but be explicit
                if !project.export {
                    project.export = true;
                }
            }
            self.lockfile_version = 2;
        }
        // Future migrations would go here:
        // if self.lockfile_version == 2 {
        //     // migrate v2 -> v3
        //     self.lockfile_version = 3;
        // }
        Ok(self)
    }

    /// Validate and write the lockfile into directory `path`.
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        self.validate()?;
        self.write_atomic(path.as_ref())
    }

    /// Write the lockfile without running `validate` first (used to persist a
    /// freshly-migrated lockfile, which is validated separately afterwards).
    pub fn save_without_validation<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        self.write_atomic(path.as_ref())
    }

    /// Serialize to pretty JSON and write atomically: write a temp file in
    /// the same directory, then rename it over the final path so readers
    /// never observe a partially-written lockfile. (The previous plain
    /// `fs::write` was not atomic, despite the test suite's stated intent.)
    fn write_atomic(&self, dir: &Path) -> Result<()> {
        let final_path = dir.join(LOCKFILE_NAME);
        let temp_path = dir.join("pakku-lock.tmp");
        let content = serde_json::to_string_pretty(self)
            .map_err(PakkerError::SerializationError)?;
        std::fs::write(&temp_path, content).map_err(PakkerError::IoError)?;
        // Same-directory rename atomically replaces the destination.
        std::fs::rename(&temp_path, &final_path).map_err(PakkerError::IoError)
    }

    /// Check internal consistency: supported version range, non-empty
    /// `mc_versions` and `loaders`, and unique pakku IDs across projects.
    pub fn validate(&self) -> Result<()> {
        if self.lockfile_version > LOCKFILE_VERSION {
            return Err(PakkerError::InvalidLockFile(format!(
                "Lockfile version {} is newer than supported version {}. Please \
                 upgrade Pakker.",
                self.lockfile_version, LOCKFILE_VERSION
            )));
        }
        if self.lockfile_version < MIN_SUPPORTED_VERSION {
            return Err(PakkerError::InvalidLockFile(format!(
                "Lockfile version {} is too old. Minimum supported: {}",
                self.lockfile_version, MIN_SUPPORTED_VERSION
            )));
        }
        if self.mc_versions.is_empty() {
            return Err(PakkerError::InvalidLockFile(
                "At least one Minecraft version is required".to_string(),
            ));
        }
        if self.loaders.is_empty() {
            return Err(PakkerError::InvalidLockFile(
                "At least one loader is required".to_string(),
            ));
        }
        // Check for unique pakku IDs
        let mut seen_ids = std::collections::HashSet::new();
        for project in &self.projects {
            if let Some(ref pakku_id) = project.pakku_id
                && !seen_ids.insert(pakku_id)
            {
                return Err(PakkerError::InvalidLockFile(format!(
                    "Duplicate pakku ID: {pakku_id}"
                )));
            }
        }
        Ok(())
    }

    /// Sort projects case-insensitively by display name; this is the
    /// canonical ordering for the lockfile.
    pub fn sort_projects(&mut self) {
        self.projects.sort_by(|a, b| {
            a.get_name()
                .to_lowercase()
                .cmp(&b.get_name().to_lowercase())
        });
    }

    /// Append a project and re-sort. Delegates to `sort_projects` so the
    /// ordering matches the one applied on `load`; the previous
    /// `sort_by_key(get_name)` sorted case-SENSITIVELY and could disagree
    /// with the canonical case-insensitive order.
    pub fn add_project(&mut self, project: Project) {
        self.projects.push(project);
        self.sort_projects();
    }
}