treewide: fix clippy lints
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I411be69ff31f9cb39cd4cdebc8985b366a6a6964
parent b93b234fc2 · commit 61ced09d25
43 changed files with 558 additions and 464 deletions
@@ -84,7 +84,7 @@ pub enum Commands {
     Credentials(CredentialsArgs),

     /// Configure modpack properties
-    Cfg(CfgArgs),
+    Cfg(Box<CfgArgs>),

     /// Manage fork configuration
     Fork(ForkArgs),
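Note: boxing the variant payload addresses clippy's `large_enum_variant` lint: an enum is as large as its largest variant, so one oversized args struct inflates every `Commands` value. A minimal, self-contained sketch of the idea (type and payload sizes hypothetical, not the actual `CfgArgs`):

    // Hypothetical illustration of clippy::large_enum_variant.
    enum Command {
        Small(u8),         // the whole enum is still ~200 bytes...
        Large([u8; 200]),  // ...because of this variant
    }

    enum SlimCommand {
        Small(u8),
        Large(Box<[u8; 200]>), // boxed: the variant is now pointer-sized
    }

    fn main() {
        assert!(std::mem::size_of::<SlimCommand>() < std::mem::size_of::<Command>());
    }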
@@ -12,7 +12,7 @@ fn get_loaders(lockfile: &LockFile) -> Vec<String> {
 }

 pub fn create_all_platforms()
--> Result<HashMap<String, Box<dyn crate::platform::PlatformClient>>> {
+-> HashMap<String, Box<dyn crate::platform::PlatformClient>> {
     const MODRINTH: &str = "modrinth";
     const CURSEFORGE: &str = "curseforge";

@@ -27,7 +27,7 @@ pub fn create_all_platforms()
         platforms.insert(CURSEFORGE.to_owned(), platform);
     }

-    Ok(platforms)
+    platforms
 }

 async fn resolve_input(
@@ -55,6 +55,10 @@ use std::path::Path;

 use crate::{cli::AddArgs, model::fork::LocalConfig};

+#[expect(
+    clippy::future_not_send,
+    reason = "not required to be Send; only called from single-threaded context"
+)]
 pub async fn execute(
     args: AddArgs,
     global_yes: bool,

@@ -66,8 +70,8 @@ pub async fn execute(

     // Load lockfile
     // Load expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     // Check if lockfile exists (try both pakker-lock.json and pakku-lock.json)
     let lockfile_exists =
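Note: the recurring `unwrap_or(Path::new("."))` → `unwrap_or_else(|| Path::new("."))` rewrites follow clippy's `or_fun_call` lint: the argument of `unwrap_or` is evaluated eagerly even when the `Option` is `Some`, while `unwrap_or_else` defers the call into a closure. For `Path::new` the cost difference is negligible; the change mainly applies the lint consistently treewide. A minimal sketch:

    use std::path::Path;

    fn main() {
        let p = Path::new("pakku-lock.json");
        // Eager: Path::new(".") runs even though parent() is Some here.
        let _dir = p.parent().unwrap_or(Path::new("."));
        // Lazy: the closure only runs when parent() is None.
        let dir = p.parent().unwrap_or_else(|| Path::new("."));
        assert_eq!(dir, Path::new(""));
    }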
@@ -110,7 +114,7 @@ pub async fn execute(
     let parent_lockfile = parent_paths
         .iter()
         .find(|path| path.exists())
-        .and_then(|path| LockFile::load(path.parent().unwrap()).ok())
+        .and_then(|path| LockFile::load(path.parent()?).ok())
         .ok_or_else(|| {
             PakkerError::IoError(std::io::Error::new(
                 std::io::ErrorKind::NotFound,

@@ -141,7 +145,7 @@ pub async fn execute(
     let _config = Config::load(config_dir).ok();

     // Create platforms
-    let platforms = create_all_platforms()?;
+    let platforms = create_all_platforms();

     let mut new_projects = Vec::new();
     let mut errors = MultiError::new();
@@ -44,6 +44,14 @@ fn get_loaders(lockfile: &LockFile) -> Vec<String> {
     lockfile.loaders.keys().cloned().collect()
 }

+#[expect(
+    clippy::future_not_send,
+    reason = "not required to be Send; only called from single-threaded context"
+)]
+#[expect(
+    clippy::too_many_arguments,
+    reason = "CLI command handler maps directly from clap args"
+)]
 pub async fn execute(
     cf_arg: Option<String>,
     mr_arg: Option<String>,
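Note: `#[expect(...)]` (stable since Rust 1.81) behaves like `#[allow(...)]`, except the compiler warns when the expected lint stops firing, so suppressions cannot silently go stale; the `reason` string is surfaced in diagnostics. A minimal sketch (function and arguments hypothetical):

    // If this handler is later refactored down to 7 arguments or fewer,
    // clippy warns that the expectation is unfulfilled.
    #[expect(
        clippy::too_many_arguments,
        reason = "CLI command handler maps directly from clap args"
    )]
    fn handler(a: u8, b: u8, c: u8, d: u8, e: u8, f: u8, g: u8, h: u8) {
        let _ = (a, b, c, d, e, f, g, h);
    }

    fn main() {
        handler(1, 2, 3, 4, 5, 6, 7, 8);
    }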
@@ -71,8 +79,8 @@ pub async fn execute(
     log::info!("Adding project with explicit platform specification");

     // Load lockfile
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     let mut lockfile = LockFile::load(lockfile_dir)?;

@@ -258,7 +266,7 @@ pub async fn execute(
     if !no_deps {
         log::info!("Resolving dependencies...");

-        let platforms = create_all_platforms()?;
+        let platforms = create_all_platforms();
         let mut resolver = DependencyResolver::new();

         let deps = resolver

@@ -304,7 +312,7 @@ pub async fn execute(
 }

 fn create_all_platforms()
--> Result<HashMap<String, Box<dyn crate::platform::PlatformClient>>> {
+-> HashMap<String, Box<dyn crate::platform::PlatformClient>> {
     let mut platforms = HashMap::new();

     if let Ok(platform) = create_platform("modrinth", None) {

@@ -321,7 +329,7 @@ fn create_all_platforms()
         platforms.insert("github".to_string(), platform);
     }

-    Ok(platforms)
+    platforms
 }

 #[cfg(test)]
@@ -8,6 +8,10 @@ use crate::{
     ui_utils::prompt_input_optional,
 };

+#[expect(
+    clippy::too_many_arguments,
+    reason = "CLI command handler maps directly from clap args"
+)]
 pub fn execute(
     config_path: &Path,
     name: Option<String>,

@@ -20,21 +24,27 @@ pub fn execute(
     worlds_path: Option<String>,
     shaders_path: Option<String>,
 ) -> Result<()> {
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
     let mut config = Config::load(config_dir)?;
-    let mut changed = false;
+    let mut changed = name.is_some()
+        || version.is_some()
+        || description.is_some()
+        || author.is_some()
+        || mods_path.is_some()
+        || resource_packs_path.is_some()
+        || data_packs_path.is_some()
+        || worlds_path.is_some()
+        || shaders_path.is_some();

     // Modpack properties
     if let Some(new_name) = name {
-        config.name = new_name.clone();
+        config.name.clone_from(&new_name);
         println!("{}", format!("✓ 'name' set to '{new_name}'").green());
-        changed = true;
     }

     if let Some(new_version) = version {
-        config.version = new_version.clone();
+        config.version.clone_from(&new_version);
         println!("{}", format!("✓ 'version' set to '{new_version}'").green());
-        changed = true;
     }

     if let Some(new_description) = description {
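Note: `config.name.clone_from(&new_name)` follows clippy's `assigning_clones` lint: `a = b.clone()` builds a fresh value and drops the old one, while `Clone::clone_from` may reuse `a`'s existing allocation (e.g. a `String` buffer with enough capacity). A minimal sketch:

    fn main() {
        let mut name = String::with_capacity(64);
        name.push_str("old-modpack-name");
        let new_name = String::from("new");

        // May reuse name's existing 64-byte buffer instead of reallocating.
        name.clone_from(&new_name);
        assert_eq!(name, "new");
        assert!(name.capacity() >= 64);
    }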
@@ -43,20 +53,17 @@ pub fn execute(
             "{}",
             format!("✓ 'description' set to '{new_description}'").green()
         );
-        changed = true;
     }

     if let Some(new_author) = author {
         config.author = Some(new_author.clone());
         println!("{}", format!("✓ 'author' set to '{new_author}'").green());
-        changed = true;
     }

     // Project type paths
     if let Some(path) = mods_path {
         config.paths.insert("mod".to_string(), path.clone());
         println!("{}", format!("✓ 'paths.mod' set to '{path}'").green());
-        changed = true;
     }

     if let Some(path) = resource_packs_path {

@@ -67,25 +74,21 @@ pub fn execute(
             "{}",
             format!("✓ 'paths.resource-pack' set to '{path}'").green()
         );
-        changed = true;
     }

     if let Some(path) = data_packs_path {
         config.paths.insert("data-pack".to_string(), path.clone());
         println!("{}", format!("✓ 'paths.data-pack' set to '{path}'").green());
-        changed = true;
     }

     if let Some(path) = worlds_path {
         config.paths.insert("world".to_string(), path.clone());
         println!("{}", format!("✓ 'paths.world' set to '{path}'").green());
-        changed = true;
     }

     if let Some(path) = shaders_path {
         config.paths.insert("shader".to_string(), path.clone());
         println!("{}", format!("✓ 'paths.shader' set to '{path}'").green());
-        changed = true;
     }

     if !changed {

@@ -99,13 +102,13 @@ pub fn execute(

     // Prompt for each configurable field
     if let Ok(Some(new_name)) = prompt_input_optional("  Name") {
-        config.name = new_name.clone();
+        config.name.clone_from(&new_name);
         println!("{}", format!("  ✓ 'name' set to '{new_name}'").green());
         changed = true;
     }

     if let Ok(Some(new_version)) = prompt_input_optional("  Version") {
-        config.version = new_version.clone();
+        config.version.clone_from(&new_version);
         println!(
             "{}",
             format!("  ✓ 'version' set to '{new_version}'").green()

@@ -136,7 +139,7 @@ pub fn execute(
     }

     // Config::save expects directory path, not file path
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
     config.save(config_dir)?;
     println!("\n{}", "Configuration updated successfully".green().bold());
@@ -11,10 +11,14 @@ use crate::{
     },
 };

+#[expect(
+    clippy::too_many_arguments,
+    reason = "CLI command handler maps directly from clap args"
+)]
 pub fn execute(
     config_path: &Path,
     lockfile_path: &Path,
-    project: String,
+    project: &str,
     r#type: Option<&str>,
     side: Option<&str>,
     update_strategy: Option<&str>,

@@ -24,30 +28,30 @@ pub fn execute(
     remove_alias: Option<String>,
     export: Option<bool>,
 ) -> Result<()> {
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
     let mut config = Config::load(config_dir)?;
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
     let lockfile = LockFile::load(lockfile_dir)?;

     // Find the project in lockfile to get its pakku_id
     // Try multiple lookup strategies: pakku_id first, then slug, then name
     let found_project = lockfile
-        .get_project(&project)
+        .get_project(project)
         .or_else(|| {
             // Try to find by slug on any platform
             lockfile
                 .projects
                 .iter()
-                .find(|p| p.slug.values().any(|s| s.eq_ignore_ascii_case(&project)))
+                .find(|p| p.slug.values().any(|s| s.eq_ignore_ascii_case(project)))
         })
         .or_else(|| {
             // Try to find by name on any platform
             lockfile
                 .projects
                 .iter()
-                .find(|p| p.name.values().any(|n| n.eq_ignore_ascii_case(&project)))
+                .find(|p| p.name.values().any(|n| n.eq_ignore_ascii_case(project)))
         })
-        .ok_or_else(|| PakkerError::ProjectNotFound(project.clone()))?;
+        .ok_or_else(|| PakkerError::ProjectNotFound(project.to_string()))?;

     let pakku_id = found_project.pakku_id.as_ref().ok_or_else(|| {
         PakkerError::InvalidProject("Project has no pakku_id".to_string())
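Note: switching `project: String` to `project: &str` (and `&project` to `project` at the use sites) follows clippy's `needless_pass_by_value`: the function only reads the value, so borrowing avoids forcing callers to give up ownership; the one place that needs an owned value converts explicitly with `to_string()`. A minimal sketch:

    // Before: callers must move (or clone) a String.
    fn find_owned(project: String) -> bool {
        project.eq_ignore_ascii_case("sodium")
    }

    // After: callers can pass any &str (or &String via deref).
    fn find(project: &str) -> bool {
        project.eq_ignore_ascii_case("sodium")
    }

    fn main() {
        let arg = String::from("Sodium");
        assert!(find(&arg));
        assert!(find_owned(arg));
    }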
@@ -59,7 +63,14 @@ pub fn execute(
         .cloned()
         .unwrap_or_default();

-    let mut changed = false;
+    let changed = r#type.is_some()
+        || side.is_some()
+        || update_strategy.is_some()
+        || redistributable.is_some()
+        || subpath.is_some()
+        || add_alias.is_some()
+        || remove_alias.is_some()
+        || export.is_some();

     if let Some(type_str) = r#type {
         let parsed_type = match type_str.to_uppercase().as_str() {

@@ -79,7 +90,6 @@ pub fn execute(
             "{}",
             format!("✓ 'type' set to '{parsed_type:?}' for '{pakku_id}'").green()
         );
-        changed = true;
     }

     if let Some(side_str) = side {

@@ -98,7 +108,6 @@ pub fn execute(
             "{}",
             format!("✓ 'side' set to '{parsed_side:?}' for '{pakku_id}'").green()
         );
-        changed = true;
     }

     if let Some(strategy_str) = update_strategy {

@@ -119,7 +128,6 @@ pub fn execute(
             )
             .green()
         );
-        changed = true;
     }

     if let Some(new_redistributable) = redistributable {

@@ -131,7 +139,6 @@ pub fn execute(
             )
             .green()
         );
-        changed = true;
     }

     if let Some(new_subpath) = subpath {

@@ -140,7 +147,6 @@ pub fn execute(
             "{}",
             format!("✓ 'subpath' set to '{new_subpath}' for '{pakku_id}'").green()
         );
-        changed = true;
     }

     if let Some(alias_to_add) = add_alias {

@@ -152,7 +158,6 @@ pub fn execute(
                 "{}",
                 format!("✓ Added alias '{alias_to_add}' for '{pakku_id}'").green()
             );
-            changed = true;
         }
     }

@@ -165,7 +170,6 @@ pub fn execute(
             "{}",
             format!("✓ Removed alias '{alias_to_remove}' from '{pakku_id}'").green()
         );
-        changed = true;
     }

     if let Some(new_export) = export {

@@ -174,7 +178,6 @@ pub fn execute(
             "{}",
             format!("✓ 'export' set to '{new_export}' for '{pakku_id}'").green()
         );
-        changed = true;
     }

     if !changed {

@@ -187,7 +190,7 @@ pub fn execute(

     config.set_project_config(pakku_id.clone(), project_config);
     // Config::save expects directory path, not file path
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
     config.save(config_dir)?;

     println!(
@@ -33,7 +33,7 @@ pub fn execute(
         return Ok(());
     }

-    let creds = ResolvedCredentials::load()?;
+    let creds = ResolvedCredentials::load();

     let has_any = creds.curseforge_api_key().is_some()
         || creds.modrinth_token().is_some()
@@ -9,7 +9,9 @@ pub fn execute(
     github_access_token: Option<String>,
 ) -> Result<()> {
     let mut creds = PakkerCredentialsFile::load()?;
-    let mut updated_any = false;
+    let updated_any = curseforge_api_key.is_some()
+        || modrinth_token.is_some()
+        || github_access_token.is_some();

     if let Some(key) = curseforge_api_key {
         let key = key.trim().to_string();

@@ -22,7 +24,6 @@ pub fn execute(
         println!("Setting CurseForge API key...");
         set_keyring_secret("curseforge_api_key", &key)?;
         creds.curseforge_api_key = Some(key);
-        updated_any = true;
     }

     if let Some(token) = modrinth_token {

@@ -36,7 +37,6 @@ pub fn execute(
         println!("Setting Modrinth token...");
         set_keyring_secret("modrinth_token", &token)?;
         creds.modrinth_token = Some(token);
-        updated_any = true;
     }

     if let Some(token) = github_access_token {

@@ -50,7 +50,6 @@ pub fn execute(
         println!("Setting GitHub access token...");
         set_keyring_secret("github_access_token", &token)?;
         creds.github_access_token = Some(token);
-        updated_any = true;
     }

     if !updated_any {
@@ -1,5 +1,6 @@
 use std::{
     collections::{HashMap, HashSet},
+    fmt::Write,
     fs,
     path::Path,
 };

@@ -21,20 +22,20 @@ struct ProjectChange {
     new_file: Option<String>,
 }

-pub fn execute(args: DiffArgs, _lockfile_path: &Path) -> Result<()> {
+pub fn execute(args: &DiffArgs, _lockfile_path: &Path) -> Result<()> {
     log::info!("Comparing lockfiles");

     // Load old lockfile
     let old_path = Path::new(&args.old_lockfile);
-    let old_dir = old_path.parent().unwrap_or(Path::new("."));
+    let old_dir = old_path.parent().unwrap_or_else(|| Path::new("."));
     let old_lockfile = LockFile::load(old_dir)?;

     // Load current lockfile
     let current_path = args
         .current_lockfile
         .as_ref()
-        .map_or(Path::new("pakku-lock.json"), Path::new);
-    let current_dir = current_path.parent().unwrap_or(Path::new("."));
+        .map_or_else(|| Path::new("pakku-lock.json"), Path::new);
+    let current_dir = current_path.parent().unwrap_or_else(|| Path::new("."));
     let current_lockfile = LockFile::load(current_dir)?;

     // Compare metadata

@@ -145,6 +146,10 @@ pub fn execute(args: DiffArgs, _lockfile_path: &Path) -> Result<()> {
     Ok(())
 }

+#[expect(
+    clippy::too_many_arguments,
+    reason = "diff formatting requires all display parameters"
+)]
 fn print_terminal_diff(
     old: &LockFile,
     new: &LockFile,

@@ -243,6 +248,10 @@ fn print_terminal_diff(
         }
     }

+#[expect(
+    clippy::too_many_arguments,
+    reason = "diff markdown writer requires all context parameters"
+)]
 fn write_markdown_diff(
     path: &str,
     old: &LockFile,

@@ -260,17 +269,17 @@ fn write_markdown_diff(

     // Metadata changes
     if old.target != new.target {
-        content.push_str(&format!("- Target: {:?}\n", old.target));
-        content.push_str(&format!("+ Target: {:?}\n", new.target));
+        let _ = writeln!(content, "- Target: {:?}", old.target);
+        let _ = writeln!(content, "+ Target: {:?}", new.target);
     }

     if !mc_removed.is_empty() || !mc_added.is_empty() {
         content.push_str("\nMinecraft Versions:\n");
         for v in mc_removed {
-            content.push_str(&format!("- {v}\n"));
+            let _ = writeln!(content, "- {v}");
         }
         for v in mc_added {
-            content.push_str(&format!("+ {v}\n"));
+            let _ = writeln!(content, "+ {v}");
         }
     }
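Note: the `content.push_str(&format!(...))` → `let _ = writeln!(content, ...)` rewrites follow clippy's `format_push_string` lint: `format!` allocates a temporary `String` that is immediately copied and dropped, while `write!`/`writeln!` through `std::fmt::Write` format directly into the existing buffer (which is why `fmt::Write` joins the imports above). Writing into a `String` cannot fail, hence `let _ =` to discard the `fmt::Result`. A minimal sketch:

    use std::fmt::Write; // brings write!/writeln! for String into scope

    fn main() {
        let mut content = String::new();
        let (name, old_ver, new_ver) = ("fabric", "0.15.0", "0.16.2");

        // Allocates a temporary String, then copies it into content.
        content.push_str(&format!("- {name}: {old_ver}\n"));
        // Formats straight into content's buffer; infallible for String.
        let _ = writeln!(content, "+ {name}: {new_ver}");

        assert_eq!(content, "- fabric: 0.15.0\n+ fabric: 0.16.2\n");
    }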
@@ -278,16 +287,16 @@ fn write_markdown_diff(
     for (name, old_ver) in old_loaders {
         if let Some(new_ver) = new_loaders.get(name) {
             if old_ver != new_ver {
-                content.push_str(&format!("- {name}: {old_ver}\n"));
-                content.push_str(&format!("+ {name}: {new_ver}\n"));
+                let _ = writeln!(content, "- {name}: {old_ver}");
+                let _ = writeln!(content, "+ {name}: {new_ver}");
             }
         } else {
-            content.push_str(&format!("- {name}: {old_ver}\n"));
+            let _ = writeln!(content, "- {name}: {old_ver}");
         }
     }
     for (name, new_ver) in new_loaders {
         if !old_loaders.contains_key(name) {
-            content.push_str(&format!("+ {name}: {new_ver}\n"));
+            let _ = writeln!(content, "+ {name}: {new_ver}");
         }
     }

@@ -297,16 +306,16 @@ fn write_markdown_diff(
     for change in changes {
         match change.change_type {
             ChangeType::Added => {
-                content.push_str(&format!("+ {}", change.name));
+                let _ = write!(content, "+ {}", change.name);
                 if verbose && let Some(file) = &change.new_file {
-                    content.push_str(&format!(" ({file})"));
+                    let _ = write!(content, " ({file})");
                 }
                 content.push('\n');
             },
             ChangeType::Removed => {
-                content.push_str(&format!("- {}", change.name));
+                let _ = write!(content, "- {}", change.name);
                 if verbose && let Some(file) = &change.old_file {
-                    content.push_str(&format!(" ({file})"));
+                    let _ = write!(content, " ({file})");
                 }
                 content.push('\n');
             },

@@ -314,11 +323,11 @@ fn write_markdown_diff(
                 if verbose {
                     if let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
                     {
-                        content.push_str(&format!("- {} ({})\n", change.name, old));
-                        content.push_str(&format!("+ {} ({})\n", change.name, new));
+                        let _ = writeln!(content, "- {} ({})", change.name, old);
+                        let _ = writeln!(content, "+ {} ({})", change.name, new);
                     }
                 } else {
-                    content.push_str(&format!("~ {}\n", change.name));
+                    let _ = writeln!(content, "~ {}", change.name);
                 }
             },
         }

@@ -331,6 +340,10 @@ fn write_markdown_diff(
     Ok(())
 }

+#[expect(
+    clippy::too_many_arguments,
+    reason = "diff markdown writer requires all context parameters"
+)]
 fn write_markdown(
     path: &str,
     old: &LockFile,

@@ -346,24 +359,25 @@ fn write_markdown(
     let header = "#".repeat(header_size.min(5));
     let mut content = String::new();

-    content.push_str(&format!("{header} Lockfile Comparison\n\n"));
+    let _ = write!(content, "{header} Lockfile Comparison\n\n");

     // Target
     if old.target != new.target {
-        content.push_str(&format!(
+        let _ = write!(
+            content,
             "**Target:** {:?} → {:?}\n\n",
             old.target, new.target
-        ));
+        );
     }

     // MC versions
     if !mc_removed.is_empty() || !mc_added.is_empty() {
-        content.push_str(&format!("{header} Minecraft Versions\n\n"));
+        let _ = write!(content, "{header} Minecraft Versions\n\n");
         for v in mc_removed {
-            content.push_str(&format!("- ~~{v}~~\n"));
+            let _ = writeln!(content, "- ~~{v}~~");
        }
         for v in mc_added {
-            content.push_str(&format!("- **{v}** (new)\n"));
+            let _ = writeln!(content, "- **{v}** (new)");
         }
         content.push('\n');
     }

@@ -375,29 +389,28 @@ fn write_markdown(
         if let Some(new_ver) = new_loaders.get(name) {
             if old_ver != new_ver {
                 has_loader_changes = true;
-                loader_content
-                    .push_str(&format!("- **{name}:** {old_ver} → {new_ver}\n"));
+                let _ = writeln!(loader_content, "- **{name}:** {old_ver} → {new_ver}");
             }
         } else {
             has_loader_changes = true;
-            loader_content.push_str(&format!("- ~~{name}: {old_ver}~~\n"));
+            let _ = writeln!(loader_content, "- ~~{name}: {old_ver}~~");
         }
     }
     for (name, new_ver) in new_loaders {
         if !old_loaders.contains_key(name) {
             has_loader_changes = true;
-            loader_content.push_str(&format!("- **{name}: {new_ver}** (new)\n"));
+            let _ = writeln!(loader_content, "- **{name}: {new_ver}** (new)");
         }
     }
     if has_loader_changes {
-        content.push_str(&format!("{header} Loaders\n\n"));
+        let _ = write!(content, "{header} Loaders\n\n");
         content.push_str(&loader_content);
         content.push('\n');
     }

     // Projects
     if !changes.is_empty() {
-        content.push_str(&format!("{header} Projects\n\n"));
+        let _ = write!(content, "{header} Projects\n\n");

         let added: Vec<_> = changes
             .iter()

@@ -413,11 +426,11 @@ fn write_markdown(
             .collect();

         if !added.is_empty() {
-            content.push_str(&format!("{}# Added ({})\n\n", header, added.len()));
+            let _ = write!(content, "{}# Added ({})\n\n", header, added.len());
             for change in added {
-                content.push_str(&format!("- **{}**", change.name));
+                let _ = write!(content, "- **{}**", change.name);
                 if verbose && let Some(file) = &change.new_file {
-                    content.push_str(&format!(" ({file})"));
+                    let _ = write!(content, " ({file})");
                 }
                 content.push('\n');
             }

@@ -425,11 +438,11 @@ fn write_markdown(
         }

         if !removed.is_empty() {
-            content.push_str(&format!("{}# Removed ({})\n\n", header, removed.len()));
+            let _ = write!(content, "{}# Removed ({})\n\n", header, removed.len());
             for change in removed {
-                content.push_str(&format!("- ~~{}~~", change.name));
+                let _ = write!(content, "- ~~{}~~", change.name);
                 if verbose && let Some(file) = &change.old_file {
-                    content.push_str(&format!(" ({file})"));
+                    let _ = write!(content, " ({file})");
                 }
                 content.push('\n');
             }

@@ -437,13 +450,13 @@ fn write_markdown(
         }

         if !updated.is_empty() {
-            content.push_str(&format!("{}# Updated ({})\n\n", header, updated.len()));
+            let _ = write!(content, "{}# Updated ({})\n\n", header, updated.len());
             for change in updated {
-                content.push_str(&format!("- **{}**", change.name));
+                let _ = write!(content, "- **{}**", change.name);
                 if verbose
                     && let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
                 {
-                    content.push_str(&format!(" ({old} → {new})"));
+                    let _ = write!(content, " ({old} → {new})");
                 }
                 content.push('\n');
             }
@@ -9,6 +9,7 @@ use crate::{
     utils::hash::compute_sha256_bytes,
 };

+#[expect(clippy::future_not_send, reason = "not required to be Send")]
 pub async fn execute(
     args: ExportArgs,
     lockfile_path: &Path,

@@ -31,8 +32,8 @@ pub async fn execute(
         log::info!("IO errors will be shown during export");
     }

-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     // IPC coordination - prevent concurrent operations on the same modpack
     let ipc = IpcCoordinator::new(config_dir)?;

@@ -113,7 +114,7 @@ pub async fn execute(
         LockFile::load_with_validation(lockfile_dir, false)?;

         // Merge: start with parent, override with local
-        merge_lockfiles(parent_lockfile, local_lockfile, local_cfg)?
+        merge_lockfiles(parent_lockfile, &local_lockfile, local_cfg)
     } else {
         log::info!("No local lockfile - using parent lockfile");
         parent_lockfile

@@ -188,7 +189,7 @@ pub async fn execute(
     };

     // Create exporter
-    let mut exporter = Exporter::new(".");
+    let exporter = Exporter::new(".");

     // Export based on profile argument
     if let Some(profile_name) = args.profile {

@@ -197,7 +198,7 @@ pub async fn execute(
             .export(&profile_name, &lockfile, &config, Path::new(output_path))
             .await?;

-        println!("Export complete: {output_file:?}");
+        println!("Export complete: {}", output_file.display());
     } else {
         // Multi-profile export (Pakker-compatible default behavior)
         let output_files = exporter

@@ -206,7 +207,7 @@ pub async fn execute(

         println!("\nExported {} files:", output_files.len());
         for output_file in output_files {
-            println!("  - {output_file:?}");
+            println!("  - {}", output_file.display());
         }
     }
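Note: `{output_file:?}` Debug-formats a path with surrounding quotes and escape sequences, which is meant for diagnostics; `Path::display()` is the intended human-readable rendering (and what clippy suggests via its path-formatting lints). A minimal sketch:

    use std::path::PathBuf;

    fn main() {
        let output_file = PathBuf::from("build/pack.mrpack");
        assert_eq!(format!("{output_file:?}"), "\"build/pack.mrpack\"");
        assert_eq!(format!("{}", output_file.display()), "build/pack.mrpack");
    }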
@@ -218,9 +219,9 @@ pub async fn execute(
 /// with same slug
 fn merge_lockfiles(
     parent: LockFile,
-    local: LockFile,
+    local: &LockFile,
     local_config: &LocalConfig,
-) -> Result<LockFile> {
+) -> LockFile {
     let mut merged = LockFile {
         target: parent.target,           // Use parent target
         mc_versions: parent.mc_versions, // Use parent MC versions

@@ -298,5 +299,5 @@ fn merge_lockfiles(
         merged.projects.len()
     );

-    Ok(merged)
+    merged
 }
@@ -14,8 +14,8 @@ pub async fn execute(
     config_path: &Path,
 ) -> Result<()> {
     // Load expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     let lockfile = LockFile::load(lockfile_dir)?;
     let config = Config::load(config_dir)?;
@@ -49,7 +49,7 @@ pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> {
         crate::cli::ForkSubcommand::Unset => execute_unset(),
         crate::cli::ForkSubcommand::Sync => execute_sync(),
         crate::cli::ForkSubcommand::Promote { projects } => {
-            execute_promote(projects.clone())
+            execute_promote(projects)
         },
     }
 }

@@ -361,13 +361,11 @@ fn execute_set(
     let config_dir = Path::new(".");
     let mut local_config = LocalConfig::load(config_dir)?;

-    if local_config.parent.is_none() {
+    let Some(mut parent) = local_config.parent else {
         return Err(PakkerError::Fork(
             "No parent configured. Run 'pakku fork init' first.".to_string(),
         ));
-    }
-
-    let mut parent = local_config.parent.unwrap();
+    };

     if let Some(url) = git_url {
         validate_git_url(&url)?;
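Note: the `is_none()` check followed by a later `unwrap()` collapses into a single `let ... else` binding, which removes the `unwrap()` (clippy's `unwrap_used`) and folds the early return into the destructuring. A minimal sketch:

    struct LocalConfig {
        parent: Option<String>,
    }

    fn parent_url(cfg: LocalConfig) -> Result<String, String> {
        // Binds `parent` on the happy path; diverges otherwise.
        let Some(parent) = cfg.parent else {
            return Err("No parent configured".to_string());
        };
        Ok(parent)
    }

    fn main() {
        assert!(parent_url(LocalConfig { parent: None }).is_err());
        assert_eq!(
            parent_url(LocalConfig { parent: Some("url".into()) }).unwrap(),
            "url"
        );
    }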
@@ -461,10 +459,12 @@ fn execute_unset() -> Result<(), PakkerError> {

     // Prompt for confirmation
     print!("Are you sure you want to remove fork configuration? [y/N] ");
-    std::io::stdout().flush().unwrap();
+    std::io::stdout().flush().map_err(PakkerError::IoError)?;

     let mut input = String::new();
-    std::io::stdin().read_line(&mut input).unwrap();
+    std::io::stdin()
+        .read_line(&mut input)
+        .map_err(PakkerError::IoError)?;

     if !input.trim().eq_ignore_ascii_case("y") {
         println!("Cancelled.");

@@ -596,7 +596,7 @@ fn execute_sync() -> Result<(), PakkerError> {
     Ok(())
 }

-fn execute_promote(projects: Vec<String>) -> Result<(), PakkerError> {
+fn execute_promote(projects: &[String]) -> Result<(), PakkerError> {
     let config_dir = Path::new(".");
     let local_config = LocalConfig::load(config_dir)?;

@@ -617,7 +617,7 @@ fn execute_promote(projects: Vec<String>) -> Result<(), PakkerError> {
     let config = Config::load(config_dir)?;

     // Verify all projects exist
-    for project_arg in &projects {
+    for project_arg in projects {
         let found = config
             .projects
             .as_ref()

@@ -635,7 +635,7 @@ fn execute_promote(projects: Vec<String>) -> Result<(), PakkerError> {
     println!("automatically merged with parent projects during export.");
     println!();
     println!("The following projects are already in pakku.json:");
-    for project in &projects {
+    for project in projects {
         println!("  - {project}");
     }
     println!();
@@ -1,4 +1,4 @@
-use std::path::Path;
+use std::{collections::HashMap, path::Path};

 use crate::{
     cli::ImportArgs,

@@ -49,8 +49,8 @@ pub async fn execute(
     let file = std::fs::File::open(path)?;
     let mut archive = zip::ZipArchive::new(file)?;

-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     if archive.by_name("modrinth.index.json").is_ok() {
         drop(archive);

@@ -94,14 +94,15 @@ async fn import_modrinth(
         .unwrap_or("1.20.1")
         .to_string();

-    let loader =
-        if let Some(fabric) = index["dependencies"]["fabric-loader"].as_str() {
-            ("fabric".to_string(), fabric.to_string())
-        } else if let Some(forge) = index["dependencies"]["forge"].as_str() {
-            ("forge".to_string(), forge.to_string())
-        } else {
-            ("fabric".to_string(), "latest".to_string())
-        };
+    let loader = index["dependencies"]["fabric-loader"].as_str().map_or_else(
+        || {
+            index["dependencies"]["forge"].as_str().map_or_else(
+                || ("fabric".to_string(), "latest".to_string()),
+                |forge| ("forge".to_string(), forge.to_string()),
+            )
+        },
+        |fabric| ("fabric".to_string(), fabric.to_string()),
+    );

     let mut loaders = std::collections::HashMap::new();
     loaders.insert(loader.0.clone(), loader.1);
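Note: the if/else-if chain becomes nested `Option::map_or_else(default_fn, map_fn)` calls, the shape clippy's `option_if_let_else` lint suggests; the first closure handles `None` and the second receives the unwrapped value. A minimal sketch of the same fallback logic:

    fn pick_loader(fabric: Option<&str>, forge: Option<&str>) -> (String, String) {
        fabric.map_or_else(
            // None: fall back to forge, then to a default.
            || {
                forge.map_or_else(
                    || ("fabric".to_string(), "latest".to_string()),
                    |f| ("forge".to_string(), f.to_string()),
                )
            },
            |f| ("fabric".to_string(), f.to_string()),
        )
    }

    fn main() {
        assert_eq!(pick_loader(None, Some("47.2.0")).0, "forge");
        assert_eq!(pick_loader(Some("0.16.2"), None).0, "fabric");
        assert_eq!(pick_loader(None, None).1, "latest");
    }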
@@ -119,12 +120,10 @@ async fn import_modrinth(
     log::info!("Importing {} projects from modpack", files.len());

     // Create platform client
-    let creds = crate::model::credentials::ResolvedCredentials::load().ok();
+    let creds = crate::model::credentials::ResolvedCredentials::load();
     let platform = create_platform(
         "modrinth",
-        creds
-            .as_ref()
-            .and_then(|c| c.modrinth_token().map(std::string::ToString::to_string)),
+        creds.modrinth_token().map(std::string::ToString::to_string),
     )?;

     for file_entry in files {

@@ -184,7 +183,7 @@ async fn import_modrinth(
         overrides: vec!["overrides".to_string()],
         server_overrides: None,
         client_overrides: None,
-        paths: Default::default(),
+        paths: HashMap::default(),
         projects: None,
         export_profiles: None,
         export_server_side_projects_to_client: None,

@@ -205,7 +204,9 @@ async fn import_modrinth(
         })?;

         if outpath.starts_with("overrides/") {
-            let target = outpath.strip_prefix("overrides/").unwrap();
+            let Some(target) = outpath.strip_prefix("overrides/").ok() else {
+                continue;
+            };

             if file.is_dir() {
                 std::fs::create_dir_all(target)?;

@@ -231,6 +232,8 @@ async fn import_curseforge(

     use zip::ZipArchive;

+    use crate::platform::create_platform;
+
     let file = File::open(path)?;
     let mut archive = ZipArchive::new(file)?;

@@ -283,7 +286,6 @@ async fn import_curseforge(
     log::info!("Importing {} projects from modpack", files.len());

     // Create platform client
-    use crate::platform::create_platform;
     let curseforge_token = std::env::var("CURSEFORGE_TOKEN").ok();
     let platform = create_platform("curseforge", curseforge_token)?;

@@ -370,7 +372,7 @@ async fn import_curseforge(
         overrides: vec!["overrides".to_string()],
         server_overrides: None,
         client_overrides: None,
-        paths: Default::default(),
+        paths: HashMap::default(),
         projects: None,
         export_profiles: None,
         export_server_side_projects_to_client: None,

@@ -393,7 +395,9 @@ async fn import_curseforge(
         })?;

         if outpath.starts_with(overrides_prefix) {
-            let target = outpath.strip_prefix(overrides_prefix).unwrap();
+            let Some(target) = outpath.strip_prefix(overrides_prefix).ok() else {
+                continue;
+            };

             if file.is_dir() {
                 std::fs::create_dir_all(target)?;
@@ -12,7 +12,7 @@ use crate::{
     },
 };

-pub async fn execute(
+pub fn execute(
     args: InitArgs,
     global_yes: bool,
     lockfile_path: &Path,

@@ -125,7 +125,7 @@ pub async fn execute(
     };

     // Save expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
     lockfile.save(lockfile_dir)?;

     let config = Config {

@@ -143,7 +143,7 @@ pub async fn execute(
         file_count_preference: None,
     };

-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
     config.save(config_dir)?;

     println!("Initialized new modpack '{name}' v{version}");

@@ -161,10 +161,8 @@ pub async fn execute(

     // Check if CurseForge API key is needed and prompt if interactive
     if is_interactive && (target == "curseforge" || target == "multiplatform") {
-        let credentials = ResolvedCredentials::load().ok();
-        let has_cf_key = credentials
-            .as_ref()
-            .is_some_and(|c| c.curseforge_api_key().is_some());
+        let credentials = ResolvedCredentials::load();
+        let has_cf_key = credentials.curseforge_api_key().is_some();

         if !has_cf_key {
             println!();
@@ -9,13 +9,13 @@ use crate::{
     model::{Config, LockFile, Project, ProjectFile},
 };

-pub async fn execute(
-    projects: Vec<String>,
+pub fn execute(
+    projects: &[String],
     lockfile_path: &Path,
     config_path: &Path,
 ) -> Result<()> {
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     let lockfile = LockFile::load(lockfile_dir)?;
     let _config = Config::load(config_dir)?;

@@ -172,15 +172,15 @@ fn display_project_inspection(
     lockfile: &LockFile,
 ) -> Result<()> {
     // Display project header panel
-    display_project_header(project)?;
+    display_project_header(project);

     // Display project files
     println!();
-    display_project_files(&project.files, project)?;
+    display_project_files(&project.files, project);

     // Display properties
     println!();
-    display_properties(project)?;
+    display_properties(project);

     // Display dependency tree
     println!();

@@ -191,7 +191,7 @@ fn display_project_inspection(
     Ok(())
 }

-fn display_project_header(project: &Project) -> Result<()> {
+fn display_project_header(project: &Project) {
     let name = get_project_name(project);
     let default_slug = String::from("N/A");
     let slug = project.slug.values().next().unwrap_or(&default_slug);

@@ -213,7 +213,7 @@ fn display_project_header(project: &Project) -> Result<()> {
     let metadata = format!(
         "{} ({}) • {} • {}",
         slug,
-        project.id.keys().next().unwrap_or(&"unknown".to_string()),
+        project.id.keys().next().map_or("unknown", String::as_str),
         format!("{:?}", project.r#type).to_lowercase(),
         format!("{:?}", project.side).to_lowercase()
     );

@@ -224,17 +224,12 @@ fn display_project_header(project: &Project) -> Result<()> {
     ]);

     println!("{table}");
-
-    Ok(())
 }

-fn display_project_files(
-    files: &[ProjectFile],
-    project: &Project,
-) -> Result<()> {
+fn display_project_files(files: &[ProjectFile], project: &Project) {
     if files.is_empty() {
         println!("{}", "No files available".yellow());
-        return Ok(());
+        return;
     }

     println!("{}", "Project Files".cyan().bold());

@@ -255,13 +250,14 @@ fn display_project_files(

         // File path line with optional site URL
         let file_path = format!("{}={}", file.file_type, file.file_name);
-        let file_display = if let Some(site_url) = file.get_site_url(project) {
-            // Create hyperlink for the file
-            let hyperlink = crate::ui_utils::hyperlink(&site_url, &file_path);
-            format!("{hyperlink}:{status_text}")
-        } else {
-            format!("{file_path}:{status_text}")
-        };
+        let file_display = file.get_site_url(project).map_or_else(
+            || format!("{file_path}:{status_text}"),
+            |site_url| {
+                // Create hyperlink for the file
+                let hyperlink = crate::ui_utils::hyperlink(&site_url, &file_path);
+                format!("{hyperlink}:{status_text}")
+            },
+        );

         table.add_row(vec![Cell::new(file_display).fg(if idx == 0 {
             Color::Green

@@ -302,11 +298,9 @@ fn display_project_files(
         println!("{table}");
         println!();
     }
-
-    Ok(())
 }

-fn display_properties(project: &Project) -> Result<()> {
+fn display_properties(project: &Project) {
     println!("{}", "Properties".cyan().bold());

     println!(

@@ -338,8 +332,6 @@ fn display_properties(project: &Project) -> Result<()> {
         let aliases: Vec<_> = project.aliases.iter().cloned().collect();
         println!("  {}={}", "aliases".yellow(), aliases.join(", "));
     }
-
-    Ok(())
 }

 fn display_dependencies(project: &Project, lockfile: &LockFile) -> Result<()> {
@@ -6,11 +6,11 @@ use crate::{
     model::LockFile,
 };

-pub fn execute(args: LinkArgs, lockfile_path: &Path) -> Result<()> {
+pub fn execute(args: &LinkArgs, lockfile_path: &Path) -> Result<()> {
     log::info!("Linking {} -> {}", args.from, args.to);

     // Load expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
     let mut lockfile = LockFile::load(lockfile_dir)?;

     // Find projects
@@ -14,9 +14,9 @@ fn truncate_name(name: &str, max_len: usize) -> String {
     }
 }

-pub fn execute(args: LsArgs, lockfile_path: &Path) -> Result<()> {
+pub fn execute(args: &LsArgs, lockfile_path: &Path) -> Result<()> {
     // Load expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
     let lockfile = LockFile::load(lockfile_dir)?;

     if lockfile.projects.is_empty() {
@@ -29,11 +29,13 @@ pub async fn execute(args: RemoteArgs) -> Result<()> {

     // If no URL provided, show status
     if args.url.is_none() {
-        show_remote_status(&remote_path)?;
+        show_remote_status(&remote_path);
         return Ok(());
     }

-    let url = args.url.unwrap();
+    let url = args
+        .url
+        .ok_or_else(|| PakkerError::InvalidInput("URL is required".to_string()))?;
     log::info!("Installing modpack from: {url}");

     // Clone or update repository

@@ -90,10 +92,10 @@ pub async fn execute(args: RemoteArgs) -> Result<()> {
     Ok(())
 }

-fn show_remote_status(remote_path: &Path) -> Result<()> {
+fn show_remote_status(remote_path: &Path) {
     if !remote_path.exists() {
         println!("No remote configured");
-        return Ok(());
+        return;
     }

     println!("Remote status:");

@@ -107,8 +109,6 @@ fn show_remote_status(remote_path: &Path) {
             println!("  Commit: {}", &sha[..8]);
         }
     }
-
-    Ok(())
 }

 fn sync_overrides(remote_path: &Path, server_pack: bool) -> Result<()> {
@@ -6,7 +6,7 @@ use crate::{cli::RemoteUpdateArgs, error::PakkerError, git, model::Config};
 ///
 /// This command updates the current modpack from its remote Git repository.
 /// It fetches the latest changes from the remote and syncs overrides.
-pub async fn execute(args: RemoteUpdateArgs) -> Result<(), PakkerError> {
+pub fn execute(args: &RemoteUpdateArgs) -> Result<(), PakkerError> {
     // Check if lockfile exists in current directory - if it does, we're in a
     // modpack directory and should not update remote (use regular update
     // instead)

@@ -60,7 +60,7 @@ pub async fn execute(args: RemoteUpdateArgs) -> Result<(), PakkerError> {

     // Sync overrides from remote directory
     println!("Syncing overrides...");
-    sync_overrides(&remote_dir).await?;
+    sync_overrides(&remote_dir)?;

     // Clean up remote directory
     std::fs::remove_dir_all(&remote_dir)?;

@@ -71,7 +71,7 @@ pub async fn execute(args: RemoteUpdateArgs) -> Result<(), PakkerError> {
 }

 /// Sync override files from remote directory to current directory
-async fn sync_overrides(remote_dir: &Path) -> Result<(), PakkerError> {
+fn sync_overrides(remote_dir: &Path) -> Result<(), PakkerError> {
     let remote_config_path = remote_dir.join("pakku.json");
     if !remote_config_path.exists() {
         return Ok(());
@@ -7,15 +7,15 @@ use crate::{
     ui_utils::{prompt_typo_suggestion, prompt_yes_no},
 };

-pub async fn execute(
-    args: RmArgs,
+pub fn execute(
+    args: &RmArgs,
     global_yes: bool,
     lockfile_path: &Path,
     _config_path: &Path,
 ) -> Result<()> {
     let skip_prompts = global_yes;
     // Load expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
     let mut lockfile = LockFile::load(lockfile_dir)?;

     // Determine which projects to remove
@@ -6,14 +6,14 @@ use crate::{
     model::{Config, LockFile, ProjectSide, ProjectType, Target, UpdateStrategy},
 };

-pub async fn execute(
-    args: SetArgs,
+pub fn execute(
+    args: &SetArgs,
     lockfile_path: &Path,
     config_path: &Path,
 ) -> Result<(), PakkerError> {
     // Load expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     let mut lockfile = LockFile::load(lockfile_dir)?;
     let config = Config::load(config_dir)?;

@@ -61,7 +61,7 @@ pub async fn execute(
             }
         }

-        lockfile.mc_versions = mc_versions.clone();
+        lockfile.mc_versions.clone_from(&mc_versions);
         println!("Set Minecraft versions to: {mc_versions:?}");
     }

@@ -101,7 +101,7 @@ pub async fn execute(
             }
         }

-        lockfile.loaders = loaders.clone();
+        lockfile.loaders.clone_from(&loaders);
         println!("Set loaders to: {loaders:?}");
     }
@@ -17,8 +17,8 @@ pub async fn execute(
     lockfile_path: &Path,
     config_path: &Path,
 ) -> Result<()> {
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     let lockfile = LockFile::load(lockfile_dir)?;
     let config = Config::load(config_dir)?;

@@ -67,7 +67,6 @@ pub async fn execute(
     }

     // Log info level summary
-    let _info_severity = ErrorSeverity::Info;
     log::info!(
         "Update check completed with {} warning(s) and {} error(s)",
         warnings.len(),

@@ -138,6 +137,10 @@ struct FileUpdate {
     new_filename: String,
 }

+#[expect(
+    clippy::expect_used,
+    reason = "progress bar template is a string literal and is always valid"
+)]
 async fn check_updates_sequential(
     lockfile: &LockFile,
 ) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {

@@ -150,7 +153,7 @@ async fn check_updates_sequential(
     pb.set_style(
         ProgressStyle::default_bar()
             .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
-            .unwrap()
+            .expect("progress bar template is valid")
             .progress_chars("#>-"),
     );
     pb.set_message("Checking for updates...");

@@ -160,8 +163,8 @@ async fn check_updates_sequential(
             .name
             .values()
             .next()
-            .unwrap_or(&"Unknown".to_string())
-            .clone();
+            .cloned()
+            .unwrap_or_else(|| "Unknown".to_string());
         pb.set_message(format!("Checking {project_name}..."));

         match check_project_update(project, lockfile).await {
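Note: the old chain allocated `"Unknown".to_string()` eagerly on every iteration just to borrow it (clippy's `or_fun_call`), then cloned whichever `&String` came out. `Option::cloned` turns the `Option<&String>` into `Option<String>` first, so the fallback is only built when the name is actually missing. A minimal sketch:

    fn main() {
        let names = vec![String::from("Sodium")];

        // Eager: builds the fallback String even though a name exists.
        let a = names.first().unwrap_or(&"Unknown".to_string()).clone();
        // Lazy: Option<&String> -> Option<String>, fallback made on demand.
        let b = names.first().cloned().unwrap_or_else(|| "Unknown".to_string());

        assert_eq!(a, b);
    }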
@@ -184,6 +187,11 @@ async fn check_updates_sequential(
     Ok((updates, errors))
 }

+#[expect(
+    clippy::expect_used,
+    reason = "progress bar template and semaphore acquire are infallible in \
+              this context"
+)]
 async fn check_updates_parallel(
     lockfile: &LockFile,
 ) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {

@@ -196,7 +204,7 @@ async fn check_updates_parallel(
     pb.set_style(
         ProgressStyle::default_bar()
             .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
-            .unwrap()
+            .expect("progress bar template is valid")
             .progress_chars("#>-"),
     );
     pb.set_message("Checking for updates (parallel)...");

@@ -208,7 +216,7 @@ async fn check_updates_parallel(
         let lockfile_clone = lockfile.clone();

         futures.push(async move {
-            let _permit = sem.acquire().await.unwrap();
+            let _permit = sem.acquire().await.expect("semaphore closed unexpectedly");
             let result = check_project_update(&project, &lockfile_clone).await;
             pb_clone.inc(1);
             (project, result)

@@ -230,8 +238,8 @@ async fn check_updates_parallel(
                     .name
                     .values()
                     .next()
-                    .unwrap_or(&"Unknown".to_string())
-                    .clone();
+                    .cloned()
+                    .unwrap_or_else(|| "Unknown".to_string());
                 errors.push((project_name, e.to_string()));
             },
         }

@@ -260,37 +268,30 @@ async fn check_project_update(
     // Try each platform in project
     for platform_name in project.id.keys() {
         let api_key = get_api_key(platform_name);
-        let platform = match create_platform(platform_name, api_key) {
-            Ok(p) => p,
-            Err(_) => continue,
+        let Ok(platform) = create_platform(platform_name, api_key) else {
+            continue;
         };

         let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();

-        match platform
+        if let Ok(updated_project) = platform
             .request_project_with_files(&slug, &lockfile.mc_versions, &loaders)
             .await
         {
-            Ok(updated_project) => {
-                // Compare files to detect updates
-                let file_updates = detect_file_updates(project, &updated_project);
+            // Compare files to detect updates
+            let file_updates = detect_file_updates(project, &updated_project);

-                if !file_updates.is_empty() {
-                    return Ok(Some(ProjectUpdate {
-                        slug: project.slug.clone(),
-                        name: project.name.values().next().cloned().unwrap_or_default(),
-                        project_type: format!("{:?}", project.r#type),
-                        side: format!("{:?}", project.side),
-                        file_updates,
-                    }));
-                }
+            if !file_updates.is_empty() {
+                return Ok(Some(ProjectUpdate {
+                    slug: project.slug.clone(),
+                    name: project.name.values().next().cloned().unwrap_or_default(),
+                    project_type: format!("{:?}", project.r#type),
+                    side: format!("{:?}", project.side),
+                    file_updates,
+                }));
+            }

-                return Ok(None); // No updates
-            },
-            Err(_) => {
-                // Try next platform
-                continue;
-            },
+            return Ok(None); // No updates
         }
     }
@@ -19,6 +19,10 @@ enum SyncChange {
     Removal(String), // project_pakku_id
 }

+#[expect(
+    clippy::expect_used,
+    reason = "spinner template is a string literal and is always valid"
+)]
 pub async fn execute(
     args: SyncArgs,
     global_yes: bool,

@@ -27,14 +31,14 @@ pub async fn execute(
 ) -> Result<()> {
     log::info!("Synchronizing with lockfile");

-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
-    let config_dir = config_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
+    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

     let mut lockfile = LockFile::load(lockfile_dir)?;
     let config = Config::load(config_dir)?;

     // Detect changes
-    let changes = detect_changes(&lockfile, &config)?;
+    let changes = detect_changes(&lockfile, &config);

     if changes.is_empty() {
         println!("✓ Everything is in sync");

@@ -59,7 +63,7 @@ pub async fn execute(
     spinner.set_style(
         ProgressStyle::default_spinner()
             .template("{spinner:.green} {msg}")
-            .unwrap(),
+            .expect("spinner template is valid"),
     );

     if no_filter || args.additions {

@@ -145,10 +149,7 @@ pub async fn execute(
     Ok(())
 }

-fn detect_changes(
-    lockfile: &LockFile,
-    config: &Config,
-) -> Result<Vec<SyncChange>> {
+fn detect_changes(lockfile: &LockFile, config: &Config) -> Vec<SyncChange> {
     let mut changes = Vec::new();

     // Get paths for each project type

@@ -177,23 +178,26 @@ fn detect_changes(
                 && ext == "jar"
                 && !lockfile_files.contains_key(&path)
             {
-                let name = path.file_name().unwrap().to_string_lossy().to_string();
+                let name = path
+                    .file_name()
+                    .map(|n| n.to_string_lossy().to_string())
+                    .unwrap_or_default();
                 changes.push(SyncChange::Addition(path, name));
             }
         }
     }

     // Check for removals (projects in lockfile but files missing)
-    let filesystem_files: HashSet<_> =
-        if let Ok(entries) = fs::read_dir(mods_path) {
+    let filesystem_files: HashSet<_> = fs::read_dir(mods_path).map_or_else(
+        |_| HashSet::new(),
+        |entries| {
             entries
                 .flatten()
                 .map(|e| e.path())
                 .filter(|p| p.is_file())
                 .collect()
-        } else {
-            HashSet::new()
-        };
+        },
+    );

     for (lockfile_path, pakku_id) in &lockfile_files {
         if !filesystem_files.contains(lockfile_path) {

@@ -201,7 +205,7 @@ fn detect_changes(
         }
     }

-    Ok(changes)
+    changes
 }

 async fn add_file_to_lockfile(

@@ -209,14 +213,14 @@ async fn add_file_to_lockfile(
     file_path: &Path,
     _config: &Config,
 ) -> Result<()> {
-    use sha1::Digest;

     // Try to identify the file by hash lookup
     let modrinth = ModrinthPlatform::new();
     let curseforge = CurseForgePlatform::new(None);

-    // Compute file hash
     let file_data = fs::read(file_path)?;
+    // Compute SHA-1 hash from file bytes
+    use sha1::Digest;
     let mut hasher = sha1::Sha1::new();
     hasher.update(&file_data);
     let hash = crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
@@ -6,11 +6,11 @@ use crate::{
     model::LockFile,
 };

-pub fn execute(args: UnlinkArgs, lockfile_path: &Path) -> Result<()> {
+pub fn execute(args: &UnlinkArgs, lockfile_path: &Path) -> Result<()> {
     log::info!("Unlinking {} -> {}", args.from, args.to);

     // Load expects directory path, so get parent directory
-    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
+    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
     let mut lockfile = LockFile::load(lockfile_dir)?;

     // Find projects
@@ -10,6 +10,10 @@ use crate::{
    utils::FlexVer,
};

#[expect(
    clippy::expect_used,
    reason = "progress bar template is a string literal and is always valid"
)]
pub async fn execute(
    args: UpdateArgs,
    global_yes: bool,

@@ -18,14 +22,14 @@ pub async fn execute(
) -> Result<(), PakkerError> {
    let skip_prompts = global_yes;
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
    let config_dir = config_path.parent().unwrap_or(Path::new("."));
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));

    let mut lockfile = LockFile::load(lockfile_dir)?;
    let _config = Config::load(config_dir)?;

    // Create platforms
    let platforms = super::add::create_all_platforms()?;
    let platforms = super::add::create_all_platforms();

    // Collect all known project identifiers for typo suggestions
    let all_slugs: Vec<String> = lockfile

@@ -83,7 +87,7 @@ pub async fn execute(
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .unwrap()
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );
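Replacing `.unwrap()` with `.expect("...")` pairs with the `#[expect(clippy::expect_used, reason = ...)]` attribute added above the function: under a strict lint profile every panic site carries a justification, and `#[expect]` (unlike `#[allow]`) itself warns once the lint stops firing. A hedged sketch of the shape, not tied to this crate's types:

    #[expect(
        clippy::expect_used,
        reason = "template string is a literal and always valid"
    )]
    fn styled() -> indicatif::ProgressStyle {
        // template() parses the format string; a literal can only fail if mistyped.
        indicatif::ProgressStyle::default_bar()
            .template("{pos}/{len}")
            .expect("progress bar template is valid")
    }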
@@ -152,9 +156,12 @@ pub async fn execute(
        }

        // Clone data needed for comparisons to avoid borrow issues
        let new_file_id = updated_project.files.first().unwrap().id.clone();
        let new_file_name =
            updated_project.files.first().unwrap().file_name.clone();
        let first_file = updated_project
            .files
            .first()
            .ok_or_else(|| PakkerError::InvalidProject("No files found".into()))?;
        let new_file_id = first_file.id.clone();
        let new_file_name = first_file.file_name.clone();
        let old_file_name = old_file.file_name.clone();
        let project_name = old_project.get_name();

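Here two `unwrap()` calls on `files.first()` become a single `ok_or_else` that converts the `Option` into the crate's error type and propagates it with `?`. The general pattern, sketched with a hypothetical error enum:

    #[derive(Debug)]
    enum AppError {
        InvalidProject(String),
    }

    fn first_name(files: &[String]) -> Result<&String, AppError> {
        // ok_or_else builds the error lazily, only on the None path.
        files
            .first()
            .ok_or_else(|| AppError::InvalidProject("No files found".into()))
    }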
@@ -205,7 +212,12 @@ pub async fn execute(
        }

        if should_update {
            let selected_file = updated_project.files.first().unwrap();
            let selected_file =
                updated_project.files.first().ok_or_else(|| {
                    PakkerError::InvalidProject(
                        "No files found after selection".into(),
                    )
                })?;
            pb.println(format!(
                " {} -> {}",
                old_file_name, selected_file.file_name
@@ -1,3 +1,5 @@
use std::fmt::Write;

use thiserror::Error;

pub type Result<T> = std::result::Result<T, PakkerError>;

@@ -11,6 +13,7 @@ pub enum ErrorSeverity {
    /// Warning - operation can continue but may have issues
    Warning,
    /// Info - informational message
    #[expect(dead_code, reason = "reserved for future use")]
    Info,
}

@@ -177,7 +180,7 @@ fn format_multiple_errors(errors: &[PakkerError]) -> String {

    let mut msg = format!("{} errors occurred:\n", errors.len());
    for (idx, error) in errors.iter().enumerate() {
        msg.push_str(&format!(" {}. {}\n", idx + 1, error));
        let _ = writeln!(msg, " {}. {}", idx + 1, error);
    }
    msg
}
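The `push_str(&format!(...))` → `writeln!` rewrite is clippy's `format_push_string`: writing through the `std::fmt::Write` impl on `String` avoids allocating an intermediate `String` per iteration, and the `let _ =` acknowledges that writing into a `String` cannot actually fail. A compact sketch:

    use std::fmt::Write;

    fn numbered(items: &[&str]) -> String {
        let mut out = String::new();
        for (idx, item) in items.iter().enumerate() {
            // fmt::Write into a String is infallible, so the Result can be discarded.
            let _ = writeln!(out, "{}. {}", idx + 1, item);
        }
        out
    }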
@@ -36,8 +36,9 @@ impl Exporter {
    ///
    /// Returns successfully exported files. If any profile failed (non-skip),
    /// returns an error after attempting all profiles.
    #[expect(clippy::future_not_send, reason = "not required to be Send")]
    pub async fn export_all_profiles(
        &mut self,
        &self,
        lockfile: &LockFile,
        config: &Config,
        output_path: &Path,

@@ -99,8 +100,13 @@ impl Exporter {
    }

    /// Export modpack using specified profile
    #[expect(clippy::future_not_send, reason = "not required to be Send")]
    #[expect(
        clippy::expect_used,
        reason = "spinner template string is a literal and always valid"
    )]
    pub async fn export(
        &mut self,
        &self,
        profile_name: &str,
        lockfile: &LockFile,
        config: &Config,

@@ -110,7 +116,7 @@ impl Exporter {
    spinner.set_style(
        ProgressStyle::default_spinner()
            .template("{spinner:.cyan} {msg}")
            .unwrap(),
            .expect("spinner template is valid"),
    );
    spinner.set_message(format!("Preparing {profile_name} export..."));

@@ -175,7 +181,7 @@ impl Exporter {
    spinner.set_message("Creating archive...");
    // Package export
    let output_file =
        self.package_export(export_dir, output_path, profile_name, config)?;
        Self::package_export(export_dir, output_path, profile_name, config)?;

    // Cleanup
    drop(temp_dir);

@@ -187,7 +193,6 @@ impl Exporter {

    /// Package export directory into final format
    fn package_export(
        &self,
        export_dir: &Path,
        output_path: &Path,
        profile_name: &str,

@@ -224,7 +229,7 @@ impl Exporter {
        .unix_permissions(0o755);

    // Add all files from export directory
    self.add_directory_to_zip(&mut zip, export_dir, export_dir, options)?;
    Self::add_directory_to_zip(&mut zip, export_dir, export_dir, options)?;

    zip.finish()?;

@@ -233,7 +238,6 @@ impl Exporter {

    /// Recursively add directory to zip
    fn add_directory_to_zip(
        &self,
        zip: &mut zip::ZipWriter<fs::File>,
        base_path: &Path,
        current_path: &Path,

@@ -255,7 +259,7 @@ impl Exporter {
                relative_path.to_string_lossy().to_string(),
                options,
            )?;
            self.add_directory_to_zip(zip, base_path, &path, options)?;
            Self::add_directory_to_zip(zip, base_path, &path, options)?;
        }
    }

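These exporter hunks fix clippy's `unused_self`: methods that never read `self` become associated functions, and call sites switch from `self.method(...)` to `Self::method(...)`. A minimal sketch with hypothetical names:

    struct Exporter {
        out_dir: String,
    }

    impl Exporter {
        // Depends only on its arguments, so it takes no `self` parameter.
        fn package_name(profile: &str) -> String {
            format!("{profile}.zip")
        }

        fn run(&self, profile: &str) -> String {
            // Call site changes from self.package_name(..) to Self::package_name(..).
            format!("{}/{}", self.out_dir, Self::package_name(profile))
        }
    }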
@@ -66,7 +66,7 @@ impl ProfileConfig {
    self
        .server_overrides
        .as_deref()
        .or(global_server_overrides.map(std::vec::Vec::as_slice))
        .or_else(|| global_server_overrides.map(std::vec::Vec::as_slice))
    }

    /// Get effective client override paths, falling back to global config

@@ -77,7 +77,7 @@ impl ProfileConfig {
    self
        .client_overrides
        .as_deref()
        .or(global_client_overrides.map(std::vec::Vec::as_slice))
        .or_else(|| global_client_overrides.map(std::vec::Vec::as_slice))
    }

    /// Get default config for `CurseForge` profile

@@ -54,7 +54,7 @@ impl Effect for CopyProjectFilesEffect {
    use crate::model::ResolvedCredentials;

    // Resolve credentials (env -> keyring -> Pakker file -> Pakku file).
    let credentials = ResolvedCredentials::load()?;
    let credentials = ResolvedCredentials::load();
    let curseforge_key =
        credentials.curseforge_api_key().map(ToOwned::to_owned);
    let modrinth_token = credentials.modrinth_token().map(ToOwned::to_owned);

@@ -66,14 +66,13 @@ impl Effect for CopyProjectFilesEffect {

    if let Some(file) = project.files.first() {
        // Get the target directory based on project type and paths config
        let type_dir = get_project_type_dir(&project.r#type, &context.config);
        let type_dir = get_project_type_dir(project.r#type, &context.config);

        // Handle subpath if specified
        let target_subdir = if let Some(subpath) = &project.subpath {
            PathBuf::from(&type_dir).join(subpath)
        } else {
            PathBuf::from(&type_dir)
        };
        let target_subdir = project.subpath.as_ref().map_or_else(
            || PathBuf::from(&type_dir),
            |subpath| PathBuf::from(&type_dir).join(subpath),
        );

        let export_dir = context.export_path.join(&target_subdir);
        fs::create_dir_all(&export_dir)?;

@@ -204,7 +203,15 @@ async fn download_file(
    let attempts: usize = 5;

    for attempt in 1..=attempts {
        let response = request_builder.try_clone().unwrap().send().await;
        let response = request_builder
            .try_clone()
            .ok_or_else(|| {
                crate::error::PakkerError::InternalError(
                    "Failed to clone request builder".into(),
                )
            })?
            .send()
            .await;

        match response {
            Ok(resp) if resp.status().is_success() => {
@@ -295,11 +302,12 @@ impl Effect for CopyOverridesEffect {

    async fn execute(&self, context: &RuleContext) -> Result<()> {
        // Use profile-specific overrides if available, otherwise use global config
        let overrides = if let Some(profile_config) = &context.profile_config {
            profile_config.get_overrides(&context.config.overrides)
        } else {
            &context.config.overrides
        };
        let overrides = context
            .profile_config
            .as_ref()
            .map_or(context.config.overrides.as_slice(), |profile_config| {
                profile_config.get_overrides(&context.config.overrides)
            });

        // Expand any glob patterns in override paths
        let expanded_paths = expand_override_globs(&context.base_path, overrides);

@@ -342,13 +350,13 @@ impl Effect for CopyServerOverridesEffect {
    async fn execute(&self, context: &RuleContext) -> Result<()> {
        // Use profile-specific server overrides if available, otherwise use global
        // config
        let server_overrides = if let Some(profile_config) = &context.profile_config
        {
            profile_config
                .get_server_overrides(context.config.server_overrides.as_ref())
        } else {
            context.config.server_overrides.as_deref()
        };
        let server_overrides = context.profile_config.as_ref().map_or(
            context.config.server_overrides.as_deref(),
            |profile_config| {
                profile_config
                    .get_server_overrides(context.config.server_overrides.as_ref())
            },
        );

        if let Some(overrides) = server_overrides {
            // Expand any glob patterns in override paths

@@ -393,13 +401,13 @@ impl Effect for CopyClientOverridesEffect {
    async fn execute(&self, context: &RuleContext) -> Result<()> {
        // Use profile-specific client overrides if available, otherwise use global
        // config
        let client_overrides = if let Some(profile_config) = &context.profile_config
        {
            profile_config
                .get_client_overrides(context.config.client_overrides.as_ref())
        } else {
            context.config.client_overrides.as_deref()
        };
        let client_overrides = context.profile_config.as_ref().map_or(
            context.config.client_overrides.as_deref(),
            |profile_config| {
                profile_config
                    .get_client_overrides(context.config.client_overrides.as_ref())
            },
        );

        if let Some(overrides) = client_overrides {
            // Expand any glob patterns in override paths
@@ -459,7 +467,7 @@ impl Effect for FilterClientOnlyEffect {
        && let Some(file) = project.files.first()
    {
        // Get the target directory based on project type and paths config
        let type_dir = get_project_type_dir(&project.r#type, &context.config);
        let type_dir = get_project_type_dir(project.r#type, &context.config);
        let project_dir = context.export_path.join(&type_dir);
        let file_path = project_dir.join(&file.file_name);

@@ -514,7 +522,7 @@ impl Effect for FilterServerOnlyEffect {
        && let Some(file) = project.files.first()
    {
        // Get the target directory based on project type and paths config
        let type_dir = get_project_type_dir(&project.r#type, &context.config);
        let type_dir = get_project_type_dir(project.r#type, &context.config);
        let project_dir = context.export_path.join(&type_dir);
        let file_path = project_dir.join(&file.file_name);

@@ -573,7 +581,7 @@ impl Effect for FilterNonRedistributableEffect {
        && let Some(file) = project.files.first()
    {
        // Get the target directory based on project type and paths config
        let type_dir = get_project_type_dir(&project.r#type, &context.config);
        let type_dir = get_project_type_dir(project.r#type, &context.config);
        let project_dir = context.export_path.join(&type_dir);
        let file_path = project_dir.join(&file.file_name);

@@ -668,7 +676,7 @@ fn generate_curseforge_manifest(context: &RuleContext) -> Result<String> {

    let manifest = json!({
        "minecraft": {
            "version": context.lockfile.mc_versions.first().unwrap_or(&"1.20.1".to_string()),
            "version": context.lockfile.mc_versions.first().map_or("1.20.1", String::as_str),
            "modLoaders": context.lockfile.loaders.iter().map(|(name, version)| {
                json!({
                    "id": format!("{}-{}", name, version),
@@ -736,7 +744,7 @@ fn generate_modrinth_manifest(context: &RuleContext) -> Result<String> {
            .lockfile
            .mc_versions
            .first()
            .unwrap_or(&"1.20.1".to_string())
            .map_or("1.20.1", String::as_str)
        ),
    );

@@ -781,7 +789,7 @@ fn copy_recursive(

/// Get the target directory for a project type, respecting the paths config.
/// Falls back to default directories if not configured.
fn get_project_type_dir(project_type: &ProjectType, config: &Config) -> String {
fn get_project_type_dir(project_type: ProjectType, config: &Config) -> String {
    // Check if there's a custom path configured for this project type
    let type_key = project_type.to_string();
    if let Some(custom_path) = config.paths.get(&type_key) {

@@ -881,7 +889,7 @@ impl Effect for FilterByPlatformEffect {
    if let Some(file) = project.files.first() {
        // Get the target directory based on project type and paths config
        let type_dir =
            get_project_type_dir(&project.r#type, &context.config);
            get_project_type_dir(project.r#type, &context.config);
        let project_dir = context.export_path.join(&type_dir);
        let file_path = project_dir.join(&file.file_name);

@@ -942,13 +950,10 @@ impl Effect for MissingProjectsAsOverridesEffect {
    async fn execute(&self, context: &RuleContext) -> Result<()> {
        use crate::model::ResolvedCredentials;

        let credentials = ResolvedCredentials::load().ok();
        let curseforge_key = credentials
            .as_ref()
            .and_then(|c| c.curseforge_api_key().map(ToOwned::to_owned));
        let modrinth_token = credentials
            .as_ref()
            .and_then(|c| c.modrinth_token().map(ToOwned::to_owned));
        let credentials = ResolvedCredentials::load();
        let curseforge_key =
            credentials.curseforge_api_key().map(ToOwned::to_owned);
        let modrinth_token = credentials.modrinth_token().map(ToOwned::to_owned);

        for project in &context.lockfile.projects {
            if !project.export {
@@ -977,7 +982,7 @@ impl Effect for MissingProjectsAsOverridesEffect {

    // Download to overrides directory
    let overrides_dir = context.export_path.join("overrides");
    let type_dir = get_project_type_dir(&project.r#type, &context.config);
    let type_dir = get_project_type_dir(project.r#type, &context.config);
    let target_dir = overrides_dir.join(&type_dir);
    fs::create_dir_all(&target_dir)?;

@@ -1128,11 +1133,6 @@ fn process_text_files(
    dir: &std::path::Path,
    replacements: &std::collections::HashMap<&str, String>,
) -> Result<()> {
    if !dir.exists() {
        return Ok(());
    }

    // File extensions that should be processed for text replacement
    const TEXT_EXTENSIONS: &[&str] = &[
        "txt",
        "md",

@@ -1150,6 +1150,10 @@ fn process_text_files(
        "xml",
    ];

    if !dir.exists() {
        return Ok(());
    }

    for entry in walkdir::WalkDir::new(dir)
        .into_iter()
        .filter_map(std::result::Result::ok)

@@ -1170,9 +1174,8 @@ fn process_text_files(
    }

    // Read file content
    let content = match fs::read_to_string(path) {
        Ok(c) => c,
        Err(_) => continue, // Skip binary files or unreadable files
    let Ok(content) = fs::read_to_string(path) else {
        continue; // Skip binary files or unreadable files
    };

    // Check if any replacements are needed
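The `match` on `fs::read_to_string` becomes a `let`-`else`, which clippy's `manual_let_else` prefers for destructure-or-bail control flow: the happy path binds the value, and the `else` block must diverge (`continue`, `return`, `break`, or panic). Sketch:

    use std::fs;

    fn print_readable(paths: &[&str]) {
        for path in paths {
            // Bind on Ok; diverge on Err without writing out a full match.
            let Ok(content) = fs::read_to_string(path) else {
                continue; // skip binary or unreadable files
            };
            println!("{content}");
        }
    }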
@@ -1366,20 +1369,20 @@ mod tests {
        file_count_preference: None,
    };

    assert_eq!(get_project_type_dir(&ProjectType::Mod, &config), "mods");
    assert_eq!(get_project_type_dir(ProjectType::Mod, &config), "mods");
    assert_eq!(
        get_project_type_dir(&ProjectType::ResourcePack, &config),
        get_project_type_dir(ProjectType::ResourcePack, &config),
        "resourcepacks"
    );
    assert_eq!(
        get_project_type_dir(&ProjectType::DataPack, &config),
        get_project_type_dir(ProjectType::DataPack, &config),
        "datapacks"
    );
    assert_eq!(
        get_project_type_dir(&ProjectType::Shader, &config),
        get_project_type_dir(ProjectType::Shader, &config),
        "shaderpacks"
    );
    assert_eq!(get_project_type_dir(&ProjectType::World, &config), "saves");
    assert_eq!(get_project_type_dir(ProjectType::World, &config), "saves");
}

#[test]

@@ -1404,16 +1407,16 @@ mod tests {
    };

    assert_eq!(
        get_project_type_dir(&ProjectType::Mod, &config),
        get_project_type_dir(ProjectType::Mod, &config),
        "custom-mods"
    );
    assert_eq!(
        get_project_type_dir(&ProjectType::ResourcePack, &config),
        get_project_type_dir(ProjectType::ResourcePack, &config),
        "custom-rp"
    );
    // Non-customized type should use default
    assert_eq!(
        get_project_type_dir(&ProjectType::Shader, &config),
        get_project_type_dir(ProjectType::Shader, &config),
        "shaderpacks"
    );
}

52  src/fetch.rs
@@ -50,6 +50,10 @@ impl Fetcher {
    }

    /// Fetch all project files according to lockfile with parallel downloads
    #[expect(
        clippy::expect_used,
        reason = "progress bar template string is a literal and always valid"
    )]
    pub async fn fetch_all(
        &self,
        lockfile: &LockFile,

@@ -71,7 +75,7 @@ impl Fetcher {
    overall_bar.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .unwrap()
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );
    overall_bar.set_message("Fetching projects...");

@@ -180,23 +184,23 @@ impl Fetcher {
    let project_dirs = [
        (
            "mod",
            self.get_default_path(&crate::model::ProjectType::Mod),
            Self::get_default_path(crate::model::ProjectType::Mod),
        ),
        (
            "resource-pack",
            self.get_default_path(&crate::model::ProjectType::ResourcePack),
            Self::get_default_path(crate::model::ProjectType::ResourcePack),
        ),
        (
            "shader",
            self.get_default_path(&crate::model::ProjectType::Shader),
            Self::get_default_path(crate::model::ProjectType::Shader),
        ),
        (
            "data-pack",
            self.get_default_path(&crate::model::ProjectType::DataPack),
            Self::get_default_path(crate::model::ProjectType::DataPack),
        ),
        (
            "world",
            self.get_default_path(&crate::model::ProjectType::World),
            Self::get_default_path(crate::model::ProjectType::World),
        ),
    ];

@@ -219,9 +223,8 @@ impl Fetcher {
        continue;
    }

    let entries = match fs::read_dir(&dir) {
        Ok(e) => e,
        Err(_) => continue,
    let Ok(entries) = fs::read_dir(&dir) else {
        continue;
    };

    for entry in entries.flatten() {

@@ -241,7 +244,10 @@ impl Fetcher {
    }

    // Skip non-jar files (might be configs, etc.)
    if !file_name.ends_with(".jar") {
    if !std::path::Path::new(&file_name)
        .extension()
        .is_some_and(|ext| ext.eq_ignore_ascii_case("jar"))
    {
        continue;
    }

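The `ends_with(".jar")` check is replaced because clippy's `case_sensitive_file_extension_comparisons` flags it: a file named `Foo.JAR` would be skipped. Comparing the parsed `extension()` case-insensitively handles that. Sketch:

    use std::path::Path;

    fn is_jar(file_name: &str) -> bool {
        // extension() strips the dot; eq_ignore_ascii_case matches JAR, Jar, jar, ...
        Path::new(file_name)
            .extension()
            .is_some_and(|ext| ext.eq_ignore_ascii_case("jar"))
    }

    // is_jar("sodium.jar") == true; is_jar("SODIUM.JAR") == true; is_jar("config.toml") == false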
@@ -279,7 +285,7 @@ impl Fetcher {
    config: &Config,
) -> Result<()> {
    // Select the best file for this project
    let file = self.select_best_file(project, lockfile)?;
    let file = Self::select_best_file(project, lockfile)?;

    // Determine target path
    let target_path = self.get_target_path(project, file, config);

@@ -314,8 +320,11 @@ impl Fetcher {
    }

    /// Select the best file for a project based on constraints
    #[expect(
        clippy::expect_used,
        reason = "compatible_files is checked to be non-empty above"
    )]
    fn select_best_file<'a>(
        &self,
        project: &'a Project,
        lockfile: &LockFile,
    ) -> Result<&'a ProjectFile> {

@@ -338,7 +347,9 @@ impl Fetcher {
    let best = if project.update_strategy == UpdateStrategy::FlexVer {
        let mut sorted: Vec<_> = compatible_files.iter().collect();
        sorted.sort_by(|a, b| FlexVer(&b.file_name).cmp(&FlexVer(&a.file_name)));
        *sorted.first().unwrap()
        *sorted
            .first()
            .expect("compatible_files is non-empty, checked above")
    } else {
        // Prefer release over beta over alpha, then by date published
        compatible_files

@@ -351,7 +362,7 @@ impl Fetcher {
                };
                (type_priority, &f.date_published)
            })
            .unwrap()
            .expect("compatible_files is non-empty, checked above")
    };

    Ok(best)

@@ -371,7 +382,7 @@ impl Fetcher {
        path.push(custom_path);
    } else {
        // Default path based on project type
        path.push(self.get_default_path(&project.r#type));
        path.push(Self::get_default_path(project.r#type));
    }

    // Add subpath if specified

@@ -385,9 +396,8 @@ impl Fetcher {

    /// Get default path for project type
    const fn get_default_path(
        &self,
        project_type: &crate::model::ProjectType,
    ) -> &str {
        project_type: crate::model::ProjectType,
    ) -> &'static str {
        match project_type {
            crate::model::ProjectType::Mod => "mods",
            crate::model::ProjectType::ResourcePack => "resourcepacks",

@@ -454,14 +464,14 @@ impl Fetcher {
    }

    // Copy override files to target locations
    self.copy_recursive(&source, &self.base_path)?;
    Self::copy_recursive(&source, &self.base_path)?;
    }

    Ok(())
}

/// Copy directory recursively
fn copy_recursive(&self, source: &Path, dest: &Path) -> Result<()> {
fn copy_recursive(source: &Path, dest: &Path) -> Result<()> {
    if source.is_file() {
        fs::copy(source, dest)?;
    } else if source.is_dir() {

@@ -469,7 +479,7 @@ impl Fetcher {
    for entry in fs::read_dir(source)? {
        let entry = entry?;
        let target = dest.join(entry.file_name());
        self.copy_recursive(&entry.path(), &target)?;
        Self::copy_recursive(&entry.path(), &target)?;
    }
}

@@ -12,6 +12,9 @@ use git2::{

use crate::error::{PakkerError, Result};

type ProgressCallback =
    Option<Box<dyn FnMut(usize, usize, Option<usize>) + 'static>>;

/// Check if a directory is a Git repository
pub fn is_git_repository<P: AsRef<Path>>(path: P) -> bool {
    Repository::open(path).is_ok()
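Introducing the `ProgressCallback` alias addresses clippy's `type_complexity`: the boxed-closure type appears in several signatures, so naming it once keeps them readable. A sketch of the idea:

    // One alias instead of repeating the nested generic type in every signature.
    type ProgressCallback = Option<Box<dyn FnMut(usize, usize, Option<usize>) + 'static>>;

    fn with_progress(mut cb: ProgressCallback) {
        if let Some(f) = cb.as_mut() {
            f(1, 10, None); // e.g. current, total, optional byte count
        }
    }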
@@ -65,9 +68,7 @@ pub fn clone_repository<P: AsRef<Path>>(
    url: &str,
    target_path: P,
    ref_name: &str,
    progress_callback: Option<
        Box<dyn FnMut(usize, usize, Option<usize>) + 'static>,
    >,
    progress_callback: ProgressCallback,
) -> Result<Repository> {
    let target_path = target_path.as_ref();

@@ -147,9 +148,7 @@ pub fn fetch_updates<P: AsRef<Path>>(
    path: P,
    remote_name: &str,
    ref_name: &str,
    progress_callback: Option<
        Box<dyn FnMut(usize, usize, Option<usize>) + 'static>,
    >,
    progress_callback: ProgressCallback,
) -> Result<()> {
    let repo = Repository::open(path)?;
    let mut remote = repo.find_remote(remote_name).map_err(|e| {

@@ -8,6 +8,11 @@ use reqwest::Client;
///
/// Panics if the HTTP client cannot be built, which should only happen in
/// extreme cases like OOM or broken TLS configuration.
#[expect(
    clippy::expect_used,
    reason = "HTTP client build failure is unrecoverable - only fails under \
              extreme system resource exhaustion"
)]
pub fn create_http_client() -> Client {
    Client::builder()
        .pool_max_idle_per_host(10)

22  src/ipc.rs
@@ -63,7 +63,7 @@ pub struct OngoingOperation {
    pub status: OperationStatus,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum OperationType {
    Fetch,

@@ -99,11 +99,10 @@ impl IpcCoordinator {
    /// Get the base IPC directory in tmpfs
    fn get_ipc_base_dir() -> PathBuf {
        // Use XDG_RUNTIME_DIR if available, otherwise fallback to /tmp
        if let Ok(runtime) = std::env::var("XDG_RUNTIME_DIR") {
            PathBuf::from(runtime).join("pakker")
        } else {
            PathBuf::from("/tmp/pakker")
        }
        std::env::var("XDG_RUNTIME_DIR").map_or_else(
            |_| PathBuf::from("/tmp/pakker"),
            |runtime| PathBuf::from(runtime).join("pakker"),
        )
    }

    /// Extract modpack hash from pakku.json's parentLockHash field.

@@ -181,7 +180,7 @@ impl IpcCoordinator {
    /// Acquire an exclusive advisory lock on the ops file for atomic operations.
    /// Returns a guard that releases the lock on drop.
    fn lock_ops_file(&self) -> Result<FileLock, IpcError> {
        log::debug!("Acquiring file lock on {:?}", self.ops_file);
        log::debug!("Acquiring file lock on {}", self.ops_file.display());

        // Open or create the ops file with read/write access
        let file = OpenOptions::new()

@@ -200,14 +199,17 @@ impl IpcCoordinator {
        // Acquire exclusive lock using flock
        unsafe {
            if flock(file.as_raw_fd(), LOCK_EX) != 0 {
                log::warn!("Failed to acquire file lock on {:?}", self.ops_file);
                log::warn!(
                    "Failed to acquire file lock on {}",
                    self.ops_file.display()
                );
                return Err(IpcError::InvalidFormat(
                    "failed to acquire file lock".to_string(),
                ));
            }
        }

        log::debug!("File lock acquired on {:?}", self.ops_file);
        log::debug!("File lock acquired on {}", self.ops_file.display());

        // Return a guard that releases the lock on drop
        Ok(FileLock { file })

@@ -435,7 +437,7 @@ impl IpcCoordinator {
}

impl OperationType {
    pub const fn as_str(&self) -> &'static str {
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Fetch => "fetch",
            Self::Export => "export",

39  src/main.rs
@@ -1,8 +1,11 @@
// Allow pre-existing clippy warnings for functions with many arguments
// and complex types that would require significant refactoring
#![allow(clippy::too_many_arguments)]
#![allow(clippy::type_complexity)]
#![allow(clippy::large_enum_variant)]
#![expect(
    clippy::multiple_crate_versions,
    reason = "transitive dependency version conflicts from upstream crates"
)]
#![expect(
    clippy::cargo_common_metadata,
    reason = "license and repository not yet configured"
)]

mod cli;
mod error;
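At the crate root, blanket `#![allow(...)]` attributes give way to `#![expect(..., reason = ...)]`: `expect` documents why a lint is suppressed and emits an `unfulfilled_lint_expectations` warning once the suppression becomes unnecessary, so stale exceptions get cleaned up. The shape, as a sketch:

    // Crate-level: warns if the lint ever stops firing, unlike allow.
    #![expect(
        clippy::multiple_crate_versions,
        reason = "transitive dependency conflicts outside our control"
    )]

    fn main() {}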
@@ -78,7 +81,6 @@ async fn main() -> Result<(), PakkerError> {
        &lockfile_path,
        &config_path,
    )
    .await
    },
    Commands::Import(args) => {
        cli::commands::import::execute(

@@ -118,8 +120,12 @@ async fn main() -> Result<(), PakkerError> {
        .await
    },
    Commands::Rm(args) => {
        cli::commands::rm::execute(args, global_yes, &lockfile_path, &config_path)
            .await
        cli::commands::rm::execute(
            &args,
            global_yes,
            &lockfile_path,
            &config_path,
        )
    },
    Commands::Update(args) => {
        cli::commands::update::execute(

@@ -130,15 +136,15 @@ async fn main() -> Result<(), PakkerError> {
        )
        .await
    },
    Commands::Ls(args) => cli::commands::ls::execute(args, &lockfile_path),
    Commands::Ls(args) => cli::commands::ls::execute(&args, &lockfile_path),
    Commands::Set(args) => {
        cli::commands::set::execute(args, &lockfile_path, &config_path).await
        cli::commands::set::execute(&args, &lockfile_path, &config_path)
    },
    Commands::Link(args) => cli::commands::link::execute(args, &lockfile_path),
    Commands::Link(args) => cli::commands::link::execute(&args, &lockfile_path),
    Commands::Unlink(args) => {
        cli::commands::unlink::execute(args, &lockfile_path)
        cli::commands::unlink::execute(&args, &lockfile_path)
    },
    Commands::Diff(args) => cli::commands::diff::execute(args, &lockfile_path),
    Commands::Diff(args) => cli::commands::diff::execute(&args, &lockfile_path),
    Commands::Fetch(args) => {
        cli::commands::fetch::execute(args, &lockfile_path, &config_path).await
    },

@@ -156,7 +162,7 @@ async fn main() -> Result<(), PakkerError> {
    },
    Commands::Remote(args) => cli::commands::remote::execute(args).await,
    Commands::RemoteUpdate(args) => {
        cli::commands::remote_update::execute(args).await
        cli::commands::remote_update::execute(&args)
    },
    Commands::Status(args) => {
        cli::commands::status::execute(

@@ -169,11 +175,10 @@ async fn main() -> Result<(), PakkerError> {
    },
    Commands::Inspect(args) => {
        cli::commands::inspect::execute(
            args.projects,
            &args.projects,
            &lockfile_path,
            &config_path,
        )
        .await
    },
    Commands::Credentials(args) => {
        match args.subcommand {

@@ -199,7 +204,7 @@ async fn main() -> Result<(), PakkerError> {
        cli::commands::cfg_prj::execute(
            &config_path,
            &lockfile_path,
            prj_args.project,
            &prj_args.project,
            prj_args.r#type.as_deref(),
            prj_args.side.as_deref(),
            prj_args.update_strategy.as_deref(),
@@ -155,11 +155,11 @@ pub struct ResolvedCredentials {
}

impl ResolvedCredentials {
    pub fn load() -> Result<Self> {
    pub fn load() -> Self {
        let pakker_file = PakkerCredentialsFile::load().ok();
        let pakku_file = PakkerCompatCredentialsFile::load().ok();

        Ok(Self {
        Self {
            curseforge_api_key: resolve_secret(
                "PAKKER_CURSEFORGE_API_KEY",
                "curseforge_api_key",

@@ -169,13 +169,13 @@ impl ResolvedCredentials {
                pakku_file
                    .as_ref()
                    .and_then(|f| f.curseforge_api_key.clone()),
            )?,
            ),
            modrinth_token: resolve_secret(
                "PAKKER_MODRINTH_TOKEN",
                "modrinth_token",
                pakker_file.as_ref().and_then(|f| f.modrinth_token.clone()),
                None,
            )?,
            ),
            github_access_token: resolve_secret(
                "PAKKER_GITHUB_TOKEN",
                "github_access_token",

@@ -185,8 +185,8 @@ impl ResolvedCredentials {
                pakku_file
                    .as_ref()
                    .and_then(|f| f.github_access_token.clone()),
            )?,
        })
            ),
        }
    }

    pub fn curseforge_api_key(&self) -> Option<&str> {

@@ -226,28 +226,26 @@ fn resolve_secret(
    keyring_entry: &str,
    pakker_file_value: Option<String>,
    pakku_file_value: Option<String>,
) -> Result<Option<(String, CredentialsSource)>> {
) -> Option<(String, CredentialsSource)> {
    if let Ok(v) = std::env::var(env_key)
        && !v.trim().is_empty()
    {
        return Ok(Some((v.trim().to_string(), CredentialsSource::Env)));
        return Some((v.trim().to_string(), CredentialsSource::Env));
    }

    if let Ok(v) = get_keyring_secret(keyring_entry)
        && !v.trim().is_empty()
    {
        return Ok(Some((v.trim().to_string(), CredentialsSource::Keyring)));
        return Some((v.trim().to_string(), CredentialsSource::Keyring));
    }

    if let Some(v) = pakker_file_value.filter(|v| !v.trim().is_empty()) {
        return Ok(Some((v, CredentialsSource::PakkerFile)));
        return Some((v, CredentialsSource::PakkerFile));
    }

    Ok(
        pakku_file_value
            .filter(|v| !v.trim().is_empty())
            .map(|v| (v, CredentialsSource::PakkerFile)),
    )
    pakku_file_value
        .filter(|v| !v.trim().is_empty())
        .map(|v| (v, CredentialsSource::PakkerFile))
}

fn get_keyring_secret(
@@ -279,8 +277,7 @@ fn delete_keyring_secret(entry: &str) -> Result<()> {
    })?;

    match e.delete_credential() {
        Ok(()) => Ok(()),
        Err(keyring::Error::NoEntry) => Ok(()),
        Ok(()) | Err(keyring::Error::NoEntry) => Ok(()),
        Err(e) => {
            Err(PakkerError::InternalError(format!(
                "Failed to delete keyring entry {entry}: {e}"

@@ -56,14 +56,26 @@ const fn default_redistributable() -> bool {
    true
}

#[expect(
    clippy::trivially_copy_pass_by_ref,
    reason = "required by serde skip_serializing_if which expects fn(&T) -> bool"
)]
const fn is_default_update_strategy(strategy: &UpdateStrategy) -> bool {
    matches!(strategy, UpdateStrategy::Latest)
}

#[expect(
    clippy::trivially_copy_pass_by_ref,
    reason = "required by serde skip_serializing_if which expects fn(&T) -> bool"
)]
const fn is_default_redistributable(redistributable: &bool) -> bool {
    *redistributable
}

#[expect(
    clippy::trivially_copy_pass_by_ref,
    reason = "required by serde skip_serializing_if which expects fn(&T) -> bool"
)]
const fn is_default_export(export: &bool) -> bool {
    *export
}
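These helpers take `&bool`/`&UpdateStrategy` even though both are `Copy`, which normally trips `trivially_copy_pass_by_ref` — but serde's `skip_serializing_if` attribute requires a `fn(&T) -> bool`, so the by-reference signature stays and the lint is expected with a reason. A sketch of the constraint:

    use serde::Serialize;

    // serde calls this with a reference, so &bool is mandatory here.
    const fn is_false(b: &bool) -> bool {
        !*b
    }

    #[derive(Serialize)]
    struct Flags {
        #[serde(skip_serializing_if = "is_false")]
        verbose: bool,
    }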
@@ -233,7 +245,7 @@ impl Project {
    id,
    update_strategy: self.update_strategy,
    redistributable: self.redistributable && other.redistributable,
    subpath: self.subpath.clone().or(other.subpath.clone()),
    subpath: self.subpath.clone().or_else(|| other.subpath.clone()),
    aliases,
    export: if self.export {
        self.export

@@ -53,7 +53,7 @@ fn create_client(
    },
    "github" => {
        Ok(Box::new(GitHubPlatform::with_client(
            get_http_client(),
            &get_http_client(),
            api_key,
        )))
    },

@@ -32,7 +32,10 @@ impl CurseForgePlatform {
    }
}

pub const fn with_client(client: Arc<Client>, api_key: Option<String>) -> Self {
pub const fn with_client(
    client: Arc<Client>,
    api_key: Option<String>,
) -> Self {
    Self { client, api_key }
}

@@ -57,7 +60,6 @@ impl CurseForgePlatform {

const fn map_class_id(class_id: u32) -> ProjectType {
    match class_id {
        6 => ProjectType::Mod,
        12 => ProjectType::ResourcePack,
        6945 => ProjectType::DataPack,
        6552 => ProjectType::Shader,

@@ -68,7 +70,6 @@ impl CurseForgePlatform {

const fn map_release_type(release_type: u32) -> ReleaseType {
    match release_type {
        1 => ReleaseType::Release,
        2 => ReleaseType::Beta,
        3 => ReleaseType::Alpha,
        _ => ReleaseType::Release,
@@ -162,11 +163,7 @@ impl CurseForgePlatform {
    project
}

fn convert_file(
    &self,
    cf_file: CurseForgeFile,
    project_id: &str,
) -> ProjectFile {
fn convert_file(cf_file: CurseForgeFile, project_id: &str) -> ProjectFile {
    let mut hashes = HashMap::new();

    for hash in cf_file.hashes {

@@ -259,12 +256,12 @@ impl PlatformClient for CurseForgePlatform {

    if response.status().is_success() {
        let result: CurseForgeProjectResponse = response.json().await?;
        return Ok(self.convert_project(result.data));
        return Ok(Self::convert_project(result.data));
    }
}

let cf_project = self.search_project_by_slug(identifier).await?;
Ok(self.convert_project(cf_project))
Ok(Self::convert_project(cf_project))
}

async fn request_project_files(

@@ -319,7 +316,7 @@ impl PlatformClient for CurseForgePlatform {
    let files: Vec<ProjectFile> = result
        .data
        .into_iter()
        .map(|f| self.convert_file(f, project_id))
        .map(|f| Self::convert_file(f, project_id))
        .collect();

    Ok(files)

@@ -398,7 +395,7 @@ impl PlatformClient for CurseForgePlatform {
) -> Result<Option<Project>> {
    // Try to fetch project by slug using search API
    match self.search_project_by_slug(slug).await {
        Ok(cf_project) => Ok(Some(self.convert_project(cf_project))),
        Ok(cf_project) => Ok(Some(Self::convert_project(cf_project))),
        Err(PakkerError::ProjectNotFound(_)) => Ok(None),
        Err(e) => Err(e),
    }

@@ -411,6 +408,11 @@ impl PlatformClient for CurseForgePlatform {
    hashes: &[String],
    _algorithm: &str,
) -> Result<Vec<Project>> {
    #[derive(Serialize)]
    struct FingerprintRequest {
        fingerprints: Vec<u32>,
    }

    if hashes.is_empty() {
        return Ok(Vec::new());
    }

@@ -424,11 +426,6 @@ impl PlatformClient for CurseForgePlatform {
        return Ok(Vec::new());
    }

    #[derive(Serialize)]
    struct FingerprintRequest {
        fingerprints: Vec<u32>,
    }

    let url = format!("{CURSEFORGE_API_BASE}/fingerprints/432");
    let response = self
        .client
@@ -1,4 +1,7 @@
use std::{collections::HashMap, sync::Arc};
use std::{
    collections::HashMap,
    sync::{Arc, LazyLock},
};

use async_trait::async_trait;
use regex::Regex;

@@ -20,9 +23,9 @@ pub struct GitHubPlatform {
}

impl GitHubPlatform {
    pub fn with_client(client: Arc<Client>, token: Option<String>) -> Self {
    pub fn with_client(client: &Arc<Client>, token: Option<String>) -> Self {
        Self {
            client: (*client).clone(),
            client: (**client).clone(),
            token,
        }
    }

@@ -70,7 +73,6 @@ impl GitHubPlatform {
}

fn convert_release(
    &self,
    owner: &str,
    repo: &str,
    release: GitHubRelease,

@@ -91,9 +93,15 @@ impl GitHubPlatform {
    }
}

#[expect(clippy::expect_used, reason = "regex literal is always valid")]
static MC_VERSION_RE: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(r"(?:^|[^\d.])(\d+\.\d+(?:\.\d+)?)(?:[^\d]|$)")
        .expect("MC_VERSION_RE pattern is valid")
});

// Helper functions for extracting metadata from GitHub releases
fn extract_mc_versions(tag: &str, asset_name: &str) -> Vec<String> {
    let re = Regex::new(r"(?:^|[^\d.])(\d+\.\d+(?:\.\d+)?)(?:[^\d]|$)").unwrap();
    let re = &*MC_VERSION_RE;
    let mut versions = Vec::new();

    log::debug!("Extracting MC versions from tag='{tag}', asset='{asset_name}'");
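Hoisting the regex into a `static` `LazyLock` fixes the recompile-per-call pattern: `Regex::new` parses and compiles the pattern, so doing it inside a frequently called function wastes work, while `LazyLock` compiles it once on first use. Sketch:

    use std::sync::LazyLock;
    use regex::Regex;

    // Compiled once, on first access, then shared by all callers.
    static VERSION_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"\d+\.\d+(?:\.\d+)?").expect("pattern is valid"));

    fn has_version(s: &str) -> bool {
        VERSION_RE.is_match(s)
    }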
@@ -182,8 +190,7 @@ fn detect_project_type(asset_name: &str, repo_name: &str) -> ProjectType {

impl GitHubPlatform {
    fn convert_asset(
        &self,
        asset: GitHubAsset,
        asset: &GitHubAsset,
        release: &GitHubRelease,
        repo_id: &str,
        repo_name: &str,

@@ -278,7 +285,7 @@ impl PlatformClient for GitHubPlatform {
) -> Result<Project> {
    let (owner, repo) = Self::parse_repo_identifier(identifier)?;
    let release = self.get_latest_release(&owner, &repo).await?;
    Ok(self.convert_release(&owner, &repo, release))
    Ok(Self::convert_release(&owner, &repo, release))
}

async fn request_project_files(

@@ -295,9 +302,14 @@ impl PlatformClient for GitHubPlatform {
    for release in releases {
        for asset in &release.assets {
            // Filter for .jar files (mods) or .zip files (modpacks)
            if asset.name.ends_with(".jar") || asset.name.ends_with(".zip") {
                let file =
                    self.convert_asset(asset.clone(), &release, project_id, &repo);
            if std::path::Path::new(&asset.name)
                .extension()
                .is_some_and(|ext| ext.eq_ignore_ascii_case("jar"))
                || std::path::Path::new(&asset.name)
                    .extension()
                    .is_some_and(|ext| ext.eq_ignore_ascii_case("zip"))
            {
                let file = Self::convert_asset(asset, &release, project_id, &repo);
                files.push(file);
            }
        }
@@ -34,7 +34,7 @@ impl ModrinthPlatform {
        return Err(PakkerError::ProjectNotFound(url.to_string()));
    }
    let mr_project: ModrinthProject = response.json().await?;
    Ok(self.convert_project(mr_project))
    Ok(Self::convert_project(mr_project))
}

async fn request_project_files_url(

@@ -57,8 +57,8 @@ impl ModrinthPlatform {
        .to_string();
    Ok(
        mr_versions
            .into_iter()
            .map(|v| self.convert_version(v, &project_id))
            .iter()
            .map(|v| Self::convert_version(v, &project_id))
            .collect(),
    )
}

@@ -86,7 +86,6 @@ impl ModrinthPlatform {

fn map_project_type(type_str: &str) -> ProjectType {
    match type_str {
        "mod" => ProjectType::Mod,
        "resourcepack" => ProjectType::ResourcePack,
        "datapack" => ProjectType::DataPack,
        "shader" => ProjectType::Shader,

@@ -96,7 +95,6 @@ impl ModrinthPlatform {

const fn map_side(client: bool, server: bool) -> ProjectSide {
    match (client, server) {
        (true, true) => ProjectSide::Both,
        (true, false) => ProjectSide::Client,
        (false, true) => ProjectSide::Server,
        _ => ProjectSide::Both,

@@ -105,14 +103,13 @@ impl ModrinthPlatform {

fn map_release_type(type_str: &str) -> ReleaseType {
    match type_str {
        "release" => ReleaseType::Release,
        "beta" => ReleaseType::Beta,
        "alpha" => ReleaseType::Alpha,
        _ => ReleaseType::Release,
    }
}

fn convert_project(&self, mr_project: ModrinthProject) -> Project {
fn convert_project(mr_project: ModrinthProject) -> Project {
    let pakku_id = generate_pakku_id();
    let mut project = Project::new(
        pakku_id,
@@ -133,9 +130,12 @@ impl ModrinthPlatform {
    project
}

#[expect(
    clippy::expect_used,
    reason = "Modrinth API guarantees every version has at least one file"
)]
fn convert_version(
    &self,
    mr_version: ModrinthVersion,
    mr_version: &ModrinthVersion,
    project_id: &str,
) -> ProjectFile {
    let mut hashes = HashMap::new();

@@ -274,7 +274,7 @@ impl PlatformClient for ModrinthPlatform {
    }

    let mr_project: ModrinthProject = response.json().await?;
    Ok(Some(self.convert_project(mr_project)))
    Ok(Some(Self::convert_project(mr_project)))
}

/// Uses Modrinth's `/v2/version_files` endpoint to resolve projects by

@@ -284,10 +284,6 @@ impl PlatformClient for ModrinthPlatform {
    hashes: &[String],
    algorithm: &str,
) -> Result<Vec<Project>> {
    if hashes.is_empty() {
        return Ok(Vec::new());
    }

    #[derive(Serialize)]
    struct HashBatchRequest<'a> {
        hashes: &'a [String],

@@ -299,6 +295,10 @@ impl PlatformClient for ModrinthPlatform {
        project_id: String,
    }

    if hashes.is_empty() {
        return Ok(Vec::new());
    }

    let url = format!("{MODRINTH_API_BASE}/version_files");
    let response = self
        .client

@@ -326,12 +326,11 @@ impl PlatformClient for ModrinthPlatform {
    }
    seen_project_ids.insert(version.project_id.clone());

    match self
    if let Ok(project) = self
        .request_project_with_files(&version.project_id, &[], &[])
        .await
    {
        Ok(project) => projects.push(project),
        Err(_) => continue,
        projects.push(project);
    }
}
@@ -100,6 +100,10 @@ impl DependencyResolver {
    })
}

#[expect(
    clippy::expect_used,
    reason = "projects.len() == 1 is checked directly above"
)]
async fn fetch_dependency(
    &self,
    dep_id: &str,

@@ -132,7 +136,7 @@ impl DependencyResolver {
    }

    if projects.len() == 1 {
        Ok(projects.into_iter().next().unwrap())
        Ok(projects.into_iter().next().expect("length is exactly 1"))
    } else {
        let mut merged = projects.remove(0);
        for project in projects {

@@ -166,12 +166,12 @@ pub fn prompt_input_optional(prompt: &str) -> io::Result<Option<String>> {
pub fn prompt_curseforge_api_key(
    skip_prompts: bool,
) -> io::Result<Option<String>> {
    use dialoguer::Password;

    if skip_prompts {
        return Ok(None);
    }

    use dialoguer::Password;

    println!();
    println!("CurseForge API key is required but not configured.");
    println!("Get your API key from: https://console.curseforge.com/");
@@ -45,35 +45,14 @@ fn is_semver_prerelease(s: &str) -> bool {

/// Decompose a version string into its component parts
fn decompose(str_in: &str) -> VecDeque<SortingType> {
    if str_in.is_empty() {
        return VecDeque::new();
    }

    // Strip build metadata (after `+`)
    let s = if let Some((left, _)) = str_in.split_once('+') {
        left
    } else {
        str_in
    };

    let mut out: VecDeque<SortingType> = VecDeque::new();
    let mut current = String::new();

    let mut currently_numeric = s.starts_with(|c: char| c.is_ascii_digit());
    let mut skip = s.starts_with('-');
    use SortingType::{Lexical, Numerical, SemverPrerelease};

    fn handle_split(
        current: &str,
        c: Option<&char>,
        currently_numeric: bool,
    ) -> Option<SortingType> {
        let numeric = if let Some(c) = c {
            c.is_ascii_digit()
        } else {
            false
        };

        use SortingType::{Lexical, Numerical, SemverPrerelease};
        let numeric = c.is_some_and(char::is_ascii_digit);

        if currently_numeric {
            if numeric {

@@ -101,6 +80,23 @@ fn decompose(str_in: &str) -> VecDeque<SortingType> {
        }
    }

    if str_in.is_empty() {
        return VecDeque::new();
    }

    // Strip build metadata (after `+`)
    let s = if let Some((left, _)) = str_in.split_once('+') {
        left
    } else {
        str_in
    };

    let mut out: VecDeque<SortingType> = VecDeque::new();
    let mut current = String::new();

    let mut currently_numeric = s.starts_with(|c: char| c.is_ascii_digit());
    let mut skip = s.starts_with('-');

    for c in s.chars() {
        if let Some(part) = handle_split(&current, Some(&c), currently_numeric) {
            if skip {
@@ -131,6 +127,10 @@ fn decompose(str_in: &str) -> VecDeque<SortingType> {
/// This matches the behavior of flexver-java:
/// - "1.0.0" > "1.0.0-beta" (release > pre-release)
/// - "1.0.0-beta" < "1.0.0+build123" (pre-release < build metadata)
#[expect(
    clippy::unreachable,
    reason = "the VersionComparisonIterator never yields (None, None)"
)]
pub fn compare(left: &str, right: &str) -> Ordering {
    let iter = VersionComparisonIterator {
        left: decompose(left),

@@ -15,7 +15,7 @@ pub fn hash_to_hex(hash: impl AsRef<[u8]>) -> String {
    let bytes = hash.as_ref();
    let mut hex = String::with_capacity(bytes.len() * 2);
    for byte in bytes {
        write!(hex, "{byte:02x}").unwrap();
        let _ = write!(hex, "{byte:02x}");
    }
    hex
}

@@ -99,7 +99,7 @@ pub fn compute_md5<P: AsRef<Path>>(path: P) -> Result<String> {
    let hash = hasher.finalize();
    let mut hex = String::with_capacity(hash.len() * 2);
    for byte in hash {
        std::fmt::write(&mut hex, format_args!("{byte:02x}")).unwrap();
        let _ = std::fmt::write(&mut hex, format_args!("{byte:02x}"));
    }
    Ok(hex)
}