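//! Fetching and syncing of project files.
//!
//! The [`Fetcher`] downloads every exported project listed in a [`LockFile`],
//! verifies file hashes, shelves or deletes files it does not recognize, and
//! copies override directories into place.
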
use std::{
    fs,
    path::{Path, PathBuf},
    sync::Arc,
};

use futures::future::join_all;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use reqwest::Client;
use tokio::sync::Semaphore;

use crate::{
    error::{PakkerError, Result},
    model::{Config, LockFile, Project, ProjectFile, UpdateStrategy},
    utils::{FlexVer, verify_hash},
};

/// Maximum number of concurrent downloads
const MAX_CONCURRENT_DOWNLOADS: usize = 8;

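/// Downloads project files in parallel and keeps the instance directory in
/// sync with the lockfile.
///
/// Construct with [`Fetcher::new`] and tune behaviour with the builder-style
/// [`Fetcher::with_shelve`] and [`Fetcher::with_retry`] methods.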
pub struct Fetcher {
    client: Client,
    base_path: PathBuf,
    shelve: bool,
    retry_count: u32,
}

impl Fetcher {
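    /// Creates a fetcher rooted at `base_path`, with shelving disabled and no
    /// retries.
    ///
    /// Illustrative usage (the path is hypothetical and a `lockfile`/`config`
    /// are assumed to be loaded elsewhere):
    ///
    /// ```ignore
    /// let fetcher = Fetcher::new("my-instance")
    ///     .with_shelve(true)
    ///     .with_retry(3);
    /// fetcher.sync(&lockfile, &config).await?;
    /// ```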
    pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
        Self {
            client: Client::new(),
            base_path: base_path.as_ref().to_path_buf(),
            shelve: false,
            retry_count: 0,
        }
    }

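    /// When enabled, unknown `.jar` files found during sync are moved to
    /// `.pakker-shelf/` instead of being deleted.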
    pub const fn with_shelve(mut self, shelve: bool) -> Self {
        self.shelve = shelve;
        self
    }

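    /// Sets the number of extra attempts made after a failed download, so
    /// `retry_count = 3` means up to four attempts per file.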
    pub const fn with_retry(mut self, retry_count: u32) -> Self {
        self.retry_count = retry_count;
        self
    }

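    /// Convenience wrapper that delegates to [`Self::fetch_all`].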
    pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> {
        self.fetch_all(lockfile, config).await
    }

    /// Fetch all project files listed in the lockfile, downloading in parallel.
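    ///
    /// At most `MAX_CONCURRENT_DOWNLOADS` requests are in flight at once.
    /// After the downloads finish, unknown files are shelved or deleted and
    /// override directories are copied into place.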
    pub async fn fetch_all(
        &self,
        lockfile: &LockFile,
        config: &Config,
    ) -> Result<()> {
        let exportable_projects: Vec<_> =
            lockfile.projects.iter().filter(|p| p.export).collect();

        let total = exportable_projects.len();

        if total == 0 {
            log::info!("No projects to fetch");
            return Ok(());
        }

        // Set up multi-progress for parallel download tracking
        let multi_progress = MultiProgress::new();
        let overall_bar = multi_progress.add(ProgressBar::new(total as u64));
        overall_bar.set_style(
            ProgressStyle::default_bar()
                .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
                .unwrap()
                .progress_chars("#>-"),
        );
        overall_bar.set_message("Fetching projects...");

        // Use a semaphore to limit concurrent downloads
        let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT_DOWNLOADS));

        // Prepare download tasks
        let client = &self.client;
        let base_path = &self.base_path;
        let download_tasks: Vec<_> = exportable_projects
            .iter()
            .map(|project| {
                let semaphore = Arc::clone(&semaphore);
                let client = client.clone();
                let base_path = base_path.clone();
                let project = (*project).clone();
                let overall_bar = overall_bar.clone();

                async move {
                    // Acquire semaphore permit to limit concurrency
                    let _permit = semaphore.acquire().await.map_err(|_| {
                        PakkerError::InternalError("Semaphore acquisition failed".into())
                    })?;

                    let name = project.get_name();

                    let fetcher = Self {
                        client,
                        base_path,
                        shelve: false, // Shelving happens at sync level, not per-project
                        retry_count: 0,
                    };

                    let result = fetcher.fetch_project(&project, lockfile, config).await;

                    // Update progress bar
                    overall_bar.inc(1);

                    match &result {
                        Ok(()) => {
                            log::debug!("Successfully fetched: {name}");
                        },
                        Err(e) => {
                            log::error!("Failed to fetch {name}: {e}");
                        },
                    }

                    result.map(|()| name)
                }
            })
            .collect();

        // Execute all downloads in parallel (limited by semaphore)
        let results = join_all(download_tasks).await;

        overall_bar.finish_with_message("All projects fetched");

        // Collect and report errors
        let mut errors = Vec::new();
        let mut success_count = 0;

        for result in results {
            match result {
                Ok(_) => success_count += 1,
                Err(e) => errors.push(e),
            }
        }

        log::info!("Fetch complete: {success_count}/{total} successful");

        if !errors.is_empty() {
            // Return the first error, but log all of them
            for (idx, error) in errors.iter().enumerate() {
                log::error!("Download error {}: {}", idx + 1, error);
            }
            return Err(errors.remove(0));
        }

        // Handle unknown files (shelve or delete)
        self.handle_unknown_files(lockfile, config)?;

        // Sync overrides
        self.sync_overrides(config)?;

        Ok(())
    }

    /// Handle unknown project files that aren't in the lockfile.
    /// If shelve is true, moves them to a shelf directory.
    /// Otherwise, deletes them.
    fn handle_unknown_files(
        &self,
        lockfile: &LockFile,
        config: &Config,
    ) -> Result<()> {
        // Collect all expected file names from lockfile
        let expected_files: std::collections::HashSet<String> = lockfile
            .projects
            .iter()
            .filter(|p| p.export)
            .filter_map(|p| p.files.first().map(|f| f.file_name.clone()))
            .collect();

        // Check each project type directory
        let project_dirs = [
            (
                "mod",
                self.get_default_path(&crate::model::ProjectType::Mod),
            ),
            (
                "resource-pack",
                self.get_default_path(&crate::model::ProjectType::ResourcePack),
            ),
            (
                "shader",
                self.get_default_path(&crate::model::ProjectType::Shader),
            ),
            (
                "data-pack",
                self.get_default_path(&crate::model::ProjectType::DataPack),
            ),
            (
                "world",
                self.get_default_path(&crate::model::ProjectType::World),
            ),
        ];

        // Also check custom paths from config
        let mut dirs_to_check: Vec<PathBuf> = project_dirs
            .iter()
            .map(|(_, dir)| self.base_path.join(dir))
            .collect();

        for custom_path in config.paths.values() {
            dirs_to_check.push(self.base_path.join(custom_path));
        }

        let shelf_dir = self.base_path.join(".pakker-shelf");
        let mut shelved_count = 0;
        let mut deleted_count = 0;

        for dir in dirs_to_check {
            if !dir.exists() {
                continue;
            }

            let entries = match fs::read_dir(&dir) {
                Ok(e) => e,
                Err(_) => continue,
            };

            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }

                let file_name = match path.file_name().and_then(|n| n.to_str()) {
                    Some(name) => name.to_string(),
                    None => continue,
                };

                // Skip if file is expected
                if expected_files.contains(&file_name) {
                    continue;
                }

                // Skip non-jar files (might be configs, etc.)
                if !file_name.ends_with(".jar") {
                    continue;
                }

                if self.shelve {
                    // Move to shelf
                    fs::create_dir_all(&shelf_dir)?;
                    let shelf_path = shelf_dir.join(&file_name);
                    fs::rename(&path, &shelf_path)?;
                    log::info!("Shelved unknown file: {file_name} -> .pakker-shelf/");
                    shelved_count += 1;
                } else {
                    // Delete unknown file
                    fs::remove_file(&path)?;
                    log::info!("Deleted unknown file: {file_name}");
                    deleted_count += 1;
                }
            }
        }

        if shelved_count > 0 {
            log::info!("Shelved {shelved_count} unknown file(s) to .pakker-shelf/");
        }
        if deleted_count > 0 {
            log::info!("Deleted {deleted_count} unknown file(s)");
        }

        Ok(())
    }

    /// Fetch the best-matching file for a single project.
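    ///
    /// Skips the download when the target file already exists and its hash
    /// verifies. If verification fails after downloading, the file is removed
    /// and a [`PakkerError::HashMismatch`] is returned.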
    pub async fn fetch_project(
        &self,
        project: &Project,
        lockfile: &LockFile,
        config: &Config,
    ) -> Result<()> {
        // Select the best file for this project
        let file = self.select_best_file(project, lockfile)?;

        // Determine target path
        let target_path = self.get_target_path(project, file, config);

        // Check if file already exists and is valid
        if target_path.exists()
            && let Some((algo, expected_hash)) = file.hashes.iter().next()
            && verify_hash(&target_path, algo, expected_hash)?
        {
            log::info!("File already exists and is valid: {}", file.file_name);
            return Ok(());
        }

        // Download file
        log::info!("Downloading: {}", file.file_name);
        self.download_file(&file.url, &target_path).await?;

        // Verify hash
        if let Some((algo, expected_hash)) = file.hashes.iter().next()
            && !verify_hash(&target_path, algo, expected_hash)?
        {
            fs::remove_file(&target_path)?;
            return Err(PakkerError::HashMismatch {
                file: file.file_name.clone(),
                expected: expected_hash.clone(),
                actual: "mismatch".to_string(),
            });
        }

        log::info!("Successfully downloaded: {}", file.file_name);
        Ok(())
    }

    /// Select the best file for a project based on constraints
    fn select_best_file<'a>(
        &self,
        project: &'a Project,
        lockfile: &LockFile,
    ) -> Result<&'a ProjectFile> {
        let compatible_files: Vec<&ProjectFile> = project
            .files
            .iter()
            .filter(|f| {
                f.is_compatible(&lockfile.mc_versions, &lockfile.get_loader_names())
            })
            .collect();

        if compatible_files.is_empty() {
            return Err(PakkerError::FileNotFound(format!(
                "No compatible files for project: {:?}",
                project.name.values().next()
            )));
        }

        // Select best file based on update strategy
        let best = if project.update_strategy == UpdateStrategy::FlexVer {
            let mut sorted: Vec<_> = compatible_files.iter().collect();
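            // Sort descending so the newest FlexVer version ends up first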
            sorted.sort_by(|a, b| FlexVer(&b.file_name).cmp(&FlexVer(&a.file_name)));
            *sorted.first().unwrap()
        } else {
            // Prefer release over beta over alpha, then by date published
            compatible_files
                .iter()
                .max_by_key(|f| {
                    let type_priority = match f.release_type {
                        crate::model::ReleaseType::Release => 3,
                        crate::model::ReleaseType::Beta => 2,
                        crate::model::ReleaseType::Alpha => 1,
                    };
                    (type_priority, &f.date_published)
                })
                .unwrap()
        };

        Ok(best)
    }

    /// Get target path for a project file
    fn get_target_path(
        &self,
        project: &Project,
        file: &ProjectFile,
        config: &Config,
    ) -> PathBuf {
        let mut path = self.base_path.clone();

        // Check for custom path in config
        if let Some(custom_path) = config.paths.get(&project.r#type.to_string()) {
            path.push(custom_path);
        } else {
            // Default path based on project type
            path.push(self.get_default_path(&project.r#type));
        }

        // Add subpath if specified
        if let Some(subpath) = &project.subpath {
            path.push(subpath);
        }

        path.push(&file.file_name);
        path
    }

    /// Get default path for project type
    const fn get_default_path(
        &self,
        project_type: &crate::model::ProjectType,
    ) -> &str {
        match project_type {
            crate::model::ProjectType::Mod => "mods",
            crate::model::ProjectType::ResourcePack => "resourcepacks",
            crate::model::ProjectType::DataPack => "datapacks",
            crate::model::ProjectType::Shader => "shaderpacks",
            crate::model::ProjectType::World => "saves",
        }
    }

    /// Download a file from URL to target path with retry
    async fn download_file(&self, url: &str, target_path: &Path) -> Result<()> {
        // Create parent directory
        if let Some(parent) = target_path.parent() {
            fs::create_dir_all(parent)?;
        }

        let max_attempts = self.retry_count.saturating_add(1);

        for attempt in 0..max_attempts {
            match self.download_single_attempt(url, target_path).await {
                Ok(()) => return Ok(()),
                Err(_e) if attempt < self.retry_count => {
                    log::warn!(
                        "Download attempt {}/{} failed for {}, retrying...",
                        attempt + 1,
                        max_attempts,
                        url
                    );
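                    // Simple fixed backoff: pause one second before retrying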
                    tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
                },
                Err(e) => return Err(e),
            }
        }

        Ok(())
    }

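    /// Perform a single HTTP GET of `url`, writing the body to a `.tmp` file
    /// first and renaming it into place so a partially-downloaded file never
    /// ends up at `target_path`.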
    async fn download_single_attempt(
        &self,
        url: &str,
        target_path: &Path,
    ) -> Result<()> {
        let response = self.client.get(url).send().await?;

        if !response.status().is_success() {
            return Err(PakkerError::DownloadFailed(url.to_string()));
        }

        let bytes = response.bytes().await?;

        let temp_path = target_path.with_extension("tmp");
        fs::write(&temp_path, bytes)?;
        fs::rename(temp_path, target_path)?;

        Ok(())
    }

    /// Sync override directories
    fn sync_overrides(&self, config: &Config) -> Result<()> {
        for override_path in &config.overrides {
            let source = self.base_path.join(override_path);
            if !source.exists() {
                continue;
            }

            // Copy override files to target locations
            self.copy_recursive(&source, &self.base_path)?;
        }

        Ok(())
    }

    /// Copy directory recursively
    fn copy_recursive(&self, source: &Path, dest: &Path) -> Result<()> {
        if source.is_file() {
            fs::copy(source, dest)?;
        } else if source.is_dir() {
            fs::create_dir_all(dest)?;
            for entry in fs::read_dir(source)? {
                let entry = entry?;
                let target = dest.join(entry.file_name());
                self.copy_recursive(&entry.path(), &target)?;
            }
        }

        Ok(())
    }
}