initial commit

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ife1391ed23a1e7f388b1b5eca90b9ea76a6a6964
This commit is contained in:
raf 2026-01-29 19:36:25 +03:00
commit ef28bdaeb4
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
63 changed files with 17292 additions and 0 deletions

260
src/fetch.rs Normal file
View file

@@ -0,0 +1,260 @@
use std::{
fs,
path::{Path, PathBuf},
};
use indicatif::{ProgressBar, ProgressStyle};
use reqwest::Client;
use crate::{
error::{PakkerError, Result},
model::{Config, LockFile, Project, ProjectFile},
utils::verify_hash,
};
/// Public entry point for fetching pack contents.
///
/// Thin facade over [`FileFetcher`]; holds the shared HTTP client and the
/// root directory that all downloaded files are placed under.
pub struct Fetcher {
    // reqwest client reused across requests (clones share the pool).
    client: Client,
    // Root directory all target paths are resolved against.
    base_path: PathBuf,
}
/// Worker that performs the actual per-project file downloads,
/// hash verification, and override syncing.
///
/// Structurally identical to [`Fetcher`]; constructed by it with a cloned
/// client and base path.
pub struct FileFetcher {
    // HTTP client used for all downloads.
    client: Client,
    // Root directory downloads and overrides are written into.
    base_path: PathBuf,
}
impl Fetcher {
    /// Build a fetcher rooted at `base_path` with a fresh HTTP client.
    pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
        let base_path = base_path.as_ref().to_path_buf();
        Self {
            client: Client::new(),
            base_path,
        }
    }

    /// Fetch all project files for `lockfile`, delegating the real work to
    /// a [`FileFetcher`] that shares this fetcher's client and root path.
    pub async fn fetch_all(
        &self,
        lockfile: &LockFile,
        config: &Config,
    ) -> Result<()> {
        // reqwest::Client clones are cheap handles onto one connection pool.
        let worker = FileFetcher {
            client: self.client.clone(),
            base_path: self.base_path.clone(),
        };
        worker.fetch_all(lockfile, config).await
    }

    /// Alias for [`Self::fetch_all`].
    pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> {
        self.fetch_all(lockfile, config).await
    }
}
impl FileFetcher {
    /// Fetch every exportable project in `lockfile`, then sync overrides.
    ///
    /// Downloads run sequentially so the spinner message stays readable.
    ///
    /// # Errors
    /// Propagates the first download, hash-verification, or I/O failure.
    pub async fn fetch_all(
        &self,
        lockfile: &LockFile,
        config: &Config,
    ) -> Result<()> {
        let exportable_projects: Vec<_> =
            lockfile.projects.iter().filter(|p| p.export).collect();
        let total = exportable_projects.len();

        let spinner = ProgressBar::new(total as u64);
        spinner.set_style(
            ProgressStyle::default_spinner()
                .template("{spinner:.green} {msg}")
                // Static, known-good template literal: a parse failure here
                // would be a programming error, not a runtime condition.
                .expect("static spinner template must parse"),
        );

        for (idx, project) in exportable_projects.iter().enumerate() {
            // `name` appears to map locale/source keys to display names;
            // show whichever entry comes first, or "unknown" if empty.
            let name = project
                .name
                .values()
                .next()
                .map_or("unknown", std::string::String::as_str);
            spinner.set_message(format!("Fetching {} ({}/{})", name, idx + 1, total));
            self.fetch_project(project, lockfile, config).await?;
        }
        spinner.finish_with_message("All projects fetched");

        // Overrides are applied last so they can overwrite fetched files.
        self.sync_overrides(config)?;
        Ok(())
    }

    /// Fetch the single best file for `project` into its target path.
    ///
    /// The download is skipped when the target already exists and passes
    /// hash verification. A corrupt download is deleted and reported as
    /// [`PakkerError::HashMismatch`].
    pub async fn fetch_project(
        &self,
        project: &Project,
        lockfile: &LockFile,
        config: &Config,
    ) -> Result<()> {
        let file = self.select_best_file(project, lockfile)?;
        let target_path = self.get_target_path(project, file, config);

        // Only one (algorithm, digest) pair is checked; the iteration order
        // of `hashes` is unspecified, but any single matching digest is
        // enough to establish the cached file is intact.
        if target_path.exists()
            && let Some((algo, expected_hash)) = file.hashes.iter().next()
            && verify_hash(&target_path, algo, expected_hash)?
        {
            log::info!("File already exists and is valid: {}", file.file_name);
            return Ok(());
        }

        log::info!("Downloading: {}", file.file_name);
        self.download_file(&file.url, &target_path).await?;

        if let Some((algo, expected_hash)) = file.hashes.iter().next()
            && !verify_hash(&target_path, algo, expected_hash)?
        {
            // Remove the corrupt download so a retry starts clean.
            fs::remove_file(&target_path)?;
            return Err(PakkerError::HashMismatch {
                file: file.file_name.clone(),
                expected: expected_hash.clone(),
                // NOTE(review): `verify_hash` only reports pass/fail, so the
                // real digest is unavailable here.
                actual: "mismatch".to_string(),
            });
        }

        log::info!("Successfully downloaded: {}", file.file_name);
        Ok(())
    }

    /// Pick the best compatible file for `project`: highest release type
    /// (release > beta > alpha), ties broken by newest publication date.
    ///
    /// # Errors
    /// Returns [`PakkerError::FileNotFound`] when no file matches the
    /// lockfile's Minecraft versions and loaders.
    fn select_best_file<'a>(
        &self,
        project: &'a Project,
        lockfile: &LockFile,
    ) -> Result<&'a ProjectFile> {
        // Hoisted out of the filter closure: loader names are invariant
        // across candidate files, so compute them once instead of per file.
        let loader_names = lockfile.get_loader_names();
        let compatible_files: Vec<&'a ProjectFile> = project
            .files
            .iter()
            .filter(|f| f.is_compatible(&lockfile.mc_versions, &loader_names))
            .collect();

        if compatible_files.is_empty() {
            return Err(PakkerError::FileNotFound(format!(
                "No compatible files for project: {:?}",
                project.name.values().next()
            )));
        }

        let best = compatible_files
            .into_iter()
            .max_by_key(|f| {
                let type_priority = match f.release_type {
                    crate::model::ReleaseType::Release => 3,
                    crate::model::ReleaseType::Beta => 2,
                    crate::model::ReleaseType::Alpha => 1,
                };
                // Tuple ordering: release type dominates, then recency.
                (type_priority, &f.date_published)
            })
            .expect("non-empty: emptiness checked above");
        Ok(best)
    }

    /// Compute the destination path for `file`: base path, then either a
    /// user-configured directory for the project type or the built-in
    /// default, then an optional per-project subpath, then the file name.
    fn get_target_path(
        &self,
        project: &Project,
        file: &ProjectFile,
        config: &Config,
    ) -> PathBuf {
        let mut path = self.base_path.clone();

        // A config entry keyed by the project type's string form overrides
        // the default directory for that type.
        if let Some(custom_path) = config.paths.get(&project.r#type.to_string()) {
            path.push(custom_path);
        } else {
            path.push(self.get_default_path(&project.r#type));
        }

        if let Some(subpath) = &project.subpath {
            path.push(subpath);
        }

        path.push(&file.file_name);
        path
    }

    /// Default install directory (relative to the pack root) per type.
    const fn get_default_path(
        &self,
        project_type: &crate::model::ProjectType,
    ) -> &str {
        // Exhaustive on purpose: adding a ProjectType variant must force a
        // decision about its install directory at compile time.
        match project_type {
            crate::model::ProjectType::Mod => "mods",
            crate::model::ProjectType::ResourcePack => "resourcepacks",
            crate::model::ProjectType::DataPack => "datapacks",
            crate::model::ProjectType::Shader => "shaderpacks",
            crate::model::ProjectType::World => "saves",
        }
    }

    /// Download `url` to `target_path` via a temporary file plus rename.
    ///
    /// NOTE(review): the std::fs calls here block the async executor;
    /// consider tokio::fs or spawn_blocking if downloads ever run
    /// concurrently — confirm against the runtime in use.
    async fn download_file(&self, url: &str, target_path: &Path) -> Result<()> {
        if let Some(parent) = target_path.parent() {
            fs::create_dir_all(parent)?;
        }

        let response = self.client.get(url).send().await?;
        if !response.status().is_success() {
            return Err(PakkerError::DownloadFailed(url.to_string()));
        }
        let bytes = response.bytes().await?;

        // Write to "<full name>.tmp" and rename over the target so readers
        // never observe a half-written file. We append ".tmp" rather than
        // using `with_extension`, which REPLACES the extension and would map
        // both "a.jar" and "a.zip" to the same "a.tmp" (and clobber a real
        // target that happened to be named "a.tmp").
        let temp_path = {
            let mut name = target_path.as_os_str().to_owned();
            name.push(".tmp");
            PathBuf::from(name)
        };
        fs::write(&temp_path, bytes)?;
        fs::rename(&temp_path, target_path)?;
        Ok(())
    }

    /// Copy each configured override into the pack root.
    ///
    /// Directory overrides merge their *contents* into the root; file
    /// overrides are copied under their own file name.
    fn sync_overrides(&self, config: &Config) -> Result<()> {
        for override_path in &config.overrides {
            let source = self.base_path.join(override_path);
            // Missing overrides are not an error; skip them silently.
            if !source.exists() {
                continue;
            }
            // A file override must keep its own name: passing the root
            // directory itself as the fs::copy destination would fail.
            let dest = if source.is_file() {
                match source.file_name() {
                    Some(name) => self.base_path.join(name),
                    None => continue,
                }
            } else {
                self.base_path.clone()
            };
            // Guard against copying a file onto itself (override directly
            // inside the root), which fs::copy could truncate.
            if dest == source {
                continue;
            }
            self.copy_recursive(&source, &dest)?;
        }
        Ok(())
    }

    /// Recursively copy `source` to `dest` (file → file, dir → merged dir).
    fn copy_recursive(&self, source: &Path, dest: &Path) -> Result<()> {
        if source.is_file() {
            fs::copy(source, dest)?;
        } else if source.is_dir() {
            fs::create_dir_all(dest)?;
            for entry in fs::read_dir(source)? {
                let entry = entry?;
                let target = dest.join(entry.file_name());
                // NOTE(review): no cycle/symlink guard — a symlink loop or a
                // source nested inside dest could recurse indefinitely.
                self.copy_recursive(&entry.path(), &target)?;
            }
        }
        Ok(())
    }
}