From 34976a66d2bba8a4f4c30703a7fc71ceb30a36c1 Mon Sep 17 00:00:00 2001 From: NotAShelf Date: Fri, 27 Feb 2026 21:27:57 +0300 Subject: [PATCH] treewide: general cleanup Finally had the time to clean up after myself. Does a bunch of things, without breakage as far as I'm aware. I've removed around 20 unnecessary clones, and simplified the architecture a little bit. Signed-off-by: NotAShelf Change-Id: I4d22337b997a3bf5b0593e6068cd1bd86a6a6964 --- src/fetch.rs | 38 ++++++------------------------------ src/http.rs | 11 ++++++++++- src/ipc.rs | 21 ++++++++++---------- src/main.rs | 46 ++++++++++++++++++++++---------------------- src/model/project.rs | 17 ++++++++-------- src/rate_limiter.rs | 40 ++++++++++++++++++++++++++------------ 6 files changed, 87 insertions(+), 86 deletions(-) diff --git a/src/fetch.rs b/src/fetch.rs index 9e1d618..5049c24 100644 --- a/src/fetch.rs +++ b/src/fetch.rs @@ -24,12 +24,6 @@ pub struct Fetcher { shelve: bool, } -pub struct FileFetcher { - client: Client, - base_path: PathBuf, - shelve: bool, -} - impl Fetcher { pub fn new<P: AsRef<Path>>(base_path: P) -> Self { Self { @@ -44,25 +38,10 @@ impl Fetcher { self } - pub async fn fetch_all( - &self, - lockfile: &LockFile, - config: &Config, - ) -> Result<()> { - let fetcher = FileFetcher { - client: self.client.clone(), - base_path: self.base_path.clone(), - shelve: self.shelve, - }; - fetcher.fetch_all(lockfile, config).await - } - pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> { self.fetch_all(lockfile, config).await } -} -impl FileFetcher { /// Fetch all project files according to lockfile with parallel downloads pub async fn fetch_all( &self, @@ -94,14 +73,14 @@ impl FileFetcher { let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT_DOWNLOADS)); // Prepare download tasks + let client = &self.client; + let base_path = &self.base_path; let download_tasks: Vec<_> = exportable_projects .iter() .map(|project| { let semaphore = Arc::clone(&semaphore); - let client = 
self.client.clone(); - let base_path = self.base_path.clone(); - let lockfile = lockfile.clone(); - let config = config.clone(); + let client = client.clone(); + let base_path = base_path.clone(); let project = (*project).clone(); let overall_bar = overall_bar.clone(); @@ -111,11 +90,7 @@ impl FileFetcher { PakkerError::InternalError("Semaphore acquisition failed".into()) })?; - let name = project - .name - .values() - .next() - .map_or("unknown".to_string(), std::clone::Clone::clone); + let name = project.get_name(); let fetcher = Self { client, @@ -123,8 +98,7 @@ impl FileFetcher { shelve: false, // Shelving happens at sync level, not per-project }; - let result = - fetcher.fetch_project(&project, &lockfile, &config).await; + let result = fetcher.fetch_project(&project, lockfile, config).await; // Update progress bar overall_bar.inc(1); diff --git a/src/http.rs b/src/http.rs index 17ba51f..789c7b5 100644 --- a/src/http.rs +++ b/src/http.rs @@ -2,6 +2,12 @@ use std::time::Duration; use reqwest::Client; +/// Create HTTP client with optimized settings for API requests. +/// +/// # Panics +/// +/// Panics if the HTTP client cannot be built, which should only happen in +/// extreme cases like OOM or broken TLS configuration. 
pub fn create_http_client() -> Client { Client::builder() .pool_max_idle_per_host(10) @@ -12,5 +18,8 @@ pub fn create_http_client() -> Client { .timeout(Duration::from_secs(30)) .user_agent("Pakker/0.1.0") .build() - .expect("Failed to build HTTP client") + .expect( + "Failed to build HTTP client - this should never happen unless system \ + resources are exhausted", + ) } diff --git a/src/ipc.rs b/src/ipc.rs index d1aceaa..959ead8 100644 --- a/src/ipc.rs +++ b/src/ipc.rs @@ -153,17 +153,18 @@ impl IpcCoordinator { let ipc_dir = ipc_base.join(&modpack_hash); // Create IPC directory with restricted permissions - if let Err(e) = fs::create_dir_all(&ipc_dir) - && !ipc_dir.exists() - { - return Err(IpcError::IpcDirCreationFailed(e.to_string())); - } + fs::create_dir_all(&ipc_dir).or_else(|e| { + if ipc_dir.exists() { + Ok(()) + } else { + Err(IpcError::IpcDirCreationFailed(e.to_string())) + } + })?; - if ipc_dir.exists() { - // Set permissions to 700 (owner only) - if let Ok(metadata) = fs::metadata(&ipc_dir) - && metadata.permissions().mode() != 0o700 - { + // Set permissions to 700 (owner only) + if let Ok(metadata) = fs::metadata(&ipc_dir) { + let current_mode = metadata.permissions().mode() & 0o777; + if current_mode != 0o700 { let mut perms = metadata.permissions(); perms.set_mode(0o700); let _ = fs::set_permissions(&ipc_dir, perms); diff --git a/src/main.rs b/src/main.rs index aa7a529..95f0aed 100644 --- a/src/main.rs +++ b/src/main.rs @@ -59,15 +59,15 @@ async fn main() -> Result<(), PakkerError> { }, Commands::AddPrj(args) => { cli::commands::add_prj::execute( - args.curseforge.clone(), - args.modrinth.clone(), - args.github.clone(), + args.curseforge, + args.modrinth, + args.github, args.project_type, args.side, args.strategy, args.redistributable, - args.subpath.clone(), - args.aliases.clone(), + args.subpath, + args.aliases, args.export, args.no_deps, args.yes, @@ -121,12 +121,12 @@ async fn main() -> Result<(), PakkerError> { .await }, 
Commands::Credentials(args) => { - match &args.subcommand { + match args.subcommand { Some(cli::CredentialsSubcommand::Set(set_args)) => { cli::commands::credentials_set::execute( - set_args.cf_api_key.clone(), - set_args.modrinth_token.clone(), - set_args.gh_access_token.clone(), + set_args.cf_api_key, + set_args.modrinth_token, + set_args.gh_access_token, ) }, None => { @@ -139,34 +139,34 @@ async fn main() -> Result<(), PakkerError> { } }, Commands::Cfg(args) => { - match &args.subcommand { + match args.subcommand { Some(cli::CfgSubcommand::Prj(prj_args)) => { cli::commands::cfg_prj::execute( &config_path, &lockfile_path, - prj_args.project.clone(), + prj_args.project, prj_args.r#type.as_deref(), prj_args.side.as_deref(), prj_args.update_strategy.as_deref(), prj_args.redistributable, - prj_args.subpath.clone(), - prj_args.add_alias.clone(), - prj_args.remove_alias.clone(), + prj_args.subpath, + prj_args.add_alias, + prj_args.remove_alias, prj_args.export, ) }, None => { cli::commands::cfg::execute( &config_path, - args.name.clone(), - args.version.clone(), - args.description.clone(), - args.author.clone(), - args.mods_path.clone(), - args.resource_packs_path.clone(), - args.data_packs_path.clone(), - args.worlds_path.clone(), - args.shaders_path.clone(), + args.name, + args.version, + args.description, + args.author, + args.mods_path, + args.resource_packs_path, + args.data_packs_path, + args.worlds_path, + args.shaders_path, ) }, } diff --git a/src/model/project.rs b/src/model/project.rs index 75fa917..dd3acdc 100644 --- a/src/model/project.rs +++ b/src/model/project.rs @@ -91,12 +91,13 @@ impl Project { } pub fn get_name(&self) -> String { - self.name.values().next().cloned().unwrap_or_else(|| { - self - .pakku_id - .clone() - .unwrap_or_else(|| "unknown".to_string()) - }) + self + .name + .values() + .next() + .map(|s| s.to_owned()) + .or_else(|| self.pakku_id.as_ref().map(|s| s.to_owned())) + .unwrap_or_else(|| "unknown".to_string()) } pub fn 
matches_input(&self, input: &str) -> bool { @@ -145,10 +146,10 @@ impl Project { pub fn merge(&mut self, other: Self) { // Merge platform identifiers for (platform, id) in other.id { - self.id.entry(platform.clone()).or_insert(id); + self.id.entry(platform).or_insert(id); } for (platform, slug) in other.slug { - self.slug.entry(platform.clone()).or_insert(slug); + self.slug.entry(platform).or_insert(slug); } for (platform, name) in other.name { self.name.entry(platform).or_insert(name); diff --git a/src/rate_limiter.rs b/src/rate_limiter.rs index f5832f8..d3065fd 100644 --- a/src/rate_limiter.rs +++ b/src/rate_limiter.rs @@ -56,18 +56,34 @@ impl RateLimiter { } pub async fn acquire(&self, platform: &str) -> Result<()> { - let config = { + let (rate, burst) = { let inner = self.inner.lock().await; - inner.config.clone() - }; - - let (rate, burst) = match platform.to_lowercase().as_str() { - "modrinth" => (config.modrinth_requests_per_min, config.modrinth_burst), - "curseforge" => { - (config.curseforge_requests_per_min, config.curseforge_burst) - }, - "github" => (config.github_requests_per_min, config.github_burst), - _ => (config.default_requests_per_min, config.default_burst), + match platform.to_lowercase().as_str() { + "modrinth" => { + ( + inner.config.modrinth_requests_per_min, + inner.config.modrinth_burst, + ) + }, + "curseforge" => { + ( + inner.config.curseforge_requests_per_min, + inner.config.curseforge_burst, + ) + }, + "github" => { + ( + inner.config.github_requests_per_min, + inner.config.github_burst, + ) + }, + _ => { + ( + inner.config.default_requests_per_min, + inner.config.default_burst, + ) + }, + } }; let interval = Duration::from_secs(60) / rate.max(1); @@ -76,7 +92,7 @@ impl RateLimiter { let mut inner = self.inner.lock().await; let now = Instant::now(); let platform_requests = - inner.requests.entry(platform.to_string()).or_default(); + inner.requests.entry(platform.to_owned()).or_default(); platform_requests .retain(|t| 
now.duration_since(*t) < Duration::from_secs(60));