diff --git a/src/cli/commands/fetch.rs b/src/cli/commands/fetch.rs
index 9b88b1d..9658a73 100644
--- a/src/cli/commands/fetch.rs
+++ b/src/cli/commands/fetch.rs
@@ -38,7 +38,9 @@ pub async fn execute(
     let _guard = OperationGuard::new(coordinator, operation_id);
 
     // Create fetcher with shelve option
-    let fetcher = Fetcher::new(".").with_shelve(args.shelve);
+    let fetcher = Fetcher::new(".")
+        .with_shelve(args.shelve)
+        .with_retry(args.retry);
 
     // Fetch all projects (progress indicators handled in fetch.rs)
     fetcher.fetch_all(&lockfile, &config).await?;
diff --git a/src/fetch.rs b/src/fetch.rs
index ecf0f28..7c7d41c 100644
--- a/src/fetch.rs
+++ b/src/fetch.rs
@@ -19,17 +19,20 @@ use crate::{
 const MAX_CONCURRENT_DOWNLOADS: usize = 8;
 
 pub struct Fetcher {
     client: Client,
     base_path: PathBuf,
     shelve: bool,
+    /// Retries after a failed download attempt (0 = fail on first error).
+    retry_count: u32,
 }
 
 impl Fetcher {
     pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
         Self {
             client: Client::new(),
             base_path: base_path.as_ref().to_path_buf(),
             shelve: false,
+            retry_count: 0,
         }
     }
 
@@ -38,6 +41,12 @@ impl Fetcher {
         self
     }
 
+    /// Set how many times a failed download is retried (default 0).
+    pub const fn with_retry(mut self, retry_count: u32) -> Self {
+        self.retry_count = retry_count;
+        self
+    }
+
     pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> {
         self.fetch_all(lockfile, config).await
     }
@@ -96,6 +105,7 @@ impl Fetcher {
             client,
             base_path,
             shelve: false, // Shelving happens at sync level, not per-project
+            retry_count: 0,
         };
 
         let result = fetcher.fetch_project(&project, lockfile, config).await;
@@ -389,14 +399,47 @@ impl Fetcher {
         }
     }
 
-    /// Download a file from URL to target path
+    /// Download a file from URL to target path, retrying failed attempts
+    /// up to `retry_count` extra times with a 1-second pause between tries.
     async fn download_file(&self, url: &str, target_path: &Path) -> Result<()> {
         // Create parent directory
         if let Some(parent) = target_path.parent() {
             fs::create_dir_all(parent)?;
         }
 
-        // Download file
+        // One initial attempt plus `retry_count` retries.
+        let max_attempts = self.retry_count.saturating_add(1);
+
+        for attempt in 0..max_attempts {
+            match self.download_single_attempt(url, target_path).await {
+                Ok(()) => return Ok(()),
+                // Not the final attempt: log the error and retry after a pause.
+                Err(e) if attempt + 1 < max_attempts => {
+                    log::warn!(
+                        "Download attempt {}/{} failed for {}: {}, retrying...",
+                        attempt + 1,
+                        max_attempts,
+                        url,
+                        e
+                    );
+                    tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
+                },
+                // Final attempt: surface the error to the caller.
+                Err(e) => return Err(e),
+            }
+        }
+
+        // `max_attempts >= 1` and the final iteration always returns above.
+        unreachable!("download_file: retry loop always returns")
+    }
+
+    /// Perform a single download: GET `url` and write the body to
+    /// `target_path` atomically (temp file + rename).
+    async fn download_single_attempt(
+        &self,
+        url: &str,
+        target_path: &Path,
+    ) -> Result<()> {
         let response = self.client.get(url).send().await?;
 
         if !response.status().is_success() {
@@ -405,7 +448,6 @@
 
         let bytes = response.bytes().await?;
 
-        // Write to temporary file first (atomic write)
         let temp_path = target_path.with_extension("tmp");
         fs::write(&temp_path, bytes)?;
         fs::rename(temp_path, target_path)?;