fetch: add retry support for downloads

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I5920652b1f84cd8d03e3f8c9d17e5aa76a6a6964
This commit is contained in:
raf 2026-04-18 22:18:40 +03:00
commit 0048a1cd73
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
2 changed files with 44 additions and 10 deletions

View file

@ -38,7 +38,9 @@ pub async fn execute(
let _guard = OperationGuard::new(coordinator, operation_id);
// Create fetcher with shelve option
let fetcher = Fetcher::new(".").with_shelve(args.shelve);
let fetcher = Fetcher::new(".")
.with_shelve(args.shelve)
.with_retry(args.retry);
// Fetch all projects (progress indicators handled in fetch.rs)
fetcher.fetch_all(&lockfile, &config).await?;

View file

@ -19,17 +19,19 @@ use crate::{
const MAX_CONCURRENT_DOWNLOADS: usize = 8;
pub struct Fetcher {
client: Client,
base_path: PathBuf,
shelve: bool,
client: Client,
base_path: PathBuf,
shelve: bool,
retry_count: u32,
}
impl Fetcher {
pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
Self {
client: Client::new(),
base_path: base_path.as_ref().to_path_buf(),
shelve: false,
client: Client::new(),
base_path: base_path.as_ref().to_path_buf(),
shelve: false,
retry_count: 0,
}
}
@ -38,6 +40,11 @@ impl Fetcher {
self
}
pub const fn with_retry(mut self, retry_count: u32) -> Self {
self.retry_count = retry_count;
self
}
    /// Synchronize the workspace with `lockfile`.
    ///
    /// Currently a straight alias for `fetch_all` — syncing is implemented
    /// entirely as fetching every project in the lockfile.
    pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> {
        self.fetch_all(lockfile, config).await
    }
@ -96,6 +103,7 @@ impl Fetcher {
client,
base_path,
shelve: false, // Shelving happens at sync level, not per-project
retry_count: 0,
};
let result = fetcher.fetch_project(&project, lockfile, config).await;
@ -389,14 +397,39 @@ impl Fetcher {
}
}
/// Download a file from URL to target path
/// Download a file from URL to target path with retry
async fn download_file(&self, url: &str, target_path: &Path) -> Result<()> {
// Create parent directory
if let Some(parent) = target_path.parent() {
fs::create_dir_all(parent)?;
}
// Download file
let max_attempts = self.retry_count.saturating_add(1);
for attempt in 0..max_attempts {
match self.download_single_attempt(url, target_path).await {
Ok(()) => return Ok(()),
Err(e) if attempt < self.retry_count => {
log::warn!(
"Download attempt {}/{} failed for {}, retrying...",
attempt + 1,
max_attempts,
url
);
tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
},
Err(e) => return Err(e),
}
}
Ok(())
}
async fn download_single_attempt(
&self,
url: &str,
target_path: &Path,
) -> Result<()> {
let response = self.client.get(url).send().await?;
if !response.status().is_success() {
@ -405,7 +438,6 @@ impl Fetcher {
let bytes = response.bytes().await?;
// Write to temporary file first (atomic write)
let temp_path = target_path.with_extension("tmp");
fs::write(&temp_path, bytes)?;
fs::rename(temp_path, target_path)?;