build: bump all dependencies and set MSRV to 1.94; fix build failures

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I7d331410864358d30191781d1e6c23f46a6a6964
This commit is contained in:
raf 2026-04-21 17:21:39 +03:00
commit 8b2140c057
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
12 changed files with 248 additions and 196 deletions

View file

@ -72,17 +72,17 @@ pub async fn execute(
&format!("Add {} to lockfile?", file_path.display()),
false,
global_yes,
)? {
if let Ok(file_data) = fs::read(file_path) {
use sha1::Digest;
let mut hasher = sha1::Sha1::new();
hasher.update(&file_data);
let hash = format!("{:x}", hasher.finalize());
file_hashes.push(FileHash {
path: file_path.clone(),
hash,
});
}
)? && let Ok(file_data) = fs::read(file_path)
{
use sha1::Digest;
let mut hasher = sha1::Sha1::new();
hasher.update(&file_data);
let hash =
crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
file_hashes.push(FileHash {
path: file_path.clone(),
hash,
});
}
}
@ -91,8 +91,7 @@ pub async fn execute(
let result = add_files_batch(&mut lockfile, file_hashes).await;
if let Err(e) = result {
log::warn!(
"Batch lookup failed, falling back to individual lookups: {}",
e
"Batch lookup failed, falling back to individual lookups: {e}"
);
for fh in fallback_hashes {
if let Err(e) =
@ -220,7 +219,7 @@ async fn add_file_to_lockfile(
use sha1::Digest;
let mut hasher = sha1::Sha1::new();
hasher.update(&file_data);
let hash = format!("{:x}", hasher.finalize());
let hash = crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
// Try Modrinth first (SHA-1 hash)
if let Ok(Some(project)) = modrinth.lookup_by_hash(&hash).await {
@ -279,7 +278,11 @@ async fn add_files_batch(
for file_info in &project.files {
for (idx, fh) in file_hashes.iter().enumerate() {
if !matched_indices.contains(&idx)
&& file_info.hashes.get("sha1").map(|s| s.as_str()) == Some(&fh.hash)
&& file_info
.hashes
.get("sha1")
.map(std::string::String::as_str)
== Some(&fh.hash)
{
lockfile.add_project(project.clone());
added_pakku_ids.insert(pakku_id.clone());

View file

@ -409,7 +409,7 @@ impl Fetcher {
for attempt in 0..max_attempts {
match self.download_single_attempt(url, target_path).await {
Ok(()) => return Ok(()),
Err(e) if attempt < self.retry_count => {
Err(_e) if attempt < self.retry_count => {
log::warn!(
"Download attempt {}/{} failed for {}, retrying...",
attempt + 1,

View file

@ -119,18 +119,17 @@ impl Config {
Ok(config)
},
Ok(ConfigWrapper::Pakku { pakku }) => {
let name = pakku
.parent
.as_ref()
.map(|p| {
let name = pakku.parent.as_ref().map_or_else(
|| "unknown".to_string(),
|p| {
p.id
.split('/')
.next_back()
.unwrap_or(&p.id)
.trim_end_matches(".git")
.to_string()
})
.unwrap_or_else(|| "unknown".to_string());
},
);
let version = pakku
.parent

View file

@ -45,7 +45,7 @@ impl ForkIntegrity {
pub fn hash_content(content: &str) -> String {
let mut hasher = Sha256::new();
hasher.update(content.as_bytes());
format!("{:x}", hasher.finalize())
crate::utils::hash::hash_to_hex(hasher.finalize().as_slice())
}
/// Reference type for Git operations

View file

@ -96,8 +96,8 @@ impl Project {
.name
.values()
.next()
.map(|s| s.to_owned())
.or_else(|| self.pakku_id.as_ref().map(|s| s.to_owned()))
.map(std::borrow::ToOwned::to_owned)
.or_else(|| self.pakku_id.as_ref().map(std::borrow::ToOwned::to_owned))
.unwrap_or_else(|| "unknown".to_string())
}
@ -176,7 +176,7 @@ impl Project {
///
/// # Errors
/// Returns `PakkerError::InvalidProject` if the projects have different types
/// or conflicting pakku_links.
/// or conflicting `pakku_links`.
pub fn merged(&self, other: Self) -> Result<Self> {
if self.r#type != other.r#type {
return Err(PakkerError::InvalidProject(format!(
@ -192,10 +192,10 @@ impl Project {
}
// Prefer non-default side
let side = if self.side != ProjectSide::Both {
self.side
} else {
let side = if self.side == ProjectSide::Both {
other.side
} else {
self.side
};
let mut id = self.id.clone();
@ -338,7 +338,7 @@ impl Project {
}
// Sort by release type (Release < Beta < Alpha) and date (newest first)
let mut sorted_files = compatible_files.to_vec();
let mut sorted_files = compatible_files.clone();
sorted_files.sort_by(|a, b| {
a.release_type
.cmp(&b.release_type)

View file

@ -12,10 +12,10 @@ use crate::{
};
const CURSEFORGE_API_BASE: &str = "https://api.curseforge.com/v1";
/// CurseForge game version type ID for loader versions (e.g., "fabric",
/// `CurseForge` game version type ID for loader versions (e.g., "fabric",
/// "forge")
const LOADER_VERSION_TYPE_ID: i32 = 68441;
/// CurseForge relation type ID for "required dependency" (mod embeds or
/// `CurseForge` relation type ID for "required dependency" (mod embeds or
/// requires another mod)
const DEPENDENCY_RELATION_TYPE_REQUIRED: u32 = 3;
@ -404,7 +404,7 @@ impl PlatformClient for CurseForgePlatform {
}
}
/// Uses CurseForge's `/fingerprints/432` endpoint to resolve projects by
/// Uses `CurseForge`'s `/fingerprints/432` endpoint to resolve projects by
/// their hashes in batch.
async fn request_projects_from_hashes(
&self,
@ -458,19 +458,19 @@ impl PlatformClient for CurseForgePlatform {
let mut seen_ids = std::collections::HashSet::new();
for m in matches {
if let Some(file) = m["file"].as_object() {
if let Some(mod_id) = file["modId"].as_u64() {
let mod_id_str = mod_id.to_string();
if seen_ids.contains(&mod_id_str) {
continue;
}
seen_ids.insert(mod_id_str.clone());
if let Some(file) = m["file"].as_object()
&& let Some(mod_id) = file["modId"].as_u64()
{
let mod_id_str = mod_id.to_string();
if seen_ids.contains(&mod_id_str) {
continue;
}
seen_ids.insert(mod_id_str.clone());
if let Ok(project) =
self.request_project_with_files(&mod_id_str, &[], &[]).await
{
projects.push(project);
}
if let Ok(project) =
self.request_project_with_files(&mod_id_str, &[], &[]).await
{
projects.push(project);
}
}
}

View file

@ -8,7 +8,7 @@ use crate::{
model::{Project, ProjectFile},
};
/// Multiplatform platform client that aggregates CurseForge and Modrinth.
/// Multiplatform platform client that aggregates `CurseForge` and Modrinth.
/// It attempts to resolve projects on both platforms and cross-references
/// them via slugs when a project exists on only one platform.
pub struct MultiplatformPlatform {
@ -203,7 +203,7 @@ impl PlatformClient for MultiplatformPlatform {
}
}
/// Delegates to both CurseForge and Modrinth in parallel, then deduplicates
/// Delegates to both `CurseForge` and Modrinth in parallel, then deduplicates
/// results.
async fn request_projects_from_hashes(
&self,

View file

@ -39,7 +39,7 @@ pub trait PlatformClient: Send + Sync {
) -> Result<Option<Project>>;
/// Request multiple projects by their hashes (Modrinth) or bytes
/// (CurseForge).
/// (`CurseForge`).
///
/// # Returns
///

View file

@ -73,19 +73,16 @@ fn decompose(str_in: &str) -> VecDeque<SortingType> {
false
};
use SortingType::*;
use SortingType::{Lexical, Numerical, SemverPrerelease};
if currently_numeric {
if numeric {
return None;
} else {
return Some(
current
.parse::<i64>()
.map(|n| Numerical(n, current.to_owned()))
.unwrap_or_else(|_| Lexical(current.to_owned())),
);
}
return Some(current.parse::<i64>().map_or_else(
|_| Lexical(current.to_owned()),
|n| Numerical(n, current.to_owned()),
));
}
if !(numeric || c == Some(&'-') || c.is_none()) {
@ -124,7 +121,7 @@ fn decompose(str_in: &str) -> VecDeque<SortingType> {
out
}
/// Compare two version strings using FlexVer rules.
/// Compare two version strings using `FlexVer` rules.
///
/// Returns:
/// - `Ordering::Less` if `a` < `b`
@ -141,7 +138,7 @@ pub fn compare(left: &str, right: &str) -> Ordering {
};
for next in iter {
use SortingType::*;
use SortingType::{Numerical, SemverPrerelease};
let current = match next {
// Left ran out first
@ -198,7 +195,7 @@ impl Iterator for VersionComparisonIterator {
}
}
/// FlexVer type for use with standard library traits
/// `FlexVer` type for use with standard library traits
#[derive(Debug, Copy, Clone)]
pub struct FlexVer<'a>(pub &'a str);

View file

@ -10,6 +10,16 @@ use sha2::{Sha256, Sha512};
use crate::error::{PakkerError, Result};
/// Render raw hash bytes as a lowercase hexadecimal string.
///
/// Accepts anything viewable as a byte slice (e.g. a finalized digest)
/// and emits two hex characters per input byte, most-significant nibble
/// first. The output is preallocated to exactly `2 * len` characters.
pub fn hash_to_hex(hash: impl AsRef<[u8]>) -> String {
    const HEX_DIGITS: &[u8; 16] = b"0123456789abcdef";
    let data = hash.as_ref();
    let mut out = String::with_capacity(data.len() * 2);
    for &b in data {
        out.push(HEX_DIGITS[usize::from(b >> 4)] as char);
        out.push(HEX_DIGITS[usize::from(b & 0x0f)] as char);
    }
    out
}
/// Compute SHA1 hash of a file
pub fn compute_sha1<P: AsRef<Path>>(path: P) -> Result<String> {
let file = File::open(path)?;
@ -25,7 +35,7 @@ pub fn compute_sha1<P: AsRef<Path>>(path: P) -> Result<String> {
hasher.update(&buffer[..n]);
}
Ok(format!("{:x}", hasher.finalize()))
Ok(hash_to_hex(hasher.finalize().as_slice()))
}
/// Compute SHA256 hash of a file
@ -43,14 +53,14 @@ pub fn compute_sha256<P: AsRef<Path>>(path: P) -> Result<String> {
hasher.update(&buffer[..n]);
}
Ok(format!("{:x}", hasher.finalize()))
Ok(hash_to_hex(hasher.finalize().as_slice()))
}
/// Compute SHA256 hash of byte data
pub fn compute_sha256_bytes(data: &[u8]) -> String {
let mut hasher = Sha256::new();
hasher.update(data);
format!("{:x}", hasher.finalize())
hash_to_hex(hasher.finalize().as_slice())
}
/// Compute SHA512 hash of a file
@ -68,7 +78,7 @@ pub fn compute_sha512<P: AsRef<Path>>(path: P) -> Result<String> {
hasher.update(&buffer[..n]);
}
Ok(format!("{:x}", hasher.finalize()))
Ok(hash_to_hex(hasher.finalize().as_slice()))
}
/// Compute MD5 hash of a file
@ -86,7 +96,12 @@ pub fn compute_md5<P: AsRef<Path>>(path: P) -> Result<String> {
hasher.update(&buffer[..n]);
}
Ok(format!("{:x}", hasher.finalize()))
let hash = hasher.finalize();
let mut hex = String::with_capacity(hash.len() * 2);
for byte in hash {
std::fmt::write(&mut hex, format_args!("{byte:02x}")).unwrap();
}
Ok(hex)
}
/// Verify a file's hash against expected value