pinakes-core: update remaining modules and tests

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9e0ff5ea33a5cf697473423e88f167ce6a6a6964
This commit is contained in:
raf 2026-03-08 00:42:29 +03:00
commit 3d9f8933d2
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
44 changed files with 1207 additions and 578 deletions

View file

@@ -1,5 +1,6 @@
use std::{
path::{Path, PathBuf},
sync::Arc,
time::SystemTime,
};
@@ -12,7 +13,14 @@ use crate::{
links,
media_type::{BuiltinMediaType, MediaType},
metadata,
model::*,
model::{
AuditAction,
CustomField,
CustomFieldType,
MediaId,
MediaItem,
StorageMode,
},
storage::DynStorageBackend,
thumbnail,
};
@@ -43,7 +51,7 @@ fn get_file_mtime(path: &Path) -> Option<i64> {
.ok()
.and_then(|m| m.modified().ok())
.and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
.map(|d| d.as_secs() as i64)
.map(|d| i64::try_from(d.as_secs()).unwrap_or(i64::MAX))
}
/// Validates that a path is within configured root directories.
@@ -103,6 +111,10 @@ pub async fn import_file(
}
/// Import a file with configurable options for incremental scanning
///
/// # Errors
///
/// Returns [`PinakesError`] if the file cannot be read, hashed, or stored.
pub async fn import_file_with_options(
storage: &DynStorageBackend,
path: &Path,
@@ -161,7 +173,7 @@ pub async fn import_file_with_options(
let path_clone = path.clone();
let media_type_clone = media_type.clone();
tokio::task::spawn_blocking(move || {
metadata::extract_metadata(&path_clone, media_type_clone)
metadata::extract_metadata(&path_clone, &media_type_clone)
})
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
@@ -185,7 +197,7 @@ pub async fn import_file_with_options(
thumbnail::generate_thumbnail(
media_id,
&source,
media_type_clone,
&media_type_clone,
&thumb_dir,
)
})
@@ -194,7 +206,7 @@ pub async fn import_file_with_options(
};
// Generate perceptual hash for image files (if enabled in config)
let perceptual_hash = if options.photo_config.generate_perceptual_hash
let perceptual_hash = if options.photo_config.generate_perceptual_hash()
&& media_type.category() == crate::media_type::MediaCategory::Image
{
crate::metadata::image::generate_perceptual_hash(&path)
@@ -327,6 +339,12 @@ pub(crate) fn should_ignore(
/// Default number of concurrent import tasks.
const DEFAULT_IMPORT_CONCURRENCY: usize = 8;
/// Import all supported files in a directory with default options.
///
/// # Errors
///
/// Returns [`PinakesError`] if the directory cannot be read or spawned tasks
/// fail.
pub async fn import_directory(
storage: &DynStorageBackend,
dir: &Path,
@@ -342,6 +360,13 @@ pub async fn import_directory(
.await
}
/// Import all supported files in a directory with a specified concurrency
/// limit.
///
/// # Errors
///
/// Returns [`PinakesError`] if the directory cannot be read or spawned tasks
/// fail.
pub async fn import_directory_with_concurrency(
storage: &DynStorageBackend,
dir: &Path,
@@ -358,7 +383,12 @@ pub async fn import_directory_with_concurrency(
.await
}
/// Import a directory with full options including incremental scanning support
/// Import a directory with full options including incremental scanning support.
///
/// # Errors
///
/// Returns [`PinakesError`] if the directory cannot be read or spawned tasks
/// fail.
pub async fn import_directory_with_options(
storage: &DynStorageBackend,
dir: &Path,
@@ -377,8 +407,8 @@ pub async fn import_directory_with_options(
walkdir::WalkDir::new(&dir)
.follow_links(true)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| e.file_type().is_file())
.filter_map(std::result::Result::ok)
.filter(|e| !e.file_type().is_dir())
.filter(|e| MediaType::from_path(e.path()).is_some())
.filter(|e| !should_ignore(e.path(), &patterns))
.map(|e| e.path().to_path_buf())
@@ -392,7 +422,7 @@ pub async fn import_directory_with_options(
let mut join_set = tokio::task::JoinSet::new();
for entry_path in entries {
let storage = storage.clone();
let storage = Arc::clone(storage);
let path = entry_path.clone();
let opts = options.clone();