pinakes: import in parallel; various UI improvements
Signed-off-by: NotAShelf <raf@notashelf.dev> Change-Id: I1eb47cd79cd4145c56af966f6756fe1d6a6a6964
This commit is contained in:
parent
278bcaa4b0
commit
116fe7b059
42 changed files with 4316 additions and 316 deletions
|
|
@ -1,4 +1,5 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
use std::time::SystemTime;
|
||||
|
||||
use tracing::info;
|
||||
|
||||
|
|
@ -14,9 +15,29 @@ use crate::thumbnail;
|
|||
pub struct ImportResult {
    /// Identifier of the imported media record, or of the pre-existing
    /// record when the file was a duplicate or was skipped.
    pub media_id: MediaId,
    /// True if a record with the same content hash already existed.
    pub was_duplicate: bool,
    /// True if the file was skipped because it hasn't changed since last scan
    pub was_skipped: bool,
    /// Canonicalized path of the file that was processed.
    pub path: PathBuf,
}
|
||||
|
||||
/// Options for import operations
#[derive(Debug, Clone, Default)]
pub struct ImportOptions {
    /// Skip files that haven't changed since last scan (based on mtime)
    pub incremental: bool,
    /// Force re-import even if mtime hasn't changed.
    /// Takes precedence over `incremental`: when set, the mtime-based
    /// skip check is not performed.
    pub force: bool,
}
|
||||
|
||||
/// Get the modification time of a file as a Unix timestamp (whole
/// seconds since the Unix epoch).
///
/// Returns `None` if the metadata cannot be read, the platform does
/// not report a modification time, or the mtime predates the epoch.
fn get_file_mtime(path: &Path) -> Option<i64> {
    let metadata = std::fs::metadata(path).ok()?;
    let modified = metadata.modified().ok()?;
    // Fails (and we return None) for mtimes before UNIX_EPOCH.
    let since_epoch = modified.duration_since(SystemTime::UNIX_EPOCH).ok()?;
    Some(since_epoch.as_secs() as i64)
}
|
||||
|
||||
/// Check that a canonicalized path falls under at least one configured root directory.
|
||||
/// If no roots are configured, all paths are allowed (for ad-hoc imports).
|
||||
pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) -> Result<()> {
|
||||
|
|
@ -38,6 +59,15 @@ pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) ->
|
|||
}
|
||||
|
||||
/// Import a single file using the default [`ImportOptions`]
/// (no incremental mtime skipping, no forced re-import).
///
/// Thin convenience wrapper around [`import_file_with_options`].
pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<ImportResult> {
    import_file_with_options(storage, path, &ImportOptions::default()).await
}
|
||||
|
||||
/// Import a file with configurable options for incremental scanning
|
||||
pub async fn import_file_with_options(
|
||||
storage: &DynStorageBackend,
|
||||
path: &Path,
|
||||
options: &ImportOptions,
|
||||
) -> Result<ImportResult> {
|
||||
let path = path.canonicalize()?;
|
||||
|
||||
if !path.exists() {
|
||||
|
|
@ -49,12 +79,38 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
|
|||
let media_type = MediaType::from_path(&path)
|
||||
.ok_or_else(|| PinakesError::UnsupportedMediaType(path.clone()))?;
|
||||
|
||||
let current_mtime = get_file_mtime(&path);
|
||||
|
||||
// Check for incremental scan: skip if file hasn't changed
|
||||
if options.incremental && !options.force {
|
||||
if let Some(existing) = storage.get_media_by_path(&path).await? {
|
||||
// Compare mtimes - if they match, skip this file
|
||||
if let (Some(stored_mtime), Some(curr_mtime)) = (existing.file_mtime, current_mtime) {
|
||||
if stored_mtime == curr_mtime {
|
||||
return Ok(ImportResult {
|
||||
media_id: existing.id,
|
||||
was_duplicate: false,
|
||||
was_skipped: true,
|
||||
path: path.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let content_hash = compute_file_hash(&path).await?;
|
||||
|
||||
if let Some(existing) = storage.get_media_by_hash(&content_hash).await? {
|
||||
// Update the mtime even for duplicates so incremental scan works
|
||||
if current_mtime.is_some() && existing.file_mtime != current_mtime {
|
||||
let mut updated = existing.clone();
|
||||
updated.file_mtime = current_mtime;
|
||||
let _ = storage.update_media(&updated).await;
|
||||
}
|
||||
return Ok(ImportResult {
|
||||
media_id: existing.id,
|
||||
was_duplicate: true,
|
||||
was_skipped: false,
|
||||
path: path.clone(),
|
||||
});
|
||||
}
|
||||
|
|
@ -109,6 +165,7 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
|
|||
description: extracted.description,
|
||||
thumbnail_path: thumb_path,
|
||||
custom_fields: std::collections::HashMap::new(),
|
||||
file_mtime: current_mtime,
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
};
|
||||
|
|
@ -144,6 +201,7 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
|
|||
Ok(ImportResult {
|
||||
media_id,
|
||||
was_duplicate: false,
|
||||
was_skipped: false,
|
||||
path: path.clone(),
|
||||
})
|
||||
}
|
||||
|
|
@ -180,8 +238,14 @@ pub async fn import_directory(
|
|||
dir: &Path,
|
||||
ignore_patterns: &[String],
|
||||
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
|
||||
import_directory_with_concurrency(storage, dir, ignore_patterns, DEFAULT_IMPORT_CONCURRENCY)
|
||||
.await
|
||||
import_directory_with_options(
|
||||
storage,
|
||||
dir,
|
||||
ignore_patterns,
|
||||
DEFAULT_IMPORT_CONCURRENCY,
|
||||
&ImportOptions::default(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn import_directory_with_concurrency(
|
||||
|
|
@ -189,10 +253,29 @@ pub async fn import_directory_with_concurrency(
|
|||
dir: &Path,
|
||||
ignore_patterns: &[String],
|
||||
concurrency: usize,
|
||||
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
|
||||
import_directory_with_options(
|
||||
storage,
|
||||
dir,
|
||||
ignore_patterns,
|
||||
concurrency,
|
||||
&ImportOptions::default(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Import a directory with full options including incremental scanning support
|
||||
pub async fn import_directory_with_options(
|
||||
storage: &DynStorageBackend,
|
||||
dir: &Path,
|
||||
ignore_patterns: &[String],
|
||||
concurrency: usize,
|
||||
options: &ImportOptions,
|
||||
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
|
||||
let concurrency = concurrency.clamp(1, 256);
|
||||
let dir = dir.to_path_buf();
|
||||
let patterns = ignore_patterns.to_vec();
|
||||
let options = options.clone();
|
||||
|
||||
let entries: Vec<PathBuf> = {
|
||||
let dir = dir.clone();
|
||||
|
|
@ -213,15 +296,14 @@ pub async fn import_directory_with_concurrency(
|
|||
|
||||
let mut results = Vec::with_capacity(entries.len());
|
||||
let mut join_set = tokio::task::JoinSet::new();
|
||||
let mut pending_paths: Vec<PathBuf> = Vec::new();
|
||||
|
||||
for entry_path in entries {
|
||||
let storage = storage.clone();
|
||||
let path = entry_path.clone();
|
||||
pending_paths.push(entry_path);
|
||||
let opts = options.clone();
|
||||
|
||||
join_set.spawn(async move {
|
||||
let result = import_file(&storage, &path).await;
|
||||
let result = import_file_with_options(&storage, &path, &opts).await;
|
||||
(path, result)
|
||||
});
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue