pinakes-core: expand test coverage for new file/media management ops

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I2c7ac0c2717839cc014436d1d2e895796a6a6964
This commit is contained in:
raf 2026-02-05 11:11:31 +03:00
commit 152356ce9f
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
13 changed files with 2929 additions and 2961 deletions

View file

@ -0,0 +1,97 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
use pinakes_core::model::{ContentHash, MediaId, MediaItem, StorageMode};
use pinakes_core::storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend};
use tempfile::TempDir;
use uuid::Uuid;
/// Build a fresh in-memory SQLite backend with all migrations applied.
///
/// Panics if the database cannot be opened or migrated, since no test
/// can proceed without a working backend.
pub async fn setup() -> Arc<SqliteBackend> {
    let db = SqliteBackend::in_memory().expect("in-memory SQLite");
    db.run_migrations().await.expect("migrations");
    Arc::new(db)
}
/// Create a file-backed SQLite backend inside a fresh temporary directory.
///
/// Returns the backend as a trait object together with the `TempDir` guard;
/// the caller must keep the guard alive or the database file is deleted.
pub async fn setup_test_storage() -> (DynStorageBackend, TempDir) {
    let dir = TempDir::new().unwrap();
    // UUIDv7 in the file name keeps databases unique even if the same
    // directory were ever reused.
    let db_file = dir.path().join(format!("test_{}.db", Uuid::now_v7()));
    let backend = SqliteBackend::new(&db_file).unwrap();
    backend.run_migrations().await.unwrap();
    (Arc::new(backend), dir)
}
/// Build a minimal MP4 `MediaItem` whose path, name, title and content hash
/// are all derived from `hash`, so distinct hashes never collide in storage.
pub fn make_test_media(hash: &str) -> MediaItem {
    let timestamp = chrono::Utc::now();
    MediaItem {
        // Identity fields, derived from the caller-supplied hash.
        id: MediaId::new(),
        path: format!("/tmp/test_{hash}.mp4").into(),
        file_name: format!("test_{hash}.mp4"),
        media_type: MediaType::Builtin(BuiltinMediaType::Mp4),
        content_hash: ContentHash::new(hash.to_string()),
        file_size: 1000,
        // A few populated metadata fields so search/sort tests have data.
        title: Some(format!("Test {hash}")),
        artist: Some("Test Artist".to_string()),
        year: Some(2024),
        duration_secs: Some(120.0),
        // Remaining metadata is deliberately left empty.
        album: None,
        genre: None,
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        file_mtime: None,
        date_taken: None,
        latitude: None,
        longitude: None,
        camera_make: None,
        camera_model: None,
        rating: None,
        perceptual_hash: None,
        storage_mode: StorageMode::External,
        original_filename: None,
        uploaded_at: None,
        storage_key: None,
        created_at: timestamp,
        updated_at: timestamp,
        deleted_at: None,
    }
}
pub fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
let now = chrono::Utc::now();
MediaItem {
id: MediaId(Uuid::now_v7()),
path,
file_name: "test.mp3".to_string(),
media_type: MediaType::Builtin(BuiltinMediaType::Mp3),
content_hash: ContentHash(hash.to_string()),
file_size: 1000,
title: None,
artist: None,
album: None,
genre: None,
year: None,
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
storage_mode: StorageMode::External,
original_filename: None,
uploaded_at: None,
storage_key: None,
created_at: now,
updated_at: now,
deleted_at: None,
}
}

View file

@ -5,11 +5,8 @@ use pinakes_core::model::*;
use pinakes_core::storage::StorageBackend;
use pinakes_core::storage::sqlite::SqliteBackend;
async fn setup() -> Arc<SqliteBackend> {
let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
backend.run_migrations().await.expect("migrations");
Arc::new(backend)
}
mod common;
use common::{make_test_media, setup};
#[tokio::test]
async fn test_media_crud() {
@ -49,6 +46,7 @@ async fn test_media_crud() {
storage_key: None,
created_at: now,
updated_at: now,
deleted_at: None,
};
// Insert
@ -139,6 +137,7 @@ async fn test_tags() {
storage_key: None,
created_at: now,
updated_at: now,
deleted_at: None,
};
storage.insert_media(&item).await.unwrap();
storage.tag_media(id, parent.id).await.unwrap();
@ -203,6 +202,7 @@ async fn test_collections() {
storage_key: None,
created_at: now,
updated_at: now,
deleted_at: None,
};
storage.insert_media(&item).await.unwrap();
@ -262,6 +262,7 @@ async fn test_custom_fields() {
storage_key: None,
created_at: now,
updated_at: now,
deleted_at: None,
};
storage.insert_media(&item).await.unwrap();
@ -340,6 +341,7 @@ async fn test_search() {
storage_key: None,
created_at: now,
updated_at: now,
deleted_at: None,
};
storage.insert_media(&item).await.unwrap();
}
@ -483,6 +485,7 @@ async fn test_library_statistics_with_data() {
storage_key: None,
created_at: now,
updated_at: now,
deleted_at: None,
};
storage.insert_media(&item).await.unwrap();
@ -497,43 +500,6 @@ async fn test_library_statistics_with_data() {
// ===== Phase 2: Media Server Features =====
fn make_test_media(hash: &str) -> MediaItem {
let now = chrono::Utc::now();
MediaItem {
id: MediaId::new(),
path: format!("/tmp/test_{hash}.mp4").into(),
file_name: format!("test_{hash}.mp4"),
media_type: pinakes_core::media_type::MediaType::Builtin(
pinakes_core::media_type::BuiltinMediaType::Mp4,
),
content_hash: ContentHash::new(hash.to_string()),
file_size: 1000,
title: Some(format!("Test {hash}")),
artist: Some("Test Artist".to_string()),
album: None,
genre: None,
year: Some(2024),
duration_secs: Some(120.0),
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
storage_mode: StorageMode::External,
original_filename: None,
uploaded_at: None,
storage_key: None,
created_at: now,
updated_at: now,
}
}
#[tokio::test]
async fn test_ratings_crud() {
let storage = setup().await;

View file

@ -1,74 +1,21 @@
use std::fs;
use std::path::PathBuf;
use std::sync::Arc;
use pinakes_core::integrity::detect_orphans;
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
use pinakes_core::model::{ContentHash, MediaId, MediaItem, StorageMode};
use pinakes_core::storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend};
use tempfile::TempDir;
use uuid::Uuid;
async fn setup_test_storage() -> (DynStorageBackend, TempDir) {
let temp_dir = TempDir::new().unwrap();
let db_path = temp_dir.path().join(format!("test_{}.db", Uuid::now_v7()));
let storage = SqliteBackend::new(&db_path).unwrap();
storage.run_migrations().await.unwrap();
(Arc::new(storage), temp_dir)
}
fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
use std::collections::HashMap;
MediaItem {
id: MediaId(Uuid::now_v7()),
path,
file_name: "test.mp3".to_string(),
media_type: MediaType::Builtin(BuiltinMediaType::Mp3),
content_hash: ContentHash(hash.to_string()),
file_size: 1000,
title: None,
artist: None,
album: None,
genre: None,
year: None,
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
storage_mode: StorageMode::External,
original_filename: None,
uploaded_at: None,
storage_key: None,
created_at: chrono::Utc::now(),
updated_at: chrono::Utc::now(),
}
}
mod common;
use common::{create_test_media_item, setup_test_storage};
#[tokio::test]
async fn test_detect_orphaned_files() {
    let (storage, temp_dir) = setup_test_storage().await;
    // Register a DB record whose backing file was never created on disk.
    let missing_path = temp_dir.path().join("nonexistent.mp3");
    let item = create_test_media_item(missing_path, "hash1");
    storage.insert_media(&item).await.unwrap();
    // The missing file must be flagged as the single orphan.
    let report = detect_orphans(&storage).await.unwrap();
    assert_eq!(report.orphaned_ids.len(), 1);
    assert_eq!(report.orphaned_ids[0], item.id);
}
@ -77,57 +24,41 @@ async fn test_detect_orphaned_files() {
async fn test_detect_untracked_files() {
    let (storage, temp_dir) = setup_test_storage().await;
    // Register a media root directory for the scanner to walk.
    let media_root = temp_dir.path().join("media");
    fs::create_dir(&media_root).unwrap();
    storage.add_root_dir(media_root.clone()).await.unwrap();
    // Two real files on disk, but only one gets a database row.
    let known = media_root.join("tracked.mp3");
    let unknown = media_root.join("untracked.mp3");
    fs::write(&known, b"tracked content").unwrap();
    fs::write(&unknown, b"untracked content").unwrap();
    let known_item = create_test_media_item(known.clone(), "hash_tracked");
    storage.insert_media(&known_item).await.unwrap();
    // Only the file without a row should be reported as untracked.
    let report = detect_orphans(&storage).await.unwrap();
    assert_eq!(report.untracked_paths.len(), 1);
    assert!(report.untracked_paths.contains(&unknown));
}
#[tokio::test]
async fn test_detect_moved_files() {
    // Note: Due to UNIQUE constraint on content_hash, moved files detection
    // won't find true duplicates. This test validates the detection logic
    // works but won't find matches due to schema constraints.
    let (storage, temp_dir) = setup_test_storage().await;
    // Create a file, track it, then remove it so the row becomes orphaned.
    let stale_path = temp_dir.path().join("old_location.mp3");
    fs::write(&stale_path, b"content").unwrap();
    let stale_item = create_test_media_item(stale_path.clone(), "hash_unique");
    storage.insert_media(&stale_item).await.unwrap();
    fs::remove_file(&stale_path).unwrap();
    let report = detect_orphans(&storage).await.unwrap();
    // The deleted file is reported as orphaned; with the UNIQUE constraint on
    // content_hash no duplicate row can exist, so moved_files stays empty.
    assert_eq!(report.orphaned_ids.len(), 1);
    assert_eq!(report.moved_files.len(), 0);
}
@ -135,26 +66,21 @@ async fn test_detect_moved_files() {
async fn test_ignore_patterns_respected() {
let (storage, temp_dir) = setup_test_storage().await;
// Create a root directory
let root_dir = temp_dir.path().join("media");
fs::create_dir(&root_dir).unwrap();
storage.add_root_dir(root_dir.clone()).await.unwrap();
// Create a hidden directory that should be ignored
let hidden_dir = root_dir.join(".hidden");
fs::create_dir(&hidden_dir).unwrap();
let hidden_file = hidden_dir.join("hidden.mp3");
fs::write(&hidden_file, b"hidden content").unwrap();
// Create a normal file
let normal_file = root_dir.join("normal.mp3");
fs::write(&normal_file, b"normal content").unwrap();
// Detect orphans
let report = detect_orphans(&storage).await.unwrap();
// Should only detect the normal file, not the hidden one
assert_eq!(report.untracked_paths.len(), 1);
assert!(report.untracked_paths.contains(&normal_file));
assert!(!report.untracked_paths.contains(&hidden_file));
@ -164,12 +90,10 @@ async fn test_ignore_patterns_respected() {
async fn test_only_supported_media_types() {
let (storage, temp_dir) = setup_test_storage().await;
// Create a root directory
let root_dir = temp_dir.path().join("media");
fs::create_dir(&root_dir).unwrap();
storage.add_root_dir(root_dir.clone()).await.unwrap();
// Create files with different extensions
let mp3_file = root_dir.join("audio.mp3");
let txt_file = root_dir.join("readme.txt");
let exe_file = root_dir.join("program.exe");
@ -178,11 +102,8 @@ async fn test_only_supported_media_types() {
fs::write(&txt_file, b"text").unwrap();
fs::write(&exe_file, b"binary").unwrap();
// Detect orphans
let report = detect_orphans(&storage).await.unwrap();
// Should only detect supported media types (mp3 and txt are supported)
// exe should not be detected
assert!(report.untracked_paths.len() <= 2);
assert!(!report.untracked_paths.contains(&exe_file));
}
@ -191,47 +112,36 @@ async fn test_only_supported_media_types() {
/// End-to-end orphan detection covering all scenarios at once:
/// orphaned rows (in DB, not on disk), an untracked file (on disk, not in
/// DB), and a normally tracked file.
async fn test_complete_orphan_workflow() {
    let (storage, temp_dir) = setup_test_storage().await;
    // Setup root directory
    let root_dir = temp_dir.path().join("media");
    fs::create_dir(&root_dir).unwrap();
    storage.add_root_dir(root_dir.clone()).await.unwrap();
    // Create various scenarios
    // 1. Orphaned file (in DB, not on disk)
    let orphaned_path = root_dir.join("orphaned.mp3");
    let orphaned_item = create_test_media_item(orphaned_path.clone(), "hash_orphaned");
    storage.insert_media(&orphaned_item).await.unwrap();
    // 2. Untracked file (on disk, not in DB)
    let untracked_path = root_dir.join("untracked.mp3");
    fs::write(&untracked_path, b"untracked").unwrap();
    // 3. Another orphaned file (can't test moved files with UNIQUE constraint)
    let another_orphaned = root_dir.join("another_orphaned.mp3");
    let another_item = create_test_media_item(another_orphaned.clone(), "hash_another");
    storage.insert_media(&another_item).await.unwrap();
    // Don't create the file, so it's orphaned
    // 4. Tracked file (normal case)
    let tracked_path = root_dir.join("tracked.mp3");
    fs::write(&tracked_path, b"tracked").unwrap();
    let tracked_item = create_test_media_item(tracked_path.clone(), "hash_tracked");
    storage.insert_media(&tracked_item).await.unwrap();
    // Detect all orphans
    let report = detect_orphans(&storage).await.unwrap();
    // Verify results: the duplicated assertion from the diff is collapsed to one.
    assert_eq!(report.orphaned_ids.len(), 2); // orphaned + another_orphaned
    assert!(report.orphaned_ids.contains(&orphaned_item.id));
    assert!(report.orphaned_ids.contains(&another_item.id));
    assert_eq!(report.untracked_paths.len(), 1);
    assert!(report.untracked_paths.contains(&untracked_path));
    // No moved files due to UNIQUE constraint on content_hash
    assert_eq!(report.moved_files.len(), 0);
}
@ -243,31 +153,26 @@ async fn test_large_directory_performance() {
fs::create_dir(&root_dir).unwrap();
storage.add_root_dir(root_dir.clone()).await.unwrap();
// Create many files
for i in 0..1000 {
let file_path = root_dir.join(format!("file_{}.mp3", i));
fs::write(&file_path, format!("content {}", i)).unwrap();
}
// Add half to database
for i in 0..500 {
let file_path = root_dir.join(format!("file_{}.mp3", i));
let item = create_test_media_item(file_path, &format!("hash_{}", i));
storage.insert_media(&item).await.unwrap();
}
// Measure time
let start = std::time::Instant::now();
let report = detect_orphans(&storage).await.unwrap();
let elapsed = start.elapsed();
// Should complete in reasonable time (< 5 seconds for 1000 files)
assert!(
elapsed.as_secs() < 5,
"Detection took too long: {:?}",
elapsed
);
// Should detect 500 untracked files
assert_eq!(report.untracked_paths.len(), 500);
}