treewide: fix various UI bugs; optimize crypto dependencies & format

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If8fe8b38c1d9c4fecd40ff71f88d2ae06a6a6964
This commit is contained in:
raf 2026-02-10 12:56:05 +03:00
commit 3ccddce7fd
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
178 changed files with 58342 additions and 54241 deletions

View file

@@ -1,109 +1,113 @@
use pinakes_core::books::{extract_isbn_from_text, normalize_isbn, parse_author_file_as};
use pinakes_core::enrichment::books::BookEnricher;
use pinakes_core::enrichment::googlebooks::GoogleBooksClient;
use pinakes_core::enrichment::openlibrary::OpenLibraryClient;
use pinakes_core::thumbnail::{CoverSize, extract_epub_cover, generate_book_covers};
use pinakes_core::{
books::{extract_isbn_from_text, normalize_isbn, parse_author_file_as},
enrichment::{
books::BookEnricher,
googlebooks::GoogleBooksClient,
openlibrary::OpenLibraryClient,
},
thumbnail::{CoverSize, extract_epub_cover, generate_book_covers},
};
#[test]
fn test_isbn_normalization() {
    // Valid ISBN-10 to ISBN-13 conversion (The Hobbit)
    let result = normalize_isbn("0547928220");
    assert!(result.is_ok());
    let isbn13 = result.unwrap();
    assert_eq!(isbn13.len(), 13);
    assert!(isbn13.starts_with("978"));

    // Valid ISBN-13 should return itself
    let result = normalize_isbn("9780547928227");
    assert!(result.is_ok());
    assert_eq!(result.unwrap(), "9780547928227");

    // ISBN with hyphens should be normalized
    let result = normalize_isbn("978-0-547-92822-7");
    assert!(result.is_ok());
    assert_eq!(result.unwrap(), "9780547928227");

    // Invalid ISBN
    let result = normalize_isbn("invalid");
    assert!(result.is_err());
}
#[test]
fn test_isbn_extraction_from_text() {
    // ISBN-13 embedded in prose is found
    let text = "This book has ISBN-13: 978-0-123-45678-9 in the middle.";
    let result = extract_isbn_from_text(text);
    assert!(result.is_some());
    let isbn = result.unwrap();
    assert!(isbn.contains("978"));

    // ISBN-10 (old format) is also detected
    let text_isbn10 = "Old format ISBN: 0-123-45678-9";
    let result = extract_isbn_from_text(text_isbn10);
    assert!(result.is_some());

    // Text without any ISBN yields None
    let text_no_isbn = "This text has no ISBN at all.";
    let result = extract_isbn_from_text(text_no_isbn);
    assert!(result.is_none());
}
#[test]
fn test_author_file_as_parsing() {
    // Standard name: "First Last" -> "Last, First"
    let result = parse_author_file_as("John Smith");
    assert_eq!(result, "Smith, John");

    // Single name is returned unchanged
    let result = parse_author_file_as("Shakespeare");
    assert_eq!(result, "Shakespeare");

    // Multiple middle names stay with the first name
    let result = parse_author_file_as("John Ronald Reuel Tolkien");
    assert_eq!(result, "Tolkien, John Ronald Reuel");

    // Already in "Last, First" format is left as-is
    let result = parse_author_file_as("Tolkien, J.R.R.");
    assert_eq!(result, "Tolkien, J.R.R.");
}
#[test]
fn test_book_enricher_creation() {
    // Construction without an API key must not panic
    let enricher = BookEnricher::new(None);
    drop(enricher);

    // Construction with an API key must not panic either
    let enricher_with_key = BookEnricher::new(Some("test-api-key".to_string()));
    drop(enricher_with_key);
}
#[test]
fn test_openlibrary_client_creation() {
    // Verify the client can be constructed without panicking
    let client = OpenLibraryClient::new();
    drop(client);
}
#[test]
fn test_googlebooks_client_creation() {
    // Construction with and without an API key must not panic
    let client = GoogleBooksClient::new(None);
    drop(client);

    let client_with_key = GoogleBooksClient::new(Some("test-key".to_string()));
    drop(client_with_key);
}
#[test]
fn test_cover_size_dimensions() {
    // Each resized variant has fixed dimensions; Original keeps the source size
    assert_eq!(CoverSize::Tiny.dimensions(), Some((64, 64)));
    assert_eq!(CoverSize::Grid.dimensions(), Some((320, 320)));
    assert_eq!(CoverSize::Preview.dimensions(), Some((1024, 1024)));
    assert_eq!(CoverSize::Original.dimensions(), None);
}
#[test]
fn test_cover_size_filenames() {
    // Each cover size maps to a stable on-disk filename
    assert_eq!(CoverSize::Tiny.filename(), "tiny.jpg");
    assert_eq!(CoverSize::Grid.filename(), "grid.jpg");
    assert_eq!(CoverSize::Preview.filename(), "preview.jpg");
    assert_eq!(CoverSize::Original.filename(), "original.jpg");
}
// Note: The following tests would require actual EPUB files and network access,
@@ -112,92 +116,93 @@ fn test_cover_size_filenames() {
#[test]
#[ignore]
fn test_epub_cover_extraction() {
    // This test requires a real EPUB file
    let epub_path = std::path::PathBuf::from("test_fixtures/sample.epub");

    if !epub_path.exists() {
        // Skip if test fixture doesn't exist
        return;
    }

    let result = extract_epub_cover(&epub_path);
    // Should either succeed with Some(data) or None if no cover found
    assert!(result.is_ok());
}
#[test]
#[ignore]
fn test_book_cover_generation() {
    // This test requires a sample image
    use tempfile::tempdir;

    // Create a minimal 100x100 red PNG in memory
    let mut img_data = Vec::new();
    {
        use image::{ImageBuffer, Rgb};
        let img: ImageBuffer<Rgb<u8>, Vec<u8>> =
            ImageBuffer::from_fn(100, 100, |_, _| Rgb([255u8, 0u8, 0u8]));
        img.write_to(
            &mut std::io::Cursor::new(&mut img_data),
            image::ImageFormat::Png,
        )
        .unwrap();
    }

    let temp_dir = tempdir().unwrap();
    let media_id = pinakes_core::model::MediaId::new();

    let result = generate_book_covers(media_id, &img_data, temp_dir.path());
    assert!(result.is_ok());

    let covers = result.unwrap();
    assert_eq!(covers.len(), 4); // tiny, grid, preview, original

    // Verify all cover files exist
    for (size, path) in &covers {
        assert!(path.exists(), "Cover {:?} should exist at {:?}", size, path);
    }
}
#[tokio::test]
#[ignore]
async fn test_openlibrary_isbn_fetch() {
    // This test requires network access
    let client = OpenLibraryClient::new();

    // Use a known ISBN for "The Hobbit"
    let result = client.fetch_by_isbn("9780547928227").await;

    // Should either succeed or fail gracefully;
    // we don't assert success because network might not be available
    match result {
        Ok(book) => {
            assert!(book.title.is_some());
        }
        Err(_) => {
            // Network error or book not found - acceptable in tests
        }
    }
}
#[tokio::test]
#[ignore]
async fn test_googlebooks_isbn_fetch() {
    // This test requires network access
    let client = GoogleBooksClient::new(None);

    // Use a known ISBN
    let result = client.fetch_by_isbn("9780547928227").await;

    match result {
        Ok(books) => {
            if !books.is_empty() {
                assert!(books[0].volume_info.title.is_some());
            }
        }
        Err(_) => {
            // Network error - acceptable in tests
        }
    }
}

View file

@@ -1,137 +1,137 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
use pinakes_core::model::{ContentHash, MediaId, MediaItem, StorageMode};
use pinakes_core::storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend};
use pinakes_core::{
media_type::{BuiltinMediaType, MediaType},
model::{ContentHash, MediaId, MediaItem, StorageMode},
storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend},
};
use tempfile::TempDir;
use uuid::Uuid;
/// Create an in-memory SQLite backend with migrations applied,
/// wrapped in an `Arc` for sharing across async tasks.
pub async fn setup() -> Arc<SqliteBackend> {
    let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
    backend.run_migrations().await.expect("migrations");
    Arc::new(backend)
}
/// Create a file-backed SQLite storage in a fresh temp directory.
/// Returns the backend together with the `TempDir` guard; the caller
/// must keep the guard alive for as long as the database is used.
pub async fn setup_test_storage() -> (DynStorageBackend, TempDir) {
    let temp_dir = TempDir::new().unwrap();
    // UUIDv7 keeps db filenames unique across concurrent tests
    let db_path = temp_dir.path().join(format!("test_{}.db", Uuid::now_v7()));

    let storage = SqliteBackend::new(&db_path).unwrap();
    storage.run_migrations().await.unwrap();

    (Arc::new(storage), temp_dir)
}
/// Build a minimal MP4 `MediaItem` whose path, filename, hash, and title
/// are all derived from `hash`, so distinct hashes yield distinct items.
pub fn make_test_media(hash: &str) -> MediaItem {
    let now = chrono::Utc::now();
    MediaItem {
        id: MediaId::new(),
        path: format!("/tmp/test_{hash}.mp4").into(),
        file_name: format!("test_{hash}.mp4"),
        media_type: MediaType::Builtin(BuiltinMediaType::Mp4),
        content_hash: ContentHash::new(hash.to_string()),
        file_size: 1000,
        title: Some(format!("Test {hash}")),
        artist: Some("Test Artist".to_string()),
        album: None,
        genre: None,
        year: Some(2024),
        duration_secs: Some(120.0),
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        file_mtime: None,
        date_taken: None,
        latitude: None,
        longitude: None,
        camera_make: None,
        camera_model: None,
        rating: None,
        perceptual_hash: None,
        storage_mode: StorageMode::External,
        original_filename: None,
        uploaded_at: None,
        storage_key: None,
        created_at: now,
        updated_at: now,
        deleted_at: None,
        links_extracted_at: None,
    }
}
/// Build a minimal MP3 `MediaItem` at an explicit `path` with the given
/// content `hash`; all optional metadata is left unset.
pub fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
    let now = chrono::Utc::now();
    MediaItem {
        id: MediaId(Uuid::now_v7()),
        path,
        file_name: "test.mp3".to_string(),
        media_type: MediaType::Builtin(BuiltinMediaType::Mp3),
        content_hash: ContentHash(hash.to_string()),
        file_size: 1000,
        title: None,
        artist: None,
        album: None,
        genre: None,
        year: None,
        duration_secs: None,
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        file_mtime: None,
        date_taken: None,
        latitude: None,
        longitude: None,
        camera_make: None,
        camera_model: None,
        rating: None,
        perceptual_hash: None,
        storage_mode: StorageMode::External,
        original_filename: None,
        uploaded_at: None,
        storage_key: None,
        created_at: now,
        updated_at: now,
        deleted_at: None,
        links_extracted_at: None,
    }
}
/// Create a test markdown media item with a given ID; path, filename,
/// and content hash are derived from the ID so items stay unique.
pub fn make_test_markdown_item(id: MediaId) -> MediaItem {
    let now = chrono::Utc::now();
    MediaItem {
        id,
        path: format!("/tmp/test_{}.md", id.0).into(),
        file_name: format!("test_{}.md", id.0),
        media_type: MediaType::Builtin(BuiltinMediaType::Markdown),
        content_hash: ContentHash::new(format!("hash_{}", id.0)),
        file_size: 1024,
        title: Some("Test Note".to_string()),
        artist: None,
        album: None,
        genre: None,
        year: None,
        duration_secs: None,
        description: Some("Test markdown note".to_string()),
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        file_mtime: None,
        date_taken: None,
        latitude: None,
        longitude: None,
        camera_make: None,
        camera_model: None,
        rating: None,
        perceptual_hash: None,
        storage_mode: StorageMode::External,
        original_filename: None,
        uploaded_at: None,
        storage_key: None,
        created_at: now,
        updated_at: now,
        deleted_at: None,
        links_extracted_at: None,
    }
}

File diff suppressed because it is too large Load diff

View file

@@ -7,172 +7,176 @@ use common::{create_test_media_item, setup_test_storage};
#[tokio::test]
async fn test_detect_orphaned_files() {
    let (storage, temp_dir) = setup_test_storage().await;

    // Insert a DB record pointing at a file that does not exist on disk
    let nonexistent_path = temp_dir.path().join("nonexistent.mp3");
    let orphaned_item = create_test_media_item(nonexistent_path, "hash1");
    storage.insert_media(&orphaned_item).await.unwrap();

    let report = detect_orphans(&storage).await.unwrap();

    // The missing file must be reported as the single orphan
    assert_eq!(report.orphaned_ids.len(), 1);
    assert_eq!(report.orphaned_ids[0], orphaned_item.id);
}
#[tokio::test]
async fn test_detect_untracked_files() {
    let (storage, temp_dir) = setup_test_storage().await;

    let root_dir = temp_dir.path().join("media");
    fs::create_dir(&root_dir).unwrap();
    storage.add_root_dir(root_dir.clone()).await.unwrap();

    // One file is registered in storage, the other only exists on disk
    let tracked_file = root_dir.join("tracked.mp3");
    let untracked_file = root_dir.join("untracked.mp3");

    fs::write(&tracked_file, b"tracked content").unwrap();
    fs::write(&untracked_file, b"untracked content").unwrap();

    let tracked_item = create_test_media_item(tracked_file.clone(), "hash_tracked");
    storage.insert_media(&tracked_item).await.unwrap();

    let report = detect_orphans(&storage).await.unwrap();

    // Only the file without a DB record is untracked
    assert_eq!(report.untracked_paths.len(), 1);
    assert!(report.untracked_paths.contains(&untracked_file));
}
#[tokio::test]
async fn test_detect_moved_files() {
    let (storage, temp_dir) = setup_test_storage().await;

    let old_path = temp_dir.path().join("old_location.mp3");
    fs::write(&old_path, b"content").unwrap();

    let old_item = create_test_media_item(old_path.clone(), "hash_unique");
    storage.insert_media(&old_item).await.unwrap();

    // Delete the file; with no matching file elsewhere this is an orphan,
    // not a move
    fs::remove_file(&old_path).unwrap();

    let report = detect_orphans(&storage).await.unwrap();

    assert_eq!(report.orphaned_ids.len(), 1);
    assert_eq!(report.moved_files.len(), 0);
}
#[tokio::test]
async fn test_ignore_patterns_respected() {
    let (storage, temp_dir) = setup_test_storage().await;

    let root_dir = temp_dir.path().join("media");
    fs::create_dir(&root_dir).unwrap();
    storage.add_root_dir(root_dir.clone()).await.unwrap();

    // Files inside dot-directories should be ignored by the scan
    let hidden_dir = root_dir.join(".hidden");
    fs::create_dir(&hidden_dir).unwrap();

    let hidden_file = hidden_dir.join("hidden.mp3");
    fs::write(&hidden_file, b"hidden content").unwrap();

    let normal_file = root_dir.join("normal.mp3");
    fs::write(&normal_file, b"normal content").unwrap();

    let report = detect_orphans(&storage).await.unwrap();

    // Only the non-hidden file should be reported as untracked
    assert_eq!(report.untracked_paths.len(), 1);
    assert!(report.untracked_paths.contains(&normal_file));
    assert!(!report.untracked_paths.contains(&hidden_file));
}
#[tokio::test]
async fn test_only_supported_media_types() {
    let (storage, temp_dir) = setup_test_storage().await;

    let root_dir = temp_dir.path().join("media");
    fs::create_dir(&root_dir).unwrap();
    storage.add_root_dir(root_dir.clone()).await.unwrap();

    // Mix of a supported media file and unsupported extensions
    let mp3_file = root_dir.join("audio.mp3");
    let txt_file = root_dir.join("readme.txt");
    let exe_file = root_dir.join("program.exe");

    fs::write(&mp3_file, b"audio").unwrap();
    fs::write(&txt_file, b"text").unwrap();
    fs::write(&exe_file, b"binary").unwrap();

    let report = detect_orphans(&storage).await.unwrap();

    // The executable must never be picked up; at most the mp3/txt appear
    assert!(report.untracked_paths.len() <= 2);
    assert!(!report.untracked_paths.contains(&exe_file));
}
#[tokio::test]
async fn test_complete_orphan_workflow() {
    let (storage, temp_dir) = setup_test_storage().await;

    let root_dir = temp_dir.path().join("media");
    fs::create_dir(&root_dir).unwrap();
    storage.add_root_dir(root_dir.clone()).await.unwrap();

    // Orphan #1: DB record with no file on disk
    let orphaned_path = root_dir.join("orphaned.mp3");
    let orphaned_item = create_test_media_item(orphaned_path.clone(), "hash_orphaned");
    storage.insert_media(&orphaned_item).await.unwrap();

    // Untracked: file on disk with no DB record
    let untracked_path = root_dir.join("untracked.mp3");
    fs::write(&untracked_path, b"untracked").unwrap();

    // Orphan #2: another DB record with no file on disk
    let another_orphaned = root_dir.join("another_orphaned.mp3");
    let another_item = create_test_media_item(another_orphaned.clone(), "hash_another");
    storage.insert_media(&another_item).await.unwrap();

    // Healthy: file on disk with matching DB record
    let tracked_path = root_dir.join("tracked.mp3");
    fs::write(&tracked_path, b"tracked").unwrap();

    let tracked_item = create_test_media_item(tracked_path.clone(), "hash_tracked");
    storage.insert_media(&tracked_item).await.unwrap();

    let report = detect_orphans(&storage).await.unwrap();

    assert_eq!(report.orphaned_ids.len(), 2);
    assert!(report.orphaned_ids.contains(&orphaned_item.id));
    assert!(report.orphaned_ids.contains(&another_item.id));

    assert_eq!(report.untracked_paths.len(), 1);
    assert!(report.untracked_paths.contains(&untracked_path));

    assert_eq!(report.moved_files.len(), 0);
}
#[tokio::test]
async fn test_large_directory_performance() {
    let (storage, temp_dir) = setup_test_storage().await;

    let root_dir = temp_dir.path().join("media");
    fs::create_dir(&root_dir).unwrap();
    storage.add_root_dir(root_dir.clone()).await.unwrap();

    // 1000 files on disk...
    for i in 0..1000 {
        let file_path = root_dir.join(format!("file_{}.mp3", i));
        fs::write(&file_path, format!("content {}", i)).unwrap();
    }

    // ...but only the first 500 are tracked in storage
    for i in 0..500 {
        let file_path = root_dir.join(format!("file_{}.mp3", i));
        let item = create_test_media_item(file_path, &format!("hash_{}", i));
        storage.insert_media(&item).await.unwrap();
    }

    let start = std::time::Instant::now();
    let report = detect_orphans(&storage).await.unwrap();
    let elapsed = start.elapsed();

    // Coarse performance bound to catch accidental O(n^2) scans
    assert!(
        elapsed.as_secs() < 5,
        "Detection took too long: {:?}",
        elapsed
    );

    assert_eq!(report.untracked_paths.len(), 500);
}

View file

@@ -1,212 +1,210 @@
use pinakes_core::links::extract_links;
use pinakes_core::model::*;
use pinakes_core::storage::StorageBackend;
use pinakes_core::{links::extract_links, model::*, storage::StorageBackend};
mod common;
/// Create test markdown content with multiple links.
///
/// Produces a `# Test Note` header followed by `num_links` lines of the
/// form `Link i: [[note_i]]`; `num_links == 0` yields just the header.
fn create_test_note_content(num_links: usize) -> String {
    let mut content = String::from("# Test Note\n\n");
    for i in 0..num_links {
        content.push_str(&format!("Link {}: [[note_{}]]\n", i, i));
    }
    content
}
#[tokio::test]
async fn test_save_links_atomicity_success_case() {
    // Setup: Create in-memory database
    let storage = common::setup().await;

    // Create a test note
    let note_id = MediaId::new();
    let item = common::make_test_markdown_item(note_id);
    storage.insert_media(&item).await.unwrap();

    // Extract links from test content
    let content = create_test_note_content(5);
    let links = extract_links(note_id, &content);
    assert_eq!(links.len(), 5, "Should extract 5 links");

    // Save links (first time - should succeed)
    storage.save_markdown_links(note_id, &links).await.unwrap();

    // Verify all links were saved
    let saved_links = storage.get_outgoing_links(note_id).await.unwrap();
    assert_eq!(saved_links.len(), 5, "All 5 links should be saved");

    // Update with new links
    let new_content = create_test_note_content(3);
    let new_links = extract_links(note_id, &new_content);

    // Save again (should replace old links)
    storage
        .save_markdown_links(note_id, &new_links)
        .await
        .unwrap();

    // Verify old links were deleted and new links saved
    let updated_links = storage.get_outgoing_links(note_id).await.unwrap();
    assert_eq!(
        updated_links.len(),
        3,
        "Should have exactly 3 links after update"
    );
}
#[tokio::test]
async fn test_save_links_atomicity_with_valid_data() {
    // This test verifies that the transaction commit works correctly
    // by saving links multiple times and ensuring consistency
    let storage = common::setup().await;
    let note_id = MediaId::new();
    let item = common::make_test_markdown_item(note_id);
    storage.insert_media(&item).await.unwrap();

    // First batch of links
    let content1 = "[[note1]] and [[note2]]";
    let links1 = extract_links(note_id, content1);
    storage.save_markdown_links(note_id, &links1).await.unwrap();

    let saved1 = storage.get_outgoing_links(note_id).await.unwrap();
    assert_eq!(saved1.len(), 2, "First save: 2 links");

    // Second batch (replace)
    let content2 = "[[note3]] [[note4]] [[note5]]";
    let links2 = extract_links(note_id, content2);
    storage.save_markdown_links(note_id, &links2).await.unwrap();

    let saved2 = storage.get_outgoing_links(note_id).await.unwrap();
    assert_eq!(saved2.len(), 3, "Second save: 3 links (old ones deleted)");

    // Third batch (empty)
    storage.save_markdown_links(note_id, &[]).await.unwrap();

    let saved3 = storage.get_outgoing_links(note_id).await.unwrap();
    assert_eq!(saved3.len(), 0, "Third save: 0 links (all deleted)");

    // Fourth batch (restore some links)
    let content4 = "[[final_note]]";
    let links4 = extract_links(note_id, content4);
    storage.save_markdown_links(note_id, &links4).await.unwrap();

    let saved4 = storage.get_outgoing_links(note_id).await.unwrap();
    assert_eq!(saved4.len(), 1, "Fourth save: 1 link");
    assert_eq!(saved4[0].target_path, "final_note", "Correct link target");
}
#[tokio::test]
async fn test_save_links_idempotency() {
// Verify that saving the same links multiple times is safe
let storage = common::setup().await;
let note_id = MediaId::new();
let item = common::make_test_markdown_item(note_id);
storage.insert_media(&item).await.unwrap();
// Verify that saving the same links multiple times is safe
let storage = common::setup().await;
let note_id = MediaId::new();
let item = common::make_test_markdown_item(note_id);
storage.insert_media(&item).await.unwrap();
let content = "[[note_a]] [[note_b]]";
let links = extract_links(note_id, content);
let content = "[[note_a]] [[note_b]]";
let links = extract_links(note_id, content);
// Save same links 3 times
storage.save_markdown_links(note_id, &links).await.unwrap();
storage.save_markdown_links(note_id, &links).await.unwrap();
storage.save_markdown_links(note_id, &links).await.unwrap();
// Save same links 3 times
storage.save_markdown_links(note_id, &links).await.unwrap();
storage.save_markdown_links(note_id, &links).await.unwrap();
storage.save_markdown_links(note_id, &links).await.unwrap();
// Should still have exactly 2 links (not duplicated)
let saved = storage.get_outgoing_links(note_id).await.unwrap();
assert_eq!(
saved.len(),
2,
"Should have exactly 2 links (no duplicates)"
);
// Should still have exactly 2 links (not duplicated)
let saved = storage.get_outgoing_links(note_id).await.unwrap();
assert_eq!(
saved.len(),
2,
"Should have exactly 2 links (no duplicates)"
);
}
#[tokio::test]
async fn test_save_links_concurrent_updates() {
// Test that concurrent updates to different notes don't interfere
let storage = common::setup().await;
// Test that concurrent updates to different notes don't interfere
let storage = common::setup().await;
// Create two different notes
let note1_id = MediaId::new();
let note2_id = MediaId::new();
let item1 = common::make_test_markdown_item(note1_id);
let item2 = common::make_test_markdown_item(note2_id);
storage.insert_media(&item1).await.unwrap();
storage.insert_media(&item2).await.unwrap();
// Create two different notes
let note1_id = MediaId::new();
let note2_id = MediaId::new();
let item1 = common::make_test_markdown_item(note1_id);
let item2 = common::make_test_markdown_item(note2_id);
storage.insert_media(&item1).await.unwrap();
storage.insert_media(&item2).await.unwrap();
// Save links for both notes
let links1 = extract_links(note1_id, "[[target1]]");
let links2 = extract_links(note2_id, "[[target2]] [[target3]]");
// Save links for both notes
let links1 = extract_links(note1_id, "[[target1]]");
let links2 = extract_links(note2_id, "[[target2]] [[target3]]");
// Execute both saves. We do so in sequence since we can't test true concurrency easily
// ...or so I think. Database tests are annoying.
storage
.save_markdown_links(note1_id, &links1)
.await
.unwrap();
storage
.save_markdown_links(note2_id, &links2)
.await
.unwrap();
// Execute both saves. We do so in sequence since we can't test true
// concurrency easily ...or so I think. Database tests are annoying.
storage
.save_markdown_links(note1_id, &links1)
.await
.unwrap();
storage
.save_markdown_links(note2_id, &links2)
.await
.unwrap();
// Verify both notes have correct links
let saved1 = storage.get_outgoing_links(note1_id).await.unwrap();
let saved2 = storage.get_outgoing_links(note2_id).await.unwrap();
// Verify both notes have correct links
let saved1 = storage.get_outgoing_links(note1_id).await.unwrap();
let saved2 = storage.get_outgoing_links(note2_id).await.unwrap();
assert_eq!(saved1.len(), 1, "Note 1 should have 1 link");
assert_eq!(saved2.len(), 2, "Note 2 should have 2 links");
assert_eq!(saved1.len(), 1, "Note 1 should have 1 link");
assert_eq!(saved2.len(), 2, "Note 2 should have 2 links");
// Update note 1 - should not affect note 2
let new_links1 = extract_links(note1_id, "[[target_new1]] [[target_new2]]");
storage
.save_markdown_links(note1_id, &new_links1)
.await
.unwrap();
// Update note 1 - should not affect note 2
let new_links1 = extract_links(note1_id, "[[target_new1]] [[target_new2]]");
storage
.save_markdown_links(note1_id, &new_links1)
.await
.unwrap();
// Verify note 1 updated but note 2 unchanged
let updated1 = storage.get_outgoing_links(note1_id).await.unwrap();
let unchanged2 = storage.get_outgoing_links(note2_id).await.unwrap();
// Verify note 1 updated but note 2 unchanged
let updated1 = storage.get_outgoing_links(note1_id).await.unwrap();
let unchanged2 = storage.get_outgoing_links(note2_id).await.unwrap();
assert_eq!(updated1.len(), 2, "Note 1 should have 2 links after update");
assert_eq!(unchanged2.len(), 2, "Note 2 should still have 2 links");
assert_eq!(updated1.len(), 2, "Note 1 should have 2 links after update");
assert_eq!(unchanged2.len(), 2, "Note 2 should still have 2 links");
}
#[tokio::test]
async fn test_save_links_with_large_batch() {
// Test atomicity with a large number of links
let storage = common::setup().await;
let note_id = MediaId::new();
let item = common::make_test_markdown_item(note_id);
storage.insert_media(&item).await.unwrap();
// Test atomicity with a large number of links
let storage = common::setup().await;
let note_id = MediaId::new();
let item = common::make_test_markdown_item(note_id);
storage.insert_media(&item).await.unwrap();
// Create note with 100 links
let content = create_test_note_content(100);
let links = extract_links(note_id, &content);
// Create note with 100 links
let content = create_test_note_content(100);
let links = extract_links(note_id, &content);
assert_eq!(links.len(), 100, "Should extract 100 links");
assert_eq!(links.len(), 100, "Should extract 100 links");
// Save all 100 links
storage.save_markdown_links(note_id, &links).await.unwrap();
// Save all 100 links
storage.save_markdown_links(note_id, &links).await.unwrap();
// Verify all saved
let saved = storage.get_outgoing_links(note_id).await.unwrap();
assert_eq!(saved.len(), 100, "All 100 links should be saved atomically");
// Verify all saved
let saved = storage.get_outgoing_links(note_id).await.unwrap();
assert_eq!(saved.len(), 100, "All 100 links should be saved atomically");
// Replace with smaller set
let small_content = create_test_note_content(10);
let small_links = extract_links(note_id, &small_content);
storage
.save_markdown_links(note_id, &small_links)
.await
.unwrap();
// Replace with smaller set
let small_content = create_test_note_content(10);
let small_links = extract_links(note_id, &small_content);
storage
.save_markdown_links(note_id, &small_links)
.await
.unwrap();
// Verify replacement worked
let updated = storage.get_outgoing_links(note_id).await.unwrap();
assert_eq!(
updated.len(),
10,
"Should have exactly 10 links after replacement"
);
// Verify replacement worked
let updated = storage.get_outgoing_links(note_id).await.unwrap();
assert_eq!(
updated.len(),
10,
"Should have exactly 10 links after replacement"
);
}
// XXX: Testing actual transaction rollback on error is difficult without

View file

@ -2,307 +2,309 @@ use chrono::Utc;
use pinakes_core::storage::{SessionData, StorageBackend};
use tempfile::TempDir;
async fn setup_sqlite_storage() -> pinakes_core::storage::sqlite::SqliteBackend {
let temp_dir = TempDir::new().unwrap();
let db_path = temp_dir
.path()
.join(format!("test_{}.db", uuid::Uuid::now_v7()));
async fn setup_sqlite_storage() -> pinakes_core::storage::sqlite::SqliteBackend
{
let temp_dir = TempDir::new().unwrap();
let db_path = temp_dir
.path()
.join(format!("test_{}.db", uuid::Uuid::now_v7()));
let storage = pinakes_core::storage::sqlite::SqliteBackend::new(&db_path).unwrap();
storage.run_migrations().await.unwrap();
let storage =
pinakes_core::storage::sqlite::SqliteBackend::new(&db_path).unwrap();
storage.run_migrations().await.unwrap();
// Keep temp_dir alive by leaking it (tests are short-lived anyway)
std::mem::forget(temp_dir);
// Keep temp_dir alive by leaking it (tests are short-lived anyway)
std::mem::forget(temp_dir);
storage
storage
}
#[tokio::test]
async fn test_create_and_get_session() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let now = Utc::now();
let session = SessionData {
session_token: "test_token_123".to_string(),
user_id: Some("user_1".to_string()),
username: "testuser".to_string(),
role: "admin".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
let now = Utc::now();
let session = SessionData {
session_token: "test_token_123".to_string(),
user_id: Some("user_1".to_string()),
username: "testuser".to_string(),
role: "admin".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
// Create session
storage.create_session(&session).await.unwrap();
// Create session
storage.create_session(&session).await.unwrap();
// Get session
let retrieved = storage.get_session("test_token_123").await.unwrap();
assert!(retrieved.is_some());
// Get session
let retrieved = storage.get_session("test_token_123").await.unwrap();
assert!(retrieved.is_some());
let retrieved = retrieved.unwrap();
assert_eq!(retrieved.session_token, "test_token_123");
assert_eq!(retrieved.username, "testuser");
assert_eq!(retrieved.role, "admin");
let retrieved = retrieved.unwrap();
assert_eq!(retrieved.session_token, "test_token_123");
assert_eq!(retrieved.username, "testuser");
assert_eq!(retrieved.role, "admin");
}
#[tokio::test]
async fn test_get_nonexistent_session() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let result = storage.get_session("nonexistent").await.unwrap();
assert!(result.is_none());
let result = storage.get_session("nonexistent").await.unwrap();
assert!(result.is_none());
}
#[tokio::test]
async fn test_touch_session() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let now = Utc::now();
let session = SessionData {
session_token: "test_token_456".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
let now = Utc::now();
let session = SessionData {
session_token: "test_token_456".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
storage.create_session(&session).await.unwrap();
// Wait a bit
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
// Wait a bit
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
// Touch session
storage.touch_session("test_token_456").await.unwrap();
// Touch session
storage.touch_session("test_token_456").await.unwrap();
// Verify last_accessed was updated
let updated = storage
.get_session("test_token_456")
.await
.unwrap()
.unwrap();
assert!(updated.last_accessed > now);
// Verify last_accessed was updated
let updated = storage
.get_session("test_token_456")
.await
.unwrap()
.unwrap();
assert!(updated.last_accessed > now);
}
#[tokio::test]
async fn test_delete_session() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let now = Utc::now();
let session = SessionData {
session_token: "delete_me".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "editor".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
let now = Utc::now();
let session = SessionData {
session_token: "delete_me".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "editor".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
assert!(storage.get_session("delete_me").await.unwrap().is_some());
storage.create_session(&session).await.unwrap();
assert!(storage.get_session("delete_me").await.unwrap().is_some());
// Delete session
storage.delete_session("delete_me").await.unwrap();
// Delete session
storage.delete_session("delete_me").await.unwrap();
// Verify it's gone
assert!(storage.get_session("delete_me").await.unwrap().is_none());
// Verify it's gone
assert!(storage.get_session("delete_me").await.unwrap().is_none());
}
#[tokio::test]
async fn test_delete_user_sessions() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let now = Utc::now();
let now = Utc::now();
// Create multiple sessions for the same user
for i in 0..3 {
let session = SessionData {
session_token: format!("token_{}", i),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
}
// Create session for different user
let other_session = SessionData {
session_token: "other_token".to_string(),
user_id: None,
username: "otheruser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
// Create multiple sessions for the same user
for i in 0..3 {
let session = SessionData {
session_token: format!("token_{}", i),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&other_session).await.unwrap();
storage.create_session(&session).await.unwrap();
}
// Delete all sessions for testuser
let deleted = storage.delete_user_sessions("testuser").await.unwrap();
assert_eq!(deleted, 3);
// Create session for different user
let other_session = SessionData {
session_token: "other_token".to_string(),
user_id: None,
username: "otheruser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&other_session).await.unwrap();
// Verify testuser sessions are gone
for i in 0..3 {
assert!(
storage
.get_session(&format!("token_{}", i))
.await
.unwrap()
.is_none()
);
}
// Delete all sessions for testuser
let deleted = storage.delete_user_sessions("testuser").await.unwrap();
assert_eq!(deleted, 3);
// Verify otheruser session still exists
assert!(storage.get_session("other_token").await.unwrap().is_some());
// Verify testuser sessions are gone
for i in 0..3 {
assert!(
storage
.get_session(&format!("token_{}", i))
.await
.unwrap()
.is_none()
);
}
// Verify otheruser session still exists
assert!(storage.get_session("other_token").await.unwrap().is_some());
}
#[tokio::test]
async fn test_delete_expired_sessions() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let now = Utc::now();
let now = Utc::now();
// Create expired session
let expired = SessionData {
session_token: "expired_token".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now - chrono::Duration::hours(25),
expires_at: now - chrono::Duration::hours(1), // Expired 1 hour ago
last_accessed: now - chrono::Duration::hours(2),
};
storage.create_session(&expired).await.unwrap();
// Create expired session
let expired = SessionData {
session_token: "expired_token".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now - chrono::Duration::hours(25),
expires_at: now - chrono::Duration::hours(1), // Expired 1 hour ago
last_accessed: now - chrono::Duration::hours(2),
};
storage.create_session(&expired).await.unwrap();
// Create valid session
let valid = SessionData {
session_token: "valid_token".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&valid).await.unwrap();
// Create valid session
let valid = SessionData {
session_token: "valid_token".to_string(),
user_id: None,
username: "testuser".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&valid).await.unwrap();
// Delete expired sessions
let deleted = storage.delete_expired_sessions().await.unwrap();
assert_eq!(deleted, 1);
// Delete expired sessions
let deleted = storage.delete_expired_sessions().await.unwrap();
assert_eq!(deleted, 1);
// Verify expired is gone, valid remains
assert!(
storage
.get_session("expired_token")
.await
.unwrap()
.is_none()
);
assert!(storage.get_session("valid_token").await.unwrap().is_some());
// Verify expired is gone, valid remains
assert!(
storage
.get_session("expired_token")
.await
.unwrap()
.is_none()
);
assert!(storage.get_session("valid_token").await.unwrap().is_some());
}
#[tokio::test]
async fn test_list_active_sessions() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let now = Utc::now();
let now = Utc::now();
// Create active sessions for different users
for i in 0..3 {
let session = SessionData {
session_token: format!("user1_token_{}", i),
user_id: None,
username: "user1".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
}
for i in 0..2 {
let session = SessionData {
session_token: format!("user2_token_{}", i),
user_id: None,
username: "user2".to_string(),
role: "admin".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
}
// Create expired session
let expired = SessionData {
session_token: "expired".to_string(),
user_id: None,
username: "user1".to_string(),
role: "viewer".to_string(),
created_at: now - chrono::Duration::hours(25),
expires_at: now - chrono::Duration::hours(1),
last_accessed: now - chrono::Duration::hours(2),
// Create active sessions for different users
for i in 0..3 {
let session = SessionData {
session_token: format!("user1_token_{}", i),
user_id: None,
username: "user1".to_string(),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&expired).await.unwrap();
storage.create_session(&session).await.unwrap();
}
// List all active sessions
let all_active = storage.list_active_sessions(None).await.unwrap();
assert_eq!(all_active.len(), 5); // 3 + 2, expired not included
for i in 0..2 {
let session = SessionData {
session_token: format!("user2_token_{}", i),
user_id: None,
username: "user2".to_string(),
role: "admin".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
}
// List active sessions for user1
let user1_active = storage.list_active_sessions(Some("user1")).await.unwrap();
assert_eq!(user1_active.len(), 3);
// Create expired session
let expired = SessionData {
session_token: "expired".to_string(),
user_id: None,
username: "user1".to_string(),
role: "viewer".to_string(),
created_at: now - chrono::Duration::hours(25),
expires_at: now - chrono::Duration::hours(1),
last_accessed: now - chrono::Duration::hours(2),
};
storage.create_session(&expired).await.unwrap();
// List active sessions for user2
let user2_active = storage.list_active_sessions(Some("user2")).await.unwrap();
assert_eq!(user2_active.len(), 2);
// List all active sessions
let all_active = storage.list_active_sessions(None).await.unwrap();
assert_eq!(all_active.len(), 5); // 3 + 2, expired not included
// List active sessions for user1
let user1_active = storage.list_active_sessions(Some("user1")).await.unwrap();
assert_eq!(user1_active.len(), 3);
// List active sessions for user2
let user2_active = storage.list_active_sessions(Some("user2")).await.unwrap();
assert_eq!(user2_active.len(), 2);
}
#[tokio::test]
async fn test_concurrent_session_operations() {
let storage = setup_sqlite_storage().await;
let storage = setup_sqlite_storage().await;
let now = Utc::now();
let storage = std::sync::Arc::new(storage);
let now = Utc::now();
let storage = std::sync::Arc::new(storage);
// Create sessions concurrently
let mut handles = vec![];
for i in 0..10 {
let storage = storage.clone();
let handle = tokio::spawn(async move {
let session = SessionData {
session_token: format!("concurrent_{}", i),
user_id: None,
username: format!("user{}", i),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
});
handles.push(handle);
}
// Create sessions concurrently
let mut handles = vec![];
for i in 0..10 {
let storage = storage.clone();
let handle = tokio::spawn(async move {
let session = SessionData {
session_token: format!("concurrent_{}", i),
user_id: None,
username: format!("user{}", i),
role: "viewer".to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24),
last_accessed: now,
};
storage.create_session(&session).await.unwrap();
});
handles.push(handle);
}
// Wait for all to complete
for handle in handles {
handle.await.unwrap();
}
// Wait for all to complete
for handle in handles {
handle.await.unwrap();
}
// Verify all sessions were created
for i in 0..10 {
assert!(
storage
.get_session(&format!("concurrent_{}", i))
.await
.unwrap()
.is_some()
);
}
// Verify all sessions were created
for i in 0..10 {
assert!(
storage
.get_session(&format!("concurrent_{}", i))
.await
.unwrap()
.is_some()
);
}
}