pinakes-core: improve media management features; various configuration improvements

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I2d1f04f13970d21c36067f30bc04a9176a6a6964
This commit is contained in:
raf 2026-02-05 00:54:10 +03:00
commit e02c15490e
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
31 changed files with 1167 additions and 197 deletions

View file

@ -102,6 +102,8 @@ pub struct Config {
pub cloud: CloudConfig,
#[serde(default)]
pub analytics: AnalyticsConfig,
#[serde(default)]
pub photos: PhotoConfig,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -499,6 +501,65 @@ impl Default for AnalyticsConfig {
}
}
// ===== Photo Management Configuration =====
/// Photo management settings.
///
/// Controls the optional photo-specific pipeline features (perceptual
/// hashing, EXIF auto-tagging, thumbnail variants, event detection). Every
/// field carries a serde default, so the entire section may be omitted from
/// the configuration file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PhotoConfig {
/// Generate perceptual hashes for image duplicate detection (CPU-intensive)
#[serde(default = "default_true")]
pub generate_perceptual_hash: bool,
/// Automatically create tags from EXIF keywords
#[serde(default)]
pub auto_tag_from_exif: bool,
/// Generate multi-resolution thumbnails (tiny, grid, preview)
#[serde(default)]
pub multi_resolution_thumbnails: bool,
/// Auto-detect photo events/albums based on time and location
#[serde(default)]
pub enable_event_detection: bool,
/// Minimum number of photos to form an event
#[serde(default = "default_min_event_photos")]
pub min_event_photos: usize,
/// Maximum time gap between photos in the same event (in seconds)
#[serde(default = "default_event_time_gap")]
pub event_time_gap_secs: i64,
/// Maximum distance between photos in the same event (in kilometers)
#[serde(default = "default_event_distance")]
pub event_max_distance_km: f64,
}
/// Serde default for [`PhotoConfig::min_event_photos`].
fn default_min_event_photos() -> usize {
    5
}

/// Serde default for [`PhotoConfig::event_time_gap_secs`]: two hours.
fn default_event_time_gap() -> i64 {
    7200
}

/// Serde default for [`PhotoConfig::event_max_distance_km`]: one kilometer.
fn default_event_distance() -> f64 {
    1.0
}
impl Default for PhotoConfig {
/// Defaults mirror the per-field serde defaults declared on the struct,
/// so an omitted `photos` section and `PhotoConfig::default()` agree.
fn default() -> Self {
Self {
// Matches the `default_true` serde default on the field.
generate_perceptual_hash: true,
auto_tag_from_exif: false,
multi_resolution_thumbnails: false,
enable_event_detection: false,
min_event_photos: default_min_event_photos(),
event_time_gap_secs: default_event_time_gap(),
event_max_distance_km: default_event_distance(),
}
}
}
// ===== Storage Configuration =====
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -867,6 +928,7 @@ impl Default for Config {
enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(),
photos: PhotoConfig::default(),
}
}
}

View file

@ -26,8 +26,9 @@ impl BookEnricher {
pub async fn try_openlibrary(&self, isbn: &str) -> Result<Option<ExternalMetadata>> {
match self.openlibrary.fetch_by_isbn(isbn).await {
Ok(book) => {
let metadata_json = serde_json::to_string(&book)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
let metadata_json = serde_json::to_string(&book).map_err(|e| {
PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),
@ -48,8 +49,9 @@ impl BookEnricher {
match self.googlebooks.fetch_by_isbn(isbn).await {
Ok(books) if !books.is_empty() => {
let book = &books[0];
let metadata_json = serde_json::to_string(book)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
let metadata_json = serde_json::to_string(book).map_err(|e| {
PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),
@ -75,8 +77,9 @@ impl BookEnricher {
if let Ok(results) = self.openlibrary.search(title, author).await
&& let Some(result) = results.first()
{
let metadata_json = serde_json::to_string(result)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
let metadata_json = serde_json::to_string(result).map_err(|e| {
PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
return Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),
@ -93,8 +96,9 @@ impl BookEnricher {
if let Ok(results) = self.googlebooks.search(title, author).await
&& let Some(book) = results.first()
{
let metadata_json = serde_json::to_string(book)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
let metadata_json = serde_json::to_string(book).map_err(|e| {
PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
return Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),

View file

@ -31,12 +31,10 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key));
}
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("Google Books request failed: {}", e)))?;
let response =
self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Google Books request failed: {}", e))
})?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
@ -45,10 +43,9 @@ impl GoogleBooksClient {
)));
}
let volumes: GoogleBooksResponse = response
.json()
.await
.map_err(|e| PinakesError::External(format!("Failed to parse Google Books response: {}", e)))?;
let volumes: GoogleBooksResponse = response.json().await.map_err(|e| {
PinakesError::External(format!("Failed to parse Google Books response: {}", e))
})?;
Ok(volumes.items)
}
@ -70,12 +67,10 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key));
}
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("Google Books search failed: {}", e)))?;
let response =
self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Google Books search failed: {}", e))
})?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
@ -84,10 +79,9 @@ impl GoogleBooksClient {
)));
}
let volumes: GoogleBooksResponse = response
.json()
.await
.map_err(|e| PinakesError::External(format!("Failed to parse search results: {}", e)))?;
let volumes: GoogleBooksResponse = response.json().await.map_err(|e| {
PinakesError::External(format!("Failed to parse search results: {}", e))
})?;
Ok(volumes.items)
}

View file

@ -30,12 +30,10 @@ impl OpenLibraryClient {
pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<OpenLibraryBook> {
let url = format!("{}/isbn/{}.json", self.base_url, isbn);
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("OpenLibrary request failed: {}", e)))?;
let response =
self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("OpenLibrary request failed: {}", e))
})?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
@ -44,15 +42,22 @@ impl OpenLibraryClient {
)));
}
response
.json::<OpenLibraryBook>()
.await
.map_err(|e| PinakesError::External(format!("Failed to parse OpenLibrary response: {}", e)))
response.json::<OpenLibraryBook>().await.map_err(|e| {
PinakesError::External(format!("Failed to parse OpenLibrary response: {}", e))
})
}
/// Search for books by title and author
pub async fn search(&self, title: &str, author: Option<&str>) -> Result<Vec<OpenLibrarySearchResult>> {
let mut url = format!("{}/search.json?title={}", self.base_url, urlencoding::encode(title));
pub async fn search(
&self,
title: &str,
author: Option<&str>,
) -> Result<Vec<OpenLibrarySearchResult>> {
let mut url = format!(
"{}/search.json?title={}",
self.base_url,
urlencoding::encode(title)
);
if let Some(author) = author {
url.push_str(&format!("&author={}", urlencoding::encode(author)));
@ -74,10 +79,9 @@ impl OpenLibraryClient {
)));
}
let search_response: OpenLibrarySearchResponse = response
.json()
.await
.map_err(|e| PinakesError::External(format!("Failed to parse search results: {}", e)))?;
let search_response: OpenLibrarySearchResponse = response.json().await.map_err(|e| {
PinakesError::External(format!("Failed to parse search results: {}", e))
})?;
Ok(search_response.docs)
}
@ -153,9 +157,9 @@ impl OpenLibraryClient {
#[derive(Debug, Clone, Copy)]
pub enum CoverSize {
Small, // 256x256
Medium, // 600x800
Large, // Original
Small, // 256x256
Medium, // 600x800
Large, // Original
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -277,7 +281,8 @@ mod tests {
let string_desc: StringOrObject = serde_json::from_str(r#""Simple description""#).unwrap();
assert_eq!(string_desc.as_str(), "Simple description");
let object_desc: StringOrObject = serde_json::from_str(r#"{"value": "Object description"}"#).unwrap();
let object_desc: StringOrObject =
serde_json::from_str(r#"{"value": "Object description"}"#).unwrap();
assert_eq!(object_desc.as_str(), "Object description");
}
}

View file

@ -1,132 +1,205 @@
use std::sync::Arc;
//! Auto-detection of photo events and albums based on time and location proximity
use serde::{Deserialize, Serialize};
use tokio::sync::broadcast;
use tracing::warn;
use chrono::{DateTime, Utc};
use crate::config::WebhookConfig;
use crate::error::Result;
use crate::model::{MediaId, MediaItem};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PinakesEvent {
MediaImported {
media_id: String,
},
MediaUpdated {
media_id: String,
},
MediaDeleted {
media_id: String,
},
ScanCompleted {
files_found: usize,
files_processed: usize,
},
IntegrityMismatch {
media_id: String,
expected: String,
actual: String,
},
MediaRated {
media_id: String,
user_id: String,
stars: u8,
},
MediaCommented {
media_id: String,
user_id: String,
},
PlaylistCreated {
playlist_id: String,
owner_id: String,
},
TranscodeStarted {
media_id: String,
profile: String,
},
TranscodeCompleted {
media_id: String,
profile: String,
},
/// Configuration for event detection
#[derive(Debug, Clone)]
pub struct EventDetectionConfig {
/// Maximum time gap between photos in the same event (in seconds)
pub max_time_gap_secs: i64,
/// Minimum number of photos to form an event
pub min_photos: usize,
/// Maximum distance between photos in the same event (in kilometers)
/// None means location is not considered
pub max_distance_km: Option<f64>,
/// Consider photos on the same day as potentially the same event
pub same_day_threshold: bool,
}
impl PinakesEvent {
pub fn event_name(&self) -> &'static str {
match self {
Self::MediaImported { .. } => "media_imported",
Self::MediaUpdated { .. } => "media_updated",
Self::MediaDeleted { .. } => "media_deleted",
Self::ScanCompleted { .. } => "scan_completed",
Self::IntegrityMismatch { .. } => "integrity_mismatch",
Self::MediaRated { .. } => "media_rated",
Self::MediaCommented { .. } => "media_commented",
Self::PlaylistCreated { .. } => "playlist_created",
Self::TranscodeStarted { .. } => "transcode_started",
Self::TranscodeCompleted { .. } => "transcode_completed",
impl Default for EventDetectionConfig {
fn default() -> Self {
Self {
max_time_gap_secs: 2 * 60 * 60, // 2 hours
min_photos: 5,
max_distance_km: Some(1.0), // 1km
same_day_threshold: true,
}
}
}
pub struct EventBus {
tx: broadcast::Sender<PinakesEvent>,
/// A detected photo event/album
#[derive(Debug, Clone)]
pub struct DetectedEvent {
/// Suggested name for the event (e.g., "Photos from 2024-01-15")
pub suggested_name: String,
/// Start time of the event
pub start_time: DateTime<Utc>,
/// End time of the event
pub end_time: DateTime<Utc>,
/// Media items in this event
pub items: Vec<MediaId>,
/// Representative location (if available)
pub location: Option<(f64, f64)>, // (latitude, longitude)
}
impl EventBus {
pub fn new(webhooks: Vec<WebhookConfig>) -> Arc<Self> {
let (tx, _) = broadcast::channel(256);
/// Calculate Haversine distance between two GPS coordinates in kilometers
fn haversine_distance(lat1: f64, lon1: f64, lat2: f64, lon2: f64) -> f64 {
const EARTH_RADIUS_KM: f64 = 6371.0;
// Spawn webhook delivery task
if !webhooks.is_empty() {
let mut rx: broadcast::Receiver<PinakesEvent> = tx.subscribe();
let webhooks = Arc::new(webhooks);
tokio::spawn(async move {
while let Ok(event) = rx.recv().await {
let event_name = event.event_name();
for hook in webhooks.iter() {
if hook.events.iter().any(|e| e == event_name || e == "*") {
let url = hook.url.clone();
let event_clone = event.clone();
let secret = hook.secret.clone();
tokio::spawn(async move {
deliver_webhook(&url, &event_clone, secret.as_deref()).await;
});
}
}
}
});
}
let dlat = (lat2 - lat1).to_radians();
let dlon = (lon2 - lon1).to_radians();
Arc::new(Self { tx })
}
let a = (dlat / 2.0).sin().powi(2)
+ lat1.to_radians().cos() * lat2.to_radians().cos() * (dlon / 2.0).sin().powi(2);
pub fn emit(&self, event: PinakesEvent) {
// Ignore send errors (no receivers)
let _ = self.tx.send(event);
}
let c = 2.0 * a.sqrt().atan2((1.0 - a).sqrt());
EARTH_RADIUS_KM * c
}
async fn deliver_webhook(url: &str, event: &PinakesEvent, _secret: Option<&str>) {
let client = reqwest::Client::new();
let body = serde_json::to_string(event).unwrap_or_default();
/// Detect photo events from a list of media items
pub fn detect_events(
mut items: Vec<MediaItem>,
config: &EventDetectionConfig,
) -> Result<Vec<DetectedEvent>> {
// Filter to only photos with date_taken
items.retain(|item| item.date_taken.is_some());
for attempt in 0..3 {
match client
.post(url)
.header("Content-Type", "application/json")
.body(body.clone())
.send()
.await
{
Ok(resp) if resp.status().is_success() => return,
Ok(resp) => {
warn!(url, status = %resp.status(), attempt, "webhook delivery failed");
if items.is_empty() {
return Ok(Vec::new());
}
// Sort by date_taken
items.sort_by(|a, b| a.date_taken.unwrap().cmp(&b.date_taken.unwrap()));
let mut events: Vec<DetectedEvent> = Vec::new();
let mut current_event_items: Vec<MediaId> = vec![items[0].id];
let mut current_start_time = items[0].date_taken.unwrap();
let mut current_last_time = items[0].date_taken.unwrap();
let mut current_location = items[0].latitude.zip(items[0].longitude);
for item in items.iter().skip(1) {
let item_time = item.date_taken.unwrap();
let time_gap = (item_time - current_last_time).num_seconds();
// Check time gap
let time_ok = if config.same_day_threshold {
// Same day or within time gap
item_time.date_naive() == current_last_time.date_naive()
|| time_gap <= config.max_time_gap_secs
} else {
time_gap <= config.max_time_gap_secs
};
// Check location proximity if both have GPS data
let location_ok = match (
config.max_distance_km,
current_location,
item.latitude.zip(item.longitude),
) {
(Some(max_dist), Some((lat1, lon1)), Some((lat2, lon2))) => {
let dist = haversine_distance(lat1, lon1, lat2, lon2);
dist <= max_dist
}
Err(e) => {
warn!(url, error = %e, attempt, "webhook delivery error");
// If no location constraint or missing GPS, consider location OK
_ => true,
};
if time_ok && location_ok {
// Add to current event
current_event_items.push(item.id);
current_last_time = item_time;
// Update location to average if available
if let (Some((lat1, lon1)), Some((lat2, lon2))) =
(current_location, item.latitude.zip(item.longitude))
{
current_location = Some(((lat1 + lat2) / 2.0, (lon1 + lon2) / 2.0));
} else if item.latitude.is_some() && item.longitude.is_some() {
current_location = item.latitude.zip(item.longitude);
}
} else {
// Start new event if current has enough photos
if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent {
suggested_name: event_name,
start_time: current_start_time,
end_time: current_last_time,
items: current_event_items.clone(),
location: current_location,
});
}
// Reset for new event
current_event_items = vec![item.id];
current_start_time = item_time;
current_last_time = item_time;
current_location = item.latitude.zip(item.longitude);
}
}
// Don't forget the last event
if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent {
suggested_name: event_name,
start_time: current_start_time,
end_time: current_last_time,
items: current_event_items,
location: current_location,
});
}
Ok(events)
}
/// Detect photo bursts (rapid sequences of photos)
/// Returns groups of media IDs that are likely burst sequences
pub fn detect_bursts(
mut items: Vec<MediaItem>,
max_gap_secs: i64,
min_burst_size: usize,
) -> Result<Vec<Vec<MediaId>>> {
// Filter to only photos with date_taken
items.retain(|item| item.date_taken.is_some());
if items.is_empty() {
return Ok(Vec::new());
}
// Sort by date_taken
items.sort_by(|a, b| a.date_taken.unwrap().cmp(&b.date_taken.unwrap()));
let mut bursts: Vec<Vec<MediaId>> = Vec::new();
let mut current_burst: Vec<MediaId> = vec![items[0].id];
let mut last_time = items[0].date_taken.unwrap();
for item in items.iter().skip(1) {
let item_time = item.date_taken.unwrap();
let gap = (item_time - last_time).num_seconds();
if gap <= max_gap_secs {
current_burst.push(item.id);
} else {
if current_burst.len() >= min_burst_size {
bursts.push(current_burst.clone());
}
current_burst = vec![item.id];
}
// Exponential backoff
tokio::time::sleep(std::time::Duration::from_secs(1 << attempt)).await;
last_time = item_time;
}
// Don't forget the last burst
if current_burst.len() >= min_burst_size {
bursts.push(current_burst);
}
Ok(bursts)
}

View file

@ -21,12 +21,24 @@ pub struct ImportResult {
}
/// Options for import operations
#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone)]
pub struct ImportOptions {
/// Skip files that haven't changed since last scan (based on mtime)
pub incremental: bool,
/// Force re-import even if mtime hasn't changed
pub force: bool,
/// Photo configuration for toggleable features
pub photo_config: crate::config::PhotoConfig,
}
impl Default for ImportOptions {
/// Non-incremental, non-forced import using default photo settings.
///
/// NOTE(review): this impl is exactly what `#[derive(Default)]` would
/// generate (both bools default to `false` and `PhotoConfig` implements
/// `Default`) — consider deriving instead of hand-writing it.
fn default() -> Self {
Self {
incremental: false,
force: false,
photo_config: crate::config::PhotoConfig::default(),
}
}
}
/// Get the modification time of a file as a Unix timestamp
@ -147,6 +159,15 @@ pub async fn import_file_with_options(
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
};
// Generate perceptual hash for image files (if enabled in config)
let perceptual_hash = if options.photo_config.generate_perceptual_hash
&& media_type.category() == crate::media_type::MediaCategory::Image
{
crate::metadata::image::generate_perceptual_hash(&path)
} else {
None
};
let item = MediaItem {
id: media_id,
path: path.clone(),
@ -164,6 +185,16 @@ pub async fn import_file_with_options(
thumbnail_path: thumb_path,
custom_fields: std::collections::HashMap::new(),
file_mtime: current_mtime,
// Photo-specific metadata from extraction
date_taken: extracted.date_taken,
latitude: extracted.latitude,
longitude: extracted.longitude,
camera_make: extracted.camera_make,
camera_model: extracted.camera_model,
rating: extracted.rating,
perceptual_hash,
created_at: now,
updated_at: now,
};

View file

@ -35,32 +35,38 @@ impl MetadataExtractor for ImageExtractor {
meta.extra.insert("height".to_string(), h.to_string());
}
// Camera make and model
// Camera make and model - set both in top-level fields and extra
if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY) {
let val = make.display_value().to_string();
let val = make.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_make = Some(val.clone());
meta.extra.insert("camera_make".to_string(), val);
}
}
if let Some(model) = exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY) {
let val = model.display_value().to_string();
let val = model.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_model = Some(val.clone());
meta.extra.insert("camera_model".to_string(), val);
}
}
// Date taken
// Date taken - parse EXIF date format (YYYY:MM:DD HH:MM:SS)
if let Some(date) = exif_data
.get_field(exif::Tag::DateTimeOriginal, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::DateTime, exif::In::PRIMARY))
{
let val = date.display_value().to_string();
if !val.is_empty() {
// Try parsing EXIF format: "YYYY:MM:DD HH:MM:SS"
if let Some(dt) = parse_exif_datetime(&val) {
meta.date_taken = Some(dt);
}
meta.extra.insert("date_taken".to_string(), val);
}
}
// GPS coordinates
// GPS coordinates - set both in top-level fields and extra
if let (Some(lat), Some(lat_ref), Some(lon), Some(lon_ref)) = (
exif_data.get_field(exif::Tag::GPSLatitude, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLatitudeRef, exif::In::PRIMARY),
@ -69,6 +75,8 @@ impl MetadataExtractor for ImageExtractor {
) && let (Some(lat_val), Some(lon_val)) =
(dms_to_decimal(lat, lat_ref), dms_to_decimal(lon, lon_ref))
{
meta.latitude = Some(lat_val);
meta.longitude = Some(lon_val);
meta.extra
.insert("gps_latitude".to_string(), format!("{lat_val:.6}"));
meta.extra
@ -211,3 +219,45 @@ fn dms_to_decimal(dms_field: &exif::Field, ref_field: &exif::Field) -> Option<f6
}
None
}
/// Parse an EXIF-style datetime string into a UTC timestamp.
///
/// EXIF stores timestamps as `"YYYY:MM:DD HH:MM:SS"`; an ISO-like
/// `"YYYY-MM-DD HH:MM:SS"` form is accepted as a fallback. The naive value
/// is interpreted as UTC (EXIF carries no timezone information). Returns
/// `None` when neither format matches.
fn parse_exif_datetime(s: &str) -> Option<chrono::DateTime<chrono::Utc>> {
    use chrono::NaiveDateTime;

    // Strip whitespace plus the surrounding quotes exif display values carry.
    let cleaned = s.trim().trim_matches('"');

    // Canonical EXIF format first, then the ISO-like fallback.
    ["%Y:%m:%d %H:%M:%S", "%Y-%m-%d %H:%M:%S"]
        .into_iter()
        .find_map(|fmt| NaiveDateTime::parse_from_str(cleaned, fmt).ok())
        .map(|dt| dt.and_utc())
}
/// Generate a perceptual hash for an image file.
/// Uses the double-gradient hash algorithm (`HashAlg::DoubleGradient`) for
/// robust similarity detection across re-encodes and rescales.
/// Returns a base64-encoded hash string (decodable with
/// `ImageHash::from_base64`), or None if the image cannot be processed.
pub fn generate_perceptual_hash(path: &Path) -> Option<String> {
use image_hasher::{HashAlg, HasherConfig};
// Open and decode the image; any unreadable or undecodable file yields None.
let img = image::open(path).ok()?;
// Configure the hasher. NOTE(review): an earlier comment here claimed a
// DCT hash; the configured algorithm is actually DoubleGradient.
let hasher = HasherConfig::new()
.hash_alg(HashAlg::DoubleGradient)
.hash_size(8, 8) // 8x8 hash dimensions
.to_hasher();
// Generate hash
let hash = hasher.hash_image(&img);
// Encode as base64 (not hex) for storage; readers must decode with
// `ImageHash::from_base64`.
Some(hash.to_base64())
}

View file

@ -22,6 +22,14 @@ pub struct ExtractedMetadata {
pub description: Option<String>,
pub extra: HashMap<String, String>,
pub book_metadata: Option<ExtractedBookMetadata>,
// Photo-specific metadata
pub date_taken: Option<chrono::DateTime<chrono::Utc>>,
pub latitude: Option<f64>,
pub longitude: Option<f64>,
pub camera_make: Option<String>,
pub camera_model: Option<String>,
pub rating: Option<i32>,
}
pub trait MetadataExtractor: Send + Sync {

View file

@ -63,6 +63,16 @@ pub struct MediaItem {
pub custom_fields: HashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental scanning
pub file_mtime: Option<i64>,
// Photo-specific metadata
pub date_taken: Option<DateTime<Utc>>,
pub latitude: Option<f64>,
pub longitude: Option<f64>,
pub camera_make: Option<String>,
pub camera_model: Option<String>,
pub rating: Option<i32>,
pub perceptual_hash: Option<String>,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}

View file

@ -156,6 +156,7 @@ pub async fn scan_directory_with_options(
let import_options = import::ImportOptions {
incremental: scan_options.incremental && !scan_options.force_full,
force: scan_options.force_full,
photo_config: crate::config::PhotoConfig::default(),
};
let results = import::import_directory_with_options(

View file

@ -196,6 +196,7 @@ pub trait StorageBackend: Send + Sync + 'static {
// Duplicates
async fn find_duplicates(&self) -> Result<Vec<Vec<MediaItem>>>;
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>>;
// Database management
async fn database_stats(&self) -> Result<DatabaseStats>;

View file

@ -170,6 +170,16 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
.map(PathBuf::from),
custom_fields: HashMap::new(),
file_mtime: row.get("file_mtime"),
// Photo-specific fields
date_taken: row.get("date_taken"),
latitude: row.get("latitude"),
longitude: row.get("longitude"),
camera_make: row.get("camera_make"),
camera_model: row.get("camera_model"),
rating: row.get("rating"),
perceptual_hash: row.get("perceptual_hash"),
created_at: row.get("created_at"),
updated_at: row.get("updated_at"),
})
@ -589,9 +599,10 @@ impl StorageBackend for PostgresBackend {
"INSERT INTO media_items (
id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at
thumbnail_path, date_taken, latitude, longitude, camera_make,
camera_model, rating, perceptual_hash, created_at, updated_at
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23
)",
&[
&item.id.0,
@ -611,6 +622,13 @@ impl StorageBackend for PostgresBackend {
.thumbnail_path
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
&item.date_taken,
&item.latitude,
&item.longitude,
&item.camera_make,
&item.camera_model,
&item.rating,
&item.perceptual_hash,
&item.created_at,
&item.updated_at,
],
@ -658,7 +676,8 @@ impl StorageBackend for PostgresBackend {
.query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at
thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items WHERE id = $1",
&[&id.0],
)
@ -681,7 +700,8 @@ impl StorageBackend for PostgresBackend {
.query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, file_mtime, created_at, updated_at
thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items WHERE content_hash = $1",
&[&hash.0],
)
@ -709,7 +729,8 @@ impl StorageBackend for PostgresBackend {
.query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, file_mtime, created_at, updated_at
thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items WHERE path = $1",
&[&path_str],
)
@ -746,7 +767,8 @@ impl StorageBackend for PostgresBackend {
let sql = format!(
"SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at
thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items
ORDER BY {order_by}
LIMIT $1 OFFSET $2"
@ -816,7 +838,8 @@ impl StorageBackend for PostgresBackend {
path = $2, file_name = $3, media_type = $4, content_hash = $5,
file_size = $6, title = $7, artist = $8, album = $9, genre = $10,
year = $11, duration_secs = $12, description = $13,
thumbnail_path = $14, updated_at = $15
thumbnail_path = $14, date_taken = $15, latitude = $16, longitude = $17,
camera_make = $18, camera_model = $19, rating = $20, perceptual_hash = $21, updated_at = $22
WHERE id = $1",
&[
&item.id.0,
@ -836,6 +859,13 @@ impl StorageBackend for PostgresBackend {
.thumbnail_path
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
&item.date_taken,
&item.latitude,
&item.longitude,
&item.camera_make,
&item.camera_model,
&item.rating,
&item.perceptual_hash,
&item.updated_at,
],
)
@ -1390,7 +1420,9 @@ impl StorageBackend for PostgresBackend {
let select = format!(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size,
m.title, m.artist, m.album, m.genre, m.year, m.duration_secs,
m.description, m.thumbnail_path, m.created_at, m.updated_at,
m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude,
m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash,
m.created_at, m.updated_at,
ts_rank(m.search_vector, plainto_tsquery('english', ${fts_param_idx})) AS rank
FROM media_items m
WHERE {full_where}
@ -1405,7 +1437,9 @@ impl StorageBackend for PostgresBackend {
let select = format!(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size,
m.title, m.artist, m.album, m.genre, m.year, m.duration_secs,
m.description, m.thumbnail_path, m.created_at, m.updated_at
m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude,
m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash,
m.created_at, m.updated_at
FROM media_items m
WHERE {full_where}
ORDER BY {order_by}
@ -1694,6 +1728,112 @@ impl StorageBackend for PostgresBackend {
Ok(groups)
}
/// Find groups of visually similar images by comparing stored perceptual
/// hashes pairwise with a Hamming-distance cutoff.
///
/// `threshold` is the maximum Hamming distance (in bits) for two hashes to
/// be grouped together; 0 demands an exact perceptual match. Only rows with
/// a non-NULL `perceptual_hash` are loaded, and only groups with two or
/// more members are returned.
///
/// NOTE(review): grouping is greedy (first ungrouped item seeds a group)
/// and the comparison is O(n^2) over all hashed images — fine for moderate
/// libraries, but may need an index structure for very large ones.
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
// Get all images with perceptual hashes
let rows = client
.query(
"SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items WHERE perceptual_hash IS NOT NULL ORDER BY id",
&[],
)
.await?;
let mut items = Vec::with_capacity(rows.len());
for row in &rows {
items.push(row_to_media_item(row)?);
}
// Batch-load custom fields in one query instead of per item.
if !items.is_empty() {
let ids: Vec<Uuid> = items.iter().map(|i| i.id.0).collect();
let cf_rows = client
.query(
"SELECT media_id, field_name, field_type, field_value
FROM custom_fields WHERE media_id = ANY($1)",
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> = HashMap::new();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
let ft_str: String = row.get("field_type");
let value: String = row.get("field_value");
let field_type = custom_field_type_from_string(&ft_str)?;
cf_map
.entry(mid)
.or_default()
.insert(name, CustomField { field_type, value });
}
for item in &mut items {
if let Some(fields) = cf_map.remove(&item.id.0) {
item.custom_fields = fields;
}
}
}
// Compare each pair and build groups
use image_hasher::ImageHash;
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
let mut grouped_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
continue;
}
// Items whose stored hash fails to decode are silently skipped
// (never grouped) rather than failing the whole scan.
let hash_a = match &items[i].perceptual_hash {
Some(h) => match ImageHash::<Vec<u8>>::from_base64(h) {
Ok(hash) => hash,
Err(_) => continue,
},
None => continue,
};
let mut group = vec![items[i].clone()];
grouped_indices.insert(i);
for (j, item_j) in items.iter().enumerate().skip(i + 1) {
if grouped_indices.contains(&j) {
continue;
}
let hash_b = match &item_j.perceptual_hash {
Some(h) => match ImageHash::<Vec<u8>>::from_base64(h) {
Ok(hash) => hash,
Err(_) => continue,
},
None => continue,
};
// Hamming distance between the two perceptual hashes.
let distance = hash_a.dist(&hash_b);
if distance <= threshold {
group.push(item_j.clone());
grouped_indices.insert(j);
}
}
// Only add groups with more than one item (actual duplicates)
if group.len() > 1 {
groups.push(group);
}
}
Ok(groups)
}
// ---- Database management ----
async fn database_stats(&self) -> Result<crate::storage::DatabaseStats> {
@ -2359,7 +2499,7 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN favorites f ON m.id = f.media_id WHERE f.user_id = $1 ORDER BY f.created_at DESC LIMIT $2 OFFSET $3",
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at FROM media_items m JOIN favorites f ON m.id = f.media_id WHERE f.user_id = $1 ORDER BY f.created_at DESC LIMIT $2 OFFSET $3",
&[&user_id.0, &(pagination.limit as i64), &(pagination.offset as i64)],
).await?;
let mut items: Vec<MediaItem> = rows
@ -2694,7 +2834,7 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN playlist_items pi ON m.id = pi.media_id WHERE pi.playlist_id = $1 ORDER BY pi.position ASC",
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at FROM media_items m JOIN playlist_items pi ON m.id = pi.media_id WHERE pi.playlist_id = $1 ORDER BY pi.position ASC",
&[&playlist_id],
).await?;
let mut items: Vec<MediaItem> = rows
@ -2843,13 +2983,13 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at, COUNT(ue.id) as view_count FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at ORDER BY view_count DESC LIMIT $1",
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at, COUNT(ue.id) as view_count FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at ORDER BY view_count DESC LIMIT $1",
&[&(limit as i64)],
).await?;
let mut results = Vec::new();
for row in &rows {
let item = row_to_media_item(row)?;
let count: i64 = row.get(16);
let count: i64 = row.get(24);
results.push((item, count as u64));
}
@ -2896,7 +3036,7 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.user_id = $1 AND ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at ORDER BY MAX(ue.timestamp) DESC LIMIT $2",
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.user_id = $1 AND ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at ORDER BY MAX(ue.timestamp) DESC LIMIT $2",
&[&user_id.0, &(limit as i64)],
).await?;
let mut items: Vec<MediaItem> = rows

View file

@ -113,6 +113,24 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
custom_fields: HashMap::new(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
// Photo-specific fields (may not be present in all queries)
date_taken: row
.get::<_, Option<String>>("date_taken")
.ok()
.flatten()
.and_then(|s| DateTime::parse_from_rfc3339(&s).ok())
.map(|dt| dt.with_timezone(&Utc)),
latitude: row.get::<_, Option<f64>>("latitude").ok().flatten(),
longitude: row.get::<_, Option<f64>>("longitude").ok().flatten(),
camera_make: row.get::<_, Option<String>>("camera_make").ok().flatten(),
camera_model: row.get::<_, Option<String>>("camera_model").ok().flatten(),
rating: row.get::<_, Option<i32>>("rating").ok().flatten(),
perceptual_hash: row
.get::<_, Option<String>>("perceptual_hash")
.ok()
.flatten(),
created_at: parse_datetime(&created_str),
updated_at: parse_datetime(&updated_str),
})
@ -610,8 +628,9 @@ impl StorageBackend for SqliteBackend {
db.execute(
"INSERT INTO media_items (id, path, file_name, media_type, content_hash, \
file_size, title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, file_mtime, created_at, updated_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17)",
thumbnail_path, file_mtime, date_taken, latitude, longitude, camera_make, \
camera_model, rating, perceptual_hash, created_at, updated_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, ?18, ?19, ?20, ?21, ?22, ?23, ?24)",
params![
item.id.0.to_string(),
item.path.to_string_lossy().as_ref(),
@ -630,6 +649,13 @@ impl StorageBackend for SqliteBackend {
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
item.file_mtime,
item.date_taken.as_ref().map(|d| d.to_rfc3339()),
item.latitude,
item.longitude,
item.camera_make,
item.camera_model,
item.rating,
item.perceptual_hash,
item.created_at.to_rfc3339(),
item.updated_at.to_rfc3339(),
],
@ -781,7 +807,9 @@ impl StorageBackend for SqliteBackend {
"UPDATE media_items SET path = ?2, file_name = ?3, media_type = ?4, \
content_hash = ?5, file_size = ?6, title = ?7, artist = ?8, album = ?9, \
genre = ?10, year = ?11, duration_secs = ?12, description = ?13, \
thumbnail_path = ?14, file_mtime = ?15, updated_at = ?16 WHERE id = ?1",
thumbnail_path = ?14, file_mtime = ?15, date_taken = ?16, latitude = ?17, \
longitude = ?18, camera_make = ?19, camera_model = ?20, rating = ?21, \
perceptual_hash = ?22, updated_at = ?23 WHERE id = ?1",
params![
item.id.0.to_string(),
item.path.to_string_lossy().as_ref(),
@ -800,6 +828,13 @@ impl StorageBackend for SqliteBackend {
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
item.file_mtime,
item.date_taken.as_ref().map(|d| d.to_rfc3339()),
item.latitude,
item.longitude,
item.camera_make,
item.camera_model,
item.rating,
item.perceptual_hash,
item.updated_at.to_rfc3339(),
],
)?;
@ -1534,6 +1569,77 @@ impl StorageBackend for SqliteBackend {
.map_err(|e| PinakesError::Database(e.to_string()))?
}
/// Find groups of visually-similar images by comparing perceptual hashes.
///
/// Loads every media item that has a `perceptual_hash`, decodes each hash
/// from base64 exactly once, then performs a greedy pairwise comparison:
/// an ungrouped item anchors a new group, and every later ungrouped item
/// whose hash is within `threshold` bits (Hamming distance) of the anchor
/// joins that group. Items whose hash is missing or fails to decode are
/// skipped. Only groups with more than one member (actual duplicates) are
/// returned.
///
/// NOTE(review): grouping is greedy — an item joins the first anchor it
/// matches and is never reconsidered against later anchors.
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>> {
    let conn = Arc::clone(&self.conn);
    // The comparison is CPU-bound and rusqlite is blocking, so run the
    // whole operation on tokio's blocking thread pool.
    tokio::task::spawn_blocking(move || {
        let db = conn
            .lock()
            .map_err(|e| PinakesError::Database(e.to_string()))?;
        // Get all images with perceptual hashes
        let mut stmt = db.prepare(
            "SELECT * FROM media_items WHERE perceptual_hash IS NOT NULL ORDER BY id",
        )?;
        let mut items: Vec<MediaItem> = stmt
            .query_map([], row_to_media_item)?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        load_custom_fields_batch(&db, &mut items)?;
        use image_hasher::ImageHash;
        // Decode each base64 hash once up front. The previous version
        // re-parsed item j's hash inside the pairwise loop, costing O(n^2)
        // base64 decodes; this makes decoding O(n).
        let hashes: Vec<Option<ImageHash<Vec<u8>>>> = items
            .iter()
            .map(|item| {
                item.perceptual_hash
                    .as_deref()
                    .and_then(|h| ImageHash::<Vec<u8>>::from_base64(h).ok())
            })
            .collect();
        let mut groups: Vec<Vec<MediaItem>> = Vec::new();
        let mut grouped_indices: std::collections::HashSet<usize> =
            std::collections::HashSet::new();
        for i in 0..items.len() {
            if grouped_indices.contains(&i) {
                continue;
            }
            // An item without a decodable hash cannot anchor a group.
            let Some(hash_a) = hashes[i].as_ref() else {
                continue;
            };
            let mut group = vec![items[i].clone()];
            grouped_indices.insert(i);
            for j in (i + 1)..items.len() {
                if grouped_indices.contains(&j) {
                    continue;
                }
                let Some(hash_b) = hashes[j].as_ref() else {
                    continue;
                };
                // Hamming distance between the two hashes; small distance
                // means visually similar images.
                if hash_a.dist(hash_b) <= threshold {
                    group.push(items[j].clone());
                    grouped_indices.insert(j);
                }
            }
            // Only add groups with more than one item (actual duplicates)
            if group.len() > 1 {
                groups.push(group);
            }
        }
        Ok(groups)
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))?
}
// -- Database management -----------------------------------------------
async fn database_stats(&self) -> Result<crate::storage::DatabaseStats> {

View file

@ -367,7 +367,14 @@ pub fn extract_epub_cover(epub_path: &Path) -> Result<Option<Vec<u8>>> {
}
// Fallback: look for common cover image filenames
let cover_names = ["cover.jpg", "cover.jpeg", "cover.png", "Cover.jpg", "Cover.jpeg", "Cover.png"];
let cover_names = [
"cover.jpg",
"cover.jpeg",
"cover.png",
"Cover.jpg",
"Cover.jpeg",
"Cover.png",
];
for name in &cover_names {
if let Some(data) = doc.get_resource_by_path(name) {
return Ok(Some(data));
@ -423,3 +430,72 @@ pub fn default_covers_dir() -> PathBuf {
/// Default directory where generated thumbnails are stored
/// (`<data_dir>/thumbnails`).
pub fn default_thumbnail_dir() -> PathBuf {
    let data_dir = crate::config::Config::default_data_dir();
    data_dir.join("thumbnails")
}
/// Thumbnail size variant for multi-resolution support.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ThumbnailSize {
    /// Tiny thumbnail for map markers and icons (64x64)
    Tiny,
    /// Grid thumbnail for library grid view (320x320)
    Grid,
    /// Preview thumbnail for quick fullscreen preview (1024x1024)
    Preview,
}

impl ThumbnailSize {
    /// Edge length in pixels for this thumbnail variant.
    pub fn pixels(&self) -> u32 {
        match *self {
            Self::Tiny => 64,
            Self::Grid => 320,
            Self::Preview => 1024,
        }
    }

    /// Name of the subdirectory this variant's thumbnails live in.
    pub fn subdir_name(&self) -> &'static str {
        match *self {
            Self::Tiny => "tiny",
            Self::Grid => "grid",
            Self::Preview => "preview",
        }
    }
}
/// Generate all thumbnail sizes for a media file
/// Returns paths to the generated thumbnails (tiny, grid, preview)
pub fn generate_all_thumbnail_sizes(
media_id: MediaId,
source_path: &Path,
media_type: MediaType,
thumbnail_base_dir: &Path,
) -> Result<(Option<PathBuf>, Option<PathBuf>, Option<PathBuf>)> {
let sizes = [
ThumbnailSize::Tiny,
ThumbnailSize::Grid,
ThumbnailSize::Preview,
];
let mut results = Vec::new();
for size in &sizes {
let size_dir = thumbnail_base_dir.join(size.subdir_name());
std::fs::create_dir_all(&size_dir)?;
let config = ThumbnailConfig {
size: size.pixels(),
..ThumbnailConfig::default()
};
let result = generate_thumbnail_with_config(
media_id,
source_path,
media_type.clone(),
&size_dir,
&config,
)?;
results.push(result);
}
Ok((results[0].clone(), results[1].clone(), results[2].clone()))
}