pinakes-core: improve media management features; various configuration improvements

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I2d1f04f13970d21c36067f30bc04a9176a6a6964
This commit is contained in:
raf 2026-02-05 00:54:10 +03:00
commit e02c15490e
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
31 changed files with 1167 additions and 197 deletions

80
Cargo.lock generated
View file

@ -3198,6 +3198,19 @@ dependencies = [
"quick-error", "quick-error",
] ]
[[package]]
name = "image_hasher"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9481465fe767d92494987319b0b447a5829edf57f09c52bf8639396abaaeaf78"
dependencies = [
"base64 0.22.1",
"image",
"rustdct",
"serde",
"transpose",
]
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "2.13.0" version = "2.13.0"
@ -4206,6 +4219,15 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "num-complex"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
dependencies = [
"num-traits",
]
[[package]] [[package]]
name = "num-conv" name = "num-conv"
version = "0.2.0" version = "0.2.0"
@ -4223,6 +4245,15 @@ dependencies = [
"syn 2.0.114", "syn 2.0.114",
] ]
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]] [[package]]
name = "num-traits" name = "num-traits"
version = "0.2.19" version = "0.2.19"
@ -4813,6 +4844,7 @@ dependencies = [
"epub", "epub",
"gray_matter", "gray_matter",
"image", "image",
"image_hasher",
"kamadak-exif", "kamadak-exif",
"lofty", "lofty",
"lopdf", "lopdf",
@ -5094,6 +5126,15 @@ dependencies = [
"syn 2.0.114", "syn 2.0.114",
] ]
[[package]]
name = "primal-check"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc0d895b311e3af9902528fbb8f928688abbd95872819320517cc24ca6b2bd08"
dependencies = [
"num-integer",
]
[[package]] [[package]]
name = "proc-macro-crate" name = "proc-macro-crate"
version = "1.3.1" version = "1.3.1"
@ -5811,6 +5852,29 @@ dependencies = [
"semver", "semver",
] ]
[[package]]
name = "rustdct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b61555105d6a9bf98797c063c362a1d24ed8ab0431655e38f1cf51e52089551"
dependencies = [
"rustfft",
]
[[package]]
name = "rustfft"
version = "6.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21db5f9893e91f41798c88680037dba611ca6674703c1a18601b01a72c8adb89"
dependencies = [
"num-complex",
"num-integer",
"num-traits",
"primal-check",
"strength_reduce",
"transpose",
]
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "0.38.44" version = "0.38.44"
@ -6364,6 +6428,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strength_reduce"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82"
[[package]] [[package]]
name = "string_cache" name = "string_cache"
version = "0.8.9" version = "0.8.9"
@ -7186,6 +7256,16 @@ dependencies = [
"syn 2.0.114", "syn 2.0.114",
] ]
[[package]]
name = "transpose"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad61aed86bc3faea4300c7aee358b4c6d0c8d6ccc36524c96e4c92ccf26e77e"
dependencies = [
"num-integer",
"strength_reduce",
]
[[package]] [[package]]
name = "tray-icon" name = "tray-icon"
version = "0.21.3" version = "0.21.3"

View file

@ -40,6 +40,7 @@ argon2 = { workspace = true }
regex = { workspace = true } regex = { workspace = true }
moka = { version = "0.12", features = ["future"] } moka = { version = "0.12", features = ["future"] }
urlencoding = "2.1" urlencoding = "2.1"
image_hasher = "2.0"
# Plugin system # Plugin system
pinakes-plugin-api = { path = "../pinakes-plugin-api" } pinakes-plugin-api = { path = "../pinakes-plugin-api" }

View file

@ -102,6 +102,8 @@ pub struct Config {
pub cloud: CloudConfig, pub cloud: CloudConfig,
#[serde(default)] #[serde(default)]
pub analytics: AnalyticsConfig, pub analytics: AnalyticsConfig,
#[serde(default)]
pub photos: PhotoConfig,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -499,6 +501,65 @@ impl Default for AnalyticsConfig {
} }
} }
// ===== Photo Management Configuration =====
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PhotoConfig {
/// Generate perceptual hashes for image duplicate detection (CPU-intensive)
#[serde(default = "default_true")]
pub generate_perceptual_hash: bool,
/// Automatically create tags from EXIF keywords
#[serde(default)]
pub auto_tag_from_exif: bool,
/// Generate multi-resolution thumbnails (tiny, grid, preview)
#[serde(default)]
pub multi_resolution_thumbnails: bool,
/// Auto-detect photo events/albums based on time and location
#[serde(default)]
pub enable_event_detection: bool,
/// Minimum number of photos to form an event
#[serde(default = "default_min_event_photos")]
pub min_event_photos: usize,
/// Maximum time gap between photos in the same event (in seconds)
#[serde(default = "default_event_time_gap")]
pub event_time_gap_secs: i64,
/// Maximum distance between photos in the same event (in kilometers)
#[serde(default = "default_event_distance")]
pub event_max_distance_km: f64,
}
fn default_min_event_photos() -> usize {
5
}
fn default_event_time_gap() -> i64 {
2 * 60 * 60 // 2 hours
}
fn default_event_distance() -> f64 {
1.0 // 1 km
}
impl Default for PhotoConfig {
fn default() -> Self {
Self {
generate_perceptual_hash: true,
auto_tag_from_exif: false,
multi_resolution_thumbnails: false,
enable_event_detection: false,
min_event_photos: default_min_event_photos(),
event_time_gap_secs: default_event_time_gap(),
event_max_distance_km: default_event_distance(),
}
}
}
// ===== Storage Configuration ===== // ===== Storage Configuration =====
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -867,6 +928,7 @@ impl Default for Config {
enrichment: EnrichmentConfig::default(), enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(), cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(), analytics: AnalyticsConfig::default(),
photos: PhotoConfig::default(),
} }
} }
} }

View file

@ -26,8 +26,9 @@ impl BookEnricher {
pub async fn try_openlibrary(&self, isbn: &str) -> Result<Option<ExternalMetadata>> { pub async fn try_openlibrary(&self, isbn: &str) -> Result<Option<ExternalMetadata>> {
match self.openlibrary.fetch_by_isbn(isbn).await { match self.openlibrary.fetch_by_isbn(isbn).await {
Ok(book) => { Ok(book) => {
let metadata_json = serde_json::to_string(&book) let metadata_json = serde_json::to_string(&book).map_err(|e| {
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?; PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
Ok(Some(ExternalMetadata { Ok(Some(ExternalMetadata {
id: Uuid::new_v4(), id: Uuid::new_v4(),
@ -48,8 +49,9 @@ impl BookEnricher {
match self.googlebooks.fetch_by_isbn(isbn).await { match self.googlebooks.fetch_by_isbn(isbn).await {
Ok(books) if !books.is_empty() => { Ok(books) if !books.is_empty() => {
let book = &books[0]; let book = &books[0];
let metadata_json = serde_json::to_string(book) let metadata_json = serde_json::to_string(book).map_err(|e| {
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?; PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
Ok(Some(ExternalMetadata { Ok(Some(ExternalMetadata {
id: Uuid::new_v4(), id: Uuid::new_v4(),
@ -75,8 +77,9 @@ impl BookEnricher {
if let Ok(results) = self.openlibrary.search(title, author).await if let Ok(results) = self.openlibrary.search(title, author).await
&& let Some(result) = results.first() && let Some(result) = results.first()
{ {
let metadata_json = serde_json::to_string(result) let metadata_json = serde_json::to_string(result).map_err(|e| {
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?; PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
return Ok(Some(ExternalMetadata { return Ok(Some(ExternalMetadata {
id: Uuid::new_v4(), id: Uuid::new_v4(),
@ -93,8 +96,9 @@ impl BookEnricher {
if let Ok(results) = self.googlebooks.search(title, author).await if let Ok(results) = self.googlebooks.search(title, author).await
&& let Some(book) = results.first() && let Some(book) = results.first()
{ {
let metadata_json = serde_json::to_string(book) let metadata_json = serde_json::to_string(book).map_err(|e| {
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?; PinakesError::External(format!("Failed to serialize metadata: {}", e))
})?;
return Ok(Some(ExternalMetadata { return Ok(Some(ExternalMetadata {
id: Uuid::new_v4(), id: Uuid::new_v4(),

View file

@ -31,12 +31,10 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key)); url.push_str(&format!("&key={}", key));
} }
let response = self let response =
.client self.client.get(&url).send().await.map_err(|e| {
.get(&url) PinakesError::External(format!("Google Books request failed: {}", e))
.send() })?;
.await
.map_err(|e| PinakesError::External(format!("Google Books request failed: {}", e)))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(PinakesError::External(format!( return Err(PinakesError::External(format!(
@ -45,10 +43,9 @@ impl GoogleBooksClient {
))); )));
} }
let volumes: GoogleBooksResponse = response let volumes: GoogleBooksResponse = response.json().await.map_err(|e| {
.json() PinakesError::External(format!("Failed to parse Google Books response: {}", e))
.await })?;
.map_err(|e| PinakesError::External(format!("Failed to parse Google Books response: {}", e)))?;
Ok(volumes.items) Ok(volumes.items)
} }
@ -70,12 +67,10 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key)); url.push_str(&format!("&key={}", key));
} }
let response = self let response =
.client self.client.get(&url).send().await.map_err(|e| {
.get(&url) PinakesError::External(format!("Google Books search failed: {}", e))
.send() })?;
.await
.map_err(|e| PinakesError::External(format!("Google Books search failed: {}", e)))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(PinakesError::External(format!( return Err(PinakesError::External(format!(
@ -84,10 +79,9 @@ impl GoogleBooksClient {
))); )));
} }
let volumes: GoogleBooksResponse = response let volumes: GoogleBooksResponse = response.json().await.map_err(|e| {
.json() PinakesError::External(format!("Failed to parse search results: {}", e))
.await })?;
.map_err(|e| PinakesError::External(format!("Failed to parse search results: {}", e)))?;
Ok(volumes.items) Ok(volumes.items)
} }

View file

@ -30,12 +30,10 @@ impl OpenLibraryClient {
pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<OpenLibraryBook> { pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<OpenLibraryBook> {
let url = format!("{}/isbn/{}.json", self.base_url, isbn); let url = format!("{}/isbn/{}.json", self.base_url, isbn);
let response = self let response =
.client self.client.get(&url).send().await.map_err(|e| {
.get(&url) PinakesError::External(format!("OpenLibrary request failed: {}", e))
.send() })?;
.await
.map_err(|e| PinakesError::External(format!("OpenLibrary request failed: {}", e)))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(PinakesError::External(format!( return Err(PinakesError::External(format!(
@ -44,15 +42,22 @@ impl OpenLibraryClient {
))); )));
} }
response response.json::<OpenLibraryBook>().await.map_err(|e| {
.json::<OpenLibraryBook>() PinakesError::External(format!("Failed to parse OpenLibrary response: {}", e))
.await })
.map_err(|e| PinakesError::External(format!("Failed to parse OpenLibrary response: {}", e)))
} }
/// Search for books by title and author /// Search for books by title and author
pub async fn search(&self, title: &str, author: Option<&str>) -> Result<Vec<OpenLibrarySearchResult>> { pub async fn search(
let mut url = format!("{}/search.json?title={}", self.base_url, urlencoding::encode(title)); &self,
title: &str,
author: Option<&str>,
) -> Result<Vec<OpenLibrarySearchResult>> {
let mut url = format!(
"{}/search.json?title={}",
self.base_url,
urlencoding::encode(title)
);
if let Some(author) = author { if let Some(author) = author {
url.push_str(&format!("&author={}", urlencoding::encode(author))); url.push_str(&format!("&author={}", urlencoding::encode(author)));
@ -74,10 +79,9 @@ impl OpenLibraryClient {
))); )));
} }
let search_response: OpenLibrarySearchResponse = response let search_response: OpenLibrarySearchResponse = response.json().await.map_err(|e| {
.json() PinakesError::External(format!("Failed to parse search results: {}", e))
.await })?;
.map_err(|e| PinakesError::External(format!("Failed to parse search results: {}", e)))?;
Ok(search_response.docs) Ok(search_response.docs)
} }
@ -277,7 +281,8 @@ mod tests {
let string_desc: StringOrObject = serde_json::from_str(r#""Simple description""#).unwrap(); let string_desc: StringOrObject = serde_json::from_str(r#""Simple description""#).unwrap();
assert_eq!(string_desc.as_str(), "Simple description"); assert_eq!(string_desc.as_str(), "Simple description");
let object_desc: StringOrObject = serde_json::from_str(r#"{"value": "Object description"}"#).unwrap(); let object_desc: StringOrObject =
serde_json::from_str(r#"{"value": "Object description"}"#).unwrap();
assert_eq!(object_desc.as_str(), "Object description"); assert_eq!(object_desc.as_str(), "Object description");
} }
} }

View file

@ -1,132 +1,205 @@
use std::sync::Arc; //! Auto-detection of photo events and albums based on time and location proximity
use serde::{Deserialize, Serialize}; use chrono::{DateTime, Utc};
use tokio::sync::broadcast;
use tracing::warn;
use crate::config::WebhookConfig; use crate::error::Result;
use crate::model::{MediaId, MediaItem};
#[derive(Debug, Clone, Serialize, Deserialize)] /// Configuration for event detection
#[serde(rename_all = "snake_case")] #[derive(Debug, Clone)]
pub enum PinakesEvent { pub struct EventDetectionConfig {
MediaImported { /// Maximum time gap between photos in the same event (in seconds)
media_id: String, pub max_time_gap_secs: i64,
}, /// Minimum number of photos to form an event
MediaUpdated { pub min_photos: usize,
media_id: String, /// Maximum distance between photos in the same event (in kilometers)
}, /// None means location is not considered
MediaDeleted { pub max_distance_km: Option<f64>,
media_id: String, /// Consider photos on the same day as potentially the same event
}, pub same_day_threshold: bool,
ScanCompleted {
files_found: usize,
files_processed: usize,
},
IntegrityMismatch {
media_id: String,
expected: String,
actual: String,
},
MediaRated {
media_id: String,
user_id: String,
stars: u8,
},
MediaCommented {
media_id: String,
user_id: String,
},
PlaylistCreated {
playlist_id: String,
owner_id: String,
},
TranscodeStarted {
media_id: String,
profile: String,
},
TranscodeCompleted {
media_id: String,
profile: String,
},
} }
impl PinakesEvent { impl Default for EventDetectionConfig {
pub fn event_name(&self) -> &'static str { fn default() -> Self {
match self { Self {
Self::MediaImported { .. } => "media_imported", max_time_gap_secs: 2 * 60 * 60, // 2 hours
Self::MediaUpdated { .. } => "media_updated", min_photos: 5,
Self::MediaDeleted { .. } => "media_deleted", max_distance_km: Some(1.0), // 1km
Self::ScanCompleted { .. } => "scan_completed", same_day_threshold: true,
Self::IntegrityMismatch { .. } => "integrity_mismatch",
Self::MediaRated { .. } => "media_rated",
Self::MediaCommented { .. } => "media_commented",
Self::PlaylistCreated { .. } => "playlist_created",
Self::TranscodeStarted { .. } => "transcode_started",
Self::TranscodeCompleted { .. } => "transcode_completed",
} }
} }
} }
pub struct EventBus { /// A detected photo event/album
tx: broadcast::Sender<PinakesEvent>, #[derive(Debug, Clone)]
pub struct DetectedEvent {
/// Suggested name for the event (e.g., "Photos from 2024-01-15")
pub suggested_name: String,
/// Start time of the event
pub start_time: DateTime<Utc>,
/// End time of the event
pub end_time: DateTime<Utc>,
/// Media items in this event
pub items: Vec<MediaId>,
/// Representative location (if available)
pub location: Option<(f64, f64)>, // (latitude, longitude)
} }
impl EventBus { /// Calculate Haversine distance between two GPS coordinates in kilometers
pub fn new(webhooks: Vec<WebhookConfig>) -> Arc<Self> { fn haversine_distance(lat1: f64, lon1: f64, lat2: f64, lon2: f64) -> f64 {
let (tx, _) = broadcast::channel(256); const EARTH_RADIUS_KM: f64 = 6371.0;
// Spawn webhook delivery task let dlat = (lat2 - lat1).to_radians();
if !webhooks.is_empty() { let dlon = (lon2 - lon1).to_radians();
let mut rx: broadcast::Receiver<PinakesEvent> = tx.subscribe();
let webhooks = Arc::new(webhooks);
tokio::spawn(async move {
while let Ok(event) = rx.recv().await {
let event_name = event.event_name();
for hook in webhooks.iter() {
if hook.events.iter().any(|e| e == event_name || e == "*") {
let url = hook.url.clone();
let event_clone = event.clone();
let secret = hook.secret.clone();
tokio::spawn(async move {
deliver_webhook(&url, &event_clone, secret.as_deref()).await;
});
}
}
}
});
}
Arc::new(Self { tx }) let a = (dlat / 2.0).sin().powi(2)
} + lat1.to_radians().cos() * lat2.to_radians().cos() * (dlon / 2.0).sin().powi(2);
pub fn emit(&self, event: PinakesEvent) { let c = 2.0 * a.sqrt().atan2((1.0 - a).sqrt());
// Ignore send errors (no receivers)
let _ = self.tx.send(event); EARTH_RADIUS_KM * c
}
} }
async fn deliver_webhook(url: &str, event: &PinakesEvent, _secret: Option<&str>) { /// Detect photo events from a list of media items
let client = reqwest::Client::new(); pub fn detect_events(
let body = serde_json::to_string(event).unwrap_or_default(); mut items: Vec<MediaItem>,
config: &EventDetectionConfig,
) -> Result<Vec<DetectedEvent>> {
// Filter to only photos with date_taken
items.retain(|item| item.date_taken.is_some());
for attempt in 0..3 { if items.is_empty() {
match client return Ok(Vec::new());
.post(url) }
.header("Content-Type", "application/json")
.body(body.clone()) // Sort by date_taken
.send() items.sort_by(|a, b| a.date_taken.unwrap().cmp(&b.date_taken.unwrap()));
.await
let mut events: Vec<DetectedEvent> = Vec::new();
let mut current_event_items: Vec<MediaId> = vec![items[0].id];
let mut current_start_time = items[0].date_taken.unwrap();
let mut current_last_time = items[0].date_taken.unwrap();
let mut current_location = items[0].latitude.zip(items[0].longitude);
for item in items.iter().skip(1) {
let item_time = item.date_taken.unwrap();
let time_gap = (item_time - current_last_time).num_seconds();
// Check time gap
let time_ok = if config.same_day_threshold {
// Same day or within time gap
item_time.date_naive() == current_last_time.date_naive()
|| time_gap <= config.max_time_gap_secs
} else {
time_gap <= config.max_time_gap_secs
};
// Check location proximity if both have GPS data
let location_ok = match (
config.max_distance_km,
current_location,
item.latitude.zip(item.longitude),
) {
(Some(max_dist), Some((lat1, lon1)), Some((lat2, lon2))) => {
let dist = haversine_distance(lat1, lon1, lat2, lon2);
dist <= max_dist
}
// If no location constraint or missing GPS, consider location OK
_ => true,
};
if time_ok && location_ok {
// Add to current event
current_event_items.push(item.id);
current_last_time = item_time;
// Update location to average if available
if let (Some((lat1, lon1)), Some((lat2, lon2))) =
(current_location, item.latitude.zip(item.longitude))
{ {
Ok(resp) if resp.status().is_success() => return, current_location = Some(((lat1 + lat2) / 2.0, (lon1 + lon2) / 2.0));
Ok(resp) => { } else if item.latitude.is_some() && item.longitude.is_some() {
warn!(url, status = %resp.status(), attempt, "webhook delivery failed"); current_location = item.latitude.zip(item.longitude);
} }
Err(e) => { } else {
warn!(url, error = %e, attempt, "webhook delivery error"); // Start new event if current has enough photos
if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent {
suggested_name: event_name,
start_time: current_start_time,
end_time: current_last_time,
items: current_event_items.clone(),
location: current_location,
});
}
// Reset for new event
current_event_items = vec![item.id];
current_start_time = item_time;
current_last_time = item_time;
current_location = item.latitude.zip(item.longitude);
} }
} }
// Exponential backoff // Don't forget the last event
tokio::time::sleep(std::time::Duration::from_secs(1 << attempt)).await; if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent {
suggested_name: event_name,
start_time: current_start_time,
end_time: current_last_time,
items: current_event_items,
location: current_location,
});
} }
Ok(events)
}
/// Detect photo bursts (rapid sequences of photos)
/// Returns groups of media IDs that are likely burst sequences
pub fn detect_bursts(
mut items: Vec<MediaItem>,
max_gap_secs: i64,
min_burst_size: usize,
) -> Result<Vec<Vec<MediaId>>> {
// Filter to only photos with date_taken
items.retain(|item| item.date_taken.is_some());
if items.is_empty() {
return Ok(Vec::new());
}
// Sort by date_taken
items.sort_by(|a, b| a.date_taken.unwrap().cmp(&b.date_taken.unwrap()));
let mut bursts: Vec<Vec<MediaId>> = Vec::new();
let mut current_burst: Vec<MediaId> = vec![items[0].id];
let mut last_time = items[0].date_taken.unwrap();
for item in items.iter().skip(1) {
let item_time = item.date_taken.unwrap();
let gap = (item_time - last_time).num_seconds();
if gap <= max_gap_secs {
current_burst.push(item.id);
} else {
if current_burst.len() >= min_burst_size {
bursts.push(current_burst.clone());
}
current_burst = vec![item.id];
}
last_time = item_time;
}
// Don't forget the last burst
if current_burst.len() >= min_burst_size {
bursts.push(current_burst);
}
Ok(bursts)
} }

View file

@ -21,12 +21,24 @@ pub struct ImportResult {
} }
/// Options for import operations /// Options for import operations
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone)]
pub struct ImportOptions { pub struct ImportOptions {
/// Skip files that haven't changed since last scan (based on mtime) /// Skip files that haven't changed since last scan (based on mtime)
pub incremental: bool, pub incremental: bool,
/// Force re-import even if mtime hasn't changed /// Force re-import even if mtime hasn't changed
pub force: bool, pub force: bool,
/// Photo configuration for toggleable features
pub photo_config: crate::config::PhotoConfig,
}
impl Default for ImportOptions {
fn default() -> Self {
Self {
incremental: false,
force: false,
photo_config: crate::config::PhotoConfig::default(),
}
}
} }
/// Get the modification time of a file as a Unix timestamp /// Get the modification time of a file as a Unix timestamp
@ -147,6 +159,15 @@ pub async fn import_file_with_options(
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))?? .map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
}; };
// Generate perceptual hash for image files (if enabled in config)
let perceptual_hash = if options.photo_config.generate_perceptual_hash
&& media_type.category() == crate::media_type::MediaCategory::Image
{
crate::metadata::image::generate_perceptual_hash(&path)
} else {
None
};
let item = MediaItem { let item = MediaItem {
id: media_id, id: media_id,
path: path.clone(), path: path.clone(),
@ -164,6 +185,16 @@ pub async fn import_file_with_options(
thumbnail_path: thumb_path, thumbnail_path: thumb_path,
custom_fields: std::collections::HashMap::new(), custom_fields: std::collections::HashMap::new(),
file_mtime: current_mtime, file_mtime: current_mtime,
// Photo-specific metadata from extraction
date_taken: extracted.date_taken,
latitude: extracted.latitude,
longitude: extracted.longitude,
camera_make: extracted.camera_make,
camera_model: extracted.camera_model,
rating: extracted.rating,
perceptual_hash,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };

View file

@ -35,32 +35,38 @@ impl MetadataExtractor for ImageExtractor {
meta.extra.insert("height".to_string(), h.to_string()); meta.extra.insert("height".to_string(), h.to_string());
} }
// Camera make and model // Camera make and model - set both in top-level fields and extra
if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY) { if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY) {
let val = make.display_value().to_string(); let val = make.display_value().to_string().trim().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.camera_make = Some(val.clone());
meta.extra.insert("camera_make".to_string(), val); meta.extra.insert("camera_make".to_string(), val);
} }
} }
if let Some(model) = exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY) { if let Some(model) = exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY) {
let val = model.display_value().to_string(); let val = model.display_value().to_string().trim().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.camera_model = Some(val.clone());
meta.extra.insert("camera_model".to_string(), val); meta.extra.insert("camera_model".to_string(), val);
} }
} }
// Date taken // Date taken - parse EXIF date format (YYYY:MM:DD HH:MM:SS)
if let Some(date) = exif_data if let Some(date) = exif_data
.get_field(exif::Tag::DateTimeOriginal, exif::In::PRIMARY) .get_field(exif::Tag::DateTimeOriginal, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::DateTime, exif::In::PRIMARY)) .or_else(|| exif_data.get_field(exif::Tag::DateTime, exif::In::PRIMARY))
{ {
let val = date.display_value().to_string(); let val = date.display_value().to_string();
if !val.is_empty() { if !val.is_empty() {
// Try parsing EXIF format: "YYYY:MM:DD HH:MM:SS"
if let Some(dt) = parse_exif_datetime(&val) {
meta.date_taken = Some(dt);
}
meta.extra.insert("date_taken".to_string(), val); meta.extra.insert("date_taken".to_string(), val);
} }
} }
// GPS coordinates // GPS coordinates - set both in top-level fields and extra
if let (Some(lat), Some(lat_ref), Some(lon), Some(lon_ref)) = ( if let (Some(lat), Some(lat_ref), Some(lon), Some(lon_ref)) = (
exif_data.get_field(exif::Tag::GPSLatitude, exif::In::PRIMARY), exif_data.get_field(exif::Tag::GPSLatitude, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLatitudeRef, exif::In::PRIMARY), exif_data.get_field(exif::Tag::GPSLatitudeRef, exif::In::PRIMARY),
@ -69,6 +75,8 @@ impl MetadataExtractor for ImageExtractor {
) && let (Some(lat_val), Some(lon_val)) = ) && let (Some(lat_val), Some(lon_val)) =
(dms_to_decimal(lat, lat_ref), dms_to_decimal(lon, lon_ref)) (dms_to_decimal(lat, lat_ref), dms_to_decimal(lon, lon_ref))
{ {
meta.latitude = Some(lat_val);
meta.longitude = Some(lon_val);
meta.extra meta.extra
.insert("gps_latitude".to_string(), format!("{lat_val:.6}")); .insert("gps_latitude".to_string(), format!("{lat_val:.6}"));
meta.extra meta.extra
@ -211,3 +219,45 @@ fn dms_to_decimal(dms_field: &exif::Field, ref_field: &exif::Field) -> Option<f6
} }
None None
} }
/// Parse EXIF datetime format: "YYYY:MM:DD HH:MM:SS"
fn parse_exif_datetime(s: &str) -> Option<chrono::DateTime<chrono::Utc>> {
use chrono::NaiveDateTime;
// EXIF format is "YYYY:MM:DD HH:MM:SS"
let s = s.trim().trim_matches('"');
// Try standard EXIF format
if let Ok(dt) = NaiveDateTime::parse_from_str(s, "%Y:%m:%d %H:%M:%S") {
return Some(dt.and_utc());
}
// Try ISO format as fallback
if let Ok(dt) = NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S") {
return Some(dt.and_utc());
}
None
}
/// Generate a perceptual hash for an image file.
/// Uses DCT (Discrete Cosine Transform) hash algorithm for robust similarity detection.
/// Returns a hex-encoded hash string, or None if the image cannot be processed.
pub fn generate_perceptual_hash(path: &Path) -> Option<String> {
use image_hasher::{HashAlg, HasherConfig};
// Open and decode the image
let img = image::open(path).ok()?;
// Create hasher with DCT algorithm (good for finding similar images)
let hasher = HasherConfig::new()
.hash_alg(HashAlg::DoubleGradient)
.hash_size(8, 8) // 64-bit hash
.to_hasher();
// Generate hash
let hash = hasher.hash_image(&img);
// Convert to hex string for storage
Some(hash.to_base64())
}

View file

@ -22,6 +22,14 @@ pub struct ExtractedMetadata {
pub description: Option<String>, pub description: Option<String>,
pub extra: HashMap<String, String>, pub extra: HashMap<String, String>,
pub book_metadata: Option<ExtractedBookMetadata>, pub book_metadata: Option<ExtractedBookMetadata>,
// Photo-specific metadata
pub date_taken: Option<chrono::DateTime<chrono::Utc>>,
pub latitude: Option<f64>,
pub longitude: Option<f64>,
pub camera_make: Option<String>,
pub camera_model: Option<String>,
pub rating: Option<i32>,
} }
pub trait MetadataExtractor: Send + Sync { pub trait MetadataExtractor: Send + Sync {

View file

@ -63,6 +63,16 @@ pub struct MediaItem {
pub custom_fields: HashMap<String, CustomField>, pub custom_fields: HashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental scanning /// File modification time (Unix timestamp in seconds), used for incremental scanning
pub file_mtime: Option<i64>, pub file_mtime: Option<i64>,
// Photo-specific metadata
pub date_taken: Option<DateTime<Utc>>,
pub latitude: Option<f64>,
pub longitude: Option<f64>,
pub camera_make: Option<String>,
pub camera_model: Option<String>,
pub rating: Option<i32>,
pub perceptual_hash: Option<String>,
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
} }

View file

@ -156,6 +156,7 @@ pub async fn scan_directory_with_options(
let import_options = import::ImportOptions { let import_options = import::ImportOptions {
incremental: scan_options.incremental && !scan_options.force_full, incremental: scan_options.incremental && !scan_options.force_full,
force: scan_options.force_full, force: scan_options.force_full,
photo_config: crate::config::PhotoConfig::default(),
}; };
let results = import::import_directory_with_options( let results = import::import_directory_with_options(

View file

@ -196,6 +196,7 @@ pub trait StorageBackend: Send + Sync + 'static {
// Duplicates // Duplicates
async fn find_duplicates(&self) -> Result<Vec<Vec<MediaItem>>>; async fn find_duplicates(&self) -> Result<Vec<Vec<MediaItem>>>;
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>>;
// Database management // Database management
async fn database_stats(&self) -> Result<DatabaseStats>; async fn database_stats(&self) -> Result<DatabaseStats>;

View file

@ -170,6 +170,16 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
.map(PathBuf::from), .map(PathBuf::from),
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: row.get("file_mtime"), file_mtime: row.get("file_mtime"),
// Photo-specific fields
date_taken: row.get("date_taken"),
latitude: row.get("latitude"),
longitude: row.get("longitude"),
camera_make: row.get("camera_make"),
camera_model: row.get("camera_model"),
rating: row.get("rating"),
perceptual_hash: row.get("perceptual_hash"),
created_at: row.get("created_at"), created_at: row.get("created_at"),
updated_at: row.get("updated_at"), updated_at: row.get("updated_at"),
}) })
@ -589,9 +599,10 @@ impl StorageBackend for PostgresBackend {
"INSERT INTO media_items ( "INSERT INTO media_items (
id, path, file_name, media_type, content_hash, file_size, id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description, title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at thumbnail_path, date_taken, latitude, longitude, camera_make,
camera_model, rating, perceptual_hash, created_at, updated_at
) VALUES ( ) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16 $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23
)", )",
&[ &[
&item.id.0, &item.id.0,
@ -611,6 +622,13 @@ impl StorageBackend for PostgresBackend {
.thumbnail_path .thumbnail_path
.as_ref() .as_ref()
.map(|p| p.to_string_lossy().to_string()), .map(|p| p.to_string_lossy().to_string()),
&item.date_taken,
&item.latitude,
&item.longitude,
&item.camera_make,
&item.camera_model,
&item.rating,
&item.perceptual_hash,
&item.created_at, &item.created_at,
&item.updated_at, &item.updated_at,
], ],
@ -658,7 +676,8 @@ impl StorageBackend for PostgresBackend {
.query_opt( .query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size, "SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description, title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items WHERE id = $1", FROM media_items WHERE id = $1",
&[&id.0], &[&id.0],
) )
@ -681,7 +700,8 @@ impl StorageBackend for PostgresBackend {
.query_opt( .query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size, "SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description, title, artist, album, genre, year, duration_secs, description,
thumbnail_path, file_mtime, created_at, updated_at thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items WHERE content_hash = $1", FROM media_items WHERE content_hash = $1",
&[&hash.0], &[&hash.0],
) )
@ -709,7 +729,8 @@ impl StorageBackend for PostgresBackend {
.query_opt( .query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size, "SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description, title, artist, album, genre, year, duration_secs, description,
thumbnail_path, file_mtime, created_at, updated_at thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items WHERE path = $1", FROM media_items WHERE path = $1",
&[&path_str], &[&path_str],
) )
@ -746,7 +767,8 @@ impl StorageBackend for PostgresBackend {
let sql = format!( let sql = format!(
"SELECT id, path, file_name, media_type, content_hash, file_size, "SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description, title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at thumbnail_path, file_mtime, date_taken, latitude, longitude,
camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
FROM media_items FROM media_items
ORDER BY {order_by} ORDER BY {order_by}
LIMIT $1 OFFSET $2" LIMIT $1 OFFSET $2"
@ -816,7 +838,8 @@ impl StorageBackend for PostgresBackend {
path = $2, file_name = $3, media_type = $4, content_hash = $5, path = $2, file_name = $3, media_type = $4, content_hash = $5,
file_size = $6, title = $7, artist = $8, album = $9, genre = $10, file_size = $6, title = $7, artist = $8, album = $9, genre = $10,
year = $11, duration_secs = $12, description = $13, year = $11, duration_secs = $12, description = $13,
thumbnail_path = $14, updated_at = $15 thumbnail_path = $14, date_taken = $15, latitude = $16, longitude = $17,
camera_make = $18, camera_model = $19, rating = $20, perceptual_hash = $21, updated_at = $22
WHERE id = $1", WHERE id = $1",
&[ &[
&item.id.0, &item.id.0,
@ -836,6 +859,13 @@ impl StorageBackend for PostgresBackend {
.thumbnail_path .thumbnail_path
.as_ref() .as_ref()
.map(|p| p.to_string_lossy().to_string()), .map(|p| p.to_string_lossy().to_string()),
&item.date_taken,
&item.latitude,
&item.longitude,
&item.camera_make,
&item.camera_model,
&item.rating,
&item.perceptual_hash,
&item.updated_at, &item.updated_at,
], ],
) )
@ -1390,7 +1420,9 @@ impl StorageBackend for PostgresBackend {
let select = format!( let select = format!(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size,
m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs,
m.description, m.thumbnail_path, m.created_at, m.updated_at, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude,
m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash,
m.created_at, m.updated_at,
ts_rank(m.search_vector, plainto_tsquery('english', ${fts_param_idx})) AS rank ts_rank(m.search_vector, plainto_tsquery('english', ${fts_param_idx})) AS rank
FROM media_items m FROM media_items m
WHERE {full_where} WHERE {full_where}
@ -1405,7 +1437,9 @@ impl StorageBackend for PostgresBackend {
let select = format!( let select = format!(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size,
m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs,
m.description, m.thumbnail_path, m.created_at, m.updated_at m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude,
m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash,
m.created_at, m.updated_at
FROM media_items m FROM media_items m
WHERE {full_where} WHERE {full_where}
ORDER BY {order_by} ORDER BY {order_by}
@ -1694,6 +1728,112 @@ impl StorageBackend for PostgresBackend {
Ok(groups) Ok(groups)
} }
/// Find groups of visually similar images by comparing stored perceptual
/// hashes pairwise.
///
/// `threshold` is the maximum Hamming distance (in bits) for two hashes to
/// be considered duplicates. Returns only groups with two or more members.
///
/// Note: the pairwise comparison is inherently O(n^2) in the number of
/// hashed items; for very large libraries a BK-tree or similar index would
/// be needed, but the base64 decode of each hash is now done exactly once
/// (the previous version re-decoded the inner item's hash for every pair).
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;

    // Get all items that carry a perceptual hash.
    let rows = client
        .query(
            "SELECT id, path, file_name, media_type, content_hash, file_size,
                    title, artist, album, genre, year, duration_secs, description,
                    thumbnail_path, file_mtime, date_taken, latitude, longitude,
                    camera_make, camera_model, rating, perceptual_hash, created_at, updated_at
             FROM media_items WHERE perceptual_hash IS NOT NULL ORDER BY id",
            &[],
        )
        .await?;

    let mut items = Vec::with_capacity(rows.len());
    for row in &rows {
        items.push(row_to_media_item(row)?);
    }

    // Batch-load custom fields for all returned items in a single query.
    if !items.is_empty() {
        let ids: Vec<Uuid> = items.iter().map(|i| i.id.0).collect();
        let cf_rows = client
            .query(
                "SELECT media_id, field_name, field_type, field_value
                 FROM custom_fields WHERE media_id = ANY($1)",
                &[&ids],
            )
            .await?;
        let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> = HashMap::new();
        for row in &cf_rows {
            let mid: Uuid = row.get("media_id");
            let name: String = row.get("field_name");
            let ft_str: String = row.get("field_type");
            let value: String = row.get("field_value");
            let field_type = custom_field_type_from_string(&ft_str)?;
            cf_map
                .entry(mid)
                .or_default()
                .insert(name, CustomField { field_type, value });
        }
        for item in &mut items {
            if let Some(fields) = cf_map.remove(&item.id.0) {
                item.custom_fields = fields;
            }
        }
    }

    // Decode every base64 hash exactly once up front. Items whose hash is
    // missing or fails to decode become `None` and are skipped below, which
    // matches the previous per-pair behavior.
    use image_hasher::ImageHash;
    let hashes: Vec<Option<ImageHash<Vec<u8>>>> = items
        .iter()
        .map(|item| {
            item.perceptual_hash
                .as_deref()
                .and_then(|h| ImageHash::<Vec<u8>>::from_base64(h).ok())
        })
        .collect();

    // Greedy single-pass clustering: each ungrouped item seeds a group and
    // absorbs all later items within the Hamming-distance threshold.
    let mut groups: Vec<Vec<MediaItem>> = Vec::new();
    let mut grouped_indices: std::collections::HashSet<usize> =
        std::collections::HashSet::new();
    for i in 0..items.len() {
        if grouped_indices.contains(&i) {
            continue;
        }
        let Some(hash_a) = hashes[i].as_ref() else {
            continue;
        };
        let mut group = vec![items[i].clone()];
        grouped_indices.insert(i);
        for j in (i + 1)..items.len() {
            if grouped_indices.contains(&j) {
                continue;
            }
            let Some(hash_b) = hashes[j].as_ref() else {
                continue;
            };
            if hash_a.dist(hash_b) <= threshold {
                group.push(items[j].clone());
                grouped_indices.insert(j);
            }
        }
        // Only add groups with more than one item (actual duplicates).
        if group.len() > 1 {
            groups.push(group);
        }
    }
    Ok(groups)
}
// ---- Database management ---- // ---- Database management ----
async fn database_stats(&self) -> Result<crate::storage::DatabaseStats> { async fn database_stats(&self) -> Result<crate::storage::DatabaseStats> {
@ -2359,7 +2499,7 @@ impl StorageBackend for PostgresBackend {
.await .await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query( let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN favorites f ON m.id = f.media_id WHERE f.user_id = $1 ORDER BY f.created_at DESC LIMIT $2 OFFSET $3", "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at FROM media_items m JOIN favorites f ON m.id = f.media_id WHERE f.user_id = $1 ORDER BY f.created_at DESC LIMIT $2 OFFSET $3",
&[&user_id.0, &(pagination.limit as i64), &(pagination.offset as i64)], &[&user_id.0, &(pagination.limit as i64), &(pagination.offset as i64)],
).await?; ).await?;
let mut items: Vec<MediaItem> = rows let mut items: Vec<MediaItem> = rows
@ -2694,7 +2834,7 @@ impl StorageBackend for PostgresBackend {
.await .await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query( let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN playlist_items pi ON m.id = pi.media_id WHERE pi.playlist_id = $1 ORDER BY pi.position ASC", "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at FROM media_items m JOIN playlist_items pi ON m.id = pi.media_id WHERE pi.playlist_id = $1 ORDER BY pi.position ASC",
&[&playlist_id], &[&playlist_id],
).await?; ).await?;
let mut items: Vec<MediaItem> = rows let mut items: Vec<MediaItem> = rows
@ -2843,13 +2983,13 @@ impl StorageBackend for PostgresBackend {
.await .await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query( let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at, COUNT(ue.id) as view_count FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at ORDER BY view_count DESC LIMIT $1", "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at, COUNT(ue.id) as view_count FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at ORDER BY view_count DESC LIMIT $1",
&[&(limit as i64)], &[&(limit as i64)],
).await?; ).await?;
let mut results = Vec::new(); let mut results = Vec::new();
for row in &rows { for row in &rows {
let item = row_to_media_item(row)?; let item = row_to_media_item(row)?;
let count: i64 = row.get(16); let count: i64 = row.get(24);
results.push((item, count as u64)); results.push((item, count as u64));
} }
@ -2896,7 +3036,7 @@ impl StorageBackend for PostgresBackend {
.await .await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let rows = client.query( let rows = client.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.user_id = $1 AND ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at ORDER BY MAX(ue.timestamp) DESC LIMIT $2", "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.user_id = $1 AND ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.file_mtime, m.date_taken, m.latitude, m.longitude, m.camera_make, m.camera_model, m.rating, m.perceptual_hash, m.created_at, m.updated_at ORDER BY MAX(ue.timestamp) DESC LIMIT $2",
&[&user_id.0, &(limit as i64)], &[&user_id.0, &(limit as i64)],
).await?; ).await?;
let mut items: Vec<MediaItem> = rows let mut items: Vec<MediaItem> = rows

View file

@ -113,6 +113,24 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
custom_fields: HashMap::new(), // loaded separately custom_fields: HashMap::new(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully // file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None), file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
// Photo-specific fields (may not be present in all queries)
date_taken: row
.get::<_, Option<String>>("date_taken")
.ok()
.flatten()
.and_then(|s| DateTime::parse_from_rfc3339(&s).ok())
.map(|dt| dt.with_timezone(&Utc)),
latitude: row.get::<_, Option<f64>>("latitude").ok().flatten(),
longitude: row.get::<_, Option<f64>>("longitude").ok().flatten(),
camera_make: row.get::<_, Option<String>>("camera_make").ok().flatten(),
camera_model: row.get::<_, Option<String>>("camera_model").ok().flatten(),
rating: row.get::<_, Option<i32>>("rating").ok().flatten(),
perceptual_hash: row
.get::<_, Option<String>>("perceptual_hash")
.ok()
.flatten(),
created_at: parse_datetime(&created_str), created_at: parse_datetime(&created_str),
updated_at: parse_datetime(&updated_str), updated_at: parse_datetime(&updated_str),
}) })
@ -610,8 +628,9 @@ impl StorageBackend for SqliteBackend {
db.execute( db.execute(
"INSERT INTO media_items (id, path, file_name, media_type, content_hash, \ "INSERT INTO media_items (id, path, file_name, media_type, content_hash, \
file_size, title, artist, album, genre, year, duration_secs, description, \ file_size, title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, file_mtime, created_at, updated_at) \ thumbnail_path, file_mtime, date_taken, latitude, longitude, camera_make, \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17)", camera_model, rating, perceptual_hash, created_at, updated_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, ?18, ?19, ?20, ?21, ?22, ?23, ?24)",
params![ params![
item.id.0.to_string(), item.id.0.to_string(),
item.path.to_string_lossy().as_ref(), item.path.to_string_lossy().as_ref(),
@ -630,6 +649,13 @@ impl StorageBackend for SqliteBackend {
.as_ref() .as_ref()
.map(|p| p.to_string_lossy().to_string()), .map(|p| p.to_string_lossy().to_string()),
item.file_mtime, item.file_mtime,
item.date_taken.as_ref().map(|d| d.to_rfc3339()),
item.latitude,
item.longitude,
item.camera_make,
item.camera_model,
item.rating,
item.perceptual_hash,
item.created_at.to_rfc3339(), item.created_at.to_rfc3339(),
item.updated_at.to_rfc3339(), item.updated_at.to_rfc3339(),
], ],
@ -781,7 +807,9 @@ impl StorageBackend for SqliteBackend {
"UPDATE media_items SET path = ?2, file_name = ?3, media_type = ?4, \ "UPDATE media_items SET path = ?2, file_name = ?3, media_type = ?4, \
content_hash = ?5, file_size = ?6, title = ?7, artist = ?8, album = ?9, \ content_hash = ?5, file_size = ?6, title = ?7, artist = ?8, album = ?9, \
genre = ?10, year = ?11, duration_secs = ?12, description = ?13, \ genre = ?10, year = ?11, duration_secs = ?12, description = ?13, \
thumbnail_path = ?14, file_mtime = ?15, updated_at = ?16 WHERE id = ?1", thumbnail_path = ?14, file_mtime = ?15, date_taken = ?16, latitude = ?17, \
longitude = ?18, camera_make = ?19, camera_model = ?20, rating = ?21, \
perceptual_hash = ?22, updated_at = ?23 WHERE id = ?1",
params![ params![
item.id.0.to_string(), item.id.0.to_string(),
item.path.to_string_lossy().as_ref(), item.path.to_string_lossy().as_ref(),
@ -800,6 +828,13 @@ impl StorageBackend for SqliteBackend {
.as_ref() .as_ref()
.map(|p| p.to_string_lossy().to_string()), .map(|p| p.to_string_lossy().to_string()),
item.file_mtime, item.file_mtime,
item.date_taken.as_ref().map(|d| d.to_rfc3339()),
item.latitude,
item.longitude,
item.camera_make,
item.camera_model,
item.rating,
item.perceptual_hash,
item.updated_at.to_rfc3339(), item.updated_at.to_rfc3339(),
], ],
)?; )?;
@ -1534,6 +1569,77 @@ impl StorageBackend for SqliteBackend {
.map_err(|e| PinakesError::Database(e.to_string()))? .map_err(|e| PinakesError::Database(e.to_string()))?
} }
/// Find groups of visually similar images by comparing stored perceptual
/// hashes pairwise (SQLite backend).
///
/// `threshold` is the maximum Hamming distance (in bits) for two hashes to
/// be considered duplicates. Returns only groups with two or more members.
/// The whole operation runs on a blocking thread since rusqlite is
/// synchronous.
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>> {
    let conn = Arc::clone(&self.conn);
    tokio::task::spawn_blocking(move || {
        let db = conn
            .lock()
            .map_err(|e| PinakesError::Database(e.to_string()))?;

        // Get all items that carry a perceptual hash.
        let mut stmt = db.prepare(
            "SELECT * FROM media_items WHERE perceptual_hash IS NOT NULL ORDER BY id",
        )?;
        let mut items: Vec<MediaItem> = stmt
            .query_map([], row_to_media_item)?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        load_custom_fields_batch(&db, &mut items)?;

        // Decode every base64 hash exactly once up front; the previous
        // version re-decoded the inner item's hash for every candidate
        // pair, i.e. O(n^2) base64 decodes on top of the O(n^2) compare.
        use image_hasher::ImageHash;
        let hashes: Vec<Option<ImageHash<Vec<u8>>>> = items
            .iter()
            .map(|item| {
                item.perceptual_hash
                    .as_deref()
                    .and_then(|h| ImageHash::<Vec<u8>>::from_base64(h).ok())
            })
            .collect();

        // Greedy single-pass clustering: each ungrouped item seeds a group
        // and absorbs all later items within the distance threshold.
        let mut groups: Vec<Vec<MediaItem>> = Vec::new();
        let mut grouped_indices: std::collections::HashSet<usize> =
            std::collections::HashSet::new();
        for i in 0..items.len() {
            if grouped_indices.contains(&i) {
                continue;
            }
            let Some(hash_a) = hashes[i].as_ref() else {
                continue;
            };
            let mut group = vec![items[i].clone()];
            grouped_indices.insert(i);
            for j in (i + 1)..items.len() {
                if grouped_indices.contains(&j) {
                    continue;
                }
                let Some(hash_b) = hashes[j].as_ref() else {
                    continue;
                };
                if hash_a.dist(hash_b) <= threshold {
                    group.push(items[j].clone());
                    grouped_indices.insert(j);
                }
            }
            // Only add groups with more than one item (actual duplicates).
            if group.len() > 1 {
                groups.push(group);
            }
        }
        Ok(groups)
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))?
}
// -- Database management ----------------------------------------------- // -- Database management -----------------------------------------------
async fn database_stats(&self) -> Result<crate::storage::DatabaseStats> { async fn database_stats(&self) -> Result<crate::storage::DatabaseStats> {

View file

@ -367,7 +367,14 @@ pub fn extract_epub_cover(epub_path: &Path) -> Result<Option<Vec<u8>>> {
} }
// Fallback: look for common cover image filenames // Fallback: look for common cover image filenames
let cover_names = ["cover.jpg", "cover.jpeg", "cover.png", "Cover.jpg", "Cover.jpeg", "Cover.png"]; let cover_names = [
"cover.jpg",
"cover.jpeg",
"cover.png",
"Cover.jpg",
"Cover.jpeg",
"Cover.png",
];
for name in &cover_names { for name in &cover_names {
if let Some(data) = doc.get_resource_by_path(name) { if let Some(data) = doc.get_resource_by_path(name) {
return Ok(Some(data)); return Ok(Some(data));
@ -423,3 +430,72 @@ pub fn default_covers_dir() -> PathBuf {
pub fn default_thumbnail_dir() -> PathBuf { pub fn default_thumbnail_dir() -> PathBuf {
crate::config::Config::default_data_dir().join("thumbnails") crate::config::Config::default_data_dir().join("thumbnails")
} }
/// Thumbnail size variant for multi-resolution support
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ThumbnailSize {
    /// Tiny thumbnail for map markers and icons (64x64)
    Tiny,
    /// Grid thumbnail for library grid view (320x320)
    Grid,
    /// Preview thumbnail for quick fullscreen preview (1024x1024)
    Preview,
}

impl ThumbnailSize {
    /// Edge length in pixels for this variant (thumbnails are square).
    pub fn pixels(&self) -> u32 {
        match *self {
            Self::Tiny => 64,
            Self::Grid => 320,
            Self::Preview => 1024,
        }
    }

    /// Name of the per-size subdirectory thumbnails of this variant live in.
    pub fn subdir_name(&self) -> &'static str {
        match *self {
            Self::Tiny => "tiny",
            Self::Grid => "grid",
            Self::Preview => "preview",
        }
    }
}
/// Generate all thumbnail sizes for a media file
/// Returns paths to the generated thumbnails (tiny, grid, preview)
pub fn generate_all_thumbnail_sizes(
media_id: MediaId,
source_path: &Path,
media_type: MediaType,
thumbnail_base_dir: &Path,
) -> Result<(Option<PathBuf>, Option<PathBuf>, Option<PathBuf>)> {
let sizes = [
ThumbnailSize::Tiny,
ThumbnailSize::Grid,
ThumbnailSize::Preview,
];
let mut results = Vec::new();
for size in &sizes {
let size_dir = thumbnail_base_dir.join(size.subdir_name());
std::fs::create_dir_all(&size_dir)?;
let config = ThumbnailConfig {
size: size.pixels(),
..ThumbnailConfig::default()
};
let result = generate_thumbnail_with_config(
media_id,
source_path,
media_type.clone(),
&size_dir,
&config,
)?;
results.push(result);
}
Ok((results[0].clone(), results[1].clone(), results[2].clone()))
}

View file

@ -2,7 +2,7 @@ use pinakes_core::books::{extract_isbn_from_text, normalize_isbn, parse_author_f
use pinakes_core::enrichment::books::BookEnricher; use pinakes_core::enrichment::books::BookEnricher;
use pinakes_core::enrichment::googlebooks::GoogleBooksClient; use pinakes_core::enrichment::googlebooks::GoogleBooksClient;
use pinakes_core::enrichment::openlibrary::OpenLibraryClient; use pinakes_core::enrichment::openlibrary::OpenLibraryClient;
use pinakes_core::thumbnail::{extract_epub_cover, generate_book_covers, CoverSize}; use pinakes_core::thumbnail::{CoverSize, extract_epub_cover, generate_book_covers};
#[test] #[test]
fn test_isbn_normalization() { fn test_isbn_normalization() {
@ -136,8 +136,12 @@ fn test_book_cover_generation() {
let mut img_data = Vec::new(); let mut img_data = Vec::new();
{ {
use image::{ImageBuffer, Rgb}; use image::{ImageBuffer, Rgb};
let img: ImageBuffer<Rgb<u8>, Vec<u8>> = ImageBuffer::from_fn(100, 100, |_, _| Rgb([255u8, 0u8, 0u8])); let img: ImageBuffer<Rgb<u8>, Vec<u8>> =
img.write_to(&mut std::io::Cursor::new(&mut img_data), image::ImageFormat::Png) ImageBuffer::from_fn(100, 100, |_, _| Rgb([255u8, 0u8, 0u8]));
img.write_to(
&mut std::io::Cursor::new(&mut img_data),
image::ImageFormat::Png,
)
.unwrap(); .unwrap();
} }

View file

@ -36,6 +36,13 @@ async fn test_media_crud() {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };
@ -115,6 +122,13 @@ async fn test_tags() {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };
@ -168,6 +182,13 @@ async fn test_collections() {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };
@ -216,6 +237,13 @@ async fn test_custom_fields() {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };
@ -283,6 +311,13 @@ async fn test_search() {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };
@ -415,6 +450,13 @@ async fn test_library_statistics_with_data() {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
}; };
@ -452,6 +494,13 @@ fn make_test_media(hash: &str) -> MediaItem {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: now, created_at: now,
updated_at: now, updated_at: now,
} }

View file

@ -39,6 +39,13 @@ fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
thumbnail_path: None, thumbnail_path: None,
custom_fields: HashMap::new(), custom_fields: HashMap::new(),
file_mtime: None, file_mtime: None,
date_taken: None,
latitude: None,
longitude: None,
camera_make: None,
camera_model: None,
rating: None,
perceptual_hash: None,
created_at: chrono::Utc::now(), created_at: chrono::Utc::now(),
updated_at: chrono::Utc::now(), updated_at: chrono::Utc::now(),
} }

View file

@ -102,6 +102,8 @@ pub fn create_router_with_tls(
.route("/media/{media_id}/tags", get(routes::tags::get_media_tags)) .route("/media/{media_id}/tags", get(routes::tags::get_media_tags))
// Books API // Books API
.nest("/books", routes::books::routes()) .nest("/books", routes::books::routes())
// Photos API
.nest("/photos", routes::photos::routes())
.route("/tags", get(routes::tags::list_tags)) .route("/tags", get(routes::tags::list_tags))
.route("/tags/{id}", get(routes::tags::get_tag)) .route("/tags/{id}", get(routes::tags::get_tag))
.route("/collections", get(routes::collections::list_collections)) .route("/collections", get(routes::collections::list_collections))

View file

@ -23,6 +23,15 @@ pub struct MediaResponse {
pub description: Option<String>, pub description: Option<String>,
pub has_thumbnail: bool, pub has_thumbnail: bool,
pub custom_fields: HashMap<String, CustomFieldResponse>, pub custom_fields: HashMap<String, CustomFieldResponse>,
// Photo-specific metadata
pub date_taken: Option<DateTime<Utc>>,
pub latitude: Option<f64>,
pub longitude: Option<f64>,
pub camera_make: Option<String>,
pub camera_model: Option<String>,
pub rating: Option<i32>,
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
} }
@ -509,6 +518,15 @@ impl From<pinakes_core::model::MediaItem> for MediaResponse {
) )
}) })
.collect(), .collect(),
// Photo-specific metadata
date_taken: item.date_taken,
latitude: item.latitude,
longitude: item.longitude,
camera_make: item.camera_make,
camera_model: item.camera_model,
rating: item.rating,
created_at: item.created_at, created_at: item.created_at,
updated_at: item.updated_at, updated_at: item.updated_at,
} }

View file

@ -90,9 +90,17 @@ async fn main() -> Result<()> {
} }
let config_path = resolve_config_path(cli.config.as_deref()); let config_path = resolve_config_path(cli.config.as_deref());
info!(path = %config_path.display(), "loading configuration");
let mut config = Config::load_or_default(&config_path)?; let mut config = if config_path.exists() {
info!(path = %config_path.display(), "loading configuration from file");
Config::from_file(&config_path)?
} else {
info!(
"using default configuration (no config file found at {})",
config_path.display()
);
Config::default()
};
config.ensure_dirs()?; config.ensure_dirs()?;
config config
.validate() .validate()

View file

@ -12,6 +12,7 @@ pub mod health;
pub mod integrity; pub mod integrity;
pub mod jobs; pub mod jobs;
pub mod media; pub mod media;
pub mod photos;
pub mod playlists; pub mod playlists;
pub mod plugins; pub mod plugins;
pub mod saved_searches; pub mod saved_searches;

View file

@ -0,0 +1,189 @@
use axum::{
Json, Router,
extract::{Query, State},
response::IntoResponse,
routing::get,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::{dto::MediaResponse, error::ApiError, state::AppState};
/// Timeline grouping mode
///
/// Controls the bucket size used when grouping photos by `date_taken` in
/// the timeline endpoint. Deserialized from lowercase strings
/// (`day` / `month` / `year`); `day` is assumed when omitted.
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum GroupBy {
    /// One group per calendar day (keys formatted `%Y-%m-%d`).
    #[default]
    Day,
    /// One group per calendar month (keys formatted `%Y-%m`).
    Month,
    /// One group per calendar year (keys formatted `%Y`).
    Year,
}
/// Timeline query parameters
#[derive(Debug, Deserialize)]
pub struct TimelineQuery {
    // Grouping granularity; defaults to `GroupBy::Day` when absent.
    #[serde(default)]
    pub group_by: GroupBy,
    // Optional filter: keep only photos whose `date_taken` falls in this year.
    pub year: Option<i32>,
    // Optional filter: keep only photos taken in this month (1-12, as
    // returned by chrono's `Datelike::month`).
    pub month: Option<u32>,
}
/// Timeline group response
#[derive(Debug, Serialize)]
pub struct TimelineGroup {
    // Group key: "YYYY-MM-DD", "YYYY-MM", or "YYYY" depending on `GroupBy`.
    pub date: String,
    // Number of photos in this group.
    pub count: usize,
    // ID of the group's first photo, usable as a cover/representative image.
    pub cover_id: Option<String>,
    // Full media payloads for every photo in the group.
    pub items: Vec<MediaResponse>,
}
/// Map query parameters
///
/// Two opposite corners of a geographic bounding box. The handler
/// normalises them with min/max, so the corners may be supplied in
/// either order.
/// NOTE(review): assumes the box does not cross the antimeridian — confirm.
#[derive(Debug, Deserialize)]
pub struct MapQuery {
    /// Latitude of the first corner, degrees.
    pub lat1: f64,
    /// Longitude of the first corner, degrees.
    pub lon1: f64,
    /// Latitude of the opposite corner, degrees.
    pub lat2: f64,
    /// Longitude of the opposite corner, degrees.
    pub lon2: f64,
}
/// Map marker response
#[derive(Debug, Serialize)]
pub struct MapMarker {
    /// Media item ID (stringified UUID).
    pub id: String,
    /// GPS latitude, degrees.
    pub latitude: f64,
    /// GPS longitude, degrees.
    pub longitude: f64,
    /// Relative API URL for the item's thumbnail, present only when the
    /// item has a stored thumbnail path.
    pub thumbnail_url: Option<String>,
    /// Capture timestamp, if known.
    pub date_taken: Option<DateTime<Utc>>,
}
/// Get timeline of photos grouped by date
///
/// Buckets image media items that carry a `date_taken` timestamp by
/// day, month, or year (per `query.group_by`), optionally restricted to
/// a given year and/or month. Media is fetched newest-first, so each
/// bucket's first item doubles as its cover; buckets themselves are
/// returned newest-first.
///
/// # Errors
/// Propagates storage errors from `list_media` as [`ApiError`].
pub async fn get_timeline(
    State(state): State<AppState>,
    Query(query): Query<TimelineQuery>,
) -> Result<impl IntoResponse, ApiError> {
    use chrono::Datelike;

    // Fetch newest-first so the first element of every bucket is the
    // most recent photo (used as the cover).
    let all_media = state
        .storage
        .list_media(&pinakes_core::model::Pagination {
            offset: 0,
            limit: 10000, // TODO: Make this more efficient with streaming
            sort: Some("date_taken DESC".to_string()),
        })
        .await?;

    // Single pass: filter to dated images and bucket them by period.
    // (Avoids the intermediate Vec + second iteration — clippy
    // `needless_collect`.)
    let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> = HashMap::new();
    for item in all_media {
        if item.media_type.category() != pinakes_core::media_type::MediaCategory::Image {
            continue;
        }
        let Some(date_taken) = item.date_taken else {
            continue;
        };

        // Optional year/month restriction.
        if query.year.is_some_and(|y| date_taken.year() != y) {
            continue;
        }
        if query.month.is_some_and(|m| date_taken.month() != m) {
            continue;
        }

        let key = match query.group_by {
            GroupBy::Day => date_taken.format("%Y-%m-%d").to_string(),
            GroupBy::Month => date_taken.format("%Y-%m").to_string(),
            GroupBy::Year => date_taken.format("%Y").to_string(),
        };
        groups.entry(key).or_default().push(item);
    }

    // Convert buckets into the response shape.
    let mut timeline: Vec<TimelineGroup> = groups
        .into_iter()
        .map(|(date, items)| {
            let cover_id = items.first().map(|i| i.id.0.to_string());
            let count = items.len();
            TimelineGroup {
                date,
                count,
                cover_id,
                items: items.into_iter().map(MediaResponse::from).collect(),
            }
        })
        .collect();

    // The zero-padded date keys sort lexicographically, so a plain
    // string comparison yields reverse-chronological order.
    timeline.sort_by(|a, b| b.date.cmp(&a.date));

    Ok(Json(timeline))
}
/// Get photos in a bounding box for map view
///
/// Returns one [`MapMarker`] for every media item whose GPS coordinates
/// fall inside the box described by `query` (corners may be given in
/// either order).
///
/// # Errors
/// Propagates storage errors from `list_media` as [`ApiError`].
pub async fn get_map_photos(
    State(state): State<AppState>,
    Query(query): Query<MapQuery>,
) -> Result<impl IntoResponse, ApiError> {
    // Normalise the two corners into min/max bounds. f64::min/max are
    // kept (rather than a branch) for their NaN handling.
    let (min_lat, max_lat) = (query.lat1.min(query.lat2), query.lat1.max(query.lat2));
    let (min_lon, max_lon) = (query.lon1.min(query.lon2), query.lon1.max(query.lon2));

    // Query all media (we'll filter in-memory for now - could optimize with DB query)
    let all_media = state
        .storage
        .list_media(&pinakes_core::model::Pagination {
            offset: 0,
            limit: 10000,
            sort: None,
        })
        .await?;

    // Keep only items that carry coordinates inside the box.
    let mut markers = Vec::new();
    for item in all_media {
        let (Some(lat), Some(lon)) = (item.latitude, item.longitude) else {
            continue;
        };
        if !(min_lat..=max_lat).contains(&lat) || !(min_lon..=max_lon).contains(&lon) {
            continue;
        }

        let thumbnail_url = item
            .thumbnail_path
            .as_ref()
            .map(|_| format!("/api/v1/media/{}/thumbnail", item.id.0));
        markers.push(MapMarker {
            id: item.id.0.to_string(),
            latitude: lat,
            longitude: lon,
            thumbnail_url,
            date_taken: item.date_taken,
        });
    }

    Ok(Json(markers))
}
/// Photo routes
///
/// Builds the photo sub-router; paths are relative to wherever the
/// parent application nests this router:
/// - `GET /timeline` — photos bucketed by day/month/year
/// - `GET /map` — GPS-tagged markers inside a bounding box
pub fn routes() -> Router<AppState> {
    Router::new()
        .route("/timeline", get(get_timeline))
        .route("/map", get(get_map_photos))
}

View file

@ -11,9 +11,9 @@ use tower::ServiceExt;
use pinakes_core::cache::CacheLayer; use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{ use pinakes_core::config::{
AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig, AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig,
JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType, JobsConfig, PhotoConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig,
StorageConfig, ThumbnailConfig, TlsConfig, TranscodingConfig, UiConfig, UserAccount, UserRole, StorageBackendType, StorageConfig, ThumbnailConfig, TlsConfig, TranscodingConfig, UiConfig,
WebhookConfig, UserAccount, UserRole, WebhookConfig,
}; };
use pinakes_core::jobs::JobQueue; use pinakes_core::jobs::JobQueue;
use pinakes_core::storage::StorageBackend; use pinakes_core::storage::StorageBackend;
@ -126,6 +126,7 @@ fn default_config() -> Config {
enrichment: EnrichmentConfig::default(), enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(), cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(), analytics: AnalyticsConfig::default(),
photos: PhotoConfig::default(),
} }
} }

View file

@ -11,8 +11,9 @@ use tower::ServiceExt;
use pinakes_core::cache::CacheLayer; use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{ use pinakes_core::config::{
AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig, AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig,
JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType, JobsConfig, PhotoConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig,
StorageConfig, ThumbnailConfig, TlsConfig, TranscodingConfig, UiConfig, WebhookConfig, StorageBackendType, StorageConfig, ThumbnailConfig, TlsConfig, TranscodingConfig, UiConfig,
WebhookConfig,
}; };
use pinakes_core::jobs::JobQueue; use pinakes_core::jobs::JobQueue;
use pinakes_core::plugin::PluginManager; use pinakes_core::plugin::PluginManager;
@ -91,6 +92,7 @@ async fn setup_app_with_plugins() -> (axum::Router, Arc<PluginManager>, tempfile
enrichment: EnrichmentConfig::default(), enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(), cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(), analytics: AnalyticsConfig::default(),
photos: PhotoConfig::default(),
}; };
let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {})); let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));

View file

@ -1145,7 +1145,10 @@ mod tests {
fn test_thumbnail_url() { fn test_thumbnail_url() {
let client = ApiClient::new("http://localhost:3000", None); let client = ApiClient::new("http://localhost:3000", None);
let url = client.thumbnail_url("test-id-456"); let url = client.thumbnail_url("test-id-456");
assert_eq!(url, "http://localhost:3000/api/v1/media/test-id-456/thumbnail"); assert_eq!(
url,
"http://localhost:3000/api/v1/media/test-id-456/thumbnail"
);
} }
#[test] #[test]

View file

@ -0,0 +1,15 @@
-- V13: Enhanced photo metadata support
-- Add photo-specific fields to media_items table
-- (PostgreSQL variant; the SQLite migration mirrors these columns with
-- TIMESTAMP/REAL affinities.)
ALTER TABLE media_items ADD COLUMN date_taken TIMESTAMPTZ; -- when the photo was taken
ALTER TABLE media_items ADD COLUMN latitude DOUBLE PRECISION; -- GPS latitude (degrees)
ALTER TABLE media_items ADD COLUMN longitude DOUBLE PRECISION; -- GPS longitude (degrees)
ALTER TABLE media_items ADD COLUMN camera_make TEXT; -- manufacturer, presumably from EXIF — confirm against extractor
ALTER TABLE media_items ADD COLUMN camera_model TEXT;
ALTER TABLE media_items ADD COLUMN rating INTEGER CHECK (rating >= 0 AND rating <= 5); -- 0-5 stars; NULL = unrated
-- Indexes for photo queries
-- Partial indexes: only rows with the relevant field populated are
-- indexed, keeping them small for libraries dominated by non-photo media.
CREATE INDEX idx_media_date_taken ON media_items(date_taken) WHERE date_taken IS NOT NULL;
CREATE INDEX idx_media_location ON media_items(latitude, longitude) WHERE latitude IS NOT NULL AND longitude IS NOT NULL;
CREATE INDEX idx_media_camera ON media_items(camera_make) WHERE camera_make IS NOT NULL;
CREATE INDEX idx_media_rating ON media_items(rating) WHERE rating IS NOT NULL;

View file

@ -0,0 +1,7 @@
-- V14: Perceptual hash for duplicate detection
-- Add perceptual hash column for image similarity detection
-- Stored as TEXT (encoded hash); NULL until computed for a row.
ALTER TABLE media_items ADD COLUMN perceptual_hash TEXT;
-- Index for perceptual hash lookups
-- Partial: keeps rows without a computed hash out of the index.
CREATE INDEX idx_media_phash ON media_items(perceptual_hash) WHERE perceptual_hash IS NOT NULL;

View file

@ -0,0 +1,15 @@
-- V13: Enhanced photo metadata support
-- Add photo-specific fields to media_items table
-- (SQLite variant: TIMESTAMP/REAL affinities stand in for the
-- PostgreSQL migration's TIMESTAMPTZ/DOUBLE PRECISION.)
ALTER TABLE media_items ADD COLUMN date_taken TIMESTAMP; -- when the photo was taken
ALTER TABLE media_items ADD COLUMN latitude REAL; -- GPS latitude (degrees)
ALTER TABLE media_items ADD COLUMN longitude REAL; -- GPS longitude (degrees)
ALTER TABLE media_items ADD COLUMN camera_make TEXT; -- manufacturer, presumably from EXIF — confirm against extractor
ALTER TABLE media_items ADD COLUMN camera_model TEXT;
ALTER TABLE media_items ADD COLUMN rating INTEGER CHECK (rating >= 0 AND rating <= 5); -- 0-5 stars; NULL = unrated
-- Indexes for photo queries
-- Partial indexes: only rows with the relevant field populated are
-- indexed, keeping them small for libraries dominated by non-photo media.
CREATE INDEX idx_media_date_taken ON media_items(date_taken) WHERE date_taken IS NOT NULL;
CREATE INDEX idx_media_location ON media_items(latitude, longitude) WHERE latitude IS NOT NULL AND longitude IS NOT NULL;
CREATE INDEX idx_media_camera ON media_items(camera_make) WHERE camera_make IS NOT NULL;
CREATE INDEX idx_media_rating ON media_items(rating) WHERE rating IS NOT NULL;

View file

@ -0,0 +1,7 @@
-- V14: Perceptual hash for duplicate detection
-- Add perceptual hash column for image similarity detection
-- Stored as TEXT (encoded hash); NULL until computed for a row.
ALTER TABLE media_items ADD COLUMN perceptual_hash TEXT;
-- Index for perceptual hash lookups
-- Partial: keeps rows without a computed hash out of the index.
CREATE INDEX idx_media_phash ON media_items(perceptual_hash) WHERE perceptual_hash IS NOT NULL;