treewide: fix various UI bugs; optimize crypto dependencies & format

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If8fe8b38c1d9c4fecd40ff71f88d2ae06a6a6964
This commit is contained in:
raf 2026-02-10 12:56:05 +03:00
commit 3ccddce7fd
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
178 changed files with 58285 additions and 54241 deletions

8
.clippy.toml Normal file
View file

@ -0,0 +1,8 @@
# Clippy configuration: guard types that must never be held across an
# `.await` point. Holding one of these borrows while a future is pending
# keeps the borrow active, so any concurrent read/write through the same
# generational box or signal fails at runtime.
#
# NOTE: the original listed every path twice — once as a bare string and
# once as a `{ path, reason }` table. The bare-string entries were
# redundant duplicates; only the reasoned table form is kept.
await-holding-invalid-types = [
  { path = "generational_box::GenerationalRef", reason = "Reads should not be held over an await point. This will cause any writes to fail while the await is pending since the read borrow is still active." },
  { path = "generational_box::GenerationalRefMut", reason = "Write should not be held over an await point. This will cause any reads or writes to fail while the await is pending since the write borrow is still active." },
  { path = "dioxus_signals::WriteLock", reason = "Write should not be held over an await point. This will cause any reads or writes to fail while the await is pending since the write borrow is still active." },
]

27
.rustfmt.toml Normal file
View file

@ -0,0 +1,27 @@
# rustfmt configuration. Many of these options are nightly-only; they are
# activated by `unstable_features = true` below.

condense_wildcard_suffixes = true
# Narrower width for code blocks inside doc comments than for regular code.
doc_comment_code_block_width = 80
edition = "2024" # Keep in sync with Cargo.toml.
enum_discrim_align_threshold = 60
force_explicit_abi = false
force_multiline_blocks = true
# Reformat Rust code embedded in doc comments and macro matchers/strings.
format_code_in_doc_comments = true
format_macro_matchers = true
format_strings = true
# Import ordering: std, then external crates, then this crate.
group_imports = "StdExternalCrate"
hex_literal_case = "Upper"
# Merge imports from the same crate into a single `use` tree.
imports_granularity = "Crate"
imports_layout = "HorizontalVertical"
inline_attribute_width = 60
match_block_trailing_comma = true
# 80-column limit (project convention; rustfmt default is 100).
max_width = 80
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
overflow_delimited_expr = true
struct_field_align_threshold = 60
# 2-space indentation (project convention; rustfmt default is 4).
tab_spaces = 2
# Required for the nightly-only options above to take effect.
unstable_features = true
use_field_init_shorthand = true
use_try_shorthand = true
wrap_comments = true

BIN
Cargo.lock generated

Binary file not shown.

View file

@ -135,3 +135,12 @@ http = "1.4.0"
# WASM runtime for plugins # WASM runtime for plugins
wasmtime = { version = "41.0.3", features = ["component-model"] } wasmtime = { version = "41.0.3", features = ["component-model"] }
wit-bindgen = "0.52.0" wit-bindgen = "0.52.0"
# Optimize selected dependencies even in dev builds. These crates do heavy
# CPU-bound work (hashing, password derivation, image/PDF/media parsing)
# and are impractically slow at the default dev opt-level. Overriding only
# these packages keeps the workspace itself fast to compile while making
# debug runs usable.
[profile.dev.package]
blake3 = { opt-level = 3 }
image = { opt-level = 3 }
regex = { opt-level = 3 }
argon2 = { opt-level = 3 }
matroska = { opt-level = 3 }
lopdf = { opt-level = 3 }
lofty = { opt-level = 3 }

View file

@ -4,8 +4,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::model::MediaId; use crate::{model::MediaId, users::UserId};
use crate::users::UserId;
/// A tracked usage event for a media item. /// A tracked usage event for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,8 +1,10 @@
use uuid::Uuid; use uuid::Uuid;
use crate::error::Result; use crate::{
use crate::model::{AuditAction, AuditEntry, MediaId}; error::Result,
use crate::storage::DynStorageBackend; model::{AuditAction, AuditEntry, MediaId},
storage::DynStorageBackend,
};
pub async fn record_action( pub async fn record_action(
storage: &DynStorageBackend, storage: &DynStorageBackend,

View file

@ -2,7 +2,8 @@ use crate::error::{PinakesError, Result};
/// Normalize ISBN to ISBN-13 format /// Normalize ISBN to ISBN-13 format
pub fn normalize_isbn(isbn: &str) -> Result<String> { pub fn normalize_isbn(isbn: &str) -> Result<String> {
// Remove hyphens, spaces, and any non-numeric characters (except X for ISBN-10) // Remove hyphens, spaces, and any non-numeric characters (except X for
// ISBN-10)
let clean: String = isbn let clean: String = isbn
.chars() .chars()
.filter(|c| c.is_ascii_digit() || *c == 'X' || *c == 'x') .filter(|c| c.is_ascii_digit() || *c == 'X' || *c == 'x')
@ -19,11 +20,13 @@ pub fn normalize_isbn(isbn: &str) -> Result<String> {
isbn isbn
))) )))
} }
} },
_ => Err(PinakesError::InvalidData(format!( _ => {
Err(PinakesError::InvalidData(format!(
"Invalid ISBN length: {}", "Invalid ISBN length: {}",
isbn isbn
))), )))
},
} }
} }
@ -57,7 +60,9 @@ fn calculate_isbn13_check_digit(isbn_without_check: &str) -> Result<u32> {
let sum: u32 = isbn_without_check let sum: u32 = isbn_without_check
.chars() .chars()
.enumerate() .enumerate()
.filter_map(|(i, c)| c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 })) .filter_map(|(i, c)| {
c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 })
})
.sum(); .sum();
let check_digit = (10 - (sum % 10)) % 10; let check_digit = (10 - (sum % 10)) % 10;
@ -73,7 +78,9 @@ fn is_valid_isbn13(isbn13: &str) -> bool {
let sum: u32 = isbn13 let sum: u32 = isbn13
.chars() .chars()
.enumerate() .enumerate()
.filter_map(|(i, c)| c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 })) .filter_map(|(i, c)| {
c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 })
})
.sum(); .sum();
sum.is_multiple_of(10) sum.is_multiple_of(10)
@ -128,7 +135,7 @@ pub fn parse_author_file_as(name: &str) -> String {
let surname = parts.last().unwrap(); let surname = parts.last().unwrap();
let given_names = parts[..parts.len() - 1].join(" "); let given_names = parts[..parts.len() - 1].join(" ");
format!("{}, {}", surname, given_names) format!("{}, {}", surname, given_names)
} },
} }
} }
@ -159,7 +166,8 @@ mod tests {
#[test] #[test]
fn test_extract_isbn() { fn test_extract_isbn() {
let text = "This book's ISBN is 978-0-306-40615-7 and was published in 2020."; let text =
"This book's ISBN is 978-0-306-40615-7 and was published in 2020.";
assert_eq!( assert_eq!(
extract_isbn_from_text(text), extract_isbn_from_text(text),
Some("9780306406157".to_string()) Some("9780306406157".to_string())

View file

@ -7,10 +7,14 @@
//! - Metrics tracking (hit rate, size, evictions) //! - Metrics tracking (hit rate, size, evictions)
//! - Specialized caches for different data types //! - Specialized caches for different data types
use std::hash::Hash; use std::{
use std::sync::Arc; hash::Hash,
use std::sync::atomic::{AtomicU64, Ordering}; sync::{
use std::time::Duration; Arc,
atomic::{AtomicU64, Ordering},
},
time::Duration,
};
use moka::future::Cache as MokaCache; use moka::future::Cache as MokaCache;
@ -97,7 +101,11 @@ where
} }
/// Create a new cache with TTL, max capacity, and time-to-idle. /// Create a new cache with TTL, max capacity, and time-to-idle.
pub fn new_with_idle(ttl: Duration, tti: Duration, max_capacity: u64) -> Self { pub fn new_with_idle(
ttl: Duration,
tti: Duration,
max_capacity: u64,
) -> Self {
let inner = MokaCache::builder() let inner = MokaCache::builder()
.time_to_live(ttl) .time_to_live(ttl)
.time_to_idle(tti) .time_to_idle(tti)
@ -116,11 +124,11 @@ where
Some(value) => { Some(value) => {
self.metrics.record_hit(); self.metrics.record_hit();
Some(value) Some(value)
} },
None => { None => {
self.metrics.record_miss(); self.metrics.record_miss();
None None
} },
} }
} }
@ -172,7 +180,12 @@ impl QueryCache {
} }
/// Generate a cache key from query parameters. /// Generate a cache key from query parameters.
fn make_key(query: &str, offset: u64, limit: u64, sort: Option<&str>) -> String { fn make_key(
query: &str,
offset: u64,
limit: u64,
sort: Option<&str>,
) -> String {
use std::hash::{DefaultHasher, Hasher}; use std::hash::{DefaultHasher, Hasher};
let mut hasher = DefaultHasher::new(); let mut hasher = DefaultHasher::new();
hasher.write(query.as_bytes()); hasher.write(query.as_bytes());
@ -234,7 +247,8 @@ impl MetadataCache {
} }
pub async fn insert(&self, content_hash: &str, metadata_json: String) { pub async fn insert(&self, content_hash: &str, metadata_json: String) {
self.inner self
.inner
.insert(content_hash.to_string(), metadata_json) .insert(content_hash.to_string(), metadata_json)
.await; .await;
} }
@ -332,7 +346,8 @@ pub struct CacheLayer {
} }
impl CacheLayer { impl CacheLayer {
/// Create a new cache layer with the specified TTL (using defaults for other settings). /// Create a new cache layer with the specified TTL (using defaults for other
/// settings).
pub fn new(ttl_secs: u64) -> Self { pub fn new(ttl_secs: u64) -> Self {
let config = CacheConfig { let config = CacheConfig {
response_ttl_secs: ttl_secs, response_ttl_secs: ttl_secs,
@ -413,8 +428,10 @@ pub struct CacheLayerStats {
impl CacheLayerStats { impl CacheLayerStats {
/// Get the overall hit rate across all caches. /// Get the overall hit rate across all caches.
pub fn overall_hit_rate(&self) -> f64 { pub fn overall_hit_rate(&self) -> f64 {
let total_hits = let total_hits = self.responses.hits
self.responses.hits + self.queries.hits + self.metadata.hits + self.media.hits; + self.queries.hits
+ self.metadata.hits
+ self.media.hits;
let total_requests = total_hits let total_requests = total_hits
+ self.responses.misses + self.responses.misses
+ self.queries.misses + self.queries.misses
@ -430,7 +447,10 @@ impl CacheLayerStats {
/// Get the total number of entries across all caches. /// Get the total number of entries across all caches.
pub fn total_entries(&self) -> u64 { pub fn total_entries(&self) -> u64 {
self.responses.size + self.queries.size + self.metadata.size + self.media.size self.responses.size
+ self.queries.size
+ self.metadata.size
+ self.media.size
} }
} }

View file

@ -1,8 +1,6 @@
use uuid::Uuid; use uuid::Uuid;
use crate::error::Result; use crate::{error::Result, model::*, storage::DynStorageBackend};
use crate::model::*;
use crate::storage::DynStorageBackend;
pub async fn create_collection( pub async fn create_collection(
storage: &DynStorageBackend, storage: &DynStorageBackend,
@ -72,7 +70,9 @@ pub async fn get_members(
} else { } else {
Ok(Vec::new()) Ok(Vec::new())
} }
} },
CollectionKind::Manual => storage.get_collection_members(collection_id).await, CollectionKind::Manual => {
storage.get_collection_members(collection_id).await
},
} }
} }

View file

@ -52,7 +52,7 @@ fn expand_env_var_string(input: &str) -> crate::error::Result<String> {
"environment variable not set: {}", "environment variable not set: {}",
var_name var_name
))); )));
} },
} }
} else if ch == '\\' { } else if ch == '\\' {
// Handle escaped characters // Handle escaped characters
@ -249,7 +249,9 @@ pub struct UserAccount {
pub role: UserRole, pub role: UserRole,
} }
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)] #[derive(
Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize,
)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum UserRole { pub enum UserRole {
Admin, Admin,
@ -811,12 +813,13 @@ pub struct ServerConfig {
pub host: String, pub host: String,
pub port: u16, pub port: u16,
/// Optional API key for bearer token authentication. /// Optional API key for bearer token authentication.
/// If set, all requests (except /health) must include `Authorization: Bearer <key>`. /// If set, all requests (except /health) must include `Authorization: Bearer
/// Can also be set via `PINAKES_API_KEY` environment variable. /// <key>`. Can also be set via `PINAKES_API_KEY` environment variable.
pub api_key: Option<String>, pub api_key: Option<String>,
/// Explicitly disable authentication (INSECURE - use only for development). /// Explicitly disable authentication (INSECURE - use only for development).
/// When true, all requests are allowed without authentication. /// When true, all requests are allowed without authentication.
/// This must be explicitly set to true; empty api_key alone is not sufficient. /// This must be explicitly set to true; empty api_key alone is not
/// sufficient.
#[serde(default)] #[serde(default)]
pub authentication_disabled: bool, pub authentication_disabled: bool,
/// TLS/HTTPS configuration /// TLS/HTTPS configuration
@ -903,7 +906,9 @@ impl TlsConfig {
impl Config { impl Config {
pub fn from_file(path: &Path) -> crate::error::Result<Self> { pub fn from_file(path: &Path) -> crate::error::Result<Self> {
let content = std::fs::read_to_string(path).map_err(|e| { let content = std::fs::read_to_string(path).map_err(|e| {
crate::error::PinakesError::Config(format!("failed to read config file: {e}")) crate::error::PinakesError::Config(format!(
"failed to read config file: {e}"
))
})?; })?;
let mut config: Self = toml::from_str(&content).map_err(|e| { let mut config: Self = toml::from_str(&content).map_err(|e| {
crate::error::PinakesError::Config(format!("failed to parse config: {e}")) crate::error::PinakesError::Config(format!("failed to parse config: {e}"))
@ -934,13 +939,16 @@ impl Config {
// Enrichment API keys // Enrichment API keys
if let Some(ref api_key) = self.enrichment.sources.musicbrainz.api_key { if let Some(ref api_key) = self.enrichment.sources.musicbrainz.api_key {
self.enrichment.sources.musicbrainz.api_key = Some(expand_env_var_string(api_key)?); self.enrichment.sources.musicbrainz.api_key =
Some(expand_env_var_string(api_key)?);
} }
if let Some(ref api_key) = self.enrichment.sources.tmdb.api_key { if let Some(ref api_key) = self.enrichment.sources.tmdb.api_key {
self.enrichment.sources.tmdb.api_key = Some(expand_env_var_string(api_key)?); self.enrichment.sources.tmdb.api_key =
Some(expand_env_var_string(api_key)?);
} }
if let Some(ref api_key) = self.enrichment.sources.lastfm.api_key { if let Some(ref api_key) = self.enrichment.sources.lastfm.api_key {
self.enrichment.sources.lastfm.api_key = Some(expand_env_var_string(api_key)?); self.enrichment.sources.lastfm.api_key =
Some(expand_env_var_string(api_key)?);
} }
Ok(()) Ok(())
@ -964,7 +972,9 @@ impl Config {
std::fs::create_dir_all(parent)?; std::fs::create_dir_all(parent)?;
} }
let content = toml::to_string_pretty(self).map_err(|e| { let content = toml::to_string_pretty(self).map_err(|e| {
crate::error::PinakesError::Config(format!("failed to serialize config: {e}")) crate::error::PinakesError::Config(format!(
"failed to serialize config: {e}"
))
})?; })?;
std::fs::write(path, content)?; std::fs::write(path, content)?;
Ok(()) Ok(())
@ -974,7 +984,8 @@ impl Config {
pub fn ensure_dirs(&self) -> crate::error::Result<()> { pub fn ensure_dirs(&self) -> crate::error::Result<()> {
if let Some(ref sqlite) = self.storage.sqlite { if let Some(ref sqlite) = self.storage.sqlite {
if let Some(parent) = sqlite.path.parent() { if let Some(parent) = sqlite.path.parent() {
// Skip if parent is empty string (happens with bare filenames like "pinakes.db") // Skip if parent is empty string (happens with bare filenames like
// "pinakes.db")
if !parent.as_os_str().is_empty() { if !parent.as_os_str().is_empty() {
std::fs::create_dir_all(parent)?; std::fs::create_dir_all(parent)?;
let metadata = std::fs::metadata(parent)?; let metadata = std::fs::metadata(parent)?;
@ -1015,19 +1026,22 @@ impl Config {
if self.scanning.poll_interval_secs == 0 { if self.scanning.poll_interval_secs == 0 {
return Err("poll interval cannot be 0".into()); return Err("poll interval cannot be 0".into());
} }
if self.scanning.import_concurrency == 0 || self.scanning.import_concurrency > 256 { if self.scanning.import_concurrency == 0
|| self.scanning.import_concurrency > 256
{
return Err("import_concurrency must be between 1 and 256".into()); return Err("import_concurrency must be between 1 and 256".into());
} }
// Validate authentication configuration // Validate authentication configuration
let has_api_key = self.server.api_key.as_ref().is_some_and(|k| !k.is_empty()); let has_api_key =
self.server.api_key.as_ref().is_some_and(|k| !k.is_empty());
let has_accounts = !self.accounts.users.is_empty(); let has_accounts = !self.accounts.users.is_empty();
let auth_disabled = self.server.authentication_disabled; let auth_disabled = self.server.authentication_disabled;
if !auth_disabled && !has_api_key && !has_accounts { if !auth_disabled && !has_api_key && !has_accounts {
return Err( return Err(
"authentication is not configured: set an api_key, configure user accounts, \ "authentication is not configured: set an api_key, configure user \
or explicitly set authentication_disabled = true" accounts, or explicitly set authentication_disabled = true"
.into(), .into(),
); );
} }
@ -1036,9 +1050,11 @@ impl Config {
if let Some(ref api_key) = self.server.api_key if let Some(ref api_key) = self.server.api_key
&& api_key.is_empty() && api_key.is_empty()
{ {
return Err("empty api_key is not allowed. To disable authentication, \ return Err(
set authentication_disabled = true instead" "empty api_key is not allowed. To disable authentication, set \
.into()); authentication_disabled = true instead"
.into(),
);
} }
// Require TLS when authentication is enabled on non-localhost // Require TLS when authentication is enabled on non-localhost
@ -1052,8 +1068,8 @@ impl Config {
&& !self.server.tls.enabled && !self.server.tls.enabled
{ {
return Err( return Err(
"TLS must be enabled when authentication is used on non-localhost hosts. \ "TLS must be enabled when authentication is used on non-localhost \
Set server.tls.enabled = true or bind to localhost only" hosts. Set server.tls.enabled = true or bind to localhost only"
.into(), .into(),
); );
} }

View file

@ -1,12 +1,17 @@
use chrono::Utc; use chrono::Utc;
use uuid::Uuid; use uuid::Uuid;
use crate::error::{PinakesError, Result}; use super::{
use crate::model::MediaItem; EnrichmentSourceType,
ExternalMetadata,
use super::googlebooks::GoogleBooksClient; MetadataEnricher,
use super::openlibrary::OpenLibraryClient; googlebooks::GoogleBooksClient,
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher}; openlibrary::OpenLibraryClient,
};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
/// Book enricher that tries OpenLibrary first, then falls back to Google Books /// Book enricher that tries OpenLibrary first, then falls back to Google Books
pub struct BookEnricher { pub struct BookEnricher {
@ -23,7 +28,10 @@ impl BookEnricher {
} }
/// Try to enrich from OpenLibrary first /// Try to enrich from OpenLibrary first
pub async fn try_openlibrary(&self, isbn: &str) -> Result<Option<ExternalMetadata>> { pub async fn try_openlibrary(
&self,
isbn: &str,
) -> Result<Option<ExternalMetadata>> {
match self.openlibrary.fetch_by_isbn(isbn).await { match self.openlibrary.fetch_by_isbn(isbn).await {
Ok(book) => { Ok(book) => {
let metadata_json = serde_json::to_string(&book).map_err(|e| { let metadata_json = serde_json::to_string(&book).map_err(|e| {
@ -39,13 +47,16 @@ impl BookEnricher {
confidence: calculate_openlibrary_confidence(&book), confidence: calculate_openlibrary_confidence(&book),
last_updated: Utc::now(), last_updated: Utc::now(),
})) }))
} },
Err(_) => Ok(None), Err(_) => Ok(None),
} }
} }
/// Try to enrich from Google Books /// Try to enrich from Google Books
pub async fn try_googlebooks(&self, isbn: &str) -> Result<Option<ExternalMetadata>> { pub async fn try_googlebooks(
&self,
isbn: &str,
) -> Result<Option<ExternalMetadata>> {
match self.googlebooks.fetch_by_isbn(isbn).await { match self.googlebooks.fetch_by_isbn(isbn).await {
Ok(books) if !books.is_empty() => { Ok(books) if !books.is_empty() => {
let book = &books[0]; let book = &books[0];
@ -62,7 +73,7 @@ impl BookEnricher {
confidence: calculate_googlebooks_confidence(&book.volume_info), confidence: calculate_googlebooks_confidence(&book.volume_info),
last_updated: Utc::now(), last_updated: Utc::now(),
})) }))
} },
_ => Ok(None), _ => Ok(None),
} }
} }
@ -123,7 +134,8 @@ impl MetadataEnricher for BookEnricher {
} }
async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> { async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
// Try ISBN-based enrichment first by checking title/description for ISBN patterns // Try ISBN-based enrichment first by checking title/description for ISBN
// patterns
if let Some(ref title) = item.title { if let Some(ref title) = item.title {
if let Some(isbn) = crate::books::extract_isbn_from_text(title) { if let Some(isbn) = crate::books::extract_isbn_from_text(title) {
if let Some(mut metadata) = self.try_openlibrary(&isbn).await? { if let Some(mut metadata) = self.try_openlibrary(&isbn).await? {
@ -147,7 +159,9 @@ impl MetadataEnricher for BookEnricher {
} }
/// Calculate confidence score for OpenLibrary metadata /// Calculate confidence score for OpenLibrary metadata
pub fn calculate_openlibrary_confidence(book: &super::openlibrary::OpenLibraryBook) -> f64 { pub fn calculate_openlibrary_confidence(
book: &super::openlibrary::OpenLibraryBook,
) -> f64 {
let mut score: f64 = 0.5; // Base score let mut score: f64 = 0.5; // Base score
if book.title.is_some() { if book.title.is_some() {
@ -173,7 +187,9 @@ pub fn calculate_openlibrary_confidence(book: &super::openlibrary::OpenLibraryBo
} }
/// Calculate confidence score for Google Books metadata /// Calculate confidence score for Google Books metadata
pub fn calculate_googlebooks_confidence(info: &super::googlebooks::VolumeInfo) -> f64 { pub fn calculate_googlebooks_confidence(
info: &super::googlebooks::VolumeInfo,
) -> f64 {
let mut score: f64 = 0.5; // Base score let mut score: f64 = 0.5; // Base score
if info.title.is_some() { if info.title.is_some() {

View file

@ -31,8 +31,7 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key)); url.push_str(&format!("&key={}", key));
} }
let response = let response = self.client.get(&url).send().await.map_err(|e| {
self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Google Books request failed: {}", e)) PinakesError::External(format!("Google Books request failed: {}", e))
})?; })?;
@ -44,14 +43,21 @@ impl GoogleBooksClient {
} }
let volumes: GoogleBooksResponse = response.json().await.map_err(|e| { let volumes: GoogleBooksResponse = response.json().await.map_err(|e| {
PinakesError::External(format!("Failed to parse Google Books response: {}", e)) PinakesError::External(format!(
"Failed to parse Google Books response: {}",
e
))
})?; })?;
Ok(volumes.items) Ok(volumes.items)
} }
/// Search for books by title and author /// Search for books by title and author
pub async fn search(&self, title: &str, author: Option<&str>) -> Result<Vec<GoogleBook>> { pub async fn search(
&self,
title: &str,
author: Option<&str>,
) -> Result<Vec<GoogleBook>> {
let mut query = format!("intitle:{}", urlencoding::encode(title)); let mut query = format!("intitle:{}", urlencoding::encode(title));
if let Some(author) = author { if let Some(author) = author {
@ -67,8 +73,7 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key)); url.push_str(&format!("&key={}", key));
} }
let response = let response = self.client.get(&url).send().await.map_err(|e| {
self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Google Books search failed: {}", e)) PinakesError::External(format!("Google Books search failed: {}", e))
})?; })?;
@ -93,12 +98,10 @@ impl GoogleBooksClient {
.replace("&zoom=1", "&zoom=2") .replace("&zoom=1", "&zoom=2")
.replace("&edge=curl", ""); .replace("&edge=curl", "");
let response = self let response =
.client self.client.get(&high_res_link).send().await.map_err(|e| {
.get(&high_res_link) PinakesError::External(format!("Cover download failed: {}", e))
.send() })?;
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(PinakesError::External(format!( return Err(PinakesError::External(format!(
@ -107,11 +110,9 @@ impl GoogleBooksClient {
))); )));
} }
response response.bytes().await.map(|b| b.to_vec()).map_err(|e| {
.bytes() PinakesError::External(format!("Failed to read cover data: {}", e))
.await })
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
} }
} }
@ -201,7 +202,8 @@ pub struct ImageLinks {
impl ImageLinks { impl ImageLinks {
/// Get the best available image link (highest resolution) /// Get the best available image link (highest resolution)
pub fn best_link(&self) -> Option<&String> { pub fn best_link(&self) -> Option<&String> {
self.extra_large self
.extra_large
.as_ref() .as_ref()
.or(self.large.as_ref()) .or(self.large.as_ref())
.or(self.medium.as_ref()) .or(self.medium.as_ref())

View file

@ -5,10 +5,11 @@ use std::time::Duration;
use chrono::Utc; use chrono::Utc;
use uuid::Uuid; use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher}; use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
pub struct LastFmEnricher { pub struct LastFmEnricher {
client: reqwest::Client, client: reqwest::Client,
@ -70,11 +71,15 @@ impl MetadataEnricher for LastFmEnricher {
} }
let body = resp.text().await.map_err(|e| { let body = resp.text().await.map_err(|e| {
PinakesError::MetadataExtraction(format!("Last.fm response read failed: {e}")) PinakesError::MetadataExtraction(format!(
"Last.fm response read failed: {e}"
))
})?; })?;
let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| { let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
PinakesError::MetadataExtraction(format!("Last.fm JSON parse failed: {e}")) PinakesError::MetadataExtraction(format!(
"Last.fm JSON parse failed: {e}"
))
})?; })?;
// Check for error response // Check for error response

View file

@ -11,8 +11,10 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::error::Result; use crate::{
use crate::model::{MediaId, MediaItem}; error::Result,
model::{MediaId, MediaItem},
};
/// Externally-sourced metadata for a media item. /// Externally-sourced metadata for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -5,10 +5,11 @@ use std::time::Duration;
use chrono::Utc; use chrono::Utc;
use uuid::Uuid; use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher}; use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
pub struct MusicBrainzEnricher { pub struct MusicBrainzEnricher {
client: reqwest::Client, client: reqwest::Client,
@ -37,8 +38,8 @@ impl MusicBrainzEnricher {
fn escape_lucene_query(s: &str) -> String { fn escape_lucene_query(s: &str) -> String {
let special_chars = [ let special_chars = [
'+', '-', '&', '|', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?', ':', '\\', '+', '-', '&', '|', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*',
'/', '?', ':', '\\', '/',
]; ];
let mut escaped = String::with_capacity(s.len() * 2); let mut escaped = String::with_capacity(s.len() * 2);
for c in s.chars() { for c in s.chars() {
@ -80,7 +81,9 @@ impl MetadataEnricher for MusicBrainzEnricher {
.send() .send()
.await .await
.map_err(|e| { .map_err(|e| {
PinakesError::MetadataExtraction(format!("MusicBrainz request failed: {e}")) PinakesError::MetadataExtraction(format!(
"MusicBrainz request failed: {e}"
))
})?; })?;
if !resp.status().is_success() { if !resp.status().is_success() {
@ -97,12 +100,16 @@ impl MetadataEnricher for MusicBrainzEnricher {
} }
let body = resp.text().await.map_err(|e| { let body = resp.text().await.map_err(|e| {
PinakesError::MetadataExtraction(format!("MusicBrainz response read failed: {e}")) PinakesError::MetadataExtraction(format!(
"MusicBrainz response read failed: {e}"
))
})?; })?;
// Parse to check if we got results // Parse to check if we got results
let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| { let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
PinakesError::MetadataExtraction(format!("MusicBrainz JSON parse failed: {e}")) PinakesError::MetadataExtraction(format!(
"MusicBrainz JSON parse failed: {e}"
))
})?; })?;
let recordings = json.get("recordings").and_then(|r| r.as_array()); let recordings = json.get("recordings").and_then(|r| r.as_array());

View file

@ -30,8 +30,7 @@ impl OpenLibraryClient {
pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<OpenLibraryBook> { pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<OpenLibraryBook> {
let url = format!("{}/isbn/{}.json", self.base_url, isbn); let url = format!("{}/isbn/{}.json", self.base_url, isbn);
let response = let response = self.client.get(&url).send().await.map_err(|e| {
self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("OpenLibrary request failed: {}", e)) PinakesError::External(format!("OpenLibrary request failed: {}", e))
})?; })?;
@ -43,7 +42,10 @@ impl OpenLibraryClient {
} }
response.json::<OpenLibraryBook>().await.map_err(|e| { response.json::<OpenLibraryBook>().await.map_err(|e| {
PinakesError::External(format!("Failed to parse OpenLibrary response: {}", e)) PinakesError::External(format!(
"Failed to parse OpenLibrary response: {}",
e
))
}) })
} }
@ -65,12 +67,9 @@ impl OpenLibraryClient {
url.push_str("&limit=5"); url.push_str("&limit=5");
let response = self let response = self.client.get(&url).send().await.map_err(|e| {
.client PinakesError::External(format!("OpenLibrary search failed: {}", e))
.get(&url) })?;
.send()
.await
.map_err(|e| PinakesError::External(format!("OpenLibrary search failed: {}", e)))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(PinakesError::External(format!( return Err(PinakesError::External(format!(
@ -79,7 +78,8 @@ impl OpenLibraryClient {
))); )));
} }
let search_response: OpenLibrarySearchResponse = response.json().await.map_err(|e| { let search_response: OpenLibrarySearchResponse =
response.json().await.map_err(|e| {
PinakesError::External(format!("Failed to parse search results: {}", e)) PinakesError::External(format!("Failed to parse search results: {}", e))
})?; })?;
@ -87,7 +87,11 @@ impl OpenLibraryClient {
} }
/// Fetch cover image by cover ID /// Fetch cover image by cover ID
pub async fn fetch_cover(&self, cover_id: i64, size: CoverSize) -> Result<Vec<u8>> { pub async fn fetch_cover(
&self,
cover_id: i64,
size: CoverSize,
) -> Result<Vec<u8>> {
let size_str = match size { let size_str = match size {
CoverSize::Small => "S", CoverSize::Small => "S",
CoverSize::Medium => "M", CoverSize::Medium => "M",
@ -99,12 +103,9 @@ impl OpenLibraryClient {
cover_id, size_str cover_id, size_str
); );
let response = self let response = self.client.get(&url).send().await.map_err(|e| {
.client PinakesError::External(format!("Cover download failed: {}", e))
.get(&url) })?;
.send()
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(PinakesError::External(format!( return Err(PinakesError::External(format!(
@ -113,15 +114,17 @@ impl OpenLibraryClient {
))); )));
} }
response response.bytes().await.map(|b| b.to_vec()).map_err(|e| {
.bytes() PinakesError::External(format!("Failed to read cover data: {}", e))
.await })
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
} }
/// Fetch cover by ISBN /// Fetch cover by ISBN
pub async fn fetch_cover_by_isbn(&self, isbn: &str, size: CoverSize) -> Result<Vec<u8>> { pub async fn fetch_cover_by_isbn(
&self,
isbn: &str,
size: CoverSize,
) -> Result<Vec<u8>> {
let size_str = match size { let size_str = match size {
CoverSize::Small => "S", CoverSize::Small => "S",
CoverSize::Medium => "M", CoverSize::Medium => "M",
@ -133,12 +136,9 @@ impl OpenLibraryClient {
isbn, size_str isbn, size_str
); );
let response = self let response = self.client.get(&url).send().await.map_err(|e| {
.client PinakesError::External(format!("Cover download failed: {}", e))
.get(&url) })?;
.send()
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(PinakesError::External(format!( return Err(PinakesError::External(format!(
@ -147,11 +147,9 @@ impl OpenLibraryClient {
))); )));
} }
response response.bytes().await.map(|b| b.to_vec()).map_err(|e| {
.bytes() PinakesError::External(format!("Failed to read cover data: {}", e))
.await })
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
} }
} }
@ -278,7 +276,8 @@ mod tests {
#[test] #[test]
fn test_string_or_object_parsing() { fn test_string_or_object_parsing() {
let string_desc: StringOrObject = serde_json::from_str(r#""Simple description""#).unwrap(); let string_desc: StringOrObject =
serde_json::from_str(r#""Simple description""#).unwrap();
assert_eq!(string_desc.as_str(), "Simple description"); assert_eq!(string_desc.as_str(), "Simple description");
let object_desc: StringOrObject = let object_desc: StringOrObject =

View file

@ -5,10 +5,11 @@ use std::time::Duration;
use chrono::Utc; use chrono::Utc;
use uuid::Uuid; use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher}; use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
pub struct TmdbEnricher { pub struct TmdbEnricher {
client: reqwest::Client, client: reqwest::Client,
@ -54,7 +55,9 @@ impl MetadataEnricher for TmdbEnricher {
]) ])
.send() .send()
.await .await
.map_err(|e| PinakesError::MetadataExtraction(format!("TMDB request failed: {e}")))?; .map_err(|e| {
PinakesError::MetadataExtraction(format!("TMDB request failed: {e}"))
})?;
if !resp.status().is_success() { if !resp.status().is_success() {
let status = resp.status(); let status = resp.status();
@ -72,7 +75,9 @@ impl MetadataEnricher for TmdbEnricher {
} }
let body = resp.text().await.map_err(|e| { let body = resp.text().await.map_err(|e| {
PinakesError::MetadataExtraction(format!("TMDB response read failed: {e}")) PinakesError::MetadataExtraction(format!(
"TMDB response read failed: {e}"
))
})?; })?;
let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| { let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {

View file

@ -1,9 +1,12 @@
//! Auto-detection of photo events and albums based on time and location proximity //! Auto-detection of photo events and albums based on time and location
//! proximity
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use crate::error::Result; use crate::{
use crate::model::{MediaId, MediaItem}; error::Result,
model::{MediaId, MediaItem},
};
/// Configuration for event detection /// Configuration for event detection
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -53,7 +56,9 @@ fn haversine_distance(lat1: f64, lon1: f64, lat2: f64, lon2: f64) -> f64 {
let dlon = (lon2 - lon1).to_radians(); let dlon = (lon2 - lon1).to_radians();
let a = (dlat / 2.0).sin().powi(2) let a = (dlat / 2.0).sin().powi(2)
+ lat1.to_radians().cos() * lat2.to_radians().cos() * (dlon / 2.0).sin().powi(2); + lat1.to_radians().cos()
* lat2.to_radians().cos()
* (dlon / 2.0).sin().powi(2);
let c = 2.0 * a.sqrt().atan2((1.0 - a).sqrt()); let c = 2.0 * a.sqrt().atan2((1.0 - a).sqrt());
@ -103,7 +108,7 @@ pub fn detect_events(
(Some(max_dist), Some((lat1, lon1)), Some((lat2, lon2))) => { (Some(max_dist), Some((lat1, lon1)), Some((lat2, lon2))) => {
let dist = haversine_distance(lat1, lon1, lat2, lon2); let dist = haversine_distance(lat1, lon1, lat2, lon2);
dist <= max_dist dist <= max_dist
} },
// If no location constraint or missing GPS, consider location OK // If no location constraint or missing GPS, consider location OK
_ => true, _ => true,
}; };
@ -124,7 +129,8 @@ pub fn detect_events(
} else { } else {
// Start new event if current has enough photos // Start new event if current has enough photos
if current_event_items.len() >= config.min_photos { if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d")); let event_name =
format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent { events.push(DetectedEvent {
suggested_name: event_name, suggested_name: event_name,
@ -145,7 +151,8 @@ pub fn detect_events(
// Don't forget the last event // Don't forget the last event
if current_event_items.len() >= config.min_photos { if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d")); let event_name =
format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent { events.push(DetectedEvent {
suggested_name: event_name, suggested_name: event_name,

View file

@ -2,9 +2,7 @@ use std::path::Path;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::error::Result; use crate::{error::Result, jobs::ExportFormat, storage::DynStorageBackend};
use crate::jobs::ExportFormat;
use crate::storage::DynStorageBackend;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportResult { pub struct ExportResult {
@ -28,13 +26,17 @@ pub async fn export_library(
match format { match format {
ExportFormat::Json => { ExportFormat::Json => {
let json = serde_json::to_string_pretty(&items) let json = serde_json::to_string_pretty(&items).map_err(|e| {
.map_err(|e| crate::error::PinakesError::Config(format!("json serialize: {e}")))?; crate::error::PinakesError::Config(format!("json serialize: {e}"))
})?;
std::fs::write(destination, json)?; std::fs::write(destination, json)?;
} },
ExportFormat::Csv => { ExportFormat::Csv => {
let mut csv = String::new(); let mut csv = String::new();
csv.push_str("id,path,file_name,media_type,content_hash,file_size,title,artist,album,genre,year,duration_secs,description,created_at,updated_at\n"); csv.push_str(
"id,path,file_name,media_type,content_hash,file_size,title,artist,\
album,genre,year,duration_secs,description,created_at,updated_at\n",
);
for item in &items { for item in &items {
csv.push_str(&format!( csv.push_str(&format!(
"{},{},{},{:?},{},{},{},{},{},{},{},{},{},{},{}\n", "{},{},{},{:?},{},{},{},{},{},{},{},{},{},{},{}\n",
@ -49,7 +51,8 @@ pub async fn export_library(
item.album.as_deref().unwrap_or(""), item.album.as_deref().unwrap_or(""),
item.genre.as_deref().unwrap_or(""), item.genre.as_deref().unwrap_or(""),
item.year.map(|y| y.to_string()).unwrap_or_default(), item.year.map(|y| y.to_string()).unwrap_or_default(),
item.duration_secs item
.duration_secs
.map(|d| d.to_string()) .map(|d| d.to_string())
.unwrap_or_default(), .unwrap_or_default(),
item.description.as_deref().unwrap_or(""), item.description.as_deref().unwrap_or(""),
@ -58,7 +61,7 @@ pub async fn export_library(
)); ));
} }
std::fs::write(destination, csv)?; std::fs::write(destination, csv)?;
} },
} }
Ok(ExportResult { Ok(ExportResult {

View file

@ -1,7 +1,6 @@
use std::path::Path; use std::path::Path;
use crate::error::Result; use crate::{error::Result, model::ContentHash};
use crate::model::ContentHash;
const BUFFER_SIZE: usize = 65536; const BUFFER_SIZE: usize = 65536;

View file

@ -1,17 +1,21 @@
use std::path::{Path, PathBuf}; use std::{
use std::time::SystemTime; path::{Path, PathBuf},
time::SystemTime,
};
use tracing::info; use tracing::info;
use crate::audit; use crate::{
use crate::error::{PinakesError, Result}; audit,
use crate::hash::compute_file_hash; error::{PinakesError, Result},
use crate::links; hash::compute_file_hash,
use crate::media_type::{BuiltinMediaType, MediaType}; links,
use crate::metadata; media_type::{BuiltinMediaType, MediaType},
use crate::model::*; metadata,
use crate::storage::DynStorageBackend; model::*,
use crate::thumbnail; storage::DynStorageBackend,
thumbnail,
};
pub struct ImportResult { pub struct ImportResult {
pub media_id: MediaId, pub media_id: MediaId,
@ -51,9 +55,13 @@ fn get_file_mtime(path: &Path) -> Option<i64> {
.map(|d| d.as_secs() as i64) .map(|d| d.as_secs() as i64)
} }
/// Check that a canonicalized path falls under at least one configured root directory. /// Check that a canonicalized path falls under at least one configured root
/// If no roots are configured, all paths are allowed (for ad-hoc imports). /// directory. If no roots are configured, all paths are allowed (for ad-hoc
pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) -> Result<()> { /// imports).
pub async fn validate_path_in_roots(
storage: &DynStorageBackend,
path: &Path,
) -> Result<()> {
let roots = storage.list_root_dirs().await?; let roots = storage.list_root_dirs().await?;
if roots.is_empty() { if roots.is_empty() {
return Ok(()); return Ok(());
@ -71,7 +79,10 @@ pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) ->
))) )))
} }
pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<ImportResult> { pub async fn import_file(
storage: &DynStorageBackend,
path: &Path,
) -> Result<ImportResult> {
import_file_with_options(storage, path, &ImportOptions::default()).await import_file_with_options(storage, path, &ImportOptions::default()).await
} }
@ -98,7 +109,8 @@ pub async fn import_file_with_options(
if options.incremental if options.incremental
&& !options.force && !options.force
&& let Some(existing) = storage.get_media_by_path(&path).await? && let Some(existing) = storage.get_media_by_path(&path).await?
&& let (Some(stored_mtime), Some(curr_mtime)) = (existing.file_mtime, current_mtime) && let (Some(stored_mtime), Some(curr_mtime)) =
(existing.file_mtime, current_mtime)
&& stored_mtime == curr_mtime && stored_mtime == curr_mtime
{ {
return Ok(ImportResult { return Ok(ImportResult {
@ -154,7 +166,12 @@ pub async fn import_file_with_options(
let thumb_dir = thumbnail::default_thumbnail_dir(); let thumb_dir = thumbnail::default_thumbnail_dir();
let media_type_clone = media_type.clone(); let media_type_clone = media_type.clone();
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
thumbnail::generate_thumbnail(media_id, &source, media_type_clone, &thumb_dir) thumbnail::generate_thumbnail(
media_id,
&source,
media_type_clone,
&thumb_dir,
)
}) })
.await .await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))?? .map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
@ -170,7 +187,8 @@ pub async fn import_file_with_options(
}; };
// Check if this is a markdown file for link extraction // Check if this is a markdown file for link extraction
let is_markdown = media_type == MediaType::Builtin(BuiltinMediaType::Markdown); let is_markdown =
media_type == MediaType::Builtin(BuiltinMediaType::Markdown);
let item = MediaItem { let item = MediaItem {
id: media_id, id: media_id,
@ -263,7 +281,10 @@ pub async fn import_file_with_options(
}) })
} }
pub(crate) fn should_ignore(path: &std::path::Path, patterns: &[String]) -> bool { pub(crate) fn should_ignore(
path: &std::path::Path,
patterns: &[String],
) -> bool {
for component in path.components() { for component in path.components() {
if let std::path::Component::Normal(name) = component { if let std::path::Component::Normal(name) = component {
let name_str = name.to_string_lossy(); let name_str = name.to_string_lossy();
@ -373,7 +394,7 @@ pub async fn import_directory_with_options(
Err(e) => { Err(e) => {
tracing::warn!(path = %path.display(), error = %e, "failed to import file"); tracing::warn!(path = %path.display(), error = %e, "failed to import file");
results.push(Err(e)); results.push(Err(e));
} },
} }
} }
} }
@ -385,7 +406,7 @@ pub async fn import_directory_with_options(
Err(e) => { Err(e) => {
tracing::warn!(path = %path.display(), error = %e, "failed to import file"); tracing::warn!(path = %path.display(), error = %e, "failed to import file");
results.push(Err(e)); results.push(Err(e));
} },
} }
} }

View file

@ -1,14 +1,18 @@
use std::collections::{HashMap, HashSet}; use std::{
use std::path::{Path, PathBuf}; collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::{info, warn}; use tracing::{info, warn};
use crate::error::Result; use crate::{
use crate::hash::compute_file_hash; error::Result,
use crate::media_type::MediaType; hash::compute_file_hash,
use crate::model::{ContentHash, MediaId}; media_type::MediaType,
use crate::storage::DynStorageBackend; model::{ContentHash, MediaId},
storage::DynStorageBackend,
};
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OrphanReport { pub struct OrphanReport {
@ -69,13 +73,17 @@ impl std::str::FromStr for IntegrityStatus {
} }
/// Detect orphaned media items (files that no longer exist on disk), /// Detect orphaned media items (files that no longer exist on disk),
/// untracked files (files on disk not in database), and moved files (same hash, different path). /// untracked files (files on disk not in database), and moved files (same hash,
pub async fn detect_orphans(storage: &DynStorageBackend) -> Result<OrphanReport> { /// different path).
pub async fn detect_orphans(
storage: &DynStorageBackend,
) -> Result<OrphanReport> {
let media_paths = storage.list_media_paths().await?; let media_paths = storage.list_media_paths().await?;
let mut orphaned_ids = Vec::new(); let mut orphaned_ids = Vec::new();
// Build hash index: ContentHash -> Vec<(MediaId, PathBuf)> // Build hash index: ContentHash -> Vec<(MediaId, PathBuf)>
let mut hash_index: HashMap<ContentHash, Vec<(MediaId, PathBuf)>> = HashMap::new(); let mut hash_index: HashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
HashMap::new();
for (id, path, hash) in &media_paths { for (id, path, hash) in &media_paths {
hash_index hash_index
.entry(hash.clone()) .entry(hash.clone())
@ -91,7 +99,8 @@ pub async fn detect_orphans(storage: &DynStorageBackend) -> Result<OrphanReport>
} }
// Detect moved files (orphaned items with same hash existing elsewhere) // Detect moved files (orphaned items with same hash existing elsewhere)
let moved_files = detect_moved_files(&orphaned_ids, &media_paths, &hash_index); let moved_files =
detect_moved_files(&orphaned_ids, &media_paths, &hash_index);
// Detect untracked files (on disk but not in DB) // Detect untracked files (on disk but not in DB)
let untracked_paths = detect_untracked_files(storage, &media_paths).await?; let untracked_paths = detect_untracked_files(storage, &media_paths).await?;
@ -122,7 +131,7 @@ fn detect_moved_files(
// Build lookup map for orphaned items: MediaId -> (PathBuf, ContentHash) // Build lookup map for orphaned items: MediaId -> (PathBuf, ContentHash)
let orphaned_map: HashMap<MediaId, (PathBuf, ContentHash)> = media_paths let orphaned_map: HashMap<MediaId, (PathBuf, ContentHash)> = media_paths
.iter() .iter()
.filter(|(id, _, _)| orphaned_ids.contains(id)) .filter(|(id, ..)| orphaned_ids.contains(id))
.map(|(id, path, hash)| (*id, (path.clone(), hash.clone()))) .map(|(id, path, hash)| (*id, (path.clone(), hash.clone())))
.collect(); .collect();
@ -228,10 +237,10 @@ async fn detect_untracked_files(
if MediaType::from_path(path).is_some() { if MediaType::from_path(path).is_some() {
paths.push(path.to_path_buf()); paths.push(path.to_path_buf());
} }
} },
Err(e) => { Err(e) => {
warn!(error = %e, "failed to read directory entry"); warn!(error = %e, "failed to read directory entry");
} },
} }
} }
@ -244,13 +253,13 @@ async fn detect_untracked_files(
match result { match result {
Ok(Ok(paths)) => { Ok(Ok(paths)) => {
filesystem_paths.extend(paths); filesystem_paths.extend(paths);
} },
Ok(Err(e)) => { Ok(Err(e)) => {
warn!(error = %e, "failed to walk directory"); warn!(error = %e, "failed to walk directory");
} },
Err(e) => { Err(e) => {
warn!(error = %e, "task join error"); warn!(error = %e, "task join error");
} },
} }
} }
@ -274,11 +283,11 @@ pub async fn resolve_orphans(
let count = storage.batch_delete_media(ids).await?; let count = storage.batch_delete_media(ids).await?;
info!(count, "resolved orphans by deletion"); info!(count, "resolved orphans by deletion");
Ok(count) Ok(count)
} },
OrphanAction::Ignore => { OrphanAction::Ignore => {
info!(count = ids.len(), "orphans ignored"); info!(count = ids.len(), "orphans ignored");
Ok(0) Ok(0)
} },
} }
} }
@ -289,11 +298,13 @@ pub async fn verify_integrity(
) -> Result<VerificationReport> { ) -> Result<VerificationReport> {
let all_paths = storage.list_media_paths().await?; let all_paths = storage.list_media_paths().await?;
let paths_to_check: Vec<(MediaId, PathBuf, ContentHash)> = if let Some(ids) = media_ids { let paths_to_check: Vec<(MediaId, PathBuf, ContentHash)> =
let id_set: std::collections::HashSet<MediaId> = ids.iter().copied().collect(); if let Some(ids) = media_ids {
let id_set: std::collections::HashSet<MediaId> =
ids.iter().copied().collect();
all_paths all_paths
.into_iter() .into_iter()
.filter(|(id, _, _)| id_set.contains(id)) .filter(|(id, ..)| id_set.contains(id))
.collect() .collect()
} else { } else {
all_paths all_paths
@ -321,10 +332,10 @@ pub async fn verify_integrity(
.mismatched .mismatched
.push((id, expected_hash.0.clone(), actual_hash.0)); .push((id, expected_hash.0.clone(), actual_hash.0));
} }
} },
Err(e) => { Err(e) => {
report.errors.push((id, e.to_string())); report.errors.push((id, e.to_string()));
} },
} }
} }
@ -347,7 +358,7 @@ pub async fn cleanup_orphaned_thumbnails(
let media_paths = storage.list_media_paths().await?; let media_paths = storage.list_media_paths().await?;
let known_ids: std::collections::HashSet<String> = media_paths let known_ids: std::collections::HashSet<String> = media_paths
.iter() .iter()
.map(|(id, _, _)| id.0.to_string()) .map(|(id, ..)| id.0.to_string())
.collect(); .collect();
let mut removed = 0; let mut removed = 0;

View file

@ -1,6 +1,4 @@
use std::collections::HashMap; use std::{collections::HashMap, path::PathBuf, sync::Arc};
use std::path::PathBuf;
use std::sync::Arc;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -96,7 +94,8 @@ impl JobQueue {
{ {
let (tx, rx) = mpsc::channel::<WorkerItem>(256); let (tx, rx) = mpsc::channel::<WorkerItem>(256);
let rx = Arc::new(tokio::sync::Mutex::new(rx)); let rx = Arc::new(tokio::sync::Mutex::new(rx));
let jobs: Arc<RwLock<HashMap<Uuid, Job>>> = Arc::new(RwLock::new(HashMap::new())); let jobs: Arc<RwLock<HashMap<Uuid, Job>>> =
Arc::new(RwLock::new(HashMap::new()));
let cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>> = let cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>> =
Arc::new(RwLock::new(HashMap::new())); Arc::new(RwLock::new(HashMap::new()));
@ -128,7 +127,8 @@ impl JobQueue {
} }
} }
let handle = executor(item.job_id, item.kind, item.cancel, jobs.clone()); let handle =
executor(item.job_id, item.kind, item.cancel, jobs.clone());
let _ = handle.await; let _ = handle.await;
// Clean up cancellation token // Clean up cancellation token
@ -215,7 +215,11 @@ impl JobQueue {
} }
/// Mark a job as completed. /// Mark a job as completed.
pub async fn complete(jobs: &Arc<RwLock<HashMap<Uuid, Job>>>, id: Uuid, result: Value) { pub async fn complete(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
id: Uuid,
result: Value,
) {
let mut map = jobs.write().await; let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&id) { if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Completed { result }; job.status = JobStatus::Completed { result };
@ -224,7 +228,11 @@ impl JobQueue {
} }
/// Mark a job as failed. /// Mark a job as failed.
pub async fn fail(jobs: &Arc<RwLock<HashMap<Uuid, Job>>>, id: Uuid, error: String) { pub async fn fail(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
id: Uuid,
error: String,
) {
let mut map = jobs.write().await; let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&id) { if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Failed { error }; job.status = JobStatus::Failed { error };
@ -246,7 +254,7 @@ impl JobQueue {
JobStatus::Running { .. } => running += 1, JobStatus::Running { .. } => running += 1,
JobStatus::Completed { .. } => completed += 1, JobStatus::Completed { .. } => completed += 1,
JobStatus::Failed { .. } => failed += 1, JobStatus::Failed { .. } => failed += 1,
JobStatus::Cancelled => {} // Don't count cancelled jobs JobStatus::Cancelled => {}, // Don't count cancelled jobs
} }
} }

View file

@ -1,4 +1,5 @@
//! Markdown link extraction and management for Obsidian-style bidirectional links. //! Markdown link extraction and management for Obsidian-style bidirectional
//! links.
//! //!
//! This module provides: //! This module provides:
//! - Wikilink extraction (`[[target]]` and `[[target|display]]`) //! - Wikilink extraction (`[[target]]` and `[[target|display]]`)
@ -24,7 +25,10 @@ const CONTEXT_CHARS_AFTER: usize = 50;
/// - Wikilinks: `[[target]]` and `[[target|display text]]` /// - Wikilinks: `[[target]]` and `[[target|display text]]`
/// - Embeds: `![[target]]` /// - Embeds: `![[target]]`
/// - Markdown links: `[text](path)` (internal paths only, no http/https) /// - Markdown links: `[text](path)` (internal paths only, no http/https)
pub fn extract_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> { pub fn extract_links(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
let mut links = Vec::new(); let mut links = Vec::new();
// Extract wikilinks: [[target]] or [[target|display]] // Extract wikilinks: [[target]] or [[target|display]]
@ -40,8 +44,12 @@ pub fn extract_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLin
} }
/// Extract wikilinks from content. /// Extract wikilinks from content.
/// Matches: `[[target]]` or `[[target|display text]]` but NOT `![[...]]` (embeds) /// Matches: `[[target]]` or `[[target|display text]]` but NOT `![[...]]`
fn extract_wikilinks(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> { /// (embeds)
fn extract_wikilinks(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
// Match [[...]] - we'll manually filter out embeds that are preceded by ! // Match [[...]] - we'll manually filter out embeds that are preceded by !
let re = Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap(); let re = Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap();
let mut links = Vec::new(); let mut links = Vec::new();
@ -62,7 +70,12 @@ fn extract_wikilinks(source_media_id: MediaId, content: &str) -> Vec<MarkdownLin
let target = cap.get(1).unwrap().as_str().trim(); let target = cap.get(1).unwrap().as_str().trim();
let display_text = cap.get(2).map(|m| m.as_str().trim().to_string()); let display_text = cap.get(2).map(|m| m.as_str().trim().to_string());
let context = extract_context(content, line_num, full_match.start(), full_match.end()); let context = extract_context(
content,
line_num,
full_match.start(),
full_match.end(),
);
links.push(MarkdownLink { links.push(MarkdownLink {
id: Uuid::now_v7(), id: Uuid::now_v7(),
@ -83,7 +96,10 @@ fn extract_wikilinks(source_media_id: MediaId, content: &str) -> Vec<MarkdownLin
/// Extract embeds from content. /// Extract embeds from content.
/// Matches: `![[target]]` /// Matches: `![[target]]`
fn extract_embeds(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> { fn extract_embeds(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
let re = Regex::new(r"!\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap(); let re = Regex::new(r"!\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap();
let mut links = Vec::new(); let mut links = Vec::new();
@ -93,7 +109,12 @@ fn extract_embeds(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink>
let target = cap.get(1).unwrap().as_str().trim(); let target = cap.get(1).unwrap().as_str().trim();
let display_text = cap.get(2).map(|m| m.as_str().trim().to_string()); let display_text = cap.get(2).map(|m| m.as_str().trim().to_string());
let context = extract_context(content, line_num, full_match.start(), full_match.end()); let context = extract_context(
content,
line_num,
full_match.start(),
full_match.end(),
);
links.push(MarkdownLink { links.push(MarkdownLink {
id: Uuid::now_v7(), id: Uuid::now_v7(),
@ -114,7 +135,10 @@ fn extract_embeds(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink>
/// Extract markdown links from content. /// Extract markdown links from content.
/// Matches: `[text](path)` but only for internal paths (no http/https) /// Matches: `[text](path)` but only for internal paths (no http/https)
fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> { fn extract_markdown_links(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
// Match [text](path) where path doesn't start with http:// or https:// // Match [text](path) where path doesn't start with http:// or https://
let re = Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap(); let re = Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap();
let mut links = Vec::new(); let mut links = Vec::new();
@ -126,7 +150,8 @@ fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<Markdo
// Skip markdown images: ![alt](image.png) // Skip markdown images: ![alt](image.png)
// Check if the character immediately before '[' is '!' // Check if the character immediately before '[' is '!'
if match_start > 0 && line.as_bytes().get(match_start - 1) == Some(&b'!') { if match_start > 0 && line.as_bytes().get(match_start - 1) == Some(&b'!')
{
continue; continue;
} }
@ -150,7 +175,12 @@ fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<Markdo
// Remove any anchor from the path for resolution // Remove any anchor from the path for resolution
let target_path = path.split('#').next().unwrap_or(path); let target_path = path.split('#').next().unwrap_or(path);
let context = extract_context(content, line_num, full_match.start(), full_match.end()); let context = extract_context(
content,
line_num,
full_match.start(),
full_match.end(),
);
links.push(MarkdownLink { links.push(MarkdownLink {
id: Uuid::now_v7(), id: Uuid::now_v7(),
@ -170,7 +200,12 @@ fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<Markdo
} }
/// Extract surrounding context for a link. /// Extract surrounding context for a link.
fn extract_context(content: &str, line_num: usize, _start: usize, _end: usize) -> String { fn extract_context(
content: &str,
line_num: usize,
_start: usize,
_end: usize,
) -> String {
let lines: Vec<&str> = content.lines().collect(); let lines: Vec<&str> = content.lines().collect();
if line_num >= lines.len() { if line_num >= lines.len() {
return String::new(); return String::new();
@ -192,7 +227,8 @@ fn extract_context(content: &str, line_num: usize, _start: usize, _end: usize) -
// Truncate long lines // Truncate long lines
if line_len > CONTEXT_CHARS_BEFORE + CONTEXT_CHARS_AFTER { if line_len > CONTEXT_CHARS_BEFORE + CONTEXT_CHARS_AFTER {
line.chars() line
.chars()
.take(CONTEXT_CHARS_BEFORE + CONTEXT_CHARS_AFTER) .take(CONTEXT_CHARS_BEFORE + CONTEXT_CHARS_AFTER)
.collect() .collect()
} else { } else {
@ -279,7 +315,9 @@ pub fn resolve_link_candidates(
/// Obsidian uses the `aliases` field in frontmatter to define alternative names /// Obsidian uses the `aliases` field in frontmatter to define alternative names
/// for a note that can be used in wikilinks. /// for a note that can be used in wikilinks.
pub fn extract_aliases(content: &str) -> Vec<String> { pub fn extract_aliases(content: &str) -> Vec<String> {
let Ok(parsed) = gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(content) else { let Ok(parsed) =
gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(content)
else {
return Vec::new(); return Vec::new();
}; };
@ -296,7 +334,8 @@ pub fn extract_aliases(content: &str) -> Vec<String> {
}; };
match aliases { match aliases {
gray_matter::Pod::Array(arr) => arr gray_matter::Pod::Array(arr) => {
arr
.iter() .iter()
.filter_map(|a| { .filter_map(|a| {
if let gray_matter::Pod::String(s) = a { if let gray_matter::Pod::String(s) = a {
@ -305,11 +344,12 @@ pub fn extract_aliases(content: &str) -> Vec<String> {
None None
} }
}) })
.collect(), .collect()
},
gray_matter::Pod::String(s) => { gray_matter::Pod::String(s) => {
// Single alias as string // Single alias as string
vec![s.clone()] vec![s.clone()]
} },
_ => Vec::new(), _ => Vec::new(),
} }
} }
@ -366,7 +406,8 @@ mod tests {
#[test] #[test]
fn test_skip_external_links() { fn test_skip_external_links() {
let content = "Visit [our site](https://example.com) or [email us](mailto:test@test.com)."; let content = "Visit [our site](https://example.com) or [email \
us](mailto:test@test.com).";
let links = extract_links(test_media_id(), content); let links = extract_links(test_media_id(), content);
assert!(links.is_empty()); assert!(links.is_empty());
@ -407,7 +448,8 @@ And an embedded image: ![[diagram.png]]
let source_path = std::path::Path::new("/notes/projects/readme.md"); let source_path = std::path::Path::new("/notes/projects/readme.md");
let root_dirs = vec![std::path::PathBuf::from("/notes")]; let root_dirs = vec![std::path::PathBuf::from("/notes")];
let candidates = resolve_link_candidates("My Note", source_path, &root_dirs); let candidates =
resolve_link_candidates("My Note", source_path, &root_dirs);
// Should include relative path and .md variations // Should include relative path and .md variations
assert!(!candidates.is_empty()); assert!(!candidates.is_empty());
@ -505,7 +547,8 @@ Mixed: [link](file.md) then ![image](pic.png) then [another](other.md)
} }
// Verify correct targets were extracted (links, not images) // Verify correct targets were extracted (links, not images)
let targets: Vec<&str> = links.iter().map(|l| l.target_path.as_str()).collect(); let targets: Vec<&str> =
links.iter().map(|l| l.target_path.as_str()).collect();
assert!( assert!(
targets.contains(&"docs/guide.md"), targets.contains(&"docs/guide.md"),
"Should contain docs/guide.md" "Should contain docs/guide.md"

View file

@ -7,12 +7,16 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use tokio::fs; use tokio::{
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader}; fs,
io::{AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader},
};
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use crate::error::{PinakesError, Result}; use crate::{
use crate::model::ContentHash; error::{PinakesError, Result},
model::ContentHash,
};
/// Content-addressable storage service for managed files. /// Content-addressable storage service for managed files.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -24,7 +28,11 @@ pub struct ManagedStorageService {
impl ManagedStorageService { impl ManagedStorageService {
/// Create a new managed storage service. /// Create a new managed storage service.
pub fn new(root_dir: PathBuf, max_upload_size: u64, verify_on_read: bool) -> Self { pub fn new(
root_dir: PathBuf,
max_upload_size: u64,
verify_on_read: bool,
) -> Self {
Self { Self {
root_dir, root_dir,
max_upload_size, max_upload_size,
@ -60,7 +68,8 @@ impl ManagedStorageService {
/// Store a file from an async reader, computing the hash as we go. /// Store a file from an async reader, computing the hash as we go.
/// ///
/// Returns the content hash and file size. /// Returns the content hash and file size.
/// If the file already exists with the same hash, returns early (deduplication). /// If the file already exists with the same hash, returns early
/// (deduplication).
pub async fn store_stream<R: AsyncRead + Unpin>( pub async fn store_stream<R: AsyncRead + Unpin>(
&self, &self,
mut reader: R, mut reader: R,
@ -256,7 +265,9 @@ impl ManagedStorageService {
let mut sub_entries = fs::read_dir(&path).await?; let mut sub_entries = fs::read_dir(&path).await?;
while let Some(sub_entry) = sub_entries.next_entry().await? { while let Some(sub_entry) = sub_entries.next_entry().await? {
let sub_path = sub_entry.path(); let sub_path = sub_entry.path();
if sub_path.is_dir() && sub_path.file_name().map(|n| n.len()) == Some(2) { if sub_path.is_dir()
&& sub_path.file_name().map(|n| n.len()) == Some(2)
{
let mut file_entries = fs::read_dir(&sub_path).await?; let mut file_entries = fs::read_dir(&sub_path).await?;
while let Some(file_entry) = file_entries.next_entry().await? { while let Some(file_entry) = file_entries.next_entry().await? {
let file_path = file_entry.path(); let file_path = file_entry.path();
@ -323,13 +334,15 @@ impl ManagedStorageService {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use tempfile::tempdir; use tempfile::tempdir;
use super::*;
#[tokio::test] #[tokio::test]
async fn test_store_and_retrieve() { async fn test_store_and_retrieve() {
let dir = tempdir().unwrap(); let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false); let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
service.init().await.unwrap(); service.init().await.unwrap();
let data = b"hello, world!"; let data = b"hello, world!";
@ -345,7 +358,8 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_deduplication() { async fn test_deduplication() {
let dir = tempdir().unwrap(); let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false); let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
service.init().await.unwrap(); service.init().await.unwrap();
let data = b"duplicate content"; let data = b"duplicate content";
@ -359,7 +373,8 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_verify_integrity() { async fn test_verify_integrity() {
let dir = tempdir().unwrap(); let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, true); let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, true);
service.init().await.unwrap(); service.init().await.unwrap();
let data = b"verify me"; let data = b"verify me";
@ -371,7 +386,8 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_upload_too_large() { async fn test_upload_too_large() {
let dir = tempdir().unwrap(); let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 100, false); let service =
ManagedStorageService::new(dir.path().to_path_buf(), 100, false);
service.init().await.unwrap(); service.init().await.unwrap();
let data = vec![0u8; 200]; let data = vec![0u8; 200];
@ -383,7 +399,8 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_delete() { async fn test_delete() {
let dir = tempdir().unwrap(); let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false); let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
service.init().await.unwrap(); service.init().await.unwrap();
let data = b"delete me"; let data = b"delete me";

View file

@ -139,7 +139,8 @@ impl BuiltinMediaType {
} }
pub fn from_path(path: &Path) -> Option<Self> { pub fn from_path(path: &Path) -> Option<Self> {
path.extension() path
.extension()
.and_then(|e| e.to_str()) .and_then(|e| e.to_str())
.and_then(Self::from_extension) .and_then(Self::from_extension)
} }
@ -181,9 +182,12 @@ impl BuiltinMediaType {
pub fn category(&self) -> MediaCategory { pub fn category(&self) -> MediaCategory {
match self { match self {
Self::Mp3 | Self::Flac | Self::Ogg | Self::Wav | Self::Aac | Self::Opus => { Self::Mp3
MediaCategory::Audio | Self::Flac
} | Self::Ogg
| Self::Wav
| Self::Aac
| Self::Opus => MediaCategory::Audio,
Self::Mp4 | Self::Mkv | Self::Avi | Self::Webm => MediaCategory::Video, Self::Mp4 | Self::Mkv | Self::Avi | Self::Webm => MediaCategory::Video,
Self::Pdf | Self::Epub | Self::Djvu => MediaCategory::Document, Self::Pdf | Self::Epub | Self::Djvu => MediaCategory::Document,
Self::Markdown | Self::PlainText => MediaCategory::Text, Self::Markdown | Self::PlainText => MediaCategory::Text,

View file

@ -3,9 +3,10 @@
//! This module provides an extensible media type system that supports both //! This module provides an extensible media type system that supports both
//! built-in media types and plugin-registered custom types. //! built-in media types and plugin-registered custom types.
use serde::{Deserialize, Serialize};
use std::path::Path; use std::path::Path;
use serde::{Deserialize, Serialize};
pub mod builtin; pub mod builtin;
pub mod registry; pub mod registry;
@ -50,15 +51,18 @@ impl MediaType {
pub fn name_with_registry(&self, registry: &MediaTypeRegistry) -> String { pub fn name_with_registry(&self, registry: &MediaTypeRegistry) -> String {
match self { match self {
Self::Builtin(b) => b.name(), Self::Builtin(b) => b.name(),
Self::Custom(id) => registry Self::Custom(id) => {
registry
.get(id) .get(id)
.map(|d| d.name.clone()) .map(|d| d.name.clone())
.unwrap_or_else(|| id.clone()), .unwrap_or_else(|| id.clone())
},
} }
} }
/// Get the category for this media type /// Get the category for this media type
/// For custom types without a registry, returns MediaCategory::Document as default /// For custom types without a registry, returns MediaCategory::Document as
/// default
pub fn category(&self) -> MediaCategory { pub fn category(&self) -> MediaCategory {
match self { match self {
Self::Builtin(b) => b.category(), Self::Builtin(b) => b.category(),
@ -67,13 +71,18 @@ impl MediaType {
} }
/// Get the category for this media type with registry support /// Get the category for this media type with registry support
pub fn category_with_registry(&self, registry: &MediaTypeRegistry) -> MediaCategory { pub fn category_with_registry(
&self,
registry: &MediaTypeRegistry,
) -> MediaCategory {
match self { match self {
Self::Builtin(b) => b.category(), Self::Builtin(b) => b.category(),
Self::Custom(id) => registry Self::Custom(id) => {
registry
.get(id) .get(id)
.and_then(|d| d.category) .and_then(|d| d.category)
.unwrap_or(MediaCategory::Document), .unwrap_or(MediaCategory::Document)
},
} }
} }
@ -87,13 +96,18 @@ impl MediaType {
} }
/// Get the MIME type with registry support /// Get the MIME type with registry support
pub fn mime_type_with_registry(&self, registry: &MediaTypeRegistry) -> String { pub fn mime_type_with_registry(
&self,
registry: &MediaTypeRegistry,
) -> String {
match self { match self {
Self::Builtin(b) => b.mime_type().to_string(), Self::Builtin(b) => b.mime_type().to_string(),
Self::Custom(id) => registry Self::Custom(id) => {
registry
.get(id) .get(id)
.and_then(|d| d.mime_types.first().cloned()) .and_then(|d| d.mime_types.first().cloned())
.unwrap_or_else(|| "application/octet-stream".to_string()), .unwrap_or_else(|| "application/octet-stream".to_string())
},
} }
} }
@ -101,19 +115,28 @@ impl MediaType {
/// For custom types without a registry, returns an empty vec /// For custom types without a registry, returns an empty vec
pub fn extensions(&self) -> Vec<String> { pub fn extensions(&self) -> Vec<String> {
match self { match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(), Self::Builtin(b) => {
b.extensions().iter().map(|s| s.to_string()).collect()
},
Self::Custom(_) => vec![], Self::Custom(_) => vec![],
} }
} }
/// Get file extensions with registry support /// Get file extensions with registry support
pub fn extensions_with_registry(&self, registry: &MediaTypeRegistry) -> Vec<String> { pub fn extensions_with_registry(
&self,
registry: &MediaTypeRegistry,
) -> Vec<String> {
match self { match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(), Self::Builtin(b) => {
Self::Custom(id) => registry b.extensions().iter().map(|s| s.to_string()).collect()
},
Self::Custom(id) => {
registry
.get(id) .get(id)
.map(|d| d.extensions.clone()) .map(|d| d.extensions.clone())
.unwrap_or_default(), .unwrap_or_default()
},
} }
} }
@ -131,8 +154,12 @@ impl MediaType {
BuiltinMediaType::from_extension(ext).map(Self::Builtin) BuiltinMediaType::from_extension(ext).map(Self::Builtin)
} }
/// Resolve a media type from file extension with registry (includes custom types) /// Resolve a media type from file extension with registry (includes custom
pub fn from_extension_with_registry(ext: &str, registry: &MediaTypeRegistry) -> Option<Self> { /// types)
pub fn from_extension_with_registry(
ext: &str,
registry: &MediaTypeRegistry,
) -> Option<Self> {
// Try built-in types first // Try built-in types first
if let Some(builtin) = BuiltinMediaType::from_extension(ext) { if let Some(builtin) = BuiltinMediaType::from_extension(ext) {
return Some(Self::Builtin(builtin)); return Some(Self::Builtin(builtin));
@ -147,14 +174,19 @@ impl MediaType {
/// Resolve a media type from file path (built-in types only) /// Resolve a media type from file path (built-in types only)
/// Use from_path_with_registry for custom types /// Use from_path_with_registry for custom types
pub fn from_path(path: &Path) -> Option<Self> { pub fn from_path(path: &Path) -> Option<Self> {
path.extension() path
.extension()
.and_then(|e| e.to_str()) .and_then(|e| e.to_str())
.and_then(Self::from_extension) .and_then(Self::from_extension)
} }
/// Resolve a media type from file path with registry (includes custom types) /// Resolve a media type from file path with registry (includes custom types)
pub fn from_path_with_registry(path: &Path, registry: &MediaTypeRegistry) -> Option<Self> { pub fn from_path_with_registry(
path.extension() path: &Path,
registry: &MediaTypeRegistry,
) -> Option<Self> {
path
.extension()
.and_then(|e| e.to_str()) .and_then(|e| e.to_str())
.and_then(|ext| Self::from_extension_with_registry(ext, registry)) .and_then(|ext| Self::from_extension_with_registry(ext, registry))
} }

View file

@ -1,8 +1,9 @@
//! Media type registry for managing both built-in and custom media types //! Media type registry for managing both built-in and custom media types
use std::collections::HashMap;
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use super::MediaCategory; use super::MediaCategory;
@ -59,7 +60,8 @@ impl MediaTypeRegistry {
let ext_lower = ext.to_lowercase(); let ext_lower = ext.to_lowercase();
if self.extension_map.contains_key(&ext_lower) { if self.extension_map.contains_key(&ext_lower) {
// Extension already registered - this is OK, we'll use the first one // Extension already registered - this is OK, we'll use the first one
// In a more sophisticated system, we might track multiple types per extension // In a more sophisticated system, we might track multiple types per
// extension
continue; continue;
} }
self.extension_map.insert(ext_lower, descriptor.id.clone()); self.extension_map.insert(ext_lower, descriptor.id.clone());
@ -97,7 +99,8 @@ impl MediaTypeRegistry {
/// Get a media type by file extension /// Get a media type by file extension
pub fn get_by_extension(&self, ext: &str) -> Option<&MediaTypeDescriptor> { pub fn get_by_extension(&self, ext: &str) -> Option<&MediaTypeDescriptor> {
let ext_lower = ext.to_lowercase(); let ext_lower = ext.to_lowercase();
self.extension_map self
.extension_map
.get(&ext_lower) .get(&ext_lower)
.and_then(|id| self.types.get(id)) .and_then(|id| self.types.get(id))
} }
@ -109,7 +112,8 @@ impl MediaTypeRegistry {
/// List media types from a specific plugin /// List media types from a specific plugin
pub fn list_by_plugin(&self, plugin_id: &str) -> Vec<&MediaTypeDescriptor> { pub fn list_by_plugin(&self, plugin_id: &str) -> Vec<&MediaTypeDescriptor> {
self.types self
.types
.values() .values()
.filter(|d| d.plugin_id.as_deref() == Some(plugin_id)) .filter(|d| d.plugin_id.as_deref() == Some(plugin_id))
.collect() .collect()
@ -117,7 +121,8 @@ impl MediaTypeRegistry {
/// List built-in media types (plugin_id is None) /// List built-in media types (plugin_id is None)
pub fn list_builtin(&self) -> Vec<&MediaTypeDescriptor> { pub fn list_builtin(&self) -> Vec<&MediaTypeDescriptor> {
self.types self
.types
.values() .values()
.filter(|d| d.plugin_id.is_none()) .filter(|d| d.plugin_id.is_none())
.collect() .collect()

View file

@ -1,19 +1,23 @@
use std::path::Path; use std::path::Path;
use lofty::file::{AudioFile, TaggedFileExt}; use lofty::{
use lofty::tag::Accessor; file::{AudioFile, TaggedFileExt},
tag::Accessor,
use crate::error::{PinakesError, Result}; };
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor}; use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct AudioExtractor; pub struct AudioExtractor;
impl MetadataExtractor for AudioExtractor { impl MetadataExtractor for AudioExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> { fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let tagged_file = lofty::read_from_path(path) let tagged_file = lofty::read_from_path(path).map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("audio metadata: {e}")))?; PinakesError::MetadataExtraction(format!("audio metadata: {e}"))
})?;
let mut meta = ExtractedMetadata::default(); let mut meta = ExtractedMetadata::default();
@ -33,15 +37,18 @@ impl MetadataExtractor for AudioExtractor {
.or_else(|| tagged_file.first_tag()) .or_else(|| tagged_file.first_tag())
{ {
if let Some(track) = tag.track() { if let Some(track) = tag.track() {
meta.extra meta
.extra
.insert("track_number".to_string(), track.to_string()); .insert("track_number".to_string(), track.to_string());
} }
if let Some(disc) = tag.disk() { if let Some(disc) = tag.disk() {
meta.extra meta
.extra
.insert("disc_number".to_string(), disc.to_string()); .insert("disc_number".to_string(), disc.to_string());
} }
if let Some(comment) = tag.comment() { if let Some(comment) = tag.comment() {
meta.extra meta
.extra
.insert("comment".to_string(), comment.to_string()); .insert("comment".to_string(), comment.to_string());
} }
} }
@ -53,15 +60,18 @@ impl MetadataExtractor for AudioExtractor {
} }
if let Some(bitrate) = properties.audio_bitrate() { if let Some(bitrate) = properties.audio_bitrate() {
meta.extra meta
.extra
.insert("bitrate".to_string(), format!("{bitrate} kbps")); .insert("bitrate".to_string(), format!("{bitrate} kbps"));
} }
if let Some(sample_rate) = properties.sample_rate() { if let Some(sample_rate) = properties.sample_rate() {
meta.extra meta
.extra
.insert("sample_rate".to_string(), format!("{sample_rate} Hz")); .insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
} }
if let Some(channels) = properties.channels() { if let Some(channels) = properties.channels() {
meta.extra meta
.extra
.insert("channels".to_string(), channels.to_string()); .insert("channels".to_string(), channels.to_string());
} }

View file

@ -1,9 +1,10 @@
use std::path::Path; use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor}; use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct DocumentExtractor; pub struct DocumentExtractor;
@ -128,15 +129,20 @@ fn extract_pdf(path: &Path) -> Result<ExtractedMetadata> {
fn pdf_object_to_string(obj: &lopdf::Object) -> Option<String> { fn pdf_object_to_string(obj: &lopdf::Object) -> Option<String> {
match obj { match obj {
lopdf::Object::String(bytes, _) => Some(String::from_utf8_lossy(bytes).into_owned()), lopdf::Object::String(bytes, _) => {
lopdf::Object::Name(name) => Some(String::from_utf8_lossy(name).into_owned()), Some(String::from_utf8_lossy(bytes).into_owned())
},
lopdf::Object::Name(name) => {
Some(String::from_utf8_lossy(name).into_owned())
},
_ => None, _ => None,
} }
} }
fn extract_epub(path: &Path) -> Result<ExtractedMetadata> { fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
let mut doc = epub::doc::EpubDoc::new(path) let mut doc = epub::doc::EpubDoc::new(path).map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("EPUB parse: {e}")))?; PinakesError::MetadataExtraction(format!("EPUB parse: {e}"))
})?;
let mut meta = ExtractedMetadata { let mut meta = ExtractedMetadata {
title: doc.mdata("title").map(|item| item.value.clone()), title: doc.mdata("title").map(|item| item.value.clone()),
@ -156,7 +162,9 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
} }
if let Some(date) = doc.mdata("date") { if let Some(date) = doc.mdata("date") {
// Try to parse as YYYY-MM-DD or just YYYY // Try to parse as YYYY-MM-DD or just YYYY
if let Ok(parsed_date) = chrono::NaiveDate::parse_from_str(&date.value, "%Y-%m-%d") { if let Ok(parsed_date) =
chrono::NaiveDate::parse_from_str(&date.value, "%Y-%m-%d")
{
book_meta.publication_date = Some(parsed_date); book_meta.publication_date = Some(parsed_date);
} else if let Ok(year) = date.value.parse::<i32>() { } else if let Ok(year) = date.value.parse::<i32>() {
book_meta.publication_date = chrono::NaiveDate::from_ymd_opt(year, 1, 1); book_meta.publication_date = chrono::NaiveDate::from_ymd_opt(year, 1, 1);
@ -212,7 +220,7 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
} else { } else {
"other" "other"
} }
} },
}; };
// Try to normalize ISBN // Try to normalize ISBN
@ -245,7 +253,8 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
if let Some(opf_content) = opf_data { if let Some(opf_content) = opf_data {
// Look for <meta name="calibre:series" content="Series Name"/> // Look for <meta name="calibre:series" content="Series Name"/>
if let Some(series_start) = opf_content.find("name=\"calibre:series\"") if let Some(series_start) = opf_content.find("name=\"calibre:series\"")
&& let Some(content_start) = opf_content[series_start..].find("content=\"") && let Some(content_start) =
opf_content[series_start..].find("content=\"")
{ {
let after_content = &opf_content[series_start + content_start + 9..]; let after_content = &opf_content[series_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"') { if let Some(quote_end) = after_content.find('"') {

View file

@ -1,9 +1,10 @@
use std::path::Path; use std::path::Path;
use crate::error::Result;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor}; use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::Result,
media_type::{BuiltinMediaType, MediaType},
};
pub struct ImageExtractor; pub struct ImageExtractor;
@ -14,7 +15,8 @@ impl MetadataExtractor for ImageExtractor {
let file = std::fs::File::open(path)?; let file = std::fs::File::open(path)?;
let mut buf_reader = std::io::BufReader::new(&file); let mut buf_reader = std::io::BufReader::new(&file);
let exif_data = match exif::Reader::new().read_from_container(&mut buf_reader) { let exif_data =
match exif::Reader::new().read_from_container(&mut buf_reader) {
Ok(exif) => exif, Ok(exif) => exif,
Err(_) => return Ok(meta), Err(_) => return Ok(meta),
}; };
@ -29,21 +31,26 @@ impl MetadataExtractor for ImageExtractor {
} }
if let Some(height) = exif_data if let Some(height) = exif_data
.get_field(exif::Tag::PixelYDimension, exif::In::PRIMARY) .get_field(exif::Tag::PixelYDimension, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::ImageLength, exif::In::PRIMARY)) .or_else(|| {
exif_data.get_field(exif::Tag::ImageLength, exif::In::PRIMARY)
})
&& let Some(h) = field_to_u32(height) && let Some(h) = field_to_u32(height)
{ {
meta.extra.insert("height".to_string(), h.to_string()); meta.extra.insert("height".to_string(), h.to_string());
} }
// Camera make and model - set both in top-level fields and extra // Camera make and model - set both in top-level fields and extra
if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY) { if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY)
{
let val = make.display_value().to_string().trim().to_string(); let val = make.display_value().to_string().trim().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.camera_make = Some(val.clone()); meta.camera_make = Some(val.clone());
meta.extra.insert("camera_make".to_string(), val); meta.extra.insert("camera_make".to_string(), val);
} }
} }
if let Some(model) = exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY) { if let Some(model) =
exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY)
{
let val = model.display_value().to_string().trim().to_string(); let val = model.display_value().to_string().trim().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.camera_model = Some(val.clone()); meta.camera_model = Some(val.clone());
@ -77,9 +84,11 @@ impl MetadataExtractor for ImageExtractor {
{ {
meta.latitude = Some(lat_val); meta.latitude = Some(lat_val);
meta.longitude = Some(lon_val); meta.longitude = Some(lon_val);
meta.extra meta
.extra
.insert("gps_latitude".to_string(), format!("{lat_val:.6}")); .insert("gps_latitude".to_string(), format!("{lat_val:.6}"));
meta.extra meta
.extra
.insert("gps_longitude".to_string(), format!("{lon_val:.6}")); .insert("gps_longitude".to_string(), format!("{lon_val:.6}"));
} }
@ -92,19 +101,25 @@ impl MetadataExtractor for ImageExtractor {
meta.extra.insert("iso".to_string(), val); meta.extra.insert("iso".to_string(), val);
} }
} }
if let Some(exposure) = exif_data.get_field(exif::Tag::ExposureTime, exif::In::PRIMARY) { if let Some(exposure) =
exif_data.get_field(exif::Tag::ExposureTime, exif::In::PRIMARY)
{
let val = exposure.display_value().to_string(); let val = exposure.display_value().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.extra.insert("exposure_time".to_string(), val); meta.extra.insert("exposure_time".to_string(), val);
} }
} }
if let Some(aperture) = exif_data.get_field(exif::Tag::FNumber, exif::In::PRIMARY) { if let Some(aperture) =
exif_data.get_field(exif::Tag::FNumber, exif::In::PRIMARY)
{
let val = aperture.display_value().to_string(); let val = aperture.display_value().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.extra.insert("f_number".to_string(), val); meta.extra.insert("f_number".to_string(), val);
} }
} }
if let Some(focal) = exif_data.get_field(exif::Tag::FocalLength, exif::In::PRIMARY) { if let Some(focal) =
exif_data.get_field(exif::Tag::FocalLength, exif::In::PRIMARY)
{
let val = focal.display_value().to_string(); let val = focal.display_value().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.extra.insert("focal_length".to_string(), val); meta.extra.insert("focal_length".to_string(), val);
@ -112,16 +127,21 @@ impl MetadataExtractor for ImageExtractor {
} }
// Lens model // Lens model
if let Some(lens) = exif_data.get_field(exif::Tag::LensModel, exif::In::PRIMARY) { if let Some(lens) =
exif_data.get_field(exif::Tag::LensModel, exif::In::PRIMARY)
{
let val = lens.display_value().to_string(); let val = lens.display_value().to_string();
if !val.is_empty() && val != "\"\"" { if !val.is_empty() && val != "\"\"" {
meta.extra meta
.extra
.insert("lens_model".to_string(), val.trim_matches('"').to_string()); .insert("lens_model".to_string(), val.trim_matches('"').to_string());
} }
} }
// Flash // Flash
if let Some(flash) = exif_data.get_field(exif::Tag::Flash, exif::In::PRIMARY) { if let Some(flash) =
exif_data.get_field(exif::Tag::Flash, exif::In::PRIMARY)
{
let val = flash.display_value().to_string(); let val = flash.display_value().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.extra.insert("flash".to_string(), val); meta.extra.insert("flash".to_string(), val);
@ -129,7 +149,9 @@ impl MetadataExtractor for ImageExtractor {
} }
// Orientation // Orientation
if let Some(orientation) = exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY) { if let Some(orientation) =
exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY)
{
let val = orientation.display_value().to_string(); let val = orientation.display_value().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.extra.insert("orientation".to_string(), val); meta.extra.insert("orientation".to_string(), val);
@ -137,7 +159,9 @@ impl MetadataExtractor for ImageExtractor {
} }
// Software // Software
if let Some(software) = exif_data.get_field(exif::Tag::Software, exif::In::PRIMARY) { if let Some(software) =
exif_data.get_field(exif::Tag::Software, exif::In::PRIMARY)
{
let val = software.display_value().to_string(); let val = software.display_value().to_string();
if !val.is_empty() { if !val.is_empty() {
meta.extra.insert("software".to_string(), val); meta.extra.insert("software".to_string(), val);
@ -145,7 +169,9 @@ impl MetadataExtractor for ImageExtractor {
} }
// Image description as title // Image description as title
if let Some(desc) = exif_data.get_field(exif::Tag::ImageDescription, exif::In::PRIMARY) { if let Some(desc) =
exif_data.get_field(exif::Tag::ImageDescription, exif::In::PRIMARY)
{
let val = desc.display_value().to_string(); let val = desc.display_value().to_string();
if !val.is_empty() && val != "\"\"" { if !val.is_empty() && val != "\"\"" {
meta.title = Some(val.trim_matches('"').to_string()); meta.title = Some(val.trim_matches('"').to_string());
@ -153,7 +179,9 @@ impl MetadataExtractor for ImageExtractor {
} }
// Artist // Artist
if let Some(artist) = exif_data.get_field(exif::Tag::Artist, exif::In::PRIMARY) { if let Some(artist) =
exif_data.get_field(exif::Tag::Artist, exif::In::PRIMARY)
{
let val = artist.display_value().to_string(); let val = artist.display_value().to_string();
if !val.is_empty() && val != "\"\"" { if !val.is_empty() && val != "\"\"" {
meta.artist = Some(val.trim_matches('"').to_string()); meta.artist = Some(val.trim_matches('"').to_string());
@ -161,7 +189,9 @@ impl MetadataExtractor for ImageExtractor {
} }
// Copyright as description // Copyright as description
if let Some(copyright) = exif_data.get_field(exif::Tag::Copyright, exif::In::PRIMARY) { if let Some(copyright) =
exif_data.get_field(exif::Tag::Copyright, exif::In::PRIMARY)
{
let val = copyright.display_value().to_string(); let val = copyright.display_value().to_string();
if !val.is_empty() && val != "\"\"" { if !val.is_empty() && val != "\"\"" {
meta.description = Some(val.trim_matches('"').to_string()); meta.description = Some(val.trim_matches('"').to_string());
@ -201,7 +231,10 @@ fn field_to_u32(field: &exif::Field) -> Option<u32> {
} }
} }
fn dms_to_decimal(dms_field: &exif::Field, ref_field: &exif::Field) -> Option<f64> { fn dms_to_decimal(
dms_field: &exif::Field,
ref_field: &exif::Field,
) -> Option<f64> {
if let exif::Value::Rational(ref rationals) = dms_field.value if let exif::Value::Rational(ref rationals) = dms_field.value
&& rationals.len() >= 3 && rationals.len() >= 3
{ {
@ -241,8 +274,9 @@ fn parse_exif_datetime(s: &str) -> Option<chrono::DateTime<chrono::Utc>> {
} }
/// Generate a perceptual hash for an image file. /// Generate a perceptual hash for an image file.
/// Uses DCT (Discrete Cosine Transform) hash algorithm for robust similarity detection. /// Uses DCT (Discrete Cosine Transform) hash algorithm for robust similarity
/// Returns a hex-encoded hash string, or None if the image cannot be processed. /// detection. Returns a hex-encoded hash string, or None if the image cannot be
/// processed.
pub fn generate_perceptual_hash(path: &Path) -> Option<String> { pub fn generate_perceptual_hash(path: &Path) -> Option<String> {
use image_hasher::{HashAlg, HasherConfig}; use image_hasher::{HashAlg, HasherConfig};

View file

@ -1,16 +1,18 @@
use std::path::Path; use std::path::Path;
use crate::error::Result;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor}; use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::Result,
media_type::{BuiltinMediaType, MediaType},
};
pub struct MarkdownExtractor; pub struct MarkdownExtractor;
impl MetadataExtractor for MarkdownExtractor { impl MetadataExtractor for MarkdownExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> { fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let content = std::fs::read_to_string(path)?; let content = std::fs::read_to_string(path)?;
let parsed = gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(&content); let parsed =
gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(&content);
let mut meta = ExtractedMetadata::default(); let mut meta = ExtractedMetadata::default();

View file

@ -4,12 +4,13 @@ pub mod image;
pub mod markdown; pub mod markdown;
pub mod video; pub mod video;
use std::collections::HashMap; use std::{collections::HashMap, path::Path};
use std::path::Path;
use crate::error::Result; use crate::{
use crate::media_type::MediaType; error::Result,
use crate::model::ExtractedBookMetadata; media_type::MediaType,
model::ExtractedBookMetadata,
};
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct ExtractedMetadata { pub struct ExtractedMetadata {
@ -37,7 +38,10 @@ pub trait MetadataExtractor: Send + Sync {
fn supported_types(&self) -> Vec<MediaType>; fn supported_types(&self) -> Vec<MediaType>;
} }
pub fn extract_metadata(path: &Path, media_type: MediaType) -> Result<ExtractedMetadata> { pub fn extract_metadata(
path: &Path,
media_type: MediaType,
) -> Result<ExtractedMetadata> {
let extractors: Vec<Box<dyn MetadataExtractor>> = vec![ let extractors: Vec<Box<dyn MetadataExtractor>> = vec![
Box::new(audio::AudioExtractor), Box::new(audio::AudioExtractor),
Box::new(document::DocumentExtractor), Box::new(document::DocumentExtractor),

View file

@ -1,9 +1,10 @@
use std::path::Path; use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor}; use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct VideoExtractor; pub struct VideoExtractor;
@ -44,23 +45,26 @@ fn extract_mkv(path: &Path) -> Result<ExtractedMetadata> {
format!("{}x{}", v.pixel_width, v.pixel_height), format!("{}x{}", v.pixel_width, v.pixel_height),
); );
if !track.codec_id.is_empty() { if !track.codec_id.is_empty() {
meta.extra meta
.extra
.insert("video_codec".to_string(), track.codec_id.clone()); .insert("video_codec".to_string(), track.codec_id.clone());
} }
} },
matroska::Settings::Audio(a) => { matroska::Settings::Audio(a) => {
meta.extra.insert( meta.extra.insert(
"sample_rate".to_string(), "sample_rate".to_string(),
format!("{} Hz", a.sample_rate as u32), format!("{} Hz", a.sample_rate as u32),
); );
meta.extra meta
.extra
.insert("channels".to_string(), a.channels.to_string()); .insert("channels".to_string(), a.channels.to_string());
if !track.codec_id.is_empty() { if !track.codec_id.is_empty() {
meta.extra meta
.extra
.insert("audio_codec".to_string(), track.codec_id.clone()); .insert("audio_codec".to_string(), track.codec_id.clone());
} }
} },
_ => {} _ => {},
} }
} }
@ -68,11 +72,14 @@ fn extract_mkv(path: &Path) -> Result<ExtractedMetadata> {
} }
fn extract_mp4(path: &Path) -> Result<ExtractedMetadata> { fn extract_mp4(path: &Path) -> Result<ExtractedMetadata> {
use lofty::file::{AudioFile, TaggedFileExt}; use lofty::{
use lofty::tag::Accessor; file::{AudioFile, TaggedFileExt},
tag::Accessor,
};
let tagged_file = lofty::read_from_path(path) let tagged_file = lofty::read_from_path(path).map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("MP4 metadata: {e}")))?; PinakesError::MetadataExtraction(format!("MP4 metadata: {e}"))
})?;
let mut meta = ExtractedMetadata::default(); let mut meta = ExtractedMetadata::default();
@ -102,15 +109,18 @@ fn extract_mp4(path: &Path) -> Result<ExtractedMetadata> {
} }
if let Some(bitrate) = properties.audio_bitrate() { if let Some(bitrate) = properties.audio_bitrate() {
meta.extra meta
.extra
.insert("audio_bitrate".to_string(), format!("{bitrate} kbps")); .insert("audio_bitrate".to_string(), format!("{bitrate} kbps"));
} }
if let Some(sample_rate) = properties.sample_rate() { if let Some(sample_rate) = properties.sample_rate() {
meta.extra meta
.extra
.insert("sample_rate".to_string(), format!("{sample_rate} Hz")); .insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
} }
if let Some(channels) = properties.channels() { if let Some(channels) = properties.channels() {
meta.extra meta
.extra
.insert("channels".to_string(), channels.to_string()); .insert("channels".to_string(), channels.to_string());
} }

View file

@ -1,6 +1,4 @@
use std::collections::HashMap; use std::{collections::HashMap, fmt, path::PathBuf};
use std::fmt;
use std::path::PathBuf;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -47,7 +45,9 @@ impl fmt::Display for ContentHash {
// ===== Managed Storage Types ===== // ===== Managed Storage Types =====
/// Storage mode for media items /// Storage mode for media items
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] #[derive(
Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize,
)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum StorageMode { pub enum StorageMode {
/// File exists on disk, referenced by path /// File exists on disk, referenced by path
@ -126,7 +126,8 @@ pub struct MediaItem {
pub description: Option<String>, pub description: Option<String>,
pub thumbnail_path: Option<PathBuf>, pub thumbnail_path: Option<PathBuf>,
pub custom_fields: HashMap<String, CustomField>, pub custom_fields: HashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental scanning /// File modification time (Unix timestamp in seconds), used for incremental
/// scanning
pub file_mtime: Option<i64>, pub file_mtime: Option<i64>,
// Photo-specific metadata // Photo-specific metadata
@ -139,10 +140,12 @@ pub struct MediaItem {
pub perceptual_hash: Option<String>, pub perceptual_hash: Option<String>,
// Managed storage fields // Managed storage fields
/// How the file is stored (external on disk or managed in content-addressable storage) /// How the file is stored (external on disk or managed in
/// content-addressable storage)
#[serde(default)] #[serde(default)]
pub storage_mode: StorageMode, pub storage_mode: StorageMode,
/// Original filename for uploaded files (preserved separately from file_name) /// Original filename for uploaded files (preserved separately from
/// file_name)
pub original_filename: Option<String>, pub original_filename: Option<String>,
/// When the file was uploaded to managed storage /// When the file was uploaded to managed storage
pub uploaded_at: Option<DateTime<Utc>>, pub uploaded_at: Option<DateTime<Utc>>,

View file

@ -1,5 +1,4 @@
use std::path::Path; use std::{path::Path, process::Command};
use std::process::Command;
use crate::error::{PinakesError, Result}; use crate::error::{PinakesError, Result};
@ -12,10 +11,9 @@ pub struct XdgOpener;
impl Opener for XdgOpener { impl Opener for XdgOpener {
fn open(&self, path: &Path) -> Result<()> { fn open(&self, path: &Path) -> Result<()> {
let status = Command::new("xdg-open") let status = Command::new("xdg-open").arg(path).status().map_err(|e| {
.arg(path) PinakesError::InvalidOperation(format!("failed to run xdg-open: {e}"))
.status() })?;
.map_err(|e| PinakesError::InvalidOperation(format!("failed to run xdg-open: {e}")))?;
if status.success() { if status.success() {
Ok(()) Ok(())
} else { } else {
@ -31,10 +29,9 @@ pub struct MacOpener;
impl Opener for MacOpener { impl Opener for MacOpener {
fn open(&self, path: &Path) -> Result<()> { fn open(&self, path: &Path) -> Result<()> {
let status = Command::new("open") let status = Command::new("open").arg(path).status().map_err(|e| {
.arg(path) PinakesError::InvalidOperation(format!("failed to run open: {e}"))
.status() })?;
.map_err(|e| PinakesError::InvalidOperation(format!("failed to run open: {e}")))?;
if status.success() { if status.success() {
Ok(()) Ok(())
} else { } else {
@ -55,7 +52,9 @@ impl Opener for WindowsOpener {
.arg(path) .arg(path)
.status() .status()
.map_err(|e| { .map_err(|e| {
PinakesError::InvalidOperation(format!("failed to run cmd /c start: {e}")) PinakesError::InvalidOperation(format!(
"failed to run cmd /c start: {e}"
))
})?; })?;
if status.success() { if status.success() {
Ok(()) Ok(())

View file

@ -37,14 +37,21 @@ use crate::error::{PinakesError, Result};
/// ///
/// ```no_run /// ```no_run
/// use std::path::PathBuf; /// use std::path::PathBuf;
///
/// use pinakes_core::path_validation::validate_path; /// use pinakes_core::path_validation::validate_path;
/// ///
/// let allowed_roots = vec![PathBuf::from("/media"), PathBuf::from("/home/user/documents")]; /// let allowed_roots = vec![
/// PathBuf::from("/media"),
/// PathBuf::from("/home/user/documents"),
/// ];
/// let path = PathBuf::from("/media/music/song.mp3"); /// let path = PathBuf::from("/media/music/song.mp3");
/// ///
/// let validated = validate_path(&path, &allowed_roots).unwrap(); /// let validated = validate_path(&path, &allowed_roots).unwrap();
/// ``` /// ```
pub fn validate_path(path: &Path, allowed_roots: &[PathBuf]) -> Result<PathBuf> { pub fn validate_path(
path: &Path,
allowed_roots: &[PathBuf],
) -> Result<PathBuf> {
// Handle the case where no roots are configured // Handle the case where no roots are configured
if allowed_roots.is_empty() { if allowed_roots.is_empty() {
return Err(PinakesError::PathNotAllowed( return Err(PinakesError::PathNotAllowed(
@ -97,12 +104,14 @@ pub fn validate_path(path: &Path, allowed_roots: &[PathBuf]) -> Result<PathBuf>
/// Validates a path relative to a single root directory. /// Validates a path relative to a single root directory.
/// ///
/// This is a convenience wrapper for `validate_path` when you only have one root. /// This is a convenience wrapper for `validate_path` when you only have one
/// root.
pub fn validate_path_single_root(path: &Path, root: &Path) -> Result<PathBuf> { pub fn validate_path_single_root(path: &Path, root: &Path) -> Result<PathBuf> {
validate_path(path, &[root.to_path_buf()]) validate_path(path, &[root.to_path_buf()])
} }
/// Checks if a path appears to contain traversal sequences without canonicalizing. /// Checks if a path appears to contain traversal sequences without
/// canonicalizing.
/// ///
/// This is a quick pre-check that can reject obviously malicious paths without /// This is a quick pre-check that can reject obviously malicious paths without
/// hitting the filesystem. It should be used in addition to `validate_path`, /// hitting the filesystem. It should be used in addition to `validate_path`,
@ -144,7 +153,8 @@ pub fn sanitize_filename(filename: &str) -> String {
.chars() .chars()
.filter(|c| { .filter(|c| {
// Allow alphanumeric, common punctuation, and unicode letters // Allow alphanumeric, common punctuation, and unicode letters
c.is_alphanumeric() || matches!(*c, '-' | '_' | '.' | ' ' | '(' | ')' | '[' | ']') c.is_alphanumeric()
|| matches!(*c, '-' | '_' | '.' | ' ' | '(' | ')' | '[' | ']')
}) })
.collect(); .collect();
@ -174,7 +184,8 @@ pub fn sanitize_filename(filename: &str) -> String {
/// ///
/// # Returns /// # Returns
/// ///
/// The joined path if safe, or an error if the relative path would escape the base. /// The joined path if safe, or an error if the relative path would escape the
/// base.
pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> { pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> {
// Reject absolute paths in the relative component // Reject absolute paths in the relative component
if relative.starts_with('/') || relative.starts_with('\\') { if relative.starts_with('/') || relative.starts_with('\\') {
@ -210,18 +221,18 @@ pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> {
match component { match component {
Component::Normal(name) => { Component::Normal(name) => {
current = current.join(name); current = current.join(name);
} },
Component::ParentDir => { Component::ParentDir => {
return Err(PinakesError::PathNotAllowed( return Err(PinakesError::PathNotAllowed(
"path traversal detected".to_string(), "path traversal detected".to_string(),
)); ));
} },
Component::CurDir => continue, Component::CurDir => continue,
_ => { _ => {
return Err(PinakesError::PathNotAllowed( return Err(PinakesError::PathNotAllowed(
"invalid path component".to_string(), "invalid path component".to_string(),
)); ));
} },
} }
} }
@ -230,10 +241,12 @@ pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use std::fs; use std::fs;
use tempfile::TempDir; use tempfile::TempDir;
use super::*;
fn setup_test_dirs() -> TempDir { fn setup_test_dirs() -> TempDir {
let temp = TempDir::new().unwrap(); let temp = TempDir::new().unwrap();
fs::create_dir_all(temp.path().join("allowed")).unwrap(); fs::create_dir_all(temp.path().join("allowed")).unwrap();

View file

@ -4,8 +4,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::model::MediaId; use crate::{model::MediaId, users::UserId};
use crate::users::UserId;
/// A user-owned playlist of media items. /// A user-owned playlist of media items.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,8 +1,9 @@
//! Plugin loader for discovering and loading plugins from the filesystem //! Plugin loader for discovering and loading plugins from the filesystem
use std::path::{Path, PathBuf};
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginManifest; use pinakes_plugin_api::PluginManifest;
use std::path::{Path, PathBuf};
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use walkdir::WalkDir; use walkdir::WalkDir;
@ -34,10 +35,10 @@ impl PluginLoader {
Ok(found) => { Ok(found) => {
info!("Found {} plugins in {:?}", found.len(), dir); info!("Found {} plugins in {:?}", found.len(), dir);
manifests.extend(found); manifests.extend(found);
} },
Err(e) => { Err(e) => {
warn!("Error discovering plugins in {:?}: {}", dir, e); warn!("Error discovering plugins in {:?}: {}", dir, e);
} },
} }
} }
@ -45,7 +46,10 @@ impl PluginLoader {
} }
/// Discover plugins in a specific directory /// Discover plugins in a specific directory
async fn discover_in_directory(&self, dir: &Path) -> Result<Vec<PluginManifest>> { async fn discover_in_directory(
&self,
dir: &Path,
) -> Result<Vec<PluginManifest>> {
let mut manifests = Vec::new(); let mut manifests = Vec::new();
// Walk the directory looking for plugin.toml files // Walk the directory looking for plugin.toml files
@ -58,7 +62,7 @@ impl PluginLoader {
Err(e) => { Err(e) => {
warn!("Error reading directory entry: {}", e); warn!("Error reading directory entry: {}", e);
continue; continue;
} },
}; };
let path = entry.path(); let path = entry.path();
@ -71,10 +75,10 @@ impl PluginLoader {
Ok(manifest) => { Ok(manifest) => {
info!("Loaded manifest for plugin: {}", manifest.plugin.name); info!("Loaded manifest for plugin: {}", manifest.plugin.name);
manifests.push(manifest); manifests.push(manifest);
} },
Err(e) => { Err(e) => {
warn!("Failed to load manifest from {:?}: {}", path, e); warn!("Failed to load manifest from {:?}: {}", path, e);
} },
} }
} }
} }
@ -83,7 +87,10 @@ impl PluginLoader {
} }
/// Resolve the WASM binary path from a manifest /// Resolve the WASM binary path from a manifest
pub fn resolve_wasm_path(&self, manifest: &PluginManifest) -> Result<PathBuf> { pub fn resolve_wasm_path(
&self,
manifest: &PluginManifest,
) -> Result<PathBuf> {
// The WASM path in the manifest is relative to the manifest file // The WASM path in the manifest is relative to the manifest file
// We need to search for it in the plugin directories // We need to search for it in the plugin directories
@ -103,7 +110,8 @@ impl PluginLoader {
// Resolve WASM path relative to this directory // Resolve WASM path relative to this directory
let wasm_path = plugin_dir.join(&manifest.plugin.binary.wasm); let wasm_path = plugin_dir.join(&manifest.plugin.binary.wasm);
if wasm_path.exists() { if wasm_path.exists() {
// Verify the resolved path is within the plugin directory (prevent path traversal) // Verify the resolved path is within the plugin directory (prevent path
// traversal)
let canonical_wasm = wasm_path let canonical_wasm = wasm_path
.canonicalize() .canonicalize()
.map_err(|e| anyhow!("Failed to canonicalize WASM path: {}", e))?; .map_err(|e| anyhow!("Failed to canonicalize WASM path: {}", e))?;
@ -189,7 +197,8 @@ impl PluginLoader {
} }
// Write archive to a unique temp file // Write archive to a unique temp file
let temp_archive = dest_dir.join(format!(".download-{}.tar.gz", uuid::Uuid::now_v7())); let temp_archive =
dest_dir.join(format!(".download-{}.tar.gz", uuid::Uuid::now_v7()));
std::fs::write(&temp_archive, &bytes)?; std::fs::write(&temp_archive, &bytes)?;
// Extract using tar with -C to target directory // Extract using tar with -C to target directory
@ -276,7 +285,8 @@ impl PluginLoader {
)); ));
} }
// Verify the WASM path is within the plugin directory (prevent path traversal) // Verify the WASM path is within the plugin directory (prevent path
// traversal)
let canonical_wasm = wasm_path.canonicalize()?; let canonical_wasm = wasm_path.canonicalize()?;
let canonical_path = path.canonicalize()?; let canonical_path = path.canonicalize()?;
if !canonical_wasm.starts_with(&canonical_path) { if !canonical_wasm.starts_with(&canonical_path) {
@ -309,9 +319,10 @@ impl PluginLoader {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use tempfile::TempDir; use tempfile::TempDir;
use super::*;
#[tokio::test] #[tokio::test]
async fn test_discover_plugins_empty() { async fn test_discover_plugins_empty() {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
@ -341,7 +352,8 @@ wasm = "plugin.wasm"
std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap(); std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
// Create dummy WASM file // Create dummy WASM file
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap(); std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00")
.unwrap();
let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]); let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]);
let manifests = loader.discover_plugins().await.unwrap(); let manifests = loader.discover_plugins().await.unwrap();
@ -375,7 +387,8 @@ wasm = "plugin.wasm"
assert!(loader.validate_plugin_package(&plugin_dir).is_err()); assert!(loader.validate_plugin_package(&plugin_dir).is_err());
// Create valid WASM file (magic number only) // Create valid WASM file (magic number only)
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap(); std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00")
.unwrap();
// Should succeed now // Should succeed now
assert!(loader.validate_plugin_package(&plugin_dir).is_ok()); assert!(loader.validate_plugin_package(&plugin_dir).is_ok());

View file

@ -1,7 +1,8 @@
//! Plugin system for Pinakes //! Plugin system for Pinakes
//! //!
//! This module provides a comprehensive plugin architecture that allows extending //! This module provides a comprehensive plugin architecture that allows
//! Pinakes with custom media types, metadata extractors, search backends, and more. //! extending Pinakes with custom media types, metadata extractors, search
//! backends, and more.
//! //!
//! # Architecture //! # Architecture
//! //!
@ -10,10 +11,10 @@
//! - Hot-reload support for development //! - Hot-reload support for development
//! - Automatic plugin discovery from configured directories //! - Automatic plugin discovery from configured directories
use std::{path::PathBuf, sync::Arc};
use anyhow::Result; use anyhow::Result;
use pinakes_plugin_api::{PluginContext, PluginMetadata}; use pinakes_plugin_api::{PluginContext, PluginMetadata};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::{debug, error, info, warn}; use tracing::{debug, error, info, warn};
@ -96,7 +97,11 @@ impl From<crate::config::PluginsConfig> for PluginManagerConfig {
impl PluginManager { impl PluginManager {
/// Create a new plugin manager /// Create a new plugin manager
pub fn new(data_dir: PathBuf, cache_dir: PathBuf, config: PluginManagerConfig) -> Result<Self> { pub fn new(
data_dir: PathBuf,
cache_dir: PathBuf,
config: PluginManagerConfig,
) -> Result<Self> {
// Ensure directories exist // Ensure directories exist
std::fs::create_dir_all(&data_dir)?; std::fs::create_dir_all(&data_dir)?;
std::fs::create_dir_all(&cache_dir)?; std::fs::create_dir_all(&cache_dir)?;
@ -129,10 +134,10 @@ impl PluginManager {
Ok(plugin_id) => { Ok(plugin_id) => {
info!("Loaded plugin: {}", plugin_id); info!("Loaded plugin: {}", plugin_id);
loaded_plugins.push(plugin_id); loaded_plugins.push(plugin_id);
} },
Err(e) => { Err(e) => {
warn!("Failed to load plugin {}: {}", manifest.plugin.name, e); warn!("Failed to load plugin {}: {}", manifest.plugin.name, e);
} },
} }
} }
@ -147,7 +152,10 @@ impl PluginManager {
let plugin_id = manifest.plugin_id(); let plugin_id = manifest.plugin_id();
// Validate plugin_id to prevent path traversal // Validate plugin_id to prevent path traversal
if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") { if plugin_id.contains('/')
|| plugin_id.contains('\\')
|| plugin_id.contains("..")
{
return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id)); return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
} }
@ -179,7 +187,11 @@ impl PluginManager {
( (
k.clone(), k.clone(),
serde_json::to_value(v).unwrap_or_else(|e| { serde_json::to_value(v).unwrap_or_else(|e| {
tracing::warn!("failed to serialize config value for key {}: {}", k, e); tracing::warn!(
"failed to serialize config value for key {}: {}",
k,
e
);
serde_json::Value::Null serde_json::Value::Null
}), }),
) )
@ -193,12 +205,15 @@ impl PluginManager {
let wasm_plugin = self.runtime.load_plugin(&wasm_path, context).await?; let wasm_plugin = self.runtime.load_plugin(&wasm_path, context).await?;
// Initialize plugin // Initialize plugin
let init_succeeded = match wasm_plugin.call_function("initialize", &[]).await { let init_succeeded = match wasm_plugin
.call_function("initialize", &[])
.await
{
Ok(_) => true, Ok(_) => true,
Err(e) => { Err(e) => {
tracing::warn!(plugin_id = %plugin_id, "plugin initialization failed: {}", e); tracing::warn!(plugin_id = %plugin_id, "plugin initialization failed: {}", e);
false false
} },
}; };
// Register plugin // Register plugin
@ -207,7 +222,11 @@ impl PluginManager {
name: manifest.plugin.name.clone(), name: manifest.plugin.name.clone(),
version: manifest.plugin.version.clone(), version: manifest.plugin.version.clone(),
author: manifest.plugin.author.clone().unwrap_or_default(), author: manifest.plugin.author.clone().unwrap_or_default(),
description: manifest.plugin.description.clone().unwrap_or_default(), description: manifest
.plugin
.description
.clone()
.unwrap_or_default(),
api_version: manifest.plugin.api_version.clone(), api_version: manifest.plugin.api_version.clone(),
capabilities_required: capabilities, capabilities_required: capabilities,
}; };
@ -238,7 +257,8 @@ impl PluginManager {
info!("Installing plugin from: {}", source); info!("Installing plugin from: {}", source);
// Download/copy plugin to plugins directory // Download/copy plugin to plugins directory
let plugin_path = if source.starts_with("http://") || source.starts_with("https://") { let plugin_path =
if source.starts_with("http://") || source.starts_with("https://") {
// Download from URL // Download from URL
self.loader.download_plugin(source).await? self.loader.download_plugin(source).await?
} else { } else {
@ -248,7 +268,8 @@ impl PluginManager {
// Load the manifest // Load the manifest
let manifest_path = plugin_path.join("plugin.toml"); let manifest_path = plugin_path.join("plugin.toml");
let manifest = pinakes_plugin_api::PluginManifest::from_file(&manifest_path)?; let manifest =
pinakes_plugin_api::PluginManifest::from_file(&manifest_path)?;
// Load the plugin // Load the plugin
self.load_plugin_from_manifest(&manifest).await self.load_plugin_from_manifest(&manifest).await
@ -257,7 +278,10 @@ impl PluginManager {
/// Uninstall a plugin /// Uninstall a plugin
pub async fn uninstall_plugin(&self, plugin_id: &str) -> Result<()> { pub async fn uninstall_plugin(&self, plugin_id: &str) -> Result<()> {
// Validate plugin_id to prevent path traversal // Validate plugin_id to prevent path traversal
if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") { if plugin_id.contains('/')
|| plugin_id.contains('\\')
|| plugin_id.contains("..")
{
return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id)); return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
} }
@ -314,7 +338,8 @@ impl PluginManager {
info!("Shutting down all plugins"); info!("Shutting down all plugins");
let registry = self.registry.read().await; let registry = self.registry.read().await;
let plugin_ids: Vec<String> = registry.list_all().iter().map(|p| p.id.clone()).collect(); let plugin_ids: Vec<String> =
registry.list_all().iter().map(|p| p.id.clone()).collect();
for plugin_id in plugin_ids { for plugin_id in plugin_ids {
if let Err(e) = self.shutdown_plugin(&plugin_id).await { if let Err(e) = self.shutdown_plugin(&plugin_id).await {
@ -355,14 +380,16 @@ impl PluginManager {
info!("Reloading plugin: {}", plugin_id); info!("Reloading plugin: {}", plugin_id);
// Re-read the manifest from disk if possible, falling back to cached version // Re-read the manifest from disk if possible, falling back to cached
// version
let manifest = { let manifest = {
let registry = self.registry.read().await; let registry = self.registry.read().await;
let plugin = registry let plugin = registry
.get(plugin_id) .get(plugin_id)
.ok_or_else(|| anyhow::anyhow!("Plugin not found"))?; .ok_or_else(|| anyhow::anyhow!("Plugin not found"))?;
if let Some(ref manifest_path) = plugin.manifest_path { if let Some(ref manifest_path) = plugin.manifest_path {
pinakes_plugin_api::PluginManifest::from_file(manifest_path).unwrap_or_else(|e| { pinakes_plugin_api::PluginManifest::from_file(manifest_path)
.unwrap_or_else(|e| {
warn!("Failed to re-read manifest from disk, using cached: {}", e); warn!("Failed to re-read manifest from disk, using cached: {}", e);
plugin.manifest.clone() plugin.manifest.clone()
}) })
@ -387,9 +414,10 @@ impl PluginManager {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use tempfile::TempDir; use tempfile::TempDir;
use super::*;
#[tokio::test] #[tokio::test]
async fn test_plugin_manager_creation() { async fn test_plugin_manager_creation() {
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
@ -397,7 +425,8 @@ mod tests {
let cache_dir = temp_dir.path().join("cache"); let cache_dir = temp_dir.path().join("cache");
let config = PluginManagerConfig::default(); let config = PluginManagerConfig::default();
let manager = PluginManager::new(data_dir.clone(), cache_dir.clone(), config); let manager =
PluginManager::new(data_dir.clone(), cache_dir.clone(), config);
assert!(manager.is_ok()); assert!(manager.is_ok());
assert!(data_dir.exists()); assert!(data_dir.exists());

View file

@ -1,10 +1,9 @@
//! Plugin registry for managing loaded plugins //! Plugin registry for managing loaded plugins
use std::path::PathBuf; use std::{collections::HashMap, path::PathBuf};
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
use pinakes_plugin_api::{PluginManifest, PluginMetadata}; use pinakes_plugin_api::{PluginManifest, PluginMetadata};
use std::collections::HashMap;
use super::runtime::WasmPlugin; use super::runtime::WasmPlugin;
@ -45,7 +44,8 @@ impl PluginRegistry {
/// Unregister a plugin by ID /// Unregister a plugin by ID
pub fn unregister(&mut self, plugin_id: &str) -> Result<()> { pub fn unregister(&mut self, plugin_id: &str) -> Result<()> {
self.plugins self
.plugins
.remove(plugin_id) .remove(plugin_id)
.ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?; .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
Ok(()) Ok(())
@ -105,7 +105,8 @@ impl PluginRegistry {
/// Get plugins by kind (e.g., "media_type", "metadata_extractor") /// Get plugins by kind (e.g., "media_type", "metadata_extractor")
pub fn get_by_kind(&self, kind: &str) -> Vec<&RegisteredPlugin> { pub fn get_by_kind(&self, kind: &str) -> Vec<&RegisteredPlugin> {
self.plugins self
.plugins
.values() .values()
.filter(|p| p.manifest.plugin.kind.contains(&kind.to_string())) .filter(|p| p.manifest.plugin.kind.contains(&kind.to_string()))
.collect() .collect()
@ -130,10 +131,12 @@ impl Default for PluginRegistry {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use pinakes_plugin_api::Capabilities;
use std::collections::HashMap; use std::collections::HashMap;
use pinakes_plugin_api::Capabilities;
use super::*;
fn create_test_plugin(id: &str, kind: Vec<String>) -> RegisteredPlugin { fn create_test_plugin(id: &str, kind: Vec<String>) -> RegisteredPlugin {
let manifest = PluginManifest { let manifest = PluginManifest {
plugin: pinakes_plugin_api::manifest::PluginInfo { plugin: pinakes_plugin_api::manifest::PluginInfo {
@ -176,7 +179,8 @@ mod tests {
#[test] #[test]
fn test_registry_register_and_get() { fn test_registry_register_and_get() {
let mut registry = PluginRegistry::new(); let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin.clone()).unwrap(); registry.register(plugin.clone()).unwrap();
@ -187,7 +191,8 @@ mod tests {
#[test] #[test]
fn test_registry_duplicate_register() { fn test_registry_duplicate_register() {
let mut registry = PluginRegistry::new(); let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin.clone()).unwrap(); registry.register(plugin.clone()).unwrap();
let result = registry.register(plugin); let result = registry.register(plugin);
@ -198,7 +203,8 @@ mod tests {
#[test] #[test]
fn test_registry_unregister() { fn test_registry_unregister() {
let mut registry = PluginRegistry::new(); let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin).unwrap(); registry.register(plugin).unwrap();
registry.unregister("test-plugin").unwrap(); registry.unregister("test-plugin").unwrap();
@ -209,7 +215,8 @@ mod tests {
#[test] #[test]
fn test_registry_enable_disable() { fn test_registry_enable_disable() {
let mut registry = PluginRegistry::new(); let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin).unwrap(); registry.register(plugin).unwrap();
assert_eq!(registry.is_enabled("test-plugin"), Some(true)); assert_eq!(registry.is_enabled("test-plugin"), Some(true));
@ -228,22 +235,19 @@ mod tests {
let mut registry = PluginRegistry::new(); let mut registry = PluginRegistry::new();
registry registry
.register(create_test_plugin( .register(create_test_plugin("plugin1", vec![
"plugin1", "media_type".to_string(),
vec!["media_type".to_string()], ]))
))
.unwrap(); .unwrap();
registry registry
.register(create_test_plugin( .register(create_test_plugin("plugin2", vec![
"plugin2", "metadata_extractor".to_string(),
vec!["metadata_extractor".to_string()], ]))
))
.unwrap(); .unwrap();
registry registry
.register(create_test_plugin( .register(create_test_plugin("plugin3", vec![
"plugin3", "media_type".to_string(),
vec!["media_type".to_string()], ]))
))
.unwrap(); .unwrap();
let media_type_plugins = registry.get_by_kind("media_type"); let media_type_plugins = registry.get_by_kind("media_type");
@ -258,16 +262,14 @@ mod tests {
let mut registry = PluginRegistry::new(); let mut registry = PluginRegistry::new();
registry registry
.register(create_test_plugin( .register(create_test_plugin("plugin1", vec![
"plugin1", "media_type".to_string(),
vec!["media_type".to_string()], ]))
))
.unwrap(); .unwrap();
registry registry
.register(create_test_plugin( .register(create_test_plugin("plugin2", vec![
"plugin2", "media_type".to_string(),
vec!["media_type".to_string()], ]))
))
.unwrap(); .unwrap();
assert_eq!(registry.count(), 2); assert_eq!(registry.count(), 2);

View file

@ -1,9 +1,9 @@
//! WASM runtime for executing plugins //! WASM runtime for executing plugins
use std::{path::Path, sync::Arc};
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginContext; use pinakes_plugin_api::PluginContext;
use std::path::Path;
use std::sync::Arc;
use wasmtime::*; use wasmtime::*;
/// WASM runtime wrapper for executing plugins /// WASM runtime wrapper for executing plugins
@ -75,7 +75,11 @@ impl WasmPlugin {
/// ///
/// Creates a fresh store and instance per invocation with host functions /// Creates a fresh store and instance per invocation with host functions
/// linked, calls the requested exported function, and returns the result. /// linked, calls the requested exported function, and returns the result.
pub async fn call_function(&self, function_name: &str, params: &[u8]) -> Result<Vec<u8>> { pub async fn call_function(
&self,
function_name: &str,
params: &[u8],
) -> Result<Vec<u8>> {
let engine = self.module.engine(); let engine = self.module.engine();
// Create store with per-invocation data // Create store with per-invocation data
@ -108,12 +112,17 @@ impl WasmPlugin {
if !params.is_empty() if !params.is_empty()
&& let Some(mem) = &memory && let Some(mem) = &memory
{ {
// Call the plugin's alloc function if available, otherwise write at offset 0 // Call the plugin's alloc function if available, otherwise write at
let offset = if let Ok(alloc) = instance.get_typed_func::<i32, i32>(&mut store, "alloc") // offset 0
let offset = if let Ok(alloc) =
instance.get_typed_func::<i32, i32>(&mut store, "alloc")
{ {
let result = alloc.call_async(&mut store, params.len() as i32).await?; let result = alloc.call_async(&mut store, params.len() as i32).await?;
if result < 0 { if result < 0 {
return Err(anyhow!("plugin alloc returned negative offset: {}", result)); return Err(anyhow!(
"plugin alloc returned negative offset: {}",
result
));
} }
result as usize result as usize
} else { } else {
@ -128,9 +137,12 @@ impl WasmPlugin {
} }
// Look up the exported function and call it // Look up the exported function and call it
let func = instance let func =
instance
.get_func(&mut store, function_name) .get_func(&mut store, function_name)
.ok_or_else(|| anyhow!("exported function '{}' not found", function_name))?; .ok_or_else(|| {
anyhow!("exported function '{}' not found", function_name)
})?;
let func_ty = func.ty(&store); let func_ty = func.ty(&store);
let param_count = func_ty.params().len(); let param_count = func_ty.params().len();
@ -141,7 +153,8 @@ impl WasmPlugin {
// Call with appropriate params based on function signature // Call with appropriate params based on function signature
if param_count == 2 && !params.is_empty() { if param_count == 2 && !params.is_empty() {
// Convention: (ptr, len) // Convention: (ptr, len)
func.call_async( func
.call_async(
&mut store, &mut store,
&[Val::I32(alloc_offset), Val::I32(params.len() as i32)], &[Val::I32(alloc_offset), Val::I32(params.len() as i32)],
&mut results, &mut results,
@ -151,8 +164,10 @@ impl WasmPlugin {
func.call_async(&mut store, &[], &mut results).await?; func.call_async(&mut store, &[], &mut results).await?;
} else { } else {
// Generic: fill with zeroes // Generic: fill with zeroes
let params_vals: Vec<Val> = (0..param_count).map(|_| Val::I32(0)).collect(); let params_vals: Vec<Val> =
func.call_async(&mut store, &params_vals, &mut results) (0..param_count).map(|_| Val::I32(0)).collect();
func
.call_async(&mut store, &params_vals, &mut results)
.await?; .await?;
} }
@ -199,7 +214,10 @@ impl HostFunctions {
linker.func_wrap( linker.func_wrap(
"env", "env",
"host_log", "host_log",
|mut caller: Caller<'_, PluginStoreData>, level: i32, ptr: i32, len: i32| { |mut caller: Caller<'_, PluginStoreData>,
level: i32,
ptr: i32,
len: i32| {
if ptr < 0 || len < 0 { if ptr < 0 || len < 0 {
return; return;
} }
@ -226,7 +244,10 @@ impl HostFunctions {
linker.func_wrap( linker.func_wrap(
"env", "env",
"host_read_file", "host_read_file",
|mut caller: Caller<'_, PluginStoreData>, path_ptr: i32, path_len: i32| -> i32 { |mut caller: Caller<'_, PluginStoreData>,
path_ptr: i32,
path_len: i32|
-> i32 {
if path_ptr < 0 || path_len < 0 { if path_ptr < 0 || path_len < 0 {
return -1; return -1;
} }
@ -259,7 +280,9 @@ impl HostFunctions {
.filesystem .filesystem
.read .read
.iter() .iter()
.any(|allowed| allowed.canonicalize().is_ok_and(|a| path.starts_with(a))); .any(|allowed| {
allowed.canonicalize().is_ok_and(|a| path.starts_with(a))
});
if !can_read { if !can_read {
tracing::warn!(path = %path_str, "plugin read access denied"); tracing::warn!(path = %path_str, "plugin read access denied");
@ -271,7 +294,7 @@ impl HostFunctions {
let len = contents.len() as i32; let len = contents.len() as i32;
caller.data_mut().exchange_buffer = contents; caller.data_mut().exchange_buffer = contents;
len len
} },
Err(_) => -1, Err(_) => -1,
} }
}, },
@ -303,7 +326,8 @@ impl HostFunctions {
return -1; return -1;
} }
let path_str = match std::str::from_utf8(&mem_data[path_start..path_end]) { let path_str =
match std::str::from_utf8(&mem_data[path_start..path_end]) {
Ok(s) => s.to_string(), Ok(s) => s.to_string(),
Err(_) => return -1, Err(_) => return -1,
}; };
@ -314,7 +338,8 @@ impl HostFunctions {
let canonical = if path.exists() { let canonical = if path.exists() {
path.canonicalize().ok() path.canonicalize().ok()
} else { } else {
path.parent() path
.parent()
.and_then(|p| p.canonicalize().ok()) .and_then(|p| p.canonicalize().ok())
.map(|p| p.join(path.file_name().unwrap_or_default())) .map(|p| p.join(path.file_name().unwrap_or_default()))
}; };
@ -352,7 +377,10 @@ impl HostFunctions {
linker.func_wrap( linker.func_wrap(
"env", "env",
"host_http_request", "host_http_request",
|mut caller: Caller<'_, PluginStoreData>, url_ptr: i32, url_len: i32| -> i32 { |mut caller: Caller<'_, PluginStoreData>,
url_ptr: i32,
url_len: i32|
-> i32 {
if url_ptr < 0 || url_len < 0 { if url_ptr < 0 || url_len < 0 {
return -1; return -1;
} }
@ -378,7 +406,8 @@ impl HostFunctions {
} }
// Use block_in_place to avoid blocking the async runtime's thread pool. // Use block_in_place to avoid blocking the async runtime's thread pool.
// Falls back to a blocking client with timeout if block_in_place is unavailable. // Falls back to a blocking client with timeout if block_in_place is
// unavailable.
let result = std::panic::catch_unwind(|| { let result = std::panic::catch_unwind(|| {
tokio::task::block_in_place(|| { tokio::task::block_in_place(|| {
tokio::runtime::Handle::current().block_on(async { tokio::runtime::Handle::current().block_on(async {
@ -402,7 +431,7 @@ impl HostFunctions {
let len = bytes.len() as i32; let len = bytes.len() as i32;
caller.data_mut().exchange_buffer = bytes.to_vec(); caller.data_mut().exchange_buffer = bytes.to_vec();
len len
} },
Ok(Err(_)) => -1, Ok(Err(_)) => -1,
Err(_) => { Err(_) => {
// block_in_place panicked (e.g. current-thread runtime); // block_in_place panicked (e.g. current-thread runtime);
@ -415,17 +444,19 @@ impl HostFunctions {
Err(_) => return -1, Err(_) => return -1,
}; };
match client.get(&url_str).send() { match client.get(&url_str).send() {
Ok(resp) => match resp.bytes() { Ok(resp) => {
match resp.bytes() {
Ok(bytes) => { Ok(bytes) => {
let len = bytes.len() as i32; let len = bytes.len() as i32;
caller.data_mut().exchange_buffer = bytes.to_vec(); caller.data_mut().exchange_buffer = bytes.to_vec();
len len
}
Err(_) => -1,
}, },
Err(_) => -1, Err(_) => -1,
} }
},
Err(_) => -1,
} }
},
} }
}, },
)?; )?;
@ -434,7 +465,10 @@ impl HostFunctions {
linker.func_wrap( linker.func_wrap(
"env", "env",
"host_get_config", "host_get_config",
|mut caller: Caller<'_, PluginStoreData>, key_ptr: i32, key_len: i32| -> i32 { |mut caller: Caller<'_, PluginStoreData>,
key_ptr: i32,
key_len: i32|
-> i32 {
if key_ptr < 0 || key_len < 0 { if key_ptr < 0 || key_len < 0 {
return -1; return -1;
} }
@ -460,7 +494,7 @@ impl HostFunctions {
let len = bytes.len() as i32; let len = bytes.len() as i32;
caller.data_mut().exchange_buffer = bytes; caller.data_mut().exchange_buffer = bytes;
len len
} },
None => -1, None => -1,
} }
}, },
@ -470,7 +504,10 @@ impl HostFunctions {
linker.func_wrap( linker.func_wrap(
"env", "env",
"host_get_buffer", "host_get_buffer",
|mut caller: Caller<'_, PluginStoreData>, dest_ptr: i32, dest_len: i32| -> i32 { |mut caller: Caller<'_, PluginStoreData>,
dest_ptr: i32,
dest_len: i32|
-> i32 {
if dest_ptr < 0 || dest_len < 0 { if dest_ptr < 0 || dest_len < 0 {
return -1; return -1;
} }
@ -497,10 +534,12 @@ impl HostFunctions {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use pinakes_plugin_api::PluginContext;
use std::collections::HashMap; use std::collections::HashMap;
use pinakes_plugin_api::PluginContext;
use super::*;
#[test] #[test]
fn test_wasm_runtime_creation() { fn test_wasm_runtime_creation() {
let runtime = WasmRuntime::new(); let runtime = WasmRuntime::new();

View file

@ -1,8 +1,9 @@
//! Capability-based security for plugins //! Capability-based security for plugins
use std::path::{Path, PathBuf};
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
use pinakes_plugin_api::Capabilities; use pinakes_plugin_api::Capabilities;
use std::path::{Path, PathBuf};
/// Capability enforcer validates and enforces plugin capabilities /// Capability enforcer validates and enforces plugin capabilities
pub struct CapabilityEnforcer { pub struct CapabilityEnforcer {
@ -65,7 +66,10 @@ impl CapabilityEnforcer {
} }
/// Validate capabilities requested by a plugin /// Validate capabilities requested by a plugin
pub fn validate_capabilities(&self, capabilities: &Capabilities) -> Result<()> { pub fn validate_capabilities(
&self,
capabilities: &Capabilities,
) -> Result<()> {
// Validate memory limit // Validate memory limit
if let Some(memory) = capabilities.max_memory_bytes if let Some(memory) = capabilities.max_memory_bytes
&& memory > self.max_memory_limit && memory > self.max_memory_limit
@ -94,7 +98,8 @@ impl CapabilityEnforcer {
// Validate network access // Validate network access
if capabilities.network.enabled && !self.allow_network_default { if capabilities.network.enabled && !self.allow_network_default {
return Err(anyhow!( return Err(anyhow!(
"Plugin requests network access, but network access is disabled by policy" "Plugin requests network access, but network access is disabled by \
policy"
)); ));
} }
@ -102,7 +107,10 @@ impl CapabilityEnforcer {
} }
/// Validate filesystem access capabilities /// Validate filesystem access capabilities
fn validate_filesystem_access(&self, capabilities: &Capabilities) -> Result<()> { fn validate_filesystem_access(
&self,
capabilities: &Capabilities,
) -> Result<()> {
// Check read paths // Check read paths
for path in &capabilities.filesystem.read { for path in &capabilities.filesystem.read {
if !self.is_read_allowed(path) { if !self.is_read_allowed(path) {
@ -149,7 +157,8 @@ impl CapabilityEnforcer {
let canonical = if path.exists() { let canonical = if path.exists() {
path.canonicalize().ok() path.canonicalize().ok()
} else { } else {
path.parent() path
.parent()
.and_then(|p| p.canonicalize().ok()) .and_then(|p| p.canonicalize().ok())
.map(|p| p.join(path.file_name().unwrap_or_default())) .map(|p| p.join(path.file_name().unwrap_or_default()))
}; };
@ -169,7 +178,11 @@ impl CapabilityEnforcer {
} }
/// Check if a specific domain is allowed /// Check if a specific domain is allowed
pub fn is_domain_allowed(&self, capabilities: &Capabilities, domain: &str) -> bool { pub fn is_domain_allowed(
&self,
capabilities: &Capabilities,
domain: &str,
) -> bool {
if !capabilities.network.enabled { if !capabilities.network.enabled {
return false; return false;
} }
@ -213,10 +226,11 @@ impl Default for CapabilityEnforcer {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
#[allow(unused_imports)] #[allow(unused_imports)]
use pinakes_plugin_api::{FilesystemCapability, NetworkCapability}; use pinakes_plugin_api::{FilesystemCapability, NetworkCapability};
use super::*;
#[test] #[test]
fn test_validate_memory_limit() { fn test_validate_memory_limit() {
let enforcer = CapabilityEnforcer::new().with_max_memory(100 * 1024 * 1024); // 100 MB let enforcer = CapabilityEnforcer::new().with_max_memory(100 * 1024 * 1024); // 100 MB
@ -250,7 +264,8 @@ mod tests {
let test_file = allowed_dir.join("test.txt"); let test_file = allowed_dir.join("test.txt");
std::fs::write(&test_file, "test").unwrap(); std::fs::write(&test_file, "test").unwrap();
let enforcer = CapabilityEnforcer::new().allow_read_path(allowed_dir.clone()); let enforcer =
CapabilityEnforcer::new().allow_read_path(allowed_dir.clone());
assert!(enforcer.is_read_allowed(&test_file)); assert!(enforcer.is_read_allowed(&test_file));
assert!(!enforcer.is_read_allowed(Path::new("/etc/passwd"))); assert!(!enforcer.is_read_allowed(Path::new("/etc/passwd")));
@ -271,7 +286,8 @@ mod tests {
let existing = output_dir.join("file.txt"); let existing = output_dir.join("file.txt");
std::fs::write(&existing, "test").unwrap(); std::fs::write(&existing, "test").unwrap();
let enforcer = CapabilityEnforcer::new().allow_write_path(output_dir.clone()); let enforcer =
CapabilityEnforcer::new().allow_write_path(output_dir.clone());
assert!(enforcer.is_write_allowed(&existing)); assert!(enforcer.is_write_allowed(&existing));
// New file in allowed dir (parent exists) // New file in allowed dir (parent exists)

View file

@ -1,14 +1,17 @@
use std::path::{Path, PathBuf}; use std::{
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; path::{Path, PathBuf},
use std::sync::{Arc, Mutex}; sync::{
Arc,
Mutex,
atomic::{AtomicBool, AtomicUsize, Ordering},
},
};
use notify::{PollWatcher, RecursiveMode, Watcher}; use notify::{PollWatcher, RecursiveMode, Watcher};
use tokio::sync::mpsc; use tokio::sync::mpsc;
use tracing::{info, warn}; use tracing::{info, warn};
use crate::error::Result; use crate::{error::Result, import, storage::DynStorageBackend};
use crate::import;
use crate::storage::DynStorageBackend;
pub struct ScanStatus { pub struct ScanStatus {
pub scanning: bool, pub scanning: bool,
@ -28,7 +31,8 @@ pub struct ScanOptions {
pub force_full: bool, pub force_full: bool,
} }
/// Shared scan progress that can be read by the status endpoint while a scan runs. /// Shared scan progress that can be read by the status endpoint while a scan
/// runs.
#[derive(Clone)] #[derive(Clone)]
pub struct ScanProgress { pub struct ScanProgress {
pub is_scanning: Arc<AtomicBool>, pub is_scanning: Arc<AtomicBool>,
@ -101,7 +105,14 @@ pub async fn scan_directory(
dir: &Path, dir: &Path,
ignore_patterns: &[String], ignore_patterns: &[String],
) -> Result<ScanStatus> { ) -> Result<ScanStatus> {
scan_directory_with_options(storage, dir, ignore_patterns, None, &ScanOptions::default()).await scan_directory_with_options(
storage,
dir,
ignore_patterns,
None,
&ScanOptions::default(),
)
.await
} }
/// Scan a directory with incremental scanning support /// Scan a directory with incremental scanning support
@ -114,7 +125,8 @@ pub async fn scan_directory_incremental(
incremental: true, incremental: true,
force_full: false, force_full: false,
}; };
scan_directory_with_options(storage, dir, ignore_patterns, None, &options).await scan_directory_with_options(storage, dir, ignore_patterns, None, &options)
.await
} }
pub async fn scan_directory_with_progress( pub async fn scan_directory_with_progress(
@ -133,7 +145,8 @@ pub async fn scan_directory_with_progress(
.await .await
} }
/// Scan a directory with full options including progress tracking and incremental mode /// Scan a directory with full options including progress tracking and
/// incremental mode
pub async fn scan_directory_with_options( pub async fn scan_directory_with_options(
storage: &DynStorageBackend, storage: &DynStorageBackend,
dir: &Path, dir: &Path,
@ -179,14 +192,14 @@ pub async fn scan_directory_with_options(
} else { } else {
processed += 1; processed += 1;
} }
} },
Err(e) => { Err(e) => {
let msg = e.to_string(); let msg = e.to_string();
if let Some(p) = progress { if let Some(p) = progress {
p.record_error(msg.clone()); p.record_error(msg.clone());
} }
errors.push(msg); errors.push(msg);
} },
} }
} }
@ -221,7 +234,13 @@ pub async fn scan_all_roots(
storage: &DynStorageBackend, storage: &DynStorageBackend,
ignore_patterns: &[String], ignore_patterns: &[String],
) -> Result<Vec<ScanStatus>> { ) -> Result<Vec<ScanStatus>> {
scan_all_roots_with_options(storage, ignore_patterns, None, &ScanOptions::default()).await scan_all_roots_with_options(
storage,
ignore_patterns,
None,
&ScanOptions::default(),
)
.await
} }
/// Scan all roots incrementally (skip unchanged files) /// Scan all roots incrementally (skip unchanged files)
@ -241,7 +260,13 @@ pub async fn scan_all_roots_with_progress(
ignore_patterns: &[String], ignore_patterns: &[String],
progress: Option<&ScanProgress>, progress: Option<&ScanProgress>,
) -> Result<Vec<ScanStatus>> { ) -> Result<Vec<ScanStatus>> {
scan_all_roots_with_options(storage, ignore_patterns, progress, &ScanOptions::default()).await scan_all_roots_with_options(
storage,
ignore_patterns,
progress,
&ScanOptions::default(),
)
.await
} }
/// Scan all roots with full options including progress and incremental mode /// Scan all roots with full options including progress and incremental mode
@ -255,7 +280,13 @@ pub async fn scan_all_roots_with_options(
let mut statuses = Vec::new(); let mut statuses = Vec::new();
for root in roots { for root in roots {
match scan_directory_with_options(storage, &root, ignore_patterns, progress, scan_options) match scan_directory_with_options(
storage,
&root,
ignore_patterns,
progress,
scan_options,
)
.await .await
{ {
Ok(status) => statuses.push(status), Ok(status) => statuses.push(status),
@ -268,7 +299,7 @@ pub async fn scan_all_roots_with_options(
files_skipped: 0, files_skipped: 0,
errors: vec![e.to_string()], errors: vec![e.to_string()],
}); });
} },
} }
} }
@ -285,15 +316,18 @@ impl FileWatcher {
let (tx, rx) = mpsc::channel(1024); let (tx, rx) = mpsc::channel(1024);
// Try the recommended (native) watcher first, fall back to polling // Try the recommended (native) watcher first, fall back to polling
let watcher: Box<dyn Watcher + Send> = match Self::try_native_watcher(dirs, tx.clone()) { let watcher: Box<dyn Watcher + Send> = match Self::try_native_watcher(
dirs,
tx.clone(),
) {
Ok(w) => { Ok(w) => {
info!("using native filesystem watcher"); info!("using native filesystem watcher");
w w
} },
Err(native_err) => { Err(native_err) => {
warn!(error = %native_err, "native watcher failed, falling back to polling"); warn!(error = %native_err, "native watcher failed, falling back to polling");
Self::polling_watcher(dirs, tx)? Self::polling_watcher(dirs, tx)?
} },
}; };
Ok(Self { Ok(Self {
@ -307,8 +341,8 @@ impl FileWatcher {
tx: mpsc::Sender<PathBuf>, tx: mpsc::Sender<PathBuf>,
) -> std::result::Result<Box<dyn Watcher + Send>, notify::Error> { ) -> std::result::Result<Box<dyn Watcher + Send>, notify::Error> {
let tx_clone = tx.clone(); let tx_clone = tx.clone();
let mut watcher = let mut watcher = notify::recommended_watcher(
notify::recommended_watcher(move |res: notify::Result<notify::Event>| { move |res: notify::Result<notify::Event>| {
if let Ok(event) = res { if let Ok(event) = res {
for path in event.paths { for path in event.paths {
if tx_clone.blocking_send(path).is_err() { if tx_clone.blocking_send(path).is_err() {
@ -317,7 +351,8 @@ impl FileWatcher {
} }
} }
} }
})?; },
)?;
for dir in dirs { for dir in dirs {
watcher.watch(dir, RecursiveMode::Recursive)?; watcher.watch(dir, RecursiveMode::Recursive)?;
@ -350,9 +385,9 @@ impl FileWatcher {
.map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))?; .map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))?;
for dir in dirs { for dir in dirs {
watcher watcher.watch(dir, RecursiveMode::Recursive).map_err(|e| {
.watch(dir, RecursiveMode::Recursive) crate::error::PinakesError::Io(std::io::Error::other(e))
.map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))?; })?;
} }
Ok(Box::new(watcher)) Ok(Box::new(watcher))

View file

@ -1,5 +1,4 @@
use std::path::PathBuf; use std::{path::PathBuf, sync::Arc};
use std::sync::Arc;
use chrono::{DateTime, Datelike, Utc}; use chrono::{DateTime, Datelike, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -7,21 +6,34 @@ use tokio::sync::RwLock;
use tokio_util::sync::CancellationToken; use tokio_util::sync::CancellationToken;
use uuid::Uuid; use uuid::Uuid;
use crate::config::Config; use crate::{
use crate::jobs::{JobKind, JobQueue}; config::Config,
jobs::{JobKind, JobQueue},
};
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")] #[serde(rename_all = "snake_case", tag = "type")]
pub enum Schedule { pub enum Schedule {
Interval { secs: u64 }, Interval {
Daily { hour: u32, minute: u32 }, secs: u64,
Weekly { day: u32, hour: u32, minute: u32 }, },
Daily {
hour: u32,
minute: u32,
},
Weekly {
day: u32,
hour: u32,
minute: u32,
},
} }
impl Schedule { impl Schedule {
pub fn next_run(&self, from: DateTime<Utc>) -> DateTime<Utc> { pub fn next_run(&self, from: DateTime<Utc>) -> DateTime<Utc> {
match self { match self {
Schedule::Interval { secs } => from + chrono::Duration::seconds(*secs as i64), Schedule::Interval { secs } => {
from + chrono::Duration::seconds(*secs as i64)
},
Schedule::Daily { hour, minute } => { Schedule::Daily { hour, minute } => {
let today = from let today = from
.date_naive() .date_naive()
@ -33,7 +45,7 @@ impl Schedule {
} else { } else {
today_utc + chrono::Duration::days(1) today_utc + chrono::Duration::days(1)
} }
} },
Schedule::Weekly { day, hour, minute } => { Schedule::Weekly { day, hour, minute } => {
let current_day = from.weekday().num_days_from_monday(); let current_day = from.weekday().num_days_from_monday();
let target_day = *day; let target_day = *day;
@ -52,12 +64,13 @@ impl Schedule {
} }
7 7
}; };
let target_date = from.date_naive() + chrono::Duration::days(days_ahead as i64); let target_date =
from.date_naive() + chrono::Duration::days(days_ahead as i64);
target_date target_date
.and_hms_opt(*hour, *minute, 0) .and_hms_opt(*hour, *minute, 0)
.unwrap_or_default() .unwrap_or_default()
.and_utc() .and_utc()
} },
} }
} }
@ -71,8 +84,10 @@ impl Schedule {
} else { } else {
format!("Every {}s", secs) format!("Every {}s", secs)
} }
} },
Schedule::Daily { hour, minute } => format!("Daily {hour:02}:{minute:02}"), Schedule::Daily { hour, minute } => {
format!("Daily {hour:02}:{minute:02}")
},
Schedule::Weekly { day, hour, minute } => { Schedule::Weekly { day, hour, minute } => {
let day_name = match day { let day_name = match day {
0 => "Mon", 0 => "Mon",
@ -84,7 +99,7 @@ impl Schedule {
_ => "Sun", _ => "Sun",
}; };
format!("{day_name} {hour:02}:{minute:02}") format!("{day_name} {hour:02}:{minute:02}")
} },
} }
} }
} }
@ -99,10 +114,12 @@ pub struct ScheduledTask {
pub last_run: Option<DateTime<Utc>>, pub last_run: Option<DateTime<Utc>>,
pub next_run: Option<DateTime<Utc>>, pub next_run: Option<DateTime<Utc>>,
pub last_status: Option<String>, pub last_status: Option<String>,
/// Whether a job for this task is currently running. Skipped during serialization. /// Whether a job for this task is currently running. Skipped during
/// serialization.
#[serde(default, skip_serializing)] #[serde(default, skip_serializing)]
pub running: bool, pub running: bool,
/// The job ID of the last submitted job. Skipped during serialization/deserialization. /// The job ID of the last submitted job. Skipped during
/// serialization/deserialization.
#[serde(skip)] #[serde(skip)]
pub last_job_id: Option<Uuid>, pub last_job_id: Option<Uuid>,
} }
@ -156,7 +173,10 @@ impl TaskScheduler {
id: "orphan_detection".to_string(), id: "orphan_detection".to_string(),
name: "Orphan Detection".to_string(), name: "Orphan Detection".to_string(),
kind: JobKind::OrphanDetection, kind: JobKind::OrphanDetection,
schedule: Schedule::Daily { hour: 2, minute: 0 }, schedule: Schedule::Daily {
hour: 2,
minute: 0,
},
enabled: false, enabled: false,
last_run: None, last_run: None,
next_run: None, next_run: None,
@ -191,7 +211,8 @@ impl TaskScheduler {
} }
} }
/// Restore saved task state from config. Should be called once after construction. /// Restore saved task state from config. Should be called once after
/// construction.
pub async fn restore_state(&self) { pub async fn restore_state(&self) {
let saved = self.config.read().await.scheduled_tasks.clone(); let saved = self.config.read().await.scheduled_tasks.clone();
if saved.is_empty() { if saved.is_empty() {
@ -224,11 +245,13 @@ impl TaskScheduler {
let tasks = self.tasks.read().await; let tasks = self.tasks.read().await;
let task_configs: Vec<crate::config::ScheduledTaskConfig> = tasks let task_configs: Vec<crate::config::ScheduledTaskConfig> = tasks
.iter() .iter()
.map(|t| crate::config::ScheduledTaskConfig { .map(|t| {
crate::config::ScheduledTaskConfig {
id: t.id.clone(), id: t.id.clone(),
enabled: t.enabled, enabled: t.enabled,
schedule: t.schedule.clone(), schedule: t.schedule.clone(),
last_run: t.last_run.map(|dt| dt.to_rfc3339()), last_run: t.last_run.map(|dt| dt.to_rfc3339()),
}
}) })
.collect(); .collect();
drop(tasks); drop(tasks);
@ -300,7 +323,8 @@ impl TaskScheduler {
/// holding the write lock across await points. Returns when the /// holding the write lock across await points. Returns when the
/// cancellation token is triggered. /// cancellation token is triggered.
pub async fn run(&self) { pub async fn run(&self) {
let mut interval = tokio::time::interval(std::time::Duration::from_secs(30)); let mut interval =
tokio::time::interval(std::time::Duration::from_secs(30));
loop { loop {
tokio::select! { tokio::select! {
_ = interval.tick() => {} _ = interval.tick() => {}
@ -328,16 +352,16 @@ impl TaskScheduler {
JobStatus::Completed { .. } => { JobStatus::Completed { .. } => {
task.running = false; task.running = false;
task.last_status = Some("completed".to_string()); task.last_status = Some("completed".to_string());
} },
JobStatus::Failed { error } => { JobStatus::Failed { error } => {
task.running = false; task.running = false;
task.last_status = Some(format!("failed: {error}")); task.last_status = Some(format!("failed: {error}"));
} },
JobStatus::Cancelled => { JobStatus::Cancelled => {
task.running = false; task.running = false;
task.last_status = Some("cancelled".to_string()); task.last_status = Some("cancelled".to_string());
} },
_ => {} // still pending or running _ => {}, // still pending or running
} }
} }
} }
@ -377,9 +401,10 @@ impl TaskScheduler {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use chrono::TimeZone; use chrono::TimeZone;
use super::*;
#[test] #[test]
fn test_interval_next_run() { fn test_interval_next_run() {
let from = Utc.with_ymd_and_hms(2025, 6, 15, 12, 0, 0).unwrap(); let from = Utc.with_ymd_and_hms(2025, 6, 15, 12, 0, 0).unwrap();
@ -427,7 +452,8 @@ mod tests {
#[test] #[test]
fn test_weekly_same_day_future() { fn test_weekly_same_day_future() {
// 2025-06-15 is Sunday (day 6). Schedule is Sunday 14:00, current is 10:00 => today. // 2025-06-15 is Sunday (day 6). Schedule is Sunday 14:00, current is 10:00
// => today.
let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap(); let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap();
let schedule = Schedule::Weekly { let schedule = Schedule::Weekly {
day: 6, day: 6,
@ -440,7 +466,8 @@ mod tests {
#[test] #[test]
fn test_weekly_same_day_past() { fn test_weekly_same_day_past() {
// 2025-06-15 is Sunday (day 6). Schedule is Sunday 08:00, current is 10:00 => next week. // 2025-06-15 is Sunday (day 6). Schedule is Sunday 08:00, current is 10:00
// => next week.
let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap(); let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap();
let schedule = Schedule::Weekly { let schedule = Schedule::Weekly {
day: 6, day: 6,
@ -492,14 +519,18 @@ mod tests {
"Every 30s" "Every 30s"
); );
assert_eq!( assert_eq!(
Schedule::Daily { hour: 3, minute: 0 }.display_string(), Schedule::Daily {
hour: 3,
minute: 0,
}
.display_string(),
"Daily 03:00" "Daily 03:00"
); );
assert_eq!( assert_eq!(
Schedule::Weekly { Schedule::Weekly {
day: 0, day: 0,
hour: 3, hour: 3,
minute: 0 minute: 0,
} }
.display_string(), .display_string(),
"Mon 03:00" "Mon 03:00"
@ -508,7 +539,7 @@ mod tests {
Schedule::Weekly { Schedule::Weekly {
day: 6, day: 6,
hour: 14, hour: 14,
minute: 30 minute: 30,
} }
.display_string(), .display_string(),
"Sun 14:30" "Sun 14:30"

View file

@ -1,7 +1,10 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use winnow::combinator::{alt, delimited, preceded, repeat}; use winnow::{
use winnow::token::{take_till, take_while}; ModalResult,
use winnow::{ModalResult, Parser}; Parser,
combinator::{alt, delimited, preceded, repeat},
token::{take_till, take_while},
};
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum SearchQuery { pub enum SearchQuery {
@ -131,7 +134,7 @@ fn parse_date_value(s: &str) -> Option<DateValue> {
return Some(DateValue::DaysAgo(days)); return Some(DateValue::DaysAgo(days));
} }
None None
} },
} }
} }
@ -152,15 +155,15 @@ fn parse_size_value(s: &str) -> Option<i64> {
} }
fn field_match(input: &mut &str) -> ModalResult<SearchQuery> { fn field_match(input: &mut &str) -> ModalResult<SearchQuery> {
let field_name = let field_name = take_while(1.., |c: char| c.is_alphanumeric() || c == '_')
take_while(1.., |c: char| c.is_alphanumeric() || c == '_').map(|s: &str| s.to_string()); .map(|s: &str| s.to_string());
(field_name, ':', word_or_quoted) (field_name, ':', word_or_quoted)
.map(|(field, _, value)| { .map(|(field, _, value)| {
// Handle special field types // Handle special field types
match field.as_str() { match field.as_str() {
"type" => return SearchQuery::TypeFilter(value), "type" => return SearchQuery::TypeFilter(value),
"tag" => return SearchQuery::TagFilter(value), "tag" => return SearchQuery::TagFilter(value),
_ => {} _ => {},
} }
// Check for range queries: field:start..end // Check for range queries: field:start..end
@ -305,7 +308,8 @@ fn atom(input: &mut &str) -> ModalResult<SearchQuery> {
fn and_expr(input: &mut &str) -> ModalResult<SearchQuery> { fn and_expr(input: &mut &str) -> ModalResult<SearchQuery> {
let first = atom.parse_next(input)?; let first = atom.parse_next(input)?;
let rest: Vec<SearchQuery> = repeat(0.., preceded(ws, atom)).parse_next(input)?; let rest: Vec<SearchQuery> =
repeat(0.., preceded(ws, atom)).parse_next(input)?;
if rest.is_empty() { if rest.is_empty() {
Ok(first) Ok(first)
} else { } else {
@ -352,13 +356,10 @@ mod tests {
#[test] #[test]
fn test_field_match() { fn test_field_match() {
let q = parse_search_query("artist:Beatles").unwrap(); let q = parse_search_query("artist:Beatles").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::FieldMatch {
q,
SearchQuery::FieldMatch {
field: "artist".into(), field: "artist".into(),
value: "Beatles".into() value: "Beatles".into(),
} });
);
} }
#[test] #[test]
@ -427,127 +428,97 @@ mod tests {
#[test] #[test]
fn test_range_query_year() { fn test_range_query_year() {
let q = parse_search_query("year:2020..2023").unwrap(); let q = parse_search_query("year:2020..2023").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::RangeQuery {
q,
SearchQuery::RangeQuery {
field: "year".into(), field: "year".into(),
start: Some(2020), start: Some(2020),
end: Some(2023) end: Some(2023),
} });
);
} }
#[test] #[test]
fn test_range_query_open_start() { fn test_range_query_open_start() {
let q = parse_search_query("year:..2023").unwrap(); let q = parse_search_query("year:..2023").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::RangeQuery {
q,
SearchQuery::RangeQuery {
field: "year".into(), field: "year".into(),
start: None, start: None,
end: Some(2023) end: Some(2023),
} });
);
} }
#[test] #[test]
fn test_range_query_open_end() { fn test_range_query_open_end() {
let q = parse_search_query("year:2020..").unwrap(); let q = parse_search_query("year:2020..").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::RangeQuery {
q,
SearchQuery::RangeQuery {
field: "year".into(), field: "year".into(),
start: Some(2020), start: Some(2020),
end: None end: None,
} });
);
} }
#[test] #[test]
fn test_compare_greater_than() { fn test_compare_greater_than() {
let q = parse_search_query("year:>2020").unwrap(); let q = parse_search_query("year:>2020").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::CompareQuery {
q,
SearchQuery::CompareQuery {
field: "year".into(), field: "year".into(),
op: CompareOp::GreaterThan, op: CompareOp::GreaterThan,
value: 2020 value: 2020,
} });
);
} }
#[test] #[test]
fn test_compare_less_or_equal() { fn test_compare_less_or_equal() {
let q = parse_search_query("year:<=2023").unwrap(); let q = parse_search_query("year:<=2023").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::CompareQuery {
q,
SearchQuery::CompareQuery {
field: "year".into(), field: "year".into(),
op: CompareOp::LessOrEqual, op: CompareOp::LessOrEqual,
value: 2023 value: 2023,
} });
);
} }
#[test] #[test]
fn test_size_compare_mb() { fn test_size_compare_mb() {
let q = parse_search_query("size:>10MB").unwrap(); let q = parse_search_query("size:>10MB").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::CompareQuery {
q,
SearchQuery::CompareQuery {
field: "size".into(), field: "size".into(),
op: CompareOp::GreaterThan, op: CompareOp::GreaterThan,
value: 10 * 1024 * 1024 value: 10 * 1024 * 1024,
} });
);
} }
#[test] #[test]
fn test_size_range_gb() { fn test_size_range_gb() {
let q = parse_search_query("size:1GB..2GB").unwrap(); let q = parse_search_query("size:1GB..2GB").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::RangeQuery {
q,
SearchQuery::RangeQuery {
field: "size".into(), field: "size".into(),
start: Some(1024 * 1024 * 1024), start: Some(1024 * 1024 * 1024),
end: Some(2 * 1024 * 1024 * 1024) end: Some(2 * 1024 * 1024 * 1024),
} });
);
} }
#[test] #[test]
fn test_date_query_today() { fn test_date_query_today() {
let q = parse_search_query("created:today").unwrap(); let q = parse_search_query("created:today").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::DateQuery {
q,
SearchQuery::DateQuery {
field: "created".into(), field: "created".into(),
value: DateValue::Today value: DateValue::Today,
} });
);
} }
#[test] #[test]
fn test_date_query_last_week() { fn test_date_query_last_week() {
let q = parse_search_query("modified:last-week").unwrap(); let q = parse_search_query("modified:last-week").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::DateQuery {
q,
SearchQuery::DateQuery {
field: "modified".into(), field: "modified".into(),
value: DateValue::LastWeek value: DateValue::LastWeek,
} });
);
} }
#[test] #[test]
fn test_date_query_days_ago() { fn test_date_query_days_ago() {
let q = parse_search_query("created:last-30d").unwrap(); let q = parse_search_query("created:last-30d").unwrap();
assert_eq!( assert_eq!(q, SearchQuery::DateQuery {
q,
SearchQuery::DateQuery {
field: "created".into(), field: "created".into(),
value: DateValue::DaysAgo(30) value: DateValue::DaysAgo(30),
} });
);
} }
} }

View file

@ -12,8 +12,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::model::MediaId; use crate::{model::MediaId, users::UserId};
use crate::users::UserId;
/// Unique identifier for a share. /// Unique identifier for a share.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@ -99,7 +98,9 @@ impl ShareRecipient {
} }
/// Permissions granted by a share. /// Permissions granted by a share.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)] #[derive(
Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize,
)]
pub struct SharePermissions { pub struct SharePermissions {
/// Can view the content /// Can view the content
pub can_view: bool, pub can_view: bool,

View file

@ -4,8 +4,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::model::MediaId; use crate::{model::MediaId, users::UserId};
use crate::users::UserId;
/// A user's rating for a media item. /// A user's rating for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -17,7 +17,9 @@ pub fn run_sqlite_migrations(conn: &mut rusqlite::Connection) -> Result<()> {
Ok(()) Ok(())
} }
pub async fn run_postgres_migrations(client: &mut tokio_postgres::Client) -> Result<()> { pub async fn run_postgres_migrations(
client: &mut tokio_postgres::Client,
) -> Result<()> {
postgres_migrations::migrations::runner() postgres_migrations::migrations::runner()
.run_async(client) .run_async(client)
.await .await

View file

@ -2,23 +2,23 @@ pub mod migrations;
pub mod postgres; pub mod postgres;
pub mod sqlite; pub mod sqlite;
use std::path::PathBuf; use std::{path::PathBuf, sync::Arc};
use std::sync::Arc;
use uuid::Uuid;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use uuid::Uuid;
use crate::analytics::UsageEvent; use crate::{
use crate::enrichment::ExternalMetadata; analytics::UsageEvent,
use crate::error::Result; enrichment::ExternalMetadata,
use crate::model::*; error::Result,
use crate::playlists::Playlist; model::*,
use crate::search::{SearchRequest, SearchResults}; playlists::Playlist,
use crate::social::{Comment, Rating, ShareLink}; search::{SearchRequest, SearchResults},
use crate::subtitles::Subtitle; social::{Comment, Rating, ShareLink},
use crate::transcode::{TranscodeSession, TranscodeStatus}; subtitles::Subtitle,
use crate::users::UserId; transcode::{TranscodeSession, TranscodeStatus},
users::UserId,
};
/// Statistics about the database. /// Statistics about the database.
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
@ -57,16 +57,27 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn insert_media(&self, item: &MediaItem) -> Result<()>; async fn insert_media(&self, item: &MediaItem) -> Result<()>;
async fn get_media(&self, id: MediaId) -> Result<MediaItem>; async fn get_media(&self, id: MediaId) -> Result<MediaItem>;
async fn count_media(&self) -> Result<u64>; async fn count_media(&self) -> Result<u64>;
async fn get_media_by_hash(&self, hash: &ContentHash) -> Result<Option<MediaItem>>; async fn get_media_by_hash(
&self,
hash: &ContentHash,
) -> Result<Option<MediaItem>>;
/// Get a media item by its file path (used for incremental scanning) /// Get a media item by its file path (used for incremental scanning)
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>>; async fn get_media_by_path(
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>>; &self,
path: &std::path::Path,
) -> Result<Option<MediaItem>>;
async fn list_media(&self, pagination: &Pagination)
-> Result<Vec<MediaItem>>;
async fn update_media(&self, item: &MediaItem) -> Result<()>; async fn update_media(&self, item: &MediaItem) -> Result<()>;
async fn delete_media(&self, id: MediaId) -> Result<()>; async fn delete_media(&self, id: MediaId) -> Result<()>;
async fn delete_all_media(&self) -> Result<u64>; async fn delete_all_media(&self) -> Result<u64>;
// Tags // Tags
async fn create_tag(&self, name: &str, parent_id: Option<Uuid>) -> Result<Tag>; async fn create_tag(
&self,
name: &str,
parent_id: Option<Uuid>,
) -> Result<Tag>;
async fn get_tag(&self, id: Uuid) -> Result<Tag>; async fn get_tag(&self, id: Uuid) -> Result<Tag>;
async fn list_tags(&self) -> Result<Vec<Tag>>; async fn list_tags(&self) -> Result<Vec<Tag>>;
async fn delete_tag(&self, id: Uuid) -> Result<()>; async fn delete_tag(&self, id: Uuid) -> Result<()>;
@ -92,8 +103,15 @@ pub trait StorageBackend: Send + Sync + 'static {
media_id: MediaId, media_id: MediaId,
position: i32, position: i32,
) -> Result<()>; ) -> Result<()>;
async fn remove_from_collection(&self, collection_id: Uuid, media_id: MediaId) -> Result<()>; async fn remove_from_collection(
async fn get_collection_members(&self, collection_id: Uuid) -> Result<Vec<MediaItem>>; &self,
collection_id: Uuid,
media_id: MediaId,
) -> Result<()>;
async fn get_collection_members(
&self,
collection_id: Uuid,
) -> Result<Vec<MediaItem>>;
// Search // Search
async fn search(&self, request: &SearchRequest) -> Result<SearchResults>; async fn search(&self, request: &SearchRequest) -> Result<SearchResults>;
@ -117,7 +135,11 @@ pub trait StorageBackend: Send + Sync + 'static {
&self, &self,
media_id: MediaId, media_id: MediaId,
) -> Result<std::collections::HashMap<String, CustomField>>; ) -> Result<std::collections::HashMap<String, CustomField>>;
async fn delete_custom_field(&self, media_id: MediaId, name: &str) -> Result<()>; async fn delete_custom_field(
&self,
media_id: MediaId,
name: &str,
) -> Result<()>;
// Batch operations (transactional where supported) // Batch operations (transactional where supported)
async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> { async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> {
@ -129,7 +151,11 @@ pub trait StorageBackend: Send + Sync + 'static {
Ok(count) Ok(count)
} }
async fn batch_tag_media(&self, media_ids: &[MediaId], tag_ids: &[Uuid]) -> Result<u64> { async fn batch_tag_media(
&self,
media_ids: &[MediaId],
tag_ids: &[Uuid],
) -> Result<u64> {
let mut count = 0u64; let mut count = 0u64;
for media_id in media_ids { for media_id in media_ids {
for tag_id in tag_ids { for tag_id in tag_ids {
@ -141,7 +167,9 @@ pub trait StorageBackend: Send + Sync + 'static {
} }
// Integrity // Integrity
async fn list_media_paths(&self) -> Result<Vec<(MediaId, std::path::PathBuf, ContentHash)>>; async fn list_media_paths(
&self,
) -> Result<Vec<(MediaId, std::path::PathBuf, ContentHash)>>;
// Batch metadata update // Batch metadata update
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
@ -191,12 +219,16 @@ pub trait StorageBackend: Send + Sync + 'static {
query: &str, query: &str,
sort_order: Option<&str>, sort_order: Option<&str>,
) -> Result<()>; ) -> Result<()>;
async fn list_saved_searches(&self) -> Result<Vec<crate::model::SavedSearch>>; async fn list_saved_searches(&self)
-> Result<Vec<crate::model::SavedSearch>>;
async fn delete_saved_search(&self, id: uuid::Uuid) -> Result<()>; async fn delete_saved_search(&self, id: uuid::Uuid) -> Result<()>;
// Duplicates // Duplicates
async fn find_duplicates(&self) -> Result<Vec<Vec<MediaItem>>>; async fn find_duplicates(&self) -> Result<Vec<Vec<MediaItem>>>;
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>>; async fn find_perceptual_duplicates(
&self,
threshold: u32,
) -> Result<Vec<Vec<MediaItem>>>;
// Database management // Database management
async fn database_stats(&self) -> Result<DatabaseStats>; async fn database_stats(&self) -> Result<DatabaseStats>;
@ -215,8 +247,14 @@ pub trait StorageBackend: Send + Sync + 'static {
// User Management // User Management
async fn list_users(&self) -> Result<Vec<crate::users::User>>; async fn list_users(&self) -> Result<Vec<crate::users::User>>;
async fn get_user(&self, id: crate::users::UserId) -> Result<crate::users::User>; async fn get_user(
async fn get_user_by_username(&self, username: &str) -> Result<crate::users::User>; &self,
id: crate::users::UserId,
) -> Result<crate::users::User>;
async fn get_user_by_username(
&self,
username: &str,
) -> Result<crate::users::User>;
async fn create_user( async fn create_user(
&self, &self,
username: &str, username: &str,
@ -248,23 +286,26 @@ pub trait StorageBackend: Send + Sync + 'static {
root_path: &str, root_path: &str,
) -> Result<()>; ) -> Result<()>;
/// Check if a user has access to a specific media item based on library permissions. /// Check if a user has access to a specific media item based on library
/// Returns the permission level if access is granted, or an error if denied. /// permissions. Returns the permission level if access is granted, or an
/// Admin users (role=admin) bypass library checks and have full access. /// error if denied. Admin users (role=admin) bypass library checks and have
/// full access.
async fn check_library_access( async fn check_library_access(
&self, &self,
user_id: crate::users::UserId, user_id: crate::users::UserId,
media_id: crate::model::MediaId, media_id: crate::model::MediaId,
) -> Result<crate::users::LibraryPermission> { ) -> Result<crate::users::LibraryPermission> {
// Default implementation: get the media item's path and check against user's library access // Default implementation: get the media item's path and check against
// user's library access
let media = self.get_media(media_id).await?; let media = self.get_media(media_id).await?;
let path_str = media.path.to_string_lossy().to_string(); let path_str = media.path.to_string_lossy().to_string();
// Get user's library permissions // Get user's library permissions
let libraries = self.get_user_libraries(user_id).await?; let libraries = self.get_user_libraries(user_id).await?;
// If user has no library restrictions, they have no access (unless they're admin) // If user has no library restrictions, they have no access (unless they're
// This default impl requires at least one matching library permission // admin) This default impl requires at least one matching library
// permission
for lib in &libraries { for lib in &libraries {
if path_str.starts_with(&lib.root_path) { if path_str.starts_with(&lib.root_path) {
return Ok(lib.permission); return Ok(lib.permission);
@ -310,7 +351,11 @@ pub trait StorageBackend: Send + Sync + 'static {
review: Option<&str>, review: Option<&str>,
) -> Result<Rating>; ) -> Result<Rating>;
async fn get_media_ratings(&self, media_id: MediaId) -> Result<Vec<Rating>>; async fn get_media_ratings(&self, media_id: MediaId) -> Result<Vec<Rating>>;
async fn get_user_rating(&self, user_id: UserId, media_id: MediaId) -> Result<Option<Rating>>; async fn get_user_rating(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<Option<Rating>>;
async fn delete_rating(&self, id: Uuid) -> Result<()>; async fn delete_rating(&self, id: Uuid) -> Result<()>;
// ===== Comments ===== // ===== Comments =====
@ -321,18 +366,31 @@ pub trait StorageBackend: Send + Sync + 'static {
text: &str, text: &str,
parent_id: Option<Uuid>, parent_id: Option<Uuid>,
) -> Result<Comment>; ) -> Result<Comment>;
async fn get_media_comments(&self, media_id: MediaId) -> Result<Vec<Comment>>; async fn get_media_comments(&self, media_id: MediaId)
-> Result<Vec<Comment>>;
async fn delete_comment(&self, id: Uuid) -> Result<()>; async fn delete_comment(&self, id: Uuid) -> Result<()>;
// ===== Favorites ===== // ===== Favorites =====
async fn add_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>; async fn add_favorite(
async fn remove_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>; &self,
user_id: UserId,
media_id: MediaId,
) -> Result<()>;
async fn remove_favorite(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<()>;
async fn get_user_favorites( async fn get_user_favorites(
&self, &self,
user_id: UserId, user_id: UserId,
pagination: &Pagination, pagination: &Pagination,
) -> Result<Vec<MediaItem>>; ) -> Result<Vec<MediaItem>>;
async fn is_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<bool>; async fn is_favorite(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<bool>;
// ===== Share Links ===== // ===== Share Links =====
async fn create_share_link( async fn create_share_link(
@ -358,7 +416,10 @@ pub trait StorageBackend: Send + Sync + 'static {
filter_query: Option<&str>, filter_query: Option<&str>,
) -> Result<Playlist>; ) -> Result<Playlist>;
async fn get_playlist(&self, id: Uuid) -> Result<Playlist>; async fn get_playlist(&self, id: Uuid) -> Result<Playlist>;
async fn list_playlists(&self, owner_id: Option<UserId>) -> Result<Vec<Playlist>>; async fn list_playlists(
&self,
owner_id: Option<UserId>,
) -> Result<Vec<Playlist>>;
async fn update_playlist( async fn update_playlist(
&self, &self,
id: Uuid, id: Uuid,
@ -373,8 +434,15 @@ pub trait StorageBackend: Send + Sync + 'static {
media_id: MediaId, media_id: MediaId,
position: i32, position: i32,
) -> Result<()>; ) -> Result<()>;
async fn remove_from_playlist(&self, playlist_id: Uuid, media_id: MediaId) -> Result<()>; async fn remove_from_playlist(
async fn get_playlist_items(&self, playlist_id: Uuid) -> Result<Vec<MediaItem>>; &self,
playlist_id: Uuid,
media_id: MediaId,
) -> Result<()>;
async fn get_playlist_items(
&self,
playlist_id: Uuid,
) -> Result<Vec<MediaItem>>;
async fn reorder_playlist( async fn reorder_playlist(
&self, &self,
playlist_id: Uuid, playlist_id: Uuid,
@ -391,29 +459,53 @@ pub trait StorageBackend: Send + Sync + 'static {
limit: u64, limit: u64,
) -> Result<Vec<UsageEvent>>; ) -> Result<Vec<UsageEvent>>;
async fn get_most_viewed(&self, limit: u64) -> Result<Vec<(MediaItem, u64)>>; async fn get_most_viewed(&self, limit: u64) -> Result<Vec<(MediaItem, u64)>>;
async fn get_recently_viewed(&self, user_id: UserId, limit: u64) -> Result<Vec<MediaItem>>; async fn get_recently_viewed(
&self,
user_id: UserId,
limit: u64,
) -> Result<Vec<MediaItem>>;
async fn update_watch_progress( async fn update_watch_progress(
&self, &self,
user_id: UserId, user_id: UserId,
media_id: MediaId, media_id: MediaId,
progress_secs: f64, progress_secs: f64,
) -> Result<()>; ) -> Result<()>;
async fn get_watch_progress(&self, user_id: UserId, media_id: MediaId) -> Result<Option<f64>>; async fn get_watch_progress(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<Option<f64>>;
async fn cleanup_old_events(&self, before: DateTime<Utc>) -> Result<u64>; async fn cleanup_old_events(&self, before: DateTime<Utc>) -> Result<u64>;
// ===== Subtitles ===== // ===== Subtitles =====
async fn add_subtitle(&self, subtitle: &Subtitle) -> Result<()>; async fn add_subtitle(&self, subtitle: &Subtitle) -> Result<()>;
async fn get_media_subtitles(&self, media_id: MediaId) -> Result<Vec<Subtitle>>; async fn get_media_subtitles(
&self,
media_id: MediaId,
) -> Result<Vec<Subtitle>>;
async fn delete_subtitle(&self, id: Uuid) -> Result<()>; async fn delete_subtitle(&self, id: Uuid) -> Result<()>;
async fn update_subtitle_offset(&self, id: Uuid, offset_ms: i64) -> Result<()>; async fn update_subtitle_offset(
&self,
id: Uuid,
offset_ms: i64,
) -> Result<()>;
// ===== External Metadata (Enrichment) ===== // ===== External Metadata (Enrichment) =====
async fn store_external_metadata(&self, meta: &ExternalMetadata) -> Result<()>; async fn store_external_metadata(
async fn get_external_metadata(&self, media_id: MediaId) -> Result<Vec<ExternalMetadata>>; &self,
meta: &ExternalMetadata,
) -> Result<()>;
async fn get_external_metadata(
&self,
media_id: MediaId,
) -> Result<Vec<ExternalMetadata>>;
async fn delete_external_metadata(&self, id: Uuid) -> Result<()>; async fn delete_external_metadata(&self, id: Uuid) -> Result<()>;
// ===== Transcode Sessions ===== // ===== Transcode Sessions =====
async fn create_transcode_session(&self, session: &TranscodeSession) -> Result<()>; async fn create_transcode_session(
&self,
session: &TranscodeSession,
) -> Result<()>;
async fn get_transcode_session(&self, id: Uuid) -> Result<TranscodeSession>; async fn get_transcode_session(&self, id: Uuid) -> Result<TranscodeSession>;
async fn list_transcode_sessions( async fn list_transcode_sessions(
&self, &self,
@ -425,14 +517,20 @@ pub trait StorageBackend: Send + Sync + 'static {
status: TranscodeStatus, status: TranscodeStatus,
progress: f32, progress: f32,
) -> Result<()>; ) -> Result<()>;
async fn cleanup_expired_transcodes(&self, before: DateTime<Utc>) -> Result<u64>; async fn cleanup_expired_transcodes(
&self,
before: DateTime<Utc>,
) -> Result<u64>;
// ===== Session Management ===== // ===== Session Management =====
/// Create a new session in the database /// Create a new session in the database
async fn create_session(&self, session: &SessionData) -> Result<()>; async fn create_session(&self, session: &SessionData) -> Result<()>;
/// Get a session by its token, returns None if not found or expired /// Get a session by its token, returns None if not found or expired
async fn get_session(&self, session_token: &str) -> Result<Option<SessionData>>; async fn get_session(
&self,
session_token: &str,
) -> Result<Option<SessionData>>;
/// Update the last_accessed timestamp for a session /// Update the last_accessed timestamp for a session
async fn touch_session(&self, session_token: &str) -> Result<()>; async fn touch_session(&self, session_token: &str) -> Result<()>;
@ -447,12 +545,18 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn delete_expired_sessions(&self) -> Result<u64>; async fn delete_expired_sessions(&self) -> Result<u64>;
/// List all active sessions (optionally filtered by username) /// List all active sessions (optionally filtered by username)
async fn list_active_sessions(&self, username: Option<&str>) -> Result<Vec<SessionData>>; async fn list_active_sessions(
&self,
username: Option<&str>,
) -> Result<Vec<SessionData>>;
// Book Management Methods // Book Management Methods
/// Upsert book metadata for a media item /// Upsert book metadata for a media item
async fn upsert_book_metadata(&self, metadata: &crate::model::BookMetadata) -> Result<()>; async fn upsert_book_metadata(
&self,
metadata: &crate::model::BookMetadata,
) -> Result<()>;
/// Get book metadata for a media item /// Get book metadata for a media item
async fn get_book_metadata( async fn get_book_metadata(
@ -468,16 +572,23 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<()>; ) -> Result<()>;
/// Get all authors for a book /// Get all authors for a book
async fn get_book_authors(&self, media_id: MediaId) -> Result<Vec<crate::model::AuthorInfo>>; async fn get_book_authors(
&self,
media_id: MediaId,
) -> Result<Vec<crate::model::AuthorInfo>>;
/// List all distinct authors with book counts /// List all distinct authors with book counts
async fn list_all_authors(&self, pagination: &Pagination) -> Result<Vec<(String, u64)>>; async fn list_all_authors(
&self,
pagination: &Pagination,
) -> Result<Vec<(String, u64)>>;
/// List all series with book counts /// List all series with book counts
async fn list_series(&self) -> Result<Vec<(String, u64)>>; async fn list_series(&self) -> Result<Vec<(String, u64)>>;
/// Get all books in a series, ordered by series_index /// Get all books in a series, ordered by series_index
async fn get_series_books(&self, series_name: &str) -> Result<Vec<MediaItem>>; async fn get_series_books(&self, series_name: &str)
-> Result<Vec<MediaItem>>;
/// Update reading progress for a user and book /// Update reading progress for a user and book
async fn update_reading_progress( async fn update_reading_progress(
@ -531,7 +642,8 @@ pub trait StorageBackend: Send + Sync + 'static {
/// Increment the reference count for a blob /// Increment the reference count for a blob
async fn increment_blob_ref(&self, hash: &ContentHash) -> Result<()>; async fn increment_blob_ref(&self, hash: &ContentHash) -> Result<()>;
/// Decrement the reference count for a blob. Returns true if blob should be deleted. /// Decrement the reference count for a blob. Returns true if blob should be
/// deleted.
async fn decrement_blob_ref(&self, hash: &ContentHash) -> Result<bool>; async fn decrement_blob_ref(&self, hash: &ContentHash) -> Result<bool>;
/// Update the last_verified timestamp for a blob /// Update the last_verified timestamp for a blob
@ -556,7 +668,10 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<crate::sync::SyncDevice>; ) -> Result<crate::sync::SyncDevice>;
/// Get a sync device by ID /// Get a sync device by ID
async fn get_device(&self, id: crate::sync::DeviceId) -> Result<crate::sync::SyncDevice>; async fn get_device(
&self,
id: crate::sync::DeviceId,
) -> Result<crate::sync::SyncDevice>;
/// Get a sync device by its token hash /// Get a sync device by its token hash
async fn get_device_by_token( async fn get_device_by_token(
@ -565,10 +680,14 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Option<crate::sync::SyncDevice>>; ) -> Result<Option<crate::sync::SyncDevice>>;
/// List all devices for a user /// List all devices for a user
async fn list_user_devices(&self, user_id: UserId) -> Result<Vec<crate::sync::SyncDevice>>; async fn list_user_devices(
&self,
user_id: UserId,
) -> Result<Vec<crate::sync::SyncDevice>>;
/// Update a sync device /// Update a sync device
async fn update_device(&self, device: &crate::sync::SyncDevice) -> Result<()>; async fn update_device(&self, device: &crate::sync::SyncDevice)
-> Result<()>;
/// Delete a sync device /// Delete a sync device
async fn delete_device(&self, id: crate::sync::DeviceId) -> Result<()>; async fn delete_device(&self, id: crate::sync::DeviceId) -> Result<()>;
@ -579,7 +698,10 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Sync Log ===== // ===== Sync Log =====
/// Record a change in the sync log /// Record a change in the sync log
async fn record_sync_change(&self, change: &crate::sync::SyncLogEntry) -> Result<()>; async fn record_sync_change(
&self,
change: &crate::sync::SyncLogEntry,
) -> Result<()>;
/// Get changes since a cursor position /// Get changes since a cursor position
async fn get_changes_since( async fn get_changes_since(
@ -604,7 +726,10 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Option<crate::sync::DeviceSyncState>>; ) -> Result<Option<crate::sync::DeviceSyncState>>;
/// Insert or update device sync state /// Insert or update device sync state
async fn upsert_device_sync_state(&self, state: &crate::sync::DeviceSyncState) -> Result<()>; async fn upsert_device_sync_state(
&self,
state: &crate::sync::DeviceSyncState,
) -> Result<()>;
/// List all pending sync items for a device /// List all pending sync items for a device
async fn list_pending_sync( async fn list_pending_sync(
@ -615,19 +740,35 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Upload Sessions (Chunked Uploads) ===== // ===== Upload Sessions (Chunked Uploads) =====
/// Create a new upload session /// Create a new upload session
async fn create_upload_session(&self, session: &crate::sync::UploadSession) -> Result<()>; async fn create_upload_session(
&self,
session: &crate::sync::UploadSession,
) -> Result<()>;
/// Get an upload session by ID /// Get an upload session by ID
async fn get_upload_session(&self, id: Uuid) -> Result<crate::sync::UploadSession>; async fn get_upload_session(
&self,
id: Uuid,
) -> Result<crate::sync::UploadSession>;
/// Update an upload session /// Update an upload session
async fn update_upload_session(&self, session: &crate::sync::UploadSession) -> Result<()>; async fn update_upload_session(
&self,
session: &crate::sync::UploadSession,
) -> Result<()>;
/// Record a received chunk /// Record a received chunk
async fn record_chunk(&self, upload_id: Uuid, chunk: &crate::sync::ChunkInfo) -> Result<()>; async fn record_chunk(
&self,
upload_id: Uuid,
chunk: &crate::sync::ChunkInfo,
) -> Result<()>;
/// Get all chunks for an upload /// Get all chunks for an upload
async fn get_upload_chunks(&self, upload_id: Uuid) -> Result<Vec<crate::sync::ChunkInfo>>; async fn get_upload_chunks(
&self,
upload_id: Uuid,
) -> Result<Vec<crate::sync::ChunkInfo>>;
/// Clean up expired upload sessions /// Clean up expired upload sessions
async fn cleanup_expired_uploads(&self) -> Result<u64>; async fn cleanup_expired_uploads(&self) -> Result<u64>;
@ -635,7 +776,10 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Sync Conflicts ===== // ===== Sync Conflicts =====
/// Record a sync conflict /// Record a sync conflict
async fn record_conflict(&self, conflict: &crate::sync::SyncConflict) -> Result<()>; async fn record_conflict(
&self,
conflict: &crate::sync::SyncConflict,
) -> Result<()>;
/// Get unresolved conflicts for a device /// Get unresolved conflicts for a device
async fn get_unresolved_conflicts( async fn get_unresolved_conflicts(
@ -653,13 +797,22 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Enhanced Sharing ===== // ===== Enhanced Sharing =====
/// Create a new share /// Create a new share
async fn create_share(&self, share: &crate::sharing::Share) -> Result<crate::sharing::Share>; async fn create_share(
&self,
share: &crate::sharing::Share,
) -> Result<crate::sharing::Share>;
/// Get a share by ID /// Get a share by ID
async fn get_share(&self, id: crate::sharing::ShareId) -> Result<crate::sharing::Share>; async fn get_share(
&self,
id: crate::sharing::ShareId,
) -> Result<crate::sharing::Share>;
/// Get a share by its public token /// Get a share by its public token
async fn get_share_by_token(&self, token: &str) -> Result<crate::sharing::Share>; async fn get_share_by_token(
&self,
token: &str,
) -> Result<crate::sharing::Share>;
/// List shares created by a user /// List shares created by a user
async fn list_shares_by_owner( async fn list_shares_by_owner(
@ -682,13 +835,19 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Vec<crate::sharing::Share>>; ) -> Result<Vec<crate::sharing::Share>>;
/// Update a share /// Update a share
async fn update_share(&self, share: &crate::sharing::Share) -> Result<crate::sharing::Share>; async fn update_share(
&self,
share: &crate::sharing::Share,
) -> Result<crate::sharing::Share>;
/// Delete a share /// Delete a share
async fn delete_share(&self, id: crate::sharing::ShareId) -> Result<()>; async fn delete_share(&self, id: crate::sharing::ShareId) -> Result<()>;
/// Record that a share was accessed /// Record that a share was accessed
async fn record_share_access(&self, id: crate::sharing::ShareId) -> Result<()>; async fn record_share_access(
&self,
id: crate::sharing::ShareId,
) -> Result<()>;
/// Check share access for a user and target /// Check share access for a user and target
async fn check_share_access( async fn check_share_access(
@ -705,7 +864,10 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Option<crate::sharing::SharePermissions>>; ) -> Result<Option<crate::sharing::SharePermissions>>;
/// Batch delete shares /// Batch delete shares
async fn batch_delete_shares(&self, ids: &[crate::sharing::ShareId]) -> Result<u64>; async fn batch_delete_shares(
&self,
ids: &[crate::sharing::ShareId],
) -> Result<u64>;
/// Clean up expired shares /// Clean up expired shares
async fn cleanup_expired_shares(&self) -> Result<u64>; async fn cleanup_expired_shares(&self) -> Result<u64>;
@ -713,7 +875,10 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Share Activity ===== // ===== Share Activity =====
/// Record share activity /// Record share activity
async fn record_share_activity(&self, activity: &crate::sharing::ShareActivity) -> Result<()>; async fn record_share_activity(
&self,
activity: &crate::sharing::ShareActivity,
) -> Result<()>;
/// Get activity for a share /// Get activity for a share
async fn get_share_activity( async fn get_share_activity(
@ -754,7 +919,11 @@ pub trait StorageBackend: Send + Sync + 'static {
/// For external storage, this actually moves the file on disk. /// For external storage, this actually moves the file on disk.
/// For managed storage, this only updates the path in metadata. /// For managed storage, this only updates the path in metadata.
/// Returns the old path for sync log recording. /// Returns the old path for sync log recording.
async fn move_media(&self, id: MediaId, new_directory: &std::path::Path) -> Result<String>; async fn move_media(
&self,
id: MediaId,
new_directory: &std::path::Path,
) -> Result<String>;
/// Batch move multiple media items to a new directory. /// Batch move multiple media items to a new directory.
async fn batch_move_media( async fn batch_move_media(
@ -779,7 +948,8 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn restore_media(&self, id: MediaId) -> Result<()>; async fn restore_media(&self, id: MediaId) -> Result<()>;
/// List all soft-deleted media items. /// List all soft-deleted media items.
async fn list_trash(&self, pagination: &Pagination) -> Result<Vec<MediaItem>>; async fn list_trash(&self, pagination: &Pagination)
-> Result<Vec<MediaItem>>;
/// Permanently delete all items in trash. /// Permanently delete all items in trash.
async fn empty_trash(&self) -> Result<u64>; async fn empty_trash(&self) -> Result<u64>;
@ -807,15 +977,19 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Vec<crate::model::MarkdownLink>>; ) -> Result<Vec<crate::model::MarkdownLink>>;
/// Get backlinks (incoming links) to a media item. /// Get backlinks (incoming links) to a media item.
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>>; async fn get_backlinks(
&self,
media_id: MediaId,
) -> Result<Vec<crate::model::BacklinkInfo>>;
/// Clear all links for a media item. /// Clear all links for a media item.
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()>; async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()>;
/// Get graph data for visualization. /// Get graph data for visualization.
/// ///
/// If `center_id` is provided, returns nodes within `depth` hops of that node. /// If `center_id` is provided, returns nodes within `depth` hops of that
/// If `center_id` is None, returns the entire graph (limited by internal max). /// node. If `center_id` is None, returns the entire graph (limited by
/// internal max).
async fn get_graph_data( async fn get_graph_data(
&self, &self,
center_id: Option<MediaId>, center_id: Option<MediaId>,

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -3,14 +3,15 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use chrono::Utc; use chrono::Utc;
use tokio::fs; use tokio::{
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt}; fs,
io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt},
};
use tracing::{debug, info}; use tracing::{debug, info};
use uuid::Uuid; use uuid::Uuid;
use crate::error::{PinakesError, Result};
use super::{ChunkInfo, UploadSession}; use super::{ChunkInfo, UploadSession};
use crate::error::{PinakesError, Result};
/// Manager for chunked uploads. /// Manager for chunked uploads.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -144,7 +145,8 @@ impl ChunkedUploadManager {
} }
// Verify chunk indices // Verify chunk indices
let mut indices: Vec<u64> = received_chunks.iter().map(|c| c.chunk_index).collect(); let mut indices: Vec<u64> =
received_chunks.iter().map(|c| c.chunk_index).collect();
indices.sort(); indices.sort();
for (i, idx) in indices.iter().enumerate() { for (i, idx) in indices.iter().enumerate() {
if *idx != i as u64 { if *idx != i as u64 {
@ -243,11 +245,11 @@ async fn compute_file_hash(path: &Path) -> Result<String> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use crate::model::ContentHash;
use crate::sync::UploadStatus;
use tempfile::tempdir; use tempfile::tempdir;
use super::*;
use crate::{model::ContentHash, sync::UploadStatus};
#[tokio::test] #[tokio::test]
async fn test_chunked_upload() { async fn test_chunked_upload() {
let dir = tempdir().unwrap(); let dir = tempdir().unwrap();

View file

@ -1,8 +1,7 @@
//! Conflict detection and resolution for sync. //! Conflict detection and resolution for sync.
use crate::config::ConflictResolution;
use super::DeviceSyncState; use super::DeviceSyncState;
use crate::config::ConflictResolution;
/// Detect if there's a conflict between local and server state. /// Detect if there's a conflict between local and server state.
pub fn detect_conflict(state: &DeviceSyncState) -> Option<ConflictInfo> { pub fn detect_conflict(state: &DeviceSyncState) -> Option<ConflictInfo> {
@ -57,11 +56,12 @@ pub fn resolve_conflict(
ConflictResolution::ServerWins => ConflictOutcome::UseServer, ConflictResolution::ServerWins => ConflictOutcome::UseServer,
ConflictResolution::ClientWins => ConflictOutcome::UseLocal, ConflictResolution::ClientWins => ConflictOutcome::UseLocal,
ConflictResolution::KeepBoth => { ConflictResolution::KeepBoth => {
let new_path = generate_conflict_path(&conflict.path, &conflict.local_hash); let new_path =
generate_conflict_path(&conflict.path, &conflict.local_hash);
ConflictOutcome::KeepBoth { ConflictOutcome::KeepBoth {
new_local_path: new_path, new_local_path: new_path,
} }
} },
ConflictResolution::Manual => ConflictOutcome::Manual, ConflictResolution::Manual => ConflictOutcome::Manual,
} }
} }
@ -88,7 +88,7 @@ pub fn resolve_by_mtime(conflict: &ConflictInfo) -> ConflictOutcome {
} else { } else {
ConflictOutcome::UseServer ConflictOutcome::UseServer
} }
} },
(Some(_), None) => ConflictOutcome::UseLocal, (Some(_), None) => ConflictOutcome::UseLocal,
(None, Some(_)) => ConflictOutcome::UseServer, (None, Some(_)) => ConflictOutcome::UseServer,
(None, None) => ConflictOutcome::UseServer, // Default to server (None, None) => ConflictOutcome::UseServer, // Default to server

View file

@ -6,9 +6,11 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::config::ConflictResolution; use crate::{
use crate::model::{ContentHash, MediaId}; config::ConflictResolution,
use crate::users::UserId; model::{ContentHash, MediaId},
users::UserId,
};
/// Unique identifier for a sync device. /// Unique identifier for a sync device.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]

View file

@ -6,11 +6,18 @@ use chrono::Utc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use crate::error::Result; use super::{
use crate::model::{ContentHash, MediaId}; DeviceId,
use crate::storage::DynStorageBackend; DeviceSyncState,
FileSyncStatus,
use super::{DeviceId, DeviceSyncState, FileSyncStatus, SyncChangeType, SyncLogEntry}; SyncChangeType,
SyncLogEntry,
};
use crate::{
error::Result,
model::{ContentHash, MediaId},
storage::DynStorageBackend,
};
/// Request from client to get changes since a cursor. /// Request from client to get changes since a cursor.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -183,8 +190,9 @@ pub async fn mark_pending_download(
s.server_mtime = server_mtime; s.server_mtime = server_mtime;
s.sync_status = FileSyncStatus::PendingDownload; s.sync_status = FileSyncStatus::PendingDownload;
s s
} },
None => DeviceSyncState { None => {
DeviceSyncState {
device_id, device_id,
path: path.to_string(), path: path.to_string(),
local_hash: None, local_hash: None,
@ -194,6 +202,7 @@ pub async fn mark_pending_download(
sync_status: FileSyncStatus::PendingDownload, sync_status: FileSyncStatus::PendingDownload,
last_synced_at: None, last_synced_at: None,
conflict_info_json: None, conflict_info_json: None,
}
}, },
}; };

View file

@ -1,8 +1,10 @@
use uuid::Uuid; use uuid::Uuid;
use crate::error::Result; use crate::{
use crate::model::{AuditAction, MediaId, Tag}; error::Result,
use crate::storage::DynStorageBackend; model::{AuditAction, MediaId, Tag},
storage::DynStorageBackend,
};
pub async fn create_tag( pub async fn create_tag(
storage: &DynStorageBackend, storage: &DynStorageBackend,
@ -12,7 +14,11 @@ pub async fn create_tag(
storage.create_tag(name, parent_id).await storage.create_tag(name, parent_id).await
} }
pub async fn tag_media(storage: &DynStorageBackend, media_id: MediaId, tag_id: Uuid) -> Result<()> { pub async fn tag_media(
storage: &DynStorageBackend,
media_id: MediaId,
tag_id: Uuid,
) -> Result<()> {
storage.tag_media(media_id, tag_id).await?; storage.tag_media(media_id, tag_id).await?;
crate::audit::record_action( crate::audit::record_action(
storage, storage,
@ -38,6 +44,9 @@ pub async fn untag_media(
.await .await
} }
pub async fn get_tag_tree(storage: &DynStorageBackend, tag_id: Uuid) -> Result<Vec<Tag>> { pub async fn get_tag_tree(
storage: &DynStorageBackend,
tag_id: Uuid,
) -> Result<Vec<Tag>> {
storage.get_tag_descendants(tag_id).await storage.get_tag_descendants(tag_id).await
} }

View file

@ -1,17 +1,21 @@
use std::path::{Path, PathBuf}; use std::{
use std::process::Command; path::{Path, PathBuf},
process::Command,
};
use tracing::{info, warn}; use tracing::{info, warn};
use crate::config::ThumbnailConfig; use crate::{
use crate::error::{PinakesError, Result}; config::ThumbnailConfig,
use crate::media_type::{BuiltinMediaType, MediaCategory, MediaType}; error::{PinakesError, Result},
use crate::model::MediaId; media_type::{BuiltinMediaType, MediaCategory, MediaType},
model::MediaId,
};
/// Generate a thumbnail for a media file and return the path to the thumbnail. /// Generate a thumbnail for a media file and return the path to the thumbnail.
/// ///
/// Supports images (via `image` crate), videos (via ffmpeg), PDFs (via pdftoppm), /// Supports images (via `image` crate), videos (via ffmpeg), PDFs (via
/// and EPUBs (via cover image extraction). /// pdftoppm), and EPUBs (via cover image extraction).
pub fn generate_thumbnail( pub fn generate_thumbnail(
media_id: MediaId, media_id: MediaId,
source_path: &Path, source_path: &Path,
@ -46,16 +50,20 @@ pub fn generate_thumbnail_with_config(
} else { } else {
generate_image_thumbnail(source_path, &thumb_path, config) generate_image_thumbnail(source_path, &thumb_path, config)
} }
} },
MediaCategory::Video => generate_video_thumbnail(source_path, &thumb_path, config), MediaCategory::Video => {
MediaCategory::Document => match media_type { generate_video_thumbnail(source_path, &thumb_path, config)
},
MediaCategory::Document => {
match media_type {
MediaType::Builtin(BuiltinMediaType::Pdf) => { MediaType::Builtin(BuiltinMediaType::Pdf) => {
generate_pdf_thumbnail(source_path, &thumb_path, config) generate_pdf_thumbnail(source_path, &thumb_path, config)
} },
MediaType::Builtin(BuiltinMediaType::Epub) => { MediaType::Builtin(BuiltinMediaType::Epub) => {
generate_epub_thumbnail(source_path, &thumb_path, config) generate_epub_thumbnail(source_path, &thumb_path, config)
} },
_ => return Ok(None), _ => return Ok(None),
}
}, },
_ => return Ok(None), _ => return Ok(None),
}; };
@ -64,30 +72,42 @@ pub fn generate_thumbnail_with_config(
Ok(()) => { Ok(()) => {
info!(media_id = %media_id, category = ?media_type.category(), "generated thumbnail"); info!(media_id = %media_id, category = ?media_type.category(), "generated thumbnail");
Ok(Some(thumb_path)) Ok(Some(thumb_path))
} },
Err(e) => { Err(e) => {
warn!(media_id = %media_id, error = %e, "failed to generate thumbnail"); warn!(media_id = %media_id, error = %e, "failed to generate thumbnail");
Ok(None) Ok(None)
} },
} }
} }
fn generate_image_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> { fn generate_image_thumbnail(
let img = image::open(source) source: &Path,
.map_err(|e| PinakesError::MetadataExtraction(format!("image open: {e}")))?; dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
let img = image::open(source).map_err(|e| {
PinakesError::MetadataExtraction(format!("image open: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size); let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?; let mut output = std::fs::File::create(dest)?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality); let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
thumb &mut output,
.write_with_encoder(encoder) config.quality,
.map_err(|e| PinakesError::MetadataExtraction(format!("thumbnail encode: {e}")))?; );
thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("thumbnail encode: {e}"))
})?;
Ok(()) Ok(())
} }
fn generate_video_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> { fn generate_video_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
let ffmpeg = config.ffmpeg_path.as_deref().unwrap_or("ffmpeg"); let ffmpeg = config.ffmpeg_path.as_deref().unwrap_or("ffmpeg");
let status = Command::new(ffmpeg) let status = Command::new(ffmpeg)
@ -105,7 +125,9 @@ fn generate_video_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig
.stderr(std::process::Stdio::null()) .stderr(std::process::Stdio::null())
.status() .status()
.map_err(|e| { .map_err(|e| {
PinakesError::MetadataExtraction(format!("ffmpeg not found or failed to execute: {e}")) PinakesError::MetadataExtraction(format!(
"ffmpeg not found or failed to execute: {e}"
))
})?; })?;
if !status.success() { if !status.success() {
@ -118,7 +140,11 @@ fn generate_video_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig
Ok(()) Ok(())
} }
fn generate_pdf_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> { fn generate_pdf_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Use pdftoppm to render first page, then resize with image crate // Use pdftoppm to render first page, then resize with image crate
let temp_prefix = dest.with_extension("tmp"); let temp_prefix = dest.with_extension("tmp");
let status = Command::new("pdftoppm") let status = Command::new("pdftoppm")
@ -145,15 +171,18 @@ fn generate_pdf_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
let rendered = temp_prefix.with_extension("jpg"); let rendered = temp_prefix.with_extension("jpg");
if rendered.exists() { if rendered.exists() {
// Resize to thumbnail size // Resize to thumbnail size
let img = image::open(&rendered) let img = image::open(&rendered).map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("pdf thumbnail open: {e}")))?; PinakesError::MetadataExtraction(format!("pdf thumbnail open: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size); let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?; let mut output = std::fs::File::create(dest)?;
let encoder = let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality); &mut output,
thumb config.quality,
.write_with_encoder(encoder) );
.map_err(|e| PinakesError::MetadataExtraction(format!("pdf thumbnail encode: {e}")))?; thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("pdf thumbnail encode: {e}"))
})?;
let _ = std::fs::remove_file(&rendered); let _ = std::fs::remove_file(&rendered);
Ok(()) Ok(())
} else { } else {
@ -163,28 +192,36 @@ fn generate_pdf_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
} }
} }
fn generate_epub_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> { fn generate_epub_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Try to extract cover image from EPUB // Try to extract cover image from EPUB
let mut doc = epub::doc::EpubDoc::new(source) let mut doc = epub::doc::EpubDoc::new(source)
.map_err(|e| PinakesError::MetadataExtraction(format!("epub open: {e}")))?; .map_err(|e| PinakesError::MetadataExtraction(format!("epub open: {e}")))?;
let cover_data = doc.get_cover().map(|(data, _mime)| data).or_else(|| { let cover_data = doc.get_cover().map(|(data, _mime)| data).or_else(|| {
// Fallback: try to find a cover image in the resources // Fallback: try to find a cover image in the resources
doc.get_resource("cover-image") doc
.get_resource("cover-image")
.map(|(data, _)| data) .map(|(data, _)| data)
.or_else(|| doc.get_resource("cover").map(|(data, _)| data)) .or_else(|| doc.get_resource("cover").map(|(data, _)| data))
}); });
if let Some(data) = cover_data { if let Some(data) = cover_data {
let img = image::load_from_memory(&data) let img = image::load_from_memory(&data).map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("epub cover decode: {e}")))?; PinakesError::MetadataExtraction(format!("epub cover decode: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size); let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?; let mut output = std::fs::File::create(dest)?;
let encoder = let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality); &mut output,
thumb config.quality,
.write_with_encoder(encoder) );
.map_err(|e| PinakesError::MetadataExtraction(format!("epub thumbnail encode: {e}")))?; thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("epub thumbnail encode: {e}"))
})?;
Ok(()) Ok(())
} else { } else {
Err(PinakesError::MetadataExtraction( Err(PinakesError::MetadataExtraction(
@ -193,18 +230,28 @@ fn generate_epub_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
} }
} }
fn generate_raw_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> { fn generate_raw_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Try dcraw to extract embedded JPEG preview, then resize // Try dcraw to extract embedded JPEG preview, then resize
let temp_ppm = dest.with_extension("ppm"); let temp_ppm = dest.with_extension("ppm");
let status = Command::new("dcraw") let status = Command::new("dcraw")
.args(["-e", "-c"]) .args(["-e", "-c"])
.arg(source) .arg(source)
.stdout(std::fs::File::create(&temp_ppm).map_err(|e| { .stdout(std::fs::File::create(&temp_ppm).map_err(|e| {
PinakesError::MetadataExtraction(format!("failed to create temp file: {e}")) PinakesError::MetadataExtraction(format!(
"failed to create temp file: {e}"
))
})?) })?)
.stderr(std::process::Stdio::null()) .stderr(std::process::Stdio::null())
.status() .status()
.map_err(|e| PinakesError::MetadataExtraction(format!("dcraw not found or failed: {e}")))?; .map_err(|e| {
PinakesError::MetadataExtraction(format!(
"dcraw not found or failed: {e}"
))
})?;
if !status.success() { if !status.success() {
let _ = std::fs::remove_file(&temp_ppm); let _ = std::fs::remove_file(&temp_ppm);
@ -218,15 +265,18 @@ fn generate_raw_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
if temp_ppm.exists() { if temp_ppm.exists() {
let result = image::open(&temp_ppm); let result = image::open(&temp_ppm);
let _ = std::fs::remove_file(&temp_ppm); let _ = std::fs::remove_file(&temp_ppm);
let img = result let img = result.map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("raw preview decode: {e}")))?; PinakesError::MetadataExtraction(format!("raw preview decode: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size); let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?; let mut output = std::fs::File::create(dest)?;
let encoder = let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality); &mut output,
thumb config.quality,
.write_with_encoder(encoder) );
.map_err(|e| PinakesError::MetadataExtraction(format!("raw thumbnail encode: {e}")))?; thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("raw thumbnail encode: {e}"))
})?;
Ok(()) Ok(())
} else { } else {
Err(PinakesError::MetadataExtraction( Err(PinakesError::MetadataExtraction(
@ -235,7 +285,11 @@ fn generate_raw_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
} }
} }
fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> { fn generate_heic_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Use heif-convert to convert to JPEG, then resize // Use heif-convert to convert to JPEG, then resize
let temp_jpg = dest.with_extension("tmp.jpg"); let temp_jpg = dest.with_extension("tmp.jpg");
let status = Command::new("heif-convert") let status = Command::new("heif-convert")
@ -245,7 +299,9 @@ fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
.stderr(std::process::Stdio::null()) .stderr(std::process::Stdio::null())
.status() .status()
.map_err(|e| { .map_err(|e| {
PinakesError::MetadataExtraction(format!("heif-convert not found or failed: {e}")) PinakesError::MetadataExtraction(format!(
"heif-convert not found or failed: {e}"
))
})?; })?;
if !status.success() { if !status.success() {
@ -259,15 +315,18 @@ fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
if temp_jpg.exists() { if temp_jpg.exists() {
let result = image::open(&temp_jpg); let result = image::open(&temp_jpg);
let _ = std::fs::remove_file(&temp_jpg); let _ = std::fs::remove_file(&temp_jpg);
let img = let img = result.map_err(|e| {
result.map_err(|e| PinakesError::MetadataExtraction(format!("heic decode: {e}")))?; PinakesError::MetadataExtraction(format!("heic decode: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size); let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?; let mut output = std::fs::File::create(dest)?;
let encoder = let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality); &mut output,
thumb config.quality,
.write_with_encoder(encoder) );
.map_err(|e| PinakesError::MetadataExtraction(format!("heic thumbnail encode: {e}")))?; thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("heic thumbnail encode: {e}"))
})?;
Ok(()) Ok(())
} else { } else {
Err(PinakesError::MetadataExtraction( Err(PinakesError::MetadataExtraction(
@ -315,8 +374,9 @@ pub fn generate_book_covers(
let media_cover_dir = covers_dir.join(media_id.to_string()); let media_cover_dir = covers_dir.join(media_id.to_string());
std::fs::create_dir_all(&media_cover_dir)?; std::fs::create_dir_all(&media_cover_dir)?;
let img = image::load_from_memory(source_image) let img = image::load_from_memory(source_image).map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("cover image load: {e}")))?; PinakesError::MetadataExtraction(format!("cover image load: {e}"))
})?;
let mut results = Vec::new(); let mut results = Vec::new();
@ -334,18 +394,21 @@ pub fn generate_book_covers(
// Generate thumbnail // Generate thumbnail
let thumb = img.thumbnail(width, height); let thumb = img.thumbnail(width, height);
let mut output = std::fs::File::create(&cover_path)?; let mut output = std::fs::File::create(&cover_path)?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 90); let encoder =
thumb image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 90);
.write_with_encoder(encoder) thumb.write_with_encoder(encoder).map_err(|e| {
.map_err(|e| PinakesError::MetadataExtraction(format!("cover encode: {e}")))?; PinakesError::MetadataExtraction(format!("cover encode: {e}"))
} })?;
},
None => { None => {
// Save original // Save original
let mut output = std::fs::File::create(&cover_path)?; let mut output = std::fs::File::create(&cover_path)?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 95); let encoder =
img.write_with_encoder(encoder) image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 95);
.map_err(|e| PinakesError::MetadataExtraction(format!("cover encode: {e}")))?; img.write_with_encoder(encoder).map_err(|e| {
} PinakesError::MetadataExtraction(format!("cover encode: {e}"))
})?;
},
} }
results.push((size, cover_path)); results.push((size, cover_path));
@ -390,7 +453,8 @@ pub fn extract_pdf_cover(pdf_path: &Path) -> Result<Option<Vec<u8>>> {
let pdftoppm = "pdftoppm"; let pdftoppm = "pdftoppm";
let temp_dir = std::env::temp_dir(); let temp_dir = std::env::temp_dir();
let temp_prefix = temp_dir.join(format!("pdf_cover_{}", uuid::Uuid::new_v4())); let temp_prefix =
temp_dir.join(format!("pdf_cover_{}", uuid::Uuid::new_v4()));
let status = Command::new(pdftoppm) let status = Command::new(pdftoppm)
.args(["-jpeg", "-f", "1", "-l", "1", "-scale-to", "1200"]) .args(["-jpeg", "-f", "1", "-l", "1", "-scale-to", "1200"])

View file

@ -1,18 +1,22 @@
//! Transcoding service for media files using FFmpeg. //! Transcoding service for media files using FFmpeg.
use std::collections::HashMap; use std::{
use std::path::{Path, PathBuf}; collections::HashMap,
use std::sync::Arc; path::{Path, PathBuf},
sync::Arc,
};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::sync::{RwLock, Semaphore}; use tokio::sync::{RwLock, Semaphore};
use uuid::Uuid; use uuid::Uuid;
use crate::config::{TranscodeProfile, TranscodingConfig}; use crate::{
use crate::model::MediaId; config::{TranscodeProfile, TranscodingConfig},
use crate::storage::DynStorageBackend; model::MediaId,
use crate::users::UserId; storage::DynStorageBackend,
users::UserId,
};
/// A transcoding session for a media item. /// A transcoding session for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -61,8 +65,10 @@ impl TranscodeStatus {
"pending" => Self::Pending, "pending" => Self::Pending,
"transcoding" => Self::Transcoding, "transcoding" => Self::Transcoding,
"complete" => Self::Complete, "complete" => Self::Complete,
"failed" => Self::Failed { "failed" => {
Self::Failed {
error: error_message.unwrap_or("unknown error").to_string(), error: error_message.unwrap_or("unknown error").to_string(),
}
}, },
"cancelled" => Self::Cancelled, "cancelled" => Self::Cancelled,
other => { other => {
@ -71,7 +77,7 @@ impl TranscodeStatus {
other other
); );
Self::Pending Self::Pending
} },
} }
} }
@ -105,7 +111,8 @@ impl TranscodeService {
} }
pub fn cache_dir(&self) -> PathBuf { pub fn cache_dir(&self) -> PathBuf {
self.config self
.config
.cache_dir .cache_dir
.clone() .clone()
.unwrap_or_else(|| PathBuf::from("/tmp/pinakes-transcode")) .unwrap_or_else(|| PathBuf::from("/tmp/pinakes-transcode"))
@ -142,8 +149,9 @@ impl TranscodeService {
)) ))
})?; })?;
let expires_at = let expires_at = Some(
Some(Utc::now() + chrono::Duration::hours(self.config.cache_ttl_hours as i64)); Utc::now() + chrono::Duration::hours(self.config.cache_ttl_hours as i64),
);
let cancel_notify = Arc::new(tokio::sync::Notify::new()); let cancel_notify = Arc::new(tokio::sync::Notify::new());
@ -202,7 +210,7 @@ impl TranscodeService {
tracing::error!("failed to update transcode status: {}", e); tracing::error!("failed to update transcode status: {}", e);
} }
return; return;
} },
}; };
// Mark as transcoding // Mark as transcoding
@ -220,8 +228,11 @@ impl TranscodeService {
} }
// Build FFmpeg args and run // Build FFmpeg args and run
let args = get_ffmpeg_args(&source, &session_dir, &profile, hw_accel.as_deref()); let args =
match run_ffmpeg(&args, &sessions, session_id, duration_secs, cancel).await { get_ffmpeg_args(&source, &session_dir, &profile, hw_accel.as_deref());
match run_ffmpeg(&args, &sessions, session_id, duration_secs, cancel)
.await
{
Ok(()) => { Ok(()) => {
let mut s = sessions.write().await; let mut s = sessions.write().await;
if let Some(sess) = s.get_mut(&session_id) { if let Some(sess) = s.get_mut(&session_id) {
@ -234,7 +245,7 @@ impl TranscodeService {
{ {
tracing::error!("failed to update transcode status: {}", e); tracing::error!("failed to update transcode status: {}", e);
} }
} },
Err(e) => { Err(e) => {
let error_msg = e.to_string(); let error_msg = e.to_string();
let mut s = sessions.write().await; let mut s = sessions.write().await;
@ -258,7 +269,7 @@ impl TranscodeService {
{ {
tracing::error!("failed to update transcode status: {}", e); tracing::error!("failed to update transcode status: {}", e);
} }
} },
} }
}); });
@ -330,13 +341,19 @@ impl TranscodeService {
for (_id, path) in expired { for (_id, path) in expired {
if let Err(e) = tokio::fs::remove_dir_all(&path).await { if let Err(e) = tokio::fs::remove_dir_all(&path).await {
tracing::error!("failed to remove expired transcode cache directory: {}", e); tracing::error!(
"failed to remove expired transcode cache directory: {}",
e
);
} }
} }
} }
/// Get a session by ID from the in-memory store. /// Get a session by ID from the in-memory store.
pub async fn get_session(&self, session_id: Uuid) -> Option<TranscodeSession> { pub async fn get_session(
&self,
session_id: Uuid,
) -> Option<TranscodeSession> {
let sessions = self.sessions.read().await; let sessions = self.sessions.read().await;
sessions.get(&session_id).cloned() sessions.get(&session_id).cloned()
} }
@ -348,20 +365,28 @@ impl TranscodeService {
.file_name() .file_name()
.map(|n| n.to_string_lossy().to_string()) .map(|n| n.to_string_lossy().to_string())
.unwrap_or_default(); .unwrap_or_default();
if safe_name.is_empty() || safe_name.contains('\0') || safe_name.starts_with('.') { if safe_name.is_empty()
|| safe_name.contains('\0')
|| safe_name.starts_with('.')
{
// Return a non-existent path that will fail safely // Return a non-existent path that will fail safely
return self return self
.cache_dir() .cache_dir()
.join(session_id.to_string()) .join(session_id.to_string())
.join("__invalid__"); .join("__invalid__");
} }
self.cache_dir() self
.cache_dir()
.join(session_id.to_string()) .join(session_id.to_string())
.join(safe_name) .join(safe_name)
} }
/// Find a session for a given media_id and profile. /// Find a session for a given media_id and profile.
pub async fn find_session(&self, media_id: MediaId, profile: &str) -> Option<TranscodeSession> { pub async fn find_session(
&self,
media_id: MediaId,
profile: &str,
) -> Option<TranscodeSession> {
let sessions = self.sessions.read().await; let sessions = self.sessions.read().await;
sessions sessions
.values() .values()
@ -440,8 +465,10 @@ async fn run_ffmpeg(
duration_secs: Option<f64>, duration_secs: Option<f64>,
cancel: Arc<tokio::sync::Notify>, cancel: Arc<tokio::sync::Notify>,
) -> Result<(), crate::error::PinakesError> { ) -> Result<(), crate::error::PinakesError> {
use tokio::io::{AsyncBufReadExt, BufReader}; use tokio::{
use tokio::process::Command; io::{AsyncBufReadExt, BufReader},
process::Command,
};
let mut child = Command::new("ffmpeg") let mut child = Command::new("ffmpeg")
.args(args) .args(args)
@ -449,7 +476,10 @@ async fn run_ffmpeg(
.stderr(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped())
.spawn() .spawn()
.map_err(|e| { .map_err(|e| {
crate::error::PinakesError::InvalidOperation(format!("failed to spawn ffmpeg: {}", e)) crate::error::PinakesError::InvalidOperation(format!(
"failed to spawn ffmpeg: {}",
e
))
})?; })?;
// Capture stderr in a spawned task for error reporting // Capture stderr in a spawned task for error reporting
@ -486,7 +516,7 @@ async fn run_ffmpeg(
_ => { _ => {
// Duration unknown; don't update progress // Duration unknown; don't update progress
continue; continue;
} },
}; };
let mut s = sessions.write().await; let mut s = sessions.write().await;
if let Some(sess) = s.get_mut(&session_id) { if let Some(sess) = s.get_mut(&session_id) {

View file

@ -3,19 +3,20 @@
//! Handles file uploads, metadata extraction, and MediaItem creation //! Handles file uploads, metadata extraction, and MediaItem creation
//! for files stored in managed content-addressable storage. //! for files stored in managed content-addressable storage.
use std::collections::HashMap; use std::{collections::HashMap, path::Path};
use std::path::Path;
use chrono::Utc; use chrono::Utc;
use tokio::io::AsyncRead; use tokio::io::AsyncRead;
use tracing::{debug, info}; use tracing::{debug, info};
use crate::error::{PinakesError, Result}; use crate::{
use crate::managed_storage::ManagedStorageService; error::{PinakesError, Result},
use crate::media_type::MediaType; managed_storage::ManagedStorageService,
use crate::metadata; media_type::MediaType,
use crate::model::{MediaId, MediaItem, StorageMode, UploadResult}; metadata,
use crate::storage::DynStorageBackend; model::{MediaId, MediaItem, StorageMode, UploadResult},
storage::DynStorageBackend,
};
/// Process an upload from an async reader. /// Process an upload from an async reader.
/// ///
@ -53,7 +54,8 @@ pub async fn process_upload<R: AsyncRead + Unpin>(
let blob_path = managed.path(&content_hash); let blob_path = managed.path(&content_hash);
// Extract metadata // Extract metadata
let extracted = metadata::extract_metadata(&blob_path, media_type.clone()).ok(); let extracted =
metadata::extract_metadata(&blob_path, media_type.clone()).ok();
// Create or get blob record // Create or get blob record
let mime = mime_type let mime = mime_type
@ -146,7 +148,8 @@ pub async fn process_upload_file(
let reader = tokio::io::BufReader::new(file); let reader = tokio::io::BufReader::new(file);
let filename = original_filename.unwrap_or_else(|| { let filename = original_filename.unwrap_or_else(|| {
path.file_name() path
.file_name()
.and_then(|n| n.to_str()) .and_then(|n| n.to_str())
.unwrap_or("unknown") .unwrap_or("unknown")
}); });

View file

@ -1,12 +1,15 @@
//! User management and authentication //! User management and authentication
use std::collections::HashMap;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid; use uuid::Uuid;
use crate::config::UserRole; use crate::{
use crate::error::{PinakesError, Result}; config::UserRole,
error::{PinakesError, Result},
};
/// User ID /// User ID
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@ -149,7 +152,9 @@ pub mod auth {
argon2 argon2
.hash_password(password.as_bytes(), &salt) .hash_password(password.as_bytes(), &salt)
.map(|hash| hash.to_string()) .map(|hash| hash.to_string())
.map_err(|e| PinakesError::Authentication(format!("failed to hash password: {e}"))) .map_err(|e| {
PinakesError::Authentication(format!("failed to hash password: {e}"))
})
} }
/// Verify a password against a hash /// Verify a password against a hash
@ -159,12 +164,15 @@ pub mod auth {
password_hash::{PasswordHash, PasswordVerifier}, password_hash::{PasswordHash, PasswordVerifier},
}; };
let parsed_hash = PasswordHash::new(hash) let parsed_hash = PasswordHash::new(hash).map_err(|e| {
.map_err(|e| PinakesError::Authentication(format!("invalid password hash: {e}")))?; PinakesError::Authentication(format!("invalid password hash: {e}"))
})?;
Ok(Argon2::default() Ok(
Argon2::default()
.verify_password(password.as_bytes(), &parsed_hash) .verify_password(password.as_bytes(), &parsed_hash)
.is_ok()) .is_ok(),
)
} }
} }

View file

@ -1,8 +1,12 @@
use pinakes_core::books::{extract_isbn_from_text, normalize_isbn, parse_author_file_as}; use pinakes_core::{
use pinakes_core::enrichment::books::BookEnricher; books::{extract_isbn_from_text, normalize_isbn, parse_author_file_as},
use pinakes_core::enrichment::googlebooks::GoogleBooksClient; enrichment::{
use pinakes_core::enrichment::openlibrary::OpenLibraryClient; books::BookEnricher,
use pinakes_core::thumbnail::{CoverSize, extract_epub_cover, generate_book_covers}; googlebooks::GoogleBooksClient,
openlibrary::OpenLibraryClient,
},
thumbnail::{CoverSize, extract_epub_cover, generate_book_covers},
};
#[test] #[test]
fn test_isbn_normalization() { fn test_isbn_normalization() {
@ -138,7 +142,8 @@ fn test_book_cover_generation() {
use image::{ImageBuffer, Rgb}; use image::{ImageBuffer, Rgb};
let img: ImageBuffer<Rgb<u8>, Vec<u8>> = let img: ImageBuffer<Rgb<u8>, Vec<u8>> =
ImageBuffer::from_fn(100, 100, |_, _| Rgb([255u8, 0u8, 0u8])); ImageBuffer::from_fn(100, 100, |_, _| Rgb([255u8, 0u8, 0u8]));
img.write_to( img
.write_to(
&mut std::io::Cursor::new(&mut img_data), &mut std::io::Cursor::new(&mut img_data),
image::ImageFormat::Png, image::ImageFormat::Png,
) )
@ -174,10 +179,10 @@ async fn test_openlibrary_isbn_fetch() {
match result { match result {
Ok(book) => { Ok(book) => {
assert!(book.title.is_some()); assert!(book.title.is_some());
} },
Err(_) => { Err(_) => {
// Network error or book not found - acceptable in tests // Network error or book not found - acceptable in tests
} },
} }
} }
@ -195,9 +200,9 @@ async fn test_googlebooks_isbn_fetch() {
if !books.is_empty() { if !books.is_empty() {
assert!(books[0].volume_info.title.is_some()); assert!(books[0].volume_info.title.is_some());
} }
} },
Err(_) => { Err(_) => {
// Network error - acceptable in tests // Network error - acceptable in tests
} },
} }
} }

View file

@ -1,10 +1,10 @@
use std::collections::HashMap; use std::{collections::HashMap, path::PathBuf, sync::Arc};
use std::path::PathBuf;
use std::sync::Arc;
use pinakes_core::media_type::{BuiltinMediaType, MediaType}; use pinakes_core::{
use pinakes_core::model::{ContentHash, MediaId, MediaItem, StorageMode}; media_type::{BuiltinMediaType, MediaType},
use pinakes_core::storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend}; model::{ContentHash, MediaId, MediaItem, StorageMode},
storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend},
};
use tempfile::TempDir; use tempfile::TempDir;
use uuid::Uuid; use uuid::Uuid;

View file

@ -1,9 +1,9 @@
use std::collections::HashMap; use std::{collections::HashMap, sync::Arc};
use std::sync::Arc;
use pinakes_core::model::*; use pinakes_core::{
use pinakes_core::storage::StorageBackend; model::*,
use pinakes_core::storage::sqlite::SqliteBackend; storage::{StorageBackend, sqlite::SqliteBackend},
};
mod common; mod common;
use common::{make_test_media, setup}; use common::{make_test_media, setup};
@ -164,7 +164,12 @@ async fn test_collections() {
let storage = setup().await; let storage = setup().await;
let col = storage let col = storage
.create_collection("Favorites", CollectionKind::Manual, Some("My faves"), None) .create_collection(
"Favorites",
CollectionKind::Manual,
Some("My faves"),
None,
)
.await .await
.unwrap(); .unwrap();
assert_eq!(col.name, "Favorites"); assert_eq!(col.name, "Favorites");
@ -314,7 +319,9 @@ async fn test_search() {
id: MediaId::new(), id: MediaId::new(),
path: format!("/tmp/{name}").into(), path: format!("/tmp/{name}").into(),
file_name: name.to_string(), file_name: name.to_string(),
media_type: pinakes_core::media_type::MediaType::from_path(std::path::Path::new(name)) media_type: pinakes_core::media_type::MediaType::from_path(
std::path::Path::new(name),
)
.unwrap(), .unwrap(),
content_hash: ContentHash::new(format!("hash{i}")), content_hash: ContentHash::new(format!("hash{i}")),
file_size: 1000 * (i as u64 + 1), file_size: 1000 * (i as u64 + 1),
@ -504,8 +511,7 @@ async fn test_library_statistics_with_data() {
assert!(stats.oldest_item.is_some()); assert!(stats.oldest_item.is_some());
} }
// ===== Phase 2: Media Server Features ===== // Media Server Features
#[tokio::test] #[tokio::test]
async fn test_ratings_crud() { async fn test_ratings_crud() {
let storage = setup().await; let storage = setup().await;

View file

@ -34,7 +34,8 @@ async fn test_detect_untracked_files() {
fs::write(&tracked_file, b"tracked content").unwrap(); fs::write(&tracked_file, b"tracked content").unwrap();
fs::write(&untracked_file, b"untracked content").unwrap(); fs::write(&untracked_file, b"untracked content").unwrap();
let tracked_item = create_test_media_item(tracked_file.clone(), "hash_tracked"); let tracked_item =
create_test_media_item(tracked_file.clone(), "hash_tracked");
storage.insert_media(&tracked_item).await.unwrap(); storage.insert_media(&tracked_item).await.unwrap();
let report = detect_orphans(&storage).await.unwrap(); let report = detect_orphans(&storage).await.unwrap();
@ -117,20 +118,23 @@ async fn test_complete_orphan_workflow() {
storage.add_root_dir(root_dir.clone()).await.unwrap(); storage.add_root_dir(root_dir.clone()).await.unwrap();
let orphaned_path = root_dir.join("orphaned.mp3"); let orphaned_path = root_dir.join("orphaned.mp3");
let orphaned_item = create_test_media_item(orphaned_path.clone(), "hash_orphaned"); let orphaned_item =
create_test_media_item(orphaned_path.clone(), "hash_orphaned");
storage.insert_media(&orphaned_item).await.unwrap(); storage.insert_media(&orphaned_item).await.unwrap();
let untracked_path = root_dir.join("untracked.mp3"); let untracked_path = root_dir.join("untracked.mp3");
fs::write(&untracked_path, b"untracked").unwrap(); fs::write(&untracked_path, b"untracked").unwrap();
let another_orphaned = root_dir.join("another_orphaned.mp3"); let another_orphaned = root_dir.join("another_orphaned.mp3");
let another_item = create_test_media_item(another_orphaned.clone(), "hash_another"); let another_item =
create_test_media_item(another_orphaned.clone(), "hash_another");
storage.insert_media(&another_item).await.unwrap(); storage.insert_media(&another_item).await.unwrap();
let tracked_path = root_dir.join("tracked.mp3"); let tracked_path = root_dir.join("tracked.mp3");
fs::write(&tracked_path, b"tracked").unwrap(); fs::write(&tracked_path, b"tracked").unwrap();
let tracked_item = create_test_media_item(tracked_path.clone(), "hash_tracked"); let tracked_item =
create_test_media_item(tracked_path.clone(), "hash_tracked");
storage.insert_media(&tracked_item).await.unwrap(); storage.insert_media(&tracked_item).await.unwrap();
let report = detect_orphans(&storage).await.unwrap(); let report = detect_orphans(&storage).await.unwrap();

View file

@ -1,6 +1,4 @@
use pinakes_core::links::extract_links; use pinakes_core::{links::extract_links, model::*, storage::StorageBackend};
use pinakes_core::model::*;
use pinakes_core::storage::StorageBackend;
mod common; mod common;
@ -138,8 +136,8 @@ async fn test_save_links_concurrent_updates() {
let links1 = extract_links(note1_id, "[[target1]]"); let links1 = extract_links(note1_id, "[[target1]]");
let links2 = extract_links(note2_id, "[[target2]] [[target3]]"); let links2 = extract_links(note2_id, "[[target2]] [[target3]]");
// Execute both saves. We do so in sequence since we can't test true concurrency easily // Execute both saves. We do so in sequence since we can't test true
// ...or so I think. Database tests are annoying. // concurrency easily ...or so I think. Database tests are annoying.
storage storage
.save_markdown_links(note1_id, &links1) .save_markdown_links(note1_id, &links1)
.await .await

View file

@ -2,13 +2,15 @@ use chrono::Utc;
use pinakes_core::storage::{SessionData, StorageBackend}; use pinakes_core::storage::{SessionData, StorageBackend};
use tempfile::TempDir; use tempfile::TempDir;
async fn setup_sqlite_storage() -> pinakes_core::storage::sqlite::SqliteBackend { async fn setup_sqlite_storage() -> pinakes_core::storage::sqlite::SqliteBackend
{
let temp_dir = TempDir::new().unwrap(); let temp_dir = TempDir::new().unwrap();
let db_path = temp_dir let db_path = temp_dir
.path() .path()
.join(format!("test_{}.db", uuid::Uuid::now_v7())); .join(format!("test_{}.db", uuid::Uuid::now_v7()));
let storage = pinakes_core::storage::sqlite::SqliteBackend::new(&db_path).unwrap(); let storage =
pinakes_core::storage::sqlite::SqliteBackend::new(&db_path).unwrap();
storage.run_migrations().await.unwrap(); storage.run_migrations().await.unwrap();
// Keep temp_dir alive by leaking it (tests are short-lived anyway) // Keep temp_dir alive by leaking it (tests are short-lived anyway)

View file

@ -1,12 +1,16 @@
//! Pinakes Plugin API //! Pinakes Plugin API
//! //!
//! This crate defines the stable plugin interface for Pinakes. //! This crate defines the stable plugin interface for Pinakes.
//! Plugins can extend Pinakes by implementing one or more of the provided traits. //! Plugins can extend Pinakes by implementing one or more of the provided
//! traits.
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use async_trait::async_trait; use async_trait::async_trait;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use thiserror::Error; use thiserror::Error;
pub mod manifest; pub mod manifest;
@ -115,7 +119,8 @@ pub struct EnvironmentCapability {
/// Whether environment variable access is allowed /// Whether environment variable access is allowed
pub enabled: bool, pub enabled: bool,
/// Specific environment variables allowed (if None, all allowed when enabled) /// Specific environment variables allowed (if None, all allowed when
/// enabled)
pub allowed_vars: Option<Vec<String>>, pub allowed_vars: Option<Vec<String>>,
} }
@ -162,7 +167,11 @@ pub trait MediaTypeProvider: Plugin {
fn supported_media_types(&self) -> Vec<MediaTypeDefinition>; fn supported_media_types(&self) -> Vec<MediaTypeDefinition>;
/// Check if this plugin can handle the given file /// Check if this plugin can handle the given file
async fn can_handle(&self, path: &Path, mime_type: Option<&str>) -> PluginResult<bool>; async fn can_handle(
&self,
path: &Path,
mime_type: Option<&str>,
) -> PluginResult<bool>;
} }
/// Definition of a custom media type /// Definition of a custom media type
@ -191,7 +200,10 @@ pub struct MediaTypeDefinition {
#[async_trait] #[async_trait]
pub trait MetadataExtractor: Plugin { pub trait MetadataExtractor: Plugin {
/// Extract metadata from a file /// Extract metadata from a file
async fn extract_metadata(&self, path: &Path) -> PluginResult<ExtractedMetadata>; async fn extract_metadata(
&self,
path: &Path,
) -> PluginResult<ExtractedMetadata>;
/// Get the media types this extractor supports /// Get the media types this extractor supports
fn supported_types(&self) -> Vec<String>; fn supported_types(&self) -> Vec<String>;
@ -268,7 +280,10 @@ pub trait SearchBackend: Plugin {
async fn remove_item(&self, item_id: &str) -> PluginResult<()>; async fn remove_item(&self, item_id: &str) -> PluginResult<()>;
/// Perform a search query /// Perform a search query
async fn search(&self, query: &SearchQuery) -> PluginResult<Vec<SearchResult>>; async fn search(
&self,
query: &SearchQuery,
) -> PluginResult<Vec<SearchResult>>;
/// Get search statistics /// Get search statistics
async fn get_stats(&self) -> PluginResult<SearchStats>; async fn get_stats(&self) -> PluginResult<SearchStats>;

View file

@ -1,11 +1,16 @@
//! Plugin manifest parsing and validation //! Plugin manifest parsing and validation
use std::{collections::HashMap, path::Path};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::Path;
use thiserror::Error; use thiserror::Error;
use crate::{Capabilities, EnvironmentCapability, FilesystemCapability, NetworkCapability}; use crate::{
Capabilities,
EnvironmentCapability,
FilesystemCapability,
NetworkCapability,
};
/// Plugin manifest file format (TOML) /// Plugin manifest file format (TOML)
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,8 +1,9 @@
//! Shared types used across the plugin API //! Shared types used across the plugin API
use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use serde::{Deserialize, Serialize};
/// Plugin identifier /// Plugin identifier
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PluginId(String); pub struct PluginId(String);

View file

@ -1,8 +1,9 @@
//! WASM bridge types and helpers for plugin communication //! WASM bridge types and helpers for plugin communication
use serde::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
use serde::{Deserialize, Serialize};
/// Memory allocation info for passing data between host and plugin /// Memory allocation info for passing data between host and plugin
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WasmMemoryAlloc { pub struct WasmMemoryAlloc {
@ -119,19 +120,28 @@ pub mod helpers {
} }
/// Deserialize bytes from WASM to a value /// Deserialize bytes from WASM to a value
pub fn deserialize<T: for<'de> Deserialize<'de>>(bytes: &[u8]) -> Result<T, String> { pub fn deserialize<T: for<'de> Deserialize<'de>>(
serde_json::from_slice(bytes).map_err(|e| format!("Deserialization error: {}", e)) bytes: &[u8],
) -> Result<T, String> {
serde_json::from_slice(bytes)
.map_err(|e| format!("Deserialization error: {}", e))
} }
/// Create a success response /// Create a success response
pub fn ok_response<T: Serialize>(request_id: String, value: &T) -> Result<Vec<u8>, String> { pub fn ok_response<T: Serialize>(
request_id: String,
value: &T,
) -> Result<Vec<u8>, String> {
let result = WasmResult::Ok(serialize(value)?); let result = WasmResult::Ok(serialize(value)?);
let response = PluginResponse { request_id, result }; let response = PluginResponse { request_id, result };
serialize(&response) serialize(&response)
} }
/// Create an error response /// Create an error response
pub fn error_response(request_id: String, error: String) -> Result<Vec<u8>, String> { pub fn error_response(
request_id: String,
error: String,
) -> Result<Vec<u8>, String> {
let result = WasmResult::<Vec<u8>>::Err(error); let result = WasmResult::<Vec<u8>>::Err(error);
let response = PluginResponse { request_id, result }; let response = PluginResponse { request_id, result };
serialize(&response) serialize(&response)
@ -140,8 +150,7 @@ pub mod helpers {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::helpers::*; use super::{helpers::*, *};
use super::*;
#[test] #[test]
fn test_serialize_deserialize() { fn test_serialize_deserialize() {
@ -164,7 +173,7 @@ mod tests {
WasmResult::Ok(data) => { WasmResult::Ok(data) => {
let recovered: String = deserialize(&data).unwrap(); let recovered: String = deserialize(&data).unwrap();
assert_eq!(recovered, value); assert_eq!(recovered, value);
} },
WasmResult::Err(_) => panic!("Expected Ok result"), WasmResult::Err(_) => panic!("Expected Ok result"),
} }
} }
@ -173,7 +182,8 @@ mod tests {
fn test_error_response() { fn test_error_response() {
let request_id = "test-456".to_string(); let request_id = "test-456".to_string();
let error_msg = "Something went wrong"; let error_msg = "Something went wrong";
let response_bytes = error_response(request_id.clone(), error_msg.to_string()).unwrap(); let response_bytes =
error_response(request_id.clone(), error_msg.to_string()).unwrap();
let response: PluginResponse = deserialize(&response_bytes).unwrap(); let response: PluginResponse = deserialize(&response_bytes).unwrap();
assert_eq!(response.request_id, request_id); assert_eq!(response.request_id, request_id);

View file

@ -1,13 +1,30 @@
use std::{collections::HashMap, path::PathBuf};
use async_trait::async_trait; use async_trait::async_trait;
use pinakes_plugin_api::wasm::{HttpRequest, HttpResponse, LogLevel, LogMessage};
use pinakes_plugin_api::{ use pinakes_plugin_api::{
Capabilities, EnvironmentCapability, Event, EventType, ExtractedMetadata, FilesystemCapability, Capabilities,
HealthStatus, MediaTypeDefinition, NetworkCapability, Plugin, PluginContext, PluginError, EnvironmentCapability,
PluginMetadata, PluginResult, SearchIndexItem, SearchQuery, SearchResult, SearchStats, Event,
ThumbnailFormat, ThumbnailInfo, ThumbnailOptions, EventType,
ExtractedMetadata,
FilesystemCapability,
HealthStatus,
MediaTypeDefinition,
NetworkCapability,
Plugin,
PluginContext,
PluginError,
PluginMetadata,
PluginResult,
SearchIndexItem,
SearchQuery,
SearchResult,
SearchStats,
ThumbnailFormat,
ThumbnailInfo,
ThumbnailOptions,
wasm::{HttpRequest, HttpResponse, LogLevel, LogMessage},
}; };
use std::collections::HashMap;
use std::path::PathBuf;
struct TestPlugin { struct TestPlugin {
initialized: bool, initialized: bool,
@ -227,7 +244,8 @@ async fn test_thumbnail_options_serialization() {
}; };
let serialized = serde_json::to_string(&options).unwrap(); let serialized = serde_json::to_string(&options).unwrap();
let deserialized: ThumbnailOptions = serde_json::from_str(&serialized).unwrap(); let deserialized: ThumbnailOptions =
serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized.width, 320); assert_eq!(deserialized.width, 320);
assert_eq!(deserialized.height, 240); assert_eq!(deserialized.height, 240);
@ -249,7 +267,8 @@ async fn test_thumbnail_format_variants() {
format, format,
}; };
let serialized = serde_json::to_string(&options).unwrap(); let serialized = serde_json::to_string(&options).unwrap();
let deserialized: ThumbnailOptions = serde_json::from_str(&serialized).unwrap(); let deserialized: ThumbnailOptions =
serde_json::from_str(&serialized).unwrap();
assert!(matches!(deserialized.format, _)); assert!(matches!(deserialized.format, _));
} }
} }
@ -350,7 +369,10 @@ async fn test_http_request_serialization() {
async fn test_http_response_serialization() { async fn test_http_response_serialization() {
let response = HttpResponse { let response = HttpResponse {
status: 200, status: 200,
headers: HashMap::from([("Content-Type".to_string(), "application/json".to_string())]), headers: HashMap::from([(
"Content-Type".to_string(),
"application/json".to_string(),
)]),
body: b"{\"success\": true}".to_vec(), body: b"{\"success\": true}".to_vec(),
}; };
@ -401,7 +423,9 @@ async fn test_log_level_variants() {
async fn test_plugin_error_variants() { async fn test_plugin_error_variants() {
let errors: Vec<PluginError> = vec![ let errors: Vec<PluginError> = vec![
PluginError::InitializationFailed("WASM load failed".to_string()), PluginError::InitializationFailed("WASM load failed".to_string()),
PluginError::UnsupportedOperation("Custom search not implemented".to_string()), PluginError::UnsupportedOperation(
"Custom search not implemented".to_string(),
),
PluginError::InvalidInput("Invalid file path".to_string()), PluginError::InvalidInput("Invalid file path".to_string()),
PluginError::IoError("File not found".to_string()), PluginError::IoError("File not found".to_string()),
PluginError::MetadataExtractionFailed("Parse error".to_string()), PluginError::MetadataExtractionFailed("Parse error".to_string()),
@ -439,7 +463,8 @@ async fn test_search_index_item_serialization() {
}; };
let serialized = serde_json::to_string(&item).unwrap(); let serialized = serde_json::to_string(&item).unwrap();
let deserialized: SearchIndexItem = serde_json::from_str(&serialized).unwrap(); let deserialized: SearchIndexItem =
serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized.id, "media-456"); assert_eq!(deserialized.id, "media-456");
assert_eq!(deserialized.title, Some("Summer Vacation".to_string())); assert_eq!(deserialized.title, Some("Summer Vacation".to_string()));

View file

@ -1,6 +1,7 @@
use pinakes_plugin_api::PluginManifest;
use std::path::PathBuf; use std::path::PathBuf;
use pinakes_plugin_api::PluginManifest;
#[test] #[test]
fn test_markdown_metadata_manifest() { fn test_markdown_metadata_manifest() {
let manifest_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) let manifest_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
@ -46,10 +47,11 @@ fn test_heif_support_manifest() {
assert_eq!(manifest.plugin.name, "heif-support"); assert_eq!(manifest.plugin.name, "heif-support");
assert_eq!(manifest.plugin.version, "1.0.0"); assert_eq!(manifest.plugin.version, "1.0.0");
assert_eq!(manifest.plugin.api_version, "1.0"); assert_eq!(manifest.plugin.api_version, "1.0");
assert_eq!( assert_eq!(manifest.plugin.kind, vec![
manifest.plugin.kind, "media_type",
vec!["media_type", "metadata_extractor", "thumbnail_generator"] "metadata_extractor",
); "thumbnail_generator"
]);
assert_eq!(manifest.plugin.binary.wasm, "heif_support.wasm"); assert_eq!(manifest.plugin.binary.wasm, "heif_support.wasm");
// Validate capabilities // Validate capabilities

View file

@ -1,20 +1,21 @@
use std::sync::Arc; use std::sync::Arc;
use axum::Router; use axum::{
use axum::extract::DefaultBodyLimit; Router,
use axum::http::{HeaderValue, Method, header}; extract::DefaultBodyLimit,
use axum::middleware; http::{HeaderValue, Method, header},
use axum::routing::{delete, get, patch, post, put}; middleware,
routing::{delete, get, patch, post, put},
};
use tower::ServiceBuilder; use tower::ServiceBuilder;
use tower_governor::GovernorLayer; use tower_governor::{GovernorLayer, governor::GovernorConfigBuilder};
use tower_governor::governor::GovernorConfigBuilder; use tower_http::{
use tower_http::cors::CorsLayer; cors::CorsLayer,
use tower_http::set_header::SetResponseHeaderLayer; set_header::SetResponseHeaderLayer,
use tower_http::trace::TraceLayer; trace::TraceLayer,
};
use crate::auth; use crate::{auth, routes, state::AppState};
use crate::routes;
use crate::state::AppState;
/// Create the router with optional TLS configuration for HSTS headers /// Create the router with optional TLS configuration for HSTS headers
pub fn create_router(state: AppState) -> Router { pub fn create_router(state: AppState) -> Router {
@ -53,7 +54,8 @@ pub fn create_router_with_tls(
.unwrap(), .unwrap(),
); );
// Rate limit for streaming: 5 requests per IP (very restrictive for concurrent streams) // Rate limit for streaming: 5 requests per IP (very restrictive for
// concurrent streams)
let stream_governor = Arc::new( let stream_governor = Arc::new(
GovernorConfigBuilder::default() GovernorConfigBuilder::default()
.per_second(60) // replenish slowly (one per minute) .per_second(60) // replenish slowly (one per minute)
@ -575,8 +577,10 @@ pub fn create_router_with_tls(
// Add HSTS header when TLS is enabled // Add HSTS header when TLS is enabled
if let Some(tls) = tls_config { if let Some(tls) = tls_config {
if tls.enabled && tls.hsts_enabled { if tls.enabled && tls.hsts_enabled {
let hsts_value = format!("max-age={}; includeSubDomains", tls.hsts_max_age); let hsts_value =
let hsts_header = HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| { format!("max-age={}; includeSubDomains", tls.hsts_max_age);
let hsts_header =
HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| {
HeaderValue::from_static("max-age=31536000; includeSubDomains") HeaderValue::from_static("max-age=31536000; includeSubDomains")
}); });

View file

@ -1,8 +1,9 @@
use axum::extract::{Request, State}; use axum::{
use axum::http::StatusCode; extract::{Request, State},
use axum::middleware::Next; http::StatusCode,
use axum::response::{IntoResponse, Response}; middleware::Next,
response::{IntoResponse, Response},
};
use pinakes_core::config::UserRole; use pinakes_core::config::UserRole;
use crate::state::AppState; use crate::state::AppState;
@ -21,9 +22,10 @@ fn constant_time_eq(a: &str, b: &str) -> bool {
/// Axum middleware that checks for a valid Bearer token. /// Axum middleware that checks for a valid Bearer token.
/// ///
/// If `accounts.enabled == true`: look up bearer token in database session store. /// If `accounts.enabled == true`: look up bearer token in database session
/// If `accounts.enabled == false`: use existing api_key logic (unchanged behavior). /// store. If `accounts.enabled == false`: use existing api_key logic (unchanged
/// Skips authentication for the `/health` and `/auth/login` path suffixes. /// behavior). Skips authentication for the `/health` and `/auth/login` path
/// suffixes.
pub async fn require_auth( pub async fn require_auth(
State(state): State<AppState>, State(state): State<AppState>,
mut request: Request, mut request: Request,
@ -70,11 +72,12 @@ pub async fn require_auth(
Ok(None) => { Ok(None) => {
tracing::debug!(path = %path, "rejected: invalid session token"); tracing::debug!(path = %path, "rejected: invalid session token");
return unauthorized("invalid or expired session token"); return unauthorized("invalid or expired session token");
} },
Err(e) => { Err(e) => {
tracing::error!(error = %e, "failed to query session from database"); tracing::error!(error = %e, "failed to query session from database");
return (StatusCode::INTERNAL_SERVER_ERROR, "database error").into_response(); return (StatusCode::INTERNAL_SERVER_ERROR, "database error")
} .into_response();
},
}; };
// Check session expiry // Check session expiry
@ -110,7 +113,7 @@ pub async fn require_auth(
_ => { _ => {
tracing::warn!(role = %session.role, "unknown role, defaulting to viewer"); tracing::warn!(role = %session.role, "unknown role, defaulting to viewer");
UserRole::Viewer UserRole::Viewer
} },
}; };
// Inject role and username into request extensions // Inject role and username into request extensions
@ -130,7 +133,9 @@ pub async fn require_auth(
if expected_key.is_empty() { if expected_key.is_empty() {
// Empty key is not allowed - must use authentication_disabled flag // Empty key is not allowed - must use authentication_disabled flag
tracing::error!("empty api_key rejected, use authentication_disabled flag instead"); tracing::error!(
"empty api_key rejected, use authentication_disabled flag instead"
);
return unauthorized("authentication not properly configured"); return unauthorized("authentication not properly configured");
} }
@ -146,12 +151,13 @@ pub async fn require_auth(
tracing::warn!(path = %path, "rejected: invalid API key"); tracing::warn!(path = %path, "rejected: invalid API key");
return unauthorized("invalid api key"); return unauthorized("invalid api key");
} }
} },
_ => { _ => {
return unauthorized( return unauthorized(
"missing or malformed Authorization header, expected: Bearer <api_key>", "missing or malformed Authorization header, expected: Bearer \
<api_key>",
); );
} },
} }
// API key matches, grant admin // API key matches, grant admin
@ -202,9 +208,11 @@ pub async fn resolve_user_id(
Err(e) => { Err(e) => {
tracing::warn!(username = %username, error = ?e, "failed to resolve user"); tracing::warn!(username = %username, error = ?e, "failed to resolve user");
Err(crate::error::ApiError( Err(crate::error::ApiError(
pinakes_core::error::PinakesError::Authentication("user not found".into()), pinakes_core::error::PinakesError::Authentication(
"user not found".into(),
),
)) ))
} },
} }
} }

View file

@ -1,5 +1,4 @@
use std::collections::HashMap; use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -446,7 +445,9 @@ pub struct TypeCountResponse {
pub count: u64, pub count: u64,
} }
impl From<pinakes_core::storage::LibraryStatistics> for LibraryStatisticsResponse { impl From<pinakes_core::storage::LibraryStatistics>
for LibraryStatisticsResponse
{
fn from(stats: pinakes_core::storage::LibraryStatistics) -> Self { fn from(stats: pinakes_core::storage::LibraryStatistics) -> Self {
Self { Self {
total_media: stats.total_media, total_media: stats.total_media,
@ -545,13 +546,10 @@ impl From<pinakes_core::model::MediaItem> for MediaResponse {
.custom_fields .custom_fields
.into_iter() .into_iter()
.map(|(k, v)| { .map(|(k, v)| {
( (k, CustomFieldResponse {
k,
CustomFieldResponse {
field_type: format!("{:?}", v.field_type).to_lowercase(), field_type: format!("{:?}", v.field_type).to_lowercase(),
value: v.value, value: v.value,
}, })
)
}) })
.collect(), .collect(),
@ -983,7 +981,9 @@ pub struct ExternalMetadataResponse {
pub last_updated: DateTime<Utc>, pub last_updated: DateTime<Utc>,
} }
impl From<pinakes_core::enrichment::ExternalMetadata> for ExternalMetadataResponse { impl From<pinakes_core::enrichment::ExternalMetadata>
for ExternalMetadataResponse
{
fn from(m: pinakes_core::enrichment::ExternalMetadata) -> Self { fn from(m: pinakes_core::enrichment::ExternalMetadata) -> Self {
let metadata = serde_json::from_str(&m.metadata_json).unwrap_or_else(|e| { let metadata = serde_json::from_str(&m.metadata_json).unwrap_or_else(|e| {
tracing::warn!( tracing::warn!(
@ -1018,7 +1018,9 @@ pub struct TranscodeSessionResponse {
pub expires_at: Option<DateTime<Utc>>, pub expires_at: Option<DateTime<Utc>>,
} }
impl From<pinakes_core::transcode::TranscodeSession> for TranscodeSessionResponse { impl From<pinakes_core::transcode::TranscodeSession>
for TranscodeSessionResponse
{
fn from(s: pinakes_core::transcode::TranscodeSession) -> Self { fn from(s: pinakes_core::transcode::TranscodeSession) -> Self {
Self { Self {
id: s.id.to_string(), id: s.id.to_string(),
@ -1066,7 +1068,9 @@ pub struct ManagedStorageStatsResponse {
pub deduplication_ratio: f64, pub deduplication_ratio: f64,
} }
impl From<pinakes_core::model::ManagedStorageStats> for ManagedStorageStatsResponse { impl From<pinakes_core::model::ManagedStorageStats>
for ManagedStorageStatsResponse
{
fn from(stats: pinakes_core::model::ManagedStorageStats) -> Self { fn from(stats: pinakes_core::model::ManagedStorageStats) -> Self {
Self { Self {
total_blobs: stats.total_blobs, total_blobs: stats.total_blobs,
@ -1321,7 +1325,9 @@ pub struct SharePermissionsResponse {
pub can_add: bool, pub can_add: bool,
} }
impl From<pinakes_core::sharing::SharePermissions> for SharePermissionsResponse { impl From<pinakes_core::sharing::SharePermissions>
for SharePermissionsResponse
{
fn from(p: pinakes_core::sharing::SharePermissions) -> Self { fn from(p: pinakes_core::sharing::SharePermissions) -> Self {
Self { Self {
can_view: p.can_view, can_view: p.can_view,
@ -1339,32 +1345,32 @@ impl From<pinakes_core::sharing::Share> for ShareResponse {
let (target_type, target_id) = match &s.target { let (target_type, target_id) = match &s.target {
pinakes_core::sharing::ShareTarget::Media { media_id } => { pinakes_core::sharing::ShareTarget::Media { media_id } => {
("media".to_string(), media_id.0.to_string()) ("media".to_string(), media_id.0.to_string())
} },
pinakes_core::sharing::ShareTarget::Collection { collection_id } => { pinakes_core::sharing::ShareTarget::Collection { collection_id } => {
("collection".to_string(), collection_id.to_string()) ("collection".to_string(), collection_id.to_string())
} },
pinakes_core::sharing::ShareTarget::Tag { tag_id } => { pinakes_core::sharing::ShareTarget::Tag { tag_id } => {
("tag".to_string(), tag_id.to_string()) ("tag".to_string(), tag_id.to_string())
} },
pinakes_core::sharing::ShareTarget::SavedSearch { search_id } => { pinakes_core::sharing::ShareTarget::SavedSearch { search_id } => {
("saved_search".to_string(), search_id.to_string()) ("saved_search".to_string(), search_id.to_string())
} },
}; };
let (recipient_type, recipient_user_id, recipient_group_id, public_token) = let (recipient_type, recipient_user_id, recipient_group_id, public_token) =
match &s.recipient { match &s.recipient {
pinakes_core::sharing::ShareRecipient::PublicLink { token, .. } => { pinakes_core::sharing::ShareRecipient::PublicLink { token, .. } => {
("public_link".to_string(), None, None, Some(token.clone())) ("public_link".to_string(), None, None, Some(token.clone()))
} },
pinakes_core::sharing::ShareRecipient::User { user_id } => { pinakes_core::sharing::ShareRecipient::User { user_id } => {
("user".to_string(), Some(user_id.0.to_string()), None, None) ("user".to_string(), Some(user_id.0.to_string()), None, None)
} },
pinakes_core::sharing::ShareRecipient::Group { group_id } => { pinakes_core::sharing::ShareRecipient::Group { group_id } => {
("group".to_string(), None, Some(group_id.to_string()), None) ("group".to_string(), None, Some(group_id.to_string()), None)
} },
pinakes_core::sharing::ShareRecipient::Federated { .. } => { pinakes_core::sharing::ShareRecipient::Federated { .. } => {
("federated".to_string(), None, None, None) ("federated".to_string(), None, None, None)
} },
}; };
Self { Self {
@ -1430,7 +1436,9 @@ pub struct ShareNotificationResponse {
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
} }
impl From<pinakes_core::sharing::ShareNotification> for ShareNotificationResponse { impl From<pinakes_core::sharing::ShareNotification>
for ShareNotificationResponse
{
fn from(n: pinakes_core::sharing::ShareNotification) -> Self { fn from(n: pinakes_core::sharing::ShareNotification) -> Self {
Self { Self {
id: n.id.to_string(), id: n.id.to_string(),

View file

@ -1,5 +1,7 @@
use axum::http::StatusCode; use axum::{
use axum::response::{IntoResponse, Response}; http::StatusCode,
response::{IntoResponse, Response},
};
use serde::Serialize; use serde::Serialize;
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@ -22,9 +24,11 @@ impl IntoResponse for ApiError {
.unwrap_or_else(|| "unknown".to_string()); .unwrap_or_else(|| "unknown".to_string());
tracing::debug!(path = %path.display(), "file not found"); tracing::debug!(path = %path.display(), "file not found");
(StatusCode::NOT_FOUND, format!("file not found: {name}")) (StatusCode::NOT_FOUND, format!("file not found: {name}"))
} },
PinakesError::TagNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()), PinakesError::TagNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::CollectionNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()), PinakesError::CollectionNotFound(msg) => {
(StatusCode::NOT_FOUND, msg.clone())
},
PinakesError::DuplicateHash(msg) => (StatusCode::CONFLICT, msg.clone()), PinakesError::DuplicateHash(msg) => (StatusCode::CONFLICT, msg.clone()),
PinakesError::UnsupportedMediaType(path) => { PinakesError::UnsupportedMediaType(path) => {
let name = path let name = path
@ -35,10 +39,14 @@ impl IntoResponse for ApiError {
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
format!("unsupported media type: {name}"), format!("unsupported media type: {name}"),
) )
} },
PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()), PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::InvalidOperation(msg) => (StatusCode::BAD_REQUEST, msg.clone()), PinakesError::InvalidOperation(msg) => {
PinakesError::Authentication(msg) => (StatusCode::UNAUTHORIZED, msg.clone()), (StatusCode::BAD_REQUEST, msg.clone())
},
PinakesError::Authentication(msg) => {
(StatusCode::UNAUTHORIZED, msg.clone())
},
PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()), PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()),
PinakesError::Config(_) => { PinakesError::Config(_) => {
tracing::error!(error = %self.0, "configuration error"); tracing::error!(error = %self.0, "configuration error");
@ -46,14 +54,14 @@ impl IntoResponse for ApiError {
StatusCode::INTERNAL_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR,
"internal configuration error".to_string(), "internal configuration error".to_string(),
) )
} },
_ => { _ => {
tracing::error!(error = %self.0, "internal server error"); tracing::error!(error = %self.0, "internal server error");
( (
StatusCode::INTERNAL_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR,
"internal server error".to_string(), "internal server error".to_string(),
) )
} },
}; };
let body = serde_json::to_string(&ErrorResponse { let body = serde_json::to_string(&ErrorResponse {

View file

@ -1,21 +1,14 @@
use std::path::PathBuf; use std::{path::PathBuf, sync::Arc};
use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use axum::Router; use axum::{Router, response::Redirect, routing::any};
use axum::response::Redirect;
use axum::routing::any;
use clap::Parser; use clap::Parser;
use pinakes_core::{config::Config, storage::StorageBackend};
use pinakes_server::{app, state::AppState};
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::info; use tracing::info;
use tracing_subscriber::EnvFilter; use tracing_subscriber::EnvFilter;
use pinakes_core::config::Config;
use pinakes_core::storage::StorageBackend;
use pinakes_server::app;
use pinakes_server::state::AppState;
/// Pinakes media cataloging server /// Pinakes media cataloging server
#[derive(Parser)] #[derive(Parser)]
#[command(name = "pinakes-server", version, about)] #[command(name = "pinakes-server", version, about)]
@ -66,7 +59,8 @@ async fn main() -> Result<()> {
let cli = Cli::parse(); let cli = Cli::parse();
// Initialize logging // Initialize logging
let env_filter = EnvFilter::try_new(&cli.log_level).unwrap_or_else(|_| EnvFilter::new("info")); let env_filter = EnvFilter::try_new(&cli.log_level)
.unwrap_or_else(|_| EnvFilter::new("info"));
match cli.log_format.as_str() { match cli.log_format.as_str() {
"json" => { "json" => {
@ -74,22 +68,22 @@ async fn main() -> Result<()> {
.with_env_filter(env_filter) .with_env_filter(env_filter)
.json() .json()
.init(); .init();
} },
"pretty" => { "pretty" => {
tracing_subscriber::fmt() tracing_subscriber::fmt()
.with_env_filter(env_filter) .with_env_filter(env_filter)
.pretty() .pretty()
.init(); .init();
} },
"full" => { "full" => {
tracing_subscriber::fmt().with_env_filter(env_filter).init(); tracing_subscriber::fmt().with_env_filter(env_filter).init();
} },
_ => { _ => {
tracing_subscriber::fmt() tracing_subscriber::fmt()
.with_env_filter(env_filter) .with_env_filter(env_filter)
.compact() .compact()
.init(); .init();
} },
} }
let (config_path, was_explicit) = resolve_config_path(cli.config.as_deref()); let (config_path, was_explicit) = resolve_config_path(cli.config.as_deref());
@ -98,7 +92,8 @@ async fn main() -> Result<()> {
info!(path = %config_path.display(), "loading configuration from file"); info!(path = %config_path.display(), "loading configuration from file");
Config::from_file(&config_path)? Config::from_file(&config_path)?
} else if was_explicit { } else if was_explicit {
// User explicitly provided a config path that doesn't exist - this is an error // User explicitly provided a config path that doesn't exist - this is an
// error
return Err(anyhow::anyhow!( return Err(anyhow::anyhow!(
"configuration file not found: {}", "configuration file not found: {}",
config_path.display() config_path.display()
@ -118,9 +113,12 @@ async fn main() -> Result<()> {
// Warn about authentication configuration // Warn about authentication configuration
if config.server.authentication_disabled { if config.server.authentication_disabled {
tracing::warn!( tracing::warn!(
"⚠️ AUTHENTICATION IS DISABLED - All requests will be allowed without authentication!" "⚠️ AUTHENTICATION IS DISABLED - All requests will be allowed without \
authentication!"
);
tracing::warn!(
"⚠️ This is INSECURE and should only be used for development."
); );
tracing::warn!("⚠️ This is INSECURE and should only be used for development.");
} else { } else {
let has_api_key = config let has_api_key = config
.server .server
@ -142,7 +140,10 @@ async fn main() -> Result<()> {
} }
// Storage backend initialization // Storage backend initialization
let storage: pinakes_core::storage::DynStorageBackend = match config.storage.backend { let storage: pinakes_core::storage::DynStorageBackend = match config
.storage
.backend
{
pinakes_core::config::StorageBackendType::Sqlite => { pinakes_core::config::StorageBackendType::Sqlite => {
let sqlite_config = config.storage.sqlite.as_ref().ok_or_else(|| { let sqlite_config = config.storage.sqlite.as_ref().ok_or_else(|| {
anyhow::anyhow!( anyhow::anyhow!(
@ -150,21 +151,25 @@ async fn main() -> Result<()> {
) )
})?; })?;
info!(path = %sqlite_config.path.display(), "initializing sqlite storage"); info!(path = %sqlite_config.path.display(), "initializing sqlite storage");
let backend = pinakes_core::storage::sqlite::SqliteBackend::new(&sqlite_config.path)?; let backend =
pinakes_core::storage::sqlite::SqliteBackend::new(&sqlite_config.path)?;
backend.run_migrations().await?; backend.run_migrations().await?;
Arc::new(backend) Arc::new(backend)
} },
pinakes_core::config::StorageBackendType::Postgres => { pinakes_core::config::StorageBackendType::Postgres => {
let pg_config = config.storage.postgres.as_ref().ok_or_else(|| { let pg_config = config.storage.postgres.as_ref().ok_or_else(|| {
anyhow::anyhow!( anyhow::anyhow!(
"postgres storage selected but [storage.postgres] config section missing" "postgres storage selected but [storage.postgres] config section \
missing"
) )
})?; })?;
info!(host = %pg_config.host, port = pg_config.port, database = %pg_config.database, "initializing postgres storage"); info!(host = %pg_config.host, port = pg_config.port, database = %pg_config.database, "initializing postgres storage");
let backend = pinakes_core::storage::postgres::PostgresBackend::new(pg_config).await?; let backend =
pinakes_core::storage::postgres::PostgresBackend::new(pg_config)
.await?;
backend.run_migrations().await?; backend.run_migrations().await?;
Arc::new(backend) Arc::new(backend)
} },
}; };
if cli.migrate_only { if cli.migrate_only {
@ -188,8 +193,12 @@ async fn main() -> Result<()> {
let watch_dirs = config.directories.roots.clone(); let watch_dirs = config.directories.roots.clone();
let watch_ignore = config.scanning.ignore_patterns.clone(); let watch_ignore = config.scanning.ignore_patterns.clone();
tokio::spawn(async move { tokio::spawn(async move {
if let Err(e) = if let Err(e) = pinakes_core::scan::watch_and_import(
pinakes_core::scan::watch_and_import(watch_storage, watch_dirs, watch_ignore).await watch_storage,
watch_dirs,
watch_ignore,
)
.await
{ {
tracing::error!(error = %e, "filesystem watcher failed"); tracing::error!(error = %e, "filesystem watcher failed");
} }
@ -200,8 +209,9 @@ async fn main() -> Result<()> {
let addr = format!("{}:{}", config.server.host, config.server.port); let addr = format!("{}:{}", config.server.host, config.server.port);
// Initialize transcode service early so the job queue can reference it // Initialize transcode service early so the job queue can reference it
let transcode_service: Option<Arc<pinakes_core::transcode::TranscodeService>> = let transcode_service: Option<
if config.transcoding.enabled { Arc<pinakes_core::transcode::TranscodeService>,
> = if config.transcoding.enabled {
Some(Arc::new(pinakes_core::transcode::TranscodeService::new( Some(Arc::new(pinakes_core::transcode::TranscodeService::new(
config.transcoding.clone(), config.transcoding.clone(),
))) )))
@ -257,12 +267,12 @@ async fn main() -> Result<()> {
}), }),
) )
.await; .await;
} },
Err(e) => { Err(e) => {
JobQueue::fail(&jobs, job_id, e.to_string()).await; JobQueue::fail(&jobs, job_id, e.to_string()).await;
},
} }
} },
}
JobKind::GenerateThumbnails { media_ids } => { JobKind::GenerateThumbnails { media_ids } => {
let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir(); let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir();
let thumb_config = config.thumbnails.clone(); let thumb_config = config.thumbnails.clone();
@ -299,12 +309,12 @@ async fn main() -> Result<()> {
updated.thumbnail_path = Some(path); updated.thumbnail_path = Some(path);
let _ = storage.update_media(&updated).await; let _ = storage.update_media(&updated).await;
generated += 1; generated += 1;
} },
Ok(Ok(None)) => {} Ok(Ok(None)) => {},
Ok(Err(e)) => errors.push(format!("{}: {}", mid, e)), Ok(Err(e)) => errors.push(format!("{}: {}", mid, e)),
Err(e) => errors.push(format!("{}: {}", mid, e)), Err(e) => errors.push(format!("{}: {}", mid, e)),
} }
} },
Err(e) => errors.push(format!("{}: {}", mid, e)), Err(e) => errors.push(format!("{}: {}", mid, e)),
} }
} }
@ -316,14 +326,15 @@ async fn main() -> Result<()> {
}), }),
) )
.await; .await;
} },
JobKind::VerifyIntegrity { media_ids } => { JobKind::VerifyIntegrity { media_ids } => {
let ids = if media_ids.is_empty() { let ids = if media_ids.is_empty() {
None None
} else { } else {
Some(media_ids.as_slice()) Some(media_ids.as_slice())
}; };
match pinakes_core::integrity::verify_integrity(&storage, ids).await { match pinakes_core::integrity::verify_integrity(&storage, ids).await
{
Ok(report) => { Ok(report) => {
JobQueue::complete( JobQueue::complete(
&jobs, &jobs,
@ -331,10 +342,10 @@ async fn main() -> Result<()> {
serde_json::to_value(&report).unwrap_or_default(), serde_json::to_value(&report).unwrap_or_default(),
) )
.await; .await;
} },
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
} }
} },
JobKind::OrphanDetection => { JobKind::OrphanDetection => {
match pinakes_core::integrity::detect_orphans(&storage).await { match pinakes_core::integrity::detect_orphans(&storage).await {
Ok(report) => { Ok(report) => {
@ -344,10 +355,10 @@ async fn main() -> Result<()> {
serde_json::to_value(&report).unwrap_or_default(), serde_json::to_value(&report).unwrap_or_default(),
) )
.await; .await;
} },
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
} }
} },
JobKind::CleanupThumbnails => { JobKind::CleanupThumbnails => {
let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir(); let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir();
match pinakes_core::integrity::cleanup_orphaned_thumbnails( match pinakes_core::integrity::cleanup_orphaned_thumbnails(
@ -362,15 +373,19 @@ async fn main() -> Result<()> {
serde_json::json!({ "removed": removed }), serde_json::json!({ "removed": removed }),
) )
.await; .await;
} },
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
} }
} },
JobKind::Export { JobKind::Export {
format, format,
destination, destination,
} => { } => {
match pinakes_core::export::export_library(&storage, &format, &destination) match pinakes_core::export::export_library(
&storage,
&format,
&destination,
)
.await .await
{ {
Ok(result) => { Ok(result) => {
@ -380,10 +395,10 @@ async fn main() -> Result<()> {
serde_json::to_value(&result).unwrap_or_default(), serde_json::to_value(&result).unwrap_or_default(),
) )
.await; .await;
} },
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
} }
} },
JobKind::Transcode { media_id, profile } => { JobKind::Transcode { media_id, profile } => {
if let Some(ref svc) = transcode_svc { if let Some(ref svc) = transcode_svc {
match storage.get_media(media_id).await { match storage.get_media(media_id).await {
@ -405,19 +420,23 @@ async fn main() -> Result<()> {
serde_json::json!({"session_id": session_id.to_string()}), serde_json::json!({"session_id": session_id.to_string()}),
) )
.await; .await;
} },
Err(e) => { Err(e) => {
JobQueue::fail(&jobs, job_id, e.to_string()).await JobQueue::fail(&jobs, job_id, e.to_string()).await
},
} }
} },
}
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
} }
} else { } else {
JobQueue::fail(&jobs, job_id, "transcoding is not enabled".to_string()) JobQueue::fail(
&jobs,
job_id,
"transcoding is not enabled".to_string(),
)
.await; .await;
} }
} },
JobKind::Enrich { media_ids } => { JobKind::Enrich { media_ids } => {
// Enrichment job placeholder // Enrichment job placeholder
JobQueue::complete( JobQueue::complete(
@ -426,7 +445,7 @@ async fn main() -> Result<()> {
serde_json::json!({"media_ids": media_ids.len(), "status": "not_implemented"}), serde_json::json!({"media_ids": media_ids.len(), "status": "not_implemented"}),
) )
.await; .await;
} },
JobKind::CleanupAnalytics => { JobKind::CleanupAnalytics => {
let before = chrono::Utc::now() - chrono::Duration::days(90); let before = chrono::Utc::now() - chrono::Duration::days(90);
match storage.cleanup_old_events(before).await { match storage.cleanup_old_events(before).await {
@ -437,10 +456,10 @@ async fn main() -> Result<()> {
serde_json::json!({"cleaned_up": count}), serde_json::json!({"cleaned_up": count}),
) )
.await; .await;
} },
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
} }
} },
}; };
drop(cancel); drop(cancel);
}) })
@ -452,7 +471,8 @@ async fn main() -> Result<()> {
config.jobs.cache_ttl_secs, config.jobs.cache_ttl_secs,
)); ));
// Initialize plugin manager if plugins are enabled (before moving config into Arc) // Initialize plugin manager if plugins are enabled (before moving config into
// Arc)
let plugin_manager = if config.plugins.enabled { let plugin_manager = if config.plugins.enabled {
match pinakes_core::plugin::PluginManager::new( match pinakes_core::plugin::PluginManager::new(
config.plugins.data_dir.clone(), config.plugins.data_dir.clone(),
@ -462,11 +482,11 @@ async fn main() -> Result<()> {
Ok(pm) => { Ok(pm) => {
tracing::info!("Plugin manager initialized"); tracing::info!("Plugin manager initialized");
Some(Arc::new(pm)) Some(Arc::new(pm))
} },
Err(e) => { Err(e) => {
tracing::warn!("Failed to initialize plugin manager: {}", e); tracing::warn!("Failed to initialize plugin manager: {}", e);
None None
} },
} }
} else { } else {
tracing::info!("Plugins disabled in configuration"); tracing::info!("Plugins disabled in configuration");
@ -511,11 +531,11 @@ async fn main() -> Result<()> {
"managed storage initialized" "managed storage initialized"
); );
Some(Arc::new(service)) Some(Arc::new(service))
} },
Err(e) => { Err(e) => {
tracing::error!(error = %e, "failed to initialize managed storage"); tracing::error!(error = %e, "failed to initialize managed storage");
None None
} },
} }
} else { } else {
tracing::info!("managed storage disabled in configuration"); tracing::info!("managed storage disabled in configuration");
@ -537,11 +557,11 @@ async fn main() -> Result<()> {
"chunked upload manager initialized" "chunked upload manager initialized"
); );
Some(Arc::new(manager)) Some(Arc::new(manager))
} },
Err(e) => { Err(e) => {
tracing::error!(error = %e, "failed to initialize chunked upload manager"); tracing::error!(error = %e, "failed to initialize chunked upload manager");
None None
} },
} }
} else { } else {
tracing::info!("sync disabled, chunked upload manager not initialized"); tracing::info!("sync disabled, chunked upload manager not initialized");
@ -568,7 +588,8 @@ async fn main() -> Result<()> {
let storage_clone = storage.clone(); let storage_clone = storage.clone();
let cancel = shutdown_token.clone(); let cancel = shutdown_token.clone();
tokio::spawn(async move { tokio::spawn(async move {
let mut interval = tokio::time::interval(std::time::Duration::from_secs(15 * 60)); let mut interval =
tokio::time::interval(std::time::Duration::from_secs(15 * 60));
loop { loop {
tokio::select! { tokio::select! {
_ = interval.tick() => { _ = interval.tick() => {
@ -595,7 +616,8 @@ async fn main() -> Result<()> {
let manager_clone = manager.clone(); let manager_clone = manager.clone();
let cancel = shutdown_token.clone(); let cancel = shutdown_token.clone();
tokio::spawn(async move { tokio::spawn(async move {
let mut interval = tokio::time::interval(std::time::Duration::from_secs(60 * 60)); let mut interval =
tokio::time::interval(std::time::Duration::from_secs(60 * 60));
loop { loop {
tokio::select! { tokio::select! {
_ = interval.tick() => { _ = interval.tick() => {
@ -630,20 +652,19 @@ async fn main() -> Result<()> {
if tls_config.enabled { if tls_config.enabled {
// TLS/HTTPS mode // TLS/HTTPS mode
let cert_path = tls_config let cert_path = tls_config.cert_path.as_ref().ok_or_else(|| {
.cert_path anyhow::anyhow!("TLS enabled but cert_path not specified")
.as_ref() })?;
.ok_or_else(|| anyhow::anyhow!("TLS enabled but cert_path not specified"))?; let key_path = tls_config.key_path.as_ref().ok_or_else(|| {
let key_path = tls_config anyhow::anyhow!("TLS enabled but key_path not specified")
.key_path })?;
.as_ref()
.ok_or_else(|| anyhow::anyhow!("TLS enabled but key_path not specified"))?;
info!(addr = %addr, cert = %cert_path.display(), "server listening with TLS"); info!(addr = %addr, cert = %cert_path.display(), "server listening with TLS");
// Configure TLS // Configure TLS
let tls_config_builder = let tls_config_builder =
axum_server::tls_rustls::RustlsConfig::from_pem_file(cert_path, key_path).await?; axum_server::tls_rustls::RustlsConfig::from_pem_file(cert_path, key_path)
.await?;
// Start HTTP redirect server if configured // Start HTTP redirect server if configured
if tls_config.redirect_http { if tls_config.redirect_http {
@ -655,7 +676,8 @@ async fn main() -> Result<()> {
let https_port = config_arc.read().await.server.port; let https_port = config_arc.read().await.server.port;
let https_host = config_arc.read().await.server.host.clone(); let https_host = config_arc.read().await.server.host.clone();
let redirect_router = create_https_redirect_router(https_host, https_port); let redirect_router =
create_https_redirect_router(https_host, https_port);
let shutdown = shutdown_token.clone(); let shutdown = shutdown_token.clone();
tokio::spawn(async move { tokio::spawn(async move {
@ -664,12 +686,13 @@ async fn main() -> Result<()> {
Err(e) => { Err(e) => {
tracing::warn!(error = %e, addr = %http_addr, "failed to bind HTTP redirect listener"); tracing::warn!(error = %e, addr = %http_addr, "failed to bind HTTP redirect listener");
return; return;
} },
}; };
info!(addr = %http_addr, "HTTP redirect server listening"); info!(addr = %http_addr, "HTTP redirect server listening");
let server = axum::serve( let server = axum::serve(
listener, listener,
redirect_router.into_make_service_with_connect_info::<std::net::SocketAddr>(), redirect_router
.into_make_service_with_connect_info::<std::net::SocketAddr>(),
); );
tokio::select! { tokio::select! {
result = server => { result = server => {
@ -692,12 +715,15 @@ async fn main() -> Result<()> {
// Spawn a task to trigger graceful shutdown // Spawn a task to trigger graceful shutdown
tokio::spawn(async move { tokio::spawn(async move {
shutdown_signal().await; shutdown_signal().await;
shutdown_handle.graceful_shutdown(Some(std::time::Duration::from_secs(30))); shutdown_handle
.graceful_shutdown(Some(std::time::Duration::from_secs(30)));
}); });
axum_server::bind_rustls(addr_parsed, tls_config_builder) axum_server::bind_rustls(addr_parsed, tls_config_builder)
.handle(handle) .handle(handle)
.serve(router.into_make_service_with_connect_info::<std::net::SocketAddr>()) .serve(
router.into_make_service_with_connect_info::<std::net::SocketAddr>(),
)
.await?; .await?;
} else { } else {
// Plain HTTP mode // Plain HTTP mode
@ -722,7 +748,8 @@ fn create_https_redirect_router(https_host: String, https_port: u16) -> Router {
Router::new().fallback(any(move |uri: axum::http::Uri| { Router::new().fallback(any(move |uri: axum::http::Uri| {
let https_host = https_host.clone(); let https_host = https_host.clone();
async move { async move {
let path_and_query = uri.path_and_query().map(|pq| pq.as_str()).unwrap_or("/"); let path_and_query =
uri.path_and_query().map(|pq| pq.as_str()).unwrap_or("/");
let https_url = if https_port == 443 { let https_url = if https_port == 443 {
format!("https://{}{}", https_host, path_and_query) format!("https://{}{}", https_host, path_and_query)
@ -738,24 +765,26 @@ fn create_https_redirect_router(https_host: String, https_port: u16) -> Router {
async fn shutdown_signal() { async fn shutdown_signal() {
let ctrl_c = async { let ctrl_c = async {
match tokio::signal::ctrl_c().await { match tokio::signal::ctrl_c().await {
Ok(()) => {} Ok(()) => {},
Err(e) => { Err(e) => {
tracing::warn!(error = %e, "failed to install Ctrl+C handler"); tracing::warn!(error = %e, "failed to install Ctrl+C handler");
std::future::pending::<()>().await; std::future::pending::<()>().await;
} },
} }
}; };
#[cfg(unix)] #[cfg(unix)]
let terminate = async { let terminate = async {
match tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) { match tokio::signal::unix::signal(
tokio::signal::unix::SignalKind::terminate(),
) {
Ok(mut signal) => { Ok(mut signal) => {
signal.recv().await; signal.recv().await;
} },
Err(e) => { Err(e) => {
tracing::warn!(error = %e, "failed to install SIGTERM handler"); tracing::warn!(error = %e, "failed to install SIGTERM handler");
std::future::pending::<()>().await; std::future::pending::<()>().await;
} },
} }
}; };

View file

@ -1,14 +1,14 @@
use axum::Json; use axum::{
use axum::extract::{Extension, Path, Query, State}; Json,
extract::{Extension, Path, Query, State},
};
use pinakes_core::{
analytics::{UsageEvent, UsageEventType},
model::MediaId,
};
use uuid::Uuid; use uuid::Uuid;
use crate::auth::resolve_user_id; use crate::{auth::resolve_user_id, dto::*, error::ApiError, state::AppState};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::analytics::{UsageEvent, UsageEventType};
use pinakes_core::model::MediaId;
const MAX_LIMIT: u64 = 100; const MAX_LIMIT: u64 = 100;
@ -21,9 +21,11 @@ pub async fn get_most_viewed(
Ok(Json( Ok(Json(
results results
.into_iter() .into_iter()
.map(|(item, count)| MostViewedResponse { .map(|(item, count)| {
MostViewedResponse {
media: MediaResponse::from(item), media: MediaResponse::from(item),
view_count: count, view_count: count,
}
}) })
.collect(), .collect(),
)) ))
@ -45,10 +47,10 @@ pub async fn record_event(
Extension(username): Extension<String>, Extension(username): Extension<String>,
Json(req): Json<RecordUsageEventRequest>, Json(req): Json<RecordUsageEventRequest>,
) -> Result<Json<serde_json::Value>, ApiError> { ) -> Result<Json<serde_json::Value>, ApiError> {
let event_type: UsageEventType = req let event_type: UsageEventType =
.event_type req.event_type.parse().map_err(|e: String| {
.parse() ApiError(pinakes_core::error::PinakesError::InvalidOperation(e))
.map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?; })?;
let user_id = resolve_user_id(&state.storage, &username).await?; let user_id = resolve_user_id(&state.storage, &username).await?;
let event = UsageEvent { let event = UsageEvent {
id: Uuid::now_v7(), id: Uuid::now_v7(),

View file

@ -1,12 +1,11 @@
use axum::Json; use axum::{
use axum::extract::{Query, State}; Json,
extract::{Query, State},
use crate::dto::*; };
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::Pagination; use pinakes_core::model::Pagination;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn list_audit( pub async fn list_audit(
State(state): State<AppState>, State(state): State<AppState>,
Query(params): Query<PaginationParams>, Query(params): Query<PaginationParams>,

View file

@ -1,15 +1,21 @@
use axum::Json; use axum::{
use axum::extract::State; Json,
use axum::http::{HeaderMap, StatusCode}; extract::State,
http::{HeaderMap, StatusCode},
};
use crate::dto::{LoginRequest, LoginResponse, UserInfoResponse}; use crate::{
use crate::state::AppState; dto::{LoginRequest, LoginResponse, UserInfoResponse},
state::AppState,
};
/// Dummy password hash to use for timing-safe comparison when user doesn't exist. /// Dummy password hash to use for timing-safe comparison when user doesn't
/// This is a valid argon2 hash that will always fail verification but takes /// exist. This is a valid argon2 hash that will always fail verification but
/// similar time to verify as a real hash, preventing timing attacks that could /// takes similar time to verify as a real hash, preventing timing attacks that
/// reveal whether a username exists. /// could reveal whether a username exists.
const DUMMY_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk"; const DUMMY_HASH: &str =
"$argon2id$v=19$m=19456,t=2,\
p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
pub async fn login( pub async fn login(
State(state): State<AppState>, State(state): State<AppState>,
@ -121,7 +127,10 @@ pub async fn login(
})) }))
} }
pub async fn logout(State(state): State<AppState>, headers: HeaderMap) -> StatusCode { pub async fn logout(
State(state): State<AppState>,
headers: HeaderMap,
) -> StatusCode {
if let Some(token) = extract_bearer_token(&headers) { if let Some(token) = extract_bearer_token(&headers) {
// Get username before deleting session // Get username before deleting session
let username = match state.storage.get_session(token).await { let username = match state.storage.get_session(token).await {
@ -185,7 +194,10 @@ fn extract_bearer_token(headers: &HeaderMap) -> Option<&str> {
} }
/// Revoke all sessions for the current user /// Revoke all sessions for the current user
pub async fn revoke_all_sessions(State(state): State<AppState>, headers: HeaderMap) -> StatusCode { pub async fn revoke_all_sessions(
State(state): State<AppState>,
headers: HeaderMap,
) -> StatusCode {
let token = match extract_bearer_token(&headers) { let token = match extract_bearer_token(&headers) {
Some(t) => t, Some(t) => t,
None => return StatusCode::UNAUTHORIZED, None => return StatusCode::UNAUTHORIZED,
@ -198,7 +210,7 @@ pub async fn revoke_all_sessions(State(state): State<AppState>, headers: HeaderM
Err(e) => { Err(e) => {
tracing::error!(error = %e, "failed to get session"); tracing::error!(error = %e, "failed to get session");
return StatusCode::INTERNAL_SERVER_ERROR; return StatusCode::INTERNAL_SERVER_ERROR;
} },
}; };
let username = session.username.clone(); let username = session.username.clone();
@ -218,11 +230,11 @@ pub async fn revoke_all_sessions(State(state): State<AppState>, headers: HeaderM
.await; .await;
StatusCode::OK StatusCode::OK
} },
Err(e) => { Err(e) => {
tracing::error!(error = %e, "failed to revoke sessions"); tracing::error!(error = %e, "failed to revoke sessions");
StatusCode::INTERNAL_SERVER_ERROR StatusCode::INTERNAL_SERVER_ERROR
} },
} }
} }
@ -245,7 +257,8 @@ pub async fn list_active_sessions(
State(state): State<AppState>, State(state): State<AppState>,
) -> Result<Json<SessionListResponse>, StatusCode> { ) -> Result<Json<SessionListResponse>, StatusCode> {
// Get all active sessions // Get all active sessions
let sessions = state let sessions =
state
.storage .storage
.list_active_sessions(None) .list_active_sessions(None)
.await .await
@ -256,12 +269,14 @@ pub async fn list_active_sessions(
let session_infos = sessions let session_infos = sessions
.into_iter() .into_iter()
.map(|s| SessionInfo { .map(|s| {
SessionInfo {
username: s.username, username: s.username,
role: s.role, role: s.role,
created_at: s.created_at.to_rfc3339(), created_at: s.created_at.to_rfc3339(),
last_accessed: s.last_accessed.to_rfc3339(), last_accessed: s.last_accessed.to_rfc3339(),
expires_at: s.expires_at.to_rfc3339(), expires_at: s.expires_at.to_rfc3339(),
}
}) })
.collect(); .collect();

View file

@ -1,20 +1,32 @@
use axum::{ use axum::{
Json, Router, Json,
Router,
extract::{Extension, Path, Query, State}, extract::{Extension, Path, Query, State},
http::StatusCode, http::StatusCode,
response::IntoResponse, response::IntoResponse,
routing::{get, put}, routing::{get, put},
}; };
use pinakes_core::{
error::PinakesError,
model::{
AuthorInfo,
BookMetadata,
MediaId,
Pagination,
ReadingProgress,
ReadingStatus,
},
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use pinakes_core::{ use crate::{
error::PinakesError, auth::resolve_user_id,
model::{AuthorInfo, BookMetadata, MediaId, Pagination, ReadingProgress, ReadingStatus}, dto::MediaResponse,
error::ApiError,
state::AppState,
}; };
use crate::{auth::resolve_user_id, dto::MediaResponse, error::ApiError, state::AppState};
/// Book metadata response DTO /// Book metadata response DTO
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct BookMetadataResponse { pub struct BookMetadataResponse {
@ -45,7 +57,11 @@ impl From<BookMetadata> for BookMetadataResponse {
series_name: meta.series_name, series_name: meta.series_name,
series_index: meta.series_index, series_index: meta.series_index,
format: meta.format, format: meta.format,
authors: meta.authors.into_iter().map(AuthorResponse::from).collect(), authors: meta
.authors
.into_iter()
.map(AuthorResponse::from)
.collect(),
identifiers: meta.identifiers, identifiers: meta.identifiers,
} }
} }
@ -143,7 +159,8 @@ pub async fn get_book_metadata(
Path(media_id): Path<Uuid>, Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> { ) -> Result<impl IntoResponse, ApiError> {
let media_id = MediaId(media_id); let media_id = MediaId(media_id);
let metadata = state let metadata =
state
.storage .storage
.get_book_metadata(media_id) .get_book_metadata(media_id)
.await? .await?
@ -177,18 +194,23 @@ pub async fn list_books(
) )
.await?; .await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect(); let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response)) Ok(Json(response))
} }
/// List all series with book counts /// List all series with book counts
pub async fn list_series(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> { pub async fn list_series(
State(state): State<AppState>,
) -> Result<impl IntoResponse, ApiError> {
let series = state.storage.list_series().await?; let series = state.storage.list_series().await?;
let response: Vec<SeriesSummary> = series let response: Vec<SeriesSummary> = series
.into_iter() .into_iter()
.map(|(name, count)| SeriesSummary { .map(|(name, count)| {
SeriesSummary {
name, name,
book_count: count, book_count: count,
}
}) })
.collect(); .collect();
@ -201,7 +223,8 @@ pub async fn get_series_books(
Path(series_name): Path<String>, Path(series_name): Path<String>,
) -> Result<impl IntoResponse, ApiError> { ) -> Result<impl IntoResponse, ApiError> {
let items = state.storage.get_series_books(&series_name).await?; let items = state.storage.get_series_books(&series_name).await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect(); let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response)) Ok(Json(response))
} }
@ -213,9 +236,11 @@ pub async fn list_authors(
let authors = state.storage.list_all_authors(&pagination).await?; let authors = state.storage.list_all_authors(&pagination).await?;
let response: Vec<AuthorSummary> = authors let response: Vec<AuthorSummary> = authors
.into_iter() .into_iter()
.map(|(name, count)| AuthorSummary { .map(|(name, count)| {
AuthorSummary {
name, name,
book_count: count, book_count: count,
}
}) })
.collect(); .collect();
@ -233,7 +258,8 @@ pub async fn get_author_books(
.search_books(None, Some(&author_name), None, None, None, &pagination) .search_books(None, Some(&author_name), None, None, None, &pagination)
.await?; .await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect(); let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response)) Ok(Json(response))
} }
@ -288,7 +314,8 @@ pub async fn get_reading_list(
.get_reading_list(user_id.0, params.status) .get_reading_list(user_id.0, params.status)
.await?; .await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect(); let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response)) Ok(Json(response))
} }

View file

@ -1,12 +1,11 @@
use axum::Json; use axum::{
use axum::extract::{Path, State}; Json,
extract::{Path, State},
};
use pinakes_core::model::{CollectionKind, MediaId};
use uuid::Uuid; use uuid::Uuid;
use crate::dto::*; use crate::{dto::*, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{CollectionKind, MediaId};
pub async fn create_collection( pub async fn create_collection(
State(state): State<AppState>, State(state): State<AppState>,
@ -87,7 +86,11 @@ pub async fn remove_member(
State(state): State<AppState>, State(state): State<AppState>,
Path((collection_id, media_id)): Path<(Uuid, Uuid)>, Path((collection_id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> { ) -> Result<Json<serde_json::Value>, ApiError> {
pinakes_core::collections::remove_member(&state.storage, collection_id, MediaId(media_id)) pinakes_core::collections::remove_member(
&state.storage,
collection_id,
MediaId(media_id),
)
.await?; .await?;
Ok(Json(serde_json::json!({"removed": true}))) Ok(Json(serde_json::json!({"removed": true})))
} }
@ -96,6 +99,8 @@ pub async fn get_members(
State(state): State<AppState>, State(state): State<AppState>,
Path(collection_id): Path<Uuid>, Path(collection_id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> { ) -> Result<Json<Vec<MediaResponse>>, ApiError> {
let items = pinakes_core::collections::get_members(&state.storage, collection_id).await?; let items =
pinakes_core::collections::get_members(&state.storage, collection_id)
.await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect())) Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
} }

View file

@ -1,11 +1,10 @@
use axum::Json; use axum::{Json, extract::State};
use axum::extract::State;
use crate::dto::*; use crate::{dto::*, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResponse>, ApiError> { pub async fn get_config(
State(state): State<AppState>,
) -> Result<Json<ConfigResponse>, ApiError> {
let config = state.config.read().await; let config = state.config.read().await;
let roots = state.storage.list_root_dirs().await?; let roots = state.storage.list_root_dirs().await?;
@ -20,7 +19,8 @@ pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResp
.map(|m| !m.permissions().readonly()) .map(|m| !m.permissions().readonly())
.unwrap_or(false) .unwrap_or(false)
} else { } else {
path.parent() path
.parent()
.map(|parent| { .map(|parent| {
std::fs::metadata(parent) std::fs::metadata(parent)
.map(|m| !m.permissions().readonly()) .map(|m| !m.permissions().readonly())
@ -28,7 +28,7 @@ pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResp
}) })
.unwrap_or(false) .unwrap_or(false)
} }
} },
None => false, None => false,
}; };
@ -132,7 +132,8 @@ pub async fn update_scanning_config(
.map(|m| !m.permissions().readonly()) .map(|m| !m.permissions().readonly())
.unwrap_or(false) .unwrap_or(false)
} else { } else {
path.parent() path
.parent()
.map(|parent| { .map(|parent| {
std::fs::metadata(parent) std::fs::metadata(parent)
.map(|m| !m.permissions().readonly()) .map(|m| !m.permissions().readonly())
@ -140,7 +141,7 @@ pub async fn update_scanning_config(
}) })
.unwrap_or(false) .unwrap_or(false)
} }
} },
None => false, None => false,
}; };

View file

@ -1,9 +1,6 @@
use axum::Json; use axum::{Json, extract::State};
use axum::extract::State;
use crate::dto::DatabaseStatsResponse; use crate::{dto::DatabaseStatsResponse, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
pub async fn database_stats( pub async fn database_stats(
State(state): State<AppState>, State(state): State<AppState>,

View file

@ -1,9 +1,10 @@
use axum::Json; use axum::{Json, extract::State};
use axum::extract::State;
use crate::dto::{DuplicateGroupResponse, MediaResponse}; use crate::{
use crate::error::ApiError; dto::{DuplicateGroupResponse, MediaResponse},
use crate::state::AppState; error::ApiError,
state::AppState,
};
pub async fn list_duplicates( pub async fn list_duplicates(
State(state): State<AppState>, State(state): State<AppState>,

View file

@ -1,12 +1,11 @@
use axum::Json; use axum::{
use axum::extract::{Path, State}; Json,
extract::{Path, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid; use uuid::Uuid;
use crate::dto::*; use crate::{dto::*, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
pub async fn trigger_enrichment( pub async fn trigger_enrichment(
State(state): State<AppState>, State(state): State<AppState>,
@ -39,7 +38,8 @@ pub async fn batch_enrich(
State(state): State<AppState>, State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field
) -> Result<Json<serde_json::Value>, ApiError> { ) -> Result<Json<serde_json::Value>, ApiError> {
let media_ids: Vec<MediaId> = req.media_ids.into_iter().map(MediaId).collect(); let media_ids: Vec<MediaId> =
req.media_ids.into_iter().map(MediaId).collect();
let job_id = state let job_id = state
.job_queue .job_queue
.submit(pinakes_core::jobs::JobKind::Enrich { media_ids }) .submit(pinakes_core::jobs::JobKind::Enrich { media_ids })

View file

@ -1,10 +1,9 @@
use axum::Json;
use axum::extract::State;
use serde::Deserialize;
use std::path::PathBuf; use std::path::PathBuf;
use crate::error::ApiError; use axum::{Json, extract::State};
use crate::state::AppState; use serde::Deserialize;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct ExportRequest { pub struct ExportRequest {
@ -16,7 +15,8 @@ pub async fn trigger_export(
State(state): State<AppState>, State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> { ) -> Result<Json<serde_json::Value>, ApiError> {
// Default export to JSON in data dir // Default export to JSON in data dir
let dest = pinakes_core::config::Config::default_data_dir().join("export.json"); let dest =
pinakes_core::config::Config::default_data_dir().join("export.json");
let kind = pinakes_core::jobs::JobKind::Export { let kind = pinakes_core::jobs::JobKind::Export {
format: pinakes_core::jobs::ExportFormat::Json, format: pinakes_core::jobs::ExportFormat::Json,
destination: dest, destination: dest,

View file

@ -1,9 +1,6 @@
use std::time::Instant; use std::time::Instant;
use axum::Json; use axum::{Json, extract::State, http::StatusCode, response::IntoResponse};
use axum::extract::State;
use axum::http::StatusCode;
use axum::response::IntoResponse;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::state::AppState; use crate::state::AppState;
@ -58,10 +55,12 @@ pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
// Check database health // Check database health
let db_start = Instant::now(); let db_start = Instant::now();
let db_health = match state.storage.count_media().await { let db_health = match state.storage.count_media().await {
Ok(count) => DatabaseHealth { Ok(count) => {
DatabaseHealth {
status: "ok".to_string(), status: "ok".to_string(),
latency_ms: db_start.elapsed().as_millis() as u64, latency_ms: db_start.elapsed().as_millis() as u64,
media_count: Some(count), media_count: Some(count),
}
}, },
Err(e) => { Err(e) => {
response.status = "degraded".to_string(); response.status = "degraded".to_string();
@ -70,12 +69,13 @@ pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
latency_ms: db_start.elapsed().as_millis() as u64, latency_ms: db_start.elapsed().as_millis() as u64,
media_count: None, media_count: None,
} }
} },
}; };
response.database = Some(db_health); response.database = Some(db_health);
// Check filesystem health (root directories) // Check filesystem health (root directories)
let roots: Vec<std::path::PathBuf> = state.storage.list_root_dirs().await.unwrap_or_default(); let roots: Vec<std::path::PathBuf> =
state.storage.list_root_dirs().await.unwrap_or_default();
let roots_accessible = roots.iter().filter(|r| r.exists()).count(); let roots_accessible = roots.iter().filter(|r| r.exists()).count();
if roots_accessible < roots.len() { if roots_accessible < roots.len() {
response.status = "degraded".to_string(); response.status = "degraded".to_string();
@ -130,14 +130,16 @@ pub async fn readiness(State(state): State<AppState>) -> impl IntoResponse {
"database_latency_ms": latency "database_latency_ms": latency
})), })),
) )
} },
Err(e) => ( Err(e) => {
(
StatusCode::SERVICE_UNAVAILABLE, StatusCode::SERVICE_UNAVAILABLE,
Json(serde_json::json!({ Json(serde_json::json!({
"status": "not_ready", "status": "not_ready",
"reason": e.to_string() "reason": e.to_string()
})), })),
), )
},
} }
} }
@ -159,7 +161,9 @@ pub struct JobsHealth {
pub running: usize, pub running: usize,
} }
pub async fn health_detailed(State(state): State<AppState>) -> Json<DetailedHealthResponse> { pub async fn health_detailed(
State(state): State<AppState>,
) -> Json<DetailedHealthResponse> {
// Check database // Check database
let db_start = Instant::now(); let db_start = Instant::now();
let (db_status, media_count) = match state.storage.count_media().await { let (db_status, media_count) = match state.storage.count_media().await {

View file

@ -1,9 +1,7 @@
use axum::Json; use axum::{Json, extract::State};
use axum::extract::State;
use serde::Deserialize; use serde::Deserialize;
use crate::error::ApiError; use crate::{error::ApiError, state::AppState};
use crate::state::AppState;
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct OrphanResolveRequest { pub struct OrphanResolveRequest {
@ -92,7 +90,8 @@ pub async fn resolve_orphans(
.into_iter() .into_iter()
.map(pinakes_core::model::MediaId) .map(pinakes_core::model::MediaId)
.collect(); .collect();
let count = pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids) let count =
pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
.await .await
.map_err(ApiError)?; .map_err(ApiError)?;
Ok(Json(serde_json::json!({ "resolved": count }))) Ok(Json(serde_json::json!({ "resolved": count })))

View file

@ -1,10 +1,11 @@
use axum::Json; use axum::{
use axum::extract::{Path, State}; Json,
extract::{Path, State},
use crate::error::ApiError; };
use crate::state::AppState;
use pinakes_core::jobs::Job; use pinakes_core::jobs::Job;
use crate::{error::ApiError, state::AppState};
pub async fn list_jobs(State(state): State<AppState>) -> Json<Vec<Job>> { pub async fn list_jobs(State(state): State<AppState>) -> Json<Vec<Job>> {
Json(state.job_queue.list().await) Json(state.job_queue.list().await)
} }
@ -14,7 +15,8 @@ pub async fn get_job(
Path(id): Path<uuid::Uuid>, Path(id): Path<uuid::Uuid>,
) -> Result<Json<Job>, ApiError> { ) -> Result<Json<Job>, ApiError> {
state.job_queue.status(id).await.map(Json).ok_or_else(|| { state.job_queue.status(id).await.map(Json).ok_or_else(|| {
pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}")).into() pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}"))
.into()
}) })
} }
@ -26,9 +28,11 @@ pub async fn cancel_job(
if cancelled { if cancelled {
Ok(Json(serde_json::json!({ "cancelled": true }))) Ok(Json(serde_json::json!({ "cancelled": true })))
} else { } else {
Err(pinakes_core::error::PinakesError::NotFound(format!( Err(
pinakes_core::error::PinakesError::NotFound(format!(
"job not found or already finished: {id}" "job not found or already finished: {id}"
)) ))
.into()) .into(),
)
} }
} }

View file

@ -1,16 +1,18 @@
use axum::Json; use axum::{
use axum::extract::{Path, Query, State}; Json,
extract::{Path, Query, State},
};
use pinakes_core::{
model::{MediaId, Pagination},
storage::DynStorageBackend,
};
use uuid::Uuid; use uuid::Uuid;
use crate::dto::*; use crate::{dto::*, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{MediaId, Pagination};
use pinakes_core::storage::DynStorageBackend;
/// Apply tags and add to collection after a successful import. /// Apply tags and add to collection after a successful import.
/// Shared logic used by import_with_options, batch_import, and import_directory_endpoint. /// Shared logic used by import_with_options, batch_import, and
/// import_directory_endpoint.
async fn apply_import_post_processing( async fn apply_import_post_processing(
storage: &DynStorageBackend, storage: &DynStorageBackend,
media_id: MediaId, media_id: MediaId,
@ -20,7 +22,9 @@ async fn apply_import_post_processing(
) { ) {
if let Some(tag_ids) = tag_ids { if let Some(tag_ids) = tag_ids {
for tid in tag_ids { for tid in tag_ids {
if let Err(e) = pinakes_core::tags::tag_media(storage, media_id, *tid).await { if let Err(e) =
pinakes_core::tags::tag_media(storage, media_id, *tid).await
{
tracing::warn!(error = %e, "failed to apply tag during import"); tracing::warn!(error = %e, "failed to apply tag during import");
} }
} }
@ -29,18 +33,21 @@ async fn apply_import_post_processing(
for name in new_tags { for name in new_tags {
match pinakes_core::tags::create_tag(storage, name, None).await { match pinakes_core::tags::create_tag(storage, name, None).await {
Ok(tag) => { Ok(tag) => {
if let Err(e) = pinakes_core::tags::tag_media(storage, media_id, tag.id).await { if let Err(e) =
pinakes_core::tags::tag_media(storage, media_id, tag.id).await
{
tracing::warn!(error = %e, "failed to apply new tag during import"); tracing::warn!(error = %e, "failed to apply new tag during import");
} }
} },
Err(e) => { Err(e) => {
tracing::warn!(tag_name = %name, error = %e, "failed to create tag during import"); tracing::warn!(tag_name = %name, error = %e, "failed to create tag during import");
} },
} }
} }
} }
if let Some(col_id) = collection_id if let Some(col_id) = collection_id
&& let Err(e) = pinakes_core::collections::add_member(storage, col_id, media_id, 0).await && let Err(e) =
pinakes_core::collections::add_member(storage, col_id, media_id, 0).await
{ {
tracing::warn!(error = %e, "failed to add to collection during import"); tracing::warn!(error = %e, "failed to add to collection during import");
} }
@ -50,7 +57,8 @@ pub async fn import_media(
State(state): State<AppState>, State(state): State<AppState>,
Json(req): Json<ImportRequest>, Json(req): Json<ImportRequest>,
) -> Result<Json<ImportResponse>, ApiError> { ) -> Result<Json<ImportResponse>, ApiError> {
let result = pinakes_core::import::import_file(&state.storage, &req.path).await?; let result =
pinakes_core::import::import_file(&state.storage, &req.path).await?;
Ok(Json(ImportResponse { Ok(Json(ImportResponse {
media_id: result.media_id.0.to_string(), media_id: result.media_id.0.to_string(),
was_duplicate: result.was_duplicate, was_duplicate: result.was_duplicate,
@ -83,7 +91,11 @@ const MAX_SHORT_TEXT: usize = 500;
/// Maximum length for long text fields (description). /// Maximum length for long text fields (description).
const MAX_LONG_TEXT: usize = 10_000; const MAX_LONG_TEXT: usize = 10_000;
fn validate_optional_text(field: &Option<String>, name: &str, max: usize) -> Result<(), ApiError> { fn validate_optional_text(
field: &Option<String>,
name: &str,
max: usize,
) -> Result<(), ApiError> {
if let Some(v) = field if let Some(v) = field
&& v.len() > max && v.len() > max
{ {
@ -193,8 +205,10 @@ pub async fn stream_media(
Path(id): Path<Uuid>, Path(id): Path<Uuid>,
headers: axum::http::HeaderMap, headers: axum::http::HeaderMap,
) -> Result<axum::response::Response, ApiError> { ) -> Result<axum::response::Response, ApiError> {
use axum::body::Body; use axum::{
use axum::http::{StatusCode, header}; body::Body,
http::{StatusCode, header},
};
use tokio::io::{AsyncReadExt, AsyncSeekExt}; use tokio::io::{AsyncReadExt, AsyncSeekExt};
use tokio_util::io::ReaderStream; use tokio_util::io::ReaderStream;
@ -222,7 +236,8 @@ pub async fn stream_media(
let content_length = end - start + 1; let content_length = end - start + 1;
let mut file = file; let mut file = file;
file.seek(std::io::SeekFrom::Start(start)) file
.seek(std::io::SeekFrom::Start(start))
.await .await
.map_err(|e| ApiError(pinakes_core::error::PinakesError::Io(e)))?; .map_err(|e| ApiError(pinakes_core::error::PinakesError::Io(e)))?;
@ -300,7 +315,8 @@ pub async fn import_with_options(
State(state): State<AppState>, State(state): State<AppState>,
Json(req): Json<ImportWithOptionsRequest>, Json(req): Json<ImportWithOptionsRequest>,
) -> Result<Json<ImportResponse>, ApiError> { ) -> Result<Json<ImportResponse>, ApiError> {
let result = pinakes_core::import::import_file(&state.storage, &req.path).await?; let result =
pinakes_core::import::import_file(&state.storage, &req.path).await?;
if !result.was_duplicate { if !result.was_duplicate {
apply_import_post_processing( apply_import_post_processing(
@ -358,7 +374,7 @@ pub async fn batch_import(
was_duplicate: result.was_duplicate, was_duplicate: result.was_duplicate,
error: None, error: None,
}); });
} },
Err(e) => { Err(e) => {
errors += 1; errors += 1;
results.push(BatchImportItemResult { results.push(BatchImportItemResult {
@ -367,7 +383,7 @@ pub async fn batch_import(
was_duplicate: false, was_duplicate: false,
error: Some(e.to_string()), error: Some(e.to_string()),
}); });
} },
} }
} }
@ -425,7 +441,7 @@ pub async fn import_directory_endpoint(
was_duplicate: result.was_duplicate, was_duplicate: result.was_duplicate,
error: None, error: None,
}); });
} },
Err(e) => { Err(e) => {
errors += 1; errors += 1;
results.push(BatchImportItemResult { results.push(BatchImportItemResult {
@ -434,7 +450,7 @@ pub async fn import_directory_endpoint(
was_duplicate: false, was_duplicate: false,
error: Some(e.to_string()), error: Some(e.to_string()),
}); });
} },
} }
} }
@ -468,18 +484,23 @@ pub async fn preview_directory(
let roots = state.storage.list_root_dirs().await?; let roots = state.storage.list_root_dirs().await?;
if !roots.is_empty() { if !roots.is_empty() {
let canonical = dir.canonicalize().map_err(|_| { let canonical = dir.canonicalize().map_err(|_| {
pinakes_core::error::PinakesError::InvalidOperation("cannot resolve path".into()) pinakes_core::error::PinakesError::InvalidOperation(
"cannot resolve path".into(),
)
})?; })?;
let allowed = roots.iter().any(|root| canonical.starts_with(root)); let allowed = roots.iter().any(|root| canonical.starts_with(root));
if !allowed { if !allowed {
return Err(pinakes_core::error::PinakesError::InvalidOperation( return Err(
pinakes_core::error::PinakesError::InvalidOperation(
"path is not under a configured root directory".into(), "path is not under a configured root directory".into(),
) )
.into()); .into(),
);
} }
} }
let files: Vec<DirectoryPreviewFile> = tokio::task::spawn_blocking(move || { let files: Vec<DirectoryPreviewFile> =
tokio::task::spawn_blocking(move || {
let mut result = Vec::new(); let mut result = Vec::new();
fn walk_dir( fn walk_dir(
dir: &std::path::Path, dir: &std::path::Path,
@ -504,7 +525,8 @@ pub async fn preview_directory(
walk_dir(&path, recursive, result); walk_dir(&path, recursive, result);
} }
} else if path.is_file() } else if path.is_file()
&& let Some(mt) = pinakes_core::media_type::MediaType::from_path(&path) && let Some(mt) =
pinakes_core::media_type::MediaType::from_path(&path)
{ {
let size = entry.metadata().ok().map(|m| m.len()).unwrap_or(0); let size = entry.metadata().ok().map(|m| m.len()).unwrap_or(0);
let file_name = path let file_name = path
@ -528,7 +550,9 @@ pub async fn preview_directory(
result result
}) })
.await .await
.map_err(|e| pinakes_core::error::PinakesError::Io(std::io::Error::other(e)))?; .map_err(|e| {
pinakes_core::error::PinakesError::Io(std::io::Error::other(e))
})?;
let total_count = files.len(); let total_count = files.len();
let total_size = files.iter().map(|f| f.file_size).sum(); let total_size = files.iter().map(|f| f.file_size).sum();
@ -601,20 +625,25 @@ pub async fn batch_tag(
)); ));
} }
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect(); let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state match state
.storage .storage
.batch_tag_media(&media_ids, &req.tag_ids) .batch_tag_media(&media_ids, &req.tag_ids)
.await .await
{ {
Ok(count) => Ok(Json(BatchOperationResponse { Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize, processed: count as usize,
errors: Vec::new(), errors: Vec::new(),
})), }))
Err(e) => Ok(Json(BatchOperationResponse { },
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0, processed: 0,
errors: vec![e.to_string()], errors: vec![e.to_string()],
})), }))
},
} }
} }
@ -634,14 +663,18 @@ pub async fn delete_all_media(
} }
match state.storage.delete_all_media().await { match state.storage.delete_all_media().await {
Ok(count) => Ok(Json(BatchOperationResponse { Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize, processed: count as usize,
errors: Vec::new(), errors: Vec::new(),
})), }))
Err(e) => Ok(Json(BatchOperationResponse { },
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0, processed: 0,
errors: vec![e.to_string()], errors: vec![e.to_string()],
})), }))
},
} }
} }
@ -657,7 +690,8 @@ pub async fn batch_delete(
)); ));
} }
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect(); let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
// Record audit entries BEFORE delete to avoid FK constraint violation. // Record audit entries BEFORE delete to avoid FK constraint violation.
// Use None for media_id since they'll be deleted; include ID in details. // Use None for media_id since they'll be deleted; include ID in details.
@ -675,14 +709,18 @@ pub async fn batch_delete(
} }
match state.storage.batch_delete_media(&media_ids).await { match state.storage.batch_delete_media(&media_ids).await {
Ok(count) => Ok(Json(BatchOperationResponse { Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize, processed: count as usize,
errors: Vec::new(), errors: Vec::new(),
})), }))
Err(e) => Ok(Json(BatchOperationResponse { },
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0, processed: 0,
errors: vec![e.to_string()], errors: vec![e.to_string()],
})), }))
},
} }
} }
@ -728,7 +766,8 @@ pub async fn batch_update(
)); ));
} }
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect(); let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state match state
.storage .storage
.batch_update_media( .batch_update_media(
@ -742,14 +781,18 @@ pub async fn batch_update(
) )
.await .await
{ {
Ok(count) => Ok(Json(BatchOperationResponse { Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize, processed: count as usize,
errors: Vec::new(), errors: Vec::new(),
})), }))
Err(e) => Ok(Json(BatchOperationResponse { },
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0, processed: 0,
errors: vec![e.to_string()], errors: vec![e.to_string()],
})), }))
},
} }
} }
@ -757,8 +800,7 @@ pub async fn get_thumbnail(
State(state): State<AppState>, State(state): State<AppState>,
Path(id): Path<Uuid>, Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> { ) -> Result<axum::response::Response, ApiError> {
use axum::body::Body; use axum::{body::Body, http::header};
use axum::http::header;
use tokio_util::io::ReaderStream; use tokio_util::io::ReaderStream;
let item = state.storage.get_media(MediaId(id)).await?; let item = state.storage.get_media(MediaId(id)).await?;
@ -769,9 +811,9 @@ pub async fn get_thumbnail(
)) ))
})?; })?;
let file = tokio::fs::File::open(&thumb_path) let file = tokio::fs::File::open(&thumb_path).await.map_err(|_e| {
.await ApiError(pinakes_core::error::PinakesError::FileNotFound(thumb_path))
.map_err(|_e| ApiError(pinakes_core::error::PinakesError::FileNotFound(thumb_path)))?; })?;
let stream = ReaderStream::new(file); let stream = ReaderStream::new(file);
let body = Body::from_stream(stream); let body = Body::from_stream(stream);
@ -816,7 +858,9 @@ pub async fn rename_media(
path: item.path.to_string_lossy().to_string(), path: item.path.to_string_lossy().to_string(),
content_hash: Some(item.content_hash.clone()), content_hash: Some(item.content_hash.clone()),
file_size: Some(item.file_size), file_size: Some(item.file_size),
metadata_json: Some(serde_json::json!({ "old_path": old_path }).to_string()), metadata_json: Some(
serde_json::json!({ "old_path": old_path }).to_string(),
),
changed_by_device: None, changed_by_device: None,
timestamp: chrono::Utc::now(), timestamp: chrono::Utc::now(),
}; };
@ -854,7 +898,9 @@ pub async fn move_media_endpoint(
path: item.path.to_string_lossy().to_string(), path: item.path.to_string_lossy().to_string(),
content_hash: Some(item.content_hash.clone()), content_hash: Some(item.content_hash.clone()),
file_size: Some(item.file_size), file_size: Some(item.file_size),
metadata_json: Some(serde_json::json!({ "old_path": old_path }).to_string()), metadata_json: Some(
serde_json::json!({ "old_path": old_path }).to_string(),
),
changed_by_device: None, changed_by_device: None,
timestamp: chrono::Utc::now(), timestamp: chrono::Utc::now(),
}; };
@ -888,7 +934,8 @@ pub async fn batch_move_media(
)); ));
} }
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect(); let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state match state
.storage .storage
@ -921,11 +968,13 @@ pub async fn batch_move_media(
processed: results.len(), processed: results.len(),
errors: Vec::new(), errors: Vec::new(),
})) }))
} },
Err(e) => Ok(Json(BatchOperationResponse { Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0, processed: 0,
errors: vec![e.to_string()], errors: vec![e.to_string()],
})), }))
},
} }
} }
@ -1088,7 +1137,9 @@ pub async fn permanent_delete_media(
path: item.path.to_string_lossy().to_string(), path: item.path.to_string_lossy().to_string(),
content_hash: Some(item.content_hash.clone()), content_hash: Some(item.content_hash.clone()),
file_size: Some(item.file_size), file_size: Some(item.file_size),
metadata_json: Some(serde_json::json!({"permanent": true}).to_string()), metadata_json: Some(
serde_json::json!({"permanent": true}).to_string(),
),
changed_by_device: None, changed_by_device: None,
timestamp: chrono::Utc::now(), timestamp: chrono::Utc::now(),
}; };

View file

@ -7,15 +7,22 @@
//! - Link reindexing //! - Link reindexing
use axum::{ use axum::{
Json, Router, Json,
Router,
extract::{Path, Query, State}, extract::{Path, Query, State},
routing::{get, post}, routing::{get, post},
}; };
use pinakes_core::model::{
BacklinkInfo,
GraphData,
GraphEdge,
GraphNode,
MarkdownLink,
MediaId,
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
use pinakes_core::model::{BacklinkInfo, GraphData, GraphEdge, GraphNode, MarkdownLink, MediaId};
use crate::{error::ApiError, state::AppState}; use crate::{error::ApiError, state::AppState};
// ===== Response DTOs ===== // ===== Response DTOs =====
@ -205,7 +212,8 @@ pub async fn get_backlinks(
let media_id = MediaId(id); let media_id = MediaId(id);
let backlinks = state.storage.get_backlinks(media_id).await?; let backlinks = state.storage.get_backlinks(media_id).await?;
let items: Vec<BacklinkItem> = backlinks.into_iter().map(BacklinkItem::from).collect(); let items: Vec<BacklinkItem> =
backlinks.into_iter().map(BacklinkItem::from).collect();
let count = items.len(); let count = items.len();
Ok(Json(BacklinksResponse { Ok(Json(BacklinksResponse {
@ -224,7 +232,8 @@ pub async fn get_outgoing_links(
let media_id = MediaId(id); let media_id = MediaId(id);
let links = state.storage.get_outgoing_links(media_id).await?; let links = state.storage.get_outgoing_links(media_id).await?;
let items: Vec<OutgoingLinkItem> = links.into_iter().map(OutgoingLinkItem::from).collect(); let items: Vec<OutgoingLinkItem> =
links.into_iter().map(OutgoingLinkItem::from).collect();
let count = items.len(); let count = items.len();
Ok(Json(OutgoingLinksResponse { Ok(Json(OutgoingLinksResponse {
@ -263,13 +272,13 @@ pub async fn reindex_links(
// Only process markdown files // Only process markdown files
use pinakes_core::media_type::{BuiltinMediaType, MediaType}; use pinakes_core::media_type::{BuiltinMediaType, MediaType};
match &media.media_type { match &media.media_type {
MediaType::Builtin(BuiltinMediaType::Markdown) => {} MediaType::Builtin(BuiltinMediaType::Markdown) => {},
_ => { _ => {
return Ok(Json(ReindexResponse { return Ok(Json(ReindexResponse {
message: "Skipped: not a markdown file".to_string(), message: "Skipped: not a markdown file".to_string(),
links_extracted: 0, links_extracted: 0,
})); }));
} },
} }
// Read the file content // Read the file content

View file

@ -1,12 +1,14 @@
use std::collections::HashMap;
use axum::{ use axum::{
Json, Router, Json,
Router,
extract::{Query, State}, extract::{Query, State},
response::IntoResponse, response::IntoResponse,
routing::get, routing::get,
}; };
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::{dto::MediaResponse, error::ApiError, state::AppState}; use crate::{dto::MediaResponse, error::ApiError, state::AppState};
@ -83,12 +85,14 @@ pub async fn get_timeline(
.into_iter() .into_iter()
.filter(|item| { .filter(|item| {
item.date_taken.is_some() item.date_taken.is_some()
&& item.media_type.category() == pinakes_core::media_type::MediaCategory::Image && item.media_type.category()
== pinakes_core::media_type::MediaCategory::Image
}) })
.collect(); .collect();
// Group by the requested period // Group by the requested period
let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> = HashMap::new(); let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> =
HashMap::new();
for photo in photos { for photo in photos {
if let Some(date_taken) = photo.date_taken { if let Some(date_taken) = photo.date_taken {
@ -122,7 +126,8 @@ pub async fn get_timeline(
.map(|(date, items)| { .map(|(date, items)| {
let cover_id = items.first().map(|i| i.id.0.to_string()); let cover_id = items.first().map(|i| i.id.0.to_string());
let count = items.len(); let count = items.len();
let items: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect(); let items: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
TimelineGroup { TimelineGroup {
date, date,
@ -150,7 +155,8 @@ pub async fn get_map_photos(
let min_lon = query.lon1.min(query.lon2); let min_lon = query.lon1.min(query.lon2);
let max_lon = query.lon1.max(query.lon2); let max_lon = query.lon1.max(query.lon2);
// Query all media (we'll filter in-memory for now - could optimize with DB query) // Query all media (we'll filter in-memory for now - could optimize with DB
// query)
let all_media = state let all_media = state
.storage .storage
.list_media(&pinakes_core::model::Pagination { .list_media(&pinakes_core::model::Pagination {

View file

@ -1,15 +1,11 @@
use axum::Json; use axum::{
use axum::extract::{Extension, Path, State}; Json,
extract::{Extension, Path, State},
};
use pinakes_core::{model::MediaId, playlists::Playlist, users::UserId};
use uuid::Uuid; use uuid::Uuid;
use crate::auth::resolve_user_id; use crate::{auth::resolve_user_id, dto::*, error::ApiError, state::AppState};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::playlists::Playlist;
use pinakes_core::users::UserId;
/// Check whether a user has access to a playlist. /// Check whether a user has access to a playlist.
/// ///
@ -89,7 +85,8 @@ pub async fn get_playlist(
Path(id): Path<Uuid>, Path(id): Path<Uuid>,
) -> Result<Json<PlaylistResponse>, ApiError> { ) -> Result<Json<PlaylistResponse>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?; let user_id = resolve_user_id(&state.storage, &username).await?;
let playlist = check_playlist_access(&state.storage, id, user_id, false).await?; let playlist =
check_playlist_access(&state.storage, id, user_id, false).await?;
Ok(Json(PlaylistResponse::from(playlist))) Ok(Json(PlaylistResponse::from(playlist)))
} }
@ -146,7 +143,7 @@ pub async fn add_item(
None => { None => {
let items = state.storage.get_playlist_items(id).await?; let items = state.storage.get_playlist_items(id).await?;
items.len() as i32 items.len() as i32
} },
}; };
state state
.storage .storage

View file

@ -1,9 +1,9 @@
use axum::Json; use axum::{
use axum::extract::{Path, State}; Json,
extract::{Path, State},
};
use crate::dto::*; use crate::{dto::*, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
/// List all installed plugins /// List all installed plugins
pub async fn list_plugins( pub async fn list_plugins(
@ -57,7 +57,8 @@ pub async fn install_plugin(
)) ))
})?; })?;
let plugin_id = plugin_manager let plugin_id =
plugin_manager
.install_plugin(&req.source) .install_plugin(&req.source)
.await .await
.map_err(|e| { .map_err(|e| {
@ -66,7 +67,8 @@ pub async fn install_plugin(
)) ))
})?; })?;
let plugin = plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| { let plugin =
plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound( ApiError(pinakes_core::error::PinakesError::NotFound(
"Plugin installed but not found".to_string(), "Plugin installed but not found".to_string(),
)) ))

View file

@ -1,9 +1,10 @@
use axum::Json; use axum::{
use axum::extract::{Path, State}; Json,
extract::{Path, State},
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::error::ApiError; use crate::{error::ApiError, state::AppState};
use crate::state::AppState;
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct CreateSavedSearchRequest { pub struct CreateSavedSearchRequest {
@ -52,12 +53,14 @@ pub async fn list_saved_searches(
Ok(Json( Ok(Json(
searches searches
.into_iter() .into_iter()
.map(|s| SavedSearchResponse { .map(|s| {
SavedSearchResponse {
id: s.id.to_string(), id: s.id.to_string(),
name: s.name, name: s.name,
query: s.query, query: s.query,
sort_order: s.sort_order, sort_order: s.sort_order,
created_at: s.created_at, created_at: s.created_at,
}
}) })
.collect(), .collect(),
)) ))

View file

@ -1,9 +1,6 @@
use axum::Json; use axum::{Json, extract::State};
use axum::extract::State;
use crate::dto::*; use crate::{dto::*, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
/// Trigger a scan as a background job. Returns the job ID immediately. /// Trigger a scan as a background job. Returns the job ID immediately.
pub async fn trigger_scan( pub async fn trigger_scan(
@ -17,7 +14,9 @@ pub async fn trigger_scan(
})) }))
} }
pub async fn scan_status(State(state): State<AppState>) -> Json<ScanStatusResponse> { pub async fn scan_status(
State(state): State<AppState>,
) -> Json<ScanStatusResponse> {
let snapshot = state.scan_progress.snapshot(); let snapshot = state.scan_progress.snapshot();
let error_count = snapshot.errors.len(); let error_count = snapshot.errors.len();
Json(ScanStatusResponse { Json(ScanStatusResponse {

View file

@ -1,9 +1,9 @@
use axum::Json; use axum::{
use axum::extract::{Path, State}; Json,
extract::{Path, State},
};
use crate::dto::ScheduledTaskResponse; use crate::{dto::ScheduledTaskResponse, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
pub async fn list_scheduled_tasks( pub async fn list_scheduled_tasks(
State(state): State<AppState>, State(state): State<AppState>,
@ -11,7 +11,8 @@ pub async fn list_scheduled_tasks(
let tasks = state.scheduler.list_tasks().await; let tasks = state.scheduler.list_tasks().await;
let responses: Vec<ScheduledTaskResponse> = tasks let responses: Vec<ScheduledTaskResponse> = tasks
.into_iter() .into_iter()
.map(|t| ScheduledTaskResponse { .map(|t| {
ScheduledTaskResponse {
id: t.id, id: t.id,
name: t.name, name: t.name,
schedule: t.schedule.display_string(), schedule: t.schedule.display_string(),
@ -19,6 +20,7 @@ pub async fn list_scheduled_tasks(
last_run: t.last_run.map(|dt| dt.to_rfc3339()), last_run: t.last_run.map(|dt| dt.to_rfc3339()),
next_run: t.next_run.map(|dt| dt.to_rfc3339()), next_run: t.next_run.map(|dt| dt.to_rfc3339()),
last_status: t.last_status, last_status: t.last_status,
}
}) })
.collect(); .collect();
Ok(Json(responses)) Ok(Json(responses))
@ -29,13 +31,17 @@ pub async fn toggle_scheduled_task(
Path(id): Path<String>, Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> { ) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.toggle_task(&id).await { match state.scheduler.toggle_task(&id).await {
Some(enabled) => Ok(Json(serde_json::json!({ Some(enabled) => {
Ok(Json(serde_json::json!({
"id": id, "id": id,
"enabled": enabled, "enabled": enabled,
}))), })))
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound( },
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"), format!("scheduled task not found: {id}"),
))), )))
},
} }
} }
@ -44,12 +50,16 @@ pub async fn run_scheduled_task_now(
Path(id): Path<String>, Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> { ) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.run_now(&id).await { match state.scheduler.run_now(&id).await {
Some(job_id) => Ok(Json(serde_json::json!({ Some(job_id) => {
Ok(Json(serde_json::json!({
"id": id, "id": id,
"job_id": job_id, "job_id": job_id,
}))), })))
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound( },
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"), format!("scheduled task not found: {id}"),
))), )))
},
} }
} }

View file

@ -1,12 +1,13 @@
use axum::Json; use axum::{
use axum::extract::{Query, State}; Json,
extract::{Query, State},
};
use pinakes_core::{
model::Pagination,
search::{SearchRequest, SortOrder, parse_search_query},
};
use crate::dto::*; use crate::{dto::*, error::ApiError, state::AppState};
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::Pagination;
use pinakes_core::search::{SearchRequest, SortOrder, parse_search_query};
fn resolve_sort(sort: Option<&str>) -> SortOrder { fn resolve_sort(sort: Option<&str>) -> SortOrder {
match sort { match sort {

Some files were not shown because too many files have changed in this diff Show more