treewide: fix various UI bugs; optimize crypto dependencies & format

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If8fe8b38c1d9c4fecd40ff71f88d2ae06a6a6964
This commit is contained in:
raf 2026-02-10 12:56:05 +03:00
commit 3ccddce7fd
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
178 changed files with 58342 additions and 54241 deletions

8
.clippy.toml Normal file
View file

@ -0,0 +1,8 @@
await-holding-invalid-types = [
"generational_box::GenerationalRef",
{ path = "generational_box::GenerationalRef", reason = "Reads should not be held over an await point. This will cause any writes to fail while the await is pending since the read borrow is still active." },
"generational_box::GenerationalRefMut",
{ path = "generational_box::GenerationalRefMut", reason = "Write should not be held over an await point. This will cause any reads or writes to fail while the await is pending since the write borrow is still active." },
"dioxus_signals::WriteLock",
{ path = "dioxus_signals::WriteLock", reason = "Write should not be held over an await point. This will cause any reads or writes to fail while the await is pending since the write borrow is still active." },
]

27
.rustfmt.toml Normal file
View file

@ -0,0 +1,27 @@
condense_wildcard_suffixes = true
doc_comment_code_block_width = 80
edition = "2024" # Keep in sync with Cargo.toml.
enum_discrim_align_threshold = 60
force_explicit_abi = false
force_multiline_blocks = true
format_code_in_doc_comments = true
format_macro_matchers = true
format_strings = true
group_imports = "StdExternalCrate"
hex_literal_case = "Upper"
imports_granularity = "Crate"
imports_layout = "HorizontalVertical"
inline_attribute_width = 60
match_block_trailing_comma = true
max_width = 80
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
overflow_delimited_expr = true
struct_field_align_threshold = 60
tab_spaces = 2
unstable_features = true
use_field_init_shorthand = true
use_try_shorthand = true
wrap_comments = true

57
Cargo.lock generated
View file

@ -28,6 +28,18 @@ dependencies = [
"cpufeatures 0.2.17",
]
[[package]]
name = "ahash"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.4"
@ -799,6 +811,12 @@ dependencies = [
"objc",
]
[[package]]
name = "codemap"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e769b5c8c8283982a987c6e948e540254f1058d5a74b8794914d4ef5fc2a24"
[[package]]
name = "color_quant"
version = "1.1.0"
@ -3071,6 +3089,31 @@ dependencies = [
"web-time",
]
[[package]]
name = "grass"
version = "0.13.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7a68216437ef68f0738e48d6c7bb9e6e6a92237e001b03d838314b068f33c94"
dependencies = [
"clap",
"getrandom 0.2.17",
"grass_compiler",
]
[[package]]
name = "grass_compiler"
version = "0.13.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d9e3df7f0222ce5184154973d247c591d9aadc28ce7a73c6cd31100c9facff6"
dependencies = [
"codemap",
"indexmap",
"lasso",
"once_cell",
"phf 0.11.3",
"rand 0.8.5",
]
[[package]]
name = "gray_matter"
version = "0.3.2"
@ -3169,6 +3212,10 @@ name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"ahash",
"allocator-api2",
]
[[package]]
name = "hashbrown"
@ -3911,6 +3958,15 @@ version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf36173d4167ed999940f804952e6b08197cae5ad5d572eb4db150ce8ad5d58f"
[[package]]
name = "lasso"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e14eda50a3494b3bf7b9ce51c52434a761e383d7238ce1dd5dcec2fbc13e9fb"
dependencies = [
"hashbrown 0.14.5",
]
[[package]]
name = "lazy-js-bundle"
version = "0.7.3"
@ -5370,6 +5426,7 @@ dependencies = [
"dioxus-free-icons",
"futures",
"gloo-timers",
"grass",
"gray_matter",
"pulldown-cmark",
"rand 0.10.0",

View file

@ -135,3 +135,12 @@ http = "1.4.0"
# WASM runtime for plugins
wasmtime = { version = "41.0.3", features = ["component-model"] }
wit-bindgen = "0.52.0"
[profile.dev.package]
blake3 = { opt-level = 3 }
image = { opt-level = 3 }
regex = { opt-level = 3 }
argon2 = { opt-level = 3 }
matroska = { opt-level = 3 }
lopdf = { opt-level = 3 }
lofty = { opt-level = 3 }

View file

@ -4,8 +4,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
use crate::{model::MediaId, users::UserId};
/// A tracked usage event for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,8 +1,10 @@
use uuid::Uuid;
use crate::error::Result;
use crate::model::{AuditAction, AuditEntry, MediaId};
use crate::storage::DynStorageBackend;
use crate::{
error::Result,
model::{AuditAction, AuditEntry, MediaId},
storage::DynStorageBackend,
};
pub async fn record_action(
storage: &DynStorageBackend,

View file

@ -2,7 +2,8 @@ use crate::error::{PinakesError, Result};
/// Normalize ISBN to ISBN-13 format
pub fn normalize_isbn(isbn: &str) -> Result<String> {
// Remove hyphens, spaces, and any non-numeric characters (except X for ISBN-10)
// Remove hyphens, spaces, and any non-numeric characters (except X for
// ISBN-10)
let clean: String = isbn
.chars()
.filter(|c| c.is_ascii_digit() || *c == 'X' || *c == 'x')
@ -19,11 +20,13 @@ pub fn normalize_isbn(isbn: &str) -> Result<String> {
isbn
)))
}
}
_ => Err(PinakesError::InvalidData(format!(
},
_ => {
Err(PinakesError::InvalidData(format!(
"Invalid ISBN length: {}",
isbn
))),
)))
},
}
}
@ -57,7 +60,9 @@ fn calculate_isbn13_check_digit(isbn_without_check: &str) -> Result<u32> {
let sum: u32 = isbn_without_check
.chars()
.enumerate()
.filter_map(|(i, c)| c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 }))
.filter_map(|(i, c)| {
c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 })
})
.sum();
let check_digit = (10 - (sum % 10)) % 10;
@ -73,7 +78,9 @@ fn is_valid_isbn13(isbn13: &str) -> bool {
let sum: u32 = isbn13
.chars()
.enumerate()
.filter_map(|(i, c)| c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 }))
.filter_map(|(i, c)| {
c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 })
})
.sum();
sum.is_multiple_of(10)
@ -128,7 +135,7 @@ pub fn parse_author_file_as(name: &str) -> String {
let surname = parts.last().unwrap();
let given_names = parts[..parts.len() - 1].join(" ");
format!("{}, {}", surname, given_names)
}
},
}
}
@ -159,7 +166,8 @@ mod tests {
#[test]
fn test_extract_isbn() {
let text = "This book's ISBN is 978-0-306-40615-7 and was published in 2020.";
let text =
"This book's ISBN is 978-0-306-40615-7 and was published in 2020.";
assert_eq!(
extract_isbn_from_text(text),
Some("9780306406157".to_string())

View file

@ -7,10 +7,14 @@
//! - Metrics tracking (hit rate, size, evictions)
//! - Specialized caches for different data types
use std::hash::Hash;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Duration;
use std::{
hash::Hash,
sync::{
Arc,
atomic::{AtomicU64, Ordering},
},
time::Duration,
};
use moka::future::Cache as MokaCache;
@ -97,7 +101,11 @@ where
}
/// Create a new cache with TTL, max capacity, and time-to-idle.
pub fn new_with_idle(ttl: Duration, tti: Duration, max_capacity: u64) -> Self {
pub fn new_with_idle(
ttl: Duration,
tti: Duration,
max_capacity: u64,
) -> Self {
let inner = MokaCache::builder()
.time_to_live(ttl)
.time_to_idle(tti)
@ -116,11 +124,11 @@ where
Some(value) => {
self.metrics.record_hit();
Some(value)
}
},
None => {
self.metrics.record_miss();
None
}
},
}
}
@ -172,7 +180,12 @@ impl QueryCache {
}
/// Generate a cache key from query parameters.
fn make_key(query: &str, offset: u64, limit: u64, sort: Option<&str>) -> String {
fn make_key(
query: &str,
offset: u64,
limit: u64,
sort: Option<&str>,
) -> String {
use std::hash::{DefaultHasher, Hasher};
let mut hasher = DefaultHasher::new();
hasher.write(query.as_bytes());
@ -234,7 +247,8 @@ impl MetadataCache {
}
pub async fn insert(&self, content_hash: &str, metadata_json: String) {
self.inner
self
.inner
.insert(content_hash.to_string(), metadata_json)
.await;
}
@ -332,7 +346,8 @@ pub struct CacheLayer {
}
impl CacheLayer {
/// Create a new cache layer with the specified TTL (using defaults for other settings).
/// Create a new cache layer with the specified TTL (using defaults for other
/// settings).
pub fn new(ttl_secs: u64) -> Self {
let config = CacheConfig {
response_ttl_secs: ttl_secs,
@ -413,8 +428,10 @@ pub struct CacheLayerStats {
impl CacheLayerStats {
/// Get the overall hit rate across all caches.
pub fn overall_hit_rate(&self) -> f64 {
let total_hits =
self.responses.hits + self.queries.hits + self.metadata.hits + self.media.hits;
let total_hits = self.responses.hits
+ self.queries.hits
+ self.metadata.hits
+ self.media.hits;
let total_requests = total_hits
+ self.responses.misses
+ self.queries.misses
@ -430,7 +447,10 @@ impl CacheLayerStats {
/// Get the total number of entries across all caches.
pub fn total_entries(&self) -> u64 {
self.responses.size + self.queries.size + self.metadata.size + self.media.size
self.responses.size
+ self.queries.size
+ self.metadata.size
+ self.media.size
}
}

View file

@ -1,8 +1,6 @@
use uuid::Uuid;
use crate::error::Result;
use crate::model::*;
use crate::storage::DynStorageBackend;
use crate::{error::Result, model::*, storage::DynStorageBackend};
pub async fn create_collection(
storage: &DynStorageBackend,
@ -72,7 +70,9 @@ pub async fn get_members(
} else {
Ok(Vec::new())
}
}
CollectionKind::Manual => storage.get_collection_members(collection_id).await,
},
CollectionKind::Manual => {
storage.get_collection_members(collection_id).await
},
}
}

View file

@ -52,7 +52,7 @@ fn expand_env_var_string(input: &str) -> crate::error::Result<String> {
"environment variable not set: {}",
var_name
)));
}
},
}
} else if ch == '\\' {
// Handle escaped characters
@ -249,7 +249,9 @@ pub struct UserAccount {
pub role: UserRole,
}
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
#[derive(
Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize,
)]
#[serde(rename_all = "lowercase")]
pub enum UserRole {
Admin,
@ -811,12 +813,13 @@ pub struct ServerConfig {
pub host: String,
pub port: u16,
/// Optional API key for bearer token authentication.
/// If set, all requests (except /health) must include `Authorization: Bearer <key>`.
/// Can also be set via `PINAKES_API_KEY` environment variable.
/// If set, all requests (except /health) must include `Authorization: Bearer
/// <key>`. Can also be set via `PINAKES_API_KEY` environment variable.
pub api_key: Option<String>,
/// Explicitly disable authentication (INSECURE - use only for development).
/// When true, all requests are allowed without authentication.
/// This must be explicitly set to true; empty api_key alone is not sufficient.
/// This must be explicitly set to true; empty api_key alone is not
/// sufficient.
#[serde(default)]
pub authentication_disabled: bool,
/// TLS/HTTPS configuration
@ -903,7 +906,9 @@ impl TlsConfig {
impl Config {
pub fn from_file(path: &Path) -> crate::error::Result<Self> {
let content = std::fs::read_to_string(path).map_err(|e| {
crate::error::PinakesError::Config(format!("failed to read config file: {e}"))
crate::error::PinakesError::Config(format!(
"failed to read config file: {e}"
))
})?;
let mut config: Self = toml::from_str(&content).map_err(|e| {
crate::error::PinakesError::Config(format!("failed to parse config: {e}"))
@ -934,13 +939,16 @@ impl Config {
// Enrichment API keys
if let Some(ref api_key) = self.enrichment.sources.musicbrainz.api_key {
self.enrichment.sources.musicbrainz.api_key = Some(expand_env_var_string(api_key)?);
self.enrichment.sources.musicbrainz.api_key =
Some(expand_env_var_string(api_key)?);
}
if let Some(ref api_key) = self.enrichment.sources.tmdb.api_key {
self.enrichment.sources.tmdb.api_key = Some(expand_env_var_string(api_key)?);
self.enrichment.sources.tmdb.api_key =
Some(expand_env_var_string(api_key)?);
}
if let Some(ref api_key) = self.enrichment.sources.lastfm.api_key {
self.enrichment.sources.lastfm.api_key = Some(expand_env_var_string(api_key)?);
self.enrichment.sources.lastfm.api_key =
Some(expand_env_var_string(api_key)?);
}
Ok(())
@ -964,7 +972,9 @@ impl Config {
std::fs::create_dir_all(parent)?;
}
let content = toml::to_string_pretty(self).map_err(|e| {
crate::error::PinakesError::Config(format!("failed to serialize config: {e}"))
crate::error::PinakesError::Config(format!(
"failed to serialize config: {e}"
))
})?;
std::fs::write(path, content)?;
Ok(())
@ -974,7 +984,8 @@ impl Config {
pub fn ensure_dirs(&self) -> crate::error::Result<()> {
if let Some(ref sqlite) = self.storage.sqlite {
if let Some(parent) = sqlite.path.parent() {
// Skip if parent is empty string (happens with bare filenames like "pinakes.db")
// Skip if parent is empty string (happens with bare filenames like
// "pinakes.db")
if !parent.as_os_str().is_empty() {
std::fs::create_dir_all(parent)?;
let metadata = std::fs::metadata(parent)?;
@ -1015,19 +1026,22 @@ impl Config {
if self.scanning.poll_interval_secs == 0 {
return Err("poll interval cannot be 0".into());
}
if self.scanning.import_concurrency == 0 || self.scanning.import_concurrency > 256 {
if self.scanning.import_concurrency == 0
|| self.scanning.import_concurrency > 256
{
return Err("import_concurrency must be between 1 and 256".into());
}
// Validate authentication configuration
let has_api_key = self.server.api_key.as_ref().is_some_and(|k| !k.is_empty());
let has_api_key =
self.server.api_key.as_ref().is_some_and(|k| !k.is_empty());
let has_accounts = !self.accounts.users.is_empty();
let auth_disabled = self.server.authentication_disabled;
if !auth_disabled && !has_api_key && !has_accounts {
return Err(
"authentication is not configured: set an api_key, configure user accounts, \
or explicitly set authentication_disabled = true"
"authentication is not configured: set an api_key, configure user \
accounts, or explicitly set authentication_disabled = true"
.into(),
);
}
@ -1036,9 +1050,11 @@ impl Config {
if let Some(ref api_key) = self.server.api_key
&& api_key.is_empty()
{
return Err("empty api_key is not allowed. To disable authentication, \
set authentication_disabled = true instead"
.into());
return Err(
"empty api_key is not allowed. To disable authentication, set \
authentication_disabled = true instead"
.into(),
);
}
// Require TLS when authentication is enabled on non-localhost
@ -1052,8 +1068,8 @@ impl Config {
&& !self.server.tls.enabled
{
return Err(
"TLS must be enabled when authentication is used on non-localhost hosts. \
Set server.tls.enabled = true or bind to localhost only"
"TLS must be enabled when authentication is used on non-localhost \
hosts. Set server.tls.enabled = true or bind to localhost only"
.into(),
);
}

View file

@ -1,12 +1,17 @@
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::googlebooks::GoogleBooksClient;
use super::openlibrary::OpenLibraryClient;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
use super::{
EnrichmentSourceType,
ExternalMetadata,
MetadataEnricher,
googlebooks::GoogleBooksClient,
openlibrary::OpenLibraryClient,
};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
/// Book enricher that tries OpenLibrary first, then falls back to Google Books
pub struct BookEnricher {
@ -23,7 +28,10 @@ impl BookEnricher {
}
/// Try to enrich from OpenLibrary first
pub async fn try_openlibrary(&self, isbn: &str) -> Result<Option<ExternalMetadata>> {
pub async fn try_openlibrary(
&self,
isbn: &str,
) -> Result<Option<ExternalMetadata>> {
match self.openlibrary.fetch_by_isbn(isbn).await {
Ok(book) => {
let metadata_json = serde_json::to_string(&book).map_err(|e| {
@ -39,13 +47,16 @@ impl BookEnricher {
confidence: calculate_openlibrary_confidence(&book),
last_updated: Utc::now(),
}))
}
},
Err(_) => Ok(None),
}
}
/// Try to enrich from Google Books
pub async fn try_googlebooks(&self, isbn: &str) -> Result<Option<ExternalMetadata>> {
pub async fn try_googlebooks(
&self,
isbn: &str,
) -> Result<Option<ExternalMetadata>> {
match self.googlebooks.fetch_by_isbn(isbn).await {
Ok(books) if !books.is_empty() => {
let book = &books[0];
@ -62,7 +73,7 @@ impl BookEnricher {
confidence: calculate_googlebooks_confidence(&book.volume_info),
last_updated: Utc::now(),
}))
}
},
_ => Ok(None),
}
}
@ -123,7 +134,8 @@ impl MetadataEnricher for BookEnricher {
}
async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
// Try ISBN-based enrichment first by checking title/description for ISBN patterns
// Try ISBN-based enrichment first by checking title/description for ISBN
// patterns
if let Some(ref title) = item.title {
if let Some(isbn) = crate::books::extract_isbn_from_text(title) {
if let Some(mut metadata) = self.try_openlibrary(&isbn).await? {
@ -147,7 +159,9 @@ impl MetadataEnricher for BookEnricher {
}
/// Calculate confidence score for OpenLibrary metadata
pub fn calculate_openlibrary_confidence(book: &super::openlibrary::OpenLibraryBook) -> f64 {
pub fn calculate_openlibrary_confidence(
book: &super::openlibrary::OpenLibraryBook,
) -> f64 {
let mut score: f64 = 0.5; // Base score
if book.title.is_some() {
@ -173,7 +187,9 @@ pub fn calculate_openlibrary_confidence(book: &super::openlibrary::OpenLibraryBo
}
/// Calculate confidence score for Google Books metadata
pub fn calculate_googlebooks_confidence(info: &super::googlebooks::VolumeInfo) -> f64 {
pub fn calculate_googlebooks_confidence(
info: &super::googlebooks::VolumeInfo,
) -> f64 {
let mut score: f64 = 0.5; // Base score
if info.title.is_some() {

View file

@ -31,8 +31,7 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key));
}
let response =
self.client.get(&url).send().await.map_err(|e| {
let response = self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Google Books request failed: {}", e))
})?;
@ -44,14 +43,21 @@ impl GoogleBooksClient {
}
let volumes: GoogleBooksResponse = response.json().await.map_err(|e| {
PinakesError::External(format!("Failed to parse Google Books response: {}", e))
PinakesError::External(format!(
"Failed to parse Google Books response: {}",
e
))
})?;
Ok(volumes.items)
}
/// Search for books by title and author
pub async fn search(&self, title: &str, author: Option<&str>) -> Result<Vec<GoogleBook>> {
pub async fn search(
&self,
title: &str,
author: Option<&str>,
) -> Result<Vec<GoogleBook>> {
let mut query = format!("intitle:{}", urlencoding::encode(title));
if let Some(author) = author {
@ -67,8 +73,7 @@ impl GoogleBooksClient {
url.push_str(&format!("&key={}", key));
}
let response =
self.client.get(&url).send().await.map_err(|e| {
let response = self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Google Books search failed: {}", e))
})?;
@ -93,12 +98,10 @@ impl GoogleBooksClient {
.replace("&zoom=1", "&zoom=2")
.replace("&edge=curl", "");
let response = self
.client
.get(&high_res_link)
.send()
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
let response =
self.client.get(&high_res_link).send().await.map_err(|e| {
PinakesError::External(format!("Cover download failed: {}", e))
})?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
@ -107,11 +110,9 @@ impl GoogleBooksClient {
)));
}
response
.bytes()
.await
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
response.bytes().await.map(|b| b.to_vec()).map_err(|e| {
PinakesError::External(format!("Failed to read cover data: {}", e))
})
}
}
@ -201,7 +202,8 @@ pub struct ImageLinks {
impl ImageLinks {
/// Get the best available image link (highest resolution)
pub fn best_link(&self) -> Option<&String> {
self.extra_large
self
.extra_large
.as_ref()
.or(self.large.as_ref())
.or(self.medium.as_ref())

View file

@ -5,10 +5,11 @@ use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
pub struct LastFmEnricher {
client: reqwest::Client,
@ -70,11 +71,15 @@ impl MetadataEnricher for LastFmEnricher {
}
let body = resp.text().await.map_err(|e| {
PinakesError::MetadataExtraction(format!("Last.fm response read failed: {e}"))
PinakesError::MetadataExtraction(format!(
"Last.fm response read failed: {e}"
))
})?;
let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
PinakesError::MetadataExtraction(format!("Last.fm JSON parse failed: {e}"))
PinakesError::MetadataExtraction(format!(
"Last.fm JSON parse failed: {e}"
))
})?;
// Check for error response

View file

@ -11,8 +11,10 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::error::Result;
use crate::model::{MediaId, MediaItem};
use crate::{
error::Result,
model::{MediaId, MediaItem},
};
/// Externally-sourced metadata for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -5,10 +5,11 @@ use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
pub struct MusicBrainzEnricher {
client: reqwest::Client,
@ -37,8 +38,8 @@ impl MusicBrainzEnricher {
fn escape_lucene_query(s: &str) -> String {
let special_chars = [
'+', '-', '&', '|', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?', ':', '\\',
'/',
'+', '-', '&', '|', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*',
'?', ':', '\\', '/',
];
let mut escaped = String::with_capacity(s.len() * 2);
for c in s.chars() {
@ -80,7 +81,9 @@ impl MetadataEnricher for MusicBrainzEnricher {
.send()
.await
.map_err(|e| {
PinakesError::MetadataExtraction(format!("MusicBrainz request failed: {e}"))
PinakesError::MetadataExtraction(format!(
"MusicBrainz request failed: {e}"
))
})?;
if !resp.status().is_success() {
@ -97,12 +100,16 @@ impl MetadataEnricher for MusicBrainzEnricher {
}
let body = resp.text().await.map_err(|e| {
PinakesError::MetadataExtraction(format!("MusicBrainz response read failed: {e}"))
PinakesError::MetadataExtraction(format!(
"MusicBrainz response read failed: {e}"
))
})?;
// Parse to check if we got results
let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
PinakesError::MetadataExtraction(format!("MusicBrainz JSON parse failed: {e}"))
PinakesError::MetadataExtraction(format!(
"MusicBrainz JSON parse failed: {e}"
))
})?;
let recordings = json.get("recordings").and_then(|r| r.as_array());

View file

@ -30,8 +30,7 @@ impl OpenLibraryClient {
pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<OpenLibraryBook> {
let url = format!("{}/isbn/{}.json", self.base_url, isbn);
let response =
self.client.get(&url).send().await.map_err(|e| {
let response = self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("OpenLibrary request failed: {}", e))
})?;
@ -43,7 +42,10 @@ impl OpenLibraryClient {
}
response.json::<OpenLibraryBook>().await.map_err(|e| {
PinakesError::External(format!("Failed to parse OpenLibrary response: {}", e))
PinakesError::External(format!(
"Failed to parse OpenLibrary response: {}",
e
))
})
}
@ -65,12 +67,9 @@ impl OpenLibraryClient {
url.push_str("&limit=5");
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("OpenLibrary search failed: {}", e)))?;
let response = self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("OpenLibrary search failed: {}", e))
})?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
@ -79,7 +78,8 @@ impl OpenLibraryClient {
)));
}
let search_response: OpenLibrarySearchResponse = response.json().await.map_err(|e| {
let search_response: OpenLibrarySearchResponse =
response.json().await.map_err(|e| {
PinakesError::External(format!("Failed to parse search results: {}", e))
})?;
@ -87,7 +87,11 @@ impl OpenLibraryClient {
}
/// Fetch cover image by cover ID
pub async fn fetch_cover(&self, cover_id: i64, size: CoverSize) -> Result<Vec<u8>> {
pub async fn fetch_cover(
&self,
cover_id: i64,
size: CoverSize,
) -> Result<Vec<u8>> {
let size_str = match size {
CoverSize::Small => "S",
CoverSize::Medium => "M",
@ -99,12 +103,9 @@ impl OpenLibraryClient {
cover_id, size_str
);
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
let response = self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Cover download failed: {}", e))
})?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
@ -113,15 +114,17 @@ impl OpenLibraryClient {
)));
}
response
.bytes()
.await
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
response.bytes().await.map(|b| b.to_vec()).map_err(|e| {
PinakesError::External(format!("Failed to read cover data: {}", e))
})
}
/// Fetch cover by ISBN
pub async fn fetch_cover_by_isbn(&self, isbn: &str, size: CoverSize) -> Result<Vec<u8>> {
pub async fn fetch_cover_by_isbn(
&self,
isbn: &str,
size: CoverSize,
) -> Result<Vec<u8>> {
let size_str = match size {
CoverSize::Small => "S",
CoverSize::Medium => "M",
@ -133,12 +136,9 @@ impl OpenLibraryClient {
isbn, size_str
);
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
let response = self.client.get(&url).send().await.map_err(|e| {
PinakesError::External(format!("Cover download failed: {}", e))
})?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
@ -147,11 +147,9 @@ impl OpenLibraryClient {
)));
}
response
.bytes()
.await
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
response.bytes().await.map(|b| b.to_vec()).map_err(|e| {
PinakesError::External(format!("Failed to read cover data: {}", e))
})
}
}
@ -278,7 +276,8 @@ mod tests {
#[test]
fn test_string_or_object_parsing() {
let string_desc: StringOrObject = serde_json::from_str(r#""Simple description""#).unwrap();
let string_desc: StringOrObject =
serde_json::from_str(r#""Simple description""#).unwrap();
assert_eq!(string_desc.as_str(), "Simple description");
let object_desc: StringOrObject =

View file

@ -5,10 +5,11 @@ use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
use crate::{
error::{PinakesError, Result},
model::MediaItem,
};
pub struct TmdbEnricher {
client: reqwest::Client,
@ -54,7 +55,9 @@ impl MetadataEnricher for TmdbEnricher {
])
.send()
.await
.map_err(|e| PinakesError::MetadataExtraction(format!("TMDB request failed: {e}")))?;
.map_err(|e| {
PinakesError::MetadataExtraction(format!("TMDB request failed: {e}"))
})?;
if !resp.status().is_success() {
let status = resp.status();
@ -72,7 +75,9 @@ impl MetadataEnricher for TmdbEnricher {
}
let body = resp.text().await.map_err(|e| {
PinakesError::MetadataExtraction(format!("TMDB response read failed: {e}"))
PinakesError::MetadataExtraction(format!(
"TMDB response read failed: {e}"
))
})?;
let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {

View file

@ -1,9 +1,12 @@
//! Auto-detection of photo events and albums based on time and location proximity
//! Auto-detection of photo events and albums based on time and location
//! proximity
use chrono::{DateTime, Utc};
use crate::error::Result;
use crate::model::{MediaId, MediaItem};
use crate::{
error::Result,
model::{MediaId, MediaItem},
};
/// Configuration for event detection
#[derive(Debug, Clone)]
@ -53,7 +56,9 @@ fn haversine_distance(lat1: f64, lon1: f64, lat2: f64, lon2: f64) -> f64 {
let dlon = (lon2 - lon1).to_radians();
let a = (dlat / 2.0).sin().powi(2)
+ lat1.to_radians().cos() * lat2.to_radians().cos() * (dlon / 2.0).sin().powi(2);
+ lat1.to_radians().cos()
* lat2.to_radians().cos()
* (dlon / 2.0).sin().powi(2);
let c = 2.0 * a.sqrt().atan2((1.0 - a).sqrt());
@ -103,7 +108,7 @@ pub fn detect_events(
(Some(max_dist), Some((lat1, lon1)), Some((lat2, lon2))) => {
let dist = haversine_distance(lat1, lon1, lat2, lon2);
dist <= max_dist
}
},
// If no location constraint or missing GPS, consider location OK
_ => true,
};
@ -124,7 +129,8 @@ pub fn detect_events(
} else {
// Start new event if current has enough photos
if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d"));
let event_name =
format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent {
suggested_name: event_name,
@ -145,7 +151,8 @@ pub fn detect_events(
// Don't forget the last event
if current_event_items.len() >= config.min_photos {
let event_name = format!("Event on {}", current_start_time.format("%Y-%m-%d"));
let event_name =
format!("Event on {}", current_start_time.format("%Y-%m-%d"));
events.push(DetectedEvent {
suggested_name: event_name,

View file

@ -2,9 +2,7 @@ use std::path::Path;
use serde::{Deserialize, Serialize};
use crate::error::Result;
use crate::jobs::ExportFormat;
use crate::storage::DynStorageBackend;
use crate::{error::Result, jobs::ExportFormat, storage::DynStorageBackend};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportResult {
@ -28,13 +26,17 @@ pub async fn export_library(
match format {
ExportFormat::Json => {
let json = serde_json::to_string_pretty(&items)
.map_err(|e| crate::error::PinakesError::Config(format!("json serialize: {e}")))?;
let json = serde_json::to_string_pretty(&items).map_err(|e| {
crate::error::PinakesError::Config(format!("json serialize: {e}"))
})?;
std::fs::write(destination, json)?;
}
},
ExportFormat::Csv => {
let mut csv = String::new();
csv.push_str("id,path,file_name,media_type,content_hash,file_size,title,artist,album,genre,year,duration_secs,description,created_at,updated_at\n");
csv.push_str(
"id,path,file_name,media_type,content_hash,file_size,title,artist,\
album,genre,year,duration_secs,description,created_at,updated_at\n",
);
for item in &items {
csv.push_str(&format!(
"{},{},{},{:?},{},{},{},{},{},{},{},{},{},{},{}\n",
@ -49,7 +51,8 @@ pub async fn export_library(
item.album.as_deref().unwrap_or(""),
item.genre.as_deref().unwrap_or(""),
item.year.map(|y| y.to_string()).unwrap_or_default(),
item.duration_secs
item
.duration_secs
.map(|d| d.to_string())
.unwrap_or_default(),
item.description.as_deref().unwrap_or(""),
@ -58,7 +61,7 @@ pub async fn export_library(
));
}
std::fs::write(destination, csv)?;
}
},
}
Ok(ExportResult {

View file

@ -1,7 +1,6 @@
use std::path::Path;
use crate::error::Result;
use crate::model::ContentHash;
use crate::{error::Result, model::ContentHash};
const BUFFER_SIZE: usize = 65536;

View file

@ -1,17 +1,21 @@
use std::path::{Path, PathBuf};
use std::time::SystemTime;
use std::{
path::{Path, PathBuf},
time::SystemTime,
};
use tracing::info;
use crate::audit;
use crate::error::{PinakesError, Result};
use crate::hash::compute_file_hash;
use crate::links;
use crate::media_type::{BuiltinMediaType, MediaType};
use crate::metadata;
use crate::model::*;
use crate::storage::DynStorageBackend;
use crate::thumbnail;
use crate::{
audit,
error::{PinakesError, Result},
hash::compute_file_hash,
links,
media_type::{BuiltinMediaType, MediaType},
metadata,
model::*,
storage::DynStorageBackend,
thumbnail,
};
pub struct ImportResult {
pub media_id: MediaId,
@ -51,9 +55,13 @@ fn get_file_mtime(path: &Path) -> Option<i64> {
.map(|d| d.as_secs() as i64)
}
/// Check that a canonicalized path falls under at least one configured root directory.
/// If no roots are configured, all paths are allowed (for ad-hoc imports).
pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) -> Result<()> {
/// Check that a canonicalized path falls under at least one configured root
/// directory. If no roots are configured, all paths are allowed (for ad-hoc
/// imports).
pub async fn validate_path_in_roots(
storage: &DynStorageBackend,
path: &Path,
) -> Result<()> {
let roots = storage.list_root_dirs().await?;
if roots.is_empty() {
return Ok(());
@ -71,7 +79,10 @@ pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) ->
)))
}
pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<ImportResult> {
pub async fn import_file(
storage: &DynStorageBackend,
path: &Path,
) -> Result<ImportResult> {
import_file_with_options(storage, path, &ImportOptions::default()).await
}
@ -98,7 +109,8 @@ pub async fn import_file_with_options(
if options.incremental
&& !options.force
&& let Some(existing) = storage.get_media_by_path(&path).await?
&& let (Some(stored_mtime), Some(curr_mtime)) = (existing.file_mtime, current_mtime)
&& let (Some(stored_mtime), Some(curr_mtime)) =
(existing.file_mtime, current_mtime)
&& stored_mtime == curr_mtime
{
return Ok(ImportResult {
@ -154,7 +166,12 @@ pub async fn import_file_with_options(
let thumb_dir = thumbnail::default_thumbnail_dir();
let media_type_clone = media_type.clone();
tokio::task::spawn_blocking(move || {
thumbnail::generate_thumbnail(media_id, &source, media_type_clone, &thumb_dir)
thumbnail::generate_thumbnail(
media_id,
&source,
media_type_clone,
&thumb_dir,
)
})
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
@ -170,7 +187,8 @@ pub async fn import_file_with_options(
};
// Check if this is a markdown file for link extraction
let is_markdown = media_type == MediaType::Builtin(BuiltinMediaType::Markdown);
let is_markdown =
media_type == MediaType::Builtin(BuiltinMediaType::Markdown);
let item = MediaItem {
id: media_id,
@ -263,7 +281,10 @@ pub async fn import_file_with_options(
})
}
pub(crate) fn should_ignore(path: &std::path::Path, patterns: &[String]) -> bool {
pub(crate) fn should_ignore(
path: &std::path::Path,
patterns: &[String],
) -> bool {
for component in path.components() {
if let std::path::Component::Normal(name) = component {
let name_str = name.to_string_lossy();
@ -373,7 +394,7 @@ pub async fn import_directory_with_options(
Err(e) => {
tracing::warn!(path = %path.display(), error = %e, "failed to import file");
results.push(Err(e));
}
},
}
}
}
@ -385,7 +406,7 @@ pub async fn import_directory_with_options(
Err(e) => {
tracing::warn!(path = %path.display(), error = %e, "failed to import file");
results.push(Err(e));
}
},
}
}

View file

@ -1,14 +1,18 @@
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
use crate::error::Result;
use crate::hash::compute_file_hash;
use crate::media_type::MediaType;
use crate::model::{ContentHash, MediaId};
use crate::storage::DynStorageBackend;
use crate::{
error::Result,
hash::compute_file_hash,
media_type::MediaType,
model::{ContentHash, MediaId},
storage::DynStorageBackend,
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OrphanReport {
@ -69,13 +73,17 @@ impl std::str::FromStr for IntegrityStatus {
}
/// Detect orphaned media items (files that no longer exist on disk),
/// untracked files (files on disk not in database), and moved files (same hash, different path).
pub async fn detect_orphans(storage: &DynStorageBackend) -> Result<OrphanReport> {
/// untracked files (files on disk not in database), and moved files (same hash,
/// different path).
pub async fn detect_orphans(
storage: &DynStorageBackend,
) -> Result<OrphanReport> {
let media_paths = storage.list_media_paths().await?;
let mut orphaned_ids = Vec::new();
// Build hash index: ContentHash -> Vec<(MediaId, PathBuf)>
let mut hash_index: HashMap<ContentHash, Vec<(MediaId, PathBuf)>> = HashMap::new();
let mut hash_index: HashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
HashMap::new();
for (id, path, hash) in &media_paths {
hash_index
.entry(hash.clone())
@ -91,7 +99,8 @@ pub async fn detect_orphans(storage: &DynStorageBackend) -> Result<OrphanReport>
}
// Detect moved files (orphaned items with same hash existing elsewhere)
let moved_files = detect_moved_files(&orphaned_ids, &media_paths, &hash_index);
let moved_files =
detect_moved_files(&orphaned_ids, &media_paths, &hash_index);
// Detect untracked files (on disk but not in DB)
let untracked_paths = detect_untracked_files(storage, &media_paths).await?;
@ -122,7 +131,7 @@ fn detect_moved_files(
// Build lookup map for orphaned items: MediaId -> (PathBuf, ContentHash)
let orphaned_map: HashMap<MediaId, (PathBuf, ContentHash)> = media_paths
.iter()
.filter(|(id, _, _)| orphaned_ids.contains(id))
.filter(|(id, ..)| orphaned_ids.contains(id))
.map(|(id, path, hash)| (*id, (path.clone(), hash.clone())))
.collect();
@ -228,10 +237,10 @@ async fn detect_untracked_files(
if MediaType::from_path(path).is_some() {
paths.push(path.to_path_buf());
}
}
},
Err(e) => {
warn!(error = %e, "failed to read directory entry");
}
},
}
}
@ -244,13 +253,13 @@ async fn detect_untracked_files(
match result {
Ok(Ok(paths)) => {
filesystem_paths.extend(paths);
}
},
Ok(Err(e)) => {
warn!(error = %e, "failed to walk directory");
}
},
Err(e) => {
warn!(error = %e, "task join error");
}
},
}
}
@ -274,11 +283,11 @@ pub async fn resolve_orphans(
let count = storage.batch_delete_media(ids).await?;
info!(count, "resolved orphans by deletion");
Ok(count)
}
},
OrphanAction::Ignore => {
info!(count = ids.len(), "orphans ignored");
Ok(0)
}
},
}
}
@ -289,11 +298,13 @@ pub async fn verify_integrity(
) -> Result<VerificationReport> {
let all_paths = storage.list_media_paths().await?;
let paths_to_check: Vec<(MediaId, PathBuf, ContentHash)> = if let Some(ids) = media_ids {
let id_set: std::collections::HashSet<MediaId> = ids.iter().copied().collect();
let paths_to_check: Vec<(MediaId, PathBuf, ContentHash)> =
if let Some(ids) = media_ids {
let id_set: std::collections::HashSet<MediaId> =
ids.iter().copied().collect();
all_paths
.into_iter()
.filter(|(id, _, _)| id_set.contains(id))
.filter(|(id, ..)| id_set.contains(id))
.collect()
} else {
all_paths
@ -321,10 +332,10 @@ pub async fn verify_integrity(
.mismatched
.push((id, expected_hash.0.clone(), actual_hash.0));
}
}
},
Err(e) => {
report.errors.push((id, e.to_string()));
}
},
}
}
@ -347,7 +358,7 @@ pub async fn cleanup_orphaned_thumbnails(
let media_paths = storage.list_media_paths().await?;
let known_ids: std::collections::HashSet<String> = media_paths
.iter()
.map(|(id, _, _)| id.0.to_string())
.map(|(id, ..)| id.0.to_string())
.collect();
let mut removed = 0;

View file

@ -1,6 +1,4 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@ -96,7 +94,8 @@ impl JobQueue {
{
let (tx, rx) = mpsc::channel::<WorkerItem>(256);
let rx = Arc::new(tokio::sync::Mutex::new(rx));
let jobs: Arc<RwLock<HashMap<Uuid, Job>>> = Arc::new(RwLock::new(HashMap::new()));
let jobs: Arc<RwLock<HashMap<Uuid, Job>>> =
Arc::new(RwLock::new(HashMap::new()));
let cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>> =
Arc::new(RwLock::new(HashMap::new()));
@ -128,7 +127,8 @@ impl JobQueue {
}
}
let handle = executor(item.job_id, item.kind, item.cancel, jobs.clone());
let handle =
executor(item.job_id, item.kind, item.cancel, jobs.clone());
let _ = handle.await;
// Clean up cancellation token
@ -215,7 +215,11 @@ impl JobQueue {
}
/// Mark a job as completed.
pub async fn complete(jobs: &Arc<RwLock<HashMap<Uuid, Job>>>, id: Uuid, result: Value) {
pub async fn complete(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
id: Uuid,
result: Value,
) {
let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Completed { result };
@ -224,7 +228,11 @@ impl JobQueue {
}
/// Mark a job as failed.
pub async fn fail(jobs: &Arc<RwLock<HashMap<Uuid, Job>>>, id: Uuid, error: String) {
pub async fn fail(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
id: Uuid,
error: String,
) {
let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Failed { error };
@ -246,7 +254,7 @@ impl JobQueue {
JobStatus::Running { .. } => running += 1,
JobStatus::Completed { .. } => completed += 1,
JobStatus::Failed { .. } => failed += 1,
JobStatus::Cancelled => {} // Don't count cancelled jobs
JobStatus::Cancelled => {}, // Don't count cancelled jobs
}
}

View file

@ -1,4 +1,5 @@
//! Markdown link extraction and management for Obsidian-style bidirectional links.
//! Markdown link extraction and management for Obsidian-style bidirectional
//! links.
//!
//! This module provides:
//! - Wikilink extraction (`[[target]]` and `[[target|display]]`)
@ -24,7 +25,10 @@ const CONTEXT_CHARS_AFTER: usize = 50;
/// - Wikilinks: `[[target]]` and `[[target|display text]]`
/// - Embeds: `![[target]]`
/// - Markdown links: `[text](path)` (internal paths only, no http/https)
pub fn extract_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
pub fn extract_links(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
let mut links = Vec::new();
// Extract wikilinks: [[target]] or [[target|display]]
@ -40,8 +44,12 @@ pub fn extract_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLin
}
/// Extract wikilinks from content.
/// Matches: `[[target]]` or `[[target|display text]]` but NOT `![[...]]` (embeds)
fn extract_wikilinks(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
/// Matches: `[[target]]` or `[[target|display text]]` but NOT `![[...]]`
/// (embeds)
fn extract_wikilinks(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
// Match [[...]] - we'll manually filter out embeds that are preceded by !
let re = Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap();
let mut links = Vec::new();
@ -62,7 +70,12 @@ fn extract_wikilinks(source_media_id: MediaId, content: &str) -> Vec<MarkdownLin
let target = cap.get(1).unwrap().as_str().trim();
let display_text = cap.get(2).map(|m| m.as_str().trim().to_string());
let context = extract_context(content, line_num, full_match.start(), full_match.end());
let context = extract_context(
content,
line_num,
full_match.start(),
full_match.end(),
);
links.push(MarkdownLink {
id: Uuid::now_v7(),
@ -83,7 +96,10 @@ fn extract_wikilinks(source_media_id: MediaId, content: &str) -> Vec<MarkdownLin
/// Extract embeds from content.
/// Matches: `![[target]]`
fn extract_embeds(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
fn extract_embeds(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
let re = Regex::new(r"!\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap();
let mut links = Vec::new();
@ -93,7 +109,12 @@ fn extract_embeds(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink>
let target = cap.get(1).unwrap().as_str().trim();
let display_text = cap.get(2).map(|m| m.as_str().trim().to_string());
let context = extract_context(content, line_num, full_match.start(), full_match.end());
let context = extract_context(
content,
line_num,
full_match.start(),
full_match.end(),
);
links.push(MarkdownLink {
id: Uuid::now_v7(),
@ -114,7 +135,10 @@ fn extract_embeds(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink>
/// Extract markdown links from content.
/// Matches: `[text](path)` but only for internal paths (no http/https)
fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
fn extract_markdown_links(
source_media_id: MediaId,
content: &str,
) -> Vec<MarkdownLink> {
// Match [text](path) where path doesn't start with http:// or https://
let re = Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap();
let mut links = Vec::new();
@ -126,7 +150,8 @@ fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<Markdo
// Skip markdown images: ![alt](image.png)
// Check if the character immediately before '[' is '!'
if match_start > 0 && line.as_bytes().get(match_start - 1) == Some(&b'!') {
if match_start > 0 && line.as_bytes().get(match_start - 1) == Some(&b'!')
{
continue;
}
@ -150,7 +175,12 @@ fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<Markdo
// Remove any anchor from the path for resolution
let target_path = path.split('#').next().unwrap_or(path);
let context = extract_context(content, line_num, full_match.start(), full_match.end());
let context = extract_context(
content,
line_num,
full_match.start(),
full_match.end(),
);
links.push(MarkdownLink {
id: Uuid::now_v7(),
@ -170,7 +200,12 @@ fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<Markdo
}
/// Extract surrounding context for a link.
fn extract_context(content: &str, line_num: usize, _start: usize, _end: usize) -> String {
fn extract_context(
content: &str,
line_num: usize,
_start: usize,
_end: usize,
) -> String {
let lines: Vec<&str> = content.lines().collect();
if line_num >= lines.len() {
return String::new();
@ -192,7 +227,8 @@ fn extract_context(content: &str, line_num: usize, _start: usize, _end: usize) -
// Truncate long lines
if line_len > CONTEXT_CHARS_BEFORE + CONTEXT_CHARS_AFTER {
line.chars()
line
.chars()
.take(CONTEXT_CHARS_BEFORE + CONTEXT_CHARS_AFTER)
.collect()
} else {
@ -279,7 +315,9 @@ pub fn resolve_link_candidates(
/// Obsidian uses the `aliases` field in frontmatter to define alternative names
/// for a note that can be used in wikilinks.
pub fn extract_aliases(content: &str) -> Vec<String> {
let Ok(parsed) = gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(content) else {
let Ok(parsed) =
gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(content)
else {
return Vec::new();
};
@ -296,7 +334,8 @@ pub fn extract_aliases(content: &str) -> Vec<String> {
};
match aliases {
gray_matter::Pod::Array(arr) => arr
gray_matter::Pod::Array(arr) => {
arr
.iter()
.filter_map(|a| {
if let gray_matter::Pod::String(s) = a {
@ -305,11 +344,12 @@ pub fn extract_aliases(content: &str) -> Vec<String> {
None
}
})
.collect(),
.collect()
},
gray_matter::Pod::String(s) => {
// Single alias as string
vec![s.clone()]
}
},
_ => Vec::new(),
}
}
@ -366,7 +406,8 @@ mod tests {
#[test]
fn test_skip_external_links() {
let content = "Visit [our site](https://example.com) or [email us](mailto:test@test.com).";
let content = "Visit [our site](https://example.com) or [email \
us](mailto:test@test.com).";
let links = extract_links(test_media_id(), content);
assert!(links.is_empty());
@ -407,7 +448,8 @@ And an embedded image: ![[diagram.png]]
let source_path = std::path::Path::new("/notes/projects/readme.md");
let root_dirs = vec![std::path::PathBuf::from("/notes")];
let candidates = resolve_link_candidates("My Note", source_path, &root_dirs);
let candidates =
resolve_link_candidates("My Note", source_path, &root_dirs);
// Should include relative path and .md variations
assert!(!candidates.is_empty());
@ -505,7 +547,8 @@ Mixed: [link](file.md) then ![image](pic.png) then [another](other.md)
}
// Verify correct targets were extracted (links, not images)
let targets: Vec<&str> = links.iter().map(|l| l.target_path.as_str()).collect();
let targets: Vec<&str> =
links.iter().map(|l| l.target_path.as_str()).collect();
assert!(
targets.contains(&"docs/guide.md"),
"Should contain docs/guide.md"

View file

@ -7,12 +7,16 @@
use std::path::{Path, PathBuf};
use tokio::fs;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader};
use tokio::{
fs,
io::{AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader},
};
use tracing::{debug, info, warn};
use crate::error::{PinakesError, Result};
use crate::model::ContentHash;
use crate::{
error::{PinakesError, Result},
model::ContentHash,
};
/// Content-addressable storage service for managed files.
#[derive(Debug, Clone)]
@ -24,7 +28,11 @@ pub struct ManagedStorageService {
impl ManagedStorageService {
/// Create a new managed storage service.
pub fn new(root_dir: PathBuf, max_upload_size: u64, verify_on_read: bool) -> Self {
pub fn new(
root_dir: PathBuf,
max_upload_size: u64,
verify_on_read: bool,
) -> Self {
Self {
root_dir,
max_upload_size,
@ -60,7 +68,8 @@ impl ManagedStorageService {
/// Store a file from an async reader, computing the hash as we go.
///
/// Returns the content hash and file size.
/// If the file already exists with the same hash, returns early (deduplication).
/// If the file already exists with the same hash, returns early
/// (deduplication).
pub async fn store_stream<R: AsyncRead + Unpin>(
&self,
mut reader: R,
@ -256,7 +265,9 @@ impl ManagedStorageService {
let mut sub_entries = fs::read_dir(&path).await?;
while let Some(sub_entry) = sub_entries.next_entry().await? {
let sub_path = sub_entry.path();
if sub_path.is_dir() && sub_path.file_name().map(|n| n.len()) == Some(2) {
if sub_path.is_dir()
&& sub_path.file_name().map(|n| n.len()) == Some(2)
{
let mut file_entries = fs::read_dir(&sub_path).await?;
while let Some(file_entry) = file_entries.next_entry().await? {
let file_path = file_entry.path();
@ -323,13 +334,15 @@ impl ManagedStorageService {
#[cfg(test)]
mod tests {
use super::*;
use tempfile::tempdir;
use super::*;
#[tokio::test]
async fn test_store_and_retrieve() {
let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
service.init().await.unwrap();
let data = b"hello, world!";
@ -345,7 +358,8 @@ mod tests {
#[tokio::test]
async fn test_deduplication() {
let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
service.init().await.unwrap();
let data = b"duplicate content";
@ -359,7 +373,8 @@ mod tests {
#[tokio::test]
async fn test_verify_integrity() {
let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, true);
let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, true);
service.init().await.unwrap();
let data = b"verify me";
@ -371,7 +386,8 @@ mod tests {
#[tokio::test]
async fn test_upload_too_large() {
let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 100, false);
let service =
ManagedStorageService::new(dir.path().to_path_buf(), 100, false);
service.init().await.unwrap();
let data = vec![0u8; 200];
@ -383,7 +399,8 @@ mod tests {
#[tokio::test]
async fn test_delete() {
let dir = tempdir().unwrap();
let service = ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
let service =
ManagedStorageService::new(dir.path().to_path_buf(), 1024 * 1024, false);
service.init().await.unwrap();
let data = b"delete me";

View file

@ -139,7 +139,8 @@ impl BuiltinMediaType {
}
pub fn from_path(path: &Path) -> Option<Self> {
path.extension()
path
.extension()
.and_then(|e| e.to_str())
.and_then(Self::from_extension)
}
@ -181,9 +182,12 @@ impl BuiltinMediaType {
pub fn category(&self) -> MediaCategory {
match self {
Self::Mp3 | Self::Flac | Self::Ogg | Self::Wav | Self::Aac | Self::Opus => {
MediaCategory::Audio
}
Self::Mp3
| Self::Flac
| Self::Ogg
| Self::Wav
| Self::Aac
| Self::Opus => MediaCategory::Audio,
Self::Mp4 | Self::Mkv | Self::Avi | Self::Webm => MediaCategory::Video,
Self::Pdf | Self::Epub | Self::Djvu => MediaCategory::Document,
Self::Markdown | Self::PlainText => MediaCategory::Text,

View file

@ -3,9 +3,10 @@
//! This module provides an extensible media type system that supports both
//! built-in media types and plugin-registered custom types.
use serde::{Deserialize, Serialize};
use std::path::Path;
use serde::{Deserialize, Serialize};
pub mod builtin;
pub mod registry;
@ -50,15 +51,18 @@ impl MediaType {
pub fn name_with_registry(&self, registry: &MediaTypeRegistry) -> String {
match self {
Self::Builtin(b) => b.name(),
Self::Custom(id) => registry
Self::Custom(id) => {
registry
.get(id)
.map(|d| d.name.clone())
.unwrap_or_else(|| id.clone()),
.unwrap_or_else(|| id.clone())
},
}
}
/// Get the category for this media type
/// For custom types without a registry, returns MediaCategory::Document as default
/// For custom types without a registry, returns MediaCategory::Document as
/// default
pub fn category(&self) -> MediaCategory {
match self {
Self::Builtin(b) => b.category(),
@ -67,13 +71,18 @@ impl MediaType {
}
/// Get the category for this media type with registry support
pub fn category_with_registry(&self, registry: &MediaTypeRegistry) -> MediaCategory {
pub fn category_with_registry(
&self,
registry: &MediaTypeRegistry,
) -> MediaCategory {
match self {
Self::Builtin(b) => b.category(),
Self::Custom(id) => registry
Self::Custom(id) => {
registry
.get(id)
.and_then(|d| d.category)
.unwrap_or(MediaCategory::Document),
.unwrap_or(MediaCategory::Document)
},
}
}
@ -87,13 +96,18 @@ impl MediaType {
}
/// Get the MIME type with registry support
pub fn mime_type_with_registry(&self, registry: &MediaTypeRegistry) -> String {
pub fn mime_type_with_registry(
&self,
registry: &MediaTypeRegistry,
) -> String {
match self {
Self::Builtin(b) => b.mime_type().to_string(),
Self::Custom(id) => registry
Self::Custom(id) => {
registry
.get(id)
.and_then(|d| d.mime_types.first().cloned())
.unwrap_or_else(|| "application/octet-stream".to_string()),
.unwrap_or_else(|| "application/octet-stream".to_string())
},
}
}
@ -101,19 +115,28 @@ impl MediaType {
/// For custom types without a registry, returns an empty vec
pub fn extensions(&self) -> Vec<String> {
match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(),
Self::Builtin(b) => {
b.extensions().iter().map(|s| s.to_string()).collect()
},
Self::Custom(_) => vec![],
}
}
/// Get file extensions with registry support
pub fn extensions_with_registry(&self, registry: &MediaTypeRegistry) -> Vec<String> {
pub fn extensions_with_registry(
&self,
registry: &MediaTypeRegistry,
) -> Vec<String> {
match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(),
Self::Custom(id) => registry
Self::Builtin(b) => {
b.extensions().iter().map(|s| s.to_string()).collect()
},
Self::Custom(id) => {
registry
.get(id)
.map(|d| d.extensions.clone())
.unwrap_or_default(),
.unwrap_or_default()
},
}
}
@ -131,8 +154,12 @@ impl MediaType {
BuiltinMediaType::from_extension(ext).map(Self::Builtin)
}
/// Resolve a media type from file extension with registry (includes custom types)
pub fn from_extension_with_registry(ext: &str, registry: &MediaTypeRegistry) -> Option<Self> {
/// Resolve a media type from file extension with registry (includes custom
/// types)
pub fn from_extension_with_registry(
ext: &str,
registry: &MediaTypeRegistry,
) -> Option<Self> {
// Try built-in types first
if let Some(builtin) = BuiltinMediaType::from_extension(ext) {
return Some(Self::Builtin(builtin));
@ -147,14 +174,19 @@ impl MediaType {
/// Resolve a media type from file path (built-in types only)
/// Use from_path_with_registry for custom types
pub fn from_path(path: &Path) -> Option<Self> {
path.extension()
path
.extension()
.and_then(|e| e.to_str())
.and_then(Self::from_extension)
}
/// Resolve a media type from file path with registry (includes custom types)
pub fn from_path_with_registry(path: &Path, registry: &MediaTypeRegistry) -> Option<Self> {
path.extension()
pub fn from_path_with_registry(
path: &Path,
registry: &MediaTypeRegistry,
) -> Option<Self> {
path
.extension()
.and_then(|e| e.to_str())
.and_then(|ext| Self::from_extension_with_registry(ext, registry))
}

View file

@ -1,8 +1,9 @@
//! Media type registry for managing both built-in and custom media types
use std::collections::HashMap;
use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use super::MediaCategory;
@ -59,7 +60,8 @@ impl MediaTypeRegistry {
let ext_lower = ext.to_lowercase();
if self.extension_map.contains_key(&ext_lower) {
// Extension already registered - this is OK, we'll use the first one
// In a more sophisticated system, we might track multiple types per extension
// In a more sophisticated system, we might track multiple types per
// extension
continue;
}
self.extension_map.insert(ext_lower, descriptor.id.clone());
@ -97,7 +99,8 @@ impl MediaTypeRegistry {
/// Get a media type by file extension
pub fn get_by_extension(&self, ext: &str) -> Option<&MediaTypeDescriptor> {
let ext_lower = ext.to_lowercase();
self.extension_map
self
.extension_map
.get(&ext_lower)
.and_then(|id| self.types.get(id))
}
@ -109,7 +112,8 @@ impl MediaTypeRegistry {
/// List media types from a specific plugin
pub fn list_by_plugin(&self, plugin_id: &str) -> Vec<&MediaTypeDescriptor> {
self.types
self
.types
.values()
.filter(|d| d.plugin_id.as_deref() == Some(plugin_id))
.collect()
@ -117,7 +121,8 @@ impl MediaTypeRegistry {
/// List built-in media types (plugin_id is None)
pub fn list_builtin(&self) -> Vec<&MediaTypeDescriptor> {
self.types
self
.types
.values()
.filter(|d| d.plugin_id.is_none())
.collect()

View file

@ -1,19 +1,23 @@
use std::path::Path;
use lofty::file::{AudioFile, TaggedFileExt};
use lofty::tag::Accessor;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaType};
use lofty::{
file::{AudioFile, TaggedFileExt},
tag::Accessor,
};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct AudioExtractor;
impl MetadataExtractor for AudioExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let tagged_file = lofty::read_from_path(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("audio metadata: {e}")))?;
let tagged_file = lofty::read_from_path(path).map_err(|e| {
PinakesError::MetadataExtraction(format!("audio metadata: {e}"))
})?;
let mut meta = ExtractedMetadata::default();
@ -33,15 +37,18 @@ impl MetadataExtractor for AudioExtractor {
.or_else(|| tagged_file.first_tag())
{
if let Some(track) = tag.track() {
meta.extra
meta
.extra
.insert("track_number".to_string(), track.to_string());
}
if let Some(disc) = tag.disk() {
meta.extra
meta
.extra
.insert("disc_number".to_string(), disc.to_string());
}
if let Some(comment) = tag.comment() {
meta.extra
meta
.extra
.insert("comment".to_string(), comment.to_string());
}
}
@ -53,15 +60,18 @@ impl MetadataExtractor for AudioExtractor {
}
if let Some(bitrate) = properties.audio_bitrate() {
meta.extra
meta
.extra
.insert("bitrate".to_string(), format!("{bitrate} kbps"));
}
if let Some(sample_rate) = properties.sample_rate() {
meta.extra
meta
.extra
.insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
}
if let Some(channels) = properties.channels() {
meta.extra
meta
.extra
.insert("channels".to_string(), channels.to_string());
}

View file

@ -1,9 +1,10 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct DocumentExtractor;
@ -128,15 +129,20 @@ fn extract_pdf(path: &Path) -> Result<ExtractedMetadata> {
fn pdf_object_to_string(obj: &lopdf::Object) -> Option<String> {
match obj {
lopdf::Object::String(bytes, _) => Some(String::from_utf8_lossy(bytes).into_owned()),
lopdf::Object::Name(name) => Some(String::from_utf8_lossy(name).into_owned()),
lopdf::Object::String(bytes, _) => {
Some(String::from_utf8_lossy(bytes).into_owned())
},
lopdf::Object::Name(name) => {
Some(String::from_utf8_lossy(name).into_owned())
},
_ => None,
}
}
fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
let mut doc = epub::doc::EpubDoc::new(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("EPUB parse: {e}")))?;
let mut doc = epub::doc::EpubDoc::new(path).map_err(|e| {
PinakesError::MetadataExtraction(format!("EPUB parse: {e}"))
})?;
let mut meta = ExtractedMetadata {
title: doc.mdata("title").map(|item| item.value.clone()),
@ -156,7 +162,9 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
}
if let Some(date) = doc.mdata("date") {
// Try to parse as YYYY-MM-DD or just YYYY
if let Ok(parsed_date) = chrono::NaiveDate::parse_from_str(&date.value, "%Y-%m-%d") {
if let Ok(parsed_date) =
chrono::NaiveDate::parse_from_str(&date.value, "%Y-%m-%d")
{
book_meta.publication_date = Some(parsed_date);
} else if let Ok(year) = date.value.parse::<i32>() {
book_meta.publication_date = chrono::NaiveDate::from_ymd_opt(year, 1, 1);
@ -212,7 +220,7 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
} else {
"other"
}
}
},
};
// Try to normalize ISBN
@ -245,7 +253,8 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
if let Some(opf_content) = opf_data {
// Look for <meta name="calibre:series" content="Series Name"/>
if let Some(series_start) = opf_content.find("name=\"calibre:series\"")
&& let Some(content_start) = opf_content[series_start..].find("content=\"")
&& let Some(content_start) =
opf_content[series_start..].find("content=\"")
{
let after_content = &opf_content[series_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"') {

View file

@ -1,9 +1,10 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::Result,
media_type::{BuiltinMediaType, MediaType},
};
pub struct ImageExtractor;
@ -14,7 +15,8 @@ impl MetadataExtractor for ImageExtractor {
let file = std::fs::File::open(path)?;
let mut buf_reader = std::io::BufReader::new(&file);
let exif_data = match exif::Reader::new().read_from_container(&mut buf_reader) {
let exif_data =
match exif::Reader::new().read_from_container(&mut buf_reader) {
Ok(exif) => exif,
Err(_) => return Ok(meta),
};
@ -29,21 +31,26 @@ impl MetadataExtractor for ImageExtractor {
}
if let Some(height) = exif_data
.get_field(exif::Tag::PixelYDimension, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::ImageLength, exif::In::PRIMARY))
.or_else(|| {
exif_data.get_field(exif::Tag::ImageLength, exif::In::PRIMARY)
})
&& let Some(h) = field_to_u32(height)
{
meta.extra.insert("height".to_string(), h.to_string());
}
// Camera make and model - set both in top-level fields and extra
if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY) {
if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY)
{
let val = make.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_make = Some(val.clone());
meta.extra.insert("camera_make".to_string(), val);
}
}
if let Some(model) = exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY) {
if let Some(model) =
exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY)
{
let val = model.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_model = Some(val.clone());
@ -77,9 +84,11 @@ impl MetadataExtractor for ImageExtractor {
{
meta.latitude = Some(lat_val);
meta.longitude = Some(lon_val);
meta.extra
meta
.extra
.insert("gps_latitude".to_string(), format!("{lat_val:.6}"));
meta.extra
meta
.extra
.insert("gps_longitude".to_string(), format!("{lon_val:.6}"));
}
@ -92,19 +101,25 @@ impl MetadataExtractor for ImageExtractor {
meta.extra.insert("iso".to_string(), val);
}
}
if let Some(exposure) = exif_data.get_field(exif::Tag::ExposureTime, exif::In::PRIMARY) {
if let Some(exposure) =
exif_data.get_field(exif::Tag::ExposureTime, exif::In::PRIMARY)
{
let val = exposure.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("exposure_time".to_string(), val);
}
}
if let Some(aperture) = exif_data.get_field(exif::Tag::FNumber, exif::In::PRIMARY) {
if let Some(aperture) =
exif_data.get_field(exif::Tag::FNumber, exif::In::PRIMARY)
{
let val = aperture.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("f_number".to_string(), val);
}
}
if let Some(focal) = exif_data.get_field(exif::Tag::FocalLength, exif::In::PRIMARY) {
if let Some(focal) =
exif_data.get_field(exif::Tag::FocalLength, exif::In::PRIMARY)
{
let val = focal.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("focal_length".to_string(), val);
@ -112,16 +127,21 @@ impl MetadataExtractor for ImageExtractor {
}
// Lens model
if let Some(lens) = exif_data.get_field(exif::Tag::LensModel, exif::In::PRIMARY) {
if let Some(lens) =
exif_data.get_field(exif::Tag::LensModel, exif::In::PRIMARY)
{
let val = lens.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.extra
meta
.extra
.insert("lens_model".to_string(), val.trim_matches('"').to_string());
}
}
// Flash
if let Some(flash) = exif_data.get_field(exif::Tag::Flash, exif::In::PRIMARY) {
if let Some(flash) =
exif_data.get_field(exif::Tag::Flash, exif::In::PRIMARY)
{
let val = flash.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("flash".to_string(), val);
@ -129,7 +149,9 @@ impl MetadataExtractor for ImageExtractor {
}
// Orientation
if let Some(orientation) = exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY) {
if let Some(orientation) =
exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY)
{
let val = orientation.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("orientation".to_string(), val);
@ -137,7 +159,9 @@ impl MetadataExtractor for ImageExtractor {
}
// Software
if let Some(software) = exif_data.get_field(exif::Tag::Software, exif::In::PRIMARY) {
if let Some(software) =
exif_data.get_field(exif::Tag::Software, exif::In::PRIMARY)
{
let val = software.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("software".to_string(), val);
@ -145,7 +169,9 @@ impl MetadataExtractor for ImageExtractor {
}
// Image description as title
if let Some(desc) = exif_data.get_field(exif::Tag::ImageDescription, exif::In::PRIMARY) {
if let Some(desc) =
exif_data.get_field(exif::Tag::ImageDescription, exif::In::PRIMARY)
{
let val = desc.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.title = Some(val.trim_matches('"').to_string());
@ -153,7 +179,9 @@ impl MetadataExtractor for ImageExtractor {
}
// Artist
if let Some(artist) = exif_data.get_field(exif::Tag::Artist, exif::In::PRIMARY) {
if let Some(artist) =
exif_data.get_field(exif::Tag::Artist, exif::In::PRIMARY)
{
let val = artist.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.artist = Some(val.trim_matches('"').to_string());
@ -161,7 +189,9 @@ impl MetadataExtractor for ImageExtractor {
}
// Copyright as description
if let Some(copyright) = exif_data.get_field(exif::Tag::Copyright, exif::In::PRIMARY) {
if let Some(copyright) =
exif_data.get_field(exif::Tag::Copyright, exif::In::PRIMARY)
{
let val = copyright.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.description = Some(val.trim_matches('"').to_string());
@ -201,7 +231,10 @@ fn field_to_u32(field: &exif::Field) -> Option<u32> {
}
}
fn dms_to_decimal(dms_field: &exif::Field, ref_field: &exif::Field) -> Option<f64> {
fn dms_to_decimal(
dms_field: &exif::Field,
ref_field: &exif::Field,
) -> Option<f64> {
if let exif::Value::Rational(ref rationals) = dms_field.value
&& rationals.len() >= 3
{
@ -241,8 +274,9 @@ fn parse_exif_datetime(s: &str) -> Option<chrono::DateTime<chrono::Utc>> {
}
/// Generate a perceptual hash for an image file.
/// Uses DCT (Discrete Cosine Transform) hash algorithm for robust similarity detection.
/// Returns a hex-encoded hash string, or None if the image cannot be processed.
/// Uses DCT (Discrete Cosine Transform) hash algorithm for robust similarity
/// detection. Returns a hex-encoded hash string, or None if the image cannot be
/// processed.
pub fn generate_perceptual_hash(path: &Path) -> Option<String> {
use image_hasher::{HashAlg, HasherConfig};

View file

@ -1,16 +1,18 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::Result,
media_type::{BuiltinMediaType, MediaType},
};
pub struct MarkdownExtractor;
impl MetadataExtractor for MarkdownExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let content = std::fs::read_to_string(path)?;
let parsed = gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(&content);
let parsed =
gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(&content);
let mut meta = ExtractedMetadata::default();

View file

@ -4,12 +4,13 @@ pub mod image;
pub mod markdown;
pub mod video;
use std::collections::HashMap;
use std::path::Path;
use std::{collections::HashMap, path::Path};
use crate::error::Result;
use crate::media_type::MediaType;
use crate::model::ExtractedBookMetadata;
use crate::{
error::Result,
media_type::MediaType,
model::ExtractedBookMetadata,
};
#[derive(Debug, Clone, Default)]
pub struct ExtractedMetadata {
@ -37,7 +38,10 @@ pub trait MetadataExtractor: Send + Sync {
fn supported_types(&self) -> Vec<MediaType>;
}
pub fn extract_metadata(path: &Path, media_type: MediaType) -> Result<ExtractedMetadata> {
pub fn extract_metadata(
path: &Path,
media_type: MediaType,
) -> Result<ExtractedMetadata> {
let extractors: Vec<Box<dyn MetadataExtractor>> = vec![
Box::new(audio::AudioExtractor),
Box::new(document::DocumentExtractor),

View file

@ -1,9 +1,10 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct VideoExtractor;
@ -44,23 +45,26 @@ fn extract_mkv(path: &Path) -> Result<ExtractedMetadata> {
format!("{}x{}", v.pixel_width, v.pixel_height),
);
if !track.codec_id.is_empty() {
meta.extra
meta
.extra
.insert("video_codec".to_string(), track.codec_id.clone());
}
}
},
matroska::Settings::Audio(a) => {
meta.extra.insert(
"sample_rate".to_string(),
format!("{} Hz", a.sample_rate as u32),
);
meta.extra
meta
.extra
.insert("channels".to_string(), a.channels.to_string());
if !track.codec_id.is_empty() {
meta.extra
meta
.extra
.insert("audio_codec".to_string(), track.codec_id.clone());
}
}
_ => {}
},
_ => {},
}
}
@ -68,11 +72,14 @@ fn extract_mkv(path: &Path) -> Result<ExtractedMetadata> {
}
fn extract_mp4(path: &Path) -> Result<ExtractedMetadata> {
use lofty::file::{AudioFile, TaggedFileExt};
use lofty::tag::Accessor;
use lofty::{
file::{AudioFile, TaggedFileExt},
tag::Accessor,
};
let tagged_file = lofty::read_from_path(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("MP4 metadata: {e}")))?;
let tagged_file = lofty::read_from_path(path).map_err(|e| {
PinakesError::MetadataExtraction(format!("MP4 metadata: {e}"))
})?;
let mut meta = ExtractedMetadata::default();
@ -102,15 +109,18 @@ fn extract_mp4(path: &Path) -> Result<ExtractedMetadata> {
}
if let Some(bitrate) = properties.audio_bitrate() {
meta.extra
meta
.extra
.insert("audio_bitrate".to_string(), format!("{bitrate} kbps"));
}
if let Some(sample_rate) = properties.sample_rate() {
meta.extra
meta
.extra
.insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
}
if let Some(channels) = properties.channels() {
meta.extra
meta
.extra
.insert("channels".to_string(), channels.to_string());
}

View file

@ -1,6 +1,4 @@
use std::collections::HashMap;
use std::fmt;
use std::path::PathBuf;
use std::{collections::HashMap, fmt, path::PathBuf};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@ -47,7 +45,9 @@ impl fmt::Display for ContentHash {
// ===== Managed Storage Types =====
/// Storage mode for media items
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
#[derive(
Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize,
)]
#[serde(rename_all = "lowercase")]
pub enum StorageMode {
/// File exists on disk, referenced by path
@ -126,7 +126,8 @@ pub struct MediaItem {
pub description: Option<String>,
pub thumbnail_path: Option<PathBuf>,
pub custom_fields: HashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental scanning
/// File modification time (Unix timestamp in seconds), used for incremental
/// scanning
pub file_mtime: Option<i64>,
// Photo-specific metadata
@ -139,10 +140,12 @@ pub struct MediaItem {
pub perceptual_hash: Option<String>,
// Managed storage fields
/// How the file is stored (external on disk or managed in content-addressable storage)
/// How the file is stored (external on disk or managed in
/// content-addressable storage)
#[serde(default)]
pub storage_mode: StorageMode,
/// Original filename for uploaded files (preserved separately from file_name)
/// Original filename for uploaded files (preserved separately from
/// file_name)
pub original_filename: Option<String>,
/// When the file was uploaded to managed storage
pub uploaded_at: Option<DateTime<Utc>>,

View file

@ -1,5 +1,4 @@
use std::path::Path;
use std::process::Command;
use std::{path::Path, process::Command};
use crate::error::{PinakesError, Result};
@ -12,10 +11,9 @@ pub struct XdgOpener;
impl Opener for XdgOpener {
fn open(&self, path: &Path) -> Result<()> {
let status = Command::new("xdg-open")
.arg(path)
.status()
.map_err(|e| PinakesError::InvalidOperation(format!("failed to run xdg-open: {e}")))?;
let status = Command::new("xdg-open").arg(path).status().map_err(|e| {
PinakesError::InvalidOperation(format!("failed to run xdg-open: {e}"))
})?;
if status.success() {
Ok(())
} else {
@ -31,10 +29,9 @@ pub struct MacOpener;
impl Opener for MacOpener {
fn open(&self, path: &Path) -> Result<()> {
let status = Command::new("open")
.arg(path)
.status()
.map_err(|e| PinakesError::InvalidOperation(format!("failed to run open: {e}")))?;
let status = Command::new("open").arg(path).status().map_err(|e| {
PinakesError::InvalidOperation(format!("failed to run open: {e}"))
})?;
if status.success() {
Ok(())
} else {
@ -55,7 +52,9 @@ impl Opener for WindowsOpener {
.arg(path)
.status()
.map_err(|e| {
PinakesError::InvalidOperation(format!("failed to run cmd /c start: {e}"))
PinakesError::InvalidOperation(format!(
"failed to run cmd /c start: {e}"
))
})?;
if status.success() {
Ok(())

View file

@ -37,14 +37,21 @@ use crate::error::{PinakesError, Result};
///
/// ```no_run
/// use std::path::PathBuf;
///
/// use pinakes_core::path_validation::validate_path;
///
/// let allowed_roots = vec![PathBuf::from("/media"), PathBuf::from("/home/user/documents")];
/// let allowed_roots = vec![
/// PathBuf::from("/media"),
/// PathBuf::from("/home/user/documents"),
/// ];
/// let path = PathBuf::from("/media/music/song.mp3");
///
/// let validated = validate_path(&path, &allowed_roots).unwrap();
/// ```
pub fn validate_path(path: &Path, allowed_roots: &[PathBuf]) -> Result<PathBuf> {
pub fn validate_path(
path: &Path,
allowed_roots: &[PathBuf],
) -> Result<PathBuf> {
// Handle the case where no roots are configured
if allowed_roots.is_empty() {
return Err(PinakesError::PathNotAllowed(
@ -97,12 +104,14 @@ pub fn validate_path(path: &Path, allowed_roots: &[PathBuf]) -> Result<PathBuf>
/// Validates a path relative to a single root directory.
///
/// This is a convenience wrapper for `validate_path` when you only have one root.
/// This is a convenience wrapper for `validate_path` when you only have one
/// root.
pub fn validate_path_single_root(path: &Path, root: &Path) -> Result<PathBuf> {
validate_path(path, &[root.to_path_buf()])
}
/// Checks if a path appears to contain traversal sequences without canonicalizing.
/// Checks if a path appears to contain traversal sequences without
/// canonicalizing.
///
/// This is a quick pre-check that can reject obviously malicious paths without
/// hitting the filesystem. It should be used in addition to `validate_path`,
@ -144,7 +153,8 @@ pub fn sanitize_filename(filename: &str) -> String {
.chars()
.filter(|c| {
// Allow alphanumeric, common punctuation, and unicode letters
c.is_alphanumeric() || matches!(*c, '-' | '_' | '.' | ' ' | '(' | ')' | '[' | ']')
c.is_alphanumeric()
|| matches!(*c, '-' | '_' | '.' | ' ' | '(' | ')' | '[' | ']')
})
.collect();
@ -174,7 +184,8 @@ pub fn sanitize_filename(filename: &str) -> String {
///
/// # Returns
///
/// The joined path if safe, or an error if the relative path would escape the base.
/// The joined path if safe, or an error if the relative path would escape the
/// base.
pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> {
// Reject absolute paths in the relative component
if relative.starts_with('/') || relative.starts_with('\\') {
@ -210,18 +221,18 @@ pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> {
match component {
Component::Normal(name) => {
current = current.join(name);
}
},
Component::ParentDir => {
return Err(PinakesError::PathNotAllowed(
"path traversal detected".to_string(),
));
}
},
Component::CurDir => continue,
_ => {
return Err(PinakesError::PathNotAllowed(
"invalid path component".to_string(),
));
}
},
}
}
@ -230,10 +241,12 @@ pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> {
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use tempfile::TempDir;
use super::*;
fn setup_test_dirs() -> TempDir {
let temp = TempDir::new().unwrap();
fs::create_dir_all(temp.path().join("allowed")).unwrap();

View file

@ -4,8 +4,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
use crate::{model::MediaId, users::UserId};
/// A user-owned playlist of media items.
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,8 +1,9 @@
//! Plugin loader for discovering and loading plugins from the filesystem
use std::path::{Path, PathBuf};
use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginManifest;
use std::path::{Path, PathBuf};
use tracing::{debug, info, warn};
use walkdir::WalkDir;
@ -34,10 +35,10 @@ impl PluginLoader {
Ok(found) => {
info!("Found {} plugins in {:?}", found.len(), dir);
manifests.extend(found);
}
},
Err(e) => {
warn!("Error discovering plugins in {:?}: {}", dir, e);
}
},
}
}
@ -45,7 +46,10 @@ impl PluginLoader {
}
/// Discover plugins in a specific directory
async fn discover_in_directory(&self, dir: &Path) -> Result<Vec<PluginManifest>> {
async fn discover_in_directory(
&self,
dir: &Path,
) -> Result<Vec<PluginManifest>> {
let mut manifests = Vec::new();
// Walk the directory looking for plugin.toml files
@ -58,7 +62,7 @@ impl PluginLoader {
Err(e) => {
warn!("Error reading directory entry: {}", e);
continue;
}
},
};
let path = entry.path();
@ -71,10 +75,10 @@ impl PluginLoader {
Ok(manifest) => {
info!("Loaded manifest for plugin: {}", manifest.plugin.name);
manifests.push(manifest);
}
},
Err(e) => {
warn!("Failed to load manifest from {:?}: {}", path, e);
}
},
}
}
}
@ -83,7 +87,10 @@ impl PluginLoader {
}
/// Resolve the WASM binary path from a manifest
pub fn resolve_wasm_path(&self, manifest: &PluginManifest) -> Result<PathBuf> {
pub fn resolve_wasm_path(
&self,
manifest: &PluginManifest,
) -> Result<PathBuf> {
// The WASM path in the manifest is relative to the manifest file
// We need to search for it in the plugin directories
@ -103,7 +110,8 @@ impl PluginLoader {
// Resolve WASM path relative to this directory
let wasm_path = plugin_dir.join(&manifest.plugin.binary.wasm);
if wasm_path.exists() {
// Verify the resolved path is within the plugin directory (prevent path traversal)
// Verify the resolved path is within the plugin directory (prevent path
// traversal)
let canonical_wasm = wasm_path
.canonicalize()
.map_err(|e| anyhow!("Failed to canonicalize WASM path: {}", e))?;
@ -189,7 +197,8 @@ impl PluginLoader {
}
// Write archive to a unique temp file
let temp_archive = dest_dir.join(format!(".download-{}.tar.gz", uuid::Uuid::now_v7()));
let temp_archive =
dest_dir.join(format!(".download-{}.tar.gz", uuid::Uuid::now_v7()));
std::fs::write(&temp_archive, &bytes)?;
// Extract using tar with -C to target directory
@ -276,7 +285,8 @@ impl PluginLoader {
));
}
// Verify the WASM path is within the plugin directory (prevent path traversal)
// Verify the WASM path is within the plugin directory (prevent path
// traversal)
let canonical_wasm = wasm_path.canonicalize()?;
let canonical_path = path.canonicalize()?;
if !canonical_wasm.starts_with(&canonical_path) {
@ -309,9 +319,10 @@ impl PluginLoader {
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
use super::*;
#[tokio::test]
async fn test_discover_plugins_empty() {
let temp_dir = TempDir::new().unwrap();
@ -341,7 +352,8 @@ wasm = "plugin.wasm"
std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
// Create dummy WASM file
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap();
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00")
.unwrap();
let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]);
let manifests = loader.discover_plugins().await.unwrap();
@ -375,7 +387,8 @@ wasm = "plugin.wasm"
assert!(loader.validate_plugin_package(&plugin_dir).is_err());
// Create valid WASM file (magic number only)
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap();
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00")
.unwrap();
// Should succeed now
assert!(loader.validate_plugin_package(&plugin_dir).is_ok());

View file

@ -1,7 +1,8 @@
//! Plugin system for Pinakes
//!
//! This module provides a comprehensive plugin architecture that allows extending
//! Pinakes with custom media types, metadata extractors, search backends, and more.
//! This module provides a comprehensive plugin architecture that allows
//! extending Pinakes with custom media types, metadata extractors, search
//! backends, and more.
//!
//! # Architecture
//!
@ -10,10 +11,10 @@
//! - Hot-reload support for development
//! - Automatic plugin discovery from configured directories
use std::{path::PathBuf, sync::Arc};
use anyhow::Result;
use pinakes_plugin_api::{PluginContext, PluginMetadata};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{debug, error, info, warn};
@ -96,7 +97,11 @@ impl From<crate::config::PluginsConfig> for PluginManagerConfig {
impl PluginManager {
/// Create a new plugin manager
pub fn new(data_dir: PathBuf, cache_dir: PathBuf, config: PluginManagerConfig) -> Result<Self> {
pub fn new(
data_dir: PathBuf,
cache_dir: PathBuf,
config: PluginManagerConfig,
) -> Result<Self> {
// Ensure directories exist
std::fs::create_dir_all(&data_dir)?;
std::fs::create_dir_all(&cache_dir)?;
@ -129,10 +134,10 @@ impl PluginManager {
Ok(plugin_id) => {
info!("Loaded plugin: {}", plugin_id);
loaded_plugins.push(plugin_id);
}
},
Err(e) => {
warn!("Failed to load plugin {}: {}", manifest.plugin.name, e);
}
},
}
}
@ -147,7 +152,10 @@ impl PluginManager {
let plugin_id = manifest.plugin_id();
// Validate plugin_id to prevent path traversal
if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") {
if plugin_id.contains('/')
|| plugin_id.contains('\\')
|| plugin_id.contains("..")
{
return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
}
@ -179,7 +187,11 @@ impl PluginManager {
(
k.clone(),
serde_json::to_value(v).unwrap_or_else(|e| {
tracing::warn!("failed to serialize config value for key {}: {}", k, e);
tracing::warn!(
"failed to serialize config value for key {}: {}",
k,
e
);
serde_json::Value::Null
}),
)
@ -193,12 +205,15 @@ impl PluginManager {
let wasm_plugin = self.runtime.load_plugin(&wasm_path, context).await?;
// Initialize plugin
let init_succeeded = match wasm_plugin.call_function("initialize", &[]).await {
let init_succeeded = match wasm_plugin
.call_function("initialize", &[])
.await
{
Ok(_) => true,
Err(e) => {
tracing::warn!(plugin_id = %plugin_id, "plugin initialization failed: {}", e);
false
}
},
};
// Register plugin
@ -207,7 +222,11 @@ impl PluginManager {
name: manifest.plugin.name.clone(),
version: manifest.plugin.version.clone(),
author: manifest.plugin.author.clone().unwrap_or_default(),
description: manifest.plugin.description.clone().unwrap_or_default(),
description: manifest
.plugin
.description
.clone()
.unwrap_or_default(),
api_version: manifest.plugin.api_version.clone(),
capabilities_required: capabilities,
};
@ -238,7 +257,8 @@ impl PluginManager {
info!("Installing plugin from: {}", source);
// Download/copy plugin to plugins directory
let plugin_path = if source.starts_with("http://") || source.starts_with("https://") {
let plugin_path =
if source.starts_with("http://") || source.starts_with("https://") {
// Download from URL
self.loader.download_plugin(source).await?
} else {
@ -248,7 +268,8 @@ impl PluginManager {
// Load the manifest
let manifest_path = plugin_path.join("plugin.toml");
let manifest = pinakes_plugin_api::PluginManifest::from_file(&manifest_path)?;
let manifest =
pinakes_plugin_api::PluginManifest::from_file(&manifest_path)?;
// Load the plugin
self.load_plugin_from_manifest(&manifest).await
@ -257,7 +278,10 @@ impl PluginManager {
/// Uninstall a plugin
pub async fn uninstall_plugin(&self, plugin_id: &str) -> Result<()> {
// Validate plugin_id to prevent path traversal
if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") {
if plugin_id.contains('/')
|| plugin_id.contains('\\')
|| plugin_id.contains("..")
{
return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
}
@ -314,7 +338,8 @@ impl PluginManager {
info!("Shutting down all plugins");
let registry = self.registry.read().await;
let plugin_ids: Vec<String> = registry.list_all().iter().map(|p| p.id.clone()).collect();
let plugin_ids: Vec<String> =
registry.list_all().iter().map(|p| p.id.clone()).collect();
for plugin_id in plugin_ids {
if let Err(e) = self.shutdown_plugin(&plugin_id).await {
@ -355,14 +380,16 @@ impl PluginManager {
info!("Reloading plugin: {}", plugin_id);
// Re-read the manifest from disk if possible, falling back to cached version
// Re-read the manifest from disk if possible, falling back to cached
// version
let manifest = {
let registry = self.registry.read().await;
let plugin = registry
.get(plugin_id)
.ok_or_else(|| anyhow::anyhow!("Plugin not found"))?;
if let Some(ref manifest_path) = plugin.manifest_path {
pinakes_plugin_api::PluginManifest::from_file(manifest_path).unwrap_or_else(|e| {
pinakes_plugin_api::PluginManifest::from_file(manifest_path)
.unwrap_or_else(|e| {
warn!("Failed to re-read manifest from disk, using cached: {}", e);
plugin.manifest.clone()
})
@ -387,9 +414,10 @@ impl PluginManager {
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
use super::*;
#[tokio::test]
async fn test_plugin_manager_creation() {
let temp_dir = TempDir::new().unwrap();
@ -397,7 +425,8 @@ mod tests {
let cache_dir = temp_dir.path().join("cache");
let config = PluginManagerConfig::default();
let manager = PluginManager::new(data_dir.clone(), cache_dir.clone(), config);
let manager =
PluginManager::new(data_dir.clone(), cache_dir.clone(), config);
assert!(manager.is_ok());
assert!(data_dir.exists());

View file

@ -1,10 +1,9 @@
//! Plugin registry for managing loaded plugins
use std::path::PathBuf;
use std::{collections::HashMap, path::PathBuf};
use anyhow::{Result, anyhow};
use pinakes_plugin_api::{PluginManifest, PluginMetadata};
use std::collections::HashMap;
use super::runtime::WasmPlugin;
@ -45,7 +44,8 @@ impl PluginRegistry {
/// Unregister a plugin by ID
pub fn unregister(&mut self, plugin_id: &str) -> Result<()> {
self.plugins
self
.plugins
.remove(plugin_id)
.ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
Ok(())
@ -105,7 +105,8 @@ impl PluginRegistry {
/// Get plugins by kind (e.g., "media_type", "metadata_extractor")
pub fn get_by_kind(&self, kind: &str) -> Vec<&RegisteredPlugin> {
self.plugins
self
.plugins
.values()
.filter(|p| p.manifest.plugin.kind.contains(&kind.to_string()))
.collect()
@ -130,10 +131,12 @@ impl Default for PluginRegistry {
#[cfg(test)]
mod tests {
use super::*;
use pinakes_plugin_api::Capabilities;
use std::collections::HashMap;
use pinakes_plugin_api::Capabilities;
use super::*;
fn create_test_plugin(id: &str, kind: Vec<String>) -> RegisteredPlugin {
let manifest = PluginManifest {
plugin: pinakes_plugin_api::manifest::PluginInfo {
@ -176,7 +179,8 @@ mod tests {
#[test]
fn test_registry_register_and_get() {
let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin.clone()).unwrap();
@ -187,7 +191,8 @@ mod tests {
#[test]
fn test_registry_duplicate_register() {
let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin.clone()).unwrap();
let result = registry.register(plugin);
@ -198,7 +203,8 @@ mod tests {
#[test]
fn test_registry_unregister() {
let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin).unwrap();
registry.unregister("test-plugin").unwrap();
@ -209,7 +215,8 @@ mod tests {
#[test]
fn test_registry_enable_disable() {
let mut registry = PluginRegistry::new();
let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
let plugin =
create_test_plugin("test-plugin", vec!["media_type".to_string()]);
registry.register(plugin).unwrap();
assert_eq!(registry.is_enabled("test-plugin"), Some(true));
@ -228,22 +235,19 @@ mod tests {
let mut registry = PluginRegistry::new();
registry
.register(create_test_plugin(
"plugin1",
vec!["media_type".to_string()],
))
.register(create_test_plugin("plugin1", vec![
"media_type".to_string(),
]))
.unwrap();
registry
.register(create_test_plugin(
"plugin2",
vec!["metadata_extractor".to_string()],
))
.register(create_test_plugin("plugin2", vec![
"metadata_extractor".to_string(),
]))
.unwrap();
registry
.register(create_test_plugin(
"plugin3",
vec!["media_type".to_string()],
))
.register(create_test_plugin("plugin3", vec![
"media_type".to_string(),
]))
.unwrap();
let media_type_plugins = registry.get_by_kind("media_type");
@ -258,16 +262,14 @@ mod tests {
let mut registry = PluginRegistry::new();
registry
.register(create_test_plugin(
"plugin1",
vec!["media_type".to_string()],
))
.register(create_test_plugin("plugin1", vec![
"media_type".to_string(),
]))
.unwrap();
registry
.register(create_test_plugin(
"plugin2",
vec!["media_type".to_string()],
))
.register(create_test_plugin("plugin2", vec![
"media_type".to_string(),
]))
.unwrap();
assert_eq!(registry.count(), 2);

View file

@ -1,9 +1,9 @@
//! WASM runtime for executing plugins
use std::{path::Path, sync::Arc};
use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginContext;
use std::path::Path;
use std::sync::Arc;
use wasmtime::*;
/// WASM runtime wrapper for executing plugins
@ -75,7 +75,11 @@ impl WasmPlugin {
///
/// Creates a fresh store and instance per invocation with host functions
/// linked, calls the requested exported function, and returns the result.
pub async fn call_function(&self, function_name: &str, params: &[u8]) -> Result<Vec<u8>> {
pub async fn call_function(
&self,
function_name: &str,
params: &[u8],
) -> Result<Vec<u8>> {
let engine = self.module.engine();
// Create store with per-invocation data
@ -108,12 +112,17 @@ impl WasmPlugin {
if !params.is_empty()
&& let Some(mem) = &memory
{
// Call the plugin's alloc function if available, otherwise write at offset 0
let offset = if let Ok(alloc) = instance.get_typed_func::<i32, i32>(&mut store, "alloc")
// Call the plugin's alloc function if available, otherwise write at
// offset 0
let offset = if let Ok(alloc) =
instance.get_typed_func::<i32, i32>(&mut store, "alloc")
{
let result = alloc.call_async(&mut store, params.len() as i32).await?;
if result < 0 {
return Err(anyhow!("plugin alloc returned negative offset: {}", result));
return Err(anyhow!(
"plugin alloc returned negative offset: {}",
result
));
}
result as usize
} else {
@ -128,9 +137,12 @@ impl WasmPlugin {
}
// Look up the exported function and call it
let func = instance
let func =
instance
.get_func(&mut store, function_name)
.ok_or_else(|| anyhow!("exported function '{}' not found", function_name))?;
.ok_or_else(|| {
anyhow!("exported function '{}' not found", function_name)
})?;
let func_ty = func.ty(&store);
let param_count = func_ty.params().len();
@ -141,7 +153,8 @@ impl WasmPlugin {
// Call with appropriate params based on function signature
if param_count == 2 && !params.is_empty() {
// Convention: (ptr, len)
func.call_async(
func
.call_async(
&mut store,
&[Val::I32(alloc_offset), Val::I32(params.len() as i32)],
&mut results,
@ -151,8 +164,10 @@ impl WasmPlugin {
func.call_async(&mut store, &[], &mut results).await?;
} else {
// Generic: fill with zeroes
let params_vals: Vec<Val> = (0..param_count).map(|_| Val::I32(0)).collect();
func.call_async(&mut store, &params_vals, &mut results)
let params_vals: Vec<Val> =
(0..param_count).map(|_| Val::I32(0)).collect();
func
.call_async(&mut store, &params_vals, &mut results)
.await?;
}
@ -199,7 +214,10 @@ impl HostFunctions {
linker.func_wrap(
"env",
"host_log",
|mut caller: Caller<'_, PluginStoreData>, level: i32, ptr: i32, len: i32| {
|mut caller: Caller<'_, PluginStoreData>,
level: i32,
ptr: i32,
len: i32| {
if ptr < 0 || len < 0 {
return;
}
@ -226,7 +244,10 @@ impl HostFunctions {
linker.func_wrap(
"env",
"host_read_file",
|mut caller: Caller<'_, PluginStoreData>, path_ptr: i32, path_len: i32| -> i32 {
|mut caller: Caller<'_, PluginStoreData>,
path_ptr: i32,
path_len: i32|
-> i32 {
if path_ptr < 0 || path_len < 0 {
return -1;
}
@ -259,7 +280,9 @@ impl HostFunctions {
.filesystem
.read
.iter()
.any(|allowed| allowed.canonicalize().is_ok_and(|a| path.starts_with(a)));
.any(|allowed| {
allowed.canonicalize().is_ok_and(|a| path.starts_with(a))
});
if !can_read {
tracing::warn!(path = %path_str, "plugin read access denied");
@ -271,7 +294,7 @@ impl HostFunctions {
let len = contents.len() as i32;
caller.data_mut().exchange_buffer = contents;
len
}
},
Err(_) => -1,
}
},
@ -303,7 +326,8 @@ impl HostFunctions {
return -1;
}
let path_str = match std::str::from_utf8(&mem_data[path_start..path_end]) {
let path_str =
match std::str::from_utf8(&mem_data[path_start..path_end]) {
Ok(s) => s.to_string(),
Err(_) => return -1,
};
@ -314,7 +338,8 @@ impl HostFunctions {
let canonical = if path.exists() {
path.canonicalize().ok()
} else {
path.parent()
path
.parent()
.and_then(|p| p.canonicalize().ok())
.map(|p| p.join(path.file_name().unwrap_or_default()))
};
@ -352,7 +377,10 @@ impl HostFunctions {
linker.func_wrap(
"env",
"host_http_request",
|mut caller: Caller<'_, PluginStoreData>, url_ptr: i32, url_len: i32| -> i32 {
|mut caller: Caller<'_, PluginStoreData>,
url_ptr: i32,
url_len: i32|
-> i32 {
if url_ptr < 0 || url_len < 0 {
return -1;
}
@ -378,7 +406,8 @@ impl HostFunctions {
}
// Use block_in_place to avoid blocking the async runtime's thread pool.
// Falls back to a blocking client with timeout if block_in_place is unavailable.
// Falls back to a blocking client with timeout if block_in_place is
// unavailable.
let result = std::panic::catch_unwind(|| {
tokio::task::block_in_place(|| {
tokio::runtime::Handle::current().block_on(async {
@ -402,7 +431,7 @@ impl HostFunctions {
let len = bytes.len() as i32;
caller.data_mut().exchange_buffer = bytes.to_vec();
len
}
},
Ok(Err(_)) => -1,
Err(_) => {
// block_in_place panicked (e.g. current-thread runtime);
@ -415,17 +444,19 @@ impl HostFunctions {
Err(_) => return -1,
};
match client.get(&url_str).send() {
Ok(resp) => match resp.bytes() {
Ok(resp) => {
match resp.bytes() {
Ok(bytes) => {
let len = bytes.len() as i32;
caller.data_mut().exchange_buffer = bytes.to_vec();
len
}
Err(_) => -1,
},
Err(_) => -1,
}
},
Err(_) => -1,
}
},
}
},
)?;
@ -434,7 +465,10 @@ impl HostFunctions {
linker.func_wrap(
"env",
"host_get_config",
|mut caller: Caller<'_, PluginStoreData>, key_ptr: i32, key_len: i32| -> i32 {
|mut caller: Caller<'_, PluginStoreData>,
key_ptr: i32,
key_len: i32|
-> i32 {
if key_ptr < 0 || key_len < 0 {
return -1;
}
@ -460,7 +494,7 @@ impl HostFunctions {
let len = bytes.len() as i32;
caller.data_mut().exchange_buffer = bytes;
len
}
},
None => -1,
}
},
@ -470,7 +504,10 @@ impl HostFunctions {
linker.func_wrap(
"env",
"host_get_buffer",
|mut caller: Caller<'_, PluginStoreData>, dest_ptr: i32, dest_len: i32| -> i32 {
|mut caller: Caller<'_, PluginStoreData>,
dest_ptr: i32,
dest_len: i32|
-> i32 {
if dest_ptr < 0 || dest_len < 0 {
return -1;
}
@ -497,10 +534,12 @@ impl HostFunctions {
#[cfg(test)]
mod tests {
use super::*;
use pinakes_plugin_api::PluginContext;
use std::collections::HashMap;
use pinakes_plugin_api::PluginContext;
use super::*;
#[test]
fn test_wasm_runtime_creation() {
let runtime = WasmRuntime::new();

View file

@ -1,8 +1,9 @@
//! Capability-based security for plugins
use std::path::{Path, PathBuf};
use anyhow::{Result, anyhow};
use pinakes_plugin_api::Capabilities;
use std::path::{Path, PathBuf};
/// Capability enforcer validates and enforces plugin capabilities
pub struct CapabilityEnforcer {
@ -65,7 +66,10 @@ impl CapabilityEnforcer {
}
/// Validate capabilities requested by a plugin
pub fn validate_capabilities(&self, capabilities: &Capabilities) -> Result<()> {
pub fn validate_capabilities(
&self,
capabilities: &Capabilities,
) -> Result<()> {
// Validate memory limit
if let Some(memory) = capabilities.max_memory_bytes
&& memory > self.max_memory_limit
@ -94,7 +98,8 @@ impl CapabilityEnforcer {
// Validate network access
if capabilities.network.enabled && !self.allow_network_default {
return Err(anyhow!(
"Plugin requests network access, but network access is disabled by policy"
"Plugin requests network access, but network access is disabled by \
policy"
));
}
@ -102,7 +107,10 @@ impl CapabilityEnforcer {
}
/// Validate filesystem access capabilities
fn validate_filesystem_access(&self, capabilities: &Capabilities) -> Result<()> {
fn validate_filesystem_access(
&self,
capabilities: &Capabilities,
) -> Result<()> {
// Check read paths
for path in &capabilities.filesystem.read {
if !self.is_read_allowed(path) {
@ -149,7 +157,8 @@ impl CapabilityEnforcer {
let canonical = if path.exists() {
path.canonicalize().ok()
} else {
path.parent()
path
.parent()
.and_then(|p| p.canonicalize().ok())
.map(|p| p.join(path.file_name().unwrap_or_default()))
};
@ -169,7 +178,11 @@ impl CapabilityEnforcer {
}
/// Check if a specific domain is allowed
pub fn is_domain_allowed(&self, capabilities: &Capabilities, domain: &str) -> bool {
pub fn is_domain_allowed(
&self,
capabilities: &Capabilities,
domain: &str,
) -> bool {
if !capabilities.network.enabled {
return false;
}
@ -213,10 +226,11 @@ impl Default for CapabilityEnforcer {
#[cfg(test)]
mod tests {
use super::*;
#[allow(unused_imports)]
use pinakes_plugin_api::{FilesystemCapability, NetworkCapability};
use super::*;
#[test]
fn test_validate_memory_limit() {
let enforcer = CapabilityEnforcer::new().with_max_memory(100 * 1024 * 1024); // 100 MB
@ -250,7 +264,8 @@ mod tests {
let test_file = allowed_dir.join("test.txt");
std::fs::write(&test_file, "test").unwrap();
let enforcer = CapabilityEnforcer::new().allow_read_path(allowed_dir.clone());
let enforcer =
CapabilityEnforcer::new().allow_read_path(allowed_dir.clone());
assert!(enforcer.is_read_allowed(&test_file));
assert!(!enforcer.is_read_allowed(Path::new("/etc/passwd")));
@ -271,7 +286,8 @@ mod tests {
let existing = output_dir.join("file.txt");
std::fs::write(&existing, "test").unwrap();
let enforcer = CapabilityEnforcer::new().allow_write_path(output_dir.clone());
let enforcer =
CapabilityEnforcer::new().allow_write_path(output_dir.clone());
assert!(enforcer.is_write_allowed(&existing));
// New file in allowed dir (parent exists)

View file

@ -1,14 +1,17 @@
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
use std::{
path::{Path, PathBuf},
sync::{
Arc,
Mutex,
atomic::{AtomicBool, AtomicUsize, Ordering},
},
};
use notify::{PollWatcher, RecursiveMode, Watcher};
use tokio::sync::mpsc;
use tracing::{info, warn};
use crate::error::Result;
use crate::import;
use crate::storage::DynStorageBackend;
use crate::{error::Result, import, storage::DynStorageBackend};
pub struct ScanStatus {
pub scanning: bool,
@ -28,7 +31,8 @@ pub struct ScanOptions {
pub force_full: bool,
}
/// Shared scan progress that can be read by the status endpoint while a scan runs.
/// Shared scan progress that can be read by the status endpoint while a scan
/// runs.
#[derive(Clone)]
pub struct ScanProgress {
pub is_scanning: Arc<AtomicBool>,
@ -101,7 +105,14 @@ pub async fn scan_directory(
dir: &Path,
ignore_patterns: &[String],
) -> Result<ScanStatus> {
scan_directory_with_options(storage, dir, ignore_patterns, None, &ScanOptions::default()).await
scan_directory_with_options(
storage,
dir,
ignore_patterns,
None,
&ScanOptions::default(),
)
.await
}
/// Scan a directory with incremental scanning support
@ -114,7 +125,8 @@ pub async fn scan_directory_incremental(
incremental: true,
force_full: false,
};
scan_directory_with_options(storage, dir, ignore_patterns, None, &options).await
scan_directory_with_options(storage, dir, ignore_patterns, None, &options)
.await
}
pub async fn scan_directory_with_progress(
@ -133,7 +145,8 @@ pub async fn scan_directory_with_progress(
.await
}
/// Scan a directory with full options including progress tracking and incremental mode
/// Scan a directory with full options including progress tracking and
/// incremental mode
pub async fn scan_directory_with_options(
storage: &DynStorageBackend,
dir: &Path,
@ -179,14 +192,14 @@ pub async fn scan_directory_with_options(
} else {
processed += 1;
}
}
},
Err(e) => {
let msg = e.to_string();
if let Some(p) = progress {
p.record_error(msg.clone());
}
errors.push(msg);
}
},
}
}
@ -221,7 +234,13 @@ pub async fn scan_all_roots(
storage: &DynStorageBackend,
ignore_patterns: &[String],
) -> Result<Vec<ScanStatus>> {
scan_all_roots_with_options(storage, ignore_patterns, None, &ScanOptions::default()).await
scan_all_roots_with_options(
storage,
ignore_patterns,
None,
&ScanOptions::default(),
)
.await
}
/// Scan all roots incrementally (skip unchanged files)
@ -241,7 +260,13 @@ pub async fn scan_all_roots_with_progress(
ignore_patterns: &[String],
progress: Option<&ScanProgress>,
) -> Result<Vec<ScanStatus>> {
scan_all_roots_with_options(storage, ignore_patterns, progress, &ScanOptions::default()).await
scan_all_roots_with_options(
storage,
ignore_patterns,
progress,
&ScanOptions::default(),
)
.await
}
/// Scan all roots with full options including progress and incremental mode
@ -255,7 +280,13 @@ pub async fn scan_all_roots_with_options(
let mut statuses = Vec::new();
for root in roots {
match scan_directory_with_options(storage, &root, ignore_patterns, progress, scan_options)
match scan_directory_with_options(
storage,
&root,
ignore_patterns,
progress,
scan_options,
)
.await
{
Ok(status) => statuses.push(status),
@ -268,7 +299,7 @@ pub async fn scan_all_roots_with_options(
files_skipped: 0,
errors: vec![e.to_string()],
});
}
},
}
}
@ -285,15 +316,18 @@ impl FileWatcher {
let (tx, rx) = mpsc::channel(1024);
// Try the recommended (native) watcher first, fall back to polling
let watcher: Box<dyn Watcher + Send> = match Self::try_native_watcher(dirs, tx.clone()) {
let watcher: Box<dyn Watcher + Send> = match Self::try_native_watcher(
dirs,
tx.clone(),
) {
Ok(w) => {
info!("using native filesystem watcher");
w
}
},
Err(native_err) => {
warn!(error = %native_err, "native watcher failed, falling back to polling");
Self::polling_watcher(dirs, tx)?
}
},
};
Ok(Self {
@ -307,8 +341,8 @@ impl FileWatcher {
tx: mpsc::Sender<PathBuf>,
) -> std::result::Result<Box<dyn Watcher + Send>, notify::Error> {
let tx_clone = tx.clone();
let mut watcher =
notify::recommended_watcher(move |res: notify::Result<notify::Event>| {
let mut watcher = notify::recommended_watcher(
move |res: notify::Result<notify::Event>| {
if let Ok(event) = res {
for path in event.paths {
if tx_clone.blocking_send(path).is_err() {
@ -317,7 +351,8 @@ impl FileWatcher {
}
}
}
})?;
},
)?;
for dir in dirs {
watcher.watch(dir, RecursiveMode::Recursive)?;
@ -350,9 +385,9 @@ impl FileWatcher {
.map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))?;
for dir in dirs {
watcher
.watch(dir, RecursiveMode::Recursive)
.map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))?;
watcher.watch(dir, RecursiveMode::Recursive).map_err(|e| {
crate::error::PinakesError::Io(std::io::Error::other(e))
})?;
}
Ok(Box::new(watcher))

View file

@ -1,5 +1,4 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Datelike, Utc};
use serde::{Deserialize, Serialize};
@ -7,21 +6,34 @@ use tokio::sync::RwLock;
use tokio_util::sync::CancellationToken;
use uuid::Uuid;
use crate::config::Config;
use crate::jobs::{JobKind, JobQueue};
use crate::{
config::Config,
jobs::{JobKind, JobQueue},
};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum Schedule {
Interval { secs: u64 },
Daily { hour: u32, minute: u32 },
Weekly { day: u32, hour: u32, minute: u32 },
Interval {
secs: u64,
},
Daily {
hour: u32,
minute: u32,
},
Weekly {
day: u32,
hour: u32,
minute: u32,
},
}
impl Schedule {
pub fn next_run(&self, from: DateTime<Utc>) -> DateTime<Utc> {
match self {
Schedule::Interval { secs } => from + chrono::Duration::seconds(*secs as i64),
Schedule::Interval { secs } => {
from + chrono::Duration::seconds(*secs as i64)
},
Schedule::Daily { hour, minute } => {
let today = from
.date_naive()
@ -33,7 +45,7 @@ impl Schedule {
} else {
today_utc + chrono::Duration::days(1)
}
}
},
Schedule::Weekly { day, hour, minute } => {
let current_day = from.weekday().num_days_from_monday();
let target_day = *day;
@ -52,12 +64,13 @@ impl Schedule {
}
7
};
let target_date = from.date_naive() + chrono::Duration::days(days_ahead as i64);
let target_date =
from.date_naive() + chrono::Duration::days(days_ahead as i64);
target_date
.and_hms_opt(*hour, *minute, 0)
.unwrap_or_default()
.and_utc()
}
},
}
}
@ -71,8 +84,10 @@ impl Schedule {
} else {
format!("Every {}s", secs)
}
}
Schedule::Daily { hour, minute } => format!("Daily {hour:02}:{minute:02}"),
},
Schedule::Daily { hour, minute } => {
format!("Daily {hour:02}:{minute:02}")
},
Schedule::Weekly { day, hour, minute } => {
let day_name = match day {
0 => "Mon",
@ -84,7 +99,7 @@ impl Schedule {
_ => "Sun",
};
format!("{day_name} {hour:02}:{minute:02}")
}
},
}
}
}
@ -99,10 +114,12 @@ pub struct ScheduledTask {
pub last_run: Option<DateTime<Utc>>,
pub next_run: Option<DateTime<Utc>>,
pub last_status: Option<String>,
/// Whether a job for this task is currently running. Skipped during serialization.
/// Whether a job for this task is currently running. Skipped during
/// serialization.
#[serde(default, skip_serializing)]
pub running: bool,
/// The job ID of the last submitted job. Skipped during serialization/deserialization.
/// The job ID of the last submitted job. Skipped during
/// serialization/deserialization.
#[serde(skip)]
pub last_job_id: Option<Uuid>,
}
@ -156,7 +173,10 @@ impl TaskScheduler {
id: "orphan_detection".to_string(),
name: "Orphan Detection".to_string(),
kind: JobKind::OrphanDetection,
schedule: Schedule::Daily { hour: 2, minute: 0 },
schedule: Schedule::Daily {
hour: 2,
minute: 0,
},
enabled: false,
last_run: None,
next_run: None,
@ -191,7 +211,8 @@ impl TaskScheduler {
}
}
/// Restore saved task state from config. Should be called once after construction.
/// Restore saved task state from config. Should be called once after
/// construction.
pub async fn restore_state(&self) {
let saved = self.config.read().await.scheduled_tasks.clone();
if saved.is_empty() {
@ -224,11 +245,13 @@ impl TaskScheduler {
let tasks = self.tasks.read().await;
let task_configs: Vec<crate::config::ScheduledTaskConfig> = tasks
.iter()
.map(|t| crate::config::ScheduledTaskConfig {
.map(|t| {
crate::config::ScheduledTaskConfig {
id: t.id.clone(),
enabled: t.enabled,
schedule: t.schedule.clone(),
last_run: t.last_run.map(|dt| dt.to_rfc3339()),
}
})
.collect();
drop(tasks);
@ -300,7 +323,8 @@ impl TaskScheduler {
/// holding the write lock across await points. Returns when the
/// cancellation token is triggered.
pub async fn run(&self) {
let mut interval = tokio::time::interval(std::time::Duration::from_secs(30));
let mut interval =
tokio::time::interval(std::time::Duration::from_secs(30));
loop {
tokio::select! {
_ = interval.tick() => {}
@ -328,16 +352,16 @@ impl TaskScheduler {
JobStatus::Completed { .. } => {
task.running = false;
task.last_status = Some("completed".to_string());
}
},
JobStatus::Failed { error } => {
task.running = false;
task.last_status = Some(format!("failed: {error}"));
}
},
JobStatus::Cancelled => {
task.running = false;
task.last_status = Some("cancelled".to_string());
}
_ => {} // still pending or running
},
_ => {}, // still pending or running
}
}
}
@ -377,9 +401,10 @@ impl TaskScheduler {
#[cfg(test)]
mod tests {
use super::*;
use chrono::TimeZone;
use super::*;
#[test]
fn test_interval_next_run() {
let from = Utc.with_ymd_and_hms(2025, 6, 15, 12, 0, 0).unwrap();
@ -427,7 +452,8 @@ mod tests {
#[test]
fn test_weekly_same_day_future() {
// 2025-06-15 is Sunday (day 6). Schedule is Sunday 14:00, current is 10:00 => today.
// 2025-06-15 is Sunday (day 6). Schedule is Sunday 14:00, current is 10:00
// => today.
let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap();
let schedule = Schedule::Weekly {
day: 6,
@ -440,7 +466,8 @@ mod tests {
#[test]
fn test_weekly_same_day_past() {
// 2025-06-15 is Sunday (day 6). Schedule is Sunday 08:00, current is 10:00 => next week.
// 2025-06-15 is Sunday (day 6). Schedule is Sunday 08:00, current is 10:00
// => next week.
let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap();
let schedule = Schedule::Weekly {
day: 6,
@ -492,14 +519,18 @@ mod tests {
"Every 30s"
);
assert_eq!(
Schedule::Daily { hour: 3, minute: 0 }.display_string(),
Schedule::Daily {
hour: 3,
minute: 0,
}
.display_string(),
"Daily 03:00"
);
assert_eq!(
Schedule::Weekly {
day: 0,
hour: 3,
minute: 0
minute: 0,
}
.display_string(),
"Mon 03:00"
@ -508,7 +539,7 @@ mod tests {
Schedule::Weekly {
day: 6,
hour: 14,
minute: 30
minute: 30,
}
.display_string(),
"Sun 14:30"

View file

@ -1,7 +1,10 @@
use serde::{Deserialize, Serialize};
use winnow::combinator::{alt, delimited, preceded, repeat};
use winnow::token::{take_till, take_while};
use winnow::{ModalResult, Parser};
use winnow::{
ModalResult,
Parser,
combinator::{alt, delimited, preceded, repeat},
token::{take_till, take_while},
};
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum SearchQuery {
@ -131,7 +134,7 @@ fn parse_date_value(s: &str) -> Option<DateValue> {
return Some(DateValue::DaysAgo(days));
}
None
}
},
}
}
@ -152,15 +155,15 @@ fn parse_size_value(s: &str) -> Option<i64> {
}
fn field_match(input: &mut &str) -> ModalResult<SearchQuery> {
let field_name =
take_while(1.., |c: char| c.is_alphanumeric() || c == '_').map(|s: &str| s.to_string());
let field_name = take_while(1.., |c: char| c.is_alphanumeric() || c == '_')
.map(|s: &str| s.to_string());
(field_name, ':', word_or_quoted)
.map(|(field, _, value)| {
// Handle special field types
match field.as_str() {
"type" => return SearchQuery::TypeFilter(value),
"tag" => return SearchQuery::TagFilter(value),
_ => {}
_ => {},
}
// Check for range queries: field:start..end
@ -305,7 +308,8 @@ fn atom(input: &mut &str) -> ModalResult<SearchQuery> {
fn and_expr(input: &mut &str) -> ModalResult<SearchQuery> {
let first = atom.parse_next(input)?;
let rest: Vec<SearchQuery> = repeat(0.., preceded(ws, atom)).parse_next(input)?;
let rest: Vec<SearchQuery> =
repeat(0.., preceded(ws, atom)).parse_next(input)?;
if rest.is_empty() {
Ok(first)
} else {
@ -352,13 +356,10 @@ mod tests {
#[test]
fn test_field_match() {
let q = parse_search_query("artist:Beatles").unwrap();
assert_eq!(
q,
SearchQuery::FieldMatch {
assert_eq!(q, SearchQuery::FieldMatch {
field: "artist".into(),
value: "Beatles".into()
}
);
value: "Beatles".into(),
});
}
#[test]
@ -427,127 +428,97 @@ mod tests {
#[test]
fn test_range_query_year() {
let q = parse_search_query("year:2020..2023").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
assert_eq!(q, SearchQuery::RangeQuery {
field: "year".into(),
start: Some(2020),
end: Some(2023)
}
);
end: Some(2023),
});
}
#[test]
fn test_range_query_open_start() {
let q = parse_search_query("year:..2023").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
assert_eq!(q, SearchQuery::RangeQuery {
field: "year".into(),
start: None,
end: Some(2023)
}
);
end: Some(2023),
});
}
#[test]
fn test_range_query_open_end() {
let q = parse_search_query("year:2020..").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
assert_eq!(q, SearchQuery::RangeQuery {
field: "year".into(),
start: Some(2020),
end: None
}
);
end: None,
});
}
#[test]
fn test_compare_greater_than() {
let q = parse_search_query("year:>2020").unwrap();
assert_eq!(
q,
SearchQuery::CompareQuery {
assert_eq!(q, SearchQuery::CompareQuery {
field: "year".into(),
op: CompareOp::GreaterThan,
value: 2020
}
);
value: 2020,
});
}
#[test]
fn test_compare_less_or_equal() {
let q = parse_search_query("year:<=2023").unwrap();
assert_eq!(
q,
SearchQuery::CompareQuery {
assert_eq!(q, SearchQuery::CompareQuery {
field: "year".into(),
op: CompareOp::LessOrEqual,
value: 2023
}
);
value: 2023,
});
}
#[test]
fn test_size_compare_mb() {
let q = parse_search_query("size:>10MB").unwrap();
assert_eq!(
q,
SearchQuery::CompareQuery {
assert_eq!(q, SearchQuery::CompareQuery {
field: "size".into(),
op: CompareOp::GreaterThan,
value: 10 * 1024 * 1024
}
);
value: 10 * 1024 * 1024,
});
}
#[test]
fn test_size_range_gb() {
let q = parse_search_query("size:1GB..2GB").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
assert_eq!(q, SearchQuery::RangeQuery {
field: "size".into(),
start: Some(1024 * 1024 * 1024),
end: Some(2 * 1024 * 1024 * 1024)
}
);
end: Some(2 * 1024 * 1024 * 1024),
});
}
#[test]
fn test_date_query_today() {
let q = parse_search_query("created:today").unwrap();
assert_eq!(
q,
SearchQuery::DateQuery {
assert_eq!(q, SearchQuery::DateQuery {
field: "created".into(),
value: DateValue::Today
}
);
value: DateValue::Today,
});
}
#[test]
fn test_date_query_last_week() {
let q = parse_search_query("modified:last-week").unwrap();
assert_eq!(
q,
SearchQuery::DateQuery {
assert_eq!(q, SearchQuery::DateQuery {
field: "modified".into(),
value: DateValue::LastWeek
}
);
value: DateValue::LastWeek,
});
}
#[test]
fn test_date_query_days_ago() {
let q = parse_search_query("created:last-30d").unwrap();
assert_eq!(
q,
SearchQuery::DateQuery {
assert_eq!(q, SearchQuery::DateQuery {
field: "created".into(),
value: DateValue::DaysAgo(30)
}
);
value: DateValue::DaysAgo(30),
});
}
}

View file

@ -12,8 +12,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
use crate::{model::MediaId, users::UserId};
/// Unique identifier for a share.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@ -99,7 +98,9 @@ impl ShareRecipient {
}
/// Permissions granted by a share.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
#[derive(
Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize,
)]
pub struct SharePermissions {
/// Can view the content
pub can_view: bool,

View file

@ -4,8 +4,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
use crate::{model::MediaId, users::UserId};
/// A user's rating for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -17,7 +17,9 @@ pub fn run_sqlite_migrations(conn: &mut rusqlite::Connection) -> Result<()> {
Ok(())
}
pub async fn run_postgres_migrations(client: &mut tokio_postgres::Client) -> Result<()> {
pub async fn run_postgres_migrations(
client: &mut tokio_postgres::Client,
) -> Result<()> {
postgres_migrations::migrations::runner()
.run_async(client)
.await

View file

@ -2,23 +2,23 @@ pub mod migrations;
pub mod postgres;
pub mod sqlite;
use std::path::PathBuf;
use std::sync::Arc;
use uuid::Uuid;
use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
use uuid::Uuid;
use crate::analytics::UsageEvent;
use crate::enrichment::ExternalMetadata;
use crate::error::Result;
use crate::model::*;
use crate::playlists::Playlist;
use crate::search::{SearchRequest, SearchResults};
use crate::social::{Comment, Rating, ShareLink};
use crate::subtitles::Subtitle;
use crate::transcode::{TranscodeSession, TranscodeStatus};
use crate::users::UserId;
use crate::{
analytics::UsageEvent,
enrichment::ExternalMetadata,
error::Result,
model::*,
playlists::Playlist,
search::{SearchRequest, SearchResults},
social::{Comment, Rating, ShareLink},
subtitles::Subtitle,
transcode::{TranscodeSession, TranscodeStatus},
users::UserId,
};
/// Statistics about the database.
#[derive(Debug, Clone, Default)]
@ -57,16 +57,27 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn insert_media(&self, item: &MediaItem) -> Result<()>;
async fn get_media(&self, id: MediaId) -> Result<MediaItem>;
async fn count_media(&self) -> Result<u64>;
async fn get_media_by_hash(&self, hash: &ContentHash) -> Result<Option<MediaItem>>;
async fn get_media_by_hash(
&self,
hash: &ContentHash,
) -> Result<Option<MediaItem>>;
/// Get a media item by its file path (used for incremental scanning)
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>>;
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>>;
async fn get_media_by_path(
&self,
path: &std::path::Path,
) -> Result<Option<MediaItem>>;
async fn list_media(&self, pagination: &Pagination)
-> Result<Vec<MediaItem>>;
async fn update_media(&self, item: &MediaItem) -> Result<()>;
async fn delete_media(&self, id: MediaId) -> Result<()>;
async fn delete_all_media(&self) -> Result<u64>;
// Tags
async fn create_tag(&self, name: &str, parent_id: Option<Uuid>) -> Result<Tag>;
async fn create_tag(
&self,
name: &str,
parent_id: Option<Uuid>,
) -> Result<Tag>;
async fn get_tag(&self, id: Uuid) -> Result<Tag>;
async fn list_tags(&self) -> Result<Vec<Tag>>;
async fn delete_tag(&self, id: Uuid) -> Result<()>;
@ -92,8 +103,15 @@ pub trait StorageBackend: Send + Sync + 'static {
media_id: MediaId,
position: i32,
) -> Result<()>;
async fn remove_from_collection(&self, collection_id: Uuid, media_id: MediaId) -> Result<()>;
async fn get_collection_members(&self, collection_id: Uuid) -> Result<Vec<MediaItem>>;
async fn remove_from_collection(
&self,
collection_id: Uuid,
media_id: MediaId,
) -> Result<()>;
async fn get_collection_members(
&self,
collection_id: Uuid,
) -> Result<Vec<MediaItem>>;
// Search
async fn search(&self, request: &SearchRequest) -> Result<SearchResults>;
@ -117,7 +135,11 @@ pub trait StorageBackend: Send + Sync + 'static {
&self,
media_id: MediaId,
) -> Result<std::collections::HashMap<String, CustomField>>;
async fn delete_custom_field(&self, media_id: MediaId, name: &str) -> Result<()>;
async fn delete_custom_field(
&self,
media_id: MediaId,
name: &str,
) -> Result<()>;
// Batch operations (transactional where supported)
async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> {
@ -129,7 +151,11 @@ pub trait StorageBackend: Send + Sync + 'static {
Ok(count)
}
async fn batch_tag_media(&self, media_ids: &[MediaId], tag_ids: &[Uuid]) -> Result<u64> {
async fn batch_tag_media(
&self,
media_ids: &[MediaId],
tag_ids: &[Uuid],
) -> Result<u64> {
let mut count = 0u64;
for media_id in media_ids {
for tag_id in tag_ids {
@ -141,7 +167,9 @@ pub trait StorageBackend: Send + Sync + 'static {
}
// Integrity
async fn list_media_paths(&self) -> Result<Vec<(MediaId, std::path::PathBuf, ContentHash)>>;
async fn list_media_paths(
&self,
) -> Result<Vec<(MediaId, std::path::PathBuf, ContentHash)>>;
// Batch metadata update
#[allow(clippy::too_many_arguments)]
@ -191,12 +219,16 @@ pub trait StorageBackend: Send + Sync + 'static {
query: &str,
sort_order: Option<&str>,
) -> Result<()>;
async fn list_saved_searches(&self) -> Result<Vec<crate::model::SavedSearch>>;
async fn list_saved_searches(&self)
-> Result<Vec<crate::model::SavedSearch>>;
async fn delete_saved_search(&self, id: uuid::Uuid) -> Result<()>;
// Duplicates
async fn find_duplicates(&self) -> Result<Vec<Vec<MediaItem>>>;
async fn find_perceptual_duplicates(&self, threshold: u32) -> Result<Vec<Vec<MediaItem>>>;
async fn find_perceptual_duplicates(
&self,
threshold: u32,
) -> Result<Vec<Vec<MediaItem>>>;
// Database management
async fn database_stats(&self) -> Result<DatabaseStats>;
@ -215,8 +247,14 @@ pub trait StorageBackend: Send + Sync + 'static {
// User Management
async fn list_users(&self) -> Result<Vec<crate::users::User>>;
async fn get_user(&self, id: crate::users::UserId) -> Result<crate::users::User>;
async fn get_user_by_username(&self, username: &str) -> Result<crate::users::User>;
async fn get_user(
&self,
id: crate::users::UserId,
) -> Result<crate::users::User>;
async fn get_user_by_username(
&self,
username: &str,
) -> Result<crate::users::User>;
async fn create_user(
&self,
username: &str,
@ -248,23 +286,26 @@ pub trait StorageBackend: Send + Sync + 'static {
root_path: &str,
) -> Result<()>;
/// Check if a user has access to a specific media item based on library permissions.
/// Returns the permission level if access is granted, or an error if denied.
/// Admin users (role=admin) bypass library checks and have full access.
/// Check if a user has access to a specific media item based on library
/// permissions. Returns the permission level if access is granted, or an
/// error if denied. Admin users (role=admin) bypass library checks and have
/// full access.
async fn check_library_access(
&self,
user_id: crate::users::UserId,
media_id: crate::model::MediaId,
) -> Result<crate::users::LibraryPermission> {
// Default implementation: get the media item's path and check against user's library access
// Default implementation: get the media item's path and check against
// user's library access
let media = self.get_media(media_id).await?;
let path_str = media.path.to_string_lossy().to_string();
// Get user's library permissions
let libraries = self.get_user_libraries(user_id).await?;
// If user has no library restrictions, they have no access (unless they're admin)
// This default impl requires at least one matching library permission
// If user has no library restrictions, they have no access (unless they're
// admin) This default impl requires at least one matching library
// permission
for lib in &libraries {
if path_str.starts_with(&lib.root_path) {
return Ok(lib.permission);
@ -310,7 +351,11 @@ pub trait StorageBackend: Send + Sync + 'static {
review: Option<&str>,
) -> Result<Rating>;
async fn get_media_ratings(&self, media_id: MediaId) -> Result<Vec<Rating>>;
async fn get_user_rating(&self, user_id: UserId, media_id: MediaId) -> Result<Option<Rating>>;
async fn get_user_rating(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<Option<Rating>>;
async fn delete_rating(&self, id: Uuid) -> Result<()>;
// ===== Comments =====
@ -321,18 +366,31 @@ pub trait StorageBackend: Send + Sync + 'static {
text: &str,
parent_id: Option<Uuid>,
) -> Result<Comment>;
async fn get_media_comments(&self, media_id: MediaId) -> Result<Vec<Comment>>;
async fn get_media_comments(&self, media_id: MediaId)
-> Result<Vec<Comment>>;
async fn delete_comment(&self, id: Uuid) -> Result<()>;
// ===== Favorites =====
async fn add_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>;
async fn remove_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>;
async fn add_favorite(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<()>;
async fn remove_favorite(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<()>;
async fn get_user_favorites(
&self,
user_id: UserId,
pagination: &Pagination,
) -> Result<Vec<MediaItem>>;
async fn is_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<bool>;
async fn is_favorite(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<bool>;
// ===== Share Links =====
async fn create_share_link(
@ -358,7 +416,10 @@ pub trait StorageBackend: Send + Sync + 'static {
filter_query: Option<&str>,
) -> Result<Playlist>;
async fn get_playlist(&self, id: Uuid) -> Result<Playlist>;
async fn list_playlists(&self, owner_id: Option<UserId>) -> Result<Vec<Playlist>>;
async fn list_playlists(
&self,
owner_id: Option<UserId>,
) -> Result<Vec<Playlist>>;
async fn update_playlist(
&self,
id: Uuid,
@ -373,8 +434,15 @@ pub trait StorageBackend: Send + Sync + 'static {
media_id: MediaId,
position: i32,
) -> Result<()>;
async fn remove_from_playlist(&self, playlist_id: Uuid, media_id: MediaId) -> Result<()>;
async fn get_playlist_items(&self, playlist_id: Uuid) -> Result<Vec<MediaItem>>;
async fn remove_from_playlist(
&self,
playlist_id: Uuid,
media_id: MediaId,
) -> Result<()>;
async fn get_playlist_items(
&self,
playlist_id: Uuid,
) -> Result<Vec<MediaItem>>;
async fn reorder_playlist(
&self,
playlist_id: Uuid,
@ -391,29 +459,53 @@ pub trait StorageBackend: Send + Sync + 'static {
limit: u64,
) -> Result<Vec<UsageEvent>>;
async fn get_most_viewed(&self, limit: u64) -> Result<Vec<(MediaItem, u64)>>;
async fn get_recently_viewed(&self, user_id: UserId, limit: u64) -> Result<Vec<MediaItem>>;
async fn get_recently_viewed(
&self,
user_id: UserId,
limit: u64,
) -> Result<Vec<MediaItem>>;
async fn update_watch_progress(
&self,
user_id: UserId,
media_id: MediaId,
progress_secs: f64,
) -> Result<()>;
async fn get_watch_progress(&self, user_id: UserId, media_id: MediaId) -> Result<Option<f64>>;
async fn get_watch_progress(
&self,
user_id: UserId,
media_id: MediaId,
) -> Result<Option<f64>>;
async fn cleanup_old_events(&self, before: DateTime<Utc>) -> Result<u64>;
// ===== Subtitles =====
async fn add_subtitle(&self, subtitle: &Subtitle) -> Result<()>;
async fn get_media_subtitles(&self, media_id: MediaId) -> Result<Vec<Subtitle>>;
async fn get_media_subtitles(
&self,
media_id: MediaId,
) -> Result<Vec<Subtitle>>;
async fn delete_subtitle(&self, id: Uuid) -> Result<()>;
async fn update_subtitle_offset(&self, id: Uuid, offset_ms: i64) -> Result<()>;
async fn update_subtitle_offset(
&self,
id: Uuid,
offset_ms: i64,
) -> Result<()>;
// ===== External Metadata (Enrichment) =====
async fn store_external_metadata(&self, meta: &ExternalMetadata) -> Result<()>;
async fn get_external_metadata(&self, media_id: MediaId) -> Result<Vec<ExternalMetadata>>;
async fn store_external_metadata(
&self,
meta: &ExternalMetadata,
) -> Result<()>;
async fn get_external_metadata(
&self,
media_id: MediaId,
) -> Result<Vec<ExternalMetadata>>;
async fn delete_external_metadata(&self, id: Uuid) -> Result<()>;
// ===== Transcode Sessions =====
async fn create_transcode_session(&self, session: &TranscodeSession) -> Result<()>;
async fn create_transcode_session(
&self,
session: &TranscodeSession,
) -> Result<()>;
async fn get_transcode_session(&self, id: Uuid) -> Result<TranscodeSession>;
async fn list_transcode_sessions(
&self,
@ -425,14 +517,20 @@ pub trait StorageBackend: Send + Sync + 'static {
status: TranscodeStatus,
progress: f32,
) -> Result<()>;
async fn cleanup_expired_transcodes(&self, before: DateTime<Utc>) -> Result<u64>;
async fn cleanup_expired_transcodes(
&self,
before: DateTime<Utc>,
) -> Result<u64>;
// ===== Session Management =====
/// Create a new session in the database
async fn create_session(&self, session: &SessionData) -> Result<()>;
/// Get a session by its token, returns None if not found or expired
async fn get_session(&self, session_token: &str) -> Result<Option<SessionData>>;
async fn get_session(
&self,
session_token: &str,
) -> Result<Option<SessionData>>;
/// Update the last_accessed timestamp for a session
async fn touch_session(&self, session_token: &str) -> Result<()>;
@ -447,12 +545,18 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn delete_expired_sessions(&self) -> Result<u64>;
/// List all active sessions (optionally filtered by username)
async fn list_active_sessions(&self, username: Option<&str>) -> Result<Vec<SessionData>>;
async fn list_active_sessions(
&self,
username: Option<&str>,
) -> Result<Vec<SessionData>>;
// Book Management Methods
/// Upsert book metadata for a media item
async fn upsert_book_metadata(&self, metadata: &crate::model::BookMetadata) -> Result<()>;
async fn upsert_book_metadata(
&self,
metadata: &crate::model::BookMetadata,
) -> Result<()>;
/// Get book metadata for a media item
async fn get_book_metadata(
@ -468,16 +572,23 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<()>;
/// Get all authors for a book
async fn get_book_authors(&self, media_id: MediaId) -> Result<Vec<crate::model::AuthorInfo>>;
async fn get_book_authors(
&self,
media_id: MediaId,
) -> Result<Vec<crate::model::AuthorInfo>>;
/// List all distinct authors with book counts
async fn list_all_authors(&self, pagination: &Pagination) -> Result<Vec<(String, u64)>>;
async fn list_all_authors(
&self,
pagination: &Pagination,
) -> Result<Vec<(String, u64)>>;
/// List all series with book counts
async fn list_series(&self) -> Result<Vec<(String, u64)>>;
/// Get all books in a series, ordered by series_index
async fn get_series_books(&self, series_name: &str) -> Result<Vec<MediaItem>>;
async fn get_series_books(&self, series_name: &str)
-> Result<Vec<MediaItem>>;
/// Update reading progress for a user and book
async fn update_reading_progress(
@ -531,7 +642,8 @@ pub trait StorageBackend: Send + Sync + 'static {
/// Increment the reference count for a blob
async fn increment_blob_ref(&self, hash: &ContentHash) -> Result<()>;
/// Decrement the reference count for a blob. Returns true if blob should be deleted.
/// Decrement the reference count for a blob. Returns true if blob should be
/// deleted.
async fn decrement_blob_ref(&self, hash: &ContentHash) -> Result<bool>;
/// Update the last_verified timestamp for a blob
@ -556,7 +668,10 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<crate::sync::SyncDevice>;
/// Get a sync device by ID
async fn get_device(&self, id: crate::sync::DeviceId) -> Result<crate::sync::SyncDevice>;
async fn get_device(
&self,
id: crate::sync::DeviceId,
) -> Result<crate::sync::SyncDevice>;
/// Get a sync device by its token hash
async fn get_device_by_token(
@ -565,10 +680,14 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Option<crate::sync::SyncDevice>>;
/// List all devices for a user
async fn list_user_devices(&self, user_id: UserId) -> Result<Vec<crate::sync::SyncDevice>>;
async fn list_user_devices(
&self,
user_id: UserId,
) -> Result<Vec<crate::sync::SyncDevice>>;
/// Update a sync device
async fn update_device(&self, device: &crate::sync::SyncDevice) -> Result<()>;
async fn update_device(&self, device: &crate::sync::SyncDevice)
-> Result<()>;
/// Delete a sync device
async fn delete_device(&self, id: crate::sync::DeviceId) -> Result<()>;
@ -579,7 +698,10 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Sync Log =====
/// Record a change in the sync log
async fn record_sync_change(&self, change: &crate::sync::SyncLogEntry) -> Result<()>;
async fn record_sync_change(
&self,
change: &crate::sync::SyncLogEntry,
) -> Result<()>;
/// Get changes since a cursor position
async fn get_changes_since(
@ -604,7 +726,10 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Option<crate::sync::DeviceSyncState>>;
/// Insert or update device sync state
async fn upsert_device_sync_state(&self, state: &crate::sync::DeviceSyncState) -> Result<()>;
async fn upsert_device_sync_state(
&self,
state: &crate::sync::DeviceSyncState,
) -> Result<()>;
/// List all pending sync items for a device
async fn list_pending_sync(
@ -615,19 +740,35 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Upload Sessions (Chunked Uploads) =====
/// Create a new upload session
async fn create_upload_session(&self, session: &crate::sync::UploadSession) -> Result<()>;
async fn create_upload_session(
&self,
session: &crate::sync::UploadSession,
) -> Result<()>;
/// Get an upload session by ID
async fn get_upload_session(&self, id: Uuid) -> Result<crate::sync::UploadSession>;
async fn get_upload_session(
&self,
id: Uuid,
) -> Result<crate::sync::UploadSession>;
/// Update an upload session
async fn update_upload_session(&self, session: &crate::sync::UploadSession) -> Result<()>;
async fn update_upload_session(
&self,
session: &crate::sync::UploadSession,
) -> Result<()>;
/// Record a received chunk
async fn record_chunk(&self, upload_id: Uuid, chunk: &crate::sync::ChunkInfo) -> Result<()>;
async fn record_chunk(
&self,
upload_id: Uuid,
chunk: &crate::sync::ChunkInfo,
) -> Result<()>;
/// Get all chunks for an upload
async fn get_upload_chunks(&self, upload_id: Uuid) -> Result<Vec<crate::sync::ChunkInfo>>;
async fn get_upload_chunks(
&self,
upload_id: Uuid,
) -> Result<Vec<crate::sync::ChunkInfo>>;
/// Clean up expired upload sessions
async fn cleanup_expired_uploads(&self) -> Result<u64>;
@ -635,7 +776,10 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Sync Conflicts =====
/// Record a sync conflict
async fn record_conflict(&self, conflict: &crate::sync::SyncConflict) -> Result<()>;
async fn record_conflict(
&self,
conflict: &crate::sync::SyncConflict,
) -> Result<()>;
/// Get unresolved conflicts for a device
async fn get_unresolved_conflicts(
@ -653,13 +797,22 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Enhanced Sharing =====
/// Create a new share
async fn create_share(&self, share: &crate::sharing::Share) -> Result<crate::sharing::Share>;
async fn create_share(
&self,
share: &crate::sharing::Share,
) -> Result<crate::sharing::Share>;
/// Get a share by ID
async fn get_share(&self, id: crate::sharing::ShareId) -> Result<crate::sharing::Share>;
async fn get_share(
&self,
id: crate::sharing::ShareId,
) -> Result<crate::sharing::Share>;
/// Get a share by its public token
async fn get_share_by_token(&self, token: &str) -> Result<crate::sharing::Share>;
async fn get_share_by_token(
&self,
token: &str,
) -> Result<crate::sharing::Share>;
/// List shares created by a user
async fn list_shares_by_owner(
@ -682,13 +835,19 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Vec<crate::sharing::Share>>;
/// Update a share
async fn update_share(&self, share: &crate::sharing::Share) -> Result<crate::sharing::Share>;
async fn update_share(
&self,
share: &crate::sharing::Share,
) -> Result<crate::sharing::Share>;
/// Delete a share
async fn delete_share(&self, id: crate::sharing::ShareId) -> Result<()>;
/// Record that a share was accessed
async fn record_share_access(&self, id: crate::sharing::ShareId) -> Result<()>;
async fn record_share_access(
&self,
id: crate::sharing::ShareId,
) -> Result<()>;
/// Check share access for a user and target
async fn check_share_access(
@ -705,7 +864,10 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Option<crate::sharing::SharePermissions>>;
/// Batch delete shares
async fn batch_delete_shares(&self, ids: &[crate::sharing::ShareId]) -> Result<u64>;
async fn batch_delete_shares(
&self,
ids: &[crate::sharing::ShareId],
) -> Result<u64>;
/// Clean up expired shares
async fn cleanup_expired_shares(&self) -> Result<u64>;
@ -713,7 +875,10 @@ pub trait StorageBackend: Send + Sync + 'static {
// ===== Share Activity =====
/// Record share activity
async fn record_share_activity(&self, activity: &crate::sharing::ShareActivity) -> Result<()>;
async fn record_share_activity(
&self,
activity: &crate::sharing::ShareActivity,
) -> Result<()>;
/// Get activity for a share
async fn get_share_activity(
@ -754,7 +919,11 @@ pub trait StorageBackend: Send + Sync + 'static {
/// For external storage, this actually moves the file on disk.
/// For managed storage, this only updates the path in metadata.
/// Returns the old path for sync log recording.
async fn move_media(&self, id: MediaId, new_directory: &std::path::Path) -> Result<String>;
async fn move_media(
&self,
id: MediaId,
new_directory: &std::path::Path,
) -> Result<String>;
/// Batch move multiple media items to a new directory.
async fn batch_move_media(
@ -779,7 +948,8 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn restore_media(&self, id: MediaId) -> Result<()>;
/// List all soft-deleted media items.
async fn list_trash(&self, pagination: &Pagination) -> Result<Vec<MediaItem>>;
async fn list_trash(&self, pagination: &Pagination)
-> Result<Vec<MediaItem>>;
/// Permanently delete all items in trash.
async fn empty_trash(&self) -> Result<u64>;
@ -807,15 +977,19 @@ pub trait StorageBackend: Send + Sync + 'static {
) -> Result<Vec<crate::model::MarkdownLink>>;
/// Get backlinks (incoming links) to a media item.
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>>;
async fn get_backlinks(
&self,
media_id: MediaId,
) -> Result<Vec<crate::model::BacklinkInfo>>;
/// Clear all links for a media item.
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()>;
/// Get graph data for visualization.
///
/// If `center_id` is provided, returns nodes within `depth` hops of that node.
/// If `center_id` is None, returns the entire graph (limited by internal max).
/// If `center_id` is provided, returns nodes within `depth` hops of that
/// node. If `center_id` is None, returns the entire graph (limited by
/// internal max).
async fn get_graph_data(
&self,
center_id: Option<MediaId>,

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -3,14 +3,15 @@
use std::path::{Path, PathBuf};
use chrono::Utc;
use tokio::fs;
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
use tokio::{
fs,
io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt},
};
use tracing::{debug, info};
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use super::{ChunkInfo, UploadSession};
use crate::error::{PinakesError, Result};
/// Manager for chunked uploads.
#[derive(Debug, Clone)]
@ -144,7 +145,8 @@ impl ChunkedUploadManager {
}
// Verify chunk indices
let mut indices: Vec<u64> = received_chunks.iter().map(|c| c.chunk_index).collect();
let mut indices: Vec<u64> =
received_chunks.iter().map(|c| c.chunk_index).collect();
indices.sort();
for (i, idx) in indices.iter().enumerate() {
if *idx != i as u64 {
@ -243,11 +245,11 @@ async fn compute_file_hash(path: &Path) -> Result<String> {
#[cfg(test)]
mod tests {
use super::*;
use crate::model::ContentHash;
use crate::sync::UploadStatus;
use tempfile::tempdir;
use super::*;
use crate::{model::ContentHash, sync::UploadStatus};
#[tokio::test]
async fn test_chunked_upload() {
let dir = tempdir().unwrap();

View file

@ -1,8 +1,7 @@
//! Conflict detection and resolution for sync.
use crate::config::ConflictResolution;
use super::DeviceSyncState;
use crate::config::ConflictResolution;
/// Detect if there's a conflict between local and server state.
pub fn detect_conflict(state: &DeviceSyncState) -> Option<ConflictInfo> {
@ -57,11 +56,12 @@ pub fn resolve_conflict(
ConflictResolution::ServerWins => ConflictOutcome::UseServer,
ConflictResolution::ClientWins => ConflictOutcome::UseLocal,
ConflictResolution::KeepBoth => {
let new_path = generate_conflict_path(&conflict.path, &conflict.local_hash);
let new_path =
generate_conflict_path(&conflict.path, &conflict.local_hash);
ConflictOutcome::KeepBoth {
new_local_path: new_path,
}
}
},
ConflictResolution::Manual => ConflictOutcome::Manual,
}
}
@ -88,7 +88,7 @@ pub fn resolve_by_mtime(conflict: &ConflictInfo) -> ConflictOutcome {
} else {
ConflictOutcome::UseServer
}
}
},
(Some(_), None) => ConflictOutcome::UseLocal,
(None, Some(_)) => ConflictOutcome::UseServer,
(None, None) => ConflictOutcome::UseServer, // Default to server

View file

@ -6,9 +6,11 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::config::ConflictResolution;
use crate::model::{ContentHash, MediaId};
use crate::users::UserId;
use crate::{
config::ConflictResolution,
model::{ContentHash, MediaId},
users::UserId,
};
/// Unique identifier for a sync device.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]

View file

@ -6,11 +6,18 @@ use chrono::Utc;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::error::Result;
use crate::model::{ContentHash, MediaId};
use crate::storage::DynStorageBackend;
use super::{DeviceId, DeviceSyncState, FileSyncStatus, SyncChangeType, SyncLogEntry};
use super::{
DeviceId,
DeviceSyncState,
FileSyncStatus,
SyncChangeType,
SyncLogEntry,
};
use crate::{
error::Result,
model::{ContentHash, MediaId},
storage::DynStorageBackend,
};
/// Request from client to get changes since a cursor.
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -183,8 +190,9 @@ pub async fn mark_pending_download(
s.server_mtime = server_mtime;
s.sync_status = FileSyncStatus::PendingDownload;
s
}
None => DeviceSyncState {
},
None => {
DeviceSyncState {
device_id,
path: path.to_string(),
local_hash: None,
@ -194,6 +202,7 @@ pub async fn mark_pending_download(
sync_status: FileSyncStatus::PendingDownload,
last_synced_at: None,
conflict_info_json: None,
}
},
};

View file

@ -1,8 +1,10 @@
use uuid::Uuid;
use crate::error::Result;
use crate::model::{AuditAction, MediaId, Tag};
use crate::storage::DynStorageBackend;
use crate::{
error::Result,
model::{AuditAction, MediaId, Tag},
storage::DynStorageBackend,
};
pub async fn create_tag(
storage: &DynStorageBackend,
@ -12,7 +14,11 @@ pub async fn create_tag(
storage.create_tag(name, parent_id).await
}
pub async fn tag_media(storage: &DynStorageBackend, media_id: MediaId, tag_id: Uuid) -> Result<()> {
pub async fn tag_media(
storage: &DynStorageBackend,
media_id: MediaId,
tag_id: Uuid,
) -> Result<()> {
storage.tag_media(media_id, tag_id).await?;
crate::audit::record_action(
storage,
@ -38,6 +44,9 @@ pub async fn untag_media(
.await
}
pub async fn get_tag_tree(storage: &DynStorageBackend, tag_id: Uuid) -> Result<Vec<Tag>> {
pub async fn get_tag_tree(
storage: &DynStorageBackend,
tag_id: Uuid,
) -> Result<Vec<Tag>> {
storage.get_tag_descendants(tag_id).await
}

View file

@ -1,17 +1,21 @@
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{
path::{Path, PathBuf},
process::Command,
};
use tracing::{info, warn};
use crate::config::ThumbnailConfig;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaCategory, MediaType};
use crate::model::MediaId;
use crate::{
config::ThumbnailConfig,
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaCategory, MediaType},
model::MediaId,
};
/// Generate a thumbnail for a media file and return the path to the thumbnail.
///
/// Supports images (via `image` crate), videos (via ffmpeg), PDFs (via pdftoppm),
/// and EPUBs (via cover image extraction).
/// Supports images (via `image` crate), videos (via ffmpeg), PDFs (via
/// pdftoppm), and EPUBs (via cover image extraction).
pub fn generate_thumbnail(
media_id: MediaId,
source_path: &Path,
@ -46,16 +50,20 @@ pub fn generate_thumbnail_with_config(
} else {
generate_image_thumbnail(source_path, &thumb_path, config)
}
}
MediaCategory::Video => generate_video_thumbnail(source_path, &thumb_path, config),
MediaCategory::Document => match media_type {
},
MediaCategory::Video => {
generate_video_thumbnail(source_path, &thumb_path, config)
},
MediaCategory::Document => {
match media_type {
MediaType::Builtin(BuiltinMediaType::Pdf) => {
generate_pdf_thumbnail(source_path, &thumb_path, config)
}
},
MediaType::Builtin(BuiltinMediaType::Epub) => {
generate_epub_thumbnail(source_path, &thumb_path, config)
}
},
_ => return Ok(None),
}
},
_ => return Ok(None),
};
@ -64,30 +72,42 @@ pub fn generate_thumbnail_with_config(
Ok(()) => {
info!(media_id = %media_id, category = ?media_type.category(), "generated thumbnail");
Ok(Some(thumb_path))
}
},
Err(e) => {
warn!(media_id = %media_id, error = %e, "failed to generate thumbnail");
Ok(None)
}
},
}
}
fn generate_image_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
let img = image::open(source)
.map_err(|e| PinakesError::MetadataExtraction(format!("image open: {e}")))?;
fn generate_image_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
let img = image::open(source).map_err(|e| {
PinakesError::MetadataExtraction(format!("image open: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
thumb
.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("thumbnail encode: {e}")))?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
&mut output,
config.quality,
);
thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("thumbnail encode: {e}"))
})?;
Ok(())
}
fn generate_video_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
fn generate_video_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
let ffmpeg = config.ffmpeg_path.as_deref().unwrap_or("ffmpeg");
let status = Command::new(ffmpeg)
@ -105,7 +125,9 @@ fn generate_video_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig
.stderr(std::process::Stdio::null())
.status()
.map_err(|e| {
PinakesError::MetadataExtraction(format!("ffmpeg not found or failed to execute: {e}"))
PinakesError::MetadataExtraction(format!(
"ffmpeg not found or failed to execute: {e}"
))
})?;
if !status.success() {
@ -118,7 +140,11 @@ fn generate_video_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig
Ok(())
}
fn generate_pdf_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
fn generate_pdf_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Use pdftoppm to render first page, then resize with image crate
let temp_prefix = dest.with_extension("tmp");
let status = Command::new("pdftoppm")
@ -145,15 +171,18 @@ fn generate_pdf_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
let rendered = temp_prefix.with_extension("jpg");
if rendered.exists() {
// Resize to thumbnail size
let img = image::open(&rendered)
.map_err(|e| PinakesError::MetadataExtraction(format!("pdf thumbnail open: {e}")))?;
let img = image::open(&rendered).map_err(|e| {
PinakesError::MetadataExtraction(format!("pdf thumbnail open: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?;
let encoder =
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
thumb
.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("pdf thumbnail encode: {e}")))?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
&mut output,
config.quality,
);
thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("pdf thumbnail encode: {e}"))
})?;
let _ = std::fs::remove_file(&rendered);
Ok(())
} else {
@ -163,28 +192,36 @@ fn generate_pdf_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
}
}
fn generate_epub_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
fn generate_epub_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Try to extract cover image from EPUB
let mut doc = epub::doc::EpubDoc::new(source)
.map_err(|e| PinakesError::MetadataExtraction(format!("epub open: {e}")))?;
let cover_data = doc.get_cover().map(|(data, _mime)| data).or_else(|| {
// Fallback: try to find a cover image in the resources
doc.get_resource("cover-image")
doc
.get_resource("cover-image")
.map(|(data, _)| data)
.or_else(|| doc.get_resource("cover").map(|(data, _)| data))
});
if let Some(data) = cover_data {
let img = image::load_from_memory(&data)
.map_err(|e| PinakesError::MetadataExtraction(format!("epub cover decode: {e}")))?;
let img = image::load_from_memory(&data).map_err(|e| {
PinakesError::MetadataExtraction(format!("epub cover decode: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?;
let encoder =
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
thumb
.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("epub thumbnail encode: {e}")))?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
&mut output,
config.quality,
);
thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("epub thumbnail encode: {e}"))
})?;
Ok(())
} else {
Err(PinakesError::MetadataExtraction(
@ -193,18 +230,28 @@ fn generate_epub_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
}
}
fn generate_raw_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
fn generate_raw_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Try dcraw to extract embedded JPEG preview, then resize
let temp_ppm = dest.with_extension("ppm");
let status = Command::new("dcraw")
.args(["-e", "-c"])
.arg(source)
.stdout(std::fs::File::create(&temp_ppm).map_err(|e| {
PinakesError::MetadataExtraction(format!("failed to create temp file: {e}"))
PinakesError::MetadataExtraction(format!(
"failed to create temp file: {e}"
))
})?)
.stderr(std::process::Stdio::null())
.status()
.map_err(|e| PinakesError::MetadataExtraction(format!("dcraw not found or failed: {e}")))?;
.map_err(|e| {
PinakesError::MetadataExtraction(format!(
"dcraw not found or failed: {e}"
))
})?;
if !status.success() {
let _ = std::fs::remove_file(&temp_ppm);
@ -218,15 +265,18 @@ fn generate_raw_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
if temp_ppm.exists() {
let result = image::open(&temp_ppm);
let _ = std::fs::remove_file(&temp_ppm);
let img = result
.map_err(|e| PinakesError::MetadataExtraction(format!("raw preview decode: {e}")))?;
let img = result.map_err(|e| {
PinakesError::MetadataExtraction(format!("raw preview decode: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?;
let encoder =
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
thumb
.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("raw thumbnail encode: {e}")))?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
&mut output,
config.quality,
);
thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("raw thumbnail encode: {e}"))
})?;
Ok(())
} else {
Err(PinakesError::MetadataExtraction(
@ -235,7 +285,11 @@ fn generate_raw_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
}
}
fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
fn generate_heic_thumbnail(
source: &Path,
dest: &Path,
config: &ThumbnailConfig,
) -> Result<()> {
// Use heif-convert to convert to JPEG, then resize
let temp_jpg = dest.with_extension("tmp.jpg");
let status = Command::new("heif-convert")
@ -245,7 +299,9 @@ fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
.stderr(std::process::Stdio::null())
.status()
.map_err(|e| {
PinakesError::MetadataExtraction(format!("heif-convert not found or failed: {e}"))
PinakesError::MetadataExtraction(format!(
"heif-convert not found or failed: {e}"
))
})?;
if !status.success() {
@ -259,15 +315,18 @@ fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
if temp_jpg.exists() {
let result = image::open(&temp_jpg);
let _ = std::fs::remove_file(&temp_jpg);
let img =
result.map_err(|e| PinakesError::MetadataExtraction(format!("heic decode: {e}")))?;
let img = result.map_err(|e| {
PinakesError::MetadataExtraction(format!("heic decode: {e}"))
})?;
let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?;
let encoder =
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
thumb
.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("heic thumbnail encode: {e}")))?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(
&mut output,
config.quality,
);
thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("heic thumbnail encode: {e}"))
})?;
Ok(())
} else {
Err(PinakesError::MetadataExtraction(
@ -315,8 +374,9 @@ pub fn generate_book_covers(
let media_cover_dir = covers_dir.join(media_id.to_string());
std::fs::create_dir_all(&media_cover_dir)?;
let img = image::load_from_memory(source_image)
.map_err(|e| PinakesError::MetadataExtraction(format!("cover image load: {e}")))?;
let img = image::load_from_memory(source_image).map_err(|e| {
PinakesError::MetadataExtraction(format!("cover image load: {e}"))
})?;
let mut results = Vec::new();
@ -334,18 +394,21 @@ pub fn generate_book_covers(
// Generate thumbnail
let thumb = img.thumbnail(width, height);
let mut output = std::fs::File::create(&cover_path)?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 90);
thumb
.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("cover encode: {e}")))?;
}
let encoder =
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 90);
thumb.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("cover encode: {e}"))
})?;
},
None => {
// Save original
let mut output = std::fs::File::create(&cover_path)?;
let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 95);
img.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("cover encode: {e}")))?;
}
let encoder =
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 95);
img.write_with_encoder(encoder).map_err(|e| {
PinakesError::MetadataExtraction(format!("cover encode: {e}"))
})?;
},
}
results.push((size, cover_path));
@ -390,7 +453,8 @@ pub fn extract_pdf_cover(pdf_path: &Path) -> Result<Option<Vec<u8>>> {
let pdftoppm = "pdftoppm";
let temp_dir = std::env::temp_dir();
let temp_prefix = temp_dir.join(format!("pdf_cover_{}", uuid::Uuid::new_v4()));
let temp_prefix =
temp_dir.join(format!("pdf_cover_{}", uuid::Uuid::new_v4()));
let status = Command::new(pdftoppm)
.args(["-jpeg", "-f", "1", "-l", "1", "-scale-to", "1200"])

View file

@ -1,18 +1,22 @@
//! Transcoding service for media files using FFmpeg.
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tokio::sync::{RwLock, Semaphore};
use uuid::Uuid;
use crate::config::{TranscodeProfile, TranscodingConfig};
use crate::model::MediaId;
use crate::storage::DynStorageBackend;
use crate::users::UserId;
use crate::{
config::{TranscodeProfile, TranscodingConfig},
model::MediaId,
storage::DynStorageBackend,
users::UserId,
};
/// A transcoding session for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -61,8 +65,10 @@ impl TranscodeStatus {
"pending" => Self::Pending,
"transcoding" => Self::Transcoding,
"complete" => Self::Complete,
"failed" => Self::Failed {
"failed" => {
Self::Failed {
error: error_message.unwrap_or("unknown error").to_string(),
}
},
"cancelled" => Self::Cancelled,
other => {
@ -71,7 +77,7 @@ impl TranscodeStatus {
other
);
Self::Pending
}
},
}
}
@ -105,7 +111,8 @@ impl TranscodeService {
}
pub fn cache_dir(&self) -> PathBuf {
self.config
self
.config
.cache_dir
.clone()
.unwrap_or_else(|| PathBuf::from("/tmp/pinakes-transcode"))
@ -142,8 +149,9 @@ impl TranscodeService {
))
})?;
let expires_at =
Some(Utc::now() + chrono::Duration::hours(self.config.cache_ttl_hours as i64));
let expires_at = Some(
Utc::now() + chrono::Duration::hours(self.config.cache_ttl_hours as i64),
);
let cancel_notify = Arc::new(tokio::sync::Notify::new());
@ -202,7 +210,7 @@ impl TranscodeService {
tracing::error!("failed to update transcode status: {}", e);
}
return;
}
},
};
// Mark as transcoding
@ -220,8 +228,11 @@ impl TranscodeService {
}
// Build FFmpeg args and run
let args = get_ffmpeg_args(&source, &session_dir, &profile, hw_accel.as_deref());
match run_ffmpeg(&args, &sessions, session_id, duration_secs, cancel).await {
let args =
get_ffmpeg_args(&source, &session_dir, &profile, hw_accel.as_deref());
match run_ffmpeg(&args, &sessions, session_id, duration_secs, cancel)
.await
{
Ok(()) => {
let mut s = sessions.write().await;
if let Some(sess) = s.get_mut(&session_id) {
@ -234,7 +245,7 @@ impl TranscodeService {
{
tracing::error!("failed to update transcode status: {}", e);
}
}
},
Err(e) => {
let error_msg = e.to_string();
let mut s = sessions.write().await;
@ -258,7 +269,7 @@ impl TranscodeService {
{
tracing::error!("failed to update transcode status: {}", e);
}
}
},
}
});
@ -330,13 +341,19 @@ impl TranscodeService {
for (_id, path) in expired {
if let Err(e) = tokio::fs::remove_dir_all(&path).await {
tracing::error!("failed to remove expired transcode cache directory: {}", e);
tracing::error!(
"failed to remove expired transcode cache directory: {}",
e
);
}
}
}
/// Get a session by ID from the in-memory store.
pub async fn get_session(&self, session_id: Uuid) -> Option<TranscodeSession> {
pub async fn get_session(
&self,
session_id: Uuid,
) -> Option<TranscodeSession> {
let sessions = self.sessions.read().await;
sessions.get(&session_id).cloned()
}
@ -348,20 +365,28 @@ impl TranscodeService {
.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_default();
if safe_name.is_empty() || safe_name.contains('\0') || safe_name.starts_with('.') {
if safe_name.is_empty()
|| safe_name.contains('\0')
|| safe_name.starts_with('.')
{
// Return a non-existent path that will fail safely
return self
.cache_dir()
.join(session_id.to_string())
.join("__invalid__");
}
self.cache_dir()
self
.cache_dir()
.join(session_id.to_string())
.join(safe_name)
}
/// Find a session for a given media_id and profile.
pub async fn find_session(&self, media_id: MediaId, profile: &str) -> Option<TranscodeSession> {
pub async fn find_session(
&self,
media_id: MediaId,
profile: &str,
) -> Option<TranscodeSession> {
let sessions = self.sessions.read().await;
sessions
.values()
@ -440,8 +465,10 @@ async fn run_ffmpeg(
duration_secs: Option<f64>,
cancel: Arc<tokio::sync::Notify>,
) -> Result<(), crate::error::PinakesError> {
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio::{
io::{AsyncBufReadExt, BufReader},
process::Command,
};
let mut child = Command::new("ffmpeg")
.args(args)
@ -449,7 +476,10 @@ async fn run_ffmpeg(
.stderr(std::process::Stdio::piped())
.spawn()
.map_err(|e| {
crate::error::PinakesError::InvalidOperation(format!("failed to spawn ffmpeg: {}", e))
crate::error::PinakesError::InvalidOperation(format!(
"failed to spawn ffmpeg: {}",
e
))
})?;
// Capture stderr in a spawned task for error reporting
@ -486,7 +516,7 @@ async fn run_ffmpeg(
_ => {
// Duration unknown; don't update progress
continue;
}
},
};
let mut s = sessions.write().await;
if let Some(sess) = s.get_mut(&session_id) {

View file

@ -3,19 +3,20 @@
//! Handles file uploads, metadata extraction, and MediaItem creation
//! for files stored in managed content-addressable storage.
use std::collections::HashMap;
use std::path::Path;
use std::{collections::HashMap, path::Path};
use chrono::Utc;
use tokio::io::AsyncRead;
use tracing::{debug, info};
use crate::error::{PinakesError, Result};
use crate::managed_storage::ManagedStorageService;
use crate::media_type::MediaType;
use crate::metadata;
use crate::model::{MediaId, MediaItem, StorageMode, UploadResult};
use crate::storage::DynStorageBackend;
use crate::{
error::{PinakesError, Result},
managed_storage::ManagedStorageService,
media_type::MediaType,
metadata,
model::{MediaId, MediaItem, StorageMode, UploadResult},
storage::DynStorageBackend,
};
/// Process an upload from an async reader.
///
@ -53,7 +54,8 @@ pub async fn process_upload<R: AsyncRead + Unpin>(
let blob_path = managed.path(&content_hash);
// Extract metadata
let extracted = metadata::extract_metadata(&blob_path, media_type.clone()).ok();
let extracted =
metadata::extract_metadata(&blob_path, media_type.clone()).ok();
// Create or get blob record
let mime = mime_type
@ -146,7 +148,8 @@ pub async fn process_upload_file(
let reader = tokio::io::BufReader::new(file);
let filename = original_filename.unwrap_or_else(|| {
path.file_name()
path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("unknown")
});

View file

@ -1,12 +1,15 @@
//! User management and authentication
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid;
use crate::config::UserRole;
use crate::error::{PinakesError, Result};
use crate::{
config::UserRole,
error::{PinakesError, Result},
};
/// User ID
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@ -149,7 +152,9 @@ pub mod auth {
argon2
.hash_password(password.as_bytes(), &salt)
.map(|hash| hash.to_string())
.map_err(|e| PinakesError::Authentication(format!("failed to hash password: {e}")))
.map_err(|e| {
PinakesError::Authentication(format!("failed to hash password: {e}"))
})
}
/// Verify a password against a hash
@ -159,12 +164,15 @@ pub mod auth {
password_hash::{PasswordHash, PasswordVerifier},
};
let parsed_hash = PasswordHash::new(hash)
.map_err(|e| PinakesError::Authentication(format!("invalid password hash: {e}")))?;
let parsed_hash = PasswordHash::new(hash).map_err(|e| {
PinakesError::Authentication(format!("invalid password hash: {e}"))
})?;
Ok(Argon2::default()
Ok(
Argon2::default()
.verify_password(password.as_bytes(), &parsed_hash)
.is_ok())
.is_ok(),
)
}
}

View file

@ -1,8 +1,12 @@
use pinakes_core::books::{extract_isbn_from_text, normalize_isbn, parse_author_file_as};
use pinakes_core::enrichment::books::BookEnricher;
use pinakes_core::enrichment::googlebooks::GoogleBooksClient;
use pinakes_core::enrichment::openlibrary::OpenLibraryClient;
use pinakes_core::thumbnail::{CoverSize, extract_epub_cover, generate_book_covers};
use pinakes_core::{
books::{extract_isbn_from_text, normalize_isbn, parse_author_file_as},
enrichment::{
books::BookEnricher,
googlebooks::GoogleBooksClient,
openlibrary::OpenLibraryClient,
},
thumbnail::{CoverSize, extract_epub_cover, generate_book_covers},
};
#[test]
fn test_isbn_normalization() {
@ -138,7 +142,8 @@ fn test_book_cover_generation() {
use image::{ImageBuffer, Rgb};
let img: ImageBuffer<Rgb<u8>, Vec<u8>> =
ImageBuffer::from_fn(100, 100, |_, _| Rgb([255u8, 0u8, 0u8]));
img.write_to(
img
.write_to(
&mut std::io::Cursor::new(&mut img_data),
image::ImageFormat::Png,
)
@ -174,10 +179,10 @@ async fn test_openlibrary_isbn_fetch() {
match result {
Ok(book) => {
assert!(book.title.is_some());
}
},
Err(_) => {
// Network error or book not found - acceptable in tests
}
},
}
}
@ -195,9 +200,9 @@ async fn test_googlebooks_isbn_fetch() {
if !books.is_empty() {
assert!(books[0].volume_info.title.is_some());
}
}
},
Err(_) => {
// Network error - acceptable in tests
}
},
}
}

View file

@ -1,10 +1,10 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
use pinakes_core::model::{ContentHash, MediaId, MediaItem, StorageMode};
use pinakes_core::storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend};
use pinakes_core::{
media_type::{BuiltinMediaType, MediaType},
model::{ContentHash, MediaId, MediaItem, StorageMode},
storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend},
};
use tempfile::TempDir;
use uuid::Uuid;

View file

@ -1,9 +1,9 @@
use std::collections::HashMap;
use std::sync::Arc;
use std::{collections::HashMap, sync::Arc};
use pinakes_core::model::*;
use pinakes_core::storage::StorageBackend;
use pinakes_core::storage::sqlite::SqliteBackend;
use pinakes_core::{
model::*,
storage::{StorageBackend, sqlite::SqliteBackend},
};
mod common;
use common::{make_test_media, setup};
@ -164,7 +164,12 @@ async fn test_collections() {
let storage = setup().await;
let col = storage
.create_collection("Favorites", CollectionKind::Manual, Some("My faves"), None)
.create_collection(
"Favorites",
CollectionKind::Manual,
Some("My faves"),
None,
)
.await
.unwrap();
assert_eq!(col.name, "Favorites");
@ -314,7 +319,9 @@ async fn test_search() {
id: MediaId::new(),
path: format!("/tmp/{name}").into(),
file_name: name.to_string(),
media_type: pinakes_core::media_type::MediaType::from_path(std::path::Path::new(name))
media_type: pinakes_core::media_type::MediaType::from_path(
std::path::Path::new(name),
)
.unwrap(),
content_hash: ContentHash::new(format!("hash{i}")),
file_size: 1000 * (i as u64 + 1),
@ -504,8 +511,7 @@ async fn test_library_statistics_with_data() {
assert!(stats.oldest_item.is_some());
}
// ===== Phase 2: Media Server Features =====
// Media Server Features
#[tokio::test]
async fn test_ratings_crud() {
let storage = setup().await;

View file

@ -34,7 +34,8 @@ async fn test_detect_untracked_files() {
fs::write(&tracked_file, b"tracked content").unwrap();
fs::write(&untracked_file, b"untracked content").unwrap();
let tracked_item = create_test_media_item(tracked_file.clone(), "hash_tracked");
let tracked_item =
create_test_media_item(tracked_file.clone(), "hash_tracked");
storage.insert_media(&tracked_item).await.unwrap();
let report = detect_orphans(&storage).await.unwrap();
@ -117,20 +118,23 @@ async fn test_complete_orphan_workflow() {
storage.add_root_dir(root_dir.clone()).await.unwrap();
let orphaned_path = root_dir.join("orphaned.mp3");
let orphaned_item = create_test_media_item(orphaned_path.clone(), "hash_orphaned");
let orphaned_item =
create_test_media_item(orphaned_path.clone(), "hash_orphaned");
storage.insert_media(&orphaned_item).await.unwrap();
let untracked_path = root_dir.join("untracked.mp3");
fs::write(&untracked_path, b"untracked").unwrap();
let another_orphaned = root_dir.join("another_orphaned.mp3");
let another_item = create_test_media_item(another_orphaned.clone(), "hash_another");
let another_item =
create_test_media_item(another_orphaned.clone(), "hash_another");
storage.insert_media(&another_item).await.unwrap();
let tracked_path = root_dir.join("tracked.mp3");
fs::write(&tracked_path, b"tracked").unwrap();
let tracked_item = create_test_media_item(tracked_path.clone(), "hash_tracked");
let tracked_item =
create_test_media_item(tracked_path.clone(), "hash_tracked");
storage.insert_media(&tracked_item).await.unwrap();
let report = detect_orphans(&storage).await.unwrap();

View file

@ -1,6 +1,4 @@
use pinakes_core::links::extract_links;
use pinakes_core::model::*;
use pinakes_core::storage::StorageBackend;
use pinakes_core::{links::extract_links, model::*, storage::StorageBackend};
mod common;
@ -138,8 +136,8 @@ async fn test_save_links_concurrent_updates() {
let links1 = extract_links(note1_id, "[[target1]]");
let links2 = extract_links(note2_id, "[[target2]] [[target3]]");
// Execute both saves. We do so in sequence since we can't test true concurrency easily
// ...or so I think. Database tests are annoying.
// Execute both saves. We do so in sequence since we can't test true
// concurrency easily ...or so I think. Database tests are annoying.
storage
.save_markdown_links(note1_id, &links1)
.await

View file

@ -2,13 +2,15 @@ use chrono::Utc;
use pinakes_core::storage::{SessionData, StorageBackend};
use tempfile::TempDir;
async fn setup_sqlite_storage() -> pinakes_core::storage::sqlite::SqliteBackend {
async fn setup_sqlite_storage() -> pinakes_core::storage::sqlite::SqliteBackend
{
let temp_dir = TempDir::new().unwrap();
let db_path = temp_dir
.path()
.join(format!("test_{}.db", uuid::Uuid::now_v7()));
let storage = pinakes_core::storage::sqlite::SqliteBackend::new(&db_path).unwrap();
let storage =
pinakes_core::storage::sqlite::SqliteBackend::new(&db_path).unwrap();
storage.run_migrations().await.unwrap();
// Keep temp_dir alive by leaking it (tests are short-lived anyway)

View file

@ -1,12 +1,16 @@
//! Pinakes Plugin API
//!
//! This crate defines the stable plugin interface for Pinakes.
//! Plugins can extend Pinakes by implementing one or more of the provided traits.
//! Plugins can extend Pinakes by implementing one or more of the provided
//! traits.
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use thiserror::Error;
pub mod manifest;
@ -115,7 +119,8 @@ pub struct EnvironmentCapability {
/// Whether environment variable access is allowed
pub enabled: bool,
/// Specific environment variables allowed (if None, all allowed when enabled)
/// Specific environment variables allowed (if None, all allowed when
/// enabled)
pub allowed_vars: Option<Vec<String>>,
}
@ -162,7 +167,11 @@ pub trait MediaTypeProvider: Plugin {
fn supported_media_types(&self) -> Vec<MediaTypeDefinition>;
/// Check if this plugin can handle the given file
async fn can_handle(&self, path: &Path, mime_type: Option<&str>) -> PluginResult<bool>;
async fn can_handle(
&self,
path: &Path,
mime_type: Option<&str>,
) -> PluginResult<bool>;
}
/// Definition of a custom media type
@ -191,7 +200,10 @@ pub struct MediaTypeDefinition {
#[async_trait]
pub trait MetadataExtractor: Plugin {
/// Extract metadata from a file
async fn extract_metadata(&self, path: &Path) -> PluginResult<ExtractedMetadata>;
async fn extract_metadata(
&self,
path: &Path,
) -> PluginResult<ExtractedMetadata>;
/// Get the media types this extractor supports
fn supported_types(&self) -> Vec<String>;
@ -268,7 +280,10 @@ pub trait SearchBackend: Plugin {
async fn remove_item(&self, item_id: &str) -> PluginResult<()>;
/// Perform a search query
async fn search(&self, query: &SearchQuery) -> PluginResult<Vec<SearchResult>>;
async fn search(
&self,
query: &SearchQuery,
) -> PluginResult<Vec<SearchResult>>;
/// Get search statistics
async fn get_stats(&self) -> PluginResult<SearchStats>;

View file

@ -1,11 +1,16 @@
//! Plugin manifest parsing and validation
use std::{collections::HashMap, path::Path};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::Path;
use thiserror::Error;
use crate::{Capabilities, EnvironmentCapability, FilesystemCapability, NetworkCapability};
use crate::{
Capabilities,
EnvironmentCapability,
FilesystemCapability,
NetworkCapability,
};
/// Plugin manifest file format (TOML)
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,8 +1,9 @@
//! Shared types used across the plugin API
use serde::{Deserialize, Serialize};
use std::fmt;
use serde::{Deserialize, Serialize};
/// Plugin identifier
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PluginId(String);

View file

@ -1,8 +1,9 @@
//! WASM bridge types and helpers for plugin communication
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
/// Memory allocation info for passing data between host and plugin
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WasmMemoryAlloc {
@ -119,19 +120,28 @@ pub mod helpers {
}
/// Deserialize bytes from WASM to a value
pub fn deserialize<T: for<'de> Deserialize<'de>>(bytes: &[u8]) -> Result<T, String> {
serde_json::from_slice(bytes).map_err(|e| format!("Deserialization error: {}", e))
pub fn deserialize<T: for<'de> Deserialize<'de>>(
bytes: &[u8],
) -> Result<T, String> {
serde_json::from_slice(bytes)
.map_err(|e| format!("Deserialization error: {}", e))
}
/// Create a success response
pub fn ok_response<T: Serialize>(request_id: String, value: &T) -> Result<Vec<u8>, String> {
pub fn ok_response<T: Serialize>(
request_id: String,
value: &T,
) -> Result<Vec<u8>, String> {
let result = WasmResult::Ok(serialize(value)?);
let response = PluginResponse { request_id, result };
serialize(&response)
}
/// Create an error response
pub fn error_response(request_id: String, error: String) -> Result<Vec<u8>, String> {
pub fn error_response(
request_id: String,
error: String,
) -> Result<Vec<u8>, String> {
let result = WasmResult::<Vec<u8>>::Err(error);
let response = PluginResponse { request_id, result };
serialize(&response)
@ -140,8 +150,7 @@ pub mod helpers {
#[cfg(test)]
mod tests {
use super::helpers::*;
use super::*;
use super::{helpers::*, *};
#[test]
fn test_serialize_deserialize() {
@ -164,7 +173,7 @@ mod tests {
WasmResult::Ok(data) => {
let recovered: String = deserialize(&data).unwrap();
assert_eq!(recovered, value);
}
},
WasmResult::Err(_) => panic!("Expected Ok result"),
}
}
@ -173,7 +182,8 @@ mod tests {
fn test_error_response() {
let request_id = "test-456".to_string();
let error_msg = "Something went wrong";
let response_bytes = error_response(request_id.clone(), error_msg.to_string()).unwrap();
let response_bytes =
error_response(request_id.clone(), error_msg.to_string()).unwrap();
let response: PluginResponse = deserialize(&response_bytes).unwrap();
assert_eq!(response.request_id, request_id);

View file

@ -1,13 +1,30 @@
use std::{collections::HashMap, path::PathBuf};
use async_trait::async_trait;
use pinakes_plugin_api::wasm::{HttpRequest, HttpResponse, LogLevel, LogMessage};
use pinakes_plugin_api::{
Capabilities, EnvironmentCapability, Event, EventType, ExtractedMetadata, FilesystemCapability,
HealthStatus, MediaTypeDefinition, NetworkCapability, Plugin, PluginContext, PluginError,
PluginMetadata, PluginResult, SearchIndexItem, SearchQuery, SearchResult, SearchStats,
ThumbnailFormat, ThumbnailInfo, ThumbnailOptions,
Capabilities,
EnvironmentCapability,
Event,
EventType,
ExtractedMetadata,
FilesystemCapability,
HealthStatus,
MediaTypeDefinition,
NetworkCapability,
Plugin,
PluginContext,
PluginError,
PluginMetadata,
PluginResult,
SearchIndexItem,
SearchQuery,
SearchResult,
SearchStats,
ThumbnailFormat,
ThumbnailInfo,
ThumbnailOptions,
wasm::{HttpRequest, HttpResponse, LogLevel, LogMessage},
};
use std::collections::HashMap;
use std::path::PathBuf;
struct TestPlugin {
initialized: bool,
@ -227,7 +244,8 @@ async fn test_thumbnail_options_serialization() {
};
let serialized = serde_json::to_string(&options).unwrap();
let deserialized: ThumbnailOptions = serde_json::from_str(&serialized).unwrap();
let deserialized: ThumbnailOptions =
serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized.width, 320);
assert_eq!(deserialized.height, 240);
@ -249,7 +267,8 @@ async fn test_thumbnail_format_variants() {
format,
};
let serialized = serde_json::to_string(&options).unwrap();
let deserialized: ThumbnailOptions = serde_json::from_str(&serialized).unwrap();
let deserialized: ThumbnailOptions =
serde_json::from_str(&serialized).unwrap();
assert!(matches!(deserialized.format, _));
}
}
@ -350,7 +369,10 @@ async fn test_http_request_serialization() {
async fn test_http_response_serialization() {
let response = HttpResponse {
status: 200,
headers: HashMap::from([("Content-Type".to_string(), "application/json".to_string())]),
headers: HashMap::from([(
"Content-Type".to_string(),
"application/json".to_string(),
)]),
body: b"{\"success\": true}".to_vec(),
};
@ -401,7 +423,9 @@ async fn test_log_level_variants() {
async fn test_plugin_error_variants() {
let errors: Vec<PluginError> = vec![
PluginError::InitializationFailed("WASM load failed".to_string()),
PluginError::UnsupportedOperation("Custom search not implemented".to_string()),
PluginError::UnsupportedOperation(
"Custom search not implemented".to_string(),
),
PluginError::InvalidInput("Invalid file path".to_string()),
PluginError::IoError("File not found".to_string()),
PluginError::MetadataExtractionFailed("Parse error".to_string()),
@ -439,7 +463,8 @@ async fn test_search_index_item_serialization() {
};
let serialized = serde_json::to_string(&item).unwrap();
let deserialized: SearchIndexItem = serde_json::from_str(&serialized).unwrap();
let deserialized: SearchIndexItem =
serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized.id, "media-456");
assert_eq!(deserialized.title, Some("Summer Vacation".to_string()));

View file

@ -1,6 +1,7 @@
use pinakes_plugin_api::PluginManifest;
use std::path::PathBuf;
use pinakes_plugin_api::PluginManifest;
#[test]
fn test_markdown_metadata_manifest() {
let manifest_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
@ -46,10 +47,11 @@ fn test_heif_support_manifest() {
assert_eq!(manifest.plugin.name, "heif-support");
assert_eq!(manifest.plugin.version, "1.0.0");
assert_eq!(manifest.plugin.api_version, "1.0");
assert_eq!(
manifest.plugin.kind,
vec!["media_type", "metadata_extractor", "thumbnail_generator"]
);
assert_eq!(manifest.plugin.kind, vec![
"media_type",
"metadata_extractor",
"thumbnail_generator"
]);
assert_eq!(manifest.plugin.binary.wasm, "heif_support.wasm");
// Validate capabilities

View file

@ -1,20 +1,21 @@
use std::sync::Arc;
use axum::Router;
use axum::extract::DefaultBodyLimit;
use axum::http::{HeaderValue, Method, header};
use axum::middleware;
use axum::routing::{delete, get, patch, post, put};
use axum::{
Router,
extract::DefaultBodyLimit,
http::{HeaderValue, Method, header},
middleware,
routing::{delete, get, patch, post, put},
};
use tower::ServiceBuilder;
use tower_governor::GovernorLayer;
use tower_governor::governor::GovernorConfigBuilder;
use tower_http::cors::CorsLayer;
use tower_http::set_header::SetResponseHeaderLayer;
use tower_http::trace::TraceLayer;
use tower_governor::{GovernorLayer, governor::GovernorConfigBuilder};
use tower_http::{
cors::CorsLayer,
set_header::SetResponseHeaderLayer,
trace::TraceLayer,
};
use crate::auth;
use crate::routes;
use crate::state::AppState;
use crate::{auth, routes, state::AppState};
/// Create the router with optional TLS configuration for HSTS headers
pub fn create_router(state: AppState) -> Router {
@ -53,7 +54,8 @@ pub fn create_router_with_tls(
.unwrap(),
);
// Rate limit for streaming: 5 requests per IP (very restrictive for concurrent streams)
// Rate limit for streaming: 5 requests per IP (very restrictive for
// concurrent streams)
let stream_governor = Arc::new(
GovernorConfigBuilder::default()
.per_second(60) // replenish slowly (one per minute)
@ -575,8 +577,10 @@ pub fn create_router_with_tls(
// Add HSTS header when TLS is enabled
if let Some(tls) = tls_config {
if tls.enabled && tls.hsts_enabled {
let hsts_value = format!("max-age={}; includeSubDomains", tls.hsts_max_age);
let hsts_header = HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| {
let hsts_value =
format!("max-age={}; includeSubDomains", tls.hsts_max_age);
let hsts_header =
HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| {
HeaderValue::from_static("max-age=31536000; includeSubDomains")
});

View file

@ -1,8 +1,9 @@
use axum::extract::{Request, State};
use axum::http::StatusCode;
use axum::middleware::Next;
use axum::response::{IntoResponse, Response};
use axum::{
extract::{Request, State},
http::StatusCode,
middleware::Next,
response::{IntoResponse, Response},
};
use pinakes_core::config::UserRole;
use crate::state::AppState;
@ -21,9 +22,10 @@ fn constant_time_eq(a: &str, b: &str) -> bool {
/// Axum middleware that checks for a valid Bearer token.
///
/// If `accounts.enabled == true`: look up bearer token in database session store.
/// If `accounts.enabled == false`: use existing api_key logic (unchanged behavior).
/// Skips authentication for the `/health` and `/auth/login` path suffixes.
/// If `accounts.enabled == true`: look up bearer token in database session
/// store. If `accounts.enabled == false`: use existing api_key logic (unchanged
/// behavior). Skips authentication for the `/health` and `/auth/login` path
/// suffixes.
pub async fn require_auth(
State(state): State<AppState>,
mut request: Request,
@ -70,11 +72,12 @@ pub async fn require_auth(
Ok(None) => {
tracing::debug!(path = %path, "rejected: invalid session token");
return unauthorized("invalid or expired session token");
}
},
Err(e) => {
tracing::error!(error = %e, "failed to query session from database");
return (StatusCode::INTERNAL_SERVER_ERROR, "database error").into_response();
}
return (StatusCode::INTERNAL_SERVER_ERROR, "database error")
.into_response();
},
};
// Check session expiry
@ -110,7 +113,7 @@ pub async fn require_auth(
_ => {
tracing::warn!(role = %session.role, "unknown role, defaulting to viewer");
UserRole::Viewer
}
},
};
// Inject role and username into request extensions
@ -130,7 +133,9 @@ pub async fn require_auth(
if expected_key.is_empty() {
// Empty key is not allowed - must use authentication_disabled flag
tracing::error!("empty api_key rejected, use authentication_disabled flag instead");
tracing::error!(
"empty api_key rejected, use authentication_disabled flag instead"
);
return unauthorized("authentication not properly configured");
}
@ -146,12 +151,13 @@ pub async fn require_auth(
tracing::warn!(path = %path, "rejected: invalid API key");
return unauthorized("invalid api key");
}
}
},
_ => {
return unauthorized(
"missing or malformed Authorization header, expected: Bearer <api_key>",
"missing or malformed Authorization header, expected: Bearer \
<api_key>",
);
}
},
}
// API key matches, grant admin
@ -202,9 +208,11 @@ pub async fn resolve_user_id(
Err(e) => {
tracing::warn!(username = %username, error = ?e, "failed to resolve user");
Err(crate::error::ApiError(
pinakes_core::error::PinakesError::Authentication("user not found".into()),
pinakes_core::error::PinakesError::Authentication(
"user not found".into(),
),
))
}
},
}
}

View file

@ -1,5 +1,4 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::{collections::HashMap, path::PathBuf};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@ -446,7 +445,9 @@ pub struct TypeCountResponse {
pub count: u64,
}
impl From<pinakes_core::storage::LibraryStatistics> for LibraryStatisticsResponse {
impl From<pinakes_core::storage::LibraryStatistics>
for LibraryStatisticsResponse
{
fn from(stats: pinakes_core::storage::LibraryStatistics) -> Self {
Self {
total_media: stats.total_media,
@ -545,13 +546,10 @@ impl From<pinakes_core::model::MediaItem> for MediaResponse {
.custom_fields
.into_iter()
.map(|(k, v)| {
(
k,
CustomFieldResponse {
(k, CustomFieldResponse {
field_type: format!("{:?}", v.field_type).to_lowercase(),
value: v.value,
},
)
})
})
.collect(),
@ -983,7 +981,9 @@ pub struct ExternalMetadataResponse {
pub last_updated: DateTime<Utc>,
}
impl From<pinakes_core::enrichment::ExternalMetadata> for ExternalMetadataResponse {
impl From<pinakes_core::enrichment::ExternalMetadata>
for ExternalMetadataResponse
{
fn from(m: pinakes_core::enrichment::ExternalMetadata) -> Self {
let metadata = serde_json::from_str(&m.metadata_json).unwrap_or_else(|e| {
tracing::warn!(
@ -1018,7 +1018,9 @@ pub struct TranscodeSessionResponse {
pub expires_at: Option<DateTime<Utc>>,
}
impl From<pinakes_core::transcode::TranscodeSession> for TranscodeSessionResponse {
impl From<pinakes_core::transcode::TranscodeSession>
for TranscodeSessionResponse
{
fn from(s: pinakes_core::transcode::TranscodeSession) -> Self {
Self {
id: s.id.to_string(),
@ -1066,7 +1068,9 @@ pub struct ManagedStorageStatsResponse {
pub deduplication_ratio: f64,
}
impl From<pinakes_core::model::ManagedStorageStats> for ManagedStorageStatsResponse {
impl From<pinakes_core::model::ManagedStorageStats>
for ManagedStorageStatsResponse
{
fn from(stats: pinakes_core::model::ManagedStorageStats) -> Self {
Self {
total_blobs: stats.total_blobs,
@ -1321,7 +1325,9 @@ pub struct SharePermissionsResponse {
pub can_add: bool,
}
impl From<pinakes_core::sharing::SharePermissions> for SharePermissionsResponse {
impl From<pinakes_core::sharing::SharePermissions>
for SharePermissionsResponse
{
fn from(p: pinakes_core::sharing::SharePermissions) -> Self {
Self {
can_view: p.can_view,
@ -1339,32 +1345,32 @@ impl From<pinakes_core::sharing::Share> for ShareResponse {
let (target_type, target_id) = match &s.target {
pinakes_core::sharing::ShareTarget::Media { media_id } => {
("media".to_string(), media_id.0.to_string())
}
},
pinakes_core::sharing::ShareTarget::Collection { collection_id } => {
("collection".to_string(), collection_id.to_string())
}
},
pinakes_core::sharing::ShareTarget::Tag { tag_id } => {
("tag".to_string(), tag_id.to_string())
}
},
pinakes_core::sharing::ShareTarget::SavedSearch { search_id } => {
("saved_search".to_string(), search_id.to_string())
}
},
};
let (recipient_type, recipient_user_id, recipient_group_id, public_token) =
match &s.recipient {
pinakes_core::sharing::ShareRecipient::PublicLink { token, .. } => {
("public_link".to_string(), None, None, Some(token.clone()))
}
},
pinakes_core::sharing::ShareRecipient::User { user_id } => {
("user".to_string(), Some(user_id.0.to_string()), None, None)
}
},
pinakes_core::sharing::ShareRecipient::Group { group_id } => {
("group".to_string(), None, Some(group_id.to_string()), None)
}
},
pinakes_core::sharing::ShareRecipient::Federated { .. } => {
("federated".to_string(), None, None, None)
}
},
};
Self {
@ -1430,7 +1436,9 @@ pub struct ShareNotificationResponse {
pub created_at: DateTime<Utc>,
}
impl From<pinakes_core::sharing::ShareNotification> for ShareNotificationResponse {
impl From<pinakes_core::sharing::ShareNotification>
for ShareNotificationResponse
{
fn from(n: pinakes_core::sharing::ShareNotification) -> Self {
Self {
id: n.id.to_string(),

View file

@ -1,5 +1,7 @@
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use serde::Serialize;
#[derive(Debug, Serialize)]
@ -22,9 +24,11 @@ impl IntoResponse for ApiError {
.unwrap_or_else(|| "unknown".to_string());
tracing::debug!(path = %path.display(), "file not found");
(StatusCode::NOT_FOUND, format!("file not found: {name}"))
}
},
PinakesError::TagNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::CollectionNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::CollectionNotFound(msg) => {
(StatusCode::NOT_FOUND, msg.clone())
},
PinakesError::DuplicateHash(msg) => (StatusCode::CONFLICT, msg.clone()),
PinakesError::UnsupportedMediaType(path) => {
let name = path
@ -35,10 +39,14 @@ impl IntoResponse for ApiError {
StatusCode::BAD_REQUEST,
format!("unsupported media type: {name}"),
)
}
},
PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::InvalidOperation(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::Authentication(msg) => (StatusCode::UNAUTHORIZED, msg.clone()),
PinakesError::InvalidOperation(msg) => {
(StatusCode::BAD_REQUEST, msg.clone())
},
PinakesError::Authentication(msg) => {
(StatusCode::UNAUTHORIZED, msg.clone())
},
PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()),
PinakesError::Config(_) => {
tracing::error!(error = %self.0, "configuration error");
@ -46,14 +54,14 @@ impl IntoResponse for ApiError {
StatusCode::INTERNAL_SERVER_ERROR,
"internal configuration error".to_string(),
)
}
},
_ => {
tracing::error!(error = %self.0, "internal server error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"internal server error".to_string(),
)
}
},
};
let body = serde_json::to_string(&ErrorResponse {

View file

@ -1,21 +1,14 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::{path::PathBuf, sync::Arc};
use anyhow::Result;
use axum::Router;
use axum::response::Redirect;
use axum::routing::any;
use axum::{Router, response::Redirect, routing::any};
use clap::Parser;
use pinakes_core::{config::Config, storage::StorageBackend};
use pinakes_server::{app, state::AppState};
use tokio::sync::RwLock;
use tracing::info;
use tracing_subscriber::EnvFilter;
use pinakes_core::config::Config;
use pinakes_core::storage::StorageBackend;
use pinakes_server::app;
use pinakes_server::state::AppState;
/// Pinakes media cataloging server
#[derive(Parser)]
#[command(name = "pinakes-server", version, about)]
@ -66,7 +59,8 @@ async fn main() -> Result<()> {
let cli = Cli::parse();
// Initialize logging
let env_filter = EnvFilter::try_new(&cli.log_level).unwrap_or_else(|_| EnvFilter::new("info"));
let env_filter = EnvFilter::try_new(&cli.log_level)
.unwrap_or_else(|_| EnvFilter::new("info"));
match cli.log_format.as_str() {
"json" => {
@ -74,22 +68,22 @@ async fn main() -> Result<()> {
.with_env_filter(env_filter)
.json()
.init();
}
},
"pretty" => {
tracing_subscriber::fmt()
.with_env_filter(env_filter)
.pretty()
.init();
}
},
"full" => {
tracing_subscriber::fmt().with_env_filter(env_filter).init();
}
},
_ => {
tracing_subscriber::fmt()
.with_env_filter(env_filter)
.compact()
.init();
}
},
}
let (config_path, was_explicit) = resolve_config_path(cli.config.as_deref());
@ -98,7 +92,8 @@ async fn main() -> Result<()> {
info!(path = %config_path.display(), "loading configuration from file");
Config::from_file(&config_path)?
} else if was_explicit {
// User explicitly provided a config path that doesn't exist - this is an error
// User explicitly provided a config path that doesn't exist - this is an
// error
return Err(anyhow::anyhow!(
"configuration file not found: {}",
config_path.display()
@ -118,9 +113,12 @@ async fn main() -> Result<()> {
// Warn about authentication configuration
if config.server.authentication_disabled {
tracing::warn!(
"⚠️ AUTHENTICATION IS DISABLED - All requests will be allowed without authentication!"
"⚠️ AUTHENTICATION IS DISABLED - All requests will be allowed without \
authentication!"
);
tracing::warn!(
"⚠️ This is INSECURE and should only be used for development."
);
tracing::warn!("⚠️ This is INSECURE and should only be used for development.");
} else {
let has_api_key = config
.server
@ -142,7 +140,10 @@ async fn main() -> Result<()> {
}
// Storage backend initialization
let storage: pinakes_core::storage::DynStorageBackend = match config.storage.backend {
let storage: pinakes_core::storage::DynStorageBackend = match config
.storage
.backend
{
pinakes_core::config::StorageBackendType::Sqlite => {
let sqlite_config = config.storage.sqlite.as_ref().ok_or_else(|| {
anyhow::anyhow!(
@ -150,21 +151,25 @@ async fn main() -> Result<()> {
)
})?;
info!(path = %sqlite_config.path.display(), "initializing sqlite storage");
let backend = pinakes_core::storage::sqlite::SqliteBackend::new(&sqlite_config.path)?;
let backend =
pinakes_core::storage::sqlite::SqliteBackend::new(&sqlite_config.path)?;
backend.run_migrations().await?;
Arc::new(backend)
}
},
pinakes_core::config::StorageBackendType::Postgres => {
let pg_config = config.storage.postgres.as_ref().ok_or_else(|| {
anyhow::anyhow!(
"postgres storage selected but [storage.postgres] config section missing"
"postgres storage selected but [storage.postgres] config section \
missing"
)
})?;
info!(host = %pg_config.host, port = pg_config.port, database = %pg_config.database, "initializing postgres storage");
let backend = pinakes_core::storage::postgres::PostgresBackend::new(pg_config).await?;
let backend =
pinakes_core::storage::postgres::PostgresBackend::new(pg_config)
.await?;
backend.run_migrations().await?;
Arc::new(backend)
}
},
};
if cli.migrate_only {
@ -188,8 +193,12 @@ async fn main() -> Result<()> {
let watch_dirs = config.directories.roots.clone();
let watch_ignore = config.scanning.ignore_patterns.clone();
tokio::spawn(async move {
if let Err(e) =
pinakes_core::scan::watch_and_import(watch_storage, watch_dirs, watch_ignore).await
if let Err(e) = pinakes_core::scan::watch_and_import(
watch_storage,
watch_dirs,
watch_ignore,
)
.await
{
tracing::error!(error = %e, "filesystem watcher failed");
}
@ -200,8 +209,9 @@ async fn main() -> Result<()> {
let addr = format!("{}:{}", config.server.host, config.server.port);
// Initialize transcode service early so the job queue can reference it
let transcode_service: Option<Arc<pinakes_core::transcode::TranscodeService>> =
if config.transcoding.enabled {
let transcode_service: Option<
Arc<pinakes_core::transcode::TranscodeService>,
> = if config.transcoding.enabled {
Some(Arc::new(pinakes_core::transcode::TranscodeService::new(
config.transcoding.clone(),
)))
@ -257,12 +267,12 @@ async fn main() -> Result<()> {
}),
)
.await;
}
},
Err(e) => {
JobQueue::fail(&jobs, job_id, e.to_string()).await;
},
}
}
}
},
JobKind::GenerateThumbnails { media_ids } => {
let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir();
let thumb_config = config.thumbnails.clone();
@ -299,12 +309,12 @@ async fn main() -> Result<()> {
updated.thumbnail_path = Some(path);
let _ = storage.update_media(&updated).await;
generated += 1;
}
Ok(Ok(None)) => {}
},
Ok(Ok(None)) => {},
Ok(Err(e)) => errors.push(format!("{}: {}", mid, e)),
Err(e) => errors.push(format!("{}: {}", mid, e)),
}
}
},
Err(e) => errors.push(format!("{}: {}", mid, e)),
}
}
@ -316,14 +326,15 @@ async fn main() -> Result<()> {
}),
)
.await;
}
},
JobKind::VerifyIntegrity { media_ids } => {
let ids = if media_ids.is_empty() {
None
} else {
Some(media_ids.as_slice())
};
match pinakes_core::integrity::verify_integrity(&storage, ids).await {
match pinakes_core::integrity::verify_integrity(&storage, ids).await
{
Ok(report) => {
JobQueue::complete(
&jobs,
@ -331,10 +342,10 @@ async fn main() -> Result<()> {
serde_json::to_value(&report).unwrap_or_default(),
)
.await;
}
},
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
},
JobKind::OrphanDetection => {
match pinakes_core::integrity::detect_orphans(&storage).await {
Ok(report) => {
@ -344,10 +355,10 @@ async fn main() -> Result<()> {
serde_json::to_value(&report).unwrap_or_default(),
)
.await;
}
},
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
},
JobKind::CleanupThumbnails => {
let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir();
match pinakes_core::integrity::cleanup_orphaned_thumbnails(
@ -362,15 +373,19 @@ async fn main() -> Result<()> {
serde_json::json!({ "removed": removed }),
)
.await;
}
},
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
},
JobKind::Export {
format,
destination,
} => {
match pinakes_core::export::export_library(&storage, &format, &destination)
match pinakes_core::export::export_library(
&storage,
&format,
&destination,
)
.await
{
Ok(result) => {
@ -380,10 +395,10 @@ async fn main() -> Result<()> {
serde_json::to_value(&result).unwrap_or_default(),
)
.await;
}
},
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
},
JobKind::Transcode { media_id, profile } => {
if let Some(ref svc) = transcode_svc {
match storage.get_media(media_id).await {
@ -405,19 +420,23 @@ async fn main() -> Result<()> {
serde_json::json!({"session_id": session_id.to_string()}),
)
.await;
}
},
Err(e) => {
JobQueue::fail(&jobs, job_id, e.to_string()).await
},
}
}
}
},
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
} else {
JobQueue::fail(&jobs, job_id, "transcoding is not enabled".to_string())
JobQueue::fail(
&jobs,
job_id,
"transcoding is not enabled".to_string(),
)
.await;
}
}
},
JobKind::Enrich { media_ids } => {
// Enrichment job placeholder
JobQueue::complete(
@ -426,7 +445,7 @@ async fn main() -> Result<()> {
serde_json::json!({"media_ids": media_ids.len(), "status": "not_implemented"}),
)
.await;
}
},
JobKind::CleanupAnalytics => {
let before = chrono::Utc::now() - chrono::Duration::days(90);
match storage.cleanup_old_events(before).await {
@ -437,10 +456,10 @@ async fn main() -> Result<()> {
serde_json::json!({"cleaned_up": count}),
)
.await;
}
},
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
},
};
drop(cancel);
})
@ -452,7 +471,8 @@ async fn main() -> Result<()> {
config.jobs.cache_ttl_secs,
));
// Initialize plugin manager if plugins are enabled (before moving config into Arc)
// Initialize plugin manager if plugins are enabled (before moving config into
// Arc)
let plugin_manager = if config.plugins.enabled {
match pinakes_core::plugin::PluginManager::new(
config.plugins.data_dir.clone(),
@ -462,11 +482,11 @@ async fn main() -> Result<()> {
Ok(pm) => {
tracing::info!("Plugin manager initialized");
Some(Arc::new(pm))
}
},
Err(e) => {
tracing::warn!("Failed to initialize plugin manager: {}", e);
None
}
},
}
} else {
tracing::info!("Plugins disabled in configuration");
@ -511,11 +531,11 @@ async fn main() -> Result<()> {
"managed storage initialized"
);
Some(Arc::new(service))
}
},
Err(e) => {
tracing::error!(error = %e, "failed to initialize managed storage");
None
}
},
}
} else {
tracing::info!("managed storage disabled in configuration");
@ -537,11 +557,11 @@ async fn main() -> Result<()> {
"chunked upload manager initialized"
);
Some(Arc::new(manager))
}
},
Err(e) => {
tracing::error!(error = %e, "failed to initialize chunked upload manager");
None
}
},
}
} else {
tracing::info!("sync disabled, chunked upload manager not initialized");
@ -568,7 +588,8 @@ async fn main() -> Result<()> {
let storage_clone = storage.clone();
let cancel = shutdown_token.clone();
tokio::spawn(async move {
let mut interval = tokio::time::interval(std::time::Duration::from_secs(15 * 60));
let mut interval =
tokio::time::interval(std::time::Duration::from_secs(15 * 60));
loop {
tokio::select! {
_ = interval.tick() => {
@ -595,7 +616,8 @@ async fn main() -> Result<()> {
let manager_clone = manager.clone();
let cancel = shutdown_token.clone();
tokio::spawn(async move {
let mut interval = tokio::time::interval(std::time::Duration::from_secs(60 * 60));
let mut interval =
tokio::time::interval(std::time::Duration::from_secs(60 * 60));
loop {
tokio::select! {
_ = interval.tick() => {
@ -630,20 +652,19 @@ async fn main() -> Result<()> {
if tls_config.enabled {
// TLS/HTTPS mode
let cert_path = tls_config
.cert_path
.as_ref()
.ok_or_else(|| anyhow::anyhow!("TLS enabled but cert_path not specified"))?;
let key_path = tls_config
.key_path
.as_ref()
.ok_or_else(|| anyhow::anyhow!("TLS enabled but key_path not specified"))?;
let cert_path = tls_config.cert_path.as_ref().ok_or_else(|| {
anyhow::anyhow!("TLS enabled but cert_path not specified")
})?;
let key_path = tls_config.key_path.as_ref().ok_or_else(|| {
anyhow::anyhow!("TLS enabled but key_path not specified")
})?;
info!(addr = %addr, cert = %cert_path.display(), "server listening with TLS");
// Configure TLS
let tls_config_builder =
axum_server::tls_rustls::RustlsConfig::from_pem_file(cert_path, key_path).await?;
axum_server::tls_rustls::RustlsConfig::from_pem_file(cert_path, key_path)
.await?;
// Start HTTP redirect server if configured
if tls_config.redirect_http {
@ -655,7 +676,8 @@ async fn main() -> Result<()> {
let https_port = config_arc.read().await.server.port;
let https_host = config_arc.read().await.server.host.clone();
let redirect_router = create_https_redirect_router(https_host, https_port);
let redirect_router =
create_https_redirect_router(https_host, https_port);
let shutdown = shutdown_token.clone();
tokio::spawn(async move {
@ -664,12 +686,13 @@ async fn main() -> Result<()> {
Err(e) => {
tracing::warn!(error = %e, addr = %http_addr, "failed to bind HTTP redirect listener");
return;
}
},
};
info!(addr = %http_addr, "HTTP redirect server listening");
let server = axum::serve(
listener,
redirect_router.into_make_service_with_connect_info::<std::net::SocketAddr>(),
redirect_router
.into_make_service_with_connect_info::<std::net::SocketAddr>(),
);
tokio::select! {
result = server => {
@ -692,12 +715,15 @@ async fn main() -> Result<()> {
// Spawn a task to trigger graceful shutdown
tokio::spawn(async move {
shutdown_signal().await;
shutdown_handle.graceful_shutdown(Some(std::time::Duration::from_secs(30)));
shutdown_handle
.graceful_shutdown(Some(std::time::Duration::from_secs(30)));
});
axum_server::bind_rustls(addr_parsed, tls_config_builder)
.handle(handle)
.serve(router.into_make_service_with_connect_info::<std::net::SocketAddr>())
.serve(
router.into_make_service_with_connect_info::<std::net::SocketAddr>(),
)
.await?;
} else {
// Plain HTTP mode
@ -722,7 +748,8 @@ fn create_https_redirect_router(https_host: String, https_port: u16) -> Router {
Router::new().fallback(any(move |uri: axum::http::Uri| {
let https_host = https_host.clone();
async move {
let path_and_query = uri.path_and_query().map(|pq| pq.as_str()).unwrap_or("/");
let path_and_query =
uri.path_and_query().map(|pq| pq.as_str()).unwrap_or("/");
let https_url = if https_port == 443 {
format!("https://{}{}", https_host, path_and_query)
@ -738,24 +765,26 @@ fn create_https_redirect_router(https_host: String, https_port: u16) -> Router {
async fn shutdown_signal() {
let ctrl_c = async {
match tokio::signal::ctrl_c().await {
Ok(()) => {}
Ok(()) => {},
Err(e) => {
tracing::warn!(error = %e, "failed to install Ctrl+C handler");
std::future::pending::<()>().await;
}
},
}
};
#[cfg(unix)]
let terminate = async {
match tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) {
match tokio::signal::unix::signal(
tokio::signal::unix::SignalKind::terminate(),
) {
Ok(mut signal) => {
signal.recv().await;
}
},
Err(e) => {
tracing::warn!(error = %e, "failed to install SIGTERM handler");
std::future::pending::<()>().await;
}
},
}
};

View file

@ -1,14 +1,14 @@
use axum::Json;
use axum::extract::{Extension, Path, Query, State};
use axum::{
Json,
extract::{Extension, Path, Query, State},
};
use pinakes_core::{
analytics::{UsageEvent, UsageEventType},
model::MediaId,
};
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::analytics::{UsageEvent, UsageEventType};
use pinakes_core::model::MediaId;
use crate::{auth::resolve_user_id, dto::*, error::ApiError, state::AppState};
/// Upper bound for client-supplied pagination `limit` values.
/// NOTE(review): the enforcement site is not visible in this chunk —
/// confirm it is applied to `Query` pagination params before querying
/// storage.
const MAX_LIMIT: u64 = 100;
@ -21,9 +21,11 @@ pub async fn get_most_viewed(
Ok(Json(
results
.into_iter()
.map(|(item, count)| MostViewedResponse {
.map(|(item, count)| {
MostViewedResponse {
media: MediaResponse::from(item),
view_count: count,
}
})
.collect(),
))
@ -45,10 +47,10 @@ pub async fn record_event(
Extension(username): Extension<String>,
Json(req): Json<RecordUsageEventRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let event_type: UsageEventType = req
.event_type
.parse()
.map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?;
let event_type: UsageEventType =
req.event_type.parse().map_err(|e: String| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(e))
})?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let event = UsageEvent {
id: Uuid::now_v7(),

View file

@ -1,12 +1,11 @@
use axum::Json;
use axum::extract::{Query, State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use axum::{
Json,
extract::{Query, State},
};
use pinakes_core::model::Pagination;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn list_audit(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,

View file

@ -1,15 +1,21 @@
use axum::Json;
use axum::extract::State;
use axum::http::{HeaderMap, StatusCode};
use axum::{
Json,
extract::State,
http::{HeaderMap, StatusCode},
};
use crate::dto::{LoginRequest, LoginResponse, UserInfoResponse};
use crate::state::AppState;
use crate::{
dto::{LoginRequest, LoginResponse, UserInfoResponse},
state::AppState,
};
/// Dummy password hash to use for timing-safe comparison when user doesn't exist.
/// This is a valid argon2 hash that will always fail verification but takes
/// similar time to verify as a real hash, preventing timing attacks that could
/// reveal whether a username exists.
const DUMMY_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
/// Dummy password hash to use for timing-safe comparison when user doesn't
/// exist. This is a valid argon2 hash that will always fail verification but
/// takes similar time to verify as a real hash, preventing timing attacks that
/// could reveal whether a username exists.
const DUMMY_HASH: &str =
"$argon2id$v=19$m=19456,t=2,\
p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
pub async fn login(
State(state): State<AppState>,
@ -121,7 +127,10 @@ pub async fn login(
}))
}
pub async fn logout(State(state): State<AppState>, headers: HeaderMap) -> StatusCode {
pub async fn logout(
State(state): State<AppState>,
headers: HeaderMap,
) -> StatusCode {
if let Some(token) = extract_bearer_token(&headers) {
// Get username before deleting session
let username = match state.storage.get_session(token).await {
@ -185,7 +194,10 @@ fn extract_bearer_token(headers: &HeaderMap) -> Option<&str> {
}
/// Revoke all sessions for the current user
pub async fn revoke_all_sessions(State(state): State<AppState>, headers: HeaderMap) -> StatusCode {
pub async fn revoke_all_sessions(
State(state): State<AppState>,
headers: HeaderMap,
) -> StatusCode {
let token = match extract_bearer_token(&headers) {
Some(t) => t,
None => return StatusCode::UNAUTHORIZED,
@ -198,7 +210,7 @@ pub async fn revoke_all_sessions(State(state): State<AppState>, headers: HeaderM
Err(e) => {
tracing::error!(error = %e, "failed to get session");
return StatusCode::INTERNAL_SERVER_ERROR;
}
},
};
let username = session.username.clone();
@ -218,11 +230,11 @@ pub async fn revoke_all_sessions(State(state): State<AppState>, headers: HeaderM
.await;
StatusCode::OK
}
},
Err(e) => {
tracing::error!(error = %e, "failed to revoke sessions");
StatusCode::INTERNAL_SERVER_ERROR
}
},
}
}
@ -245,7 +257,8 @@ pub async fn list_active_sessions(
State(state): State<AppState>,
) -> Result<Json<SessionListResponse>, StatusCode> {
// Get all active sessions
let sessions = state
let sessions =
state
.storage
.list_active_sessions(None)
.await
@ -256,12 +269,14 @@ pub async fn list_active_sessions(
let session_infos = sessions
.into_iter()
.map(|s| SessionInfo {
.map(|s| {
SessionInfo {
username: s.username,
role: s.role,
created_at: s.created_at.to_rfc3339(),
last_accessed: s.last_accessed.to_rfc3339(),
expires_at: s.expires_at.to_rfc3339(),
}
})
.collect();

View file

@ -1,20 +1,32 @@
use axum::{
Json, Router,
Json,
Router,
extract::{Extension, Path, Query, State},
http::StatusCode,
response::IntoResponse,
routing::{get, put},
};
use pinakes_core::{
error::PinakesError,
model::{
AuthorInfo,
BookMetadata,
MediaId,
Pagination,
ReadingProgress,
ReadingStatus,
},
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use pinakes_core::{
error::PinakesError,
model::{AuthorInfo, BookMetadata, MediaId, Pagination, ReadingProgress, ReadingStatus},
use crate::{
auth::resolve_user_id,
dto::MediaResponse,
error::ApiError,
state::AppState,
};
use crate::{auth::resolve_user_id, dto::MediaResponse, error::ApiError, state::AppState};
/// Book metadata response DTO
#[derive(Debug, Serialize, Deserialize)]
pub struct BookMetadataResponse {
@ -45,7 +57,11 @@ impl From<BookMetadata> for BookMetadataResponse {
series_name: meta.series_name,
series_index: meta.series_index,
format: meta.format,
authors: meta.authors.into_iter().map(AuthorResponse::from).collect(),
authors: meta
.authors
.into_iter()
.map(AuthorResponse::from)
.collect(),
identifiers: meta.identifiers,
}
}
@ -143,7 +159,8 @@ pub async fn get_book_metadata(
Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
let media_id = MediaId(media_id);
let metadata = state
let metadata =
state
.storage
.get_book_metadata(media_id)
.await?
@ -177,18 +194,23 @@ pub async fn list_books(
)
.await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
/// List all series with book counts
pub async fn list_series(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> {
pub async fn list_series(
State(state): State<AppState>,
) -> Result<impl IntoResponse, ApiError> {
let series = state.storage.list_series().await?;
let response: Vec<SeriesSummary> = series
.into_iter()
.map(|(name, count)| SeriesSummary {
.map(|(name, count)| {
SeriesSummary {
name,
book_count: count,
}
})
.collect();
@ -201,7 +223,8 @@ pub async fn get_series_books(
Path(series_name): Path<String>,
) -> Result<impl IntoResponse, ApiError> {
let items = state.storage.get_series_books(&series_name).await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
@ -213,9 +236,11 @@ pub async fn list_authors(
let authors = state.storage.list_all_authors(&pagination).await?;
let response: Vec<AuthorSummary> = authors
.into_iter()
.map(|(name, count)| AuthorSummary {
.map(|(name, count)| {
AuthorSummary {
name,
book_count: count,
}
})
.collect();
@ -233,7 +258,8 @@ pub async fn get_author_books(
.search_books(None, Some(&author_name), None, None, None, &pagination)
.await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
@ -288,7 +314,8 @@ pub async fn get_reading_list(
.get_reading_list(user_id.0, params.status)
.await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}

View file

@ -1,12 +1,11 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::{CollectionKind, MediaId};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{CollectionKind, MediaId};
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn create_collection(
State(state): State<AppState>,
@ -87,7 +86,11 @@ pub async fn remove_member(
State(state): State<AppState>,
Path((collection_id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
pinakes_core::collections::remove_member(&state.storage, collection_id, MediaId(media_id))
pinakes_core::collections::remove_member(
&state.storage,
collection_id,
MediaId(media_id),
)
.await?;
Ok(Json(serde_json::json!({"removed": true})))
}
@ -96,6 +99,8 @@ pub async fn get_members(
State(state): State<AppState>,
Path(collection_id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
let items = pinakes_core::collections::get_members(&state.storage, collection_id).await?;
let items =
pinakes_core::collections::get_members(&state.storage, collection_id)
.await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
}

View file

@ -1,11 +1,10 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResponse>, ApiError> {
pub async fn get_config(
State(state): State<AppState>,
) -> Result<Json<ConfigResponse>, ApiError> {
let config = state.config.read().await;
let roots = state.storage.list_root_dirs().await?;
@ -20,7 +19,8 @@ pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResp
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
} else {
path.parent()
path
.parent()
.map(|parent| {
std::fs::metadata(parent)
.map(|m| !m.permissions().readonly())
@ -28,7 +28,7 @@ pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResp
})
.unwrap_or(false)
}
}
},
None => false,
};
@ -132,7 +132,8 @@ pub async fn update_scanning_config(
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
} else {
path.parent()
path
.parent()
.map(|parent| {
std::fs::metadata(parent)
.map(|m| !m.permissions().readonly())
@ -140,7 +141,7 @@ pub async fn update_scanning_config(
})
.unwrap_or(false)
}
}
},
None => false,
};

View file

@ -1,9 +1,6 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::DatabaseStatsResponse;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::DatabaseStatsResponse, error::ApiError, state::AppState};
pub async fn database_stats(
State(state): State<AppState>,

View file

@ -1,9 +1,10 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::{DuplicateGroupResponse, MediaResponse};
use crate::error::ApiError;
use crate::state::AppState;
use crate::{
dto::{DuplicateGroupResponse, MediaResponse},
error::ApiError,
state::AppState,
};
pub async fn list_duplicates(
State(state): State<AppState>,

View file

@ -1,12 +1,11 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn trigger_enrichment(
State(state): State<AppState>,
@ -39,7 +38,8 @@ pub async fn batch_enrich(
State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field
) -> Result<Json<serde_json::Value>, ApiError> {
let media_ids: Vec<MediaId> = req.media_ids.into_iter().map(MediaId).collect();
let media_ids: Vec<MediaId> =
req.media_ids.into_iter().map(MediaId).collect();
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Enrich { media_ids })

View file

@ -1,10 +1,9 @@
use axum::Json;
use axum::extract::State;
use serde::Deserialize;
use std::path::PathBuf;
use crate::error::ApiError;
use crate::state::AppState;
use axum::{Json, extract::State};
use serde::Deserialize;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
pub struct ExportRequest {
@ -16,7 +15,8 @@ pub async fn trigger_export(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
// Default export to JSON in data dir
let dest = pinakes_core::config::Config::default_data_dir().join("export.json");
let dest =
pinakes_core::config::Config::default_data_dir().join("export.json");
let kind = pinakes_core::jobs::JobKind::Export {
format: pinakes_core::jobs::ExportFormat::Json,
destination: dest,

View file

@ -1,9 +1,6 @@
use std::time::Instant;
use axum::Json;
use axum::extract::State;
use axum::http::StatusCode;
use axum::response::IntoResponse;
use axum::{Json, extract::State, http::StatusCode, response::IntoResponse};
use serde::{Deserialize, Serialize};
use crate::state::AppState;
@ -58,10 +55,12 @@ pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
// Check database health
let db_start = Instant::now();
let db_health = match state.storage.count_media().await {
Ok(count) => DatabaseHealth {
Ok(count) => {
DatabaseHealth {
status: "ok".to_string(),
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: Some(count),
}
},
Err(e) => {
response.status = "degraded".to_string();
@ -70,12 +69,13 @@ pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: None,
}
}
},
};
response.database = Some(db_health);
// Check filesystem health (root directories)
let roots: Vec<std::path::PathBuf> = state.storage.list_root_dirs().await.unwrap_or_default();
let roots: Vec<std::path::PathBuf> =
state.storage.list_root_dirs().await.unwrap_or_default();
let roots_accessible = roots.iter().filter(|r| r.exists()).count();
if roots_accessible < roots.len() {
response.status = "degraded".to_string();
@ -130,14 +130,16 @@ pub async fn readiness(State(state): State<AppState>) -> impl IntoResponse {
"database_latency_ms": latency
})),
)
}
Err(e) => (
},
Err(e) => {
(
StatusCode::SERVICE_UNAVAILABLE,
Json(serde_json::json!({
"status": "not_ready",
"reason": e.to_string()
})),
),
)
},
}
}
@ -159,7 +161,9 @@ pub struct JobsHealth {
pub running: usize,
}
pub async fn health_detailed(State(state): State<AppState>) -> Json<DetailedHealthResponse> {
pub async fn health_detailed(
State(state): State<AppState>,
) -> Json<DetailedHealthResponse> {
// Check database
let db_start = Instant::now();
let (db_status, media_count) = match state.storage.count_media().await {

View file

@ -1,9 +1,7 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use serde::Deserialize;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
pub struct OrphanResolveRequest {
@ -92,7 +90,8 @@ pub async fn resolve_orphans(
.into_iter()
.map(pinakes_core::model::MediaId)
.collect();
let count = pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
let count =
pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
.await
.map_err(ApiError)?;
Ok(Json(serde_json::json!({ "resolved": count })))

View file

@ -1,10 +1,11 @@
use axum::Json;
use axum::extract::{Path, State};
use crate::error::ApiError;
use crate::state::AppState;
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::jobs::Job;
use crate::{error::ApiError, state::AppState};
/// Axum handler: returns the job queue's current job list as JSON.
///
/// The list is a point-in-time snapshot taken by `JobQueue::list`; job
/// state may change immediately after the response is built.
pub async fn list_jobs(State(state): State<AppState>) -> Json<Vec<Job>> {
  let snapshot = state.job_queue.list().await;
  Json(snapshot)
}
@ -14,7 +15,8 @@ pub async fn get_job(
Path(id): Path<uuid::Uuid>,
) -> Result<Json<Job>, ApiError> {
state.job_queue.status(id).await.map(Json).ok_or_else(|| {
pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}")).into()
pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}"))
.into()
})
}
@ -26,9 +28,11 @@ pub async fn cancel_job(
if cancelled {
Ok(Json(serde_json::json!({ "cancelled": true })))
} else {
Err(pinakes_core::error::PinakesError::NotFound(format!(
Err(
pinakes_core::error::PinakesError::NotFound(format!(
"job not found or already finished: {id}"
))
.into())
.into(),
)
}
}

View file

@ -1,16 +1,18 @@
use axum::Json;
use axum::extract::{Path, Query, State};
use axum::{
Json,
extract::{Path, Query, State},
};
use pinakes_core::{
model::{MediaId, Pagination},
storage::DynStorageBackend,
};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{MediaId, Pagination};
use pinakes_core::storage::DynStorageBackend;
use crate::{dto::*, error::ApiError, state::AppState};
/// Apply tags and add to collection after a successful import.
/// Shared logic used by import_with_options, batch_import, and import_directory_endpoint.
/// Shared logic used by import_with_options, batch_import, and
/// import_directory_endpoint.
async fn apply_import_post_processing(
storage: &DynStorageBackend,
media_id: MediaId,
@ -20,7 +22,9 @@ async fn apply_import_post_processing(
) {
if let Some(tag_ids) = tag_ids {
for tid in tag_ids {
if let Err(e) = pinakes_core::tags::tag_media(storage, media_id, *tid).await {
if let Err(e) =
pinakes_core::tags::tag_media(storage, media_id, *tid).await
{
tracing::warn!(error = %e, "failed to apply tag during import");
}
}
@ -29,18 +33,21 @@ async fn apply_import_post_processing(
for name in new_tags {
match pinakes_core::tags::create_tag(storage, name, None).await {
Ok(tag) => {
if let Err(e) = pinakes_core::tags::tag_media(storage, media_id, tag.id).await {
if let Err(e) =
pinakes_core::tags::tag_media(storage, media_id, tag.id).await
{
tracing::warn!(error = %e, "failed to apply new tag during import");
}
}
},
Err(e) => {
tracing::warn!(tag_name = %name, error = %e, "failed to create tag during import");
}
},
}
}
}
if let Some(col_id) = collection_id
&& let Err(e) = pinakes_core::collections::add_member(storage, col_id, media_id, 0).await
&& let Err(e) =
pinakes_core::collections::add_member(storage, col_id, media_id, 0).await
{
tracing::warn!(error = %e, "failed to add to collection during import");
}
@ -50,7 +57,8 @@ pub async fn import_media(
State(state): State<AppState>,
Json(req): Json<ImportRequest>,
) -> Result<Json<ImportResponse>, ApiError> {
let result = pinakes_core::import::import_file(&state.storage, &req.path).await?;
let result =
pinakes_core::import::import_file(&state.storage, &req.path).await?;
Ok(Json(ImportResponse {
media_id: result.media_id.0.to_string(),
was_duplicate: result.was_duplicate,
@ -83,7 +91,11 @@ const MAX_SHORT_TEXT: usize = 500;
/// Maximum length for long text fields (description).
const MAX_LONG_TEXT: usize = 10_000;
fn validate_optional_text(field: &Option<String>, name: &str, max: usize) -> Result<(), ApiError> {
fn validate_optional_text(
field: &Option<String>,
name: &str,
max: usize,
) -> Result<(), ApiError> {
if let Some(v) = field
&& v.len() > max
{
@ -193,8 +205,10 @@ pub async fn stream_media(
Path(id): Path<Uuid>,
headers: axum::http::HeaderMap,
) -> Result<axum::response::Response, ApiError> {
use axum::body::Body;
use axum::http::{StatusCode, header};
use axum::{
body::Body,
http::{StatusCode, header},
};
use tokio::io::{AsyncReadExt, AsyncSeekExt};
use tokio_util::io::ReaderStream;
@ -222,7 +236,8 @@ pub async fn stream_media(
let content_length = end - start + 1;
let mut file = file;
file.seek(std::io::SeekFrom::Start(start))
file
.seek(std::io::SeekFrom::Start(start))
.await
.map_err(|e| ApiError(pinakes_core::error::PinakesError::Io(e)))?;
@ -300,7 +315,8 @@ pub async fn import_with_options(
State(state): State<AppState>,
Json(req): Json<ImportWithOptionsRequest>,
) -> Result<Json<ImportResponse>, ApiError> {
let result = pinakes_core::import::import_file(&state.storage, &req.path).await?;
let result =
pinakes_core::import::import_file(&state.storage, &req.path).await?;
if !result.was_duplicate {
apply_import_post_processing(
@ -358,7 +374,7 @@ pub async fn batch_import(
was_duplicate: result.was_duplicate,
error: None,
});
}
},
Err(e) => {
errors += 1;
results.push(BatchImportItemResult {
@ -367,7 +383,7 @@ pub async fn batch_import(
was_duplicate: false,
error: Some(e.to_string()),
});
}
},
}
}
@ -425,7 +441,7 @@ pub async fn import_directory_endpoint(
was_duplicate: result.was_duplicate,
error: None,
});
}
},
Err(e) => {
errors += 1;
results.push(BatchImportItemResult {
@ -434,7 +450,7 @@ pub async fn import_directory_endpoint(
was_duplicate: false,
error: Some(e.to_string()),
});
}
},
}
}
@ -468,18 +484,23 @@ pub async fn preview_directory(
let roots = state.storage.list_root_dirs().await?;
if !roots.is_empty() {
let canonical = dir.canonicalize().map_err(|_| {
pinakes_core::error::PinakesError::InvalidOperation("cannot resolve path".into())
pinakes_core::error::PinakesError::InvalidOperation(
"cannot resolve path".into(),
)
})?;
let allowed = roots.iter().any(|root| canonical.starts_with(root));
if !allowed {
return Err(pinakes_core::error::PinakesError::InvalidOperation(
return Err(
pinakes_core::error::PinakesError::InvalidOperation(
"path is not under a configured root directory".into(),
)
.into());
.into(),
);
}
}
let files: Vec<DirectoryPreviewFile> = tokio::task::spawn_blocking(move || {
let files: Vec<DirectoryPreviewFile> =
tokio::task::spawn_blocking(move || {
let mut result = Vec::new();
fn walk_dir(
dir: &std::path::Path,
@ -504,7 +525,8 @@ pub async fn preview_directory(
walk_dir(&path, recursive, result);
}
} else if path.is_file()
&& let Some(mt) = pinakes_core::media_type::MediaType::from_path(&path)
&& let Some(mt) =
pinakes_core::media_type::MediaType::from_path(&path)
{
let size = entry.metadata().ok().map(|m| m.len()).unwrap_or(0);
let file_name = path
@ -528,7 +550,9 @@ pub async fn preview_directory(
result
})
.await
.map_err(|e| pinakes_core::error::PinakesError::Io(std::io::Error::other(e)))?;
.map_err(|e| {
pinakes_core::error::PinakesError::Io(std::io::Error::other(e))
})?;
let total_count = files.len();
let total_size = files.iter().map(|f| f.file_size).sum();
@ -601,20 +625,25 @@ pub async fn batch_tag(
));
}
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect();
let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state
.storage
.batch_tag_media(&media_ids, &req.tag_ids)
.await
{
Ok(count) => Ok(Json(BatchOperationResponse {
Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize,
errors: Vec::new(),
})),
Err(e) => Ok(Json(BatchOperationResponse {
}))
},
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}))
},
}
}
@ -634,14 +663,18 @@ pub async fn delete_all_media(
}
match state.storage.delete_all_media().await {
Ok(count) => Ok(Json(BatchOperationResponse {
Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize,
errors: Vec::new(),
})),
Err(e) => Ok(Json(BatchOperationResponse {
}))
},
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}))
},
}
}
@ -657,7 +690,8 @@ pub async fn batch_delete(
));
}
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect();
let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
// Record audit entries BEFORE delete to avoid FK constraint violation.
// Use None for media_id since they'll be deleted; include ID in details.
@ -675,14 +709,18 @@ pub async fn batch_delete(
}
match state.storage.batch_delete_media(&media_ids).await {
Ok(count) => Ok(Json(BatchOperationResponse {
Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize,
errors: Vec::new(),
})),
Err(e) => Ok(Json(BatchOperationResponse {
}))
},
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}))
},
}
}
@ -728,7 +766,8 @@ pub async fn batch_update(
));
}
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect();
let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state
.storage
.batch_update_media(
@ -742,14 +781,18 @@ pub async fn batch_update(
)
.await
{
Ok(count) => Ok(Json(BatchOperationResponse {
Ok(count) => {
Ok(Json(BatchOperationResponse {
processed: count as usize,
errors: Vec::new(),
})),
Err(e) => Ok(Json(BatchOperationResponse {
}))
},
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}))
},
}
}
@ -757,8 +800,7 @@ pub async fn get_thumbnail(
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
use axum::body::Body;
use axum::http::header;
use axum::{body::Body, http::header};
use tokio_util::io::ReaderStream;
let item = state.storage.get_media(MediaId(id)).await?;
@ -769,9 +811,9 @@ pub async fn get_thumbnail(
))
})?;
let file = tokio::fs::File::open(&thumb_path)
.await
.map_err(|_e| ApiError(pinakes_core::error::PinakesError::FileNotFound(thumb_path)))?;
let file = tokio::fs::File::open(&thumb_path).await.map_err(|_e| {
ApiError(pinakes_core::error::PinakesError::FileNotFound(thumb_path))
})?;
let stream = ReaderStream::new(file);
let body = Body::from_stream(stream);
@ -816,7 +858,9 @@ pub async fn rename_media(
path: item.path.to_string_lossy().to_string(),
content_hash: Some(item.content_hash.clone()),
file_size: Some(item.file_size),
metadata_json: Some(serde_json::json!({ "old_path": old_path }).to_string()),
metadata_json: Some(
serde_json::json!({ "old_path": old_path }).to_string(),
),
changed_by_device: None,
timestamp: chrono::Utc::now(),
};
@ -854,7 +898,9 @@ pub async fn move_media_endpoint(
path: item.path.to_string_lossy().to_string(),
content_hash: Some(item.content_hash.clone()),
file_size: Some(item.file_size),
metadata_json: Some(serde_json::json!({ "old_path": old_path }).to_string()),
metadata_json: Some(
serde_json::json!({ "old_path": old_path }).to_string(),
),
changed_by_device: None,
timestamp: chrono::Utc::now(),
};
@ -888,7 +934,8 @@ pub async fn batch_move_media(
));
}
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect();
let media_ids: Vec<MediaId> =
req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state
.storage
@ -921,11 +968,13 @@ pub async fn batch_move_media(
processed: results.len(),
errors: Vec::new(),
}))
}
Err(e) => Ok(Json(BatchOperationResponse {
},
Err(e) => {
Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}))
},
}
}
@ -1088,7 +1137,9 @@ pub async fn permanent_delete_media(
path: item.path.to_string_lossy().to_string(),
content_hash: Some(item.content_hash.clone()),
file_size: Some(item.file_size),
metadata_json: Some(serde_json::json!({"permanent": true}).to_string()),
metadata_json: Some(
serde_json::json!({"permanent": true}).to_string(),
),
changed_by_device: None,
timestamp: chrono::Utc::now(),
};

View file

@ -7,15 +7,22 @@
//! - Link reindexing
use axum::{
Json, Router,
Json,
Router,
extract::{Path, Query, State},
routing::{get, post},
};
use pinakes_core::model::{
BacklinkInfo,
GraphData,
GraphEdge,
GraphNode,
MarkdownLink,
MediaId,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use pinakes_core::model::{BacklinkInfo, GraphData, GraphEdge, GraphNode, MarkdownLink, MediaId};
use crate::{error::ApiError, state::AppState};
// ===== Response DTOs =====
@ -205,7 +212,8 @@ pub async fn get_backlinks(
let media_id = MediaId(id);
let backlinks = state.storage.get_backlinks(media_id).await?;
let items: Vec<BacklinkItem> = backlinks.into_iter().map(BacklinkItem::from).collect();
let items: Vec<BacklinkItem> =
backlinks.into_iter().map(BacklinkItem::from).collect();
let count = items.len();
Ok(Json(BacklinksResponse {
@ -224,7 +232,8 @@ pub async fn get_outgoing_links(
let media_id = MediaId(id);
let links = state.storage.get_outgoing_links(media_id).await?;
let items: Vec<OutgoingLinkItem> = links.into_iter().map(OutgoingLinkItem::from).collect();
let items: Vec<OutgoingLinkItem> =
links.into_iter().map(OutgoingLinkItem::from).collect();
let count = items.len();
Ok(Json(OutgoingLinksResponse {
@ -263,13 +272,13 @@ pub async fn reindex_links(
// Only process markdown files
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
match &media.media_type {
MediaType::Builtin(BuiltinMediaType::Markdown) => {}
MediaType::Builtin(BuiltinMediaType::Markdown) => {},
_ => {
return Ok(Json(ReindexResponse {
message: "Skipped: not a markdown file".to_string(),
links_extracted: 0,
}));
}
},
}
// Read the file content

View file

@ -1,12 +1,14 @@
use std::collections::HashMap;
use axum::{
Json, Router,
Json,
Router,
extract::{Query, State},
response::IntoResponse,
routing::get,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::{dto::MediaResponse, error::ApiError, state::AppState};
@ -83,12 +85,14 @@ pub async fn get_timeline(
.into_iter()
.filter(|item| {
item.date_taken.is_some()
&& item.media_type.category() == pinakes_core::media_type::MediaCategory::Image
&& item.media_type.category()
== pinakes_core::media_type::MediaCategory::Image
})
.collect();
// Group by the requested period
let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> = HashMap::new();
let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> =
HashMap::new();
for photo in photos {
if let Some(date_taken) = photo.date_taken {
@ -122,7 +126,8 @@ pub async fn get_timeline(
.map(|(date, items)| {
let cover_id = items.first().map(|i| i.id.0.to_string());
let count = items.len();
let items: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
let items: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
TimelineGroup {
date,
@ -150,7 +155,8 @@ pub async fn get_map_photos(
let min_lon = query.lon1.min(query.lon2);
let max_lon = query.lon1.max(query.lon2);
// Query all media (we'll filter in-memory for now - could optimize with DB query)
// Query all media (we'll filter in-memory for now - could optimize with DB
// query)
let all_media = state
.storage
.list_media(&pinakes_core::model::Pagination {

View file

@ -1,15 +1,11 @@
use axum::Json;
use axum::extract::{Extension, Path, State};
use axum::{
Json,
extract::{Extension, Path, State},
};
use pinakes_core::{model::MediaId, playlists::Playlist, users::UserId};
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::playlists::Playlist;
use pinakes_core::users::UserId;
use crate::{auth::resolve_user_id, dto::*, error::ApiError, state::AppState};
/// Check whether a user has access to a playlist.
///
@ -89,7 +85,8 @@ pub async fn get_playlist(
Path(id): Path<Uuid>,
) -> Result<Json<PlaylistResponse>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let playlist = check_playlist_access(&state.storage, id, user_id, false).await?;
let playlist =
check_playlist_access(&state.storage, id, user_id, false).await?;
Ok(Json(PlaylistResponse::from(playlist)))
}
@ -146,7 +143,7 @@ pub async fn add_item(
None => {
let items = state.storage.get_playlist_items(id).await?;
items.len() as i32
}
},
};
state
.storage

View file

@ -1,9 +1,9 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::*, error::ApiError, state::AppState};
/// List all installed plugins
pub async fn list_plugins(
@ -57,7 +57,8 @@ pub async fn install_plugin(
))
})?;
let plugin_id = plugin_manager
let plugin_id =
plugin_manager
.install_plugin(&req.source)
.await
.map_err(|e| {
@ -66,7 +67,8 @@ pub async fn install_plugin(
))
})?;
let plugin = plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| {
let plugin =
plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(
"Plugin installed but not found".to_string(),
))

View file

@ -1,9 +1,10 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use serde::{Deserialize, Serialize};
use crate::error::ApiError;
use crate::state::AppState;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
pub struct CreateSavedSearchRequest {
@ -52,12 +53,14 @@ pub async fn list_saved_searches(
Ok(Json(
searches
.into_iter()
.map(|s| SavedSearchResponse {
.map(|s| {
SavedSearchResponse {
id: s.id.to_string(),
name: s.name,
query: s.query,
sort_order: s.sort_order,
created_at: s.created_at,
}
})
.collect(),
))

View file

@ -1,9 +1,6 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::*, error::ApiError, state::AppState};
/// Trigger a scan as a background job. Returns the job ID immediately.
pub async fn trigger_scan(
@ -17,7 +14,9 @@ pub async fn trigger_scan(
}))
}
pub async fn scan_status(State(state): State<AppState>) -> Json<ScanStatusResponse> {
pub async fn scan_status(
State(state): State<AppState>,
) -> Json<ScanStatusResponse> {
let snapshot = state.scan_progress.snapshot();
let error_count = snapshot.errors.len();
Json(ScanStatusResponse {

View file

@ -1,9 +1,9 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use crate::dto::ScheduledTaskResponse;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::ScheduledTaskResponse, error::ApiError, state::AppState};
pub async fn list_scheduled_tasks(
State(state): State<AppState>,
@ -11,7 +11,8 @@ pub async fn list_scheduled_tasks(
let tasks = state.scheduler.list_tasks().await;
let responses: Vec<ScheduledTaskResponse> = tasks
.into_iter()
.map(|t| ScheduledTaskResponse {
.map(|t| {
ScheduledTaskResponse {
id: t.id,
name: t.name,
schedule: t.schedule.display_string(),
@ -19,6 +20,7 @@ pub async fn list_scheduled_tasks(
last_run: t.last_run.map(|dt| dt.to_rfc3339()),
next_run: t.next_run.map(|dt| dt.to_rfc3339()),
last_status: t.last_status,
}
})
.collect();
Ok(Json(responses))
@ -29,13 +31,17 @@ pub async fn toggle_scheduled_task(
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.toggle_task(&id).await {
Some(enabled) => Ok(Json(serde_json::json!({
Some(enabled) => {
Ok(Json(serde_json::json!({
"id": id,
"enabled": enabled,
}))),
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound(
})))
},
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
))),
)))
},
}
}
@ -44,12 +50,16 @@ pub async fn run_scheduled_task_now(
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.run_now(&id).await {
Some(job_id) => Ok(Json(serde_json::json!({
Some(job_id) => {
Ok(Json(serde_json::json!({
"id": id,
"job_id": job_id,
}))),
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound(
})))
},
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
))),
)))
},
}
}

View file

@ -1,12 +1,13 @@
use axum::Json;
use axum::extract::{Query, State};
use axum::{
Json,
extract::{Query, State},
};
use pinakes_core::{
model::Pagination,
search::{SearchRequest, SortOrder, parse_search_query},
};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::Pagination;
use pinakes_core::search::{SearchRequest, SortOrder, parse_search_query};
use crate::{dto::*, error::ApiError, state::AppState};
fn resolve_sort(sort: Option<&str>) -> SortOrder {
match sort {

Some files were not shown because too many files have changed in this diff Show more