From c4adc4e3e0d1060664dc79c057692e86d6490ba2 Mon Sep 17 00:00:00 2001 From: NotAShelf Date: Mon, 2 Feb 2026 17:32:11 +0300 Subject: [PATCH] various: simplify code; work on security and performance Signed-off-by: NotAShelf Change-Id: I9a5114addcab5fbff430ab2b919b83466a6a6964 --- crates/pinakes-core/Cargo.toml | 7 +- crates/pinakes-core/src/analytics.rs | 69 + crates/pinakes-core/src/cache.rs | 8 +- crates/pinakes-core/src/config.rs | 242 +++ crates/pinakes-core/src/enrichment/lastfm.rs | 109 + crates/pinakes-core/src/enrichment/mod.rs | 66 + .../src/enrichment/musicbrainz.rs | 134 ++ crates/pinakes-core/src/enrichment/tmdb.rs | 109 + crates/pinakes-core/src/error.rs | 12 + crates/pinakes-core/src/events.rs | 26 + crates/pinakes-core/src/export.rs | 2 +- crates/pinakes-core/src/import.rs | 12 +- crates/pinakes-core/src/integrity.rs | 14 +- crates/pinakes-core/src/jobs.rs | 8 + crates/pinakes-core/src/lib.rs | 8 + .../{media_type.rs => media_type/builtin.rs} | 45 +- crates/pinakes-core/src/media_type/mod.rs | 232 +++ .../pinakes-core/src/media_type/registry.rs | 285 +++ crates/pinakes-core/src/metadata/audio.rs | 18 +- crates/pinakes-core/src/metadata/document.rs | 16 +- crates/pinakes-core/src/metadata/image.rs | 34 +- crates/pinakes-core/src/metadata/markdown.rs | 9 +- crates/pinakes-core/src/metadata/mod.rs | 2 +- crates/pinakes-core/src/metadata/video.rs | 16 +- crates/pinakes-core/src/playlists.rs | 31 + crates/pinakes-core/src/plugin/loader.rs | 407 ++++ crates/pinakes-core/src/plugin/mod.rs | 419 ++++ crates/pinakes-core/src/plugin/registry.rs | 280 +++ crates/pinakes-core/src/plugin/runtime.rs | 582 ++++++ crates/pinakes-core/src/plugin/security.rs | 341 ++++ crates/pinakes-core/src/social.rs | 52 + crates/pinakes-core/src/storage/mod.rs | 170 ++ crates/pinakes-core/src/storage/postgres.rs | 1450 ++++++++++++- crates/pinakes-core/src/storage/sqlite.rs | 1797 ++++++++++++++++- crates/pinakes-core/src/subtitles.rs | 62 + crates/pinakes-core/src/thumbnail.rs | 
12 +- crates/pinakes-core/src/transcode.rs | 545 +++++ crates/pinakes-core/src/users.rs | 210 ++ crates/pinakes-core/tests/integration_test.rs | 466 ++++- crates/pinakes-plugin-api/Cargo.toml | 27 + crates/pinakes-plugin-api/src/lib.rs | 374 ++++ crates/pinakes-plugin-api/src/manifest.rs | 263 +++ crates/pinakes-plugin-api/src/types.rs | 156 ++ crates/pinakes-plugin-api/src/wasm.rs | 186 ++ .../tests/validate_examples.rs | 67 + crates/pinakes-server/Cargo.toml | 3 + crates/pinakes-server/src/app.rs | 183 +- crates/pinakes-server/src/auth.rs | 20 + crates/pinakes-server/src/dto.rs | 428 ++++ crates/pinakes-server/src/error.rs | 2 + crates/pinakes-server/src/main.rs | 98 +- crates/pinakes-server/src/routes/analytics.rs | 94 + .../pinakes-server/src/routes/enrichment.rs | 48 + crates/pinakes-server/src/routes/integrity.rs | 4 +- crates/pinakes-server/src/routes/mod.rs | 9 + crates/pinakes-server/src/routes/playlists.rs | 208 ++ crates/pinakes-server/src/routes/plugins.rs | 149 ++ crates/pinakes-server/src/routes/social.rs | 199 ++ crates/pinakes-server/src/routes/streaming.rs | 238 +++ crates/pinakes-server/src/routes/subtitles.rs | 123 ++ crates/pinakes-server/src/routes/transcode.rs | 63 + crates/pinakes-server/src/routes/users.rs | 191 ++ crates/pinakes-server/src/state.rs | 4 + crates/pinakes-server/tests/api_test.rs | 802 +++++++- crates/pinakes-server/tests/plugin_test.rs | 211 ++ crates/pinakes-tui/src/app.rs | 148 +- crates/pinakes-tui/src/client.rs | 3 + crates/pinakes-tui/src/input.rs | 39 +- crates/pinakes-tui/src/ui/duplicates.rs | 17 +- crates/pinakes-tui/src/ui/tasks.rs | 8 +- crates/pinakes-ui/src/app.rs | 95 +- crates/pinakes-ui/src/components/import.rs | 29 +- .../pinakes-ui/src/components/media_player.rs | 18 + crates/pinakes-ui/src/styles.rs | 51 +- 74 files changed, 12714 insertions(+), 151 deletions(-) create mode 100644 crates/pinakes-core/src/analytics.rs create mode 100644 crates/pinakes-core/src/enrichment/lastfm.rs create mode 100644 
crates/pinakes-core/src/enrichment/mod.rs create mode 100644 crates/pinakes-core/src/enrichment/musicbrainz.rs create mode 100644 crates/pinakes-core/src/enrichment/tmdb.rs rename crates/pinakes-core/src/{media_type.rs => media_type/builtin.rs} (76%) create mode 100644 crates/pinakes-core/src/media_type/mod.rs create mode 100644 crates/pinakes-core/src/media_type/registry.rs create mode 100644 crates/pinakes-core/src/playlists.rs create mode 100644 crates/pinakes-core/src/plugin/loader.rs create mode 100644 crates/pinakes-core/src/plugin/mod.rs create mode 100644 crates/pinakes-core/src/plugin/registry.rs create mode 100644 crates/pinakes-core/src/plugin/runtime.rs create mode 100644 crates/pinakes-core/src/plugin/security.rs create mode 100644 crates/pinakes-core/src/social.rs create mode 100644 crates/pinakes-core/src/subtitles.rs create mode 100644 crates/pinakes-core/src/transcode.rs create mode 100644 crates/pinakes-core/src/users.rs create mode 100644 crates/pinakes-plugin-api/Cargo.toml create mode 100644 crates/pinakes-plugin-api/src/lib.rs create mode 100644 crates/pinakes-plugin-api/src/manifest.rs create mode 100644 crates/pinakes-plugin-api/src/types.rs create mode 100644 crates/pinakes-plugin-api/src/wasm.rs create mode 100644 crates/pinakes-plugin-api/tests/validate_examples.rs create mode 100644 crates/pinakes-server/src/routes/analytics.rs create mode 100644 crates/pinakes-server/src/routes/enrichment.rs create mode 100644 crates/pinakes-server/src/routes/playlists.rs create mode 100644 crates/pinakes-server/src/routes/plugins.rs create mode 100644 crates/pinakes-server/src/routes/social.rs create mode 100644 crates/pinakes-server/src/routes/streaming.rs create mode 100644 crates/pinakes-server/src/routes/subtitles.rs create mode 100644 crates/pinakes-server/src/routes/transcode.rs create mode 100644 crates/pinakes-server/src/routes/users.rs create mode 100644 crates/pinakes-server/tests/plugin_test.rs diff --git a/crates/pinakes-core/Cargo.toml 
b/crates/pinakes-core/Cargo.toml index 60c5764..8a12fb4 100644 --- a/crates/pinakes-core/Cargo.toml +++ b/crates/pinakes-core/Cargo.toml @@ -32,8 +32,13 @@ mime_guess = { workspace = true } async-trait = { workspace = true } kamadak-exif = { workspace = true } image = { workspace = true } -tokio-util = { version = "0.7", features = ["rt"] } +tokio-util = { workspace = true } reqwest = { workspace = true } +argon2 = { workspace = true } + +# Plugin system +pinakes-plugin-api = { path = "../pinakes-plugin-api" } +wasmtime = { workspace = true } [dev-dependencies] tempfile = "3" diff --git a/crates/pinakes-core/src/analytics.rs b/crates/pinakes-core/src/analytics.rs new file mode 100644 index 0000000..bb605e2 --- /dev/null +++ b/crates/pinakes-core/src/analytics.rs @@ -0,0 +1,69 @@ +//! Usage analytics and watch history tracking. + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::model::MediaId; +use crate::users::UserId; + +/// A tracked usage event for a media item. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UsageEvent { + pub id: Uuid, + pub media_id: Option, + pub user_id: Option, + pub event_type: UsageEventType, + pub timestamp: DateTime, + pub duration_secs: Option, + pub context_json: Option, +} + +/// Types of usage events that can be tracked. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum UsageEventType { + View, + Play, + Export, + Share, + Search, +} + +impl std::fmt::Display for UsageEventType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let s = match self { + Self::View => "view", + Self::Play => "play", + Self::Export => "export", + Self::Share => "share", + Self::Search => "search", + }; + write!(f, "{s}") + } +} + +impl std::str::FromStr for UsageEventType { + type Err = String; + + fn from_str(s: &str) -> std::result::Result { + match s { + "view" => Ok(Self::View), + "play" => Ok(Self::Play), + "export" => Ok(Self::Export), + "share" => Ok(Self::Share), + "search" => Ok(Self::Search), + _ => Err(format!("unknown usage event type: {s}")), + } + } +} + +/// Watch history entry tracking progress through media. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WatchHistory { + pub id: Uuid, + pub user_id: UserId, + pub media_id: MediaId, + pub progress_secs: f64, + pub last_watched: DateTime, +} diff --git a/crates/pinakes-core/src/cache.rs b/crates/pinakes-core/src/cache.rs index cff0d30..f822e5f 100644 --- a/crates/pinakes-core/src/cache.rs +++ b/crates/pinakes-core/src/cache.rs @@ -45,10 +45,10 @@ where pub async fn get(&self, key: &K) -> Option { let map = self.entries.read().await; - if let Some(entry) = map.get(key) { - if entry.inserted_at.elapsed() < self.ttl { - return Some(entry.value.clone()); - } + if let Some(entry) = map.get(key) + && entry.inserted_at.elapsed() < self.ttl + { + return Some(entry.value.clone()); } None } diff --git a/crates/pinakes-core/src/config.rs b/crates/pinakes-core/src/config.rs index 3d3d6b9..056f553 100644 --- a/crates/pinakes-core/src/config.rs +++ b/crates/pinakes-core/src/config.rs @@ -20,6 +20,16 @@ pub struct Config { pub webhooks: Vec, #[serde(default)] pub scheduled_tasks: Vec, + #[serde(default)] + pub plugins: PluginsConfig, + 
#[serde(default)] + pub transcoding: TranscodingConfig, + #[serde(default)] + pub enrichment: EnrichmentConfig, + #[serde(default)] + pub cloud: CloudConfig, + #[serde(default)] + pub analytics: AnalyticsConfig, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -192,6 +202,233 @@ impl std::fmt::Display for UserRole { } } +// ===== Plugin Configuration ===== + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginsConfig { + #[serde(default)] + pub enabled: bool, + #[serde(default = "default_plugin_data_dir")] + pub data_dir: PathBuf, + #[serde(default = "default_plugin_cache_dir")] + pub cache_dir: PathBuf, + #[serde(default)] + pub plugin_dirs: Vec, + #[serde(default)] + pub enable_hot_reload: bool, + #[serde(default)] + pub allow_unsigned: bool, + #[serde(default = "default_max_concurrent_ops")] + pub max_concurrent_ops: usize, + #[serde(default = "default_plugin_timeout")] + pub plugin_timeout_secs: u64, +} + +fn default_plugin_data_dir() -> PathBuf { + Config::default_data_dir().join("plugins").join("data") +} + +fn default_plugin_cache_dir() -> PathBuf { + Config::default_data_dir().join("plugins").join("cache") +} + +fn default_max_concurrent_ops() -> usize { + 4 +} + +fn default_plugin_timeout() -> u64 { + 30 +} + +impl Default for PluginsConfig { + fn default() -> Self { + Self { + enabled: false, + data_dir: default_plugin_data_dir(), + cache_dir: default_plugin_cache_dir(), + plugin_dirs: vec![], + enable_hot_reload: false, + allow_unsigned: false, + max_concurrent_ops: default_max_concurrent_ops(), + plugin_timeout_secs: default_plugin_timeout(), + } + } +} + +// ===== Transcoding Configuration ===== + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TranscodingConfig { + #[serde(default)] + pub enabled: bool, + #[serde(default)] + pub cache_dir: Option, + #[serde(default = "default_cache_ttl_hours")] + pub cache_ttl_hours: u64, + #[serde(default = "default_max_concurrent_transcodes")] + pub max_concurrent: usize, + 
#[serde(default)] + pub hardware_acceleration: Option, + #[serde(default)] + pub profiles: Vec, +} + +fn default_cache_ttl_hours() -> u64 { + 48 +} + +fn default_max_concurrent_transcodes() -> usize { + 2 +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TranscodeProfile { + pub name: String, + pub video_codec: String, + pub audio_codec: String, + pub max_bitrate_kbps: u32, + pub max_resolution: String, +} + +impl Default for TranscodingConfig { + fn default() -> Self { + Self { + enabled: false, + cache_dir: None, + cache_ttl_hours: default_cache_ttl_hours(), + max_concurrent: default_max_concurrent_transcodes(), + hardware_acceleration: None, + profiles: vec![ + TranscodeProfile { + name: "high".to_string(), + video_codec: "h264".to_string(), + audio_codec: "aac".to_string(), + max_bitrate_kbps: 8000, + max_resolution: "1080p".to_string(), + }, + TranscodeProfile { + name: "medium".to_string(), + video_codec: "h264".to_string(), + audio_codec: "aac".to_string(), + max_bitrate_kbps: 4000, + max_resolution: "720p".to_string(), + }, + ], + } + } +} + +// ===== Enrichment Configuration ===== + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct EnrichmentConfig { + #[serde(default)] + pub enabled: bool, + #[serde(default)] + pub auto_enrich_on_import: bool, + #[serde(default)] + pub sources: EnrichmentSources, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct EnrichmentSources { + #[serde(default)] + pub musicbrainz: EnrichmentSource, + #[serde(default)] + pub tmdb: EnrichmentSource, + #[serde(default)] + pub lastfm: EnrichmentSource, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct EnrichmentSource { + #[serde(default)] + pub enabled: bool, + #[serde(default)] + pub api_key: Option, + #[serde(default)] + pub api_endpoint: Option, +} + +// ===== Cloud Configuration ===== + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CloudConfig { + #[serde(default)] + pub enabled: 
bool, + #[serde(default = "default_auto_sync_interval")] + pub auto_sync_interval_mins: u64, + #[serde(default)] + pub accounts: Vec, +} + +fn default_auto_sync_interval() -> u64 { + 60 +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CloudAccount { + pub id: String, + pub provider: String, + #[serde(default)] + pub enabled: bool, + #[serde(default)] + pub sync_rules: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CloudSyncRule { + pub local_path: PathBuf, + pub remote_path: String, + pub direction: CloudSyncDirection, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CloudSyncDirection { + Upload, + Download, + Bidirectional, +} + +impl Default for CloudConfig { + fn default() -> Self { + Self { + enabled: false, + auto_sync_interval_mins: default_auto_sync_interval(), + accounts: vec![], + } + } +} + +// ===== Analytics Configuration ===== + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AnalyticsConfig { + #[serde(default)] + pub enabled: bool, + #[serde(default = "default_true")] + pub track_usage: bool, + #[serde(default = "default_retention_days")] + pub retention_days: u64, +} + +fn default_retention_days() -> u64 { + 90 +} + +impl Default for AnalyticsConfig { + fn default() -> Self { + Self { + enabled: false, + track_usage: true, + retention_days: default_retention_days(), + } + } +} + +// ===== Storage Configuration ===== + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct StorageConfig { pub backend: StorageBackendType, @@ -379,6 +616,11 @@ impl Default for Config { thumbnails: ThumbnailConfig::default(), webhooks: vec![], scheduled_tasks: vec![], + plugins: PluginsConfig::default(), + transcoding: TranscodingConfig::default(), + enrichment: EnrichmentConfig::default(), + cloud: CloudConfig::default(), + analytics: AnalyticsConfig::default(), } } } diff --git a/crates/pinakes-core/src/enrichment/lastfm.rs 
b/crates/pinakes-core/src/enrichment/lastfm.rs new file mode 100644 index 0000000..260f4fc --- /dev/null +++ b/crates/pinakes-core/src/enrichment/lastfm.rs @@ -0,0 +1,109 @@ +//! Last.fm metadata enrichment for audio files. + +use std::time::Duration; + +use chrono::Utc; +use uuid::Uuid; + +use crate::error::{PinakesError, Result}; +use crate::model::MediaItem; + +use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher}; + +pub struct LastFmEnricher { + client: reqwest::Client, + api_key: String, + base_url: String, +} + +impl LastFmEnricher { + pub fn new(api_key: String) -> Self { + Self { + client: reqwest::Client::builder() + .timeout(Duration::from_secs(10)) + .connect_timeout(Duration::from_secs(5)) + .build() + .expect("failed to build HTTP client with configured timeouts"), + api_key, + base_url: "https://ws.audioscrobbler.com/2.0".to_string(), + } + } +} + +#[async_trait::async_trait] +impl MetadataEnricher for LastFmEnricher { + fn source(&self) -> EnrichmentSourceType { + EnrichmentSourceType::LastFm + } + + async fn enrich(&self, item: &MediaItem) -> Result> { + let artist = match &item.artist { + Some(a) if !a.is_empty() => a, + _ => return Ok(None), + }; + + let title = match &item.title { + Some(t) if !t.is_empty() => t, + _ => return Ok(None), + }; + + let url = format!("{}/", self.base_url); + + let resp = self + .client + .get(&url) + .query(&[ + ("method", "track.getInfo"), + ("api_key", self.api_key.as_str()), + ("artist", artist.as_str()), + ("track", title.as_str()), + ("format", "json"), + ]) + .send() + .await + .map_err(|e| { + PinakesError::MetadataExtraction(format!("Last.fm request failed: {e}")) + })?; + + if !resp.status().is_success() { + return Ok(None); + } + + let body = resp.text().await.map_err(|e| { + PinakesError::MetadataExtraction(format!("Last.fm response read failed: {e}")) + })?; + + let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| { + PinakesError::MetadataExtraction(format!("Last.fm JSON 
parse failed: {e}")) + })?; + + // Check for error response + if json.get("error").is_some() { + return Ok(None); + } + + let track = match json.get("track") { + Some(t) => t, + None => return Ok(None), + }; + + let mbid = track.get("mbid").and_then(|m| m.as_str()).map(String::from); + let listeners = track + .get("listeners") + .and_then(|l| l.as_str()) + .and_then(|l| l.parse::().ok()) + .unwrap_or(0.0); + // Normalize listeners to confidence (arbitrary scale) + let confidence = (listeners / 1_000_000.0).min(1.0); + + Ok(Some(ExternalMetadata { + id: Uuid::now_v7(), + media_id: item.id, + source: EnrichmentSourceType::LastFm, + external_id: mbid, + metadata_json: body, + confidence, + last_updated: Utc::now(), + })) + } +} diff --git a/crates/pinakes-core/src/enrichment/mod.rs b/crates/pinakes-core/src/enrichment/mod.rs new file mode 100644 index 0000000..518160c --- /dev/null +++ b/crates/pinakes-core/src/enrichment/mod.rs @@ -0,0 +1,66 @@ +//! Metadata enrichment from external sources. + +pub mod lastfm; +pub mod musicbrainz; +pub mod tmdb; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::error::Result; +use crate::model::{MediaId, MediaItem}; + +/// Externally-sourced metadata for a media item. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExternalMetadata { + pub id: Uuid, + pub media_id: MediaId, + pub source: EnrichmentSourceType, + pub external_id: Option, + pub metadata_json: String, + pub confidence: f64, + pub last_updated: DateTime, +} + +/// Supported enrichment data sources. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum EnrichmentSourceType { + #[serde(rename = "musicbrainz")] + MusicBrainz, + #[serde(rename = "tmdb")] + Tmdb, + #[serde(rename = "lastfm")] + LastFm, +} + +impl std::fmt::Display for EnrichmentSourceType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let s = match self { + Self::MusicBrainz => "musicbrainz", + Self::Tmdb => "tmdb", + Self::LastFm => "lastfm", + }; + write!(f, "{s}") + } +} + +impl std::str::FromStr for EnrichmentSourceType { + type Err = String; + + fn from_str(s: &str) -> std::result::Result { + match s { + "musicbrainz" => Ok(Self::MusicBrainz), + "tmdb" => Ok(Self::Tmdb), + "lastfm" => Ok(Self::LastFm), + _ => Err(format!("unknown enrichment source: {s}")), + } + } +} + +/// Trait for metadata enrichment providers. +#[async_trait::async_trait] +pub trait MetadataEnricher: Send + Sync { + fn source(&self) -> EnrichmentSourceType; + async fn enrich(&self, item: &MediaItem) -> Result>; +} diff --git a/crates/pinakes-core/src/enrichment/musicbrainz.rs b/crates/pinakes-core/src/enrichment/musicbrainz.rs new file mode 100644 index 0000000..3d5c779 --- /dev/null +++ b/crates/pinakes-core/src/enrichment/musicbrainz.rs @@ -0,0 +1,134 @@ +//! MusicBrainz metadata enrichment for audio files. 
+ +use std::time::Duration; + +use chrono::Utc; +use uuid::Uuid; + +use crate::error::{PinakesError, Result}; +use crate::model::MediaItem; + +use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher}; + +pub struct MusicBrainzEnricher { + client: reqwest::Client, + base_url: String, +} + +impl Default for MusicBrainzEnricher { + fn default() -> Self { + Self::new() + } +} + +impl MusicBrainzEnricher { + pub fn new() -> Self { + Self { + client: reqwest::Client::builder() + .user_agent("Pinakes/0.1 (https://github.com/notashelf/pinakes)") + .timeout(Duration::from_secs(10)) + .connect_timeout(Duration::from_secs(5)) + .build() + .expect("failed to build HTTP client with configured timeouts"), + base_url: "https://musicbrainz.org/ws/2".to_string(), + } + } +} + +fn escape_lucene_query(s: &str) -> String { + let special_chars = [ + '+', '-', '&', '|', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?', ':', '\\', + '/', + ]; + let mut escaped = String::with_capacity(s.len() * 2); + for c in s.chars() { + if special_chars.contains(&c) { + escaped.push('\\'); + } + escaped.push(c); + } + escaped +} + +#[async_trait::async_trait] +impl MetadataEnricher for MusicBrainzEnricher { + fn source(&self) -> EnrichmentSourceType { + EnrichmentSourceType::MusicBrainz + } + + async fn enrich(&self, item: &MediaItem) -> Result> { + let title = match &item.title { + Some(t) if !t.is_empty() => t, + _ => return Ok(None), + }; + + let mut query = format!("recording:{}", escape_lucene_query(title)); + if let Some(ref artist) = item.artist { + query.push_str(&format!(" AND artist:{}", escape_lucene_query(artist))); + } + + let url = format!("{}/recording/", self.base_url); + + let resp = self + .client + .get(&url) + .query(&[ + ("query", &query), + ("fmt", &"json".to_string()), + ("limit", &"1".to_string()), + ]) + .send() + .await + .map_err(|e| { + PinakesError::MetadataExtraction(format!("MusicBrainz request failed: {e}")) + })?; + + if 
!resp.status().is_success() { + let status = resp.status(); + if status == reqwest::StatusCode::TOO_MANY_REQUESTS + || status == reqwest::StatusCode::SERVICE_UNAVAILABLE + { + return Err(PinakesError::MetadataExtraction(format!( + "MusicBrainz rate limited (HTTP {})", + status.as_u16() + ))); + } + return Ok(None); + } + + let body = resp.text().await.map_err(|e| { + PinakesError::MetadataExtraction(format!("MusicBrainz response read failed: {e}")) + })?; + + // Parse to check if we got results + let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| { + PinakesError::MetadataExtraction(format!("MusicBrainz JSON parse failed: {e}")) + })?; + + let recordings = json.get("recordings").and_then(|r| r.as_array()); + if recordings.is_none_or(|r| r.is_empty()) { + return Ok(None); + } + + let recording = &recordings.unwrap()[0]; + let external_id = recording + .get("id") + .and_then(|id| id.as_str()) + .map(String::from); + let score = recording + .get("score") + .and_then(|s| s.as_f64()) + .unwrap_or(0.0) + / 100.0; + + Ok(Some(ExternalMetadata { + id: Uuid::now_v7(), + media_id: item.id, + source: EnrichmentSourceType::MusicBrainz, + external_id, + metadata_json: body, + confidence: score, + last_updated: Utc::now(), + })) + } +} diff --git a/crates/pinakes-core/src/enrichment/tmdb.rs b/crates/pinakes-core/src/enrichment/tmdb.rs new file mode 100644 index 0000000..28d0a8c --- /dev/null +++ b/crates/pinakes-core/src/enrichment/tmdb.rs @@ -0,0 +1,109 @@ +//! TMDB (The Movie Database) metadata enrichment for video files. 
+ +use std::time::Duration; + +use chrono::Utc; +use uuid::Uuid; + +use crate::error::{PinakesError, Result}; +use crate::model::MediaItem; + +use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher}; + +pub struct TmdbEnricher { + client: reqwest::Client, + api_key: String, + base_url: String, +} + +impl TmdbEnricher { + pub fn new(api_key: String) -> Self { + Self { + client: reqwest::Client::builder() + .timeout(Duration::from_secs(10)) + .connect_timeout(Duration::from_secs(5)) + .build() + .expect("failed to build HTTP client with configured timeouts"), + api_key, + base_url: "https://api.themoviedb.org/3".to_string(), + } + } +} + +#[async_trait::async_trait] +impl MetadataEnricher for TmdbEnricher { + fn source(&self) -> EnrichmentSourceType { + EnrichmentSourceType::Tmdb + } + + async fn enrich(&self, item: &MediaItem) -> Result> { + let title = match &item.title { + Some(t) if !t.is_empty() => t, + _ => return Ok(None), + }; + + let url = format!("{}/search/movie", self.base_url); + + let resp = self + .client + .get(&url) + .query(&[ + ("api_key", &self.api_key), + ("query", &title.to_string()), + ("page", &"1".to_string()), + ]) + .send() + .await + .map_err(|e| PinakesError::MetadataExtraction(format!("TMDB request failed: {e}")))?; + + if !resp.status().is_success() { + let status = resp.status(); + if status == reqwest::StatusCode::UNAUTHORIZED { + return Err(PinakesError::MetadataExtraction( + "TMDB API key is invalid (401)".into(), + )); + } + if status == reqwest::StatusCode::TOO_MANY_REQUESTS { + tracing::warn!("TMDB rate limit exceeded (429)"); + return Ok(None); + } + tracing::debug!(status = %status, "TMDB search returned non-success status"); + return Ok(None); + } + + let body = resp.text().await.map_err(|e| { + PinakesError::MetadataExtraction(format!("TMDB response read failed: {e}")) + })?; + + let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| { + PinakesError::MetadataExtraction(format!("TMDB JSON parse 
failed: {e}")) + })?; + + let results = json.get("results").and_then(|r| r.as_array()); + if results.is_none_or(|r| r.is_empty()) { + return Ok(None); + } + + let movie = &results.unwrap()[0]; + let external_id = match movie.get("id").and_then(|id| id.as_i64()) { + Some(id) => id.to_string(), + None => return Ok(None), + }; + let popularity = movie + .get("popularity") + .and_then(|p| p.as_f64()) + .unwrap_or(0.0); + // Normalize popularity to 0-1 range (TMDB popularity can be very high) + let confidence = (popularity / 100.0).min(1.0); + + Ok(Some(ExternalMetadata { + id: Uuid::now_v7(), + media_id: item.id, + source: EnrichmentSourceType::Tmdb, + external_id: Some(external_id), + metadata_json: body, + confidence, + last_updated: Utc::now(), + })) + } +} diff --git a/crates/pinakes-core/src/error.rs b/crates/pinakes-core/src/error.rs index 0ad2ccc..8019d9e 100644 --- a/crates/pinakes-core/src/error.rs +++ b/crates/pinakes-core/src/error.rs @@ -42,6 +42,12 @@ pub enum PinakesError { #[error("invalid operation: {0}")] InvalidOperation(String), + + #[error("authentication error: {0}")] + Authentication(String), + + #[error("authorization error: {0}")] + Authorization(String), } impl From for PinakesError { @@ -56,4 +62,10 @@ impl From for PinakesError { } } +impl From for PinakesError { + fn from(e: serde_json::Error) -> Self { + PinakesError::Database(format!("JSON serialization error: {}", e)) + } +} + pub type Result = std::result::Result; diff --git a/crates/pinakes-core/src/events.rs b/crates/pinakes-core/src/events.rs index d0d72ca..d38afeb 100644 --- a/crates/pinakes-core/src/events.rs +++ b/crates/pinakes-core/src/events.rs @@ -27,6 +27,27 @@ pub enum PinakesEvent { expected: String, actual: String, }, + MediaRated { + media_id: String, + user_id: String, + stars: u8, + }, + MediaCommented { + media_id: String, + user_id: String, + }, + PlaylistCreated { + playlist_id: String, + owner_id: String, + }, + TranscodeStarted { + media_id: String, + profile: 
String, + }, + TranscodeCompleted { + media_id: String, + profile: String, + }, } impl PinakesEvent { @@ -37,6 +58,11 @@ impl PinakesEvent { Self::MediaDeleted { .. } => "media_deleted", Self::ScanCompleted { .. } => "scan_completed", Self::IntegrityMismatch { .. } => "integrity_mismatch", + Self::MediaRated { .. } => "media_rated", + Self::MediaCommented { .. } => "media_commented", + Self::PlaylistCreated { .. } => "playlist_created", + Self::TranscodeStarted { .. } => "transcode_started", + Self::TranscodeCompleted { .. } => "transcode_completed", } } } diff --git a/crates/pinakes-core/src/export.rs b/crates/pinakes-core/src/export.rs index 9e2feac..e611dfe 100644 --- a/crates/pinakes-core/src/export.rs +++ b/crates/pinakes-core/src/export.rs @@ -23,7 +23,7 @@ pub async fn export_library( limit: u64::MAX, sort: None, }; - let items = storage.list_media(&&pagination).await?; + let items = storage.list_media(&pagination).await?; let count = items.len(); match format { diff --git a/crates/pinakes-core/src/import.rs b/crates/pinakes-core/src/import.rs index ff29801..4ce805f 100644 --- a/crates/pinakes-core/src/import.rs +++ b/crates/pinakes-core/src/import.rs @@ -64,9 +64,12 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result Result, + }, + CleanupAnalytics, } #[derive(Debug, Clone, Serialize, Deserialize)] diff --git a/crates/pinakes-core/src/lib.rs b/crates/pinakes-core/src/lib.rs index 34fee25..f6aa069 100644 --- a/crates/pinakes-core/src/lib.rs +++ b/crates/pinakes-core/src/lib.rs @@ -1,7 +1,9 @@ +pub mod analytics; pub mod audit; pub mod cache; pub mod collections; pub mod config; +pub mod enrichment; pub mod error; pub mod events; pub mod export; @@ -13,9 +15,15 @@ pub mod media_type; pub mod metadata; pub mod model; pub mod opener; +pub mod playlists; +pub mod plugin; pub mod scan; pub mod scheduler; pub mod search; +pub mod social; pub mod storage; +pub mod subtitles; pub mod tags; pub mod thumbnail; +pub mod transcode; +pub mod 
users; diff --git a/crates/pinakes-core/src/media_type.rs b/crates/pinakes-core/src/media_type/builtin.rs similarity index 76% rename from crates/pinakes-core/src/media_type.rs rename to crates/pinakes-core/src/media_type/builtin.rs index 483098d..92068ff 100644 --- a/crates/pinakes-core/src/media_type.rs +++ b/crates/pinakes-core/src/media_type/builtin.rs @@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] -pub enum MediaType { +pub enum BuiltinMediaType { // Audio Mp3, Flac, @@ -60,7 +60,48 @@ pub enum MediaCategory { Image, } -impl MediaType { +impl BuiltinMediaType { + /// Get the unique ID for this media type + pub fn id(&self) -> String { + format!("{:?}", self).to_lowercase() + } + + /// Get the display name for this media type + pub fn name(&self) -> String { + match self { + Self::Mp3 => "MP3 Audio".to_string(), + Self::Flac => "FLAC Audio".to_string(), + Self::Ogg => "OGG Audio".to_string(), + Self::Wav => "WAV Audio".to_string(), + Self::Aac => "AAC Audio".to_string(), + Self::Opus => "Opus Audio".to_string(), + Self::Mp4 => "MP4 Video".to_string(), + Self::Mkv => "MKV Video".to_string(), + Self::Avi => "AVI Video".to_string(), + Self::Webm => "WebM Video".to_string(), + Self::Pdf => "PDF Document".to_string(), + Self::Epub => "EPUB eBook".to_string(), + Self::Djvu => "DjVu Document".to_string(), + Self::Markdown => "Markdown".to_string(), + Self::PlainText => "Plain Text".to_string(), + Self::Jpeg => "JPEG Image".to_string(), + Self::Png => "PNG Image".to_string(), + Self::Gif => "GIF Image".to_string(), + Self::Webp => "WebP Image".to_string(), + Self::Svg => "SVG Image".to_string(), + Self::Avif => "AVIF Image".to_string(), + Self::Tiff => "TIFF Image".to_string(), + Self::Bmp => "BMP Image".to_string(), + Self::Cr2 => "Canon RAW (CR2)".to_string(), + Self::Nef => "Nikon RAW (NEF)".to_string(), + Self::Arw => "Sony RAW (ARW)".to_string(), + 
Self::Dng => "Adobe DNG RAW".to_string(), + Self::Orf => "Olympus RAW (ORF)".to_string(), + Self::Rw2 => "Panasonic RAW (RW2)".to_string(), + Self::Heic => "HEIC Image".to_string(), + } + } + pub fn from_extension(ext: &str) -> Option { match ext.to_ascii_lowercase().as_str() { "mp3" => Some(Self::Mp3), diff --git a/crates/pinakes-core/src/media_type/mod.rs b/crates/pinakes-core/src/media_type/mod.rs new file mode 100644 index 0000000..678f8f9 --- /dev/null +++ b/crates/pinakes-core/src/media_type/mod.rs @@ -0,0 +1,232 @@ +//! Extensible media type system +//! +//! This module provides an extensible media type system that supports both +//! built-in media types and plugin-registered custom types. + +use serde::{Deserialize, Serialize}; +use std::path::Path; + +pub mod builtin; +pub mod registry; + +pub use builtin::{BuiltinMediaType, MediaCategory}; +pub use registry::{MediaTypeDescriptor, MediaTypeRegistry}; + +/// Media type identifier - can be either built-in or custom +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(untagged)] +pub enum MediaType { + /// Built-in media type (backward compatible) + Builtin(BuiltinMediaType), + + /// Custom media type from a plugin + Custom(String), +} + +impl MediaType { + /// Create a new custom media type + pub fn custom(id: impl Into) -> Self { + Self::Custom(id.into()) + } + + /// Get the type ID as a string + pub fn id(&self) -> String { + match self { + Self::Builtin(b) => b.id(), + Self::Custom(id) => id.clone(), + } + } + + /// Get the display name for this media type + /// For custom types without a registry, returns the ID as the name + pub fn name(&self) -> String { + match self { + Self::Builtin(b) => b.name(), + Self::Custom(id) => id.clone(), + } + } + + /// Get the display name for this media type with registry support + pub fn name_with_registry(&self, registry: &MediaTypeRegistry) -> String { + match self { + Self::Builtin(b) => b.name(), + Self::Custom(id) => registry + .get(id) + 
.map(|d| d.name.clone()) + .unwrap_or_else(|| id.clone()), + } + } + + /// Get the category for this media type + /// For custom types without a registry, returns MediaCategory::Document as default + pub fn category(&self) -> MediaCategory { + match self { + Self::Builtin(b) => b.category(), + Self::Custom(_) => MediaCategory::Document, + } + } + + /// Get the category for this media type with registry support + pub fn category_with_registry(&self, registry: &MediaTypeRegistry) -> MediaCategory { + match self { + Self::Builtin(b) => b.category(), + Self::Custom(id) => registry + .get(id) + .and_then(|d| d.category) + .unwrap_or(MediaCategory::Document), + } + } + + /// Get the MIME type + /// For custom types without a registry, returns "application/octet-stream" + pub fn mime_type(&self) -> String { + match self { + Self::Builtin(b) => b.mime_type().to_string(), + Self::Custom(_) => "application/octet-stream".to_string(), + } + } + + /// Get the MIME type with registry support + pub fn mime_type_with_registry(&self, registry: &MediaTypeRegistry) -> String { + match self { + Self::Builtin(b) => b.mime_type().to_string(), + Self::Custom(id) => registry + .get(id) + .and_then(|d| d.mime_types.first().cloned()) + .unwrap_or_else(|| "application/octet-stream".to_string()), + } + } + + /// Get file extensions + /// For custom types without a registry, returns an empty vec + pub fn extensions(&self) -> Vec<String> { + match self { + Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(), + Self::Custom(_) => vec![], + } + } + + /// Get file extensions with registry support + pub fn extensions_with_registry(&self, registry: &MediaTypeRegistry) -> Vec<String> { + match self { + Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(), + Self::Custom(id) => registry + .get(id) + .map(|d| d.extensions.clone()) + .unwrap_or_default(), + } + } + + /// Check if this is a RAW image format + pub fn is_raw(&self) -> bool { + match self { + Self::Builtin(b) =>
b.is_raw(), + Self::Custom(_) => false, + } + } + + /// Resolve a media type from file extension (built-in types only) + /// Use from_extension_with_registry for custom types + pub fn from_extension(ext: &str) -> Option<Self> { + BuiltinMediaType::from_extension(ext).map(Self::Builtin) + } + + /// Resolve a media type from file extension with registry (includes custom types) + pub fn from_extension_with_registry(ext: &str, registry: &MediaTypeRegistry) -> Option<Self> { + // Try built-in types first + if let Some(builtin) = BuiltinMediaType::from_extension(ext) { + return Some(Self::Builtin(builtin)); + } + + // Try registered custom types + registry + .get_by_extension(ext) + .map(|desc| Self::Custom(desc.id.clone())) + } + + /// Resolve a media type from file path (built-in types only) + /// Use from_path_with_registry for custom types + pub fn from_path(path: &Path) -> Option<Self> { + path.extension() + .and_then(|e| e.to_str()) + .and_then(Self::from_extension) + } + + /// Resolve a media type from file path with registry (includes custom types) + pub fn from_path_with_registry(path: &Path, registry: &MediaTypeRegistry) -> Option<Self> { + path.extension() + .and_then(|e| e.to_str()) + .and_then(|ext| Self::from_extension_with_registry(ext, registry)) + } +} + +// Implement From for easier conversion +impl From<BuiltinMediaType> for MediaType { + fn from(builtin: BuiltinMediaType) -> Self { + Self::Builtin(builtin) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_builtin_media_type() { + let mt = MediaType::Builtin(BuiltinMediaType::Mp3); + + assert_eq!(mt.id(), "mp3"); + assert_eq!(mt.mime_type(), "audio/mpeg"); + assert_eq!(mt.category(), MediaCategory::Audio); + } + + #[test] + fn test_custom_media_type() { + let mut registry = MediaTypeRegistry::new(); + + let descriptor = MediaTypeDescriptor { + id: "heif".to_string(), + name: "HEIF Image".to_string(), + category: Some(MediaCategory::Image), + extensions: vec!["heif".to_string()], + mime_types:
vec!["image/heif".to_string()], + plugin_id: Some("heif-plugin".to_string()), + }; + + registry.register(descriptor).unwrap(); + + let mt = MediaType::custom("heif"); + assert_eq!(mt.id(), "heif"); + assert_eq!(mt.mime_type_with_registry(&registry), "image/heif"); + assert_eq!(mt.category_with_registry(&registry), MediaCategory::Image); + } + + #[test] + fn test_from_extension_builtin() { + let registry = MediaTypeRegistry::new(); + let mt = MediaType::from_extension_with_registry("mp3", &registry); + + assert!(mt.is_some()); + assert_eq!(mt.unwrap(), MediaType::Builtin(BuiltinMediaType::Mp3)); + } + + #[test] + fn test_from_extension_custom() { + let mut registry = MediaTypeRegistry::new(); + + let descriptor = MediaTypeDescriptor { + id: "customformat".to_string(), + name: "Custom Format".to_string(), + category: Some(MediaCategory::Image), + extensions: vec!["xyz".to_string()], + mime_types: vec!["application/x-custom".to_string()], + plugin_id: Some("custom-plugin".to_string()), + }; + + registry.register(descriptor).unwrap(); + + let mt = MediaType::from_extension_with_registry("xyz", &registry); + assert!(mt.is_some()); + assert_eq!(mt.unwrap(), MediaType::custom("customformat")); + } +} diff --git a/crates/pinakes-core/src/media_type/registry.rs b/crates/pinakes-core/src/media_type/registry.rs new file mode 100644 index 0000000..232acf4 --- /dev/null +++ b/crates/pinakes-core/src/media_type/registry.rs @@ -0,0 +1,285 @@ +//!
Media type registry for managing both built-in and custom media types + +use anyhow::{Result, anyhow}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +use super::MediaCategory; + +/// Descriptor for a media type (built-in or custom) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MediaTypeDescriptor { + /// Unique identifier + pub id: String, + + /// Display name + pub name: String, + + /// Category + pub category: Option<MediaCategory>, + + /// File extensions + pub extensions: Vec<String>, + + /// MIME types + pub mime_types: Vec<String>, + + /// Plugin that registered this type (None for built-in types) + pub plugin_id: Option<String>, +} + +/// Registry for media types +#[derive(Debug, Clone)] +pub struct MediaTypeRegistry { + /// Map of media type ID to descriptor + types: HashMap<String, MediaTypeDescriptor>, + + /// Map of extension to media type ID + extension_map: HashMap<String, String>, +} + +impl MediaTypeRegistry { + /// Create a new empty registry + pub fn new() -> Self { + Self { + types: HashMap::new(), + extension_map: HashMap::new(), + } + } + + /// Register a new media type + pub fn register(&mut self, descriptor: MediaTypeDescriptor) -> Result<()> { + // Check if ID is already registered + if self.types.contains_key(&descriptor.id) { + return Err(anyhow!("Media type already registered: {}", descriptor.id)); + } + + // Register extensions + for ext in &descriptor.extensions { + let ext_lower = ext.to_lowercase(); + if self.extension_map.contains_key(&ext_lower) { + // Extension already registered - this is OK, we'll use the first one + // In a more sophisticated system, we might track multiple types per extension + continue; + } + self.extension_map.insert(ext_lower, descriptor.id.clone()); + } + + // Register the type + self.types.insert(descriptor.id.clone(), descriptor); + + Ok(()) + } + + /// Unregister a media type + pub fn unregister(&mut self, id: &str) -> Result<()> { + let descriptor = self + .types + .remove(id) + .ok_or_else(|| anyhow!("Media type not found: {}", id))?; + + //
Remove extensions + for ext in &descriptor.extensions { + let ext_lower = ext.to_lowercase(); + if self.extension_map.get(&ext_lower) == Some(&descriptor.id) { + self.extension_map.remove(&ext_lower); + } + } + + Ok(()) + } + + /// Get a media type descriptor by ID + pub fn get(&self, id: &str) -> Option<&MediaTypeDescriptor> { + self.types.get(id) + } + + /// Get a media type by file extension + pub fn get_by_extension(&self, ext: &str) -> Option<&MediaTypeDescriptor> { + let ext_lower = ext.to_lowercase(); + self.extension_map + .get(&ext_lower) + .and_then(|id| self.types.get(id)) + } + + /// List all registered media types + pub fn list_all(&self) -> Vec<&MediaTypeDescriptor> { + self.types.values().collect() + } + + /// List media types from a specific plugin + pub fn list_by_plugin(&self, plugin_id: &str) -> Vec<&MediaTypeDescriptor> { + self.types + .values() + .filter(|d| d.plugin_id.as_deref() == Some(plugin_id)) + .collect() + } + + /// List built-in media types (plugin_id is None) + pub fn list_builtin(&self) -> Vec<&MediaTypeDescriptor> { + self.types + .values() + .filter(|d| d.plugin_id.is_none()) + .collect() + } + + /// Get count of registered types + pub fn count(&self) -> usize { + self.types.len() + } + + /// Check if a media type is registered + pub fn contains(&self, id: &str) -> bool { + self.types.contains_key(id) + } + + /// Unregister all types from a specific plugin + pub fn unregister_plugin(&mut self, plugin_id: &str) -> Result<usize> { + let type_ids: Vec<String> = self + .types + .values() + .filter(|d| d.plugin_id.as_deref() == Some(plugin_id)) + .map(|d| d.id.clone()) + .collect(); + + let count = type_ids.len(); + + for id in type_ids { + self.unregister(&id)?; + } + + Ok(count) + } +} + +impl Default for MediaTypeRegistry { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn create_test_descriptor(id: &str, ext: &str) -> MediaTypeDescriptor { + MediaTypeDescriptor { + id: id.to_string(), + name:
format!("{} Type", id), + category: Some(MediaCategory::Document), + extensions: vec![ext.to_string()], + mime_types: vec![format!("application/{}", id)], + plugin_id: Some("test-plugin".to_string()), + } + } + + #[test] + fn test_register_and_get() { + let mut registry = MediaTypeRegistry::new(); + let descriptor = create_test_descriptor("test", "tst"); + + registry.register(descriptor.clone()).unwrap(); + + let retrieved = registry.get("test").unwrap(); + assert_eq!(retrieved.id, "test"); + assert_eq!(retrieved.name, "test Type"); + } + + #[test] + fn test_register_duplicate() { + let mut registry = MediaTypeRegistry::new(); + let descriptor = create_test_descriptor("test", "tst"); + + registry.register(descriptor.clone()).unwrap(); + let result = registry.register(descriptor); + + assert!(result.is_err()); + } + + #[test] + fn test_get_by_extension() { + let mut registry = MediaTypeRegistry::new(); + let descriptor = create_test_descriptor("test", "tst"); + + registry.register(descriptor).unwrap(); + + let retrieved = registry.get_by_extension("tst").unwrap(); + assert_eq!(retrieved.id, "test"); + + // Test case insensitivity + let retrieved = registry.get_by_extension("TST").unwrap(); + assert_eq!(retrieved.id, "test"); + } + + #[test] + fn test_unregister() { + let mut registry = MediaTypeRegistry::new(); + let descriptor = create_test_descriptor("test", "tst"); + + registry.register(descriptor).unwrap(); + assert!(registry.contains("test")); + + registry.unregister("test").unwrap(); + assert!(!registry.contains("test")); + + // Extension should also be removed + assert!(registry.get_by_extension("tst").is_none()); + } + + #[test] + fn test_list_by_plugin() { + let mut registry = MediaTypeRegistry::new(); + + let desc1 = MediaTypeDescriptor { + id: "type1".to_string(), + name: "Type 1".to_string(), + category: Some(MediaCategory::Document), + extensions: vec!["t1".to_string()], + mime_types: vec!["application/type1".to_string()], + plugin_id: 
Some("plugin1".to_string()), + }; + + let desc2 = MediaTypeDescriptor { + id: "type2".to_string(), + name: "Type 2".to_string(), + category: Some(MediaCategory::Document), + extensions: vec!["t2".to_string()], + mime_types: vec!["application/type2".to_string()], + plugin_id: Some("plugin2".to_string()), + }; + + registry.register(desc1).unwrap(); + registry.register(desc2).unwrap(); + + let plugin1_types = registry.list_by_plugin("plugin1"); + assert_eq!(plugin1_types.len(), 1); + assert_eq!(plugin1_types[0].id, "type1"); + + let plugin2_types = registry.list_by_plugin("plugin2"); + assert_eq!(plugin2_types.len(), 1); + assert_eq!(plugin2_types[0].id, "type2"); + } + + #[test] + fn test_unregister_plugin() { + let mut registry = MediaTypeRegistry::new(); + + for i in 1..=3 { + let desc = MediaTypeDescriptor { + id: format!("type{}", i), + name: format!("Type {}", i), + category: Some(MediaCategory::Document), + extensions: vec![format!("t{}", i)], + mime_types: vec![format!("application/type{}", i)], + plugin_id: Some("test-plugin".to_string()), + }; + registry.register(desc).unwrap(); + } + + assert_eq!(registry.count(), 3); + + let removed = registry.unregister_plugin("test-plugin").unwrap(); + assert_eq!(removed, 3); + assert_eq!(registry.count(), 0); + } +} diff --git a/crates/pinakes-core/src/metadata/audio.rs b/crates/pinakes-core/src/metadata/audio.rs index e2a8b0a..41d3a71 100644 --- a/crates/pinakes-core/src/metadata/audio.rs +++ b/crates/pinakes-core/src/metadata/audio.rs @@ -4,7 +4,7 @@ use lofty::file::{AudioFile, TaggedFileExt}; use lofty::tag::Accessor; use crate::error::{PinakesError, Result}; -use crate::media_type::MediaType; +use crate::media_type::{BuiltinMediaType, MediaType}; use super::{ExtractedMetadata, MetadataExtractor}; @@ -68,14 +68,14 @@ impl MetadataExtractor for AudioExtractor { Ok(meta) } - fn supported_types(&self) -> &[MediaType] { - &[ - MediaType::Mp3, - MediaType::Flac, - MediaType::Ogg, - MediaType::Wav, - MediaType::Aac, - 
MediaType::Opus, + fn supported_types(&self) -> Vec { + vec![ + MediaType::Builtin(BuiltinMediaType::Mp3), + MediaType::Builtin(BuiltinMediaType::Flac), + MediaType::Builtin(BuiltinMediaType::Ogg), + MediaType::Builtin(BuiltinMediaType::Wav), + MediaType::Builtin(BuiltinMediaType::Aac), + MediaType::Builtin(BuiltinMediaType::Opus), ] } } diff --git a/crates/pinakes-core/src/metadata/document.rs b/crates/pinakes-core/src/metadata/document.rs index 4aa7817..56ae9af 100644 --- a/crates/pinakes-core/src/metadata/document.rs +++ b/crates/pinakes-core/src/metadata/document.rs @@ -1,7 +1,7 @@ use std::path::Path; use crate::error::{PinakesError, Result}; -use crate::media_type::MediaType; +use crate::media_type::{BuiltinMediaType, MediaType}; use super::{ExtractedMetadata, MetadataExtractor}; @@ -10,15 +10,19 @@ pub struct DocumentExtractor; impl MetadataExtractor for DocumentExtractor { fn extract(&self, path: &Path) -> Result { match MediaType::from_path(path) { - Some(MediaType::Pdf) => extract_pdf(path), - Some(MediaType::Epub) => extract_epub(path), - Some(MediaType::Djvu) => extract_djvu(path), + Some(MediaType::Builtin(BuiltinMediaType::Pdf)) => extract_pdf(path), + Some(MediaType::Builtin(BuiltinMediaType::Epub)) => extract_epub(path), + Some(MediaType::Builtin(BuiltinMediaType::Djvu)) => extract_djvu(path), _ => Ok(ExtractedMetadata::default()), } } - fn supported_types(&self) -> &[MediaType] { - &[MediaType::Pdf, MediaType::Epub, MediaType::Djvu] + fn supported_types(&self) -> Vec { + vec![ + MediaType::Builtin(BuiltinMediaType::Pdf), + MediaType::Builtin(BuiltinMediaType::Epub), + MediaType::Builtin(BuiltinMediaType::Djvu), + ] } } diff --git a/crates/pinakes-core/src/metadata/image.rs b/crates/pinakes-core/src/metadata/image.rs index a38d465..b57d46d 100644 --- a/crates/pinakes-core/src/metadata/image.rs +++ b/crates/pinakes-core/src/metadata/image.rs @@ -1,7 +1,7 @@ use std::path::Path; use crate::error::Result; -use crate::media_type::MediaType; +use 
crate::media_type::{BuiltinMediaType, MediaType}; use super::{ExtractedMetadata, MetadataExtractor}; @@ -163,24 +163,24 @@ impl MetadataExtractor for ImageExtractor { Ok(meta) } - fn supported_types(&self) -> &[MediaType] { - &[ - MediaType::Jpeg, - MediaType::Png, - MediaType::Gif, - MediaType::Webp, - MediaType::Avif, - MediaType::Tiff, - MediaType::Bmp, + fn supported_types(&self) -> Vec { + vec![ + MediaType::Builtin(BuiltinMediaType::Jpeg), + MediaType::Builtin(BuiltinMediaType::Png), + MediaType::Builtin(BuiltinMediaType::Gif), + MediaType::Builtin(BuiltinMediaType::Webp), + MediaType::Builtin(BuiltinMediaType::Avif), + MediaType::Builtin(BuiltinMediaType::Tiff), + MediaType::Builtin(BuiltinMediaType::Bmp), // RAW formats (TIFF-based, kamadak-exif handles these) - MediaType::Cr2, - MediaType::Nef, - MediaType::Arw, - MediaType::Dng, - MediaType::Orf, - MediaType::Rw2, + MediaType::Builtin(BuiltinMediaType::Cr2), + MediaType::Builtin(BuiltinMediaType::Nef), + MediaType::Builtin(BuiltinMediaType::Arw), + MediaType::Builtin(BuiltinMediaType::Dng), + MediaType::Builtin(BuiltinMediaType::Orf), + MediaType::Builtin(BuiltinMediaType::Rw2), // HEIC - MediaType::Heic, + MediaType::Builtin(BuiltinMediaType::Heic), ] } } diff --git a/crates/pinakes-core/src/metadata/markdown.rs b/crates/pinakes-core/src/metadata/markdown.rs index 7da1714..b17901c 100644 --- a/crates/pinakes-core/src/metadata/markdown.rs +++ b/crates/pinakes-core/src/metadata/markdown.rs @@ -1,7 +1,7 @@ use std::path::Path; use crate::error::Result; -use crate::media_type::MediaType; +use crate::media_type::{BuiltinMediaType, MediaType}; use super::{ExtractedMetadata, MetadataExtractor}; @@ -34,7 +34,10 @@ impl MetadataExtractor for MarkdownExtractor { Ok(meta) } - fn supported_types(&self) -> &[MediaType] { - &[MediaType::Markdown, MediaType::PlainText] + fn supported_types(&self) -> Vec { + vec![ + MediaType::Builtin(BuiltinMediaType::Markdown), + MediaType::Builtin(BuiltinMediaType::PlainText), + ] } 
} diff --git a/crates/pinakes-core/src/metadata/mod.rs b/crates/pinakes-core/src/metadata/mod.rs index fb776d3..4b048da 100644 --- a/crates/pinakes-core/src/metadata/mod.rs +++ b/crates/pinakes-core/src/metadata/mod.rs @@ -24,7 +24,7 @@ pub struct ExtractedMetadata { pub trait MetadataExtractor: Send + Sync { fn extract(&self, path: &Path) -> Result; - fn supported_types(&self) -> &[MediaType]; + fn supported_types(&self) -> Vec; } pub fn extract_metadata(path: &Path, media_type: MediaType) -> Result { diff --git a/crates/pinakes-core/src/metadata/video.rs b/crates/pinakes-core/src/metadata/video.rs index 8cc6c4d..f565996 100644 --- a/crates/pinakes-core/src/metadata/video.rs +++ b/crates/pinakes-core/src/metadata/video.rs @@ -1,7 +1,7 @@ use std::path::Path; use crate::error::{PinakesError, Result}; -use crate::media_type::MediaType; +use crate::media_type::{BuiltinMediaType, MediaType}; use super::{ExtractedMetadata, MetadataExtractor}; @@ -10,18 +10,16 @@ pub struct VideoExtractor; impl MetadataExtractor for VideoExtractor { fn extract(&self, path: &Path) -> Result { match MediaType::from_path(path) { - Some(MediaType::Mkv) => extract_mkv(path), - Some(MediaType::Mp4) => extract_mp4(path), + Some(MediaType::Builtin(BuiltinMediaType::Mkv)) => extract_mkv(path), + Some(MediaType::Builtin(BuiltinMediaType::Mp4)) => extract_mp4(path), _ => Ok(ExtractedMetadata::default()), } } - fn supported_types(&self) -> &[MediaType] { - &[ - MediaType::Mp4, - MediaType::Mkv, - MediaType::Avi, - MediaType::Webm, + fn supported_types(&self) -> Vec { + vec![ + MediaType::Builtin(BuiltinMediaType::Mp4), + MediaType::Builtin(BuiltinMediaType::Mkv), ] } } diff --git a/crates/pinakes-core/src/playlists.rs b/crates/pinakes-core/src/playlists.rs new file mode 100644 index 0000000..b45c4be --- /dev/null +++ b/crates/pinakes-core/src/playlists.rs @@ -0,0 +1,31 @@ +//! Playlist management: ordered collections of media items. 
+ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::model::MediaId; +use crate::users::UserId; + +/// A user-owned playlist of media items. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Playlist { + pub id: Uuid, + pub owner_id: UserId, + pub name: String, + pub description: Option, + pub is_public: bool, + pub is_smart: bool, + pub filter_query: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +/// An item within a playlist at a specific position. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PlaylistItem { + pub playlist_id: Uuid, + pub media_id: MediaId, + pub position: i32, + pub added_at: DateTime, +} diff --git a/crates/pinakes-core/src/plugin/loader.rs b/crates/pinakes-core/src/plugin/loader.rs new file mode 100644 index 0000000..4201d10 --- /dev/null +++ b/crates/pinakes-core/src/plugin/loader.rs @@ -0,0 +1,407 @@ +//! Plugin loader for discovering and loading plugins from the filesystem + +use anyhow::{Result, anyhow}; +use pinakes_plugin_api::PluginManifest; +use std::path::{Path, PathBuf}; +use tracing::{debug, info, warn}; +use walkdir::WalkDir; + +/// Plugin loader handles discovery and loading of plugins from directories +pub struct PluginLoader { + /// Directories to search for plugins + plugin_dirs: Vec, +} + +impl PluginLoader { + /// Create a new plugin loader + pub fn new(plugin_dirs: Vec) -> Self { + Self { plugin_dirs } + } + + /// Discover all plugins in configured directories + pub async fn discover_plugins(&self) -> Result> { + let mut manifests = Vec::new(); + + for dir in &self.plugin_dirs { + if !dir.exists() { + warn!("Plugin directory does not exist: {:?}", dir); + continue; + } + + info!("Discovering plugins in: {:?}", dir); + + match self.discover_in_directory(dir).await { + Ok(found) => { + info!("Found {} plugins in {:?}", found.len(), dir); + manifests.extend(found); + } + Err(e) => { + warn!("Error discovering plugins in {:?}: {}", 
dir, e); + } + } + } + + Ok(manifests) + } + + /// Discover plugins in a specific directory + async fn discover_in_directory(&self, dir: &Path) -> Result> { + let mut manifests = Vec::new(); + + // Walk the directory looking for plugin.toml files + for entry in WalkDir::new(dir) + .max_depth(3) // Don't go too deep + .follow_links(false) + { + let entry = match entry { + Ok(e) => e, + Err(e) => { + warn!("Error reading directory entry: {}", e); + continue; + } + }; + + let path = entry.path(); + + // Look for plugin.toml files + if path.file_name() == Some(std::ffi::OsStr::new("plugin.toml")) { + debug!("Found plugin manifest: {:?}", path); + + match PluginManifest::from_file(path) { + Ok(manifest) => { + info!("Loaded manifest for plugin: {}", manifest.plugin.name); + manifests.push(manifest); + } + Err(e) => { + warn!("Failed to load manifest from {:?}: {}", path, e); + } + } + } + } + + Ok(manifests) + } + + /// Resolve the WASM binary path from a manifest + pub fn resolve_wasm_path(&self, manifest: &PluginManifest) -> Result { + // The WASM path in the manifest is relative to the manifest file + // We need to search for it in the plugin directories + + for dir in &self.plugin_dirs { + // Look for a directory matching the plugin name + let plugin_dir = dir.join(&manifest.plugin.name); + if !plugin_dir.exists() { + continue; + } + + // Check for plugin.toml in this directory + let manifest_path = plugin_dir.join("plugin.toml"); + if !manifest_path.exists() { + continue; + } + + // Resolve WASM path relative to this directory + let wasm_path = plugin_dir.join(&manifest.plugin.binary.wasm); + if wasm_path.exists() { + // Verify the resolved path is within the plugin directory (prevent path traversal) + let canonical_wasm = wasm_path + .canonicalize() + .map_err(|e| anyhow!("Failed to canonicalize WASM path: {}", e))?; + let canonical_plugin_dir = plugin_dir + .canonicalize() + .map_err(|e| anyhow!("Failed to canonicalize plugin dir: {}", e))?; + if 
!canonical_wasm.starts_with(&canonical_plugin_dir) { + return Err(anyhow!( + "WASM binary path escapes plugin directory: {:?}", + wasm_path + )); + } + return Ok(canonical_wasm); + } + } + + Err(anyhow!( + "WASM binary not found for plugin: {}", + manifest.plugin.name + )) + } + + /// Download a plugin from a URL + pub async fn download_plugin(&self, url: &str) -> Result { + // Only allow HTTPS downloads + if !url.starts_with("https://") { + return Err(anyhow!( + "Only HTTPS URLs are allowed for plugin downloads: {}", + url + )); + } + + let dest_dir = self + .plugin_dirs + .first() + .ok_or_else(|| anyhow!("No plugin directories configured"))?; + + std::fs::create_dir_all(dest_dir)?; + + // Download the archive with timeout and size limits + let client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(300)) + .build() + .map_err(|e| anyhow!("Failed to build HTTP client: {}", e))?; + + let response = client + .get(url) + .send() + .await + .map_err(|e| anyhow!("Failed to download plugin: {}", e))?; + + if !response.status().is_success() { + return Err(anyhow!( + "Plugin download failed with status: {}", + response.status() + )); + } + + // Check content-length header before downloading + const MAX_PLUGIN_SIZE: u64 = 100 * 1024 * 1024; // 100 MB + if let Some(content_length) = response.content_length() + && content_length > MAX_PLUGIN_SIZE { + return Err(anyhow!( + "Plugin archive too large: {} bytes (max {} bytes)", + content_length, + MAX_PLUGIN_SIZE + )); + } + + let bytes = response + .bytes() + .await + .map_err(|e| anyhow!("Failed to read plugin response: {}", e))?; + + // Check actual size after download + if bytes.len() as u64 > MAX_PLUGIN_SIZE { + return Err(anyhow!( + "Plugin archive too large: {} bytes (max {} bytes)", + bytes.len(), + MAX_PLUGIN_SIZE + )); + } + + // Write archive to a unique temp file + let temp_archive = dest_dir.join(format!(".download-{}.tar.gz", uuid::Uuid::now_v7())); + std::fs::write(&temp_archive, &bytes)?; + 
+ // Extract using tar with -C to target directory + let canonical_dest = dest_dir + .canonicalize() + .map_err(|e| anyhow!("Failed to canonicalize dest dir: {}", e))?; + let output = std::process::Command::new("tar") + .args([ + "xzf", + &temp_archive.to_string_lossy(), + "-C", + &canonical_dest.to_string_lossy(), + ]) + .output() + .map_err(|e| anyhow!("Failed to extract plugin archive: {}", e))?; + + // Clean up the archive + let _ = std::fs::remove_file(&temp_archive); + + if !output.status.success() { + return Err(anyhow!( + "Failed to extract plugin archive: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + + // Validate that all extracted files are within dest_dir + for entry in WalkDir::new(&canonical_dest).follow_links(false) { + let entry = entry?; + let entry_canonical = entry.path().canonicalize()?; + if !entry_canonical.starts_with(&canonical_dest) { + return Err(anyhow!( + "Extracted file escapes destination directory: {:?}", + entry.path() + )); + } + } + + // Find the extracted plugin directory by looking for plugin.toml + for entry in WalkDir::new(dest_dir).max_depth(2).follow_links(false) { + let entry = entry?; + if entry.file_name() == "plugin.toml" { + let plugin_dir = entry + .path() + .parent() + .ok_or_else(|| anyhow!("Invalid plugin.toml location"))?; + + // Validate the manifest + let manifest = PluginManifest::from_file(entry.path())?; + info!("Downloaded and extracted plugin: {}", manifest.plugin.name); + + return Ok(plugin_dir.to_path_buf()); + } + } + + Err(anyhow!( + "No plugin.toml found after extracting archive from: {}", + url + )) + } + + /// Validate a plugin package + pub fn validate_plugin_package(&self, path: &Path) -> Result<()> { + // Check that the path exists + if !path.exists() { + return Err(anyhow!("Plugin path does not exist: {:?}", path)); + } + + // Check for plugin.toml + let manifest_path = path.join("plugin.toml"); + if !manifest_path.exists() { + return Err(anyhow!("Missing plugin.toml in {:?}", path)); 
+ } + + // Parse and validate manifest + let manifest = PluginManifest::from_file(&manifest_path)?; + + // Check that WASM binary exists + let wasm_path = path.join(&manifest.plugin.binary.wasm); + if !wasm_path.exists() { + return Err(anyhow!( + "WASM binary not found: {}", + manifest.plugin.binary.wasm + )); + } + + // Verify the WASM path is within the plugin directory (prevent path traversal) + let canonical_wasm = wasm_path.canonicalize()?; + let canonical_path = path.canonicalize()?; + if !canonical_wasm.starts_with(&canonical_path) { + return Err(anyhow!( + "WASM binary path escapes plugin directory: {:?}", + wasm_path + )); + } + + // Validate WASM file + let wasm_bytes = std::fs::read(&wasm_path)?; + if wasm_bytes.len() < 4 || &wasm_bytes[0..4] != b"\0asm" { + return Err(anyhow!("Invalid WASM file: {:?}", wasm_path)); + } + + Ok(()) + } + + /// Get plugin directory path for a given plugin name + pub fn get_plugin_dir(&self, plugin_name: &str) -> Option { + for dir in &self.plugin_dirs { + let plugin_dir = dir.join(plugin_name); + if plugin_dir.exists() { + return Some(plugin_dir); + } + } + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[tokio::test] + async fn test_discover_plugins_empty() { + let temp_dir = TempDir::new().unwrap(); + let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]); + + let manifests = loader.discover_plugins().await.unwrap(); + assert_eq!(manifests.len(), 0); + } + + #[tokio::test] + async fn test_discover_plugins_with_manifest() { + let temp_dir = TempDir::new().unwrap(); + let plugin_dir = temp_dir.path().join("test-plugin"); + std::fs::create_dir(&plugin_dir).unwrap(); + + // Create a valid manifest + let manifest_content = r#" +[plugin] +name = "test-plugin" +version = "1.0.0" +api_version = "1.0" +kind = ["media_type"] + +[plugin.binary] +wasm = "plugin.wasm" +"#; + std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap(); + + // Create dummy WASM file + 
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap(); + + let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]); + let manifests = loader.discover_plugins().await.unwrap(); + + assert_eq!(manifests.len(), 1); + assert_eq!(manifests[0].plugin.name, "test-plugin"); + } + + #[test] + fn test_validate_plugin_package() { + let temp_dir = TempDir::new().unwrap(); + let plugin_dir = temp_dir.path().join("test-plugin"); + std::fs::create_dir(&plugin_dir).unwrap(); + + // Create a valid manifest + let manifest_content = r#" +[plugin] +name = "test-plugin" +version = "1.0.0" +api_version = "1.0" +kind = ["media_type"] + +[plugin.binary] +wasm = "plugin.wasm" +"#; + std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap(); + + let loader = PluginLoader::new(vec![]); + + // Should fail without WASM file + assert!(loader.validate_plugin_package(&plugin_dir).is_err()); + + // Create valid WASM file (magic number only) + std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap(); + + // Should succeed now + assert!(loader.validate_plugin_package(&plugin_dir).is_ok()); + } + + #[test] + fn test_validate_invalid_wasm() { + let temp_dir = TempDir::new().unwrap(); + let plugin_dir = temp_dir.path().join("test-plugin"); + std::fs::create_dir(&plugin_dir).unwrap(); + + let manifest_content = r#" +[plugin] +name = "test-plugin" +version = "1.0.0" +api_version = "1.0" +kind = ["media_type"] + +[plugin.binary] +wasm = "plugin.wasm" +"#; + std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap(); + + // Create invalid WASM file + std::fs::write(plugin_dir.join("plugin.wasm"), b"not wasm").unwrap(); + + let loader = PluginLoader::new(vec![]); + assert!(loader.validate_plugin_package(&plugin_dir).is_err()); + } +} diff --git a/crates/pinakes-core/src/plugin/mod.rs b/crates/pinakes-core/src/plugin/mod.rs new file mode 100644 index 0000000..2321835 --- /dev/null +++ 
b/crates/pinakes-core/src/plugin/mod.rs @@ -0,0 +1,419 @@ +//! Plugin system for Pinakes +//! +//! This module provides a comprehensive plugin architecture that allows extending +//! Pinakes with custom media types, metadata extractors, search backends, and more. +//! +//! # Architecture +//! +//! - Plugins are compiled to WASM and run in a sandboxed environment +//! - Capability-based security controls what plugins can access +//! - Hot-reload support for development +//! - Automatic plugin discovery from configured directories + +use anyhow::Result; +use pinakes_plugin_api::{PluginContext, PluginMetadata}; +use std::path::PathBuf; +use std::sync::Arc; +use tokio::sync::RwLock; +use tracing::{debug, error, info, warn}; + +pub mod loader; +pub mod registry; +pub mod runtime; +pub mod security; + +pub use loader::PluginLoader; +pub use registry::{PluginRegistry, RegisteredPlugin}; +pub use runtime::{WasmPlugin, WasmRuntime}; +pub use security::CapabilityEnforcer; + +/// Plugin manager coordinates plugin lifecycle and operations +pub struct PluginManager { + /// Plugin registry + registry: Arc>, + + /// WASM runtime for executing plugins + runtime: Arc, + + /// Plugin loader for discovery and loading + loader: PluginLoader, + + /// Capability enforcer for security + enforcer: CapabilityEnforcer, + + /// Plugin data directory + data_dir: PathBuf, + + /// Plugin cache directory + cache_dir: PathBuf, + + /// Configuration + config: PluginManagerConfig, +} + +/// Configuration for the plugin manager +#[derive(Debug, Clone)] +pub struct PluginManagerConfig { + /// Directories to search for plugins + pub plugin_dirs: Vec, + + /// Whether to enable hot-reload (for development) + pub enable_hot_reload: bool, + + /// Whether to allow unsigned plugins + pub allow_unsigned: bool, + + /// Maximum number of concurrent plugin operations + pub max_concurrent_ops: usize, + + /// Plugin timeout in seconds + pub plugin_timeout_secs: u64, +} + +impl Default for PluginManagerConfig { + 
fn default() -> Self { + Self { + plugin_dirs: vec![], + enable_hot_reload: false, + allow_unsigned: false, + max_concurrent_ops: 4, + plugin_timeout_secs: 30, + } + } +} + +impl From for PluginManagerConfig { + fn from(cfg: crate::config::PluginsConfig) -> Self { + Self { + plugin_dirs: cfg.plugin_dirs, + enable_hot_reload: cfg.enable_hot_reload, + allow_unsigned: cfg.allow_unsigned, + max_concurrent_ops: cfg.max_concurrent_ops, + plugin_timeout_secs: cfg.plugin_timeout_secs, + } + } +} + +impl PluginManager { + /// Create a new plugin manager + pub fn new(data_dir: PathBuf, cache_dir: PathBuf, config: PluginManagerConfig) -> Result { + // Ensure directories exist + std::fs::create_dir_all(&data_dir)?; + std::fs::create_dir_all(&cache_dir)?; + + let runtime = Arc::new(WasmRuntime::new()?); + let registry = Arc::new(RwLock::new(PluginRegistry::new())); + let loader = PluginLoader::new(config.plugin_dirs.clone()); + let enforcer = CapabilityEnforcer::new(); + + Ok(Self { + registry, + runtime, + loader, + enforcer, + data_dir, + cache_dir, + config, + }) + } + + /// Discover and load all plugins from configured directories + pub async fn discover_and_load_all(&self) -> Result> { + info!("Discovering plugins from {:?}", self.config.plugin_dirs); + + let manifests = self.loader.discover_plugins().await?; + let mut loaded_plugins = Vec::new(); + + for manifest in manifests { + match self.load_plugin_from_manifest(&manifest).await { + Ok(plugin_id) => { + info!("Loaded plugin: {}", plugin_id); + loaded_plugins.push(plugin_id); + } + Err(e) => { + warn!("Failed to load plugin {}: {}", manifest.plugin.name, e); + } + } + } + + Ok(loaded_plugins) + } + + /// Load a plugin from a manifest file + async fn load_plugin_from_manifest( + &self, + manifest: &pinakes_plugin_api::PluginManifest, + ) -> Result { + let plugin_id = manifest.plugin_id(); + + // Validate plugin_id to prevent path traversal + if plugin_id.contains('/') || plugin_id.contains('\\') || 
plugin_id.contains("..") { + return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id)); + } + + // Check if already loaded + { + let registry = self.registry.read().await; + if registry.is_loaded(&plugin_id) { + return Ok(plugin_id); + } + } + + // Validate capabilities + let capabilities = manifest.to_capabilities(); + self.enforcer.validate_capabilities(&capabilities)?; + + // Create plugin context + let plugin_data_dir = self.data_dir.join(&plugin_id); + let plugin_cache_dir = self.cache_dir.join(&plugin_id); + tokio::fs::create_dir_all(&plugin_data_dir).await?; + tokio::fs::create_dir_all(&plugin_cache_dir).await?; + + let context = PluginContext { + data_dir: plugin_data_dir, + cache_dir: plugin_cache_dir, + config: manifest + .config + .iter() + .map(|(k, v)| { + ( + k.clone(), + serde_json::to_value(v).unwrap_or_else(|e| { + tracing::warn!("failed to serialize config value for key {}: {}", k, e); + serde_json::Value::Null + }), + ) + }) + .collect(), + capabilities: capabilities.clone(), + }; + + // Load WASM binary + let wasm_path = self.loader.resolve_wasm_path(manifest)?; + let wasm_plugin = self.runtime.load_plugin(&wasm_path, context).await?; + + // Initialize plugin + let init_succeeded = match wasm_plugin.call_function("initialize", &[]).await { + Ok(_) => true, + Err(e) => { + tracing::warn!(plugin_id = %plugin_id, "plugin initialization failed: {}", e); + false + } + }; + + // Register plugin + let metadata = PluginMetadata { + id: plugin_id.clone(), + name: manifest.plugin.name.clone(), + version: manifest.plugin.version.clone(), + author: manifest.plugin.author.clone().unwrap_or_default(), + description: manifest.plugin.description.clone().unwrap_or_default(), + api_version: manifest.plugin.api_version.clone(), + capabilities_required: capabilities, + }; + + // Derive manifest_path from the loader's plugin directories + let manifest_path = self + .loader + .get_plugin_dir(&manifest.plugin.name) + .map(|dir| dir.join("plugin.toml")); + + let 
registered = RegisteredPlugin { + id: plugin_id.clone(), + metadata, + wasm_plugin, + manifest: manifest.clone(), + manifest_path, + enabled: init_succeeded, + }; + + let mut registry = self.registry.write().await; + registry.register(registered)?; + + Ok(plugin_id) + } + + /// Install a plugin from a file or URL + pub async fn install_plugin(&self, source: &str) -> Result { + info!("Installing plugin from: {}", source); + + // Download/copy plugin to plugins directory + let plugin_path = if source.starts_with("http://") || source.starts_with("https://") { + // Download from URL + self.loader.download_plugin(source).await? + } else { + // Copy from local file + PathBuf::from(source) + }; + + // Load the manifest + let manifest_path = plugin_path.join("plugin.toml"); + let manifest = pinakes_plugin_api::PluginManifest::from_file(&manifest_path)?; + + // Load the plugin + self.load_plugin_from_manifest(&manifest).await + } + + /// Uninstall a plugin + pub async fn uninstall_plugin(&self, plugin_id: &str) -> Result<()> { + // Validate plugin_id to prevent path traversal + if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") { + return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id)); + } + + info!("Uninstalling plugin: {}", plugin_id); + + // Shutdown plugin first + self.shutdown_plugin(plugin_id).await?; + + // Remove from registry + let mut registry = self.registry.write().await; + registry.unregister(plugin_id)?; + + // Remove plugin data and cache + let plugin_data_dir = self.data_dir.join(plugin_id); + let plugin_cache_dir = self.cache_dir.join(plugin_id); + + if plugin_data_dir.exists() { + std::fs::remove_dir_all(&plugin_data_dir)?; + } + if plugin_cache_dir.exists() { + std::fs::remove_dir_all(&plugin_cache_dir)?; + } + + Ok(()) + } + + /// Enable a plugin + pub async fn enable_plugin(&self, plugin_id: &str) -> Result<()> { + let mut registry = self.registry.write().await; + registry.enable(plugin_id) + } + + /// Disable a 
plugin + pub async fn disable_plugin(&self, plugin_id: &str) -> Result<()> { + let mut registry = self.registry.write().await; + registry.disable(plugin_id) + } + + /// Shutdown a specific plugin + pub async fn shutdown_plugin(&self, plugin_id: &str) -> Result<()> { + debug!("Shutting down plugin: {}", plugin_id); + + let registry = self.registry.read().await; + if let Some(plugin) = registry.get(plugin_id) { + plugin.wasm_plugin.call_function("shutdown", &[]).await.ok(); + Ok(()) + } else { + Err(anyhow::anyhow!("Plugin not found: {}", plugin_id)) + } + } + + /// Shutdown all plugins + pub async fn shutdown_all(&self) -> Result<()> { + info!("Shutting down all plugins"); + + let registry = self.registry.read().await; + let plugin_ids: Vec = registry.list_all().iter().map(|p| p.id.clone()).collect(); + + for plugin_id in plugin_ids { + if let Err(e) = self.shutdown_plugin(&plugin_id).await { + error!("Failed to shutdown plugin {}: {}", plugin_id, e); + } + } + + Ok(()) + } + + /// Get list of all registered plugins + pub async fn list_plugins(&self) -> Vec { + let registry = self.registry.read().await; + registry + .list_all() + .iter() + .map(|p| p.metadata.clone()) + .collect() + } + + /// Get plugin metadata by ID + pub async fn get_plugin(&self, plugin_id: &str) -> Option { + let registry = self.registry.read().await; + registry.get(plugin_id).map(|p| p.metadata.clone()) + } + + /// Check if a plugin is loaded and enabled + pub async fn is_plugin_enabled(&self, plugin_id: &str) -> bool { + let registry = self.registry.read().await; + registry.is_enabled(plugin_id).unwrap_or(false) + } + + /// Reload a plugin (for hot-reload during development) + pub async fn reload_plugin(&self, plugin_id: &str) -> Result<()> { + if !self.config.enable_hot_reload { + return Err(anyhow::anyhow!("Hot-reload is disabled")); + } + + info!("Reloading plugin: {}", plugin_id); + + // Re-read the manifest from disk if possible, falling back to cached version + let manifest = { + let 
registry = self.registry.read().await; + let plugin = registry + .get(plugin_id) + .ok_or_else(|| anyhow::anyhow!("Plugin not found"))?; + if let Some(ref manifest_path) = plugin.manifest_path { + pinakes_plugin_api::PluginManifest::from_file(manifest_path).unwrap_or_else(|e| { + warn!("Failed to re-read manifest from disk, using cached: {}", e); + plugin.manifest.clone() + }) + } else { + plugin.manifest.clone() + } + }; + + // Shutdown and unload current version + self.shutdown_plugin(plugin_id).await?; + { + let mut registry = self.registry.write().await; + registry.unregister(plugin_id)?; + } + + // Reload from manifest + self.load_plugin_from_manifest(&manifest).await?; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[tokio::test] + async fn test_plugin_manager_creation() { + let temp_dir = TempDir::new().unwrap(); + let data_dir = temp_dir.path().join("data"); + let cache_dir = temp_dir.path().join("cache"); + + let config = PluginManagerConfig::default(); + let manager = PluginManager::new(data_dir.clone(), cache_dir.clone(), config); + + assert!(manager.is_ok()); + assert!(data_dir.exists()); + assert!(cache_dir.exists()); + } + + #[tokio::test] + async fn test_list_plugins_empty() { + let temp_dir = TempDir::new().unwrap(); + let data_dir = temp_dir.path().join("data"); + let cache_dir = temp_dir.path().join("cache"); + + let config = PluginManagerConfig::default(); + let manager = PluginManager::new(data_dir, cache_dir, config).unwrap(); + + let plugins = manager.list_plugins().await; + assert_eq!(plugins.len(), 0); + } +} diff --git a/crates/pinakes-core/src/plugin/registry.rs b/crates/pinakes-core/src/plugin/registry.rs new file mode 100644 index 0000000..93a5e8b --- /dev/null +++ b/crates/pinakes-core/src/plugin/registry.rs @@ -0,0 +1,280 @@ +//! 
Plugin registry for managing loaded plugins + +use std::path::PathBuf; + +use anyhow::{Result, anyhow}; +use pinakes_plugin_api::{PluginManifest, PluginMetadata}; +use std::collections::HashMap; + +use super::runtime::WasmPlugin; + +/// A registered plugin with its metadata and runtime state +#[derive(Clone)] +pub struct RegisteredPlugin { + pub id: String, + pub metadata: PluginMetadata, + pub wasm_plugin: WasmPlugin, + pub manifest: PluginManifest, + pub manifest_path: Option, + pub enabled: bool, +} + +/// Plugin registry maintains the state of all loaded plugins +pub struct PluginRegistry { + /// Map of plugin ID to registered plugin + plugins: HashMap, +} + +impl PluginRegistry { + /// Create a new empty registry + pub fn new() -> Self { + Self { + plugins: HashMap::new(), + } + } + + /// Register a new plugin + pub fn register(&mut self, plugin: RegisteredPlugin) -> Result<()> { + if self.plugins.contains_key(&plugin.id) { + return Err(anyhow!("Plugin already registered: {}", plugin.id)); + } + + self.plugins.insert(plugin.id.clone(), plugin); + Ok(()) + } + + /// Unregister a plugin by ID + pub fn unregister(&mut self, plugin_id: &str) -> Result<()> { + self.plugins + .remove(plugin_id) + .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?; + Ok(()) + } + + /// Get a plugin by ID + pub fn get(&self, plugin_id: &str) -> Option<&RegisteredPlugin> { + self.plugins.get(plugin_id) + } + + /// Get a mutable reference to a plugin by ID + pub fn get_mut(&mut self, plugin_id: &str) -> Option<&mut RegisteredPlugin> { + self.plugins.get_mut(plugin_id) + } + + /// Check if a plugin is loaded + pub fn is_loaded(&self, plugin_id: &str) -> bool { + self.plugins.contains_key(plugin_id) + } + + /// Check if a plugin is enabled. Returns `None` if the plugin is not found. 
+ pub fn is_enabled(&self, plugin_id: &str) -> Option { + self.plugins.get(plugin_id).map(|p| p.enabled) + } + + /// Enable a plugin + pub fn enable(&mut self, plugin_id: &str) -> Result<()> { + let plugin = self + .plugins + .get_mut(plugin_id) + .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?; + + plugin.enabled = true; + Ok(()) + } + + /// Disable a plugin + pub fn disable(&mut self, plugin_id: &str) -> Result<()> { + let plugin = self + .plugins + .get_mut(plugin_id) + .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?; + + plugin.enabled = false; + Ok(()) + } + + /// List all registered plugins + pub fn list_all(&self) -> Vec<&RegisteredPlugin> { + self.plugins.values().collect() + } + + /// List all enabled plugins + pub fn list_enabled(&self) -> Vec<&RegisteredPlugin> { + self.plugins.values().filter(|p| p.enabled).collect() + } + + /// Get plugins by kind (e.g., "media_type", "metadata_extractor") + pub fn get_by_kind(&self, kind: &str) -> Vec<&RegisteredPlugin> { + self.plugins + .values() + .filter(|p| p.manifest.plugin.kind.contains(&kind.to_string())) + .collect() + } + + /// Get count of registered plugins + pub fn count(&self) -> usize { + self.plugins.len() + } + + /// Get count of enabled plugins + pub fn count_enabled(&self) -> usize { + self.plugins.values().filter(|p| p.enabled).count() + } +} + +impl Default for PluginRegistry { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pinakes_plugin_api::Capabilities; + use std::collections::HashMap; + + fn create_test_plugin(id: &str, kind: Vec) -> RegisteredPlugin { + let manifest = PluginManifest { + plugin: pinakes_plugin_api::manifest::PluginInfo { + name: id.to_string(), + version: "1.0.0".to_string(), + api_version: "1.0".to_string(), + author: Some("Test".to_string()), + description: Some("Test plugin".to_string()), + homepage: None, + license: None, + kind, + binary: pinakes_plugin_api::manifest::PluginBinary { + wasm: 
"test.wasm".to_string(), + entrypoint: None, + }, + dependencies: vec![], + }, + capabilities: Default::default(), + config: HashMap::new(), + }; + + RegisteredPlugin { + id: id.to_string(), + metadata: PluginMetadata { + id: id.to_string(), + name: id.to_string(), + version: "1.0.0".to_string(), + author: "Test".to_string(), + description: "Test plugin".to_string(), + api_version: "1.0".to_string(), + capabilities_required: Capabilities::default(), + }, + wasm_plugin: WasmPlugin::default(), + manifest, + manifest_path: None, + enabled: true, + } + } + + #[test] + fn test_registry_register_and_get() { + let mut registry = PluginRegistry::new(); + let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); + + registry.register(plugin.clone()).unwrap(); + + assert!(registry.is_loaded("test-plugin")); + assert!(registry.get("test-plugin").is_some()); + } + + #[test] + fn test_registry_duplicate_register() { + let mut registry = PluginRegistry::new(); + let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); + + registry.register(plugin.clone()).unwrap(); + let result = registry.register(plugin); + + assert!(result.is_err()); + } + + #[test] + fn test_registry_unregister() { + let mut registry = PluginRegistry::new(); + let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); + + registry.register(plugin).unwrap(); + registry.unregister("test-plugin").unwrap(); + + assert!(!registry.is_loaded("test-plugin")); + } + + #[test] + fn test_registry_enable_disable() { + let mut registry = PluginRegistry::new(); + let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]); + + registry.register(plugin).unwrap(); + assert_eq!(registry.is_enabled("test-plugin"), Some(true)); + + registry.disable("test-plugin").unwrap(); + assert_eq!(registry.is_enabled("test-plugin"), Some(false)); + + registry.enable("test-plugin").unwrap(); + assert_eq!(registry.is_enabled("test-plugin"), Some(true)); + + 
assert_eq!(registry.is_enabled("nonexistent"), None); + } + + #[test] + fn test_registry_get_by_kind() { + let mut registry = PluginRegistry::new(); + + registry + .register(create_test_plugin( + "plugin1", + vec!["media_type".to_string()], + )) + .unwrap(); + registry + .register(create_test_plugin( + "plugin2", + vec!["metadata_extractor".to_string()], + )) + .unwrap(); + registry + .register(create_test_plugin( + "plugin3", + vec!["media_type".to_string()], + )) + .unwrap(); + + let media_type_plugins = registry.get_by_kind("media_type"); + assert_eq!(media_type_plugins.len(), 2); + + let extractor_plugins = registry.get_by_kind("metadata_extractor"); + assert_eq!(extractor_plugins.len(), 1); + } + + #[test] + fn test_registry_counts() { + let mut registry = PluginRegistry::new(); + + registry + .register(create_test_plugin( + "plugin1", + vec!["media_type".to_string()], + )) + .unwrap(); + registry + .register(create_test_plugin( + "plugin2", + vec!["media_type".to_string()], + )) + .unwrap(); + + assert_eq!(registry.count(), 2); + assert_eq!(registry.count_enabled(), 2); + + registry.disable("plugin1").unwrap(); + assert_eq!(registry.count(), 2); + assert_eq!(registry.count_enabled(), 1); + } +} diff --git a/crates/pinakes-core/src/plugin/runtime.rs b/crates/pinakes-core/src/plugin/runtime.rs new file mode 100644 index 0000000..2a04cf6 --- /dev/null +++ b/crates/pinakes-core/src/plugin/runtime.rs @@ -0,0 +1,582 @@ +//! 
WASM runtime for executing plugins + +use anyhow::{Result, anyhow}; +use pinakes_plugin_api::PluginContext; +use std::path::Path; +use std::sync::Arc; +use wasmtime::*; + +/// WASM runtime wrapper for executing plugins +pub struct WasmRuntime { + engine: Engine, +} + +impl WasmRuntime { + /// Create a new WASM runtime + pub fn new() -> Result { + let mut config = Config::new(); + + // Enable WASM features + config.wasm_component_model(true); + config.async_support(true); + + // Set resource limits + config.max_wasm_stack(1024 * 1024); // 1MB stack + config.consume_fuel(true); // Enable fuel metering for CPU limits + + let engine = Engine::new(&config)?; + + Ok(Self { engine }) + } + + /// Load a plugin from a WASM file + pub async fn load_plugin( + &self, + wasm_path: &Path, + context: PluginContext, + ) -> Result { + if !wasm_path.exists() { + return Err(anyhow!("WASM file not found: {:?}", wasm_path)); + } + + // Read WASM bytes + let wasm_bytes = std::fs::read(wasm_path)?; + + // Compile module + let module = Module::new(&self.engine, &wasm_bytes)?; + + Ok(WasmPlugin { + module: Arc::new(module), + context, + }) + } +} + +/// Store data passed to each WASM invocation +pub struct PluginStoreData { + pub context: PluginContext, + pub exchange_buffer: Vec, +} + +/// A loaded WASM plugin instance +#[derive(Clone)] +pub struct WasmPlugin { + module: Arc, + context: PluginContext, +} + +impl WasmPlugin { + /// Get the plugin context + pub fn context(&self) -> &PluginContext { + &self.context + } + + /// Execute a plugin function + /// + /// Creates a fresh store and instance per invocation with host functions + /// linked, calls the requested exported function, and returns the result. 
    pub async fn call_function(&self, function_name: &str, params: &[u8]) -> Result<Vec<u8>> {
        let engine = self.module.engine();

        // Create store with per-invocation data; each call gets a fresh,
        // isolated exchange buffer.
        let store_data = PluginStoreData {
            context: self.context.clone(),
            exchange_buffer: Vec::new(),
        };
        let mut store = Store::new(engine, store_data);

        // Set fuel limit based on capabilities.
        // NOTE(review): the ms -> fuel conversion factor (100_000) is a
        // heuristic; fuel units are not wall-clock time — confirm calibration.
        if let Some(max_cpu_time_ms) = self.context.capabilities.max_cpu_time_ms {
            let fuel = max_cpu_time_ms * 100_000;
            store.set_fuel(fuel)?;
        } else {
            store.set_fuel(1_000_000_000)?;
        }

        // Set up linker with host functions
        let mut linker = Linker::new(engine);
        HostFunctions::setup_linker(&mut linker)?;

        // Instantiate the module
        let instance = linker.instantiate_async(&mut store, &self.module).await?;

        // Get the memory export (if available)
        let memory = instance.get_memory(&mut store, "memory");

        // If there are params and memory is available, write them
        let mut alloc_offset: i32 = 0;
        if !params.is_empty()
            && let Some(mem) = &memory {
            // Call the plugin's alloc function if available, otherwise write at offset 0
            let offset = if let Ok(alloc) =
                instance.get_typed_func::<i32, i32>(&mut store, "alloc")
            {
                let result = alloc.call_async(&mut store, params.len() as i32).await?;
                if result < 0 {
                    return Err(anyhow!("plugin alloc returned negative offset: {}", result));
                }
                result as usize
            } else {
                0
            };

            alloc_offset = offset as i32;
            let mem_data = mem.data_mut(&mut store);
            // NOTE(review): if the params do not fit in linear memory they are
            // silently dropped and the call proceeds anyway — confirm intended.
            if offset + params.len() <= mem_data.len() {
                mem_data[offset..offset + params.len()].copy_from_slice(params);
            }
        }

        // Look up the exported function and call it
        let func = instance
            .get_func(&mut store, function_name)
            .ok_or_else(|| anyhow!("exported function '{}' not found", function_name))?;

        let func_ty = func.ty(&store);
        let param_count = func_ty.params().len();
        let result_count = func_ty.results().len();

        let mut results = vec![Val::I32(0); result_count];

        // Call with appropriate params based on function signature
        if param_count == 2 && !params.is_empty() {
            // Convention: (ptr, len)
            func.call_async(
                &mut store,
                &[Val::I32(alloc_offset), Val::I32(params.len() as i32)],
                &mut results,
            )
            .await?;
        } else if param_count == 0 {
            func.call_async(&mut store, &[], &mut results).await?;
        } else {
            // Generic: fill with zeroes
            let params_vals: Vec<Val> = (0..param_count).map(|_| Val::I32(0)).collect();
            func.call_async(&mut store, &params_vals, &mut results)
                .await?;
        }

        // Read result from exchange buffer (host functions may have written data)
        let exchange = std::mem::take(&mut store.data_mut().exchange_buffer);
        if !exchange.is_empty() {
            return Ok(exchange);
        }

        // Otherwise serialize the return values
        if let Some(Val::I32(ret)) = results.first() {
            Ok(ret.to_le_bytes().to_vec())
        } else {
            Ok(Vec::new())
        }
    }
}

// Test-only stand-in: wraps an empty module with a default context so
// registry tests can build a RegisteredPlugin without a real plugin binary.
#[cfg(test)]
impl Default for WasmPlugin {
    fn default() -> Self {
        let engine = Engine::default();
        let module = Module::new(&engine, br#"(module)"#).unwrap();

        Self {
            module: Arc::new(module),
            context: PluginContext {
                data_dir: std::env::temp_dir(),
                cache_dir: std::env::temp_dir(),
                config: std::collections::HashMap::new(),
                capabilities: Default::default(),
            },
        }
    }
}

/// Host functions that plugins can call
///
/// Convention for the `host_*` imports below: return values >= 0 indicate
/// success (usually a byte length), -1 is a generic failure, and -2 is a
/// capability/permission denial.
pub struct HostFunctions;

impl HostFunctions {
    /// Set up host functions in a linker
    pub fn setup_linker(linker: &mut Linker<PluginStoreData>) -> Result<()> {
        // host_log: log a message from the plugin
        linker.func_wrap(
            "env",
            "host_log",
            |mut caller: Caller<'_, PluginStoreData>, level: i32, ptr: i32, len: i32| {
                if ptr < 0 || len < 0 {
                    return;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                if let Some(mem) = memory {
                    let data = mem.data(&caller);
                    let start = ptr as usize;
                    let end = start + len as usize;
                    if end <= data.len()
                        && let Ok(msg) = std::str::from_utf8(&data[start..end]) {
                        // level: 0=error, 1=warn, 2=info, anything else=debug
                        match level {
                            0 => tracing::error!(plugin = true, "{}", msg),
                            1 => tracing::warn!(plugin = true, "{}", msg),
                            2 => tracing::info!(plugin = true, "{}", msg),
                            _ => tracing::debug!(plugin = true, "{}", msg),
                        }
                    }
                }
            },
        )?;

        // host_read_file: read a file into the exchange buffer
        linker.func_wrap(
            "env",
            "host_read_file",
            |mut caller: Caller<'_, PluginStoreData>, path_ptr: i32, path_len: i32| -> i32 {
                if path_ptr < 0 || path_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };

                let data = mem.data(&caller);
                let start = path_ptr as usize;
                let end = start + path_len as usize;
                if end > data.len() {
                    return -1;
                }

                let path_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };

                // Canonicalize path before checking permissions to prevent traversal
                let path = match std::path::Path::new(&path_str).canonicalize() {
                    Ok(p) => p,
                    Err(_) => return -1,
                };

                // Check read permission against canonicalized path
                let can_read = caller
                    .data()
                    .context
                    .capabilities
                    .filesystem
                    .read
                    .iter()
                    .any(|allowed| {
                        allowed
                            .canonicalize()
                            .is_ok_and(|a| path.starts_with(a))
                    });

                if !can_read {
                    tracing::warn!(path = %path_str, "plugin read access denied");
                    return -2;
                }

                // On success the file contents land in the exchange buffer;
                // the plugin retrieves them via host_get_buffer.
                match std::fs::read(&path) {
                    Ok(contents) => {
                        let len = contents.len() as i32;
                        caller.data_mut().exchange_buffer = contents;
                        len
                    }
                    Err(_) => -1,
                }
            },
        )?;

        // host_write_file: write data to a file
        linker.func_wrap(
            "env",
            "host_write_file",
            |mut caller: Caller<'_, PluginStoreData>,
             path_ptr: i32,
             path_len: i32,
             data_ptr: i32,
             data_len: i32|
             -> i32 {
                if path_ptr < 0 || path_len < 0 || data_ptr < 0 || data_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };

                let mem_data = mem.data(&caller);
                let path_start = path_ptr as usize;
                let path_end = path_start + path_len as usize;
                let data_start = data_ptr as usize;
                let data_end = data_start + data_len as usize;

                if path_end > mem_data.len() || data_end > mem_data.len() {
                    return -1;
                }

                let path_str = match std::str::from_utf8(&mem_data[path_start..path_end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                let file_data = mem_data[data_start..data_end].to_vec();

                // Canonicalize path for write (file may not exist yet):
                // canonicalize the parent directory and re-append the filename.
                let path = std::path::Path::new(&path_str);
                let canonical = if path.exists() {
                    path.canonicalize().ok()
                } else {
                    path.parent()
                        .and_then(|p| p.canonicalize().ok())
                        .map(|p| p.join(path.file_name().unwrap_or_default()))
                };
                let Some(canonical) = canonical else {
                    return -1;
                };

                // Check write permission against canonicalized path
                let can_write = caller
                    .data()
                    .context
                    .capabilities
                    .filesystem
                    .write
                    .iter()
                    .any(|allowed| {
                        allowed
                            .canonicalize()
                            .is_ok_and(|a| canonical.starts_with(a))
                    });

                if !can_write {
                    tracing::warn!(path = %path_str, "plugin write access denied");
                    return -2;
                }

                match std::fs::write(&canonical, &file_data) {
                    Ok(()) => 0,
                    Err(_) => -1,
                }
            },
        )?;

        // host_http_request: make an HTTP request (blocking)
        linker.func_wrap(
            "env",
            "host_http_request",
            |mut caller: Caller<'_, PluginStoreData>, url_ptr: i32, url_len: i32| -> i32 {
                if url_ptr < 0 || url_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };

                let data = mem.data(&caller);
                let start = url_ptr as usize;
                let end = start + url_len as usize;
                if end > data.len() {
                    return -1;
                }

                let url_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };

                // Check network permission
                // NOTE(review): only the global network flag is checked here;
                // the per-domain allowlist is not enforced — confirm intended.
                if !caller.data().context.capabilities.network.enabled {
                    tracing::warn!(url = %url_str, "plugin network access denied");
                    return -2;
                }

                // Use block_in_place to avoid blocking the async runtime's thread pool.
                // Falls back to a blocking client with timeout if block_in_place is unavailable.
                let result = std::panic::catch_unwind(|| {
                    tokio::task::block_in_place(|| {
                        tokio::runtime::Handle::current().block_on(async {
                            let client = reqwest::Client::builder()
                                .timeout(std::time::Duration::from_secs(30))
                                .build()
                                .map_err(|e| e.to_string())?;
                            let resp = client
                                .get(&url_str)
                                .send()
                                .await
                                .map_err(|e| e.to_string())?;
                            let bytes = resp.bytes().await.map_err(|e| e.to_string())?;
                            Ok::<_, String>(bytes)
                        })
                    })
                });

                match result {
                    Ok(Ok(bytes)) => {
                        let len = bytes.len() as i32;
                        caller.data_mut().exchange_buffer = bytes.to_vec();
                        len
                    }
                    Ok(Err(_)) => -1,
                    Err(_) => {
                        // block_in_place panicked (e.g. current-thread runtime);
                        // fall back to blocking client with timeout
                        let client = match reqwest::blocking::Client::builder()
                            .timeout(std::time::Duration::from_secs(30))
                            .build()
                        {
                            Ok(c) => c,
                            Err(_) => return -1,
                        };
                        match client.get(&url_str).send() {
                            Ok(resp) => match resp.bytes() {
                                Ok(bytes) => {
                                    let len = bytes.len() as i32;
                                    caller.data_mut().exchange_buffer = bytes.to_vec();
                                    len
                                }
                                Err(_) => -1,
                            },
                            Err(_) => -1,
                        }
                    }
                }
            },
        )?;

        // host_get_config: read a config key into the exchange buffer
        linker.func_wrap(
            "env",
            "host_get_config",
            |mut caller: Caller<'_, PluginStoreData>, key_ptr: i32, key_len: i32| -> i32 {
                if key_ptr < 0 || key_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };

                let data = mem.data(&caller);
                let start = key_ptr as usize;
                let end = start + key_len as usize;
                if end > data.len() {
                    return -1;
                }

                let key_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };

                match caller.data().context.config.get(&key_str) {
                    Some(value) => {
                        // Serialize the JSON value and stage it in the
                        // exchange buffer; returns its byte length.
                        let json = value.to_string();
                        let bytes = json.into_bytes();
                        let len = bytes.len() as i32;
                        caller.data_mut().exchange_buffer = bytes;
                        len
                    }
                    None => -1,
                }
            },
        )?;

        // host_get_buffer: copy the exchange buffer to WASM memory
        linker.func_wrap(
            "env",
            "host_get_buffer",
            |mut caller: Caller<'_, PluginStoreData>, dest_ptr: i32, dest_len: i32| -> i32 {
                if dest_ptr < 0 || dest_len < 0 {
                    return -1;
                }
                // NOTE(review): the buffer is cloned to satisfy the borrow
                // checker (caller is re-borrowed mutably below); acceptable
                // for small payloads but worth revisiting for large files.
                let buf = caller.data().exchange_buffer.clone();
                let copy_len = buf.len().min(dest_len as usize);

                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };

                let mem_data = mem.data_mut(&mut caller);
                let start = dest_ptr as usize;
                if start + copy_len > mem_data.len() {
                    return -1;
                }

                mem_data[start..start + copy_len].copy_from_slice(&buf[..copy_len]);
                copy_len as i32
            },
        )?;

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use pinakes_plugin_api::PluginContext;
    use std::collections::HashMap;

    #[test]
    fn test_wasm_runtime_creation() {
        let runtime = WasmRuntime::new();
        assert!(runtime.is_ok());
    }

    #[test]
    fn test_host_functions_file_access() {
        let mut capabilities = pinakes_plugin_api::Capabilities::default();
        capabilities.filesystem.read.push("/tmp".into());
        capabilities.filesystem.write.push("/tmp/output".into());

        let context = PluginContext {
            data_dir: "/tmp/data".into(),
            cache_dir: "/tmp/cache".into(),
            config: HashMap::new(),
            capabilities,
        };

        // Verify capability checks work via context fields
        let can_read = context
            .capabilities
            .filesystem
            .read
            .iter()
            .any(|p| Path::new("/tmp/test.txt").starts_with(p));
        assert!(can_read);

        let cant_read = context
            .capabilities
            .filesystem
            .read
            .iter()
            .any(|p| Path::new("/etc/passwd").starts_with(p));
        assert!(!cant_read);

        let can_write = context
            .capabilities
            .filesystem
            .write
            .iter()
            .any(|p| Path::new("/tmp/output/file.txt").starts_with(p));
        assert!(can_write);

        let cant_write = context
            .capabilities
            .filesystem
            .write
            .iter()
            .any(|p| Path::new("/tmp/file.txt").starts_with(p));
        assert!(!cant_write);
    }

    #[test]
    fn test_host_functions_network_access() {
        let mut context = PluginContext {
            data_dir: "/tmp/data".into(),
            cache_dir: "/tmp/cache".into(),
            config: HashMap::new(),
            capabilities: Default::default(),
        };

        assert!(!context.capabilities.network.enabled);

        context.capabilities.network.enabled = true;
        assert!(context.capabilities.network.enabled);
    }

    #[test]
    fn test_linker_setup() {
        let engine = Engine::default();
        let mut linker = Linker::<PluginStoreData>::new(&engine);
        let result = HostFunctions::setup_linker(&mut linker);
        assert!(result.is_ok());
    }
}
diff --git a/crates/pinakes-core/src/plugin/security.rs b/crates/pinakes-core/src/plugin/security.rs
new file mode 100644
index 0000000..6d4c458
--- /dev/null
+++ b/crates/pinakes-core/src/plugin/security.rs
@@ -0,0 +1,341 @@
//!
Capability-based security for plugins

use anyhow::{Result, anyhow};
use pinakes_plugin_api::Capabilities;
use std::path::{Path, PathBuf};

/// Capability enforcer validates and enforces plugin capabilities
pub struct CapabilityEnforcer {
    /// Maximum allowed memory per plugin (bytes)
    max_memory_limit: usize,

    /// Maximum allowed CPU time per plugin (milliseconds)
    max_cpu_time_limit: u64,

    /// Allowed filesystem read paths (system-wide)
    allowed_read_paths: Vec<PathBuf>,

    /// Allowed filesystem write paths (system-wide)
    allowed_write_paths: Vec<PathBuf>,

    /// Whether to allow network access by default
    allow_network_default: bool,
}

impl CapabilityEnforcer {
    /// Create a new capability enforcer with default limits.
    ///
    /// Defaults are restrictive: empty path allowlists (deny-all filesystem)
    /// and no network access.
    pub fn new() -> Self {
        Self {
            max_memory_limit: 512 * 1024 * 1024, // 512 MB
            max_cpu_time_limit: 60 * 1000,       // 60 seconds
            allowed_read_paths: vec![],
            allowed_write_paths: vec![],
            allow_network_default: false,
        }
    }

    /// Set maximum memory limit
    pub fn with_max_memory(mut self, bytes: usize) -> Self {
        self.max_memory_limit = bytes;
        self
    }

    /// Set maximum CPU time limit
    pub fn with_max_cpu_time(mut self, milliseconds: u64) -> Self {
        self.max_cpu_time_limit = milliseconds;
        self
    }

    /// Add allowed read path
    pub fn allow_read_path(mut self, path: PathBuf) -> Self {
        self.allowed_read_paths.push(path);
        self
    }

    /// Add allowed write path
    pub fn allow_write_path(mut self, path: PathBuf) -> Self {
        self.allowed_write_paths.push(path);
        self
    }

    /// Set default network access policy
    pub fn with_network_default(mut self, allow: bool) -> Self {
        self.allow_network_default = allow;
        self
    }

    /// Validate capabilities requested by a plugin.
    ///
    /// Returns an error if any requested limit (memory, CPU time), filesystem
    /// path, or network access exceeds what this enforcer's policy allows.
    pub fn validate_capabilities(&self, capabilities: &Capabilities) -> Result<()> {
        // Validate memory limit
        if let Some(memory) = capabilities.max_memory_bytes
            && memory > self.max_memory_limit
        {
            return Err(anyhow!(
                "Requested memory ({} bytes) exceeds limit ({} bytes)",
                memory,
                self.max_memory_limit
            ));
        }

        // Validate CPU time limit
        if let Some(cpu_time) = capabilities.max_cpu_time_ms
            && cpu_time > self.max_cpu_time_limit
        {
            return Err(anyhow!(
                "Requested CPU time ({} ms) exceeds limit ({} ms)",
                cpu_time,
                self.max_cpu_time_limit
            ));
        }

        // Validate filesystem access
        self.validate_filesystem_access(capabilities)?;

        // Validate network access
        if capabilities.network.enabled && !self.allow_network_default {
            return Err(anyhow!(
                "Plugin requests network access, but network access is disabled by policy"
            ));
        }

        Ok(())
    }

    /// Validate filesystem access capabilities
    fn validate_filesystem_access(&self, capabilities: &Capabilities) -> Result<()> {
        // Check read paths
        for path in &capabilities.filesystem.read {
            if !self.is_read_allowed(path) {
                return Err(anyhow!(
                    "Plugin requests read access to {:?} which is not in allowed paths",
                    path
                ));
            }
        }

        // Check write paths
        for path in &capabilities.filesystem.write {
            if !self.is_write_allowed(path) {
                return Err(anyhow!(
                    "Plugin requests write access to {:?} which is not in allowed paths",
                    path
                ));
            }
        }

        Ok(())
    }

    /// Check if a path is allowed for reading.
    ///
    /// The path must canonicalize (i.e. exist) and fall under one of the
    /// configured read roots; an unconfigured enforcer denies everything.
    pub fn is_read_allowed(&self, path: &Path) -> bool {
        if self.allowed_read_paths.is_empty() {
            return false; // deny-all when unconfigured
        }
        let Ok(canonical) = path.canonicalize() else {
            return false;
        };
        self.allowed_read_paths.iter().any(|allowed| {
            allowed
                .canonicalize()
                .is_ok_and(|a| canonical.starts_with(a))
        })
    }

    /// Check if a path is allowed for writing.
    ///
    /// For a not-yet-existing file, the parent directory is canonicalized and
    /// the filename re-appended, so traversal via `..` is still resolved.
    pub fn is_write_allowed(&self, path: &Path) -> bool {
        if self.allowed_write_paths.is_empty() {
            return false; // deny-all when unconfigured
        }
        let canonical = if path.exists() {
            path.canonicalize().ok()
        } else {
            path.parent()
                .and_then(|p| p.canonicalize().ok())
                .map(|p| p.join(path.file_name().unwrap_or_default()))
        };
        let Some(canonical) = canonical else {
            return false;
        };
        self.allowed_write_paths.iter().any(|allowed| {
            allowed
                .canonicalize()
                .is_ok_and(|a| canonical.starts_with(a))
        })
    }

    /// Check if network access is allowed for a plugin
    pub fn is_network_allowed(&self, capabilities: &Capabilities) -> bool {
        capabilities.network.enabled && self.allow_network_default
    }

    /// Check if a specific domain is allowed.
    ///
    /// `None` for the allowlist means "no restriction": fall back to the
    /// enforcer's default network policy.
    pub fn is_domain_allowed(&self, capabilities: &Capabilities, domain: &str) -> bool {
        if !capabilities.network.enabled {
            return false;
        }

        // If no domain restrictions, allow all domains
        if capabilities.network.allowed_domains.is_none() {
            return self.allow_network_default;
        }

        // Check against allowed domains list (case-insensitive)
        capabilities
            .network
            .allowed_domains
            .as_ref()
            .map(|domains| domains.iter().any(|d| d.eq_ignore_ascii_case(domain)))
            .unwrap_or(false)
    }

    /// Get effective memory limit for a plugin
    ///
    /// The plugin's requested limit, clamped to the enforcer's maximum.
    pub fn get_memory_limit(&self, capabilities: &Capabilities) -> usize {
        capabilities
            .max_memory_bytes
            .unwrap_or(self.max_memory_limit)
            .min(self.max_memory_limit)
    }

    /// Get effective CPU time limit for a plugin
    ///
    /// The plugin's requested limit, clamped to the enforcer's maximum.
    pub fn get_cpu_time_limit(&self, capabilities: &Capabilities) -> u64 {
        capabilities
            .max_cpu_time_ms
            .unwrap_or(self.max_cpu_time_limit)
            .min(self.max_cpu_time_limit)
    }
}

impl Default for CapabilityEnforcer {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    #[allow(unused_imports)]
    use pinakes_plugin_api::{FilesystemCapability, NetworkCapability};

    #[test]
    fn test_validate_memory_limit() {
        let enforcer = CapabilityEnforcer::new().with_max_memory(100 * 1024 * 1024); // 100 MB

        let mut caps = Capabilities::default();
        caps.max_memory_bytes = Some(50 * 1024 * 1024); // 50 MB - OK
        assert!(enforcer.validate_capabilities(&caps).is_ok());

        caps.max_memory_bytes = Some(200
* 1024 * 1024); // 200 MB - exceeds limit + assert!(enforcer.validate_capabilities(&caps).is_err()); + } + + #[test] + fn test_validate_cpu_time_limit() { + let enforcer = CapabilityEnforcer::new().with_max_cpu_time(30_000); // 30 seconds + + let mut caps = Capabilities::default(); + caps.max_cpu_time_ms = Some(10_000); // 10 seconds - OK + assert!(enforcer.validate_capabilities(&caps).is_ok()); + + caps.max_cpu_time_ms = Some(60_000); // 60 seconds - exceeds limit + assert!(enforcer.validate_capabilities(&caps).is_err()); + } + + #[test] + fn test_filesystem_read_allowed() { + // Use real temp directories so canonicalize works + let tmp = tempfile::tempdir().unwrap(); + let allowed_dir = tmp.path().join("allowed"); + std::fs::create_dir_all(&allowed_dir).unwrap(); + let test_file = allowed_dir.join("test.txt"); + std::fs::write(&test_file, "test").unwrap(); + + let enforcer = CapabilityEnforcer::new().allow_read_path(allowed_dir.clone()); + + assert!(enforcer.is_read_allowed(&test_file)); + assert!(!enforcer.is_read_allowed(Path::new("/etc/passwd"))); + } + + #[test] + fn test_filesystem_read_denied_when_empty() { + let enforcer = CapabilityEnforcer::new(); + assert!(!enforcer.is_read_allowed(Path::new("/tmp/test.txt"))); + } + + #[test] + fn test_filesystem_write_allowed() { + let tmp = tempfile::tempdir().unwrap(); + let output_dir = tmp.path().join("output"); + std::fs::create_dir_all(&output_dir).unwrap(); + // Existing file in allowed dir + let existing = output_dir.join("file.txt"); + std::fs::write(&existing, "test").unwrap(); + + let enforcer = CapabilityEnforcer::new().allow_write_path(output_dir.clone()); + + assert!(enforcer.is_write_allowed(&existing)); + // New file in allowed dir (parent exists) + assert!(enforcer.is_write_allowed(&output_dir.join("new_file.txt"))); + assert!(!enforcer.is_write_allowed(Path::new("/etc/config"))); + } + + #[test] + fn test_filesystem_write_denied_when_empty() { + let enforcer = CapabilityEnforcer::new(); + 
assert!(!enforcer.is_write_allowed(Path::new("/tmp/file.txt"))); + } + + #[test] + fn test_network_allowed() { + let enforcer = CapabilityEnforcer::new().with_network_default(true); + + let mut caps = Capabilities::default(); + caps.network.enabled = true; + + assert!(enforcer.is_network_allowed(&caps)); + + caps.network.enabled = false; + assert!(!enforcer.is_network_allowed(&caps)); + } + + #[test] + fn test_domain_restrictions() { + let enforcer = CapabilityEnforcer::new().with_network_default(true); + + let mut caps = Capabilities::default(); + caps.network.enabled = true; + caps.network.allowed_domains = Some(vec![ + "api.example.com".to_string(), + "cdn.example.com".to_string(), + ]); + + assert!(enforcer.is_domain_allowed(&caps, "api.example.com")); + assert!(enforcer.is_domain_allowed(&caps, "cdn.example.com")); + assert!(!enforcer.is_domain_allowed(&caps, "evil.com")); + } + + #[test] + fn test_get_effective_limits() { + let enforcer = CapabilityEnforcer::new() + .with_max_memory(100 * 1024 * 1024) + .with_max_cpu_time(30_000); + + let mut caps = Capabilities::default(); + + // No limits specified - use defaults + assert_eq!(enforcer.get_memory_limit(&caps), 100 * 1024 * 1024); + assert_eq!(enforcer.get_cpu_time_limit(&caps), 30_000); + + // Plugin requests lower limits - use plugin's + caps.max_memory_bytes = Some(50 * 1024 * 1024); + caps.max_cpu_time_ms = Some(10_000); + assert_eq!(enforcer.get_memory_limit(&caps), 50 * 1024 * 1024); + assert_eq!(enforcer.get_cpu_time_limit(&caps), 10_000); + + // Plugin requests higher limits - cap at system max + caps.max_memory_bytes = Some(200 * 1024 * 1024); + caps.max_cpu_time_ms = Some(60_000); + assert_eq!(enforcer.get_memory_limit(&caps), 100 * 1024 * 1024); + assert_eq!(enforcer.get_cpu_time_limit(&caps), 30_000); + } +} diff --git a/crates/pinakes-core/src/social.rs b/crates/pinakes-core/src/social.rs new file mode 100644 index 0000000..63bea0d --- /dev/null +++ b/crates/pinakes-core/src/social.rs @@ -0,0 
+1,52 @@ +//! Social features: ratings, comments, favorites, and share links. + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::model::MediaId; +use crate::users::UserId; + +/// A user's rating for a media item. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Rating { + pub id: Uuid, + pub user_id: UserId, + pub media_id: MediaId, + pub stars: u8, + pub review_text: Option, + pub created_at: DateTime, +} + +/// A comment on a media item, supporting threaded replies. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Comment { + pub id: Uuid, + pub user_id: UserId, + pub media_id: MediaId, + pub parent_comment_id: Option, + pub text: String, + pub created_at: DateTime, +} + +/// A user's favorite bookmark for a media item. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Favorite { + pub user_id: UserId, + pub media_id: MediaId, + pub created_at: DateTime, +} + +/// A shareable link to a media item with optional password and expiration. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ShareLink { + pub id: Uuid, + pub media_id: MediaId, + pub created_by: UserId, + pub token: String, + #[serde(skip_serializing)] + pub password_hash: Option, + pub expires_at: Option>, + pub view_count: u64, + pub created_at: DateTime, +} diff --git a/crates/pinakes-core/src/storage/mod.rs b/crates/pinakes-core/src/storage/mod.rs index 73e4241..acfebcb 100644 --- a/crates/pinakes-core/src/storage/mod.rs +++ b/crates/pinakes-core/src/storage/mod.rs @@ -7,9 +7,18 @@ use std::sync::Arc; use uuid::Uuid; +use chrono::{DateTime, Utc}; + +use crate::analytics::UsageEvent; +use crate::enrichment::ExternalMetadata; use crate::error::Result; use crate::model::*; +use crate::playlists::Playlist; use crate::search::{SearchRequest, SearchResults}; +use crate::social::{Comment, Rating, ShareLink}; +use crate::subtitles::Subtitle; +use crate::transcode::{TranscodeSession, TranscodeStatus}; +use crate::users::UserId; /// Statistics about the database. 
#[derive(Debug, Clone, Default)] @@ -187,6 +196,167 @@ pub trait StorageBackend: Send + Sync + 'static { // Library statistics async fn library_statistics(&self) -> Result; + + // User Management + async fn list_users(&self) -> Result>; + async fn get_user(&self, id: crate::users::UserId) -> Result; + async fn get_user_by_username(&self, username: &str) -> Result; + async fn create_user( + &self, + username: &str, + password_hash: &str, + role: crate::config::UserRole, + profile: Option, + ) -> Result; + async fn update_user( + &self, + id: crate::users::UserId, + password_hash: Option<&str>, + role: Option, + profile: Option, + ) -> Result; + async fn delete_user(&self, id: crate::users::UserId) -> Result<()>; + async fn get_user_libraries( + &self, + user_id: crate::users::UserId, + ) -> Result>; + async fn grant_library_access( + &self, + user_id: crate::users::UserId, + root_path: &str, + permission: crate::users::LibraryPermission, + ) -> Result<()>; + async fn revoke_library_access( + &self, + user_id: crate::users::UserId, + root_path: &str, + ) -> Result<()>; + + // ===== Ratings ===== + async fn rate_media( + &self, + user_id: UserId, + media_id: MediaId, + stars: u8, + review: Option<&str>, + ) -> Result; + async fn get_media_ratings(&self, media_id: MediaId) -> Result>; + async fn get_user_rating(&self, user_id: UserId, media_id: MediaId) -> Result>; + async fn delete_rating(&self, id: Uuid) -> Result<()>; + + // ===== Comments ===== + async fn add_comment( + &self, + user_id: UserId, + media_id: MediaId, + text: &str, + parent_id: Option, + ) -> Result; + async fn get_media_comments(&self, media_id: MediaId) -> Result>; + async fn delete_comment(&self, id: Uuid) -> Result<()>; + + // ===== Favorites ===== + async fn add_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>; + async fn remove_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>; + async fn get_user_favorites( + &self, + user_id: UserId, + pagination: &Pagination, 
+ ) -> Result>; + async fn is_favorite(&self, user_id: UserId, media_id: MediaId) -> Result; + + // ===== Share Links ===== + async fn create_share_link( + &self, + media_id: MediaId, + created_by: UserId, + token: &str, + password_hash: Option<&str>, + expires_at: Option>, + ) -> Result; + async fn get_share_link(&self, token: &str) -> Result; + async fn increment_share_views(&self, token: &str) -> Result<()>; + async fn delete_share_link(&self, id: Uuid) -> Result<()>; + + // ===== Playlists ===== + async fn create_playlist( + &self, + owner_id: UserId, + name: &str, + description: Option<&str>, + is_public: bool, + is_smart: bool, + filter_query: Option<&str>, + ) -> Result; + async fn get_playlist(&self, id: Uuid) -> Result; + async fn list_playlists(&self, owner_id: Option) -> Result>; + async fn update_playlist( + &self, + id: Uuid, + name: Option<&str>, + description: Option<&str>, + is_public: Option, + ) -> Result; + async fn delete_playlist(&self, id: Uuid) -> Result<()>; + async fn add_to_playlist( + &self, + playlist_id: Uuid, + media_id: MediaId, + position: i32, + ) -> Result<()>; + async fn remove_from_playlist(&self, playlist_id: Uuid, media_id: MediaId) -> Result<()>; + async fn get_playlist_items(&self, playlist_id: Uuid) -> Result>; + async fn reorder_playlist( + &self, + playlist_id: Uuid, + media_id: MediaId, + new_position: i32, + ) -> Result<()>; + + // ===== Analytics ===== + async fn record_usage_event(&self, event: &UsageEvent) -> Result<()>; + async fn get_usage_events( + &self, + media_id: Option, + user_id: Option, + limit: u64, + ) -> Result>; + async fn get_most_viewed(&self, limit: u64) -> Result>; + async fn get_recently_viewed(&self, user_id: UserId, limit: u64) -> Result>; + async fn update_watch_progress( + &self, + user_id: UserId, + media_id: MediaId, + progress_secs: f64, + ) -> Result<()>; + async fn get_watch_progress(&self, user_id: UserId, media_id: MediaId) -> Result>; + async fn cleanup_old_events(&self, before: 
DateTime) -> Result; + + // ===== Subtitles ===== + async fn add_subtitle(&self, subtitle: &Subtitle) -> Result<()>; + async fn get_media_subtitles(&self, media_id: MediaId) -> Result>; + async fn delete_subtitle(&self, id: Uuid) -> Result<()>; + async fn update_subtitle_offset(&self, id: Uuid, offset_ms: i64) -> Result<()>; + + // ===== External Metadata (Enrichment) ===== + async fn store_external_metadata(&self, meta: &ExternalMetadata) -> Result<()>; + async fn get_external_metadata(&self, media_id: MediaId) -> Result>; + async fn delete_external_metadata(&self, id: Uuid) -> Result<()>; + + // ===== Transcode Sessions ===== + async fn create_transcode_session(&self, session: &TranscodeSession) -> Result<()>; + async fn get_transcode_session(&self, id: Uuid) -> Result; + async fn list_transcode_sessions( + &self, + media_id: Option, + ) -> Result>; + async fn update_transcode_status( + &self, + id: Uuid, + status: TranscodeStatus, + progress: f32, + ) -> Result<()>; + async fn cleanup_expired_transcodes(&self, before: DateTime) -> Result; } /// Comprehensive library statistics. diff --git a/crates/pinakes-core/src/storage/postgres.rs b/crates/pinakes-core/src/storage/postgres.rs index 0099e50..0b89151 100644 --- a/crates/pinakes-core/src/storage/postgres.rs +++ b/crates/pinakes-core/src/storage/postgres.rs @@ -1585,9 +1585,1455 @@ impl StorageBackend for PostgresBackend { .await .map_err(|_| PinakesError::Database("library_statistics query timed out".to_string()))? 
} + + async fn list_users(&self) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client + .query("SELECT id, username, password_hash, role, created_at, updated_at FROM users ORDER BY created_at DESC", &[]) + .await?; + let mut users = Vec::with_capacity(rows.len()); + for row in rows { + let user_id: uuid::Uuid = row.get::<_, uuid::Uuid>(0); + let profile = self.load_user_profile(user_id).await?; + users.push(crate::users::User { + id: crate::users::UserId(user_id), + username: row.get(1), + password_hash: row.get(2), + role: serde_json::from_value(row.get(3)).unwrap_or(crate::config::UserRole::Viewer), + profile, + created_at: row.get(4), + updated_at: row.get(5), + }); + } + Ok(users) + } + + async fn get_user(&self, id: crate::users::UserId) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let row = client + .query_opt("SELECT id, username, password_hash, role, created_at, updated_at FROM users WHERE id = $1", &[&id.0]) + .await? + .ok_or_else(|| PinakesError::NotFound(format!("user {}", id.0)))?; + let profile = self.load_user_profile(id.0).await?; + Ok(crate::users::User { + id: crate::users::UserId(row.get::<_, uuid::Uuid>(0)), + username: row.get(1), + password_hash: row.get(2), + role: serde_json::from_value(row.get(3)).unwrap_or(crate::config::UserRole::Viewer), + profile, + created_at: row.get(4), + updated_at: row.get(5), + }) + } + + async fn get_user_by_username(&self, username: &str) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let row = client + .query_opt("SELECT id, username, password_hash, role, created_at, updated_at FROM users WHERE username = $1", &[&username]) + .await? 
+ .ok_or_else(|| PinakesError::NotFound(format!("user with username {}", username)))?; + let user_id: uuid::Uuid = row.get::<_, uuid::Uuid>(0); + let profile = self.load_user_profile(user_id).await?; + Ok(crate::users::User { + id: crate::users::UserId(user_id), + username: row.get(1), + password_hash: row.get(2), + role: serde_json::from_value(row.get(3)).unwrap_or(crate::config::UserRole::Viewer), + profile, + created_at: row.get(4), + updated_at: row.get(5), + }) + } + + async fn create_user( + &self, + username: &str, + password_hash: &str, + role: crate::config::UserRole, + profile: Option, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let id = uuid::Uuid::now_v7(); + let now = chrono::Utc::now(); + let role_json = serde_json::to_value(role)?; + + client + .execute( + "INSERT INTO users (id, username, password_hash, role, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6)", + &[&id, &username, &password_hash, &role_json, &now, &now], + ) + .await?; + + let user_profile = if let Some(prof) = profile.clone() { + let prefs_json = serde_json::to_value(&prof.preferences)?; + client + .execute( + "INSERT INTO user_profiles (user_id, avatar_path, bio, preferences_json, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6)", + &[&id, &prof.avatar_path, &prof.bio, &prefs_json, &now, &now], + ) + .await?; + prof + } else { + crate::users::UserProfile { + avatar_path: None, + bio: None, + preferences: Default::default(), + } + }; + + Ok(crate::users::User { + id: crate::users::UserId(id), + username: username.to_string(), + password_hash: password_hash.to_string(), + role, + profile: user_profile, + created_at: now, + updated_at: now, + }) + } + + async fn update_user( + &self, + id: crate::users::UserId, + password_hash: Option<&str>, + role: Option, + profile: Option, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| 
PinakesError::Database(format!("pool error: {e}")))?; + let now = chrono::Utc::now(); + + // Update password and/or role if provided + if password_hash.is_some() || role.is_some() { + let mut updates = vec!["updated_at = $1".to_string()]; + let mut param_idx = 2; + + let pw_update = if password_hash.is_some() { + let s = format!("password_hash = ${}", param_idx); + param_idx += 1; + Some(s) + } else { + None + }; + if let Some(ref s) = pw_update { + updates.push(s.clone()); + } + + let role_json: Option = if let Some(ref r) = role { + param_idx += 1; + Some(serde_json::to_value(r)?) + } else { + None + }; + if role_json.is_some() { + updates.push(format!("role = ${}", param_idx - 1)); + } + + let sql = format!( + "UPDATE users SET {} WHERE id = ${}", + updates.join(", "), + param_idx + ); + + let mut params: Vec<&(dyn tokio_postgres::types::ToSql + Sync)> = vec![&now]; + if let Some(ref pw) = password_hash { + params.push(pw); + } + if let Some(ref rj) = role_json { + params.push(rj); + } + params.push(&id.0); + + client.execute(&sql, ¶ms).await?; + } + + // Update profile if provided + if let Some(prof) = profile { + let prefs_json = serde_json::to_value(&prof.preferences)?; + client + .execute( + "INSERT INTO user_profiles (user_id, avatar_path, bio, preferences_json, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT (user_id) DO UPDATE SET avatar_path = $2, bio = $3, preferences_json = $4, updated_at = $6", + &[&id.0, &prof.avatar_path, &prof.bio, &prefs_json, &now, &now], + ) + .await?; + } + + // Fetch updated user + self.get_user(id).await + } + + async fn delete_user(&self, id: crate::users::UserId) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + + // Delete profile first due to foreign key + client + .execute("DELETE FROM user_profiles WHERE user_id = $1", &[&id.0]) + .await?; + // Delete library access + client + .execute("DELETE FROM user_libraries 
WHERE user_id = $1", &[&id.0]) + .await?; + // Delete user + let affected = client + .execute("DELETE FROM users WHERE id = $1", &[&id.0]) + .await?; + if affected == 0 { + return Err(PinakesError::NotFound(format!("user {}", id.0))); + } + Ok(()) + } + + async fn get_user_libraries( + &self, + user_id: crate::users::UserId, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client + .query("SELECT user_id, root_path, permission, granted_at FROM user_libraries WHERE user_id = $1", &[&user_id.0]) + .await?; + let mut libraries = Vec::with_capacity(rows.len()); + for row in rows { + libraries.push(crate::users::UserLibraryAccess { + user_id: crate::users::UserId(row.get::<_, uuid::Uuid>(0)), + root_path: row.get(1), + permission: serde_json::from_value(row.get(2)) + .unwrap_or(crate::users::LibraryPermission::Read), + granted_at: row.get(3), + }); + } + Ok(libraries) + } + + async fn grant_library_access( + &self, + user_id: crate::users::UserId, + root_path: &str, + permission: crate::users::LibraryPermission, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let perm_json = serde_json::to_value(permission)?; + let now = chrono::Utc::now(); + client + .execute( + "INSERT INTO user_libraries (user_id, root_path, permission, granted_at) VALUES ($1, $2, $3, $4) + ON CONFLICT (user_id, root_path) DO UPDATE SET permission = $3, granted_at = $4", + &[&user_id.0, &root_path, &perm_json, &now], + ) + .await?; + Ok(()) + } + + async fn revoke_library_access( + &self, + user_id: crate::users::UserId, + root_path: &str, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute( + "DELETE FROM user_libraries WHERE user_id = $1 AND root_path = $2", + &[&user_id.0, &root_path], + ) + .await?; + Ok(()) + } + 
+ // ===== Ratings ===== + async fn rate_media( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + stars: u8, + review: Option<&str>, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let id = Uuid::now_v7(); + let now = Utc::now(); + let stars_i32 = stars as i32; + let row = client.query_one( + "INSERT INTO ratings (id, user_id, media_id, stars, review_text, created_at) VALUES ($1, $2, $3, $4, $5, $6) ON CONFLICT (user_id, media_id) DO UPDATE SET stars = $4, review_text = $5 RETURNING id, created_at", + &[&id, &user_id.0, &media_id.0, &stars_i32, &review, &now], + ).await?; + let actual_id: Uuid = row.get(0); + let actual_created_at: chrono::DateTime = row.get(1); + Ok(crate::social::Rating { + id: actual_id, + user_id, + media_id, + stars, + review_text: review.map(String::from), + created_at: actual_created_at, + }) + } + + async fn get_media_ratings(&self, media_id: MediaId) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, user_id, media_id, stars, review_text, created_at FROM ratings WHERE media_id = $1 ORDER BY created_at DESC", + &[&media_id.0], + ).await?; + Ok(rows + .iter() + .map(|row| crate::social::Rating { + id: row.get("id"), + user_id: crate::users::UserId(row.get("user_id")), + media_id: MediaId(row.get("media_id")), + stars: row.get::<_, i32>("stars") as u8, + review_text: row.get("review_text"), + created_at: row.get("created_at"), + }) + .collect()) + } + + async fn get_user_rating( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, user_id, media_id, stars, review_text, created_at FROM ratings WHERE user_id = $1 AND media_id = $2", + &[&user_id.0, 
&media_id.0], + ).await?; + Ok(rows.first().map(|row| crate::social::Rating { + id: row.get("id"), + user_id: crate::users::UserId(row.get("user_id")), + media_id: MediaId(row.get("media_id")), + stars: row.get::<_, i32>("stars") as u8, + review_text: row.get("review_text"), + created_at: row.get("created_at"), + })) + } + + async fn delete_rating(&self, id: Uuid) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute("DELETE FROM ratings WHERE id = $1", &[&id]) + .await?; + Ok(()) + } + + // ===== Comments ===== + async fn add_comment( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + text: &str, + parent_id: Option, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let id = Uuid::now_v7(); + let now = Utc::now(); + client.execute( + "INSERT INTO comments (id, user_id, media_id, parent_comment_id, text, created_at) VALUES ($1, $2, $3, $4, $5, $6)", + &[&id, &user_id.0, &media_id.0, &parent_id, &text, &now], + ).await?; + Ok(crate::social::Comment { + id, + user_id, + media_id, + parent_comment_id: parent_id, + text: text.to_string(), + created_at: now, + }) + } + + async fn get_media_comments(&self, media_id: MediaId) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, user_id, media_id, parent_comment_id, text, created_at FROM comments WHERE media_id = $1 ORDER BY created_at ASC", + &[&media_id.0], + ).await?; + Ok(rows + .iter() + .map(|row| crate::social::Comment { + id: row.get("id"), + user_id: crate::users::UserId(row.get("user_id")), + media_id: MediaId(row.get("media_id")), + parent_comment_id: row.get("parent_comment_id"), + text: row.get("text"), + created_at: row.get("created_at"), + }) + .collect()) + } + + async fn delete_comment(&self, id: Uuid) 
-> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute("DELETE FROM comments WHERE id = $1", &[&id]) + .await?; + Ok(()) + } + + // ===== Favorites ===== + async fn add_favorite(&self, user_id: crate::users::UserId, media_id: MediaId) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let now = Utc::now(); + client.execute( + "INSERT INTO favorites (user_id, media_id, created_at) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", + &[&user_id.0, &media_id.0, &now], + ).await?; + Ok(()) + } + + async fn remove_favorite( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute( + "DELETE FROM favorites WHERE user_id = $1 AND media_id = $2", + &[&user_id.0, &media_id.0], + ) + .await?; + Ok(()) + } + + async fn get_user_favorites( + &self, + user_id: crate::users::UserId, + pagination: &Pagination, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN favorites f ON m.id = f.media_id WHERE f.user_id = $1 ORDER BY f.created_at DESC LIMIT $2 OFFSET $3", + &[&user_id.0, &(pagination.limit as i64), &(pagination.offset as i64)], + ).await?; + let mut items: Vec = rows + .iter() + .map(row_to_media_item) + .collect::>>()?; + + // Batch-load custom fields + if !items.is_empty() { + let ids: Vec = items.iter().map(|i| i.id.0).collect(); + let cf_rows = client + .query( + "SELECT media_id, field_name, field_type, field_value 
+ FROM custom_fields WHERE media_id = ANY($1)", + &[&ids], + ) + .await?; + let mut cf_map: HashMap> = HashMap::new(); + for row in &cf_rows { + let mid: Uuid = row.get("media_id"); + let name: String = row.get("field_name"); + let ft_str: String = row.get("field_type"); + let value: String = row.get("field_value"); + let field_type = custom_field_type_from_string(&ft_str)?; + cf_map + .entry(mid) + .or_default() + .insert(name, CustomField { field_type, value }); + } + for item in &mut items { + if let Some(fields) = cf_map.remove(&item.id.0) { + item.custom_fields = fields; + } + } + } + + Ok(items) + } + + async fn is_favorite(&self, user_id: crate::users::UserId, media_id: MediaId) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let row = client + .query_one( + "SELECT COUNT(*) FROM favorites WHERE user_id = $1 AND media_id = $2", + &[&user_id.0, &media_id.0], + ) + .await?; + let count: i64 = row.get(0); + Ok(count > 0) + } + + // ===== Share Links ===== + async fn create_share_link( + &self, + media_id: MediaId, + created_by: crate::users::UserId, + token: &str, + password_hash: Option<&str>, + expires_at: Option>, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let id = Uuid::now_v7(); + let now = Utc::now(); + let view_count: i32 = 0; + client.execute( + "INSERT INTO share_links (id, media_id, created_by, token, password_hash, expires_at, view_count, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)", + &[&id, &media_id.0, &created_by.0, &token, &password_hash, &expires_at, &view_count, &now], + ).await?; + Ok(crate::social::ShareLink { + id, + media_id, + created_by, + token: token.to_string(), + password_hash: password_hash.map(String::from), + expires_at, + view_count: 0, + created_at: now, + }) + } + + async fn get_share_link(&self, token: &str) -> Result { + let client = self + .pool + 
.get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, media_id, created_by, token, password_hash, expires_at, view_count, created_at FROM share_links WHERE token = $1", + &[&token], + ).await?; + let row = rows + .first() + .ok_or_else(|| PinakesError::NotFound("share link not found".into()))?; + Ok(crate::social::ShareLink { + id: row.get("id"), + media_id: MediaId(row.get("media_id")), + created_by: crate::users::UserId(row.get("created_by")), + token: row.get("token"), + password_hash: row.get("password_hash"), + expires_at: row.get("expires_at"), + view_count: row.get::<_, i32>("view_count") as u64, + created_at: row.get("created_at"), + }) + } + + async fn increment_share_views(&self, token: &str) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute( + "UPDATE share_links SET view_count = view_count + 1 WHERE token = $1", + &[&token], + ) + .await?; + Ok(()) + } + + async fn delete_share_link(&self, id: Uuid) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute("DELETE FROM share_links WHERE id = $1", &[&id]) + .await?; + Ok(()) + } + + // ===== Playlists ===== + async fn create_playlist( + &self, + owner_id: crate::users::UserId, + name: &str, + description: Option<&str>, + is_public: bool, + is_smart: bool, + filter_query: Option<&str>, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let id = Uuid::now_v7(); + let now = Utc::now(); + client.execute( + "INSERT INTO playlists (id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)", + &[&id, &owner_id.0, &name, &description, &is_public, &is_smart, &filter_query, &now, &now], + 
).await?; + Ok(crate::playlists::Playlist { + id, + owner_id, + name: name.to_string(), + description: description.map(String::from), + is_public, + is_smart, + filter_query: filter_query.map(String::from), + created_at: now, + updated_at: now, + }) + } + + async fn get_playlist(&self, id: Uuid) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at FROM playlists WHERE id = $1", + &[&id], + ).await?; + let row = rows + .first() + .ok_or_else(|| PinakesError::NotFound(format!("playlist {id}")))?; + Ok(crate::playlists::Playlist { + id: row.get("id"), + owner_id: crate::users::UserId(row.get("owner_id")), + name: row.get("name"), + description: row.get("description"), + is_public: row.get("is_public"), + is_smart: row.get("is_smart"), + filter_query: row.get("filter_query"), + created_at: row.get("created_at"), + updated_at: row.get("updated_at"), + }) + } + + async fn list_playlists( + &self, + owner_id: Option, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = match owner_id { + Some(uid) => client.query( + "SELECT id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at FROM playlists WHERE owner_id = $1 OR is_public = true ORDER BY updated_at DESC", + &[&uid.0], + ).await?, + None => client.query( + "SELECT id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at FROM playlists ORDER BY updated_at DESC", + &[], + ).await?, + }; + Ok(rows + .iter() + .map(|row| crate::playlists::Playlist { + id: row.get("id"), + owner_id: crate::users::UserId(row.get("owner_id")), + name: row.get("name"), + description: row.get("description"), + is_public: row.get("is_public"), + is_smart: row.get("is_smart"), + 
filter_query: row.get("filter_query"), + created_at: row.get("created_at"), + updated_at: row.get("updated_at"), + }) + .collect()) + } + + async fn update_playlist( + &self, + id: Uuid, + name: Option<&str>, + description: Option<&str>, + is_public: Option, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let now = Utc::now(); + // Build dynamic update + let mut set_parts = vec!["updated_at = $1".to_string()]; + let mut params: Vec> = + vec![Box::new(now)]; + let mut idx = 2; + if let Some(n) = name { + set_parts.push(format!("name = ${idx}")); + params.push(Box::new(n.to_string())); + idx += 1; + } + if let Some(d) = description { + set_parts.push(format!("description = ${idx}")); + params.push(Box::new(d.to_string())); + idx += 1; + } + if let Some(p) = is_public { + set_parts.push(format!("is_public = ${idx}")); + params.push(Box::new(p)); + idx += 1; + } + params.push(Box::new(id)); + let sql = format!( + "UPDATE playlists SET {} WHERE id = ${idx}", + set_parts.join(", ") + ); + let param_refs: Vec<&(dyn tokio_postgres::types::ToSql + Sync)> = params + .iter() + .map(|p| &**p as &(dyn tokio_postgres::types::ToSql + Sync)) + .collect(); + client.execute(&sql, ¶m_refs).await?; + self.get_playlist(id).await + } + + async fn delete_playlist(&self, id: Uuid) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute("DELETE FROM playlists WHERE id = $1", &[&id]) + .await?; + Ok(()) + } + + async fn add_to_playlist( + &self, + playlist_id: Uuid, + media_id: MediaId, + position: i32, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let now = Utc::now(); + client.execute( + "INSERT INTO playlist_items (playlist_id, media_id, position, added_at) VALUES ($1, $2, $3, $4) ON CONFLICT (playlist_id, media_id) DO 
UPDATE SET position = $3", + &[&playlist_id, &media_id.0, &position, &now], + ).await?; + Ok(()) + } + + async fn remove_from_playlist(&self, playlist_id: Uuid, media_id: MediaId) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute( + "DELETE FROM playlist_items WHERE playlist_id = $1 AND media_id = $2", + &[&playlist_id, &media_id.0], + ) + .await?; + Ok(()) + } + + async fn get_playlist_items(&self, playlist_id: Uuid) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN playlist_items pi ON m.id = pi.media_id WHERE pi.playlist_id = $1 ORDER BY pi.position ASC", + &[&playlist_id], + ).await?; + let mut items: Vec = rows + .iter() + .map(row_to_media_item) + .collect::>>()?; + + // Batch-load custom fields + if !items.is_empty() { + let ids: Vec = items.iter().map(|i| i.id.0).collect(); + let cf_rows = client + .query( + "SELECT media_id, field_name, field_type, field_value + FROM custom_fields WHERE media_id = ANY($1)", + &[&ids], + ) + .await?; + let mut cf_map: HashMap> = HashMap::new(); + for row in &cf_rows { + let mid: Uuid = row.get("media_id"); + let name: String = row.get("field_name"); + let ft_str: String = row.get("field_type"); + let value: String = row.get("field_value"); + let field_type = custom_field_type_from_string(&ft_str)?; + cf_map + .entry(mid) + .or_default() + .insert(name, CustomField { field_type, value }); + } + for item in &mut items { + if let Some(fields) = cf_map.remove(&item.id.0) { + item.custom_fields = fields; + } + } + } + + Ok(items) + } + + async fn reorder_playlist( + &self, + playlist_id: Uuid, + media_id: 
MediaId, + new_position: i32, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute( + "UPDATE playlist_items SET position = $1 WHERE playlist_id = $2 AND media_id = $3", + &[&new_position, &playlist_id, &media_id.0], + ) + .await?; + Ok(()) + } + + // ===== Analytics ===== + async fn record_usage_event(&self, event: &crate::analytics::UsageEvent) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let media_id = event.media_id.map(|m| m.0); + let user_id = event.user_id.map(|u| u.0); + let event_type = event.event_type.to_string(); + let context: Option = event + .context_json + .as_ref() + .and_then(|s| serde_json::from_str(s).ok()); + client.execute( + "INSERT INTO usage_events (id, media_id, user_id, event_type, timestamp, duration_secs, context_json) VALUES ($1, $2, $3, $4, $5, $6, $7)", + &[&event.id, &media_id, &user_id, &event_type, &event.timestamp, &event.duration_secs, &context], + ).await?; + Ok(()) + } + + async fn get_usage_events( + &self, + media_id: Option, + user_id: Option, + limit: u64, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let mut conditions = Vec::new(); + let mut params: Vec> = Vec::new(); + let mut idx = 1; + if let Some(mid) = media_id { + conditions.push(format!("media_id = ${idx}")); + params.push(Box::new(mid.0)); + idx += 1; + } + if let Some(uid) = user_id { + conditions.push(format!("user_id = ${idx}")); + params.push(Box::new(uid.0)); + idx += 1; + } + let where_clause = if conditions.is_empty() { + String::new() + } else { + format!("WHERE {}", conditions.join(" AND ")) + }; + params.push(Box::new(limit as i64)); + let sql = format!( + "SELECT id, media_id, user_id, event_type, timestamp, duration_secs, context_json FROM usage_events {} ORDER BY timestamp DESC 
LIMIT ${idx}", + where_clause + ); + let param_refs: Vec<&(dyn tokio_postgres::types::ToSql + Sync)> = params + .iter() + .map(|p| &**p as &(dyn tokio_postgres::types::ToSql + Sync)) + .collect(); + let rows = client.query(&sql, ¶m_refs).await?; + Ok(rows + .iter() + .map(|row| { + let event_type_str: String = row.get("event_type"); + let context_json: Option = row.get("context_json"); + crate::analytics::UsageEvent { + id: row.get("id"), + media_id: row.get::<_, Option>("media_id").map(MediaId), + user_id: row + .get::<_, Option>("user_id") + .map(crate::users::UserId), + event_type: event_type_str + .parse() + .unwrap_or(crate::analytics::UsageEventType::View), + timestamp: row.get("timestamp"), + duration_secs: row.get("duration_secs"), + context_json: context_json.map(|v| v.to_string()), + } + }) + .collect()) + } + + async fn get_most_viewed(&self, limit: u64) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at, COUNT(ue.id) as view_count FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at ORDER BY view_count DESC LIMIT $1", + &[&(limit as i64)], + ).await?; + let mut results = Vec::new(); + for row in &rows { + let item = row_to_media_item(row)?; + let count: i64 = row.get(16); + results.push((item, count as u64)); + } + + // Batch-load custom fields + if !results.is_empty() { + let ids: Vec = results.iter().map(|(i, _)| i.id.0).collect(); + let cf_rows = client + .query( + "SELECT media_id, field_name, 
field_type, field_value + FROM custom_fields WHERE media_id = ANY($1)", + &[&ids], + ) + .await?; + let mut cf_map: HashMap> = HashMap::new(); + for row in &cf_rows { + let mid: Uuid = row.get("media_id"); + let name: String = row.get("field_name"); + let ft_str: String = row.get("field_type"); + let value: String = row.get("field_value"); + let field_type = custom_field_type_from_string(&ft_str)?; + cf_map + .entry(mid) + .or_default() + .insert(name, CustomField { field_type, value }); + } + for (item, _) in &mut results { + if let Some(fields) = cf_map.remove(&item.id.0) { + item.custom_fields = fields; + } + } + } + + Ok(results) + } + + async fn get_recently_viewed( + &self, + user_id: crate::users::UserId, + limit: u64, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.user_id = $1 AND ue.event_type IN ('view', 'play') GROUP BY m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at ORDER BY MAX(ue.timestamp) DESC LIMIT $2", + &[&user_id.0, &(limit as i64)], + ).await?; + let mut items: Vec = rows + .iter() + .map(row_to_media_item) + .collect::>>()?; + + // Batch-load custom fields + if !items.is_empty() { + let ids: Vec = items.iter().map(|i| i.id.0).collect(); + let cf_rows = client + .query( + "SELECT media_id, field_name, field_type, field_value + FROM custom_fields WHERE media_id = ANY($1)", + &[&ids], + ) + .await?; + let mut cf_map: HashMap> = HashMap::new(); + for row in &cf_rows { + let mid: Uuid = row.get("media_id"); + let name: String = 
row.get("field_name"); + let ft_str: String = row.get("field_type"); + let value: String = row.get("field_value"); + let field_type = custom_field_type_from_string(&ft_str)?; + cf_map + .entry(mid) + .or_default() + .insert(name, CustomField { field_type, value }); + } + for item in &mut items { + if let Some(fields) = cf_map.remove(&item.id.0) { + item.custom_fields = fields; + } + } + } + + Ok(items) + } + + async fn update_watch_progress( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + progress_secs: f64, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let id = Uuid::now_v7(); + let now = Utc::now(); + client.execute( + "INSERT INTO watch_history (id, user_id, media_id, progress_secs, last_watched) VALUES ($1, $2, $3, $4, $5) ON CONFLICT (user_id, media_id) DO UPDATE SET progress_secs = $4, last_watched = $5", + &[&id, &user_id.0, &media_id.0, &progress_secs, &now], + ).await?; + Ok(()) + } + + async fn get_watch_progress( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client + .query( + "SELECT progress_secs FROM watch_history WHERE user_id = $1 AND media_id = $2", + &[&user_id.0, &media_id.0], + ) + .await?; + Ok(rows.first().map(|row| row.get("progress_secs"))) + } + + async fn cleanup_old_events(&self, before: chrono::DateTime) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let affected = client + .execute("DELETE FROM usage_events WHERE timestamp < $1", &[&before]) + .await?; + Ok(affected) + } + + // ===== Subtitles ===== + async fn add_subtitle(&self, subtitle: &crate::subtitles::Subtitle) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: 
{e}")))?; + let format_str = subtitle.format.to_string(); + let file_path = subtitle + .file_path + .as_ref() + .map(|p| p.to_string_lossy().to_string()); + let track_index = subtitle.track_index.map(|i| i as i32); + let offset_ms = subtitle.offset_ms as i32; + client.execute( + "INSERT INTO subtitles (id, media_id, language, format, file_path, is_embedded, track_index, offset_ms, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)", + &[&subtitle.id, &subtitle.media_id.0, &subtitle.language, &format_str, &file_path, &subtitle.is_embedded, &track_index, &offset_ms, &subtitle.created_at], + ).await?; + Ok(()) + } + + async fn get_media_subtitles( + &self, + media_id: MediaId, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, media_id, language, format, file_path, is_embedded, track_index, offset_ms, created_at FROM subtitles WHERE media_id = $1", + &[&media_id.0], + ).await?; + Ok(rows + .iter() + .map(|row| { + let format_str: String = row.get("format"); + crate::subtitles::Subtitle { + id: row.get("id"), + media_id: MediaId(row.get("media_id")), + language: row.get("language"), + format: format_str + .parse() + .unwrap_or(crate::subtitles::SubtitleFormat::Srt), + file_path: row + .get::<_, Option>("file_path") + .map(std::path::PathBuf::from), + is_embedded: row.get("is_embedded"), + track_index: row.get::<_, Option>("track_index").map(|i| i as usize), + offset_ms: row.get::<_, i32>("offset_ms") as i64, + created_at: row.get("created_at"), + } + }) + .collect()) + } + + async fn delete_subtitle(&self, id: Uuid) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute("DELETE FROM subtitles WHERE id = $1", &[&id]) + .await?; + Ok(()) + } + + async fn update_subtitle_offset(&self, id: Uuid, offset_ms: i64) -> Result<()> { + let client = self + .pool 
+ .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let offset = offset_ms as i32; + client + .execute( + "UPDATE subtitles SET offset_ms = $1 WHERE id = $2", + &[&offset, &id], + ) + .await?; + Ok(()) + } + + // ===== External Metadata (Enrichment) ===== + async fn store_external_metadata( + &self, + meta: &crate::enrichment::ExternalMetadata, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let source = meta.source.to_string(); + let metadata_json: serde_json::Value = serde_json::from_str(&meta.metadata_json) + .unwrap_or_else(|e| { + tracing::warn!( + "failed to deserialize metadata_json for external metadata {}: {}", + meta.id, + e + ); + serde_json::Value::Object(Default::default()) + }); + client.execute( + "INSERT INTO external_metadata (id, media_id, source, external_id, metadata_json, confidence, last_updated) VALUES ($1, $2, $3, $4, $5, $6, $7) ON CONFLICT (id) DO UPDATE SET metadata_json = $5, confidence = $6, last_updated = $7", + &[&meta.id, &meta.media_id.0, &source, &meta.external_id, &metadata_json, &meta.confidence, &meta.last_updated], + ).await?; + Ok(()) + } + + async fn get_external_metadata( + &self, + media_id: MediaId, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, media_id, source, external_id, metadata_json, confidence, last_updated FROM external_metadata WHERE media_id = $1", + &[&media_id.0], + ).await?; + Ok(rows + .iter() + .map(|row| { + let source_str: String = row.get("source"); + let metadata_json: serde_json::Value = row.get("metadata_json"); + crate::enrichment::ExternalMetadata { + id: row.get("id"), + media_id: MediaId(row.get("media_id")), + source: source_str + .parse() + .unwrap_or(crate::enrichment::EnrichmentSourceType::MusicBrainz), + external_id: row.get("external_id"), + 
metadata_json: metadata_json.to_string(), + confidence: row.get("confidence"), + last_updated: row.get("last_updated"), + } + }) + .collect()) + } + + async fn delete_external_metadata(&self, id: Uuid) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + client + .execute("DELETE FROM external_metadata WHERE id = $1", &[&id]) + .await?; + Ok(()) + } + + // ===== Transcode Sessions ===== + async fn create_transcode_session( + &self, + session: &crate::transcode::TranscodeSession, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let user_id = session.user_id.map(|u| u.0); + let cache_path = session.cache_path.to_string_lossy().to_string(); + let status = session.status.as_str().to_string(); + let error_message = session.status.error_message().map(String::from); + let progress = session.progress as f64; + client.execute( + "INSERT INTO transcode_sessions (id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + &[&session.id, &session.media_id.0, &user_id, &session.profile, &cache_path, &status, &progress, &error_message, &session.created_at, &session.expires_at], + ).await?; + Ok(()) + } + + async fn get_transcode_session(&self, id: Uuid) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = client.query( + "SELECT id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at FROM transcode_sessions WHERE id = $1", + &[&id], + ).await?; + let row = rows + .first() + .ok_or_else(|| PinakesError::NotFound(format!("transcode session {id}")))?; + let status_str: String = row.get("status"); + let error_msg: Option = row.get("error_message"); + let progress: f64 = row.get("progress"); + 
Ok(crate::transcode::TranscodeSession { + id: row.get("id"), + media_id: MediaId(row.get("media_id")), + user_id: row + .get::<_, Option>("user_id") + .map(crate::users::UserId), + profile: row.get("profile"), + cache_path: std::path::PathBuf::from(row.get::<_, String>("cache_path")), + status: crate::transcode::TranscodeStatus::from_db(&status_str, error_msg.as_deref()), + progress: progress as f32, + created_at: row.get("created_at"), + expires_at: row.get("expires_at"), + duration_secs: None, + child_cancel: None, + }) + } + + async fn list_transcode_sessions( + &self, + media_id: Option, + ) -> Result> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let rows = match media_id { + Some(mid) => client.query( + "SELECT id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at FROM transcode_sessions WHERE media_id = $1 ORDER BY created_at DESC", + &[&mid.0], + ).await?, + None => client.query( + "SELECT id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at FROM transcode_sessions ORDER BY created_at DESC", + &[], + ).await?, + }; + Ok(rows + .iter() + .map(|row| { + let status_str: String = row.get("status"); + let error_msg: Option = row.get("error_message"); + let progress: f64 = row.get("progress"); + crate::transcode::TranscodeSession { + id: row.get("id"), + media_id: MediaId(row.get("media_id")), + user_id: row + .get::<_, Option>("user_id") + .map(crate::users::UserId), + profile: row.get("profile"), + cache_path: std::path::PathBuf::from(row.get::<_, String>("cache_path")), + status: crate::transcode::TranscodeStatus::from_db( + &status_str, + error_msg.as_deref(), + ), + progress: progress as f32, + created_at: row.get("created_at"), + expires_at: row.get("expires_at"), + duration_secs: None, + child_cancel: None, + } + }) + .collect()) + } + + async fn update_transcode_status( + &self, + id: Uuid, + 
status: crate::transcode::TranscodeStatus, + progress: f32, + ) -> Result<()> { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let status_str = status.as_str().to_string(); + let error_message = status.error_message().map(String::from); + let progress_f64 = progress as f64; + client.execute( + "UPDATE transcode_sessions SET status = $1, progress = $2, error_message = $3 WHERE id = $4", + &[&status_str, &progress_f64, &error_message, &id], + ).await?; + Ok(()) + } + + async fn cleanup_expired_transcodes( + &self, + before: chrono::DateTime, + ) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let affected = client + .execute( + "DELETE FROM transcode_sessions WHERE expires_at IS NOT NULL AND expires_at < $1", + &[&before], + ) + .await?; + Ok(affected) + } } impl PostgresBackend { + async fn load_user_profile(&self, user_id: uuid::Uuid) -> Result { + let client = self + .pool + .get() + .await + .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?; + let row = client + .query_opt( + "SELECT avatar_path, bio, preferences_json FROM user_profiles WHERE user_id = $1", + &[&user_id], + ) + .await?; + match row { + Some(row) => { + let prefs_json: serde_json::Value = row.get::<_, serde_json::Value>(2); + let preferences: crate::users::UserPreferences = + serde_json::from_value(prefs_json).unwrap_or_default(); + Ok(crate::users::UserProfile { + avatar_path: row.get(0), + bio: row.get(1), + preferences, + }) + } + None => Ok(crate::users::UserProfile { + avatar_path: None, + bio: None, + preferences: Default::default(), + }), + } + } + async fn library_statistics_inner(&self) -> Result { let client = self .pool @@ -1767,7 +3213,9 @@ mod tests { #[test] fn test_media_type_roundtrip() { - let mt = MediaType::Mp3; + use crate::media_type::BuiltinMediaType; + + let mt = MediaType::Builtin(BuiltinMediaType::Mp3); let s 
= media_type_to_string(&mt); assert_eq!(s, "mp3"); let parsed = media_type_from_string(&s).unwrap(); diff --git a/crates/pinakes-core/src/storage/sqlite.rs b/crates/pinakes-core/src/storage/sqlite.rs index a08ea4f..1090441 100644 --- a/crates/pinakes-core/src/storage/sqlite.rs +++ b/crates/pinakes-core/src/storage/sqlite.rs @@ -70,11 +70,13 @@ fn parse_datetime(s: &str) -> DateTime { } fn parse_media_type(s: &str) -> MediaType { + use crate::media_type::BuiltinMediaType; + // MediaType derives Serialize/Deserialize with serde rename_all = "lowercase", so // a JSON round-trip uses e.g. `"mp3"`. We store the bare lowercase string in the // database, so we must wrap it in quotes for serde_json. let quoted = format!("\"{s}\""); - serde_json::from_str("ed).unwrap_or(MediaType::PlainText) + serde_json::from_str("ed).unwrap_or(MediaType::Builtin(BuiltinMediaType::PlainText)) } fn media_type_to_str(mt: &MediaType) -> String { @@ -202,6 +204,37 @@ fn str_to_custom_field_type(s: &str) -> CustomFieldType { } } +fn load_user_profile_sync( + db: &Connection, + user_id_str: &str, +) -> rusqlite::Result { + let result = db.query_row( + "SELECT avatar_path, bio, preferences_json FROM user_profiles WHERE user_id = ?", + [user_id_str], + |row| { + let avatar_path: Option = row.get(0)?; + let bio: Option = row.get(1)?; + let prefs_str: String = row.get(2)?; + let preferences: crate::users::UserPreferences = + serde_json::from_str(&prefs_str).unwrap_or_default(); + Ok(crate::users::UserProfile { + avatar_path, + bio, + preferences, + }) + }, + ); + match result { + Ok(profile) => Ok(profile), + Err(rusqlite::Error::QueryReturnedNoRows) => Ok(crate::users::UserProfile { + avatar_path: None, + bio: None, + preferences: Default::default(), + }), + Err(e) => Err(e), + } +} + fn load_custom_fields_sync( db: &Connection, media_id: MediaId, @@ -1643,6 +1676,1768 @@ impl StorageBackend for SqliteBackend { .map_err(|_| PinakesError::Database("library_statistics query timed out".to_string()))? 
.map_err(|e| PinakesError::Database(e.to_string()))? } + + async fn list_users(&self) -> Result> { + let conn = self.conn.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT id, username, password_hash, role, created_at, updated_at FROM users ORDER BY created_at DESC" + )?; + let users = stmt + .query_map([], |row| { + let id_str: String = row.get(0)?; + let profile = load_user_profile_sync(&db, &id_str)?; + Ok(crate::users::User { + id: crate::users::UserId(parse_uuid(&id_str)?), + username: row.get(1)?, + password_hash: row.get(2)?, + role: serde_json::from_str(&row.get::<_, String>(3)?) + .unwrap_or(crate::config::UserRole::Viewer), + profile, + created_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(4)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + updated_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(5)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + }) + })? + .collect::, _>>()?; + Ok(users) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("list_users query timed out".to_string()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_user(&self, id: crate::users::UserId) -> Result { + let conn = self.conn.clone(); + let id_str = id.0.to_string(); + let id_str_for_err = id_str.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let opt = db.query_row( + "SELECT id, username, password_hash, role, created_at, updated_at FROM users WHERE id = ?", + [&id_str], + |row| { + let id_str: String = row.get(0)?; + let profile = load_user_profile_sync(&db, &id_str)?; + Ok(crate::users::User { + id: crate::users::UserId(parse_uuid(&id_str)?), + username: row.get(1)?, + password_hash: row.get(2)?, + role: serde_json::from_str(&row.get::<_, String>(3)?).unwrap_or(crate::config::UserRole::Viewer), + profile, + created_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(4)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + updated_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(5)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + }) + } + ).optional()?; + opt.ok_or_else(|| PinakesError::NotFound(format!("user {}", id_str))) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| { + PinakesError::Database(format!("get_user query timed out for {}", id_str_for_err)) + })? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_user_by_username(&self, username: &str) -> Result { + let conn = self.conn.clone(); + let username = username.to_string(); + let username_for_err = username.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let opt = db.query_row( + "SELECT id, username, password_hash, role, created_at, updated_at FROM users WHERE username = ?", + [&username], + |row| { + let id_str: String = row.get(0)?; + let profile = load_user_profile_sync(&db, &id_str)?; + Ok(crate::users::User { + id: crate::users::UserId(parse_uuid(&id_str)?), + username: row.get(1)?, + password_hash: row.get(2)?, + role: serde_json::from_str(&row.get::<_, String>(3)?).unwrap_or(crate::config::UserRole::Viewer), + profile, + created_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(4)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + updated_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(5)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + }) + } + ).optional()?; + opt.ok_or_else(|| PinakesError::NotFound(format!("user with username {}", username))) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| { + PinakesError::Database(format!( + "get_user_by_username query timed out for {}", + username_for_err + )) + })? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn create_user( + &self, + username: &str, + password_hash: &str, + role: crate::config::UserRole, + profile: Option, + ) -> Result { + let conn = self.conn.clone(); + let username = username.to_string(); + let password_hash = password_hash.to_string(); + let fut = tokio::task::spawn_blocking(move || -> Result { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + + let tx = db.unchecked_transaction()?; + + let id = crate::users::UserId(uuid::Uuid::now_v7()); + let id_str = id.0.to_string(); + let now = chrono::Utc::now(); + let role_str = serde_json::to_string(&role) + .map_err(|e| PinakesError::Database(format!("failed to serialize role: {}", e)))?; + + tx.execute( + "INSERT INTO users (id, username, password_hash, role, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", + rusqlite::params![&id_str, &username, &password_hash, &role_str, now.to_rfc3339(), now.to_rfc3339()], + )?; + + let user_profile = if let Some(prof) = profile.clone() { + let prefs_json = serde_json::to_string(&prof.preferences).map_err(|e| { + PinakesError::Database(format!("failed to serialize preferences: {}", e)) + })?; + tx.execute( + "INSERT INTO user_profiles (user_id, avatar_path, bio, preferences_json, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", + rusqlite::params![&id_str, &prof.avatar_path, &prof.bio, &prefs_json, now.to_rfc3339(), now.to_rfc3339()], + )?; + prof + } else { + crate::users::UserProfile { + avatar_path: None, + bio: None, + preferences: Default::default(), + } + }; + + tx.commit()?; + + Ok(crate::users::User { + id, + username, + password_hash, + role, + profile: user_profile, + created_at: now, + updated_at: now, + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("create_user query timed out".to_string()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn update_user( + &self, + id: crate::users::UserId, + password_hash: Option<&str>, + role: Option, + profile: Option, + ) -> Result { + let conn = self.conn.clone(); + let password_hash = password_hash.map(|s| s.to_string()); + let id_str = id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || -> Result { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + + let tx = db.unchecked_transaction()?; + let now = chrono::Utc::now(); + + // Update password and/or role if provided + if password_hash.is_some() || role.is_some() { + let mut updates = vec!["updated_at = ?"]; + let mut params: Vec> = Vec::new(); + params.push(Box::new(now.to_rfc3339())); + + if let Some(ref pw) = password_hash { + updates.push("password_hash = ?"); + params.push(Box::new(pw.clone())); + } + if let Some(ref r) = role { + updates.push("role = ?"); + let role_str = serde_json::to_string(r).map_err(|e| { + PinakesError::Database(format!("failed to serialize role: {}", e)) + })?; + params.push(Box::new(role_str)); + } + + params.push(Box::new(id_str.clone())); + + let sql = format!("UPDATE users SET {} WHERE id = ?", updates.join(", ")); + let param_refs: Vec<&dyn rusqlite::ToSql> = + params.iter().map(|p| p.as_ref()).collect(); + tx.execute(&sql, param_refs.as_slice())?; + } + + // Update profile if provided + if let Some(prof) = profile { + let prefs_json = serde_json::to_string(&prof.preferences).map_err(|e| { + PinakesError::Database(format!("failed to serialize preferences: {}", e)) + })?; + tx.execute( + "INSERT OR REPLACE INTO user_profiles (user_id, avatar_path, bio, preferences_json, created_at, updated_at) VALUES (?, ?, ?, ?, COALESCE((SELECT created_at FROM user_profiles WHERE user_id = ?), ?), ?)", + rusqlite::params![&id_str, &prof.avatar_path, &prof.bio, &prefs_json, &id_str, now.to_rfc3339(), now.to_rfc3339()], + )?; + } + + tx.commit()?; + + // Fetch updated user + Ok(db.query_row( + 
"SELECT id, username, password_hash, role, created_at, updated_at FROM users WHERE id = ?", + [&id_str], + |row| { + let id_str: String = row.get(0)?; + let profile = load_user_profile_sync(&db, &id_str)?; + Ok(crate::users::User { + id: crate::users::UserId(parse_uuid(&id_str)?), + username: row.get(1)?, + password_hash: row.get(2)?, + role: serde_json::from_str(&row.get::<_, String>(3)?).unwrap_or(crate::config::UserRole::Viewer), + profile, + created_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(4)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + updated_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(5)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + }) + } + )?) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("update_user query timed out".to_string()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn delete_user(&self, id: crate::users::UserId) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || -> Result<()> { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + + let tx = db.unchecked_transaction()?; + + // Delete profile first due to foreign key + tx.execute("DELETE FROM user_profiles WHERE user_id = ?", [&id_str])?; + // Delete library access + tx.execute("DELETE FROM user_libraries WHERE user_id = ?", [&id_str])?; + // Delete user + let affected = tx.execute("DELETE FROM users WHERE id = ?", [&id_str])?; + if affected == 0 { + return Err(PinakesError::NotFound(format!("user {}", id_str))); + } + + tx.commit()?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("delete_user query timed out".to_string()))? 
+ .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_user_libraries( + &self, + user_id: crate::users::UserId, + ) -> Result> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT user_id, root_path, permission, granted_at FROM user_libraries WHERE user_id = ?" + )?; + let libraries = stmt + .query_map([&user_id_str], |row| { + let id_str: String = row.get(0)?; + Ok(crate::users::UserLibraryAccess { + user_id: crate::users::UserId(parse_uuid(&id_str)?), + root_path: row.get(1)?, + permission: serde_json::from_str(&row.get::<_, String>(2)?) + .unwrap_or(crate::users::LibraryPermission::Read), + granted_at: chrono::DateTime::parse_from_rfc3339(&row.get::<_, String>(3)?) + .unwrap_or_else(|_| chrono::Utc::now().into()) + .with_timezone(&chrono::Utc), + }) + })? + .filter_map(|r| r.ok()) + .collect::>(); + Ok(libraries) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_user_libraries query timed out".to_string()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn grant_library_access( + &self, + user_id: crate::users::UserId, + root_path: &str, + permission: crate::users::LibraryPermission, + ) -> Result<()> { + let conn = self.conn.clone(); + let root_path = root_path.to_string(); + let user_id_str = user_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || -> Result<()> { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let perm_str = serde_json::to_string(&permission).map_err(|e| { + PinakesError::Database(format!("failed to serialize permission: {}", e)) + })?; + let now = chrono::Utc::now(); + db.execute( + "INSERT OR REPLACE INTO user_libraries (user_id, root_path, permission, granted_at) VALUES (?, ?, ?, ?)", + rusqlite::params![&user_id_str, &root_path, &perm_str, now.to_rfc3339()], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| { + PinakesError::Database("grant_library_access query timed out".to_string()) + })? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn revoke_library_access( + &self, + user_id: crate::users::UserId, + root_path: &str, + ) -> Result<()> { + let conn = self.conn.clone(); + let root_path = root_path.to_string(); + let user_id_str = user_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "DELETE FROM user_libraries WHERE user_id = ? AND root_path = ?", + rusqlite::params![&user_id_str, &root_path], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| { + PinakesError::Database("revoke_library_access query timed out".to_string()) + })? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + // ===== Ratings ===== + async fn rate_media( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + stars: u8, + review: Option<&str>, + ) -> Result { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let review = review.map(String::from); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let id = Uuid::now_v7(); + let id_str = id.to_string(); + let now = chrono::Utc::now(); + db.execute( + "INSERT OR REPLACE INTO ratings (id, user_id, media_id, stars, review_text, created_at) VALUES (?, ?, ?, ?, ?, ?)", + params![&id_str, &user_id_str, &media_id_str, stars as i32, &review, now.to_rfc3339()], + )?; + // SELECT the actual row to get the real id and created_at (INSERT OR REPLACE may have kept existing values) + let (actual_id, actual_created_at) = db.query_row( + "SELECT id, created_at FROM ratings WHERE user_id = ? AND media_id = ?", + params![&user_id_str, &media_id_str], + |row| { + let rid_str: String = row.get(0)?; + let created_str: String = row.get(1)?; + Ok((parse_uuid(&rid_str)?, parse_datetime(&created_str))) + }, + )?; + Ok(crate::social::Rating { + id: actual_id, + user_id, + media_id, + stars, + review_text: review, + created_at: actual_created_at, + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("rate_media timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_media_ratings(&self, media_id: MediaId) -> Result> { + let conn = self.conn.clone(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT id, user_id, media_id, stars, review_text, created_at FROM ratings WHERE media_id = ? ORDER BY created_at DESC" + )?; + let ratings = stmt + .query_map([&media_id_str], |row| { + let id_str: String = row.get(0)?; + let uid_str: String = row.get(1)?; + let mid_str: String = row.get(2)?; + let created_str: String = row.get(5)?; + Ok(crate::social::Rating { + id: parse_uuid(&id_str)?, + user_id: crate::users::UserId(parse_uuid(&uid_str)?), + media_id: MediaId(parse_uuid(&mid_str)?), + stars: row.get::<_, i32>(3)? as u8, + review_text: row.get(4)?, + created_at: parse_datetime(&created_str), + }) + })? + .filter_map(|r| r.ok()) + .collect(); + Ok(ratings) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_media_ratings timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_user_rating( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + ) -> Result> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let result = db.query_row( + "SELECT id, user_id, media_id, stars, review_text, created_at FROM ratings WHERE user_id = ? 
AND media_id = ?", + params![&user_id_str, &media_id_str], + |row| { + let id_str: String = row.get(0)?; + let uid_str: String = row.get(1)?; + let mid_str: String = row.get(2)?; + let created_str: String = row.get(5)?; + Ok(crate::social::Rating { + id: parse_uuid(&id_str)?, + user_id: crate::users::UserId(parse_uuid(&uid_str)?), + media_id: MediaId(parse_uuid(&mid_str)?), + stars: row.get::<_, i32>(3)? as u8, + review_text: row.get(4)?, + created_at: parse_datetime(&created_str), + }) + }, + ).optional()?; + Ok(result) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_user_rating timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn delete_rating(&self, id: Uuid) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute("DELETE FROM ratings WHERE id = ?", [&id_str])?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("delete_rating timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + // ===== Comments ===== + async fn add_comment( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + text: &str, + parent_id: Option, + ) -> Result { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let text = text.to_string(); + let parent_str = parent_id.map(|p| p.to_string()); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let id = Uuid::now_v7(); + let id_str = id.to_string(); + let now = chrono::Utc::now(); + db.execute( + "INSERT INTO comments (id, user_id, media_id, parent_comment_id, text, created_at) VALUES (?, ?, ?, ?, ?, ?)", + params![&id_str, &user_id_str, &media_id_str, &parent_str, &text, now.to_rfc3339()], + )?; + Ok(crate::social::Comment { + id, + user_id, + media_id, + parent_comment_id: parent_id, + text, + created_at: now, + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("add_comment timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_media_comments(&self, media_id: MediaId) -> Result> { + let conn = self.conn.clone(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT id, user_id, media_id, parent_comment_id, text, created_at FROM comments WHERE media_id = ? 
ORDER BY created_at ASC" + )?; + let comments = stmt + .query_map([&media_id_str], |row| { + let id_str: String = row.get(0)?; + let uid_str: String = row.get(1)?; + let mid_str: String = row.get(2)?; + let parent_str: Option = row.get(3)?; + let created_str: String = row.get(5)?; + Ok(crate::social::Comment { + id: parse_uuid(&id_str)?, + user_id: crate::users::UserId(parse_uuid(&uid_str)?), + media_id: MediaId(parse_uuid(&mid_str)?), + parent_comment_id: parent_str.and_then(|s| Uuid::parse_str(&s).ok()), + text: row.get(4)?, + created_at: parse_datetime(&created_str), + }) + })? + .filter_map(|r| r.ok()) + .collect(); + Ok(comments) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_media_comments timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn delete_comment(&self, id: Uuid) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute("DELETE FROM comments WHERE id = ?", [&id_str])?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("delete_comment timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + // ===== Favorites ===== + async fn add_favorite(&self, user_id: crate::users::UserId, media_id: MediaId) -> Result<()> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let now = chrono::Utc::now(); + db.execute( + "INSERT OR IGNORE INTO favorites (user_id, media_id, created_at) VALUES (?, ?, ?)", + params![&user_id_str, &media_id_str, now.to_rfc3339()], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("add_favorite timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn remove_favorite( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + ) -> Result<()> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "DELETE FROM favorites WHERE user_id = ? AND media_id = ?", + params![&user_id_str, &media_id_str], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("remove_favorite timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_user_favorites( + &self, + user_id: crate::users::UserId, + pagination: &Pagination, + ) -> Result> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let limit = pagination.limit as i64; + let offset = pagination.offset as i64; + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN favorites f ON m.id = f.media_id WHERE f.user_id = ? ORDER BY f.created_at DESC LIMIT ? OFFSET ?" + )?; + let mut items: Vec = stmt + .query_map(params![&user_id_str, limit, offset], row_to_media_item)? + .filter_map(|r| r.ok()) + .collect(); + load_custom_fields_batch(&db, &mut items)?; + Ok(items) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_user_favorites timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn is_favorite(&self, user_id: crate::users::UserId, media_id: MediaId) -> Result { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let count: i64 = db.query_row( + "SELECT COUNT(*) FROM favorites WHERE user_id = ? AND media_id = ?", + params![&user_id_str, &media_id_str], + |row| row.get(0), + )?; + Ok(count > 0) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("is_favorite timed out".into()))? 
+ .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + // ===== Share Links ===== + async fn create_share_link( + &self, + media_id: MediaId, + created_by: crate::users::UserId, + token: &str, + password_hash: Option<&str>, + expires_at: Option>, + ) -> Result { + let conn = self.conn.clone(); + let media_id_str = media_id.0.to_string(); + let created_by_str = created_by.0.to_string(); + let token = token.to_string(); + let password_hash = password_hash.map(String::from); + let expires_str = expires_at.map(|dt| dt.to_rfc3339()); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let id = Uuid::now_v7(); + let id_str = id.to_string(); + let now = chrono::Utc::now(); + db.execute( + "INSERT INTO share_links (id, media_id, created_by, token, password_hash, expires_at, view_count, created_at) VALUES (?, ?, ?, ?, ?, ?, 0, ?)", + params![&id_str, &media_id_str, &created_by_str, &token, &password_hash, &expires_str, now.to_rfc3339()], + )?; + Ok(crate::social::ShareLink { + id, + media_id, + created_by, + token, + password_hash, + expires_at, + view_count: 0, + created_at: now, + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("create_share_link timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_share_link(&self, token: &str) -> Result { + let conn = self.conn.clone(); + let token = token.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.query_row( + "SELECT id, media_id, created_by, token, password_hash, expires_at, view_count, created_at FROM share_links WHERE token = ?", + [&token], + |row| { + let id_str: String = row.get(0)?; + let mid_str: String = row.get(1)?; + let uid_str: String = row.get(2)?; + let expires_str: Option = row.get(5)?; + let created_str: String = row.get(7)?; + Ok(crate::social::ShareLink { + id: parse_uuid(&id_str)?, + media_id: MediaId(parse_uuid(&mid_str)?), + created_by: crate::users::UserId(parse_uuid(&uid_str)?), + token: row.get(3)?, + password_hash: row.get(4)?, + expires_at: expires_str.map(|s| parse_datetime(&s)), + view_count: row.get::<_, i64>(6)? as u64, + created_at: parse_datetime(&created_str), + }) + }, + ).map_err(|e| match e { + rusqlite::Error::QueryReturnedNoRows => PinakesError::NotFound("share link not found".into()), + _ => PinakesError::Database(e.to_string()), + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_share_link timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn increment_share_views(&self, token: &str) -> Result<()> { + let conn = self.conn.clone(); + let token = token.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "UPDATE share_links SET view_count = view_count + 1 WHERE token = ?", + [&token], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("increment_share_views timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn delete_share_link(&self, id: Uuid) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute("DELETE FROM share_links WHERE id = ?", [&id_str])?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("delete_share_link timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + // ===== Playlists ===== + async fn create_playlist( + &self, + owner_id: crate::users::UserId, + name: &str, + description: Option<&str>, + is_public: bool, + is_smart: bool, + filter_query: Option<&str>, + ) -> Result { + let conn = self.conn.clone(); + let owner_id_str = owner_id.0.to_string(); + let name = name.to_string(); + let description = description.map(String::from); + let filter_query = filter_query.map(String::from); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let id = Uuid::now_v7(); + let id_str = id.to_string(); + let now = chrono::Utc::now(); + db.execute( + "INSERT INTO playlists (id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + params![&id_str, &owner_id_str, &name, &description, is_public as i32, is_smart as i32, &filter_query, now.to_rfc3339(), now.to_rfc3339()], + )?; + Ok(crate::playlists::Playlist { + id, + owner_id, + name, + description, + is_public, + is_smart, + filter_query, + created_at: now, + updated_at: now, + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("create_playlist timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_playlist(&self, id: Uuid) -> Result { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.query_row( + "SELECT id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at FROM playlists WHERE id = ?", + [&id_str], + |row| { + let id_str: String = row.get(0)?; + let owner_str: String = row.get(1)?; + let created_str: String = row.get(7)?; + let updated_str: String = row.get(8)?; + Ok(crate::playlists::Playlist { + id: parse_uuid(&id_str)?, + owner_id: crate::users::UserId(parse_uuid(&owner_str)?), + name: row.get(2)?, + description: row.get(3)?, + is_public: row.get::<_, i32>(4)? != 0, + is_smart: row.get::<_, i32>(5)? != 0, + filter_query: row.get(6)?, + created_at: parse_datetime(&created_str), + updated_at: parse_datetime(&updated_str), + }) + }, + ).map_err(|e| match e { + rusqlite::Error::QueryReturnedNoRows => PinakesError::NotFound(format!("playlist {id}")), + _ => PinakesError::Database(e.to_string()), + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_playlist timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn list_playlists( + &self, + owner_id: Option, + ) -> Result> { + let conn = self.conn.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let (sql, param): (String, Option) = match owner_id { + Some(uid) => ( + "SELECT id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at FROM playlists WHERE owner_id = ? 
OR is_public = 1 ORDER BY updated_at DESC".to_string(), + Some(uid.0.to_string()), + ), + None => ( + "SELECT id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at FROM playlists ORDER BY updated_at DESC".to_string(), + None, + ), + }; + let mut stmt = db.prepare(&sql)?; + let rows = if let Some(ref p) = param { + stmt.query_map([p], |row| { + let id_str: String = row.get(0)?; + let owner_str: String = row.get(1)?; + let created_str: String = row.get(7)?; + let updated_str: String = row.get(8)?; + Ok(crate::playlists::Playlist { + id: parse_uuid(&id_str)?, + owner_id: crate::users::UserId(parse_uuid(&owner_str)?), + name: row.get(2)?, + description: row.get(3)?, + is_public: row.get::<_, i32>(4)? != 0, + is_smart: row.get::<_, i32>(5)? != 0, + filter_query: row.get(6)?, + created_at: parse_datetime(&created_str), + updated_at: parse_datetime(&updated_str), + }) + })? + .filter_map(|r| r.ok()) + .collect() + } else { + stmt.query_map([], |row| { + let id_str: String = row.get(0)?; + let owner_str: String = row.get(1)?; + let created_str: String = row.get(7)?; + let updated_str: String = row.get(8)?; + Ok(crate::playlists::Playlist { + id: parse_uuid(&id_str)?, + owner_id: crate::users::UserId(parse_uuid(&owner_str)?), + name: row.get(2)?, + description: row.get(3)?, + is_public: row.get::<_, i32>(4)? != 0, + is_smart: row.get::<_, i32>(5)? != 0, + filter_query: row.get(6)?, + created_at: parse_datetime(&created_str), + updated_at: parse_datetime(&updated_str), + }) + })? + .filter_map(|r| r.ok()) + .collect() + }; + Ok(rows) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("list_playlists timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn update_playlist( + &self, + id: Uuid, + name: Option<&str>, + description: Option<&str>, + is_public: Option, + ) -> Result { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let name = name.map(String::from); + let description = description.map(String::from); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let now = chrono::Utc::now(); + let mut updates = vec!["updated_at = ?".to_string()]; + let mut sql_params: Vec> = vec![Box::new(now.to_rfc3339())]; + if let Some(ref n) = name { + updates.push("name = ?".to_string()); + sql_params.push(Box::new(n.clone())); + } + if let Some(ref d) = description { + updates.push("description = ?".to_string()); + sql_params.push(Box::new(d.clone())); + } + if let Some(p) = is_public { + updates.push("is_public = ?".to_string()); + sql_params.push(Box::new(p as i32)); + } + sql_params.push(Box::new(id_str.clone())); + let sql = format!("UPDATE playlists SET {} WHERE id = ?", updates.join(", ")); + let param_refs: Vec<&dyn rusqlite::ToSql> = + sql_params.iter().map(|p| p.as_ref()).collect(); + db.execute(&sql, param_refs.as_slice())?; + // Fetch updated + db.query_row( + "SELECT id, owner_id, name, description, is_public, is_smart, filter_query, created_at, updated_at FROM playlists WHERE id = ?", + [&id_str], + |row| { + let id_str: String = row.get(0)?; + let owner_str: String = row.get(1)?; + let created_str: String = row.get(7)?; + let updated_str: String = row.get(8)?; + Ok(crate::playlists::Playlist { + id: parse_uuid(&id_str)?, + owner_id: crate::users::UserId(parse_uuid(&owner_str)?), + name: row.get(2)?, + description: row.get(3)?, + is_public: row.get::<_, i32>(4)? != 0, + is_smart: row.get::<_, i32>(5)? 
!= 0, + filter_query: row.get(6)?, + created_at: parse_datetime(&created_str), + updated_at: parse_datetime(&updated_str), + }) + }, + ).map_err(|e| match e { + rusqlite::Error::QueryReturnedNoRows => PinakesError::NotFound(format!("playlist {}", id_str)), + _ => PinakesError::Database(e.to_string()), + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("update_playlist timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn delete_playlist(&self, id: Uuid) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute("DELETE FROM playlists WHERE id = ?", [&id_str])?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("delete_playlist timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn add_to_playlist( + &self, + playlist_id: Uuid, + media_id: MediaId, + position: i32, + ) -> Result<()> { + let conn = self.conn.clone(); + let playlist_id_str = playlist_id.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let now = chrono::Utc::now(); + db.execute( + "INSERT OR REPLACE INTO playlist_items (playlist_id, media_id, position, added_at) VALUES (?, ?, ?, ?)", + params![&playlist_id_str, &media_id_str, position, now.to_rfc3339()], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("add_to_playlist timed out".into()))? 
+ .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn remove_from_playlist(&self, playlist_id: Uuid, media_id: MediaId) -> Result<()> { + let conn = self.conn.clone(); + let playlist_id_str = playlist_id.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "DELETE FROM playlist_items WHERE playlist_id = ? AND media_id = ?", + params![&playlist_id_str, &media_id_str], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("remove_from_playlist timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_playlist_items(&self, playlist_id: Uuid) -> Result> { + let conn = self.conn.clone(); + let playlist_id_str = playlist_id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN playlist_items pi ON m.id = pi.media_id WHERE pi.playlist_id = ? ORDER BY pi.position ASC" + )?; + let mut items: Vec = stmt + .query_map([&playlist_id_str], row_to_media_item)? + .filter_map(|r| r.ok()) + .collect(); + load_custom_fields_batch(&db, &mut items)?; + Ok(items) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_playlist_items timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn reorder_playlist( + &self, + playlist_id: Uuid, + media_id: MediaId, + new_position: i32, + ) -> Result<()> { + let conn = self.conn.clone(); + let playlist_id_str = playlist_id.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "UPDATE playlist_items SET position = ? WHERE playlist_id = ? AND media_id = ?", + params![new_position, &playlist_id_str, &media_id_str], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("reorder_playlist timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + // ===== Analytics ===== + async fn record_usage_event(&self, event: &crate::analytics::UsageEvent) -> Result<()> { + let conn = self.conn.clone(); + let id_str = event.id.to_string(); + let media_id_str = event.media_id.map(|m| m.0.to_string()); + let user_id_str = event.user_id.map(|u| u.0.to_string()); + let event_type = event.event_type.to_string(); + let ts = event.timestamp.to_rfc3339(); + let duration = event.duration_secs; + let context = event.context_json.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "INSERT INTO usage_events (id, media_id, user_id, event_type, timestamp, duration_secs, context_json) VALUES (?, ?, ?, ?, ?, ?, ?)", + params![&id_str, &media_id_str, &user_id_str, &event_type, &ts, &duration, &context], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("record_usage_event timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_usage_events( + &self, + media_id: Option, + user_id: Option, + limit: u64, + ) -> Result> { + let conn = self.conn.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut conditions = Vec::new(); + let mut sql_params: Vec> = Vec::new(); + if let Some(mid) = media_id { + conditions.push("media_id = ?".to_string()); + sql_params.push(Box::new(mid.0.to_string())); + } + if let Some(uid) = user_id { + conditions.push("user_id = ?".to_string()); + sql_params.push(Box::new(uid.0.to_string())); + } + let where_clause = if conditions.is_empty() { + String::new() + } else { + format!("WHERE {}", conditions.join(" AND ")) + }; + sql_params.push(Box::new(limit as i64)); + let sql = format!( + "SELECT id, media_id, user_id, event_type, timestamp, duration_secs, context_json FROM usage_events {} ORDER BY timestamp DESC LIMIT ?", + where_clause + ); + let mut stmt = db.prepare(&sql)?; + let param_refs: Vec<&dyn rusqlite::ToSql> = + sql_params.iter().map(|p| p.as_ref()).collect(); + let events = stmt + .query_map(param_refs.as_slice(), |row| { + let id_str: String = row.get(0)?; + let mid_str: Option = row.get(1)?; + let uid_str: Option = row.get(2)?; + let event_type_str: String = row.get(3)?; + let ts_str: String = row.get(4)?; + Ok(crate::analytics::UsageEvent { + id: parse_uuid(&id_str)?, + media_id: mid_str.and_then(|s| Uuid::parse_str(&s).ok()).map(MediaId), + user_id: uid_str + .and_then(|s| Uuid::parse_str(&s).ok()) + .map(crate::users::UserId), + event_type: event_type_str + .parse() + .unwrap_or(crate::analytics::UsageEventType::View), + timestamp: parse_datetime(&ts_str), + duration_secs: row.get(5)?, + context_json: row.get(6)?, + }) + })? 
+ .filter_map(|r| r.ok()) + .collect(); + Ok(events) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_usage_events timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_most_viewed(&self, limit: u64) -> Result> { + let conn = self.conn.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at, COUNT(ue.id) as view_count FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.event_type IN ('view', 'play') GROUP BY m.id ORDER BY view_count DESC LIMIT ?" + )?; + let mut items: Vec<(MediaItem, u64)> = stmt + .query_map([limit as i64], |row| { + let item = row_to_media_item(row)?; + let count: i64 = row.get(16)?; + Ok((item, count as u64)) + })? + .filter_map(|r| r.ok()) + .collect(); + // Load custom fields for each item + let mut media_items: Vec = items.iter().map(|(i, _)| i.clone()).collect(); + load_custom_fields_batch(&db, &mut media_items)?; + for (i, (item, _)) in items.iter_mut().enumerate() { + item.custom_fields = std::mem::take(&mut media_items[i].custom_fields); + } + Ok(items) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_most_viewed timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_recently_viewed( + &self, + user_id: crate::users::UserId, + limit: u64, + ) -> Result> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash, m.file_size, m.title, m.artist, m.album, m.genre, m.year, m.duration_secs, m.description, m.thumbnail_path, m.created_at, m.updated_at FROM media_items m JOIN usage_events ue ON m.id = ue.media_id WHERE ue.user_id = ? AND ue.event_type IN ('view', 'play') GROUP BY m.id ORDER BY MAX(ue.timestamp) DESC LIMIT ?" + )?; + let mut items: Vec = stmt + .query_map(params![&user_id_str, limit as i64], row_to_media_item)? + .filter_map(|r| r.ok()) + .collect(); + load_custom_fields_batch(&db, &mut items)?; + Ok(items) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_recently_viewed timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn update_watch_progress( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + progress_secs: f64, + ) -> Result<()> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let id = Uuid::now_v7().to_string(); + let now = chrono::Utc::now(); + db.execute( + "INSERT INTO watch_history (id, user_id, media_id, progress_secs, last_watched) VALUES (?, ?, ?, ?, ?) 
ON CONFLICT(user_id, media_id) DO UPDATE SET progress_secs = excluded.progress_secs, last_watched = excluded.last_watched", + params![&id, &user_id_str, &media_id_str, progress_secs, now.to_rfc3339()], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("update_watch_progress timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_watch_progress( + &self, + user_id: crate::users::UserId, + media_id: MediaId, + ) -> Result> { + let conn = self.conn.clone(); + let user_id_str = user_id.0.to_string(); + let media_id_str = media_id.0.to_string(); + let fut = + tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let result = db.query_row( + "SELECT progress_secs FROM watch_history WHERE user_id = ? AND media_id = ?", + params![&user_id_str, &media_id_str], + |row| row.get(0), + ).optional()?; + Ok(result) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_watch_progress timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn cleanup_old_events(&self, before: chrono::DateTime) -> Result { + let conn = self.conn.clone(); + let before_str = before.to_rfc3339(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let affected = db.execute( + "DELETE FROM usage_events WHERE timestamp < ?", + [&before_str], + )?; + Ok(affected as u64) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("cleanup_old_events timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + // ===== Subtitles ===== + async fn add_subtitle(&self, subtitle: &crate::subtitles::Subtitle) -> Result<()> { + let conn = self.conn.clone(); + let id_str = subtitle.id.to_string(); + let media_id_str = subtitle.media_id.0.to_string(); + let language = subtitle.language.clone(); + let format = subtitle.format.to_string(); + let file_path = subtitle + .file_path + .as_ref() + .map(|p| p.to_string_lossy().to_string()); + let is_embedded = subtitle.is_embedded; + let track_index = subtitle.track_index.map(|i| i as i64); + let offset_ms = subtitle.offset_ms; + let now = subtitle.created_at.to_rfc3339(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "INSERT INTO subtitles (id, media_id, language, format, file_path, is_embedded, track_index, offset_ms, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", + params![&id_str, &media_id_str, &language, &format, &file_path, is_embedded as i32, &track_index, offset_ms, &now], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("add_subtitle timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_media_subtitles( + &self, + media_id: MediaId, + ) -> Result> { + let conn = self.conn.clone(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT id, media_id, language, format, file_path, is_embedded, track_index, offset_ms, created_at FROM subtitles WHERE media_id = ?" 
+ )?; + let subtitles = stmt + .query_map([&media_id_str], |row| { + let id_str: String = row.get(0)?; + let mid_str: String = row.get(1)?; + let format_str: String = row.get(3)?; + let created_str: String = row.get(8)?; + Ok(crate::subtitles::Subtitle { + id: parse_uuid(&id_str)?, + media_id: MediaId(parse_uuid(&mid_str)?), + language: row.get(2)?, + format: format_str + .parse() + .unwrap_or(crate::subtitles::SubtitleFormat::Srt), + file_path: row + .get::<_, Option>(4)? + .map(std::path::PathBuf::from), + is_embedded: row.get::<_, i32>(5)? != 0, + track_index: row.get::<_, Option>(6)?.map(|i| i as usize), + offset_ms: row.get(7)?, + created_at: parse_datetime(&created_str), + }) + })? + .filter_map(|r| r.ok()) + .collect(); + Ok(subtitles) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_media_subtitles timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn delete_subtitle(&self, id: Uuid) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute("DELETE FROM subtitles WHERE id = ?", [&id_str])?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("delete_subtitle timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn update_subtitle_offset(&self, id: Uuid, offset_ms: i64) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "UPDATE subtitles SET offset_ms = ? 
WHERE id = ?", + params![offset_ms, &id_str], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("update_subtitle_offset timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + // ===== External Metadata (Enrichment) ===== + async fn store_external_metadata( + &self, + meta: &crate::enrichment::ExternalMetadata, + ) -> Result<()> { + let conn = self.conn.clone(); + let id_str = meta.id.to_string(); + let media_id_str = meta.media_id.0.to_string(); + let source = meta.source.to_string(); + let external_id = meta.external_id.clone(); + let metadata_json = meta.metadata_json.clone(); + let confidence = meta.confidence; + let last_updated = meta.last_updated.to_rfc3339(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "INSERT OR REPLACE INTO external_metadata (id, media_id, source, external_id, metadata_json, confidence, last_updated) VALUES (?, ?, ?, ?, ?, ?, ?)", + params![&id_str, &media_id_str, &source, &external_id, &metadata_json, confidence, &last_updated], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("store_external_metadata timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn get_external_metadata( + &self, + media_id: MediaId, + ) -> Result> { + let conn = self.conn.clone(); + let media_id_str = media_id.0.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let mut stmt = db.prepare( + "SELECT id, media_id, source, external_id, metadata_json, confidence, last_updated FROM external_metadata WHERE media_id = ?" 
+ )?; + let metas = stmt + .query_map([&media_id_str], |row| { + let id_str: String = row.get(0)?; + let mid_str: String = row.get(1)?; + let source_str: String = row.get(2)?; + let updated_str: String = row.get(6)?; + Ok(crate::enrichment::ExternalMetadata { + id: parse_uuid(&id_str)?, + media_id: MediaId(parse_uuid(&mid_str)?), + source: source_str + .parse() + .unwrap_or(crate::enrichment::EnrichmentSourceType::MusicBrainz), + external_id: row.get(3)?, + metadata_json: row.get(4)?, + confidence: row.get(5)?, + last_updated: parse_datetime(&updated_str), + }) + })? + .filter_map(|r| r.ok()) + .collect(); + Ok(metas) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_external_metadata timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn delete_external_metadata(&self, id: Uuid) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute("DELETE FROM external_metadata WHERE id = ?", [&id_str])?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("delete_external_metadata timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + // ===== Transcode Sessions ===== + async fn create_transcode_session( + &self, + session: &crate::transcode::TranscodeSession, + ) -> Result<()> { + let conn = self.conn.clone(); + let id_str = session.id.to_string(); + let media_id_str = session.media_id.0.to_string(); + let user_id_str = session.user_id.map(|u| u.0.to_string()); + let profile = session.profile.clone(); + let cache_path = session.cache_path.to_string_lossy().to_string(); + let status = session.status.as_str().to_string(); + let progress = session.progress; + let error_message = session.status.error_message().map(String::from); + let created_at = session.created_at.to_rfc3339(); + let expires_at = session.expires_at.map(|dt| dt.to_rfc3339()); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "INSERT INTO transcode_sessions (id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + params![&id_str, &media_id_str, &user_id_str, &profile, &cache_path, &status, progress, &error_message, &created_at, &expires_at], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("create_transcode_session timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn get_transcode_session(&self, id: Uuid) -> Result { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.query_row( + "SELECT id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at FROM transcode_sessions WHERE id = ?", + [&id_str], + |row| { + let id_str: String = row.get(0)?; + let mid_str: String = row.get(1)?; + let uid_str: Option = row.get(2)?; + let status_str: String = row.get(5)?; + let error_msg: Option = row.get(7)?; + let created_str: String = row.get(8)?; + let expires_str: Option = row.get(9)?; + Ok(crate::transcode::TranscodeSession { + id: parse_uuid(&id_str)?, + media_id: MediaId(parse_uuid(&mid_str)?), + user_id: uid_str.and_then(|s| Uuid::parse_str(&s).ok()).map(crate::users::UserId), + profile: row.get(3)?, + cache_path: std::path::PathBuf::from(row.get::<_, String>(4)?), + status: crate::transcode::TranscodeStatus::from_db(&status_str, error_msg.as_deref()), + progress: row.get(6)?, + created_at: parse_datetime(&created_str), + expires_at: expires_str.map(|s| parse_datetime(&s)), + duration_secs: None, + child_cancel: None, + }) + }, + ).map_err(|e| match e { + rusqlite::Error::QueryReturnedNoRows => PinakesError::NotFound(format!("transcode session {id}")), + _ => PinakesError::Database(e.to_string()), + }) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("get_transcode_session timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } + + async fn list_transcode_sessions( + &self, + media_id: Option, + ) -> Result> { + let conn = self.conn.clone(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let (sql, param) = match media_id { + Some(mid) => ( + "SELECT id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at FROM transcode_sessions WHERE media_id = ? ORDER BY created_at DESC".to_string(), + Some(mid.0.to_string()), + ), + None => ( + "SELECT id, media_id, user_id, profile, cache_path, status, progress, error_message, created_at, expires_at FROM transcode_sessions ORDER BY created_at DESC".to_string(), + None, + ), + }; + let mut stmt = db.prepare(&sql)?; + let parse_row = |row: &Row| -> rusqlite::Result { + let id_str: String = row.get(0)?; + let mid_str: String = row.get(1)?; + let uid_str: Option = row.get(2)?; + let status_str: String = row.get(5)?; + let error_msg: Option = row.get(7)?; + let created_str: String = row.get(8)?; + let expires_str: Option = row.get(9)?; + Ok(crate::transcode::TranscodeSession { + id: parse_uuid(&id_str)?, + media_id: MediaId(parse_uuid(&mid_str)?), + user_id: uid_str + .and_then(|s| Uuid::parse_str(&s).ok()) + .map(crate::users::UserId), + profile: row.get(3)?, + cache_path: std::path::PathBuf::from(row.get::<_, String>(4)?), + status: crate::transcode::TranscodeStatus::from_db( + &status_str, + error_msg.as_deref(), + ), + progress: row.get(6)?, + created_at: parse_datetime(&created_str), + expires_at: expires_str.map(|s| parse_datetime(&s)), + duration_secs: None, + child_cancel: None, + }) + }; + let sessions: Vec<_> = if let Some(ref p) = param { + stmt.query_map([p], parse_row)? + .filter_map(|r| r.ok()) + .collect() + } else { + stmt.query_map([], parse_row)? 
+ .filter_map(|r| r.ok()) + .collect() + }; + Ok(sessions) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("list_transcode_sessions timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn update_transcode_status( + &self, + id: Uuid, + status: crate::transcode::TranscodeStatus, + progress: f32, + ) -> Result<()> { + let conn = self.conn.clone(); + let id_str = id.to_string(); + let status_str = status.as_str().to_string(); + let error_message = status.error_message().map(String::from); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + db.execute( + "UPDATE transcode_sessions SET status = ?, progress = ?, error_message = ? WHERE id = ?", + params![&status_str, progress, &error_message, &id_str], + )?; + Ok(()) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("update_transcode_status timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? + } + + async fn cleanup_expired_transcodes( + &self, + before: chrono::DateTime, + ) -> Result { + let conn = self.conn.clone(); + let before_str = before.to_rfc3339(); + let fut = tokio::task::spawn_blocking(move || { + let db = conn.lock().map_err(|e| { + PinakesError::Database(format!("failed to acquire database lock: {}", e)) + })?; + let affected = db.execute( + "DELETE FROM transcode_sessions WHERE expires_at IS NOT NULL AND expires_at < ?", + [&before_str], + )?; + Ok(affected as u64) + }); + tokio::time::timeout(std::time::Duration::from_secs(10), fut) + .await + .map_err(|_| PinakesError::Database("cleanup_expired_transcodes timed out".into()))? + .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))? 
+ } } // Needed for `query_row(...).optional()` diff --git a/crates/pinakes-core/src/subtitles.rs b/crates/pinakes-core/src/subtitles.rs new file mode 100644 index 0000000..4f41d7b --- /dev/null +++ b/crates/pinakes-core/src/subtitles.rs @@ -0,0 +1,62 @@ +//! Subtitle management for video media items. + +use std::path::PathBuf; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::model::MediaId; + +/// A subtitle track associated with a media item. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Subtitle { + pub id: Uuid, + pub media_id: MediaId, + pub language: Option, + pub format: SubtitleFormat, + pub file_path: Option, + pub is_embedded: bool, + pub track_index: Option, + pub offset_ms: i64, + pub created_at: DateTime, +} + +/// Supported subtitle formats. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum SubtitleFormat { + Srt, + Vtt, + Ass, + Ssa, + Pgs, +} + +impl std::fmt::Display for SubtitleFormat { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let s = match self { + Self::Srt => "srt", + Self::Vtt => "vtt", + Self::Ass => "ass", + Self::Ssa => "ssa", + Self::Pgs => "pgs", + }; + write!(f, "{s}") + } +} + +impl std::str::FromStr for SubtitleFormat { + type Err = String; + + fn from_str(s: &str) -> std::result::Result { + match s { + "srt" => Ok(Self::Srt), + "vtt" => Ok(Self::Vtt), + "ass" => Ok(Self::Ass), + "ssa" => Ok(Self::Ssa), + "pgs" => Ok(Self::Pgs), + _ => Err(format!("unknown subtitle format: {s}")), + } + } +} diff --git a/crates/pinakes-core/src/thumbnail.rs b/crates/pinakes-core/src/thumbnail.rs index e41f008..61709c0 100644 --- a/crates/pinakes-core/src/thumbnail.rs +++ b/crates/pinakes-core/src/thumbnail.rs @@ -5,7 +5,7 @@ use tracing::{info, warn}; use crate::config::ThumbnailConfig; use crate::error::{PinakesError, Result}; -use crate::media_type::{MediaCategory, MediaType}; +use 
crate::media_type::{BuiltinMediaType, MediaCategory, MediaType}; use crate::model::MediaId; /// Generate a thumbnail for a media file and return the path to the thumbnail. @@ -41,7 +41,7 @@ pub fn generate_thumbnail_with_config( MediaCategory::Image => { if media_type.is_raw() { generate_raw_thumbnail(source_path, &thumb_path, config) - } else if media_type == MediaType::Heic { + } else if media_type == MediaType::Builtin(BuiltinMediaType::Heic) { generate_heic_thumbnail(source_path, &thumb_path, config) } else { generate_image_thumbnail(source_path, &thumb_path, config) @@ -49,8 +49,12 @@ pub fn generate_thumbnail_with_config( } MediaCategory::Video => generate_video_thumbnail(source_path, &thumb_path, config), MediaCategory::Document => match media_type { - MediaType::Pdf => generate_pdf_thumbnail(source_path, &thumb_path, config), - MediaType::Epub => generate_epub_thumbnail(source_path, &thumb_path, config), + MediaType::Builtin(BuiltinMediaType::Pdf) => { + generate_pdf_thumbnail(source_path, &thumb_path, config) + } + MediaType::Builtin(BuiltinMediaType::Epub) => { + generate_epub_thumbnail(source_path, &thumb_path, config) + } _ => return Ok(None), }, _ => return Ok(None), diff --git a/crates/pinakes-core/src/transcode.rs b/crates/pinakes-core/src/transcode.rs new file mode 100644 index 0000000..0c3a611 --- /dev/null +++ b/crates/pinakes-core/src/transcode.rs @@ -0,0 +1,545 @@ +//! Transcoding service for media files using FFmpeg. + +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use tokio::sync::{RwLock, Semaphore}; +use uuid::Uuid; + +use crate::config::{TranscodeProfile, TranscodingConfig}; +use crate::model::MediaId; +use crate::storage::DynStorageBackend; +use crate::users::UserId; + +/// A transcoding session for a media item. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TranscodeSession { + pub id: Uuid, + pub media_id: MediaId, + pub user_id: Option, + pub profile: String, + pub cache_path: PathBuf, + pub status: TranscodeStatus, + pub progress: f32, + pub created_at: DateTime, + pub expires_at: Option>, + /// Duration of the source media in seconds, used for progress calculation. + #[serde(default)] + pub duration_secs: Option, + /// Handle to cancel the child FFmpeg process. + #[serde(skip)] + pub child_cancel: Option>, +} + +/// Status of a transcode session. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "snake_case", tag = "state")] +pub enum TranscodeStatus { + Pending, + Transcoding, + Complete, + Failed { error: String }, + Cancelled, +} + +impl TranscodeStatus { + pub fn as_str(&self) -> &str { + match self { + Self::Pending => "pending", + Self::Transcoding => "transcoding", + Self::Complete => "complete", + Self::Failed { .. } => "failed", + Self::Cancelled => "cancelled", + } + } + + pub fn from_db(status: &str, error_message: Option<&str>) -> Self { + match status { + "pending" => Self::Pending, + "transcoding" => Self::Transcoding, + "complete" => Self::Complete, + "failed" => Self::Failed { + error: error_message.unwrap_or("unknown error").to_string(), + }, + "cancelled" => Self::Cancelled, + other => { + tracing::warn!( + "unknown transcode status '{}', defaulting to Pending", + other + ); + Self::Pending + } + } + } + + pub fn error_message(&self) -> Option<&str> { + match self { + Self::Failed { error } => Some(error), + _ => None, + } + } +} + +/// Service managing transcoding sessions and FFmpeg invocations. 
+pub struct TranscodeService { + pub config: TranscodingConfig, + pub sessions: Arc>>, + semaphore: Arc, +} + +impl TranscodeService { + pub fn new(config: TranscodingConfig) -> Self { + let max_concurrent = config.max_concurrent.max(1); + Self { + sessions: Arc::new(RwLock::new(HashMap::new())), + semaphore: Arc::new(Semaphore::new(max_concurrent)), + config, + } + } + + pub fn is_enabled(&self) -> bool { + self.config.enabled + } + + pub fn cache_dir(&self) -> PathBuf { + self.config + .cache_dir + .clone() + .unwrap_or_else(|| PathBuf::from("/tmp/pinakes-transcode")) + } + + /// Start a transcode job for a media item. + pub async fn start_transcode( + &self, + media_id: MediaId, + source_path: &Path, + profile_name: &str, + duration_secs: Option, + storage: &DynStorageBackend, + ) -> crate::error::Result { + let profile = self + .config + .profiles + .iter() + .find(|p| p.name == profile_name) + .cloned() + .ok_or_else(|| { + crate::error::PinakesError::InvalidOperation(format!( + "unknown transcode profile: {}", + profile_name + )) + })?; + + let session_id = Uuid::now_v7(); + let session_dir = self.cache_dir().join(session_id.to_string()); + tokio::fs::create_dir_all(&session_dir).await.map_err(|e| { + crate::error::PinakesError::InvalidOperation(format!( + "failed to create session directory: {}", + e + )) + })?; + + let expires_at = + Some(Utc::now() + chrono::Duration::hours(self.config.cache_ttl_hours as i64)); + + let cancel_notify = Arc::new(tokio::sync::Notify::new()); + + let session = TranscodeSession { + id: session_id, + media_id, + user_id: None, + profile: profile_name.to_string(), + cache_path: session_dir.clone(), + status: TranscodeStatus::Pending, + progress: 0.0, + created_at: Utc::now(), + expires_at, + duration_secs, + child_cancel: Some(cancel_notify.clone()), + }; + + // Store session in DB + storage.create_transcode_session(&session).await?; + + // Store in memory + { + let mut sessions = self.sessions.write().await; + 
sessions.insert(session_id, session); + } + + // Spawn the FFmpeg task + let sessions = self.sessions.clone(); + let semaphore = self.semaphore.clone(); + let source = source_path.to_path_buf(); + let hw_accel = self.config.hardware_acceleration.clone(); + let storage = storage.clone(); + let cancel = cancel_notify.clone(); + + tokio::spawn(async move { + // Acquire semaphore permit to limit concurrency + let _permit = match semaphore.acquire().await { + Ok(permit) => permit, + Err(e) => { + tracing::error!("failed to acquire transcode semaphore: {}", e); + let error_msg = format!("semaphore closed: {}", e); + let mut s = sessions.write().await; + if let Some(sess) = s.get_mut(&session_id) { + sess.status = TranscodeStatus::Failed { + error: error_msg.clone(), + }; + } + if let Err(e) = storage + .update_transcode_status( + session_id, + TranscodeStatus::Failed { error: error_msg }, + 0.0, + ) + .await + { + tracing::error!("failed to update transcode status: {}", e); + } + return; + } + }; + + // Mark as transcoding + { + let mut s = sessions.write().await; + if let Some(sess) = s.get_mut(&session_id) { + sess.status = TranscodeStatus::Transcoding; + } + } + if let Err(e) = storage + .update_transcode_status(session_id, TranscodeStatus::Transcoding, 0.0) + .await + { + tracing::error!("failed to update transcode status: {}", e); + } + + // Build FFmpeg args and run + let args = get_ffmpeg_args(&source, &session_dir, &profile, hw_accel.as_deref()); + match run_ffmpeg(&args, &sessions, session_id, duration_secs, cancel).await { + Ok(()) => { + let mut s = sessions.write().await; + if let Some(sess) = s.get_mut(&session_id) { + sess.status = TranscodeStatus::Complete; + sess.progress = 1.0; + } + if let Err(e) = storage + .update_transcode_status(session_id, TranscodeStatus::Complete, 1.0) + .await + { + tracing::error!("failed to update transcode status: {}", e); + } + } + Err(e) => { + let error_msg = e.to_string(); + let mut s = sessions.write().await; + if let 
Some(sess) = s.get_mut(&session_id) { + // Don't overwrite Cancelled status + if matches!(sess.status, TranscodeStatus::Cancelled) { + return; + } + sess.status = TranscodeStatus::Failed { + error: error_msg.clone(), + }; + } + drop(s); + if let Err(e) = storage + .update_transcode_status( + session_id, + TranscodeStatus::Failed { error: error_msg }, + 0.0, + ) + .await + { + tracing::error!("failed to update transcode status: {}", e); + } + } + } + }); + + Ok(session_id) + } + + /// Cancel a transcode session and clean up cache files. + pub async fn cancel_transcode( + &self, + session_id: Uuid, + storage: &DynStorageBackend, + ) -> crate::error::Result<()> { + let (cache_path, cancel_notify) = { + let mut sessions = self.sessions.write().await; + if let Some(sess) = sessions.get_mut(&session_id) { + sess.status = TranscodeStatus::Cancelled; + let cancel = sess.child_cancel.take(); + (Some(sess.cache_path.clone()), cancel) + } else { + (None, None) + } + }; + + // Signal the child process to be killed + if let Some(notify) = cancel_notify { + notify.notify_one(); + } + + storage + .update_transcode_status(session_id, TranscodeStatus::Cancelled, 0.0) + .await?; + + // Clean up cache directory + if let Some(path) = cache_path + && let Err(e) = tokio::fs::remove_dir_all(&path).await { + tracing::error!("failed to remove transcode cache directory: {}", e); + } + + Ok(()) + } + + /// Remove expired transcode sessions and their cache directories. + pub async fn cleanup_expired(&self) { + let now = Utc::now(); + + // Collect expired entries and remove them from the map under the lock. 
+ let expired: Vec<(Uuid, PathBuf)> = { + let mut sessions = self.sessions.write().await; + let expired: Vec<(Uuid, PathBuf)> = sessions + .iter() + .filter_map(|(id, sess)| { + if let Some(expires) = sess.expires_at + && now > expires { + return Some((*id, sess.cache_path.clone())); + } + None + }) + .collect(); + + for (id, _) in &expired { + sessions.remove(id); + } + + expired + }; + // Lock is dropped here; perform filesystem cleanup outside the lock. + + for (_id, path) in expired { + if let Err(e) = tokio::fs::remove_dir_all(&path).await { + tracing::error!("failed to remove expired transcode cache directory: {}", e); + } + } + } + + /// Get a session by ID from the in-memory store. + pub async fn get_session(&self, session_id: Uuid) -> Option { + let sessions = self.sessions.read().await; + sessions.get(&session_id).cloned() + } + + /// Resolve the path to a specific segment file on disk. + pub fn segment_path(&self, session_id: Uuid, segment_name: &str) -> PathBuf { + // Sanitize segment_name to prevent path traversal + let safe_name = std::path::Path::new(segment_name) + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_default(); + if safe_name.is_empty() || safe_name.contains('\0') || safe_name.starts_with('.') { + // Return a non-existent path that will fail safely + return self + .cache_dir() + .join(session_id.to_string()) + .join("__invalid__"); + } + self.cache_dir() + .join(session_id.to_string()) + .join(safe_name) + } + + /// Find a session for a given media_id and profile. + pub async fn find_session(&self, media_id: MediaId, profile: &str) -> Option { + let sessions = self.sessions.read().await; + sessions + .values() + .find(|s| s.media_id == media_id && s.profile == profile) + .cloned() + } +} + +/// Parse a resolution string like "360p", "720p", "1080p" into (width, height). 
+pub fn parse_resolution(res: &str) -> (u32, u32) { + match res.trim_end_matches('p') { + "360" => (640, 360), + "480" => (854, 480), + "720" => (1280, 720), + "1080" => (1920, 1080), + "1440" => (2560, 1440), + "2160" | "4k" => (3840, 2160), + _ => (1280, 720), // default to 720p + } +} + +/// Estimate bandwidth (bits/sec) from a profile's max_bitrate_kbps. +pub fn estimate_bandwidth(profile: &TranscodeProfile) -> u32 { + profile.max_bitrate_kbps * 1000 +} + +/// Build FFmpeg CLI arguments for transcoding. +fn get_ffmpeg_args( + source: &Path, + output_dir: &Path, + profile: &TranscodeProfile, + hw_accel: Option<&str>, +) -> Vec { + let (w, h) = parse_resolution(&profile.max_resolution); + let playlist = output_dir.join("playlist.m3u8"); + let segment_pattern = output_dir.join("segment%d.ts"); + + let mut args = Vec::new(); + + // Hardware acceleration + if let Some(accel) = hw_accel { + args.extend_from_slice(&["-hwaccel".to_string(), accel.to_string()]); + } + + args.extend_from_slice(&[ + "-i".to_string(), + source.to_string_lossy().to_string(), + "-c:v".to_string(), + profile.video_codec.clone(), + "-c:a".to_string(), + profile.audio_codec.clone(), + "-b:v".to_string(), + format!("{}k", profile.max_bitrate_kbps), + "-vf".to_string(), + format!("scale={}:{}", w, h), + "-f".to_string(), + "hls".to_string(), + "-hls_time".to_string(), + "10".to_string(), + "-hls_segment_filename".to_string(), + segment_pattern.to_string_lossy().to_string(), + "-progress".to_string(), + "pipe:1".to_string(), + "-y".to_string(), + playlist.to_string_lossy().to_string(), + ]); + + args +} + +/// Run FFmpeg as a child process, parsing progress from stdout. 
/// Run FFmpeg as a child process, parsing progress from stdout.
///
/// Spawns `ffmpeg` with `args`, streams its `-progress pipe:1` output to
/// update the session's progress, and races completion against `cancel`.
/// On cancel the child is killed and an `InvalidOperation("cancelled by
/// user")` error is returned. On non-zero exit, the last lines of stderr
/// are folded into the error message.
///
/// NOTE(review): generic parameters below were reconstructed from usage
/// (`Arc<RwLock<HashMap<Uuid, TranscodeSession>>>`, `Option<f64>`,
/// `Arc<tokio::sync::Notify>`, `parse::<f64>()`) — confirm against the
/// original source.
async fn run_ffmpeg(
    args: &[String],
    sessions: &Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
    session_id: Uuid,
    duration_secs: Option<f64>,
    cancel: Arc<tokio::sync::Notify>,
) -> Result<(), crate::error::PinakesError> {
    use tokio::io::{AsyncBufReadExt, BufReader};
    use tokio::process::Command;

    let mut child = Command::new("ffmpeg")
        .args(args)
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()
        .map_err(|e| {
            crate::error::PinakesError::InvalidOperation(format!("failed to spawn ffmpeg: {}", e))
        })?;

    // Drain stderr on a separate task so the child never blocks on a full
    // pipe; lines are collected for error reporting on failure.
    let stderr_handle = if let Some(stderr) = child.stderr.take() {
        let reader = BufReader::new(stderr);
        Some(tokio::spawn(async move {
            let mut lines = reader.lines();
            let mut collected = Vec::new();
            while let Ok(Some(line)) = lines.next_line().await {
                collected.push(line);
            }
            collected
        }))
    } else {
        None
    };

    // Parse progress from stdout. FFmpeg's `-progress` output emits
    // key=value lines; only `out_time_us` (elapsed output microseconds)
    // is consumed here.
    let stdout_handle = if let Some(stdout) = child.stdout.take() {
        let reader = BufReader::new(stdout);
        let mut lines = reader.lines();
        let sessions = sessions.clone();

        Some(tokio::spawn(async move {
            while let Ok(Some(line)) = lines.next_line().await {
                // FFmpeg progress output: "out_time_us=12345678"
                if let Some(time_str) = line.strip_prefix("out_time_us=")
                    && let Ok(us) = time_str.trim().parse::<f64>() {
                    let secs = us / 1_000_000.0;
                    // Cap at 0.99 so only successful completion reports 1.0.
                    let progress = match duration_secs {
                        Some(dur) if dur > 0.0 => (secs / dur).min(0.99) as f32,
                        _ => {
                            // Duration unknown; don't update progress.
                            continue;
                        }
                    };
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        sess.progress = progress;
                    }
                }
            }
        }))
    } else {
        None
    };

    // Wait for the child, but also listen for cancellation.
    let status = tokio::select! {
        result = child.wait() => {
            result.map_err(|e| {
                crate::error::PinakesError::InvalidOperation(format!("ffmpeg process error: {}", e))
            })?
        }
        _ = cancel.notified() => {
            // Kill the child process on cancel.
            // NOTE(review): the stdout/stderr reader tasks are not awaited on
            // this path; they terminate on their own once the pipes close.
            if let Err(e) = child.kill().await {
                tracing::error!("failed to kill ffmpeg process: {}", e);
            }
            return Err(crate::error::PinakesError::InvalidOperation(
                "cancelled by user".to_string(),
            ));
        }
    };

    // Await the stdout reader task.
    if let Some(handle) = stdout_handle {
        let _ = handle.await;
    }

    // Collect stderr output for error reporting.
    let stderr_output = if let Some(handle) = stderr_handle {
        handle.await.unwrap_or_default()
    } else {
        Vec::new()
    };

    if !status.success() {
        // Keep only the last 10 stderr lines, in original order.
        let last_stderr = stderr_output
            .iter()
            .rev()
            .take(10)
            .rev()
            .cloned()
            .collect::<Vec<_>>()
            .join("\n");
        return Err(crate::error::PinakesError::InvalidOperation(format!(
            "ffmpeg exited with status: {}\nstderr:\n{}",
            status, last_stderr
        )));
    }

    Ok(())
}
//! User management and authentication

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid;

use crate::config::UserRole;
use crate::error::{PinakesError, Result};

/// User ID — a newtype over a time-ordered UUIDv7.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct UserId(pub Uuid);

impl UserId {
    // UUIDv7 ids sort by creation time.
    pub fn new() -> Self {
        Self(Uuid::now_v7())
    }
}

impl Default for UserId {
    fn default() -> Self {
        Self::new()
    }
}

impl std::fmt::Display for UserId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl From<Uuid> for UserId {
    fn from(id: Uuid) -> Self {
        Self(id)
    }
}

/// User account with profile information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct User {
    pub id: UserId,
    pub username: String,
    // Never serialized, so API responses cannot leak the hash.
    #[serde(skip_serializing)]
    pub password_hash: String,
    pub role: UserRole,
    pub profile: UserProfile,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

/// User profile information.
/// NOTE(review): field types reconstructed from context — verify against
/// the original source.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct UserProfile {
    pub avatar_path: Option<String>,
    pub bio: Option<String>,
    pub preferences: UserPreferences,
}

/// User-specific preferences
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct UserPreferences {
    /// UI theme preference
    pub theme: Option<String>,

    /// Language preference
    pub language: Option<String>,

    /// Default video quality preference for transcoding
    pub default_video_quality: Option<String>,

    /// Whether to auto-play media
    pub auto_play: bool,

    /// Custom preferences (extensible)
    // NOTE(review): value type reconstructed — presumably arbitrary JSON.
    pub custom: HashMap<String, serde_json::Value>,
}

/// Library access permission
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LibraryPermission {
    /// Can only read/view media
    Read,

    /// Can read and modify media metadata
    Write,

    /// Full control including deletion and sharing
    Admin,
}

impl LibraryPermission {
    // Every permission level implies read access.
    pub fn can_read(&self) -> bool {
        true
    }

    pub fn can_write(&self) -> bool {
        matches!(self, Self::Write | Self::Admin)
    }

    pub fn can_admin(&self) -> bool {
        matches!(self, Self::Admin)
    }
}

/// User's access to a specific library root
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserLibraryAccess {
    pub user_id: UserId,
    pub root_path: String,
    pub permission: LibraryPermission,
    pub granted_at: DateTime<Utc>,
}

/// User creation request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateUserRequest {
    pub username: String,
    // Deserialized from the request body but never re-serialized, so the
    // plaintext password cannot leak through logging or echoes.
    #[serde(skip_serializing)]
    pub password: String,
    pub role: UserRole,
    pub profile: Option<UserProfile>,
}

/// User update request; `None` fields are left unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateUserRequest {
    // Same skip_serializing rationale as CreateUserRequest.
    #[serde(skip_serializing)]
    pub password: Option<String>,
    pub role: Option<UserRole>,
    pub profile: Option<UserProfile>,
}

/// User authentication
pub mod auth {
    use super::*;

    /// Hash a password using Argon2 with a freshly generated random salt.
    ///
    /// Returns the PHC-format hash string (salt embedded), or
    /// `PinakesError::Authentication` if hashing fails.
    pub fn hash_password(password: &str) -> Result<String> {
        use argon2::{
            Argon2,
            password_hash::{PasswordHasher, SaltString, rand_core::OsRng},
        };

        let salt = SaltString::generate(&mut OsRng);
        let argon2 = Argon2::default();

        argon2
            .hash_password(password.as_bytes(), &salt)
            .map(|hash| hash.to_string())
            .map_err(|e| PinakesError::Authentication(format!("failed to hash password: {e}")))
    }

    /// Verify a password against a stored PHC-format hash.
    ///
    /// Returns `Ok(false)` on mismatch (verification errors are treated as
    /// mismatch); errors only if the stored hash itself is malformed.
    pub fn verify_password(password: &str, hash: &str) -> Result<bool> {
        use argon2::{
            Argon2,
            password_hash::{PasswordHash, PasswordVerifier},
        };

        let parsed_hash = PasswordHash::new(hash)
            .map_err(|e| PinakesError::Authentication(format!("invalid password hash: {e}")))?;

        Ok(Argon2::default()
            .verify_password(password.as_bytes(), &parsed_hash)
            .is_ok())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip: a hashed password verifies, a wrong one does not.
    #[test]
    fn test_hash_and_verify_password() {
        let password = "test_password_123";
        let hash = auth::hash_password(password).unwrap();

        assert!(auth::verify_password(password, &hash).unwrap());
        assert!(!auth::verify_password("wrong_password", &hash).unwrap());
    }

    // Default preferences are all unset/empty/off.
    #[test]
    fn test_user_preferences_default() {
        let prefs = UserPreferences::default();
        assert_eq!(prefs.theme, None);
        assert_eq!(prefs.language, None);
        assert!(!prefs.auto_play);
        assert!(prefs.custom.is_empty());
    }

    // Permission levels are strictly ordered: Read < Write < Admin.
    #[test]
    fn test_library_permission_levels() {
        let read = LibraryPermission::Read;
        assert!(read.can_read());
        assert!(!read.can_write());
        assert!(!read.can_admin());

        let write = LibraryPermission::Write;
        assert!(write.can_read());
        assert!(write.can_write());
        assert!(!write.can_admin());

        let admin = LibraryPermission::Admin;
        assert!(admin.can_read());
        assert!(admin.can_write());
        assert!(admin.can_admin());
    }
}
// ===== Phase 2: Media Server Features =====

/// Build a minimal MP4 MediaItem fixture; `hash` disambiguates path,
/// file name, content hash, and title so tests don't collide.
fn make_test_media(hash: &str) -> MediaItem {
    let now = chrono::Utc::now();
    MediaItem {
        id: MediaId::new(),
        path: format!("/tmp/test_{hash}.mp4").into(),
        file_name: format!("test_{hash}.mp4"),
        media_type: pinakes_core::media_type::MediaType::Builtin(
            pinakes_core::media_type::BuiltinMediaType::Mp4,
        ),
        content_hash: ContentHash::new(hash.to_string()),
        file_size: 1000,
        title: Some(format!("Test {hash}")),
        artist: Some("Test Artist".to_string()),
        album: None,
        genre: None,
        year: Some(2024),
        duration_secs: Some(120.0),
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        created_at: now,
        updated_at: now,
    }
}
// Ratings: rate, fetch per-user and per-media, delete.
#[tokio::test]
async fn test_ratings_crud() {
    let storage = setup().await;
    let item = make_test_media("rating1");
    storage.insert_media(&item).await.unwrap();

    let user_id = pinakes_core::users::UserId::new();

    // Rate media
    let rating = storage
        .rate_media(user_id, item.id, 4, Some("Great video"))
        .await
        .unwrap();
    assert_eq!(rating.stars, 4);
    assert_eq!(rating.review_text.as_deref(), Some("Great video"));

    // Get user's rating
    let fetched = storage.get_user_rating(user_id, item.id).await.unwrap();
    assert!(fetched.is_some());
    assert_eq!(fetched.unwrap().stars, 4);

    // Get media ratings
    let ratings = storage.get_media_ratings(item.id).await.unwrap();
    assert_eq!(ratings.len(), 1);

    // Delete rating
    storage.delete_rating(rating.id).await.unwrap();
    let empty = storage.get_media_ratings(item.id).await.unwrap();
    assert!(empty.is_empty());
}

// Comments: top-level comment, threaded reply, list, delete.
#[tokio::test]
async fn test_comments_crud() {
    let storage = setup().await;
    let item = make_test_media("comment1");
    storage.insert_media(&item).await.unwrap();

    let user_id = pinakes_core::users::UserId::new();

    // Add comment
    let comment = storage
        .add_comment(user_id, item.id, "Nice video!", None)
        .await
        .unwrap();
    assert_eq!(comment.text, "Nice video!");
    assert!(comment.parent_comment_id.is_none());

    // Add reply
    let reply = storage
        .add_comment(user_id, item.id, "Thanks!", Some(comment.id))
        .await
        .unwrap();
    assert_eq!(reply.parent_comment_id, Some(comment.id));

    // List comments
    let comments = storage.get_media_comments(item.id).await.unwrap();
    assert_eq!(comments.len(), 2);

    // Delete comment
    storage.delete_comment(reply.id).await.unwrap();
    let remaining = storage.get_media_comments(item.id).await.unwrap();
    assert_eq!(remaining.len(), 1);
}

// Favorites: toggle on/off and list per user.
#[tokio::test]
async fn test_favorites_toggle() {
    let storage = setup().await;
    let item = make_test_media("fav1");
    storage.insert_media(&item).await.unwrap();

    let user_id = pinakes_core::users::UserId::new();

    // Not a favorite initially
    assert!(!storage.is_favorite(user_id, item.id).await.unwrap());

    // Add favorite
    storage.add_favorite(user_id, item.id).await.unwrap();
    assert!(storage.is_favorite(user_id, item.id).await.unwrap());

    // List favorites
    let favs = storage
        .get_user_favorites(user_id, &Pagination::default())
        .await
        .unwrap();
    assert_eq!(favs.len(), 1);

    // Remove favorite
    storage.remove_favorite(user_id, item.id).await.unwrap();
    assert!(!storage.is_favorite(user_id, item.id).await.unwrap());
}

// Share links: create by token, view counter, lookup fails after delete.
#[tokio::test]
async fn test_share_links() {
    let storage = setup().await;
    let item = make_test_media("share1");
    storage.insert_media(&item).await.unwrap();

    let user_id = pinakes_core::users::UserId::new();
    let token = "test_share_token_abc123";

    // Create share link
    let link = storage
        .create_share_link(item.id, user_id, token, None, None)
        .await
        .unwrap();
    assert_eq!(link.token, token);
    assert_eq!(link.view_count, 0);

    // Get share link
    let fetched = storage.get_share_link(token).await.unwrap();
    assert_eq!(fetched.media_id, item.id);

    // Increment views
    storage.increment_share_views(token).await.unwrap();
    let updated = storage.get_share_link(token).await.unwrap();
    assert_eq!(updated.view_count, 1);

    // Delete share link
    storage.delete_share_link(link.id).await.unwrap();
    let result = storage.get_share_link(token).await;
    assert!(result.is_err());
}

// Playlists: full lifecycle — create, add/reorder/remove items, update,
// list, delete (lookup after delete errors).
#[tokio::test]
async fn test_playlists_crud() {
    let storage = setup().await;
    let item1 = make_test_media("pl1");
    let item2 = make_test_media("pl2");
    storage.insert_media(&item1).await.unwrap();
    storage.insert_media(&item2).await.unwrap();

    let owner = pinakes_core::users::UserId::new();

    // Create playlist
    let playlist = storage
        .create_playlist(
            owner,
            "My Playlist",
            Some("A test playlist"),
            true,
            false,
            None,
        )
        .await
        .unwrap();
    assert_eq!(playlist.name, "My Playlist");
    assert!(playlist.is_public);

    // Get playlist
    let fetched = storage.get_playlist(playlist.id).await.unwrap();
    assert_eq!(fetched.name, "My Playlist");

    // Add items
    storage
        .add_to_playlist(playlist.id, item1.id, 0)
        .await
        .unwrap();
    storage
        .add_to_playlist(playlist.id, item2.id, 1)
        .await
        .unwrap();

    // Get playlist items
    let items = storage.get_playlist_items(playlist.id).await.unwrap();
    assert_eq!(items.len(), 2);

    // Reorder
    storage
        .reorder_playlist(playlist.id, item2.id, 0)
        .await
        .unwrap();

    // Remove item
    storage
        .remove_from_playlist(playlist.id, item1.id)
        .await
        .unwrap();
    let items = storage.get_playlist_items(playlist.id).await.unwrap();
    assert_eq!(items.len(), 1);

    // Update playlist
    let updated = storage
        .update_playlist(playlist.id, Some("Renamed"), None, Some(false))
        .await
        .unwrap();
    assert_eq!(updated.name, "Renamed");
    assert!(!updated.is_public);

    // List playlists
    let playlists = storage.list_playlists(None).await.unwrap();
    assert!(!playlists.is_empty());

    // Delete playlist
    storage.delete_playlist(playlist.id).await.unwrap();
    let result = storage.get_playlist(playlist.id).await;
    assert!(result.is_err());
}

// Analytics: record a View event; it appears in event queries,
// most-viewed, and the user's recently-viewed list.
#[tokio::test]
async fn test_analytics_usage_events() {
    let storage = setup().await;
    let item = make_test_media("analytics1");
    storage.insert_media(&item).await.unwrap();

    let user_id = pinakes_core::users::UserId::new();

    // Record events
    let event = pinakes_core::analytics::UsageEvent {
        id: uuid::Uuid::now_v7(),
        media_id: Some(item.id),
        user_id: Some(user_id),
        event_type: pinakes_core::analytics::UsageEventType::View,
        timestamp: chrono::Utc::now(),
        duration_secs: Some(60.0),
        context_json: None,
    };
    storage.record_usage_event(&event).await.unwrap();

    // Get usage events
    let events = storage
        .get_usage_events(Some(item.id), None, 10)
        .await
        .unwrap();
    assert_eq!(events.len(), 1);
    assert_eq!(
        events[0].event_type,
        pinakes_core::analytics::UsageEventType::View
    );

    // Most viewed
    let most_viewed = storage.get_most_viewed(10).await.unwrap();
    assert_eq!(most_viewed.len(), 1);
    assert_eq!(most_viewed[0].1, 1);

    // Recently viewed
    let recent = storage.get_recently_viewed(user_id, 10).await.unwrap();
    assert_eq!(recent.len(), 1);
}

// Watch progress: absent initially, then upserts on repeated updates.
#[tokio::test]
async fn test_watch_progress() {
    let storage = setup().await;
    let item = make_test_media("progress1");
    storage.insert_media(&item).await.unwrap();

    let user_id = pinakes_core::users::UserId::new();

    // No progress initially
    let progress = storage.get_watch_progress(user_id, item.id).await.unwrap();
    assert!(progress.is_none());

    // Update progress
    storage
        .update_watch_progress(user_id, item.id, 45.5)
        .await
        .unwrap();
    let progress = storage.get_watch_progress(user_id, item.id).await.unwrap();
    assert_eq!(progress, Some(45.5));

    // Update again (should upsert)
    storage
        .update_watch_progress(user_id, item.id, 90.0)
        .await
        .unwrap();
    let progress = storage.get_watch_progress(user_id, item.id).await.unwrap();
    assert_eq!(progress, Some(90.0));
}

// Retention: a 100-day-old event is removed by a 90-day cutoff.
#[tokio::test]
async fn test_cleanup_old_events() {
    let storage = setup().await;

    let old_event = pinakes_core::analytics::UsageEvent {
        id: uuid::Uuid::now_v7(),
        media_id: None,
        user_id: None,
        event_type: pinakes_core::analytics::UsageEventType::Search,
        timestamp: chrono::Utc::now() - chrono::Duration::days(100),
        duration_secs: None,
        context_json: None,
    };
    storage.record_usage_event(&old_event).await.unwrap();

    let cutoff = chrono::Utc::now() - chrono::Duration::days(90);
    let cleaned = storage.cleanup_old_events(cutoff).await.unwrap();
    assert_eq!(cleaned, 1);
}

// Subtitles: add an external SRT track, adjust its offset, delete it.
#[tokio::test]
async fn test_subtitles_crud() {
    let storage = setup().await;
    let item = make_test_media("sub1");
    storage.insert_media(&item).await.unwrap();

    let subtitle = pinakes_core::subtitles::Subtitle {
        id: uuid::Uuid::now_v7(),
        media_id: item.id,
        language: Some("en".to_string()),
        format: pinakes_core::subtitles::SubtitleFormat::Srt,
        file_path: Some("/tmp/test.srt".into()),
        is_embedded: false,
        track_index: None,
        offset_ms: 0,
        created_at: chrono::Utc::now(),
    };
    storage.add_subtitle(&subtitle).await.unwrap();

    // Get subtitles
    let subs = storage.get_media_subtitles(item.id).await.unwrap();
    assert_eq!(subs.len(), 1);
    assert_eq!(subs[0].language.as_deref(), Some("en"));
    assert_eq!(subs[0].format, pinakes_core::subtitles::SubtitleFormat::Srt);

    // Update offset
    storage
        .update_subtitle_offset(subtitle.id, 500)
        .await
        .unwrap();
    let updated = storage.get_media_subtitles(item.id).await.unwrap();
    assert_eq!(updated[0].offset_ms, 500);

    // Delete subtitle
    storage.delete_subtitle(subtitle.id).await.unwrap();
    let empty = storage.get_media_subtitles(item.id).await.unwrap();
    assert!(empty.is_empty());
}

// Enrichment: store external metadata with a confidence score, read it
// back, and delete it.
#[tokio::test]
async fn test_external_metadata() {
    let storage = setup().await;
    let item = make_test_media("enrich1");
    storage.insert_media(&item).await.unwrap();

    let meta = pinakes_core::enrichment::ExternalMetadata {
        id: uuid::Uuid::now_v7(),
        media_id: item.id,
        source: pinakes_core::enrichment::EnrichmentSourceType::MusicBrainz,
        external_id: Some("mb-123".to_string()),
        metadata_json: r#"{"title":"Test"}"#.to_string(),
        confidence: 0.85,
        last_updated: chrono::Utc::now(),
    };
    storage.store_external_metadata(&meta).await.unwrap();

    // Get external metadata
    let metas = storage.get_external_metadata(item.id).await.unwrap();
    assert_eq!(metas.len(), 1);
    assert_eq!(
        metas[0].source,
        pinakes_core::enrichment::EnrichmentSourceType::MusicBrainz
    );
    assert_eq!(metas[0].external_id.as_deref(), Some("mb-123"));
    // Float comparison with tolerance.
    assert!((metas[0].confidence - 0.85).abs() < 0.01);

    // Delete
    storage.delete_external_metadata(meta.id).await.unwrap();
    let empty = storage.get_external_metadata(item.id).await.unwrap();
    assert!(empty.is_empty());
}

// Transcode sessions: create, status/progress update, list (all and by
// media id), and TTL-based cleanup with a far-future cutoff.
#[tokio::test]
async fn test_transcode_sessions() {
    let storage = setup().await;
    let item = make_test_media("transcode1");
    storage.insert_media(&item).await.unwrap();

    let session = pinakes_core::transcode::TranscodeSession {
        id: uuid::Uuid::now_v7(),
        media_id: item.id,
        user_id: None,
        profile: "720p".to_string(),
        cache_path: "/tmp/transcode/test.mp4".into(),
        status: pinakes_core::transcode::TranscodeStatus::Pending,
        progress: 0.0,
        created_at: chrono::Utc::now(),
        expires_at: Some(chrono::Utc::now() + chrono::Duration::hours(24)),
        duration_secs: None,
        child_cancel: None,
    };
    storage.create_transcode_session(&session).await.unwrap();

    // Get session
    let fetched = storage.get_transcode_session(session.id).await.unwrap();
    assert_eq!(fetched.profile, "720p");
    assert_eq!(fetched.status.as_str(), "pending");

    // Update status
    storage
        .update_transcode_status(
            session.id,
            pinakes_core::transcode::TranscodeStatus::Transcoding,
            0.5,
        )
        .await
        .unwrap();
    let updated = storage.get_transcode_session(session.id).await.unwrap();
    assert_eq!(updated.status.as_str(), "transcoding");
    assert!((updated.progress - 0.5).abs() < 0.01);

    // List sessions
    let sessions = storage.list_transcode_sessions(None).await.unwrap();
    assert_eq!(sessions.len(), 1);

    // List by media ID
    let sessions = storage
        .list_transcode_sessions(Some(item.id))
        .await
        .unwrap();
    assert_eq!(sessions.len(), 1);

    // Cleanup expired
    let far_future = chrono::Utc::now() + chrono::Duration::days(365);
    let cleaned = storage
        .cleanup_expired_transcodes(far_future)
        .await
        .unwrap();
    assert_eq!(cleaned, 1);
}
+name = "pinakes-plugin-api" +version.workspace = true +edition.workspace = true +license.workspace = true + +[dependencies] +# Core dependencies +serde = { workspace = true } +serde_json = { workspace = true } +thiserror = { workspace = true } +async-trait = { workspace = true } + +# For plugin manifest parsing +toml = { workspace = true } + +# For media types and identifiers +uuid = { workspace = true } +chrono = { workspace = true } +mime_guess = { workspace = true } + +# WASM bridge types +wit-bindgen = { workspace = true, optional = true } + +[features] +default = [] +wasm = ["wit-bindgen"] diff --git a/crates/pinakes-plugin-api/src/lib.rs b/crates/pinakes-plugin-api/src/lib.rs new file mode 100644 index 0000000..fda4b40 --- /dev/null +++ b/crates/pinakes-plugin-api/src/lib.rs @@ -0,0 +1,374 @@ +//! Pinakes Plugin API +//! +//! This crate defines the stable plugin interface for Pinakes. +//! Plugins can extend Pinakes by implementing one or more of the provided traits. + +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::PathBuf; +use thiserror::Error; + +pub mod manifest; +pub mod types; +pub mod wasm; + +pub use manifest::PluginManifest; +pub use types::*; + +/// Plugin API version - plugins must match this version +pub const PLUGIN_API_VERSION: &str = "1.0"; + +/// Result type for plugin operations +pub type PluginResult = Result; + +/// Errors that can occur in plugin operations +#[derive(Debug, Error, Serialize, Deserialize)] +pub enum PluginError { + #[error("Plugin initialization failed: {0}")] + InitializationFailed(String), + + #[error("Unsupported operation: {0}")] + UnsupportedOperation(String), + + #[error("Invalid input: {0}")] + InvalidInput(String), + + #[error("IO error: {0}")] + IoError(String), + + #[error("Metadata extraction failed: {0}")] + MetadataExtractionFailed(String), + + #[error("Thumbnail generation failed: {0}")] + ThumbnailGenerationFailed(String), + + 
#[error("Search backend error: {0}")] + SearchBackendError(String), + + #[error("Permission denied: {0}")] + PermissionDenied(String), + + #[error("Resource limit exceeded: {0}")] + ResourceLimitExceeded(String), + + #[error("Plugin error: {0}")] + Other(String), +} + +/// Context provided to plugins during initialization +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginContext { + /// Plugin's data directory for persistent storage + pub data_dir: PathBuf, + + /// Plugin's cache directory for temporary data + pub cache_dir: PathBuf, + + /// Plugin configuration from manifest + pub config: HashMap, + + /// Capabilities granted to the plugin + pub capabilities: Capabilities, +} + +/// Capabilities that can be granted to plugins +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct Capabilities { + /// Filesystem access permissions + pub filesystem: FilesystemCapability, + + /// Network access permissions + pub network: NetworkCapability, + + /// Environment variable access + pub environment: EnvironmentCapability, + + /// Maximum memory usage in bytes + pub max_memory_bytes: Option, + + /// Maximum CPU time in milliseconds + pub max_cpu_time_ms: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct FilesystemCapability { + /// Paths allowed for reading + pub read: Vec, + + /// Paths allowed for writing + pub write: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct NetworkCapability { + /// Whether network access is allowed + pub enabled: bool, + + /// Allowed domains (if None, all domains allowed when enabled) + pub allowed_domains: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct EnvironmentCapability { + /// Whether environment variable access is allowed + pub enabled: bool, + + /// Specific environment variables allowed (if None, all allowed when enabled) + pub allowed_vars: Option>, +} + +/// Base trait that all plugins must implement 
+#[async_trait] +pub trait Plugin: Send + Sync { + /// Get plugin metadata + fn metadata(&self) -> &PluginMetadata; + + /// Initialize the plugin with provided context + async fn initialize(&mut self, context: PluginContext) -> PluginResult<()>; + + /// Shutdown the plugin gracefully + async fn shutdown(&mut self) -> PluginResult<()>; + + /// Get plugin health status + async fn health_check(&self) -> PluginResult; +} + +/// Plugin metadata +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginMetadata { + pub id: String, + pub name: String, + pub version: String, + pub author: String, + pub description: String, + pub api_version: String, + pub capabilities_required: Capabilities, +} + +/// Health status of a plugin +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HealthStatus { + pub healthy: bool, + pub message: Option, + pub metrics: HashMap, +} + +/// Trait for plugins that provide custom media type support +#[async_trait] +pub trait MediaTypeProvider: Plugin { + /// Get the list of media types this plugin supports + fn supported_media_types(&self) -> Vec; + + /// Check if this plugin can handle the given file + async fn can_handle(&self, path: &PathBuf, mime_type: Option<&str>) -> PluginResult; +} + +/// Definition of a custom media type +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MediaTypeDefinition { + /// Unique identifier for this media type + pub id: String, + + /// Display name + pub name: String, + + /// Category (e.g., "video", "audio", "document", "image") + pub category: String, + + /// File extensions associated with this type + pub extensions: Vec, + + /// MIME types associated with this type + pub mime_types: Vec, + + /// Icon name or path + pub icon: Option, +} + +/// Trait for plugins that extract metadata from files +#[async_trait] +pub trait MetadataExtractor: Plugin { + /// Extract metadata from a file + async fn extract_metadata(&self, path: &PathBuf) -> PluginResult; + + /// Get the media types this 
extractor supports + fn supported_types(&self) -> Vec; +} + +/// Metadata extracted from a file +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct ExtractedMetadata { + pub title: Option, + pub description: Option, + pub author: Option, + pub created_at: Option, + pub duration_secs: Option, + pub width: Option, + pub height: Option, + pub file_size_bytes: Option, + pub codec: Option, + pub bitrate_kbps: Option, + + /// Custom metadata fields specific to this file type + pub custom_fields: HashMap, + + /// Tags extracted from the file + pub tags: Vec, +} + +/// Trait for plugins that generate thumbnails +#[async_trait] +pub trait ThumbnailGenerator: Plugin { + /// Generate a thumbnail for the given file + async fn generate_thumbnail( + &self, + path: &PathBuf, + output_path: &PathBuf, + options: ThumbnailOptions, + ) -> PluginResult; + + /// Get the media types this generator supports + fn supported_types(&self) -> Vec; +} + +/// Options for thumbnail generation +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ThumbnailOptions { + pub width: u32, + pub height: u32, + pub quality: u8, + pub format: ThumbnailFormat, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum ThumbnailFormat { + Jpeg, + Png, + WebP, +} + +/// Information about a generated thumbnail +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ThumbnailInfo { + pub path: PathBuf, + pub width: u32, + pub height: u32, + pub file_size_bytes: u64, +} + +/// Trait for plugins that provide custom search backends +#[async_trait] +pub trait SearchBackend: Plugin { + /// Index a media item for search + async fn index_item(&self, item: &SearchIndexItem) -> PluginResult<()>; + + /// Remove an item from the search index + async fn remove_item(&self, item_id: &str) -> PluginResult<()>; + + /// Perform a search query + async fn search(&self, query: &SearchQuery) -> PluginResult>; + + /// Get search statistics + async fn get_stats(&self) -> PluginResult; +} + +/// 
Item to be indexed for search +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchIndexItem { + pub id: String, + pub title: Option, + pub description: Option, + pub content: Option, + pub tags: Vec, + pub media_type: String, + pub metadata: HashMap, +} + +/// Search query +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchQuery { + pub query_text: String, + pub filters: HashMap, + pub limit: usize, + pub offset: usize, +} + +/// Search result +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchResult { + pub id: String, + pub score: f64, + pub highlights: Vec, +} + +/// Search statistics +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchStats { + pub total_indexed: usize, + pub index_size_bytes: u64, + pub last_update: Option, +} + +/// Trait for plugins that handle events +#[async_trait] +pub trait EventHandler: Plugin { + /// Handle an event + async fn handle_event(&self, event: &Event) -> PluginResult<()>; + + /// Get the event types this handler is interested in + fn interested_events(&self) -> Vec; +} + +/// Event type +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum EventType { + MediaImported, + MediaUpdated, + MediaDeleted, + MediaTagged, + MediaUntagged, + CollectionCreated, + CollectionUpdated, + CollectionDeleted, + ScanStarted, + ScanCompleted, + Custom(String), +} + +/// Event data +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Event { + pub event_type: EventType, + pub timestamp: String, + pub data: HashMap, +} + +/// Trait for plugins that provide UI themes +#[async_trait] +pub trait ThemeProvider: Plugin { + /// Get available themes from this provider + fn get_themes(&self) -> Vec; + + /// Load a specific theme by ID + async fn load_theme(&self, theme_id: &str) -> PluginResult; +} + +/// Theme definition +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ThemeDefinition { + pub id: String, + pub name: String, + pub description: 
String, + pub author: String, + pub preview_url: Option, +} + +/// Theme data +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Theme { + pub id: String, + pub colors: HashMap, + pub fonts: HashMap, + pub custom_css: Option, +} diff --git a/crates/pinakes-plugin-api/src/manifest.rs b/crates/pinakes-plugin-api/src/manifest.rs new file mode 100644 index 0000000..f3e9a7e --- /dev/null +++ b/crates/pinakes-plugin-api/src/manifest.rs @@ -0,0 +1,263 @@ +//! Plugin manifest parsing and validation + +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::Path; +use thiserror::Error; + +use crate::{Capabilities, EnvironmentCapability, FilesystemCapability, NetworkCapability}; + +/// Plugin manifest file format (TOML) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginManifest { + pub plugin: PluginInfo, + + #[serde(default)] + pub capabilities: ManifestCapabilities, + + #[serde(default)] + pub config: HashMap, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginInfo { + pub name: String, + pub version: String, + pub api_version: String, + pub author: Option, + pub description: Option, + pub homepage: Option, + pub license: Option, + + /// Plugin kind(s) - e.g., ["media_type", "metadata_extractor"] + pub kind: Vec, + + /// Binary configuration + pub binary: PluginBinary, + + /// Dependencies on other plugins + #[serde(default)] + pub dependencies: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginBinary { + /// Path to WASM binary + pub wasm: String, + + /// Optional entrypoint function name (default: "_start") + pub entrypoint: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct ManifestCapabilities { + #[serde(default)] + pub filesystem: ManifestFilesystemCapability, + + #[serde(default)] + pub network: bool, + + #[serde(default)] + pub environment: Option>, + + #[serde(default)] + pub max_memory_mb: Option, + + #[serde(default)] + pub 
max_cpu_time_secs: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct ManifestFilesystemCapability { + #[serde(default)] + pub read: Vec, + + #[serde(default)] + pub write: Vec, +} + +#[derive(Debug, Error)] +pub enum ManifestError { + #[error("Failed to read manifest file: {0}")] + IoError(#[from] std::io::Error), + + #[error("Failed to parse manifest: {0}")] + ParseError(#[from] toml::de::Error), + + #[error("Invalid manifest: {0}")] + ValidationError(String), +} + +impl PluginManifest { + /// Load and parse a plugin manifest from a TOML file + pub fn from_file(path: &Path) -> Result { + let content = std::fs::read_to_string(path)?; + let manifest: Self = toml::from_str(&content)?; + manifest.validate()?; + Ok(manifest) + } + + /// Parse a manifest from TOML string + pub fn from_str(content: &str) -> Result { + let manifest: Self = toml::from_str(content)?; + manifest.validate()?; + Ok(manifest) + } + + /// Validate the manifest + pub fn validate(&self) -> Result<(), ManifestError> { + // Check API version format + if self.plugin.api_version.is_empty() { + return Err(ManifestError::ValidationError( + "api_version cannot be empty".to_string(), + )); + } + + // Check version format (basic semver check) + if !self.plugin.version.contains('.') { + return Err(ManifestError::ValidationError( + "version must be in semver format (e.g., 1.0.0)".to_string(), + )); + } + + // Check that at least one kind is specified + if self.plugin.kind.is_empty() { + return Err(ManifestError::ValidationError( + "at least one plugin kind must be specified".to_string(), + )); + } + + // Check plugin kinds are valid + let valid_kinds = [ + "media_type", + "metadata_extractor", + "thumbnail_generator", + "search_backend", + "event_handler", + "theme_provider", + ]; + + for kind in &self.plugin.kind { + if !valid_kinds.contains(&kind.as_str()) { + return Err(ManifestError::ValidationError(format!( + "Invalid plugin kind: {}. 
Must be one of: {}", + kind, + valid_kinds.join(", ") + ))); + } + } + + // Check WASM binary path is not empty + if self.plugin.binary.wasm.is_empty() { + return Err(ManifestError::ValidationError( + "WASM binary path cannot be empty".to_string(), + )); + } + + Ok(()) + } + + /// Convert manifest capabilities to API capabilities + pub fn to_capabilities(&self) -> Capabilities { + Capabilities { + filesystem: FilesystemCapability { + read: self + .capabilities + .filesystem + .read + .iter() + .map(|s| s.into()) + .collect(), + write: self + .capabilities + .filesystem + .write + .iter() + .map(|s| s.into()) + .collect(), + }, + network: NetworkCapability { + enabled: self.capabilities.network, + allowed_domains: None, + }, + environment: EnvironmentCapability { + enabled: self.capabilities.environment.is_some(), + allowed_vars: self.capabilities.environment.clone(), + }, + max_memory_bytes: self + .capabilities + .max_memory_mb + .map(|mb| mb.saturating_mul(1024).saturating_mul(1024)), + max_cpu_time_ms: self + .capabilities + .max_cpu_time_secs + .map(|secs| secs.saturating_mul(1000)), + } + } + + /// Get plugin ID (derived from name and version) + pub fn plugin_id(&self) -> String { + format!("{}@{}", self.plugin.name, self.plugin.version) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_valid_manifest() { + let toml = r#" +[plugin] +name = "heif-support" +version = "1.0.0" +api_version = "1.0" +author = "Test Author" +description = "HEIF image support" +kind = ["media_type", "metadata_extractor"] + +[plugin.binary] +wasm = "plugin.wasm" + +[capabilities.filesystem] +read = ["/tmp/pinakes-thumbnails"] +"#; + + let manifest = PluginManifest::from_str(toml).unwrap(); + assert_eq!(manifest.plugin.name, "heif-support"); + assert_eq!(manifest.plugin.version, "1.0.0"); + assert_eq!(manifest.plugin.kind.len(), 2); + } + + #[test] + fn test_invalid_api_version() { + let toml = r#" +[plugin] +name = "test" +version = "1.0.0" +api_version = 
"" +kind = ["media_type"] + +[plugin.binary] +wasm = "plugin.wasm" +"#; + + assert!(PluginManifest::from_str(toml).is_err()); + } + + #[test] + fn test_invalid_kind() { + let toml = r#" +[plugin] +name = "test" +version = "1.0.0" +api_version = "1.0" +kind = ["invalid_kind"] + +[plugin.binary] +wasm = "plugin.wasm" +"#; + + assert!(PluginManifest::from_str(toml).is_err()); + } +} diff --git a/crates/pinakes-plugin-api/src/types.rs b/crates/pinakes-plugin-api/src/types.rs new file mode 100644 index 0000000..0cb0344 --- /dev/null +++ b/crates/pinakes-plugin-api/src/types.rs @@ -0,0 +1,156 @@ +//! Shared types used across the plugin API + +use serde::{Deserialize, Serialize}; +use std::fmt; + +/// Plugin identifier +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct PluginId(String); + +impl PluginId { + pub fn new(id: impl Into) -> Self { + Self(id.into()) + } + + pub fn as_str(&self) -> &str { + &self.0 + } +} + +impl fmt::Display for PluginId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From for PluginId { + fn from(s: String) -> Self { + Self(s) + } +} + +impl From<&str> for PluginId { + fn from(s: &str) -> Self { + Self(s.to_string()) + } +} + +/// Plugin lifecycle state +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum PluginState { + /// Plugin is being loaded + Loading, + + /// Plugin is initialized and ready + Ready, + + /// Plugin is running + Running, + + /// Plugin encountered an error + Error, + + /// Plugin is being shut down + ShuttingDown, + + /// Plugin is stopped + Stopped, +} + +impl fmt::Display for PluginState { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Loading => write!(f, "loading"), + Self::Ready => write!(f, "ready"), + Self::Running => write!(f, "running"), + Self::Error => write!(f, "error"), + Self::ShuttingDown => write!(f, "shutting_down"), + Self::Stopped => write!(f, "stopped"), 
+ } + } +} + +/// Plugin installation status +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginStatus { + pub id: PluginId, + pub name: String, + pub version: String, + pub state: PluginState, + pub enabled: bool, + pub error_message: Option, +} + +/// Version information +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct Version { + pub major: u32, + pub minor: u32, + pub patch: u32, +} + +impl Version { + pub fn new(major: u32, minor: u32, patch: u32) -> Self { + Self { + major, + minor, + patch, + } + } + + /// Parse version from string (e.g., "1.2.3") + pub fn parse(s: &str) -> Option { + let parts: Vec<&str> = s.split('.').collect(); + if parts.len() != 3 { + return None; + } + + Some(Self { + major: parts[0].parse().ok()?, + minor: parts[1].parse().ok()?, + patch: parts[2].parse().ok()?, + }) + } + + /// Check if this version is compatible with another version + /// Compatible if major version matches and minor version is >= required + pub fn is_compatible_with(&self, required: &Version) -> bool { + self.major == required.major && self.minor >= required.minor + } +} + +impl fmt::Display for Version { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}.{}.{}", self.major, self.minor, self.patch) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_version_parse() { + let v = Version::parse("1.2.3").unwrap(); + assert_eq!(v.major, 1); + assert_eq!(v.minor, 2); + assert_eq!(v.patch, 3); + } + + #[test] + fn test_version_compatibility() { + let v1 = Version::new(1, 2, 0); + let v2 = Version::new(1, 1, 0); + let v3 = Version::new(2, 0, 0); + + assert!(v1.is_compatible_with(&v2)); // 1.2 >= 1.1 + assert!(!v2.is_compatible_with(&v1)); // 1.1 < 1.2 + assert!(!v1.is_compatible_with(&v3)); // Different major version + } + + #[test] + fn test_version_display() { + let v = Version::new(1, 2, 3); + assert_eq!(v.to_string(), "1.2.3"); + } +} diff --git 
a/crates/pinakes-plugin-api/src/wasm.rs b/crates/pinakes-plugin-api/src/wasm.rs new file mode 100644 index 0000000..f9ec932 --- /dev/null +++ b/crates/pinakes-plugin-api/src/wasm.rs @@ -0,0 +1,186 @@ +//! WASM bridge types and helpers for plugin communication + +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +/// Memory allocation info for passing data between host and plugin +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WasmMemoryAlloc { + /// Pointer to allocated memory + pub ptr: u32, + + /// Size of allocation in bytes + pub len: u32, +} + +/// Request from host to plugin +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HostRequest { + /// Request ID for matching with response + pub request_id: String, + + /// Method name being called + pub method: String, + + /// Serialized parameters + pub params: Vec, +} + +/// Response from plugin to host +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PluginResponse { + /// Request ID this response corresponds to + pub request_id: String, + + /// Success or error + pub result: WasmResult>, +} + +/// Result type for WASM operations +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum WasmResult { + Ok(T), + Err(String), +} + +impl From> for WasmResult { + fn from(r: Result) -> Self { + match r { + Ok(v) => WasmResult::Ok(v), + Err(e) => WasmResult::Err(e), + } + } +} + +/// Host functions available to plugins +pub mod host_functions { + /// Log a message from plugin + pub const LOG: &str = "host_log"; + + /// Read a file (if permitted) + pub const READ_FILE: &str = "host_read_file"; + + /// Write a file (if permitted) + pub const WRITE_FILE: &str = "host_write_file"; + + /// Make an HTTP request (if permitted) + pub const HTTP_REQUEST: &str = "host_http_request"; + + /// Get configuration value + pub const GET_CONFIG: &str = "host_get_config"; + + /// Emit an event + pub const EMIT_EVENT: &str = "host_emit_event"; +} + +/// Log level for plugin logging 
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +pub enum LogLevel { + Trace, + Debug, + Info, + Warn, + Error, +} + +/// Log message from plugin +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LogMessage { + pub level: LogLevel, + pub target: String, + pub message: String, + pub fields: HashMap, +} + +/// HTTP request parameters +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HttpRequest { + pub method: String, + pub url: String, + pub headers: HashMap, + pub body: Option>, +} + +/// HTTP response +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HttpResponse { + pub status: u16, + pub headers: HashMap, + pub body: Vec, +} + +/// Helper functions for serializing/deserializing data across WASM boundary +pub mod helpers { + use super::*; + + /// Serialize a value to bytes for passing to WASM + pub fn serialize(value: &T) -> Result, String> { + serde_json::to_vec(value).map_err(|e| format!("Serialization error: {}", e)) + } + + /// Deserialize bytes from WASM to a value + pub fn deserialize Deserialize<'de>>(bytes: &[u8]) -> Result { + serde_json::from_slice(bytes).map_err(|e| format!("Deserialization error: {}", e)) + } + + /// Create a success response + pub fn ok_response(request_id: String, value: &T) -> Result, String> { + let result = WasmResult::Ok(serialize(value)?); + let response = PluginResponse { request_id, result }; + serialize(&response) + } + + /// Create an error response + pub fn error_response(request_id: String, error: String) -> Result, String> { + let result = WasmResult::>::Err(error); + let response = PluginResponse { request_id, result }; + serialize(&response) + } +} + +#[cfg(test)] +mod tests { + use super::helpers::*; + use super::*; + + #[test] + fn test_serialize_deserialize() { + let data = vec![1u8, 2, 3, 4]; + let bytes = serialize(&data).unwrap(); + let recovered: Vec = deserialize(&bytes).unwrap(); + assert_eq!(data, recovered); + } + + #[test] + fn test_ok_response() { + let request_id = 
"test-123".to_string(); + let value = "success"; + let response_bytes = ok_response(request_id.clone(), &value).unwrap(); + + let response: PluginResponse = deserialize(&response_bytes).unwrap(); + assert_eq!(response.request_id, request_id); + + match response.result { + WasmResult::Ok(data) => { + let recovered: String = deserialize(&data).unwrap(); + assert_eq!(recovered, value); + } + WasmResult::Err(_) => panic!("Expected Ok result"), + } + } + + #[test] + fn test_error_response() { + let request_id = "test-456".to_string(); + let error_msg = "Something went wrong"; + let response_bytes = error_response(request_id.clone(), error_msg.to_string()).unwrap(); + + let response: PluginResponse = deserialize(&response_bytes).unwrap(); + assert_eq!(response.request_id, request_id); + + match response.result { + WasmResult::Err(msg) => assert_eq!(msg, error_msg), + WasmResult::Ok(_) => panic!("Expected Err result"), + } + } +} diff --git a/crates/pinakes-plugin-api/tests/validate_examples.rs b/crates/pinakes-plugin-api/tests/validate_examples.rs new file mode 100644 index 0000000..3b6e416 --- /dev/null +++ b/crates/pinakes-plugin-api/tests/validate_examples.rs @@ -0,0 +1,67 @@ +use pinakes_plugin_api::PluginManifest; +use std::path::PathBuf; + +#[test] +fn test_markdown_metadata_manifest() { + let manifest_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .parent() + .unwrap() + .parent() + .unwrap() + .join("examples/plugins/markdown-metadata/plugin.toml"); + + let manifest = PluginManifest::from_file(&manifest_path) + .expect("Failed to parse markdown-metadata plugin.toml"); + + assert_eq!(manifest.plugin.name, "markdown-metadata"); + assert_eq!(manifest.plugin.version, "1.0.0"); + assert_eq!(manifest.plugin.api_version, "1.0"); + assert_eq!(manifest.plugin.kind, vec!["metadata_extractor"]); + assert_eq!(manifest.plugin.binary.wasm, "markdown_metadata.wasm"); + + // Validate capabilities + let caps = manifest.to_capabilities(); + 
assert_eq!(caps.filesystem.read.len(), 0); + assert_eq!(caps.filesystem.write.len(), 0); + assert!(!caps.network.enabled); + + // Validate config + assert!(manifest.config.contains_key("extract_tags")); + assert!(manifest.config.contains_key("parse_yaml")); + assert!(manifest.config.contains_key("max_file_size")); +} + +#[test] +fn test_heif_support_manifest() { + let manifest_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .parent() + .unwrap() + .parent() + .unwrap() + .join("examples/plugins/heif-support/plugin.toml"); + + let manifest = PluginManifest::from_file(&manifest_path) + .expect("Failed to parse heif-support plugin.toml"); + + assert_eq!(manifest.plugin.name, "heif-support"); + assert_eq!(manifest.plugin.version, "1.0.0"); + assert_eq!(manifest.plugin.api_version, "1.0"); + assert_eq!( + manifest.plugin.kind, + vec!["media_type", "metadata_extractor", "thumbnail_generator"] + ); + assert_eq!(manifest.plugin.binary.wasm, "heif_support.wasm"); + + // Validate capabilities + let caps = manifest.to_capabilities(); + assert_eq!(caps.filesystem.read.len(), 1); + assert_eq!(caps.filesystem.write.len(), 1); + assert!(!caps.network.enabled); + assert_eq!(caps.max_memory_bytes, Some(256 * 1024 * 1024)); // 256MB + assert_eq!(caps.max_cpu_time_ms, Some(30 * 1000)); // 30 seconds + + // Validate config + assert!(manifest.config.contains_key("extract_exif")); + assert!(manifest.config.contains_key("generate_thumbnails")); + assert!(manifest.config.contains_key("thumbnail_quality")); +} diff --git a/crates/pinakes-server/Cargo.toml b/crates/pinakes-server/Cargo.toml index a2bc53f..e8f8353 100644 --- a/crates/pinakes-server/Cargo.toml +++ b/crates/pinakes-server/Cargo.toml @@ -6,6 +6,7 @@ license.workspace = true [dependencies] pinakes-core = { path = "../pinakes-core" } +pinakes-plugin-api = { path = "../pinakes-plugin-api" } tokio = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } @@ -25,6 +26,8 @@ tower_governor = { workspace 
= true } tokio-util = { version = "0.7", features = ["io"] } argon2 = { workspace = true } rand = "0.9" +percent-encoding = "2" [dev-dependencies] http-body-util = "0.1" +tempfile = "3" diff --git a/crates/pinakes-server/src/app.rs b/crates/pinakes-server/src/app.rs index 8702cd2..3f5f9ab 100644 --- a/crates/pinakes-server/src/app.rs +++ b/crates/pinakes-server/src/app.rs @@ -40,6 +40,9 @@ pub fn create_router(state: AppState) -> Router { config: login_governor, }); + // Public routes (no auth required) + let public_routes = Router::new().route("/s/{token}", get(routes::social::access_shared_media)); + // Read-only routes: any authenticated user (Viewer+) let viewer_routes = Router::new() .route("/health", get(routes::health::health)) @@ -87,7 +90,82 @@ pub fn create_router(state: AppState) -> Router { .route("/webhooks", get(routes::webhooks::list_webhooks)) // Auth endpoints (self-service) — login handled separately with stricter rate limit .route("/auth/logout", post(routes::auth::logout)) - .route("/auth/me", get(routes::auth::me)); + .route("/auth/me", get(routes::auth::me)) + // Social: ratings & comments (read) + .route( + "/media/{id}/ratings", + get(routes::social::get_media_ratings), + ) + .route( + "/media/{id}/comments", + get(routes::social::get_media_comments), + ) + // Favorites (read) + .route("/favorites", get(routes::social::list_favorites)) + // Playlists (read) + .route("/playlists", get(routes::playlists::list_playlists)) + .route("/playlists/{id}", get(routes::playlists::get_playlist)) + .route("/playlists/{id}/items", get(routes::playlists::list_items)) + .route( + "/playlists/{id}/shuffle", + post(routes::playlists::shuffle_playlist), + ) + // Analytics (read) + .route( + "/analytics/most-viewed", + get(routes::analytics::get_most_viewed), + ) + .route( + "/analytics/recently-viewed", + get(routes::analytics::get_recently_viewed), + ) + .route("/analytics/events", post(routes::analytics::record_event)) + .route( + "/media/{id}/progress", + 
get(routes::analytics::get_watch_progress), + ) + .route( + "/media/{id}/progress", + post(routes::analytics::update_watch_progress), + ) + // Subtitles (read) + .route( + "/media/{id}/subtitles", + get(routes::subtitles::list_subtitles), + ) + .route( + "/media/{media_id}/subtitles/{subtitle_id}/content", + get(routes::subtitles::get_subtitle_content), + ) + // Enrichment (read) + .route( + "/media/{id}/external-metadata", + get(routes::enrichment::get_external_metadata), + ) + // Transcode (read) + .route("/transcode/{id}", get(routes::transcode::get_session)) + .route("/transcode", get(routes::transcode::list_sessions)) + // Streaming + .route( + "/media/{id}/stream/hls/master.m3u8", + get(routes::streaming::hls_master_playlist), + ) + .route( + "/media/{id}/stream/hls/{profile}/playlist.m3u8", + get(routes::streaming::hls_variant_playlist), + ) + .route( + "/media/{id}/stream/hls/{profile}/{segment}", + get(routes::streaming::hls_segment), + ) + .route( + "/media/{id}/stream/dash/manifest.mpd", + get(routes::streaming::dash_manifest), + ) + .route( + "/media/{id}/stream/dash/{profile}/{segment}", + get(routes::streaming::dash_segment), + ); // Write routes: Editor+ required let editor_routes = Router::new() @@ -190,6 +268,58 @@ pub fn create_router(state: AppState) -> Router { ) // Webhooks .route("/webhooks/test", post(routes::webhooks::test_webhook)) + // Social: ratings & comments (write) + .route("/media/{id}/ratings", post(routes::social::rate_media)) + .route("/media/{id}/comments", post(routes::social::add_comment)) + // Favorites (write) + .route("/favorites", post(routes::social::add_favorite)) + .route( + "/favorites/{media_id}", + delete(routes::social::remove_favorite), + ) + // Share links + .route("/share", post(routes::social::create_share_link)) + // Playlists (write) + .route("/playlists", post(routes::playlists::create_playlist)) + .route("/playlists/{id}", patch(routes::playlists::update_playlist)) + .route( + "/playlists/{id}", + 
delete(routes::playlists::delete_playlist), + ) + .route("/playlists/{id}/items", post(routes::playlists::add_item)) + .route( + "/playlists/{id}/items/{media_id}", + delete(routes::playlists::remove_item), + ) + .route( + "/playlists/{id}/reorder", + post(routes::playlists::reorder_item), + ) + // Subtitles (write) + .route( + "/media/{id}/subtitles", + post(routes::subtitles::add_subtitle), + ) + .route( + "/subtitles/{id}", + delete(routes::subtitles::delete_subtitle), + ) + .route( + "/subtitles/{id}/offset", + patch(routes::subtitles::update_offset), + ) + // Enrichment (write) + .route( + "/media/{id}/enrich", + post(routes::enrichment::trigger_enrichment), + ) + .route("/jobs/enrich", post(routes::enrichment::batch_enrich)) + // Transcode (write) + .route( + "/media/{id}/transcode", + post(routes::transcode::start_transcode), + ) + .route("/transcode/{id}", delete(routes::transcode::cancel_session)) .layer(middleware::from_fn(auth::require_editor)); // Admin-only routes: destructive/config operations @@ -203,14 +333,33 @@ pub fn create_router(state: AppState) -> Router { .route("/config/ui", put(routes::config::update_ui_config)) .route("/database/vacuum", post(routes::database::vacuum_database)) .route("/database/clear", post(routes::database::clear_database)) + // Plugin management + .route("/plugins", get(routes::plugins::list_plugins)) + .route("/plugins/{id}", get(routes::plugins::get_plugin)) + .route("/plugins/install", post(routes::plugins::install_plugin)) + .route("/plugins/{id}", delete(routes::plugins::uninstall_plugin)) + .route("/plugins/{id}/toggle", post(routes::plugins::toggle_plugin)) + .route("/plugins/{id}/reload", post(routes::plugins::reload_plugin)) + // User management + .route("/users", get(routes::users::list_users)) + .route("/users", post(routes::users::create_user)) + .route("/users/{id}", get(routes::users::get_user)) + .route("/users/{id}", patch(routes::users::update_user)) + .route("/users/{id}", 
delete(routes::users::delete_user)) + .route( + "/users/{id}/libraries", + get(routes::users::get_user_libraries), + ) + .route( + "/users/{id}/libraries", + post(routes::users::grant_library_access), + ) + .route( + "/users/{id}/libraries", + delete(routes::users::revoke_library_access), + ) .layer(middleware::from_fn(auth::require_admin)); - let api = Router::new() - .merge(login_route) - .merge(viewer_routes) - .merge(editor_routes) - .merge(admin_routes); - // CORS: allow same-origin by default, plus the desktop UI origin let cors = CorsLayer::new() .allow_origin([ @@ -228,13 +377,25 @@ pub fn create_router(state: AppState) -> Router { .allow_headers([header::CONTENT_TYPE, header::AUTHORIZATION]) .allow_credentials(true); - Router::new() - .nest("/api/v1", api) - .layer(DefaultBodyLimit::max(10 * 1024 * 1024)) + // Create protected routes with auth middleware + let protected_api = Router::new() + .merge(viewer_routes) + .merge(editor_routes) + .merge(admin_routes) .layer(middleware::from_fn_with_state( state.clone(), auth::require_auth, - )) + )); + + // Combine protected and public routes + let full_api = Router::new() + .merge(login_route) + .merge(public_routes) + .merge(protected_api); + + Router::new() + .nest("/api/v1", full_api) + .layer(DefaultBodyLimit::max(10 * 1024 * 1024)) .layer(GovernorLayer { config: global_governor, }) diff --git a/crates/pinakes-server/src/auth.rs b/crates/pinakes-server/src/auth.rs index d094006..6a372ff 100644 --- a/crates/pinakes-server/src/auth.rs +++ b/crates/pinakes-server/src/auth.rs @@ -85,6 +85,7 @@ pub async fn require_auth( if expected_key.is_empty() { // Empty key means no auth required request.extensions_mut().insert(UserRole::Admin); + request.extensions_mut().insert("admin".to_string()); return next.run(request).await; } @@ -110,6 +111,7 @@ pub async fn require_auth( } // When no api_key is configured, or key matches, grant admin request.extensions_mut().insert(UserRole::Admin); + 
request.extensions_mut().insert("admin".to_string()); } next.run(request).await @@ -143,6 +145,24 @@ pub async fn require_admin(request: Request, next: Next) -> Response { } } +/// Resolve the authenticated username (from request extensions) to a UserId. +/// +/// Returns an error if the user cannot be found. +pub async fn resolve_user_id( + storage: &pinakes_core::storage::DynStorageBackend, + username: &str, +) -> Result { + match storage.get_user_by_username(username).await { + Ok(user) => Ok(user.id), + Err(e) => { + tracing::warn!(username = %username, error = ?e, "failed to resolve user"); + Err(crate::error::ApiError( + pinakes_core::error::PinakesError::Authentication("user not found".into()), + )) + } + } +} + fn unauthorized(message: &str) -> Response { let body = format!(r#"{{"error":"{message}"}}"#); ( diff --git a/crates/pinakes-server/src/dto.rs b/crates/pinakes-server/src/dto.rs index b1aaaf7..9c4fc8d 100644 --- a/crates/pinakes-server/src/dto.rs +++ b/crates/pinakes-server/src/dto.rs @@ -551,3 +551,431 @@ impl From for AuditEntryResponse { } } } + +// Plugins +#[derive(Debug, Serialize)] +pub struct PluginResponse { + pub id: String, + pub name: String, + pub version: String, + pub author: String, + pub description: String, + pub api_version: String, + pub enabled: bool, +} + +#[derive(Debug, Deserialize)] +pub struct InstallPluginRequest { + pub source: String, // URL or file path +} + +#[derive(Debug, Deserialize)] +pub struct TogglePluginRequest { + pub enabled: bool, +} + +impl PluginResponse { + pub fn new(meta: pinakes_plugin_api::PluginMetadata, enabled: bool) -> Self { + Self { + id: meta.id, + name: meta.name, + version: meta.version, + author: meta.author, + description: meta.description, + api_version: meta.api_version, + enabled, + } + } +} + +// Users +#[derive(Debug, Serialize)] +pub struct UserResponse { + pub id: String, + pub username: String, + pub role: String, + pub profile: UserProfileResponse, + pub created_at: DateTime, + pub 
updated_at: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct UserProfileResponse { + pub avatar_path: Option, + pub bio: Option, + pub preferences: UserPreferencesResponse, +} + +#[derive(Debug, Serialize)] +pub struct UserPreferencesResponse { + pub theme: Option, + pub language: Option, + pub default_video_quality: Option, + pub auto_play: bool, +} + +#[derive(Debug, Serialize)] +pub struct UserLibraryResponse { + pub user_id: String, + pub root_path: String, + pub permission: String, + pub granted_at: DateTime, +} + +#[derive(Debug, Deserialize)] +pub struct GrantLibraryAccessRequest { + pub root_path: String, + pub permission: pinakes_core::users::LibraryPermission, +} + +#[derive(Debug, Deserialize)] +pub struct RevokeLibraryAccessRequest { + pub root_path: String, +} + +impl From for UserResponse { + fn from(user: pinakes_core::users::User) -> Self { + Self { + id: user.id.0.to_string(), + username: user.username, + role: user.role.to_string(), + profile: UserProfileResponse { + avatar_path: user.profile.avatar_path, + bio: user.profile.bio, + preferences: UserPreferencesResponse { + theme: user.profile.preferences.theme, + language: user.profile.preferences.language, + default_video_quality: user.profile.preferences.default_video_quality, + auto_play: user.profile.preferences.auto_play, + }, + }, + created_at: user.created_at, + updated_at: user.updated_at, + } + } +} + +impl From for UserLibraryResponse { + fn from(access: pinakes_core::users::UserLibraryAccess) -> Self { + Self { + user_id: access.user_id.0.to_string(), + root_path: access.root_path, + permission: format!("{:?}", access.permission).to_lowercase(), + granted_at: access.granted_at, + } + } +} + +// ===== Social (Ratings, Comments, Favorites, Shares) ===== + +#[derive(Debug, Serialize)] +pub struct RatingResponse { + pub id: String, + pub user_id: String, + pub media_id: String, + pub stars: u8, + pub review_text: Option, + pub created_at: DateTime, +} + +impl From for RatingResponse { 
+ fn from(r: pinakes_core::social::Rating) -> Self { + Self { + id: r.id.to_string(), + user_id: r.user_id.0.to_string(), + media_id: r.media_id.0.to_string(), + stars: r.stars, + review_text: r.review_text, + created_at: r.created_at, + } + } +} + +#[derive(Debug, Deserialize)] +pub struct CreateRatingRequest { + pub stars: u8, + pub review_text: Option, +} + +#[derive(Debug, Serialize)] +pub struct CommentResponse { + pub id: String, + pub user_id: String, + pub media_id: String, + pub parent_comment_id: Option, + pub text: String, + pub created_at: DateTime, +} + +impl From for CommentResponse { + fn from(c: pinakes_core::social::Comment) -> Self { + Self { + id: c.id.to_string(), + user_id: c.user_id.0.to_string(), + media_id: c.media_id.0.to_string(), + parent_comment_id: c.parent_comment_id.map(|id| id.to_string()), + text: c.text, + created_at: c.created_at, + } + } +} + +#[derive(Debug, Deserialize)] +pub struct CreateCommentRequest { + pub text: String, + pub parent_id: Option, +} + +#[derive(Debug, Deserialize)] +pub struct FavoriteRequest { + pub media_id: Uuid, +} + +#[derive(Debug, Deserialize)] +pub struct CreateShareLinkRequest { + pub media_id: Uuid, + pub password: Option, + pub expires_in_hours: Option, +} + +#[derive(Debug, Serialize)] +pub struct ShareLinkResponse { + pub id: String, + pub media_id: String, + pub token: String, + pub expires_at: Option>, + pub view_count: u64, + pub created_at: DateTime, +} + +impl From for ShareLinkResponse { + fn from(s: pinakes_core::social::ShareLink) -> Self { + Self { + id: s.id.to_string(), + media_id: s.media_id.0.to_string(), + token: s.token, + expires_at: s.expires_at, + view_count: s.view_count, + created_at: s.created_at, + } + } +} + +// ===== Playlists ===== + +#[derive(Debug, Serialize)] +pub struct PlaylistResponse { + pub id: String, + pub owner_id: String, + pub name: String, + pub description: Option, + pub is_public: bool, + pub is_smart: bool, + pub filter_query: Option, + pub created_at: 
DateTime, + pub updated_at: DateTime, +} + +impl From for PlaylistResponse { + fn from(p: pinakes_core::playlists::Playlist) -> Self { + Self { + id: p.id.to_string(), + owner_id: p.owner_id.0.to_string(), + name: p.name, + description: p.description, + is_public: p.is_public, + is_smart: p.is_smart, + filter_query: p.filter_query, + created_at: p.created_at, + updated_at: p.updated_at, + } + } +} + +#[derive(Debug, Deserialize)] +pub struct CreatePlaylistRequest { + pub name: String, + pub description: Option, + pub is_public: Option, + pub is_smart: Option, + pub filter_query: Option, +} + +#[derive(Debug, Deserialize)] +pub struct UpdatePlaylistRequest { + pub name: Option, + pub description: Option, + pub is_public: Option, +} + +#[derive(Debug, Deserialize)] +pub struct PlaylistItemRequest { + pub media_id: Uuid, + pub position: Option, +} + +#[derive(Debug, Deserialize)] +pub struct ReorderPlaylistRequest { + pub media_id: Uuid, + pub new_position: i32, +} + +// ===== Analytics ===== + +#[derive(Debug, Serialize)] +pub struct UsageEventResponse { + pub id: String, + pub media_id: Option, + pub user_id: Option, + pub event_type: String, + pub timestamp: DateTime, + pub duration_secs: Option, +} + +impl From for UsageEventResponse { + fn from(e: pinakes_core::analytics::UsageEvent) -> Self { + Self { + id: e.id.to_string(), + media_id: e.media_id.map(|m| m.0.to_string()), + user_id: e.user_id.map(|u| u.0.to_string()), + event_type: e.event_type.to_string(), + timestamp: e.timestamp, + duration_secs: e.duration_secs, + } + } +} + +#[derive(Debug, Deserialize)] +pub struct RecordUsageEventRequest { + pub media_id: Option, + pub event_type: String, + pub duration_secs: Option, + pub context: Option, +} + +#[derive(Debug, Serialize)] +pub struct MostViewedResponse { + pub media: MediaResponse, + pub view_count: u64, +} + +#[derive(Debug, Deserialize)] +pub struct WatchProgressRequest { + pub progress_secs: f64, +} + +#[derive(Debug, Serialize)] +pub struct 
WatchProgressResponse { + pub progress_secs: f64, +} + +// ===== Subtitles ===== + +#[derive(Debug, Serialize)] +pub struct SubtitleResponse { + pub id: String, + pub media_id: String, + pub language: Option, + pub format: String, + pub is_embedded: bool, + pub track_index: Option, + pub offset_ms: i64, + pub created_at: DateTime, +} + +impl From for SubtitleResponse { + fn from(s: pinakes_core::subtitles::Subtitle) -> Self { + Self { + id: s.id.to_string(), + media_id: s.media_id.0.to_string(), + language: s.language, + format: s.format.to_string(), + is_embedded: s.is_embedded, + track_index: s.track_index, + offset_ms: s.offset_ms, + created_at: s.created_at, + } + } +} + +#[derive(Debug, Deserialize)] +pub struct AddSubtitleRequest { + pub language: Option, + pub format: String, + pub file_path: Option, + pub is_embedded: Option, + pub track_index: Option, + pub offset_ms: Option, +} + +#[derive(Debug, Deserialize)] +pub struct UpdateSubtitleOffsetRequest { + pub offset_ms: i64, +} + +// ===== Enrichment ===== + +#[derive(Debug, Serialize)] +pub struct ExternalMetadataResponse { + pub id: String, + pub media_id: String, + pub source: String, + pub external_id: Option, + pub metadata: serde_json::Value, + pub confidence: f64, + pub last_updated: DateTime, +} + +impl From for ExternalMetadataResponse { + fn from(m: pinakes_core::enrichment::ExternalMetadata) -> Self { + let metadata = serde_json::from_str(&m.metadata_json).unwrap_or_else(|e| { + tracing::warn!( + "failed to deserialize external metadata JSON for media {}: {}", + m.media_id.0, + e + ); + serde_json::Value::Null + }); + Self { + id: m.id.to_string(), + media_id: m.media_id.0.to_string(), + source: m.source.to_string(), + external_id: m.external_id, + metadata, + confidence: m.confidence, + last_updated: m.last_updated, + } + } +} + +// ===== Transcode ===== + +#[derive(Debug, Serialize)] +pub struct TranscodeSessionResponse { + pub id: String, + pub media_id: String, + pub profile: String, + pub 
status: String, + pub progress: f32, + pub created_at: DateTime, + pub expires_at: Option>, +} + +impl From for TranscodeSessionResponse { + fn from(s: pinakes_core::transcode::TranscodeSession) -> Self { + Self { + id: s.id.to_string(), + media_id: s.media_id.0.to_string(), + profile: s.profile, + status: s.status.as_str().to_string(), + progress: s.progress, + created_at: s.created_at, + expires_at: s.expires_at, + } + } +} + +#[derive(Debug, Deserialize)] +pub struct CreateTranscodeRequest { + pub profile: String, +} diff --git a/crates/pinakes-server/src/error.rs b/crates/pinakes-server/src/error.rs index 768a2cf..c2277c6 100644 --- a/crates/pinakes-server/src/error.rs +++ b/crates/pinakes-server/src/error.rs @@ -38,6 +38,8 @@ impl IntoResponse for ApiError { } PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()), PinakesError::InvalidOperation(msg) => (StatusCode::BAD_REQUEST, msg.clone()), + PinakesError::Authentication(msg) => (StatusCode::UNAUTHORIZED, msg.clone()), + PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()), PinakesError::Config(_) => { tracing::error!(error = %self.0, "configuration error"); ( diff --git a/crates/pinakes-server/src/main.rs b/crates/pinakes-server/src/main.rs index 3727612..8144748 100644 --- a/crates/pinakes-server/src/main.rs +++ b/crates/pinakes-server/src/main.rs @@ -161,17 +161,29 @@ async fn main() -> Result<()> { let addr = format!("{}:{}", config.server.host, config.server.port); + // Initialize transcode service early so the job queue can reference it + let transcode_service: Option> = + if config.transcoding.enabled { + Some(Arc::new(pinakes_core::transcode::TranscodeService::new( + config.transcoding.clone(), + ))) + } else { + None + }; + // Initialize job queue with executor let job_storage = storage.clone(); let job_config = config.clone(); + let job_transcode = transcode_service.clone(); let job_queue = pinakes_core::jobs::JobQueue::new( config.jobs.worker_count, move 
|job_id, kind, cancel, jobs| { let storage = job_storage.clone(); let config = job_config.clone(); + let transcode_svc = job_transcode.clone(); tokio::spawn(async move { use pinakes_core::jobs::{JobKind, JobQueue}; - let result = match kind { + match kind { JobKind::Scan { path } => { let ignore = config.scanning.ignore_patterns.clone(); let res = if let Some(p) = path { @@ -232,7 +244,7 @@ async fn main() -> Result<()> { match storage.get_media(*mid).await { Ok(item) => { let source = item.path.clone(); - let mt = item.media_type; + let mt = item.media_type.clone(); let id = item.id; let td = thumb_dir.clone(); let tc = thumb_config.clone(); @@ -333,8 +345,65 @@ async fn main() -> Result<()> { Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, } } + JobKind::Transcode { media_id, profile } => { + if let Some(ref svc) = transcode_svc { + match storage.get_media(media_id).await { + Ok(item) => { + match svc + .start_transcode( + media_id, + &item.path, + &profile, + item.duration_secs, + &storage, + ) + .await + { + Ok(session_id) => { + JobQueue::complete( + &jobs, + job_id, + serde_json::json!({"session_id": session_id.to_string()}), + ) + .await; + } + Err(e) => { + JobQueue::fail(&jobs, job_id, e.to_string()).await + } + } + } + Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, + } + } else { + JobQueue::fail(&jobs, job_id, "transcoding is not enabled".to_string()) + .await; + } + } + JobKind::Enrich { media_ids } => { + // Enrichment job placeholder + JobQueue::complete( + &jobs, + job_id, + serde_json::json!({"media_ids": media_ids.len(), "status": "not_implemented"}), + ) + .await; + } + JobKind::CleanupAnalytics => { + let before = chrono::Utc::now() - chrono::Duration::days(90); + match storage.cleanup_old_events(before).await { + Ok(count) => { + JobQueue::complete( + &jobs, + job_id, + serde_json::json!({"cleaned_up": count}), + ) + .await; + } + Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await, + } + } }; - let _ = 
result; + (); drop(cancel); }) }, @@ -345,6 +414,27 @@ async fn main() -> Result<()> { config.jobs.cache_ttl_secs, )); + // Initialize plugin manager if plugins are enabled (before moving config into Arc) + let plugin_manager = if config.plugins.enabled { + match pinakes_core::plugin::PluginManager::new( + config.plugins.data_dir.clone(), + config.plugins.cache_dir.clone(), + config.plugins.clone().into(), + ) { + Ok(pm) => { + tracing::info!("Plugin manager initialized"); + Some(Arc::new(pm)) + } + Err(e) => { + tracing::warn!("Failed to initialize plugin manager: {}", e); + None + } + } + } else { + tracing::info!("Plugins disabled in configuration"); + None + }; + // Initialize scheduler with cancellation support let shutdown_token = tokio_util::sync::CancellationToken::new(); let config_arc = Arc::new(RwLock::new(config)); @@ -376,6 +466,8 @@ async fn main() -> Result<()> { job_queue, cache, scheduler, + plugin_manager, + transcode_service, }; // Periodic session cleanup (every 15 minutes) diff --git a/crates/pinakes-server/src/routes/analytics.rs b/crates/pinakes-server/src/routes/analytics.rs new file mode 100644 index 0000000..a9da3f3 --- /dev/null +++ b/crates/pinakes-server/src/routes/analytics.rs @@ -0,0 +1,94 @@ +use axum::Json; +use axum::extract::{Extension, Path, Query, State}; +use uuid::Uuid; + +use crate::auth::resolve_user_id; +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::analytics::{UsageEvent, UsageEventType}; +use pinakes_core::model::MediaId; + +const MAX_LIMIT: u64 = 100; + +pub async fn get_most_viewed( + State(state): State, + Query(params): Query, +) -> Result>, ApiError> { + let limit = params.limit.unwrap_or(20).min(MAX_LIMIT); + let results = state.storage.get_most_viewed(limit).await?; + Ok(Json( + results + .into_iter() + .map(|(item, count)| MostViewedResponse { + media: MediaResponse::from(item), + view_count: count, + }) + .collect(), + )) +} + +pub async fn get_recently_viewed( 
+ State(state): State, + Extension(username): Extension, + Query(params): Query, +) -> Result>, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + let limit = params.limit.unwrap_or(20).min(MAX_LIMIT); + let items = state.storage.get_recently_viewed(user_id, limit).await?; + Ok(Json(items.into_iter().map(MediaResponse::from).collect())) +} + +pub async fn record_event( + State(state): State, + Extension(username): Extension, + Json(req): Json, +) -> Result, ApiError> { + let event_type: UsageEventType = req + .event_type + .parse() + .map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?; + let user_id = resolve_user_id(&state.storage, &username).await?; + let event = UsageEvent { + id: Uuid::now_v7(), + media_id: req.media_id.map(MediaId), + user_id: Some(user_id), + event_type, + timestamp: chrono::Utc::now(), + duration_secs: req.duration_secs, + context_json: req.context.map(|v| v.to_string()), + }; + state.storage.record_usage_event(&event).await?; + Ok(Json(serde_json::json!({"recorded": true}))) +} + +pub async fn get_watch_progress( + State(state): State, + Extension(username): Extension, + Path(id): Path, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + let progress = state + .storage + .get_watch_progress(user_id, MediaId(id)) + .await? 
+ .unwrap_or(0.0); + Ok(Json(WatchProgressResponse { + progress_secs: progress, + })) +} + +pub async fn update_watch_progress( + State(state): State, + Extension(username): Extension, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + state + .storage + .update_watch_progress(user_id, MediaId(id), req.progress_secs) + .await?; + Ok(Json(serde_json::json!({"updated": true}))) +} diff --git a/crates/pinakes-server/src/routes/enrichment.rs b/crates/pinakes-server/src/routes/enrichment.rs new file mode 100644 index 0000000..c16751e --- /dev/null +++ b/crates/pinakes-server/src/routes/enrichment.rs @@ -0,0 +1,48 @@ +use axum::Json; +use axum::extract::{Path, State}; +use uuid::Uuid; + +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::model::MediaId; + +pub async fn trigger_enrichment( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + // Submit enrichment as a background job + let job_id = state + .job_queue + .submit(pinakes_core::jobs::JobKind::Enrich { + media_ids: vec![MediaId(id)], + }) + .await; + Ok(Json(serde_json::json!({"job_id": job_id.to_string()}))) +} + +pub async fn get_external_metadata( + State(state): State, + Path(id): Path, +) -> Result>, ApiError> { + let metadata = state.storage.get_external_metadata(MediaId(id)).await?; + Ok(Json( + metadata + .into_iter() + .map(ExternalMetadataResponse::from) + .collect(), + )) +} + +pub async fn batch_enrich( + State(state): State, + Json(req): Json, // Reuse: has media_ids field +) -> Result, ApiError> { + let media_ids: Vec = req.media_ids.into_iter().map(MediaId).collect(); + let job_id = state + .job_queue + .submit(pinakes_core::jobs::JobKind::Enrich { media_ids }) + .await; + Ok(Json(serde_json::json!({"job_id": job_id.to_string()}))) +} diff --git a/crates/pinakes-server/src/routes/integrity.rs b/crates/pinakes-server/src/routes/integrity.rs index 
9ebd096..8ab513b 100644 --- a/crates/pinakes-server/src/routes/integrity.rs +++ b/crates/pinakes-server/src/routes/integrity.rs @@ -26,7 +26,7 @@ pub async fn trigger_verify_integrity( let media_ids = req .media_ids .into_iter() - .map(|id| pinakes_core::model::MediaId(id)) + .map(pinakes_core::model::MediaId) .collect(); let kind = pinakes_core::jobs::JobKind::VerifyIntegrity { media_ids }; let job_id = state.job_queue.submit(kind).await; @@ -94,6 +94,6 @@ pub async fn resolve_orphans( .collect(); let count = pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids) .await - .map_err(|e| ApiError(e))?; + .map_err(ApiError)?; Ok(Json(serde_json::json!({ "resolved": count }))) } diff --git a/crates/pinakes-server/src/routes/mod.rs b/crates/pinakes-server/src/routes/mod.rs index f4ab83e..10c0723 100644 --- a/crates/pinakes-server/src/routes/mod.rs +++ b/crates/pinakes-server/src/routes/mod.rs @@ -1,18 +1,27 @@ +pub mod analytics; pub mod audit; pub mod auth; pub mod collections; pub mod config; pub mod database; pub mod duplicates; +pub mod enrichment; pub mod export; pub mod health; pub mod integrity; pub mod jobs; pub mod media; +pub mod playlists; +pub mod plugins; pub mod saved_searches; pub mod scan; pub mod scheduled_tasks; pub mod search; +pub mod social; pub mod statistics; +pub mod streaming; +pub mod subtitles; pub mod tags; +pub mod transcode; +pub mod users; pub mod webhooks; diff --git a/crates/pinakes-server/src/routes/playlists.rs b/crates/pinakes-server/src/routes/playlists.rs new file mode 100644 index 0000000..efef325 --- /dev/null +++ b/crates/pinakes-server/src/routes/playlists.rs @@ -0,0 +1,208 @@ +use axum::Json; +use axum::extract::{Extension, Path, State}; +use uuid::Uuid; + +use crate::auth::resolve_user_id; +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::model::MediaId; +use pinakes_core::playlists::Playlist; +use pinakes_core::users::UserId; + +/// Check whether a user has 
access to a playlist. +/// +/// * `require_write` – when `true` only the playlist owner is allowed (for +/// mutations such as update, delete, add/remove/reorder items). When `false` +/// the playlist must either be public or owned by the requesting user. +async fn check_playlist_access( + storage: &pinakes_core::storage::DynStorageBackend, + playlist_id: Uuid, + user_id: UserId, + require_write: bool, +) -> Result { + let playlist = storage.get_playlist(playlist_id).await.map_err(ApiError)?; + if require_write { + // Write operations require ownership + if playlist.owner_id != user_id { + return Err(ApiError(pinakes_core::error::PinakesError::Authorization( + "only the playlist owner can modify this playlist".into(), + ))); + } + } else { + // Read operations: allow if public or owner + if !playlist.is_public && playlist.owner_id != user_id { + return Err(ApiError(pinakes_core::error::PinakesError::Authorization( + "playlist is private".into(), + ))); + } + } + Ok(playlist) +} + +pub async fn create_playlist( + State(state): State, + Extension(username): Extension, + Json(req): Json, +) -> Result, ApiError> { + if req.name.is_empty() || req.name.chars().count() > 255 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "playlist name must be 1-255 characters".into(), + ), + )); + } + let owner_id = resolve_user_id(&state.storage, &username).await?; + let playlist = state + .storage + .create_playlist( + owner_id, + &req.name, + req.description.as_deref(), + req.is_public.unwrap_or(false), + req.is_smart.unwrap_or(false), + req.filter_query.as_deref(), + ) + .await?; + Ok(Json(PlaylistResponse::from(playlist))) +} + +pub async fn list_playlists( + State(state): State, + Extension(username): Extension, +) -> Result>, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + // Fetch all playlists and filter to only public ones plus the user's own + let playlists = state.storage.list_playlists(None).await?; + let 
visible: Vec = playlists + .into_iter() + .filter(|p| p.is_public || p.owner_id == user_id) + .map(PlaylistResponse::from) + .collect(); + Ok(Json(visible)) +} + +pub async fn get_playlist( + State(state): State, + Extension(username): Extension, + Path(id): Path, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + let playlist = check_playlist_access(&state.storage, id, user_id, false).await?; + Ok(Json(PlaylistResponse::from(playlist))) +} + +pub async fn update_playlist( + State(state): State, + Extension(username): Extension, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + if let Some(ref name) = req.name + && (name.is_empty() || name.chars().count() > 255) { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "playlist name must be 1-255 characters".into(), + ), + )); + } + let user_id = resolve_user_id(&state.storage, &username).await?; + check_playlist_access(&state.storage, id, user_id, true).await?; + let playlist = state + .storage + .update_playlist( + id, + req.name.as_deref(), + req.description.as_deref(), + req.is_public, + ) + .await?; + Ok(Json(PlaylistResponse::from(playlist))) +} + +pub async fn delete_playlist( + State(state): State, + Extension(username): Extension, + Path(id): Path, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + check_playlist_access(&state.storage, id, user_id, true).await?; + state.storage.delete_playlist(id).await?; + Ok(Json(serde_json::json!({"deleted": true}))) +} + +pub async fn add_item( + State(state): State, + Extension(username): Extension, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + check_playlist_access(&state.storage, id, user_id, true).await?; + let position = match req.position { + Some(p) => p, + None => { + let items = state.storage.get_playlist_items(id).await?; + items.len() as i32 + } + 
}; + state + .storage + .add_to_playlist(id, MediaId(req.media_id), position) + .await?; + Ok(Json(serde_json::json!({"added": true}))) +} + +pub async fn remove_item( + State(state): State, + Extension(username): Extension, + Path((id, media_id)): Path<(Uuid, Uuid)>, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + check_playlist_access(&state.storage, id, user_id, true).await?; + state + .storage + .remove_from_playlist(id, MediaId(media_id)) + .await?; + Ok(Json(serde_json::json!({"removed": true}))) +} + +pub async fn list_items( + State(state): State, + Extension(username): Extension, + Path(id): Path, +) -> Result>, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + check_playlist_access(&state.storage, id, user_id, false).await?; + let items = state.storage.get_playlist_items(id).await?; + Ok(Json(items.into_iter().map(MediaResponse::from).collect())) +} + +pub async fn reorder_item( + State(state): State, + Extension(username): Extension, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + check_playlist_access(&state.storage, id, user_id, true).await?; + state + .storage + .reorder_playlist(id, MediaId(req.media_id), req.new_position) + .await?; + Ok(Json(serde_json::json!({"reordered": true}))) +} + +pub async fn shuffle_playlist( + State(state): State, + Extension(username): Extension, + Path(id): Path, +) -> Result>, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + check_playlist_access(&state.storage, id, user_id, false).await?; + use rand::seq::SliceRandom; + let mut items = state.storage.get_playlist_items(id).await?; + items.shuffle(&mut rand::rng()); + Ok(Json(items.into_iter().map(MediaResponse::from).collect())) +} diff --git a/crates/pinakes-server/src/routes/plugins.rs b/crates/pinakes-server/src/routes/plugins.rs new file mode 100644 index 0000000..efcd217 
--- /dev/null +++ b/crates/pinakes-server/src/routes/plugins.rs @@ -0,0 +1,149 @@ +use axum::Json; +use axum::extract::{Path, State}; + +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +/// List all installed plugins +pub async fn list_plugins( + State(state): State, +) -> Result>, ApiError> { + let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Plugin system is not enabled".to_string(), + )) + })?; + + let plugins = plugin_manager.list_plugins().await; + let mut responses = Vec::with_capacity(plugins.len()); + for meta in plugins { + let enabled = plugin_manager.is_plugin_enabled(&meta.id).await; + responses.push(PluginResponse::new(meta, enabled)); + } + Ok(Json(responses)) +} + +/// Get a specific plugin by ID +pub async fn get_plugin( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Plugin system is not enabled".to_string(), + )) + })?; + + let plugin = plugin_manager.get_plugin(&id).await.ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::NotFound(format!( + "Plugin not found: {}", + id + ))) + })?; + + let enabled = plugin_manager.is_plugin_enabled(&id).await; + Ok(Json(PluginResponse::new(plugin, enabled))) +} + +/// Install a plugin from URL or file path +pub async fn install_plugin( + State(state): State, + Json(req): Json, +) -> Result, ApiError> { + let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Plugin system is not enabled".to_string(), + )) + })?; + + let plugin_id = plugin_manager + .install_plugin(&req.source) + .await + .map_err(|e| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("Failed to install plugin: {}", e), + )) + })?; + + let plugin = 
plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::NotFound( + "Plugin installed but not found".to_string(), + )) + })?; + + let enabled = plugin_manager.is_plugin_enabled(&plugin_id).await; + Ok(Json(PluginResponse::new(plugin, enabled))) +} + +/// Uninstall a plugin +pub async fn uninstall_plugin( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Plugin system is not enabled".to_string(), + )) + })?; + + plugin_manager.uninstall_plugin(&id).await.map_err(|e| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("Failed to uninstall plugin: {}", e), + )) + })?; + + Ok(Json(serde_json::json!({"uninstalled": true}))) +} + +/// Enable or disable a plugin +pub async fn toggle_plugin( + State(state): State, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Plugin system is not enabled".to_string(), + )) + })?; + + if req.enabled { + plugin_manager.enable_plugin(&id).await.map_err(|e| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("Failed to enable plugin: {}", e), + )) + })?; + } else { + plugin_manager.disable_plugin(&id).await.map_err(|e| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("Failed to disable plugin: {}", e), + )) + })?; + } + + Ok(Json(serde_json::json!({ + "id": id, + "enabled": req.enabled + }))) +} + +/// Reload a plugin (for development) +pub async fn reload_plugin( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Plugin system is not enabled".to_string(), + )) + })?; + + 
plugin_manager.reload_plugin(&id).await.map_err(|e| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("Failed to reload plugin: {}", e), + )) + })?; + + Ok(Json(serde_json::json!({"reloaded": true}))) +} diff --git a/crates/pinakes-server/src/routes/social.rs b/crates/pinakes-server/src/routes/social.rs new file mode 100644 index 0000000..0c0a31d --- /dev/null +++ b/crates/pinakes-server/src/routes/social.rs @@ -0,0 +1,199 @@ +use axum::Json; +use axum::extract::{Extension, Path, Query, State}; +use serde::Deserialize; +use uuid::Uuid; + +use crate::auth::resolve_user_id; +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::model::{MediaId, Pagination}; + +#[derive(Deserialize)] +pub struct ShareLinkQuery { + pub password: Option, +} + +// ===== Ratings ===== + +pub async fn rate_media( + State(state): State, + Extension(username): Extension, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + if req.stars < 1 || req.stars > 5 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "stars must be between 1 and 5".into(), + ), + )); + } + let user_id = resolve_user_id(&state.storage, &username).await?; + let rating = state + .storage + .rate_media(user_id, MediaId(id), req.stars, req.review_text.as_deref()) + .await?; + Ok(Json(RatingResponse::from(rating))) +} + +pub async fn get_media_ratings( + State(state): State, + Path(id): Path, +) -> Result>, ApiError> { + let ratings = state.storage.get_media_ratings(MediaId(id)).await?; + Ok(Json( + ratings.into_iter().map(RatingResponse::from).collect(), + )) +} + +// ===== Comments ===== + +pub async fn add_comment( + State(state): State, + Extension(username): Extension, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let char_count = req.text.chars().count(); + if char_count == 0 || char_count > 10_000 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "comment 
text must be 1-10000 characters".into(), + ), + )); + } + let user_id = resolve_user_id(&state.storage, &username).await?; + let comment = state + .storage + .add_comment(user_id, MediaId(id), &req.text, req.parent_id) + .await?; + Ok(Json(CommentResponse::from(comment))) +} + +pub async fn get_media_comments( + State(state): State, + Path(id): Path, +) -> Result>, ApiError> { + let comments = state.storage.get_media_comments(MediaId(id)).await?; + Ok(Json( + comments.into_iter().map(CommentResponse::from).collect(), + )) +} + +// ===== Favorites ===== + +pub async fn add_favorite( + State(state): State, + Extension(username): Extension, + Json(req): Json, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + state + .storage + .add_favorite(user_id, MediaId(req.media_id)) + .await?; + Ok(Json(serde_json::json!({"added": true}))) +} + +pub async fn remove_favorite( + State(state): State, + Extension(username): Extension, + Path(media_id): Path, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + state + .storage + .remove_favorite(user_id, MediaId(media_id)) + .await?; + Ok(Json(serde_json::json!({"removed": true}))) +} + +pub async fn list_favorites( + State(state): State, + Extension(username): Extension, +) -> Result>, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + let items = state + .storage + .get_user_favorites(user_id, &Pagination::default()) + .await?; + Ok(Json(items.into_iter().map(MediaResponse::from).collect())) +} + +// ===== Share Links ===== + +pub async fn create_share_link( + State(state): State, + Extension(username): Extension, + Json(req): Json, +) -> Result, ApiError> { + let user_id = resolve_user_id(&state.storage, &username).await?; + let token = uuid::Uuid::now_v7().to_string().replace('-', ""); + let password_hash = match req.password.as_ref() { + Some(p) => 
Some(pinakes_core::users::auth::hash_password(p).map_err(ApiError)?), + None => None, + }; + const MAX_EXPIRY_HOURS: u64 = 8760; // 1 year + if let Some(h) = req.expires_in_hours + && h > MAX_EXPIRY_HOURS { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation(format!( + "expires_in_hours cannot exceed {}", + MAX_EXPIRY_HOURS + )), + )); + } + let expires_at = req + .expires_in_hours + .map(|h| chrono::Utc::now() + chrono::Duration::hours(h as i64)); + let link = state + .storage + .create_share_link( + MediaId(req.media_id), + user_id, + &token, + password_hash.as_deref(), + expires_at, + ) + .await?; + Ok(Json(ShareLinkResponse::from(link))) +} + +pub async fn access_shared_media( + State(state): State, + Path(token): Path, + Query(query): Query, +) -> Result, ApiError> { + let link = state.storage.get_share_link(&token).await?; + // Check expiration + if let Some(expires) = link.expires_at + && chrono::Utc::now() > expires { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "share link has expired".into(), + ), + )); + } + // Verify password if set + if let Some(ref hash) = link.password_hash { + let password = match query.password.as_deref() { + Some(p) => p, + None => { + return Err(ApiError(pinakes_core::error::PinakesError::Authentication( + "password required for this share link".into(), + ))); + } + }; + let valid = pinakes_core::users::auth::verify_password(password, hash).unwrap_or(false); + if !valid { + return Err(ApiError(pinakes_core::error::PinakesError::Authentication( + "invalid share link password".into(), + ))); + } + } + state.storage.increment_share_views(&token).await?; + let item = state.storage.get_media(link.media_id).await?; + Ok(Json(MediaResponse::from(item))) +} diff --git a/crates/pinakes-server/src/routes/streaming.rs b/crates/pinakes-server/src/routes/streaming.rs new file mode 100644 index 0000000..0ab5fb8 --- /dev/null +++ b/crates/pinakes-server/src/routes/streaming.rs @@ -0,0 
+1,238 @@ +use axum::extract::{Path, State}; +use axum::http::StatusCode; +use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode}; +use uuid::Uuid; + +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::model::MediaId; +use pinakes_core::transcode::{estimate_bandwidth, parse_resolution}; + +fn escape_xml(s: &str) -> String { + s.replace('&', "&") + .replace('<', "<") + .replace('>', ">") + .replace('"', """) + .replace('\'', "'") +} + +pub async fn hls_master_playlist( + State(state): State, + Path(id): Path, +) -> Result { + // Verify media exists + let _item = state.storage.get_media(MediaId(id)).await?; + + let config = state.config.read().await; + let profiles = &config.transcoding.profiles; + + let mut playlist = String::from("#EXTM3U\n#EXT-X-VERSION:3\n\n"); + + for profile in profiles { + let (w, h) = parse_resolution(&profile.max_resolution); + let bandwidth = estimate_bandwidth(profile); + let encoded_name = utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string(); + playlist.push_str(&format!( + "#EXT-X-STREAM-INF:BANDWIDTH={bandwidth},RESOLUTION={w}x{h}\n\ + /api/v1/media/{id}/stream/hls/{encoded_name}/playlist.m3u8\n\n", + )); + } + + Ok(axum::response::Response::builder() + .header("Content-Type", "application/vnd.apple.mpegurl") + .body(axum::body::Body::from(playlist)) + .unwrap()) +} + +pub async fn hls_variant_playlist( + State(state): State, + Path((id, profile)): Path<(Uuid, String)>, +) -> Result { + let item = state.storage.get_media(MediaId(id)).await?; + let duration = item.duration_secs.unwrap_or(0.0); + if duration <= 0.0 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "cannot generate HLS playlist for media with unknown or zero duration".into(), + ), + )); + } + let segment_duration = 10.0; + let num_segments = (duration / segment_duration).ceil() as usize; + + let mut playlist = String::from( + 
"#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:10\n#EXT-X-MEDIA-SEQUENCE:0\n", + ); + for i in 0..num_segments.max(1) { + let seg_dur = if i == num_segments.saturating_sub(1) && duration > 0.0 { + duration - (i as f64 * segment_duration) + } else { + segment_duration + }; + playlist.push_str(&format!("#EXTINF:{seg_dur:.3},\n")); + playlist.push_str(&format!( + "/api/v1/media/{id}/stream/hls/{profile}/segment{i}.ts\n" + )); + } + playlist.push_str("#EXT-X-ENDLIST\n"); + + Ok(axum::response::Response::builder() + .header("Content-Type", "application/vnd.apple.mpegurl") + .body(axum::body::Body::from(playlist)) + .unwrap()) +} + +pub async fn hls_segment( + State(state): State, + Path((id, profile, segment)): Path<(Uuid, String, String)>, +) -> Result { + // Strict validation: reject path traversal, null bytes, leading dots + if segment.is_empty() + || segment.starts_with('.') + || segment.contains('\0') + || segment.contains("..") + || segment.contains('/') + || segment.contains('\\') + { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation("invalid segment name".into()), + )); + } + + let media_id = MediaId(id); + + // Look for an active/completed transcode session + if let Some(transcode_service) = &state.transcode_service + && let Some(session) = transcode_service.find_session(media_id, &profile).await { + let segment_path = session.cache_path.join(&segment); + + if segment_path.exists() { + let data = tokio::fs::read(&segment_path).await.map_err(|e| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("failed to read segment: {}", e), + )) + })?; + + return Ok(axum::response::Response::builder() + .header("Content-Type", "video/MP2T") + .body(axum::body::Body::from(data)) + .unwrap()); + } + + // Session exists but segment not ready yet + return Ok(axum::response::Response::builder() + .status(StatusCode::ACCEPTED) + .header("Retry-After", "2") + .body(axum::body::Body::from("segment not yet available")) + 
.unwrap()); + } + + Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "no transcode session found; start a transcode first via POST /media/{id}/transcode" + .into(), + ), + )) +} + +pub async fn dash_manifest( + State(state): State, + Path(id): Path, +) -> Result { + let item = state.storage.get_media(MediaId(id)).await?; + let duration = item.duration_secs.unwrap_or(0.0); + if duration <= 0.0 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "cannot generate DASH manifest for media with unknown or zero duration".into(), + ), + )); + } + let hours = (duration / 3600.0) as u32; + let minutes = ((duration % 3600.0) / 60.0) as u32; + let seconds = duration % 60.0; + + let config = state.config.read().await; + let profiles = &config.transcoding.profiles; + + let mut representations = String::new(); + for profile in profiles { + let (w, h) = parse_resolution(&profile.max_resolution); + let bandwidth = estimate_bandwidth(profile); + let xml_name = escape_xml(&profile.name); + let url_name = utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string(); + representations.push_str(&format!( + r#" + + +"#, + )); + } + + let mpd = format!( + r#" + + + +{representations} + +"# + ); + + Ok(axum::response::Response::builder() + .header("Content-Type", "application/dash+xml") + .body(axum::body::Body::from(mpd)) + .unwrap()) +} + +pub async fn dash_segment( + State(state): State, + Path((id, profile, segment)): Path<(Uuid, String, String)>, +) -> Result { + // Strict validation: reject path traversal, null bytes, leading dots + if segment.is_empty() + || segment.starts_with('.') + || segment.contains('\0') + || segment.contains("..") + || segment.contains('/') + || segment.contains('\\') + { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation("invalid segment name".into()), + )); + } + + let media_id = MediaId(id); + + if let Some(transcode_service) = &state.transcode_service + && let Some(session) 
= transcode_service.find_session(media_id, &profile).await { + let segment_path = session.cache_path.join(&segment); + + if segment_path.exists() { + let data = tokio::fs::read(&segment_path).await.map_err(|e| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("failed to read segment: {}", e), + )) + })?; + + return Ok(axum::response::Response::builder() + .header("Content-Type", "video/mp4") + .body(axum::body::Body::from(data)) + .unwrap()); + } + + return Ok(axum::response::Response::builder() + .status(StatusCode::ACCEPTED) + .header("Retry-After", "2") + .body(axum::body::Body::from("segment not yet available")) + .unwrap()); + } + + Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "no transcode session found; start a transcode first via POST /media/{id}/transcode" + .into(), + ), + )) +} diff --git a/crates/pinakes-server/src/routes/subtitles.rs b/crates/pinakes-server/src/routes/subtitles.rs new file mode 100644 index 0000000..7577135 --- /dev/null +++ b/crates/pinakes-server/src/routes/subtitles.rs @@ -0,0 +1,123 @@ +use axum::Json; +use axum::extract::{Path, State}; +use uuid::Uuid; + +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::model::MediaId; +use pinakes_core::subtitles::{Subtitle, SubtitleFormat}; + +pub async fn list_subtitles( + State(state): State, + Path(id): Path, +) -> Result>, ApiError> { + let subtitles = state.storage.get_media_subtitles(MediaId(id)).await?; + Ok(Json( + subtitles.into_iter().map(SubtitleResponse::from).collect(), + )) +} + +pub async fn add_subtitle( + State(state): State, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let format: SubtitleFormat = req + .format + .parse() + .map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?; + let is_embedded = req.is_embedded.unwrap_or(false); + if !is_embedded && req.file_path.is_none() { + return Err(ApiError( + 
pinakes_core::error::PinakesError::InvalidOperation( + "file_path is required for non-embedded subtitles".into(), + ), + )); + } + if is_embedded && req.track_index.is_none() { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "track_index is required for embedded subtitles".into(), + ), + )); + } + let subtitle = Subtitle { + id: Uuid::now_v7(), + media_id: MediaId(id), + language: req.language, + format, + file_path: req.file_path.map(std::path::PathBuf::from), + is_embedded, + track_index: req.track_index, + offset_ms: req.offset_ms.unwrap_or(0), + created_at: chrono::Utc::now(), + }; + state.storage.add_subtitle(&subtitle).await?; + Ok(Json(SubtitleResponse::from(subtitle))) +} + +pub async fn delete_subtitle( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + state.storage.delete_subtitle(id).await?; + Ok(Json(serde_json::json!({"deleted": true}))) +} + +pub async fn get_subtitle_content( + State(state): State, + Path((media_id, subtitle_id)): Path<(Uuid, Uuid)>, +) -> Result { + let subtitles = state.storage.get_media_subtitles(MediaId(media_id)).await?; + let subtitle = subtitles + .into_iter() + .find(|s| s.id == subtitle_id) + .ok_or_else(|| { + ApiError(pinakes_core::error::PinakesError::NotFound(format!( + "subtitle {subtitle_id}" + ))) + })?; + + if let Some(ref path) = subtitle.file_path { + let content = tokio::fs::read_to_string(path).await.map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + ApiError(pinakes_core::error::PinakesError::FileNotFound( + path.clone(), + )) + } else { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + format!("failed to read subtitle file {}: {}", path.display(), e), + )) + } + })?; + let content_type = match subtitle.format { + SubtitleFormat::Vtt => "text/vtt", + SubtitleFormat::Srt => "application/x-subrip", + _ => "text/plain", + }; + Ok(axum::response::Response::builder() + .header("Content-Type", content_type) + 
.body(axum::body::Body::from(content)) + .unwrap()) + } else { + Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "subtitle is embedded, no file to serve".into(), + ), + )) + } +} + +pub async fn update_offset( + State(state): State, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + state + .storage + .update_subtitle_offset(id, req.offset_ms) + .await?; + Ok(Json(serde_json::json!({"updated": true}))) +} diff --git a/crates/pinakes-server/src/routes/transcode.rs b/crates/pinakes-server/src/routes/transcode.rs new file mode 100644 index 0000000..902f3b7 --- /dev/null +++ b/crates/pinakes-server/src/routes/transcode.rs @@ -0,0 +1,63 @@ +use axum::Json; +use axum::extract::{Path, Query, State}; +use uuid::Uuid; + +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::model::MediaId; + +pub async fn start_transcode( + State(state): State, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let job_id = state + .job_queue + .submit(pinakes_core::jobs::JobKind::Transcode { + media_id: MediaId(id), + profile: req.profile, + }) + .await; + Ok(Json(serde_json::json!({"job_id": job_id.to_string()}))) +} + +pub async fn get_session( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + let session = state.storage.get_transcode_session(id).await?; + Ok(Json(TranscodeSessionResponse::from(session))) +} + +pub async fn list_sessions( + State(state): State, + Query(params): Query, +) -> Result>, ApiError> { + let _ = params; // reserved for future filtering + let sessions = state.storage.list_transcode_sessions(None).await?; + Ok(Json( + sessions + .into_iter() + .map(TranscodeSessionResponse::from) + .collect(), + )) +} + +pub async fn cancel_session( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + if let Some(transcode_service) = &state.transcode_service { + transcode_service + .cancel_transcode(id, &state.storage) + .await?; + } else { + state + 
.storage + .update_transcode_status(id, pinakes_core::transcode::TranscodeStatus::Cancelled, 0.0) + .await?; + } + Ok(Json(serde_json::json!({"cancelled": true}))) +} diff --git a/crates/pinakes-server/src/routes/users.rs b/crates/pinakes-server/src/routes/users.rs new file mode 100644 index 0000000..2618efb --- /dev/null +++ b/crates/pinakes-server/src/routes/users.rs @@ -0,0 +1,191 @@ +use axum::Json; +use axum::extract::{Path, State}; + +use crate::dto::*; +use crate::error::ApiError; +use crate::state::AppState; + +use pinakes_core::users::{CreateUserRequest, UpdateUserRequest, UserId}; + +/// List all users (admin only) +pub async fn list_users( + State(state): State, +) -> Result>, ApiError> { + let users = state.storage.list_users().await?; + Ok(Json(users.into_iter().map(UserResponse::from).collect())) +} + +/// Create a new user (admin only) +pub async fn create_user( + State(state): State, + Json(req): Json, +) -> Result, ApiError> { + // Validate username + if req.username.is_empty() || req.username.len() > 255 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "username must be 1-255 characters".into(), + ), + )); + } + + // Validate password + if req.password.len() < 8 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "password must be at least 8 characters".into(), + ), + )); + } + + // Hash password + let password_hash = pinakes_core::users::auth::hash_password(&req.password)?; + + // Create user - rely on DB unique constraint for username to avoid TOCTOU race + let user = state + .storage + .create_user(&req.username, &password_hash, req.role, req.profile) + .await + .map_err(|e| { + // Map unique constraint violations to a user-friendly conflict error + let err_str = e.to_string(); + if err_str.contains("UNIQUE") + || err_str.contains("unique") + || err_str.contains("duplicate key") + { + ApiError(pinakes_core::error::PinakesError::DuplicateHash( + "username already exists".into(), + )) 
+ } else { + ApiError(e) + } + })?; + + Ok(Json(UserResponse::from(user))) +} + +/// Get a specific user by ID +pub async fn get_user( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + let user_id: UserId = id.parse::().map(UserId::from).map_err(|_| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Invalid user ID".into(), + )) + })?; + + let user = state.storage.get_user(user_id).await?; + Ok(Json(UserResponse::from(user))) +} + +/// Update a user +pub async fn update_user( + State(state): State, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let user_id: UserId = id.parse::().map(UserId::from).map_err(|_| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Invalid user ID".into(), + )) + })?; + + // Hash password if provided + let password_hash = if let Some(ref password) = req.password { + if password.len() < 8 { + return Err(ApiError( + pinakes_core::error::PinakesError::InvalidOperation( + "password must be at least 8 characters".into(), + ), + )); + } + Some(pinakes_core::users::auth::hash_password(password)?) 
+ } else { + None + }; + + let user = state + .storage + .update_user(user_id, password_hash.as_deref(), req.role, req.profile) + .await?; + + Ok(Json(UserResponse::from(user))) +} + +/// Delete a user (admin only) +pub async fn delete_user( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + let user_id: UserId = id.parse::().map(UserId::from).map_err(|_| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Invalid user ID".into(), + )) + })?; + + state.storage.delete_user(user_id).await?; + Ok(Json(serde_json::json!({"deleted": true}))) +} + +/// Get user's accessible libraries +pub async fn get_user_libraries( + State(state): State, + Path(id): Path, +) -> Result>, ApiError> { + let user_id: UserId = id.parse::().map(UserId::from).map_err(|_| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Invalid user ID".into(), + )) + })?; + + let libraries = state.storage.get_user_libraries(user_id).await?; + Ok(Json( + libraries + .into_iter() + .map(UserLibraryResponse::from) + .collect(), + )) +} + +/// Grant library access to a user (admin only) +pub async fn grant_library_access( + State(state): State, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let user_id: UserId = id.parse::().map(UserId::from).map_err(|_| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Invalid user ID".into(), + )) + })?; + + state + .storage + .grant_library_access(user_id, &req.root_path, req.permission) + .await?; + + Ok(Json(serde_json::json!({"granted": true}))) +} + +/// Revoke library access from a user (admin only) +/// +/// Uses a JSON body instead of a path parameter because root_path may contain +/// slashes that conflict with URL routing. 
+pub async fn revoke_library_access( + State(state): State, + Path(id): Path, + Json(req): Json, +) -> Result, ApiError> { + let user_id: UserId = id.parse::().map(UserId::from).map_err(|_| { + ApiError(pinakes_core::error::PinakesError::InvalidOperation( + "Invalid user ID".into(), + )) + })?; + + state + .storage + .revoke_library_access(user_id, &req.root_path) + .await?; + Ok(Json(serde_json::json!({"revoked": true}))) +} diff --git a/crates/pinakes-server/src/state.rs b/crates/pinakes-server/src/state.rs index aa5d367..3d67a08 100644 --- a/crates/pinakes-server/src/state.rs +++ b/crates/pinakes-server/src/state.rs @@ -7,9 +7,11 @@ use tokio::sync::RwLock; use pinakes_core::cache::CacheLayer; use pinakes_core::config::{Config, UserRole}; use pinakes_core::jobs::JobQueue; +use pinakes_core::plugin::PluginManager; use pinakes_core::scan::ScanProgress; use pinakes_core::scheduler::TaskScheduler; use pinakes_core::storage::DynStorageBackend; +use pinakes_core::transcode::TranscodeService; /// Default session TTL: 24 hours. 
pub const SESSION_TTL_SECS: i64 = 24 * 60 * 60; @@ -47,4 +49,6 @@ pub struct AppState { pub job_queue: Arc, pub cache: Arc, pub scheduler: Arc, + pub plugin_manager: Option>, + pub transcode_service: Option>, } diff --git a/crates/pinakes-server/tests/api_test.rs b/crates/pinakes-server/tests/api_test.rs index dc65955..151711a 100644 --- a/crates/pinakes-server/tests/api_test.rs +++ b/crates/pinakes-server/tests/api_test.rs @@ -10,8 +10,10 @@ use tower::ServiceExt; use pinakes_core::cache::CacheLayer; use pinakes_core::config::{ - AccountsConfig, Config, DirectoryConfig, JobsConfig, ScanningConfig, ServerConfig, - SqliteConfig, StorageBackendType, StorageConfig, ThumbnailConfig, UiConfig, WebhookConfig, + AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig, + JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType, + StorageConfig, ThumbnailConfig, TranscodingConfig, UiConfig, UserAccount, UserRole, + WebhookConfig, }; use pinakes_core::jobs::JobQueue; use pinakes_core::storage::StorageBackend; @@ -41,12 +43,57 @@ fn post_json(uri: &str, body: &str) -> Request { req } -async fn setup_app() -> axum::Router { - let backend = SqliteBackend::in_memory().expect("in-memory SQLite"); - backend.run_migrations().await.expect("migrations"); - let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend; +/// Build a GET request with Bearer auth +fn get_authed(uri: &str, token: &str) -> Request { + let mut req = Request::builder() + .uri(uri) + .header("authorization", format!("Bearer {}", token)) + .body(Body::empty()) + .unwrap(); + req.extensions_mut().insert(test_addr()); + req +} - let config = Config { +/// Build a POST JSON request with Bearer auth +fn post_json_authed(uri: &str, body: &str, token: &str) -> Request { + let mut req = Request::builder() + .method("POST") + .uri(uri) + .header("content-type", "application/json") + .header("authorization", format!("Bearer {}", token)) + 
.body(Body::from(body.to_string())) + .unwrap(); + req.extensions_mut().insert(test_addr()); + req +} + +/// Build a DELETE request with Bearer auth +fn delete_authed(uri: &str, token: &str) -> Request { + let mut req = Request::builder() + .method("DELETE") + .uri(uri) + .header("authorization", format!("Bearer {}", token)) + .body(Body::empty()) + .unwrap(); + req.extensions_mut().insert(test_addr()); + req +} + +/// Build a PATCH JSON request with Bearer auth +fn patch_json_authed(uri: &str, body: &str, token: &str) -> Request { + let mut req = Request::builder() + .method("PATCH") + .uri(uri) + .header("content-type", "application/json") + .header("authorization", format!("Bearer {}", token)) + .body(Body::from(body.to_string())) + .unwrap(); + req.extensions_mut().insert(test_addr()); + req +} + +fn default_config() -> Config { + Config { storage: StorageConfig { backend: StorageBackendType::Sqlite, sqlite: Some(SqliteConfig { @@ -72,7 +119,20 @@ async fn setup_app() -> axum::Router { thumbnails: ThumbnailConfig::default(), webhooks: Vec::::new(), scheduled_tasks: vec![], - }; + plugins: PluginsConfig::default(), + transcoding: TranscodingConfig::default(), + enrichment: EnrichmentConfig::default(), + cloud: CloudConfig::default(), + analytics: AnalyticsConfig::default(), + } +} + +async fn setup_app() -> axum::Router { + let backend = SqliteBackend::in_memory().expect("in-memory SQLite"); + backend.run_migrations().await.expect("migrations"); + let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend; + + let config = default_config(); let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {})); let config = Arc::new(RwLock::new(config)); @@ -92,11 +152,117 @@ async fn setup_app() -> axum::Router { job_queue, cache: Arc::new(CacheLayer::new(60)), scheduler: Arc::new(scheduler), + plugin_manager: None, + transcode_service: None, }; pinakes_server::app::create_router(state) } +/// Hash a password for test user 
accounts +fn hash_password(password: &str) -> String { + pinakes_core::users::auth::hash_password(password).unwrap() +} + +/// Set up an app with accounts enabled and three pre-seeded users. +/// Returns (Router, admin_token, editor_token, viewer_token). +async fn setup_app_with_auth() -> (axum::Router, String, String, String) { + let backend = SqliteBackend::in_memory().expect("in-memory SQLite"); + backend.run_migrations().await.expect("migrations"); + let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend; + + // Create users in database so resolve_user_id works + let users_to_create = vec![ + ("admin", "adminpass", UserRole::Admin), + ("editor", "editorpass", UserRole::Editor), + ("viewer", "viewerpass", UserRole::Viewer), + ]; + for (username, password, role) in &users_to_create { + let password_hash = hash_password(password); + storage + .create_user(username, &password_hash, *role, None) + .await + .expect("create user"); + } + + let mut config = default_config(); + config.accounts.enabled = true; + config.accounts.users = vec![ + UserAccount { + username: "admin".to_string(), + password_hash: hash_password("adminpass"), + role: UserRole::Admin, + }, + UserAccount { + username: "editor".to_string(), + password_hash: hash_password("editorpass"), + role: UserRole::Editor, + }, + UserAccount { + username: "viewer".to_string(), + password_hash: hash_password("viewerpass"), + role: UserRole::Viewer, + }, + ]; + + let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {})); + let config = Arc::new(RwLock::new(config)); + let scheduler = pinakes_core::scheduler::TaskScheduler::new( + job_queue.clone(), + tokio_util::sync::CancellationToken::new(), + config.clone(), + None, + ); + + let state = pinakes_server::state::AppState { + storage, + config, + config_path: None, + scan_progress: pinakes_core::scan::ScanProgress::new(), + sessions: Arc::new(RwLock::new(std::collections::HashMap::new())), + job_queue, + cache: 
Arc::new(CacheLayer::new(60)), + scheduler: Arc::new(scheduler), + plugin_manager: None, + transcode_service: None, + }; + + let app = pinakes_server::app::create_router(state); + + // Login each user to get tokens + let admin_token = login_user(app.clone(), "admin", "adminpass").await; + let editor_token = login_user(app.clone(), "editor", "editorpass").await; + let viewer_token = login_user(app.clone(), "viewer", "viewerpass").await; + + (app, admin_token, editor_token, viewer_token) +} + +async fn login_user(app: axum::Router, username: &str, password: &str) -> String { + let body = format!(r#"{{"username":"{}","password":"{}"}}"#, username, password); + let response = app + .oneshot(post_json("/api/v1/auth/login", &body)) + .await + .unwrap(); + assert_eq!( + response.status(), + StatusCode::OK, + "login failed for user {}", + username + ); + let body = response.into_body().collect().await.unwrap().to_bytes(); + let result: serde_json::Value = serde_json::from_slice(&body).unwrap(); + result["token"].as_str().unwrap().to_string() +} + +async fn response_body(response: axum::response::Response) -> serde_json::Value { + let body = response.into_body().collect().await.unwrap().to_bytes(); + serde_json::from_slice(&body).unwrap_or(serde_json::Value::Null) +} + +// =================================================================== +// Existing tests (no auth) +// =================================================================== + #[tokio::test] async fn test_list_media_empty() { let app = setup_app().await; @@ -210,3 +376,623 @@ async fn test_scheduled_tasks_endpoint() { assert!(tasks[0]["name"].is_string()); assert!(tasks[0]["schedule"].is_string()); } + +#[tokio::test] +async fn test_user_management_crud() { + let app = setup_app().await; + + // Create a user + let response = app + .clone() + .oneshot(post_json( + "/api/v1/users", + r#"{"username":"testuser","password":"password123","role":"viewer"}"#, + )) + .await + .unwrap(); + + 
assert_eq!(response.status(), StatusCode::OK); + let body = response.into_body().collect().await.unwrap().to_bytes(); + let user: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert_eq!(user["username"], "testuser"); + assert_eq!(user["role"], "viewer"); + let user_id = user["id"].as_str().unwrap(); + + // List users + let response = app.clone().oneshot(get("/api/v1/users")).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + let body = response.into_body().collect().await.unwrap().to_bytes(); + let users: Vec = serde_json::from_slice(&body).unwrap(); + assert_eq!(users.len(), 1); + assert_eq!(users[0]["username"], "testuser"); + + // Get specific user + let response = app + .clone() + .oneshot(get(&format!("/api/v1/users/{}", user_id))) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + let body = response.into_body().collect().await.unwrap().to_bytes(); + let retrieved_user: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert_eq!(retrieved_user["username"], "testuser"); + + // Delete user + let mut req = Request::builder() + .method("DELETE") + .uri(&format!("/api/v1/users/{}", user_id)) + .body(Body::empty()) + .unwrap(); + req.extensions_mut().insert(test_addr()); + + let response = app.clone().oneshot(req).await.unwrap(); + assert_eq!(response.status(), StatusCode::OK); + + // Verify user is deleted + let response = app + .oneshot(get(&format!("/api/v1/users/{}", user_id))) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn test_health_endpoint() { + let app = setup_app().await; + + // Health endpoint should be publicly accessible + let response = app.oneshot(get("/api/v1/health")).await.unwrap(); + assert_eq!(response.status(), StatusCode::OK); +} + +#[tokio::test] +async fn test_user_duplicate_username() { + let app = setup_app().await; + + // Create first user + let response = app + .clone() + .oneshot(post_json( + 
"/api/v1/users", + r#"{"username":"duplicate","password":"password1","role":"viewer"}"#, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + + // Try to create user with same username + let response = app + .oneshot(post_json( + "/api/v1/users", + r#"{"username":"duplicate","password":"password2","role":"viewer"}"#, + )) + .await + .unwrap(); + + // Should fail with conflict (409) for duplicate username + assert_eq!(response.status(), StatusCode::CONFLICT); +} + +// =================================================================== +// Authentication tests +// =================================================================== + +#[tokio::test] +async fn test_unauthenticated_request_rejected() { + let (app, _, _, _) = setup_app_with_auth().await; + + // Request without Bearer token + let response = app.oneshot(get("/api/v1/media")).await.unwrap(); + assert_eq!(response.status(), StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn test_invalid_token_rejected() { + let (app, _, _, _) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed("/api/v1/media", "totally-invalid-token")) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn test_login_valid_credentials() { + let (app, _, _, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json( + "/api/v1/auth/login", + r#"{"username":"admin","password":"adminpass"}"#, + )) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + assert!(body["token"].is_string()); + assert_eq!(body["username"], "admin"); + assert_eq!(body["role"], "admin"); +} + +#[tokio::test] +async fn test_login_invalid_password() { + let (app, _, _, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json( + "/api/v1/auth/login", + r#"{"username":"admin","password":"wrongpassword"}"#, + )) + .await + .unwrap(); + + 
assert_eq!(response.status(), StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn test_login_unknown_user() { + let (app, _, _, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json( + "/api/v1/auth/login", + r#"{"username":"nonexistent","password":"whatever"}"#, + )) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn test_auth_me_endpoint() { + let (app, admin_token, _, _) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed("/api/v1/auth/me", &admin_token)) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + assert_eq!(body["username"], "admin"); + assert_eq!(body["role"], "admin"); +} + +#[tokio::test] +async fn test_logout() { + let (app, admin_token, _, _) = setup_app_with_auth().await; + + // Logout + let response = app + .clone() + .oneshot(post_json_authed("/api/v1/auth/logout", "", &admin_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + + // Subsequent requests with same token should fail + let response = app + .oneshot(get_authed("/api/v1/media", &admin_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::UNAUTHORIZED); +} + +// =================================================================== +// Authorization / RBAC tests +// =================================================================== + +#[tokio::test] +async fn test_viewer_cannot_access_editor_routes() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + // POST /tags is an editor-only route + let response = app + .oneshot(post_json_authed( + "/api/v1/tags", + r#"{"name":"test"}"#, + &viewer_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); +} + +#[tokio::test] +async fn test_viewer_cannot_access_admin_routes() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + // GET /users is an 
admin-only route + let response = app + .oneshot(get_authed("/api/v1/users", &viewer_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); +} + +#[tokio::test] +async fn test_editor_cannot_access_admin_routes() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed("/api/v1/users", &editor_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); +} + +#[tokio::test] +async fn test_editor_can_write() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json_authed( + "/api/v1/tags", + r#"{"name":"EditorTag"}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); +} + +#[tokio::test] +async fn test_admin_can_access_all() { + let (app, admin_token, _, _) = setup_app_with_auth().await; + + // Viewer route + let response = app + .clone() + .oneshot(get_authed("/api/v1/media", &admin_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + + // Editor route + let response = app + .clone() + .oneshot(post_json_authed( + "/api/v1/tags", + r#"{"name":"AdminTag"}"#, + &admin_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + + // Admin route + let response = app + .oneshot(get_authed("/api/v1/users", &admin_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); +} + +// =================================================================== +// Phase 2 feature tests: Social +// =================================================================== + +#[tokio::test] +async fn test_rating_invalid_stars_zero() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json_authed( + "/api/v1/media/00000000-0000-0000-0000-000000000000/ratings", + r#"{"stars":0}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), 
StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn test_rating_invalid_stars_six() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json_authed( + "/api/v1/media/00000000-0000-0000-0000-000000000000/ratings", + r#"{"stars":6}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn test_comment_empty_text() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json_authed( + "/api/v1/media/00000000-0000-0000-0000-000000000000/comments", + r#"{"text":""}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn test_favorites_list_empty() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed("/api/v1/favorites", &viewer_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + assert!(body.as_array().unwrap().is_empty()); +} + +// =================================================================== +// Phase 2 feature tests: Playlists +// =================================================================== + +#[tokio::test] +async fn test_playlist_crud() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + // Create + let response = app + .clone() + .oneshot(post_json_authed( + "/api/v1/playlists", + r#"{"name":"My Playlist"}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + let playlist_id = body["id"].as_str().unwrap().to_string(); + assert_eq!(body["name"], "My Playlist"); + + // List + let response = app + .clone() + .oneshot(get_authed("/api/v1/playlists", &editor_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = 
response_body(response).await; + assert_eq!(body.as_array().unwrap().len(), 1); + + // Get + let response = app + .clone() + .oneshot(get_authed( + &format!("/api/v1/playlists/{}", playlist_id), + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + + // Update + let response = app + .clone() + .oneshot(patch_json_authed( + &format!("/api/v1/playlists/{}", playlist_id), + r#"{"name":"Updated Playlist","description":"A test description"}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + assert_eq!(body["name"], "Updated Playlist"); + + // Delete + let response = app + .clone() + .oneshot(delete_authed( + &format!("/api/v1/playlists/{}", playlist_id), + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); +} + +#[tokio::test] +async fn test_playlist_empty_name() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + let response = app + .oneshot(post_json_authed( + "/api/v1/playlists", + r#"{"name":""}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); +} + +// =================================================================== +// Phase 2 feature tests: Analytics +// =================================================================== + +#[tokio::test] +async fn test_most_viewed_empty() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed("/api/v1/analytics/most-viewed", &viewer_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + assert!(body.as_array().unwrap().is_empty()); +} + +#[tokio::test] +async fn test_record_event_and_query() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + // Record an event + let response = app + .clone() + .oneshot(post_json_authed( + 
"/api/v1/analytics/events", + r#"{"event_type":"view","duration_secs":5.0}"#, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + assert_eq!(body["recorded"], true); +} + +// =================================================================== +// Phase 2 feature tests: Streaming/Transcode +// =================================================================== + +#[tokio::test] +async fn test_transcode_session_not_found() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed( + "/api/v1/transcode/00000000-0000-0000-0000-000000000000", + &viewer_token, + )) + .await + .unwrap(); + // Should be 404 or 500 (not found in DB) + assert!( + response.status() == StatusCode::NOT_FOUND + || response.status() == StatusCode::INTERNAL_SERVER_ERROR + ); +} + +#[tokio::test] +async fn test_transcode_list_empty() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed("/api/v1/transcode", &viewer_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = response_body(response).await; + assert!(body.as_array().unwrap().is_empty()); +} + +#[tokio::test] +async fn test_hls_segment_no_session() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed( + "/api/v1/media/00000000-0000-0000-0000-000000000000/stream/hls/720p/segment0.ts", + &viewer_token, + )) + .await + .unwrap(); + // Should fail because media doesn't exist or no transcode session + assert!( + response.status() == StatusCode::BAD_REQUEST + || response.status() == StatusCode::NOT_FOUND + || response.status() == StatusCode::INTERNAL_SERVER_ERROR + ); +} + +// =================================================================== +// Phase 2 feature tests: Subtitles +// =================================================================== + 
+#[tokio::test] +async fn test_subtitles_list() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + // Should return empty for nonexistent media (or not found) + let response = app + .oneshot(get_authed( + "/api/v1/media/00000000-0000-0000-0000-000000000000/subtitles", + &viewer_token, + )) + .await + .unwrap(); + assert!( + response.status() == StatusCode::OK + || response.status() == StatusCode::NOT_FOUND + || response.status() == StatusCode::INTERNAL_SERVER_ERROR + ); +} + +// =================================================================== +// Health: public access test +// =================================================================== + +#[tokio::test] +async fn test_health_public() { + let (app, _, _, _) = setup_app_with_auth().await; + + // Health endpoint should be accessible without auth even when accounts enabled + let response = app.oneshot(get("/api/v1/health")).await.unwrap(); + assert_eq!(response.status(), StatusCode::OK); +} + +// =================================================================== +// Input validation & edge case tests +// =================================================================== + +#[tokio::test] +async fn test_invalid_uuid_in_path() { + let (app, _, _, viewer_token) = setup_app_with_auth().await; + + let response = app + .oneshot(get_authed("/api/v1/media/not-a-uuid", &viewer_token)) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn test_oversized_comment() { + let (app, _, editor_token, _) = setup_app_with_auth().await; + + let long_text: String = "x".repeat(10_001); + let body = format!(r#"{{"text":"{}"}}"#, long_text); + let response = app + .oneshot(post_json_authed( + "/api/v1/media/00000000-0000-0000-0000-000000000000/comments", + &body, + &editor_token, + )) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn test_share_link_expired() { + // Uses no-auth setup since share 
links are complex to test with auth + // (need real media items). Verify the expire check logic works. + let app = setup_app().await; + + // First import a dummy file to get a media_id — but we can't without a real file. + // So let's test the public share access endpoint with a nonexistent token. + let response = app + .oneshot(get("/api/v1/s/nonexistent_token")) + .await + .unwrap(); + // Should fail with not found or internal error (no such share link) + assert!( + response.status() == StatusCode::NOT_FOUND + || response.status() == StatusCode::INTERNAL_SERVER_ERROR + ); +} diff --git a/crates/pinakes-server/tests/plugin_test.rs b/crates/pinakes-server/tests/plugin_test.rs new file mode 100644 index 0000000..e998816 --- /dev/null +++ b/crates/pinakes-server/tests/plugin_test.rs @@ -0,0 +1,211 @@ +use std::net::SocketAddr; +use std::sync::Arc; + +use axum::body::Body; +use axum::extract::ConnectInfo; +use axum::http::{Request, StatusCode}; +use http_body_util::BodyExt; +use tokio::sync::RwLock; +use tower::ServiceExt; + +use pinakes_core::cache::CacheLayer; +use pinakes_core::config::{ + AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig, + JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType, + StorageConfig, ThumbnailConfig, TranscodingConfig, UiConfig, WebhookConfig, +}; +use pinakes_core::jobs::JobQueue; +use pinakes_core::plugin::PluginManager; +use pinakes_core::storage::StorageBackend; +use pinakes_core::storage::sqlite::SqliteBackend; + +/// Fake socket address for tests (governor needs ConnectInfo) +fn test_addr() -> ConnectInfo { + ConnectInfo("127.0.0.1:9999".parse().unwrap()) +} + +/// Build a GET request with ConnectInfo for rate limiter compatibility +fn get(uri: &str) -> Request { + let mut req = Request::builder().uri(uri).body(Body::empty()).unwrap(); + req.extensions_mut().insert(test_addr()); + req +} + +async fn setup_app_with_plugins() -> (axum::Router, Arc, 
tempfile::TempDir) { + let backend = SqliteBackend::in_memory().expect("in-memory SQLite"); + backend.run_migrations().await.expect("migrations"); + let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend; + + // Create temp directories for plugin manager (automatically cleaned up when TempDir drops) + let temp_dir = tempfile::TempDir::new().expect("create temp dir"); + let data_dir = temp_dir.path().join("data"); + let cache_dir = temp_dir.path().join("cache"); + std::fs::create_dir_all(&data_dir).expect("create data dir"); + std::fs::create_dir_all(&cache_dir).expect("create cache dir"); + + let plugin_config = PluginsConfig { + enabled: true, + data_dir: data_dir.clone(), + cache_dir: cache_dir.clone(), + plugin_dirs: vec![], + enable_hot_reload: false, + allow_unsigned: true, + max_concurrent_ops: 2, + plugin_timeout_secs: 10, + }; + + let plugin_manager = PluginManager::new(data_dir, cache_dir, plugin_config.clone().into()) + .expect("create plugin manager"); + let plugin_manager = Arc::new(plugin_manager); + + let config = Config { + storage: StorageConfig { + backend: StorageBackendType::Sqlite, + sqlite: Some(SqliteConfig { + path: ":memory:".into(), + }), + postgres: None, + }, + directories: DirectoryConfig { roots: vec![] }, + scanning: ScanningConfig { + watch: false, + poll_interval_secs: 300, + ignore_patterns: vec![], + import_concurrency: 8, + }, + server: ServerConfig { + host: "127.0.0.1".to_string(), + port: 3000, + api_key: None, + }, + ui: UiConfig::default(), + accounts: AccountsConfig::default(), + jobs: JobsConfig::default(), + thumbnails: ThumbnailConfig::default(), + webhooks: Vec::::new(), + scheduled_tasks: vec![], + plugins: plugin_config, + transcoding: TranscodingConfig::default(), + enrichment: EnrichmentConfig::default(), + cloud: CloudConfig::default(), + analytics: AnalyticsConfig::default(), + }; + + let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {})); + let config = 
Arc::new(RwLock::new(config)); + let scheduler = pinakes_core::scheduler::TaskScheduler::new( + job_queue.clone(), + tokio_util::sync::CancellationToken::new(), + config.clone(), + None, + ); + + let state = pinakes_server::state::AppState { + storage, + config, + config_path: None, + scan_progress: pinakes_core::scan::ScanProgress::new(), + sessions: Arc::new(RwLock::new(std::collections::HashMap::new())), + job_queue, + cache: Arc::new(CacheLayer::new(60)), + scheduler: Arc::new(scheduler), + plugin_manager: Some(plugin_manager.clone()), + transcode_service: None, + }; + + let router = pinakes_server::app::create_router(state); + (router, plugin_manager, temp_dir) +} + +#[tokio::test] +async fn test_list_plugins_empty() { + let (app, _pm, _tmp) = setup_app_with_plugins().await; + + let response = app.oneshot(get("/api/v1/plugins")).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + let body = response.into_body().collect().await.unwrap().to_bytes(); + let plugins: Vec = serde_json::from_slice(&body).unwrap(); + assert_eq!(plugins.len(), 0, "should start with no plugins loaded"); +} + +#[tokio::test] +async fn test_plugin_manager_exists() { + let (app, _pm, _tmp) = setup_app_with_plugins().await; + + // Verify plugin manager is accessible + let plugins = _pm.list_plugins().await; + assert_eq!(plugins.len(), 0); + + // Verify API endpoint works + let response = app.oneshot(get("/api/v1/plugins")).await.unwrap(); + assert_eq!(response.status(), StatusCode::OK); +} + +#[tokio::test] +async fn test_plugin_not_found() { + let (app, _pm, _tmp) = setup_app_with_plugins().await; + + let response = app + .oneshot(get("/api/v1/plugins/nonexistent")) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn test_plugin_enable_disable() { + let (app, pm, _tmp) = setup_app_with_plugins().await; + + // Verify plugin manager is initialized + assert!(pm.list_plugins().await.is_empty()); + + // For this test, 
we would need to actually load a plugin first + // Since we don't have a real WASM plugin loaded, we'll just verify + // the endpoints exist and return appropriate errors + + let mut req = Request::builder() + .method("POST") + .uri("/api/v1/plugins/test-plugin/enable") + .body(Body::empty()) + .unwrap(); + req.extensions_mut().insert(test_addr()); + + let response = app.clone().oneshot(req).await.unwrap(); + + // Should be NOT_FOUND since plugin doesn't exist + assert_eq!(response.status(), StatusCode::NOT_FOUND); + + // Test disable endpoint + let mut req = Request::builder() + .method("POST") + .uri("/api/v1/plugins/test-plugin/disable") + .body(Body::empty()) + .unwrap(); + req.extensions_mut().insert(test_addr()); + + let response = app.oneshot(req).await.unwrap(); + + // Should also be NOT_FOUND + assert_eq!(response.status(), StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn test_plugin_uninstall_not_found() { + let (app, _pm, _tmp) = setup_app_with_plugins().await; + + let mut req = Request::builder() + .method("DELETE") + .uri("/api/v1/plugins/nonexistent") + .body(Body::empty()) + .unwrap(); + req.extensions_mut().insert(test_addr()); + + let response = app.oneshot(req).await.unwrap(); + + // Expect 400 or 404 when plugin doesn't exist + assert!( + response.status() == StatusCode::BAD_REQUEST || response.status() == StatusCode::NOT_FOUND + ); +} diff --git a/crates/pinakes-tui/src/app.rs b/crates/pinakes-tui/src/app.rs index 04edd4d..4f4fb2e 100644 --- a/crates/pinakes-tui/src/app.rs +++ b/crates/pinakes-tui/src/app.rs @@ -54,7 +54,7 @@ pub struct AppState { pub total_media_count: u64, pub server_url: String, // Duplicates view - pub duplicate_groups: Vec>, + pub duplicate_groups: Vec, pub duplicates_selected: Option, // Database view pub database_stats: Option>, @@ -249,16 +249,11 @@ fn handle_api_result(state: &mut AppState, result: ApiResult) { } } ApiResult::Duplicates(groups) => { - let flat: Vec> = - groups.into_iter().map(|g| 
g.items).collect(); - state.duplicate_groups = flat; - if !state.duplicate_groups.is_empty() { + if !groups.is_empty() { state.duplicates_selected = Some(0); } - state.status_message = Some(format!( - "Found {} duplicate groups", - state.duplicate_groups.len() - )); + state.status_message = Some(format!("Found {} duplicate groups", groups.len())); + state.duplicate_groups = groups; } ApiResult::DatabaseStats(stats) => { state.database_stats = Some(vec![ @@ -617,6 +612,13 @@ async fn handle_action( } } } + // Also fetch background jobs info + match client.list_jobs().await { + Ok(jobs) => { + tracing::debug!("Found {} background jobs", jobs.len()); + } + Err(e) => tracing::warn!("Failed to list jobs: {}", e), + } }); } Action::QueueView => { @@ -1024,6 +1026,134 @@ async fn handle_action( "?: Help q: Quit /: Search i: Import o: Open t: Tags c: Collections a: Audit s: Scan S: Settings r: Refresh Home/End: Top/Bottom".into() ); } + Action::Edit => { + if state.current_view == View::Detail + && let Some(ref media) = state.selected_media { + // Populate edit fields from selected media + state.edit_title = media.title.clone().unwrap_or_default(); + state.edit_artist = media.artist.clone().unwrap_or_default(); + state.edit_album = media.album.clone().unwrap_or_default(); + state.edit_genre = media.genre.clone().unwrap_or_default(); + state.edit_year = media.year.map(|y| y.to_string()).unwrap_or_default(); + state.edit_description = media.description.clone().unwrap_or_default(); + state.edit_field_index = Some(0); + state.input_mode = true; + state.current_view = View::MetadataEdit; + } + } + Action::Vacuum => { + if state.current_view == View::Database { + state.status_message = Some("Vacuuming database...".to_string()); + let client = client.clone(); + let tx = event_sender.clone(); + tokio::spawn(async move { + match client.vacuum_database().await { + Ok(()) => { + tracing::info!("Database vacuum completed"); + // Refresh stats after vacuum + if let Ok(stats) = 
client.database_stats().await { + let _ = + tx.send(AppEvent::ApiResult(ApiResult::DatabaseStats(stats))); + } + } + Err(e) => { + tracing::error!("Vacuum failed: {}", e); + let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!( + "Vacuum failed: {e}" + )))); + } + } + }); + } + } + Action::Toggle => { + if state.current_view == View::Tasks + && let Some(idx) = state.scheduled_tasks_selected + && let Some(task) = state.scheduled_tasks.get(idx) { + let task_id = task.id.clone(); + let client = client.clone(); + let tx = event_sender.clone(); + tokio::spawn(async move { + match client.toggle_scheduled_task(&task_id).await { + Ok(()) => { + // Refresh tasks list + if let Ok(tasks) = client.list_scheduled_tasks().await { + let _ = tx.send(AppEvent::ApiResult( + ApiResult::ScheduledTasks(tasks), + )); + } + } + Err(e) => { + tracing::error!("Failed to toggle task: {}", e); + let _ = tx.send(AppEvent::ApiResult(ApiResult::Error( + format!("Toggle task failed: {e}"), + ))); + } + } + }); + } + } + Action::RunNow => { + if state.current_view == View::Tasks + && let Some(idx) = state.scheduled_tasks_selected + && let Some(task) = state.scheduled_tasks.get(idx) { + let task_id = task.id.clone(); + let task_name = task.name.clone(); + state.status_message = Some(format!("Running task: {task_name}...")); + let client = client.clone(); + let tx = event_sender.clone(); + tokio::spawn(async move { + match client.run_task_now(&task_id).await { + Ok(()) => { + // Refresh tasks list + if let Ok(tasks) = client.list_scheduled_tasks().await { + let _ = tx.send(AppEvent::ApiResult( + ApiResult::ScheduledTasks(tasks), + )); + } + } + Err(e) => { + tracing::error!("Failed to run task: {}", e); + let _ = tx.send(AppEvent::ApiResult(ApiResult::Error( + format!("Run task failed: {e}"), + ))); + } + } + }); + } + } + Action::Save => { + if state.current_view == View::MetadataEdit + && let Some(ref media) = state.selected_media { + let updates = serde_json::json!({ + "title": if 
state.edit_title.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_title.clone()) }, + "artist": if state.edit_artist.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_artist.clone()) }, + "album": if state.edit_album.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_album.clone()) }, + "genre": if state.edit_genre.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_genre.clone()) }, + "year": state.edit_year.parse::().ok(), + "description": if state.edit_description.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_description.clone()) }, + }); + let media_id = media.id.clone(); + let client = client.clone(); + let tx = event_sender.clone(); + state.status_message = Some("Saving...".to_string()); + tokio::spawn(async move { + match client.update_media(&media_id, updates).await { + Ok(_) => { + let _ = tx.send(AppEvent::ApiResult(ApiResult::MediaUpdated)); + } + Err(e) => { + tracing::error!("Failed to update media: {}", e); + let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!( + "Update failed: {e}" + )))); + } + } + }); + state.input_mode = false; + state.current_view = View::Detail; + } + } Action::NavigateLeft | Action::NavigateRight | Action::None => {} } } diff --git a/crates/pinakes-tui/src/client.rs b/crates/pinakes-tui/src/client.rs index 24480d9..57cbb81 100644 --- a/crates/pinakes-tui/src/client.rs +++ b/crates/pinakes-tui/src/client.rs @@ -101,7 +101,10 @@ pub struct DuplicateGroupResponse { pub items: Vec, } +/// Background job response from the API. +/// Fields are used for deserialization; the job count is logged in the Database view. 
#[derive(Debug, Clone, Deserialize)] +#[allow(dead_code)] pub struct JobResponse { pub id: String, pub kind: serde_json::Value, diff --git a/crates/pinakes-tui/src/input.rs b/crates/pinakes-tui/src/input.rs index e21823b..56881d5 100644 --- a/crates/pinakes-tui/src/input.rs +++ b/crates/pinakes-tui/src/input.rs @@ -36,6 +36,11 @@ pub enum Action { TagMedia, UntagMedia, Help, + Edit, + Vacuum, + Toggle, + RunNow, + Save, Char(char), Backspace, None, @@ -43,11 +48,15 @@ pub enum Action { pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Action { if in_input_mode { - match key.code { - KeyCode::Esc => Action::Back, - KeyCode::Enter => Action::Select, - KeyCode::Char(c) => Action::Char(c), - KeyCode::Backspace => Action::Backspace, + match (key.code, key.modifiers) { + (KeyCode::Esc, _) => Action::Back, + (KeyCode::Enter, _) => Action::Select, + (KeyCode::Char('s'), KeyModifiers::CONTROL) => match current_view { + View::MetadataEdit => Action::Save, + _ => Action::Select, + }, + (KeyCode::Char(c), _) => Action::Char(c), + (KeyCode::Backspace, _) => Action::Backspace, _ => Action::None, } } else { @@ -70,10 +79,13 @@ pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Ac }, (KeyCode::Char('o'), _) => Action::Open, (KeyCode::Char('e'), _) => match current_view { - View::Detail => Action::Select, + View::Detail => Action::Edit, _ => Action::None, }, - (KeyCode::Char('t'), _) => Action::TagView, + (KeyCode::Char('t'), _) => match current_view { + View::Tasks => Action::Toggle, + _ => Action::TagView, + }, (KeyCode::Char('c'), _) => Action::CollectionView, (KeyCode::Char('a'), _) => Action::AuditView, (KeyCode::Char('S'), _) => Action::SettingsView, @@ -82,11 +94,24 @@ pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Ac (KeyCode::Char('Q'), _) => Action::QueueView, (KeyCode::Char('X'), _) => Action::StatisticsView, (KeyCode::Char('T'), _) => Action::TasksView, + // Ctrl+S must come before 
plain 's' to ensure proper precedence + (KeyCode::Char('s'), KeyModifiers::CONTROL) => match current_view { + View::MetadataEdit => Action::Save, + _ => Action::None, + }, (KeyCode::Char('s'), _) => Action::ScanTrigger, (KeyCode::Char('r'), _) => Action::Refresh, (KeyCode::Char('n'), _) => Action::CreateTag, (KeyCode::Char('+'), _) => Action::TagMedia, (KeyCode::Char('-'), _) => Action::UntagMedia, + (KeyCode::Char('v'), _) => match current_view { + View::Database => Action::Vacuum, + _ => Action::None, + }, + (KeyCode::Char('x'), _) => match current_view { + View::Tasks => Action::RunNow, + _ => Action::None, + }, (KeyCode::Tab, _) => Action::NextTab, (KeyCode::BackTab, _) => Action::PrevTab, (KeyCode::PageUp, _) => Action::PageUp, diff --git a/crates/pinakes-tui/src/ui/duplicates.rs b/crates/pinakes-tui/src/ui/duplicates.rs index 724ef6f..8f50142 100644 --- a/crates/pinakes-tui/src/ui/duplicates.rs +++ b/crates/pinakes-tui/src/ui/duplicates.rs @@ -15,14 +15,17 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) { } else { let mut list_items = Vec::new(); for (i, group) in state.duplicate_groups.iter().enumerate() { + // Show truncated hash (first 16 chars) for identification + let hash_display = if group.content_hash.len() > 16 { + &group.content_hash[..16] + } else { + &group.content_hash + }; let header = format!( - "Group {} ({} items, hash: {})", + "Group {} ({} items, hash: {}...)", i + 1, - group.len(), - group - .first() - .map(|m| m.content_hash.as_str()) - .unwrap_or("?") + group.items.len(), + hash_display ); list_items.push(ListItem::new(Line::from(Span::styled( header, @@ -30,7 +33,7 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) { .fg(Color::Yellow) .add_modifier(Modifier::BOLD), )))); - for item in group { + for item in &group.items { let line = format!(" {} - {}", item.file_name, item.path); let is_selected = state .duplicates_selected diff --git a/crates/pinakes-tui/src/ui/tasks.rs b/crates/pinakes-tui/src/ui/tasks.rs 
index e35c75f..01149e1 100644 --- a/crates/pinakes-tui/src/ui/tasks.rs +++ b/crates/pinakes-tui/src/ui/tasks.rs @@ -37,9 +37,15 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) { .map(super::format_date) .unwrap_or("-"); let status = task.last_status.as_deref().unwrap_or("-"); + // Show abbreviated task ID (first 8 chars) + let task_id_short = if task.id.len() > 8 { + &task.id[..8] + } else { + &task.id + }; let text = format!( - " {enabled_marker} {:<20} {:<16} Last: {:<12} Next: {:<12} Status: {}", + " {enabled_marker} [{task_id_short}] {:<20} {:<16} Last: {:<12} Next: {:<12} Status: {}", task.name, task.schedule, last_run, next_run, status ); diff --git a/crates/pinakes-ui/src/app.rs b/crates/pinakes-ui/src/app.rs index b439acb..f85a48f 100644 --- a/crates/pinakes-ui/src/app.rs +++ b/crates/pinakes-ui/src/app.rs @@ -48,7 +48,7 @@ pub fn App() -> Element { let base_url = std::env::var("PINAKES_SERVER_URL").unwrap_or_else(|_| "http://localhost:3000".into()); let api_key = std::env::var("PINAKES_API_KEY").ok(); - let client = use_signal(|| ApiClient::new(&base_url, api_key.as_deref())); + let mut client = use_signal(|| ApiClient::new(&base_url, api_key.as_deref())); let server_url = use_signal(|| base_url.clone()); let mut current_view = use_signal(|| View::Library); @@ -103,10 +103,13 @@ pub fn App() -> Element { // Auth state let mut auth_required = use_signal(|| false); let mut current_user = use_signal(|| Option::::None); - let _login_error = use_signal(|| Option::::None); - let _login_loading = use_signal(|| false); + let mut login_error = use_signal(|| Option::::None); + let mut login_loading = use_signal(|| false); let mut auto_play_media = use_signal(|| false); + // Import state for UI feedback + let mut import_in_progress = use_signal(|| false); + // Check auth on startup let client_auth = client.read().clone(); use_effect(move || { @@ -117,10 +120,16 @@ pub fn App() -> Element { current_user.set(Some(user)); auth_required.set(false); } - 
Err(_) => { - // Check if server has accounts enabled by trying login endpoint - // If we get a 401 on /auth/me, accounts may be enabled - auth_required.set(false); // Will be set to true if needed + Err(e) => { + // Check if this is an auth error (401) vs network error + let err_str = e.to_string(); + if err_str.contains("401") + || err_str.contains("unauthorized") + || err_str.contains("Unauthorized") + { + auth_required.set(true); + } + // For network errors, don't require auth (server offline state handles this) } } // Load UI config @@ -255,6 +264,33 @@ pub fn App() -> Element { } }; + // Login handler for auth flow + let on_login_submit = { + move |(username, password): (String, String)| { + let login_client = client.read().clone(); + spawn(async move { + login_loading.set(true); + login_error.set(None); + + match login_client.login(&username, &password).await { + Ok(resp) => { + // Update the signal with a new client that has the token set + client.write().set_token(&resp.token); + current_user.set(Some(UserInfoResponse { + username: resp.username, + role: resp.role, + })); + auth_required.set(false); + } + Err(e) => { + login_error.set(Some(format!("Login failed: {e}"))); + } + } + login_loading.set(false); + }); + } + }; + let view_title = use_memo(move || current_view.read().title()); let _total_pages = use_memo(move || { let ps = *media_page_size.read(); @@ -265,8 +301,15 @@ pub fn App() -> Element { rsx! 
{ style { {styles::CSS} } - // Phase 7.1: Keyboard shortcuts - div { class: "app", + if *auth_required.read() { + crate::components::login::Login { + on_login: on_login_submit, + error: login_error.read().clone(), + loading: *login_loading.read(), + } + } else { + // Phase 7.1: Keyboard shortcuts + div { class: "app", tabindex: "0", onkeydown: { move |evt: KeyboardEvent| { @@ -316,7 +359,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{25a6}" } - "Library" + span { class: "nav-item-text", "Library" } // Phase 7.2: Badge span { class: "nav-badge", "{media_total_count}" } } @@ -324,7 +367,7 @@ pub fn App() -> Element { class: if *current_view.read() == View::Search { "nav-item active" } else { "nav-item" }, onclick: move |_| current_view.set(View::Search), span { class: "nav-icon", "\u{2315}" } - "Search" + span { class: "nav-item-text", "Search" } } button { class: if *current_view.read() == View::Import { "nav-item active" } else { "nav-item" }, @@ -341,7 +384,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{2912}" } - "Import" + span { class: "nav-item-text", "Import" } } } @@ -357,7 +400,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{2605}" } - "Tags" + span { class: "nav-item-text", "Tags" } // Phase 7.2: Badge span { class: "nav-badge", "{tags_list.read().len()}" } } @@ -373,7 +416,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{2630}" } - "Collections" + span { class: "nav-item-text", "Collections" } // Phase 7.2: Badge span { class: "nav-badge", "{collections_list.read().len()}" } } @@ -391,7 +434,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{2637}" } - "Audit" + span { class: "nav-item-text", "Audit" } } button { class: if *current_view.read() == View::Duplicates { "nav-item active" } else { "nav-item" }, @@ -408,7 +451,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{2261}" } - "Duplicates" + span { class: "nav-item-text", "Duplicates" } } button { 
class: if *current_view.read() == View::Settings { "nav-item active" } else { "nav-item" }, @@ -425,7 +468,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{2699}" } - "Settings" + span { class: "nav-item-text", "Settings" } } button { class: if *current_view.read() == View::Database { "nav-item active" } else { "nav-item" }, @@ -442,7 +485,7 @@ pub fn App() -> Element { } }, span { class: "nav-icon", "\u{2750}" } - "Database" + span { class: "nav-item-text", "Database" } } } @@ -1142,6 +1185,7 @@ pub fn App() -> Element { tags: tags_list.read().clone(), collections: collections_list.read().clone(), scan_progress: scan_progress.read().clone(), + is_importing: *import_in_progress.read(), on_import_file: { let client = client.read().clone(); let refresh_media = refresh_media.clone(); @@ -1150,6 +1194,7 @@ pub fn App() -> Element { let client = client.clone(); let refresh_media = refresh_media.clone(); let refresh_tags = refresh_tags.clone(); + import_in_progress.set(true); spawn(async move { if tag_ids.is_empty() && new_tags.is_empty() && col_id.is_none() { match client.import_file(&path).await { @@ -1179,6 +1224,7 @@ pub fn App() -> Element { Err(e) => show_toast(format!("Import failed: {e}"), true), } } + import_in_progress.set(false); }); } }, @@ -1190,8 +1236,8 @@ pub fn App() -> Element { let client = client.clone(); let refresh_media = refresh_media.clone(); let refresh_tags = refresh_tags.clone(); + import_in_progress.set(true); spawn(async move { - show_toast("Importing directory...".into(), false); match client.import_directory(&path, &tag_ids, &new_tags, col_id.as_deref()).await { Ok(resp) => { show_toast( @@ -1208,6 +1254,7 @@ pub fn App() -> Element { } Err(e) => show_toast(format!("Directory import failed: {e}"), true), } + import_in_progress.set(false); }); } }, @@ -1218,8 +1265,8 @@ pub fn App() -> Element { move |_| { let client = client.clone(); let refresh_media = refresh_media.clone(); + import_in_progress.set(true); spawn(async 
move { - show_toast("Scanning...".into(), false); match client.trigger_scan().await { Ok(_results) => { // Poll scan status until done @@ -1242,6 +1289,7 @@ pub fn App() -> Element { } Err(e) => show_toast(format!("Scan failed: {e}"), true), } + import_in_progress.set(false); }); } }, @@ -1253,8 +1301,9 @@ pub fn App() -> Element { let client = client.clone(); let refresh_media = refresh_media.clone(); let refresh_tags = refresh_tags.clone(); + let file_count = paths.len(); + import_in_progress.set(true); spawn(async move { - show_toast(format!("Importing {} files...", paths.len()), false); match client.batch_import(&paths, &tag_ids, &new_tags, col_id.as_deref()).await { Ok(resp) => { show_toast( @@ -1269,8 +1318,9 @@ pub fn App() -> Element { preview_files.set(Vec::new()); preview_total_size.set(0); } - Err(e) => show_toast(format!("Batch import failed: {e}"), true), + Err(e) => show_toast(format!("Batch import failed ({file_count} files): {e}"), true), } + import_in_progress.set(false); }); } }, @@ -1556,6 +1606,7 @@ pub fn App() -> Element { } } } + } // end else (auth not required) // Phase 1.4: Toast queue - show up to 3 stacked from bottom div { class: "toast-container", diff --git a/crates/pinakes-ui/src/components/import.rs b/crates/pinakes-ui/src/components/import.rs index 0b97d74..0963565 100644 --- a/crates/pinakes-ui/src/components/import.rs +++ b/crates/pinakes-ui/src/components/import.rs @@ -22,6 +22,7 @@ pub fn Import( preview_files: Vec, preview_total_size: u64, scan_progress: Option, + #[props(default = false)] is_importing: bool, ) -> Element { let mut import_mode = use_signal(|| 0usize); let mut file_path = use_signal(String::new); @@ -44,6 +45,19 @@ pub fn Import( let current_mode = *import_mode.read(); rsx! { + // Import status panel (shown when import is in progress) + if is_importing { + div { class: "import-status-panel", + div { class: "import-status-header", + div { class: "status-dot checking" } + span { "Import in progress..." 
} + } + div { class: "progress-bar", + div { class: "progress-fill indeterminate" } + } + } + } + // Tab bar div { class: "import-tabs", button { @@ -114,6 +128,7 @@ pub fn Import( } button { class: "btn btn-primary", + disabled: is_importing, onclick: { let mut file_path = file_path; let mut selected_tags = selected_tags; @@ -133,7 +148,7 @@ pub fn Import( } } }, - "Import" + if is_importing { "Importing..." } else { "Import" } } } } @@ -494,7 +509,7 @@ pub fn Import( rsx! { button { class: "btn btn-primary", - disabled: !has_selected, + disabled: !has_selected || is_importing, onclick: { let mut selected_file_paths = selected_file_paths; let mut selected_tags = selected_tags; @@ -514,7 +529,9 @@ pub fn Import( } } }, - if has_selected { + if is_importing { + "Importing..." + } else if has_selected { "Import Selected ({sel_count})" } else { "Import Selected" @@ -526,6 +543,7 @@ pub fn Import( // Import entire directory button { class: "btn btn-secondary", + disabled: is_importing, onclick: { let mut dir_path = dir_path; let mut selected_tags = selected_tags; @@ -547,7 +565,7 @@ pub fn Import( } } }, - "Import Entire Directory" + if is_importing { "Importing..." } else { "Import Entire Directory" } } } } @@ -569,8 +587,9 @@ pub fn Import( div { class: "mb-16", style: "text-align: center;", button { class: "btn btn-primary", + disabled: is_importing, onclick: move |_| on_scan.call(()), - "Scan All Roots" + if is_importing { "Scanning..." } else { "Scan All Roots" } } } diff --git a/crates/pinakes-ui/src/components/media_player.rs b/crates/pinakes-ui/src/components/media_player.rs index f780580..2294506 100644 --- a/crates/pinakes-ui/src/components/media_player.rs +++ b/crates/pinakes-ui/src/components/media_player.rs @@ -41,14 +41,20 @@ impl Default for PlayQueue { } impl PlayQueue { + /// Check if the queue is empty. + #[allow(dead_code)] pub fn is_empty(&self) -> bool { self.items.is_empty() } + /// Get the current item in the queue. 
+ #[allow(dead_code)] pub fn current(&self) -> Option<&QueueItem> { self.items.get(self.current_index) } + /// Advance to the next item based on repeat mode. + #[allow(dead_code)] pub fn next(&mut self) -> Option<&QueueItem> { if self.items.is_empty() { return None; @@ -70,6 +76,8 @@ impl PlayQueue { } } + /// Go to the previous item based on repeat mode. + #[allow(dead_code)] pub fn previous(&mut self) -> Option<&QueueItem> { if self.items.is_empty() { return None; @@ -82,10 +90,14 @@ impl PlayQueue { self.items.get(self.current_index) } + /// Add an item to the queue. + #[allow(dead_code)] pub fn add(&mut self, item: QueueItem) { self.items.push(item); } + /// Remove an item from the queue by index. + #[allow(dead_code)] pub fn remove(&mut self, index: usize) { if index < self.items.len() { self.items.remove(index); @@ -95,11 +107,15 @@ impl PlayQueue { } } + /// Clear all items from the queue. + #[allow(dead_code)] pub fn clear(&mut self) { self.items.clear(); self.current_index = 0; } + /// Toggle between repeat modes: Off -> All -> One -> Off. + #[allow(dead_code)] pub fn toggle_repeat(&mut self) { self.repeat = match self.repeat { RepeatMode::Off => RepeatMode::All, @@ -108,6 +124,8 @@ impl PlayQueue { }; } + /// Toggle shuffle mode on/off. 
+ #[allow(dead_code)] pub fn toggle_shuffle(&mut self) { self.shuffle = !self.shuffle; } diff --git a/crates/pinakes-ui/src/styles.rs b/crates/pinakes-ui/src/styles.rs index 642814d..89bf615 100644 --- a/crates/pinakes-ui/src/styles.rs +++ b/crates/pinakes-ui/src/styles.rs @@ -81,6 +81,15 @@ body { .sidebar.collapsed .nav-item { justify-content: center; padding: 8px; border-left: none; } .sidebar.collapsed .nav-icon { width: auto; margin: 0; } +/* Nav item text - hide when collapsed */ +.nav-item-text { + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.sidebar.collapsed .nav-item-text { display: none; } + .sidebar-toggle { background: none; border: none; @@ -1550,6 +1559,34 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); } transition: width 0.3s ease; } +.progress-fill.indeterminate { + width: 30%; + animation: indeterminate 1.5s ease-in-out infinite; +} + +@keyframes indeterminate { + 0% { transform: translateX(-100%); } + 100% { transform: translateX(400%); } +} + +/* ── Import status panel ── */ +.import-status-panel { + background: var(--bg-2); + border: 1px solid var(--accent); + border-radius: var(--radius); + padding: 12px 16px; + margin-bottom: 16px; +} + +.import-status-header { + display: flex; + align-items: center; + gap: 8px; + margin-bottom: 8px; + font-size: 13px; + color: var(--text-0); +} + /* ── Tag confirmation ── */ .tag-confirm-delete { display: inline-flex; @@ -2336,14 +2373,26 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); } align-items: center; gap: 6px; font-size: 12px; - flex-wrap: wrap; + overflow: hidden; + min-width: 0; } .user-name { font-weight: 500; color: var(--text-0); + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + max-width: 90px; + flex-shrink: 1; } +/* Hide user details in collapsed sidebar, show only logout icon */ +.sidebar.collapsed .user-info .user-name, +.sidebar.collapsed .user-info .role-badge { display: none; } + +.sidebar.collapsed 
.user-info .btn { padding: 6px; } + .role-badge { display: inline-block; padding: 1px 6px;