various: simplify code; work on security and performance

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9a5114addcab5fbff430ab2b919b83466a6a6964
This commit is contained in:
raf 2026-02-02 17:32:11 +03:00
commit c4adc4e3e0
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
75 changed files with 12921 additions and 358 deletions

View file

@ -32,8 +32,13 @@ mime_guess = { workspace = true }
async-trait = { workspace = true }
kamadak-exif = { workspace = true }
image = { workspace = true }
tokio-util = { version = "0.7", features = ["rt"] }
tokio-util = { workspace = true }
reqwest = { workspace = true }
argon2 = { workspace = true }
# Plugin system
pinakes-plugin-api = { path = "../pinakes-plugin-api" }
wasmtime = { workspace = true }
[dev-dependencies]
tempfile = "3"

View file

@ -0,0 +1,69 @@
//! Usage analytics and watch history tracking.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
/// A tracked usage event for a media item.
///
/// `media_id` and `user_id` are both optional so events without a media
/// target or a known user (e.g. a search) can still be recorded.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UsageEvent {
/// Unique event identifier.
pub id: Uuid,
/// Media item this event refers to, if any.
pub media_id: Option<MediaId>,
/// User who triggered the event, when known.
pub user_id: Option<UserId>,
/// What kind of interaction happened.
pub event_type: UsageEventType,
/// When the event occurred (UTC).
pub timestamp: DateTime<Utc>,
/// Duration of the interaction in seconds, when meaningful —
/// presumably playback/viewing time; confirm against writers.
pub duration_secs: Option<f64>,
/// Optional free-form JSON payload with extra context.
/// NOTE(review): schema not visible here — confirm with event producers.
pub context_json: Option<String>,
}
/// Types of usage events that can be tracked.
///
/// Serialized as snake_case strings, matching `Display` and `FromStr`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum UsageEventType {
/// The item was viewed.
View,
/// The item was played.
Play,
/// The item was exported.
Export,
/// The item was shared.
Share,
/// A search was performed.
Search,
}
impl std::fmt::Display for UsageEventType {
    /// Render the canonical snake_case event name (same strings serde emits).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::View => "view",
            Self::Play => "play",
            Self::Export => "export",
            Self::Share => "share",
            Self::Search => "search",
        })
    }
}
impl std::str::FromStr for UsageEventType {
    type Err = String;

    /// Parse the canonical snake_case event name produced by `Display`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        let event = match s {
            "view" => Self::View,
            "play" => Self::Play,
            "export" => Self::Export,
            "share" => Self::Share,
            "search" => Self::Search,
            _ => return Err(format!("unknown usage event type: {s}")),
        };
        Ok(event)
    }
}
/// Watch history entry tracking progress through media.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WatchHistory {
/// Unique entry identifier.
pub id: Uuid,
/// The user this history entry belongs to.
pub user_id: UserId,
/// The media item being watched.
pub media_id: MediaId,
/// Playback position reached, in seconds.
pub progress_secs: f64,
/// When the item was last watched (UTC).
pub last_watched: DateTime<Utc>,
}

View file

@ -45,10 +45,10 @@ where
pub async fn get(&self, key: &K) -> Option<V> {
let map = self.entries.read().await;
if let Some(entry) = map.get(key) {
if entry.inserted_at.elapsed() < self.ttl {
return Some(entry.value.clone());
}
if let Some(entry) = map.get(key)
&& entry.inserted_at.elapsed() < self.ttl
{
return Some(entry.value.clone());
}
None
}

View file

@ -20,6 +20,16 @@ pub struct Config {
pub webhooks: Vec<WebhookConfig>,
#[serde(default)]
pub scheduled_tasks: Vec<ScheduledTaskConfig>,
#[serde(default)]
pub plugins: PluginsConfig,
#[serde(default)]
pub transcoding: TranscodingConfig,
#[serde(default)]
pub enrichment: EnrichmentConfig,
#[serde(default)]
pub cloud: CloudConfig,
#[serde(default)]
pub analytics: AnalyticsConfig,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -192,6 +202,233 @@ impl std::fmt::Display for UserRole {
}
}
// ===== Plugin Configuration =====

/// Configuration for the WASM plugin subsystem.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginsConfig {
/// Master switch; the plugin system is off unless explicitly enabled.
#[serde(default)]
pub enabled: bool,
/// Directory for per-plugin persistent data.
#[serde(default = "default_plugin_data_dir")]
pub data_dir: PathBuf,
/// Directory for plugin caches.
#[serde(default = "default_plugin_cache_dir")]
pub cache_dir: PathBuf,
/// Additional directories scanned for plugins.
#[serde(default)]
pub plugin_dirs: Vec<PathBuf>,
/// Enable plugin hot reloading.
#[serde(default)]
pub enable_hot_reload: bool,
/// Allow loading unsigned plugins (security-sensitive; off by default).
#[serde(default)]
pub allow_unsigned: bool,
/// Maximum number of concurrent plugin operations (default 4).
#[serde(default = "default_max_concurrent_ops")]
pub max_concurrent_ops: usize,
/// Per-operation plugin timeout, in seconds (default 30).
#[serde(default = "default_plugin_timeout")]
pub plugin_timeout_secs: u64,
}

/// Default plugin data location: `<data_dir>/plugins/data`.
fn default_plugin_data_dir() -> PathBuf {
Config::default_data_dir().join("plugins").join("data")
}

/// Default plugin cache location: `<data_dir>/plugins/cache`.
fn default_plugin_cache_dir() -> PathBuf {
Config::default_data_dir().join("plugins").join("cache")
}

/// Default concurrency limit for plugin operations.
fn default_max_concurrent_ops() -> usize {
4
}

/// Default plugin operation timeout, in seconds.
fn default_plugin_timeout() -> u64 {
30
}

/// Mirrors the serde field defaults so a missing `[plugins]` section and an
/// empty one yield identical configuration.
impl Default for PluginsConfig {
fn default() -> Self {
Self {
enabled: false,
data_dir: default_plugin_data_dir(),
cache_dir: default_plugin_cache_dir(),
plugin_dirs: vec![],
enable_hot_reload: false,
allow_unsigned: false,
max_concurrent_ops: default_max_concurrent_ops(),
plugin_timeout_secs: default_plugin_timeout(),
}
}
}
// ===== Transcoding Configuration =====

/// Configuration for the media transcoding pipeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscodingConfig {
/// Master switch; transcoding is off unless explicitly enabled.
#[serde(default)]
pub enabled: bool,
/// Where transcoded output is cached; `None` presumably falls back to an
/// application default — confirm against the transcode module.
#[serde(default)]
pub cache_dir: Option<PathBuf>,
/// How long cached transcodes are kept, in hours (default 48).
#[serde(default = "default_cache_ttl_hours")]
pub cache_ttl_hours: u64,
/// Maximum number of simultaneous transcode jobs (default 2).
#[serde(default = "default_max_concurrent_transcodes")]
pub max_concurrent: usize,
/// Optional hardware acceleration backend name.
/// NOTE(review): accepted values are not visible in this module.
#[serde(default)]
pub hardware_acceleration: Option<String>,
/// Available output profiles; defaults to "high" and "medium" H.264/AAC.
#[serde(default)]
pub profiles: Vec<TranscodeProfile>,
}

/// Default cache TTL: 48 hours.
fn default_cache_ttl_hours() -> u64 {
48
}

/// Default transcode concurrency: 2 jobs.
fn default_max_concurrent_transcodes() -> usize {
2
}

/// A named transcode output target (codecs plus bitrate/resolution caps).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscodeProfile {
/// Profile identifier referenced by callers.
pub name: String,
/// Target video codec (e.g. "h264").
pub video_codec: String,
/// Target audio codec (e.g. "aac").
pub audio_codec: String,
/// Upper bound on output bitrate, in kbit/s.
pub max_bitrate_kbps: u32,
/// Upper bound on output resolution (e.g. "1080p").
pub max_resolution: String,
}

/// Defaults ship two H.264/AAC profiles: "high" (8 Mbit/s, 1080p) and
/// "medium" (4 Mbit/s, 720p).
impl Default for TranscodingConfig {
fn default() -> Self {
Self {
enabled: false,
cache_dir: None,
cache_ttl_hours: default_cache_ttl_hours(),
max_concurrent: default_max_concurrent_transcodes(),
hardware_acceleration: None,
profiles: vec![
TranscodeProfile {
name: "high".to_string(),
video_codec: "h264".to_string(),
audio_codec: "aac".to_string(),
max_bitrate_kbps: 8000,
max_resolution: "1080p".to_string(),
},
TranscodeProfile {
name: "medium".to_string(),
video_codec: "h264".to_string(),
audio_codec: "aac".to_string(),
max_bitrate_kbps: 4000,
max_resolution: "720p".to_string(),
},
],
}
}
}
// ===== Enrichment Configuration =====

/// Configuration for external metadata enrichment.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct EnrichmentConfig {
/// Master switch; enrichment is off unless explicitly enabled.
#[serde(default)]
pub enabled: bool,
/// Automatically enrich newly imported items.
#[serde(default)]
pub auto_enrich_on_import: bool,
/// Per-provider settings.
#[serde(default)]
pub sources: EnrichmentSources,
}

/// Per-provider enrichment settings, one field per supported source.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct EnrichmentSources {
/// MusicBrainz (audio recordings); the enricher takes no API key.
#[serde(default)]
pub musicbrainz: EnrichmentSource,
/// The Movie Database (video); the enricher requires an API key.
#[serde(default)]
pub tmdb: EnrichmentSource,
/// Last.fm (audio); the enricher requires an API key.
#[serde(default)]
pub lastfm: EnrichmentSource,
}

/// Settings for a single enrichment provider.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct EnrichmentSource {
/// Whether this provider may be queried.
#[serde(default)]
pub enabled: bool,
/// Provider API key, where one is required.
#[serde(default)]
pub api_key: Option<String>,
/// Optional override for the provider's API base URL.
#[serde(default)]
pub api_endpoint: Option<String>,
}
// ===== Cloud Configuration =====

/// Configuration for cloud storage synchronization.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudConfig {
/// Master switch; cloud sync is off unless explicitly enabled.
#[serde(default)]
pub enabled: bool,
/// Interval between automatic sync runs, in minutes (default 60).
#[serde(default = "default_auto_sync_interval")]
pub auto_sync_interval_mins: u64,
/// Configured cloud provider accounts.
#[serde(default)]
pub accounts: Vec<CloudAccount>,
}

/// Default automatic sync interval: 60 minutes.
fn default_auto_sync_interval() -> u64 {
60
}

/// A single cloud provider account and its sync rules.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudAccount {
/// Account identifier.
/// NOTE(review): uniqueness is not enforced here — confirm at load time.
pub id: String,
/// Provider name — accepted values are not visible in this module.
pub provider: String,
/// Whether this account participates in syncing.
#[serde(default)]
pub enabled: bool,
/// Path mappings synced for this account.
#[serde(default)]
pub sync_rules: Vec<CloudSyncRule>,
}

/// Maps a local directory to a remote path with a sync direction.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudSyncRule {
/// Local filesystem path.
pub local_path: PathBuf,
/// Remote path at the provider.
pub remote_path: String,
/// Which way changes flow.
pub direction: CloudSyncDirection,
}

/// Direction of synchronization for a rule (serialized lowercase).
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum CloudSyncDirection {
/// Local changes are pushed to the remote.
Upload,
/// Remote changes are pulled locally.
Download,
/// Changes flow both ways.
Bidirectional,
}

/// Mirrors the serde defaults: disabled, hourly interval, no accounts.
impl Default for CloudConfig {
fn default() -> Self {
Self {
enabled: false,
auto_sync_interval_mins: default_auto_sync_interval(),
accounts: vec![],
}
}
}
// ===== Analytics Configuration =====

/// Configuration for usage analytics collection.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsConfig {
/// Master switch; analytics are off unless explicitly enabled.
#[serde(default)]
pub enabled: bool,
/// Record usage events (defaults to true via `default_true`).
#[serde(default = "default_true")]
pub track_usage: bool,
/// How long collected data is retained, in days (default 90).
#[serde(default = "default_retention_days")]
pub retention_days: u64,
}

/// Default analytics retention window: 90 days.
fn default_retention_days() -> u64 {
90
}

/// Mirrors the serde defaults: disabled, tracking on, 90-day retention.
impl Default for AnalyticsConfig {
fn default() -> Self {
Self {
enabled: false,
track_usage: true,
retention_days: default_retention_days(),
}
}
}
// ===== Storage Configuration =====
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StorageConfig {
pub backend: StorageBackendType,
@ -379,6 +616,11 @@ impl Default for Config {
thumbnails: ThumbnailConfig::default(),
webhooks: vec![],
scheduled_tasks: vec![],
plugins: PluginsConfig::default(),
transcoding: TranscodingConfig::default(),
enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(),
}
}
}

View file

@ -0,0 +1,109 @@
//! Last.fm metadata enrichment for audio files.
use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
/// Last.fm `track.getInfo` metadata enricher for audio items.
pub struct LastFmEnricher {
/// HTTP client preconfigured with request/connect timeouts.
client: reqwest::Client,
/// Last.fm API key sent with every request.
api_key: String,
/// API root without trailing slash, e.g. `https://ws.audioscrobbler.com/2.0`.
base_url: String,
}
impl LastFmEnricher {
/// Build an enricher using the given Last.fm API key.
///
/// The client enforces a 10s request timeout and a 5s connect timeout so a
/// slow upstream cannot stall enrichment indefinitely.
pub fn new(api_key: String) -> Self {
Self {
client: reqwest::Client::builder()
.timeout(Duration::from_secs(10))
.connect_timeout(Duration::from_secs(5))
// Building a client with only timeouts set cannot fail in practice.
.build()
.expect("failed to build HTTP client with configured timeouts"),
api_key,
base_url: "https://ws.audioscrobbler.com/2.0".to_string(),
}
}
}
#[async_trait::async_trait]
impl MetadataEnricher for LastFmEnricher {
fn source(&self) -> EnrichmentSourceType {
EnrichmentSourceType::LastFm
}
/// Look up artist + title via Last.fm `track.getInfo` and return the raw
/// response as external metadata.
///
/// Returns `Ok(None)` when the item lacks a non-empty artist or title,
/// when the HTTP status is non-success, or when Last.fm reports an
/// in-body `error` field or no `track` object.
async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
// Both artist and title are required to identify a track.
let artist = match &item.artist {
Some(a) if !a.is_empty() => a,
_ => return Ok(None),
};
let title = match &item.title {
Some(t) if !t.is_empty() => t,
_ => return Ok(None),
};
// Single endpoint; the API method is selected via query parameter.
let url = format!("{}/", self.base_url);
let resp = self
.client
.get(&url)
.query(&[
("method", "track.getInfo"),
("api_key", self.api_key.as_str()),
("artist", artist.as_str()),
("track", title.as_str()),
("format", "json"),
])
.send()
.await
.map_err(|e| {
PinakesError::MetadataExtraction(format!("Last.fm request failed: {e}"))
})?;
if !resp.status().is_success() {
return Ok(None);
}
let body = resp.text().await.map_err(|e| {
PinakesError::MetadataExtraction(format!("Last.fm response read failed: {e}"))
})?;
let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
PinakesError::MetadataExtraction(format!("Last.fm JSON parse failed: {e}"))
})?;
// Check for error response
// (guards the case where the body carries an `error` field despite a
// successful HTTP status).
if json.get("error").is_some() {
return Ok(None);
}
let track = match json.get("track") {
Some(t) => t,
None => return Ok(None),
};
// `mbid` is the MusicBrainz recording ID, when Last.fm knows it.
let mbid = track.get("mbid").and_then(|m| m.as_str()).map(String::from);
// `listeners` arrives as a JSON string; unparseable values count as 0.
let listeners = track
.get("listeners")
.and_then(|l| l.as_str())
.and_then(|l| l.parse::<f64>().ok())
.unwrap_or(0.0);
// Normalize listeners to confidence (arbitrary scale)
// — saturates at 1.0 for one million or more listeners.
let confidence = (listeners / 1_000_000.0).min(1.0);
Ok(Some(ExternalMetadata {
id: Uuid::now_v7(),
media_id: item.id,
source: EnrichmentSourceType::LastFm,
external_id: mbid,
metadata_json: body,
confidence,
last_updated: Utc::now(),
}))
}
}

View file

@ -0,0 +1,66 @@
//! Metadata enrichment from external sources.
pub mod lastfm;
pub mod musicbrainz;
pub mod tmdb;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::error::Result;
use crate::model::{MediaId, MediaItem};
/// Externally-sourced metadata for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExternalMetadata {
/// Unique record identifier.
pub id: Uuid,
/// The local media item this metadata belongs to.
pub media_id: MediaId,
/// Which provider produced this record.
pub source: EnrichmentSourceType,
/// Provider-side identifier (e.g. a MusicBrainz recording ID), when known.
pub external_id: Option<String>,
/// Raw provider response body, stored verbatim as JSON text.
pub metadata_json: String,
/// Match confidence in [0.0, 1.0]; each provider uses its own heuristic.
pub confidence: f64,
/// When this record was last fetched (UTC).
pub last_updated: DateTime<Utc>,
}
/// Supported enrichment data sources.
///
/// The serde renames match the lowercase names used by `Display`/`FromStr`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum EnrichmentSourceType {
/// MusicBrainz recording database (audio).
#[serde(rename = "musicbrainz")]
MusicBrainz,
/// The Movie Database (video).
#[serde(rename = "tmdb")]
Tmdb,
/// Last.fm track info (audio).
#[serde(rename = "lastfm")]
LastFm,
}
impl std::fmt::Display for EnrichmentSourceType {
    /// Render the canonical lowercase source name (matches the serde renames).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::MusicBrainz => "musicbrainz",
            Self::Tmdb => "tmdb",
            Self::LastFm => "lastfm",
        })
    }
}
impl std::str::FromStr for EnrichmentSourceType {
    type Err = String;

    /// Parse the canonical lowercase source name produced by `Display`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        let source = match s {
            "musicbrainz" => Self::MusicBrainz,
            "tmdb" => Self::Tmdb,
            "lastfm" => Self::LastFm,
            _ => return Err(format!("unknown enrichment source: {s}")),
        };
        Ok(source)
    }
}
/// Trait for metadata enrichment providers.
#[async_trait::async_trait]
pub trait MetadataEnricher: Send + Sync {
/// Which external source this enricher queries.
fn source(&self) -> EnrichmentSourceType;
/// Fetch metadata for `item`; `Ok(None)` means "no match / not applicable".
async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>>;
}

View file

@ -0,0 +1,134 @@
//! MusicBrainz metadata enrichment for audio files.
use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
/// MusicBrainz recording-search enricher for audio items (no API key needed).
pub struct MusicBrainzEnricher {
/// HTTP client with User-Agent and timeouts preconfigured.
client: reqwest::Client,
/// API root, e.g. `https://musicbrainz.org/ws/2`.
base_url: String,
}
impl Default for MusicBrainzEnricher {
fn default() -> Self {
Self::new()
}
}
impl MusicBrainzEnricher {
/// Build an enricher with timeouts and an identifying User-Agent
/// (MusicBrainz asks API clients to send an identifiable UA string).
pub fn new() -> Self {
Self {
client: reqwest::Client::builder()
.user_agent("Pinakes/0.1 (https://github.com/notashelf/pinakes)")
.timeout(Duration::from_secs(10))
.connect_timeout(Duration::from_secs(5))
// Building a client with only UA + timeouts set cannot fail in practice.
.build()
.expect("failed to build HTTP client with configured timeouts"),
base_url: "https://musicbrainz.org/ws/2".to_string(),
}
}
}
/// Backslash-escape every Lucene query-syntax metacharacter in `s` so that
/// user-supplied text is matched literally in a MusicBrainz search query.
fn escape_lucene_query(s: &str) -> String {
    // Worst case every char needs an escape, so reserve twice the input length.
    let mut escaped = String::with_capacity(s.len() * 2);
    for ch in s.chars() {
        if matches!(
            ch,
            '+' | '-' | '&' | '|' | '!' | '(' | ')' | '{' | '}' | '[' | ']' | '^' | '"' | '~'
                | '*' | '?' | ':' | '\\' | '/'
        ) {
            escaped.push('\\');
        }
        escaped.push(ch);
    }
    escaped
}
#[async_trait::async_trait]
impl MetadataEnricher for MusicBrainzEnricher {
    fn source(&self) -> EnrichmentSourceType {
        EnrichmentSourceType::MusicBrainz
    }

    /// Search MusicBrainz recordings by title (and artist, when present) and
    /// return the best match, if any.
    ///
    /// Returns `Ok(None)` when the item has no usable title, the service
    /// responds with a non-retriable error status, or no recordings match.
    /// Rate-limit responses (429/503) surface as `Err` so callers can back off.
    async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
        // A non-empty title is the minimum needed to form a query.
        let title = match &item.title {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(None),
        };
        // Build a Lucene query; user-supplied text must be escaped.
        let mut query = format!("recording:{}", escape_lucene_query(title));
        if let Some(ref artist) = item.artist {
            query.push_str(&format!(" AND artist:{}", escape_lucene_query(artist)));
        }
        let url = format!("{}/recording/", self.base_url);
        let resp = self
            .client
            .get(&url)
            // Plain &str pairs avoid the previous per-call String allocations.
            .query(&[("query", query.as_str()), ("fmt", "json"), ("limit", "1")])
            .send()
            .await
            .map_err(|e| {
                PinakesError::MetadataExtraction(format!("MusicBrainz request failed: {e}"))
            })?;
        if !resp.status().is_success() {
            let status = resp.status();
            // MusicBrainz signals throttling with 429/503; propagate so the
            // caller can retry later instead of silently dropping the item.
            if status == reqwest::StatusCode::TOO_MANY_REQUESTS
                || status == reqwest::StatusCode::SERVICE_UNAVAILABLE
            {
                return Err(PinakesError::MetadataExtraction(format!(
                    "MusicBrainz rate limited (HTTP {})",
                    status.as_u16()
                )));
            }
            // Any other failure is treated as "no match".
            return Ok(None);
        }
        let body = resp.text().await.map_err(|e| {
            PinakesError::MetadataExtraction(format!("MusicBrainz response read failed: {e}"))
        })?;
        // Parse to check if we got results
        let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
            PinakesError::MetadataExtraction(format!("MusicBrainz JSON parse failed: {e}"))
        })?;
        // Take the first recording, if any, without indexing (no panic path).
        let Some(recording) = json
            .get("recordings")
            .and_then(|r| r.as_array())
            .and_then(|r| r.first())
        else {
            return Ok(None);
        };
        let external_id = recording
            .get("id")
            .and_then(|id| id.as_str())
            .map(String::from);
        // MusicBrainz scores are 0-100; normalize to 0.0-1.0 for confidence.
        let score = recording
            .get("score")
            .and_then(|s| s.as_f64())
            .unwrap_or(0.0)
            / 100.0;
        Ok(Some(ExternalMetadata {
            id: Uuid::now_v7(),
            media_id: item.id,
            source: EnrichmentSourceType::MusicBrainz,
            external_id,
            metadata_json: body,
            confidence: score,
            last_updated: Utc::now(),
        }))
    }
}

View file

@ -0,0 +1,109 @@
//! TMDB (The Movie Database) metadata enrichment for video files.
use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
/// TMDB movie-search enricher for video items.
pub struct TmdbEnricher {
/// HTTP client preconfigured with request/connect timeouts.
client: reqwest::Client,
/// TMDB API key sent with every request.
api_key: String,
/// API root, e.g. `https://api.themoviedb.org/3`.
base_url: String,
}
impl TmdbEnricher {
/// Build an enricher using the given TMDB API key.
///
/// The client enforces a 10s request timeout and a 5s connect timeout so a
/// slow upstream cannot stall enrichment indefinitely.
pub fn new(api_key: String) -> Self {
Self {
client: reqwest::Client::builder()
.timeout(Duration::from_secs(10))
.connect_timeout(Duration::from_secs(5))
// Building a client with only timeouts set cannot fail in practice.
.build()
.expect("failed to build HTTP client with configured timeouts"),
api_key,
base_url: "https://api.themoviedb.org/3".to_string(),
}
}
}
#[async_trait::async_trait]
impl MetadataEnricher for TmdbEnricher {
    fn source(&self) -> EnrichmentSourceType {
        EnrichmentSourceType::Tmdb
    }

    /// Search TMDB's movie index by title and return the top hit, if any.
    ///
    /// Returns `Ok(None)` for items without a title, empty result sets, and
    /// transient HTTP failures (including 429 rate limiting, which is logged).
    /// An invalid API key (401) is a configuration error and surfaces as `Err`.
    async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
        let title = match &item.title {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(None),
        };
        let url = format!("{}/search/movie", self.base_url);
        let resp = self
            .client
            .get(&url)
            // Plain &str pairs avoid the previous per-call String allocations.
            .query(&[
                ("api_key", self.api_key.as_str()),
                ("query", title.as_str()),
                ("page", "1"),
            ])
            .send()
            .await
            .map_err(|e| PinakesError::MetadataExtraction(format!("TMDB request failed: {e}")))?;
        if !resp.status().is_success() {
            let status = resp.status();
            if status == reqwest::StatusCode::UNAUTHORIZED {
                // A bad key will never succeed on retry; fail loudly.
                return Err(PinakesError::MetadataExtraction(
                    "TMDB API key is invalid (401)".into(),
                ));
            }
            if status == reqwest::StatusCode::TOO_MANY_REQUESTS {
                tracing::warn!("TMDB rate limit exceeded (429)");
                return Ok(None);
            }
            tracing::debug!(status = %status, "TMDB search returned non-success status");
            return Ok(None);
        }
        let body = resp.text().await.map_err(|e| {
            PinakesError::MetadataExtraction(format!("TMDB response read failed: {e}"))
        })?;
        let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
            PinakesError::MetadataExtraction(format!("TMDB JSON parse failed: {e}"))
        })?;
        // Take the first result, if any, without indexing (no panic path).
        let Some(movie) = json
            .get("results")
            .and_then(|r| r.as_array())
            .and_then(|r| r.first())
        else {
            return Ok(None);
        };
        let external_id = match movie.get("id").and_then(|id| id.as_i64()) {
            Some(id) => id.to_string(),
            None => return Ok(None),
        };
        let popularity = movie
            .get("popularity")
            .and_then(|p| p.as_f64())
            .unwrap_or(0.0);
        // Normalize popularity to 0-1 range (TMDB popularity can be very high)
        let confidence = (popularity / 100.0).min(1.0);
        Ok(Some(ExternalMetadata {
            id: Uuid::now_v7(),
            media_id: item.id,
            source: EnrichmentSourceType::Tmdb,
            external_id: Some(external_id),
            metadata_json: body,
            confidence,
            last_updated: Utc::now(),
        }))
    }
}

View file

@ -42,6 +42,12 @@ pub enum PinakesError {
#[error("invalid operation: {0}")]
InvalidOperation(String),
#[error("authentication error: {0}")]
Authentication(String),
#[error("authorization error: {0}")]
Authorization(String),
}
impl From<rusqlite::Error> for PinakesError {
@ -56,4 +62,10 @@ impl From<tokio_postgres::Error> for PinakesError {
}
}
impl From<serde_json::Error> for PinakesError {
fn from(e: serde_json::Error) -> Self {
PinakesError::Database(format!("JSON serialization error: {}", e))
}
}
pub type Result<T> = std::result::Result<T, PinakesError>;

View file

@ -27,6 +27,27 @@ pub enum PinakesEvent {
expected: String,
actual: String,
},
MediaRated {
media_id: String,
user_id: String,
stars: u8,
},
MediaCommented {
media_id: String,
user_id: String,
},
PlaylistCreated {
playlist_id: String,
owner_id: String,
},
TranscodeStarted {
media_id: String,
profile: String,
},
TranscodeCompleted {
media_id: String,
profile: String,
},
}
impl PinakesEvent {
@ -37,6 +58,11 @@ impl PinakesEvent {
Self::MediaDeleted { .. } => "media_deleted",
Self::ScanCompleted { .. } => "scan_completed",
Self::IntegrityMismatch { .. } => "integrity_mismatch",
Self::MediaRated { .. } => "media_rated",
Self::MediaCommented { .. } => "media_commented",
Self::PlaylistCreated { .. } => "playlist_created",
Self::TranscodeStarted { .. } => "transcode_started",
Self::TranscodeCompleted { .. } => "transcode_completed",
}
}
}

View file

@ -23,7 +23,7 @@ pub async fn export_library(
limit: u64::MAX,
sort: None,
};
let items = storage.list_media(&&pagination).await?;
let items = storage.list_media(&pagination).await?;
let count = items.len();
match format {

View file

@ -64,9 +64,12 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
let extracted = {
let path_clone = path.clone();
tokio::task::spawn_blocking(move || metadata::extract_metadata(&path_clone, media_type))
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
let media_type_clone = media_type.clone();
tokio::task::spawn_blocking(move || {
metadata::extract_metadata(&path_clone, media_type_clone)
})
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
};
let file_name = path
@ -82,8 +85,9 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
let thumb_path = {
let source = path.clone();
let thumb_dir = thumbnail::default_thumbnail_dir();
let media_type_clone = media_type.clone();
tokio::task::spawn_blocking(move || {
thumbnail::generate_thumbnail(media_id, &source, media_type, &thumb_dir)
thumbnail::generate_thumbnail(media_id, &source, media_type_clone, &thumb_dir)
})
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??

View file

@ -184,13 +184,13 @@ pub async fn cleanup_orphaned_thumbnails(
let entries = std::fs::read_dir(thumbnail_dir)?;
for entry in entries.flatten() {
let path = entry.path();
if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
if !known_ids.contains(stem) {
if let Err(e) = std::fs::remove_file(&path) {
warn!(path = %path.display(), error = %e, "failed to remove orphaned thumbnail");
} else {
removed += 1;
}
if let Some(stem) = path.file_stem().and_then(|s| s.to_str())
&& !known_ids.contains(stem)
{
if let Err(e) = std::fs::remove_file(&path) {
warn!(path = %path.display(), error = %e, "failed to remove orphaned thumbnail");
} else {
removed += 1;
}
}
}

View file

@ -29,6 +29,14 @@ pub enum JobKind {
format: ExportFormat,
destination: PathBuf,
},
Transcode {
media_id: MediaId,
profile: String,
},
Enrich {
media_ids: Vec<MediaId>,
},
CleanupAnalytics,
}
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,7 +1,9 @@
pub mod analytics;
pub mod audit;
pub mod cache;
pub mod collections;
pub mod config;
pub mod enrichment;
pub mod error;
pub mod events;
pub mod export;
@ -13,9 +15,15 @@ pub mod media_type;
pub mod metadata;
pub mod model;
pub mod opener;
pub mod playlists;
pub mod plugin;
pub mod scan;
pub mod scheduler;
pub mod search;
pub mod social;
pub mod storage;
pub mod subtitles;
pub mod tags;
pub mod thumbnail;
pub mod transcode;
pub mod users;

View file

@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MediaType {
pub enum BuiltinMediaType {
// Audio
Mp3,
Flac,
@ -60,7 +60,48 @@ pub enum MediaCategory {
Image,
}
impl MediaType {
impl BuiltinMediaType {
/// Get the unique ID for this media type
/// (the `Debug` variant name lowercased, e.g. `Mp3` -> "mp3").
pub fn id(&self) -> String {
format!("{:?}", self).to_lowercase()
}
/// Get the display name for this media type
pub fn name(&self) -> String {
    // Match to a static label first, then allocate once — instead of a
    // separate `to_string()` on every arm.
    let label = match self {
        Self::Mp3 => "MP3 Audio",
        Self::Flac => "FLAC Audio",
        Self::Ogg => "OGG Audio",
        Self::Wav => "WAV Audio",
        Self::Aac => "AAC Audio",
        Self::Opus => "Opus Audio",
        Self::Mp4 => "MP4 Video",
        Self::Mkv => "MKV Video",
        Self::Avi => "AVI Video",
        Self::Webm => "WebM Video",
        Self::Pdf => "PDF Document",
        Self::Epub => "EPUB eBook",
        Self::Djvu => "DjVu Document",
        Self::Markdown => "Markdown",
        Self::PlainText => "Plain Text",
        Self::Jpeg => "JPEG Image",
        Self::Png => "PNG Image",
        Self::Gif => "GIF Image",
        Self::Webp => "WebP Image",
        Self::Svg => "SVG Image",
        Self::Avif => "AVIF Image",
        Self::Tiff => "TIFF Image",
        Self::Bmp => "BMP Image",
        Self::Cr2 => "Canon RAW (CR2)",
        Self::Nef => "Nikon RAW (NEF)",
        Self::Arw => "Sony RAW (ARW)",
        Self::Dng => "Adobe DNG RAW",
        Self::Orf => "Olympus RAW (ORF)",
        Self::Rw2 => "Panasonic RAW (RW2)",
        Self::Heic => "HEIC Image",
    };
    label.to_string()
}
pub fn from_extension(ext: &str) -> Option<Self> {
match ext.to_ascii_lowercase().as_str() {
"mp3" => Some(Self::Mp3),

View file

@ -0,0 +1,232 @@
//! Extensible media type system
//!
//! This module provides an extensible media type system that supports both
//! built-in media types and plugin-registered custom types.
use serde::{Deserialize, Serialize};
use std::path::Path;
pub mod builtin;
pub mod registry;
pub use builtin::{BuiltinMediaType, MediaCategory};
pub use registry::{MediaTypeDescriptor, MediaTypeRegistry};
/// Media type identifier - can be either built-in or custom
///
/// With `#[serde(untagged)]`, a built-in type round-trips as its lowercase
/// name (e.g. "mp3") and any other string deserializes as `Custom`, keeping
/// previously serialized data backward compatible.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(untagged)]
pub enum MediaType {
/// Built-in media type (backward compatible)
Builtin(BuiltinMediaType),
/// Custom media type from a plugin
Custom(String),
}
impl MediaType {
/// Create a new custom media type
pub fn custom(id: impl Into<String>) -> Self {
Self::Custom(id.into())
}
/// Get the type ID as a string
pub fn id(&self) -> String {
match self {
Self::Builtin(b) => b.id(),
Self::Custom(id) => id.clone(),
}
}
/// Get the display name for this media type
/// For custom types without a registry, returns the ID as the name
pub fn name(&self) -> String {
match self {
Self::Builtin(b) => b.name(),
Self::Custom(id) => id.clone(),
}
}
/// Get the display name for this media type with registry support
pub fn name_with_registry(&self, registry: &MediaTypeRegistry) -> String {
match self {
Self::Builtin(b) => b.name(),
Self::Custom(id) => registry
.get(id)
.map(|d| d.name.clone())
// Unknown custom IDs fall back to the raw ID.
.unwrap_or_else(|| id.clone()),
}
}
/// Get the category for this media type
/// For custom types without a registry, returns MediaCategory::Document as default
pub fn category(&self) -> MediaCategory {
match self {
Self::Builtin(b) => b.category(),
Self::Custom(_) => MediaCategory::Document,
}
}
/// Get the category for this media type with registry support
pub fn category_with_registry(&self, registry: &MediaTypeRegistry) -> MediaCategory {
match self {
Self::Builtin(b) => b.category(),
Self::Custom(id) => registry
.get(id)
.and_then(|d| d.category)
.unwrap_or(MediaCategory::Document),
}
}
/// Get the MIME type
/// For custom types without a registry, returns "application/octet-stream"
pub fn mime_type(&self) -> String {
match self {
Self::Builtin(b) => b.mime_type().to_string(),
Self::Custom(_) => "application/octet-stream".to_string(),
}
}
/// Get the MIME type with registry support
pub fn mime_type_with_registry(&self, registry: &MediaTypeRegistry) -> String {
match self {
Self::Builtin(b) => b.mime_type().to_string(),
Self::Custom(id) => registry
.get(id)
// A descriptor may list several MIME types; the first is canonical.
.and_then(|d| d.mime_types.first().cloned())
.unwrap_or_else(|| "application/octet-stream".to_string()),
}
}
/// Get file extensions
/// For custom types without a registry, returns an empty vec
pub fn extensions(&self) -> Vec<String> {
match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(),
Self::Custom(_) => vec![],
}
}
/// Get file extensions with registry support
pub fn extensions_with_registry(&self, registry: &MediaTypeRegistry) -> Vec<String> {
match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(),
Self::Custom(id) => registry
.get(id)
.map(|d| d.extensions.clone())
.unwrap_or_default(),
}
}
/// Check if this is a RAW image format
/// (always false for custom types; descriptors carry no RAW flag)
pub fn is_raw(&self) -> bool {
match self {
Self::Builtin(b) => b.is_raw(),
Self::Custom(_) => false,
}
}
/// Resolve a media type from file extension (built-in types only)
/// Use from_extension_with_registry for custom types
pub fn from_extension(ext: &str) -> Option<Self> {
BuiltinMediaType::from_extension(ext).map(Self::Builtin)
}
/// Resolve a media type from file extension with registry (includes custom types)
/// Built-in types take precedence over custom ones for the same extension.
pub fn from_extension_with_registry(ext: &str, registry: &MediaTypeRegistry) -> Option<Self> {
// Try built-in types first
if let Some(builtin) = BuiltinMediaType::from_extension(ext) {
return Some(Self::Builtin(builtin));
}
// Try registered custom types
registry
.get_by_extension(ext)
.map(|desc| Self::Custom(desc.id.clone()))
}
/// Resolve a media type from file path (built-in types only)
/// Use from_path_with_registry for custom types
pub fn from_path(path: &Path) -> Option<Self> {
path.extension()
.and_then(|e| e.to_str())
.and_then(Self::from_extension)
}
/// Resolve a media type from file path with registry (includes custom types)
pub fn from_path_with_registry(path: &Path, registry: &MediaTypeRegistry) -> Option<Self> {
path.extension()
.and_then(|e| e.to_str())
.and_then(|ext| Self::from_extension_with_registry(ext, registry))
}
}
// Implement From<BuiltinMediaType> for easier conversion
// (also gives callers `Into<MediaType>` via the blanket impl).
impl From<BuiltinMediaType> for MediaType {
fn from(builtin: BuiltinMediaType) -> Self {
Self::Builtin(builtin)
}
}
#[cfg(test)]
mod tests {
use super::*;
// Built-in variants delegate id/MIME/category to BuiltinMediaType.
#[test]
fn test_builtin_media_type() {
let mt = MediaType::Builtin(BuiltinMediaType::Mp3);
assert_eq!(mt.id(), "mp3");
assert_eq!(mt.mime_type(), "audio/mpeg");
assert_eq!(mt.category(), MediaCategory::Audio);
}
// Custom types resolve name/MIME/category through the registry.
#[test]
fn test_custom_media_type() {
let mut registry = MediaTypeRegistry::new();
let descriptor = MediaTypeDescriptor {
id: "heif".to_string(),
name: "HEIF Image".to_string(),
category: Some(MediaCategory::Image),
extensions: vec!["heif".to_string()],
mime_types: vec!["image/heif".to_string()],
plugin_id: Some("heif-plugin".to_string()),
};
registry.register(descriptor).unwrap();
let mt = MediaType::custom("heif");
assert_eq!(mt.id(), "heif");
assert_eq!(mt.mime_type_with_registry(&registry), "image/heif");
assert_eq!(mt.category_with_registry(&registry), MediaCategory::Image);
}
// Known extensions resolve to built-ins even with an empty registry.
#[test]
fn test_from_extension_builtin() {
let registry = MediaTypeRegistry::new();
let mt = MediaType::from_extension_with_registry("mp3", &registry);
assert!(mt.is_some());
assert_eq!(mt.unwrap(), MediaType::Builtin(BuiltinMediaType::Mp3));
}
// Unknown extensions fall through to registered custom types.
#[test]
fn test_from_extension_custom() {
let mut registry = MediaTypeRegistry::new();
let descriptor = MediaTypeDescriptor {
id: "customformat".to_string(),
name: "Custom Format".to_string(),
category: Some(MediaCategory::Image),
extensions: vec!["xyz".to_string()],
mime_types: vec!["application/x-custom".to_string()],
plugin_id: Some("custom-plugin".to_string()),
};
registry.register(descriptor).unwrap();
let mt = MediaType::from_extension_with_registry("xyz", &registry);
assert!(mt.is_some());
assert_eq!(mt.unwrap(), MediaType::custom("customformat"));
}
}

View file

@ -0,0 +1,285 @@
//! Media type registry for managing both built-in and custom media types
use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use super::MediaCategory;
/// Descriptor for a media type (built-in or custom)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MediaTypeDescriptor {
/// Unique identifier
pub id: String,
/// Display name
pub name: String,
/// Category
pub category: Option<MediaCategory>,
/// File extensions
/// (matched case-insensitively; the registry lowercases them)
pub extensions: Vec<String>,
/// MIME types
/// (consumers treat the first entry as canonical)
pub mime_types: Vec<String>,
/// Plugin that registered this type (None for built-in types)
pub plugin_id: Option<String>,
}
/// Registry for media types
///
/// Keeps a by-ID table plus a lowercase-extension index for fast lookup.
#[derive(Debug, Clone)]
pub struct MediaTypeRegistry {
/// Map of media type ID to descriptor
types: HashMap<String, MediaTypeDescriptor>,
/// Map of extension to media type ID
/// (keys stored lowercased; a single owning type per extension)
extension_map: HashMap<String, String>,
}
impl MediaTypeRegistry {
/// Create a new empty registry
// NOTE(review): consider also implementing `Default` for API ergonomics.
pub fn new() -> Self {
Self {
types: HashMap::new(),
extension_map: HashMap::new(),
}
}
/// Register a new media type.
///
/// Fails if a type with the same ID already exists. Extensions already
/// claimed by another type are left untouched (first registration wins),
/// so registration order matters for overlapping extensions.
pub fn register(&mut self, descriptor: MediaTypeDescriptor) -> Result<()> {
    // Reject duplicate IDs up front so we never clobber an existing type.
    if self.types.contains_key(&descriptor.id) {
        return Err(anyhow!("Media type already registered: {}", descriptor.id));
    }
    // Map each extension to this type unless an earlier registration already
    // claimed it — the entry API does the check-and-insert in one lookup.
    for ext in &descriptor.extensions {
        self.extension_map
            .entry(ext.to_lowercase())
            .or_insert_with(|| descriptor.id.clone());
    }
    // Register the type
    self.types.insert(descriptor.id.clone(), descriptor);
    Ok(())
}
/// Unregister a media type
pub fn unregister(&mut self, id: &str) -> Result<()> {
let descriptor = self
.types
.remove(id)
.ok_or_else(|| anyhow!("Media type not found: {}", id))?;
// Remove extensions
for ext in &descriptor.extensions {
let ext_lower = ext.to_lowercase();
if self.extension_map.get(&ext_lower) == Some(&descriptor.id) {
self.extension_map.remove(&ext_lower);
}
}
Ok(())
}
/// Get a media type descriptor by ID
pub fn get(&self, id: &str) -> Option<&MediaTypeDescriptor> {
self.types.get(id)
}
/// Get a media type by file extension
pub fn get_by_extension(&self, ext: &str) -> Option<&MediaTypeDescriptor> {
let ext_lower = ext.to_lowercase();
self.extension_map
.get(&ext_lower)
.and_then(|id| self.types.get(id))
}
/// List all registered media types
pub fn list_all(&self) -> Vec<&MediaTypeDescriptor> {
self.types.values().collect()
}
/// List media types from a specific plugin
pub fn list_by_plugin(&self, plugin_id: &str) -> Vec<&MediaTypeDescriptor> {
self.types
.values()
.filter(|d| d.plugin_id.as_deref() == Some(plugin_id))
.collect()
}
/// List built-in media types (plugin_id is None)
pub fn list_builtin(&self) -> Vec<&MediaTypeDescriptor> {
self.types
.values()
.filter(|d| d.plugin_id.is_none())
.collect()
}
/// Get count of registered types
pub fn count(&self) -> usize {
self.types.len()
}
/// Check if a media type is registered
pub fn contains(&self, id: &str) -> bool {
self.types.contains_key(id)
}
/// Unregister all types from a specific plugin
pub fn unregister_plugin(&mut self, plugin_id: &str) -> Result<usize> {
let type_ids: Vec<String> = self
.types
.values()
.filter(|d| d.plugin_id.as_deref() == Some(plugin_id))
.map(|d| d.id.clone())
.collect();
let count = type_ids.len();
for id in type_ids {
self.unregister(&id)?;
}
Ok(count)
}
}
// Default delegates to `new()` so the registry can be used with
// `..Default::default()` and derive-based construction.
impl Default for MediaTypeRegistry {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Build a minimal descriptor owned by "test-plugin" for reuse in tests.
    fn create_test_descriptor(id: &str, ext: &str) -> MediaTypeDescriptor {
        MediaTypeDescriptor {
            id: id.to_string(),
            name: format!("{} Type", id),
            category: Some(MediaCategory::Document),
            extensions: vec![ext.to_string()],
            mime_types: vec![format!("application/{}", id)],
            plugin_id: Some("test-plugin".to_string()),
        }
    }
    #[test]
    fn test_register_and_get() {
        // Registering a type makes it retrievable by ID with its fields intact.
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor.clone()).unwrap();
        let retrieved = registry.get("test").unwrap();
        assert_eq!(retrieved.id, "test");
        assert_eq!(retrieved.name, "test Type");
    }
    #[test]
    fn test_register_duplicate() {
        // A second registration with the same ID must be rejected.
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor.clone()).unwrap();
        let result = registry.register(descriptor);
        assert!(result.is_err());
    }
    #[test]
    fn test_get_by_extension() {
        // Extension lookup resolves to the owning type, case-insensitively.
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor).unwrap();
        let retrieved = registry.get_by_extension("tst").unwrap();
        assert_eq!(retrieved.id, "test");
        // Test case insensitivity
        let retrieved = registry.get_by_extension("TST").unwrap();
        assert_eq!(retrieved.id, "test");
    }
    #[test]
    fn test_unregister() {
        // Unregistering removes both the type and its extension mapping.
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor).unwrap();
        assert!(registry.contains("test"));
        registry.unregister("test").unwrap();
        assert!(!registry.contains("test"));
        // Extension should also be removed
        assert!(registry.get_by_extension("tst").is_none());
    }
    #[test]
    fn test_list_by_plugin() {
        // list_by_plugin filters strictly by the owning plugin's ID.
        let mut registry = MediaTypeRegistry::new();
        let desc1 = MediaTypeDescriptor {
            id: "type1".to_string(),
            name: "Type 1".to_string(),
            category: Some(MediaCategory::Document),
            extensions: vec!["t1".to_string()],
            mime_types: vec!["application/type1".to_string()],
            plugin_id: Some("plugin1".to_string()),
        };
        let desc2 = MediaTypeDescriptor {
            id: "type2".to_string(),
            name: "Type 2".to_string(),
            category: Some(MediaCategory::Document),
            extensions: vec!["t2".to_string()],
            mime_types: vec!["application/type2".to_string()],
            plugin_id: Some("plugin2".to_string()),
        };
        registry.register(desc1).unwrap();
        registry.register(desc2).unwrap();
        let plugin1_types = registry.list_by_plugin("plugin1");
        assert_eq!(plugin1_types.len(), 1);
        assert_eq!(plugin1_types[0].id, "type1");
        let plugin2_types = registry.list_by_plugin("plugin2");
        assert_eq!(plugin2_types.len(), 1);
        assert_eq!(plugin2_types[0].id, "type2");
    }
    #[test]
    fn test_unregister_plugin() {
        // unregister_plugin removes every type the plugin registered and
        // reports how many were removed.
        let mut registry = MediaTypeRegistry::new();
        for i in 1..=3 {
            let desc = MediaTypeDescriptor {
                id: format!("type{}", i),
                name: format!("Type {}", i),
                category: Some(MediaCategory::Document),
                extensions: vec![format!("t{}", i)],
                mime_types: vec![format!("application/type{}", i)],
                plugin_id: Some("test-plugin".to_string()),
            };
            registry.register(desc).unwrap();
        }
        assert_eq!(registry.count(), 3);
        let removed = registry.unregister_plugin("test-plugin").unwrap();
        assert_eq!(removed, 3);
        assert_eq!(registry.count(), 0);
    }
}

View file

@ -4,7 +4,7 @@ use lofty::file::{AudioFile, TaggedFileExt};
use lofty::tag::Accessor;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -68,14 +68,14 @@ impl MetadataExtractor for AudioExtractor {
Ok(meta)
}
fn supported_types(&self) -> &[MediaType] {
&[
MediaType::Mp3,
MediaType::Flac,
MediaType::Ogg,
MediaType::Wav,
MediaType::Aac,
MediaType::Opus,
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp3),
MediaType::Builtin(BuiltinMediaType::Flac),
MediaType::Builtin(BuiltinMediaType::Ogg),
MediaType::Builtin(BuiltinMediaType::Wav),
MediaType::Builtin(BuiltinMediaType::Aac),
MediaType::Builtin(BuiltinMediaType::Opus),
]
}
}

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -10,15 +10,19 @@ pub struct DocumentExtractor;
impl MetadataExtractor for DocumentExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Pdf) => extract_pdf(path),
Some(MediaType::Epub) => extract_epub(path),
Some(MediaType::Djvu) => extract_djvu(path),
Some(MediaType::Builtin(BuiltinMediaType::Pdf)) => extract_pdf(path),
Some(MediaType::Builtin(BuiltinMediaType::Epub)) => extract_epub(path),
Some(MediaType::Builtin(BuiltinMediaType::Djvu)) => extract_djvu(path),
_ => Ok(ExtractedMetadata::default()),
}
}
fn supported_types(&self) -> &[MediaType] {
&[MediaType::Pdf, MediaType::Epub, MediaType::Djvu]
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Pdf),
MediaType::Builtin(BuiltinMediaType::Epub),
MediaType::Builtin(BuiltinMediaType::Djvu),
]
}
}

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -163,24 +163,24 @@ impl MetadataExtractor for ImageExtractor {
Ok(meta)
}
fn supported_types(&self) -> &[MediaType] {
&[
MediaType::Jpeg,
MediaType::Png,
MediaType::Gif,
MediaType::Webp,
MediaType::Avif,
MediaType::Tiff,
MediaType::Bmp,
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Jpeg),
MediaType::Builtin(BuiltinMediaType::Png),
MediaType::Builtin(BuiltinMediaType::Gif),
MediaType::Builtin(BuiltinMediaType::Webp),
MediaType::Builtin(BuiltinMediaType::Avif),
MediaType::Builtin(BuiltinMediaType::Tiff),
MediaType::Builtin(BuiltinMediaType::Bmp),
// RAW formats (TIFF-based, kamadak-exif handles these)
MediaType::Cr2,
MediaType::Nef,
MediaType::Arw,
MediaType::Dng,
MediaType::Orf,
MediaType::Rw2,
MediaType::Builtin(BuiltinMediaType::Cr2),
MediaType::Builtin(BuiltinMediaType::Nef),
MediaType::Builtin(BuiltinMediaType::Arw),
MediaType::Builtin(BuiltinMediaType::Dng),
MediaType::Builtin(BuiltinMediaType::Orf),
MediaType::Builtin(BuiltinMediaType::Rw2),
// HEIC
MediaType::Heic,
MediaType::Builtin(BuiltinMediaType::Heic),
]
}
}

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -34,7 +34,10 @@ impl MetadataExtractor for MarkdownExtractor {
Ok(meta)
}
fn supported_types(&self) -> &[MediaType] {
&[MediaType::Markdown, MediaType::PlainText]
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Markdown),
MediaType::Builtin(BuiltinMediaType::PlainText),
]
}
}

View file

@ -24,7 +24,7 @@ pub struct ExtractedMetadata {
pub trait MetadataExtractor: Send + Sync {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata>;
fn supported_types(&self) -> &[MediaType];
fn supported_types(&self) -> Vec<MediaType>;
}
pub fn extract_metadata(path: &Path, media_type: MediaType) -> Result<ExtractedMetadata> {

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -10,18 +10,16 @@ pub struct VideoExtractor;
impl MetadataExtractor for VideoExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Mkv) => extract_mkv(path),
Some(MediaType::Mp4) => extract_mp4(path),
Some(MediaType::Builtin(BuiltinMediaType::Mkv)) => extract_mkv(path),
Some(MediaType::Builtin(BuiltinMediaType::Mp4)) => extract_mp4(path),
_ => Ok(ExtractedMetadata::default()),
}
}
fn supported_types(&self) -> &[MediaType] {
&[
MediaType::Mp4,
MediaType::Mkv,
MediaType::Avi,
MediaType::Webm,
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp4),
MediaType::Builtin(BuiltinMediaType::Mkv),
]
}
}

View file

@ -0,0 +1,31 @@
//! Playlist management: ordered collections of media items.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
/// A user-owned playlist of media items.
///
/// When `is_smart` is true the membership is presumably derived from
/// `filter_query` rather than explicit `PlaylistItem` rows — TODO confirm
/// against the query layer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Playlist {
    pub id: Uuid,
    // Owning user; visibility to others is governed by `is_public`.
    pub owner_id: UserId,
    pub name: String,
    pub description: Option<String>,
    pub is_public: bool,
    // Smart playlists are filter-driven; see `filter_query`.
    pub is_smart: bool,
    pub filter_query: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// An item within a playlist at a specific position.
///
/// Identified by the (playlist_id, media_id) pair; `position` carries the
/// ordering within the playlist.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlaylistItem {
    pub playlist_id: Uuid,
    pub media_id: MediaId,
    // Sort index within the playlist; i32 presumably to match the DB column
    // type — TODO confirm schema.
    pub position: i32,
    pub added_at: DateTime<Utc>,
}

View file

@ -0,0 +1,407 @@
//! Plugin loader for discovering and loading plugins from the filesystem
use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginManifest;
use std::path::{Path, PathBuf};
use tracing::{debug, info, warn};
use walkdir::WalkDir;
/// Plugin loader handles discovery and loading of plugins from directories.
///
/// Directories are searched in order; the first directory doubles as the
/// install target for downloaded plugins (see `download_plugin`).
pub struct PluginLoader {
    /// Directories to search for plugins
    plugin_dirs: Vec<PathBuf>,
}
impl PluginLoader {
    /// Create a new plugin loader searching the given directories.
    pub fn new(plugin_dirs: Vec<PathBuf>) -> Self {
        Self { plugin_dirs }
    }
    /// Discover all plugins in configured directories.
    ///
    /// Missing directories and per-directory errors are logged and skipped;
    /// discovery never fails as a whole because one directory is bad.
    pub async fn discover_plugins(&self) -> Result<Vec<PluginManifest>> {
        let mut manifests = Vec::new();
        for dir in &self.plugin_dirs {
            if !dir.exists() {
                warn!("Plugin directory does not exist: {:?}", dir);
                continue;
            }
            info!("Discovering plugins in: {:?}", dir);
            match self.discover_in_directory(dir).await {
                Ok(found) => {
                    info!("Found {} plugins in {:?}", found.len(), dir);
                    manifests.extend(found);
                }
                Err(e) => {
                    warn!("Error discovering plugins in {:?}: {}", dir, e);
                }
            }
        }
        Ok(manifests)
    }
    /// Discover plugins in a specific directory.
    ///
    /// NOTE(review): this fn is `async` but performs only synchronous
    /// WalkDir/file I/O — consider spawn_blocking for large plugin trees.
    async fn discover_in_directory(&self, dir: &Path) -> Result<Vec<PluginManifest>> {
        let mut manifests = Vec::new();
        // Walk the directory looking for plugin.toml files
        for entry in WalkDir::new(dir)
            .max_depth(3) // Don't go too deep
            .follow_links(false)
        {
            let entry = match entry {
                Ok(e) => e,
                Err(e) => {
                    // Unreadable entries are skipped so one bad path does not
                    // abort discovery of the rest of the tree.
                    warn!("Error reading directory entry: {}", e);
                    continue;
                }
            };
            let path = entry.path();
            // Look for plugin.toml files
            if path.file_name() == Some(std::ffi::OsStr::new("plugin.toml")) {
                debug!("Found plugin manifest: {:?}", path);
                match PluginManifest::from_file(path) {
                    Ok(manifest) => {
                        info!("Loaded manifest for plugin: {}", manifest.plugin.name);
                        manifests.push(manifest);
                    }
                    Err(e) => {
                        // Invalid manifests are logged and skipped, not fatal.
                        warn!("Failed to load manifest from {:?}: {}", path, e);
                    }
                }
            }
        }
        Ok(manifests)
    }
    /// Resolve the WASM binary path from a manifest.
    ///
    /// Searches each plugin dir for `<dir>/<plugin-name>/plugin.toml`, then
    /// resolves the manifest's relative WASM path against that directory and
    /// canonicalizes it to reject paths escaping the plugin directory.
    pub fn resolve_wasm_path(&self, manifest: &PluginManifest) -> Result<PathBuf> {
        // The WASM path in the manifest is relative to the manifest file
        // We need to search for it in the plugin directories
        for dir in &self.plugin_dirs {
            // Look for a directory matching the plugin name
            let plugin_dir = dir.join(&manifest.plugin.name);
            if !plugin_dir.exists() {
                continue;
            }
            // Check for plugin.toml in this directory
            let manifest_path = plugin_dir.join("plugin.toml");
            if !manifest_path.exists() {
                continue;
            }
            // Resolve WASM path relative to this directory
            let wasm_path = plugin_dir.join(&manifest.plugin.binary.wasm);
            if wasm_path.exists() {
                // Verify the resolved path is within the plugin directory (prevent path traversal)
                let canonical_wasm = wasm_path
                    .canonicalize()
                    .map_err(|e| anyhow!("Failed to canonicalize WASM path: {}", e))?;
                let canonical_plugin_dir = plugin_dir
                    .canonicalize()
                    .map_err(|e| anyhow!("Failed to canonicalize plugin dir: {}", e))?;
                if !canonical_wasm.starts_with(&canonical_plugin_dir) {
                    return Err(anyhow!(
                        "WASM binary path escapes plugin directory: {:?}",
                        wasm_path
                    ));
                }
                return Ok(canonical_wasm);
            }
        }
        Err(anyhow!(
            "WASM binary not found for plugin: {}",
            manifest.plugin.name
        ))
    }
    /// Download a plugin from a URL.
    ///
    /// Security measures: HTTPS-only, 300s request timeout, 100 MB size cap
    /// (checked via Content-Length and again after download), extraction via
    /// external `tar -C` into the canonicalized destination, followed by a
    /// post-extraction walk verifying no file escaped the destination.
    ///
    /// NOTE(review): uses blocking std::fs/std::process inside an async fn;
    /// the whole archive is buffered in memory before writing — acceptable
    /// under the 100 MB cap but worth revisiting.
    pub async fn download_plugin(&self, url: &str) -> Result<PathBuf> {
        // Only allow HTTPS downloads
        if !url.starts_with("https://") {
            return Err(anyhow!(
                "Only HTTPS URLs are allowed for plugin downloads: {}",
                url
            ));
        }
        let dest_dir = self
            .plugin_dirs
            .first()
            .ok_or_else(|| anyhow!("No plugin directories configured"))?;
        std::fs::create_dir_all(dest_dir)?;
        // Download the archive with timeout and size limits
        let client = reqwest::Client::builder()
            .timeout(std::time::Duration::from_secs(300))
            .build()
            .map_err(|e| anyhow!("Failed to build HTTP client: {}", e))?;
        let response = client
            .get(url)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to download plugin: {}", e))?;
        if !response.status().is_success() {
            return Err(anyhow!(
                "Plugin download failed with status: {}",
                response.status()
            ));
        }
        // Check content-length header before downloading
        const MAX_PLUGIN_SIZE: u64 = 100 * 1024 * 1024; // 100 MB
        if let Some(content_length) = response.content_length()
            && content_length > MAX_PLUGIN_SIZE {
            return Err(anyhow!(
                "Plugin archive too large: {} bytes (max {} bytes)",
                content_length,
                MAX_PLUGIN_SIZE
            ));
        }
        let bytes = response
            .bytes()
            .await
            .map_err(|e| anyhow!("Failed to read plugin response: {}", e))?;
        // Check actual size after download
        // (Content-Length can be absent or lie, so re-check the real size.)
        if bytes.len() as u64 > MAX_PLUGIN_SIZE {
            return Err(anyhow!(
                "Plugin archive too large: {} bytes (max {} bytes)",
                bytes.len(),
                MAX_PLUGIN_SIZE
            ));
        }
        // Write archive to a unique temp file
        // (UUID suffix avoids clashes between concurrent downloads.)
        let temp_archive = dest_dir.join(format!(".download-{}.tar.gz", uuid::Uuid::now_v7()));
        std::fs::write(&temp_archive, &bytes)?;
        // Extract using tar with -C to target directory
        let canonical_dest = dest_dir
            .canonicalize()
            .map_err(|e| anyhow!("Failed to canonicalize dest dir: {}", e))?;
        let output = std::process::Command::new("tar")
            .args([
                "xzf",
                &temp_archive.to_string_lossy(),
                "-C",
                &canonical_dest.to_string_lossy(),
            ])
            .output()
            .map_err(|e| anyhow!("Failed to extract plugin archive: {}", e))?;
        // Clean up the archive
        let _ = std::fs::remove_file(&temp_archive);
        if !output.status.success() {
            return Err(anyhow!(
                "Failed to extract plugin archive: {}",
                String::from_utf8_lossy(&output.stderr)
            ));
        }
        // Validate that all extracted files are within dest_dir
        // NOTE(review): this walks the whole destination, so it also re-checks
        // previously installed plugins — safe, but O(all plugins) per install.
        for entry in WalkDir::new(&canonical_dest).follow_links(false) {
            let entry = entry?;
            let entry_canonical = entry.path().canonicalize()?;
            if !entry_canonical.starts_with(&canonical_dest) {
                return Err(anyhow!(
                    "Extracted file escapes destination directory: {:?}",
                    entry.path()
                ));
            }
        }
        // Find the extracted plugin directory by looking for plugin.toml
        for entry in WalkDir::new(dest_dir).max_depth(2).follow_links(false) {
            let entry = entry?;
            if entry.file_name() == "plugin.toml" {
                let plugin_dir = entry
                    .path()
                    .parent()
                    .ok_or_else(|| anyhow!("Invalid plugin.toml location"))?;
                // Validate the manifest
                let manifest = PluginManifest::from_file(entry.path())?;
                info!("Downloaded and extracted plugin: {}", manifest.plugin.name);
                return Ok(plugin_dir.to_path_buf());
            }
        }
        Err(anyhow!(
            "No plugin.toml found after extracting archive from: {}",
            url
        ))
    }
    /// Validate a plugin package on disk.
    ///
    /// Checks: directory exists, contains a parseable plugin.toml, the
    /// referenced WASM binary exists inside the package (canonicalized
    /// traversal check), and the binary starts with the `\0asm` magic.
    pub fn validate_plugin_package(&self, path: &Path) -> Result<()> {
        // Check that the path exists
        if !path.exists() {
            return Err(anyhow!("Plugin path does not exist: {:?}", path));
        }
        // Check for plugin.toml
        let manifest_path = path.join("plugin.toml");
        if !manifest_path.exists() {
            return Err(anyhow!("Missing plugin.toml in {:?}", path));
        }
        // Parse and validate manifest
        let manifest = PluginManifest::from_file(&manifest_path)?;
        // Check that WASM binary exists
        let wasm_path = path.join(&manifest.plugin.binary.wasm);
        if !wasm_path.exists() {
            return Err(anyhow!(
                "WASM binary not found: {}",
                manifest.plugin.binary.wasm
            ));
        }
        // Verify the WASM path is within the plugin directory (prevent path traversal)
        let canonical_wasm = wasm_path.canonicalize()?;
        let canonical_path = path.canonicalize()?;
        if !canonical_wasm.starts_with(&canonical_path) {
            return Err(anyhow!(
                "WASM binary path escapes plugin directory: {:?}",
                wasm_path
            ));
        }
        // Validate WASM file
        // (Only the 4-byte magic is checked; full validation happens when the
        // runtime compiles the module.)
        let wasm_bytes = std::fs::read(&wasm_path)?;
        if wasm_bytes.len() < 4 || &wasm_bytes[0..4] != b"\0asm" {
            return Err(anyhow!("Invalid WASM file: {:?}", wasm_path));
        }
        Ok(())
    }
    /// Get the on-disk directory for a given plugin name, if present in any
    /// configured plugin directory (searched in order, first match wins).
    pub fn get_plugin_dir(&self, plugin_name: &str) -> Option<PathBuf> {
        for dir in &self.plugin_dirs {
            let plugin_dir = dir.join(plugin_name);
            if plugin_dir.exists() {
                return Some(plugin_dir);
            }
        }
        None
    }
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
#[tokio::test]
async fn test_discover_plugins_empty() {
let temp_dir = TempDir::new().unwrap();
let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]);
let manifests = loader.discover_plugins().await.unwrap();
assert_eq!(manifests.len(), 0);
}
#[tokio::test]
async fn test_discover_plugins_with_manifest() {
let temp_dir = TempDir::new().unwrap();
let plugin_dir = temp_dir.path().join("test-plugin");
std::fs::create_dir(&plugin_dir).unwrap();
// Create a valid manifest
let manifest_content = r#"
[plugin]
name = "test-plugin"
version = "1.0.0"
api_version = "1.0"
kind = ["media_type"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
// Create dummy WASM file
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap();
let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]);
let manifests = loader.discover_plugins().await.unwrap();
assert_eq!(manifests.len(), 1);
assert_eq!(manifests[0].plugin.name, "test-plugin");
}
#[test]
fn test_validate_plugin_package() {
let temp_dir = TempDir::new().unwrap();
let plugin_dir = temp_dir.path().join("test-plugin");
std::fs::create_dir(&plugin_dir).unwrap();
// Create a valid manifest
let manifest_content = r#"
[plugin]
name = "test-plugin"
version = "1.0.0"
api_version = "1.0"
kind = ["media_type"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
let loader = PluginLoader::new(vec![]);
// Should fail without WASM file
assert!(loader.validate_plugin_package(&plugin_dir).is_err());
// Create valid WASM file (magic number only)
std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap();
// Should succeed now
assert!(loader.validate_plugin_package(&plugin_dir).is_ok());
}
#[test]
fn test_validate_invalid_wasm() {
let temp_dir = TempDir::new().unwrap();
let plugin_dir = temp_dir.path().join("test-plugin");
std::fs::create_dir(&plugin_dir).unwrap();
let manifest_content = r#"
[plugin]
name = "test-plugin"
version = "1.0.0"
api_version = "1.0"
kind = ["media_type"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
// Create invalid WASM file
std::fs::write(plugin_dir.join("plugin.wasm"), b"not wasm").unwrap();
let loader = PluginLoader::new(vec![]);
assert!(loader.validate_plugin_package(&plugin_dir).is_err());
}
}

View file

@ -0,0 +1,419 @@
//! Plugin system for Pinakes
//!
//! This module provides a comprehensive plugin architecture that allows extending
//! Pinakes with custom media types, metadata extractors, search backends, and more.
//!
//! # Architecture
//!
//! - Plugins are compiled to WASM and run in a sandboxed environment
//! - Capability-based security controls what plugins can access
//! - Hot-reload support for development
//! - Automatic plugin discovery from configured directories
use anyhow::Result;
use pinakes_plugin_api::{PluginContext, PluginMetadata};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{debug, error, info, warn};
pub mod loader;
pub mod registry;
pub mod runtime;
pub mod security;
pub use loader::PluginLoader;
pub use registry::{PluginRegistry, RegisteredPlugin};
pub use runtime::{WasmPlugin, WasmRuntime};
pub use security::CapabilityEnforcer;
/// Plugin manager coordinates plugin lifecycle and operations.
///
/// Owns the registry (behind an async RwLock), the WASM runtime, the loader,
/// and the capability enforcer; all plugin state flows through this type.
pub struct PluginManager {
    /// Plugin registry
    registry: Arc<RwLock<PluginRegistry>>,
    /// WASM runtime for executing plugins
    runtime: Arc<WasmRuntime>,
    /// Plugin loader for discovery and loading
    loader: PluginLoader,
    /// Capability enforcer for security
    enforcer: CapabilityEnforcer,
    /// Plugin data directory (each plugin gets a `<data_dir>/<plugin_id>` subdir)
    data_dir: PathBuf,
    /// Plugin cache directory (each plugin gets a `<cache_dir>/<plugin_id>` subdir)
    cache_dir: PathBuf,
    /// Configuration
    config: PluginManagerConfig,
}
/// Configuration for the plugin manager.
///
/// Mirrors `crate::config::PluginsConfig` (see the `From` impl below).
#[derive(Debug, Clone)]
pub struct PluginManagerConfig {
    /// Directories to search for plugins
    pub plugin_dirs: Vec<PathBuf>,
    /// Whether to enable hot-reload (for development)
    pub enable_hot_reload: bool,
    /// Whether to allow unsigned plugins
    pub allow_unsigned: bool,
    /// Maximum number of concurrent plugin operations
    pub max_concurrent_ops: usize,
    /// Plugin timeout in seconds
    pub plugin_timeout_secs: u64,
}
// Conservative defaults: no plugin dirs, hot-reload off, signed-only,
// modest concurrency, 30s per-plugin timeout.
impl Default for PluginManagerConfig {
    fn default() -> Self {
        Self {
            plugin_dirs: vec![],
            enable_hot_reload: false,
            allow_unsigned: false,
            max_concurrent_ops: 4,
            plugin_timeout_secs: 30,
        }
    }
}
// Field-for-field conversion from the user-facing config struct; keeps the
// plugin subsystem decoupled from the global config module.
impl From<crate::config::PluginsConfig> for PluginManagerConfig {
    fn from(cfg: crate::config::PluginsConfig) -> Self {
        Self {
            plugin_dirs: cfg.plugin_dirs,
            enable_hot_reload: cfg.enable_hot_reload,
            allow_unsigned: cfg.allow_unsigned,
            max_concurrent_ops: cfg.max_concurrent_ops,
            plugin_timeout_secs: cfg.plugin_timeout_secs,
        }
    }
}
impl PluginManager {
    /// Create a new plugin manager.
    ///
    /// Creates the data/cache directories eagerly so later per-plugin
    /// subdirectory creation cannot fail on a missing parent.
    pub fn new(data_dir: PathBuf, cache_dir: PathBuf, config: PluginManagerConfig) -> Result<Self> {
        // Ensure directories exist
        std::fs::create_dir_all(&data_dir)?;
        std::fs::create_dir_all(&cache_dir)?;
        let runtime = Arc::new(WasmRuntime::new()?);
        let registry = Arc::new(RwLock::new(PluginRegistry::new()));
        let loader = PluginLoader::new(config.plugin_dirs.clone());
        let enforcer = CapabilityEnforcer::new();
        Ok(Self {
            registry,
            runtime,
            loader,
            enforcer,
            data_dir,
            cache_dir,
            config,
        })
    }
    /// Discover and load all plugins from configured directories.
    ///
    /// Per-plugin load failures are logged and skipped; returns the IDs of
    /// the plugins that loaded successfully.
    pub async fn discover_and_load_all(&self) -> Result<Vec<String>> {
        info!("Discovering plugins from {:?}", self.config.plugin_dirs);
        let manifests = self.loader.discover_plugins().await?;
        let mut loaded_plugins = Vec::new();
        for manifest in manifests {
            match self.load_plugin_from_manifest(&manifest).await {
                Ok(plugin_id) => {
                    info!("Loaded plugin: {}", plugin_id);
                    loaded_plugins.push(plugin_id);
                }
                Err(e) => {
                    warn!("Failed to load plugin {}: {}", manifest.plugin.name, e);
                }
            }
        }
        Ok(loaded_plugins)
    }
    /// Load a plugin from a manifest file.
    ///
    /// Steps: validate the ID (path-traversal guard), dedupe against the
    /// registry, validate capabilities, create per-plugin data/cache dirs,
    /// build the context, load the WASM, call `initialize`, then register.
    /// A failed `initialize` still registers the plugin, but disabled.
    async fn load_plugin_from_manifest(
        &self,
        manifest: &pinakes_plugin_api::PluginManifest,
    ) -> Result<String> {
        let plugin_id = manifest.plugin_id();
        // Validate plugin_id to prevent path traversal
        // (plugin_id is used as a directory name under data_dir/cache_dir)
        if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") {
            return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
        }
        // Check if already loaded
        // (scoped block drops the read guard before we take the write lock later)
        {
            let registry = self.registry.read().await;
            if registry.is_loaded(&plugin_id) {
                return Ok(plugin_id);
            }
        }
        // Validate capabilities
        let capabilities = manifest.to_capabilities();
        self.enforcer.validate_capabilities(&capabilities)?;
        // Create plugin context
        let plugin_data_dir = self.data_dir.join(&plugin_id);
        let plugin_cache_dir = self.cache_dir.join(&plugin_id);
        tokio::fs::create_dir_all(&plugin_data_dir).await?;
        tokio::fs::create_dir_all(&plugin_cache_dir).await?;
        let context = PluginContext {
            data_dir: plugin_data_dir,
            cache_dir: plugin_cache_dir,
            config: manifest
                .config
                .iter()
                .map(|(k, v)| {
                    (
                        k.clone(),
                        // Unserializable config values degrade to Null with a
                        // warning instead of failing the whole load.
                        serde_json::to_value(v).unwrap_or_else(|e| {
                            tracing::warn!("failed to serialize config value for key {}: {}", k, e);
                            serde_json::Value::Null
                        }),
                    )
                })
                .collect(),
            capabilities: capabilities.clone(),
        };
        // Load WASM binary
        let wasm_path = self.loader.resolve_wasm_path(manifest)?;
        let wasm_plugin = self.runtime.load_plugin(&wasm_path, context).await?;
        // Initialize plugin
        // (failure is non-fatal: the plugin is registered but left disabled)
        let init_succeeded = match wasm_plugin.call_function("initialize", &[]).await {
            Ok(_) => true,
            Err(e) => {
                tracing::warn!(plugin_id = %plugin_id, "plugin initialization failed: {}", e);
                false
            }
        };
        // Register plugin
        let metadata = PluginMetadata {
            id: plugin_id.clone(),
            name: manifest.plugin.name.clone(),
            version: manifest.plugin.version.clone(),
            author: manifest.plugin.author.clone().unwrap_or_default(),
            description: manifest.plugin.description.clone().unwrap_or_default(),
            api_version: manifest.plugin.api_version.clone(),
            capabilities_required: capabilities,
        };
        // Derive manifest_path from the loader's plugin directories
        // (kept so hot-reload can re-read the manifest from disk)
        let manifest_path = self
            .loader
            .get_plugin_dir(&manifest.plugin.name)
            .map(|dir| dir.join("plugin.toml"));
        let registered = RegisteredPlugin {
            id: plugin_id.clone(),
            metadata,
            wasm_plugin,
            manifest: manifest.clone(),
            manifest_path,
            enabled: init_succeeded,
        };
        let mut registry = self.registry.write().await;
        registry.register(registered)?;
        Ok(plugin_id)
    }
    /// Install a plugin from a file or URL.
    ///
    /// NOTE(review): http:// sources reach `download_plugin`, which rejects
    /// non-HTTPS URLs — so http installs always fail there by design.
    pub async fn install_plugin(&self, source: &str) -> Result<String> {
        info!("Installing plugin from: {}", source);
        // Download/copy plugin to plugins directory
        let plugin_path = if source.starts_with("http://") || source.starts_with("https://") {
            // Download from URL
            self.loader.download_plugin(source).await?
        } else {
            // Copy from local file
            PathBuf::from(source)
        };
        // Load the manifest
        let manifest_path = plugin_path.join("plugin.toml");
        let manifest = pinakes_plugin_api::PluginManifest::from_file(&manifest_path)?;
        // Load the plugin
        self.load_plugin_from_manifest(&manifest).await
    }
    /// Uninstall a plugin: shutdown, unregister, then delete its data and
    /// cache directories.
    pub async fn uninstall_plugin(&self, plugin_id: &str) -> Result<()> {
        // Validate plugin_id to prevent path traversal
        // (plugin_id is joined into filesystem paths below)
        if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") {
            return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
        }
        info!("Uninstalling plugin: {}", plugin_id);
        // Shutdown plugin first
        self.shutdown_plugin(plugin_id).await?;
        // Remove from registry
        let mut registry = self.registry.write().await;
        registry.unregister(plugin_id)?;
        // Remove plugin data and cache
        let plugin_data_dir = self.data_dir.join(plugin_id);
        let plugin_cache_dir = self.cache_dir.join(plugin_id);
        if plugin_data_dir.exists() {
            std::fs::remove_dir_all(&plugin_data_dir)?;
        }
        if plugin_cache_dir.exists() {
            std::fs::remove_dir_all(&plugin_cache_dir)?;
        }
        Ok(())
    }
    /// Enable a plugin by ID.
    pub async fn enable_plugin(&self, plugin_id: &str) -> Result<()> {
        let mut registry = self.registry.write().await;
        registry.enable(plugin_id)
    }
    /// Disable a plugin by ID.
    pub async fn disable_plugin(&self, plugin_id: &str) -> Result<()> {
        let mut registry = self.registry.write().await;
        registry.disable(plugin_id)
    }
    /// Shutdown a specific plugin.
    ///
    /// The plugin's `shutdown` export is called best-effort (errors ignored);
    /// the registry read guard is held across the await (tokio RwLock, so
    /// this is allowed, but it blocks writers for the duration).
    pub async fn shutdown_plugin(&self, plugin_id: &str) -> Result<()> {
        debug!("Shutting down plugin: {}", plugin_id);
        let registry = self.registry.read().await;
        if let Some(plugin) = registry.get(plugin_id) {
            plugin.wasm_plugin.call_function("shutdown", &[]).await.ok();
            Ok(())
        } else {
            Err(anyhow::anyhow!("Plugin not found: {}", plugin_id))
        }
    }
    /// Shutdown all plugins; individual failures are logged, not propagated.
    pub async fn shutdown_all(&self) -> Result<()> {
        info!("Shutting down all plugins");
        let registry = self.registry.read().await;
        let plugin_ids: Vec<String> = registry.list_all().iter().map(|p| p.id.clone()).collect();
        for plugin_id in plugin_ids {
            if let Err(e) = self.shutdown_plugin(&plugin_id).await {
                error!("Failed to shutdown plugin {}: {}", plugin_id, e);
            }
        }
        Ok(())
    }
    /// Get metadata for all registered plugins.
    pub async fn list_plugins(&self) -> Vec<PluginMetadata> {
        let registry = self.registry.read().await;
        registry
            .list_all()
            .iter()
            .map(|p| p.metadata.clone())
            .collect()
    }
    /// Get plugin metadata by ID, if registered.
    pub async fn get_plugin(&self, plugin_id: &str) -> Option<PluginMetadata> {
        let registry = self.registry.read().await;
        registry.get(plugin_id).map(|p| p.metadata.clone())
    }
    /// Check if a plugin is loaded and enabled (false if not loaded at all).
    pub async fn is_plugin_enabled(&self, plugin_id: &str) -> bool {
        let registry = self.registry.read().await;
        registry.is_enabled(plugin_id).unwrap_or(false)
    }
    /// Reload a plugin (for hot-reload during development).
    ///
    /// Requires `enable_hot_reload`; re-reads the manifest from disk when a
    /// path is known (falling back to the cached copy), then shuts down,
    /// unregisters, and loads the plugin fresh.
    pub async fn reload_plugin(&self, plugin_id: &str) -> Result<()> {
        if !self.config.enable_hot_reload {
            return Err(anyhow::anyhow!("Hot-reload is disabled"));
        }
        info!("Reloading plugin: {}", plugin_id);
        // Re-read the manifest from disk if possible, falling back to cached version
        let manifest = {
            let registry = self.registry.read().await;
            let plugin = registry
                .get(plugin_id)
                .ok_or_else(|| anyhow::anyhow!("Plugin not found"))?;
            if let Some(ref manifest_path) = plugin.manifest_path {
                pinakes_plugin_api::PluginManifest::from_file(manifest_path).unwrap_or_else(|e| {
                    warn!("Failed to re-read manifest from disk, using cached: {}", e);
                    plugin.manifest.clone()
                })
            } else {
                plugin.manifest.clone()
            }
        };
        // Shutdown and unload current version
        self.shutdown_plugin(plugin_id).await?;
        {
            let mut registry = self.registry.write().await;
            registry.unregister(plugin_id)?;
        }
        // Reload from manifest
        self.load_plugin_from_manifest(&manifest).await?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    #[tokio::test]
    async fn test_plugin_manager_creation() {
        // Construction succeeds and eagerly creates the data/cache dirs.
        let temp_dir = TempDir::new().unwrap();
        let data_dir = temp_dir.path().join("data");
        let cache_dir = temp_dir.path().join("cache");
        let config = PluginManagerConfig::default();
        let manager = PluginManager::new(data_dir.clone(), cache_dir.clone(), config);
        assert!(manager.is_ok());
        assert!(data_dir.exists());
        assert!(cache_dir.exists());
    }
    #[tokio::test]
    async fn test_list_plugins_empty() {
        // A freshly created manager with no plugin dirs lists zero plugins.
        let temp_dir = TempDir::new().unwrap();
        let data_dir = temp_dir.path().join("data");
        let cache_dir = temp_dir.path().join("cache");
        let config = PluginManagerConfig::default();
        let manager = PluginManager::new(data_dir, cache_dir, config).unwrap();
        let plugins = manager.list_plugins().await;
        assert_eq!(plugins.len(), 0);
    }
}

View file

@ -0,0 +1,280 @@
//! Plugin registry for managing loaded plugins
use std::path::PathBuf;
use anyhow::{Result, anyhow};
use pinakes_plugin_api::{PluginManifest, PluginMetadata};
use std::collections::HashMap;
use super::runtime::WasmPlugin;
/// A registered plugin with its metadata and runtime state
#[derive(Clone)]
pub struct RegisteredPlugin {
    /// Stable plugin identifier; also the registry key.
    pub id: String,
    /// Descriptive metadata exposed to API consumers.
    pub metadata: PluginMetadata,
    /// Compiled WASM module plus the context it executes with.
    pub wasm_plugin: WasmPlugin,
    /// Parsed manifest the plugin was loaded from.
    pub manifest: PluginManifest,
    /// On-disk manifest location when loaded from a file (used for hot-reload).
    pub manifest_path: Option<PathBuf>,
    /// Whether the plugin is currently enabled.
    pub enabled: bool,
}
/// Plugin registry maintains the state of all loaded plugins
///
/// NOTE(review): not internally synchronized; the manager appears to wrap it
/// in an async RwLock — confirm before sharing it across tasks directly.
pub struct PluginRegistry {
    /// Map of plugin ID to registered plugin
    plugins: HashMap<String, RegisteredPlugin>,
}
impl PluginRegistry {
    /// Create a new empty registry
    pub fn new() -> Self {
        Self {
            plugins: HashMap::new(),
        }
    }

    /// Register a new plugin
    ///
    /// # Errors
    /// Returns an error if a plugin with the same ID is already registered.
    pub fn register(&mut self, plugin: RegisteredPlugin) -> Result<()> {
        // Entry API: one hash lookup instead of contains_key + insert.
        match self.plugins.entry(plugin.id.clone()) {
            std::collections::hash_map::Entry::Occupied(_) => {
                Err(anyhow!("Plugin already registered: {}", plugin.id))
            }
            std::collections::hash_map::Entry::Vacant(slot) => {
                slot.insert(plugin);
                Ok(())
            }
        }
    }

    /// Unregister a plugin by ID
    ///
    /// # Errors
    /// Returns an error if no plugin with that ID is registered.
    pub fn unregister(&mut self, plugin_id: &str) -> Result<()> {
        self.plugins
            .remove(plugin_id)
            .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
        Ok(())
    }

    /// Get a plugin by ID
    pub fn get(&self, plugin_id: &str) -> Option<&RegisteredPlugin> {
        self.plugins.get(plugin_id)
    }

    /// Get a mutable reference to a plugin by ID
    pub fn get_mut(&mut self, plugin_id: &str) -> Option<&mut RegisteredPlugin> {
        self.plugins.get_mut(plugin_id)
    }

    /// Check if a plugin is loaded
    pub fn is_loaded(&self, plugin_id: &str) -> bool {
        self.plugins.contains_key(plugin_id)
    }

    /// Check if a plugin is enabled. Returns `None` if the plugin is not found.
    pub fn is_enabled(&self, plugin_id: &str) -> Option<bool> {
        self.plugins.get(plugin_id).map(|p| p.enabled)
    }

    /// Enable a plugin
    ///
    /// # Errors
    /// Returns an error if the plugin is not registered.
    pub fn enable(&mut self, plugin_id: &str) -> Result<()> {
        let plugin = self
            .plugins
            .get_mut(plugin_id)
            .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
        plugin.enabled = true;
        Ok(())
    }

    /// Disable a plugin
    ///
    /// # Errors
    /// Returns an error if the plugin is not registered.
    pub fn disable(&mut self, plugin_id: &str) -> Result<()> {
        let plugin = self
            .plugins
            .get_mut(plugin_id)
            .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
        plugin.enabled = false;
        Ok(())
    }

    /// List all registered plugins (unordered — backed by a HashMap)
    pub fn list_all(&self) -> Vec<&RegisteredPlugin> {
        self.plugins.values().collect()
    }

    /// List all enabled plugins
    pub fn list_enabled(&self) -> Vec<&RegisteredPlugin> {
        self.plugins.values().filter(|p| p.enabled).collect()
    }

    /// Get plugins by kind (e.g., "media_type", "metadata_extractor")
    pub fn get_by_kind(&self, kind: &str) -> Vec<&RegisteredPlugin> {
        self.plugins
            .values()
            // Compare as &str to avoid allocating a String per plugin.
            .filter(|p| p.manifest.plugin.kind.iter().any(|k| k == kind))
            .collect()
    }

    /// Get count of registered plugins
    pub fn count(&self) -> usize {
        self.plugins.len()
    }

    /// Get count of enabled plugins
    pub fn count_enabled(&self) -> usize {
        self.plugins.values().filter(|p| p.enabled).count()
    }
}
impl Default for PluginRegistry {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use pinakes_plugin_api::Capabilities;
    use std::collections::HashMap;

    /// Build a minimal, enabled plugin fixture with the given ID and kinds.
    fn create_test_plugin(id: &str, kind: Vec<String>) -> RegisteredPlugin {
        let manifest = PluginManifest {
            plugin: pinakes_plugin_api::manifest::PluginInfo {
                name: id.to_string(),
                version: "1.0.0".to_string(),
                api_version: "1.0".to_string(),
                author: Some("Test".to_string()),
                description: Some("Test plugin".to_string()),
                homepage: None,
                license: None,
                kind,
                binary: pinakes_plugin_api::manifest::PluginBinary {
                    wasm: "test.wasm".to_string(),
                    entrypoint: None,
                },
                dependencies: vec![],
            },
            capabilities: Default::default(),
            config: HashMap::new(),
        };
        RegisteredPlugin {
            id: id.to_string(),
            metadata: PluginMetadata {
                id: id.to_string(),
                name: id.to_string(),
                version: "1.0.0".to_string(),
                author: "Test".to_string(),
                description: "Test plugin".to_string(),
                api_version: "1.0".to_string(),
                capabilities_required: Capabilities::default(),
            },
            // Test-only Default impl builds an empty WASM module.
            wasm_plugin: WasmPlugin::default(),
            manifest,
            manifest_path: None,
            enabled: true,
        }
    }

    // Registering makes the plugin visible via is_loaded/get.
    #[test]
    fn test_registry_register_and_get() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin.clone()).unwrap();
        assert!(registry.is_loaded("test-plugin"));
        assert!(registry.get("test-plugin").is_some());
    }

    // Re-registering the same ID must be rejected.
    #[test]
    fn test_registry_duplicate_register() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin.clone()).unwrap();
        let result = registry.register(plugin);
        assert!(result.is_err());
    }

    // Unregister removes the plugin entirely.
    #[test]
    fn test_registry_unregister() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin).unwrap();
        registry.unregister("test-plugin").unwrap();
        assert!(!registry.is_loaded("test-plugin"));
    }

    // Enable/disable toggles the flag; unknown IDs report None.
    #[test]
    fn test_registry_enable_disable() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin).unwrap();
        assert_eq!(registry.is_enabled("test-plugin"), Some(true));
        registry.disable("test-plugin").unwrap();
        assert_eq!(registry.is_enabled("test-plugin"), Some(false));
        registry.enable("test-plugin").unwrap();
        assert_eq!(registry.is_enabled("test-plugin"), Some(true));
        assert_eq!(registry.is_enabled("nonexistent"), None);
    }

    // Kind filtering matches against the manifest's kind list.
    #[test]
    fn test_registry_get_by_kind() {
        let mut registry = PluginRegistry::new();
        registry
            .register(create_test_plugin(
                "plugin1",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        registry
            .register(create_test_plugin(
                "plugin2",
                vec!["metadata_extractor".to_string()],
            ))
            .unwrap();
        registry
            .register(create_test_plugin(
                "plugin3",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        let media_type_plugins = registry.get_by_kind("media_type");
        assert_eq!(media_type_plugins.len(), 2);
        let extractor_plugins = registry.get_by_kind("metadata_extractor");
        assert_eq!(extractor_plugins.len(), 1);
    }

    // count() includes disabled plugins; count_enabled() does not.
    #[test]
    fn test_registry_counts() {
        let mut registry = PluginRegistry::new();
        registry
            .register(create_test_plugin(
                "plugin1",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        registry
            .register(create_test_plugin(
                "plugin2",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        assert_eq!(registry.count(), 2);
        assert_eq!(registry.count_enabled(), 2);
        registry.disable("plugin1").unwrap();
        assert_eq!(registry.count(), 2);
        assert_eq!(registry.count_enabled(), 1);
    }
}

View file

@ -0,0 +1,582 @@
//! WASM runtime for executing plugins
use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginContext;
use std::path::Path;
use std::sync::Arc;
use wasmtime::*;
/// WASM runtime wrapper for executing plugins
pub struct WasmRuntime {
    /// Shared wasmtime engine used to compile all plugin modules.
    engine: Engine,
}
impl WasmRuntime {
    /// Create a new WASM runtime
    ///
    /// Configures the engine with the component model, async support, a 1 MB
    /// WASM stack, and fuel metering so per-call CPU limits can be enforced.
    pub fn new() -> Result<Self> {
        let mut config = Config::new();
        // Enable WASM features
        config.wasm_component_model(true);
        config.async_support(true);
        // Set resource limits
        config.max_wasm_stack(1024 * 1024); // 1MB stack
        config.consume_fuel(true); // Enable fuel metering for CPU limits
        let engine = Engine::new(&config)?;
        Ok(Self { engine })
    }

    /// Load a plugin from a WASM file
    ///
    /// Reads and compiles the module eagerly; execution happens later through
    /// [`WasmPlugin::call_function`].
    ///
    /// # Errors
    /// Fails when the file cannot be read or the module does not compile.
    pub async fn load_plugin(
        &self,
        wasm_path: &Path,
        context: PluginContext,
    ) -> Result<WasmPlugin> {
        // Read directly and attach context on failure instead of a separate
        // `exists()` probe: one syscall fewer and no TOCTOU window between
        // the existence check and the read.
        // NOTE(review): this is a blocking read inside an async fn; fine for
        // small modules, consider spawn_blocking/tokio::fs for large ones.
        let wasm_bytes = std::fs::read(wasm_path)
            .map_err(|e| anyhow!("failed to read WASM file {:?}: {}", wasm_path, e))?;
        // Compile module
        let module = Module::new(&self.engine, &wasm_bytes)?;
        Ok(WasmPlugin {
            module: Arc::new(module),
            context,
        })
    }
}
/// Store data passed to each WASM invocation
pub struct PluginStoreData {
    /// Per-plugin context: data/cache dirs, config, granted capabilities.
    pub context: PluginContext,
    /// Scratch buffer for variable-sized results produced by host functions;
    /// the guest drains it via `host_get_buffer`.
    pub exchange_buffer: Vec<u8>,
}
/// A loaded WASM plugin instance
///
/// Cloning is cheap: the compiled module is shared behind an `Arc`.
#[derive(Clone)]
pub struct WasmPlugin {
    /// Compiled module; a fresh instance is created per function call.
    module: Arc<Module>,
    /// Context captured at load time (capabilities, config, directories).
    context: PluginContext,
}
impl WasmPlugin {
    /// Get the plugin context
    pub fn context(&self) -> &PluginContext {
        &self.context
    }

    /// Execute a plugin function
    ///
    /// Creates a fresh store and instance per invocation with host functions
    /// linked, calls the requested exported function, and returns the result.
    ///
    /// # Errors
    /// Fails on instantiation errors, fuel exhaustion, a missing export, a
    /// negative `alloc` result, or parameters that do not fit guest memory.
    pub async fn call_function(&self, function_name: &str, params: &[u8]) -> Result<Vec<u8>> {
        let engine = self.module.engine();
        // Create store with per-invocation data
        let store_data = PluginStoreData {
            context: self.context.clone(),
            exchange_buffer: Vec::new(),
        };
        let mut store = Store::new(engine, store_data);
        // Set fuel limit based on capabilities. saturating_mul avoids an
        // overflow wrap/panic for absurdly large configured CPU budgets.
        if let Some(max_cpu_time_ms) = self.context.capabilities.max_cpu_time_ms {
            let fuel = max_cpu_time_ms.saturating_mul(100_000);
            store.set_fuel(fuel)?;
        } else {
            store.set_fuel(1_000_000_000)?;
        }
        // Set up linker with host functions
        let mut linker = Linker::new(engine);
        HostFunctions::setup_linker(&mut linker)?;
        // Instantiate the module
        let instance = linker.instantiate_async(&mut store, &self.module).await?;
        // Get the memory export (if available)
        let memory = instance.get_memory(&mut store, "memory");
        // If there are params and memory is available, write them
        let mut alloc_offset: i32 = 0;
        if !params.is_empty()
            && let Some(mem) = &memory {
            // Call the plugin's alloc function if available, otherwise write at offset 0
            let offset = if let Ok(alloc) =
                instance.get_typed_func::<i32, i32>(&mut store, "alloc")
            {
                let result = alloc.call_async(&mut store, params.len() as i32).await?;
                if result < 0 {
                    return Err(anyhow!("plugin alloc returned negative offset: {}", result));
                }
                result as usize
            } else {
                0
            };
            alloc_offset = offset as i32;
            let mem_data = mem.data_mut(&mut store);
            // Fail loudly instead of silently invoking the function with its
            // parameters missing when they do not fit in guest memory.
            let end = offset
                .checked_add(params.len())
                .ok_or_else(|| anyhow!("parameter range overflows address space"))?;
            if end > mem_data.len() {
                return Err(anyhow!(
                    "plugin memory too small for {} parameter bytes at offset {}",
                    params.len(),
                    offset
                ));
            }
            mem_data[offset..end].copy_from_slice(params);
        }
        // Look up the exported function and call it
        let func = instance
            .get_func(&mut store, function_name)
            .ok_or_else(|| anyhow!("exported function '{}' not found", function_name))?;
        let func_ty = func.ty(&store);
        let param_count = func_ty.params().len();
        let result_count = func_ty.results().len();
        let mut results = vec![Val::I32(0); result_count];
        // Call with appropriate params based on function signature
        if param_count == 2 && !params.is_empty() {
            // Convention: (ptr, len)
            func.call_async(
                &mut store,
                &[Val::I32(alloc_offset), Val::I32(params.len() as i32)],
                &mut results,
            )
            .await?;
        } else if param_count == 0 {
            func.call_async(&mut store, &[], &mut results).await?;
        } else {
            // Generic: fill with zeroes
            let params_vals: Vec<Val> = (0..param_count).map(|_| Val::I32(0)).collect();
            func.call_async(&mut store, &params_vals, &mut results)
                .await?;
        }
        // Read result from exchange buffer (host functions may have written data)
        let exchange = std::mem::take(&mut store.data_mut().exchange_buffer);
        if !exchange.is_empty() {
            return Ok(exchange);
        }
        // Otherwise serialize the return values
        if let Some(Val::I32(ret)) = results.first() {
            Ok(ret.to_le_bytes().to_vec())
        } else {
            Ok(Vec::new())
        }
    }
}
#[cfg(test)]
impl Default for WasmPlugin {
    /// Test-only stub: an empty WASM module with a default context pointing
    /// at the system temp dir. `unwrap` is acceptable because the module
    /// source is a constant, known-valid WAT string.
    fn default() -> Self {
        let engine = Engine::default();
        let module = Module::new(&engine, br#"(module)"#).unwrap();
        Self {
            module: Arc::new(module),
            context: PluginContext {
                data_dir: std::env::temp_dir(),
                cache_dir: std::env::temp_dir(),
                config: std::collections::HashMap::new(),
                capabilities: Default::default(),
            },
        }
    }
}
/// Host functions that plugins can call
///
/// Stateless namespace type; all behavior lives in
/// [`HostFunctions::setup_linker`].
pub struct HostFunctions;
impl HostFunctions {
    /// Set up host functions in a linker
    ///
    /// ABI conventions shared by all host imports (module `"env"`):
    /// - Strings and buffers are passed as `(ptr, len)` pairs into the
    ///   guest's exported linear memory named `"memory"`.
    /// - Return codes: `>= 0` is success (usually a byte length), `-1` is a
    ///   generic failure, `-2` is a capability/permission denial.
    /// - Variable-sized results are staged in
    ///   `PluginStoreData::exchange_buffer` and fetched by the guest via
    ///   `host_get_buffer`.
    pub fn setup_linker(linker: &mut Linker<PluginStoreData>) -> Result<()> {
        // host_log: log a message from the plugin
        linker.func_wrap(
            "env",
            "host_log",
            |mut caller: Caller<'_, PluginStoreData>, level: i32, ptr: i32, len: i32| {
                // Reject negative offsets/lengths before casting to usize.
                if ptr < 0 || len < 0 {
                    return;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                if let Some(mem) = memory {
                    let data = mem.data(&caller);
                    let start = ptr as usize;
                    let end = start + len as usize;
                    // Bounds-check the slice, then require valid UTF-8;
                    // malformed messages are silently dropped.
                    if end <= data.len()
                        && let Ok(msg) = std::str::from_utf8(&data[start..end]) {
                        // Level mapping: 0=error, 1=warn, 2=info, else debug.
                        match level {
                            0 => tracing::error!(plugin = true, "{}", msg),
                            1 => tracing::warn!(plugin = true, "{}", msg),
                            2 => tracing::info!(plugin = true, "{}", msg),
                            _ => tracing::debug!(plugin = true, "{}", msg),
                        }
                    }
                }
            },
        )?;
        // host_read_file: read a file into the exchange buffer
        linker.func_wrap(
            "env",
            "host_read_file",
            |mut caller: Caller<'_, PluginStoreData>, path_ptr: i32, path_len: i32| -> i32 {
                if path_ptr < 0 || path_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let data = mem.data(&caller);
                let start = path_ptr as usize;
                let end = start + path_len as usize;
                if end > data.len() {
                    return -1;
                }
                let path_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                // Canonicalize path before checking permissions to prevent traversal
                let path = match std::path::Path::new(&path_str).canonicalize() {
                    Ok(p) => p,
                    Err(_) => return -1,
                };
                // Check read permission against canonicalized path
                let can_read = caller
                    .data()
                    .context
                    .capabilities
                    .filesystem
                    .read
                    .iter()
                    .any(|allowed| {
                        allowed
                            .canonicalize()
                            .is_ok_and(|a| path.starts_with(a))
                    });
                if !can_read {
                    tracing::warn!(path = %path_str, "plugin read access denied");
                    return -2;
                }
                // On success the contents land in the exchange buffer and the
                // byte count is returned to the guest.
                match std::fs::read(&path) {
                    Ok(contents) => {
                        let len = contents.len() as i32;
                        caller.data_mut().exchange_buffer = contents;
                        len
                    }
                    Err(_) => -1,
                }
            },
        )?;
        // host_write_file: write data to a file
        linker.func_wrap(
            "env",
            "host_write_file",
            |mut caller: Caller<'_, PluginStoreData>,
             path_ptr: i32,
             path_len: i32,
             data_ptr: i32,
             data_len: i32|
             -> i32 {
                if path_ptr < 0 || path_len < 0 || data_ptr < 0 || data_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let mem_data = mem.data(&caller);
                let path_start = path_ptr as usize;
                let path_end = path_start + path_len as usize;
                let data_start = data_ptr as usize;
                let data_end = data_start + data_len as usize;
                if path_end > mem_data.len() || data_end > mem_data.len() {
                    return -1;
                }
                let path_str = match std::str::from_utf8(&mem_data[path_start..path_end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                // Copy out of guest memory before any further store access.
                let file_data = mem_data[data_start..data_end].to_vec();
                // Canonicalize path for write (file may not exist yet)
                let path = std::path::Path::new(&path_str);
                let canonical = if path.exists() {
                    path.canonicalize().ok()
                } else {
                    // New file: canonicalize the parent directory and
                    // re-append the file name.
                    path.parent()
                        .and_then(|p| p.canonicalize().ok())
                        .map(|p| p.join(path.file_name().unwrap_or_default()))
                };
                let Some(canonical) = canonical else {
                    return -1;
                };
                // Check write permission against canonicalized path
                let can_write = caller
                    .data()
                    .context
                    .capabilities
                    .filesystem
                    .write
                    .iter()
                    .any(|allowed| {
                        allowed
                            .canonicalize()
                            .is_ok_and(|a| canonical.starts_with(a))
                    });
                if !can_write {
                    tracing::warn!(path = %path_str, "plugin write access denied");
                    return -2;
                }
                match std::fs::write(&canonical, &file_data) {
                    Ok(()) => 0,
                    Err(_) => -1,
                }
            },
        )?;
        // host_http_request: make an HTTP request (blocking)
        linker.func_wrap(
            "env",
            "host_http_request",
            |mut caller: Caller<'_, PluginStoreData>, url_ptr: i32, url_len: i32| -> i32 {
                if url_ptr < 0 || url_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let data = mem.data(&caller);
                let start = url_ptr as usize;
                let end = start + url_len as usize;
                if end > data.len() {
                    return -1;
                }
                let url_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                // Check network permission
                // NOTE(review): only the coarse `enabled` flag is checked
                // here — per-domain allow-lists are not enforced on this
                // path; confirm whether they should be.
                if !caller.data().context.capabilities.network.enabled {
                    tracing::warn!(url = %url_str, "plugin network access denied");
                    return -2;
                }
                // Use block_in_place to avoid blocking the async runtime's thread pool.
                // Falls back to a blocking client with timeout if block_in_place is unavailable.
                let result = std::panic::catch_unwind(|| {
                    tokio::task::block_in_place(|| {
                        tokio::runtime::Handle::current().block_on(async {
                            let client = reqwest::Client::builder()
                                .timeout(std::time::Duration::from_secs(30))
                                .build()
                                .map_err(|e| e.to_string())?;
                            let resp = client
                                .get(&url_str)
                                .send()
                                .await
                                .map_err(|e| e.to_string())?;
                            let bytes = resp.bytes().await.map_err(|e| e.to_string())?;
                            Ok::<_, String>(bytes)
                        })
                    })
                });
                match result {
                    Ok(Ok(bytes)) => {
                        let len = bytes.len() as i32;
                        caller.data_mut().exchange_buffer = bytes.to_vec();
                        len
                    }
                    Ok(Err(_)) => -1,
                    Err(_) => {
                        // block_in_place panicked (e.g. current-thread runtime);
                        // fall back to blocking client with timeout
                        let client = match reqwest::blocking::Client::builder()
                            .timeout(std::time::Duration::from_secs(30))
                            .build()
                        {
                            Ok(c) => c,
                            Err(_) => return -1,
                        };
                        match client.get(&url_str).send() {
                            Ok(resp) => match resp.bytes() {
                                Ok(bytes) => {
                                    let len = bytes.len() as i32;
                                    caller.data_mut().exchange_buffer = bytes.to_vec();
                                    len
                                }
                                Err(_) => -1,
                            },
                            Err(_) => -1,
                        }
                    }
                }
            },
        )?;
        // host_get_config: read a config key into the exchange buffer
        linker.func_wrap(
            "env",
            "host_get_config",
            |mut caller: Caller<'_, PluginStoreData>, key_ptr: i32, key_len: i32| -> i32 {
                if key_ptr < 0 || key_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let data = mem.data(&caller);
                let start = key_ptr as usize;
                let end = start + key_len as usize;
                if end > data.len() {
                    return -1;
                }
                let key_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                // Missing keys return -1, indistinguishable from other errors
                // by design of the simple ABI.
                match caller.data().context.config.get(&key_str) {
                    Some(value) => {
                        let json = value.to_string();
                        let bytes = json.into_bytes();
                        let len = bytes.len() as i32;
                        caller.data_mut().exchange_buffer = bytes;
                        len
                    }
                    None => -1,
                }
            },
        )?;
        // host_get_buffer: copy the exchange buffer to WASM memory
        linker.func_wrap(
            "env",
            "host_get_buffer",
            |mut caller: Caller<'_, PluginStoreData>, dest_ptr: i32, dest_len: i32| -> i32 {
                if dest_ptr < 0 || dest_len < 0 {
                    return -1;
                }
                // Clone to end the immutable borrow of the store before
                // taking the mutable memory view below.
                let buf = caller.data().exchange_buffer.clone();
                // Truncate to the guest-provided destination size.
                let copy_len = buf.len().min(dest_len as usize);
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let mem_data = mem.data_mut(&mut caller);
                let start = dest_ptr as usize;
                if start + copy_len > mem_data.len() {
                    return -1;
                }
                mem_data[start..start + copy_len].copy_from_slice(&buf[..copy_len]);
                copy_len as i32
            },
        )?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pinakes_plugin_api::PluginContext;
    use std::collections::HashMap;

    // Engine config (component model, async, fuel) must be constructible.
    #[test]
    fn test_wasm_runtime_creation() {
        let runtime = WasmRuntime::new();
        assert!(runtime.is_ok());
    }

    // Exercises the prefix-match logic used by the filesystem host functions
    // directly on the capability lists (no canonicalization here).
    #[test]
    fn test_host_functions_file_access() {
        let mut capabilities = pinakes_plugin_api::Capabilities::default();
        capabilities.filesystem.read.push("/tmp".into());
        capabilities.filesystem.write.push("/tmp/output".into());
        let context = PluginContext {
            data_dir: "/tmp/data".into(),
            cache_dir: "/tmp/cache".into(),
            config: HashMap::new(),
            capabilities,
        };
        // Verify capability checks work via context fields
        let can_read = context
            .capabilities
            .filesystem
            .read
            .iter()
            .any(|p| Path::new("/tmp/test.txt").starts_with(p));
        assert!(can_read);
        let cant_read = context
            .capabilities
            .filesystem
            .read
            .iter()
            .any(|p| Path::new("/etc/passwd").starts_with(p));
        assert!(!cant_read);
        let can_write = context
            .capabilities
            .filesystem
            .write
            .iter()
            .any(|p| Path::new("/tmp/output/file.txt").starts_with(p));
        assert!(can_write);
        let cant_write = context
            .capabilities
            .filesystem
            .write
            .iter()
            .any(|p| Path::new("/tmp/file.txt").starts_with(p));
        assert!(!cant_write);
    }

    // Network access defaults to disabled and is a simple toggle.
    #[test]
    fn test_host_functions_network_access() {
        let mut context = PluginContext {
            data_dir: "/tmp/data".into(),
            cache_dir: "/tmp/cache".into(),
            config: HashMap::new(),
            capabilities: Default::default(),
        };
        assert!(!context.capabilities.network.enabled);
        context.capabilities.network.enabled = true;
        assert!(context.capabilities.network.enabled);
    }

    // All host imports must register without signature conflicts.
    #[test]
    fn test_linker_setup() {
        let engine = Engine::default();
        let mut linker = Linker::<PluginStoreData>::new(&engine);
        let result = HostFunctions::setup_linker(&mut linker);
        assert!(result.is_ok());
    }
}

View file

@ -0,0 +1,341 @@
//! Capability-based security for plugins
use anyhow::{Result, anyhow};
use pinakes_plugin_api::Capabilities;
use std::path::{Path, PathBuf};
/// Capability enforcer validates and enforces plugin capabilities
///
/// Acts as the system-wide policy: per-plugin requests are clamped to these
/// limits, and path/network access is checked against the allow-lists below.
pub struct CapabilityEnforcer {
    /// Maximum allowed memory per plugin (bytes)
    max_memory_limit: usize,
    /// Maximum allowed CPU time per plugin (milliseconds)
    max_cpu_time_limit: u64,
    /// Allowed filesystem read paths (system-wide)
    allowed_read_paths: Vec<PathBuf>,
    /// Allowed filesystem write paths (system-wide)
    allowed_write_paths: Vec<PathBuf>,
    /// Whether to allow network access by default
    allow_network_default: bool,
}
impl CapabilityEnforcer {
    /// Create a new capability enforcer with default limits:
    /// 512 MB memory, 60 s CPU, deny-all filesystem, network disabled.
    pub fn new() -> Self {
        Self {
            max_memory_limit: 512 * 1024 * 1024, // 512 MB
            max_cpu_time_limit: 60 * 1000,       // 60 seconds
            allowed_read_paths: vec![],
            allowed_write_paths: vec![],
            allow_network_default: false,
        }
    }

    /// Set maximum memory limit
    pub fn with_max_memory(mut self, bytes: usize) -> Self {
        self.max_memory_limit = bytes;
        self
    }

    /// Set maximum CPU time limit
    pub fn with_max_cpu_time(mut self, milliseconds: u64) -> Self {
        self.max_cpu_time_limit = milliseconds;
        self
    }

    /// Add allowed read path
    pub fn allow_read_path(mut self, path: PathBuf) -> Self {
        self.allowed_read_paths.push(path);
        self
    }

    /// Add allowed write path
    pub fn allow_write_path(mut self, path: PathBuf) -> Self {
        self.allowed_write_paths.push(path);
        self
    }

    /// Set default network access policy
    pub fn with_network_default(mut self, allow: bool) -> Self {
        self.allow_network_default = allow;
        self
    }

    /// Validate capabilities requested by a plugin
    ///
    /// # Errors
    /// Returns an error when the requested memory, CPU time, filesystem
    /// paths, or network access exceed the configured system policy.
    pub fn validate_capabilities(&self, capabilities: &Capabilities) -> Result<()> {
        // Validate memory limit
        if let Some(memory) = capabilities.max_memory_bytes
            && memory > self.max_memory_limit
        {
            return Err(anyhow!(
                "Requested memory ({} bytes) exceeds limit ({} bytes)",
                memory,
                self.max_memory_limit
            ));
        }
        // Validate CPU time limit
        if let Some(cpu_time) = capabilities.max_cpu_time_ms
            && cpu_time > self.max_cpu_time_limit
        {
            return Err(anyhow!(
                "Requested CPU time ({} ms) exceeds limit ({} ms)",
                cpu_time,
                self.max_cpu_time_limit
            ));
        }
        // Validate filesystem access
        self.validate_filesystem_access(capabilities)?;
        // Validate network access
        if capabilities.network.enabled && !self.allow_network_default {
            return Err(anyhow!(
                "Plugin requests network access, but network access is disabled by policy"
            ));
        }
        Ok(())
    }

    /// Validate filesystem access capabilities
    fn validate_filesystem_access(&self, capabilities: &Capabilities) -> Result<()> {
        // Check read paths
        for path in &capabilities.filesystem.read {
            if !self.is_read_allowed(path) {
                return Err(anyhow!(
                    "Plugin requests read access to {:?} which is not in allowed paths",
                    path
                ));
            }
        }
        // Check write paths
        for path in &capabilities.filesystem.write {
            if !self.is_write_allowed(path) {
                return Err(anyhow!(
                    "Plugin requests write access to {:?} which is not in allowed paths",
                    path
                ));
            }
        }
        Ok(())
    }

    /// True when an already-canonicalized path falls under any allowed root.
    /// Shared by the read and write checks to keep the two in lockstep.
    fn canonical_under_any(canonical: &Path, allowed: &[PathBuf]) -> bool {
        allowed.iter().any(|root| {
            root.canonicalize()
                .is_ok_and(|r| canonical.starts_with(r))
        })
    }

    /// Check if a path is allowed for reading
    pub fn is_read_allowed(&self, path: &Path) -> bool {
        if self.allowed_read_paths.is_empty() {
            return false; // deny-all when unconfigured
        }
        // Canonicalize to defeat `..` traversal and symlink escapes; a path
        // that cannot be canonicalized (e.g. does not exist) is denied.
        let Ok(canonical) = path.canonicalize() else {
            return false;
        };
        Self::canonical_under_any(&canonical, &self.allowed_read_paths)
    }

    /// Check if a path is allowed for writing
    pub fn is_write_allowed(&self, path: &Path) -> bool {
        if self.allowed_write_paths.is_empty() {
            return false; // deny-all when unconfigured
        }
        // Not-yet-existing files cannot be canonicalized directly, so
        // canonicalize the parent directory and re-append the file name.
        let canonical = if path.exists() {
            path.canonicalize().ok()
        } else {
            path.parent()
                .and_then(|p| p.canonicalize().ok())
                .map(|p| p.join(path.file_name().unwrap_or_default()))
        };
        let Some(canonical) = canonical else {
            return false;
        };
        Self::canonical_under_any(&canonical, &self.allowed_write_paths)
    }

    /// Check if network access is allowed for a plugin
    pub fn is_network_allowed(&self, capabilities: &Capabilities) -> bool {
        capabilities.network.enabled && self.allow_network_default
    }

    /// Check if a specific domain is allowed
    pub fn is_domain_allowed(&self, capabilities: &Capabilities, domain: &str) -> bool {
        if !capabilities.network.enabled {
            return false;
        }
        match &capabilities.network.allowed_domains {
            // No restriction list: fall back to the system-wide default policy.
            None => self.allow_network_default,
            // Otherwise the domain must match the allow-list (case-insensitive).
            Some(domains) => domains.iter().any(|d| d.eq_ignore_ascii_case(domain)),
        }
    }

    /// Get effective memory limit for a plugin (request clamped to system max)
    pub fn get_memory_limit(&self, capabilities: &Capabilities) -> usize {
        capabilities
            .max_memory_bytes
            .unwrap_or(self.max_memory_limit)
            .min(self.max_memory_limit)
    }

    /// Get effective CPU time limit for a plugin (request clamped to system max)
    pub fn get_cpu_time_limit(&self, capabilities: &Capabilities) -> u64 {
        capabilities
            .max_cpu_time_ms
            .unwrap_or(self.max_cpu_time_limit)
            .min(self.max_cpu_time_limit)
    }
}
impl Default for CapabilityEnforcer {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    #[allow(unused_imports)]
    use pinakes_plugin_api::{FilesystemCapability, NetworkCapability};

    // Requests at or below the configured memory cap pass; above it, fail.
    #[test]
    fn test_validate_memory_limit() {
        let enforcer = CapabilityEnforcer::new().with_max_memory(100 * 1024 * 1024); // 100 MB
        let mut caps = Capabilities::default();
        caps.max_memory_bytes = Some(50 * 1024 * 1024); // 50 MB - OK
        assert!(enforcer.validate_capabilities(&caps).is_ok());
        caps.max_memory_bytes = Some(200 * 1024 * 1024); // 200 MB - exceeds limit
        assert!(enforcer.validate_capabilities(&caps).is_err());
    }

    // Same shape as the memory check, for the CPU-time cap.
    #[test]
    fn test_validate_cpu_time_limit() {
        let enforcer = CapabilityEnforcer::new().with_max_cpu_time(30_000); // 30 seconds
        let mut caps = Capabilities::default();
        caps.max_cpu_time_ms = Some(10_000); // 10 seconds - OK
        assert!(enforcer.validate_capabilities(&caps).is_ok());
        caps.max_cpu_time_ms = Some(60_000); // 60 seconds - exceeds limit
        assert!(enforcer.validate_capabilities(&caps).is_err());
    }

    #[test]
    fn test_filesystem_read_allowed() {
        // Use real temp directories so canonicalize works
        let tmp = tempfile::tempdir().unwrap();
        let allowed_dir = tmp.path().join("allowed");
        std::fs::create_dir_all(&allowed_dir).unwrap();
        let test_file = allowed_dir.join("test.txt");
        std::fs::write(&test_file, "test").unwrap();
        let enforcer = CapabilityEnforcer::new().allow_read_path(allowed_dir.clone());
        assert!(enforcer.is_read_allowed(&test_file));
        assert!(!enforcer.is_read_allowed(Path::new("/etc/passwd")));
    }

    // Empty allow-list means deny-all, not allow-all.
    #[test]
    fn test_filesystem_read_denied_when_empty() {
        let enforcer = CapabilityEnforcer::new();
        assert!(!enforcer.is_read_allowed(Path::new("/tmp/test.txt")));
    }

    #[test]
    fn test_filesystem_write_allowed() {
        let tmp = tempfile::tempdir().unwrap();
        let output_dir = tmp.path().join("output");
        std::fs::create_dir_all(&output_dir).unwrap();
        // Existing file in allowed dir
        let existing = output_dir.join("file.txt");
        std::fs::write(&existing, "test").unwrap();
        let enforcer = CapabilityEnforcer::new().allow_write_path(output_dir.clone());
        assert!(enforcer.is_write_allowed(&existing));
        // New file in allowed dir (parent exists)
        assert!(enforcer.is_write_allowed(&output_dir.join("new_file.txt")));
        assert!(!enforcer.is_write_allowed(Path::new("/etc/config")));
    }

    // Writes are also deny-all when no path is configured.
    #[test]
    fn test_filesystem_write_denied_when_empty() {
        let enforcer = CapabilityEnforcer::new();
        assert!(!enforcer.is_write_allowed(Path::new("/tmp/file.txt")));
    }

    // Network access requires both the plugin flag and the system default.
    #[test]
    fn test_network_allowed() {
        let enforcer = CapabilityEnforcer::new().with_network_default(true);
        let mut caps = Capabilities::default();
        caps.network.enabled = true;
        assert!(enforcer.is_network_allowed(&caps));
        caps.network.enabled = false;
        assert!(!enforcer.is_network_allowed(&caps));
    }

    // With an allow-list set, only listed domains pass.
    #[test]
    fn test_domain_restrictions() {
        let enforcer = CapabilityEnforcer::new().with_network_default(true);
        let mut caps = Capabilities::default();
        caps.network.enabled = true;
        caps.network.allowed_domains = Some(vec![
            "api.example.com".to_string(),
            "cdn.example.com".to_string(),
        ]);
        assert!(enforcer.is_domain_allowed(&caps, "api.example.com"));
        assert!(enforcer.is_domain_allowed(&caps, "cdn.example.com"));
        assert!(!enforcer.is_domain_allowed(&caps, "evil.com"));
    }

    // Effective limits are min(plugin request, system max), defaulting to
    // the system max when the plugin specifies nothing.
    #[test]
    fn test_get_effective_limits() {
        let enforcer = CapabilityEnforcer::new()
            .with_max_memory(100 * 1024 * 1024)
            .with_max_cpu_time(30_000);
        let mut caps = Capabilities::default();
        // No limits specified - use defaults
        assert_eq!(enforcer.get_memory_limit(&caps), 100 * 1024 * 1024);
        assert_eq!(enforcer.get_cpu_time_limit(&caps), 30_000);
        // Plugin requests lower limits - use plugin's
        caps.max_memory_bytes = Some(50 * 1024 * 1024);
        caps.max_cpu_time_ms = Some(10_000);
        assert_eq!(enforcer.get_memory_limit(&caps), 50 * 1024 * 1024);
        assert_eq!(enforcer.get_cpu_time_limit(&caps), 10_000);
        // Plugin requests higher limits - cap at system max
        caps.max_memory_bytes = Some(200 * 1024 * 1024);
        caps.max_cpu_time_ms = Some(60_000);
        assert_eq!(enforcer.get_memory_limit(&caps), 100 * 1024 * 1024);
        assert_eq!(enforcer.get_cpu_time_limit(&caps), 30_000);
    }
}

View file

@ -0,0 +1,52 @@
//! Social features: ratings, comments, favorites, and share links.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
/// A user's rating for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Rating {
    /// Unique identifier for this rating row.
    pub id: Uuid,
    /// User who submitted the rating.
    pub user_id: UserId,
    /// Media item being rated.
    pub media_id: MediaId,
    /// Star value; presumably a 1-5 scale — TODO confirm against the
    /// validation in the storage/API layer.
    pub stars: u8,
    /// Optional free-form review text accompanying the star rating.
    pub review_text: Option<String>,
    /// When the rating was created (UTC).
    pub created_at: DateTime<Utc>,
}
/// A comment on a media item, supporting threaded replies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Comment {
    /// Unique identifier for this comment.
    pub id: Uuid,
    /// Author of the comment.
    pub user_id: UserId,
    /// Media item the comment is attached to.
    pub media_id: MediaId,
    /// Parent comment when this is a threaded reply; `None` for top-level.
    pub parent_comment_id: Option<Uuid>,
    /// Comment body text.
    pub text: String,
    /// When the comment was created (UTC).
    pub created_at: DateTime<Utc>,
}
/// A user's favorite bookmark for a media item.
///
/// Keyed by the (user, media) pair — no surrogate ID of its own.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Favorite {
    /// User who favorited the item.
    pub user_id: UserId,
    /// Media item that was favorited.
    pub media_id: MediaId,
    /// When the favorite was created (UTC).
    pub created_at: DateTime<Utc>,
}
/// A shareable link to a media item with optional password and expiration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShareLink {
    /// Unique identifier for this share link.
    pub id: Uuid,
    /// Media item the link grants access to.
    pub media_id: MediaId,
    /// User who created the share link.
    pub created_by: UserId,
    /// Opaque token embedded in the shared URL.
    pub token: String,
    /// Password hash guarding the link, if any. `skip_serializing` keeps the
    /// hash out of serialized output so it cannot leak through API responses.
    #[serde(skip_serializing)]
    pub password_hash: Option<String>,
    /// Optional expiry; `None` means the link never expires.
    pub expires_at: Option<DateTime<Utc>>,
    /// Number of times the link has been viewed.
    pub view_count: u64,
    /// When the link was created (UTC).
    pub created_at: DateTime<Utc>,
}

View file

@ -7,9 +7,18 @@ use std::sync::Arc;
use uuid::Uuid;
use chrono::{DateTime, Utc};
use crate::analytics::UsageEvent;
use crate::enrichment::ExternalMetadata;
use crate::error::Result;
use crate::model::*;
use crate::playlists::Playlist;
use crate::search::{SearchRequest, SearchResults};
use crate::social::{Comment, Rating, ShareLink};
use crate::subtitles::Subtitle;
use crate::transcode::{TranscodeSession, TranscodeStatus};
use crate::users::UserId;
/// Statistics about the database.
#[derive(Debug, Clone, Default)]
@ -187,6 +196,167 @@ pub trait StorageBackend: Send + Sync + 'static {
// Library statistics
async fn library_statistics(&self) -> Result<LibraryStatistics>;
// User Management
async fn list_users(&self) -> Result<Vec<crate::users::User>>;
async fn get_user(&self, id: crate::users::UserId) -> Result<crate::users::User>;
async fn get_user_by_username(&self, username: &str) -> Result<crate::users::User>;
async fn create_user(
&self,
username: &str,
password_hash: &str,
role: crate::config::UserRole,
profile: Option<crate::users::UserProfile>,
) -> Result<crate::users::User>;
async fn update_user(
&self,
id: crate::users::UserId,
password_hash: Option<&str>,
role: Option<crate::config::UserRole>,
profile: Option<crate::users::UserProfile>,
) -> Result<crate::users::User>;
async fn delete_user(&self, id: crate::users::UserId) -> Result<()>;
async fn get_user_libraries(
&self,
user_id: crate::users::UserId,
) -> Result<Vec<crate::users::UserLibraryAccess>>;
async fn grant_library_access(
&self,
user_id: crate::users::UserId,
root_path: &str,
permission: crate::users::LibraryPermission,
) -> Result<()>;
async fn revoke_library_access(
&self,
user_id: crate::users::UserId,
root_path: &str,
) -> Result<()>;
// ===== Ratings =====
async fn rate_media(
&self,
user_id: UserId,
media_id: MediaId,
stars: u8,
review: Option<&str>,
) -> Result<Rating>;
async fn get_media_ratings(&self, media_id: MediaId) -> Result<Vec<Rating>>;
async fn get_user_rating(&self, user_id: UserId, media_id: MediaId) -> Result<Option<Rating>>;
async fn delete_rating(&self, id: Uuid) -> Result<()>;
// ===== Comments =====
async fn add_comment(
&self,
user_id: UserId,
media_id: MediaId,
text: &str,
parent_id: Option<Uuid>,
) -> Result<Comment>;
async fn get_media_comments(&self, media_id: MediaId) -> Result<Vec<Comment>>;
async fn delete_comment(&self, id: Uuid) -> Result<()>;
// ===== Favorites =====
async fn add_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>;
async fn remove_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>;
async fn get_user_favorites(
&self,
user_id: UserId,
pagination: &Pagination,
) -> Result<Vec<MediaItem>>;
async fn is_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<bool>;
// ===== Share Links =====
async fn create_share_link(
&self,
media_id: MediaId,
created_by: UserId,
token: &str,
password_hash: Option<&str>,
expires_at: Option<DateTime<Utc>>,
) -> Result<ShareLink>;
async fn get_share_link(&self, token: &str) -> Result<ShareLink>;
async fn increment_share_views(&self, token: &str) -> Result<()>;
async fn delete_share_link(&self, id: Uuid) -> Result<()>;
// ===== Playlists =====
async fn create_playlist(
&self,
owner_id: UserId,
name: &str,
description: Option<&str>,
is_public: bool,
is_smart: bool,
filter_query: Option<&str>,
) -> Result<Playlist>;
async fn get_playlist(&self, id: Uuid) -> Result<Playlist>;
async fn list_playlists(&self, owner_id: Option<UserId>) -> Result<Vec<Playlist>>;
async fn update_playlist(
&self,
id: Uuid,
name: Option<&str>,
description: Option<&str>,
is_public: Option<bool>,
) -> Result<Playlist>;
async fn delete_playlist(&self, id: Uuid) -> Result<()>;
async fn add_to_playlist(
&self,
playlist_id: Uuid,
media_id: MediaId,
position: i32,
) -> Result<()>;
async fn remove_from_playlist(&self, playlist_id: Uuid, media_id: MediaId) -> Result<()>;
async fn get_playlist_items(&self, playlist_id: Uuid) -> Result<Vec<MediaItem>>;
async fn reorder_playlist(
&self,
playlist_id: Uuid,
media_id: MediaId,
new_position: i32,
) -> Result<()>;
// ===== Analytics =====
async fn record_usage_event(&self, event: &UsageEvent) -> Result<()>;
async fn get_usage_events(
&self,
media_id: Option<MediaId>,
user_id: Option<UserId>,
limit: u64,
) -> Result<Vec<UsageEvent>>;
async fn get_most_viewed(&self, limit: u64) -> Result<Vec<(MediaItem, u64)>>;
async fn get_recently_viewed(&self, user_id: UserId, limit: u64) -> Result<Vec<MediaItem>>;
async fn update_watch_progress(
&self,
user_id: UserId,
media_id: MediaId,
progress_secs: f64,
) -> Result<()>;
async fn get_watch_progress(&self, user_id: UserId, media_id: MediaId) -> Result<Option<f64>>;
async fn cleanup_old_events(&self, before: DateTime<Utc>) -> Result<u64>;
// ===== Subtitles =====
async fn add_subtitle(&self, subtitle: &Subtitle) -> Result<()>;
async fn get_media_subtitles(&self, media_id: MediaId) -> Result<Vec<Subtitle>>;
async fn delete_subtitle(&self, id: Uuid) -> Result<()>;
async fn update_subtitle_offset(&self, id: Uuid, offset_ms: i64) -> Result<()>;
// ===== External Metadata (Enrichment) =====
async fn store_external_metadata(&self, meta: &ExternalMetadata) -> Result<()>;
async fn get_external_metadata(&self, media_id: MediaId) -> Result<Vec<ExternalMetadata>>;
async fn delete_external_metadata(&self, id: Uuid) -> Result<()>;
// ===== Transcode Sessions =====
async fn create_transcode_session(&self, session: &TranscodeSession) -> Result<()>;
async fn get_transcode_session(&self, id: Uuid) -> Result<TranscodeSession>;
async fn list_transcode_sessions(
&self,
media_id: Option<MediaId>,
) -> Result<Vec<TranscodeSession>>;
async fn update_transcode_status(
&self,
id: Uuid,
status: TranscodeStatus,
progress: f32,
) -> Result<()>;
async fn cleanup_expired_transcodes(&self, before: DateTime<Utc>) -> Result<u64>;
}
/// Comprehensive library statistics.

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,62 @@
//! Subtitle management for video media items.
use std::path::PathBuf;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
/// A subtitle track associated with a media item.
///
/// A track is either embedded in the container (`is_embedded` with a
/// `track_index`) or an external sidecar file (`file_path`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Subtitle {
    /// Unique id of this subtitle record.
    pub id: Uuid,
    /// The media item the subtitle belongs to.
    pub media_id: MediaId,
    /// Language tag if known.
    pub language: Option<String>,
    /// Subtitle container/text format.
    pub format: SubtitleFormat,
    /// Path to an external subtitle file, if not embedded.
    pub file_path: Option<PathBuf>,
    /// True when the track lives inside the media container itself.
    pub is_embedded: bool,
    /// Stream index of an embedded track, if applicable.
    pub track_index: Option<usize>,
    /// Timing offset applied during playback, in milliseconds.
    pub offset_ms: i64,
    /// When the record was created.
    pub created_at: DateTime<Utc>,
}
/// Supported subtitle formats.
///
/// Serialized in lowercase (e.g. `"srt"`), matching `Display`/`FromStr`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SubtitleFormat {
    Srt,
    Vtt,
    Ass,
    Ssa,
    /// Bitmap subtitles (Presentation Graphic Stream).
    Pgs,
}
impl std::fmt::Display for SubtitleFormat {
    /// Render the lowercase identifier for the format (e.g. "srt"),
    /// mirroring the serde `lowercase` representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::Srt => "srt",
            Self::Vtt => "vtt",
            Self::Ass => "ass",
            Self::Ssa => "ssa",
            Self::Pgs => "pgs",
        })
    }
}
impl std::str::FromStr for SubtitleFormat {
    type Err = String;

    /// Parse a subtitle format identifier.
    ///
    /// Matching is case-insensitive ("SRT" and "srt" both parse), which is
    /// convenient when the input comes from file extensions. Lowercase
    /// inputs behave exactly as before.
    ///
    /// # Errors
    /// Returns a descriptive message (echoing the original input) for
    /// unrecognized formats.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        match s.to_ascii_lowercase().as_str() {
            "srt" => Ok(Self::Srt),
            "vtt" => Ok(Self::Vtt),
            "ass" => Ok(Self::Ass),
            "ssa" => Ok(Self::Ssa),
            "pgs" => Ok(Self::Pgs),
            _ => Err(format!("unknown subtitle format: {s}")),
        }
    }
}

View file

@ -5,7 +5,7 @@ use tracing::{info, warn};
use crate::config::ThumbnailConfig;
use crate::error::{PinakesError, Result};
use crate::media_type::{MediaCategory, MediaType};
use crate::media_type::{BuiltinMediaType, MediaCategory, MediaType};
use crate::model::MediaId;
/// Generate a thumbnail for a media file and return the path to the thumbnail.
@ -41,7 +41,7 @@ pub fn generate_thumbnail_with_config(
MediaCategory::Image => {
if media_type.is_raw() {
generate_raw_thumbnail(source_path, &thumb_path, config)
} else if media_type == MediaType::Heic {
} else if media_type == MediaType::Builtin(BuiltinMediaType::Heic) {
generate_heic_thumbnail(source_path, &thumb_path, config)
} else {
generate_image_thumbnail(source_path, &thumb_path, config)
@ -49,8 +49,12 @@ pub fn generate_thumbnail_with_config(
}
MediaCategory::Video => generate_video_thumbnail(source_path, &thumb_path, config),
MediaCategory::Document => match media_type {
MediaType::Pdf => generate_pdf_thumbnail(source_path, &thumb_path, config),
MediaType::Epub => generate_epub_thumbnail(source_path, &thumb_path, config),
MediaType::Builtin(BuiltinMediaType::Pdf) => {
generate_pdf_thumbnail(source_path, &thumb_path, config)
}
MediaType::Builtin(BuiltinMediaType::Epub) => {
generate_epub_thumbnail(source_path, &thumb_path, config)
}
_ => return Ok(None),
},
_ => return Ok(None),

View file

@ -0,0 +1,545 @@
//! Transcoding service for media files using FFmpeg.
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tokio::sync::{RwLock, Semaphore};
use uuid::Uuid;
use crate::config::{TranscodeProfile, TranscodingConfig};
use crate::model::MediaId;
use crate::storage::DynStorageBackend;
use crate::users::UserId;
/// A transcoding session for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscodeSession {
    /// Unique session id.
    pub id: Uuid,
    /// The media item being transcoded.
    pub media_id: MediaId,
    /// The requesting user, when known.
    pub user_id: Option<UserId>,
    /// Name of the transcode profile used for this session.
    pub profile: String,
    /// Directory holding the HLS playlist and segments for this session.
    pub cache_path: PathBuf,
    /// Current lifecycle state of the session.
    pub status: TranscodeStatus,
    /// Fractional progress in [0.0, 1.0].
    pub progress: f32,
    /// When the session was created.
    pub created_at: DateTime<Utc>,
    /// When the cached output expires and may be reaped; `None` = no expiry.
    pub expires_at: Option<DateTime<Utc>>,
    /// Duration of the source media in seconds, used for progress calculation.
    #[serde(default)]
    pub duration_secs: Option<f64>,
    /// Handle to cancel the child FFmpeg process.
    /// In-memory only; never serialized or persisted.
    #[serde(skip)]
    pub child_cancel: Option<Arc<tokio::sync::Notify>>,
}
/// Status of a transcode session.
///
/// Serialized with a `state` tag in snake_case; `Failed` carries the
/// error message as payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "state")]
pub enum TranscodeStatus {
    /// Queued, not yet started.
    Pending,
    /// FFmpeg is currently running.
    Transcoding,
    /// Output produced successfully.
    Complete,
    /// Transcode failed; `error` holds the reason.
    Failed { error: String },
    /// Cancelled by a user or by shutdown.
    Cancelled,
}
impl TranscodeStatus {
pub fn as_str(&self) -> &str {
match self {
Self::Pending => "pending",
Self::Transcoding => "transcoding",
Self::Complete => "complete",
Self::Failed { .. } => "failed",
Self::Cancelled => "cancelled",
}
}
pub fn from_db(status: &str, error_message: Option<&str>) -> Self {
match status {
"pending" => Self::Pending,
"transcoding" => Self::Transcoding,
"complete" => Self::Complete,
"failed" => Self::Failed {
error: error_message.unwrap_or("unknown error").to_string(),
},
"cancelled" => Self::Cancelled,
other => {
tracing::warn!(
"unknown transcode status '{}', defaulting to Pending",
other
);
Self::Pending
}
}
}
pub fn error_message(&self) -> Option<&str> {
match self {
Self::Failed { error } => Some(error),
_ => None,
}
}
}
/// Service managing transcoding sessions and FFmpeg invocations.
pub struct TranscodeService {
    /// Transcoding configuration (profiles, cache dir, concurrency, TTL).
    pub config: TranscodingConfig,
    /// In-memory session registry, keyed by session id.
    pub sessions: Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
    // Bounds the number of concurrently running FFmpeg jobs.
    semaphore: Arc<Semaphore>,
}
impl TranscodeService {
    /// Create a new service; the concurrency limit is clamped to at
    /// least 1 so the semaphore can always admit a job.
    pub fn new(config: TranscodingConfig) -> Self {
        let max_concurrent = config.max_concurrent.max(1);
        Self {
            sessions: Arc::new(RwLock::new(HashMap::new())),
            semaphore: Arc::new(Semaphore::new(max_concurrent)),
            config,
        }
    }

    /// Whether transcoding is enabled in the configuration.
    pub fn is_enabled(&self) -> bool {
        self.config.enabled
    }

    /// Root cache directory for transcode output; falls back to
    /// `/tmp/pinakes-transcode` when unset.
    pub fn cache_dir(&self) -> PathBuf {
        self.config
            .cache_dir
            .clone()
            .unwrap_or_else(|| PathBuf::from("/tmp/pinakes-transcode"))
    }

    /// Start a transcode job for a media item.
    ///
    /// Resolves `profile_name` against the configured profiles, creates a
    /// per-session cache directory, records the session both in storage
    /// and in memory, then spawns a background task that runs FFmpeg
    /// (bounded by the concurrency semaphore). Returns the session id
    /// immediately; status and progress are updated asynchronously.
    ///
    /// # Errors
    /// Fails if the profile is unknown, the session directory cannot be
    /// created, or the storage write fails.
    pub async fn start_transcode(
        &self,
        media_id: MediaId,
        source_path: &Path,
        profile_name: &str,
        duration_secs: Option<f64>,
        storage: &DynStorageBackend,
    ) -> crate::error::Result<Uuid> {
        let profile = self
            .config
            .profiles
            .iter()
            .find(|p| p.name == profile_name)
            .cloned()
            .ok_or_else(|| {
                crate::error::PinakesError::InvalidOperation(format!(
                    "unknown transcode profile: {}",
                    profile_name
                ))
            })?;
        let session_id = Uuid::now_v7();
        let session_dir = self.cache_dir().join(session_id.to_string());
        tokio::fs::create_dir_all(&session_dir).await.map_err(|e| {
            crate::error::PinakesError::InvalidOperation(format!(
                "failed to create session directory: {}",
                e
            ))
        })?;
        // Sessions expire after the configured TTL so `cleanup_expired`
        // can reap stale cache directories.
        let expires_at =
            Some(Utc::now() + chrono::Duration::hours(self.config.cache_ttl_hours as i64));
        let cancel_notify = Arc::new(tokio::sync::Notify::new());
        let session = TranscodeSession {
            id: session_id,
            media_id,
            user_id: None,
            profile: profile_name.to_string(),
            cache_path: session_dir.clone(),
            status: TranscodeStatus::Pending,
            progress: 0.0,
            created_at: Utc::now(),
            expires_at,
            duration_secs,
            child_cancel: Some(cancel_notify.clone()),
        };
        // Store session in DB
        storage.create_transcode_session(&session).await?;
        // Store in memory
        {
            let mut sessions = self.sessions.write().await;
            sessions.insert(session_id, session);
        }
        // Spawn the FFmpeg task
        let sessions = self.sessions.clone();
        let semaphore = self.semaphore.clone();
        let source = source_path.to_path_buf();
        let hw_accel = self.config.hardware_acceleration.clone();
        let storage = storage.clone();
        let cancel = cancel_notify.clone();
        tokio::spawn(async move {
            // Acquire semaphore permit to limit concurrency
            let _permit = match semaphore.acquire().await {
                Ok(permit) => permit,
                Err(e) => {
                    // Semaphore closed (service shutting down): mark the
                    // session failed in memory and in storage, then bail.
                    tracing::error!("failed to acquire transcode semaphore: {}", e);
                    let error_msg = format!("semaphore closed: {}", e);
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        sess.status = TranscodeStatus::Failed {
                            error: error_msg.clone(),
                        };
                    }
                    // NOTE(review): the sessions write guard `s` stays held
                    // across this storage await (the Err branch below drops
                    // it first) — confirm whether this is intentional.
                    if let Err(e) = storage
                        .update_transcode_status(
                            session_id,
                            TranscodeStatus::Failed { error: error_msg },
                            0.0,
                        )
                        .await
                    {
                        tracing::error!("failed to update transcode status: {}", e);
                    }
                    return;
                }
            };
            // Mark as transcoding
            {
                let mut s = sessions.write().await;
                if let Some(sess) = s.get_mut(&session_id) {
                    sess.status = TranscodeStatus::Transcoding;
                }
            }
            if let Err(e) = storage
                .update_transcode_status(session_id, TranscodeStatus::Transcoding, 0.0)
                .await
            {
                tracing::error!("failed to update transcode status: {}", e);
            }
            // Build FFmpeg args and run
            let args = get_ffmpeg_args(&source, &session_dir, &profile, hw_accel.as_deref());
            match run_ffmpeg(&args, &sessions, session_id, duration_secs, cancel).await {
                Ok(()) => {
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        sess.status = TranscodeStatus::Complete;
                        sess.progress = 1.0;
                    }
                    // NOTE(review): guard `s` is held across this await as
                    // well — confirm intent or drop before awaiting.
                    if let Err(e) = storage
                        .update_transcode_status(session_id, TranscodeStatus::Complete, 1.0)
                        .await
                    {
                        tracing::error!("failed to update transcode status: {}", e);
                    }
                }
                Err(e) => {
                    let error_msg = e.to_string();
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        // Don't overwrite Cancelled status
                        if matches!(sess.status, TranscodeStatus::Cancelled) {
                            return;
                        }
                        sess.status = TranscodeStatus::Failed {
                            error: error_msg.clone(),
                        };
                    }
                    // Release the lock before the storage round-trip.
                    drop(s);
                    if let Err(e) = storage
                        .update_transcode_status(
                            session_id,
                            TranscodeStatus::Failed { error: error_msg },
                            0.0,
                        )
                        .await
                    {
                        tracing::error!("failed to update transcode status: {}", e);
                    }
                }
            }
        });
        Ok(session_id)
    }

    /// Cancel a transcode session and clean up cache files.
    ///
    /// Marks the in-memory session `Cancelled`, signals the FFmpeg task's
    /// cancel handle (which kills the child process), persists the new
    /// status, and removes the session's cache directory (best-effort).
    pub async fn cancel_transcode(
        &self,
        session_id: Uuid,
        storage: &DynStorageBackend,
    ) -> crate::error::Result<()> {
        // Take what we need under the lock, then act outside it.
        let (cache_path, cancel_notify) = {
            let mut sessions = self.sessions.write().await;
            if let Some(sess) = sessions.get_mut(&session_id) {
                sess.status = TranscodeStatus::Cancelled;
                let cancel = sess.child_cancel.take();
                (Some(sess.cache_path.clone()), cancel)
            } else {
                (None, None)
            }
        };
        // Signal the child process to be killed
        if let Some(notify) = cancel_notify {
            notify.notify_one();
        }
        storage
            .update_transcode_status(session_id, TranscodeStatus::Cancelled, 0.0)
            .await?;
        // Clean up cache directory
        if let Some(path) = cache_path
            && let Err(e) = tokio::fs::remove_dir_all(&path).await {
            tracing::error!("failed to remove transcode cache directory: {}", e);
        }
        Ok(())
    }

    /// Remove expired transcode sessions and their cache directories.
    pub async fn cleanup_expired(&self) {
        let now = Utc::now();
        // Collect expired entries and remove them from the map under the lock.
        let expired: Vec<(Uuid, PathBuf)> = {
            let mut sessions = self.sessions.write().await;
            let expired: Vec<(Uuid, PathBuf)> = sessions
                .iter()
                .filter_map(|(id, sess)| {
                    if let Some(expires) = sess.expires_at
                        && now > expires {
                        return Some((*id, sess.cache_path.clone()));
                    }
                    None
                })
                .collect();
            for (id, _) in &expired {
                sessions.remove(id);
            }
            expired
        };
        // Lock is dropped here; perform filesystem cleanup outside the lock.
        for (_id, path) in expired {
            if let Err(e) = tokio::fs::remove_dir_all(&path).await {
                tracing::error!("failed to remove expired transcode cache directory: {}", e);
            }
        }
    }

    /// Get a session by ID from the in-memory store.
    pub async fn get_session(&self, session_id: Uuid) -> Option<TranscodeSession> {
        let sessions = self.sessions.read().await;
        sessions.get(&session_id).cloned()
    }

    /// Resolve the path to a specific segment file on disk.
    ///
    /// Only the final path component of `segment_name` is honored, and
    /// empty names, NUL bytes, and dotfiles are rejected by redirecting
    /// to a sentinel path that cannot exist — this blocks path-traversal
    /// attempts via crafted segment names.
    pub fn segment_path(&self, session_id: Uuid, segment_name: &str) -> PathBuf {
        // Sanitize segment_name to prevent path traversal
        let safe_name = std::path::Path::new(segment_name)
            .file_name()
            .map(|n| n.to_string_lossy().to_string())
            .unwrap_or_default();
        if safe_name.is_empty() || safe_name.contains('\0') || safe_name.starts_with('.') {
            // Return a non-existent path that will fail safely
            return self
                .cache_dir()
                .join(session_id.to_string())
                .join("__invalid__");
        }
        self.cache_dir()
            .join(session_id.to_string())
            .join(safe_name)
    }

    /// Find a session for a given media_id and profile.
    ///
    /// Returns the first match regardless of status; callers should check
    /// `status` if they need a completed/active session specifically.
    pub async fn find_session(&self, media_id: MediaId, profile: &str) -> Option<TranscodeSession> {
        let sessions = self.sessions.read().await;
        sessions
            .values()
            .find(|s| s.media_id == media_id && s.profile == profile)
            .cloned()
    }
}
/// Parse a resolution string like "360p", "720p", "1080p" into (width, height).
///
/// Matching is case-insensitive and tolerant of surrounding whitespace,
/// so "1080P", " 720p " and "4K" are all accepted; lowercase inputs
/// behave exactly as before. Unrecognized inputs fall back to 720p
/// (1280x720) rather than failing.
pub fn parse_resolution(res: &str) -> (u32, u32) {
    let normalized = res.trim().to_ascii_lowercase();
    match normalized.trim_end_matches('p') {
        "360" => (640, 360),
        "480" => (854, 480),
        "720" => (1280, 720),
        "1080" => (1920, 1080),
        "1440" => (2560, 1440),
        "2160" | "4k" => (3840, 2160),
        _ => (1280, 720), // default to 720p
    }
}
/// Estimate bandwidth (bits/sec) from a profile's max_bitrate_kbps.
///
/// Uses a saturating multiply so an absurdly large configured bitrate
/// cannot overflow `u32` (plain `*` would panic in debug builds and wrap
/// in release builds); the result caps at `u32::MAX` instead.
pub fn estimate_bandwidth(profile: &TranscodeProfile) -> u32 {
    profile.max_bitrate_kbps.saturating_mul(1000)
}
/// Build FFmpeg CLI arguments for transcoding.
///
/// Produces an HLS output: a `playlist.m3u8` plus `segment%d.ts` files in
/// `output_dir`, scaled to the profile's max resolution and capped at the
/// profile's bitrate. `-progress pipe:1` makes FFmpeg emit key=value
/// progress lines on stdout, which `run_ffmpeg` parses.
fn get_ffmpeg_args(
    source: &Path,
    output_dir: &Path,
    profile: &TranscodeProfile,
    hw_accel: Option<&str>,
) -> Vec<String> {
    let (w, h) = parse_resolution(&profile.max_resolution);
    let playlist = output_dir.join("playlist.m3u8");
    let segment_pattern = output_dir.join("segment%d.ts");
    let mut args = Vec::new();
    // Hardware acceleration (must precede -i to apply to the input).
    if let Some(accel) = hw_accel {
        args.extend_from_slice(&["-hwaccel".to_string(), accel.to_string()]);
    }
    args.extend_from_slice(&[
        "-i".to_string(),
        source.to_string_lossy().to_string(),
        "-c:v".to_string(),
        profile.video_codec.clone(),
        "-c:a".to_string(),
        profile.audio_codec.clone(),
        "-b:v".to_string(),
        format!("{}k", profile.max_bitrate_kbps),
        "-vf".to_string(),
        format!("scale={}:{}", w, h),
        "-f".to_string(),
        "hls".to_string(),
        // 10-second HLS segments.
        "-hls_time".to_string(),
        "10".to_string(),
        "-hls_segment_filename".to_string(),
        segment_pattern.to_string_lossy().to_string(),
        // Machine-readable progress on stdout for run_ffmpeg to parse.
        "-progress".to_string(),
        "pipe:1".to_string(),
        // Overwrite any existing output without prompting.
        "-y".to_string(),
        playlist.to_string_lossy().to_string(),
    ]);
    args
}
/// Run FFmpeg as a child process, parsing progress from stdout.
///
/// Stdout carries `-progress` key=value lines (used to update the
/// session's progress when the source duration is known); stderr is
/// collected so the last lines can be included in the error on failure.
/// The `cancel` notify kills the child and returns an error.
///
/// # Errors
/// Fails if FFmpeg cannot be spawned, exits non-zero, errors while being
/// waited on, or is cancelled via `cancel`.
async fn run_ffmpeg(
    args: &[String],
    sessions: &Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
    session_id: Uuid,
    duration_secs: Option<f64>,
    cancel: Arc<tokio::sync::Notify>,
) -> Result<(), crate::error::PinakesError> {
    use tokio::io::{AsyncBufReadExt, BufReader};
    use tokio::process::Command;
    let mut child = Command::new("ffmpeg")
        .args(args)
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()
        .map_err(|e| {
            crate::error::PinakesError::InvalidOperation(format!("failed to spawn ffmpeg: {}", e))
        })?;
    // Capture stderr in a spawned task for error reporting
    let stderr_handle = if let Some(stderr) = child.stderr.take() {
        let reader = BufReader::new(stderr);
        Some(tokio::spawn(async move {
            let mut lines = reader.lines();
            let mut collected = Vec::new();
            while let Ok(Some(line)) = lines.next_line().await {
                collected.push(line);
            }
            collected
        }))
    } else {
        None
    };
    // Parse progress from stdout
    let stdout_handle = if let Some(stdout) = child.stdout.take() {
        let reader = BufReader::new(stdout);
        let mut lines = reader.lines();
        let sessions = sessions.clone();
        Some(tokio::spawn(async move {
            while let Ok(Some(line)) = lines.next_line().await {
                // FFmpeg progress output: "out_time_us=12345678"
                if let Some(time_str) = line.strip_prefix("out_time_us=")
                    && let Ok(us) = time_str.trim().parse::<f64>() {
                    let secs = us / 1_000_000.0;
                    // Calculate progress based on known duration; cap at
                    // 0.99 so only a clean exit reports 1.0.
                    let progress = match duration_secs {
                        Some(dur) if dur > 0.0 => (secs / dur).min(0.99) as f32,
                        _ => {
                            // Duration unknown; don't update progress
                            continue;
                        }
                    };
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        sess.progress = progress;
                    }
                }
            }
        }))
    } else {
        None
    };
    // Wait for child, but also listen for cancellation
    let status = tokio::select! {
        result = child.wait() => {
            result.map_err(|e| {
                crate::error::PinakesError::InvalidOperation(format!("ffmpeg process error: {}", e))
            })?
        }
        _ = cancel.notified() => {
            // Kill the child process on cancel
            if let Err(e) = child.kill().await {
                tracing::error!("failed to kill ffmpeg process: {}", e);
            }
            return Err(crate::error::PinakesError::InvalidOperation(
                "cancelled by user".to_string(),
            ));
        }
    };
    // Await the stdout reader task
    if let Some(handle) = stdout_handle {
        let _ = handle.await;
    }
    // Collect stderr output for error reporting
    let stderr_output = if let Some(handle) = stderr_handle {
        handle.await.unwrap_or_default()
    } else {
        Vec::new()
    };
    if !status.success() {
        // Include only the last 10 stderr lines to keep the error compact.
        let last_stderr = stderr_output
            .iter()
            .rev()
            .take(10)
            .rev()
            .cloned()
            .collect::<Vec<_>>()
            .join("\n");
        return Err(crate::error::PinakesError::InvalidOperation(format!(
            "ffmpeg exited with status: {}\nstderr:\n{}",
            status, last_stderr
        )));
    }
    Ok(())
}

View file

@ -0,0 +1,210 @@
//! User management and authentication
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid;
use crate::config::UserRole;
use crate::error::{PinakesError, Result};
/// Strongly-typed user identifier wrapping a UUID.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct UserId(pub Uuid);
impl UserId {
    /// Generate a fresh id using UUIDv7 (time-ordered).
    pub fn new() -> Self {
        Self(Uuid::now_v7())
    }
}
// `Default` delegates to `new()`, so a defaulted UserId is a fresh UUIDv7.
impl Default for UserId {
    fn default() -> Self {
        Self::new()
    }
}
// Renders as the inner UUID's canonical string form.
impl std::fmt::Display for UserId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
// Allows cheap wrapping of raw UUIDs (e.g. from DB rows) into UserId.
impl From<Uuid> for UserId {
    fn from(id: Uuid) -> Self {
        Self(id)
    }
}
/// User account with profile information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct User {
    /// Unique user id.
    pub id: UserId,
    /// Login name.
    pub username: String,
    /// Argon2 password hash; never serialized into API responses.
    #[serde(skip_serializing)]
    pub password_hash: String,
    /// Role controlling the user's capabilities.
    pub role: UserRole,
    /// Profile data (avatar, bio, preferences).
    pub profile: UserProfile,
    /// Account creation time.
    pub created_at: DateTime<Utc>,
    /// Last modification time.
    pub updated_at: DateTime<Utc>,
}
/// User profile information
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct UserProfile {
    /// Path to the user's avatar image, if set.
    pub avatar_path: Option<String>,
    /// Free-text biography.
    pub bio: Option<String>,
    /// Per-user preference settings.
    pub preferences: UserPreferences,
}
/// User-specific preferences
///
/// All fields default to unset/false/empty via `Default`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct UserPreferences {
    /// UI theme preference
    pub theme: Option<String>,
    /// Language preference
    pub language: Option<String>,
    /// Default video quality preference for transcoding
    pub default_video_quality: Option<String>,
    /// Whether to auto-play media
    pub auto_play: bool,
    /// Custom preferences (extensible)
    pub custom: HashMap<String, serde_json::Value>,
}
/// Library access permission
///
/// Ordered capability levels: Read < Write < Admin (each level implies
/// the previous ones — see the `can_*` helpers).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LibraryPermission {
    /// Can only read/view media
    Read,
    /// Can read and modify media metadata
    Write,
    /// Full control including deletion and sharing
    Admin,
}
impl LibraryPermission {
    /// Every permission level grants read access.
    pub fn can_read(&self) -> bool {
        true
    }

    /// Write access is granted to Write and Admin.
    pub fn can_write(&self) -> bool {
        match self {
            Self::Read => false,
            Self::Write | Self::Admin => true,
        }
    }

    /// Administrative access is exclusive to Admin.
    pub fn can_admin(&self) -> bool {
        match self {
            Self::Admin => true,
            _ => false,
        }
    }
}
/// User's access to a specific library root
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserLibraryAccess {
    /// The user holding the grant.
    pub user_id: UserId,
    /// Library root path the grant applies to.
    pub root_path: String,
    /// Permission level for this root.
    pub permission: LibraryPermission,
    /// When access was granted.
    pub granted_at: DateTime<Utc>,
}
/// User creation request
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateUserRequest {
    pub username: String,
    /// Plain-text password from the client; excluded from serialization
    /// so it is never echoed back in responses or logs that serialize
    /// this struct.
    #[serde(skip_serializing)]
    pub password: String,
    pub role: UserRole,
    /// Optional initial profile; implementation-defined default when `None`.
    pub profile: Option<UserProfile>,
}
/// User update request
///
/// All fields are optional; `None` leaves the corresponding value unchanged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateUserRequest {
    /// New plain-text password; excluded from serialization.
    #[serde(skip_serializing)]
    pub password: Option<String>,
    pub role: Option<UserRole>,
    pub profile: Option<UserProfile>,
}
/// User authentication helpers: password hashing and verification.
pub mod auth {
    use super::*;

    /// Hash a password using Argon2
    ///
    /// A fresh random salt is generated per call, so hashing the same
    /// password twice yields different hash strings.
    ///
    /// # Errors
    /// Returns `PinakesError::Authentication` if hashing fails.
    pub fn hash_password(password: &str) -> Result<String> {
        use argon2::{
            Argon2,
            password_hash::{PasswordHasher, SaltString, rand_core::OsRng},
        };

        let salt = SaltString::generate(&mut OsRng);
        let argon2 = Argon2::default();

        argon2
            .hash_password(password.as_bytes(), &salt)
            .map(|hash| hash.to_string())
            .map_err(|e| PinakesError::Authentication(format!("failed to hash password: {e}")))
    }

    /// Verify a password against a hash
    ///
    /// A mismatched password yields `Ok(false)`, not an error.
    ///
    /// # Errors
    /// Returns `PinakesError::Authentication` if `hash` cannot be parsed
    /// as a password hash string.
    pub fn verify_password(password: &str, hash: &str) -> Result<bool> {
        use argon2::{
            Argon2,
            password_hash::{PasswordHash, PasswordVerifier},
        };

        let parsed_hash = PasswordHash::new(hash)
            .map_err(|e| PinakesError::Authentication(format!("invalid password hash: {e}")))?;
        Ok(Argon2::default()
            .verify_password(password.as_bytes(), &parsed_hash)
            .is_ok())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip: a hashed password verifies against its own hash and a
    // wrong password is rejected (Ok(false), not an error).
    #[test]
    fn test_hash_and_verify_password() {
        let password = "test_password_123";
        let hash = auth::hash_password(password).unwrap();
        assert!(auth::verify_password(password, &hash).unwrap());
        assert!(!auth::verify_password("wrong_password", &hash).unwrap());
    }

    // Defaults: optional prefs unset, auto_play off, no custom entries.
    #[test]
    fn test_user_preferences_default() {
        let prefs = UserPreferences::default();
        assert_eq!(prefs.theme, None);
        assert_eq!(prefs.language, None);
        assert!(!prefs.auto_play);
        assert!(prefs.custom.is_empty());
    }

    // Capability ladder: Read < Write < Admin; every level can read.
    #[test]
    fn test_library_permission_levels() {
        let read = LibraryPermission::Read;
        assert!(read.can_read());
        assert!(!read.can_write());
        assert!(!read.can_admin());
        let write = LibraryPermission::Write;
        assert!(write.can_read());
        assert!(write.can_write());
        assert!(!write.can_admin());
        let admin = LibraryPermission::Admin;
        assert!(admin.can_read());
        assert!(admin.can_write());
        assert!(admin.can_admin());
    }
}

View file

@ -21,7 +21,9 @@ async fn test_media_crud() {
id,
path: "/tmp/test.txt".into(),
file_name: "test.txt".to_string(),
media_type: pinakes_core::media_type::MediaType::PlainText,
media_type: pinakes_core::media_type::MediaType::Builtin(
pinakes_core::media_type::BuiltinMediaType::PlainText,
),
content_hash: ContentHash::new("abc123".to_string()),
file_size: 100,
title: Some("Test Title".to_string()),
@ -97,7 +99,9 @@ async fn test_tags() {
id,
path: "/tmp/song.mp3".into(),
file_name: "song.mp3".to_string(),
media_type: pinakes_core::media_type::MediaType::Mp3,
media_type: pinakes_core::media_type::MediaType::Builtin(
pinakes_core::media_type::BuiltinMediaType::Mp3,
),
content_hash: ContentHash::new("hash1".to_string()),
file_size: 5000,
title: Some("Test Song".to_string()),
@ -147,7 +151,9 @@ async fn test_collections() {
id,
path: "/tmp/doc.pdf".into(),
file_name: "doc.pdf".to_string(),
media_type: pinakes_core::media_type::MediaType::Pdf,
media_type: pinakes_core::media_type::MediaType::Builtin(
pinakes_core::media_type::BuiltinMediaType::Pdf,
),
content_hash: ContentHash::new("pdfhash".to_string()),
file_size: 10000,
title: None,
@ -192,7 +198,9 @@ async fn test_custom_fields() {
id,
path: "/tmp/test.md".into(),
file_name: "test.md".to_string(),
media_type: pinakes_core::media_type::MediaType::Markdown,
media_type: pinakes_core::media_type::MediaType::Builtin(
pinakes_core::media_type::BuiltinMediaType::Markdown,
),
content_hash: ContentHash::new("mdhash".to_string()),
file_size: 500,
title: None,
@ -387,7 +395,9 @@ async fn test_library_statistics_with_data() {
id: MediaId::new(),
path: "/tmp/stats_test.mp3".into(),
file_name: "stats_test.mp3".to_string(),
media_type: pinakes_core::media_type::MediaType::Mp3,
media_type: pinakes_core::media_type::MediaType::Builtin(
pinakes_core::media_type::BuiltinMediaType::Mp3,
),
content_hash: ContentHash::new("stats_hash".to_string()),
file_size: 5000,
title: Some("Stats Song".to_string()),
@ -412,3 +422,449 @@ async fn test_library_statistics_with_data() {
assert!(stats.newest_item.is_some());
assert!(stats.oldest_item.is_some());
}
// ===== Phase 2: Media Server Features =====
// Build a minimal MP4 MediaItem fixture whose path/name/hash derive from
// `hash`, so each test gets a distinct, insertable item.
fn make_test_media(hash: &str) -> MediaItem {
    let now = chrono::Utc::now();
    MediaItem {
        id: MediaId::new(),
        path: format!("/tmp/test_{hash}.mp4").into(),
        file_name: format!("test_{hash}.mp4"),
        media_type: pinakes_core::media_type::MediaType::Builtin(
            pinakes_core::media_type::BuiltinMediaType::Mp4,
        ),
        content_hash: ContentHash::new(hash.to_string()),
        file_size: 1000,
        title: Some(format!("Test {hash}")),
        artist: Some("Test Artist".to_string()),
        album: None,
        genre: None,
        year: Some(2024),
        duration_secs: Some(120.0),
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        created_at: now,
        updated_at: now,
    }
}
// Exercises the full rating lifecycle: rate, fetch by user, list per
// media item, and delete.
#[tokio::test]
async fn test_ratings_crud() {
    let storage = setup().await;
    let item = make_test_media("rating1");
    storage.insert_media(&item).await.unwrap();
    let user_id = pinakes_core::users::UserId::new();
    // Rate media
    let rating = storage
        .rate_media(user_id, item.id, 4, Some("Great video"))
        .await
        .unwrap();
    assert_eq!(rating.stars, 4);
    assert_eq!(rating.review_text.as_deref(), Some("Great video"));
    // Get user's rating
    let fetched = storage.get_user_rating(user_id, item.id).await.unwrap();
    assert!(fetched.is_some());
    assert_eq!(fetched.unwrap().stars, 4);
    // Get media ratings
    let ratings = storage.get_media_ratings(item.id).await.unwrap();
    assert_eq!(ratings.len(), 1);
    // Delete rating
    storage.delete_rating(rating.id).await.unwrap();
    let empty = storage.get_media_ratings(item.id).await.unwrap();
    assert!(empty.is_empty());
}
// Covers top-level comments, threaded replies (parent_comment_id), and
// deletion of a single comment from a thread.
#[tokio::test]
async fn test_comments_crud() {
    let storage = setup().await;
    let item = make_test_media("comment1");
    storage.insert_media(&item).await.unwrap();
    let user_id = pinakes_core::users::UserId::new();
    // Add comment
    let comment = storage
        .add_comment(user_id, item.id, "Nice video!", None)
        .await
        .unwrap();
    assert_eq!(comment.text, "Nice video!");
    assert!(comment.parent_comment_id.is_none());
    // Add reply
    let reply = storage
        .add_comment(user_id, item.id, "Thanks!", Some(comment.id))
        .await
        .unwrap();
    assert_eq!(reply.parent_comment_id, Some(comment.id));
    // List comments
    let comments = storage.get_media_comments(item.id).await.unwrap();
    assert_eq!(comments.len(), 2);
    // Delete comment
    storage.delete_comment(reply.id).await.unwrap();
    let remaining = storage.get_media_comments(item.id).await.unwrap();
    assert_eq!(remaining.len(), 1);
}
// Verifies favorite add/remove round-trip and the is_favorite predicate.
#[tokio::test]
async fn test_favorites_toggle() {
    let storage = setup().await;
    let item = make_test_media("fav1");
    storage.insert_media(&item).await.unwrap();
    let user_id = pinakes_core::users::UserId::new();
    // Not a favorite initially
    assert!(!storage.is_favorite(user_id, item.id).await.unwrap());
    // Add favorite
    storage.add_favorite(user_id, item.id).await.unwrap();
    assert!(storage.is_favorite(user_id, item.id).await.unwrap());
    // List favorites
    let favs = storage
        .get_user_favorites(user_id, &Pagination::default())
        .await
        .unwrap();
    assert_eq!(favs.len(), 1);
    // Remove favorite
    storage.remove_favorite(user_id, item.id).await.unwrap();
    assert!(!storage.is_favorite(user_id, item.id).await.unwrap());
}
// Covers share-link create/resolve-by-token, view-count increment, and
// deletion (subsequent lookup by token errors).
#[tokio::test]
async fn test_share_links() {
    let storage = setup().await;
    let item = make_test_media("share1");
    storage.insert_media(&item).await.unwrap();
    let user_id = pinakes_core::users::UserId::new();
    let token = "test_share_token_abc123";
    // Create share link
    let link = storage
        .create_share_link(item.id, user_id, token, None, None)
        .await
        .unwrap();
    assert_eq!(link.token, token);
    assert_eq!(link.view_count, 0);
    // Get share link
    let fetched = storage.get_share_link(token).await.unwrap();
    assert_eq!(fetched.media_id, item.id);
    // Increment views
    storage.increment_share_views(token).await.unwrap();
    let updated = storage.get_share_link(token).await.unwrap();
    assert_eq!(updated.view_count, 1);
    // Delete share link
    storage.delete_share_link(link.id).await.unwrap();
    let result = storage.get_share_link(token).await;
    assert!(result.is_err());
}
// End-to-end playlist coverage: create, fetch, add/reorder/remove items,
// update metadata, list, and delete.
#[tokio::test]
async fn test_playlists_crud() {
    let storage = setup().await;
    let item1 = make_test_media("pl1");
    let item2 = make_test_media("pl2");
    storage.insert_media(&item1).await.unwrap();
    storage.insert_media(&item2).await.unwrap();
    let owner = pinakes_core::users::UserId::new();
    // Create playlist
    let playlist = storage
        .create_playlist(
            owner,
            "My Playlist",
            Some("A test playlist"),
            true,
            false,
            None,
        )
        .await
        .unwrap();
    assert_eq!(playlist.name, "My Playlist");
    assert!(playlist.is_public);
    // Get playlist
    let fetched = storage.get_playlist(playlist.id).await.unwrap();
    assert_eq!(fetched.name, "My Playlist");
    // Add items
    storage
        .add_to_playlist(playlist.id, item1.id, 0)
        .await
        .unwrap();
    storage
        .add_to_playlist(playlist.id, item2.id, 1)
        .await
        .unwrap();
    // Get playlist items
    let items = storage.get_playlist_items(playlist.id).await.unwrap();
    assert_eq!(items.len(), 2);
    // Reorder
    storage
        .reorder_playlist(playlist.id, item2.id, 0)
        .await
        .unwrap();
    // Remove item
    storage
        .remove_from_playlist(playlist.id, item1.id)
        .await
        .unwrap();
    let items = storage.get_playlist_items(playlist.id).await.unwrap();
    assert_eq!(items.len(), 1);
    // Update playlist
    let updated = storage
        .update_playlist(playlist.id, Some("Renamed"), None, Some(false))
        .await
        .unwrap();
    assert_eq!(updated.name, "Renamed");
    assert!(!updated.is_public);
    // List playlists
    let playlists = storage.list_playlists(None).await.unwrap();
    assert!(!playlists.is_empty());
    // Delete playlist
    storage.delete_playlist(playlist.id).await.unwrap();
    let result = storage.get_playlist(playlist.id).await;
    assert!(result.is_err());
}
#[tokio::test]
async fn test_analytics_usage_events() {
    let store = setup().await;
    let media = make_test_media("analytics1");
    store.insert_media(&media).await.unwrap();
    let uid = pinakes_core::users::UserId::new();

    // Record a single "view" event against the item.
    let view = pinakes_core::analytics::UsageEvent {
        id: uuid::Uuid::now_v7(),
        media_id: Some(media.id),
        user_id: Some(uid),
        event_type: pinakes_core::analytics::UsageEventType::View,
        timestamp: chrono::Utc::now(),
        duration_secs: Some(60.0),
        context_json: None,
    };
    store.record_usage_event(&view).await.unwrap();

    // The event comes back when filtering by media id.
    let fetched = store
        .get_usage_events(Some(media.id), None, 10)
        .await
        .unwrap();
    assert_eq!(fetched.len(), 1);
    assert_eq!(
        fetched[0].event_type,
        pinakes_core::analytics::UsageEventType::View
    );

    // Aggregations should reflect exactly one view.
    let top = store.get_most_viewed(10).await.unwrap();
    assert_eq!(top.len(), 1);
    assert_eq!(top[0].1, 1);
    assert_eq!(store.get_recently_viewed(uid, 10).await.unwrap().len(), 1);
}
#[tokio::test]
async fn test_watch_progress() {
    let store = setup().await;
    let media = make_test_media("progress1");
    store.insert_media(&media).await.unwrap();
    let uid = pinakes_core::users::UserId::new();

    // A brand-new item has no recorded position.
    assert!(store
        .get_watch_progress(uid, media.id)
        .await
        .unwrap()
        .is_none());

    // First write stores the position.
    store
        .update_watch_progress(uid, media.id, 45.5)
        .await
        .unwrap();
    assert_eq!(
        store.get_watch_progress(uid, media.id).await.unwrap(),
        Some(45.5)
    );

    // A second write upserts rather than duplicating.
    store
        .update_watch_progress(uid, media.id, 90.0)
        .await
        .unwrap();
    assert_eq!(
        store.get_watch_progress(uid, media.id).await.unwrap(),
        Some(90.0)
    );
}
#[tokio::test]
async fn test_cleanup_old_events() {
    let store = setup().await;

    // Insert an event dated well past the retention window.
    let stale = pinakes_core::analytics::UsageEvent {
        id: uuid::Uuid::now_v7(),
        media_id: None,
        user_id: None,
        event_type: pinakes_core::analytics::UsageEventType::Search,
        timestamp: chrono::Utc::now() - chrono::Duration::days(100),
        duration_secs: None,
        context_json: None,
    };
    store.record_usage_event(&stale).await.unwrap();

    // Purging everything older than 90 days removes exactly that event.
    let cutoff = chrono::Utc::now() - chrono::Duration::days(90);
    assert_eq!(store.cleanup_old_events(cutoff).await.unwrap(), 1);
}
#[tokio::test]
async fn test_subtitles_crud() {
    let store = setup().await;
    let media = make_test_media("sub1");
    store.insert_media(&media).await.unwrap();

    // Attach an external English SRT track with no sync offset.
    let track = pinakes_core::subtitles::Subtitle {
        id: uuid::Uuid::now_v7(),
        media_id: media.id,
        language: Some("en".to_string()),
        format: pinakes_core::subtitles::SubtitleFormat::Srt,
        file_path: Some("/tmp/test.srt".into()),
        is_embedded: false,
        track_index: None,
        offset_ms: 0,
        created_at: chrono::Utc::now(),
    };
    store.add_subtitle(&track).await.unwrap();

    // The track is listed with language and format intact.
    let listed = store.get_media_subtitles(media.id).await.unwrap();
    assert_eq!(listed.len(), 1);
    assert_eq!(listed[0].language.as_deref(), Some("en"));
    assert_eq!(listed[0].format, pinakes_core::subtitles::SubtitleFormat::Srt);

    // Adjusting the sync offset is persisted.
    store.update_subtitle_offset(track.id, 500).await.unwrap();
    assert_eq!(
        store.get_media_subtitles(media.id).await.unwrap()[0].offset_ms,
        500
    );

    // Deletion leaves the item with no subtitle tracks.
    store.delete_subtitle(track.id).await.unwrap();
    assert!(store.get_media_subtitles(media.id).await.unwrap().is_empty());
}
#[tokio::test]
async fn test_external_metadata() {
    let store = setup().await;
    let media = make_test_media("enrich1");
    store.insert_media(&media).await.unwrap();

    // Store one MusicBrainz enrichment record.
    let record = pinakes_core::enrichment::ExternalMetadata {
        id: uuid::Uuid::now_v7(),
        media_id: media.id,
        source: pinakes_core::enrichment::EnrichmentSourceType::MusicBrainz,
        external_id: Some("mb-123".to_string()),
        metadata_json: r#"{"title":"Test"}"#.to_string(),
        confidence: 0.85,
        last_updated: chrono::Utc::now(),
    };
    store.store_external_metadata(&record).await.unwrap();

    // The record round-trips with source, id and confidence intact.
    let rows = store.get_external_metadata(media.id).await.unwrap();
    assert_eq!(rows.len(), 1);
    assert_eq!(
        rows[0].source,
        pinakes_core::enrichment::EnrichmentSourceType::MusicBrainz
    );
    assert_eq!(rows[0].external_id.as_deref(), Some("mb-123"));
    assert!((rows[0].confidence - 0.85).abs() < 0.01);

    // Removing it leaves no enrichment rows behind.
    store.delete_external_metadata(record.id).await.unwrap();
    assert!(store.get_external_metadata(media.id).await.unwrap().is_empty());
}
#[tokio::test]
async fn test_transcode_sessions() {
    let store = setup().await;
    let media = make_test_media("transcode1");
    store.insert_media(&media).await.unwrap();

    // Register a pending 720p session that expires in a day.
    let sess = pinakes_core::transcode::TranscodeSession {
        id: uuid::Uuid::now_v7(),
        media_id: media.id,
        user_id: None,
        profile: "720p".to_string(),
        cache_path: "/tmp/transcode/test.mp4".into(),
        status: pinakes_core::transcode::TranscodeStatus::Pending,
        progress: 0.0,
        created_at: chrono::Utc::now(),
        expires_at: Some(chrono::Utc::now() + chrono::Duration::hours(24)),
        duration_secs: None,
        child_cancel: None,
    };
    store.create_transcode_session(&sess).await.unwrap();

    // Fresh sessions report their profile and a pending status.
    let loaded = store.get_transcode_session(sess.id).await.unwrap();
    assert_eq!(loaded.profile, "720p");
    assert_eq!(loaded.status.as_str(), "pending");

    // Moving to the transcoding state updates status and progress together.
    store
        .update_transcode_status(
            sess.id,
            pinakes_core::transcode::TranscodeStatus::Transcoding,
            0.5,
        )
        .await
        .unwrap();
    let loaded = store.get_transcode_session(sess.id).await.unwrap();
    assert_eq!(loaded.status.as_str(), "transcoding");
    assert!((loaded.progress - 0.5).abs() < 0.01);

    // The session is visible both unfiltered and filtered by media id.
    assert_eq!(store.list_transcode_sessions(None).await.unwrap().len(), 1);
    assert_eq!(
        store
            .list_transcode_sessions(Some(media.id))
            .await
            .unwrap()
            .len(),
        1
    );

    // With a cutoff far in the future, the session counts as expired.
    let horizon = chrono::Utc::now() + chrono::Duration::days(365);
    assert_eq!(store.cleanup_expired_transcodes(horizon).await.unwrap(), 1);
}

View file

@ -0,0 +1,27 @@
# Crate manifest for pinakes-plugin-api: the stable plugin interface crate.
[package]
name = "pinakes-plugin-api"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
# Core dependencies
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
async-trait = { workspace = true }
# For plugin manifest parsing
toml = { workspace = true }
# For media types and identifiers
uuid = { workspace = true }
chrono = { workspace = true }
mime_guess = { workspace = true }
# WASM bridge types
wit-bindgen = { workspace = true, optional = true }
[features]
default = []
# Opt-in WASM guest bindings; pulls in the optional wit-bindgen dependency.
wasm = ["wit-bindgen"]

View file

@ -0,0 +1,374 @@
//! Pinakes Plugin API
//!
//! This crate defines the stable plugin interface for Pinakes.
//! Plugins can extend Pinakes by implementing one or more of the provided traits.
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use thiserror::Error;
pub mod manifest;
pub mod types;
pub mod wasm;
pub use manifest::PluginManifest;
pub use types::*;
/// Plugin API version - plugins must match this version
// NOTE(review): presumably compared against `PluginInfo::api_version` by the
// host-side loader, which is outside this crate — confirm there.
pub const PLUGIN_API_VERSION: &str = "1.0";
/// Result type for plugin operations
pub type PluginResult<T> = Result<T, PluginError>;
/// Errors that can occur in plugin operations
///
/// All payloads are plain `String`s so the error stays serializable across
/// the WASM host/guest boundary.
#[derive(Debug, Error, Serialize, Deserialize)]
pub enum PluginError {
    /// The plugin failed during `Plugin::initialize`.
    #[error("Plugin initialization failed: {0}")]
    InitializationFailed(String),
    /// The plugin was asked to perform something it does not support.
    #[error("Unsupported operation: {0}")]
    UnsupportedOperation(String),
    /// Caller-supplied input was rejected by the plugin.
    #[error("Invalid input: {0}")]
    InvalidInput(String),
    /// An I/O failure, stringified for transport.
    #[error("IO error: {0}")]
    IoError(String),
    /// A `MetadataExtractor` could not process the file.
    #[error("Metadata extraction failed: {0}")]
    MetadataExtractionFailed(String),
    /// A `ThumbnailGenerator` could not produce a thumbnail.
    #[error("Thumbnail generation failed: {0}")]
    ThumbnailGenerationFailed(String),
    /// A `SearchBackend` operation failed.
    #[error("Search backend error: {0}")]
    SearchBackendError(String),
    /// The plugin attempted an action outside its granted capabilities.
    #[error("Permission denied: {0}")]
    PermissionDenied(String),
    /// The plugin exceeded a configured resource limit.
    #[error("Resource limit exceeded: {0}")]
    ResourceLimitExceeded(String),
    /// Catch-all for plugin-specific failures.
    #[error("Plugin error: {0}")]
    Other(String),
}
/// Context provided to plugins during initialization
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginContext {
    /// Plugin's data directory for persistent storage
    pub data_dir: PathBuf,
    /// Plugin's cache directory for temporary data
    pub cache_dir: PathBuf,
    /// Plugin configuration from manifest
    pub config: HashMap<String, serde_json::Value>,
    /// Capabilities granted to the plugin
    pub capabilities: Capabilities,
}
/// Capabilities that can be granted to plugins
///
/// The derived `Default` is the most restrictive grant: no filesystem
/// paths, no network, no environment access, and no explicit limits.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct Capabilities {
    /// Filesystem access permissions
    pub filesystem: FilesystemCapability,
    /// Network access permissions
    pub network: NetworkCapability,
    /// Environment variable access
    pub environment: EnvironmentCapability,
    /// Maximum memory usage in bytes
    pub max_memory_bytes: Option<usize>,
    /// Maximum CPU time in milliseconds
    pub max_cpu_time_ms: Option<u64>,
}
/// Filesystem paths a plugin may touch; empty lists mean no access.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct FilesystemCapability {
    /// Paths allowed for reading
    pub read: Vec<PathBuf>,
    /// Paths allowed for writing
    pub write: Vec<PathBuf>,
}
/// Network permissions for a plugin.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct NetworkCapability {
    /// Whether network access is allowed
    pub enabled: bool,
    /// Allowed domains (if None, all domains allowed when enabled)
    pub allowed_domains: Option<Vec<String>>,
}
/// Environment-variable permissions for a plugin.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct EnvironmentCapability {
    /// Whether environment variable access is allowed
    pub enabled: bool,
    /// Specific environment variables allowed (if None, all allowed when enabled)
    pub allowed_vars: Option<Vec<String>>,
}
/// Base trait that all plugins must implement
#[async_trait]
pub trait Plugin: Send + Sync {
    /// Get plugin metadata
    fn metadata(&self) -> &PluginMetadata;
    /// Initialize the plugin with provided context
    async fn initialize(&mut self, context: PluginContext) -> PluginResult<()>;
    /// Shutdown the plugin gracefully
    async fn shutdown(&mut self) -> PluginResult<()>;
    /// Get plugin health status
    async fn health_check(&self) -> PluginResult<HealthStatus>;
}
/// Plugin metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginMetadata {
    /// Unique plugin identifier.
    pub id: String,
    /// Human-readable plugin name.
    pub name: String,
    /// The plugin's own version string.
    pub version: String,
    pub author: String,
    pub description: String,
    /// API version the plugin was built against ("plugins must match this
    /// version" per `PLUGIN_API_VERSION`).
    pub api_version: String,
    /// Capabilities the plugin needs in order to function.
    pub capabilities_required: Capabilities,
}
/// Health status of a plugin
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthStatus {
    /// `true` when the plugin considers itself operational.
    pub healthy: bool,
    /// Optional human-readable detail.
    pub message: Option<String>,
    /// Free-form numeric metrics reported by the plugin.
    pub metrics: HashMap<String, f64>,
}
/// Trait for plugins that provide custom media type support
#[async_trait]
pub trait MediaTypeProvider: Plugin {
    /// Get the list of media types this plugin supports
    fn supported_media_types(&self) -> Vec<MediaTypeDefinition>;
    /// Check if this plugin can handle the given file
    ///
    /// `mime_type` is an optional hint and may be absent.
    async fn can_handle(&self, path: &PathBuf, mime_type: Option<&str>) -> PluginResult<bool>;
}
/// Definition of a custom media type
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MediaTypeDefinition {
    /// Unique identifier for this media type
    pub id: String,
    /// Display name
    pub name: String,
    /// Category (e.g., "video", "audio", "document", "image")
    pub category: String,
    /// File extensions associated with this type
    pub extensions: Vec<String>,
    /// MIME types associated with this type
    pub mime_types: Vec<String>,
    /// Icon name or path
    pub icon: Option<String>,
}
/// Trait for plugins that extract metadata from files
#[async_trait]
pub trait MetadataExtractor: Plugin {
    /// Extract metadata from a file
    async fn extract_metadata(&self, path: &PathBuf) -> PluginResult<ExtractedMetadata>;
    /// Get the media types this extractor supports
    // NOTE(review): presumably these strings match `MediaTypeDefinition::id`
    // values — confirm against the host-side plugin loader.
    fn supported_types(&self) -> Vec<String>;
}
/// Metadata extracted from a file
///
/// Every field is optional; the derived `Default` is an empty record.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ExtractedMetadata {
    pub title: Option<String>,
    pub description: Option<String>,
    pub author: Option<String>,
    pub created_at: Option<String>,
    pub duration_secs: Option<f64>,
    pub width: Option<u32>,
    pub height: Option<u32>,
    pub file_size_bytes: Option<u64>,
    pub codec: Option<String>,
    pub bitrate_kbps: Option<u32>,
    /// Custom metadata fields specific to this file type
    pub custom_fields: HashMap<String, serde_json::Value>,
    /// Tags extracted from the file
    pub tags: Vec<String>,
}
/// Trait for plugins that generate thumbnails
#[async_trait]
pub trait ThumbnailGenerator: Plugin {
    /// Generate a thumbnail for the given file
    ///
    /// Writes the result to `output_path` and reports its final dimensions
    /// and size via the returned `ThumbnailInfo`.
    async fn generate_thumbnail(
        &self,
        path: &PathBuf,
        output_path: &PathBuf,
        options: ThumbnailOptions,
    ) -> PluginResult<ThumbnailInfo>;
    /// Get the media types this generator supports
    fn supported_types(&self) -> Vec<String>;
}
/// Options for thumbnail generation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThumbnailOptions {
    /// Requested width in pixels.
    pub width: u32,
    /// Requested height in pixels.
    pub height: u32,
    /// Encoding quality hint (NOTE(review): assumed 0-100 — confirm scale).
    pub quality: u8,
    /// Output image format.
    pub format: ThumbnailFormat,
}
/// Supported thumbnail output encodings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ThumbnailFormat {
    Jpeg,
    Png,
    WebP,
}
/// Information about a generated thumbnail
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThumbnailInfo {
    /// Where the thumbnail was written.
    pub path: PathBuf,
    /// Actual output width in pixels.
    pub width: u32,
    /// Actual output height in pixels.
    pub height: u32,
    /// Size of the generated file in bytes.
    pub file_size_bytes: u64,
}
/// Trait for plugins that provide custom search backends
#[async_trait]
pub trait SearchBackend: Plugin {
    /// Index a media item for search
    async fn index_item(&self, item: &SearchIndexItem) -> PluginResult<()>;
    /// Remove an item from the search index
    async fn remove_item(&self, item_id: &str) -> PluginResult<()>;
    /// Perform a search query
    async fn search(&self, query: &SearchQuery) -> PluginResult<Vec<SearchResult>>;
    /// Get search statistics
    async fn get_stats(&self) -> PluginResult<SearchStats>;
}
/// Item to be indexed for search
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchIndexItem {
    /// Identifier echoed back in `SearchResult::id`.
    pub id: String,
    pub title: Option<String>,
    pub description: Option<String>,
    /// Full-text body, when available.
    pub content: Option<String>,
    pub tags: Vec<String>,
    pub media_type: String,
    /// Arbitrary extra fields a backend may index or ignore.
    pub metadata: HashMap<String, serde_json::Value>,
}
/// Search query
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchQuery {
    /// Free-text query string.
    pub query_text: String,
    /// Backend-specific filter criteria.
    pub filters: HashMap<String, serde_json::Value>,
    /// Maximum number of results to return.
    pub limit: usize,
    /// Number of results to skip (pagination).
    pub offset: usize,
}
/// Search result
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResult {
    /// Identifier of the matched item.
    pub id: String,
    /// Relevance score assigned by the backend.
    pub score: f64,
    /// Matching text fragments, if the backend produces them.
    pub highlights: Vec<String>,
}
/// Search statistics
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchStats {
    /// Number of items currently indexed.
    pub total_indexed: usize,
    /// Size of the index in bytes.
    pub index_size_bytes: u64,
    /// Timestamp of the last index update, if known.
    pub last_update: Option<String>,
}
/// Trait for plugins that handle events
#[async_trait]
pub trait EventHandler: Plugin {
    /// Handle an event
    async fn handle_event(&self, event: &Event) -> PluginResult<()>;
    /// Get the event types this handler is interested in
    fn interested_events(&self) -> Vec<EventType>;
}
/// Event type
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum EventType {
    MediaImported,
    MediaUpdated,
    MediaDeleted,
    MediaTagged,
    MediaUntagged,
    CollectionCreated,
    CollectionUpdated,
    CollectionDeleted,
    ScanStarted,
    ScanCompleted,
    /// Plugin-defined event type carrying its own name.
    Custom(String),
}
/// Event data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Event {
    pub event_type: EventType,
    /// Event time as a string.
    // NOTE(review): the format is not fixed here — confirm whether this is
    // RFC 3339 before parsing it.
    pub timestamp: String,
    /// Event-specific payload.
    pub data: HashMap<String, serde_json::Value>,
}
/// Trait for plugins that provide UI themes
#[async_trait]
pub trait ThemeProvider: Plugin {
    /// Get available themes from this provider
    fn get_themes(&self) -> Vec<ThemeDefinition>;
    /// Load a specific theme by ID
    async fn load_theme(&self, theme_id: &str) -> PluginResult<Theme>;
}
/// Theme definition
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThemeDefinition {
    pub id: String,
    pub name: String,
    pub description: String,
    pub author: String,
    /// Optional URL of a preview image.
    pub preview_url: Option<String>,
}
/// Theme data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Theme {
    pub id: String,
    /// Color values keyed by token name.
    pub colors: HashMap<String, String>,
    /// Font settings keyed by role.
    pub fonts: HashMap<String, String>,
    /// Optional raw CSS supplied by the theme.
    pub custom_css: Option<String>,
}

View file

@ -0,0 +1,263 @@
//! Plugin manifest parsing and validation
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::Path;
use thiserror::Error;
use crate::{Capabilities, EnvironmentCapability, FilesystemCapability, NetworkCapability};
/// Plugin manifest file format (TOML)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginManifest {
    /// The `[plugin]` table: identity, kinds and binary location.
    pub plugin: PluginInfo,
    /// The `[capabilities]` table; defaults to a fully restricted grant.
    #[serde(default)]
    pub capabilities: ManifestCapabilities,
    /// Free-form `[config]` table passed through to the plugin.
    #[serde(default)]
    pub config: HashMap<String, toml::Value>,
}
/// Identity and wiring information from the `[plugin]` table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginInfo {
    pub name: String,
    /// Plugin version; `validate` requires it to contain a '.' (semver-ish).
    pub version: String,
    /// Plugin API version the plugin was built against; must be non-empty.
    pub api_version: String,
    pub author: Option<String>,
    pub description: Option<String>,
    pub homepage: Option<String>,
    pub license: Option<String>,
    /// Plugin kind(s) - e.g., ["media_type", "metadata_extractor"]
    pub kind: Vec<String>,
    /// Binary configuration
    pub binary: PluginBinary,
    /// Dependencies on other plugins
    #[serde(default)]
    pub dependencies: Vec<String>,
}
/// Location of the plugin's executable code.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginBinary {
    /// Path to WASM binary
    pub wasm: String,
    /// Optional entrypoint function name (default: "_start")
    pub entrypoint: Option<String>,
}
/// Raw capability requests as written in the manifest; converted to runtime
/// `Capabilities` by `PluginManifest::to_capabilities`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ManifestCapabilities {
    #[serde(default)]
    pub filesystem: ManifestFilesystemCapability,
    /// Whether the plugin may use the network at all.
    #[serde(default)]
    pub network: bool,
    /// Environment variables the plugin may read (`None` = no access).
    #[serde(default)]
    pub environment: Option<Vec<String>>,
    /// Memory cap in mebibytes; converted to bytes for the runtime.
    #[serde(default)]
    pub max_memory_mb: Option<usize>,
    /// CPU-time cap in seconds; converted to milliseconds for the runtime.
    #[serde(default)]
    pub max_cpu_time_secs: Option<u64>,
}
/// Filesystem path lists from the `[capabilities.filesystem]` table.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ManifestFilesystemCapability {
    #[serde(default)]
    pub read: Vec<String>,
    #[serde(default)]
    pub write: Vec<String>,
}
/// Errors raised while loading or validating a manifest.
#[derive(Debug, Error)]
pub enum ManifestError {
    #[error("Failed to read manifest file: {0}")]
    IoError(#[from] std::io::Error),
    #[error("Failed to parse manifest: {0}")]
    ParseError(#[from] toml::de::Error),
    #[error("Invalid manifest: {0}")]
    ValidationError(String),
}
impl PluginManifest {
    /// Load and parse a plugin manifest from a TOML file, validating it
    /// before returning.
    pub fn from_file(path: &Path) -> Result<Self, ManifestError> {
        let raw = std::fs::read_to_string(path)?;
        Self::from_str(&raw)
    }
    /// Parse and validate a manifest from an in-memory TOML string.
    pub fn from_str(content: &str) -> Result<Self, ManifestError> {
        let parsed: Self = toml::from_str(content)?;
        parsed.validate()?;
        Ok(parsed)
    }
    /// Check structural invariants: non-empty api_version, semver-ish
    /// version, at least one recognized plugin kind, and a non-empty WASM
    /// binary path.
    pub fn validate(&self) -> Result<(), ManifestError> {
        // Shorthand for producing a validation failure.
        let fail = |msg: String| Err(ManifestError::ValidationError(msg));
        // Check API version format
        if self.plugin.api_version.is_empty() {
            return fail("api_version cannot be empty".to_string());
        }
        // Check version format (basic semver check)
        if !self.plugin.version.contains('.') {
            return fail("version must be in semver format (e.g., 1.0.0)".to_string());
        }
        // Check that at least one kind is specified
        if self.plugin.kind.is_empty() {
            return fail("at least one plugin kind must be specified".to_string());
        }
        // Check plugin kinds are valid
        const VALID_KINDS: [&str; 6] = [
            "media_type",
            "metadata_extractor",
            "thumbnail_generator",
            "search_backend",
            "event_handler",
            "theme_provider",
        ];
        if let Some(bad) = self
            .plugin
            .kind
            .iter()
            .find(|k| !VALID_KINDS.contains(&k.as_str()))
        {
            return fail(format!(
                "Invalid plugin kind: {}. Must be one of: {}",
                bad,
                VALID_KINDS.join(", ")
            ));
        }
        // Check WASM binary path is not empty
        if self.plugin.binary.wasm.is_empty() {
            return fail("WASM binary path cannot be empty".to_string());
        }
        Ok(())
    }
    /// Translate the manifest's capability section into runtime
    /// `Capabilities` (MiB -> bytes, seconds -> milliseconds, with
    /// saturating arithmetic to avoid overflow).
    pub fn to_capabilities(&self) -> Capabilities {
        let fs = &self.capabilities.filesystem;
        Capabilities {
            filesystem: FilesystemCapability {
                read: fs.read.iter().map(Into::into).collect(),
                write: fs.write.iter().map(Into::into).collect(),
            },
            network: NetworkCapability {
                enabled: self.capabilities.network,
                allowed_domains: None,
            },
            environment: EnvironmentCapability {
                enabled: self.capabilities.environment.is_some(),
                allowed_vars: self.capabilities.environment.clone(),
            },
            max_memory_bytes: self
                .capabilities
                .max_memory_mb
                .map(|mb| mb.saturating_mul(1024).saturating_mul(1024)),
            max_cpu_time_ms: self
                .capabilities
                .max_cpu_time_secs
                .map(|secs| secs.saturating_mul(1000)),
        }
    }
    /// Stable plugin identifier in `name@version` form.
    pub fn plugin_id(&self) -> String {
        format!("{}@{}", self.plugin.name, self.plugin.version)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// A well-formed manifest parses and exposes its declared fields.
    #[test]
    fn test_parse_valid_manifest() {
        let toml = r#"
[plugin]
name = "heif-support"
version = "1.0.0"
api_version = "1.0"
author = "Test Author"
description = "HEIF image support"
kind = ["media_type", "metadata_extractor"]
[plugin.binary]
wasm = "plugin.wasm"
[capabilities.filesystem]
read = ["/tmp/pinakes-thumbnails"]
"#;
        let manifest = PluginManifest::from_str(toml).unwrap();
        assert_eq!(manifest.plugin.name, "heif-support");
        assert_eq!(manifest.plugin.version, "1.0.0");
        assert_eq!(manifest.plugin.kind.len(), 2);
    }
    /// An empty api_version must be rejected by validation.
    #[test]
    fn test_invalid_api_version() {
        let toml = r#"
[plugin]
name = "test"
version = "1.0.0"
api_version = ""
kind = ["media_type"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
        assert!(PluginManifest::from_str(toml).is_err());
    }
    /// An unrecognized plugin kind must be rejected by validation.
    #[test]
    fn test_invalid_kind() {
        let toml = r#"
[plugin]
name = "test"
version = "1.0.0"
api_version = "1.0"
kind = ["invalid_kind"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
        assert!(PluginManifest::from_str(toml).is_err());
    }
}

View file

@ -0,0 +1,156 @@
//! Shared types used across the plugin API
use serde::{Deserialize, Serialize};
use std::fmt;
/// Opaque string identifier for a plugin.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PluginId(String);
impl PluginId {
    /// Wrap any string-like value as a plugin identifier.
    pub fn new(id: impl Into<String>) -> Self {
        PluginId(id.into())
    }
    /// Borrow the identifier as a plain string slice.
    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }
}
impl fmt::Display for PluginId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}
impl From<String> for PluginId {
    fn from(s: String) -> Self {
        PluginId::new(s)
    }
}
impl From<&str> for PluginId {
    fn from(s: &str) -> Self {
        PluginId::new(s)
    }
}
/// Plugin lifecycle state
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum PluginState {
    /// Plugin is being loaded
    Loading,
    /// Plugin is initialized and ready
    Ready,
    /// Plugin is running
    Running,
    /// Plugin encountered an error
    Error,
    /// Plugin is being shut down
    ShuttingDown,
    /// Plugin is stopped
    Stopped,
}
impl fmt::Display for PluginState {
    /// Render the state as a stable snake_case label.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            Self::Loading => "loading",
            Self::Ready => "ready",
            Self::Running => "running",
            Self::Error => "error",
            Self::ShuttingDown => "shutting_down",
            Self::Stopped => "stopped",
        };
        f.write_str(label)
    }
}
/// Plugin installation status
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginStatus {
    pub id: PluginId,
    pub name: String,
    pub version: String,
    /// Current lifecycle state (see `PluginState`).
    pub state: PluginState,
    /// Whether the plugin is administratively enabled.
    pub enabled: bool,
    /// Human-readable error detail, if any.
    pub error_message: Option<String>,
}
/// Version information
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Version {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}
impl Version {
    /// Construct a version from its three components.
    pub fn new(major: u32, minor: u32, patch: u32) -> Self {
        Self {
            major,
            minor,
            patch,
        }
    }
    /// Parse version from string (e.g., "1.2.3").
    ///
    /// Returns `None` unless the input is exactly three `.`-separated
    /// integers: too few components, extra components (e.g. "1.2.3.4"),
    /// or any non-numeric component are all rejected.
    pub fn parse(s: &str) -> Option<Self> {
        // Walk the components lazily instead of collecting into a Vec —
        // same accept/reject behavior, no intermediate allocation.
        let mut parts = s.split('.');
        let major = parts.next()?.parse().ok()?;
        let minor = parts.next()?.parse().ok()?;
        let patch = parts.next()?.parse().ok()?;
        // A fourth component means the input was not a bare triple.
        if parts.next().is_some() {
            return None;
        }
        Some(Self {
            major,
            minor,
            patch,
        })
    }
    /// Check if this version is compatible with another version
    /// Compatible if major version matches and minor version is >= required
    /// (the patch component is deliberately ignored).
    pub fn is_compatible_with(&self, required: &Version) -> bool {
        self.major == required.major && self.minor >= required.minor
    }
}
impl fmt::Display for Version {
    /// Render the canonical dotted form, e.g. "1.2.3".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// `Version::parse` splits a dotted triple into its components.
    #[test]
    fn test_version_parse() {
        let v = Version::parse("1.2.3").unwrap();
        assert_eq!(v.major, 1);
        assert_eq!(v.minor, 2);
        assert_eq!(v.patch, 3);
    }
    /// Compatibility requires an equal major and a minor at least as new.
    #[test]
    fn test_version_compatibility() {
        let v1 = Version::new(1, 2, 0);
        let v2 = Version::new(1, 1, 0);
        let v3 = Version::new(2, 0, 0);
        assert!(v1.is_compatible_with(&v2)); // 1.2 >= 1.1
        assert!(!v2.is_compatible_with(&v1)); // 1.1 < 1.2
        assert!(!v1.is_compatible_with(&v3)); // Different major version
    }
    /// `Display` renders the canonical dotted form.
    #[test]
    fn test_version_display() {
        let v = Version::new(1, 2, 3);
        assert_eq!(v.to_string(), "1.2.3");
    }
}

View file

@ -0,0 +1,186 @@
//! WASM bridge types and helpers for plugin communication
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Memory allocation info for passing data between host and plugin
///
/// Pointer and length are 32-bit offsets into wasm32 linear memory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WasmMemoryAlloc {
    /// Pointer to allocated memory
    pub ptr: u32,
    /// Size of allocation in bytes
    pub len: u32,
}
/// Request from host to plugin
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HostRequest {
    /// Request ID for matching with response
    pub request_id: String,
    /// Method name being called
    pub method: String,
    /// Serialized parameters
    pub params: Vec<u8>,
}
/// Response from plugin to host
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginResponse {
    /// Request ID this response corresponds to
    pub request_id: String,
    /// Success or error
    pub result: WasmResult<Vec<u8>>,
}
/// Result type for WASM operations
///
/// Mirror of `Result` with a `String` error so it serializes cleanly
/// across the host/guest boundary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum WasmResult<T> {
    Ok(T),
    Err(String),
}
impl<T> From<Result<T, String>> for WasmResult<T> {
    /// Map each `Result` variant onto the corresponding `WasmResult` one.
    fn from(r: Result<T, String>) -> Self {
        match r {
            Ok(v) => WasmResult::Ok(v),
            Err(e) => WasmResult::Err(e),
        }
    }
}
/// Host functions available to plugins
///
/// Names of imports the host may expose to a guest; the "(if permitted)"
/// ones depend on the capabilities granted to the plugin.
pub mod host_functions {
    /// Log a message from plugin
    pub const LOG: &str = "host_log";
    /// Read a file (if permitted)
    pub const READ_FILE: &str = "host_read_file";
    /// Write a file (if permitted)
    pub const WRITE_FILE: &str = "host_write_file";
    /// Make an HTTP request (if permitted)
    pub const HTTP_REQUEST: &str = "host_http_request";
    /// Get configuration value
    pub const GET_CONFIG: &str = "host_get_config";
    /// Emit an event
    pub const EMIT_EVENT: &str = "host_emit_event";
}
/// Log level for plugin logging
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum LogLevel {
    Trace,
    Debug,
    Info,
    Warn,
    Error,
}
/// Log message from plugin
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogMessage {
    pub level: LogLevel,
    /// Logging target/module name.
    pub target: String,
    pub message: String,
    /// Structured key-value context attached to the message.
    pub fields: HashMap<String, String>,
}
/// HTTP request parameters
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HttpRequest {
    /// HTTP method, e.g. "GET" or "POST".
    pub method: String,
    pub url: String,
    pub headers: HashMap<String, String>,
    /// Optional raw request body.
    pub body: Option<Vec<u8>>,
}
/// HTTP response
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HttpResponse {
    /// HTTP status code.
    pub status: u16,
    pub headers: HashMap<String, String>,
    /// Raw response body.
    pub body: Vec<u8>,
}
/// Helper functions for serializing/deserializing data across WASM boundary
///
/// JSON is the wire format; all errors are stringified for transport.
pub mod helpers {
    use super::*;
    /// Serialize a value to bytes for passing to WASM
    pub fn serialize<T: Serialize>(value: &T) -> Result<Vec<u8>, String> {
        serde_json::to_vec(value).map_err(|e| format!("Serialization error: {}", e))
    }
    /// Deserialize bytes from WASM to a value
    pub fn deserialize<T: for<'de> Deserialize<'de>>(bytes: &[u8]) -> Result<T, String> {
        serde_json::from_slice(bytes).map_err(|e| format!("Deserialization error: {}", e))
    }
    /// Create a success response
    ///
    /// The payload is serialized first, then wrapped in a `PluginResponse`
    /// envelope which is itself serialized (double encoding by design).
    pub fn ok_response<T: Serialize>(request_id: String, value: &T) -> Result<Vec<u8>, String> {
        let result = WasmResult::Ok(serialize(value)?);
        let response = PluginResponse { request_id, result };
        serialize(&response)
    }
    /// Create an error response
    ///
    /// Wraps the message in a `WasmResult::Err` envelope for the given
    /// request and serializes the whole response.
    pub fn error_response(request_id: String, error: String) -> Result<Vec<u8>, String> {
        let result = WasmResult::<Vec<u8>>::Err(error);
        let response = PluginResponse { request_id, result };
        serialize(&response)
    }
}
#[cfg(test)]
mod tests {
    use super::helpers::*;
    use super::*;
    /// Round-trip: serialize then deserialize recovers the original value.
    #[test]
    fn test_serialize_deserialize() {
        let data = vec![1u8, 2, 3, 4];
        let bytes = serialize(&data).unwrap();
        let recovered: Vec<u8> = deserialize(&bytes).unwrap();
        assert_eq!(data, recovered);
    }
    /// A success envelope carries the request id and the encoded payload.
    #[test]
    fn test_ok_response() {
        let request_id = "test-123".to_string();
        let value = "success";
        let response_bytes = ok_response(request_id.clone(), &value).unwrap();
        let response: PluginResponse = deserialize(&response_bytes).unwrap();
        assert_eq!(response.request_id, request_id);
        match response.result {
            // The payload is double-encoded, so decode it a second time.
            WasmResult::Ok(data) => {
                let recovered: String = deserialize(&data).unwrap();
                assert_eq!(recovered, value);
            }
            WasmResult::Err(_) => panic!("Expected Ok result"),
        }
    }
    /// An error envelope carries the request id and the message verbatim.
    #[test]
    fn test_error_response() {
        let request_id = "test-456".to_string();
        let error_msg = "Something went wrong";
        let response_bytes = error_response(request_id.clone(), error_msg.to_string()).unwrap();
        let response: PluginResponse = deserialize(&response_bytes).unwrap();
        assert_eq!(response.request_id, request_id);
        match response.result {
            WasmResult::Err(msg) => assert_eq!(msg, error_msg),
            WasmResult::Ok(_) => panic!("Expected Err result"),
        }
    }
}

View file

@ -0,0 +1,67 @@
use pinakes_plugin_api::PluginManifest;
use std::path::PathBuf;
#[test]
fn test_markdown_metadata_manifest() {
    // The example plugin lives two directories above this crate.
    let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .parent()
        .unwrap()
        .to_path_buf();
    let manifest = PluginManifest::from_file(
        &workspace_root.join("examples/plugins/markdown-metadata/plugin.toml"),
    )
    .expect("Failed to parse markdown-metadata plugin.toml");

    // Identity, role and binary wiring.
    assert_eq!(manifest.plugin.name, "markdown-metadata");
    assert_eq!(manifest.plugin.version, "1.0.0");
    assert_eq!(manifest.plugin.api_version, "1.0");
    assert_eq!(manifest.plugin.kind, vec!["metadata_extractor"]);
    assert_eq!(manifest.plugin.binary.wasm, "markdown_metadata.wasm");

    // This plugin requests no filesystem or network access at all.
    let caps = manifest.to_capabilities();
    assert_eq!(caps.filesystem.read.len(), 0);
    assert_eq!(caps.filesystem.write.len(), 0);
    assert!(!caps.network.enabled);

    // Config knobs declared in the manifest.
    for key in ["extract_tags", "parse_yaml", "max_file_size"] {
        assert!(manifest.config.contains_key(key));
    }
}
#[test]
fn test_heif_support_manifest() {
    // The example plugin lives two directories above this crate.
    let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .parent()
        .unwrap()
        .to_path_buf();
    let manifest = PluginManifest::from_file(
        &workspace_root.join("examples/plugins/heif-support/plugin.toml"),
    )
    .expect("Failed to parse heif-support plugin.toml");

    // Identity, declared roles and binary wiring.
    assert_eq!(manifest.plugin.name, "heif-support");
    assert_eq!(manifest.plugin.version, "1.0.0");
    assert_eq!(manifest.plugin.api_version, "1.0");
    assert_eq!(
        manifest.plugin.kind,
        vec!["media_type", "metadata_extractor", "thumbnail_generator"]
    );
    assert_eq!(manifest.plugin.binary.wasm, "heif_support.wasm");

    // Sandbox limits: one read path, one write path, no network,
    // 256 MiB of memory and 30 seconds of CPU time.
    let caps = manifest.to_capabilities();
    assert_eq!(caps.filesystem.read.len(), 1);
    assert_eq!(caps.filesystem.write.len(), 1);
    assert!(!caps.network.enabled);
    assert_eq!(caps.max_memory_bytes, Some(256 * 1024 * 1024));
    assert_eq!(caps.max_cpu_time_ms, Some(30 * 1000));

    // Config knobs declared in the manifest.
    for key in ["extract_exif", "generate_thumbnails", "thumbnail_quality"] {
        assert!(manifest.config.contains_key(key));
    }
}

View file

@ -6,6 +6,7 @@ license.workspace = true
[dependencies]
pinakes-core = { path = "../pinakes-core" }
pinakes-plugin-api = { path = "../pinakes-plugin-api" }
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
@ -25,6 +26,8 @@ tower_governor = { workspace = true }
tokio-util = { version = "0.7", features = ["io"] }
argon2 = { workspace = true }
rand = "0.9"
percent-encoding = "2"
[dev-dependencies]
http-body-util = "0.1"
tempfile = "3"

View file

@ -40,6 +40,9 @@ pub fn create_router(state: AppState) -> Router {
config: login_governor,
});
// Public routes (no auth required)
let public_routes = Router::new().route("/s/{token}", get(routes::social::access_shared_media));
// Read-only routes: any authenticated user (Viewer+)
let viewer_routes = Router::new()
.route("/health", get(routes::health::health))
@ -87,7 +90,82 @@ pub fn create_router(state: AppState) -> Router {
.route("/webhooks", get(routes::webhooks::list_webhooks))
// Auth endpoints (self-service) — login handled separately with stricter rate limit
.route("/auth/logout", post(routes::auth::logout))
.route("/auth/me", get(routes::auth::me));
.route("/auth/me", get(routes::auth::me))
// Social: ratings & comments (read)
.route(
"/media/{id}/ratings",
get(routes::social::get_media_ratings),
)
.route(
"/media/{id}/comments",
get(routes::social::get_media_comments),
)
// Favorites (read)
.route("/favorites", get(routes::social::list_favorites))
// Playlists (read)
.route("/playlists", get(routes::playlists::list_playlists))
.route("/playlists/{id}", get(routes::playlists::get_playlist))
.route("/playlists/{id}/items", get(routes::playlists::list_items))
.route(
"/playlists/{id}/shuffle",
post(routes::playlists::shuffle_playlist),
)
// Analytics (read)
.route(
"/analytics/most-viewed",
get(routes::analytics::get_most_viewed),
)
.route(
"/analytics/recently-viewed",
get(routes::analytics::get_recently_viewed),
)
.route("/analytics/events", post(routes::analytics::record_event))
.route(
"/media/{id}/progress",
get(routes::analytics::get_watch_progress),
)
.route(
"/media/{id}/progress",
post(routes::analytics::update_watch_progress),
)
// Subtitles (read)
.route(
"/media/{id}/subtitles",
get(routes::subtitles::list_subtitles),
)
.route(
"/media/{media_id}/subtitles/{subtitle_id}/content",
get(routes::subtitles::get_subtitle_content),
)
// Enrichment (read)
.route(
"/media/{id}/external-metadata",
get(routes::enrichment::get_external_metadata),
)
// Transcode (read)
.route("/transcode/{id}", get(routes::transcode::get_session))
.route("/transcode", get(routes::transcode::list_sessions))
// Streaming
.route(
"/media/{id}/stream/hls/master.m3u8",
get(routes::streaming::hls_master_playlist),
)
.route(
"/media/{id}/stream/hls/{profile}/playlist.m3u8",
get(routes::streaming::hls_variant_playlist),
)
.route(
"/media/{id}/stream/hls/{profile}/{segment}",
get(routes::streaming::hls_segment),
)
.route(
"/media/{id}/stream/dash/manifest.mpd",
get(routes::streaming::dash_manifest),
)
.route(
"/media/{id}/stream/dash/{profile}/{segment}",
get(routes::streaming::dash_segment),
);
// Write routes: Editor+ required
let editor_routes = Router::new()
@ -190,6 +268,58 @@ pub fn create_router(state: AppState) -> Router {
)
// Webhooks
.route("/webhooks/test", post(routes::webhooks::test_webhook))
// Social: ratings & comments (write)
.route("/media/{id}/ratings", post(routes::social::rate_media))
.route("/media/{id}/comments", post(routes::social::add_comment))
// Favorites (write)
.route("/favorites", post(routes::social::add_favorite))
.route(
"/favorites/{media_id}",
delete(routes::social::remove_favorite),
)
// Share links
.route("/share", post(routes::social::create_share_link))
// Playlists (write)
.route("/playlists", post(routes::playlists::create_playlist))
.route("/playlists/{id}", patch(routes::playlists::update_playlist))
.route(
"/playlists/{id}",
delete(routes::playlists::delete_playlist),
)
.route("/playlists/{id}/items", post(routes::playlists::add_item))
.route(
"/playlists/{id}/items/{media_id}",
delete(routes::playlists::remove_item),
)
.route(
"/playlists/{id}/reorder",
post(routes::playlists::reorder_item),
)
// Subtitles (write)
.route(
"/media/{id}/subtitles",
post(routes::subtitles::add_subtitle),
)
.route(
"/subtitles/{id}",
delete(routes::subtitles::delete_subtitle),
)
.route(
"/subtitles/{id}/offset",
patch(routes::subtitles::update_offset),
)
// Enrichment (write)
.route(
"/media/{id}/enrich",
post(routes::enrichment::trigger_enrichment),
)
.route("/jobs/enrich", post(routes::enrichment::batch_enrich))
// Transcode (write)
.route(
"/media/{id}/transcode",
post(routes::transcode::start_transcode),
)
.route("/transcode/{id}", delete(routes::transcode::cancel_session))
.layer(middleware::from_fn(auth::require_editor));
// Admin-only routes: destructive/config operations
@ -203,14 +333,33 @@ pub fn create_router(state: AppState) -> Router {
.route("/config/ui", put(routes::config::update_ui_config))
.route("/database/vacuum", post(routes::database::vacuum_database))
.route("/database/clear", post(routes::database::clear_database))
// Plugin management
.route("/plugins", get(routes::plugins::list_plugins))
.route("/plugins/{id}", get(routes::plugins::get_plugin))
.route("/plugins/install", post(routes::plugins::install_plugin))
.route("/plugins/{id}", delete(routes::plugins::uninstall_plugin))
.route("/plugins/{id}/toggle", post(routes::plugins::toggle_plugin))
.route("/plugins/{id}/reload", post(routes::plugins::reload_plugin))
// User management
.route("/users", get(routes::users::list_users))
.route("/users", post(routes::users::create_user))
.route("/users/{id}", get(routes::users::get_user))
.route("/users/{id}", patch(routes::users::update_user))
.route("/users/{id}", delete(routes::users::delete_user))
.route(
"/users/{id}/libraries",
get(routes::users::get_user_libraries),
)
.route(
"/users/{id}/libraries",
post(routes::users::grant_library_access),
)
.route(
"/users/{id}/libraries",
delete(routes::users::revoke_library_access),
)
.layer(middleware::from_fn(auth::require_admin));
let api = Router::new()
.merge(login_route)
.merge(viewer_routes)
.merge(editor_routes)
.merge(admin_routes);
// CORS: allow same-origin by default, plus the desktop UI origin
let cors = CorsLayer::new()
.allow_origin([
@ -228,13 +377,25 @@ pub fn create_router(state: AppState) -> Router {
.allow_headers([header::CONTENT_TYPE, header::AUTHORIZATION])
.allow_credentials(true);
Router::new()
.nest("/api/v1", api)
.layer(DefaultBodyLimit::max(10 * 1024 * 1024))
// Create protected routes with auth middleware
let protected_api = Router::new()
.merge(viewer_routes)
.merge(editor_routes)
.merge(admin_routes)
.layer(middleware::from_fn_with_state(
state.clone(),
auth::require_auth,
))
));
// Combine protected and public routes
let full_api = Router::new()
.merge(login_route)
.merge(public_routes)
.merge(protected_api);
Router::new()
.nest("/api/v1", full_api)
.layer(DefaultBodyLimit::max(10 * 1024 * 1024))
.layer(GovernorLayer {
config: global_governor,
})

View file

@ -85,6 +85,7 @@ pub async fn require_auth(
if expected_key.is_empty() {
// Empty key means no auth required
request.extensions_mut().insert(UserRole::Admin);
request.extensions_mut().insert("admin".to_string());
return next.run(request).await;
}
@ -110,6 +111,7 @@ pub async fn require_auth(
}
// When no api_key is configured, or key matches, grant admin
request.extensions_mut().insert(UserRole::Admin);
request.extensions_mut().insert("admin".to_string());
}
next.run(request).await
@ -143,6 +145,24 @@ pub async fn require_admin(request: Request, next: Next) -> Response {
}
}
/// Resolve the authenticated username (from request extensions) to a UserId.
///
/// Returns an error if the user cannot be found.
pub async fn resolve_user_id(
storage: &pinakes_core::storage::DynStorageBackend,
username: &str,
) -> Result<pinakes_core::users::UserId, crate::error::ApiError> {
match storage.get_user_by_username(username).await {
Ok(user) => Ok(user.id),
Err(e) => {
tracing::warn!(username = %username, error = ?e, "failed to resolve user");
Err(crate::error::ApiError(
pinakes_core::error::PinakesError::Authentication("user not found".into()),
))
}
}
}
fn unauthorized(message: &str) -> Response {
let body = format!(r#"{{"error":"{message}"}}"#);
(

View file

@ -551,3 +551,431 @@ impl From<pinakes_core::model::AuditEntry> for AuditEntryResponse {
}
}
}
// Plugins

/// Serialized view of an installed plugin and its current enabled state.
#[derive(Debug, Serialize)]
pub struct PluginResponse {
    pub id: String,
    pub name: String,
    pub version: String,
    pub author: String,
    pub description: String,
    /// Plugin API version the plugin was built against.
    pub api_version: String,
    /// Whether the plugin is currently enabled.
    pub enabled: bool,
}

/// Request body for installing a plugin.
#[derive(Debug, Deserialize)]
pub struct InstallPluginRequest {
    pub source: String, // URL or file path
}

/// Request body for enabling or disabling a plugin.
#[derive(Debug, Deserialize)]
pub struct TogglePluginRequest {
    pub enabled: bool,
}

impl PluginResponse {
    /// Build a response from plugin metadata plus its enabled flag.
    pub fn new(meta: pinakes_plugin_api::PluginMetadata, enabled: bool) -> Self {
        Self {
            id: meta.id,
            name: meta.name,
            version: meta.version,
            author: meta.author,
            description: meta.description,
            api_version: meta.api_version,
            enabled,
        }
    }
}
// Users

/// Serialized view of a user account.
#[derive(Debug, Serialize)]
pub struct UserResponse {
    pub id: String,
    pub username: String,
    pub role: String,
    pub profile: UserProfileResponse,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

/// Profile section embedded in a `UserResponse`.
#[derive(Debug, Serialize)]
pub struct UserProfileResponse {
    pub avatar_path: Option<String>,
    pub bio: Option<String>,
    pub preferences: UserPreferencesResponse,
}

/// Per-user UI and playback preferences.
#[derive(Debug, Serialize)]
pub struct UserPreferencesResponse {
    pub theme: Option<String>,
    pub language: Option<String>,
    pub default_video_quality: Option<String>,
    pub auto_play: bool,
}

/// A single library-access grant held by a user.
#[derive(Debug, Serialize)]
pub struct UserLibraryResponse {
    pub user_id: String,
    pub root_path: String,
    pub permission: String,
    pub granted_at: DateTime<Utc>,
}

/// Request body for granting a user access to a library root.
#[derive(Debug, Deserialize)]
pub struct GrantLibraryAccessRequest {
    pub root_path: String,
    pub permission: pinakes_core::users::LibraryPermission,
}

/// Request body for revoking a user's access to a library root.
#[derive(Debug, Deserialize)]
pub struct RevokeLibraryAccessRequest {
    pub root_path: String,
}

impl From<pinakes_core::users::User> for UserResponse {
    fn from(user: pinakes_core::users::User) -> Self {
        Self {
            id: user.id.0.to_string(),
            username: user.username,
            role: user.role.to_string(),
            profile: UserProfileResponse {
                avatar_path: user.profile.avatar_path,
                bio: user.profile.bio,
                preferences: UserPreferencesResponse {
                    theme: user.profile.preferences.theme,
                    language: user.profile.preferences.language,
                    default_video_quality: user.profile.preferences.default_video_quality,
                    auto_play: user.profile.preferences.auto_play,
                },
            },
            created_at: user.created_at,
            updated_at: user.updated_at,
        }
    }
}

impl From<pinakes_core::users::UserLibraryAccess> for UserLibraryResponse {
    fn from(access: pinakes_core::users::UserLibraryAccess) -> Self {
        Self {
            user_id: access.user_id.0.to_string(),
            root_path: access.root_path,
            // Debug-format the enum variant, lowercased (e.g. `ReadOnly` -> "readonly").
            permission: format!("{:?}", access.permission).to_lowercase(),
            granted_at: access.granted_at,
        }
    }
}
// ===== Social (Ratings, Comments, Favorites, Shares) =====

/// Serialized view of a user's star rating for a media item.
#[derive(Debug, Serialize)]
pub struct RatingResponse {
    pub id: String,
    pub user_id: String,
    pub media_id: String,
    pub stars: u8,
    pub review_text: Option<String>,
    pub created_at: DateTime<Utc>,
}

impl From<pinakes_core::social::Rating> for RatingResponse {
    fn from(r: pinakes_core::social::Rating) -> Self {
        Self {
            id: r.id.to_string(),
            user_id: r.user_id.0.to_string(),
            media_id: r.media_id.0.to_string(),
            stars: r.stars,
            review_text: r.review_text,
            created_at: r.created_at,
        }
    }
}

/// Request body for rating a media item.
#[derive(Debug, Deserialize)]
pub struct CreateRatingRequest {
    pub stars: u8,
    pub review_text: Option<String>,
}

/// Serialized view of a (possibly threaded) comment on a media item.
#[derive(Debug, Serialize)]
pub struct CommentResponse {
    pub id: String,
    pub user_id: String,
    pub media_id: String,
    /// Set when this comment is a reply to another comment.
    pub parent_comment_id: Option<String>,
    pub text: String,
    pub created_at: DateTime<Utc>,
}

impl From<pinakes_core::social::Comment> for CommentResponse {
    fn from(c: pinakes_core::social::Comment) -> Self {
        Self {
            id: c.id.to_string(),
            user_id: c.user_id.0.to_string(),
            media_id: c.media_id.0.to_string(),
            parent_comment_id: c.parent_comment_id.map(|id| id.to_string()),
            text: c.text,
            created_at: c.created_at,
        }
    }
}

/// Request body for adding a comment; `parent_id` makes it a reply.
#[derive(Debug, Deserialize)]
pub struct CreateCommentRequest {
    pub text: String,
    pub parent_id: Option<Uuid>,
}

/// Request body for marking a media item as a favorite.
#[derive(Debug, Deserialize)]
pub struct FavoriteRequest {
    pub media_id: Uuid,
}

/// Request body for creating a share link; optionally password-protected
/// and/or time-limited.
#[derive(Debug, Deserialize)]
pub struct CreateShareLinkRequest {
    pub media_id: Uuid,
    pub password: Option<String>,
    pub expires_in_hours: Option<u64>,
}

/// Serialized view of a share link (the password is never echoed back).
#[derive(Debug, Serialize)]
pub struct ShareLinkResponse {
    pub id: String,
    pub media_id: String,
    pub token: String,
    pub expires_at: Option<DateTime<Utc>>,
    pub view_count: u64,
    pub created_at: DateTime<Utc>,
}

impl From<pinakes_core::social::ShareLink> for ShareLinkResponse {
    fn from(s: pinakes_core::social::ShareLink) -> Self {
        Self {
            id: s.id.to_string(),
            media_id: s.media_id.0.to_string(),
            token: s.token,
            expires_at: s.expires_at,
            view_count: s.view_count,
            created_at: s.created_at,
        }
    }
}
// ===== Playlists =====

/// Serialized view of a playlist (manual or smart/filter-driven).
#[derive(Debug, Serialize)]
pub struct PlaylistResponse {
    pub id: String,
    pub owner_id: String,
    pub name: String,
    pub description: Option<String>,
    pub is_public: bool,
    /// Smart playlists derive their contents from `filter_query`.
    pub is_smart: bool,
    pub filter_query: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

impl From<pinakes_core::playlists::Playlist> for PlaylistResponse {
    fn from(p: pinakes_core::playlists::Playlist) -> Self {
        Self {
            id: p.id.to_string(),
            owner_id: p.owner_id.0.to_string(),
            name: p.name,
            description: p.description,
            is_public: p.is_public,
            is_smart: p.is_smart,
            filter_query: p.filter_query,
            created_at: p.created_at,
            updated_at: p.updated_at,
        }
    }
}

/// Request body for creating a playlist.
#[derive(Debug, Deserialize)]
pub struct CreatePlaylistRequest {
    pub name: String,
    pub description: Option<String>,
    pub is_public: Option<bool>,
    pub is_smart: Option<bool>,
    pub filter_query: Option<String>,
}

/// Request body for a partial playlist update; `None` fields are untouched.
#[derive(Debug, Deserialize)]
pub struct UpdatePlaylistRequest {
    pub name: Option<String>,
    pub description: Option<String>,
    pub is_public: Option<bool>,
}

/// Request body for adding a media item to a playlist; omitted `position`
/// appends at the end.
#[derive(Debug, Deserialize)]
pub struct PlaylistItemRequest {
    pub media_id: Uuid,
    pub position: Option<i32>,
}

/// Request body for moving an existing playlist item to a new position.
#[derive(Debug, Deserialize)]
pub struct ReorderPlaylistRequest {
    pub media_id: Uuid,
    pub new_position: i32,
}
// ===== Analytics =====

/// Serialized view of a recorded usage event.
#[derive(Debug, Serialize)]
pub struct UsageEventResponse {
    pub id: String,
    pub media_id: Option<String>,
    pub user_id: Option<String>,
    pub event_type: String,
    pub timestamp: DateTime<Utc>,
    pub duration_secs: Option<f64>,
}

impl From<pinakes_core::analytics::UsageEvent> for UsageEventResponse {
    fn from(e: pinakes_core::analytics::UsageEvent) -> Self {
        Self {
            id: e.id.to_string(),
            media_id: e.media_id.map(|m| m.0.to_string()),
            user_id: e.user_id.map(|u| u.0.to_string()),
            event_type: e.event_type.to_string(),
            timestamp: e.timestamp,
            duration_secs: e.duration_secs,
        }
    }
}

/// Request body for recording a usage event; `event_type` is parsed
/// server-side into a `UsageEventType`.
#[derive(Debug, Deserialize)]
pub struct RecordUsageEventRequest {
    pub media_id: Option<Uuid>,
    pub event_type: String,
    pub duration_secs: Option<f64>,
    pub context: Option<serde_json::Value>,
}

/// One entry of the most-viewed ranking: the media item plus its view count.
#[derive(Debug, Serialize)]
pub struct MostViewedResponse {
    pub media: MediaResponse,
    pub view_count: u64,
}

/// Request body for saving the caller's watch position (seconds).
#[derive(Debug, Deserialize)]
pub struct WatchProgressRequest {
    pub progress_secs: f64,
}

/// Response carrying the caller's saved watch position (seconds).
#[derive(Debug, Serialize)]
pub struct WatchProgressResponse {
    pub progress_secs: f64,
}
// ===== Subtitles =====

/// Serialized view of a subtitle track attached to a media item.
#[derive(Debug, Serialize)]
pub struct SubtitleResponse {
    pub id: String,
    pub media_id: String,
    pub language: Option<String>,
    pub format: String,
    /// True when the track is embedded in the media container rather than
    /// stored as a sidecar file.
    pub is_embedded: bool,
    pub track_index: Option<usize>,
    /// Playback timing offset in milliseconds (may be negative).
    pub offset_ms: i64,
    pub created_at: DateTime<Utc>,
}

impl From<pinakes_core::subtitles::Subtitle> for SubtitleResponse {
    fn from(s: pinakes_core::subtitles::Subtitle) -> Self {
        Self {
            id: s.id.to_string(),
            media_id: s.media_id.0.to_string(),
            language: s.language,
            format: s.format.to_string(),
            is_embedded: s.is_embedded,
            track_index: s.track_index,
            offset_ms: s.offset_ms,
            created_at: s.created_at,
        }
    }
}

/// Request body for attaching a subtitle track to a media item.
#[derive(Debug, Deserialize)]
pub struct AddSubtitleRequest {
    pub language: Option<String>,
    pub format: String,
    pub file_path: Option<String>,
    pub is_embedded: Option<bool>,
    pub track_index: Option<usize>,
    pub offset_ms: Option<i64>,
}

/// Request body for adjusting a subtitle track's timing offset.
#[derive(Debug, Deserialize)]
pub struct UpdateSubtitleOffsetRequest {
    pub offset_ms: i64,
}
// ===== Enrichment =====

/// Serialized view of metadata fetched from an external source for a media item.
#[derive(Debug, Serialize)]
pub struct ExternalMetadataResponse {
    pub id: String,
    pub media_id: String,
    pub source: String,
    pub external_id: Option<String>,
    /// Parsed JSON payload; `null` if the stored blob fails to parse.
    pub metadata: serde_json::Value,
    pub confidence: f64,
    pub last_updated: DateTime<Utc>,
}

impl From<pinakes_core::enrichment::ExternalMetadata> for ExternalMetadataResponse {
    fn from(m: pinakes_core::enrichment::ExternalMetadata) -> Self {
        // The raw payload is stored as a JSON string; a corrupt blob is
        // logged and degraded to `null` rather than failing the response.
        let metadata = serde_json::from_str(&m.metadata_json).unwrap_or_else(|e| {
            tracing::warn!(
                "failed to deserialize external metadata JSON for media {}: {}",
                m.media_id.0,
                e
            );
            serde_json::Value::Null
        });
        Self {
            id: m.id.to_string(),
            media_id: m.media_id.0.to_string(),
            source: m.source.to_string(),
            external_id: m.external_id,
            metadata,
            confidence: m.confidence,
            last_updated: m.last_updated,
        }
    }
}
// ===== Transcode =====

/// Serialized view of an active or finished transcode session.
#[derive(Debug, Serialize)]
pub struct TranscodeSessionResponse {
    pub id: String,
    pub media_id: String,
    pub profile: String,
    pub status: String,
    /// Completion fraction/percentage as reported by the transcode service.
    pub progress: f32,
    pub created_at: DateTime<Utc>,
    pub expires_at: Option<DateTime<Utc>>,
}

impl From<pinakes_core::transcode::TranscodeSession> for TranscodeSessionResponse {
    fn from(s: pinakes_core::transcode::TranscodeSession) -> Self {
        Self {
            id: s.id.to_string(),
            media_id: s.media_id.0.to_string(),
            profile: s.profile,
            status: s.status.as_str().to_string(),
            progress: s.progress,
            created_at: s.created_at,
            expires_at: s.expires_at,
        }
    }
}

/// Request body for starting a transcode with a named profile.
#[derive(Debug, Deserialize)]
pub struct CreateTranscodeRequest {
    pub profile: String,
}

View file

@ -38,6 +38,8 @@ impl IntoResponse for ApiError {
}
PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::InvalidOperation(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::Authentication(msg) => (StatusCode::UNAUTHORIZED, msg.clone()),
PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()),
PinakesError::Config(_) => {
tracing::error!(error = %self.0, "configuration error");
(

View file

@ -161,17 +161,29 @@ async fn main() -> Result<()> {
let addr = format!("{}:{}", config.server.host, config.server.port);
// Initialize transcode service early so the job queue can reference it
let transcode_service: Option<Arc<pinakes_core::transcode::TranscodeService>> =
if config.transcoding.enabled {
Some(Arc::new(pinakes_core::transcode::TranscodeService::new(
config.transcoding.clone(),
)))
} else {
None
};
// Initialize job queue with executor
let job_storage = storage.clone();
let job_config = config.clone();
let job_transcode = transcode_service.clone();
let job_queue = pinakes_core::jobs::JobQueue::new(
config.jobs.worker_count,
move |job_id, kind, cancel, jobs| {
let storage = job_storage.clone();
let config = job_config.clone();
let transcode_svc = job_transcode.clone();
tokio::spawn(async move {
use pinakes_core::jobs::{JobKind, JobQueue};
let result = match kind {
match kind {
JobKind::Scan { path } => {
let ignore = config.scanning.ignore_patterns.clone();
let res = if let Some(p) = path {
@ -232,7 +244,7 @@ async fn main() -> Result<()> {
match storage.get_media(*mid).await {
Ok(item) => {
let source = item.path.clone();
let mt = item.media_type;
let mt = item.media_type.clone();
let id = item.id;
let td = thumb_dir.clone();
let tc = thumb_config.clone();
@ -333,8 +345,65 @@ async fn main() -> Result<()> {
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
JobKind::Transcode { media_id, profile } => {
if let Some(ref svc) = transcode_svc {
match storage.get_media(media_id).await {
Ok(item) => {
match svc
.start_transcode(
media_id,
&item.path,
&profile,
item.duration_secs,
&storage,
)
.await
{
Ok(session_id) => {
JobQueue::complete(
&jobs,
job_id,
serde_json::json!({"session_id": session_id.to_string()}),
)
.await;
}
Err(e) => {
JobQueue::fail(&jobs, job_id, e.to_string()).await
}
}
}
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
} else {
JobQueue::fail(&jobs, job_id, "transcoding is not enabled".to_string())
.await;
}
}
JobKind::Enrich { media_ids } => {
// Enrichment job placeholder
JobQueue::complete(
&jobs,
job_id,
serde_json::json!({"media_ids": media_ids.len(), "status": "not_implemented"}),
)
.await;
}
JobKind::CleanupAnalytics => {
let before = chrono::Utc::now() - chrono::Duration::days(90);
match storage.cleanup_old_events(before).await {
Ok(count) => {
JobQueue::complete(
&jobs,
job_id,
serde_json::json!({"cleaned_up": count}),
)
.await;
}
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
};
let _ = result;
();
drop(cancel);
})
},
@ -345,6 +414,27 @@ async fn main() -> Result<()> {
config.jobs.cache_ttl_secs,
));
// Initialize plugin manager if plugins are enabled (before moving config into Arc)
let plugin_manager = if config.plugins.enabled {
match pinakes_core::plugin::PluginManager::new(
config.plugins.data_dir.clone(),
config.plugins.cache_dir.clone(),
config.plugins.clone().into(),
) {
Ok(pm) => {
tracing::info!("Plugin manager initialized");
Some(Arc::new(pm))
}
Err(e) => {
tracing::warn!("Failed to initialize plugin manager: {}", e);
None
}
}
} else {
tracing::info!("Plugins disabled in configuration");
None
};
// Initialize scheduler with cancellation support
let shutdown_token = tokio_util::sync::CancellationToken::new();
let config_arc = Arc::new(RwLock::new(config));
@ -376,6 +466,8 @@ async fn main() -> Result<()> {
job_queue,
cache,
scheduler,
plugin_manager,
transcode_service,
};
// Periodic session cleanup (every 15 minutes)

View file

@ -0,0 +1,94 @@
use axum::Json;
use axum::extract::{Extension, Path, Query, State};
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::analytics::{UsageEvent, UsageEventType};
use pinakes_core::model::MediaId;
/// Upper bound applied to any client-supplied pagination limit.
const MAX_LIMIT: u64 = 100;

/// `GET /analytics/most-viewed` — media items ranked by view count.
pub async fn get_most_viewed(
    State(state): State<AppState>,
    Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<MostViewedResponse>>, ApiError> {
    // Default to 20 results and never exceed the server-side cap.
    let limit = params.limit.unwrap_or(20).min(MAX_LIMIT);
    let ranked = state.storage.get_most_viewed(limit).await?;
    let mut body = Vec::with_capacity(ranked.len());
    for (item, count) in ranked {
        body.push(MostViewedResponse {
            media: MediaResponse::from(item),
            view_count: count,
        });
    }
    Ok(Json(body))
}
/// `GET /analytics/recently-viewed` — the caller's most recently viewed media.
pub async fn get_recently_viewed(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    // Map the authenticated username onto a concrete user id first.
    let viewer = resolve_user_id(&state.storage, &username).await?;
    let capped = params.limit.unwrap_or(20).min(MAX_LIMIT);
    let recent = state.storage.get_recently_viewed(viewer, capped).await?;
    let body: Vec<MediaResponse> = recent.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
pub async fn record_event(
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<RecordUsageEventRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let event_type: UsageEventType = req
.event_type
.parse()
.map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let event = UsageEvent {
id: Uuid::now_v7(),
media_id: req.media_id.map(MediaId),
user_id: Some(user_id),
event_type,
timestamp: chrono::Utc::now(),
duration_secs: req.duration_secs,
context_json: req.context.map(|v| v.to_string()),
};
state.storage.record_usage_event(&event).await?;
Ok(Json(serde_json::json!({"recorded": true})))
}
/// `GET /media/{id}/progress` — the caller's saved watch position.
pub async fn get_watch_progress(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<WatchProgressResponse>, ApiError> {
    let viewer = resolve_user_id(&state.storage, &username).await?;
    // A missing row simply means "never watched": report zero progress.
    let saved = state.storage.get_watch_progress(viewer, MediaId(id)).await?;
    Ok(Json(WatchProgressResponse {
        progress_secs: saved.unwrap_or(0.0),
    }))
}
pub async fn update_watch_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<WatchProgressRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
state
.storage
.update_watch_progress(user_id, MediaId(id), req.progress_secs)
.await?;
Ok(Json(serde_json::json!({"updated": true})))
}

View file

@ -0,0 +1,48 @@
use axum::Json;
use axum::extract::{Path, State};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
/// `POST /media/{id}/enrich` — queue background enrichment for one item.
pub async fn trigger_enrichment(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Enrichment is always run as a background job rather than inline.
    let kind = pinakes_core::jobs::JobKind::Enrich {
        media_ids: vec![MediaId(id)],
    };
    let job_id = state.job_queue.submit(kind).await;
    Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
/// `GET /media/{id}/external-metadata` — all external metadata records
/// stored for the given media item.
pub async fn get_external_metadata(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<ExternalMetadataResponse>>, ApiError> {
    let records = state.storage.get_external_metadata(MediaId(id)).await?;
    let body: Vec<ExternalMetadataResponse> = records.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
pub async fn batch_enrich(
State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field
) -> Result<Json<serde_json::Value>, ApiError> {
let media_ids: Vec<MediaId> = req.media_ids.into_iter().map(MediaId).collect();
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Enrich { media_ids })
.await;
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}

View file

@ -26,7 +26,7 @@ pub async fn trigger_verify_integrity(
let media_ids = req
.media_ids
.into_iter()
.map(|id| pinakes_core::model::MediaId(id))
.map(pinakes_core::model::MediaId)
.collect();
let kind = pinakes_core::jobs::JobKind::VerifyIntegrity { media_ids };
let job_id = state.job_queue.submit(kind).await;
@ -94,6 +94,6 @@ pub async fn resolve_orphans(
.collect();
let count = pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
.await
.map_err(|e| ApiError(e))?;
.map_err(ApiError)?;
Ok(Json(serde_json::json!({ "resolved": count })))
}

View file

@ -1,18 +1,27 @@
pub mod analytics;
pub mod audit;
pub mod auth;
pub mod collections;
pub mod config;
pub mod database;
pub mod duplicates;
pub mod enrichment;
pub mod export;
pub mod health;
pub mod integrity;
pub mod jobs;
pub mod media;
pub mod playlists;
pub mod plugins;
pub mod saved_searches;
pub mod scan;
pub mod scheduled_tasks;
pub mod search;
pub mod social;
pub mod statistics;
pub mod streaming;
pub mod subtitles;
pub mod tags;
pub mod transcode;
pub mod users;
pub mod webhooks;

View file

@ -0,0 +1,208 @@
use axum::Json;
use axum::extract::{Extension, Path, State};
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::playlists::Playlist;
use pinakes_core::users::UserId;
/// Check whether a user has access to a playlist.
///
/// * `require_write` when `true` only the playlist owner is allowed (for
/// mutations such as update, delete, add/remove/reorder items). When `false`
/// the playlist must either be public or owned by the requesting user.
async fn check_playlist_access(
storage: &pinakes_core::storage::DynStorageBackend,
playlist_id: Uuid,
user_id: UserId,
require_write: bool,
) -> Result<Playlist, ApiError> {
let playlist = storage.get_playlist(playlist_id).await.map_err(ApiError)?;
if require_write {
// Write operations require ownership
if playlist.owner_id != user_id {
return Err(ApiError(pinakes_core::error::PinakesError::Authorization(
"only the playlist owner can modify this playlist".into(),
)));
}
} else {
// Read operations: allow if public or owner
if !playlist.is_public && playlist.owner_id != user_id {
return Err(ApiError(pinakes_core::error::PinakesError::Authorization(
"playlist is private".into(),
)));
}
}
Ok(playlist)
}
/// `POST /playlists` — create a playlist owned by the caller.
///
/// # Errors
///
/// Returns `400` when the name is blank (including whitespace-only) or
/// longer than 255 characters, or when a smart playlist is requested
/// without a non-empty `filter_query` (it could never contain anything).
pub async fn create_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<CreatePlaylistRequest>,
) -> Result<Json<PlaylistResponse>, ApiError> {
    if req.name.trim().is_empty() || req.name.chars().count() > 255 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "playlist name must be 1-255 characters".into(),
            ),
        ));
    }
    let is_smart = req.is_smart.unwrap_or(false);
    // A smart playlist is defined entirely by its filter query.
    if is_smart
        && req
            .filter_query
            .as_deref()
            .map_or(true, |q| q.trim().is_empty())
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "smart playlists require a non-empty filter_query".into(),
            ),
        ));
    }
    let owner_id = resolve_user_id(&state.storage, &username).await?;
    let playlist = state
        .storage
        .create_playlist(
            owner_id,
            &req.name,
            req.description.as_deref(),
            req.is_public.unwrap_or(false),
            is_smart,
            req.filter_query.as_deref(),
        )
        .await?;
    Ok(Json(PlaylistResponse::from(playlist)))
}
/// `GET /playlists` — all playlists visible to the caller.
///
/// NOTE(review): this loads every playlist and filters in memory; if the
/// table grows large, push the visibility filter into the storage query.
pub async fn list_playlists(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> Result<Json<Vec<PlaylistResponse>>, ApiError> {
    let caller = resolve_user_id(&state.storage, &username).await?;
    let mut visible = Vec::new();
    for playlist in state.storage.list_playlists(None).await? {
        // A playlist is visible when public or owned by the caller.
        if playlist.is_public || playlist.owner_id == caller {
            visible.push(PlaylistResponse::from(playlist));
        }
    }
    Ok(Json(visible))
}
/// `GET /playlists/{id}` — fetch one playlist (public or caller-owned).
pub async fn get_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<PlaylistResponse>, ApiError> {
    let caller = resolve_user_id(&state.storage, &username).await?;
    // Read access only: no ownership required for public playlists.
    let playlist = check_playlist_access(&state.storage, id, caller, false).await?;
    Ok(Json(playlist.into()))
}
pub async fn update_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<UpdatePlaylistRequest>,
) -> Result<Json<PlaylistResponse>, ApiError> {
if let Some(ref name) = req.name
&& (name.is_empty() || name.chars().count() > 255) {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"playlist name must be 1-255 characters".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
let playlist = state
.storage
.update_playlist(
id,
req.name.as_deref(),
req.description.as_deref(),
req.is_public,
)
.await?;
Ok(Json(PlaylistResponse::from(playlist)))
}
/// `DELETE /playlists/{id}` — owner-only deletion of a playlist.
pub async fn delete_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let caller = resolve_user_id(&state.storage, &username).await?;
    // Only the owner may delete.
    check_playlist_access(&state.storage, id, caller, true).await?;
    state.storage.delete_playlist(id).await?;
    Ok(Json(serde_json::json!({"deleted": true})))
}
/// `POST /playlists/{id}/items` — owner-only: append or insert a media
/// item into a playlist. When `position` is omitted, the item is placed
/// after the current last item.
pub async fn add_item(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<PlaylistItemRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    // Adding items is a mutation: ownership required.
    check_playlist_access(&state.storage, id, user_id, true).await?;
    let position = match req.position {
        Some(p) => p,
        None => {
            // Append: use the current item count as the next position.
            // NOTE(review): read-then-write is not atomic — two concurrent
            // appends could pick the same position; confirm the storage
            // layer tolerates duplicate positions.
            let items = state.storage.get_playlist_items(id).await?;
            items.len() as i32
        }
    };
    state
        .storage
        .add_to_playlist(id, MediaId(req.media_id), position)
        .await?;
    Ok(Json(serde_json::json!({"added": true})))
}
/// Remove a media item from a playlist; requires write access.
pub async fn remove_item(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path((id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, uid, true).await?;
    state.storage.remove_from_playlist(id, MediaId(media_id)).await?;
    Ok(Json(serde_json::json!({"removed": true})))
}
/// List the media items of a playlist in order; read access suffices.
pub async fn list_items(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, uid, false).await?;
    let entries = state.storage.get_playlist_items(id).await?;
    let body: Vec<MediaResponse> = entries.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Move one media item to a new position in a playlist; requires write access.
pub async fn reorder_item(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<ReorderPlaylistRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, uid, true).await?;
    let target = MediaId(req.media_id);
    state
        .storage
        .reorder_playlist(id, target, req.new_position)
        .await?;
    Ok(Json(serde_json::json!({"reordered": true})))
}
/// Return the playlist's items in a random order (does not persist the order).
pub async fn shuffle_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    use rand::seq::SliceRandom;
    let uid = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, uid, false).await?;
    let mut entries = state.storage.get_playlist_items(id).await?;
    let mut rng = rand::rng();
    entries.shuffle(&mut rng);
    Ok(Json(entries.into_iter().map(MediaResponse::from).collect()))
}

View file

@ -0,0 +1,149 @@
use axum::Json;
use axum::extract::{Path, State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
/// List all installed plugins
pub async fn list_plugins(
State(state): State<AppState>,
) -> Result<Json<Vec<PluginResponse>>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugins = plugin_manager.list_plugins().await;
let mut responses = Vec::with_capacity(plugins.len());
for meta in plugins {
let enabled = plugin_manager.is_plugin_enabled(&meta.id).await;
responses.push(PluginResponse::new(meta, enabled));
}
Ok(Json(responses))
}
/// Get a specific plugin by ID
pub async fn get_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<PluginResponse>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin = plugin_manager.get_plugin(&id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"Plugin not found: {}",
id
)))
})?;
let enabled = plugin_manager.is_plugin_enabled(&id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
}
/// Install a plugin from URL or file path
pub async fn install_plugin(
State(state): State<AppState>,
Json(req): Json<InstallPluginRequest>,
) -> Result<Json<PluginResponse>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin_id = plugin_manager
.install_plugin(&req.source)
.await
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to install plugin: {}", e),
))
})?;
let plugin = plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(
"Plugin installed but not found".to_string(),
))
})?;
let enabled = plugin_manager.is_plugin_enabled(&plugin_id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
}
/// Uninstall a plugin
pub async fn uninstall_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
plugin_manager.uninstall_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to uninstall plugin: {}", e),
))
})?;
Ok(Json(serde_json::json!({"uninstalled": true})))
}
/// Enable or disable a plugin
pub async fn toggle_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<TogglePluginRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
if req.enabled {
plugin_manager.enable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to enable plugin: {}", e),
))
})?;
} else {
plugin_manager.disable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to disable plugin: {}", e),
))
})?;
}
Ok(Json(serde_json::json!({
"id": id,
"enabled": req.enabled
})))
}
/// Reload a plugin (for development)
pub async fn reload_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
plugin_manager.reload_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to reload plugin: {}", e),
))
})?;
Ok(Json(serde_json::json!({"reloaded": true})))
}

View file

@ -0,0 +1,199 @@
use axum::Json;
use axum::extract::{Extension, Path, Query, State};
use serde::Deserialize;
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{MediaId, Pagination};
/// Query parameters accepted when resolving a share link.
#[derive(Deserialize)]
pub struct ShareLinkQuery {
    /// Plain-text password; only consulted when the link has a password hash.
    pub password: Option<String>,
}
// ===== Ratings =====
/// Create or update the caller's star rating (1-5) for a media item.
pub async fn rate_media(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<CreateRatingRequest>,
) -> Result<Json<RatingResponse>, ApiError> {
    if !(1..=5).contains(&req.stars) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "stars must be between 1 and 5".into(),
            ),
        ));
    }
    let uid = resolve_user_id(&state.storage, &username).await?;
    let saved = state
        .storage
        .rate_media(uid, MediaId(id), req.stars, req.review_text.as_deref())
        .await?;
    Ok(Json(saved.into()))
}
/// List all ratings for a media item (no authentication required).
pub async fn get_media_ratings(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<RatingResponse>>, ApiError> {
    let ratings = state.storage.get_media_ratings(MediaId(id)).await?;
    let body: Vec<RatingResponse> = ratings.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
// ===== Comments =====
/// Post a comment (optionally a reply) on a media item.
pub async fn add_comment(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<CreateCommentRequest>,
) -> Result<Json<CommentResponse>, ApiError> {
    // Length is measured in characters so multibyte text is counted fairly.
    let n = req.text.chars().count();
    if !(1..=10_000).contains(&n) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "comment text must be 1-10000 characters".into(),
            ),
        ));
    }
    let uid = resolve_user_id(&state.storage, &username).await?;
    let comment = state
        .storage
        .add_comment(uid, MediaId(id), &req.text, req.parent_id)
        .await?;
    Ok(Json(comment.into()))
}
/// List all comments on a media item (no authentication required).
pub async fn get_media_comments(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<CommentResponse>>, ApiError> {
    let comments = state.storage.get_media_comments(MediaId(id)).await?;
    let body: Vec<CommentResponse> = comments.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
// ===== Favorites =====
/// Mark a media item as a favorite of the caller.
pub async fn add_favorite(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<FavoriteRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    state.storage.add_favorite(uid, MediaId(req.media_id)).await?;
    Ok(Json(serde_json::json!({"added": true})))
}
/// Remove a media item from the caller's favorites.
pub async fn remove_favorite(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(media_id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    state.storage.remove_favorite(uid, MediaId(media_id)).await?;
    Ok(Json(serde_json::json!({"removed": true})))
}
/// List the caller's favorite media items (default pagination).
pub async fn list_favorites(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    let page = Pagination::default();
    let favorites = state.storage.get_user_favorites(uid, &page).await?;
    Ok(Json(favorites.into_iter().map(MediaResponse::from).collect()))
}
// ===== Share Links =====
/// Create a share link for a media item, optionally password-protected
/// and/or time-limited.
pub async fn create_share_link(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<CreateShareLinkRequest>,
) -> Result<Json<ShareLinkResponse>, ApiError> {
    // Validate the expiry FIRST so an invalid request never pays for the
    // expensive Argon2 password hash below.
    const MAX_EXPIRY_HOURS: u64 = 8760; // 1 year
    if let Some(h) = req.expires_in_hours
        && h > MAX_EXPIRY_HOURS {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(format!(
                "expires_in_hours cannot exceed {}",
                MAX_EXPIRY_HOURS
            )),
        ));
    }
    let user_id = resolve_user_id(&state.storage, &username).await?;
    // NOTE(review): UUIDv7 embeds a millisecond timestamp, so the token is
    // partially predictable; if links must be unguessable, consider a
    // CSPRNG-generated token instead — confirm the threat model.
    let token = uuid::Uuid::now_v7().to_string().replace('-', "");
    let password_hash = match req.password.as_ref() {
        Some(p) => Some(pinakes_core::users::auth::hash_password(p).map_err(ApiError)?),
        None => None,
    };
    let expires_at = req
        .expires_in_hours
        .map(|h| chrono::Utc::now() + chrono::Duration::hours(h as i64));
    let link = state
        .storage
        .create_share_link(
            MediaId(req.media_id),
            user_id,
            &token,
            password_hash.as_deref(),
            expires_at,
        )
        .await?;
    Ok(Json(ShareLinkResponse::from(link)))
}
/// Resolve a share token to its media item, enforcing expiry and an
/// optional password, and bump the link's view counter.
pub async fn access_shared_media(
    State(state): State<AppState>,
    Path(token): Path<String>,
    Query(query): Query<ShareLinkQuery>,
) -> Result<Json<MediaResponse>, ApiError> {
    let link = state.storage.get_share_link(&token).await?;
    // Expired links are rejected before any password handling.
    if link.expires_at.is_some_and(|exp| chrono::Utc::now() > exp) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "share link has expired".into(),
            ),
        ));
    }
    // Password-protected links require a matching password query param.
    if let Some(hash) = link.password_hash.as_deref() {
        let Some(password) = query.password.as_deref() else {
            return Err(ApiError(pinakes_core::error::PinakesError::Authentication(
                "password required for this share link".into(),
            )));
        };
        let ok = pinakes_core::users::auth::verify_password(password, hash).unwrap_or(false);
        if !ok {
            return Err(ApiError(pinakes_core::error::PinakesError::Authentication(
                "invalid share link password".into(),
            )));
        }
    }
    state.storage.increment_share_views(&token).await?;
    let item = state.storage.get_media(link.media_id).await?;
    Ok(Json(MediaResponse::from(item)))
}

View file

@ -0,0 +1,238 @@
use axum::extract::{Path, State};
use axum::http::StatusCode;
use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode};
use uuid::Uuid;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::transcode::{estimate_bandwidth, parse_resolution};
/// Escape the five XML special characters in `s`.
///
/// Single pass with a preallocated buffer; the original chained five
/// `str::replace` calls, allocating five intermediate Strings per call.
/// Output is identical: `&` `<` `>` `"` `'` become their XML entities.
fn escape_xml(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            '\'' => out.push_str("&apos;"),
            _ => out.push(c),
        }
    }
    out
}
/// Render the HLS master playlist listing one variant per configured
/// transcoding profile.
pub async fn hls_master_playlist(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
    // Ensure the media row exists before emitting any playlist.
    state.storage.get_media(MediaId(id)).await?;
    let config = state.config.read().await;
    let mut playlist = String::from("#EXTM3U\n#EXT-X-VERSION:3\n\n");
    for profile in &config.transcoding.profiles {
        let (w, h) = parse_resolution(&profile.max_resolution);
        let bandwidth = estimate_bandwidth(profile);
        // Profile names may contain URL-unsafe characters; encode them.
        let encoded_name = utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string();
        playlist.push_str(&format!(
            "#EXT-X-STREAM-INF:BANDWIDTH={bandwidth},RESOLUTION={w}x{h}\n\
/api/v1/media/{id}/stream/hls/{encoded_name}/playlist.m3u8\n\n",
        ));
    }
    Ok(axum::response::Response::builder()
        .header("Content-Type", "application/vnd.apple.mpegurl")
        .body(axum::body::Body::from(playlist))
        .unwrap())
}
pub async fn hls_variant_playlist(
State(state): State<AppState>,
Path((id, profile)): Path<(Uuid, String)>,
) -> Result<axum::response::Response, ApiError> {
let item = state.storage.get_media(MediaId(id)).await?;
let duration = item.duration_secs.unwrap_or(0.0);
if duration <= 0.0 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"cannot generate HLS playlist for media with unknown or zero duration".into(),
),
));
}
let segment_duration = 10.0;
let num_segments = (duration / segment_duration).ceil() as usize;
let mut playlist = String::from(
"#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:10\n#EXT-X-MEDIA-SEQUENCE:0\n",
);
for i in 0..num_segments.max(1) {
let seg_dur = if i == num_segments.saturating_sub(1) && duration > 0.0 {
duration - (i as f64 * segment_duration)
} else {
segment_duration
};
playlist.push_str(&format!("#EXTINF:{seg_dur:.3},\n"));
playlist.push_str(&format!(
"/api/v1/media/{id}/stream/hls/{profile}/segment{i}.ts\n"
));
}
playlist.push_str("#EXT-X-ENDLIST\n");
Ok(axum::response::Response::builder()
.header("Content-Type", "application/vnd.apple.mpegurl")
.body(axum::body::Body::from(playlist))
.unwrap())
}
/// Serve one transcoded HLS segment from the session's cache directory.
///
/// Responses: 200 with the MPEG-TS bytes when the segment file exists;
/// 202 + Retry-After when a session exists but the encoder has not yet
/// produced the file; an error when the name is unsafe, reading fails,
/// or no transcode session exists for (media, profile).
pub async fn hls_segment(
    State(state): State<AppState>,
    Path((id, profile, segment)): Path<(Uuid, String, String)>,
) -> Result<axum::response::Response, ApiError> {
    // Strict validation: reject path traversal, null bytes, leading dots.
    // `segment` is joined onto a filesystem path below, so anything that
    // could escape the cache directory must be refused up front.
    if segment.is_empty()
        || segment.starts_with('.')
        || segment.contains('\0')
        || segment.contains("..")
        || segment.contains('/')
        || segment.contains('\\')
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation("invalid segment name".into()),
        ));
    }
    let media_id = MediaId(id);
    // Look for an active/completed transcode session for this media+profile.
    if let Some(transcode_service) = &state.transcode_service
        && let Some(session) = transcode_service.find_session(media_id, &profile).await {
        let segment_path = session.cache_path.join(&segment);
        if segment_path.exists() {
            let data = tokio::fs::read(&segment_path).await.map_err(|e| {
                ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                    format!("failed to read segment: {}", e),
                ))
            })?;
            return Ok(axum::response::Response::builder()
                .header("Content-Type", "video/MP2T")
                .body(axum::body::Body::from(data))
                .unwrap());
        }
        // Session exists but segment not ready yet: tell the client to retry
        // shortly rather than erroring out.
        return Ok(axum::response::Response::builder()
            .status(StatusCode::ACCEPTED)
            .header("Retry-After", "2")
            .body(axum::body::Body::from("segment not yet available"))
            .unwrap());
    }
    Err(ApiError(
        pinakes_core::error::PinakesError::InvalidOperation(
            "no transcode session found; start a transcode first via POST /media/{id}/transcode"
                .into(),
        ),
    ))
}
/// Render a static DASH MPD with one Representation per configured
/// transcoding profile.
pub async fn dash_manifest(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
    let item = state.storage.get_media(MediaId(id)).await?;
    let duration = item.duration_secs.unwrap_or(0.0);
    if duration <= 0.0 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "cannot generate DASH manifest for media with unknown or zero duration".into(),
            ),
        ));
    }
    // Split the duration into H/M/S for the ISO-8601 duration attribute.
    let hours = (duration / 3600.0) as u32;
    let minutes = ((duration % 3600.0) / 60.0) as u32;
    let seconds = duration % 60.0;
    let config = state.config.read().await;
    let mut representations = String::new();
    for profile in &config.transcoding.profiles {
        let (w, h) = parse_resolution(&profile.max_resolution);
        let bandwidth = estimate_bandwidth(profile);
        // XML attribute values and URL path segments need different escaping.
        let xml_name = escape_xml(&profile.name);
        let url_name = utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string();
        representations.push_str(&format!(
            r#" <Representation id="{xml_name}" bandwidth="{bandwidth}" width="{w}" height="{h}">
<SegmentTemplate media="/api/v1/media/{id}/stream/dash/{url_name}/segment$Number$.m4s" initialization="/api/v1/media/{id}/stream/dash/{url_name}/init.mp4" duration="10000" timescale="1000" startNumber="0"/>
</Representation>
"#,
        ));
    }
    let mpd = format!(
        r#"<?xml version="1.0" encoding="UTF-8"?>
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" type="static" mediaPresentationDuration="PT{hours}H{minutes}M{seconds:.1}S" minBufferTime="PT1.5S">
<Period>
<AdaptationSet mimeType="video/mp4" segmentAlignment="true">
{representations} </AdaptationSet>
</Period>
</MPD>"#
    );
    Ok(axum::response::Response::builder()
        .header("Content-Type", "application/dash+xml")
        .body(axum::body::Body::from(mpd))
        .unwrap())
}
/// Serve one transcoded DASH segment from the session's cache directory;
/// 202 when the session exists but the file isn't produced yet.
pub async fn dash_segment(
    State(state): State<AppState>,
    Path((id, profile, segment)): Path<(Uuid, String, String)>,
) -> Result<axum::response::Response, ApiError> {
    // Reject anything that could escape the per-session cache directory:
    // empty names, leading dots, null bytes, traversal, path separators.
    let unsafe_name = segment.is_empty()
        || segment.starts_with('.')
        || segment.contains('\0')
        || segment.contains("..")
        || segment.contains('/')
        || segment.contains('\\');
    if unsafe_name {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation("invalid segment name".into()),
        ));
    }
    let media_id = MediaId(id);
    if let Some(svc) = &state.transcode_service
        && let Some(session) = svc.find_session(media_id, &profile).await {
        let segment_path = session.cache_path.join(&segment);
        if !segment_path.exists() {
            // Encoder hasn't produced this segment yet; ask the client to retry.
            return Ok(axum::response::Response::builder()
                .status(StatusCode::ACCEPTED)
                .header("Retry-After", "2")
                .body(axum::body::Body::from("segment not yet available"))
                .unwrap());
        }
        let bytes = tokio::fs::read(&segment_path).await.map_err(|e| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                format!("failed to read segment: {}", e),
            ))
        })?;
        return Ok(axum::response::Response::builder()
            .header("Content-Type", "video/mp4")
            .body(axum::body::Body::from(bytes))
            .unwrap());
    }
    Err(ApiError(
        pinakes_core::error::PinakesError::InvalidOperation(
            "no transcode session found; start a transcode first via POST /media/{id}/transcode"
                .into(),
        ),
    ))
}

View file

@ -0,0 +1,123 @@
use axum::Json;
use axum::extract::{Path, State};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::subtitles::{Subtitle, SubtitleFormat};
/// List all subtitle tracks known for a media item.
pub async fn list_subtitles(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<SubtitleResponse>>, ApiError> {
    let tracks = state.storage.get_media_subtitles(MediaId(id)).await?;
    let body: Vec<SubtitleResponse> = tracks.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Register a subtitle track for a media item — either an external file
/// (requires `file_path`) or a track embedded in the container
/// (requires `track_index`).
pub async fn add_subtitle(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<AddSubtitleRequest>,
) -> Result<Json<SubtitleResponse>, ApiError> {
    let format: SubtitleFormat = req
        .format
        .parse()
        .map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?;
    let is_embedded = req.is_embedded.unwrap_or(false);
    // Each kind of track has one mandatory locator field.
    if is_embedded {
        if req.track_index.is_none() {
            return Err(ApiError(
                pinakes_core::error::PinakesError::InvalidOperation(
                    "track_index is required for embedded subtitles".into(),
                ),
            ));
        }
    } else if req.file_path.is_none() {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "file_path is required for non-embedded subtitles".into(),
            ),
        ));
    }
    let subtitle = Subtitle {
        id: Uuid::now_v7(),
        media_id: MediaId(id),
        language: req.language,
        format,
        file_path: req.file_path.map(std::path::PathBuf::from),
        is_embedded,
        track_index: req.track_index,
        offset_ms: req.offset_ms.unwrap_or(0),
        created_at: chrono::Utc::now(),
    };
    state.storage.add_subtitle(&subtitle).await?;
    Ok(Json(SubtitleResponse::from(subtitle)))
}
/// Delete a subtitle track by its own ID.
pub async fn delete_subtitle(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.delete_subtitle(id).await?;
    Ok(Json(serde_json::json!({"deleted": true})))
}
pub async fn get_subtitle_content(
State(state): State<AppState>,
Path((media_id, subtitle_id)): Path<(Uuid, Uuid)>,
) -> Result<axum::response::Response, ApiError> {
let subtitles = state.storage.get_media_subtitles(MediaId(media_id)).await?;
let subtitle = subtitles
.into_iter()
.find(|s| s.id == subtitle_id)
.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"subtitle {subtitle_id}"
)))
})?;
if let Some(ref path) = subtitle.file_path {
let content = tokio::fs::read_to_string(path).await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
ApiError(pinakes_core::error::PinakesError::FileNotFound(
path.clone(),
))
} else {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read subtitle file {}: {}", path.display(), e),
))
}
})?;
let content_type = match subtitle.format {
SubtitleFormat::Vtt => "text/vtt",
SubtitleFormat::Srt => "application/x-subrip",
_ => "text/plain",
};
Ok(axum::response::Response::builder()
.header("Content-Type", content_type)
.body(axum::body::Body::from(content))
.unwrap())
} else {
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"subtitle is embedded, no file to serve".into(),
),
))
}
}
/// Set a subtitle track's sync offset in milliseconds.
pub async fn update_offset(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<UpdateSubtitleOffsetRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.update_subtitle_offset(id, req.offset_ms).await?;
    Ok(Json(serde_json::json!({"updated": true})))
}

View file

@ -0,0 +1,63 @@
use axum::Json;
use axum::extract::{Path, Query, State};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
/// Queue a transcode job for a media item and return the job ID.
pub async fn start_transcode(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<CreateTranscodeRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let kind = pinakes_core::jobs::JobKind::Transcode {
        media_id: MediaId(id),
        profile: req.profile,
    };
    let job_id = state.job_queue.submit(kind).await;
    Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
/// Fetch a single transcode session by ID.
pub async fn get_session(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<TranscodeSessionResponse>, ApiError> {
    let session = state.storage.get_transcode_session(id).await?;
    Ok(Json(session.into()))
}
/// List all transcode sessions. Pagination params are accepted but
/// not yet used for filtering.
pub async fn list_sessions(
    State(state): State<AppState>,
    Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<TranscodeSessionResponse>>, ApiError> {
    let _ = params; // reserved for future filtering
    let sessions = state.storage.list_transcode_sessions(None).await?;
    let body: Vec<TranscodeSessionResponse> =
        sessions.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Cancel a transcode session, via the live service when available,
/// otherwise by updating the persisted status directly.
pub async fn cancel_session(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    match &state.transcode_service {
        Some(svc) => {
            svc.cancel_transcode(id, &state.storage).await?;
        }
        None => {
            state
                .storage
                .update_transcode_status(
                    id,
                    pinakes_core::transcode::TranscodeStatus::Cancelled,
                    0.0,
                )
                .await?;
        }
    }
    Ok(Json(serde_json::json!({"cancelled": true})))
}

View file

@ -0,0 +1,191 @@
use axum::Json;
use axum::extract::{Path, State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::users::{CreateUserRequest, UpdateUserRequest, UserId};
/// List all users (admin only)
pub async fn list_users(
    State(state): State<AppState>,
) -> Result<Json<Vec<UserResponse>>, ApiError> {
    let all = state.storage.list_users().await?;
    let body: Vec<UserResponse> = all.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Create a new user (admin only)
pub async fn create_user(
    State(state): State<AppState>,
    Json(req): Json<CreateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
    // Validate username length in CHARACTERS, not bytes, for consistency
    // with the other handlers' length checks (e.g. playlist names); the
    // previous byte-length check rejected multibyte usernames of <=255 chars.
    let username_chars = req.username.chars().count();
    if username_chars == 0 || username_chars > 255 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "username must be 1-255 characters".into(),
            ),
        ));
    }
    // Enforce the minimum password length before the expensive hash.
    if req.password.len() < 8 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "password must be at least 8 characters".into(),
            ),
        ));
    }
    let password_hash = pinakes_core::users::auth::hash_password(&req.password)?;
    // Create user - rely on DB unique constraint for username to avoid TOCTOU race
    let user = state
        .storage
        .create_user(&req.username, &password_hash, req.role, req.profile)
        .await
        .map_err(|e| {
            // Map unique constraint violations to a user-friendly conflict error
            let err_str = e.to_string();
            if err_str.contains("UNIQUE")
                || err_str.contains("unique")
                || err_str.contains("duplicate key")
            {
                ApiError(pinakes_core::error::PinakesError::DuplicateHash(
                    "username already exists".into(),
                ))
            } else {
                ApiError(e)
            }
        })?;
    Ok(Json(UserResponse::from(user)))
}
/// Get a specific user by ID
pub async fn get_user(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<UserResponse>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
let user = state.storage.get_user(user_id).await?;
Ok(Json(UserResponse::from(user)))
}
/// Update a user
pub async fn update_user(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<UpdateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
// Hash password if provided
let password_hash = if let Some(ref password) = req.password {
if password.len() < 8 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"password must be at least 8 characters".into(),
),
));
}
Some(pinakes_core::users::auth::hash_password(password)?)
} else {
None
};
let user = state
.storage
.update_user(user_id, password_hash.as_deref(), req.role, req.profile)
.await?;
Ok(Json(UserResponse::from(user)))
}
/// Delete a user (admin only)
pub async fn delete_user(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state.storage.delete_user(user_id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
}
/// Get user's accessible libraries
pub async fn get_user_libraries(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<Vec<UserLibraryResponse>>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
let libraries = state.storage.get_user_libraries(user_id).await?;
Ok(Json(
libraries
.into_iter()
.map(UserLibraryResponse::from)
.collect(),
))
}
/// Grant library access to a user (admin only)
pub async fn grant_library_access(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<GrantLibraryAccessRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state
.storage
.grant_library_access(user_id, &req.root_path, req.permission)
.await?;
Ok(Json(serde_json::json!({"granted": true})))
}
/// Revoke library access from a user (admin only)
///
/// Uses a JSON body instead of a path parameter because root_path may contain
/// slashes that conflict with URL routing.
pub async fn revoke_library_access(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<RevokeLibraryAccessRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state
.storage
.revoke_library_access(user_id, &req.root_path)
.await?;
Ok(Json(serde_json::json!({"revoked": true})))
}

View file

@ -7,9 +7,11 @@ use tokio::sync::RwLock;
use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{Config, UserRole};
use pinakes_core::jobs::JobQueue;
use pinakes_core::plugin::PluginManager;
use pinakes_core::scan::ScanProgress;
use pinakes_core::scheduler::TaskScheduler;
use pinakes_core::storage::DynStorageBackend;
use pinakes_core::transcode::TranscodeService;
/// Default session TTL: 24 hours.
pub const SESSION_TTL_SECS: i64 = 24 * 60 * 60;
@ -47,4 +49,6 @@ pub struct AppState {
pub job_queue: Arc<JobQueue>,
pub cache: Arc<CacheLayer>,
pub scheduler: Arc<TaskScheduler>,
pub plugin_manager: Option<Arc<PluginManager>>,
pub transcode_service: Option<Arc<TranscodeService>>,
}

View file

@ -10,8 +10,10 @@ use tower::ServiceExt;
use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{
AccountsConfig, Config, DirectoryConfig, JobsConfig, ScanningConfig, ServerConfig,
SqliteConfig, StorageBackendType, StorageConfig, ThumbnailConfig, UiConfig, WebhookConfig,
AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig,
JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType,
StorageConfig, ThumbnailConfig, TranscodingConfig, UiConfig, UserAccount, UserRole,
WebhookConfig,
};
use pinakes_core::jobs::JobQueue;
use pinakes_core::storage::StorageBackend;
@ -41,12 +43,57 @@ fn post_json(uri: &str, body: &str) -> Request<Body> {
req
}
async fn setup_app() -> axum::Router {
let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
backend.run_migrations().await.expect("migrations");
let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
/// Build a GET request with Bearer auth
fn get_authed(uri: &str, token: &str) -> Request<Body> {
    let auth = format!("Bearer {}", token);
    let mut request = Request::builder()
        .uri(uri)
        .header("authorization", auth)
        .body(Body::empty())
        .unwrap();
    // Middleware expects a client address in the request extensions.
    request.extensions_mut().insert(test_addr());
    request
}
let config = Config {
/// Build a POST JSON request with Bearer auth
fn post_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
    let auth = format!("Bearer {}", token);
    let mut request = Request::builder()
        .method("POST")
        .uri(uri)
        .header("content-type", "application/json")
        .header("authorization", auth)
        .body(Body::from(body.to_string()))
        .unwrap();
    // Middleware expects a client address in the request extensions.
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a DELETE request with Bearer auth
fn delete_authed(uri: &str, token: &str) -> Request<Body> {
    let auth = format!("Bearer {}", token);
    let mut request = Request::builder()
        .method("DELETE")
        .uri(uri)
        .header("authorization", auth)
        .body(Body::empty())
        .unwrap();
    // Middleware expects a client address in the request extensions.
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a PATCH JSON request with Bearer auth
fn patch_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
    let auth = format!("Bearer {}", token);
    let mut request = Request::builder()
        .method("PATCH")
        .uri(uri)
        .header("content-type", "application/json")
        .header("authorization", auth)
        .body(Body::from(body.to_string()))
        .unwrap();
    // Middleware expects a client address in the request extensions.
    request.extensions_mut().insert(test_addr());
    request
}
fn default_config() -> Config {
Config {
storage: StorageConfig {
backend: StorageBackendType::Sqlite,
sqlite: Some(SqliteConfig {
@ -72,7 +119,20 @@ async fn setup_app() -> axum::Router {
thumbnails: ThumbnailConfig::default(),
webhooks: Vec::<WebhookConfig>::new(),
scheduled_tasks: vec![],
};
plugins: PluginsConfig::default(),
transcoding: TranscodingConfig::default(),
enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(),
}
}
async fn setup_app() -> axum::Router {
let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
backend.run_migrations().await.expect("migrations");
let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
let config = default_config();
let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
let config = Arc::new(RwLock::new(config));
@ -92,11 +152,117 @@ async fn setup_app() -> axum::Router {
job_queue,
cache: Arc::new(CacheLayer::new(60)),
scheduler: Arc::new(scheduler),
plugin_manager: None,
transcode_service: None,
};
pinakes_server::app::create_router(state)
}
/// Hash a password for test user accounts.
fn hash_password(password: &str) -> String {
    use pinakes_core::users::auth;
    auth::hash_password(password).unwrap()
}
/// Set up an app with accounts enabled and three pre-seeded users.
/// Returns (Router, admin_token, editor_token, viewer_token).
async fn setup_app_with_auth() -> (axum::Router, String, String, String) {
let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
backend.run_migrations().await.expect("migrations");
let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
// Create users in database so resolve_user_id works
let users_to_create = vec![
("admin", "adminpass", UserRole::Admin),
("editor", "editorpass", UserRole::Editor),
("viewer", "viewerpass", UserRole::Viewer),
];
for (username, password, role) in &users_to_create {
let password_hash = hash_password(password);
storage
.create_user(username, &password_hash, *role, None)
.await
.expect("create user");
}
let mut config = default_config();
config.accounts.enabled = true;
config.accounts.users = vec![
UserAccount {
username: "admin".to_string(),
password_hash: hash_password("adminpass"),
role: UserRole::Admin,
},
UserAccount {
username: "editor".to_string(),
password_hash: hash_password("editorpass"),
role: UserRole::Editor,
},
UserAccount {
username: "viewer".to_string(),
password_hash: hash_password("viewerpass"),
role: UserRole::Viewer,
},
];
let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
let config = Arc::new(RwLock::new(config));
let scheduler = pinakes_core::scheduler::TaskScheduler::new(
job_queue.clone(),
tokio_util::sync::CancellationToken::new(),
config.clone(),
None,
);
let state = pinakes_server::state::AppState {
storage,
config,
config_path: None,
scan_progress: pinakes_core::scan::ScanProgress::new(),
sessions: Arc::new(RwLock::new(std::collections::HashMap::new())),
job_queue,
cache: Arc::new(CacheLayer::new(60)),
scheduler: Arc::new(scheduler),
plugin_manager: None,
transcode_service: None,
};
let app = pinakes_server::app::create_router(state);
// Login each user to get tokens
let admin_token = login_user(app.clone(), "admin", "adminpass").await;
let editor_token = login_user(app.clone(), "editor", "editorpass").await;
let viewer_token = login_user(app.clone(), "viewer", "viewerpass").await;
(app, admin_token, editor_token, viewer_token)
}
/// Log a user in via the API and return the session token from the response.
/// Panics (failing the test) when the login is rejected.
async fn login_user(app: axum::Router, username: &str, password: &str) -> String {
    let payload = format!(r#"{{"username":"{}","password":"{}"}}"#, username, password);
    let resp = app
        .oneshot(post_json("/api/v1/auth/login", &payload))
        .await
        .unwrap();
    assert_eq!(
        resp.status(),
        StatusCode::OK,
        "login failed for user {}",
        username
    );
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let json: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
    json["token"].as_str().unwrap().to_string()
}
/// Collect a response body and parse it as JSON; yields `Null` on parse failure.
async fn response_body(response: axum::response::Response) -> serde_json::Value {
    let bytes = response.into_body().collect().await.unwrap().to_bytes();
    serde_json::from_slice(&bytes).unwrap_or(serde_json::Value::Null)
}
// ===================================================================
// Existing tests (no auth)
// ===================================================================
#[tokio::test]
async fn test_list_media_empty() {
let app = setup_app().await;
@ -210,3 +376,623 @@ async fn test_scheduled_tasks_endpoint() {
assert!(tasks[0]["name"].is_string());
assert!(tasks[0]["schedule"].is_string());
}
#[tokio::test]
async fn test_user_management_crud() {
    let app = setup_app().await;
    // Create a user and capture its id from the response payload.
    let resp = app
        .clone()
        .oneshot(post_json(
            "/api/v1/users",
            r#"{"username":"testuser","password":"password123","role":"viewer"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let created: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(created["username"], "testuser");
    assert_eq!(created["role"], "viewer");
    let user_id = created["id"].as_str().unwrap().to_string();
    let user_uri = format!("/api/v1/users/{}", user_id);
    // Listing should now return exactly the user we just created.
    let resp = app.clone().oneshot(get("/api/v1/users")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let listed: Vec<serde_json::Value> = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(listed.len(), 1);
    assert_eq!(listed[0]["username"], "testuser");
    // Fetching by id returns the same user.
    let resp = app.clone().oneshot(get(&user_uri)).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let fetched: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(fetched["username"], "testuser");
    // Delete the user.
    let mut delete_req = Request::builder()
        .method("DELETE")
        .uri(&user_uri)
        .body(Body::empty())
        .unwrap();
    delete_req.extensions_mut().insert(test_addr());
    let resp = app.clone().oneshot(delete_req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    // A follow-up fetch must report the user as gone.
    let resp = app.oneshot(get(&user_uri)).await.unwrap();
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn test_health_endpoint() {
    let app = setup_app().await;
    // Health endpoint should be publicly accessible
    let resp = app.oneshot(get("/api/v1/health")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_user_duplicate_username() {
    let app = setup_app().await;
    // Seed the first user; this one must succeed.
    let resp = app
        .clone()
        .oneshot(post_json(
            "/api/v1/users",
            r#"{"username":"duplicate","password":"password1","role":"viewer"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    // A second user with the same username must be rejected with 409 Conflict.
    let resp = app
        .oneshot(post_json(
            "/api/v1/users",
            r#"{"username":"duplicate","password":"password2","role":"viewer"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::CONFLICT);
}
// ===================================================================
// Authentication tests
// ===================================================================
#[tokio::test]
async fn test_unauthenticated_request_rejected() {
    let (app, ..) = setup_app_with_auth().await;
    // No Authorization header at all.
    let resp = app.oneshot(get("/api/v1/media")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn test_invalid_token_rejected() {
    let (app, ..) = setup_app_with_auth().await;
    // A syntactically present but unknown token must also be rejected.
    let resp = app
        .oneshot(get_authed("/api/v1/media", "totally-invalid-token"))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn test_login_valid_credentials() {
    let (app, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json(
            "/api/v1/auth/login",
            r#"{"username":"admin","password":"adminpass"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert!(json["token"].is_string());
    assert_eq!(json["username"], "admin");
    assert_eq!(json["role"], "admin");
}
#[tokio::test]
async fn test_login_invalid_password() {
    let (app, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json(
            "/api/v1/auth/login",
            r#"{"username":"admin","password":"wrongpassword"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn test_login_unknown_user() {
    let (app, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json(
            "/api/v1/auth/login",
            r#"{"username":"nonexistent","password":"whatever"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn test_auth_me_endpoint() {
    let (app, admin_token, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/auth/me", &admin_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert_eq!(json["username"], "admin");
    assert_eq!(json["role"], "admin");
}
#[tokio::test]
async fn test_logout() {
    let (app, admin_token, ..) = setup_app_with_auth().await;
    // Invalidate the session.
    let resp = app
        .clone()
        .oneshot(post_json_authed("/api/v1/auth/logout", "", &admin_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    // The token must no longer grant access afterwards.
    let resp = app
        .oneshot(get_authed("/api/v1/media", &admin_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
// ===================================================================
// Authorization / RBAC tests
// ===================================================================
#[tokio::test]
async fn test_viewer_cannot_access_editor_routes() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    // POST /tags is an editor-only route
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/tags",
            r#"{"name":"test"}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn test_viewer_cannot_access_admin_routes() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    // GET /users is an admin-only route
    let resp = app
        .oneshot(get_authed("/api/v1/users", &viewer_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn test_editor_cannot_access_admin_routes() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/users", &editor_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn test_editor_can_write() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/tags",
            r#"{"name":"EditorTag"}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_admin_can_access_all() {
    let (app, admin_token, ..) = setup_app_with_auth().await;
    // A viewer-level route...
    let resp = app
        .clone()
        .oneshot(get_authed("/api/v1/media", &admin_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    // ...an editor-level route...
    let resp = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/tags",
            r#"{"name":"AdminTag"}"#,
            &admin_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    // ...and an admin-only route are all accessible to the admin.
    let resp = app
        .oneshot(get_authed("/api/v1/users", &admin_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
// ===================================================================
// Phase 2 feature tests: Social
// ===================================================================
#[tokio::test]
async fn test_rating_invalid_stars_zero() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    // Zero stars is below the accepted range and must be rejected.
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/ratings",
            r#"{"stars":0}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_rating_invalid_stars_six() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    // Six stars exceeds the accepted range and must be rejected.
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/ratings",
            r#"{"stars":6}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_comment_empty_text() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    // Comments with empty text are invalid input.
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/comments",
            r#"{"text":""}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_favorites_list_empty() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/favorites", &viewer_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert!(json.as_array().unwrap().is_empty());
}
// ===================================================================
// Phase 2 feature tests: Playlists
// ===================================================================
#[tokio::test]
async fn test_playlist_crud() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    // Create a playlist and remember its id.
    let resp = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/playlists",
            r#"{"name":"My Playlist"}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    let playlist_id = json["id"].as_str().unwrap().to_string();
    assert_eq!(json["name"], "My Playlist");
    let playlist_uri = format!("/api/v1/playlists/{}", playlist_id);
    // Listing returns the single playlist.
    let resp = app
        .clone()
        .oneshot(get_authed("/api/v1/playlists", &editor_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert_eq!(json.as_array().unwrap().len(), 1);
    // Fetch by id.
    let resp = app
        .clone()
        .oneshot(get_authed(&playlist_uri, &editor_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    // Rename and describe via PATCH.
    let resp = app
        .clone()
        .oneshot(patch_json_authed(
            &playlist_uri,
            r#"{"name":"Updated Playlist","description":"A test description"}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert_eq!(json["name"], "Updated Playlist");
    // Delete the playlist.
    let resp = app
        .clone()
        .oneshot(delete_authed(&playlist_uri, &editor_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_playlist_empty_name() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    // An empty name is invalid input.
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/playlists",
            r#"{"name":""}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
// ===================================================================
// Phase 2 feature tests: Analytics
// ===================================================================
#[tokio::test]
async fn test_most_viewed_empty() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    // No events recorded yet, so the most-viewed list is empty.
    let resp = app
        .oneshot(get_authed("/api/v1/analytics/most-viewed", &viewer_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert!(json.as_array().unwrap().is_empty());
}
#[tokio::test]
async fn test_record_event_and_query() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    // Record a "view" event; the endpoint acknowledges with {"recorded": true}.
    let resp = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/analytics/events",
            r#"{"event_type":"view","duration_secs":5.0}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert_eq!(json["recorded"], true);
}
// ===================================================================
// Phase 2 feature tests: Streaming/Transcode
// ===================================================================
#[tokio::test]
async fn test_transcode_session_not_found() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed(
            "/api/v1/transcode/00000000-0000-0000-0000-000000000000",
            &viewer_token,
        ))
        .await
        .unwrap();
    // Should be 404 or 500 (not found in DB)
    let status = resp.status();
    assert!([StatusCode::NOT_FOUND, StatusCode::INTERNAL_SERVER_ERROR].contains(&status));
}
#[tokio::test]
async fn test_transcode_list_empty() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/transcode", &viewer_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    assert!(json.as_array().unwrap().is_empty());
}
#[tokio::test]
async fn test_hls_segment_no_session() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/stream/hls/720p/segment0.ts",
            &viewer_token,
        ))
        .await
        .unwrap();
    // Should fail because media doesn't exist or no transcode session
    let status = resp.status();
    assert!(
        [
            StatusCode::BAD_REQUEST,
            StatusCode::NOT_FOUND,
            StatusCode::INTERNAL_SERVER_ERROR,
        ]
        .contains(&status)
    );
}
// ===================================================================
// Phase 2 feature tests: Subtitles
// ===================================================================
#[tokio::test]
async fn test_subtitles_list() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    // Should return empty for nonexistent media (or not found)
    let resp = app
        .oneshot(get_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/subtitles",
            &viewer_token,
        ))
        .await
        .unwrap();
    let status = resp.status();
    assert!(
        [
            StatusCode::OK,
            StatusCode::NOT_FOUND,
            StatusCode::INTERNAL_SERVER_ERROR,
        ]
        .contains(&status)
    );
}
// ===================================================================
// Health: public access test
// ===================================================================
#[tokio::test]
async fn test_health_public() {
    let (app, ..) = setup_app_with_auth().await;
    // Health endpoint should be accessible without auth even when accounts enabled
    let resp = app.oneshot(get("/api/v1/health")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
// ===================================================================
// Input validation & edge case tests
// ===================================================================
#[tokio::test]
async fn test_invalid_uuid_in_path() {
    let (app, .., viewer_token) = setup_app_with_auth().await;
    // A malformed UUID in the path is rejected before hitting the handler.
    let resp = app
        .oneshot(get_authed("/api/v1/media/not-a-uuid", &viewer_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_oversized_comment() {
    let (app, _, editor_token, ..) = setup_app_with_auth().await;
    // 10_001 characters — expected to exceed the server's comment length limit.
    let long_text = "x".repeat(10_001);
    let payload = format!(r#"{{"text":"{}"}}"#, long_text);
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/comments",
            &payload,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_share_link_expired() {
    // Uses no-auth setup since share links are complex to test with auth
    // (need real media items). Verify the expire check logic works.
    let app = setup_app().await;
    // First import a dummy file to get a media_id — but we can't without a real file.
    // So let's test the public share access endpoint with a nonexistent token.
    let resp = app
        .oneshot(get("/api/v1/s/nonexistent_token"))
        .await
        .unwrap();
    // Should fail with not found or internal error (no such share link)
    let status = resp.status();
    assert!([StatusCode::NOT_FOUND, StatusCode::INTERNAL_SERVER_ERROR].contains(&status));
}

View file

@ -0,0 +1,211 @@
use std::net::SocketAddr;
use std::sync::Arc;
use axum::body::Body;
use axum::extract::ConnectInfo;
use axum::http::{Request, StatusCode};
use http_body_util::BodyExt;
use tokio::sync::RwLock;
use tower::ServiceExt;
use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{
AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig,
JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType,
StorageConfig, ThumbnailConfig, TranscodingConfig, UiConfig, WebhookConfig,
};
use pinakes_core::jobs::JobQueue;
use pinakes_core::plugin::PluginManager;
use pinakes_core::storage::StorageBackend;
use pinakes_core::storage::sqlite::SqliteBackend;
/// Fake socket address for tests (governor needs ConnectInfo<SocketAddr>)
fn test_addr() -> ConnectInfo<SocketAddr> {
    let addr: SocketAddr = "127.0.0.1:9999".parse().unwrap();
    ConnectInfo(addr)
}
/// Build a GET request with ConnectInfo for rate limiter compatibility
fn get(uri: &str) -> Request<Body> {
    let mut request = Request::builder().uri(uri).body(Body::empty()).unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a test router backed by an in-memory SQLite store plus a live
/// `PluginManager` with no plugins loaded.
///
/// Returns the router, the manager (for direct assertions), and the `TempDir`
/// backing the plugin data/cache dirs — the caller must keep the `TempDir`
/// alive for the duration of the test, since dropping it deletes those dirs.
async fn setup_app_with_plugins() -> (axum::Router, Arc<PluginManager>, tempfile::TempDir) {
    let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
    backend.run_migrations().await.expect("migrations");
    let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
    // Create temp directories for plugin manager (automatically cleaned up when TempDir drops)
    let temp_dir = tempfile::TempDir::new().expect("create temp dir");
    let data_dir = temp_dir.path().join("data");
    let cache_dir = temp_dir.path().join("cache");
    std::fs::create_dir_all(&data_dir).expect("create data dir");
    std::fs::create_dir_all(&cache_dir).expect("create cache dir");
    // allow_unsigned so tests don't need a plugin signing key;
    // hot reload is off to keep the setup deterministic.
    let plugin_config = PluginsConfig {
        enabled: true,
        data_dir: data_dir.clone(),
        cache_dir: cache_dir.clone(),
        plugin_dirs: vec![],
        enable_hot_reload: false,
        allow_unsigned: true,
        max_concurrent_ops: 2,
        plugin_timeout_secs: 10,
    };
    let plugin_manager = PluginManager::new(data_dir, cache_dir, plugin_config.clone().into())
        .expect("create plugin manager");
    let plugin_manager = Arc::new(plugin_manager);
    // Minimal config: no media roots, no watching, plugins section wired to
    // the same PluginsConfig the manager was built from.
    let config = Config {
        storage: StorageConfig {
            backend: StorageBackendType::Sqlite,
            sqlite: Some(SqliteConfig {
                path: ":memory:".into(),
            }),
            postgres: None,
        },
        directories: DirectoryConfig { roots: vec![] },
        scanning: ScanningConfig {
            watch: false,
            poll_interval_secs: 300,
            ignore_patterns: vec![],
            import_concurrency: 8,
        },
        server: ServerConfig {
            host: "127.0.0.1".to_string(),
            port: 3000,
            api_key: None,
        },
        ui: UiConfig::default(),
        accounts: AccountsConfig::default(),
        jobs: JobsConfig::default(),
        thumbnails: ThumbnailConfig::default(),
        webhooks: Vec::<WebhookConfig>::new(),
        scheduled_tasks: vec![],
        plugins: plugin_config,
        transcoding: TranscodingConfig::default(),
        enrichment: EnrichmentConfig::default(),
        cloud: CloudConfig::default(),
        analytics: AnalyticsConfig::default(),
    };
    // No-op job executor: spawned jobs complete immediately.
    let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
    let config = Arc::new(RwLock::new(config));
    let scheduler = pinakes_core::scheduler::TaskScheduler::new(
        job_queue.clone(),
        tokio_util::sync::CancellationToken::new(),
        config.clone(),
        None,
    );
    let state = pinakes_server::state::AppState {
        storage,
        config,
        config_path: None,
        scan_progress: pinakes_core::scan::ScanProgress::new(),
        sessions: Arc::new(RwLock::new(std::collections::HashMap::new())),
        job_queue,
        cache: Arc::new(CacheLayer::new(60)),
        scheduler: Arc::new(scheduler),
        plugin_manager: Some(plugin_manager.clone()),
        transcode_service: None,
    };
    let router = pinakes_server::app::create_router(state);
    (router, plugin_manager, temp_dir)
}
#[tokio::test]
async fn test_list_plugins_empty() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
let response = app.oneshot(get("/api/v1/plugins")).await.unwrap();
assert_eq!(response.status(), StatusCode::OK);
let body = response.into_body().collect().await.unwrap().to_bytes();
let plugins: Vec<serde_json::Value> = serde_json::from_slice(&body).unwrap();
assert_eq!(plugins.len(), 0, "should start with no plugins loaded");
}
#[tokio::test]
async fn test_plugin_manager_exists() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
// Verify plugin manager is accessible
let plugins = _pm.list_plugins().await;
assert_eq!(plugins.len(), 0);
// Verify API endpoint works
let response = app.oneshot(get("/api/v1/plugins")).await.unwrap();
assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_plugin_not_found() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
let response = app
.oneshot(get("/api/v1/plugins/nonexistent"))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn test_plugin_enable_disable() {
let (app, pm, _tmp) = setup_app_with_plugins().await;
// Verify plugin manager is initialized
assert!(pm.list_plugins().await.is_empty());
// For this test, we would need to actually load a plugin first
// Since we don't have a real WASM plugin loaded, we'll just verify
// the endpoints exist and return appropriate errors
let mut req = Request::builder()
.method("POST")
.uri("/api/v1/plugins/test-plugin/enable")
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
let response = app.clone().oneshot(req).await.unwrap();
// Should be NOT_FOUND since plugin doesn't exist
assert_eq!(response.status(), StatusCode::NOT_FOUND);
// Test disable endpoint
let mut req = Request::builder()
.method("POST")
.uri("/api/v1/plugins/test-plugin/disable")
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
let response = app.oneshot(req).await.unwrap();
// Should also be NOT_FOUND
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn test_plugin_uninstall_not_found() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
let mut req = Request::builder()
.method("DELETE")
.uri("/api/v1/plugins/nonexistent")
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
let response = app.oneshot(req).await.unwrap();
// Expect 400 or 404 when plugin doesn't exist
assert!(
response.status() == StatusCode::BAD_REQUEST || response.status() == StatusCode::NOT_FOUND
);
}

View file

@ -54,7 +54,7 @@ pub struct AppState {
pub total_media_count: u64,
pub server_url: String,
// Duplicates view
pub duplicate_groups: Vec<Vec<crate::client::MediaResponse>>,
pub duplicate_groups: Vec<crate::client::DuplicateGroupResponse>,
pub duplicates_selected: Option<usize>,
// Database view
pub database_stats: Option<Vec<(String, String)>>,
@ -249,16 +249,11 @@ fn handle_api_result(state: &mut AppState, result: ApiResult) {
}
}
ApiResult::Duplicates(groups) => {
let flat: Vec<Vec<crate::client::MediaResponse>> =
groups.into_iter().map(|g| g.items).collect();
state.duplicate_groups = flat;
if !state.duplicate_groups.is_empty() {
if !groups.is_empty() {
state.duplicates_selected = Some(0);
}
state.status_message = Some(format!(
"Found {} duplicate groups",
state.duplicate_groups.len()
));
state.status_message = Some(format!("Found {} duplicate groups", groups.len()));
state.duplicate_groups = groups;
}
ApiResult::DatabaseStats(stats) => {
state.database_stats = Some(vec![
@ -617,6 +612,13 @@ async fn handle_action(
}
}
}
// Also fetch background jobs info
match client.list_jobs().await {
Ok(jobs) => {
tracing::debug!("Found {} background jobs", jobs.len());
}
Err(e) => tracing::warn!("Failed to list jobs: {}", e),
}
});
}
Action::QueueView => {
@ -1024,6 +1026,134 @@ async fn handle_action(
"?: Help q: Quit /: Search i: Import o: Open t: Tags c: Collections a: Audit s: Scan S: Settings r: Refresh Home/End: Top/Bottom".into()
);
}
Action::Edit => {
if state.current_view == View::Detail
&& let Some(ref media) = state.selected_media {
// Populate edit fields from selected media
state.edit_title = media.title.clone().unwrap_or_default();
state.edit_artist = media.artist.clone().unwrap_or_default();
state.edit_album = media.album.clone().unwrap_or_default();
state.edit_genre = media.genre.clone().unwrap_or_default();
state.edit_year = media.year.map(|y| y.to_string()).unwrap_or_default();
state.edit_description = media.description.clone().unwrap_or_default();
state.edit_field_index = Some(0);
state.input_mode = true;
state.current_view = View::MetadataEdit;
}
}
Action::Vacuum => {
if state.current_view == View::Database {
state.status_message = Some("Vacuuming database...".to_string());
let client = client.clone();
let tx = event_sender.clone();
tokio::spawn(async move {
match client.vacuum_database().await {
Ok(()) => {
tracing::info!("Database vacuum completed");
// Refresh stats after vacuum
if let Ok(stats) = client.database_stats().await {
let _ =
tx.send(AppEvent::ApiResult(ApiResult::DatabaseStats(stats)));
}
}
Err(e) => {
tracing::error!("Vacuum failed: {}", e);
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!(
"Vacuum failed: {e}"
))));
}
}
});
}
}
Action::Toggle => {
if state.current_view == View::Tasks
&& let Some(idx) = state.scheduled_tasks_selected
&& let Some(task) = state.scheduled_tasks.get(idx) {
let task_id = task.id.clone();
let client = client.clone();
let tx = event_sender.clone();
tokio::spawn(async move {
match client.toggle_scheduled_task(&task_id).await {
Ok(()) => {
// Refresh tasks list
if let Ok(tasks) = client.list_scheduled_tasks().await {
let _ = tx.send(AppEvent::ApiResult(
ApiResult::ScheduledTasks(tasks),
));
}
}
Err(e) => {
tracing::error!("Failed to toggle task: {}", e);
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(
format!("Toggle task failed: {e}"),
)));
}
}
});
}
}
Action::RunNow => {
if state.current_view == View::Tasks
&& let Some(idx) = state.scheduled_tasks_selected
&& let Some(task) = state.scheduled_tasks.get(idx) {
let task_id = task.id.clone();
let task_name = task.name.clone();
state.status_message = Some(format!("Running task: {task_name}..."));
let client = client.clone();
let tx = event_sender.clone();
tokio::spawn(async move {
match client.run_task_now(&task_id).await {
Ok(()) => {
// Refresh tasks list
if let Ok(tasks) = client.list_scheduled_tasks().await {
let _ = tx.send(AppEvent::ApiResult(
ApiResult::ScheduledTasks(tasks),
));
}
}
Err(e) => {
tracing::error!("Failed to run task: {}", e);
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(
format!("Run task failed: {e}"),
)));
}
}
});
}
}
Action::Save => {
if state.current_view == View::MetadataEdit
&& let Some(ref media) = state.selected_media {
let updates = serde_json::json!({
"title": if state.edit_title.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_title.clone()) },
"artist": if state.edit_artist.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_artist.clone()) },
"album": if state.edit_album.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_album.clone()) },
"genre": if state.edit_genre.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_genre.clone()) },
"year": state.edit_year.parse::<i32>().ok(),
"description": if state.edit_description.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(state.edit_description.clone()) },
});
let media_id = media.id.clone();
let client = client.clone();
let tx = event_sender.clone();
state.status_message = Some("Saving...".to_string());
tokio::spawn(async move {
match client.update_media(&media_id, updates).await {
Ok(_) => {
let _ = tx.send(AppEvent::ApiResult(ApiResult::MediaUpdated));
}
Err(e) => {
tracing::error!("Failed to update media: {}", e);
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!(
"Update failed: {e}"
))));
}
}
});
state.input_mode = false;
state.current_view = View::Detail;
}
}
Action::NavigateLeft | Action::NavigateRight | Action::None => {}
}
}

View file

@ -101,7 +101,10 @@ pub struct DuplicateGroupResponse {
pub items: Vec<MediaResponse>,
}
/// Background job response from the API.
/// Fields are used for deserialization; the job count is logged in the Database view.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
pub struct JobResponse {
pub id: String,
pub kind: serde_json::Value,

View file

@ -36,6 +36,11 @@ pub enum Action {
TagMedia,
UntagMedia,
Help,
Edit,
Vacuum,
Toggle,
RunNow,
Save,
Char(char),
Backspace,
None,
@ -43,11 +48,15 @@ pub enum Action {
pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Action {
if in_input_mode {
match key.code {
KeyCode::Esc => Action::Back,
KeyCode::Enter => Action::Select,
KeyCode::Char(c) => Action::Char(c),
KeyCode::Backspace => Action::Backspace,
match (key.code, key.modifiers) {
(KeyCode::Esc, _) => Action::Back,
(KeyCode::Enter, _) => Action::Select,
(KeyCode::Char('s'), KeyModifiers::CONTROL) => match current_view {
View::MetadataEdit => Action::Save,
_ => Action::Select,
},
(KeyCode::Char(c), _) => Action::Char(c),
(KeyCode::Backspace, _) => Action::Backspace,
_ => Action::None,
}
} else {
@ -70,10 +79,13 @@ pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Ac
},
(KeyCode::Char('o'), _) => Action::Open,
(KeyCode::Char('e'), _) => match current_view {
View::Detail => Action::Select,
View::Detail => Action::Edit,
_ => Action::None,
},
(KeyCode::Char('t'), _) => Action::TagView,
(KeyCode::Char('t'), _) => match current_view {
View::Tasks => Action::Toggle,
_ => Action::TagView,
},
(KeyCode::Char('c'), _) => Action::CollectionView,
(KeyCode::Char('a'), _) => Action::AuditView,
(KeyCode::Char('S'), _) => Action::SettingsView,
@ -82,11 +94,24 @@ pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Ac
(KeyCode::Char('Q'), _) => Action::QueueView,
(KeyCode::Char('X'), _) => Action::StatisticsView,
(KeyCode::Char('T'), _) => Action::TasksView,
// Ctrl+S must come before plain 's' to ensure proper precedence
(KeyCode::Char('s'), KeyModifiers::CONTROL) => match current_view {
View::MetadataEdit => Action::Save,
_ => Action::None,
},
(KeyCode::Char('s'), _) => Action::ScanTrigger,
(KeyCode::Char('r'), _) => Action::Refresh,
(KeyCode::Char('n'), _) => Action::CreateTag,
(KeyCode::Char('+'), _) => Action::TagMedia,
(KeyCode::Char('-'), _) => Action::UntagMedia,
(KeyCode::Char('v'), _) => match current_view {
View::Database => Action::Vacuum,
_ => Action::None,
},
(KeyCode::Char('x'), _) => match current_view {
View::Tasks => Action::RunNow,
_ => Action::None,
},
(KeyCode::Tab, _) => Action::NextTab,
(KeyCode::BackTab, _) => Action::PrevTab,
(KeyCode::PageUp, _) => Action::PageUp,

View file

@ -15,14 +15,17 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
} else {
let mut list_items = Vec::new();
for (i, group) in state.duplicate_groups.iter().enumerate() {
// Show truncated hash (first 16 chars) for identification
let hash_display = if group.content_hash.len() > 16 {
&group.content_hash[..16]
} else {
&group.content_hash
};
let header = format!(
"Group {} ({} items, hash: {})",
"Group {} ({} items, hash: {}...)",
i + 1,
group.len(),
group
.first()
.map(|m| m.content_hash.as_str())
.unwrap_or("?")
group.items.len(),
hash_display
);
list_items.push(ListItem::new(Line::from(Span::styled(
header,
@ -30,7 +33,7 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
.fg(Color::Yellow)
.add_modifier(Modifier::BOLD),
))));
for item in group {
for item in &group.items {
let line = format!(" {} - {}", item.file_name, item.path);
let is_selected = state
.duplicates_selected

View file

@ -37,9 +37,15 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
.map(super::format_date)
.unwrap_or("-");
let status = task.last_status.as_deref().unwrap_or("-");
// Show abbreviated task ID (first 8 chars)
let task_id_short = if task.id.len() > 8 {
&task.id[..8]
} else {
&task.id
};
let text = format!(
" {enabled_marker} {:<20} {:<16} Last: {:<12} Next: {:<12} Status: {}",
" {enabled_marker} [{task_id_short}] {:<20} {:<16} Last: {:<12} Next: {:<12} Status: {}",
task.name, task.schedule, last_run, next_run, status
);

View file

@ -48,7 +48,7 @@ pub fn App() -> Element {
let base_url =
std::env::var("PINAKES_SERVER_URL").unwrap_or_else(|_| "http://localhost:3000".into());
let api_key = std::env::var("PINAKES_API_KEY").ok();
let client = use_signal(|| ApiClient::new(&base_url, api_key.as_deref()));
let mut client = use_signal(|| ApiClient::new(&base_url, api_key.as_deref()));
let server_url = use_signal(|| base_url.clone());
let mut current_view = use_signal(|| View::Library);
@ -103,10 +103,13 @@ pub fn App() -> Element {
// Auth state
let mut auth_required = use_signal(|| false);
let mut current_user = use_signal(|| Option::<UserInfoResponse>::None);
let _login_error = use_signal(|| Option::<String>::None);
let _login_loading = use_signal(|| false);
let mut login_error = use_signal(|| Option::<String>::None);
let mut login_loading = use_signal(|| false);
let mut auto_play_media = use_signal(|| false);
// Import state for UI feedback
let mut import_in_progress = use_signal(|| false);
// Check auth on startup
let client_auth = client.read().clone();
use_effect(move || {
@ -117,10 +120,16 @@ pub fn App() -> Element {
current_user.set(Some(user));
auth_required.set(false);
}
Err(_) => {
// Check if server has accounts enabled by trying login endpoint
// If we get a 401 on /auth/me, accounts may be enabled
auth_required.set(false); // Will be set to true if needed
Err(e) => {
// Check if this is an auth error (401) vs network error
let err_str = e.to_string();
if err_str.contains("401")
|| err_str.contains("unauthorized")
|| err_str.contains("Unauthorized")
{
auth_required.set(true);
}
// For network errors, don't require auth (server offline state handles this)
}
}
// Load UI config
@ -255,6 +264,33 @@ pub fn App() -> Element {
}
};
// Login handler for auth flow
let on_login_submit = {
move |(username, password): (String, String)| {
let login_client = client.read().clone();
spawn(async move {
login_loading.set(true);
login_error.set(None);
match login_client.login(&username, &password).await {
Ok(resp) => {
// Update the signal with a new client that has the token set
client.write().set_token(&resp.token);
current_user.set(Some(UserInfoResponse {
username: resp.username,
role: resp.role,
}));
auth_required.set(false);
}
Err(e) => {
login_error.set(Some(format!("Login failed: {e}")));
}
}
login_loading.set(false);
});
}
};
let view_title = use_memo(move || current_view.read().title());
let _total_pages = use_memo(move || {
let ps = *media_page_size.read();
@ -265,8 +301,15 @@ pub fn App() -> Element {
rsx! {
style { {styles::CSS} }
// Phase 7.1: Keyboard shortcuts
div { class: "app",
if *auth_required.read() {
crate::components::login::Login {
on_login: on_login_submit,
error: login_error.read().clone(),
loading: *login_loading.read(),
}
} else {
// Phase 7.1: Keyboard shortcuts
div { class: "app",
tabindex: "0",
onkeydown: {
move |evt: KeyboardEvent| {
@ -316,7 +359,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{25a6}" }
"Library"
span { class: "nav-item-text", "Library" }
// Phase 7.2: Badge
span { class: "nav-badge", "{media_total_count}" }
}
@ -324,7 +367,7 @@ pub fn App() -> Element {
class: if *current_view.read() == View::Search { "nav-item active" } else { "nav-item" },
onclick: move |_| current_view.set(View::Search),
span { class: "nav-icon", "\u{2315}" }
"Search"
span { class: "nav-item-text", "Search" }
}
button {
class: if *current_view.read() == View::Import { "nav-item active" } else { "nav-item" },
@ -341,7 +384,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{2912}" }
"Import"
span { class: "nav-item-text", "Import" }
}
}
@ -357,7 +400,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{2605}" }
"Tags"
span { class: "nav-item-text", "Tags" }
// Phase 7.2: Badge
span { class: "nav-badge", "{tags_list.read().len()}" }
}
@ -373,7 +416,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{2630}" }
"Collections"
span { class: "nav-item-text", "Collections" }
// Phase 7.2: Badge
span { class: "nav-badge", "{collections_list.read().len()}" }
}
@ -391,7 +434,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{2637}" }
"Audit"
span { class: "nav-item-text", "Audit" }
}
button {
class: if *current_view.read() == View::Duplicates { "nav-item active" } else { "nav-item" },
@ -408,7 +451,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{2261}" }
"Duplicates"
span { class: "nav-item-text", "Duplicates" }
}
button {
class: if *current_view.read() == View::Settings { "nav-item active" } else { "nav-item" },
@ -425,7 +468,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{2699}" }
"Settings"
span { class: "nav-item-text", "Settings" }
}
button {
class: if *current_view.read() == View::Database { "nav-item active" } else { "nav-item" },
@ -442,7 +485,7 @@ pub fn App() -> Element {
}
},
span { class: "nav-icon", "\u{2750}" }
"Database"
span { class: "nav-item-text", "Database" }
}
}
@ -1142,6 +1185,7 @@ pub fn App() -> Element {
tags: tags_list.read().clone(),
collections: collections_list.read().clone(),
scan_progress: scan_progress.read().clone(),
is_importing: *import_in_progress.read(),
on_import_file: {
let client = client.read().clone();
let refresh_media = refresh_media.clone();
@ -1150,6 +1194,7 @@ pub fn App() -> Element {
let client = client.clone();
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
import_in_progress.set(true);
spawn(async move {
if tag_ids.is_empty() && new_tags.is_empty() && col_id.is_none() {
match client.import_file(&path).await {
@ -1179,6 +1224,7 @@ pub fn App() -> Element {
Err(e) => show_toast(format!("Import failed: {e}"), true),
}
}
import_in_progress.set(false);
});
}
},
@ -1190,8 +1236,8 @@ pub fn App() -> Element {
let client = client.clone();
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
import_in_progress.set(true);
spawn(async move {
show_toast("Importing directory...".into(), false);
match client.import_directory(&path, &tag_ids, &new_tags, col_id.as_deref()).await {
Ok(resp) => {
show_toast(
@ -1208,6 +1254,7 @@ pub fn App() -> Element {
}
Err(e) => show_toast(format!("Directory import failed: {e}"), true),
}
import_in_progress.set(false);
});
}
},
@ -1218,8 +1265,8 @@ pub fn App() -> Element {
move |_| {
let client = client.clone();
let refresh_media = refresh_media.clone();
import_in_progress.set(true);
spawn(async move {
show_toast("Scanning...".into(), false);
match client.trigger_scan().await {
Ok(_results) => {
// Poll scan status until done
@ -1242,6 +1289,7 @@ pub fn App() -> Element {
}
Err(e) => show_toast(format!("Scan failed: {e}"), true),
}
import_in_progress.set(false);
});
}
},
@ -1253,8 +1301,9 @@ pub fn App() -> Element {
let client = client.clone();
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
let file_count = paths.len();
import_in_progress.set(true);
spawn(async move {
show_toast(format!("Importing {} files...", paths.len()), false);
match client.batch_import(&paths, &tag_ids, &new_tags, col_id.as_deref()).await {
Ok(resp) => {
show_toast(
@ -1269,8 +1318,9 @@ pub fn App() -> Element {
preview_files.set(Vec::new());
preview_total_size.set(0);
}
Err(e) => show_toast(format!("Batch import failed: {e}"), true),
Err(e) => show_toast(format!("Batch import failed ({file_count} files): {e}"), true),
}
import_in_progress.set(false);
});
}
},
@ -1556,6 +1606,7 @@ pub fn App() -> Element {
}
}
}
} // end else (auth not required)
// Phase 1.4: Toast queue - show up to 3 stacked from bottom
div { class: "toast-container",

View file

@ -22,6 +22,7 @@ pub fn Import(
preview_files: Vec<DirectoryPreviewFile>,
preview_total_size: u64,
scan_progress: Option<ScanStatusResponse>,
#[props(default = false)] is_importing: bool,
) -> Element {
let mut import_mode = use_signal(|| 0usize);
let mut file_path = use_signal(String::new);
@ -44,6 +45,19 @@ pub fn Import(
let current_mode = *import_mode.read();
rsx! {
// Import status panel (shown when import is in progress)
if is_importing {
div { class: "import-status-panel",
div { class: "import-status-header",
div { class: "status-dot checking" }
span { "Import in progress..." }
}
div { class: "progress-bar",
div { class: "progress-fill indeterminate" }
}
}
}
// Tab bar
div { class: "import-tabs",
button {
@ -114,6 +128,7 @@ pub fn Import(
}
button {
class: "btn btn-primary",
disabled: is_importing,
onclick: {
let mut file_path = file_path;
let mut selected_tags = selected_tags;
@ -133,7 +148,7 @@ pub fn Import(
}
}
},
"Import"
if is_importing { "Importing..." } else { "Import" }
}
}
}
@ -494,7 +509,7 @@ pub fn Import(
rsx! {
button {
class: "btn btn-primary",
disabled: !has_selected,
disabled: !has_selected || is_importing,
onclick: {
let mut selected_file_paths = selected_file_paths;
let mut selected_tags = selected_tags;
@ -514,7 +529,9 @@ pub fn Import(
}
}
},
if has_selected {
if is_importing {
"Importing..."
} else if has_selected {
"Import Selected ({sel_count})"
} else {
"Import Selected"
@ -526,6 +543,7 @@ pub fn Import(
// Import entire directory
button {
class: "btn btn-secondary",
disabled: is_importing,
onclick: {
let mut dir_path = dir_path;
let mut selected_tags = selected_tags;
@ -547,7 +565,7 @@ pub fn Import(
}
}
},
"Import Entire Directory"
if is_importing { "Importing..." } else { "Import Entire Directory" }
}
}
}
@ -569,8 +587,9 @@ pub fn Import(
div { class: "mb-16", style: "text-align: center;",
button {
class: "btn btn-primary",
disabled: is_importing,
onclick: move |_| on_scan.call(()),
"Scan All Roots"
if is_importing { "Scanning..." } else { "Scan All Roots" }
}
}

View file

@ -41,14 +41,20 @@ impl Default for PlayQueue {
}
impl PlayQueue {
/// Check if the queue is empty.
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
self.items.is_empty()
}
/// Get the current item in the queue.
#[allow(dead_code)]
pub fn current(&self) -> Option<&QueueItem> {
self.items.get(self.current_index)
}
/// Advance to the next item based on repeat mode.
#[allow(dead_code)]
pub fn next(&mut self) -> Option<&QueueItem> {
if self.items.is_empty() {
return None;
@ -70,6 +76,8 @@ impl PlayQueue {
}
}
/// Go to the previous item based on repeat mode.
#[allow(dead_code)]
pub fn previous(&mut self) -> Option<&QueueItem> {
if self.items.is_empty() {
return None;
@ -82,10 +90,14 @@ impl PlayQueue {
self.items.get(self.current_index)
}
/// Add an item to the queue.
#[allow(dead_code)]
pub fn add(&mut self, item: QueueItem) {
self.items.push(item);
}
/// Remove an item from the queue by index.
#[allow(dead_code)]
pub fn remove(&mut self, index: usize) {
if index < self.items.len() {
self.items.remove(index);
@ -95,11 +107,15 @@ impl PlayQueue {
}
}
/// Clear all items from the queue.
#[allow(dead_code)]
pub fn clear(&mut self) {
self.items.clear();
self.current_index = 0;
}
/// Toggle between repeat modes: Off -> All -> One -> Off.
#[allow(dead_code)]
pub fn toggle_repeat(&mut self) {
self.repeat = match self.repeat {
RepeatMode::Off => RepeatMode::All,
@ -108,6 +124,8 @@ impl PlayQueue {
};
}
/// Toggle shuffle mode on/off.
#[allow(dead_code)]
pub fn toggle_shuffle(&mut self) {
self.shuffle = !self.shuffle;
}

View file

@ -81,6 +81,15 @@ body {
.sidebar.collapsed .nav-item { justify-content: center; padding: 8px; border-left: none; }
.sidebar.collapsed .nav-icon { width: auto; margin: 0; }
/* Nav item text - hide when collapsed */
.nav-item-text {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.sidebar.collapsed .nav-item-text { display: none; }
.sidebar-toggle {
background: none;
border: none;
@ -1550,6 +1559,34 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
transition: width 0.3s ease;
}
.progress-fill.indeterminate {
width: 30%;
animation: indeterminate 1.5s ease-in-out infinite;
}
@keyframes indeterminate {
0% { transform: translateX(-100%); }
100% { transform: translateX(400%); }
}
/* ── Import status panel ── */
.import-status-panel {
background: var(--bg-2);
border: 1px solid var(--accent);
border-radius: var(--radius);
padding: 12px 16px;
margin-bottom: 16px;
}
.import-status-header {
display: flex;
align-items: center;
gap: 8px;
margin-bottom: 8px;
font-size: 13px;
color: var(--text-0);
}
/* ── Tag confirmation ── */
.tag-confirm-delete {
display: inline-flex;
@ -2336,14 +2373,26 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
align-items: center;
gap: 6px;
font-size: 12px;
flex-wrap: wrap;
overflow: hidden;
min-width: 0;
}
.user-name {
font-weight: 500;
color: var(--text-0);
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
max-width: 90px;
flex-shrink: 1;
}
/* Hide user details in collapsed sidebar, show only logout icon */
.sidebar.collapsed .user-info .user-name,
.sidebar.collapsed .user-info .role-badge { display: none; }
.sidebar.collapsed .user-info .btn { padding: 6px; }
.role-badge {
display: inline-block;
padding: 1px 6px;