various: simplify code; work on security and performance

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9a5114addcab5fbff430ab2b919b83466a6a6964
This commit is contained in:
raf 2026-02-02 17:32:11 +03:00
commit c4adc4e3e0
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
75 changed files with 12921 additions and 358 deletions

View file

@ -0,0 +1,69 @@
//! Usage analytics and watch history tracking.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
/// A tracked usage event for a media item.
///
/// Events are append-only records; optional fields allow events that are not
/// tied to a specific media item or user (e.g. `media_id: None`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UsageEvent {
    /// Unique identifier of this event record.
    pub id: Uuid,
    /// Media item the event refers to, if any.
    pub media_id: Option<MediaId>,
    /// User who triggered the event; `None` when not attributable to a user.
    pub user_id: Option<UserId>,
    /// The kind of interaction that occurred.
    pub event_type: UsageEventType,
    /// When the event occurred (UTC).
    pub timestamp: DateTime<Utc>,
    /// Duration of the interaction in seconds, where applicable.
    pub duration_secs: Option<f64>,
    /// Optional free-form JSON payload with extra event context.
    pub context_json: Option<String>,
}
/// Types of usage events that can be tracked.
///
/// Serialized in snake_case; `Display`/`FromStr` use the same names.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum UsageEventType {
    /// Item was viewed/opened.
    View,
    /// Item playback started.
    Play,
    /// Item was exported.
    Export,
    /// Item was shared.
    Share,
    /// A search was performed (typically has no `media_id`).
    Search,
}
impl std::fmt::Display for UsageEventType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
Self::View => "view",
Self::Play => "play",
Self::Export => "export",
Self::Share => "share",
Self::Search => "search",
};
write!(f, "{s}")
}
}
impl std::str::FromStr for UsageEventType {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s {
"view" => Ok(Self::View),
"play" => Ok(Self::Play),
"export" => Ok(Self::Export),
"share" => Ok(Self::Share),
"search" => Ok(Self::Search),
_ => Err(format!("unknown usage event type: {s}")),
}
}
}
/// Watch history entry tracking progress through media.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WatchHistory {
    /// Unique identifier of this history entry.
    pub id: Uuid,
    /// User this history belongs to.
    pub user_id: UserId,
    /// Media item being watched.
    pub media_id: MediaId,
    /// Playback position reached, in seconds.
    pub progress_secs: f64,
    /// Timestamp of the most recent watch activity (UTC).
    pub last_watched: DateTime<Utc>,
}

View file

@ -45,10 +45,10 @@ where
pub async fn get(&self, key: &K) -> Option<V> {
let map = self.entries.read().await;
if let Some(entry) = map.get(key) {
if entry.inserted_at.elapsed() < self.ttl {
return Some(entry.value.clone());
}
if let Some(entry) = map.get(key)
&& entry.inserted_at.elapsed() < self.ttl
{
return Some(entry.value.clone());
}
None
}

View file

@ -20,6 +20,16 @@ pub struct Config {
pub webhooks: Vec<WebhookConfig>,
#[serde(default)]
pub scheduled_tasks: Vec<ScheduledTaskConfig>,
#[serde(default)]
pub plugins: PluginsConfig,
#[serde(default)]
pub transcoding: TranscodingConfig,
#[serde(default)]
pub enrichment: EnrichmentConfig,
#[serde(default)]
pub cloud: CloudConfig,
#[serde(default)]
pub analytics: AnalyticsConfig,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -192,6 +202,233 @@ impl std::fmt::Display for UserRole {
}
}
// ===== Plugin Configuration =====
/// Plugin system configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginsConfig {
    /// Master switch for the plugin system (off by default).
    #[serde(default)]
    pub enabled: bool,
    /// Directory for per-plugin persistent data.
    #[serde(default = "default_plugin_data_dir")]
    pub data_dir: PathBuf,
    /// Directory for per-plugin cache files.
    #[serde(default = "default_plugin_cache_dir")]
    pub cache_dir: PathBuf,
    /// Additional directories to search for plugins.
    #[serde(default)]
    pub plugin_dirs: Vec<PathBuf>,
    /// Enable hot reloading of plugins — see the plugin runtime for exact semantics.
    #[serde(default)]
    pub enable_hot_reload: bool,
    /// Permit loading plugins that are not signed.
    #[serde(default)]
    pub allow_unsigned: bool,
    /// Maximum number of plugin operations allowed to run concurrently.
    #[serde(default = "default_max_concurrent_ops")]
    pub max_concurrent_ops: usize,
    /// Per-operation plugin timeout, in seconds.
    #[serde(default = "default_plugin_timeout")]
    pub plugin_timeout_secs: u64,
}
/// Default plugin data directory: `<data_dir>/plugins/data`.
fn default_plugin_data_dir() -> PathBuf {
    Config::default_data_dir().join("plugins").join("data")
}
/// Default plugin cache directory: `<data_dir>/plugins/cache`.
fn default_plugin_cache_dir() -> PathBuf {
    Config::default_data_dir().join("plugins").join("cache")
}
/// Default cap on concurrent plugin operations.
fn default_max_concurrent_ops() -> usize {
    4
}
/// Default per-operation plugin timeout, in seconds.
fn default_plugin_timeout() -> u64 {
    30
}
impl Default for PluginsConfig {
fn default() -> Self {
Self {
enabled: false,
data_dir: default_plugin_data_dir(),
cache_dir: default_plugin_cache_dir(),
plugin_dirs: vec![],
enable_hot_reload: false,
allow_unsigned: false,
max_concurrent_ops: default_max_concurrent_ops(),
plugin_timeout_secs: default_plugin_timeout(),
}
}
}
// ===== Transcoding Configuration =====
/// Transcoding configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscodingConfig {
    /// Master switch for transcoding (off by default).
    #[serde(default)]
    pub enabled: bool,
    /// Directory for transcoded output; `None` means an implementation-chosen default.
    #[serde(default)]
    pub cache_dir: Option<PathBuf>,
    /// How long cached transcodes are kept, in hours.
    #[serde(default = "default_cache_ttl_hours")]
    pub cache_ttl_hours: u64,
    /// Maximum number of simultaneous transcode jobs.
    #[serde(default = "default_max_concurrent_transcodes")]
    pub max_concurrent: usize,
    /// Hardware acceleration backend name, if any — semantics defined by the transcoder.
    #[serde(default)]
    pub hardware_acceleration: Option<String>,
    /// Named output profiles available for transcoding.
    #[serde(default)]
    pub profiles: Vec<TranscodeProfile>,
}
/// Default transcode cache TTL: 48 hours.
fn default_cache_ttl_hours() -> u64 {
    48
}
/// Default cap on simultaneous transcode jobs.
fn default_max_concurrent_transcodes() -> usize {
    2
}
/// A named transcoding output profile.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscodeProfile {
    /// Profile name used to select it (e.g. "high", "medium").
    pub name: String,
    /// Target video codec identifier (e.g. "h264").
    pub video_codec: String,
    /// Target audio codec identifier (e.g. "aac").
    pub audio_codec: String,
    /// Maximum bitrate in kilobits per second.
    pub max_bitrate_kbps: u32,
    /// Maximum output resolution label (e.g. "1080p").
    pub max_resolution: String,
}
impl Default for TranscodingConfig {
fn default() -> Self {
Self {
enabled: false,
cache_dir: None,
cache_ttl_hours: default_cache_ttl_hours(),
max_concurrent: default_max_concurrent_transcodes(),
hardware_acceleration: None,
profiles: vec![
TranscodeProfile {
name: "high".to_string(),
video_codec: "h264".to_string(),
audio_codec: "aac".to_string(),
max_bitrate_kbps: 8000,
max_resolution: "1080p".to_string(),
},
TranscodeProfile {
name: "medium".to_string(),
video_codec: "h264".to_string(),
audio_codec: "aac".to_string(),
max_bitrate_kbps: 4000,
max_resolution: "720p".to_string(),
},
],
}
}
}
// ===== Enrichment Configuration =====
/// Metadata enrichment configuration.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct EnrichmentConfig {
    /// Master switch for enrichment (off by default).
    #[serde(default)]
    pub enabled: bool,
    /// Automatically enrich newly imported items.
    #[serde(default)]
    pub auto_enrich_on_import: bool,
    /// Per-provider source settings.
    #[serde(default)]
    pub sources: EnrichmentSources,
}
/// Settings for each supported enrichment provider.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct EnrichmentSources {
    /// MusicBrainz (audio) settings.
    #[serde(default)]
    pub musicbrainz: EnrichmentSource,
    /// TMDB (video) settings.
    #[serde(default)]
    pub tmdb: EnrichmentSource,
    /// Last.fm (audio) settings.
    #[serde(default)]
    pub lastfm: EnrichmentSource,
}
/// Configuration for a single enrichment provider.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct EnrichmentSource {
    /// Whether this provider is enabled.
    #[serde(default)]
    pub enabled: bool,
    /// API key, for providers that require one (TMDB, Last.fm).
    #[serde(default)]
    pub api_key: Option<String>,
    /// Override for the provider's API endpoint; `None` uses the built-in URL.
    #[serde(default)]
    pub api_endpoint: Option<String>,
}
// ===== Cloud Configuration =====
/// Cloud synchronization configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudConfig {
    /// Master switch for cloud sync (off by default).
    #[serde(default)]
    pub enabled: bool,
    /// Interval between automatic sync runs, in minutes.
    #[serde(default = "default_auto_sync_interval")]
    pub auto_sync_interval_mins: u64,
    /// Configured cloud provider accounts.
    #[serde(default)]
    pub accounts: Vec<CloudAccount>,
}
/// Default automatic sync interval: 60 minutes.
fn default_auto_sync_interval() -> u64 {
    60
}
/// A configured cloud provider account.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudAccount {
    /// Unique account identifier within the config.
    pub id: String,
    /// Provider name — interpreted by the cloud backend.
    pub provider: String,
    /// Whether this account participates in syncing.
    #[serde(default)]
    pub enabled: bool,
    /// Path-level sync rules for this account.
    #[serde(default)]
    pub sync_rules: Vec<CloudSyncRule>,
}
/// Maps a local path to a remote path with a sync direction.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CloudSyncRule {
    /// Local filesystem path to sync.
    pub local_path: PathBuf,
    /// Remote path on the provider (provider-specific format).
    pub remote_path: String,
    /// Which way data flows for this rule.
    pub direction: CloudSyncDirection,
}
/// Direction of data flow for a cloud sync rule.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum CloudSyncDirection {
    /// Local changes are pushed to the remote only.
    Upload,
    /// Remote changes are pulled locally only.
    Download,
    /// Changes are synchronized in both directions.
    Bidirectional,
}
impl Default for CloudConfig {
fn default() -> Self {
Self {
enabled: false,
auto_sync_interval_mins: default_auto_sync_interval(),
accounts: vec![],
}
}
}
// ===== Analytics Configuration =====
/// Usage analytics configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsConfig {
    /// Master switch for analytics (off by default).
    #[serde(default)]
    pub enabled: bool,
    /// Record usage events when analytics is enabled (defaults to true).
    #[serde(default = "default_true")]
    pub track_usage: bool,
    /// How long analytics data is retained, in days.
    #[serde(default = "default_retention_days")]
    pub retention_days: u64,
}
/// Default analytics retention period: 90 days.
fn default_retention_days() -> u64 {
    90
}
impl Default for AnalyticsConfig {
fn default() -> Self {
Self {
enabled: false,
track_usage: true,
retention_days: default_retention_days(),
}
}
}
// ===== Storage Configuration =====
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StorageConfig {
pub backend: StorageBackendType,
@ -379,6 +616,11 @@ impl Default for Config {
thumbnails: ThumbnailConfig::default(),
webhooks: vec![],
scheduled_tasks: vec![],
plugins: PluginsConfig::default(),
transcoding: TranscodingConfig::default(),
enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(),
}
}
}

View file

@ -0,0 +1,109 @@
//! Last.fm metadata enrichment for audio files.
use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
/// Enricher that queries the Last.fm `track.getInfo` API for audio metadata.
pub struct LastFmEnricher {
    // Pre-configured HTTP client with request/connect timeouts.
    client: reqwest::Client,
    // Last.fm API key sent with every request.
    api_key: String,
    // API root; a field (rather than a constant) so tests can point at a mock server.
    base_url: String,
}
impl LastFmEnricher {
pub fn new(api_key: String) -> Self {
Self {
client: reqwest::Client::builder()
.timeout(Duration::from_secs(10))
.connect_timeout(Duration::from_secs(5))
.build()
.expect("failed to build HTTP client with configured timeouts"),
api_key,
base_url: "https://ws.audioscrobbler.com/2.0".to_string(),
}
}
}
#[async_trait::async_trait]
impl MetadataEnricher for LastFmEnricher {
    fn source(&self) -> EnrichmentSourceType {
        EnrichmentSourceType::LastFm
    }

    /// Look up `item` on Last.fm via `track.getInfo`.
    ///
    /// Returns `Ok(None)` when the item lacks artist/title, on non-success
    /// HTTP status, or when Last.fm reports an error / no matching track.
    /// Returns `Err` only on transport, read, or JSON parse failures.
    async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
        // Both artist and title are required for a track.getInfo lookup.
        let artist = match &item.artist {
            Some(a) if !a.is_empty() => a,
            _ => return Ok(None),
        };
        let title = match &item.title {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(None),
        };
        // Last.fm's root endpoint takes the method as a query parameter.
        let url = format!("{}/", self.base_url);
        let resp = self
            .client
            .get(&url)
            .query(&[
                ("method", "track.getInfo"),
                ("api_key", self.api_key.as_str()),
                ("artist", artist.as_str()),
                ("track", title.as_str()),
                ("format", "json"),
            ])
            .send()
            .await
            .map_err(|e| {
                PinakesError::MetadataExtraction(format!("Last.fm request failed: {e}"))
            })?;
        // Non-success statuses are treated as "no data", not as errors.
        if !resp.status().is_success() {
            return Ok(None);
        }
        // Keep the raw body: it is stored verbatim in `metadata_json` below.
        let body = resp.text().await.map_err(|e| {
            PinakesError::MetadataExtraction(format!("Last.fm response read failed: {e}"))
        })?;
        let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
            PinakesError::MetadataExtraction(format!("Last.fm JSON parse failed: {e}"))
        })?;
        // Check for error response (Last.fm returns 200 with an "error" field).
        if json.get("error").is_some() {
            return Ok(None);
        }
        let track = match json.get("track") {
            Some(t) => t,
            None => return Ok(None),
        };
        // MusicBrainz recording ID when Last.fm provides one.
        let mbid = track.get("mbid").and_then(|m| m.as_str()).map(String::from);
        // "listeners" is delivered as a string in the JSON payload.
        let listeners = track
            .get("listeners")
            .and_then(|l| l.as_str())
            .and_then(|l| l.parse::<f64>().ok())
            .unwrap_or(0.0);
        // Normalize listeners to confidence (arbitrary scale): 1M+ listeners => 1.0.
        let confidence = (listeners / 1_000_000.0).min(1.0);
        Ok(Some(ExternalMetadata {
            id: Uuid::now_v7(),
            media_id: item.id,
            source: EnrichmentSourceType::LastFm,
            external_id: mbid,
            metadata_json: body,
            confidence,
            last_updated: Utc::now(),
        }))
    }
}

View file

@ -0,0 +1,66 @@
//! Metadata enrichment from external sources.
pub mod lastfm;
pub mod musicbrainz;
pub mod tmdb;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::error::Result;
use crate::model::{MediaId, MediaItem};
/// Externally-sourced metadata for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExternalMetadata {
    /// Unique identifier of this metadata record.
    pub id: Uuid,
    /// Media item this metadata belongs to.
    pub media_id: MediaId,
    /// Provider that produced this metadata.
    pub source: EnrichmentSourceType,
    /// Provider-specific identifier (e.g. MusicBrainz recording ID, TMDB movie ID).
    pub external_id: Option<String>,
    /// Raw provider response body, stored verbatim as JSON text.
    pub metadata_json: String,
    /// Match confidence in `[0.0, 1.0]`; each provider normalizes its own score.
    pub confidence: f64,
    /// When this metadata was last fetched (UTC).
    pub last_updated: DateTime<Utc>,
}
/// Supported enrichment data sources.
///
/// Serde renames match the `Display`/`FromStr` wire names.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum EnrichmentSourceType {
    /// MusicBrainz recording lookup (audio).
    #[serde(rename = "musicbrainz")]
    MusicBrainz,
    /// The Movie Database search (video).
    #[serde(rename = "tmdb")]
    Tmdb,
    /// Last.fm track info (audio).
    #[serde(rename = "lastfm")]
    LastFm,
}
impl std::fmt::Display for EnrichmentSourceType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
Self::MusicBrainz => "musicbrainz",
Self::Tmdb => "tmdb",
Self::LastFm => "lastfm",
};
write!(f, "{s}")
}
}
impl std::str::FromStr for EnrichmentSourceType {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s {
"musicbrainz" => Ok(Self::MusicBrainz),
"tmdb" => Ok(Self::Tmdb),
"lastfm" => Ok(Self::LastFm),
_ => Err(format!("unknown enrichment source: {s}")),
}
}
}
/// Trait for metadata enrichment providers.
#[async_trait::async_trait]
pub trait MetadataEnricher: Send + Sync {
    /// Which external source this enricher queries.
    fn source(&self) -> EnrichmentSourceType;
    /// Look up external metadata for `item`.
    ///
    /// Returns `Ok(None)` when the item cannot be matched (missing fields, no
    /// results); `Err` is reserved for transport/parse failures.
    async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>>;
}

View file

@ -0,0 +1,134 @@
//! MusicBrainz metadata enrichment for audio files.
use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
/// Enricher that searches MusicBrainz recordings for audio metadata.
pub struct MusicBrainzEnricher {
    // Pre-configured HTTP client; MusicBrainz requires a descriptive User-Agent.
    client: reqwest::Client,
    // API root; a field (rather than a constant) so tests can point at a mock server.
    base_url: String,
}
impl Default for MusicBrainzEnricher {
    /// Delegates to [`MusicBrainzEnricher::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl MusicBrainzEnricher {
pub fn new() -> Self {
Self {
client: reqwest::Client::builder()
.user_agent("Pinakes/0.1 (https://github.com/notashelf/pinakes)")
.timeout(Duration::from_secs(10))
.connect_timeout(Duration::from_secs(5))
.build()
.expect("failed to build HTTP client with configured timeouts"),
base_url: "https://musicbrainz.org/ws/2".to_string(),
}
}
}
/// Backslash-escape Lucene query-syntax metacharacters in `s` so that
/// user-supplied titles/artists cannot alter the structure of a MusicBrainz
/// search query (query injection hardening).
fn escape_lucene_query(s: &str) -> String {
    let mut escaped = String::with_capacity(s.len() * 2);
    for c in s.chars() {
        if matches!(
            c,
            '+' | '-'
                | '&'
                | '|'
                | '!'
                | '('
                | ')'
                | '{'
                | '}'
                | '['
                | ']'
                | '^'
                | '"'
                | '~'
                | '*'
                | '?'
                | ':'
                | '\\'
                | '/'
        ) {
            escaped.push('\\');
        }
        escaped.push(c);
    }
    escaped
}
#[async_trait::async_trait]
impl MetadataEnricher for MusicBrainzEnricher {
    fn source(&self) -> EnrichmentSourceType {
        EnrichmentSourceType::MusicBrainz
    }

    /// Search MusicBrainz recordings for `item` by title (and artist, if set).
    ///
    /// Returns `Ok(None)` when the item has no title, on most non-success
    /// statuses, or when the search yields no recordings. HTTP 429/503 are
    /// surfaced as `Err` so callers can distinguish rate limiting and retry.
    async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
        // A title is the minimum needed for a recording search.
        let title = match &item.title {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(None),
        };
        // Build a Lucene query; escape user-controlled text to prevent
        // query-syntax injection.
        let mut query = format!("recording:{}", escape_lucene_query(title));
        if let Some(ref artist) = item.artist {
            query.push_str(&format!(" AND artist:{}", escape_lucene_query(artist)));
        }
        let url = format!("{}/recording/", self.base_url);
        let resp = self
            .client
            .get(&url)
            .query(&[
                ("query", &query),
                ("fmt", &"json".to_string()),
                // Only the best match is needed.
                ("limit", &"1".to_string()),
            ])
            .send()
            .await
            .map_err(|e| {
                PinakesError::MetadataExtraction(format!("MusicBrainz request failed: {e}"))
            })?;
        if !resp.status().is_success() {
            let status = resp.status();
            // 429/503 are MusicBrainz's rate-limit signals: report as an
            // error so the caller can back off instead of caching "no data".
            if status == reqwest::StatusCode::TOO_MANY_REQUESTS
                || status == reqwest::StatusCode::SERVICE_UNAVAILABLE
            {
                return Err(PinakesError::MetadataExtraction(format!(
                    "MusicBrainz rate limited (HTTP {})",
                    status.as_u16()
                )));
            }
            return Ok(None);
        }
        // Keep the raw body: it is stored verbatim in `metadata_json` below.
        let body = resp.text().await.map_err(|e| {
            PinakesError::MetadataExtraction(format!("MusicBrainz response read failed: {e}"))
        })?;
        // Parse to check if we got results
        let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
            PinakesError::MetadataExtraction(format!("MusicBrainz JSON parse failed: {e}"))
        })?;
        let recordings = json.get("recordings").and_then(|r| r.as_array());
        if recordings.is_none_or(|r| r.is_empty()) {
            return Ok(None);
        }
        // Safe: the emptiness/None check above guarantees at least one element.
        let recording = &recordings.unwrap()[0];
        let external_id = recording
            .get("id")
            .and_then(|id| id.as_str())
            .map(String::from);
        // MusicBrainz scores matches 0-100; normalize to 0.0-1.0.
        let score = recording
            .get("score")
            .and_then(|s| s.as_f64())
            .unwrap_or(0.0)
            / 100.0;
        Ok(Some(ExternalMetadata {
            id: Uuid::now_v7(),
            media_id: item.id,
            source: EnrichmentSourceType::MusicBrainz,
            external_id,
            metadata_json: body,
            confidence: score,
            last_updated: Utc::now(),
        }))
    }
}

View file

@ -0,0 +1,109 @@
//! TMDB (The Movie Database) metadata enrichment for video files.
use std::time::Duration;
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
/// Enricher that searches TMDB (The Movie Database) for video metadata.
pub struct TmdbEnricher {
    // Pre-configured HTTP client with request/connect timeouts.
    client: reqwest::Client,
    // TMDB v3 API key sent with every request.
    api_key: String,
    // API root; a field (rather than a constant) so tests can point at a mock server.
    base_url: String,
}
impl TmdbEnricher {
pub fn new(api_key: String) -> Self {
Self {
client: reqwest::Client::builder()
.timeout(Duration::from_secs(10))
.connect_timeout(Duration::from_secs(5))
.build()
.expect("failed to build HTTP client with configured timeouts"),
api_key,
base_url: "https://api.themoviedb.org/3".to_string(),
}
}
}
#[async_trait::async_trait]
impl MetadataEnricher for TmdbEnricher {
    fn source(&self) -> EnrichmentSourceType {
        EnrichmentSourceType::Tmdb
    }

    /// Search TMDB movies for `item` by title and take the first result.
    ///
    /// Returns `Ok(None)` when the item has no title, on rate limiting (429,
    /// logged), on other non-success statuses, or when no results match.
    /// HTTP 401 is surfaced as `Err` — a bad API key is a config problem the
    /// operator must fix, not a transient miss.
    async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
        let title = match &item.title {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(None),
        };
        let url = format!("{}/search/movie", self.base_url);
        let resp = self
            .client
            .get(&url)
            .query(&[
                ("api_key", &self.api_key),
                ("query", &title.to_string()),
                ("page", &"1".to_string()),
            ])
            .send()
            .await
            .map_err(|e| PinakesError::MetadataExtraction(format!("TMDB request failed: {e}")))?;
        if !resp.status().is_success() {
            let status = resp.status();
            if status == reqwest::StatusCode::UNAUTHORIZED {
                return Err(PinakesError::MetadataExtraction(
                    "TMDB API key is invalid (401)".into(),
                ));
            }
            if status == reqwest::StatusCode::TOO_MANY_REQUESTS {
                tracing::warn!("TMDB rate limit exceeded (429)");
                return Ok(None);
            }
            tracing::debug!(status = %status, "TMDB search returned non-success status");
            return Ok(None);
        }
        // Keep the raw body: it is stored verbatim in `metadata_json` below.
        let body = resp.text().await.map_err(|e| {
            PinakesError::MetadataExtraction(format!("TMDB response read failed: {e}"))
        })?;
        let json: serde_json::Value = serde_json::from_str(&body).map_err(|e| {
            PinakesError::MetadataExtraction(format!("TMDB JSON parse failed: {e}"))
        })?;
        let results = json.get("results").and_then(|r| r.as_array());
        if results.is_none_or(|r| r.is_empty()) {
            return Ok(None);
        }
        // Safe: the emptiness/None check above guarantees at least one element.
        let movie = &results.unwrap()[0];
        // A result without a numeric id is unusable as external metadata.
        let external_id = match movie.get("id").and_then(|id| id.as_i64()) {
            Some(id) => id.to_string(),
            None => return Ok(None),
        };
        let popularity = movie
            .get("popularity")
            .and_then(|p| p.as_f64())
            .unwrap_or(0.0);
        // Normalize popularity to 0-1 range (TMDB popularity can be very high)
        let confidence = (popularity / 100.0).min(1.0);
        Ok(Some(ExternalMetadata {
            id: Uuid::now_v7(),
            media_id: item.id,
            source: EnrichmentSourceType::Tmdb,
            external_id: Some(external_id),
            metadata_json: body,
            confidence,
            last_updated: Utc::now(),
        }))
    }
}

View file

@ -42,6 +42,12 @@ pub enum PinakesError {
#[error("invalid operation: {0}")]
InvalidOperation(String),
#[error("authentication error: {0}")]
Authentication(String),
#[error("authorization error: {0}")]
Authorization(String),
}
impl From<rusqlite::Error> for PinakesError {
@ -56,4 +62,10 @@ impl From<tokio_postgres::Error> for PinakesError {
}
}
impl From<serde_json::Error> for PinakesError {
    /// Wrap serde_json failures in a formatted `Database` error.
    ///
    /// NOTE(review): mapping all JSON errors to the `Database` variant assumes
    /// serde_json is only exercised on persisted payloads — confirm callers.
    fn from(e: serde_json::Error) -> Self {
        Self::Database(format!("JSON serialization error: {}", e))
    }
}
pub type Result<T> = std::result::Result<T, PinakesError>;

View file

@ -27,6 +27,27 @@ pub enum PinakesEvent {
expected: String,
actual: String,
},
MediaRated {
media_id: String,
user_id: String,
stars: u8,
},
MediaCommented {
media_id: String,
user_id: String,
},
PlaylistCreated {
playlist_id: String,
owner_id: String,
},
TranscodeStarted {
media_id: String,
profile: String,
},
TranscodeCompleted {
media_id: String,
profile: String,
},
}
impl PinakesEvent {
@ -37,6 +58,11 @@ impl PinakesEvent {
Self::MediaDeleted { .. } => "media_deleted",
Self::ScanCompleted { .. } => "scan_completed",
Self::IntegrityMismatch { .. } => "integrity_mismatch",
Self::MediaRated { .. } => "media_rated",
Self::MediaCommented { .. } => "media_commented",
Self::PlaylistCreated { .. } => "playlist_created",
Self::TranscodeStarted { .. } => "transcode_started",
Self::TranscodeCompleted { .. } => "transcode_completed",
}
}
}

View file

@ -23,7 +23,7 @@ pub async fn export_library(
limit: u64::MAX,
sort: None,
};
let items = storage.list_media(&&pagination).await?;
let items = storage.list_media(&pagination).await?;
let count = items.len();
match format {

View file

@ -64,9 +64,12 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
let extracted = {
let path_clone = path.clone();
tokio::task::spawn_blocking(move || metadata::extract_metadata(&path_clone, media_type))
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
let media_type_clone = media_type.clone();
tokio::task::spawn_blocking(move || {
metadata::extract_metadata(&path_clone, media_type_clone)
})
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
};
let file_name = path
@ -82,8 +85,9 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
let thumb_path = {
let source = path.clone();
let thumb_dir = thumbnail::default_thumbnail_dir();
let media_type_clone = media_type.clone();
tokio::task::spawn_blocking(move || {
thumbnail::generate_thumbnail(media_id, &source, media_type, &thumb_dir)
thumbnail::generate_thumbnail(media_id, &source, media_type_clone, &thumb_dir)
})
.await
.map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??

View file

@ -184,13 +184,13 @@ pub async fn cleanup_orphaned_thumbnails(
let entries = std::fs::read_dir(thumbnail_dir)?;
for entry in entries.flatten() {
let path = entry.path();
if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
if !known_ids.contains(stem) {
if let Err(e) = std::fs::remove_file(&path) {
warn!(path = %path.display(), error = %e, "failed to remove orphaned thumbnail");
} else {
removed += 1;
}
if let Some(stem) = path.file_stem().and_then(|s| s.to_str())
&& !known_ids.contains(stem)
{
if let Err(e) = std::fs::remove_file(&path) {
warn!(path = %path.display(), error = %e, "failed to remove orphaned thumbnail");
} else {
removed += 1;
}
}
}

View file

@ -29,6 +29,14 @@ pub enum JobKind {
format: ExportFormat,
destination: PathBuf,
},
Transcode {
media_id: MediaId,
profile: String,
},
Enrich {
media_ids: Vec<MediaId>,
},
CleanupAnalytics,
}
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -1,7 +1,9 @@
pub mod analytics;
pub mod audit;
pub mod cache;
pub mod collections;
pub mod config;
pub mod enrichment;
pub mod error;
pub mod events;
pub mod export;
@ -13,9 +15,15 @@ pub mod media_type;
pub mod metadata;
pub mod model;
pub mod opener;
pub mod playlists;
pub mod plugin;
pub mod scan;
pub mod scheduler;
pub mod search;
pub mod social;
pub mod storage;
pub mod subtitles;
pub mod tags;
pub mod thumbnail;
pub mod transcode;
pub mod users;

View file

@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MediaType {
pub enum BuiltinMediaType {
// Audio
Mp3,
Flac,
@ -60,7 +60,48 @@ pub enum MediaCategory {
Image,
}
impl MediaType {
impl BuiltinMediaType {
/// Get the unique ID for this media type
///
/// Derived from the variant's `Debug` name, lowercased (e.g. `Mp3` -> "mp3").
/// Renaming a variant therefore changes its persisted ID.
pub fn id(&self) -> String {
    format!("{:?}", self).to_lowercase()
}
/// Get the display name for this media type
///
/// The labels are static; only the final conversion allocates.
pub fn name(&self) -> String {
    let label = match self {
        Self::Mp3 => "MP3 Audio",
        Self::Flac => "FLAC Audio",
        Self::Ogg => "OGG Audio",
        Self::Wav => "WAV Audio",
        Self::Aac => "AAC Audio",
        Self::Opus => "Opus Audio",
        Self::Mp4 => "MP4 Video",
        Self::Mkv => "MKV Video",
        Self::Avi => "AVI Video",
        Self::Webm => "WebM Video",
        Self::Pdf => "PDF Document",
        Self::Epub => "EPUB eBook",
        Self::Djvu => "DjVu Document",
        Self::Markdown => "Markdown",
        Self::PlainText => "Plain Text",
        Self::Jpeg => "JPEG Image",
        Self::Png => "PNG Image",
        Self::Gif => "GIF Image",
        Self::Webp => "WebP Image",
        Self::Svg => "SVG Image",
        Self::Avif => "AVIF Image",
        Self::Tiff => "TIFF Image",
        Self::Bmp => "BMP Image",
        Self::Cr2 => "Canon RAW (CR2)",
        Self::Nef => "Nikon RAW (NEF)",
        Self::Arw => "Sony RAW (ARW)",
        Self::Dng => "Adobe DNG RAW",
        Self::Orf => "Olympus RAW (ORF)",
        Self::Rw2 => "Panasonic RAW (RW2)",
        Self::Heic => "HEIC Image",
    };
    label.to_string()
}
pub fn from_extension(ext: &str) -> Option<Self> {
match ext.to_ascii_lowercase().as_str() {
"mp3" => Some(Self::Mp3),

View file

@ -0,0 +1,232 @@
//! Extensible media type system
//!
//! This module provides an extensible media type system that supports both
//! built-in media types and plugin-registered custom types.
use serde::{Deserialize, Serialize};
use std::path::Path;
pub mod builtin;
pub mod registry;
pub use builtin::{BuiltinMediaType, MediaCategory};
pub use registry::{MediaTypeDescriptor, MediaTypeRegistry};
/// Media type identifier - can be either built-in or custom
///
/// `#[serde(untagged)]` keeps the builtin wire format unchanged, so existing
/// persisted data deserializes as before; unknown strings become `Custom`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(untagged)]
pub enum MediaType {
    /// Built-in media type (backward compatible)
    Builtin(BuiltinMediaType),
    /// Custom media type from a plugin
    Custom(String),
}
impl MediaType {
/// Create a new custom media type
pub fn custom(id: impl Into<String>) -> Self {
Self::Custom(id.into())
}
/// Get the type ID as a string
pub fn id(&self) -> String {
match self {
Self::Builtin(b) => b.id(),
Self::Custom(id) => id.clone(),
}
}
/// Get the display name for this media type
/// For custom types without a registry, returns the ID as the name
pub fn name(&self) -> String {
match self {
Self::Builtin(b) => b.name(),
Self::Custom(id) => id.clone(),
}
}
/// Get the display name for this media type with registry support
pub fn name_with_registry(&self, registry: &MediaTypeRegistry) -> String {
match self {
Self::Builtin(b) => b.name(),
Self::Custom(id) => registry
.get(id)
.map(|d| d.name.clone())
.unwrap_or_else(|| id.clone()),
}
}
/// Get the category for this media type
/// For custom types without a registry, returns MediaCategory::Document as default
pub fn category(&self) -> MediaCategory {
match self {
Self::Builtin(b) => b.category(),
Self::Custom(_) => MediaCategory::Document,
}
}
/// Get the category for this media type with registry support
pub fn category_with_registry(&self, registry: &MediaTypeRegistry) -> MediaCategory {
match self {
Self::Builtin(b) => b.category(),
Self::Custom(id) => registry
.get(id)
.and_then(|d| d.category)
.unwrap_or(MediaCategory::Document),
}
}
/// Get the MIME type
/// For custom types without a registry, returns "application/octet-stream"
pub fn mime_type(&self) -> String {
match self {
Self::Builtin(b) => b.mime_type().to_string(),
Self::Custom(_) => "application/octet-stream".to_string(),
}
}
/// Get the MIME type with registry support
pub fn mime_type_with_registry(&self, registry: &MediaTypeRegistry) -> String {
match self {
Self::Builtin(b) => b.mime_type().to_string(),
Self::Custom(id) => registry
.get(id)
.and_then(|d| d.mime_types.first().cloned())
.unwrap_or_else(|| "application/octet-stream".to_string()),
}
}
/// Get file extensions
/// For custom types without a registry, returns an empty vec
pub fn extensions(&self) -> Vec<String> {
match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(),
Self::Custom(_) => vec![],
}
}
/// Get file extensions with registry support
pub fn extensions_with_registry(&self, registry: &MediaTypeRegistry) -> Vec<String> {
match self {
Self::Builtin(b) => b.extensions().iter().map(|s| s.to_string()).collect(),
Self::Custom(id) => registry
.get(id)
.map(|d| d.extensions.clone())
.unwrap_or_default(),
}
}
/// Check if this is a RAW image format
pub fn is_raw(&self) -> bool {
match self {
Self::Builtin(b) => b.is_raw(),
Self::Custom(_) => false,
}
}
/// Resolve a media type from file extension (built-in types only)
/// Use from_extension_with_registry for custom types
pub fn from_extension(ext: &str) -> Option<Self> {
BuiltinMediaType::from_extension(ext).map(Self::Builtin)
}
/// Resolve a media type from file extension with registry (includes custom types)
pub fn from_extension_with_registry(ext: &str, registry: &MediaTypeRegistry) -> Option<Self> {
// Try built-in types first
if let Some(builtin) = BuiltinMediaType::from_extension(ext) {
return Some(Self::Builtin(builtin));
}
// Try registered custom types
registry
.get_by_extension(ext)
.map(|desc| Self::Custom(desc.id.clone()))
}
/// Resolve a media type from file path (built-in types only)
/// Use from_path_with_registry for custom types
pub fn from_path(path: &Path) -> Option<Self> {
path.extension()
.and_then(|e| e.to_str())
.and_then(Self::from_extension)
}
/// Resolve a media type from file path with registry (includes custom types)
pub fn from_path_with_registry(path: &Path, registry: &MediaTypeRegistry) -> Option<Self> {
path.extension()
.and_then(|e| e.to_str())
.and_then(|ext| Self::from_extension_with_registry(ext, registry))
}
}
// Implement From<BuiltinMediaType> for easier conversion
impl From<BuiltinMediaType> for MediaType {
fn from(builtin: BuiltinMediaType) -> Self {
Self::Builtin(builtin)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // Builtin types answer id/mime/category without a registry.
    #[test]
    fn test_builtin_media_type() {
        let mt = MediaType::Builtin(BuiltinMediaType::Mp3);
        assert_eq!(mt.id(), "mp3");
        assert_eq!(mt.mime_type(), "audio/mpeg");
        assert_eq!(mt.category(), MediaCategory::Audio);
    }

    // Custom types resolve MIME/category through a registry descriptor.
    #[test]
    fn test_custom_media_type() {
        let mut registry = MediaTypeRegistry::new();
        let descriptor = MediaTypeDescriptor {
            id: "heif".to_string(),
            name: "HEIF Image".to_string(),
            category: Some(MediaCategory::Image),
            extensions: vec!["heif".to_string()],
            mime_types: vec!["image/heif".to_string()],
            plugin_id: Some("heif-plugin".to_string()),
        };
        registry.register(descriptor).unwrap();
        let mt = MediaType::custom("heif");
        assert_eq!(mt.id(), "heif");
        assert_eq!(mt.mime_type_with_registry(&registry), "image/heif");
        assert_eq!(mt.category_with_registry(&registry), MediaCategory::Image);
    }

    // Builtin extension lookup works even with an empty registry.
    #[test]
    fn test_from_extension_builtin() {
        let registry = MediaTypeRegistry::new();
        let mt = MediaType::from_extension_with_registry("mp3", &registry);
        assert!(mt.is_some());
        assert_eq!(mt.unwrap(), MediaType::Builtin(BuiltinMediaType::Mp3));
    }

    // Unknown extensions fall through to registry-provided custom types.
    #[test]
    fn test_from_extension_custom() {
        let mut registry = MediaTypeRegistry::new();
        let descriptor = MediaTypeDescriptor {
            id: "customformat".to_string(),
            name: "Custom Format".to_string(),
            category: Some(MediaCategory::Image),
            extensions: vec!["xyz".to_string()],
            mime_types: vec!["application/x-custom".to_string()],
            plugin_id: Some("custom-plugin".to_string()),
        };
        registry.register(descriptor).unwrap();
        let mt = MediaType::from_extension_with_registry("xyz", &registry);
        assert!(mt.is_some());
        assert_eq!(mt.unwrap(), MediaType::custom("customformat"));
    }
}

View file

@ -0,0 +1,285 @@
//! Media type registry for managing both built-in and custom media types
use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use super::MediaCategory;
/// Descriptor for a media type (built-in or custom)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MediaTypeDescriptor {
    /// Unique identifier
    pub id: String,
    /// Display name
    pub name: String,
    /// Category (None when the registering party did not specify one)
    pub category: Option<MediaCategory>,
    /// File extensions (matched case-insensitively by the registry)
    pub extensions: Vec<String>,
    /// MIME types; the first entry is treated as primary by consumers
    pub mime_types: Vec<String>,
    /// Plugin that registered this type (None for built-in types)
    pub plugin_id: Option<String>,
}
/// Registry for media types
///
/// Invariant: every value in `extension_map` is a key of `types`; `register`
/// and `unregister` maintain this together.
#[derive(Debug, Clone)]
pub struct MediaTypeRegistry {
    /// Map of media type ID to descriptor
    types: HashMap<String, MediaTypeDescriptor>,
    /// Map of (lowercased) extension to media type ID
    extension_map: HashMap<String, String>,
}
impl MediaTypeRegistry {
    /// Create a new empty registry
    pub fn new() -> Self {
        Self {
            types: HashMap::new(),
            extension_map: HashMap::new(),
        }
    }

    /// Register a new media type
    ///
    /// Fails only on a duplicate ID. Extensions already claimed by another
    /// type are silently skipped (first registration wins), so registration
    /// order matters for conflicting extensions.
    pub fn register(&mut self, descriptor: MediaTypeDescriptor) -> Result<()> {
        // Check if ID is already registered
        if self.types.contains_key(&descriptor.id) {
            return Err(anyhow!("Media type already registered: {}", descriptor.id));
        }
        // Register extensions (lowercased, to match case-insensitive lookup)
        for ext in &descriptor.extensions {
            let ext_lower = ext.to_lowercase();
            if self.extension_map.contains_key(&ext_lower) {
                // Extension already registered - this is OK, we'll use the first one
                // In a more sophisticated system, we might track multiple types per extension
                continue;
            }
            self.extension_map.insert(ext_lower, descriptor.id.clone());
        }
        // Register the type
        self.types.insert(descriptor.id.clone(), descriptor);
        Ok(())
    }

    /// Unregister a media type
    ///
    /// Removes only extension entries that still point at this ID, so
    /// extensions claimed earlier by another type are left untouched.
    pub fn unregister(&mut self, id: &str) -> Result<()> {
        let descriptor = self
            .types
            .remove(id)
            .ok_or_else(|| anyhow!("Media type not found: {}", id))?;
        // Remove extensions
        for ext in &descriptor.extensions {
            let ext_lower = ext.to_lowercase();
            if self.extension_map.get(&ext_lower) == Some(&descriptor.id) {
                self.extension_map.remove(&ext_lower);
            }
        }
        Ok(())
    }

    /// Get a media type descriptor by ID
    pub fn get(&self, id: &str) -> Option<&MediaTypeDescriptor> {
        self.types.get(id)
    }

    /// Get a media type by file extension (case-insensitive)
    pub fn get_by_extension(&self, ext: &str) -> Option<&MediaTypeDescriptor> {
        let ext_lower = ext.to_lowercase();
        self.extension_map
            .get(&ext_lower)
            .and_then(|id| self.types.get(id))
    }

    /// List all registered media types (in arbitrary HashMap order)
    pub fn list_all(&self) -> Vec<&MediaTypeDescriptor> {
        self.types.values().collect()
    }

    /// List media types from a specific plugin
    pub fn list_by_plugin(&self, plugin_id: &str) -> Vec<&MediaTypeDescriptor> {
        self.types
            .values()
            .filter(|d| d.plugin_id.as_deref() == Some(plugin_id))
            .collect()
    }

    /// List built-in media types (plugin_id is None)
    pub fn list_builtin(&self) -> Vec<&MediaTypeDescriptor> {
        self.types
            .values()
            .filter(|d| d.plugin_id.is_none())
            .collect()
    }

    /// Get count of registered types
    pub fn count(&self) -> usize {
        self.types.len()
    }

    /// Check if a media type is registered
    pub fn contains(&self, id: &str) -> bool {
        self.types.contains_key(id)
    }

    /// Unregister all types from a specific plugin
    ///
    /// Returns the number of types removed. IDs are collected first so the
    /// map is not mutated while being iterated.
    pub fn unregister_plugin(&mut self, plugin_id: &str) -> Result<usize> {
        let type_ids: Vec<String> = self
            .types
            .values()
            .filter(|d| d.plugin_id.as_deref() == Some(plugin_id))
            .map(|d| d.id.clone())
            .collect();
        let count = type_ids.len();
        for id in type_ids {
            self.unregister(&id)?;
        }
        Ok(count)
    }
}
impl Default for MediaTypeRegistry {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a minimal descriptor owned by "test-plugin" with one extension.
    fn create_test_descriptor(id: &str, ext: &str) -> MediaTypeDescriptor {
        MediaTypeDescriptor {
            id: id.to_string(),
            name: format!("{} Type", id),
            category: Some(MediaCategory::Document),
            extensions: vec![ext.to_string()],
            mime_types: vec![format!("application/{}", id)],
            plugin_id: Some("test-plugin".to_string()),
        }
    }

    /// Registering a type makes it retrievable by ID.
    #[test]
    fn test_register_and_get() {
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor.clone()).unwrap();
        let retrieved = registry.get("test").unwrap();
        assert_eq!(retrieved.id, "test");
        assert_eq!(retrieved.name, "test Type");
    }

    /// Registering the same ID twice must fail.
    #[test]
    fn test_register_duplicate() {
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor.clone()).unwrap();
        let result = registry.register(descriptor);
        assert!(result.is_err());
    }

    /// Extension lookup works and is case-insensitive.
    #[test]
    fn test_get_by_extension() {
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor).unwrap();
        let retrieved = registry.get_by_extension("tst").unwrap();
        assert_eq!(retrieved.id, "test");
        // Test case insensitivity
        let retrieved = registry.get_by_extension("TST").unwrap();
        assert_eq!(retrieved.id, "test");
    }

    /// Unregistering removes both the type and its extension mapping.
    #[test]
    fn test_unregister() {
        let mut registry = MediaTypeRegistry::new();
        let descriptor = create_test_descriptor("test", "tst");
        registry.register(descriptor).unwrap();
        assert!(registry.contains("test"));
        registry.unregister("test").unwrap();
        assert!(!registry.contains("test"));
        // Extension should also be removed
        assert!(registry.get_by_extension("tst").is_none());
    }

    /// Types can be filtered by the plugin that registered them.
    #[test]
    fn test_list_by_plugin() {
        let mut registry = MediaTypeRegistry::new();
        let desc1 = MediaTypeDescriptor {
            id: "type1".to_string(),
            name: "Type 1".to_string(),
            category: Some(MediaCategory::Document),
            extensions: vec!["t1".to_string()],
            mime_types: vec!["application/type1".to_string()],
            plugin_id: Some("plugin1".to_string()),
        };
        let desc2 = MediaTypeDescriptor {
            id: "type2".to_string(),
            name: "Type 2".to_string(),
            category: Some(MediaCategory::Document),
            extensions: vec!["t2".to_string()],
            mime_types: vec!["application/type2".to_string()],
            plugin_id: Some("plugin2".to_string()),
        };
        registry.register(desc1).unwrap();
        registry.register(desc2).unwrap();
        let plugin1_types = registry.list_by_plugin("plugin1");
        assert_eq!(plugin1_types.len(), 1);
        assert_eq!(plugin1_types[0].id, "type1");
        let plugin2_types = registry.list_by_plugin("plugin2");
        assert_eq!(plugin2_types.len(), 1);
        assert_eq!(plugin2_types[0].id, "type2");
    }

    /// Unregistering a plugin removes all of its types and reports the count.
    #[test]
    fn test_unregister_plugin() {
        let mut registry = MediaTypeRegistry::new();
        for i in 1..=3 {
            let desc = MediaTypeDescriptor {
                id: format!("type{}", i),
                name: format!("Type {}", i),
                category: Some(MediaCategory::Document),
                extensions: vec![format!("t{}", i)],
                mime_types: vec![format!("application/type{}", i)],
                plugin_id: Some("test-plugin".to_string()),
            };
            registry.register(desc).unwrap();
        }
        assert_eq!(registry.count(), 3);
        let removed = registry.unregister_plugin("test-plugin").unwrap();
        assert_eq!(removed, 3);
        assert_eq!(registry.count(), 0);
    }
}

View file

@ -4,7 +4,7 @@ use lofty::file::{AudioFile, TaggedFileExt};
use lofty::tag::Accessor;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -68,14 +68,14 @@ impl MetadataExtractor for AudioExtractor {
Ok(meta)
}
fn supported_types(&self) -> &[MediaType] {
&[
MediaType::Mp3,
MediaType::Flac,
MediaType::Ogg,
MediaType::Wav,
MediaType::Aac,
MediaType::Opus,
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp3),
MediaType::Builtin(BuiltinMediaType::Flac),
MediaType::Builtin(BuiltinMediaType::Ogg),
MediaType::Builtin(BuiltinMediaType::Wav),
MediaType::Builtin(BuiltinMediaType::Aac),
MediaType::Builtin(BuiltinMediaType::Opus),
]
}
}

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -10,15 +10,19 @@ pub struct DocumentExtractor;
impl MetadataExtractor for DocumentExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Pdf) => extract_pdf(path),
Some(MediaType::Epub) => extract_epub(path),
Some(MediaType::Djvu) => extract_djvu(path),
Some(MediaType::Builtin(BuiltinMediaType::Pdf)) => extract_pdf(path),
Some(MediaType::Builtin(BuiltinMediaType::Epub)) => extract_epub(path),
Some(MediaType::Builtin(BuiltinMediaType::Djvu)) => extract_djvu(path),
_ => Ok(ExtractedMetadata::default()),
}
}
fn supported_types(&self) -> &[MediaType] {
&[MediaType::Pdf, MediaType::Epub, MediaType::Djvu]
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Pdf),
MediaType::Builtin(BuiltinMediaType::Epub),
MediaType::Builtin(BuiltinMediaType::Djvu),
]
}
}

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -163,24 +163,24 @@ impl MetadataExtractor for ImageExtractor {
Ok(meta)
}
fn supported_types(&self) -> &[MediaType] {
&[
MediaType::Jpeg,
MediaType::Png,
MediaType::Gif,
MediaType::Webp,
MediaType::Avif,
MediaType::Tiff,
MediaType::Bmp,
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Jpeg),
MediaType::Builtin(BuiltinMediaType::Png),
MediaType::Builtin(BuiltinMediaType::Gif),
MediaType::Builtin(BuiltinMediaType::Webp),
MediaType::Builtin(BuiltinMediaType::Avif),
MediaType::Builtin(BuiltinMediaType::Tiff),
MediaType::Builtin(BuiltinMediaType::Bmp),
// RAW formats (TIFF-based, kamadak-exif handles these)
MediaType::Cr2,
MediaType::Nef,
MediaType::Arw,
MediaType::Dng,
MediaType::Orf,
MediaType::Rw2,
MediaType::Builtin(BuiltinMediaType::Cr2),
MediaType::Builtin(BuiltinMediaType::Nef),
MediaType::Builtin(BuiltinMediaType::Arw),
MediaType::Builtin(BuiltinMediaType::Dng),
MediaType::Builtin(BuiltinMediaType::Orf),
MediaType::Builtin(BuiltinMediaType::Rw2),
// HEIC
MediaType::Heic,
MediaType::Builtin(BuiltinMediaType::Heic),
]
}
}

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -34,7 +34,10 @@ impl MetadataExtractor for MarkdownExtractor {
Ok(meta)
}
fn supported_types(&self) -> &[MediaType] {
&[MediaType::Markdown, MediaType::PlainText]
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Markdown),
MediaType::Builtin(BuiltinMediaType::PlainText),
]
}
}

View file

@ -24,7 +24,7 @@ pub struct ExtractedMetadata {
pub trait MetadataExtractor: Send + Sync {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata>;
fn supported_types(&self) -> &[MediaType];
fn supported_types(&self) -> Vec<MediaType>;
}
pub fn extract_metadata(path: &Path, media_type: MediaType) -> Result<ExtractedMetadata> {

View file

@ -1,7 +1,7 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
@ -10,18 +10,16 @@ pub struct VideoExtractor;
impl MetadataExtractor for VideoExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Mkv) => extract_mkv(path),
Some(MediaType::Mp4) => extract_mp4(path),
Some(MediaType::Builtin(BuiltinMediaType::Mkv)) => extract_mkv(path),
Some(MediaType::Builtin(BuiltinMediaType::Mp4)) => extract_mp4(path),
_ => Ok(ExtractedMetadata::default()),
}
}
fn supported_types(&self) -> &[MediaType] {
&[
MediaType::Mp4,
MediaType::Mkv,
MediaType::Avi,
MediaType::Webm,
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp4),
MediaType::Builtin(BuiltinMediaType::Mkv),
]
}
}

View file

@ -0,0 +1,31 @@
//! Playlist management: ordered collections of media items.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
/// A user-owned playlist of media items.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Playlist {
    /// Unique playlist identifier.
    pub id: Uuid,
    /// User that owns this playlist.
    pub owner_id: UserId,
    /// Display name.
    pub name: String,
    /// Optional free-form description.
    pub description: Option<String>,
    /// Whether the playlist is visible to other users.
    pub is_public: bool,
    /// Whether this is a "smart" playlist whose contents come from
    /// `filter_query` rather than manual curation.
    // NOTE(review): semantics inferred from field names only — confirm
    // against the code that evaluates `filter_query`.
    pub is_smart: bool,
    /// Filter expression for smart playlists; None for manual playlists.
    pub filter_query: Option<String>,
    /// Creation timestamp (UTC).
    pub created_at: DateTime<Utc>,
    /// Last-modification timestamp (UTC).
    pub updated_at: DateTime<Utc>,
}
/// An item within a playlist at a specific position.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlaylistItem {
    /// Playlist this item belongs to.
    pub playlist_id: Uuid,
    /// Media item referenced by this entry.
    pub media_id: MediaId,
    /// Ordering index within the playlist.
    // NOTE(review): whether this is zero- or one-based is not visible here —
    // confirm against the insertion code.
    pub position: i32,
    /// When the item was added to the playlist (UTC).
    pub added_at: DateTime<Utc>,
}

View file

@ -0,0 +1,407 @@
//! Plugin loader for discovering and loading plugins from the filesystem
use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginManifest;
use std::path::{Path, PathBuf};
use tracing::{debug, info, warn};
use walkdir::WalkDir;
/// Plugin loader handles discovery and loading of plugins from directories
///
/// Also responsible for downloading plugin archives over HTTPS and for
/// validating plugin packages on disk.
pub struct PluginLoader {
    /// Directories to search for plugins. The first entry is also used as
    /// the destination for downloaded plugins (see `download_plugin`).
    plugin_dirs: Vec<PathBuf>,
}
impl PluginLoader {
    /// Create a new plugin loader that searches the given directories.
    pub fn new(plugin_dirs: Vec<PathBuf>) -> Self {
        Self { plugin_dirs }
    }

    /// Discover all plugins in configured directories
    ///
    /// Missing directories and per-directory failures are logged and
    /// skipped, so one bad directory does not abort discovery.
    pub async fn discover_plugins(&self) -> Result<Vec<PluginManifest>> {
        let mut manifests = Vec::new();
        for dir in &self.plugin_dirs {
            if !dir.exists() {
                warn!("Plugin directory does not exist: {:?}", dir);
                continue;
            }
            info!("Discovering plugins in: {:?}", dir);
            match self.discover_in_directory(dir).await {
                Ok(found) => {
                    info!("Found {} plugins in {:?}", found.len(), dir);
                    manifests.extend(found);
                }
                Err(e) => {
                    warn!("Error discovering plugins in {:?}: {}", dir, e);
                }
            }
        }
        Ok(manifests)
    }

    /// Discover plugins in a specific directory
    ///
    /// Walks up to three levels deep (symlinks not followed) looking for
    /// `plugin.toml` files; unreadable entries and unparseable manifests are
    /// logged and skipped rather than treated as fatal.
    async fn discover_in_directory(&self, dir: &Path) -> Result<Vec<PluginManifest>> {
        let mut manifests = Vec::new();
        // Walk the directory looking for plugin.toml files
        for entry in WalkDir::new(dir)
            .max_depth(3) // Don't go too deep
            .follow_links(false)
        {
            let entry = match entry {
                Ok(e) => e,
                Err(e) => {
                    warn!("Error reading directory entry: {}", e);
                    continue;
                }
            };
            let path = entry.path();
            // Look for plugin.toml files
            if path.file_name() == Some(std::ffi::OsStr::new("plugin.toml")) {
                debug!("Found plugin manifest: {:?}", path);
                match PluginManifest::from_file(path) {
                    Ok(manifest) => {
                        info!("Loaded manifest for plugin: {}", manifest.plugin.name);
                        manifests.push(manifest);
                    }
                    Err(e) => {
                        warn!("Failed to load manifest from {:?}: {}", path, e);
                    }
                }
            }
        }
        Ok(manifests)
    }

    /// Resolve the WASM binary path from a manifest
    ///
    /// Searches each configured plugin directory for a subdirectory named
    /// after the plugin, then resolves the manifest's relative WASM path
    /// inside it. The resolved path is canonicalized and checked to stay
    /// within the plugin directory (path-traversal guard).
    ///
    /// # Errors
    ///
    /// Fails if no matching directory contains the WASM binary, if
    /// canonicalization fails, or if the resolved path escapes the plugin
    /// directory.
    pub fn resolve_wasm_path(&self, manifest: &PluginManifest) -> Result<PathBuf> {
        // The WASM path in the manifest is relative to the manifest file
        // We need to search for it in the plugin directories
        for dir in &self.plugin_dirs {
            // Look for a directory matching the plugin name
            let plugin_dir = dir.join(&manifest.plugin.name);
            if !plugin_dir.exists() {
                continue;
            }
            // Check for plugin.toml in this directory
            let manifest_path = plugin_dir.join("plugin.toml");
            if !manifest_path.exists() {
                continue;
            }
            // Resolve WASM path relative to this directory
            let wasm_path = plugin_dir.join(&manifest.plugin.binary.wasm);
            if wasm_path.exists() {
                // Verify the resolved path is within the plugin directory (prevent path traversal)
                let canonical_wasm = wasm_path
                    .canonicalize()
                    .map_err(|e| anyhow!("Failed to canonicalize WASM path: {}", e))?;
                let canonical_plugin_dir = plugin_dir
                    .canonicalize()
                    .map_err(|e| anyhow!("Failed to canonicalize plugin dir: {}", e))?;
                if !canonical_wasm.starts_with(&canonical_plugin_dir) {
                    return Err(anyhow!(
                        "WASM binary path escapes plugin directory: {:?}",
                        wasm_path
                    ));
                }
                return Ok(canonical_wasm);
            }
        }
        Err(anyhow!(
            "WASM binary not found for plugin: {}",
            manifest.plugin.name
        ))
    }

    /// Download a plugin from a URL
    ///
    /// Accepts HTTPS URLs only, enforces a 300-second request timeout and a
    /// 100 MB size cap (checked both against Content-Length and the actual
    /// body), writes the archive to a unique temp file, extracts it with the
    /// external `tar` binary into the first configured plugin directory, and
    /// returns the directory containing the extracted `plugin.toml`.
    ///
    /// # Errors
    ///
    /// Fails on non-HTTPS URLs, missing plugin directories, HTTP errors,
    /// oversized archives, extraction failure, files escaping the
    /// destination, or a missing `plugin.toml` after extraction.
    //
    // NOTE(review): extracted files are validated against the destination
    // only AFTER `tar` has run, so an archive with absolute paths or `..`
    // members may already have written outside the destination before the
    // check fires (whether `tar` itself rejects such members depends on the
    // installed tar implementation). Consider extracting with an in-process
    // library or sanitizing member names up front — confirm threat model.
    pub async fn download_plugin(&self, url: &str) -> Result<PathBuf> {
        // Only allow HTTPS downloads
        if !url.starts_with("https://") {
            return Err(anyhow!(
                "Only HTTPS URLs are allowed for plugin downloads: {}",
                url
            ));
        }
        let dest_dir = self
            .plugin_dirs
            .first()
            .ok_or_else(|| anyhow!("No plugin directories configured"))?;
        std::fs::create_dir_all(dest_dir)?;
        // Download the archive with timeout and size limits
        let client = reqwest::Client::builder()
            .timeout(std::time::Duration::from_secs(300))
            .build()
            .map_err(|e| anyhow!("Failed to build HTTP client: {}", e))?;
        let response = client
            .get(url)
            .send()
            .await
            .map_err(|e| anyhow!("Failed to download plugin: {}", e))?;
        if !response.status().is_success() {
            return Err(anyhow!(
                "Plugin download failed with status: {}",
                response.status()
            ));
        }
        // Check content-length header before downloading
        const MAX_PLUGIN_SIZE: u64 = 100 * 1024 * 1024; // 100 MB
        if let Some(content_length) = response.content_length()
            && content_length > MAX_PLUGIN_SIZE {
            return Err(anyhow!(
                "Plugin archive too large: {} bytes (max {} bytes)",
                content_length,
                MAX_PLUGIN_SIZE
            ));
        }
        let bytes = response
            .bytes()
            .await
            .map_err(|e| anyhow!("Failed to read plugin response: {}", e))?;
        // Check actual size after download (Content-Length can be absent or lie)
        if bytes.len() as u64 > MAX_PLUGIN_SIZE {
            return Err(anyhow!(
                "Plugin archive too large: {} bytes (max {} bytes)",
                bytes.len(),
                MAX_PLUGIN_SIZE
            ));
        }
        // Write archive to a unique temp file (UUIDv7 avoids collisions
        // between concurrent downloads)
        let temp_archive = dest_dir.join(format!(".download-{}.tar.gz", uuid::Uuid::now_v7()));
        std::fs::write(&temp_archive, &bytes)?;
        // Extract using tar with -C to target directory
        let canonical_dest = dest_dir
            .canonicalize()
            .map_err(|e| anyhow!("Failed to canonicalize dest dir: {}", e))?;
        let output = std::process::Command::new("tar")
            .args([
                "xzf",
                &temp_archive.to_string_lossy(),
                "-C",
                &canonical_dest.to_string_lossy(),
            ])
            .output()
            .map_err(|e| anyhow!("Failed to extract plugin archive: {}", e))?;
        // Clean up the archive (best-effort; failure to remove is not fatal)
        let _ = std::fs::remove_file(&temp_archive);
        if !output.status.success() {
            return Err(anyhow!(
                "Failed to extract plugin archive: {}",
                String::from_utf8_lossy(&output.stderr)
            ));
        }
        // Validate that all extracted files are within dest_dir
        // (see NOTE(review) above: this runs after extraction)
        for entry in WalkDir::new(&canonical_dest).follow_links(false) {
            let entry = entry?;
            let entry_canonical = entry.path().canonicalize()?;
            if !entry_canonical.starts_with(&canonical_dest) {
                return Err(anyhow!(
                    "Extracted file escapes destination directory: {:?}",
                    entry.path()
                ));
            }
        }
        // Find the extracted plugin directory by looking for plugin.toml
        // NOTE(review): this scans the whole dest_dir, so with multiple
        // installed plugins it may match a pre-existing plugin.toml rather
        // than the one just extracted — confirm intended behavior.
        for entry in WalkDir::new(dest_dir).max_depth(2).follow_links(false) {
            let entry = entry?;
            if entry.file_name() == "plugin.toml" {
                let plugin_dir = entry
                    .path()
                    .parent()
                    .ok_or_else(|| anyhow!("Invalid plugin.toml location"))?;
                // Validate the manifest
                let manifest = PluginManifest::from_file(entry.path())?;
                info!("Downloaded and extracted plugin: {}", manifest.plugin.name);
                return Ok(plugin_dir.to_path_buf());
            }
        }
        Err(anyhow!(
            "No plugin.toml found after extracting archive from: {}",
            url
        ))
    }

    /// Validate a plugin package
    ///
    /// Checks that the directory exists, contains a parseable `plugin.toml`,
    /// that the referenced WASM binary exists inside the plugin directory
    /// (path-traversal guard via canonicalization), and that the binary
    /// starts with the WASM magic number `\0asm`.
    ///
    /// # Errors
    ///
    /// Returns an error describing the first failed check.
    pub fn validate_plugin_package(&self, path: &Path) -> Result<()> {
        // Check that the path exists
        if !path.exists() {
            return Err(anyhow!("Plugin path does not exist: {:?}", path));
        }
        // Check for plugin.toml
        let manifest_path = path.join("plugin.toml");
        if !manifest_path.exists() {
            return Err(anyhow!("Missing plugin.toml in {:?}", path));
        }
        // Parse and validate manifest
        let manifest = PluginManifest::from_file(&manifest_path)?;
        // Check that WASM binary exists
        let wasm_path = path.join(&manifest.plugin.binary.wasm);
        if !wasm_path.exists() {
            return Err(anyhow!(
                "WASM binary not found: {}",
                manifest.plugin.binary.wasm
            ));
        }
        // Verify the WASM path is within the plugin directory (prevent path traversal)
        let canonical_wasm = wasm_path.canonicalize()?;
        let canonical_path = path.canonicalize()?;
        if !canonical_wasm.starts_with(&canonical_path) {
            return Err(anyhow!(
                "WASM binary path escapes plugin directory: {:?}",
                wasm_path
            ));
        }
        // Validate WASM file: must begin with the `\0asm` magic number.
        // NOTE(review): this reads the whole file into memory just to check
        // 4 bytes — reading only the header would avoid that.
        let wasm_bytes = std::fs::read(&wasm_path)?;
        if wasm_bytes.len() < 4 || &wasm_bytes[0..4] != b"\0asm" {
            return Err(anyhow!("Invalid WASM file: {:?}", wasm_path));
        }
        Ok(())
    }

    /// Get plugin directory path for a given plugin name
    ///
    /// Returns the first configured directory containing a subdirectory
    /// named `plugin_name`, or `None` if no such directory exists.
    pub fn get_plugin_dir(&self, plugin_name: &str) -> Option<PathBuf> {
        for dir in &self.plugin_dirs {
            let plugin_dir = dir.join(plugin_name);
            if plugin_dir.exists() {
                return Some(plugin_dir);
            }
        }
        None
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// An empty plugin directory yields no manifests.
    #[tokio::test]
    async fn test_discover_plugins_empty() {
        let temp_dir = TempDir::new().unwrap();
        let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]);
        let manifests = loader.discover_plugins().await.unwrap();
        assert_eq!(manifests.len(), 0);
    }

    /// A directory with a valid plugin.toml is discovered.
    #[tokio::test]
    async fn test_discover_plugins_with_manifest() {
        let temp_dir = TempDir::new().unwrap();
        let plugin_dir = temp_dir.path().join("test-plugin");
        std::fs::create_dir(&plugin_dir).unwrap();
        // Create a valid manifest
        let manifest_content = r#"
[plugin]
name = "test-plugin"
version = "1.0.0"
api_version = "1.0"
kind = ["media_type"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
        std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
        // Create dummy WASM file (magic number + version header)
        std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap();
        let loader = PluginLoader::new(vec![temp_dir.path().to_path_buf()]);
        let manifests = loader.discover_plugins().await.unwrap();
        assert_eq!(manifests.len(), 1);
        assert_eq!(manifests[0].plugin.name, "test-plugin");
    }

    /// Validation fails without the WASM binary and succeeds once present.
    #[test]
    fn test_validate_plugin_package() {
        let temp_dir = TempDir::new().unwrap();
        let plugin_dir = temp_dir.path().join("test-plugin");
        std::fs::create_dir(&plugin_dir).unwrap();
        // Create a valid manifest
        let manifest_content = r#"
[plugin]
name = "test-plugin"
version = "1.0.0"
api_version = "1.0"
kind = ["media_type"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
        std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
        let loader = PluginLoader::new(vec![]);
        // Should fail without WASM file
        assert!(loader.validate_plugin_package(&plugin_dir).is_err());
        // Create valid WASM file (magic number only)
        std::fs::write(plugin_dir.join("plugin.wasm"), b"\0asm\x01\x00\x00\x00").unwrap();
        // Should succeed now
        assert!(loader.validate_plugin_package(&plugin_dir).is_ok());
    }

    /// A file without the `\0asm` magic number is rejected.
    #[test]
    fn test_validate_invalid_wasm() {
        let temp_dir = TempDir::new().unwrap();
        let plugin_dir = temp_dir.path().join("test-plugin");
        std::fs::create_dir(&plugin_dir).unwrap();
        let manifest_content = r#"
[plugin]
name = "test-plugin"
version = "1.0.0"
api_version = "1.0"
kind = ["media_type"]
[plugin.binary]
wasm = "plugin.wasm"
"#;
        std::fs::write(plugin_dir.join("plugin.toml"), manifest_content).unwrap();
        // Create invalid WASM file
        std::fs::write(plugin_dir.join("plugin.wasm"), b"not wasm").unwrap();
        let loader = PluginLoader::new(vec![]);
        assert!(loader.validate_plugin_package(&plugin_dir).is_err());
    }
}

View file

@ -0,0 +1,419 @@
//! Plugin system for Pinakes
//!
//! This module provides a comprehensive plugin architecture that allows extending
//! Pinakes with custom media types, metadata extractors, search backends, and more.
//!
//! # Architecture
//!
//! - Plugins are compiled to WASM and run in a sandboxed environment
//! - Capability-based security controls what plugins can access
//! - Hot-reload support for development
//! - Automatic plugin discovery from configured directories
use anyhow::Result;
use pinakes_plugin_api::{PluginContext, PluginMetadata};
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{debug, error, info, warn};
pub mod loader;
pub mod registry;
pub mod runtime;
pub mod security;
pub use loader::PluginLoader;
pub use registry::{PluginRegistry, RegisteredPlugin};
pub use runtime::{WasmPlugin, WasmRuntime};
pub use security::CapabilityEnforcer;
/// Plugin manager coordinates plugin lifecycle and operations
///
/// Owns the registry of loaded plugins, the WASM runtime that executes
/// them, the loader used for discovery/installation, and the capability
/// enforcer that gates what plugins may do.
pub struct PluginManager {
    /// Plugin registry, shared behind an async read-write lock
    registry: Arc<RwLock<PluginRegistry>>,
    /// WASM runtime for executing plugins
    runtime: Arc<WasmRuntime>,
    /// Plugin loader for discovery and loading
    loader: PluginLoader,
    /// Capability enforcer for security
    enforcer: CapabilityEnforcer,
    /// Root directory for per-plugin persistent data
    data_dir: PathBuf,
    /// Root directory for per-plugin cache data
    cache_dir: PathBuf,
    /// Manager configuration (see [`PluginManagerConfig`])
    config: PluginManagerConfig,
}
/// Configuration for the plugin manager
#[derive(Debug, Clone)]
pub struct PluginManagerConfig {
    /// Directories to search for plugins
    pub plugin_dirs: Vec<PathBuf>,
    /// Whether to enable hot-reload (for development)
    pub enable_hot_reload: bool,
    /// Whether to allow unsigned plugins
    pub allow_unsigned: bool,
    /// Maximum number of concurrent plugin operations
    // NOTE(review): not visibly enforced in this module — presumably applied
    // by the runtime; confirm.
    pub max_concurrent_ops: usize,
    /// Plugin timeout in seconds
    // NOTE(review): likewise, enforcement is not visible here — confirm
    // against the WASM runtime.
    pub plugin_timeout_secs: u64,
}
impl Default for PluginManagerConfig {
    /// Conservative defaults: no plugin directories, hot-reload off,
    /// unsigned plugins rejected, four concurrent operations, and a
    /// 30-second plugin timeout.
    fn default() -> Self {
        Self {
            plugin_timeout_secs: 30,
            max_concurrent_ops: 4,
            allow_unsigned: false,
            enable_hot_reload: false,
            plugin_dirs: Vec::new(),
        }
    }
}
/// Field-for-field conversion from the application-level plugins
/// configuration; no validation or normalization is performed here.
impl From<crate::config::PluginsConfig> for PluginManagerConfig {
    fn from(cfg: crate::config::PluginsConfig) -> Self {
        Self {
            plugin_dirs: cfg.plugin_dirs,
            enable_hot_reload: cfg.enable_hot_reload,
            allow_unsigned: cfg.allow_unsigned,
            max_concurrent_ops: cfg.max_concurrent_ops,
            plugin_timeout_secs: cfg.plugin_timeout_secs,
        }
    }
}
impl PluginManager {
    /// Create a new plugin manager
    ///
    /// Creates `data_dir` and `cache_dir` if missing and constructs the
    /// runtime, registry, loader, and capability enforcer.
    ///
    /// # Errors
    ///
    /// Fails if the directories cannot be created or the WASM runtime
    /// cannot be initialized.
    pub fn new(data_dir: PathBuf, cache_dir: PathBuf, config: PluginManagerConfig) -> Result<Self> {
        // Ensure directories exist
        std::fs::create_dir_all(&data_dir)?;
        std::fs::create_dir_all(&cache_dir)?;
        let runtime = Arc::new(WasmRuntime::new()?);
        let registry = Arc::new(RwLock::new(PluginRegistry::new()));
        let loader = PluginLoader::new(config.plugin_dirs.clone());
        let enforcer = CapabilityEnforcer::new();
        Ok(Self {
            registry,
            runtime,
            loader,
            enforcer,
            data_dir,
            cache_dir,
            config,
        })
    }

    /// Discover and load all plugins from configured directories
    ///
    /// Per-plugin load failures are logged and skipped; returns the IDs of
    /// the plugins that loaded successfully.
    pub async fn discover_and_load_all(&self) -> Result<Vec<String>> {
        info!("Discovering plugins from {:?}", self.config.plugin_dirs);
        let manifests = self.loader.discover_plugins().await?;
        let mut loaded_plugins = Vec::new();
        for manifest in manifests {
            match self.load_plugin_from_manifest(&manifest).await {
                Ok(plugin_id) => {
                    info!("Loaded plugin: {}", plugin_id);
                    loaded_plugins.push(plugin_id);
                }
                Err(e) => {
                    warn!("Failed to load plugin {}: {}", manifest.plugin.name, e);
                }
            }
        }
        Ok(loaded_plugins)
    }

    /// Load a plugin from a manifest file
    ///
    /// Steps, in order: validate the plugin ID (path-traversal guard),
    /// short-circuit if already loaded, validate capabilities, create the
    /// per-plugin data/cache directories and context, load the WASM binary,
    /// call the plugin's `initialize` export (failure disables the plugin
    /// but still registers it), and register the plugin.
    ///
    /// Returns the plugin ID on success.
    async fn load_plugin_from_manifest(
        &self,
        manifest: &pinakes_plugin_api::PluginManifest,
    ) -> Result<String> {
        let plugin_id = manifest.plugin_id();
        // Validate plugin_id to prevent path traversal (it is used below as
        // a directory name under data_dir/cache_dir)
        if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") {
            return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
        }
        // Check if already loaded (read lock dropped before the heavy work)
        {
            let registry = self.registry.read().await;
            if registry.is_loaded(&plugin_id) {
                return Ok(plugin_id);
            }
        }
        // Validate capabilities
        let capabilities = manifest.to_capabilities();
        self.enforcer.validate_capabilities(&capabilities)?;
        // Create plugin context
        let plugin_data_dir = self.data_dir.join(&plugin_id);
        let plugin_cache_dir = self.cache_dir.join(&plugin_id);
        tokio::fs::create_dir_all(&plugin_data_dir).await?;
        tokio::fs::create_dir_all(&plugin_cache_dir).await?;
        let context = PluginContext {
            data_dir: plugin_data_dir,
            cache_dir: plugin_cache_dir,
            // Unserializable config values degrade to Null with a warning
            // rather than failing the whole load
            config: manifest
                .config
                .iter()
                .map(|(k, v)| {
                    (
                        k.clone(),
                        serde_json::to_value(v).unwrap_or_else(|e| {
                            tracing::warn!("failed to serialize config value for key {}: {}", k, e);
                            serde_json::Value::Null
                        }),
                    )
                })
                .collect(),
            capabilities: capabilities.clone(),
        };
        // Load WASM binary
        let wasm_path = self.loader.resolve_wasm_path(manifest)?;
        let wasm_plugin = self.runtime.load_plugin(&wasm_path, context).await?;
        // Initialize plugin; a failed initialize registers the plugin in a
        // disabled state instead of aborting the load
        let init_succeeded = match wasm_plugin.call_function("initialize", &[]).await {
            Ok(_) => true,
            Err(e) => {
                tracing::warn!(plugin_id = %plugin_id, "plugin initialization failed: {}", e);
                false
            }
        };
        // Register plugin
        let metadata = PluginMetadata {
            id: plugin_id.clone(),
            name: manifest.plugin.name.clone(),
            version: manifest.plugin.version.clone(),
            author: manifest.plugin.author.clone().unwrap_or_default(),
            description: manifest.plugin.description.clone().unwrap_or_default(),
            api_version: manifest.plugin.api_version.clone(),
            capabilities_required: capabilities,
        };
        // Derive manifest_path from the loader's plugin directories (used by
        // hot-reload to re-read the manifest from disk)
        let manifest_path = self
            .loader
            .get_plugin_dir(&manifest.plugin.name)
            .map(|dir| dir.join("plugin.toml"));
        let registered = RegisteredPlugin {
            id: plugin_id.clone(),
            metadata,
            wasm_plugin,
            manifest: manifest.clone(),
            manifest_path,
            enabled: init_succeeded,
        };
        let mut registry = self.registry.write().await;
        registry.register(registered)?;
        Ok(plugin_id)
    }

    /// Install a plugin from a file or URL
    ///
    /// HTTP(S) sources are downloaded via the loader (which itself rejects
    /// plain `http://`); any other source string is treated as a local
    /// directory path.
    pub async fn install_plugin(&self, source: &str) -> Result<String> {
        info!("Installing plugin from: {}", source);
        let plugin_path = if source.starts_with("http://") || source.starts_with("https://") {
            // Download from URL
            self.loader.download_plugin(source).await?
        } else {
            // Use the local path in place.
            // NOTE(review): despite installing, the source directory is NOT
            // copied into a managed plugin directory, and no
            // validate_plugin_package call is made here — confirm intended.
            PathBuf::from(source)
        };
        // Load the manifest
        let manifest_path = plugin_path.join("plugin.toml");
        let manifest = pinakes_plugin_api::PluginManifest::from_file(&manifest_path)?;
        // Load the plugin
        self.load_plugin_from_manifest(&manifest).await
    }

    /// Uninstall a plugin
    ///
    /// Shuts the plugin down, removes it from the registry, and deletes its
    /// data and cache directories.
    ///
    /// # Errors
    ///
    /// Fails on an invalid plugin ID, an unknown plugin, or filesystem
    /// errors while removing the plugin's directories.
    pub async fn uninstall_plugin(&self, plugin_id: &str) -> Result<()> {
        // Validate plugin_id to prevent path traversal (it is joined onto
        // data_dir/cache_dir below)
        if plugin_id.contains('/') || plugin_id.contains('\\') || plugin_id.contains("..") {
            return Err(anyhow::anyhow!("Invalid plugin ID: {}", plugin_id));
        }
        info!("Uninstalling plugin: {}", plugin_id);
        // Shutdown plugin first
        self.shutdown_plugin(plugin_id).await?;
        // Remove from registry
        let mut registry = self.registry.write().await;
        registry.unregister(plugin_id)?;
        // Remove plugin data and cache
        let plugin_data_dir = self.data_dir.join(plugin_id);
        let plugin_cache_dir = self.cache_dir.join(plugin_id);
        if plugin_data_dir.exists() {
            std::fs::remove_dir_all(&plugin_data_dir)?;
        }
        if plugin_cache_dir.exists() {
            std::fs::remove_dir_all(&plugin_cache_dir)?;
        }
        Ok(())
    }

    /// Enable a plugin
    ///
    /// # Errors
    ///
    /// Fails if the plugin is not registered.
    pub async fn enable_plugin(&self, plugin_id: &str) -> Result<()> {
        let mut registry = self.registry.write().await;
        registry.enable(plugin_id)
    }

    /// Disable a plugin
    ///
    /// # Errors
    ///
    /// Fails if the plugin is not registered.
    pub async fn disable_plugin(&self, plugin_id: &str) -> Result<()> {
        let mut registry = self.registry.write().await;
        registry.disable(plugin_id)
    }

    /// Shutdown a specific plugin
    ///
    /// Calls the plugin's `shutdown` export; an error from the plugin side
    /// is deliberately ignored (`.ok()`), so only an unknown plugin ID
    /// produces an error. The plugin remains registered afterwards.
    pub async fn shutdown_plugin(&self, plugin_id: &str) -> Result<()> {
        debug!("Shutting down plugin: {}", plugin_id);
        let registry = self.registry.read().await;
        if let Some(plugin) = registry.get(plugin_id) {
            plugin.wasm_plugin.call_function("shutdown", &[]).await.ok();
            Ok(())
        } else {
            Err(anyhow::anyhow!("Plugin not found: {}", plugin_id))
        }
    }

    /// Shutdown all plugins
    ///
    /// Failures are logged per plugin and do not abort the loop.
    pub async fn shutdown_all(&self) -> Result<()> {
        info!("Shutting down all plugins");
        let registry = self.registry.read().await;
        let plugin_ids: Vec<String> = registry.list_all().iter().map(|p| p.id.clone()).collect();
        for plugin_id in plugin_ids {
            if let Err(e) = self.shutdown_plugin(&plugin_id).await {
                error!("Failed to shutdown plugin {}: {}", plugin_id, e);
            }
        }
        Ok(())
    }

    /// Get list of all registered plugins
    pub async fn list_plugins(&self) -> Vec<PluginMetadata> {
        let registry = self.registry.read().await;
        registry
            .list_all()
            .iter()
            .map(|p| p.metadata.clone())
            .collect()
    }

    /// Get plugin metadata by ID
    pub async fn get_plugin(&self, plugin_id: &str) -> Option<PluginMetadata> {
        let registry = self.registry.read().await;
        registry.get(plugin_id).map(|p| p.metadata.clone())
    }

    /// Check if a plugin is loaded and enabled
    ///
    /// Returns `false` both for disabled and for unknown plugins.
    pub async fn is_plugin_enabled(&self, plugin_id: &str) -> bool {
        let registry = self.registry.read().await;
        registry.is_enabled(plugin_id).unwrap_or(false)
    }

    /// Reload a plugin (for hot-reload during development)
    ///
    /// Re-reads the manifest from disk when its path is known (falling back
    /// to the cached copy), shuts down and unregisters the current instance,
    /// then loads the plugin again.
    ///
    /// # Errors
    ///
    /// Fails if hot-reload is disabled, the plugin is unknown, or reloading
    /// fails.
    pub async fn reload_plugin(&self, plugin_id: &str) -> Result<()> {
        if !self.config.enable_hot_reload {
            return Err(anyhow::anyhow!("Hot-reload is disabled"));
        }
        info!("Reloading plugin: {}", plugin_id);
        // Re-read the manifest from disk if possible, falling back to cached version
        let manifest = {
            let registry = self.registry.read().await;
            let plugin = registry
                .get(plugin_id)
                .ok_or_else(|| anyhow::anyhow!("Plugin not found"))?;
            if let Some(ref manifest_path) = plugin.manifest_path {
                pinakes_plugin_api::PluginManifest::from_file(manifest_path).unwrap_or_else(|e| {
                    warn!("Failed to re-read manifest from disk, using cached: {}", e);
                    plugin.manifest.clone()
                })
            } else {
                plugin.manifest.clone()
            }
        };
        // Shutdown and unload current version
        self.shutdown_plugin(plugin_id).await?;
        {
            let mut registry = self.registry.write().await;
            registry.unregister(plugin_id)?;
        }
        // Reload from manifest
        self.load_plugin_from_manifest(&manifest).await?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// `new` should succeed and create the data/cache directories it is given.
    #[tokio::test]
    async fn test_plugin_manager_creation() {
        let temp_dir = TempDir::new().unwrap();
        let data_dir = temp_dir.path().join("data");
        let cache_dir = temp_dir.path().join("cache");
        let config = PluginManagerConfig::default();
        let manager = PluginManager::new(data_dir.clone(), cache_dir.clone(), config);
        assert!(manager.is_ok());
        assert!(data_dir.exists());
        assert!(cache_dir.exists());
    }

    /// A fresh manager with no plugin directories lists no plugins.
    #[tokio::test]
    async fn test_list_plugins_empty() {
        let temp_dir = TempDir::new().unwrap();
        let data_dir = temp_dir.path().join("data");
        let cache_dir = temp_dir.path().join("cache");
        let config = PluginManagerConfig::default();
        let manager = PluginManager::new(data_dir, cache_dir, config).unwrap();
        let plugins = manager.list_plugins().await;
        assert_eq!(plugins.len(), 0);
    }
}

View file

@ -0,0 +1,280 @@
//! Plugin registry for managing loaded plugins
use std::path::PathBuf;
use anyhow::{Result, anyhow};
use pinakes_plugin_api::{PluginManifest, PluginMetadata};
use std::collections::HashMap;
use super::runtime::WasmPlugin;
/// A registered plugin with its metadata and runtime state
#[derive(Clone)]
pub struct RegisteredPlugin {
    /// Unique plugin identifier (registry key).
    pub id: String,
    /// Metadata reported by the plugin (name, version, author, ...).
    pub metadata: PluginMetadata,
    /// The loaded WASM module and its execution context.
    pub wasm_plugin: WasmPlugin,
    /// Manifest the plugin was loaded from (cached copy).
    pub manifest: PluginManifest,
    /// On-disk manifest location, if known — used for hot-reload re-reads.
    pub manifest_path: Option<PathBuf>,
    /// Whether the plugin is currently enabled.
    pub enabled: bool,
}
/// Plugin registry maintains the state of all loaded plugins
pub struct PluginRegistry {
    /// Map of plugin ID to registered plugin
    plugins: HashMap<String, RegisteredPlugin>,
}
impl PluginRegistry {
    /// Create a new empty registry
    pub fn new() -> Self {
        Self {
            plugins: HashMap::new(),
        }
    }

    /// Register a new plugin
    ///
    /// # Errors
    /// Fails if a plugin with the same ID is already registered.
    pub fn register(&mut self, plugin: RegisteredPlugin) -> Result<()> {
        if self.plugins.contains_key(&plugin.id) {
            return Err(anyhow!("Plugin already registered: {}", plugin.id));
        }
        self.plugins.insert(plugin.id.clone(), plugin);
        Ok(())
    }

    /// Unregister a plugin by ID
    ///
    /// # Errors
    /// Fails if no plugin with the given ID is registered.
    pub fn unregister(&mut self, plugin_id: &str) -> Result<()> {
        self.plugins
            .remove(plugin_id)
            .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
        Ok(())
    }

    /// Get a plugin by ID
    pub fn get(&self, plugin_id: &str) -> Option<&RegisteredPlugin> {
        self.plugins.get(plugin_id)
    }

    /// Get a mutable reference to a plugin by ID
    pub fn get_mut(&mut self, plugin_id: &str) -> Option<&mut RegisteredPlugin> {
        self.plugins.get_mut(plugin_id)
    }

    /// Check if a plugin is loaded
    pub fn is_loaded(&self, plugin_id: &str) -> bool {
        self.plugins.contains_key(plugin_id)
    }

    /// Check if a plugin is enabled. Returns `None` if the plugin is not found.
    pub fn is_enabled(&self, plugin_id: &str) -> Option<bool> {
        self.plugins.get(plugin_id).map(|p| p.enabled)
    }

    /// Enable a plugin
    ///
    /// # Errors
    /// Fails if no plugin with the given ID is registered.
    pub fn enable(&mut self, plugin_id: &str) -> Result<()> {
        let plugin = self
            .plugins
            .get_mut(plugin_id)
            .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
        plugin.enabled = true;
        Ok(())
    }

    /// Disable a plugin
    ///
    /// # Errors
    /// Fails if no plugin with the given ID is registered.
    pub fn disable(&mut self, plugin_id: &str) -> Result<()> {
        let plugin = self
            .plugins
            .get_mut(plugin_id)
            .ok_or_else(|| anyhow!("Plugin not found: {}", plugin_id))?;
        plugin.enabled = false;
        Ok(())
    }

    /// List all registered plugins
    pub fn list_all(&self) -> Vec<&RegisteredPlugin> {
        self.plugins.values().collect()
    }

    /// List all enabled plugins
    pub fn list_enabled(&self) -> Vec<&RegisteredPlugin> {
        self.plugins.values().filter(|p| p.enabled).collect()
    }

    /// Get plugins by kind (e.g., "media_type", "metadata_extractor")
    pub fn get_by_kind(&self, kind: &str) -> Vec<&RegisteredPlugin> {
        // Compare as &str to avoid allocating a String per plugin per call.
        self.plugins
            .values()
            .filter(|p| p.manifest.plugin.kind.iter().any(|k| k == kind))
            .collect()
    }

    /// Get count of registered plugins
    pub fn count(&self) -> usize {
        self.plugins.len()
    }

    /// Get count of enabled plugins
    pub fn count_enabled(&self) -> usize {
        self.plugins.values().filter(|p| p.enabled).count()
    }
}
impl Default for PluginRegistry {
    /// Equivalent to [`PluginRegistry::new`]: an empty registry.
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pinakes_plugin_api::Capabilities;
    use std::collections::HashMap;

    /// Build a minimal enabled plugin entry with the given ID and kinds.
    fn create_test_plugin(id: &str, kind: Vec<String>) -> RegisteredPlugin {
        let manifest = PluginManifest {
            plugin: pinakes_plugin_api::manifest::PluginInfo {
                name: id.to_string(),
                version: "1.0.0".to_string(),
                api_version: "1.0".to_string(),
                author: Some("Test".to_string()),
                description: Some("Test plugin".to_string()),
                homepage: None,
                license: None,
                kind,
                binary: pinakes_plugin_api::manifest::PluginBinary {
                    wasm: "test.wasm".to_string(),
                    entrypoint: None,
                },
                dependencies: vec![],
            },
            capabilities: Default::default(),
            config: HashMap::new(),
        };
        RegisteredPlugin {
            id: id.to_string(),
            metadata: PluginMetadata {
                id: id.to_string(),
                name: id.to_string(),
                version: "1.0.0".to_string(),
                author: "Test".to_string(),
                description: "Test plugin".to_string(),
                api_version: "1.0".to_string(),
                capabilities_required: Capabilities::default(),
            },
            wasm_plugin: WasmPlugin::default(),
            manifest,
            manifest_path: None,
            enabled: true,
        }
    }

    /// Registered plugins are discoverable via `is_loaded` and `get`.
    #[test]
    fn test_registry_register_and_get() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin.clone()).unwrap();
        assert!(registry.is_loaded("test-plugin"));
        assert!(registry.get("test-plugin").is_some());
    }

    /// Registering the same ID twice is rejected.
    #[test]
    fn test_registry_duplicate_register() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin.clone()).unwrap();
        let result = registry.register(plugin);
        assert!(result.is_err());
    }

    /// Unregistering removes the plugin from the registry.
    #[test]
    fn test_registry_unregister() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin).unwrap();
        registry.unregister("test-plugin").unwrap();
        assert!(!registry.is_loaded("test-plugin"));
    }

    /// Enable/disable toggle the flag; unknown IDs yield `None`.
    #[test]
    fn test_registry_enable_disable() {
        let mut registry = PluginRegistry::new();
        let plugin = create_test_plugin("test-plugin", vec!["media_type".to_string()]);
        registry.register(plugin).unwrap();
        assert_eq!(registry.is_enabled("test-plugin"), Some(true));
        registry.disable("test-plugin").unwrap();
        assert_eq!(registry.is_enabled("test-plugin"), Some(false));
        registry.enable("test-plugin").unwrap();
        assert_eq!(registry.is_enabled("test-plugin"), Some(true));
        assert_eq!(registry.is_enabled("nonexistent"), None);
    }

    /// `get_by_kind` filters plugins by the kinds declared in their manifests.
    #[test]
    fn test_registry_get_by_kind() {
        let mut registry = PluginRegistry::new();
        registry
            .register(create_test_plugin(
                "plugin1",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        registry
            .register(create_test_plugin(
                "plugin2",
                vec!["metadata_extractor".to_string()],
            ))
            .unwrap();
        registry
            .register(create_test_plugin(
                "plugin3",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        let media_type_plugins = registry.get_by_kind("media_type");
        assert_eq!(media_type_plugins.len(), 2);
        let extractor_plugins = registry.get_by_kind("metadata_extractor");
        assert_eq!(extractor_plugins.len(), 1);
    }

    /// `count` tracks all plugins; `count_enabled` only the enabled subset.
    #[test]
    fn test_registry_counts() {
        let mut registry = PluginRegistry::new();
        registry
            .register(create_test_plugin(
                "plugin1",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        registry
            .register(create_test_plugin(
                "plugin2",
                vec!["media_type".to_string()],
            ))
            .unwrap();
        assert_eq!(registry.count(), 2);
        assert_eq!(registry.count_enabled(), 2);
        registry.disable("plugin1").unwrap();
        assert_eq!(registry.count(), 2);
        assert_eq!(registry.count_enabled(), 1);
    }
}

View file

@ -0,0 +1,582 @@
//! WASM runtime for executing plugins
use anyhow::{Result, anyhow};
use pinakes_plugin_api::PluginContext;
use std::path::Path;
use std::sync::Arc;
use wasmtime::*;
/// WASM runtime wrapper for executing plugins
pub struct WasmRuntime {
    /// Shared wasmtime engine used to compile all plugin modules.
    engine: Engine,
}
impl WasmRuntime {
/// Create a new WASM runtime
pub fn new() -> Result<Self> {
let mut config = Config::new();
// Enable WASM features
config.wasm_component_model(true);
config.async_support(true);
// Set resource limits
config.max_wasm_stack(1024 * 1024); // 1MB stack
config.consume_fuel(true); // Enable fuel metering for CPU limits
let engine = Engine::new(&config)?;
Ok(Self { engine })
}
/// Load a plugin from a WASM file
pub async fn load_plugin(
&self,
wasm_path: &Path,
context: PluginContext,
) -> Result<WasmPlugin> {
if !wasm_path.exists() {
return Err(anyhow!("WASM file not found: {:?}", wasm_path));
}
// Read WASM bytes
let wasm_bytes = std::fs::read(wasm_path)?;
// Compile module
let module = Module::new(&self.engine, &wasm_bytes)?;
Ok(WasmPlugin {
module: Arc::new(module),
context,
})
}
}
/// Store data passed to each WASM invocation
pub struct PluginStoreData {
    /// Per-plugin context (dirs, config, capabilities) visible to host functions.
    pub context: PluginContext,
    /// Scratch buffer host functions use to return variable-length data to the guest.
    pub exchange_buffer: Vec<u8>,
}
/// A loaded WASM plugin instance
#[derive(Clone)]
pub struct WasmPlugin {
    /// Compiled module, shared so clones don't recompile.
    module: Arc<Module>,
    /// Context cloned into each invocation's store.
    context: PluginContext,
}
impl WasmPlugin {
    /// Get the plugin context
    pub fn context(&self) -> &PluginContext {
        &self.context
    }

    /// Execute a plugin function
    ///
    /// Creates a fresh store and instance per invocation with host functions
    /// linked, calls the requested exported function, and returns the result.
    /// If host functions filled the exchange buffer, that buffer is the
    /// result; otherwise the first i32 return value is serialized (LE bytes).
    ///
    /// # Errors
    /// Fails if the export is missing, the plugin's `alloc` misbehaves, the
    /// params do not fit in guest memory, fuel runs out, or the call traps.
    pub async fn call_function(&self, function_name: &str, params: &[u8]) -> Result<Vec<u8>> {
        let engine = self.module.engine();
        // Create store with per-invocation data
        let store_data = PluginStoreData {
            context: self.context.clone(),
            exchange_buffer: Vec::new(),
        };
        let mut store = Store::new(engine, store_data);
        // Set fuel limit based on capabilities
        if let Some(max_cpu_time_ms) = self.context.capabilities.max_cpu_time_ms {
            // Rough conversion factor: ~100k fuel units per ms of CPU budget.
            let fuel = max_cpu_time_ms * 100_000;
            store.set_fuel(fuel)?;
        } else {
            store.set_fuel(1_000_000_000)?;
        }
        // Set up linker with host functions
        let mut linker = Linker::new(engine);
        HostFunctions::setup_linker(&mut linker)?;
        // Instantiate the module
        let instance = linker.instantiate_async(&mut store, &self.module).await?;
        // Get the memory export (if available)
        let memory = instance.get_memory(&mut store, "memory");
        // If there are params and memory is available, write them
        let mut alloc_offset: i32 = 0;
        if !params.is_empty()
            && let Some(mem) = &memory
        {
            // Call the plugin's alloc function if available, otherwise write at offset 0
            let offset = if let Ok(alloc) =
                instance.get_typed_func::<i32, i32>(&mut store, "alloc")
            {
                let result = alloc.call_async(&mut store, params.len() as i32).await?;
                if result < 0 {
                    return Err(anyhow!("plugin alloc returned negative offset: {}", result));
                }
                result as usize
            } else {
                0
            };
            alloc_offset = offset as i32;
            let mem_data = mem.data_mut(&mut store);
            let end = offset + params.len();
            // Fail loudly rather than silently invoking the function with
            // params that were never copied into guest memory.
            if end > mem_data.len() {
                return Err(anyhow!(
                    "params ({} bytes at offset {}) exceed plugin memory size ({} bytes)",
                    params.len(),
                    offset,
                    mem_data.len()
                ));
            }
            mem_data[offset..end].copy_from_slice(params);
        }
        // Look up the exported function and call it
        let func = instance
            .get_func(&mut store, function_name)
            .ok_or_else(|| anyhow!("exported function '{}' not found", function_name))?;
        let func_ty = func.ty(&store);
        let param_count = func_ty.params().len();
        let result_count = func_ty.results().len();
        let mut results = vec![Val::I32(0); result_count];
        // Call with appropriate params based on function signature
        if param_count == 2 && !params.is_empty() {
            // Convention: (ptr, len)
            func.call_async(
                &mut store,
                &[Val::I32(alloc_offset), Val::I32(params.len() as i32)],
                &mut results,
            )
            .await?;
        } else if param_count == 0 {
            func.call_async(&mut store, &[], &mut results).await?;
        } else {
            // Generic: fill with zeroes
            let params_vals: Vec<Val> = (0..param_count).map(|_| Val::I32(0)).collect();
            func.call_async(&mut store, &params_vals, &mut results)
                .await?;
        }
        // Read result from exchange buffer (host functions may have written data)
        let exchange = std::mem::take(&mut store.data_mut().exchange_buffer);
        if !exchange.is_empty() {
            return Ok(exchange);
        }
        // Otherwise serialize the return values
        if let Some(Val::I32(ret)) = results.first() {
            Ok(ret.to_le_bytes().to_vec())
        } else {
            Ok(Vec::new())
        }
    }
}
// Test-only: a trivially valid empty module so registry tests can build a
// `RegisteredPlugin` without loading a real WASM file.
#[cfg(test)]
impl Default for WasmPlugin {
    fn default() -> Self {
        let engine = Engine::default();
        // `(module)` is the smallest valid WAT module; compiling it cannot fail.
        let module = Module::new(&engine, br#"(module)"#).unwrap();
        Self {
            module: Arc::new(module),
            context: PluginContext {
                data_dir: std::env::temp_dir(),
                cache_dir: std::env::temp_dir(),
                config: std::collections::HashMap::new(),
                capabilities: Default::default(),
            },
        }
    }
}
/// Host functions that plugins can call
///
/// Unit struct serving as a namespace for [`HostFunctions::setup_linker`].
pub struct HostFunctions;
impl HostFunctions {
    /// Set up host functions in a linker
    ///
    /// Registers the `env` imports plugins may call: `host_log`,
    /// `host_read_file`, `host_write_file`, `host_http_request`,
    /// `host_get_config`, and `host_get_buffer`. Buffer-returning functions
    /// stash their payload in `PluginStoreData::exchange_buffer` and return
    /// its length; the guest then retrieves it via `host_get_buffer`.
    /// Return convention: length/0 on success, -1 on failure, -2 on
    /// capability denial.
    pub fn setup_linker(linker: &mut Linker<PluginStoreData>) -> Result<()> {
        // host_log: log a message from the plugin
        linker.func_wrap(
            "env",
            "host_log",
            |mut caller: Caller<'_, PluginStoreData>, level: i32, ptr: i32, len: i32| {
                // Reject negative guest pointers/lengths outright.
                if ptr < 0 || len < 0 {
                    return;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                if let Some(mem) = memory {
                    let data = mem.data(&caller);
                    let start = ptr as usize;
                    let end = start + len as usize;
                    // Silently drop out-of-bounds or non-UTF-8 messages.
                    if end <= data.len()
                        && let Ok(msg) = std::str::from_utf8(&data[start..end]) {
                        // Level mapping: 0=error, 1=warn, 2=info, other=debug.
                        match level {
                            0 => tracing::error!(plugin = true, "{}", msg),
                            1 => tracing::warn!(plugin = true, "{}", msg),
                            2 => tracing::info!(plugin = true, "{}", msg),
                            _ => tracing::debug!(plugin = true, "{}", msg),
                        }
                    }
                }
            },
        )?;
        // host_read_file: read a file into the exchange buffer
        linker.func_wrap(
            "env",
            "host_read_file",
            |mut caller: Caller<'_, PluginStoreData>, path_ptr: i32, path_len: i32| -> i32 {
                if path_ptr < 0 || path_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let data = mem.data(&caller);
                let start = path_ptr as usize;
                let end = start + path_len as usize;
                if end > data.len() {
                    return -1;
                }
                let path_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                // Canonicalize path before checking permissions to prevent traversal
                let path = match std::path::Path::new(&path_str).canonicalize() {
                    Ok(p) => p,
                    Err(_) => return -1,
                };
                // Check read permission against canonicalized path
                let can_read = caller
                    .data()
                    .context
                    .capabilities
                    .filesystem
                    .read
                    .iter()
                    .any(|allowed| {
                        allowed
                            .canonicalize()
                            .is_ok_and(|a| path.starts_with(a))
                    });
                if !can_read {
                    tracing::warn!(path = %path_str, "plugin read access denied");
                    return -2; // -2 = capability denial (distinct from -1 I/O failure)
                }
                // On success the file contents land in the exchange buffer and
                // the content length is returned to the guest.
                match std::fs::read(&path) {
                    Ok(contents) => {
                        let len = contents.len() as i32;
                        caller.data_mut().exchange_buffer = contents;
                        len
                    }
                    Err(_) => -1,
                }
            },
        )?;
        // host_write_file: write data to a file
        linker.func_wrap(
            "env",
            "host_write_file",
            |mut caller: Caller<'_, PluginStoreData>,
             path_ptr: i32,
             path_len: i32,
             data_ptr: i32,
             data_len: i32|
             -> i32 {
                if path_ptr < 0 || path_len < 0 || data_ptr < 0 || data_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let mem_data = mem.data(&caller);
                let path_start = path_ptr as usize;
                let path_end = path_start + path_len as usize;
                let data_start = data_ptr as usize;
                let data_end = data_start + data_len as usize;
                if path_end > mem_data.len() || data_end > mem_data.len() {
                    return -1;
                }
                let path_str = match std::str::from_utf8(&mem_data[path_start..path_end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                let file_data = mem_data[data_start..data_end].to_vec();
                // Canonicalize path for write (file may not exist yet)
                // — for a new file, canonicalize the parent directory and
                // re-attach the file name.
                let path = std::path::Path::new(&path_str);
                let canonical = if path.exists() {
                    path.canonicalize().ok()
                } else {
                    path.parent()
                        .and_then(|p| p.canonicalize().ok())
                        .map(|p| p.join(path.file_name().unwrap_or_default()))
                };
                let Some(canonical) = canonical else {
                    return -1;
                };
                // Check write permission against canonicalized path
                let can_write = caller
                    .data()
                    .context
                    .capabilities
                    .filesystem
                    .write
                    .iter()
                    .any(|allowed| {
                        allowed
                            .canonicalize()
                            .is_ok_and(|a| canonical.starts_with(a))
                    });
                if !can_write {
                    tracing::warn!(path = %path_str, "plugin write access denied");
                    return -2;
                }
                match std::fs::write(&canonical, &file_data) {
                    Ok(()) => 0,
                    Err(_) => -1,
                }
            },
        )?;
        // host_http_request: make an HTTP request (blocking)
        linker.func_wrap(
            "env",
            "host_http_request",
            |mut caller: Caller<'_, PluginStoreData>, url_ptr: i32, url_len: i32| -> i32 {
                if url_ptr < 0 || url_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let data = mem.data(&caller);
                let start = url_ptr as usize;
                let end = start + url_len as usize;
                if end > data.len() {
                    return -1;
                }
                let url_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                // Check network permission
                if !caller.data().context.capabilities.network.enabled {
                    tracing::warn!(url = %url_str, "plugin network access denied");
                    return -2;
                }
                // Use block_in_place to avoid blocking the async runtime's thread pool.
                // Falls back to a blocking client with timeout if block_in_place is unavailable.
                // (block_in_place panics on a current-thread runtime, hence catch_unwind.)
                let result = std::panic::catch_unwind(|| {
                    tokio::task::block_in_place(|| {
                        tokio::runtime::Handle::current().block_on(async {
                            let client = reqwest::Client::builder()
                                .timeout(std::time::Duration::from_secs(30))
                                .build()
                                .map_err(|e| e.to_string())?;
                            let resp = client
                                .get(&url_str)
                                .send()
                                .await
                                .map_err(|e| e.to_string())?;
                            let bytes = resp.bytes().await.map_err(|e| e.to_string())?;
                            Ok::<_, String>(bytes)
                        })
                    })
                });
                match result {
                    Ok(Ok(bytes)) => {
                        // Response body goes to the exchange buffer; return its length.
                        let len = bytes.len() as i32;
                        caller.data_mut().exchange_buffer = bytes.to_vec();
                        len
                    }
                    Ok(Err(_)) => -1,
                    Err(_) => {
                        // block_in_place panicked (e.g. current-thread runtime);
                        // fall back to blocking client with timeout
                        let client = match reqwest::blocking::Client::builder()
                            .timeout(std::time::Duration::from_secs(30))
                            .build()
                        {
                            Ok(c) => c,
                            Err(_) => return -1,
                        };
                        match client.get(&url_str).send() {
                            Ok(resp) => match resp.bytes() {
                                Ok(bytes) => {
                                    let len = bytes.len() as i32;
                                    caller.data_mut().exchange_buffer = bytes.to_vec();
                                    len
                                }
                                Err(_) => -1,
                            },
                            Err(_) => -1,
                        }
                    }
                }
            },
        )?;
        // host_get_config: read a config key into the exchange buffer
        linker.func_wrap(
            "env",
            "host_get_config",
            |mut caller: Caller<'_, PluginStoreData>, key_ptr: i32, key_len: i32| -> i32 {
                if key_ptr < 0 || key_len < 0 {
                    return -1;
                }
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let data = mem.data(&caller);
                let start = key_ptr as usize;
                let end = start + key_len as usize;
                if end > data.len() {
                    return -1;
                }
                let key_str = match std::str::from_utf8(&data[start..end]) {
                    Ok(s) => s.to_string(),
                    Err(_) => return -1,
                };
                // Config values are serialized to their string form and
                // returned via the exchange buffer; missing keys yield -1.
                match caller.data().context.config.get(&key_str) {
                    Some(value) => {
                        let json = value.to_string();
                        let bytes = json.into_bytes();
                        let len = bytes.len() as i32;
                        caller.data_mut().exchange_buffer = bytes;
                        len
                    }
                    None => -1,
                }
            },
        )?;
        // host_get_buffer: copy the exchange buffer to WASM memory
        linker.func_wrap(
            "env",
            "host_get_buffer",
            |mut caller: Caller<'_, PluginStoreData>, dest_ptr: i32, dest_len: i32| -> i32 {
                if dest_ptr < 0 || dest_len < 0 {
                    return -1;
                }
                // Clone to end the shared borrow of the store data before
                // taking the mutable memory view below.
                let buf = caller.data().exchange_buffer.clone();
                // Copy at most dest_len bytes; return the number copied.
                let copy_len = buf.len().min(dest_len as usize);
                let memory = caller.get_export("memory").and_then(|e| e.into_memory());
                let Some(mem) = memory else { return -1 };
                let mem_data = mem.data_mut(&mut caller);
                let start = dest_ptr as usize;
                if start + copy_len > mem_data.len() {
                    return -1;
                }
                mem_data[start..start + copy_len].copy_from_slice(&buf[..copy_len]);
                copy_len as i32
            },
        )?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pinakes_plugin_api::PluginContext;
    use std::collections::HashMap;

    /// The runtime should build with the default engine configuration.
    #[test]
    fn test_wasm_runtime_creation() {
        let runtime = WasmRuntime::new();
        assert!(runtime.is_ok());
    }

    /// Exercises the prefix-based capability checks the host functions rely on.
    /// Note: this checks the raw `starts_with` logic, not canonicalization.
    #[test]
    fn test_host_functions_file_access() {
        let mut capabilities = pinakes_plugin_api::Capabilities::default();
        capabilities.filesystem.read.push("/tmp".into());
        capabilities.filesystem.write.push("/tmp/output".into());
        let context = PluginContext {
            data_dir: "/tmp/data".into(),
            cache_dir: "/tmp/cache".into(),
            config: HashMap::new(),
            capabilities,
        };
        // Verify capability checks work via context fields
        let can_read = context
            .capabilities
            .filesystem
            .read
            .iter()
            .any(|p| Path::new("/tmp/test.txt").starts_with(p));
        assert!(can_read);
        let cant_read = context
            .capabilities
            .filesystem
            .read
            .iter()
            .any(|p| Path::new("/etc/passwd").starts_with(p));
        assert!(!cant_read);
        let can_write = context
            .capabilities
            .filesystem
            .write
            .iter()
            .any(|p| Path::new("/tmp/output/file.txt").starts_with(p));
        assert!(can_write);
        let cant_write = context
            .capabilities
            .filesystem
            .write
            .iter()
            .any(|p| Path::new("/tmp/file.txt").starts_with(p));
        assert!(!cant_write);
    }

    /// Network access defaults to disabled and is toggled via the capability flag.
    #[test]
    fn test_host_functions_network_access() {
        let mut context = PluginContext {
            data_dir: "/tmp/data".into(),
            cache_dir: "/tmp/cache".into(),
            config: HashMap::new(),
            capabilities: Default::default(),
        };
        assert!(!context.capabilities.network.enabled);
        context.capabilities.network.enabled = true;
        assert!(context.capabilities.network.enabled);
    }

    /// All host functions should register against a fresh linker without error.
    #[test]
    fn test_linker_setup() {
        let engine = Engine::default();
        let mut linker = Linker::<PluginStoreData>::new(&engine);
        let result = HostFunctions::setup_linker(&mut linker);
        assert!(result.is_ok());
    }
}

View file

@ -0,0 +1,341 @@
//! Capability-based security for plugins
use anyhow::{Result, anyhow};
use pinakes_plugin_api::Capabilities;
use std::path::{Path, PathBuf};
/// Capability enforcer validates and enforces plugin capabilities
pub struct CapabilityEnforcer {
/// Maximum allowed memory per plugin (bytes)
max_memory_limit: usize,
/// Maximum allowed CPU time per plugin (milliseconds)
max_cpu_time_limit: u64,
/// Allowed filesystem read paths (system-wide)
allowed_read_paths: Vec<PathBuf>,
/// Allowed filesystem write paths (system-wide)
allowed_write_paths: Vec<PathBuf>,
/// Whether to allow network access by default
allow_network_default: bool,
}
impl CapabilityEnforcer {
    /// Create a new capability enforcer with default limits
    ///
    /// Defaults: 512 MB memory, 60 s CPU time, no filesystem paths
    /// (deny-all), network disabled.
    pub fn new() -> Self {
        Self {
            max_memory_limit: 512 * 1024 * 1024, // 512 MB
            max_cpu_time_limit: 60 * 1000,       // 60 seconds
            allowed_read_paths: vec![],
            allowed_write_paths: vec![],
            allow_network_default: false,
        }
    }

    /// Set maximum memory limit
    pub fn with_max_memory(mut self, bytes: usize) -> Self {
        self.max_memory_limit = bytes;
        self
    }

    /// Set maximum CPU time limit
    pub fn with_max_cpu_time(mut self, milliseconds: u64) -> Self {
        self.max_cpu_time_limit = milliseconds;
        self
    }

    /// Add allowed read path
    pub fn allow_read_path(mut self, path: PathBuf) -> Self {
        self.allowed_read_paths.push(path);
        self
    }

    /// Add allowed write path
    pub fn allow_write_path(mut self, path: PathBuf) -> Self {
        self.allowed_write_paths.push(path);
        self
    }

    /// Set default network access policy
    pub fn with_network_default(mut self, allow: bool) -> Self {
        self.allow_network_default = allow;
        self
    }

    /// Validate capabilities requested by a plugin
    ///
    /// # Errors
    /// Fails if the requested memory, CPU time, filesystem paths, or
    /// network access exceed what this enforcer's policy allows.
    pub fn validate_capabilities(&self, capabilities: &Capabilities) -> Result<()> {
        // Validate memory limit
        if let Some(memory) = capabilities.max_memory_bytes
            && memory > self.max_memory_limit
        {
            return Err(anyhow!(
                "Requested memory ({} bytes) exceeds limit ({} bytes)",
                memory,
                self.max_memory_limit
            ));
        }
        // Validate CPU time limit
        if let Some(cpu_time) = capabilities.max_cpu_time_ms
            && cpu_time > self.max_cpu_time_limit
        {
            return Err(anyhow!(
                "Requested CPU time ({} ms) exceeds limit ({} ms)",
                cpu_time,
                self.max_cpu_time_limit
            ));
        }
        // Validate filesystem access
        self.validate_filesystem_access(capabilities)?;
        // Validate network access
        if capabilities.network.enabled && !self.allow_network_default {
            return Err(anyhow!(
                "Plugin requests network access, but network access is disabled by policy"
            ));
        }
        Ok(())
    }

    /// Validate filesystem access capabilities
    fn validate_filesystem_access(&self, capabilities: &Capabilities) -> Result<()> {
        // Check read paths
        for path in &capabilities.filesystem.read {
            if !self.is_read_allowed(path) {
                return Err(anyhow!(
                    "Plugin requests read access to {:?} which is not in allowed paths",
                    path
                ));
            }
        }
        // Check write paths
        for path in &capabilities.filesystem.write {
            if !self.is_write_allowed(path) {
                return Err(anyhow!(
                    "Plugin requests write access to {:?} which is not in allowed paths",
                    path
                ));
            }
        }
        Ok(())
    }

    /// Check if a path is allowed for reading
    ///
    /// Paths are canonicalized before comparison to prevent `..` traversal;
    /// a path that cannot be canonicalized (e.g. does not exist) is denied.
    pub fn is_read_allowed(&self, path: &Path) -> bool {
        if self.allowed_read_paths.is_empty() {
            return false; // deny-all when unconfigured
        }
        let Ok(canonical) = path.canonicalize() else {
            return false;
        };
        self.allowed_read_paths.iter().any(|allowed| {
            allowed
                .canonicalize()
                .is_ok_and(|a| canonical.starts_with(a))
        })
    }

    /// Check if a path is allowed for writing
    ///
    /// For a not-yet-existing file, the parent directory is canonicalized
    /// and the file name re-attached so new files can still be validated.
    pub fn is_write_allowed(&self, path: &Path) -> bool {
        if self.allowed_write_paths.is_empty() {
            return false; // deny-all when unconfigured
        }
        let canonical = if path.exists() {
            path.canonicalize().ok()
        } else {
            path.parent()
                .and_then(|p| p.canonicalize().ok())
                .map(|p| p.join(path.file_name().unwrap_or_default()))
        };
        let Some(canonical) = canonical else {
            return false;
        };
        self.allowed_write_paths.iter().any(|allowed| {
            allowed
                .canonicalize()
                .is_ok_and(|a| canonical.starts_with(a))
        })
    }

    /// Check if network access is allowed for a plugin
    pub fn is_network_allowed(&self, capabilities: &Capabilities) -> bool {
        capabilities.network.enabled && self.allow_network_default
    }

    /// Check if a specific domain is allowed
    ///
    /// With no domain restrictions the system default policy applies;
    /// otherwise the domain must match the allow-list (case-insensitive).
    pub fn is_domain_allowed(&self, capabilities: &Capabilities, domain: &str) -> bool {
        if !capabilities.network.enabled {
            return false;
        }
        // Single match replaces the previous redundant
        // `is_none()` check + `map(...).unwrap_or(false)` pair.
        match &capabilities.network.allowed_domains {
            // If no domain restrictions, fall back to the default policy
            None => self.allow_network_default,
            // Check against allowed domains list
            Some(domains) => domains.iter().any(|d| d.eq_ignore_ascii_case(domain)),
        }
    }

    /// Get effective memory limit for a plugin
    ///
    /// The plugin's requested limit, capped at the system maximum.
    pub fn get_memory_limit(&self, capabilities: &Capabilities) -> usize {
        capabilities
            .max_memory_bytes
            .unwrap_or(self.max_memory_limit)
            .min(self.max_memory_limit)
    }

    /// Get effective CPU time limit for a plugin
    ///
    /// The plugin's requested limit, capped at the system maximum.
    pub fn get_cpu_time_limit(&self, capabilities: &Capabilities) -> u64 {
        capabilities
            .max_cpu_time_ms
            .unwrap_or(self.max_cpu_time_limit)
            .min(self.max_cpu_time_limit)
    }
}
impl Default for CapabilityEnforcer {
    /// Equivalent to [`CapabilityEnforcer::new`]: deny-all defaults.
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[allow(unused_imports)]
    use pinakes_plugin_api::{FilesystemCapability, NetworkCapability};

    /// Requests at or under the memory cap pass; over-cap requests fail.
    #[test]
    fn test_validate_memory_limit() {
        let enforcer = CapabilityEnforcer::new().with_max_memory(100 * 1024 * 1024); // 100 MB
        let mut caps = Capabilities::default();
        caps.max_memory_bytes = Some(50 * 1024 * 1024); // 50 MB - OK
        assert!(enforcer.validate_capabilities(&caps).is_ok());
        caps.max_memory_bytes = Some(200 * 1024 * 1024); // 200 MB - exceeds limit
        assert!(enforcer.validate_capabilities(&caps).is_err());
    }

    /// Same cap behavior for CPU time as for memory.
    #[test]
    fn test_validate_cpu_time_limit() {
        let enforcer = CapabilityEnforcer::new().with_max_cpu_time(30_000); // 30 seconds
        let mut caps = Capabilities::default();
        caps.max_cpu_time_ms = Some(10_000); // 10 seconds - OK
        assert!(enforcer.validate_capabilities(&caps).is_ok());
        caps.max_cpu_time_ms = Some(60_000); // 60 seconds - exceeds limit
        assert!(enforcer.validate_capabilities(&caps).is_err());
    }

    /// Reads inside an allowed directory pass; everything else is denied.
    #[test]
    fn test_filesystem_read_allowed() {
        // Use real temp directories so canonicalize works
        let tmp = tempfile::tempdir().unwrap();
        let allowed_dir = tmp.path().join("allowed");
        std::fs::create_dir_all(&allowed_dir).unwrap();
        let test_file = allowed_dir.join("test.txt");
        std::fs::write(&test_file, "test").unwrap();
        let enforcer = CapabilityEnforcer::new().allow_read_path(allowed_dir.clone());
        assert!(enforcer.is_read_allowed(&test_file));
        assert!(!enforcer.is_read_allowed(Path::new("/etc/passwd")));
    }

    /// An unconfigured enforcer denies all reads (deny-all default).
    #[test]
    fn test_filesystem_read_denied_when_empty() {
        let enforcer = CapabilityEnforcer::new();
        assert!(!enforcer.is_read_allowed(Path::new("/tmp/test.txt")));
    }

    /// Write checks cover both existing files and new files whose parent exists.
    #[test]
    fn test_filesystem_write_allowed() {
        let tmp = tempfile::tempdir().unwrap();
        let output_dir = tmp.path().join("output");
        std::fs::create_dir_all(&output_dir).unwrap();
        // Existing file in allowed dir
        let existing = output_dir.join("file.txt");
        std::fs::write(&existing, "test").unwrap();
        let enforcer = CapabilityEnforcer::new().allow_write_path(output_dir.clone());
        assert!(enforcer.is_write_allowed(&existing));
        // New file in allowed dir (parent exists)
        assert!(enforcer.is_write_allowed(&output_dir.join("new_file.txt")));
        assert!(!enforcer.is_write_allowed(Path::new("/etc/config")));
    }

    /// An unconfigured enforcer denies all writes (deny-all default).
    #[test]
    fn test_filesystem_write_denied_when_empty() {
        let enforcer = CapabilityEnforcer::new();
        assert!(!enforcer.is_write_allowed(Path::new("/tmp/file.txt")));
    }

    /// Network access requires both the plugin flag and the system default.
    #[test]
    fn test_network_allowed() {
        let enforcer = CapabilityEnforcer::new().with_network_default(true);
        let mut caps = Capabilities::default();
        caps.network.enabled = true;
        assert!(enforcer.is_network_allowed(&caps));
        caps.network.enabled = false;
        assert!(!enforcer.is_network_allowed(&caps));
    }

    /// With an allow-list, only listed domains pass.
    #[test]
    fn test_domain_restrictions() {
        let enforcer = CapabilityEnforcer::new().with_network_default(true);
        let mut caps = Capabilities::default();
        caps.network.enabled = true;
        caps.network.allowed_domains = Some(vec![
            "api.example.com".to_string(),
            "cdn.example.com".to_string(),
        ]);
        assert!(enforcer.is_domain_allowed(&caps, "api.example.com"));
        assert!(enforcer.is_domain_allowed(&caps, "cdn.example.com"));
        assert!(!enforcer.is_domain_allowed(&caps, "evil.com"));
    }

    /// Effective limits are min(plugin request, system max), defaulting to system max.
    #[test]
    fn test_get_effective_limits() {
        let enforcer = CapabilityEnforcer::new()
            .with_max_memory(100 * 1024 * 1024)
            .with_max_cpu_time(30_000);
        let mut caps = Capabilities::default();
        // No limits specified - use defaults
        assert_eq!(enforcer.get_memory_limit(&caps), 100 * 1024 * 1024);
        assert_eq!(enforcer.get_cpu_time_limit(&caps), 30_000);
        // Plugin requests lower limits - use plugin's
        caps.max_memory_bytes = Some(50 * 1024 * 1024);
        caps.max_cpu_time_ms = Some(10_000);
        assert_eq!(enforcer.get_memory_limit(&caps), 50 * 1024 * 1024);
        assert_eq!(enforcer.get_cpu_time_limit(&caps), 10_000);
        // Plugin requests higher limits - cap at system max
        caps.max_memory_bytes = Some(200 * 1024 * 1024);
        caps.max_cpu_time_ms = Some(60_000);
        assert_eq!(enforcer.get_memory_limit(&caps), 100 * 1024 * 1024);
        assert_eq!(enforcer.get_cpu_time_limit(&caps), 30_000);
    }
}

View file

@ -0,0 +1,52 @@
//! Social features: ratings, comments, favorites, and share links.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
use crate::users::UserId;
/// A user's rating for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Rating {
    pub id: Uuid,
    pub user_id: UserId,
    pub media_id: MediaId,
    /// Star rating value. Range is not enforced by this type —
    /// presumably validated at the write site (e.g. 1–5); TODO confirm.
    pub stars: u8,
    /// Optional free-text review accompanying the rating.
    pub review_text: Option<String>,
    pub created_at: DateTime<Utc>,
}
/// A comment on a media item, supporting threaded replies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Comment {
    pub id: Uuid,
    pub user_id: UserId,
    pub media_id: MediaId,
    /// Parent comment for threaded replies; `None` for top-level comments.
    pub parent_comment_id: Option<Uuid>,
    pub text: String,
    pub created_at: DateTime<Utc>,
}
/// A user's favorite bookmark for a media item.
///
/// No separate ID: the (user_id, media_id) pair identifies the bookmark.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Favorite {
    pub user_id: UserId,
    pub media_id: MediaId,
    pub created_at: DateTime<Utc>,
}
/// A shareable link to a media item with optional password and expiration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShareLink {
    pub id: Uuid,
    pub media_id: MediaId,
    pub created_by: UserId,
    /// Opaque token embedded in the share URL; used for lookup.
    pub token: String,
    /// Password hash protecting the link, if any. Never serialized into
    /// responses (`skip_serializing`), but still accepted on deserialize.
    #[serde(skip_serializing)]
    pub password_hash: Option<String>,
    /// Expiration time; `None` means the link does not expire.
    pub expires_at: Option<DateTime<Utc>>,
    pub view_count: u64,
    pub created_at: DateTime<Utc>,
}

View file

@ -7,9 +7,18 @@ use std::sync::Arc;
use uuid::Uuid;
use chrono::{DateTime, Utc};
use crate::analytics::UsageEvent;
use crate::enrichment::ExternalMetadata;
use crate::error::Result;
use crate::model::*;
use crate::playlists::Playlist;
use crate::search::{SearchRequest, SearchResults};
use crate::social::{Comment, Rating, ShareLink};
use crate::subtitles::Subtitle;
use crate::transcode::{TranscodeSession, TranscodeStatus};
use crate::users::UserId;
/// Statistics about the database.
#[derive(Debug, Clone, Default)]
@ -187,6 +196,167 @@ pub trait StorageBackend: Send + Sync + 'static {
    // Library statistics
    /// Compute aggregate statistics for the whole library.
    async fn library_statistics(&self) -> Result<LibraryStatistics>;

    // User Management
    /// List all user accounts.
    async fn list_users(&self) -> Result<Vec<crate::users::User>>;
    /// Look up a user by ID.
    async fn get_user(&self, id: crate::users::UserId) -> Result<crate::users::User>;
    /// Look up a user by username.
    async fn get_user_by_username(&self, username: &str) -> Result<crate::users::User>;
    /// Create a user. `password_hash` is a pre-hashed credential — callers must
    /// never pass a plaintext password here.
    async fn create_user(
        &self,
        username: &str,
        password_hash: &str,
        role: crate::config::UserRole,
        profile: Option<crate::users::UserProfile>,
    ) -> Result<crate::users::User>;
    /// Update a user; fields passed as `None` are left unchanged.
    async fn update_user(
        &self,
        id: crate::users::UserId,
        password_hash: Option<&str>,
        role: Option<crate::config::UserRole>,
        profile: Option<crate::users::UserProfile>,
    ) -> Result<crate::users::User>;
    /// Delete a user account.
    async fn delete_user(&self, id: crate::users::UserId) -> Result<()>;
    /// List the library roots a user has access to.
    async fn get_user_libraries(
        &self,
        user_id: crate::users::UserId,
    ) -> Result<Vec<crate::users::UserLibraryAccess>>;
    /// Grant a user access to a library root at the given permission level.
    async fn grant_library_access(
        &self,
        user_id: crate::users::UserId,
        root_path: &str,
        permission: crate::users::LibraryPermission,
    ) -> Result<()>;
    /// Revoke a user's access to a library root.
    async fn revoke_library_access(
        &self,
        user_id: crate::users::UserId,
        root_path: &str,
    ) -> Result<()>;

    // ===== Ratings =====
    /// Record a user's star rating (and optional review text) for a media item.
    async fn rate_media(
        &self,
        user_id: UserId,
        media_id: MediaId,
        stars: u8,
        review: Option<&str>,
    ) -> Result<Rating>;
    /// All ratings on a media item.
    async fn get_media_ratings(&self, media_id: MediaId) -> Result<Vec<Rating>>;
    /// A specific user's rating on a media item, if present.
    async fn get_user_rating(&self, user_id: UserId, media_id: MediaId) -> Result<Option<Rating>>;
    /// Delete a rating by its ID.
    async fn delete_rating(&self, id: Uuid) -> Result<()>;

    // ===== Comments =====
    /// Add a comment; `parent_id` links a reply to an existing comment.
    async fn add_comment(
        &self,
        user_id: UserId,
        media_id: MediaId,
        text: &str,
        parent_id: Option<Uuid>,
    ) -> Result<Comment>;
    /// All comments on a media item.
    async fn get_media_comments(&self, media_id: MediaId) -> Result<Vec<Comment>>;
    /// Delete a comment by its ID.
    async fn delete_comment(&self, id: Uuid) -> Result<()>;

    // ===== Favorites =====
    /// Mark a media item as a user's favorite.
    async fn add_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>;
    /// Remove a media item from a user's favorites.
    async fn remove_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<()>;
    /// Page through the media items a user has favorited.
    async fn get_user_favorites(
        &self,
        user_id: UserId,
        pagination: &Pagination,
    ) -> Result<Vec<MediaItem>>;
    /// Whether a user has favorited a media item.
    async fn is_favorite(&self, user_id: UserId, media_id: MediaId) -> Result<bool>;

    // ===== Share Links =====
    /// Persist a share link; `password_hash` is pre-hashed (never plaintext).
    async fn create_share_link(
        &self,
        media_id: MediaId,
        created_by: UserId,
        token: &str,
        password_hash: Option<&str>,
        expires_at: Option<DateTime<Utc>>,
    ) -> Result<ShareLink>;
    /// Look up a share link by its public token.
    async fn get_share_link(&self, token: &str) -> Result<ShareLink>;
    /// Increment the view counter of a share link.
    async fn increment_share_views(&self, token: &str) -> Result<()>;
    /// Delete a share link by its ID.
    async fn delete_share_link(&self, id: Uuid) -> Result<()>;

    // ===== Playlists =====
    /// Create a playlist; smart playlists carry a `filter_query` instead of
    /// explicit items.
    async fn create_playlist(
        &self,
        owner_id: UserId,
        name: &str,
        description: Option<&str>,
        is_public: bool,
        is_smart: bool,
        filter_query: Option<&str>,
    ) -> Result<Playlist>;
    /// Look up a playlist by ID.
    async fn get_playlist(&self, id: Uuid) -> Result<Playlist>;
    /// List playlists, optionally restricted to one owner.
    async fn list_playlists(&self, owner_id: Option<UserId>) -> Result<Vec<Playlist>>;
    /// Update a playlist; `None` fields are left unchanged.
    async fn update_playlist(
        &self,
        id: Uuid,
        name: Option<&str>,
        description: Option<&str>,
        is_public: Option<bool>,
    ) -> Result<Playlist>;
    /// Delete a playlist by ID.
    async fn delete_playlist(&self, id: Uuid) -> Result<()>;
    /// Insert a media item into a playlist at `position`.
    async fn add_to_playlist(
        &self,
        playlist_id: Uuid,
        media_id: MediaId,
        position: i32,
    ) -> Result<()>;
    /// Remove a media item from a playlist.
    async fn remove_from_playlist(&self, playlist_id: Uuid, media_id: MediaId) -> Result<()>;
    /// The media items of a playlist (ordering defined by the implementation).
    async fn get_playlist_items(&self, playlist_id: Uuid) -> Result<Vec<MediaItem>>;
    /// Move an item within a playlist to `new_position`.
    async fn reorder_playlist(
        &self,
        playlist_id: Uuid,
        media_id: MediaId,
        new_position: i32,
    ) -> Result<()>;

    // ===== Analytics =====
    /// Persist a single usage event.
    async fn record_usage_event(&self, event: &UsageEvent) -> Result<()>;
    /// Fetch usage events, optionally filtered by media and/or user, capped at `limit`.
    async fn get_usage_events(
        &self,
        media_id: Option<MediaId>,
        user_id: Option<UserId>,
        limit: u64,
    ) -> Result<Vec<UsageEvent>>;
    /// Media items ranked by view count, paired with their counts.
    async fn get_most_viewed(&self, limit: u64) -> Result<Vec<(MediaItem, u64)>>;
    /// Media items a user viewed most recently.
    async fn get_recently_viewed(&self, user_id: UserId, limit: u64) -> Result<Vec<MediaItem>>;
    /// Upsert a user's playback position (seconds) for a media item.
    async fn update_watch_progress(
        &self,
        user_id: UserId,
        media_id: MediaId,
        progress_secs: f64,
    ) -> Result<()>;
    /// A user's saved playback position for a media item, if any.
    async fn get_watch_progress(&self, user_id: UserId, media_id: MediaId) -> Result<Option<f64>>;
    /// Delete usage events older than `before`; returns a count
    /// (presumably the number of deleted events — confirm per backend).
    async fn cleanup_old_events(&self, before: DateTime<Utc>) -> Result<u64>;

    // ===== Subtitles =====
    /// Persist a subtitle track.
    async fn add_subtitle(&self, subtitle: &Subtitle) -> Result<()>;
    /// All subtitle tracks for a media item.
    async fn get_media_subtitles(&self, media_id: MediaId) -> Result<Vec<Subtitle>>;
    /// Delete a subtitle track by ID.
    async fn delete_subtitle(&self, id: Uuid) -> Result<()>;
    /// Update a subtitle track's sync offset in milliseconds.
    async fn update_subtitle_offset(&self, id: Uuid, offset_ms: i64) -> Result<()>;

    // ===== External Metadata (Enrichment) =====
    /// Persist metadata fetched from an external provider.
    async fn store_external_metadata(&self, meta: &ExternalMetadata) -> Result<()>;
    /// All external metadata records for a media item.
    async fn get_external_metadata(&self, media_id: MediaId) -> Result<Vec<ExternalMetadata>>;
    /// Delete an external metadata record by ID.
    async fn delete_external_metadata(&self, id: Uuid) -> Result<()>;

    // ===== Transcode Sessions =====
    /// Persist a transcode session.
    async fn create_transcode_session(&self, session: &TranscodeSession) -> Result<()>;
    /// Look up a transcode session by ID.
    async fn get_transcode_session(&self, id: Uuid) -> Result<TranscodeSession>;
    /// List transcode sessions, optionally restricted to one media item.
    async fn list_transcode_sessions(
        &self,
        media_id: Option<MediaId>,
    ) -> Result<Vec<TranscodeSession>>;
    /// Update a session's status and progress (0.0..=1.0).
    async fn update_transcode_status(
        &self,
        id: Uuid,
        status: TranscodeStatus,
        progress: f32,
    ) -> Result<()>;
    /// Delete transcode sessions that expired before `before`; returns a count.
    async fn cleanup_expired_transcodes(&self, before: DateTime<Utc>) -> Result<u64>;
}
/// Comprehensive library statistics.

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,62 @@
//! Subtitle management for video media items.
use std::path::PathBuf;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::model::MediaId;
/// A subtitle track associated with a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Subtitle {
    pub id: Uuid,
    /// The media item this track belongs to.
    pub media_id: MediaId,
    /// Language tag, if known.
    pub language: Option<String>,
    /// On-disk/container format of the track.
    pub format: SubtitleFormat,
    /// Path to an external subtitle file; `None` for embedded tracks.
    pub file_path: Option<PathBuf>,
    /// Whether the track is embedded inside the media container.
    pub is_embedded: bool,
    /// Stream index within the container for embedded tracks.
    pub track_index: Option<usize>,
    /// Sync offset applied on playback, in milliseconds (may be negative).
    pub offset_ms: i64,
    pub created_at: DateTime<Utc>,
}
/// Supported subtitle formats.
///
/// Serialized in lowercase (matching `Display`/`FromStr` below).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SubtitleFormat {
    /// SubRip (.srt).
    Srt,
    /// WebVTT (.vtt).
    Vtt,
    /// Advanced SubStation Alpha (.ass).
    Ass,
    /// SubStation Alpha (.ssa).
    Ssa,
    /// Presentation Graphic Stream (Blu-ray bitmap subtitles).
    Pgs,
}
impl std::fmt::Display for SubtitleFormat {
    /// Writes the lowercase name of the format (`srt`, `vtt`, …), matching the
    /// strings accepted by `FromStr` and produced by serde.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::Srt => "srt",
            Self::Vtt => "vtt",
            Self::Ass => "ass",
            Self::Ssa => "ssa",
            Self::Pgs => "pgs",
        })
    }
}
impl std::str::FromStr for SubtitleFormat {
    type Err = String;

    /// Parses the lowercase format name produced by `Display`; any other
    /// string yields a descriptive error.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(match s {
            "srt" => Self::Srt,
            "vtt" => Self::Vtt,
            "ass" => Self::Ass,
            "ssa" => Self::Ssa,
            "pgs" => Self::Pgs,
            other => return Err(format!("unknown subtitle format: {other}")),
        })
    }
}

View file

@ -5,7 +5,7 @@ use tracing::{info, warn};
use crate::config::ThumbnailConfig;
use crate::error::{PinakesError, Result};
use crate::media_type::{MediaCategory, MediaType};
use crate::media_type::{BuiltinMediaType, MediaCategory, MediaType};
use crate::model::MediaId;
/// Generate a thumbnail for a media file and return the path to the thumbnail.
@ -41,7 +41,7 @@ pub fn generate_thumbnail_with_config(
MediaCategory::Image => {
if media_type.is_raw() {
generate_raw_thumbnail(source_path, &thumb_path, config)
} else if media_type == MediaType::Heic {
} else if media_type == MediaType::Builtin(BuiltinMediaType::Heic) {
generate_heic_thumbnail(source_path, &thumb_path, config)
} else {
generate_image_thumbnail(source_path, &thumb_path, config)
@ -49,8 +49,12 @@ pub fn generate_thumbnail_with_config(
}
MediaCategory::Video => generate_video_thumbnail(source_path, &thumb_path, config),
MediaCategory::Document => match media_type {
MediaType::Pdf => generate_pdf_thumbnail(source_path, &thumb_path, config),
MediaType::Epub => generate_epub_thumbnail(source_path, &thumb_path, config),
MediaType::Builtin(BuiltinMediaType::Pdf) => {
generate_pdf_thumbnail(source_path, &thumb_path, config)
}
MediaType::Builtin(BuiltinMediaType::Epub) => {
generate_epub_thumbnail(source_path, &thumb_path, config)
}
_ => return Ok(None),
},
_ => return Ok(None),

View file

@ -0,0 +1,545 @@
//! Transcoding service for media files using FFmpeg.
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tokio::sync::{RwLock, Semaphore};
use uuid::Uuid;
use crate::config::{TranscodeProfile, TranscodingConfig};
use crate::model::MediaId;
use crate::storage::DynStorageBackend;
use crate::users::UserId;
/// A transcoding session for a media item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscodeSession {
    pub id: Uuid,
    /// The media item being transcoded.
    pub media_id: MediaId,
    /// User who requested the transcode, if any.
    pub user_id: Option<UserId>,
    /// Name of the transcode profile in use.
    pub profile: String,
    /// Directory holding the HLS playlist/segments for this session.
    pub cache_path: PathBuf,
    /// Current lifecycle state.
    pub status: TranscodeStatus,
    /// Progress in 0.0..=1.0.
    pub progress: f32,
    pub created_at: DateTime<Utc>,
    /// When the cached output expires and becomes eligible for cleanup.
    pub expires_at: Option<DateTime<Utc>>,
    /// Duration of the source media in seconds, used for progress calculation.
    #[serde(default)]
    pub duration_secs: Option<f64>,
    /// Handle to cancel the child FFmpeg process.
    // Not serialized: only meaningful for the in-memory session of this process.
    #[serde(skip)]
    pub child_cancel: Option<Arc<tokio::sync::Notify>>,
}
/// Status of a transcode session.
///
/// Serialized as a tagged enum (`{"state": "failed", "error": "…"}`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "state")]
pub enum TranscodeStatus {
    /// Queued but not yet started.
    Pending,
    /// FFmpeg is currently running.
    Transcoding,
    /// Finished successfully.
    Complete,
    /// FFmpeg failed; `error` carries diagnostic text.
    Failed { error: String },
    /// Cancelled by a user before completion.
    Cancelled,
}
impl TranscodeStatus {
    /// Stable string form used for database persistence (inverse of `from_db`).
    pub fn as_str(&self) -> &str {
        match self {
            Self::Pending => "pending",
            Self::Transcoding => "transcoding",
            Self::Complete => "complete",
            Self::Failed { .. } => "failed",
            Self::Cancelled => "cancelled",
        }
    }

    /// Reconstructs a status from its persisted string plus the separately
    /// stored error message; unrecognized strings degrade to `Pending` with a
    /// warning rather than failing the load.
    pub fn from_db(status: &str, error_message: Option<&str>) -> Self {
        if status == "failed" {
            let error = error_message.unwrap_or("unknown error").to_string();
            return Self::Failed { error };
        }
        match status {
            "pending" => Self::Pending,
            "transcoding" => Self::Transcoding,
            "complete" => Self::Complete,
            "cancelled" => Self::Cancelled,
            other => {
                tracing::warn!(
                    "unknown transcode status '{}', defaulting to Pending",
                    other
                );
                Self::Pending
            }
        }
    }

    /// The failure message, when this status is `Failed`.
    pub fn error_message(&self) -> Option<&str> {
        if let Self::Failed { error } = self {
            Some(error)
        } else {
            None
        }
    }
}
/// Service managing transcoding sessions and FFmpeg invocations.
pub struct TranscodeService {
    /// Transcoding configuration (profiles, limits, cache location).
    pub config: TranscodingConfig,
    /// In-memory session table, keyed by session ID; shared with spawned tasks.
    pub sessions: Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
    // Bounds the number of concurrently running FFmpeg jobs.
    semaphore: Arc<Semaphore>,
}
impl TranscodeService {
    /// Builds a service from configuration. `max_concurrent` is clamped to at
    /// least 1 so a zero/misconfigured value cannot deadlock all transcodes.
    pub fn new(config: TranscodingConfig) -> Self {
        let max_concurrent = config.max_concurrent.max(1);
        Self {
            sessions: Arc::new(RwLock::new(HashMap::new())),
            semaphore: Arc::new(Semaphore::new(max_concurrent)),
            config,
        }
    }

    /// Whether transcoding is enabled in configuration.
    pub fn is_enabled(&self) -> bool {
        self.config.enabled
    }

    /// Root directory for cached transcode output; falls back to
    /// `/tmp/pinakes-transcode` when unset.
    pub fn cache_dir(&self) -> PathBuf {
        self.config
            .cache_dir
            .clone()
            .unwrap_or_else(|| PathBuf::from("/tmp/pinakes-transcode"))
    }

    /// Start a transcode job for a media item.
    ///
    /// Validates the profile, creates the session directory and records the
    /// session both in the database and in memory, then spawns a background
    /// task that runs FFmpeg (gated by the concurrency semaphore) and keeps
    /// both stores updated. Returns the new session ID immediately.
    pub async fn start_transcode(
        &self,
        media_id: MediaId,
        source_path: &Path,
        profile_name: &str,
        duration_secs: Option<f64>,
        storage: &DynStorageBackend,
    ) -> crate::error::Result<Uuid> {
        // Resolve the named profile from config; unknown names are an error.
        let profile = self
            .config
            .profiles
            .iter()
            .find(|p| p.name == profile_name)
            .cloned()
            .ok_or_else(|| {
                crate::error::PinakesError::InvalidOperation(format!(
                    "unknown transcode profile: {}",
                    profile_name
                ))
            })?;
        let session_id = Uuid::now_v7();
        // Each session gets its own directory under the cache root.
        let session_dir = self.cache_dir().join(session_id.to_string());
        tokio::fs::create_dir_all(&session_dir).await.map_err(|e| {
            crate::error::PinakesError::InvalidOperation(format!(
                "failed to create session directory: {}",
                e
            ))
        })?;
        let expires_at =
            Some(Utc::now() + chrono::Duration::hours(self.config.cache_ttl_hours as i64));
        // Notify handle used by cancel_transcode to kill the FFmpeg child.
        let cancel_notify = Arc::new(tokio::sync::Notify::new());
        let session = TranscodeSession {
            id: session_id,
            media_id,
            user_id: None,
            profile: profile_name.to_string(),
            cache_path: session_dir.clone(),
            status: TranscodeStatus::Pending,
            progress: 0.0,
            created_at: Utc::now(),
            expires_at,
            duration_secs,
            child_cancel: Some(cancel_notify.clone()),
        };
        // Store session in DB
        storage.create_transcode_session(&session).await?;
        // Store in memory
        {
            let mut sessions = self.sessions.write().await;
            sessions.insert(session_id, session);
        }
        // Spawn the FFmpeg task
        let sessions = self.sessions.clone();
        let semaphore = self.semaphore.clone();
        let source = source_path.to_path_buf();
        let hw_accel = self.config.hardware_acceleration.clone();
        let storage = storage.clone();
        let cancel = cancel_notify.clone();
        tokio::spawn(async move {
            // Acquire semaphore permit to limit concurrency
            let _permit = match semaphore.acquire().await {
                Ok(permit) => permit,
                Err(e) => {
                    // acquire() only fails when the semaphore is closed; mark
                    // the session failed in both stores and bail out.
                    tracing::error!("failed to acquire transcode semaphore: {}", e);
                    let error_msg = format!("semaphore closed: {}", e);
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        sess.status = TranscodeStatus::Failed {
                            error: error_msg.clone(),
                        };
                    }
                    if let Err(e) = storage
                        .update_transcode_status(
                            session_id,
                            TranscodeStatus::Failed { error: error_msg },
                            0.0,
                        )
                        .await
                    {
                        tracing::error!("failed to update transcode status: {}", e);
                    }
                    return;
                }
            };
            // Mark as transcoding
            {
                let mut s = sessions.write().await;
                if let Some(sess) = s.get_mut(&session_id) {
                    sess.status = TranscodeStatus::Transcoding;
                }
            }
            if let Err(e) = storage
                .update_transcode_status(session_id, TranscodeStatus::Transcoding, 0.0)
                .await
            {
                tracing::error!("failed to update transcode status: {}", e);
            }
            // Build FFmpeg args and run
            let args = get_ffmpeg_args(&source, &session_dir, &profile, hw_accel.as_deref());
            match run_ffmpeg(&args, &sessions, session_id, duration_secs, cancel).await {
                Ok(()) => {
                    // NOTE(review): the sessions write lock is held across the
                    // storage await below (tokio::RwLock, so this is legal) —
                    // confirm the hold time is acceptable under load.
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        sess.status = TranscodeStatus::Complete;
                        sess.progress = 1.0;
                    }
                    if let Err(e) = storage
                        .update_transcode_status(session_id, TranscodeStatus::Complete, 1.0)
                        .await
                    {
                        tracing::error!("failed to update transcode status: {}", e);
                    }
                }
                Err(e) => {
                    let error_msg = e.to_string();
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        // Don't overwrite Cancelled status
                        if matches!(sess.status, TranscodeStatus::Cancelled) {
                            return;
                        }
                        sess.status = TranscodeStatus::Failed {
                            error: error_msg.clone(),
                        };
                    }
                    // Release the lock before the storage round-trip.
                    drop(s);
                    if let Err(e) = storage
                        .update_transcode_status(
                            session_id,
                            TranscodeStatus::Failed { error: error_msg },
                            0.0,
                        )
                        .await
                    {
                        tracing::error!("failed to update transcode status: {}", e);
                    }
                }
            }
        });
        Ok(session_id)
    }

    /// Cancel a transcode session and clean up cache files.
    ///
    /// Marks the in-memory session cancelled, signals the FFmpeg child to be
    /// killed, persists the cancelled status, then removes the session's
    /// cache directory (best effort).
    pub async fn cancel_transcode(
        &self,
        session_id: Uuid,
        storage: &DynStorageBackend,
    ) -> crate::error::Result<()> {
        let (cache_path, cancel_notify) = {
            let mut sessions = self.sessions.write().await;
            if let Some(sess) = sessions.get_mut(&session_id) {
                sess.status = TranscodeStatus::Cancelled;
                // take() so a second cancel cannot re-signal.
                let cancel = sess.child_cancel.take();
                (Some(sess.cache_path.clone()), cancel)
            } else {
                (None, None)
            }
        };
        // Signal the child process to be killed
        if let Some(notify) = cancel_notify {
            notify.notify_one();
        }
        storage
            .update_transcode_status(session_id, TranscodeStatus::Cancelled, 0.0)
            .await?;
        // Clean up cache directory
        if let Some(path) = cache_path
            && let Err(e) = tokio::fs::remove_dir_all(&path).await {
            tracing::error!("failed to remove transcode cache directory: {}", e);
        }
        Ok(())
    }

    /// Remove expired transcode sessions and their cache directories.
    pub async fn cleanup_expired(&self) {
        let now = Utc::now();
        // Collect expired entries and remove them from the map under the lock.
        let expired: Vec<(Uuid, PathBuf)> = {
            let mut sessions = self.sessions.write().await;
            let expired: Vec<(Uuid, PathBuf)> = sessions
                .iter()
                .filter_map(|(id, sess)| {
                    if let Some(expires) = sess.expires_at
                        && now > expires {
                        return Some((*id, sess.cache_path.clone()));
                    }
                    None
                })
                .collect();
            for (id, _) in &expired {
                sessions.remove(id);
            }
            expired
        };
        // Lock is dropped here; perform filesystem cleanup outside the lock.
        for (_id, path) in expired {
            if let Err(e) = tokio::fs::remove_dir_all(&path).await {
                tracing::error!("failed to remove expired transcode cache directory: {}", e);
            }
        }
    }

    /// Get a session by ID from the in-memory store.
    pub async fn get_session(&self, session_id: Uuid) -> Option<TranscodeSession> {
        let sessions = self.sessions.read().await;
        sessions.get(&session_id).cloned()
    }

    /// Resolve the path to a specific segment file on disk.
    ///
    /// Only the final path component of `segment_name` is honored, and dot-
    /// prefixed or empty names are rejected, so callers cannot escape the
    /// session directory via `../` or absolute paths.
    pub fn segment_path(&self, session_id: Uuid, segment_name: &str) -> PathBuf {
        // Sanitize segment_name to prevent path traversal
        let safe_name = std::path::Path::new(segment_name)
            .file_name()
            .map(|n| n.to_string_lossy().to_string())
            .unwrap_or_default();
        if safe_name.is_empty() || safe_name.contains('\0') || safe_name.starts_with('.') {
            // Return a non-existent path that will fail safely
            return self
                .cache_dir()
                .join(session_id.to_string())
                .join("__invalid__");
        }
        self.cache_dir()
            .join(session_id.to_string())
            .join(safe_name)
    }

    /// Find a session for a given media_id and profile.
    ///
    /// NOTE(review): matches sessions in any status (including Failed or
    /// Cancelled) — confirm callers expect that rather than only live ones.
    pub async fn find_session(&self, media_id: MediaId, profile: &str) -> Option<TranscodeSession> {
        let sessions = self.sessions.read().await;
        sessions
            .values()
            .find(|s| s.media_id == media_id && s.profile == profile)
            .cloned()
    }
}
/// Parse a resolution string like "360p", "720p", "1080p" into (width, height).
///
/// Matching is case-insensitive and tolerant of surrounding whitespace, so
/// "4K", " 720P " and "2160" all resolve as expected. Unrecognized inputs
/// fall back to 720p (1280x720) rather than failing, matching the previous
/// behavior for known-good inputs.
pub fn parse_resolution(res: &str) -> (u32, u32) {
    // Normalize once: trim whitespace, lowercase, drop a trailing 'p'.
    match res.trim().to_ascii_lowercase().trim_end_matches('p') {
        "360" => (640, 360),
        "480" => (854, 480),
        "720" => (1280, 720),
        "1080" => (1920, 1080),
        "1440" => (2560, 1440),
        "2160" | "4k" => (3840, 2160),
        _ => (1280, 720), // default to 720p
    }
}
/// Estimate bandwidth (bits/sec) from a profile's max_bitrate_kbps.
///
/// Uses a saturating multiply so an absurd configured bitrate (> ~4.2M kbps)
/// clamps to `u32::MAX` instead of silently wrapping in release builds.
pub fn estimate_bandwidth(profile: &TranscodeProfile) -> u32 {
    profile.max_bitrate_kbps.saturating_mul(1000)
}
/// Build FFmpeg CLI arguments for transcoding.
///
/// Produces HLS output: `playlist.m3u8` plus numbered `segment%d.ts` files in
/// `output_dir`, using the profile's codecs, bitrate cap and target
/// resolution. `-progress pipe:1` makes FFmpeg emit machine-readable progress
/// on stdout, which `run_ffmpeg` parses.
fn get_ffmpeg_args(
    source: &Path,
    output_dir: &Path,
    profile: &TranscodeProfile,
    hw_accel: Option<&str>,
) -> Vec<String> {
    let (w, h) = parse_resolution(&profile.max_resolution);
    let playlist = output_dir.join("playlist.m3u8");
    let segment_pattern = output_dir.join("segment%d.ts");
    let mut args = Vec::new();
    // Hardware acceleration — placed before `-i`, as it applies to the input.
    if let Some(accel) = hw_accel {
        args.extend_from_slice(&["-hwaccel".to_string(), accel.to_string()]);
    }
    // NOTE(review): `scale=w:h` forces the exact target size and does not
    // preserve the source aspect ratio — confirm this is intended.
    args.extend_from_slice(&[
        "-i".to_string(),
        source.to_string_lossy().to_string(),
        "-c:v".to_string(),
        profile.video_codec.clone(),
        "-c:a".to_string(),
        profile.audio_codec.clone(),
        "-b:v".to_string(),
        format!("{}k", profile.max_bitrate_kbps),
        "-vf".to_string(),
        format!("scale={}:{}", w, h),
        "-f".to_string(),
        "hls".to_string(),
        "-hls_time".to_string(),
        "10".to_string(),
        "-hls_segment_filename".to_string(),
        segment_pattern.to_string_lossy().to_string(),
        "-progress".to_string(),
        "pipe:1".to_string(),
        "-y".to_string(),
        playlist.to_string_lossy().to_string(),
    ]);
    args
}
/// Run FFmpeg as a child process, parsing progress from stdout.
///
/// Spawns `ffmpeg` with piped stdout/stderr; stdout carries `-progress`
/// key=value lines used to update the session's progress, stderr is buffered
/// for error reporting. The call returns when FFmpeg exits, or errors early
/// if `cancel` fires (the child is killed) or FFmpeg exits non-zero (the last
/// stderr lines are included in the error).
async fn run_ffmpeg(
    args: &[String],
    sessions: &Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
    session_id: Uuid,
    duration_secs: Option<f64>,
    cancel: Arc<tokio::sync::Notify>,
) -> Result<(), crate::error::PinakesError> {
    use tokio::io::{AsyncBufReadExt, BufReader};
    use tokio::process::Command;
    let mut child = Command::new("ffmpeg")
        .args(args)
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()
        .map_err(|e| {
            crate::error::PinakesError::InvalidOperation(format!("failed to spawn ffmpeg: {}", e))
        })?;
    // Capture stderr in a spawned task for error reporting
    let stderr_handle = if let Some(stderr) = child.stderr.take() {
        let reader = BufReader::new(stderr);
        Some(tokio::spawn(async move {
            let mut lines = reader.lines();
            let mut collected = Vec::new();
            while let Ok(Some(line)) = lines.next_line().await {
                collected.push(line);
            }
            collected
        }))
    } else {
        None
    };
    // Parse progress from stdout
    let stdout_handle = if let Some(stdout) = child.stdout.take() {
        let reader = BufReader::new(stdout);
        let mut lines = reader.lines();
        let sessions = sessions.clone();
        Some(tokio::spawn(async move {
            while let Ok(Some(line)) = lines.next_line().await {
                // FFmpeg progress output: "out_time_us=12345678"
                if let Some(time_str) = line.strip_prefix("out_time_us=")
                    && let Ok(us) = time_str.trim().parse::<f64>() {
                    let secs = us / 1_000_000.0;
                    // Calculate progress based on known duration; capped at
                    // 0.99 so only successful completion reports 1.0.
                    let progress = match duration_secs {
                        Some(dur) if dur > 0.0 => (secs / dur).min(0.99) as f32,
                        _ => {
                            // Duration unknown; don't update progress
                            continue;
                        }
                    };
                    let mut s = sessions.write().await;
                    if let Some(sess) = s.get_mut(&session_id) {
                        sess.progress = progress;
                    }
                }
            }
        }))
    } else {
        None
    };
    // Wait for child, but also listen for cancellation
    let status = tokio::select! {
        result = child.wait() => {
            result.map_err(|e| {
                crate::error::PinakesError::InvalidOperation(format!("ffmpeg process error: {}", e))
            })?
        }
        _ = cancel.notified() => {
            // Kill the child process on cancel
            if let Err(e) = child.kill().await {
                tracing::error!("failed to kill ffmpeg process: {}", e);
            }
            return Err(crate::error::PinakesError::InvalidOperation(
                "cancelled by user".to_string(),
            ));
        }
    };
    // Await the stdout reader task
    if let Some(handle) = stdout_handle {
        let _ = handle.await;
    }
    // Collect stderr output for error reporting
    let stderr_output = if let Some(handle) = stderr_handle {
        handle.await.unwrap_or_default()
    } else {
        Vec::new()
    };
    if !status.success() {
        // Keep only the last 10 stderr lines — FFmpeg's tail usually carries
        // the actual failure reason.
        let last_stderr = stderr_output
            .iter()
            .rev()
            .take(10)
            .rev()
            .cloned()
            .collect::<Vec<_>>()
            .join("\n");
        return Err(crate::error::PinakesError::InvalidOperation(format!(
            "ffmpeg exited with status: {}\nstderr:\n{}",
            status, last_stderr
        )));
    }
    Ok(())
}

View file

@ -0,0 +1,210 @@
//! User management and authentication
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid;
use crate::config::UserRole;
use crate::error::{PinakesError, Result};
/// Opaque, copyable identifier for a user account (UUID newtype).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct UserId(pub Uuid);
impl UserId {
    /// Generates a fresh, time-ordered (UUIDv7) user ID.
    pub fn new() -> Self {
        Self(Uuid::now_v7())
    }
}
impl Default for UserId {
    /// Same as [`UserId::new`]: a fresh random ID, not a fixed sentinel.
    fn default() -> Self {
        Self::new()
    }
}
impl std::fmt::Display for UserId {
    /// Formats exactly like the wrapped UUID.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.0, f)
    }
}
impl From<Uuid> for UserId {
    /// Wraps an existing UUID (e.g. one loaded from the database).
    fn from(id: Uuid) -> Self {
        Self(id)
    }
}
/// User account with profile information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct User {
    pub id: UserId,
    pub username: String,
    // Never serialized: the credential hash must not leak through API responses.
    #[serde(skip_serializing)]
    pub password_hash: String,
    /// Authorization role for this account.
    pub role: UserRole,
    /// Display/preferences data attached to the account.
    pub profile: UserProfile,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// User profile information
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct UserProfile {
    /// Path to the user's avatar image, if one was uploaded.
    pub avatar_path: Option<String>,
    /// Free-form biography text.
    pub bio: Option<String>,
    /// Per-user application preferences.
    pub preferences: UserPreferences,
}
/// User-specific preferences
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct UserPreferences {
    /// UI theme preference
    pub theme: Option<String>,
    /// Language preference
    pub language: Option<String>,
    /// Default video quality preference for transcoding
    pub default_video_quality: Option<String>,
    /// Whether to auto-play media (defaults to `false`)
    pub auto_play: bool,
    /// Custom preferences (extensible); arbitrary JSON values keyed by name
    pub custom: HashMap<String, serde_json::Value>,
}
/// Library access permission
///
/// Levels are strictly increasing: Read < Write < Admin (each level implies
/// the capabilities of the previous one — see the `can_*` helpers below).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LibraryPermission {
    /// Can only read/view media
    Read,
    /// Can read and modify media metadata
    Write,
    /// Full control including deletion and sharing
    Admin,
}
impl LibraryPermission {
    /// Every permission level grants read access.
    pub fn can_read(&self) -> bool {
        true
    }

    /// `Write` and `Admin` may modify metadata; `Read` may not.
    pub fn can_write(&self) -> bool {
        match self {
            Self::Write | Self::Admin => true,
            Self::Read => false,
        }
    }

    /// Only `Admin` holds full administrative control.
    pub fn can_admin(&self) -> bool {
        match self {
            Self::Admin => true,
            Self::Read | Self::Write => false,
        }
    }
}
/// User's access to a specific library root
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserLibraryAccess {
    /// The user holding the grant.
    pub user_id: UserId,
    /// The library root path this grant applies to.
    pub root_path: String,
    /// Permission level granted for that root.
    pub permission: LibraryPermission,
    /// When the grant was created.
    pub granted_at: DateTime<Utc>,
}
/// User creation request
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateUserRequest {
    pub username: String,
    // Plaintext on input only; skipped on serialization so it is never echoed back.
    #[serde(skip_serializing)]
    pub password: String,
    pub role: UserRole,
    /// Optional initial profile; implementation-defined default when absent.
    pub profile: Option<UserProfile>,
}
/// User update request; `None` fields leave the existing value unchanged
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateUserRequest {
    // Plaintext on input only; skipped on serialization so it is never echoed back.
    #[serde(skip_serializing)]
    pub password: Option<String>,
    pub role: Option<UserRole>,
    pub profile: Option<UserProfile>,
}
/// User authentication
pub mod auth {
    use argon2::Argon2;
    use argon2::password_hash::rand_core::OsRng;
    use argon2::password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString};

    use super::*;

    /// Hash a password using Argon2 (crate defaults) with a freshly
    /// generated random salt; returns the PHC-format hash string.
    pub fn hash_password(password: &str) -> Result<String> {
        let salt = SaltString::generate(&mut OsRng);
        match Argon2::default().hash_password(password.as_bytes(), &salt) {
            Ok(hash) => Ok(hash.to_string()),
            Err(e) => Err(PinakesError::Authentication(format!(
                "failed to hash password: {e}"
            ))),
        }
    }

    /// Verify a password against a stored PHC-format hash.
    ///
    /// Returns `Ok(false)` for a wrong password; errors only when the stored
    /// hash itself cannot be parsed.
    pub fn verify_password(password: &str, hash: &str) -> Result<bool> {
        let parsed = PasswordHash::new(hash)
            .map_err(|e| PinakesError::Authentication(format!("invalid password hash: {e}")))?;
        let matches = Argon2::default()
            .verify_password(password.as_bytes(), &parsed)
            .is_ok();
        Ok(matches)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip: a hashed password verifies, a wrong one does not.
    #[test]
    fn test_hash_and_verify_password() {
        let password = "test_password_123";
        let hash = auth::hash_password(password).unwrap();
        assert!(auth::verify_password(password, &hash).unwrap());
        assert!(!auth::verify_password("wrong_password", &hash).unwrap());
    }

    // Defaults: no theme/language, auto-play off, empty custom map.
    #[test]
    fn test_user_preferences_default() {
        let prefs = UserPreferences::default();
        assert_eq!(prefs.theme, None);
        assert_eq!(prefs.language, None);
        assert!(!prefs.auto_play);
        assert!(prefs.custom.is_empty());
    }

    // Permission lattice: Read < Write < Admin, each level implying the lower.
    #[test]
    fn test_library_permission_levels() {
        let read = LibraryPermission::Read;
        assert!(read.can_read());
        assert!(!read.can_write());
        assert!(!read.can_admin());
        let write = LibraryPermission::Write;
        assert!(write.can_read());
        assert!(write.can_write());
        assert!(!write.can_admin());
        let admin = LibraryPermission::Admin;
        assert!(admin.can_read());
        assert!(admin.can_write());
        assert!(admin.can_admin());
    }
}