initial commit

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I4a6b498153eccd5407510dd541b7f4816a6a6964
This commit is contained in:
raf 2026-01-30 22:05:46 +03:00
commit 6a73d11c4b
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
124 changed files with 34856 additions and 0 deletions

1
.envrc Normal file
View file

@ -0,0 +1 @@
use flake

7931
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

100
Cargo.toml Normal file
View file

@ -0,0 +1,100 @@
[workspace]
members = [
"crates/pinakes-core",
"crates/pinakes-server",
"crates/pinakes-tui",
"crates/pinakes-ui",
]
resolver = "3"
[workspace.package]
edition = "2024"
version = "0.1.0"
license = "MIT"
[workspace.dependencies]
# Async runtime
tokio = { version = "1", features = ["full"] }
# Serialization
serde = { version = "1", features = ["derive"] }
serde_json = "1"
toml = "0.9"
# CLI argument parsing
clap = { version = "4", features = ["derive", "env"] }
# Date/time
chrono = { version = "0.4", features = ["serde"] }
# IDs
uuid = { version = "1", features = ["v7", "serde"] }
# Error handling
thiserror = "2"
anyhow = "1"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] }
# Hashing
blake3 = "1"
# Metadata extraction
lofty = "0.22"
lopdf = "0.39"
epub = "2"
matroska = "0.30"
gray_matter = "0.3"
kamadak-exif = "0.6"
# Database - SQLite
rusqlite = { version = "0.37", features = ["bundled", "column_decltype"] }
# Database - PostgreSQL
tokio-postgres = { version = "0.7", features = ["with-uuid-1", "with-chrono-0_4", "with-serde_json-1"] }
deadpool-postgres = "0.14"
postgres-types = { version = "0.2", features = ["derive"] }
# Migrations
refinery = { version = "0.9", features = ["rusqlite", "tokio-postgres"] }
# Filesystem
walkdir = "2"
notify = { version = "8", features = ["macos_fsevent"] }
# Search parser
winnow = "0.7"
# HTTP server
axum = { version = "0.8", features = ["macros"] }
tower = "0.5"
tower-http = { version = "0.6", features = ["cors", "trace"] }
governor = "0.8"
tower_governor = "0.6"
# HTTP client
reqwest = { version = "0.13", features = ["json", "query"] }
# TUI
ratatui = "0.30"
crossterm = "0.29"
# Desktop/Web UI
dioxus = { version = "0.7", features = ["desktop", "router"] }
# Async trait (dyn-compatible async methods)
async-trait = "0.1"
# Image processing (thumbnails)
image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp", "gif", "tiff", "bmp"] }
# Markdown rendering
pulldown-cmark = "0.12"
# Password hashing
argon2 = "0.5"
# Misc
mime_guess = "2"

193
README.md Normal file
View file

@ -0,0 +1,193 @@
# Pinakes
A media cataloging and library management system written in Rust. Pinakes
indexes files across configured directories, extracts metadata from audio,
video, document, and text files, and provides full-text search with tagging,
collections, and audit logging. It supports both SQLite and PostgreSQL backends.
## Building
```sh
# Build all compilable crates
cargo build -p pinakes-core -p pinakes-server -p pinakes-tui
# The Dioxus UI requires GTK3 and libsoup system libraries:
# On Debian/Ubuntu: apt install libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev
# On Fedora: dnf install gtk3-devel libsoup3-devel webkit2gtk4.1-devel
# On Nix: Use the dev shell, everything is provided :)
cargo build -p pinakes-ui
```
## Configuration
Copy the example config and edit it:
```sh
cp pinakes.toml.example pinakes.toml
```
Key settings:
- `storage.backend` -- `"sqlite"` or `"postgres"`
- `storage.sqlite.path` -- Path to the SQLite database file
- `storage.postgres.*` -- PostgreSQL connection parameters
- `directories.roots` -- Directories to scan for media files
- `scanning.watch` -- Enable filesystem watching for automatic imports
- `scanning.ignore_patterns` -- Patterns to skip during scanning (e.g., `".*"`,
`"node_modules"`)
- `server.host` / `server.port` -- Server bind address
## Running
### Server
```sh
cargo run -p pinakes-server -- pinakes.toml
# or
cargo run -p pinakes-server -- --config pinakes.toml
```
The server starts on the configured host:port (default `127.0.0.1:3000`).
### TUI
```sh
cargo run -p pinakes-tui
# or with a custom server URL:
cargo run -p pinakes-tui -- --server http://localhost:3000
```
Keybindings:
<!-- markdownlint-disable MD013-->
| Key | Action |
| --------------------- | -------------------------------------------------------- |
| `q` / `Ctrl-C` | Quit |
| `j` / `k` | Navigate down / up |
| `Enter` | Select / confirm |
| `Esc` | Back |
| `/` | Search |
| `i` | Import file |
| `o` | Open file |
| `d` | Delete (media in library, tag/collection in their views) |
| `t` | Tags view |
| `c` | Collections view |
| `a` | Audit log view |
| `s` | Trigger scan |
| `r` | Refresh current view |
| `n` | Create new tag (in tags view) |
| `+` | Tag selected media (in detail view) |
| `-` | Untag selected media (in detail view) |
| `Tab` / `Shift-Tab` | Next / previous tab |
| `PageUp` / `PageDown` | Paginate |
<!-- markdownlint-enable MD013-->
### Desktop/Web UI
```sh
cargo run -p pinakes-ui
```
Set `PINAKES_SERVER_URL` to point at the server if it is not on
`localhost:3000`.
## API
All endpoints are under `/api/v1`.
### Media
| Method | Path | Description |
| -------- | -------------------- | ------------------------------------- |
| `POST` | `/media/import` | Import a file (`{"path": "..."}`) |
| `GET` | `/media` | List media (query: `offset`, `limit`) |
| `GET` | `/media/{id}` | Get media item |
| `PATCH` | `/media/{id}` | Update metadata |
| `DELETE` | `/media/{id}` | Delete media item |
| `GET` | `/media/{id}/stream` | Stream file content |
| `POST` | `/media/{id}/open` | Open with system viewer |
### Search
| Method | Path | Description |
| ------ | --------------- | ---------------------------------------------- |
| `GET` | `/search?q=...` | Search (query: `q`, `sort`, `offset`, `limit`) |
Search syntax: `term`, `"exact phrase"`, `field:value`, `type:pdf`, `tag:music`,
`prefix*`, `fuzzy~`, `-excluded`, `a b` (AND), `a OR b`, `(grouped)`.
### Tags
<!-- markdownlint-disable MD013-->
| Method | Path | Description |
| -------- | --------------------------- | ------------------------------------------------ |
| `POST` | `/tags` | Create tag (`{"name": "...", "parent_id": ...}`) |
| `GET` | `/tags` | List all tags |
| `GET` | `/tags/{id}` | Get tag |
| `DELETE` | `/tags/{id}` | Delete tag |
| `POST` | `/media/{id}/tags` | Tag media (`{"tag_id": "..."}`) |
| `GET` | `/media/{id}/tags` | List media's tags |
| `DELETE` | `/media/{id}/tags/{tag_id}` | Untag media |
<!-- markdownlint-enable MD013-->
### Collections
| Method | Path | Description |
| -------- | ---------------------------------- | ----------------- |
| `POST` | `/collections` | Create collection |
| `GET` | `/collections` | List collections |
| `GET` | `/collections/{id}` | Get collection |
| `DELETE` | `/collections/{id}` | Delete collection |
| `POST` | `/collections/{id}/members` | Add member |
| `GET` | `/collections/{id}/members` | List members |
| `DELETE` | `/collections/{cid}/members/{mid}` | Remove member |
Virtual collections (kind `"virtual"`) evaluate their `filter_query` as a search
query when listing members, returning results dynamically.
### Audit & Scanning
<!-- markdownlint-disable MD013-->
| Method | Path | Description |
| ------ | -------- | ----------------------------------------------------------------------------- |
| `GET` | `/audit` | List audit log (query: `offset`, `limit`) |
| `POST` | `/scan` | Trigger directory scan (`{"path": "/..."}` or `{"path": null}` for all roots) |
<!-- markdownlint-enable MD013-->
## Testing
```sh
# Unit and integration tests for the core library (SQLite in-memory)
cargo test -p pinakes-core
# API integration tests for the server
cargo test -p pinakes-server
```
## Supported Media Types
| Category | Formats |
| -------- | ------------------------------- |
| Audio | MP3, FLAC, OGG, WAV, AAC, Opus |
| Video | MP4, MKV, AVI, WebM |
| Document | PDF, EPUB, DjVu |
| Text | Markdown, Plain text |
| Image | JPEG, PNG, GIF, WebP, SVG, AVIF |
Metadata extraction uses lofty (audio, MP4), matroska (MKV), lopdf (PDF), epub
(EPUB), kamadak-exif (image EXIF), and gray_matter (Markdown frontmatter).
## Storage Backends
**SQLite** (default) -- Single-file database with WAL mode and FTS5 full-text
search. Bundled SQLite guarantees FTS5 availability.
**PostgreSQL** -- Native async with connection pooling (deadpool-postgres). Uses
tsvector with weighted columns for full-text search and pg_trgm for fuzzy
matching. Requires the `pg_trgm` extension.

View file

@ -0,0 +1,39 @@
[package]
name = "pinakes-core"
edition.workspace = true
version.workspace = true
license.workspace = true
[dependencies]
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
toml = { workspace = true }
chrono = { workspace = true }
uuid = { workspace = true }
thiserror = { workspace = true }
anyhow = { workspace = true }
tracing = { workspace = true }
blake3 = { workspace = true }
lofty = { workspace = true }
lopdf = { workspace = true }
epub = { workspace = true }
matroska = { workspace = true }
gray_matter = { workspace = true }
rusqlite = { workspace = true }
tokio-postgres = { workspace = true }
deadpool-postgres = { workspace = true }
postgres-types = { workspace = true }
refinery = { workspace = true }
walkdir = { workspace = true }
notify = { workspace = true }
winnow = { workspace = true }
mime_guess = { workspace = true }
async-trait = { workspace = true }
kamadak-exif = { workspace = true }
image = { workspace = true }
tokio-util = { version = "0.7", features = ["rt"] }
reqwest = { workspace = true }
[dev-dependencies]
tempfile = "3"

View file

@ -0,0 +1,21 @@
use uuid::Uuid;
use crate::error::Result;
use crate::model::{AuditAction, AuditEntry, MediaId};
use crate::storage::DynStorageBackend;
/// Append an entry to the audit log.
///
/// `media_id` is optional because some audited actions (e.g. scans) are not
/// tied to a single media item. The entry id is a time-ordered UUIDv7 and the
/// timestamp is taken at call time.
pub async fn record_action(
    storage: &DynStorageBackend,
    media_id: Option<MediaId>,
    action: AuditAction,
    details: Option<String>,
) -> Result<()> {
    storage
        .record_audit(&AuditEntry {
            id: Uuid::now_v7(),
            media_id,
            action,
            details,
            timestamp: chrono::Utc::now(),
        })
        .await
}

View file

@ -0,0 +1,91 @@
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;
/// A cached value paired with its insertion time, used for TTL checks.
struct CacheEntry<V> {
    value: V,
    // Read both by `get` (staleness check) and the background eviction task.
    inserted_at: Instant,
}
/// A simple TTL-based in-memory cache with periodic eviction.
///
/// Entries live behind an `Arc<RwLock<..>>` so a background task spawned in
/// `Cache::new` can evict expired entries concurrently with readers/writers.
pub struct Cache<K, V> {
    entries: Arc<RwLock<HashMap<K, CacheEntry<V>>>>,
    ttl: Duration,
}
impl<K, V> Cache<K, V>
where
K: Eq + Hash + Clone + Send + Sync + 'static,
V: Clone + Send + Sync + 'static,
{
pub fn new(ttl: Duration) -> Self {
let cache = Self {
entries: Arc::new(RwLock::new(HashMap::new())),
ttl,
};
// Spawn periodic eviction task
let entries = cache.entries.clone();
let ttl = cache.ttl;
tokio::spawn(async move {
let mut interval = tokio::time::interval(ttl);
loop {
interval.tick().await;
let now = Instant::now();
let mut map = entries.write().await;
map.retain(|_, entry| now.duration_since(entry.inserted_at) < ttl);
}
});
cache
}
pub async fn get(&self, key: &K) -> Option<V> {
let map = self.entries.read().await;
if let Some(entry) = map.get(key) {
if entry.inserted_at.elapsed() < self.ttl {
return Some(entry.value.clone());
}
}
None
}
pub async fn insert(&self, key: K, value: V) {
let mut map = self.entries.write().await;
map.insert(
key,
CacheEntry {
value,
inserted_at: Instant::now(),
},
);
}
pub async fn invalidate(&self, key: &K) {
let mut map = self.entries.write().await;
map.remove(key);
}
pub async fn invalidate_all(&self) {
let mut map = self.entries.write().await;
map.clear();
}
}
/// Application-level cache layer wrapping multiple caches for different data types.
pub struct CacheLayer {
    /// Cache for serialized API responses, keyed by request path + query string.
    pub responses: Cache<String, String>,
}
impl CacheLayer {
    /// Build the layer with a shared TTL (seconds) for all contained caches.
    pub fn new(ttl_secs: u64) -> Self {
        Self {
            responses: Cache::new(Duration::from_secs(ttl_secs)),
        }
    }
}

View file

@ -0,0 +1,78 @@
use uuid::Uuid;
use crate::error::Result;
use crate::model::*;
use crate::storage::DynStorageBackend;
/// Create a collection, delegating directly to the storage backend.
///
/// `filter_query` is only meaningful for virtual collections, where it is the
/// saved search evaluated on member listing.
pub async fn create_collection(
    storage: &DynStorageBackend,
    name: &str,
    kind: CollectionKind,
    description: Option<&str>,
    filter_query: Option<&str>,
) -> Result<Collection> {
    let created = storage
        .create_collection(name, kind, description, filter_query)
        .await?;
    Ok(created)
}
/// Add a media item to a manual collection at `position`, then record the
/// membership change in the audit log.
pub async fn add_member(
    storage: &DynStorageBackend,
    collection_id: Uuid,
    media_id: MediaId,
    position: i32,
) -> Result<()> {
    storage
        .add_to_collection(collection_id, media_id, position)
        .await?;
    let details = format!("collection_id={collection_id}");
    crate::audit::record_action(storage, Some(media_id), AuditAction::AddedToCollection, Some(details))
        .await
}
/// Remove a media item from a collection and record the change in the audit log.
pub async fn remove_member(
    storage: &DynStorageBackend,
    collection_id: Uuid,
    media_id: MediaId,
) -> Result<()> {
    storage
        .remove_from_collection(collection_id, media_id)
        .await?;
    let details = format!("collection_id={collection_id}");
    crate::audit::record_action(
        storage,
        Some(media_id),
        AuditAction::RemovedFromCollection,
        Some(details),
    )
    .await
}
/// Maximum number of results materialized for a virtual collection's saved
/// search. Results beyond this cap are silently truncated — previously a bare
/// magic `10000` inline in the pagination call.
const VIRTUAL_COLLECTION_LIMIT: u64 = 10_000;

/// Resolve the members of a collection.
///
/// Manual collections return their stored membership; virtual collections
/// re-run their `filter_query` against the search index on every call.
pub async fn get_members(
    storage: &DynStorageBackend,
    collection_id: Uuid,
) -> Result<Vec<MediaItem>> {
    let collection = storage.get_collection(collection_id).await?;
    match collection.kind {
        CollectionKind::Virtual => {
            // Virtual collections evaluate their filter_query dynamically
            if let Some(ref query_str) = collection.filter_query {
                let query = crate::search::parse_search_query(query_str)?;
                let request = crate::search::SearchRequest {
                    query,
                    sort: crate::search::SortOrder::DateDesc,
                    pagination: Pagination::new(0, VIRTUAL_COLLECTION_LIMIT, None),
                };
                let results = storage.search(&request).await?;
                Ok(results.items)
            } else {
                // A virtual collection without a saved query matches nothing.
                Ok(Vec::new())
            }
        }
        CollectionKind::Manual => storage.get_collection_members(collection_id).await,
    }
}

View file

@ -0,0 +1,437 @@
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
/// Top-level application configuration, loaded from a TOML file.
///
/// `storage`, `directories`, `scanning`, and `server` are required sections;
/// everything else falls back to its `#[serde(default)]` value when absent.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub storage: StorageConfig,
    pub directories: DirectoryConfig,
    pub scanning: ScanningConfig,
    pub server: ServerConfig,
    #[serde(default)]
    pub ui: UiConfig,
    #[serde(default)]
    pub accounts: AccountsConfig,
    #[serde(default)]
    pub jobs: JobsConfig,
    #[serde(default)]
    pub thumbnails: ThumbnailConfig,
    #[serde(default)]
    pub webhooks: Vec<WebhookConfig>,
    #[serde(default)]
    pub scheduled_tasks: Vec<ScheduledTaskConfig>,
}
/// A single scheduled task entry.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScheduledTaskConfig {
    pub id: String,
    pub enabled: bool,
    pub schedule: crate::scheduler::Schedule,
    // NOTE(review): stored as a raw string rather than a typed timestamp —
    // confirm the expected format with whatever writes it back.
    pub last_run: Option<String>,
}
/// Background job settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JobsConfig {
    #[serde(default = "default_worker_count")]
    pub worker_count: usize,
    #[serde(default = "default_cache_ttl")]
    pub cache_ttl_secs: u64,
}
// Free functions (not consts) so `#[serde(default = "...")]` can name them;
// the Default impl reuses them to keep serde and Default in sync.
fn default_worker_count() -> usize {
    2
}
fn default_cache_ttl() -> u64 {
    60
}
impl Default for JobsConfig {
    fn default() -> Self {
        Self {
            worker_count: default_worker_count(),
            cache_ttl_secs: default_cache_ttl(),
        }
    }
}
/// Thumbnail generation settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThumbnailConfig {
    pub size: u32,
    #[serde(default = "default_thumb_size")]
    pub quality: u8,
    #[serde(default = "default_thumb_quality")]
    // Explicit path to an ffmpeg binary; presumably used together with
    // `video_seek_secs` for video thumbnails — confirm against thumbnail.rs.
    #[serde(default)]
    pub ffmpeg_path: Option<String>,
    #[serde(default = "default_video_seek")]
    pub video_seek_secs: u32,
}
fn default_thumb_size() -> u32 {
    320
}
fn default_thumb_quality() -> u8 {
    80
}
fn default_video_seek() -> u32 {
    2
}
impl Default for ThumbnailConfig {
    fn default() -> Self {
        Self {
            size: default_thumb_size(),
            quality: default_thumb_quality(),
            ffmpeg_path: None,
            video_seek_secs: default_video_seek(),
        }
    }
}
/// A single outbound webhook target.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebhookConfig {
    pub url: String,
    // Event names to deliver; "*" subscribes the hook to every event
    // (see the matching logic in events.rs).
    pub events: Vec<String>,
    // NOTE(review): the delivery code currently ignores this secret — no
    // request signing is performed yet.
    #[serde(default)]
    pub secret: Option<String>,
}
/// UI preferences shared by the TUI and desktop/web frontends.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UiConfig {
    #[serde(default = "default_theme")]
    pub theme: String,
    #[serde(default = "default_view")]
    pub default_view: String,
    #[serde(default = "default_page_size")]
    pub default_page_size: usize,
    #[serde(default = "default_view_mode")]
    pub default_view_mode: String,
    #[serde(default)]
    pub auto_play_media: bool,
    #[serde(default = "default_true")]
    pub show_thumbnails: bool,
    #[serde(default)]
    pub sidebar_collapsed: bool,
}
// Serde default helpers, referenced by name in the attributes above.
fn default_theme() -> String {
    "dark".to_string()
}
fn default_view() -> String {
    "library".to_string()
}
fn default_page_size() -> usize {
    48
}
fn default_view_mode() -> String {
    "grid".to_string()
}
fn default_true() -> bool {
    true
}
impl Default for UiConfig {
    fn default() -> Self {
        Self {
            theme: default_theme(),
            default_view: default_view(),
            default_page_size: default_page_size(),
            default_view_mode: default_view_mode(),
            // bool::default() is false, matching the bare #[serde(default)]s.
            auto_play_media: false,
            // Use the same helper serde uses so the serde default and the
            // Default impl cannot drift (was hardcoded `true` before).
            show_thumbnails: default_true(),
            sidebar_collapsed: false,
        }
    }
}
/// Optional user-account support; authentication is off unless `enabled = true`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct AccountsConfig {
    #[serde(default)]
    pub enabled: bool,
    #[serde(default)]
    pub users: Vec<UserAccount>,
}
/// A single configured user account.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserAccount {
    pub username: String,
    // NOTE(review): presumably an Argon2 PHC string (argon2 is a workspace
    // dependency) — confirm against the verification code.
    pub password_hash: String,
    #[serde(default)]
    pub role: UserRole,
}
/// Access level, serialized lowercase; defaults to the least-privileged role.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum UserRole {
    Admin,
    Editor,
    #[default]
    Viewer,
}
impl UserRole {
    /// Every role may read.
    pub fn can_read(self) -> bool {
        true
    }
    /// Admins and editors may mutate library data.
    pub fn can_write(self) -> bool {
        match self {
            Self::Admin | Self::Editor => true,
            Self::Viewer => false,
        }
    }
    /// Only admins may perform administrative operations.
    pub fn can_admin(self) -> bool {
        match self {
            Self::Admin => true,
            Self::Editor | Self::Viewer => false,
        }
    }
}
impl std::fmt::Display for UserRole {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Keep in sync with the lowercase serde representation.
        let name = match self {
            Self::Admin => "admin",
            Self::Editor => "editor",
            Self::Viewer => "viewer",
        };
        f.write_str(name)
    }
}
/// Storage backend selection plus per-backend settings; only the section
/// matching `backend` needs to be populated.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StorageConfig {
    pub backend: StorageBackendType,
    pub sqlite: Option<SqliteConfig>,
    pub postgres: Option<PostgresConfig>,
}
/// Which database engine backs the library ("sqlite" or "postgres" in TOML).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum StorageBackendType {
    Sqlite,
    Postgres,
}
/// SQLite-specific settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SqliteConfig {
    // Path to the database file.
    pub path: PathBuf,
}
/// PostgreSQL connection parameters.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PostgresConfig {
    pub host: String,
    pub port: u16,
    pub database: String,
    pub username: String,
    pub password: String,
    // Presumably the connection-pool size for deadpool — confirm.
    pub max_connections: usize,
}
/// Root directories scanned for media files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DirectoryConfig {
    pub roots: Vec<PathBuf>,
}
/// Filesystem scanning behaviour.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScanningConfig {
    // Enable filesystem watching for automatic imports.
    pub watch: bool,
    pub poll_interval_secs: u64,
    // Path components skipped during scanning (".*" matches any dotfile).
    pub ignore_patterns: Vec<String>,
    #[serde(default = "default_import_concurrency")]
    pub import_concurrency: usize,
}
fn default_import_concurrency() -> usize {
    8
}
/// HTTP server settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerConfig {
    pub host: String,
    pub port: u16,
    /// Optional API key for bearer token authentication.
    /// If set, all requests (except /health) must include `Authorization: Bearer <key>`.
    /// Can also be set via `PINAKES_API_KEY` environment variable.
    pub api_key: Option<String>,
}
impl Config {
    /// Parse a config from a TOML file, mapping both read and parse failures
    /// to `PinakesError::Config` with a descriptive message.
    pub fn from_file(path: &Path) -> crate::error::Result<Self> {
        let content = std::fs::read_to_string(path).map_err(|e| {
            crate::error::PinakesError::Config(format!("failed to read config file: {e}"))
        })?;
        toml::from_str(&content)
            .map_err(|e| crate::error::PinakesError::Config(format!("failed to parse config: {e}")))
    }
    /// Try loading from file, falling back to defaults if the file doesn't exist.
    pub fn load_or_default(path: &Path) -> crate::error::Result<Self> {
        if path.exists() {
            Self::from_file(path)
        } else {
            let config = Self::default();
            // Ensure the data directory exists for the default SQLite database
            config.ensure_dirs()?;
            Ok(config)
        }
    }
    /// Save the current config to a TOML file.
    pub fn save_to_file(&self, path: &Path) -> crate::error::Result<()> {
        // Create parent directories so a first-run save succeeds.
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let content = toml::to_string_pretty(self).map_err(|e| {
            crate::error::PinakesError::Config(format!("failed to serialize config: {e}"))
        })?;
        std::fs::write(path, content)?;
        Ok(())
    }
    /// Ensure all directories needed by this config exist and are writable.
    pub fn ensure_dirs(&self) -> crate::error::Result<()> {
        if let Some(ref sqlite) = self.storage.sqlite
            && let Some(parent) = sqlite.path.parent()
        {
            std::fs::create_dir_all(parent)?;
            // A read-only parent would make every later DB write fail;
            // surface that at startup instead.
            let metadata = std::fs::metadata(parent)?;
            if metadata.permissions().readonly() {
                return Err(crate::error::PinakesError::Config(format!(
                    "directory is not writable: {}",
                    parent.display()
                )));
            }
        }
        Ok(())
    }
    /// Returns the default config file path following XDG conventions.
    /// Order: $XDG_CONFIG_HOME, then $HOME/.config, then the current directory.
    pub fn default_config_path() -> PathBuf {
        if let Ok(xdg) = std::env::var("XDG_CONFIG_HOME") {
            PathBuf::from(xdg).join("pinakes").join("pinakes.toml")
        } else if let Ok(home) = std::env::var("HOME") {
            PathBuf::from(home)
                .join(".config")
                .join("pinakes")
                .join("pinakes.toml")
        } else {
            PathBuf::from("pinakes.toml")
        }
    }
    /// Validate configuration values for correctness.
    /// Returns a human-readable message for the first problem found.
    pub fn validate(&self) -> Result<(), String> {
        if self.server.port == 0 {
            return Err("server port cannot be 0".into());
        }
        if self.server.host.is_empty() {
            return Err("server host cannot be empty".into());
        }
        if self.scanning.poll_interval_secs == 0 {
            return Err("poll interval cannot be 0".into());
        }
        if self.scanning.import_concurrency == 0 || self.scanning.import_concurrency > 256 {
            return Err("import_concurrency must be between 1 and 256".into());
        }
        Ok(())
    }
    /// Returns the default data directory following XDG conventions.
    /// Order: $XDG_DATA_HOME, then $HOME/.local/share, then a relative dir.
    pub fn default_data_dir() -> PathBuf {
        if let Ok(xdg) = std::env::var("XDG_DATA_HOME") {
            PathBuf::from(xdg).join("pinakes")
        } else if let Ok(home) = std::env::var("HOME") {
            PathBuf::from(home)
                .join(".local")
                .join("share")
                .join("pinakes")
        } else {
            PathBuf::from("pinakes-data")
        }
    }
}
impl Default for Config {
    /// Defaults: SQLite database under the XDG data dir, no scan roots, no
    /// watching, localhost server on port 3000, per-type defaults elsewhere.
    fn default() -> Self {
        let data_dir = Self::default_data_dir();
        Self {
            storage: StorageConfig {
                backend: StorageBackendType::Sqlite,
                sqlite: Some(SqliteConfig {
                    path: data_dir.join("pinakes.db"),
                }),
                postgres: None,
            },
            directories: DirectoryConfig { roots: vec![] },
            scanning: ScanningConfig {
                watch: false,
                poll_interval_secs: 300,
                // Common junk directories skipped out of the box.
                ignore_patterns: vec![
                    ".*".to_string(),
                    "node_modules".to_string(),
                    "__pycache__".to_string(),
                    "target".to_string(),
                ],
                import_concurrency: default_import_concurrency(),
            },
            server: ServerConfig {
                host: "127.0.0.1".to_string(),
                port: 3000,
                api_key: None,
            },
            ui: UiConfig::default(),
            accounts: AccountsConfig::default(),
            jobs: JobsConfig::default(),
            thumbnails: ThumbnailConfig::default(),
            webhooks: vec![],
            scheduled_tasks: vec![],
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Default config with `scanning.import_concurrency` overridden.
    fn test_config_with_concurrency(concurrency: usize) -> Config {
        let mut config = Config::default();
        config.scanning.import_concurrency = concurrency;
        config
    }
    #[test]
    fn test_validate_import_concurrency_zero() {
        let config = test_config_with_concurrency(0);
        // Validate once and inspect; the old tests ran validate() twice.
        let err = config.validate().unwrap_err();
        assert!(err.contains("import_concurrency"));
    }
    #[test]
    fn test_validate_import_concurrency_too_high() {
        let config = test_config_with_concurrency(257);
        let err = config.validate().unwrap_err();
        assert!(err.contains("import_concurrency"));
    }
    #[test]
    fn test_validate_import_concurrency_valid() {
        let config = test_config_with_concurrency(8);
        assert!(config.validate().is_ok());
    }
    #[test]
    fn test_validate_import_concurrency_boundary_low() {
        let config = test_config_with_concurrency(1);
        assert!(config.validate().is_ok());
    }
    #[test]
    fn test_validate_import_concurrency_boundary_high() {
        let config = test_config_with_concurrency(256);
        assert!(config.validate().is_ok());
    }
}

View file

@ -0,0 +1,59 @@
use std::path::PathBuf;
use thiserror::Error;
/// Crate-wide error type. Backend-specific database errors (rusqlite,
/// tokio-postgres) are flattened into `Database` via the `From` impls below.
#[derive(Debug, Error)]
pub enum PinakesError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    #[error("database error: {0}")]
    Database(String),
    #[error("migration error: {0}")]
    Migration(String),
    #[error("configuration error: {0}")]
    Config(String),
    // Generic media-item lookup failure; tags/collections/files have their
    // own dedicated variants below.
    #[error("media item not found: {0}")]
    NotFound(String),
    #[error("duplicate content hash: {0}")]
    DuplicateHash(String),
    #[error("unsupported media type for path: {0}")]
    UnsupportedMediaType(PathBuf),
    #[error("metadata extraction failed: {0}")]
    MetadataExtraction(String),
    #[error("search query parse error: {0}")]
    SearchParse(String),
    #[error("file not found at path: {0}")]
    FileNotFound(PathBuf),
    #[error("tag not found: {0}")]
    TagNotFound(String),
    #[error("collection not found: {0}")]
    CollectionNotFound(String),
    #[error("invalid operation: {0}")]
    InvalidOperation(String),
}
// Manual (not #[from]) conversions: both backends collapse into the
// string-typed Database variant.
impl From<rusqlite::Error> for PinakesError {
    fn from(e: rusqlite::Error) -> Self {
        Self::Database(e.to_string())
    }
}
impl From<tokio_postgres::Error> for PinakesError {
    fn from(e: tokio_postgres::Error) -> Self {
        Self::Database(e.to_string())
    }
}
/// Convenience alias used throughout the crate.
pub type Result<T> = std::result::Result<T, PinakesError>;

View file

@ -0,0 +1,106 @@
use std::sync::Arc;
use serde::{Deserialize, Serialize};
use tokio::sync::broadcast;
use tracing::warn;
use crate::config::WebhookConfig;
/// Domain events broadcast on the `EventBus` and forwarded to webhooks.
/// Serialized snake_case; `event_name()` provides the matching string ids.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PinakesEvent {
    MediaImported {
        media_id: String,
    },
    MediaUpdated {
        media_id: String,
    },
    MediaDeleted {
        media_id: String,
    },
    ScanCompleted {
        files_found: usize,
        files_processed: usize,
    },
    IntegrityMismatch {
        media_id: String,
        // Expected vs actual content hash of the mismatching item.
        expected: String,
        actual: String,
    },
}
impl PinakesEvent {
pub fn event_name(&self) -> &'static str {
match self {
Self::MediaImported { .. } => "media_imported",
Self::MediaUpdated { .. } => "media_updated",
Self::MediaDeleted { .. } => "media_deleted",
Self::ScanCompleted { .. } => "scan_completed",
Self::IntegrityMismatch { .. } => "integrity_mismatch",
}
}
}
/// Fan-out bus for `PinakesEvent`s, backed by a tokio broadcast channel.
pub struct EventBus {
    tx: broadcast::Sender<PinakesEvent>,
}
impl EventBus {
pub fn new(webhooks: Vec<WebhookConfig>) -> Arc<Self> {
let (tx, _) = broadcast::channel(256);
// Spawn webhook delivery task
if !webhooks.is_empty() {
let mut rx: broadcast::Receiver<PinakesEvent> = tx.subscribe();
let webhooks = Arc::new(webhooks);
tokio::spawn(async move {
while let Ok(event) = rx.recv().await {
let event_name = event.event_name();
for hook in webhooks.iter() {
if hook.events.iter().any(|e| e == event_name || e == "*") {
let url = hook.url.clone();
let event_clone = event.clone();
let secret = hook.secret.clone();
tokio::spawn(async move {
deliver_webhook(&url, &event_clone, secret.as_deref()).await;
});
}
}
}
});
}
Arc::new(Self { tx })
}
pub fn emit(&self, event: PinakesEvent) {
// Ignore send errors (no receivers)
let _ = self.tx.send(event);
}
}
/// Deliver one event to a webhook URL, retrying up to three times with
/// exponential backoff (1s, 2s between attempts). Failures after the final
/// attempt are logged and dropped.
///
/// NOTE(review): `_secret` is accepted but never used — request signing
/// (e.g. an HMAC header) is not implemented; confirm whether that's intended.
async fn deliver_webhook(url: &str, event: &PinakesEvent, _secret: Option<&str>) {
    // Reuse one process-wide client (and its connection pool) instead of
    // constructing a fresh reqwest::Client on every delivery.
    static CLIENT: std::sync::OnceLock<reqwest::Client> = std::sync::OnceLock::new();
    let client = CLIENT.get_or_init(reqwest::Client::new);
    let body = serde_json::to_string(event).unwrap_or_default();
    const MAX_ATTEMPTS: i32 = 3;
    for attempt in 0..MAX_ATTEMPTS {
        match client
            .post(url)
            .header("Content-Type", "application/json")
            .body(body.clone())
            .send()
            .await
        {
            Ok(resp) if resp.status().is_success() => return,
            Ok(resp) => {
                warn!(url, status = %resp.status(), attempt, "webhook delivery failed");
            }
            Err(e) => {
                warn!(url, error = %e, attempt, "webhook delivery error");
            }
        }
        // Exponential backoff — but don't sleep pointlessly after the last
        // attempt (the old loop always slept before giving up).
        if attempt + 1 < MAX_ATTEMPTS {
            tokio::time::sleep(std::time::Duration::from_secs(1 << attempt)).await;
        }
    }
}

View file

@ -0,0 +1,68 @@
use std::path::Path;
use serde::{Deserialize, Serialize};
use crate::error::Result;
use crate::jobs::ExportFormat;
use crate::storage::DynStorageBackend;
/// Summary returned by `export_library`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportResult {
    // Number of media items written to the export file.
    pub items_exported: usize,
    // Destination path, lossily converted to a string.
    pub output_path: String,
}
/// Export library data to the specified format.
pub async fn export_library(
storage: &DynStorageBackend,
format: &ExportFormat,
destination: &Path,
) -> Result<ExportResult> {
let pagination = crate::model::Pagination {
offset: 0,
limit: u64::MAX,
sort: None,
};
let items = storage.list_media(&&pagination).await?;
let count = items.len();
match format {
ExportFormat::Json => {
let json = serde_json::to_string_pretty(&items)
.map_err(|e| crate::error::PinakesError::Config(format!("json serialize: {e}")))?;
std::fs::write(destination, json)?;
}
ExportFormat::Csv => {
let mut csv = String::new();
csv.push_str("id,path,file_name,media_type,content_hash,file_size,title,artist,album,genre,year,duration_secs,description,created_at,updated_at\n");
for item in &items {
csv.push_str(&format!(
"{},{},{},{:?},{},{},{},{},{},{},{},{},{},{},{}\n",
item.id,
item.path.display(),
item.file_name,
item.media_type,
item.content_hash,
item.file_size,
item.title.as_deref().unwrap_or(""),
item.artist.as_deref().unwrap_or(""),
item.album.as_deref().unwrap_or(""),
item.genre.as_deref().unwrap_or(""),
item.year.map(|y| y.to_string()).unwrap_or_default(),
item.duration_secs
.map(|d| d.to_string())
.unwrap_or_default(),
item.description.as_deref().unwrap_or(""),
item.created_at,
item.updated_at,
));
}
std::fs::write(destination, csv)?;
}
}
Ok(ExportResult {
items_exported: count,
output_path: destination.to_string_lossy().to_string(),
})
}

View file

@ -0,0 +1,31 @@
use std::path::Path;
use crate::error::Result;
use crate::model::ContentHash;
const BUFFER_SIZE: usize = 65536;
pub async fn compute_file_hash(path: &Path) -> Result<ContentHash> {
let path = path.to_path_buf();
let hash = tokio::task::spawn_blocking(move || -> Result<ContentHash> {
let mut hasher = blake3::Hasher::new();
let mut file = std::fs::File::open(&path)?;
let mut buf = vec![0u8; BUFFER_SIZE];
loop {
let n = std::io::Read::read(&mut file, &mut buf)?;
if n == 0 {
break;
}
hasher.update(&buf[..n]);
}
Ok(ContentHash::new(hasher.finalize().to_hex().to_string()))
})
.await
.map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))??;
Ok(hash)
}
/// Hash an in-memory byte slice with BLAKE3 (synchronous; for small inputs).
pub fn compute_hash_sync(data: &[u8]) -> ContentHash {
    ContentHash::new(blake3::hash(data).to_hex().to_string())
}

View file

@ -0,0 +1,250 @@
use std::path::{Path, PathBuf};
use tracing::info;
use crate::audit;
use crate::error::{PinakesError, Result};
use crate::hash::compute_file_hash;
use crate::media_type::MediaType;
use crate::metadata;
use crate::model::*;
use crate::storage::DynStorageBackend;
use crate::thumbnail;
/// Outcome of a single file import.
pub struct ImportResult {
    // Id of the newly created item, or of the pre-existing item on duplicate.
    pub media_id: MediaId,
    // True when an item with the same content hash already existed.
    pub was_duplicate: bool,
    // Canonicalized path of the imported file.
    pub path: PathBuf,
}
/// Check that a canonicalized path falls under at least one configured root directory.
/// If no roots are configured, all paths are allowed (for ad-hoc imports).
pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) -> Result<()> {
    let roots = storage.list_root_dirs().await?;
    // An empty root list means "accept anything" (ad-hoc imports).
    if roots.is_empty() {
        return Ok(());
    }
    // Roots that fail to canonicalize are skipped, same as before.
    let inside_some_root = roots.iter().any(|root| {
        root.canonicalize()
            .map(|canonical| path.starts_with(&canonical))
            .unwrap_or(false)
    });
    if inside_some_root {
        Ok(())
    } else {
        Err(PinakesError::InvalidOperation(format!(
            "path {} is not within any configured root directory",
            path.display()
        )))
    }
}
/// Import a single file into the library.
///
/// Steps: existence check, canonicalize, root validation, content hashing
/// (dedup by hash), blocking metadata + thumbnail extraction off the async
/// runtime, insert, custom-field storage, audit entry.
pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<ImportResult> {
    // Check existence BEFORE canonicalizing: `canonicalize` fails with a raw
    // IO error for missing paths, which made the original FileNotFound check
    // (placed after canonicalization) unreachable dead code.
    if !path.exists() {
        return Err(PinakesError::FileNotFound(path.to_path_buf()));
    }
    let path = path.canonicalize()?;
    validate_path_in_roots(storage, &path).await?;
    let media_type = MediaType::from_path(&path)
        .ok_or_else(|| PinakesError::UnsupportedMediaType(path.clone()))?;
    let content_hash = compute_file_hash(&path).await?;
    // Content-hash dedup: an identical file already in the library is
    // reported as a duplicate rather than re-imported.
    if let Some(existing) = storage.get_media_by_hash(&content_hash).await? {
        return Ok(ImportResult {
            media_id: existing.id,
            was_duplicate: true,
            path: path.clone(),
        });
    }
    let file_meta = std::fs::metadata(&path)?;
    let file_size = file_meta.len();
    // Metadata extraction parses files synchronously; keep it off the runtime.
    let extracted = {
        let path_clone = path.clone();
        tokio::task::spawn_blocking(move || metadata::extract_metadata(&path_clone, media_type))
            .await
            .map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
    };
    let file_name = path
        .file_name()
        .unwrap_or_default()
        .to_string_lossy()
        .to_string();
    let now = chrono::Utc::now();
    let media_id = MediaId::new();
    // Generate thumbnail for image types
    let thumb_path = {
        let source = path.clone();
        let thumb_dir = thumbnail::default_thumbnail_dir();
        tokio::task::spawn_blocking(move || {
            thumbnail::generate_thumbnail(media_id, &source, media_type, &thumb_dir)
        })
        .await
        .map_err(|e| PinakesError::MetadataExtraction(e.to_string()))??
    };
    let item = MediaItem {
        id: media_id,
        path: path.clone(),
        file_name,
        media_type,
        content_hash,
        file_size,
        title: extracted.title,
        artist: extracted.artist,
        album: extracted.album,
        genre: extracted.genre,
        year: extracted.year,
        duration_secs: extracted.duration_secs,
        description: extracted.description,
        thumbnail_path: thumb_path,
        custom_fields: std::collections::HashMap::new(),
        created_at: now,
        updated_at: now,
    };
    storage.insert_media(&item).await?;
    // Store extracted extra metadata as custom fields
    for (key, value) in &extracted.extra {
        let field = CustomField {
            field_type: CustomFieldType::Text,
            value: value.clone(),
        };
        // Best-effort: a failed custom field must not abort the whole import.
        if let Err(e) = storage.set_custom_field(media_id, key, &field).await {
            tracing::warn!(
                media_id = %media_id,
                field = %key,
                error = %e,
                "failed to store extracted metadata as custom field"
            );
        }
    }
    audit::record_action(
        storage,
        Some(media_id),
        AuditAction::Imported,
        Some(format!("path={}", path.display())),
    )
    .await?;
    info!(media_id = %media_id, path = %path.display(), "imported media file");
    Ok(ImportResult {
        media_id,
        was_duplicate: false,
        path: path.clone(),
    })
}
/// Return true when any normal component of `path` matches one of
/// `patterns`. Matching is exact, except for the single supported glob
/// `".*"`, which matches any dotfile/dot-directory component.
///
/// The original code carried a third branch (dotfile pattern equal to a
/// dotfile name) that was fully subsumed by the exact-equality check; it has
/// been removed without changing behavior.
pub(crate) fn should_ignore(path: &std::path::Path, patterns: &[String]) -> bool {
    path.components().any(|component| {
        // Only normal components can match; roots/prefixes never do.
        let std::path::Component::Normal(name) = component else {
            return false;
        };
        let name = name.to_string_lossy();
        patterns.iter().any(|pattern| {
            // ".*" is the one glob form recognized: any dotfile matches.
            (pattern.as_str() == ".*" && name.starts_with('.')) || name == pattern.as_str()
        })
    })
}
/// Default number of concurrent import tasks.
const DEFAULT_IMPORT_CONCURRENCY: usize = 8;
/// Recursively import every supported file under `dir`, skipping paths that
/// match `ignore_patterns`, using the default concurrency limit.
///
/// Returns one result per candidate file; individual file failures do not
/// abort the batch.
pub async fn import_directory(
    storage: &DynStorageBackend,
    dir: &Path,
    ignore_patterns: &[String],
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
    import_directory_with_concurrency(storage, dir, ignore_patterns, DEFAULT_IMPORT_CONCURRENCY)
        .await
}
pub async fn import_directory_with_concurrency(
storage: &DynStorageBackend,
dir: &Path,
ignore_patterns: &[String],
concurrency: usize,
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
let concurrency = concurrency.clamp(1, 256);
let dir = dir.to_path_buf();
let patterns = ignore_patterns.to_vec();
let entries: Vec<PathBuf> = {
let dir = dir.clone();
tokio::task::spawn_blocking(move || {
walkdir::WalkDir::new(&dir)
.follow_links(true)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| e.file_type().is_file())
.filter(|e| MediaType::from_path(e.path()).is_some())
.filter(|e| !should_ignore(e.path(), &patterns))
.map(|e| e.path().to_path_buf())
.collect()
})
.await
.map_err(|e| PinakesError::Io(std::io::Error::other(e)))?
};
let mut results = Vec::with_capacity(entries.len());
let mut join_set = tokio::task::JoinSet::new();
let mut pending_paths: Vec<PathBuf> = Vec::new();
for entry_path in entries {
let storage = storage.clone();
let path = entry_path.clone();
pending_paths.push(entry_path);
join_set.spawn(async move {
let result = import_file(&storage, &path).await;
(path, result)
});
// Limit concurrency by draining when we hit the cap
if join_set.len() >= concurrency
&& let Some(Ok((path, result))) = join_set.join_next().await
{
match result {
Ok(r) => results.push(Ok(r)),
Err(e) => {
tracing::warn!(path = %path.display(), error = %e, "failed to import file");
results.push(Err(e));
}
}
}
}
// Drain remaining tasks
while let Some(Ok((path, result))) = join_set.join_next().await {
match result {
Ok(r) => results.push(Ok(r)),
Err(e) => {
tracing::warn!(path = %path.display(), error = %e, "failed to import file");
results.push(Err(e));
}
}
}
Ok(results)
}

View file

@ -0,0 +1,201 @@
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
use crate::error::Result;
use crate::hash::compute_file_hash;
use crate::model::{ContentHash, MediaId};
use crate::storage::DynStorageBackend;
/// Report produced by [`detect_orphans`].
///
/// NOTE(review): `untracked_paths` and `moved_files` are currently always
/// empty — `detect_orphans` only populates `orphaned_ids`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OrphanReport {
    /// Media items whose files no longer exist on disk.
    pub orphaned_ids: Vec<MediaId>,
    /// Files on disk that are not tracked in the database.
    pub untracked_paths: Vec<PathBuf>,
    /// Files that appear to have moved (same hash, different path).
    pub moved_files: Vec<(MediaId, PathBuf, PathBuf)>,
}

/// How [`resolve_orphans`] should handle orphaned records.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum OrphanAction {
    /// Delete the orphaned records from the database.
    Delete,
    /// Leave the records untouched.
    Ignore,
}

/// Outcome of [`verify_integrity`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VerificationReport {
    /// Number of files whose recomputed hash matched the stored hash.
    pub verified: usize,
    /// Items whose hashes differed: `(id, expected_hash, actual_hash)`.
    pub mismatched: Vec<(MediaId, String, String)>,
    /// Items whose backing file no longer exists.
    pub missing: Vec<MediaId>,
    /// Items whose file could not be hashed: `(id, error_message)`.
    pub errors: Vec<(MediaId, String)>,
}
/// Hash-verification state of a media item.
///
/// `Display` and `FromStr` round-trip the lowercase variant names, matching
/// the serde `snake_case` renaming.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum IntegrityStatus {
    /// No verification has been performed yet.
    Unverified,
    /// The last recomputed hash matched the stored hash.
    Verified,
    /// The last recomputed hash differed from the stored hash.
    Mismatch,
    /// The backing file no longer exists on disk.
    Missing,
}
impl std::fmt::Display for IntegrityStatus {
    /// Renders the status as its lowercase wire name (the same strings
    /// accepted by `FromStr`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::Unverified => "unverified",
            Self::Verified => "verified",
            Self::Mismatch => "mismatch",
            Self::Missing => "missing",
        };
        f.write_str(name)
    }
}
impl std::str::FromStr for IntegrityStatus {
    type Err = String;

    /// Parses the lowercase names produced by `Display`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        let status = match s {
            "unverified" => Self::Unverified,
            "verified" => Self::Verified,
            "mismatch" => Self::Mismatch,
            "missing" => Self::Missing,
            other => return Err(format!("unknown integrity status: {other}")),
        };
        Ok(status)
    }
}
/// Detect orphaned media items (files that no longer exist on disk).
pub async fn detect_orphans(storage: &DynStorageBackend) -> Result<OrphanReport> {
let media_paths = storage.list_media_paths().await?;
let mut orphaned_ids = Vec::new();
let moved_files = Vec::new();
for (id, path, _hash) in &media_paths {
if !path.exists() {
orphaned_ids.push(*id);
}
}
info!(
orphaned = orphaned_ids.len(),
total = media_paths.len(),
"orphan detection complete"
);
Ok(OrphanReport {
orphaned_ids,
untracked_paths: Vec::new(),
moved_files,
})
}
/// Resolve orphaned media items by deleting them from the database.
///
/// Returns the number of records deleted (always 0 for `Ignore`).
pub async fn resolve_orphans(
    storage: &DynStorageBackend,
    action: OrphanAction,
    ids: &[MediaId],
) -> Result<u64> {
    if let OrphanAction::Ignore = action {
        info!(count = ids.len(), "orphans ignored");
        return Ok(0);
    }
    // OrphanAction::Delete
    let count = storage.batch_delete_media(ids).await?;
    info!(count, "resolved orphans by deletion");
    Ok(count)
}
/// Verify integrity of media files by recomputing hashes and comparing.
///
/// When `media_ids` is `Some`, only those items are checked; otherwise every
/// catalogued item is checked.
pub async fn verify_integrity(
    storage: &DynStorageBackend,
    media_ids: Option<&[MediaId]>,
) -> Result<VerificationReport> {
    let mut paths_to_check = storage.list_media_paths().await?;
    // When a subset was requested, drop everything outside it.
    if let Some(ids) = media_ids {
        let wanted: std::collections::HashSet<MediaId> = ids.iter().copied().collect();
        paths_to_check.retain(|(id, _, _)| wanted.contains(id));
    }
    let mut report = VerificationReport {
        verified: 0,
        mismatched: Vec::new(),
        missing: Vec::new(),
        errors: Vec::new(),
    };
    for (id, path, expected_hash) in paths_to_check {
        if !path.exists() {
            report.missing.push(id);
            continue;
        }
        match compute_file_hash(&path).await {
            Ok(actual_hash) if actual_hash.0 == expected_hash.0 => report.verified += 1,
            Ok(actual_hash) => {
                report
                    .mismatched
                    .push((id, expected_hash.0, actual_hash.0));
            }
            Err(e) => report.errors.push((id, e.to_string())),
        }
    }
    info!(
        verified = report.verified,
        mismatched = report.mismatched.len(),
        missing = report.missing.len(),
        errors = report.errors.len(),
        "integrity verification complete"
    );
    Ok(report)
}
/// Clean up orphaned thumbnail files that don't correspond to any media item.
///
/// A thumbnail is kept when its file stem matches a known media id; removal
/// failures are logged and skipped. Returns the number of files removed.
pub async fn cleanup_orphaned_thumbnails(
    storage: &DynStorageBackend,
    thumbnail_dir: &Path,
) -> Result<usize> {
    let known_ids: std::collections::HashSet<String> = storage
        .list_media_paths()
        .await?
        .iter()
        .map(|(id, _, _)| id.0.to_string())
        .collect();
    let mut removed = 0;
    if thumbnail_dir.exists() {
        for entry in std::fs::read_dir(thumbnail_dir)?.flatten() {
            let path = entry.path();
            // Thumbnails are named after the media id; anything whose stem
            // is not a known id is an orphan. Non-UTF-8 stems are skipped.
            let stem = path.file_stem().and_then(|s| s.to_str());
            let is_orphan = stem.is_some_and(|s| !known_ids.contains(s));
            if !is_orphan {
                continue;
            }
            match std::fs::remove_file(&path) {
                Ok(()) => removed += 1,
                Err(e) => {
                    warn!(path = %path.display(), error = %e, "failed to remove orphaned thumbnail");
                }
            }
        }
    }
    info!(removed, "orphaned thumbnail cleanup complete");
    Ok(removed)
}

View file

@ -0,0 +1,226 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tokio::sync::{RwLock, mpsc};
use tokio_util::sync::CancellationToken;
use uuid::Uuid;
use crate::model::MediaId;
/// The unit of background work a job performs.
///
/// Serialized with a `type` tag so job descriptions round-trip through JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum JobKind {
    /// Scan for media files. NOTE(review): a `None` path presumably means
    /// "all configured roots" — confirm against the executor wiring.
    Scan {
        path: Option<PathBuf>,
    },
    /// Generate thumbnails for the listed media items.
    GenerateThumbnails {
        media_ids: Vec<MediaId>,
    },
    /// Re-hash and verify the listed media items.
    VerifyIntegrity {
        media_ids: Vec<MediaId>,
    },
    /// Find catalogued items whose files no longer exist on disk.
    OrphanDetection,
    /// Remove thumbnails that belong to no catalogued item.
    CleanupThumbnails,
    /// Export catalog data to `destination` in the given `format`.
    Export {
        format: ExportFormat,
        destination: PathBuf,
    },
}
/// Output format for a [`JobKind::Export`] job.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ExportFormat {
    Json,
    Csv,
}
/// Lifecycle state of a job, tagged `state` when serialized.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "state")]
pub enum JobStatus {
    /// Queued; not yet claimed by a worker.
    Pending,
    /// Claimed by a worker; `progress` and `message` are executor-reported.
    Running { progress: f32, message: String },
    /// Finished successfully with an executor-provided JSON result.
    Completed { result: Value },
    /// Finished with an error.
    Failed { error: String },
    /// Cancelled before or during execution.
    Cancelled,
}
/// A submitted job together with its current status and timestamps.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Job {
    pub id: Uuid,
    pub kind: JobKind,
    pub status: JobStatus,
    /// Submission time.
    pub created_at: DateTime<Utc>,
    /// Time of the last status transition.
    pub updated_at: DateTime<Utc>,
}
/// Internal queue entry handed to a worker: the job identity, its payload,
/// and the token used to cancel it.
struct WorkerItem {
    job_id: Uuid,
    kind: JobKind,
    cancel: CancellationToken,
}
/// In-memory job queue with a fixed pool of worker tasks.
///
/// Job records and cancellation tokens live in shared maps; work items flow
/// to the workers over a bounded channel.
pub struct JobQueue {
    jobs: Arc<RwLock<HashMap<Uuid, Job>>>,
    cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>>,
    tx: mpsc::Sender<WorkerItem>,
}
impl JobQueue {
/// Create a new job queue and spawn `worker_count` background workers.
///
/// The `executor` callback is invoked for each job; it receives the job kind,
/// a progress-reporting callback, and a cancellation token.
pub fn new<F>(worker_count: usize, executor: F) -> Arc<Self>
where
F: Fn(
Uuid,
JobKind,
CancellationToken,
Arc<RwLock<HashMap<Uuid, Job>>>,
) -> tokio::task::JoinHandle<()>
+ Send
+ Sync
+ 'static,
{
let (tx, rx) = mpsc::channel::<WorkerItem>(256);
let rx = Arc::new(tokio::sync::Mutex::new(rx));
let jobs: Arc<RwLock<HashMap<Uuid, Job>>> = Arc::new(RwLock::new(HashMap::new()));
let cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>> =
Arc::new(RwLock::new(HashMap::new()));
let executor = Arc::new(executor);
for _ in 0..worker_count {
let rx = rx.clone();
let jobs = jobs.clone();
let cancellations = cancellations.clone();
let executor = executor.clone();
tokio::spawn(async move {
loop {
let item = {
let mut guard = rx.lock().await;
guard.recv().await
};
let Some(item) = item else { break };
// Mark as running
{
let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&item.job_id) {
job.status = JobStatus::Running {
progress: 0.0,
message: "starting".to_string(),
};
job.updated_at = Utc::now();
}
}
let handle = executor(item.job_id, item.kind, item.cancel, jobs.clone());
let _ = handle.await;
// Clean up cancellation token
cancellations.write().await.remove(&item.job_id);
}
});
}
Arc::new(Self {
jobs,
cancellations,
tx,
})
}
/// Submit a new job, returning its ID.
pub async fn submit(&self, kind: JobKind) -> Uuid {
let id = Uuid::now_v7();
let now = Utc::now();
let cancel = CancellationToken::new();
let job = Job {
id,
kind: kind.clone(),
status: JobStatus::Pending,
created_at: now,
updated_at: now,
};
self.jobs.write().await.insert(id, job);
self.cancellations.write().await.insert(id, cancel.clone());
let item = WorkerItem {
job_id: id,
kind,
cancel,
};
// If the channel is full we still record the job — it'll stay Pending
let _ = self.tx.send(item).await;
id
}
/// Get the status of a job.
pub async fn status(&self, id: Uuid) -> Option<Job> {
self.jobs.read().await.get(&id).cloned()
}
/// List all jobs, most recent first.
pub async fn list(&self) -> Vec<Job> {
let map = self.jobs.read().await;
let mut jobs: Vec<Job> = map.values().cloned().collect();
jobs.sort_by(|a, b| b.created_at.cmp(&a.created_at));
jobs
}
/// Cancel a running or pending job.
pub async fn cancel(&self, id: Uuid) -> bool {
if let Some(token) = self.cancellations.read().await.get(&id) {
token.cancel();
let mut map = self.jobs.write().await;
if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Cancelled;
job.updated_at = Utc::now();
}
true
} else {
false
}
}
/// Update a job's progress. Called by executors.
pub async fn update_progress(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
id: Uuid,
progress: f32,
message: String,
) {
let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Running { progress, message };
job.updated_at = Utc::now();
}
}
/// Mark a job as completed.
pub async fn complete(jobs: &Arc<RwLock<HashMap<Uuid, Job>>>, id: Uuid, result: Value) {
let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Completed { result };
job.updated_at = Utc::now();
}
}
/// Mark a job as failed.
pub async fn fail(jobs: &Arc<RwLock<HashMap<Uuid, Job>>>, id: Uuid, error: String) {
let mut map = jobs.write().await;
if let Some(job) = map.get_mut(&id) {
job.status = JobStatus::Failed { error };
job.updated_at = Utc::now();
}
}
}

View file

@ -0,0 +1,21 @@
//! Core library for the Pinakes media catalog.
//!
//! Functionality is split by concern: importing and scanning media files,
//! per-format metadata extraction, content-hash integrity verification,
//! background jobs, search, and pluggable storage backends.

pub mod audit;
pub mod cache;
pub mod collections;
pub mod config;
pub mod error;
pub mod events;
pub mod export;
pub mod hash;
pub mod import;
pub mod integrity;
pub mod jobs;
pub mod media_type;
pub mod metadata;
pub mod model;
pub mod opener;
pub mod scan;
pub mod scheduler;
pub mod search;
pub mod storage;
pub mod tags;
pub mod thumbnail;

View file

@ -0,0 +1,209 @@
use std::path::Path;
use serde::{Deserialize, Serialize};
/// Concrete file formats recognized by the catalog.
///
/// Detection is purely extension-based; see [`MediaType::from_extension`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MediaType {
    // Audio
    Mp3,
    Flac,
    Ogg,
    Wav,
    Aac,
    Opus,
    // Video
    Mp4,
    Mkv,
    Avi,
    Webm,
    // Documents
    Pdf,
    Epub,
    Djvu,
    // Text
    Markdown,
    PlainText,
    // Images
    Jpeg,
    Png,
    Gif,
    Webp,
    Svg,
    Avif,
    Tiff,
    Bmp,
    // RAW Images
    Cr2,
    Nef,
    Arw,
    Dng,
    Orf,
    Rw2,
    // HEIC/HEIF
    Heic,
}
/// Broad grouping of media types; see [`MediaType::category`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MediaCategory {
    Audio,
    Video,
    Document,
    Text,
    Image,
}
impl MediaType {
    /// Map a file extension (matched case-insensitively) to a media type.
    pub fn from_extension(ext: &str) -> Option<Self> {
        match ext.to_ascii_lowercase().as_str() {
            "mp3" => Some(Self::Mp3),
            "flac" => Some(Self::Flac),
            "ogg" | "oga" => Some(Self::Ogg),
            "wav" => Some(Self::Wav),
            "aac" | "m4a" => Some(Self::Aac),
            "opus" => Some(Self::Opus),
            "mp4" | "m4v" => Some(Self::Mp4),
            "mkv" => Some(Self::Mkv),
            "avi" => Some(Self::Avi),
            "webm" => Some(Self::Webm),
            "pdf" => Some(Self::Pdf),
            "epub" => Some(Self::Epub),
            "djvu" => Some(Self::Djvu),
            "md" | "markdown" => Some(Self::Markdown),
            "txt" | "text" => Some(Self::PlainText),
            "jpg" | "jpeg" => Some(Self::Jpeg),
            "png" => Some(Self::Png),
            "gif" => Some(Self::Gif),
            "webp" => Some(Self::Webp),
            "svg" => Some(Self::Svg),
            "avif" => Some(Self::Avif),
            "tiff" | "tif" => Some(Self::Tiff),
            "bmp" => Some(Self::Bmp),
            "cr2" => Some(Self::Cr2),
            "nef" => Some(Self::Nef),
            "arw" => Some(Self::Arw),
            "dng" => Some(Self::Dng),
            "orf" => Some(Self::Orf),
            "rw2" => Some(Self::Rw2),
            "heic" | "heif" => Some(Self::Heic),
            _ => None,
        }
    }
    /// Detect the media type from a path's extension; `None` for paths with
    /// no extension, a non-UTF-8 extension, or an unknown one.
    pub fn from_path(path: &Path) -> Option<Self> {
        path.extension()
            .and_then(|e| e.to_str())
            .and_then(Self::from_extension)
    }
    /// The canonical MIME type for this format.
    pub fn mime_type(&self) -> &'static str {
        match self {
            Self::Mp3 => "audio/mpeg",
            Self::Flac => "audio/flac",
            Self::Ogg => "audio/ogg",
            Self::Wav => "audio/wav",
            Self::Aac => "audio/aac",
            Self::Opus => "audio/opus",
            Self::Mp4 => "video/mp4",
            Self::Mkv => "video/x-matroska",
            Self::Avi => "video/x-msvideo",
            Self::Webm => "video/webm",
            Self::Pdf => "application/pdf",
            Self::Epub => "application/epub+zip",
            Self::Djvu => "image/vnd.djvu",
            Self::Markdown => "text/markdown",
            Self::PlainText => "text/plain",
            Self::Jpeg => "image/jpeg",
            Self::Png => "image/png",
            Self::Gif => "image/gif",
            Self::Webp => "image/webp",
            Self::Svg => "image/svg+xml",
            Self::Avif => "image/avif",
            Self::Tiff => "image/tiff",
            Self::Bmp => "image/bmp",
            Self::Cr2 => "image/x-canon-cr2",
            Self::Nef => "image/x-nikon-nef",
            Self::Arw => "image/x-sony-arw",
            Self::Dng => "image/x-adobe-dng",
            Self::Orf => "image/x-olympus-orf",
            Self::Rw2 => "image/x-panasonic-rw2",
            Self::Heic => "image/heic",
        }
    }
    /// The broad [`MediaCategory`] this format belongs to.
    pub fn category(&self) -> MediaCategory {
        match self {
            Self::Mp3 | Self::Flac | Self::Ogg | Self::Wav | Self::Aac | Self::Opus => {
                MediaCategory::Audio
            }
            Self::Mp4 | Self::Mkv | Self::Avi | Self::Webm => MediaCategory::Video,
            Self::Pdf | Self::Epub | Self::Djvu => MediaCategory::Document,
            Self::Markdown | Self::PlainText => MediaCategory::Text,
            Self::Jpeg
            | Self::Png
            | Self::Gif
            | Self::Webp
            | Self::Svg
            | Self::Avif
            | Self::Tiff
            | Self::Bmp
            | Self::Cr2
            | Self::Nef
            | Self::Arw
            | Self::Dng
            | Self::Orf
            | Self::Rw2
            | Self::Heic => MediaCategory::Image,
        }
    }
    /// All extensions mapped to this type (the inverse of
    /// [`Self::from_extension`]; keep the two tables in sync).
    pub fn extensions(&self) -> &'static [&'static str] {
        match self {
            Self::Mp3 => &["mp3"],
            Self::Flac => &["flac"],
            Self::Ogg => &["ogg", "oga"],
            Self::Wav => &["wav"],
            Self::Aac => &["aac", "m4a"],
            Self::Opus => &["opus"],
            Self::Mp4 => &["mp4", "m4v"],
            Self::Mkv => &["mkv"],
            Self::Avi => &["avi"],
            Self::Webm => &["webm"],
            Self::Pdf => &["pdf"],
            Self::Epub => &["epub"],
            Self::Djvu => &["djvu"],
            Self::Markdown => &["md", "markdown"],
            Self::PlainText => &["txt", "text"],
            Self::Jpeg => &["jpg", "jpeg"],
            Self::Png => &["png"],
            Self::Gif => &["gif"],
            Self::Webp => &["webp"],
            Self::Svg => &["svg"],
            Self::Avif => &["avif"],
            Self::Tiff => &["tiff", "tif"],
            Self::Bmp => &["bmp"],
            Self::Cr2 => &["cr2"],
            Self::Nef => &["nef"],
            Self::Arw => &["arw"],
            Self::Dng => &["dng"],
            Self::Orf => &["orf"],
            Self::Rw2 => &["rw2"],
            Self::Heic => &["heic", "heif"],
        }
    }
    /// Returns true if this is a RAW image format.
    pub fn is_raw(&self) -> bool {
        matches!(
            self,
            Self::Cr2 | Self::Nef | Self::Arw | Self::Dng | Self::Orf | Self::Rw2
        )
    }
}

View file

@ -0,0 +1,81 @@
use std::path::Path;
use lofty::file::{AudioFile, TaggedFileExt};
use lofty::tag::Accessor;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use super::{ExtractedMetadata, MetadataExtractor};
pub struct AudioExtractor;

impl MetadataExtractor for AudioExtractor {
    /// Extract tags and stream properties from an audio file via lofty.
    ///
    /// # Errors
    /// Returns `MetadataExtraction` when lofty cannot read the file.
    fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
        let tagged_file = lofty::read_from_path(path)
            .map_err(|e| PinakesError::MetadataExtraction(format!("audio metadata: {e}")))?;
        let mut meta = ExtractedMetadata::default();
        // Prefer the container's primary tag, falling back to the first tag.
        // All tag-derived fields come from this single lookup (previously the
        // same lookup was performed twice for no reason).
        if let Some(tag) = tagged_file
            .primary_tag()
            .or_else(|| tagged_file.first_tag())
        {
            meta.title = tag.title().map(|s| s.to_string());
            meta.artist = tag.artist().map(|s| s.to_string());
            meta.album = tag.album().map(|s| s.to_string());
            meta.genre = tag.genre().map(|s| s.to_string());
            // lofty reports the year as u32.
            meta.year = tag.year().map(|y| y as i32);
            if let Some(track) = tag.track() {
                meta.extra
                    .insert("track_number".to_string(), track.to_string());
            }
            if let Some(disc) = tag.disk() {
                meta.extra
                    .insert("disc_number".to_string(), disc.to_string());
            }
            if let Some(comment) = tag.comment() {
                meta.extra
                    .insert("comment".to_string(), comment.to_string());
            }
        }
        // Stream-level properties are available even for untagged files.
        let properties = tagged_file.properties();
        let duration = properties.duration();
        if !duration.is_zero() {
            meta.duration_secs = Some(duration.as_secs_f64());
        }
        if let Some(bitrate) = properties.audio_bitrate() {
            meta.extra
                .insert("bitrate".to_string(), format!("{bitrate} kbps"));
        }
        if let Some(sample_rate) = properties.sample_rate() {
            meta.extra
                .insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
        }
        if let Some(channels) = properties.channels() {
            meta.extra
                .insert("channels".to_string(), channels.to_string());
        }
        Ok(meta)
    }

    fn supported_types(&self) -> &[MediaType] {
        &[
            MediaType::Mp3,
            MediaType::Flac,
            MediaType::Ogg,
            MediaType::Wav,
            MediaType::Aac,
            MediaType::Opus,
        ]
    }
}

View file

@ -0,0 +1,192 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use super::{ExtractedMetadata, MetadataExtractor};
pub struct DocumentExtractor;

impl MetadataExtractor for DocumentExtractor {
    /// Dispatch to the format-specific parser based on the file extension;
    /// unknown or unsupported types yield empty metadata.
    fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
        let Some(media_type) = MediaType::from_path(path) else {
            return Ok(ExtractedMetadata::default());
        };
        match media_type {
            MediaType::Pdf => extract_pdf(path),
            MediaType::Epub => extract_epub(path),
            MediaType::Djvu => extract_djvu(path),
            _ => Ok(ExtractedMetadata::default()),
        }
    }

    fn supported_types(&self) -> &[MediaType] {
        &[MediaType::Pdf, MediaType::Epub, MediaType::Djvu]
    }
}
fn extract_pdf(path: &Path) -> Result<ExtractedMetadata> {
let doc = lopdf::Document::load(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("PDF load: {e}")))?;
let mut meta = ExtractedMetadata::default();
// Find the Info dictionary via the trailer
if let Ok(info_ref) = doc.trailer.get(b"Info") {
let info_obj = if let Ok(reference) = info_ref.as_reference() {
doc.get_object(reference).ok()
} else {
Some(info_ref)
};
if let Some(obj) = info_obj
&& let Ok(dict) = obj.as_dict()
{
if let Ok(title) = dict.get(b"Title") {
meta.title = pdf_object_to_string(title);
}
if let Ok(author) = dict.get(b"Author") {
meta.artist = pdf_object_to_string(author);
}
if let Ok(subject) = dict.get(b"Subject") {
meta.description = pdf_object_to_string(subject);
}
if let Ok(creator) = dict.get(b"Creator") {
meta.extra.insert(
"creator".to_string(),
pdf_object_to_string(creator).unwrap_or_default(),
);
}
if let Ok(producer) = dict.get(b"Producer") {
meta.extra.insert(
"producer".to_string(),
pdf_object_to_string(producer).unwrap_or_default(),
);
}
}
}
// Page count
let page_count = doc.get_pages().len();
if page_count > 0 {
meta.extra
.insert("page_count".to_string(), page_count.to_string());
}
Ok(meta)
}
/// Decode a PDF string or name object to UTF-8 (lossily); other object
/// kinds carry no text and yield `None`.
fn pdf_object_to_string(obj: &lopdf::Object) -> Option<String> {
    let bytes = match obj {
        lopdf::Object::String(bytes, _) => bytes,
        lopdf::Object::Name(name) => name,
        _ => return None,
    };
    Some(String::from_utf8_lossy(bytes).into_owned())
}
/// Read OPF metadata (title, creator, description, plus a few secondary
/// fields) from an EPUB container.
fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
    let doc = epub::doc::EpubDoc::new(path)
        .map_err(|e| PinakesError::MetadataExtraction(format!("EPUB parse: {e}")))?;
    // Fetch one OPF metadata entry as an owned string.
    let entry = |key: &str| doc.mdata(key).map(|item| item.value.clone());
    let mut meta = ExtractedMetadata {
        title: entry("title"),
        artist: entry("creator"),
        description: entry("description"),
        ..Default::default()
    };
    // Secondary fields keep their OPF names as keys in the extra map.
    for key in ["language", "publisher", "date"] {
        if let Some(value) = entry(key) {
            meta.extra.insert(key.to_string(), value);
        }
    }
    Ok(meta)
}
/// Best-effort metadata extraction for DjVu files.
///
/// There is no structured DjVu parser here: the whole file is read, decoded
/// lossily as UTF-8, and scanned for `(metadata ...)` S-expression key/value
/// pairs, falling back to the first `(bookmarks ...)` title. Anything not
/// found simply stays unset; only I/O failures are errors.
fn extract_djvu(path: &Path) -> Result<ExtractedMetadata> {
    // DjVu files contain metadata in SEXPR (S-expression) format within
    // ANTa/ANTz chunks, or in the DIRM chunk. We parse the raw bytes to
    // extract any metadata fields we can find.
    let data = std::fs::read(path)
        .map_err(|e| PinakesError::MetadataExtraction(format!("DjVu read: {e}")))?;
    let mut meta = ExtractedMetadata::default();
    // DjVu files start with "AT&T" magic followed by FORM:DJVU or FORM:DJVM
    // NOTE(review): only the minimum length is checked here; the magic bytes
    // themselves are never validated.
    if data.len() < 16 {
        return Ok(meta);
    }
    // Search for metadata annotations in the file. DjVu metadata is stored
    // as S-expressions like (metadata (key "value") ...) within ANTa chunks.
    let content = String::from_utf8_lossy(&data);
    // Look for (metadata ...) blocks
    if let Some(meta_start) = content.find("(metadata") {
        let remainder = &content[meta_start..];
        // Extract key-value pairs like (title "Some Title")
        extract_djvu_field(remainder, "title", &mut meta.title);
        extract_djvu_field(remainder, "author", &mut meta.artist);
        // "subject" takes precedence over "description" for the description.
        let mut desc = None;
        extract_djvu_field(remainder, "subject", &mut desc);
        if desc.is_none() {
            extract_djvu_field(remainder, "description", &mut desc);
        }
        meta.description = desc;
        let mut year_str = None;
        extract_djvu_field(remainder, "year", &mut year_str);
        if let Some(ref y) = year_str {
            meta.year = y.parse().ok();
        }
        let mut creator = None;
        extract_djvu_field(remainder, "creator", &mut creator);
        if let Some(c) = creator {
            meta.extra.insert("creator".to_string(), c);
        }
    }
    // Also check for booklet-style metadata that some DjVu encoders write
    // outside the metadata SEXPR
    if meta.title.is_none()
        && let Some(title_start) = content.find("(bookmarks")
    {
        let remainder = &content[title_start..];
        // First bookmark title is often the document title
        if let Some(q1) = remainder.find('"') {
            let after_q1 = &remainder[q1 + 1..];
            if let Some(q2) = after_q1.find('"') {
                let val = &after_q1[..q2];
                if !val.is_empty() {
                    meta.title = Some(val.to_string());
                }
            }
        }
    }
    Ok(meta)
}
/// Pull the first quoted value following `(key` out of a DjVu S-expression
/// fragment and store it in `out`. `out` is left untouched when the key is
/// absent, its value is not quoted (or the quote is unterminated), or the
/// quoted string is empty.
fn extract_djvu_field(sexpr: &str, key: &str, out: &mut Option<String>) {
    // Look for patterns like (key "value") in the S-expression.
    let needle = format!("({key}");
    let Some(start) = sexpr.find(&needle) else {
        return;
    };
    let tail = &sexpr[start + needle.len()..];
    // The value must sit between a matched pair of double quotes.
    let Some(open) = tail.find('"') else {
        return;
    };
    let after_open = &tail[open + 1..];
    let Some(close) = after_open.find('"') else {
        return;
    };
    let value = &after_open[..close];
    if !value.is_empty() {
        *out = Some(value.to_string());
    }
}

View file

@ -0,0 +1,213 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
use super::{ExtractedMetadata, MetadataExtractor};
pub struct ImageExtractor;

/// Copy one EXIF tag's display value into `meta.extra` under `key`.
/// Missing tags and empty values are skipped.
fn put_display(meta: &mut ExtractedMetadata, exif_data: &exif::Exif, tag: exif::Tag, key: &str) {
    if let Some(field) = exif_data.get_field(tag, exif::In::PRIMARY) {
        let val = field.display_value().to_string();
        if !val.is_empty() {
            meta.extra.insert(key.to_string(), val);
        }
    }
}

/// Read one EXIF tag's display value and strip the surrounding quotes that
/// ASCII values are rendered with. Returns `None` for missing tags, empty
/// values, and the empty quoted string `""`.
fn unquoted_display(exif_data: &exif::Exif, tag: exif::Tag) -> Option<String> {
    let field = exif_data.get_field(tag, exif::In::PRIMARY)?;
    let val = field.display_value().to_string();
    if val.is_empty() || val == "\"\"" {
        return None;
    }
    Some(val.trim_matches('"').to_string())
}

impl MetadataExtractor for ImageExtractor {
    /// Extract EXIF metadata from an image file.
    ///
    /// Files without readable EXIF yield empty metadata rather than an
    /// error; only failing to open the file is an error. The repeated
    /// get-field/display/insert boilerplate of the previous version is
    /// factored into `put_display` / `unquoted_display`.
    fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
        let mut meta = ExtractedMetadata::default();
        let file = std::fs::File::open(path)?;
        let mut buf_reader = std::io::BufReader::new(&file);
        let exif_data = match exif::Reader::new().read_from_container(&mut buf_reader) {
            Ok(exif) => exif,
            Err(_) => return Ok(meta),
        };
        // Image dimensions: prefer the EXIF pixel dimensions, falling back
        // to the TIFF image width/length tags.
        let width = exif_data
            .get_field(exif::Tag::PixelXDimension, exif::In::PRIMARY)
            .or_else(|| exif_data.get_field(exif::Tag::ImageWidth, exif::In::PRIMARY))
            .and_then(field_to_u32);
        if let Some(w) = width {
            meta.extra.insert("width".to_string(), w.to_string());
        }
        let height = exif_data
            .get_field(exif::Tag::PixelYDimension, exif::In::PRIMARY)
            .or_else(|| exif_data.get_field(exif::Tag::ImageLength, exif::In::PRIMARY))
            .and_then(field_to_u32);
        if let Some(h) = height {
            meta.extra.insert("height".to_string(), h.to_string());
        }
        // Camera make and model
        put_display(&mut meta, &exif_data, exif::Tag::Make, "camera_make");
        put_display(&mut meta, &exif_data, exif::Tag::Model, "camera_model");
        // Date taken: prefer the original capture time over the file time.
        let date_field = exif_data
            .get_field(exif::Tag::DateTimeOriginal, exif::In::PRIMARY)
            .or_else(|| exif_data.get_field(exif::Tag::DateTime, exif::In::PRIMARY));
        if let Some(date) = date_field {
            let val = date.display_value().to_string();
            if !val.is_empty() {
                meta.extra.insert("date_taken".to_string(), val);
            }
        }
        // GPS coordinates: all four fields must be present and decodable.
        let gps = (
            exif_data.get_field(exif::Tag::GPSLatitude, exif::In::PRIMARY),
            exif_data.get_field(exif::Tag::GPSLatitudeRef, exif::In::PRIMARY),
            exif_data.get_field(exif::Tag::GPSLongitude, exif::In::PRIMARY),
            exif_data.get_field(exif::Tag::GPSLongitudeRef, exif::In::PRIMARY),
        );
        if let (Some(lat), Some(lat_ref), Some(lon), Some(lon_ref)) = gps {
            if let (Some(lat_val), Some(lon_val)) =
                (dms_to_decimal(lat, lat_ref), dms_to_decimal(lon, lon_ref))
            {
                meta.extra
                    .insert("gps_latitude".to_string(), format!("{lat_val:.6}"));
                meta.extra
                    .insert("gps_longitude".to_string(), format!("{lon_val:.6}"));
            }
        }
        // Exposure info
        put_display(
            &mut meta,
            &exif_data,
            exif::Tag::PhotographicSensitivity,
            "iso",
        );
        put_display(&mut meta, &exif_data, exif::Tag::ExposureTime, "exposure_time");
        put_display(&mut meta, &exif_data, exif::Tag::FNumber, "f_number");
        put_display(&mut meta, &exif_data, exif::Tag::FocalLength, "focal_length");
        // Lens model (quoted ASCII value)
        if let Some(lens) = unquoted_display(&exif_data, exif::Tag::LensModel) {
            meta.extra.insert("lens_model".to_string(), lens);
        }
        put_display(&mut meta, &exif_data, exif::Tag::Flash, "flash");
        put_display(&mut meta, &exif_data, exif::Tag::Orientation, "orientation");
        put_display(&mut meta, &exif_data, exif::Tag::Software, "software");
        // Promote descriptive EXIF fields to the top-level metadata slots.
        meta.title = unquoted_display(&exif_data, exif::Tag::ImageDescription);
        meta.artist = unquoted_display(&exif_data, exif::Tag::Artist);
        meta.description = unquoted_display(&exif_data, exif::Tag::Copyright);
        Ok(meta)
    }

    fn supported_types(&self) -> &[MediaType] {
        &[
            MediaType::Jpeg,
            MediaType::Png,
            MediaType::Gif,
            MediaType::Webp,
            MediaType::Avif,
            MediaType::Tiff,
            MediaType::Bmp,
            // RAW formats (TIFF-based, kamadak-exif handles these)
            MediaType::Cr2,
            MediaType::Nef,
            MediaType::Arw,
            MediaType::Dng,
            MediaType::Orf,
            MediaType::Rw2,
            // HEIC
            MediaType::Heic,
        ]
    }
}
/// First numeric value of a LONG or SHORT EXIF field, widened to `u32`;
/// `None` for other value kinds or empty fields.
fn field_to_u32(field: &exif::Field) -> Option<u32> {
    match &field.value {
        exif::Value::Long(values) => values.first().copied(),
        exif::Value::Short(values) => values.first().map(|&v| u32::from(v)),
        _ => None,
    }
}
fn dms_to_decimal(dms_field: &exif::Field, ref_field: &exif::Field) -> Option<f64> {
if let exif::Value::Rational(ref rationals) = dms_field.value
&& rationals.len() >= 3
{
let degrees = rationals[0].to_f64();
let minutes = rationals[1].to_f64();
let seconds = rationals[2].to_f64();
let mut decimal = degrees + minutes / 60.0 + seconds / 3600.0;
let ref_str = ref_field.display_value().to_string();
if ref_str.contains('S') || ref_str.contains('W') {
decimal = -decimal;
}
return Some(decimal);
}
None
}

View file

@ -0,0 +1,40 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
use super::{ExtractedMetadata, MetadataExtractor};
pub struct MarkdownExtractor;

impl MetadataExtractor for MarkdownExtractor {
    /// Extract YAML front-matter fields (title, author, description, date)
    /// from a Markdown or plain-text file. Files without parseable
    /// front-matter yield empty metadata.
    fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
        let content = std::fs::read_to_string(path)?;
        let matter = gray_matter::Matter::<gray_matter::engine::YAML>::new();
        let mut meta = ExtractedMetadata::default();
        if let Some(gray_matter::Pod::Hash(map)) = matter.parse(&content).ok().and_then(|p| p.data)
        {
            // Fetch one front-matter entry when it is a plain string.
            let text = |key: &str| match map.get(key) {
                Some(gray_matter::Pod::String(s)) => Some(s.clone()),
                _ => None,
            };
            meta.title = text("title");
            meta.artist = text("author");
            meta.description = text("description");
            if let Some(date) = text("date") {
                meta.extra.insert("date".to_string(), date);
            }
        }
        Ok(meta)
    }

    fn supported_types(&self) -> &[MediaType] {
        &[MediaType::Markdown, MediaType::PlainText]
    }
}

View file

@ -0,0 +1,46 @@
pub mod audio;
pub mod document;
pub mod image;
pub mod markdown;
pub mod video;
use std::collections::HashMap;
use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
/// Metadata pulled out of a media file by one of the extractors.
///
/// Every field is optional: an extractor fills in whatever the container
/// actually carries and leaves the rest as `None`/empty.
#[derive(Debug, Clone, Default)]
pub struct ExtractedMetadata {
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub duration_secs: Option<f64>,
    pub description: Option<String>,
    // Format-specific key/value pairs (e.g. "resolution", "sample_rate").
    pub extra: HashMap<String, String>,
}

/// A format-specific metadata reader.
pub trait MetadataExtractor: Send + Sync {
    /// Read metadata from the file at `path`.
    fn extract(&self, path: &Path) -> Result<ExtractedMetadata>;
    /// The media types this extractor knows how to handle.
    fn supported_types(&self) -> &[MediaType];
}
/// Dispatch to the first extractor that supports `media_type`.
///
/// Unsupported types yield empty metadata rather than an error, so an
/// import never fails merely because nothing can read a file's tags.
pub fn extract_metadata(path: &Path, media_type: MediaType) -> Result<ExtractedMetadata> {
    // The extractors are stateless unit structs; borrow them instead of
    // heap-allocating five boxed trait objects on every call.
    let extractors: [&dyn MetadataExtractor; 5] = [
        &audio::AudioExtractor,
        &document::DocumentExtractor,
        &video::VideoExtractor,
        &markdown::MarkdownExtractor,
        &image::ImageExtractor,
    ];
    extractors
        .iter()
        .find(|extractor| extractor.supported_types().contains(&media_type))
        .map_or_else(|| Ok(ExtractedMetadata::default()), |e| e.extract(path))
}

View file

@ -0,0 +1,120 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::MediaType;
use super::{ExtractedMetadata, MetadataExtractor};
pub struct VideoExtractor;

impl MetadataExtractor for VideoExtractor {
    /// Extract container metadata from a video file.
    ///
    /// WebM is a Matroska profile, so it goes through the same EBML parser
    /// as MKV (previously it silently fell through to empty metadata even
    /// though `supported_types` advertises it). AVI has no dedicated
    /// parser here and deliberately yields empty metadata.
    fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
        match MediaType::from_path(path) {
            Some(MediaType::Mkv) | Some(MediaType::Webm) => extract_mkv(path),
            Some(MediaType::Mp4) => extract_mp4(path),
            _ => Ok(ExtractedMetadata::default()),
        }
    }

    fn supported_types(&self) -> &[MediaType] {
        &[
            MediaType::Mp4,
            MediaType::Mkv,
            MediaType::Avi,
            MediaType::Webm,
        ]
    }
}
fn extract_mkv(path: &Path) -> Result<ExtractedMetadata> {
let file = std::fs::File::open(path)?;
let mkv = matroska::Matroska::open(file)
.map_err(|e| PinakesError::MetadataExtraction(format!("MKV parse: {e}")))?;
let mut meta = ExtractedMetadata {
title: mkv.info.title.clone(),
duration_secs: mkv.info.duration.map(|dur| dur.as_secs_f64()),
..Default::default()
};
// Extract resolution and codec info from tracks
for track in &mkv.tracks {
match &track.settings {
matroska::Settings::Video(v) => {
meta.extra.insert(
"resolution".to_string(),
format!("{}x{}", v.pixel_width, v.pixel_height),
);
if !track.codec_id.is_empty() {
meta.extra
.insert("video_codec".to_string(), track.codec_id.clone());
}
}
matroska::Settings::Audio(a) => {
meta.extra.insert(
"sample_rate".to_string(),
format!("{} Hz", a.sample_rate as u32),
);
meta.extra
.insert("channels".to_string(), a.channels.to_string());
if !track.codec_id.is_empty() {
meta.extra
.insert("audio_codec".to_string(), track.codec_id.clone());
}
}
_ => {}
}
}
Ok(meta)
}
// Read tag data and stream properties from an MP4 via lofty.
fn extract_mp4(path: &Path) -> Result<ExtractedMetadata> {
    use lofty::file::{AudioFile, TaggedFileExt};
    use lofty::tag::Accessor;

    let tagged = lofty::read_from_path(path)
        .map_err(|e| PinakesError::MetadataExtraction(format!("MP4 metadata: {e}")))?;

    let mut meta = ExtractedMetadata::default();

    // Prefer the container's primary tag, fall back to whichever tag exists.
    if let Some(tag) = tagged.primary_tag().or_else(|| tagged.first_tag()) {
        meta.title = tag.title().map(|s| s.into_owned());
        meta.artist = tag.artist().map(|s| s.into_owned());
        meta.album = tag.album().map(|s| s.into_owned());
        meta.genre = tag.genre().map(|s| s.into_owned());
        meta.year = tag.year().map(|y| y as i32);
    }

    let props = tagged.properties();
    let duration = props.duration();
    if !duration.is_zero() {
        meta.duration_secs = Some(duration.as_secs_f64());
    }
    if let Some(bitrate) = props.audio_bitrate() {
        meta.extra
            .insert("audio_bitrate".to_string(), format!("{bitrate} kbps"));
    }
    if let Some(rate) = props.sample_rate() {
        meta.extra
            .insert("sample_rate".to_string(), format!("{rate} Hz"));
    }
    if let Some(channels) = props.channels() {
        meta.extra
            .insert("channels".to_string(), channels.to_string());
    }
    Ok(meta)
}

View file

@ -0,0 +1,191 @@
use std::collections::HashMap;
use std::fmt;
use std::path::PathBuf;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::media_type::MediaType;
/// Unique identifier for a media item.
///
/// Backed by UUIDv7, so freshly generated ids sort roughly by creation time.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct MediaId(pub Uuid);

impl MediaId {
    /// Generate a fresh time-ordered (v7) id.
    pub fn new() -> Self {
        Self(Uuid::now_v7())
    }
}

impl fmt::Display for MediaId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Default for MediaId {
    fn default() -> Self {
        Self::new()
    }
}

/// Hex-encoded digest of a file's contents.
/// NOTE(review): the hashing algorithm is chosen by the caller of `new` —
/// presumably BLAKE3 given the workspace dependencies; confirm.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ContentHash(pub String);

impl ContentHash {
    /// Wrap an already hex-encoded digest string.
    pub fn new(hex: String) -> Self {
        Self(hex)
    }
}

impl fmt::Display for ContentHash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// A single cataloged file plus everything extracted or derived from it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MediaItem {
    pub id: MediaId,
    // Location of the file on disk.
    pub path: PathBuf,
    pub file_name: String,
    pub media_type: MediaType,
    // Digest of the file contents.
    pub content_hash: ContentHash,
    // Size in bytes.
    pub file_size: u64,
    // Optional fields below are populated by metadata extraction when the
    // container provides them.
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub duration_secs: Option<f64>,
    pub description: Option<String>,
    pub thumbnail_path: Option<PathBuf>,
    // User-defined typed fields, keyed by field name.
    pub custom_fields: HashMap<String, CustomField>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// A user-defined metadata value. `value` is stored as a string and
/// interpreted according to `field_type`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomField {
    pub field_type: CustomFieldType,
    pub value: String,
}

/// How a `CustomField`'s string value should be interpreted.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum CustomFieldType {
    Text,
    Number,
    Date,
    Boolean,
}

/// A tag; `parent_id` allows building hierarchical tag trees.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tag {
    pub id: Uuid,
    pub name: String,
    pub parent_id: Option<Uuid>,
    pub created_at: DateTime<Utc>,
}
/// A named grouping of media items.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Collection {
    pub id: Uuid,
    pub name: String,
    pub description: Option<String>,
    pub kind: CollectionKind,
    // Search query string — presumably defines membership for `Virtual`
    // collections; confirm against the query engine.
    pub filter_query: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

/// Whether membership is curated by hand or derived from a query.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum CollectionKind {
    Manual,
    Virtual,
}

/// Membership row linking a media item into a collection, with an explicit
/// ordering position.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CollectionMember {
    pub collection_id: Uuid,
    pub media_id: MediaId,
    pub position: i32,
    pub added_at: DateTime<Utc>,
}

/// One entry in the audit log. `media_id` is `None` for actions not tied
/// to a specific item.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditEntry {
    pub id: Uuid,
    pub media_id: Option<MediaId>,
    pub action: AuditAction,
    pub details: Option<String>,
    pub timestamp: DateTime<Utc>,
}
/// The kind of event recorded in an `AuditEntry`.
/// Serialized in `snake_case`, matching the manual `Display` impl below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AuditAction {
    Imported,
    Updated,
    Deleted,
    Tagged,
    Untagged,
    AddedToCollection,
    RemovedFromCollection,
    Opened,
    Scanned,
}
impl fmt::Display for AuditAction {
    // Human-readable form; identical to the serde `snake_case` names.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::Imported => "imported",
            Self::Updated => "updated",
            Self::Deleted => "deleted",
            Self::Tagged => "tagged",
            Self::Untagged => "untagged",
            Self::AddedToCollection => "added_to_collection",
            Self::RemovedFromCollection => "removed_from_collection",
            Self::Opened => "opened",
            Self::Scanned => "scanned",
        })
    }
}
/// Offset/limit paging parameters plus an optional sort key.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Pagination {
    pub offset: u64,
    pub limit: u64,
    pub sort: Option<String>,
}

impl Pagination {
    /// Build paging parameters from explicit values.
    pub fn new(offset: u64, limit: u64, sort: Option<String>) -> Self {
        Pagination { offset, limit, sort }
    }
}

impl Default for Pagination {
    /// First page of 50 items, with no explicit sort.
    fn default() -> Self {
        Self::new(0, 50, None)
    }
}
/// A search query saved by the user for reuse.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SavedSearch {
    pub id: Uuid,
    pub name: String,
    // Raw query string in the search DSL.
    pub query: String,
    pub sort_order: Option<String>,
    pub created_at: DateTime<Utc>,
}

View file

@ -0,0 +1,79 @@
use std::path::Path;
use std::process::Command;
use crate::error::{PinakesError, Result};
/// Abstraction over "open this file with the platform's default handler".
pub trait Opener: Send + Sync {
    /// Launch the handler for `path`; errors if the command cannot be
    /// spawned or exits with a non-success status.
    fn open(&self, path: &Path) -> Result<()>;
}
/// Linux opener using xdg-open
pub struct XdgOpener;

impl Opener for XdgOpener {
    fn open(&self, path: &Path) -> Result<()> {
        let status = Command::new("xdg-open")
            .arg(path)
            .status()
            .map_err(|e| PinakesError::InvalidOperation(format!("failed to run xdg-open: {e}")))?;
        // Treat any non-zero exit as a failure to open the file.
        if !status.success() {
            return Err(PinakesError::InvalidOperation(format!(
                "xdg-open exited with status {status}"
            )));
        }
        Ok(())
    }
}
/// macOS opener using the `open` command
pub struct MacOpener;

impl Opener for MacOpener {
    fn open(&self, path: &Path) -> Result<()> {
        let status = Command::new("open")
            .arg(path)
            .status()
            .map_err(|e| PinakesError::InvalidOperation(format!("failed to run open: {e}")))?;
        // Treat any non-zero exit as a failure to open the file.
        if !status.success() {
            return Err(PinakesError::InvalidOperation(format!(
                "open exited with status {status}"
            )));
        }
        Ok(())
    }
}
/// Windows opener using `cmd /c start`
pub struct WindowsOpener;

impl Opener for WindowsOpener {
    fn open(&self, path: &Path) -> Result<()> {
        // The empty string after `start` fills the window-title slot so a
        // quoted path is not mistaken for the title.
        let spawn = Command::new("cmd")
            .args(["/C", "start", ""])
            .arg(path)
            .status();
        let status = spawn.map_err(|e| {
            PinakesError::InvalidOperation(format!("failed to run cmd /c start: {e}"))
        })?;
        if !status.success() {
            return Err(PinakesError::InvalidOperation(format!(
                "cmd /c start exited with status {status}"
            )));
        }
        Ok(())
    }
}
/// Returns the platform-appropriate opener.
pub fn default_opener() -> Box<dyn Opener> {
    if cfg!(target_os = "windows") {
        return Box::new(WindowsOpener);
    }
    if cfg!(target_os = "macos") {
        return Box::new(MacOpener);
    }
    // Anything else is assumed to have xdg-open available.
    Box::new(XdgOpener)
}

View file

@ -0,0 +1,283 @@
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
use notify::{PollWatcher, RecursiveMode, Watcher};
use tokio::sync::mpsc;
use tracing::{info, warn};
use crate::error::Result;
use crate::import;
use crate::storage::DynStorageBackend;
/// Point-in-time summary of a directory scan.
pub struct ScanStatus {
    // True while a scan is in progress; the scan functions always return a
    // final status with this set to false.
    pub scanning: bool,
    pub files_found: usize,
    pub files_processed: usize,
    // Error messages (capped at MAX_STORED_ERRORS when taken from a
    // ScanProgress snapshot).
    pub errors: Vec<String>,
}
/// Shared scan progress that can be read by the status endpoint while a scan runs.
#[derive(Clone)]
pub struct ScanProgress {
    pub is_scanning: Arc<AtomicBool>,
    pub files_found: Arc<AtomicUsize>,
    pub files_processed: Arc<AtomicUsize>,
    // Total number of errors, even beyond the stored-message cap.
    pub error_count: Arc<AtomicUsize>,
    // At most MAX_STORED_ERRORS messages from the current scan.
    pub error_messages: Arc<Mutex<Vec<String>>>,
}

// Cap on retained error messages so a pathological scan cannot grow memory
// without bound.
const MAX_STORED_ERRORS: usize = 100;
impl ScanProgress {
    /// Fresh, idle progress tracker.
    pub fn new() -> Self {
        Self {
            is_scanning: Arc::new(AtomicBool::new(false)),
            files_found: Arc::new(AtomicUsize::new(0)),
            files_processed: Arc::new(AtomicUsize::new(0)),
            error_count: Arc::new(AtomicUsize::new(0)),
            error_messages: Arc::new(Mutex::new(Vec::new())),
        }
    }

    /// Copy the current counters and stored errors into a `ScanStatus`.
    /// A poisoned message lock degrades to an empty error list.
    pub fn snapshot(&self) -> ScanStatus {
        let errors = match self.error_messages.lock() {
            Ok(guard) => guard.clone(),
            Err(_) => Vec::new(),
        };
        ScanStatus {
            scanning: self.is_scanning.load(Ordering::Acquire),
            files_found: self.files_found.load(Ordering::Acquire),
            files_processed: self.files_processed.load(Ordering::Acquire),
            errors,
        }
    }

    // Mark the scan as started and reset all counters/messages.
    fn begin(&self) {
        self.is_scanning.store(true, Ordering::Release);
        for counter in [&self.files_found, &self.files_processed, &self.error_count] {
            counter.store(0, Ordering::Release);
        }
        if let Ok(mut stored) = self.error_messages.lock() {
            stored.clear();
        }
    }

    // Count every error, but only retain the first MAX_STORED_ERRORS
    // messages.
    fn record_error(&self, message: String) {
        self.error_count.fetch_add(1, Ordering::Release);
        match self.error_messages.lock() {
            Ok(mut stored) if stored.len() < MAX_STORED_ERRORS => stored.push(message),
            _ => {}
        }
    }

    // Mark the scan as finished.
    fn finish(&self) {
        self.is_scanning.store(false, Ordering::Release);
    }
}
impl Default for ScanProgress {
    /// Same as `ScanProgress::new()`.
    fn default() -> Self {
        Self::new()
    }
}
/// Scan `dir` without progress reporting; thin wrapper over
/// `scan_directory_with_progress`.
pub async fn scan_directory(
    storage: &DynStorageBackend,
    dir: &Path,
    ignore_patterns: &[String],
) -> Result<ScanStatus> {
    scan_directory_with_progress(storage, dir, ignore_patterns, None).await
}
/// Import every eligible file under `dir`, optionally mirroring counters
/// and errors into a shared `ScanProgress` for live status reads.
pub async fn scan_directory_with_progress(
    storage: &DynStorageBackend,
    dir: &Path,
    ignore_patterns: &[String],
    progress: Option<&ScanProgress>,
) -> Result<ScanStatus> {
    info!(dir = %dir.display(), "starting directory scan");
    if let Some(p) = progress {
        p.begin();
    }
    let results = import::import_directory(storage, dir, ignore_patterns).await?;
    // Note: for configurable concurrency, use import_directory_with_concurrency directly
    let mut errors = Vec::new();
    for outcome in &results {
        if let Err(e) = outcome {
            let message = e.to_string();
            if let Some(p) = progress {
                p.record_error(message.clone());
            }
            errors.push(message);
        }
    }
    // Everything that is not an error was imported successfully.
    let processed = results.len() - errors.len();
    if let Some(p) = progress {
        p.files_found.store(results.len(), Ordering::Release);
        p.files_processed.store(processed, Ordering::Release);
        p.finish();
    }
    Ok(ScanStatus {
        scanning: false,
        files_found: results.len(),
        files_processed: processed,
        errors,
    })
}
/// Scan every registered root directory without progress reporting; thin
/// wrapper over `scan_all_roots_with_progress`.
pub async fn scan_all_roots(
    storage: &DynStorageBackend,
    ignore_patterns: &[String],
) -> Result<Vec<ScanStatus>> {
    scan_all_roots_with_progress(storage, ignore_patterns, None).await
}
/// Scan every registered root, producing one status per root. A failing
/// root is recorded as an error status instead of aborting the remainder.
pub async fn scan_all_roots_with_progress(
    storage: &DynStorageBackend,
    ignore_patterns: &[String],
    progress: Option<&ScanProgress>,
) -> Result<Vec<ScanStatus>> {
    let mut statuses = Vec::new();
    for root in storage.list_root_dirs().await? {
        let status =
            match scan_directory_with_progress(storage, &root, ignore_patterns, progress).await {
                Ok(status) => status,
                Err(e) => {
                    warn!(root = %root.display(), error = %e, "failed to scan root directory");
                    ScanStatus {
                        scanning: false,
                        files_found: 0,
                        files_processed: 0,
                        errors: vec![e.to_string()],
                    }
                }
            };
        statuses.push(status);
    }
    Ok(statuses)
}
/// Watches directories for changes and surfaces changed paths via a channel.
/// The watcher handle is retained only to keep the OS watches registered.
pub struct FileWatcher {
    _watcher: Box<dyn Watcher + Send>,
    rx: mpsc::Receiver<PathBuf>,
}
impl FileWatcher {
    /// Build a recursive watcher over `dirs`, preferring the platform's
    /// native backend and falling back to a 5-second polling watcher when
    /// the native one cannot be created.
    pub fn new(dirs: &[PathBuf]) -> Result<Self> {
        let (tx, rx) = mpsc::channel(1024);
        // Try the recommended (native) watcher first, fall back to polling
        let watcher: Box<dyn Watcher + Send> = match Self::try_native_watcher(dirs, tx.clone()) {
            Ok(w) => {
                info!("using native filesystem watcher");
                w
            }
            Err(native_err) => {
                warn!(error = %native_err, "native watcher failed, falling back to polling");
                Self::polling_watcher(dirs, tx)?
            }
        };
        Ok(Self {
            _watcher: watcher,
            rx,
        })
    }

    /// Shared event handler used by both backends: forwards every path in
    /// an event into `tx`. `blocking_send` is fine here because notify
    /// invokes the handler on its own (non-async) thread.
    fn forward_paths(
        tx: mpsc::Sender<PathBuf>,
    ) -> impl FnMut(notify::Result<notify::Event>) + Send + 'static {
        move |res| {
            if let Ok(event) = res {
                for path in event.paths {
                    if tx.blocking_send(path).is_err() {
                        tracing::warn!("filesystem watcher channel closed, stopping");
                        break;
                    }
                }
            }
        }
    }

    // Native backend; `tx` is moved into the handler directly (the
    // previous extra clone was redundant).
    fn try_native_watcher(
        dirs: &[PathBuf],
        tx: mpsc::Sender<PathBuf>,
    ) -> std::result::Result<Box<dyn Watcher + Send>, notify::Error> {
        let mut watcher = notify::recommended_watcher(Self::forward_paths(tx))?;
        for dir in dirs {
            watcher.watch(dir, RecursiveMode::Recursive)?;
        }
        Ok(Box::new(watcher))
    }

    // Polling fallback with a 5s interval; notify errors are surfaced as
    // I/O errors.
    fn polling_watcher(
        dirs: &[PathBuf],
        tx: mpsc::Sender<PathBuf>,
    ) -> Result<Box<dyn Watcher + Send>> {
        let poll_interval = std::time::Duration::from_secs(5);
        let config = notify::Config::default().with_poll_interval(poll_interval);
        let mut watcher = PollWatcher::new(Self::forward_paths(tx), config)
            .map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))?;
        for dir in dirs {
            watcher
                .watch(dir, RecursiveMode::Recursive)
                .map_err(|e| crate::error::PinakesError::Io(std::io::Error::other(e)))?;
        }
        Ok(Box::new(watcher))
    }

    /// Await the next changed path; `None` once the watcher shuts down.
    pub async fn next_change(&mut self) -> Option<PathBuf> {
        self.rx.recv().await
    }
}
/// Watch `dirs` and import every supported file as change events arrive.
/// Runs until the watcher channel closes.
pub async fn watch_and_import(
    storage: DynStorageBackend,
    dirs: Vec<PathBuf>,
    ignore_patterns: Vec<String>,
) -> Result<()> {
    let mut watcher = FileWatcher::new(&dirs)?;
    info!("filesystem watcher started");
    while let Some(path) = watcher.next_change().await {
        // Skip directories, unsupported media types, and ignored paths.
        let importable = path.is_file()
            && crate::media_type::MediaType::from_path(&path).is_some()
            && !crate::import::should_ignore(&path, &ignore_patterns);
        if !importable {
            continue;
        }
        info!(path = %path.display(), "detected file change, importing");
        if let Err(e) = import::import_file(&storage, &path).await {
            warn!(path = %path.display(), error = %e, "failed to import changed file");
        }
    }
    Ok(())
}

View file

@ -0,0 +1,517 @@
use std::path::PathBuf;
use std::sync::Arc;
use chrono::{DateTime, Datelike, Utc};
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock;
use tokio_util::sync::CancellationToken;
use uuid::Uuid;
use crate::config::Config;
use crate::jobs::{JobKind, JobQueue};
/// When a scheduled task should fire. All times are UTC.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum Schedule {
    /// Every `secs` seconds.
    Interval { secs: u64 },
    /// Every day at `hour:minute`.
    Daily { hour: u32, minute: u32 },
    /// Every week on `day` (0 = Monday) at `hour:minute`.
    Weekly { day: u32, hour: u32, minute: u32 },
}
impl Schedule {
    /// Compute the next fire time strictly after (or derived from) `from`.
    pub fn next_run(&self, from: DateTime<Utc>) -> DateTime<Utc> {
        match self {
            Schedule::Interval { secs } => from + chrono::Duration::seconds(*secs as i64),
            Schedule::Daily { hour, minute } => {
                // NOTE(review): an out-of-range hour/minute makes
                // `and_hms_opt` return None and falls back to the epoch
                // default, leaving the task permanently overdue — confirm
                // inputs are validated upstream.
                let today = from
                    .date_naive()
                    .and_hms_opt(*hour, *minute, 0)
                    .unwrap_or_default();
                let today_utc = today.and_utc();
                if today_utc > from {
                    today_utc
                } else {
                    today_utc + chrono::Duration::days(1)
                }
            }
            Schedule::Weekly { day, hour, minute } => {
                // Days are numbered 0 = Monday (num_days_from_monday).
                let current_day = from.weekday().num_days_from_monday();
                let target_day = *day;
                let days_ahead = if target_day > current_day {
                    target_day - current_day
                } else if target_day < current_day {
                    7 - (current_day - target_day)
                } else {
                    // Same weekday: fire today if the time is still ahead,
                    // otherwise wait a full week.
                    let today = from
                        .date_naive()
                        .and_hms_opt(*hour, *minute, 0)
                        .unwrap_or_default()
                        .and_utc();
                    if today > from {
                        return today;
                    }
                    7
                };
                let target_date = from.date_naive() + chrono::Duration::days(days_ahead as i64);
                target_date
                    .and_hms_opt(*hour, *minute, 0)
                    .unwrap_or_default()
                    .and_utc()
            }
        }
    }
    /// Short human-readable label (e.g. "Every 1h", "Daily 03:00",
    /// "Mon 03:00") for display in the UI.
    pub fn display_string(&self) -> String {
        match self {
            Schedule::Interval { secs } => {
                if *secs >= 3600 {
                    format!("Every {}h", secs / 3600)
                } else if *secs >= 60 {
                    format!("Every {}m", secs / 60)
                } else {
                    format!("Every {}s", secs)
                }
            }
            Schedule::Daily { hour, minute } => format!("Daily {hour:02}:{minute:02}"),
            Schedule::Weekly { day, hour, minute } => {
                let day_name = match day {
                    0 => "Mon",
                    1 => "Tue",
                    2 => "Wed",
                    3 => "Thu",
                    4 => "Fri",
                    5 => "Sat",
                    _ => "Sun",
                };
                format!("{day_name} {hour:02}:{minute:02}")
            }
        }
    }
}
/// A recurring background task managed by the `TaskScheduler`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScheduledTask {
    // Stable identifier (e.g. "periodic_scan"); also the key used to match
    // persisted state in the config.
    pub id: String,
    // Human-readable display name.
    pub name: String,
    // The job submitted to the queue when this task fires.
    pub kind: JobKind,
    pub schedule: Schedule,
    pub enabled: bool,
    pub last_run: Option<DateTime<Utc>>,
    // Next scheduled fire time; `None` while the task is disabled.
    pub next_run: Option<DateTime<Utc>>,
    // Outcome of the most recent run ("running", "completed", "failed: …",
    // "cancelled").
    pub last_status: Option<String>,
    /// Whether a job for this task is currently running. Skipped during serialization.
    #[serde(default, skip_serializing)]
    pub running: bool,
    /// The job ID of the last submitted job. Skipped during serialization/deserialization.
    #[serde(skip)]
    pub last_job_id: Option<Uuid>,
}
/// Drives recurring background jobs: owns the task list, submits due jobs
/// to the `JobQueue`, and persists enable/schedule state into the config.
pub struct TaskScheduler {
    tasks: Arc<RwLock<Vec<ScheduledTask>>>,
    job_queue: Arc<JobQueue>,
    // Cooperative shutdown signal for the `run` loop.
    cancel: CancellationToken,
    config: Arc<RwLock<Config>>,
    // Where persisted task state is saved; `None` disables saving to disk.
    config_path: Option<PathBuf>,
}
impl TaskScheduler {
    /// Build a scheduler with the built-in default task set. Only the
    /// hourly `periodic_scan` starts enabled; the rest are opt-in.
    pub fn new(
        job_queue: Arc<JobQueue>,
        cancel: CancellationToken,
        config: Arc<RwLock<Config>>,
        config_path: Option<PathBuf>,
    ) -> Self {
        let now = Utc::now();
        let default_tasks = vec![
            ScheduledTask {
                id: "periodic_scan".to_string(),
                name: "Periodic Scan".to_string(),
                kind: JobKind::Scan { path: None },
                schedule: Schedule::Interval { secs: 3600 },
                enabled: true,
                last_run: None,
                next_run: Some(now + chrono::Duration::seconds(3600)),
                last_status: None,
                running: false,
                last_job_id: None,
            },
            ScheduledTask {
                id: "integrity_check".to_string(),
                name: "Integrity Check".to_string(),
                // Empty media_ids means the job decides its own scope.
                // NOTE(review): confirm against the job implementation.
                kind: JobKind::VerifyIntegrity { media_ids: vec![] },
                schedule: Schedule::Weekly {
                    day: 0,
                    hour: 3,
                    minute: 0,
                },
                enabled: false,
                last_run: None,
                next_run: None,
                last_status: None,
                running: false,
                last_job_id: None,
            },
            ScheduledTask {
                id: "orphan_detection".to_string(),
                name: "Orphan Detection".to_string(),
                kind: JobKind::OrphanDetection,
                schedule: Schedule::Daily { hour: 2, minute: 0 },
                enabled: false,
                last_run: None,
                next_run: None,
                last_status: None,
                running: false,
                last_job_id: None,
            },
            ScheduledTask {
                id: "thumbnail_cleanup".to_string(),
                name: "Thumbnail Cleanup".to_string(),
                kind: JobKind::CleanupThumbnails,
                schedule: Schedule::Weekly {
                    day: 6,
                    hour: 4,
                    minute: 0,
                },
                enabled: false,
                last_run: None,
                next_run: None,
                last_status: None,
                running: false,
                last_job_id: None,
            },
        ];
        Self {
            tasks: Arc::new(RwLock::new(default_tasks)),
            job_queue,
            cancel,
            config,
            config_path,
        }
    }
    /// Restore saved task state from config. Should be called once after construction.
    pub async fn restore_state(&self) {
        let saved = self.config.read().await.scheduled_tasks.clone();
        if saved.is_empty() {
            return;
        }
        let mut tasks = self.tasks.write().await;
        // Match saved entries to defaults by id; unknown ids are ignored.
        for saved_task in &saved {
            if let Some(task) = tasks.iter_mut().find(|t| t.id == saved_task.id) {
                task.enabled = saved_task.enabled;
                task.schedule = saved_task.schedule.clone();
                // last_run is persisted as RFC 3339; unparsable values are
                // silently dropped.
                if let Some(Ok(dt)) = saved_task
                    .last_run
                    .as_ref()
                    .map(|s| DateTime::parse_from_rfc3339(s))
                {
                    task.last_run = Some(dt.with_timezone(&Utc));
                }
                if task.enabled {
                    let from = task.last_run.unwrap_or_else(Utc::now);
                    task.next_run = Some(task.schedule.next_run(from));
                } else {
                    task.next_run = None;
                }
            }
        }
    }
    /// Persist current task state to config file.
    async fn persist_task_state(&self) {
        let tasks = self.tasks.read().await;
        let task_configs: Vec<crate::config::ScheduledTaskConfig> = tasks
            .iter()
            .map(|t| crate::config::ScheduledTaskConfig {
                id: t.id.clone(),
                enabled: t.enabled,
                schedule: t.schedule.clone(),
                last_run: t.last_run.map(|dt| dt.to_rfc3339()),
            })
            .collect();
        // Release the read lock before taking the config write lock.
        drop(tasks);
        {
            let mut config = self.config.write().await;
            config.scheduled_tasks = task_configs;
        }
        // Saving to disk is best-effort; failures are only logged.
        if let Some(ref path) = self.config_path {
            let config = self.config.read().await;
            if let Err(e) = config.save_to_file(path) {
                tracing::warn!(error = %e, "failed to persist scheduler state to config file");
            }
        }
    }
    /// Snapshot of every task's current state.
    pub async fn list_tasks(&self) -> Vec<ScheduledTask> {
        self.tasks.read().await.clone()
    }
    /// Flip a task's enabled flag. Returns the new state, or `None` when
    /// no task with `id` exists.
    pub async fn toggle_task(&self, id: &str) -> Option<bool> {
        let result = {
            let mut tasks = self.tasks.write().await;
            if let Some(task) = tasks.iter_mut().find(|t| t.id == id) {
                task.enabled = !task.enabled;
                if task.enabled {
                    task.next_run = Some(task.schedule.next_run(Utc::now()));
                } else {
                    task.next_run = None;
                }
                Some(task.enabled)
            } else {
                None
            }
        };
        if result.is_some() {
            self.persist_task_state().await;
        }
        result
    }
    /// Run a task immediately. Uses a single write lock to avoid TOCTOU races.
    pub async fn run_now(&self, id: &str) -> Option<String> {
        let result = {
            let mut tasks = self.tasks.write().await;
            let task = tasks.iter_mut().find(|t| t.id == id)?;
            // Submit the job (cheap: sends to mpsc channel)
            let job_id = self.job_queue.submit(task.kind.clone()).await;
            task.last_run = Some(Utc::now());
            task.last_status = Some("running".to_string());
            task.running = true;
            task.last_job_id = Some(job_id);
            if task.enabled {
                task.next_run = Some(task.schedule.next_run(Utc::now()));
            }
            Some(job_id.to_string())
        };
        if result.is_some() {
            self.persist_task_state().await;
        }
        result
    }
    /// Main scheduler loop. Uses a two-phase approach per tick to avoid
    /// holding the write lock across await points. Returns when the
    /// cancellation token is triggered.
    pub async fn run(&self) {
        // 30s tick granularity; fine since the smallest schedule unit in
        // the defaults is measured in minutes/hours.
        let mut interval = tokio::time::interval(std::time::Duration::from_secs(30));
        loop {
            tokio::select! {
                _ = interval.tick() => {}
                _ = self.cancel.cancelled() => {
                    tracing::info!("scheduler shutting down");
                    return;
                }
            }
            // Phase 1: Check completed jobs and update running status
            {
                use crate::jobs::JobStatus;
                let mut tasks = self.tasks.write().await;
                for task in tasks.iter_mut() {
                    if !task.running {
                        continue;
                    }
                    let Some(job_id) = task.last_job_id else {
                        continue;
                    };
                    let Some(job) = self.job_queue.status(job_id).await else {
                        continue;
                    };
                    match &job.status {
                        JobStatus::Completed { .. } => {
                            task.running = false;
                            task.last_status = Some("completed".to_string());
                        }
                        JobStatus::Failed { error } => {
                            task.running = false;
                            task.last_status = Some(format!("failed: {error}"));
                        }
                        JobStatus::Cancelled => {
                            task.running = false;
                            task.last_status = Some("cancelled".to_string());
                        }
                        _ => {} // still pending or running
                    }
                }
            }
            // Phase 2: Collect due tasks and submit jobs
            let now = Utc::now();
            let mut to_submit: Vec<(usize, JobKind)> = Vec::new();
            {
                let mut tasks = self.tasks.write().await;
                for (i, task) in tasks.iter_mut().enumerate() {
                    if !task.enabled || task.running {
                        continue;
                    }
                    let due = task.next_run.is_some_and(|next| now >= next);
                    if due {
                        to_submit.push((i, task.kind.clone()));
                        task.last_run = Some(now);
                        task.last_status = Some("running".to_string());
                        task.running = true;
                        task.next_run = Some(task.schedule.next_run(now));
                    }
                }
            }
            // Submit jobs without holding the lock
            for (idx, kind) in to_submit {
                let job_id = self.job_queue.submit(kind).await;
                let mut tasks = self.tasks.write().await;
                if let Some(task) = tasks.get_mut(idx) {
                    task.last_job_id = Some(job_id);
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;
    #[test]
    fn test_interval_next_run() {
        let from = Utc.with_ymd_and_hms(2025, 6, 15, 12, 0, 0).unwrap();
        let schedule = Schedule::Interval { secs: 3600 };
        let next = schedule.next_run(from);
        assert_eq!(next, Utc.with_ymd_and_hms(2025, 6, 15, 13, 0, 0).unwrap());
    }
    #[test]
    fn test_daily_next_run_future_today() {
        // 10:00 UTC, schedule is 14:00 => same day
        let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap();
        let schedule = Schedule::Daily {
            hour: 14,
            minute: 0,
        };
        let next = schedule.next_run(from);
        assert_eq!(next, Utc.with_ymd_and_hms(2025, 6, 15, 14, 0, 0).unwrap());
    }
    #[test]
    fn test_daily_next_run_past_today() {
        // 16:00 UTC, schedule is 14:00 => next day
        let from = Utc.with_ymd_and_hms(2025, 6, 15, 16, 0, 0).unwrap();
        let schedule = Schedule::Daily {
            hour: 14,
            minute: 0,
        };
        let next = schedule.next_run(from);
        assert_eq!(next, Utc.with_ymd_and_hms(2025, 6, 16, 14, 0, 0).unwrap());
    }
    #[test]
    fn test_weekly_next_run() {
        // 2025-06-15 is a Sunday (day 6). Target is Monday (day 0) at 03:00.
        let from = Utc.with_ymd_and_hms(2025, 6, 15, 12, 0, 0).unwrap();
        let schedule = Schedule::Weekly {
            day: 0,
            hour: 3,
            minute: 0,
        };
        let next = schedule.next_run(from);
        assert_eq!(next, Utc.with_ymd_and_hms(2025, 6, 16, 3, 0, 0).unwrap());
    }
    #[test]
    fn test_weekly_same_day_future() {
        // 2025-06-15 is Sunday (day 6). Schedule is Sunday 14:00, current is 10:00 => today.
        let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap();
        let schedule = Schedule::Weekly {
            day: 6,
            hour: 14,
            minute: 0,
        };
        let next = schedule.next_run(from);
        assert_eq!(next, Utc.with_ymd_and_hms(2025, 6, 15, 14, 0, 0).unwrap());
    }
    #[test]
    fn test_weekly_same_day_past() {
        // 2025-06-15 is Sunday (day 6). Schedule is Sunday 08:00, current is 10:00 => next week.
        let from = Utc.with_ymd_and_hms(2025, 6, 15, 10, 0, 0).unwrap();
        let schedule = Schedule::Weekly {
            day: 6,
            hour: 8,
            minute: 0,
        };
        let next = schedule.next_run(from);
        assert_eq!(next, Utc.with_ymd_and_hms(2025, 6, 22, 8, 0, 0).unwrap());
    }
    #[test]
    fn test_serde_roundtrip() {
        let task = ScheduledTask {
            id: "test".to_string(),
            name: "Test Task".to_string(),
            kind: JobKind::Scan { path: None },
            schedule: Schedule::Interval { secs: 3600 },
            enabled: true,
            last_run: Some(Utc::now()),
            next_run: Some(Utc::now()),
            last_status: Some("completed".to_string()),
            running: true,
            last_job_id: Some(Uuid::now_v7()),
        };
        let json = serde_json::to_string(&task).unwrap();
        let deserialized: ScheduledTask = serde_json::from_str(&json).unwrap();
        assert_eq!(deserialized.id, "test");
        // Idiomatic boolean assertion (was `assert_eq!(…, true)`).
        assert!(deserialized.enabled);
        // running defaults to false on deserialization (skip_serializing)
        assert!(!deserialized.running);
        // last_job_id is skipped entirely
        assert!(deserialized.last_job_id.is_none());
    }
    #[test]
    fn test_display_string() {
        assert_eq!(
            Schedule::Interval { secs: 3600 }.display_string(),
            "Every 1h"
        );
        assert_eq!(
            Schedule::Interval { secs: 300 }.display_string(),
            "Every 5m"
        );
        assert_eq!(
            Schedule::Interval { secs: 30 }.display_string(),
            "Every 30s"
        );
        assert_eq!(
            Schedule::Daily { hour: 3, minute: 0 }.display_string(),
            "Daily 03:00"
        );
        assert_eq!(
            Schedule::Weekly {
                day: 0,
                hour: 3,
                minute: 0
            }
            .display_string(),
            "Mon 03:00"
        );
        assert_eq!(
            Schedule::Weekly {
                day: 6,
                hour: 14,
                minute: 30
            }
            .display_string(),
            "Sun 14:30"
        );
    }
}

View file

@ -0,0 +1,256 @@
use serde::{Deserialize, Serialize};
use winnow::combinator::{alt, delimited, preceded, repeat};
use winnow::token::{take_till, take_while};
use winnow::{ModalResult, Parser};
/// AST of a parsed search query.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum SearchQuery {
    /// Plain (bare or quoted) search term.
    FullText(String),
    /// `field:value` match.
    FieldMatch { field: String, value: String },
    /// All sub-queries must match (whitespace-adjacent terms).
    And(Vec<SearchQuery>),
    /// Any sub-query may match (`OR` keyword).
    Or(Vec<SearchQuery>),
    /// Negation (`-term`).
    Not(Box<SearchQuery>),
    /// `word*` prefix match.
    Prefix(String),
    /// `word~` fuzzy match.
    Fuzzy(String),
    /// `type:<value>` media-type filter.
    TypeFilter(String),
    /// `tag:<value>` tag filter.
    TagFilter(String),
}
/// A query plus ordering and paging to execute against the library.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchRequest {
    pub query: SearchQuery,
    pub sort: SortOrder,
    pub pagination: crate::model::Pagination,
}

/// One page of matching items plus the total match count across all pages.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchResults {
    pub items: Vec<crate::model::MediaItem>,
    pub total_count: u64,
}

/// Result ordering; serialized as `snake_case` strings.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[derive(Default)]
pub enum SortOrder {
    #[default]
    Relevance,
    DateAsc,
    DateDesc,
    NameAsc,
    NameDesc,
    SizeAsc,
    SizeDesc,
}
/// Consume optional whitespace between query terms.
///
/// `bare_word` treats *any* Unicode whitespace as a terminator, so the
/// separator must accept the same set. Matching only ' ' (as before) made
/// queries containing tabs or newlines silently truncate at the first such
/// character, because `parse_search_query` discards unconsumed input.
fn ws<'i>(input: &mut &'i str) -> ModalResult<&'i str> {
    take_while(0.., |c: char| c.is_whitespace()).parse_next(input)
}
// A double-quoted string; the surrounding quotes are stripped. There is no
// escape handling, so a quoted value cannot itself contain `"`.
fn quoted_string(input: &mut &str) -> ModalResult<String> {
    delimited('"', take_till(0.., '"'), '"')
        .map(|s: &str| s.to_string())
        .parse_next(input)
}
// A run of characters terminated by whitespace or parentheses.
fn bare_word(input: &mut &str) -> ModalResult<String> {
    let is_word_char = |c: char| !(c.is_whitespace() || c == '(' || c == ')');
    take_while(1.., is_word_char)
        .map(str::to_string)
        .parse_next(input)
}
// A single term; quoted form takes precedence so `"a b"` stays one value.
fn word_or_quoted(input: &mut &str) -> ModalResult<String> {
    alt((quoted_string, bare_word)).parse_next(input)
}
// `-expr` negation; whitespace is permitted between `-` and the operand.
fn not_expr(input: &mut &str) -> ModalResult<SearchQuery> {
    preceded(('-', ws), atom)
        .map(|q| SearchQuery::Not(Box::new(q)))
        .parse_next(input)
}
// `field:value` — the reserved names `type` and `tag` get dedicated filter
// nodes; anything else becomes a generic field match.
fn field_match(input: &mut &str) -> ModalResult<SearchQuery> {
    let key = take_while(1.., |c: char| c.is_alphanumeric() || c == '_').map(str::to_string);
    (key, ':', word_or_quoted)
        .map(|(field, _colon, value)| match field.as_str() {
            "tag" => SearchQuery::TagFilter(value),
            "type" => SearchQuery::TypeFilter(value),
            _ => SearchQuery::FieldMatch { field, value },
        })
        .parse_next(input)
}
// `word*` — prefix search; the `*` must directly follow the word.
fn prefix_expr(input: &mut &str) -> ModalResult<SearchQuery> {
    let word = take_while(1.., |c: char| {
        !c.is_whitespace() && c != ')' && c != '(' && c != '*'
    })
    .map(|s: &str| s.to_string());
    (word, '*')
        .map(|(w, _)| SearchQuery::Prefix(w))
        .parse_next(input)
}
// `word~` — fuzzy search; the `~` must directly follow the word.
fn fuzzy_expr(input: &mut &str) -> ModalResult<SearchQuery> {
    let word = take_while(1.., |c: char| {
        !c.is_whitespace() && c != ')' && c != '(' && c != '~'
    })
    .map(|s: &str| s.to_string());
    (word, '~')
        .map(|(w, _)| SearchQuery::Fuzzy(w))
        .parse_next(input)
}
fn paren_expr(input: &mut &str) -> ModalResult<SearchQuery> {
delimited(('(', ws), or_expr, (ws, ')')).parse_next(input)
}
/// Guard that backtracks when the next token is the bare `OR` keyword, so
/// `full_text` never consumes the operator as a search term. Consumes no
/// input on success.
fn not_or_keyword(input: &mut &str) -> ModalResult<()> {
    // "OR" only counts as the keyword when followed by end-of-input, a space,
    // or ')'; a word like "ORbit" passes through as ordinary text.
    if let Some(rest) = input.strip_prefix("OR")
        && (rest.is_empty() || rest.starts_with(' ') || rest.starts_with(')'))
    {
        return Err(winnow::error::ErrMode::Backtrack(
            winnow::error::ContextError::new(),
        ));
    }
    Ok(())
}
/// Plain full-text term: any word or quoted phrase that is not the bare `OR`
/// keyword. Lowest-priority alternative in `atom`.
fn full_text(input: &mut &str) -> ModalResult<SearchQuery> {
    not_or_keyword.parse_next(input)?;
    word_or_quoted.map(SearchQuery::FullText).parse_next(input)
}
/// A single operand of the query grammar.
///
/// The alternatives are tried in a deliberate order: `field_match`,
/// `prefix_expr` and `fuzzy_expr` must run before `full_text`, otherwise the
/// bare-word parser would swallow the `field:value`, `word*` and `word~`
/// forms as plain text.
fn atom(input: &mut &str) -> ModalResult<SearchQuery> {
    alt((
        paren_expr,
        not_expr,
        field_match,
        prefix_expr,
        fuzzy_expr,
        full_text,
    ))
    .parse_next(input)
}
/// Juxtaposed atoms form an implicit AND: `a b c` parses to `And([a, b, c])`.
/// A single atom is returned unwrapped.
fn and_expr(input: &mut &str) -> ModalResult<SearchQuery> {
    let head = atom.parse_next(input)?;
    let mut tail: Vec<SearchQuery> = repeat(0.., preceded(ws, atom)).parse_next(input)?;
    if tail.is_empty() {
        Ok(head)
    } else {
        tail.insert(0, head);
        Ok(SearchQuery::And(tail))
    }
}
/// `a OR b OR c` — the lowest-precedence operator; binds AND groups together.
/// A single branch is returned unwrapped.
fn or_expr(input: &mut &str) -> ModalResult<SearchQuery> {
    let head = and_expr.parse_next(input)?;
    let more: Vec<SearchQuery> =
        repeat(0.., preceded((ws, "OR", ws), and_expr)).parse_next(input)?;
    if more.is_empty() {
        return Ok(head);
    }
    let mut branches = Vec::with_capacity(more.len() + 1);
    branches.push(head);
    branches.extend(more);
    Ok(SearchQuery::Or(branches))
}
pub fn parse_search_query(input: &str) -> crate::error::Result<SearchQuery> {
let trimmed = input.trim();
if trimmed.is_empty() {
return Ok(SearchQuery::FullText(String::new()));
}
let mut input = trimmed;
or_expr
.parse_next(&mut input)
.map_err(|e| crate::error::PinakesError::SearchParse(format!("{e}")))
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Parse `q` and unwrap — keeps the assertions below terse.
    fn parsed(q: &str) -> SearchQuery {
        parse_search_query(q).unwrap()
    }

    #[test]
    fn test_simple_text() {
        assert_eq!(parsed("hello"), SearchQuery::FullText("hello".to_string()));
    }

    #[test]
    fn test_field_match() {
        let expected = SearchQuery::FieldMatch {
            field: "artist".to_string(),
            value: "Beatles".to_string(),
        };
        assert_eq!(parsed("artist:Beatles"), expected);
    }

    #[test]
    fn test_type_filter() {
        assert_eq!(
            parsed("type:pdf"),
            SearchQuery::TypeFilter("pdf".to_string())
        );
    }

    #[test]
    fn test_tag_filter() {
        assert_eq!(
            parsed("tag:music"),
            SearchQuery::TagFilter("music".to_string())
        );
    }

    #[test]
    fn test_and_implicit() {
        let expected = SearchQuery::And(vec![
            SearchQuery::FullText("hello".to_string()),
            SearchQuery::FullText("world".to_string()),
        ]);
        assert_eq!(parsed("hello world"), expected);
    }

    #[test]
    fn test_or() {
        let expected = SearchQuery::Or(vec![
            SearchQuery::FullText("hello".to_string()),
            SearchQuery::FullText("world".to_string()),
        ]);
        assert_eq!(parsed("hello OR world"), expected);
    }

    #[test]
    fn test_not() {
        let inner = SearchQuery::FullText("excluded".to_string());
        assert_eq!(parsed("-excluded"), SearchQuery::Not(Box::new(inner)));
    }

    #[test]
    fn test_prefix() {
        assert_eq!(parsed("hel*"), SearchQuery::Prefix("hel".to_string()));
    }

    #[test]
    fn test_fuzzy() {
        assert_eq!(parsed("hello~"), SearchQuery::Fuzzy("hello".to_string()));
    }

    #[test]
    fn test_quoted() {
        assert_eq!(
            parsed("\"hello world\""),
            SearchQuery::FullText("hello world".to_string())
        );
    }
}

View file

@ -0,0 +1,26 @@
use crate::error::{PinakesError, Result};
/// SQLite schema migrations embedded at compile time from
/// `migrations/sqlite` (path is relative to this crate's Cargo.toml).
mod sqlite_migrations {
    use refinery::embed_migrations;
    embed_migrations!("../../migrations/sqlite");
}
/// PostgreSQL schema migrations embedded at compile time from
/// `migrations/postgres` (path is relative to this crate's Cargo.toml).
mod postgres_migrations {
    use refinery::embed_migrations;
    embed_migrations!("../../migrations/postgres");
}
/// Apply all pending SQLite migrations on `conn`.
///
/// # Errors
/// Returns [`PinakesError::Migration`] when any migration fails to apply.
pub fn run_sqlite_migrations(conn: &mut rusqlite::Connection) -> Result<()> {
    let runner = sqlite_migrations::migrations::runner();
    match runner.run(conn) {
        Ok(_report) => Ok(()),
        Err(e) => Err(PinakesError::Migration(e.to_string())),
    }
}
/// Apply all pending PostgreSQL migrations on `client`.
///
/// # Errors
/// Returns [`PinakesError::Migration`] when any migration fails to apply.
pub async fn run_postgres_migrations(client: &mut tokio_postgres::Client) -> Result<()> {
    let outcome = postgres_migrations::migrations::runner()
        .run_async(client)
        .await;
    outcome
        .map(|_report| ())
        .map_err(|e| PinakesError::Migration(e.to_string()))
}

View file

@ -0,0 +1,209 @@
pub mod migrations;
pub mod postgres;
pub mod sqlite;
use std::path::PathBuf;
use std::sync::Arc;
use uuid::Uuid;
use crate::error::Result;
use crate::model::*;
use crate::search::{SearchRequest, SearchResults};
/// Statistics about the database.
#[derive(Debug, Clone, Default)]
pub struct DatabaseStats {
    /// Number of media items stored.
    pub media_count: u64,
    /// Number of tags.
    pub tag_count: u64,
    /// Number of collections.
    pub collection_count: u64,
    /// Number of audit-log entries.
    pub audit_count: u64,
    /// On-disk size of the database, in bytes.
    pub database_size_bytes: u64,
    /// Human-readable backend identifier — presumably "sqlite"/"postgres";
    /// TODO confirm exact values against backend implementations.
    pub backend_name: String,
}
/// Abstraction over a persistence backend (SQLite or PostgreSQL).
///
/// All methods are async and object-safe (via `async_trait`), so the trait
/// can be used behind `Arc<dyn StorageBackend>` (see `DynStorageBackend`).
/// Default implementations are provided for the batch operations; backends
/// may override them with transactional variants.
#[async_trait::async_trait]
pub trait StorageBackend: Send + Sync + 'static {
    // Migrations
    /// Bring the schema up to date; must be idempotent.
    async fn run_migrations(&self) -> Result<()>;
    // Root directories
    async fn add_root_dir(&self, path: PathBuf) -> Result<()>;
    async fn list_root_dirs(&self) -> Result<Vec<PathBuf>>;
    async fn remove_root_dir(&self, path: &std::path::Path) -> Result<()>;
    // Media CRUD
    async fn insert_media(&self, item: &MediaItem) -> Result<()>;
    /// Fetch a single item; errors when `id` does not exist.
    async fn get_media(&self, id: MediaId) -> Result<MediaItem>;
    async fn count_media(&self) -> Result<u64>;
    /// Content-hash lookup used for import de-duplication; `None` when unseen.
    async fn get_media_by_hash(&self, hash: &ContentHash) -> Result<Option<MediaItem>>;
    async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>>;
    async fn update_media(&self, item: &MediaItem) -> Result<()>;
    async fn delete_media(&self, id: MediaId) -> Result<()>;
    /// Delete every media row; returns the number removed.
    async fn delete_all_media(&self) -> Result<u64>;
    // Tags
    /// Create a tag, optionally nested under `parent_id` (tags form a tree).
    async fn create_tag(&self, name: &str, parent_id: Option<Uuid>) -> Result<Tag>;
    async fn get_tag(&self, id: Uuid) -> Result<Tag>;
    async fn list_tags(&self) -> Result<Vec<Tag>>;
    async fn delete_tag(&self, id: Uuid) -> Result<()>;
    async fn tag_media(&self, media_id: MediaId, tag_id: Uuid) -> Result<()>;
    async fn untag_media(&self, media_id: MediaId, tag_id: Uuid) -> Result<()>;
    async fn get_media_tags(&self, media_id: MediaId) -> Result<Vec<Tag>>;
    /// Transitive children of `tag_id` — TODO confirm whether the root tag
    /// itself is included; callers only assert on descendants.
    async fn get_tag_descendants(&self, tag_id: Uuid) -> Result<Vec<Tag>>;
    // Collections
    async fn create_collection(
        &self,
        name: &str,
        kind: CollectionKind,
        description: Option<&str>,
        filter_query: Option<&str>,
    ) -> Result<Collection>;
    async fn get_collection(&self, id: Uuid) -> Result<Collection>;
    async fn list_collections(&self) -> Result<Vec<Collection>>;
    async fn delete_collection(&self, id: Uuid) -> Result<()>;
    /// Add a member at `position` (ordering within the collection).
    async fn add_to_collection(
        &self,
        collection_id: Uuid,
        media_id: MediaId,
        position: i32,
    ) -> Result<()>;
    async fn remove_from_collection(&self, collection_id: Uuid, media_id: MediaId) -> Result<()>;
    async fn get_collection_members(&self, collection_id: Uuid) -> Result<Vec<MediaItem>>;
    // Search
    async fn search(&self, request: &SearchRequest) -> Result<SearchResults>;
    // Audit
    async fn record_audit(&self, entry: &AuditEntry) -> Result<()>;
    /// List audit entries, optionally filtered to a single media item.
    async fn list_audit_entries(
        &self,
        media_id: Option<MediaId>,
        pagination: &Pagination,
    ) -> Result<Vec<AuditEntry>>;
    // Custom fields
    /// Upsert a named custom field on a media item.
    async fn set_custom_field(
        &self,
        media_id: MediaId,
        name: &str,
        field: &CustomField,
    ) -> Result<()>;
    async fn get_custom_fields(
        &self,
        media_id: MediaId,
    ) -> Result<std::collections::HashMap<String, CustomField>>;
    async fn delete_custom_field(&self, media_id: MediaId, name: &str) -> Result<()>;
    // Batch operations (transactional where supported)
    /// Default: sequential per-item deletes (not atomic). Returns the count
    /// of items deleted before any error.
    async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> {
        let mut count = 0u64;
        for id in ids {
            self.delete_media(*id).await?;
            count += 1;
        }
        Ok(count)
    }
    /// Default: applies every tag to every media item (cartesian product),
    /// sequentially and non-atomically. Returns the number of pairs applied.
    async fn batch_tag_media(&self, media_ids: &[MediaId], tag_ids: &[Uuid]) -> Result<u64> {
        let mut count = 0u64;
        for media_id in media_ids {
            for tag_id in tag_ids {
                self.tag_media(*media_id, *tag_id).await?;
                count += 1;
            }
        }
        Ok(count)
    }
    // Integrity
    /// All (id, path, hash) triples — used for filesystem integrity checks.
    async fn list_media_paths(&self) -> Result<Vec<(MediaId, std::path::PathBuf, ContentHash)>>;
    // Batch metadata update
    /// Default: read-modify-write per item; only the `Some(_)` fields are
    /// overwritten, `updated_at` is bumped. Non-atomic across items.
    async fn batch_update_media(
        &self,
        ids: &[MediaId],
        title: Option<&str>,
        artist: Option<&str>,
        album: Option<&str>,
        genre: Option<&str>,
        year: Option<i32>,
        description: Option<&str>,
    ) -> Result<u64> {
        let mut count = 0u64;
        for id in ids {
            let mut item = self.get_media(*id).await?;
            if let Some(v) = title {
                item.title = Some(v.to_string());
            }
            if let Some(v) = artist {
                item.artist = Some(v.to_string());
            }
            if let Some(v) = album {
                item.album = Some(v.to_string());
            }
            if let Some(v) = genre {
                item.genre = Some(v.to_string());
            }
            if let Some(v) = &year {
                item.year = Some(*v);
            }
            if let Some(v) = description {
                item.description = Some(v.to_string());
            }
            item.updated_at = chrono::Utc::now();
            self.update_media(&item).await?;
            count += 1;
        }
        Ok(count)
    }
    // Saved searches
    async fn save_search(
        &self,
        id: uuid::Uuid,
        name: &str,
        query: &str,
        sort_order: Option<&str>,
    ) -> Result<()>;
    async fn list_saved_searches(&self) -> Result<Vec<crate::model::SavedSearch>>;
    async fn delete_saved_search(&self, id: uuid::Uuid) -> Result<()>;
    // Duplicates
    /// Groups of items sharing the same content hash (each inner Vec is one
    /// duplicate group).
    async fn find_duplicates(&self) -> Result<Vec<Vec<MediaItem>>>;
    // Database management
    async fn database_stats(&self) -> Result<DatabaseStats>;
    async fn vacuum(&self) -> Result<()>;
    async fn clear_all_data(&self) -> Result<()>;
    // Thumbnail helpers
    /// List all media IDs, optionally filtering to those missing thumbnails.
    async fn list_media_ids_for_thumbnails(
        &self,
        only_missing: bool,
    ) -> Result<Vec<crate::model::MediaId>>;
    // Library statistics
    async fn library_statistics(&self) -> Result<LibraryStatistics>;
}
/// Comprehensive library statistics.
#[derive(Debug, Clone, Default)]
pub struct LibraryStatistics {
    /// Total number of media items.
    pub total_media: u64,
    /// Sum of all file sizes, in bytes.
    pub total_size_bytes: u64,
    /// Mean file size in bytes (0 when the library is empty).
    pub avg_file_size_bytes: u64,
    /// (type name, item count) pairs.
    pub media_by_type: Vec<(String, u64)>,
    /// (type name, total bytes) pairs.
    pub storage_by_type: Vec<(String, u64)>,
    /// Display name of the most recently added item, if any.
    pub newest_item: Option<String>,
    /// Display name of the oldest item, if any.
    pub oldest_item: Option<String>,
    /// (tag name, usage count) pairs, most used first — TODO confirm ordering.
    pub top_tags: Vec<(String, u64)>,
    /// (collection name, member count) pairs.
    pub top_collections: Vec<(String, u64)>,
    pub total_tags: u64,
    pub total_collections: u64,
    pub total_duplicates: u64,
}
/// Shared, dynamically-dispatched storage handle passed around the application.
pub type DynStorageBackend = Arc<dyn StorageBackend>;

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,43 @@
use uuid::Uuid;
use crate::error::Result;
use crate::model::{AuditAction, MediaId, Tag};
use crate::storage::DynStorageBackend;
/// Create a new tag, optionally nested under `parent_id`.
///
/// NOTE(review): unlike `tag_media`/`untag_media` below, tag creation writes
/// no audit entry — confirm whether that asymmetry is intentional.
pub async fn create_tag(
    storage: &DynStorageBackend,
    name: &str,
    parent_id: Option<Uuid>,
) -> Result<Tag> {
    storage.create_tag(name, parent_id).await
}
/// Attach `tag_id` to `media_id`, then record a `Tagged` audit entry.
pub async fn tag_media(storage: &DynStorageBackend, media_id: MediaId, tag_id: Uuid) -> Result<()> {
    storage.tag_media(media_id, tag_id).await?;
    let details = format!("tag_id={tag_id}");
    crate::audit::record_action(storage, Some(media_id), AuditAction::Tagged, Some(details)).await
}
/// Detach `tag_id` from `media_id`, then record an `Untagged` audit entry.
pub async fn untag_media(
    storage: &DynStorageBackend,
    media_id: MediaId,
    tag_id: Uuid,
) -> Result<()> {
    storage.untag_media(media_id, tag_id).await?;
    let details = format!("tag_id={tag_id}");
    crate::audit::record_action(storage, Some(media_id), AuditAction::Untagged, Some(details)).await
}
/// Return the descendants of `tag_id` (children, grandchildren, …).
/// Whether the root tag itself is included depends on the backend's
/// `get_tag_descendants` — TODO confirm.
pub async fn get_tag_tree(storage: &DynStorageBackend, tag_id: Uuid) -> Result<Vec<Tag>> {
    storage.get_tag_descendants(tag_id).await
}

View file

@ -0,0 +1,278 @@
use std::path::{Path, PathBuf};
use std::process::Command;
use tracing::{info, warn};
use crate::config::ThumbnailConfig;
use crate::error::{PinakesError, Result};
use crate::media_type::{MediaCategory, MediaType};
use crate::model::MediaId;
/// Generate a thumbnail for a media file and return the path to the thumbnail.
///
/// Supports images (via `image` crate), videos (via ffmpeg), PDFs (via pdftoppm),
/// and EPUBs (via cover image extraction).
///
/// Convenience wrapper around [`generate_thumbnail_with_config`] using
/// [`ThumbnailConfig::default`]. Returns `Ok(None)` for unsupported media
/// types or when generation fails (failures are logged, not propagated).
pub fn generate_thumbnail(
    media_id: MediaId,
    source_path: &Path,
    media_type: MediaType,
    thumbnail_dir: &Path,
) -> Result<Option<PathBuf>> {
    generate_thumbnail_with_config(
        media_id,
        source_path,
        media_type,
        thumbnail_dir,
        &ThumbnailConfig::default(),
    )
}
/// Generate a JPEG thumbnail at `<thumbnail_dir>/<media_id>.jpg`, dispatching
/// on the media category/type.
///
/// Returns `Ok(Some(path))` on success, `Ok(None)` for unsupported types or
/// when the generator fails (the failure is logged at `warn`, not propagated);
/// only the `create_dir_all` error is returned as `Err`.
pub fn generate_thumbnail_with_config(
    media_id: MediaId,
    source_path: &Path,
    media_type: MediaType,
    thumbnail_dir: &Path,
    config: &ThumbnailConfig,
) -> Result<Option<PathBuf>> {
    std::fs::create_dir_all(thumbnail_dir)?;
    let thumb_path = thumbnail_dir.join(format!("{}.jpg", media_id));
    // Pick the generator from the media category; unsupported kinds bail out
    // early with no thumbnail.
    let outcome = match media_type.category() {
        MediaCategory::Image if media_type.is_raw() => {
            generate_raw_thumbnail(source_path, &thumb_path, config)
        }
        MediaCategory::Image if media_type == MediaType::Heic => {
            generate_heic_thumbnail(source_path, &thumb_path, config)
        }
        MediaCategory::Image => generate_image_thumbnail(source_path, &thumb_path, config),
        MediaCategory::Video => generate_video_thumbnail(source_path, &thumb_path, config),
        MediaCategory::Document if media_type == MediaType::Pdf => {
            generate_pdf_thumbnail(source_path, &thumb_path, config)
        }
        MediaCategory::Document if media_type == MediaType::Epub => {
            generate_epub_thumbnail(source_path, &thumb_path, config)
        }
        _ => return Ok(None),
    };
    match outcome {
        Ok(()) => {
            info!(media_id = %media_id, category = ?media_type.category(), "generated thumbnail");
            Ok(Some(thumb_path))
        }
        Err(e) => {
            // Best effort: a missing external tool or a corrupt file should
            // not fail the caller's pipeline.
            warn!(media_id = %media_id, error = %e, "failed to generate thumbnail");
            Ok(None)
        }
    }
}
/// Decode `source`, downscale it (aspect-preserving) to fit within
/// `config.size`², and re-encode as JPEG at `dest`.
fn generate_image_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
    let decoded = match image::open(source) {
        Ok(img) => img,
        Err(e) => return Err(PinakesError::MetadataExtraction(format!("image open: {e}"))),
    };
    let scaled = decoded.thumbnail(config.size, config.size);
    let mut file = std::fs::File::create(dest)?;
    let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut file, config.quality);
    scaled
        .write_with_encoder(encoder)
        .map_err(|e| PinakesError::MetadataExtraction(format!("thumbnail encode: {e}")))?;
    Ok(())
}
/// Grab one frame `video_seek_secs` into the video via ffmpeg and scale it
/// down to fit within `config.size`².
///
/// `force_original_aspect_ratio=decrease` keeps the frame's aspect ratio
/// (fit-inside), matching the aspect-preserving `DynamicImage::thumbnail`
/// used for still images; a bare `scale=W:H` would stretch every frame to a
/// square.
///
/// # Errors
/// Returns `MetadataExtraction` when ffmpeg cannot be executed or exits
/// non-zero.
fn generate_video_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
    let ffmpeg = config.ffmpeg_path.as_deref().unwrap_or("ffmpeg");
    let status = Command::new(ffmpeg)
        .args(["-ss", &config.video_seek_secs.to_string(), "-i"])
        .arg(source)
        .args([
            "-vframes",
            "1",
            "-vf",
            &format!(
                "scale={}:{}:force_original_aspect_ratio=decrease",
                config.size, config.size
            ),
            "-y",
        ])
        .arg(dest)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status()
        .map_err(|e| {
            PinakesError::MetadataExtraction(format!("ffmpeg not found or failed to execute: {e}"))
        })?;
    if !status.success() {
        return Err(PinakesError::MetadataExtraction(format!(
            "ffmpeg exited with status {}",
            status
        )));
    }
    Ok(())
}
fn generate_pdf_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
// Use pdftoppm to render first page, then resize with image crate
let temp_prefix = dest.with_extension("tmp");
let status = Command::new("pdftoppm")
.args(["-jpeg", "-f", "1", "-l", "1", "-singlefile"])
.arg(source)
.arg(&temp_prefix)
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.map_err(|e| {
PinakesError::MetadataExtraction(format!(
"pdftoppm not found or failed to execute: {e}"
))
})?;
if !status.success() {
return Err(PinakesError::MetadataExtraction(format!(
"pdftoppm exited with status {}",
status
)));
}
// pdftoppm outputs <prefix>.jpg
let rendered = temp_prefix.with_extension("jpg");
if rendered.exists() {
// Resize to thumbnail size
let img = image::open(&rendered)
.map_err(|e| PinakesError::MetadataExtraction(format!("pdf thumbnail open: {e}")))?;
let thumb = img.thumbnail(config.size, config.size);
let mut output = std::fs::File::create(dest)?;
let encoder =
image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
thumb
.write_with_encoder(encoder)
.map_err(|e| PinakesError::MetadataExtraction(format!("pdf thumbnail encode: {e}")))?;
let _ = std::fs::remove_file(&rendered);
Ok(())
} else {
Err(PinakesError::MetadataExtraction(
"pdftoppm did not produce output".to_string(),
))
}
}
/// Extract the EPUB's cover image and encode a downscaled JPEG at `dest`.
///
/// Lookup order: the declared cover first, then the conventional
/// "cover-image" and "cover" resource ids as fallbacks.
fn generate_epub_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
    let mut doc = epub::doc::EpubDoc::new(source)
        .map_err(|e| PinakesError::MetadataExtraction(format!("epub open: {e}")))?;
    let mut cover = doc.get_cover().map(|(bytes, _mime)| bytes);
    if cover.is_none() {
        cover = doc.get_resource("cover-image").map(|(bytes, _)| bytes);
    }
    if cover.is_none() {
        cover = doc.get_resource("cover").map(|(bytes, _)| bytes);
    }
    let Some(bytes) = cover else {
        return Err(PinakesError::MetadataExtraction(
            "no cover image found in epub".to_string(),
        ));
    };
    let img = image::load_from_memory(&bytes)
        .map_err(|e| PinakesError::MetadataExtraction(format!("epub cover decode: {e}")))?;
    let thumb = img.thumbnail(config.size, config.size);
    let mut output = std::fs::File::create(dest)?;
    let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
    thumb
        .write_with_encoder(encoder)
        .map_err(|e| PinakesError::MetadataExtraction(format!("epub thumbnail encode: {e}")))?;
    Ok(())
}
/// Generate a thumbnail for a camera-RAW file by asking dcraw (`-e -c`) to
/// write the embedded preview to stdout, capturing it to a temp file, then
/// downscaling and re-encoding as JPEG.
///
/// NOTE(review): the temp file is named `.ppm`, but `dcraw -e` emits the
/// camera's embedded preview, which is usually JPEG — this relies on
/// `image::open` decoding it despite the mismatched extension; confirm the
/// installed `image` crate version sniffs content here.
fn generate_raw_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
    // Try dcraw to extract embedded JPEG preview, then resize
    let temp_ppm = dest.with_extension("ppm");
    let status = Command::new("dcraw")
        .args(["-e", "-c"])
        .arg(source)
        // dcraw writes the preview to stdout; redirect it into the temp file.
        .stdout(std::fs::File::create(&temp_ppm).map_err(|e| {
            PinakesError::MetadataExtraction(format!("failed to create temp file: {e}"))
        })?)
        .stderr(std::process::Stdio::null())
        .status()
        .map_err(|e| PinakesError::MetadataExtraction(format!("dcraw not found or failed: {e}")))?;
    if !status.success() {
        // Clean up the (possibly partial) temp file before bailing.
        let _ = std::fs::remove_file(&temp_ppm);
        return Err(PinakesError::MetadataExtraction(format!(
            "dcraw exited with status {}",
            status
        )));
    }
    // The extracted preview is typically a JPEG — try loading it
    if temp_ppm.exists() {
        let result = image::open(&temp_ppm);
        // Remove the temp file regardless of decode success.
        let _ = std::fs::remove_file(&temp_ppm);
        let img = result
            .map_err(|e| PinakesError::MetadataExtraction(format!("raw preview decode: {e}")))?;
        let thumb = img.thumbnail(config.size, config.size);
        let mut output = std::fs::File::create(dest)?;
        let encoder =
            image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
        thumb
            .write_with_encoder(encoder)
            .map_err(|e| PinakesError::MetadataExtraction(format!("raw thumbnail encode: {e}")))?;
        Ok(())
    } else {
        Err(PinakesError::MetadataExtraction(
            "dcraw did not produce output".to_string(),
        ))
    }
}
/// Generate a thumbnail for a HEIC image.
///
/// HEIC is not handled by the `image` crate, so shell out to `heif-convert`
/// for an intermediate JPEG, then downscale and re-encode that.
fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig) -> Result<()> {
    let intermediate = dest.with_extension("tmp.jpg");
    let exit = Command::new("heif-convert")
        .arg(source)
        .arg(&intermediate)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status()
        .map_err(|e| {
            PinakesError::MetadataExtraction(format!("heif-convert not found or failed: {e}"))
        })?;
    if !exit.success() {
        let _ = std::fs::remove_file(&intermediate);
        return Err(PinakesError::MetadataExtraction(format!(
            "heif-convert exited with status {}",
            exit
        )));
    }
    if !intermediate.exists() {
        return Err(PinakesError::MetadataExtraction(
            "heif-convert did not produce output".to_string(),
        ));
    }
    let decoded = image::open(&intermediate);
    // Remove the intermediate regardless of whether decoding succeeded.
    let _ = std::fs::remove_file(&intermediate);
    let img =
        decoded.map_err(|e| PinakesError::MetadataExtraction(format!("heic decode: {e}")))?;
    let thumb = img.thumbnail(config.size, config.size);
    let mut output = std::fs::File::create(dest)?;
    let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, config.quality);
    thumb
        .write_with_encoder(encoder)
        .map_err(|e| PinakesError::MetadataExtraction(format!("heic thumbnail encode: {e}")))?;
    Ok(())
}
/// Returns the default thumbnail directory under the data dir.
/// (i.e. `<default data dir>/thumbnails`; the directory is not created here.)
pub fn default_thumbnail_dir() -> PathBuf {
    crate::config::Config::default_data_dir().join("thumbnails")
}

View file

@ -0,0 +1,414 @@
use std::collections::HashMap;
use std::sync::Arc;
use pinakes_core::model::*;
use pinakes_core::storage::StorageBackend;
use pinakes_core::storage::sqlite::SqliteBackend;
/// Fresh in-memory SQLite backend with all migrations applied — one per test.
async fn setup() -> Arc<SqliteBackend> {
    let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
    backend.run_migrations().await.expect("migrations");
    Arc::new(backend)
}
// Full insert → get → get-by-hash → update → list → delete round-trip for a
// single media item.
#[tokio::test]
async fn test_media_crud() {
    let storage = setup().await;
    let now = chrono::Utc::now();
    let id = MediaId::new();
    let item = MediaItem {
        id,
        path: "/tmp/test.txt".into(),
        file_name: "test.txt".to_string(),
        media_type: pinakes_core::media_type::MediaType::PlainText,
        content_hash: ContentHash::new("abc123".to_string()),
        file_size: 100,
        title: Some("Test Title".to_string()),
        artist: None,
        album: None,
        genre: None,
        year: Some(2024),
        duration_secs: None,
        description: Some("A test file".to_string()),
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        created_at: now,
        updated_at: now,
    };
    // Insert
    storage.insert_media(&item).await.unwrap();
    // Get
    let fetched = storage.get_media(id).await.unwrap();
    assert_eq!(fetched.id, id);
    assert_eq!(fetched.title.as_deref(), Some("Test Title"));
    assert_eq!(fetched.file_size, 100);
    // Get by hash
    let by_hash = storage
        .get_media_by_hash(&ContentHash::new("abc123".into()))
        .await
        .unwrap();
    assert!(by_hash.is_some());
    assert_eq!(by_hash.unwrap().id, id);
    // Update
    let mut updated = fetched;
    updated.title = Some("Updated Title".to_string());
    storage.update_media(&updated).await.unwrap();
    let re_fetched = storage.get_media(id).await.unwrap();
    assert_eq!(re_fetched.title.as_deref(), Some("Updated Title"));
    // List
    let list = storage.list_media(&Pagination::default()).await.unwrap();
    assert_eq!(list.len(), 1);
    // Delete — a subsequent get must error (get_media is not Option-returning)
    storage.delete_media(id).await.unwrap();
    let result = storage.get_media(id).await;
    assert!(result.is_err());
}
// Tag hierarchy (parent/child), descendant lookup, and tagging/untagging a
// media item.
#[tokio::test]
async fn test_tags() {
    let storage = setup().await;
    // Create tags
    let parent = storage.create_tag("Music", None).await.unwrap();
    let child = storage.create_tag("Rock", Some(parent.id)).await.unwrap();
    assert_eq!(parent.name, "Music");
    assert_eq!(child.parent_id, Some(parent.id));
    // List tags
    let tags = storage.list_tags().await.unwrap();
    assert_eq!(tags.len(), 2);
    // Get descendants
    let descendants = storage.get_tag_descendants(parent.id).await.unwrap();
    assert!(descendants.iter().any(|t| t.name == "Rock"));
    // Tag media
    let now = chrono::Utc::now();
    let id = MediaId::new();
    let item = MediaItem {
        id,
        path: "/tmp/song.mp3".into(),
        file_name: "song.mp3".to_string(),
        media_type: pinakes_core::media_type::MediaType::Mp3,
        content_hash: ContentHash::new("hash1".to_string()),
        file_size: 5000,
        title: Some("Test Song".to_string()),
        artist: Some("Test Artist".to_string()),
        album: None,
        genre: None,
        year: None,
        duration_secs: Some(180.0),
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        created_at: now,
        updated_at: now,
    };
    storage.insert_media(&item).await.unwrap();
    storage.tag_media(id, parent.id).await.unwrap();
    let media_tags = storage.get_media_tags(id).await.unwrap();
    assert_eq!(media_tags.len(), 1);
    assert_eq!(media_tags[0].name, "Music");
    // Untag
    storage.untag_media(id, parent.id).await.unwrap();
    let media_tags = storage.get_media_tags(id).await.unwrap();
    assert_eq!(media_tags.len(), 0);
    // Delete tag — only the child is removed, the parent remains
    storage.delete_tag(child.id).await.unwrap();
    let tags = storage.list_tags().await.unwrap();
    assert_eq!(tags.len(), 1);
}
// Collection lifecycle: create, add/remove a member, list, delete.
#[tokio::test]
async fn test_collections() {
    let storage = setup().await;
    let col = storage
        .create_collection("Favorites", CollectionKind::Manual, Some("My faves"), None)
        .await
        .unwrap();
    assert_eq!(col.name, "Favorites");
    assert_eq!(col.kind, CollectionKind::Manual);
    let now = chrono::Utc::now();
    let id = MediaId::new();
    let item = MediaItem {
        id,
        path: "/tmp/doc.pdf".into(),
        file_name: "doc.pdf".to_string(),
        media_type: pinakes_core::media_type::MediaType::Pdf,
        content_hash: ContentHash::new("pdfhash".to_string()),
        file_size: 10000,
        title: None,
        artist: None,
        album: None,
        genre: None,
        year: None,
        duration_secs: None,
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        created_at: now,
        updated_at: now,
    };
    storage.insert_media(&item).await.unwrap();
    // Membership round-trip (position 0)
    storage.add_to_collection(col.id, id, 0).await.unwrap();
    let members = storage.get_collection_members(col.id).await.unwrap();
    assert_eq!(members.len(), 1);
    assert_eq!(members[0].id, id);
    storage.remove_from_collection(col.id, id).await.unwrap();
    let members = storage.get_collection_members(col.id).await.unwrap();
    assert_eq!(members.len(), 0);
    // List collections
    let cols = storage.list_collections().await.unwrap();
    assert_eq!(cols.len(), 1);
    storage.delete_collection(col.id).await.unwrap();
    let cols = storage.list_collections().await.unwrap();
    assert_eq!(cols.len(), 0);
}
// Custom fields: set, read back (both directly and via get_media), delete.
#[tokio::test]
async fn test_custom_fields() {
    let storage = setup().await;
    let now = chrono::Utc::now();
    let id = MediaId::new();
    let item = MediaItem {
        id,
        path: "/tmp/test.md".into(),
        file_name: "test.md".to_string(),
        media_type: pinakes_core::media_type::MediaType::Markdown,
        content_hash: ContentHash::new("mdhash".to_string()),
        file_size: 500,
        title: None,
        artist: None,
        album: None,
        genre: None,
        year: None,
        duration_secs: None,
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        created_at: now,
        updated_at: now,
    };
    storage.insert_media(&item).await.unwrap();
    // Set custom field
    let field = CustomField {
        field_type: CustomFieldType::Text,
        value: "important".to_string(),
    };
    storage
        .set_custom_field(id, "priority", &field)
        .await
        .unwrap();
    // Get custom fields
    let fields = storage.get_custom_fields(id).await.unwrap();
    assert_eq!(fields.len(), 1);
    assert_eq!(fields["priority"].value, "important");
    // Verify custom fields are loaded with get_media
    let media = storage.get_media(id).await.unwrap();
    assert_eq!(media.custom_fields.len(), 1);
    assert_eq!(media.custom_fields["priority"].value, "important");
    // Delete custom field
    storage.delete_custom_field(id, "priority").await.unwrap();
    let fields = storage.get_custom_fields(id).await.unwrap();
    assert_eq!(fields.len(), 0);
}
// End-to-end search against the SQLite backend: full-text match and a
// `type:` filter over three seeded items.
#[tokio::test]
async fn test_search() {
    let storage = setup().await;
    let now = chrono::Utc::now();
    // Insert a few items
    for (i, (name, title, artist)) in [
        ("song1.mp3", "Bohemian Rhapsody", "Queen"),
        ("song2.mp3", "Stairway to Heaven", "Led Zeppelin"),
        ("doc.pdf", "Rust Programming", ""),
    ]
    .iter()
    .enumerate()
    {
        let item = MediaItem {
            id: MediaId::new(),
            path: format!("/tmp/{name}").into(),
            file_name: name.to_string(),
            // Media type is inferred from the file extension
            media_type: pinakes_core::media_type::MediaType::from_path(std::path::Path::new(name))
                .unwrap(),
            content_hash: ContentHash::new(format!("hash{i}")),
            file_size: 1000 * (i as u64 + 1),
            title: Some(title.to_string()),
            // Empty artist string in the fixture means "no artist"
            artist: if artist.is_empty() {
                None
            } else {
                Some(artist.to_string())
            },
            album: None,
            genre: None,
            year: None,
            duration_secs: None,
            description: None,
            thumbnail_path: None,
            custom_fields: HashMap::new(),
            created_at: now,
            updated_at: now,
        };
        storage.insert_media(&item).await.unwrap();
    }
    // Full-text search
    let request = pinakes_core::search::SearchRequest {
        query: pinakes_core::search::parse_search_query("Bohemian").unwrap(),
        sort: pinakes_core::search::SortOrder::Relevance,
        pagination: Pagination::new(0, 50, None),
    };
    let results = storage.search(&request).await.unwrap();
    assert_eq!(results.total_count, 1);
    assert_eq!(results.items[0].title.as_deref(), Some("Bohemian Rhapsody"));
    // Type filter
    let request = pinakes_core::search::SearchRequest {
        query: pinakes_core::search::parse_search_query("type:pdf").unwrap(),
        sort: pinakes_core::search::SortOrder::Relevance,
        pagination: Pagination::new(0, 50, None),
    };
    let results = storage.search(&request).await.unwrap();
    assert_eq!(results.total_count, 1);
    assert_eq!(results.items[0].file_name, "doc.pdf");
}
// Audit log round-trip: record one entry and list it back.
#[tokio::test]
async fn test_audit_log() {
    let storage = setup().await;
    let entry = AuditEntry {
        id: uuid::Uuid::now_v7(),
        media_id: None,
        action: AuditAction::Scanned,
        details: Some("test scan".to_string()),
        timestamp: chrono::Utc::now(),
    };
    storage.record_audit(&entry).await.unwrap();
    let entries = storage
        .list_audit_entries(None, &Pagination::new(0, 10, None))
        .await
        .unwrap();
    assert_eq!(entries.len(), 1);
    assert_eq!(entries[0].action, AuditAction::Scanned);
}
// Importing the same file twice must be detected as a duplicate (by content
// hash) and resolve to the original media id.
#[tokio::test]
async fn test_import_with_dedup() {
    // Coerce to the trait object type that import_file expects
    let storage = setup().await as pinakes_core::storage::DynStorageBackend;
    // Create a temp file
    let dir = tempfile::tempdir().unwrap();
    let file_path = dir.path().join("test.txt");
    std::fs::write(&file_path, "hello world").unwrap();
    // First import
    let result1 = pinakes_core::import::import_file(&storage, &file_path)
        .await
        .unwrap();
    assert!(!result1.was_duplicate);
    // Second import of same file
    let result2 = pinakes_core::import::import_file(&storage, &file_path)
        .await
        .unwrap();
    assert!(result2.was_duplicate);
    assert_eq!(result1.media_id, result2.media_id);
}
// Root-directory registry: add two, remove one, verify the survivor.
#[tokio::test]
async fn test_root_dirs() {
    let storage = setup().await;
    storage.add_root_dir("/tmp/music".into()).await.unwrap();
    storage.add_root_dir("/tmp/docs".into()).await.unwrap();
    let dirs = storage.list_root_dirs().await.unwrap();
    assert_eq!(dirs.len(), 2);
    storage
        .remove_root_dir(std::path::Path::new("/tmp/music"))
        .await
        .unwrap();
    let dirs = storage.list_root_dirs().await.unwrap();
    assert_eq!(dirs.len(), 1);
    assert_eq!(dirs[0], std::path::PathBuf::from("/tmp/docs"));
}
// An empty library yields all-zero/empty statistics (and no divide-by-zero
// in the average file size).
#[tokio::test]
async fn test_library_statistics_empty() {
    let storage = setup().await;
    let stats = storage.library_statistics().await.unwrap();
    assert_eq!(stats.total_media, 0);
    assert_eq!(stats.total_size_bytes, 0);
    assert_eq!(stats.avg_file_size_bytes, 0);
    assert!(stats.media_by_type.is_empty());
    assert!(stats.storage_by_type.is_empty());
    assert!(stats.top_tags.is_empty());
    assert!(stats.top_collections.is_empty());
    assert!(stats.newest_item.is_none());
    assert!(stats.oldest_item.is_none());
    assert_eq!(stats.total_tags, 0);
    assert_eq!(stats.total_collections, 0);
    assert_eq!(stats.total_duplicates, 0);
}
// With a single item, totals/average equal that item's size and the
// newest/oldest markers are populated.
#[tokio::test]
async fn test_library_statistics_with_data() {
    let storage = setup().await;
    let now = chrono::Utc::now();
    let item = MediaItem {
        id: MediaId::new(),
        path: "/tmp/stats_test.mp3".into(),
        file_name: "stats_test.mp3".to_string(),
        media_type: pinakes_core::media_type::MediaType::Mp3,
        content_hash: ContentHash::new("stats_hash".to_string()),
        file_size: 5000,
        title: Some("Stats Song".to_string()),
        artist: None,
        album: None,
        genre: None,
        year: None,
        duration_secs: Some(120.0),
        description: None,
        thumbnail_path: None,
        custom_fields: HashMap::new(),
        created_at: now,
        updated_at: now,
    };
    storage.insert_media(&item).await.unwrap();
    let stats = storage.library_statistics().await.unwrap();
    assert_eq!(stats.total_media, 1);
    assert_eq!(stats.total_size_bytes, 5000);
    assert_eq!(stats.avg_file_size_bytes, 5000);
    assert!(!stats.media_by_type.is_empty());
    assert!(stats.newest_item.is_some());
    assert!(stats.oldest_item.is_some());
}

View file

@ -0,0 +1,30 @@
[package]
name = "pinakes-server"
edition.workspace = true
version.workspace = true
license.workspace = true
[dependencies]
# Core domain types, storage backends, and business logic
pinakes-core = { path = "../pinakes-core" }
# Async runtime
tokio = { workspace = true }
# Serialization
serde = { workspace = true }
serde_json = { workspace = true }
toml = { workspace = true }
# Date/time
chrono = { workspace = true }
# IDs
uuid = { workspace = true }
# Error handling
thiserror = { workspace = true }
anyhow = { workspace = true }
# CLI argument parsing
clap = { workspace = true }
# Logging
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
# HTTP server
axum = { workspace = true }
tower = { workspace = true }
tower-http = { workspace = true }
# Rate limiting (versions pinned in the workspace root Cargo.toml)
governor = { workspace = true }
tower_governor = { workspace = true }
# Streaming helpers for response bodies
tokio-util = { version = "0.7", features = ["io"] }
# Password hashing
argon2 = { workspace = true }
rand = "0.9"
[dev-dependencies]
# Collecting axum response bodies in handler tests
http-body-util = "0.1"

View file

@ -0,0 +1,244 @@
use std::sync::Arc;
use axum::Router;
use axum::extract::DefaultBodyLimit;
use axum::http::{HeaderValue, Method, header};
use axum::middleware;
use axum::routing::{delete, get, patch, post, put};
use tower_governor::GovernorLayer;
use tower_governor::governor::GovernorConfigBuilder;
use tower_http::cors::CorsLayer;
use tower_http::trace::TraceLayer;
use crate::auth;
use crate::routes;
use crate::state::AppState;
/// Assemble the full HTTP router.
///
/// Routes are grouped by the minimum role they require (viewer / editor /
/// admin), nested under `/api/v1`, and wrapped in body-limit, auth,
/// rate-limit, trace and CORS layers.
pub fn create_router(state: AppState) -> Router {
    // Global rate limit: tokens replenish one per second with a burst
    // capacity of 100 per IP (sustained ~1 req/sec, bursts of up to 100) —
    // per_second(n) sets the replenish interval to n seconds.
    let global_governor = Arc::new(
        GovernorConfigBuilder::default()
            .per_second(1)
            .burst_size(100)
            .finish()
            .unwrap(),
    );
    // Strict rate limit for login: 5 requests/min per IP
    let login_governor = Arc::new(
        GovernorConfigBuilder::default()
            .per_second(12) // replenish one every 12 seconds
            .burst_size(5)
            .finish()
            .unwrap(),
    );
    // Login route with strict rate limiting
    let login_route = Router::new()
        .route("/auth/login", post(routes::auth::login))
        .layer(GovernorLayer {
            config: login_governor,
        });
    // Read-only routes: any authenticated user (Viewer+)
    let viewer_routes = Router::new()
        .route("/health", get(routes::health::health))
        .route("/media/count", get(routes::media::get_media_count))
        .route("/media", get(routes::media::list_media))
        .route("/media/{id}", get(routes::media::get_media))
        .route("/media/{id}/stream", get(routes::media::stream_media))
        .route("/media/{id}/thumbnail", get(routes::media::get_thumbnail))
        .route("/media/{media_id}/tags", get(routes::tags::get_media_tags))
        .route("/search", get(routes::search::search))
        .route("/search", post(routes::search::search_post))
        .route("/tags", get(routes::tags::list_tags))
        .route("/tags/{id}", get(routes::tags::get_tag))
        .route("/collections", get(routes::collections::list_collections))
        .route(
            "/collections/{id}",
            get(routes::collections::get_collection),
        )
        .route(
            "/collections/{id}/members",
            get(routes::collections::get_members),
        )
        .route("/audit", get(routes::audit::list_audit))
        .route("/scan/status", get(routes::scan::scan_status))
        .route("/config", get(routes::config::get_config))
        .route("/config/ui", get(routes::config::get_ui_config))
        .route("/database/stats", get(routes::database::database_stats))
        .route("/duplicates", get(routes::duplicates::list_duplicates))
        // Statistics
        .route("/statistics", get(routes::statistics::library_statistics))
        // Scheduled tasks (read)
        .route(
            "/tasks/scheduled",
            get(routes::scheduled_tasks::list_scheduled_tasks),
        )
        // Jobs
        .route("/jobs", get(routes::jobs::list_jobs))
        .route("/jobs/{id}", get(routes::jobs::get_job))
        // Saved searches (read)
        .route(
            "/searches/saved",
            get(routes::saved_searches::list_saved_searches),
        )
        // Webhooks (read)
        .route("/webhooks", get(routes::webhooks::list_webhooks))
        // Auth endpoints (self-service) — login handled separately with stricter rate limit
        .route("/auth/logout", post(routes::auth::logout))
        .route("/auth/me", get(routes::auth::me));
    // Write routes: Editor+ required
    let editor_routes = Router::new()
        .route("/media/import", post(routes::media::import_media))
        .route(
            "/media/import/options",
            post(routes::media::import_with_options),
        )
        .route("/media/import/batch", post(routes::media::batch_import))
        .route(
            "/media/import/directory",
            post(routes::media::import_directory_endpoint),
        )
        .route(
            "/media/import/preview",
            post(routes::media::preview_directory),
        )
        .route("/media/batch/tag", post(routes::media::batch_tag))
        .route("/media/batch/delete", post(routes::media::batch_delete))
        .route("/media/batch/update", patch(routes::media::batch_update))
        .route(
            "/media/batch/collection",
            post(routes::media::batch_add_to_collection),
        )
        .route("/media/all", delete(routes::media::delete_all_media))
        .route("/media/{id}", patch(routes::media::update_media))
        .route("/media/{id}", delete(routes::media::delete_media))
        .route("/media/{id}/open", post(routes::media::open_media))
        .route(
            "/media/{id}/custom-fields",
            post(routes::media::set_custom_field),
        )
        .route(
            "/media/{id}/custom-fields/{name}",
            delete(routes::media::delete_custom_field),
        )
        .route("/tags", post(routes::tags::create_tag))
        .route("/tags/{id}", delete(routes::tags::delete_tag))
        .route("/media/{media_id}/tags", post(routes::tags::tag_media))
        .route(
            "/media/{media_id}/tags/{tag_id}",
            delete(routes::tags::untag_media),
        )
        .route("/collections", post(routes::collections::create_collection))
        .route(
            "/collections/{id}",
            delete(routes::collections::delete_collection),
        )
        .route(
            "/collections/{id}/members",
            post(routes::collections::add_member),
        )
        .route(
            "/collections/{collection_id}/members/{media_id}",
            delete(routes::collections::remove_member),
        )
        .route("/scan", post(routes::scan::trigger_scan))
        .route("/jobs/{id}/cancel", post(routes::jobs::cancel_job))
        // Saved searches (write)
        .route(
            "/searches/saved",
            post(routes::saved_searches::create_saved_search),
        )
        .route(
            "/searches/saved/{id}",
            delete(routes::saved_searches::delete_saved_search),
        )
        // Integrity
        .route(
            "/jobs/orphan-detection",
            post(routes::integrity::trigger_orphan_detection),
        )
        .route(
            "/jobs/verify-integrity",
            post(routes::integrity::trigger_verify_integrity),
        )
        .route(
            "/jobs/cleanup-thumbnails",
            post(routes::integrity::trigger_cleanup_thumbnails),
        )
        .route(
            "/jobs/generate-thumbnails",
            post(routes::integrity::generate_all_thumbnails),
        )
        .route("/orphans/resolve", post(routes::integrity::resolve_orphans))
        // Export
        .route("/jobs/export", post(routes::export::trigger_export))
        .route(
            "/jobs/export/options",
            post(routes::export::trigger_export_with_options),
        )
        // Scheduled tasks (write)
        .route(
            "/tasks/scheduled/{id}/toggle",
            post(routes::scheduled_tasks::toggle_scheduled_task),
        )
        .route(
            "/tasks/scheduled/{id}/run-now",
            post(routes::scheduled_tasks::run_scheduled_task_now),
        )
        // Webhooks
        .route("/webhooks/test", post(routes::webhooks::test_webhook))
        .layer(middleware::from_fn(auth::require_editor));
    // Admin-only routes: destructive/config operations
    let admin_routes = Router::new()
        .route(
            "/config/scanning",
            put(routes::config::update_scanning_config),
        )
        .route("/config/roots", post(routes::config::add_root))
        .route("/config/roots", delete(routes::config::remove_root))
        .route("/config/ui", put(routes::config::update_ui_config))
        .route("/database/vacuum", post(routes::database::vacuum_database))
        .route("/database/clear", post(routes::database::clear_database))
        .layer(middleware::from_fn(auth::require_admin));
    let api = Router::new()
        .merge(login_route)
        .merge(viewer_routes)
        .merge(editor_routes)
        .merge(admin_routes);
    // CORS: allow same-origin by default, plus the desktop UI origin
    let cors = CorsLayer::new()
        .allow_origin([
            "http://localhost:3000".parse::<HeaderValue>().unwrap(),
            "http://127.0.0.1:3000".parse::<HeaderValue>().unwrap(),
            "tauri://localhost".parse::<HeaderValue>().unwrap(),
        ])
        .allow_methods([
            Method::GET,
            Method::POST,
            Method::PUT,
            Method::PATCH,
            Method::DELETE,
        ])
        .allow_headers([header::CONTENT_TYPE, header::AUTHORIZATION])
        .allow_credentials(true);
    // NOTE: tower layers added later wrap the earlier ones, so an incoming
    // request passes cors → trace → global rate limit → require_auth →
    // body limit before reaching the routes.
    Router::new()
        .nest("/api/v1", api)
        .layer(DefaultBodyLimit::max(10 * 1024 * 1024))
        .layer(middleware::from_fn_with_state(
            state.clone(),
            auth::require_auth,
        ))
        .layer(GovernorLayer {
            config: global_governor,
        })
        .layer(TraceLayer::new_for_http())
        .layer(cors)
        .with_state(state)
}

View file

@ -0,0 +1,164 @@
use axum::extract::{Request, State};
use axum::http::StatusCode;
use axum::middleware::Next;
use axum::response::{IntoResponse, Response};
use pinakes_core::config::UserRole;
use crate::state::AppState;
/// Compare two strings without short-circuiting on the first differing
/// byte, so an attacker cannot recover an API key prefix via timing.
/// Unequal lengths return early — the key length is not treated as secret.
fn constant_time_eq(a: &str, b: &str) -> bool {
    let (lhs, rhs) = (a.as_bytes(), b.as_bytes());
    if lhs.len() != rhs.len() {
        return false;
    }
    // OR together every byte-wise XOR; zero iff all bytes matched.
    let mut diff = 0u8;
    for (x, y) in lhs.iter().zip(rhs.iter()) {
        diff |= x ^ y;
    }
    diff == 0
}
/// Axum middleware that checks for a valid Bearer token.
///
/// If `accounts.enabled == true`: look up bearer token in session store.
/// If `accounts.enabled == false`: use existing api_key logic (unchanged behavior).
/// Skips authentication for the `/health` and `/auth/login` path suffixes.
///
/// On success the resolved `UserRole` (and, in session mode, the username)
/// is inserted into the request extensions for the role middlewares below.
pub async fn require_auth(
    State(state): State<AppState>,
    mut request: Request,
    next: Next,
) -> Response {
    let path = request.uri().path().to_string();
    // Always allow health and login endpoints
    if path.ends_with("/health") || path.ends_with("/auth/login") {
        return next.run(request).await;
    }
    let config = state.config.read().await;
    if config.accounts.enabled {
        // Session-based auth
        let token = request
            .headers()
            .get("authorization")
            .and_then(|v| v.to_str().ok())
            .and_then(|s| s.strip_prefix("Bearer "))
            .map(|s| s.to_string());
        // Release the config read lock before touching the session store.
        drop(config);
        let Some(token) = token else {
            tracing::debug!(path = %path, "rejected: missing Authorization header");
            return unauthorized("missing Authorization header");
        };
        let sessions = state.sessions.read().await;
        let Some(session) = sessions.get(&token) else {
            tracing::debug!(path = %path, "rejected: invalid session token");
            return unauthorized("invalid or expired session token");
        };
        // Check session expiry
        if session.is_expired() {
            let username = session.username.clone();
            // Drop the read guard before acquiring the write lock to evict
            // the stale entry (holding both would deadlock).
            drop(sessions);
            // Remove expired session
            let mut sessions_mut = state.sessions.write().await;
            sessions_mut.remove(&token);
            tracing::info!(username = %username, "session expired");
            return unauthorized("session expired");
        }
        // Inject role and username into request extensions
        request.extensions_mut().insert(session.role);
        request.extensions_mut().insert(session.username.clone());
    } else {
        // Legacy API key auth
        // Environment variable takes precedence over the config-file key.
        let api_key = std::env::var("PINAKES_API_KEY")
            .ok()
            .or_else(|| config.server.api_key.clone());
        drop(config);
        if let Some(ref expected_key) = api_key {
            if expected_key.is_empty() {
                // Empty key means no auth required
                request.extensions_mut().insert(UserRole::Admin);
                return next.run(request).await;
            }
            let auth_header = request
                .headers()
                .get("authorization")
                .and_then(|v| v.to_str().ok());
            match auth_header {
                Some(header) if header.starts_with("Bearer ") => {
                    // Skip "Bearer " (7 bytes) to isolate the presented key;
                    // compare in constant time (see constant_time_eq).
                    let token = &header[7..];
                    if !constant_time_eq(token, expected_key.as_str()) {
                        tracing::warn!(path = %path, "rejected: invalid API key");
                        return unauthorized("invalid api key");
                    }
                }
                _ => {
                    return unauthorized(
                        "missing or malformed Authorization header, expected: Bearer <api_key>",
                    );
                }
            }
        }
        // When no api_key is configured, or key matches, grant admin
        request.extensions_mut().insert(UserRole::Admin);
    }
    next.run(request).await
}
/// Middleware: requires Editor or Admin role.
///
/// Reads the `UserRole` that `require_auth` stored in the request
/// extensions; a missing role is treated as Viewer (least privilege).
pub async fn require_editor(request: Request, next: Next) -> Response {
    let role = *request
        .extensions()
        .get::<UserRole>()
        .unwrap_or(&UserRole::Viewer);
    if !role.can_write() {
        return forbidden("editor role required");
    }
    next.run(request).await
}
/// Middleware: requires Admin role.
///
/// Reads the `UserRole` that `require_auth` stored in the request
/// extensions; a missing role is treated as Viewer (least privilege).
pub async fn require_admin(request: Request, next: Next) -> Response {
    let role = *request
        .extensions()
        .get::<UserRole>()
        .unwrap_or(&UserRole::Viewer);
    if !role.can_admin() {
        return forbidden("admin role required");
    }
    next.run(request).await
}
/// Build a `401 Unauthorized` JSON response of the form `{"error":"..."}`.
fn unauthorized(message: &str) -> Response {
    // Escape backslashes and double quotes so the hand-assembled JSON body
    // stays well-formed even if a message ever contains them. (Current
    // callers pass static strings, so existing output is unchanged.)
    let escaped = message.replace('\\', "\\\\").replace('"', "\\\"");
    let body = format!(r#"{{"error":"{escaped}"}}"#);
    (
        StatusCode::UNAUTHORIZED,
        [("content-type", "application/json")],
        body,
    )
        .into_response()
}
/// Build a `403 Forbidden` JSON response of the form `{"error":"..."}`.
fn forbidden(message: &str) -> Response {
    // Escape backslashes and double quotes so the hand-assembled JSON body
    // stays well-formed even if a message ever contains them. (Current
    // callers pass static strings, so existing output is unchanged.)
    let escaped = message.replace('\\', "\\\\").replace('"', "\\\"");
    let body = format!(r#"{{"error":"{escaped}"}}"#);
    (
        StatusCode::FORBIDDEN,
        [("content-type", "application/json")],
        body,
    )
        .into_response()
}

View file

@ -0,0 +1,553 @@
use std::collections::HashMap;
use std::path::PathBuf;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
// Media
/// API representation of a media item (id/hash rendered as strings).
#[derive(Debug, Serialize)]
pub struct MediaResponse {
    pub id: String,
    pub path: String,
    pub file_name: String,
    pub media_type: String,
    pub content_hash: String,
    pub file_size: u64,
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub duration_secs: Option<f64>,
    pub description: Option<String>,
    pub has_thumbnail: bool,
    pub custom_fields: HashMap<String, CustomFieldResponse>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// One user-defined custom field value attached to a media item.
#[derive(Debug, Serialize)]
pub struct CustomFieldResponse {
    pub field_type: String,
    pub value: String,
}
/// Request body for importing a single file by filesystem path.
#[derive(Debug, Deserialize)]
pub struct ImportRequest {
    pub path: PathBuf,
}
/// Result of an import: the new (or pre-existing, if duplicate) media id.
#[derive(Debug, Serialize)]
pub struct ImportResponse {
    pub media_id: String,
    pub was_duplicate: bool,
}
/// Partial metadata update; `None` fields are presumably left unchanged
/// by the handler — confirm against routes::media::update_media.
#[derive(Debug, Deserialize)]
pub struct UpdateMediaRequest {
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub description: Option<String>,
}
// Tags
/// API representation of a tag.
#[derive(Debug, Serialize)]
pub struct TagResponse {
    pub id: String,
    pub name: String,
    pub parent_id: Option<String>,
    pub created_at: DateTime<Utc>,
}
/// Request body for creating a tag, optionally nested under a parent.
#[derive(Debug, Deserialize)]
pub struct CreateTagRequest {
    pub name: String,
    pub parent_id: Option<Uuid>,
}
/// Request body for attaching an existing tag to a media item.
#[derive(Debug, Deserialize)]
pub struct TagMediaRequest {
    pub tag_id: Uuid,
}
// Collections
/// API representation of a collection.
#[derive(Debug, Serialize)]
pub struct CollectionResponse {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    pub kind: String,
    pub filter_query: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// Request body for creating a collection.
#[derive(Debug, Deserialize)]
pub struct CreateCollectionRequest {
    pub name: String,
    pub kind: String,
    pub description: Option<String>,
    pub filter_query: Option<String>,
}
/// Request body for adding a media item to a collection.
#[derive(Debug, Deserialize)]
pub struct AddMemberRequest {
    pub media_id: Uuid,
    pub position: Option<i32>,
}
// Search
/// Query-string parameters for GET /search.
#[derive(Debug, Deserialize)]
pub struct SearchParams {
    pub q: String,
    pub sort: Option<String>,
    pub offset: Option<u64>,
    pub limit: Option<u64>,
}
/// A page of search hits plus the total match count.
#[derive(Debug, Serialize)]
pub struct SearchResponse {
    pub items: Vec<MediaResponse>,
    pub total_count: u64,
}
// Audit
/// API representation of one audit-log entry.
#[derive(Debug, Serialize)]
pub struct AuditEntryResponse {
    pub id: String,
    pub media_id: Option<String>,
    pub action: String,
    pub details: Option<String>,
    pub timestamp: DateTime<Utc>,
}
// Search (POST body)
/// JSON body for POST /search (same fields as `SearchParams`).
#[derive(Debug, Deserialize)]
pub struct SearchRequestBody {
    pub q: String,
    pub sort: Option<String>,
    pub offset: Option<u64>,
    pub limit: Option<u64>,
}
// Scan
/// Request body for triggering a scan; `None` presumably means all roots.
#[derive(Debug, Deserialize)]
pub struct ScanRequest {
    pub path: Option<PathBuf>,
}
/// Summary of a completed scan.
#[derive(Debug, Serialize)]
pub struct ScanResponse {
    pub files_found: usize,
    pub files_processed: usize,
    pub errors: Vec<String>,
}
/// Id of the background job started for an async scan.
#[derive(Debug, Serialize)]
pub struct ScanJobResponse {
    pub job_id: String,
}
/// Progress snapshot for GET /scan/status.
#[derive(Debug, Serialize)]
pub struct ScanStatusResponse {
    pub scanning: bool,
    pub files_found: usize,
    pub files_processed: usize,
    pub error_count: usize,
    pub errors: Vec<String>,
}
// Pagination
/// Common offset/limit/sort query parameters for list endpoints.
#[derive(Debug, Deserialize)]
pub struct PaginationParams {
    pub offset: Option<u64>,
    pub limit: Option<u64>,
    pub sort: Option<String>,
}
// Open
/// Request body identifying the media item to open.
#[derive(Debug, Deserialize)]
pub struct OpenRequest {
    pub media_id: Uuid,
}
// Config
/// Server configuration snapshot returned by GET /config.
#[derive(Debug, Serialize)]
pub struct ConfigResponse {
    pub backend: String,
    pub database_path: Option<String>,
    pub roots: Vec<String>,
    pub scanning: ScanningConfigResponse,
    pub server: ServerConfigResponse,
    pub ui: UiConfigResponse,
    pub config_path: Option<String>,
    // Whether the on-disk config file can be modified via the API.
    pub config_writable: bool,
}
/// Scanning-related configuration subset.
#[derive(Debug, Serialize)]
pub struct ScanningConfigResponse {
    pub watch: bool,
    pub poll_interval_secs: u64,
    pub ignore_patterns: Vec<String>,
}
/// Listen address subset of the server configuration.
#[derive(Debug, Serialize)]
pub struct ServerConfigResponse {
    pub host: String,
    pub port: u16,
}
/// Partial update for scanning config; `None` fields stay as-is.
#[derive(Debug, Deserialize)]
pub struct UpdateScanningRequest {
    pub watch: Option<bool>,
    pub poll_interval_secs: Option<u64>,
    pub ignore_patterns: Option<Vec<String>>,
}
/// Request body naming a root directory to add or remove.
#[derive(Debug, Deserialize)]
pub struct RootDirRequest {
    pub path: String,
}
// Enhanced Import
/// Import one file and optionally tag it / add it to a collection.
#[derive(Debug, Deserialize)]
pub struct ImportWithOptionsRequest {
    pub path: PathBuf,
    pub tag_ids: Option<Vec<Uuid>>,
    pub new_tags: Option<Vec<String>>,
    pub collection_id: Option<Uuid>,
}
/// Import several files with shared tag/collection options.
#[derive(Debug, Deserialize)]
pub struct BatchImportRequest {
    pub paths: Vec<PathBuf>,
    pub tag_ids: Option<Vec<Uuid>>,
    pub new_tags: Option<Vec<String>>,
    pub collection_id: Option<Uuid>,
}
/// Per-batch summary: counts plus one result row per input path.
#[derive(Debug, Serialize)]
pub struct BatchImportResponse {
    pub results: Vec<BatchImportItemResult>,
    pub total: usize,
    pub imported: usize,
    pub duplicates: usize,
    pub errors: usize,
}
/// Outcome for a single path within a batch import.
#[derive(Debug, Serialize)]
pub struct BatchImportItemResult {
    pub path: String,
    pub media_id: Option<String>,
    pub was_duplicate: bool,
    pub error: Option<String>,
}
/// Import an entire directory with shared tag/collection options.
#[derive(Debug, Deserialize)]
pub struct DirectoryImportRequest {
    pub path: PathBuf,
    pub tag_ids: Option<Vec<Uuid>>,
    pub new_tags: Option<Vec<String>>,
    pub collection_id: Option<Uuid>,
}
/// Dry-run listing of what a directory import would pick up.
#[derive(Debug, Serialize)]
pub struct DirectoryPreviewResponse {
    pub files: Vec<DirectoryPreviewFile>,
    pub total_count: usize,
    pub total_size: u64,
}
/// One file in a directory-import preview.
#[derive(Debug, Serialize)]
pub struct DirectoryPreviewFile {
    pub path: String,
    pub file_name: String,
    pub media_type: String,
    pub file_size: u64,
}
// Custom Fields
/// Create or overwrite a named custom field on a media item.
#[derive(Debug, Deserialize)]
pub struct SetCustomFieldRequest {
    pub name: String,
    pub field_type: String,
    pub value: String,
}
// Media update extended
/// Full metadata update body (same shape as `UpdateMediaRequest`).
#[derive(Debug, Deserialize)]
pub struct UpdateMediaFullRequest {
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub description: Option<String>,
}
// Batch operations
/// Apply a set of tags to a set of media items.
#[derive(Debug, Deserialize)]
pub struct BatchTagRequest {
    pub media_ids: Vec<Uuid>,
    pub tag_ids: Vec<Uuid>,
}
/// Add a set of media items to one collection.
#[derive(Debug, Deserialize)]
pub struct BatchCollectionRequest {
    pub media_ids: Vec<Uuid>,
    pub collection_id: Uuid,
}
/// Delete a set of media items.
#[derive(Debug, Deserialize)]
pub struct BatchDeleteRequest {
    pub media_ids: Vec<Uuid>,
}
/// Apply the same metadata update to a set of media items.
#[derive(Debug, Deserialize)]
pub struct BatchUpdateRequest {
    pub media_ids: Vec<Uuid>,
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub description: Option<String>,
}
/// Generic batch result: how many succeeded, plus per-item error messages.
#[derive(Debug, Serialize)]
pub struct BatchOperationResponse {
    pub processed: usize,
    pub errors: Vec<String>,
}
// Search with sort
/// Total media count for GET /media/count.
#[derive(Debug, Serialize)]
pub struct MediaCountResponse {
    pub count: u64,
}
// Database management
/// Row counts and on-disk size for GET /database/stats.
#[derive(Debug, Serialize)]
pub struct DatabaseStatsResponse {
    pub media_count: u64,
    pub tag_count: u64,
    pub collection_count: u64,
    pub audit_count: u64,
    pub database_size_bytes: u64,
    pub backend_name: String,
}
// UI Config
/// UI preferences; also Deserialize/Clone since it round-trips to config.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct UiConfigResponse {
    pub theme: String,
    pub default_view: String,
    pub default_page_size: usize,
    pub default_view_mode: String,
    pub auto_play_media: bool,
    pub show_thumbnails: bool,
    pub sidebar_collapsed: bool,
}
/// Partial UI-preferences update; `None` fields stay as-is.
#[derive(Debug, Deserialize)]
pub struct UpdateUiConfigRequest {
    pub theme: Option<String>,
    pub default_view: Option<String>,
    pub default_page_size: Option<usize>,
    pub default_view_mode: Option<String>,
    pub auto_play_media: Option<bool>,
    pub show_thumbnails: Option<bool>,
    pub sidebar_collapsed: Option<bool>,
}
impl From<&pinakes_core::config::UiConfig> for UiConfigResponse {
    /// Field-for-field copy of the core UI config into its API shape.
    fn from(ui: &pinakes_core::config::UiConfig) -> Self {
        Self {
            theme: ui.theme.clone(),
            default_view: ui.default_view.clone(),
            default_page_size: ui.default_page_size,
            default_view_mode: ui.default_view_mode.clone(),
            auto_play_media: ui.auto_play_media,
            show_thumbnails: ui.show_thumbnails,
            sidebar_collapsed: ui.sidebar_collapsed,
        }
    }
}
// Library Statistics
/// Aggregate library statistics for GET /statistics.
#[derive(Debug, Serialize)]
pub struct LibraryStatisticsResponse {
    pub total_media: u64,
    pub total_size_bytes: u64,
    pub avg_file_size_bytes: u64,
    pub media_by_type: Vec<TypeCountResponse>,
    pub storage_by_type: Vec<TypeCountResponse>,
    pub newest_item: Option<String>,
    pub oldest_item: Option<String>,
    pub top_tags: Vec<TypeCountResponse>,
    pub top_collections: Vec<TypeCountResponse>,
    pub total_tags: u64,
    pub total_collections: u64,
    pub total_duplicates: u64,
}
/// A `(name, count)` pair used by every statistics breakdown list.
#[derive(Debug, Serialize)]
pub struct TypeCountResponse {
    pub name: String,
    pub count: u64,
}
impl From<pinakes_core::storage::LibraryStatistics> for LibraryStatisticsResponse {
fn from(stats: pinakes_core::storage::LibraryStatistics) -> Self {
Self {
total_media: stats.total_media,
total_size_bytes: stats.total_size_bytes,
avg_file_size_bytes: stats.avg_file_size_bytes,
media_by_type: stats
.media_by_type
.into_iter()
.map(|(name, count)| TypeCountResponse { name, count })
.collect(),
storage_by_type: stats
.storage_by_type
.into_iter()
.map(|(name, count)| TypeCountResponse { name, count })
.collect(),
newest_item: stats.newest_item,
oldest_item: stats.oldest_item,
top_tags: stats
.top_tags
.into_iter()
.map(|(name, count)| TypeCountResponse { name, count })
.collect(),
top_collections: stats
.top_collections
.into_iter()
.map(|(name, count)| TypeCountResponse { name, count })
.collect(),
total_tags: stats.total_tags,
total_collections: stats.total_collections,
total_duplicates: stats.total_duplicates,
}
}
}
// Scheduled Tasks
/// API representation of one scheduled background task.
#[derive(Debug, Serialize)]
pub struct ScheduledTaskResponse {
    pub id: String,
    pub name: String,
    pub schedule: String,
    pub enabled: bool,
    pub last_run: Option<String>,
    pub next_run: Option<String>,
    pub last_status: Option<String>,
}
// Duplicates
/// All media items sharing one content hash.
#[derive(Debug, Serialize)]
pub struct DuplicateGroupResponse {
    pub content_hash: String,
    pub items: Vec<MediaResponse>,
}
// Auth
/// Credentials for POST /auth/login.
#[derive(Debug, Deserialize)]
pub struct LoginRequest {
    pub username: String,
    pub password: String,
}
/// Successful login: session bearer token plus the user's identity.
#[derive(Debug, Serialize)]
pub struct LoginResponse {
    pub token: String,
    pub username: String,
    pub role: String,
}
/// Current-user info for GET /auth/me.
#[derive(Debug, Serialize)]
pub struct UserInfoResponse {
    pub username: String,
    pub role: String,
}
// Conversion helpers
impl From<pinakes_core::model::MediaItem> for MediaResponse {
    /// Flatten a core media item into its API shape: ids/hashes become
    /// strings and custom fields are rendered as type/value pairs.
    fn from(item: pinakes_core::model::MediaItem) -> Self {
        Self {
            id: item.id.0.to_string(),
            path: item.path.to_string_lossy().to_string(),
            file_name: item.file_name,
            // Round-trip through serde_json to reuse MediaType's serde
            // string representation as the wire value (presumably a renamed
            // lowercase variant — confirm in pinakes_core::model). Falls
            // back to "" if the enum does not serialize to a plain string.
            media_type: serde_json::to_value(item.media_type)
                .ok()
                .and_then(|v| v.as_str().map(String::from))
                .unwrap_or_default(),
            content_hash: item.content_hash.0,
            file_size: item.file_size,
            title: item.title,
            artist: item.artist,
            album: item.album,
            genre: item.genre,
            year: item.year,
            duration_secs: item.duration_secs,
            description: item.description,
            has_thumbnail: item.thumbnail_path.is_some(),
            custom_fields: item
                .custom_fields
                .into_iter()
                .map(|(k, v)| {
                    (
                        k,
                        // Debug-format the field-type enum and lowercase it
                        // to get a stable wire string.
                        CustomFieldResponse {
                            field_type: format!("{:?}", v.field_type).to_lowercase(),
                            value: v.value,
                        },
                    )
                })
                .collect(),
            created_at: item.created_at,
            updated_at: item.updated_at,
        }
    }
}
impl From<pinakes_core::model::Tag> for TagResponse {
    /// Convert a core tag to its API shape (ids become strings).
    fn from(tag: pinakes_core::model::Tag) -> Self {
        Self {
            id: tag.id.to_string(),
            name: tag.name,
            parent_id: tag.parent_id.map(|id| id.to_string()),
            created_at: tag.created_at,
        }
    }
}
impl From<pinakes_core::model::Collection> for CollectionResponse {
    /// Convert a core collection to its API shape; the kind enum is
    /// Debug-formatted and lowercased to get a stable wire string.
    fn from(col: pinakes_core::model::Collection) -> Self {
        Self {
            id: col.id.to_string(),
            name: col.name,
            description: col.description,
            kind: format!("{:?}", col.kind).to_lowercase(),
            filter_query: col.filter_query,
            created_at: col.created_at,
            updated_at: col.updated_at,
        }
    }
}
impl From<pinakes_core::model::AuditEntry> for AuditEntryResponse {
    /// Convert a core audit entry to its API shape (ids become strings).
    fn from(entry: pinakes_core::model::AuditEntry) -> Self {
        Self {
            id: entry.id.to_string(),
            media_id: entry.media_id.map(|id| id.0.to_string()),
            action: entry.action.to_string(),
            details: entry.details,
            timestamp: entry.timestamp,
        }
    }
}

View file

@ -0,0 +1,69 @@
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use serde::Serialize;
/// JSON error payload shape: `{"error": "..."}`.
#[derive(Debug, Serialize)]
struct ErrorResponse {
    error: String,
}
/// Newtype over the core error so it can implement axum's `IntoResponse`
/// (orphan rule prevents implementing it on the core type directly).
pub struct ApiError(pub pinakes_core::error::PinakesError);
impl IntoResponse for ApiError {
    /// Map each core error variant to an HTTP status with a safe,
    /// client-facing message; anything unexpected becomes an opaque 500
    /// (details go to the server log only, never to the client).
    fn into_response(self) -> Response {
        use pinakes_core::error::PinakesError;
        let (status, message) = match &self.0 {
            PinakesError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
            PinakesError::FileNotFound(path) => {
                // Only expose the file name, not the full path
                let name = path
                    .file_name()
                    .map(|n| n.to_string_lossy().to_string())
                    .unwrap_or_else(|| "unknown".to_string());
                tracing::debug!(path = %path.display(), "file not found");
                (StatusCode::NOT_FOUND, format!("file not found: {name}"))
            }
            PinakesError::TagNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
            PinakesError::CollectionNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
            PinakesError::DuplicateHash(msg) => (StatusCode::CONFLICT, msg.clone()),
            PinakesError::UnsupportedMediaType(path) => {
                // Same path-redaction rule as FileNotFound above.
                let name = path
                    .file_name()
                    .map(|n| n.to_string_lossy().to_string())
                    .unwrap_or_else(|| "unknown".to_string());
                (
                    StatusCode::BAD_REQUEST,
                    format!("unsupported media type: {name}"),
                )
            }
            PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
            PinakesError::InvalidOperation(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
            PinakesError::Config(_) => {
                tracing::error!(error = %self.0, "configuration error");
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "internal configuration error".to_string(),
                )
            }
            _ => {
                tracing::error!(error = %self.0, "internal server error");
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "internal server error".to_string(),
                )
            }
        };
        // serde_json handles escaping; the manual fallback only runs if
        // serialization itself fails (practically never for a String field).
        let body = serde_json::to_string(&ErrorResponse {
            error: message.clone(),
        })
        .unwrap_or_else(|_| format!(r#"{{"error":"{}"}}"#, message));
        (status, [("content-type", "application/json")], body).into_response()
    }
}
impl From<pinakes_core::error::PinakesError> for ApiError {
fn from(e: pinakes_core::error::PinakesError) -> Self {
Self(e)
}
}

View file

@ -0,0 +1,6 @@
/// Router assembly and middleware wiring.
pub mod app;
/// Authentication / authorization middleware.
pub mod auth;
/// Request/response data-transfer objects.
pub mod dto;
/// Core error → HTTP response mapping.
pub mod error;
/// HTTP route handlers.
pub mod routes;
/// Shared application state (storage, config, sessions, jobs).
pub mod state;

View file

@ -0,0 +1,448 @@
use std::path::PathBuf;
use std::sync::Arc;
use anyhow::Result;
use clap::Parser;
use tokio::sync::RwLock;
use tracing::info;
use tracing_subscriber::EnvFilter;
use pinakes_core::config::Config;
use pinakes_core::storage::StorageBackend;
use pinakes_server::app;
use pinakes_server::state::AppState;
/// Pinakes media cataloging server
#[derive(Parser)]
#[command(name = "pinakes-server", version, about)]
struct Cli {
    /// Path to configuration file
    // NOTE: `///` lines become clap help text, so extra notes here use `//`.
    #[arg(short, long, env = "PINAKES_CONFIG")]
    config: Option<PathBuf>,
    /// Override listen host
    #[arg(long)]
    host: Option<String>,
    /// Override listen port
    #[arg(short, long)]
    port: Option<u16>,
    /// Set log level (trace, debug, info, warn, error)
    // Parsed via tracing_subscriber::EnvFilter, so full filter directives
    // (e.g. "pinakes=debug,info") also work; invalid input falls back to "info".
    #[arg(long, default_value = "info")]
    log_level: String,
    /// Log output format (compact, full, pretty, json)
    #[arg(long, default_value = "compact")]
    log_format: String,
    /// Run database migrations only, then exit
    #[arg(long)]
    migrate_only: bool,
}
/// Pick the configuration file to load.
///
/// Precedence: an explicit CLI/env path, then `pinakes.toml` in the
/// current working directory, then the XDG default location.
fn resolve_config_path(explicit: Option<&std::path::Path>) -> PathBuf {
    match explicit {
        Some(path) => path.to_path_buf(),
        None => {
            let cwd_config = PathBuf::from("pinakes.toml");
            if cwd_config.exists() {
                cwd_config
            } else {
                Config::default_config_path()
            }
        }
    }
}
#[tokio::main]
async fn main() -> Result<()> {
let cli = Cli::parse();
// Initialize logging
let env_filter = EnvFilter::try_new(&cli.log_level).unwrap_or_else(|_| EnvFilter::new("info"));
match cli.log_format.as_str() {
"json" => {
tracing_subscriber::fmt()
.with_env_filter(env_filter)
.json()
.init();
}
"pretty" => {
tracing_subscriber::fmt()
.with_env_filter(env_filter)
.pretty()
.init();
}
"full" => {
tracing_subscriber::fmt().with_env_filter(env_filter).init();
}
_ => {
tracing_subscriber::fmt()
.with_env_filter(env_filter)
.compact()
.init();
}
}
let config_path = resolve_config_path(cli.config.as_deref());
info!(path = %config_path.display(), "loading configuration");
let mut config = Config::load_or_default(&config_path)?;
config.ensure_dirs()?;
config
.validate()
.map_err(|e| anyhow::anyhow!("invalid configuration: {e}"))?;
// Apply CLI overrides
if let Some(host) = cli.host {
config.server.host = host;
}
if let Some(port) = cli.port {
config.server.port = port;
}
// Storage backend initialization
let storage: pinakes_core::storage::DynStorageBackend = match config.storage.backend {
pinakes_core::config::StorageBackendType::Sqlite => {
let sqlite_config = config.storage.sqlite.as_ref().ok_or_else(|| {
anyhow::anyhow!(
"sqlite storage selected but [storage.sqlite] config section missing"
)
})?;
info!(path = %sqlite_config.path.display(), "initializing sqlite storage");
let backend = pinakes_core::storage::sqlite::SqliteBackend::new(&sqlite_config.path)?;
backend.run_migrations().await?;
Arc::new(backend)
}
pinakes_core::config::StorageBackendType::Postgres => {
let pg_config = config.storage.postgres.as_ref().ok_or_else(|| {
anyhow::anyhow!(
"postgres storage selected but [storage.postgres] config section missing"
)
})?;
info!(host = %pg_config.host, port = pg_config.port, database = %pg_config.database, "initializing postgres storage");
let backend = pinakes_core::storage::postgres::PostgresBackend::new(pg_config).await?;
backend.run_migrations().await?;
Arc::new(backend)
}
};
if cli.migrate_only {
info!("migrations complete, exiting");
return Ok(());
}
// Register root directories
for root in &config.directories.roots {
if root.exists() {
storage.add_root_dir(root.clone()).await?;
info!(path = %root.display(), "registered root directory");
} else {
tracing::warn!(path = %root.display(), "root directory does not exist, skipping");
}
}
// Start filesystem watcher if configured
if config.scanning.watch {
let watch_storage = storage.clone();
let watch_dirs = config.directories.roots.clone();
let watch_ignore = config.scanning.ignore_patterns.clone();
tokio::spawn(async move {
if let Err(e) =
pinakes_core::scan::watch_and_import(watch_storage, watch_dirs, watch_ignore).await
{
tracing::error!(error = %e, "filesystem watcher failed");
}
});
info!("filesystem watcher started");
}
let addr = format!("{}:{}", config.server.host, config.server.port);
// Initialize job queue with executor
let job_storage = storage.clone();
let job_config = config.clone();
let job_queue = pinakes_core::jobs::JobQueue::new(
config.jobs.worker_count,
move |job_id, kind, cancel, jobs| {
let storage = job_storage.clone();
let config = job_config.clone();
tokio::spawn(async move {
use pinakes_core::jobs::{JobKind, JobQueue};
let result = match kind {
JobKind::Scan { path } => {
let ignore = config.scanning.ignore_patterns.clone();
let res = if let Some(p) = path {
pinakes_core::scan::scan_directory(&storage, &p, &ignore).await
} else {
pinakes_core::scan::scan_all_roots(&storage, &ignore)
.await
.map(|statuses| {
let total_found: usize =
statuses.iter().map(|s| s.files_found).sum();
let total_processed: usize =
statuses.iter().map(|s| s.files_processed).sum();
let all_errors: Vec<String> =
statuses.into_iter().flat_map(|s| s.errors).collect();
pinakes_core::scan::ScanStatus {
scanning: false,
files_found: total_found,
files_processed: total_processed,
errors: all_errors,
}
})
};
match res {
Ok(status) => {
JobQueue::complete(
&jobs,
job_id,
serde_json::json!({
"files_found": status.files_found,
"files_processed": status.files_processed,
"errors": status.errors,
}),
)
.await;
}
Err(e) => {
JobQueue::fail(&jobs, job_id, e.to_string()).await;
}
}
}
JobKind::GenerateThumbnails { media_ids } => {
let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir();
let thumb_config = config.thumbnails.clone();
let total = media_ids.len();
let mut generated = 0usize;
let mut errors = Vec::new();
for (i, mid) in media_ids.iter().enumerate() {
if cancel.is_cancelled() {
break;
}
JobQueue::update_progress(
&jobs,
job_id,
i as f32 / total as f32,
format!("{}/{}", i, total),
)
.await;
match storage.get_media(*mid).await {
Ok(item) => {
let source = item.path.clone();
let mt = item.media_type;
let id = item.id;
let td = thumb_dir.clone();
let tc = thumb_config.clone();
let res = tokio::task::spawn_blocking(move || {
pinakes_core::thumbnail::generate_thumbnail_with_config(
id, &source, mt, &td, &tc,
)
})
.await;
match res {
Ok(Ok(Some(path))) => {
let mut updated = item;
updated.thumbnail_path = Some(path);
let _ = storage.update_media(&updated).await;
generated += 1;
}
Ok(Ok(None)) => {}
Ok(Err(e)) => errors.push(format!("{}: {}", mid, e)),
Err(e) => errors.push(format!("{}: {}", mid, e)),
}
}
Err(e) => errors.push(format!("{}: {}", mid, e)),
}
}
JobQueue::complete(
&jobs,
job_id,
serde_json::json!({
"generated": generated, "errors": errors
}),
)
.await;
}
JobKind::VerifyIntegrity { media_ids } => {
let ids = if media_ids.is_empty() {
None
} else {
Some(media_ids.as_slice())
};
match pinakes_core::integrity::verify_integrity(&storage, ids).await {
Ok(report) => {
JobQueue::complete(
&jobs,
job_id,
serde_json::to_value(&report).unwrap_or_default(),
)
.await;
}
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
JobKind::OrphanDetection => {
match pinakes_core::integrity::detect_orphans(&storage).await {
Ok(report) => {
JobQueue::complete(
&jobs,
job_id,
serde_json::to_value(&report).unwrap_or_default(),
)
.await;
}
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
JobKind::CleanupThumbnails => {
let thumb_dir = pinakes_core::thumbnail::default_thumbnail_dir();
match pinakes_core::integrity::cleanup_orphaned_thumbnails(
&storage, &thumb_dir,
)
.await
{
Ok(removed) => {
JobQueue::complete(
&jobs,
job_id,
serde_json::json!({ "removed": removed }),
)
.await;
}
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
JobKind::Export {
format,
destination,
} => {
match pinakes_core::export::export_library(&storage, &format, &destination)
.await
{
Ok(result) => {
JobQueue::complete(
&jobs,
job_id,
serde_json::to_value(&result).unwrap_or_default(),
)
.await;
}
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
}
};
let _ = result;
drop(cancel);
})
},
);
// Initialize cache layer
let cache = std::sync::Arc::new(pinakes_core::cache::CacheLayer::new(
config.jobs.cache_ttl_secs,
));
// Initialize scheduler with cancellation support
let shutdown_token = tokio_util::sync::CancellationToken::new();
let config_arc = Arc::new(RwLock::new(config));
let scheduler = pinakes_core::scheduler::TaskScheduler::new(
job_queue.clone(),
shutdown_token.clone(),
config_arc.clone(),
Some(config_path.clone()),
);
let scheduler = Arc::new(scheduler);
// Restore saved scheduler state from config
scheduler.restore_state().await;
// Spawn scheduler background loop
{
let scheduler = scheduler.clone();
tokio::spawn(async move {
scheduler.run().await;
});
}
let state = AppState {
storage: storage.clone(),
config: config_arc,
config_path: Some(config_path),
scan_progress: pinakes_core::scan::ScanProgress::new(),
sessions: Arc::new(RwLock::new(std::collections::HashMap::new())),
job_queue,
cache,
scheduler,
};
// Periodic session cleanup (every 15 minutes)
{
let sessions = state.sessions.clone();
let cancel = shutdown_token.clone();
tokio::spawn(async move {
let mut interval = tokio::time::interval(std::time::Duration::from_secs(15 * 60));
loop {
tokio::select! {
_ = interval.tick() => {
pinakes_server::state::cleanup_expired_sessions(&sessions).await;
}
_ = cancel.cancelled() => {
break;
}
}
}
});
}
let router = app::create_router(state);
info!(addr = %addr, "server listening");
let listener = tokio::net::TcpListener::bind(&addr).await?;
axum::serve(
listener,
router.into_make_service_with_connect_info::<std::net::SocketAddr>(),
)
.with_graceful_shutdown(shutdown_signal())
.await?;
shutdown_token.cancel();
info!("server shut down");
Ok(())
}
/// Resolves when the process is asked to stop: Ctrl+C on every
/// platform, or SIGTERM on Unix. If a signal handler cannot be
/// installed we log a warning and park that branch forever so the
/// other branch can still trigger shutdown.
async fn shutdown_signal() {
    let ctrl_c = async {
        if let Err(e) = tokio::signal::ctrl_c().await {
            tracing::warn!(error = %e, "failed to install Ctrl+C handler");
            // Never resolve; the select below waits on the other arm.
            std::future::pending::<()>().await;
        }
    };
    #[cfg(unix)]
    let terminate = async {
        match tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) {
            Ok(mut sig) => {
                sig.recv().await;
            }
            Err(e) => {
                tracing::warn!(error = %e, "failed to install SIGTERM handler");
                std::future::pending::<()>().await;
            }
        }
    };
    // Non-Unix targets have no SIGTERM; this arm simply never fires.
    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();
    tokio::select! {
        _ = ctrl_c => info!("received Ctrl+C, shutting down"),
        _ = terminate => info!("received SIGTERM, shutting down"),
    }
}

View file

@ -0,0 +1,23 @@
use axum::Json;
use axum::extract::{Query, State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::Pagination;
/// List audit-log entries with offset/limit pagination.
///
/// Defaults to offset 0 / limit 50; the limit is capped at 1000 to
/// bound response size.
pub async fn list_audit(
    State(state): State<AppState>,
    Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<AuditEntryResponse>>, ApiError> {
    let offset = params.offset.unwrap_or(0);
    let limit = params.limit.unwrap_or(50).min(1000);
    let pagination = Pagination::new(offset, limit, None);
    let entries = state.storage.list_audit_entries(None, &pagination).await?;
    let body: Vec<AuditEntryResponse> = entries.into_iter().map(Into::into).collect();
    Ok(Json(body))
}

View file

@ -0,0 +1,119 @@
use axum::Json;
use axum::extract::State;
use axum::http::{HeaderMap, StatusCode};
use crate::dto::{LoginRequest, LoginResponse, UserInfoResponse};
use crate::state::AppState;
/// Authenticate a user against the config-defined account list and
/// hand out a session token.
///
/// Returns 400 for oversized credentials, 404 when accounts are
/// disabled, 401 for unknown user or wrong password, and 500 for a
/// malformed stored hash.
pub async fn login(
    State(state): State<AppState>,
    Json(req): Json<LoginRequest>,
) -> Result<Json<LoginResponse>, StatusCode> {
    // Limit input sizes to prevent DoS
    if req.username.len() > 255 || req.password.len() > 1024 {
        return Err(StatusCode::BAD_REQUEST);
    }
    let config = state.config.read().await;
    if !config.accounts.enabled {
        return Err(StatusCode::NOT_FOUND);
    }
    let Some(user) = config
        .accounts
        .users
        .iter()
        .find(|u| u.username == req.username)
    else {
        tracing::warn!(username = %req.username, "login failed: unknown user");
        return Err(StatusCode::UNAUTHORIZED);
    };
    // Verify password using argon2
    use argon2::password_hash::PasswordVerifier;
    let parsed_hash = argon2::password_hash::PasswordHash::new(&user.password_hash)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    if argon2::Argon2::default()
        .verify_password(req.password.as_bytes(), &parsed_hash)
        .is_err()
    {
        tracing::warn!(username = %req.username, "login failed: invalid password");
        return Err(StatusCode::UNAUTHORIZED);
    }
    // Generate an opaque 48-char alphanumeric session token.
    use rand::Rng;
    let token: String = rand::rng()
        .sample_iter(&rand::distr::Alphanumeric)
        .take(48)
        .map(char::from)
        .collect();
    let role = user.role;
    let username = user.username.clone();
    // Record the session; the write guard lives only for this statement.
    state.sessions.write().await.insert(
        token.clone(),
        crate::state::SessionInfo {
            username: username.clone(),
            role,
            created_at: chrono::Utc::now(),
        },
    );
    tracing::info!(username = %username, role = %role, "login successful");
    Ok(Json(LoginResponse {
        token,
        username,
        role: role.to_string(),
    }))
}
/// Drop the caller's session (identified by bearer token), if any.
/// Always answers 200, even for missing or unknown tokens.
pub async fn logout(State(state): State<AppState>, headers: HeaderMap) -> StatusCode {
    if let Some(token) = extract_bearer_token(&headers) {
        state.sessions.write().await.remove(token);
    }
    StatusCode::OK
}
/// Identify the caller from their bearer token.
///
/// When accounts are disabled every request is reported as the
/// default admin user; otherwise a missing or unknown token yields 401.
pub async fn me(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> Result<Json<UserInfoResponse>, StatusCode> {
    {
        let config = state.config.read().await;
        if !config.accounts.enabled {
            return Ok(Json(UserInfoResponse {
                username: "admin".to_string(),
                role: "admin".to_string(),
            }));
        }
    }
    let token = extract_bearer_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;
    let sessions = state.sessions.read().await;
    sessions
        .get(token)
        .map(|session| {
            Json(UserInfoResponse {
                username: session.username.clone(),
                role: session.role.to_string(),
            })
        })
        .ok_or(StatusCode::UNAUTHORIZED)
}
/// Extract the token from an `Authorization: Bearer <token>` header.
///
/// Per RFC 9110 §11.1 the auth-scheme is case-insensitive, so
/// `bearer`, `BEARER`, etc. are accepted as well (previously only the
/// exact string "Bearer " matched). Returns `None` when the header is
/// absent, not valid UTF-8, or not a Bearer credential.
fn extract_bearer_token(headers: &HeaderMap) -> Option<&str> {
    headers
        .get("authorization")
        .and_then(|v| v.to_str().ok())
        .and_then(|s| {
            let (scheme, token) = s.split_once(' ')?;
            scheme.eq_ignore_ascii_case("Bearer").then_some(token)
        })
}

View file

@ -0,0 +1,101 @@
use axum::Json;
use axum::extract::{Path, State};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{CollectionKind, MediaId};
pub async fn create_collection(
State(state): State<AppState>,
Json(req): Json<CreateCollectionRequest>,
) -> Result<Json<CollectionResponse>, ApiError> {
if req.name.is_empty() || req.name.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"collection name must be 1-255 characters".into(),
),
));
}
if let Some(ref desc) = req.description
&& desc.len() > 10_000
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"description exceeds 10000 characters".into(),
),
));
}
let kind = match req.kind.as_str() {
"virtual" => CollectionKind::Virtual,
_ => CollectionKind::Manual,
};
let col = pinakes_core::collections::create_collection(
&state.storage,
&req.name,
kind,
req.description.as_deref(),
req.filter_query.as_deref(),
)
.await?;
Ok(Json(CollectionResponse::from(col)))
}
/// List every collection (no pagination).
pub async fn list_collections(
    State(state): State<AppState>,
) -> Result<Json<Vec<CollectionResponse>>, ApiError> {
    let collections = state.storage.list_collections().await?;
    let body: Vec<CollectionResponse> = collections.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Fetch a single collection by id.
pub async fn get_collection(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<CollectionResponse>, ApiError> {
    let collection = state.storage.get_collection(id).await?;
    Ok(Json(collection.into()))
}
/// Delete a collection by id; responds with `{"deleted": true}`.
pub async fn delete_collection(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.delete_collection(id).await?;
    let body = serde_json::json!({"deleted": true});
    Ok(Json(body))
}
/// Add a media item to a collection. `position` defaults to 0 when
/// omitted from the request body.
pub async fn add_member(
    State(state): State<AppState>,
    Path(collection_id): Path<Uuid>,
    Json(req): Json<AddMemberRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let media_id = MediaId(req.media_id);
    let position = req.position.unwrap_or(0);
    pinakes_core::collections::add_member(&state.storage, collection_id, media_id, position)
        .await?;
    Ok(Json(serde_json::json!({"added": true})))
}
/// Remove a media item from a collection.
pub async fn remove_member(
    State(state): State<AppState>,
    Path((collection_id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let member = MediaId(media_id);
    pinakes_core::collections::remove_member(&state.storage, collection_id, member).await?;
    Ok(Json(serde_json::json!({"removed": true})))
}
/// List the media items belonging to a collection.
pub async fn get_members(
    State(state): State<AppState>,
    Path(collection_id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let members = pinakes_core::collections::get_members(&state.storage, collection_id).await?;
    let body: Vec<MediaResponse> = members.into_iter().map(Into::into).collect();
    Ok(Json(body))
}

View file

@ -0,0 +1,217 @@
use axum::Json;
use axum::extract::State;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
/// Snapshot of the effective configuration, the registered root
/// directories, and whether the on-disk config file can be rewritten
/// by this process.
pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResponse>, ApiError> {
    // Best-effort writability probe. NOTE: `readonly()` inspects
    // permission bits only, so this can over-report writability for
    // files owned by another user — acceptable for a UI hint.
    fn writable(path: &std::path::Path) -> bool {
        std::fs::metadata(path)
            .map(|m| !m.permissions().readonly())
            .unwrap_or(false)
    }
    let config = state.config.read().await;
    let roots = state.storage.list_root_dirs().await?;
    let config_path = state
        .config_path
        .as_ref()
        .map(|p| p.to_string_lossy().to_string());
    // If the file exists check it directly; otherwise a save would
    // create it, so check the parent directory instead.
    let config_writable = match &state.config_path {
        Some(path) if path.exists() => writable(path),
        Some(path) => path.parent().map(writable).unwrap_or(false),
        None => false,
    };
    Ok(Json(ConfigResponse {
        backend: format!("{:?}", config.storage.backend).to_lowercase(),
        database_path: config
            .storage
            .sqlite
            .as_ref()
            .map(|s| s.path.to_string_lossy().to_string()),
        roots: roots
            .iter()
            .map(|p| p.to_string_lossy().to_string())
            .collect(),
        scanning: ScanningConfigResponse {
            watch: config.scanning.watch,
            poll_interval_secs: config.scanning.poll_interval_secs,
            ignore_patterns: config.scanning.ignore_patterns.clone(),
        },
        server: ServerConfigResponse {
            host: config.server.host.clone(),
            port: config.server.port,
        },
        ui: UiConfigResponse::from(&config.ui),
        config_path,
        config_writable,
    }))
}
/// Return the current UI preference section of the config.
pub async fn get_ui_config(
    State(state): State<AppState>,
) -> Result<Json<UiConfigResponse>, ApiError> {
    let ui = {
        let config = state.config.read().await;
        UiConfigResponse::from(&config.ui)
    };
    Ok(Json(ui))
}
/// Merge any provided UI preference fields into the config, persist to
/// disk when a config path is known, and return the merged result.
pub async fn update_ui_config(
    State(state): State<AppState>,
    Json(req): Json<UpdateUiConfigRequest>,
) -> Result<Json<UiConfigResponse>, ApiError> {
    let mut config = state.config.write().await;
    {
        // Only fields present in the request overwrite existing values.
        let ui = &mut config.ui;
        if let Some(v) = req.theme {
            ui.theme = v;
        }
        if let Some(v) = req.default_view {
            ui.default_view = v;
        }
        if let Some(v) = req.default_page_size {
            ui.default_page_size = v;
        }
        if let Some(v) = req.default_view_mode {
            ui.default_view_mode = v;
        }
        if let Some(v) = req.auto_play_media {
            ui.auto_play_media = v;
        }
        if let Some(v) = req.show_thumbnails {
            ui.show_thumbnails = v;
        }
        if let Some(v) = req.sidebar_collapsed {
            ui.sidebar_collapsed = v;
        }
    }
    // In-memory state is updated even when no config file is configured.
    if let Some(ref path) = state.config_path {
        config.save_to_file(path).map_err(ApiError)?;
    }
    Ok(Json(UiConfigResponse::from(&config.ui)))
}
pub async fn update_scanning_config(
State(state): State<AppState>,
Json(req): Json<UpdateScanningRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
let mut config = state.config.write().await;
if let Some(watch) = req.watch {
config.scanning.watch = watch;
}
if let Some(interval) = req.poll_interval_secs {
config.scanning.poll_interval_secs = interval;
}
if let Some(patterns) = req.ignore_patterns {
config.scanning.ignore_patterns = patterns;
}
// Persist to disk if we have a config path
if let Some(ref path) = state.config_path {
config.save_to_file(path).map_err(ApiError)?;
}
let roots = state.storage.list_root_dirs().await?;
let config_path = state
.config_path
.as_ref()
.map(|p| p.to_string_lossy().to_string());
let config_writable = match &state.config_path {
Some(path) => {
if path.exists() {
std::fs::metadata(path)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
} else {
path.parent()
.map(|parent| {
std::fs::metadata(parent)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
})
.unwrap_or(false)
}
}
None => false,
};
Ok(Json(ConfigResponse {
backend: format!("{:?}", config.storage.backend).to_lowercase(),
database_path: config
.storage
.sqlite
.as_ref()
.map(|s| s.path.to_string_lossy().to_string()),
roots: roots
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect(),
scanning: ScanningConfigResponse {
watch: config.scanning.watch,
poll_interval_secs: config.scanning.poll_interval_secs,
ignore_patterns: config.scanning.ignore_patterns.clone(),
},
server: ServerConfigResponse {
host: config.server.host.clone(),
port: config.server.port,
},
ui: UiConfigResponse::from(&config.ui),
config_path,
config_writable,
}))
}
pub async fn add_root(
State(state): State<AppState>,
Json(req): Json<RootDirRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
let path = std::path::PathBuf::from(&req.path);
if !path.exists() {
return Err(ApiError(pinakes_core::error::PinakesError::FileNotFound(
path,
)));
}
state.storage.add_root_dir(path.clone()).await?;
{
let mut config = state.config.write().await;
if !config.directories.roots.contains(&path) {
config.directories.roots.push(path);
}
if let Some(ref config_path) = state.config_path {
config.save_to_file(config_path).map_err(ApiError)?;
}
}
get_config(State(state)).await
}
/// Unregister a library root directory from storage and the config
/// file, then return the refreshed config.
pub async fn remove_root(
    State(state): State<AppState>,
    Json(req): Json<RootDirRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
    let path = std::path::PathBuf::from(&req.path);
    state.storage.remove_root_dir(&path).await?;
    {
        let mut config = state.config.write().await;
        config.directories.roots.retain(|r| *r != path);
        if let Some(ref config_path) = state.config_path {
            config.save_to_file(config_path).map_err(ApiError)?;
        }
    }
    get_config(State(state)).await
}

View file

@ -0,0 +1,34 @@
use axum::Json;
use axum::extract::State;
use crate::dto::DatabaseStatsResponse;
use crate::error::ApiError;
use crate::state::AppState;
/// Report counts and on-disk size from the active storage backend.
pub async fn database_stats(
    State(state): State<AppState>,
) -> Result<Json<DatabaseStatsResponse>, ApiError> {
    let stats = state.storage.database_stats().await?;
    let body = DatabaseStatsResponse {
        media_count: stats.media_count,
        tag_count: stats.tag_count,
        collection_count: stats.collection_count,
        audit_count: stats.audit_count,
        database_size_bytes: stats.database_size_bytes,
        backend_name: stats.backend_name,
    };
    Ok(Json(body))
}
/// Ask the storage backend to vacuum / reclaim unused space.
pub async fn vacuum_database(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.vacuum().await?;
    let body = serde_json::json!({"status": "ok"});
    Ok(Json(body))
}
/// Irreversibly delete ALL library data via the storage backend.
///
/// NOTE(review): no confirmation or authorization gate is visible in
/// this handler — confirm protection is enforced at the router level.
pub async fn clear_database(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.clear_all_data().await?;
    let body = serde_json::json!({"status": "ok"});
    Ok(Json(body))
}

View file

@ -0,0 +1,30 @@
use axum::Json;
use axum::extract::State;
use crate::dto::{DuplicateGroupResponse, MediaResponse};
use crate::error::ApiError;
use crate::state::AppState;
/// List groups of media items that share a content hash.
pub async fn list_duplicates(
    State(state): State<AppState>,
) -> Result<Json<Vec<DuplicateGroupResponse>>, ApiError> {
    let groups = state.storage.find_duplicates().await?;
    let mut response = Vec::with_capacity(groups.len());
    for items in groups {
        // Every item in a group carries the same hash; take the first.
        let content_hash = items
            .first()
            .map(|i| i.content_hash.0.clone())
            .unwrap_or_default();
        let media_items: Vec<MediaResponse> =
            items.into_iter().map(MediaResponse::from).collect();
        response.push(DuplicateGroupResponse {
            content_hash,
            items: media_items,
        });
    }
    Ok(Json(response))
}

View file

@ -0,0 +1,42 @@
use axum::Json;
use axum::extract::State;
use serde::Deserialize;
use std::path::PathBuf;
use crate::error::ApiError;
use crate::state::AppState;
/// Request body for `trigger_export_with_options`.
#[derive(Debug, Deserialize)]
pub struct ExportRequest {
    // Output format name; "csv" selects CSV, anything else falls back to JSON.
    pub format: String,
    // Filesystem path the export job writes to.
    pub destination: PathBuf,
}
/// Queue a JSON export to `export.json` in the default data directory
/// and return the job id.
pub async fn trigger_export(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let destination = pinakes_core::config::Config::default_data_dir().join("export.json");
    let kind = pinakes_core::jobs::JobKind::Export {
        format: pinakes_core::jobs::ExportFormat::Json,
        destination,
    };
    let job_id = state.job_queue.submit(kind).await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
/// Queue an export in the requested format; unknown format strings
/// fall back to JSON.
///
/// NOTE(review): `destination` is a caller-supplied filesystem path
/// and is not validated against configured roots — confirm this
/// endpoint is only reachable by trusted users.
pub async fn trigger_export_with_options(
    State(state): State<AppState>,
    Json(req): Json<ExportRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let format = if req.format == "csv" {
        pinakes_core::jobs::ExportFormat::Csv
    } else {
        pinakes_core::jobs::ExportFormat::Json
    };
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::Export {
            format,
            destination: req.destination,
        })
        .await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}

View file

@ -0,0 +1,8 @@
use axum::Json;
/// Liveness probe: always reports "ok" plus the crate version.
pub async fn health() -> Json<serde_json::Value> {
    let body = serde_json::json!({
        "status": "ok",
        "version": env!("CARGO_PKG_VERSION"),
    });
    Json(body)
}

View file

@ -0,0 +1,99 @@
use axum::Json;
use axum::extract::State;
use serde::Deserialize;
use crate::error::ApiError;
use crate::state::AppState;
/// Request body for `resolve_orphans`.
#[derive(Debug, Deserialize)]
pub struct OrphanResolveRequest {
    // "delete" removes the orphaned records; any other value maps to Ignore.
    pub action: String,
    // Raw media UUIDs the action applies to.
    pub ids: Vec<uuid::Uuid>,
}
/// Queue an orphan-detection job and return its id.
pub async fn trigger_orphan_detection(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::OrphanDetection)
        .await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
/// Queue an integrity-verification job for the given media items.
///
/// An empty `media_ids` list means "verify everything": the job runner
/// treats an empty id list as no filter.
pub async fn trigger_verify_integrity(
    State(state): State<AppState>,
    Json(req): Json<VerifyIntegrityRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Tuple-struct constructors are plain functions — pass `MediaId`
    // directly instead of wrapping it in a redundant closure.
    let media_ids: Vec<pinakes_core::model::MediaId> = req
        .media_ids
        .into_iter()
        .map(pinakes_core::model::MediaId)
        .collect();
    let kind = pinakes_core::jobs::JobKind::VerifyIntegrity { media_ids };
    let job_id = state.job_queue.submit(kind).await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
/// Request body for `trigger_verify_integrity`.
#[derive(Debug, Deserialize)]
pub struct VerifyIntegrityRequest {
    // Media ids to verify; an empty list verifies the entire library.
    pub media_ids: Vec<uuid::Uuid>,
}
/// Queue a cleanup job for orphaned thumbnails in the default
/// thumbnail directory and return its id.
pub async fn trigger_cleanup_thumbnails(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::CleanupThumbnails)
        .await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
/// Optional request body for `generate_all_thumbnails`; an absent body
/// behaves like the default (regenerate all).
#[derive(Debug, Deserialize)]
pub struct GenerateThumbnailsRequest {
    /// When true, only generate thumbnails for items that don't have one yet.
    /// When false (default), regenerate all thumbnails.
    #[serde(default)]
    pub only_missing: bool,
}
/// Queue a thumbnail-generation job.
///
/// With `only_missing: true` only items lacking a thumbnail are
/// queued; otherwise every item is regenerated. Returns the job id and
/// queued item count, or a `job_id: null` no-op response when nothing
/// matches.
pub async fn generate_all_thumbnails(
    State(state): State<AppState>,
    body: Option<Json<GenerateThumbnailsRequest>>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Missing request body means "regenerate all".
    let only_missing = body.is_some_and(|b| b.only_missing);
    let media_ids = state
        .storage
        .list_media_ids_for_thumbnails(only_missing)
        .await?;
    if media_ids.is_empty() {
        return Ok(Json(serde_json::json!({
            "job_id": null,
            "media_count": 0,
            "message": "no media items to process"
        })));
    }
    let count = media_ids.len();
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::GenerateThumbnails { media_ids })
        .await;
    Ok(Json(serde_json::json!({
        "job_id": job_id.to_string(),
        "media_count": count
    })))
}
pub async fn resolve_orphans(
State(state): State<AppState>,
Json(req): Json<OrphanResolveRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let action = match req.action.as_str() {
"delete" => pinakes_core::integrity::OrphanAction::Delete,
_ => pinakes_core::integrity::OrphanAction::Ignore,
};
let ids: Vec<pinakes_core::model::MediaId> = req
.ids
.into_iter()
.map(pinakes_core::model::MediaId)
.collect();
let count = pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
.await
.map_err(|e| ApiError(e))?;
Ok(Json(serde_json::json!({ "resolved": count })))
}

View file

@ -0,0 +1,34 @@
use axum::Json;
use axum::extract::{Path, State};
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::jobs::Job;
/// Snapshot of every job currently known to the queue.
pub async fn list_jobs(State(state): State<AppState>) -> Json<Vec<Job>> {
    let jobs = state.job_queue.list().await;
    Json(jobs)
}
pub async fn get_job(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
) -> Result<Json<Job>, ApiError> {
state.job_queue.status(id).await.map(Json).ok_or_else(|| {
pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}")).into()
})
}
/// Request cancellation of a job; NotFound error when the job is
/// unknown or already finished.
pub async fn cancel_job(
    State(state): State<AppState>,
    Path(id): Path<uuid::Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    if state.job_queue.cancel(id).await {
        return Ok(Json(serde_json::json!({ "cancelled": true })));
    }
    Err(pinakes_core::error::PinakesError::NotFound(format!(
        "job not found or already finished: {id}"
    ))
    .into())
}

View file

@ -0,0 +1,795 @@
use axum::Json;
use axum::extract::{Path, Query, State};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{MediaId, Pagination};
use pinakes_core::storage::DynStorageBackend;
/// Apply tags and add to collection after a successful import.
/// Shared logic used by import_with_options, batch_import, and import_directory_endpoint.
///
/// Every failure is logged and swallowed: post-processing is
/// best-effort and must not fail an import that already succeeded.
async fn apply_import_post_processing(
    storage: &DynStorageBackend,
    media_id: MediaId,
    tag_ids: Option<&[Uuid]>,
    new_tags: Option<&[String]>,
    collection_id: Option<Uuid>,
) {
    // Attach pre-existing tags by id.
    for tid in tag_ids.unwrap_or_default() {
        if let Err(e) = pinakes_core::tags::tag_media(storage, media_id, *tid).await {
            tracing::warn!(error = %e, "failed to apply tag during import");
        }
    }
    // Create any new tags by name, then attach them.
    for name in new_tags.unwrap_or_default() {
        match pinakes_core::tags::create_tag(storage, name, None).await {
            Ok(tag) => {
                if let Err(e) = pinakes_core::tags::tag_media(storage, media_id, tag.id).await {
                    tracing::warn!(error = %e, "failed to apply new tag during import");
                }
            }
            Err(e) => {
                tracing::warn!(tag_name = %name, error = %e, "failed to create tag during import");
            }
        }
    }
    // Optionally file the item into a collection at position 0.
    if let Some(col_id) = collection_id {
        if let Err(e) =
            pinakes_core::collections::add_member(storage, col_id, media_id, 0).await
        {
            tracing::warn!(error = %e, "failed to add to collection during import");
        }
    }
}
/// Import a single file by path; reports the media id and whether the
/// file was already present (deduplicated by content hash upstream).
pub async fn import_media(
    State(state): State<AppState>,
    Json(req): Json<ImportRequest>,
) -> Result<Json<ImportResponse>, ApiError> {
    let outcome = pinakes_core::import::import_file(&state.storage, &req.path).await?;
    let response = ImportResponse {
        media_id: outcome.media_id.0.to_string(),
        was_duplicate: outcome.was_duplicate,
    };
    Ok(Json(response))
}
/// Paginated media listing. Defaults to offset 0 / limit 50 with the
/// limit capped at 1000; an optional sort key is passed through.
pub async fn list_media(
    State(state): State<AppState>,
    Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let offset = params.offset.unwrap_or(0);
    let limit = params.limit.unwrap_or(50).min(1000);
    let pagination = Pagination::new(offset, limit, params.sort);
    let items = state.storage.list_media(&pagination).await?;
    let body: Vec<MediaResponse> = items.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Fetch a single media item by id.
pub async fn get_media(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<MediaResponse>, ApiError> {
    let item = state.storage.get_media(MediaId(id)).await?;
    Ok(Json(item.into()))
}
/// Maximum length for short text fields (title, artist, album, genre).
/// Limit enforced by `validate_optional_text` in `update_media`.
const MAX_SHORT_TEXT: usize = 500;
/// Maximum length for long text fields (description).
/// Limit enforced by `validate_optional_text` in `update_media`.
const MAX_LONG_TEXT: usize = 10_000;
/// Validate that an optional text field does not exceed `max`
/// characters, producing an `InvalidOperation` error naming the field.
///
/// The limit now counts Unicode scalar values (`chars().count()`)
/// rather than bytes, matching the "characters" wording of the error
/// message; previously multi-byte UTF-8 input was rejected earlier
/// than the message implied.
fn validate_optional_text(field: &Option<String>, name: &str, max: usize) -> Result<(), ApiError> {
    match field.as_deref() {
        Some(v) if v.chars().count() > max => Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(format!(
                "{name} exceeds {max} characters"
            )),
        )),
        _ => Ok(()),
    }
}
/// Merge the provided metadata fields into a media item, bump
/// `updated_at`, persist, and record an Updated audit entry.
pub async fn update_media(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<UpdateMediaRequest>,
) -> Result<Json<MediaResponse>, ApiError> {
    // Reject oversized text before loading the item.
    validate_optional_text(&req.title, "title", MAX_SHORT_TEXT)?;
    validate_optional_text(&req.artist, "artist", MAX_SHORT_TEXT)?;
    validate_optional_text(&req.album, "album", MAX_SHORT_TEXT)?;
    validate_optional_text(&req.genre, "genre", MAX_SHORT_TEXT)?;
    validate_optional_text(&req.description, "description", MAX_LONG_TEXT)?;
    let mut item = state.storage.get_media(MediaId(id)).await?;
    // Only fields present in the request overwrite existing values.
    if req.title.is_some() {
        item.title = req.title;
    }
    if req.artist.is_some() {
        item.artist = req.artist;
    }
    if req.album.is_some() {
        item.album = req.album;
    }
    if req.genre.is_some() {
        item.genre = req.genre;
    }
    if req.year.is_some() {
        item.year = req.year;
    }
    if req.description.is_some() {
        item.description = req.description;
    }
    item.updated_at = chrono::Utc::now();
    state.storage.update_media(&item).await?;
    pinakes_core::audit::record_action(
        &state.storage,
        Some(item.id),
        pinakes_core::model::AuditAction::Updated,
        None,
    )
    .await?;
    Ok(Json(MediaResponse::from(item)))
}
/// Delete a media record and, best-effort, its thumbnail file.
///
/// The audit entry is written BEFORE the row is deleted to avoid a
/// foreign-key constraint violation.
pub async fn delete_media(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let media_id = MediaId(id);
    // Fetch first so the thumbnail path survives the deletion.
    let item = state.storage.get_media(media_id).await?;
    pinakes_core::audit::record_action(
        &state.storage,
        Some(media_id),
        pinakes_core::model::AuditAction::Deleted,
        None,
    )
    .await?;
    state.storage.delete_media(media_id).await?;
    if let Some(ref thumb_path) = item.thumbnail_path {
        match tokio::fs::remove_file(thumb_path).await {
            Ok(()) => {}
            // An already-missing thumbnail is not worth logging.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
            Err(e) => {
                tracing::warn!(path = %thumb_path.display(), error = %e, "failed to remove thumbnail");
            }
        }
    }
    Ok(Json(serde_json::json!({"deleted": true})))
}
/// Open the media file via the default opener (on the server host)
/// and record an Opened audit entry.
pub async fn open_media(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let item = state.storage.get_media(MediaId(id)).await?;
    pinakes_core::opener::default_opener().open(&item.path)?;
    pinakes_core::audit::record_action(
        &state.storage,
        Some(item.id),
        pinakes_core::model::AuditAction::Opened,
        None,
    )
    .await?;
    Ok(Json(serde_json::json!({"opened": true})))
}
/// Stream the underlying media file, honoring HTTP `Range: bytes=`
/// requests so clients can seek.
///
/// With a valid, satisfiable Range header the response is 206 Partial
/// Content with a `Content-Range` header; otherwise the whole file is
/// streamed, with `Accept-Ranges: bytes` advertised either way.
pub async fn stream_media(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    headers: axum::http::HeaderMap,
) -> Result<axum::response::Response, ApiError> {
    use axum::body::Body;
    use axum::http::{StatusCode, header};
    use tokio::io::{AsyncReadExt, AsyncSeekExt};
    use tokio_util::io::ReaderStream;
    let item = state.storage.get_media(MediaId(id)).await?;
    // Any open failure is reported as FileNotFound; the underlying io
    // error is discarded.
    let file = tokio::fs::File::open(&item.path).await.map_err(|_e| {
        ApiError(pinakes_core::error::PinakesError::FileNotFound(
            item.path.clone(),
        ))
    })?;
    let metadata = file
        .metadata()
        .await
        .map_err(|e| ApiError(pinakes_core::error::PinakesError::Io(e)))?;
    let total_size = metadata.len();
    let content_type = item.media_type.mime_type();
    // Parse the Range header; a malformed or unsatisfiable range falls
    // through to the full-file response below.
    if let Some(range_header) = headers.get(header::RANGE)
        && let Ok(range_str) = range_header.to_str()
        && let Some(range) = parse_range(range_str, total_size)
    {
        let (start, end) = range;
        // `end` is inclusive, hence the +1.
        let content_length = end - start + 1;
        let mut file = file;
        file.seek(std::io::SeekFrom::Start(start))
            .await
            .map_err(|e| ApiError(pinakes_core::error::PinakesError::Io(e)))?;
        // Cap the reader so the streamed body stops exactly at `end`.
        let limited = file.take(content_length);
        let stream = ReaderStream::new(limited);
        let body = Body::from_stream(stream);
        return axum::response::Response::builder()
            .status(StatusCode::PARTIAL_CONTENT)
            .header(header::CONTENT_TYPE, content_type)
            .header(header::CONTENT_LENGTH, content_length)
            .header(header::ACCEPT_RANGES, "bytes")
            .header(
                header::CONTENT_RANGE,
                format!("bytes {start}-{end}/{total_size}"),
            )
            .header(
                header::CONTENT_DISPOSITION,
                format!("inline; filename=\"{}\"", item.file_name),
            )
            .body(body)
            .map_err(|e| {
                ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                    format!("failed to build response: {e}"),
                ))
            });
    }
    // Full response (no Range header)
    let stream = ReaderStream::new(file);
    let body = Body::from_stream(stream);
    axum::response::Response::builder()
        .header(header::CONTENT_TYPE, content_type)
        .header(header::CONTENT_LENGTH, total_size)
        .header(header::ACCEPT_RANGES, "bytes")
        .header(
            header::CONTENT_DISPOSITION,
            format!("inline; filename=\"{}\"", item.file_name),
        )
        .body(body)
        .map_err(|e| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                format!("failed to build response: {e}"),
            ))
        })
}
/// Parse a `Range: bytes=START-END` header value.
/// Returns `Some((start, end))` inclusive, or `None` if malformed or
/// unsatisfiable.
///
/// Fixes two u64 underflow panics in the previous version:
/// - `total_size == 0` made both branches compute `total_size - 1`;
/// - a zero-length suffix (`bytes=-0`) produced the invalid range
///   `(total_size, total_size - 1)`, whose length computation
///   (`end - start + 1`) underflows in the caller.
/// Both cases now return `None`, so the caller serves the full file.
fn parse_range(header: &str, total_size: u64) -> Option<(u64, u64)> {
    let bytes_prefix = header.strip_prefix("bytes=")?;
    let (start_str, end_str) = bytes_prefix.split_once('-')?;
    // An empty file has no satisfiable byte range.
    if total_size == 0 {
        return None;
    }
    if start_str.is_empty() {
        // Suffix range: bytes=-500 means last 500 bytes
        let suffix_len: u64 = end_str.parse().ok()?;
        if suffix_len == 0 {
            return None;
        }
        let start = total_size.saturating_sub(suffix_len);
        Some((start, total_size - 1))
    } else {
        let start: u64 = start_str.parse().ok()?;
        let end = if end_str.is_empty() {
            total_size - 1
        } else {
            end_str.parse::<u64>().ok()?.min(total_size - 1)
        };
        if start > end || start >= total_size {
            return None;
        }
        Some((start, end))
    }
}
/// Import a single file, then apply tag / collection options — but
/// only when the file was newly imported; duplicates are left alone.
pub async fn import_with_options(
    State(state): State<AppState>,
    Json(req): Json<ImportWithOptionsRequest>,
) -> Result<Json<ImportResponse>, ApiError> {
    let outcome = pinakes_core::import::import_file(&state.storage, &req.path).await?;
    if !outcome.was_duplicate {
        apply_import_post_processing(
            &state.storage,
            outcome.media_id,
            req.tag_ids.as_deref(),
            req.new_tags.as_deref(),
            req.collection_id,
        )
        .await;
    }
    Ok(Json(ImportResponse {
        media_id: outcome.media_id.0.to_string(),
        was_duplicate: outcome.was_duplicate,
    }))
}
/// Import up to 10000 paths sequentially, applying tag / collection
/// options to each newly imported item. Per-path failures are recorded
/// in the result list instead of aborting the batch.
pub async fn batch_import(
    State(state): State<AppState>,
    Json(req): Json<BatchImportRequest>,
) -> Result<Json<BatchImportResponse>, ApiError> {
    if req.paths.len() > 10_000 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "batch size exceeds limit of 10000".into(),
            ),
        ));
    }
    let mut results = Vec::with_capacity(req.paths.len());
    let (mut imported, mut duplicates, mut errors) = (0usize, 0usize, 0usize);
    for path in &req.paths {
        let display_path = path.to_string_lossy().to_string();
        match pinakes_core::import::import_file(&state.storage, path).await {
            Ok(outcome) => {
                if outcome.was_duplicate {
                    duplicates += 1;
                } else {
                    imported += 1;
                    apply_import_post_processing(
                        &state.storage,
                        outcome.media_id,
                        req.tag_ids.as_deref(),
                        req.new_tags.as_deref(),
                        req.collection_id,
                    )
                    .await;
                }
                results.push(BatchImportItemResult {
                    path: display_path,
                    media_id: Some(outcome.media_id.0.to_string()),
                    was_duplicate: outcome.was_duplicate,
                    error: None,
                });
            }
            Err(e) => {
                errors += 1;
                results.push(BatchImportItemResult {
                    path: display_path,
                    media_id: None,
                    was_duplicate: false,
                    error: Some(e.to_string()),
                });
            }
        }
    }
    let total = results.len();
    Ok(Json(BatchImportResponse {
        results,
        total,
        imported,
        duplicates,
        errors,
    }))
}
/// Import a directory using the configured ignore patterns and import
/// concurrency, applying tag / collection options to each newly
/// imported item. Per-file failures are reported in the result list.
pub async fn import_directory_endpoint(
    State(state): State<AppState>,
    Json(req): Json<DirectoryImportRequest>,
) -> Result<Json<BatchImportResponse>, ApiError> {
    // Copy what we need out of the config so the read lock is not held
    // across the (potentially long) import.
    let (ignore_patterns, concurrency) = {
        let config = state.config.read().await;
        (
            config.scanning.ignore_patterns.clone(),
            config.scanning.import_concurrency,
        )
    };
    let import_results = pinakes_core::import::import_directory_with_concurrency(
        &state.storage,
        &req.path,
        &ignore_patterns,
        concurrency,
    )
    .await?;
    let mut results = Vec::with_capacity(import_results.len());
    let (mut imported, mut duplicates, mut errors) = (0usize, 0usize, 0usize);
    for entry in import_results {
        match entry {
            Ok(outcome) => {
                if outcome.was_duplicate {
                    duplicates += 1;
                } else {
                    imported += 1;
                    apply_import_post_processing(
                        &state.storage,
                        outcome.media_id,
                        req.tag_ids.as_deref(),
                        req.new_tags.as_deref(),
                        req.collection_id,
                    )
                    .await;
                }
                results.push(BatchImportItemResult {
                    path: outcome.path.to_string_lossy().to_string(),
                    media_id: Some(outcome.media_id.0.to_string()),
                    was_duplicate: outcome.was_duplicate,
                    error: None,
                });
            }
            Err(e) => {
                // The failing path is not carried by the error value.
                errors += 1;
                results.push(BatchImportItemResult {
                    path: String::new(),
                    media_id: None,
                    was_duplicate: false,
                    error: Some(e.to_string()),
                });
            }
        }
    }
    let total = results.len();
    Ok(Json(BatchImportResponse {
        results,
        total,
        imported,
        duplicates,
        errors,
    }))
}
/// Preview which files a directory import would pick up, without importing.
///
/// Accepts a loose JSON body: `path` (required) and `recursive` (optional,
/// default true). The directory must exist and — when root directories are
/// configured — canonicalize to a location under one of them. Dot-prefixed
/// (hidden) entries are skipped, as are files whose path maps to no known
/// `MediaType`.
pub async fn preview_directory(
    State(state): State<AppState>,
    Json(req): Json<serde_json::Value>,
) -> Result<Json<DirectoryPreviewResponse>, ApiError> {
    let path_str = req.get("path").and_then(|v| v.as_str()).ok_or_else(|| {
        pinakes_core::error::PinakesError::InvalidOperation("path required".into())
    })?;
    let recursive = req
        .get("recursive")
        .and_then(|v| v.as_bool())
        .unwrap_or(true);
    let dir = std::path::PathBuf::from(path_str);
    if !dir.is_dir() {
        return Err(pinakes_core::error::PinakesError::FileNotFound(dir).into());
    }
    // Validate the directory is under a configured root (if roots are configured)
    let roots = state.storage.list_root_dirs().await?;
    if !roots.is_empty() {
        // Canonicalize so `..` segments and symlinks can't escape the roots.
        let canonical = dir.canonicalize().map_err(|_| {
            pinakes_core::error::PinakesError::InvalidOperation("cannot resolve path".into())
        })?;
        let allowed = roots.iter().any(|root| canonical.starts_with(root));
        if !allowed {
            return Err(pinakes_core::error::PinakesError::InvalidOperation(
                "path is not under a configured root directory".into(),
            )
            .into());
        }
    }
    // Directory walking is blocking std::fs work; keep it off the async runtime.
    let files: Vec<DirectoryPreviewFile> = tokio::task::spawn_blocking(move || {
        let mut result = Vec::new();
        // Recursive walker; unreadable directories are silently skipped.
        fn walk_dir(
            dir: &std::path::Path,
            recursive: bool,
            result: &mut Vec<DirectoryPreviewFile>,
        ) {
            let Ok(entries) = std::fs::read_dir(dir) else {
                return;
            };
            for entry in entries.flatten() {
                let path = entry.path();
                // Skip hidden files/dirs
                if path
                    .file_name()
                    .map(|n| n.to_string_lossy().starts_with('.'))
                    .unwrap_or(false)
                {
                    continue;
                }
                if path.is_dir() {
                    if recursive {
                        // NOTE(review): symlinked subdirectories are followed here
                        // and are not re-checked against the configured roots —
                        // confirm whether that is intended.
                        walk_dir(&path, recursive, result);
                    }
                } else if path.is_file()
                    && let Some(mt) = pinakes_core::media_type::MediaType::from_path(&path)
                {
                    let size = entry.metadata().ok().map(|m| m.len()).unwrap_or(0);
                    let file_name = path
                        .file_name()
                        .map(|n| n.to_string_lossy().to_string())
                        .unwrap_or_default();
                    // Round-trip through serde to reuse MediaType's serialized name.
                    let media_type = serde_json::to_value(mt)
                        .ok()
                        .and_then(|v| v.as_str().map(String::from))
                        .unwrap_or_default();
                    result.push(DirectoryPreviewFile {
                        path: path.to_string_lossy().to_string(),
                        file_name,
                        media_type,
                        file_size: size,
                    });
                }
            }
        }
        walk_dir(&dir, recursive, &mut result);
        result
    })
    .await
    .map_err(|e| pinakes_core::error::PinakesError::Io(std::io::Error::other(e)))?;
    let total_count = files.len();
    let total_size = files.iter().map(|f| f.file_size).sum();
    Ok(Json(DirectoryPreviewResponse {
        files,
        total_count,
        total_size,
    }))
}
/// Create or overwrite a named custom metadata field on a media item.
///
/// The name must be 1-255 characters and the value no longer than
/// `MAX_LONG_TEXT`. Unrecognized `field_type` strings fall back to `Text`.
pub async fn set_custom_field(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<SetCustomFieldRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    use pinakes_core::model::{CustomField, CustomFieldType};

    let name_len = req.name.len();
    if name_len == 0 || name_len > 255 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "field name must be 1-255 characters".into(),
            ),
        ));
    }
    if req.value.len() > MAX_LONG_TEXT {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(format!(
                "field value exceeds {} characters",
                MAX_LONG_TEXT
            )),
        ));
    }

    // Anything unrecognized is stored as plain text.
    let field_type = match req.field_type.as_str() {
        "number" => CustomFieldType::Number,
        "date" => CustomFieldType::Date,
        "boolean" => CustomFieldType::Boolean,
        _ => CustomFieldType::Text,
    };
    let field = CustomField {
        field_type,
        value: req.value,
    };

    state
        .storage
        .set_custom_field(MediaId(id), &req.name, &field)
        .await?;
    Ok(Json(serde_json::json!({"set": true})))
}
/// Remove a named custom field from a media item.
pub async fn delete_custom_field(
    State(state): State<AppState>,
    Path((id, name)): Path<(Uuid, String)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let media = MediaId(id);
    state.storage.delete_custom_field(media, &name).await?;
    Ok(Json(serde_json::json!({"deleted": true})))
}
pub async fn batch_tag(
State(state): State<AppState>,
Json(req): Json<BatchTagRequest>,
) -> Result<Json<BatchOperationResponse>, ApiError> {
if req.media_ids.len() > 10_000 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"batch size exceeds limit of 10000".into(),
),
));
}
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state
.storage
.batch_tag_media(&media_ids, &req.tag_ids)
.await
{
Ok(count) => Ok(Json(BatchOperationResponse {
processed: count as usize,
errors: Vec::new(),
})),
Err(e) => Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}
}
/// Delete every media item in the library.
///
/// An audit entry is recorded first; failure to record it is logged as a
/// warning but does not block the deletion.
pub async fn delete_all_media(
    State(state): State<AppState>,
) -> Result<Json<BatchOperationResponse>, ApiError> {
    let audit = pinakes_core::audit::record_action(
        &state.storage,
        None,
        pinakes_core::model::AuditAction::Deleted,
        Some("delete all media".to_string()),
    )
    .await;
    if let Err(e) = audit {
        tracing::warn!(error = %e, "failed to record audit entry");
    }
    let response = match state.storage.delete_all_media().await {
        Ok(count) => BatchOperationResponse {
            processed: count as usize,
            errors: Vec::new(),
        },
        Err(e) => BatchOperationResponse {
            processed: 0,
            errors: vec![e.to_string()],
        },
    };
    Ok(Json(response))
}
pub async fn batch_delete(
State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>,
) -> Result<Json<BatchOperationResponse>, ApiError> {
if req.media_ids.len() > 10_000 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"batch size exceeds limit of 10000".into(),
),
));
}
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect();
// Record audit entries BEFORE delete to avoid FK constraint violation.
// Use None for media_id since they'll be deleted; include ID in details.
for id in &media_ids {
if let Err(e) = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::Deleted,
Some(format!("batch delete: media_id={}", id.0)),
)
.await
{
tracing::warn!(error = %e, "failed to record audit entry");
}
}
match state.storage.batch_delete_media(&media_ids).await {
Ok(count) => Ok(Json(BatchOperationResponse {
processed: count as usize,
errors: Vec::new(),
})),
Err(e) => Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}
}
/// Append media items to a collection, preserving the request order.
///
/// Per-item failures are collected into the response; successful additions
/// still count toward `processed`.
pub async fn batch_add_to_collection(
    State(state): State<AppState>,
    Json(req): Json<BatchCollectionRequest>,
) -> Result<Json<BatchOperationResponse>, ApiError> {
    if req.media_ids.len() > 10_000 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "batch size exceeds limit of 10000".into(),
            ),
        ));
    }
    let mut response = BatchOperationResponse {
        processed: 0,
        errors: Vec::new(),
    };
    // The enumeration index becomes the member's position within the collection.
    for (position, media_id) in req.media_ids.iter().enumerate() {
        let outcome = pinakes_core::collections::add_member(
            &state.storage,
            req.collection_id,
            MediaId(*media_id),
            position as i32,
        )
        .await;
        match outcome {
            Ok(_) => response.processed += 1,
            Err(e) => response.errors.push(format!("{media_id}: {e}")),
        }
    }
    Ok(Json(response))
}
pub async fn batch_update(
State(state): State<AppState>,
Json(req): Json<BatchUpdateRequest>,
) -> Result<Json<BatchOperationResponse>, ApiError> {
if req.media_ids.len() > 10_000 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"batch size exceeds limit of 10000".into(),
),
));
}
let media_ids: Vec<MediaId> = req.media_ids.iter().map(|id| MediaId(*id)).collect();
match state
.storage
.batch_update_media(
&media_ids,
req.title.as_deref(),
req.artist.as_deref(),
req.album.as_deref(),
req.genre.as_deref(),
req.year,
req.description.as_deref(),
)
.await
{
Ok(count) => Ok(Json(BatchOperationResponse {
processed: count as usize,
errors: Vec::new(),
})),
Err(e) => Ok(Json(BatchOperationResponse {
processed: 0,
errors: vec![e.to_string()],
})),
}
}
/// Stream a media item's thumbnail file to the client.
///
/// Fails with `NotFound` when the item has no recorded thumbnail, and with
/// `FileNotFound` when the recorded path cannot be opened.
pub async fn get_thumbnail(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
    use axum::body::Body;
    use axum::http::header;
    use tokio_util::io::ReaderStream;
    let item = state.storage.get_media(MediaId(id)).await?;
    let thumb_path = item.thumbnail_path.ok_or_else(|| {
        ApiError(pinakes_core::error::PinakesError::NotFound(
            "no thumbnail available".into(),
        ))
    })?;
    let file = tokio::fs::File::open(&thumb_path)
        .await
        .map_err(|_e| ApiError(pinakes_core::error::PinakesError::FileNotFound(thumb_path)))?;
    // Stream the file instead of buffering it fully in memory.
    let stream = ReaderStream::new(file);
    let body = Body::from_stream(stream);
    // NOTE(review): content type is hard-coded to JPEG — this assumes the
    // thumbnailer only ever produces JPEG files; confirm, or derive the MIME
    // type from the file extension.
    axum::response::Response::builder()
        .header(header::CONTENT_TYPE, "image/jpeg")
        // Clients/proxies may cache thumbnails for 24 hours.
        .header(header::CACHE_CONTROL, "public, max-age=86400")
        .body(body)
        .map_err(|e| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                format!("failed to build response: {e}"),
            ))
        })
}
/// Return the total number of media items in the library.
pub async fn get_media_count(
    State(state): State<AppState>,
) -> Result<Json<MediaCountResponse>, ApiError> {
    let count = state.storage.count_media().await?;
    let response = MediaCountResponse { count };
    Ok(Json(response))
}

View file

@ -0,0 +1,18 @@
// HTTP handler modules, one per API resource area.
pub mod audit;
pub mod auth;
pub mod collections;
pub mod config;
pub mod database;
pub mod duplicates;
pub mod export;
pub mod health;
pub mod integrity;
pub mod jobs;
pub mod media;
pub mod saved_searches;
pub mod scan;
pub mod scheduled_tasks;
pub mod search;
pub mod statistics;
pub mod tags;
pub mod webhooks;

View file

@ -0,0 +1,76 @@
use axum::Json;
use axum::extract::{Path, State};
use serde::{Deserialize, Serialize};
use crate::error::ApiError;
use crate::state::AppState;
/// Request body for creating a saved search.
#[derive(Debug, Deserialize)]
pub struct CreateSavedSearchRequest {
    pub name: String,
    pub query: String,
    // Optional sort key applied when the saved search is executed.
    pub sort_order: Option<String>,
}

/// Wire representation of a stored saved search.
#[derive(Debug, Serialize)]
pub struct SavedSearchResponse {
    pub id: String,
    pub name: String,
    pub query: String,
    pub sort_order: Option<String>,
    pub created_at: chrono::DateTime<chrono::Utc>,
}
/// Create a named saved search and return its wire representation.
///
/// The response echoes the request fields; `created_at` is this handler's
/// own timestamp, which may differ slightly from the value the storage
/// layer persists.
pub async fn create_saved_search(
    State(state): State<AppState>,
    Json(req): Json<CreateSavedSearchRequest>,
) -> Result<Json<SavedSearchResponse>, ApiError> {
    // UUIDv7 keeps saved searches roughly time-ordered.
    let id = uuid::Uuid::now_v7();
    // `?` performs the PinakesError -> ApiError conversion, matching the
    // error-propagation style used by every other handler (the explicit
    // `.map_err(ApiError)` was redundant).
    state
        .storage
        .save_search(id, &req.name, &req.query, req.sort_order.as_deref())
        .await?;
    Ok(Json(SavedSearchResponse {
        id: id.to_string(),
        name: req.name,
        query: req.query,
        sort_order: req.sort_order,
        created_at: chrono::Utc::now(),
    }))
}
/// List all saved searches.
pub async fn list_saved_searches(
    State(state): State<AppState>,
) -> Result<Json<Vec<SavedSearchResponse>>, ApiError> {
    // `?` performs the error conversion; the previous explicit
    // `.map_err(ApiError)` was redundant and inconsistent with the
    // rest of the handlers.
    let searches = state.storage.list_saved_searches().await?;
    Ok(Json(
        searches
            .into_iter()
            .map(|s| SavedSearchResponse {
                id: s.id.to_string(),
                name: s.name,
                query: s.query,
                sort_order: s.sort_order,
                created_at: s.created_at,
            })
            .collect(),
    ))
}
/// Delete a saved search by ID.
pub async fn delete_saved_search(
    State(state): State<AppState>,
    Path(id): Path<uuid::Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // `?` performs the error conversion; the explicit `.map_err(ApiError)`
    // was redundant and inconsistent with the rest of the handlers.
    state.storage.delete_saved_search(id).await?;
    Ok(Json(serde_json::json!({ "deleted": true })))
}

View file

@ -0,0 +1,30 @@
use axum::Json;
use axum::extract::State;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
/// Trigger a scan as a background job. Returns the job ID immediately.
pub async fn trigger_scan(
    State(state): State<AppState>,
    Json(req): Json<ScanRequest>,
) -> Result<Json<ScanJobResponse>, ApiError> {
    // Enqueue instead of scanning inline; progress is tracked separately.
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::Scan { path: req.path })
        .await;
    Ok(Json(ScanJobResponse {
        job_id: job_id.to_string(),
    }))
}
/// Report the current scan progress snapshot.
pub async fn scan_status(State(state): State<AppState>) -> Json<ScanStatusResponse> {
    let snapshot = state.scan_progress.snapshot();
    Json(ScanStatusResponse {
        scanning: snapshot.scanning,
        files_found: snapshot.files_found,
        files_processed: snapshot.files_processed,
        // Field order matters: the length is read before `errors` is moved.
        error_count: snapshot.errors.len(),
        errors: snapshot.errors,
    })
}

View file

@ -0,0 +1,55 @@
use axum::Json;
use axum::extract::{Path, State};
use crate::dto::ScheduledTaskResponse;
use crate::error::ApiError;
use crate::state::AppState;
/// List all tasks known to the scheduler, with run timestamps as RFC 3339.
pub async fn list_scheduled_tasks(
    State(state): State<AppState>,
) -> Result<Json<Vec<ScheduledTaskResponse>>, ApiError> {
    let tasks = state.scheduler.list_tasks().await;
    let mut responses = Vec::with_capacity(tasks.len());
    for t in tasks {
        responses.push(ScheduledTaskResponse {
            id: t.id,
            name: t.name,
            schedule: t.schedule.display_string(),
            enabled: t.enabled,
            last_run: t.last_run.map(|dt| dt.to_rfc3339()),
            next_run: t.next_run.map(|dt| dt.to_rfc3339()),
            last_status: t.last_status,
        });
    }
    Ok(Json(responses))
}
pub async fn toggle_scheduled_task(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.toggle_task(&id).await {
Some(enabled) => Ok(Json(serde_json::json!({
"id": id,
"enabled": enabled,
}))),
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
))),
}
}
pub async fn run_scheduled_task_now(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.run_now(&id).await {
Some(job_id) => Ok(Json(serde_json::json!({
"id": id,
"job_id": job_id,
}))),
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
))),
}
}

View file

@ -0,0 +1,87 @@
use axum::Json;
use axum::extract::{Query, State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::Pagination;
use pinakes_core::search::{SearchRequest, SortOrder, parse_search_query};
/// Map an optional query-string sort key to a `SortOrder`.
///
/// Missing or unrecognized keys default to relevance ordering.
fn resolve_sort(sort: Option<&str>) -> SortOrder {
    let Some(key) = sort else {
        return SortOrder::Relevance;
    };
    match key {
        "date_asc" => SortOrder::DateAsc,
        "date_desc" => SortOrder::DateDesc,
        "name_asc" => SortOrder::NameAsc,
        "name_desc" => SortOrder::NameDesc,
        "size_asc" => SortOrder::SizeAsc,
        "size_desc" => SortOrder::SizeDesc,
        _ => SortOrder::Relevance,
    }
}
/// GET /search: parse the query string, run the search, return matches.
///
/// Queries longer than 2048 characters are rejected; page size is capped
/// at 1000 (default 50, offset 0).
pub async fn search(
    State(state): State<AppState>,
    Query(params): Query<SearchParams>,
) -> Result<Json<SearchResponse>, ApiError> {
    if params.q.len() > 2048 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "search query exceeds maximum length of 2048 characters".into(),
            ),
        ));
    }
    let pagination = Pagination::new(
        params.offset.unwrap_or(0),
        params.limit.unwrap_or(50).min(1000),
        None,
    );
    let request = SearchRequest {
        query: parse_search_query(&params.q)?,
        sort: resolve_sort(params.sort.as_deref()),
        pagination,
    };
    let results = state.storage.search(&request).await?;
    let items = results.items.into_iter().map(MediaResponse::from).collect();
    Ok(Json(SearchResponse {
        items,
        total_count: results.total_count,
    }))
}
/// POST /search: same semantics as the GET variant, but with a JSON body
/// (useful for queries too long or complex for a URL).
pub async fn search_post(
    State(state): State<AppState>,
    Json(body): Json<SearchRequestBody>,
) -> Result<Json<SearchResponse>, ApiError> {
    if body.q.len() > 2048 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "search query exceeds maximum length of 2048 characters".into(),
            ),
        ));
    }
    let pagination = Pagination::new(
        body.offset.unwrap_or(0),
        body.limit.unwrap_or(50).min(1000),
        None,
    );
    let request = SearchRequest {
        query: parse_search_query(&body.q)?,
        sort: resolve_sort(body.sort.as_deref()),
        pagination,
    };
    let results = state.storage.search(&request).await?;
    let items = results.items.into_iter().map(MediaResponse::from).collect();
    Ok(Json(SearchResponse {
        items,
        total_count: results.total_count,
    }))
}

View file

@ -0,0 +1,13 @@
use axum::Json;
use axum::extract::State;
use crate::dto::LibraryStatisticsResponse;
use crate::error::ApiError;
use crate::state::AppState;
/// Return library-wide aggregate statistics.
pub async fn library_statistics(
    State(state): State<AppState>,
) -> Result<Json<LibraryStatisticsResponse>, ApiError> {
    let stats = state.storage.library_statistics().await?;
    Ok(Json(stats.into()))
}

View file

@ -0,0 +1,70 @@
use axum::Json;
use axum::extract::{Path, State};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
/// Create a tag, optionally nested under `parent_id`.
///
/// The name must be between 1 and 255 characters.
pub async fn create_tag(
    State(state): State<AppState>,
    Json(req): Json<CreateTagRequest>,
) -> Result<Json<TagResponse>, ApiError> {
    let valid_name = !req.name.is_empty() && req.name.len() <= 255;
    if !valid_name {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "tag name must be 1-255 characters".into(),
            ),
        ));
    }
    let tag = pinakes_core::tags::create_tag(&state.storage, &req.name, req.parent_id).await?;
    Ok(Json(tag.into()))
}
/// List every tag in the library.
pub async fn list_tags(State(state): State<AppState>) -> Result<Json<Vec<TagResponse>>, ApiError> {
    let tags: Vec<TagResponse> = state
        .storage
        .list_tags()
        .await?
        .into_iter()
        .map(Into::into)
        .collect();
    Ok(Json(tags))
}
/// Fetch a single tag by ID.
pub async fn get_tag(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<TagResponse>, ApiError> {
    let tag = state.storage.get_tag(id).await?;
    Ok(Json(tag.into()))
}
/// Delete a tag by ID.
pub async fn delete_tag(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.delete_tag(id).await?;
    Ok(Json(serde_json::json!({ "deleted": true })))
}
/// Attach an existing tag to a media item.
pub async fn tag_media(
    State(state): State<AppState>,
    Path(media_id): Path<Uuid>,
    Json(req): Json<TagMediaRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let media = MediaId(media_id);
    pinakes_core::tags::tag_media(&state.storage, media, req.tag_id).await?;
    Ok(Json(serde_json::json!({ "tagged": true })))
}
/// Detach a tag from a media item.
pub async fn untag_media(
    State(state): State<AppState>,
    Path((media_id, tag_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let media = MediaId(media_id);
    pinakes_core::tags::untag_media(&state.storage, media, tag_id).await?;
    Ok(Json(serde_json::json!({ "untagged": true })))
}
/// List the tags attached to a media item.
pub async fn get_media_tags(
    State(state): State<AppState>,
    Path(media_id): Path<Uuid>,
) -> Result<Json<Vec<TagResponse>>, ApiError> {
    let tags: Vec<TagResponse> = state
        .storage
        .get_media_tags(MediaId(media_id))
        .await?
        .into_iter()
        .map(Into::into)
        .collect();
    Ok(Json(tags))
}

View file

@ -0,0 +1,40 @@
use axum::Json;
use axum::extract::State;
use serde::Serialize;
use crate::error::ApiError;
use crate::state::AppState;
/// Read-only view of a configured webhook endpoint.
#[derive(Debug, Serialize)]
pub struct WebhookInfo {
    pub url: String,
    // Event names this webhook subscribes to.
    pub events: Vec<String>,
}
/// List webhook endpoints from the current configuration.
pub async fn list_webhooks(
    State(state): State<AppState>,
) -> Result<Json<Vec<WebhookInfo>>, ApiError> {
    let config = state.config.read().await;
    let mut hooks = Vec::with_capacity(config.webhooks.len());
    for h in &config.webhooks {
        hooks.push(WebhookInfo {
            url: h.url.clone(),
            events: h.events.clone(),
        });
    }
    Ok(Json(hooks))
}
/// Report how many webhooks are configured.
///
/// NOTE(review): despite `"test_sent": true`, no test event is actually
/// delivered here — the comments below defer delivery to the event bus.
/// Either wire up a real test delivery or rename the response field.
pub async fn test_webhook(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let config = state.config.read().await;
    let count = config.webhooks.len();
    // Emit a test event to all configured webhooks
    // In production, the event bus would handle delivery
    Ok(Json(serde_json::json!({
        "webhooks_configured": count,
        "test_sent": true
    })))
}

View file

@ -0,0 +1,50 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock;
use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{Config, UserRole};
use pinakes_core::jobs::JobQueue;
use pinakes_core::scan::ScanProgress;
use pinakes_core::scheduler::TaskScheduler;
use pinakes_core::storage::DynStorageBackend;
/// Default session TTL: 24 hours.
pub const SESSION_TTL_SECS: i64 = 24 * 60 * 60;

/// Metadata tracked for one authenticated session token.
#[derive(Debug, Clone)]
pub struct SessionInfo {
    pub username: String,
    pub role: UserRole,
    // Creation time; compared against SESSION_TTL_SECS in is_expired().
    pub created_at: chrono::DateTime<chrono::Utc>,
}
impl SessionInfo {
    /// Returns true if this session has exceeded its TTL.
    pub fn is_expired(&self) -> bool {
        let elapsed = chrono::Utc::now().signed_duration_since(self.created_at);
        elapsed.num_seconds() > SESSION_TTL_SECS
    }
}
/// Shared, async-guarded map from session token to session metadata.
pub type SessionStore = Arc<RwLock<HashMap<String, SessionInfo>>>;

/// Remove all expired sessions from the store.
pub async fn cleanup_expired_sessions(sessions: &SessionStore) {
    sessions.write().await.retain(|_, info| !info.is_expired());
}
/// Shared application state, cloned into every axum handler.
#[derive(Clone)]
pub struct AppState {
    // Storage backend behind a trait object (SQLite or Postgres).
    pub storage: DynStorageBackend,
    // Live configuration, readable and writable at runtime.
    pub config: Arc<RwLock<Config>>,
    // Path the config was loaded from, if any — presumably used when
    // persisting config changes; confirm at the write site.
    pub config_path: Option<PathBuf>,
    pub scan_progress: ScanProgress,
    // In-memory session-token map (see SessionStore / SESSION_TTL_SECS).
    pub sessions: SessionStore,
    pub job_queue: Arc<JobQueue>,
    pub cache: Arc<CacheLayer>,
    pub scheduler: Arc<TaskScheduler>,
}

View file

@ -0,0 +1,212 @@
use std::net::SocketAddr;
use std::sync::Arc;
use axum::body::Body;
use axum::extract::ConnectInfo;
use axum::http::{Request, StatusCode};
use http_body_util::BodyExt;
use tokio::sync::RwLock;
use tower::ServiceExt;
use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{
AccountsConfig, Config, DirectoryConfig, JobsConfig, ScanningConfig, ServerConfig,
SqliteConfig, StorageBackendType, StorageConfig, ThumbnailConfig, UiConfig, WebhookConfig,
};
use pinakes_core::jobs::JobQueue;
use pinakes_core::storage::StorageBackend;
use pinakes_core::storage::sqlite::SqliteBackend;
/// Fake socket address for tests (governor needs ConnectInfo<SocketAddr>)
fn test_addr() -> ConnectInfo<SocketAddr> {
    let addr: SocketAddr = "127.0.0.1:9999".parse().unwrap();
    ConnectInfo(addr)
}
/// Build a GET request with ConnectInfo for rate limiter compatibility
fn get(uri: &str) -> Request<Body> {
    let mut request = Request::builder().uri(uri).body(Body::empty()).unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a POST request with ConnectInfo
fn post_json(uri: &str, body: &str) -> Request<Body> {
    let builder = Request::builder()
        .method("POST")
        .uri(uri)
        .header("content-type", "application/json");
    let mut request = builder.body(Body::from(body.to_string())).unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a full router backed by an in-memory SQLite database with
/// migrations applied — no disk, no network, no running jobs.
async fn setup_app() -> axum::Router {
    let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
    backend.run_migrations().await.expect("migrations");
    let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
    // Minimal config: no roots, no watching, no auth (api_key: None).
    let config = Config {
        storage: StorageConfig {
            backend: StorageBackendType::Sqlite,
            sqlite: Some(SqliteConfig {
                path: ":memory:".into(),
            }),
            postgres: None,
        },
        directories: DirectoryConfig { roots: vec![] },
        scanning: ScanningConfig {
            watch: false,
            poll_interval_secs: 300,
            ignore_patterns: vec![],
            import_concurrency: 8,
        },
        server: ServerConfig {
            host: "127.0.0.1".to_string(),
            port: 3000,
            api_key: None,
        },
        ui: UiConfig::default(),
        accounts: AccountsConfig::default(),
        jobs: JobsConfig::default(),
        thumbnails: ThumbnailConfig::default(),
        webhooks: Vec::<WebhookConfig>::new(),
        scheduled_tasks: vec![],
    };
    // Single-worker queue whose executor is a no-op: jobs are accepted
    // but never actually executed in these tests.
    let job_queue = JobQueue::new(1, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
    let config = Arc::new(RwLock::new(config));
    let scheduler = pinakes_core::scheduler::TaskScheduler::new(
        job_queue.clone(),
        tokio_util::sync::CancellationToken::new(),
        config.clone(),
        None,
    );
    let state = pinakes_server::state::AppState {
        storage,
        config,
        config_path: None,
        scan_progress: pinakes_core::scan::ScanProgress::new(),
        sessions: Arc::new(RwLock::new(std::collections::HashMap::new())),
        job_queue,
        cache: Arc::new(CacheLayer::new(60)),
        scheduler: Arc::new(scheduler),
    };
    pinakes_server::app::create_router(state)
}
#[tokio::test]
async fn test_list_media_empty() {
    // A fresh library must list zero media items.
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/media")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let items: Vec<serde_json::Value> = serde_json::from_slice(&bytes).unwrap();
    assert!(items.is_empty());
}
#[tokio::test]
async fn test_create_and_list_tags() {
    let app = setup_app().await;
    // Create a tag, then verify it shows up in the listing.
    let create_resp = app
        .clone()
        .oneshot(post_json("/api/v1/tags", r#"{"name":"Music"}"#))
        .await
        .unwrap();
    assert_eq!(create_resp.status(), StatusCode::OK);
    let list_resp = app.oneshot(get("/api/v1/tags")).await.unwrap();
    assert_eq!(list_resp.status(), StatusCode::OK);
    let bytes = list_resp.into_body().collect().await.unwrap().to_bytes();
    let tags: Vec<serde_json::Value> = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(tags.len(), 1);
    assert_eq!(tags[0]["name"], "Music");
}
#[tokio::test]
async fn test_search_empty() {
    // Searching an empty library succeeds with zero matches.
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/search?q=test")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let result: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(result["total_count"], 0);
}
#[tokio::test]
async fn test_media_not_found() {
    // A well-formed but unknown UUID yields 404.
    let app = setup_app().await;
    let missing = "/api/v1/media/00000000-0000-0000-0000-000000000000";
    let resp = app.oneshot(get(missing)).await.unwrap();
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn test_collections_crud() {
    let app = setup_app().await;
    // Create a manual collection, then confirm it is listed.
    let body = r#"{"name":"Favorites","kind":"manual"}"#;
    let create_resp = app
        .clone()
        .oneshot(post_json("/api/v1/collections", body))
        .await
        .unwrap();
    assert_eq!(create_resp.status(), StatusCode::OK);
    let list_resp = app.oneshot(get("/api/v1/collections")).await.unwrap();
    assert_eq!(list_resp.status(), StatusCode::OK);
    let bytes = list_resp.into_body().collect().await.unwrap().to_bytes();
    let cols: Vec<serde_json::Value> = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(cols.len(), 1);
    assert_eq!(cols[0]["name"], "Favorites");
}
#[tokio::test]
async fn test_statistics_endpoint() {
    // An empty library reports zero media and zero bytes.
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/statistics")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let stats: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(stats["total_media"], 0);
    assert_eq!(stats["total_size_bytes"], 0);
}
#[tokio::test]
async fn test_scheduled_tasks_endpoint() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/tasks/scheduled")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let bytes = resp.into_body().collect().await.unwrap().to_bytes();
    let tasks: Vec<serde_json::Value> = serde_json::from_slice(&bytes).unwrap();
    assert!(!tasks.is_empty(), "should have default scheduled tasks");
    // Verify structure of first task
    let first = &tasks[0];
    assert!(first["id"].is_string());
    assert!(first["name"].is_string());
    assert!(first["schedule"].is_string());
}

View file

@ -0,0 +1,20 @@
# Terminal UI client crate for pinakes.
[package]
name = "pinakes-tui"
edition.workspace = true
version.workspace = true
license.workspace = true

# Versions and features are inherited from [workspace.dependencies].
[dependencies]
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
toml = { workspace = true }
chrono = { workspace = true }
uuid = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
reqwest = { workspace = true }
ratatui = { workspace = true }
crossterm = { workspace = true }
File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,455 @@
use anyhow::Result;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Async HTTP client for the pinakes server's /api/v1 REST endpoints.
#[derive(Clone)]
pub struct ApiClient {
    client: Client,
    // Base URL with any trailing '/' stripped (see `new`).
    base_url: String,
}
// Response types (mirror server DTOs)

/// A media item as returned by the server.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct MediaResponse {
    pub id: String,
    pub path: String,
    pub file_name: String,
    pub media_type: String,
    pub content_hash: String,
    pub file_size: u64,
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub duration_secs: Option<f64>,
    pub description: Option<String>,
    // Defaults to false when the server omits the field.
    #[serde(default)]
    pub has_thumbnail: bool,
    pub custom_fields: HashMap<String, CustomFieldResponse>,
    pub created_at: String,
    pub updated_at: String,
}

/// One custom metadata field: a type tag plus its stringified value.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CustomFieldResponse {
    pub field_type: String,
    pub value: String,
}

/// Result of importing a single file.
#[derive(Debug, Clone, Deserialize)]
pub struct ImportResponse {
    pub media_id: String,
    pub was_duplicate: bool,
}

/// A tag, optionally nested under a parent tag.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct TagResponse {
    pub id: String,
    pub name: String,
    pub parent_id: Option<String>,
    pub created_at: String,
}

/// A collection; `kind` distinguishes manual from filter-backed ones.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CollectionResponse {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    pub kind: String,
    pub filter_query: Option<String>,
    pub created_at: String,
    pub updated_at: String,
}

/// One page of search results plus the overall match count.
#[derive(Debug, Clone, Deserialize)]
pub struct SearchResponse {
    pub items: Vec<MediaResponse>,
    pub total_count: u64,
}

/// A single audit-log entry.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AuditEntryResponse {
    pub id: String,
    pub media_id: Option<String>,
    pub action: String,
    pub details: Option<String>,
    pub timestamp: String,
}

/// Summary of a scan run.
#[derive(Debug, Clone, Deserialize)]
pub struct ScanResponse {
    pub files_found: usize,
    pub files_processed: usize,
    pub errors: Vec<String>,
}

/// Backend-level database statistics.
#[derive(Debug, Clone, Deserialize)]
pub struct DatabaseStatsResponse {
    pub media_count: u64,
    pub tag_count: u64,
    pub collection_count: u64,
    pub audit_count: u64,
    pub database_size_bytes: u64,
    pub backend_name: String,
}

/// All media items that share one content hash.
#[derive(Debug, Clone, Deserialize)]
pub struct DuplicateGroupResponse {
    pub content_hash: String,
    pub items: Vec<MediaResponse>,
}

/// A background job; `kind` and `status` are kept as raw JSON values.
#[derive(Debug, Clone, Deserialize)]
pub struct JobResponse {
    pub id: String,
    pub kind: serde_json::Value,
    pub status: serde_json::Value,
    pub created_at: String,
    pub updated_at: String,
}

/// A scheduler task with its run history (timestamps as RFC 3339 strings).
#[derive(Debug, Clone, Deserialize)]
pub struct ScheduledTaskResponse {
    pub id: String,
    pub name: String,
    pub schedule: String,
    pub enabled: bool,
    pub last_run: Option<String>,
    pub next_run: Option<String>,
    pub last_status: Option<String>,
}

/// Library-wide aggregate statistics.
#[derive(Debug, Clone, Deserialize)]
pub struct LibraryStatisticsResponse {
    pub total_media: u64,
    pub total_size_bytes: u64,
    pub avg_file_size_bytes: u64,
    pub media_by_type: Vec<TypeCount>,
    pub storage_by_type: Vec<TypeCount>,
    pub newest_item: Option<String>,
    pub oldest_item: Option<String>,
    pub top_tags: Vec<TypeCount>,
    pub top_collections: Vec<TypeCount>,
    pub total_tags: u64,
    pub total_collections: u64,
    pub total_duplicates: u64,
}

/// Generic (name, count) pair used in statistics breakdowns.
#[derive(Debug, Clone, Deserialize)]
pub struct TypeCount {
    pub name: String,
    pub count: u64,
}
impl ApiClient {
/// Construct a client for `base_url`; trailing slashes are stripped.
pub fn new(base_url: &str) -> Self {
    let base_url = base_url.trim_end_matches('/').to_string();
    Self {
        client: Client::new(),
        base_url,
    }
}
/// Join `path` onto the versioned API prefix.
fn url(&self, path: &str) -> String {
    format!("{base}/api/v1{path}", base = self.base_url)
}
/// Fetch one page of media items.
pub async fn list_media(&self, offset: u64, limit: u64) -> Result<Vec<MediaResponse>> {
    let query = [("offset", offset.to_string()), ("limit", limit.to_string())];
    let response = self
        .client
        .get(self.url("/media"))
        .query(&query)
        .send()
        .await?
        .error_for_status()?;
    Ok(response.json().await?)
}
/// Fetch a single media item by ID.
pub async fn get_media(&self, id: &str) -> Result<MediaResponse> {
    let url = self.url(&format!("/media/{id}"));
    let response = self.client.get(url).send().await?.error_for_status()?;
    Ok(response.json().await?)
}
/// Ask the server to import the file at `path`.
pub async fn import_file(&self, path: &str) -> Result<ImportResponse> {
    let body = serde_json::json!({ "path": path });
    let response = self
        .client
        .post(self.url("/media/import"))
        .json(&body)
        .send()
        .await?
        .error_for_status()?;
    Ok(response.json().await?)
}
/// Delete a media item by ID.
pub async fn delete_media(&self, id: &str) -> Result<()> {
    let url = self.url(&format!("/media/{id}"));
    self.client.delete(url).send().await?.error_for_status()?;
    Ok(())
}
/// Ask the server to open a media item (server-side open endpoint).
pub async fn open_media(&self, id: &str) -> Result<()> {
    let url = self.url(&format!("/media/{id}/open"));
    self.client.post(url).send().await?.error_for_status()?;
    Ok(())
}
/// Run a search query with pagination.
pub async fn search(&self, query: &str, offset: u64, limit: u64) -> Result<SearchResponse> {
    let params = [
        ("q", query.to_string()),
        ("offset", offset.to_string()),
        ("limit", limit.to_string()),
    ];
    let response = self
        .client
        .get(self.url("/search"))
        .query(&params)
        .send()
        .await?
        .error_for_status()?;
    Ok(response.json().await?)
}
/// List all tags.
pub async fn list_tags(&self) -> Result<Vec<TagResponse>> {
    let response = self
        .client
        .get(self.url("/tags"))
        .send()
        .await?
        .error_for_status()?;
    Ok(response.json().await?)
}
/// Create a tag, optionally nested under `parent_id`.
pub async fn create_tag(&self, name: &str, parent_id: Option<&str>) -> Result<TagResponse> {
    // Only include "parent_id" in the payload when one was supplied.
    let body = match parent_id {
        Some(pid) => serde_json::json!({ "name": name, "parent_id": pid }),
        None => serde_json::json!({ "name": name }),
    };
    let response = self
        .client
        .post(self.url("/tags"))
        .json(&body)
        .send()
        .await?
        .error_for_status()?;
    Ok(response.json().await?)
}
pub async fn delete_tag(&self, id: &str) -> Result<()> {
self.client
.delete(self.url(&format!("/tags/{id}")))
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn tag_media(&self, media_id: &str, tag_id: &str) -> Result<()> {
self.client
.post(self.url(&format!("/media/{media_id}/tags")))
.json(&serde_json::json!({"tag_id": tag_id}))
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn untag_media(&self, media_id: &str, tag_id: &str) -> Result<()> {
self.client
.delete(self.url(&format!("/media/{media_id}/tags/{tag_id}")))
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn get_media_tags(&self, media_id: &str) -> Result<Vec<TagResponse>> {
let resp = self
.client
.get(self.url(&format!("/media/{media_id}/tags")))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn list_collections(&self) -> Result<Vec<CollectionResponse>> {
let resp = self
.client
.get(self.url("/collections"))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn delete_collection(&self, id: &str) -> Result<()> {
self.client
.delete(self.url(&format!("/collections/{id}")))
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn trigger_scan(&self, path: Option<&str>) -> Result<Vec<ScanResponse>> {
let body = match path {
Some(p) => serde_json::json!({"path": p}),
None => serde_json::json!({"path": null}),
};
let resp = self
.client
.post(self.url("/scan"))
.json(&body)
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn list_audit(&self, offset: u64, limit: u64) -> Result<Vec<AuditEntryResponse>> {
let resp = self
.client
.get(self.url("/audit"))
.query(&[("offset", offset.to_string()), ("limit", limit.to_string())])
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn find_duplicates(&self) -> Result<Vec<DuplicateGroupResponse>> {
let resp = self
.client
.get(self.url("/duplicates"))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn database_stats(&self) -> Result<DatabaseStatsResponse> {
let resp = self
.client
.get(self.url("/database/stats"))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn list_jobs(&self) -> Result<Vec<JobResponse>> {
let resp = self
.client
.get(self.url("/jobs"))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn vacuum_database(&self) -> Result<()> {
self.client
.post(self.url("/database/vacuum"))
.json(&serde_json::json!({}))
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn update_media(
&self,
id: &str,
updates: serde_json::Value,
) -> Result<MediaResponse> {
let resp = self
.client
.patch(self.url(&format!("/media/{id}")))
.json(&updates)
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn library_statistics(&self) -> Result<LibraryStatisticsResponse> {
let resp = self
.client
.get(self.url("/statistics"))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn list_scheduled_tasks(&self) -> Result<Vec<ScheduledTaskResponse>> {
let resp = self
.client
.get(self.url("/tasks/scheduled"))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}
pub async fn toggle_scheduled_task(&self, id: &str) -> Result<()> {
self.client
.post(self.url(&format!("/tasks/scheduled/{id}/toggle")))
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn run_task_now(&self, id: &str) -> Result<()> {
self.client
.post(self.url(&format!("/tasks/scheduled/{id}/run-now")))
.send()
.await?
.error_for_status()?;
Ok(())
}
}

View file

@ -0,0 +1,74 @@
use std::time::Duration;
use crossterm::event::{self, Event as CrosstermEvent, KeyEvent};
use tokio::sync::mpsc;
/// Events consumed by the main application loop.
#[derive(Debug)]
pub enum AppEvent {
    /// A key press read from the terminal.
    Key(KeyEvent),
    /// Periodic tick emitted when no input arrives within the poll timeout.
    Tick,
    /// Completion (or failure) of a background API call.
    ApiResult(ApiResult),
}
/// Payloads delivered back to the UI when a background API call finishes.
#[derive(Debug)]
#[allow(dead_code)]
pub enum ApiResult {
    MediaList(Vec<crate::client::MediaResponse>),
    SearchResults(crate::client::SearchResponse),
    // Presumably tags of the currently selected item vs. all server tags —
    // TODO confirm the distinction against the handlers in app.rs.
    Tags(Vec<crate::client::TagResponse>),
    AllTags(Vec<crate::client::TagResponse>),
    Collections(Vec<crate::client::CollectionResponse>),
    ImportDone(crate::client::ImportResponse),
    ScanDone(Vec<crate::client::ScanResponse>),
    AuditLog(Vec<crate::client::AuditEntryResponse>),
    Duplicates(Vec<crate::client::DuplicateGroupResponse>),
    DatabaseStats(crate::client::DatabaseStatsResponse),
    Statistics(crate::client::LibraryStatisticsResponse),
    ScheduledTasks(Vec<crate::client::ScheduledTaskResponse>),
    /// A media update succeeded (no payload; caller refetches as needed).
    MediaUpdated,
    /// Human-readable error message for display (e.g. in the status bar).
    Error(String),
}
/// Funnels blocking terminal input and async API results into a single
/// unbounded channel of [`AppEvent`]s.
pub struct EventHandler {
    // Kept so `sender()` can hand out clones to background tasks.
    tx: mpsc::UnboundedSender<AppEvent>,
    rx: mpsc::UnboundedReceiver<AppEvent>,
}
impl EventHandler {
    /// Spawn the input-polling thread and return a handler owning both channel ends.
    ///
    /// The thread polls the terminal with `tick_rate` as the timeout: key events
    /// are forwarded as [`AppEvent::Key`], and a poll timeout is turned into an
    /// [`AppEvent::Tick`]. The thread exits once the receiving side is dropped.
    pub fn new(tick_rate: Duration) -> Self {
        let (tx, rx) = mpsc::unbounded_channel();
        let event_tx = tx.clone();
        std::thread::spawn(move || {
            loop {
                match event::poll(tick_rate) {
                    Ok(true) => {
                        // Only key events are forwarded; resize/mouse events
                        // (and read errors) are silently dropped.
                        if let Ok(CrosstermEvent::Key(key)) = event::read() {
                            if event_tx.send(AppEvent::Key(key)).is_err() {
                                // Receiver gone: the app is shutting down.
                                break;
                            }
                        }
                    }
                    Ok(false) => {
                        // Poll timed out — emit a tick so the UI can refresh.
                        if event_tx.send(AppEvent::Tick).is_err() {
                            break;
                        }
                    }
                    Err(e) => {
                        tracing::warn!(error = %e, "event poll failed");
                    }
                }
            }
        });
        Self { tx, rx }
    }

    /// Clone of the sender so async tasks can inject [`AppEvent::ApiResult`]s.
    pub fn sender(&self) -> mpsc::UnboundedSender<AppEvent> {
        self.tx.clone()
    }

    /// Await the next event; `None` means every sender has been dropped.
    pub async fn next(&mut self) -> Option<AppEvent> {
        self.rx.recv().await
    }
}

View file

@ -0,0 +1,97 @@
use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
use crate::app::View;
/// High-level UI actions produced by [`handle_key`] from raw key input.
pub enum Action {
    Quit,
    // Cursor / focus movement.
    NavigateUp,
    NavigateDown,
    NavigateLeft,
    NavigateRight,
    Select,
    Back,
    // Mode/workflow triggers.
    Search,
    Import,
    Delete,
    // Delete the highlighted row in list views (tags/collections).
    DeleteSelected,
    Open,
    // View switches.
    TagView,
    CollectionView,
    AuditView,
    SettingsView,
    DuplicatesView,
    DatabaseView,
    QueueView,
    StatisticsView,
    TasksView,
    ScanTrigger,
    Refresh,
    NextTab,
    PrevTab,
    // Paging / jumps.
    PageUp,
    PageDown,
    GoTop,
    GoBottom,
    // Tag management.
    CreateTag,
    TagMedia,
    UntagMedia,
    Help,
    // Text-entry events forwarded while in input mode.
    Char(char),
    Backspace,
    // Key had no binding; ignore.
    None,
}
/// Translate a raw key event into a UI [`Action`].
///
/// While in input mode only Esc, Enter, printable characters, and Backspace
/// are meaningful; everything else is ignored. Outside input mode most
/// bindings are global, with 'd' and 'e' depending on the current view.
pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Action {
    // Text-entry mode: forward editing keys, swallow the rest.
    if in_input_mode {
        return match key.code {
            KeyCode::Esc => Action::Back,
            KeyCode::Enter => Action::Select,
            KeyCode::Char(c) => Action::Char(c),
            KeyCode::Backspace => Action::Backspace,
            _ => Action::None,
        };
    }
    match (key.code, key.modifiers) {
        // Quit on 'q' (any modifiers) or Ctrl-C.
        (KeyCode::Char('q'), _) | (KeyCode::Char('c'), KeyModifiers::CONTROL) => Action::Quit,
        // Vim-style and arrow-key navigation.
        (KeyCode::Up | KeyCode::Char('k'), _) => Action::NavigateUp,
        (KeyCode::Down | KeyCode::Char('j'), _) => Action::NavigateDown,
        (KeyCode::Left | KeyCode::Char('h'), _) => Action::NavigateLeft,
        (KeyCode::Right | KeyCode::Char('l'), _) => Action::NavigateRight,
        (KeyCode::Home, _) => Action::GoTop,
        (KeyCode::End, _) => Action::GoBottom,
        (KeyCode::Enter, _) => Action::Select,
        (KeyCode::Esc, _) => Action::Back,
        (KeyCode::Char('/'), _) => Action::Search,
        (KeyCode::Char('?'), _) => Action::Help,
        (KeyCode::Char('i'), _) => Action::Import,
        // 'd' deletes the highlighted list row in tag/collection views,
        // and the current media item elsewhere.
        (KeyCode::Char('d'), _) => match current_view {
            View::Tags | View::Collections => Action::DeleteSelected,
            _ => Action::Delete,
        },
        (KeyCode::Char('o'), _) => Action::Open,
        // 'e' opens the metadata editor, but only from the detail view.
        (KeyCode::Char('e'), _) => match current_view {
            View::Detail => Action::Select,
            _ => Action::None,
        },
        // View switches (uppercase letters for the less common views).
        (KeyCode::Char('t'), _) => Action::TagView,
        (KeyCode::Char('c'), _) => Action::CollectionView,
        (KeyCode::Char('a'), _) => Action::AuditView,
        (KeyCode::Char('S'), _) => Action::SettingsView,
        (KeyCode::Char('D'), _) => Action::DuplicatesView,
        (KeyCode::Char('B'), _) => Action::DatabaseView,
        (KeyCode::Char('Q'), _) => Action::QueueView,
        (KeyCode::Char('X'), _) => Action::StatisticsView,
        (KeyCode::Char('T'), _) => Action::TasksView,
        (KeyCode::Char('s'), _) => Action::ScanTrigger,
        (KeyCode::Char('r'), _) => Action::Refresh,
        (KeyCode::Char('n'), _) => Action::CreateTag,
        (KeyCode::Char('+'), _) => Action::TagMedia,
        (KeyCode::Char('-'), _) => Action::UntagMedia,
        (KeyCode::Tab, _) => Action::NextTab,
        (KeyCode::BackTab, _) => Action::PrevTab,
        (KeyCode::PageUp, _) => Action::PageUp,
        (KeyCode::PageDown, _) => Action::PageDown,
        _ => Action::None,
    }
}

View file

@ -0,0 +1,55 @@
use anyhow::Result;
use clap::Parser;
use tracing_subscriber::EnvFilter;
mod app;
mod client;
mod event;
mod input;
mod ui;
/// Pinakes terminal UI client
#[derive(Parser)]
#[command(name = "pinakes-tui", version, about)]
struct Cli {
    /// Server URL to connect to
    // Also settable via the PINAKES_SERVER_URL environment variable.
    #[arg(
        short,
        long,
        env = "PINAKES_SERVER_URL",
        default_value = "http://localhost:3000"
    )]
    server: String,
    /// Set log level (trace, debug, info, warn, error)
    // Only takes effect when --log-file is given; see main().
    #[arg(long, default_value = "warn")]
    log_level: String,
    /// Log to file instead of stderr (avoids corrupting TUI display)
    #[arg(long)]
    log_file: Option<std::path::PathBuf>,
}
#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();
    // Logging must never write to the terminal while the TUI owns it, so it
    // either goes to a file or is disabled entirely.
    match &cli.log_file {
        Some(log_path) => {
            let filter =
                EnvFilter::try_new(&cli.log_level).unwrap_or_else(|_| EnvFilter::new("warn"));
            let file = std::fs::File::create(log_path)?;
            tracing_subscriber::fmt()
                .with_env_filter(filter)
                .with_writer(file)
                .with_ansi(false)
                .init();
        }
        None => {
            // No log file: suppress all output to avoid corrupting the display.
            tracing_subscriber::fmt()
                .with_env_filter(EnvFilter::new("off"))
                .init();
        }
    }
    app::run(&cli.server).await
}

View file

@ -0,0 +1,85 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::Span;
use ratatui::widgets::{Block, Borders, Cell, Row, Table};
use super::format_date;
use crate::app::AppState;
/// Map an audit-log action string to a display color.
///
/// Several spellings of each action are accepted; anything unrecognized
/// falls back to white.
fn action_color(action: &str) -> Color {
    if matches!(action, "imported" | "import" | "created") {
        Color::Green
    } else if matches!(action, "deleted" | "delete" | "removed") {
        Color::Red
    } else if matches!(action, "tagged" | "tag_added") {
        Color::Cyan
    } else if matches!(action, "untagged" | "tag_removed") {
        Color::Yellow
    } else if matches!(action, "updated" | "modified" | "edited") {
        Color::Blue
    } else if matches!(action, "scanned" | "scan") {
        Color::Magenta
    } else {
        Color::White
    }
}
/// Render the audit-log table: color-coded action, truncated media ID,
/// details, and date, highlighting the selected row.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let header = Row::new(vec!["Action", "Media ID", "Details", "Date"]).style(
        Style::default()
            .fg(Color::Yellow)
            .add_modifier(Modifier::BOLD),
    );
    let rows: Vec<Row> = state
        .audit_log
        .iter()
        .enumerate()
        .map(|(i, entry)| {
            // Invert colors on the selected row.
            let style = if Some(i) == state.audit_selected {
                Style::default().fg(Color::Black).bg(Color::Cyan)
            } else {
                Style::default()
            };
            let color = action_color(&entry.action);
            let action_cell = Cell::from(Span::styled(
                entry.action.clone(),
                Style::default().fg(color).add_modifier(Modifier::BOLD),
            ));
            // Truncate long media IDs for display. Using `get` instead of
            // `&id[..12]` avoids a panic if byte 12 is not a char boundary
            // (IDs are presumably ASCII UUIDs, but don't rely on it).
            let media_display = entry
                .media_id
                .as_deref()
                .map(|id| match id.get(..12) {
                    Some(prefix) if id.len() > 12 => format!("{prefix}..."),
                    _ => id.to_string(),
                })
                .unwrap_or_else(|| "-".into());
            Row::new(vec![
                action_cell,
                Cell::from(media_display),
                Cell::from(entry.details.clone().unwrap_or_else(|| "-".into())),
                Cell::from(format_date(&entry.timestamp).to_string()),
            ])
            .style(style)
        })
        .collect();
    let title = format!(" Audit Log ({}) ", state.audit_log.len());
    let table = Table::new(
        rows,
        [
            ratatui::layout::Constraint::Percentage(18),
            ratatui::layout::Constraint::Percentage(22),
            ratatui::layout::Constraint::Percentage(40),
            ratatui::layout::Constraint::Percentage(20),
        ],
    )
    .header(header)
    .block(Block::default().borders(Borders::ALL).title(title));
    f.render_widget(table, area);
}

View file

@ -0,0 +1,64 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::widgets::{Block, Borders, Row, Table};
use super::format_date;
use crate::app::AppState;
/// Render the collections table: name, kind, description, member info, and
/// creation date, highlighting the selected row.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let header = Row::new(vec!["Name", "Kind", "Description", "Members", "Created"]).style(
        Style::default()
            .fg(Color::Yellow)
            .add_modifier(Modifier::BOLD),
    );
    let rows: Vec<Row> = state
        .collections
        .iter()
        .enumerate()
        .map(|(i, col)| {
            // Invert colors on the selected row.
            let style = if Some(i) == state.collection_selected {
                Style::default().fg(Color::Black).bg(Color::Cyan)
            } else {
                Style::default()
            };
            // We show the filter_query as a proxy for member info when kind is "smart"
            let members_display = if col.kind == "smart" {
                col.filter_query
                    .as_deref()
                    .map(|q| format!("filter: {q}"))
                    .unwrap_or_else(|| "-".to_string())
            } else {
                "-".to_string()
            };
            Row::new(vec![
                col.name.clone(),
                col.kind.clone(),
                col.description.clone().unwrap_or_else(|| "-".into()),
                members_display,
                format_date(&col.created_at).to_string(),
            ])
            .style(style)
        })
        .collect();
    let title = format!(" Collections ({}) ", state.collections.len());
    let table = Table::new(
        rows,
        [
            ratatui::layout::Constraint::Percentage(25),
            ratatui::layout::Constraint::Percentage(12),
            ratatui::layout::Constraint::Percentage(28),
            ratatui::layout::Constraint::Percentage(15),
            ratatui::layout::Constraint::Percentage(20),
        ],
    )
    .header(header)
    .block(Block::default().borders(Borders::ALL).title(title));
    f.render_widget(table, area);
}

View file

@ -0,0 +1,55 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Paragraph};
use crate::app::AppState;
/// Render the database view: key/value statistics (if loaded) plus the
/// available maintenance actions.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let label_style = Style::default()
        .fg(Color::Yellow)
        .add_modifier(Modifier::BOLD);
    let value_style = Style::default().fg(Color::White);
    let section_style = Style::default()
        .fg(Color::Cyan)
        .add_modifier(Modifier::BOLD);
    let pad = "  ";
    let mut lines = vec![
        Line::default(),
        Line::from(Span::styled("--- Database Statistics ---", section_style)),
    ];
    if let Some(ref stats) = state.database_stats {
        // `stats` iterates as (key, value) pairs — presumably a map from the
        // stats endpoint; keys are left-padded to align the value column.
        for (key, value) in stats {
            lines.push(Line::from(vec![
                Span::raw(pad),
                Span::styled(format!("{key:<20}"), label_style),
                Span::styled(value.to_string(), value_style),
            ]));
        }
    } else {
        // Stats are fetched lazily; prompt the user to refresh.
        lines.push(Line::from(vec![
            Span::raw(pad),
            Span::raw("Press 'r' to load database statistics"),
        ]));
    }
    lines.push(Line::default());
    lines.push(Line::from(Span::styled("--- Actions ---", section_style)));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::raw("v: Vacuum database"),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::raw("Esc: Return to library"),
    ]));
    let paragraph =
        Paragraph::new(lines).block(Block::default().borders(Borders::ALL).title(" Database "));
    f.render_widget(paragraph, area);
}

View file

@ -0,0 +1,223 @@
use ratatui::Frame;
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Paragraph};
use super::{format_date, format_duration, format_size, media_type_color};
use crate::app::AppState;
/// Render the detail view for the selected media item: file info, metadata,
/// optional description/custom fields/tags, and timestamps.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    // Nothing selected: show a placeholder and bail out early.
    let item = match &state.selected_media {
        Some(item) => item,
        None => {
            let msg = Paragraph::new("No item selected")
                .block(Block::default().borders(Borders::ALL).title(" Detail "));
            f.render_widget(msg, area);
            return;
        }
    };
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Min(0)])
        .split(area);
    let label_style = Style::default()
        .fg(Color::Yellow)
        .add_modifier(Modifier::BOLD);
    let value_style = Style::default().fg(Color::White);
    let dim_style = Style::default().fg(Color::DarkGray);
    let pad = "  ";
    // Labels are left-padded to a fixed width so values line up in a column.
    let label_width = 14;
    let make_label = |name: &str| -> String { format!("{name:<label_width$}") };
    let mut lines: Vec<Line> = Vec::new();
    // Section: File Info
    lines.push(Line::from(Span::styled(
        "--- File Info ---",
        Style::default()
            .fg(Color::Cyan)
            .add_modifier(Modifier::BOLD),
    )));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Name"), label_style),
        Span::styled(&item.file_name, value_style),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Path"), label_style),
        Span::styled(&item.path, dim_style),
    ]));
    let type_color = media_type_color(&item.media_type);
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Type"), label_style),
        Span::styled(&item.media_type, Style::default().fg(type_color)),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Size"), label_style),
        Span::styled(format_size(item.file_size), value_style),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Hash"), label_style),
        Span::styled(&item.content_hash, dim_style),
    ]));
    // Only shown when a thumbnail exists; absence of the row means "no".
    if item.has_thumbnail {
        lines.push(Line::from(vec![
            Span::raw(pad),
            Span::styled(make_label("Thumbnail"), label_style),
            Span::styled("Yes", Style::default().fg(Color::Green)),
        ]));
    }
    lines.push(Line::default()); // blank line
    // Section: Metadata — missing optional fields render as "-".
    lines.push(Line::from(Span::styled(
        "--- Metadata ---",
        Style::default()
            .fg(Color::Cyan)
            .add_modifier(Modifier::BOLD),
    )));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Title"), label_style),
        Span::styled(item.title.as_deref().unwrap_or("-"), value_style),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Artist"), label_style),
        Span::styled(item.artist.as_deref().unwrap_or("-"), value_style),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Album"), label_style),
        Span::styled(item.album.as_deref().unwrap_or("-"), value_style),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Genre"), label_style),
        Span::styled(item.genre.as_deref().unwrap_or("-"), value_style),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Year"), label_style),
        Span::styled(
            item.year
                .map(|y| y.to_string())
                .unwrap_or_else(|| "-".to_string()),
            value_style,
        ),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Duration"), label_style),
        Span::styled(
            item.duration_secs
                .map(format_duration)
                .unwrap_or_else(|| "-".to_string()),
            value_style,
        ),
    ]));
    // Description — only rendered when present and non-empty.
    if let Some(ref desc) = item.description
        && !desc.is_empty()
    {
        lines.push(Line::default());
        lines.push(Line::from(vec![
            Span::raw(pad),
            Span::styled(make_label("Description"), label_style),
            Span::styled(desc.as_str(), value_style),
        ]));
    }
    // Custom fields — sorted by key for a stable display order.
    if !item.custom_fields.is_empty() {
        lines.push(Line::default());
        lines.push(Line::from(Span::styled(
            "--- Custom Fields ---",
            Style::default()
                .fg(Color::Cyan)
                .add_modifier(Modifier::BOLD),
        )));
        let mut fields: Vec<_> = item.custom_fields.iter().collect();
        fields.sort_by_key(|(k, _)| k.as_str());
        for (key, field) in fields {
            lines.push(Line::from(vec![
                Span::raw(pad),
                Span::styled(format!("{key:<label_width$}"), label_style),
                Span::styled(
                    format!("{} ({})", field.value, field.field_type),
                    value_style,
                ),
            ]));
        }
    }
    // Tags section — `state.tags` presumably holds the selected item's tags;
    // TODO confirm against where app.rs populates it.
    if !state.tags.is_empty() {
        lines.push(Line::default());
        lines.push(Line::from(Span::styled(
            "--- Tags ---",
            Style::default()
                .fg(Color::Cyan)
                .add_modifier(Modifier::BOLD),
        )));
        let tag_names: Vec<&str> = state.tags.iter().map(|t| t.name.as_str()).collect();
        lines.push(Line::from(vec![
            Span::raw(pad),
            Span::styled(tag_names.join(", "), Style::default().fg(Color::Green)),
        ]));
    }
    lines.push(Line::default());
    // Section: Timestamps
    lines.push(Line::from(Span::styled(
        "--- Timestamps ---",
        Style::default()
            .fg(Color::Cyan)
            .add_modifier(Modifier::BOLD),
    )));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Created"), label_style),
        Span::styled(format_date(&item.created_at), dim_style),
    ]));
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(make_label("Updated"), label_style),
        Span::styled(format_date(&item.updated_at), dim_style),
    ]));
    // Window title prefers the metadata title, falling back to the file name.
    let title = if let Some(ref title_str) = item.title {
        format!(" Detail: {} ", title_str)
    } else {
        format!(" Detail: {} ", item.file_name)
    };
    let detail = Paragraph::new(lines).block(Block::default().borders(Borders::ALL).title(title));
    f.render_widget(detail, chunks[0]);
}

View file

@ -0,0 +1,56 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, List, ListItem};
use crate::app::AppState;
/// Render duplicate groups as a flat list: a bold header per group followed
/// by one indented line per member file, with a blank spacer between groups.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let items: Vec<ListItem> = if state.duplicate_groups.is_empty() {
        vec![ListItem::new(Line::from(Span::styled(
            " No duplicates found. Press 'r' to refresh.",
            Style::default().fg(Color::DarkGray),
        )))]
    } else {
        let mut list_items = Vec::new();
        for (i, group) in state.duplicate_groups.iter().enumerate() {
            // All members of a group share a content hash, so show the first one's.
            let header = format!(
                "Group {} ({} items, hash: {})",
                i + 1,
                group.len(),
                group
                    .first()
                    .map(|m| m.content_hash.as_str())
                    .unwrap_or("?")
            );
            list_items.push(ListItem::new(Line::from(Span::styled(
                header,
                Style::default()
                    .fg(Color::Yellow)
                    .add_modifier(Modifier::BOLD),
            ))));
            for item in group {
                let line = format!("  {} - {}", item.file_name, item.path);
                // `list_items.len()` here is the index this row is about to
                // occupy, so the selection index counts flattened rows
                // (headers and spacers included).
                // NOTE(review): confirm app.rs tracks duplicates_selected with
                // the same flattened-row numbering.
                let is_selected = state
                    .duplicates_selected
                    .map(|sel| sel == list_items.len())
                    .unwrap_or(false);
                let style = if is_selected {
                    Style::default()
                        .fg(Color::Cyan)
                        .add_modifier(Modifier::BOLD)
                } else {
                    Style::default().fg(Color::White)
                };
                list_items.push(ListItem::new(Line::from(Span::styled(line, style))));
            }
            // Blank spacer row between groups.
            list_items.push(ListItem::new(Line::default()));
        }
        list_items
    };
    let list = List::new(items).block(Block::default().borders(Borders::ALL).title(" Duplicates "));
    f.render_widget(list, area);
}

View file

@ -0,0 +1,65 @@
use ratatui::Frame;
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Paragraph};
use crate::app::AppState;
/// Render the import view: a path input box on top and usage help below.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Length(3), Constraint::Min(0)])
        .split(area);
    // Input box is highlighted cyan while the app is in input mode.
    let input = Paragraph::new(state.import_input.as_str())
        .block(
            Block::default()
                .borders(Borders::ALL)
                .title(" Import File (enter path and press Enter) "),
        )
        .style(if state.input_mode {
            Style::default().fg(Color::Cyan)
        } else {
            Style::default()
        });
    f.render_widget(input, chunks[0]);
    let label_style = Style::default()
        .fg(Color::Yellow)
        .add_modifier(Modifier::BOLD);
    let key_style = Style::default()
        .fg(Color::Cyan)
        .add_modifier(Modifier::BOLD);
    let help_lines = vec![
        Line::default(),
        Line::from(Span::styled(
            " Import a file or trigger a library scan",
            label_style,
        )),
        Line::default(),
        Line::from(vec![
            Span::styled(" Enter", key_style),
            Span::raw("  Import the file at the entered path"),
        ]),
        Line::from(vec![
            Span::styled(" Esc", key_style),
            Span::raw("    Cancel and return to library"),
        ]),
        Line::from(vec![
            Span::styled(" s", key_style),
            Span::raw("      Trigger a full library scan (scans all configured directories)"),
        ]),
        Line::default(),
        Line::from(Span::styled(" Tips:", label_style)),
        Line::from("  - Enter an absolute path to a media file (e.g. /home/user/music/song.mp3)"),
        Line::from("  - The file will be copied into the managed library"),
        Line::from("  - Duplicates are detected by content hash and will be skipped"),
        Line::from("  - Press 's' (without typing a path) to scan all library directories"),
    ];
    let help =
        Paragraph::new(help_lines).block(Block::default().borders(Borders::ALL).title(" Help "));
    f.render_widget(help, chunks[1]);
}

View file

@ -0,0 +1,75 @@
use ratatui::Frame;
use ratatui::layout::{Constraint, Rect};
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::Span;
use ratatui::widgets::{Block, Borders, Cell, Row, Table};
use super::{format_duration, format_size, media_type_color};
use crate::app::AppState;
/// Render the main library table (title, type, duration, year, size),
/// highlighting the selected row and showing the current page in the title.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let header = Row::new(vec!["Title / Name", "Type", "Duration", "Year", "Size"]).style(
        Style::default()
            .fg(Color::Yellow)
            .add_modifier(Modifier::BOLD),
    );
    let rows: Vec<Row> = state
        .media_list
        .iter()
        .enumerate()
        .map(|(i, item)| {
            // Invert colors on the selected row.
            let style = if Some(i) == state.selected_index {
                Style::default().fg(Color::Black).bg(Color::Cyan)
            } else {
                Style::default()
            };
            // Prefer the metadata title; fall back to the file name.
            let display_name = item.title.as_deref().unwrap_or(&item.file_name).to_string();
            let type_color = media_type_color(&item.media_type);
            let type_cell = Cell::from(Span::styled(
                item.media_type.clone(),
                Style::default().fg(type_color),
            ));
            let duration = item
                .duration_secs
                .map(format_duration)
                .unwrap_or_else(|| "-".to_string());
            let year = item
                .year
                .map(|y| y.to_string())
                .unwrap_or_else(|| "-".to_string());
            Row::new(vec![
                Cell::from(display_name),
                type_cell,
                Cell::from(duration),
                Cell::from(year),
                Cell::from(format_size(item.file_size)),
            ])
            .style(style)
        })
        .collect();
    // checked_div guards against a zero page_size (e.g. uninitialized state),
    // which would otherwise panic; that degenerate case is shown as page 1.
    let page = state
        .page_offset
        .checked_div(state.page_size)
        .map_or(1, |p| p + 1);
    let item_count = state.media_list.len();
    let title = format!(" Library (page {page}, {item_count} items) ");
    let table = Table::new(
        rows,
        [
            Constraint::Percentage(35),
            Constraint::Percentage(20),
            Constraint::Percentage(15),
            Constraint::Percentage(10),
            Constraint::Percentage(20),
        ],
    )
    .header(header)
    .block(Block::default().borders(Borders::ALL).title(title));
    f.render_widget(table, area);
}

View file

@ -0,0 +1,83 @@
use ratatui::Frame;
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Paragraph};
use crate::app::AppState;
/// Render the metadata editor: a header naming the file being edited and a
/// list of editable fields with the active one highlighted and cursor-marked.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Length(3), Constraint::Min(0)])
        .split(area);
    // Header
    let title = if let Some(ref media) = state.selected_media {
        format!(" Edit: {} ", media.file_name)
    } else {
        " Edit Metadata ".to_string()
    };
    let header = Paragraph::new(Line::from(Span::styled(
        &title,
        Style::default()
            .fg(Color::Cyan)
            .add_modifier(Modifier::BOLD),
    )))
    .block(Block::default().borders(Borders::ALL));
    f.render_widget(header, chunks[0]);
    // Edit fields
    let label_style = Style::default()
        .fg(Color::Yellow)
        .add_modifier(Modifier::BOLD);
    let value_style = Style::default().fg(Color::White);
    let active_style = Style::default()
        .fg(Color::Green)
        .add_modifier(Modifier::BOLD);
    let pad = "  ";
    // Field order must match the edit_field_index cycling in app.rs.
    let fields = [
        ("Title", &state.edit_title),
        ("Artist", &state.edit_artist),
        ("Album", &state.edit_album),
        ("Genre", &state.edit_genre),
        ("Year", &state.edit_year),
        ("Description", &state.edit_description),
    ];
    let mut lines = Vec::new();
    lines.push(Line::default());
    for (i, (label, value)) in fields.iter().enumerate() {
        let is_active = state.edit_field_index == Some(i);
        let style = if is_active { active_style } else { label_style };
        // "> " marks the field currently receiving input.
        let cursor = if is_active { "> " } else { pad };
        lines.push(Line::from(vec![
            Span::raw(cursor),
            Span::styled(format!("{label:<14}"), style),
            Span::styled(value.as_str(), value_style),
            // Trailing "_" acts as a text cursor on the active field only.
            if is_active {
                Span::styled("_", Style::default().fg(Color::Green))
            } else {
                Span::raw("")
            },
        ]));
    }
    lines.push(Line::default());
    lines.push(Line::from(vec![
        Span::raw(pad),
        Span::styled(
            "Tab: Next field  Enter: Save  Esc: Cancel",
            Style::default().fg(Color::DarkGray),
        ),
    ]));
    let editor =
        Paragraph::new(lines).block(Block::default().borders(Borders::ALL).title(" Fields "));
    f.render_widget(editor, chunks[1]);
}

View file

@ -0,0 +1,190 @@
pub mod audit;
pub mod collections;
pub mod database;
pub mod detail;
pub mod duplicates;
pub mod import;
pub mod library;
pub mod metadata_edit;
pub mod queue;
pub mod search;
pub mod settings;
pub mod statistics;
pub mod tags;
pub mod tasks;
use ratatui::Frame;
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Paragraph, Tabs};
use crate::app::{AppState, View};
/// Format a file size in bytes into a human-readable string.
///
/// Uses binary (1024-based) units: bytes as an integer, KB and MB with one
/// decimal place, GB with two.
pub fn format_size(bytes: u64) -> String {
    const KIB: f64 = 1024.0;
    const MIB: f64 = KIB * 1024.0;
    const GIB: f64 = MIB * 1024.0;
    let b = bytes as f64;
    if b < KIB {
        format!("{bytes} B")
    } else if b < MIB {
        format!("{:.1} KB", b / KIB)
    } else if b < GIB {
        format!("{:.1} MB", b / MIB)
    } else {
        format!("{:.2} GB", b / GIB)
    }
}
/// Format duration in seconds into hh:mm:ss format.
///
/// Fractional seconds are truncated; the hour component is omitted when zero
/// (producing mm:ss).
pub fn format_duration(secs: f64) -> String {
    let whole = secs as u64;
    let h = whole / 3600;
    let rem = whole % 3600;
    let (m, s) = (rem / 60, rem % 60);
    match h {
        0 => format!("{m:02}:{s:02}"),
        _ => format!("{h:02}:{m:02}:{s:02}"),
    }
}
/// Trim a timestamp string to just the date portion (YYYY-MM-DD).
///
/// Timestamps are typically "2024-01-15T10:30:00Z" or similar. Returns the
/// first 10 bytes; when the input is shorter than 10 bytes — or byte 10 is
/// not a char boundary — the input is returned unchanged. Using `str::get`
/// instead of `&timestamp[..10]` avoids a panic on unexpected non-ASCII input.
pub fn format_date(timestamp: &str) -> &str {
    timestamp.get(..10).unwrap_or(timestamp)
}
/// Return a color based on media type string.
///
/// Matches MIME-style prefixes; unrecognized types render white.
pub fn media_type_color(media_type: &str) -> Color {
    let prefixes = [
        ("audio", Color::Green),
        ("video", Color::Magenta),
        ("image", Color::Yellow),
        ("application/pdf", Color::Red),
        ("text", Color::Cyan),
    ];
    for (prefix, color) in prefixes {
        if media_type.starts_with(prefix) {
            return color;
        }
    }
    Color::White
}
/// Top-level frame renderer: tab bar, active view, and status bar.
pub fn render(f: &mut Frame, state: &AppState) {
    // Fixed-height tab bar and status line; the view takes whatever remains.
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Length(3),
            Constraint::Min(0),
            Constraint::Length(1),
        ])
        .split(f.area());
    render_tabs(f, state, chunks[0]);
    // Dispatch the middle area to the renderer for the current view.
    match state.current_view {
        View::Library => library::render(f, state, chunks[1]),
        View::Search => search::render(f, state, chunks[1]),
        View::Detail => detail::render(f, state, chunks[1]),
        View::Tags => tags::render(f, state, chunks[1]),
        View::Collections => collections::render(f, state, chunks[1]),
        View::Audit => audit::render(f, state, chunks[1]),
        View::Import => import::render(f, state, chunks[1]),
        View::Settings => settings::render(f, state, chunks[1]),
        View::Duplicates => duplicates::render(f, state, chunks[1]),
        View::Database => database::render(f, state, chunks[1]),
        View::MetadataEdit => metadata_edit::render(f, state, chunks[1]),
        View::Queue => queue::render(f, state, chunks[1]),
        View::Statistics => statistics::render(f, state, chunks[1]),
        View::Tasks => tasks::render(f, state, chunks[1]),
    }
    render_status_bar(f, state, chunks[2]);
}
/// Render the tab bar, mapping the current view onto one of eight tabs.
fn render_tabs(f: &mut Frame, state: &AppState, area: Rect) {
    let titles: Vec<Line> = vec![
        "Library",
        "Search",
        "Tags",
        "Collections",
        "Audit",
        "Queue",
        "Stats",
        "Tasks",
    ]
    .into_iter()
    .map(|t| Line::from(Span::styled(t, Style::default().fg(Color::White))))
    .collect();
    // Several views have no tab of their own and highlight the tab they were
    // reached from (e.g. Detail/Import under Library, Duplicates under Audit).
    let selected = match state.current_view {
        View::Library | View::Detail | View::Import | View::Settings | View::MetadataEdit => 0,
        View::Search => 1,
        View::Tags => 2,
        View::Collections => 3,
        View::Audit | View::Duplicates | View::Database => 4,
        View::Queue => 5,
        View::Statistics => 6,
        View::Tasks => 7,
    };
    let tabs = Tabs::new(titles)
        .block(Block::default().borders(Borders::ALL).title(" Pinakes "))
        .select(selected)
        .style(Style::default().fg(Color::Gray))
        .highlight_style(
            Style::default()
                .fg(Color::Cyan)
                .add_modifier(Modifier::BOLD),
        );
    f.render_widget(tabs, area);
}
/// Render the one-line status bar: a transient status message when present,
/// otherwise per-view keybinding hints.
fn render_status_bar(f: &mut Frame, state: &AppState, area: Rect) {
    // A pending status message (e.g. API result/error) takes priority.
    let status = if let Some(ref msg) = state.status_message {
        msg.clone()
    } else {
        match state.current_view {
            View::Tags => {
                " q:Quit  j/k:Nav  Home/End:Top/Bot  n:New  d:Delete  r:Refresh  Tab:Switch"
                    .to_string()
            }
            View::Collections => {
                " q:Quit  j/k:Nav  Home/End:Top/Bot  d:Delete  r:Refresh  Tab:Switch".to_string()
            }
            View::Audit => {
                " q:Quit  j/k:Nav  Home/End:Top/Bot  r:Refresh  Tab:Switch".to_string()
            }
            View::Detail => {
                " q:Quit  Esc:Back  o:Open  e:Edit  +:Tag  -:Untag  r:Refresh  ?:Help".to_string()
            }
            View::Import => {
                " Enter:Import  Esc:Cancel  s:Scan libraries  ?:Help".to_string()
            }
            View::Settings => " q:Quit  Esc:Back  ?:Help".to_string(),
            View::Duplicates => " q:Quit  j/k:Nav  r:Refresh  Esc:Back".to_string(),
            View::Database => " q:Quit  v:Vacuum  r:Refresh  Esc:Back".to_string(),
            View::MetadataEdit => {
                " Tab:Next field  Enter:Save  Esc:Cancel".to_string()
            }
            // NOTE(review): N/P/R/S/C hints here have no bindings in input.rs —
            // confirm they are handled elsewhere (e.g. per-view logic in app.rs).
            View::Queue => {
                " q:Quit  j/k:Nav  Enter:Play  d:Remove  N:Next  P:Prev  R:Repeat  S:Shuffle  C:Clear"
                    .to_string()
            }
            View::Statistics => " q:Quit  r:Refresh  Esc:Back  ?:Help".to_string(),
            View::Tasks => {
                " q:Quit  j/k:Nav  Enter:Toggle  R:Run Now  r:Refresh  Esc:Back".to_string()
            }
            // Library, Search, and any future views share the default hint line.
            _ => {
                " q:Quit  /:Search  i:Import  o:Open  t:Tags  c:Coll  a:Audit  D:Dupes  B:DB  Q:Queue  X:Stats  T:Tasks  ?:Help"
                    .to_string()
            }
        }
    };
    let paragraph = Paragraph::new(Line::from(Span::styled(
        status,
        Style::default().fg(Color::DarkGray),
    )));
    f.render_widget(paragraph, area);
}

View file

@ -0,0 +1,69 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, List, ListItem};
use crate::app::AppState;
/// Render the play queue view.
///
/// Shows a placeholder line when the queue is empty; otherwise one line
/// per queued item, with the currently-playing item marked ">> " and the
/// selected item highlighted. The block title reports the queue length
/// plus the repeat and shuffle modes.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let items: Vec<ListItem> = if state.play_queue.is_empty() {
        vec![ListItem::new(Line::from(Span::styled(
            " Queue is empty. Select items in the library and press 'q' to add.",
            Style::default().fg(Color::DarkGray),
        )))]
    } else {
        state
            .play_queue
            .iter()
            .enumerate()
            .map(|(i, item)| {
                let is_current = state.queue_current_index == Some(i);
                let is_selected = state.queue_selected == Some(i);
                let prefix = if is_current { ">> " } else { "   " };
                let type_color = super::media_type_color(&item.media_type);
                // Display the last 8 *characters* of the media id. The
                // previous byte-offset slice (`len() - 8 ..`) would panic if
                // that offset fell inside a multi-byte UTF-8 sequence, so
                // locate the char boundary explicitly; for ids with 8 or
                // fewer chars, fall back to the whole string (offset 0).
                let suffix_start = item
                    .media_id
                    .char_indices()
                    .rev()
                    .nth(7)
                    .map_or(0, |(idx, _)| idx);
                let id_suffix = &item.media_id[suffix_start..];
                let text = if let Some(ref artist) = item.artist {
                    format!("{prefix}{} - {} [{}]", item.title, artist, id_suffix)
                } else {
                    format!("{prefix}{} [{}]", item.title, id_suffix)
                };
                let style = if is_selected {
                    Style::default()
                        .fg(Color::Cyan)
                        .add_modifier(Modifier::BOLD)
                } else if is_current {
                    Style::default()
                        .fg(Color::Green)
                        .add_modifier(Modifier::BOLD)
                } else {
                    Style::default().fg(type_color)
                };
                ListItem::new(Line::from(Span::styled(text, style)))
            })
            .collect()
    };
    // Repeat mode is a small integer: 0 = off, 1 = repeat-one, else repeat-all.
    let repeat_str = match state.queue_repeat {
        0 => "Off",
        1 => "One",
        _ => "All",
    };
    let shuffle_str = if state.queue_shuffle { "On" } else { "Off" };
    let title = format!(
        " Queue ({}) | Repeat: {} | Shuffle: {} ",
        state.play_queue.len(),
        repeat_str,
        shuffle_str,
    );
    let list = List::new(items).block(Block::default().borders(Borders::ALL).title(title));
    f.render_widget(list, area);
}

View file

@ -0,0 +1,81 @@
use ratatui::Frame;
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::Span;
use ratatui::widgets::{Block, Borders, Cell, Paragraph, Row, Table};
use super::{format_size, media_type_color};
use crate::app::AppState;
/// Render the search view: a query input box on top and a results table
/// (name / type / artist / size) underneath.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let sections = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Length(3), Constraint::Min(0)])
        .split(area);

    // Query input box, highlighted in cyan while the user is typing.
    let input_style = if state.input_mode {
        Style::default().fg(Color::Cyan)
    } else {
        Style::default()
    };
    let input_box = Paragraph::new(state.search_input.as_str())
        .block(
            Block::default()
                .borders(Borders::ALL)
                .title(" Search (type and press Enter) "),
        )
        .style(input_style);
    f.render_widget(input_box, sections[0]);

    // Results table: one row per hit, selected row shown inverted.
    let header_style = Style::default()
        .fg(Color::Yellow)
        .add_modifier(Modifier::BOLD);
    let header = Row::new(vec!["Name", "Type", "Artist", "Size"]).style(header_style);

    let mut rows: Vec<Row> = Vec::with_capacity(state.search_results.len());
    for (idx, item) in state.search_results.iter().enumerate() {
        let row_style = if state.search_selected == Some(idx) {
            Style::default().fg(Color::Black).bg(Color::Cyan)
        } else {
            Style::default()
        };
        let type_cell = Cell::from(Span::styled(
            item.media_type.clone(),
            Style::default().fg(media_type_color(&item.media_type)),
        ));
        rows.push(
            Row::new(vec![
                Cell::from(item.file_name.clone()),
                type_cell,
                Cell::from(item.artist.clone().unwrap_or_default()),
                Cell::from(format_size(item.file_size)),
            ])
            .style(row_style),
        );
    }

    let results_title = format!(
        " Results: {} shown, {} total ",
        state.search_results.len(),
        state.search_total_count,
    );
    let widths = [
        Constraint::Percentage(35),
        Constraint::Percentage(20),
        Constraint::Percentage(25),
        Constraint::Percentage(20),
    ];
    let table = Table::new(rows, widths)
        .header(header)
        .block(Block::default().borders(Borders::ALL).title(results_title));
    f.render_widget(table, sections[1]);
}

View file

@ -0,0 +1,82 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Paragraph};
use crate::app::AppState;
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
let label_style = Style::default()
.fg(Color::Yellow)
.add_modifier(Modifier::BOLD);
let value_style = Style::default().fg(Color::White);
let section_style = Style::default()
.fg(Color::Cyan)
.add_modifier(Modifier::BOLD);
let pad = " ";
let lines = vec![
Line::default(),
Line::from(Span::styled("--- Connection ---", section_style)),
Line::from(vec![
Span::raw(pad),
Span::styled("Server URL: ", label_style),
Span::styled(&state.server_url, value_style),
]),
Line::default(),
Line::from(Span::styled("--- Library ---", section_style)),
Line::from(vec![
Span::raw(pad),
Span::styled("Total items: ", label_style),
Span::styled(state.total_media_count.to_string(), value_style),
]),
Line::from(vec![
Span::raw(pad),
Span::styled("Page size: ", label_style),
Span::styled(state.page_size.to_string(), value_style),
]),
Line::from(vec![
Span::raw(pad),
Span::styled("Current page: ", label_style),
Span::styled(
((state.page_offset / state.page_size) + 1).to_string(),
value_style,
),
]),
Line::default(),
Line::from(Span::styled("--- State ---", section_style)),
Line::from(vec![
Span::raw(pad),
Span::styled("Tags loaded: ", label_style),
Span::styled(state.tags.len().to_string(), value_style),
]),
Line::from(vec![
Span::raw(pad),
Span::styled("All tags: ", label_style),
Span::styled(state.all_tags.len().to_string(), value_style),
]),
Line::from(vec![
Span::raw(pad),
Span::styled("Collections: ", label_style),
Span::styled(state.collections.len().to_string(), value_style),
]),
Line::from(vec![
Span::raw(pad),
Span::styled("Audit entries: ", label_style),
Span::styled(state.audit_log.len().to_string(), value_style),
]),
Line::default(),
Line::from(Span::styled("--- Shortcuts ---", section_style)),
Line::from(vec![
Span::raw(pad),
Span::raw("Press Esc to return to the library view"),
]),
];
let settings =
Paragraph::new(lines).block(Block::default().borders(Borders::ALL).title(" Settings "));
f.render_widget(settings, area);
}

View file

@ -0,0 +1,183 @@
use ratatui::Frame;
use ratatui::layout::{Constraint, Direction, Layout, Rect};
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, Paragraph, Row, Table};
use crate::app::AppState;
/// Render the library statistics dashboard.
///
/// Shows a loading placeholder until `state.library_stats` is populated,
/// then three stacked sections: an overview paragraph, side-by-side
/// "media by type" / "storage by type" tables, and side-by-side
/// "top tags" / "top collections" tables.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    // Stats are loaded lazily; show a refresh hint until they arrive.
    let Some(ref stats) = state.library_stats else {
        let msg = Paragraph::new("Loading statistics... (press X to refresh)")
            .block(Block::default().borders(Borders::ALL).title(" Statistics "));
        f.render_widget(msg, area);
        return;
    };
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Length(8), // Overview
            Constraint::Length(10), // Media by type
            Constraint::Min(6),    // Top tags & collections
        ])
        .split(area);
    // Overview section
    let overview_lines = vec![
        Line::from(vec![
            Span::styled(" Total Media: ", Style::default().fg(Color::Gray)),
            Span::styled(
                stats.total_media.to_string(),
                Style::default()
                    .fg(Color::Cyan)
                    .add_modifier(Modifier::BOLD),
            ),
            Span::raw("    "),
            Span::styled("Total Size: ", Style::default().fg(Color::Gray)),
            Span::styled(
                super::format_size(stats.total_size_bytes),
                Style::default()
                    .fg(Color::Cyan)
                    .add_modifier(Modifier::BOLD),
            ),
        ]),
        Line::from(vec![
            Span::styled(" Avg Size: ", Style::default().fg(Color::Gray)),
            Span::styled(
                super::format_size(stats.avg_file_size_bytes),
                Style::default().fg(Color::White),
            ),
        ]),
        Line::from(vec![
            Span::styled(" Tags: ", Style::default().fg(Color::Gray)),
            Span::styled(
                stats.total_tags.to_string(),
                Style::default().fg(Color::Green),
            ),
            Span::raw("    "),
            Span::styled("Collections: ", Style::default().fg(Color::Gray)),
            Span::styled(
                stats.total_collections.to_string(),
                Style::default().fg(Color::Green),
            ),
            Span::raw("    "),
            Span::styled("Duplicates: ", Style::default().fg(Color::Gray)),
            Span::styled(
                stats.total_duplicates.to_string(),
                Style::default().fg(Color::Yellow),
            ),
        ]),
        // Newest/oldest timestamps are optional; "-" when absent.
        Line::from(vec![
            Span::styled(" Newest: ", Style::default().fg(Color::Gray)),
            Span::styled(
                stats
                    .newest_item
                    .as_deref()
                    .map(super::format_date)
                    .unwrap_or("-"),
                Style::default().fg(Color::White),
            ),
            Span::raw("    "),
            Span::styled("Oldest: ", Style::default().fg(Color::Gray)),
            Span::styled(
                stats
                    .oldest_item
                    .as_deref()
                    .map(super::format_date)
                    .unwrap_or("-"),
                Style::default().fg(Color::White),
            ),
        ]),
    ];
    let overview = Paragraph::new(overview_lines)
        .block(Block::default().borders(Borders::ALL).title(" Overview "));
    f.render_widget(overview, chunks[0]);
    // Media by Type table
    let type_rows: Vec<Row> = stats
        .media_by_type
        .iter()
        .map(|tc| {
            let color = super::media_type_color(&tc.name);
            Row::new(vec![
                Span::styled(tc.name.clone(), Style::default().fg(color)),
                Span::styled(tc.count.to_string(), Style::default().fg(Color::White)),
            ])
        })
        .collect();
    // Storage table reuses the same name/count shape, but here `count`
    // carries total bytes (it is passed through `format_size`).
    let storage_rows: Vec<Row> = stats
        .storage_by_type
        .iter()
        .map(|tc| {
            Row::new(vec![
                Span::styled(tc.name.clone(), Style::default().fg(Color::Gray)),
                Span::styled(
                    super::format_size(tc.count),
                    Style::default().fg(Color::White),
                ),
            ])
        })
        .collect();
    let type_cols = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([Constraint::Percentage(50), Constraint::Percentage(50)])
        .split(chunks[1]);
    let type_table = Table::new(type_rows, [Constraint::Min(20), Constraint::Length(10)]).block(
        Block::default()
            .borders(Borders::ALL)
            .title(" Media by Type "),
    );
    f.render_widget(type_table, type_cols[0]);
    let storage_table = Table::new(storage_rows, [Constraint::Min(20), Constraint::Length(12)])
        .block(
            Block::default()
                .borders(Borders::ALL)
                .title(" Storage by Type "),
        );
    f.render_widget(storage_table, type_cols[1]);
    // Top tags and collections
    let bottom_cols = Layout::default()
        .direction(Direction::Horizontal)
        .constraints([Constraint::Percentage(50), Constraint::Percentage(50)])
        .split(chunks[2]);
    let tag_rows: Vec<Row> = stats
        .top_tags
        .iter()
        .map(|tc| {
            Row::new(vec![
                Span::styled(tc.name.clone(), Style::default().fg(Color::Green)),
                Span::styled(tc.count.to_string(), Style::default().fg(Color::White)),
            ])
        })
        .collect();
    let tags_table = Table::new(tag_rows, [Constraint::Min(20), Constraint::Length(10)])
        .block(Block::default().borders(Borders::ALL).title(" Top Tags "));
    f.render_widget(tags_table, bottom_cols[0]);
    let col_rows: Vec<Row> = stats
        .top_collections
        .iter()
        .map(|tc| {
            Row::new(vec![
                Span::styled(tc.name.clone(), Style::default().fg(Color::Magenta)),
                Span::styled(tc.count.to_string(), Style::default().fg(Color::White)),
            ])
        })
        .collect();
    let cols_table = Table::new(col_rows, [Constraint::Min(20), Constraint::Length(10)]).block(
        Block::default()
            .borders(Borders::ALL)
            .title(" Top Collections "),
    );
    f.render_widget(cols_table, bottom_cols[1]);
}

View file

@ -0,0 +1,61 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::widgets::{Block, Borders, Row, Table};
use super::format_date;
use crate::app::AppState;
/// Render the tags table: tag name, resolved parent tag name, and
/// creation date, with the selected row highlighted.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let header_style = Style::default()
        .fg(Color::Yellow)
        .add_modifier(Modifier::BOLD);
    let header = Row::new(vec!["Name", "Parent", "Created"]).style(header_style);

    let mut rows: Vec<Row> = Vec::with_capacity(state.tags.len());
    for (idx, tag) in state.tags.iter().enumerate() {
        let row_style = if state.tag_selected == Some(idx) {
            Style::default().fg(Color::Black).bg(Color::Cyan)
        } else {
            Style::default()
        };
        // Show the parent tag's name when it is present in the loaded
        // list; otherwise fall back to a shortened form of the raw id.
        let parent_display = tag.parent_id.as_ref().map_or_else(
            || "-".to_string(),
            |pid| {
                state
                    .tags
                    .iter()
                    .find(|candidate| candidate.id == *pid)
                    .map(|candidate| candidate.name.clone())
                    .unwrap_or_else(|| pid.chars().take(8).collect::<String>() + "...")
            },
        );
        rows.push(
            Row::new(vec![
                tag.name.clone(),
                parent_display,
                format_date(&tag.created_at).to_string(),
            ])
            .style(row_style),
        );
    }

    let widths = [
        ratatui::layout::Constraint::Percentage(40),
        ratatui::layout::Constraint::Percentage(30),
        ratatui::layout::Constraint::Percentage(30),
    ];
    let table = Table::new(rows, widths).header(header).block(
        Block::default()
            .borders(Borders::ALL)
            .title(format!(" Tags ({}) ", state.tags.len())),
    );
    f.render_widget(table, area);
}

View file

@ -0,0 +1,63 @@
use ratatui::Frame;
use ratatui::layout::Rect;
use ratatui::style::{Color, Modifier, Style};
use ratatui::text::{Line, Span};
use ratatui::widgets::{Block, Borders, List, ListItem};
use crate::app::AppState;
/// Render the scheduled-tasks list; each row shows the enabled flag, the
/// schedule spec, last/next run times, and the last run status.
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
    let items: Vec<ListItem> = if state.scheduled_tasks.is_empty() {
        let placeholder = Span::styled(
            " No scheduled tasks. Press T to refresh.",
            Style::default().fg(Color::DarkGray),
        );
        vec![ListItem::new(Line::from(placeholder))]
    } else {
        let mut built = Vec::with_capacity(state.scheduled_tasks.len());
        for (idx, task) in state.scheduled_tasks.iter().enumerate() {
            // Marker and base colour reflect the enabled flag.
            let (enabled_marker, enabled_color) = if task.enabled {
                ("[ON] ", Color::Green)
            } else {
                ("[OFF]", Color::DarkGray)
            };
            let last_run = task
                .last_run
                .as_deref()
                .map(super::format_date)
                .unwrap_or("-");
            let next_run = task
                .next_run
                .as_deref()
                .map(super::format_date)
                .unwrap_or("-");
            let status = task.last_status.as_deref().unwrap_or("-");
            let text = format!(
                " {enabled_marker} {:<20} {:<16} Last: {:<12} Next: {:<12} Status: {}",
                task.name, task.schedule, last_run, next_run, status
            );
            // Selection highlight wins over the enabled/disabled colour.
            let style = if state.scheduled_tasks_selected == Some(idx) {
                Style::default()
                    .fg(Color::Cyan)
                    .add_modifier(Modifier::BOLD)
            } else {
                Style::default().fg(enabled_color)
            };
            built.push(ListItem::new(Line::from(Span::styled(text, style))));
        }
        built
    };
    let title = format!(" Scheduled Tasks ({}) ", state.scheduled_tasks.len());
    f.render_widget(
        List::new(items).block(Block::default().borders(Borders::ALL).title(title)),
        area,
    );
}

View file

@ -0,0 +1,21 @@
# Manifest for the Dioxus-based graphical UI crate. Metadata fields are
# inherited from the workspace manifest.
[package]
name = "pinakes-ui"
edition.workspace = true
version.workspace = true
license.workspace = true
# All versions come from [workspace.dependencies] unless pinned here.
[dependencies]
serde = { workspace = true }
serde_json = { workspace = true }
chrono = { workspace = true }
uuid = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
reqwest = { workspace = true }
dioxus = { workspace = true }
tokio = { workspace = true }
# Native file-picker dialogs; only this crate uses it, so the version is
# pinned here rather than in the workspace manifest.
rfd = "0.17"
pulldown-cmark = { workspace = true }
gray_matter = { workspace = true }

1577
crates/pinakes-ui/src/app.rs Normal file

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,128 @@
use dioxus::prelude::*;
use super::pagination::Pagination as PaginationControls;
use super::utils::format_timestamp;
use crate::client::AuditEntryResponse;
// Options for the audit-action filter dropdown. "All" disables
// filtering; the remaining entries match the action strings handled by
// `action_badge_class` below.
const ACTION_OPTIONS: &[&str] = &[
    "All",
    "imported",
    "deleted",
    "tagged",
    "untagged",
    "updated",
    "added_to_collection",
    "removed_from_collection",
    "opened",
    "scanned",
];
#[component]
pub fn AuditLog(
entries: Vec<AuditEntryResponse>,
on_select: EventHandler<String>,
audit_page: u64,
total_pages: u64,
on_page_change: EventHandler<u64>,
audit_filter: String,
on_filter_change: EventHandler<String>,
) -> Element {
if entries.is_empty() {
return rsx! {
div { class: "empty-state",
h3 { class: "empty-title", "No audit entries" }
p { class: "empty-subtitle", "Activity will appear here as you use the application." }
}
};
}
rsx! {
div { class: "audit-controls",
select {
class: "filter-select",
value: "{audit_filter}",
onchange: move |evt: Event<FormData>| {
on_filter_change.call(evt.value().to_string());
},
for option in ACTION_OPTIONS.iter() {
option {
key: "{option}",
value: "{option}",
selected: audit_filter == *option,
"{option}"
}
}
}
}
table { class: "data-table",
thead {
tr {
th { "Action" }
th { "Media ID" }
th { "Details" }
th { "Timestamp" }
}
}
tbody {
for entry in entries.iter() {
{
let media_id = entry.media_id.clone().unwrap_or_default();
let truncated_id = if media_id.len() > 8 {
format!("{}...", &media_id[..8])
} else {
media_id.clone()
};
let details = entry.details.clone().unwrap_or_default();
let action_class = action_badge_class(&entry.action);
let timestamp = format_timestamp(&entry.timestamp);
let click_id = media_id.clone();
let has_media_id = !media_id.is_empty();
rsx! {
tr { key: "{entry.id}",
td {
span { class: "type-badge {action_class}", "{entry.action}" }
}
td {
if has_media_id {
span {
class: "mono clickable",
onclick: move |_| {
on_select.call(click_id.clone());
},
"{truncated_id}"
}
} else {
span { class: "mono", "{truncated_id}" }
}
}
td { "{details}" }
td { "{timestamp}" }
}
}
}
}
}
}
PaginationControls {
current_page: audit_page,
total_pages: total_pages,
on_page_change: on_page_change,
}
}
}
/// Map an audit action name to the CSS class used for its badge.
/// Unrecognized actions fall back to the generic "type-other" badge.
fn action_badge_class(action: &str) -> &'static str {
    // Static lookup table: (action name, badge class).
    const CLASSES: &[(&str, &str)] = &[
        ("imported", "type-image"),
        ("deleted", "action-danger"),
        ("tagged", "tag-badge"),
        ("untagged", "tag-badge"),
        ("updated", "action-updated"),
        ("added_to_collection", "action-collection"),
        ("removed_from_collection", "action-collection-remove"),
        ("opened", "action-opened"),
        ("scanned", "action-scanned"),
    ];
    CLASSES
        .iter()
        .find(|(name, _)| *name == action)
        .map_or("type-other", |&(_, class)| class)
}

View file

@ -0,0 +1,42 @@
use dioxus::prelude::*;
/// One segment of a breadcrumb trail.
#[derive(Debug, Clone, PartialEq)]
pub struct BreadcrumbItem {
    /// Text displayed for this segment.
    pub label: String,
    /// View identifier passed to `on_navigate` when the segment is
    /// clicked; forwarded as-is, including `None` (presumably the
    /// default/root view — confirm against the caller).
    pub view: Option<String>,
}
/// Breadcrumb navigation bar.
///
/// All items except the last render as links that call `on_navigate`
/// with the item's `view`; the last item is the current location and is
/// rendered as plain, non-clickable text.
#[component]
pub fn Breadcrumb(
    items: Vec<BreadcrumbItem>,
    on_navigate: EventHandler<Option<String>>,
) -> Element {
    rsx! {
        nav { class: "breadcrumb",
            for (i, item) in items.iter().enumerate() {
                // Separator before every segment except the first.
                if i > 0 {
                    span { class: "breadcrumb-sep", " > " }
                }
                if i < items.len() - 1 {
                    {
                        // Clone per-item data into the click closure.
                        let view = item.view.clone();
                        let label = item.label.clone();
                        rsx! {
                            a {
                                class: "breadcrumb-link",
                                href: "#",
                                onclick: move |e: Event<MouseData>| {
                                    // Suppress the anchor's default navigation.
                                    e.prevent_default();
                                    on_navigate.call(view.clone());
                                },
                                "{label}"
                            }
                        }
                    }
                } else {
                    span { class: "breadcrumb-current", "{item.label}" }
                }
            }
        }
    }
}

View file

@ -0,0 +1,334 @@
use dioxus::prelude::*;
use super::utils::{format_size, type_badge_class};
use crate::client::{CollectionResponse, MediaResponse};
/// Collections management view.
///
/// Renders either the list of all collections (with a create form and
/// per-row delete confirmation) or, when `viewing_collection` is set,
/// the member list of that collection with add/remove controls. All
/// mutations are delegated to the parent via the `on_*` event handlers.
#[component]
pub fn Collections(
    collections: Vec<CollectionResponse>,
    collection_members: Vec<MediaResponse>,
    viewing_collection: Option<String>,
    on_create: EventHandler<(String, String, Option<String>, Option<String>)>,
    on_delete: EventHandler<String>,
    on_view_members: EventHandler<String>,
    on_back_to_list: EventHandler<()>,
    on_remove_member: EventHandler<(String, String)>,
    on_select: EventHandler<String>,
    on_add_member: EventHandler<(String, String)>,
    all_media: Vec<MediaResponse>,
) -> Element {
    // Local state for the "create collection" form.
    let mut new_name = use_signal(String::new);
    let mut new_kind = use_signal(|| String::from("manual"));
    let mut new_description = use_signal(String::new);
    let mut new_filter_query = use_signal(String::new);
    // Id of the collection awaiting delete confirmation, if any.
    let mut confirm_delete: Signal<Option<String>> = use_signal(|| None);
    let mut show_add_modal = use_signal(|| false);
    // Detail view: viewing a specific collection's members
    if let Some(ref col_id) = viewing_collection {
        // Fall back to the raw id when the collection is not loaded.
        let col_name = collections
            .iter()
            .find(|c| &c.id == col_id)
            .map(|c| c.name.clone())
            .unwrap_or_else(|| col_id.clone());
        let back_click = move |_| on_back_to_list.call(());
        // Collect IDs of current members to filter available media
        let member_ids: Vec<String> = collection_members.iter().map(|m| m.id.clone()).collect();
        let available_media: Vec<&MediaResponse> = all_media
            .iter()
            .filter(|m| !member_ids.contains(&m.id))
            .collect();
        let modal_col_id = col_id.clone();
        return rsx! {
            button {
                class: "btn btn-ghost mb-16",
                onclick: back_click,
                "\u{2190} Back to Collections"
            }
            h3 { class: "mb-16", "{col_name}" }
            div { class: "form-row mb-16",
                button {
                    class: "btn btn-primary",
                    onclick: move |_| show_add_modal.set(true),
                    "Add Media"
                }
            }
            if collection_members.is_empty() {
                div { class: "empty-state",
                    p { class: "empty-subtitle", "This collection has no members." }
                }
            } else {
                table { class: "data-table",
                    thead {
                        tr {
                            th { "Name" }
                            th { "Type" }
                            th { "Artist" }
                            th { "Size" }
                            th { "" }
                        }
                    }
                    tbody {
                        for item in collection_members.iter() {
                            {
                                let artist = item.artist.clone().unwrap_or_default();
                                let size = format_size(item.file_size);
                                let badge_class = type_badge_class(&item.media_type);
                                // Per-row clones for the remove/select closures.
                                let remove_cid = col_id.clone();
                                let remove_mid = item.id.clone();
                                let row_click = {
                                    let mid = item.id.clone();
                                    move |_| on_select.call(mid.clone())
                                };
                                rsx! {
                                    tr {
                                        key: "{item.id}",
                                        class: "clickable-row",
                                        onclick: row_click,
                                        td { "{item.file_name}" }
                                        td {
                                            span { class: "type-badge {badge_class}", "{item.media_type}" }
                                        }
                                        td { "{artist}" }
                                        td { "{size}" }
                                        td {
                                            button {
                                                class: "btn btn-danger btn-sm",
                                                onclick: move |e: Event<MouseData>| {
                                                    // Keep the row's select handler from firing too.
                                                    e.stop_propagation();
                                                    on_remove_member.call((remove_cid.clone(), remove_mid.clone()));
                                                },
                                                "Remove"
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // Add Media modal
            if *show_add_modal.read() {
                div { class: "modal-overlay",
                    onclick: move |_| show_add_modal.set(false),
                    div { class: "modal",
                        onclick: move |e: Event<MouseData>| e.stop_propagation(),
                        div { class: "modal-header",
                            h3 { "Add Media to Collection" }
                            button {
                                class: "btn btn-ghost",
                                onclick: move |_| show_add_modal.set(false),
                                "\u{2715}"
                            }
                        }
                        div { class: "modal-body",
                            if available_media.is_empty() {
                                p { "No media available to add." }
                            } else {
                                table { class: "data-table",
                                    thead {
                                        tr {
                                            th { "Name" }
                                            th { "Type" }
                                            th { "Artist" }
                                        }
                                    }
                                    tbody {
                                        for media in available_media.iter() {
                                            {
                                                let artist = media.artist.clone().unwrap_or_default();
                                                let badge_class = type_badge_class(&media.media_type);
                                                let add_click = {
                                                    let cid = modal_col_id.clone();
                                                    let mid = media.id.clone();
                                                    move |_| {
                                                        on_add_member.call((cid.clone(), mid.clone()));
                                                        show_add_modal.set(false);
                                                    }
                                                };
                                                rsx! {
                                                    tr {
                                                        key: "{media.id}",
                                                        class: "clickable-row",
                                                        onclick: add_click,
                                                        td { "{media.file_name}" }
                                                        td {
                                                            span { class: "type-badge {badge_class}", "{media.media_type}" }
                                                        }
                                                        td { "{artist}" }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        };
    }
    // List view: show all collections with create form
    let is_virtual = *new_kind.read() == "virtual";
    // Submit the create form; empty optional fields become None, and the
    // form resets after a successful submit.
    let create_click = move |_| {
        let name = new_name.read().clone();
        if name.is_empty() {
            return;
        }
        let kind = new_kind.read().clone();
        let desc = {
            let d = new_description.read().clone();
            if d.is_empty() { None } else { Some(d) }
        };
        let filter = {
            let f = new_filter_query.read().clone();
            if f.is_empty() { None } else { Some(f) }
        };
        on_create.call((name, kind, desc, filter));
        new_name.set(String::new());
        new_kind.set(String::from("manual"));
        new_description.set(String::new());
        new_filter_query.set(String::new());
    };
    rsx! {
        div { class: "card",
            div { class: "card-header",
                h3 { class: "card-title", "Collections" }
            }
            div { class: "form-row mb-16",
                input {
                    r#type: "text",
                    placeholder: "Collection name...",
                    value: "{new_name}",
                    oninput: move |e| new_name.set(e.value()),
                }
                select {
                    value: "{new_kind}",
                    onchange: move |e| new_kind.set(e.value()),
                    option { value: "manual", "Manual" }
                    option { value: "virtual", "Virtual" }
                }
                input {
                    r#type: "text",
                    placeholder: "Description (optional)...",
                    value: "{new_description}",
                    oninput: move |e| new_description.set(e.value()),
                }
            }
            // Filter query input only applies to virtual collections.
            if is_virtual {
                div { class: "form-row mb-16",
                    input {
                        r#type: "text",
                        placeholder: "Filter query for virtual collection...",
                        value: "{new_filter_query}",
                        oninput: move |e| new_filter_query.set(e.value()),
                    }
                }
            }
            div { class: "form-row mb-16",
                button {
                    class: "btn btn-primary",
                    onclick: create_click,
                    "Create"
                }
            }
            if collections.is_empty() {
                div { class: "empty-state",
                    p { class: "empty-subtitle", "No collections yet. Create one above." }
                }
            } else {
                table { class: "data-table",
                    thead {
                        tr {
                            th { "Name" }
                            th { "Kind" }
                            th { "Description" }
                            th { "" }
                            th { "" }
                        }
                    }
                    tbody {
                        for col in collections.iter() {
                            {
                                let desc = col.description.clone().unwrap_or_default();
                                let kind_class = if col.kind == "virtual" { "type-document" } else { "type-other" };
                                let view_click = {
                                    let id = col.id.clone();
                                    move |_| on_view_members.call(id.clone())
                                };
                                let col_id_for_delete = col.id.clone();
                                // Two-step delete: this row shows Confirm/Cancel
                                // when its id matches the pending confirmation.
                                let is_confirming = confirm_delete
                                    .read()
                                    .as_ref()
                                    .map(|id| id == &col.id)
                                    .unwrap_or(false);
                                rsx! {
                                    tr { key: "{col.id}",
                                        td { "{col.name}" }
                                        td {
                                            span { class: "type-badge {kind_class}", "{col.kind}" }
                                        }
                                        td { "{desc}" }
                                        td {
                                            button {
                                                class: "btn btn-sm btn-secondary",
                                                onclick: view_click,
                                                "View"
                                            }
                                        }
                                        td {
                                            if is_confirming {
                                                button {
                                                    class: "btn btn-danger btn-sm",
                                                    onclick: {
                                                        let id = col_id_for_delete.clone();
                                                        move |_| {
                                                            on_delete.call(id.clone());
                                                            confirm_delete.set(None);
                                                        }
                                                    },
                                                    "Confirm"
                                                }
                                                button {
                                                    class: "btn btn-ghost btn-sm",
                                                    onclick: move |_| confirm_delete.set(None),
                                                    "Cancel"
                                                }
                                            } else {
                                                button {
                                                    class: "btn btn-danger btn-sm",
                                                    onclick: {
                                                        let id = col_id_for_delete.clone();
                                                        move |_| confirm_delete.set(Some(id.clone()))
                                                    },
                                                    "Delete"
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

View file

@ -0,0 +1,193 @@
use dioxus::prelude::*;
use super::utils::format_size;
use crate::client::DatabaseStatsResponse;
/// Database administration view: stats overview plus maintenance
/// (vacuum, backup) and a destructive "clear all data" action, each
/// gated behind an inline confirmation step.
#[component]
pub fn Database(
    stats: Option<DatabaseStatsResponse>,
    on_refresh: EventHandler<()>,
    on_vacuum: EventHandler<()>,
    on_clear: EventHandler<()>,
    on_backup: EventHandler<String>,
) -> Element {
    // Inline confirmation flags for the destructive/maintenance actions.
    let mut confirm_clear = use_signal(|| false);
    let mut confirm_vacuum = use_signal(|| false);
    // Destination path typed into the backup input.
    let mut backup_path = use_signal(String::new);
    rsx! {
        div { class: "card mb-16",
            div { class: "card-header",
                h3 { class: "card-title", "Database Overview" }
                button {
                    class: "btn btn-sm btn-secondary",
                    onclick: move |_| on_refresh.call(()),
                    "\u{21bb} Refresh"
                }
            }
            // Stats arrive asynchronously; show a placeholder until then.
            match stats.as_ref() {
                Some(s) => {
                    let size_str = format_size(s.database_size_bytes);
                    rsx! {
                        div { class: "stats-grid",
                            div { class: "stat-card",
                                div { class: "stat-value", "{s.media_count}" }
                                div { class: "stat-label", "Media Items" }
                            }
                            div { class: "stat-card",
                                div { class: "stat-value", "{s.tag_count}" }
                                div { class: "stat-label", "Tags" }
                            }
                            div { class: "stat-card",
                                div { class: "stat-value", "{s.collection_count}" }
                                div { class: "stat-label", "Collections" }
                            }
                            div { class: "stat-card",
                                div { class: "stat-value", "{s.audit_count}" }
                                div { class: "stat-label", "Audit Entries" }
                            }
                            div { class: "stat-card",
                                div { class: "stat-value", "{size_str}" }
                                div { class: "stat-label", "Database Size" }
                            }
                            div { class: "stat-card",
                                div { class: "stat-value", "{s.backend_name}" }
                                div { class: "stat-label", "Backend" }
                            }
                        }
                    }
                },
                None => rsx! {
                    div { class: "empty-state",
                        p { class: "text-muted", "Loading database stats..." }
                    }
                },
            }
        }
        // Maintenance actions
        div { class: "card mb-16",
            div { class: "card-header",
                h3 { class: "card-title", "Maintenance" }
            }
            div { class: "db-actions",
                // Vacuum
                div { class: "db-action-row",
                    div { class: "db-action-info",
                        h4 { "Vacuum Database" }
                        p { class: "text-muted text-sm",
                            "Reclaim unused disk space and optimize the database. "
                            "This is safe to run at any time but may briefly lock the database."
                        }
                    }
                    if *confirm_vacuum.read() {
                        div { class: "db-action-confirm",
                            span { class: "text-sm", "Run vacuum?" }
                            button {
                                class: "btn btn-sm btn-primary",
                                onclick: move |_| {
                                    confirm_vacuum.set(false);
                                    on_vacuum.call(());
                                },
                                "Confirm"
                            }
                            button {
                                class: "btn btn-sm btn-ghost",
                                onclick: move |_| confirm_vacuum.set(false),
                                "Cancel"
                            }
                        }
                    } else {
                        button {
                            class: "btn btn-secondary",
                            onclick: move |_| confirm_vacuum.set(true),
                            "Vacuum"
                        }
                    }
                }
                // Backup
                div { class: "db-action-row",
                    div { class: "db-action-info",
                        h4 { "Backup Database" }
                        p { class: "text-muted text-sm",
                            "Create a copy of the database at the specified path. "
                            "The backup is a full snapshot of the current state."
                        }
                    }
                    div { class: "form-row",
                        input {
                            r#type: "text",
                            placeholder: "/path/to/backup.db",
                            value: "{backup_path}",
                            oninput: move |e| backup_path.set(e.value()),
                            style: "max-width: 300px;",
                        }
                        button {
                            class: "btn btn-secondary",
                            disabled: backup_path.read().is_empty(),
                            onclick: {
                                let mut backup_path = backup_path;
                                move |_| {
                                    // Re-check emptiness; the input is cleared
                                    // only after a non-empty submit.
                                    let path = backup_path.read().clone();
                                    if !path.is_empty() {
                                        on_backup.call(path);
                                        backup_path.set(String::new());
                                    }
                                }
                            },
                            "Backup"
                        }
                    }
                }
            }
        }
        // Danger zone
        div { class: "card mb-16 danger-card",
            div { class: "card-header",
                h3 { class: "card-title", style: "color: var(--danger);", "Danger Zone" }
            }
            div { class: "db-actions",
                div { class: "db-action-row",
                    div { class: "db-action-info",
                        h4 { "Clear All Data" }
                        p { class: "text-muted text-sm",
                            "Permanently delete all media records, tags, collections, and audit entries. "
                            "This cannot be undone. Files on disk are not affected."
                        }
                    }
                    if *confirm_clear.read() {
                        div { class: "db-action-confirm",
                            span { class: "text-sm", style: "color: var(--danger);",
                                "This will delete everything. Are you sure?"
                            }
                            button {
                                class: "btn btn-sm btn-danger",
                                onclick: move |_| {
                                    confirm_clear.set(false);
                                    on_clear.call(());
                                },
                                "Yes, Delete Everything"
                            }
                            button {
                                class: "btn btn-sm btn-ghost",
                                onclick: move |_| confirm_clear.set(false),
                                "Cancel"
                            }
                        }
                    } else {
                        button {
                            class: "btn btn-danger",
                            onclick: move |_| confirm_clear.set(true),
                            "Clear All Data"
                        }
                    }
                }
            }
        }
    }
}

View file

@ -0,0 +1,663 @@
use dioxus::prelude::*;
use super::image_viewer::ImageViewer;
use super::markdown_viewer::MarkdownViewer;
use super::media_player::MediaPlayer;
use super::utils::{format_duration, format_size, media_category, type_badge_class};
use crate::client::{MediaResponse, MediaUpdateEvent, TagResponse};
#[component]
pub fn Detail(
media: MediaResponse,
media_tags: Vec<TagResponse>,
all_tags: Vec<TagResponse>,
server_url: String,
#[props(default = false)] autoplay: bool,
on_back: EventHandler<()>,
on_open: EventHandler<String>,
on_update: EventHandler<MediaUpdateEvent>,
on_tag: EventHandler<(String, String)>,
on_untag: EventHandler<(String, String)>,
on_set_custom_field: EventHandler<(String, String, String, String)>,
on_delete_custom_field: EventHandler<(String, String)>,
on_delete: EventHandler<String>,
) -> Element {
let mut editing = use_signal(|| false);
let mut show_image_viewer = use_signal(|| false);
let mut edit_title = use_signal(String::new);
let mut edit_artist = use_signal(String::new);
let mut edit_album = use_signal(String::new);
let mut edit_genre = use_signal(String::new);
let mut edit_year = use_signal(String::new);
let mut edit_description = use_signal(String::new);
let mut add_tag_id = use_signal(String::new);
let mut new_field_name = use_signal(String::new);
let mut new_field_type = use_signal(|| "text".to_string());
let mut new_field_value = use_signal(String::new);
let mut confirm_delete = use_signal(|| false);
let id = media.id.clone();
let title = media.title.clone().unwrap_or_default();
let artist = media.artist.clone().unwrap_or_default();
let album = media.album.clone().unwrap_or_default();
let genre = media.genre.clone().unwrap_or_default();
let year_str = media.year.map(|y| y.to_string()).unwrap_or_default();
let duration_str = media.duration_secs.map(format_duration).unwrap_or_default();
let description = media.description.clone().unwrap_or_default();
let size = format_size(media.file_size);
let badge_class = type_badge_class(&media.media_type);
let custom_fields: Vec<(String, String, String)> = media
.custom_fields
.iter()
.map(|(k, v)| (k.clone(), v.field_type.clone(), v.value.clone()))
.collect();
let is_editing = editing();
// Separate system-extracted metadata from user-defined custom fields.
// System fields are those set by extractors (camera info, dimensions, etc.)
let system_field_names: &[&str] = &[
"width",
"height",
"camera_make",
"camera_model",
"date_taken",
"gps_latitude",
"gps_longitude",
"iso",
"exposure_time",
"f_number",
"focal_length",
"software",
"lens_model",
"flash",
"orientation",
"track_number",
"disc_number",
"comment",
"bitrate",
"sample_rate",
"channels",
"resolution",
"video_codec",
"audio_codec",
"audio_bitrate",
];
let system_fields: Vec<(String, String, String)> = custom_fields
.iter()
.filter(|(k, _, _)| system_field_names.contains(&k.as_str()))
.cloned()
.collect();
let user_fields: Vec<(String, String, String)> = custom_fields
.iter()
.filter(|(k, _, _)| !system_field_names.contains(&k.as_str()))
.cloned()
.collect();
let has_system_fields = !system_fields.is_empty();
let has_user_fields = !user_fields.is_empty();
// Media preview URLs
let stream_url = format!("{}/api/v1/media/{}/stream", server_url, media.id);
let thumbnail_url = format!("{}/api/v1/media/{}/thumbnail", server_url, media.id);
let category = media_category(&media.media_type);
let has_thumbnail = media.has_thumbnail;
// Compute available tags (all_tags minus media_tags)
let media_tag_ids: Vec<String> = media_tags.iter().map(|t| t.id.clone()).collect();
let available_tags: Vec<TagResponse> = all_tags
.iter()
.filter(|t| !media_tag_ids.contains(&t.id))
.cloned()
.collect();
// Clone values needed for closures
let id_for_open = id.clone();
let id_for_save = id.clone();
let id_for_tag = id.clone();
let id_for_field = id.clone();
let id_for_delete = id.clone();
// Clone media field values for the edit button
let title_for_edit = media.title.clone().unwrap_or_default();
let artist_for_edit = media.artist.clone().unwrap_or_default();
let album_for_edit = media.album.clone().unwrap_or_default();
let genre_for_edit = media.genre.clone().unwrap_or_default();
let year_for_edit = media.year.map(|y| y.to_string()).unwrap_or_default();
let description_for_edit = media.description.clone().unwrap_or_default();
let on_edit_click = move |_| {
edit_title.set(title_for_edit.clone());
edit_artist.set(artist_for_edit.clone());
edit_album.set(album_for_edit.clone());
edit_genre.set(genre_for_edit.clone());
edit_year.set(year_for_edit.clone());
edit_description.set(description_for_edit.clone());
editing.set(true);
};
let on_save_click = {
let id_save = id_for_save.clone();
move |_| {
let t = edit_title();
let ar = edit_artist();
let al = edit_album();
let g = edit_genre();
let y_str = edit_year();
let d = edit_description();
let title_opt = if t.is_empty() { None } else { Some(t) };
let artist_opt = if ar.is_empty() { None } else { Some(ar) };
let album_opt = if al.is_empty() { None } else { Some(al) };
let genre_opt = if g.is_empty() { None } else { Some(g) };
let year_opt = if y_str.is_empty() {
None
} else {
y_str.parse::<i32>().ok()
};
let desc_opt = if d.is_empty() { None } else { Some(d) };
on_update.call(MediaUpdateEvent {
id: id_save.clone(),
title: title_opt,
artist: artist_opt,
album: album_opt,
genre: genre_opt,
year: year_opt,
description: desc_opt,
});
editing.set(false);
}
};
let on_cancel_click = move |_| {
editing.set(false);
};
let on_tag_add_click = {
let id_tag = id_for_tag.clone();
move |_| {
let tag_id = add_tag_id();
if !tag_id.is_empty() {
on_tag.call((id_tag.clone(), tag_id));
add_tag_id.set(String::new());
}
}
};
let on_add_field_click = {
let id_field = id_for_field.clone();
move |_| {
let name = new_field_name();
let ft = new_field_type();
let val = new_field_value();
if !name.is_empty() && !val.is_empty() {
on_set_custom_field.call((id_field.clone(), name, ft, val));
new_field_name.set(String::new());
new_field_type.set("text".to_string());
new_field_value.set(String::new());
}
}
};
let on_delete_click = move |_| {
confirm_delete.set(true);
};
let on_confirm_delete = {
let id_del = id_for_delete.clone();
move |_| {
on_delete.call(id_del.clone());
confirm_delete.set(false);
}
};
let on_cancel_delete = move |_| {
confirm_delete.set(false);
};
let stream_url_for_viewer = stream_url.clone();
let thumb_for_player = thumbnail_url.clone();
let file_name_for_viewer = media.file_name.clone();
rsx! {
// Media preview
div { class: "detail-preview",
if category == "audio" {
MediaPlayer {
src: stream_url.clone(),
media_type: "audio".to_string(),
title: media.title.clone(),
thumbnail_url: if has_thumbnail { Some(thumb_for_player.clone()) } else { None },
autoplay: autoplay,
}
} else if category == "video" {
MediaPlayer {
src: stream_url.clone(),
media_type: "video".to_string(),
title: media.title.clone(),
autoplay: autoplay,
}
} else if category == "image" {
if has_thumbnail {
img {
src: "{thumbnail_url}",
alt: "{media.file_name}",
class: "detail-preview-image clickable",
onclick: move |_| show_image_viewer.set(true),
}
} else {
img {
src: "{stream_url}",
alt: "{media.file_name}",
class: "detail-preview-image clickable",
onclick: move |_| show_image_viewer.set(true),
}
}
} else if category == "text" {
MarkdownViewer {
content_url: stream_url.clone(),
media_type: media.media_type.clone(),
}
} else if category == "document" {
div { class: "detail-no-preview",
p { class: "text-muted", "Preview not available for this document type." }
button {
class: "btn btn-primary",
onclick: {
let id_open = id.clone();
move |_| on_open.call(id_open.clone())
},
"Open Externally"
}
}
} else if has_thumbnail {
img {
src: "{thumbnail_url}",
alt: "Thumbnail",
class: "detail-thumbnail",
}
}
}
// Action bar
div { class: "detail-actions",
button {
class: "btn btn-secondary",
onclick: move |_| on_back.call(()),
"Back"
}
button {
class: "btn btn-primary",
onclick: {
let id_open = id_for_open.clone();
move |_| on_open.call(id_open.clone())
},
"Open"
}
if is_editing {
button {
class: "btn btn-primary",
onclick: on_save_click,
"Save"
}
button {
class: "btn btn-ghost",
onclick: on_cancel_click,
"Cancel"
}
} else {
button {
class: "btn btn-secondary",
onclick: on_edit_click,
"Edit"
}
}
if confirm_delete() {
button {
class: "btn btn-danger",
onclick: on_confirm_delete,
"Confirm Delete"
}
button {
class: "btn btn-ghost",
onclick: on_cancel_delete,
"Cancel"
}
} else {
button {
class: "btn btn-danger",
onclick: on_delete_click,
"Delete"
}
}
}
// Info / Edit section
if is_editing {
div { class: "detail-grid",
// Read-only file info
div { class: "detail-field",
span { class: "detail-label", "File Name" }
span { class: "detail-value", "{media.file_name}" }
}
div { class: "detail-field",
span { class: "detail-label", "Path" }
span { class: "detail-value mono", "{media.path}" }
}
div { class: "detail-field",
span { class: "detail-label", "Type" }
span { class: "detail-value",
span { class: "type-badge {badge_class}", "{media.media_type}" }
}
}
div { class: "detail-field",
span { class: "detail-label", "Size" }
span { class: "detail-value", "{size}" }
}
div { class: "detail-field",
span { class: "detail-label", "Hash" }
span { class: "detail-value mono", "{media.content_hash}" }
}
// Editable fields — conditional by media category
div { class: "detail-field",
label { class: "detail-label", "Title" }
input {
r#type: "text",
value: "{edit_title}",
oninput: move |e: Event<FormData>| edit_title.set(e.value()),
}
}
div { class: "detail-field",
label { class: "detail-label",
{match category {
"image" => "Photographer",
"document" | "text" => "Author",
_ => "Artist",
}}
}
input {
r#type: "text",
value: "{edit_artist}",
oninput: move |e: Event<FormData>| edit_artist.set(e.value()),
}
}
if category == "audio" {
div { class: "detail-field",
label { class: "detail-label", "Album" }
input {
r#type: "text",
value: "{edit_album}",
oninput: move |e: Event<FormData>| edit_album.set(e.value()),
}
}
}
if category == "audio" || category == "video" {
div { class: "detail-field",
label { class: "detail-label", "Genre" }
input {
r#type: "text",
value: "{edit_genre}",
oninput: move |e: Event<FormData>| edit_genre.set(e.value()),
}
}
}
if category == "audio" || category == "video" || category == "document" {
div { class: "detail-field",
label { class: "detail-label", "Year" }
input {
r#type: "text",
value: "{edit_year}",
oninput: move |e: Event<FormData>| edit_year.set(e.value()),
}
}
}
div { class: "detail-field full-width",
label { class: "detail-label", "Description" }
textarea {
value: "{edit_description}",
oninput: move |e: Event<FormData>| edit_description.set(e.value()),
}
}
}
} else {
div { class: "detail-grid",
div { class: "detail-field",
span { class: "detail-label", "File Name" }
span { class: "detail-value", "{media.file_name}" }
}
div { class: "detail-field",
span { class: "detail-label", "Path" }
span { class: "detail-value mono", "{media.path}" }
}
div { class: "detail-field",
span { class: "detail-label", "Type" }
span { class: "detail-value",
span { class: "type-badge {badge_class}", "{media.media_type}" }
}
}
div { class: "detail-field",
span { class: "detail-label", "Size" }
span { class: "detail-value", "{size}" }
}
div { class: "detail-field",
span { class: "detail-label", "Hash" }
span { class: "detail-value mono", "{media.content_hash}" }
}
// Title: only shown when non-empty
if !title.is_empty() {
div { class: "detail-field",
span { class: "detail-label", "Title" }
span { class: "detail-value", "{title}" }
}
}
// Artist/Author/Photographer: only shown when non-empty
if !artist.is_empty() {
div { class: "detail-field",
span { class: "detail-label",
{match category {
"image" => "Photographer",
"document" | "text" => "Author",
_ => "Artist",
}}
}
span { class: "detail-value", "{artist}" }
}
}
// Album: audio only, when non-empty
if category == "audio" && !album.is_empty() {
div { class: "detail-field",
span { class: "detail-label", "Album" }
span { class: "detail-value", "{album}" }
}
}
// Genre: audio and video, when non-empty
if (category == "audio" || category == "video") && !genre.is_empty() {
div { class: "detail-field",
span { class: "detail-label", "Genre" }
span { class: "detail-value", "{genre}" }
}
}
// Year: audio, video, document, when non-empty
if (category == "audio" || category == "video" || category == "document") && !year_str.is_empty() {
div { class: "detail-field",
span { class: "detail-label", "Year" }
span { class: "detail-value", "{year_str}" }
}
}
// Duration: audio and video
if (category == "audio" || category == "video") && media.duration_secs.is_some() {
div { class: "detail-field",
span { class: "detail-label", "Duration" }
span { class: "detail-value", "{duration_str}" }
}
}
// Description: only shown when non-empty
if !description.is_empty() {
div { class: "detail-field full-width",
span { class: "detail-label", "Description" }
span { class: "detail-value", "{description}" }
}
}
div { class: "detail-field",
span { class: "detail-label", "Created" }
span { class: "detail-value", "{media.created_at}" }
}
div { class: "detail-field",
span { class: "detail-label", "Updated" }
span { class: "detail-value", "{media.updated_at}" }
}
}
}
// Tags section
div { class: "card mb-16",
div { class: "card-header",
h4 { class: "card-title", "Tags" }
}
div { class: "tag-list mb-8",
for tag in media_tags.iter() {
{
let tag_id = tag.id.clone();
let media_id_untag = id.clone();
rsx! {
span {
class: "tag-badge",
key: "{tag_id}",
"{tag.name}"
span {
class: "tag-remove",
onclick: {
let tid = tag_id.clone();
let mid = media_id_untag.clone();
move |_| on_untag.call((mid.clone(), tid.clone()))
},
"x"
}
}
}
}
}
}
div { class: "form-row",
select {
value: "{add_tag_id}",
onchange: move |e: Event<FormData>| add_tag_id.set(e.value()),
option { value: "", "Add tag..." }
for tag in available_tags.iter() {
{
let tid = tag.id.clone();
let tname = tag.name.clone();
rsx! {
option {
key: "{tid}",
value: "{tid}",
"{tname}"
}
}
}
}
}
button {
class: "btn btn-sm btn-primary",
onclick: on_tag_add_click,
"Add"
}
}
}
// Technical Metadata section (system-extracted fields)
if has_system_fields {
div { class: "card mb-16",
div { class: "card-header",
h4 { class: "card-title", "Technical Metadata" }
}
div { class: "detail-grid",
for (key, _field_type, value) in system_fields.iter() {
div {
class: "detail-field",
key: "{key}",
span { class: "detail-label", "{key}" }
span { class: "detail-value", "{value}" }
}
}
}
}
}
// Custom Fields section (user-defined)
div { class: "card",
div { class: "card-header",
h4 { class: "card-title", "Custom Fields" }
}
if has_user_fields {
div { class: "detail-grid",
for (key, field_type, value) in user_fields.iter() {
{
let field_name = key.clone();
let media_id_del = id.clone();
rsx! {
div {
class: "detail-field",
key: "{field_name}",
span { class: "detail-label", "{key} ({field_type})" }
div { class: "flex-row",
span { class: "detail-value", "{value}" }
button {
class: "btn-icon",
onclick: {
let fname = field_name.clone();
let mid = media_id_del.clone();
move |_| on_delete_custom_field.call((mid.clone(), fname.clone()))
},
"x"
}
}
}
}
}
}
}
}
div { class: "form-row",
input {
r#type: "text",
placeholder: "Field name",
value: "{new_field_name}",
oninput: move |e: Event<FormData>| new_field_name.set(e.value()),
}
select {
value: "{new_field_type}",
onchange: move |e: Event<FormData>| new_field_type.set(e.value()),
option { value: "text", "text" }
option { value: "number", "number" }
option { value: "date", "date" }
option { value: "boolean", "boolean" }
}
input {
r#type: "text",
placeholder: "Value",
value: "{new_field_value}",
oninput: move |e: Event<FormData>| new_field_value.set(e.value()),
}
button {
class: "btn btn-sm btn-primary",
onclick: on_add_field_click,
"Add"
}
}
}
// Image viewer overlay
if *show_image_viewer.read() {
ImageViewer {
src: stream_url_for_viewer.clone(),
alt: file_name_for_viewer.clone(),
on_close: move |_| show_image_viewer.set(false),
}
}
}
}

View file

@ -0,0 +1,170 @@
use dioxus::prelude::*;
use super::utils::{format_size, format_timestamp};
use crate::client::DuplicateGroupResponse;
/// Duplicate-file management view.
///
/// Renders one collapsible group per shared content hash. At most one group
/// is expanded at a time. Within an expanded group the first item is labeled
/// "Keep"; every other item gets a two-step delete (Delete -> Confirm) that
/// reports the chosen media id through `on_delete`.
///
/// Props:
/// - `groups`: duplicate groups, each containing the items sharing one hash.
/// - `server_url`: base URL used to build per-item thumbnail endpoints.
/// - `on_delete`: invoked with the media id whose deletion was confirmed.
/// - `on_refresh`: invoked when the user asks to recompute the duplicate list.
#[component]
pub fn Duplicates(
    groups: Vec<DuplicateGroupResponse>,
    server_url: String,
    on_delete: EventHandler<String>,
    on_refresh: EventHandler<()>,
) -> Element {
    // Content hash of the currently expanded group (None = all collapsed).
    let mut expanded_group = use_signal(|| Option::<String>::None);
    // Media id awaiting delete confirmation (None = no pending delete).
    let mut confirm_delete = use_signal(|| Option::<String>::None);
    let total_groups = groups.len();
    // One copy per group is kept, so each group contributes (items - 1)
    // duplicates; saturating_sub guards a (theoretical) empty group.
    let total_duplicates: usize = groups.iter().map(|g| g.items.len().saturating_sub(1)).sum();
    rsx! {
        div { class: "duplicates-view",
            div { class: "duplicates-header",
                h3 { "Duplicates" }
                div { class: "duplicates-summary",
                    span { class: "text-muted",
                        "{total_groups} group(s), {total_duplicates} duplicate(s)"
                    }
                    button {
                        class: "btn btn-sm btn-secondary",
                        onclick: move |_| on_refresh.call(()),
                        "Refresh"
                    }
                }
            }
            if groups.is_empty() {
                div { class: "empty-state",
                    p { class: "text-muted", "No duplicate files found." }
                }
            }
            for group in groups.iter() {
                {
                    let hash = group.content_hash.clone();
                    let is_expanded = expanded_group.read().as_ref() == Some(&hash);
                    // Separate clone for the toggle closure below, which needs
                    // to own the hash independently of the rendered values.
                    let hash_for_toggle = hash.clone();
                    let item_count = group.items.len();
                    let first_name = group.items.first()
                        .map(|i| i.file_name.clone())
                        .unwrap_or_default();
                    let total_size: u64 = group.items.iter().map(|i| i.file_size).sum();
                    // Abbreviate long hashes for display.
                    // NOTE(review): `&hash[..12]` byte-slices; assumes content_hash
                    // is ASCII hex (it would panic on a multi-byte char boundary)
                    // -- confirm against the hashing backend.
                    let short_hash = if hash.len() > 12 {
                        format!("{}...", &hash[..12])
                    } else {
                        hash.clone()
                    };
                    rsx! {
                        div {
                            class: "duplicate-group",
                            key: "{hash}",
                            // Group header: clicking toggles expansion, collapsing
                            // any other group since only one hash is stored.
                            button {
                                class: "duplicate-group-header",
                                onclick: move |_| {
                                    let current = expanded_group.read().clone();
                                    if current.as_ref() == Some(&hash_for_toggle) {
                                        expanded_group.set(None);
                                    } else {
                                        expanded_group.set(Some(hash_for_toggle.clone()));
                                    }
                                },
                                span { class: "expand-icon",
                                    // Down-pointing vs right-pointing triangle.
                                    if is_expanded { "\u{25bc}" } else { "\u{25b6}" }
                                }
                                span { class: "group-name", "{first_name}" }
                                span { class: "group-badge", "{item_count} files" }
                                span { class: "group-size text-muted", "{format_size(total_size)}" }
                                span { class: "group-hash mono text-muted",
                                    "{short_hash}"
                                }
                            }
                            // Expanded: show items
                            if is_expanded {
                                div { class: "duplicate-items",
                                    for (idx, item) in group.items.iter().enumerate() {
                                        {
                                            let item_id = item.id.clone();
                                            // The first item in each group is the canonical
                                            // copy: it is badged "Keep" and not deletable.
                                            let is_first = idx == 0;
                                            let is_confirming = confirm_delete.read().as_ref() == Some(&item_id);
                                            let thumb_url = format!("{}/api/v1/media/{}/thumbnail", server_url, item.id);
                                            let has_thumb = item.has_thumbnail;
                                            rsx! {
                                                div {
                                                    class: if is_first { "duplicate-item duplicate-item-keep" } else { "duplicate-item" },
                                                    key: "{item_id}",
                                                    // Thumbnail
                                                    div { class: "dup-thumb",
                                                        if has_thumb {
                                                            img {
                                                                src: "{thumb_url}",
                                                                alt: "{item.file_name}",
                                                                class: "dup-thumb-img",
                                                            }
                                                        } else {
                                                            // Framed-picture glyph as a placeholder.
                                                            div { class: "dup-thumb-placeholder", "\u{1f5bc}" }
                                                        }
                                                    }
                                                    // Info
                                                    div { class: "dup-info",
                                                        div { class: "dup-filename", "{item.file_name}" }
                                                        div { class: "dup-path mono text-muted", "{item.path}" }
                                                        div { class: "dup-meta",
                                                            span { "{format_size(item.file_size)}" }
                                                            span { class: "text-muted", " | " }
                                                            span { "{format_timestamp(&item.created_at)}" }
                                                        }
                                                    }
                                                    // Actions
                                                    div { class: "dup-actions",
                                                        if is_first {
                                                            span { class: "keep-badge", "Keep" }
                                                        }
                                                        if is_confirming {
                                                            // Second step: clear the pending state
                                                            // before notifying the parent.
                                                            button {
                                                                class: "btn btn-sm btn-danger",
                                                                onclick: {
                                                                    let id = item_id.clone();
                                                                    move |_| {
                                                                        confirm_delete.set(None);
                                                                        on_delete.call(id.clone());
                                                                    }
                                                                },
                                                                "Confirm"
                                                            }
                                                            button {
                                                                class: "btn btn-sm btn-ghost",
                                                                onclick: move |_| confirm_delete.set(None),
                                                                "Cancel"
                                                            }
                                                        } else if !is_first {
                                                            // First step: arm confirmation for this item.
                                                            button {
                                                                class: "btn btn-sm btn-danger",
                                                                onclick: {
                                                                    let id = item_id.clone();
                                                                    move |_| confirm_delete.set(Some(id.clone()))
                                                                },
                                                                "Delete"
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

View file

@ -0,0 +1,236 @@
use dioxus::prelude::*;
/// How the viewer scales the image within the canvas.
///
/// Cycled by the toolbar button in the order
/// `FitScreen` -> `FitWidth` -> `Actual` -> `FitScreen`.
#[derive(Debug, Clone, Copy, PartialEq)]
enum FitMode {
    FitScreen,
    FitWidth,
    Actual,
}

impl FitMode {
    /// Returns the mode that follows `self` in the fixed three-step cycle.
    fn next(self) -> Self {
        match self {
            FitMode::FitScreen => FitMode::FitWidth,
            FitMode::FitWidth => FitMode::Actual,
            FitMode::Actual => FitMode::FitScreen,
        }
    }

    /// Short label shown on the fit-mode toolbar button.
    fn label(self) -> &'static str {
        match self {
            FitMode::Actual => "100%",
            FitMode::FitWidth => "Width",
            FitMode::FitScreen => "Fit",
        }
    }
}
/// Full-screen image viewer overlay with zoom, pan, fit modes, and optional
/// previous/next navigation.
///
/// Keyboard (overlay must be focused; `tabindex="0"` makes it focusable):
/// Escape closes, `+`/`=` zooms in, `-` zooms out, `0` resets zoom/pan and
/// fit mode, ArrowLeft/ArrowRight navigate when handlers are provided.
/// Mouse: wheel zooms, dragging pans (only while zoomed past 1.0).
///
/// Props:
/// - `src` / `alt`: image URL and alternate text.
/// - `on_close`: invoked by Escape or the close button.
/// - `on_prev` / `on_next`: optional navigation; arrow keys and the toolbar
///   arrows call them and reset zoom/pan for the new image.
#[component]
pub fn ImageViewer(
    src: String,
    alt: String,
    on_close: EventHandler<()>,
    #[props(default)] on_prev: Option<EventHandler<()>>,
    #[props(default)] on_next: Option<EventHandler<()>>,
) -> Element {
    // Zoom factor (1.0 = baseline for the current fit mode).
    let mut zoom = use_signal(|| 1.0f64);
    // Pan offset in CSS pixels, applied via a translate() transform.
    let mut offset_x = use_signal(|| 0.0f64);
    let mut offset_y = use_signal(|| 0.0f64);
    // Drag bookkeeping: drag_start_* store (pointer - offset) at mousedown
    // so each mousemove maps the pointer position straight to a new offset.
    let mut dragging = use_signal(|| false);
    let mut drag_start_x = use_signal(|| 0.0f64);
    let mut drag_start_y = use_signal(|| 0.0f64);
    let mut fit_mode = use_signal(|| FitMode::FitScreen);
    // Snapshot signal values once per render.
    let z = *zoom.read();
    let ox = *offset_x.read();
    let oy = *offset_y.read();
    let is_dragging = *dragging.read();
    // Truncates toward zero for the toolbar percentage display.
    let zoom_pct = (z * 100.0) as u32;
    let current_fit = *fit_mode.read();
    let transform = format!("translate({ox}px, {oy}px) scale({z})");
    // Grab cursor only when zoomed in, since panning is gated on zoom > 1.0.
    let cursor = if z > 1.0 {
        if is_dragging { "grabbing" } else { "grab" }
    } else {
        "default"
    };
    // Compute image style based on fit mode
    let img_style = match current_fit {
        FitMode::FitScreen => format!(
            "transform: {transform}; cursor: {cursor}; max-width: 100%; max-height: 100%; object-fit: contain;"
        ),
        FitMode::FitWidth => {
            format!("transform: {transform}; cursor: {cursor}; width: 100%; object-fit: contain;")
        }
        FitMode::Actual => format!("transform: {transform}; cursor: {cursor};"),
    };
    // Wheel: multiplicative zoom (1.1x per notch), clamped to [0.1, 20].
    let on_wheel = move |e: WheelEvent| {
        e.prevent_default();
        let delta = e.delta().strip_units();
        let factor = if delta.y < 0.0 { 1.1 } else { 1.0 / 1.1 };
        let new_zoom = (*zoom.read() * factor).clamp(0.1, 20.0);
        zoom.set(new_zoom);
    };
    // Start a pan only when zoomed in; record the drag anchor.
    let on_mouse_down = move |e: MouseEvent| {
        if *zoom.read() > 1.0 {
            dragging.set(true);
            let coords = e.client_coordinates();
            drag_start_x.set(coords.x - *offset_x.read());
            drag_start_y.set(coords.y - *offset_y.read());
        }
    };
    let on_mouse_move = move |e: MouseEvent| {
        if *dragging.read() {
            let coords = e.client_coordinates();
            offset_x.set(coords.x - *drag_start_x.read());
            offset_y.set(coords.y - *drag_start_y.read());
        }
    };
    // NOTE(review): mouseup is only handled on the canvas; releasing the
    // button outside it would leave `dragging` stuck true -- confirm.
    let on_mouse_up = move |_: MouseEvent| {
        dragging.set(false);
    };
    // Keyboard shortcuts; navigation resets zoom/pan for the next image.
    let on_keydown = {
        move |evt: KeyboardEvent| match evt.key() {
            Key::Escape => on_close.call(()),
            Key::Character(ref c) if c == "+" || c == "=" => {
                let new_zoom = (*zoom.read() * 1.2).min(20.0);
                zoom.set(new_zoom);
            }
            Key::Character(ref c) if c == "-" => {
                let new_zoom = (*zoom.read() / 1.2).max(0.1);
                zoom.set(new_zoom);
            }
            Key::Character(ref c) if c == "0" => {
                // Full reset: zoom, pan, and fit mode.
                zoom.set(1.0);
                offset_x.set(0.0);
                offset_y.set(0.0);
                fit_mode.set(FitMode::FitScreen);
            }
            Key::ArrowLeft => {
                if let Some(ref prev) = on_prev {
                    prev.call(());
                    zoom.set(1.0);
                    offset_x.set(0.0);
                    offset_y.set(0.0);
                }
            }
            Key::ArrowRight => {
                if let Some(ref next) = on_next {
                    next.call(());
                    zoom.set(1.0);
                    offset_x.set(0.0);
                    offset_y.set(0.0);
                }
            }
            _ => {}
        }
    };
    // Toolbar zoom buttons: fixed 1.2x steps within the same clamp range.
    let zoom_in = move |_| {
        let new_zoom = (*zoom.read() * 1.2).min(20.0);
        zoom.set(new_zoom);
    };
    let zoom_out = move |_| {
        let new_zoom = (*zoom.read() / 1.2).max(0.1);
        zoom.set(new_zoom);
    };
    // Cycle fit mode and reset zoom/pan so the new mode starts clean.
    let cycle_fit = move |_| {
        let next = fit_mode.read().next();
        fit_mode.set(next);
        zoom.set(1.0);
        offset_x.set(0.0);
        offset_y.set(0.0);
    };
    let has_prev = on_prev.is_some();
    let has_next = on_next.is_some();
    rsx! {
        div {
            class: "image-viewer-overlay",
            tabindex: "0",
            onkeydown: on_keydown,
            // Toolbar
            div { class: "image-viewer-toolbar",
                div { class: "image-viewer-toolbar-left",
                    if has_prev {
                        button {
                            class: "iv-btn",
                            onclick: move |_| {
                                if let Some(ref prev) = on_prev {
                                    prev.call(());
                                    zoom.set(1.0);
                                    offset_x.set(0.0);
                                    offset_y.set(0.0);
                                }
                            },
                            title: "Previous",
                            // Left-pointing triangle.
                            "\u{25c0}"
                        }
                    }
                    if has_next {
                        button {
                            class: "iv-btn",
                            onclick: move |_| {
                                if let Some(ref next) = on_next {
                                    next.call(());
                                    zoom.set(1.0);
                                    offset_x.set(0.0);
                                    offset_y.set(0.0);
                                }
                            },
                            title: "Next",
                            // Right-pointing triangle.
                            "\u{25b6}"
                        }
                    }
                }
                div { class: "image-viewer-toolbar-center",
                    button { class: "iv-btn", onclick: cycle_fit, title: "Cycle fit mode",
                        "{current_fit.label()}"
                    }
                    // U+2212 minus sign (not hyphen) for visual symmetry with "+".
                    button { class: "iv-btn", onclick: zoom_out, title: "Zoom out", "\u{2212}" }
                    span { class: "iv-zoom-label", "{zoom_pct}%" }
                    button { class: "iv-btn", onclick: zoom_in, title: "Zoom in", "+" }
                }
                div { class: "image-viewer-toolbar-right",
                    button {
                        class: "iv-btn iv-close",
                        onclick: move |_| on_close.call(()),
                        title: "Close",
                        // Multiplication X glyph.
                        "\u{2715}"
                    }
                }
            }
            // Image canvas
            div {
                class: "image-viewer-canvas",
                onwheel: on_wheel,
                onmousedown: on_mouse_down,
                onmousemove: on_mouse_move,
                onmouseup: on_mouse_up,
                onclick: move |e: MouseEvent| {
                    // Close on background click (not on image)
                    e.stop_propagation();
                },
                img {
                    src: "{src}",
                    alt: "{alt}",
                    style: "{img_style}",
                    // Disable native browser image dragging so it doesn't
                    // fight the custom pan handling.
                    draggable: "false",
                    onclick: move |e: MouseEvent| e.stop_propagation(),
                }
            }
        }
    }
}

View file

@ -0,0 +1,717 @@
use std::collections::HashSet;
use dioxus::prelude::*;
use super::utils::{format_size, type_badge_class};
use crate::client::{
CollectionResponse, DirectoryPreviewFile, ImportEvent, ScanStatusResponse, TagResponse,
};
/// Import event for batch: (paths, tag_ids, new_tags, collection_id)
pub type BatchImportEvent = (Vec<String>, Vec<String>, Vec<String>, Option<String>);
#[component]
pub fn Import(
tags: Vec<TagResponse>,
collections: Vec<CollectionResponse>,
on_import_file: EventHandler<ImportEvent>,
on_import_directory: EventHandler<ImportEvent>,
on_import_batch: EventHandler<BatchImportEvent>,
on_scan: EventHandler<()>,
on_preview_directory: EventHandler<(String, bool)>,
preview_files: Vec<DirectoryPreviewFile>,
preview_total_size: u64,
scan_progress: Option<ScanStatusResponse>,
) -> Element {
let mut import_mode = use_signal(|| 0usize);
let mut file_path = use_signal(String::new);
let mut dir_path = use_signal(String::new);
let selected_tags = use_signal(Vec::<String>::new);
let new_tags_input = use_signal(String::new);
let selected_collection = use_signal(|| Option::<String>::None);
// Recursive toggle for directory preview
let mut recursive = use_signal(|| true);
// Filter state for directory preview
let mut filter_types = use_signal(|| vec![true, true, true, true, true, true]); // audio, video, image, document, text, other
let mut filter_min_size = use_signal(|| 0u64);
let mut filter_max_size = use_signal(|| 0u64); // 0 means no limit
// File selection state
let mut selected_file_paths = use_signal(HashSet::<String>::new);
let current_mode = *import_mode.read();
rsx! {
// Tab bar
div { class: "import-tabs",
button {
class: if current_mode == 0 { "import-tab active" } else { "import-tab" },
onclick: move |_| import_mode.set(0),
"Single File"
}
button {
class: if current_mode == 1 { "import-tab active" } else { "import-tab" },
onclick: move |_| import_mode.set(1),
"Directory"
}
button {
class: if current_mode == 2 { "import-tab active" } else { "import-tab" },
onclick: move |_| import_mode.set(2),
"Scan Roots"
}
}
// Mode 0: Single File
if current_mode == 0 {
div { class: "card mb-16",
div { class: "card-header",
h3 { class: "card-title", "Import Single File" }
}
div { class: "form-group",
label { class: "form-label", "File Path" }
div { class: "form-row",
input {
r#type: "text",
placeholder: "/path/to/file...",
value: "{file_path}",
oninput: move |e| file_path.set(e.value()),
onkeypress: {
let mut file_path = file_path;
let mut selected_tags = selected_tags;
let mut new_tags_input = new_tags_input;
let mut selected_collection = selected_collection;
move |e: KeyboardEvent| {
if e.key() == Key::Enter {
let path = file_path.read().clone();
if !path.is_empty() {
let tag_ids = selected_tags.read().clone();
let new_tags = parse_new_tags(&new_tags_input.read());
let col_id = selected_collection.read().clone();
on_import_file.call((path, tag_ids, new_tags, col_id));
file_path.set(String::new());
selected_tags.set(Vec::new());
new_tags_input.set(String::new());
selected_collection.set(None);
}
}
}
},
}
button {
class: "btn btn-secondary",
onclick: move |_| {
let mut file_path = file_path;
spawn(async move {
if let Some(handle) = rfd::AsyncFileDialog::new().pick_file().await {
file_path.set(handle.path().to_string_lossy().to_string());
}
});
},
"Browse..."
}
button {
class: "btn btn-primary",
onclick: {
let mut file_path = file_path;
let mut selected_tags = selected_tags;
let mut new_tags_input = new_tags_input;
let mut selected_collection = selected_collection;
move |_| {
let path = file_path.read().clone();
if !path.is_empty() {
let tag_ids = selected_tags.read().clone();
let new_tags = parse_new_tags(&new_tags_input.read());
let col_id = selected_collection.read().clone();
on_import_file.call((path, tag_ids, new_tags, col_id));
file_path.set(String::new());
selected_tags.set(Vec::new());
new_tags_input.set(String::new());
selected_collection.set(None);
}
}
},
"Import"
}
}
}
}
ImportOptions {
tags: tags.clone(),
collections: collections.clone(),
selected_tags: selected_tags,
new_tags_input: new_tags_input,
selected_collection: selected_collection,
}
}
// Mode 1: Directory
if current_mode == 1 {
div { class: "card mb-16",
div { class: "card-header",
h3 { class: "card-title", "Import Directory" }
}
div { class: "form-group",
label { class: "form-label", "Directory Path" }
div { class: "form-row",
input {
r#type: "text",
placeholder: "/path/to/directory...",
value: "{dir_path}",
oninput: move |e| dir_path.set(e.value()),
onkeypress: {
let dir_path = dir_path;
let recursive = recursive;
move |e: KeyboardEvent| {
if e.key() == Key::Enter {
let path = dir_path.read().clone();
if !path.is_empty() {
on_preview_directory.call((path, *recursive.read()));
}
}
}
},
}
button {
class: "btn btn-secondary",
onclick: move |_| {
let mut dir_path = dir_path;
let recursive = recursive;
spawn(async move {
if let Some(handle) = rfd::AsyncFileDialog::new().pick_folder().await {
let path = handle.path().to_string_lossy().to_string();
dir_path.set(path.clone());
on_preview_directory.call((path, *recursive.read()));
}
});
},
"Browse..."
}
button {
class: "btn btn-secondary",
onclick: {
let dir_path = dir_path;
let recursive = recursive;
move |_| {
let path = dir_path.read().clone();
if !path.is_empty() {
on_preview_directory.call((path, *recursive.read()));
}
}
},
"Preview"
}
}
}
// Recursive toggle
div { class: "form-group",
label { class: "form-row",
input {
r#type: "checkbox",
checked: *recursive.read(),
onchange: move |_| recursive.toggle(),
}
span { style: "margin-left: 6px;", "Recursive (include subdirectories)" }
}
}
}
// Preview results
if !preview_files.is_empty() {
{
// Read filter signals once before the loop to avoid per-item reads
let types_snapshot = filter_types.read().clone();
let min = *filter_min_size.read();
let max = *filter_max_size.read();
let filtered: Vec<&DirectoryPreviewFile> = preview_files.iter().filter(|f| {
let type_idx = match type_badge_class(&f.media_type) {
"type-audio" => 0,
"type-video" => 1,
"type-image" => 2,
"type-document" => 3,
"type-text" => 4,
_ => 5,
};
if !types_snapshot[type_idx] { return false; }
if min > 0 && f.file_size < min { return false; }
if max > 0 && f.file_size > max { return false; }
true
}).collect();
let filtered_count = filtered.len();
let total_count = preview_files.len();
// Read selection once for display
let selection = selected_file_paths.read().clone();
let selected_count = selection.len();
let all_filtered_selected = !filtered.is_empty()
&& filtered.iter().all(|f| selection.contains(&f.path));
let filtered_paths: Vec<String> = filtered.iter().map(|f| f.path.clone()).collect();
rsx! {
div { class: "card mb-16",
div { class: "card-header",
h3 { class: "card-title", "Preview" }
p { class: "text-muted text-sm",
"{filtered_count} of {total_count} files shown, {format_size(preview_total_size)} total"
}
}
// Filter bar
div { class: "filter-bar",
div { class: "flex-row mb-8",
label {
input {
r#type: "checkbox",
checked: types_snapshot[0],
onchange: move |_| {
let mut types = filter_types.read().clone();
types[0] = !types[0];
filter_types.set(types);
},
}
" Audio"
}
label {
input {
r#type: "checkbox",
checked: types_snapshot[1],
onchange: move |_| {
let mut types = filter_types.read().clone();
types[1] = !types[1];
filter_types.set(types);
},
}
" Video"
}
label {
input {
r#type: "checkbox",
checked: types_snapshot[2],
onchange: move |_| {
let mut types = filter_types.read().clone();
types[2] = !types[2];
filter_types.set(types);
},
}
" Image"
}
label {
input {
r#type: "checkbox",
checked: types_snapshot[3],
onchange: move |_| {
let mut types = filter_types.read().clone();
types[3] = !types[3];
filter_types.set(types);
},
}
" Document"
}
label {
input {
r#type: "checkbox",
checked: types_snapshot[4],
onchange: move |_| {
let mut types = filter_types.read().clone();
types[4] = !types[4];
filter_types.set(types);
},
}
" Text"
}
label {
input {
r#type: "checkbox",
checked: types_snapshot[5],
onchange: move |_| {
let mut types = filter_types.read().clone();
types[5] = !types[5];
filter_types.set(types);
},
}
" Other"
}
}
div { class: "flex-row",
label { class: "form-label", "Min size (MB): " }
input {
r#type: "number",
value: "{min / (1024 * 1024)}",
oninput: move |e| {
if let Ok(mb) = e.value().parse::<u64>() {
filter_min_size.set(mb * 1024 * 1024);
} else {
filter_min_size.set(0);
}
},
}
label { class: "form-label", "Max size (MB): " }
input {
r#type: "number",
value: "{max / (1024 * 1024)}",
oninput: move |e| {
if let Ok(mb) = e.value().parse::<u64>() {
filter_max_size.set(mb * 1024 * 1024);
} else {
filter_max_size.set(0);
}
},
}
}
}
// Selection toolbar
div { class: "flex-row mb-8", style: "gap: 8px; align-items: center; padding: 0 8px;",
button {
class: "btn btn-sm btn-secondary",
onclick: {
let filtered_paths = filtered_paths.clone();
move |_| {
let mut sel = selected_file_paths.read().clone();
for p in &filtered_paths {
sel.insert(p.clone());
}
selected_file_paths.set(sel);
}
},
"Select All ({filtered_count})"
}
button {
class: "btn btn-sm btn-ghost",
onclick: move |_| {
selected_file_paths.set(HashSet::new());
},
"Deselect All"
}
if selected_count > 0 {
span { class: "text-muted text-sm",
"{selected_count} files selected"
}
}
}
div { style: "max-height: 400px; overflow-y: auto;",
table { class: "data-table",
thead {
tr {
th { style: "width: 32px;",
input {
r#type: "checkbox",
checked: all_filtered_selected,
onclick: {
let filtered_paths = filtered_paths.clone();
move |_| {
if all_filtered_selected {
// Deselect all filtered
let filtered_set: HashSet<String> = filtered_paths.iter().cloned().collect();
let sel = selected_file_paths.read().clone();
let new_sel: HashSet<String> = sel.difference(&filtered_set).cloned().collect();
selected_file_paths.set(new_sel);
} else {
// Select all filtered
let mut sel = selected_file_paths.read().clone();
for p in &filtered_paths {
sel.insert(p.clone());
}
selected_file_paths.set(sel);
}
}
},
}
}
th { "File Name" }
th { "Type" }
th { "Size" }
}
}
tbody {
for file in filtered.iter() {
{
let size = format_size(file.file_size);
let badge_class = type_badge_class(&file.media_type);
let is_selected = selection.contains(&file.path);
let file_path_clone = file.path.clone();
rsx! {
tr {
key: "{file.path}",
class: if is_selected { "row-selected" } else { "" },
td {
input {
r#type: "checkbox",
checked: is_selected,
onclick: {
let path = file_path_clone.clone();
move |_| {
let mut sel = selected_file_paths.read().clone();
if sel.contains(&path) {
sel.remove(&path);
} else {
sel.insert(path.clone());
}
selected_file_paths.set(sel);
}
},
}
}
td { "{file.file_name}" }
td {
span { class: "type-badge {badge_class}", "{file.media_type}" }
}
td { "{size}" }
}
}
}
}
}
}
}
}
}
}
}
ImportOptions {
tags: tags.clone(),
collections: collections.clone(),
selected_tags: selected_tags,
new_tags_input: new_tags_input,
selected_collection: selected_collection,
}
div { class: "flex-row mb-16", style: "gap: 8px;",
// Import selected files only (batch import)
{
let sel_count = selected_file_paths.read().len();
let has_selected = sel_count > 0;
rsx! {
button {
class: "btn btn-primary",
disabled: !has_selected,
onclick: {
let mut selected_file_paths = selected_file_paths;
let mut selected_tags = selected_tags;
let mut new_tags_input = new_tags_input;
let mut selected_collection = selected_collection;
move |_| {
let paths: Vec<String> = selected_file_paths.read().iter().cloned().collect();
if !paths.is_empty() {
let tag_ids = selected_tags.read().clone();
let new_tags = parse_new_tags(&new_tags_input.read());
let col_id = selected_collection.read().clone();
on_import_batch.call((paths, tag_ids, new_tags, col_id));
selected_file_paths.set(HashSet::new());
selected_tags.set(Vec::new());
new_tags_input.set(String::new());
selected_collection.set(None);
}
}
},
if has_selected {
"Import Selected ({sel_count})"
} else {
"Import Selected"
}
}
}
}
// Import entire directory
button {
class: "btn btn-secondary",
onclick: {
let mut dir_path = dir_path;
let mut selected_tags = selected_tags;
let mut new_tags_input = new_tags_input;
let mut selected_collection = selected_collection;
let mut selected_file_paths = selected_file_paths;
move |_| {
let path = dir_path.read().clone();
if !path.is_empty() {
let tag_ids = selected_tags.read().clone();
let new_tags = parse_new_tags(&new_tags_input.read());
let col_id = selected_collection.read().clone();
on_import_directory.call((path, tag_ids, new_tags, col_id));
dir_path.set(String::new());
selected_tags.set(Vec::new());
new_tags_input.set(String::new());
selected_collection.set(None);
selected_file_paths.set(HashSet::new());
}
}
},
"Import Entire Directory"
}
}
}
// Mode 2: Scan Roots
if current_mode == 2 {
div { class: "card mb-16",
div { class: "card-header",
h3 { class: "card-title", "Scan Root Directories" }
}
div { class: "empty-state",
p { class: "empty-subtitle",
"Scan all configured root directories for media files. "
"This will discover and import any new files found in your root paths."
}
}
div { class: "mb-16", style: "text-align: center;",
button {
class: "btn btn-primary",
onclick: move |_| on_scan.call(()),
"Scan All Roots"
}
}
if let Some(ref progress) = scan_progress {
{
let pct = (progress.files_processed * 100).checked_div(progress.files_found).unwrap_or(0);
rsx! {
div { class: "mb-16",
div { class: "progress-bar",
div {
class: "progress-fill",
style: "width: {pct}%;",
}
}
p { class: "text-muted text-sm",
"{progress.files_processed} / {progress.files_found} files processed"
}
if progress.error_count > 0 {
p { class: "text-muted text-sm",
"{progress.error_count} errors"
}
}
if progress.scanning {
p { class: "text-muted text-sm", "Scanning..." }
} else {
p { class: "text-muted text-sm", "Scan complete" }
}
}
}
}
}
}
}
}
}
/// Shared "Import Options" panel used by the import view: toggleable badges
/// for existing tags, a free-text field for creating new tags, and an
/// optional target collection. All choices live in caller-owned signals so
/// the parent can read them when an import is actually triggered.
#[component]
fn ImportOptions(
    tags: Vec<TagResponse>,
    collections: Vec<CollectionResponse>,
    // IDs of existing tags currently toggled on.
    selected_tags: Signal<Vec<String>>,
    // Raw comma-separated names of tags to create on import.
    new_tags_input: Signal<String>,
    // Target collection ID, or `None` for "no collection".
    selected_collection: Signal<Option<String>>,
) -> Element {
    // Rebind props locally; only `new_tags_input` is written directly from
    // this scope, the others are re-captured by the nested closures below.
    let selected_tags = selected_tags;
    let mut new_tags_input = new_tags_input;
    let selected_collection = selected_collection;
    rsx! {
        div { class: "card mb-16",
            div { class: "card-header",
                h4 { class: "card-title", "Import Options" }
            }
            // Existing tags rendered as clickable badges that toggle
            // membership in `selected_tags`.
            div { class: "form-group",
                label { class: "form-label", "Tags" }
                if tags.is_empty() {
                    p { class: "text-muted text-sm", "No tags available. Create tags from the Tags page." }
                } else {
                    div { class: "tag-list",
                        for tag in tags.iter() {
                            {
                                let tag_id = tag.id.clone();
                                let tag_name = tag.name.clone();
                                let is_selected = selected_tags.read().contains(&tag_id);
                                let badge_class = if is_selected {
                                    "tag-badge selected"
                                } else {
                                    "tag-badge"
                                };
                                rsx! {
                                    span {
                                        class: "{badge_class}",
                                        onclick: {
                                            let tag_id = tag_id.clone();
                                            let mut selected_tags = selected_tags;
                                            move |_| {
                                                // Toggle: remove if present, append otherwise.
                                                let mut current = selected_tags.read().clone();
                                                if let Some(pos) = current.iter().position(|t| t == &tag_id) {
                                                    current.remove(pos);
                                                } else {
                                                    current.push(tag_id.clone());
                                                }
                                                selected_tags.set(current);
                                            }
                                        },
                                        "{tag_name}"
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // Free-text entry for tags that don't exist yet; parsed by
            // `parse_new_tags` when the import is submitted.
            div { class: "form-group",
                label { class: "form-label", "Create New Tags" }
                input {
                    r#type: "text",
                    placeholder: "tag1, tag2, tag3...",
                    value: "{new_tags_input}",
                    oninput: move |e| new_tags_input.set(e.value()),
                }
                p { class: "text-muted text-sm", "Comma-separated. Will be created if they don't exist." }
            }
            // Optional collection; the empty option maps to `None`.
            div { class: "form-group",
                label { class: "form-label", "Add to Collection" }
                select {
                    value: "{selected_collection.read().clone().unwrap_or_default()}",
                    onchange: {
                        let mut selected_collection = selected_collection;
                        move |e: Event<FormData>| {
                            let val = e.value();
                            if val.is_empty() {
                                selected_collection.set(None);
                            } else {
                                selected_collection.set(Some(val));
                            }
                        }
                    },
                    option { value: "", "None" }
                    for col in collections.iter() {
                        {
                            let col_id = col.id.clone();
                            let col_name = col.name.clone();
                            rsx! {
                                option { value: "{col_id}", "{col_name}" }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// Split a comma-separated tag string into trimmed, non-empty tag names.
///
/// Whitespace around each name is removed and blank entries (including those
/// produced by consecutive or trailing commas) are dropped.
fn parse_new_tags(input: &str) -> Vec<String> {
    let mut names = Vec::new();
    for raw in input.split(',') {
        let trimmed = raw.trim();
        if !trimmed.is_empty() {
            names.push(trimmed.to_string());
        }
    }
    names
}

View file

@ -0,0 +1,874 @@
use dioxus::prelude::*;
use super::pagination::Pagination as PaginationControls;
use super::utils::{format_size, media_category, type_badge_class, type_icon};
use crate::client::{CollectionResponse, MediaResponse, TagResponse};
/// How the library renders its media items: as a thumbnail grid or a
/// sortable table.
// `Eq` added alongside the derived `PartialEq`: the enum is fieldless, so
// equality is total (clippy: derive_partial_eq_without_eq).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ViewMode {
    Grid,
    Table,
}
/// The set of type filter categories available to the user. The first
/// entry, "all", disables filtering; the remaining values are compared
/// against `media_category(&media_type)` when the filter is applied.
const TYPE_FILTERS: &[&str] = &["all", "audio", "video", "image", "document", "text"];
/// Human-readable label for a type filter value. Unknown filter values are
/// returned unchanged.
fn filter_label(f: &str) -> &str {
    const LABELS: &[(&str, &str)] = &[
        ("all", "All"),
        ("audio", "Audio"),
        ("video", "Video"),
        ("image", "Image"),
        ("document", "Document"),
        ("text", "Text"),
    ];
    LABELS
        .iter()
        .find(|(value, _)| *value == f)
        .map_or(f, |(_, label)| *label)
}
/// Parse the current sort field string into (column, direction) so table
/// headers can show the correct arrow indicator. Strings without a known
/// `_asc`/`_desc` suffix are treated as ascending on the whole string.
fn parse_sort(sort: &str) -> (&str, &str) {
    match (sort.strip_suffix("_asc"), sort.strip_suffix("_desc")) {
        (Some(column), _) => (column, "asc"),
        (None, Some(column)) => (column, "desc"),
        (None, None) => (sort, "asc"),
    }
}
/// Return the sort arrow indicator (▲/▼) for a table column header. Returns
/// an empty string when the column is not the active sort column.
fn sort_arrow(current_sort: &str, column: &str) -> &'static str {
    let (active_column, direction) = parse_sort(current_sort);
    match (active_column == column, direction) {
        (true, "asc") => " \u{25b2}",
        (true, _) => " \u{25bc}",
        (false, _) => "",
    }
}
/// Compute the next sort value when a table column header is clicked.
/// Clicking the already-active column flips its direction; clicking a
/// different column starts it in ascending order.
fn next_sort(current_sort: &str, column: &str) -> String {
    let (active_column, direction) = parse_sort(current_sort);
    let next_direction = if active_column == column && direction == "asc" {
        "desc"
    } else {
        "asc"
    };
    format!("{column}_{next_direction}")
}
/// Main library browser. Renders the media list as a thumbnail grid or a
/// sortable table, with client-side type filtering, multi-select (checkbox
/// toggles, shift+click ranges, select-all on page, and optional cross-page
/// select-all), and batch tag / collection / delete actions, each behind a
/// confirmation or picker modal. All data mutations are delegated to the
/// parent through the `on_*` event handlers; this component only manages
/// view-local UI state.
#[component]
pub fn Library(
    media: Vec<MediaResponse>,
    tags: Vec<TagResponse>,
    collections: Vec<CollectionResponse>,
    total_count: u64,
    current_page: u64,
    page_size: u64,
    // Base URL used to build thumbnail endpoints.
    server_url: String,
    on_select: EventHandler<String>,
    on_delete: EventHandler<String>,
    on_batch_delete: EventHandler<Vec<String>>,
    on_batch_tag: EventHandler<(Vec<String>, Vec<String>)>,
    on_batch_collection: EventHandler<(Vec<String>, String)>,
    on_page_change: EventHandler<u64>,
    on_page_size_change: EventHandler<u64>,
    on_sort_change: EventHandler<String>,
    // Optional: the parent resolves the IDs of ALL items across pages and
    // invokes the handler we pass it with that list (used by the
    // "select all N items" banner).
    #[props(default)] on_select_all_global: Option<EventHandler<EventHandler<Vec<String>>>>,
    #[props(default)] on_delete_all: Option<EventHandler<()>>,
) -> Element {
    // View-local UI state: selection, modal visibility, and display options.
    let mut selected_ids = use_signal(Vec::<String>::new);
    let mut select_all = use_signal(|| false);
    let mut confirm_delete = use_signal(|| Option::<String>::None);
    let mut confirm_batch_delete = use_signal(|| false);
    let mut confirm_delete_all = use_signal(|| false);
    let mut show_batch_tag = use_signal(|| false);
    let mut batch_tag_selection = use_signal(Vec::<String>::new);
    let mut show_batch_collection = use_signal(|| false);
    let mut batch_collection_id = use_signal(String::new);
    let mut view_mode = use_signal(|| ViewMode::Grid);
    let mut sort_field = use_signal(|| "created_at_desc".to_string());
    let mut type_filter = use_signal(|| "all".to_string());
    // Track the last-clicked index for shift+click range selection.
    let mut last_click_index = use_signal(|| Option::<usize>::None);
    // True when all items across all pages have been selected.
    let mut global_all_selected = use_signal(|| false);
    if media.is_empty() && total_count == 0 {
        return rsx! {
            div { class: "empty-state",
                h3 { class: "empty-title", "No media found" }
                p { class: "empty-subtitle", "Import files or scan your root directories to get started." }
            }
        };
    }
    // Apply client-side type filter.
    let active_filter = type_filter.read().clone();
    let filtered_media: Vec<MediaResponse> = if active_filter == "all" {
        media.clone()
    } else {
        media
            .iter()
            .filter(|m| media_category(&m.media_type) == active_filter.as_str())
            .cloned()
            .collect()
    };
    let filtered_count = filtered_media.len();
    let all_ids: Vec<String> = filtered_media.iter().map(|m| m.id.clone()).collect();
    // Read selection once to avoid repeated signal reads in loops
    let current_selection: Vec<String> = selected_ids.read().clone();
    let selection_count = current_selection.len();
    let has_selection = selection_count > 0;
    let total_pages = total_count.div_ceil(page_size);
    // Toggles between "everything on this page selected" and "nothing
    // selected"; always drops any cross-page selection.
    let toggle_select_all = {
        let all_ids = all_ids.clone();
        move |_| {
            let new_val = !*select_all.read();
            select_all.set(new_val);
            global_all_selected.set(false);
            if new_val {
                selected_ids.set(all_ids.clone());
            } else {
                selected_ids.set(Vec::new());
            }
        }
    };
    let is_all_selected = *select_all.read();
    let current_mode = *view_mode.read();
    let current_sort = sort_field.read().clone();
    rsx! {
        // Confirmation dialog for single delete
        if confirm_delete.read().is_some() {
            div { class: "modal-overlay",
                onclick: move |_| confirm_delete.set(None),
                div { class: "modal",
                    // Keep clicks inside the modal from closing it via the overlay.
                    onclick: move |e: Event<MouseData>| e.stop_propagation(),
                    h3 { class: "modal-title", "Confirm Delete" }
                    p { class: "modal-body", "Are you sure you want to delete this media item? This cannot be undone." }
                    div { class: "modal-actions",
                        button {
                            class: "btn btn-ghost",
                            onclick: move |_| confirm_delete.set(None),
                            "Cancel"
                        }
                        button {
                            class: "btn btn-danger",
                            onclick: move |_| {
                                if let Some(id) = confirm_delete.read().clone() {
                                    on_delete.call(id);
                                }
                                confirm_delete.set(None);
                            },
                            "Delete"
                        }
                    }
                }
            }
        }
        // Confirmation dialog for batch delete
        if *confirm_batch_delete.read() {
            div { class: "modal-overlay",
                onclick: move |_| confirm_batch_delete.set(false),
                div { class: "modal",
                    onclick: move |e: Event<MouseData>| e.stop_propagation(),
                    h3 { class: "modal-title", "Confirm Batch Delete" }
                    p { class: "modal-body",
                        "Are you sure you want to delete {selection_count} selected items? This cannot be undone."
                    }
                    div { class: "modal-actions",
                        button {
                            class: "btn btn-ghost",
                            onclick: move |_| confirm_batch_delete.set(false),
                            "Cancel"
                        }
                        button {
                            class: "btn btn-danger",
                            onclick: move |_| {
                                let ids = selected_ids.read().clone();
                                on_batch_delete.call(ids);
                                selected_ids.set(Vec::new());
                                select_all.set(false);
                                confirm_batch_delete.set(false);
                            },
                            "Delete All"
                        }
                    }
                }
            }
        }
        // Confirmation dialog for delete all
        if *confirm_delete_all.read() {
            div { class: "modal-overlay",
                onclick: move |_| confirm_delete_all.set(false),
                div { class: "modal",
                    onclick: move |e: Event<MouseData>| e.stop_propagation(),
                    h3 { class: "modal-title", "Delete All Media" }
                    p { class: "modal-body",
                        "Are you sure you want to delete ALL {total_count} items? This cannot be undone."
                    }
                    div { class: "modal-actions",
                        button {
                            class: "btn btn-ghost",
                            onclick: move |_| confirm_delete_all.set(false),
                            "Cancel"
                        }
                        button {
                            class: "btn btn-danger",
                            onclick: move |_| {
                                if let Some(handler) = on_delete_all {
                                    handler.call(());
                                }
                                selected_ids.set(Vec::new());
                                select_all.set(false);
                                global_all_selected.set(false);
                                confirm_delete_all.set(false);
                            },
                            "Delete Everything"
                        }
                    }
                }
            }
        }
        // Batch tag dialog
        if *show_batch_tag.read() {
            div { class: "modal-overlay",
                onclick: move |_| {
                    show_batch_tag.set(false);
                    batch_tag_selection.set(Vec::new());
                },
                div { class: "modal",
                    onclick: move |e: Event<MouseData>| e.stop_propagation(),
                    h3 { class: "modal-title", "Tag Selected Items" }
                    p { class: "modal-body text-muted text-sm",
                        "Select tags to apply to {selection_count} items:"
                    }
                    if tags.is_empty() {
                        p { class: "text-muted", "No tags available. Create tags first." }
                    } else {
                        div { class: "tag-list", style: "margin: 12px 0;",
                            for tag in tags.iter() {
                                {
                                    let tag_id = tag.id.clone();
                                    let tag_name = tag.name.clone();
                                    let is_selected = batch_tag_selection.read().contains(&tag_id);
                                    let badge_class = if is_selected { "tag-badge selected" } else { "tag-badge" };
                                    rsx! {
                                        span {
                                            class: "{badge_class}",
                                            onclick: {
                                                let tag_id = tag_id.clone();
                                                move |_| {
                                                    // Toggle tag membership in the pending batch selection.
                                                    let mut current = batch_tag_selection.read().clone();
                                                    if let Some(pos) = current.iter().position(|t| t == &tag_id) {
                                                        current.remove(pos);
                                                    } else {
                                                        current.push(tag_id.clone());
                                                    }
                                                    batch_tag_selection.set(current);
                                                }
                                            },
                                            "{tag_name}"
                                        }
                                    }
                                }
                            }
                        }
                    }
                    div { class: "modal-actions",
                        button {
                            class: "btn btn-ghost",
                            onclick: move |_| {
                                show_batch_tag.set(false);
                                batch_tag_selection.set(Vec::new());
                            },
                            "Cancel"
                        }
                        button {
                            class: "btn btn-primary",
                            onclick: move |_| {
                                let ids = selected_ids.read().clone();
                                let tag_ids = batch_tag_selection.read().clone();
                                if !tag_ids.is_empty() {
                                    on_batch_tag.call((ids, tag_ids));
                                    selected_ids.set(Vec::new());
                                    select_all.set(false);
                                }
                                show_batch_tag.set(false);
                                batch_tag_selection.set(Vec::new());
                            },
                            "Apply Tags"
                        }
                    }
                }
            }
        }
        // Batch collection dialog
        if *show_batch_collection.read() {
            div { class: "modal-overlay",
                onclick: move |_| {
                    show_batch_collection.set(false);
                    batch_collection_id.set(String::new());
                },
                div { class: "modal",
                    onclick: move |e: Event<MouseData>| e.stop_propagation(),
                    h3 { class: "modal-title", "Add to Collection" }
                    p { class: "modal-body text-muted text-sm",
                        "Choose a collection for {selection_count} items:"
                    }
                    if collections.is_empty() {
                        p { class: "text-muted", "No collections available. Create one first." }
                    } else {
                        select {
                            style: "width: 100%; margin: 12px 0;",
                            value: "{batch_collection_id}",
                            onchange: move |e: Event<FormData>| batch_collection_id.set(e.value()),
                            option { value: "", "Select a collection..." }
                            for col in collections.iter() {
                                option {
                                    key: "{col.id}",
                                    value: "{col.id}",
                                    "{col.name}"
                                }
                            }
                        }
                    }
                    div { class: "modal-actions",
                        button {
                            class: "btn btn-ghost",
                            onclick: move |_| {
                                show_batch_collection.set(false);
                                batch_collection_id.set(String::new());
                            },
                            "Cancel"
                        }
                        button {
                            class: "btn btn-primary",
                            onclick: move |_| {
                                let ids = selected_ids.read().clone();
                                let col_id = batch_collection_id.read().clone();
                                if !col_id.is_empty() {
                                    on_batch_collection.call((ids, col_id));
                                    selected_ids.set(Vec::new());
                                    select_all.set(false);
                                }
                                show_batch_collection.set(false);
                                batch_collection_id.set(String::new());
                            },
                            "Add to Collection"
                        }
                    }
                }
            }
        }
        // Toolbar: view toggle, sort, batch actions
        div { class: "library-toolbar",
            div { class: "toolbar-left",
                // View mode toggle
                div { class: "view-toggle",
                    button {
                        class: if current_mode == ViewMode::Grid { "view-btn active" } else { "view-btn" },
                        onclick: move |_| view_mode.set(ViewMode::Grid),
                        title: "Grid view",
                        "\u{25a6}"
                    }
                    button {
                        class: if current_mode == ViewMode::Table { "view-btn active" } else { "view-btn" },
                        onclick: move |_| view_mode.set(ViewMode::Table),
                        title: "Table view",
                        "\u{2630}"
                    }
                }
                // Sort selector
                div { class: "sort-control",
                    select {
                        value: "{sort_field}",
                        onchange: move |e: Event<FormData>| {
                            let val = e.value();
                            sort_field.set(val.clone());
                            on_sort_change.call(val);
                        },
                        option { value: "created_at_desc", "Newest first" }
                        option { value: "created_at_asc", "Oldest first" }
                        option { value: "file_name_asc", "Name A-Z" }
                        option { value: "file_name_desc", "Name Z-A" }
                        option { value: "file_size_desc", "Largest first" }
                        option { value: "file_size_asc", "Smallest first" }
                        option { value: "media_type_asc", "Type" }
                    }
                }
                // Page size
                div { class: "page-size-control",
                    span { class: "text-muted text-sm", "Show:" }
                    select {
                        value: "{page_size}",
                        onchange: move |e: Event<FormData>| {
                            if let Ok(size) = e.value().parse::<u64>() {
                                on_page_size_change.call(size);
                            }
                        },
                        option { value: "24", "24" }
                        option { value: "48", "48" }
                        option { value: "96", "96" }
                        option { value: "200", "200" }
                    }
                }
            }
            div { class: "toolbar-right",
                // Select All / Deselect All toggle (works in both grid and table)
                {
                    let all_ids2 = all_ids.clone();
                    rsx! {
                        button {
                            class: "btn btn-sm btn-ghost",
                            onclick: move |_| {
                                if is_all_selected {
                                    selected_ids.set(Vec::new());
                                    select_all.set(false);
                                    global_all_selected.set(false);
                                } else {
                                    selected_ids.set(all_ids2.clone());
                                    select_all.set(true);
                                }
                            },
                            if is_all_selected {
                                "Deselect All"
                            } else {
                                "Select All"
                            }
                        }
                    }
                }
                if has_selection {
                    div { class: "batch-actions",
                        span { "{selection_count} selected" }
                        button {
                            class: "btn btn-sm btn-secondary",
                            onclick: move |_| show_batch_tag.set(true),
                            "Tag"
                        }
                        button {
                            class: "btn btn-sm btn-secondary",
                            onclick: move |_| show_batch_collection.set(true),
                            "Collection"
                        }
                        button {
                            class: "btn btn-sm btn-danger",
                            onclick: move |_| confirm_batch_delete.set(true),
                            "Delete"
                        }
                        button {
                            class: "btn btn-sm btn-ghost",
                            onclick: move |_| {
                                selected_ids.set(Vec::new());
                                select_all.set(false);
                                global_all_selected.set(false);
                            },
                            "Clear"
                        }
                    }
                }
                if on_delete_all.is_some() && total_count > 0 {
                    button {
                        class: "btn btn-sm btn-danger",
                        onclick: move |_| confirm_delete_all.set(true),
                        "Delete All"
                    }
                }
                span { class: "text-muted text-sm",
                    "{total_count} items"
                }
            }
        }
        // Type filter chips
        div { class: "type-filter-row",
            for filter in TYPE_FILTERS.iter() {
                {
                    let f = (*filter).to_string();
                    let is_active = active_filter == f;
                    let chip_class = if is_active { "filter-chip active" } else { "filter-chip" };
                    let label = filter_label(filter);
                    rsx! {
                        button {
                            key: "{f}",
                            class: "{chip_class}",
                            onclick: {
                                let f = f.clone();
                                move |_| {
                                    type_filter.set(f.clone());
                                }
                            },
                            "{label}"
                        }
                    }
                }
            }
        }
        // Stats summary row
        div { class: "library-stats",
            span { class: "text-muted text-sm",
                if active_filter != "all" {
                    "Showing {filtered_count} of {total_count} items (filtered: {active_filter})"
                } else {
                    "Showing {filtered_count} items"
                }
            }
            span { class: "text-muted text-sm",
                "Page {current_page + 1} of {total_pages}"
            }
        }
        // Select-all banner: when all items on this page are selected and there
        // are more pages, offer to select everything across all pages.
        if is_all_selected && total_count > page_size && !*global_all_selected.read() {
            div { class: "select-all-banner",
                "All {filtered_count} items on this page are selected."
                if on_select_all_global.is_some() {
                    button {
                        onclick: move |_| {
                            if let Some(handler) = on_select_all_global {
                                // Hand the parent a callback that installs the
                                // full cross-page ID list once resolved.
                                handler.call(EventHandler::new(move |all_ids: Vec<String>| {
                                    selected_ids.set(all_ids);
                                    global_all_selected.set(true);
                                }));
                            }
                        },
                        "Select all {total_count} items"
                    }
                }
            }
        }
        if *global_all_selected.read() {
            div { class: "select-all-banner",
                "All {selection_count} items across all pages are selected."
                button {
                    onclick: move |_| {
                        selected_ids.set(Vec::new());
                        select_all.set(false);
                        global_all_selected.set(false);
                    },
                    "Clear selection"
                }
            }
        }
        // Content: grid or table
        match current_mode {
            ViewMode::Grid => rsx! {
                div { class: "media-grid",
                    for (idx, item) in filtered_media.iter().enumerate() {
                        {
                            let id = item.id.clone();
                            let badge_class = type_badge_class(&item.media_type);
                            let is_checked = current_selection.contains(&id);
                            let card_click = {
                                let id = item.id.clone();
                                move |_| on_select.call(id.clone())
                            };
                            // Build a list of all visible IDs for shift+click range selection.
                            let visible_ids: Vec<String> = filtered_media.iter().map(|m| m.id.clone()).collect();
                            let toggle_id = {
                                let id = id.clone();
                                move |e: Event<MouseData>| {
                                    e.stop_propagation();
                                    let shift = e.modifiers().shift();
                                    let mut ids = selected_ids.read().clone();
                                    if shift {
                                        // Shift+click: select range from last_click_index to current idx.
                                        if let Some(last) = *last_click_index.read() {
                                            let start = last.min(idx);
                                            let end = last.max(idx);
                                            for i in start..=end {
                                                if let Some(range_id) = visible_ids.get(i)
                                                    && !ids.contains(range_id)
                                                {
                                                    ids.push(range_id.clone());
                                                }
                                            }
                                        } else {
                                            // No previous click, just toggle this one.
                                            if !ids.contains(&id) {
                                                ids.push(id.clone());
                                            }
                                        }
                                    } else if ids.contains(&id) {
                                        ids.retain(|x| x != &id);
                                    } else {
                                        ids.push(id.clone());
                                    }
                                    last_click_index.set(Some(idx));
                                    selected_ids.set(ids);
                                }
                            };
                            let thumb_url = if item.has_thumbnail {
                                format!("{}/api/v1/media/{}/thumbnail", server_url, item.id)
                            } else {
                                String::new()
                            };
                            let has_thumb = item.has_thumbnail;
                            let media_type = item.media_type.clone();
                            let card_class = if is_checked { "media-card selected" } else { "media-card" };
                            let title_text = item.title.clone().unwrap_or_default();
                            let artist_text = item.artist.clone().unwrap_or_default();
                            rsx! {
                                div {
                                    key: "{item.id}",
                                    class: "{card_class}",
                                    onclick: card_click,
                                    div { class: "card-checkbox",
                                        input {
                                            r#type: "checkbox",
                                            checked: is_checked,
                                            onclick: toggle_id,
                                        }
                                    }
                                    // Thumbnail with CSS fallback: both the icon and img
                                    // are rendered. The img is absolutely positioned on
                                    // top. If the image fails to load, the icon beneath
                                    // shows through.
                                    div { class: "card-thumbnail",
                                        div { class: "card-type-icon {badge_class}",
                                            "{type_icon(&media_type)}"
                                        }
                                        if has_thumb {
                                            img {
                                                class: "card-thumb-img",
                                                src: "{thumb_url}",
                                                alt: "{item.file_name}",
                                                loading: "lazy",
                                            }
                                        }
                                    }
                                    div { class: "card-info",
                                        div { class: "card-name", title: "{item.file_name}",
                                            "{item.file_name}"
                                        }
                                        if !title_text.is_empty() {
                                            div { class: "card-title text-muted text-xs",
                                                "{title_text}"
                                            }
                                        }
                                        if !artist_text.is_empty() {
                                            div { class: "card-artist text-muted text-xs",
                                                "{artist_text}"
                                            }
                                        }
                                        div { class: "card-meta",
                                            span { class: "type-badge {badge_class}", "{item.media_type}" }
                                            span { class: "card-size", "{format_size(item.file_size)}" }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            },
            ViewMode::Table => rsx! {
                table { class: "data-table",
                    thead {
                        tr {
                            th {
                                input {
                                    r#type: "checkbox",
                                    checked: is_all_selected,
                                    onclick: toggle_select_all,
                                }
                            }
                            th { "" }
                            th {
                                class: "sortable-header",
                                onclick: {
                                    let cs = current_sort.clone();
                                    move |_| {
                                        let val = next_sort(&cs, "file_name");
                                        sort_field.set(val.clone());
                                        on_sort_change.call(val);
                                    }
                                },
                                "Name{sort_arrow(&current_sort, \"file_name\")}"
                            }
                            th {
                                class: "sortable-header",
                                onclick: {
                                    let cs = current_sort.clone();
                                    move |_| {
                                        let val = next_sort(&cs, "media_type");
                                        sort_field.set(val.clone());
                                        on_sort_change.call(val);
                                    }
                                },
                                "Type{sort_arrow(&current_sort, \"media_type\")}"
                            }
                            th { "Artist" }
                            th {
                                class: "sortable-header",
                                onclick: {
                                    let cs = current_sort.clone();
                                    move |_| {
                                        let val = next_sort(&cs, "file_size");
                                        sort_field.set(val.clone());
                                        on_sort_change.call(val);
                                    }
                                },
                                "Size{sort_arrow(&current_sort, \"file_size\")}"
                            }
                            th { "" }
                        }
                    }
                    tbody {
                        for (idx, item) in filtered_media.iter().enumerate() {
                            {
                                let id = item.id.clone();
                                let artist = item.artist.clone().unwrap_or_default();
                                let size = format_size(item.file_size);
                                let badge_class = type_badge_class(&item.media_type);
                                let is_checked = current_selection.contains(&id);
                                let visible_ids: Vec<String> = filtered_media.iter().map(|m| m.id.clone()).collect();
                                let toggle_id = {
                                    let id = id.clone();
                                    move |e: Event<MouseData>| {
                                        e.stop_propagation();
                                        let shift = e.modifiers().shift();
                                        let mut ids = selected_ids.read().clone();
                                        if shift {
                                            if let Some(last) = *last_click_index.read() {
                                                let start = last.min(idx);
                                                let end = last.max(idx);
                                                for i in start..=end {
                                                    if let Some(range_id) = visible_ids.get(i)
                                                        && !ids.contains(range_id)
                                                    {
                                                        ids.push(range_id.clone());
                                                    }
                                                }
                                            } else {
                                                if !ids.contains(&id) {
                                                    ids.push(id.clone());
                                                }
                                            }
                                        } else if ids.contains(&id) {
                                            ids.retain(|x| x != &id);
                                        } else {
                                            ids.push(id.clone());
                                        }
                                        last_click_index.set(Some(idx));
                                        selected_ids.set(ids);
                                    }
                                };
                                let row_click = {
                                    let id = item.id.clone();
                                    move |_| on_select.call(id.clone())
                                };
                                let delete_click = {
                                    let id = item.id.clone();
                                    move |e: Event<MouseData>| {
                                        e.stop_propagation();
                                        confirm_delete.set(Some(id.clone()));
                                    }
                                };
                                let thumb_url = if item.has_thumbnail {
                                    format!("{}/api/v1/media/{}/thumbnail", server_url, item.id)
                                } else {
                                    String::new()
                                };
                                let has_thumb = item.has_thumbnail;
                                let media_type_str = item.media_type.clone();
                                rsx! {
                                    tr {
                                        key: "{item.id}",
                                        onclick: row_click,
                                        td {
                                            input {
                                                r#type: "checkbox",
                                                checked: is_checked,
                                                onclick: toggle_id,
                                            }
                                        }
                                        td { class: "table-thumb-cell",
                                            // Thumbnail with CSS fallback: icon always
                                            // rendered, img overlays when available.
                                            span { class: "table-type-icon {badge_class}",
                                                "{type_icon(&media_type_str)}"
                                            }
                                            if has_thumb {
                                                img {
                                                    class: "table-thumb table-thumb-overlay",
                                                    src: "{thumb_url}",
                                                    alt: "",
                                                    loading: "lazy",
                                                }
                                            }
                                        }
                                        td { "{item.file_name}" }
                                        td {
                                            span { class: "type-badge {badge_class}", "{item.media_type}" }
                                        }
                                        td { "{artist}" }
                                        td { "{size}" }
                                        td {
                                            button {
                                                class: "btn btn-danger btn-sm",
                                                onclick: delete_click,
                                                "Delete"
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            },
        }
        // Pagination controls
        PaginationControls {
            current_page,
            total_pages,
            on_page_change: move |page: u64| on_page_change.call(page),
        }
    }
}

View file

@ -0,0 +1,59 @@
use dioxus::prelude::*;
/// Placeholder card with pulsing thumbnail and text bars, shown while grid
/// content is loading.
#[component]
pub fn SkeletonCard() -> Element {
    rsx! {
        div { class: "skeleton-card",
            div { class: "skeleton-thumb skeleton-pulse" }
            div { class: "skeleton-text skeleton-pulse" }
            div { class: "skeleton-text skeleton-text-short skeleton-pulse" }
        }
    }
}
/// Placeholder table/list row with pulsing cells, shown while list content
/// is loading.
#[component]
pub fn SkeletonRow() -> Element {
    rsx! {
        div { class: "skeleton-row",
            div { class: "skeleton-cell skeleton-cell-icon skeleton-pulse" }
            div { class: "skeleton-cell skeleton-cell-wide skeleton-pulse" }
            div { class: "skeleton-cell skeleton-pulse" }
            div { class: "skeleton-cell skeleton-pulse" }
        }
    }
}
/// Full overlay with a spinner and a status message. When `message` is
/// `None`, the default "Loading..." text is shown.
#[component]
pub fn LoadingOverlay(message: Option<String>) -> Element {
    let msg = message.unwrap_or_else(|| "Loading...".to_string());
    rsx! {
        div { class: "loading-overlay",
            div { class: "loading-spinner" }
            span { class: "loading-message", "{msg}" }
        }
    }
}
/// A grid of `count` skeleton cards (default 12), mirroring the media-grid
/// layout while real content loads.
#[component]
pub fn SkeletonGrid(count: Option<usize>) -> Element {
    let n = count.unwrap_or(12);
    rsx! {
        div { class: "media-grid",
            for i in 0..n {
                SkeletonCard { key: "skel-{i}" }
            }
        }
    }
}
/// A list of `count` skeleton rows (default 10), mirroring the media-list
/// layout while real content loads.
#[component]
pub fn SkeletonList(count: Option<usize>) -> Element {
    let n = count.unwrap_or(10);
    rsx! {
        div { class: "media-list",
            for i in 0..n {
                SkeletonRow { key: "skel-row-{i}" }
            }
        }
    }
}

View file

@ -0,0 +1,75 @@
use dioxus::prelude::*;
/// Login form. Emits `(username, password)` through `on_login` when the user
/// clicks "Sign In" or presses Enter in either field; submission is skipped
/// while either field is empty. `error` renders an inline error banner and
/// `loading` disables both inputs and the submit button.
#[component]
pub fn Login(
    on_login: EventHandler<(String, String)>,
    #[props(default)] error: Option<String>,
    #[props(default = false)] loading: bool,
) -> Element {
    let mut username = use_signal(String::new);
    let mut password = use_signal(String::new);
    // Click-submit path. NOTE(review): the validation here is duplicated in
    // `on_key` below; keep the two in sync if the submit rules change.
    let on_submit = {
        move |_| {
            let u = username.read().clone();
            let p = password.read().clone();
            if !u.is_empty() && !p.is_empty() {
                on_login.call((u, p));
            }
        }
    };
    // Enter-key path, attached to both text inputs.
    let on_key = move |e: KeyboardEvent| {
        if e.key() == Key::Enter {
            let u = username.read().clone();
            let p = password.read().clone();
            if !u.is_empty() && !p.is_empty() {
                on_login.call((u, p));
            }
        }
    };
    rsx! {
        div { class: "login-container",
            div { class: "login-card",
                h2 { class: "login-title", "Pinakes" }
                p { class: "login-subtitle", "Sign in to continue" }
                if let Some(ref err) = error {
                    div { class: "login-error", "{err}" }
                }
                div { class: "login-form",
                    div { class: "form-group",
                        label { class: "form-label", "Username" }
                        input {
                            r#type: "text",
                            placeholder: "Enter username",
                            value: "{username}",
                            disabled: loading,
                            oninput: move |e: Event<FormData>| username.set(e.value()),
                            onkeypress: on_key,
                        }
                    }
                    div { class: "form-group",
                        label { class: "form-label", "Password" }
                        input {
                            r#type: "password",
                            placeholder: "Enter password",
                            value: "{password}",
                            disabled: loading,
                            oninput: move |e: Event<FormData>| password.set(e.value()),
                            onkeypress: on_key,
                        }
                    }
                    button {
                        class: "btn btn-primary login-btn",
                        disabled: loading,
                        onclick: on_submit,
                        if loading { "Signing in..." } else { "Sign In" }
                    }
                }
            }
        }
    }
}

View file

@ -0,0 +1,180 @@
use dioxus::prelude::*;
/// Fetches `content_url` over HTTP and renders the response body: markdown
/// with an optional frontmatter card when `media_type` is "md"/"markdown",
/// or escaped plaintext in a <pre> block otherwise. Shows a loading overlay
/// while fetching and an error banner on failure.
#[component]
pub fn MarkdownViewer(content_url: String, media_type: String) -> Element {
    // Rendered HTML is injected via `dangerous_inner_html`; all paths below
    // go through escaping/script-stripping before being stored here.
    let mut rendered_html = use_signal(String::new);
    let mut frontmatter_html = use_signal(|| Option::<String>::None);
    let mut loading = use_signal(|| true);
    let mut error = use_signal(|| Option::<String>::None);
    // Fetch content on mount
    // NOTE(review): `url`/`mtype` are plain clones, not reactive reads, so
    // this effect appears to run only once and not re-run when the props
    // change — confirm that is the intended behavior.
    let url = content_url.clone();
    let mtype = media_type.clone();
    use_effect(move || {
        let url = url.clone();
        let mtype = mtype.clone();
        spawn(async move {
            loading.set(true);
            error.set(None);
            match reqwest::get(&url).await {
                Ok(resp) => match resp.text().await {
                    Ok(text) => {
                        if mtype == "md" || mtype == "markdown" {
                            let (fm_html, body_html) = render_markdown_with_frontmatter(&text);
                            frontmatter_html.set(fm_html);
                            rendered_html.set(body_html);
                        } else {
                            frontmatter_html.set(None);
                            rendered_html.set(render_plaintext(&text));
                        };
                    }
                    Err(e) => error.set(Some(format!("Failed to read content: {e}"))),
                },
                Err(e) => error.set(Some(format!("Failed to fetch: {e}"))),
            }
            loading.set(false);
        });
    });
    let is_loading = *loading.read();
    rsx! {
        div { class: "markdown-viewer",
            if is_loading {
                div { class: "loading-overlay",
                    div { class: "spinner" }
                    "Loading content..."
                }
            }
            if let Some(ref err) = *error.read() {
                div { class: "error-banner",
                    span { class: "error-icon", "\u{26a0}" }
                    "{err}"
                }
            }
            if !is_loading && error.read().is_none() {
                if let Some(ref fm) = *frontmatter_html.read() {
                    div {
                        class: "frontmatter-card",
                        dangerous_inner_html: "{fm}",
                    }
                }
                div {
                    class: "markdown-content",
                    dangerous_inner_html: "{rendered_html}",
                }
            }
        }
    }
}
/// Parse YAML frontmatter and render the markdown body.
/// Returns `(frontmatter_html, body_html)`; the frontmatter half is `None`
/// when there is no usable frontmatter, and a parse failure falls back to
/// rendering the whole input as markdown.
fn render_markdown_with_frontmatter(text: &str) -> (Option<String>, String) {
    use gray_matter::Matter;
    use gray_matter::engine::YAML;
    let matter = Matter::<YAML>::new();
    match matter.parse(text) {
        Ok(parsed) => {
            let frontmatter = parsed.data.and_then(|pod| render_frontmatter_card(&pod));
            let body = render_markdown(&parsed.content);
            (frontmatter, body)
        }
        // If frontmatter parsing fails, just render the whole text as markdown.
        Err(_) => (None, render_markdown(text)),
    }
}
/// Render frontmatter key/value pairs as an HTML definition list, or `None`
/// when the frontmatter is absent, not a mapping, or empty. Keys are
/// HTML-escaped; values are flattened through `pod_to_display`.
fn render_frontmatter_card(data: &gray_matter::Pod) -> Option<String> {
    let fields = match data {
        gray_matter::Pod::Hash(map) if !map.is_empty() => map,
        _ => return None,
    };
    let mut html = String::from("<dl class=\"frontmatter-fields\">");
    for (key, value) in fields {
        html.push_str(&format!(
            "<dt>{}</dt><dd>{}</dd>",
            escape_html(key),
            pod_to_display(value)
        ));
    }
    html.push_str("</dl>");
    Some(html)
}
fn pod_to_display(pod: &gray_matter::Pod) -> String {
match pod {
gray_matter::Pod::String(s) => escape_html(s),
gray_matter::Pod::Integer(n) => n.to_string(),
gray_matter::Pod::Float(f) => f.to_string(),
gray_matter::Pod::Boolean(b) => b.to_string(),
gray_matter::Pod::Array(arr) => {
let items: Vec<String> = arr.iter().map(pod_to_display).collect();
items.join(", ")
}
gray_matter::Pod::Hash(map) => {
let items: Vec<String> = map
.iter()
.map(|(k, v)| format!("{}: {}", escape_html(k), pod_to_display(v)))
.collect();
items.join("; ")
}
gray_matter::Pod::Null => String::new(),
}
}
/// Render markdown to HTML with tables, strikethrough, task lists,
/// footnotes, and heading attributes enabled, then strip <script> tags from
/// the result before it is injected via `dangerous_inner_html`.
fn render_markdown(text: &str) -> String {
    use pulldown_cmark::{Options, Parser, html};
    // Combine the enabled extensions into one Options bitset.
    let options = [
        Options::ENABLE_TABLES,
        Options::ENABLE_STRIKETHROUGH,
        Options::ENABLE_TASKLISTS,
        Options::ENABLE_FOOTNOTES,
        Options::ENABLE_HEADING_ATTRIBUTES,
    ]
    .into_iter()
    .fold(Options::empty(), |acc, opt| acc | opt);
    let mut rendered = String::new();
    html::push_html(&mut rendered, Parser::new_ext(text, options));
    // Strip script tags for safety
    strip_script_tags(&rendered)
}
/// Wrap raw text in a <pre><code> block, HTML-escaping it first.
fn render_plaintext(text: &str) -> String {
    format!("<pre><code>{}</code></pre>", escape_html(text))
}
/// Escape HTML-significant characters (`&`, `<`, `>`, `"`, `'`) so arbitrary
/// text can be embedded safely in HTML content or quoted attributes.
///
/// Single-pass rewrite of the previous chained `.replace` version: one
/// allocation instead of four full scan-and-copy passes, and it additionally
/// escapes `'` (as `&#39;`), which the old version left through — required
/// for safety inside single-quoted attribute values.
fn escape_html(text: &str) -> String {
    let mut escaped = String::with_capacity(text.len());
    for ch in text.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            '"' => escaped.push_str("&quot;"),
            '\'' => escaped.push_str("&#39;"),
            other => escaped.push(other),
        }
    }
    escaped
}
/// Remove every `<script ...>...</script>` element (case-insensitively) from
/// an HTML string; an unclosed `<script` truncates the string at the tag.
///
/// Bug fix: the previous version computed byte indices on `to_lowercase()`
/// output and applied them to the original string. Unicode lowercasing can
/// change byte lengths (e.g. U+0130 lowercases to two chars), so those
/// indices could mis-slice or panic. HTML tag names are ASCII, so an
/// ASCII-case-insensitive byte search on the original string is both correct
/// and index-preserving. This also avoids re-lowercasing the whole string on
/// every loop iteration.
fn strip_script_tags(html: &str) -> String {
    const OPEN: &str = "<script";
    const CLOSE: &str = "</script>";
    let mut result = html.to_string();
    loop {
        let Some(start) = find_ascii_ci(&result, OPEN, 0) else {
            break;
        };
        // Look for the closing tag after the opening one.
        match find_ascii_ci(&result, CLOSE, start + OPEN.len()) {
            Some(close_start) => {
                // Remove the whole element, including its contents.
                let tail = result.split_off(close_start + CLOSE.len());
                result.truncate(start);
                result.push_str(&tail);
            }
            None => {
                // Malformed script tag - remove to end.
                result.truncate(start);
                break;
            }
        }
    }
    result
}

/// Find the first ASCII-case-insensitive occurrence of `needle` in
/// `haystack` at byte offset `from` or later. `needle` must be ASCII; since
/// every matched byte is then ASCII, the returned index (and index + length)
/// are guaranteed to lie on UTF-8 character boundaries.
fn find_ascii_ci(haystack: &str, needle: &str, from: usize) -> Option<usize> {
    let hay = haystack.as_bytes();
    let pat = needle.as_bytes();
    if pat.is_empty() || from + pat.len() > hay.len() {
        return None;
    }
    (from..=hay.len() - pat.len()).find(|&i| hay[i..i + pat.len()].eq_ignore_ascii_case(pat))
}

Some files were not shown because too many files have changed in this diff Show more