Compare commits

...

31 commits

Author SHA1 Message Date
103be9d13d
nix: setup sccache
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I375e0d41d42939b63a01a59d41b3fd426a6a6964
2026-03-22 22:05:05 +03:00
f1eacc8484
pinakes-server: add more route tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ief16a2b3181bfa50193fb69a5ad4a9166a6a6964
2026-03-22 22:05:04 +03:00
d26f237828
meta: configure gitattributes; don't diff churn
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9d22892a5f90a95c29fa8979512b13646a6a6964
2026-03-22 22:05:03 +03:00
7ed66f1d3f
various: autofix Clippy warnings
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ia355e5626b5db7760c8dbb571cb552c46a6a6964
2026-03-22 22:05:02 +03:00
c1a1f4a600
pinakes-ui: use system player for HLS streams
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I0255e79e25cde100a063476de2c8fe0d6a6a6964
2026-03-22 22:05:01 +03:00
bac79a2c08
pinakes-server: add more integration tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I7c6c8eaad569404c7a13cfa8114d84516a6a6964
2026-03-22 22:05:00 +03:00
b1ddb32ff0
pinakes-server: fix subtitle list response and registration
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I22c7237877862acbf931ce4c662bd2816a6a6964
2026-03-22 22:04:59 +03:00
76a48250e9
pinakes-ui: update styles for media widgets
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ia380cb749d3aafc15ffc242e43eefa106a6a6964
2026-03-22 22:04:58 +03:00
3948872042
pinakes-ui: playlists view and settings updates
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I7f39eca04360e78cd76c7cb43c2ad2776a6a6964
2026-03-22 22:04:57 +03:00
67019cad4c
pinakes-ui: add rustdoc to ApiClient types and methods
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I4b25ba66e695a870a753bdc6276c113d6a6a6964
2026-03-22 22:04:56 +03:00
0feb51d7b4
pinakes-ui: add playlists; expand detail/settings/player components
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ifb9c9da6fec0a9152b54ccf48705088e6a6a6964
2026-03-22 22:04:55 +03:00
bb69f2fa37
pinakes-tui: cover more API routes in the TUI crate
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Id14b6f82d3b9f3c27bee9c214a1bdedc6a6a6964
2026-03-22 22:04:54 +03:00
0dda2aec8f
chore: add cargo xtask alias
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Iaf5e1365e825e88a6cde49a50624c7736a6a6964
2026-03-22 22:04:53 +03:00
934691c0f9
docs: auto-generate API route documentation
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Id0d1f9769b7ccdbf83d5fa78adef62e46a6a6964
2026-03-22 22:04:52 +03:00
9d58927cb4
pinakes-server: add utoipa annotations to all routes; fix tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I28cf5b7b7cff8e90e123d624d97cf9656a6a6964
2026-03-22 22:04:51 +03:00
67b8322705
pinakes-server: add utoipa annotations; manage embedded subtitle data
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I30d4b23f09113628dea245404b0a31bd6a6a6964
2026-03-22 22:04:50 +03:00
aa68d742c9
pinakes-core: fix minor clippy warnings; add toggle for Swagger UI generation
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ie33a5d17b774289023e3855789d3adc86a6a6964
2026-03-22 22:04:49 +03:00
5e0f404fc7
pinakes-core: initial subtitle management
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Id2f9b87b1cc903462539ab8ea47099696a6a6964
2026-03-22 22:04:48 +03:00
2daa1e4395
pinakes-core: add error variants for external tool calls and subtitle ops
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9c9f4a7de065e176e16b108411c3d44b6a6a6964
2026-03-22 22:04:47 +03:00
ee5df288bc
pinakes-server: expand test coverage for server features
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ia09d2d3ad7f6613e21d20321e0877bc16a6a6964
2026-03-22 22:04:46 +03:00
60b6aa1fe8
pinakes-plugin-api: suppress enum_variant_names lint
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I01367dea28dd7b47cf765b6f33782a5e6a6a6964
2026-03-22 22:04:45 +03:00
1ee225201a
pinakes-plugin-api: suppress struct_field_names lint; minor doc tweaks
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I90f1cc46303564a61bdefe76d21045066a6a6964
2026-03-22 22:04:44 +03:00
2f43279dd7
pinakes-server: consolidate helpers for the tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ifbc07ced09014391bc264a36be27dc8c6a6a6964
2026-03-22 22:04:43 +03:00
6b8444f19c
pinakes-plugin-api: fix hasher usage in tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I8ee475aef2d1f81cf6af6f5e247f5e386a6a6964
2026-03-22 22:04:42 +03:00
5b817e0b3e
pinakes-core: fix hasher usage in tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ied03277d450e39299470667ef479c3526a6a6964
2026-03-22 22:04:41 +03:00
e15dad208e
pinakes-core: clarify backup support for postgresql
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I7f7d5dcb1d973c8615aacbfc0a5a44576a6a6964
2026-03-22 22:04:40 +03:00
8023dc606b
migrations/postgres: add missing sequence counter for sqlite parity
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Iaf993250bff02b3d02aece62876b5ee56a6a6964
2026-03-22 22:04:39 +03:00
c6efd3661f
treewide: replace std hashers with rustc_hash alternatives; fix clippy
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I766c36cb53d3d7f9e85b91a67c4131a66a6a6964
2026-03-22 22:04:38 +03:00
0e79ba0518
meta: ignore Nix build results properly
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Iccbf2928b43e8b519d84884e801e4f206a6a6964
2026-03-22 22:04:37 +03:00
cf735b4278
chore: enforce rustc_hash over std hashers
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I228093b5da57d6fa3a6249e06de2f5776a6a6964
2026-03-22 22:04:36 +03:00
ede8a26e59
pinakes-ui: get rid of the build wrapper; let Dioxus compile its own stylesheets
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I8e83a7d9e592b0770b22c5832da6a5586a6a6964
2026-03-22 21:58:53 +03:00
179 changed files with 27914 additions and 1239 deletions

View file

@@ -1,3 +1,6 @@
[alias]
xtask = "run --manifest-path xtask/Cargo.toml --"
[unstable]
build-std = ["std", "panic_abort", "core", "alloc"]

View file

@@ -1,3 +1,9 @@
avoid-breaking-exported-api = false
cognitive-complexity-threshold = 30
too-many-arguments-threshold = 12
upper-case-acronyms-aggressive = true
check-inconsistent-struct-field-initializers = true
await-holding-invalid-types = [
"generational_box::GenerationalRef",
{ path = "generational_box::GenerationalRef", reason = "Reads should not be held over an await point. This will cause any writes to fail while the await is pending since the read borrow is still active." },
@@ -13,3 +19,9 @@ disallowed-methods = [
{ path = "once_cell::unsync::Lazy::new", reason = "use `std::cell::LazyCell` instead, unless you need into_value" },
{ path = "once_cell::sync::Lazy::new", reason = "use `std::sync::LazyLock` instead, unless you need into_value" },
]
disallowed-types = [
{ path = "std::collections::HashMap", reason = "Use `rustc_hash::FxHashMap` instead, which is typically faster." },
{ path = "std::collections::HashSet", reason = "Use `rustc_hash::FxHashSet` instead, which is typically faster." },
]

26
.gitattributes vendored Normal file
View file

@@ -0,0 +1,26 @@
*.sh text eol=lf
# Don't diff churn.
*.lock -diff
*LICENSE -diff
# Try to get markdown files to be treated as markdown
# by linguist - ** prefix is for all subdirectories.
**/*.md linguist-detectable
**/*.md linguist-language=Markdown
# This is vendored code, because it's generated by build tools.
# See:
# <https://github.com/github-linguist/linguist/blob/main/docs/overrides.md#vendored-code>
/docs/api/*.json linguist-vendored
/docs/api/*.md linguist-vendored
/crates/pinakes-ui/assets/css/main.css linguist-vendored
# Git Configuration files
*.gitattributes linguist-detectable=false
*.gitattributes linguist-documentation=false
*.gitignore linguist-detectable=false
*.gitignore linguist-documentation=false
*.editorconfig linguist-detectable=false
*.editorconfig linguist-documentation=false

5
.gitignore vendored
View file

@@ -3,9 +3,12 @@ target/
**/*.wasm
# Nix
.direnv/
/.direnv/
/result*
# Runtime artifacts
*.db*
# Test configuration
test.toml

BIN
Cargo.lock generated

Binary file not shown.

View file

@@ -1,5 +1,5 @@
[workspace]
members = ["crates/*"]
members = ["crates/*", "xtask"]
exclude = ["crates/pinakes-core/tests/fixtures/test-plugin"]
resolver = "3"
@@ -18,50 +18,28 @@ pinakes-plugin-api = { path = "./crates/pinakes-plugin-api" }
pinakes-ui = { path = "./crates/pinakes-ui" }
pinakes-tui = { path = "./crates/pinakes-tui" }
# Async runtime
tokio = { version = "1.49.0", features = ["full"] }
tokio-util = { version = "0.7.18", features = ["rt"] }
# Serialization
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
toml = "1.0.3"
# CLI argument parsing
clap = { version = "4.5.60", features = ["derive", "env"] }
# Date/time
chrono = { version = "0.4.44", features = ["serde"] }
# IDs
uuid = { version = "1.21.0", features = ["v7", "serde"] }
# Error handling
thiserror = "2.0.18"
anyhow = "1.0.102"
# Logging
tracing = "0.1.44"
tracing-subscriber = { version = "0.3.22", features = ["env-filter", "json"] }
# Hashing
blake3 = "1.8.3"
# Cryptographic signatures (plugin verification)
rustc-hash = "2.1.1"
ed25519-dalek = { version = "2.1.1", features = ["std"] }
# Metadata extraction
lofty = "0.23.2"
lopdf = "0.39.0"
epub = "2.1.5"
matroska = "0.30.0"
gray_matter = "0.3.2"
kamadak-exif = "0.6.1"
# Database - SQLite
rusqlite = { version = "=0.37.0", features = ["bundled", "column_decltype"] }
# Database - PostgreSQL
tokio-postgres = { version = "0.7.16", features = [
"with-uuid-1",
"with-chrono-0_4",
@@ -71,44 +49,24 @@ deadpool-postgres = "0.14.1"
postgres-types = { version = "0.2.12", features = ["derive"] }
postgres-native-tls = "0.5.2"
native-tls = "0.2.18"
# Migrations
refinery = { version = "0.9.0", features = ["rusqlite", "tokio-postgres"] }
# Filesystem
walkdir = "2.5.0"
notify = { version = "8.2.0", features = ["macos_fsevent"] }
# Search parser
winnow = "0.7.14"
# HTTP server
axum = { version = "0.8.8", features = ["macros", "multipart"] }
axum-server = { version = "0.8.0" }
tower = "0.5.3"
tower-http = { version = "0.6.8", features = ["cors", "trace", "set-header"] }
governor = "0.10.4"
tower_governor = "0.8.0"
# HTTP client
reqwest = { version = "0.13.2", features = ["json", "query", "blocking"] }
url = "2.5"
# TUI
ratatui = "0.30.0"
crossterm = "0.29.0"
# Desktop/Web UI
dioxus = { version = "0.7.3", features = ["desktop", "router"] }
dioxus-core = { version = "0.7.3" }
# Async trait (dyn-compatible async methods)
async-trait = "0.1.89"
# Async utilities
futures = "0.3.32"
# Image processing (thumbnails)
image = { version = "0.25.9", default-features = false, features = [
"jpeg",
"png",
@@ -117,15 +75,9 @@ image = { version = "0.25.9", default-features = false, features = [
"tiff",
"bmp",
] }
# Markdown rendering
pulldown-cmark = "0.13.1"
ammonia = "4.1.2"
# Password hashing
argon2 = { version = "0.5.3", features = ["std"] }
# Misc
mime_guess = "2.0.5"
regex = "1.12.3"
dioxus-free-icons = { version = "0.10.0", features = ["font-awesome-solid"] }
@@ -137,13 +89,12 @@ urlencoding = "2.1.3"
image_hasher = "3.1.1"
percent-encoding = "2.3.2"
http = "1.4.0"
# WASM runtime for plugins
wasmtime = { version = "42.0.1", features = ["component-model"] }
wit-bindgen = "0.53.1"
# Misc
tempfile = "3.26.0"
utoipa = { version = "5.4.0", features = ["axum_extras", "uuid", "chrono"] }
utoipa-axum = { version = "0.2.0" }
utoipa-swagger-ui = { version = "9.0.2", features = ["axum"] }
# See:
# <https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html>

View file

@@ -42,12 +42,16 @@ regex = { workspace = true }
moka = { workspace = true }
urlencoding = { workspace = true }
image_hasher = { workspace = true }
rustc-hash = { workspace = true }
# Plugin system
pinakes-plugin-api.workspace = true
wasmtime.workspace = true
ed25519-dalek.workspace = true
[features]
ffmpeg-tests = []
[lints]
workspace = true

View file

@@ -1126,6 +1126,10 @@ pub struct ServerConfig {
/// TLS/HTTPS configuration
#[serde(default)]
pub tls: TlsConfig,
/// Enable the Swagger UI at /api/docs.
/// Defaults to true. Set to false to disable in production if desired.
#[serde(default = "default_true")]
pub swagger_ui: bool,
}
/// TLS/HTTPS configuration for secure connections
@@ -1470,6 +1474,7 @@ impl Default for Config {
cors_enabled: false,
cors_origins: vec![],
tls: TlsConfig::default(),
swagger_ui: true,
},
ui: UiConfig::default(),
accounts: AccountsConfig::default(),
@@ -1494,6 +1499,8 @@ impl Default for Config {
#[cfg(test)]
mod tests {
use rustc_hash::FxHashMap;
use super::*;
fn test_config_with_concurrency(concurrency: usize) -> Config {
@@ -1549,7 +1556,7 @@ mod tests {
// HashMap lookup. This avoids unsafe std::env::set_var and is
// thread-safe for parallel test execution.
fn test_lookup<'a>(
vars: &'a std::collections::HashMap<&str, &str>,
vars: &'a FxHashMap<&str, &str>,
) -> impl Fn(&str) -> crate::error::Result<String> + 'a {
move |name| {
vars
@@ -1565,24 +1572,27 @@ mod tests {
#[test]
fn test_expand_env_var_simple() {
let vars =
std::collections::HashMap::from([("TEST_VAR_SIMPLE", "test_value")]);
let vars = [("TEST_VAR_SIMPLE", "test_value")]
.into_iter()
.collect::<FxHashMap<_, _>>();
let result = expand_env_vars("$TEST_VAR_SIMPLE", test_lookup(&vars));
assert_eq!(result.unwrap(), "test_value");
}
#[test]
fn test_expand_env_var_braces() {
let vars =
std::collections::HashMap::from([("TEST_VAR_BRACES", "test_value")]);
let vars = [("TEST_VAR_BRACES", "test_value")]
.into_iter()
.collect::<FxHashMap<_, _>>();
let result = expand_env_vars("${TEST_VAR_BRACES}", test_lookup(&vars));
assert_eq!(result.unwrap(), "test_value");
}
#[test]
fn test_expand_env_var_embedded() {
let vars =
std::collections::HashMap::from([("TEST_VAR_EMBEDDED", "value")]);
let vars = [("TEST_VAR_EMBEDDED", "value")]
.into_iter()
.collect::<FxHashMap<_, _>>();
let result =
expand_env_vars("prefix_${TEST_VAR_EMBEDDED}_suffix", test_lookup(&vars));
assert_eq!(result.unwrap(), "prefix_value_suffix");
@@ -1590,15 +1600,16 @@ mod tests {
#[test]
fn test_expand_env_var_multiple() {
let vars =
std::collections::HashMap::from([("VAR1", "value1"), ("VAR2", "value2")]);
let vars = [("VAR1", "value1"), ("VAR2", "value2")]
.into_iter()
.collect::<FxHashMap<_, _>>();
let result = expand_env_vars("${VAR1}_${VAR2}", test_lookup(&vars));
assert_eq!(result.unwrap(), "value1_value2");
}
#[test]
fn test_expand_env_var_missing() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("${NONEXISTENT_VAR}", test_lookup(&vars));
assert!(result.is_err());
assert!(
@@ -1611,7 +1622,7 @@ mod tests {
#[test]
fn test_expand_env_var_empty_name() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("${}", test_lookup(&vars));
assert!(result.is_err());
assert!(
@@ -1624,31 +1635,32 @@ mod tests {
#[test]
fn test_expand_env_var_escaped() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("\\$NOT_A_VAR", test_lookup(&vars));
assert_eq!(result.unwrap(), "$NOT_A_VAR");
}
#[test]
fn test_expand_env_var_no_vars() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("plain_text", test_lookup(&vars));
assert_eq!(result.unwrap(), "plain_text");
}
#[test]
fn test_expand_env_var_underscore() {
let vars = std::collections::HashMap::from([("TEST_VAR_NAME", "value")]);
let vars = [("TEST_VAR_NAME", "value")]
.into_iter()
.collect::<FxHashMap<_, _>>();
let result = expand_env_vars("$TEST_VAR_NAME", test_lookup(&vars));
assert_eq!(result.unwrap(), "value");
}
#[test]
fn test_expand_env_var_mixed_syntax() {
let vars = std::collections::HashMap::from([
("VAR1_MIXED", "v1"),
("VAR2_MIXED", "v2"),
]);
let vars = [("VAR1_MIXED", "v1"), ("VAR2_MIXED", "v2")]
.into_iter()
.collect::<FxHashMap<_, _>>();
let result =
expand_env_vars("$VAR1_MIXED and ${VAR2_MIXED}", test_lookup(&vars));
assert_eq!(result.unwrap(), "v1 and v2");

View file

@@ -111,6 +111,15 @@ pub enum PinakesError {
#[error("serialization error: {0}")]
Serialization(String),
#[error("external tool `{tool}` failed: {stderr}")]
ExternalTool { tool: String, stderr: String },
#[error("subtitle track {index} not found in media")]
SubtitleTrackNotFound { index: u32 },
#[error("invalid language code: {0}")]
InvalidLanguageCode(String),
}
impl From<rusqlite::Error> for PinakesError {

View file

@@ -254,7 +254,7 @@ pub async fn import_file_with_options(
duration_secs: extracted.duration_secs,
description: extracted.description,
thumbnail_path: thumb_path,
custom_fields: std::collections::HashMap::new(),
custom_fields: rustc_hash::FxHashMap::default(),
file_mtime: current_mtime,
// Photo-specific metadata from extraction

View file

@@ -1,8 +1,6 @@
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
@@ -96,8 +94,8 @@ pub async fn detect_orphans(
let mut orphaned_ids = Vec::new();
// Build hash index: ContentHash -> Vec<(MediaId, PathBuf)>
let mut hash_index: HashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
HashMap::new();
let mut hash_index: FxHashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
FxHashMap::default();
for (id, path, hash) in &media_paths {
hash_index
.entry(hash.clone())
@@ -138,12 +136,12 @@
fn detect_moved_files(
orphaned_ids: &[MediaId],
media_paths: &[(MediaId, PathBuf, ContentHash)],
hash_index: &HashMap<ContentHash, Vec<(MediaId, PathBuf)>>,
hash_index: &FxHashMap<ContentHash, Vec<(MediaId, PathBuf)>>,
) -> Vec<(MediaId, PathBuf, PathBuf)> {
let mut moved = Vec::new();
// Build lookup map for orphaned items: MediaId -> (PathBuf, ContentHash)
let orphaned_map: HashMap<MediaId, (PathBuf, ContentHash)> = media_paths
let orphaned_map: FxHashMap<MediaId, (PathBuf, ContentHash)> = media_paths
.iter()
.filter(|(id, ..)| orphaned_ids.contains(id))
.map(|(id, path, hash)| (*id, (path.clone(), hash.clone())))
@@ -184,7 +182,7 @@ async fn detect_untracked_files(
}
// Build set of tracked paths for fast lookup
let tracked_paths: HashSet<PathBuf> = media_paths
let tracked_paths: FxHashSet<PathBuf> = media_paths
.iter()
.map(|(_, path, _)| path.clone())
.collect();
@@ -198,7 +196,7 @@
];
// Walk filesystem for each root in parallel (limit concurrency to 4)
let mut filesystem_paths = HashSet::new();
let mut filesystem_paths = FxHashSet::default();
let mut tasks = tokio::task::JoinSet::new();
for root in roots {
@@ -322,8 +320,7 @@ pub async fn verify_integrity(
let paths_to_check: Vec<(MediaId, PathBuf, ContentHash)> =
if let Some(ids) = media_ids {
let id_set: std::collections::HashSet<MediaId> =
ids.iter().copied().collect();
let id_set: FxHashSet<MediaId> = ids.iter().copied().collect();
all_paths
.into_iter()
.filter(|(id, ..)| id_set.contains(id))
@@ -383,7 +380,7 @@ pub async fn cleanup_orphaned_thumbnails(
thumbnail_dir: &Path,
) -> Result<usize> {
let media_paths = storage.list_media_paths().await?;
let known_ids: std::collections::HashSet<String> = media_paths
let known_ids: FxHashSet<String> = media_paths
.iter()
.map(|(id, ..)| id.0.to_string())
.collect();

View file

@@ -1,6 +1,7 @@
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tokio::sync::{RwLock, mpsc};
@ -71,8 +72,8 @@ struct WorkerItem {
}
pub struct JobQueue {
jobs: Arc<RwLock<HashMap<Uuid, Job>>>,
cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>>,
jobs: Arc<RwLock<FxHashMap<Uuid, Job>>>,
cancellations: Arc<RwLock<FxHashMap<Uuid, CancellationToken>>>,
tx: mpsc::Sender<WorkerItem>,
}
@@ -94,7 +95,7 @@ impl JobQueue {
Uuid,
JobKind,
CancellationToken,
Arc<RwLock<HashMap<Uuid, Job>>>,
Arc<RwLock<FxHashMap<Uuid, Job>>>,
) -> tokio::task::JoinHandle<()>
+ Send
+ Sync
@ -102,10 +103,10 @@ impl JobQueue {
{
let (tx, rx) = mpsc::channel::<WorkerItem>(256);
let rx = Arc::new(tokio::sync::Mutex::new(rx));
let jobs: Arc<RwLock<HashMap<Uuid, Job>>> =
Arc::new(RwLock::new(HashMap::new()));
let cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>> =
Arc::new(RwLock::new(HashMap::new()));
let jobs: Arc<RwLock<FxHashMap<Uuid, Job>>> =
Arc::new(RwLock::new(FxHashMap::default()));
let cancellations: Arc<RwLock<FxHashMap<Uuid, CancellationToken>>> =
Arc::new(RwLock::new(FxHashMap::default()));
let executor = Arc::new(executor);
@@ -261,7 +262,7 @@ impl JobQueue {
/// Update a job's progress. Called by executors.
pub async fn update_progress(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
progress: f32,
message: String,
@ -275,7 +276,7 @@ impl JobQueue {
/// Mark a job as completed.
pub async fn complete(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
result: Value,
) {
@ -288,7 +289,7 @@ impl JobQueue {
/// Mark a job as failed.
pub async fn fail(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
error: String,
) {

View file

@@ -352,7 +352,7 @@ pub fn resolve_link_candidates(
}
// 4. Remove duplicates while preserving order
let mut seen = std::collections::HashSet::new();
let mut seen = rustc_hash::FxHashSet::default();
candidates.retain(|p| seen.insert(p.clone()));
candidates

View file

@@ -1,8 +1,7 @@
//! Media type registry for managing both built-in and custom media types
use std::collections::HashMap;
use anyhow::{Result, anyhow};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use super::MediaCategory;
@@ -33,10 +32,10 @@ pub struct MediaTypeDescriptor {
#[derive(Debug, Clone)]
pub struct MediaTypeRegistry {
/// Map of media type ID to descriptor
types: HashMap<String, MediaTypeDescriptor>,
types: FxHashMap<String, MediaTypeDescriptor>,
/// Map of extension to media type ID
extension_map: HashMap<String, String>,
extension_map: FxHashMap<String, String>,
}
impl MediaTypeRegistry {
@ -44,8 +43,8 @@ impl MediaTypeRegistry {
#[must_use]
pub fn new() -> Self {
Self {
types: HashMap::new(),
extension_map: HashMap::new(),
types: FxHashMap::default(),
extension_map: FxHashMap::default(),
}
}

View file

@@ -190,7 +190,7 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
book_meta.authors = authors;
// Extract ISBNs from identifiers
let mut identifiers = std::collections::HashMap::new();
let mut identifiers = rustc_hash::FxHashMap::default();
for item in &doc.metadata {
if item.property == "identifier" || item.property == "dc:identifier" {
// Try to get scheme from refinements

View file

@@ -4,7 +4,9 @@ pub mod image;
pub mod markdown;
pub mod video;
use std::{collections::HashMap, path::Path};
use std::path::Path;
use rustc_hash::FxHashMap;
use crate::{error::Result, media_type::MediaType, model::BookMetadata};
@@ -17,7 +19,7 @@ pub struct ExtractedMetadata {
pub year: Option<i32>,
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub extra: HashMap<String, String>,
pub extra: FxHashMap<String, String>,
pub book_metadata: Option<BookMetadata>,
// Photo-specific metadata

View file

@@ -1,6 +1,7 @@
use std::{collections::HashMap, fmt, path::PathBuf};
use std::{fmt, path::PathBuf};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@@ -129,7 +130,7 @@ pub struct MediaItem {
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub thumbnail_path: Option<PathBuf>,
pub custom_fields: HashMap<String, CustomField>,
pub custom_fields: FxHashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental
/// scanning
pub file_mtime: Option<i64>,
@@ -434,7 +435,7 @@ pub struct BookMetadata {
pub series_index: Option<f64>,
pub format: Option<String>,
pub authors: Vec<AuthorInfo>,
pub identifiers: HashMap<String, Vec<String>>,
pub identifiers: FxHashMap<String, Vec<String>>,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
@@ -454,7 +455,7 @@ impl Default for BookMetadata {
series_index: None,
format: None,
authors: Vec::new(),
identifiers: HashMap::new(),
identifiers: FxHashMap::default(),
created_at: now,
updated_at: now,
}

View file

@@ -186,17 +186,19 @@ impl PluginManager {
fn resolve_load_order(
manifests: &[pinakes_plugin_api::PluginManifest],
) -> Vec<pinakes_plugin_api::PluginManifest> {
use std::collections::{HashMap, HashSet, VecDeque};
use std::collections::VecDeque;
use rustc_hash::{FxHashMap, FxHashSet};
// Index manifests by name for O(1) lookup
let by_name: HashMap<&str, usize> = manifests
let by_name: FxHashMap<&str, usize> = manifests
.iter()
.enumerate()
.map(|(i, m)| (m.plugin.name.as_str(), i))
.collect();
// Check for missing dependencies and warn early
let known: HashSet<&str> = by_name.keys().copied().collect();
let known: FxHashSet<&str> = by_name.keys().copied().collect();
for manifest in manifests {
for dep in &manifest.plugin.dependencies {
if !known.contains(dep.as_str()) {
@@ -250,7 +252,7 @@ impl PluginManager {
// Anything not in `result` is part of a cycle or has a missing dep
if result.len() < manifests.len() {
let loaded: HashSet<&str> =
let loaded: FxHashSet<&str> =
result.iter().map(|m| m.plugin.name.as_str()).collect();
for manifest in manifests {
if !loaded.contains(manifest.plugin.name.as_str()) {
@ -669,9 +671,9 @@ impl PluginManager {
/// none declare theme extensions.
pub async fn list_ui_theme_extensions(
&self,
) -> std::collections::HashMap<String, String> {
) -> rustc_hash::FxHashMap<String, String> {
let registry = self.registry.read().await;
let mut merged = std::collections::HashMap::new();
let mut merged = rustc_hash::FxHashMap::default();
for plugin in registry.list_all() {
if !plugin.enabled {
continue;

View file

@ -13,12 +13,12 @@
//! priority 100. A circuit breaker disables plugins after consecutive failures.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
time::{Duration, Instant},
};
use rustc_hash::FxHashMap;
use tokio::sync::RwLock;
use tracing::{debug, info, warn};
@@ -75,22 +75,22 @@ struct CachedCapabilities {
/// Keyed by `(kind, plugin_id)` -> list of supported type strings.
/// Separate entries for each kind avoid collisions when a plugin
/// implements both `metadata_extractor` and `thumbnail_generator`.
supported_types: HashMap<(String, String), Vec<String>>,
supported_types: FxHashMap<(String, String), Vec<String>>,
/// `plugin_id` -> list of interested event type strings
interested_events: HashMap<String, Vec<String>>,
interested_events: FxHashMap<String, Vec<String>>,
/// `plugin_id` -> list of media type definitions (for `MediaTypeProvider`)
media_type_definitions: HashMap<String, Vec<PluginMediaTypeDefinition>>,
media_type_definitions: FxHashMap<String, Vec<PluginMediaTypeDefinition>>,
/// `plugin_id` -> list of theme definitions (for `ThemeProvider`)
theme_definitions: HashMap<String, Vec<PluginThemeDefinition>>,
theme_definitions: FxHashMap<String, Vec<PluginThemeDefinition>>,
}
impl CachedCapabilities {
fn new() -> Self {
Self {
supported_types: HashMap::new(),
interested_events: HashMap::new(),
media_type_definitions: HashMap::new(),
theme_definitions: HashMap::new(),
supported_types: FxHashMap::default(),
interested_events: FxHashMap::default(),
media_type_definitions: FxHashMap::default(),
theme_definitions: FxHashMap::default(),
}
}
}
@@ -101,7 +101,7 @@ pub struct PluginPipeline {
manager: Arc<PluginManager>,
timeouts: PluginTimeoutConfig,
max_consecutive_failures: u32,
health: RwLock<HashMap<String, PluginHealth>>,
health: RwLock<FxHashMap<String, PluginHealth>>,
capabilities: RwLock<CachedCapabilities>,
}
@@ -117,7 +117,7 @@ impl PluginPipeline {
manager,
timeouts,
max_consecutive_failures,
health: RwLock::new(HashMap::new()),
health: RwLock::new(FxHashMap::default()),
capabilities: RwLock::new(CachedCapabilities::new()),
}
}
@@ -826,7 +826,7 @@ impl PluginPipeline {
}
// Deduplicate by ID, keeping the highest-scoring entry
let mut seen: HashMap<String, usize> = HashMap::new();
let mut seen: FxHashMap<String, usize> = FxHashMap::default();
let mut deduped: Vec<SearchResultItem> = Vec::new();
for item in all_results {
if let Some(&idx) = seen.get(&item.id) {
@@ -1363,7 +1363,7 @@ mod tests {
year: Some(2024),
duration_secs: None,
description: None,
extra: HashMap::new(),
extra: FxHashMap::default(),
};
merge_metadata(&mut base, &resp);
@@ -1379,7 +1379,7 @@
let mut base = ExtractedMetadata::default();
base.extra.insert("key1".to_string(), "val1".to_string());
let mut extra = HashMap::new();
let mut extra = FxHashMap::default();
extra.insert("key2".to_string(), "val2".to_string());
extra.insert("key1".to_string(), "overwritten".to_string());

View file

@@ -1,9 +1,10 @@
//! Plugin registry for managing loaded plugins
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use anyhow::{Result, anyhow};
use pinakes_plugin_api::{PluginManifest, PluginMetadata};
use rustc_hash::FxHashMap;
use super::runtime::WasmPlugin;
@@ -21,7 +22,7 @@
/// Plugin registry maintains the state of all loaded plugins
pub struct PluginRegistry {
/// Map of plugin ID to registered plugin
plugins: HashMap<String, RegisteredPlugin>,
plugins: FxHashMap<String, RegisteredPlugin>,
}
impl PluginRegistry {
@@ -29,7 +30,7 @@ impl PluginRegistry {
#[must_use]
pub fn new() -> Self {
Self {
plugins: HashMap::new(),
plugins: FxHashMap::default(),
}
}
@@ -156,9 +157,8 @@ impl Default for PluginRegistry {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use pinakes_plugin_api::{Capabilities, manifest::ManifestCapabilities};
use rustc_hash::FxHashMap;
use super::*;
@@ -181,7 +181,7 @@
priority: 0,
},
capabilities: ManifestCapabilities::default(),
config: HashMap::new(),
config: FxHashMap::default(),
ui: Default::default(),
};

View file

@@ -4,8 +4,9 @@
//! Requests are serialized to JSON, passed to the plugin, and responses
//! are deserialized from JSON written by the plugin via `host_set_result`.
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
/// Request to check if a plugin can handle a file
@@ -55,7 +56,7 @@ pub struct ExtractMetadataResponse {
#[serde(default)]
pub description: Option<String>,
#[serde(default)]
pub extra: HashMap<String, String>,
pub extra: FxHashMap<String, String>,
}
/// Request to generate a thumbnail
@ -140,7 +141,7 @@ pub struct PluginThemeDefinition {
#[derive(Debug, Clone, Deserialize)]
pub struct LoadThemeResponse {
pub css: Option<String>,
pub colors: HashMap<String, String>,
pub colors: FxHashMap<String, String>,
}
#[cfg(test)]

View file

@@ -272,7 +272,7 @@ impl Default for WasmPlugin {
context: PluginContext {
data_dir: std::env::temp_dir(),
cache_dir: std::env::temp_dir(),
config: std::collections::HashMap::new(),
config: Default::default(),
capabilities: Default::default(),
},
}
@@ -774,9 +774,8 @@ impl HostFunctions {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use pinakes_plugin_api::PluginContext;
use rustc_hash::FxHashMap;
use super::*;
@@ -795,7 +794,7 @@
let context = PluginContext {
data_dir: "/tmp/data".into(),
cache_dir: "/tmp/cache".into(),
config: HashMap::new(),
config: Default::default(),
capabilities,
};
@@ -838,7 +837,7 @@
let mut context = PluginContext {
data_dir: "/tmp/data".into(),
cache_dir: "/tmp/cache".into(),
config: HashMap::new(),
config: FxHashMap::default(),
capabilities: Default::default(),
};

View file

@@ -5,6 +5,7 @@ pub mod sqlite;
use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@@ -221,7 +222,7 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<std::collections::HashMap<String, CustomField>>;
) -> Result<FxHashMap<String, CustomField>>;
/// Delete a custom field from a media item by name.
async fn delete_custom_field(
@ -1170,11 +1171,15 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn count_unresolved_links(&self) -> Result<u64>;
/// Create a backup of the database to the specified path.
/// Default implementation returns unsupported; `SQLite` overrides with
/// VACUUM INTO.
///
/// Only supported for `SQLite` (uses VACUUM INTO). `PostgreSQL`
/// deployments should use `pg_dump` directly; this method returns
/// `PinakesError::InvalidOperation` for unsupported backends.
async fn backup(&self, _dest: &std::path::Path) -> Result<()> {
Err(crate::error::PinakesError::InvalidOperation(
"backup not supported for this storage backend".to_string(),
"backup not supported for this storage backend; use pg_dump for \
PostgreSQL"
.to_string(),
))
}
}

View file

@ -1,9 +1,10 @@
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use chrono::Utc;
use deadpool_postgres::{Config as PoolConfig, Pool, Runtime};
use native_tls::TlsConnector;
use postgres_native_tls::MakeTlsConnector;
use rustc_hash::FxHashMap;
use tokio_postgres::{NoTls, Row, types::ToSql};
use uuid::Uuid;
@ -215,7 +216,7 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")
.map(PathBuf::from),
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: row.get("file_mtime"),
// Photo-specific fields
@ -922,8 +923,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -1596,8 +1597,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -1759,8 +1760,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -1894,7 +1895,7 @@ impl StorageBackend for PostgresBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<HashMap<String, CustomField>> {
) -> Result<FxHashMap<String, CustomField>> {
let client = self
.pool
.get()
@ -1909,7 +1910,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for row in &rows {
let name: String = row.get("field_name");
let ft_str: String = row.get("field_type");
@ -1988,8 +1989,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -2066,8 +2067,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -2089,8 +2090,8 @@ impl StorageBackend for PostgresBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
let mut grouped_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut grouped_indices: rustc_hash::FxHashSet<usize> =
rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@ -2952,8 +2953,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -3365,8 +3366,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -3553,8 +3554,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -3623,8 +3624,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -3728,7 +3729,7 @@ impl StorageBackend for PostgresBackend {
))
})
})
.transpose()?;
.transpose()?; // u32 fits in i32 for any valid track index, error is a safeguard
let offset_ms = i32::try_from(subtitle.offset_ms).map_err(|_| {
PinakesError::InvalidOperation(format!(
"subtitle offset_ms {} exceeds i32 range",
@ -3790,7 +3791,7 @@ impl StorageBackend for PostgresBackend {
is_embedded: row.get("is_embedded"),
track_index: row
.get::<_, Option<i32>>("track_index")
.map(|i| usize::try_from(i).unwrap_or(0)),
.map(|i| u32::try_from(i).unwrap_or(0)),
offset_ms: i64::from(row.get::<_, i32>("offset_ms")),
created_at: row.get("created_at"),
}
@ -4448,8 +4449,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut identifiers: std::collections::HashMap<String, Vec<String>> =
std::collections::HashMap::new();
let mut identifiers: FxHashMap<String, Vec<String>> = FxHashMap::default();
for r in id_rows {
let id_type: String = r.get(0);
let value: String = r.get(1);
@ -7031,11 +7031,11 @@ impl StorageBackend for PostgresBackend {
let depth = depth.min(5); // Limit depth
let mut nodes = Vec::new();
let mut edges = Vec::new();
let node_ids: std::collections::HashSet<String> =
let node_ids: rustc_hash::FxHashSet<String> =
if let Some(center) = center_id {
// BFS to find connected nodes within depth
let mut frontier = vec![center.0.to_string()];
let mut visited = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center.0.to_string());
for _ in 0..depth {
@ -7099,7 +7099,7 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let mut collected = std::collections::HashSet::new();
let mut collected = rustc_hash::FxHashSet::default();
for row in rows {
let id: String = row.get(0);
collected.insert(id);

View file

@ -1,11 +1,11 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::{Arc, Mutex},
};
use chrono::{DateTime, NaiveDateTime, Utc};
use rusqlite::{Connection, Row, params};
use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@ -142,7 +142,7 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")?
.map(PathBuf::from),
custom_fields: HashMap::new(), // loaded separately
custom_fields: FxHashMap::default(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
@ -358,7 +358,7 @@ fn load_user_profile_sync(
fn load_custom_fields_sync(
db: &Connection,
media_id: MediaId,
) -> rusqlite::Result<HashMap<String, CustomField>> {
) -> rusqlite::Result<FxHashMap<String, CustomField>> {
let mut stmt = db.prepare(
"SELECT field_name, field_type, field_value FROM custom_fields WHERE \
media_id = ?1",
@ -372,7 +372,7 @@ fn load_custom_fields_sync(
value,
}))
})?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@ -409,8 +409,8 @@ fn load_custom_fields_batch(
Ok((mid_str, name, ft_str, value))
})?;
let mut fields_map: HashMap<String, HashMap<String, CustomField>> =
HashMap::new();
let mut fields_map: FxHashMap<String, FxHashMap<String, CustomField>> =
FxHashMap::default();
for r in rows {
let (mid_str, name, ft_str, value) = r?;
fields_map
@ -1762,7 +1762,7 @@ impl StorageBackend for SqliteBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<HashMap<String, CustomField>> {
) -> Result<FxHashMap<String, CustomField>> {
let conn = Arc::clone(&self.conn);
tokio::task::spawn_blocking(move || {
let map = {
@ -1783,7 +1783,7 @@ impl StorageBackend for SqliteBackend {
}))
})?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@ -2093,8 +2093,8 @@ impl StorageBackend for SqliteBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
let mut grouped_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut grouped_indices: rustc_hash::FxHashSet<usize> =
rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@ -4297,9 +4297,7 @@ impl StorageBackend for SqliteBackend {
.as_ref()
.map(|p| p.to_string_lossy().to_string());
let is_embedded = subtitle.is_embedded;
let track_index = subtitle
.track_index
.map(|i| i64::try_from(i).unwrap_or(i64::MAX));
let track_index = subtitle.track_index.map(i64::from);
let offset_ms = subtitle.offset_ms;
let now = subtitle.created_at.to_rfc3339();
let fut = tokio::task::spawn_blocking(move || {
@ -4365,7 +4363,7 @@ impl StorageBackend for SqliteBackend {
is_embedded: row.get::<_, i32>(5)? != 0,
track_index: row
.get::<_, Option<i64>>(6)?
.map(|i| usize::try_from(i).unwrap_or(0)),
.and_then(|i| u32::try_from(i).ok()),
offset_ms: row.get(7)?,
created_at: parse_datetime(&created_str),
})
@ -5265,8 +5263,8 @@ impl StorageBackend for SqliteBackend {
"SELECT identifier_type, identifier_value
FROM book_identifiers WHERE media_id = ?1",
)?;
let mut identifiers: std::collections::HashMap<String, Vec<String>> =
std::collections::HashMap::new();
let mut identifiers: FxHashMap<String, Vec<String>> =
FxHashMap::default();
for row in stmt.query_map([&media_id_str], |row| {
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
})? {
@ -8336,13 +8334,13 @@ impl StorageBackend for SqliteBackend {
let conn = conn.lock().map_err(|e| PinakesError::Database(format!("connection mutex poisoned: {e}")))?;
let mut nodes = Vec::new();
let mut edges = Vec::new();
let mut node_ids = std::collections::HashSet::new();
let mut node_ids = rustc_hash::FxHashSet::default();
// Get nodes - either all markdown files or those connected to center
if let Some(center_id) = center_id_str {
// BFS to find connected nodes within depth
let mut frontier = vec![center_id.clone()];
let mut visited = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center_id);
for _ in 0..depth {

View file

@ -1,6 +1,6 @@
//! Subtitle management for video media items.
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@ -17,7 +17,7 @@ pub struct Subtitle {
pub format: SubtitleFormat,
pub file_path: Option<PathBuf>,
pub is_embedded: bool,
pub track_index: Option<usize>,
pub track_index: Option<u32>,
pub offset_ms: i64,
pub created_at: DateTime<Utc>,
}
@ -33,6 +33,25 @@ pub enum SubtitleFormat {
Pgs,
}
impl SubtitleFormat {
    /// The conventional MIME type served for this subtitle format.
    #[must_use]
    pub const fn mime_type(self) -> &'static str {
        // ASS/SSA have no registered MIME type; serve them as UTF-8 text.
        match self {
            Self::Ass | Self::Ssa => "text/plain; charset=utf-8",
            Self::Srt => "application/x-subrip",
            Self::Vtt => "text/vtt",
            Self::Pgs => "application/octet-stream",
        }
    }

    /// Whether this format carries binary (image) data rather than UTF-8 text.
    #[must_use]
    pub const fn is_binary(self) -> bool {
        // PGS is the only bitmap-based format here; all others are text.
        match self {
            Self::Pgs => true,
            Self::Srt | Self::Vtt | Self::Ass | Self::Ssa => false,
        }
    }
}
impl std::fmt::Display for SubtitleFormat {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
@ -60,3 +79,298 @@ impl std::str::FromStr for SubtitleFormat {
}
}
}
use crate::error::{PinakesError, Result};
/// Information about a subtitle track embedded in a media container.
///
/// Produced by [`list_embedded_tracks`]; `index` is the same zero-based
/// subtitle-stream number that [`extract_embedded_track`] maps via
/// ffmpeg's `0:s:N` stream selector.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SubtitleTrackInfo {
    /// Zero-based index among subtitle streams, as reported by ffprobe.
    pub index: u32,
    /// BCP 47 language code extracted from stream tags, if present.
    /// NOTE(review): taken verbatim from container tags — not validated here.
    pub language: Option<String>,
    /// Subtitle format derived from the codec name.
    pub format: SubtitleFormat,
    /// Human-readable title from stream tags, if present.
    pub title: Option<String>,
}
/// Infers the subtitle format from a path's file extension.
///
/// The comparison is case-insensitive. Returns `None` when the extension
/// is absent or not a recognised subtitle type.
#[must_use]
pub fn detect_format(path: &Path) -> Option<SubtitleFormat> {
    let ext = path.extension()?.to_str()?.to_lowercase();
    let format = match ext.as_str() {
        "srt" => SubtitleFormat::Srt,
        "vtt" => SubtitleFormat::Vtt,
        "ass" => SubtitleFormat::Ass,
        "ssa" => SubtitleFormat::Ssa,
        // `.sup` is the common container extension for PGS bitmaps.
        "pgs" | "sup" => SubtitleFormat::Pgs,
        _ => return None,
    };
    Some(format)
}
/// Validates a BCP 47 language code.
///
/// Accepts a primary tag of 2-3 ASCII letters followed by zero or more
/// hyphen-separated subtags of 2-8 ASCII alphanumeric characters each.
/// Examples: `en`, `en-US`, `zh-Hant`, `zh-Hant-TW`.
///
/// This is a structural check only; it does not consult the IANA language
/// subtag registry.
#[must_use]
pub fn validate_language_code(lang: &str) -> bool {
    // Equivalent to the regex `^[A-Za-z]{2,3}(-[A-Za-z0-9]{2,8})*$`, checked
    // directly: the grammar is simple enough that hand-rolling it removes
    // the regex compilation (and the `expect` it needed) from this path.
    // Length checks count bytes, which equals character count once the
    // all-ASCII checks below pass.
    let mut subtags = lang.split('-');
    // `split` always yields at least one (possibly empty) piece.
    let Some(primary) = subtags.next() else {
        return false;
    };
    if !matches!(primary.len(), 2..=3)
        || !primary.bytes().all(|b| b.is_ascii_alphabetic())
    {
        return false;
    }
    subtags.all(|tag| {
        matches!(tag.len(), 2..=8)
            && tag.bytes().all(|b| b.is_ascii_alphanumeric())
    })
}
/// Lists subtitle tracks embedded in a media file using ffprobe.
///
/// Returns an empty vec if the file has no subtitle streams. Streams whose
/// codec does not map to a known [`SubtitleFormat`] are skipped, but still
/// consume an index, so `index` always matches ffprobe's `0:s:N` numbering.
///
/// # Errors
///
/// Returns `PinakesError::ExternalTool` if ffprobe is not available, exits
/// with an error code, or produces unparseable output.
pub async fn list_embedded_tracks(
    media_path: &Path,
) -> Result<Vec<SubtitleTrackInfo>> {
    let output = tokio::process::Command::new("ffprobe")
        .args([
            // `-v error` (not `quiet`): with `quiet` a failing run leaves
            // stderr empty and the ExternalTool error below carries no
            // diagnostics. JSON output still goes to stdout.
            "-v",
            "error",
            "-print_format",
            "json",
            "-show_streams",
            "-select_streams",
            "s",
        ])
        .arg(media_path)
        .output()
        .await
        .map_err(|e| PinakesError::ExternalTool {
            tool: "ffprobe".into(),
            stderr: e.to_string(),
        })?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
        return Err(PinakesError::ExternalTool {
            tool: "ffprobe".into(),
            stderr,
        });
    }
    let json: serde_json::Value = serde_json::from_slice(&output.stdout)
        .map_err(|e| PinakesError::ExternalTool {
            tool: "ffprobe".into(),
            stderr: format!("failed to parse output: {e}"),
        })?;
    // No "streams" array at all means no subtitle streams were selected.
    let Some(streams) = json.get("streams").and_then(|s| s.as_array()) else {
        return Ok(Vec::new());
    };
    let mut tracks = Vec::new();
    for (idx, stream) in streams.iter().enumerate() {
        let codec_name = stream
            .get("codec_name")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        let format = match codec_name {
            "subrip" => SubtitleFormat::Srt,
            "webvtt" => SubtitleFormat::Vtt,
            "ass" | "ssa" => SubtitleFormat::Ass,
            "hdmv_pgs_subtitle" | "pgssub" => SubtitleFormat::Pgs,
            // Unknown codec: skip the track but keep its index slot so
            // `index` stays aligned with ffmpeg's `0:s:N` selector.
            _ => continue,
        };
        let tags = stream.get("tags");
        let language = tags
            .and_then(|t| t.get("language"))
            .and_then(|v| v.as_str())
            .map(str::to_owned);
        let title = tags
            .and_then(|t| t.get("title"))
            .and_then(|v| v.as_str())
            .map(str::to_owned);
        tracks.push(SubtitleTrackInfo {
            // Stream counts are far below u32::MAX in practice; saturate
            // instead of a lossy `as` cast.
            index: u32::try_from(idx).unwrap_or(u32::MAX),
            language,
            format,
            title,
        });
    }
    Ok(tracks)
}
/// Extracts an embedded subtitle track from a media file using ffmpeg.
///
/// The caller must ensure the output directory exists before calling this
/// function. The output format is determined by the file extension of
/// `output_path`.
///
/// # Errors
///
/// Returns `PinakesError::ExternalTool` if ffmpeg is not available or exits
/// with a non-zero status.
pub async fn extract_embedded_track(
    media_path: &Path,
    track_index: u32,
    output_path: &Path,
) -> Result<()> {
    let output = tokio::process::Command::new("ffmpeg")
        // `-v error` (not `quiet`): with `quiet`, a failed run leaves
        // stderr empty and the ExternalTool error carries no diagnostics.
        .args(["-v", "error", "-i"])
        .arg(media_path)
        // `0:s:N` selects the N-th subtitle stream of the first input;
        // `-y` overwrites any stale output file from a previous attempt.
        .args(["-map", &format!("0:s:{track_index}"), "-y"])
        .arg(output_path)
        .output()
        .await
        .map_err(|e| PinakesError::ExternalTool {
            tool: "ffmpeg".into(),
            stderr: e.to_string(),
        })?;
    if output.status.success() {
        Ok(())
    } else {
        Err(PinakesError::ExternalTool {
            tool: "ffmpeg".into(),
            stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
        })
    }
}
#[cfg(test)]
mod tests {
    use std::path::Path;

    use super::{SubtitleFormat, detect_format, validate_language_code};

    /// Asserts that `name` detects as `expected` (or `None`).
    fn check_detect(name: &str, expected: Option<SubtitleFormat>) {
        assert_eq!(detect_format(Path::new(name)), expected);
    }

    #[test]
    fn test_detect_format_srt() {
        check_detect("track.srt", Some(SubtitleFormat::Srt));
    }

    #[test]
    fn test_detect_format_vtt() {
        check_detect("track.vtt", Some(SubtitleFormat::Vtt));
    }

    #[test]
    fn test_detect_format_ass() {
        check_detect("track.ass", Some(SubtitleFormat::Ass));
    }

    #[test]
    fn test_detect_format_ssa() {
        check_detect("track.ssa", Some(SubtitleFormat::Ssa));
    }

    #[test]
    fn test_detect_format_pgs() {
        check_detect("track.pgs", Some(SubtitleFormat::Pgs));
    }

    #[test]
    fn test_detect_format_sup() {
        // `.sup` is an alternate extension for PGS bitmap subtitles.
        check_detect("track.sup", Some(SubtitleFormat::Pgs));
    }

    #[test]
    fn test_detect_format_unknown() {
        check_detect("track.xyz", None);
    }

    #[test]
    fn test_detect_format_no_extension() {
        check_detect("track", None);
    }

    #[test]
    fn test_detect_format_case_insensitive() {
        check_detect("track.SRT", Some(SubtitleFormat::Srt));
        check_detect("track.VTT", Some(SubtitleFormat::Vtt));
    }

    #[test]
    fn test_validate_language_code_simple() {
        assert!(validate_language_code("en"));
    }

    #[test]
    fn test_validate_language_code_with_region() {
        assert!(validate_language_code("en-US"));
    }

    #[test]
    fn test_validate_language_code_script() {
        assert!(validate_language_code("zh-Hant"));
    }

    #[test]
    fn test_validate_language_code_full() {
        assert!(validate_language_code("zh-Hant-TW"));
    }

    #[test]
    fn test_validate_language_code_empty() {
        assert!(!validate_language_code(""));
    }

    #[test]
    fn test_validate_language_code_primary_too_long() {
        assert!(!validate_language_code("toolong-tag-over-3-chars"));
    }

    #[test]
    fn test_validate_language_code_underscore_separator() {
        // BCP 47 separates subtags with hyphens, never underscores.
        assert!(!validate_language_code("en_US"));
    }

    #[test]
    fn test_validate_language_code_subtag_too_short() {
        assert!(!validate_language_code("en-a"));
    }

    #[test]
    fn test_validate_language_code_three_letter_primary() {
        assert!(validate_language_code("eng"));
    }
}

View file

@ -1,12 +1,12 @@
//! Transcoding service for media files using `FFmpeg`.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tokio::sync::{RwLock, Semaphore};
use uuid::Uuid;
@ -94,7 +94,7 @@ impl TranscodeStatus {
/// Service managing transcoding sessions and `FFmpeg` invocations.
pub struct TranscodeService {
pub config: TranscodingConfig,
pub sessions: Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
pub sessions: Arc<RwLock<FxHashMap<Uuid, TranscodeSession>>>,
semaphore: Arc<Semaphore>,
}
@ -103,7 +103,7 @@ impl TranscodeService {
pub fn new(config: TranscodingConfig) -> Self {
let max_concurrent = config.max_concurrent.max(1);
Self {
sessions: Arc::new(RwLock::new(HashMap::new())),
sessions: Arc::new(RwLock::new(FxHashMap::default())),
semaphore: Arc::new(Semaphore::new(max_concurrent)),
config,
}
@ -481,7 +481,7 @@ fn get_ffmpeg_args(
/// Run `FFmpeg` as a child process, parsing progress from stdout.
async fn run_ffmpeg(
args: &[String],
sessions: &Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
sessions: &Arc<RwLock<FxHashMap<Uuid, TranscodeSession>>>,
session_id: Uuid,
duration_secs: Option<f64>,
cancel: Arc<tokio::sync::Notify>,

View file

@ -3,7 +3,7 @@
//! Handles file uploads, metadata extraction, and `MediaItem` creation
//! for files stored in managed content-addressable storage.
use std::{collections::HashMap, path::Path};
use std::path::Path;
use chrono::Utc;
use tokio::io::AsyncRead;
@ -85,7 +85,7 @@ pub async fn process_upload<R: AsyncRead + Unpin>(
duration_secs: extracted.as_ref().and_then(|m| m.duration_secs),
description: extracted.as_ref().and_then(|m| m.description.clone()),
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: rustc_hash::FxHashMap::default(),
file_mtime: None,
date_taken: extracted.as_ref().and_then(|m| m.date_taken),
latitude: extracted.as_ref().and_then(|m| m.latitude),

View file

@ -1,8 +1,7 @@
//! User management and authentication
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@ -78,7 +77,7 @@ pub struct UserPreferences {
pub auto_play: bool,
/// Custom preferences (extensible)
pub custom: HashMap<String, serde_json::Value>,
pub custom: FxHashMap<String, serde_json::Value>,
}
/// Library access permission

View file

@ -3,13 +3,14 @@
// the test suite
#![allow(dead_code)]
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use std::{path::PathBuf, sync::Arc};
use pinakes_core::{
media_type::{BuiltinMediaType, MediaType},
model::{ContentHash, MediaId, MediaItem, StorageMode},
storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend},
};
use rustc_hash::FxHashMap;
use tempfile::TempDir;
use uuid::Uuid;
@ -46,7 +47,7 @@ pub fn make_test_media(hash: &str) -> MediaItem {
duration_secs: Some(120.0),
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -83,7 +84,7 @@ pub fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -121,7 +122,7 @@ pub fn make_test_markdown_item(id: MediaId) -> MediaItem {
duration_secs: None,
description: Some("Test markdown note".to_string()),
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,

View file

@ -1,6 +1,5 @@
use std::collections::HashMap;
use pinakes_core::{model::*, storage::StorageBackend};
use rustc_hash::FxHashMap;
mod common;
use common::{make_test_media, setup};
@ -28,7 +27,7 @@ async fn test_media_crud() {
duration_secs: None,
description: Some("A test file".to_string()),
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -120,7 +119,7 @@ async fn test_tags() {
duration_secs: Some(180.0),
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -191,7 +190,7 @@ async fn test_collections() {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -252,7 +251,7 @@ async fn test_custom_fields() {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -334,7 +333,7 @@ async fn test_search() {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -479,7 +478,7 @@ async fn test_library_statistics_with_data() {
duration_secs: Some(120.0),
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,

View file

@ -19,6 +19,7 @@ toml = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
mime_guess = { workspace = true }
rustc-hash = { workspace = true }
# WASM bridge types
wit-bindgen = { workspace = true, optional = true }

View file

@ -1,15 +1,13 @@
//! Pinakes Plugin API
//!
//! This crate defines the stable plugin interface for Pinakes.
//! Plugins can extend Pinakes by implementing one or more of the provided
//! Defines the "stable" plugin interface for Pinakes. Using this interface,
//! plugins can extend Pinakes by implementing one or more of the provided
//! traits.
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use async_trait::async_trait;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
@ -25,6 +23,7 @@ pub use ui_schema::*;
pub use wasm::host_functions;
/// Plugin API version - plugins must match this version
/// FIXME: handle breaking changes for the API after stabilizing
pub const PLUGIN_API_VERSION: &str = "1.0";
/// Result type for plugin operations
@ -74,7 +73,7 @@ pub struct PluginContext {
pub cache_dir: PathBuf,
/// Plugin configuration from manifest
pub config: HashMap<String, serde_json::Value>,
pub config: FxHashMap<String, serde_json::Value>,
/// Capabilities granted to the plugin
pub capabilities: Capabilities,
@ -160,7 +159,7 @@ pub struct PluginMetadata {
pub struct HealthStatus {
pub healthy: bool,
pub message: Option<String>,
pub metrics: HashMap<String, f64>,
pub metrics: FxHashMap<String, f64>,
}
/// Trait for plugins that provide custom media type support
@ -227,7 +226,7 @@ pub struct ExtractedMetadata {
pub bitrate_kbps: Option<u32>,
/// Custom metadata fields specific to this file type
pub custom_fields: HashMap<String, serde_json::Value>,
pub custom_fields: FxHashMap<String, serde_json::Value>,
/// Tags extracted from the file
pub tags: Vec<String>,
@ -301,14 +300,14 @@ pub struct SearchIndexItem {
pub content: Option<String>,
pub tags: Vec<String>,
pub media_type: String,
pub metadata: HashMap<String, serde_json::Value>,
pub metadata: FxHashMap<String, serde_json::Value>,
}
/// Search query
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchQuery {
pub query_text: String,
pub filters: HashMap<String, serde_json::Value>,
pub filters: FxHashMap<String, serde_json::Value>,
pub limit: usize,
pub offset: usize,
}
@ -357,10 +356,11 @@ pub enum EventType {
/// Event data
#[derive(Debug, Clone, Serialize, Deserialize)]
#[expect(clippy::struct_field_names)]
pub struct Event {
pub event_type: EventType,
pub timestamp: String,
pub data: HashMap<String, serde_json::Value>,
pub data: FxHashMap<String, serde_json::Value>,
}
/// Trait for plugins that provide UI themes
@ -387,7 +387,7 @@ pub struct ThemeDefinition {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Theme {
pub id: String,
pub colors: HashMap<String, String>,
pub fonts: HashMap<String, String>,
pub colors: FxHashMap<String, String>,
pub fonts: FxHashMap<String, String>,
pub custom_css: Option<String>,
}

View file

@ -1,7 +1,8 @@
//! Plugin manifest parsing and validation
use std::{collections::HashMap, path::Path};
use std::path::Path;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
@ -23,7 +24,7 @@ pub struct PluginManifest {
pub capabilities: ManifestCapabilities,
#[serde(default)]
pub config: HashMap<String, toml::Value>,
pub config: FxHashMap<String, toml::Value>,
/// UI pages provided by this plugin
#[serde(default)]
@ -49,8 +50,8 @@ pub struct UiSection {
/// CSS custom property overrides provided by this plugin.
/// Keys are property names (e.g. `--accent-color`), values are CSS values.
/// The host applies these to `document.documentElement` on startup.
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub theme_extensions: HashMap<String, String>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub theme_extensions: FxHashMap<String, String>,
}
impl UiSection {
@ -194,6 +195,7 @@ pub struct ManifestFilesystemCapability {
}
#[derive(Debug, Error)]
#[expect(clippy::enum_variant_names)]
pub enum ManifestError {
#[error("Failed to read manifest file: {0}")]
IoError(#[from] std::io::Error),
@ -709,7 +711,7 @@ gap = 16
"/api/v1/media".to_string(),
"/api/plugins/my-plugin/data".to_string(),
],
theme_extensions: HashMap::new(),
theme_extensions: FxHashMap::default(),
};
assert!(section.validate().is_ok());
}
@ -720,7 +722,7 @@ gap = 16
pages: vec![],
widgets: vec![],
required_endpoints: vec!["/not-api/something".to_string()],
theme_extensions: HashMap::new(),
theme_extensions: FxHashMap::default(),
};
assert!(section.validate().is_err());
}
@ -731,7 +733,7 @@ gap = 16
pages: vec![],
widgets: vec![],
required_endpoints: vec!["/api/ok".to_string(), "no-slash".to_string()],
theme_extensions: HashMap::new(),
theme_extensions: FxHashMap::default(),
};
let err = section.validate().unwrap_err();
assert!(

View file

@ -49,8 +49,7 @@
//! Array indices use the same notation: `"items.0.title"`.
//! ```
use std::collections::HashMap;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
@ -133,12 +132,12 @@ pub struct UiPage {
pub root_element: UiElement,
/// Named data sources available to this page
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub data_sources: HashMap<String, DataSource>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub data_sources: FxHashMap<String, DataSource>,
/// Named actions available to this page (referenced by `ActionRef::Name`)
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub actions: HashMap<String, ActionDefinition>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub actions: FxHashMap<String, ActionDefinition>,
}
impl UiPage {
@ -204,8 +203,8 @@ impl UiPage {
/// Validates that there are no cycles in Transform data source dependencies
fn validate_no_cycles(&self) -> SchemaResult<()> {
let mut visited = std::collections::HashSet::new();
let mut stack = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
let mut stack = rustc_hash::FxHashSet::default();
for name in self.data_sources.keys() {
Self::dfs_check_cycles(self, name, &mut visited, &mut stack)?;
@ -218,8 +217,8 @@ impl UiPage {
fn dfs_check_cycles(
&self,
name: &str,
visited: &mut std::collections::HashSet<String>,
stack: &mut std::collections::HashSet<String>,
visited: &mut rustc_hash::FxHashSet<String>,
stack: &mut rustc_hash::FxHashSet<String>,
) -> SchemaResult<()> {
if stack.contains(name) {
return Err(SchemaError::ValidationError(format!(
@ -1451,8 +1450,8 @@ pub struct ActionDefinition {
pub path: String,
/// Action parameters (merged with form data on submit)
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub params: HashMap<String, serde_json::Value>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub params: FxHashMap<String, serde_json::Value>,
/// Success message
#[serde(skip_serializing_if = "Option::is_none")]
@ -1509,7 +1508,7 @@ impl Default for ActionDefinition {
Self {
method: default_http_method(),
path: String::new(),
params: HashMap::new(),
params: FxHashMap::default(),
success_message: None,
error_message: None,
navigate_to: None,
@ -1543,8 +1542,8 @@ pub enum DataSource {
path: String,
/// Query parameters
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
params: HashMap<String, Expression>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
params: FxHashMap<String, Expression>,
/// Polling interval in seconds (0 = no polling)
#[serde(default)]
@ -1839,7 +1838,7 @@ mod tests {
let valid = DataSource::Endpoint {
method: HttpMethod::Get,
path: "/api/test".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -1848,7 +1847,7 @@ mod tests {
let invalid = DataSource::Endpoint {
method: HttpMethod::Get,
path: "api/test".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -1898,8 +1897,8 @@ mod tests {
page_size: 0,
row_actions: vec![],
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
let refs = page.referenced_data_sources();
@ -1918,8 +1917,8 @@ mod tests {
columns: 13,
gap: 16,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
assert!(page.validate().is_err());
@ -1937,8 +1936,8 @@ mod tests {
content: TextContent::Static("Title".to_string()),
id: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
assert!(page.validate().is_err());
@ -2005,7 +2004,7 @@ mod tests {
let bad = DataSource::Endpoint {
method: HttpMethod::Get,
path: "/not-api/something".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -2017,7 +2016,7 @@ mod tests {
let bad = DataSource::Endpoint {
method: HttpMethod::Get,
path: "/api/v1/../admin".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -2078,7 +2077,7 @@ mod tests {
#[test]
fn test_link_validation_rejects_unsafe_href() {
use std::collections::HashMap as HM;
use rustc_hash::FxHashMap as HM;
let page = UiPage {
id: "p".to_string(),
title: "P".to_string(),
@ -2089,15 +2088,15 @@ mod tests {
href: "javascript:alert(1)".to_string(),
external: false,
},
data_sources: HM::new(),
actions: HM::new(),
data_sources: HM::default(),
actions: HM::default(),
};
assert!(page.validate().is_err());
}
#[test]
fn test_reserved_route_rejected() {
use std::collections::HashMap as HM;
use rustc_hash::FxHashMap as HM;
let page = UiPage {
id: "search-page".to_string(),
title: "Search".to_string(),
@ -2108,8 +2107,8 @@ mod tests {
gap: 0,
padding: None,
},
data_sources: HM::new(),
actions: HM::new(),
data_sources: HM::default(),
actions: HM::default(),
};
let err = page.validate().unwrap_err();
assert!(

View file

@ -343,7 +343,7 @@ impl SchemaValidator {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use rustc_hash::FxHashMap;
use super::*;
use crate::UiElement;
@ -359,8 +359,8 @@ mod tests {
gap: 0,
padding: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
}
}

View file

@ -1,7 +1,6 @@
//! WASM bridge types and helpers for plugin communication
use std::collections::HashMap;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
/// Memory allocation info for passing data between host and plugin
@ -93,7 +92,7 @@ pub struct LogMessage {
pub level: LogLevel,
pub target: String,
pub message: String,
pub fields: HashMap<String, String>,
pub fields: FxHashMap<String, String>,
}
/// HTTP request parameters
@ -101,7 +100,7 @@ pub struct LogMessage {
pub struct HttpRequest {
pub method: String,
pub url: String,
pub headers: HashMap<String, String>,
pub headers: FxHashMap<String, String>,
pub body: Option<Vec<u8>>,
}
@ -109,7 +108,7 @@ pub struct HttpRequest {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HttpResponse {
pub status: u16,
pub headers: HashMap<String, String>,
pub headers: FxHashMap<String, String>,
pub body: Vec<u8>,
}

View file

@ -1,4 +1,4 @@
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use async_trait::async_trait;
use pinakes_plugin_api::{
@ -25,6 +25,7 @@ use pinakes_plugin_api::{
ThumbnailOptions,
wasm::{HttpRequest, HttpResponse, LogLevel, LogMessage},
};
use rustc_hash::FxHashMap;
struct TestPlugin {
initialized: bool,
@ -41,7 +42,7 @@ impl TestPlugin {
health_status: HealthStatus {
healthy: true,
message: Some("OK".to_string()),
metrics: HashMap::new(),
metrics: FxHashMap::default(),
},
metadata: PluginMetadata {
id: "test-plugin".to_string(),
@ -82,10 +83,12 @@ async fn test_plugin_context_creation() {
let context = PluginContext {
data_dir: PathBuf::from("/data/test-plugin"),
cache_dir: PathBuf::from("/cache/test-plugin"),
config: HashMap::from([
config: [
("enabled".to_string(), serde_json::json!(true)),
("max_items".to_string(), serde_json::json!(100)),
]),
]
.into_iter()
.collect(),
capabilities: Capabilities {
filesystem: FilesystemCapability {
read: vec![PathBuf::from("/data")],
@ -119,7 +122,7 @@ async fn test_plugin_context_fields() {
let context = PluginContext {
data_dir: PathBuf::from("/custom/data"),
cache_dir: PathBuf::from("/custom/cache"),
config: HashMap::new(),
config: FxHashMap::default(),
capabilities: Capabilities::default(),
};
@ -137,7 +140,7 @@ async fn test_plugin_lifecycle() {
let context = PluginContext {
data_dir: PathBuf::from("/data"),
cache_dir: PathBuf::from("/cache"),
config: HashMap::new(),
config: FxHashMap::default(),
capabilities: Capabilities::default(),
};
plugin.initialize(context).await.unwrap();
@ -164,10 +167,12 @@ async fn test_extracted_metadata_structure() {
file_size_bytes: Some(1_500_000),
codec: Some("h264".to_string()),
bitrate_kbps: Some(5000),
custom_fields: HashMap::from([
custom_fields: [
("color_space".to_string(), serde_json::json!("sRGB")),
("orientation".to_string(), serde_json::json!(90)),
]),
]
.into_iter()
.collect(),
tags: vec!["test".to_string(), "document".to_string()],
};
@ -182,10 +187,12 @@ async fn test_extracted_metadata_structure() {
async fn test_search_query_serialization() {
let query = SearchQuery {
query_text: "nature landscape".to_string(),
filters: HashMap::from([
filters: [
("type".to_string(), serde_json::json!("image")),
("year".to_string(), serde_json::json!(2023)),
]),
]
.into_iter()
.collect(),
limit: 50,
offset: 0,
};
@ -329,10 +336,12 @@ async fn test_event_serialization() {
let event = Event {
event_type: EventType::MediaImported,
timestamp: "2024-01-15T10:00:00Z".to_string(),
data: HashMap::from([
data: [
("path".to_string(), serde_json::json!("/media/test.jpg")),
("size".to_string(), serde_json::json!(1024)),
]),
]
.into_iter()
.collect(),
};
let serialized = serde_json::to_string(&event).unwrap();
@ -347,10 +356,12 @@ async fn test_http_request_serialization() {
let request = HttpRequest {
method: "GET".to_string(),
url: "https://api.example.com/data".to_string(),
headers: HashMap::from([
headers: [
("Authorization".to_string(), "Bearer token".to_string()),
("Content-Type".to_string(), "application/json".to_string()),
]),
]
.into_iter()
.collect(),
body: None,
};
@ -366,10 +377,9 @@ async fn test_http_request_serialization() {
async fn test_http_response_serialization() {
let response = HttpResponse {
status: 200,
headers: HashMap::from([(
"Content-Type".to_string(),
"application/json".to_string(),
)]),
headers: [("Content-Type".to_string(), "application/json".to_string())]
.into_iter()
.collect(),
body: b"{\"success\": true}".to_vec(),
};
@ -386,10 +396,12 @@ async fn test_log_message_serialization() {
level: LogLevel::Info,
target: "plugin::metadata".to_string(),
message: "Metadata extraction complete".to_string(),
fields: HashMap::from([
fields: [
("file_count".to_string(), "42".to_string()),
("duration_ms".to_string(), "150".to_string()),
]),
]
.into_iter()
.collect(),
};
let serialized = serde_json::to_string(&message).unwrap();
@ -453,10 +465,12 @@ async fn test_search_index_item_serialization() {
"photos".to_string(),
],
media_type: "image/jpeg".to_string(),
metadata: HashMap::from([
metadata: [
("camera".to_string(), serde_json::json!("Canon EOS R5")),
("location".to_string(), serde_json::json!("Beach")),
]),
]
.into_iter()
.collect(),
};
let serialized = serde_json::to_string(&item).unwrap();
@ -474,17 +488,19 @@ async fn test_health_status_variants() {
let healthy = HealthStatus {
healthy: true,
message: Some("All systems operational".to_string()),
metrics: HashMap::from([
metrics: [
("items_processed".to_string(), 1000.0),
("avg_process_time_ms".to_string(), 45.5),
]),
]
.into_iter()
.collect(),
};
assert!(healthy.healthy);
let unhealthy = HealthStatus {
healthy: false,
message: Some("Database connection failed".to_string()),
metrics: HashMap::new(),
metrics: FxHashMap::default(),
};
assert!(!unhealthy.healthy);
assert_eq!(
@ -571,7 +587,7 @@ async fn test_extracted_metadata_default() {
async fn test_search_query_structure() {
let query = SearchQuery {
query_text: "test query".to_string(),
filters: HashMap::new(),
filters: FxHashMap::default(),
limit: 10,
offset: 0,
};

View file

@ -3,8 +3,6 @@
//! Renderer-level behaviour (e.g., Dioxus components) is out of scope here;
//! that requires a Dioxus runtime and belongs in pinakes-ui tests.
use std::collections::HashMap;
use pinakes_plugin_api::{
DataSource,
HttpMethod,
@ -26,8 +24,8 @@ fn make_page(id: &str, route: &str) -> UiPage {
gap: 0,
padding: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: Default::default(),
actions: Default::default(),
}
}

View file

@ -31,10 +31,15 @@ blake3 = { workspace = true }
rand = { workspace = true }
percent-encoding = { workspace = true }
http = { workspace = true }
rustc-hash = { workspace = true }
utoipa = { workspace = true }
utoipa-axum = { workspace = true }
utoipa-swagger-ui = { workspace = true }
[lints]
workspace = true
[dev-dependencies]
http-body-util = "0.1.3"
tempfile = "3.25.0"
reqwest = { workspace = true }
tempfile = { workspace = true }

View file

@ -0,0 +1,486 @@
use utoipa::OpenApi;
/// Central `OpenAPI` document registry.
/// Handler functions and schemas are added here as route modules are annotated.
///
/// NOTE(review): a handler or schema that is not listed in this attribute is
/// silently absent from the generated spec — keep these lists in sync with the
/// route modules when adding endpoints.
#[derive(OpenApi)]
#[openapi(
    info(
        title = "Pinakes API",
        version = env!("CARGO_PKG_VERSION"),
        description = "Media cataloging and library management API"
    ),
    // Operation paths, grouped by route module (alphabetical).
    paths(
        // analytics
        crate::routes::analytics::get_most_viewed,
        crate::routes::analytics::get_recently_viewed,
        crate::routes::analytics::record_event,
        crate::routes::analytics::get_watch_progress,
        crate::routes::analytics::update_watch_progress,
        // audit
        crate::routes::audit::list_audit,
        // auth
        crate::routes::auth::login,
        crate::routes::auth::logout,
        crate::routes::auth::me,
        crate::routes::auth::refresh,
        crate::routes::auth::revoke_all_sessions,
        crate::routes::auth::list_active_sessions,
        // backup
        crate::routes::backup::create_backup,
        // books
        crate::routes::books::get_book_metadata,
        crate::routes::books::list_books,
        crate::routes::books::list_series,
        crate::routes::books::get_series_books,
        crate::routes::books::list_authors,
        crate::routes::books::get_author_books,
        crate::routes::books::get_reading_progress,
        crate::routes::books::update_reading_progress,
        crate::routes::books::get_reading_list,
        // collections
        crate::routes::collections::create_collection,
        crate::routes::collections::list_collections,
        crate::routes::collections::get_collection,
        crate::routes::collections::delete_collection,
        crate::routes::collections::add_member,
        crate::routes::collections::remove_member,
        crate::routes::collections::get_members,
        // config
        crate::routes::config::get_config,
        crate::routes::config::get_ui_config,
        crate::routes::config::update_ui_config,
        crate::routes::config::update_scanning_config,
        crate::routes::config::add_root,
        crate::routes::config::remove_root,
        // database
        crate::routes::database::database_stats,
        crate::routes::database::vacuum_database,
        crate::routes::database::clear_database,
        // duplicates
        crate::routes::duplicates::list_duplicates,
        // enrichment
        crate::routes::enrichment::trigger_enrichment,
        crate::routes::enrichment::get_external_metadata,
        crate::routes::enrichment::batch_enrich,
        // export
        crate::routes::export::trigger_export,
        crate::routes::export::trigger_export_with_options,
        // health
        crate::routes::health::health,
        crate::routes::health::liveness,
        crate::routes::health::readiness,
        crate::routes::health::health_detailed,
        // integrity
        crate::routes::integrity::trigger_orphan_detection,
        crate::routes::integrity::trigger_verify_integrity,
        crate::routes::integrity::trigger_cleanup_thumbnails,
        crate::routes::integrity::generate_all_thumbnails,
        crate::routes::integrity::resolve_orphans,
        // jobs
        crate::routes::jobs::list_jobs,
        crate::routes::jobs::get_job,
        crate::routes::jobs::cancel_job,
        // media
        crate::routes::media::import_media,
        crate::routes::media::list_media,
        crate::routes::media::get_media,
        crate::routes::media::update_media,
        crate::routes::media::delete_media,
        crate::routes::media::open_media,
        crate::routes::media::import_with_options,
        crate::routes::media::batch_import,
        crate::routes::media::import_directory_endpoint,
        crate::routes::media::preview_directory,
        crate::routes::media::set_custom_field,
        crate::routes::media::delete_custom_field,
        crate::routes::media::batch_tag,
        crate::routes::media::delete_all_media,
        crate::routes::media::batch_delete,
        crate::routes::media::batch_add_to_collection,
        crate::routes::media::batch_update,
        crate::routes::media::get_thumbnail,
        crate::routes::media::get_media_count,
        crate::routes::media::rename_media,
        crate::routes::media::move_media_endpoint,
        crate::routes::media::batch_move_media,
        crate::routes::media::soft_delete_media,
        crate::routes::media::restore_media,
        crate::routes::media::list_trash,
        crate::routes::media::trash_info,
        crate::routes::media::empty_trash,
        crate::routes::media::permanent_delete_media,
        crate::routes::media::stream_media,
        // notes
        crate::routes::notes::get_backlinks,
        crate::routes::notes::get_outgoing_links,
        crate::routes::notes::get_graph,
        crate::routes::notes::reindex_links,
        crate::routes::notes::resolve_links,
        crate::routes::notes::get_unresolved_count,
        // photos
        crate::routes::photos::get_timeline,
        crate::routes::photos::get_map_photos,
        // playlists
        crate::routes::playlists::create_playlist,
        crate::routes::playlists::list_playlists,
        crate::routes::playlists::get_playlist,
        crate::routes::playlists::update_playlist,
        crate::routes::playlists::delete_playlist,
        crate::routes::playlists::add_item,
        crate::routes::playlists::remove_item,
        crate::routes::playlists::list_items,
        crate::routes::playlists::reorder_item,
        crate::routes::playlists::shuffle_playlist,
        // plugins
        crate::routes::plugins::list_plugins,
        crate::routes::plugins::get_plugin,
        crate::routes::plugins::install_plugin,
        crate::routes::plugins::uninstall_plugin,
        crate::routes::plugins::toggle_plugin,
        crate::routes::plugins::list_plugin_ui_pages,
        crate::routes::plugins::list_plugin_ui_widgets,
        crate::routes::plugins::emit_plugin_event,
        crate::routes::plugins::list_plugin_ui_theme_extensions,
        crate::routes::plugins::reload_plugin,
        // saved_searches
        crate::routes::saved_searches::create_saved_search,
        crate::routes::saved_searches::list_saved_searches,
        crate::routes::saved_searches::delete_saved_search,
        // scan
        crate::routes::scan::trigger_scan,
        crate::routes::scan::scan_status,
        // scheduled_tasks
        crate::routes::scheduled_tasks::list_scheduled_tasks,
        crate::routes::scheduled_tasks::toggle_scheduled_task,
        crate::routes::scheduled_tasks::run_scheduled_task_now,
        // search
        crate::routes::search::search,
        crate::routes::search::search_post,
        // shares
        crate::routes::shares::create_share,
        crate::routes::shares::list_outgoing,
        crate::routes::shares::list_incoming,
        crate::routes::shares::get_share,
        crate::routes::shares::update_share,
        crate::routes::shares::delete_share,
        crate::routes::shares::batch_delete,
        crate::routes::shares::access_shared,
        crate::routes::shares::get_activity,
        crate::routes::shares::get_notifications,
        crate::routes::shares::mark_notification_read,
        crate::routes::shares::mark_all_read,
        // social
        crate::routes::social::rate_media,
        crate::routes::social::get_media_ratings,
        crate::routes::social::add_comment,
        crate::routes::social::get_media_comments,
        crate::routes::social::add_favorite,
        crate::routes::social::remove_favorite,
        crate::routes::social::list_favorites,
        crate::routes::social::create_share_link,
        crate::routes::social::access_shared_media,
        // statistics
        crate::routes::statistics::library_statistics,
        // streaming
        crate::routes::streaming::hls_master_playlist,
        crate::routes::streaming::hls_variant_playlist,
        crate::routes::streaming::hls_segment,
        crate::routes::streaming::dash_manifest,
        crate::routes::streaming::dash_segment,
        // subtitles
        crate::routes::subtitles::list_subtitles,
        crate::routes::subtitles::add_subtitle,
        crate::routes::subtitles::delete_subtitle,
        crate::routes::subtitles::get_subtitle_content,
        crate::routes::subtitles::update_offset,
        // sync
        crate::routes::sync::register_device,
        crate::routes::sync::list_devices,
        crate::routes::sync::get_device,
        crate::routes::sync::update_device,
        crate::routes::sync::delete_device,
        crate::routes::sync::regenerate_token,
        crate::routes::sync::get_changes,
        crate::routes::sync::report_changes,
        crate::routes::sync::acknowledge_changes,
        crate::routes::sync::list_conflicts,
        crate::routes::sync::resolve_conflict,
        crate::routes::sync::create_upload,
        crate::routes::sync::upload_chunk,
        crate::routes::sync::get_upload_status,
        crate::routes::sync::complete_upload,
        crate::routes::sync::cancel_upload,
        crate::routes::sync::download_file,
        // tags
        crate::routes::tags::create_tag,
        crate::routes::tags::list_tags,
        crate::routes::tags::get_tag,
        crate::routes::tags::delete_tag,
        crate::routes::tags::tag_media,
        crate::routes::tags::untag_media,
        crate::routes::tags::get_media_tags,
        // transcode
        crate::routes::transcode::start_transcode,
        crate::routes::transcode::get_session,
        crate::routes::transcode::list_sessions,
        crate::routes::transcode::cancel_session,
        // upload
        crate::routes::upload::upload_file,
        crate::routes::upload::download_file,
        crate::routes::upload::move_to_managed,
        crate::routes::upload::managed_stats,
        // users
        crate::routes::users::list_users,
        crate::routes::users::create_user,
        crate::routes::users::get_user,
        crate::routes::users::update_user,
        crate::routes::users::delete_user,
        crate::routes::users::get_user_libraries,
        crate::routes::users::grant_library_access,
        crate::routes::users::revoke_library_access,
        // webhooks
        crate::routes::webhooks::list_webhooks,
        crate::routes::webhooks::test_webhook,
    ),
    // Request/response schemas referenced by the operations above.
    // Shared DTOs live in `crate::dto`; handler-local types are registered
    // from their route module directly.
    components(
        schemas(
            // analytics DTOs
            crate::dto::UsageEventResponse,
            crate::dto::RecordUsageEventRequest,
            // audit DTOs
            crate::dto::AuditEntryResponse,
            // auth local types
            crate::routes::auth::SessionListResponse,
            crate::routes::auth::SessionInfo,
            // batch DTOs
            crate::dto::BatchTagRequest,
            crate::dto::BatchCollectionRequest,
            crate::dto::BatchDeleteRequest,
            crate::dto::BatchUpdateRequest,
            crate::dto::BatchOperationResponse,
            // books local types
            crate::routes::books::BookMetadataResponse,
            crate::routes::books::AuthorResponse,
            crate::routes::books::ReadingProgressResponse,
            crate::routes::books::UpdateProgressRequest,
            crate::routes::books::SeriesSummary,
            crate::routes::books::AuthorSummary,
            // collections DTOs
            crate::dto::CollectionResponse,
            crate::dto::CreateCollectionRequest,
            crate::dto::AddMemberRequest,
            // config DTOs
            crate::dto::ConfigResponse,
            crate::dto::ScanningConfigResponse,
            crate::dto::ServerConfigResponse,
            crate::dto::UpdateScanningRequest,
            crate::dto::RootDirRequest,
            crate::dto::UiConfigResponse,
            crate::dto::UpdateUiConfigRequest,
            // database DTOs
            crate::dto::DatabaseStatsResponse,
            // duplicate DTOs
            crate::dto::DuplicateGroupResponse,
            // enrichment DTOs
            crate::dto::ExternalMetadataResponse,
            // export local types
            crate::routes::export::ExportRequest,
            // health local types
            crate::routes::health::HealthResponse,
            crate::routes::health::DatabaseHealth,
            crate::routes::health::FilesystemHealth,
            crate::routes::health::CacheHealth,
            crate::routes::health::DetailedHealthResponse,
            crate::routes::health::JobsHealth,
            // integrity local types
            crate::routes::integrity::OrphanResolveRequest,
            crate::routes::integrity::VerifyIntegrityRequest,
            crate::routes::integrity::GenerateThumbnailsRequest,
            // media DTOs
            crate::dto::MediaResponse,
            crate::dto::CustomFieldResponse,
            crate::dto::ImportRequest,
            crate::dto::ImportWithOptionsRequest,
            crate::dto::DirectoryImportRequest,
            crate::dto::DirectoryPreviewResponse,
            crate::dto::UpdateMediaRequest,
            crate::dto::MoveMediaRequest,
            crate::dto::RenameMediaRequest,
            crate::dto::BatchMoveRequest,
            crate::dto::BatchImportRequest,
            crate::dto::SetCustomFieldRequest,
            crate::dto::MediaCountResponse,
            crate::dto::TrashInfoResponse,
            crate::dto::ImportResponse,
            crate::dto::TrashResponse,
            crate::dto::EmptyTrashResponse,
            crate::dto::BatchImportResponse,
            crate::dto::BatchImportItemResult,
            crate::dto::DirectoryPreviewFile,
            crate::dto::UpdateMediaFullRequest,
            crate::dto::OpenRequest,
            crate::dto::WatchProgressRequest,
            crate::dto::WatchProgressResponse,
            // notes local types
            crate::routes::notes::BacklinksResponse,
            crate::routes::notes::BacklinkItem,
            crate::routes::notes::OutgoingLinksResponse,
            crate::routes::notes::OutgoingLinkItem,
            crate::routes::notes::GraphResponse,
            crate::routes::notes::GraphNodeResponse,
            crate::routes::notes::GraphEdgeResponse,
            crate::routes::notes::ReindexResponse,
            crate::routes::notes::ResolveLinksResponse,
            crate::routes::notes::UnresolvedLinksResponse,
            // photos local types
            crate::routes::photos::TimelineGroup,
            crate::routes::photos::MapMarker,
            // playlists DTOs
            crate::dto::PlaylistResponse,
            crate::dto::CreatePlaylistRequest,
            crate::dto::UpdatePlaylistRequest,
            crate::dto::PlaylistItemRequest,
            crate::dto::ReorderPlaylistRequest,
            // plugins DTOs
            crate::dto::PluginResponse,
            crate::dto::InstallPluginRequest,
            crate::dto::TogglePluginRequest,
            crate::dto::PluginUiPageEntry,
            crate::dto::PluginUiWidgetEntry,
            crate::dto::PluginEventRequest,
            // saved_searches local types
            crate::routes::saved_searches::CreateSavedSearchRequest,
            crate::routes::saved_searches::SavedSearchResponse,
            // scan DTOs
            crate::dto::ScanRequest,
            crate::dto::ScanResponse,
            crate::dto::ScanJobResponse,
            crate::dto::ScanStatusResponse,
            // search DTOs
            crate::dto::SearchParams,
            crate::dto::SearchResponse,
            crate::dto::SearchRequestBody,
            crate::dto::PaginationParams,
            // sharing DTOs
            crate::dto::CreateShareRequest,
            crate::dto::UpdateShareRequest,
            crate::dto::ShareResponse,
            crate::dto::SharePermissionsRequest,
            crate::dto::BatchDeleteSharesRequest,
            crate::dto::AccessSharedRequest,
            crate::dto::SharedContentResponse,
            crate::dto::ShareActivityResponse,
            crate::dto::ShareNotificationResponse,
            // social DTOs
            crate::dto::RatingResponse,
            crate::dto::CreateRatingRequest,
            crate::dto::CommentResponse,
            crate::dto::CreateCommentRequest,
            crate::dto::FavoriteRequest,
            crate::dto::CreateShareLinkRequest,
            crate::dto::ShareLinkResponse,
            // statistics DTOs
            crate::dto::LibraryStatisticsResponse,
            crate::dto::TypeCountResponse,
            crate::dto::ScheduledTaskResponse,
            // subtitles DTOs
            crate::dto::SubtitleResponse,
            crate::dto::AddSubtitleRequest,
            crate::dto::UpdateSubtitleOffsetRequest,
            crate::dto::SubtitleListResponse,
            crate::dto::SubtitleTrackInfoResponse,
            // sync DTOs
            crate::dto::RegisterDeviceRequest,
            crate::dto::DeviceResponse,
            crate::dto::DeviceRegistrationResponse,
            crate::dto::UpdateDeviceRequest,
            crate::dto::GetChangesParams,
            crate::dto::SyncChangeResponse,
            crate::dto::ChangesResponse,
            crate::dto::ReportChangesRequest,
            crate::dto::ReportChangesResponse,
            crate::dto::AcknowledgeChangesRequest,
            crate::dto::ConflictResponse,
            crate::dto::ResolveConflictRequest,
            crate::dto::CreateUploadSessionRequest,
            crate::dto::UploadSessionResponse,
            crate::dto::ChunkUploadedResponse,
            crate::dto::MostViewedResponse,
            // tags DTOs
            crate::dto::TagResponse,
            crate::dto::CreateTagRequest,
            crate::dto::TagMediaRequest,
            // transcode DTOs
            crate::dto::TranscodeSessionResponse,
            crate::dto::CreateTranscodeRequest,
            // upload DTOs
            crate::dto::UploadResponse,
            crate::dto::ManagedStorageStatsResponse,
            // users DTOs
            crate::dto::UserResponse,
            crate::dto::UserLibraryResponse,
            crate::dto::GrantLibraryAccessRequest,
            crate::dto::RevokeLibraryAccessRequest,
            // webhooks local types
            crate::routes::webhooks::WebhookInfo,
        )
    ),
    // Tag names must match the `tag = "…"` used in the handlers' `#[utoipa::path]`
    // annotations for operations to group correctly in the rendered docs.
    tags(
        (name = "analytics", description = "Usage analytics and viewing history"),
        (name = "audit", description = "Audit log entries"),
        (name = "auth", description = "Authentication and session management"),
        (name = "backup", description = "Database backup"),
        (name = "books", description = "Book metadata, series, authors, and reading progress"),
        (name = "collections", description = "Media collections"),
        (name = "config", description = "Server configuration"),
        (name = "database", description = "Database administration"),
        (name = "duplicates", description = "Duplicate media detection"),
        (name = "enrichment", description = "External metadata enrichment"),
        (name = "export", description = "Media library export"),
        (name = "health", description = "Server health checks"),
        (name = "integrity", description = "Library integrity checks and repairs"),
        (name = "jobs", description = "Background job management"),
        (name = "media", description = "Media item management"),
        (name = "notes", description = "Markdown notes link graph"),
        (name = "photos", description = "Photo timeline and map view"),
        (name = "playlists", description = "Media playlists"),
        (name = "plugins", description = "Plugin management"),
        (name = "saved_searches", description = "Saved search queries"),
        (name = "scan", description = "Directory scanning"),
        (name = "scheduled_tasks", description = "Scheduled background tasks"),
        (name = "search", description = "Full-text media search"),
        (name = "shares", description = "Media sharing and notifications"),
        (name = "social", description = "Ratings, comments, favorites, and share links"),
        (name = "statistics", description = "Library statistics"),
        (name = "streaming", description = "HLS and DASH adaptive streaming"),
        (name = "subtitles", description = "Media subtitle management"),
        (name = "sync", description = "Multi-device library synchronization"),
        (name = "tags", description = "Media tag management"),
        (name = "transcode", description = "Video transcoding sessions"),
        (name = "upload", description = "File upload and managed storage"),
        (name = "users", description = "User and library access management"),
        (name = "webhooks", description = "Webhook configuration"),
    ),
    // Document-level security requirement; the scheme itself is registered by
    // the `SecurityAddon` modifier below.
    security(
        ("bearer_auth" = [])
    ),
    modifiers(&SecurityAddon)
)]
pub struct ApiDoc;
/// Spec modifier that registers the `bearer_auth` HTTP bearer security
/// scheme on the generated `OpenAPI` document's components.
struct SecurityAddon;

impl utoipa::Modify for SecurityAddon {
    fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) {
        use utoipa::openapi::security::{Http, HttpAuthScheme, SecurityScheme};

        // No components section means there is nowhere to attach the scheme.
        let Some(components) = openapi.components.as_mut() else {
            return;
        };
        let bearer = SecurityScheme::Http(Http::new(HttpAuthScheme::Bearer));
        components.add_security_scheme("bearer_auth", bearer);
    }
}

View file

@ -14,8 +14,10 @@ use tower_http::{
set_header::SetResponseHeaderLayer,
trace::TraceLayer,
};
use utoipa::OpenApi as _;
use utoipa_swagger_ui::SwaggerUi;
use crate::{auth, routes, state::AppState};
use crate::{api_doc::ApiDoc, auth, routes, state::AppState};
/// Create the router with optional TLS configuration for HSTS headers
pub fn create_router(
@ -51,6 +53,11 @@ pub fn create_router_with_tls(
rate_limits: &pinakes_core::config::RateLimitConfig,
tls_config: Option<&pinakes_core::config::TlsConfig>,
) -> Router {
let swagger_ui_enabled = state
.config
.try_read()
.is_ok_and(|cfg| cfg.server.swagger_ui);
let global_governor = build_governor(
rate_limits.global_per_second,
rate_limits.global_burst_size,
@ -605,7 +612,7 @@ pub fn create_router_with_tls(
HeaderValue::from_static("default-src 'none'; frame-ancestors 'none'"),
));
let router = Router::new()
let base_router = Router::new()
.nest("/api/v1", full_api)
.layer(DefaultBodyLimit::max(10 * 1024 * 1024))
.layer(GovernorLayer::new(global_governor))
@ -613,6 +620,14 @@ pub fn create_router_with_tls(
.layer(cors)
.layer(security_headers);
let router = if swagger_ui_enabled {
base_router.merge(
SwaggerUi::new("/api/docs").url("/api/openapi.json", ApiDoc::openapi()),
)
} else {
base_router
};
// Add HSTS header when TLS is enabled
if let Some(tls) = tls_config {
if tls.enabled && tls.hsts_enabled {

View file

@ -2,7 +2,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UsageEventResponse {
pub id: String,
pub media_id: Option<String>,
@ -25,10 +25,11 @@ impl From<pinakes_core::analytics::UsageEvent> for UsageEventResponse {
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RecordUsageEventRequest {
pub media_id: Option<Uuid>,
pub event_type: String,
pub duration_secs: Option<f64>,
#[schema(value_type = Option<Object>)]
pub context: Option<serde_json::Value>,
}

View file

@ -1,7 +1,7 @@
use chrono::{DateTime, Utc};
use serde::Serialize;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct AuditEntryResponse {
pub id: String,
pub media_id: Option<String>,

View file

@ -1,24 +1,24 @@
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchTagRequest {
pub media_ids: Vec<Uuid>,
pub tag_ids: Vec<Uuid>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchCollectionRequest {
pub media_ids: Vec<Uuid>,
pub collection_id: Uuid,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchDeleteRequest {
pub media_ids: Vec<Uuid>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchUpdateRequest {
pub media_ids: Vec<Uuid>,
pub title: Option<String>,
@ -29,7 +29,7 @@ pub struct BatchUpdateRequest {
pub description: Option<String>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BatchOperationResponse {
pub processed: usize,
pub errors: Vec<String>,

View file

@ -2,7 +2,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct CollectionResponse {
pub id: String,
pub name: String,
@ -13,7 +13,7 @@ pub struct CollectionResponse {
pub updated_at: DateTime<Utc>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateCollectionRequest {
pub name: String,
pub kind: String,
@ -21,7 +21,7 @@ pub struct CreateCollectionRequest {
pub filter_query: Option<String>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AddMemberRequest {
pub media_id: Uuid,
pub position: Option<i32>,

View file

@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ConfigResponse {
pub backend: String,
pub database_path: Option<String>,
@ -12,33 +12,33 @@ pub struct ConfigResponse {
pub config_writable: bool,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanningConfigResponse {
pub watch: bool,
pub poll_interval_secs: u64,
pub ignore_patterns: Vec<String>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ServerConfigResponse {
pub host: String,
pub port: u16,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateScanningRequest {
pub watch: Option<bool>,
pub poll_interval_secs: Option<u64>,
pub ignore_patterns: Option<Vec<String>>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RootDirRequest {
pub path: String,
}
// UI Config
#[derive(Debug, Serialize, Deserialize, Clone)]
#[derive(Debug, Serialize, Deserialize, Clone, utoipa::ToSchema)]
pub struct UiConfigResponse {
pub theme: String,
pub default_view: String,
@ -49,7 +49,7 @@ pub struct UiConfigResponse {
pub sidebar_collapsed: bool,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateUiConfigRequest {
pub theme: Option<String>,
pub default_view: Option<String>,

View file

@ -1,12 +1,13 @@
use chrono::{DateTime, Utc};
use serde::Serialize;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ExternalMetadataResponse {
pub id: String,
pub media_id: String,
pub source: String,
pub external_id: Option<String>,
#[schema(value_type = Object)]
pub metadata: serde_json::Value,
pub confidence: f64,
pub last_updated: DateTime<Utc>,

View file

@ -1,9 +1,7 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@ -36,7 +34,7 @@ pub fn relativize_path(full_path: &Path, roots: &[PathBuf]) -> String {
full_path.to_string_lossy().into_owned()
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MediaResponse {
pub id: String,
pub path: String,
@ -52,7 +50,8 @@ pub struct MediaResponse {
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub has_thumbnail: bool,
pub custom_fields: HashMap<String, CustomFieldResponse>,
#[schema(value_type = Object)]
pub custom_fields: FxHashMap<String, CustomFieldResponse>,
// Photo-specific metadata
pub date_taken: Option<DateTime<Utc>>,
@ -69,24 +68,25 @@ pub struct MediaResponse {
pub links_extracted_at: Option<DateTime<Utc>>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct CustomFieldResponse {
pub field_type: String,
pub value: String,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ImportRequest {
#[schema(value_type = String)]
pub path: PathBuf,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ImportResponse {
pub media_id: String,
pub was_duplicate: bool,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateMediaRequest {
pub title: Option<String>,
pub artist: Option<String>,
@ -97,56 +97,60 @@ pub struct UpdateMediaRequest {
}
// File Management
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RenameMediaRequest {
pub new_name: String,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct MoveMediaRequest {
#[schema(value_type = String)]
pub destination: PathBuf,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchMoveRequest {
pub media_ids: Vec<Uuid>,
#[schema(value_type = String)]
pub destination: PathBuf,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TrashResponse {
pub items: Vec<MediaResponse>,
pub total_count: u64,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TrashInfoResponse {
pub count: u64,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct EmptyTrashResponse {
pub deleted_count: u64,
}
// Enhanced Import
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ImportWithOptionsRequest {
#[schema(value_type = String)]
pub path: PathBuf,
pub tag_ids: Option<Vec<Uuid>>,
pub new_tags: Option<Vec<String>>,
pub collection_id: Option<Uuid>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchImportRequest {
#[schema(value_type = Vec<String>)]
pub paths: Vec<PathBuf>,
pub tag_ids: Option<Vec<Uuid>>,
pub new_tags: Option<Vec<String>>,
pub collection_id: Option<Uuid>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BatchImportResponse {
pub results: Vec<BatchImportItemResult>,
pub total: usize,
@ -155,7 +159,7 @@ pub struct BatchImportResponse {
pub errors: usize,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BatchImportItemResult {
pub path: String,
pub media_id: Option<String>,
@ -163,22 +167,23 @@ pub struct BatchImportItemResult {
pub error: Option<String>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct DirectoryImportRequest {
#[schema(value_type = String)]
pub path: PathBuf,
pub tag_ids: Option<Vec<Uuid>>,
pub new_tags: Option<Vec<String>>,
pub collection_id: Option<Uuid>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DirectoryPreviewResponse {
pub files: Vec<DirectoryPreviewFile>,
pub total_count: usize,
pub total_size: u64,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DirectoryPreviewFile {
pub path: String,
pub file_name: String,
@ -187,7 +192,7 @@ pub struct DirectoryPreviewFile {
}
// Custom Fields
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SetCustomFieldRequest {
pub name: String,
pub field_type: String,
@ -195,7 +200,7 @@ pub struct SetCustomFieldRequest {
}
// Media update extended
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateMediaFullRequest {
pub title: Option<String>,
pub artist: Option<String>,
@ -206,26 +211,26 @@ pub struct UpdateMediaFullRequest {
}
// Search with sort
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MediaCountResponse {
pub count: u64,
}
// Duplicates
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DuplicateGroupResponse {
pub content_hash: String,
pub items: Vec<MediaResponse>,
}
// Open
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct OpenRequest {
pub media_id: Uuid,
}
// Upload
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UploadResponse {
pub media_id: String,
pub content_hash: String,
@ -244,7 +249,7 @@ impl From<pinakes_core::model::UploadResult> for UploadResponse {
}
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ManagedStorageStatsResponse {
pub total_blobs: u64,
pub total_size_bytes: u64,
@ -370,12 +375,12 @@ mod tests {
}
// Watch progress
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct WatchProgressRequest {
pub progress_secs: f64,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct WatchProgressResponse {
pub progress_secs: f64,
}

View file

@ -2,7 +2,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PlaylistResponse {
pub id: String,
pub owner_id: String,
@ -31,7 +31,7 @@ impl From<pinakes_core::playlists::Playlist> for PlaylistResponse {
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreatePlaylistRequest {
pub name: String,
pub description: Option<String>,
@ -40,20 +40,20 @@ pub struct CreatePlaylistRequest {
pub filter_query: Option<String>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdatePlaylistRequest {
pub name: Option<String>,
pub description: Option<String>,
pub is_public: Option<bool>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct PlaylistItemRequest {
pub media_id: Uuid,
pub position: Option<i32>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ReorderPlaylistRequest {
pub media_id: Uuid,
pub new_position: i32,

View file

@ -1,7 +1,7 @@
use pinakes_plugin_api::{UiPage, UiWidget};
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PluginResponse {
pub id: String,
pub name: String,
@ -12,22 +12,23 @@ pub struct PluginResponse {
pub enabled: bool,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct InstallPluginRequest {
pub source: String, // URL or file path
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct TogglePluginRequest {
pub enabled: bool,
}
/// A single plugin UI page entry in the list response
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PluginUiPageEntry {
/// Plugin ID that provides this page
pub plugin_id: String,
/// Full page definition
#[schema(value_type = Object)]
pub page: UiPage,
/// Endpoint paths this plugin is allowed to fetch (empty means no
/// restriction)
@ -35,19 +36,21 @@ pub struct PluginUiPageEntry {
}
/// A single plugin UI widget entry in the list response
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PluginUiWidgetEntry {
/// Plugin ID that provides this widget
pub plugin_id: String,
/// Full widget definition
#[schema(value_type = Object)]
pub widget: UiWidget,
}
/// Request body for emitting a plugin event
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct PluginEventRequest {
pub event: String,
#[serde(default)]
#[schema(value_type = Object)]
pub payload: serde_json::Value,
}

View file

@ -2,24 +2,25 @@ use std::path::PathBuf;
use serde::{Deserialize, Serialize};
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ScanRequest {
#[schema(value_type = Option<String>)]
pub path: Option<PathBuf>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanResponse {
pub files_found: usize,
pub files_processed: usize,
pub errors: Vec<String>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanJobResponse {
pub job_id: String,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanStatusResponse {
pub scanning: bool,
pub files_found: usize,

View file

@ -9,7 +9,7 @@ pub const MAX_OFFSET: u64 = 10_000_000;
/// Maximum page size accepted from most listing endpoints.
pub const MAX_LIMIT: u64 = 1000;
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SearchParams {
pub q: String,
pub sort: Option<String>,
@ -28,14 +28,14 @@ impl SearchParams {
}
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SearchResponse {
pub items: Vec<MediaResponse>,
pub total_count: u64,
}
// Search (POST body)
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SearchRequestBody {
pub q: String,
pub sort: Option<String>,
@ -55,7 +55,7 @@ impl SearchRequestBody {
}
// Pagination
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct PaginationParams {
pub offset: Option<u64>,
pub limit: Option<u64>,

View file

@ -2,7 +2,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateShareRequest {
pub target_type: String,
pub target_id: String,
@ -16,7 +16,7 @@ pub struct CreateShareRequest {
pub inherit_to_children: Option<bool>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SharePermissionsRequest {
pub can_view: Option<bool>,
pub can_download: Option<bool>,
@ -26,7 +26,7 @@ pub struct SharePermissionsRequest {
pub can_add: Option<bool>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareResponse {
pub id: String,
pub target_type: String,
@ -46,7 +46,7 @@ pub struct ShareResponse {
pub updated_at: DateTime<Utc>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SharePermissionsResponse {
pub can_view: bool,
pub can_download: bool,
@ -125,7 +125,7 @@ impl From<pinakes_core::sharing::Share> for ShareResponse {
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateShareRequest {
pub permissions: Option<SharePermissionsRequest>,
pub note: Option<String>,
@ -133,7 +133,7 @@ pub struct UpdateShareRequest {
pub inherit_to_children: Option<bool>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareActivityResponse {
pub id: String,
pub share_id: String,
@ -158,7 +158,7 @@ impl From<pinakes_core::sharing::ShareActivity> for ShareActivityResponse {
}
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareNotificationResponse {
pub id: String,
pub share_id: String,
@ -181,12 +181,12 @@ impl From<pinakes_core::sharing::ShareNotification>
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchDeleteSharesRequest {
pub share_ids: Vec<Uuid>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AccessSharedRequest {
pub password: Option<String>,
}
@ -194,7 +194,7 @@ pub struct AccessSharedRequest {
/// Response for accessing shared content.
/// Single-media shares return the media object directly (backwards compatible).
/// Collection/Tag/SavedSearch shares return a list of items.
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
#[serde(untagged)]
pub enum SharedContentResponse {
Single(super::MediaResponse),

View file

@ -2,7 +2,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct RatingResponse {
pub id: String,
pub user_id: String,
@ -25,13 +25,13 @@ impl From<pinakes_core::social::Rating> for RatingResponse {
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateRatingRequest {
pub stars: u8,
pub review_text: Option<String>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct CommentResponse {
pub id: String,
pub user_id: String,
@ -54,25 +54,25 @@ impl From<pinakes_core::social::Comment> for CommentResponse {
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateCommentRequest {
pub text: String,
pub parent_id: Option<Uuid>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct FavoriteRequest {
pub media_id: Uuid,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateShareLinkRequest {
pub media_id: Uuid,
pub password: Option<String>,
pub expires_in_hours: Option<u64>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareLinkResponse {
pub id: String,
pub media_id: String,

View file

@ -1,7 +1,7 @@
use serde::Serialize;
// Library Statistics
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct LibraryStatisticsResponse {
pub total_media: u64,
pub total_size_bytes: u64,
@ -17,7 +17,7 @@ pub struct LibraryStatisticsResponse {
pub total_duplicates: u64,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TypeCountResponse {
pub name: String,
pub count: u64,
@ -61,7 +61,7 @@ impl From<pinakes_core::storage::LibraryStatistics>
}
// Database management
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DatabaseStatsResponse {
pub media_count: u64,
pub tag_count: u64,
@ -72,7 +72,7 @@ pub struct DatabaseStatsResponse {
}
// Scheduled Tasks
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScheduledTaskResponse {
pub id: String,
pub name: String,

View file

@ -1,14 +1,14 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SubtitleResponse {
pub id: String,
pub media_id: String,
pub language: Option<String>,
pub format: String,
pub is_embedded: bool,
pub track_index: Option<usize>,
pub track_index: Option<u32>,
pub offset_ms: i64,
pub created_at: DateTime<Utc>,
}
@ -28,17 +28,46 @@ impl From<pinakes_core::subtitles::Subtitle> for SubtitleResponse {
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AddSubtitleRequest {
pub language: Option<String>,
pub format: String,
pub file_path: Option<String>,
pub is_embedded: Option<bool>,
pub track_index: Option<usize>,
pub track_index: Option<u32>,
pub offset_ms: Option<i64>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateSubtitleOffsetRequest {
pub offset_ms: i64,
}
/// Information about an embedded subtitle track available for extraction.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SubtitleTrackInfoResponse {
pub index: u32,
pub language: Option<String>,
pub format: String,
pub title: Option<String>,
}
impl From<pinakes_core::subtitles::SubtitleTrackInfo>
for SubtitleTrackInfoResponse
{
fn from(t: pinakes_core::subtitles::SubtitleTrackInfo) -> Self {
Self {
index: t.index,
language: t.language,
format: t.format.to_string(),
title: t.title,
}
}
}
/// Response for listing subtitles on a media item.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SubtitleListResponse {
pub subtitles: Vec<SubtitleResponse>,
pub available_tracks: Vec<SubtitleTrackInfoResponse>,
}

View file

@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
use super::media::MediaResponse;
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RegisterDeviceRequest {
pub name: String,
pub device_type: String,
@ -11,7 +11,7 @@ pub struct RegisterDeviceRequest {
pub os_info: Option<String>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DeviceResponse {
pub id: String,
pub name: String,
@ -42,25 +42,25 @@ impl From<pinakes_core::sync::SyncDevice> for DeviceResponse {
}
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DeviceRegistrationResponse {
pub device: DeviceResponse,
pub device_token: String,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateDeviceRequest {
pub name: Option<String>,
pub enabled: Option<bool>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct GetChangesParams {
pub cursor: Option<i64>,
pub limit: Option<u64>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SyncChangeResponse {
pub id: String,
pub sequence: i64,
@ -87,14 +87,14 @@ impl From<pinakes_core::sync::SyncLogEntry> for SyncChangeResponse {
}
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ChangesResponse {
pub changes: Vec<SyncChangeResponse>,
pub cursor: i64,
pub has_more: bool,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ClientChangeReport {
pub path: String,
pub change_type: String,
@ -103,19 +103,19 @@ pub struct ClientChangeReport {
pub local_mtime: Option<i64>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ReportChangesRequest {
pub changes: Vec<ClientChangeReport>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ReportChangesResponse {
pub accepted: Vec<String>,
pub conflicts: Vec<ConflictResponse>,
pub upload_required: Vec<String>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ConflictResponse {
pub id: String,
pub path: String,
@ -136,12 +136,12 @@ impl From<pinakes_core::sync::SyncConflict> for ConflictResponse {
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ResolveConflictRequest {
pub resolution: String,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateUploadSessionRequest {
pub target_path: String,
pub expected_hash: String,
@ -149,7 +149,7 @@ pub struct CreateUploadSessionRequest {
pub chunk_size: Option<u64>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UploadSessionResponse {
pub id: String,
pub target_path: String,
@ -178,19 +178,19 @@ impl From<pinakes_core::sync::UploadSession> for UploadSessionResponse {
}
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ChunkUploadedResponse {
pub chunk_index: u64,
pub received: bool,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AcknowledgeChangesRequest {
pub cursor: i64,
}
// Most viewed (uses MediaResponse)
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MostViewedResponse {
pub media: MediaResponse,
pub view_count: u64,

View file

@ -2,7 +2,7 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TagResponse {
pub id: String,
pub name: String,
@ -10,13 +10,13 @@ pub struct TagResponse {
pub created_at: DateTime<Utc>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateTagRequest {
pub name: String,
pub parent_id: Option<Uuid>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct TagMediaRequest {
pub tag_id: Uuid,
}

View file

@ -1,7 +1,7 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TranscodeSessionResponse {
pub id: String,
pub media_id: String,
@ -28,7 +28,7 @@ impl From<pinakes_core::transcode::TranscodeSession>
}
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateTranscodeRequest {
pub profile: String,
}

View file

@ -2,27 +2,27 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
// Auth
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct LoginRequest {
pub username: String,
pub password: String,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct LoginResponse {
pub token: String,
pub username: String,
pub role: String,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserInfoResponse {
pub username: String,
pub role: String,
}
// Users
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserResponse {
pub id: String,
pub username: String,
@ -32,14 +32,14 @@ pub struct UserResponse {
pub updated_at: DateTime<Utc>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserProfileResponse {
pub avatar_path: Option<String>,
pub bio: Option<String>,
pub preferences: UserPreferencesResponse,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserPreferencesResponse {
pub theme: Option<String>,
pub language: Option<String>,
@ -47,7 +47,7 @@ pub struct UserPreferencesResponse {
pub auto_play: bool,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserLibraryResponse {
pub user_id: String,
pub root_path: String,
@ -55,13 +55,14 @@ pub struct UserLibraryResponse {
pub granted_at: DateTime<Utc>,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct GrantLibraryAccessRequest {
pub root_path: String,
#[schema(value_type = String)]
pub permission: pinakes_core::users::LibraryPermission,
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RevokeLibraryAccessRequest {
pub root_path: String,
}

View file

@ -44,6 +44,25 @@ impl IntoResponse for ApiError {
PinakesError::InvalidOperation(msg) => {
(StatusCode::BAD_REQUEST, msg.clone())
},
PinakesError::InvalidLanguageCode(code) => {
(
StatusCode::BAD_REQUEST,
format!("invalid language code: {code}"),
)
},
PinakesError::SubtitleTrackNotFound { index } => {
(
StatusCode::NOT_FOUND,
format!("subtitle track {index} not found in media"),
)
},
PinakesError::ExternalTool { tool, .. } => {
tracing::error!(tool = %tool, error = %self.0, "external tool failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("external tool `{tool}` failed"),
)
},
PinakesError::Authentication(msg) => {
(StatusCode::UNAUTHORIZED, msg.clone())
},

View file

@ -1,3 +1,4 @@
pub mod api_doc;
pub mod app;
pub mod auth;
pub mod dto;

View file

@ -24,6 +24,21 @@ use crate::{
const MAX_LIMIT: u64 = 100;
#[utoipa::path(
get,
path = "/api/v1/analytics/most-viewed",
tag = "analytics",
params(
("limit" = Option<u64>, Query, description = "Maximum number of results"),
("offset" = Option<u64>, Query, description = "Pagination offset"),
),
responses(
(status = 200, description = "Most viewed media", body = Vec<MostViewedResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_most_viewed(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
@ -44,6 +59,21 @@ pub async fn get_most_viewed(
))
}
#[utoipa::path(
get,
path = "/api/v1/analytics/recently-viewed",
tag = "analytics",
params(
("limit" = Option<u64>, Query, description = "Maximum number of results"),
("offset" = Option<u64>, Query, description = "Pagination offset"),
),
responses(
(status = 200, description = "Recently viewed media", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_recently_viewed(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -61,6 +91,18 @@ pub async fn get_recently_viewed(
))
}
#[utoipa::path(
post,
path = "/api/v1/analytics/events",
tag = "analytics",
request_body = RecordUsageEventRequest,
responses(
(status = 200, description = "Event recorded"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn record_event(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -84,6 +126,21 @@ pub async fn record_event(
Ok(Json(serde_json::json!({"recorded": true})))
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/progress",
tag = "analytics",
params(
("id" = Uuid, Path, description = "Media item ID"),
),
responses(
(status = 200, description = "Watch progress", body = WatchProgressResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_watch_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -100,6 +157,23 @@ pub async fn get_watch_progress(
}))
}
#[utoipa::path(
put,
path = "/api/v1/media/{id}/progress",
tag = "analytics",
params(
("id" = Uuid, Path, description = "Media item ID"),
),
request_body = WatchProgressRequest,
responses(
(status = 200, description = "Progress updated"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn update_watch_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,

View file

@ -9,6 +9,21 @@ use crate::{
state::AppState,
};
#[utoipa::path(
get,
path = "/api/v1/audit",
tag = "audit",
params(
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Page size"),
),
responses(
(status = 200, description = "Audit log entries", body = Vec<AuditEntryResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_audit(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,

View file

@ -17,6 +17,19 @@ const DUMMY_HASH: &str =
"$argon2id$v=19$m=19456,t=2,\
p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
#[utoipa::path(
post,
path = "/api/v1/auth/login",
tag = "auth",
request_body = LoginRequest,
responses(
(status = 200, description = "Login successful", body = LoginResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Invalid credentials"),
(status = 500, description = "Internal server error"),
),
security()
)]
pub async fn login(
State(state): State<AppState>,
Json(req): Json<LoginRequest>,
@ -82,6 +95,7 @@ pub async fn login(
let user = user.ok_or(StatusCode::INTERNAL_SERVER_ERROR)?;
// Generate session token using unbiased uniform distribution
#[expect(clippy::expect_used)]
let token: String = {
use rand::seq::IndexedRandom;
const CHARSET: &[u8] =
@ -134,39 +148,64 @@ pub async fn login(
}))
}
#[utoipa::path(
post,
path = "/api/v1/auth/logout",
tag = "auth",
responses(
(status = 200, description = "Logged out"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn logout(
State(state): State<AppState>,
headers: HeaderMap,
) -> StatusCode {
if let Some(token) = extract_bearer_token(&headers) {
// Get username before deleting session
let username = match state.storage.get_session(token).await {
Ok(Some(session)) => Some(session.username),
_ => None,
};
let Some(token) = extract_bearer_token(&headers) else {
return StatusCode::UNAUTHORIZED;
};
// Delete session from database
if let Err(e) = state.storage.delete_session(token).await {
tracing::error!(error = %e, "failed to delete session from database");
return StatusCode::INTERNAL_SERVER_ERROR;
}
// Get username before deleting session
let username = match state.storage.get_session(token).await {
Ok(Some(session)) => Some(session.username),
_ => None,
};
// Record logout in audit log
if let Some(user) = username
&& let Err(e) = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::Logout,
Some(format!("username: {user}")),
)
.await
{
tracing::warn!(error = %e, "failed to record logout audit");
}
// Delete session from database
if let Err(e) = state.storage.delete_session(token).await {
tracing::error!(error = %e, "failed to delete session from database");
return StatusCode::INTERNAL_SERVER_ERROR;
}
// Record logout in audit log
if let Some(user) = username
&& let Err(e) = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::Logout,
Some(format!("username: {user}")),
)
.await
{
tracing::warn!(error = %e, "failed to record logout audit");
}
StatusCode::OK
}
#[utoipa::path(
get,
path = "/api/v1/auth/me",
tag = "auth",
responses(
(status = 200, description = "Current user info", body = UserInfoResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn me(
State(state): State<AppState>,
headers: HeaderMap,
@ -204,6 +243,17 @@ fn extract_bearer_token(headers: &HeaderMap) -> Option<&str> {
/// Refresh the current session, extending its expiry by the configured
/// duration.
#[utoipa::path(
post,
path = "/api/v1/auth/refresh",
tag = "auth",
responses(
(status = 200, description = "Session refreshed"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn refresh(
State(state): State<AppState>,
headers: HeaderMap,
@ -232,6 +282,17 @@ pub async fn refresh(
}
/// Revoke all sessions for the current user
#[utoipa::path(
post,
path = "/api/v1/auth/revoke-all",
tag = "auth",
responses(
(status = 200, description = "All sessions revoked"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn revoke_all_sessions(
State(state): State<AppState>,
headers: HeaderMap,
@ -280,12 +341,12 @@ pub async fn revoke_all_sessions(
}
/// List all active sessions (admin only)
#[derive(serde::Serialize)]
/// Response payload for the active-session listing endpoint (admin only).
#[derive(serde::Serialize, utoipa::ToSchema)]
pub struct SessionListResponse {
    /// All currently active sessions known to the server.
    pub sessions: Vec<SessionInfo>,
}
#[derive(serde::Serialize)]
#[derive(serde::Serialize, utoipa::ToSchema)]
pub struct SessionInfo {
pub username: String,
pub role: String,
@ -294,6 +355,18 @@ pub struct SessionInfo {
pub expires_at: String,
}
#[utoipa::path(
get,
path = "/api/v1/auth/sessions",
tag = "auth",
responses(
(status = 200, description = "Active sessions", body = SessionListResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_active_sessions(
State(state): State<AppState>,
) -> Result<Json<SessionListResponse>, StatusCode> {

View file

@ -11,6 +11,18 @@ use crate::{error::ApiError, state::AppState};
///
/// For `SQLite`: creates a backup via VACUUM INTO and returns the file.
/// For `PostgreSQL`: returns unsupported error (use `pg_dump` instead).
#[utoipa::path(
post,
path = "/api/v1/admin/backup",
tag = "backup",
responses(
(status = 200, description = "Backup file download"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_backup(
State(state): State<AppState>,
) -> Result<Response, ApiError> {

View file

@ -17,6 +17,7 @@ use pinakes_core::{
ReadingStatus,
},
};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@ -28,7 +29,7 @@ use crate::{
};
/// Book metadata response DTO
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct BookMetadataResponse {
pub media_id: Uuid,
pub isbn: Option<String>,
@ -41,7 +42,8 @@ pub struct BookMetadataResponse {
pub series_index: Option<f64>,
pub format: Option<String>,
pub authors: Vec<AuthorResponse>,
pub identifiers: std::collections::HashMap<String, Vec<String>>,
#[schema(value_type = Object)]
pub identifiers: FxHashMap<String, Vec<String>>,
}
impl From<BookMetadata> for BookMetadataResponse {
@ -68,7 +70,7 @@ impl From<BookMetadata> for BookMetadataResponse {
}
/// Author response DTO
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct AuthorResponse {
pub name: String,
pub role: String,
@ -88,7 +90,7 @@ impl From<AuthorInfo> for AuthorResponse {
}
/// Reading progress response DTO
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct ReadingProgressResponse {
pub media_id: Uuid,
pub user_id: Uuid,
@ -112,7 +114,7 @@ impl From<ReadingProgress> for ReadingProgressResponse {
}
/// Update reading progress request
#[derive(Debug, Deserialize)]
/// Request body for updating a user's reading progress on a book.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateProgressRequest {
    /// Page number the reader is currently on.
    pub current_page: i32,
}
@ -140,20 +142,32 @@ const fn default_limit() -> u64 {
}
/// Series summary DTO
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SeriesSummary {
    /// Series name as stored in book metadata.
    pub name: String,
    /// Number of books recorded in this series.
    pub book_count: u64,
}
/// Author summary DTO
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct AuthorSummary {
    /// Author display name.
    pub name: String,
    /// Number of books attributed to this author.
    pub book_count: u64,
}
/// Get book metadata by media ID
#[utoipa::path(
get,
path = "/api/v1/books/{id}/metadata",
tag = "books",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Book metadata", body = BookMetadataResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_book_metadata(
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
@ -172,6 +186,26 @@ pub async fn get_book_metadata(
}
/// List all books with optional search filters
#[utoipa::path(
get,
path = "/api/v1/books",
tag = "books",
params(
("isbn" = Option<String>, Query, description = "Filter by ISBN"),
("author" = Option<String>, Query, description = "Filter by author"),
("series" = Option<String>, Query, description = "Filter by series"),
("publisher" = Option<String>, Query, description = "Filter by publisher"),
("language" = Option<String>, Query, description = "Filter by language"),
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Pagination limit"),
),
responses(
(status = 200, description = "List of books", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_books(
State(state): State<AppState>,
Query(query): Query<SearchBooksQuery>,
@ -203,6 +237,16 @@ pub async fn list_books(
}
/// List all series with book counts
#[utoipa::path(
get,
path = "/api/v1/books/series",
tag = "books",
responses(
(status = 200, description = "List of series with counts", body = Vec<SeriesSummary>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_series(
State(state): State<AppState>,
) -> Result<impl IntoResponse, ApiError> {
@ -221,6 +265,17 @@ pub async fn list_series(
}
/// Get books in a specific series
#[utoipa::path(
get,
path = "/api/v1/books/series/{name}",
tag = "books",
params(("name" = String, Path, description = "Series name")),
responses(
(status = 200, description = "Books in series", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn get_series_books(
State(state): State<AppState>,
Path(series_name): Path<String>,
@ -235,6 +290,20 @@ pub async fn get_series_books(
}
/// List all authors with book counts
#[utoipa::path(
get,
path = "/api/v1/books/authors",
tag = "books",
params(
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Pagination limit"),
),
responses(
(status = 200, description = "Authors with book counts", body = Vec<AuthorSummary>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_authors(
State(state): State<AppState>,
Query(pagination): Query<Pagination>,
@ -254,6 +323,21 @@ pub async fn list_authors(
}
/// Get books by a specific author
#[utoipa::path(
get,
path = "/api/v1/books/authors/{name}/books",
tag = "books",
params(
("name" = String, Path, description = "Author name"),
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Pagination limit"),
),
responses(
(status = 200, description = "Books by author", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn get_author_books(
State(state): State<AppState>,
Path(author_name): Path<String>,
@ -273,6 +357,18 @@ pub async fn get_author_books(
}
/// Get reading progress for a book
#[utoipa::path(
get,
path = "/api/v1/books/{id}/progress",
tag = "books",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Reading progress", body = ReadingProgressResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_reading_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -293,6 +389,19 @@ pub async fn get_reading_progress(
}
/// Update reading progress for a book
#[utoipa::path(
put,
path = "/api/v1/books/{id}/progress",
tag = "books",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = UpdateProgressRequest,
responses(
(status = 204, description = "Progress updated"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn update_reading_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -305,6 +414,10 @@ pub async fn update_reading_progress(
let user_id = resolve_user_id(&state.storage, &username).await?;
let media_id = MediaId(media_id);
// Verify the media item exists before writing progress; a FK violation from
// the storage layer would otherwise surface as a 500 rather than 404.
state.storage.get_media(media_id).await?;
state
.storage
.update_reading_progress(user_id.0, media_id, req.current_page)
@ -314,6 +427,17 @@ pub async fn update_reading_progress(
}
/// Get user's reading list
#[utoipa::path(
get,
path = "/api/v1/books/reading-list",
tag = "books",
params(("status" = Option<String>, Query, description = "Filter by reading status. Valid values: to_read, reading, completed, abandoned")),
responses(
(status = 200, description = "Reading list", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn get_reading_list(
State(state): State<AppState>,
Extension(username): Extension<String>,

View file

@ -16,6 +16,20 @@ use crate::{
state::AppState,
};
#[utoipa::path(
post,
path = "/api/v1/collections",
tag = "collections",
request_body = CreateCollectionRequest,
responses(
(status = 200, description = "Collection created", body = CollectionResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_collection(
State(state): State<AppState>,
Json(req): Json<CreateCollectionRequest>,
@ -60,6 +74,17 @@ pub async fn create_collection(
Ok(Json(CollectionResponse::from(col)))
}
#[utoipa::path(
get,
path = "/api/v1/collections",
tag = "collections",
responses(
(status = 200, description = "List of collections", body = Vec<CollectionResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_collections(
State(state): State<AppState>,
) -> Result<Json<Vec<CollectionResponse>>, ApiError> {
@ -69,6 +94,19 @@ pub async fn list_collections(
))
}
#[utoipa::path(
get,
path = "/api/v1/collections/{id}",
tag = "collections",
params(("id" = Uuid, Path, description = "Collection ID")),
responses(
(status = 200, description = "Collection", body = CollectionResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_collection(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -77,6 +115,20 @@ pub async fn get_collection(
Ok(Json(CollectionResponse::from(col)))
}
#[utoipa::path(
delete,
path = "/api/v1/collections/{id}",
tag = "collections",
params(("id" = Uuid, Path, description = "Collection ID")),
responses(
(status = 200, description = "Collection deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_collection(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -91,6 +143,21 @@ pub async fn delete_collection(
Ok(Json(serde_json::json!({"deleted": true})))
}
#[utoipa::path(
post,
path = "/api/v1/collections/{id}/members",
tag = "collections",
params(("id" = Uuid, Path, description = "Collection ID")),
request_body = AddMemberRequest,
responses(
(status = 200, description = "Member added"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn add_member(
State(state): State<AppState>,
Path(collection_id): Path<Uuid>,
@ -106,6 +173,23 @@ pub async fn add_member(
Ok(Json(serde_json::json!({"added": true})))
}
#[utoipa::path(
delete,
path = "/api/v1/collections/{id}/members/{media_id}",
tag = "collections",
params(
("id" = Uuid, Path, description = "Collection ID"),
("media_id" = Uuid, Path, description = "Media item ID"),
),
responses(
(status = 200, description = "Member removed"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn remove_member(
State(state): State<AppState>,
Path((collection_id, media_id)): Path<(Uuid, Uuid)>,
@ -119,6 +203,19 @@ pub async fn remove_member(
Ok(Json(serde_json::json!({"removed": true})))
}
#[utoipa::path(
get,
path = "/api/v1/collections/{id}/members",
tag = "collections",
params(("id" = Uuid, Path, description = "Collection ID")),
responses(
(status = 200, description = "Collection members", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_members(
State(state): State<AppState>,
Path(collection_id): Path<Uuid>,

View file

@ -14,6 +14,18 @@ use crate::{
state::AppState,
};
#[utoipa::path(
get,
path = "/api/v1/config",
tag = "config",
responses(
(status = 200, description = "Current server configuration", body = ConfigResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_config(
State(state): State<AppState>,
) -> Result<Json<ConfigResponse>, ApiError> {
@ -63,6 +75,17 @@ pub async fn get_config(
}))
}
#[utoipa::path(
get,
path = "/api/v1/config/ui",
tag = "config",
responses(
(status = 200, description = "UI configuration", body = UiConfigResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_ui_config(
State(state): State<AppState>,
) -> Result<Json<UiConfigResponse>, ApiError> {
@ -70,6 +93,19 @@ pub async fn get_ui_config(
Ok(Json(UiConfigResponse::from(&config.ui)))
}
#[utoipa::path(
patch,
path = "/api/v1/config/ui",
tag = "config",
request_body = UpdateUiConfigRequest,
responses(
(status = 200, description = "Updated UI configuration", body = UiConfigResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn update_ui_config(
State(state): State<AppState>,
Json(req): Json<UpdateUiConfigRequest>,
@ -104,6 +140,19 @@ pub async fn update_ui_config(
Ok(Json(UiConfigResponse::from(&config.ui)))
}
#[utoipa::path(
patch,
path = "/api/v1/config/scanning",
tag = "config",
request_body = UpdateScanningRequest,
responses(
(status = 200, description = "Updated configuration", body = ConfigResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn update_scanning_config(
State(state): State<AppState>,
Json(req): Json<UpdateScanningRequest>,
@ -169,6 +218,20 @@ pub async fn update_scanning_config(
}))
}
#[utoipa::path(
post,
path = "/api/v1/config/roots",
tag = "config",
request_body = RootDirRequest,
responses(
(status = 200, description = "Updated configuration", body = ConfigResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn add_root(
State(state): State<AppState>,
Json(req): Json<RootDirRequest>,
@ -196,6 +259,19 @@ pub async fn add_root(
get_config(State(state)).await
}
#[utoipa::path(
delete,
path = "/api/v1/config/roots",
tag = "config",
request_body = RootDirRequest,
responses(
(status = 200, description = "Updated configuration", body = ConfigResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn remove_root(
State(state): State<AppState>,
Json(req): Json<RootDirRequest>,

View file

@ -2,6 +2,18 @@ use axum::{Json, extract::State};
use crate::{dto::DatabaseStatsResponse, error::ApiError, state::AppState};
#[utoipa::path(
get,
path = "/api/v1/admin/database/stats",
tag = "database",
responses(
(status = 200, description = "Database statistics", body = DatabaseStatsResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn database_stats(
State(state): State<AppState>,
) -> Result<Json<DatabaseStatsResponse>, ApiError> {
@ -16,6 +28,18 @@ pub async fn database_stats(
}))
}
#[utoipa::path(
post,
path = "/api/v1/admin/database/vacuum",
tag = "database",
responses(
(status = 200, description = "Database vacuumed"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn vacuum_database(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
@ -23,6 +47,18 @@ pub async fn vacuum_database(
Ok(Json(serde_json::json!({"status": "ok"})))
}
#[utoipa::path(
post,
path = "/api/v1/admin/database/clear",
tag = "database",
responses(
(status = 200, description = "Database cleared"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn clear_database(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {

View file

@ -6,6 +6,17 @@ use crate::{
state::AppState,
};
#[utoipa::path(
get,
path = "/api/v1/media/duplicates",
tag = "duplicates",
responses(
(status = 200, description = "Duplicate groups", body = Vec<DuplicateGroupResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_duplicates(
State(state): State<AppState>,
) -> Result<Json<Vec<DuplicateGroupResponse>>, ApiError> {

View file

@ -11,6 +11,20 @@ use crate::{
state::AppState,
};
#[utoipa::path(
post,
path = "/api/v1/media/{id}/enrich",
tag = "enrichment",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Enrichment job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_enrichment(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -25,6 +39,19 @@ pub async fn trigger_enrichment(
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/metadata/external",
tag = "enrichment",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "External metadata", body = Vec<ExternalMetadataResponse>),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_external_metadata(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -38,6 +65,20 @@ pub async fn get_external_metadata(
))
}
#[utoipa::path(
post,
path = "/api/v1/media/enrich/batch",
tag = "enrichment",
request_body = BatchDeleteRequest,
responses(
(status = 200, description = "Enrichment job submitted"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_enrich(
State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field

View file

@ -5,12 +5,25 @@ use serde::Deserialize;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
/// Request body for triggering an export with explicit options.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ExportRequest {
    /// Export format identifier.
    /// NOTE(review): the set of accepted values is not visible in this chunk —
    /// confirm against the export job implementation.
    pub format: String,
    /// Filesystem path the export is written to. Exposed in the OpenAPI schema
    /// as a plain string because `PathBuf` has no native utoipa schema.
    #[schema(value_type = String)]
    pub destination: PathBuf,
}
#[utoipa::path(
post,
path = "/api/v1/export",
tag = "export",
responses(
(status = 200, description = "Export job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_export(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
@ -25,6 +38,19 @@ pub async fn trigger_export(
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[utoipa::path(
post,
path = "/api/v1/export/options",
tag = "export",
request_body = ExportRequest,
responses(
(status = 200, description = "Export job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_export_with_options(
State(state): State<AppState>,
Json(req): Json<ExportRequest>,

View file

@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use crate::state::AppState;
/// Basic health check response
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct HealthResponse {
pub status: String,
pub version: String,
@ -18,7 +18,7 @@ pub struct HealthResponse {
pub cache: Option<CacheHealth>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct DatabaseHealth {
pub status: String,
pub latency_ms: u64,
@ -26,14 +26,14 @@ pub struct DatabaseHealth {
pub media_count: Option<u64>,
}
#[derive(Debug, Serialize, Deserialize)]
/// Health of the configured media root directories.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct FilesystemHealth {
    /// Overall filesystem status string.
    pub status: String,
    /// Number of media root directories present in the configuration.
    pub roots_configured: usize,
    /// Number of those configured roots that were actually accessible.
    pub roots_accessible: usize,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct CacheHealth {
pub hit_rate: f64,
pub total_entries: u64,
@ -43,6 +43,14 @@ pub struct CacheHealth {
}
/// Comprehensive health check - includes database, filesystem, and cache status
#[utoipa::path(
get,
path = "/api/v1/health",
tag = "health",
responses(
(status = 200, description = "Health status", body = HealthResponse),
)
)]
pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
let mut response = HealthResponse {
status: "ok".to_string(),
@ -106,6 +114,14 @@ pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
/// Liveness probe - just checks if the server is running
/// Returns 200 OK if the server process is alive
#[utoipa::path(
get,
path = "/api/v1/health/live",
tag = "health",
responses(
(status = 200, description = "Server is alive"),
)
)]
pub async fn liveness() -> impl IntoResponse {
(
StatusCode::OK,
@ -117,6 +133,15 @@ pub async fn liveness() -> impl IntoResponse {
/// Readiness probe - checks if the server can serve requests
/// Returns 200 OK if database is accessible
#[utoipa::path(
get,
path = "/api/v1/health/ready",
tag = "health",
responses(
(status = 200, description = "Server is ready"),
(status = 503, description = "Server not ready"),
)
)]
pub async fn readiness(State(state): State<AppState>) -> impl IntoResponse {
// Check database connectivity
let db_start = Instant::now();
@ -144,7 +169,7 @@ pub async fn readiness(State(state): State<AppState>) -> impl IntoResponse {
}
/// Detailed health check for monitoring dashboards
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct DetailedHealthResponse {
pub status: String,
pub version: String,
@ -155,12 +180,20 @@ pub struct DetailedHealthResponse {
pub jobs: JobsHealth,
}
#[derive(Debug, Serialize, Deserialize)]
/// Snapshot of the background job queue for the detailed health report.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct JobsHealth {
    /// Jobs queued but not yet started.
    pub pending: usize,
    /// Jobs currently executing.
    pub running: usize,
}
#[utoipa::path(
get,
path = "/api/v1/health/detailed",
tag = "health",
responses(
(status = 200, description = "Detailed health status", body = DetailedHealthResponse),
)
)]
pub async fn health_detailed(
State(state): State<AppState>,
) -> Json<DetailedHealthResponse> {

View file

@ -3,12 +3,24 @@ use serde::Deserialize;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
/// Request body for resolving previously detected orphan records.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct OrphanResolveRequest {
    /// Resolution action to apply.
    /// NOTE(review): accepted action values are not visible in this chunk —
    /// confirm against the `resolve_orphans` handler implementation.
    pub action: String,
    /// IDs of the orphaned records to resolve.
    pub ids: Vec<uuid::Uuid>,
}
#[utoipa::path(
post,
path = "/api/v1/admin/integrity/orphans/detect",
tag = "integrity",
responses(
(status = 200, description = "Orphan detection job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_orphan_detection(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
@ -17,6 +29,19 @@ pub async fn trigger_orphan_detection(
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[utoipa::path(
post,
path = "/api/v1/admin/integrity/verify",
tag = "integrity",
request_body = VerifyIntegrityRequest,
responses(
(status = 200, description = "Integrity verification job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_verify_integrity(
State(state): State<AppState>,
Json(req): Json<VerifyIntegrityRequest>,
@ -31,11 +56,23 @@ pub async fn trigger_verify_integrity(
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[derive(Debug, Deserialize)]
/// Request body for an integrity-verification job over specific media items.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct VerifyIntegrityRequest {
    /// Media item IDs whose stored data should be verified.
    pub media_ids: Vec<uuid::Uuid>,
}
#[utoipa::path(
post,
path = "/api/v1/admin/integrity/thumbnails/cleanup",
tag = "integrity",
responses(
(status = 200, description = "Thumbnail cleanup job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_cleanup_thumbnails(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
@ -44,7 +81,7 @@ pub async fn trigger_cleanup_thumbnails(
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct GenerateThumbnailsRequest {
/// When true, only generate thumbnails for items that don't have one yet.
/// When false (default), regenerate all thumbnails.
@ -52,6 +89,19 @@ pub struct GenerateThumbnailsRequest {
pub only_missing: bool,
}
#[utoipa::path(
post,
path = "/api/v1/admin/integrity/thumbnails/generate",
tag = "integrity",
request_body = GenerateThumbnailsRequest,
responses(
(status = 200, description = "Thumbnail generation job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn generate_all_thumbnails(
State(state): State<AppState>,
body: Option<Json<GenerateThumbnailsRequest>>,
@ -77,6 +127,19 @@ pub async fn generate_all_thumbnails(
})))
}
#[utoipa::path(
post,
path = "/api/v1/admin/integrity/orphans/resolve",
tag = "integrity",
request_body = OrphanResolveRequest,
responses(
(status = 200, description = "Orphans resolved"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn resolve_orphans(
State(state): State<AppState>,
Json(req): Json<OrphanResolveRequest>,

View file

@ -6,10 +6,34 @@ use pinakes_core::jobs::Job;
use crate::{error::ApiError, state::AppState};
/// List every job currently known to the in-process job queue.
///
/// Returns the queue contents as JSON; authorization is enforced by
/// middleware per the `bearer_auth` security requirement below.
#[utoipa::path(
    get,
    path = "/api/v1/jobs",
    tag = "jobs",
    responses(
        (status = 200, description = "List of jobs"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_jobs(State(state): State<AppState>) -> Json<Vec<Job>> {
    // Snapshot the queue first, then wrap it for the JSON response.
    let jobs = state.job_queue.list().await;
    Json(jobs)
}
#[utoipa::path(
get,
path = "/api/v1/jobs/{id}",
tag = "jobs",
params(("id" = uuid::Uuid, Path, description = "Job ID")),
responses(
(status = 200, description = "Job details"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_job(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
@ -20,6 +44,19 @@ pub async fn get_job(
})
}
#[utoipa::path(
post,
path = "/api/v1/jobs/{id}/cancel",
tag = "jobs",
params(("id" = uuid::Uuid, Path, description = "Job ID")),
responses(
(status = 200, description = "Job cancelled"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn cancel_job(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,

View file

@ -3,6 +3,7 @@ use axum::{
extract::{Path, Query, State},
};
use pinakes_core::{model::MediaId, storage::DynStorageBackend};
use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@ -98,6 +99,20 @@ async fn apply_import_post_processing(
}
}
#[utoipa::path(
post,
path = "/api/v1/media/import",
tag = "media",
request_body = ImportRequest,
responses(
(status = 200, description = "Media imported", body = ImportResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn import_media(
State(state): State<AppState>,
Json(req): Json<ImportRequest>,
@ -125,6 +140,22 @@ pub async fn import_media(
}))
}
#[utoipa::path(
get,
path = "/api/v1/media",
tag = "media",
params(
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Page size"),
("sort" = Option<String>, Query, description = "Sort field"),
),
responses(
(status = 200, description = "List of media items", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_media(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
@ -140,6 +171,19 @@ pub async fn list_media(
))
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media item", body = MediaResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -171,6 +215,22 @@ fn validate_optional_text(
Ok(())
}
#[utoipa::path(
patch,
path = "/api/v1/media/{id}",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = UpdateMediaRequest,
responses(
(status = 200, description = "Updated media item", body = MediaResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn update_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -228,6 +288,20 @@ pub async fn update_media(
Ok(Json(MediaResponse::new(item, &roots)))
}
#[utoipa::path(
delete,
path = "/api/v1/media/{id}",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -266,6 +340,19 @@ pub async fn delete_media(
Ok(Json(serde_json::json!({"deleted": true})))
}
#[utoipa::path(
post,
path = "/api/v1/media/{id}/open",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media opened"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn open_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -283,6 +370,20 @@ pub async fn open_media(
Ok(Json(serde_json::json!({"opened": true})))
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/stream",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media stream"),
(status = 206, description = "Partial content"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn stream_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -394,6 +495,20 @@ fn parse_range(header: &str, total_size: u64) -> Option<(u64, u64)> {
}
}
#[utoipa::path(
post,
path = "/api/v1/media/import/options",
tag = "media",
request_body = ImportWithOptionsRequest,
responses(
(status = 200, description = "Media imported", body = ImportResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn import_with_options(
State(state): State<AppState>,
Json(req): Json<ImportWithOptionsRequest>,
@ -428,6 +543,20 @@ pub async fn import_with_options(
}))
}
#[utoipa::path(
post,
path = "/api/v1/media/import/batch",
tag = "media",
request_body = BatchImportRequest,
responses(
(status = 200, description = "Batch import results", body = BatchImportResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_import(
State(state): State<AppState>,
Json(req): Json<BatchImportRequest>,
@ -502,6 +631,20 @@ pub async fn batch_import(
}))
}
#[utoipa::path(
post,
path = "/api/v1/media/import/directory",
tag = "media",
request_body = DirectoryImportRequest,
responses(
(status = 200, description = "Directory import results", body = BatchImportResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn import_directory_endpoint(
State(state): State<AppState>,
Json(req): Json<DirectoryImportRequest>,
@ -570,6 +713,19 @@ pub async fn import_directory_endpoint(
}))
}
#[utoipa::path(
post,
path = "/api/v1/media/import/preview",
tag = "media",
responses(
(status = 200, description = "Directory preview", body = DirectoryPreviewResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn preview_directory(
State(state): State<AppState>,
Json(req): Json<serde_json::Value>,
@ -671,6 +827,22 @@ pub async fn preview_directory(
}))
}
#[utoipa::path(
put,
path = "/api/v1/media/{id}/custom-fields",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = SetCustomFieldRequest,
responses(
(status = 200, description = "Custom field set"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn set_custom_field(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -708,6 +880,23 @@ pub async fn set_custom_field(
Ok(Json(serde_json::json!({"set": true})))
}
#[utoipa::path(
delete,
path = "/api/v1/media/{id}/custom-fields/{name}",
tag = "media",
params(
("id" = Uuid, Path, description = "Media item ID"),
("name" = String, Path, description = "Custom field name"),
),
responses(
(status = 200, description = "Custom field deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_custom_field(
State(state): State<AppState>,
Path((id, name)): Path<(Uuid, String)>,
@ -719,6 +908,20 @@ pub async fn delete_custom_field(
Ok(Json(serde_json::json!({"deleted": true})))
}
#[utoipa::path(
post,
path = "/api/v1/media/batch/tag",
tag = "media",
request_body = BatchTagRequest,
responses(
(status = 200, description = "Batch tag result", body = BatchOperationResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_tag(
State(state): State<AppState>,
Json(req): Json<BatchTagRequest>,
@ -753,6 +956,18 @@ pub async fn batch_tag(
}
}
#[utoipa::path(
delete,
path = "/api/v1/media",
tag = "media",
responses(
(status = 200, description = "All media deleted", body = BatchOperationResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_all_media(
State(state): State<AppState>,
) -> Result<Json<BatchOperationResponse>, ApiError> {
@ -784,6 +999,20 @@ pub async fn delete_all_media(
}
}
#[utoipa::path(
post,
path = "/api/v1/media/batch/delete",
tag = "media",
request_body = BatchDeleteRequest,
responses(
(status = 200, description = "Batch delete result", body = BatchOperationResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_delete(
State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>,
@ -828,6 +1057,20 @@ pub async fn batch_delete(
}
}
#[utoipa::path(
post,
path = "/api/v1/media/batch/collection",
tag = "media",
request_body = BatchCollectionRequest,
responses(
(status = 200, description = "Batch collection result", body = BatchOperationResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_add_to_collection(
State(state): State<AppState>,
Json(req): Json<BatchCollectionRequest>,
@ -858,6 +1101,20 @@ pub async fn batch_add_to_collection(
Ok(Json(BatchOperationResponse { processed, errors }))
}
#[utoipa::path(
post,
path = "/api/v1/media/batch/update",
tag = "media",
request_body = BatchUpdateRequest,
responses(
(status = 200, description = "Batch update result", body = BatchOperationResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_update(
State(state): State<AppState>,
Json(req): Json<BatchUpdateRequest>,
@ -900,6 +1157,19 @@ pub async fn batch_update(
}
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/thumbnail",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Thumbnail image"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_thumbnail(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -933,6 +1203,17 @@ pub async fn get_thumbnail(
})
}
#[utoipa::path(
get,
path = "/api/v1/media/count",
tag = "media",
responses(
(status = 200, description = "Media count", body = MediaCountResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_media_count(
State(state): State<AppState>,
) -> Result<Json<MediaCountResponse>, ApiError> {
@ -940,6 +1221,22 @@ pub async fn get_media_count(
Ok(Json(MediaCountResponse { count }))
}
#[utoipa::path(
post,
path = "/api/v1/media/{id}/rename",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = RenameMediaRequest,
responses(
(status = 200, description = "Renamed media item", body = MediaResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn rename_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -992,6 +1289,22 @@ pub async fn rename_media(
Ok(Json(MediaResponse::new(item, &roots)))
}
#[utoipa::path(
post,
path = "/api/v1/media/{id}/move",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = MoveMediaRequest,
responses(
(status = 200, description = "Moved media item", body = MediaResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn move_media_endpoint(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -1041,6 +1354,20 @@ pub async fn move_media_endpoint(
Ok(Json(MediaResponse::new(item, &roots)))
}
#[utoipa::path(
post,
path = "/api/v1/media/batch/move",
tag = "media",
request_body = BatchMoveRequest,
responses(
(status = 200, description = "Batch move result", body = BatchOperationResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_move_media(
State(state): State<AppState>,
Json(req): Json<BatchMoveRequest>,
@ -1110,6 +1437,20 @@ pub async fn batch_move_media(
}
}
#[utoipa::path(
delete,
path = "/api/v1/media/{id}/trash",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media moved to trash"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn soft_delete_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -1156,6 +1497,20 @@ pub async fn soft_delete_media(
Ok(Json(serde_json::json!({"deleted": true, "trashed": true})))
}
#[utoipa::path(
post,
path = "/api/v1/media/{id}/restore",
tag = "media",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media restored", body = MediaResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn restore_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -1203,6 +1558,21 @@ pub async fn restore_media(
Ok(Json(MediaResponse::new(item, &roots)))
}
#[utoipa::path(
get,
path = "/api/v1/media/trash",
tag = "media",
params(
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Page size"),
),
responses(
(status = 200, description = "Trashed media items", body = TrashResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_trash(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
@ -1222,6 +1592,17 @@ pub async fn list_trash(
}))
}
#[utoipa::path(
get,
path = "/api/v1/media/trash/info",
tag = "media",
responses(
(status = 200, description = "Trash info", body = TrashInfoResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trash_info(
State(state): State<AppState>,
) -> Result<Json<TrashInfoResponse>, ApiError> {
@ -1229,6 +1610,18 @@ pub async fn trash_info(
Ok(Json(TrashInfoResponse { count }))
}
#[utoipa::path(
delete,
path = "/api/v1/media/trash",
tag = "media",
responses(
(status = 200, description = "Trash emptied", body = EmptyTrashResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn empty_trash(
State(state): State<AppState>,
) -> Result<Json<EmptyTrashResponse>, ApiError> {
@ -1246,10 +1639,27 @@ pub async fn empty_trash(
Ok(Json(EmptyTrashResponse { deleted_count }))
}
#[utoipa::path(
delete,
path = "/api/v1/media/{id}/permanent",
tag = "media",
params(
("id" = Uuid, Path, description = "Media item ID"),
("permanent" = Option<String>, Query, description = "Set to 'true' for permanent deletion"),
),
responses(
(status = 200, description = "Media deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn permanent_delete_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
Query(params): Query<std::collections::HashMap<String, String>>,
Query(params): Query<FxHashMap<String, String>>,
) -> Result<Json<serde_json::Value>, ApiError> {
let media_id = MediaId(id);
let permanent = params.get("permanent").is_some_and(|v| v == "true");

View file

@ -26,14 +26,14 @@ use uuid::Uuid;
use crate::{error::ApiError, state::AppState};
/// Response for backlinks query
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BacklinksResponse {
pub backlinks: Vec<BacklinkItem>,
pub count: usize,
}
/// Individual backlink item
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BacklinkItem {
pub link_id: Uuid,
pub source_id: Uuid,
@ -61,14 +61,14 @@ impl From<BacklinkInfo> for BacklinkItem {
}
/// Response for outgoing links query
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct OutgoingLinksResponse {
pub links: Vec<OutgoingLinkItem>,
pub count: usize,
}
/// Individual outgoing link item
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct OutgoingLinkItem {
pub id: Uuid,
pub target_path: String,
@ -94,7 +94,7 @@ impl From<MarkdownLink> for OutgoingLinkItem {
}
/// Response for graph visualization
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct GraphResponse {
pub nodes: Vec<GraphNodeResponse>,
pub edges: Vec<GraphEdgeResponse>,
@ -103,7 +103,7 @@ pub struct GraphResponse {
}
/// Graph node for visualization
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct GraphNodeResponse {
pub id: String,
pub label: String,
@ -127,7 +127,7 @@ impl From<GraphNode> for GraphNodeResponse {
}
/// Graph edge for visualization
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct GraphEdgeResponse {
pub source: String,
pub target: String,
@ -180,20 +180,20 @@ const fn default_depth() -> u32 {
}
/// Response for reindex operation
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ReindexResponse {
pub message: String,
pub links_extracted: usize,
}
/// Response for link resolution
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ResolveLinksResponse {
pub resolved_count: u64,
}
/// Response for unresolved links count
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UnresolvedLinksResponse {
pub count: u64,
}
@ -201,6 +201,19 @@ pub struct UnresolvedLinksResponse {
/// Get backlinks (incoming links) to a media item.
///
/// GET /api/v1/media/{id}/backlinks
#[utoipa::path(
get,
path = "/api/v1/media/{id}/backlinks",
tag = "notes",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Backlinks", body = BacklinksResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_backlinks(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -221,6 +234,19 @@ pub async fn get_backlinks(
/// Get outgoing links from a media item.
///
/// GET /api/v1/media/{id}/outgoing-links
#[utoipa::path(
get,
path = "/api/v1/media/{id}/outgoing-links",
tag = "notes",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Outgoing links", body = OutgoingLinksResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_outgoing_links(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -241,6 +267,21 @@ pub async fn get_outgoing_links(
/// Get graph data for visualization.
///
/// GET /api/v1/notes/graph?center={uuid}&depth={n}
#[utoipa::path(
get,
path = "/api/v1/notes/graph",
tag = "notes",
params(
("center" = Option<Uuid>, Query, description = "Center node ID"),
("depth" = Option<u32>, Query, description = "Traversal depth (max 5, default 2)"),
),
responses(
(status = 200, description = "Graph data", body = GraphResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_graph(
State(state): State<AppState>,
Query(params): Query<GraphQuery>,
@ -256,6 +297,19 @@ pub async fn get_graph(
/// Re-extract links from a media item.
///
/// POST /api/v1/media/{id}/reindex-links
#[utoipa::path(
post,
path = "/api/v1/media/{id}/reindex-links",
tag = "notes",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Links reindexed", body = ReindexResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn reindex_links(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -304,6 +358,17 @@ pub async fn reindex_links(
/// Resolve all unresolved links in the database.
///
/// POST /api/v1/notes/resolve-links
#[utoipa::path(
post,
path = "/api/v1/notes/resolve-links",
tag = "notes",
responses(
(status = 200, description = "Links resolved", body = ResolveLinksResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn resolve_links(
State(state): State<AppState>,
) -> Result<Json<ResolveLinksResponse>, ApiError> {
@ -315,6 +380,17 @@ pub async fn resolve_links(
/// Get count of unresolved links.
///
/// GET /api/v1/notes/unresolved-count
#[utoipa::path(
get,
path = "/api/v1/notes/unresolved-count",
tag = "notes",
responses(
(status = 200, description = "Unresolved link count", body = UnresolvedLinksResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_unresolved_count(
State(state): State<AppState>,
) -> Result<Json<UnresolvedLinksResponse>, ApiError> {

View file

@ -1,5 +1,3 @@
use std::collections::HashMap;
use axum::{
Json,
Router,
@ -38,7 +36,7 @@ const fn default_timeline_limit() -> u64 {
}
/// Timeline group response
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TimelineGroup {
pub date: String,
pub count: usize,
@ -56,7 +54,7 @@ pub struct MapQuery {
}
/// Map marker response
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MapMarker {
pub id: String,
pub latitude: f64,
@ -65,6 +63,23 @@ pub struct MapMarker {
pub date_taken: Option<DateTime<Utc>>,
}
#[utoipa::path(
get,
path = "/api/v1/photos/timeline",
tag = "photos",
params(
("group_by" = Option<String>, Query, description = "Grouping: day, month, year"),
("year" = Option<i32>, Query, description = "Filter by year"),
("month" = Option<u32>, Query, description = "Filter by month"),
("limit" = Option<u64>, Query, description = "Max items (default 10000)"),
),
responses(
(status = 200, description = "Photo timeline groups", body = Vec<TimelineGroup>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
/// Get timeline of photos grouped by date
pub async fn get_timeline(
State(state): State<AppState>,
@ -91,8 +106,10 @@ pub async fn get_timeline(
.collect();
// Group by the requested period
let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> =
HashMap::new();
let mut groups: rustc_hash::FxHashMap<
String,
Vec<pinakes_core::model::MediaItem>,
> = rustc_hash::FxHashMap::default();
for photo in photos {
if let Some(date_taken) = photo.date_taken {
@ -147,6 +164,24 @@ pub async fn get_timeline(
Ok(Json(timeline))
}
#[utoipa::path(
get,
path = "/api/v1/photos/map",
tag = "photos",
params(
("lat1" = f64, Query, description = "Bounding box latitude 1"),
("lon1" = f64, Query, description = "Bounding box longitude 1"),
("lat2" = f64, Query, description = "Bounding box latitude 2"),
("lon2" = f64, Query, description = "Bounding box longitude 2"),
),
responses(
(status = 200, description = "Map markers", body = Vec<MapMarker>),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
/// Get photos in a bounding box for map view
pub async fn get_map_photos(
State(state): State<AppState>,

View file

@ -51,6 +51,19 @@ async fn check_playlist_access(
Ok(playlist)
}
#[utoipa::path(
post,
path = "/api/v1/playlists",
tag = "playlists",
request_body = CreatePlaylistRequest,
responses(
(status = 200, description = "Playlist created", body = PlaylistResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -78,6 +91,17 @@ pub async fn create_playlist(
Ok(Json(PlaylistResponse::from(playlist)))
}
#[utoipa::path(
get,
path = "/api/v1/playlists",
tag = "playlists",
responses(
(status = 200, description = "List of playlists", body = Vec<PlaylistResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_playlists(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -93,6 +117,19 @@ pub async fn list_playlists(
Ok(Json(visible))
}
#[utoipa::path(
get,
path = "/api/v1/playlists/{id}",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
responses(
(status = 200, description = "Playlist details", body = PlaylistResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -104,6 +141,21 @@ pub async fn get_playlist(
Ok(Json(PlaylistResponse::from(playlist)))
}
#[utoipa::path(
patch,
path = "/api/v1/playlists/{id}",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
request_body = UpdatePlaylistRequest,
responses(
(status = 200, description = "Playlist updated", body = PlaylistResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn update_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -133,6 +185,19 @@ pub async fn update_playlist(
Ok(Json(PlaylistResponse::from(playlist)))
}
#[utoipa::path(
delete,
path = "/api/v1/playlists/{id}",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
responses(
(status = 200, description = "Playlist deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -144,6 +209,20 @@ pub async fn delete_playlist(
Ok(Json(serde_json::json!({"deleted": true})))
}
#[utoipa::path(
post,
path = "/api/v1/playlists/{id}/items",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
request_body = PlaylistItemRequest,
responses(
(status = 200, description = "Item added"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn add_item(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -165,6 +244,22 @@ pub async fn add_item(
Ok(Json(serde_json::json!({"added": true})))
}
#[utoipa::path(
delete,
path = "/api/v1/playlists/{id}/items/{media_id}",
tag = "playlists",
params(
("id" = Uuid, Path, description = "Playlist ID"),
("media_id" = Uuid, Path, description = "Media item ID"),
),
responses(
(status = 200, description = "Item removed"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn remove_item(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -179,6 +274,19 @@ pub async fn remove_item(
Ok(Json(serde_json::json!({"removed": true})))
}
#[utoipa::path(
get,
path = "/api/v1/playlists/{id}/items",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
responses(
(status = 200, description = "Playlist items", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn list_items(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -196,6 +304,20 @@ pub async fn list_items(
))
}
#[utoipa::path(
patch,
path = "/api/v1/playlists/{id}/items/reorder",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
request_body = ReorderPlaylistRequest,
responses(
(status = 200, description = "Item reordered"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn reorder_item(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -211,6 +333,19 @@ pub async fn reorder_item(
Ok(Json(serde_json::json!({"reordered": true})))
}
#[utoipa::path(
post,
path = "/api/v1/playlists/{id}/shuffle",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
responses(
(status = 200, description = "Shuffled playlist items", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn shuffle_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,

View file

@ -1,10 +1,11 @@
use std::{collections::HashMap, sync::Arc};
use std::sync::Arc;
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::plugin::PluginManager;
use rustc_hash::FxHashMap;
use crate::{
dto::{
@ -30,6 +31,17 @@ fn require_plugin_manager(
}
/// List all installed plugins
#[utoipa::path(
get,
path = "/api/v1/plugins",
tag = "plugins",
responses(
(status = 200, description = "List of plugins", body = Vec<PluginResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_plugins(
State(state): State<AppState>,
) -> Result<Json<Vec<PluginResponse>>, ApiError> {
@ -45,6 +57,18 @@ pub async fn list_plugins(
}
/// Get a specific plugin by ID
#[utoipa::path(
get,
path = "/api/v1/plugins/{id}",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
responses(
(status = 200, description = "Plugin details", body = PluginResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
@ -62,6 +86,19 @@ pub async fn get_plugin(
}
/// Install a plugin from URL or file path
#[utoipa::path(
post,
path = "/api/v1/plugins",
tag = "plugins",
request_body = InstallPluginRequest,
responses(
(status = 200, description = "Plugin installed", body = PluginResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn install_plugin(
State(state): State<AppState>,
Json(req): Json<InstallPluginRequest>,
@ -90,6 +127,19 @@ pub async fn install_plugin(
}
/// Uninstall a plugin
#[utoipa::path(
delete,
path = "/api/v1/plugins/{id}",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
responses(
(status = 200, description = "Plugin uninstalled"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn uninstall_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
@ -106,6 +156,20 @@ pub async fn uninstall_plugin(
}
/// Enable or disable a plugin
#[utoipa::path(
patch,
path = "/api/v1/plugins/{id}/toggle",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
request_body = TogglePluginRequest,
responses(
(status = 200, description = "Plugin toggled"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn toggle_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
@ -145,6 +209,16 @@ pub async fn toggle_plugin(
}
/// List all UI pages provided by loaded plugins
#[utoipa::path(
get,
path = "/api/v1/plugins/ui/pages",
tag = "plugins",
responses(
(status = 200, description = "Plugin UI pages", body = Vec<PluginUiPageEntry>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_plugin_ui_pages(
State(state): State<AppState>,
) -> Result<Json<Vec<PluginUiPageEntry>>, ApiError> {
@ -165,6 +239,16 @@ pub async fn list_plugin_ui_pages(
}
/// List all UI widgets provided by loaded plugins
#[utoipa::path(
get,
path = "/api/v1/plugins/ui/widgets",
tag = "plugins",
responses(
(status = 200, description = "Plugin UI widgets", body = Vec<PluginUiWidgetEntry>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_plugin_ui_widgets(
State(state): State<AppState>,
) -> Result<Json<Vec<PluginUiWidgetEntry>>, ApiError> {
@ -180,6 +264,17 @@ pub async fn list_plugin_ui_widgets(
/// Receive a plugin event emitted from the UI and dispatch it to interested
/// server-side event-handler plugins via the pipeline.
#[utoipa::path(
post,
path = "/api/v1/plugins/events",
tag = "plugins",
request_body = PluginEventRequest,
responses(
(status = 200, description = "Event received"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn emit_plugin_event(
State(state): State<AppState>,
Json(req): Json<PluginEventRequest>,
@ -192,14 +287,37 @@ pub async fn emit_plugin_event(
}
/// List merged CSS custom property overrides from all enabled plugins
#[utoipa::path(
get,
path = "/api/v1/plugins/ui/theme",
tag = "plugins",
responses(
(status = 200, description = "Plugin UI theme extensions"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_plugin_ui_theme_extensions(
State(state): State<AppState>,
) -> Result<Json<HashMap<String, String>>, ApiError> {
) -> Result<Json<FxHashMap<String, String>>, ApiError> {
let plugin_manager = require_plugin_manager(&state)?;
Ok(Json(plugin_manager.list_ui_theme_extensions().await))
}
/// Reload a plugin (for development)
#[utoipa::path(
post,
path = "/api/v1/plugins/{id}/reload",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
responses(
(status = 200, description = "Plugin reloaded"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn reload_plugin(
State(state): State<AppState>,
Path(id): Path<String>,

View file

@ -6,14 +6,14 @@ use serde::{Deserialize, Serialize};
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateSavedSearchRequest {
pub name: String,
pub query: String,
pub sort_order: Option<String>,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SavedSearchResponse {
pub id: String,
pub name: String,
@ -31,6 +31,19 @@ const VALID_SORT_ORDERS: &[&str] = &[
"size_desc",
];
#[utoipa::path(
post,
path = "/api/v1/searches",
tag = "saved_searches",
request_body = CreateSavedSearchRequest,
responses(
(status = 200, description = "Search saved", body = SavedSearchResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_saved_search(
State(state): State<AppState>,
Json(req): Json<CreateSavedSearchRequest>,
@ -51,14 +64,15 @@ pub async fn create_saved_search(
));
}
if let Some(ref sort) = req.sort_order
&& !VALID_SORT_ORDERS.contains(&sort.as_str()) {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(format!(
"sort_order must be one of: {}",
VALID_SORT_ORDERS.join(", ")
)),
));
}
&& !VALID_SORT_ORDERS.contains(&sort.as_str())
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(format!(
"sort_order must be one of: {}",
VALID_SORT_ORDERS.join(", ")
)),
));
}
let id = uuid::Uuid::now_v7();
state
.storage
@ -75,6 +89,17 @@ pub async fn create_saved_search(
}))
}
#[utoipa::path(
get,
path = "/api/v1/searches",
tag = "saved_searches",
responses(
(status = 200, description = "List of saved searches", body = Vec<SavedSearchResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_saved_searches(
State(state): State<AppState>,
) -> Result<Json<Vec<SavedSearchResponse>>, ApiError> {
@ -99,6 +124,19 @@ pub async fn list_saved_searches(
))
}
#[utoipa::path(
delete,
path = "/api/v1/searches/{id}",
tag = "saved_searches",
params(("id" = uuid::Uuid, Path, description = "Saved search ID")),
responses(
(status = 200, description = "Saved search deleted"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_saved_search(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,

View file

@ -7,6 +7,19 @@ use crate::{
};
/// Trigger a scan as a background job. Returns the job ID immediately.
#[utoipa::path(
post,
path = "/api/v1/scan",
tag = "scan",
request_body = ScanRequest,
responses(
(status = 200, description = "Scan job submitted", body = ScanJobResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_scan(
State(state): State<AppState>,
Json(req): Json<ScanRequest>,
@ -18,6 +31,16 @@ pub async fn trigger_scan(
}))
}
#[utoipa::path(
get,
path = "/api/v1/scan/status",
tag = "scan",
responses(
(status = 200, description = "Scan status", body = ScanStatusResponse),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn scan_status(
State(state): State<AppState>,
) -> Json<ScanStatusResponse> {

View file

@ -5,6 +5,17 @@ use axum::{
use crate::{dto::ScheduledTaskResponse, error::ApiError, state::AppState};
#[utoipa::path(
get,
path = "/api/v1/scheduled-tasks",
tag = "scheduled_tasks",
responses(
(status = 200, description = "List of scheduled tasks", body = Vec<ScheduledTaskResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn list_scheduled_tasks(
State(state): State<AppState>,
) -> Result<Json<Vec<ScheduledTaskResponse>>, ApiError> {
@ -26,6 +37,19 @@ pub async fn list_scheduled_tasks(
Ok(Json(responses))
}
#[utoipa::path(
post,
path = "/api/v1/scheduled-tasks/{id}/toggle",
tag = "scheduled_tasks",
params(("id" = String, Path, description = "Task ID")),
responses(
(status = 200, description = "Task toggled"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn toggle_scheduled_task(
State(state): State<AppState>,
Path(id): Path<String>,
@ -45,6 +69,19 @@ pub async fn toggle_scheduled_task(
}
}
#[utoipa::path(
post,
path = "/api/v1/scheduled-tasks/{id}/run",
tag = "scheduled_tasks",
params(("id" = String, Path, description = "Task ID")),
responses(
(status = 200, description = "Task triggered"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn run_scheduled_task_now(
State(state): State<AppState>,
Path(id): Path<String>,

View file

@ -22,6 +22,24 @@ fn resolve_sort(sort: Option<&str>) -> SortOrder {
}
}
#[utoipa::path(
get,
path = "/api/v1/search",
tag = "search",
params(
("q" = String, Query, description = "Search query"),
("sort" = Option<String>, Query, description = "Sort order"),
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Pagination limit"),
),
responses(
(status = 200, description = "Search results", body = SearchResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn search(
State(state): State<AppState>,
Query(params): Query<SearchParams>,
@ -56,6 +74,19 @@ pub async fn search(
}))
}
#[utoipa::path(
post,
path = "/api/v1/search",
tag = "search",
request_body = SearchRequestBody,
responses(
(status = 200, description = "Search results", body = SearchResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn search_post(
State(state): State<AppState>,
Json(body): Json<SearchRequestBody>,

View file

@ -48,6 +48,19 @@ use crate::{
/// Create a new share
/// POST /api/shares
#[utoipa::path(
post,
path = "/api/v1/shares",
tag = "shares",
request_body = CreateShareRequest,
responses(
(status = 200, description = "Share created", body = ShareResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -201,6 +214,20 @@ pub async fn create_share(
/// List outgoing shares (shares I created)
/// GET /api/shares/outgoing
#[utoipa::path(
get,
path = "/api/v1/shares/outgoing",
tag = "shares",
params(
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Pagination limit"),
),
responses(
(status = 200, description = "Outgoing shares", body = Vec<ShareResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_outgoing(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -220,6 +247,20 @@ pub async fn list_outgoing(
/// List incoming shares (shares shared with me)
/// GET /api/shares/incoming
#[utoipa::path(
get,
path = "/api/v1/shares/incoming",
tag = "shares",
params(
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Pagination limit"),
),
responses(
(status = 200, description = "Incoming shares", body = Vec<ShareResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_incoming(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -239,6 +280,19 @@ pub async fn list_incoming(
/// Get share details
/// GET /api/shares/{id}
#[utoipa::path(
get,
path = "/api/v1/shares/{id}",
tag = "shares",
params(("id" = Uuid, Path, description = "Share ID")),
responses(
(status = 200, description = "Share details", body = ShareResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -269,6 +323,20 @@ pub async fn get_share(
/// Update a share
/// PATCH /api/shares/{id}
#[utoipa::path(
patch,
path = "/api/v1/shares/{id}",
tag = "shares",
params(("id" = Uuid, Path, description = "Share ID")),
request_body = UpdateShareRequest,
responses(
(status = 200, description = "Share updated", body = ShareResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn update_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -349,6 +417,19 @@ pub async fn update_share(
/// Delete (revoke) a share
/// DELETE /api/shares/{id}
#[utoipa::path(
delete,
path = "/api/v1/shares/{id}",
tag = "shares",
params(("id" = Uuid, Path, description = "Share ID")),
responses(
(status = 204, description = "Share deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -393,6 +474,19 @@ pub async fn delete_share(
/// Batch delete shares
/// POST /api/shares/batch/delete
#[utoipa::path(
post,
path = "/api/v1/shares/batch/delete",
tag = "shares",
request_body = BatchDeleteSharesRequest,
responses(
(status = 200, description = "Shares deleted"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn batch_delete(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -432,6 +526,20 @@ pub async fn batch_delete(
/// Access a public shared resource
/// GET /api/shared/{token}
#[utoipa::path(
get,
path = "/api/v1/shared/{token}",
tag = "shares",
params(
("token" = String, Path, description = "Share token"),
("password" = Option<String>, Query, description = "Share password if required"),
),
responses(
(status = 200, description = "Shared content", body = SharedContentResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
)
)]
pub async fn access_shared(
State(state): State<AppState>,
Path(token): Path<String>,
@ -599,6 +707,23 @@ pub async fn access_shared(
/// Get share activity log
/// GET /api/shares/{id}/activity
#[utoipa::path(
get,
path = "/api/v1/shares/{id}/activity",
tag = "shares",
params(
("id" = Uuid, Path, description = "Share ID"),
("offset" = Option<u64>, Query, description = "Pagination offset"),
("limit" = Option<u64>, Query, description = "Pagination limit"),
),
responses(
(status = 200, description = "Share activity", body = Vec<ShareActivityResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_activity(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -632,6 +757,16 @@ pub async fn get_activity(
/// Get unread share notifications
/// GET /api/notifications/shares
#[utoipa::path(
get,
path = "/api/v1/notifications/shares",
tag = "shares",
responses(
(status = 200, description = "Unread notifications", body = Vec<ShareNotificationResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn get_notifications(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -650,6 +785,17 @@ pub async fn get_notifications(
/// Mark a notification as read
/// POST /api/notifications/shares/{id}/read
#[utoipa::path(
post,
path = "/api/v1/notifications/shares/{id}/read",
tag = "shares",
params(("id" = Uuid, Path, description = "Notification ID")),
responses(
(status = 200, description = "Notification marked as read"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn mark_notification_read(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -667,6 +813,16 @@ pub async fn mark_notification_read(
/// Mark all notifications as read
/// POST /api/notifications/shares/read-all
#[utoipa::path(
post,
path = "/api/v1/notifications/shares/read-all",
tag = "shares",
responses(
(status = 200, description = "All notifications marked as read"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn mark_all_read(
State(state): State<AppState>,
Extension(username): Extension<String>,

View file

@ -27,6 +27,20 @@ pub struct ShareLinkQuery {
pub password: Option<String>,
}
#[utoipa::path(
post,
path = "/api/v1/media/{id}/rate",
tag = "social",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = CreateRatingRequest,
responses(
(status = 200, description = "Rating saved", body = RatingResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn rate_media(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -59,6 +73,18 @@ pub async fn rate_media(
Ok(Json(RatingResponse::from(rating)))
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/ratings",
tag = "social",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media ratings", body = Vec<RatingResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_media_ratings(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -69,6 +95,20 @@ pub async fn get_media_ratings(
))
}
#[utoipa::path(
post,
path = "/api/v1/media/{id}/comments",
tag = "social",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = CreateCommentRequest,
responses(
(status = 200, description = "Comment added", body = CommentResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn add_comment(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -91,6 +131,18 @@ pub async fn add_comment(
Ok(Json(CommentResponse::from(comment)))
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/comments",
tag = "social",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media comments", body = Vec<CommentResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_media_comments(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -101,6 +153,18 @@ pub async fn get_media_comments(
))
}
#[utoipa::path(
post,
path = "/api/v1/favorites",
tag = "social",
request_body = FavoriteRequest,
responses(
(status = 200, description = "Added to favorites"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn add_favorite(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -114,6 +178,18 @@ pub async fn add_favorite(
Ok(Json(serde_json::json!({"added": true})))
}
#[utoipa::path(
delete,
path = "/api/v1/favorites/{media_id}",
tag = "social",
params(("media_id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Removed from favorites"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn remove_favorite(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -127,6 +203,17 @@ pub async fn remove_favorite(
Ok(Json(serde_json::json!({"removed": true})))
}
#[utoipa::path(
get,
path = "/api/v1/favorites",
tag = "social",
responses(
(status = 200, description = "User favorites", body = Vec<MediaResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_favorites(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -145,6 +232,19 @@ pub async fn list_favorites(
))
}
#[utoipa::path(
post,
path = "/api/v1/media/share",
tag = "social",
request_body = CreateShareLinkRequest,
responses(
(status = 200, description = "Share link created", body = ShareLinkResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_share_link(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -191,6 +291,20 @@ pub async fn create_share_link(
Ok(Json(ShareLinkResponse::from(link)))
}
#[utoipa::path(
get,
path = "/api/v1/shared/media/{token}",
tag = "social",
params(
("token" = String, Path, description = "Share token"),
("password" = Option<String>, Query, description = "Share password"),
),
responses(
(status = 200, description = "Shared media", body = MediaResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
)
)]
pub async fn access_shared_media(
State(state): State<AppState>,
Path(token): Path<String>,

View file

@ -2,6 +2,17 @@ use axum::{Json, extract::State};
use crate::{dto::LibraryStatisticsResponse, error::ApiError, state::AppState};
#[utoipa::path(
get,
path = "/api/v1/statistics",
tag = "statistics",
responses(
(status = 200, description = "Library statistics", body = LibraryStatisticsResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn library_statistics(
State(state): State<AppState>,
) -> Result<Json<LibraryStatisticsResponse>, ApiError> {

View file

@ -49,6 +49,18 @@ fn escape_xml(s: &str) -> String {
.replace('\'', "&apos;")
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/stream/hls/master.m3u8",
tag = "streaming",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "HLS master playlist"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn hls_master_playlist(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -75,6 +87,22 @@ pub async fn hls_master_playlist(
build_response("application/vnd.apple.mpegurl", playlist)
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/stream/hls/{profile}/playlist.m3u8",
tag = "streaming",
params(
("id" = Uuid, Path, description = "Media item ID"),
("profile" = String, Path, description = "Transcode profile name"),
),
responses(
(status = 200, description = "HLS variant playlist"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn hls_variant_playlist(
State(state): State<AppState>,
Path((id, profile)): Path<(Uuid, String)>,
@ -112,6 +140,23 @@ pub async fn hls_variant_playlist(
build_response("application/vnd.apple.mpegurl", playlist)
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/stream/hls/{profile}/{segment}",
tag = "streaming",
params(
("id" = Uuid, Path, description = "Media item ID"),
("profile" = String, Path, description = "Transcode profile name"),
("segment" = String, Path, description = "Segment filename"),
),
responses(
(status = 200, description = "HLS segment data"),
(status = 202, description = "Segment not yet available"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn hls_segment(
State(state): State<AppState>,
Path((id, profile, segment)): Path<(Uuid, String, String)>,
@ -167,6 +212,19 @@ pub async fn hls_segment(
))
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/stream/dash/manifest.mpd",
tag = "streaming",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "DASH manifest"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn dash_manifest(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -216,6 +274,23 @@ pub async fn dash_manifest(
build_response("application/dash+xml", mpd)
}
#[utoipa::path(
get,
path = "/api/v1/media/{id}/stream/dash/{profile}/{segment}",
tag = "streaming",
params(
("id" = Uuid, Path, description = "Media item ID"),
("profile" = String, Path, description = "Transcode profile name"),
("segment" = String, Path, description = "Segment filename"),
),
responses(
(status = 200, description = "DASH segment data"),
(status = 202, description = "Segment not yet available"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn dash_segment(
State(state): State<AppState>,
Path((id, profile, segment)): Path<(Uuid, String, String)>,

View file

@ -4,62 +4,185 @@ use axum::{
};
use pinakes_core::{
model::MediaId,
subtitles::{Subtitle, SubtitleFormat},
subtitles::{
Subtitle,
detect_format,
extract_embedded_track,
list_embedded_tracks,
validate_language_code,
},
};
use uuid::Uuid;
use crate::{
dto::{AddSubtitleRequest, SubtitleResponse, UpdateSubtitleOffsetRequest},
dto::{
AddSubtitleRequest,
SubtitleListResponse,
SubtitleResponse,
SubtitleTrackInfoResponse,
UpdateSubtitleOffsetRequest,
},
error::ApiError,
state::AppState,
};
#[utoipa::path(
get,
path = "/api/v1/media/{id}/subtitles",
tag = "subtitles",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Subtitles and available embedded tracks", body = SubtitleListResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn list_subtitles(
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<Vec<SubtitleResponse>>, ApiError> {
) -> Result<Json<SubtitleListResponse>, ApiError> {
let item = state.storage.get_media(MediaId(id)).await?;
let subtitles = state.storage.get_media_subtitles(MediaId(id)).await?;
Ok(Json(
subtitles.into_iter().map(SubtitleResponse::from).collect(),
))
let available_tracks =
list_embedded_tracks(&item.path).await.unwrap_or_default();
Ok(Json(SubtitleListResponse {
subtitles: subtitles
.into_iter()
.map(SubtitleResponse::from)
.collect(),
available_tracks: available_tracks
.into_iter()
.map(SubtitleTrackInfoResponse::from)
.collect(),
}))
}
#[utoipa::path(
post,
path = "/api/v1/media/{id}/subtitles",
tag = "subtitles",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = AddSubtitleRequest,
responses(
(status = 200, description = "Subtitle added", body = SubtitleResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn add_subtitle(
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(req): Json<AddSubtitleRequest>,
) -> Result<Json<SubtitleResponse>, ApiError> {
let format: SubtitleFormat = req.format.parse().map_err(|e: String| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(e))
})?;
let is_embedded = req.is_embedded.unwrap_or(false);
if !is_embedded && req.file_path.is_none() {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"file_path is required for non-embedded subtitles".into(),
),
));
}
if is_embedded && req.track_index.is_none() {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"track_index is required for embedded subtitles".into(),
),
));
}
if req
.language
.as_ref()
.is_some_and(|l| l.is_empty() || l.len() > 64)
// Validate language code if provided.
if let Some(ref lang) = req.language
&& !validate_language_code(lang)
{
return Err(ApiError::bad_request("language must be 1-64 bytes"));
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidLanguageCode(lang.clone()),
));
}
let is_embedded = req.is_embedded.unwrap_or(false);
let (file_path, resolved_format) = if is_embedded {
// Embedded subtitle: validate track_index and extract via ffmpeg.
let track_index = req.track_index.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"track_index is required for embedded subtitles".into(),
))
})?;
let item = state.storage.get_media(MediaId(id)).await?;
let tracks = list_embedded_tracks(&item.path).await?;
let track =
tracks
.iter()
.find(|t| t.index == track_index)
.ok_or(ApiError(
pinakes_core::error::PinakesError::SubtitleTrackNotFound {
index: track_index,
},
))?;
// Use the format detected from the embedded track metadata as
// authoritative.
let embedded_format = track.format;
let ext = embedded_format.to_string();
let output_dir = pinakes_core::config::Config::default_data_dir()
.join("subtitles")
.join(id.to_string());
let output_path = output_dir.join(format!("{track_index}.{ext}"));
tokio::fs::create_dir_all(&output_dir).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to create subtitle output dir: {e}"),
))
})?;
extract_embedded_track(&item.path, track_index, &output_path).await?;
(Some(output_path), embedded_format)
} else {
// External subtitle file: validate path then detect format from content.
let path_str = req.file_path.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"file_path is required for non-embedded subtitles".into(),
))
})?;
let path = std::path::PathBuf::from(&path_str);
use std::path::Component;
if !path.is_absolute()
|| path.components().any(|c| c == Component::ParentDir)
{
return Err(ApiError::bad_request(
"file_path must be an absolute path within a configured root",
));
}
let roots = state.config.read().await.directories.roots.clone();
if !roots.iter().any(|root| path.starts_with(root)) {
return Err(ApiError::bad_request(
"file_path must be an absolute path within a configured root",
));
}
let exists = tokio::fs::try_exists(&path).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to check subtitle file: {e}"),
))
})?;
if !exists {
return Err(ApiError(pinakes_core::error::PinakesError::FileNotFound(
path,
)));
}
// Detect the actual format from the file extension; use it as authoritative
// rather than trusting the client-supplied format field.
let detected_format = detect_format(&path).ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("unrecognised subtitle format for: {}", path.display()),
))
})?;
(Some(path), detected_format)
};
let subtitle = Subtitle {
id: Uuid::now_v7(),
media_id: MediaId(id),
language: req.language,
format,
file_path: req.file_path.map(std::path::PathBuf::from),
format: resolved_format,
file_path,
is_embedded,
track_index: req.track_index,
offset_ms: req.offset_ms.unwrap_or(0),
@ -69,6 +192,18 @@ pub async fn add_subtitle(
Ok(Json(SubtitleResponse::from(subtitle)))
}
#[utoipa::path(
delete,
path = "/api/v1/subtitles/{id}",
tag = "subtitles",
params(("id" = Uuid, Path, description = "Subtitle ID")),
responses(
(status = 200, description = "Subtitle deleted"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_subtitle(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -77,6 +212,21 @@ pub async fn delete_subtitle(
Ok(Json(serde_json::json!({"deleted": true})))
}
#[utoipa::path(
get,
path = "/api/v1/media/{media_id}/subtitles/{subtitle_id}/content",
tag = "subtitles",
params(
("media_id" = Uuid, Path, description = "Media item ID"),
("subtitle_id" = Uuid, Path, description = "Subtitle ID"),
),
responses(
(status = 200, description = "Subtitle content"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_subtitle_content(
State(state): State<AppState>,
Path((media_id, subtitle_id)): Path<(Uuid, Uuid)>,
@ -91,40 +241,65 @@ pub async fn get_subtitle_content(
)))
})?;
if let Some(ref path) = subtitle.file_path {
let content = tokio::fs::read_to_string(path).await.map_err(|e| {
let path = subtitle.file_path.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"subtitle has no associated file to serve".into(),
))
})?;
let fmt = subtitle.format;
let content_type = fmt.mime_type();
let body = if fmt.is_binary() {
let bytes = tokio::fs::read(&path).await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
ApiError(pinakes_core::error::PinakesError::FileNotFound(
path.clone(),
))
} else {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read subtitle file {}: {}", path.display(), e),
format!("failed to read subtitle file {}: {e}", path.display()),
))
}
})?;
let content_type = match subtitle.format {
SubtitleFormat::Vtt => "text/vtt",
SubtitleFormat::Srt => "application/x-subrip",
_ => "text/plain",
};
axum::response::Response::builder()
.header("Content-Type", content_type)
.body(axum::body::Body::from(content))
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to build response: {e}"),
))
})
axum::body::Body::from(bytes)
} else {
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"subtitle is embedded, no file to serve".into(),
),
))
}
let text = tokio::fs::read_to_string(&path).await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
ApiError(pinakes_core::error::PinakesError::FileNotFound(
path.clone(),
))
} else {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read subtitle file {}: {e}", path.display()),
))
}
})?;
axum::body::Body::from(text)
};
axum::response::Response::builder()
.header("Content-Type", content_type)
.body(body)
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to build response: {e}"),
))
})
}
#[utoipa::path(
patch,
path = "/api/v1/subtitles/{id}/offset",
tag = "subtitles",
params(("id" = Uuid, Path, description = "Subtitle ID")),
request_body = UpdateSubtitleOffsetRequest,
responses(
(status = 200, description = "Offset updated"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn update_offset(
State(state): State<AppState>,
Path(id): Path<Uuid>,

View file

@ -57,6 +57,19 @@ const DEFAULT_CHANGES_LIMIT: u64 = 100;
/// Register a new sync device
/// POST /api/sync/devices
#[utoipa::path(
post,
path = "/api/v1/sync/devices",
tag = "sync",
request_body = RegisterDeviceRequest,
responses(
(status = 200, description = "Device registered", body = DeviceRegistrationResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn register_device(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -111,6 +124,16 @@ pub async fn register_device(
/// List user's sync devices
/// GET /api/sync/devices
#[utoipa::path(
get,
path = "/api/v1/sync/devices",
tag = "sync",
responses(
(status = 200, description = "List of devices", body = Vec<DeviceResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_devices(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -127,6 +150,19 @@ pub async fn list_devices(
/// Get device details
/// GET /api/sync/devices/{id}
#[utoipa::path(
get,
path = "/api/v1/sync/devices/{id}",
tag = "sync",
params(("id" = Uuid, Path, description = "Device ID")),
responses(
(status = 200, description = "Device details", body = DeviceResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_device(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -149,6 +185,20 @@ pub async fn get_device(
/// Update a device
/// PUT /api/sync/devices/{id}
#[utoipa::path(
put,
path = "/api/v1/sync/devices/{id}",
tag = "sync",
params(("id" = Uuid, Path, description = "Device ID")),
request_body = UpdateDeviceRequest,
responses(
(status = 200, description = "Device updated", body = DeviceResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn update_device(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -185,6 +235,19 @@ pub async fn update_device(
/// Delete a device
/// DELETE /api/sync/devices/{id}
#[utoipa::path(
delete,
path = "/api/v1/sync/devices/{id}",
tag = "sync",
params(("id" = Uuid, Path, description = "Device ID")),
responses(
(status = 204, description = "Device deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_device(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -213,6 +276,19 @@ pub async fn delete_device(
/// Regenerate device token
/// POST /api/sync/devices/{id}/token
#[utoipa::path(
post,
path = "/api/v1/sync/devices/{id}/token",
tag = "sync",
params(("id" = Uuid, Path, description = "Device ID")),
responses(
(status = 200, description = "Token regenerated", body = DeviceRegistrationResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn regenerate_token(
State(state): State<AppState>,
Extension(username): Extension<String>,
@ -253,6 +329,21 @@ pub async fn regenerate_token(
/// Get changes since cursor
/// GET /api/sync/changes
#[utoipa::path(
get,
path = "/api/v1/sync/changes",
tag = "sync",
params(
("cursor" = Option<u64>, Query, description = "Sync cursor"),
("limit" = Option<u64>, Query, description = "Max changes (max 1000)"),
),
responses(
(status = 200, description = "Changes since cursor", body = ChangesResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn get_changes(
State(state): State<AppState>,
Query(params): Query<GetChangesParams>,
@ -290,6 +381,18 @@ pub async fn get_changes(
/// Report local changes from client
/// POST /api/sync/report
#[utoipa::path(
post,
path = "/api/v1/sync/report",
tag = "sync",
request_body = ReportChangesRequest,
responses(
(status = 200, description = "Changes processed", body = ReportChangesResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn report_changes(
State(state): State<AppState>,
Extension(_username): Extension<String>,
@ -392,6 +495,18 @@ pub async fn report_changes(
/// Acknowledge processed changes
/// POST /api/sync/ack
#[utoipa::path(
post,
path = "/api/v1/sync/ack",
tag = "sync",
request_body = AcknowledgeChangesRequest,
responses(
(status = 200, description = "Changes acknowledged"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn acknowledge_changes(
State(state): State<AppState>,
Extension(_username): Extension<String>,
@ -422,6 +537,16 @@ pub async fn acknowledge_changes(
/// List unresolved conflicts
/// GET /api/sync/conflicts
#[utoipa::path(
get,
path = "/api/v1/sync/conflicts",
tag = "sync",
responses(
(status = 200, description = "Unresolved conflicts", body = Vec<ConflictResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_conflicts(
State(state): State<AppState>,
Extension(_username): Extension<String>,
@ -451,6 +576,19 @@ pub async fn list_conflicts(
/// Resolve a sync conflict
/// POST /api/sync/conflicts/{id}/resolve
#[utoipa::path(
post,
path = "/api/v1/sync/conflicts/{id}/resolve",
tag = "sync",
params(("id" = Uuid, Path, description = "Conflict ID")),
request_body = ResolveConflictRequest,
responses(
(status = 200, description = "Conflict resolved"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn resolve_conflict(
State(state): State<AppState>,
Extension(_username): Extension<String>,
@ -477,6 +615,18 @@ pub async fn resolve_conflict(
/// Create an upload session for chunked upload
/// POST /api/sync/upload
#[utoipa::path(
post,
path = "/api/v1/sync/upload",
tag = "sync",
request_body = CreateUploadSessionRequest,
responses(
(status = 200, description = "Upload session created", body = UploadSessionResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn create_upload(
State(state): State<AppState>,
Extension(_username): Extension<String>,
@ -541,6 +691,23 @@ pub async fn create_upload(
/// Upload a chunk
/// PUT /api/sync/upload/{id}/chunks/{index}
#[utoipa::path(
put,
path = "/api/v1/sync/upload/{id}/chunks/{index}",
tag = "sync",
params(
("id" = Uuid, Path, description = "Upload session ID"),
("index" = u64, Path, description = "Chunk index"),
),
request_body(content = Vec<u8>, description = "Chunk binary data", content_type = "application/octet-stream"),
responses(
(status = 200, description = "Chunk received", body = ChunkUploadedResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn upload_chunk(
State(state): State<AppState>,
Path((session_id, chunk_index)): Path<(Uuid, u64)>,
@ -590,6 +757,18 @@ pub async fn upload_chunk(
/// Get upload session status
/// GET /api/sync/upload/{id}
#[utoipa::path(
get,
path = "/api/v1/sync/upload/{id}",
tag = "sync",
params(("id" = Uuid, Path, description = "Upload session ID")),
responses(
(status = 200, description = "Upload session status", body = UploadSessionResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_upload_status(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -603,6 +782,19 @@ pub async fn get_upload_status(
/// Complete an upload session
/// POST /api/sync/upload/{id}/complete
#[utoipa::path(
post,
path = "/api/v1/sync/upload/{id}/complete",
tag = "sync",
params(("id" = Uuid, Path, description = "Upload session ID")),
responses(
(status = 200, description = "Upload completed"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn complete_upload(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -759,6 +951,18 @@ pub async fn complete_upload(
/// Cancel an upload session
/// DELETE /api/sync/upload/{id}
#[utoipa::path(
delete,
path = "/api/v1/sync/upload/{id}",
tag = "sync",
params(("id" = Uuid, Path, description = "Upload session ID")),
responses(
(status = 204, description = "Upload cancelled"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn cancel_upload(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -789,6 +993,19 @@ pub async fn cancel_upload(
/// Download a file for sync (supports Range header)
/// GET /api/sync/download/{*path}
#[utoipa::path(
get,
path = "/api/v1/sync/download/{path}",
tag = "sync",
params(("path" = String, Path, description = "File path")),
responses(
(status = 200, description = "File content"),
(status = 206, description = "Partial content"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn download_file(
State(state): State<AppState>,
Path(path): Path<String>,

View file

@ -11,6 +11,20 @@ use crate::{
state::AppState,
};
#[utoipa::path(
post,
path = "/api/v1/tags",
tag = "tags",
request_body = CreateTagRequest,
responses(
(status = 200, description = "Tag created", body = TagResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_tag(
State(state): State<AppState>,
Json(req): Json<CreateTagRequest>,
@ -28,6 +42,17 @@ pub async fn create_tag(
Ok(Json(TagResponse::from(tag)))
}
#[utoipa::path(
get,
path = "/api/v1/tags",
tag = "tags",
responses(
(status = 200, description = "List of tags", body = Vec<TagResponse>),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn list_tags(
State(state): State<AppState>,
) -> Result<Json<Vec<TagResponse>>, ApiError> {
@ -35,6 +60,19 @@ pub async fn list_tags(
Ok(Json(tags.into_iter().map(TagResponse::from).collect()))
}
#[utoipa::path(
get,
path = "/api/v1/tags/{id}",
tag = "tags",
params(("id" = Uuid, Path, description = "Tag ID")),
responses(
(status = 200, description = "Tag", body = TagResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_tag(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -43,6 +81,20 @@ pub async fn get_tag(
Ok(Json(TagResponse::from(tag)))
}
#[utoipa::path(
delete,
path = "/api/v1/tags/{id}",
tag = "tags",
params(("id" = Uuid, Path, description = "Tag ID")),
responses(
(status = 200, description = "Tag deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_tag(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -51,6 +103,21 @@ pub async fn delete_tag(
Ok(Json(serde_json::json!({"deleted": true})))
}
#[utoipa::path(
post,
path = "/api/v1/media/{media_id}/tags",
tag = "tags",
params(("media_id" = Uuid, Path, description = "Media item ID")),
request_body = TagMediaRequest,
responses(
(status = 200, description = "Tag applied"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn tag_media(
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
@ -70,6 +137,23 @@ pub async fn tag_media(
Ok(Json(serde_json::json!({"tagged": true})))
}
#[utoipa::path(
delete,
path = "/api/v1/media/{media_id}/tags/{tag_id}",
tag = "tags",
params(
("media_id" = Uuid, Path, description = "Media item ID"),
("tag_id" = Uuid, Path, description = "Tag ID"),
),
responses(
(status = 200, description = "Tag removed"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn untag_media(
State(state): State<AppState>,
Path((media_id, tag_id)): Path<(Uuid, Uuid)>,
@ -88,6 +172,19 @@ pub async fn untag_media(
Ok(Json(serde_json::json!({"untagged": true})))
}
#[utoipa::path(
get,
path = "/api/v1/media/{media_id}/tags",
tag = "tags",
params(("media_id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "Media tags", body = Vec<TagResponse>),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_media_tags(
State(state): State<AppState>,
Path(media_id): Path<Uuid>,

View file

@ -11,6 +11,20 @@ use crate::{
state::AppState,
};
#[utoipa::path(
post,
path = "/api/v1/media/{id}/transcode",
tag = "transcode",
params(("id" = Uuid, Path, description = "Media item ID")),
request_body = CreateTranscodeRequest,
responses(
(status = 200, description = "Transcode job submitted"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn start_transcode(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -29,6 +43,18 @@ pub async fn start_transcode(
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
#[utoipa::path(
get,
path = "/api/v1/transcode/{id}",
tag = "transcode",
params(("id" = Uuid, Path, description = "Transcode session ID")),
responses(
(status = 200, description = "Transcode session details", body = TranscodeSessionResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_session(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -37,6 +63,16 @@ pub async fn get_session(
Ok(Json(TranscodeSessionResponse::from(session)))
}
#[utoipa::path(
get,
path = "/api/v1/transcode",
tag = "transcode",
responses(
(status = 200, description = "List of transcode sessions", body = Vec<TranscodeSessionResponse>),
(status = 401, description = "Unauthorized"),
),
security(("bearer_auth" = []))
)]
pub async fn list_sessions(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
@ -51,6 +87,18 @@ pub async fn list_sessions(
))
}
#[utoipa::path(
delete,
path = "/api/v1/transcode/{id}",
tag = "transcode",
params(("id" = Uuid, Path, description = "Transcode session ID")),
responses(
(status = 200, description = "Transcode session cancelled"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn cancel_session(
State(state): State<AppState>,
Path(id): Path<Uuid>,

View file

@ -32,6 +32,18 @@ fn sanitize_content_disposition(filename: &str) -> String {
/// Upload a file to managed storage
/// POST /api/upload
#[utoipa::path(
post,
path = "/api/v1/upload",
tag = "upload",
responses(
(status = 200, description = "File uploaded", body = UploadResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn upload_file(
State(state): State<AppState>,
mut multipart: Multipart,
@ -85,6 +97,19 @@ pub async fn upload_file(
/// Download a managed file
/// GET /api/media/{id}/download
#[utoipa::path(
get,
path = "/api/v1/media/{id}/download",
tag = "upload",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 200, description = "File content"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn download_file(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -154,6 +179,19 @@ pub async fn download_file(
/// Migrate an external file to managed storage
/// POST /api/media/{id}/move-to-managed
#[utoipa::path(
post,
path = "/api/v1/media/{id}/move-to-managed",
tag = "upload",
params(("id" = Uuid, Path, description = "Media item ID")),
responses(
(status = 204, description = "File migrated"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn move_to_managed(
State(state): State<AppState>,
Path(id): Path<Uuid>,
@ -177,6 +215,17 @@ pub async fn move_to_managed(
/// Get managed storage statistics
/// GET /api/managed/stats
#[utoipa::path(
get,
path = "/api/v1/managed/stats",
tag = "upload",
responses(
(status = 200, description = "Managed storage statistics", body = ManagedStorageStatsResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn managed_stats(
State(state): State<AppState>,
) -> ApiResult<Json<ManagedStorageStatsResponse>> {

View file

@ -16,6 +16,17 @@ use crate::{
};
/// List all users (admin only)
#[utoipa::path(
get,
path = "/api/v1/admin/users",
tag = "users",
responses(
(status = 200, description = "List of users", body = Vec<UserResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn list_users(
State(state): State<AppState>,
) -> Result<Json<Vec<UserResponse>>, ApiError> {
@ -24,6 +35,24 @@ pub async fn list_users(
}
/// Create a new user (admin only)
#[utoipa::path(
post,
path = "/api/v1/admin/users",
tag = "users",
request_body(
content = inline(serde_json::Value),
description = "username, password, role, and optional profile fields",
content_type = "application/json"
),
responses(
(status = 200, description = "User created", body = UserResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_user(
State(state): State<AppState>,
Json(req): Json<CreateUserRequest>,
@ -74,6 +103,19 @@ pub async fn create_user(
}
/// Get a specific user by ID
#[utoipa::path(
get,
path = "/api/v1/admin/users/{id}",
tag = "users",
params(("id" = String, Path, description = "User ID")),
responses(
(status = 200, description = "User details", body = UserResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_user(
State(state): State<AppState>,
Path(id): Path<String>,
@ -90,6 +132,25 @@ pub async fn get_user(
}
/// Update a user
#[utoipa::path(
patch,
path = "/api/v1/admin/users/{id}",
tag = "users",
params(("id" = String, Path, description = "User ID")),
request_body(
content = inline(serde_json::Value),
description = "Optional password, role, or profile fields to update",
content_type = "application/json"
),
responses(
(status = 200, description = "User updated", body = UserResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn update_user(
State(state): State<AppState>,
Path(id): Path<String>,
@ -125,6 +186,19 @@ pub async fn update_user(
}
/// Delete a user (admin only)
#[utoipa::path(
delete,
path = "/api/v1/admin/users/{id}",
tag = "users",
params(("id" = String, Path, description = "User ID")),
responses(
(status = 200, description = "User deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_user(
State(state): State<AppState>,
Path(id): Path<String>,
@ -141,6 +215,18 @@ pub async fn delete_user(
}
/// Get user's accessible libraries
#[utoipa::path(
get,
path = "/api/v1/admin/users/{id}/libraries",
tag = "users",
params(("id" = String, Path, description = "User ID")),
responses(
(status = 200, description = "User libraries", body = Vec<UserLibraryResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn get_user_libraries(
State(state): State<AppState>,
Path(id): Path<String>,
@ -177,6 +263,20 @@ fn validate_root_path(path: &str) -> Result<(), ApiError> {
}
/// Grant library access to a user (admin only)
#[utoipa::path(
post,
path = "/api/v1/admin/users/{id}/libraries",
tag = "users",
params(("id" = String, Path, description = "User ID")),
request_body = GrantLibraryAccessRequest,
responses(
(status = 200, description = "Access granted"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn grant_library_access(
State(state): State<AppState>,
Path(id): Path<String>,
@ -202,6 +302,20 @@ pub async fn grant_library_access(
///
/// Uses a JSON body instead of a path parameter because `root_path` may contain
/// slashes that conflict with URL routing.
#[utoipa::path(
delete,
path = "/api/v1/admin/users/{id}/libraries",
tag = "users",
params(("id" = String, Path, description = "User ID")),
request_body = RevokeLibraryAccessRequest,
responses(
(status = 200, description = "Access revoked"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn revoke_library_access(
State(state): State<AppState>,
Path(id): Path<String>,

View file

@ -3,12 +3,23 @@ use serde::Serialize;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct WebhookInfo {
pub url: String,
pub events: Vec<String>,
}
#[utoipa::path(
get,
path = "/api/v1/webhooks",
tag = "webhooks",
responses(
(status = 200, description = "List of configured webhooks", body = Vec<WebhookInfo>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn list_webhooks(
State(state): State<AppState>,
) -> Result<Json<Vec<WebhookInfo>>, ApiError> {
@ -26,6 +37,17 @@ pub async fn list_webhooks(
Ok(Json(hooks))
}
#[utoipa::path(
post,
path = "/api/v1/webhooks/test",
tag = "webhooks",
responses(
(status = 200, description = "Test webhook sent"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn test_webhook(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {

View file

@ -1,314 +1,24 @@
use std::{net::SocketAddr, sync::Arc};
mod common;
use axum::{
body::Body,
extract::ConnectInfo,
http::{Request, StatusCode},
};
use http_body_util::BodyExt;
use pinakes_core::{
cache::CacheLayer,
config::{
AccountsConfig,
AnalyticsConfig,
CloudConfig,
Config,
DirectoryConfig,
EnrichmentConfig,
JobsConfig,
ManagedStorageConfig,
PhotoConfig,
PluginsConfig,
RateLimitConfig,
ScanningConfig,
ServerConfig,
SharingConfig,
SqliteConfig,
StorageBackendType,
StorageConfig,
SyncConfig,
ThumbnailConfig,
TlsConfig,
TranscodingConfig,
TrashConfig,
UiConfig,
UserAccount,
UserRole,
WebhookConfig,
},
jobs::JobQueue,
storage::{StorageBackend, sqlite::SqliteBackend},
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
test_addr,
};
use tokio::sync::RwLock;
use http_body_util::BodyExt;
use tower::ServiceExt;
/// Fabricated peer address for tests; the governor rate-limit layer
/// requires a `ConnectInfo<SocketAddr>` extension on every request.
fn test_addr() -> ConnectInfo<SocketAddr> {
    let addr: SocketAddr = "127.0.0.1:9999".parse().unwrap();
    ConnectInfo(addr)
}
/// Build an unauthenticated GET request, attaching the fake `ConnectInfo`
/// peer address so the rate limiter can resolve a client IP.
fn get(uri: &str) -> Request<Body> {
    let mut request = Request::builder()
        .uri(uri)
        .body(Body::empty())
        .unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build an unauthenticated POST request with a JSON body and the fake
/// `ConnectInfo` extension required by the rate limiter.
fn post_json(uri: &str, body: &str) -> Request<Body> {
    let payload = Body::from(body.to_string());
    let mut request = Request::builder()
        .method("POST")
        .uri(uri)
        .header("content-type", "application/json")
        .body(payload)
        .unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a GET request carrying a Bearer token plus the fake `ConnectInfo`
/// peer address the rate limiter expects.
fn get_authed(uri: &str, token: &str) -> Request<Body> {
    let auth_value = format!("Bearer {token}");
    let mut request = Request::builder()
        .uri(uri)
        .header("authorization", auth_value)
        .body(Body::empty())
        .unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a POST request with a JSON body, Bearer auth header, and the fake
/// `ConnectInfo` extension for rate-limiter compatibility.
fn post_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
    let auth_value = format!("Bearer {token}");
    let payload = Body::from(body.to_string());
    let mut request = Request::builder()
        .method("POST")
        .uri(uri)
        .header("content-type", "application/json")
        .header("authorization", auth_value)
        .body(payload)
        .unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a DELETE request carrying a Bearer token and the fake
/// `ConnectInfo` peer address required by the rate limiter.
fn delete_authed(uri: &str, token: &str) -> Request<Body> {
    let auth_value = format!("Bearer {token}");
    let mut request = Request::builder()
        .method("DELETE")
        .uri(uri)
        .header("authorization", auth_value)
        .body(Body::empty())
        .unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Build a PATCH request with a JSON body, Bearer auth header, and the fake
/// `ConnectInfo` extension for rate-limiter compatibility.
fn patch_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
    let auth_value = format!("Bearer {token}");
    let payload = Body::from(body.to_string());
    let mut request = Request::builder()
        .method("PATCH")
        .uri(uri)
        .header("content-type", "application/json")
        .header("authorization", auth_value)
        .body(payload)
        .unwrap();
    request.extensions_mut().insert(test_addr());
    request
}
/// Baseline test `Config`: in-memory SQLite storage, no media roots,
/// authentication disabled, and library defaults for every other subsystem.
/// Tests that need auth flip `server.authentication_disabled` afterwards.
fn default_config() -> Config {
Config {
// In-memory SQLite gives each test isolated, throwaway storage.
storage: StorageConfig {
backend: StorageBackendType::Sqlite,
sqlite: Some(SqliteConfig {
path: ":memory:".into(),
}),
postgres: None,
},
// No media roots: route tests don't touch the filesystem scanner.
directories: DirectoryConfig { roots: vec![] },
scanning: ScanningConfig {
watch: false,
poll_interval_secs: 300,
ignore_patterns: vec![],
import_concurrency: 8,
},
server: ServerConfig {
host: "127.0.0.1".to_string(),
port: 3000,
api_key: None,
tls: TlsConfig::default(),
// Most route tests run without auth; setup_app_with_auth overrides this.
authentication_disabled: true,
cors_enabled: false,
cors_origins: vec![],
},
// Everything below relies on each subsystem's Default impl.
rate_limits: RateLimitConfig::default(),
ui: UiConfig::default(),
accounts: AccountsConfig::default(),
jobs: JobsConfig::default(),
thumbnails: ThumbnailConfig::default(),
webhooks: Vec::<WebhookConfig>::new(),
scheduled_tasks: vec![],
plugins: PluginsConfig::default(),
transcoding: TranscodingConfig::default(),
enrichment: EnrichmentConfig::default(),
cloud: CloudConfig::default(),
analytics: AnalyticsConfig::default(),
photos: PhotoConfig::default(),
managed_storage: ManagedStorageConfig::default(),
sync: SyncConfig::default(),
sharing: SharingConfig::default(),
trash: TrashConfig::default(),
}
}
/// Build a fully-wired test router backed by fresh in-memory SQLite storage.
///
/// Runs migrations, constructs a no-op job queue and scheduler, and leaves
/// every optional service (plugins, transcoding, managed storage, webhooks)
/// disabled so tests exercise only the HTTP layer.
async fn setup_app() -> axum::Router {
let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
backend.run_migrations().await.expect("migrations");
let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
let config = default_config();
// Worker closure spawns an empty task, so queued jobs complete as no-ops.
let job_queue =
JobQueue::new(1, 0, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
let config = Arc::new(RwLock::new(config));
let scheduler = pinakes_core::scheduler::TaskScheduler::new(
job_queue.clone(),
tokio_util::sync::CancellationToken::new(),
config.clone(),
None,
);
// All optional services are None: tests only need the core routes.
let state = pinakes_server::state::AppState {
storage,
config,
config_path: None,
scan_progress: pinakes_core::scan::ScanProgress::new(),
job_queue,
cache: Arc::new(CacheLayer::new(60)),
scheduler: Arc::new(scheduler),
plugin_manager: None,
plugin_pipeline: None,
transcode_service: None,
managed_storage: None,
chunked_upload_manager: None,
session_semaphore: Arc::new(tokio::sync::Semaphore::new(64)),
webhook_dispatcher: None,
};
pinakes_server::app::create_router(state, &RateLimitConfig::default())
}
/// Hash a password for test user accounts.
///
/// # Panics
/// Panics if the underlying hashing routine fails — in a test fixture this
/// indicates a broken environment, not a recoverable condition, so the
/// message states the invariant instead of a bare `unwrap()`.
fn hash_password(password: &str) -> String {
    pinakes_core::users::auth::hash_password(password)
        .expect("password hashing must succeed for test fixtures")
}
/// Set up an app with accounts enabled and three pre-seeded users.
/// Returns (Router, `admin_token`, `editor_token`, `viewer_token`).
///
/// Users are created twice on purpose: once in the database (so user-ID
/// resolution works) and once in the config accounts list (so login
/// authentication works). Tokens are obtained by logging each user in
/// through the real `/api/v1/auth/login` route.
async fn setup_app_with_auth() -> (axum::Router, String, String, String) {
let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
backend.run_migrations().await.expect("migrations");
let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
// Create users in database so resolve_user_id works.
// One account per role tier: Admin, Editor, Viewer.
let users_to_create = vec![
("admin", "adminpass", UserRole::Admin),
("editor", "editorpass", UserRole::Editor),
("viewer", "viewerpass", UserRole::Viewer),
];
for (username, password, role) in &users_to_create {
let password_hash = hash_password(password);
storage
.create_user(username, &password_hash, *role, None)
.await
.expect("create user");
}
let mut config = default_config();
config.server.authentication_disabled = false; // Enable authentication for these tests
config.accounts.enabled = true;
// Mirror the same three users in the config accounts list; login checks
// credentials against these entries.
config.accounts.users = vec![
UserAccount {
username: "admin".to_string(),
password_hash: hash_password("adminpass"),
role: UserRole::Admin,
},
UserAccount {
username: "editor".to_string(),
password_hash: hash_password("editorpass"),
role: UserRole::Editor,
},
UserAccount {
username: "viewer".to_string(),
password_hash: hash_password("viewerpass"),
role: UserRole::Viewer,
},
];
// No-op job queue: the worker closure spawns an empty task.
let job_queue =
JobQueue::new(1, 0, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
let config = Arc::new(RwLock::new(config));
let scheduler = pinakes_core::scheduler::TaskScheduler::new(
job_queue.clone(),
tokio_util::sync::CancellationToken::new(),
config.clone(),
None,
);
// Optional services stay None; these tests target auth/route behavior only.
let state = pinakes_server::state::AppState {
storage,
config,
config_path: None,
scan_progress: pinakes_core::scan::ScanProgress::new(),
job_queue,
cache: Arc::new(CacheLayer::new(60)),
scheduler: Arc::new(scheduler),
plugin_manager: None,
plugin_pipeline: None,
transcode_service: None,
managed_storage: None,
chunked_upload_manager: None,
session_semaphore: Arc::new(tokio::sync::Semaphore::new(64)),
webhook_dispatcher: None,
};
let app =
pinakes_server::app::create_router(state, &RateLimitConfig::default());
// Login each user to get tokens
let admin_token = login_user(app.clone(), "admin", "adminpass").await;
let editor_token = login_user(app.clone(), "editor", "editorpass").await;
let viewer_token = login_user(app.clone(), "viewer", "viewerpass").await;
(app, admin_token, editor_token, viewer_token)
}
/// Log in through the API and return the bearer token for `username`.
/// Panics (failing the test) when the login does not return 200 OK.
async fn login_user(
    app: axum::Router,
    username: &str,
    password: &str,
) -> String {
    let payload = format!(r#"{{"username":"{username}","password":"{password}"}}"#);
    let response = app
        .oneshot(post_json("/api/v1/auth/login", &payload))
        .await
        .unwrap();
    assert_eq!(
        response.status(),
        StatusCode::OK,
        "login failed for user {username}"
    );
    let bytes = response.into_body().collect().await.unwrap().to_bytes();
    let json: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
    json["token"].as_str().unwrap().to_string()
}
/// Collect a response body and parse it as JSON, yielding `Null` when the
/// body is empty or not valid JSON.
async fn response_body(
    response: axum::response::Response,
) -> serde_json::Value {
    let bytes = response.into_body().collect().await.unwrap().to_bytes();
    serde_json::from_slice(&bytes).unwrap_or(serde_json::Value::Null)
}
#[tokio::test]
async fn test_list_media_empty() {
let app = setup_app().await;
@ -1010,3 +720,19 @@ async fn test_share_link_expired() {
|| response.status() == StatusCode::INTERNAL_SERVER_ERROR
);
}
#[tokio::test]
async fn test_update_sync_device_requires_editor() {
    // A viewer must not be able to rename sync devices; the role check
    // fires before the (nonexistent) device is looked up.
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let device_id = uuid::Uuid::now_v7();
    let uri = format!("/api/v1/sync/devices/{device_id}");
    let response = app
        .clone()
        .oneshot(put_json_authed(&uri, r#"{"name":"renamed"}"#, &viewer_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,150 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
#[tokio::test]
async fn list_books_empty() {
    // A fresh database must yield an empty JSON array of books.
    let app = setup_app().await;
    let response = app.oneshot(get("/api/v1/books")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let json = response_body(response).await;
    assert!(json.as_array().expect("array response").is_empty());
}
#[tokio::test]
async fn get_book_metadata_not_found() {
    // Metadata lookup for an unknown book id must 404.
    let app = setup_app().await;
    let missing = uuid::Uuid::now_v7();
    let uri = format!("/api/v1/books/{missing}/metadata");
    let response = app.oneshot(get(&uri)).await.unwrap();
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn list_books_with_filters() {
    // Filter and pagination query params are accepted even on an empty
    // library.
    let app = setup_app().await;
    let request = get("/api/v1/books?author=Tolkien&limit=10");
    let response = app.oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn list_series_empty() {
    // The series listing endpoint responds OK on an empty library.
    let app = setup_app().await;
    let response = app.oneshot(get("/api/v1/books/series")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn list_authors_empty() {
    // Author listing with explicit pagination responds OK when empty.
    let app = setup_app().await;
    let request = get("/api/v1/books/authors?offset=0&limit=50");
    let response = app.oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn reading_progress_nonexistent_book() {
    // Nonexistent book always returns 404, even for an authenticated user.
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let missing = uuid::Uuid::now_v7();
    let uri = format!("/api/v1/books/{missing}/progress");
    let response = app
        .clone()
        .oneshot(get_authed(&uri, &viewer))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn update_reading_progress_nonexistent_book() {
    // The handler verifies the book exists before writing progress, so an
    // unknown id always yields 404.
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let missing = uuid::Uuid::now_v7();
    let uri = format!("/api/v1/books/{missing}/progress");
    let response = app
        .clone()
        .oneshot(put_json_authed(&uri, r#"{"current_page":42}"#, &viewer))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn reading_list_empty() {
    // An authenticated viewer with no history still gets an OK response.
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let request = get_authed("/api/v1/books/reading-list", &viewer);
    let response = app.clone().oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn import_media_requires_editor() {
    // Viewers are read-only: media import must be rejected with 403.
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let request = post_json_authed(
        "/api/v1/media/import",
        r#"{"path":"/tmp/test.txt"}"#,
        &viewer,
    );
    let response = app.clone().oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn update_media_requires_editor() {
    // Viewers are read-only: PATCHing media metadata must yield 403.
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let missing = uuid::Uuid::now_v7();
    let request = patch_json_authed(
        &format!("/api/v1/media/{missing}"),
        r#"{"title":"new"}"#,
        &viewer,
    );
    let response = app.clone().oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn delete_media_requires_editor() {
    // Viewers are read-only: deleting media must yield 403 before any
    // existence check.
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let missing = uuid::Uuid::now_v7();
    let request = delete_authed(&format!("/api/v1/media/{missing}"), &viewer);
    let response = app.clone().oneshot(request).await.unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,324 @@
use std::{net::SocketAddr, sync::Arc};
use axum::{
body::Body,
extract::ConnectInfo,
http::{Request, StatusCode},
};
use http_body_util::BodyExt;
use pinakes_core::{
cache::CacheLayer,
config::{
AccountsConfig,
AnalyticsConfig,
CloudConfig,
Config,
DirectoryConfig,
EnrichmentConfig,
JobsConfig,
ManagedStorageConfig,
PhotoConfig,
PluginsConfig,
RateLimitConfig,
ScanningConfig,
ServerConfig,
SharingConfig,
SqliteConfig,
StorageBackendType,
StorageConfig,
SyncConfig,
ThumbnailConfig,
TlsConfig,
TranscodingConfig,
TrashConfig,
UiConfig,
UserAccount,
UserRole,
WebhookConfig,
},
jobs::JobQueue,
storage::{StorageBackend, sqlite::SqliteBackend},
};
use tokio::sync::RwLock;
use tower::ServiceExt;
/// Fake socket address for tests (the governor rate limiter needs a
/// `ConnectInfo<SocketAddr>` request extension to key clients by IP).
pub fn test_addr() -> ConnectInfo<SocketAddr> {
    let addr: SocketAddr = "127.0.0.1:9999".parse().unwrap();
    ConnectInfo(addr)
}
/// Build a GET request with `ConnectInfo` for rate limiter
/// compatibility
pub fn get(uri: &str) -> Request<Body> {
let mut req = Request::builder().uri(uri).body(Body::empty()).unwrap();
req.extensions_mut().insert(test_addr());
req
}
/// Build a POST request with `ConnectInfo`
pub fn post_json(uri: &str, body: &str) -> Request<Body> {
let mut req = Request::builder()
.method("POST")
.uri(uri)
.header("content-type", "application/json")
.body(Body::from(body.to_string()))
.unwrap();
req.extensions_mut().insert(test_addr());
req
}
/// Build a GET request with Bearer auth
pub fn get_authed(uri: &str, token: &str) -> Request<Body> {
let mut req = Request::builder()
.uri(uri)
.header("authorization", format!("Bearer {token}"))
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
req
}
/// Build a POST JSON request with Bearer auth
pub fn post_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
let mut req = Request::builder()
.method("POST")
.uri(uri)
.header("content-type", "application/json")
.header("authorization", format!("Bearer {token}"))
.body(Body::from(body.to_string()))
.unwrap();
req.extensions_mut().insert(test_addr());
req
}
/// Build a PUT JSON request with Bearer auth
pub fn put_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
let mut req = Request::builder()
.method("PUT")
.uri(uri)
.header("content-type", "application/json")
.header("authorization", format!("Bearer {token}"))
.body(Body::from(body.to_string()))
.unwrap();
req.extensions_mut().insert(test_addr());
req
}
/// Build a DELETE request with Bearer auth
pub fn delete_authed(uri: &str, token: &str) -> Request<Body> {
let mut req = Request::builder()
.method("DELETE")
.uri(uri)
.header("authorization", format!("Bearer {token}"))
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
req
}
/// Build a PATCH JSON request with Bearer auth
pub fn patch_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
let mut req = Request::builder()
.method("PATCH")
.uri(uri)
.header("content-type", "application/json")
.header("authorization", format!("Bearer {token}"))
.body(Body::from(body.to_string()))
.unwrap();
req.extensions_mut().insert(test_addr());
req
}
/// A minimal test `Config`: in-memory SQLite storage, no media roots,
/// authentication disabled, and every other section at its default.
pub fn default_config() -> Config {
    // In-memory SQLite so each test gets an isolated, throwaway database.
    let storage = StorageConfig {
        backend: StorageBackendType::Sqlite,
        sqlite: Some(SqliteConfig {
            path: ":memory:".into(),
        }),
        postgres: None,
    };
    // No filesystem watching; tests never scan real directories.
    let scanning = ScanningConfig {
        watch: false,
        poll_interval_secs: 300,
        ignore_patterns: vec![],
        import_concurrency: 8,
    };
    // Auth disabled by default; setup_app_with_auth flips this on.
    let server = ServerConfig {
        host: "127.0.0.1".to_string(),
        port: 3000,
        api_key: None,
        tls: TlsConfig::default(),
        authentication_disabled: true,
        cors_enabled: false,
        cors_origins: vec![],
        swagger_ui: false,
    };
    Config {
        storage,
        directories: DirectoryConfig { roots: vec![] },
        scanning,
        server,
        rate_limits: RateLimitConfig::default(),
        ui: UiConfig::default(),
        accounts: AccountsConfig::default(),
        jobs: JobsConfig::default(),
        thumbnails: ThumbnailConfig::default(),
        webhooks: Vec::<WebhookConfig>::new(),
        scheduled_tasks: vec![],
        plugins: PluginsConfig::default(),
        transcoding: TranscodingConfig::default(),
        enrichment: EnrichmentConfig::default(),
        cloud: CloudConfig::default(),
        analytics: AnalyticsConfig::default(),
        photos: PhotoConfig::default(),
        managed_storage: ManagedStorageConfig::default(),
        sync: SyncConfig::default(),
        sharing: SharingConfig::default(),
        trash: TrashConfig::default(),
    }
}
/// Build a router backed by a fresh in-memory SQLite database with
/// authentication disabled (the default test configuration).
pub async fn setup_app() -> axum::Router {
    let db = SqliteBackend::in_memory().expect("in-memory SQLite");
    db.run_migrations().await.expect("migrations");
    let storage = Arc::new(db) as pinakes_core::storage::DynStorageBackend;
    // Jobs are stubbed: the worker closure just spawns an empty task.
    let job_queue =
        JobQueue::new(1, 0, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
    let config = Arc::new(RwLock::new(default_config()));
    let scheduler = pinakes_core::scheduler::TaskScheduler::new(
        job_queue.clone(),
        tokio_util::sync::CancellationToken::new(),
        config.clone(),
        None,
    );
    // All optional services are disabled; only storage, jobs, cache and the
    // scheduler are wired up.
    let state = pinakes_server::state::AppState {
        storage,
        config,
        config_path: None,
        scan_progress: pinakes_core::scan::ScanProgress::new(),
        job_queue,
        cache: Arc::new(CacheLayer::new(60)),
        scheduler: Arc::new(scheduler),
        plugin_manager: None,
        plugin_pipeline: None,
        transcode_service: None,
        managed_storage: None,
        chunked_upload_manager: None,
        session_semaphore: Arc::new(tokio::sync::Semaphore::new(64)),
        webhook_dispatcher: None,
    };
    pinakes_server::app::create_router(state, &RateLimitConfig::default())
}
/// Hash a password for test user accounts.
///
/// # Panics
/// Panics with a descriptive message if the underlying hasher fails
/// (not expected in tests).
pub fn hash_password(password: &str) -> String {
    pinakes_core::users::auth::hash_password(password)
        .expect("password hashing should not fail in tests")
}
/// Set up an app with accounts enabled and three pre-seeded users.
/// Returns (Router, `admin_token`, `editor_token`, `viewer_token`).
pub async fn setup_app_with_auth() -> (axum::Router, String, String, String) {
    let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
    backend.run_migrations().await.expect("migrations");
    let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
    // Hash each password exactly once: password hashing is intentionally
    // slow, and the same hash serves both the database row (so
    // resolve_user_id works) and the config account entry.
    let users = [
        ("admin", "adminpass", UserRole::Admin),
        ("editor", "editorpass", UserRole::Editor),
        ("viewer", "viewerpass", UserRole::Viewer),
    ];
    let mut accounts = Vec::with_capacity(users.len());
    for (username, password, role) in users {
        let password_hash = hash_password(password);
        storage
            .create_user(username, &password_hash, role, None)
            .await
            .expect("create user");
        accounts.push(UserAccount {
            username: username.to_string(),
            password_hash,
            role,
        });
    }
    let mut config = default_config();
    // Enable authentication for these tests.
    config.server.authentication_disabled = false;
    config.accounts.enabled = true;
    config.accounts.users = accounts;
    let job_queue =
        JobQueue::new(1, 0, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
    let config = Arc::new(RwLock::new(config));
    let scheduler = pinakes_core::scheduler::TaskScheduler::new(
        job_queue.clone(),
        tokio_util::sync::CancellationToken::new(),
        config.clone(),
        None,
    );
    let state = pinakes_server::state::AppState {
        storage,
        config,
        config_path: None,
        scan_progress: pinakes_core::scan::ScanProgress::new(),
        job_queue,
        cache: Arc::new(CacheLayer::new(60)),
        scheduler: Arc::new(scheduler),
        plugin_manager: None,
        plugin_pipeline: None,
        transcode_service: None,
        managed_storage: None,
        chunked_upload_manager: None,
        session_semaphore: Arc::new(tokio::sync::Semaphore::new(64)),
        webhook_dispatcher: None,
    };
    let app =
        pinakes_server::app::create_router(state, &RateLimitConfig::default());
    // Log in each user to obtain a bearer token for role-based tests.
    let admin_token = login_user(app.clone(), "admin", "adminpass").await;
    let editor_token = login_user(app.clone(), "editor", "editorpass").await;
    let viewer_token = login_user(app.clone(), "viewer", "viewerpass").await;
    (app, admin_token, editor_token, viewer_token)
}
/// Log in through the API and return the bearer token for `username`.
/// Panics (failing the test) when the login does not return 200 OK.
pub async fn login_user(
    app: axum::Router,
    username: &str,
    password: &str,
) -> String {
    let payload = format!(r#"{{"username":"{username}","password":"{password}"}}"#);
    let response = app
        .oneshot(post_json("/api/v1/auth/login", &payload))
        .await
        .unwrap();
    assert_eq!(
        response.status(),
        StatusCode::OK,
        "login failed for user {username}"
    );
    let bytes = response.into_body().collect().await.unwrap().to_bytes();
    let json: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
    json["token"].as_str().unwrap().to_string()
}
/// Collect a response body and parse it as JSON, yielding `Null` when the
/// body is empty or not valid JSON.
pub async fn response_body(
    response: axum::response::Response,
) -> serde_json::Value {
    let bytes = response.into_body().collect().await.unwrap().to_bytes();
    serde_json::from_slice(&bytes).unwrap_or(serde_json::Value::Null)
}

Some files were not shown because too many files have changed in this diff Show more