pinakes: import in parallel; various UI improvements

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I1eb47cd79cd4145c56af966f6756fe1d6a6a6964
This commit is contained in:
raf 2026-02-03 10:31:20 +03:00
commit 116fe7b059
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
42 changed files with 4316 additions and 316 deletions

127
Cargo.lock generated
View file

@ -126,6 +126,15 @@ dependencies = [
"derive_arbitrary",
]
[[package]]
name = "arc-swap"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ded5f9a03ac8f24d1b8a25101ee812cd32cdc8c50a4c50237de2c4915850e73"
dependencies = [
"rustversion",
]
[[package]]
name = "argon2"
version = "0.5.3"
@ -156,6 +165,17 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "async-lock"
version = "3.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311"
dependencies = [
"event-listener",
"event-listener-strategy",
"pin-project-lite",
]
[[package]]
name = "async-trait"
version = "0.1.89"
@ -297,6 +317,28 @@ dependencies = [
"syn 2.0.114",
]
[[package]]
name = "axum-server"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1ab4a3ec9ea8a657c72d99a03a824af695bd0fb5ec639ccbd9cd3543b41a5f9"
dependencies = [
"arc-swap",
"bytes",
"fs-err",
"http",
"http-body",
"hyper",
"hyper-util",
"pin-project-lite",
"rustls",
"rustls-pemfile",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tower-service",
]
[[package]]
name = "base16"
version = "0.2.1"
@ -699,6 +741,15 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "concurrent-queue"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "const-serialize"
version = "0.7.2"
@ -2006,6 +2057,27 @@ dependencies = [
"serde",
]
[[package]]
name = "event-listener"
version = "5.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab"
dependencies = [
"concurrent-queue",
"parking",
"pin-project-lite",
]
[[package]]
name = "event-listener-strategy"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
dependencies = [
"event-listener",
"pin-project-lite",
]
[[package]]
name = "fallible-iterator"
version = "0.2.0"
@ -2198,6 +2270,16 @@ dependencies = [
"thiserror 1.0.69",
]
[[package]]
name = "fs-err"
version = "3.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf68cef89750956493a66a10f512b9e58d9db21f2a573c079c0bdf1207a54a7"
dependencies = [
"autocfg",
"tokio",
]
[[package]]
name = "fs_extra"
version = "1.3.0"
@ -3911,6 +3993,26 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "moka"
version = "0.12.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ac832c50ced444ef6be0767a008b02c106a909ba79d1d830501e94b96f6b7e"
dependencies = [
"async-lock",
"crossbeam-channel",
"crossbeam-epoch",
"crossbeam-utils",
"equivalent",
"event-listener",
"futures-util",
"parking_lot",
"portable-atomic",
"smallvec",
"tagptr",
"uuid",
]
[[package]]
name = "moxcms"
version = "0.7.11"
@ -4418,6 +4520,12 @@ dependencies = [
"system-deps",
]
[[package]]
name = "parking"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
[[package]]
name = "parking_lot"
version = "0.12.5"
@ -4710,6 +4818,7 @@ dependencies = [
"lopdf",
"matroska",
"mime_guess",
"moka",
"notify",
"pinakes-plugin-api",
"postgres-types",
@ -4753,9 +4862,11 @@ dependencies = [
"anyhow",
"argon2",
"axum",
"axum-server",
"chrono",
"clap",
"governor",
"http",
"http-body-util",
"percent-encoding",
"pinakes-core",
@ -4803,6 +4914,7 @@ dependencies = [
"chrono",
"clap",
"dioxus",
"futures",
"gray_matter",
"pulldown-cmark",
"reqwest",
@ -5747,6 +5859,15 @@ dependencies = [
"security-framework 3.5.1",
]
[[package]]
name = "rustls-pemfile"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "rustls-pki-types"
version = "1.14.0"
@ -6412,6 +6533,12 @@ dependencies = [
"version-compare",
]
[[package]]
name = "tagptr"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
[[package]]
name = "tao"
version = "0.34.5"

View file

@ -76,7 +76,7 @@ winnow = "0.7.14"
# HTTP server
axum = { version = "0.8.8", features = ["macros"] }
tower = "0.5.3"
tower-http = { version = "0.6.8", features = ["cors", "trace"] }
tower-http = { version = "0.6.8", features = ["cors", "trace", "set-header"] }
governor = "0.8.1"
tower_governor = "0.6.0"
@ -93,6 +93,9 @@ dioxus = { version = "0.7.3", features = ["desktop", "router"] }
# Async trait (dyn-compatible async methods)
async-trait = "0.1"
# Async utilities
futures = "0.3"
# Image processing (thumbnails)
image = { version = "0.25.9", default-features = false, features = [
"jpeg",

View file

@ -35,6 +35,7 @@ image = { workspace = true }
tokio-util = { workspace = true }
reqwest = { workspace = true }
argon2 = { workspace = true }
moka = { version = "0.12", features = ["future"] }
# Plugin system
pinakes-plugin-api = { path = "../pinakes-plugin-api" }

View file

@ -1,91 +1,501 @@
use std::collections::HashMap;
//! High-performance caching layer using moka.
//!
//! This module provides a comprehensive caching solution with:
//! - LRU eviction with configurable size limits
//! - TTL-based expiration
//! - Smart cache invalidation
//! - Metrics tracking (hit rate, size, evictions)
//! - Specialized caches for different data types
use std::hash::Hash;
use std::sync::Arc;
use std::time::{Duration, Instant};
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::Duration;
use tokio::sync::RwLock;
use moka::future::Cache as MokaCache;
struct CacheEntry<V> {
value: V,
inserted_at: Instant,
use crate::model::MediaId;
/// Cache statistics for monitoring and debugging.
///
/// A point-in-time snapshot: `hits`/`misses` are cumulative counters,
/// `size` is the entry count at the moment the snapshot was taken.
#[derive(Debug, Clone, Default)]
pub struct CacheStats {
    /// Number of lookups served from the cache.
    pub hits: u64,
    /// Number of lookups that found no entry.
    pub misses: u64,
    /// NOTE(review): currently always 0 — moka does not expose an eviction
    /// counter directly (see `Cache::stats`).
    pub evictions: u64,
    /// Current number of entries in the cache.
    pub size: u64,
}
/// A simple TTL-based in-memory cache with periodic eviction.
pub struct Cache<K, V> {
entries: Arc<RwLock<HashMap<K, CacheEntry<V>>>>,
ttl: Duration,
impl CacheStats {
    /// Fraction of lookups served from the cache, in `[0.0, 1.0]`.
    ///
    /// Returns `0.0` when no lookups have been recorded yet, avoiding a
    /// `0 / 0` division.
    pub fn hit_rate(&self) -> f64 {
        match self.hits + self.misses {
            0 => 0.0,
            total => self.hits as f64 / total as f64,
        }
    }
}
/// Atomic counters for cache metrics.
///
/// Updated with relaxed ordering: these are statistics only and need no
/// synchronization guarantees beyond atomicity of each counter.
struct CacheMetrics {
    /// Cumulative number of cache hits.
    hits: AtomicU64,
    /// Cumulative number of cache misses.
    misses: AtomicU64,
}
impl Default for CacheMetrics {
    /// Start with both counters at zero.
    fn default() -> Self {
        Self {
            hits: AtomicU64::default(),
            misses: AtomicU64::default(),
        }
    }
}
impl CacheMetrics {
    /// Bump the hit counter. Relaxed ordering is enough for metrics.
    fn record_hit(&self) {
        self.hits.fetch_add(1, Ordering::Relaxed);
    }

    /// Bump the miss counter.
    fn record_miss(&self) {
        self.misses.fetch_add(1, Ordering::Relaxed);
    }

    /// Snapshot the counters as `(hits, misses)`.
    ///
    /// The two loads are independent, so the pair may be momentarily
    /// inconsistent under concurrent updates — acceptable for monitoring.
    fn stats(&self) -> (u64, u64) {
        let hits = self.hits.load(Ordering::Relaxed);
        let misses = self.misses.load(Ordering::Relaxed);
        (hits, misses)
    }
}
/// A high-performance cache with LRU eviction and TTL support.
///
/// Thin wrapper around `moka::future::Cache` that adds hit/miss metrics
/// via [`CacheMetrics`]. Eviction and expiry are handled entirely by moka.
pub struct Cache<K, V>
where
    K: Hash + Eq + Send + Sync + 'static,
    V: Clone + Send + Sync + 'static,
{
    /// The underlying moka cache performing eviction and expiry.
    inner: MokaCache<K, V>,
    /// Shared hit/miss counters (see `stats`).
    metrics: Arc<CacheMetrics>,
}
impl<K, V> Cache<K, V>
where
K: Eq + Hash + Clone + Send + Sync + 'static,
K: Hash + Eq + Send + Sync + 'static,
V: Clone + Send + Sync + 'static,
{
pub fn new(ttl: Duration) -> Self {
let cache = Self {
entries: Arc::new(RwLock::new(HashMap::new())),
ttl,
};
/// Create a new cache with the specified TTL and maximum capacity.
pub fn new(ttl: Duration, max_capacity: u64) -> Self {
let inner = MokaCache::builder()
.time_to_live(ttl)
.max_capacity(max_capacity)
.build();
// Spawn periodic eviction task
let entries = cache.entries.clone();
let ttl = cache.ttl;
tokio::spawn(async move {
let mut interval = tokio::time::interval(ttl);
loop {
interval.tick().await;
let now = Instant::now();
let mut map = entries.write().await;
map.retain(|_, entry| now.duration_since(entry.inserted_at) < ttl);
Self {
inner,
metrics: Arc::new(CacheMetrics::default()),
}
});
cache
}
/// Create a new cache with TTL, max capacity, and time-to-idle.
pub fn new_with_idle(ttl: Duration, tti: Duration, max_capacity: u64) -> Self {
let inner = MokaCache::builder()
.time_to_live(ttl)
.time_to_idle(tti)
.max_capacity(max_capacity)
.build();
Self {
inner,
metrics: Arc::new(CacheMetrics::default()),
}
}
/// Get a value from the cache.
pub async fn get(&self, key: &K) -> Option<V> {
let map = self.entries.read().await;
if let Some(entry) = map.get(key)
&& entry.inserted_at.elapsed() < self.ttl
{
return Some(entry.value.clone());
match self.inner.get(key).await {
Some(value) => {
self.metrics.record_hit();
Some(value)
}
None => {
self.metrics.record_miss();
None
}
pub async fn insert(&self, key: K, value: V) {
let mut map = self.entries.write().await;
map.insert(
key,
CacheEntry {
value,
inserted_at: Instant::now(),
},
);
}
}
/// Insert a value into the cache.
pub async fn insert(&self, key: K, value: V) {
self.inner.insert(key, value).await;
}
/// Remove a specific key from the cache.
pub async fn invalidate(&self, key: &K) {
let mut map = self.entries.write().await;
map.remove(key);
self.inner.invalidate(key).await;
}
/// Clear all entries from the cache.
pub async fn invalidate_all(&self) {
self.inner.invalidate_all();
// Run pending tasks to ensure immediate invalidation
self.inner.run_pending_tasks().await;
}
/// Get the current number of entries in the cache.
pub fn entry_count(&self) -> u64 {
self.inner.entry_count()
}
/// Get cache statistics.
pub fn stats(&self) -> CacheStats {
let (hits, misses) = self.metrics.stats();
CacheStats {
hits,
misses,
evictions: 0, // Moka doesn't expose this directly
size: self.entry_count(),
}
}
}
/// Specialized cache for search query results.
pub struct QueryCache {
    /// Serialized results keyed by a hash of (query, offset, limit, sort);
    /// see `make_key` for the key derivation.
    inner: Cache<String, String>,
}
impl QueryCache {
    /// Create a query-result cache with the given TTL and capacity.
    pub fn new(ttl: Duration, max_capacity: u64) -> Self {
        Self {
            inner: Cache::new(ttl, max_capacity),
        }
    }

    /// Generate a cache key from query parameters.
    ///
    /// NOTE(review): the key is a 64-bit hash of (query, offset, limit, sort);
    /// two distinct parameter sets that collide would share an entry. Very
    /// unlikely, but it is a correctness-for-space trade-off to be aware of.
    fn make_key(query: &str, offset: u64, limit: u64, sort: Option<&str>) -> String {
        use std::hash::{DefaultHasher, Hasher};
        let mut hasher = DefaultHasher::new();
        hasher.write(query.as_bytes());
        hasher.write(&offset.to_le_bytes());
        hasher.write(&limit.to_le_bytes());
        if let Some(s) = sort {
            hasher.write(s.as_bytes());
        }
        format!("q:{:016x}", hasher.finish())
    }

    /// Look up a cached result for the given query parameters.
    pub async fn get(
        &self,
        query: &str,
        offset: u64,
        limit: u64,
        sort: Option<&str>,
    ) -> Option<String> {
        let key = Self::make_key(query, offset, limit, sort);
        self.inner.get(&key).await
    }

    /// Store a serialized result under the given query parameters.
    pub async fn insert(
        &self,
        query: &str,
        offset: u64,
        limit: u64,
        sort: Option<&str>,
        result: String,
    ) {
        let key = Self::make_key(query, offset, limit, sort);
        self.inner.insert(key, result).await;
    }

    /// Drop all cached query results.
    pub async fn invalidate_all(&self) {
        self.inner.invalidate_all().await;
    }

    /// Get cache statistics.
    pub fn stats(&self) -> CacheStats {
        self.inner.stats()
    }
}
/// Specialized cache for metadata extraction results.
pub struct MetadataCache {
    /// Serialized metadata JSON keyed by content hash.
    inner: Cache<String, String>,
}
impl MetadataCache {
    /// Create a metadata cache with the given TTL and capacity.
    pub fn new(ttl: Duration, max_capacity: u64) -> Self {
        let inner = Cache::new(ttl, max_capacity);
        Self { inner }
    }

    /// Fetch cached metadata JSON for a content hash, if present.
    pub async fn get(&self, content_hash: &str) -> Option<String> {
        let key = content_hash.to_string();
        self.inner.get(&key).await
    }

    /// Cache metadata JSON under a content hash.
    pub async fn insert(&self, content_hash: &str, metadata_json: String) {
        let key = content_hash.to_string();
        self.inner.insert(key, metadata_json).await;
    }

    /// Drop the cached metadata for a content hash.
    pub async fn invalidate(&self, content_hash: &str) {
        let key = content_hash.to_string();
        self.inner.invalidate(&key).await;
    }

    /// Get cache statistics.
    pub fn stats(&self) -> CacheStats {
        self.inner.stats()
    }
}
/// Specialized cache for media item data.
pub struct MediaCache {
    /// Serialized media-item JSON keyed by the media id's string form.
    inner: Cache<String, String>,
}
impl MediaCache {
    /// Create a media-item cache with the given TTL and capacity.
    pub fn new(ttl: Duration, max_capacity: u64) -> Self {
        let inner = Cache::new(ttl, max_capacity);
        Self { inner }
    }

    /// Fetch the cached JSON for a media item, if present.
    pub async fn get(&self, media_id: MediaId) -> Option<String> {
        let key = media_id.to_string();
        self.inner.get(&key).await
    }

    /// Cache the serialized JSON for a media item.
    pub async fn insert(&self, media_id: MediaId, item_json: String) {
        let key = media_id.to_string();
        self.inner.insert(key, item_json).await;
    }

    /// Drop the cached entry for a media item.
    pub async fn invalidate(&self, media_id: MediaId) {
        let key = media_id.to_string();
        self.inner.invalidate(&key).await;
    }

    /// Drop all cached media items.
    pub async fn invalidate_all(&self) {
        self.inner.invalidate_all().await;
    }

    /// Get cache statistics.
    pub fn stats(&self) -> CacheStats {
        self.inner.stats()
    }
}
/// Configuration for the cache layer.
///
/// All TTLs are expressed in seconds; see the `Default` impl for the
/// default values.
#[derive(Debug, Clone)]
pub struct CacheConfig {
    /// TTL for response cache in seconds
    pub response_ttl_secs: u64,
    /// Maximum number of cached responses
    pub response_max_entries: u64,
    /// TTL for query cache in seconds
    pub query_ttl_secs: u64,
    /// Maximum number of cached query results
    pub query_max_entries: u64,
    /// TTL for metadata cache in seconds
    pub metadata_ttl_secs: u64,
    /// Maximum number of cached metadata entries
    pub metadata_max_entries: u64,
    /// TTL for media cache in seconds
    pub media_ttl_secs: u64,
    /// Maximum number of cached media items
    pub media_max_entries: u64,
}
impl Default for CacheConfig {
    fn default() -> Self {
        Self {
            response_ttl_secs: 60, // 1 minute
            response_max_entries: 1000,
            query_ttl_secs: 300, // 5 minutes
            query_max_entries: 500,
            metadata_ttl_secs: 3600, // 1 hour
            metadata_max_entries: 10000,
            media_ttl_secs: 300, // 5 minutes
            media_max_entries: 5000,
        }
    }
}
/// Application-level cache layer wrapping multiple specialized caches.
pub struct CacheLayer {
    /// Cache for serialized API responses
    pub responses: Cache<String, String>,
    /// Cache for search query results
    pub queries: QueryCache,
    /// Cache for metadata extraction results
    pub metadata: MetadataCache,
    /// Cache for individual media items
    pub media: MediaCache,
    /// Configuration this layer was built with (see `with_config`).
    config: CacheConfig,
}
impl CacheLayer {
/// Create a new cache layer with the specified TTL (using defaults for other settings).
pub fn new(ttl_secs: u64) -> Self {
let ttl = Duration::from_secs(ttl_secs);
Self {
responses: Cache::new(ttl),
let config = CacheConfig {
response_ttl_secs: ttl_secs,
..Default::default()
};
Self::with_config(config)
}
/// Create a new cache layer with full configuration.
pub fn with_config(config: CacheConfig) -> Self {
Self {
responses: Cache::new(
Duration::from_secs(config.response_ttl_secs),
config.response_max_entries,
),
queries: QueryCache::new(
Duration::from_secs(config.query_ttl_secs),
config.query_max_entries,
),
metadata: MetadataCache::new(
Duration::from_secs(config.metadata_ttl_secs),
config.metadata_max_entries,
),
media: MediaCache::new(
Duration::from_secs(config.media_ttl_secs),
config.media_max_entries,
),
config,
}
}
/// Invalidate all caches related to a media item update.
pub async fn invalidate_for_media_update(&self, media_id: MediaId) {
self.media.invalidate(media_id).await;
// Query cache should be invalidated as search results may change
self.queries.invalidate_all().await;
}
/// Invalidate all caches related to a media item deletion.
pub async fn invalidate_for_media_delete(&self, media_id: MediaId) {
self.media.invalidate(media_id).await;
self.queries.invalidate_all().await;
}
/// Invalidate all caches (useful after bulk imports or major changes).
pub async fn invalidate_all(&self) {
self.responses.invalidate_all().await;
self.queries.invalidate_all().await;
self.media.invalidate_all().await;
// Keep metadata cache as it's keyed by content hash which doesn't change
}
/// Get aggregated statistics for all caches.
pub fn stats(&self) -> CacheLayerStats {
CacheLayerStats {
responses: self.responses.stats(),
queries: self.queries.stats(),
metadata: self.metadata.stats(),
media: self.media.stats(),
}
}
/// Get the current configuration.
pub fn config(&self) -> &CacheConfig {
&self.config
}
}
/// Aggregated statistics for the entire cache layer.
#[derive(Debug, Clone)]
pub struct CacheLayerStats {
    /// Stats for the API-response cache.
    pub responses: CacheStats,
    /// Stats for the query-result cache.
    pub queries: CacheStats,
    /// Stats for the metadata cache.
    pub metadata: CacheStats,
    /// Stats for the media-item cache.
    pub media: CacheStats,
}
impl CacheLayerStats {
    /// Get the overall hit rate across all caches.
    ///
    /// Returns `0.0` when no lookups have been recorded anywhere.
    pub fn overall_hit_rate(&self) -> f64 {
        let caches = [&self.responses, &self.queries, &self.metadata, &self.media];
        let total_hits: u64 = caches.iter().map(|s| s.hits).sum();
        let total_misses: u64 = caches.iter().map(|s| s.misses).sum();
        let total_requests = total_hits + total_misses;
        if total_requests == 0 {
            0.0
        } else {
            total_hits as f64 / total_requests as f64
        }
    }

    /// Get the total number of entries across all caches.
    pub fn total_entries(&self) -> u64 {
        [&self.responses, &self.queries, &self.metadata, &self.media]
            .iter()
            .map(|s| s.size)
            .sum()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Insert/get/miss/invalidate round-trip on the generic cache.
    #[tokio::test]
    async fn test_cache_basic_operations() {
        let cache: Cache<String, String> = Cache::new(Duration::from_secs(60), 100);

        // Insert and get
        cache.insert("key1".to_string(), "value1".to_string()).await;
        assert_eq!(
            cache.get(&"key1".to_string()).await,
            Some("value1".to_string())
        );

        // Miss
        assert_eq!(cache.get(&"key2".to_string()).await, None);

        // Invalidate
        cache.invalidate(&"key1".to_string()).await;
        assert_eq!(cache.get(&"key1".to_string()).await, None);
    }

    /// Hit/miss counters feed the hit-rate computation.
    #[tokio::test]
    async fn test_cache_stats() {
        let cache: Cache<String, String> = Cache::new(Duration::from_secs(60), 100);

        cache.insert("key1".to_string(), "value1".to_string()).await;
        let _ = cache.get(&"key1".to_string()).await; // hit
        let _ = cache.get(&"key2".to_string()).await; // miss

        let stats = cache.stats();
        assert_eq!(stats.hits, 1);
        assert_eq!(stats.misses, 1);
        // One hit out of two lookups -> 0.5 (float compare with tolerance).
        assert!((stats.hit_rate() - 0.5).abs() < 0.01);
    }

    /// Cache keys must incorporate every query parameter.
    #[tokio::test]
    async fn test_query_cache() {
        let cache = QueryCache::new(Duration::from_secs(60), 100);

        cache
            .insert("test query", 0, 10, Some("name"), "results".to_string())
            .await;

        assert_eq!(
            cache.get("test query", 0, 10, Some("name")).await,
            Some("results".to_string())
        );

        // Different parameters should miss
        assert_eq!(cache.get("test query", 10, 10, Some("name")).await, None);
    }

    /// Delete-invalidation removes the cached media item.
    #[tokio::test]
    async fn test_cache_layer() {
        let layer = CacheLayer::new(60);
        let media_id = MediaId::new();

        layer.media.insert(media_id, "{}".to_string()).await;
        assert!(layer.media.get(media_id).await.is_some());

        layer.invalidate_for_media_delete(media_id).await;
        assert!(layer.media.get(media_id).await.is_none());
    }
}

View file

@ -484,6 +484,85 @@ pub struct ServerConfig {
/// If set, all requests (except /health) must include `Authorization: Bearer <key>`.
/// Can also be set via `PINAKES_API_KEY` environment variable.
pub api_key: Option<String>,
/// TLS/HTTPS configuration
#[serde(default)]
pub tls: TlsConfig,
}
/// TLS/HTTPS configuration for secure connections
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TlsConfig {
    /// Enable TLS (HTTPS). When true, `cert_path` and `key_path` are
    /// required and must exist (see `validate`).
    #[serde(default)]
    pub enabled: bool,
    /// Path to the TLS certificate file (PEM format)
    #[serde(default)]
    pub cert_path: Option<PathBuf>,
    /// Path to the TLS private key file (PEM format)
    #[serde(default)]
    pub key_path: Option<PathBuf>,
    /// Enable HTTP to HTTPS redirect (starts a second listener on http_port)
    #[serde(default)]
    pub redirect_http: bool,
    /// Port for HTTP redirect listener (default: 80)
    #[serde(default = "default_http_port")]
    pub http_port: u16,
    /// Enable HSTS (HTTP Strict Transport Security) header
    #[serde(default = "default_true")]
    pub hsts_enabled: bool,
    /// HSTS max-age in seconds (default: 1 year)
    #[serde(default = "default_hsts_max_age")]
    pub hsts_max_age: u64,
}
/// Default port for the plain-HTTP redirect listener (the standard HTTP port).
fn default_http_port() -> u16 {
    80
}
/// Default HSTS `max-age`: one year, expressed in seconds.
fn default_hsts_max_age() -> u64 {
    60 * 60 * 24 * 365
}
impl Default for TlsConfig {
    /// TLS is off by default; HSTS defaults to enabled with a one-year
    /// max-age so it takes effect as soon as TLS is turned on.
    fn default() -> Self {
        Self {
            enabled: false,
            cert_path: None,
            key_path: None,
            redirect_http: false,
            http_port: default_http_port(),
            hsts_enabled: true,
            hsts_max_age: default_hsts_max_age(),
        }
    }
}
impl TlsConfig {
/// Validate TLS configuration
pub fn validate(&self) -> Result<(), String> {
if self.enabled {
if self.cert_path.is_none() {
return Err("TLS enabled but cert_path not specified".into());
}
if self.key_path.is_none() {
return Err("TLS enabled but key_path not specified".into());
}
if let Some(ref cert_path) = self.cert_path {
if !cert_path.exists() {
return Err(format!(
"TLS certificate file not found: {}",
cert_path.display()
));
}
}
if let Some(ref key_path) = self.key_path {
if !key_path.exists() {
return Err(format!("TLS key file not found: {}", key_path.display()));
}
}
}
Ok(())
}
}
impl Config {
@ -564,6 +643,8 @@ impl Config {
if self.scanning.import_concurrency == 0 || self.scanning.import_concurrency > 256 {
return Err("import_concurrency must be between 1 and 256".into());
}
// Validate TLS configuration
self.server.tls.validate()?;
Ok(())
}
@ -609,6 +690,7 @@ impl Default for Config {
host: "127.0.0.1".to_string(),
port: 3000,
api_key: None,
tls: TlsConfig::default(),
},
ui: UiConfig::default(),
accounts: AccountsConfig::default(),

View file

@ -48,6 +48,9 @@ pub enum PinakesError {
#[error("authorization error: {0}")]
Authorization(String),
#[error("path not allowed: {0}")]
PathNotAllowed(String),
}
impl From<rusqlite::Error> for PinakesError {

View file

@ -1,4 +1,5 @@
use std::path::{Path, PathBuf};
use std::time::SystemTime;
use tracing::info;
@ -14,9 +15,29 @@ use crate::thumbnail;
pub struct ImportResult {
    /// Id of the imported (or pre-existing) media item.
    pub media_id: MediaId,
    /// True if a media item with the same content hash already existed.
    pub was_duplicate: bool,
    /// True if the file was skipped because it hasn't changed since last scan
    /// (mtime matched during an incremental import).
    pub was_skipped: bool,
    /// Canonicalized path of the file this result refers to.
    pub path: PathBuf,
}
/// Options for import operations
#[derive(Debug, Clone, Default)]
pub struct ImportOptions {
    /// Skip files that haven't changed since last scan (based on mtime)
    pub incremental: bool,
    /// Force re-import even if mtime hasn't changed; overrides `incremental`.
    pub force: bool,
}
/// Get the modification time of a file as a Unix timestamp (whole seconds).
///
/// Returns `None` if the file cannot be stat'ed, has no modification time,
/// or its mtime predates the Unix epoch.
fn get_file_mtime(path: &Path) -> Option<i64> {
    let metadata = std::fs::metadata(path).ok()?;
    let modified = metadata.modified().ok()?;
    let since_epoch = modified.duration_since(SystemTime::UNIX_EPOCH).ok()?;
    Some(since_epoch.as_secs() as i64)
}
/// Check that a canonicalized path falls under at least one configured root directory.
/// If no roots are configured, all paths are allowed (for ad-hoc imports).
pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) -> Result<()> {
@ -38,6 +59,15 @@ pub async fn validate_path_in_roots(storage: &DynStorageBackend, path: &Path) ->
}
/// Import a single file using default options (non-incremental, no force).
///
/// Thin wrapper over `import_file_with_options`.
pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<ImportResult> {
    import_file_with_options(storage, path, &ImportOptions::default()).await
}
/// Import a file with configurable options for incremental scanning
pub async fn import_file_with_options(
storage: &DynStorageBackend,
path: &Path,
options: &ImportOptions,
) -> Result<ImportResult> {
let path = path.canonicalize()?;
if !path.exists() {
@ -49,12 +79,38 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
let media_type = MediaType::from_path(&path)
.ok_or_else(|| PinakesError::UnsupportedMediaType(path.clone()))?;
let current_mtime = get_file_mtime(&path);
// Check for incremental scan: skip if file hasn't changed
if options.incremental && !options.force {
if let Some(existing) = storage.get_media_by_path(&path).await? {
// Compare mtimes - if they match, skip this file
if let (Some(stored_mtime), Some(curr_mtime)) = (existing.file_mtime, current_mtime) {
if stored_mtime == curr_mtime {
return Ok(ImportResult {
media_id: existing.id,
was_duplicate: false,
was_skipped: true,
path: path.clone(),
});
}
}
}
}
let content_hash = compute_file_hash(&path).await?;
if let Some(existing) = storage.get_media_by_hash(&content_hash).await? {
// Update the mtime even for duplicates so incremental scan works
if current_mtime.is_some() && existing.file_mtime != current_mtime {
let mut updated = existing.clone();
updated.file_mtime = current_mtime;
let _ = storage.update_media(&updated).await;
}
return Ok(ImportResult {
media_id: existing.id,
was_duplicate: true,
was_skipped: false,
path: path.clone(),
});
}
@ -109,6 +165,7 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
description: extracted.description,
thumbnail_path: thumb_path,
custom_fields: std::collections::HashMap::new(),
file_mtime: current_mtime,
created_at: now,
updated_at: now,
};
@ -144,6 +201,7 @@ pub async fn import_file(storage: &DynStorageBackend, path: &Path) -> Result<Imp
Ok(ImportResult {
media_id,
was_duplicate: false,
was_skipped: false,
path: path.clone(),
})
}
@ -180,7 +238,13 @@ pub async fn import_directory(
dir: &Path,
ignore_patterns: &[String],
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
import_directory_with_concurrency(storage, dir, ignore_patterns, DEFAULT_IMPORT_CONCURRENCY)
import_directory_with_options(
storage,
dir,
ignore_patterns,
DEFAULT_IMPORT_CONCURRENCY,
&ImportOptions::default(),
)
.await
}
@ -189,10 +253,29 @@ pub async fn import_directory_with_concurrency(
dir: &Path,
ignore_patterns: &[String],
concurrency: usize,
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
import_directory_with_options(
storage,
dir,
ignore_patterns,
concurrency,
&ImportOptions::default(),
)
.await
}
/// Import a directory with full options including incremental scanning support
pub async fn import_directory_with_options(
storage: &DynStorageBackend,
dir: &Path,
ignore_patterns: &[String],
concurrency: usize,
options: &ImportOptions,
) -> Result<Vec<std::result::Result<ImportResult, PinakesError>>> {
let concurrency = concurrency.clamp(1, 256);
let dir = dir.to_path_buf();
let patterns = ignore_patterns.to_vec();
let options = options.clone();
let entries: Vec<PathBuf> = {
let dir = dir.clone();
@ -213,15 +296,14 @@ pub async fn import_directory_with_concurrency(
let mut results = Vec::with_capacity(entries.len());
let mut join_set = tokio::task::JoinSet::new();
let mut pending_paths: Vec<PathBuf> = Vec::new();
for entry_path in entries {
let storage = storage.clone();
let path = entry_path.clone();
pending_paths.push(entry_path);
let opts = options.clone();
join_set.spawn(async move {
let result = import_file(&storage, &path).await;
let result = import_file_with_options(&storage, &path, &opts).await;
(path, result)
});

View file

@ -231,4 +231,41 @@ impl JobQueue {
job.updated_at = Utc::now();
}
}
/// Get job queue statistics
///
/// Counts jobs by status under a read lock. Cancelled jobs contribute to
/// `total` but are not counted in any status bucket.
pub async fn stats(&self) -> JobQueueStats {
    let jobs = self.jobs.read().await;
    let mut stats = JobQueueStats {
        total: jobs.len(),
        ..Default::default()
    };
    for job in jobs.values() {
        match job.status {
            JobStatus::Pending => stats.pending += 1,
            JobStatus::Running { .. } => stats.running += 1,
            JobStatus::Completed { .. } => stats.completed += 1,
            JobStatus::Failed { .. } => stats.failed += 1,
            JobStatus::Cancelled => {} // Don't count cancelled jobs
        }
    }
    stats
}
}
/// Statistics about the job queue
#[derive(Debug, Clone, Default)]
pub struct JobQueueStats {
    /// Jobs waiting to be executed.
    pub pending: usize,
    /// Jobs currently executing.
    pub running: usize,
    /// Jobs that finished successfully.
    pub completed: usize,
    /// Jobs that ended in error.
    pub failed: usize,
    /// Total jobs tracked, including cancelled ones (which are not counted
    /// in any bucket above).
    pub total: usize,
}

View file

@ -15,6 +15,7 @@ pub mod media_type;
pub mod metadata;
pub mod model;
pub mod opener;
pub mod path_validation;
pub mod playlists;
pub mod plugin;
pub mod scan;

View file

@ -61,6 +61,8 @@ pub struct MediaItem {
pub description: Option<String>,
pub thumbnail_path: Option<PathBuf>,
pub custom_fields: HashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental scanning
pub file_mtime: Option<i64>,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
@ -126,6 +128,7 @@ pub struct AuditEntry {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum AuditAction {
// Media actions
Imported,
Updated,
Deleted,
@ -135,11 +138,50 @@ pub enum AuditAction {
RemovedFromCollection,
Opened,
Scanned,
// Authentication actions
LoginSuccess,
LoginFailed,
Logout,
SessionExpired,
// Authorization actions
PermissionDenied,
RoleChanged,
LibraryAccessGranted,
LibraryAccessRevoked,
// User management
UserCreated,
UserUpdated,
UserDeleted,
// Plugin actions
PluginInstalled,
PluginUninstalled,
PluginEnabled,
PluginDisabled,
// Configuration actions
ConfigChanged,
RootDirectoryAdded,
RootDirectoryRemoved,
// Social/Sharing actions
ShareLinkCreated,
ShareLinkAccessed,
// System actions
DatabaseVacuumed,
DatabaseCleared,
ExportCompleted,
IntegrityCheckCompleted,
}
impl fmt::Display for AuditAction {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s = match self {
// Media actions
Self::Imported => "imported",
Self::Updated => "updated",
Self::Deleted => "deleted",
@ -149,6 +191,44 @@ impl fmt::Display for AuditAction {
Self::RemovedFromCollection => "removed_from_collection",
Self::Opened => "opened",
Self::Scanned => "scanned",
// Authentication actions
Self::LoginSuccess => "login_success",
Self::LoginFailed => "login_failed",
Self::Logout => "logout",
Self::SessionExpired => "session_expired",
// Authorization actions
Self::PermissionDenied => "permission_denied",
Self::RoleChanged => "role_changed",
Self::LibraryAccessGranted => "library_access_granted",
Self::LibraryAccessRevoked => "library_access_revoked",
// User management
Self::UserCreated => "user_created",
Self::UserUpdated => "user_updated",
Self::UserDeleted => "user_deleted",
// Plugin actions
Self::PluginInstalled => "plugin_installed",
Self::PluginUninstalled => "plugin_uninstalled",
Self::PluginEnabled => "plugin_enabled",
Self::PluginDisabled => "plugin_disabled",
// Configuration actions
Self::ConfigChanged => "config_changed",
Self::RootDirectoryAdded => "root_directory_added",
Self::RootDirectoryRemoved => "root_directory_removed",
// Social/Sharing actions
Self::ShareLinkCreated => "share_link_created",
Self::ShareLinkAccessed => "share_link_accessed",
// System actions
Self::DatabaseVacuumed => "database_vacuumed",
Self::DatabaseCleared => "database_cleared",
Self::ExportCompleted => "export_completed",
Self::IntegrityCheckCompleted => "integrity_check_completed",
};
write!(f, "{s}")
}

View file

@ -0,0 +1,310 @@
//! Path validation utilities to prevent path traversal attacks.
//!
//! This module provides functions to validate and sanitize file paths,
//! ensuring they remain within allowed root directories and don't contain
//! malicious path traversal sequences.
use std::path::{Path, PathBuf};
use crate::error::{PinakesError, Result};
/// Validates that a path is within one of the allowed root directories.
///
/// The path is canonicalized (resolving symlinks and `..` components) and
/// the result must lie under at least one canonicalized allowed root.
///
/// # Security
///
/// This defends against path-traversal attacks such as:
/// - `../../../etc/passwd`
/// - `/media/../../../etc/passwd`
/// - Symlinks pointing outside allowed roots
///
/// # Arguments
///
/// * `path` - The path to validate
/// * `allowed_roots` - List of allowed root directories
///
/// # Returns
///
/// The canonicalized path if valid, or a `PathNotAllowed` error when no
/// roots are configured, the path does not exist, canonicalization fails,
/// or the canonical path escapes every root.
///
/// # Example
///
/// ```no_run
/// use std::path::PathBuf;
/// use pinakes_core::path_validation::validate_path;
///
/// let allowed_roots = vec![PathBuf::from("/media"), PathBuf::from("/home/user/documents")];
/// let path = PathBuf::from("/media/music/song.mp3");
///
/// let validated = validate_path(&path, &allowed_roots).unwrap();
/// ```
pub fn validate_path(path: &Path, allowed_roots: &[PathBuf]) -> Result<PathBuf> {
    // With no configured roots nothing can be allowed; fail closed.
    if allowed_roots.is_empty() {
        return Err(PinakesError::PathNotAllowed(
            "no allowed roots configured".to_string(),
        ));
    }

    // A nonexistent path cannot be canonicalized; report it up front.
    if !path.exists() {
        return Err(PinakesError::PathNotAllowed(format!(
            "path does not exist: {}",
            path.display()
        )));
    }

    // Resolve symlinks and relative components before comparing prefixes.
    let canonical = path.canonicalize().map_err(|e| {
        PinakesError::PathNotAllowed(format!(
            "failed to canonicalize path {}: {}",
            path.display(),
            e
        ))
    })?;

    // Roots are canonicalized too; roots that cannot be resolved (e.g. a
    // deleted directory) are silently skipped.
    let canonical_roots: Vec<PathBuf> = allowed_roots
        .iter()
        .filter_map(|root| root.canonicalize().ok())
        .collect();
    if canonical_roots.is_empty() {
        return Err(PinakesError::PathNotAllowed(
            "no accessible allowed roots".to_string(),
        ));
    }

    // `Path::starts_with` compares whole components, so `/media-evil` does
    // not match the root `/media`.
    let inside_root = canonical_roots
        .iter()
        .any(|root| canonical.starts_with(root));
    if inside_root {
        Ok(canonical)
    } else {
        Err(PinakesError::PathNotAllowed(format!(
            "path {} is outside allowed roots",
            path.display()
        )))
    }
}
/// Validates a path relative to a single root directory.
///
/// Thin convenience wrapper around [`validate_path`] for callers that only
/// deal with one allowed root.
pub fn validate_path_single_root(path: &Path, root: &Path) -> Result<PathBuf> {
    let roots = [root.to_path_buf()];
    validate_path(path, &roots)
}
/// Checks if a path appears to contain traversal sequences without canonicalizing.
///
/// This is a quick pre-check that can reject obviously malicious paths without
/// hitting the filesystem. It should be used in addition to `validate_path`,
/// not as a replacement.
///
/// Rejected patterns:
/// - `..` anywhere (traversal)
/// - `//` (ambiguous double separators)
/// - leading `/` or `\` (absolute / Windows-style paths, mirroring `safe_join`)
/// - embedded NUL bytes
/// - 50 or more `/` separators (depth limit)
///
/// # Arguments
///
/// * `path` - The path string to check
///
/// # Returns
///
/// `true` if the path appears safe (no obvious traversal sequences),
/// `false` if it contains suspicious patterns.
pub fn path_looks_safe(path: &str) -> bool {
    // Reject paths with obvious traversal patterns
    !path.contains("..")
        && !path.contains("//")
        // NUL bytes are never valid in a path and often signal injection.
        && !path.contains('\0')
        && !path.starts_with('/')
        // Reject Windows/UNC-style absolute paths too, consistent with the
        // absolute-path check in `safe_join`.
        && !path.starts_with('\\')
        && path.chars().filter(|c| *c == '/').count() < 50 // Reasonable depth limit
}
/// Sanitizes a filename by removing or replacing dangerous characters.
///
/// This removes:
/// - Path separators (`/`, `\`)
/// - Null bytes
/// - Control characters
/// - Leading dots and whitespace (to prevent hidden files)
///
/// # Arguments
///
/// * `filename` - The filename to sanitize
///
/// # Returns
///
/// A sanitized filename safe for use on most filesystems. Falls back to
/// `"unnamed"` when nothing survives sanitization.
pub fn sanitize_filename(filename: &str) -> String {
    // Keep only alphanumerics (including unicode letters) and a small
    // allow-list of punctuation; everything else — separators, NULs,
    // control characters — is dropped.
    let filtered: String = filename
        .chars()
        .filter(|c| {
            c.is_alphanumeric() || matches!(*c, '-' | '_' | '.' | ' ' | '(' | ')' | '[' | ']')
        })
        .collect();
    // Strip the leading mix of dots and whitespace in a single pass. Doing
    // dots and whitespace together closes the gap where an input like
    // " .hidden" would keep its dot if dots were trimmed before whitespace,
    // re-creating a hidden file. Trailing whitespace is dropped as well.
    let cleaned = filtered
        .trim_start_matches(|c: char| c == '.' || c.is_whitespace())
        .trim_end();
    // Ensure the filename isn't empty after sanitization
    if cleaned.is_empty() {
        "unnamed".to_string()
    } else {
        cleaned.to_string()
    }
}
/// Joins a base path with a relative path safely.
///
/// This ensures the resulting path doesn't escape the base directory
/// through use of `..` or absolute paths in the relative component.
///
/// # Arguments
///
/// * `base` - The base directory (must exist)
/// * `relative` - The relative path to join
///
/// # Returns
///
/// The joined path if safe, or an error if the relative path would escape
/// the base or the base cannot be resolved.
pub fn safe_join(base: &Path, relative: &str) -> Result<PathBuf> {
    // Reject absolute paths in the relative component (both Unix `/` and
    // Windows `\` style).
    if relative.starts_with('/') || relative.starts_with('\\') {
        return Err(PinakesError::PathNotAllowed(
            "relative path cannot be absolute".to_string(),
        ));
    }
    // Reject paths with `..` traversal up front.
    if relative.contains("..") {
        return Err(PinakesError::PathNotAllowed(
            "relative path cannot contain '..'".to_string(),
        ));
    }
    // Canonicalizing the base both resolves symlinks and verifies that the
    // base directory actually exists; the resolved value itself is not
    // needed afterwards (the returned path intentionally keeps the caller's
    // `base` spelling, as `base.join(relative)`).
    base.canonicalize().map_err(|e| {
        PinakesError::PathNotAllowed(format!(
            "failed to canonicalize base {}: {}",
            base.display(),
            e
        ))
    })?;
    // The joined path might not exist yet, so it can't be canonicalized.
    // Instead, validate each component of the relative path: only plain
    // names and `.` are acceptable. `ParentDir` is kept as defense in depth
    // even though the string check above already rejects `..`; prefix/root
    // components (e.g. `C:\` on Windows) are rejected as invalid.
    use std::path::Component;
    for component in Path::new(relative).components() {
        match component {
            Component::Normal(_) | Component::CurDir => {}
            Component::ParentDir => {
                return Err(PinakesError::PathNotAllowed(
                    "path traversal detected".to_string(),
                ));
            }
            _ => {
                return Err(PinakesError::PathNotAllowed(
                    "invalid path component".to_string(),
                ));
            }
        }
    }
    Ok(base.join(relative))
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// Build a temp tree with an `allowed` and a `forbidden` subtree,
    /// each containing one file.
    fn setup_test_dirs() -> TempDir {
        let temp = TempDir::new().unwrap();
        for sub in ["allowed", "forbidden"] {
            fs::create_dir_all(temp.path().join(sub)).unwrap();
        }
        fs::write(temp.path().join("allowed").join("file.txt"), "test").unwrap();
        fs::write(temp.path().join("forbidden").join("secret.txt"), "secret").unwrap();
        temp
    }

    #[test]
    fn test_validate_path_allowed() {
        let temp = setup_test_dirs();
        let roots = vec![temp.path().join("allowed")];
        let target = temp.path().join("allowed/file.txt");
        assert!(validate_path(&target, &roots).is_ok());
    }

    #[test]
    fn test_validate_path_forbidden() {
        let temp = setup_test_dirs();
        let roots = vec![temp.path().join("allowed")];
        let target = temp.path().join("forbidden/secret.txt");
        assert!(validate_path(&target, &roots).is_err());
    }

    #[test]
    fn test_validate_path_traversal() {
        let temp = setup_test_dirs();
        let roots = vec![temp.path().join("allowed")];
        // `..` hops out of the allowed subtree; canonicalization must catch it.
        let target = temp.path().join("allowed/../forbidden/secret.txt");
        assert!(validate_path(&target, &roots).is_err());
    }

    #[test]
    fn test_sanitize_filename() {
        let cases = [
            ("normal.txt", "normal.txt"),
            ("../../../etc/passwd", "etcpasswd"),
            (".hidden", "hidden"),
            ("file<with>bad:chars", "filewithbadchars"),
            ("", "unnamed"),
            ("...", "unnamed"),
        ];
        for (input, expected) in cases {
            assert_eq!(sanitize_filename(input), expected);
        }
    }

    #[test]
    fn test_path_looks_safe() {
        assert!(path_looks_safe("normal/path/file.txt"));
        assert!(!path_looks_safe("../../../etc/passwd"));
        assert!(!path_looks_safe("path//double/slash"));
    }

    #[test]
    fn test_safe_join() {
        let temp = TempDir::new().unwrap();
        let base = temp.path();
        // Valid join, traversal attempt, absolute-path attempt.
        assert!(safe_join(base, "subdir/file.txt").is_ok());
        assert!(safe_join(base, "../etc/passwd").is_err());
        assert!(safe_join(base, "/etc/passwd").is_err());
    }
}

View file

@ -14,9 +14,20 @@ pub struct ScanStatus {
pub scanning: bool,
pub files_found: usize,
pub files_processed: usize,
/// Number of files skipped because they haven't changed (incremental scan)
pub files_skipped: usize,
pub errors: Vec<String>,
}
/// Options for scanning operations
#[derive(Debug, Clone, Default)]
pub struct ScanOptions {
    /// Use incremental scanning (skip unchanged files based on mtime)
    pub incremental: bool,
    /// Force full rescan even for incremental mode
    // When set, this overrides `incremental`: the scan passes
    // incremental=false to the importer and every file is re-imported.
    pub force_full: bool,
}
/// Shared scan progress that can be read by the status endpoint while a scan runs.
#[derive(Clone)]
pub struct ScanProgress {
@ -50,6 +61,7 @@ impl ScanProgress {
scanning: self.is_scanning.load(Ordering::Acquire),
files_found: self.files_found.load(Ordering::Acquire),
files_processed: self.files_processed.load(Ordering::Acquire),
files_skipped: 0, // Not tracked in real-time progress
errors,
}
}
@ -89,7 +101,20 @@ pub async fn scan_directory(
dir: &Path,
ignore_patterns: &[String],
) -> Result<ScanStatus> {
scan_directory_with_progress(storage, dir, ignore_patterns, None).await
scan_directory_with_options(storage, dir, ignore_patterns, None, &ScanOptions::default()).await
}
/// Scan a directory with incremental scanning support
pub async fn scan_directory_incremental(
    storage: &DynStorageBackend,
    dir: &Path,
    ignore_patterns: &[String],
) -> Result<ScanStatus> {
    // Incremental mode, no forced full rescan.
    scan_directory_with_options(
        storage,
        dir,
        ignore_patterns,
        None,
        &ScanOptions {
            incremental: true,
            force_full: false,
        },
    )
    .await
}
pub async fn scan_directory_with_progress(
@ -98,20 +123,62 @@ pub async fn scan_directory_with_progress(
ignore_patterns: &[String],
progress: Option<&ScanProgress>,
) -> Result<ScanStatus> {
info!(dir = %dir.display(), "starting directory scan");
scan_directory_with_options(
storage,
dir,
ignore_patterns,
progress,
&ScanOptions::default(),
)
.await
}
/// Scan a directory with full options including progress tracking and incremental mode
pub async fn scan_directory_with_options(
storage: &DynStorageBackend,
dir: &Path,
ignore_patterns: &[String],
progress: Option<&ScanProgress>,
scan_options: &ScanOptions,
) -> Result<ScanStatus> {
info!(
dir = %dir.display(),
incremental = scan_options.incremental,
force = scan_options.force_full,
"starting directory scan"
);
if let Some(p) = progress {
p.begin();
}
let results = import::import_directory(storage, dir, ignore_patterns).await?;
// Note: for configurable concurrency, use import_directory_with_concurrency directly
// Convert scan options to import options
let import_options = import::ImportOptions {
incremental: scan_options.incremental && !scan_options.force_full,
force: scan_options.force_full,
};
let results = import::import_directory_with_options(
storage,
dir,
ignore_patterns,
8, // Default concurrency
&import_options,
)
.await?;
let mut errors = Vec::new();
let mut processed = 0;
let mut skipped = 0;
for result in &results {
match result {
Ok(_) => processed += 1,
Ok(r) => {
if r.was_skipped {
skipped += 1;
} else {
processed += 1;
}
}
Err(e) => {
let msg = e.to_string();
if let Some(p) = progress {
@ -132,9 +199,20 @@ pub async fn scan_directory_with_progress(
scanning: false,
files_found: results.len(),
files_processed: processed,
files_skipped: skipped,
errors,
};
if scan_options.incremental {
info!(
dir = %dir.display(),
found = status.files_found,
processed = status.files_processed,
skipped = status.files_skipped,
"incremental scan complete"
);
}
Ok(status)
}
@ -142,19 +220,43 @@ pub async fn scan_all_roots(
storage: &DynStorageBackend,
ignore_patterns: &[String],
) -> Result<Vec<ScanStatus>> {
scan_all_roots_with_progress(storage, ignore_patterns, None).await
scan_all_roots_with_options(storage, ignore_patterns, None, &ScanOptions::default()).await
}
/// Scan all roots incrementally (skip unchanged files)
pub async fn scan_all_roots_incremental(
    storage: &DynStorageBackend,
    ignore_patterns: &[String],
) -> Result<Vec<ScanStatus>> {
    // Incremental mode, no forced full rescan.
    scan_all_roots_with_options(
        storage,
        ignore_patterns,
        None,
        &ScanOptions {
            incremental: true,
            force_full: false,
        },
    )
    .await
}
pub async fn scan_all_roots_with_progress(
    storage: &DynStorageBackend,
    ignore_patterns: &[String],
    progress: Option<&ScanProgress>,
) -> Result<Vec<ScanStatus>> {
    // Delegate with default (non-incremental) scan options.
    let defaults = ScanOptions::default();
    scan_all_roots_with_options(storage, ignore_patterns, progress, &defaults).await
}
/// Scan all roots with full options including progress and incremental mode
pub async fn scan_all_roots_with_options(
storage: &DynStorageBackend,
ignore_patterns: &[String],
progress: Option<&ScanProgress>,
scan_options: &ScanOptions,
) -> Result<Vec<ScanStatus>> {
let roots = storage.list_root_dirs().await?;
let mut statuses = Vec::new();
for root in roots {
match scan_directory_with_progress(storage, &root, ignore_patterns, progress).await {
match scan_directory_with_options(storage, &root, ignore_patterns, progress, scan_options)
.await
{
Ok(status) => statuses.push(status),
Err(e) => {
warn!(root = %root.display(), error = %e, "failed to scan root directory");
@ -162,6 +264,7 @@ pub async fn scan_all_roots_with_progress(
scanning: false,
files_found: 0,
files_processed: 0,
files_skipped: 0,
errors: vec![e.to_string()],
});
}

View file

@ -6,7 +6,10 @@ use winnow::{ModalResult, Parser};
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum SearchQuery {
FullText(String),
FieldMatch { field: String, value: String },
FieldMatch {
field: String,
value: String,
},
And(Vec<SearchQuery>),
Or(Vec<SearchQuery>),
Not(Box<SearchQuery>),
@ -14,6 +17,45 @@ pub enum SearchQuery {
Fuzzy(String),
TypeFilter(String),
TagFilter(String),
/// Range query: field:start..end (inclusive)
RangeQuery {
field: String,
start: Option<i64>,
end: Option<i64>,
},
/// Comparison query: field:>value, field:<value, field:>=value, field:<=value
CompareQuery {
field: String,
op: CompareOp,
value: i64,
},
/// Date query: created:today, modified:last-week, etc.
DateQuery {
field: String,
value: DateValue,
},
}
/// Comparison operator used by `SearchQuery::CompareQuery`
/// (e.g. `year:>2020`, `size:<=10MB`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum CompareOp {
    /// `field:>value`
    GreaterThan,
    /// `field:>=value`
    GreaterOrEqual,
    /// `field:<value`
    LessThan,
    /// `field:<=value`
    LessOrEqual,
}
/// Relative date expression used by `SearchQuery::DateQuery`
/// (e.g. `created:today`, `modified:last-week`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DateValue {
    /// `today`
    Today,
    /// `yesterday`
    Yesterday,
    /// `this-week` / `thisweek`
    ThisWeek,
    /// `last-week` / `lastweek`
    LastWeek,
    /// `this-month` / `thismonth`
    ThisMonth,
    /// `last-month` / `lastmonth`
    LastMonth,
    /// `this-year` / `thisyear`
    ThisYear,
    /// `last-year` / `lastyear`
    LastYear,
    /// Days ago: last-7d, last-30d
    DaysAgo(u32),
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -69,14 +111,143 @@ fn not_expr(input: &mut &str) -> ModalResult<SearchQuery> {
.parse_next(input)
}
/// Parse a date value like "today", "yesterday", "last-week", "last-30d"
fn parse_date_value(s: &str) -> Option<DateValue> {
    // Matching is case-insensitive; both hyphenated and collapsed spellings
    // of the named periods are accepted.
    let normalized = s.to_lowercase();
    match normalized.as_str() {
        "today" => Some(DateValue::Today),
        "yesterday" => Some(DateValue::Yesterday),
        "this-week" | "thisweek" => Some(DateValue::ThisWeek),
        "last-week" | "lastweek" => Some(DateValue::LastWeek),
        "this-month" | "thismonth" => Some(DateValue::ThisMonth),
        "last-month" | "lastmonth" => Some(DateValue::LastMonth),
        "this-year" | "thisyear" => Some(DateValue::ThisYear),
        "last-year" | "lastyear" => Some(DateValue::LastYear),
        // Anything else must look like "last-Nd" (e.g. "last-7d", "last-30d")
        // or it is not a date value at all.
        other => other
            .strip_prefix("last-")
            .and_then(|rest| rest.strip_suffix('d'))
            .and_then(|days| days.parse::<u32>().ok())
            .map(DateValue::DaysAgo),
    }
}
/// Parse size strings like "10MB", "1GB", "500KB" to bytes
///
/// Suffix matching is case-insensitive. A bare `B` suffix or a plain number
/// is taken as bytes. Multiplication is overflow-checked, so an absurdly
/// large value yields `None` instead of wrapping (or panicking in debug).
fn parse_size_value(s: &str) -> Option<i64> {
    const KB: i64 = 1024;
    const MB: i64 = 1024 * 1024;
    const GB: i64 = 1024 * 1024 * 1024;
    let s = s.to_uppercase();
    // Longer suffixes are tried first so "MB" is not misread as "B".
    if let Some(num) = s.strip_suffix("GB") {
        num.parse::<i64>().ok().and_then(|n| n.checked_mul(GB))
    } else if let Some(num) = s.strip_suffix("MB") {
        num.parse::<i64>().ok().and_then(|n| n.checked_mul(MB))
    } else if let Some(num) = s.strip_suffix("KB") {
        num.parse::<i64>().ok().and_then(|n| n.checked_mul(KB))
    } else if let Some(num) = s.strip_suffix('B') {
        num.parse::<i64>().ok()
    } else {
        s.parse::<i64>().ok()
    }
}
fn field_match(input: &mut &str) -> ModalResult<SearchQuery> {
let field_name =
take_while(1.., |c: char| c.is_alphanumeric() || c == '_').map(|s: &str| s.to_string());
(field_name, ':', word_or_quoted)
.map(|(field, _, value)| match field.as_str() {
"type" => SearchQuery::TypeFilter(value),
"tag" => SearchQuery::TagFilter(value),
_ => SearchQuery::FieldMatch { field, value },
.map(|(field, _, value)| {
// Handle special field types
match field.as_str() {
"type" => return SearchQuery::TypeFilter(value),
"tag" => return SearchQuery::TagFilter(value),
_ => {}
}
// Check for range queries: field:start..end
if value.contains("..") {
let parts: Vec<&str> = value.split("..").collect();
if parts.len() == 2 {
let start = if parts[0].is_empty() {
None
} else if field == "size" {
parse_size_value(parts[0])
} else {
parts[0].parse().ok()
};
let end = if parts[1].is_empty() {
None
} else if field == "size" {
parse_size_value(parts[1])
} else {
parts[1].parse().ok()
};
return SearchQuery::RangeQuery { field, start, end };
}
}
// Check for comparison queries: >=, <=, >, <
if let Some(rest) = value.strip_prefix(">=") {
let val = if field == "size" {
parse_size_value(rest).unwrap_or(0)
} else {
rest.parse().unwrap_or(0)
};
return SearchQuery::CompareQuery {
field,
op: CompareOp::GreaterOrEqual,
value: val,
};
}
if let Some(rest) = value.strip_prefix("<=") {
let val = if field == "size" {
parse_size_value(rest).unwrap_or(0)
} else {
rest.parse().unwrap_or(0)
};
return SearchQuery::CompareQuery {
field,
op: CompareOp::LessOrEqual,
value: val,
};
}
if let Some(rest) = value.strip_prefix('>') {
let val = if field == "size" {
parse_size_value(rest).unwrap_or(0)
} else {
rest.parse().unwrap_or(0)
};
return SearchQuery::CompareQuery {
field,
op: CompareOp::GreaterThan,
value: val,
};
}
if let Some(rest) = value.strip_prefix('<') {
let val = if field == "size" {
parse_size_value(rest).unwrap_or(0)
} else {
rest.parse().unwrap_or(0)
};
return SearchQuery::CompareQuery {
field,
op: CompareOp::LessThan,
value: val,
};
}
// Check for date queries on created/modified fields
if field == "created" || field == "modified" {
if let Some(date_val) = parse_date_value(&value) {
return SearchQuery::DateQuery {
field,
value: date_val,
};
}
}
// Default: simple field match
SearchQuery::FieldMatch { field, value }
})
.parse_next(input)
}
@ -253,4 +424,131 @@ mod tests {
let q = parse_search_query("\"hello world\"").unwrap();
assert_eq!(q, SearchQuery::FullText("hello world".into()));
}
#[test]
fn test_range_query_year() {
let q = parse_search_query("year:2020..2023").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
field: "year".into(),
start: Some(2020),
end: Some(2023)
}
);
}
#[test]
fn test_range_query_open_start() {
let q = parse_search_query("year:..2023").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
field: "year".into(),
start: None,
end: Some(2023)
}
);
}
#[test]
fn test_range_query_open_end() {
let q = parse_search_query("year:2020..").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
field: "year".into(),
start: Some(2020),
end: None
}
);
}
#[test]
fn test_compare_greater_than() {
let q = parse_search_query("year:>2020").unwrap();
assert_eq!(
q,
SearchQuery::CompareQuery {
field: "year".into(),
op: CompareOp::GreaterThan,
value: 2020
}
);
}
#[test]
fn test_compare_less_or_equal() {
let q = parse_search_query("year:<=2023").unwrap();
assert_eq!(
q,
SearchQuery::CompareQuery {
field: "year".into(),
op: CompareOp::LessOrEqual,
value: 2023
}
);
}
#[test]
fn test_size_compare_mb() {
let q = parse_search_query("size:>10MB").unwrap();
assert_eq!(
q,
SearchQuery::CompareQuery {
field: "size".into(),
op: CompareOp::GreaterThan,
value: 10 * 1024 * 1024
}
);
}
#[test]
fn test_size_range_gb() {
let q = parse_search_query("size:1GB..2GB").unwrap();
assert_eq!(
q,
SearchQuery::RangeQuery {
field: "size".into(),
start: Some(1024 * 1024 * 1024),
end: Some(2 * 1024 * 1024 * 1024)
}
);
}
#[test]
fn test_date_query_today() {
let q = parse_search_query("created:today").unwrap();
assert_eq!(
q,
SearchQuery::DateQuery {
field: "created".into(),
value: DateValue::Today
}
);
}
#[test]
fn test_date_query_last_week() {
let q = parse_search_query("modified:last-week").unwrap();
assert_eq!(
q,
SearchQuery::DateQuery {
field: "modified".into(),
value: DateValue::LastWeek
}
);
}
#[test]
fn test_date_query_days_ago() {
let q = parse_search_query("created:last-30d").unwrap();
assert_eq!(
q,
SearchQuery::DateQuery {
field: "created".into(),
value: DateValue::DaysAgo(30)
}
);
}
}

View file

@ -46,6 +46,8 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn get_media(&self, id: MediaId) -> Result<MediaItem>;
async fn count_media(&self) -> Result<u64>;
async fn get_media_by_hash(&self, hash: &ContentHash) -> Result<Option<MediaItem>>;
/// Get a media item by its file path (used for incremental scanning)
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>>;
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>>;
async fn update_media(&self, item: &MediaItem) -> Result<()>;
async fn delete_media(&self, id: MediaId) -> Result<()>;
@ -232,6 +234,59 @@ pub trait StorageBackend: Send + Sync + 'static {
root_path: &str,
) -> Result<()>;
/// Check if a user has access to a specific media item based on library permissions.
/// Returns the permission level if access is granted, or an error if denied.
/// Admin users (role=admin) bypass library checks and have full access.
// NOTE(review): the admin bypass is documented at the trait level but is not
// implemented by this default body — backends overriding this method are
// expected to provide it; confirm against the concrete implementations.
async fn check_library_access(
    &self,
    user_id: crate::users::UserId,
    media_id: crate::model::MediaId,
) -> Result<crate::users::LibraryPermission> {
    // Default implementation: resolve the media item's path and compare it
    // against each library root the user has been granted.
    let media = self.get_media(media_id).await?;
    // Get user's library permissions
    let libraries = self.get_user_libraries(user_id).await?;
    // Component-wise comparison via `Path::starts_with`: a root of "/media"
    // matches "/media/x" but NOT "/media-evil/x", which the previous plain
    // string prefix check would wrongly accept.
    for lib in &libraries {
        if media.path.starts_with(&lib.root_path) {
            return Ok(lib.permission);
        }
    }
    // No matching library grant: deny. This default impl requires at least
    // one matching library permission.
    Err(crate::error::PinakesError::Authorization(format!(
        "user {} has no access to media {}",
        user_id, media_id
    )))
}
/// Check if a user has at least read access to a media item
async fn has_media_read_access(
    &self,
    user_id: crate::users::UserId,
    media_id: crate::model::MediaId,
) -> Result<bool> {
    // A denied/failed access check maps to `false` rather than propagating.
    let access = self.check_library_access(user_id, media_id).await;
    Ok(access.map(|perm| perm.can_read()).unwrap_or(false))
}
/// Check if a user has write access to a media item
async fn has_media_write_access(
    &self,
    user_id: crate::users::UserId,
    media_id: crate::model::MediaId,
) -> Result<bool> {
    // A denied/failed access check maps to `false` rather than propagating.
    let access = self.check_library_access(user_id, media_id).await;
    Ok(access.map(|perm| perm.can_write()).unwrap_or(false))
}
// ===== Ratings =====
async fn rate_media(
&self,

View file

@ -114,6 +114,7 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
.get::<_, Option<String>>("thumbnail_path")
.map(PathBuf::from),
custom_fields: HashMap::new(),
file_mtime: row.get("file_mtime"),
created_at: row.get("created_at"),
updated_at: row.get("updated_at"),
})
@ -198,11 +199,61 @@ fn build_search_inner(
if text.is_empty() {
return Ok("TRUE".to_string());
}
let idx = *offset;
// Combine FTS with trigram similarity and ILIKE for comprehensive fuzzy matching
// This allows partial matches like "mus" -> "music"
let idx_fts = *offset;
*offset += 1;
let idx_prefix = *offset;
*offset += 1;
let idx_ilike = *offset;
*offset += 1;
let idx_sim_title = *offset;
*offset += 1;
let idx_sim_artist = *offset;
*offset += 1;
let idx_sim_album = *offset;
*offset += 1;
let idx_sim_filename = *offset;
*offset += 1;
// Sanitize for tsquery prefix matching
let sanitized = text.replace(['&', '|', '!', '(', ')', ':', '*', '\\', '\''], "");
let prefix_query = if sanitized.contains(' ') {
// For multi-word, join with & and add :* to last word
let words: Vec<&str> = sanitized.split_whitespace().collect();
if let Some((last, rest)) = words.split_last() {
let prefix_parts: Vec<String> = rest.iter().map(|w| w.to_string()).collect();
if prefix_parts.is_empty() {
format!("{}:*", last)
} else {
format!("{} & {}:*", prefix_parts.join(" & "), last)
}
} else {
format!("{}:*", sanitized)
}
} else {
format!("{}:*", sanitized)
};
params.push(Box::new(text.clone()));
params.push(Box::new(prefix_query));
params.push(Box::new(format!("%{}%", text)));
params.push(Box::new(text.clone()));
params.push(Box::new(text.clone()));
params.push(Box::new(text.clone()));
params.push(Box::new(text.clone()));
Ok(format!(
"search_vector @@ plainto_tsquery('english', ${idx})"
"(\
search_vector @@ plainto_tsquery('english', ${idx_fts}) OR \
search_vector @@ to_tsquery('english', ${idx_prefix}) OR \
LOWER(COALESCE(title, '')) LIKE LOWER(${idx_ilike}) OR \
LOWER(COALESCE(file_name, '')) LIKE LOWER(${idx_ilike}) OR \
similarity(COALESCE(title, ''), ${idx_sim_title}) > 0.3 OR \
similarity(COALESCE(artist, ''), ${idx_sim_artist}) > 0.3 OR \
similarity(COALESCE(album, ''), ${idx_sim_album}) > 0.3 OR \
similarity(COALESCE(file_name, ''), ${idx_sim_filename}) > 0.25\
)"
))
}
SearchQuery::Prefix(term) => {
@ -214,14 +265,31 @@ fn build_search_inner(
Ok(format!("search_vector @@ to_tsquery('english', ${idx})"))
}
SearchQuery::Fuzzy(term) => {
// Use trigram similarity on multiple fields
let idx_title = *offset;
*offset += 1;
let idx_artist = *offset;
*offset += 1;
let idx_album = *offset;
*offset += 1;
let idx_filename = *offset;
*offset += 1;
let idx_ilike = *offset;
*offset += 1;
params.push(Box::new(term.clone()));
params.push(Box::new(term.clone()));
params.push(Box::new(term.clone()));
params.push(Box::new(term.clone()));
params.push(Box::new(format!("%{}%", term)));
Ok(format!(
"(similarity(COALESCE(title, ''), ${idx_title}) > 0.3 OR similarity(COALESCE(artist, ''), ${idx_artist}) > 0.3)"
"(\
similarity(COALESCE(title, ''), ${idx_title}) > 0.3 OR \
similarity(COALESCE(artist, ''), ${idx_artist}) > 0.3 OR \
similarity(COALESCE(album, ''), ${idx_album}) > 0.3 OR \
similarity(COALESCE(file_name, ''), ${idx_filename}) > 0.25 OR \
LOWER(COALESCE(title, '')) LIKE LOWER(${idx_ilike}) OR \
LOWER(COALESCE(file_name, '')) LIKE LOWER(${idx_ilike})\
)"
))
}
SearchQuery::FieldMatch { field, value } => {
@ -277,6 +345,86 @@ fn build_search_inner(
let frag = build_search_inner(inner, offset, params, type_filters, tag_filters)?;
Ok(format!("NOT ({frag})"))
}
SearchQuery::RangeQuery { field, start, end } => {
let col = match field.as_str() {
"year" => "year",
"size" | "file_size" => "file_size",
"duration" => "duration_secs",
_ => return Ok("TRUE".to_string()), // Unknown field, ignore
};
match (start, end) {
(Some(s), Some(e)) => {
let idx_start = *offset;
*offset += 1;
let idx_end = *offset;
*offset += 1;
params.push(Box::new(*s));
params.push(Box::new(*e));
Ok(format!("({col} >= ${idx_start} AND {col} <= ${idx_end})"))
}
(Some(s), None) => {
let idx = *offset;
*offset += 1;
params.push(Box::new(*s));
Ok(format!("{col} >= ${idx}"))
}
(None, Some(e)) => {
let idx = *offset;
*offset += 1;
params.push(Box::new(*e));
Ok(format!("{col} <= ${idx}"))
}
(None, None) => Ok("TRUE".to_string()),
}
}
SearchQuery::CompareQuery { field, op, value } => {
let col = match field.as_str() {
"year" => "year",
"size" | "file_size" => "file_size",
"duration" => "duration_secs",
_ => return Ok("TRUE".to_string()), // Unknown field, ignore
};
let op_sql = match op {
crate::search::CompareOp::GreaterThan => ">",
crate::search::CompareOp::GreaterOrEqual => ">=",
crate::search::CompareOp::LessThan => "<",
crate::search::CompareOp::LessOrEqual => "<=",
};
let idx = *offset;
*offset += 1;
params.push(Box::new(*value));
Ok(format!("{col} {op_sql} ${idx}"))
}
SearchQuery::DateQuery { field, value } => {
let col = match field.as_str() {
"created" => "created_at",
"modified" | "updated" => "updated_at",
_ => return Ok("TRUE".to_string()),
};
Ok(date_value_to_postgres_expr(col, value))
}
}
}
/// Convert a DateValue to a PostgreSQL datetime comparison expression
// `col` is interpolated directly into the SQL text, so it must come from a
// trusted allow-list — the caller maps user-supplied field names to fixed
// column names ("created_at" / "updated_at") before calling this. `days` is
// a u32, so its interpolation into INTERVAL is injection-safe. No bind
// parameters are produced; every expression is self-contained SQL.
fn date_value_to_postgres_expr(col: &str, value: &crate::search::DateValue) -> String {
    use crate::search::DateValue;
    match value {
        // Exact-day comparisons cast the timestamp down to a date.
        DateValue::Today => format!("{col}::date = CURRENT_DATE"),
        DateValue::Yesterday => format!("{col}::date = CURRENT_DATE - INTERVAL '1 day'"),
        // "This <period>": everything since the start of the period.
        DateValue::ThisWeek => format!("{col} >= date_trunc('week', CURRENT_DATE)"),
        // "Last <period>": bounded on both sides (previous period only).
        DateValue::LastWeek => format!(
            "{col} >= date_trunc('week', CURRENT_DATE) - INTERVAL '7 days' AND {col} < date_trunc('week', CURRENT_DATE)"
        ),
        DateValue::ThisMonth => format!("{col} >= date_trunc('month', CURRENT_DATE)"),
        DateValue::LastMonth => format!(
            "{col} >= date_trunc('month', CURRENT_DATE) - INTERVAL '1 month' AND {col} < date_trunc('month', CURRENT_DATE)"
        ),
        DateValue::ThisYear => format!("{col} >= date_trunc('year', CURRENT_DATE)"),
        DateValue::LastYear => format!(
            "{col} >= date_trunc('year', CURRENT_DATE) - INTERVAL '1 year' AND {col} < date_trunc('year', CURRENT_DATE)"
        ),
        // Rolling window: everything within the past `days` days.
        DateValue::DaysAgo(days) => format!("{col} >= CURRENT_DATE - INTERVAL '{days} days'"),
    }
}
@ -478,7 +626,7 @@ impl StorageBackend for PostgresBackend {
.query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at
thumbnail_path, file_mtime, created_at, updated_at
FROM media_items WHERE content_hash = $1",
&[&hash.0],
)
@ -494,6 +642,34 @@ impl StorageBackend for PostgresBackend {
}
}
// Look up a media item by its stored file path (used by incremental
// scanning). Returns Ok(None) when no row matches.
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>> {
    // Paths are compared as text against the `path` column.
    // NOTE(review): lossy UTF-8 conversion — non-UTF-8 paths may not
    // round-trip; confirm inserts use the same conversion.
    let path_str = path.to_string_lossy().to_string();
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let row = client
        .query_opt(
            "SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, file_mtime, created_at, updated_at
FROM media_items WHERE path = $1",
            &[&path_str],
        )
        .await?;
    match row {
        Some(r) => {
            // Custom fields live in a separate table; load them per item.
            let mut item = row_to_media_item(&r)?;
            item.custom_fields = self.get_custom_fields(item.id).await?;
            Ok(Some(item))
        }
        None => Ok(None),
    }
}
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>> {
let client = self
.pool
@ -671,6 +847,59 @@ impl StorageBackend for PostgresBackend {
Ok(count as u64)
}
// ---- Batch Operations ----
// Delete many media items in a single statement; returns the number of
// rows removed.
async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> {
    // Nothing to do for an empty id list; skip the round-trip entirely.
    if ids.is_empty() {
        return Ok(0);
    }
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    // Use ANY with array for efficient batch delete
    let id_array: Vec<Uuid> = ids.iter().map(|media_id| media_id.0).collect();
    let deleted = client
        .execute("DELETE FROM media_items WHERE id = ANY($1)", &[&id_array])
        .await?;
    Ok(deleted)
}
// Attach every tag in `tag_ids` to every media item in `media_ids`.
// Existing pairs are left untouched via ON CONFLICT DO NOTHING; returns
// the number of newly inserted rows.
async fn batch_tag_media(&self, media_ids: &[MediaId], tag_ids: &[Uuid]) -> Result<u64> {
    // The cross product is empty if either side is; avoid a useless query.
    if media_ids.is_empty() || tag_ids.is_empty() {
        return Ok(0);
    }
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    // Build the (media, tag) cross product as two parallel arrays so one
    // UNNEST-based INSERT covers the whole batch.
    let (media_uuids, tag_uuids): (Vec<Uuid>, Vec<Uuid>) = media_ids
        .iter()
        .flat_map(|mid| tag_ids.iter().map(move |tid| (mid.0, *tid)))
        .unzip();
    let rows = client
        .execute(
            "INSERT INTO media_tags (media_id, tag_id)
SELECT * FROM UNNEST($1::uuid[], $2::uuid[])
ON CONFLICT DO NOTHING",
            &[&media_uuids, &tag_uuids],
        )
        .await?;
    Ok(rows)
}
// ---- Tags ----
async fn create_tag(&self, name: &str, parent_id: Option<Uuid>) -> Result<Tag> {
@ -3155,6 +3384,9 @@ fn query_has_fts(query: &SearchQuery) -> bool {
SearchQuery::FieldMatch { .. } => false,
SearchQuery::TypeFilter(_) => false,
SearchQuery::TagFilter(_) => false,
SearchQuery::RangeQuery { .. } => false,
SearchQuery::CompareQuery { .. } => false,
SearchQuery::DateQuery { .. } => false,
SearchQuery::And(children) | SearchQuery::Or(children) => {
children.iter().any(query_has_fts)
}
@ -3173,7 +3405,7 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
None
} else {
let idx = *offset;
*offset += 1;
*offset += 7; // FullText now uses 7 params (fts, prefix, ilike, sim_title, sim_artist, sim_album, sim_filename)
Some(idx)
}
}
@ -3183,7 +3415,7 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
Some(idx)
}
SearchQuery::Fuzzy(_) => {
*offset += 2; // fuzzy uses two params
*offset += 5; // Fuzzy now uses 5 params (sim_title, sim_artist, sim_album, sim_filename, ilike)
None
}
SearchQuery::FieldMatch { .. } => {
@ -3191,6 +3423,21 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
None
}
SearchQuery::TypeFilter(_) | SearchQuery::TagFilter(_) => None,
SearchQuery::RangeQuery { start, end, .. } => {
// Range queries use 0-2 params depending on bounds
if start.is_some() {
*offset += 1;
}
if end.is_some() {
*offset += 1;
}
None
}
SearchQuery::CompareQuery { .. } => {
*offset += 1;
None
}
SearchQuery::DateQuery { .. } => None, // No params, uses inline SQL
SearchQuery::And(children) | SearchQuery::Or(children) => {
for child in children {
if let Some(idx) = find_inner(child, offset) {
@ -3255,10 +3502,15 @@ mod tests {
let mut offset = 1;
let mut params: Vec<Box<dyn ToSql + Sync + Send>> = Vec::new();
let (clause, types, tags) = build_search_clause(&query, &mut offset, &mut params).unwrap();
assert_eq!(clause, "search_vector @@ plainto_tsquery('english', $1)");
// Fuzzy search combines FTS, prefix, ILIKE, and trigram similarity
assert!(clause.contains("plainto_tsquery"));
assert!(clause.contains("to_tsquery"));
assert!(clause.contains("LIKE"));
assert!(clause.contains("similarity"));
assert!(types.is_empty());
assert!(tags.is_empty());
assert_eq!(offset, 2);
// FullText now uses 7 parameters
assert_eq!(offset, 8);
}
#[test]

View file

@ -111,6 +111,8 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
.get::<_, Option<String>>("thumbnail_path")?
.map(PathBuf::from),
custom_fields: HashMap::new(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
created_at: parse_datetime(&created_str),
updated_at: parse_datetime(&updated_str),
})
@ -312,18 +314,22 @@ fn load_custom_fields_batch(db: &Connection, items: &mut [MediaItem]) -> rusqlit
/// Translate a `SearchQuery` into components that can be assembled into SQL.
///
/// Returns `(fts_expr, where_clauses, join_clauses)` where:
/// Returns `(fts_expr, like_terms, where_clauses, join_clauses, params)` where:
/// - `fts_expr` is an FTS5 MATCH expression (may be empty),
/// - `like_terms` are search terms for LIKE fallback matching,
/// - `where_clauses` are extra WHERE predicates (e.g. type filters),
/// - `join_clauses` are extra JOIN snippets (e.g. tag filters).
/// - `params` are bind parameter values corresponding to `?` placeholders in
/// where_clauses and join_clauses.
fn search_query_to_fts(query: &SearchQuery) -> (String, Vec<String>, Vec<String>, Vec<String>) {
fn search_query_to_fts(
query: &SearchQuery,
) -> (String, Vec<String>, Vec<String>, Vec<String>, Vec<String>) {
let mut wheres = Vec::new();
let mut joins = Vec::new();
let mut params = Vec::new();
let fts = build_fts_expr(query, &mut wheres, &mut joins, &mut params);
(fts, wheres, joins, params)
let mut like_terms = Vec::new();
let fts = build_fts_expr(query, &mut wheres, &mut joins, &mut params, &mut like_terms);
(fts, like_terms, wheres, joins, params)
}
fn build_fts_expr(
@ -331,21 +337,35 @@ fn build_fts_expr(
wheres: &mut Vec<String>,
joins: &mut Vec<String>,
params: &mut Vec<String>,
like_terms: &mut Vec<String>,
) -> String {
match query {
SearchQuery::FullText(text) => {
if text.is_empty() {
String::new()
} else {
sanitize_fts_token(text)
// Collect term for LIKE fallback matching
like_terms.push(text.clone());
// Add implicit prefix matching for better partial matches
// This allows "mus" to match "music", "musician", etc.
let sanitized = sanitize_fts_token(text);
// If it's a single word, add prefix matching
if !sanitized.contains(' ') && !sanitized.contains('"') {
format!("{}*", sanitized)
} else {
// For phrases, use as-is but also add NEAR for proximity
sanitized
}
}
}
SearchQuery::Prefix(prefix) => {
like_terms.push(prefix.clone());
format!("{}*", sanitize_fts_token(prefix))
}
SearchQuery::Fuzzy(term) => {
// FTS5 does not natively support fuzzy; fall back to prefix match
// FTS5 does not natively support fuzzy; use prefix match
// as a best-effort approximation.
like_terms.push(term.clone());
format!("{}*", sanitize_fts_token(term))
}
SearchQuery::FieldMatch { field, value } => {
@ -355,7 +375,7 @@ fn build_fts_expr(
format!("{safe_field}:{safe_value}")
}
SearchQuery::Not(inner) => {
let inner_expr = build_fts_expr(inner, wheres, joins, params);
let inner_expr = build_fts_expr(inner, wheres, joins, params, like_terms);
if inner_expr.is_empty() {
String::new()
} else {
@ -365,7 +385,7 @@ fn build_fts_expr(
SearchQuery::And(terms) => {
let parts: Vec<String> = terms
.iter()
.map(|t| build_fts_expr(t, wheres, joins, params))
.map(|t| build_fts_expr(t, wheres, joins, params, like_terms))
.filter(|s| !s.is_empty())
.collect();
parts.join(" ")
@ -373,7 +393,7 @@ fn build_fts_expr(
SearchQuery::Or(terms) => {
let parts: Vec<String> = terms
.iter()
.map(|t| build_fts_expr(t, wheres, joins, params))
.map(|t| build_fts_expr(t, wheres, joins, params, like_terms))
.filter(|s| !s.is_empty())
.collect();
if parts.len() <= 1 {
@ -399,6 +419,82 @@ fn build_fts_expr(
params.push(tag_name.clone());
String::new()
}
SearchQuery::RangeQuery { field, start, end } => {
let col = match field.as_str() {
"year" => "m.year",
"size" | "file_size" => "m.file_size",
"duration" => "m.duration_secs",
_ => return String::new(), // Unknown field, ignore
};
match (start, end) {
(Some(s), Some(e)) => {
wheres.push(format!("{col} >= ? AND {col} <= ?"));
params.push(s.to_string());
params.push(e.to_string());
}
(Some(s), None) => {
wheres.push(format!("{col} >= ?"));
params.push(s.to_string());
}
(None, Some(e)) => {
wheres.push(format!("{col} <= ?"));
params.push(e.to_string());
}
(None, None) => {}
}
String::new()
}
SearchQuery::CompareQuery { field, op, value } => {
let col = match field.as_str() {
"year" => "m.year",
"size" | "file_size" => "m.file_size",
"duration" => "m.duration_secs",
_ => return String::new(), // Unknown field, ignore
};
let op_sql = match op {
crate::search::CompareOp::GreaterThan => ">",
crate::search::CompareOp::GreaterOrEqual => ">=",
crate::search::CompareOp::LessThan => "<",
crate::search::CompareOp::LessOrEqual => "<=",
};
wheres.push(format!("{col} {op_sql} ?"));
params.push(value.to_string());
String::new()
}
SearchQuery::DateQuery { field, value } => {
let col = match field.as_str() {
"created" => "m.created_at",
"modified" | "updated" => "m.updated_at",
_ => return String::new(),
};
let sql = date_value_to_sqlite_expr(col, value);
if !sql.is_empty() {
wheres.push(sql);
}
String::new()
}
}
}
/// Convert a `DateValue` into a SQLite datetime comparison predicate over
/// the column named by `col`, using SQLite's `date()`/`datetime()` modifiers.
fn date_value_to_sqlite_expr(col: &str, value: &crate::search::DateValue) -> String {
    use crate::search::DateValue;

    // `col` falls exactly on the calendar day given by the modifiers.
    let on_day = |modifiers: &str| format!("date({col}) = date('now'{modifiers})");
    // `col` is at or after the datetime produced by the modifiers.
    let since = |modifiers: &str| format!("{col} >= datetime('now'{modifiers})");
    // `col` lies in the half-open window [lower, upper).
    let between = |lower: &str, upper: &str| {
        format!("{col} >= datetime('now'{lower}) AND {col} < datetime('now'{upper})")
    };

    match value {
        DateValue::Today => on_day(""),
        DateValue::Yesterday => on_day(", '-1 day'"),
        DateValue::ThisWeek => since(", 'weekday 0', '-7 days'"),
        DateValue::LastWeek => between(", 'weekday 0', '-14 days'", ", 'weekday 0', '-7 days'"),
        DateValue::ThisMonth => since(", 'start of month'"),
        DateValue::LastMonth => between(", 'start of month', '-1 month'", ", 'start of month'"),
        DateValue::ThisYear => since(", 'start of year'"),
        DateValue::LastYear => between(", 'start of year', '-1 year'", ", 'start of year'"),
        DateValue::DaysAgo(days) => since(&format!(", '-{days} days'")),
    }
}
@ -514,8 +610,8 @@ impl StorageBackend for SqliteBackend {
db.execute(
"INSERT INTO media_items (id, path, file_name, media_type, content_hash, \
file_size, title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16)",
thumbnail_path, file_mtime, created_at, updated_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17)",
params![
item.id.0.to_string(),
item.path.to_string_lossy().as_ref(),
@ -533,6 +629,7 @@ impl StorageBackend for SqliteBackend {
item.thumbnail_path
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
item.file_mtime,
item.created_at.to_rfc3339(),
item.updated_at.to_rfc3339(),
],
@ -566,7 +663,7 @@ impl StorageBackend for SqliteBackend {
let mut stmt = db.prepare(
"SELECT id, path, file_name, media_type, content_hash, file_size, \
title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at FROM media_items WHERE id = ?1",
thumbnail_path, file_mtime, created_at, updated_at FROM media_items WHERE id = ?1",
)?;
let mut item = stmt
.query_row(params![id.0.to_string()], row_to_media_item)
@ -593,7 +690,7 @@ impl StorageBackend for SqliteBackend {
let mut stmt = db.prepare(
"SELECT id, path, file_name, media_type, content_hash, file_size, \
title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at FROM media_items WHERE content_hash = ?1",
thumbnail_path, file_mtime, created_at, updated_at FROM media_items WHERE content_hash = ?1",
)?;
let result = stmt
.query_row(params![hash.0], row_to_media_item)
@ -609,6 +706,32 @@ impl StorageBackend for SqliteBackend {
.map_err(|e| PinakesError::Database(e.to_string()))?
}
/// Look up a single media item by its filesystem path, hydrating its
/// custom fields from the side table. Returns `Ok(None)` when no row
/// matches the path.
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>> {
    // Capture owned copies so the blocking closure can be 'static.
    let lookup_path = path.to_string_lossy().to_string();
    let shared_conn = Arc::clone(&self.conn);
    tokio::task::spawn_blocking(move || {
        let db = shared_conn
            .lock()
            .map_err(|e| PinakesError::Database(e.to_string()))?;
        let mut stmt = db.prepare(
            "SELECT id, path, file_name, media_type, content_hash, file_size, \
             title, artist, album, genre, year, duration_secs, description, \
             thumbnail_path, file_mtime, created_at, updated_at FROM media_items WHERE path = ?1",
        )?;
        let found = stmt
            .query_row(params![lookup_path], row_to_media_item)
            .optional()?;
        match found {
            Some(mut item) => {
                // Custom fields live in a separate table; load them now.
                item.custom_fields = load_custom_fields_sync(&db, item.id)?;
                Ok(Some(item))
            }
            None => Ok(None),
        }
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))?
}
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>> {
let pagination = pagination.clone();
let conn = Arc::clone(&self.conn);
@ -630,7 +753,7 @@ impl StorageBackend for SqliteBackend {
let sql = format!(
"SELECT id, path, file_name, media_type, content_hash, file_size, \
title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at FROM media_items \
thumbnail_path, file_mtime, created_at, updated_at FROM media_items \
ORDER BY {order_by} LIMIT ?1 OFFSET ?2"
);
let mut stmt = db.prepare(&sql)?;
@ -658,7 +781,7 @@ impl StorageBackend for SqliteBackend {
"UPDATE media_items SET path = ?2, file_name = ?3, media_type = ?4, \
content_hash = ?5, file_size = ?6, title = ?7, artist = ?8, album = ?9, \
genre = ?10, year = ?11, duration_secs = ?12, description = ?13, \
thumbnail_path = ?14, updated_at = ?15 WHERE id = ?1",
thumbnail_path = ?14, file_mtime = ?15, updated_at = ?16 WHERE id = ?1",
params![
item.id.0.to_string(),
item.path.to_string_lossy().as_ref(),
@ -676,6 +799,7 @@ impl StorageBackend for SqliteBackend {
item.thumbnail_path
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
item.file_mtime,
item.updated_at.to_rfc3339(),
],
)?;
@ -1067,7 +1191,7 @@ impl StorageBackend for SqliteBackend {
.lock()
.map_err(|e| PinakesError::Database(e.to_string()))?;
let (fts_expr, where_clauses, join_clauses, bind_params) =
let (fts_expr, _like_terms, where_clauses, join_clauses, bind_params) =
search_query_to_fts(&request.query);
let use_fts = !fts_expr.is_empty();
@ -1309,16 +1433,30 @@ impl StorageBackend for SqliteBackend {
}
async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> {
if ids.is_empty() {
return Ok(0);
}
let ids: Vec<String> = ids.iter().map(|id| id.0.to_string()).collect();
let conn = Arc::clone(&self.conn);
tokio::task::spawn_blocking(move || {
let db = conn
.lock()
.map_err(|e| PinakesError::Database(e.to_string()))?;
// Use IN clause for batch delete - much faster than individual deletes
// SQLite has a limit of ~500-1000 items in IN clause, so chunk if needed
const CHUNK_SIZE: usize = 500;
db.execute_batch("BEGIN IMMEDIATE")?;
let mut count = 0u64;
for id in &ids {
let rows = db.execute("DELETE FROM media_items WHERE id = ?1", params![id])?;
for chunk in ids.chunks(CHUNK_SIZE) {
let placeholders: Vec<String> =
(1..=chunk.len()).map(|i| format!("?{}", i)).collect();
let sql = format!(
"DELETE FROM media_items WHERE id IN ({})",
placeholders.join(", ")
);
let params: Vec<&dyn rusqlite::ToSql> =
chunk.iter().map(|s| s as &dyn rusqlite::ToSql).collect();
let rows = db.execute(&sql, params.as_slice())?;
count += rows as u64;
}
db.execute_batch("COMMIT")?;
@ -1329,6 +1467,9 @@ impl StorageBackend for SqliteBackend {
}
async fn batch_tag_media(&self, media_ids: &[MediaId], tag_ids: &[Uuid]) -> Result<u64> {
if media_ids.is_empty() || tag_ids.is_empty() {
return Ok(0);
}
let media_ids: Vec<String> = media_ids.iter().map(|id| id.0.to_string()).collect();
let tag_ids: Vec<String> = tag_ids.iter().map(|id| id.to_string()).collect();
let conn = Arc::clone(&self.conn);
@ -1337,13 +1478,14 @@ impl StorageBackend for SqliteBackend {
.lock()
.map_err(|e| PinakesError::Database(e.to_string()))?;
db.execute_batch("BEGIN IMMEDIATE")?;
// Prepare statement once for reuse
let mut stmt = db.prepare_cached(
"INSERT OR IGNORE INTO media_tags (media_id, tag_id) VALUES (?1, ?2)",
)?;
let mut count = 0u64;
for mid in &media_ids {
for tid in &tag_ids {
db.execute(
"INSERT OR IGNORE INTO media_tags (media_id, tag_id) VALUES (?1, ?2)",
params![mid, tid],
)?;
stmt.execute(params![mid, tid])?;
count += 1;
}
}

View file

@ -35,6 +35,7 @@ async fn test_media_crud() {
description: Some("A test file".to_string()),
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
created_at: now,
updated_at: now,
};
@ -113,6 +114,7 @@ async fn test_tags() {
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
created_at: now,
updated_at: now,
};
@ -165,6 +167,7 @@ async fn test_collections() {
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
created_at: now,
updated_at: now,
};
@ -212,6 +215,7 @@ async fn test_custom_fields() {
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
created_at: now,
updated_at: now,
};
@ -278,6 +282,7 @@ async fn test_search() {
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
created_at: now,
updated_at: now,
};
@ -409,6 +414,7 @@ async fn test_library_statistics_with_data() {
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
created_at: now,
updated_at: now,
};
@ -445,6 +451,7 @@ fn make_test_media(hash: &str) -> MediaItem {
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
file_mtime: None,
created_at: now,
updated_at: now,
}

View file

@ -19,6 +19,7 @@ clap = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
axum = { workspace = true }
axum-server = { version = "0.7", features = ["tls-rustls"] }
tower = { workspace = true }
tower-http = { workspace = true }
governor = { workspace = true }
@ -27,6 +28,7 @@ tokio-util = { version = "0.7", features = ["io"] }
argon2 = { workspace = true }
rand = "0.9"
percent-encoding = "2"
http = "1.0"
[dev-dependencies]
http-body-util = "0.1"

View file

@ -5,16 +5,27 @@ use axum::extract::DefaultBodyLimit;
use axum::http::{HeaderValue, Method, header};
use axum::middleware;
use axum::routing::{delete, get, patch, post, put};
use tower::ServiceBuilder;
use tower_governor::GovernorLayer;
use tower_governor::governor::GovernorConfigBuilder;
use tower_http::cors::CorsLayer;
use tower_http::set_header::SetResponseHeaderLayer;
use tower_http::trace::TraceLayer;
use crate::auth;
use crate::routes;
use crate::state::AppState;
/// Create the router without any TLS configuration.
///
/// Convenience wrapper around [`create_router_with_tls`] that passes `None`,
/// so no HSTS (`Strict-Transport-Security`) header is attached to responses.
pub fn create_router(state: AppState) -> Router {
    create_router_with_tls(state, None)
}
/// Create the router with TLS configuration for security headers
pub fn create_router_with_tls(
state: AppState,
tls_config: Option<&pinakes_core::config::TlsConfig>,
) -> Router {
// Global rate limit: 100 requests/sec per IP
let global_governor = Arc::new(
GovernorConfigBuilder::default()
@ -41,11 +52,16 @@ pub fn create_router(state: AppState) -> Router {
});
// Public routes (no auth required)
let public_routes = Router::new().route("/s/{token}", get(routes::social::access_shared_media));
let public_routes = Router::new()
.route("/s/{token}", get(routes::social::access_shared_media))
// Kubernetes-style health probes (no auth required for orchestration)
.route("/health/live", get(routes::health::liveness))
.route("/health/ready", get(routes::health::readiness));
// Read-only routes: any authenticated user (Viewer+)
let viewer_routes = Router::new()
.route("/health", get(routes::health::health))
.route("/health/detailed", get(routes::health::health_detailed))
.route("/media/count", get(routes::media::get_media_count))
.route("/media", get(routes::media::list_media))
.route("/media/{id}", get(routes::media::get_media))
@ -393,7 +409,40 @@ pub fn create_router(state: AppState) -> Router {
.merge(public_routes)
.merge(protected_api);
Router::new()
// Build security headers layer
let security_headers = ServiceBuilder::new()
// Prevent MIME type sniffing
.layer(SetResponseHeaderLayer::overriding(
header::X_CONTENT_TYPE_OPTIONS,
HeaderValue::from_static("nosniff"),
))
// Prevent clickjacking
.layer(SetResponseHeaderLayer::overriding(
header::X_FRAME_OPTIONS,
HeaderValue::from_static("DENY"),
))
// XSS protection (legacy but still useful for older browsers)
.layer(SetResponseHeaderLayer::overriding(
header::HeaderName::from_static("x-xss-protection"),
HeaderValue::from_static("1; mode=block"),
))
// Referrer policy
.layer(SetResponseHeaderLayer::overriding(
header::REFERRER_POLICY,
HeaderValue::from_static("strict-origin-when-cross-origin"),
))
// Permissions policy (disable unnecessary features)
.layer(SetResponseHeaderLayer::overriding(
header::HeaderName::from_static("permissions-policy"),
HeaderValue::from_static("geolocation=(), microphone=(), camera=()"),
))
// Content Security Policy for API responses
.layer(SetResponseHeaderLayer::overriding(
header::CONTENT_SECURITY_POLICY,
HeaderValue::from_static("default-src 'none'; frame-ancestors 'none'"),
));
let router = Router::new()
.nest("/api/v1", full_api)
.layer(DefaultBodyLimit::max(10 * 1024 * 1024))
.layer(GovernorLayer {
@ -401,5 +450,26 @@ pub fn create_router(state: AppState) -> Router {
})
.layer(TraceLayer::new_for_http())
.layer(cors)
.layer(security_headers);
// Add HSTS header when TLS is enabled
if let Some(tls) = tls_config {
if tls.enabled && tls.hsts_enabled {
let hsts_value = format!("max-age={}; includeSubDomains", tls.hsts_max_age);
let hsts_header = HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| {
HeaderValue::from_static("max-age=31536000; includeSubDomains")
});
router
.layer(SetResponseHeaderLayer::overriding(
header::STRICT_TRANSPORT_SECURITY,
hsts_header,
))
.with_state(state)
} else {
router.with_state(state)
}
} else {
router.with_state(state)
}
}

View file

@ -2,6 +2,9 @@ use std::path::PathBuf;
use std::sync::Arc;
use anyhow::Result;
use axum::Router;
use axum::response::Redirect;
use axum::routing::any;
use clap::Parser;
use tokio::sync::RwLock;
use tracing::info;
@ -202,6 +205,7 @@ async fn main() -> Result<()> {
scanning: false,
files_found: total_found,
files_processed: total_processed,
files_skipped: 0,
errors: all_errors,
}
})
@ -459,7 +463,7 @@ async fn main() -> Result<()> {
let state = AppState {
storage: storage.clone(),
config: config_arc,
config: config_arc.clone(),
config_path: Some(config_path),
scan_progress: pinakes_core::scan::ScanProgress::new(),
sessions: Arc::new(RwLock::new(std::collections::HashMap::new())),
@ -489,8 +493,90 @@ async fn main() -> Result<()> {
});
}
let router = app::create_router(state);
let config_read = config_arc.read().await;
let tls_config = config_read.server.tls.clone();
drop(config_read);
// Create router with TLS config for HSTS headers
let router = if tls_config.enabled {
app::create_router_with_tls(state, Some(&tls_config))
} else {
app::create_router(state)
};
if tls_config.enabled {
// TLS/HTTPS mode
let cert_path = tls_config
.cert_path
.as_ref()
.ok_or_else(|| anyhow::anyhow!("TLS enabled but cert_path not specified"))?;
let key_path = tls_config
.key_path
.as_ref()
.ok_or_else(|| anyhow::anyhow!("TLS enabled but key_path not specified"))?;
info!(addr = %addr, cert = %cert_path.display(), "server listening with TLS");
// Configure TLS
let tls_config_builder =
axum_server::tls_rustls::RustlsConfig::from_pem_file(cert_path, key_path).await?;
// Start HTTP redirect server if configured
if tls_config.redirect_http {
let http_addr = format!(
"{}:{}",
config_arc.read().await.server.host,
tls_config.http_port
);
let https_port = config_arc.read().await.server.port;
let https_host = config_arc.read().await.server.host.clone();
let redirect_router = create_https_redirect_router(https_host, https_port);
let shutdown = shutdown_token.clone();
tokio::spawn(async move {
let listener = match tokio::net::TcpListener::bind(&http_addr).await {
Ok(l) => l,
Err(e) => {
tracing::warn!(error = %e, addr = %http_addr, "failed to bind HTTP redirect listener");
return;
}
};
info!(addr = %http_addr, "HTTP redirect server listening");
let server = axum::serve(
listener,
redirect_router.into_make_service_with_connect_info::<std::net::SocketAddr>(),
);
tokio::select! {
result = server => {
if let Err(e) = result {
tracing::warn!(error = %e, "HTTP redirect server error");
}
}
_ = shutdown.cancelled() => {
info!("HTTP redirect server shutting down");
}
}
});
}
// Start HTTPS server with graceful shutdown via Handle
let addr_parsed: std::net::SocketAddr = addr.parse()?;
let handle = axum_server::Handle::new();
let shutdown_handle = handle.clone();
// Spawn a task to trigger graceful shutdown
tokio::spawn(async move {
shutdown_signal().await;
shutdown_handle.graceful_shutdown(Some(std::time::Duration::from_secs(30)));
});
axum_server::bind_rustls(addr_parsed, tls_config_builder)
.handle(handle)
.serve(router.into_make_service_with_connect_info::<std::net::SocketAddr>())
.await?;
} else {
// Plain HTTP mode
info!(addr = %addr, "server listening");
let listener = tokio::net::TcpListener::bind(&addr).await?;
@ -500,12 +586,31 @@ async fn main() -> Result<()> {
)
.with_graceful_shutdown(shutdown_signal())
.await?;
}
shutdown_token.cancel();
info!("server shut down");
Ok(())
}
/// Build a router whose only job is to permanently redirect every incoming
/// HTTP request to its HTTPS equivalent on `https_host`, preserving the
/// original path and query string.
fn create_https_redirect_router(https_host: String, https_port: u16) -> Router {
    Router::new().fallback(any(move |uri: axum::http::Uri| {
        let host = https_host.clone();
        async move {
            // Carry the original path + query over to the redirect target.
            let suffix = uri.path_and_query().map(|pq| pq.as_str()).unwrap_or("/");
            // Omit the port for the HTTPS default (443) to keep URLs clean.
            let target = match https_port {
                443 => format!("https://{}{}", host, suffix),
                port => format!("https://{}:{}{}", host, port, suffix),
            };
            Redirect::permanent(&target)
        }
    }))
}
async fn shutdown_signal() {
let ctrl_c = async {
match tokio::signal::ctrl_c().await {

View file

@ -5,6 +5,12 @@ use axum::http::{HeaderMap, StatusCode};
use crate::dto::{LoginRequest, LoginResponse, UserInfoResponse};
use crate::state::AppState;
/// Dummy password hash used for timing-safe comparison when the requested
/// user does not exist.
///
/// This is a syntactically valid argon2id PHC string whose verification
/// always fails, but costs roughly the same to verify as a real hash.
/// Verifying against it keeps login response times uniform, so an attacker
/// cannot use timing differences to discover which usernames exist.
const DUMMY_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
pub async fn login(
State(state): State<AppState>,
Json(req): Json<LoginRequest>,
@ -25,27 +31,47 @@ pub async fn login(
.iter()
.find(|u| u.username == req.username);
let user = match user {
Some(u) => u,
None => {
tracing::warn!(username = %req.username, "login failed: unknown user");
return Err(StatusCode::UNAUTHORIZED);
}
// Always perform password verification to prevent timing attacks.
// If the user doesn't exist, we verify against a dummy hash to ensure
// consistent response times regardless of whether the username exists.
use argon2::password_hash::PasswordVerifier;
let (hash_to_verify, user_found) = match user {
Some(u) => (&u.password_hash as &str, true),
None => (DUMMY_HASH, false),
};
// Verify password using argon2
use argon2::password_hash::PasswordVerifier;
let hash = &user.password_hash;
let parsed_hash = argon2::password_hash::PasswordHash::new(hash)
let parsed_hash = argon2::password_hash::PasswordHash::new(hash_to_verify)
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
let valid = argon2::Argon2::default()
let password_valid = argon2::Argon2::default()
.verify_password(req.password.as_bytes(), &parsed_hash)
.is_ok();
if !valid {
// Authentication fails if user wasn't found OR password was invalid
if !user_found || !password_valid {
// Log different messages for debugging but return same error
if !user_found {
tracing::warn!(username = %req.username, "login failed: unknown user");
} else {
tracing::warn!(username = %req.username, "login failed: invalid password");
}
// Record failed login attempt in audit log
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::LoginFailed,
Some(format!("username: {}", req.username)),
)
.await;
return Err(StatusCode::UNAUTHORIZED);
}
// At this point we know the user exists and password is valid
let user = user.expect("user should exist at this point");
// Generate session token
use rand::Rng;
let token: String = rand::rng()
@ -72,6 +98,15 @@ pub async fn login(
tracing::info!(username = %username, role = %role, "login successful");
// Record successful login in audit log
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::LoginSuccess,
Some(format!("username: {}, role: {}", username, role)),
)
.await;
Ok(Json(LoginResponse {
token,
username,
@ -81,8 +116,24 @@ pub async fn login(
/// Invalidate the caller's session token (if any) and record the logout in
/// the audit log. Always returns `200 OK`, even for unknown or missing
/// tokens, so logout is idempotent.
pub async fn logout(State(state): State<AppState>, headers: HeaderMap) -> StatusCode {
    if let Some(token) = extract_bearer_token(&headers) {
        // Remove the session under a single write lock. `remove` hands back
        // the removed entry, so we get the username directly — this avoids
        // the previous read-lock/write-lock pair and its race window where
        // another task could remove the session between the two locks.
        // The guard is a temporary and is dropped before the `.await` below.
        let removed = state.sessions.write().await.remove(token);

        if let Some(session) = removed {
            // Record logout in audit log (best-effort; failures are ignored).
            let _ = pinakes_core::audit::record_action(
                &state.storage,
                None,
                pinakes_core::model::AuditAction::Logout,
                Some(format!("username: {}", session.username)),
            )
            .await;
        }
    }
    StatusCode::OK
}

View file

@ -1,8 +1,221 @@
use axum::Json;
use std::time::Instant;
pub async fn health() -> Json<serde_json::Value> {
Json(serde_json::json!({
"status": "ok",
"version": env!("CARGO_PKG_VERSION"),
}))
use axum::Json;
use axum::extract::State;
use axum::http::StatusCode;
use axum::response::IntoResponse;
use serde::{Deserialize, Serialize};
use crate::state::AppState;
/// Top-level payload for the `/health` endpoint.
///
/// The optional probe sections are omitted from the serialized JSON when a
/// probe was not performed.
#[derive(Debug, Serialize, Deserialize)]
pub struct HealthResponse {
    // Overall status: "ok" or "degraded".
    pub status: String,
    // Server crate version (CARGO_PKG_VERSION).
    pub version: String,
    // Database connectivity probe result, if performed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub database: Option<DatabaseHealth>,
    // Accessibility of configured media root directories, if checked.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub filesystem: Option<FilesystemHealth>,
    // Cache statistics snapshot, if collected.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache: Option<CacheHealth>,
}
/// Result of the database connectivity probe (a media count query).
#[derive(Debug, Serialize, Deserialize)]
pub struct DatabaseHealth {
    // "ok" on success, or "error: <detail>" when the probe query failed.
    pub status: String,
    // Round-trip latency of the probe query, in milliseconds.
    pub latency_ms: u64,
    // Total media rows, present only when the count query succeeded.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub media_count: Option<u64>,
}
/// Accessibility summary of the configured media root directories.
#[derive(Debug, Serialize, Deserialize)]
pub struct FilesystemHealth {
    // "ok" when every configured root exists on disk, else "degraded".
    pub status: String,
    // Number of root directories configured.
    pub roots_configured: usize,
    // Number of those roots that currently exist on disk.
    pub roots_accessible: usize,
}
/// Snapshot of cache-layer statistics, taken from `CacheLayer::stats()`.
#[derive(Debug, Serialize, Deserialize)]
pub struct CacheHealth {
    // Overall hit rate across all caches — assumed 0.0..=1.0; TODO confirm
    // the scale against CacheStats::overall_hit_rate.
    pub hit_rate: f64,
    // Total number of entries across all caches.
    pub total_entries: u64,
    // Size of the HTTP response cache — NOTE(review): whether "size" means
    // entry count or bytes is not visible here; confirm in CacheLayer.
    pub responses_size: u64,
    // Size of the search-query cache (same caveat as above).
    pub queries_size: u64,
    // Size of the media cache (same caveat as above).
    pub media_size: u64,
}
/// Comprehensive health check - includes database, filesystem, and cache status
pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
let mut response = HealthResponse {
status: "ok".to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
database: None,
filesystem: None,
cache: None,
};
// Check database health
let db_start = Instant::now();
let db_health = match state.storage.count_media().await {
Ok(count) => DatabaseHealth {
status: "ok".to_string(),
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: Some(count),
},
Err(e) => {
response.status = "degraded".to_string();
DatabaseHealth {
status: format!("error: {}", e),
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: None,
}
}
};
response.database = Some(db_health);
// Check filesystem health (root directories)
let roots = match state.storage.list_root_dirs().await {
Ok(r) => r,
Err(_) => Vec::new(),
};
let roots_accessible = roots.iter().filter(|r| r.exists()).count();
if roots_accessible < roots.len() {
response.status = "degraded".to_string();
}
response.filesystem = Some(FilesystemHealth {
status: if roots_accessible == roots.len() {
"ok"
} else {
"degraded"
}
.to_string(),
roots_configured: roots.len(),
roots_accessible,
});
// Get cache statistics
let cache_stats = state.cache.stats();
response.cache = Some(CacheHealth {
hit_rate: cache_stats.overall_hit_rate(),
total_entries: cache_stats.total_entries(),
responses_size: cache_stats.responses.size,
queries_size: cache_stats.queries.size,
media_size: cache_stats.media.size,
});
Json(response)
}
/// Kubernetes-style liveness probe.
///
/// Always answers `200 OK` with `{"status": "alive"}` — if this handler
/// runs at all, the server process is alive. No dependencies are checked.
pub async fn liveness() -> impl IntoResponse {
    let body = serde_json::json!({ "status": "alive" });
    (StatusCode::OK, Json(body))
}
/// Kubernetes-style readiness probe.
///
/// Answers `200 OK` (including the measured database latency) when a cheap
/// count query succeeds, or `503 Service Unavailable` (including the error
/// text) when the database cannot be reached.
pub async fn readiness(State(state): State<AppState>) -> impl IntoResponse {
    let probe_start = Instant::now();
    match state.storage.count_media().await {
        Ok(_) => {
            let body = serde_json::json!({
                "status": "ready",
                "database_latency_ms": probe_start.elapsed().as_millis() as u64
            });
            (StatusCode::OK, Json(body))
        }
        Err(e) => {
            let body = serde_json::json!({
                "status": "not_ready",
                "reason": e.to_string()
            });
            (StatusCode::SERVICE_UNAVAILABLE, Json(body))
        }
    }
}
/// Payload for the detailed health check endpoint used by monitoring
/// dashboards.
#[derive(Debug, Serialize, Deserialize)]
pub struct DetailedHealthResponse {
    // "ok" or "degraded".
    pub status: String,
    // Server crate version (CARGO_PKG_VERSION).
    pub version: String,
    // Seconds since server start — currently always 0 (not yet tracked).
    pub uptime_seconds: u64,
    // Database connectivity probe result.
    pub database: DatabaseHealth,
    // Media root directory accessibility.
    pub filesystem: FilesystemHealth,
    // Cache statistics snapshot.
    pub cache: CacheHealth,
    // Background job queue counters.
    pub jobs: JobsHealth,
}
/// Background job queue counters, taken from `JobQueue::stats()`.
#[derive(Debug, Serialize, Deserialize)]
pub struct JobsHealth {
    // Jobs waiting to be picked up.
    pub pending: usize,
    // Jobs currently executing.
    pub running: usize,
}
/// Detailed health check for monitoring dashboards: database probe,
/// filesystem roots, cache statistics, and job queue counters, all in one
/// `200 OK` response whose `status` field is "ok" or "degraded".
pub async fn health_detailed(State(state): State<AppState>) -> Json<DetailedHealthResponse> {
    // Probe the database with a cheap count query and time the round trip.
    let probe_start = Instant::now();
    let (db_status, media_count) = match state.storage.count_media().await {
        Ok(n) => ("ok".to_string(), Some(n)),
        Err(e) => (format!("error: {}", e), None),
    };
    let db_latency = probe_start.elapsed().as_millis() as u64;

    // Verify that every configured media root still exists on disk.
    let roots = state.storage.list_root_dirs().await.unwrap_or_default();
    let roots_accessible = roots.iter().filter(|r| r.exists()).count();
    let all_roots_ok = roots_accessible == roots.len();

    // Informational counters; these never affect the overall status.
    let cache_stats = state.cache.stats();
    let job_stats = state.job_queue.stats().await;

    let overall_status = if db_status == "ok" && all_roots_ok {
        "ok"
    } else {
        "degraded"
    };

    Json(DetailedHealthResponse {
        status: overall_status.to_string(),
        version: env!("CARGO_PKG_VERSION").to_string(),
        // Server start time is not tracked yet, so uptime reports 0.
        uptime_seconds: 0,
        database: DatabaseHealth {
            status: db_status,
            latency_ms: db_latency,
            media_count,
        },
        filesystem: FilesystemHealth {
            status: if all_roots_ok { "ok" } else { "degraded" }.to_string(),
            roots_configured: roots.len(),
            roots_accessible,
        },
        cache: CacheHealth {
            hit_rate: cache_stats.overall_hit_rate(),
            total_entries: cache_stats.total_entries(),
            responses_size: cache_stats.responses.size,
            queries_size: cache_stats.queries.size,
            media_size: cache_stats.media.size,
        },
        jobs: JobsHealth {
            pending: job_stats.pending,
            running: job_stats.running,
        },
    })
}

View file

@ -12,7 +12,7 @@ use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{
AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig,
JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType,
StorageConfig, ThumbnailConfig, TranscodingConfig, UiConfig, UserAccount, UserRole,
StorageConfig, ThumbnailConfig, TlsConfig, TranscodingConfig, UiConfig, UserAccount, UserRole,
WebhookConfig,
};
use pinakes_core::jobs::JobQueue;
@ -112,6 +112,7 @@ fn default_config() -> Config {
host: "127.0.0.1".to_string(),
port: 3000,
api_key: None,
tls: TlsConfig::default(),
},
ui: UiConfig::default(),
accounts: AccountsConfig::default(),

View file

@ -12,7 +12,7 @@ use pinakes_core::cache::CacheLayer;
use pinakes_core::config::{
AccountsConfig, AnalyticsConfig, CloudConfig, Config, DirectoryConfig, EnrichmentConfig,
JobsConfig, PluginsConfig, ScanningConfig, ServerConfig, SqliteConfig, StorageBackendType,
StorageConfig, ThumbnailConfig, TranscodingConfig, UiConfig, WebhookConfig,
StorageConfig, ThumbnailConfig, TlsConfig, TranscodingConfig, UiConfig, WebhookConfig,
};
use pinakes_core::jobs::JobQueue;
use pinakes_core::plugin::PluginManager;
@ -77,6 +77,7 @@ async fn setup_app_with_plugins() -> (axum::Router, Arc<PluginManager>, tempfile
host: "127.0.0.1".to_string(),
port: 3000,
api_key: None,
tls: TlsConfig::default(),
},
ui: UiConfig::default(),
accounts: AccountsConfig::default(),

View file

@ -1,3 +1,4 @@
use std::collections::HashSet;
use std::time::Duration;
use anyhow::Result;
@ -53,6 +54,9 @@ pub struct AppState {
pub page_size: u64,
pub total_media_count: u64,
pub server_url: String,
// Multi-select support
pub selected_items: HashSet<String>,
pub selection_mode: bool,
// Duplicates view
pub duplicate_groups: Vec<crate::client::DuplicateGroupResponse>,
pub duplicates_selected: Option<usize>,
@ -131,6 +135,9 @@ impl AppState {
page_size: 50,
total_media_count: 0,
server_url: server_url.to_string(),
// Multi-select
selected_items: HashSet::new(),
selection_mode: false,
}
}
}
@ -1156,6 +1163,154 @@ async fn handle_action(
state.current_view = View::Detail;
}
}
Action::ToggleSelection => {
// Toggle selection of current item
let item_id = match state.current_view {
View::Search => state
.search_selected
.and_then(|i| state.search_results.get(i))
.map(|m| m.id.clone()),
View::Library => state
.selected_index
.and_then(|i| state.media_list.get(i))
.map(|m| m.id.clone()),
_ => None,
};
if let Some(id) = item_id {
if state.selected_items.contains(&id) {
state.selected_items.remove(&id);
} else {
state.selected_items.insert(id);
}
let count = state.selected_items.len();
state.status_message = Some(format!("{} item(s) selected", count));
}
}
Action::SelectAll => {
// Select all items in current view
let items: Vec<String> = match state.current_view {
View::Search => state.search_results.iter().map(|m| m.id.clone()).collect(),
View::Library => state.media_list.iter().map(|m| m.id.clone()).collect(),
_ => Vec::new(),
};
for id in items {
state.selected_items.insert(id);
}
let count = state.selected_items.len();
state.status_message = Some(format!("{} item(s) selected", count));
}
Action::ClearSelection => {
state.selected_items.clear();
state.selection_mode = false;
state.status_message = Some("Selection cleared".into());
}
Action::ToggleSelectionMode => {
state.selection_mode = !state.selection_mode;
if state.selection_mode {
state.status_message =
Some("Selection mode: ON (Space to toggle, u to clear)".into());
} else {
state.status_message = Some("Selection mode: OFF".into());
}
}
Action::BatchDelete => {
if state.selected_items.is_empty() {
state.status_message = Some("No items selected".into());
} else {
let count = state.selected_items.len();
let ids: Vec<String> = state.selected_items.iter().cloned().collect();
state.status_message = Some(format!("Deleting {} item(s)...", count));
let client = client.clone();
let tx = event_sender.clone();
let page_offset = state.page_offset;
let page_size = state.page_size;
tokio::spawn(async move {
let mut deleted = 0;
let mut errors = Vec::new();
for id in &ids {
match client.delete_media(id).await {
Ok(_) => deleted += 1,
Err(e) => errors.push(format!("{}: {}", id, e)),
}
}
// Refresh the media list
if let Ok(items) = client.list_media(page_offset, page_size).await {
let _ = tx.send(AppEvent::ApiResult(ApiResult::MediaList(items)));
}
if errors.is_empty() {
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!(
"Deleted {} item(s)",
deleted
))));
} else {
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!(
"Deleted {} item(s), {} error(s)",
deleted,
errors.len()
))));
}
});
state.selected_items.clear();
}
}
Action::BatchTag => {
if state.selected_items.is_empty() {
state.status_message = Some("No items selected".into());
} else if state.all_tags.is_empty() {
// Load tags first
match client.list_tags().await {
Ok(tags) => {
state.all_tags = tags;
if state.all_tags.is_empty() {
state.status_message =
Some("No tags available. Create a tag first.".into());
} else {
state.tag_selected = Some(0);
state.status_message = Some(format!(
"{} item(s) selected. Press +/- to tag/untag with selected tag.",
state.selected_items.len()
));
}
}
Err(e) => state.status_message = Some(format!("Failed to load tags: {e}")),
}
} else if let Some(tag_idx) = state.tag_selected
&& let Some(tag) = state.all_tags.get(tag_idx)
{
let count = state.selected_items.len();
let ids: Vec<String> = state.selected_items.iter().cloned().collect();
let tag_id = tag.id.clone();
let tag_name = tag.name.clone();
state.status_message =
Some(format!("Tagging {} item(s) with '{}'...", count, tag_name));
let client = client.clone();
let tx = event_sender.clone();
tokio::spawn(async move {
let mut tagged = 0;
let mut errors = Vec::new();
for id in &ids {
match client.tag_media(id, &tag_id).await {
Ok(_) => tagged += 1,
Err(e) => errors.push(format!("{}: {}", id, e)),
}
}
if errors.is_empty() {
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!(
"Tagged {} item(s) with '{}'",
tagged, tag_name
))));
} else {
let _ = tx.send(AppEvent::ApiResult(ApiResult::Error(format!(
"Tagged {} item(s), {} error(s)",
tagged,
errors.len()
))));
}
});
} else {
state.status_message = Some("Select a tag first (use t to view tags)".into());
}
}
Action::NavigateLeft | Action::NavigateRight | Action::None => {}
}
}

View file

@ -43,6 +43,13 @@ pub enum Action {
Save,
Char(char),
Backspace,
// Multi-select actions
ToggleSelection,
SelectAll,
ClearSelection,
ToggleSelectionMode,
BatchDelete,
BatchTag,
None,
}
@ -87,13 +94,25 @@ pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Ac
_ => Action::TagView,
},
(KeyCode::Char('c'), _) => Action::CollectionView,
// Multi-select: Ctrl+A for SelectAll (must come before plain 'a')
(KeyCode::Char('a'), KeyModifiers::CONTROL) => match current_view {
View::Library | View::Search => Action::SelectAll,
_ => Action::None,
},
(KeyCode::Char('a'), _) => Action::AuditView,
(KeyCode::Char('S'), _) => Action::SettingsView,
(KeyCode::Char('D'), _) => Action::DuplicatesView,
(KeyCode::Char('B'), _) => Action::DatabaseView,
(KeyCode::Char('Q'), _) => Action::QueueView,
(KeyCode::Char('X'), _) => Action::StatisticsView,
(KeyCode::Char('T'), _) => Action::TasksView,
// Use plain D/T for views in non-library contexts, keep for batch ops in library/search
(KeyCode::Char('D'), _) => match current_view {
View::Library | View::Search => Action::BatchDelete,
_ => Action::DuplicatesView,
},
(KeyCode::Char('T'), _) => match current_view {
View::Library | View::Search => Action::BatchTag,
_ => Action::TasksView,
},
// Ctrl+S must come before plain 's' to ensure proper precedence
(KeyCode::Char('s'), KeyModifiers::CONTROL) => match current_view {
View::MetadataEdit => Action::Save,
@ -106,7 +125,7 @@ pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Ac
(KeyCode::Char('-'), _) => Action::UntagMedia,
(KeyCode::Char('v'), _) => match current_view {
View::Database => Action::Vacuum,
_ => Action::None,
_ => Action::ToggleSelectionMode,
},
(KeyCode::Char('x'), _) => match current_view {
View::Tasks => Action::RunNow,
@ -116,6 +135,15 @@ pub fn handle_key(key: KeyEvent, in_input_mode: bool, current_view: &View) -> Ac
(KeyCode::BackTab, _) => Action::PrevTab,
(KeyCode::PageUp, _) => Action::PageUp,
(KeyCode::PageDown, _) => Action::PageDown,
// Multi-select keys
(KeyCode::Char(' '), _) => match current_view {
View::Library | View::Search => Action::ToggleSelection,
_ => Action::None,
},
(KeyCode::Char('u'), _) => match current_view {
View::Library | View::Search => Action::ClearSelection,
_ => Action::None,
},
_ => Action::None,
}
}

View file

@ -8,7 +8,7 @@ use super::{format_duration, format_size, media_type_color};
use crate::app::AppState;
pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
let header = Row::new(vec!["Title / Name", "Type", "Duration", "Year", "Size"]).style(
let header = Row::new(vec!["", "Title / Name", "Type", "Duration", "Year", "Size"]).style(
Style::default()
.fg(Color::Yellow)
.add_modifier(Modifier::BOLD),
@ -19,12 +19,27 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
.iter()
.enumerate()
.map(|(i, item)| {
let style = if Some(i) == state.selected_index {
let is_cursor = Some(i) == state.selected_index;
let is_selected = state.selected_items.contains(&item.id);
let style = if is_cursor {
Style::default().fg(Color::Black).bg(Color::Cyan)
} else if is_selected {
Style::default().fg(Color::Black).bg(Color::Green)
} else {
Style::default()
};
// Selection marker
let marker = if is_selected { "[*]" } else { "[ ]" };
let marker_style = if is_selected {
Style::default()
.fg(Color::Green)
.add_modifier(Modifier::BOLD)
} else {
Style::default().fg(Color::DarkGray)
};
let display_name = item.title.as_deref().unwrap_or(&item.file_name).to_string();
let type_color = media_type_color(&item.media_type);
@ -44,6 +59,7 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
.unwrap_or_else(|| "-".to_string());
Row::new(vec![
Cell::from(Span::styled(marker, marker_style)),
Cell::from(display_name),
type_cell,
Cell::from(duration),
@ -56,16 +72,22 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
let page = (state.page_offset / state.page_size) + 1;
let item_count = state.media_list.len();
let title = format!(" Library (page {page}, {item_count} items) ");
let selected_count = state.selected_items.len();
let title = if selected_count > 0 {
format!(" Library (page {page}, {item_count} items, {selected_count} selected) ")
} else {
format!(" Library (page {page}, {item_count} items) ")
};
let table = Table::new(
rows,
[
Constraint::Percentage(35),
Constraint::Percentage(20),
Constraint::Percentage(15),
Constraint::Percentage(10),
Constraint::Percentage(20),
Constraint::Length(3), // Selection marker
Constraint::Percentage(33), // Title
Constraint::Percentage(18), // Type
Constraint::Percentage(13), // Duration
Constraint::Percentage(8), // Year
Constraint::Percentage(18), // Size
],
)
.header(header)

View file

@ -28,7 +28,7 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
f.render_widget(input, chunks[0]);
// Results
let header = Row::new(vec!["Name", "Type", "Artist", "Size"]).style(
let header = Row::new(vec!["", "Name", "Type", "Artist", "Size"]).style(
Style::default()
.fg(Color::Yellow)
.add_modifier(Modifier::BOLD),
@ -39,12 +39,27 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
.iter()
.enumerate()
.map(|(i, item)| {
let style = if Some(i) == state.search_selected {
let is_cursor = Some(i) == state.search_selected;
let is_selected = state.selected_items.contains(&item.id);
let style = if is_cursor {
Style::default().fg(Color::Black).bg(Color::Cyan)
} else if is_selected {
Style::default().fg(Color::Black).bg(Color::Green)
} else {
Style::default()
};
// Selection marker
let marker = if is_selected { "[*]" } else { "[ ]" };
let marker_style = if is_selected {
Style::default()
.fg(Color::Green)
.add_modifier(Modifier::BOLD)
} else {
Style::default().fg(Color::DarkGray)
};
let type_color = media_type_color(&item.media_type);
let type_cell = Cell::from(Span::styled(
item.media_type.clone(),
@ -52,6 +67,7 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
));
Row::new(vec![
Cell::from(Span::styled(marker, marker_style)),
Cell::from(item.file_name.clone()),
type_cell,
Cell::from(item.artist.clone().unwrap_or_default()),
@ -63,15 +79,21 @@ pub fn render(f: &mut Frame, state: &AppState, area: Rect) {
let shown = state.search_results.len();
let total = state.search_total_count;
let results_title = format!(" Results: {shown} shown, {total} total ");
let selected_count = state.selected_items.len();
let results_title = if selected_count > 0 {
format!(" Results: {shown} shown, {total} total, {selected_count} selected ")
} else {
format!(" Results: {shown} shown, {total} total ")
};
let table = Table::new(
rows,
[
Constraint::Percentage(35),
Constraint::Percentage(20),
Constraint::Percentage(25),
Constraint::Percentage(20),
Constraint::Length(3), // Selection marker
Constraint::Percentage(33), // Name
Constraint::Percentage(18), // Type
Constraint::Percentage(23), // Artist
Constraint::Percentage(18), // Size
],
)
.header(header)

View file

@ -16,6 +16,7 @@ tracing-subscriber = { workspace = true }
reqwest = { workspace = true }
dioxus = { workspace = true }
tokio = { workspace = true }
futures = { workspace = true }
rfd = "0.17"
pulldown-cmark = { workspace = true }
gray_matter = { workspace = true }

View file

@ -1,6 +1,8 @@
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use dioxus::prelude::*;
use futures::future::join_all;
use crate::client::*;
use crate::components::{
@ -85,6 +87,9 @@ pub fn App() -> Element {
let mut last_search_query = use_signal(String::new);
let mut last_search_sort = use_signal(|| Option::<String>::None);
// Phase 3.6: Saved searches
let mut saved_searches = use_signal(Vec::<SavedSearchResponse>::new);
// Phase 6.1: Audit pagination & filter
let mut audit_page = use_signal(|| 0u64);
let audit_page_size = use_signal(|| 200u64);
@ -107,8 +112,44 @@ pub fn App() -> Element {
let mut login_loading = use_signal(|| false);
let mut auto_play_media = use_signal(|| false);
// Theme state (Phase 3.3)
let mut current_theme = use_signal(|| "dark".to_string());
let mut system_prefers_dark = use_signal(|| true);
// Detect system color scheme preference
use_effect(move || {
spawn(async move {
// Check system preference using JavaScript
let result =
document::eval(r#"window.matchMedia('(prefers-color-scheme: dark)').matches"#);
if let Ok(val) = result.await {
if let Some(prefers_dark) = val.as_bool() {
system_prefers_dark.set(prefers_dark);
}
}
});
});
// Compute effective theme based on preference
let effective_theme = use_memo(move || {
let theme = current_theme.read().clone();
if theme == "system" {
if *system_prefers_dark.read() {
"dark".to_string()
} else {
"light".to_string()
}
} else {
theme
}
});
// Import state for UI feedback
let mut import_in_progress = use_signal(|| false);
// Extended import state: current file name, queue of pending imports, progress (completed, total)
let mut import_current_file = use_signal(|| Option::<String>::None);
let mut import_queue = use_signal(Vec::<String>::new);
let mut import_progress = use_signal(|| (0usize, 0usize)); // (completed, total)
// Check auth on startup
let client_auth = client.read().clone();
@ -136,6 +177,7 @@ pub fn App() -> Element {
if let Ok(cfg) = client.get_config().await {
auto_play_media.set(cfg.ui.auto_play_media);
sidebar_collapsed.set(cfg.ui.sidebar_collapsed);
current_theme.set(cfg.ui.theme.clone());
if cfg.ui.default_page_size > 0 {
media_page_size.set(cfg.ui.default_page_size as u64);
}
@ -183,6 +225,10 @@ pub fn App() -> Element {
if let Ok(c) = client.list_collections().await {
collections_list.set(c);
}
// Phase 3.6: Load saved searches
if let Ok(ss) = client.list_saved_searches().await {
saved_searches.set(ss);
}
loading.set(false);
});
});
@ -310,14 +356,17 @@ pub fn App() -> Element {
} else {
// Phase 7.1: Keyboard shortcuts
div {
class: "app",
class: if *effective_theme.read() == "light" { "app theme-light" } else { "app" },
tabindex: "0",
onkeydown: {
move |evt: KeyboardEvent| {
let key = evt.key();
let ctrl = evt.modifiers().contains(Modifiers::CONTROL);
let meta = evt.modifiers().contains(Modifiers::META);
let shift = evt.modifiers().contains(Modifiers::SHIFT);
match key {
// Escape - close modal/go back
Key::Escape => {
if *show_help.read() {
show_help.set(false);
@ -325,6 +374,7 @@ pub fn App() -> Element {
current_view.set(View::Library);
}
}
// / or Ctrl+K - focus search
Key::Character(ref c) if c == "/" && !ctrl && !meta => {
evt.prevent_default();
current_view.set(View::Search);
@ -333,9 +383,43 @@ pub fn App() -> Element {
evt.prevent_default();
current_view.set(View::Search);
}
// ? - toggle help overlay
Key::Character(ref c) if c == "?" && !ctrl && !meta => {
show_help.toggle();
}
// Ctrl+, - open settings
Key::Character(ref c) if c == "," && (ctrl || meta) => {
evt.prevent_default();
current_view.set(View::Settings);
}
// Number keys 1-6 for quick view switching (without modifiers)
Key::Character(ref c) if c == "1" && !ctrl && !meta && !shift => {
evt.prevent_default();
current_view.set(View::Library);
}
Key::Character(ref c) if c == "2" && !ctrl && !meta && !shift => {
evt.prevent_default();
current_view.set(View::Search);
}
Key::Character(ref c) if c == "3" && !ctrl && !meta && !shift => {
evt.prevent_default();
current_view.set(View::Import);
}
Key::Character(ref c) if c == "4" && !ctrl && !meta && !shift => {
evt.prevent_default();
current_view.set(View::Tags);
}
Key::Character(ref c) if c == "5" && !ctrl && !meta && !shift => {
evt.prevent_default();
current_view.set(View::Collections);
}
Key::Character(ref c) if c == "6" && !ctrl && !meta && !shift => {
evt.prevent_default();
current_view.set(View::Audit);
}
// g then l - go to library (vim-style)
// Could implement g-prefix commands in the future
Key::Character(ref c) if c == "g" && !ctrl && !meta => {}
_ => {}
}
}
@ -492,6 +576,44 @@ pub fn App() -> Element {
div { class: "sidebar-spacer" }
// Show import progress in sidebar when not on import page
if *import_in_progress.read() && *current_view.read() != View::Import {
{
let (completed, total) = *import_progress.read();
let has_progress = total > 0;
let pct = if total > 0 { (completed * 100) / total } else { 0 };
let current = import_current_file.read().clone();
let queue_len = import_queue.read().len();
rsx! {
div { class: "sidebar-import-progress",
div { class: "sidebar-import-header",
div { class: "status-dot checking" }
span {
if has_progress {
"Importing {completed}/{total}"
} else {
"Importing..."
}
}
if queue_len > 0 {
span { class: "import-queue-badge", "+{queue_len}" }
}
}
if let Some(ref file_name) = current {
div { class: "sidebar-import-file", "{file_name}" }
}
div { class: "progress-bar",
if has_progress {
div { class: "progress-fill", style: "width: {pct}%;" }
} else {
div { class: "progress-fill indeterminate" }
}
}
}
}
}
}
// Sidebar collapse toggle
button {
class: "sidebar-toggle",
@ -867,6 +989,62 @@ pub fn App() -> Element {
});
}
},
// Phase 3.6: Saved searches
saved_searches: saved_searches.read().clone(),
on_save_search: {
let client = client.read().clone();
move |(name, query, sort): (String, String, Option<String>)| {
let client = client.clone();
spawn(async move {
match client.create_saved_search(&name, &query, sort.as_deref()).await {
Ok(ss) => {
saved_searches.write().push(ss);
show_toast(format!("Search '{}' saved", name), false);
}
Err(e) => show_toast(format!("Failed to save search: {e}"), true),
}
});
}
},
on_delete_saved_search: {
let client = client.read().clone();
move |id: String| {
let client = client.clone();
spawn(async move {
match client.delete_saved_search(&id).await {
Ok(_) => {
saved_searches.write().retain(|s| s.id != id);
show_toast("Search deleted".into(), false);
}
Err(e) => show_toast(format!("Failed to delete: {e}"), true),
}
});
}
},
on_load_saved_search: {
let client = client.read().clone();
move |ss: SavedSearchResponse| {
let client = client.clone();
let query = ss.query.clone();
let sort = ss.sort_order.clone();
search_page.set(0);
last_search_query.set(query.clone());
last_search_sort.set(sort.clone());
spawn(async move {
loading.set(true);
let offset = 0;
let limit = *search_page_size.read();
match client.search(&query, sort.as_deref(), offset, limit).await {
Ok(resp) => {
search_total.set(resp.total_count);
search_results.set(resp.items);
}
Err(e) => show_toast(format!("Search failed: {e}"), true),
}
loading.set(false);
});
}
},
}
},
View::Detail => {
@ -1225,10 +1403,54 @@ pub fn App() -> Element {
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
move |(path, tag_ids, new_tags, col_id): ImportEvent| {
// Extract file name from path
let file_name = path.rsplit('/').next().unwrap_or(&path).to_string();
// Check if already importing - if so, add to queue
// Extract directory name from path
// Check if already importing - if so, add to queue
if *import_in_progress.read() {
// Get preview files if available for per-file progress
// Use parallel import with per-batch progress
// Show first file in batch as current
// Process batch in parallel
// Update progress after batch
// Fallback: use server-side directory import (no per-file progress)
// Check if already importing - if so, add to queue
// Update progress from scan status
// Check if already importing - if so, add to queue
// Process files in parallel batches for better performance
// Show first file in batch as current
// Process batch in parallel
// Update progress after batch
// Extended import state
import_queue.write().push(file_name);
show_toast("Added to import queue".into(), false);
return;
}
let client = client.clone();
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
import_in_progress.set(true);
import_current_file.set(Some(file_name));
import_progress.set((0, 1));
spawn(async move {
if tag_ids.is_empty() && new_tags.is_empty() && col_id.is_none() {
match client.import_file(&path).await {
@ -1275,6 +1497,8 @@ pub fn App() -> Element {
Err(e) => show_toast(format!("Import failed: {e}"), true),
}
}
import_progress.set((1, 1));
import_current_file.set(None);
import_in_progress.set(false);
});
}
@ -1284,10 +1508,121 @@ pub fn App() -> Element {
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
move |(path, tag_ids, new_tags, col_id): ImportEvent| {
let dir_name = path.rsplit('/').next().unwrap_or(&path).to_string();
if *import_in_progress.read() {
import_queue.write().push(format!("{dir_name}/ (directory)"));
show_toast("Added directory to import queue".into(), false);
return;
}
let files_to_import: Vec<String> = preview_files
.read()
.iter()
.map(|f| f.path.clone())
.collect();
let client = client.clone();
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
import_in_progress.set(true);
if !files_to_import.is_empty() {
let file_count = files_to_import.len();
import_progress.set((0, file_count));
let client = Arc::new(client);
let tag_ids = Arc::new(tag_ids);
let new_tags = Arc::new(new_tags);
let col_id = Arc::new(col_id);
const BATCH_SIZE: usize = 6;
spawn(async move {
let imported = Arc::new(AtomicUsize::new(0));
let duplicates = Arc::new(AtomicUsize::new(0));
let errors = Arc::new(AtomicUsize::new(0));
let completed = Arc::new(AtomicUsize::new(0));
for chunk in files_to_import.chunks(BATCH_SIZE) {
if let Some(first_path) = chunk.first() {
let file_name = first_path
.rsplit('/')
.next()
.unwrap_or(first_path);
import_current_file.set(Some(file_name.to_string()));
}
let futures: Vec<_> = chunk
.iter()
.map(|file_path| {
let client = Arc::clone(&client);
let tag_ids = Arc::clone(&tag_ids);
let new_tags = Arc::clone(&new_tags);
let col_id = Arc::clone(&col_id);
let imported = Arc::clone(&imported);
let duplicates = Arc::clone(&duplicates);
let errors = Arc::clone(&errors);
let completed = Arc::clone(&completed);
let file_path = file_path.clone();
async move {
let result = if tag_ids.is_empty() && new_tags.is_empty()
&& col_id.is_none()
{
client.import_file(&file_path).await
} else {
client
.import_with_options(
&file_path,
&tag_ids,
&new_tags,
col_id.as_deref(),
)
.await
};
match result {
Ok(resp) => {
if resp.was_duplicate {
duplicates.fetch_add(1, Ordering::Relaxed);
} else {
imported.fetch_add(1, Ordering::Relaxed);
}
}
Err(_) => {
errors.fetch_add(1, Ordering::Relaxed);
}
}
completed.fetch_add(1, Ordering::Relaxed);
}
})
.collect();
join_all(futures).await;
let done = completed.load(Ordering::Relaxed);
import_progress.set((done, file_count));
}
let imported = imported.load(Ordering::Relaxed);
let duplicates = duplicates.load(Ordering::Relaxed);
let errors = errors.load(Ordering::Relaxed);
show_toast(
format!(
"Done: {imported} imported, {duplicates} duplicates, {errors} errors",
),
errors > 0,
);
refresh_media();
if !new_tags.is_empty() {
refresh_tags();
}
preview_files.set(Vec::new());
preview_total_size.set(0);
import_progress.set((file_count, file_count));
import_current_file.set(None);
import_in_progress.set(false);
});
} else {
import_current_file.set(Some(format!("{dir_name}/")));
import_progress.set((0, 0));
spawn(async move {
match client
.import_directory(&path, &tag_ids, &new_tags, col_id.as_deref())
@ -1310,19 +1645,32 @@ pub fn App() -> Element {
preview_files.set(Vec::new());
preview_total_size.set(0);
}
Err(e) => show_toast(format!("Directory import failed: {e}"), true),
Err(e) => {
show_toast(format!("Directory import failed: {e}"), true)
}
}
import_current_file.set(None);
import_progress.set((0, 0));
import_in_progress.set(false);
});
}
}
},
on_scan: {
let client = client.read().clone();
let refresh_media = refresh_media.clone();
move |_| {
if *import_in_progress.read() {
import_queue.write().push("Scan roots".to_string());
show_toast("Added scan to import queue".into(), false);
return;
}
let client = client.clone();
let refresh_media = refresh_media.clone();
import_in_progress.set(true);
import_current_file.set(Some("Scanning roots...".to_string()));
import_progress.set((0, 0)); // Will be updated from scan_progress
spawn(async move {
match client.trigger_scan().await {
Ok(_results) => {
@ -1330,6 +1678,23 @@ pub fn App() -> Element {
match client.scan_status().await {
Ok(status) => {
let done = !status.scanning;
import_progress
.set((
status.files_processed as usize,
status.files_found as usize,
));
if status.files_found > 0 {
import_current_file
.set(
Some(
format!(
"Scanning ({}/{})",
status.files_processed,
status.files_found,
),
),
);
}
scan_progress.set(Some(status.clone()));
if done {
let total = status.files_processed;
@ -1348,6 +1713,8 @@ pub fn App() -> Element {
}
Err(e) => show_toast(format!("Scan failed: {e}"), true),
}
import_current_file.set(None);
import_progress.set((0, 0));
import_in_progress.set(false);
});
}
@ -1357,25 +1724,96 @@ pub fn App() -> Element {
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
move |(paths, tag_ids, new_tags, col_id): import::BatchImportEvent| {
let client = client.clone();
let file_count = paths.len();
if *import_in_progress.read() {
import_queue.write().push(format!("{file_count} files (batch)"));
show_toast("Added batch to import queue".into(), false);
return;
}
let client = Arc::new(client.clone());
let refresh_media = refresh_media.clone();
let refresh_tags = refresh_tags.clone();
let file_count = paths.len();
let tag_ids = Arc::new(tag_ids);
let new_tags = Arc::new(new_tags);
let col_id = Arc::new(col_id);
import_in_progress.set(true);
import_progress.set((0, file_count));
const BATCH_SIZE: usize = 6;
spawn(async move {
match client
.batch_import(&paths, &tag_ids, &new_tags, col_id.as_deref())
.await
let imported = Arc::new(AtomicUsize::new(0));
let duplicates = Arc::new(AtomicUsize::new(0));
let errors = Arc::new(AtomicUsize::new(0));
let completed = Arc::new(AtomicUsize::new(0));
for chunk in paths.chunks(BATCH_SIZE) {
if let Some(first_path) = chunk.first() {
let file_name = first_path
.rsplit('/')
.next()
.unwrap_or(first_path);
import_current_file.set(Some(file_name.to_string()));
}
let futures: Vec<_> = chunk
.iter()
.map(|path| {
let client = Arc::clone(&client);
let tag_ids = Arc::clone(&tag_ids);
let new_tags = Arc::clone(&new_tags);
let col_id = Arc::clone(&col_id);
let imported = Arc::clone(&imported);
let duplicates = Arc::clone(&duplicates);
let errors = Arc::clone(&errors);
let completed = Arc::clone(&completed);
let path = path.clone();
async move {
let result = if tag_ids.is_empty() && new_tags.is_empty()
&& col_id.is_none()
{
client.import_file(&path).await
} else {
client
.import_with_options(
&path,
&tag_ids,
&new_tags,
col_id.as_deref(),
)
.await
};
match result {
Ok(resp) => {
if resp.was_duplicate {
duplicates.fetch_add(1, Ordering::Relaxed);
} else {
imported.fetch_add(1, Ordering::Relaxed);
}
}
Err(_) => {
errors.fetch_add(1, Ordering::Relaxed);
}
}
completed.fetch_add(1, Ordering::Relaxed);
}
})
.collect();
join_all(futures).await;
let done = completed.load(Ordering::Relaxed);
import_progress.set((done, file_count));
}
let imported = imported.load(Ordering::Relaxed);
let duplicates = duplicates.load(Ordering::Relaxed);
let errors = errors.load(Ordering::Relaxed);
show_toast(
format!(
"Done: {} imported, {} duplicates, {} errors",
resp.imported,
resp.duplicates,
resp.errors,
"Done: {imported} imported, {duplicates} duplicates, {errors} errors",
),
resp.errors > 0,
errors > 0,
);
refresh_media();
if !new_tags.is_empty() {
@ -1383,14 +1821,8 @@ pub fn App() -> Element {
}
preview_files.set(Vec::new());
preview_total_size.set(0);
}
Err(e) => {
show_toast(
format!("Batch import failed ({file_count} files): {e}"),
true,
)
}
}
import_progress.set((file_count, file_count));
import_current_file.set(None);
import_in_progress.set(false);
});
}
@ -1416,6 +1848,9 @@ pub fn App() -> Element {
},
preview_files: preview_files.read().clone(),
preview_total_size: *preview_total_size.read(),
current_file: import_current_file.read().clone(),
import_queue: import_queue.read().clone(),
import_progress: *import_progress.read(),
}
},
View::Database => {
@ -1620,6 +2055,7 @@ pub fn App() -> Element {
Ok(ui_cfg) => {
auto_play_media.set(ui_cfg.auto_play_media);
sidebar_collapsed.set(ui_cfg.sidebar_collapsed);
current_theme.set(ui_cfg.theme.clone());
if let Ok(cfg) = client.get_config().await {
config_data.set(Some(cfg));
}
@ -1654,6 +2090,7 @@ pub fn App() -> Element {
onclick: move |evt: MouseEvent| evt.stop_propagation(),
h3 { "Keyboard Shortcuts" }
div { class: "help-shortcuts",
h4 { "Navigation" }
div { class: "shortcut-row",
kbd { "Esc" }
span { "Go back / close overlay" }
@ -1664,12 +2101,42 @@ pub fn App() -> Element {
}
div { class: "shortcut-row",
kbd { "Ctrl+K" }
span { "Focus search" }
span { "Focus search (alternative)" }
}
div { class: "shortcut-row",
kbd { "Ctrl+," }
span { "Open settings" }
}
div { class: "shortcut-row",
kbd { "?" }
span { "Toggle this help" }
}
h4 { "Quick Views" }
div { class: "shortcut-row",
kbd { "1" }
span { "Library" }
}
div { class: "shortcut-row",
kbd { "2" }
span { "Search" }
}
div { class: "shortcut-row",
kbd { "3" }
span { "Import" }
}
div { class: "shortcut-row",
kbd { "4" }
span { "Tags" }
}
div { class: "shortcut-row",
kbd { "5" }
span { "Collections" }
}
div { class: "shortcut-row",
kbd { "6" }
span { "Audit Log" }
}
}
button {
class: "help-close",

View file

@ -277,6 +277,22 @@ pub struct DatabaseStatsResponse {
pub backend_name: String,
}
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct SavedSearchResponse {
pub id: String,
pub name: String,
pub query: String,
pub sort_order: Option<String>,
pub created_at: chrono::DateTime<chrono::Utc>,
}
#[derive(Debug, Clone, Serialize)]
pub struct CreateSavedSearchRequest {
pub name: String,
pub query: String,
pub sort_order: Option<String>,
}
#[allow(dead_code)]
impl ApiClient {
pub fn new(base_url: &str, api_key: Option<&str>) -> Self {
@ -1053,6 +1069,50 @@ impl ApiClient {
.await?)
}
// ── Saved Searches ──
/// Fetch every saved search stored on the server.
///
/// Issues `GET /saved-searches` and deserializes the JSON array into
/// [`SavedSearchResponse`] values. Transport failures and non-success
/// HTTP status codes are propagated as errors.
pub async fn list_saved_searches(&self) -> Result<Vec<SavedSearchResponse>> {
    let response = self
        .client
        .get(self.url("/saved-searches"))
        .send()
        .await?
        .error_for_status()?;
    let searches = response.json().await?;
    Ok(searches)
}
/// Persist a new saved search on the server.
///
/// Sends `POST /saved-searches` with `name`, `query`, and the optional
/// `sort_order` as a JSON body, and returns the record the server created.
pub async fn create_saved_search(
    &self,
    name: &str,
    query: &str,
    sort_order: Option<&str>,
) -> Result<SavedSearchResponse> {
    // Build the owned request body from the borrowed arguments.
    let body = CreateSavedSearchRequest {
        name: name.to_owned(),
        query: query.to_owned(),
        sort_order: sort_order.map(str::to_owned),
    };
    let response = self
        .client
        .post(self.url("/saved-searches"))
        .json(&body)
        .send()
        .await?
        .error_for_status()?;
    Ok(response.json().await?)
}
/// Remove a saved search by its server-assigned `id`.
///
/// Issues `DELETE /saved-searches/{id}`; transport errors and non-success
/// HTTP status codes are propagated to the caller.
pub async fn delete_saved_search(&self, id: &str) -> Result<()> {
    let endpoint = self.url(&format!("/saved-searches/{id}"));
    self.client
        .delete(endpoint)
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
pub fn set_token(&mut self, token: &str) {
let mut headers = header::HeaderMap::new();
if let Ok(val) = header::HeaderValue::from_str(&format!("Bearer {token}")) {

View file

@ -3,6 +3,7 @@ use dioxus::prelude::*;
use super::image_viewer::ImageViewer;
use super::markdown_viewer::MarkdownViewer;
use super::media_player::MediaPlayer;
use super::pdf_viewer::PdfViewer;
use super::utils::{format_duration, format_size, media_category, type_badge_class};
use crate::client::{MediaResponse, MediaUpdateEvent, TagResponse};
@ -262,6 +263,10 @@ pub fn Detail(
media_type: media.media_type.clone(),
}
} else if category == "document" {
if media.media_type == "pdf" {
PdfViewer { src: stream_url.clone() }
} else {
// EPUB and other document types
div { class: "detail-no-preview",
p { class: "text-muted", "Preview not available for this document type." }
button {
@ -273,6 +278,7 @@ pub fn Detail(
"Open Externally"
}
}
}
} else if has_thumbnail {
img {
src: "{thumbnail_url}",

View file

@ -69,6 +69,8 @@ pub fn Duplicates(
rsx! {
div { class: "duplicate-group", key: "{hash}",
button {
class: "duplicate-group-header",
onclick: move |_| {
@ -109,8 +111,6 @@ pub fn Duplicates(
div { class: "dup-thumb",
if has_thumb {
img {

View file

@ -23,6 +23,10 @@ pub fn Import(
preview_total_size: u64,
scan_progress: Option<ScanStatusResponse>,
#[props(default = false)] is_importing: bool,
// Extended import state
#[props(default)] current_file: Option<String>,
#[props(default)] import_queue: Vec<String>,
#[props(default = (0, 0))] import_progress: (usize, usize),
) -> Element {
let mut import_mode = use_signal(|| 0usize);
let mut file_path = use_signal(String::new);
@ -47,16 +51,48 @@ pub fn Import(
rsx! {
// Import status panel (shown when import is in progress)
if is_importing {
{
let (completed, total) = import_progress;
let has_progress = total > 0;
let pct = if total > 0 { (completed * 100) / total } else { 0 };
let queue_count = import_queue.len();
rsx! {
div { class: "import-status-panel",
div { class: "import-status-header",
div { class: "status-dot checking" }
span { "Import in progress..." }
span {
if has_progress {
"Importing {completed}/{total}..."
} else {
"Import in progress..."
}
}
}
// Show current file being imported
if let Some(ref file_name) = current_file {
div { class: "import-current-file",
span { class: "import-file-label", "Current: " }
span { class: "import-file-name", "{file_name}" }
}
}
// Show queue indicator
if queue_count > 0 {
div { class: "import-queue-indicator",
span { class: "import-queue-badge", "{queue_count}" }
span { class: "import-queue-text", " item(s) queued" }
}
}
div { class: "progress-bar",
if has_progress {
div { class: "progress-fill", style: "width: {pct}%;" }
} else {
div { class: "progress-fill indeterminate" }
}
}
}
}
}
}
// Tab bar
div { class: "import-tabs",
@ -229,13 +265,13 @@ pub fn Import(
// Recursive toggle
div { class: "form-group",
label { class: "form-row",
label { class: "checkbox-label",
input {
r#type: "checkbox",
checked: *recursive.read(),
onchange: move |_| recursive.toggle(),
}
span { style: "margin-left: 6px;", "Recursive (include subdirectories)" }
span { "Recursive (include subdirectories)" }
}
}
}
@ -299,9 +335,12 @@ pub fn Import(
}
}
div { class: "filter-bar",
div { class: "flex-row mb-8",
label {
div { class: "filter-row",
span { class: "filter-label", "Types" }
label { class: if types_snapshot[0] { "filter-chip active" } else { "filter-chip" },
input {
r#type: "checkbox",
checked: types_snapshot[0],
@ -311,9 +350,9 @@ pub fn Import(
filter_types.set(types);
},
}
" Audio"
"Audio"
}
label {
label { class: if types_snapshot[1] { "filter-chip active" } else { "filter-chip" },
input {
r#type: "checkbox",
checked: types_snapshot[1],
@ -323,9 +362,9 @@ pub fn Import(
filter_types.set(types);
},
}
" Video"
"Video"
}
label {
label { class: if types_snapshot[2] { "filter-chip active" } else { "filter-chip" },
input {
r#type: "checkbox",
checked: types_snapshot[2],
@ -335,9 +374,9 @@ pub fn Import(
filter_types.set(types);
},
}
" Image"
"Image"
}
label {
label { class: if types_snapshot[3] { "filter-chip active" } else { "filter-chip" },
input {
r#type: "checkbox",
checked: types_snapshot[3],
@ -347,9 +386,9 @@ pub fn Import(
filter_types.set(types);
},
}
" Document"
"Document"
}
label {
label { class: if types_snapshot[4] { "filter-chip active" } else { "filter-chip" },
input {
r#type: "checkbox",
checked: types_snapshot[4],
@ -359,9 +398,9 @@ pub fn Import(
filter_types.set(types);
},
}
" Text"
"Text"
}
label {
label { class: if types_snapshot[5] { "filter-chip active" } else { "filter-chip" },
input {
r#type: "checkbox",
checked: types_snapshot[5],
@ -371,14 +410,16 @@ pub fn Import(
filter_types.set(types);
},
}
" Other"
"Other"
}
}
div { class: "flex-row",
label { class: "form-label", "Min size (MB): " }
div { class: "size-filters",
div { class: "size-filter-group",
label { "Min size" }
input {
r#type: "number",
value: "{min / (1024 * 1024)}",
placeholder: "MB",
value: if min > 0 { format!("{}", min / (1024 * 1024)) } else { String::new() },
oninput: move |e| {
if let Ok(mb) = e.value().parse::<u64>() {
filter_min_size.set(mb * 1024 * 1024);
@ -387,10 +428,14 @@ pub fn Import(
}
},
}
label { class: "form-label", "Max size (MB): " }
span { class: "text-muted text-sm", "MB" }
}
div { class: "size-filter-group",
label { "Max size" }
input {
r#type: "number",
value: "{max / (1024 * 1024)}",
placeholder: "MB",
value: if max > 0 { format!("{}", max / (1024 * 1024)) } else { String::new() },
oninput: move |e| {
if let Ok(mb) = e.value().parse::<u64>() {
filter_max_size.set(mb * 1024 * 1024);
@ -399,6 +444,8 @@ pub fn Import(
}
},
}
span { class: "text-muted text-sm", "MB" }
}
}
}
@ -565,9 +612,15 @@ pub fn Import(
}
// Import entire directory
{
let has_dir = !dir_path.read().is_empty();
let has_preview = !preview_files.is_empty();
let file_count = preview_files.len();
rsx! {
button {
class: "btn btn-secondary",
disabled: is_importing,
class: if has_dir { "btn btn-secondary" } else { "btn btn-secondary btn-disabled-hint" },
disabled: is_importing || !has_dir,
title: if !has_dir { "Select a directory first" } else { "" },
onclick: {
let mut dir_path = dir_path;
let mut selected_tags = selected_tags;
@ -591,8 +644,14 @@ pub fn Import(
},
if is_importing {
"Importing..."
} else {
} else if has_preview {
"Import All ({file_count} files)"
} else if has_dir {
"Import Entire Directory"
} else {
"Select Directory First"
}
}
}
}
}

View file

@ -595,8 +595,6 @@ pub fn Library(
let badge_class = type_badge_class(&item.media_type);
let is_checked = current_selection.contains(&id);
// Build a list of all visible IDs for shift+click range selection.
// Shift+click: select range from last_click_index to current idx.
@ -609,6 +607,11 @@ pub fn Library(
// Thumbnail with CSS fallback: icon always
// rendered, img overlays when available.
let card_click = {
let id = item.id.clone();
move |_| on_select.call(id.clone())
@ -616,8 +619,6 @@ pub fn Library(
let visible_ids: Vec<String> = filtered_media
.iter()
.map(|m| m.id.clone())
.collect();
@ -665,6 +666,8 @@ pub fn Library(
rsx! {
div { key: "{item.id}", class: "{card_class}", onclick: card_click,
div { class: "card-checkbox",
input { r#type: "checkbox", checked: is_checked, onclick: toggle_id }
}

View file

@ -12,6 +12,7 @@ pub mod login;
pub mod markdown_viewer;
pub mod media_player;
pub mod pagination;
pub mod pdf_viewer;
pub mod search;
pub mod settings;
pub mod statistics;

View file

@ -0,0 +1,112 @@
use dioxus::prelude::*;
/// Embedded PDF viewer with a minimal zoom toolbar.
///
/// Renders the PDF at `src` through an HTML `object` element, relying on
/// the webview/browser's native PDF support. Zoom and page are passed via
/// the URL fragment (`#zoom=..&page=..`). A loading overlay is shown until
/// the object fires `onload`; on `onerror` (or when embedding is not
/// supported) a download link is offered instead.
#[component]
pub fn PdfViewer(
    // Streamable URL of the PDF document.
    src: String,
    // 1-based page appended to the URL fragment.
    #[props(default = 1)] initial_page: usize,
    // Zoom percentage; the toolbar clamps it to the 50..=200 range.
    #[props(default = 100)] initial_zoom: usize,
) -> Element {
    // NOTE(review): no visible control mutates `current_page` — page
    // navigation buttons would require a `mut` signal here.
    let current_page = use_signal(|| initial_page);
    let mut zoom_level = use_signal(|| initial_zoom);
    // True until the <object> reports onload/onerror.
    let mut loading = use_signal(|| true);
    // Human-readable error shown when embedding fails; `None` when healthy.
    let mut error = use_signal(|| Option::<String>::None);
    // Snapshot current values for this render (toolbar labels + data URL).
    let zoom = *zoom_level.read();
    let page = *current_page.read();
    rsx! {
        div { class: "pdf-viewer",
            // Toolbar: zoom out / current zoom / zoom in, plus fit-to-width.
            div { class: "pdf-toolbar",
                div { class: "pdf-toolbar-group",
                    button {
                        class: "pdf-toolbar-btn",
                        title: "Zoom out",
                        // Lower bound of the zoom range.
                        disabled: zoom <= 50,
                        onclick: move |_| {
                            // Step down by 25%, never below 50%.
                            let new_zoom = (*zoom_level.read()).saturating_sub(25).max(50);
                            zoom_level.set(new_zoom);
                        },
                        "\u{2212}" // minus
                    }
                    span { class: "pdf-zoom-label", "{zoom}%" }
                    button {
                        class: "pdf-toolbar-btn",
                        title: "Zoom in",
                        // Upper bound of the zoom range.
                        disabled: zoom >= 200,
                        onclick: move |_| {
                            // Step up by 25%, never above 200%.
                            let new_zoom = (*zoom_level.read() + 25).min(200);
                            zoom_level.set(new_zoom);
                        },
                        "+" // plus
                    }
                }
                div { class: "pdf-toolbar-group",
                    button {
                        class: "pdf-toolbar-btn",
                        title: "Fit to width",
                        // "Fit" simply resets zoom to 100%.
                        onclick: move |_| zoom_level.set(100),
                        "\u{2194}" // left-right arrow
                    }
                }
            }
            // PDF embed container: overlays (loading/error) sit above the object.
            div { class: "pdf-container",
                if *loading.read() {
                    div { class: "pdf-loading",
                        div { class: "spinner" }
                        span { "Loading PDF..." }
                    }
                }
                if let Some(ref err) = *error.read() {
                    div { class: "pdf-error",
                        p { "{err}" }
                        a {
                            href: "{src}",
                            target: "_blank",
                            class: "btn btn-primary",
                            "Download PDF"
                        }
                    }
                }
                // Use object/embed for PDF rendering.
                // The webview should handle PDF rendering natively.
                // NOTE(review): assumes the webview fires onload/onerror for
                // <object> — if it never does, the loading overlay persists;
                // confirm on the target platforms.
                object {
                    class: "pdf-object",
                    r#type: "application/pdf",
                    // Zoom/page hints travel in the URL fragment.
                    data: "{src}#zoom={zoom}&page={page}",
                    width: "100%",
                    height: "100%",
                    onload: move |_| {
                        loading.set(false);
                        error.set(None);
                    },
                    onerror: move |_| {
                        loading.set(false);
                        error
                            .set(
                                Some(
                                    "Unable to display PDF. Your browser may not support embedded PDF viewing."
                                        .to_string(),
                                ),
                            );
                    },
                    // Fallback content rendered when <object> is unsupported.
                    div { class: "pdf-fallback",
                        p { "PDF preview is not available in this browser." }
                        a {
                            href: "{src}",
                            target: "_blank",
                            class: "btn btn-primary",
                            "Download PDF"
                        }
                    }
                }
            }
        }
    }
}

View file

@ -2,7 +2,7 @@ use dioxus::prelude::*;
use super::pagination::Pagination as PaginationControls;
use super::utils::{format_size, type_badge_class, type_icon};
use crate::client::MediaResponse;
use crate::client::{MediaResponse, SavedSearchResponse};
#[component]
pub fn Search(
@ -14,10 +14,17 @@ pub fn Search(
on_select: EventHandler<String>,
on_page_change: EventHandler<u64>,
server_url: String,
#[props(default)] saved_searches: Vec<SavedSearchResponse>,
#[props(default)] on_save_search: Option<EventHandler<(String, String, Option<String>)>>,
#[props(default)] on_delete_saved_search: Option<EventHandler<String>>,
#[props(default)] on_load_saved_search: Option<EventHandler<SavedSearchResponse>>,
) -> Element {
let mut query = use_signal(String::new);
let mut sort_by = use_signal(|| String::from("relevance"));
let mut show_help = use_signal(|| false);
let mut show_save_dialog = use_signal(|| false);
let mut save_name = use_signal(String::new);
let mut show_saved_list = use_signal(|| false);
// 0 = table, 1 = grid
let mut view_mode = use_signal(|| 0u8);
@ -87,6 +94,23 @@ pub fn Search(
button { class: "btn btn-primary", onclick: do_search, "Search" }
button { class: "btn btn-ghost", onclick: toggle_help, "Syntax Help" }
// Save/Load search buttons
if on_save_search.is_some() {
button {
class: "btn btn-secondary",
disabled: query.read().is_empty(),
onclick: move |_| show_save_dialog.set(true),
"Save"
}
}
if !saved_searches.is_empty() {
button {
class: "btn btn-ghost",
onclick: move |_| show_saved_list.toggle(),
"Saved ({saved_searches.len()})"
}
}
// View mode toggle
div { class: "view-toggle",
button {
@ -148,6 +172,147 @@ pub fn Search(
}
}
// Save search dialog
if *show_save_dialog.read() {
div {
class: "modal-overlay",
onclick: move |_| show_save_dialog.set(false),
div {
class: "modal-content",
onclick: move |evt: MouseEvent| evt.stop_propagation(),
h3 { "Save Search" }
div { class: "form-field",
label { "Name" }
input {
r#type: "text",
placeholder: "Enter a name for this search...",
value: "{save_name}",
oninput: move |e| save_name.set(e.value()),
onkeypress: {
let query = query.read().clone();
let sort = sort_by.read().clone();
let handler = on_save_search;
move |e: KeyboardEvent| {
if e.key() == Key::Enter {
let name = save_name.read().clone();
if !name.is_empty() {
let sort_opt = if sort == "relevance" {
None
} else {
Some(sort.clone())
};
if let Some(ref h) = handler {
h.call((name, query.clone(), sort_opt));
}
show_save_dialog.set(false);
save_name.set(String::new());
}
}
}
},
}
}
p { class: "text-muted text-sm", "Query: {query}" }
div { class: "modal-actions",
button {
class: "btn btn-ghost",
onclick: move |_| {
show_save_dialog.set(false);
save_name.set(String::new());
},
"Cancel"
}
button {
class: "btn btn-primary",
disabled: save_name.read().is_empty(),
onclick: {
let query_val = query.read().clone();
let sort_val = sort_by.read().clone();
let handler = on_save_search;
move |_| {
let name = save_name.read().clone();
if !name.is_empty() {
let sort_opt = if sort_val == "relevance" {
None
} else {
Some(sort_val.clone())
};
if let Some(ref h) = handler {
h.call((name, query_val.clone(), sort_opt));
}
show_save_dialog.set(false);
save_name.set(String::new());
}
}
},
"Save"
}
}
}
}
}
// Saved searches list
if *show_saved_list.read() && !saved_searches.is_empty() {
div { class: "card mb-16",
div { class: "card-header",
h4 { "Saved Searches" }
button {
class: "btn btn-ghost btn-sm",
onclick: move |_| show_saved_list.set(false),
"Close"
}
}
div { class: "saved-searches-list",
for search in saved_searches.iter() {
{
let search_clone = search.clone();
let id_for_delete = search.id.clone();
let load_handler = on_load_saved_search;
let delete_handler = on_delete_saved_search;
rsx! {
div { class: "saved-search-item", key: "{search.id}",
div {
class: "saved-search-info",
onclick: {
let sc = search_clone.clone();
move |_| {
if let Some(ref h) = load_handler {
h.call(sc.clone());
}
query.set(sc.query.clone());
if let Some(ref s) = sc.sort_order {
sort_by.set(s.clone());
} else {
sort_by.set("relevance".to_string());
}
show_saved_list.set(false);
}
},
span { class: "saved-search-name", "{search.name}" }
span { class: "saved-search-query text-muted", "{search.query}" }
}
button {
class: "btn btn-danger btn-sm",
onclick: {
let id = id_for_delete.clone();
move |evt: MouseEvent| {
evt.stop_propagation();
if let Some(ref h) = delete_handler {
h.call(id.clone());
}
}
},
"Delete"
}
}
}
}
}
}
}
}
p { class: "text-muted text-sm mb-8", "Results: {total_count}" }
if results.is_empty() && query.read().is_empty() {
@ -190,6 +355,8 @@ pub fn Search(
rsx! {
div { key: "{item.id}", class: "media-card", onclick: card_click,
div { class: "card-thumbnail",

View file

@ -419,6 +419,7 @@ pub fn Settings(
},
option { value: "dark", "Dark" }
option { value: "light", "Light" }
option { value: "system", "System" }
}
}

View file

@ -66,7 +66,6 @@ pub fn Statistics(
// Media by Type
// Storage by Type
// Top Tags
@ -74,6 +73,12 @@ pub fn Statistics(
// Top Collections
// Date Range
if !s.media_by_type.is_empty() {
div { class: "card mt-16",
h4 { class: "card-title", "Media by Type" }

View file

@ -137,6 +137,8 @@ pub fn Tags(
if !children.is_empty() {
div {
class: "tag-children",
style: "margin-left: 16px; margin-top: 4px;",
for child in children.iter() {

View file

@ -81,13 +81,25 @@ body {
.sidebar.collapsed .sidebar-header .logo,
.sidebar.collapsed .sidebar-header .version,
.sidebar.collapsed .nav-badge { display: none; }
.sidebar.collapsed .nav-item { justify-content: center; padding: 8px; border-left: none; }
.sidebar.collapsed .nav-item { justify-content: center; padding: 8px; border-left: none; border-radius: var(--radius-sm); }
.sidebar.collapsed .nav-item.active { border-left: none; }
.sidebar.collapsed .nav-icon { width: auto; margin: 0; }
.sidebar.collapsed .sidebar-header { padding: 12px 8px; justify-content: center; }
.sidebar.collapsed .nav-section { padding: 0 4px; }
.sidebar.collapsed .sidebar-footer { padding: 8px 4px; }
/* Nav item text - hide when collapsed */
/* Nav item text - hide when collapsed, properly handle overflow when expanded */
.nav-item-text {
flex: 1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
min-width: 0;
}
/* When sidebar is expanded, allow text to show fully */
.sidebar:not(.collapsed) .nav-item-text {
overflow: visible;
}
.sidebar.collapsed .nav-item-text { display: none; }
@ -179,8 +191,14 @@ body {
.sidebar-footer {
padding: 12px;
border-top: 1px solid var(--border-subtle);
overflow: visible;
min-width: 0;
}
/* Hide footer content in collapsed sidebar */
.sidebar.collapsed .sidebar-footer .status-text { display: none; }
.sidebar.collapsed .sidebar-footer .user-info { justify-content: center; }
/* ── Main ── */
.main {
flex: 1;
@ -747,10 +765,86 @@ input[type="text"]:focus, textarea:focus, select:focus {
/* ── Checkbox ── */
input[type="checkbox"] {
accent-color: var(--accent);
width: 14px;
height: 14px;
appearance: none;
-webkit-appearance: none;
width: 16px;
height: 16px;
border: 1px solid var(--border-strong);
border-radius: 3px;
background: var(--bg-2);
cursor: pointer;
position: relative;
flex-shrink: 0;
transition: all 0.15s ease;
}
input[type="checkbox"]:hover {
border-color: var(--accent);
background: var(--bg-3);
}
input[type="checkbox"]:checked {
background: var(--accent);
border-color: var(--accent);
}
input[type="checkbox"]:checked::after {
content: "";
position: absolute;
left: 5px;
top: 2px;
width: 4px;
height: 8px;
border: solid var(--bg-0);
border-width: 0 2px 2px 0;
transform: rotate(45deg);
}
input[type="checkbox"]:focus-visible {
outline: 2px solid var(--accent);
outline-offset: 2px;
}
/* Checkbox with label */
.checkbox-label {
display: inline-flex;
align-items: center;
gap: 8px;
cursor: pointer;
font-size: 13px;
color: var(--text-1);
user-select: none;
}
.checkbox-label:hover {
color: var(--text-0);
}
.checkbox-label input[type="checkbox"] {
margin: 0;
}
/* Number input */
input[type="number"] {
width: 80px;
padding: 6px 8px;
background: var(--bg-2);
border: 1px solid var(--border);
border-radius: var(--radius-sm);
color: var(--text-0);
font-size: 12px;
-moz-appearance: textfield;
}
input[type="number"]::-webkit-outer-spin-button,
input[type="number"]::-webkit-inner-spin-button {
-webkit-appearance: none;
margin: 0;
}
input[type="number"]:focus {
outline: none;
border-color: var(--accent);
}
/* ── Select ── */
@ -784,6 +878,8 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
gap: 6px;
font-size: 11px;
font-weight: 500;
min-width: 0;
overflow: visible;
}
.status-dot {
@ -802,7 +898,18 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
50% { opacity: 0.3; }
}
.status-text { color: var(--text-2); }
.status-text {
color: var(--text-2);
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
min-width: 0;
}
/* Ensure status text is visible in expanded sidebar */
.sidebar:not(.collapsed) .status-text {
overflow: visible;
}
/* ── Modal ── */
.modal-overlay {
@ -850,6 +957,61 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
gap: 6px;
}
/* ── Saved Searches ── */
.saved-searches-list {
display: flex;
flex-direction: column;
gap: 4px;
max-height: 300px;
overflow-y: auto;
}
.saved-search-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 8px 12px;
background: var(--bg-1);
border-radius: var(--radius-sm);
cursor: pointer;
transition: background 0.15s ease;
}
.saved-search-item:hover {
background: var(--bg-2);
}
.saved-search-info {
display: flex;
flex-direction: column;
gap: 2px;
flex: 1;
min-width: 0;
}
.saved-search-name {
font-weight: 500;
color: var(--text-0);
}
.saved-search-query {
font-size: 11px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.card-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 12px;
}
.card-header h4 {
margin: 0;
}
/* ── Offline banner ── */
.offline-banner {
background: rgba(228, 88, 88, 0.06);
@ -881,15 +1043,94 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
/* ── Filter bar ── */
.filter-bar {
display: flex;
flex-wrap: wrap;
align-items: center;
flex-direction: column;
gap: 12px;
padding: 8px 12px;
padding: 12px;
background: var(--bg-0);
border: 1px solid var(--border-subtle);
border-radius: var(--radius-sm);
margin-bottom: 8px;
font-size: 12px;
border-radius: var(--radius);
margin-bottom: 12px;
}
.filter-bar .filter-row {
display: flex;
flex-wrap: wrap;
align-items: center;
gap: 8px;
}
.filter-bar .filter-label {
font-size: 11px;
font-weight: 500;
color: var(--text-2);
text-transform: uppercase;
letter-spacing: 0.5px;
margin-right: 4px;
}
/* Filter chip/toggle style */
.filter-chip {
display: inline-flex;
align-items: center;
gap: 6px;
padding: 5px 10px;
background: var(--bg-2);
border: 1px solid var(--border);
border-radius: 14px;
cursor: pointer;
font-size: 11px;
color: var(--text-1);
transition: all 0.15s ease;
user-select: none;
}
.filter-chip:hover {
background: var(--bg-3);
border-color: var(--border-strong);
color: var(--text-0);
}
.filter-chip.active {
background: var(--accent-dim);
border-color: var(--accent);
color: var(--accent-text);
}
.filter-chip input[type="checkbox"] {
width: 12px;
height: 12px;
margin: 0;
}
.filter-chip input[type="checkbox"]:checked::after {
left: 3px;
top: 1px;
width: 3px;
height: 6px;
}
/* Size filter inputs */
.filter-bar .size-filters {
display: flex;
align-items: center;
gap: 8px;
padding-top: 8px;
border-top: 1px solid var(--border-subtle);
}
.filter-bar .size-filter-group {
display: flex;
align-items: center;
gap: 6px;
}
.filter-bar .size-filter-group label {
font-size: 11px;
color: var(--text-2);
}
.filter-bar input[type="number"] {
width: 70px;
}
.filter-group {
@ -1071,6 +1312,14 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
pointer-events: none;
}
/* Disabled with hint - shows what action is needed */
.btn.btn-disabled-hint:disabled {
opacity: 0.6;
border-style: dashed;
pointer-events: auto;
cursor: help;
}
/* ── Library Toolbar ── */
.library-toolbar {
display: flex;
@ -1589,6 +1838,93 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
color: var(--text-0);
}
.import-current-file {
display: flex;
align-items: center;
gap: 4px;
margin-bottom: 6px;
font-size: 12px;
overflow: hidden;
}
.import-file-label {
color: var(--text-2);
flex-shrink: 0;
}
.import-file-name {
color: var(--text-0);
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
font-family: monospace;
font-size: 11px;
}
.import-queue-indicator {
display: flex;
align-items: center;
gap: 4px;
margin-bottom: 8px;
font-size: 11px;
}
.import-queue-badge {
display: inline-flex;
align-items: center;
justify-content: center;
min-width: 18px;
height: 18px;
padding: 0 6px;
background: var(--accent-dim);
color: var(--accent-text);
border-radius: 9px;
font-weight: 600;
font-size: 10px;
}
.import-queue-text {
color: var(--text-2);
}
/* ── Sidebar import progress ── */
.sidebar-import-progress {
padding: 8px 12px;
background: var(--bg-2);
border-top: 1px solid var(--border-subtle);
font-size: 11px;
}
.sidebar-import-header {
display: flex;
align-items: center;
gap: 6px;
margin-bottom: 4px;
color: var(--text-1);
}
.sidebar-import-file {
color: var(--text-2);
font-size: 10px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
margin-bottom: 4px;
}
.sidebar-import-progress .progress-bar {
height: 3px;
}
.sidebar.collapsed .sidebar-import-progress {
padding: 6px;
}
.sidebar.collapsed .sidebar-import-header span,
.sidebar.collapsed .sidebar-import-file {
display: none;
}
/* ── Tag confirmation ── */
.tag-confirm-delete {
display: inline-flex;
@ -2391,9 +2727,13 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
/* Hide user details in collapsed sidebar, show only logout icon */
.sidebar.collapsed .user-info .user-name,
.sidebar.collapsed .user-info .role-badge { display: none; }
.sidebar.collapsed .user-info .role-badge,
.sidebar.collapsed .user-info .btn { display: none; }
.sidebar.collapsed .user-info .btn { padding: 6px; }
.sidebar.collapsed .user-info {
justify-content: center;
padding: 4px;
}
.role-badge {
display: inline-block;
@ -2676,4 +3016,117 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
color: var(--text-2);
font-size: 0.85rem;
}
/* ── PDF Viewer ── */
.pdf-viewer {
display: flex;
flex-direction: column;
height: 100%;
min-height: 500px;
background: var(--bg-0);
border-radius: var(--radius);
overflow: hidden;
}
.pdf-toolbar {
display: flex;
align-items: center;
gap: 12px;
padding: 8px 12px;
background: var(--bg-1);
border-bottom: 1px solid var(--border);
}
.pdf-toolbar-group {
display: flex;
align-items: center;
gap: 4px;
}
.pdf-toolbar-btn {
display: flex;
align-items: center;
justify-content: center;
width: 28px;
height: 28px;
background: var(--bg-2);
border: 1px solid var(--border);
border-radius: var(--radius-sm);
color: var(--text-1);
font-size: 14px;
cursor: pointer;
transition: all 0.15s;
}
.pdf-toolbar-btn:hover:not(:disabled) {
background: var(--bg-3);
color: var(--text-0);
}
.pdf-toolbar-btn:disabled {
opacity: 0.4;
cursor: not-allowed;
}
.pdf-zoom-label {
min-width: 45px;
text-align: center;
font-size: 12px;
color: var(--text-1);
}
.pdf-container {
flex: 1;
position: relative;
overflow: hidden;
background: var(--bg-2);
}
.pdf-object {
width: 100%;
height: 100%;
border: none;
}
.pdf-loading {
position: absolute;
inset: 0;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
gap: 12px;
background: var(--bg-1);
color: var(--text-1);
}
.pdf-error {
position: absolute;
inset: 0;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
gap: 16px;
background: var(--bg-1);
color: var(--text-1);
padding: 24px;
text-align: center;
}
.pdf-fallback {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
gap: 16px;
padding: 48px 24px;
text-align: center;
color: var(--text-2);
}
/* Light theme adjustments */
.theme-light .pdf-container {
background: #e8e8e8;
}
"#;