treewide: fix various UI bugs; optimize crypto dependencies & format

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If8fe8b38c1d9c4fecd40ff71f88d2ae06a6a6964
This commit is contained in:
raf 2026-02-10 12:56:05 +03:00
commit 3ccddce7fd
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
178 changed files with 58342 additions and 54241 deletions

View file

@ -1,74 +1,76 @@
use std::sync::Arc;
use axum::Router;
use axum::extract::DefaultBodyLimit;
use axum::http::{HeaderValue, Method, header};
use axum::middleware;
use axum::routing::{delete, get, patch, post, put};
use axum::{
Router,
extract::DefaultBodyLimit,
http::{HeaderValue, Method, header},
middleware,
routing::{delete, get, patch, post, put},
};
use tower::ServiceBuilder;
use tower_governor::GovernorLayer;
use tower_governor::governor::GovernorConfigBuilder;
use tower_http::cors::CorsLayer;
use tower_http::set_header::SetResponseHeaderLayer;
use tower_http::trace::TraceLayer;
use tower_governor::{GovernorLayer, governor::GovernorConfigBuilder};
use tower_http::{
cors::CorsLayer,
set_header::SetResponseHeaderLayer,
trace::TraceLayer,
};
use crate::auth;
use crate::routes;
use crate::state::AppState;
use crate::{auth, routes, state::AppState};
/// Create the router with optional TLS configuration for HSTS headers
pub fn create_router(state: AppState) -> Router {
create_router_with_tls(state, None)
create_router_with_tls(state, None)
}
/// Create the router with TLS configuration for security headers
pub fn create_router_with_tls(
state: AppState,
tls_config: Option<&pinakes_core::config::TlsConfig>,
state: AppState,
tls_config: Option<&pinakes_core::config::TlsConfig>,
) -> Router {
// Global rate limit: 100 requests/sec per IP
let global_governor = Arc::new(
GovernorConfigBuilder::default()
.per_second(1)
.burst_size(100)
.finish()
.unwrap(),
);
// Global rate limit: 100 requests/sec per IP
let global_governor = Arc::new(
GovernorConfigBuilder::default()
.per_second(1)
.burst_size(100)
.finish()
.unwrap(),
);
// Strict rate limit for login: 5 requests/min per IP
let login_governor = Arc::new(
GovernorConfigBuilder::default()
// Strict rate limit for login: 5 requests/min per IP
let login_governor = Arc::new(
GovernorConfigBuilder::default()
.per_second(12) // replenish one every 12 seconds
.burst_size(5)
.finish()
.unwrap(),
);
);
// Rate limit for search: 10 requests/min per IP
let search_governor = Arc::new(
GovernorConfigBuilder::default()
// Rate limit for search: 10 requests/min per IP
let search_governor = Arc::new(
GovernorConfigBuilder::default()
.per_second(6) // replenish one every 6 seconds (10/min)
.burst_size(10)
.finish()
.unwrap(),
);
);
// Rate limit for streaming: 5 requests per IP (very restrictive for concurrent streams)
let stream_governor = Arc::new(
GovernorConfigBuilder::default()
// Rate limit for streaming: 5 requests per IP (very restrictive for
// concurrent streams)
let stream_governor = Arc::new(
GovernorConfigBuilder::default()
.per_second(60) // replenish slowly (one per minute)
.burst_size(5) // max 5 concurrent connections
.finish()
.unwrap(),
);
);
// Login route with strict rate limiting
let login_route = Router::new()
.route("/auth/login", post(routes::auth::login))
.layer(GovernorLayer::new(login_governor));
// Login route with strict rate limiting
let login_route = Router::new()
.route("/auth/login", post(routes::auth::login))
.layer(GovernorLayer::new(login_governor));
// Public routes (no auth required)
let public_routes = Router::new()
// Public routes (no auth required)
let public_routes = Router::new()
.route("/s/{token}", get(routes::social::access_shared_media))
// Enhanced sharing: public share access
.route("/shared/{token}", get(routes::shares::access_shared))
@ -76,19 +78,19 @@ pub fn create_router_with_tls(
.route("/health/live", get(routes::health::liveness))
.route("/health/ready", get(routes::health::readiness));
// Search routes with enhanced rate limiting (10 req/min)
let search_routes = Router::new()
.route("/search", get(routes::search::search))
.route("/search", post(routes::search::search_post))
.layer(GovernorLayer::new(search_governor));
// Search routes with enhanced rate limiting (10 req/min)
let search_routes = Router::new()
.route("/search", get(routes::search::search))
.route("/search", post(routes::search::search_post))
.layer(GovernorLayer::new(search_governor));
// Streaming routes with enhanced rate limiting (5 concurrent)
let streaming_routes = Router::new()
.route("/media/{id}/stream", get(routes::media::stream_media))
.layer(GovernorLayer::new(stream_governor));
// Streaming routes with enhanced rate limiting (5 concurrent)
let streaming_routes = Router::new()
.route("/media/{id}/stream", get(routes::media::stream_media))
.layer(GovernorLayer::new(stream_governor));
// Read-only routes: any authenticated user (Viewer+)
let viewer_routes = Router::new()
// Read-only routes: any authenticated user (Viewer+)
let viewer_routes = Router::new()
.route("/health", get(routes::health::health))
.route("/health/detailed", get(routes::health::health_detailed))
.route("/media/count", get(routes::media::get_media_count))
@ -240,8 +242,8 @@ pub fn create_router_with_tls(
)
.nest("/notes", routes::notes::routes());
// Write routes: Editor+ required
let editor_routes = Router::new()
// Write routes: Editor+ required
let editor_routes = Router::new()
.route("/media/import", post(routes::media::import_media))
.route(
"/media/import/options",
@ -456,8 +458,8 @@ pub fn create_router_with_tls(
)
.layer(middleware::from_fn(auth::require_editor));
// Admin-only routes: destructive/config operations
let admin_routes = Router::new()
// Admin-only routes: destructive/config operations
let admin_routes = Router::new()
.route(
"/config/scanning",
put(routes::config::update_scanning_config),
@ -496,43 +498,43 @@ pub fn create_router_with_tls(
.route("/auth/sessions", get(routes::auth::list_active_sessions))
.layer(middleware::from_fn(auth::require_admin));
// CORS: allow same-origin by default, plus the desktop UI origin
let cors = CorsLayer::new()
.allow_origin([
"http://localhost:3000".parse::<HeaderValue>().unwrap(),
"http://127.0.0.1:3000".parse::<HeaderValue>().unwrap(),
"tauri://localhost".parse::<HeaderValue>().unwrap(),
])
.allow_methods([
Method::GET,
Method::POST,
Method::PUT,
Method::PATCH,
Method::DELETE,
])
.allow_headers([header::CONTENT_TYPE, header::AUTHORIZATION])
.allow_credentials(true);
// CORS: allow same-origin by default, plus the desktop UI origin
let cors = CorsLayer::new()
.allow_origin([
"http://localhost:3000".parse::<HeaderValue>().unwrap(),
"http://127.0.0.1:3000".parse::<HeaderValue>().unwrap(),
"tauri://localhost".parse::<HeaderValue>().unwrap(),
])
.allow_methods([
Method::GET,
Method::POST,
Method::PUT,
Method::PATCH,
Method::DELETE,
])
.allow_headers([header::CONTENT_TYPE, header::AUTHORIZATION])
.allow_credentials(true);
// Create protected routes with auth middleware
let protected_api = Router::new()
.merge(viewer_routes)
.merge(search_routes)
.merge(streaming_routes)
.merge(editor_routes)
.merge(admin_routes)
.layer(middleware::from_fn_with_state(
state.clone(),
auth::require_auth,
));
// Create protected routes with auth middleware
let protected_api = Router::new()
.merge(viewer_routes)
.merge(search_routes)
.merge(streaming_routes)
.merge(editor_routes)
.merge(admin_routes)
.layer(middleware::from_fn_with_state(
state.clone(),
auth::require_auth,
));
// Combine protected and public routes
let full_api = Router::new()
.merge(login_route)
.merge(public_routes)
.merge(protected_api);
// Combine protected and public routes
let full_api = Router::new()
.merge(login_route)
.merge(public_routes)
.merge(protected_api);
// Build security headers layer
let security_headers = ServiceBuilder::new()
// Build security headers layer
let security_headers = ServiceBuilder::new()
// Prevent MIME type sniffing
.layer(SetResponseHeaderLayer::overriding(
header::X_CONTENT_TYPE_OPTIONS,
@ -564,32 +566,34 @@ pub fn create_router_with_tls(
HeaderValue::from_static("default-src 'none'; frame-ancestors 'none'"),
));
let router = Router::new()
.nest("/api/v1", full_api)
.layer(DefaultBodyLimit::max(10 * 1024 * 1024))
.layer(GovernorLayer::new(global_governor))
.layer(TraceLayer::new_for_http())
.layer(cors)
.layer(security_headers);
let router = Router::new()
.nest("/api/v1", full_api)
.layer(DefaultBodyLimit::max(10 * 1024 * 1024))
.layer(GovernorLayer::new(global_governor))
.layer(TraceLayer::new_for_http())
.layer(cors)
.layer(security_headers);
// Add HSTS header when TLS is enabled
if let Some(tls) = tls_config {
if tls.enabled && tls.hsts_enabled {
let hsts_value = format!("max-age={}; includeSubDomains", tls.hsts_max_age);
let hsts_header = HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| {
HeaderValue::from_static("max-age=31536000; includeSubDomains")
});
// Add HSTS header when TLS is enabled
if let Some(tls) = tls_config {
if tls.enabled && tls.hsts_enabled {
let hsts_value =
format!("max-age={}; includeSubDomains", tls.hsts_max_age);
let hsts_header =
HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| {
HeaderValue::from_static("max-age=31536000; includeSubDomains")
});
router
.layer(SetResponseHeaderLayer::overriding(
header::STRICT_TRANSPORT_SECURITY,
hsts_header,
))
.with_state(state)
} else {
router.with_state(state)
}
router
.layer(SetResponseHeaderLayer::overriding(
header::STRICT_TRANSPORT_SECURITY,
hsts_header,
))
.with_state(state)
} else {
router.with_state(state)
router.with_state(state)
}
} else {
router.with_state(state)
}
}

View file

@ -1,229 +1,237 @@
use axum::extract::{Request, State};
use axum::http::StatusCode;
use axum::middleware::Next;
use axum::response::{IntoResponse, Response};
use axum::{
extract::{Request, State},
http::StatusCode,
middleware::Next,
response::{IntoResponse, Response},
};
use pinakes_core::config::UserRole;
use crate::state::AppState;
/// Constant-time string comparison to prevent timing attacks on API keys.
fn constant_time_eq(a: &str, b: &str) -> bool {
if a.len() != b.len() {
return false;
}
a.as_bytes()
.iter()
.zip(b.as_bytes())
.fold(0u8, |acc, (x, y)| acc | (x ^ y))
== 0
if a.len() != b.len() {
return false;
}
a.as_bytes()
.iter()
.zip(b.as_bytes())
.fold(0u8, |acc, (x, y)| acc | (x ^ y))
== 0
}
/// Axum middleware that checks for a valid Bearer token.
///
/// If `accounts.enabled == true`: look up bearer token in database session store.
/// If `accounts.enabled == false`: use existing api_key logic (unchanged behavior).
/// Skips authentication for the `/health` and `/auth/login` path suffixes.
/// If `accounts.enabled == true`: look up bearer token in database session
/// store. If `accounts.enabled == false`: use existing api_key logic (unchanged
/// behavior). Skips authentication for the `/health` and `/auth/login` path
/// suffixes.
pub async fn require_auth(
State(state): State<AppState>,
mut request: Request,
next: Next,
State(state): State<AppState>,
mut request: Request,
next: Next,
) -> Response {
let path = request.uri().path().to_string();
let path = request.uri().path().to_string();
// Always allow health and login endpoints
if path.ends_with("/health") || path.ends_with("/auth/login") {
return next.run(request).await;
// Always allow health and login endpoints
if path.ends_with("/health") || path.ends_with("/auth/login") {
return next.run(request).await;
}
let config = state.config.read().await;
// Check if authentication is explicitly disabled
if config.server.authentication_disabled {
drop(config);
tracing::warn!("authentication is disabled - allowing all requests");
request.extensions_mut().insert(UserRole::Admin);
request.extensions_mut().insert("admin".to_string());
return next.run(request).await;
}
if config.accounts.enabled {
drop(config);
// Session-based auth using database
let token = request
.headers()
.get("authorization")
.and_then(|v| v.to_str().ok())
.and_then(|s| s.strip_prefix("Bearer "))
.map(|s| s.to_string());
let Some(token) = token else {
tracing::debug!(path = %path, "rejected: missing Authorization header");
return unauthorized("missing Authorization header");
};
// Look up session in database
let session_result = state.storage.get_session(&token).await;
let session = match session_result {
Ok(Some(session)) => session,
Ok(None) => {
tracing::debug!(path = %path, "rejected: invalid session token");
return unauthorized("invalid or expired session token");
},
Err(e) => {
tracing::error!(error = %e, "failed to query session from database");
return (StatusCode::INTERNAL_SERVER_ERROR, "database error")
.into_response();
},
};
// Check session expiry
let now = chrono::Utc::now();
if session.expires_at < now {
let username = session.username.clone();
// Delete expired session asynchronously (fire-and-forget)
let storage = state.storage.clone();
let token_owned = token.clone();
tokio::spawn(async move {
if let Err(e) = storage.delete_session(&token_owned).await {
tracing::error!(error = %e, "failed to delete expired session");
}
});
tracing::info!(username = %username, "session expired");
return unauthorized("session expired");
}
let config = state.config.read().await;
// Update last_accessed timestamp asynchronously (fire-and-forget)
let storage = state.storage.clone();
let token_owned = token.clone();
tokio::spawn(async move {
if let Err(e) = storage.touch_session(&token_owned).await {
tracing::warn!(error = %e, "failed to update session last_accessed");
}
});
// Check if authentication is explicitly disabled
if config.server.authentication_disabled {
drop(config);
tracing::warn!("authentication is disabled - allowing all requests");
request.extensions_mut().insert(UserRole::Admin);
request.extensions_mut().insert("admin".to_string());
return next.run(request).await;
// Parse role from string
let role = match session.role.as_str() {
"admin" => UserRole::Admin,
"editor" => UserRole::Editor,
"viewer" => UserRole::Viewer,
_ => {
tracing::warn!(role = %session.role, "unknown role, defaulting to viewer");
UserRole::Viewer
},
};
// Inject role and username into request extensions
request.extensions_mut().insert(role);
request.extensions_mut().insert(session.username.clone());
} else {
// Legacy API key auth
let api_key = std::env::var("PINAKES_API_KEY")
.ok()
.or_else(|| config.server.api_key.clone());
drop(config);
let Some(ref expected_key) = api_key else {
tracing::error!("no authentication configured");
return unauthorized("authentication not configured");
};
if expected_key.is_empty() {
// Empty key is not allowed - must use authentication_disabled flag
tracing::error!(
"empty api_key rejected, use authentication_disabled flag instead"
);
return unauthorized("authentication not properly configured");
}
if config.accounts.enabled {
drop(config);
let auth_header = request
.headers()
.get("authorization")
.and_then(|v| v.to_str().ok());
// Session-based auth using database
let token = request
.headers()
.get("authorization")
.and_then(|v| v.to_str().ok())
.and_then(|s| s.strip_prefix("Bearer "))
.map(|s| s.to_string());
let Some(token) = token else {
tracing::debug!(path = %path, "rejected: missing Authorization header");
return unauthorized("missing Authorization header");
};
// Look up session in database
let session_result = state.storage.get_session(&token).await;
let session = match session_result {
Ok(Some(session)) => session,
Ok(None) => {
tracing::debug!(path = %path, "rejected: invalid session token");
return unauthorized("invalid or expired session token");
}
Err(e) => {
tracing::error!(error = %e, "failed to query session from database");
return (StatusCode::INTERNAL_SERVER_ERROR, "database error").into_response();
}
};
// Check session expiry
let now = chrono::Utc::now();
if session.expires_at < now {
let username = session.username.clone();
// Delete expired session asynchronously (fire-and-forget)
let storage = state.storage.clone();
let token_owned = token.clone();
tokio::spawn(async move {
if let Err(e) = storage.delete_session(&token_owned).await {
tracing::error!(error = %e, "failed to delete expired session");
}
});
tracing::info!(username = %username, "session expired");
return unauthorized("session expired");
match auth_header {
Some(header) if header.starts_with("Bearer ") => {
let token = &header[7..];
if !constant_time_eq(token, expected_key.as_str()) {
tracing::warn!(path = %path, "rejected: invalid API key");
return unauthorized("invalid api key");
}
// Update last_accessed timestamp asynchronously (fire-and-forget)
let storage = state.storage.clone();
let token_owned = token.clone();
tokio::spawn(async move {
if let Err(e) = storage.touch_session(&token_owned).await {
tracing::warn!(error = %e, "failed to update session last_accessed");
}
});
// Parse role from string
let role = match session.role.as_str() {
"admin" => UserRole::Admin,
"editor" => UserRole::Editor,
"viewer" => UserRole::Viewer,
_ => {
tracing::warn!(role = %session.role, "unknown role, defaulting to viewer");
UserRole::Viewer
}
};
// Inject role and username into request extensions
request.extensions_mut().insert(role);
request.extensions_mut().insert(session.username.clone());
} else {
// Legacy API key auth
let api_key = std::env::var("PINAKES_API_KEY")
.ok()
.or_else(|| config.server.api_key.clone());
drop(config);
let Some(ref expected_key) = api_key else {
tracing::error!("no authentication configured");
return unauthorized("authentication not configured");
};
if expected_key.is_empty() {
// Empty key is not allowed - must use authentication_disabled flag
tracing::error!("empty api_key rejected, use authentication_disabled flag instead");
return unauthorized("authentication not properly configured");
}
let auth_header = request
.headers()
.get("authorization")
.and_then(|v| v.to_str().ok());
match auth_header {
Some(header) if header.starts_with("Bearer ") => {
let token = &header[7..];
if !constant_time_eq(token, expected_key.as_str()) {
tracing::warn!(path = %path, "rejected: invalid API key");
return unauthorized("invalid api key");
}
}
_ => {
return unauthorized(
"missing or malformed Authorization header, expected: Bearer <api_key>",
);
}
}
// API key matches, grant admin
request.extensions_mut().insert(UserRole::Admin);
request.extensions_mut().insert("admin".to_string());
},
_ => {
return unauthorized(
"missing or malformed Authorization header, expected: Bearer \
<api_key>",
);
},
}
next.run(request).await
// API key matches, grant admin
request.extensions_mut().insert(UserRole::Admin);
request.extensions_mut().insert("admin".to_string());
}
next.run(request).await
}
/// Middleware: requires Editor or Admin role.
pub async fn require_editor(request: Request, next: Next) -> Response {
let role = request
.extensions()
.get::<UserRole>()
.copied()
.unwrap_or(UserRole::Viewer);
if role.can_write() {
next.run(request).await
} else {
forbidden("editor role required")
}
let role = request
.extensions()
.get::<UserRole>()
.copied()
.unwrap_or(UserRole::Viewer);
if role.can_write() {
next.run(request).await
} else {
forbidden("editor role required")
}
}
/// Middleware: requires Admin role.
pub async fn require_admin(request: Request, next: Next) -> Response {
let role = request
.extensions()
.get::<UserRole>()
.copied()
.unwrap_or(UserRole::Viewer);
if role.can_admin() {
next.run(request).await
} else {
forbidden("admin role required")
}
let role = request
.extensions()
.get::<UserRole>()
.copied()
.unwrap_or(UserRole::Viewer);
if role.can_admin() {
next.run(request).await
} else {
forbidden("admin role required")
}
}
/// Resolve the authenticated username (from request extensions) to a UserId.
///
/// Returns an error if the user cannot be found.
pub async fn resolve_user_id(
storage: &pinakes_core::storage::DynStorageBackend,
username: &str,
storage: &pinakes_core::storage::DynStorageBackend,
username: &str,
) -> Result<pinakes_core::users::UserId, crate::error::ApiError> {
match storage.get_user_by_username(username).await {
Ok(user) => Ok(user.id),
Err(e) => {
tracing::warn!(username = %username, error = ?e, "failed to resolve user");
Err(crate::error::ApiError(
pinakes_core::error::PinakesError::Authentication("user not found".into()),
))
}
}
match storage.get_user_by_username(username).await {
Ok(user) => Ok(user.id),
Err(e) => {
tracing::warn!(username = %username, error = ?e, "failed to resolve user");
Err(crate::error::ApiError(
pinakes_core::error::PinakesError::Authentication(
"user not found".into(),
),
))
},
}
}
fn unauthorized(message: &str) -> Response {
let body = format!(r#"{{"error":"{message}"}}"#);
(
StatusCode::UNAUTHORIZED,
[("content-type", "application/json")],
body,
)
.into_response()
let body = format!(r#"{{"error":"{message}"}}"#);
(
StatusCode::UNAUTHORIZED,
[("content-type", "application/json")],
body,
)
.into_response()
}
fn forbidden(message: &str) -> Response {
let body = format!(r#"{{"error":"{message}"}}"#);
(
StatusCode::FORBIDDEN,
[("content-type", "application/json")],
body,
)
.into_response()
let body = format!(r#"{{"error":"{message}"}}"#);
(
StatusCode::FORBIDDEN,
[("content-type", "application/json")],
body,
)
.into_response()
}

File diff suppressed because it is too large Load diff

View file

@ -1,99 +1,107 @@
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use serde::Serialize;
#[derive(Debug, Serialize)]
struct ErrorResponse {
error: String,
error: String,
}
pub struct ApiError(pub pinakes_core::error::PinakesError);
impl IntoResponse for ApiError {
fn into_response(self) -> Response {
use pinakes_core::error::PinakesError;
let (status, message) = match &self.0 {
PinakesError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::FileNotFound(path) => {
// Only expose the file name, not the full path
let name = path
.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_else(|| "unknown".to_string());
tracing::debug!(path = %path.display(), "file not found");
(StatusCode::NOT_FOUND, format!("file not found: {name}"))
}
PinakesError::TagNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::CollectionNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::DuplicateHash(msg) => (StatusCode::CONFLICT, msg.clone()),
PinakesError::UnsupportedMediaType(path) => {
let name = path
.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_else(|| "unknown".to_string());
(
StatusCode::BAD_REQUEST,
format!("unsupported media type: {name}"),
)
}
PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::InvalidOperation(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::Authentication(msg) => (StatusCode::UNAUTHORIZED, msg.clone()),
PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()),
PinakesError::Config(_) => {
tracing::error!(error = %self.0, "configuration error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"internal configuration error".to_string(),
)
}
_ => {
tracing::error!(error = %self.0, "internal server error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"internal server error".to_string(),
)
}
};
fn into_response(self) -> Response {
use pinakes_core::error::PinakesError;
let (status, message) = match &self.0 {
PinakesError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::FileNotFound(path) => {
// Only expose the file name, not the full path
let name = path
.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_else(|| "unknown".to_string());
tracing::debug!(path = %path.display(), "file not found");
(StatusCode::NOT_FOUND, format!("file not found: {name}"))
},
PinakesError::TagNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::CollectionNotFound(msg) => {
(StatusCode::NOT_FOUND, msg.clone())
},
PinakesError::DuplicateHash(msg) => (StatusCode::CONFLICT, msg.clone()),
PinakesError::UnsupportedMediaType(path) => {
let name = path
.file_name()
.map(|n| n.to_string_lossy().to_string())
.unwrap_or_else(|| "unknown".to_string());
(
StatusCode::BAD_REQUEST,
format!("unsupported media type: {name}"),
)
},
PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::InvalidOperation(msg) => {
(StatusCode::BAD_REQUEST, msg.clone())
},
PinakesError::Authentication(msg) => {
(StatusCode::UNAUTHORIZED, msg.clone())
},
PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()),
PinakesError::Config(_) => {
tracing::error!(error = %self.0, "configuration error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"internal configuration error".to_string(),
)
},
_ => {
tracing::error!(error = %self.0, "internal server error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"internal server error".to_string(),
)
},
};
let body = serde_json::to_string(&ErrorResponse {
error: message.clone(),
})
.unwrap_or_else(|_| format!(r#"{{"error":"{}"}}"#, message));
(status, [("content-type", "application/json")], body).into_response()
}
let body = serde_json::to_string(&ErrorResponse {
error: message.clone(),
})
.unwrap_or_else(|_| format!(r#"{{"error":"{}"}}"#, message));
(status, [("content-type", "application/json")], body).into_response()
}
}
impl From<pinakes_core::error::PinakesError> for ApiError {
fn from(e: pinakes_core::error::PinakesError) -> Self {
Self(e)
}
fn from(e: pinakes_core::error::PinakesError) -> Self {
Self(e)
}
}
impl ApiError {
pub fn bad_request(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::InvalidOperation(
msg.into(),
))
}
pub fn bad_request(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::InvalidOperation(
msg.into(),
))
}
pub fn not_found(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::NotFound(msg.into()))
}
pub fn not_found(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::NotFound(msg.into()))
}
pub fn internal(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::Database(msg.into()))
}
pub fn internal(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::Database(msg.into()))
}
pub fn forbidden(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::Authorization(msg.into()))
}
pub fn forbidden(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::Authorization(msg.into()))
}
pub fn unauthorized(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::Authentication(
msg.into(),
))
}
pub fn unauthorized(msg: impl Into<String>) -> Self {
Self(pinakes_core::error::PinakesError::Authentication(
msg.into(),
))
}
}
pub type ApiResult<T> = Result<T, ApiError>;

File diff suppressed because it is too large Load diff

View file

@ -1,94 +1,96 @@
use axum::Json;
use axum::extract::{Extension, Path, Query, State};
use axum::{
Json,
extract::{Extension, Path, Query, State},
};
use pinakes_core::{
analytics::{UsageEvent, UsageEventType},
model::MediaId,
};
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::analytics::{UsageEvent, UsageEventType};
use pinakes_core::model::MediaId;
use crate::{auth::resolve_user_id, dto::*, error::ApiError, state::AppState};
const MAX_LIMIT: u64 = 100;
pub async fn get_most_viewed(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<MostViewedResponse>>, ApiError> {
let limit = params.limit.unwrap_or(20).min(MAX_LIMIT);
let results = state.storage.get_most_viewed(limit).await?;
Ok(Json(
results
.into_iter()
.map(|(item, count)| MostViewedResponse {
media: MediaResponse::from(item),
view_count: count,
})
.collect(),
))
let limit = params.limit.unwrap_or(20).min(MAX_LIMIT);
let results = state.storage.get_most_viewed(limit).await?;
Ok(Json(
results
.into_iter()
.map(|(item, count)| {
MostViewedResponse {
media: MediaResponse::from(item),
view_count: count,
}
})
.collect(),
))
}
pub async fn get_recently_viewed(
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<PaginationParams>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let limit = params.limit.unwrap_or(20).min(MAX_LIMIT);
let items = state.storage.get_recently_viewed(user_id, limit).await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
let user_id = resolve_user_id(&state.storage, &username).await?;
let limit = params.limit.unwrap_or(20).min(MAX_LIMIT);
let items = state.storage.get_recently_viewed(user_id, limit).await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
}
pub async fn record_event(
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<RecordUsageEventRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<RecordUsageEventRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let event_type: UsageEventType = req
.event_type
.parse()
.map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let event = UsageEvent {
id: Uuid::now_v7(),
media_id: req.media_id.map(MediaId),
user_id: Some(user_id),
event_type,
timestamp: chrono::Utc::now(),
duration_secs: req.duration_secs,
context_json: req.context.map(|v| v.to_string()),
};
state.storage.record_usage_event(&event).await?;
Ok(Json(serde_json::json!({"recorded": true})))
let event_type: UsageEventType =
req.event_type.parse().map_err(|e: String| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(e))
})?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let event = UsageEvent {
id: Uuid::now_v7(),
media_id: req.media_id.map(MediaId),
user_id: Some(user_id),
event_type,
timestamp: chrono::Utc::now(),
duration_secs: req.duration_secs,
context_json: req.context.map(|v| v.to_string()),
};
state.storage.record_usage_event(&event).await?;
Ok(Json(serde_json::json!({"recorded": true})))
}
pub async fn get_watch_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
) -> Result<Json<WatchProgressResponse>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let progress = state
.storage
.get_watch_progress(user_id, MediaId(id))
.await?
.unwrap_or(0.0);
Ok(Json(WatchProgressResponse {
progress_secs: progress,
}))
let user_id = resolve_user_id(&state.storage, &username).await?;
let progress = state
.storage
.get_watch_progress(user_id, MediaId(id))
.await?
.unwrap_or(0.0);
Ok(Json(WatchProgressResponse {
progress_secs: progress,
}))
}
pub async fn update_watch_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<WatchProgressRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<WatchProgressRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
state
.storage
.update_watch_progress(user_id, MediaId(id), req.progress_secs)
.await?;
Ok(Json(serde_json::json!({"updated": true})))
let user_id = resolve_user_id(&state.storage, &username).await?;
state
.storage
.update_watch_progress(user_id, MediaId(id), req.progress_secs)
.await?;
Ok(Json(serde_json::json!({"updated": true})))
}

View file

@ -1,23 +1,22 @@
use axum::Json;
use axum::extract::{Query, State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use axum::{
Json,
extract::{Query, State},
};
use pinakes_core::model::Pagination;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn list_audit(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<AuditEntryResponse>>, ApiError> {
let pagination = Pagination::new(
params.offset.unwrap_or(0),
params.limit.unwrap_or(50).min(1000),
None,
);
let entries = state.storage.list_audit_entries(None, &pagination).await?;
Ok(Json(
entries.into_iter().map(AuditEntryResponse::from).collect(),
))
let pagination = Pagination::new(
params.offset.unwrap_or(0),
params.limit.unwrap_or(50).min(1000),
None,
);
let entries = state.storage.list_audit_entries(None, &pagination).await?;
Ok(Json(
entries.into_iter().map(AuditEntryResponse::from).collect(),
))
}

View file

@ -1,271 +1,286 @@
use axum::Json;
use axum::extract::State;
use axum::http::{HeaderMap, StatusCode};
use axum::{
Json,
extract::State,
http::{HeaderMap, StatusCode},
};
use crate::dto::{LoginRequest, LoginResponse, UserInfoResponse};
use crate::state::AppState;
use crate::{
dto::{LoginRequest, LoginResponse, UserInfoResponse},
state::AppState,
};
/// Dummy password hash to use for timing-safe comparison when user doesn't exist.
/// This is a valid argon2 hash that will always fail verification but takes
/// similar time to verify as a real hash, preventing timing attacks that could
/// reveal whether a username exists.
const DUMMY_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
/// Dummy password hash to use for timing-safe comparison when user doesn't
/// exist. This is a valid argon2 hash that will always fail verification but
/// takes similar time to verify as a real hash, preventing timing attacks that
/// could reveal whether a username exists.
const DUMMY_HASH: &str =
"$argon2id$v=19$m=19456,t=2,\
p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
pub async fn login(
State(state): State<AppState>,
Json(req): Json<LoginRequest>,
State(state): State<AppState>,
Json(req): Json<LoginRequest>,
) -> Result<Json<LoginResponse>, StatusCode> {
// Limit input sizes to prevent DoS
if req.username.len() > 255 || req.password.len() > 1024 {
return Err(StatusCode::BAD_REQUEST);
// Limit input sizes to prevent DoS
if req.username.len() > 255 || req.password.len() > 1024 {
return Err(StatusCode::BAD_REQUEST);
}
let config = state.config.read().await;
if !config.accounts.enabled {
return Err(StatusCode::NOT_FOUND);
}
let user = config
.accounts
.users
.iter()
.find(|u| u.username == req.username);
// Always perform password verification to prevent timing attacks.
// If the user doesn't exist, we verify against a dummy hash to ensure
// consistent response times regardless of whether the username exists.
use argon2::password_hash::PasswordVerifier;
let (hash_to_verify, user_found) = match user {
Some(u) => (&u.password_hash as &str, true),
None => (DUMMY_HASH, false),
};
let parsed_hash = argon2::password_hash::PasswordHash::new(hash_to_verify)
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
let password_valid = argon2::Argon2::default()
.verify_password(req.password.as_bytes(), &parsed_hash)
.is_ok();
// Authentication fails if user wasn't found OR password was invalid
if !user_found || !password_valid {
// Log different messages for debugging but return same error
if !user_found {
tracing::warn!(username = %req.username, "login failed: unknown user");
} else {
tracing::warn!(username = %req.username, "login failed: invalid password");
}
let config = state.config.read().await;
if !config.accounts.enabled {
return Err(StatusCode::NOT_FOUND);
}
let user = config
.accounts
.users
.iter()
.find(|u| u.username == req.username);
// Always perform password verification to prevent timing attacks.
// If the user doesn't exist, we verify against a dummy hash to ensure
// consistent response times regardless of whether the username exists.
use argon2::password_hash::PasswordVerifier;
let (hash_to_verify, user_found) = match user {
Some(u) => (&u.password_hash as &str, true),
None => (DUMMY_HASH, false),
};
let parsed_hash = argon2::password_hash::PasswordHash::new(hash_to_verify)
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
let password_valid = argon2::Argon2::default()
.verify_password(req.password.as_bytes(), &parsed_hash)
.is_ok();
// Authentication fails if user wasn't found OR password was invalid
if !user_found || !password_valid {
// Log different messages for debugging but return same error
if !user_found {
tracing::warn!(username = %req.username, "login failed: unknown user");
} else {
tracing::warn!(username = %req.username, "login failed: invalid password");
}
// Record failed login attempt in audit log
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::LoginFailed,
Some(format!("username: {}", req.username)),
)
.await;
return Err(StatusCode::UNAUTHORIZED);
}
// At this point we know the user exists and password is valid
let user = user.expect("user should exist at this point");
// Generate session token
let token: String = (0..48)
.map(|_| {
const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
abcdefghijklmnopqrstuvwxyz\
0123456789";
let idx = (rand::random::<u32>() as usize) % CHARSET.len();
CHARSET[idx] as char
})
.collect();
let role = user.role;
let username = user.username.clone();
// Create session in database
let now = chrono::Utc::now();
let session_data = pinakes_core::storage::SessionData {
session_token: token.clone(),
user_id: None, // Could be set if we had user IDs
username: username.clone(),
role: role.to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24), // 24 hour sessions
last_accessed: now,
};
if let Err(e) = state.storage.create_session(&session_data).await {
tracing::error!(error = %e, "failed to create session in database");
return Err(StatusCode::INTERNAL_SERVER_ERROR);
}
tracing::info!(username = %username, role = %role, "login successful");
// Record successful login in audit log
// Record failed login attempt in audit log
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::LoginSuccess,
Some(format!("username: {}, role: {}", username, role)),
&state.storage,
None,
pinakes_core::model::AuditAction::LoginFailed,
Some(format!("username: {}", req.username)),
)
.await;
Ok(Json(LoginResponse {
token,
username,
role: role.to_string(),
}))
return Err(StatusCode::UNAUTHORIZED);
}
// At this point we know the user exists and password is valid
let user = user.expect("user should exist at this point");
// Generate session token
let token: String = (0..48)
.map(|_| {
const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
abcdefghijklmnopqrstuvwxyz\
0123456789";
let idx = (rand::random::<u32>() as usize) % CHARSET.len();
CHARSET[idx] as char
})
.collect();
let role = user.role;
let username = user.username.clone();
// Create session in database
let now = chrono::Utc::now();
let session_data = pinakes_core::storage::SessionData {
session_token: token.clone(),
user_id: None, // Could be set if we had user IDs
username: username.clone(),
role: role.to_string(),
created_at: now,
expires_at: now + chrono::Duration::hours(24), // 24 hour sessions
last_accessed: now,
};
if let Err(e) = state.storage.create_session(&session_data).await {
tracing::error!(error = %e, "failed to create session in database");
return Err(StatusCode::INTERNAL_SERVER_ERROR);
}
tracing::info!(username = %username, role = %role, "login successful");
// Record successful login in audit log
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::LoginSuccess,
Some(format!("username: {}, role: {}", username, role)),
)
.await;
Ok(Json(LoginResponse {
token,
username,
role: role.to_string(),
}))
}
pub async fn logout(State(state): State<AppState>, headers: HeaderMap) -> StatusCode {
if let Some(token) = extract_bearer_token(&headers) {
// Get username before deleting session
let username = match state.storage.get_session(token).await {
Ok(Some(session)) => Some(session.username),
_ => None,
};
pub async fn logout(
State(state): State<AppState>,
headers: HeaderMap,
) -> StatusCode {
if let Some(token) = extract_bearer_token(&headers) {
// Get username before deleting session
let username = match state.storage.get_session(token).await {
Ok(Some(session)) => Some(session.username),
_ => None,
};
// Delete session from database
if let Err(e) = state.storage.delete_session(token).await {
tracing::error!(error = %e, "failed to delete session from database");
return StatusCode::INTERNAL_SERVER_ERROR;
}
// Record logout in audit log
if let Some(user) = username {
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::Logout,
Some(format!("username: {}", user)),
)
.await;
}
// Delete session from database
if let Err(e) = state.storage.delete_session(token).await {
tracing::error!(error = %e, "failed to delete session from database");
return StatusCode::INTERNAL_SERVER_ERROR;
}
StatusCode::OK
// Record logout in audit log
if let Some(user) = username {
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::Logout,
Some(format!("username: {}", user)),
)
.await;
}
}
StatusCode::OK
}
pub async fn me(
State(state): State<AppState>,
headers: HeaderMap,
State(state): State<AppState>,
headers: HeaderMap,
) -> Result<Json<UserInfoResponse>, StatusCode> {
let config = state.config.read().await;
if !config.accounts.enabled {
// When accounts are not enabled, return a default admin user
return Ok(Json(UserInfoResponse {
username: "admin".to_string(),
role: "admin".to_string(),
}));
}
drop(config);
let config = state.config.read().await;
if !config.accounts.enabled {
// When accounts are not enabled, return a default admin user
return Ok(Json(UserInfoResponse {
username: "admin".to_string(),
role: "admin".to_string(),
}));
}
drop(config);
let token = extract_bearer_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;
let session = state
.storage
.get_session(token)
.await
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
.ok_or(StatusCode::UNAUTHORIZED)?;
let token = extract_bearer_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;
let session = state
.storage
.get_session(token)
.await
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
.ok_or(StatusCode::UNAUTHORIZED)?;
Ok(Json(UserInfoResponse {
username: session.username.clone(),
role: session.role.clone(),
}))
Ok(Json(UserInfoResponse {
username: session.username.clone(),
role: session.role.clone(),
}))
}
fn extract_bearer_token(headers: &HeaderMap) -> Option<&str> {
headers
.get("authorization")
.and_then(|v| v.to_str().ok())
.and_then(|s| s.strip_prefix("Bearer "))
headers
.get("authorization")
.and_then(|v| v.to_str().ok())
.and_then(|s| s.strip_prefix("Bearer "))
}
/// Revoke all sessions for the current user
pub async fn revoke_all_sessions(State(state): State<AppState>, headers: HeaderMap) -> StatusCode {
let token = match extract_bearer_token(&headers) {
Some(t) => t,
None => return StatusCode::UNAUTHORIZED,
};
pub async fn revoke_all_sessions(
State(state): State<AppState>,
headers: HeaderMap,
) -> StatusCode {
let token = match extract_bearer_token(&headers) {
Some(t) => t,
None => return StatusCode::UNAUTHORIZED,
};
// Get current session to find username
let session = match state.storage.get_session(token).await {
Ok(Some(s)) => s,
Ok(None) => return StatusCode::UNAUTHORIZED,
Err(e) => {
tracing::error!(error = %e, "failed to get session");
return StatusCode::INTERNAL_SERVER_ERROR;
}
};
// Get current session to find username
let session = match state.storage.get_session(token).await {
Ok(Some(s)) => s,
Ok(None) => return StatusCode::UNAUTHORIZED,
Err(e) => {
tracing::error!(error = %e, "failed to get session");
return StatusCode::INTERNAL_SERVER_ERROR;
},
};
let username = session.username.clone();
let username = session.username.clone();
// Delete all sessions for this user
match state.storage.delete_user_sessions(&username).await {
Ok(count) => {
tracing::info!(username = %username, count = count, "revoked all user sessions");
// Delete all sessions for this user
match state.storage.delete_user_sessions(&username).await {
Ok(count) => {
tracing::info!(username = %username, count = count, "revoked all user sessions");
// Record in audit log
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::Logout,
Some(format!("revoked all sessions for username: {}", username)),
)
.await;
// Record in audit log
let _ = pinakes_core::audit::record_action(
&state.storage,
None,
pinakes_core::model::AuditAction::Logout,
Some(format!("revoked all sessions for username: {}", username)),
)
.await;
StatusCode::OK
}
Err(e) => {
tracing::error!(error = %e, "failed to revoke sessions");
StatusCode::INTERNAL_SERVER_ERROR
}
}
StatusCode::OK
},
Err(e) => {
tracing::error!(error = %e, "failed to revoke sessions");
StatusCode::INTERNAL_SERVER_ERROR
},
}
}
/// List all active sessions (admin only)
#[derive(serde::Serialize)]
pub struct SessionListResponse {
pub sessions: Vec<SessionInfo>,
pub sessions: Vec<SessionInfo>,
}
#[derive(serde::Serialize)]
pub struct SessionInfo {
pub username: String,
pub role: String,
pub created_at: String,
pub last_accessed: String,
pub expires_at: String,
pub username: String,
pub role: String,
pub created_at: String,
pub last_accessed: String,
pub expires_at: String,
}
pub async fn list_active_sessions(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<SessionListResponse>, StatusCode> {
// Get all active sessions
let sessions = state
.storage
.list_active_sessions(None)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list active sessions");
StatusCode::INTERNAL_SERVER_ERROR
})?;
// Get all active sessions
let sessions =
state
.storage
.list_active_sessions(None)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list active sessions");
StatusCode::INTERNAL_SERVER_ERROR
})?;
let session_infos = sessions
.into_iter()
.map(|s| SessionInfo {
username: s.username,
role: s.role,
created_at: s.created_at.to_rfc3339(),
last_accessed: s.last_accessed.to_rfc3339(),
expires_at: s.expires_at.to_rfc3339(),
})
.collect();
let session_infos = sessions
.into_iter()
.map(|s| {
SessionInfo {
username: s.username,
role: s.role,
created_at: s.created_at.to_rfc3339(),
last_accessed: s.last_accessed.to_rfc3339(),
expires_at: s.expires_at.to_rfc3339(),
}
})
.collect();
Ok(Json(SessionListResponse {
sessions: session_infos,
}))
Ok(Json(SessionListResponse {
sessions: session_infos,
}))
}

View file

@ -1,305 +1,332 @@
use axum::{
Json, Router,
extract::{Extension, Path, Query, State},
http::StatusCode,
response::IntoResponse,
routing::{get, put},
Json,
Router,
extract::{Extension, Path, Query, State},
http::StatusCode,
response::IntoResponse,
routing::{get, put},
};
use pinakes_core::{
error::PinakesError,
model::{
AuthorInfo,
BookMetadata,
MediaId,
Pagination,
ReadingProgress,
ReadingStatus,
},
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use pinakes_core::{
error::PinakesError,
model::{AuthorInfo, BookMetadata, MediaId, Pagination, ReadingProgress, ReadingStatus},
use crate::{
auth::resolve_user_id,
dto::MediaResponse,
error::ApiError,
state::AppState,
};
use crate::{auth::resolve_user_id, dto::MediaResponse, error::ApiError, state::AppState};
/// Book metadata response DTO
#[derive(Debug, Serialize, Deserialize)]
pub struct BookMetadataResponse {
pub media_id: Uuid,
pub isbn: Option<String>,
pub isbn13: Option<String>,
pub publisher: Option<String>,
pub language: Option<String>,
pub page_count: Option<i32>,
pub publication_date: Option<String>,
pub series_name: Option<String>,
pub series_index: Option<f64>,
pub format: Option<String>,
pub authors: Vec<AuthorResponse>,
pub identifiers: std::collections::HashMap<String, Vec<String>>,
pub media_id: Uuid,
pub isbn: Option<String>,
pub isbn13: Option<String>,
pub publisher: Option<String>,
pub language: Option<String>,
pub page_count: Option<i32>,
pub publication_date: Option<String>,
pub series_name: Option<String>,
pub series_index: Option<f64>,
pub format: Option<String>,
pub authors: Vec<AuthorResponse>,
pub identifiers: std::collections::HashMap<String, Vec<String>>,
}
impl From<BookMetadata> for BookMetadataResponse {
fn from(meta: BookMetadata) -> Self {
Self {
media_id: meta.media_id.0,
isbn: meta.isbn,
isbn13: meta.isbn13,
publisher: meta.publisher,
language: meta.language,
page_count: meta.page_count,
publication_date: meta.publication_date.map(|d| d.to_string()),
series_name: meta.series_name,
series_index: meta.series_index,
format: meta.format,
authors: meta.authors.into_iter().map(AuthorResponse::from).collect(),
identifiers: meta.identifiers,
}
fn from(meta: BookMetadata) -> Self {
Self {
media_id: meta.media_id.0,
isbn: meta.isbn,
isbn13: meta.isbn13,
publisher: meta.publisher,
language: meta.language,
page_count: meta.page_count,
publication_date: meta.publication_date.map(|d| d.to_string()),
series_name: meta.series_name,
series_index: meta.series_index,
format: meta.format,
authors: meta
.authors
.into_iter()
.map(AuthorResponse::from)
.collect(),
identifiers: meta.identifiers,
}
}
}
/// Author response DTO
#[derive(Debug, Serialize, Deserialize)]
pub struct AuthorResponse {
pub name: String,
pub role: String,
pub file_as: Option<String>,
pub position: i32,
pub name: String,
pub role: String,
pub file_as: Option<String>,
pub position: i32,
}
impl From<AuthorInfo> for AuthorResponse {
fn from(author: AuthorInfo) -> Self {
Self {
name: author.name,
role: author.role,
file_as: author.file_as,
position: author.position,
}
fn from(author: AuthorInfo) -> Self {
Self {
name: author.name,
role: author.role,
file_as: author.file_as,
position: author.position,
}
}
}
/// Reading progress response DTO
#[derive(Debug, Serialize, Deserialize)]
pub struct ReadingProgressResponse {
pub media_id: Uuid,
pub user_id: Uuid,
pub current_page: i32,
pub total_pages: Option<i32>,
pub progress_percent: f64,
pub last_read_at: String,
pub media_id: Uuid,
pub user_id: Uuid,
pub current_page: i32,
pub total_pages: Option<i32>,
pub progress_percent: f64,
pub last_read_at: String,
}
impl From<ReadingProgress> for ReadingProgressResponse {
fn from(progress: ReadingProgress) -> Self {
Self {
media_id: progress.media_id.0,
user_id: progress.user_id,
current_page: progress.current_page,
total_pages: progress.total_pages,
progress_percent: progress.progress_percent,
last_read_at: progress.last_read_at.to_rfc3339(),
}
fn from(progress: ReadingProgress) -> Self {
Self {
media_id: progress.media_id.0,
user_id: progress.user_id,
current_page: progress.current_page,
total_pages: progress.total_pages,
progress_percent: progress.progress_percent,
last_read_at: progress.last_read_at.to_rfc3339(),
}
}
}
/// Update reading progress request
#[derive(Debug, Deserialize)]
pub struct UpdateProgressRequest {
pub current_page: i32,
pub current_page: i32,
}
/// Search books query parameters
#[derive(Debug, Deserialize)]
pub struct SearchBooksQuery {
pub isbn: Option<String>,
pub author: Option<String>,
pub series: Option<String>,
pub publisher: Option<String>,
pub language: Option<String>,
#[serde(default = "default_offset")]
pub offset: u64,
#[serde(default = "default_limit")]
pub limit: u64,
pub isbn: Option<String>,
pub author: Option<String>,
pub series: Option<String>,
pub publisher: Option<String>,
pub language: Option<String>,
#[serde(default = "default_offset")]
pub offset: u64,
#[serde(default = "default_limit")]
pub limit: u64,
}
fn default_offset() -> u64 {
0
0
}
fn default_limit() -> u64 {
50
50
}
/// Series summary DTO
#[derive(Debug, Serialize)]
pub struct SeriesSummary {
pub name: String,
pub book_count: u64,
pub name: String,
pub book_count: u64,
}
/// Author summary DTO
#[derive(Debug, Serialize)]
pub struct AuthorSummary {
pub name: String,
pub book_count: u64,
pub name: String,
pub book_count: u64,
}
/// Get book metadata by media ID
pub async fn get_book_metadata(
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
let media_id = MediaId(media_id);
let metadata = state
.storage
.get_book_metadata(media_id)
.await?
.ok_or(ApiError(PinakesError::NotFound(
"Book metadata not found".to_string(),
)))?;
let media_id = MediaId(media_id);
let metadata =
state
.storage
.get_book_metadata(media_id)
.await?
.ok_or(ApiError(PinakesError::NotFound(
"Book metadata not found".to_string(),
)))?;
Ok(Json(BookMetadataResponse::from(metadata)))
Ok(Json(BookMetadataResponse::from(metadata)))
}
/// List all books with optional search filters
pub async fn list_books(
State(state): State<AppState>,
Query(query): Query<SearchBooksQuery>,
State(state): State<AppState>,
Query(query): Query<SearchBooksQuery>,
) -> Result<impl IntoResponse, ApiError> {
let pagination = Pagination {
offset: query.offset,
limit: query.limit,
sort: None,
};
let pagination = Pagination {
offset: query.offset,
limit: query.limit,
sort: None,
};
let items = state
.storage
.search_books(
query.isbn.as_deref(),
query.author.as_deref(),
query.series.as_deref(),
query.publisher.as_deref(),
query.language.as_deref(),
&pagination,
)
.await?;
let items = state
.storage
.search_books(
query.isbn.as_deref(),
query.author.as_deref(),
query.series.as_deref(),
query.publisher.as_deref(),
query.language.as_deref(),
&pagination,
)
.await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
/// List all series with book counts
pub async fn list_series(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> {
let series = state.storage.list_series().await?;
let response: Vec<SeriesSummary> = series
.into_iter()
.map(|(name, count)| SeriesSummary {
name,
book_count: count,
})
.collect();
pub async fn list_series(
State(state): State<AppState>,
) -> Result<impl IntoResponse, ApiError> {
let series = state.storage.list_series().await?;
let response: Vec<SeriesSummary> = series
.into_iter()
.map(|(name, count)| {
SeriesSummary {
name,
book_count: count,
}
})
.collect();
Ok(Json(response))
Ok(Json(response))
}
/// Get books in a specific series
pub async fn get_series_books(
State(state): State<AppState>,
Path(series_name): Path<String>,
State(state): State<AppState>,
Path(series_name): Path<String>,
) -> Result<impl IntoResponse, ApiError> {
let items = state.storage.get_series_books(&series_name).await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
let items = state.storage.get_series_books(&series_name).await?;
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
/// List all authors with book counts
pub async fn list_authors(
State(state): State<AppState>,
Query(pagination): Query<Pagination>,
State(state): State<AppState>,
Query(pagination): Query<Pagination>,
) -> Result<impl IntoResponse, ApiError> {
let authors = state.storage.list_all_authors(&pagination).await?;
let response: Vec<AuthorSummary> = authors
.into_iter()
.map(|(name, count)| AuthorSummary {
name,
book_count: count,
})
.collect();
let authors = state.storage.list_all_authors(&pagination).await?;
let response: Vec<AuthorSummary> = authors
.into_iter()
.map(|(name, count)| {
AuthorSummary {
name,
book_count: count,
}
})
.collect();
Ok(Json(response))
Ok(Json(response))
}
/// Get books by a specific author
pub async fn get_author_books(
State(state): State<AppState>,
Path(author_name): Path<String>,
Query(pagination): Query<Pagination>,
State(state): State<AppState>,
Path(author_name): Path<String>,
Query(pagination): Query<Pagination>,
) -> Result<impl IntoResponse, ApiError> {
let items = state
.storage
.search_books(None, Some(&author_name), None, None, None, &pagination)
.await?;
let items = state
.storage
.search_books(None, Some(&author_name), None, None, None, &pagination)
.await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
/// Get reading progress for a book
pub async fn get_reading_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(media_id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let media_id = MediaId(media_id);
let user_id = resolve_user_id(&state.storage, &username).await?;
let media_id = MediaId(media_id);
let progress = state
.storage
.get_reading_progress(user_id.0, media_id)
.await?
.ok_or(ApiError(PinakesError::NotFound(
"Reading progress not found".to_string(),
)))?;
let progress = state
.storage
.get_reading_progress(user_id.0, media_id)
.await?
.ok_or(ApiError(PinakesError::NotFound(
"Reading progress not found".to_string(),
)))?;
Ok(Json(ReadingProgressResponse::from(progress)))
Ok(Json(ReadingProgressResponse::from(progress)))
}
/// Update reading progress for a book
pub async fn update_reading_progress(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(media_id): Path<Uuid>,
Json(req): Json<UpdateProgressRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(media_id): Path<Uuid>,
Json(req): Json<UpdateProgressRequest>,
) -> Result<impl IntoResponse, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let media_id = MediaId(media_id);
let user_id = resolve_user_id(&state.storage, &username).await?;
let media_id = MediaId(media_id);
state
.storage
.update_reading_progress(user_id.0, media_id, req.current_page)
.await?;
state
.storage
.update_reading_progress(user_id.0, media_id, req.current_page)
.await?;
Ok(StatusCode::NO_CONTENT)
Ok(StatusCode::NO_CONTENT)
}
/// Get user's reading list
pub async fn get_reading_list(
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<ReadingListQuery>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<ReadingListQuery>,
) -> Result<impl IntoResponse, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let items = state
.storage
.get_reading_list(user_id.0, params.status)
.await?;
let items = state
.storage
.get_reading_list(user_id.0, params.status)
.await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
let response: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
#[derive(Debug, Deserialize)]
pub struct ReadingListQuery {
pub status: Option<ReadingStatus>,
pub status: Option<ReadingStatus>,
}
/// Build the books router
pub fn routes() -> Router<AppState> {
Router::new()
Router::new()
// Metadata routes
.route("/{id}/metadata", get(get_book_metadata))
// Browse routes

View file

@ -1,101 +1,106 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::{CollectionKind, MediaId};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{CollectionKind, MediaId};
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn create_collection(
State(state): State<AppState>,
Json(req): Json<CreateCollectionRequest>,
State(state): State<AppState>,
Json(req): Json<CreateCollectionRequest>,
) -> Result<Json<CollectionResponse>, ApiError> {
if req.name.is_empty() || req.name.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"collection name must be 1-255 characters".into(),
),
));
}
if let Some(ref desc) = req.description
&& desc.len() > 10_000
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"description exceeds 10000 characters".into(),
),
));
}
let kind = match req.kind.as_str() {
"virtual" => CollectionKind::Virtual,
_ => CollectionKind::Manual,
};
let col = pinakes_core::collections::create_collection(
&state.storage,
&req.name,
kind,
req.description.as_deref(),
req.filter_query.as_deref(),
)
.await?;
Ok(Json(CollectionResponse::from(col)))
if req.name.is_empty() || req.name.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"collection name must be 1-255 characters".into(),
),
));
}
if let Some(ref desc) = req.description
&& desc.len() > 10_000
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"description exceeds 10000 characters".into(),
),
));
}
let kind = match req.kind.as_str() {
"virtual" => CollectionKind::Virtual,
_ => CollectionKind::Manual,
};
let col = pinakes_core::collections::create_collection(
&state.storage,
&req.name,
kind,
req.description.as_deref(),
req.filter_query.as_deref(),
)
.await?;
Ok(Json(CollectionResponse::from(col)))
}
pub async fn list_collections(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<Vec<CollectionResponse>>, ApiError> {
let cols = state.storage.list_collections().await?;
Ok(Json(
cols.into_iter().map(CollectionResponse::from).collect(),
))
let cols = state.storage.list_collections().await?;
Ok(Json(
cols.into_iter().map(CollectionResponse::from).collect(),
))
}
pub async fn get_collection(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<CollectionResponse>, ApiError> {
let col = state.storage.get_collection(id).await?;
Ok(Json(CollectionResponse::from(col)))
let col = state.storage.get_collection(id).await?;
Ok(Json(CollectionResponse::from(col)))
}
pub async fn delete_collection(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
state.storage.delete_collection(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
state.storage.delete_collection(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
}
pub async fn add_member(
State(state): State<AppState>,
Path(collection_id): Path<Uuid>,
Json(req): Json<AddMemberRequest>,
State(state): State<AppState>,
Path(collection_id): Path<Uuid>,
Json(req): Json<AddMemberRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
pinakes_core::collections::add_member(
&state.storage,
collection_id,
MediaId(req.media_id),
req.position.unwrap_or(0),
)
.await?;
Ok(Json(serde_json::json!({"added": true})))
pinakes_core::collections::add_member(
&state.storage,
collection_id,
MediaId(req.media_id),
req.position.unwrap_or(0),
)
.await?;
Ok(Json(serde_json::json!({"added": true})))
}
pub async fn remove_member(
State(state): State<AppState>,
Path((collection_id, media_id)): Path<(Uuid, Uuid)>,
State(state): State<AppState>,
Path((collection_id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
pinakes_core::collections::remove_member(&state.storage, collection_id, MediaId(media_id))
.await?;
Ok(Json(serde_json::json!({"removed": true})))
pinakes_core::collections::remove_member(
&state.storage,
collection_id,
MediaId(media_id),
)
.await?;
Ok(Json(serde_json::json!({"removed": true})))
}
pub async fn get_members(
State(state): State<AppState>,
Path(collection_id): Path<Uuid>,
State(state): State<AppState>,
Path(collection_id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
let items = pinakes_core::collections::get_members(&state.storage, collection_id).await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
let items =
pinakes_core::collections::get_members(&state.storage, collection_id)
.await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
}

View file

@ -1,217 +1,218 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn get_config(State(state): State<AppState>) -> Result<Json<ConfigResponse>, ApiError> {
let config = state.config.read().await;
let roots = state.storage.list_root_dirs().await?;
pub async fn get_config(
State(state): State<AppState>,
) -> Result<Json<ConfigResponse>, ApiError> {
let config = state.config.read().await;
let roots = state.storage.list_root_dirs().await?;
let config_path = state
.config_path
.as_ref()
.map(|p| p.to_string_lossy().to_string());
let config_writable = match &state.config_path {
Some(path) => {
if path.exists() {
std::fs::metadata(path)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
} else {
path.parent()
.map(|parent| {
std::fs::metadata(parent)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
})
.unwrap_or(false)
}
}
None => false,
};
let config_path = state
.config_path
.as_ref()
.map(|p| p.to_string_lossy().to_string());
let config_writable = match &state.config_path {
Some(path) => {
if path.exists() {
std::fs::metadata(path)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
} else {
path
.parent()
.map(|parent| {
std::fs::metadata(parent)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
})
.unwrap_or(false)
}
},
None => false,
};
Ok(Json(ConfigResponse {
backend: format!("{:?}", config.storage.backend).to_lowercase(),
database_path: config
.storage
.sqlite
.as_ref()
.map(|s| s.path.to_string_lossy().to_string()),
roots: roots
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect(),
scanning: ScanningConfigResponse {
watch: config.scanning.watch,
poll_interval_secs: config.scanning.poll_interval_secs,
ignore_patterns: config.scanning.ignore_patterns.clone(),
},
server: ServerConfigResponse {
host: config.server.host.clone(),
port: config.server.port,
},
ui: UiConfigResponse::from(&config.ui),
config_path,
config_writable,
}))
Ok(Json(ConfigResponse {
backend: format!("{:?}", config.storage.backend).to_lowercase(),
database_path: config
.storage
.sqlite
.as_ref()
.map(|s| s.path.to_string_lossy().to_string()),
roots: roots
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect(),
scanning: ScanningConfigResponse {
watch: config.scanning.watch,
poll_interval_secs: config.scanning.poll_interval_secs,
ignore_patterns: config.scanning.ignore_patterns.clone(),
},
server: ServerConfigResponse {
host: config.server.host.clone(),
port: config.server.port,
},
ui: UiConfigResponse::from(&config.ui),
config_path,
config_writable,
}))
}
pub async fn get_ui_config(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<UiConfigResponse>, ApiError> {
let config = state.config.read().await;
Ok(Json(UiConfigResponse::from(&config.ui)))
let config = state.config.read().await;
Ok(Json(UiConfigResponse::from(&config.ui)))
}
pub async fn update_ui_config(
State(state): State<AppState>,
Json(req): Json<UpdateUiConfigRequest>,
State(state): State<AppState>,
Json(req): Json<UpdateUiConfigRequest>,
) -> Result<Json<UiConfigResponse>, ApiError> {
let mut config = state.config.write().await;
if let Some(theme) = req.theme {
config.ui.theme = theme;
}
if let Some(default_view) = req.default_view {
config.ui.default_view = default_view;
}
if let Some(default_page_size) = req.default_page_size {
config.ui.default_page_size = default_page_size;
}
if let Some(default_view_mode) = req.default_view_mode {
config.ui.default_view_mode = default_view_mode;
}
if let Some(auto_play) = req.auto_play_media {
config.ui.auto_play_media = auto_play;
}
if let Some(show_thumbs) = req.show_thumbnails {
config.ui.show_thumbnails = show_thumbs;
}
if let Some(collapsed) = req.sidebar_collapsed {
config.ui.sidebar_collapsed = collapsed;
}
let mut config = state.config.write().await;
if let Some(theme) = req.theme {
config.ui.theme = theme;
}
if let Some(default_view) = req.default_view {
config.ui.default_view = default_view;
}
if let Some(default_page_size) = req.default_page_size {
config.ui.default_page_size = default_page_size;
}
if let Some(default_view_mode) = req.default_view_mode {
config.ui.default_view_mode = default_view_mode;
}
if let Some(auto_play) = req.auto_play_media {
config.ui.auto_play_media = auto_play;
}
if let Some(show_thumbs) = req.show_thumbnails {
config.ui.show_thumbnails = show_thumbs;
}
if let Some(collapsed) = req.sidebar_collapsed {
config.ui.sidebar_collapsed = collapsed;
}
if let Some(ref path) = state.config_path {
config.save_to_file(path).map_err(ApiError)?;
}
if let Some(ref path) = state.config_path {
config.save_to_file(path).map_err(ApiError)?;
}
Ok(Json(UiConfigResponse::from(&config.ui)))
Ok(Json(UiConfigResponse::from(&config.ui)))
}
pub async fn update_scanning_config(
State(state): State<AppState>,
Json(req): Json<UpdateScanningRequest>,
State(state): State<AppState>,
Json(req): Json<UpdateScanningRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
let mut config = state.config.write().await;
if let Some(watch) = req.watch {
config.scanning.watch = watch;
}
if let Some(interval) = req.poll_interval_secs {
config.scanning.poll_interval_secs = interval;
}
if let Some(patterns) = req.ignore_patterns {
config.scanning.ignore_patterns = patterns;
}
let mut config = state.config.write().await;
if let Some(watch) = req.watch {
config.scanning.watch = watch;
}
if let Some(interval) = req.poll_interval_secs {
config.scanning.poll_interval_secs = interval;
}
if let Some(patterns) = req.ignore_patterns {
config.scanning.ignore_patterns = patterns;
}
// Persist to disk if we have a config path
if let Some(ref path) = state.config_path {
config.save_to_file(path).map_err(ApiError)?;
}
// Persist to disk if we have a config path
if let Some(ref path) = state.config_path {
config.save_to_file(path).map_err(ApiError)?;
}
let roots = state.storage.list_root_dirs().await?;
let roots = state.storage.list_root_dirs().await?;
let config_path = state
.config_path
.as_ref()
.map(|p| p.to_string_lossy().to_string());
let config_writable = match &state.config_path {
Some(path) => {
if path.exists() {
std::fs::metadata(path)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
} else {
path.parent()
.map(|parent| {
std::fs::metadata(parent)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
})
.unwrap_or(false)
}
}
None => false,
};
let config_path = state
.config_path
.as_ref()
.map(|p| p.to_string_lossy().to_string());
let config_writable = match &state.config_path {
Some(path) => {
if path.exists() {
std::fs::metadata(path)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
} else {
path
.parent()
.map(|parent| {
std::fs::metadata(parent)
.map(|m| !m.permissions().readonly())
.unwrap_or(false)
})
.unwrap_or(false)
}
},
None => false,
};
Ok(Json(ConfigResponse {
backend: format!("{:?}", config.storage.backend).to_lowercase(),
database_path: config
.storage
.sqlite
.as_ref()
.map(|s| s.path.to_string_lossy().to_string()),
roots: roots
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect(),
scanning: ScanningConfigResponse {
watch: config.scanning.watch,
poll_interval_secs: config.scanning.poll_interval_secs,
ignore_patterns: config.scanning.ignore_patterns.clone(),
},
server: ServerConfigResponse {
host: config.server.host.clone(),
port: config.server.port,
},
ui: UiConfigResponse::from(&config.ui),
config_path,
config_writable,
}))
Ok(Json(ConfigResponse {
backend: format!("{:?}", config.storage.backend).to_lowercase(),
database_path: config
.storage
.sqlite
.as_ref()
.map(|s| s.path.to_string_lossy().to_string()),
roots: roots
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect(),
scanning: ScanningConfigResponse {
watch: config.scanning.watch,
poll_interval_secs: config.scanning.poll_interval_secs,
ignore_patterns: config.scanning.ignore_patterns.clone(),
},
server: ServerConfigResponse {
host: config.server.host.clone(),
port: config.server.port,
},
ui: UiConfigResponse::from(&config.ui),
config_path,
config_writable,
}))
}
pub async fn add_root(
State(state): State<AppState>,
Json(req): Json<RootDirRequest>,
State(state): State<AppState>,
Json(req): Json<RootDirRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
let path = std::path::PathBuf::from(&req.path);
let path = std::path::PathBuf::from(&req.path);
if !path.exists() {
return Err(ApiError(pinakes_core::error::PinakesError::FileNotFound(
path,
)));
if !path.exists() {
return Err(ApiError(pinakes_core::error::PinakesError::FileNotFound(
path,
)));
}
state.storage.add_root_dir(path.clone()).await?;
{
let mut config = state.config.write().await;
if !config.directories.roots.contains(&path) {
config.directories.roots.push(path);
}
state.storage.add_root_dir(path.clone()).await?;
{
let mut config = state.config.write().await;
if !config.directories.roots.contains(&path) {
config.directories.roots.push(path);
}
if let Some(ref config_path) = state.config_path {
config.save_to_file(config_path).map_err(ApiError)?;
}
if let Some(ref config_path) = state.config_path {
config.save_to_file(config_path).map_err(ApiError)?;
}
}
get_config(State(state)).await
get_config(State(state)).await
}
pub async fn remove_root(
State(state): State<AppState>,
Json(req): Json<RootDirRequest>,
State(state): State<AppState>,
Json(req): Json<RootDirRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
let path = std::path::PathBuf::from(&req.path);
let path = std::path::PathBuf::from(&req.path);
state.storage.remove_root_dir(&path).await?;
state.storage.remove_root_dir(&path).await?;
{
let mut config = state.config.write().await;
config.directories.roots.retain(|r| r != &path);
if let Some(ref config_path) = state.config_path {
config.save_to_file(config_path).map_err(ApiError)?;
}
{
let mut config = state.config.write().await;
config.directories.roots.retain(|r| r != &path);
if let Some(ref config_path) = state.config_path {
config.save_to_file(config_path).map_err(ApiError)?;
}
}
get_config(State(state)).await
get_config(State(state)).await
}

View file

@ -1,34 +1,31 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::DatabaseStatsResponse;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::DatabaseStatsResponse, error::ApiError, state::AppState};
pub async fn database_stats(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<DatabaseStatsResponse>, ApiError> {
let stats = state.storage.database_stats().await?;
Ok(Json(DatabaseStatsResponse {
media_count: stats.media_count,
tag_count: stats.tag_count,
collection_count: stats.collection_count,
audit_count: stats.audit_count,
database_size_bytes: stats.database_size_bytes,
backend_name: stats.backend_name,
}))
let stats = state.storage.database_stats().await?;
Ok(Json(DatabaseStatsResponse {
media_count: stats.media_count,
tag_count: stats.tag_count,
collection_count: stats.collection_count,
audit_count: stats.audit_count,
database_size_bytes: stats.database_size_bytes,
backend_name: stats.backend_name,
}))
}
pub async fn vacuum_database(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
state.storage.vacuum().await?;
Ok(Json(serde_json::json!({"status": "ok"})))
state.storage.vacuum().await?;
Ok(Json(serde_json::json!({"status": "ok"})))
}
pub async fn clear_database(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
state.storage.clear_all_data().await?;
Ok(Json(serde_json::json!({"status": "ok"})))
state.storage.clear_all_data().await?;
Ok(Json(serde_json::json!({"status": "ok"})))
}

View file

@ -1,30 +1,31 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::{DuplicateGroupResponse, MediaResponse};
use crate::error::ApiError;
use crate::state::AppState;
use crate::{
dto::{DuplicateGroupResponse, MediaResponse},
error::ApiError,
state::AppState,
};
pub async fn list_duplicates(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<Vec<DuplicateGroupResponse>>, ApiError> {
let groups = state.storage.find_duplicates().await?;
let groups = state.storage.find_duplicates().await?;
let response: Vec<DuplicateGroupResponse> = groups
.into_iter()
.map(|items| {
let content_hash = items
.first()
.map(|i| i.content_hash.0.clone())
.unwrap_or_default();
let media_items: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
DuplicateGroupResponse {
content_hash,
items: media_items,
}
})
.collect();
let response: Vec<DuplicateGroupResponse> = groups
.into_iter()
.map(|items| {
let content_hash = items
.first()
.map(|i| i.content_hash.0.clone())
.unwrap_or_default();
let media_items: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
DuplicateGroupResponse {
content_hash,
items: media_items,
}
})
.collect();
Ok(Json(response))
Ok(Json(response))
}

View file

@ -1,48 +1,48 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn trigger_enrichment(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
// Submit enrichment as a background job
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Enrich {
media_ids: vec![MediaId(id)],
})
.await;
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
// Submit enrichment as a background job
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Enrich {
media_ids: vec![MediaId(id)],
})
.await;
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
pub async fn get_external_metadata(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<Vec<ExternalMetadataResponse>>, ApiError> {
let metadata = state.storage.get_external_metadata(MediaId(id)).await?;
Ok(Json(
metadata
.into_iter()
.map(ExternalMetadataResponse::from)
.collect(),
))
let metadata = state.storage.get_external_metadata(MediaId(id)).await?;
Ok(Json(
metadata
.into_iter()
.map(ExternalMetadataResponse::from)
.collect(),
))
}
pub async fn batch_enrich(
State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field
State(state): State<AppState>,
Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field
) -> Result<Json<serde_json::Value>, ApiError> {
let media_ids: Vec<MediaId> = req.media_ids.into_iter().map(MediaId).collect();
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Enrich { media_ids })
.await;
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
let media_ids: Vec<MediaId> =
req.media_ids.into_iter().map(MediaId).collect();
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Enrich { media_ids })
.await;
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}

View file

@ -1,42 +1,42 @@
use axum::Json;
use axum::extract::State;
use serde::Deserialize;
use std::path::PathBuf;
use crate::error::ApiError;
use crate::state::AppState;
use axum::{Json, extract::State};
use serde::Deserialize;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
pub struct ExportRequest {
pub format: String,
pub destination: PathBuf,
pub format: String,
pub destination: PathBuf,
}
pub async fn trigger_export(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
// Default export to JSON in data dir
let dest = pinakes_core::config::Config::default_data_dir().join("export.json");
let kind = pinakes_core::jobs::JobKind::Export {
format: pinakes_core::jobs::ExportFormat::Json,
destination: dest,
};
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
// Default export to JSON in data dir
let dest =
pinakes_core::config::Config::default_data_dir().join("export.json");
let kind = pinakes_core::jobs::JobKind::Export {
format: pinakes_core::jobs::ExportFormat::Json,
destination: dest,
};
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
pub async fn trigger_export_with_options(
State(state): State<AppState>,
Json(req): Json<ExportRequest>,
State(state): State<AppState>,
Json(req): Json<ExportRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let format = match req.format.as_str() {
"csv" => pinakes_core::jobs::ExportFormat::Csv,
_ => pinakes_core::jobs::ExportFormat::Json,
};
let kind = pinakes_core::jobs::JobKind::Export {
format,
destination: req.destination,
};
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
let format = match req.format.as_str() {
"csv" => pinakes_core::jobs::ExportFormat::Csv,
_ => pinakes_core::jobs::ExportFormat::Json,
};
let kind = pinakes_core::jobs::JobKind::Export {
format,
destination: req.destination,
};
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}

View file

@ -1,9 +1,6 @@
use std::time::Instant;
use axum::Json;
use axum::extract::State;
use axum::http::StatusCode;
use axum::response::IntoResponse;
use axum::{Json, extract::State, http::StatusCode, response::IntoResponse};
use serde::{Deserialize, Serialize};
use crate::state::AppState;
@ -11,208 +8,215 @@ use crate::state::AppState;
/// Basic health check response
#[derive(Debug, Serialize, Deserialize)]
pub struct HealthResponse {
pub status: String,
pub version: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub database: Option<DatabaseHealth>,
#[serde(skip_serializing_if = "Option::is_none")]
pub filesystem: Option<FilesystemHealth>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cache: Option<CacheHealth>,
pub status: String,
pub version: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub database: Option<DatabaseHealth>,
#[serde(skip_serializing_if = "Option::is_none")]
pub filesystem: Option<FilesystemHealth>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cache: Option<CacheHealth>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct DatabaseHealth {
pub status: String,
pub latency_ms: u64,
#[serde(skip_serializing_if = "Option::is_none")]
pub media_count: Option<u64>,
pub status: String,
pub latency_ms: u64,
#[serde(skip_serializing_if = "Option::is_none")]
pub media_count: Option<u64>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct FilesystemHealth {
pub status: String,
pub roots_configured: usize,
pub roots_accessible: usize,
pub status: String,
pub roots_configured: usize,
pub roots_accessible: usize,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct CacheHealth {
pub hit_rate: f64,
pub total_entries: u64,
pub responses_size: u64,
pub queries_size: u64,
pub media_size: u64,
pub hit_rate: f64,
pub total_entries: u64,
pub responses_size: u64,
pub queries_size: u64,
pub media_size: u64,
}
/// Comprehensive health check - includes database, filesystem, and cache status
pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
let mut response = HealthResponse {
status: "ok".to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
database: None,
filesystem: None,
cache: None,
};
let mut response = HealthResponse {
status: "ok".to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
database: None,
filesystem: None,
cache: None,
};
// Check database health
let db_start = Instant::now();
let db_health = match state.storage.count_media().await {
Ok(count) => DatabaseHealth {
status: "ok".to_string(),
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: Some(count),
},
Err(e) => {
response.status = "degraded".to_string();
DatabaseHealth {
status: format!("error: {}", e),
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: None,
}
}
};
response.database = Some(db_health);
// Check database health
let db_start = Instant::now();
let db_health = match state.storage.count_media().await {
Ok(count) => {
DatabaseHealth {
status: "ok".to_string(),
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: Some(count),
}
},
Err(e) => {
response.status = "degraded".to_string();
DatabaseHealth {
status: format!("error: {}", e),
latency_ms: db_start.elapsed().as_millis() as u64,
media_count: None,
}
},
};
response.database = Some(db_health);
// Check filesystem health (root directories)
let roots: Vec<std::path::PathBuf> = state.storage.list_root_dirs().await.unwrap_or_default();
let roots_accessible = roots.iter().filter(|r| r.exists()).count();
if roots_accessible < roots.len() {
response.status = "degraded".to_string();
// Check filesystem health (root directories)
let roots: Vec<std::path::PathBuf> =
state.storage.list_root_dirs().await.unwrap_or_default();
let roots_accessible = roots.iter().filter(|r| r.exists()).count();
if roots_accessible < roots.len() {
response.status = "degraded".to_string();
}
response.filesystem = Some(FilesystemHealth {
status: if roots_accessible == roots.len() {
"ok"
} else {
"degraded"
}
response.filesystem = Some(FilesystemHealth {
status: if roots_accessible == roots.len() {
"ok"
} else {
"degraded"
}
.to_string(),
roots_configured: roots.len(),
roots_accessible,
});
.to_string(),
roots_configured: roots.len(),
roots_accessible,
});
// Get cache statistics
let cache_stats = state.cache.stats();
response.cache = Some(CacheHealth {
hit_rate: cache_stats.overall_hit_rate(),
total_entries: cache_stats.total_entries(),
responses_size: cache_stats.responses.size,
queries_size: cache_stats.queries.size,
media_size: cache_stats.media.size,
});
// Get cache statistics
let cache_stats = state.cache.stats();
response.cache = Some(CacheHealth {
hit_rate: cache_stats.overall_hit_rate(),
total_entries: cache_stats.total_entries(),
responses_size: cache_stats.responses.size,
queries_size: cache_stats.queries.size,
media_size: cache_stats.media.size,
});
Json(response)
Json(response)
}
/// Liveness probe - just checks if the server is running
/// Returns 200 OK if the server process is alive
pub async fn liveness() -> impl IntoResponse {
(
StatusCode::OK,
Json(serde_json::json!({
"status": "alive"
})),
)
(
StatusCode::OK,
Json(serde_json::json!({
"status": "alive"
})),
)
}
/// Readiness probe - checks if the server can serve requests
/// Returns 200 OK if database is accessible
pub async fn readiness(State(state): State<AppState>) -> impl IntoResponse {
// Check database connectivity
let db_start = Instant::now();
match state.storage.count_media().await {
Ok(_) => {
let latency = db_start.elapsed().as_millis() as u64;
(
StatusCode::OK,
Json(serde_json::json!({
"status": "ready",
"database_latency_ms": latency
})),
)
}
Err(e) => (
StatusCode::SERVICE_UNAVAILABLE,
Json(serde_json::json!({
"status": "not_ready",
"reason": e.to_string()
})),
),
}
// Check database connectivity
let db_start = Instant::now();
match state.storage.count_media().await {
Ok(_) => {
let latency = db_start.elapsed().as_millis() as u64;
(
StatusCode::OK,
Json(serde_json::json!({
"status": "ready",
"database_latency_ms": latency
})),
)
},
Err(e) => {
(
StatusCode::SERVICE_UNAVAILABLE,
Json(serde_json::json!({
"status": "not_ready",
"reason": e.to_string()
})),
)
},
}
}
/// Detailed health check for monitoring dashboards
#[derive(Debug, Serialize, Deserialize)]
pub struct DetailedHealthResponse {
pub status: String,
pub version: String,
pub uptime_seconds: u64,
pub database: DatabaseHealth,
pub filesystem: FilesystemHealth,
pub cache: CacheHealth,
pub jobs: JobsHealth,
pub status: String,
pub version: String,
pub uptime_seconds: u64,
pub database: DatabaseHealth,
pub filesystem: FilesystemHealth,
pub cache: CacheHealth,
pub jobs: JobsHealth,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct JobsHealth {
pub pending: usize,
pub running: usize,
pub pending: usize,
pub running: usize,
}
pub async fn health_detailed(State(state): State<AppState>) -> Json<DetailedHealthResponse> {
// Check database
let db_start = Instant::now();
let (db_status, media_count) = match state.storage.count_media().await {
Ok(count) => ("ok".to_string(), Some(count)),
Err(e) => (format!("error: {}", e), None),
};
let db_latency = db_start.elapsed().as_millis() as u64;
pub async fn health_detailed(
State(state): State<AppState>,
) -> Json<DetailedHealthResponse> {
// Check database
let db_start = Instant::now();
let (db_status, media_count) = match state.storage.count_media().await {
Ok(count) => ("ok".to_string(), Some(count)),
Err(e) => (format!("error: {}", e), None),
};
let db_latency = db_start.elapsed().as_millis() as u64;
// Check filesystem
let roots = state.storage.list_root_dirs().await.unwrap_or_default();
let roots_accessible = roots.iter().filter(|r| r.exists()).count();
// Check filesystem
let roots = state.storage.list_root_dirs().await.unwrap_or_default();
let roots_accessible = roots.iter().filter(|r| r.exists()).count();
// Get cache stats
let cache_stats = state.cache.stats();
// Get cache stats
let cache_stats = state.cache.stats();
// Get job queue stats
let job_stats = state.job_queue.stats().await;
// Get job queue stats
let job_stats = state.job_queue.stats().await;
let overall_status = if db_status == "ok" && roots_accessible == roots.len() {
let overall_status = if db_status == "ok" && roots_accessible == roots.len() {
"ok"
} else {
"degraded"
};
Json(DetailedHealthResponse {
status: overall_status.to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
uptime_seconds: 0, // Could track server start time
database: DatabaseHealth {
status: db_status,
latency_ms: db_latency,
media_count,
},
filesystem: FilesystemHealth {
status: if roots_accessible == roots.len() {
"ok"
} else {
} else {
"degraded"
};
Json(DetailedHealthResponse {
status: overall_status.to_string(),
version: env!("CARGO_PKG_VERSION").to_string(),
uptime_seconds: 0, // Could track server start time
database: DatabaseHealth {
status: db_status,
latency_ms: db_latency,
media_count,
},
filesystem: FilesystemHealth {
status: if roots_accessible == roots.len() {
"ok"
} else {
"degraded"
}
.to_string(),
roots_configured: roots.len(),
roots_accessible,
},
cache: CacheHealth {
hit_rate: cache_stats.overall_hit_rate(),
total_entries: cache_stats.total_entries(),
responses_size: cache_stats.responses.size,
queries_size: cache_stats.queries.size,
media_size: cache_stats.media.size,
},
jobs: JobsHealth {
pending: job_stats.pending,
running: job_stats.running,
},
})
}
.to_string(),
roots_configured: roots.len(),
roots_accessible,
},
cache: CacheHealth {
hit_rate: cache_stats.overall_hit_rate(),
total_entries: cache_stats.total_entries(),
responses_size: cache_stats.responses.size,
queries_size: cache_stats.queries.size,
media_size: cache_stats.media.size,
},
jobs: JobsHealth {
pending: job_stats.pending,
running: job_stats.running,
},
})
}

View file

@ -1,99 +1,98 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use serde::Deserialize;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
pub struct OrphanResolveRequest {
pub action: String,
pub ids: Vec<uuid::Uuid>,
pub action: String,
pub ids: Vec<uuid::Uuid>,
}
pub async fn trigger_orphan_detection(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
let kind = pinakes_core::jobs::JobKind::OrphanDetection;
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
let kind = pinakes_core::jobs::JobKind::OrphanDetection;
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
pub async fn trigger_verify_integrity(
State(state): State<AppState>,
Json(req): Json<VerifyIntegrityRequest>,
State(state): State<AppState>,
Json(req): Json<VerifyIntegrityRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let media_ids = req
.media_ids
.into_iter()
.map(pinakes_core::model::MediaId)
.collect();
let kind = pinakes_core::jobs::JobKind::VerifyIntegrity { media_ids };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
let media_ids = req
.media_ids
.into_iter()
.map(pinakes_core::model::MediaId)
.collect();
let kind = pinakes_core::jobs::JobKind::VerifyIntegrity { media_ids };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[derive(Debug, Deserialize)]
pub struct VerifyIntegrityRequest {
pub media_ids: Vec<uuid::Uuid>,
pub media_ids: Vec<uuid::Uuid>,
}
pub async fn trigger_cleanup_thumbnails(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
let kind = pinakes_core::jobs::JobKind::CleanupThumbnails;
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
let kind = pinakes_core::jobs::JobKind::CleanupThumbnails;
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[derive(Debug, Deserialize)]
pub struct GenerateThumbnailsRequest {
/// When true, only generate thumbnails for items that don't have one yet.
/// When false (default), regenerate all thumbnails.
#[serde(default)]
pub only_missing: bool,
/// When true, only generate thumbnails for items that don't have one yet.
/// When false (default), regenerate all thumbnails.
#[serde(default)]
pub only_missing: bool,
}
pub async fn generate_all_thumbnails(
State(state): State<AppState>,
body: Option<Json<GenerateThumbnailsRequest>>,
State(state): State<AppState>,
body: Option<Json<GenerateThumbnailsRequest>>,
) -> Result<Json<serde_json::Value>, ApiError> {
let only_missing = body.map(|b| b.only_missing).unwrap_or(false);
let media_ids = state
.storage
.list_media_ids_for_thumbnails(only_missing)
.await?;
let count = media_ids.len();
if count == 0 {
return Ok(Json(serde_json::json!({
"job_id": null,
"media_count": 0,
"message": "no media items to process"
})));
}
let kind = pinakes_core::jobs::JobKind::GenerateThumbnails { media_ids };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({
"job_id": job_id.to_string(),
"media_count": count
})))
let only_missing = body.map(|b| b.only_missing).unwrap_or(false);
let media_ids = state
.storage
.list_media_ids_for_thumbnails(only_missing)
.await?;
let count = media_ids.len();
if count == 0 {
return Ok(Json(serde_json::json!({
"job_id": null,
"media_count": 0,
"message": "no media items to process"
})));
}
let kind = pinakes_core::jobs::JobKind::GenerateThumbnails { media_ids };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({
"job_id": job_id.to_string(),
"media_count": count
})))
}
pub async fn resolve_orphans(
State(state): State<AppState>,
Json(req): Json<OrphanResolveRequest>,
State(state): State<AppState>,
Json(req): Json<OrphanResolveRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let action = match req.action.as_str() {
"delete" => pinakes_core::integrity::OrphanAction::Delete,
_ => pinakes_core::integrity::OrphanAction::Ignore,
};
let ids: Vec<pinakes_core::model::MediaId> = req
.ids
.into_iter()
.map(pinakes_core::model::MediaId)
.collect();
let count = pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
.await
.map_err(ApiError)?;
Ok(Json(serde_json::json!({ "resolved": count })))
let action = match req.action.as_str() {
"delete" => pinakes_core::integrity::OrphanAction::Delete,
_ => pinakes_core::integrity::OrphanAction::Ignore,
};
let ids: Vec<pinakes_core::model::MediaId> = req
.ids
.into_iter()
.map(pinakes_core::model::MediaId)
.collect();
let count =
pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
.await
.map_err(ApiError)?;
Ok(Json(serde_json::json!({ "resolved": count })))
}

View file

@ -1,34 +1,38 @@
use axum::Json;
use axum::extract::{Path, State};
use crate::error::ApiError;
use crate::state::AppState;
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::jobs::Job;
use crate::{error::ApiError, state::AppState};
pub async fn list_jobs(State(state): State<AppState>) -> Json<Vec<Job>> {
Json(state.job_queue.list().await)
Json(state.job_queue.list().await)
}
pub async fn get_job(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
) -> Result<Json<Job>, ApiError> {
state.job_queue.status(id).await.map(Json).ok_or_else(|| {
pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}")).into()
})
state.job_queue.status(id).await.map(Json).ok_or_else(|| {
pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}"))
.into()
})
}
pub async fn cancel_job(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
let cancelled = state.job_queue.cancel(id).await;
if cancelled {
Ok(Json(serde_json::json!({ "cancelled": true })))
} else {
Err(pinakes_core::error::PinakesError::NotFound(format!(
"job not found or already finished: {id}"
))
.into())
}
let cancelled = state.job_queue.cancel(id).await;
if cancelled {
Ok(Json(serde_json::json!({ "cancelled": true })))
} else {
Err(
pinakes_core::error::PinakesError::NotFound(format!(
"job not found or already finished: {id}"
))
.into(),
)
}
}

File diff suppressed because it is too large Load diff

View file

@ -7,15 +7,22 @@
//! - Link reindexing
use axum::{
Json, Router,
extract::{Path, Query, State},
routing::{get, post},
Json,
Router,
extract::{Path, Query, State},
routing::{get, post},
};
use pinakes_core::model::{
BacklinkInfo,
GraphData,
GraphEdge,
GraphNode,
MarkdownLink,
MediaId,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use pinakes_core::model::{BacklinkInfo, GraphData, GraphEdge, GraphNode, MarkdownLink, MediaId};
use crate::{error::ApiError, state::AppState};
// ===== Response DTOs =====
@ -23,174 +30,174 @@ use crate::{error::ApiError, state::AppState};
/// Response for backlinks query
#[derive(Debug, Serialize)]
pub struct BacklinksResponse {
pub backlinks: Vec<BacklinkItem>,
pub count: usize,
pub backlinks: Vec<BacklinkItem>,
pub count: usize,
}
/// Individual backlink item
#[derive(Debug, Serialize)]
pub struct BacklinkItem {
pub link_id: Uuid,
pub source_id: Uuid,
pub source_title: Option<String>,
pub source_path: String,
pub link_text: Option<String>,
pub line_number: Option<i32>,
pub context: Option<String>,
pub link_type: String,
pub link_id: Uuid,
pub source_id: Uuid,
pub source_title: Option<String>,
pub source_path: String,
pub link_text: Option<String>,
pub line_number: Option<i32>,
pub context: Option<String>,
pub link_type: String,
}
impl From<BacklinkInfo> for BacklinkItem {
fn from(info: BacklinkInfo) -> Self {
Self {
link_id: info.link_id,
source_id: info.source_id.0,
source_title: info.source_title,
source_path: info.source_path,
link_text: info.link_text,
line_number: info.line_number,
context: info.context,
link_type: info.link_type.to_string(),
}
fn from(info: BacklinkInfo) -> Self {
Self {
link_id: info.link_id,
source_id: info.source_id.0,
source_title: info.source_title,
source_path: info.source_path,
link_text: info.link_text,
line_number: info.line_number,
context: info.context,
link_type: info.link_type.to_string(),
}
}
}
/// Response for outgoing links query
#[derive(Debug, Serialize)]
pub struct OutgoingLinksResponse {
pub links: Vec<OutgoingLinkItem>,
pub count: usize,
pub links: Vec<OutgoingLinkItem>,
pub count: usize,
}
/// Individual outgoing link item
#[derive(Debug, Serialize)]
pub struct OutgoingLinkItem {
pub id: Uuid,
pub target_path: String,
pub target_id: Option<Uuid>,
pub link_text: Option<String>,
pub line_number: Option<i32>,
pub link_type: String,
pub is_resolved: bool,
pub id: Uuid,
pub target_path: String,
pub target_id: Option<Uuid>,
pub link_text: Option<String>,
pub line_number: Option<i32>,
pub link_type: String,
pub is_resolved: bool,
}
impl From<MarkdownLink> for OutgoingLinkItem {
fn from(link: MarkdownLink) -> Self {
Self {
id: link.id,
target_path: link.target_path,
target_id: link.target_media_id.map(|id| id.0),
link_text: link.link_text,
line_number: link.line_number,
link_type: link.link_type.to_string(),
is_resolved: link.target_media_id.is_some(),
}
fn from(link: MarkdownLink) -> Self {
Self {
id: link.id,
target_path: link.target_path,
target_id: link.target_media_id.map(|id| id.0),
link_text: link.link_text,
line_number: link.line_number,
link_type: link.link_type.to_string(),
is_resolved: link.target_media_id.is_some(),
}
}
}
/// Response for graph visualization
#[derive(Debug, Serialize)]
pub struct GraphResponse {
pub nodes: Vec<GraphNodeResponse>,
pub edges: Vec<GraphEdgeResponse>,
pub node_count: usize,
pub edge_count: usize,
pub nodes: Vec<GraphNodeResponse>,
pub edges: Vec<GraphEdgeResponse>,
pub node_count: usize,
pub edge_count: usize,
}
/// Graph node for visualization
#[derive(Debug, Serialize)]
pub struct GraphNodeResponse {
pub id: String,
pub label: String,
pub title: Option<String>,
pub media_type: String,
pub link_count: u32,
pub backlink_count: u32,
pub id: String,
pub label: String,
pub title: Option<String>,
pub media_type: String,
pub link_count: u32,
pub backlink_count: u32,
}
impl From<GraphNode> for GraphNodeResponse {
fn from(node: GraphNode) -> Self {
Self {
id: node.id,
label: node.label,
title: node.title,
media_type: node.media_type,
link_count: node.link_count,
backlink_count: node.backlink_count,
}
fn from(node: GraphNode) -> Self {
Self {
id: node.id,
label: node.label,
title: node.title,
media_type: node.media_type,
link_count: node.link_count,
backlink_count: node.backlink_count,
}
}
}
/// Graph edge for visualization
#[derive(Debug, Serialize)]
pub struct GraphEdgeResponse {
pub source: String,
pub target: String,
pub link_type: String,
pub source: String,
pub target: String,
pub link_type: String,
}
impl From<GraphEdge> for GraphEdgeResponse {
fn from(edge: GraphEdge) -> Self {
Self {
source: edge.source,
target: edge.target,
link_type: edge.link_type.to_string(),
}
fn from(edge: GraphEdge) -> Self {
Self {
source: edge.source,
target: edge.target,
link_type: edge.link_type.to_string(),
}
}
}
impl From<GraphData> for GraphResponse {
fn from(data: GraphData) -> Self {
let node_count = data.nodes.len();
let edge_count = data.edges.len();
Self {
nodes: data
.nodes
.into_iter()
.map(GraphNodeResponse::from)
.collect(),
edges: data
.edges
.into_iter()
.map(GraphEdgeResponse::from)
.collect(),
node_count,
edge_count,
}
fn from(data: GraphData) -> Self {
let node_count = data.nodes.len();
let edge_count = data.edges.len();
Self {
nodes: data
.nodes
.into_iter()
.map(GraphNodeResponse::from)
.collect(),
edges: data
.edges
.into_iter()
.map(GraphEdgeResponse::from)
.collect(),
node_count,
edge_count,
}
}
}
/// Query parameters for graph endpoint
#[derive(Debug, Deserialize)]
pub struct GraphQuery {
/// Center node ID (optional, if not provided returns entire graph)
pub center: Option<Uuid>,
/// Depth of traversal from center (default: 2, max: 5)
#[serde(default = "default_depth")]
pub depth: u32,
/// Center node ID (optional, if not provided returns entire graph)
pub center: Option<Uuid>,
/// Depth of traversal from center (default: 2, max: 5)
#[serde(default = "default_depth")]
pub depth: u32,
}
fn default_depth() -> u32 {
2
2
}
/// Response for reindex operation
#[derive(Debug, Serialize)]
pub struct ReindexResponse {
pub message: String,
pub links_extracted: usize,
pub message: String,
pub links_extracted: usize,
}
/// Response for link resolution
#[derive(Debug, Serialize)]
pub struct ResolveLinksResponse {
pub resolved_count: u64,
pub resolved_count: u64,
}
/// Response for unresolved links count
#[derive(Debug, Serialize)]
pub struct UnresolvedLinksResponse {
pub count: u64,
pub count: u64,
}
// ===== Handlers =====
@ -199,129 +206,131 @@ pub struct UnresolvedLinksResponse {
///
/// GET /api/v1/media/{id}/backlinks
pub async fn get_backlinks(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<BacklinksResponse>, ApiError> {
let media_id = MediaId(id);
let backlinks = state.storage.get_backlinks(media_id).await?;
let media_id = MediaId(id);
let backlinks = state.storage.get_backlinks(media_id).await?;
let items: Vec<BacklinkItem> = backlinks.into_iter().map(BacklinkItem::from).collect();
let count = items.len();
let items: Vec<BacklinkItem> =
backlinks.into_iter().map(BacklinkItem::from).collect();
let count = items.len();
Ok(Json(BacklinksResponse {
backlinks: items,
count,
}))
Ok(Json(BacklinksResponse {
backlinks: items,
count,
}))
}
/// Get outgoing links from a media item.
///
/// GET /api/v1/media/{id}/outgoing-links
pub async fn get_outgoing_links(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<OutgoingLinksResponse>, ApiError> {
let media_id = MediaId(id);
let links = state.storage.get_outgoing_links(media_id).await?;
let media_id = MediaId(id);
let links = state.storage.get_outgoing_links(media_id).await?;
let items: Vec<OutgoingLinkItem> = links.into_iter().map(OutgoingLinkItem::from).collect();
let count = items.len();
let items: Vec<OutgoingLinkItem> =
links.into_iter().map(OutgoingLinkItem::from).collect();
let count = items.len();
Ok(Json(OutgoingLinksResponse {
links: items,
count,
}))
Ok(Json(OutgoingLinksResponse {
links: items,
count,
}))
}
/// Get graph data for visualization.
///
/// GET /api/v1/notes/graph?center={uuid}&depth={n}
pub async fn get_graph(
State(state): State<AppState>,
Query(params): Query<GraphQuery>,
State(state): State<AppState>,
Query(params): Query<GraphQuery>,
) -> Result<Json<GraphResponse>, ApiError> {
let center_id = params.center.map(MediaId);
let depth = params.depth.min(5); // Enforce max depth
let center_id = params.center.map(MediaId);
let depth = params.depth.min(5); // Enforce max depth
let graph_data = state.storage.get_graph_data(center_id, depth).await?;
let graph_data = state.storage.get_graph_data(center_id, depth).await?;
Ok(Json(GraphResponse::from(graph_data)))
Ok(Json(GraphResponse::from(graph_data)))
}
/// Re-extract links from a media item.
///
/// POST /api/v1/media/{id}/reindex-links
pub async fn reindex_links(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<ReindexResponse>, ApiError> {
let media_id = MediaId(id);
let media_id = MediaId(id);
// Get the media item to read its content
let media = state.storage.get_media(media_id).await?;
// Get the media item to read its content
let media = state.storage.get_media(media_id).await?;
// Only process markdown files
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
match &media.media_type {
MediaType::Builtin(BuiltinMediaType::Markdown) => {}
_ => {
return Ok(Json(ReindexResponse {
message: "Skipped: not a markdown file".to_string(),
links_extracted: 0,
}));
}
}
// Only process markdown files
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
match &media.media_type {
MediaType::Builtin(BuiltinMediaType::Markdown) => {},
_ => {
return Ok(Json(ReindexResponse {
message: "Skipped: not a markdown file".to_string(),
links_extracted: 0,
}));
},
}
// Read the file content
let content = tokio::fs::read_to_string(&media.path)
.await
.map_err(|e| ApiError::internal(format!("Failed to read file: {}", e)))?;
// Read the file content
let content = tokio::fs::read_to_string(&media.path)
.await
.map_err(|e| ApiError::internal(format!("Failed to read file: {}", e)))?;
// Extract links
let links = pinakes_core::links::extract_links(media_id, &content);
let links_count = links.len();
// Extract links
let links = pinakes_core::links::extract_links(media_id, &content);
let links_count = links.len();
// Save links to database
state.storage.save_markdown_links(media_id, &links).await?;
// Save links to database
state.storage.save_markdown_links(media_id, &links).await?;
// Mark as extracted
state.storage.mark_links_extracted(media_id).await?;
// Mark as extracted
state.storage.mark_links_extracted(media_id).await?;
// Try to resolve any unresolved links
state.storage.resolve_links().await?;
// Try to resolve any unresolved links
state.storage.resolve_links().await?;
Ok(Json(ReindexResponse {
message: "Links extracted successfully".to_string(),
links_extracted: links_count,
}))
Ok(Json(ReindexResponse {
message: "Links extracted successfully".to_string(),
links_extracted: links_count,
}))
}
/// Resolve all unresolved links in the database.
///
/// POST /api/v1/notes/resolve-links
pub async fn resolve_links(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<ResolveLinksResponse>, ApiError> {
let resolved_count = state.storage.resolve_links().await?;
let resolved_count = state.storage.resolve_links().await?;
Ok(Json(ResolveLinksResponse { resolved_count }))
Ok(Json(ResolveLinksResponse { resolved_count }))
}
/// Get count of unresolved links.
///
/// GET /api/v1/notes/unresolved-count
pub async fn get_unresolved_count(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<UnresolvedLinksResponse>, ApiError> {
let count = state.storage.count_unresolved_links().await?;
let count = state.storage.count_unresolved_links().await?;
Ok(Json(UnresolvedLinksResponse { count }))
Ok(Json(UnresolvedLinksResponse { count }))
}
/// Create the routes for notes/links functionality.
pub fn routes() -> Router<AppState> {
Router::new()
.route("/graph", get(get_graph))
.route("/resolve-links", post(resolve_links))
.route("/unresolved-count", get(get_unresolved_count))
Router::new()
.route("/graph", get(get_graph))
.route("/resolve-links", post(resolve_links))
.route("/unresolved-count", get(get_unresolved_count))
}

View file

@ -1,12 +1,14 @@
use std::collections::HashMap;
use axum::{
Json, Router,
extract::{Query, State},
response::IntoResponse,
routing::get,
Json,
Router,
extract::{Query, State},
response::IntoResponse,
routing::get,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::{dto::MediaResponse, error::ApiError, state::AppState};
@ -14,182 +16,186 @@ use crate::{dto::MediaResponse, error::ApiError, state::AppState};
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum GroupBy {
#[default]
Day,
Month,
Year,
#[default]
Day,
Month,
Year,
}
/// Timeline query parameters
#[derive(Debug, Deserialize)]
pub struct TimelineQuery {
#[serde(default)]
pub group_by: GroupBy,
pub year: Option<i32>,
pub month: Option<u32>,
#[serde(default = "default_timeline_limit")]
pub limit: u64,
#[serde(default)]
pub group_by: GroupBy,
pub year: Option<i32>,
pub month: Option<u32>,
#[serde(default = "default_timeline_limit")]
pub limit: u64,
}
fn default_timeline_limit() -> u64 {
10000
10000
}
/// Timeline group response
#[derive(Debug, Serialize)]
pub struct TimelineGroup {
pub date: String,
pub count: usize,
pub cover_id: Option<String>,
pub items: Vec<MediaResponse>,
pub date: String,
pub count: usize,
pub cover_id: Option<String>,
pub items: Vec<MediaResponse>,
}
/// Map query parameters
#[derive(Debug, Deserialize)]
pub struct MapQuery {
pub lat1: f64,
pub lon1: f64,
pub lat2: f64,
pub lon2: f64,
pub lat1: f64,
pub lon1: f64,
pub lat2: f64,
pub lon2: f64,
}
/// Map marker response
#[derive(Debug, Serialize)]
pub struct MapMarker {
pub id: String,
pub latitude: f64,
pub longitude: f64,
pub thumbnail_url: Option<String>,
pub date_taken: Option<DateTime<Utc>>,
pub id: String,
pub latitude: f64,
pub longitude: f64,
pub thumbnail_url: Option<String>,
pub date_taken: Option<DateTime<Utc>>,
}
/// Get timeline of photos grouped by date
pub async fn get_timeline(
State(state): State<AppState>,
Query(query): Query<TimelineQuery>,
State(state): State<AppState>,
Query(query): Query<TimelineQuery>,
) -> Result<impl IntoResponse, ApiError> {
// Query photos with date_taken (limit is configurable, defaults to 10000)
let all_media = state
.storage
.list_media(&pinakes_core::model::Pagination {
offset: 0,
limit: query.limit.min(50000), // Cap at 50000 for safety
sort: Some("date_taken DESC".to_string()),
})
.await?;
// Query photos with date_taken (limit is configurable, defaults to 10000)
let all_media = state
.storage
.list_media(&pinakes_core::model::Pagination {
offset: 0,
limit: query.limit.min(50000), // Cap at 50000 for safety
sort: Some("date_taken DESC".to_string()),
})
.await?;
// Filter to only photos with date_taken
let photos: Vec<_> = all_media
.into_iter()
.filter(|item| {
item.date_taken.is_some()
&& item.media_type.category() == pinakes_core::media_type::MediaCategory::Image
})
.collect();
// Filter to only photos with date_taken
let photos: Vec<_> = all_media
.into_iter()
.filter(|item| {
item.date_taken.is_some()
&& item.media_type.category()
== pinakes_core::media_type::MediaCategory::Image
})
.collect();
// Group by the requested period
let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> = HashMap::new();
// Group by the requested period
let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> =
HashMap::new();
for photo in photos {
if let Some(date_taken) = photo.date_taken {
use chrono::Datelike;
for photo in photos {
if let Some(date_taken) = photo.date_taken {
use chrono::Datelike;
// Filter by year/month if specified
if let Some(y) = query.year
&& date_taken.year() != y
{
continue;
}
if let Some(m) = query.month
&& date_taken.month() != m
{
continue;
}
// Filter by year/month if specified
if let Some(y) = query.year
&& date_taken.year() != y
{
continue;
}
if let Some(m) = query.month
&& date_taken.month() != m
{
continue;
}
let key = match query.group_by {
GroupBy::Day => date_taken.format("%Y-%m-%d").to_string(),
GroupBy::Month => date_taken.format("%Y-%m").to_string(),
GroupBy::Year => date_taken.format("%Y").to_string(),
};
let key = match query.group_by {
GroupBy::Day => date_taken.format("%Y-%m-%d").to_string(),
GroupBy::Month => date_taken.format("%Y-%m").to_string(),
GroupBy::Year => date_taken.format("%Y").to_string(),
};
groups.entry(key).or_default().push(photo);
}
groups.entry(key).or_default().push(photo);
}
}
// Convert to response format
let mut timeline: Vec<TimelineGroup> = groups
.into_iter()
.map(|(date, items)| {
let cover_id = items.first().map(|i| i.id.0.to_string());
let count = items.len();
let items: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
// Convert to response format
let mut timeline: Vec<TimelineGroup> = groups
.into_iter()
.map(|(date, items)| {
let cover_id = items.first().map(|i| i.id.0.to_string());
let count = items.len();
let items: Vec<MediaResponse> =
items.into_iter().map(MediaResponse::from).collect();
TimelineGroup {
date,
count,
cover_id,
items,
}
})
.collect();
TimelineGroup {
date,
count,
cover_id,
items,
}
})
.collect();
// Sort by date descending
timeline.sort_by(|a, b| b.date.cmp(&a.date));
// Sort by date descending
timeline.sort_by(|a, b| b.date.cmp(&a.date));
Ok(Json(timeline))
Ok(Json(timeline))
}
/// Get photos in a bounding box for map view
pub async fn get_map_photos(
State(state): State<AppState>,
Query(query): Query<MapQuery>,
State(state): State<AppState>,
Query(query): Query<MapQuery>,
) -> Result<impl IntoResponse, ApiError> {
// Validate bounding box
let min_lat = query.lat1.min(query.lat2);
let max_lat = query.lat1.max(query.lat2);
let min_lon = query.lon1.min(query.lon2);
let max_lon = query.lon1.max(query.lon2);
// Validate bounding box
let min_lat = query.lat1.min(query.lat2);
let max_lat = query.lat1.max(query.lat2);
let min_lon = query.lon1.min(query.lon2);
let max_lon = query.lon1.max(query.lon2);
// Query all media (we'll filter in-memory for now - could optimize with DB query)
let all_media = state
.storage
.list_media(&pinakes_core::model::Pagination {
offset: 0,
limit: 10000,
sort: None,
})
.await?;
// Query all media (we'll filter in-memory for now - could optimize with DB
// query)
let all_media = state
.storage
.list_media(&pinakes_core::model::Pagination {
offset: 0,
limit: 10000,
sort: None,
})
.await?;
// Filter to photos with GPS coordinates in the bounding box
let markers: Vec<MapMarker> = all_media
.into_iter()
.filter_map(|item| {
if let (Some(lat), Some(lon)) = (item.latitude, item.longitude)
&& lat >= min_lat
&& lat <= max_lat
&& lon >= min_lon
&& lon <= max_lon
{
return Some(MapMarker {
id: item.id.0.to_string(),
latitude: lat,
longitude: lon,
thumbnail_url: item
.thumbnail_path
.map(|_p| format!("/api/v1/media/{}/thumbnail", item.id.0)),
date_taken: item.date_taken,
});
}
None
})
.collect();
// Filter to photos with GPS coordinates in the bounding box
let markers: Vec<MapMarker> = all_media
.into_iter()
.filter_map(|item| {
if let (Some(lat), Some(lon)) = (item.latitude, item.longitude)
&& lat >= min_lat
&& lat <= max_lat
&& lon >= min_lon
&& lon <= max_lon
{
return Some(MapMarker {
id: item.id.0.to_string(),
latitude: lat,
longitude: lon,
thumbnail_url: item
.thumbnail_path
.map(|_p| format!("/api/v1/media/{}/thumbnail", item.id.0)),
date_taken: item.date_taken,
});
}
None
})
.collect();
Ok(Json(markers))
Ok(Json(markers))
}
/// Photo routes
pub fn routes() -> Router<AppState> {
Router::new()
.route("/timeline", get(get_timeline))
.route("/map", get(get_map_photos))
Router::new()
.route("/timeline", get(get_timeline))
.route("/map", get(get_map_photos))
}

View file

@ -1,15 +1,11 @@
use axum::Json;
use axum::extract::{Extension, Path, State};
use axum::{
Json,
extract::{Extension, Path, State},
};
use pinakes_core::{model::MediaId, playlists::Playlist, users::UserId};
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::playlists::Playlist;
use pinakes_core::users::UserId;
use crate::{auth::resolve_user_id, dto::*, error::ApiError, state::AppState};
/// Check whether a user has access to a playlist.
///
@ -17,193 +13,194 @@ use pinakes_core::users::UserId;
/// mutations such as update, delete, add/remove/reorder items). When `false`
/// the playlist must either be public or owned by the requesting user.
async fn check_playlist_access(
storage: &pinakes_core::storage::DynStorageBackend,
playlist_id: Uuid,
user_id: UserId,
require_write: bool,
storage: &pinakes_core::storage::DynStorageBackend,
playlist_id: Uuid,
user_id: UserId,
require_write: bool,
) -> Result<Playlist, ApiError> {
let playlist = storage.get_playlist(playlist_id).await.map_err(ApiError)?;
if require_write {
// Write operations require ownership
if playlist.owner_id != user_id {
return Err(ApiError(pinakes_core::error::PinakesError::Authorization(
"only the playlist owner can modify this playlist".into(),
)));
}
} else {
// Read operations: allow if public or owner
if !playlist.is_public && playlist.owner_id != user_id {
return Err(ApiError(pinakes_core::error::PinakesError::Authorization(
"playlist is private".into(),
)));
}
let playlist = storage.get_playlist(playlist_id).await.map_err(ApiError)?;
if require_write {
// Write operations require ownership
if playlist.owner_id != user_id {
return Err(ApiError(pinakes_core::error::PinakesError::Authorization(
"only the playlist owner can modify this playlist".into(),
)));
}
Ok(playlist)
} else {
// Read operations: allow if public or owner
if !playlist.is_public && playlist.owner_id != user_id {
return Err(ApiError(pinakes_core::error::PinakesError::Authorization(
"playlist is private".into(),
)));
}
}
Ok(playlist)
}
pub async fn create_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<CreatePlaylistRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<CreatePlaylistRequest>,
) -> Result<Json<PlaylistResponse>, ApiError> {
if req.name.is_empty() || req.name.chars().count() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"playlist name must be 1-255 characters".into(),
),
));
}
let owner_id = resolve_user_id(&state.storage, &username).await?;
let playlist = state
.storage
.create_playlist(
owner_id,
&req.name,
req.description.as_deref(),
req.is_public.unwrap_or(false),
req.is_smart.unwrap_or(false),
req.filter_query.as_deref(),
)
.await?;
Ok(Json(PlaylistResponse::from(playlist)))
if req.name.is_empty() || req.name.chars().count() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"playlist name must be 1-255 characters".into(),
),
));
}
let owner_id = resolve_user_id(&state.storage, &username).await?;
let playlist = state
.storage
.create_playlist(
owner_id,
&req.name,
req.description.as_deref(),
req.is_public.unwrap_or(false),
req.is_smart.unwrap_or(false),
req.filter_query.as_deref(),
)
.await?;
Ok(Json(PlaylistResponse::from(playlist)))
}
pub async fn list_playlists(
State(state): State<AppState>,
Extension(username): Extension<String>,
State(state): State<AppState>,
Extension(username): Extension<String>,
) -> Result<Json<Vec<PlaylistResponse>>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
// Fetch all playlists and filter to only public ones plus the user's own
let playlists = state.storage.list_playlists(None).await?;
let visible: Vec<PlaylistResponse> = playlists
.into_iter()
.filter(|p| p.is_public || p.owner_id == user_id)
.map(PlaylistResponse::from)
.collect();
Ok(Json(visible))
let user_id = resolve_user_id(&state.storage, &username).await?;
// Fetch all playlists and filter to only public ones plus the user's own
let playlists = state.storage.list_playlists(None).await?;
let visible: Vec<PlaylistResponse> = playlists
.into_iter()
.filter(|p| p.is_public || p.owner_id == user_id)
.map(PlaylistResponse::from)
.collect();
Ok(Json(visible))
}
pub async fn get_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
) -> Result<Json<PlaylistResponse>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let playlist = check_playlist_access(&state.storage, id, user_id, false).await?;
Ok(Json(PlaylistResponse::from(playlist)))
let user_id = resolve_user_id(&state.storage, &username).await?;
let playlist =
check_playlist_access(&state.storage, id, user_id, false).await?;
Ok(Json(PlaylistResponse::from(playlist)))
}
pub async fn update_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<UpdatePlaylistRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<UpdatePlaylistRequest>,
) -> Result<Json<PlaylistResponse>, ApiError> {
if let Some(ref name) = req.name
&& (name.is_empty() || name.chars().count() > 255)
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"playlist name must be 1-255 characters".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
let playlist = state
.storage
.update_playlist(
id,
req.name.as_deref(),
req.description.as_deref(),
req.is_public,
)
.await?;
Ok(Json(PlaylistResponse::from(playlist)))
if let Some(ref name) = req.name
&& (name.is_empty() || name.chars().count() > 255)
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"playlist name must be 1-255 characters".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
let playlist = state
.storage
.update_playlist(
id,
req.name.as_deref(),
req.description.as_deref(),
req.is_public,
)
.await?;
Ok(Json(PlaylistResponse::from(playlist)))
}
pub async fn delete_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
state.storage.delete_playlist(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
state.storage.delete_playlist(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
}
pub async fn add_item(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<PlaylistItemRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<PlaylistItemRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
let position = match req.position {
Some(p) => p,
None => {
let items = state.storage.get_playlist_items(id).await?;
items.len() as i32
}
};
state
.storage
.add_to_playlist(id, MediaId(req.media_id), position)
.await?;
Ok(Json(serde_json::json!({"added": true})))
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
let position = match req.position {
Some(p) => p,
None => {
let items = state.storage.get_playlist_items(id).await?;
items.len() as i32
},
};
state
.storage
.add_to_playlist(id, MediaId(req.media_id), position)
.await?;
Ok(Json(serde_json::json!({"added": true})))
}
pub async fn remove_item(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path((id, media_id)): Path<(Uuid, Uuid)>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path((id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
state
.storage
.remove_from_playlist(id, MediaId(media_id))
.await?;
Ok(Json(serde_json::json!({"removed": true})))
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
state
.storage
.remove_from_playlist(id, MediaId(media_id))
.await?;
Ok(Json(serde_json::json!({"removed": true})))
}
pub async fn list_items(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, false).await?;
let items = state.storage.get_playlist_items(id).await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, false).await?;
let items = state.storage.get_playlist_items(id).await?;
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
}
pub async fn reorder_item(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<ReorderPlaylistRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<ReorderPlaylistRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
state
.storage
.reorder_playlist(id, MediaId(req.media_id), req.new_position)
.await?;
Ok(Json(serde_json::json!({"reordered": true})))
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
state
.storage
.reorder_playlist(id, MediaId(req.media_id), req.new_position)
.await?;
Ok(Json(serde_json::json!({"reordered": true})))
}
pub async fn shuffle_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, false).await?;
use rand::seq::SliceRandom;
let mut items = state.storage.get_playlist_items(id).await?;
items.shuffle(&mut rand::rng());
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, false).await?;
use rand::seq::SliceRandom;
let mut items = state.storage.get_playlist_items(id).await?;
items.shuffle(&mut rand::rng());
Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
}

View file

@ -1,149 +1,151 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::*, error::ApiError, state::AppState};
/// List all installed plugins
pub async fn list_plugins(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<Vec<PluginResponse>>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugins = plugin_manager.list_plugins().await;
let mut responses = Vec::with_capacity(plugins.len());
for meta in plugins {
let enabled = plugin_manager.is_plugin_enabled(&meta.id).await;
responses.push(PluginResponse::new(meta, enabled));
}
Ok(Json(responses))
let plugins = plugin_manager.list_plugins().await;
let mut responses = Vec::with_capacity(plugins.len());
for meta in plugins {
let enabled = plugin_manager.is_plugin_enabled(&meta.id).await;
responses.push(PluginResponse::new(meta, enabled));
}
Ok(Json(responses))
}
/// Get a specific plugin by ID
pub async fn get_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<PluginResponse>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin = plugin_manager.get_plugin(&id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"Plugin not found: {}",
id
)))
})?;
let plugin = plugin_manager.get_plugin(&id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"Plugin not found: {}",
id
)))
})?;
let enabled = plugin_manager.is_plugin_enabled(&id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
let enabled = plugin_manager.is_plugin_enabled(&id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
}
/// Install a plugin from URL or file path
pub async fn install_plugin(
State(state): State<AppState>,
Json(req): Json<InstallPluginRequest>,
State(state): State<AppState>,
Json(req): Json<InstallPluginRequest>,
) -> Result<Json<PluginResponse>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin_id =
plugin_manager
.install_plugin(&req.source)
.await
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
format!("Failed to install plugin: {}", e),
))
})?;
let plugin =
plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(
"Plugin installed but not found".to_string(),
))
})?;
let plugin_id = plugin_manager
.install_plugin(&req.source)
.await
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to install plugin: {}", e),
))
})?;
let plugin = plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(
"Plugin installed but not found".to_string(),
))
})?;
let enabled = plugin_manager.is_plugin_enabled(&plugin_id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
let enabled = plugin_manager.is_plugin_enabled(&plugin_id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
}
/// Uninstall a plugin
pub async fn uninstall_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
plugin_manager.uninstall_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to uninstall plugin: {}", e),
))
})?;
plugin_manager.uninstall_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to uninstall plugin: {}", e),
))
})?;
Ok(Json(serde_json::json!({"uninstalled": true})))
Ok(Json(serde_json::json!({"uninstalled": true})))
}
/// Enable or disable a plugin
pub async fn toggle_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<TogglePluginRequest>,
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<TogglePluginRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
if req.enabled {
plugin_manager.enable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to enable plugin: {}", e),
))
})?;
} else {
plugin_manager.disable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to disable plugin: {}", e),
))
})?;
}
if req.enabled {
plugin_manager.enable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to enable plugin: {}", e),
))
})?;
} else {
plugin_manager.disable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to disable plugin: {}", e),
))
})?;
}
Ok(Json(serde_json::json!({
"id": id,
"enabled": req.enabled
})))
Ok(Json(serde_json::json!({
"id": id,
"enabled": req.enabled
})))
}
/// Reload a plugin (for development)
pub async fn reload_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
let plugin_manager = state.plugin_manager.as_ref().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})?;
plugin_manager.reload_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to reload plugin: {}", e),
))
})?;
plugin_manager.reload_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to reload plugin: {}", e),
))
})?;
Ok(Json(serde_json::json!({"reloaded": true})))
Ok(Json(serde_json::json!({"reloaded": true})))
}

View file

@ -1,76 +1,79 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use serde::{Deserialize, Serialize};
use crate::error::ApiError;
use crate::state::AppState;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Deserialize)]
pub struct CreateSavedSearchRequest {
pub name: String,
pub query: String,
pub sort_order: Option<String>,
pub name: String,
pub query: String,
pub sort_order: Option<String>,
}
#[derive(Debug, Serialize)]
pub struct SavedSearchResponse {
pub id: String,
pub name: String,
pub query: String,
pub sort_order: Option<String>,
pub created_at: chrono::DateTime<chrono::Utc>,
pub id: String,
pub name: String,
pub query: String,
pub sort_order: Option<String>,
pub created_at: chrono::DateTime<chrono::Utc>,
}
pub async fn create_saved_search(
State(state): State<AppState>,
Json(req): Json<CreateSavedSearchRequest>,
State(state): State<AppState>,
Json(req): Json<CreateSavedSearchRequest>,
) -> Result<Json<SavedSearchResponse>, ApiError> {
let id = uuid::Uuid::now_v7();
state
.storage
.save_search(id, &req.name, &req.query, req.sort_order.as_deref())
.await
.map_err(ApiError)?;
let id = uuid::Uuid::now_v7();
state
.storage
.save_search(id, &req.name, &req.query, req.sort_order.as_deref())
.await
.map_err(ApiError)?;
Ok(Json(SavedSearchResponse {
id: id.to_string(),
name: req.name,
query: req.query,
sort_order: req.sort_order,
created_at: chrono::Utc::now(),
}))
Ok(Json(SavedSearchResponse {
id: id.to_string(),
name: req.name,
query: req.query,
sort_order: req.sort_order,
created_at: chrono::Utc::now(),
}))
}
pub async fn list_saved_searches(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<Vec<SavedSearchResponse>>, ApiError> {
let searches = state
.storage
.list_saved_searches()
.await
.map_err(ApiError)?;
Ok(Json(
searches
.into_iter()
.map(|s| SavedSearchResponse {
id: s.id.to_string(),
name: s.name,
query: s.query,
sort_order: s.sort_order,
created_at: s.created_at,
})
.collect(),
))
let searches = state
.storage
.list_saved_searches()
.await
.map_err(ApiError)?;
Ok(Json(
searches
.into_iter()
.map(|s| {
SavedSearchResponse {
id: s.id.to_string(),
name: s.name,
query: s.query,
sort_order: s.sort_order,
created_at: s.created_at,
}
})
.collect(),
))
}
pub async fn delete_saved_search(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
state
.storage
.delete_saved_search(id)
.await
.map_err(ApiError)?;
Ok(Json(serde_json::json!({ "deleted": true })))
state
.storage
.delete_saved_search(id)
.await
.map_err(ApiError)?;
Ok(Json(serde_json::json!({ "deleted": true })))
}

View file

@ -1,30 +1,29 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::*, error::ApiError, state::AppState};
/// Trigger a scan as a background job. Returns the job ID immediately.
pub async fn trigger_scan(
State(state): State<AppState>,
Json(req): Json<ScanRequest>,
State(state): State<AppState>,
Json(req): Json<ScanRequest>,
) -> Result<Json<ScanJobResponse>, ApiError> {
let kind = pinakes_core::jobs::JobKind::Scan { path: req.path };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(ScanJobResponse {
job_id: job_id.to_string(),
}))
let kind = pinakes_core::jobs::JobKind::Scan { path: req.path };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(ScanJobResponse {
job_id: job_id.to_string(),
}))
}
pub async fn scan_status(State(state): State<AppState>) -> Json<ScanStatusResponse> {
let snapshot = state.scan_progress.snapshot();
let error_count = snapshot.errors.len();
Json(ScanStatusResponse {
scanning: snapshot.scanning,
files_found: snapshot.files_found,
files_processed: snapshot.files_processed,
error_count,
errors: snapshot.errors,
})
pub async fn scan_status(
State(state): State<AppState>,
) -> Json<ScanStatusResponse> {
let snapshot = state.scan_progress.snapshot();
let error_count = snapshot.errors.len();
Json(ScanStatusResponse {
scanning: snapshot.scanning,
files_found: snapshot.files_found,
files_processed: snapshot.files_processed,
error_count,
errors: snapshot.errors,
})
}

View file

@ -1,55 +1,65 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use crate::dto::ScheduledTaskResponse;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::ScheduledTaskResponse, error::ApiError, state::AppState};
pub async fn list_scheduled_tasks(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<Vec<ScheduledTaskResponse>>, ApiError> {
let tasks = state.scheduler.list_tasks().await;
let responses: Vec<ScheduledTaskResponse> = tasks
.into_iter()
.map(|t| ScheduledTaskResponse {
id: t.id,
name: t.name,
schedule: t.schedule.display_string(),
enabled: t.enabled,
last_run: t.last_run.map(|dt| dt.to_rfc3339()),
next_run: t.next_run.map(|dt| dt.to_rfc3339()),
last_status: t.last_status,
})
.collect();
Ok(Json(responses))
let tasks = state.scheduler.list_tasks().await;
let responses: Vec<ScheduledTaskResponse> = tasks
.into_iter()
.map(|t| {
ScheduledTaskResponse {
id: t.id,
name: t.name,
schedule: t.schedule.display_string(),
enabled: t.enabled,
last_run: t.last_run.map(|dt| dt.to_rfc3339()),
next_run: t.next_run.map(|dt| dt.to_rfc3339()),
last_status: t.last_status,
}
})
.collect();
Ok(Json(responses))
}
pub async fn toggle_scheduled_task(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.toggle_task(&id).await {
Some(enabled) => Ok(Json(serde_json::json!({
"id": id,
"enabled": enabled,
}))),
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
))),
}
match state.scheduler.toggle_task(&id).await {
Some(enabled) => {
Ok(Json(serde_json::json!({
"id": id,
"enabled": enabled,
})))
},
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
)))
},
}
}
pub async fn run_scheduled_task_now(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.run_now(&id).await {
Some(job_id) => Ok(Json(serde_json::json!({
"id": id,
"job_id": job_id,
}))),
None => Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
))),
}
match state.scheduler.run_now(&id).await {
Some(job_id) => {
Ok(Json(serde_json::json!({
"id": id,
"job_id": job_id,
})))
},
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
)))
},
}
}

View file

@ -1,87 +1,88 @@
use axum::Json;
use axum::extract::{Query, State};
use axum::{
Json,
extract::{Query, State},
};
use pinakes_core::{
model::Pagination,
search::{SearchRequest, SortOrder, parse_search_query},
};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::Pagination;
use pinakes_core::search::{SearchRequest, SortOrder, parse_search_query};
use crate::{dto::*, error::ApiError, state::AppState};
fn resolve_sort(sort: Option<&str>) -> SortOrder {
match sort {
Some("date_asc") => SortOrder::DateAsc,
Some("date_desc") => SortOrder::DateDesc,
Some("name_asc") => SortOrder::NameAsc,
Some("name_desc") => SortOrder::NameDesc,
Some("size_asc") => SortOrder::SizeAsc,
Some("size_desc") => SortOrder::SizeDesc,
_ => SortOrder::Relevance,
}
match sort {
Some("date_asc") => SortOrder::DateAsc,
Some("date_desc") => SortOrder::DateDesc,
Some("name_asc") => SortOrder::NameAsc,
Some("name_desc") => SortOrder::NameDesc,
Some("size_asc") => SortOrder::SizeAsc,
Some("size_desc") => SortOrder::SizeDesc,
_ => SortOrder::Relevance,
}
}
pub async fn search(
State(state): State<AppState>,
Query(params): Query<SearchParams>,
State(state): State<AppState>,
Query(params): Query<SearchParams>,
) -> Result<Json<SearchResponse>, ApiError> {
if params.q.len() > 2048 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"search query exceeds maximum length of 2048 characters".into(),
),
));
}
if params.q.len() > 2048 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"search query exceeds maximum length of 2048 characters".into(),
),
));
}
let query = parse_search_query(&params.q)?;
let sort = resolve_sort(params.sort.as_deref());
let query = parse_search_query(&params.q)?;
let sort = resolve_sort(params.sort.as_deref());
let request = SearchRequest {
query,
sort,
pagination: Pagination::new(
params.offset.unwrap_or(0),
params.limit.unwrap_or(50).min(1000),
None,
),
};
let request = SearchRequest {
query,
sort,
pagination: Pagination::new(
params.offset.unwrap_or(0),
params.limit.unwrap_or(50).min(1000),
None,
),
};
let results = state.storage.search(&request).await?;
let results = state.storage.search(&request).await?;
Ok(Json(SearchResponse {
items: results.items.into_iter().map(MediaResponse::from).collect(),
total_count: results.total_count,
}))
Ok(Json(SearchResponse {
items: results.items.into_iter().map(MediaResponse::from).collect(),
total_count: results.total_count,
}))
}
pub async fn search_post(
State(state): State<AppState>,
Json(body): Json<SearchRequestBody>,
State(state): State<AppState>,
Json(body): Json<SearchRequestBody>,
) -> Result<Json<SearchResponse>, ApiError> {
if body.q.len() > 2048 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"search query exceeds maximum length of 2048 characters".into(),
),
));
}
if body.q.len() > 2048 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"search query exceeds maximum length of 2048 characters".into(),
),
));
}
let query = parse_search_query(&body.q)?;
let sort = resolve_sort(body.sort.as_deref());
let query = parse_search_query(&body.q)?;
let sort = resolve_sort(body.sort.as_deref());
let request = SearchRequest {
query,
sort,
pagination: Pagination::new(
body.offset.unwrap_or(0),
body.limit.unwrap_or(50).min(1000),
None,
),
};
let request = SearchRequest {
query,
sort,
pagination: Pagination::new(
body.offset.unwrap_or(0),
body.limit.unwrap_or(50).min(1000),
None,
),
};
let results = state.storage.search(&request).await?;
let results = state.storage.search(&request).await?;
Ok(Json(SearchResponse {
items: results.items.into_iter().map(MediaResponse::from).collect(),
total_count: results.total_count,
}))
Ok(Json(SearchResponse {
items: results.items.into_iter().map(MediaResponse::from).collect(),
total_count: results.total_count,
}))
}

View file

@ -1,543 +1,576 @@
use std::net::SocketAddr;
use axum::{
Json,
extract::{ConnectInfo, Extension, Path, Query, State},
http::StatusCode,
Json,
extract::{ConnectInfo, Extension, Path, Query, State},
http::StatusCode,
};
use chrono::Utc;
use std::net::SocketAddr;
use pinakes_core::{
model::{MediaId, Pagination},
sharing::{
Share,
ShareActivity,
ShareActivityAction,
ShareId,
ShareNotification,
ShareNotificationType,
SharePermissions,
ShareRecipient,
ShareTarget,
generate_share_token,
hash_share_password,
verify_share_password,
},
users::UserId,
};
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::{
AccessSharedRequest, BatchDeleteSharesRequest, CreateShareRequest, MediaResponse,
PaginationParams, ShareActivityResponse, ShareNotificationResponse, ShareResponse,
use crate::{
auth::resolve_user_id,
dto::{
AccessSharedRequest,
BatchDeleteSharesRequest,
CreateShareRequest,
MediaResponse,
PaginationParams,
ShareActivityResponse,
ShareNotificationResponse,
ShareResponse,
UpdateShareRequest,
},
error::{ApiError, ApiResult},
state::AppState,
};
use crate::error::{ApiError, ApiResult};
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::model::Pagination;
use pinakes_core::sharing::{
Share, ShareActivity, ShareActivityAction, ShareId, ShareNotification, ShareNotificationType,
SharePermissions, ShareRecipient, ShareTarget, generate_share_token, hash_share_password,
verify_share_password,
};
use pinakes_core::users::UserId;
/// Create a new share
/// POST /api/shares
pub async fn create_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<CreateShareRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<CreateShareRequest>,
) -> ApiResult<Json<ShareResponse>> {
let config = state.config.read().await;
if !config.sharing.enabled {
return Err(ApiError::bad_request("Sharing is not enabled"));
let config = state.config.read().await;
if !config.sharing.enabled {
return Err(ApiError::bad_request("Sharing is not enabled"));
}
// Validate public links are allowed
if req.recipient_type == "public_link" && !config.sharing.allow_public_links {
return Err(ApiError::bad_request("Public links are not allowed"));
}
drop(config);
let owner_id = resolve_user_id(&state.storage, &username).await?;
// Parse target
let target_id: Uuid = req
.target_id
.parse()
.map_err(|_| ApiError::bad_request("Invalid target_id"))?;
let target = match req.target_type.as_str() {
"media" => {
ShareTarget::Media {
media_id: MediaId(target_id),
}
},
"collection" => {
ShareTarget::Collection {
collection_id: target_id,
}
},
"tag" => ShareTarget::Tag { tag_id: target_id },
"saved_search" => {
ShareTarget::SavedSearch {
search_id: target_id,
}
},
_ => return Err(ApiError::bad_request("Invalid target_type")),
};
// Parse recipient
let recipient = match req.recipient_type.as_str() {
"public_link" => {
let token = generate_share_token();
let password_hash = req.password.as_ref().map(|p| hash_share_password(p));
ShareRecipient::PublicLink {
token,
password_hash,
}
},
"user" => {
let recipient_user_id = req.recipient_user_id.ok_or_else(|| {
ApiError::bad_request("recipient_user_id required for user share")
})?;
ShareRecipient::User {
user_id: UserId(recipient_user_id),
}
},
"group" => {
let group_id = req.recipient_group_id.ok_or_else(|| {
ApiError::bad_request("recipient_group_id required for group share")
})?;
ShareRecipient::Group { group_id }
},
_ => return Err(ApiError::bad_request("Invalid recipient_type")),
};
// Parse permissions
let permissions = if let Some(perms) = req.permissions {
SharePermissions {
can_view: perms.can_view.unwrap_or(true),
can_download: perms.can_download.unwrap_or(false),
can_edit: perms.can_edit.unwrap_or(false),
can_delete: perms.can_delete.unwrap_or(false),
can_reshare: perms.can_reshare.unwrap_or(false),
can_add: perms.can_add.unwrap_or(false),
}
} else {
SharePermissions::view_only()
};
// Validate public links are allowed
if req.recipient_type == "public_link" && !config.sharing.allow_public_links {
return Err(ApiError::bad_request("Public links are not allowed"));
}
drop(config);
// Calculate expiration
let expires_at = req
.expires_in_hours
.map(|hours| Utc::now() + chrono::Duration::hours(hours as i64));
let owner_id = resolve_user_id(&state.storage, &username).await?;
let share = Share {
id: ShareId(Uuid::now_v7()),
target,
owner_id,
recipient,
permissions,
note: req.note,
expires_at,
access_count: 0,
last_accessed: None,
inherit_to_children: req.inherit_to_children.unwrap_or(true),
parent_share_id: None,
created_at: Utc::now(),
updated_at: Utc::now(),
};
// Parse target
let target_id: Uuid = req
.target_id
.parse()
.map_err(|_| ApiError::bad_request("Invalid target_id"))?;
let created = state.storage.create_share(&share).await.map_err(|e| {
ApiError::internal(format!("Failed to create share: {}", e))
})?;
let target = match req.target_type.as_str() {
"media" => ShareTarget::Media {
media_id: MediaId(target_id),
},
"collection" => ShareTarget::Collection {
collection_id: target_id,
},
"tag" => ShareTarget::Tag { tag_id: target_id },
"saved_search" => ShareTarget::SavedSearch {
search_id: target_id,
},
_ => return Err(ApiError::bad_request("Invalid target_type")),
// Send notification to recipient if it's a user share
if let ShareRecipient::User { user_id } = &created.recipient {
let notification = ShareNotification {
id: Uuid::now_v7(),
user_id: *user_id,
share_id: created.id,
notification_type: ShareNotificationType::NewShare,
is_read: false,
created_at: Utc::now(),
};
// Parse recipient
let recipient = match req.recipient_type.as_str() {
"public_link" => {
let token = generate_share_token();
let password_hash = req.password.as_ref().map(|p| hash_share_password(p));
ShareRecipient::PublicLink {
token,
password_hash,
}
}
"user" => {
let recipient_user_id = req.recipient_user_id.ok_or_else(|| {
ApiError::bad_request("recipient_user_id required for user share")
})?;
ShareRecipient::User {
user_id: UserId(recipient_user_id),
}
}
"group" => {
let group_id = req.recipient_group_id.ok_or_else(|| {
ApiError::bad_request("recipient_group_id required for group share")
})?;
ShareRecipient::Group { group_id }
}
_ => return Err(ApiError::bad_request("Invalid recipient_type")),
};
// Ignore notification errors
let _ = state.storage.create_share_notification(&notification).await;
}
// Parse permissions
let permissions = if let Some(perms) = req.permissions {
SharePermissions {
can_view: perms.can_view.unwrap_or(true),
can_download: perms.can_download.unwrap_or(false),
can_edit: perms.can_edit.unwrap_or(false),
can_delete: perms.can_delete.unwrap_or(false),
can_reshare: perms.can_reshare.unwrap_or(false),
can_add: perms.can_add.unwrap_or(false),
}
} else {
SharePermissions::view_only()
};
// Calculate expiration
let expires_at = req
.expires_in_hours
.map(|hours| Utc::now() + chrono::Duration::hours(hours as i64));
let share = Share {
id: ShareId(Uuid::now_v7()),
target,
owner_id,
recipient,
permissions,
note: req.note,
expires_at,
access_count: 0,
last_accessed: None,
inherit_to_children: req.inherit_to_children.unwrap_or(true),
parent_share_id: None,
created_at: Utc::now(),
updated_at: Utc::now(),
};
let created = state
.storage
.create_share(&share)
.await
.map_err(|e| ApiError::internal(format!("Failed to create share: {}", e)))?;
// Send notification to recipient if it's a user share
if let ShareRecipient::User { user_id } = &created.recipient {
let notification = ShareNotification {
id: Uuid::now_v7(),
user_id: *user_id,
share_id: created.id,
notification_type: ShareNotificationType::NewShare,
is_read: false,
created_at: Utc::now(),
};
// Ignore notification errors
let _ = state.storage.create_share_notification(&notification).await;
}
Ok(Json(created.into()))
Ok(Json(created.into()))
}
/// List outgoing shares (shares I created)
/// GET /api/shares/outgoing
pub async fn list_outgoing(
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<PaginationParams>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<PaginationParams>,
) -> ApiResult<Json<Vec<ShareResponse>>> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let pagination = Pagination {
offset: params.offset.unwrap_or(0),
limit: params.limit.unwrap_or(50),
sort: params.sort,
};
let user_id = resolve_user_id(&state.storage, &username).await?;
let pagination = Pagination {
offset: params.offset.unwrap_or(0),
limit: params.limit.unwrap_or(50),
sort: params.sort,
};
let shares = state
.storage
.list_shares_by_owner(user_id, &pagination)
.await
.map_err(|e| ApiError::internal(format!("Failed to list shares: {}", e)))?;
let shares = state
.storage
.list_shares_by_owner(user_id, &pagination)
.await
.map_err(|e| ApiError::internal(format!("Failed to list shares: {}", e)))?;
Ok(Json(shares.into_iter().map(Into::into).collect()))
Ok(Json(shares.into_iter().map(Into::into).collect()))
}
/// List incoming shares (shares shared with me)
/// GET /api/shares/incoming
pub async fn list_incoming(
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<PaginationParams>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Query(params): Query<PaginationParams>,
) -> ApiResult<Json<Vec<ShareResponse>>> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let pagination = Pagination {
offset: params.offset.unwrap_or(0),
limit: params.limit.unwrap_or(50),
sort: params.sort,
};
let user_id = resolve_user_id(&state.storage, &username).await?;
let pagination = Pagination {
offset: params.offset.unwrap_or(0),
limit: params.limit.unwrap_or(50),
sort: params.sort,
};
let shares = state
.storage
.list_shares_for_user(user_id, &pagination)
.await
.map_err(|e| ApiError::internal(format!("Failed to list shares: {}", e)))?;
let shares = state
.storage
.list_shares_for_user(user_id, &pagination)
.await
.map_err(|e| ApiError::internal(format!("Failed to list shares: {}", e)))?;
Ok(Json(shares.into_iter().map(Into::into).collect()))
Ok(Json(shares.into_iter().map(Into::into).collect()))
}
/// Get share details
/// GET /api/shares/{id}
pub async fn get_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
) -> ApiResult<Json<ShareResponse>> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
// Check authorization
let is_owner = share.owner_id == user_id;
let is_recipient = match &share.recipient {
ShareRecipient::User {
user_id: recipient_id,
} => *recipient_id == user_id,
_ => false,
};
// Check authorization
let is_owner = share.owner_id == user_id;
let is_recipient = match &share.recipient {
ShareRecipient::User {
user_id: recipient_id,
} => *recipient_id == user_id,
_ => false,
};
if !is_owner && !is_recipient {
return Err(ApiError::forbidden("Not authorized to view this share"));
}
if !is_owner && !is_recipient {
return Err(ApiError::forbidden("Not authorized to view this share"));
}
Ok(Json(share.into()))
Ok(Json(share.into()))
}
/// Update a share
/// PATCH /api/shares/{id}
pub async fn update_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<UpdateShareRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<UpdateShareRequest>,
) -> ApiResult<Json<ShareResponse>> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let mut share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let mut share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
// Only owner can update
if share.owner_id != user_id {
return Err(ApiError::forbidden("Only the owner can update this share"));
}
// Only owner can update
if share.owner_id != user_id {
return Err(ApiError::forbidden("Only the owner can update this share"));
}
// Update fields
if let Some(perms) = req.permissions {
share.permissions = SharePermissions {
can_view: perms.can_view.unwrap_or(share.permissions.can_view),
can_download: perms.can_download.unwrap_or(share.permissions.can_download),
can_edit: perms.can_edit.unwrap_or(share.permissions.can_edit),
can_delete: perms.can_delete.unwrap_or(share.permissions.can_delete),
can_reshare: perms.can_reshare.unwrap_or(share.permissions.can_reshare),
can_add: perms.can_add.unwrap_or(share.permissions.can_add),
};
}
// Update fields
if let Some(perms) = req.permissions {
share.permissions = SharePermissions {
can_view: perms.can_view.unwrap_or(share.permissions.can_view),
can_download: perms
.can_download
.unwrap_or(share.permissions.can_download),
can_edit: perms.can_edit.unwrap_or(share.permissions.can_edit),
can_delete: perms.can_delete.unwrap_or(share.permissions.can_delete),
can_reshare: perms.can_reshare.unwrap_or(share.permissions.can_reshare),
can_add: perms.can_add.unwrap_or(share.permissions.can_add),
};
}
if let Some(note) = req.note {
share.note = Some(note);
}
if let Some(note) = req.note {
share.note = Some(note);
}
if let Some(expires_at) = req.expires_at {
share.expires_at = Some(expires_at);
}
if let Some(expires_at) = req.expires_at {
share.expires_at = Some(expires_at);
}
if let Some(inherit) = req.inherit_to_children {
share.inherit_to_children = inherit;
}
if let Some(inherit) = req.inherit_to_children {
share.inherit_to_children = inherit;
}
share.updated_at = Utc::now();
share.updated_at = Utc::now();
let updated = state
.storage
.update_share(&share)
.await
.map_err(|e| ApiError::internal(format!("Failed to update share: {}", e)))?;
let updated = state.storage.update_share(&share).await.map_err(|e| {
ApiError::internal(format!("Failed to update share: {}", e))
})?;
// Notify recipient of update
if let ShareRecipient::User { user_id } = &updated.recipient {
let notification = ShareNotification {
id: Uuid::now_v7(),
user_id: *user_id,
share_id: updated.id,
notification_type: ShareNotificationType::ShareUpdated,
is_read: false,
created_at: Utc::now(),
};
let _ = state.storage.create_share_notification(&notification).await;
}
// Notify recipient of update
if let ShareRecipient::User { user_id } = &updated.recipient {
let notification = ShareNotification {
id: Uuid::now_v7(),
user_id: *user_id,
share_id: updated.id,
notification_type: ShareNotificationType::ShareUpdated,
is_read: false,
created_at: Utc::now(),
};
let _ = state.storage.create_share_notification(&notification).await;
}
Ok(Json(updated.into()))
Ok(Json(updated.into()))
}
/// Delete (revoke) a share
/// DELETE /api/shares/{id}
pub async fn delete_share(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
) -> ApiResult<StatusCode> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
// Only owner can delete
if share.owner_id != user_id {
return Err(ApiError::forbidden("Only the owner can revoke this share"));
}
// Only owner can delete
if share.owner_id != user_id {
return Err(ApiError::forbidden("Only the owner can revoke this share"));
}
// Notify recipient before deletion
if let ShareRecipient::User { user_id } = &share.recipient {
let notification = ShareNotification {
id: Uuid::now_v7(),
user_id: *user_id,
share_id: share.id,
notification_type: ShareNotificationType::ShareRevoked,
is_read: false,
created_at: Utc::now(),
};
let _ = state.storage.create_share_notification(&notification).await;
}
// Notify recipient before deletion
if let ShareRecipient::User { user_id } = &share.recipient {
let notification = ShareNotification {
id: Uuid::now_v7(),
user_id: *user_id,
share_id: share.id,
notification_type: ShareNotificationType::ShareRevoked,
is_read: false,
created_at: Utc::now(),
};
let _ = state.storage.create_share_notification(&notification).await;
}
state
.storage
.delete_share(ShareId(id))
.await
.map_err(|e| ApiError::internal(format!("Failed to delete share: {}", e)))?;
state.storage.delete_share(ShareId(id)).await.map_err(|e| {
ApiError::internal(format!("Failed to delete share: {}", e))
})?;
Ok(StatusCode::NO_CONTENT)
Ok(StatusCode::NO_CONTENT)
}
/// Batch delete shares
/// POST /api/shares/batch/delete
pub async fn batch_delete(
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<BatchDeleteSharesRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<BatchDeleteSharesRequest>,
) -> ApiResult<Json<serde_json::Value>> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let share_ids: Vec<ShareId> = req.share_ids.into_iter().map(ShareId).collect();
let user_id = resolve_user_id(&state.storage, &username).await?;
let share_ids: Vec<ShareId> =
req.share_ids.into_iter().map(ShareId).collect();
// Verify ownership of all shares
for share_id in &share_ids {
let share = state
.storage
.get_share(*share_id)
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
// Verify ownership of all shares
for share_id in &share_ids {
let share = state
.storage
.get_share(*share_id)
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
if share.owner_id != user_id {
return Err(ApiError::forbidden(format!(
"Not authorized to delete share {}",
share_id.0
)));
}
if share.owner_id != user_id {
return Err(ApiError::forbidden(format!(
"Not authorized to delete share {}",
share_id.0
)));
}
}
let deleted = state
.storage
.batch_delete_shares(&share_ids)
.await
.map_err(|e| ApiError::internal(format!("Failed to batch delete: {}", e)))?;
let deleted = state
.storage
.batch_delete_shares(&share_ids)
.await
.map_err(|e| {
ApiError::internal(format!("Failed to batch delete: {}", e))
})?;
Ok(Json(serde_json::json!({ "deleted": deleted })))
Ok(Json(serde_json::json!({ "deleted": deleted })))
}
/// Access a public shared resource
/// GET /api/shared/{token}
pub async fn access_shared(
State(state): State<AppState>,
Path(token): Path<String>,
Query(params): Query<AccessSharedRequest>,
ConnectInfo(addr): ConnectInfo<SocketAddr>,
State(state): State<AppState>,
Path(token): Path<String>,
Query(params): Query<AccessSharedRequest>,
ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> ApiResult<Json<MediaResponse>> {
let share = state
.storage
.get_share_by_token(&token)
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
let share = state
.storage
.get_share_by_token(&token)
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
// Check expiration
if let Some(expires_at) = share.expires_at {
if Utc::now() > expires_at {
return Err(ApiError::not_found("Share has expired"));
}
// Check expiration
if let Some(expires_at) = share.expires_at {
if Utc::now() > expires_at {
return Err(ApiError::not_found("Share has expired"));
}
}
// Check password if required
if let ShareRecipient::PublicLink { password_hash, .. } = &share.recipient {
if let Some(hash) = password_hash {
let provided_password = params
.password
.as_ref()
.ok_or_else(|| ApiError::unauthorized("Password required"))?;
// Check password if required
if let ShareRecipient::PublicLink { password_hash, .. } = &share.recipient {
if let Some(hash) = password_hash {
let provided_password = params
.password
.as_ref()
.ok_or_else(|| ApiError::unauthorized("Password required"))?;
if !verify_share_password(provided_password, hash) {
// Log failed attempt
let activity = ShareActivity {
id: Uuid::now_v7(),
share_id: share.id,
actor_id: None,
actor_ip: Some(addr.ip().to_string()),
action: ShareActivityAction::PasswordFailed,
details: None,
timestamp: Utc::now(),
};
let _ = state.storage.record_share_activity(&activity).await;
if !verify_share_password(provided_password, hash) {
// Log failed attempt
let activity = ShareActivity {
id: Uuid::now_v7(),
share_id: share.id,
actor_id: None,
actor_ip: Some(addr.ip().to_string()),
action: ShareActivityAction::PasswordFailed,
details: None,
timestamp: Utc::now(),
};
let _ = state.storage.record_share_activity(&activity).await;
return Err(ApiError::unauthorized("Invalid password"));
}
}
return Err(ApiError::unauthorized("Invalid password"));
}
}
}
// Record access
state
.storage
.record_share_access(share.id)
.await
.map_err(|e| ApiError::internal(format!("Failed to record access: {}", e)))?;
// Record access
state
.storage
.record_share_access(share.id)
.await
.map_err(|e| {
ApiError::internal(format!("Failed to record access: {}", e))
})?;
// Log the access
let activity = ShareActivity {
id: Uuid::now_v7(),
share_id: share.id,
actor_id: None,
actor_ip: Some(addr.ip().to_string()),
action: ShareActivityAction::Accessed,
details: None,
timestamp: Utc::now(),
};
let _ = state.storage.record_share_activity(&activity).await;
// Log the access
let activity = ShareActivity {
id: Uuid::now_v7(),
share_id: share.id,
actor_id: None,
actor_ip: Some(addr.ip().to_string()),
action: ShareActivityAction::Accessed,
details: None,
timestamp: Utc::now(),
};
let _ = state.storage.record_share_activity(&activity).await;
// Return the shared content
match &share.target {
ShareTarget::Media { media_id } => {
let item = state
.storage
.get_media(*media_id)
.await
.map_err(|e| ApiError::not_found(format!("Media not found: {}", e)))?;
// Return the shared content
match &share.target {
ShareTarget::Media { media_id } => {
let item =
state.storage.get_media(*media_id).await.map_err(|e| {
ApiError::not_found(format!("Media not found: {}", e))
})?;
Ok(Json(item.into()))
}
_ => {
// For collections/tags, return a placeholder
// Full implementation would return the collection contents
Err(ApiError::bad_request(
"Collection/tag sharing not yet fully implemented",
))
}
}
Ok(Json(item.into()))
},
_ => {
// For collections/tags, return a placeholder
// Full implementation would return the collection contents
Err(ApiError::bad_request(
"Collection/tag sharing not yet fully implemented",
))
},
}
}
/// Get share activity log
/// GET /api/shares/{id}/activity
pub async fn get_activity(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Query(params): Query<PaginationParams>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Query(params): Query<PaginationParams>,
) -> ApiResult<Json<Vec<ShareActivityResponse>>> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
let user_id = resolve_user_id(&state.storage, &username).await?;
let share = state
.storage
.get_share(ShareId(id))
.await
.map_err(|e| ApiError::not_found(format!("Share not found: {}", e)))?;
// Only owner can view activity
if share.owner_id != user_id {
return Err(ApiError::forbidden(
"Only the owner can view share activity",
));
}
// Only owner can view activity
if share.owner_id != user_id {
return Err(ApiError::forbidden(
"Only the owner can view share activity",
));
}
let pagination = Pagination {
offset: params.offset.unwrap_or(0),
limit: params.limit.unwrap_or(50),
sort: params.sort,
};
let pagination = Pagination {
offset: params.offset.unwrap_or(0),
limit: params.limit.unwrap_or(50),
sort: params.sort,
};
let activity = state
.storage
.get_share_activity(ShareId(id), &pagination)
.await
.map_err(|e| ApiError::internal(format!("Failed to get activity: {}", e)))?;
let activity = state
.storage
.get_share_activity(ShareId(id), &pagination)
.await
.map_err(|e| {
ApiError::internal(format!("Failed to get activity: {}", e))
})?;
Ok(Json(activity.into_iter().map(Into::into).collect()))
Ok(Json(activity.into_iter().map(Into::into).collect()))
}
/// Get the calling user's unread share notifications.
/// GET /api/notifications/shares
pub async fn get_notifications(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> ApiResult<Json<Vec<ShareNotificationResponse>>> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let notifications = state
        .storage
        .get_unread_notifications(user_id)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to get notifications: {}", e)))?;

    Ok(Json(notifications.into_iter().map(Into::into).collect()))
}
/// Mark a single share notification as read.
/// POST /api/notifications/shares/{id}/read
///
/// NOTE(review): the authenticated username is required by the route but not
/// used to check that the notification belongs to the caller — confirm the
/// storage layer enforces ownership.
pub async fn mark_notification_read(
    State(state): State<AppState>,
    Extension(_username): Extension<String>,
    Path(id): Path<Uuid>,
) -> ApiResult<StatusCode> {
    state
        .storage
        .mark_notification_read(id)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to mark as read: {}", e)))?;

    Ok(StatusCode::OK)
}
/// Mark all of the calling user's share notifications as read.
/// POST /api/notifications/shares/read-all
pub async fn mark_all_read(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> ApiResult<StatusCode> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    state
        .storage
        .mark_all_notifications_read(user_id)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to mark all as read: {}", e)))?;

    Ok(StatusCode::OK)
}

View file

@ -1,199 +1,204 @@
use axum::Json;
use axum::extract::{Extension, Path, Query, State};
use axum::{
Json,
extract::{Extension, Path, Query, State},
};
use pinakes_core::model::{MediaId, Pagination};
use serde::Deserialize;
use uuid::Uuid;
use crate::auth::resolve_user_id;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::{MediaId, Pagination};
use crate::{auth::resolve_user_id, dto::*, error::ApiError, state::AppState};
#[derive(Deserialize)]
pub struct ShareLinkQuery {
pub password: Option<String>,
pub password: Option<String>,
}
// ===== Ratings =====
pub async fn rate_media(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<CreateRatingRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<CreateRatingRequest>,
) -> Result<Json<RatingResponse>, ApiError> {
if req.stars < 1 || req.stars > 5 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"stars must be between 1 and 5".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
let rating = state
.storage
.rate_media(user_id, MediaId(id), req.stars, req.review_text.as_deref())
.await?;
Ok(Json(RatingResponse::from(rating)))
if req.stars < 1 || req.stars > 5 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"stars must be between 1 and 5".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
let rating = state
.storage
.rate_media(user_id, MediaId(id), req.stars, req.review_text.as_deref())
.await?;
Ok(Json(RatingResponse::from(rating)))
}
/// List all ratings for a media item.
pub async fn get_media_ratings(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<RatingResponse>>, ApiError> {
    let ratings = state.storage.get_media_ratings(MediaId(id)).await?;
    Ok(Json(
        ratings.into_iter().map(RatingResponse::from).collect(),
    ))
}
// ===== Comments =====
pub async fn add_comment(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<CreateCommentRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<CreateCommentRequest>,
) -> Result<Json<CommentResponse>, ApiError> {
let char_count = req.text.chars().count();
if char_count == 0 || char_count > 10_000 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"comment text must be 1-10000 characters".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
let comment = state
.storage
.add_comment(user_id, MediaId(id), &req.text, req.parent_id)
.await?;
Ok(Json(CommentResponse::from(comment)))
let char_count = req.text.chars().count();
if char_count == 0 || char_count > 10_000 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"comment text must be 1-10000 characters".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
let comment = state
.storage
.add_comment(user_id, MediaId(id), &req.text, req.parent_id)
.await?;
Ok(Json(CommentResponse::from(comment)))
}
/// List all comments for a media item.
pub async fn get_media_comments(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<CommentResponse>>, ApiError> {
    let comments = state.storage.get_media_comments(MediaId(id)).await?;
    Ok(Json(
        comments.into_iter().map(CommentResponse::from).collect(),
    ))
}
// ===== Favorites =====

/// Add a media item to the calling user's favorites.
/// Responds with `{"added": true}` on success.
pub async fn add_favorite(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<FavoriteRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    state
        .storage
        .add_favorite(user_id, MediaId(req.media_id))
        .await?;
    Ok(Json(serde_json::json!({"added": true})))
}
/// Remove a media item from the calling user's favorites.
/// Responds with `{"removed": true}` on success.
pub async fn remove_favorite(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(media_id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    state
        .storage
        .remove_favorite(user_id, MediaId(media_id))
        .await?;
    Ok(Json(serde_json::json!({"removed": true})))
}
/// List the calling user's favorited media, using default pagination.
pub async fn list_favorites(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let items = state
        .storage
        .get_user_favorites(user_id, &Pagination::default())
        .await?;
    Ok(Json(items.into_iter().map(MediaResponse::from).collect()))
}
// ===== Share Links =====
pub async fn create_share_link(
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<CreateShareLinkRequest>,
State(state): State<AppState>,
Extension(username): Extension<String>,
Json(req): Json<CreateShareLinkRequest>,
) -> Result<Json<ShareLinkResponse>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
let token = uuid::Uuid::now_v7().to_string().replace('-', "");
let password_hash = match req.password.as_ref() {
Some(p) => Some(pinakes_core::users::auth::hash_password(p).map_err(ApiError)?),
None => None,
};
const MAX_EXPIRY_HOURS: u64 = 8760; // 1 year
if let Some(h) = req.expires_in_hours
&& h > MAX_EXPIRY_HOURS
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(format!(
"expires_in_hours cannot exceed {}",
MAX_EXPIRY_HOURS
)),
));
}
let expires_at = req
.expires_in_hours
.map(|h| chrono::Utc::now() + chrono::Duration::hours(h as i64));
let link = state
.storage
.create_share_link(
MediaId(req.media_id),
user_id,
&token,
password_hash.as_deref(),
expires_at,
)
.await?;
Ok(Json(ShareLinkResponse::from(link)))
let user_id = resolve_user_id(&state.storage, &username).await?;
let token = uuid::Uuid::now_v7().to_string().replace('-', "");
let password_hash = match req.password.as_ref() {
Some(p) => {
Some(pinakes_core::users::auth::hash_password(p).map_err(ApiError)?)
},
None => None,
};
const MAX_EXPIRY_HOURS: u64 = 8760; // 1 year
if let Some(h) = req.expires_in_hours
&& h > MAX_EXPIRY_HOURS
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(format!(
"expires_in_hours cannot exceed {}",
MAX_EXPIRY_HOURS
)),
));
}
let expires_at = req
.expires_in_hours
.map(|h| chrono::Utc::now() + chrono::Duration::hours(h as i64));
let link = state
.storage
.create_share_link(
MediaId(req.media_id),
user_id,
&token,
password_hash.as_deref(),
expires_at,
)
.await?;
Ok(Json(ShareLinkResponse::from(link)))
}
pub async fn access_shared_media(
State(state): State<AppState>,
Path(token): Path<String>,
Query(query): Query<ShareLinkQuery>,
State(state): State<AppState>,
Path(token): Path<String>,
Query(query): Query<ShareLinkQuery>,
) -> Result<Json<MediaResponse>, ApiError> {
let link = state.storage.get_share_link(&token).await?;
// Check expiration
if let Some(expires) = link.expires_at
&& chrono::Utc::now() > expires
{
let link = state.storage.get_share_link(&token).await?;
// Check expiration
if let Some(expires) = link.expires_at
&& chrono::Utc::now() > expires
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"share link has expired".into(),
),
));
}
// Verify password if set
if let Some(ref hash) = link.password_hash {
let password = match query.password.as_deref() {
Some(p) => p,
None => {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation("share link has expired".into()),
pinakes_core::error::PinakesError::Authentication(
"password required for this share link".into(),
),
));
},
};
let valid = pinakes_core::users::auth::verify_password(password, hash)
.unwrap_or(false);
if !valid {
return Err(ApiError(pinakes_core::error::PinakesError::Authentication(
"invalid share link password".into(),
)));
}
// Verify password if set
if let Some(ref hash) = link.password_hash {
let password = match query.password.as_deref() {
Some(p) => p,
None => {
return Err(ApiError(pinakes_core::error::PinakesError::Authentication(
"password required for this share link".into(),
)));
}
};
let valid = pinakes_core::users::auth::verify_password(password, hash).unwrap_or(false);
if !valid {
return Err(ApiError(pinakes_core::error::PinakesError::Authentication(
"invalid share link password".into(),
)));
}
}
state.storage.increment_share_views(&token).await?;
let item = state.storage.get_media(link.media_id).await?;
Ok(Json(MediaResponse::from(item)))
}
state.storage.increment_share_views(&token).await?;
let item = state.storage.get_media(link.media_id).await?;
Ok(Json(MediaResponse::from(item)))
}

View file

@ -1,13 +1,10 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use crate::dto::LibraryStatisticsResponse;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{dto::LibraryStatisticsResponse, error::ApiError, state::AppState};
/// Return aggregate library statistics from storage.
pub async fn library_statistics(
    State(state): State<AppState>,
) -> Result<Json<LibraryStatisticsResponse>, ApiError> {
    let stats = state.storage.library_statistics().await?;
    Ok(Json(LibraryStatisticsResponse::from(stats)))
}

View file

@ -1,240 +1,269 @@
use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::{
extract::{Path, State},
http::StatusCode,
};
use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode};
use pinakes_core::{
model::MediaId,
transcode::{estimate_bandwidth, parse_resolution},
};
use uuid::Uuid;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::transcode::{estimate_bandwidth, parse_resolution};
use crate::{error::ApiError, state::AppState};
/// Escape the five XML special characters in `s`.
///
/// `&` is replaced first so the ampersands introduced by the later
/// replacements are not themselves re-escaped.
fn escape_xml(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&apos;")
}
/// Build the HLS master playlist for a media item.
///
/// One `#EXT-X-STREAM-INF` entry is emitted per configured transcoding
/// profile, with bandwidth/resolution derived from the profile. The profile
/// name is percent-encoded since it is embedded in a URL path segment.
pub async fn hls_master_playlist(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
    // Verify media exists before emitting any playlist
    let _item = state.storage.get_media(MediaId(id)).await?;

    let config = state.config.read().await;
    let profiles = &config.transcoding.profiles;

    let mut playlist = String::from("#EXTM3U\n#EXT-X-VERSION:3\n\n");

    for profile in profiles {
        let (w, h) = parse_resolution(&profile.max_resolution);
        let bandwidth = estimate_bandwidth(profile);
        let encoded_name =
            utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string();
        playlist.push_str(&format!(
            "#EXT-X-STREAM-INF:BANDWIDTH={bandwidth},RESOLUTION={w}x{h}\n\
             /api/v1/media/{id}/stream/hls/{encoded_name}/playlist.m3u8\n\n",
        ));
    }

    Ok(axum::response::Response::builder()
        .header("Content-Type", "application/vnd.apple.mpegurl")
        .body(axum::body::Body::from(playlist))
        .unwrap())
}
/// Build an HLS variant (media) playlist for one profile of a media item.
///
/// Segments are nominally 10 s; the final segment carries the remaining
/// duration. Media with unknown or zero duration is rejected because the
/// segment count cannot be computed.
pub async fn hls_variant_playlist(
    State(state): State<AppState>,
    Path((id, profile)): Path<(Uuid, String)>,
) -> Result<axum::response::Response, ApiError> {
    let item = state.storage.get_media(MediaId(id)).await?;
    let duration = item.duration_secs.unwrap_or(0.0);
    if duration <= 0.0 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "cannot generate HLS playlist for media with unknown or zero duration"
                    .into(),
            ),
        ));
    }

    let segment_duration = 10.0;
    let num_segments = (duration / segment_duration).ceil() as usize;

    let mut playlist = String::from(
        "#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:10\n#EXT-X-MEDIA-SEQUENCE:0\n",
    );

    for i in 0..num_segments.max(1) {
        // The last segment is whatever duration remains; earlier segments are
        // the full nominal length.
        let seg_dur = if i == num_segments.saturating_sub(1) && duration > 0.0 {
            duration - (i as f64 * segment_duration)
        } else {
            segment_duration
        };
        playlist.push_str(&format!("#EXTINF:{seg_dur:.3},\n"));
        playlist.push_str(&format!(
            "/api/v1/media/{id}/stream/hls/{profile}/segment{i}.ts\n"
        ));
    }
    playlist.push_str("#EXT-X-ENDLIST\n");

    Ok(axum::response::Response::builder()
        .header("Content-Type", "application/vnd.apple.mpegurl")
        .body(axum::body::Body::from(playlist))
        .unwrap())
}
pub async fn hls_segment(
State(state): State<AppState>,
Path((id, profile, segment)): Path<(Uuid, String, String)>,
State(state): State<AppState>,
Path((id, profile, segment)): Path<(Uuid, String, String)>,
) -> Result<axum::response::Response, ApiError> {
// Strict validation: reject path traversal, null bytes, leading dots
if segment.is_empty()
|| segment.starts_with('.')
|| segment.contains('\0')
|| segment.contains("..")
|| segment.contains('/')
|| segment.contains('\\')
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation("invalid segment name".into()),
));
// Strict validation: reject path traversal, null bytes, leading dots
if segment.is_empty()
|| segment.starts_with('.')
|| segment.contains('\0')
|| segment.contains("..")
|| segment.contains('/')
|| segment.contains('\\')
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"invalid segment name".into(),
),
));
}
let media_id = MediaId(id);
// Look for an active/completed transcode session
if let Some(transcode_service) = &state.transcode_service
&& let Some(session) =
transcode_service.find_session(media_id, &profile).await
{
let segment_path = session.cache_path.join(&segment);
if segment_path.exists() {
let data = tokio::fs::read(&segment_path).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read segment: {}", e),
))
})?;
return Ok(
axum::response::Response::builder()
.header("Content-Type", "video/MP2T")
.body(axum::body::Body::from(data))
.unwrap(),
);
}
let media_id = MediaId(id);
// Session exists but segment not ready yet
return Ok(
axum::response::Response::builder()
.status(StatusCode::ACCEPTED)
.header("Retry-After", "2")
.body(axum::body::Body::from("segment not yet available"))
.unwrap(),
);
}
// Look for an active/completed transcode session
if let Some(transcode_service) = &state.transcode_service
&& let Some(session) = transcode_service.find_session(media_id, &profile).await
{
let segment_path = session.cache_path.join(&segment);
if segment_path.exists() {
let data = tokio::fs::read(&segment_path).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read segment: {}", e),
))
})?;
return Ok(axum::response::Response::builder()
.header("Content-Type", "video/MP2T")
.body(axum::body::Body::from(data))
.unwrap());
}
// Session exists but segment not ready yet
return Ok(axum::response::Response::builder()
.status(StatusCode::ACCEPTED)
.header("Retry-After", "2")
.body(axum::body::Body::from("segment not yet available"))
.unwrap());
}
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"no transcode session found; start a transcode first via POST /media/{id}/transcode"
.into(),
),
))
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"no transcode session found; start a transcode first via POST \
/media/{id}/transcode"
.into(),
),
))
}
pub async fn dash_manifest(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
let item = state.storage.get_media(MediaId(id)).await?;
let duration = item.duration_secs.unwrap_or(0.0);
if duration <= 0.0 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"cannot generate DASH manifest for media with unknown or zero duration".into(),
),
));
}
let hours = (duration / 3600.0) as u32;
let minutes = ((duration % 3600.0) / 60.0) as u32;
let seconds = duration % 60.0;
let item = state.storage.get_media(MediaId(id)).await?;
let duration = item.duration_secs.unwrap_or(0.0);
if duration <= 0.0 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"cannot generate DASH manifest for media with unknown or zero duration"
.into(),
),
));
}
let hours = (duration / 3600.0) as u32;
let minutes = ((duration % 3600.0) / 60.0) as u32;
let seconds = duration % 60.0;
let config = state.config.read().await;
let profiles = &config.transcoding.profiles;
let config = state.config.read().await;
let profiles = &config.transcoding.profiles;
let mut representations = String::new();
for profile in profiles {
let (w, h) = parse_resolution(&profile.max_resolution);
let bandwidth = estimate_bandwidth(profile);
let xml_name = escape_xml(&profile.name);
let url_name = utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string();
representations.push_str(&format!(
let mut representations = String::new();
for profile in profiles {
let (w, h) = parse_resolution(&profile.max_resolution);
let bandwidth = estimate_bandwidth(profile);
let xml_name = escape_xml(&profile.name);
let url_name =
utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string();
representations.push_str(&format!(
r#" <Representation id="{xml_name}" bandwidth="{bandwidth}" width="{w}" height="{h}">
<SegmentTemplate media="/api/v1/media/{id}/stream/dash/{url_name}/segment$Number$.m4s" initialization="/api/v1/media/{id}/stream/dash/{url_name}/init.mp4" duration="10000" timescale="1000" startNumber="0"/>
</Representation>
"#,
));
}
}
let mpd = format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
let mpd = format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" type="static" mediaPresentationDuration="PT{hours}H{minutes}M{seconds:.1}S" minBufferTime="PT1.5S">
<Period>
<AdaptationSet mimeType="video/mp4" segmentAlignment="true">
{representations} </AdaptationSet>
</Period>
</MPD>"#
);
);
Ok(axum::response::Response::builder()
.header("Content-Type", "application/dash+xml")
.body(axum::body::Body::from(mpd))
.unwrap())
Ok(
axum::response::Response::builder()
.header("Content-Type", "application/dash+xml")
.body(axum::body::Body::from(mpd))
.unwrap(),
)
}
pub async fn dash_segment(
State(state): State<AppState>,
Path((id, profile, segment)): Path<(Uuid, String, String)>,
State(state): State<AppState>,
Path((id, profile, segment)): Path<(Uuid, String, String)>,
) -> Result<axum::response::Response, ApiError> {
// Strict validation: reject path traversal, null bytes, leading dots
if segment.is_empty()
|| segment.starts_with('.')
|| segment.contains('\0')
|| segment.contains("..")
|| segment.contains('/')
|| segment.contains('\\')
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation("invalid segment name".into()),
));
// Strict validation: reject path traversal, null bytes, leading dots
if segment.is_empty()
|| segment.starts_with('.')
|| segment.contains('\0')
|| segment.contains("..")
|| segment.contains('/')
|| segment.contains('\\')
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"invalid segment name".into(),
),
));
}
let media_id = MediaId(id);
if let Some(transcode_service) = &state.transcode_service
&& let Some(session) =
transcode_service.find_session(media_id, &profile).await
{
let segment_path = session.cache_path.join(&segment);
if segment_path.exists() {
let data = tokio::fs::read(&segment_path).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read segment: {}", e),
))
})?;
return Ok(
axum::response::Response::builder()
.header("Content-Type", "video/mp4")
.body(axum::body::Body::from(data))
.unwrap(),
);
}
let media_id = MediaId(id);
return Ok(
axum::response::Response::builder()
.status(StatusCode::ACCEPTED)
.header("Retry-After", "2")
.body(axum::body::Body::from("segment not yet available"))
.unwrap(),
);
}
if let Some(transcode_service) = &state.transcode_service
&& let Some(session) = transcode_service.find_session(media_id, &profile).await
{
let segment_path = session.cache_path.join(&segment);
if segment_path.exists() {
let data = tokio::fs::read(&segment_path).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read segment: {}", e),
))
})?;
return Ok(axum::response::Response::builder()
.header("Content-Type", "video/mp4")
.body(axum::body::Body::from(data))
.unwrap());
}
return Ok(axum::response::Response::builder()
.status(StatusCode::ACCEPTED)
.header("Retry-After", "2")
.body(axum::body::Body::from("segment not yet available"))
.unwrap());
}
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"no transcode session found; start a transcode first via POST /media/{id}/transcode"
.into(),
),
))
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"no transcode session found; start a transcode first via POST \
/media/{id}/transcode"
.into(),
),
))
}

View file

@ -1,123 +1,125 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::{
model::MediaId,
subtitles::{Subtitle, SubtitleFormat},
};
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::subtitles::{Subtitle, SubtitleFormat};
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn list_subtitles(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<Vec<SubtitleResponse>>, ApiError> {
let subtitles = state.storage.get_media_subtitles(MediaId(id)).await?;
Ok(Json(
subtitles.into_iter().map(SubtitleResponse::from).collect(),
))
let subtitles = state.storage.get_media_subtitles(MediaId(id)).await?;
Ok(Json(
subtitles.into_iter().map(SubtitleResponse::from).collect(),
))
}
pub async fn add_subtitle(
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(req): Json<AddSubtitleRequest>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(req): Json<AddSubtitleRequest>,
) -> Result<Json<SubtitleResponse>, ApiError> {
let format: SubtitleFormat = req
.format
.parse()
.map_err(|e: String| ApiError(pinakes_core::error::PinakesError::InvalidOperation(e)))?;
let is_embedded = req.is_embedded.unwrap_or(false);
if !is_embedded && req.file_path.is_none() {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"file_path is required for non-embedded subtitles".into(),
),
));
}
if is_embedded && req.track_index.is_none() {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"track_index is required for embedded subtitles".into(),
),
));
}
let subtitle = Subtitle {
id: Uuid::now_v7(),
media_id: MediaId(id),
language: req.language,
format,
file_path: req.file_path.map(std::path::PathBuf::from),
is_embedded,
track_index: req.track_index,
offset_ms: req.offset_ms.unwrap_or(0),
created_at: chrono::Utc::now(),
};
state.storage.add_subtitle(&subtitle).await?;
Ok(Json(SubtitleResponse::from(subtitle)))
let format: SubtitleFormat = req.format.parse().map_err(|e: String| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(e))
})?;
let is_embedded = req.is_embedded.unwrap_or(false);
if !is_embedded && req.file_path.is_none() {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"file_path is required for non-embedded subtitles".into(),
),
));
}
if is_embedded && req.track_index.is_none() {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"track_index is required for embedded subtitles".into(),
),
));
}
let subtitle = Subtitle {
id: Uuid::now_v7(),
media_id: MediaId(id),
language: req.language,
format,
file_path: req.file_path.map(std::path::PathBuf::from),
is_embedded,
track_index: req.track_index,
offset_ms: req.offset_ms.unwrap_or(0),
created_at: chrono::Utc::now(),
};
state.storage.add_subtitle(&subtitle).await?;
Ok(Json(SubtitleResponse::from(subtitle)))
}
pub async fn delete_subtitle(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
state.storage.delete_subtitle(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
state.storage.delete_subtitle(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
}
pub async fn get_subtitle_content(
State(state): State<AppState>,
Path((media_id, subtitle_id)): Path<(Uuid, Uuid)>,
State(state): State<AppState>,
Path((media_id, subtitle_id)): Path<(Uuid, Uuid)>,
) -> Result<axum::response::Response, ApiError> {
let subtitles = state.storage.get_media_subtitles(MediaId(media_id)).await?;
let subtitle = subtitles
.into_iter()
.find(|s| s.id == subtitle_id)
.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"subtitle {subtitle_id}"
)))
})?;
let subtitles = state.storage.get_media_subtitles(MediaId(media_id)).await?;
let subtitle = subtitles
.into_iter()
.find(|s| s.id == subtitle_id)
.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"subtitle {subtitle_id}"
)))
})?;
if let Some(ref path) = subtitle.file_path {
let content = tokio::fs::read_to_string(path).await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
ApiError(pinakes_core::error::PinakesError::FileNotFound(
path.clone(),
))
} else {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read subtitle file {}: {}", path.display(), e),
))
}
})?;
let content_type = match subtitle.format {
SubtitleFormat::Vtt => "text/vtt",
SubtitleFormat::Srt => "application/x-subrip",
_ => "text/plain",
};
Ok(axum::response::Response::builder()
.header("Content-Type", content_type)
.body(axum::body::Body::from(content))
.unwrap())
} else {
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"subtitle is embedded, no file to serve".into(),
),
if let Some(ref path) = subtitle.file_path {
let content = tokio::fs::read_to_string(path).await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
ApiError(pinakes_core::error::PinakesError::FileNotFound(
path.clone(),
))
}
} else {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read subtitle file {}: {}", path.display(), e),
))
}
})?;
let content_type = match subtitle.format {
SubtitleFormat::Vtt => "text/vtt",
SubtitleFormat::Srt => "application/x-subrip",
_ => "text/plain",
};
Ok(
axum::response::Response::builder()
.header("Content-Type", content_type)
.body(axum::body::Body::from(content))
.unwrap(),
)
} else {
Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"subtitle is embedded, no file to serve".into(),
),
))
}
}
pub async fn update_offset(
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(req): Json<UpdateSubtitleOffsetRequest>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(req): Json<UpdateSubtitleOffsetRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
state
.storage
.update_subtitle_offset(id, req.offset_ms)
.await?;
Ok(Json(serde_json::json!({"updated": true})))
state
.storage
.update_subtitle_offset(id, req.offset_ms)
.await?;
Ok(Json(serde_json::json!({"updated": true})))
}

File diff suppressed because it is too large Load diff

View file

@ -1,70 +1,75 @@
use axum::Json;
use axum::extract::{Path, State};
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn create_tag(
State(state): State<AppState>,
Json(req): Json<CreateTagRequest>,
State(state): State<AppState>,
Json(req): Json<CreateTagRequest>,
) -> Result<Json<TagResponse>, ApiError> {
if req.name.is_empty() || req.name.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"tag name must be 1-255 characters".into(),
),
));
}
let tag = pinakes_core::tags::create_tag(&state.storage, &req.name, req.parent_id).await?;
Ok(Json(TagResponse::from(tag)))
if req.name.is_empty() || req.name.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"tag name must be 1-255 characters".into(),
),
));
}
let tag =
pinakes_core::tags::create_tag(&state.storage, &req.name, req.parent_id)
.await?;
Ok(Json(TagResponse::from(tag)))
}
pub async fn list_tags(State(state): State<AppState>) -> Result<Json<Vec<TagResponse>>, ApiError> {
let tags = state.storage.list_tags().await?;
Ok(Json(tags.into_iter().map(TagResponse::from).collect()))
pub async fn list_tags(
State(state): State<AppState>,
) -> Result<Json<Vec<TagResponse>>, ApiError> {
let tags = state.storage.list_tags().await?;
Ok(Json(tags.into_iter().map(TagResponse::from).collect()))
}
pub async fn get_tag(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<TagResponse>, ApiError> {
let tag = state.storage.get_tag(id).await?;
Ok(Json(TagResponse::from(tag)))
let tag = state.storage.get_tag(id).await?;
Ok(Json(TagResponse::from(tag)))
}
pub async fn delete_tag(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
state.storage.delete_tag(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
state.storage.delete_tag(id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
}
pub async fn tag_media(
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
Json(req): Json<TagMediaRequest>,
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
Json(req): Json<TagMediaRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
pinakes_core::tags::tag_media(&state.storage, MediaId(media_id), req.tag_id).await?;
Ok(Json(serde_json::json!({"tagged": true})))
pinakes_core::tags::tag_media(&state.storage, MediaId(media_id), req.tag_id)
.await?;
Ok(Json(serde_json::json!({"tagged": true})))
}
pub async fn untag_media(
State(state): State<AppState>,
Path((media_id, tag_id)): Path<(Uuid, Uuid)>,
State(state): State<AppState>,
Path((media_id, tag_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
pinakes_core::tags::untag_media(&state.storage, MediaId(media_id), tag_id).await?;
Ok(Json(serde_json::json!({"untagged": true})))
pinakes_core::tags::untag_media(&state.storage, MediaId(media_id), tag_id)
.await?;
Ok(Json(serde_json::json!({"untagged": true})))
}
pub async fn get_media_tags(
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
) -> Result<Json<Vec<TagResponse>>, ApiError> {
let tags = state.storage.get_media_tags(MediaId(media_id)).await?;
Ok(Json(tags.into_iter().map(TagResponse::from).collect()))
let tags = state.storage.get_media_tags(MediaId(media_id)).await?;
Ok(Json(tags.into_iter().map(TagResponse::from).collect()))
}

View file

@ -1,63 +1,66 @@
use axum::Json;
use axum::extract::{Path, Query, State};
use axum::{
Json,
extract::{Path, Query, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid;
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use pinakes_core::model::MediaId;
use crate::{dto::*, error::ApiError, state::AppState};
pub async fn start_transcode(
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(req): Json<CreateTranscodeRequest>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(req): Json<CreateTranscodeRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Transcode {
media_id: MediaId(id),
profile: req.profile,
})
.await;
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
let job_id = state
.job_queue
.submit(pinakes_core::jobs::JobKind::Transcode {
media_id: MediaId(id),
profile: req.profile,
})
.await;
Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
pub async fn get_session(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<TranscodeSessionResponse>, ApiError> {
let session = state.storage.get_transcode_session(id).await?;
Ok(Json(TranscodeSessionResponse::from(session)))
let session = state.storage.get_transcode_session(id).await?;
Ok(Json(TranscodeSessionResponse::from(session)))
}
pub async fn list_sessions(
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
State(state): State<AppState>,
Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<TranscodeSessionResponse>>, ApiError> {
let _ = params; // reserved for future filtering
let sessions = state.storage.list_transcode_sessions(None).await?;
Ok(Json(
sessions
.into_iter()
.map(TranscodeSessionResponse::from)
.collect(),
))
let _ = params; // reserved for future filtering
let sessions = state.storage.list_transcode_sessions(None).await?;
Ok(Json(
sessions
.into_iter()
.map(TranscodeSessionResponse::from)
.collect(),
))
}
pub async fn cancel_session(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
if let Some(transcode_service) = &state.transcode_service {
transcode_service
.cancel_transcode(id, &state.storage)
.await?;
} else {
state
.storage
.update_transcode_status(id, pinakes_core::transcode::TranscodeStatus::Cancelled, 0.0)
.await?;
}
Ok(Json(serde_json::json!({"cancelled": true})))
if let Some(transcode_service) = &state.transcode_service {
transcode_service
.cancel_transcode(id, &state.storage)
.await?;
} else {
state
.storage
.update_transcode_status(
id,
pinakes_core::transcode::TranscodeStatus::Cancelled,
0.0,
)
.await?;
}
Ok(Json(serde_json::json!({"cancelled": true})))
}

View file

@ -1,120 +1,97 @@
use axum::{
Json,
extract::{Multipart, Path, State},
http::{StatusCode, header},
response::IntoResponse,
Json,
extract::{Multipart, Path, State},
http::{StatusCode, header},
response::IntoResponse,
};
use pinakes_core::{model::MediaId, upload};
use tokio_util::io::ReaderStream;
use uuid::Uuid;
use crate::dto::{ManagedStorageStatsResponse, UploadResponse};
use crate::error::{ApiError, ApiResult};
use crate::state::AppState;
use pinakes_core::model::MediaId;
use pinakes_core::upload;
use crate::{
dto::{ManagedStorageStatsResponse, UploadResponse},
error::{ApiError, ApiResult},
state::AppState,
};
/// Upload a file to managed storage
/// POST /api/upload
pub async fn upload_file(
State(state): State<AppState>,
mut multipart: Multipart,
State(state): State<AppState>,
mut multipart: Multipart,
) -> ApiResult<Json<UploadResponse>> {
let managed_storage = state
.managed_storage
.as_ref()
.ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
let managed_storage = state
.managed_storage
.as_ref()
.ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
let config = state.config.read().await;
if !config.managed_storage.enabled {
return Err(ApiError::bad_request("Managed storage is not enabled"));
}
drop(config);
let config = state.config.read().await;
if !config.managed_storage.enabled {
return Err(ApiError::bad_request("Managed storage is not enabled"));
}
drop(config);
// Extract file from multipart
let field = multipart
.next_field()
.await
.map_err(|e| ApiError::bad_request(format!("Failed to read multipart field: {}", e)))?
.ok_or_else(|| ApiError::bad_request("No file provided"))?;
let original_filename = field
.file_name()
.map(|s| s.to_string())
.unwrap_or_else(|| "unknown".to_string());
let content_type = field
.content_type()
.map(|s| s.to_string())
.unwrap_or_else(|| "application/octet-stream".to_string());
let data = field
.bytes()
.await
.map_err(|e| ApiError::bad_request(format!("Failed to read file data: {}", e)))?;
// Process the upload
let result = upload::process_upload_bytes(
&state.storage,
managed_storage.as_ref(),
&data,
&original_filename,
Some(&content_type),
)
// Extract file from multipart
let field = multipart
.next_field()
.await
.map_err(|e| ApiError::internal(format!("Upload failed: {}", e)))?;
.map_err(|e| {
ApiError::bad_request(format!("Failed to read multipart field: {}", e))
})?
.ok_or_else(|| ApiError::bad_request("No file provided"))?;
Ok(Json(result.into()))
let original_filename = field
.file_name()
.map(|s| s.to_string())
.unwrap_or_else(|| "unknown".to_string());
let content_type = field
.content_type()
.map(|s| s.to_string())
.unwrap_or_else(|| "application/octet-stream".to_string());
let data = field.bytes().await.map_err(|e| {
ApiError::bad_request(format!("Failed to read file data: {}", e))
})?;
// Process the upload
let result = upload::process_upload_bytes(
&state.storage,
managed_storage.as_ref(),
&data,
&original_filename,
Some(&content_type),
)
.await
.map_err(|e| ApiError::internal(format!("Upload failed: {}", e)))?;
Ok(Json(result.into()))
}
/// Download a managed file
/// GET /api/media/{id}/download
pub async fn download_file(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> ApiResult<impl IntoResponse> {
let media_id = MediaId(id);
let item = state
.storage
.get_media(media_id)
.await
.map_err(|e| ApiError::not_found(format!("Media not found: {}", e)))?;
let media_id = MediaId(id);
let item = state
.storage
.get_media(media_id)
.await
.map_err(|e| ApiError::not_found(format!("Media not found: {}", e)))?;
let managed_storage = state
.managed_storage
.as_ref()
.ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
let managed_storage = state
.managed_storage
.as_ref()
.ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
// Check if this is a managed file
if item.storage_mode != pinakes_core::model::StorageMode::Managed {
// For external files, stream from their original path
let file = tokio::fs::File::open(&item.path)
.await
.map_err(|e| ApiError::not_found(format!("File not found: {}", e)))?;
let stream = ReaderStream::new(file);
let body = axum::body::Body::from_stream(stream);
let content_type = item.media_type.mime_type();
let filename = item.original_filename.unwrap_or(item.file_name);
return Ok((
[
(header::CONTENT_TYPE, content_type),
(
header::CONTENT_DISPOSITION,
format!("attachment; filename=\"{}\"", filename),
),
],
body,
));
}
// For managed files, stream from content-addressable storage
let file = managed_storage
.open(&item.content_hash)
.await
.map_err(|e| ApiError::not_found(format!("Blob not found: {}", e)))?;
// Check if this is a managed file
if item.storage_mode != pinakes_core::model::StorageMode::Managed {
// For external files, stream from their original path
let file = tokio::fs::File::open(&item.path)
.await
.map_err(|e| ApiError::not_found(format!("File not found: {}", e)))?;
let stream = ReaderStream::new(file);
let body = axum::body::Body::from_stream(stream);
@ -123,47 +100,76 @@ pub async fn download_file(
let filename = item.original_filename.unwrap_or(item.file_name);
Ok((
[
(header::CONTENT_TYPE, content_type),
(
header::CONTENT_DISPOSITION,
format!("attachment; filename=\"{}\"", filename),
),
],
body,
))
return Ok((
[
(header::CONTENT_TYPE, content_type),
(
header::CONTENT_DISPOSITION,
format!("attachment; filename=\"{}\"", filename),
),
],
body,
));
}
// For managed files, stream from content-addressable storage
let file = managed_storage
.open(&item.content_hash)
.await
.map_err(|e| ApiError::not_found(format!("Blob not found: {}", e)))?;
let stream = ReaderStream::new(file);
let body = axum::body::Body::from_stream(stream);
let content_type = item.media_type.mime_type();
let filename = item.original_filename.unwrap_or(item.file_name);
Ok((
[
(header::CONTENT_TYPE, content_type),
(
header::CONTENT_DISPOSITION,
format!("attachment; filename=\"{}\"", filename),
),
],
body,
))
}
/// Migrate an external file to managed storage
/// POST /api/media/{id}/move-to-managed
pub async fn move_to_managed(
State(state): State<AppState>,
Path(id): Path<Uuid>,
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> ApiResult<StatusCode> {
let managed_storage = state
.managed_storage
.as_ref()
.ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
let managed_storage = state
.managed_storage
.as_ref()
.ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
let media_id = MediaId(id);
upload::migrate_to_managed(&state.storage, managed_storage.as_ref(), media_id)
.await
.map_err(|e| ApiError::internal(format!("Migration failed: {}", e)))?;
let media_id = MediaId(id);
upload::migrate_to_managed(
&state.storage,
managed_storage.as_ref(),
media_id,
)
.await
.map_err(|e| ApiError::internal(format!("Migration failed: {}", e)))?;
Ok(StatusCode::NO_CONTENT)
Ok(StatusCode::NO_CONTENT)
}
/// Get managed storage statistics
/// GET /api/managed/stats
pub async fn managed_stats(
State(state): State<AppState>,
State(state): State<AppState>,
) -> ApiResult<Json<ManagedStorageStatsResponse>> {
let stats = state
.storage
.managed_storage_stats()
.await
.map_err(|e| ApiError::internal(format!("Failed to get stats: {}", e)))?;
let stats = state
.storage
.managed_storage_stats()
.await
.map_err(|e| ApiError::internal(format!("Failed to get stats: {}", e)))?;
Ok(Json(stats.into()))
Ok(Json(stats.into()))
}

View file

@ -1,171 +1,176 @@
use axum::Json;
use axum::extract::{Path, State};
use crate::dto::*;
use crate::error::ApiError;
use crate::state::AppState;
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::users::{CreateUserRequest, UpdateUserRequest, UserId};
use crate::{dto::*, error::ApiError, state::AppState};
/// List all users (admin only)
pub async fn list_users(
State(state): State<AppState>,
State(state): State<AppState>,
) -> Result<Json<Vec<UserResponse>>, ApiError> {
let users = state.storage.list_users().await?;
Ok(Json(users.into_iter().map(UserResponse::from).collect()))
let users = state.storage.list_users().await?;
Ok(Json(users.into_iter().map(UserResponse::from).collect()))
}
/// Create a new user (admin only)
pub async fn create_user(
State(state): State<AppState>,
Json(req): Json<CreateUserRequest>,
State(state): State<AppState>,
Json(req): Json<CreateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
// Validate username
if req.username.is_empty() || req.username.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"username must be 1-255 characters".into(),
),
));
}
// Validate username
if req.username.is_empty() || req.username.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"username must be 1-255 characters".into(),
),
));
}
// Validate password
if req.password.len() < 8 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"password must be at least 8 characters".into(),
),
));
}
// Validate password
if req.password.len() < 8 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"password must be at least 8 characters".into(),
),
));
}
// Hash password
let password_hash = pinakes_core::users::auth::hash_password(&req.password)?;
// Hash password
let password_hash = pinakes_core::users::auth::hash_password(&req.password)?;
// Create user - rely on DB unique constraint for username to avoid TOCTOU race
let user = state
.storage
.create_user(&req.username, &password_hash, req.role, req.profile)
.await
.map_err(|e| {
// Map unique constraint violations to a user-friendly conflict error
let err_str = e.to_string();
if err_str.contains("UNIQUE")
|| err_str.contains("unique")
|| err_str.contains("duplicate key")
{
ApiError(pinakes_core::error::PinakesError::DuplicateHash(
"username already exists".into(),
))
} else {
ApiError(e)
}
})?;
// Create user - rely on DB unique constraint for username to avoid TOCTOU
// race
let user = state
.storage
.create_user(&req.username, &password_hash, req.role, req.profile)
.await
.map_err(|e| {
// Map unique constraint violations to a user-friendly conflict error
let err_str = e.to_string();
if err_str.contains("UNIQUE")
|| err_str.contains("unique")
|| err_str.contains("duplicate key")
{
ApiError(pinakes_core::error::PinakesError::DuplicateHash(
"username already exists".into(),
))
} else {
ApiError(e)
}
})?;
Ok(Json(UserResponse::from(user)))
Ok(Json(UserResponse::from(user)))
}
/// Get a specific user by ID
pub async fn get_user(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<UserResponse>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
let user = state.storage.get_user(user_id).await?;
Ok(Json(UserResponse::from(user)))
let user = state.storage.get_user(user_id).await?;
Ok(Json(UserResponse::from(user)))
}
/// Update a user
pub async fn update_user(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<UpdateUserRequest>,
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<UpdateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
// Hash password if provided
let password_hash = if let Some(ref password) = req.password {
if password.len() < 8 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"password must be at least 8 characters".into(),
),
));
}
Some(pinakes_core::users::auth::hash_password(password)?)
} else {
None
};
// Hash password if provided
let password_hash = if let Some(ref password) = req.password {
if password.len() < 8 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"password must be at least 8 characters".into(),
),
));
}
Some(pinakes_core::users::auth::hash_password(password)?)
} else {
None
};
let user = state
.storage
.update_user(user_id, password_hash.as_deref(), req.role, req.profile)
.await?;
let user = state
.storage
.update_user(user_id, password_hash.as_deref(), req.role, req.profile)
.await?;
Ok(Json(UserResponse::from(user)))
Ok(Json(UserResponse::from(user)))
}
/// Delete a user (admin only)
pub async fn delete_user(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state.storage.delete_user(user_id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
state.storage.delete_user(user_id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
}
/// Get user's accessible libraries
pub async fn get_user_libraries(
State(state): State<AppState>,
Path(id): Path<String>,
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<Vec<UserLibraryResponse>>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
let libraries = state.storage.get_user_libraries(user_id).await?;
Ok(Json(
libraries
.into_iter()
.map(UserLibraryResponse::from)
.collect(),
))
let libraries = state.storage.get_user_libraries(user_id).await?;
Ok(Json(
libraries
.into_iter()
.map(UserLibraryResponse::from)
.collect(),
))
}
/// Grant library access to a user (admin only)
pub async fn grant_library_access(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<GrantLibraryAccessRequest>,
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<GrantLibraryAccessRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state
.storage
.grant_library_access(user_id, &req.root_path, req.permission)
.await?;
state
.storage
.grant_library_access(user_id, &req.root_path, req.permission)
.await?;
Ok(Json(serde_json::json!({"granted": true})))
Ok(Json(serde_json::json!({"granted": true})))
}
/// Revoke library access from a user (admin only)
@ -173,19 +178,20 @@ pub async fn grant_library_access(
/// Uses a JSON body instead of a path parameter because root_path may contain
/// slashes that conflict with URL routing.
pub async fn revoke_library_access(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<RevokeLibraryAccessRequest>,
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<RevokeLibraryAccessRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id: UserId = id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state
.storage
.revoke_library_access(user_id, &req.root_path)
.await?;
Ok(Json(serde_json::json!({"revoked": true})))
state
.storage
.revoke_library_access(user_id, &req.root_path)
.await?;
Ok(Json(serde_json::json!({"revoked": true})))
}

View file

@ -1,40 +1,40 @@
use axum::Json;
use axum::extract::State;
use axum::{Json, extract::State};
use serde::Serialize;
use crate::error::ApiError;
use crate::state::AppState;
use crate::{error::ApiError, state::AppState};
#[derive(Debug, Serialize)]
pub struct WebhookInfo {
pub url: String,
pub events: Vec<String>,
pub url: String,
pub events: Vec<String>,
}
/// List the webhooks currently present in the server configuration.
///
/// # Errors
/// Infallible in practice; the `Result` matches the handler convention.
pub async fn list_webhooks(
  State(state): State<AppState>,
) -> Result<Json<Vec<WebhookInfo>>, ApiError> {
  let config = state.config.read().await;
  let hooks: Vec<WebhookInfo> = config
    .webhooks
    .iter()
    .map(|h| WebhookInfo {
      url: h.url.clone(),
      events: h.events.clone(),
    })
    .collect();
  Ok(Json(hooks))
}
/// Report how many webhooks are configured and acknowledge a test send.
///
/// NOTE(review): no delivery happens here — the response only counts the
/// configured hooks; actual dispatch is expected to go through the event bus.
pub async fn test_webhook(
  State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
  let config = state.config.read().await;
  let count = config.webhooks.len();
  // Emit a test event to all configured webhooks
  // In production, the event bus would handle delivery
  Ok(Json(serde_json::json!({
    "webhooks_configured": count,
    "test_sent": true
  })))
}

View file

@ -1,33 +1,33 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::{path::PathBuf, sync::Arc};
use pinakes_core::{
cache::CacheLayer,
config::Config,
jobs::JobQueue,
managed_storage::ManagedStorageService,
plugin::PluginManager,
scan::ScanProgress,
scheduler::TaskScheduler,
storage::DynStorageBackend,
sync::ChunkedUploadManager,
transcode::TranscodeService,
};
use tokio::sync::RwLock;
use pinakes_core::cache::CacheLayer;
use pinakes_core::config::Config;
use pinakes_core::jobs::JobQueue;
use pinakes_core::managed_storage::ManagedStorageService;
use pinakes_core::plugin::PluginManager;
use pinakes_core::scan::ScanProgress;
use pinakes_core::scheduler::TaskScheduler;
use pinakes_core::storage::DynStorageBackend;
use pinakes_core::sync::ChunkedUploadManager;
use pinakes_core::transcode::TranscodeService;
// Note: Sessions are now stored in the database via StorageBackend
// See storage::SessionData and related methods
#[derive(Clone)]
pub struct AppState {
pub storage: DynStorageBackend,
pub config: Arc<RwLock<Config>>,
pub config_path: Option<PathBuf>,
pub scan_progress: ScanProgress,
pub job_queue: Arc<JobQueue>,
pub cache: Arc<CacheLayer>,
pub scheduler: Arc<TaskScheduler>,
pub plugin_manager: Option<Arc<PluginManager>>,
pub transcode_service: Option<Arc<TranscodeService>>,
pub managed_storage: Option<Arc<ManagedStorageService>>,
pub chunked_upload_manager: Option<Arc<ChunkedUploadManager>>,
pub storage: DynStorageBackend,
pub config: Arc<RwLock<Config>>,
pub config_path: Option<PathBuf>,
pub scan_progress: ScanProgress,
pub job_queue: Arc<JobQueue>,
pub cache: Arc<CacheLayer>,
pub scheduler: Arc<TaskScheduler>,
pub plugin_manager: Option<Arc<PluginManager>>,
pub transcode_service: Option<Arc<TranscodeService>>,
pub managed_storage: Option<Arc<ManagedStorageService>>,
pub chunked_upload_manager: Option<Arc<ChunkedUploadManager>>,
}