meta: move public crates to packages/

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I928162008cb1ba02e1aa0e7aa971e8326a6a6964
This commit is contained in:
raf 2026-03-23 02:32:37 +03:00
commit 00bab69598
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
308 changed files with 53890 additions and 53889 deletions

View file

@ -0,0 +1,45 @@
# Manifest for the Pinakes HTTP server crate.
[package]
name = "pinakes-server"
# Edition, version, and license are inherited from the workspace root
# so all member crates stay in lockstep.
edition.workspace = true
version.workspace = true
license.workspace = true

[dependencies]
# Workspace-internal crates.
pinakes-core = { workspace = true }
pinakes-plugin-api = { workspace = true }
# Async runtime and (de)serialization.
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
toml = { workspace = true }
chrono = { workspace = true }
uuid = { workspace = true }
# Error handling.
thiserror = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true }
# Structured logging.
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
# HTTP stack: axum on tower, with TLS termination via rustls.
axum = { workspace = true }
axum-server = { workspace = true, features = ["tls-rustls"] }
tower = { workspace = true }
tower-http = { workspace = true }
# Rate limiting (governor core + tower middleware).
governor = { workspace = true }
tower_governor = { workspace = true }
tokio-util = { workspace = true, features = ["io"] }
# Password hashing and content hashing.
argon2 = { workspace = true }
blake3 = { workspace = true }
rand = { workspace = true }
percent-encoding = { workspace = true }
http = { workspace = true }
rustc-hash = { workspace = true }
# OpenAPI document generation and Swagger UI serving.
utoipa = { workspace = true }
utoipa-axum = { workspace = true }
utoipa-swagger-ui = { workspace = true }

[dev-dependencies]
http-body-util = { workspace = true }
reqwest = { workspace = true }
tempfile = { workspace = true }

[lints]
workspace = true

View file

@ -0,0 +1,486 @@
use utoipa::OpenApi;
/// Central `OpenAPI` document registry.
/// Handler functions and schemas are added here as route modules are annotated.
///
/// NOTE: every `#[utoipa::path]`-annotated handler must be listed in
/// `paths(...)`, and every request/response type it references must be listed
/// in `components(schemas(...))` — entries missing here are silently absent
/// from the generated document. Keep each section sorted by route module so
/// additions are easy to diff.
#[derive(OpenApi)]
#[openapi(
    info(
        title = "Pinakes API",
        // Version is taken from Cargo.toml at compile time.
        version = env!("CARGO_PKG_VERSION"),
        description = "Media cataloging and library management API"
    ),
    // Registered handler operations, grouped by route module.
    paths(
        // analytics
        crate::routes::analytics::get_most_viewed,
        crate::routes::analytics::get_recently_viewed,
        crate::routes::analytics::record_event,
        crate::routes::analytics::get_watch_progress,
        crate::routes::analytics::update_watch_progress,
        // audit
        crate::routes::audit::list_audit,
        // auth
        crate::routes::auth::login,
        crate::routes::auth::logout,
        crate::routes::auth::me,
        crate::routes::auth::refresh,
        crate::routes::auth::revoke_all_sessions,
        crate::routes::auth::list_active_sessions,
        // backup
        crate::routes::backup::create_backup,
        // books
        crate::routes::books::get_book_metadata,
        crate::routes::books::list_books,
        crate::routes::books::list_series,
        crate::routes::books::get_series_books,
        crate::routes::books::list_authors,
        crate::routes::books::get_author_books,
        crate::routes::books::get_reading_progress,
        crate::routes::books::update_reading_progress,
        crate::routes::books::get_reading_list,
        // collections
        crate::routes::collections::create_collection,
        crate::routes::collections::list_collections,
        crate::routes::collections::get_collection,
        crate::routes::collections::delete_collection,
        crate::routes::collections::add_member,
        crate::routes::collections::remove_member,
        crate::routes::collections::get_members,
        // config
        crate::routes::config::get_config,
        crate::routes::config::get_ui_config,
        crate::routes::config::update_ui_config,
        crate::routes::config::update_scanning_config,
        crate::routes::config::add_root,
        crate::routes::config::remove_root,
        // database
        crate::routes::database::database_stats,
        crate::routes::database::vacuum_database,
        crate::routes::database::clear_database,
        // duplicates
        crate::routes::duplicates::list_duplicates,
        // enrichment
        crate::routes::enrichment::trigger_enrichment,
        crate::routes::enrichment::get_external_metadata,
        crate::routes::enrichment::batch_enrich,
        // export
        crate::routes::export::trigger_export,
        crate::routes::export::trigger_export_with_options,
        // health
        crate::routes::health::health,
        crate::routes::health::liveness,
        crate::routes::health::readiness,
        crate::routes::health::health_detailed,
        // integrity
        crate::routes::integrity::trigger_orphan_detection,
        crate::routes::integrity::trigger_verify_integrity,
        crate::routes::integrity::trigger_cleanup_thumbnails,
        crate::routes::integrity::generate_all_thumbnails,
        crate::routes::integrity::resolve_orphans,
        // jobs
        crate::routes::jobs::list_jobs,
        crate::routes::jobs::get_job,
        crate::routes::jobs::cancel_job,
        // media
        crate::routes::media::import_media,
        crate::routes::media::list_media,
        crate::routes::media::get_media,
        crate::routes::media::update_media,
        crate::routes::media::delete_media,
        crate::routes::media::open_media,
        crate::routes::media::import_with_options,
        crate::routes::media::batch_import,
        crate::routes::media::import_directory_endpoint,
        crate::routes::media::preview_directory,
        crate::routes::media::set_custom_field,
        crate::routes::media::delete_custom_field,
        crate::routes::media::batch_tag,
        crate::routes::media::delete_all_media,
        crate::routes::media::batch_delete,
        crate::routes::media::batch_add_to_collection,
        crate::routes::media::batch_update,
        crate::routes::media::get_thumbnail,
        crate::routes::media::get_media_count,
        crate::routes::media::rename_media,
        crate::routes::media::move_media_endpoint,
        crate::routes::media::batch_move_media,
        crate::routes::media::soft_delete_media,
        crate::routes::media::restore_media,
        crate::routes::media::list_trash,
        crate::routes::media::trash_info,
        crate::routes::media::empty_trash,
        crate::routes::media::permanent_delete_media,
        crate::routes::media::stream_media,
        // notes
        crate::routes::notes::get_backlinks,
        crate::routes::notes::get_outgoing_links,
        crate::routes::notes::get_graph,
        crate::routes::notes::reindex_links,
        crate::routes::notes::resolve_links,
        crate::routes::notes::get_unresolved_count,
        // photos
        crate::routes::photos::get_timeline,
        crate::routes::photos::get_map_photos,
        // playlists
        crate::routes::playlists::create_playlist,
        crate::routes::playlists::list_playlists,
        crate::routes::playlists::get_playlist,
        crate::routes::playlists::update_playlist,
        crate::routes::playlists::delete_playlist,
        crate::routes::playlists::add_item,
        crate::routes::playlists::remove_item,
        crate::routes::playlists::list_items,
        crate::routes::playlists::reorder_item,
        crate::routes::playlists::shuffle_playlist,
        // plugins
        crate::routes::plugins::list_plugins,
        crate::routes::plugins::get_plugin,
        crate::routes::plugins::install_plugin,
        crate::routes::plugins::uninstall_plugin,
        crate::routes::plugins::toggle_plugin,
        crate::routes::plugins::list_plugin_ui_pages,
        crate::routes::plugins::list_plugin_ui_widgets,
        crate::routes::plugins::emit_plugin_event,
        crate::routes::plugins::list_plugin_ui_theme_extensions,
        crate::routes::plugins::reload_plugin,
        // saved_searches
        crate::routes::saved_searches::create_saved_search,
        crate::routes::saved_searches::list_saved_searches,
        crate::routes::saved_searches::delete_saved_search,
        // scan
        crate::routes::scan::trigger_scan,
        crate::routes::scan::scan_status,
        // scheduled_tasks
        crate::routes::scheduled_tasks::list_scheduled_tasks,
        crate::routes::scheduled_tasks::toggle_scheduled_task,
        crate::routes::scheduled_tasks::run_scheduled_task_now,
        // search
        crate::routes::search::search,
        crate::routes::search::search_post,
        // shares
        crate::routes::shares::create_share,
        crate::routes::shares::list_outgoing,
        crate::routes::shares::list_incoming,
        crate::routes::shares::get_share,
        crate::routes::shares::update_share,
        crate::routes::shares::delete_share,
        crate::routes::shares::batch_delete,
        crate::routes::shares::access_shared,
        crate::routes::shares::get_activity,
        crate::routes::shares::get_notifications,
        crate::routes::shares::mark_notification_read,
        crate::routes::shares::mark_all_read,
        // social
        crate::routes::social::rate_media,
        crate::routes::social::get_media_ratings,
        crate::routes::social::add_comment,
        crate::routes::social::get_media_comments,
        crate::routes::social::add_favorite,
        crate::routes::social::remove_favorite,
        crate::routes::social::list_favorites,
        crate::routes::social::create_share_link,
        crate::routes::social::access_shared_media,
        // statistics
        crate::routes::statistics::library_statistics,
        // streaming
        crate::routes::streaming::hls_master_playlist,
        crate::routes::streaming::hls_variant_playlist,
        crate::routes::streaming::hls_segment,
        crate::routes::streaming::dash_manifest,
        crate::routes::streaming::dash_segment,
        // subtitles
        crate::routes::subtitles::list_subtitles,
        crate::routes::subtitles::add_subtitle,
        crate::routes::subtitles::delete_subtitle,
        crate::routes::subtitles::get_subtitle_content,
        crate::routes::subtitles::update_offset,
        // sync
        crate::routes::sync::register_device,
        crate::routes::sync::list_devices,
        crate::routes::sync::get_device,
        crate::routes::sync::update_device,
        crate::routes::sync::delete_device,
        crate::routes::sync::regenerate_token,
        crate::routes::sync::get_changes,
        crate::routes::sync::report_changes,
        crate::routes::sync::acknowledge_changes,
        crate::routes::sync::list_conflicts,
        crate::routes::sync::resolve_conflict,
        crate::routes::sync::create_upload,
        crate::routes::sync::upload_chunk,
        crate::routes::sync::get_upload_status,
        crate::routes::sync::complete_upload,
        crate::routes::sync::cancel_upload,
        crate::routes::sync::download_file,
        // tags
        crate::routes::tags::create_tag,
        crate::routes::tags::list_tags,
        crate::routes::tags::get_tag,
        crate::routes::tags::delete_tag,
        crate::routes::tags::tag_media,
        crate::routes::tags::untag_media,
        crate::routes::tags::get_media_tags,
        // transcode
        crate::routes::transcode::start_transcode,
        crate::routes::transcode::get_session,
        crate::routes::transcode::list_sessions,
        crate::routes::transcode::cancel_session,
        // upload
        crate::routes::upload::upload_file,
        crate::routes::upload::download_file,
        crate::routes::upload::move_to_managed,
        crate::routes::upload::managed_stats,
        // users
        crate::routes::users::list_users,
        crate::routes::users::create_user,
        crate::routes::users::get_user,
        crate::routes::users::update_user,
        crate::routes::users::delete_user,
        crate::routes::users::get_user_libraries,
        crate::routes::users::grant_library_access,
        crate::routes::users::revoke_library_access,
        // webhooks
        crate::routes::webhooks::list_webhooks,
        crate::routes::webhooks::test_webhook,
    ),
    // Schema registry. Shared request/response DTOs live in `crate::dto`;
    // types declared inside a route module ("local types") are referenced
    // through their `crate::routes::*` path.
    components(
        schemas(
            // analytics DTOs
            crate::dto::UsageEventResponse,
            crate::dto::RecordUsageEventRequest,
            // audit DTOs
            crate::dto::AuditEntryResponse,
            // auth local types
            crate::routes::auth::SessionListResponse,
            crate::routes::auth::SessionInfo,
            // batch DTOs
            crate::dto::BatchTagRequest,
            crate::dto::BatchCollectionRequest,
            crate::dto::BatchDeleteRequest,
            crate::dto::BatchUpdateRequest,
            crate::dto::BatchOperationResponse,
            // books local types
            crate::routes::books::BookMetadataResponse,
            crate::routes::books::AuthorResponse,
            crate::routes::books::ReadingProgressResponse,
            crate::routes::books::UpdateProgressRequest,
            crate::routes::books::SeriesSummary,
            crate::routes::books::AuthorSummary,
            // collections DTOs
            crate::dto::CollectionResponse,
            crate::dto::CreateCollectionRequest,
            crate::dto::AddMemberRequest,
            // config DTOs
            crate::dto::ConfigResponse,
            crate::dto::ScanningConfigResponse,
            crate::dto::ServerConfigResponse,
            crate::dto::UpdateScanningRequest,
            crate::dto::RootDirRequest,
            crate::dto::UiConfigResponse,
            crate::dto::UpdateUiConfigRequest,
            // database DTOs
            crate::dto::DatabaseStatsResponse,
            // duplicate DTOs
            crate::dto::DuplicateGroupResponse,
            // enrichment DTOs
            crate::dto::ExternalMetadataResponse,
            // export local types
            crate::routes::export::ExportRequest,
            // health local types
            crate::routes::health::HealthResponse,
            crate::routes::health::DatabaseHealth,
            crate::routes::health::FilesystemHealth,
            crate::routes::health::CacheHealth,
            crate::routes::health::DetailedHealthResponse,
            crate::routes::health::JobsHealth,
            // integrity local types
            crate::routes::integrity::OrphanResolveRequest,
            crate::routes::integrity::VerifyIntegrityRequest,
            crate::routes::integrity::GenerateThumbnailsRequest,
            // media DTOs
            crate::dto::MediaResponse,
            crate::dto::CustomFieldResponse,
            crate::dto::ImportRequest,
            crate::dto::ImportWithOptionsRequest,
            crate::dto::DirectoryImportRequest,
            crate::dto::DirectoryPreviewResponse,
            crate::dto::UpdateMediaRequest,
            crate::dto::MoveMediaRequest,
            crate::dto::RenameMediaRequest,
            crate::dto::BatchMoveRequest,
            crate::dto::BatchImportRequest,
            crate::dto::SetCustomFieldRequest,
            crate::dto::MediaCountResponse,
            crate::dto::TrashInfoResponse,
            crate::dto::ImportResponse,
            crate::dto::TrashResponse,
            crate::dto::EmptyTrashResponse,
            crate::dto::BatchImportResponse,
            crate::dto::BatchImportItemResult,
            crate::dto::DirectoryPreviewFile,
            crate::dto::UpdateMediaFullRequest,
            crate::dto::OpenRequest,
            crate::dto::WatchProgressRequest,
            crate::dto::WatchProgressResponse,
            // notes local types
            crate::routes::notes::BacklinksResponse,
            crate::routes::notes::BacklinkItem,
            crate::routes::notes::OutgoingLinksResponse,
            crate::routes::notes::OutgoingLinkItem,
            crate::routes::notes::GraphResponse,
            crate::routes::notes::GraphNodeResponse,
            crate::routes::notes::GraphEdgeResponse,
            crate::routes::notes::ReindexResponse,
            crate::routes::notes::ResolveLinksResponse,
            crate::routes::notes::UnresolvedLinksResponse,
            // photos local types
            crate::routes::photos::TimelineGroup,
            crate::routes::photos::MapMarker,
            // playlists DTOs
            crate::dto::PlaylistResponse,
            crate::dto::CreatePlaylistRequest,
            crate::dto::UpdatePlaylistRequest,
            crate::dto::PlaylistItemRequest,
            crate::dto::ReorderPlaylistRequest,
            // plugins DTOs
            crate::dto::PluginResponse,
            crate::dto::InstallPluginRequest,
            crate::dto::TogglePluginRequest,
            crate::dto::PluginUiPageEntry,
            crate::dto::PluginUiWidgetEntry,
            crate::dto::PluginEventRequest,
            // saved_searches local types
            crate::routes::saved_searches::CreateSavedSearchRequest,
            crate::routes::saved_searches::SavedSearchResponse,
            // scan DTOs
            crate::dto::ScanRequest,
            crate::dto::ScanResponse,
            crate::dto::ScanJobResponse,
            crate::dto::ScanStatusResponse,
            // search DTOs
            crate::dto::SearchParams,
            crate::dto::SearchResponse,
            crate::dto::SearchRequestBody,
            crate::dto::PaginationParams,
            // sharing DTOs
            crate::dto::CreateShareRequest,
            crate::dto::UpdateShareRequest,
            crate::dto::ShareResponse,
            crate::dto::SharePermissionsRequest,
            crate::dto::BatchDeleteSharesRequest,
            crate::dto::AccessSharedRequest,
            crate::dto::SharedContentResponse,
            crate::dto::ShareActivityResponse,
            crate::dto::ShareNotificationResponse,
            // social DTOs
            crate::dto::RatingResponse,
            crate::dto::CreateRatingRequest,
            crate::dto::CommentResponse,
            crate::dto::CreateCommentRequest,
            crate::dto::FavoriteRequest,
            crate::dto::CreateShareLinkRequest,
            crate::dto::ShareLinkResponse,
            // statistics DTOs
            crate::dto::LibraryStatisticsResponse,
            crate::dto::TypeCountResponse,
            crate::dto::ScheduledTaskResponse,
            // subtitles DTOs
            crate::dto::SubtitleResponse,
            crate::dto::AddSubtitleRequest,
            crate::dto::UpdateSubtitleOffsetRequest,
            crate::dto::SubtitleListResponse,
            crate::dto::SubtitleTrackInfoResponse,
            // sync DTOs
            crate::dto::RegisterDeviceRequest,
            crate::dto::DeviceResponse,
            crate::dto::DeviceRegistrationResponse,
            crate::dto::UpdateDeviceRequest,
            crate::dto::GetChangesParams,
            crate::dto::SyncChangeResponse,
            crate::dto::ChangesResponse,
            crate::dto::ReportChangesRequest,
            crate::dto::ReportChangesResponse,
            crate::dto::AcknowledgeChangesRequest,
            crate::dto::ConflictResponse,
            crate::dto::ResolveConflictRequest,
            crate::dto::CreateUploadSessionRequest,
            crate::dto::UploadSessionResponse,
            crate::dto::ChunkUploadedResponse,
            crate::dto::MostViewedResponse,
            // tags DTOs
            crate::dto::TagResponse,
            crate::dto::CreateTagRequest,
            crate::dto::TagMediaRequest,
            // transcode DTOs
            crate::dto::TranscodeSessionResponse,
            crate::dto::CreateTranscodeRequest,
            // upload DTOs
            crate::dto::UploadResponse,
            crate::dto::ManagedStorageStatsResponse,
            // users DTOs
            crate::dto::UserResponse,
            crate::dto::UserLibraryResponse,
            crate::dto::GrantLibraryAccessRequest,
            crate::dto::RevokeLibraryAccessRequest,
            // webhooks local types
            crate::routes::webhooks::WebhookInfo,
        )
    ),
    // Tag descriptions shown as section headers in Swagger UI; each tag name
    // here should match the tag used by the corresponding handlers.
    tags(
        (name = "analytics", description = "Usage analytics and viewing history"),
        (name = "audit", description = "Audit log entries"),
        (name = "auth", description = "Authentication and session management"),
        (name = "backup", description = "Database backup"),
        (name = "books", description = "Book metadata, series, authors, and reading progress"),
        (name = "collections", description = "Media collections"),
        (name = "config", description = "Server configuration"),
        (name = "database", description = "Database administration"),
        (name = "duplicates", description = "Duplicate media detection"),
        (name = "enrichment", description = "External metadata enrichment"),
        (name = "export", description = "Media library export"),
        (name = "health", description = "Server health checks"),
        (name = "integrity", description = "Library integrity checks and repairs"),
        (name = "jobs", description = "Background job management"),
        (name = "media", description = "Media item management"),
        (name = "notes", description = "Markdown notes link graph"),
        (name = "photos", description = "Photo timeline and map view"),
        (name = "playlists", description = "Media playlists"),
        (name = "plugins", description = "Plugin management"),
        (name = "saved_searches", description = "Saved search queries"),
        (name = "scan", description = "Directory scanning"),
        (name = "scheduled_tasks", description = "Scheduled background tasks"),
        (name = "search", description = "Full-text media search"),
        (name = "shares", description = "Media sharing and notifications"),
        (name = "social", description = "Ratings, comments, favorites, and share links"),
        (name = "statistics", description = "Library statistics"),
        (name = "streaming", description = "HLS and DASH adaptive streaming"),
        (name = "subtitles", description = "Media subtitle management"),
        (name = "sync", description = "Multi-device library synchronization"),
        (name = "tags", description = "Media tag management"),
        (name = "transcode", description = "Video transcoding sessions"),
        (name = "upload", description = "File upload and managed storage"),
        (name = "users", description = "User and library access management"),
        (name = "webhooks", description = "Webhook configuration"),
    ),
    // Default security requirement: bearer token on every operation unless a
    // handler overrides it. The scheme itself is registered by `SecurityAddon`.
    security(
        ("bearer_auth" = [])
    ),
    modifiers(&SecurityAddon)
)]
pub struct ApiDoc;
/// Document modifier that registers the `bearer_auth` HTTP Bearer security
/// scheme referenced by the `security(...)` clause on [`ApiDoc`].
struct SecurityAddon;

impl utoipa::Modify for SecurityAddon {
    fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) {
        // Create the components object on demand. The previous `if let
        // Some(...)` guard silently skipped registration when the document
        // had no components yet, leaving `bearer_auth` referenced by the
        // global security requirement but never defined.
        let components = openapi
            .components
            .get_or_insert_with(utoipa::openapi::Components::default);
        components.add_security_scheme(
            "bearer_auth",
            utoipa::openapi::security::SecurityScheme::Http(
                utoipa::openapi::security::Http::new(
                    utoipa::openapi::security::HttpAuthScheme::Bearer,
                ),
            ),
        );
    }
}

View file

@ -0,0 +1,653 @@
use std::sync::Arc;
use axum::{
Router,
extract::DefaultBodyLimit,
http::{HeaderValue, Method, header},
middleware,
routing::{delete, get, patch, post, put},
};
use tower::ServiceBuilder;
use tower_governor::{GovernorLayer, governor::GovernorConfigBuilder};
use tower_http::{
cors::CorsLayer,
set_header::SetResponseHeaderLayer,
trace::TraceLayer,
};
use utoipa::OpenApi as _;
use utoipa_swagger_ui::SwaggerUi;
use crate::{api_doc::ApiDoc, auth, routes, state::AppState};
/// Build the application router without TLS-derived security headers.
///
/// Convenience wrapper around [`create_router_with_tls`] that supplies no
/// TLS configuration (so no HSTS header is attached).
pub fn create_router(
    state: AppState,
    rate_limits: &pinakes_core::config::RateLimitConfig,
) -> Router {
    let tls_config = None;
    create_router_with_tls(state, rate_limits, tls_config)
}
/// Construct a shared governor rate-limiter configuration from a
/// requests-per-second replenish rate and a burst size, keyed by peer IP.
///
/// # Panics
///
/// Panics if the values produce an invalid configuration; callers are
/// expected to have validated the rate-limit settings at startup.
fn build_governor(
    per_second: u64,
    burst_size: u32,
) -> Arc<
    tower_governor::governor::GovernorConfig<
        tower_governor::key_extractor::PeerIpKeyExtractor,
        governor::middleware::NoOpMiddleware,
    >,
> {
    let config = GovernorConfigBuilder::default()
        .per_second(per_second)
        .burst_size(burst_size)
        .finish()
        .expect("rate limit config was validated at startup");
    Arc::new(config)
}
/// Create the router with TLS configuration for security headers
pub fn create_router_with_tls(
state: AppState,
rate_limits: &pinakes_core::config::RateLimitConfig,
tls_config: Option<&pinakes_core::config::TlsConfig>,
) -> Router {
let swagger_ui_enabled = state
.config
.try_read()
.is_ok_and(|cfg| cfg.server.swagger_ui);
let global_governor = build_governor(
rate_limits.global_per_second,
rate_limits.global_burst_size,
);
let login_governor =
build_governor(rate_limits.login_per_second, rate_limits.login_burst_size);
let search_governor = build_governor(
rate_limits.search_per_second,
rate_limits.search_burst_size,
);
let stream_governor = build_governor(
rate_limits.stream_per_second,
rate_limits.stream_burst_size,
);
let share_governor =
build_governor(rate_limits.share_per_second, rate_limits.share_burst_size);
// Login route with strict rate limiting
let login_route = Router::new()
.route("/auth/login", post(routes::auth::login))
.layer(GovernorLayer::new(login_governor));
// Share routes with dedicated rate limiting
let share_routes = Router::new()
.route("/s/{token}", get(routes::social::access_shared_media))
.route("/shared/{token}", get(routes::shares::access_shared))
.layer(GovernorLayer::new(share_governor));
// Public routes (no auth required)
let public_routes = Router::new()
// Kubernetes-style health probes (no auth required for orchestration)
.route("/health/live", get(routes::health::liveness))
.route("/health/ready", get(routes::health::readiness));
// Search routes with enhanced rate limiting (10 req/min)
let search_routes = Router::new()
.route("/search", get(routes::search::search))
.route("/search", post(routes::search::search_post))
.layer(GovernorLayer::new(search_governor));
// Streaming routes with enhanced rate limiting (5 concurrent)
let streaming_routes = Router::new()
.route("/media/{id}/stream", get(routes::media::stream_media))
.layer(GovernorLayer::new(stream_governor));
// Read-only routes: any authenticated user (Viewer+)
let viewer_routes = Router::new()
.route("/health", get(routes::health::health))
.route("/health/detailed", get(routes::health::health_detailed))
.route("/media/count", get(routes::media::get_media_count))
.route("/media", get(routes::media::list_media))
.route("/media/{id}", get(routes::media::get_media))
.route("/media/{id}/thumbnail", get(routes::media::get_thumbnail))
.route("/media/{media_id}/tags", get(routes::tags::get_media_tags))
// Books API
.nest("/books", routes::books::routes())
// Photos API
.nest("/photos", routes::photos::routes())
.route("/tags", get(routes::tags::list_tags))
.route("/tags/{id}", get(routes::tags::get_tag))
.route("/collections", get(routes::collections::list_collections))
.route(
"/collections/{id}",
get(routes::collections::get_collection),
)
.route(
"/collections/{id}/members",
get(routes::collections::get_members),
)
.route("/audit", get(routes::audit::list_audit))
.route("/scan/status", get(routes::scan::scan_status))
.route("/config", get(routes::config::get_config))
.route("/config/ui", get(routes::config::get_ui_config))
.route("/database/stats", get(routes::database::database_stats))
.route("/duplicates", get(routes::duplicates::list_duplicates))
// Statistics
.route("/statistics", get(routes::statistics::library_statistics))
// Scheduled tasks (read)
.route(
"/tasks/scheduled",
get(routes::scheduled_tasks::list_scheduled_tasks),
)
// Jobs
.route("/jobs", get(routes::jobs::list_jobs))
.route("/jobs/{id}", get(routes::jobs::get_job))
// Saved searches (read)
.route(
"/searches/saved",
get(routes::saved_searches::list_saved_searches),
)
// Webhooks (read)
.route("/webhooks", get(routes::webhooks::list_webhooks))
// Auth endpoints (self-service); login is handled separately with a stricter rate limit
.route("/auth/logout", post(routes::auth::logout))
.route("/auth/me", get(routes::auth::me))
.route("/auth/refresh", post(routes::auth::refresh))
.route("/auth/revoke-all", post(routes::auth::revoke_all_sessions))
// Social: ratings & comments (read)
.route(
"/media/{id}/ratings",
get(routes::social::get_media_ratings),
)
.route(
"/media/{id}/comments",
get(routes::social::get_media_comments),
)
// Favorites (read)
.route("/favorites", get(routes::social::list_favorites))
// Playlists (read)
.route("/playlists", get(routes::playlists::list_playlists))
.route("/playlists/{id}", get(routes::playlists::get_playlist))
.route("/playlists/{id}/items", get(routes::playlists::list_items))
.route(
"/playlists/{id}/shuffle",
post(routes::playlists::shuffle_playlist),
)
// Analytics (read)
.route(
"/analytics/most-viewed",
get(routes::analytics::get_most_viewed),
)
.route(
"/analytics/recently-viewed",
get(routes::analytics::get_recently_viewed),
)
.route("/analytics/events", post(routes::analytics::record_event))
.route(
"/media/{id}/progress",
get(routes::analytics::get_watch_progress),
)
.route(
"/media/{id}/progress",
post(routes::analytics::update_watch_progress),
)
// Subtitles (read)
.route(
"/media/{id}/subtitles",
get(routes::subtitles::list_subtitles),
)
.route(
"/media/{media_id}/subtitles/{subtitle_id}/content",
get(routes::subtitles::get_subtitle_content),
)
// Enrichment (read)
.route(
"/media/{id}/external-metadata",
get(routes::enrichment::get_external_metadata),
)
// Transcode (read)
.route("/transcode/{id}", get(routes::transcode::get_session))
.route("/transcode", get(routes::transcode::list_sessions))
// Streaming
.route(
"/media/{id}/stream/hls/master.m3u8",
get(routes::streaming::hls_master_playlist),
)
.route(
"/media/{id}/stream/hls/{profile}/playlist.m3u8",
get(routes::streaming::hls_variant_playlist),
)
.route(
"/media/{id}/stream/hls/{profile}/{segment}",
get(routes::streaming::hls_segment),
)
.route(
"/media/{id}/stream/dash/manifest.mpd",
get(routes::streaming::dash_manifest),
)
.route(
"/media/{id}/stream/dash/{profile}/{segment}",
get(routes::streaming::dash_segment),
)
// Managed storage (read)
.route("/media/{id}/download", get(routes::upload::download_file))
.route("/managed/stats", get(routes::upload::managed_stats))
// Sync (read)
.route("/sync/devices", get(routes::sync::list_devices))
.route("/sync/devices/{id}", get(routes::sync::get_device))
.route("/sync/changes", get(routes::sync::get_changes))
.route("/sync/conflicts", get(routes::sync::list_conflicts))
.route("/sync/upload/{id}", get(routes::sync::get_upload_status))
.route("/sync/download/{*path}", get(routes::sync::download_file))
// Enhanced sharing (read)
.route("/shares/outgoing", get(routes::shares::list_outgoing))
.route("/shares/incoming", get(routes::shares::list_incoming))
.route("/shares/{id}", get(routes::shares::get_share))
.route("/shares/{id}/activity", get(routes::shares::get_activity))
.route(
"/notifications/shares",
get(routes::shares::get_notifications),
)
// Markdown notes/links (read)
.route("/media/{id}/backlinks", get(routes::notes::get_backlinks))
.route(
"/media/{id}/outgoing-links",
get(routes::notes::get_outgoing_links),
)
.nest("/notes", routes::notes::routes());
// Write routes: Editor+ required
let editor_routes = Router::new()
.route("/media/import", post(routes::media::import_media))
.route(
"/media/import/options",
post(routes::media::import_with_options),
)
.route("/media/import/batch", post(routes::media::batch_import))
.route(
"/media/import/directory",
post(routes::media::import_directory_endpoint),
)
.route(
"/media/import/preview",
post(routes::media::preview_directory),
)
.route("/media/batch/tag", post(routes::media::batch_tag))
.route("/media/batch/delete", post(routes::media::batch_delete))
.route("/media/batch/update", patch(routes::media::batch_update))
.route(
"/media/batch/collection",
post(routes::media::batch_add_to_collection),
)
.route("/media/all", delete(routes::media::delete_all_media))
.route("/media/{id}", patch(routes::media::update_media))
.route("/media/{id}", delete(routes::media::permanent_delete_media))
.route("/media/{id}/open", post(routes::media::open_media))
// File management
.route("/media/{id}/rename", patch(routes::media::rename_media))
.route(
"/media/{id}/move",
patch(routes::media::move_media_endpoint),
)
.route("/media/{id}/trash", post(routes::media::soft_delete_media))
.route("/media/{id}/restore", post(routes::media::restore_media))
.route("/media/batch/move", post(routes::media::batch_move_media))
// Trash management
.route("/trash", get(routes::media::list_trash))
.route("/trash/info", get(routes::media::trash_info))
.route("/trash", delete(routes::media::empty_trash))
.route(
"/media/{id}/custom-fields",
post(routes::media::set_custom_field),
)
.route(
"/media/{id}/custom-fields/{name}",
delete(routes::media::delete_custom_field),
)
// Markdown notes/links (write)
.route(
"/media/{id}/reindex-links",
post(routes::notes::reindex_links),
)
.route("/tags", post(routes::tags::create_tag))
.route("/tags/{id}", delete(routes::tags::delete_tag))
.route("/media/{media_id}/tags", post(routes::tags::tag_media))
.route(
"/media/{media_id}/tags/{tag_id}",
delete(routes::tags::untag_media),
)
.route("/collections", post(routes::collections::create_collection))
.route(
"/collections/{id}",
delete(routes::collections::delete_collection),
)
.route(
"/collections/{id}/members",
post(routes::collections::add_member),
)
.route(
"/collections/{collection_id}/members/{media_id}",
delete(routes::collections::remove_member),
)
.route("/scan", post(routes::scan::trigger_scan))
.route("/jobs/{id}/cancel", post(routes::jobs::cancel_job))
// Saved searches (write)
.route(
"/searches/saved",
post(routes::saved_searches::create_saved_search),
)
.route(
"/searches/saved/{id}",
delete(routes::saved_searches::delete_saved_search),
)
// Integrity
.route(
"/jobs/orphan-detection",
post(routes::integrity::trigger_orphan_detection),
)
.route(
"/jobs/verify-integrity",
post(routes::integrity::trigger_verify_integrity),
)
.route(
"/jobs/cleanup-thumbnails",
post(routes::integrity::trigger_cleanup_thumbnails),
)
.route(
"/jobs/generate-thumbnails",
post(routes::integrity::generate_all_thumbnails),
)
.route("/orphans/resolve", post(routes::integrity::resolve_orphans))
// Export
.route("/jobs/export", post(routes::export::trigger_export))
.route(
"/jobs/export/options",
post(routes::export::trigger_export_with_options),
)
// Scheduled tasks (write)
.route(
"/tasks/scheduled/{id}/toggle",
post(routes::scheduled_tasks::toggle_scheduled_task),
)
.route(
"/tasks/scheduled/{id}/run-now",
post(routes::scheduled_tasks::run_scheduled_task_now),
)
// Webhooks
.route("/webhooks/test", post(routes::webhooks::test_webhook))
// Social: ratings & comments (write)
.route("/media/{id}/ratings", post(routes::social::rate_media))
.route("/media/{id}/comments", post(routes::social::add_comment))
// Favorites (write)
.route("/favorites", post(routes::social::add_favorite))
.route(
"/favorites/{media_id}",
delete(routes::social::remove_favorite),
)
// Share links
.route("/share", post(routes::social::create_share_link))
// Playlists (write)
.route("/playlists", post(routes::playlists::create_playlist))
.route("/playlists/{id}", patch(routes::playlists::update_playlist))
.route(
"/playlists/{id}",
delete(routes::playlists::delete_playlist),
)
.route("/playlists/{id}/items", post(routes::playlists::add_item))
.route(
"/playlists/{id}/items/{media_id}",
delete(routes::playlists::remove_item),
)
.route(
"/playlists/{id}/reorder",
post(routes::playlists::reorder_item),
)
// Subtitles (write)
.route(
"/media/{id}/subtitles",
post(routes::subtitles::add_subtitle),
)
.route(
"/subtitles/{id}",
delete(routes::subtitles::delete_subtitle),
)
.route(
"/subtitles/{id}/offset",
patch(routes::subtitles::update_offset),
)
// Enrichment (write)
.route(
"/media/{id}/enrich",
post(routes::enrichment::trigger_enrichment),
)
.route("/jobs/enrich", post(routes::enrichment::batch_enrich))
// Transcode (write)
.route(
"/media/{id}/transcode",
post(routes::transcode::start_transcode),
)
.route("/transcode/{id}", delete(routes::transcode::cancel_session))
// Managed storage (write)
.route("/upload", post(routes::upload::upload_file))
.route(
"/media/{id}/move-to-managed",
post(routes::upload::move_to_managed),
)
// Sync (write)
.route("/sync/devices", post(routes::sync::register_device))
.route("/sync/devices/{id}", put(routes::sync::update_device))
.route("/sync/devices/{id}", delete(routes::sync::delete_device))
.route(
"/sync/devices/{id}/token",
post(routes::sync::regenerate_token),
)
.route("/sync/report", post(routes::sync::report_changes))
.route("/sync/ack", post(routes::sync::acknowledge_changes))
.route(
"/sync/conflicts/{id}/resolve",
post(routes::sync::resolve_conflict),
)
.route("/sync/upload", post(routes::sync::create_upload))
.route(
"/sync/upload/{id}/chunks/{index}",
put(routes::sync::upload_chunk),
)
.route(
"/sync/upload/{id}/complete",
post(routes::sync::complete_upload),
)
.route("/sync/upload/{id}", delete(routes::sync::cancel_upload))
// Enhanced sharing (write)
.route("/shares", post(routes::shares::create_share))
.route("/shares/{id}", patch(routes::shares::update_share))
.route("/shares/{id}", delete(routes::shares::delete_share))
.route("/shares/batch/delete", post(routes::shares::batch_delete))
.route(
"/notifications/shares/{id}/read",
post(routes::shares::mark_notification_read),
)
.route(
"/notifications/shares/read-all",
post(routes::shares::mark_all_read),
)
.layer(middleware::from_fn(auth::require_editor));
// Admin-only routes: destructive/config operations
let admin_routes = Router::new()
.route(
"/config/scanning",
put(routes::config::update_scanning_config),
)
.route("/config/roots", post(routes::config::add_root))
.route("/config/roots", delete(routes::config::remove_root))
.route("/config/ui", put(routes::config::update_ui_config))
.route("/database/vacuum", post(routes::database::vacuum_database))
.route("/database/clear", post(routes::database::clear_database))
.route("/database/backup", post(routes::backup::create_backup))
// Plugin management
.route("/plugins", get(routes::plugins::list_plugins))
.route("/plugins/events", post(routes::plugins::emit_plugin_event))
.route("/plugins/ui-pages", get(routes::plugins::list_plugin_ui_pages))
.route("/plugins/ui-widgets", get(routes::plugins::list_plugin_ui_widgets))
.route("/plugins/ui-theme-extensions", get(routes::plugins::list_plugin_ui_theme_extensions))
.route("/plugins/{id}", get(routes::plugins::get_plugin))
.route("/plugins/install", post(routes::plugins::install_plugin))
.route("/plugins/{id}", delete(routes::plugins::uninstall_plugin))
.route("/plugins/{id}/toggle", post(routes::plugins::toggle_plugin))
.route("/plugins/{id}/reload", post(routes::plugins::reload_plugin))
// User management
.route("/users", get(routes::users::list_users))
.route("/users", post(routes::users::create_user))
.route("/users/{id}", get(routes::users::get_user))
.route("/users/{id}", patch(routes::users::update_user))
.route("/users/{id}", delete(routes::users::delete_user))
.route(
"/users/{id}/libraries",
get(routes::users::get_user_libraries),
)
.route(
"/users/{id}/libraries",
post(routes::users::grant_library_access),
)
.route(
"/users/{id}/libraries",
delete(routes::users::revoke_library_access),
)
// Session management (admin)
.route("/auth/sessions", get(routes::auth::list_active_sessions))
.layer(middleware::from_fn(auth::require_admin));
// CORS configuration: use config-driven origins if specified,
// otherwise fall back to default localhost origins
let cors = {
let origins: Vec<HeaderValue> =
if let Ok(config_read) = state.config.try_read() {
if config_read.server.cors_enabled
&& !config_read.server.cors_origins.is_empty()
{
config_read
.server
.cors_origins
.iter()
.filter_map(|o| HeaderValue::from_str(o).ok())
.collect()
} else {
vec![
HeaderValue::from_static("http://localhost:3000"),
HeaderValue::from_static("http://127.0.0.1:3000"),
HeaderValue::from_static("tauri://localhost"),
]
}
} else {
vec![
HeaderValue::from_static("http://localhost:3000"),
HeaderValue::from_static("http://127.0.0.1:3000"),
HeaderValue::from_static("tauri://localhost"),
]
};
CorsLayer::new()
.allow_origin(origins)
.allow_methods([
Method::GET,
Method::POST,
Method::PUT,
Method::PATCH,
Method::DELETE,
])
.allow_headers([header::CONTENT_TYPE, header::AUTHORIZATION])
.allow_credentials(true)
};
// Create protected routes with auth middleware
let protected_api = Router::new()
.merge(viewer_routes)
.merge(search_routes)
.merge(streaming_routes)
.merge(editor_routes)
.merge(admin_routes)
.layer(middleware::from_fn_with_state(
state.clone(),
auth::require_auth,
));
// Combine protected, public, and share routes
let full_api = Router::new()
.merge(login_route)
.merge(public_routes)
.merge(share_routes)
.merge(protected_api);
// Build security headers layer
let security_headers = ServiceBuilder::new()
// Prevent MIME type sniffing
.layer(SetResponseHeaderLayer::overriding(
header::X_CONTENT_TYPE_OPTIONS,
HeaderValue::from_static("nosniff"),
))
// Prevent clickjacking
.layer(SetResponseHeaderLayer::overriding(
header::X_FRAME_OPTIONS,
HeaderValue::from_static("DENY"),
))
// XSS protection (legacy but still useful for older browsers)
.layer(SetResponseHeaderLayer::overriding(
header::HeaderName::from_static("x-xss-protection"),
HeaderValue::from_static("1; mode=block"),
))
// Referrer policy
.layer(SetResponseHeaderLayer::overriding(
header::REFERRER_POLICY,
HeaderValue::from_static("strict-origin-when-cross-origin"),
))
// Permissions policy (disable unnecessary features)
.layer(SetResponseHeaderLayer::overriding(
header::HeaderName::from_static("permissions-policy"),
HeaderValue::from_static("geolocation=(), microphone=(), camera=()"),
))
// Content Security Policy for API responses
.layer(SetResponseHeaderLayer::overriding(
header::CONTENT_SECURITY_POLICY,
HeaderValue::from_static("default-src 'none'; frame-ancestors 'none'"),
));
let base_router = Router::new()
.nest("/api/v1", full_api)
.layer(DefaultBodyLimit::max(10 * 1024 * 1024))
.layer(GovernorLayer::new(global_governor))
.layer(TraceLayer::new_for_http())
.layer(cors)
.layer(security_headers);
let router = if swagger_ui_enabled {
base_router.merge(
SwaggerUi::new("/api/docs").url("/api/openapi.json", ApiDoc::openapi()),
)
} else {
base_router
};
// Add HSTS header when TLS is enabled
if let Some(tls) = tls_config {
if tls.enabled && tls.hsts_enabled {
let hsts_value =
format!("max-age={}; includeSubDomains", tls.hsts_max_age);
let hsts_header =
HeaderValue::from_str(&hsts_value).unwrap_or_else(|_| {
HeaderValue::from_static("max-age=31536000; includeSubDomains")
});
router
.layer(SetResponseHeaderLayer::overriding(
header::STRICT_TRANSPORT_SECURITY,
hsts_header,
))
.with_state(state)
} else {
router.with_state(state)
}
} else {
router.with_state(state)
}
}

View file

@ -0,0 +1,248 @@
use axum::{
extract::{Request, State},
http::StatusCode,
middleware::Next,
response::{IntoResponse, Response},
};
use pinakes_core::config::UserRole;
use crate::state::AppState;
/// Constant-time string comparison to prevent timing attacks on API keys.
///
/// The scan always covers `max(len_a, len_b)` bytes, so neither a length
/// difference nor an early byte mismatch shortens the comparison.
fn constant_time_eq(a: &str, b: &str) -> bool {
    let (lhs, rhs) = (a.as_bytes(), b.as_bytes());
    // Reading past the end of the shorter input yields 0, keeping the loop
    // length dependent only on the longer operand.
    let byte_at = |s: &[u8], i: usize| s.get(i).copied().unwrap_or(0);
    // Seed the accumulator with the XOR of the lengths: unequal lengths can
    // never produce 0, regardless of what the byte scan contributes.
    let mismatch = (0..lhs.len().max(rhs.len()))
        .fold(lhs.len() ^ rhs.len(), |acc, i| {
            acc | usize::from(byte_at(lhs, i) ^ byte_at(rhs, i))
        });
    mismatch == 0
}
/// Axum middleware that checks for a valid Bearer token.
///
/// If `accounts.enabled == true`: look up bearer token in database session
/// store. If `accounts.enabled == false`: use existing `api_key` logic
/// (unchanged behavior). Skips authentication for the `/health` and
/// `/auth/login` path suffixes.
///
/// On success the resolved [`UserRole`] and the username `String` are
/// inserted into the request extensions, where `require_editor`,
/// `require_admin`, and route handlers read them back.
pub async fn require_auth(
    State(state): State<AppState>,
    mut request: Request,
    next: Next,
) -> Response {
    let path = request.uri().path().to_string();
    // Always allow health and login endpoints
    if path.ends_with("/health") || path.ends_with("/auth/login") {
        return next.run(request).await;
    }
    let config = state.config.read().await;
    // Check if authentication is explicitly disabled
    if config.server.authentication_disabled {
        // Release the config read guard before awaiting the inner service.
        drop(config);
        tracing::warn!("authentication is disabled - allowing all requests");
        // Every request is treated as the "admin" user with full rights.
        request.extensions_mut().insert(UserRole::Admin);
        request.extensions_mut().insert("admin".to_string());
        return next.run(request).await;
    }
    if config.accounts.enabled {
        drop(config);
        // Session-based auth using database
        let token = request
            .headers()
            .get("authorization")
            .and_then(|v| v.to_str().ok())
            .and_then(|s| s.strip_prefix("Bearer "))
            .map(std::string::ToString::to_string);
        let Some(token) = token else {
            tracing::debug!(path = %path, "rejected: missing Authorization header");
            return unauthorized("missing Authorization header");
        };
        // Look up session in database
        let session_result = state.storage.get_session(&token).await;
        let session = match session_result {
            Ok(Some(session)) => session,
            Ok(None) => {
                tracing::debug!(path = %path, "rejected: invalid session token");
                return unauthorized("invalid or expired session token");
            },
            Err(e) => {
                // Storage failure is a 500, not a 401: the token may be fine.
                tracing::error!(error = %e, "failed to query session from database");
                return (StatusCode::INTERNAL_SERVER_ERROR, "database error")
                    .into_response();
            },
        };
        // Check session expiry
        let now = chrono::Utc::now();
        if session.expires_at < now {
            let username = session.username;
            // Delete expired session in a bounded background task. The
            // semaphore caps concurrent cleanup tasks; if no permit is
            // available the deletion is simply skipped this time around.
            if let Ok(permit) = state.session_semaphore.clone().try_acquire_owned() {
                let storage = state.storage.clone();
                let token_owned = token.clone();
                tokio::spawn(async move {
                    if let Err(e) = storage.delete_session(&token_owned).await {
                        tracing::error!(error = %e, "failed to delete expired session");
                    }
                    drop(permit);
                });
            }
            tracing::info!(username = %username, "session expired");
            return unauthorized("session expired");
        }
        // Update last_accessed timestamp in a bounded background task
        // (fire-and-forget: the response does not wait on it).
        if let Ok(permit) = state.session_semaphore.clone().try_acquire_owned() {
            let storage = state.storage.clone();
            let token_owned = token.clone();
            tokio::spawn(async move {
                if let Err(e) = storage.touch_session(&token_owned).await {
                    tracing::warn!(error = %e, "failed to update session last_accessed");
                }
                drop(permit);
            });
        }
        // Parse role from string
        let role = match session.role.as_str() {
            "admin" => UserRole::Admin,
            "editor" => UserRole::Editor,
            "viewer" => UserRole::Viewer,
            _ => {
                // Unknown roles degrade to least privilege instead of erroring.
                tracing::warn!(role = %session.role, "unknown role, defaulting to viewer");
                UserRole::Viewer
            },
        };
        // Inject role and username into request extensions
        request.extensions_mut().insert(role);
        request.extensions_mut().insert(session.username);
    } else {
        // Legacy API key auth
        // The environment variable takes precedence over the configured key.
        let api_key = std::env::var("PINAKES_API_KEY")
            .ok()
            .or_else(|| config.server.api_key.clone());
        drop(config);
        let Some(ref expected_key) = api_key else {
            tracing::error!("no authentication configured");
            return unauthorized("authentication not configured");
        };
        if expected_key.is_empty() {
            // Empty key is not allowed - must use authentication_disabled flag
            tracing::error!(
                "empty api_key rejected, use authentication_disabled flag instead"
            );
            return unauthorized("authentication not properly configured");
        }
        let auth_header = request
            .headers()
            .get("authorization")
            .and_then(|v| v.to_str().ok());
        match auth_header {
            Some(header) if header.starts_with("Bearer ") => {
                // Strip the 7-byte "Bearer " prefix, then compare in constant
                // time so latency does not leak how much of the key matched.
                let token = &header[7..];
                if !constant_time_eq(token, expected_key.as_str()) {
                    tracing::warn!(path = %path, "rejected: invalid API key");
                    return unauthorized("invalid api key");
                }
            },
            _ => {
                return unauthorized(
                    "missing or malformed Authorization header, expected: Bearer \
                     <api_key>",
                );
            },
        }
        // API key matches, grant admin
        request.extensions_mut().insert(UserRole::Admin);
        request.extensions_mut().insert("admin".to_string());
    }
    next.run(request).await
}
/// Middleware: requires Editor or Admin role.
///
/// Reads the role that `require_auth` placed in the request extensions; a
/// missing extension is treated as the least-privileged `Viewer` role.
pub async fn require_editor(request: Request, next: Next) -> Response {
    let role = request
        .extensions()
        .get::<UserRole>()
        .copied()
        .unwrap_or(UserRole::Viewer);
    // Guard clause: reject before invoking the inner service.
    if !role.can_write() {
        return forbidden("editor role required");
    }
    next.run(request).await
}
/// Middleware: requires Admin role.
pub async fn require_admin(request: Request, next: Next) -> Response {
let role = request
.extensions()
.get::<UserRole>()
.copied()
.unwrap_or(UserRole::Viewer);
if role.can_admin() {
next.run(request).await
} else {
forbidden("admin role required")
}
}
/// Resolve the authenticated username (from request extensions) to a `UserId`.
///
/// Returns an error if the user cannot be found.
pub async fn resolve_user_id(
storage: &pinakes_core::storage::DynStorageBackend,
username: &str,
) -> Result<pinakes_core::users::UserId, crate::error::ApiError> {
match storage.get_user_by_username(username).await {
Ok(user) => Ok(user.id),
Err(e) => {
tracing::warn!(username = %username, error = ?e, "failed to resolve user");
Err(crate::error::ApiError(
pinakes_core::error::PinakesError::Authentication(
"user not found".into(),
),
))
},
}
}
/// Build a `401 Unauthorized` response with a JSON body of the form
/// `{"error":"<message>"}`.
fn unauthorized(message: &str) -> Response {
    // Escape backslashes and double quotes so the hand-assembled JSON stays
    // well-formed even if a future caller passes a message containing them.
    // All current call sites use fixed, quote-free strings, so no existing
    // response body changes.
    let escaped = message.replace('\\', "\\\\").replace('"', "\\\"");
    let body = format!(r#"{{"error":"{escaped}"}}"#);
    (
        StatusCode::UNAUTHORIZED,
        [("content-type", "application/json")],
        body,
    )
        .into_response()
}
/// Build a `403 Forbidden` response with a JSON body of the form
/// `{"error":"<message>"}`.
fn forbidden(message: &str) -> Response {
    // Escape backslashes and double quotes so the hand-assembled JSON stays
    // well-formed even if a future caller passes a message containing them.
    // All current call sites use fixed, quote-free strings, so no existing
    // response body changes.
    let escaped = message.replace('\\', "\\\\").replace('"', "\\\"");
    let body = format!(r#"{{"error":"{escaped}"}}"#);
    (
        StatusCode::FORBIDDEN,
        [("content-type", "application/json")],
        body,
    )
        .into_response()
}

View file

@ -0,0 +1,35 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A usage/analytics event as returned to API clients.
///
/// All IDs are serialized as strings; `media_id` and `user_id` serialize as
/// `null` when the event is not tied to a specific item or user.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UsageEventResponse {
    pub id: String,
    pub media_id: Option<String>,
    pub user_id: Option<String>,
    pub event_type: String,
    pub timestamp: DateTime<Utc>,
    pub duration_secs: Option<f64>,
}

impl From<pinakes_core::analytics::UsageEvent> for UsageEventResponse {
    /// Flatten the core event for serialization: newtype-wrapped IDs become
    /// their UUID string form and the event type uses its `Display` output.
    fn from(e: pinakes_core::analytics::UsageEvent) -> Self {
        Self {
            id: e.id.to_string(),
            media_id: e.media_id.map(|m| m.0.to_string()),
            user_id: e.user_id.map(|u| u.0.to_string()),
            event_type: e.event_type.to_string(),
            timestamp: e.timestamp,
            duration_secs: e.duration_secs,
        }
    }
}

/// Request body for recording a new usage event.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RecordUsageEventRequest {
    pub media_id: Option<Uuid>,
    pub event_type: String,
    pub duration_secs: Option<f64>,
    // Arbitrary JSON payload; exposed to OpenAPI as a free-form object.
    #[schema(value_type = Option<Object>)]
    pub context: Option<serde_json::Value>,
}

View file

@ -0,0 +1,23 @@
use chrono::{DateTime, Utc};
use serde::Serialize;
/// An audit-log entry as returned to API clients; IDs are stringified.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct AuditEntryResponse {
    pub id: String,
    pub media_id: Option<String>,
    pub action: String,
    pub details: Option<String>,
    pub timestamp: DateTime<Utc>,
}

impl From<pinakes_core::model::AuditEntry> for AuditEntryResponse {
    /// Convert the core audit entry, turning the ID newtypes and the action
    /// value into their string (`Display`) forms.
    fn from(entry: pinakes_core::model::AuditEntry) -> Self {
        Self {
            id: entry.id.to_string(),
            media_id: entry.media_id.map(|id| id.0.to_string()),
            action: entry.action.to_string(),
            details: entry.details,
            timestamp: entry.timestamp,
        }
    }
}

View file

@ -0,0 +1,36 @@
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Request to attach each of the given tags to each of the given media items.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchTagRequest {
    pub media_ids: Vec<Uuid>,
    pub tag_ids: Vec<Uuid>,
}

/// Request to place the given media items into a single collection.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchCollectionRequest {
    pub media_ids: Vec<Uuid>,
    pub collection_id: Uuid,
}

/// Request to delete several media items in one call.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchDeleteRequest {
    pub media_ids: Vec<Uuid>,
}

/// Request to apply the same metadata edits to several media items.
/// Every metadata field is optional.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchUpdateRequest {
    pub media_ids: Vec<Uuid>,
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub description: Option<String>,
}

/// Outcome of a batch operation: the number of items processed and any
/// per-item error messages collected along the way.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BatchOperationResponse {
    pub processed: usize,
    pub errors: Vec<String>,
}

View file

@ -0,0 +1,42 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A collection as returned to API clients; the ID and kind are stringified.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct CollectionResponse {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    pub kind: String,
    // Query string for filter-driven collections, if any.
    pub filter_query: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

/// Request body for creating a collection.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateCollectionRequest {
    pub name: String,
    pub kind: String,
    pub description: Option<String>,
    pub filter_query: Option<String>,
}

/// Request body for adding one media item to a collection, optionally at a
/// specific position.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AddMemberRequest {
    pub media_id: Uuid,
    pub position: Option<i32>,
}

impl From<pinakes_core::model::Collection> for CollectionResponse {
    /// Convert the core collection, stringifying its ID and kind.
    fn from(col: pinakes_core::model::Collection) -> Self {
        Self {
            id: col.id.to_string(),
            name: col.name,
            description: col.description,
            kind: col.kind.to_string(),
            filter_query: col.filter_query,
            created_at: col.created_at,
            updated_at: col.updated_at,
        }
    }
}

View file

@ -0,0 +1,75 @@
use serde::{Deserialize, Serialize};
/// Full server configuration snapshot returned to clients.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ConfigResponse {
    pub backend: String,
    pub database_path: Option<String>,
    pub roots: Vec<String>,
    pub scanning: ScanningConfigResponse,
    pub server: ServerConfigResponse,
    pub ui: UiConfigResponse,
    // Where the config was loaded from, and whether it can be written back.
    pub config_path: Option<String>,
    pub config_writable: bool,
}

/// Scanner settings as exposed over the API.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanningConfigResponse {
    pub watch: bool,
    pub poll_interval_secs: u64,
    pub ignore_patterns: Vec<String>,
}

/// Server network binding information.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ServerConfigResponse {
    pub host: String,
    pub port: u16,
}

/// Partial-update request for scanner settings; every field is optional.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateScanningRequest {
    pub watch: Option<bool>,
    pub poll_interval_secs: Option<u64>,
    pub ignore_patterns: Option<Vec<String>>,
}

/// Request naming a library root directory (used for both add and remove).
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RootDirRequest {
    pub path: String,
}

// UI Config

/// UI preferences as stored in the server configuration.
#[derive(Debug, Serialize, Deserialize, Clone, utoipa::ToSchema)]
pub struct UiConfigResponse {
    pub theme: String,
    pub default_view: String,
    pub default_page_size: usize,
    pub default_view_mode: String,
    pub auto_play_media: bool,
    pub show_thumbnails: bool,
    pub sidebar_collapsed: bool,
}

/// Partial-update request for UI preferences; every field is optional.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateUiConfigRequest {
    pub theme: Option<String>,
    pub default_view: Option<String>,
    pub default_page_size: Option<usize>,
    pub default_view_mode: Option<String>,
    pub auto_play_media: Option<bool>,
    pub show_thumbnails: Option<bool>,
    pub sidebar_collapsed: Option<bool>,
}

impl From<&pinakes_core::config::UiConfig> for UiConfigResponse {
    /// Copy the core UI config field-for-field, cloning the owned strings.
    fn from(ui: &pinakes_core::config::UiConfig) -> Self {
        Self {
            theme: ui.theme.clone(),
            default_view: ui.default_view.clone(),
            default_page_size: ui.default_page_size,
            default_view_mode: ui.default_view_mode.clone(),
            auto_play_media: ui.auto_play_media,
            show_thumbnails: ui.show_thumbnails,
            sidebar_collapsed: ui.sidebar_collapsed,
        }
    }
}

View file

@ -0,0 +1,38 @@
use chrono::{DateTime, Utc};
use serde::Serialize;
/// Externally-sourced metadata attached to a media item, as returned to API
/// clients. The persisted JSON blob is parsed into a real JSON value.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ExternalMetadataResponse {
    pub id: String,
    pub media_id: String,
    pub source: String,
    pub external_id: Option<String>,
    // Free-form JSON; exposed to OpenAPI as a generic object.
    #[schema(value_type = Object)]
    pub metadata: serde_json::Value,
    pub confidence: f64,
    pub last_updated: DateTime<Utc>,
}

impl From<pinakes_core::enrichment::ExternalMetadata>
    for ExternalMetadataResponse
{
    /// Parse the stored `metadata_json` string into a `serde_json::Value`.
    /// Malformed stored JSON is logged and replaced with `null` rather than
    /// failing the whole response.
    fn from(m: pinakes_core::enrichment::ExternalMetadata) -> Self {
        let metadata = serde_json::from_str(&m.metadata_json).unwrap_or_else(|e| {
            tracing::warn!(
                "failed to deserialize external metadata JSON for media {}: {}",
                m.media_id.0,
                e
            );
            serde_json::Value::Null
        });
        Self {
            id: m.id.to_string(),
            media_id: m.media_id.0.to_string(),
            source: m.source.to_string(),
            external_id: m.external_id,
            metadata,
            confidence: m.confidence,
            last_updated: m.last_updated,
        }
    }
}

View file

@ -0,0 +1,386 @@
use std::path::{Path, PathBuf};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Strip the longest matching root prefix from `full_path`, returning a
/// forward-slash-separated relative path string. Falls back to the full path
/// string when no root matches. If `roots` is empty, returns the full path as a
/// string so internal callers that have not yet migrated still work.
#[must_use]
pub fn relativize_path(full_path: &Path, roots: &[PathBuf]) -> String {
    // Among all roots that prefix `full_path`, keep the deepest one (most
    // components). Ties keep the earliest candidate, mirroring a strict `>`.
    let best = roots
        .iter()
        .filter(|root| full_path.starts_with(root))
        .fold(None::<&PathBuf>, |winner, candidate| match winner {
            Some(w) if w.components().count() >= candidate.components().count() => {
                Some(w)
            },
            _ => Some(candidate),
        });
    match best.and_then(|root| full_path.strip_prefix(root).ok()) {
        // Normalise to forward slashes on all platforms.
        Some(rel) => rel
            .components()
            .map(|c| c.as_os_str().to_string_lossy())
            .collect::<Vec<_>>()
            .join("/"),
        None => full_path.to_string_lossy().into_owned(),
    }
}
/// A media item as returned to API clients.
///
/// `path` is root-relative when built via `MediaResponse::new` with the
/// configured roots; the plain `From` impl leaves it as the full path.
/// Internal IDs and hashes are stringified.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MediaResponse {
    pub id: String,
    pub path: String,
    pub file_name: String,
    pub media_type: String,
    pub content_hash: String,
    pub file_size: u64,
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub duration_secs: Option<f64>,
    pub description: Option<String>,
    // True when the server holds a thumbnail for this item; the thumbnail
    // path itself is never exposed.
    pub has_thumbnail: bool,
    #[schema(value_type = Object)]
    pub custom_fields: FxHashMap<String, CustomFieldResponse>,
    // Photo-specific metadata
    pub date_taken: Option<DateTime<Utc>>,
    pub latitude: Option<f64>,
    pub longitude: Option<f64>,
    pub camera_make: Option<String>,
    pub camera_model: Option<String>,
    pub rating: Option<i32>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    // Markdown links
    pub links_extracted_at: Option<DateTime<Utc>>,
}

/// A single custom metadata field: its declared type name and the value,
/// both as strings.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct CustomFieldResponse {
    pub field_type: String,
    pub value: String,
}
/// Request to import a single file by path.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ImportRequest {
    #[schema(value_type = String)]
    pub path: PathBuf,
}

/// Result of a single import: the (possibly pre-existing) media ID and
/// whether the file was a duplicate of already-imported content.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ImportResponse {
    pub media_id: String,
    pub was_duplicate: bool,
}

/// Metadata edits for one media item; every field is optional.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateMediaRequest {
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub description: Option<String>,
}

// File Management

/// Request to rename a media item's file.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RenameMediaRequest {
    pub new_name: String,
}

/// Request to move one media item to a destination path.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct MoveMediaRequest {
    #[schema(value_type = String)]
    pub destination: PathBuf,
}

/// Request to move several media items to the same destination path.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchMoveRequest {
    pub media_ids: Vec<Uuid>,
    #[schema(value_type = String)]
    pub destination: PathBuf,
}

/// A page of trashed items plus the total trash count.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TrashResponse {
    pub items: Vec<MediaResponse>,
    pub total_count: u64,
}

/// Number of items currently in the trash.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TrashInfoResponse {
    pub count: u64,
}

/// Number of items permanently removed by emptying the trash.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct EmptyTrashResponse {
    pub deleted_count: u64,
}

// Enhanced Import

/// Import a single file, optionally tagging it (existing tag IDs and/or new
/// tag names) and adding it to a collection.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ImportWithOptionsRequest {
    #[schema(value_type = String)]
    pub path: PathBuf,
    pub tag_ids: Option<Vec<Uuid>>,
    pub new_tags: Option<Vec<String>>,
    pub collection_id: Option<Uuid>,
}

/// Import several files with the same tag/collection options applied to each.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchImportRequest {
    #[schema(value_type = Vec<String>)]
    pub paths: Vec<PathBuf>,
    pub tag_ids: Option<Vec<Uuid>>,
    pub new_tags: Option<Vec<String>>,
    pub collection_id: Option<Uuid>,
}

/// Aggregate result of a batch import: per-file results plus summary counts.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BatchImportResponse {
    pub results: Vec<BatchImportItemResult>,
    pub total: usize,
    pub imported: usize,
    pub duplicates: usize,
    pub errors: usize,
}

/// Outcome of importing one file within a batch; exactly one of `media_id`
/// or `error` is expected to carry information.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BatchImportItemResult {
    pub path: String,
    pub media_id: Option<String>,
    pub was_duplicate: bool,
    pub error: Option<String>,
}

/// Import an entire directory with the same tag/collection options.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct DirectoryImportRequest {
    #[schema(value_type = String)]
    pub path: PathBuf,
    pub tag_ids: Option<Vec<Uuid>>,
    pub new_tags: Option<Vec<String>>,
    pub collection_id: Option<Uuid>,
}

/// Preview of a directory import: the files that would be imported, with
/// count and cumulative size.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DirectoryPreviewResponse {
    pub files: Vec<DirectoryPreviewFile>,
    pub total_count: usize,
    pub total_size: u64,
}

/// One candidate file in a directory-import preview.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DirectoryPreviewFile {
    pub path: String,
    pub file_name: String,
    pub media_type: String,
    pub file_size: u64,
}

// Custom Fields

/// Set (or overwrite) one named custom field on a media item.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SetCustomFieldRequest {
    pub name: String,
    pub field_type: String,
    pub value: String,
}

// Media update extended

/// Extended metadata-edit request; currently the same field set as
/// `UpdateMediaRequest`, kept as a separate type for the extended endpoint.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateMediaFullRequest {
    pub title: Option<String>,
    pub artist: Option<String>,
    pub album: Option<String>,
    pub genre: Option<String>,
    pub year: Option<i32>,
    pub description: Option<String>,
}

// Search with sort

/// Count of media items matching a query.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MediaCountResponse {
    pub count: u64,
}

// Duplicates

/// A group of media items sharing the same content hash.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DuplicateGroupResponse {
    pub content_hash: String,
    pub items: Vec<MediaResponse>,
}

// Open

/// Request to open a media item, identified by UUID.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct OpenRequest {
    pub media_id: Uuid,
}

// Upload

/// Result of uploading a file into managed storage.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UploadResponse {
    pub media_id: String,
    pub content_hash: String,
    pub was_duplicate: bool,
    pub file_size: u64,
}

impl From<pinakes_core::model::UploadResult> for UploadResponse {
    /// Unwrap the ID/hash newtypes into plain strings.
    fn from(result: pinakes_core::model::UploadResult) -> Self {
        Self {
            media_id: result.media_id.0.to_string(),
            content_hash: result.content_hash.0,
            was_duplicate: result.was_duplicate,
            file_size: result.file_size,
        }
    }
}

/// Statistics about the managed (content-addressed) storage area.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ManagedStorageStatsResponse {
    pub total_blobs: u64,
    pub total_size_bytes: u64,
    pub orphaned_blobs: u64,
    pub deduplication_ratio: f64,
}

impl From<pinakes_core::model::ManagedStorageStats>
    for ManagedStorageStatsResponse
{
    /// Field-for-field copy of the core stats struct.
    fn from(stats: pinakes_core::model::ManagedStorageStats) -> Self {
        Self {
            total_blobs: stats.total_blobs,
            total_size_bytes: stats.total_size_bytes,
            orphaned_blobs: stats.orphaned_blobs,
            deduplication_ratio: stats.deduplication_ratio,
        }
    }
}
impl MediaResponse {
    /// Build a `MediaResponse` from a `MediaItem`, stripping the longest
    /// matching root prefix from the path before serialization. Pass the
    /// configured root directories so that clients receive a relative path
    /// (e.g. `"Music/song.mp3"`) rather than a full server filesystem path.
    #[must_use]
    pub fn new(item: pinakes_core::model::MediaItem, roots: &[PathBuf]) -> Self {
        Self {
            id: item.id.0.to_string(),
            path: relativize_path(&item.path, roots),
            file_name: item.file_name,
            // Serialize the media-type enum through serde_json to get its
            // string form; anything that does not serialize to a plain JSON
            // string falls back to the empty string.
            media_type: serde_json::to_value(item.media_type)
                .ok()
                .and_then(|v| v.as_str().map(String::from))
                .unwrap_or_default(),
            content_hash: item.content_hash.0,
            file_size: item.file_size,
            title: item.title,
            artist: item.artist,
            album: item.album,
            genre: item.genre,
            year: item.year,
            duration_secs: item.duration_secs,
            description: item.description,
            // Expose only whether a thumbnail exists, not its server path.
            has_thumbnail: item.thumbnail_path.is_some(),
            // Flatten each custom field into its string type/value pair.
            custom_fields: item
                .custom_fields
                .into_iter()
                .map(|(k, v)| {
                    (k, CustomFieldResponse {
                        field_type: v.field_type.to_string(),
                        value: v.value,
                    })
                })
                .collect(),
            // Photo-specific metadata
            date_taken: item.date_taken,
            latitude: item.latitude,
            longitude: item.longitude,
            camera_make: item.camera_make,
            camera_model: item.camera_model,
            rating: item.rating,
            created_at: item.created_at,
            updated_at: item.updated_at,
            // Markdown links
            links_extracted_at: item.links_extracted_at,
        }
    }
}
// Conversion helpers
impl From<pinakes_core::model::MediaItem> for MediaResponse {
    /// Convert using no root stripping. Prefer `MediaResponse::new(item, roots)`
    /// at route-handler call sites where roots are available.
    ///
    /// With an empty roots slice, `relativize_path` returns the full path
    /// unchanged, so this exposes the absolute server-side path.
    fn from(item: pinakes_core::model::MediaItem) -> Self {
        Self::new(item, &[])
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Unit tests for `relativize_path`: prefix stripping, longest-root
    // selection, and the fall-through cases (no matching root / no roots).

    #[test]
    fn relativize_path_strips_matching_root() {
        let roots = vec![PathBuf::from("/home/user/music")];
        let path = Path::new("/home/user/music/artist/song.mp3");
        assert_eq!(relativize_path(path, &roots), "artist/song.mp3");
    }

    #[test]
    fn relativize_path_picks_longest_root() {
        // Both roots prefix the path; the deeper one must win.
        let roots = vec![
            PathBuf::from("/home/user"),
            PathBuf::from("/home/user/music"),
        ];
        let path = Path::new("/home/user/music/song.mp3");
        assert_eq!(relativize_path(path, &roots), "song.mp3");
    }

    #[test]
    fn relativize_path_no_match_returns_full() {
        let roots = vec![PathBuf::from("/home/user/music")];
        let path = Path::new("/srv/videos/movie.mkv");
        assert_eq!(relativize_path(path, &roots), "/srv/videos/movie.mkv");
    }

    #[test]
    fn relativize_path_empty_roots_returns_full() {
        let path = Path::new("/home/user/music/song.mp3");
        assert_eq!(relativize_path(path, &[]), "/home/user/music/song.mp3");
    }

    #[test]
    fn relativize_path_exact_root_match() {
        let roots = vec![PathBuf::from("/media/library")];
        let path = Path::new("/media/library/file.mp3");
        assert_eq!(relativize_path(path, &roots), "file.mp3");
    }
}
// Watch progress

/// Request body for saving a watch-progress position, in seconds.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct WatchProgressRequest {
    pub progress_secs: f64,
}

/// Stored watch-progress position, in seconds.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct WatchProgressResponse {
    pub progress_secs: f64,
}

View file

@ -0,0 +1,39 @@
mod analytics;
mod audit;
mod batch;
mod collections;
mod config;
mod enrichment;
mod media;
mod playlists;
mod plugins;
mod scan;
mod search;
mod sharing;
mod social;
mod statistics;
mod subtitles;
mod sync;
mod tags;
mod transcode;
mod users;
pub use analytics::*;
pub use audit::*;
pub use batch::*;
pub use collections::*;
pub use config::*;
pub use enrichment::*;
pub use media::*;
pub use playlists::*;
pub use plugins::*;
pub use scan::*;
pub use search::*;
pub use sharing::*;
pub use social::*;
pub use statistics::*;
pub use subtitles::*;
pub use sync::*;
pub use tags::*;
pub use transcode::*;
pub use users::*;

View file

@ -0,0 +1,60 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A playlist as returned to API clients; IDs are stringified.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PlaylistResponse {
    pub id: String,
    pub owner_id: String,
    pub name: String,
    pub description: Option<String>,
    pub is_public: bool,
    // Smart playlists are driven by `filter_query` instead of explicit items.
    pub is_smart: bool,
    pub filter_query: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

impl From<pinakes_core::playlists::Playlist> for PlaylistResponse {
    /// Convert the core playlist, stringifying the playlist and owner IDs.
    fn from(p: pinakes_core::playlists::Playlist) -> Self {
        Self {
            id: p.id.to_string(),
            owner_id: p.owner_id.0.to_string(),
            name: p.name,
            description: p.description,
            is_public: p.is_public,
            is_smart: p.is_smart,
            filter_query: p.filter_query,
            created_at: p.created_at,
            updated_at: p.updated_at,
        }
    }
}

/// Request body for creating a playlist; only `name` is required.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreatePlaylistRequest {
    pub name: String,
    pub description: Option<String>,
    pub is_public: Option<bool>,
    pub is_smart: Option<bool>,
    pub filter_query: Option<String>,
}

/// Partial-update request for a playlist; every field is optional.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdatePlaylistRequest {
    pub name: Option<String>,
    pub description: Option<String>,
    pub is_public: Option<bool>,
}

/// Request to add one media item to a playlist, optionally at a position.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct PlaylistItemRequest {
    pub media_id: Uuid,
    pub position: Option<i32>,
}

/// Request to move one playlist item to a new position.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ReorderPlaylistRequest {
    pub media_id: Uuid,
    pub new_position: i32,
}

View file

@ -0,0 +1,70 @@
use pinakes_plugin_api::{UiPage, UiWidget};
use serde::{Deserialize, Serialize};
/// Summary of an installed plugin plus whether it is currently enabled.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PluginResponse {
  pub id: String,
  pub name: String,
  pub version: String,
  pub author: String,
  pub description: String,
  /// Plugin API version the plugin was built against.
  pub api_version: String,
  pub enabled: bool,
}
/// Request body for installing a plugin.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct InstallPluginRequest {
  pub source: String, // URL or file path
}
/// Request body for enabling or disabling an installed plugin.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct TogglePluginRequest {
  pub enabled: bool,
}
/// A single plugin UI page entry in the list response
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PluginUiPageEntry {
  /// Plugin ID that provides this page
  pub plugin_id: String,
  /// Full page definition
  #[schema(value_type = Object)]
  pub page: UiPage,
  /// Endpoint paths this plugin is allowed to fetch (empty means no
  /// restriction)
  pub allowed_endpoints: Vec<String>,
}
/// A single plugin UI widget entry in the list response
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct PluginUiWidgetEntry {
  /// Plugin ID that provides this widget
  pub plugin_id: String,
  /// Full widget definition
  #[schema(value_type = Object)]
  pub widget: UiWidget,
}
/// Request body for emitting a plugin event
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct PluginEventRequest {
  pub event: String,
  /// Arbitrary JSON payload; defaults to `null` when omitted.
  #[serde(default)]
  #[schema(value_type = Object)]
  pub payload: serde_json::Value,
}
impl PluginResponse {
  /// Builds the wire representation from plugin metadata plus its current
  /// enabled state.
  #[must_use]
  pub fn new(meta: pinakes_plugin_api::PluginMetadata, enabled: bool) -> Self {
    // Destructure the metadata so fields move straight into the response.
    let pinakes_plugin_api::PluginMetadata {
      id,
      name,
      version,
      author,
      description,
      api_version,
      ..
    } = meta;
    Self {
      id,
      name,
      version,
      author,
      description,
      api_version,
      enabled,
    }
  }
}

View file

@ -0,0 +1,30 @@
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Request body for triggering a library scan.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ScanRequest {
  /// Directory to scan; behavior when omitted is handler-defined
  /// (presumably the configured library roots — confirm in the scan route).
  #[schema(value_type = Option<String>)]
  pub path: Option<PathBuf>,
}
/// Result summary of a scan.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanResponse {
  pub files_found: usize,
  pub files_processed: usize,
  pub errors: Vec<String>,
}
/// Identifier of a background scan job.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanJobResponse {
  pub job_id: String,
}
/// Progress snapshot of an in-flight or completed scan.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScanStatusResponse {
  pub scanning: bool,
  pub files_found: usize,
  pub files_processed: usize,
  pub error_count: usize,
  pub errors: Vec<String>,
}

View file

@ -0,0 +1,74 @@
use pinakes_core::model::Pagination;
use serde::{Deserialize, Serialize};
use super::media::MediaResponse;
/// Maximum offset accepted from clients. Prevents pathologically large OFFSET
/// values that cause expensive sequential scans in the database.
pub const MAX_OFFSET: u64 = 10_000_000;
/// Maximum page size accepted from most listing endpoints.
pub const MAX_LIMIT: u64 = 1000;
/// Query-string parameters for GET search endpoints.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SearchParams {
  /// Search query string.
  pub q: String,
  // NOTE(review): `sort` is deserialized but `SearchParams::to_pagination`
  // passes `None` as the sort key — confirm whether sorting is applied
  // elsewhere or this field is dead.
  pub sort: Option<String>,
  pub offset: Option<u64>,
  pub limit: Option<u64>,
}
impl SearchParams {
  /// Clamps client-supplied paging into a `Pagination`.
  ///
  /// Offset defaults to 0 and is capped at `MAX_OFFSET`; limit defaults to
  /// 50 and is capped at `MAX_LIMIT`. No sort key is forwarded here.
  #[must_use]
  pub fn to_pagination(&self) -> Pagination {
    let offset = self.offset.map_or(0, |o| o.min(MAX_OFFSET));
    let limit = self.limit.map_or(50, |l| l.min(MAX_LIMIT));
    Pagination::new(offset, limit, None)
  }
}
/// One page of search results plus the total match count.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SearchResponse {
  pub items: Vec<MediaResponse>,
  /// Total matches across all pages, not just `items.len()`.
  pub total_count: u64,
}
// Search (POST body)
/// JSON-body variant of `SearchParams` for POST search requests.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SearchRequestBody {
  pub q: String,
  pub sort: Option<String>,
  pub offset: Option<u64>,
  pub limit: Option<u64>,
}
impl SearchRequestBody {
  /// Clamps client-supplied paging into a `Pagination`.
  ///
  /// Same clamping as `SearchParams::to_pagination`: offset defaults to 0
  /// (capped at `MAX_OFFSET`), limit defaults to 50 (capped at `MAX_LIMIT`),
  /// and no sort key is forwarded.
  #[must_use]
  pub fn to_pagination(&self) -> Pagination {
    let offset = self.offset.map_or(0, |o| o.min(MAX_OFFSET));
    let limit = self.limit.map_or(50, |l| l.min(MAX_LIMIT));
    Pagination::new(offset, limit, None)
  }
}
// Pagination
/// Generic offset/limit/sort query parameters shared by listing endpoints.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct PaginationParams {
  pub offset: Option<u64>,
  pub limit: Option<u64>,
  pub sort: Option<String>,
}
impl PaginationParams {
  /// Clamps client-supplied paging into a `Pagination`, forwarding the
  /// optional sort key unchanged.
  ///
  /// Offset defaults to 0 (capped at `MAX_OFFSET`); limit defaults to 50
  /// (capped at `MAX_LIMIT`).
  #[must_use]
  pub fn to_pagination(&self) -> Pagination {
    let offset = self.offset.map_or(0, |o| o.min(MAX_OFFSET));
    let limit = self.limit.map_or(50, |l| l.min(MAX_LIMIT));
    Pagination::new(offset, limit, self.sort.clone())
  }
}

View file

@ -0,0 +1,202 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Request body for sharing a media item, collection, tag, or saved search
/// with a user, group, or public link.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateShareRequest {
  pub target_type: String,
  pub target_id: String,
  pub recipient_type: String,
  pub recipient_user_id: Option<Uuid>,
  pub recipient_group_id: Option<Uuid>,
  /// Optional password protecting the share.
  pub password: Option<String>,
  pub permissions: Option<SharePermissionsRequest>,
  pub note: Option<String>,
  pub expires_in_hours: Option<u64>,
  /// Whether the share also applies to children of the target.
  pub inherit_to_children: Option<bool>,
}
/// Per-share permission flags; all optional on input.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct SharePermissionsRequest {
  pub can_view: Option<bool>,
  pub can_download: Option<bool>,
  pub can_edit: Option<bool>,
  pub can_delete: Option<bool>,
  pub can_reshare: Option<bool>,
  pub can_add: Option<bool>,
}
/// Wire representation of a share, with ids flattened to strings.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareResponse {
  pub id: String,
  pub target_type: String,
  pub target_id: String,
  pub owner_id: String,
  pub recipient_type: String,
  pub recipient_user_id: Option<String>,
  pub recipient_group_id: Option<String>,
  /// Present only for public-link shares.
  pub public_token: Option<String>,
  pub permissions: SharePermissionsResponse,
  pub note: Option<String>,
  pub expires_at: Option<DateTime<Utc>>,
  pub access_count: u64,
  pub last_accessed: Option<DateTime<Utc>>,
  pub inherit_to_children: bool,
  pub created_at: DateTime<Utc>,
  pub updated_at: DateTime<Utc>,
}
/// Flattened permission flags as returned to clients.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SharePermissionsResponse {
  pub can_view: bool,
  pub can_download: bool,
  pub can_edit: bool,
  pub can_delete: bool,
  pub can_reshare: bool,
  pub can_add: bool,
}
/// Flattens the core nested view/mutate permission groups into the flat
/// wire format.
impl From<pinakes_core::sharing::SharePermissions>
  for SharePermissionsResponse
{
  fn from(p: pinakes_core::sharing::SharePermissions) -> Self {
    let (view, mutate) = (p.view, p.mutate);
    Self {
      // Read-side flags live on the `view` group.
      can_view: view.can_view,
      can_download: view.can_download,
      can_reshare: view.can_reshare,
      // Write-side flags live on the `mutate` group.
      can_edit: mutate.can_edit,
      can_delete: mutate.can_delete,
      can_add: mutate.can_add,
    }
  }
}
/// Flattens the core `Share` (with its target/recipient enums) into the
/// string-typed wire format.
impl From<pinakes_core::sharing::Share> for ShareResponse {
  fn from(s: pinakes_core::sharing::Share) -> Self {
    // Split the target enum into a (type, id) string pair.
    let (target_type, target_id) = match &s.target {
      pinakes_core::sharing::ShareTarget::Media { media_id } => {
        ("media".to_string(), media_id.0.to_string())
      },
      pinakes_core::sharing::ShareTarget::Collection { collection_id } => {
        ("collection".to_string(), collection_id.to_string())
      },
      pinakes_core::sharing::ShareTarget::Tag { tag_id } => {
        ("tag".to_string(), tag_id.to_string())
      },
      pinakes_core::sharing::ShareTarget::SavedSearch { search_id } => {
        ("saved_search".to_string(), search_id.to_string())
      },
    };
    // Split the recipient enum; at most one of user id / group id / token is
    // populated per variant, and `Federated` exposes none of them here.
    let (recipient_type, recipient_user_id, recipient_group_id, public_token) =
      match &s.recipient {
        pinakes_core::sharing::ShareRecipient::PublicLink { token, .. } => {
          ("public_link".to_string(), None, None, Some(token.clone()))
        },
        pinakes_core::sharing::ShareRecipient::User { user_id } => {
          ("user".to_string(), Some(user_id.0.to_string()), None, None)
        },
        pinakes_core::sharing::ShareRecipient::Group { group_id } => {
          ("group".to_string(), None, Some(group_id.to_string()), None)
        },
        pinakes_core::sharing::ShareRecipient::Federated { .. } => {
          ("federated".to_string(), None, None, None)
        },
      };
    Self {
      id: s.id.0.to_string(),
      target_type,
      target_id,
      owner_id: s.owner_id.0.to_string(),
      recipient_type,
      recipient_user_id,
      recipient_group_id,
      public_token,
      permissions: s.permissions.into(),
      note: s.note,
      expires_at: s.expires_at,
      access_count: s.access_count,
      last_accessed: s.last_accessed,
      inherit_to_children: s.inherit_to_children,
      created_at: s.created_at,
      updated_at: s.updated_at,
    }
  }
}
/// Request body for updating mutable share attributes.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateShareRequest {
  pub permissions: Option<SharePermissionsRequest>,
  pub note: Option<String>,
  pub expires_at: Option<DateTime<Utc>>,
  pub inherit_to_children: Option<bool>,
}
/// One activity-log entry describing an action performed on a share.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareActivityResponse {
  pub id: String,
  pub share_id: String,
  /// Acting user id, when known.
  pub actor_id: Option<String>,
  pub actor_ip: Option<String>,
  pub action: String,
  pub details: Option<String>,
  pub timestamp: DateTime<Utc>,
}
impl From<pinakes_core::sharing::ShareActivity> for ShareActivityResponse {
  fn from(a: pinakes_core::sharing::ShareActivity) -> Self {
    Self {
      id: a.id.to_string(),
      share_id: a.share_id.0.to_string(),
      actor_id: a.actor_id.map(|id| id.0.to_string()),
      actor_ip: a.actor_ip,
      action: a.action.to_string(),
      details: a.details,
      timestamp: a.timestamp,
    }
  }
}
/// Notification about share activity.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareNotificationResponse {
  pub id: String,
  pub share_id: String,
  pub notification_type: String,
  pub is_read: bool,
  pub created_at: DateTime<Utc>,
}
impl From<pinakes_core::sharing::ShareNotification>
  for ShareNotificationResponse
{
  fn from(n: pinakes_core::sharing::ShareNotification) -> Self {
    Self {
      id: n.id.to_string(),
      share_id: n.share_id.0.to_string(),
      notification_type: n.notification_type.to_string(),
      is_read: n.is_read,
      created_at: n.created_at,
    }
  }
}
/// Request body for deleting several shares in one call.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct BatchDeleteSharesRequest {
  pub share_ids: Vec<Uuid>,
}
/// Request body for accessing shared content, optionally with a password.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AccessSharedRequest {
  pub password: Option<String>,
}
/// Response for accessing shared content.
/// Single-media shares return the media object directly (backwards compatible).
/// Collection/Tag/SavedSearch shares return a list of items.
#[derive(Debug, Serialize, utoipa::ToSchema)]
#[serde(untagged)]
pub enum SharedContentResponse {
  Single(super::MediaResponse),
  Multiple { items: Vec<super::MediaResponse> },
}

View file

@ -0,0 +1,96 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A user's star rating (optionally with review text) for a media item.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct RatingResponse {
  pub id: String,
  pub user_id: String,
  pub media_id: String,
  pub stars: u8,
  pub review_text: Option<String>,
  pub created_at: DateTime<Utc>,
}
impl From<pinakes_core::social::Rating> for RatingResponse {
  fn from(r: pinakes_core::social::Rating) -> Self {
    Self {
      id: r.id.to_string(),
      user_id: r.user_id.0.to_string(),
      media_id: r.media_id.0.to_string(),
      stars: r.stars,
      review_text: r.review_text,
      created_at: r.created_at,
    }
  }
}
/// Request body for submitting a rating.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateRatingRequest {
  pub stars: u8,
  pub review_text: Option<String>,
}
/// A comment on a media item; `parent_comment_id` links threaded replies.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct CommentResponse {
  pub id: String,
  pub user_id: String,
  pub media_id: String,
  pub parent_comment_id: Option<String>,
  pub text: String,
  pub created_at: DateTime<Utc>,
}
impl From<pinakes_core::social::Comment> for CommentResponse {
  fn from(c: pinakes_core::social::Comment) -> Self {
    Self {
      id: c.id.to_string(),
      user_id: c.user_id.0.to_string(),
      media_id: c.media_id.0.to_string(),
      parent_comment_id: c.parent_comment_id.map(|id| id.to_string()),
      text: c.text,
      created_at: c.created_at,
    }
  }
}
/// Request body for posting a comment; `parent_id` makes it a reply.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateCommentRequest {
  pub text: String,
  pub parent_id: Option<Uuid>,
}
/// Request body for marking a media item as a favorite.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct FavoriteRequest {
  pub media_id: Uuid,
}
/// Request body for creating a public share link to a single media item.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateShareLinkRequest {
  pub media_id: Uuid,
  pub password: Option<String>,
  pub expires_in_hours: Option<u64>,
}
/// Wire representation of a share link, including its access token.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ShareLinkResponse {
  pub id: String,
  pub media_id: String,
  pub token: String,
  pub expires_at: Option<DateTime<Utc>>,
  pub view_count: u64,
  pub created_at: DateTime<Utc>,
}
impl From<pinakes_core::social::ShareLink> for ShareLinkResponse {
  fn from(s: pinakes_core::social::ShareLink) -> Self {
    Self {
      id: s.id.to_string(),
      media_id: s.media_id.0.to_string(),
      token: s.token,
      expires_at: s.expires_at,
      view_count: s.view_count,
      created_at: s.created_at,
    }
  }
}

View file

@ -0,0 +1,84 @@
use serde::Serialize;
// Library Statistics
/// Aggregate statistics about the media library.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct LibraryStatisticsResponse {
  pub total_media: u64,
  pub total_size_bytes: u64,
  pub avg_file_size_bytes: u64,
  /// Item counts broken down by media type.
  pub media_by_type: Vec<TypeCountResponse>,
  /// Storage usage broken down by media type.
  pub storage_by_type: Vec<TypeCountResponse>,
  pub newest_item: Option<String>,
  pub oldest_item: Option<String>,
  pub top_tags: Vec<TypeCountResponse>,
  pub top_collections: Vec<TypeCountResponse>,
  pub total_tags: u64,
  pub total_collections: u64,
  pub total_duplicates: u64,
}
/// Generic `(name, count)` pair used by the statistics breakdowns.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TypeCountResponse {
  pub name: String,
  pub count: u64,
}
impl From<pinakes_core::storage::LibraryStatistics>
  for LibraryStatisticsResponse
{
  fn from(stats: pinakes_core::storage::LibraryStatistics) -> Self {
    // The core type stores each breakdown as (name, count) tuples; map them
    // into the serializable TypeCountResponse shape.
    Self {
      total_media: stats.total_media,
      total_size_bytes: stats.total_size_bytes,
      avg_file_size_bytes: stats.avg_file_size_bytes,
      media_by_type: stats
        .media_by_type
        .into_iter()
        .map(|(name, count)| TypeCountResponse { name, count })
        .collect(),
      storage_by_type: stats
        .storage_by_type
        .into_iter()
        .map(|(name, count)| TypeCountResponse { name, count })
        .collect(),
      newest_item: stats.newest_item,
      oldest_item: stats.oldest_item,
      top_tags: stats
        .top_tags
        .into_iter()
        .map(|(name, count)| TypeCountResponse { name, count })
        .collect(),
      top_collections: stats
        .top_collections
        .into_iter()
        .map(|(name, count)| TypeCountResponse { name, count })
        .collect(),
      total_tags: stats.total_tags,
      total_collections: stats.total_collections,
      total_duplicates: stats.total_duplicates,
    }
  }
}
// Database management
/// Row counts and on-disk size of the backing database.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DatabaseStatsResponse {
  pub media_count: u64,
  pub tag_count: u64,
  pub collection_count: u64,
  pub audit_count: u64,
  pub database_size_bytes: u64,
  pub backend_name: String,
}
// Scheduled Tasks
/// One scheduled background task and its run status.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ScheduledTaskResponse {
  pub id: String,
  pub name: String,
  pub schedule: String,
  pub enabled: bool,
  pub last_run: Option<String>,
  pub next_run: Option<String>,
  pub last_status: Option<String>,
}

View file

@ -0,0 +1,73 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// Wire representation of a subtitle attached to a media item.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SubtitleResponse {
  pub id: String,
  pub media_id: String,
  pub language: Option<String>,
  pub format: String,
  /// True when the subtitle is a track inside the media container rather
  /// than a separate file.
  pub is_embedded: bool,
  /// Container track index (for embedded subtitles).
  pub track_index: Option<u32>,
  /// Timing offset in milliseconds (presumably applied at playback —
  /// confirm in the player/transcode path).
  pub offset_ms: i64,
  pub created_at: DateTime<Utc>,
}
impl From<pinakes_core::subtitles::Subtitle> for SubtitleResponse {
  fn from(s: pinakes_core::subtitles::Subtitle) -> Self {
    Self {
      id: s.id.to_string(),
      media_id: s.media_id.0.to_string(),
      language: s.language,
      format: s.format.to_string(),
      is_embedded: s.is_embedded,
      track_index: s.track_index,
      offset_ms: s.offset_ms,
      created_at: s.created_at,
    }
  }
}
/// Request body for attaching a subtitle to a media item.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AddSubtitleRequest {
  pub language: Option<String>,
  pub format: String,
  /// Sidecar file path, for non-embedded subtitles.
  pub file_path: Option<String>,
  pub is_embedded: Option<bool>,
  pub track_index: Option<u32>,
  pub offset_ms: Option<i64>,
}
/// Request body for adjusting a subtitle's timing offset.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateSubtitleOffsetRequest {
  pub offset_ms: i64,
}
/// Information about an embedded subtitle track available for extraction.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SubtitleTrackInfoResponse {
  pub index: u32,
  pub language: Option<String>,
  pub format: String,
  pub title: Option<String>,
}
impl From<pinakes_core::subtitles::SubtitleTrackInfo>
  for SubtitleTrackInfoResponse
{
  fn from(t: pinakes_core::subtitles::SubtitleTrackInfo) -> Self {
    Self {
      index: t.index,
      language: t.language,
      format: t.format.to_string(),
      title: t.title,
    }
  }
}
/// Response for listing subtitles on a media item.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SubtitleListResponse {
  pub subtitles: Vec<SubtitleResponse>,
  /// Embedded tracks that exist in the container but have not been added.
  pub available_tracks: Vec<SubtitleTrackInfoResponse>,
}

View file

@ -0,0 +1,197 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use super::media::MediaResponse;
/// Request body for registering a sync client device.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RegisterDeviceRequest {
  pub name: String,
  pub device_type: String,
  pub client_version: String,
  pub os_info: Option<String>,
}
/// Wire representation of a registered sync device.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DeviceResponse {
  pub id: String,
  pub name: String,
  pub device_type: String,
  pub client_version: String,
  pub os_info: Option<String>,
  pub last_sync_at: Option<DateTime<Utc>>,
  pub last_seen_at: DateTime<Utc>,
  /// Presumably the last acknowledged change-log sequence (see
  /// `AcknowledgeChangesRequest`) — confirm in the sync service.
  pub sync_cursor: Option<i64>,
  pub enabled: bool,
  pub created_at: DateTime<Utc>,
}
impl From<pinakes_core::sync::SyncDevice> for DeviceResponse {
  fn from(d: pinakes_core::sync::SyncDevice) -> Self {
    Self {
      id: d.id.0.to_string(),
      name: d.name,
      device_type: d.device_type.to_string(),
      client_version: d.client_version,
      os_info: d.os_info,
      last_sync_at: d.last_sync_at,
      last_seen_at: d.last_seen_at,
      sync_cursor: d.sync_cursor,
      enabled: d.enabled,
      created_at: d.created_at,
    }
  }
}
/// Registration result: the device record plus the token issued to it.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct DeviceRegistrationResponse {
  pub device: DeviceResponse,
  pub device_token: String,
}
/// Request body for renaming or enabling/disabling a device.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateDeviceRequest {
  pub name: Option<String>,
  pub enabled: Option<bool>,
}
/// Query parameters for incrementally fetching the server change log.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct GetChangesParams {
  pub cursor: Option<i64>,
  pub limit: Option<u64>,
}
/// One server-side change-log entry.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SyncChangeResponse {
  pub id: String,
  /// Monotonic position in the change log; used as the sync cursor.
  pub sequence: i64,
  pub change_type: String,
  pub media_id: Option<String>,
  pub path: String,
  pub content_hash: Option<String>,
  pub file_size: Option<u64>,
  pub timestamp: DateTime<Utc>,
}
impl From<pinakes_core::sync::SyncLogEntry> for SyncChangeResponse {
  fn from(e: pinakes_core::sync::SyncLogEntry) -> Self {
    Self {
      id: e.id.to_string(),
      sequence: e.sequence,
      change_type: e.change_type.to_string(),
      media_id: e.media_id.map(|id| id.0.to_string()),
      path: e.path,
      content_hash: e.content_hash.map(|h| h.0),
      file_size: e.file_size,
      timestamp: e.timestamp,
    }
  }
}
/// Page of change-log entries plus the cursor to resume from.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ChangesResponse {
  pub changes: Vec<SyncChangeResponse>,
  pub cursor: i64,
  pub has_more: bool,
}
/// One change observed on the client side.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ClientChangeReport {
  pub path: String,
  pub change_type: String,
  pub content_hash: Option<String>,
  pub file_size: Option<u64>,
  pub local_mtime: Option<i64>,
}
/// Batch of client-observed changes reported to the server.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ReportChangesRequest {
  pub changes: Vec<ClientChangeReport>,
}
/// Outcome of a change report: accepted paths, detected conflicts, and
/// paths whose content must be uploaded.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ReportChangesResponse {
  pub accepted: Vec<String>,
  pub conflicts: Vec<ConflictResponse>,
  pub upload_required: Vec<String>,
}
/// A detected client/server divergence for one path.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ConflictResponse {
  pub id: String,
  pub path: String,
  pub local_hash: String,
  pub server_hash: String,
  pub detected_at: DateTime<Utc>,
}
impl From<pinakes_core::sync::SyncConflict> for ConflictResponse {
  fn from(c: pinakes_core::sync::SyncConflict) -> Self {
    Self {
      id: c.id.to_string(),
      path: c.path,
      local_hash: c.local_hash,
      server_hash: c.server_hash,
      detected_at: c.detected_at,
    }
  }
}
/// Request body naming the resolution strategy for a conflict.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ResolveConflictRequest {
  pub resolution: String,
}
/// Request body for starting a chunked upload session.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateUploadSessionRequest {
  pub target_path: String,
  pub expected_hash: String,
  pub expected_size: u64,
  pub chunk_size: Option<u64>,
}
/// State of a chunked upload session.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UploadSessionResponse {
  pub id: String,
  pub target_path: String,
  pub expected_hash: String,
  pub expected_size: u64,
  pub chunk_size: u64,
  pub chunk_count: u64,
  pub status: String,
  pub created_at: DateTime<Utc>,
  pub expires_at: DateTime<Utc>,
}
impl From<pinakes_core::sync::UploadSession> for UploadSessionResponse {
  fn from(s: pinakes_core::sync::UploadSession) -> Self {
    Self {
      id: s.id.to_string(),
      target_path: s.target_path,
      expected_hash: s.expected_hash.0,
      expected_size: s.expected_size,
      chunk_size: s.chunk_size,
      chunk_count: s.chunk_count,
      status: s.status.to_string(),
      created_at: s.created_at,
      expires_at: s.expires_at,
    }
  }
}
/// Acknowledgement that a single upload chunk was received.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ChunkUploadedResponse {
  pub chunk_index: u64,
  pub received: bool,
}
/// Request body advancing the device's change-log cursor.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct AcknowledgeChangesRequest {
  pub cursor: i64,
}
// Most viewed (uses MediaResponse)
/// Media item paired with its view count for "most viewed" listings.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MostViewedResponse {
  pub media: MediaResponse,
  pub view_count: u64,
}

View file

@ -0,0 +1,33 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Wire representation of a tag; tags form a hierarchy via `parent_id`.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TagResponse {
  pub id: String,
  pub name: String,
  pub parent_id: Option<String>,
  pub created_at: DateTime<Utc>,
}
/// Request body for creating a tag, optionally under a parent tag.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateTagRequest {
  pub name: String,
  pub parent_id: Option<Uuid>,
}
/// Request body for attaching an existing tag to a media item.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct TagMediaRequest {
  pub tag_id: Uuid,
}
impl From<pinakes_core::model::Tag> for TagResponse {
  fn from(tag: pinakes_core::model::Tag) -> Self {
    Self {
      id: tag.id.to_string(),
      name: tag.name,
      parent_id: tag.parent_id.map(|id| id.to_string()),
      created_at: tag.created_at,
    }
  }
}

View file

@ -0,0 +1,34 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// Status snapshot of a transcode session.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TranscodeSessionResponse {
  pub id: String,
  pub media_id: String,
  /// Name of the transcode profile in use.
  pub profile: String,
  pub status: String,
  /// Progress value copied from the core session; units (fraction vs
  /// percent) are not visible here — confirm against `TranscodeSession`.
  pub progress: f32,
  pub created_at: DateTime<Utc>,
  pub expires_at: Option<DateTime<Utc>>,
}
impl From<pinakes_core::transcode::TranscodeSession>
  for TranscodeSessionResponse
{
  fn from(s: pinakes_core::transcode::TranscodeSession) -> Self {
    Self {
      id: s.id.to_string(),
      media_id: s.media_id.0.to_string(),
      profile: s.profile,
      status: s.status.as_str().to_string(),
      progress: s.progress,
      created_at: s.created_at,
      expires_at: s.expires_at,
    }
  }
}
/// Request body for starting a transcode with a named profile.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateTranscodeRequest {
  pub profile: String,
}

View file

@ -0,0 +1,101 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
// Auth
/// Credentials for username/password login.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct LoginRequest {
  pub username: String,
  pub password: String,
}
/// Successful login result: bearer token plus basic identity.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct LoginResponse {
  pub token: String,
  pub username: String,
  pub role: String,
}
/// Identity of the currently authenticated user.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserInfoResponse {
  pub username: String,
  pub role: String,
}
// Users
/// Full user record as exposed to clients (contains no credential material).
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserResponse {
  pub id: String,
  pub username: String,
  pub role: String,
  pub profile: UserProfileResponse,
  pub created_at: DateTime<Utc>,
  pub updated_at: DateTime<Utc>,
}
/// Public profile details of a user.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserProfileResponse {
  pub avatar_path: Option<String>,
  pub bio: Option<String>,
  pub preferences: UserPreferencesResponse,
}
/// Per-user UI and playback preferences.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserPreferencesResponse {
  pub theme: Option<String>,
  pub language: Option<String>,
  pub default_video_quality: Option<String>,
  pub auto_play: bool,
}
/// One library-root access grant for a user.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UserLibraryResponse {
  pub user_id: String,
  pub root_path: String,
  pub permission: String,
  pub granted_at: DateTime<Utc>,
}
/// Request body granting a user access to a library root.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct GrantLibraryAccessRequest {
  pub root_path: String,
  #[schema(value_type = String)]
  pub permission: pinakes_core::users::LibraryPermission,
}
/// Request body revoking a user's access to a library root.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct RevokeLibraryAccessRequest {
  pub root_path: String,
}
impl From<pinakes_core::users::User> for UserResponse {
  fn from(user: pinakes_core::users::User) -> Self {
    Self {
      id: user.id.0.to_string(),
      username: user.username,
      role: user.role.to_string(),
      // Flatten the nested core profile/preferences into the DTO tree.
      profile: UserProfileResponse {
        avatar_path: user.profile.avatar_path,
        bio: user.profile.bio,
        preferences: UserPreferencesResponse {
          theme: user.profile.preferences.theme,
          language: user.profile.preferences.language,
          default_video_quality: user.profile.preferences.default_video_quality,
          auto_play: user.profile.preferences.auto_play,
        },
      },
      created_at: user.created_at,
      updated_at: user.updated_at,
    }
  }
}
impl From<pinakes_core::users::UserLibraryAccess> for UserLibraryResponse {
  fn from(access: pinakes_core::users::UserLibraryAccess) -> Self {
    Self {
      user_id: access.user_id.0.to_string(),
      root_path: access.root_path,
      permission: access.permission.to_string(),
      granted_at: access.granted_at,
    }
  }
}

View file

@ -0,0 +1,133 @@
use axum::{
  Json,
  http::StatusCode,
  response::{IntoResponse, Response},
};
use serde::Serialize;
/// JSON body shape for all error responses: `{"error": "..."}`.
#[derive(Debug, Serialize)]
struct ErrorResponse {
  error: String,
}
/// Newtype adapter that renders a core `PinakesError` as an HTTP response.
pub struct ApiError(pub pinakes_core::error::PinakesError);
impl IntoResponse for ApiError {
fn into_response(self) -> Response {
use pinakes_core::error::PinakesError;
let (status, message) = match &self.0 {
PinakesError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::FileNotFound(path) => {
// Only expose the file name, not the full path
let name = path.file_name().map_or_else(
|| "unknown".to_string(),
|n| n.to_string_lossy().to_string(),
);
tracing::debug!(path = %path.display(), "file not found");
(StatusCode::NOT_FOUND, format!("file not found: {name}"))
},
PinakesError::TagNotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
PinakesError::CollectionNotFound(msg) => {
(StatusCode::NOT_FOUND, msg.clone())
},
PinakesError::DuplicateHash(msg) => (StatusCode::CONFLICT, msg.clone()),
PinakesError::UnsupportedMediaType(path) => {
let name = path.file_name().map_or_else(
|| "unknown".to_string(),
|n| n.to_string_lossy().to_string(),
);
(
StatusCode::BAD_REQUEST,
format!("unsupported media type: {name}"),
)
},
PinakesError::SearchParse(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
PinakesError::InvalidOperation(msg) => {
(StatusCode::BAD_REQUEST, msg.clone())
},
PinakesError::InvalidLanguageCode(code) => {
(
StatusCode::BAD_REQUEST,
format!("invalid language code: {code}"),
)
},
PinakesError::SubtitleTrackNotFound { index } => {
(
StatusCode::NOT_FOUND,
format!("subtitle track {index} not found in media"),
)
},
PinakesError::ExternalTool { tool, .. } => {
tracing::error!(tool = %tool, error = %self.0, "external tool failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("external tool `{tool}` failed"),
)
},
PinakesError::Authentication(msg) => {
(StatusCode::UNAUTHORIZED, msg.clone())
},
PinakesError::Authorization(msg) => (StatusCode::FORBIDDEN, msg.clone()),
PinakesError::Serialization(msg) => {
tracing::error!(error = %msg, "serialization error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"data serialization error".to_string(),
)
},
PinakesError::Config(_) => {
tracing::error!(error = %self.0, "configuration error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"internal configuration error".to_string(),
)
},
_ => {
tracing::error!(error = %self.0, "internal server error");
(
StatusCode::INTERNAL_SERVER_ERROR,
"internal server error".to_string(),
)
},
};
let body = serde_json::to_string(&ErrorResponse {
error: message.clone(),
})
.unwrap_or_else(|_| format!(r#"{{"error":"{message}"}}"#));
(status, [("content-type", "application/json")], body).into_response()
}
}
impl From<pinakes_core::error::PinakesError> for ApiError {
  fn from(e: pinakes_core::error::PinakesError) -> Self {
    Self(e)
  }
}
/// Shorthand constructors so handlers can raise HTTP-flavored errors without
/// naming `PinakesError` variants directly. Status codes below follow the
/// mapping in `into_response`.
impl ApiError {
  /// 400 Bad Request (wraps `InvalidOperation`).
  pub fn bad_request(msg: impl Into<String>) -> Self {
    Self(pinakes_core::error::PinakesError::InvalidOperation(
      msg.into(),
    ))
  }
  /// 404 Not Found (wraps `NotFound`).
  pub fn not_found(msg: impl Into<String>) -> Self {
    Self(pinakes_core::error::PinakesError::NotFound(msg.into()))
  }
  /// 500 Internal Server Error. Reuses the `Database` variant, which falls
  /// into the catch-all arm of `into_response`, so the message is logged but
  /// hidden from clients.
  pub fn internal(msg: impl Into<String>) -> Self {
    Self(pinakes_core::error::PinakesError::Database(msg.into()))
  }
  /// 403 Forbidden (wraps `Authorization`).
  pub fn forbidden(msg: impl Into<String>) -> Self {
    Self(pinakes_core::error::PinakesError::Authorization(msg.into()))
  }
  /// 401 Unauthorized (wraps `Authentication`).
  pub fn unauthorized(msg: impl Into<String>) -> Self {
    Self(pinakes_core::error::PinakesError::Authentication(
      msg.into(),
    ))
  }
}
/// Result alias used by route handlers.
pub type ApiResult<T> = Result<T, ApiError>;

View file

@ -0,0 +1,7 @@
//! Public modules of the Pinakes HTTP server crate.
pub mod api_doc;
pub mod app;
pub mod auth;
pub mod dto;
pub mod error;
pub mod routes;
pub mod state;

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,194 @@
use axum::{
  Json,
  extract::{Extension, Path, Query, State},
};
use pinakes_core::{
  analytics::{UsageEvent, UsageEventType},
  model::MediaId,
};
use uuid::Uuid;
use crate::{
  auth::resolve_user_id,
  dto::{
    MediaResponse,
    MostViewedResponse,
    PaginationParams,
    RecordUsageEventRequest,
    WatchProgressRequest,
    WatchProgressResponse,
  },
  error::ApiError,
  state::AppState,
};
/// Hard cap on `limit` for the analytics endpoints in this module (stricter
/// than the DTO-level `dto::MAX_LIMIT`).
const MAX_LIMIT: u64 = 100;
#[utoipa::path(
  get,
  path = "/api/v1/analytics/most-viewed",
  tag = "analytics",
  params(
    ("limit" = Option<u64>, Query, description = "Maximum number of results"),
    ("offset" = Option<u64>, Query, description = "Pagination offset"),
  ),
  responses(
    (status = 200, description = "Most viewed media", body = Vec<MostViewedResponse>),
    (status = 401, description = "Unauthorized"),
    (status = 500, description = "Internal server error"),
  ),
  security(("bearer_auth" = []))
)]
/// Lists the most-viewed media items together with their view counts.
// NOTE(review): the OpenAPI params above advertise `offset`, but only
// `params.limit` is read here — confirm whether offset pagination is
// intended for this endpoint.
pub async fn get_most_viewed(
  State(state): State<AppState>,
  Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<MostViewedResponse>>, ApiError> {
  // Default page size 20, clamped to the module-local MAX_LIMIT (100).
  let limit = params.limit.unwrap_or(20).min(MAX_LIMIT);
  let results = state.storage.get_most_viewed(limit).await?;
  // Roots are handed to MediaResponse::new — presumably to build
  // client-facing paths relative to the library roots.
  let roots = state.config.read().await.directories.roots.clone();
  Ok(Json(
    results
      .into_iter()
      .map(|(item, count)| {
        MostViewedResponse {
          media: MediaResponse::new(item, &roots),
          view_count: count,
        }
      })
      .collect(),
  ))
}
#[utoipa::path(
  get,
  path = "/api/v1/analytics/recently-viewed",
  tag = "analytics",
  params(
    ("limit" = Option<u64>, Query, description = "Maximum number of results"),
    ("offset" = Option<u64>, Query, description = "Pagination offset"),
  ),
  responses(
    (status = 200, description = "Recently viewed media", body = Vec<MediaResponse>),
    (status = 401, description = "Unauthorized"),
    (status = 500, description = "Internal server error"),
  ),
  security(("bearer_auth" = []))
)]
/// Lists the authenticated user's recently viewed media items.
// NOTE(review): as with `get_most_viewed`, `offset` is advertised in the
// OpenAPI params but never read — confirm intent.
pub async fn get_recently_viewed(
  State(state): State<AppState>,
  Extension(username): Extension<String>,
  Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
  let user_id = resolve_user_id(&state.storage, &username).await?;
  // Default page size 20, clamped to the module-local MAX_LIMIT (100).
  let limit = params.limit.unwrap_or(20).min(MAX_LIMIT);
  let items = state.storage.get_recently_viewed(user_id, limit).await?;
  let roots = state.config.read().await.directories.roots.clone();
  Ok(Json(
    items
      .into_iter()
      .map(|item| MediaResponse::new(item, &roots))
      .collect(),
  ))
}
#[utoipa::path(
  post,
  path = "/api/v1/analytics/events",
  tag = "analytics",
  request_body = RecordUsageEventRequest,
  responses(
    (status = 200, description = "Event recorded"),
    (status = 401, description = "Unauthorized"),
    (status = 500, description = "Internal server error"),
  ),
  security(("bearer_auth" = []))
)]
/// Records a usage event for the authenticated user.
pub async fn record_event(
  State(state): State<AppState>,
  Extension(username): Extension<String>,
  Json(req): Json<RecordUsageEventRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
  // An unrecognized event type string maps to InvalidOperation, which the
  // error layer renders as 400 Bad Request.
  let event_type: UsageEventType =
    req.event_type.parse().map_err(|e: String| {
      ApiError(pinakes_core::error::PinakesError::InvalidOperation(e))
    })?;
  let user_id = resolve_user_id(&state.storage, &username).await?;
  let event = UsageEvent {
    // v7 UUIDs embed a timestamp, keeping event ids roughly time-ordered.
    id: Uuid::now_v7(),
    media_id: req.media_id.map(MediaId),
    user_id: Some(user_id),
    event_type,
    timestamp: chrono::Utc::now(),
    duration_secs: req.duration_secs,
    // Arbitrary JSON context is stored as its string form.
    context_json: req.context.map(|v| v.to_string()),
  };
  state.storage.record_usage_event(&event).await?;
  Ok(Json(serde_json::json!({"recorded": true})))
}
#[utoipa::path(
  get,
  path = "/api/v1/media/{id}/progress",
  tag = "analytics",
  params(
    ("id" = Uuid, Path, description = "Media item ID"),
  ),
  responses(
    (status = 200, description = "Watch progress", body = WatchProgressResponse),
    (status = 401, description = "Unauthorized"),
    (status = 404, description = "Not found"),
    (status = 500, description = "Internal server error"),
  ),
  security(("bearer_auth" = []))
)]
/// Returns the authenticated user's saved watch position for a media item.
/// A missing record is reported as `0.0` rather than an error.
pub async fn get_watch_progress(
  State(state): State<AppState>,
  Extension(username): Extension<String>,
  Path(id): Path<Uuid>,
) -> Result<Json<WatchProgressResponse>, ApiError> {
  let user_id = resolve_user_id(&state.storage, &username).await?;
  let progress = state
    .storage
    .get_watch_progress(user_id, MediaId(id))
    .await?
    .unwrap_or(0.0);
  Ok(Json(WatchProgressResponse {
    progress_secs: progress,
  }))
}
#[utoipa::path(
  put,
  path = "/api/v1/media/{id}/progress",
  tag = "analytics",
  params(
    ("id" = Uuid, Path, description = "Media item ID"),
  ),
  request_body = WatchProgressRequest,
  responses(
    (status = 200, description = "Progress updated"),
    (status = 400, description = "Bad request"),
    (status = 401, description = "Unauthorized"),
    (status = 404, description = "Not found"),
    (status = 500, description = "Internal server error"),
  ),
  security(("bearer_auth" = []))
)]
/// Stores the authenticated user's watch position for a media item.
pub async fn update_watch_progress(
  State(state): State<AppState>,
  Extension(username): Extension<String>,
  Path(id): Path<Uuid>,
  Json(req): Json<WatchProgressRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
  // Reject NaN, infinities, and negative values before touching storage.
  if !req.progress_secs.is_finite() || req.progress_secs < 0.0 {
    return Err(ApiError::bad_request(
      "progress_secs must be a non-negative finite number",
    ));
  }
  let user_id = resolve_user_id(&state.storage, &username).await?;
  state
    .storage
    .update_watch_progress(user_id, MediaId(id), req.progress_secs)
    .await?;
  Ok(Json(serde_json::json!({"updated": true})))
}

View file

@ -0,0 +1,36 @@
use axum::{
Json,
extract::{Query, State},
};
use crate::{
dto::{AuditEntryResponse, PaginationParams},
error::ApiError,
state::AppState,
};
#[utoipa::path(
    get,
    path = "/api/v1/audit",
    tag = "audit",
    params(
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Page size"),
    ),
    responses(
        (status = 200, description = "Audit log entries", body = Vec<AuditEntryResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_audit(
    State(state): State<AppState>,
    Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<AuditEntryResponse>>, ApiError> {
    // `None` user filter: this endpoint returns entries for all users.
    let page = params.to_pagination();
    let rows = state.storage.list_audit_entries(None, &page).await?;
    let body: Vec<AuditEntryResponse> =
        rows.into_iter().map(AuditEntryResponse::from).collect();
    Ok(Json(body))
}

View file

@ -0,0 +1,400 @@
use axum::{
Json,
extract::State,
http::{HeaderMap, StatusCode},
};
use crate::{
dto::{LoginRequest, LoginResponse, UserInfoResponse},
state::AppState,
};
/// Dummy password hash to use for timing-safe comparison when user doesn't
/// exist. This is a valid argon2 hash that will always fail verification but
/// takes similar time to verify as a real hash, preventing timing attacks that
/// could reveal whether a username exists.
///
/// NOTE(review): the cost parameters here (m=19456, t=2, p=1) should mirror
/// the parameters used when hashing real account passwords so verification
/// time stays comparable — confirm if the account hashing defaults change.
const DUMMY_HASH: &str =
    "$argon2id$v=19$m=19456,t=2,\
    p=1$VGltaW5nU2FmZUR1bW15$c2ltdWxhdGVkX2hhc2hfZm9yX3RpbWluZ19zYWZldHk";
// Security-sensitive handler: the statement order below (size caps first,
// then an unconditional password verification, then a single combined
// failure branch) is deliberate — do not "simplify" it.
#[utoipa::path(
    post,
    path = "/api/v1/auth/login",
    tag = "auth",
    request_body = LoginRequest,
    responses(
        (status = 200, description = "Login successful", body = LoginResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Invalid credentials"),
        (status = 500, description = "Internal server error"),
    ),
    security()
)]
pub async fn login(
    State(state): State<AppState>,
    Json(req): Json<LoginRequest>,
) -> Result<Json<LoginResponse>, StatusCode> {
    // Limit input sizes to prevent DoS
    if req.username.len() > 255 || req.password.len() > 1024 {
        return Err(StatusCode::BAD_REQUEST);
    }
    // NOTE(review): this read lock is held across password hashing and the
    // session insert below — confirm that is acceptable under write load.
    let config = state.config.read().await;
    // Accounts disabled: the endpoint pretends not to exist (404, not 401).
    if !config.accounts.enabled {
        return Err(StatusCode::NOT_FOUND);
    }
    let user = config
        .accounts
        .users
        .iter()
        .find(|u| u.username == req.username);
    // Always perform password verification to prevent timing attacks.
    // If the user doesn't exist, we verify against a dummy hash to ensure
    // consistent response times regardless of whether the username exists.
    use argon2::password_hash::PasswordVerifier;
    let (hash_to_verify, user_found) = match user {
        Some(u) => (&u.password_hash as &str, true),
        None => (DUMMY_HASH, false),
    };
    let parsed_hash = argon2::password_hash::PasswordHash::new(hash_to_verify)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let password_valid = argon2::Argon2::default()
        .verify_password(req.password.as_bytes(), &parsed_hash)
        .is_ok();
    // Authentication fails if user wasn't found OR password was invalid
    if !user_found || !password_valid {
        // Log different messages for debugging but return same error
        if user_found {
            tracing::warn!(username = %req.username, "login failed: invalid password");
        } else {
            tracing::warn!(username = %req.username, "login failed: unknown user");
        }
        // Record failed login attempt in audit log
        if let Err(e) = pinakes_core::audit::record_action(
            &state.storage,
            None,
            pinakes_core::model::AuditAction::LoginFailed,
            Some(format!("username: {}", req.username)),
        )
        .await
        {
            tracing::warn!(error = %e, "failed to record failed login audit");
        }
        return Err(StatusCode::UNAUTHORIZED);
    }
    // At this point we know the user exists and password is valid
    let user = user.ok_or(StatusCode::INTERNAL_SERVER_ERROR)?;
    // Generate session token using unbiased uniform distribution
    // (48 chars over a 62-symbol alphabet — ample entropy for a bearer token).
    #[expect(clippy::expect_used)]
    let token: String = {
        use rand::seq::IndexedRandom;
        const CHARSET: &[u8] =
            b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
        let mut rng = rand::rng();
        (0..48)
            .map(|_| *CHARSET.choose(&mut rng).expect("non-empty charset") as char)
            .collect()
    };
    let role = user.role;
    let username = user.username.clone();
    // Create session in database
    let now = chrono::Utc::now();
    let session_data = pinakes_core::storage::SessionData {
        session_token: token.clone(),
        user_id: None, // Could be set if we had user IDs
        username: username.clone(),
        role: role.to_string(),
        created_at: now,
        expires_at: now
            + chrono::Duration::hours(config.accounts.session_expiry_hours as i64),
        last_accessed: now,
    };
    if let Err(e) = state.storage.create_session(&session_data).await {
        tracing::error!(error = %e, "failed to create session in database");
        return Err(StatusCode::INTERNAL_SERVER_ERROR);
    }
    tracing::info!(username = %username, role = %role, "login successful");
    // Record successful login in audit log
    if let Err(e) = pinakes_core::audit::record_action(
        &state.storage,
        None,
        pinakes_core::model::AuditAction::LoginSuccess,
        Some(format!("username: {username}, role: {role}")),
    )
    .await
    {
        tracing::warn!(error = %e, "failed to record login audit");
    }
    Ok(Json(LoginResponse {
        token,
        username,
        role: role.to_string(),
    }))
}
#[utoipa::path(
    post,
    path = "/api/v1/auth/logout",
    tag = "auth",
    responses(
        (status = 200, description = "Logged out"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn logout(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> StatusCode {
    let token = match extract_bearer_token(&headers) {
        Some(t) => t,
        None => return StatusCode::UNAUTHORIZED,
    };
    // Look up the username first; the session row is gone after deletion.
    let username = state
        .storage
        .get_session(token)
        .await
        .ok()
        .flatten()
        .map(|session| session.username);
    if let Err(e) = state.storage.delete_session(token).await {
        tracing::error!(error = %e, "failed to delete session from database");
        return StatusCode::INTERNAL_SERVER_ERROR;
    }
    // Best-effort audit entry; a logging failure does not fail the logout.
    if let Some(user) = username {
        let audit_result = pinakes_core::audit::record_action(
            &state.storage,
            None,
            pinakes_core::model::AuditAction::Logout,
            Some(format!("username: {user}")),
        )
        .await;
        if let Err(e) = audit_result {
            tracing::warn!(error = %e, "failed to record logout audit");
        }
    }
    StatusCode::OK
}
#[utoipa::path(
    get,
    path = "/api/v1/auth/me",
    tag = "auth",
    responses(
        (status = 200, description = "Current user info", body = UserInfoResponse),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn me(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> Result<Json<UserInfoResponse>, StatusCode> {
    // With accounts disabled every caller is the implicit admin; the read
    // lock is scoped so it is released before any storage call.
    {
        let config = state.config.read().await;
        if !config.accounts.enabled {
            return Ok(Json(UserInfoResponse {
                username: "admin".to_string(),
                role: "admin".to_string(),
            }));
        }
    }
    let token = extract_bearer_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;
    let maybe_session = state
        .storage
        .get_session(token)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let session = maybe_session.ok_or(StatusCode::UNAUTHORIZED)?;
    Ok(Json(UserInfoResponse {
        username: session.username,
        role: session.role,
    }))
}
/// Pull the token out of an `Authorization: Bearer <token>` header, if present
/// and valid ASCII. Returns `None` for a missing header or other schemes.
fn extract_bearer_token(headers: &HeaderMap) -> Option<&str> {
    let raw = headers.get("authorization")?.to_str().ok()?;
    raw.strip_prefix("Bearer ")
}
/// Refresh the current session, extending its expiry by the configured
/// duration.
#[utoipa::path(
    post,
    path = "/api/v1/auth/refresh",
    tag = "auth",
    responses(
        (status = 200, description = "Session refreshed"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn refresh(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> Result<Json<serde_json::Value>, StatusCode> {
    let token = extract_bearer_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;
    // Scope the config lock to just the expiry read.
    let expiry_hours = {
        let config = state.config.read().await;
        config.accounts.session_expiry_hours as i64
    };
    let new_expires_at =
        chrono::Utc::now() + chrono::Duration::hours(expiry_hours);
    // `Ok(None)` means the token did not match a live session.
    let outcome = state.storage.extend_session(token, new_expires_at).await;
    match outcome {
        Ok(Some(expires)) => Ok(Json(serde_json::json!({
            "expires_at": expires.to_rfc3339()
        }))),
        Ok(None) => Err(StatusCode::UNAUTHORIZED),
        Err(e) => {
            tracing::error!(error = %e, "failed to extend session");
            Err(StatusCode::INTERNAL_SERVER_ERROR)
        },
    }
}
/// Revoke all sessions for the current user
#[utoipa::path(
    post,
    path = "/api/v1/auth/revoke-all",
    tag = "auth",
    responses(
        (status = 200, description = "All sessions revoked"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn revoke_all_sessions(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> StatusCode {
    let Some(token) = extract_bearer_token(&headers) else {
        return StatusCode::UNAUTHORIZED;
    };
    // The caller's own session identifies whose sessions get revoked.
    let session = match state.storage.get_session(token).await {
        Ok(Some(s)) => s,
        Ok(None) => return StatusCode::UNAUTHORIZED,
        Err(e) => {
            tracing::error!(error = %e, "failed to get session");
            return StatusCode::INTERNAL_SERVER_ERROR;
        },
    };
    let username = session.username;
    match state.storage.delete_user_sessions(&username).await {
        Ok(count) => {
            tracing::info!(username = %username, count = count, "revoked all user sessions");
            // Best-effort audit entry — failure only warns.
            let audit_result = pinakes_core::audit::record_action(
                &state.storage,
                None,
                pinakes_core::model::AuditAction::Logout,
                Some(format!("revoked all sessions for username: {username}")),
            )
            .await;
            if let Err(e) = audit_result {
                tracing::warn!(error = %e, "failed to record session revocation audit");
            }
            StatusCode::OK
        },
        Err(e) => {
            tracing::error!(error = %e, "failed to revoke sessions");
            StatusCode::INTERNAL_SERVER_ERROR
        },
    }
}
/// List all active sessions (admin only)
// Response body for GET /api/v1/auth/sessions.
#[derive(serde::Serialize, utoipa::ToSchema)]
pub struct SessionListResponse {
    pub sessions: Vec<SessionInfo>,
}
// One active session as exposed over the API. Timestamps are RFC 3339
// strings (see `list_active_sessions`, which formats them via `to_rfc3339`).
#[derive(serde::Serialize, utoipa::ToSchema)]
pub struct SessionInfo {
    pub username: String,
    pub role: String,
    pub created_at: String,
    pub last_accessed: String,
    pub expires_at: String,
}
#[utoipa::path(
    get,
    path = "/api/v1/auth/sessions",
    tag = "auth",
    responses(
        (status = 200, description = "Active sessions", body = SessionListResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_active_sessions(
    State(state): State<AppState>,
) -> Result<Json<SessionListResponse>, StatusCode> {
    // NOTE(review): no admin check here — presumably enforced by routing
    // middleware; confirm this route is mounted behind the admin layer.
    let sessions = match state.storage.list_active_sessions(None).await {
        Ok(rows) => rows,
        Err(e) => {
            tracing::error!(error = %e, "failed to list active sessions");
            return Err(StatusCode::INTERNAL_SERVER_ERROR);
        },
    };
    let mut infos = Vec::with_capacity(sessions.len());
    for s in sessions {
        infos.push(SessionInfo {
            username: s.username,
            role: s.role,
            created_at: s.created_at.to_rfc3339(),
            last_accessed: s.last_accessed.to_rfc3339(),
            expires_at: s.expires_at.to_rfc3339(),
        });
    }
    Ok(Json(SessionListResponse { sessions: infos }))
}

View file

@ -0,0 +1,61 @@
use axum::{
extract::State,
http::header::{CONTENT_DISPOSITION, CONTENT_TYPE},
response::{IntoResponse, Response},
};
use crate::{error::ApiError, state::AppState};
/// Create a database backup and return it as a downloadable file.
/// POST /api/v1/admin/backup
///
/// For `SQLite`: creates a backup via VACUUM INTO and returns the file.
/// For `PostgreSQL`: returns unsupported error (use `pg_dump` instead).
#[utoipa::path(
    post,
    path = "/api/v1/admin/backup",
    tag = "backup",
    responses(
        (status = 200, description = "Backup file download"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn create_backup(
    State(state): State<AppState>,
) -> Result<Response, ApiError> {
    // Use a unique temp directory to avoid predictable paths
    let backup_dir = std::env::temp_dir()
        .join(format!("pinakes-backup-{}", uuid::Uuid::now_v7()));
    tokio::fs::create_dir_all(&backup_dir)
        .await
        .map_err(|e| ApiError(pinakes_core::error::PinakesError::Io(e)))?;
    let timestamp = chrono::Utc::now().format("%Y%m%d_%H%M%S");
    let filename = format!("pinakes_backup_{timestamp}.db");
    let backup_path = backup_dir.join(&filename);
    state.storage.backup(&backup_path).await?;
    // Read the backup into memory and clean up the temp file
    let bytes = tokio::fs::read(&backup_path)
        .await
        .map_err(|e| ApiError(pinakes_core::error::PinakesError::Io(e)))?;
    if let Err(e) = tokio::fs::remove_dir_all(&backup_dir).await {
        tracing::warn!(path = %backup_dir.display(), error = %e, "failed to clean up backup temp dir");
    }
    // FIX: the header previously contained a literal placeholder instead of
    // the generated filename, so downloads lost the timestamped name.
    // `filename` is built entirely from a server-side timestamp, so it is
    // safe to embed in the quoted-string without escaping (RFC 6266).
    let disposition = format!("attachment; filename=\"{filename}\"");
    Ok((
        [
            (CONTENT_TYPE, "application/octet-stream".to_owned()),
            (CONTENT_DISPOSITION, disposition),
        ],
        bytes,
    )
        .into_response())
}

View file

@ -0,0 +1,481 @@
use axum::{
Json,
Router,
extract::{Extension, Path, Query, State},
http::StatusCode,
response::IntoResponse,
routing::{get, put},
};
use pinakes_core::{
error::PinakesError,
model::{
AuthorInfo,
BookMetadata,
MediaId,
Pagination,
ReadingProgress,
ReadingStatus,
},
};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{
auth::resolve_user_id,
dto::{MAX_OFFSET, MediaResponse},
error::ApiError,
state::AppState,
};
/// Book metadata response DTO
// Wire representation of `pinakes_core::model::BookMetadata`; see the
// `From<BookMetadata>` impl below for the exact field mapping.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct BookMetadataResponse {
    // UUID of the media item this metadata belongs to (inner value of MediaId).
    pub media_id: Uuid,
    pub isbn: Option<String>,
    pub isbn13: Option<String>,
    pub publisher: Option<String>,
    pub language: Option<String>,
    pub page_count: Option<i32>,
    // Stringified via `Display` in the From impl — format depends on the
    // core date type; not re-parsed here.
    pub publication_date: Option<String>,
    pub series_name: Option<String>,
    // f64 — presumably allows fractional positions within a series.
    pub series_index: Option<f64>,
    pub format: Option<String>,
    pub authors: Vec<AuthorResponse>,
    // identifier scheme -> values; exposed as a free-form object in OpenAPI.
    #[schema(value_type = Object)]
    pub identifiers: FxHashMap<String, Vec<String>>,
}
impl From<BookMetadata> for BookMetadataResponse {
fn from(meta: BookMetadata) -> Self {
Self {
media_id: meta.media_id.0,
isbn: meta.isbn,
isbn13: meta.isbn13,
publisher: meta.publisher,
language: meta.language,
page_count: meta.page_count,
publication_date: meta.publication_date.map(|d| d.to_string()),
series_name: meta.series_name,
series_index: meta.series_index,
format: meta.format,
authors: meta
.authors
.into_iter()
.map(AuthorResponse::from)
.collect(),
identifiers: meta.identifiers,
}
}
}
/// Author response DTO
// Wire representation of `pinakes_core::model::AuthorInfo`.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct AuthorResponse {
    pub name: String,
    pub role: String,
    // Sort key ("file as") variant of the name, when known.
    pub file_as: Option<String>,
    // Ordering of this author among the book's authors.
    pub position: i32,
}
// Field-for-field copy; the DTO mirrors the core struct exactly.
impl From<AuthorInfo> for AuthorResponse {
    fn from(info: AuthorInfo) -> Self {
        Self {
            position: info.position,
            file_as: info.file_as,
            role: info.role,
            name: info.name,
        }
    }
}
/// Reading progress response DTO
// Wire representation of `pinakes_core::model::ReadingProgress`.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct ReadingProgressResponse {
    pub media_id: Uuid,
    pub user_id: Uuid,
    pub current_page: i32,
    pub total_pages: Option<i32>,
    pub progress_percent: f64,
    // RFC 3339 string (formatted with `to_rfc3339` in the From impl).
    pub last_read_at: String,
}
// Core model -> wire DTO; the timestamp is serialized as RFC 3339.
impl From<ReadingProgress> for ReadingProgressResponse {
    fn from(value: ReadingProgress) -> Self {
        let last_read_at = value.last_read_at.to_rfc3339();
        Self {
            media_id: value.media_id.0,
            user_id: value.user_id,
            current_page: value.current_page,
            total_pages: value.total_pages,
            progress_percent: value.progress_percent,
            last_read_at,
        }
    }
}
/// Update reading progress request
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct UpdateProgressRequest {
    // Validated to be non-negative in `update_reading_progress`.
    pub current_page: i32,
}
/// Search books query parameters
// All filters are optional; they are forwarded as-is to
// `storage.search_books` (see `list_books`).
#[derive(Debug, Deserialize)]
pub struct SearchBooksQuery {
    pub isbn: Option<String>,
    pub author: Option<String>,
    pub series: Option<String>,
    pub publisher: Option<String>,
    pub language: Option<String>,
    // Pagination defaults applied when the query string omits the field.
    #[serde(default = "default_offset")]
    pub offset: u64,
    #[serde(default = "default_limit")]
    pub limit: u64,
}
// serde's `default = "..."` attribute requires a function path, hence these
// trivial const fns instead of literals.
const fn default_offset() -> u64 {
    0
}
const fn default_limit() -> u64 {
    50
}
/// Series summary DTO
// One row of the series listing: series name plus how many books it holds.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SeriesSummary {
    pub name: String,
    pub book_count: u64,
}
/// Author summary DTO
// One row of the author listing: author name plus book count.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct AuthorSummary {
    pub name: String,
    pub book_count: u64,
}
/// Get book metadata by media ID
#[utoipa::path(
    get,
    path = "/api/v1/books/{id}/metadata",
    tag = "books",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Book metadata", body = BookMetadataResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_book_metadata(
    State(state): State<AppState>,
    Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
    let media_id = MediaId(media_id);
    // `ok_or_else` so the NotFound error (and its String allocation) is only
    // built when the lookup actually misses (clippy::or_fun_call).
    let metadata = state
        .storage
        .get_book_metadata(media_id)
        .await?
        .ok_or_else(|| {
            ApiError(PinakesError::NotFound(
                "Book metadata not found".to_string(),
            ))
        })?;
    Ok(Json(BookMetadataResponse::from(metadata)))
}
/// List all books with optional search filters
#[utoipa::path(
    get,
    path = "/api/v1/books",
    tag = "books",
    params(
        ("isbn" = Option<String>, Query, description = "Filter by ISBN"),
        ("author" = Option<String>, Query, description = "Filter by author"),
        ("series" = Option<String>, Query, description = "Filter by series"),
        ("publisher" = Option<String>, Query, description = "Filter by publisher"),
        ("language" = Option<String>, Query, description = "Filter by language"),
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Pagination limit"),
    ),
    responses(
        (status = 200, description = "List of books", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_books(
    State(state): State<AppState>,
    Query(query): Query<SearchBooksQuery>,
) -> Result<impl IntoResponse, ApiError> {
    // Clamp client-supplied pagination to server limits.
    let pagination = Pagination {
        offset: query.offset.min(MAX_OFFSET),
        limit: query.limit.min(1000),
        sort: None,
    };
    let items = state
        .storage
        .search_books(
            query.isbn.as_deref(),
            query.author.as_deref(),
            query.series.as_deref(),
            query.publisher.as_deref(),
            query.language.as_deref(),
            &pagination,
        )
        .await?;
    let roots = state.config.read().await.directories.roots.clone();
    let mut response = Vec::with_capacity(items.len());
    for item in items {
        response.push(MediaResponse::new(item, &roots));
    }
    Ok(Json(response))
}
/// List all series with book counts
#[utoipa::path(
    get,
    path = "/api/v1/books/series",
    tag = "books",
    responses(
        (status = 200, description = "List of series with counts", body = Vec<SeriesSummary>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_series(
    State(state): State<AppState>,
) -> Result<impl IntoResponse, ApiError> {
    // Storage yields (name, count) pairs; map straight into the DTO.
    let response: Vec<SeriesSummary> = state
        .storage
        .list_series()
        .await?
        .into_iter()
        .map(|(name, book_count)| SeriesSummary { name, book_count })
        .collect();
    Ok(Json(response))
}
/// Get books in a specific series
#[utoipa::path(
    get,
    path = "/api/v1/books/series/{name}",
    tag = "books",
    params(("name" = String, Path, description = "Series name")),
    responses(
        (status = 200, description = "Books in series", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_series_books(
    State(state): State<AppState>,
    Path(series_name): Path<String>,
) -> Result<impl IntoResponse, ApiError> {
    let items = state.storage.get_series_books(&series_name).await?;
    // Roots are needed to map stored paths into client-visible ones.
    let roots = state.config.read().await.directories.roots.clone();
    let mut response = Vec::with_capacity(items.len());
    for item in items {
        response.push(MediaResponse::new(item, &roots));
    }
    Ok(Json(response))
}
/// List all authors with book counts
#[utoipa::path(
    get,
    path = "/api/v1/books/authors",
    tag = "books",
    params(
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Pagination limit"),
    ),
    responses(
        (status = 200, description = "Authors with book counts", body = Vec<AuthorSummary>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_authors(
    State(state): State<AppState>,
    Query(pagination): Query<Pagination>,
) -> Result<impl IntoResponse, ApiError> {
    // Storage yields (name, count) pairs; map straight into the DTO.
    let response: Vec<AuthorSummary> = state
        .storage
        .list_all_authors(&pagination)
        .await?
        .into_iter()
        .map(|(name, book_count)| AuthorSummary { name, book_count })
        .collect();
    Ok(Json(response))
}
/// Get books by a specific author
#[utoipa::path(
    get,
    path = "/api/v1/books/authors/{name}/books",
    tag = "books",
    params(
        ("name" = String, Path, description = "Author name"),
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Pagination limit"),
    ),
    responses(
        (status = 200, description = "Books by author", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_author_books(
    State(state): State<AppState>,
    Path(author_name): Path<String>,
    Query(pagination): Query<Pagination>,
) -> Result<impl IntoResponse, ApiError> {
    // Reuse the generic book search with only the author filter set.
    let items = state
        .storage
        .search_books(None, Some(&author_name), None, None, None, &pagination)
        .await?;
    let roots = state.config.read().await.directories.roots.clone();
    let mut response = Vec::with_capacity(items.len());
    for item in items {
        response.push(MediaResponse::new(item, &roots));
    }
    Ok(Json(response))
}
/// Get reading progress for a book
#[utoipa::path(
    get,
    path = "/api/v1/books/{id}/progress",
    tag = "books",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Reading progress", body = ReadingProgressResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_reading_progress(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let media_id = MediaId(media_id);
    // `ok_or_else` so the NotFound error (and its String allocation) is only
    // built when the lookup actually misses (clippy::or_fun_call).
    let progress = state
        .storage
        .get_reading_progress(user_id.0, media_id)
        .await?
        .ok_or_else(|| {
            ApiError(PinakesError::NotFound(
                "Reading progress not found".to_string(),
            ))
        })?;
    Ok(Json(ReadingProgressResponse::from(progress)))
}
/// Update reading progress for a book
#[utoipa::path(
    put,
    path = "/api/v1/books/{id}/progress",
    tag = "books",
    params(("id" = Uuid, Path, description = "Media item ID")),
    request_body = UpdateProgressRequest,
    responses(
        (status = 204, description = "Progress updated"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn update_reading_progress(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(media_id): Path<Uuid>,
    Json(req): Json<UpdateProgressRequest>,
) -> Result<impl IntoResponse, ApiError> {
    if req.current_page.is_negative() {
        return Err(ApiError::bad_request("current_page must be non-negative"));
    }
    let reader = resolve_user_id(&state.storage, &username).await?;
    let media_id = MediaId(media_id);
    // Verify the media item exists before writing progress; a FK violation from
    // the storage layer would otherwise surface as a 500 rather than 404.
    state.storage.get_media(media_id).await?;
    state
        .storage
        .update_reading_progress(reader.0, media_id, req.current_page)
        .await?;
    Ok(StatusCode::NO_CONTENT)
}
/// Get user's reading list
#[utoipa::path(
    get,
    path = "/api/v1/books/reading-list",
    tag = "books",
    params(("status" = Option<String>, Query, description = "Filter by reading status. Valid values: to_read, reading, completed, abandoned")),
    responses(
        (status = 200, description = "Reading list", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_reading_list(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Query(params): Query<ReadingListQuery>,
) -> Result<impl IntoResponse, ApiError> {
    let reader = resolve_user_id(&state.storage, &username).await?;
    // `status == None` returns the full list, otherwise filtered by status.
    let items = state
        .storage
        .get_reading_list(reader.0, params.status)
        .await?;
    let roots = state.config.read().await.directories.roots.clone();
    let mut response = Vec::with_capacity(items.len());
    for item in items {
        response.push(MediaResponse::new(item, &roots));
    }
    Ok(Json(response))
}
// Query parameters for GET /api/v1/books/reading-list; `status` deserializes
// into the core `ReadingStatus` enum and is optional (no filter when absent).
#[derive(Debug, Deserialize)]
pub struct ReadingListQuery {
    pub status: Option<ReadingStatus>,
}
/// Build the books router. Paths are relative to the mount point; static
/// segments (`/series`, `/authors`, `/reading-list`) take priority over the
/// `/{id}` captures in axum's matcher, so registration order is irrelevant.
pub fn routes() -> Router<AppState> {
    Router::new()
        // Browse routes
        .route("/", get(list_books))
        .route("/series", get(list_series))
        .route("/series/{name}", get(get_series_books))
        .route("/authors", get(list_authors))
        .route("/authors/{name}/books", get(get_author_books))
        .route("/reading-list", get(get_reading_list))
        // Metadata route
        .route("/{id}/metadata", get(get_book_metadata))
        // Reading progress: both verbs share one method router
        .route(
            "/{id}/progress",
            get(get_reading_progress).put(update_reading_progress),
        )
}

View file

@ -0,0 +1,233 @@
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::{CollectionKind, MediaId};
use uuid::Uuid;
use crate::{
dto::{
AddMemberRequest,
CollectionResponse,
CreateCollectionRequest,
MediaResponse,
},
error::ApiError,
state::AppState,
};
#[utoipa::path(
post,
path = "/api/v1/collections",
tag = "collections",
request_body = CreateCollectionRequest,
responses(
(status = 200, description = "Collection created", body = CollectionResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn create_collection(
State(state): State<AppState>,
Json(req): Json<CreateCollectionRequest>,
) -> Result<Json<CollectionResponse>, ApiError> {
if req.name.is_empty() || req.name.len() > 255 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"collection name must be 1-255 characters".into(),
),
));
}
if let Some(ref desc) = req.description
&& desc.len() > 10_000
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"description exceeds 10000 characters".into(),
),
));
}
let kind = match req.kind.as_str() {
"virtual" => CollectionKind::Virtual,
_ => CollectionKind::Manual,
};
let col = pinakes_core::collections::create_collection(
&state.storage,
&req.name,
kind,
req.description.as_deref(),
req.filter_query.as_deref(),
)
.await?;
state.emit_plugin_event(
"CollectionCreated",
&serde_json::json!({
"id": col.id.to_string(),
"name": col.name,
}),
);
Ok(Json(CollectionResponse::from(col)))
}
#[utoipa::path(
    get,
    path = "/api/v1/collections",
    tag = "collections",
    responses(
        (status = 200, description = "List of collections", body = Vec<CollectionResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_collections(
    State(state): State<AppState>,
) -> Result<Json<Vec<CollectionResponse>>, ApiError> {
    // Map every stored collection into its wire DTO.
    let body: Vec<CollectionResponse> = state
        .storage
        .list_collections()
        .await?
        .into_iter()
        .map(CollectionResponse::from)
        .collect();
    Ok(Json(body))
}
#[utoipa::path(
    get,
    path = "/api/v1/collections/{id}",
    tag = "collections",
    params(("id" = Uuid, Path, description = "Collection ID")),
    responses(
        (status = 200, description = "Collection", body = CollectionResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_collection(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<CollectionResponse>, ApiError> {
    // Storage surfaces the 404 case as an error; nothing to map here.
    let collection = state.storage.get_collection(id).await?;
    Ok(Json(collection.into()))
}
#[utoipa::path(
    delete,
    path = "/api/v1/collections/{id}",
    tag = "collections",
    params(("id" = Uuid, Path, description = "Collection ID")),
    responses(
        (status = 200, description = "Collection deleted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn delete_collection(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Delete first; the plugin event only fires on success.
    state.storage.delete_collection(id).await?;
    let payload = serde_json::json!({"id": id.to_string()});
    state.emit_plugin_event("CollectionDeleted", &payload);
    Ok(Json(serde_json::json!({"deleted": true})))
}
#[utoipa::path(
    post,
    path = "/api/v1/collections/{id}/members",
    tag = "collections",
    params(("id" = Uuid, Path, description = "Collection ID")),
    request_body = AddMemberRequest,
    responses(
        (status = 200, description = "Member added"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn add_member(
    State(state): State<AppState>,
    Path(collection_id): Path<Uuid>,
    Json(req): Json<AddMemberRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Position defaults to 0 when the client does not specify one.
    let position = req.position.unwrap_or(0);
    pinakes_core::collections::add_member(
        &state.storage,
        collection_id,
        MediaId(req.media_id),
        position,
    )
    .await?;
    Ok(Json(serde_json::json!({"added": true})))
}
#[utoipa::path(
    delete,
    path = "/api/v1/collections/{id}/members/{media_id}",
    tag = "collections",
    params(
        ("id" = Uuid, Path, description = "Collection ID"),
        ("media_id" = Uuid, Path, description = "Media item ID"),
    ),
    responses(
        (status = 200, description = "Member removed"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn remove_member(
    State(state): State<AppState>,
    Path((collection_id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Delegate membership bookkeeping to the core collections module.
    let member = MediaId(media_id);
    pinakes_core::collections::remove_member(&state.storage, collection_id, member)
        .await?;
    Ok(Json(serde_json::json!({"removed": true})))
}
#[utoipa::path(
    get,
    path = "/api/v1/collections/{id}/members",
    tag = "collections",
    params(("id" = Uuid, Path, description = "Collection ID")),
    responses(
        (status = 200, description = "Collection members", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_members(
    State(state): State<AppState>,
    Path(collection_id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let items =
        pinakes_core::collections::get_members(&state.storage, collection_id)
            .await?;
    // Roots are needed to map stored paths into client-visible ones.
    let roots = state.config.read().await.directories.roots.clone();
    let mut body = Vec::with_capacity(items.len());
    for item in items {
        body.push(MediaResponse::new(item, &roots));
    }
    Ok(Json(body))
}

View file

@ -0,0 +1,292 @@
use axum::{Json, extract::State};
use crate::{
dto::{
ConfigResponse,
RootDirRequest,
ScanningConfigResponse,
ServerConfigResponse,
UiConfigResponse,
UpdateScanningRequest,
UpdateUiConfigRequest,
},
error::ApiError,
state::AppState,
};
#[utoipa::path(
    get,
    path = "/api/v1/config",
    tag = "config",
    responses(
        (status = 200, description = "Current server configuration", body = ConfigResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_config(
    State(state): State<AppState>,
) -> Result<Json<ConfigResponse>, ApiError> {
    let config = state.config.read().await;
    let roots = state.storage.list_root_dirs().await?;
    let config_path = state
        .config_path
        .as_ref()
        .map(|p| p.to_string_lossy().to_string());
    // Writable when the config file itself — or, if it does not exist yet,
    // its parent directory — is not marked read-only. No path => not writable.
    let config_writable = state.config_path.as_ref().is_some_and(|path| {
        if path.exists() {
            std::fs::metadata(path).is_ok_and(|m| !m.permissions().readonly())
        } else {
            path.parent().is_some_and(|parent| {
                std::fs::metadata(parent)
                    .is_ok_and(|m| !m.permissions().readonly())
            })
        }
    });
    let root_strings: Vec<String> = roots
        .iter()
        .map(|p| p.to_string_lossy().to_string())
        .collect();
    Ok(Json(ConfigResponse {
        backend: config.storage.backend.to_string(),
        database_path: config
            .storage
            .sqlite
            .as_ref()
            .map(|s| s.path.to_string_lossy().to_string()),
        roots: root_strings,
        scanning: ScanningConfigResponse {
            watch: config.scanning.watch,
            poll_interval_secs: config.scanning.poll_interval_secs,
            ignore_patterns: config.scanning.ignore_patterns.clone(),
        },
        server: ServerConfigResponse {
            host: config.server.host.clone(),
            port: config.server.port,
        },
        ui: UiConfigResponse::from(&config.ui),
        config_path,
        config_writable,
    }))
}
#[utoipa::path(
    get,
    path = "/api/v1/config/ui",
    tag = "config",
    responses(
        (status = 200, description = "UI configuration", body = UiConfigResponse),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_ui_config(
    State(state): State<AppState>,
) -> Result<Json<UiConfigResponse>, ApiError> {
    // Hold the shared config lock only long enough to build the response.
    let guard = state.config.read().await;
    let response = UiConfigResponse::from(&guard.ui);
    Ok(Json(response))
}
#[utoipa::path(
    patch,
    path = "/api/v1/config/ui",
    tag = "config",
    request_body = UpdateUiConfigRequest,
    responses(
        (status = 200, description = "Updated UI configuration", body = UiConfigResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn update_ui_config(
    State(state): State<AppState>,
    Json(req): Json<UpdateUiConfigRequest>,
) -> Result<Json<UiConfigResponse>, ApiError> {
    // PATCH semantics: only fields present in the body are applied.
    let mut config = state.config.write().await;
    if let Some(value) = req.theme {
        config.ui.theme = value;
    }
    if let Some(value) = req.default_view {
        config.ui.default_view = value;
    }
    if let Some(value) = req.default_page_size {
        config.ui.default_page_size = value;
    }
    if let Some(value) = req.default_view_mode {
        config.ui.default_view_mode = value;
    }
    if let Some(value) = req.auto_play_media {
        config.ui.auto_play_media = value;
    }
    if let Some(value) = req.show_thumbnails {
        config.ui.show_thumbnails = value;
    }
    if let Some(value) = req.sidebar_collapsed {
        config.ui.sidebar_collapsed = value;
    }
    // Persist the merged settings when a config file path is available.
    if let Some(ref path) = state.config_path {
        config.save_to_file(path).map_err(ApiError)?;
    }
    Ok(Json(UiConfigResponse::from(&config.ui)))
}
#[utoipa::path(
patch,
path = "/api/v1/config/scanning",
tag = "config",
request_body = UpdateScanningRequest,
responses(
(status = 200, description = "Updated configuration", body = ConfigResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn update_scanning_config(
State(state): State<AppState>,
Json(req): Json<UpdateScanningRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
let mut config = state.config.write().await;
if let Some(watch) = req.watch {
config.scanning.watch = watch;
}
if let Some(interval) = req.poll_interval_secs {
config.scanning.poll_interval_secs = interval;
}
if let Some(patterns) = req.ignore_patterns {
config.scanning.ignore_patterns = patterns;
}
// Persist to disk if we have a config path
if let Some(ref path) = state.config_path {
config.save_to_file(path).map_err(ApiError)?;
}
let roots = state.storage.list_root_dirs().await?;
let config_path = state
.config_path
.as_ref()
.map(|p| p.to_string_lossy().to_string());
let config_writable = match &state.config_path {
Some(path) => {
if path.exists() {
std::fs::metadata(path).is_ok_and(|m| !m.permissions().readonly())
} else {
path.parent().is_some_and(|parent| {
std::fs::metadata(parent).is_ok_and(|m| !m.permissions().readonly())
})
}
},
None => false,
};
Ok(Json(ConfigResponse {
backend: config.storage.backend.to_string(),
database_path: config
.storage
.sqlite
.as_ref()
.map(|s| s.path.to_string_lossy().to_string()),
roots: roots
.iter()
.map(|p| p.to_string_lossy().to_string())
.collect(),
scanning: ScanningConfigResponse {
watch: config.scanning.watch,
poll_interval_secs: config.scanning.poll_interval_secs,
ignore_patterns: config.scanning.ignore_patterns.clone(),
},
server: ServerConfigResponse {
host: config.server.host.clone(),
port: config.server.port,
},
ui: UiConfigResponse::from(&config.ui),
config_path,
config_writable,
}))
}
#[utoipa::path(
    post,
    path = "/api/v1/config/roots",
    tag = "config",
    request_body = RootDirRequest,
    responses(
        (status = 200, description = "Updated configuration", body = ConfigResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn add_root(
    State(state): State<AppState>,
    Json(req): Json<RootDirRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
    let root = std::path::PathBuf::from(&req.path);
    // Reject paths that do not exist on disk.
    if !root.exists() {
        return Err(ApiError(pinakes_core::error::PinakesError::FileNotFound(
            root,
        )));
    }
    state.storage.add_root_dir(root.clone()).await?;
    {
        let mut config = state.config.write().await;
        // De-duplicate: only record the root once in the config file.
        if !config.directories.roots.contains(&root) {
            config.directories.roots.push(root);
        }
        if let Some(ref config_path) = state.config_path {
            config.save_to_file(config_path).map_err(ApiError)?;
        }
    }
    get_config(State(state)).await
}
#[utoipa::path(
    delete,
    path = "/api/v1/config/roots",
    tag = "config",
    request_body = RootDirRequest,
    responses(
        (status = 200, description = "Updated configuration", body = ConfigResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn remove_root(
    State(state): State<AppState>,
    Json(req): Json<RootDirRequest>,
) -> Result<Json<ConfigResponse>, ApiError> {
    let root = std::path::PathBuf::from(&req.path);
    // Drop the root from storage first, then mirror the change in the
    // on-disk config.
    state.storage.remove_root_dir(&root).await?;
    {
        let mut config = state.config.write().await;
        config.directories.roots.retain(|known| known != &root);
        if let Some(ref config_path) = state.config_path {
            config.save_to_file(config_path).map_err(ApiError)?;
        }
    }
    get_config(State(state)).await
}

View file

@ -0,0 +1,69 @@
use axum::{Json, extract::State};
use crate::{dto::DatabaseStatsResponse, error::ApiError, state::AppState};
#[utoipa::path(
    get,
    path = "/api/v1/admin/database/stats",
    tag = "database",
    responses(
        (status = 200, description = "Database statistics", body = DatabaseStatsResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn database_stats(
    State(state): State<AppState>,
) -> Result<Json<DatabaseStatsResponse>, ApiError> {
    // Field-by-field translation of the storage layer's stats record into
    // the API response shape.
    let stats = state.storage.database_stats().await?;
    let response = DatabaseStatsResponse {
        media_count: stats.media_count,
        tag_count: stats.tag_count,
        collection_count: stats.collection_count,
        audit_count: stats.audit_count,
        database_size_bytes: stats.database_size_bytes,
        backend_name: stats.backend_name,
    };
    Ok(Json(response))
}
#[utoipa::path(
    post,
    path = "/api/v1/admin/database/vacuum",
    tag = "database",
    responses(
        (status = 200, description = "Database vacuumed"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn vacuum_database(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Delegate the vacuum to the storage backend and report success.
    state.storage.vacuum().await?;
    let body = serde_json::json!({"status": "ok"});
    Ok(Json(body))
}
#[utoipa::path(
    post,
    path = "/api/v1/admin/database/clear",
    tag = "database",
    responses(
        (status = 200, description = "Database cleared"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
// Irreversibly wipes all stored data via `clear_all_data`. Logged before
// and after at `error` level -- presumably chosen so this destructive
// admin action always surfaces in logs; confirm that is intentional
// rather than a mistaken severity.
pub async fn clear_database(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    tracing::error!("clear_database: all data is being wiped by admin request");
    state.storage.clear_all_data().await?;
    tracing::error!("clear_database: all data wiped successfully");
    Ok(Json(serde_json::json!({"status": "ok"})))
}

View file

@ -0,0 +1,45 @@
use axum::{Json, extract::State};
use crate::{
dto::{DuplicateGroupResponse, MediaResponse},
error::ApiError,
state::AppState,
};
#[utoipa::path(
    get,
    path = "/api/v1/media/duplicates",
    tag = "duplicates",
    responses(
        (status = 200, description = "Duplicate groups", body = Vec<DuplicateGroupResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_duplicates(
    State(state): State<AppState>,
) -> Result<Json<Vec<DuplicateGroupResponse>>, ApiError> {
    let groups = state.storage.find_duplicates().await?;
    let roots = state.config.read().await.directories.roots.clone();
    let mut response = Vec::with_capacity(groups.len());
    for group in groups {
        // Every item in a group shares one content hash; take it from the
        // first entry (empty groups fall back to the default value).
        let content_hash = group
            .first()
            .map(|item| item.content_hash.0.clone())
            .unwrap_or_default();
        let items: Vec<MediaResponse> = group
            .into_iter()
            .map(|item| MediaResponse::new(item, &roots))
            .collect();
        response.push(DuplicateGroupResponse {
            content_hash,
            items,
        });
    }
    Ok(Json(response))
}

View file

@ -0,0 +1,100 @@
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid;
use crate::{
dto::{BatchDeleteRequest, ExternalMetadataResponse},
error::ApiError,
state::AppState,
};
#[utoipa::path(
    post,
    path = "/api/v1/media/{id}/enrich",
    tag = "enrichment",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Enrichment job submitted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn trigger_enrichment(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Enrichment runs asynchronously; hand a single-item batch to the
    // background job queue and return the job handle.
    let kind = pinakes_core::jobs::JobKind::Enrich {
        media_ids: vec![MediaId(id)],
    };
    let job_id = state.job_queue.submit(kind).await;
    Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/metadata/external",
    tag = "enrichment",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "External metadata", body = Vec<ExternalMetadataResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_external_metadata(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<ExternalMetadataResponse>>, ApiError> {
    // Fetch stored external-provider metadata and convert each record into
    // its response representation.
    let records = state.storage.get_external_metadata(MediaId(id)).await?;
    let response: Vec<ExternalMetadataResponse> =
        records.into_iter().map(Into::into).collect();
    Ok(Json(response))
}
#[utoipa::path(
    post,
    path = "/api/v1/media/enrich/batch",
    tag = "enrichment",
    request_body = BatchDeleteRequest,
    responses(
        (status = 200, description = "Enrichment job submitted"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn batch_enrich(
    State(state): State<AppState>,
    Json(req): Json<BatchDeleteRequest>, // Reuse: has media_ids field
) -> Result<Json<serde_json::Value>, ApiError> {
    // Bound the batch: an empty list is a no-op and an oversized one could
    // enqueue excessive background work.
    if !(1..=1000).contains(&req.media_ids.len()) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "media_ids must contain 1-1000 items".into(),
            ),
        ));
    }
    let media_ids: Vec<MediaId> =
        req.media_ids.into_iter().map(MediaId).collect();
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::Enrich { media_ids })
        .await;
    Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}

View file

@ -0,0 +1,68 @@
use std::path::PathBuf;
use axum::{Json, extract::State};
use serde::Deserialize;
use crate::{error::ApiError, state::AppState};
// Request body for `trigger_export_with_options`.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct ExportRequest {
    // Format selector: "csv" maps to CSV, anything else falls back to JSON
    // (see `trigger_export_with_options`).
    pub format: String,
    // Client-supplied filesystem path the export is written to.
    #[schema(value_type = String)]
    pub destination: PathBuf,
}
#[utoipa::path(
    post,
    path = "/api/v1/export",
    tag = "export",
    responses(
        (status = 200, description = "Export job submitted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn trigger_export(
    State(state): State<AppState>,
    ) -> Result<Json<serde_json::Value>, ApiError> {
    // Default export to JSON in data dir
    let destination =
        pinakes_core::config::Config::default_data_dir().join("export.json");
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::Export {
            format: pinakes_core::jobs::ExportFormat::Json,
            destination,
        })
        .await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[utoipa::path(
    post,
    path = "/api/v1/export/options",
    tag = "export",
    request_body = ExportRequest,
    responses(
        (status = 200, description = "Export job submitted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
// Submits an export job with a caller-chosen format and destination path.
pub async fn trigger_export_with_options(
    State(state): State<AppState>,
    Json(req): Json<ExportRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Unknown format strings deliberately fall back to JSON instead of 400.
    let format = match req.format.as_str() {
        "csv" => pinakes_core::jobs::ExportFormat::Csv,
        _ => pinakes_core::jobs::ExportFormat::Json,
    };
    // NOTE(review): `req.destination` is a client-supplied filesystem path
    // forwarded to the job unvalidated; confirm the export job restricts
    // where it may write (otherwise this is an arbitrary-write primitive).
    let kind = pinakes_core::jobs::JobKind::Export {
        format,
        destination: req.destination,
    };
    let job_id = state.job_queue.submit(kind).await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}

View file

@ -0,0 +1,255 @@
use std::time::Instant;
use axum::{Json, extract::State, http::StatusCode, response::IntoResponse};
use serde::{Deserialize, Serialize};
use crate::state::AppState;
/// Basic health check response
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct HealthResponse {
    // "ok" or "degraded" (set by the `health` handler).
    pub status: String,
    // Server crate version, taken from CARGO_PKG_VERSION.
    pub version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub database: Option<DatabaseHealth>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub filesystem: Option<FilesystemHealth>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cache: Option<CacheHealth>,
}
// Result of the database connectivity probe.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct DatabaseHealth {
    // "ok" or "error: <details>".
    pub status: String,
    // Round-trip time of the probe query, in milliseconds.
    pub latency_ms: u64,
    // Total media rows; absent when the probe failed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub media_count: Option<u64>,
}
// Accessibility of the configured media root directories.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct FilesystemHealth {
    // "ok" when every configured root exists, else "degraded".
    pub status: String,
    pub roots_configured: usize,
    pub roots_accessible: usize,
}
// In-process cache statistics snapshot.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct CacheHealth {
    // Overall cache hit rate; assumed 0.0-1.0 per
    // `CacheStats::overall_hit_rate` -- TODO confirm range.
    pub hit_rate: f64,
    pub total_entries: u64,
    pub responses_size: u64,
    pub queries_size: u64,
    pub media_size: u64,
}
/// Comprehensive health check - includes database, filesystem, and cache status
#[utoipa::path(
    get,
    path = "/api/v1/health",
    tag = "health",
    responses(
        (status = 200, description = "Health status", body = HealthResponse),
    )
)]
pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
    let mut overall = "ok";

    // Database: time a cheap count query to measure round-trip latency.
    let started = Instant::now();
    let database = match state.storage.count_media().await {
        Ok(count) => DatabaseHealth {
            status: "ok".to_string(),
            latency_ms: started.elapsed().as_millis() as u64,
            media_count: Some(count),
        },
        Err(e) => {
            overall = "degraded";
            DatabaseHealth {
                status: format!("error: {e}"),
                latency_ms: started.elapsed().as_millis() as u64,
                media_count: None,
            }
        },
    };

    // Filesystem: every configured root directory should still exist.
    let roots: Vec<std::path::PathBuf> =
        state.storage.list_root_dirs().await.unwrap_or_default();
    let reachable = roots.iter().filter(|r| r.exists()).count();
    if reachable < roots.len() {
        overall = "degraded";
    }
    let filesystem = FilesystemHealth {
        status: if reachable == roots.len() {
            "ok"
        } else {
            "degraded"
        }
        .to_string(),
        roots_configured: roots.len(),
        roots_accessible: reachable,
    };

    // Cache statistics come straight from the in-process cache.
    let stats = state.cache.stats();
    let cache = CacheHealth {
        hit_rate: stats.overall_hit_rate(),
        total_entries: stats.total_entries(),
        responses_size: stats.responses.size,
        queries_size: stats.queries.size,
        media_size: stats.media.size,
    };

    Json(HealthResponse {
        status: overall.to_string(),
        version: env!("CARGO_PKG_VERSION").to_string(),
        database: Some(database),
        filesystem: Some(filesystem),
        cache: Some(cache),
    })
}
/// Liveness probe - just checks if the server is running
/// Returns 200 OK if the server process is alive
#[utoipa::path(
    get,
    path = "/api/v1/health/live",
    tag = "health",
    responses(
        (status = 200, description = "Server is alive"),
    )
)]
pub async fn liveness() -> impl IntoResponse {
    // No dependencies are checked; reaching this handler proves liveness.
    let body = serde_json::json!({
        "status": "alive"
    });
    (StatusCode::OK, Json(body))
}
/// Readiness probe - checks if the server can serve requests
/// Returns 200 OK if database is accessible
#[utoipa::path(
    get,
    path = "/api/v1/health/ready",
    tag = "health",
    responses(
        (status = 200, description = "Server is ready"),
        (status = 503, description = "Server not ready"),
    )
)]
pub async fn readiness(State(state): State<AppState>) -> impl IntoResponse {
    // A failing database probe means the server cannot serve requests.
    let started = Instant::now();
    if let Err(e) = state.storage.count_media().await {
        return (
            StatusCode::SERVICE_UNAVAILABLE,
            Json(serde_json::json!({
                "status": "not_ready",
                "reason": e.to_string()
            })),
        );
    }
    let latency = started.elapsed().as_millis() as u64;
    (
        StatusCode::OK,
        Json(serde_json::json!({
            "status": "ready",
            "database_latency_ms": latency
        })),
    )
}
/// Detailed health check for monitoring dashboards
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct DetailedHealthResponse {
    // "ok" when the database responds and all roots are accessible,
    // otherwise "degraded" (see `health_detailed`).
    pub status: String,
    pub version: String,
    // Always 0 at present; server start time is not tracked yet.
    pub uptime_seconds: u64,
    pub database: DatabaseHealth,
    pub filesystem: FilesystemHealth,
    pub cache: CacheHealth,
    pub jobs: JobsHealth,
}
// Snapshot of job-queue occupancy.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct JobsHealth {
    pub pending: usize,
    pub running: usize,
}
#[utoipa::path(
    get,
    path = "/api/v1/health/detailed",
    tag = "health",
    responses(
        (status = 200, description = "Detailed health status", body = DetailedHealthResponse),
    )
)]
pub async fn health_detailed(
    State(state): State<AppState>,
) -> Json<DetailedHealthResponse> {
    // Probe the database and time the round trip.
    let probe_start = Instant::now();
    let (db_status, media_count) = match state.storage.count_media().await {
        Ok(count) => ("ok".to_string(), Some(count)),
        Err(e) => (format!("error: {e}"), None),
    };
    let db_latency = probe_start.elapsed().as_millis() as u64;

    // Check that every configured root directory still exists.
    let roots = state.storage.list_root_dirs().await.unwrap_or_default();
    let accessible = roots.iter().filter(|r| r.exists()).count();
    let all_roots_ok = accessible == roots.len();

    let cache_stats = state.cache.stats();
    let job_stats = state.job_queue.stats().await;

    // Overall status degrades if either subsystem check failed.
    let overall = if db_status == "ok" && all_roots_ok {
        "ok"
    } else {
        "degraded"
    };

    Json(DetailedHealthResponse {
        status: overall.to_string(),
        version: env!("CARGO_PKG_VERSION").to_string(),
        uptime_seconds: 0, // Could track server start time
        database: DatabaseHealth {
            status: db_status,
            latency_ms: db_latency,
            media_count,
        },
        filesystem: FilesystemHealth {
            status: if all_roots_ok { "ok" } else { "degraded" }.to_string(),
            roots_configured: roots.len(),
            roots_accessible: accessible,
        },
        cache: CacheHealth {
            hit_rate: cache_stats.overall_hit_rate(),
            total_entries: cache_stats.total_entries(),
            responses_size: cache_stats.responses.size,
            queries_size: cache_stats.queries.size,
            media_size: cache_stats.media.size,
        },
        jobs: JobsHealth {
            pending: job_stats.pending,
            running: job_stats.running,
        },
    })
}

View file

@ -0,0 +1,161 @@
use axum::{Json, extract::State};
use serde::Deserialize;
use crate::{error::ApiError, state::AppState};
// Request body for `resolve_orphans`.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct OrphanResolveRequest {
    // "delete" removes the orphaned entries; any other value ignores them
    // (see `resolve_orphans`).
    pub action: String,
    // Media IDs of the orphaned items to act on.
    pub ids: Vec<uuid::Uuid>,
}
#[utoipa::path(
    post,
    path = "/api/v1/admin/integrity/orphans/detect",
    tag = "integrity",
    responses(
        (status = 200, description = "Orphan detection job submitted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn trigger_orphan_detection(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Detection is long-running; enqueue it and return the job handle.
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::OrphanDetection)
        .await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
#[utoipa::path(
post,
path = "/api/v1/admin/integrity/verify",
tag = "integrity",
request_body = VerifyIntegrityRequest,
responses(
(status = 200, description = "Integrity verification job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn trigger_verify_integrity(
State(state): State<AppState>,
Json(req): Json<VerifyIntegrityRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let media_ids = req
.media_ids
.into_iter()
.map(pinakes_core::model::MediaId)
.collect();
let kind = pinakes_core::jobs::JobKind::VerifyIntegrity { media_ids };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
// Request body for `trigger_verify_integrity`.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct VerifyIntegrityRequest {
    // Media IDs to include in the integrity-verification job.
    pub media_ids: Vec<uuid::Uuid>,
}
#[utoipa::path(
    post,
    path = "/api/v1/admin/integrity/thumbnails/cleanup",
    tag = "integrity",
    responses(
        (status = 200, description = "Thumbnail cleanup job submitted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn trigger_cleanup_thumbnails(
    State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Cleanup runs in the background; return the job handle immediately.
    let job_id = state
        .job_queue
        .submit(pinakes_core::jobs::JobKind::CleanupThumbnails)
        .await;
    Ok(Json(serde_json::json!({ "job_id": job_id.to_string() })))
}
// Optional request body for `generate_all_thumbnails`.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct GenerateThumbnailsRequest {
    /// When true, only generate thumbnails for items that don't have one yet.
    /// When false (default), regenerate all thumbnails.
    #[serde(default)]
    pub only_missing: bool,
}
#[utoipa::path(
post,
path = "/api/v1/admin/integrity/thumbnails/generate",
tag = "integrity",
request_body = GenerateThumbnailsRequest,
responses(
(status = 200, description = "Thumbnail generation job submitted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn generate_all_thumbnails(
State(state): State<AppState>,
body: Option<Json<GenerateThumbnailsRequest>>,
) -> Result<Json<serde_json::Value>, ApiError> {
let only_missing = body.is_some_and(|b| b.only_missing);
let media_ids = state
.storage
.list_media_ids_for_thumbnails(only_missing)
.await?;
let count = media_ids.len();
if count == 0 {
return Ok(Json(serde_json::json!({
"job_id": null,
"media_count": 0,
"message": "no media items to process"
})));
}
let kind = pinakes_core::jobs::JobKind::GenerateThumbnails { media_ids };
let job_id = state.job_queue.submit(kind).await;
Ok(Json(serde_json::json!({
"job_id": job_id.to_string(),
"media_count": count
})))
}
#[utoipa::path(
    post,
    path = "/api/v1/admin/integrity/orphans/resolve",
    tag = "integrity",
    request_body = OrphanResolveRequest,
    responses(
        (status = 200, description = "Orphans resolved"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn resolve_orphans(
    State(state): State<AppState>,
    Json(req): Json<OrphanResolveRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Any action other than "delete" is treated as "ignore".
    let action = if req.action == "delete" {
        pinakes_core::integrity::OrphanAction::Delete
    } else {
        pinakes_core::integrity::OrphanAction::Ignore
    };
    let ids: Vec<pinakes_core::model::MediaId> = req
        .ids
        .into_iter()
        .map(pinakes_core::model::MediaId)
        .collect();
    let count =
        pinakes_core::integrity::resolve_orphans(&state.storage, action, &ids)
            .await
            .map_err(ApiError)?;
    Ok(Json(serde_json::json!({ "resolved": count })))
}

View file

@ -0,0 +1,75 @@
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::jobs::Job;
use crate::{error::ApiError, state::AppState};
#[utoipa::path(
    get,
    path = "/api/v1/jobs",
    tag = "jobs",
    responses(
        (status = 200, description = "List of jobs"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_jobs(State(state): State<AppState>) -> Json<Vec<Job>> {
    // Snapshot of every job currently known to the queue.
    let jobs = state.job_queue.list().await;
    Json(jobs)
}
#[utoipa::path(
get,
path = "/api/v1/jobs/{id}",
tag = "jobs",
params(("id" = uuid::Uuid, Path, description = "Job ID")),
responses(
(status = 200, description = "Job details"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_job(
State(state): State<AppState>,
Path(id): Path<uuid::Uuid>,
) -> Result<Json<Job>, ApiError> {
state.job_queue.status(id).await.map(Json).ok_or_else(|| {
pinakes_core::error::PinakesError::NotFound(format!("job not found: {id}"))
.into()
})
}
#[utoipa::path(
    post,
    path = "/api/v1/jobs/{id}/cancel",
    tag = "jobs",
    params(("id" = uuid::Uuid, Path, description = "Job ID")),
    responses(
        (status = 200, description = "Job cancelled"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn cancel_job(
    State(state): State<AppState>,
    Path(id): Path<uuid::Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // `cancel` returns false for unknown IDs and for jobs that already
    // reached a terminal state.
    if state.job_queue.cancel(id).await {
        return Ok(Json(serde_json::json!({ "cancelled": true })));
    }
    Err(
        pinakes_core::error::PinakesError::NotFound(format!(
            "job not found or already finished: {id}"
        ))
        .into(),
    )
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,34 @@
//! HTTP route handler modules, one per API resource area.
//!
//! Each module's handlers are annotated with `utoipa::path` and wired into
//! the central OpenAPI document.
pub mod analytics;
pub mod audit;
pub mod auth;
pub mod backup;
pub mod books;
pub mod collections;
pub mod config;
pub mod database;
pub mod duplicates;
pub mod enrichment;
pub mod export;
pub mod health;
pub mod integrity;
pub mod jobs;
pub mod media;
pub mod notes;
pub mod photos;
pub mod playlists;
pub mod plugins;
pub mod saved_searches;
pub mod scan;
pub mod scheduled_tasks;
pub mod search;
pub mod shares;
pub mod social;
pub mod statistics;
pub mod streaming;
pub mod subtitles;
pub mod sync;
pub mod tags;
pub mod transcode;
pub mod upload;
pub mod users;
pub mod webhooks;

View file

@ -0,0 +1,408 @@
//! API endpoints for Obsidian-style markdown notes features.
//!
//! Provides endpoints for:
//! - Backlinks (what links to this note)
//! - Outgoing links (what this note links to)
//! - Graph visualization data
//! - Link reindexing
use axum::{
Json,
Router,
extract::{Path, Query, State},
routing::{get, post},
};
use pinakes_core::model::{
BacklinkInfo,
GraphData,
GraphEdge,
GraphNode,
MarkdownLink,
MediaId,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{error::ApiError, state::AppState};
/// Response for backlinks query
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BacklinksResponse {
    // Links pointing at the requested note.
    pub backlinks: Vec<BacklinkItem>,
    // Convenience count; equals `backlinks.len()`.
    pub count: usize,
}
/// Individual backlink item
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct BacklinkItem {
    // ID of the link record itself.
    pub link_id: Uuid,
    // Media item that contains the link.
    pub source_id: Uuid,
    pub source_title: Option<String>,
    pub source_path: String,
    // Display text of the link, if any.
    pub link_text: Option<String>,
    // Line of the link within the source; indexing base (0 or 1) is not
    // evident here -- TODO confirm against the extractor.
    pub line_number: Option<i32>,
    // Surrounding text snippet, when captured.
    pub context: Option<String>,
    // Stringified core link type (via `Display`).
    pub link_type: String,
}
// Field-by-field mapping from the core `BacklinkInfo` record; the only
// conversions are unwrapping the `MediaId` newtype and stringifying the
// link type.
impl From<BacklinkInfo> for BacklinkItem {
    fn from(info: BacklinkInfo) -> Self {
        Self {
            link_id: info.link_id,
            source_id: info.source_id.0,
            source_title: info.source_title,
            source_path: info.source_path,
            link_text: info.link_text,
            line_number: info.line_number,
            context: info.context,
            link_type: info.link_type.to_string(),
        }
    }
}
/// Response for outgoing links query
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct OutgoingLinksResponse {
    // Links originating from the requested note.
    pub links: Vec<OutgoingLinkItem>,
    // Convenience count; equals `links.len()`.
    pub count: usize,
}
/// Individual outgoing link item
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct OutgoingLinkItem {
    pub id: Uuid,
    // Raw link target as written in the note.
    pub target_path: String,
    // Media item the target resolved to, if any.
    pub target_id: Option<Uuid>,
    pub link_text: Option<String>,
    pub line_number: Option<i32>,
    pub link_type: String,
    // True when `target_id` is present (see the `From` impl below).
    pub is_resolved: bool,
}
// Maps a core `MarkdownLink` into the wire shape; `is_resolved` is derived
// from whether a target media ID was bound.
impl From<MarkdownLink> for OutgoingLinkItem {
    fn from(link: MarkdownLink) -> Self {
        Self {
            id: link.id,
            target_path: link.target_path,
            target_id: link.target_media_id.map(|id| id.0),
            link_text: link.link_text,
            line_number: link.line_number,
            link_type: link.link_type.to_string(),
            is_resolved: link.target_media_id.is_some(),
        }
    }
}
/// Response for graph visualization
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct GraphResponse {
    pub nodes: Vec<GraphNodeResponse>,
    pub edges: Vec<GraphEdgeResponse>,
    // Convenience totals; equal nodes.len() / edges.len().
    pub node_count: usize,
    pub edge_count: usize,
}
/// Graph node for visualization
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct GraphNodeResponse {
    pub id: String,
    pub label: String,
    pub title: Option<String>,
    pub media_type: String,
    // Outgoing link count for this node.
    pub link_count: u32,
    // Incoming link count for this node.
    pub backlink_count: u32,
}
// Field-by-field mapping from the core graph node.
impl From<GraphNode> for GraphNodeResponse {
    fn from(node: GraphNode) -> Self {
        Self {
            id: node.id,
            label: node.label,
            title: node.title,
            media_type: node.media_type,
            link_count: node.link_count,
            backlink_count: node.backlink_count,
        }
    }
}
/// Graph edge for visualization
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct GraphEdgeResponse {
    // Source and target are node IDs in `GraphResponse::nodes`.
    pub source: String,
    pub target: String,
    pub link_type: String,
}
// Maps a core graph edge, stringifying its link type.
impl From<GraphEdge> for GraphEdgeResponse {
    fn from(edge: GraphEdge) -> Self {
        Self {
            source: edge.source,
            target: edge.target,
            link_type: edge.link_type.to_string(),
        }
    }
}
impl From<GraphData> for GraphResponse {
fn from(data: GraphData) -> Self {
let node_count = data.nodes.len();
let edge_count = data.edges.len();
Self {
nodes: data
.nodes
.into_iter()
.map(GraphNodeResponse::from)
.collect(),
edges: data
.edges
.into_iter()
.map(GraphEdgeResponse::from)
.collect(),
node_count,
edge_count,
}
}
}
/// Query parameters for graph endpoint
#[derive(Debug, Deserialize)]
pub struct GraphQuery {
    /// Center node ID (optional, if not provided returns entire graph)
    pub center: Option<Uuid>,
    /// Depth of traversal from center (default: 2, max: 5)
    // NOTE(review): the "max: 5" above is not enforced in this struct;
    // presumably the handler clamps it -- confirm.
    #[serde(default = "default_depth")]
    pub depth: u32,
}
// Serde default for `GraphQuery::depth`.
const fn default_depth() -> u32 {
    2
}
/// Response for reindex operation
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ReindexResponse {
    /// Human-readable outcome ("Links extracted successfully" or a skip note).
    pub message: String,
    /// Number of links found in the file (0 when skipped).
    pub links_extracted: usize,
}
/// Response for link resolution
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct ResolveLinksResponse {
    /// How many previously-unresolved links were resolved by this run.
    pub resolved_count: u64,
}
/// Response for unresolved links count
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct UnresolvedLinksResponse {
    /// Number of links that could not yet be resolved to a media item.
    pub count: u64,
}
/// Return every incoming link (backlink) pointing at a media item.
///
/// GET /api/v1/media/{id}/backlinks
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/backlinks",
    tag = "notes",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Backlinks", body = BacklinksResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_backlinks(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<BacklinksResponse>, ApiError> {
    let rows = state.storage.get_backlinks(MediaId(id)).await?;
    let backlinks: Vec<BacklinkItem> =
        rows.into_iter().map(BacklinkItem::from).collect();
    Ok(Json(BacklinksResponse {
        count: backlinks.len(),
        backlinks,
    }))
}
/// Return every outgoing link recorded for a media item.
///
/// GET /api/v1/media/{id}/outgoing-links
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/outgoing-links",
    tag = "notes",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Outgoing links", body = OutgoingLinksResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_outgoing_links(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<OutgoingLinksResponse>, ApiError> {
    let rows = state.storage.get_outgoing_links(MediaId(id)).await?;
    let links: Vec<OutgoingLinkItem> =
        rows.into_iter().map(OutgoingLinkItem::from).collect();
    Ok(Json(OutgoingLinksResponse {
        count: links.len(),
        links,
    }))
}
/// Return link-graph data for visualization, optionally centered on a node.
///
/// GET /api/v1/notes/graph?center={uuid}&depth={n}
#[utoipa::path(
    get,
    path = "/api/v1/notes/graph",
    tag = "notes",
    params(
        ("center" = Option<Uuid>, Query, description = "Center node ID"),
        ("depth" = Option<u32>, Query, description = "Traversal depth (max 5, default 2)"),
    ),
    responses(
        (status = 200, description = "Graph data", body = GraphResponse),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_graph(
    State(state): State<AppState>,
    Query(params): Query<GraphQuery>,
) -> Result<Json<GraphResponse>, ApiError> {
    // Clamp the requested depth to the documented maximum of 5.
    let depth = std::cmp::min(params.depth, 5);
    let center = params.center.map(MediaId);
    let data = state.storage.get_graph_data(center, depth).await?;
    Ok(Json(data.into()))
}
/// Re-extract wiki/markdown links from a single media item.
///
/// POST /api/v1/media/{id}/reindex-links
#[utoipa::path(
    post,
    path = "/api/v1/media/{id}/reindex-links",
    tag = "notes",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Links reindexed", body = ReindexResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn reindex_links(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<ReindexResponse>, ApiError> {
    use pinakes_core::media_type::{BuiltinMediaType, MediaType};
    let media_id = MediaId(id);
    // Fetch the item so we know its type and on-disk path.
    let media = state.storage.get_media(media_id).await?;
    // Link extraction only applies to markdown files; anything else is a no-op.
    if !matches!(
        media.media_type,
        MediaType::Builtin(BuiltinMediaType::Markdown)
    ) {
        return Ok(Json(ReindexResponse {
            message: "Skipped: not a markdown file".to_string(),
            links_extracted: 0,
        }));
    }
    let content = tokio::fs::read_to_string(&media.path)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to read file: {e}")))?;
    let links = pinakes_core::links::extract_links(media_id, &content);
    let links_extracted = links.len();
    // Persist the fresh links, flag extraction as done, then attempt
    // resolution of any still-dangling links across the database.
    state.storage.save_markdown_links(media_id, &links).await?;
    state.storage.mark_links_extracted(media_id).await?;
    state.storage.resolve_links().await?;
    Ok(Json(ReindexResponse {
        message: "Links extracted successfully".to_string(),
        links_extracted,
    }))
}
/// Attempt to resolve every unresolved link in the database.
///
/// POST /api/v1/notes/resolve-links
#[utoipa::path(
    post,
    path = "/api/v1/notes/resolve-links",
    tag = "notes",
    responses(
        (status = 200, description = "Links resolved", body = ResolveLinksResponse),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn resolve_links(
    State(state): State<AppState>,
) -> Result<Json<ResolveLinksResponse>, ApiError> {
    let resolved_count = state.storage.resolve_links().await?;
    let body = ResolveLinksResponse { resolved_count };
    Ok(Json(body))
}
/// Get count of unresolved links.
///
/// GET /api/v1/notes/unresolved-count
#[utoipa::path(
get,
path = "/api/v1/notes/unresolved-count",
tag = "notes",
responses(
(status = 200, description = "Unresolved link count", body = UnresolvedLinksResponse),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Internal server error"),
),
security(("bearer_auth" = []))
)]
pub async fn get_unresolved_count(
State(state): State<AppState>,
) -> Result<Json<UnresolvedLinksResponse>, ApiError> {
let count = state.storage.count_unresolved_links().await?;
Ok(Json(UnresolvedLinksResponse { count }))
}
/// Build the router for notes/links functionality.
///
/// All three paths are distinct, so registration order is irrelevant.
pub fn routes() -> Router<AppState> {
    Router::new()
        .route("/unresolved-count", get(get_unresolved_count))
        .route("/resolve-links", post(resolve_links))
        .route("/graph", get(get_graph))
}

View file

@ -0,0 +1,247 @@
use axum::{
Json,
Router,
extract::{Query, State},
response::IntoResponse,
routing::get,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use crate::{dto::MediaResponse, error::ApiError, state::AppState};
/// Timeline grouping mode
///
/// Deserialized from the lowercase query value (`day`, `month`, `year`);
/// defaults to [`GroupBy::Day`].
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum GroupBy {
    #[default]
    Day,
    Month,
    Year,
}
/// Timeline query parameters
#[derive(Debug, Deserialize)]
pub struct TimelineQuery {
    /// Bucket granularity for grouping; defaults to per-day.
    #[serde(default)]
    pub group_by: GroupBy,
    /// Optional filter: only include photos taken in this year.
    pub year: Option<i32>,
    /// Optional filter: only include photos taken in this month
    /// (compared against chrono's 1-12 month numbers).
    pub month: Option<u32>,
    /// Maximum media rows fetched (default 10000; handler caps at 50000).
    #[serde(default = "default_timeline_limit")]
    pub limit: u64,
}
/// Serde default for [`TimelineQuery::limit`].
const fn default_timeline_limit() -> u64 {
    10000
}
/// Timeline group response
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct TimelineGroup {
    /// Formatted bucket key ("%Y-%m-%d", "%Y-%m", or "%Y" per `group_by`).
    pub date: String,
    /// Number of photos in this bucket.
    pub count: usize,
    /// ID of the first item in the bucket, used as the cover image.
    pub cover_id: Option<String>,
    /// The photos themselves.
    pub items: Vec<MediaResponse>,
}
/// Map query parameters
///
/// The two lat/lon pairs are opposite corners of a bounding box; the
/// handler normalizes them with min/max, so either corner ordering works.
#[derive(Debug, Deserialize)]
pub struct MapQuery {
    pub lat1: f64,
    pub lon1: f64,
    pub lat2: f64,
    pub lon2: f64,
}
/// Map marker response
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct MapMarker {
    /// Media item ID as a string UUID.
    pub id: String,
    pub latitude: f64,
    pub longitude: f64,
    /// Thumbnail endpoint URL, present only when a thumbnail exists.
    pub thumbnail_url: Option<String>,
    /// Capture timestamp, when known.
    pub date_taken: Option<DateTime<Utc>>,
}
#[utoipa::path(
    get,
    path = "/api/v1/photos/timeline",
    tag = "photos",
    params(
        ("group_by" = Option<String>, Query, description = "Grouping: day, month, year"),
        ("year" = Option<i32>, Query, description = "Filter by year"),
        ("month" = Option<u32>, Query, description = "Filter by month"),
        ("limit" = Option<u64>, Query, description = "Max items (default 10000)"),
    ),
    responses(
        (status = 200, description = "Photo timeline groups", body = Vec<TimelineGroup>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
/// Get timeline of photos grouped by date
pub async fn get_timeline(
    State(state): State<AppState>,
    Query(query): Query<TimelineQuery>,
) -> Result<impl IntoResponse, ApiError> {
    // Query photos with date_taken (limit is configurable, defaults to 10000)
    let all_media = state
        .storage
        .list_media(&pinakes_core::model::Pagination {
            offset: 0,
            limit: query.limit.min(50000), // Cap at 50000 for safety
            sort: Some("date_taken DESC".to_string()),
        })
        .await?;
    // Filter to only photos with date_taken
    let photos: Vec<_> = all_media
        .into_iter()
        .filter(|item| {
            item.date_taken.is_some()
                && item.media_type.category()
                    == pinakes_core::media_type::MediaCategory::Image
        })
        .collect();
    // Group by the requested period.
    // Key is the formatted date string; value is the photos in that bucket.
    let mut groups: rustc_hash::FxHashMap<
        String,
        Vec<pinakes_core::model::MediaItem>,
    > = rustc_hash::FxHashMap::default();
    for photo in photos {
        // `date_taken` is guaranteed Some by the filter above; the `if let`
        // doubles as a way to get the value out.
        if let Some(date_taken) = photo.date_taken {
            use chrono::Datelike;
            // Filter by year/month if specified
            if let Some(y) = query.year
                && date_taken.year() != y
            {
                continue;
            }
            if let Some(m) = query.month
                && date_taken.month() != m
            {
                continue;
            }
            // Bucket key granularity follows the requested grouping mode.
            let key = match query.group_by {
                GroupBy::Day => date_taken.format("%Y-%m-%d").to_string(),
                GroupBy::Month => date_taken.format("%Y-%m").to_string(),
                GroupBy::Year => date_taken.format("%Y").to_string(),
            };
            groups.entry(key).or_default().push(photo);
        }
    }
    // Convert to response format; roots are needed to relativize media paths.
    let roots = state.config.read().await.directories.roots.clone();
    let mut timeline: Vec<TimelineGroup> = groups
        .into_iter()
        .map(|(date, items)| {
            // First item of the bucket serves as the cover image.
            let cover_id = items.first().map(|i| i.id.0.to_string());
            let count = items.len();
            let items: Vec<MediaResponse> = items
                .into_iter()
                .map(|item| MediaResponse::new(item, &roots))
                .collect();
            TimelineGroup {
                date,
                count,
                cover_id,
                items,
            }
        })
        .collect();
    // Sort by date descending (string compare works because keys are
    // zero-padded "%Y[-%m[-%d]]" strings).
    timeline.sort_by(|a, b| b.date.cmp(&a.date));
    Ok(Json(timeline))
}
#[utoipa::path(
    get,
    path = "/api/v1/photos/map",
    tag = "photos",
    params(
        ("lat1" = f64, Query, description = "Bounding box latitude 1"),
        ("lon1" = f64, Query, description = "Bounding box longitude 1"),
        ("lat2" = f64, Query, description = "Bounding box latitude 2"),
        ("lon2" = f64, Query, description = "Bounding box longitude 2"),
    ),
    responses(
        (status = 200, description = "Map markers", body = Vec<MapMarker>),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
/// Get photos in a bounding box for map view
pub async fn get_map_photos(
    State(state): State<AppState>,
    Query(query): Query<MapQuery>,
) -> Result<impl IntoResponse, ApiError> {
    // Reject non-finite or out-of-range coordinates up front.
    let lat_ok = |v: f64| v.is_finite() && (-90.0..=90.0).contains(&v);
    let lon_ok = |v: f64| v.is_finite() && (-180.0..=180.0).contains(&v);
    if !lat_ok(query.lat1) || !lat_ok(query.lat2) {
        return Err(ApiError::bad_request("latitude must be in [-90, 90]"));
    }
    if !lon_ok(query.lon1) || !lon_ok(query.lon2) {
        return Err(ApiError::bad_request("longitude must be in [-180, 180]"));
    }
    // Normalize the two corners into an inclusive bounding box.
    let lat_range = query.lat1.min(query.lat2)..=query.lat1.max(query.lat2);
    let lon_range = query.lon1.min(query.lon2)..=query.lon1.max(query.lon2);
    // Fetch media in bulk; filtering happens in-memory for now (a
    // dedicated DB query could optimize this later).
    let all_media = state
        .storage
        .list_media(&pinakes_core::model::Pagination {
            offset: 0,
            limit: 10000,
            sort: None,
        })
        .await?;
    // Keep only items with GPS coordinates inside the bounding box.
    let markers: Vec<MapMarker> = all_media
        .into_iter()
        .filter_map(|item| {
            let lat = item.latitude?;
            let lon = item.longitude?;
            if !(lat_range.contains(&lat) && lon_range.contains(&lon)) {
                return None;
            }
            Some(MapMarker {
                id: item.id.0.to_string(),
                latitude: lat,
                longitude: lon,
                thumbnail_url: item
                    .thumbnail_path
                    .map(|_| format!("/api/v1/media/{}/thumbnail", item.id.0)),
                date_taken: item.date_taken,
            })
        })
        .collect();
    Ok(Json(markers))
}
/// Build the router for photo-specific endpoints.
///
/// Paths are distinct, so registration order does not matter.
pub fn routes() -> Router<AppState> {
    Router::new()
        .route("/map", get(get_map_photos))
        .route("/timeline", get(get_timeline))
}

View file

@ -0,0 +1,366 @@
use axum::{
Json,
extract::{Extension, Path, State},
};
use pinakes_core::{model::MediaId, playlists::Playlist, users::UserId};
use uuid::Uuid;
use crate::{
auth::resolve_user_id,
dto::{
CreatePlaylistRequest,
MediaResponse,
PlaylistItemRequest,
PlaylistResponse,
ReorderPlaylistRequest,
UpdatePlaylistRequest,
},
error::ApiError,
state::AppState,
};
/// Check whether a user has access to a playlist.
///
/// # Arguments
///
/// * `require_write` - when `true` only the playlist owner is allowed (for
///   mutations such as update, delete, add/remove/reorder items). When
///   `false` the playlist must either be public or owned by the requester.
///
/// # Errors
///
/// Returns an authorization error when access is denied, or propagates
/// storage errors (e.g. playlist not found).
async fn check_playlist_access(
    storage: &pinakes_core::storage::DynStorageBackend,
    playlist_id: Uuid,
    user_id: UserId,
    require_write: bool,
) -> Result<Playlist, ApiError> {
    let playlist = storage.get_playlist(playlist_id).await.map_err(ApiError)?;
    let is_owner = playlist.owner_id == user_id;
    // Writes demand ownership; reads additionally accept public playlists.
    let denial = if require_write && !is_owner {
        Some("only the playlist owner can modify this playlist")
    } else if !require_write && !is_owner && !playlist.is_public {
        Some("playlist is private")
    } else {
        None
    };
    if let Some(reason) = denial {
        return Err(ApiError(pinakes_core::error::PinakesError::Authorization(
            reason.into(),
        )));
    }
    Ok(playlist)
}
#[utoipa::path(
    post,
    path = "/api/v1/playlists",
    tag = "playlists",
    request_body = CreatePlaylistRequest,
    responses(
        (status = 200, description = "Playlist created", body = PlaylistResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
/// Create a playlist owned by the authenticated user.
pub async fn create_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<CreatePlaylistRequest>,
) -> Result<Json<PlaylistResponse>, ApiError> {
    // Name must be 1-255 characters (counted as chars, not bytes).
    let name_len = req.name.chars().count();
    if !(1..=255).contains(&name_len) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "playlist name must be 1-255 characters".into(),
            ),
        ));
    }
    let owner_id = resolve_user_id(&state.storage, &username).await?;
    // Visibility and smart-filter options default to off when omitted.
    let created = state
        .storage
        .create_playlist(
            owner_id,
            &req.name,
            req.description.as_deref(),
            req.is_public.unwrap_or(false),
            req.is_smart.unwrap_or(false),
            req.filter_query.as_deref(),
        )
        .await?;
    Ok(Json(created.into()))
}
#[utoipa::path(
    get,
    path = "/api/v1/playlists",
    tag = "playlists",
    responses(
        (status = 200, description = "List of playlists", body = Vec<PlaylistResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
/// List playlists visible to the authenticated user.
pub async fn list_playlists(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> Result<Json<Vec<PlaylistResponse>>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    // A playlist is visible when it is public or owned by the caller.
    let mut visible = Vec::new();
    for playlist in state.storage.list_playlists(None).await? {
        if playlist.is_public || playlist.owner_id == user_id {
            visible.push(PlaylistResponse::from(playlist));
        }
    }
    Ok(Json(visible))
}
#[utoipa::path(
    get,
    path = "/api/v1/playlists/{id}",
    tag = "playlists",
    params(("id" = Uuid, Path, description = "Playlist ID")),
    responses(
        (status = 200, description = "Playlist details", body = PlaylistResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
/// Fetch one playlist, enforcing read visibility (public or owned).
pub async fn get_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<PlaylistResponse>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let playlist =
        check_playlist_access(&state.storage, id, user_id, false).await?;
    Ok(Json(playlist.into()))
}
#[utoipa::path(
patch,
path = "/api/v1/playlists/{id}",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
request_body = UpdatePlaylistRequest,
responses(
(status = 200, description = "Playlist updated", body = PlaylistResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn update_playlist(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<UpdatePlaylistRequest>,
) -> Result<Json<PlaylistResponse>, ApiError> {
if let Some(ref name) = req.name
&& (name.is_empty() || name.chars().count() > 255)
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"playlist name must be 1-255 characters".into(),
),
));
}
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
let playlist = state
.storage
.update_playlist(
id,
req.name.as_deref(),
req.description.as_deref(),
req.is_public,
)
.await?;
Ok(Json(PlaylistResponse::from(playlist)))
}
#[utoipa::path(
    delete,
    path = "/api/v1/playlists/{id}",
    tag = "playlists",
    params(("id" = Uuid, Path, description = "Playlist ID")),
    responses(
        (status = 200, description = "Playlist deleted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
/// Delete a playlist; only the owner may do this.
pub async fn delete_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, user_id, true).await?;
    state.storage.delete_playlist(id).await?;
    let body = serde_json::json!({"deleted": true});
    Ok(Json(body))
}
#[utoipa::path(
post,
path = "/api/v1/playlists/{id}/items",
tag = "playlists",
params(("id" = Uuid, Path, description = "Playlist ID")),
request_body = PlaylistItemRequest,
responses(
(status = 200, description = "Item added"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn add_item(
State(state): State<AppState>,
Extension(username): Extension<String>,
Path(id): Path<Uuid>,
Json(req): Json<PlaylistItemRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id = resolve_user_id(&state.storage, &username).await?;
check_playlist_access(&state.storage, id, user_id, true).await?;
let position = if let Some(p) = req.position {
p
} else {
let items = state.storage.get_playlist_items(id).await?;
items.len() as i32
};
state
.storage
.add_to_playlist(id, MediaId(req.media_id), position)
.await?;
Ok(Json(serde_json::json!({"added": true})))
}
#[utoipa::path(
    delete,
    path = "/api/v1/playlists/{id}/items/{media_id}",
    tag = "playlists",
    params(
        ("id" = Uuid, Path, description = "Playlist ID"),
        ("media_id" = Uuid, Path, description = "Media item ID"),
    ),
    responses(
        (status = 200, description = "Item removed"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
/// Remove a media item from a playlist; only the owner may do this.
pub async fn remove_item(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path((id, media_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, user_id, true).await?;
    let target = MediaId(media_id);
    state.storage.remove_from_playlist(id, target).await?;
    let body = serde_json::json!({"removed": true});
    Ok(Json(body))
}
#[utoipa::path(
    get,
    path = "/api/v1/playlists/{id}/items",
    tag = "playlists",
    params(("id" = Uuid, Path, description = "Playlist ID")),
    responses(
        (status = 200, description = "Playlist items", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
/// List a playlist's items, enforcing read visibility (public or owned).
pub async fn list_items(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, user_id, false).await?;
    let items = state.storage.get_playlist_items(id).await?;
    // Roots are needed so MediaResponse can relativize on-disk paths.
    let roots = state.config.read().await.directories.roots.clone();
    let mut responses = Vec::with_capacity(items.len());
    for item in items {
        responses.push(MediaResponse::new(item, &roots));
    }
    Ok(Json(responses))
}
#[utoipa::path(
    patch,
    path = "/api/v1/playlists/{id}/items/reorder",
    tag = "playlists",
    params(("id" = Uuid, Path, description = "Playlist ID")),
    request_body = ReorderPlaylistRequest,
    responses(
        (status = 200, description = "Item reordered"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
/// Move one item to a new position within a playlist; owner only.
pub async fn reorder_item(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<ReorderPlaylistRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, user_id, true).await?;
    let target = MediaId(req.media_id);
    state
        .storage
        .reorder_playlist(id, target, req.new_position)
        .await?;
    let body = serde_json::json!({"reordered": true});
    Ok(Json(body))
}
#[utoipa::path(
    post,
    path = "/api/v1/playlists/{id}/shuffle",
    tag = "playlists",
    params(("id" = Uuid, Path, description = "Playlist ID")),
    responses(
        (status = 200, description = "Shuffled playlist items", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
/// Return a playlist's items in random order (the stored order is not
/// modified). Read visibility applies: public or owned.
pub async fn shuffle_playlist(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    use rand::seq::SliceRandom;
    let user_id = resolve_user_id(&state.storage, &username).await?;
    check_playlist_access(&state.storage, id, user_id, false).await?;
    let mut items = state.storage.get_playlist_items(id).await?;
    items.shuffle(&mut rand::rng());
    let roots = state.config.read().await.directories.roots.clone();
    let shuffled: Vec<MediaResponse> = items
        .into_iter()
        .map(|item| MediaResponse::new(item, &roots))
        .collect();
    Ok(Json(shuffled))
}

View file

@ -0,0 +1,345 @@
use std::sync::Arc;
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::plugin::PluginManager;
use rustc_hash::FxHashMap;
use crate::{
dto::{
InstallPluginRequest,
PluginEventRequest,
PluginResponse,
PluginUiPageEntry,
PluginUiWidgetEntry,
TogglePluginRequest,
},
error::ApiError,
state::AppState,
};
fn require_plugin_manager(
state: &AppState,
) -> Result<Arc<PluginManager>, ApiError> {
state.plugin_manager.clone().ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Plugin system is not enabled".to_string(),
))
})
}
/// List all installed plugins together with their enabled state.
#[utoipa::path(
    get,
    path = "/api/v1/plugins",
    tag = "plugins",
    responses(
        (status = 200, description = "List of plugins", body = Vec<PluginResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_plugins(
    State(state): State<AppState>,
) -> Result<Json<Vec<PluginResponse>>, ApiError> {
    let manager = require_plugin_manager(&state)?;
    let metas = manager.list_plugins().await;
    let mut out = Vec::with_capacity(metas.len());
    for meta in metas {
        // Enabled state is looked up per plugin id.
        let enabled = manager.is_plugin_enabled(&meta.id).await;
        out.push(PluginResponse::new(meta, enabled));
    }
    Ok(Json(out))
}
/// Get a specific plugin by ID
#[utoipa::path(
get,
path = "/api/v1/plugins/{id}",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
responses(
(status = 200, description = "Plugin details", body = PluginResponse),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<PluginResponse>, ApiError> {
let plugin_manager = require_plugin_manager(&state)?;
let plugin = plugin_manager.get_plugin(&id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"Plugin not found: {id}"
)))
})?;
let enabled = plugin_manager.is_plugin_enabled(&id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
}
/// Install a plugin from URL or file path
#[utoipa::path(
post,
path = "/api/v1/plugins",
tag = "plugins",
request_body = InstallPluginRequest,
responses(
(status = 200, description = "Plugin installed", body = PluginResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn install_plugin(
State(state): State<AppState>,
Json(req): Json<InstallPluginRequest>,
) -> Result<Json<PluginResponse>, ApiError> {
let plugin_manager = require_plugin_manager(&state)?;
let plugin_id =
plugin_manager
.install_plugin(&req.source)
.await
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to install plugin: {e}"),
))
})?;
let plugin =
plugin_manager.get_plugin(&plugin_id).await.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(
"Plugin installed but not found".to_string(),
))
})?;
let enabled = plugin_manager.is_plugin_enabled(&plugin_id).await;
Ok(Json(PluginResponse::new(plugin, enabled)))
}
/// Uninstall a plugin
#[utoipa::path(
delete,
path = "/api/v1/plugins/{id}",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
responses(
(status = 200, description = "Plugin uninstalled"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn uninstall_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = require_plugin_manager(&state)?;
plugin_manager.uninstall_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to uninstall plugin: {e}"),
))
})?;
Ok(Json(serde_json::json!({"uninstalled": true})))
}
/// Enable or disable a plugin
#[utoipa::path(
patch,
path = "/api/v1/plugins/{id}/toggle",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
request_body = TogglePluginRequest,
responses(
(status = 200, description = "Plugin toggled"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn toggle_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<TogglePluginRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = require_plugin_manager(&state)?;
if req.enabled {
plugin_manager.enable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to enable plugin: {e}"),
))
})?;
} else {
plugin_manager.disable_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to disable plugin: {e}"),
))
})?;
}
// Re-discover capabilities after toggle so cached data stays current
if let Some(ref pipeline) = state.plugin_pipeline
&& let Err(e) = pipeline.discover_capabilities().await
{
tracing::warn!(
plugin_id = %id,
error = %e,
"failed to re-discover capabilities after plugin toggle"
);
}
Ok(Json(serde_json::json!({
"id": id,
"enabled": req.enabled
})))
}
/// List every UI page contributed by loaded plugins, including the
/// endpoints each page is allowed to call.
#[utoipa::path(
    get,
    path = "/api/v1/plugins/ui/pages",
    tag = "plugins",
    responses(
        (status = 200, description = "Plugin UI pages", body = Vec<PluginUiPageEntry>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_plugin_ui_pages(
    State(state): State<AppState>,
) -> Result<Json<Vec<PluginUiPageEntry>>, ApiError> {
    let manager = require_plugin_manager(&state)?;
    let mut entries = Vec::new();
    for (plugin_id, page, allowed_endpoints) in
        manager.list_ui_pages_with_endpoints().await
    {
        entries.push(PluginUiPageEntry {
            plugin_id,
            page,
            allowed_endpoints,
        });
    }
    Ok(Json(entries))
}
/// List every UI widget contributed by loaded plugins.
#[utoipa::path(
    get,
    path = "/api/v1/plugins/ui/widgets",
    tag = "plugins",
    responses(
        (status = 200, description = "Plugin UI widgets", body = Vec<PluginUiWidgetEntry>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_plugin_ui_widgets(
    State(state): State<AppState>,
) -> Result<Json<Vec<PluginUiWidgetEntry>>, ApiError> {
    let manager = require_plugin_manager(&state)?;
    let mut entries = Vec::new();
    for (plugin_id, widget) in manager.list_ui_widgets().await {
        entries.push(PluginUiWidgetEntry { plugin_id, widget });
    }
    Ok(Json(entries))
}
/// Receive a plugin event emitted from the UI and dispatch it to interested
/// server-side event-handler plugins via the pipeline.
#[utoipa::path(
    post,
    path = "/api/v1/plugins/events",
    tag = "plugins",
    request_body = PluginEventRequest,
    responses(
        (status = 200, description = "Event received"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn emit_plugin_event(
    State(state): State<AppState>,
    Json(req): Json<PluginEventRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    tracing::info!(event = %req.event, "plugin UI event received");
    // Fire-and-forget dispatch; the response only acknowledges receipt.
    state.emit_plugin_event(&req.event, &req.payload);
    let ack = serde_json::json!({ "received": true, "event": req.event });
    Ok(Json(ack))
}
/// Return the merged CSS custom-property overrides from all enabled plugins.
#[utoipa::path(
    get,
    path = "/api/v1/plugins/ui/theme",
    tag = "plugins",
    responses(
        (status = 200, description = "Plugin UI theme extensions"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_plugin_ui_theme_extensions(
    State(state): State<AppState>,
) -> Result<Json<FxHashMap<String, String>>, ApiError> {
    let manager = require_plugin_manager(&state)?;
    let theme = manager.list_ui_theme_extensions().await;
    Ok(Json(theme))
}
/// Reload a plugin (for development)
#[utoipa::path(
post,
path = "/api/v1/plugins/{id}/reload",
tag = "plugins",
params(("id" = String, Path, description = "Plugin ID")),
responses(
(status = 200, description = "Plugin reloaded"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn reload_plugin(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let plugin_manager = require_plugin_manager(&state)?;
plugin_manager.reload_plugin(&id).await.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("Failed to reload plugin: {e}"),
))
})?;
// Re-discover capabilities after reload so cached data stays current
if let Some(ref pipeline) = state.plugin_pipeline
&& let Err(e) = pipeline.discover_capabilities().await
{
tracing::warn!(
plugin_id = %id,
error = %e,
"failed to re-discover capabilities after plugin reload"
);
}
Ok(Json(serde_json::json!({"reloaded": true})))
}

View file

@ -0,0 +1,150 @@
use axum::{
Json,
extract::{Path, State},
};
use serde::{Deserialize, Serialize};
use crate::{error::ApiError, state::AppState};
/// Request body for creating a saved search.
#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct CreateSavedSearchRequest {
    /// Display name; the handler validates it to 1-255 characters.
    pub name: String,
    /// Raw search query string; the handler validates it to 1-2048 bytes.
    pub query: String,
    /// Optional sort order; must be one of `VALID_SORT_ORDERS` when present.
    pub sort_order: Option<String>,
}
/// Serialized representation of a saved search returned by the API.
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct SavedSearchResponse {
    /// Saved search ID as a string-formatted UUID.
    pub id: String,
    pub name: String,
    pub query: String,
    pub sort_order: Option<String>,
    pub created_at: chrono::DateTime<chrono::Utc>,
}
/// Sort orders accepted for a saved search's `sort_order` field.
/// These string keys match the ones mapped by `resolve_sort` in the
/// search routes.
const VALID_SORT_ORDERS: &[&str] = &[
    "date_asc",
    "date_desc",
    "name_asc",
    "name_desc",
    "size_asc",
    "size_desc",
];
/// Persist a new saved search after validating the name, query, and
/// (optional) sort order, returning the stored record's fields.
#[utoipa::path(
    post,
    path = "/api/v1/searches",
    tag = "saved_searches",
    request_body = CreateSavedSearchRequest,
    responses(
        (status = 200, description = "Search saved", body = SavedSearchResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn create_saved_search(
    State(state): State<AppState>,
    Json(req): Json<CreateSavedSearchRequest>,
) -> Result<Json<SavedSearchResponse>, ApiError> {
    // Name length is measured in Unicode scalar values, not bytes.
    if !(1..=255).contains(&req.name.chars().count()) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "name must be 1-255 characters".into(),
            ),
        ));
    }
    // Query length is a cap on the raw byte size of the string.
    if !(1..=2048).contains(&req.query.len()) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "query must be 1-2048 bytes".into(),
            ),
        ));
    }
    if let Some(ref sort) = req.sort_order
        && !VALID_SORT_ORDERS.contains(&sort.as_str())
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(format!(
                "sort_order must be one of: {}",
                VALID_SORT_ORDERS.join(", ")
            )),
        ));
    }
    let id = uuid::Uuid::now_v7();
    state
        .storage
        .save_search(id, &req.name, &req.query, req.sort_order.as_deref())
        .await
        .map_err(ApiError)?;
    // NOTE(review): `created_at` is stamped here instead of read back from
    // storage, so it may differ slightly from the persisted timestamp.
    Ok(Json(SavedSearchResponse {
        id: id.to_string(),
        name: req.name,
        query: req.query,
        sort_order: req.sort_order,
        created_at: chrono::Utc::now(),
    }))
}
#[utoipa::path(
    get,
    path = "/api/v1/searches",
    tag = "saved_searches",
    responses(
        (status = 200, description = "List of saved searches", body = Vec<SavedSearchResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_saved_searches(
    State(state): State<AppState>,
) -> Result<Json<Vec<SavedSearchResponse>>, ApiError> {
    // Fetch every stored search and project each row into its API shape.
    let stored = state
        .storage
        .list_saved_searches()
        .await
        .map_err(ApiError)?;
    let mut items = Vec::with_capacity(stored.len());
    for s in stored {
        items.push(SavedSearchResponse {
            id: s.id.to_string(),
            name: s.name,
            query: s.query,
            sort_order: s.sort_order,
            created_at: s.created_at,
        });
    }
    Ok(Json(items))
}
#[utoipa::path(
    delete,
    path = "/api/v1/searches/{id}",
    tag = "saved_searches",
    params(("id" = uuid::Uuid, Path, description = "Saved search ID")),
    responses(
        (status = 200, description = "Saved search deleted"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn delete_saved_search(
    State(state): State<AppState>,
    Path(id): Path<uuid::Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Delete by ID; storage errors propagate as API errors.
    state
        .storage
        .delete_saved_search(id)
        .await
        .map_err(ApiError)?;
    let body = serde_json::json!({ "deleted": true });
    Ok(Json(body))
}

View file

@ -0,0 +1,56 @@
use axum::{Json, extract::State};
use crate::{
dto::{ScanJobResponse, ScanRequest, ScanStatusResponse},
error::ApiError,
state::AppState,
};
/// Trigger a scan as a background job. Returns the job ID immediately.
#[utoipa::path(
    post,
    path = "/api/v1/scan",
    tag = "scan",
    request_body = ScanRequest,
    responses(
        (status = 200, description = "Scan job submitted", body = ScanJobResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn trigger_scan(
    State(state): State<AppState>,
    Json(req): Json<ScanRequest>,
) -> Result<Json<ScanJobResponse>, ApiError> {
    // Enqueue the scan; the actual work runs asynchronously in the job queue.
    let job = pinakes_core::jobs::JobKind::Scan { path: req.path };
    let submitted = state.job_queue.submit(job).await;
    Ok(Json(ScanJobResponse {
        job_id: submitted.to_string(),
    }))
}
#[utoipa::path(
    get,
    path = "/api/v1/scan/status",
    tag = "scan",
    responses(
        (status = 200, description = "Scan status", body = ScanStatusResponse),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn scan_status(
    State(state): State<AppState>,
) -> Json<ScanStatusResponse> {
    // Take a point-in-time snapshot of the shared progress tracker.
    let progress = state.scan_progress.snapshot();
    Json(ScanStatusResponse {
        scanning: progress.scanning,
        files_found: progress.files_found,
        files_processed: progress.files_processed,
        error_count: progress.errors.len(),
        errors: progress.errors,
    })
}

View file

@ -0,0 +1,102 @@
use axum::{
Json,
extract::{Path, State},
};
use crate::{dto::ScheduledTaskResponse, error::ApiError, state::AppState};
#[utoipa::path(
    get,
    path = "/api/v1/scheduled-tasks",
    tag = "scheduled_tasks",
    responses(
        (status = 200, description = "List of scheduled tasks", body = Vec<ScheduledTaskResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_scheduled_tasks(
    State(state): State<AppState>,
) -> Result<Json<Vec<ScheduledTaskResponse>>, ApiError> {
    // Project each scheduler task into its API representation, rendering
    // timestamps as RFC 3339 strings.
    let mut responses = Vec::new();
    for task in state.scheduler.list_tasks().await {
        responses.push(ScheduledTaskResponse {
            id: task.id,
            name: task.name,
            schedule: task.schedule.display_string(),
            enabled: task.enabled,
            last_run: task.last_run.map(|dt| dt.to_rfc3339()),
            next_run: task.next_run.map(|dt| dt.to_rfc3339()),
            last_status: task.last_status,
        });
    }
    Ok(Json(responses))
}
#[utoipa::path(
post,
path = "/api/v1/scheduled-tasks/{id}/toggle",
tag = "scheduled_tasks",
params(("id" = String, Path, description = "Task ID")),
responses(
(status = 200, description = "Task toggled"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn toggle_scheduled_task(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.toggle_task(&id).await {
Some(enabled) => {
Ok(Json(serde_json::json!({
"id": id,
"enabled": enabled,
})))
},
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
)))
},
}
}
#[utoipa::path(
post,
path = "/api/v1/scheduled-tasks/{id}/run",
tag = "scheduled_tasks",
params(("id" = String, Path, description = "Task ID")),
responses(
(status = 200, description = "Task triggered"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn run_scheduled_task_now(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
match state.scheduler.run_now(&id).await {
Some(job_id) => {
Ok(Json(serde_json::json!({
"id": id,
"job_id": job_id,
})))
},
None => {
Err(ApiError(pinakes_core::error::PinakesError::NotFound(
format!("scheduled task not found: {id}"),
)))
},
}
}

View file

@ -0,0 +1,122 @@
use axum::{
Json,
extract::{Query, State},
};
use pinakes_core::search::{SearchRequest, SortOrder, parse_search_query};
use crate::{
dto::{MediaResponse, SearchParams, SearchRequestBody, SearchResponse},
error::ApiError,
state::AppState,
};
/// Map an optional sort key from the query string to a `SortOrder`.
/// Missing or unrecognized keys fall back to relevance ordering.
fn resolve_sort(sort: Option<&str>) -> SortOrder {
    let Some(key) = sort else {
        return SortOrder::Relevance;
    };
    match key {
        "date_asc" => SortOrder::DateAsc,
        "date_desc" => SortOrder::DateDesc,
        "name_asc" => SortOrder::NameAsc,
        "name_desc" => SortOrder::NameDesc,
        "size_asc" => SortOrder::SizeAsc,
        "size_desc" => SortOrder::SizeDesc,
        _ => SortOrder::Relevance,
    }
}
/// GET variant of the search endpoint: parse the `q` expression, run it
/// against storage, and return a page of media items.
#[utoipa::path(
    get,
    path = "/api/v1/search",
    tag = "search",
    params(
        ("q" = String, Query, description = "Search query"),
        ("sort" = Option<String>, Query, description = "Sort order"),
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Pagination limit"),
    ),
    responses(
        (status = 200, description = "Search results", body = SearchResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn search(
    State(state): State<AppState>,
    Query(params): Query<SearchParams>,
) -> Result<Json<SearchResponse>, ApiError> {
    // `str::len` counts bytes, so this is a byte-size cap on the raw query
    // string; the error message is worded accordingly.
    if params.q.len() > 2048 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "search query exceeds maximum length of 2048 bytes".into(),
            ),
        ));
    }
    let query = parse_search_query(&params.q)?;
    let sort = resolve_sort(params.sort.as_deref());
    let request = SearchRequest {
        query,
        sort,
        pagination: params.to_pagination(),
    };
    let results = state.storage.search(&request).await?;
    // Snapshot of configured library roots, handed to `MediaResponse::new`
    // for each item.
    let roots = state.config.read().await.directories.roots.clone();
    Ok(Json(SearchResponse {
        items: results
            .items
            .into_iter()
            .map(|item| MediaResponse::new(item, &roots))
            .collect(),
        total_count: results.total_count,
    }))
}
/// POST variant of the search endpoint; identical semantics to `search`
/// but takes the query in a JSON body instead of the query string.
#[utoipa::path(
    post,
    path = "/api/v1/search",
    tag = "search",
    request_body = SearchRequestBody,
    responses(
        (status = 200, description = "Search results", body = SearchResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn search_post(
    State(state): State<AppState>,
    Json(body): Json<SearchRequestBody>,
) -> Result<Json<SearchResponse>, ApiError> {
    // `str::len` counts bytes, so this is a byte-size cap on the raw query
    // string; the error message is worded accordingly.
    if body.q.len() > 2048 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "search query exceeds maximum length of 2048 bytes".into(),
            ),
        ));
    }
    let query = parse_search_query(&body.q)?;
    let sort = resolve_sort(body.sort.as_deref());
    let request = SearchRequest {
        query,
        sort,
        pagination: body.to_pagination(),
    };
    let results = state.storage.search(&request).await?;
    // Snapshot of configured library roots, handed to `MediaResponse::new`
    // for each item.
    let roots = state.config.read().await.directories.roots.clone();
    Ok(Json(SearchResponse {
        items: results
            .items
            .into_iter()
            .map(|item| MediaResponse::new(item, &roots))
            .collect(),
        total_count: results.total_count,
    }))
}

View file

@ -0,0 +1,840 @@
use std::net::SocketAddr;
use axum::{
Json,
extract::{ConnectInfo, Extension, Path, Query, State},
http::StatusCode,
};
use chrono::Utc;
use pinakes_core::{
model::{MediaId, Pagination},
sharing::{
Share,
ShareActivity,
ShareActivityAction,
ShareId,
ShareMutatePermissions,
ShareNotification,
ShareNotificationType,
SharePermissions,
ShareRecipient,
ShareTarget,
ShareViewPermissions,
generate_share_token,
hash_share_password,
verify_share_password,
},
users::UserId,
};
use uuid::Uuid;
use crate::{
auth::resolve_user_id,
dto::{
AccessSharedRequest,
BatchDeleteSharesRequest,
CreateShareRequest,
MediaResponse,
PaginationParams,
ShareActivityResponse,
ShareNotificationResponse,
ShareResponse,
SharedContentResponse,
UpdateShareRequest,
},
error::{ApiError, ApiResult},
state::AppState,
};
/// Create a new share
/// POST /api/shares
///
/// Validates sharing policy (feature enabled, public links allowed,
/// password requirement), parses the target and recipient from their
/// string discriminants, builds the permission set, persists the share,
/// and notifies the recipient when it is a direct user share.
#[utoipa::path(
    post,
    path = "/api/v1/shares",
    tag = "shares",
    request_body = CreateShareRequest,
    responses(
        (status = 200, description = "Share created", body = ShareResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn create_share(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<CreateShareRequest>,
) -> ApiResult<Json<ShareResponse>> {
    let config = state.config.read().await;
    if !config.sharing.enabled() {
        return Err(ApiError::bad_request("Sharing is not enabled"));
    }
    // Validate public links are allowed
    if req.recipient_type == "public_link" && !config.sharing.allow_public_links()
    {
        return Err(ApiError::bad_request("Public links are not allowed"));
    }
    // Enforce password requirement for public links if configured
    if req.recipient_type == "public_link"
        && config.sharing.require_public_link_password
        && req.password.is_none()
    {
        return Err(ApiError::bad_request(
            "Public links require a password per server policy",
        ));
    }
    // Release the config read lock before the remaining awaits.
    drop(config);
    let owner_id = resolve_user_id(&state.storage, &username).await?;
    // Parse target
    let target_id: Uuid = req
        .target_id
        .parse()
        .map_err(|_| ApiError::bad_request("Invalid target_id"))?;
    let target = match req.target_type.as_str() {
        "media" => {
            ShareTarget::Media {
                media_id: MediaId(target_id),
            }
        },
        "collection" => {
            ShareTarget::Collection {
                collection_id: target_id,
            }
        },
        "tag" => ShareTarget::Tag { tag_id: target_id },
        "saved_search" => {
            ShareTarget::SavedSearch {
                search_id: target_id,
            }
        },
        _ => return Err(ApiError::bad_request("Invalid target_type")),
    };
    // Parse recipient
    let recipient = match req.recipient_type.as_str() {
        "public_link" => {
            // Public links get a fresh token; the password (when given)
            // is stored only as a hash.
            let token = generate_share_token();
            let password_hash = req
                .password
                .as_ref()
                .map(|p| hash_share_password(p))
                .transpose()
                .map_err(ApiError)?;
            ShareRecipient::PublicLink {
                token,
                password_hash,
            }
        },
        "user" => {
            let recipient_user_id = req.recipient_user_id.ok_or_else(|| {
                ApiError::bad_request("recipient_user_id required for user share")
            })?;
            ShareRecipient::User {
                user_id: UserId(recipient_user_id),
            }
        },
        "group" => {
            let group_id = req.recipient_group_id.ok_or_else(|| {
                ApiError::bad_request("recipient_group_id required for group share")
            })?;
            ShareRecipient::Group { group_id }
        },
        _ => return Err(ApiError::bad_request("Invalid recipient_type")),
    };
    // Parse permissions; absent fields default to a view-only grant.
    let permissions = if let Some(perms) = req.permissions {
        SharePermissions {
            view: ShareViewPermissions {
                can_view: perms.can_view.unwrap_or(true),
                can_download: perms.can_download.unwrap_or(false),
                can_reshare: perms.can_reshare.unwrap_or(false),
            },
            mutate: ShareMutatePermissions {
                can_edit: perms.can_edit.unwrap_or(false),
                can_delete: perms.can_delete.unwrap_or(false),
                can_add: perms.can_add.unwrap_or(false),
            },
        }
    } else {
        SharePermissions::view_only()
    };
    // Calculate expiration relative to now; None means no expiry.
    let expires_at = req
        .expires_in_hours
        .map(|hours| Utc::now() + chrono::Duration::hours(hours as i64));
    let share = Share {
        id: ShareId(Uuid::now_v7()),
        target,
        owner_id,
        recipient,
        permissions,
        note: req.note,
        expires_at,
        access_count: 0,
        last_accessed: None,
        inherit_to_children: req.inherit_to_children.unwrap_or(true),
        parent_share_id: None,
        created_at: Utc::now(),
        updated_at: Utc::now(),
    };
    let created =
        state.storage.create_share(&share).await.map_err(|e| {
            ApiError::internal(format!("Failed to create share: {e}"))
        })?;
    // Send notification to recipient if it's a user share.
    // Best-effort: a notification failure does not fail the request.
    if let ShareRecipient::User { user_id } = &created.recipient {
        let notification = ShareNotification {
            id: Uuid::now_v7(),
            user_id: *user_id,
            share_id: created.id,
            notification_type: ShareNotificationType::NewShare,
            is_read: false,
            created_at: Utc::now(),
        };
        if let Err(e) = state.storage.create_share_notification(&notification).await
        {
            tracing::warn!(error = %e, "failed to send share notification");
        }
    }
    Ok(Json(created.into()))
}
/// List outgoing shares (shares I created)
/// GET /api/shares/outgoing
#[utoipa::path(
    get,
    path = "/api/v1/shares/outgoing",
    tag = "shares",
    params(
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Pagination limit"),
    ),
    responses(
        (status = 200, description = "Outgoing shares", body = Vec<ShareResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_outgoing(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Query(params): Query<PaginationParams>,
) -> ApiResult<Json<Vec<ShareResponse>>> {
    let owner = resolve_user_id(&state.storage, &username).await?;
    let page = params.to_pagination();
    let shares = state
        .storage
        .list_shares_by_owner(owner, &page)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to list shares: {e}")))?;
    let body: Vec<ShareResponse> = shares.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// List incoming shares (shares shared with me)
/// GET /api/shares/incoming
#[utoipa::path(
    get,
    path = "/api/v1/shares/incoming",
    tag = "shares",
    params(
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Pagination limit"),
    ),
    responses(
        (status = 200, description = "Incoming shares", body = Vec<ShareResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_incoming(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Query(params): Query<PaginationParams>,
) -> ApiResult<Json<Vec<ShareResponse>>> {
    let recipient = resolve_user_id(&state.storage, &username).await?;
    let page = params.to_pagination();
    let shares = state
        .storage
        .list_shares_for_user(recipient, &page)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to list shares: {e}")))?;
    let body: Vec<ShareResponse> = shares.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Get share details
/// GET /api/shares/{id}
#[utoipa::path(
    get,
    path = "/api/v1/shares/{id}",
    tag = "shares",
    params(("id" = Uuid, Path, description = "Share ID")),
    responses(
        (status = 200, description = "Share details", body = ShareResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_share(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> ApiResult<Json<ShareResponse>> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let share = state
        .storage
        .get_share(ShareId(id))
        .await
        .map_err(|e| ApiError::not_found(format!("Share not found: {e}")))?;
    // A share is visible to its owner and, for direct user shares, to the
    // recipient; public-link and group recipients get no access here.
    let is_owner = share.owner_id == user_id;
    let is_recipient = matches!(
        &share.recipient,
        ShareRecipient::User { user_id: recipient } if *recipient == user_id
    );
    if !(is_owner || is_recipient) {
        return Err(ApiError::forbidden("Not authorized to view this share"));
    }
    Ok(Json(share.into()))
}
/// Update a share
/// PATCH /api/shares/{id}
///
/// Owner-only. Merges the request's fields into the stored share:
/// permission flags fall back to their current values when omitted,
/// and `note`/`expires_at`/`inherit_to_children` are only overwritten
/// when present in the request.
#[utoipa::path(
    patch,
    path = "/api/v1/shares/{id}",
    tag = "shares",
    params(("id" = Uuid, Path, description = "Share ID")),
    request_body = UpdateShareRequest,
    responses(
        (status = 200, description = "Share updated", body = ShareResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn update_share(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<UpdateShareRequest>,
) -> ApiResult<Json<ShareResponse>> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let mut share = state
        .storage
        .get_share(ShareId(id))
        .await
        .map_err(|e| ApiError::not_found(format!("Share not found: {e}")))?;
    // Only owner can update
    if share.owner_id != user_id {
        return Err(ApiError::forbidden("Only the owner can update this share"));
    }
    // Update fields: each permission flag defaults to its existing value.
    if let Some(perms) = req.permissions {
        share.permissions = SharePermissions {
            view: ShareViewPermissions {
                can_view: perms.can_view.unwrap_or(share.permissions.view.can_view),
                can_download: perms
                    .can_download
                    .unwrap_or(share.permissions.view.can_download),
                can_reshare: perms
                    .can_reshare
                    .unwrap_or(share.permissions.view.can_reshare),
            },
            mutate: ShareMutatePermissions {
                can_edit: perms.can_edit.unwrap_or(share.permissions.mutate.can_edit),
                can_delete: perms
                    .can_delete
                    .unwrap_or(share.permissions.mutate.can_delete),
                can_add: perms.can_add.unwrap_or(share.permissions.mutate.can_add),
            },
        };
    }
    // NOTE(review): omitted fields keep their old values — there is no way
    // to clear `note` or `expires_at` through this endpoint; confirm intended.
    if let Some(note) = req.note {
        share.note = Some(note);
    }
    if let Some(expires_at) = req.expires_at {
        share.expires_at = Some(expires_at);
    }
    if let Some(inherit) = req.inherit_to_children {
        share.inherit_to_children = inherit;
    }
    share.updated_at = Utc::now();
    let updated =
        state.storage.update_share(&share).await.map_err(|e| {
            ApiError::internal(format!("Failed to update share: {e}"))
        })?;
    // Notify recipient of update (best-effort; failure only logs a warning).
    if let ShareRecipient::User { user_id } = &updated.recipient {
        let notification = ShareNotification {
            id: Uuid::now_v7(),
            user_id: *user_id,
            share_id: updated.id,
            notification_type: ShareNotificationType::ShareUpdated,
            is_read: false,
            created_at: Utc::now(),
        };
        if let Err(e) = state.storage.create_share_notification(&notification).await
        {
            tracing::warn!(error = %e, "failed to send share update notification");
        }
    }
    Ok(Json(updated.into()))
}
/// Delete (revoke) a share
/// DELETE /api/shares/{id}
///
/// Owner-only. The recipient is notified of the revocation before the
/// share row is removed, since the notification references the share ID.
#[utoipa::path(
    delete,
    path = "/api/v1/shares/{id}",
    tag = "shares",
    params(("id" = Uuid, Path, description = "Share ID")),
    responses(
        (status = 204, description = "Share deleted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn delete_share(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> ApiResult<StatusCode> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let share = state
        .storage
        .get_share(ShareId(id))
        .await
        .map_err(|e| ApiError::not_found(format!("Share not found: {e}")))?;
    // Only owner can delete
    if share.owner_id != user_id {
        return Err(ApiError::forbidden("Only the owner can revoke this share"));
    }
    // Notify recipient before deletion (best-effort; failure only logs).
    if let ShareRecipient::User { user_id } = &share.recipient {
        let notification = ShareNotification {
            id: Uuid::now_v7(),
            user_id: *user_id,
            share_id: share.id,
            notification_type: ShareNotificationType::ShareRevoked,
            is_read: false,
            created_at: Utc::now(),
        };
        if let Err(e) = state.storage.create_share_notification(&notification).await
        {
            tracing::warn!(error = %e, "failed to send share revocation notification");
        }
    }
    state
        .storage
        .delete_share(ShareId(id))
        .await
        .map_err(|e| ApiError::internal(format!("Failed to delete share: {e}")))?;
    Ok(StatusCode::NO_CONTENT)
}
/// Batch delete shares
/// POST /api/shares/batch/delete
#[utoipa::path(
    post,
    path = "/api/v1/shares/batch/delete",
    tag = "shares",
    request_body = BatchDeleteSharesRequest,
    responses(
        (status = 200, description = "Shares deleted"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn batch_delete(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<BatchDeleteSharesRequest>,
) -> ApiResult<Json<serde_json::Value>> {
    let requested = req.share_ids.len();
    if requested == 0 || requested > 100 {
        return Err(ApiError::bad_request("share_ids must contain 1-100 items"));
    }
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let share_ids: Vec<ShareId> =
        req.share_ids.into_iter().map(ShareId).collect();
    // Every listed share must exist and belong to the caller before
    // anything is deleted.
    for sid in &share_ids {
        let share = state
            .storage
            .get_share(*sid)
            .await
            .map_err(|e| ApiError::not_found(format!("Share not found: {e}")))?;
        if share.owner_id != user_id {
            return Err(ApiError::forbidden(format!(
                "Not authorized to delete share {}",
                sid.0
            )));
        }
    }
    let deleted = state
        .storage
        .batch_delete_shares(&share_ids)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to batch delete: {e}")))?;
    Ok(Json(serde_json::json!({ "deleted": deleted })))
}
/// Access a public shared resource
/// GET /api/shared/{token}
///
/// Unauthenticated endpoint (no `security` requirement): resolves a share
/// by its public token, enforces expiry and the optional password, records
/// the access plus an activity-log entry, and returns the shared content —
/// a single item for media targets, a list for collection/tag/saved-search
/// targets (capped at 100 items).
#[utoipa::path(
    get,
    path = "/api/v1/shared/{token}",
    tag = "shares",
    params(
        ("token" = String, Path, description = "Share token"),
        ("password" = Option<String>, Query, description = "Share password if required"),
    ),
    responses(
        (status = 200, description = "Shared content", body = SharedContentResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    )
)]
pub async fn access_shared(
    State(state): State<AppState>,
    Path(token): Path<String>,
    Query(params): Query<AccessSharedRequest>,
    ConnectInfo(addr): ConnectInfo<SocketAddr>,
) -> ApiResult<Json<SharedContentResponse>> {
    let share = state
        .storage
        .get_share_by_token(&token)
        .await
        .map_err(|e| ApiError::not_found(format!("Share not found: {e}")))?;
    // Check expiration. Expired shares are reported as not found rather
    // than forbidden.
    if let Some(expires_at) = share.expires_at
        && Utc::now() > expires_at
    {
        return Err(ApiError::not_found("Share has expired"));
    }
    // Check password if required (only public links can carry a hash).
    if let ShareRecipient::PublicLink {
        password_hash: Some(hash),
        ..
    } = &share.recipient
    {
        let provided_password = params
            .password
            .as_ref()
            .ok_or_else(|| ApiError::unauthorized("Password required"))?;
        if !verify_share_password(provided_password, hash) {
            // Log failed attempt with the caller's IP for auditing.
            let activity = ShareActivity {
                id: Uuid::now_v7(),
                share_id: share.id,
                actor_id: None,
                actor_ip: Some(addr.ip().to_string()),
                action: ShareActivityAction::PasswordFailed,
                details: None,
                timestamp: Utc::now(),
            };
            if let Err(e) = state.storage.record_share_activity(&activity).await {
                tracing::warn!(error = %e, "failed to record share activity");
            }
            return Err(ApiError::unauthorized("Invalid password"));
        }
    }
    // Record access (bumps counters); a failure here fails the request.
    state
        .storage
        .record_share_access(share.id)
        .await
        .map_err(|e| ApiError::internal(format!("Failed to record access: {e}")))?;
    // Log the access. Best-effort: the result is deliberately ignored.
    let activity = ShareActivity {
        id: Uuid::now_v7(),
        share_id: share.id,
        actor_id: None,
        actor_ip: Some(addr.ip().to_string()),
        action: ShareActivityAction::Accessed,
        details: None,
        timestamp: Utc::now(),
    };
    let _ = state.storage.record_share_activity(&activity).await;
    // Return the shared content; roots are passed to `MediaResponse::new`
    // for each returned item.
    let roots = state.config.read().await.directories.roots.clone();
    match &share.target {
        ShareTarget::Media { media_id } => {
            let item = state
                .storage
                .get_media(*media_id)
                .await
                .map_err(|e| ApiError::not_found(format!("Media not found: {e}")))?;
            Ok(Json(SharedContentResponse::Single(MediaResponse::new(
                item, &roots,
            ))))
        },
        ShareTarget::Collection { collection_id } => {
            let members = state
                .storage
                .get_collection_members(*collection_id)
                .await
                .map_err(|e| {
                    ApiError::not_found(format!("Collection not found: {e}"))
                })?;
            let items: Vec<MediaResponse> = members
                .into_iter()
                .map(|item| MediaResponse::new(item, &roots))
                .collect();
            Ok(Json(SharedContentResponse::Multiple { items }))
        },
        ShareTarget::Tag { tag_id } => {
            // Tag shares run a tag-filter search capped at 100 results.
            let tag = state
                .storage
                .get_tag(*tag_id)
                .await
                .map_err(|e| ApiError::not_found(format!("Tag not found: {e}")))?;
            let request = pinakes_core::search::SearchRequest {
                query: pinakes_core::search::SearchQuery::TagFilter(
                    tag.name.clone(),
                ),
                sort: pinakes_core::search::SortOrder::default(),
                pagination: Pagination::new(0, 100, None),
            };
            let results = state
                .storage
                .search(&request)
                .await
                .map_err(|e| ApiError::internal(format!("Search failed: {e}")))?;
            let items: Vec<MediaResponse> = results
                .items
                .into_iter()
                .map(|item| MediaResponse::new(item, &roots))
                .collect();
            Ok(Json(SharedContentResponse::Multiple { items }))
        },
        ShareTarget::SavedSearch { search_id } => {
            // Saved-search shares re-parse the stored query and run it,
            // also capped at 100 results.
            let saved =
                state
                    .storage
                    .get_saved_search(*search_id)
                    .await
                    .map_err(|e| {
                        ApiError::not_found(format!("Saved search not found: {e}"))
                    })?;
            let parsed_query = pinakes_core::search::parse_search_query(&saved.query)
                .map_err(|e| {
                    ApiError::internal(format!("Failed to parse search query: {e}"))
                })?;
            let request = pinakes_core::search::SearchRequest {
                query: parsed_query,
                sort: pinakes_core::search::SortOrder::default(),
                pagination: Pagination::new(0, 100, None),
            };
            let results = state
                .storage
                .search(&request)
                .await
                .map_err(|e| ApiError::internal(format!("Search failed: {e}")))?;
            let items: Vec<MediaResponse> = results
                .items
                .into_iter()
                .map(|item| MediaResponse::new(item, &roots))
                .collect();
            Ok(Json(SharedContentResponse::Multiple { items }))
        },
    }
}
/// Get share activity log
/// GET /api/shares/{id}/activity
#[utoipa::path(
    get,
    path = "/api/v1/shares/{id}/activity",
    tag = "shares",
    params(
        ("id" = Uuid, Path, description = "Share ID"),
        ("offset" = Option<u64>, Query, description = "Pagination offset"),
        ("limit" = Option<u64>, Query, description = "Pagination limit"),
    ),
    responses(
        (status = 200, description = "Share activity", body = Vec<ShareActivityResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_activity(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Query(params): Query<PaginationParams>,
) -> ApiResult<Json<Vec<ShareActivityResponse>>> {
    let requester = resolve_user_id(&state.storage, &username).await?;
    let share = state
        .storage
        .get_share(ShareId(id))
        .await
        .map_err(|e| ApiError::not_found(format!("Share not found: {e}")))?;
    // The activity log is private to the share's owner.
    if share.owner_id != requester {
        return Err(ApiError::forbidden(
            "Only the owner can view share activity",
        ));
    }
    let entries = state
        .storage
        .get_share_activity(ShareId(id), &params.to_pagination())
        .await
        .map_err(|e| ApiError::internal(format!("Failed to get activity: {e}")))?;
    let body: Vec<ShareActivityResponse> =
        entries.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Get unread share notifications
/// GET /api/notifications/shares
#[utoipa::path(
    get,
    path = "/api/v1/notifications/shares",
    tag = "shares",
    responses(
        (status = 200, description = "Unread notifications", body = Vec<ShareNotificationResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_notifications(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> ApiResult<Json<Vec<ShareNotificationResponse>>> {
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let unread = state
        .storage
        .get_unread_notifications(user_id)
        .await
        .map_err(|e| {
            ApiError::internal(format!("Failed to get notifications: {e}"))
        })?;
    let body = unread
        .into_iter()
        .map(Into::into)
        .collect::<Vec<ShareNotificationResponse>>();
    Ok(Json(body))
}
/// Mark a notification as read
/// POST /api/notifications/shares/{id}/read
#[utoipa::path(
    post,
    path = "/api/v1/notifications/shares/{id}/read",
    tag = "shares",
    params(("id" = Uuid, Path, description = "Notification ID")),
    responses(
        (status = 200, description = "Notification marked as read"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn mark_notification_read(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
) -> ApiResult<StatusCode> {
    // Scoped to the calling user so one user can't mark another's
    // notifications.
    let reader = resolve_user_id(&state.storage, &username).await?;
    if let Err(e) = state.storage.mark_notification_read(id, reader).await {
        return Err(ApiError::internal(format!("Failed to mark as read: {e}")));
    }
    Ok(StatusCode::OK)
}
/// Mark all notifications as read
/// POST /api/notifications/shares/read-all
#[utoipa::path(
    post,
    path = "/api/v1/notifications/shares/read-all",
    tag = "shares",
    responses(
        (status = 200, description = "All notifications marked as read"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn mark_all_read(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> ApiResult<StatusCode> {
    let reader = resolve_user_id(&state.storage, &username).await?;
    if let Err(e) = state.storage.mark_all_notifications_read(reader).await {
        return Err(ApiError::internal(format!(
            "Failed to mark all as read: {e}"
        )));
    }
    Ok(StatusCode::OK)
}

View file

@ -0,0 +1,355 @@
use axum::{
Json,
extract::{Extension, Path, Query, State},
};
use pinakes_core::model::{MediaId, Pagination};
use serde::Deserialize;
use uuid::Uuid;
use crate::{
auth::resolve_user_id,
dto::{
CommentResponse,
CreateCommentRequest,
CreateRatingRequest,
CreateShareLinkRequest,
FavoriteRequest,
MediaResponse,
RatingResponse,
ShareLinkResponse,
},
error::ApiError,
state::AppState,
};
/// Query parameters accepted by the shared-media access endpoint.
#[derive(Deserialize)]
pub struct ShareLinkQuery {
    /// Plaintext password supplied by the visitor; compared against the
    /// stored hash when the share link is password-protected.
    pub password: Option<String>,
}
/// Rate a media item (1-5 stars) on behalf of the authenticated user,
/// with an optional review text.
///
/// Validates that `stars` lies in `1..=5` and that the review stays under
/// 10 000 characters before persisting via the storage layer.
#[utoipa::path(
    post,
    path = "/api/v1/media/{id}/rate",
    tag = "social",
    params(("id" = Uuid, Path, description = "Media item ID")),
    request_body = CreateRatingRequest,
    responses(
        (status = 200, description = "Rating saved", body = RatingResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn rate_media(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<CreateRatingRequest>,
) -> Result<Json<RatingResponse>, ApiError> {
    // Idiomatic inclusive-range check (clippy::manual_range_contains);
    // behavior is identical to `stars < 1 || stars > 5`.
    if !(1..=5).contains(&req.stars) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "stars must be between 1 and 5".into(),
            ),
        ));
    }
    // Length is measured in characters (not bytes) so multi-byte UTF-8
    // reviews are bounded the way the error message describes.
    if req
        .review_text
        .as_ref()
        .is_some_and(|t| t.chars().count() > 10_000)
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "review_text must not exceed 10000 characters".into(),
            ),
        ));
    }
    let user_id = resolve_user_id(&state.storage, &username).await?;
    let rating = state
        .storage
        .rate_media(user_id, MediaId(id), req.stars, req.review_text.as_deref())
        .await?;
    Ok(Json(RatingResponse::from(rating)))
}
/// Return every rating attached to a media item.
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/ratings",
    tag = "social",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Media ratings", body = Vec<RatingResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_media_ratings(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<RatingResponse>>, ApiError> {
    let ratings = state.storage.get_media_ratings(MediaId(id)).await?;
    // Convert each storage record into its API representation.
    let body: Vec<RatingResponse> =
        ratings.into_iter().map(Into::into).collect();
    Ok(Json(body))
}
/// Attach a comment (optionally a reply, via `parent_id`) to a media item.
#[utoipa::path(
    post,
    path = "/api/v1/media/{id}/comments",
    tag = "social",
    params(("id" = Uuid, Path, description = "Media item ID")),
    request_body = CreateCommentRequest,
    responses(
        (status = 200, description = "Comment added", body = CommentResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn add_comment(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(id): Path<Uuid>,
    Json(req): Json<CreateCommentRequest>,
) -> Result<Json<CommentResponse>, ApiError> {
    // Length is measured in characters so multi-byte text is bounded fairly.
    let len = req.text.chars().count();
    if !(1..=10_000).contains(&len) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "comment text must be 1-10000 characters".into(),
            ),
        ));
    }
    let uid = resolve_user_id(&state.storage, &username).await?;
    let stored = state
        .storage
        .add_comment(uid, MediaId(id), &req.text, req.parent_id)
        .await?;
    Ok(Json(stored.into()))
}
/// Return every comment attached to a media item.
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/comments",
    tag = "social",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Media comments", body = Vec<CommentResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_media_comments(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<Vec<CommentResponse>>, ApiError> {
    let comments = state.storage.get_media_comments(MediaId(id)).await?;
    let mut body = Vec::with_capacity(comments.len());
    for comment in comments {
        body.push(CommentResponse::from(comment));
    }
    Ok(Json(body))
}
/// Add a media item to the authenticated user's favorites.
#[utoipa::path(
    post,
    path = "/api/v1/favorites",
    tag = "social",
    request_body = FavoriteRequest,
    responses(
        (status = 200, description = "Added to favorites"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn add_favorite(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<FavoriteRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    let media = MediaId(req.media_id);
    state.storage.add_favorite(uid, media).await?;
    Ok(Json(serde_json::json!({"added": true})))
}
/// Remove a media item from the authenticated user's favorites.
#[utoipa::path(
    delete,
    path = "/api/v1/favorites/{media_id}",
    tag = "social",
    params(("media_id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Removed from favorites"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn remove_favorite(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Path(media_id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    let media = MediaId(media_id);
    state.storage.remove_favorite(uid, media).await?;
    Ok(Json(serde_json::json!({"removed": true})))
}
/// List the authenticated user's favorite media items (first default page).
#[utoipa::path(
    get,
    path = "/api/v1/favorites",
    tag = "social",
    responses(
        (status = 200, description = "User favorites", body = Vec<MediaResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_favorites(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
) -> Result<Json<Vec<MediaResponse>>, ApiError> {
    let uid = resolve_user_id(&state.storage, &username).await?;
    let page = Pagination::default();
    let items = state.storage.get_user_favorites(uid, &page).await?;
    // Clone the configured roots out of the short-lived config read guard;
    // `MediaResponse::new` needs them for each item.
    let roots = state.config.read().await.directories.roots.clone();
    let mut body = Vec::with_capacity(items.len());
    for item in items {
        body.push(MediaResponse::new(item, &roots));
    }
    Ok(Json(body))
}
/// Create a tokenised share link for a media item.
///
/// Optionally protects the link with a password (hashed via
/// `pinakes_core::users::auth::hash_password`) and/or gives it an expiry
/// measured in hours, capped at one year.
#[utoipa::path(
    post,
    path = "/api/v1/media/share",
    tag = "social",
    request_body = CreateShareLinkRequest,
    responses(
        (status = 200, description = "Share link created", body = ShareLinkResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn create_share_link(
    State(state): State<AppState>,
    Extension(username): Extension<String>,
    Json(req): Json<CreateShareLinkRequest>,
) -> Result<Json<ShareLinkResponse>, ApiError> {
    // Bound the password length (in bytes) before doing any hashing work.
    if req.password.as_ref().is_some_and(|p| p.len() > 1024) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "password must not exceed 1024 bytes".into(),
            ),
        ));
    }
    let user_id = resolve_user_id(&state.storage, &username).await?;
    // Share token: a UUIDv7 rendered as 32 hex characters (dashes stripped).
    // NOTE(review): UUIDv7 embeds a millisecond timestamp plus ~74 random
    // bits; confirm that is unguessable enough for share URLs in the threat
    // model, or consider a CSPRNG-generated token.
    let token = uuid::Uuid::now_v7().to_string().replace('-', "");
    // `None` means the link is public (no password gate).
    let password_hash = match req.password.as_ref() {
        Some(p) => {
            Some(pinakes_core::users::auth::hash_password(p).map_err(ApiError)?)
        },
        None => None,
    };
    const MAX_EXPIRY_HOURS: u64 = 8760; // 1 year
    if let Some(h) = req.expires_in_hours
        && h > MAX_EXPIRY_HOURS
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(format!(
                "expires_in_hours cannot exceed {MAX_EXPIRY_HOURS}"
            )),
        ));
    }
    // Cast is safe: `h` is at most 8760 after the check above.
    let expires_at = req
        .expires_in_hours
        .map(|h| chrono::Utc::now() + chrono::Duration::hours(h as i64));
    let link = state
        .storage
        .create_share_link(
            MediaId(req.media_id),
            user_id,
            &token,
            password_hash.as_deref(),
            expires_at,
        )
        .await?;
    Ok(Json(ShareLinkResponse::from(link)))
}
/// Access a media item through a public share link (no bearer auth).
///
/// Enforces, in order: a password length bound, link lookup, expiry check,
/// and password verification; only then bumps the view counter and returns
/// the media payload.
#[utoipa::path(
    get,
    path = "/api/v1/shared/media/{token}",
    tag = "social",
    params(
        ("token" = String, Path, description = "Share token"),
        ("password" = Option<String>, Query, description = "Share password"),
    ),
    responses(
        (status = 200, description = "Shared media", body = MediaResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    )
)]
pub async fn access_shared_media(
    State(state): State<AppState>,
    Path(token): Path<String>,
    Query(query): Query<ShareLinkQuery>,
) -> Result<Json<MediaResponse>, ApiError> {
    // Same 1024-byte bound as link creation, applied before any hashing.
    if query.password.as_ref().is_some_and(|p| p.len() > 1024) {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "password must not exceed 1024 bytes".into(),
            ),
        ));
    }
    let link = state.storage.get_share_link(&token).await?;
    // Check expiration
    if let Some(expires) = link.expires_at
        && chrono::Utc::now() > expires
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "share link has expired".into(),
            ),
        ));
    }
    // Verify password if set
    if let Some(ref hash) = link.password_hash {
        let password = match query.password.as_deref() {
            Some(p) => p,
            None => {
                return Err(ApiError(
                    pinakes_core::error::PinakesError::Authentication(
                        "password required for this share link".into(),
                    ),
                ));
            },
        };
        // A verification *error* is deliberately treated the same as a
        // wrong password: access is denied either way.
        let valid = pinakes_core::users::auth::verify_password(password, hash)
            .unwrap_or(false);
        if !valid {
            return Err(ApiError(pinakes_core::error::PinakesError::Authentication(
                "invalid share link password".into(),
            )));
        }
    }
    // Count the view only after every access check has passed.
    state.storage.increment_share_views(&token).await?;
    let item = state.storage.get_media(link.media_id).await?;
    let roots = state.config.read().await.directories.roots.clone();
    Ok(Json(MediaResponse::new(item, &roots)))
}

View file

@ -0,0 +1,21 @@
use axum::{Json, extract::State};
use crate::{dto::LibraryStatisticsResponse, error::ApiError, state::AppState};
/// Return aggregate, library-wide statistics.
#[utoipa::path(
    get,
    path = "/api/v1/statistics",
    tag = "statistics",
    responses(
        (status = 200, description = "Library statistics", body = LibraryStatisticsResponse),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn library_statistics(
    State(state): State<AppState>,
) -> Result<Json<LibraryStatisticsResponse>, ApiError> {
    state
        .storage
        .library_statistics()
        .await
        .map(|stats| Json(LibraryStatisticsResponse::from(stats)))
        .map_err(Into::into)
}

View file

@ -0,0 +1,345 @@
use axum::{
extract::{Path, State},
http::StatusCode,
};
use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode};
use pinakes_core::{
model::MediaId,
transcode::{estimate_bandwidth, parse_resolution},
};
use uuid::Uuid;
use crate::{error::ApiError, state::AppState};
fn build_response(
content_type: &str,
body: impl Into<axum::body::Body>,
) -> Result<axum::response::Response, ApiError> {
axum::response::Response::builder()
.header("Content-Type", content_type)
.body(body.into())
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to build response: {e}"),
))
})
}
/// Build a response with an explicit status code and extra headers.
/// Builder failures are surfaced as `InvalidOperation` API errors.
fn build_response_with_status(
    status: StatusCode,
    headers: &[(&str, &str)],
    body: impl Into<axum::body::Body>,
) -> Result<axum::response::Response, ApiError> {
    // Fold every (name, value) pair onto the builder before attaching the body.
    let builder = headers.iter().fold(
        axum::response::Response::builder().status(status),
        |b, (name, value)| b.header(*name, *value),
    );
    builder.body(body.into()).map_err(|e| {
        ApiError(pinakes_core::error::PinakesError::InvalidOperation(
            format!("failed to build response: {e}"),
        ))
    })
}
/// Escape the five XML-reserved characters (`& < > " '`) so `s` can be
/// embedded safely in XML text or attribute content.
fn escape_xml(s: &str) -> String {
    // Single pass; `&` never gets double-escaped because each input char
    // is inspected exactly once.
    let mut out = String::with_capacity(s.len());
    for ch in s.chars() {
        match ch {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            '\'' => out.push_str("&apos;"),
            other => out.push(other),
        }
    }
    out
}
/// Generate the HLS master playlist listing one variant stream per
/// configured transcode profile.
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/stream/hls/master.m3u8",
    tag = "streaming",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "HLS master playlist"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn hls_master_playlist(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
    // Verify media exists
    let _item = state.storage.get_media(MediaId(id)).await?;
    let config = state.config.read().await;
    let profiles = &config.transcoding.profiles;
    let mut playlist = String::from("#EXTM3U\n#EXT-X-VERSION:3\n\n");
    for profile in profiles {
        // Resolution and bandwidth come from the profile's transcode settings.
        let (w, h) = parse_resolution(&profile.max_resolution);
        let bandwidth = estimate_bandwidth(profile);
        // Percent-encode the profile name so it is safe inside the variant URL.
        let encoded_name =
            utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string();
        playlist.push_str(&format!(
            "#EXT-X-STREAM-INF:BANDWIDTH={bandwidth},RESOLUTION={w}x{h}\n/api/v1/\
            media/{id}/stream/hls/{encoded_name}/playlist.m3u8\n\n",
        ));
    }
    build_response("application/vnd.apple.mpegurl", playlist)
}
/// Generate the per-profile HLS variant playlist: fixed 10-second segments
/// covering the media's duration, terminated with `#EXT-X-ENDLIST` (VOD).
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/stream/hls/{profile}/playlist.m3u8",
    tag = "streaming",
    params(
        ("id" = Uuid, Path, description = "Media item ID"),
        ("profile" = String, Path, description = "Transcode profile name"),
    ),
    responses(
        (status = 200, description = "HLS variant playlist"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn hls_variant_playlist(
    State(state): State<AppState>,
    Path((id, profile)): Path<(Uuid, String)>,
) -> Result<axum::response::Response, ApiError> {
    let item = state.storage.get_media(MediaId(id)).await?;
    let duration = item.duration_secs.unwrap_or(0.0);
    // Without a positive duration the segment list cannot be computed.
    if duration <= 0.0 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "cannot generate HLS playlist for media with unknown or zero duration"
                    .into(),
            ),
        ));
    }
    let segment_duration = 10.0;
    let num_segments = (duration / segment_duration).ceil() as usize;
    let mut playlist = String::from(
        "#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:10\n#\
        EXT-X-MEDIA-SEQUENCE:0\n",
    );
    for i in 0..num_segments.max(1) {
        // The final segment carries the remainder `duration - i * 10`
        // (computed as a fused multiply-add); all others are exactly 10 s.
        let seg_dur = if i == num_segments.saturating_sub(1) && duration > 0.0 {
            (i as f64).mul_add(-segment_duration, duration)
        } else {
            segment_duration
        };
        playlist.push_str(&format!("#EXTINF:{seg_dur:.3},\n"));
        // NOTE(review): `profile` is interpolated into the segment URL
        // without percent-encoding, unlike the master playlist — confirm
        // profile names are restricted to URL-safe characters.
        playlist.push_str(&format!(
            "/api/v1/media/{id}/stream/hls/{profile}/segment{i}.ts\n"
        ));
    }
    playlist.push_str("#EXT-X-ENDLIST\n");
    build_response("application/vnd.apple.mpegurl", playlist)
}
/// Serve one transcoded HLS segment from the session's cache directory.
///
/// Returns 202 with `Retry-After` while a session exists but the requested
/// segment has not been produced yet; errors when no session exists at all.
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/stream/hls/{profile}/{segment}",
    tag = "streaming",
    params(
        ("id" = Uuid, Path, description = "Media item ID"),
        ("profile" = String, Path, description = "Transcode profile name"),
        ("segment" = String, Path, description = "Segment filename"),
    ),
    responses(
        (status = 200, description = "HLS segment data"),
        (status = 202, description = "Segment not yet available"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn hls_segment(
    State(state): State<AppState>,
    Path((id, profile, segment)): Path<(Uuid, String, String)>,
) -> Result<axum::response::Response, ApiError> {
    // Strict validation: reject path traversal, null bytes, leading dots
    if segment.is_empty()
        || segment.starts_with('.')
        || segment.contains('\0')
        || segment.contains("..")
        || segment.contains('/')
        || segment.contains('\\')
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "invalid segment name".into(),
            ),
        ));
    }
    let media_id = MediaId(id);
    // Look for an active/completed transcode session
    if let Some(transcode_service) = &state.transcode_service
        && let Some(session) =
            transcode_service.find_session(media_id, &profile).await
    {
        let segment_path = session.cache_path.join(&segment);
        // Read directly instead of `exists()` + `read()`: that pair races
        // with the transcoder creating/removing files (TOCTOU). A missing
        // file simply means the segment is not ready yet.
        match tokio::fs::read(&segment_path).await {
            Ok(data) => return build_response("video/MP2T", data),
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
                // Session exists but segment not ready yet; fall through to 202.
            },
            Err(e) => {
                return Err(ApiError(
                    pinakes_core::error::PinakesError::InvalidOperation(
                        format!("failed to read segment: {e}"),
                    ),
                ));
            },
        }
        return build_response_with_status(
            StatusCode::ACCEPTED,
            &[("Retry-After", "2")],
            "segment not yet available",
        );
    }
    Err(ApiError(
        pinakes_core::error::PinakesError::InvalidOperation(
            "no transcode session found; start a transcode first via POST \
            /media/{id}/transcode"
                .into(),
        ),
    ))
}
/// Generate a static MPEG-DASH MPD manifest with one `Representation` per
/// configured transcode profile.
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/stream/dash/manifest.mpd",
    tag = "streaming",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "DASH manifest"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn dash_manifest(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<axum::response::Response, ApiError> {
    let item = state.storage.get_media(MediaId(id)).await?;
    let duration = item.duration_secs.unwrap_or(0.0);
    // A positive duration is required for `mediaPresentationDuration`.
    if duration <= 0.0 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "cannot generate DASH manifest for media with unknown or zero duration"
                    .into(),
            ),
        ));
    }
    // Decompose the duration into H/M/S for the ISO-8601 `PT…H…M…S` form.
    let hours = (duration / 3600.0) as u32;
    let minutes = ((duration % 3600.0) / 60.0) as u32;
    let seconds = duration % 60.0;
    let config = state.config.read().await;
    let profiles = &config.transcoding.profiles;
    let mut representations = String::new();
    for profile in profiles {
        let (w, h) = parse_resolution(&profile.max_resolution);
        let bandwidth = estimate_bandwidth(profile);
        // The profile name is escaped for the XML attribute and separately
        // percent-encoded for the segment/init URLs.
        let xml_name = escape_xml(&profile.name);
        let url_name =
            utf8_percent_encode(&profile.name, NON_ALPHANUMERIC).to_string();
        representations.push_str(&format!(
            r#"      <Representation id="{xml_name}" bandwidth="{bandwidth}" width="{w}" height="{h}">
        <SegmentTemplate media="/api/v1/media/{id}/stream/dash/{url_name}/segment$Number$.m4s" initialization="/api/v1/media/{id}/stream/dash/{url_name}/init.mp4" duration="10000" timescale="1000" startNumber="0"/>
      </Representation>
"#,
        ));
    }
    let mpd = format!(
        r#"<?xml version="1.0" encoding="UTF-8"?>
<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" type="static" mediaPresentationDuration="PT{hours}H{minutes}M{seconds:.1}S" minBufferTime="PT1.5S">
  <Period>
    <AdaptationSet mimeType="video/mp4" segmentAlignment="true">
{representations}    </AdaptationSet>
  </Period>
</MPD>"#
    );
    build_response("application/dash+xml", mpd)
}
/// Serve one transcoded DASH segment from the session's cache directory.
///
/// Returns 202 with `Retry-After` while a session exists but the requested
/// segment has not been produced yet; errors when no session exists at all.
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/stream/dash/{profile}/{segment}",
    tag = "streaming",
    params(
        ("id" = Uuid, Path, description = "Media item ID"),
        ("profile" = String, Path, description = "Transcode profile name"),
        ("segment" = String, Path, description = "Segment filename"),
    ),
    responses(
        (status = 200, description = "DASH segment data"),
        (status = 202, description = "Segment not yet available"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn dash_segment(
    State(state): State<AppState>,
    Path((id, profile, segment)): Path<(Uuid, String, String)>,
) -> Result<axum::response::Response, ApiError> {
    // Strict validation: reject path traversal, null bytes, leading dots
    if segment.is_empty()
        || segment.starts_with('.')
        || segment.contains('\0')
        || segment.contains("..")
        || segment.contains('/')
        || segment.contains('\\')
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "invalid segment name".into(),
            ),
        ));
    }
    let media_id = MediaId(id);
    if let Some(transcode_service) = &state.transcode_service
        && let Some(session) =
            transcode_service.find_session(media_id, &profile).await
    {
        let segment_path = session.cache_path.join(&segment);
        // Read directly instead of `exists()` + `read()`: that pair races
        // with the transcoder creating/removing files (TOCTOU). A missing
        // file simply means the segment is not ready yet.
        match tokio::fs::read(&segment_path).await {
            Ok(data) => return build_response("video/mp4", data),
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
                // Session exists but segment not ready yet; fall through to 202.
            },
            Err(e) => {
                return Err(ApiError(
                    pinakes_core::error::PinakesError::InvalidOperation(
                        format!("failed to read segment: {e}"),
                    ),
                ));
            },
        }
        return build_response_with_status(
            StatusCode::ACCEPTED,
            &[("Retry-After", "2")],
            "segment not yet available",
        );
    }
    Err(ApiError(
        pinakes_core::error::PinakesError::InvalidOperation(
            "no transcode session found; start a transcode first via POST \
            /media/{id}/transcode"
                .into(),
        ),
    ))
}

View file

@ -0,0 +1,313 @@
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::{
model::MediaId,
subtitles::{
Subtitle,
detect_format,
extract_embedded_track,
list_embedded_tracks,
validate_language_code,
},
};
use uuid::Uuid;
use crate::{
dto::{
AddSubtitleRequest,
SubtitleListResponse,
SubtitleResponse,
SubtitleTrackInfoResponse,
UpdateSubtitleOffsetRequest,
},
error::ApiError,
state::AppState,
};
/// List a media item's registered subtitles plus any embedded subtitle
/// tracks discovered in the source file.
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/subtitles",
    tag = "subtitles",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Subtitles and available embedded tracks", body = SubtitleListResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_subtitles(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<SubtitleListResponse>, ApiError> {
    let item = state.storage.get_media(MediaId(id)).await?;
    let stored = state.storage.get_media_subtitles(MediaId(id)).await?;
    // Probing embedded tracks is best-effort: a probe failure yields an
    // empty track list rather than an error.
    let embedded = match list_embedded_tracks(&item.path).await {
        Ok(tracks) => tracks,
        Err(_) => Vec::new(),
    };
    let response = SubtitleListResponse {
        subtitles: stored.into_iter().map(Into::into).collect(),
        available_tracks: embedded.into_iter().map(Into::into).collect(),
    };
    Ok(Json(response))
}
/// Register a subtitle for a media item, either by extracting an embedded
/// track with ffmpeg or by referencing an external file under a configured
/// library root.
#[utoipa::path(
    post,
    path = "/api/v1/media/{id}/subtitles",
    tag = "subtitles",
    params(("id" = Uuid, Path, description = "Media item ID")),
    request_body = AddSubtitleRequest,
    responses(
        (status = 200, description = "Subtitle added", body = SubtitleResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn add_subtitle(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<AddSubtitleRequest>,
) -> Result<Json<SubtitleResponse>, ApiError> {
    // Validate language code if provided.
    if let Some(ref lang) = req.language
        && !validate_language_code(lang)
    {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidLanguageCode(lang.clone()),
        ));
    }
    let is_embedded = req.is_embedded.unwrap_or(false);
    // Resolve where the subtitle data lives and what format it actually is;
    // the client-supplied format field is never trusted.
    let (file_path, resolved_format) = if is_embedded {
        // Embedded subtitle: validate track_index and extract via ffmpeg.
        let track_index = req.track_index.ok_or_else(|| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                "track_index is required for embedded subtitles".into(),
            ))
        })?;
        let item = state.storage.get_media(MediaId(id)).await?;
        let tracks = list_embedded_tracks(&item.path).await?;
        // The requested index must correspond to a real track in the file.
        let track =
            tracks
                .iter()
                .find(|t| t.index == track_index)
                .ok_or(ApiError(
                    pinakes_core::error::PinakesError::SubtitleTrackNotFound {
                        index: track_index,
                    },
                ))?;
        // Use the format detected from the embedded track metadata as
        // authoritative.
        let embedded_format = track.format;
        let ext = embedded_format.to_string();
        // Extracted tracks are stored under <data-dir>/subtitles/<media-id>/.
        let output_dir = pinakes_core::config::Config::default_data_dir()
            .join("subtitles")
            .join(id.to_string());
        let output_path = output_dir.join(format!("{track_index}.{ext}"));
        tokio::fs::create_dir_all(&output_dir).await.map_err(|e| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                format!("failed to create subtitle output dir: {e}"),
            ))
        })?;
        extract_embedded_track(&item.path, track_index, &output_path).await?;
        (Some(output_path), embedded_format)
    } else {
        // External subtitle file: validate path then detect format from content.
        let path_str = req.file_path.ok_or_else(|| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                "file_path is required for non-embedded subtitles".into(),
            ))
        })?;
        let path = std::path::PathBuf::from(&path_str);
        use std::path::Component;
        // Reject relative paths and any `..` component before comparing
        // against the configured roots, so the prefix check cannot be escaped.
        if !path.is_absolute()
            || path.components().any(|c| c == Component::ParentDir)
        {
            return Err(ApiError::bad_request(
                "file_path must be an absolute path within a configured root",
            ));
        }
        let roots = state.config.read().await.directories.roots.clone();
        if !roots.iter().any(|root| path.starts_with(root)) {
            return Err(ApiError::bad_request(
                "file_path must be an absolute path within a configured root",
            ));
        }
        let exists = tokio::fs::try_exists(&path).await.map_err(|e| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                format!("failed to check subtitle file: {e}"),
            ))
        })?;
        if !exists {
            return Err(ApiError(pinakes_core::error::PinakesError::FileNotFound(
                path,
            )));
        }
        // Detect the actual format from the file extension; use it as authoritative
        // rather than trusting the client-supplied format field.
        let detected_format = detect_format(&path).ok_or_else(|| {
            ApiError(pinakes_core::error::PinakesError::InvalidOperation(
                format!("unrecognised subtitle format for: {}", path.display()),
            ))
        })?;
        (Some(path), detected_format)
    };
    // Persist the subtitle record; offset defaults to 0 ms when omitted.
    let subtitle = Subtitle {
        id: Uuid::now_v7(),
        media_id: MediaId(id),
        language: req.language,
        format: resolved_format,
        file_path,
        is_embedded,
        track_index: req.track_index,
        offset_ms: req.offset_ms.unwrap_or(0),
        created_at: chrono::Utc::now(),
    };
    state.storage.add_subtitle(&subtitle).await?;
    Ok(Json(SubtitleResponse::from(subtitle)))
}
/// Delete a subtitle record by its id.
#[utoipa::path(
    delete,
    path = "/api/v1/subtitles/{id}",
    tag = "subtitles",
    params(("id" = Uuid, Path, description = "Subtitle ID")),
    responses(
        (status = 200, description = "Subtitle deleted"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn delete_subtitle(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.delete_subtitle(id).await?;
    let body = serde_json::json!({"deleted": true});
    Ok(Json(body))
}
#[utoipa::path(
get,
path = "/api/v1/media/{media_id}/subtitles/{subtitle_id}/content",
tag = "subtitles",
params(
("media_id" = Uuid, Path, description = "Media item ID"),
("subtitle_id" = Uuid, Path, description = "Subtitle ID"),
),
responses(
(status = 200, description = "Subtitle content"),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_subtitle_content(
State(state): State<AppState>,
Path((media_id, subtitle_id)): Path<(Uuid, Uuid)>,
) -> Result<axum::response::Response, ApiError> {
let subtitles = state.storage.get_media_subtitles(MediaId(media_id)).await?;
let subtitle = subtitles
.into_iter()
.find(|s| s.id == subtitle_id)
.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::NotFound(format!(
"subtitle {subtitle_id}"
)))
})?;
let path = subtitle.file_path.ok_or_else(|| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"subtitle has no associated file to serve".into(),
))
})?;
let fmt = subtitle.format;
let content_type = fmt.mime_type();
let body = if fmt.is_binary() {
let bytes = tokio::fs::read(&path).await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
ApiError(pinakes_core::error::PinakesError::FileNotFound(
path.clone(),
))
} else {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read subtitle file {}: {e}", path.display()),
))
}
})?;
axum::body::Body::from(bytes)
} else {
let text = tokio::fs::read_to_string(&path).await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
ApiError(pinakes_core::error::PinakesError::FileNotFound(
path.clone(),
))
} else {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to read subtitle file {}: {e}", path.display()),
))
}
})?;
axum::body::Body::from(text)
};
axum::response::Response::builder()
.header("Content-Type", content_type)
.body(body)
.map_err(|e| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
format!("failed to build response: {e}"),
))
})
}
/// Update a subtitle's timing offset in milliseconds.
#[utoipa::path(
    patch,
    path = "/api/v1/subtitles/{id}/offset",
    tag = "subtitles",
    params(("id" = Uuid, Path, description = "Subtitle ID")),
    request_body = UpdateSubtitleOffsetRequest,
    responses(
        (status = 200, description = "Offset updated"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn update_offset(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<UpdateSubtitleOffsetRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let offset = req.offset_ms;
    state.storage.update_subtitle_offset(id, offset).await?;
    Ok(Json(serde_json::json!({"updated": true})))
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,194 @@
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid;
use crate::{
dto::{CreateTagRequest, TagMediaRequest, TagResponse},
error::ApiError,
state::AppState,
};
/// Create a tag, optionally nested under `parent_id`.
///
/// The name length is validated in *characters* (not bytes), matching the
/// error message and the character-based validation used by the comment and
/// rating handlers in this crate.
#[utoipa::path(
    post,
    path = "/api/v1/tags",
    tag = "tags",
    request_body = CreateTagRequest,
    responses(
        (status = 200, description = "Tag created", body = TagResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn create_tag(
    State(state): State<AppState>,
    Json(req): Json<CreateTagRequest>,
) -> Result<Json<TagResponse>, ApiError> {
    // Count characters so multi-byte UTF-8 names are measured the way the
    // error message states (the previous byte-length check rejected names
    // of 86-255 characters when written in 3-byte scripts).
    let name_chars = req.name.chars().count();
    if name_chars == 0 || name_chars > 255 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "tag name must be 1-255 characters".into(),
            ),
        ));
    }
    let tag =
        pinakes_core::tags::create_tag(&state.storage, &req.name, req.parent_id)
            .await?;
    Ok(Json(TagResponse::from(tag)))
}
/// List all tags.
#[utoipa::path(
    get,
    path = "/api/v1/tags",
    tag = "tags",
    responses(
        (status = 200, description = "List of tags", body = Vec<TagResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_tags(
    State(state): State<AppState>,
) -> Result<Json<Vec<TagResponse>>, ApiError> {
    let tags = state.storage.list_tags().await?;
    let mut body: Vec<TagResponse> = Vec::with_capacity(tags.len());
    for tag in tags {
        body.push(tag.into());
    }
    Ok(Json(body))
}
/// Fetch a single tag by id.
#[utoipa::path(
    get,
    path = "/api/v1/tags/{id}",
    tag = "tags",
    params(("id" = Uuid, Path, description = "Tag ID")),
    responses(
        (status = 200, description = "Tag", body = TagResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_tag(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<TagResponse>, ApiError> {
    state
        .storage
        .get_tag(id)
        .await
        .map(|tag| Json(TagResponse::from(tag)))
        .map_err(Into::into)
}
/// Delete a tag by id.
#[utoipa::path(
    delete,
    path = "/api/v1/tags/{id}",
    tag = "tags",
    params(("id" = Uuid, Path, description = "Tag ID")),
    responses(
        (status = 200, description = "Tag deleted"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn delete_tag(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    state.storage.delete_tag(id).await?;
    let body = serde_json::json!({"deleted": true});
    Ok(Json(body))
}
/// Apply an existing tag to a media item and notify plugins.
#[utoipa::path(
    post,
    path = "/api/v1/media/{media_id}/tags",
    tag = "tags",
    params(("media_id" = Uuid, Path, description = "Media item ID")),
    request_body = TagMediaRequest,
    responses(
        (status = 200, description = "Tag applied"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn tag_media(
    State(state): State<AppState>,
    Path(media_id): Path<Uuid>,
    Json(req): Json<TagMediaRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    pinakes_core::tags::tag_media(&state.storage, MediaId(media_id), req.tag_id)
        .await?;
    // Emit the plugin event only after the association has been persisted.
    let payload = serde_json::json!({
        "media_id": media_id.to_string(),
        "tag_id": req.tag_id.to_string(),
    });
    state.emit_plugin_event("MediaTagged", &payload);
    Ok(Json(serde_json::json!({"tagged": true})))
}
/// Remove a tag from a media item and notify plugins.
#[utoipa::path(
    delete,
    path = "/api/v1/media/{media_id}/tags/{tag_id}",
    tag = "tags",
    params(
        ("media_id" = Uuid, Path, description = "Media item ID"),
        ("tag_id" = Uuid, Path, description = "Tag ID"),
    ),
    responses(
        (status = 200, description = "Tag removed"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn untag_media(
    State(state): State<AppState>,
    Path((media_id, tag_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    pinakes_core::tags::untag_media(&state.storage, MediaId(media_id), tag_id)
        .await?;
    // Emit the plugin event only after the association has been removed.
    let payload = serde_json::json!({
        "media_id": media_id.to_string(),
        "tag_id": tag_id.to_string(),
    });
    state.emit_plugin_event("MediaUntagged", &payload);
    Ok(Json(serde_json::json!({"untagged": true})))
}
/// List the tags applied to a media item.
#[utoipa::path(
    get,
    path = "/api/v1/media/{media_id}/tags",
    tag = "tags",
    params(("media_id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "Media tags", body = Vec<TagResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_media_tags(
    State(state): State<AppState>,
    Path(media_id): Path<Uuid>,
) -> Result<Json<Vec<TagResponse>>, ApiError> {
    let tags = state.storage.get_media_tags(MediaId(media_id)).await?;
    let body: Vec<TagResponse> = tags.into_iter().map(Into::into).collect();
    Ok(Json(body))
}

View file

@ -0,0 +1,121 @@
use axum::{
Json,
extract::{Path, Query, State},
};
use pinakes_core::model::MediaId;
use uuid::Uuid;
use crate::{
dto::{CreateTranscodeRequest, PaginationParams, TranscodeSessionResponse},
error::ApiError,
state::AppState,
};
/// Queue a transcode job for a media item using the requested profile.
#[utoipa::path(
    post,
    path = "/api/v1/media/{id}/transcode",
    tag = "transcode",
    params(("id" = Uuid, Path, description = "Media item ID")),
    request_body = CreateTranscodeRequest,
    responses(
        (status = 200, description = "Transcode job submitted"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn start_transcode(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<CreateTranscodeRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Reject empty or oversized profile names before queueing any work.
    if !(1..=255).contains(&req.profile.len()) {
        return Err(ApiError::bad_request("profile must be 1-255 bytes"));
    }
    let job = pinakes_core::jobs::JobKind::Transcode {
        media_id: MediaId(id),
        profile: req.profile,
    };
    let job_id = state.job_queue.submit(job).await;
    Ok(Json(serde_json::json!({"job_id": job_id.to_string()})))
}
/// Fetch a single transcode session by its ID.
#[utoipa::path(
    get,
    path = "/api/v1/transcode/{id}",
    tag = "transcode",
    params(("id" = Uuid, Path, description = "Transcode session ID")),
    responses(
        (status = 200, description = "Transcode session details", body = TranscodeSessionResponse),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_session(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<TranscodeSessionResponse>, ApiError> {
    let record = state.storage.get_transcode_session(id).await?;
    Ok(Json(record.into()))
}
/// List all transcode sessions.
#[utoipa::path(
    get,
    path = "/api/v1/transcode",
    tag = "transcode",
    responses(
        (status = 200, description = "List of transcode sessions", body = Vec<TranscodeSessionResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_sessions(
    State(state): State<AppState>,
    Query(params): Query<PaginationParams>,
) -> Result<Json<Vec<TranscodeSessionResponse>>, ApiError> {
    // Pagination parameters are accepted but not applied yet; they are
    // reserved for future filtering. Consume them explicitly.
    drop(params);
    let sessions = state.storage.list_transcode_sessions(None).await?;
    let payload: Vec<TranscodeSessionResponse> =
        sessions.into_iter().map(Into::into).collect();
    Ok(Json(payload))
}
/// Cancel a transcode session.
///
/// With an active transcode service the cancellation goes through it; without
/// one, only the persisted session status is flipped to `Cancelled`.
#[utoipa::path(
    delete,
    path = "/api/v1/transcode/{id}",
    tag = "transcode",
    params(("id" = Uuid, Path, description = "Transcode session ID")),
    responses(
        (status = 200, description = "Transcode session cancelled"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn cancel_session(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    match state.transcode_service.as_ref() {
        Some(service) => {
            service.cancel_transcode(id, &state.storage).await?;
        }
        None => {
            state
                .storage
                .update_transcode_status(
                    id,
                    pinakes_core::transcode::TranscodeStatus::Cancelled,
                    0.0,
                )
                .await?;
        }
    }
    Ok(Json(serde_json::json!({"cancelled": true})))
}

View file

@ -0,0 +1,239 @@
use axum::{
Json,
extract::{Multipart, Path, State},
http::{StatusCode, header},
response::IntoResponse,
};
use pinakes_core::{model::MediaId, upload};
use tokio_util::io::ReaderStream;
use uuid::Uuid;
use crate::{
dto::{ManagedStorageStatsResponse, UploadResponse},
error::{ApiError, ApiResult},
state::AppState,
};
/// Sanitize a filename for use in a `Content-Disposition` header.
///
/// Replaces characters that could break header parsing or enable header
/// injection (`"` and `\`), and any ASCII control character (including CR/LF)
/// or non-ASCII character, with `_`. HTTP header values must be visible
/// ASCII; the previous version only stripped `"`, `\`, CR, and LF, so a
/// UTF-8 filename (or one containing e.g. a tab) produced an invalid header
/// value.
fn sanitize_content_disposition(filename: &str) -> String {
    let safe: String = filename
        .chars()
        .map(|c| {
            if c == '"' || c == '\\' || c.is_ascii_control() || !c.is_ascii() {
                '_'
            } else {
                c
            }
        })
        .collect();
    format!("attachment; filename=\"{safe}\"")
}
/// Upload a file to managed storage
/// POST /api/upload
///
/// Reads exactly one multipart field (the first); any additional fields in
/// the request are ignored. The whole file is buffered in memory before
/// hashing/storing — assumes an upstream body-size limit exists; TODO confirm.
#[utoipa::path(
    post,
    path = "/api/v1/upload",
    tag = "upload",
    responses(
        (status = 200, description = "File uploaded", body = UploadResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn upload_file(
    State(state): State<AppState>,
    mut multipart: Multipart,
) -> ApiResult<Json<UploadResponse>> {
    // Managed storage must be both constructed (service present) and enabled
    // in the current configuration; the two can differ after a config reload.
    let managed_storage = state
        .managed_storage
        .as_ref()
        .ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
    let config = state.config.read().await;
    if !config.managed_storage.enabled {
        return Err(ApiError::bad_request("Managed storage is not enabled"));
    }
    // Release the config read lock before doing any slow body I/O.
    drop(config);
    // Extract file from multipart
    let field = multipart
        .next_field()
        .await
        .map_err(|e| {
            ApiError::bad_request(format!("Failed to read multipart field: {e}"))
        })?
        .ok_or_else(|| ApiError::bad_request("No file provided"))?;
    // Filename and content type must be captured before `field.bytes()`
    // consumes the field.
    let original_filename = field
        .file_name()
        .map_or_else(|| "unknown".to_string(), std::string::ToString::to_string);
    let content_type = field.content_type().map_or_else(
        || "application/octet-stream".to_string(),
        std::string::ToString::to_string,
    );
    let data = field.bytes().await.map_err(|e| {
        ApiError::bad_request(format!("Failed to read file data: {e}"))
    })?;
    // Process the upload
    let result = upload::process_upload_bytes(
        &state.storage,
        managed_storage.as_ref(),
        &data,
        &original_filename,
        Some(&content_type),
    )
    .await
    .map_err(|e| ApiError::internal(format!("Upload failed: {e}")))?;
    Ok(Json(result.into()))
}
/// Download a managed or external media file
/// GET /api/media/{id}/download
///
/// Managed items are streamed from content-addressable storage; external
/// items are streamed from their original on-disk path. Managed storage is
/// only required for the managed branch, so external files remain
/// downloadable even when managed storage is disabled (previously the
/// handler rejected external downloads too whenever managed storage was
/// off, despite never using it for them).
#[utoipa::path(
    get,
    path = "/api/v1/media/{id}/download",
    tag = "upload",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 200, description = "File content"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn download_file(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> ApiResult<impl IntoResponse> {
    let media_id = MediaId(id);
    let item = state
        .storage
        .get_media(media_id)
        .await
        .map_err(|e| ApiError::not_found(format!("Media not found: {e}")))?;
    // Build the streaming body from whichever backing store holds the bytes.
    let body = if item.storage_mode == pinakes_core::model::StorageMode::Managed {
        let managed_storage = state
            .managed_storage
            .as_ref()
            .ok_or_else(|| ApiError::bad_request("Managed storage is not enabled"))?;
        let file = managed_storage
            .open(&item.content_hash)
            .await
            .map_err(|e| ApiError::not_found(format!("Blob not found: {e}")))?;
        axum::body::Body::from_stream(ReaderStream::new(file))
    } else {
        let file = tokio::fs::File::open(&item.path)
            .await
            .map_err(|e| ApiError::not_found(format!("File not found: {e}")))?;
        axum::body::Body::from_stream(ReaderStream::new(file))
    };
    let content_type = item.media_type.mime_type();
    // Prefer the original upload filename; fall back to the stored file name.
    let filename = item.original_filename.unwrap_or(item.file_name);
    Ok((
        [
            (header::CONTENT_TYPE, content_type),
            (
                header::CONTENT_DISPOSITION,
                sanitize_content_disposition(&filename),
            ),
        ],
        body,
    ))
}
/// Migrate an external file into managed (content-addressable) storage.
/// POST /api/media/{id}/move-to-managed
#[utoipa::path(
    post,
    path = "/api/v1/media/{id}/move-to-managed",
    tag = "upload",
    params(("id" = Uuid, Path, description = "Media item ID")),
    responses(
        (status = 204, description = "File migrated"),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn move_to_managed(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> ApiResult<StatusCode> {
    let Some(managed_storage) = state.managed_storage.as_ref() else {
        return Err(ApiError::bad_request("Managed storage is not enabled"));
    };
    upload::migrate_to_managed(
        &state.storage,
        managed_storage.as_ref(),
        MediaId(id),
    )
    .await
    .map_err(|e| ApiError::internal(format!("Migration failed: {e}")))?;
    Ok(StatusCode::NO_CONTENT)
}
/// Report managed storage statistics.
/// GET /api/managed/stats
#[utoipa::path(
    get,
    path = "/api/v1/managed/stats",
    tag = "upload",
    responses(
        (status = 200, description = "Managed storage statistics", body = ManagedStorageStatsResponse),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn managed_stats(
    State(state): State<AppState>,
) -> ApiResult<Json<ManagedStorageStatsResponse>> {
    match state.storage.managed_storage_stats().await {
        Ok(stats) => Ok(Json(stats.into())),
        Err(e) => Err(ApiError::internal(format!("Failed to get stats: {e}"))),
    }
}

View file

@ -0,0 +1,337 @@
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::users::{CreateUserRequest, UpdateUserRequest, UserId};
use crate::{
dto::{
GrantLibraryAccessRequest,
RevokeLibraryAccessRequest,
UserLibraryResponse,
UserResponse,
},
error::ApiError,
state::AppState,
};
/// List every registered user account (admin only).
#[utoipa::path(
    get,
    path = "/api/v1/admin/users",
    tag = "users",
    responses(
        (status = 200, description = "List of users", body = Vec<UserResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_users(
    State(state): State<AppState>,
) -> Result<Json<Vec<UserResponse>>, ApiError> {
    let accounts = state.storage.list_users().await?;
    let payload: Vec<UserResponse> =
        accounts.into_iter().map(Into::into).collect();
    Ok(Json(payload))
}
/// Create a new user (admin only)
///
/// Validates username and password, hashes the password, and inserts the
/// user. Username uniqueness is enforced by the database's unique
/// constraint rather than a pre-check, avoiding a TOCTOU race.
#[utoipa::path(
    post,
    path = "/api/v1/admin/users",
    tag = "users",
    request_body(
        content = inline(serde_json::Value),
        description = "username, password, role, and optional profile fields",
        content_type = "application/json"
    ),
    responses(
        (status = 200, description = "User created", body = UserResponse),
        (status = 400, description = "Bad request"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
        (status = 500, description = "Internal server error"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn create_user(
    State(state): State<AppState>,
    Json(req): Json<CreateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
    // Validate username
    // NOTE(review): `len()` counts bytes, but the message says "characters";
    // multi-byte usernames hit the limit earlier than advertised — confirm
    // whether a char count was intended.
    if req.username.is_empty() || req.username.len() > 255 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "username must be 1-255 characters".into(),
            ),
        ));
    }
    // Validate password
    if req.password.len() < 8 {
        return Err(ApiError(
            pinakes_core::error::PinakesError::InvalidOperation(
                "password must be at least 8 characters".into(),
            ),
        ));
    }
    // Hash password
    let password_hash = pinakes_core::users::auth::hash_password(&req.password)?;
    // Create user - rely on DB unique constraint for username to avoid TOCTOU
    // race
    let user = state
        .storage
        .create_user(&req.username, &password_hash, req.role, req.profile)
        .await
        .map_err(|e| {
            // Map unique constraint violations to a user-friendly conflict error
            // NOTE(review): matching on the error's display string is fragile;
            // the patterns cover SQLite ("UNIQUE") and Postgres ("duplicate
            // key") wording — verify against the backends actually in use.
            let err_str = e.to_string();
            if err_str.contains("UNIQUE")
                || err_str.contains("unique")
                || err_str.contains("duplicate key")
            {
                ApiError(pinakes_core::error::PinakesError::DuplicateHash(
                    "username already exists".into(),
                ))
            } else {
                ApiError(e)
            }
        })?;
    Ok(Json(UserResponse::from(user)))
}
/// Get a specific user by ID
#[utoipa::path(
get,
path = "/api/v1/admin/users/{id}",
tag = "users",
params(("id" = String, Path, description = "User ID")),
responses(
(status = 200, description = "User details", body = UserResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn get_user(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<UserResponse>, ApiError> {
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
let user = state.storage.get_user(user_id).await?;
Ok(Json(UserResponse::from(user)))
}
/// Update a user
#[utoipa::path(
patch,
path = "/api/v1/admin/users/{id}",
tag = "users",
params(("id" = String, Path, description = "User ID")),
request_body(
content = inline(serde_json::Value),
description = "Optional password, role, or profile fields to update",
content_type = "application/json"
),
responses(
(status = 200, description = "User updated", body = UserResponse),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn update_user(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<UpdateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
// Hash password if provided
let password_hash = if let Some(ref password) = req.password {
if password.len() < 8 {
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(
"password must be at least 8 characters".into(),
),
));
}
Some(pinakes_core::users::auth::hash_password(password)?)
} else {
None
};
let user = state
.storage
.update_user(user_id, password_hash.as_deref(), req.role, req.profile)
.await?;
Ok(Json(UserResponse::from(user)))
}
/// Delete a user (admin only)
#[utoipa::path(
delete,
path = "/api/v1/admin/users/{id}",
tag = "users",
params(("id" = String, Path, description = "User ID")),
responses(
(status = 200, description = "User deleted"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
(status = 404, description = "Not found"),
),
security(("bearer_auth" = []))
)]
pub async fn delete_user(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, ApiError> {
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state.storage.delete_user(user_id).await?;
Ok(Json(serde_json::json!({"deleted": true})))
}
/// Get user's accessible libraries
#[utoipa::path(
get,
path = "/api/v1/admin/users/{id}/libraries",
tag = "users",
params(("id" = String, Path, description = "User ID")),
responses(
(status = 200, description = "User libraries", body = Vec<UserLibraryResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn get_user_libraries(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<Vec<UserLibraryResponse>>, ApiError> {
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
let libraries = state.storage.get_user_libraries(user_id).await?;
Ok(Json(
libraries
.into_iter()
.map(UserLibraryResponse::from)
.collect(),
))
}
/// Validate a client-supplied library root path: bounded length, absolute,
/// and free of `..` traversal components.
fn validate_root_path(path: &str) -> Result<(), ApiError> {
    let within_bounds = (1..=4096).contains(&path.len());
    if !within_bounds {
        return Err(ApiError::bad_request("root_path must be 1-4096 bytes"));
    }
    if !path.starts_with('/') {
        return Err(ApiError::bad_request("root_path must be an absolute path"));
    }
    let has_traversal = path.split('/').any(|segment| segment == "..");
    if has_traversal {
        return Err(ApiError::bad_request(
            "root_path must not contain '..' traversal components",
        ));
    }
    Ok(())
}
/// Grant library access to a user (admin only)
#[utoipa::path(
post,
path = "/api/v1/admin/users/{id}/libraries",
tag = "users",
params(("id" = String, Path, description = "User ID")),
request_body = GrantLibraryAccessRequest,
responses(
(status = 200, description = "Access granted"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn grant_library_access(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<GrantLibraryAccessRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
validate_root_path(&req.root_path)?;
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state
.storage
.grant_library_access(user_id, &req.root_path, req.permission)
.await?;
Ok(Json(serde_json::json!({"granted": true})))
}
/// Revoke library access from a user (admin only)
///
/// Uses a JSON body instead of a path parameter because `root_path` may contain
/// slashes that conflict with URL routing.
#[utoipa::path(
delete,
path = "/api/v1/admin/users/{id}/libraries",
tag = "users",
params(("id" = String, Path, description = "User ID")),
request_body = RevokeLibraryAccessRequest,
responses(
(status = 200, description = "Access revoked"),
(status = 400, description = "Bad request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn revoke_library_access(
State(state): State<AppState>,
Path(id): Path<String>,
Json(req): Json<RevokeLibraryAccessRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
validate_root_path(&req.root_path)?;
let user_id: UserId =
id.parse::<uuid::Uuid>().map(UserId::from).map_err(|_| {
ApiError(pinakes_core::error::PinakesError::InvalidOperation(
"Invalid user ID".into(),
))
})?;
state
.storage
.revoke_library_access(user_id, &req.root_path)
.await?;
Ok(Json(serde_json::json!({"revoked": true})))
}

View file

@ -0,0 +1,71 @@
use axum::{Json, extract::State};
use serde::Serialize;
use crate::{error::ApiError, state::AppState};
/// Serializable summary of one configured webhook, as returned by the
/// `GET /api/v1/webhooks` listing endpoint (copied from the server config).
#[derive(Debug, Serialize, utoipa::ToSchema)]
pub struct WebhookInfo {
    // Destination URL for this webhook.
    pub url: String,
    // Event names this webhook is subscribed to.
    pub events: Vec<String>,
}
/// List all webhooks configured on the server.
#[utoipa::path(
    get,
    path = "/api/v1/webhooks",
    tag = "webhooks",
    responses(
        (status = 200, description = "List of configured webhooks", body = Vec<WebhookInfo>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn list_webhooks(
    State(state): State<AppState>,
) -> Result<Json<Vec<WebhookInfo>>, ApiError> {
    let config = state.config.read().await;
    let mut hooks = Vec::with_capacity(config.webhooks.len());
    for hook in &config.webhooks {
        hooks.push(WebhookInfo {
            url: hook.url.clone(),
            events: hook.events.clone(),
        });
    }
    Ok(Json(hooks))
}
#[utoipa::path(
post,
path = "/api/v1/webhooks/test",
tag = "webhooks",
responses(
(status = 200, description = "Test webhook sent"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden"),
),
security(("bearer_auth" = []))
)]
pub async fn test_webhook(
State(state): State<AppState>,
) -> Result<Json<serde_json::Value>, ApiError> {
let config = state.config.read().await;
let count = config.webhooks.len();
drop(config);
if let Some(ref dispatcher) = state.webhook_dispatcher {
dispatcher.dispatch(pinakes_core::webhooks::WebhookEvent::Test);
Ok(Json(serde_json::json!({
"webhooks_configured": count,
"test_sent": true
})))
} else {
Ok(Json(serde_json::json!({
"webhooks_configured": 0,
"test_sent": false,
"message": "no webhooks configured"
})))
}
}

View file

@ -0,0 +1,54 @@
use std::{path::PathBuf, sync::Arc};
use pinakes_core::{
cache::CacheLayer,
config::Config,
jobs::JobQueue,
managed_storage::ManagedStorageService,
plugin::{PluginManager, PluginPipeline},
scan::ScanProgress,
scheduler::TaskScheduler,
storage::DynStorageBackend,
sync::ChunkedUploadManager,
transcode::TranscodeService,
webhooks::WebhookDispatcher,
};
use tokio::sync::{RwLock, Semaphore};
// Note: Sessions are now stored in the database via StorageBackend
// See storage::SessionData and related methods
/// Max concurrent background session operations (touch/delete).
/// Prevents unbounded task spawning under high request load.
/// Presumably sizes [`AppState::session_semaphore`] — confirm at the
/// construction site.
pub const MAX_SESSION_BACKGROUND_TASKS: usize = 64;
/// Shared application state handed to every axum handler.
///
/// `Clone` is cheap: every field is either an `Arc`, a `Clone` handle, or a
/// small value, so axum can clone the state per request.
#[derive(Clone)]
pub struct AppState {
    // Persistence backend for all catalog data.
    pub storage: DynStorageBackend,
    // Runtime configuration, shared behind a read/write lock.
    pub config: Arc<RwLock<Config>>,
    // Filesystem path the config was loaded from, when it came from a file.
    pub config_path: Option<PathBuf>,
    // Progress reporting for library scans.
    pub scan_progress: ScanProgress,
    // Queue for background jobs (e.g. transcodes submitted by handlers).
    pub job_queue: Arc<JobQueue>,
    pub cache: Arc<CacheLayer>,
    pub scheduler: Arc<TaskScheduler>,
    // Optional subsystems: `None` when the feature is disabled in config.
    pub plugin_manager: Option<Arc<PluginManager>>,
    pub plugin_pipeline: Option<Arc<PluginPipeline>>,
    pub transcode_service: Option<Arc<TranscodeService>>,
    pub managed_storage: Option<Arc<ManagedStorageService>>,
    pub chunked_upload_manager: Option<Arc<ChunkedUploadManager>>,
    pub webhook_dispatcher: Option<Arc<WebhookDispatcher>>,
    // Bounds concurrent background session touch/delete tasks; see
    // MAX_SESSION_BACKGROUND_TASKS.
    pub session_semaphore: Arc<Semaphore>,
}
impl AppState {
/// Emit a plugin event if the pipeline is active.
pub fn emit_plugin_event(
&self,
event_type: &str,
payload: &serde_json::Value,
) {
if let Some(ref pipeline) = self.plugin_pipeline {
pipeline.emit_event(event_type, payload);
}
}
}

View file

@ -0,0 +1,738 @@
mod common;
use axum::{
body::Body,
http::{Request, StatusCode},
};
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
test_addr,
};
use http_body_util::BodyExt;
use tower::ServiceExt;
// Listing media on a fresh app returns an empty JSON array.
#[tokio::test]
async fn test_list_media_empty() {
    let app = setup_app().await;
    let response = app.oneshot(get("/api/v1/media")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let items: Vec<serde_json::Value> = serde_json::from_slice(&body).unwrap();
    assert_eq!(items.len(), 0);
}
// Round-trip: a tag created via POST appears in the subsequent GET list.
#[tokio::test]
async fn test_create_and_list_tags() {
    let app = setup_app().await;
    // Create a tag
    let response = app
        .clone()
        .oneshot(post_json("/api/v1/tags", r#"{"name":"Music"}"#))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    // List tags
    let response = app.oneshot(get("/api/v1/tags")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let tags: Vec<serde_json::Value> = serde_json::from_slice(&body).unwrap();
    assert_eq!(tags.len(), 1);
    assert_eq!(tags[0]["name"], "Music");
}
// Searching an empty catalog succeeds with a zero total count.
#[tokio::test]
async fn test_search_empty() {
    let app = setup_app().await;
    let response = app.oneshot(get("/api/v1/search?q=test")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let result: serde_json::Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(result["total_count"], 0);
}
// An unknown media UUID yields 404, not an error body or a 500.
#[tokio::test]
async fn test_media_not_found() {
    let app = setup_app().await;
    let response = app
        .oneshot(get("/api/v1/media/00000000-0000-0000-0000-000000000000"))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
// Creating a collection and listing it back preserves the name.
#[tokio::test]
async fn test_collections_crud() {
    let app = setup_app().await;
    // Create collection
    let response = app
        .clone()
        .oneshot(post_json(
            "/api/v1/collections",
            r#"{"name":"Favorites","kind":"manual"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    // List collections
    let response = app.oneshot(get("/api/v1/collections")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let cols: Vec<serde_json::Value> = serde_json::from_slice(&body).unwrap();
    assert_eq!(cols.len(), 1);
    assert_eq!(cols[0]["name"], "Favorites");
}
// Statistics on an empty library report zero media and zero bytes.
#[tokio::test]
async fn test_statistics_endpoint() {
    let app = setup_app().await;
    let response = app.oneshot(get("/api/v1/statistics")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let stats: serde_json::Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(stats["total_media"], 0);
    assert_eq!(stats["total_size_bytes"], 0);
}
// The scheduler registers default tasks with id/name/schedule fields.
#[tokio::test]
async fn test_scheduled_tasks_endpoint() {
    let app = setup_app().await;
    let response = app.oneshot(get("/api/v1/tasks/scheduled")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let tasks: Vec<serde_json::Value> = serde_json::from_slice(&body).unwrap();
    assert!(!tasks.is_empty(), "should have default scheduled tasks");
    // Verify structure of first task
    assert!(tasks[0]["id"].is_string());
    assert!(tasks[0]["name"].is_string());
    assert!(tasks[0]["schedule"].is_string());
}
// Full user lifecycle: create, list, fetch by id, delete, then confirm 404.
#[tokio::test]
async fn test_user_management_crud() {
    let app = setup_app().await;
    // Create a user
    let response = app
        .clone()
        .oneshot(post_json(
            "/api/v1/users",
            r#"{"username":"testuser","password":"password123","role":"viewer"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let user: serde_json::Value = serde_json::from_slice(&body).unwrap();
    assert_eq!(user["username"], "testuser");
    assert_eq!(user["role"], "viewer");
    let user_id = user["id"].as_str().unwrap();
    // List users
    let response = app.clone().oneshot(get("/api/v1/users")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let users: Vec<serde_json::Value> = serde_json::from_slice(&body).unwrap();
    assert_eq!(users.len(), 1);
    assert_eq!(users[0]["username"], "testuser");
    // Get specific user
    let response = app
        .clone()
        .oneshot(get(&format!("/api/v1/users/{user_id}")))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let retrieved_user: serde_json::Value =
        serde_json::from_slice(&body).unwrap();
    assert_eq!(retrieved_user["username"], "testuser");
    // Delete user
    // DELETE is built manually (rather than via a helper) so the test
    // client address extension can be attached to the request.
    let mut req = Request::builder()
        .method("DELETE")
        .uri(format!("/api/v1/users/{user_id}"))
        .body(Body::empty())
        .unwrap();
    req.extensions_mut().insert(test_addr());
    let response = app.clone().oneshot(req).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    // Verify user is deleted
    let response = app
        .oneshot(get(&format!("/api/v1/users/{user_id}")))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
// The health check must succeed without any authentication.
#[tokio::test]
async fn test_health_endpoint() {
    let app = setup_app().await;
    // Health endpoint should be publicly accessible
    let response = app.oneshot(get("/api/v1/health")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
// A second user with an already-taken username is rejected with 409.
#[tokio::test]
async fn test_user_duplicate_username() {
    let app = setup_app().await;
    // Create first user
    let response = app
        .clone()
        .oneshot(post_json(
            "/api/v1/users",
            r#"{"username":"duplicate","password":"password1","role":"viewer"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    // Try to create user with same username
    let response = app
        .oneshot(post_json(
            "/api/v1/users",
            r#"{"username":"duplicate","password":"password2","role":"viewer"}"#,
        ))
        .await
        .unwrap();
    // Should fail with conflict (409) for duplicate username
    assert_eq!(response.status(), StatusCode::CONFLICT);
}
// With auth enabled, a request carrying no token is rejected.
#[tokio::test]
async fn test_unauthenticated_request_rejected() {
    let (app, ..) = setup_app_with_auth().await;
    // Request without Bearer token
    let response = app.oneshot(get("/api/v1/media")).await.unwrap();
    assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
// A syntactically present but invalid bearer token is rejected.
#[tokio::test]
async fn test_invalid_token_rejected() {
    let (app, ..) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed("/api/v1/media", "totally-invalid-token"))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
// Valid credentials return a token plus the username and role.
#[tokio::test]
async fn test_login_valid_credentials() {
    let (app, ..) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json(
            "/api/v1/auth/login",
            r#"{"username":"admin","password":"adminpass"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    assert!(body["token"].is_string());
    assert_eq!(body["username"], "admin");
    assert_eq!(body["role"], "admin");
}
// A wrong password for a known user is rejected with 401.
#[tokio::test]
async fn test_login_invalid_password() {
    let (app, ..) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json(
            "/api/v1/auth/login",
            r#"{"username":"admin","password":"wrongpassword"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
// An unknown username is rejected with the same 401 as a bad password.
#[tokio::test]
async fn test_login_unknown_user() {
    let (app, ..) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json(
            "/api/v1/auth/login",
            r#"{"username":"nonexistent","password":"whatever"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
// /auth/me reflects the identity bound to the presented token.
#[tokio::test]
async fn test_auth_me_endpoint() {
    let (app, admin_token, ..) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed("/api/v1/auth/me", &admin_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    assert_eq!(body["username"], "admin");
    assert_eq!(body["role"], "admin");
}
// Logging out invalidates the token for all subsequent requests.
#[tokio::test]
async fn test_logout() {
    let (app, admin_token, ..) = setup_app_with_auth().await;
    // Logout
    let response = app
        .clone()
        .oneshot(post_json_authed("/api/v1/auth/logout", "", &admin_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    // Subsequent requests with same token should fail
    let response = app
        .oneshot(get_authed("/api/v1/media", &admin_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
// RBAC: viewers may not hit editor-only write routes.
#[tokio::test]
async fn test_viewer_cannot_access_editor_routes() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    // POST /tags is an editor-only route
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/tags",
            r#"{"name":"test"}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
// RBAC: viewers may not hit admin-only routes.
#[tokio::test]
async fn test_viewer_cannot_access_admin_routes() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    // GET /users is an admin-only route
    let response = app
        .oneshot(get_authed("/api/v1/users", &viewer_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
// RBAC: editors may not hit admin-only routes either.
#[tokio::test]
async fn test_editor_cannot_access_admin_routes() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed("/api/v1/users", &editor_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
// RBAC: editors can use write routes such as tag creation.
#[tokio::test]
async fn test_editor_can_write() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/tags",
            r#"{"name":"EditorTag"}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_admin_can_access_all() {
let (app, admin_token, ..) = setup_app_with_auth().await;
// Viewer route
let response = app
.clone()
.oneshot(get_authed("/api/v1/media", &admin_token))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
// Editor route
let response = app
.clone()
.oneshot(post_json_authed(
"/api/v1/tags",
r#"{"name":"AdminTag"}"#,
&admin_token,
))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
// Admin route
let response = app
.oneshot(get_authed("/api/v1/users", &admin_token))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_rating_invalid_stars_zero() {
let (app, _, editor_token, _) = setup_app_with_auth().await;
let response = app
.oneshot(post_json_authed(
"/api/v1/media/00000000-0000-0000-0000-000000000000/ratings",
r#"{"stars":0}"#,
&editor_token,
))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_rating_invalid_stars_six() {
let (app, _, editor_token, _) = setup_app_with_auth().await;
let response = app
.oneshot(post_json_authed(
"/api/v1/media/00000000-0000-0000-0000-000000000000/ratings",
r#"{"stars":6}"#,
&editor_token,
))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_comment_empty_text() {
let (app, _, editor_token, _) = setup_app_with_auth().await;
let response = app
.oneshot(post_json_authed(
"/api/v1/media/00000000-0000-0000-0000-000000000000/comments",
r#"{"text":""}"#,
&editor_token,
))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
// A freshly provisioned viewer has no favorites: the list endpoint must
// return 200 with an empty JSON array.
#[tokio::test]
async fn test_favorites_list_empty() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = get_authed("/api/v1/favorites", &viewer);
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let json = response_body(resp).await;
    let favorites = json.as_array().expect("array response");
    assert!(favorites.is_empty());
}
// Full create/list/get/update/delete cycle for playlists as an editor.
#[tokio::test]
async fn test_playlist_crud() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    // Create
    let response = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/playlists",
            r#"{"name":"My Playlist"}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    let playlist_id = body["id"].as_str().unwrap().to_string();
    assert_eq!(body["name"], "My Playlist");
    // List
    let response = app
        .clone()
        .oneshot(get_authed("/api/v1/playlists", &editor_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    assert_eq!(body.as_array().unwrap().len(), 1);
    // Get
    let response = app
        .clone()
        .oneshot(get_authed(
            &format!("/api/v1/playlists/{playlist_id}"),
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    // Update
    let response = app
        .clone()
        .oneshot(patch_json_authed(
            &format!("/api/v1/playlists/{playlist_id}"),
            r#"{"name":"Updated Playlist","description":"A test description"}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    assert_eq!(body["name"], "Updated Playlist");
    // Delete
    let response = app
        .clone()
        .oneshot(delete_authed(
            &format!("/api/v1/playlists/{playlist_id}"),
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
// A playlist must have a non-empty name.
#[tokio::test]
async fn test_playlist_empty_name() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/playlists",
            r#"{"name":""}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
// With no recorded events, the most-viewed report is an empty array.
#[tokio::test]
async fn test_most_viewed_empty() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed("/api/v1/analytics/most-viewed", &viewer_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    assert!(body.as_array().unwrap().is_empty());
}
// NOTE(review): the name says "and_query", but only the record half is
// exercised — no analytics query endpoint is hit afterwards. Consider
// renaming or extending the test to match its name.
#[tokio::test]
async fn test_record_event_and_query() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    // Record an event
    let response = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/analytics/events",
            r#"{"event_type":"view","duration_secs":5.0}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    assert_eq!(body["recorded"], true);
}
// Looking up a transcode session by a nonexistent id must not succeed.
#[tokio::test]
async fn test_transcode_session_not_found() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed(
            "/api/v1/transcode/00000000-0000-0000-0000-000000000000",
            &viewer_token,
        ))
        .await
        .unwrap();
    // Should be 404 or 500 (not found in DB)
    assert!(
        response.status() == StatusCode::NOT_FOUND
            || response.status() == StatusCode::INTERNAL_SERVER_ERROR
    );
}
// With no sessions started, the transcode listing is an empty array.
#[tokio::test]
async fn test_transcode_list_empty() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed("/api/v1/transcode", &viewer_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    assert!(body.as_array().unwrap().is_empty());
}
// Fetching an HLS segment for media with no active session must not 200.
#[tokio::test]
async fn test_hls_segment_no_session() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/stream/hls/720p/\
             segment0.ts",
            &viewer_token,
        ))
        .await
        .unwrap();
    // Should fail because media doesn't exist or no transcode session
    assert!(
        response.status() == StatusCode::BAD_REQUEST
            || response.status() == StatusCode::NOT_FOUND
            || response.status() == StatusCode::INTERNAL_SERVER_ERROR
    );
}
#[tokio::test]
async fn test_subtitles_list() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    // Should return empty for nonexistent media (or not found)
    let response = app
        .oneshot(get_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/subtitles",
            &viewer_token,
        ))
        .await
        .unwrap();
    assert!(
        response.status() == StatusCode::OK
            || response.status() == StatusCode::NOT_FOUND
            || response.status() == StatusCode::INTERNAL_SERVER_ERROR
    );
}
#[tokio::test]
async fn test_health_public() {
    let (app, ..) = setup_app_with_auth().await;
    // Health endpoint should be accessible without auth even when accounts
    // enabled
    let response = app.oneshot(get("/api/v1/health")).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
}
// Malformed path parameters are rejected before any handler logic runs.
#[tokio::test]
async fn test_invalid_uuid_in_path() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed("/api/v1/media/not-a-uuid", &viewer_token))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
// Comment text one byte over the limit (presumably 10_000 chars — TODO
// confirm against the handler's validation) is rejected.
#[tokio::test]
async fn test_oversized_comment() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let long_text: String = "x".repeat(10_001);
    let body = format!(r#"{{"text":"{long_text}"}}"#);
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/comments",
            &body,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
#[tokio::test]
async fn test_share_link_expired() {
    // Uses no-auth setup since share links are complex to test with auth
    // (need real media items). Verify the expire check logic works.
    let app = setup_app().await;
    // First import a dummy file to get a media_id, but we can't without a real
    // file. So let's test the public share access endpoint with a nonexistent
    // token.
    let response = app
        .oneshot(get("/api/v1/s/nonexistent_token"))
        .await
        .unwrap();
    // Should fail with not found or internal error (no such share link)
    assert!(
        response.status() == StatusCode::NOT_FOUND
            || response.status() == StatusCode::INTERNAL_SERVER_ERROR
    );
}
// Sync device updates are editor-level writes; viewers get 403.
#[tokio::test]
async fn test_update_sync_device_requires_editor() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let response = app
        .clone()
        .oneshot(put_json_authed(
            &format!("/api/v1/sync/devices/{fake_id}"),
            r#"{"name":"renamed"}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,150 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
// A fresh library has no books.
#[tokio::test]
async fn list_books_empty() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/books")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let body = response_body(resp).await;
    let items = body.as_array().expect("array response");
    assert!(items.is_empty());
}
// Metadata lookup for an unknown book id is a clean 404.
#[tokio::test]
async fn get_book_metadata_not_found() {
    let app = setup_app().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(get(&format!("/api/v1/books/{fake_id}/metadata")))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
// Query-string filters parse and are accepted even on an empty library.
#[tokio::test]
async fn list_books_with_filters() {
    let app = setup_app().await;
    let resp = app
        .oneshot(get("/api/v1/books?author=Tolkien&limit=10"))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn list_series_empty() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/books/series")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
// Pagination params on the authors listing are accepted.
#[tokio::test]
async fn list_authors_empty() {
    let app = setup_app().await;
    let resp = app
        .oneshot(get("/api/v1/books/authors?offset=0&limit=50"))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn reading_progress_nonexistent_book() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .clone()
        .oneshot(get_authed(
            &format!("/api/v1/books/{fake_id}/progress"),
            &viewer,
        ))
        .await
        .unwrap();
    // Nonexistent book always returns 404.
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn update_reading_progress_nonexistent_book() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .clone()
        .oneshot(put_json_authed(
            &format!("/api/v1/books/{fake_id}/progress"),
            r#"{"current_page":42}"#,
            &viewer,
        ))
        .await
        .unwrap();
    // Nonexistent book: handler verifies existence first, so always 404.
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
// A viewer's reading list starts out empty but readable.
#[tokio::test]
async fn reading_list_empty() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .clone()
        .oneshot(get_authed("/api/v1/books/reading-list", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}
// Import is an editor-level write; viewers get 403.
#[tokio::test]
async fn import_media_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/media/import",
            r#"{"path":"/tmp/test.txt"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
// Media mutation is editor-level; viewers get 403 before any lookup.
#[tokio::test]
async fn update_media_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .clone()
        .oneshot(patch_json_authed(
            &format!("/api/v1/media/{fake_id}"),
            r#"{"title":"new"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
// Media deletion is editor-level; viewers get 403.
#[tokio::test]
async fn delete_media_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .clone()
        .oneshot(delete_authed(&format!("/api/v1/media/{fake_id}"), &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,324 @@
use std::{net::SocketAddr, sync::Arc};
use axum::{
body::Body,
extract::ConnectInfo,
http::{Request, StatusCode},
};
use http_body_util::BodyExt;
use pinakes_core::{
cache::CacheLayer,
config::{
AccountsConfig,
AnalyticsConfig,
CloudConfig,
Config,
DirectoryConfig,
EnrichmentConfig,
JobsConfig,
ManagedStorageConfig,
PhotoConfig,
PluginsConfig,
RateLimitConfig,
ScanningConfig,
ServerConfig,
SharingConfig,
SqliteConfig,
StorageBackendType,
StorageConfig,
SyncConfig,
ThumbnailConfig,
TlsConfig,
TranscodingConfig,
TrashConfig,
UiConfig,
UserAccount,
UserRole,
WebhookConfig,
},
jobs::JobQueue,
storage::{StorageBackend, sqlite::SqliteBackend},
};
use tokio::sync::RwLock;
use tower::ServiceExt;
/// Fake socket address for tests (governor needs
/// `ConnectInfo<SocketAddr>`)
pub fn test_addr() -> ConnectInfo<SocketAddr> {
    ConnectInfo("127.0.0.1:9999".parse().unwrap())
}
/// Attach the fake peer address so the governor rate limiter can extract
/// `ConnectInfo<SocketAddr>` from in-process (`oneshot`) requests.
fn with_test_addr(mut req: Request<Body>) -> Request<Body> {
    req.extensions_mut().insert(test_addr());
    req
}
/// Shared request builder behind all the public helpers below.
///
/// - `json`: when present, becomes the request body and sets
///   `content-type: application/json` (matching the original builders,
///   which only set the header on JSON requests).
/// - `token`: when present, adds a `Bearer` authorization header.
///
/// Centralizing this removes seven near-identical builder bodies and keeps
/// header names/ordering consistent across all tests.
fn build_request(
    method: &str,
    uri: &str,
    json: Option<&str>,
    token: Option<&str>,
) -> Request<Body> {
    let mut builder = Request::builder().method(method).uri(uri);
    if json.is_some() {
        builder = builder.header("content-type", "application/json");
    }
    if let Some(token) = token {
        builder = builder.header("authorization", format!("Bearer {token}"));
    }
    let body = json.map_or_else(Body::empty, |b| Body::from(b.to_string()));
    with_test_addr(builder.body(body).unwrap())
}
/// Build a GET request with `ConnectInfo` for rate limiter
/// compatibility
pub fn get(uri: &str) -> Request<Body> {
    build_request("GET", uri, None, None)
}
/// Build a POST request with `ConnectInfo`
pub fn post_json(uri: &str, body: &str) -> Request<Body> {
    build_request("POST", uri, Some(body), None)
}
/// Build a GET request with Bearer auth
pub fn get_authed(uri: &str, token: &str) -> Request<Body> {
    build_request("GET", uri, None, Some(token))
}
/// Build a POST JSON request with Bearer auth
pub fn post_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
    build_request("POST", uri, Some(body), Some(token))
}
/// Build a PUT JSON request with Bearer auth
pub fn put_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
    build_request("PUT", uri, Some(body), Some(token))
}
/// Build a DELETE request with Bearer auth
pub fn delete_authed(uri: &str, token: &str) -> Request<Body> {
    build_request("DELETE", uri, None, Some(token))
}
/// Build a PATCH JSON request with Bearer auth
pub fn patch_json_authed(uri: &str, body: &str, token: &str) -> Request<Body> {
    build_request("PATCH", uri, Some(body), Some(token))
}
/// Baseline config for tests: in-memory SQLite storage, no media roots,
/// authentication disabled, and crate defaults for everything else.
/// `setup_app_with_auth` starts from this and flips the auth fields.
pub fn default_config() -> Config {
    Config {
        storage: StorageConfig {
            backend: StorageBackendType::Sqlite,
            sqlite: Some(SqliteConfig {
                // ":memory:" keeps every test's database isolated and ephemeral.
                path: ":memory:".into(),
            }),
            postgres: None,
        },
        directories: DirectoryConfig { roots: vec![] },
        scanning: ScanningConfig {
            // No filesystem watching in tests; nothing to scan anyway.
            watch: false,
            poll_interval_secs: 300,
            ignore_patterns: vec![],
            import_concurrency: 8,
        },
        server: ServerConfig {
            host: "127.0.0.1".to_string(),
            port: 3000,
            api_key: None,
            tls: TlsConfig::default(),
            // Most router tests run unauthenticated; auth-specific setups
            // override this.
            authentication_disabled: true,
            cors_enabled: false,
            cors_origins: vec![],
            swagger_ui: false,
        },
        rate_limits: RateLimitConfig::default(),
        ui: UiConfig::default(),
        accounts: AccountsConfig::default(),
        jobs: JobsConfig::default(),
        thumbnails: ThumbnailConfig::default(),
        webhooks: Vec::<WebhookConfig>::new(),
        scheduled_tasks: vec![],
        plugins: PluginsConfig::default(),
        transcoding: TranscodingConfig::default(),
        enrichment: EnrichmentConfig::default(),
        cloud: CloudConfig::default(),
        analytics: AnalyticsConfig::default(),
        photos: PhotoConfig::default(),
        managed_storage: ManagedStorageConfig::default(),
        sync: SyncConfig::default(),
        sharing: SharingConfig::default(),
        trash: TrashConfig::default(),
    }
}
/// Build a router backed by a fresh in-memory database with authentication
/// disabled. The job queue uses a no-op executor (spawned tasks finish
/// immediately), so no background work runs during tests.
pub async fn setup_app() -> axum::Router {
    let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
    backend.run_migrations().await.expect("migrations");
    let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
    let config = default_config();
    // Executor closure ignores the job entirely — jobs are "accepted" but
    // never do any work.
    let job_queue =
        JobQueue::new(1, 0, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
    let config = Arc::new(RwLock::new(config));
    let scheduler = pinakes_core::scheduler::TaskScheduler::new(
        job_queue.clone(),
        tokio_util::sync::CancellationToken::new(),
        config.clone(),
        None,
    );
    // All optional subsystems (plugins, transcoding, webhooks, ...) are
    // disabled; routes depending on them should degrade gracefully.
    let state = pinakes_server::state::AppState {
        storage,
        config,
        config_path: None,
        scan_progress: pinakes_core::scan::ScanProgress::new(),
        job_queue,
        cache: Arc::new(CacheLayer::new(60)),
        scheduler: Arc::new(scheduler),
        plugin_manager: None,
        plugin_pipeline: None,
        transcode_service: None,
        managed_storage: None,
        chunked_upload_manager: None,
        session_semaphore: Arc::new(tokio::sync::Semaphore::new(64)),
        webhook_dispatcher: None,
    };
    pinakes_server::app::create_router(state, &RateLimitConfig::default())
}
/// Hash a password for test user accounts
///
/// # Panics
/// Panics if hashing fails — acceptable in a test helper, where a failure
/// here means the environment is broken anyway.
pub fn hash_password(password: &str) -> String {
    pinakes_core::users::auth::hash_password(password).unwrap()
}
/// Set up an app with accounts enabled and three pre-seeded users.
/// Returns (Router, `admin_token`, `editor_token`, `viewer_token`).
pub async fn setup_app_with_auth() -> (axum::Router, String, String, String) {
let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
backend.run_migrations().await.expect("migrations");
let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
let users_to_create = vec![
("admin", "adminpass", UserRole::Admin),
("editor", "editorpass", UserRole::Editor),
("viewer", "viewerpass", UserRole::Viewer),
];
for (username, password, role) in &users_to_create {
let password_hash = hash_password(password);
storage
.create_user(username, &password_hash, *role, None)
.await
.expect("create user");
}
let mut config = default_config();
config.server.authentication_disabled = false;
config.accounts.enabled = true;
config.accounts.users = vec![
UserAccount {
username: "admin".to_string(),
password_hash: hash_password("adminpass"),
role: UserRole::Admin,
},
UserAccount {
username: "editor".to_string(),
password_hash: hash_password("editorpass"),
role: UserRole::Editor,
},
UserAccount {
username: "viewer".to_string(),
password_hash: hash_password("viewerpass"),
role: UserRole::Viewer,
},
];
let job_queue =
JobQueue::new(1, 0, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
let config = Arc::new(RwLock::new(config));
let scheduler = pinakes_core::scheduler::TaskScheduler::new(
job_queue.clone(),
tokio_util::sync::CancellationToken::new(),
config.clone(),
None,
);
let state = pinakes_server::state::AppState {
storage,
config,
config_path: None,
scan_progress: pinakes_core::scan::ScanProgress::new(),
job_queue,
cache: Arc::new(CacheLayer::new(60)),
scheduler: Arc::new(scheduler),
plugin_manager: None,
plugin_pipeline: None,
transcode_service: None,
managed_storage: None,
chunked_upload_manager: None,
session_semaphore: Arc::new(tokio::sync::Semaphore::new(64)),
webhook_dispatcher: None,
};
let app =
pinakes_server::app::create_router(state, &RateLimitConfig::default());
let admin_token = login_user(app.clone(), "admin", "adminpass").await;
let editor_token = login_user(app.clone(), "editor", "editorpass").await;
let viewer_token = login_user(app.clone(), "viewer", "viewerpass").await;
(app, admin_token, editor_token, viewer_token)
}
/// Log in through the real HTTP endpoint and return the bearer token.
///
/// # Panics
/// Panics if the login response is not 200 OK or lacks a string `token`
/// field — both indicate broken test setup.
pub async fn login_user(
    app: axum::Router,
    username: &str,
    password: &str,
) -> String {
    let body = format!(r#"{{"username":"{username}","password":"{password}"}}"#);
    let response = app
        .oneshot(post_json("/api/v1/auth/login", &body))
        .await
        .unwrap();
    assert_eq!(
        response.status(),
        StatusCode::OK,
        "login failed for user {username}"
    );
    let body = response.into_body().collect().await.unwrap().to_bytes();
    let result: serde_json::Value = serde_json::from_slice(&body).unwrap();
    result["token"].as_str().unwrap().to_string()
}
/// Drain a response body and parse it as JSON, yielding `Value::Null` when
/// the bytes are not valid JSON (e.g. an empty body).
pub async fn response_body(
    response: axum::response::Response,
) -> serde_json::Value {
    let bytes = response.into_body().collect().await.unwrap().to_bytes();
    // `Value::default()` is `Value::Null`, so unparsable bodies map to Null.
    serde_json::from_slice(&bytes).unwrap_or_default()
}

View file

@ -0,0 +1,221 @@
/// End-to-end tests that bind a real TCP listener and exercise the HTTP layer.
///
/// These tests differ from the router-level `oneshot` tests in that they verify
/// the full Axum `serve` path: `TcpListener` binding, HTTP framing, and
/// response serialization. Each test spins up a server on an ephemeral port,
/// issues a real HTTP request via reqwest, then shuts down.
mod common;
use std::net::SocketAddr;
use tokio::net::TcpListener;
use tower::ServiceExt;
/// Bind a listener on an ephemeral port, spawn `app` in the background, and
/// return the base URL as a string.
///
/// Uses `into_make_service_with_connect_info` so that the governor rate
/// limiter can extract `ConnectInfo<SocketAddr>` from real TCP connections.
/// Shared by both serve helpers below, which previously duplicated this
/// bind/spawn sequence verbatim.
async fn serve_app(app: axum::Router) -> String {
    let listener = TcpListener::bind("127.0.0.1:0").await.unwrap();
    let addr = listener.local_addr().unwrap();
    tokio::spawn(async move {
        axum::serve(
            listener,
            app.into_make_service_with_connect_info::<SocketAddr>(),
        )
        .await
        .unwrap();
    });
    format!("http://{addr}")
}
/// Bind a listener on an ephemeral port, spawn the server in the background,
/// and return the bound address as a string.
async fn bind_and_serve() -> String {
    serve_app(common::setup_app().await).await
}
/// Bind a listener on an ephemeral port with auth enabled. Returns the base
/// URL and tokens for admin, editor, and viewer users.
///
/// Tokens are issued in-process before binding so they work against the same
/// app instance served over TCP.
async fn bind_and_serve_authed() -> (String, String, String, String) {
    let (app, admin_token, editor_token, viewer_token) =
        common::setup_app_with_auth().await;
    (serve_app(app).await, admin_token, editor_token, viewer_token)
}
// Smoke test: the full serve path (TCP, HTTP framing, JSON body) works.
#[tokio::test]
async fn health_endpoint_responds_over_real_tcp() {
    let base = bind_and_serve().await;
    let resp = reqwest::get(format!("{base}/api/v1/health"))
        .await
        .expect("health request failed");
    assert_eq!(resp.status(), 200);
    let body: serde_json::Value = resp.json().await.expect("body not JSON");
    assert!(
        body["status"].is_string(),
        "expected health response to contain 'status' field"
    );
}
#[tokio::test]
async fn media_list_responds_over_real_tcp() {
    // setup_app has authentication_disabled=true; verifies the router serves
    // real TCP traffic, not just in-process oneshot requests.
    let base = bind_and_serve().await;
    let resp = reqwest::get(format!("{base}/api/v1/media"))
        .await
        .expect("media list request failed");
    assert_eq!(resp.status(), 200);
}
// 404 fallback also works over the real serve path.
#[tokio::test]
async fn unknown_route_returns_404_over_real_tcp() {
    let base = bind_and_serve().await;
    let resp = reqwest::get(format!("{base}/api/v1/nonexistent-route"))
        .await
        .expect("request failed");
    assert_eq!(resp.status(), 404);
}
// A token minted in-process is honored by the same app served over TCP.
#[tokio::test]
async fn authenticated_request_accepted_over_real_tcp() {
    let (base, _, _, viewer_token) = bind_and_serve_authed().await;
    let client = reqwest::Client::new();
    let resp = client
        .get(format!("{base}/api/v1/health"))
        .bearer_auth(&viewer_token)
        .send()
        .await
        .expect("authenticated health request failed");
    assert_eq!(resp.status(), 200);
}
// Garbage tokens are rejected with 401 on a protected route.
#[tokio::test]
async fn invalid_token_rejected_over_real_tcp() {
    let (base, ..) = bind_and_serve_authed().await;
    let client = reqwest::Client::new();
    let resp = client
        .get(format!("{base}/api/v1/webhooks"))
        .bearer_auth("not-a-valid-token")
        .send()
        .await
        .expect("request failed");
    assert_eq!(resp.status(), 401);
}
// In-process cross-checks: verify that RBAC and response shapes are
// consistent whether accessed via oneshot or real TCP.
#[tokio::test]
async fn health_response_body_has_status_field() {
    let app = common::setup_app().await;
    let resp = app.oneshot(common::get("/api/v1/health")).await.unwrap();
    let status = resp.status();
    let body = common::response_body(resp).await;
    assert_eq!(status, 200);
    assert!(body["status"].is_string(), "expected status field: {body}");
}
// One test per common request builder, exercising each HTTP method's RBAC.
#[tokio::test]
async fn rbac_enforced_for_write_methods() {
    let (app, _, editor_token, viewer_token) =
        common::setup_app_with_auth().await;
    let _ = common::hash_password("unused"); // exercises hash_password
    // post_json - unauthenticated login attempt with wrong password
    let resp = app
        .clone()
        .oneshot(common::post_json(
            "/api/v1/auth/login",
            r#"{"username":"viewer","password":"wrong"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), 401);
    // get_authed - viewer can reach health
    let resp = app
        .clone()
        .oneshot(common::get_authed("/api/v1/health", &viewer_token))
        .await
        .unwrap();
    assert_eq!(resp.status(), 200);
    // post_json_authed - viewer cannot trigger batch enrich (editor route)
    let resp = app
        .clone()
        .oneshot(common::post_json_authed(
            "/api/v1/jobs/enrich",
            r#"{"media_ids":["00000000-0000-0000-0000-000000000000"]}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), 403);
    // put_json_authed - viewer cannot update sync device (editor route)
    let resp = app
        .clone()
        .oneshot(common::put_json_authed(
            "/api/v1/sync/devices/00000000-0000-0000-0000-000000000000",
            r#"{"name":"device"}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), 403);
    // patch_json_authed - viewer cannot update media (editor route)
    let resp = app
        .clone()
        .oneshot(common::patch_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000",
            r#"{"title":"x"}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), 403);
    // delete_authed - viewer cannot delete media (editor route)
    let resp = app
        .clone()
        .oneshot(common::delete_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000",
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), 403);
    // test_addr is exercised by all common request builders above via
    // extensions_mut().insert(test_addr()); verify it round-trips
    let addr = common::test_addr();
    assert_eq!(addr.0.ip().to_string(), "127.0.0.1");
    // editor can access editor routes
    let resp = app
        .clone()
        .oneshot(common::post_json_authed(
            "/api/v1/jobs/enrich",
            r#"{"media_ids":["00000000-0000-0000-0000-000000000000"]}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), 200);
}

View file

@ -0,0 +1,210 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
// GET /api/v1/media/{id}/metadata/external (viewer)
#[tokio::test]
async fn get_external_metadata_requires_auth() {
    let (app, ..) = setup_app_with_auth().await;
    let response = app
        .oneshot(get(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/external-metadata",
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn get_external_metadata_viewer_ok() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(get_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/external-metadata",
            &viewer_token,
        ))
        .await
        .unwrap();
    // Media does not exist; 200 with empty array or 404 are both valid
    assert!(
        response.status() == StatusCode::OK
            || response.status() == StatusCode::NOT_FOUND
    );
}
// POST /api/v1/media/{id}/enrich (editor)
#[tokio::test]
async fn trigger_enrichment_requires_editor() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/enrich",
            "{}",
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn trigger_enrichment_editor_accepted() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/enrich",
            "{}",
            &editor_token,
        ))
        .await
        .unwrap();
    // Route is accessible to editors; media not found returns 404, job queued
    // returns 200
    assert!(
        response.status() == StatusCode::OK
            || response.status() == StatusCode::NOT_FOUND
    );
}
// POST /api/v1/jobs/enrich (editor, batch)
#[tokio::test]
async fn batch_enrich_requires_editor() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/jobs/enrich",
            r#"{"media_ids":["00000000-0000-0000-0000-000000000000"]}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn batch_enrich_empty_ids_rejected() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/jobs/enrich",
            r#"{"media_ids":[]}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    // Validation requires 1-1000 ids
    assert!(
        response.status() == StatusCode::BAD_REQUEST
            || response.status() == StatusCode::UNPROCESSABLE_ENTITY
    );
}
#[tokio::test]
async fn batch_enrich_editor_accepted() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/jobs/enrich",
            r#"{"media_ids":["00000000-0000-0000-0000-000000000000"]}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    // Job is queued and a job_id is returned
    assert_eq!(response.status(), StatusCode::OK);
}
// No-auth coverage (exercises setup_app and get)
#[tokio::test]
async fn get_external_metadata_auth_disabled() {
    let app = setup_app().await;
    let response = app
        .oneshot(get(
            "/api/v1/media/00000000-0000-0000-0000-000000000000/external-metadata",
        ))
        .await
        .unwrap();
    assert!(
        response.status() == StatusCode::OK
            || response.status() == StatusCode::NOT_FOUND
    );
}
// Response-shape check for the batch enrich job submission.
#[tokio::test]
async fn batch_enrich_response_has_job_id() {
    let (app, _, editor_token, _) = setup_app_with_auth().await;
    let response = app
        .oneshot(post_json_authed(
            "/api/v1/jobs/enrich",
            r#"{"media_ids":["00000000-0000-0000-0000-000000000000"]}"#,
            &editor_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    let body = response_body(response).await;
    // Route queues a job and returns a job identifier
    assert!(
        body["job_id"].is_string() || body["id"].is_string(),
        "expected job identifier in response: {body}"
    );
}
// RBAC enforcement for editor-level HTTP methods on other routes.
#[tokio::test]
async fn delete_tag_requires_editor() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(delete_authed(
            "/api/v1/tags/00000000-0000-0000-0000-000000000000",
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn update_media_requires_editor() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(patch_json_authed(
            "/api/v1/media/00000000-0000-0000-0000-000000000000",
            r#"{"title":"new title"}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn update_sync_device_requires_editor() {
    let (app, _, _, viewer_token) = setup_app_with_auth().await;
    let response = app
        .oneshot(put_json_authed(
            "/api/v1/sync/devices/00000000-0000-0000-0000-000000000000",
            r#"{"name":"my device"}"#,
            &viewer_token,
        ))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,145 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
// The media count endpoint reports zero on a fresh library.
#[tokio::test]
async fn media_count_empty() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/media/count")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let body = response_body(resp).await;
    assert_eq!(body["count"], 0);
}
#[tokio::test]
async fn batch_delete_empty_ids() {
    let (app, admin, ..) = setup_app_with_auth().await;
    let resp = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/media/batch/delete",
            r#"{"ids":[]}"#,
            &admin,
        ))
        .await
        .unwrap();
    // Empty ids should be rejected (validation requires 1+ items)
    assert!(
        resp.status() == StatusCode::BAD_REQUEST
            || resp.status() == StatusCode::UNPROCESSABLE_ENTITY
    );
}
// Batch delete is an editor-level write; viewers get 403.
#[tokio::test]
async fn batch_delete_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let body = format!(r#"{{"ids":["{fake_id}"]}}"#);
    let resp = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/media/batch/delete",
            &body,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
// Trash listing is paginated and starts empty for an editor.
#[tokio::test]
async fn list_trash_empty() {
    let (app, _, editor, _) = setup_app_with_auth().await;
    let resp = app
        .clone()
        .oneshot(get_authed("/api/v1/trash?offset=0&limit=50", &editor))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let body = response_body(resp).await;
    assert_eq!(body["total_count"], 0);
    let items = body["items"].as_array().expect("items array");
    assert!(items.is_empty());
}
// Batch tagging is editor-level; RBAC fires before body validation, so the
// empty id lists still yield 403 for a viewer.
#[tokio::test]
async fn batch_tag_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .clone()
        .oneshot(post_json_authed(
            "/api/v1/media/batch/tag",
            r#"{"media_ids":[],"tag_ids":[]}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
// Even reading the trash requires editor rights.
#[tokio::test]
async fn list_trash_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .clone()
        .oneshot(get_authed("/api/v1/trash", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
// Renaming media is editor-level; viewers get 403.
#[tokio::test]
async fn rename_media_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .clone()
        .oneshot(patch_json_authed(
            &format!("/api/v1/media/{fake_id}/rename"),
            r#"{"new_name":"renamed.txt"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
// Permanent deletion is editor-level; viewers get 403.
#[tokio::test]
async fn permanent_delete_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .clone()
        .oneshot(delete_authed(&format!("/api/v1/media/{fake_id}"), &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
// Sync device updates are editor-level; viewers get 403.
#[tokio::test]
async fn update_sync_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .clone()
        .oneshot(put_json_authed(
            &format!("/api/v1/sync/devices/{fake_id}"),
            r#"{"name":"renamed"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,150 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
/// Backlinks for an unknown media id return an empty list or 404.
#[tokio::test]
async fn backlinks_for_nonexistent_media() {
    let app = setup_app().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(get(&format!("/api/v1/media/{fake_id}/backlinks")))
        .await
        .unwrap();
    // Should return OK with empty list, or NOT_FOUND
    assert!(
        resp.status() == StatusCode::OK || resp.status() == StatusCode::NOT_FOUND
    );
}

/// Outgoing links for an unknown media id return an empty list or 404.
#[tokio::test]
async fn outgoing_links_for_nonexistent_media() {
    let app = setup_app().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(get(&format!("/api/v1/media/{fake_id}/outgoing-links")))
        .await
        .unwrap();
    assert!(
        resp.status() == StatusCode::OK || resp.status() == StatusCode::NOT_FOUND
    );
}

/// The notes graph is empty on a fresh database, whether the response
/// schema is a bare array or a `{nodes, edges}` object.
#[tokio::test]
async fn notes_graph_empty() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    // `app` is driven exactly once; the former `.clone()` was redundant.
    let resp = app
        .oneshot(get_authed("/api/v1/notes/graph", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let body = response_body(resp).await;
    // Fresh database: graph must be empty.
    if let Some(arr) = body.as_array() {
        assert!(arr.is_empty(), "graph should be empty, got {arr:?}");
    } else if let Some(obj) = body.as_object() {
        // Accept an object if the schema uses {nodes:[], edges:[]} style.
        let nodes_empty = obj
            .get("nodes")
            .and_then(|v| v.as_array())
            .is_none_or(std::vec::Vec::is_empty);
        let edges_empty = obj
            .get("edges")
            .and_then(|v| v.as_array())
            .is_none_or(std::vec::Vec::is_empty);
        assert!(
            nodes_empty && edges_empty,
            "graph should be empty, got {obj:?}"
        );
    } else {
        panic!("expected array or object, got {body}");
    }
}

/// Unresolved wiki-link count is zero on a fresh database.
#[tokio::test]
async fn unresolved_count_zero() {
    let app = setup_app().await;
    let resp = app
        .oneshot(get("/api/v1/notes/unresolved-count"))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let body = response_body(resp).await;
    // Fresh database has no unresolved links.
    let count = body["count"]
        .as_u64()
        .expect("response should have a numeric 'count' field");
    assert_eq!(count, 0, "expected zero unresolved links in fresh database");
}

/// Viewers may not trigger link reindexing.
#[tokio::test]
async fn reindex_links_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(post_json_authed(
            &format!("/api/v1/media/{fake_id}/reindex-links"),
            "{}",
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not update media.
#[tokio::test]
async fn update_media_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(patch_json_authed(
            &format!("/api/v1/media/{fake_id}"),
            r#"{"title":"new title"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not delete media.
#[tokio::test]
async fn delete_media_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(delete_authed(&format!("/api/v1/media/{fake_id}"), &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not update sync devices.
#[tokio::test]
async fn update_sync_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(put_json_authed(
            &format!("/api/v1/sync/devices/{fake_id}"),
            r#"{"name":"renamed"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,274 @@
mod common;
use std::sync::Arc;
use axum::{body::Body, http::StatusCode};
use common::{
default_config,
delete_authed,
get,
get_authed,
patch_json_authed,
post_json,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
test_addr,
};
use http_body_util::BodyExt;
use pinakes_core::{config::PluginsConfig, plugin::PluginManager};
use tower::ServiceExt;
/// Builds a test router with a live `PluginManager` wired into the app
/// state. Returns the router, the manager handle (for direct assertions),
/// and the `TempDir` backing the plugin data/cache directories — the
/// caller must keep the `TempDir` alive so the directories are not
/// deleted mid-test.
async fn setup_app_with_plugins()
-> (axum::Router, Arc<PluginManager>, tempfile::TempDir) {
    use pinakes_core::{
        cache::CacheLayer,
        config::RateLimitConfig,
        jobs::JobQueue,
        storage::{StorageBackend, sqlite::SqliteBackend},
    };
    use tokio::sync::RwLock;
    // In-memory SQLite keeps each test hermetic; migrations run up front.
    let backend = SqliteBackend::in_memory().expect("in-memory SQLite");
    backend.run_migrations().await.expect("migrations");
    let storage = Arc::new(backend) as pinakes_core::storage::DynStorageBackend;
    let temp_dir = tempfile::TempDir::new().expect("create temp dir");
    let data_dir = temp_dir.path().join("data");
    let cache_dir = temp_dir.path().join("cache");
    std::fs::create_dir_all(&data_dir).expect("create data dir");
    std::fs::create_dir_all(&cache_dir).expect("create cache dir");
    // Plugins enabled with permissive test settings: no plugin dirs, no
    // hot reload, unsigned plugins allowed, no trusted keys.
    let plugin_config = PluginsConfig {
        enabled: true,
        data_dir: data_dir.clone(),
        cache_dir: cache_dir.clone(),
        plugin_dirs: vec![],
        enable_hot_reload: false,
        allow_unsigned: true,
        max_concurrent_ops: 2,
        plugin_timeout_secs: 10,
        timeouts:
            pinakes_core::config::PluginTimeoutConfig::default(),
        max_consecutive_failures: 5,
        trusted_keys: vec![],
    };
    let plugin_manager =
        PluginManager::new(data_dir, cache_dir, plugin_config.clone().into())
            .expect("create plugin manager");
    let plugin_manager = Arc::new(plugin_manager);
    let mut config = default_config();
    config.plugins = plugin_config;
    // Job handler is a no-op spawn: these tests never execute real jobs.
    let job_queue =
        JobQueue::new(1, 0, |_id, _kind, _cancel, _jobs| tokio::spawn(async {}));
    let config = Arc::new(RwLock::new(config));
    let scheduler = pinakes_core::scheduler::TaskScheduler::new(
        job_queue.clone(),
        tokio_util::sync::CancellationToken::new(),
        config.clone(),
        None,
    );
    // Minimal AppState: optional subsystems (pipeline, transcode, managed
    // storage, chunked uploads, webhooks) are all disabled (None).
    let state = pinakes_server::state::AppState {
        storage,
        config,
        config_path: None,
        scan_progress: pinakes_core::scan::ScanProgress::new(),
        job_queue,
        cache: Arc::new(CacheLayer::new(60)),
        scheduler: Arc::new(scheduler),
        plugin_manager: Some(plugin_manager.clone()),
        plugin_pipeline: None,
        transcode_service: None,
        managed_storage: None,
        chunked_upload_manager: None,
        session_semaphore: Arc::new(tokio::sync::Semaphore::new(64)),
        webhook_dispatcher: None,
    };
    let router =
        pinakes_server::app::create_router(state, &RateLimitConfig::default());
    (router, plugin_manager, temp_dir)
}
#[tokio::test]
async fn test_list_plugins_empty() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
let response = app.oneshot(get("/api/v1/plugins")).await.unwrap();
assert_eq!(response.status(), StatusCode::OK);
let body = response.into_body().collect().await.unwrap().to_bytes();
let plugins: Vec<serde_json::Value> = serde_json::from_slice(&body).unwrap();
assert_eq!(plugins.len(), 0, "should start with no plugins loaded");
}
#[tokio::test]
async fn test_plugin_manager_exists() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
let plugins = _pm.list_plugins().await;
assert_eq!(plugins.len(), 0);
let response = app.oneshot(get("/api/v1/plugins")).await.unwrap();
assert_eq!(response.status(), StatusCode::OK);
}
#[tokio::test]
async fn test_plugin_not_found() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
let response = app
.oneshot(get("/api/v1/plugins/nonexistent"))
.await
.unwrap();
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn test_plugin_enable_disable() {
let (app, pm, _tmp) = setup_app_with_plugins().await;
assert!(pm.list_plugins().await.is_empty());
let mut req = axum::http::Request::builder()
.method("POST")
.uri("/api/v1/plugins/test-plugin/enable")
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
let response = app.clone().oneshot(req).await.unwrap();
assert_eq!(response.status(), StatusCode::NOT_FOUND);
let mut req = axum::http::Request::builder()
.method("POST")
.uri("/api/v1/plugins/test-plugin/disable")
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
let response = app.oneshot(req).await.unwrap();
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn test_plugin_uninstall_not_found() {
let (app, _pm, _tmp) = setup_app_with_plugins().await;
let mut req = axum::http::Request::builder()
.method("DELETE")
.uri("/api/v1/plugins/nonexistent")
.body(Body::empty())
.unwrap();
req.extensions_mut().insert(test_addr());
let response = app.oneshot(req).await.unwrap();
assert!(
response.status() == StatusCode::BAD_REQUEST
|| response.status() == StatusCode::NOT_FOUND
);
}
// RBAC tests using common helpers with auth setup
#[tokio::test]
async fn media_list_unauthenticated() {
let app = setup_app().await;
let resp = app.oneshot(get("/api/v1/media")).await.unwrap();
assert_eq!(resp.status(), StatusCode::OK);
let body = response_body(resp).await;
assert!(body.is_array());
}
#[tokio::test]
async fn media_list_authenticated() {
let (app, _, _, viewer) = setup_app_with_auth().await;
let resp = app
.clone()
.oneshot(get_authed("/api/v1/media", &viewer))
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::OK);
}
#[tokio::test]
async fn import_unauthenticated_rejected() {
let (app, ..) = setup_app_with_auth().await;
let resp = app
.clone()
.oneshot(post_json(
"/api/v1/media/import",
r#"{"path":"/tmp/test.txt"}"#,
))
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}
#[tokio::test]
async fn import_viewer_forbidden() {
let (app, _, _, viewer) = setup_app_with_auth().await;
let resp = app
.clone()
.oneshot(post_json_authed(
"/api/v1/media/import",
r#"{"path":"/tmp/test.txt"}"#,
&viewer,
))
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn update_media_viewer_forbidden() {
let (app, _, _, viewer) = setup_app_with_auth().await;
let fake_id = uuid::Uuid::now_v7();
let resp = app
.clone()
.oneshot(patch_json_authed(
&format!("/api/v1/media/{fake_id}"),
r#"{"title":"new"}"#,
&viewer,
))
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn delete_media_viewer_forbidden() {
let (app, _, _, viewer) = setup_app_with_auth().await;
let fake_id = uuid::Uuid::now_v7();
let resp = app
.clone()
.oneshot(delete_authed(&format!("/api/v1/media/{fake_id}"), &viewer))
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}
#[tokio::test]
async fn update_sync_device_viewer_forbidden() {
let (app, _, _, viewer) = setup_app_with_auth().await;
let fake_id = uuid::Uuid::now_v7();
let resp = app
.clone()
.oneshot(put_json_authed(
&format!("/api/v1/sync/devices/{fake_id}"),
r#"{"name":"renamed"}"#,
&viewer,
))
.await
.unwrap();
assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

View file

@ -0,0 +1,142 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
/// A fresh database has no outgoing shares.
#[tokio::test]
async fn list_outgoing_shares_empty() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    // `app` is driven exactly once per test here, so the former
    // `.clone()` calls before `oneshot` were redundant.
    let resp = app
        .oneshot(get_authed("/api/v1/shares/outgoing", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let body = response_body(resp).await;
    let shares = body.as_array().expect("array response");
    assert!(shares.is_empty());
}

/// Incoming shares listing succeeds for a viewer on a fresh database.
#[tokio::test]
async fn list_incoming_shares_empty() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/shares/incoming", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}

/// Share-notification listing succeeds for a viewer.
#[tokio::test]
async fn share_notifications_empty() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/notifications/shares", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}

/// Unauthenticated batch share deletion is rejected with 401.
#[tokio::test]
async fn batch_delete_shares_requires_auth() {
    let (app, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json("/api/v1/shares/batch/delete", r#"{"ids":[]}"#))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}

/// Viewers may not batch-delete shares.
#[tokio::test]
async fn batch_delete_shares_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/shares/batch/delete",
            r#"{"ids":[]}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not create shares.
#[tokio::test]
async fn create_share_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let body = format!(r#"{{"media_id":"{fake_id}","share_type":"link"}}"#);
    let resp = app
        .oneshot(post_json_authed("/api/v1/shares", &body, &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not update shares.
#[tokio::test]
async fn update_share_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(patch_json_authed(
            &format!("/api/v1/shares/{fake_id}"),
            r#"{"permissions":["read"]}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not delete shares.
#[tokio::test]
async fn delete_share_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(delete_authed(&format!("/api/v1/shares/{fake_id}"), &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not update sync devices.
#[tokio::test]
async fn update_sync_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(put_json_authed(
            &format!("/api/v1/sync/devices/{fake_id}"),
            r#"{"name":"renamed"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Media listing stays public when auth is disabled.
#[tokio::test]
async fn media_list_no_auth() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/media")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}

View file

@ -0,0 +1,137 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
/// Device listing is empty on a fresh database.
#[tokio::test]
async fn list_sync_devices_empty() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    // `app` is driven exactly once per test in this file, so the former
    // `.clone()` calls before `oneshot` were redundant.
    let resp = app
        .oneshot(get_authed("/api/v1/sync/devices", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
    let body = response_body(resp).await;
    let devices = body.as_array().expect("array response");
    assert!(devices.is_empty());
}

/// Fetching changes fails while sync is disabled in the config.
#[tokio::test]
async fn get_changes_sync_disabled() {
    // Default config has sync.enabled = false; endpoint should reject
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/sync/changes", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}

/// Conflict listing without a device token is a 400.
#[tokio::test]
async fn list_conflicts_requires_device_token() {
    // list_conflicts requires X-Device-Token header; omitting it returns 400
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/sync/conflicts", &viewer))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}

/// Device registration is rejected without credentials.
#[tokio::test]
async fn register_device_requires_auth() {
    let (app, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json(
            "/api/v1/sync/devices",
            r#"{"name":"test","device_type":"desktop","client_version":"0.3.0"}"#,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}

/// Viewers may not register devices.
#[tokio::test]
async fn register_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .oneshot(post_json_authed(
            "/api/v1/sync/devices",
            r#"{"name":"test","device_type":"desktop","client_version":"0.3.0"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not update devices.
#[tokio::test]
async fn update_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(put_json_authed(
            &format!("/api/v1/sync/devices/{fake_id}"),
            r#"{"name":"renamed"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not delete devices.
#[tokio::test]
async fn delete_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(delete_authed(
            &format!("/api/v1/sync/devices/{fake_id}"),
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Viewers may not update media.
#[tokio::test]
async fn update_media_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let fake_id = uuid::Uuid::now_v7();
    let resp = app
        .oneshot(patch_json_authed(
            &format!("/api/v1/media/{fake_id}"),
            r#"{"title":"new"}"#,
            &viewer,
        ))
        .await
        .unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Media listing stays public when auth is disabled.
#[tokio::test]
async fn media_list_no_auth() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/media")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}

View file

@ -0,0 +1,234 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
// GET /api/v1/users (admin)

/// Listing users with an editor token is forbidden.
#[tokio::test]
async fn list_users_requires_admin() {
    let (app, _, editor, _) = setup_app_with_auth().await;
    let req = get_authed("/api/v1/users", &editor);
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Listing users with a viewer token is forbidden.
#[tokio::test]
async fn list_users_viewer_forbidden() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = get_authed("/api/v1/users", &viewer);
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Admins can list the seeded users.
#[tokio::test]
async fn list_users_admin_ok() {
    let (app, admin, ..) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/users", &admin))
        .await
        .unwrap();
    let status = resp.status();
    let body = response_body(resp).await;
    assert_eq!(status, StatusCode::OK);
    let users = body.as_array().expect("users is array");
    // setup_app_with_auth seeds three users
    assert_eq!(users.len(), 3);
}

// POST /api/v1/users (admin)

/// Creating a user with an editor token is forbidden.
#[tokio::test]
async fn create_user_requires_admin() {
    let (app, _, editor, _) = setup_app_with_auth().await;
    let req = post_json_authed(
        "/api/v1/users",
        r#"{"username":"newuser","password":"password123","role":"viewer"}"#,
        &editor,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Admins can create a user; the response echoes the id and username.
#[tokio::test]
async fn create_user_admin_ok() {
    let (app, admin, ..) = setup_app_with_auth().await;
    let req = post_json_authed(
        "/api/v1/users",
        r#"{"username":"newuser","password":"password123","role":"viewer"}"#,
        &admin,
    );
    let resp = app.oneshot(req).await.unwrap();
    let status = resp.status();
    let body = response_body(resp).await;
    assert!(
        status == StatusCode::OK || status == StatusCode::CREATED,
        "unexpected status: {status}"
    );
    assert!(body["id"].is_string(), "expected id field, got: {body}");
    assert_eq!(body["username"].as_str().unwrap(), "newuser");
}

/// Creating a user whose name already exists yields 409.
#[tokio::test]
async fn create_user_duplicate_username() {
    let (app, admin, ..) = setup_app_with_auth().await;
    // "admin" already exists
    let req = post_json_authed(
        "/api/v1/users",
        r#"{"username":"admin","password":"password123","role":"viewer"}"#,
        &admin,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::CONFLICT);
}

/// A password below the minimum length is rejected.
#[tokio::test]
async fn create_user_password_too_short() {
    let (app, admin, ..) = setup_app_with_auth().await;
    let req = post_json_authed(
        "/api/v1/users",
        r#"{"username":"shortpass","password":"short","role":"viewer"}"#,
        &admin,
    );
    let resp = app.oneshot(req).await.unwrap();
    // Password minimum is 8 chars; should be rejected
    let status = resp.status();
    assert!(
        status == StatusCode::BAD_REQUEST
            || status == StatusCode::UNPROCESSABLE_ENTITY
    );
}

// GET /api/v1/users/{id} (admin)

/// Fetching a user with a viewer token is forbidden.
#[tokio::test]
async fn get_user_requires_admin() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = get_authed(
        "/api/v1/users/00000000-0000-0000-0000-000000000000",
        &viewer,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Fetching an unknown user id yields 404.
#[tokio::test]
async fn get_user_not_found() {
    let (app, admin, ..) = setup_app_with_auth().await;
    let req = get_authed(
        "/api/v1/users/00000000-0000-0000-0000-000000000000",
        &admin,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}

// PATCH /api/v1/users/{id} (admin)

/// Updating a user with a viewer token is forbidden.
#[tokio::test]
async fn update_user_requires_admin() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = patch_json_authed(
        "/api/v1/users/00000000-0000-0000-0000-000000000000",
        r#"{"role":"editor"}"#,
        &viewer,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

// DELETE /api/v1/users/{id} (admin)

/// Deleting a user with a viewer token is forbidden.
#[tokio::test]
async fn delete_user_requires_admin() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = delete_authed(
        "/api/v1/users/00000000-0000-0000-0000-000000000000",
        &viewer,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Deleting an unknown user id yields 404.
#[tokio::test]
async fn delete_user_not_found() {
    let (app, admin, ..) = setup_app_with_auth().await;
    let req = delete_authed(
        "/api/v1/users/00000000-0000-0000-0000-000000000000",
        &admin,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}

// GET /api/v1/users/{id}/libraries (admin)

/// Listing a user's libraries with an editor token is forbidden.
#[tokio::test]
async fn get_user_libraries_requires_admin() {
    let (app, _, editor, _) = setup_app_with_auth().await;
    let req = get_authed(
        "/api/v1/users/00000000-0000-0000-0000-000000000000/libraries",
        &editor,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

// PUT coverage

/// Updating a sync device with a viewer token is forbidden.
#[tokio::test]
async fn update_sync_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = put_json_authed(
        "/api/v1/sync/devices/00000000-0000-0000-0000-000000000000",
        r#"{"name":"device"}"#,
        &viewer,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

// No-auth coverage (exercises setup_app and get helpers)

/// Media listing is reachable when auth is not configured.
#[tokio::test]
async fn media_list_no_auth_users_file() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/media")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}

View file

@ -0,0 +1,136 @@
mod common;
use axum::http::StatusCode;
use common::{
delete_authed,
get,
get_authed,
patch_json_authed,
post_json_authed,
put_json_authed,
response_body,
setup_app,
setup_app_with_auth,
};
use tower::ServiceExt;
// GET /api/v1/webhooks (viewer)

/// Webhook listing requires authentication when auth is enabled.
#[tokio::test]
async fn list_webhooks_requires_auth() {
    let (app, ..) = setup_app_with_auth().await;
    let resp = app.oneshot(get("/api/v1/webhooks")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}

/// A viewer sees an empty webhook list when none are configured.
#[tokio::test]
async fn list_webhooks_viewer_ok() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let resp = app
        .oneshot(get_authed("/api/v1/webhooks", &viewer))
        .await
        .unwrap();
    let status = resp.status();
    let body = response_body(resp).await;
    assert_eq!(status, StatusCode::OK);
    // No webhooks configured in test config: empty array
    assert!(body.is_array(), "expected array, got: {body}");
    assert_eq!(body.as_array().unwrap().len(), 0);
}

/// With auth disabled, the viewer-level webhook route is open.
#[tokio::test]
async fn list_webhooks_no_auth_disabled_ok() {
    let app = setup_app().await;
    let resp = app.oneshot(get("/api/v1/webhooks")).await.unwrap();
    assert_eq!(resp.status(), StatusCode::OK);
}

// POST /api/v1/webhooks/test (editor)

/// Firing a test webhook with a viewer token is forbidden.
#[tokio::test]
async fn test_webhook_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = post_json_authed("/api/v1/webhooks/test", "{}", &viewer);
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// With no dispatcher configured, the test route responds without erroring.
#[tokio::test]
async fn test_webhook_no_dispatcher_returns_ok() {
    let (app, _, editor, _) = setup_app_with_auth().await;
    let req = post_json_authed("/api/v1/webhooks/test", "{}", &editor);
    let resp = app.oneshot(req).await.unwrap();
    // Either OK or the route returns a structured response about no webhooks
    let status = resp.status();
    assert!(status == StatusCode::OK || status == StatusCode::BAD_REQUEST);
}

/// An invalid bearer token yields 401.
#[tokio::test]
async fn test_webhook_requires_auth() {
    let (app, ..) = setup_app_with_auth().await;
    let req = post_json_authed("/api/v1/webhooks/test", "{}", "badtoken");
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
}

// RBAC enforcement for editor-level HTTP methods

/// Deleting a playlist with a viewer token is forbidden.
#[tokio::test]
async fn delete_playlist_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = delete_authed(
        "/api/v1/playlists/00000000-0000-0000-0000-000000000000",
        &viewer,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Updating a playlist with a viewer token is forbidden.
#[tokio::test]
async fn update_playlist_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = patch_json_authed(
        "/api/v1/playlists/00000000-0000-0000-0000-000000000000",
        r#"{"name":"updated"}"#,
        &viewer,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}

/// Updating a sync device with a viewer token is forbidden.
#[tokio::test]
async fn update_sync_device_requires_editor() {
    let (app, _, _, viewer) = setup_app_with_auth().await;
    let req = put_json_authed(
        "/api/v1/sync/devices/00000000-0000-0000-0000-000000000000",
        r#"{"name":"device"}"#,
        &viewer,
    );
    let resp = app.oneshot(req).await.unwrap();
    assert_eq!(resp.status(), StatusCode::FORBIDDEN);
}