treewide: complete book management interface

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If5a21f16221f3c56a8008e139f93edc46a6a6964
This commit is contained in:
raf 2026-02-04 23:14:37 +03:00
commit 2f31242442
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
23 changed files with 1693 additions and 126 deletions

View file

@ -76,7 +76,7 @@ fn is_valid_isbn13(isbn13: &str) -> bool {
.filter_map(|(i, c)| c.to_digit(10).map(|d| if i % 2 == 0 { d } else { d * 3 }))
.sum();
sum % 10 == 0
sum.is_multiple_of(10)
}
/// Extract ISBN from text (searches for ISBN-10 or ISBN-13 patterns)
@ -98,14 +98,12 @@ pub fn extract_isbn_from_text(text: &str) -> Option<String> {
];
for pattern_str in patterns {
if let Ok(pattern) = Regex::new(pattern_str) {
if let Some(captures) = pattern.captures(text) {
if let Some(isbn) = captures.get(1) {
if let Ok(normalized) = normalize_isbn(isbn.as_str()) {
return Some(normalized);
}
}
}
if let Ok(pattern) = Regex::new(pattern_str)
&& let Some(captures) = pattern.captures(text)
&& let Some(isbn) = captures.get(1)
&& let Ok(normalized) = normalize_isbn(isbn.as_str())
{
return Some(normalized);
}
}

View file

@ -633,18 +633,18 @@ impl TlsConfig {
if self.key_path.is_none() {
return Err("TLS enabled but key_path not specified".into());
}
if let Some(ref cert_path) = self.cert_path {
if !cert_path.exists() {
return Err(format!(
"TLS certificate file not found: {}",
cert_path.display()
));
}
if let Some(ref cert_path) = self.cert_path
&& !cert_path.exists()
{
return Err(format!(
"TLS certificate file not found: {}",
cert_path.display()
));
}
if let Some(ref key_path) = self.key_path {
if !key_path.exists() {
return Err(format!("TLS key file not found: {}", key_path.display()));
}
if let Some(ref key_path) = self.key_path
&& !key_path.exists()
{
return Err(format!("TLS key file not found: {}", key_path.display()));
}
}
Ok(())
@ -768,11 +768,7 @@ impl Config {
}
// Validate authentication configuration
let has_api_key = self
.server
.api_key
.as_ref()
.map_or(false, |k| !k.is_empty());
let has_api_key = self.server.api_key.as_ref().is_some_and(|k| !k.is_empty());
let has_accounts = !self.accounts.users.is_empty();
let auth_disabled = self.server.authentication_disabled;
@ -785,12 +781,12 @@ impl Config {
}
// Empty API key is not allowed (must use authentication_disabled flag)
if let Some(ref api_key) = self.server.api_key {
if api_key.is_empty() {
return Err("empty api_key is not allowed. To disable authentication, \
set authentication_disabled = true instead"
.into());
}
if let Some(ref api_key) = self.server.api_key
&& api_key.is_empty()
{
return Err("empty api_key is not allowed. To disable authentication, \
set authentication_disabled = true instead"
.into());
}
// Require TLS when authentication is enabled on non-localhost

View file

@ -82,20 +82,18 @@ pub async fn import_file_with_options(
let current_mtime = get_file_mtime(&path);
// Check for incremental scan: skip if file hasn't changed
if options.incremental && !options.force {
if let Some(existing) = storage.get_media_by_path(&path).await? {
// Compare mtimes - if they match, skip this file
if let (Some(stored_mtime), Some(curr_mtime)) = (existing.file_mtime, current_mtime) {
if stored_mtime == curr_mtime {
return Ok(ImportResult {
media_id: existing.id,
was_duplicate: false,
was_skipped: true,
path: path.clone(),
});
}
}
}
if options.incremental
&& !options.force
&& let Some(existing) = storage.get_media_by_path(&path).await?
&& let (Some(stored_mtime), Some(curr_mtime)) = (existing.file_mtime, current_mtime)
&& stored_mtime == curr_mtime
{
return Ok(ImportResult {
media_id: existing.id,
was_duplicate: false,
was_skipped: true,
path: path.clone(),
});
}
let content_hash = compute_file_hash(&path).await?;

View file

@ -79,7 +79,7 @@ pub async fn detect_orphans(storage: &DynStorageBackend) -> Result<OrphanReport>
for (id, path, hash) in &media_paths {
hash_index
.entry(hash.clone())
.or_insert_with(Vec::new)
.or_default()
.push((*id, path.clone()));
}
@ -191,13 +191,12 @@ async fn detect_untracked_files(
if e.file_type().is_dir() {
let name = e.file_name().to_string_lossy();
for pattern in &ignore_patterns {
if pattern.starts_with("*.") {
if pattern.starts_with("*.")
&& let Some(ext) = pattern.strip_prefix("*.")
&& name.ends_with(ext)
{
// Extension pattern
if let Some(ext) = pattern.strip_prefix("*.") {
if name.ends_with(ext) {
return false;
}
}
return false;
} else if pattern.contains('*') {
// Glob pattern - simplified matching
let pattern_without_stars = pattern.replace('*', "");

View file

@ -181,7 +181,7 @@ impl JobQueue {
pub async fn list(&self) -> Vec<Job> {
let map = self.jobs.read().await;
let mut jobs: Vec<Job> = map.values().cloned().collect();
jobs.sort_by(|a, b| b.created_at.cmp(&a.created_at));
jobs.sort_by_key(|job| std::cmp::Reverse(job.created_at));
jobs
}

View file

@ -31,6 +31,7 @@ fn extract_pdf(path: &Path) -> Result<ExtractedMetadata> {
.map_err(|e| PinakesError::MetadataExtraction(format!("PDF load: {e}")))?;
let mut meta = ExtractedMetadata::default();
let mut book_meta = crate::model::ExtractedBookMetadata::default();
// Find the Info dictionary via the trailer
if let Ok(info_ref) = doc.trailer.get(b"Info") {
@ -47,7 +48,28 @@ fn extract_pdf(path: &Path) -> Result<ExtractedMetadata> {
meta.title = pdf_object_to_string(title);
}
if let Ok(author) = dict.get(b"Author") {
meta.artist = pdf_object_to_string(author);
let author_str = pdf_object_to_string(author);
meta.artist = author_str.clone();
// Parse multiple authors if separated by semicolon, comma, or "and"
if let Some(authors_str) = author_str {
let author_names: Vec<String> = authors_str
.split(&[';', ','][..])
.flat_map(|part| part.split(" and "))
.map(|name| name.trim().to_string())
.filter(|name| !name.is_empty())
.collect();
book_meta.authors = author_names
.into_iter()
.enumerate()
.map(|(pos, name)| {
let mut author = crate::model::AuthorInfo::new(name);
author.position = pos as i32;
author
})
.collect();
}
}
if let Ok(subject) = dict.get(b"Subject") {
meta.description = pdf_object_to_string(subject);
@ -68,12 +90,39 @@ fn extract_pdf(path: &Path) -> Result<ExtractedMetadata> {
}
// Page count
let page_count = doc.get_pages().len();
let pages = doc.get_pages();
let page_count = pages.len();
if page_count > 0 {
meta.extra
.insert("page_count".to_string(), page_count.to_string());
book_meta.page_count = Some(page_count as i32);
}
// Try to extract ISBN from first few pages
// Extract text from up to the first 5 pages and search for ISBN patterns
let mut extracted_text = String::new();
let max_pages = page_count.min(5);
for (_page_num, page_id) in pages.iter().take(max_pages) {
if let Ok(content) = doc.get_page_content(*page_id) {
// PDF content streams contain raw operators, but may have text strings
if let Ok(text) = std::str::from_utf8(&content) {
extracted_text.push_str(text);
extracted_text.push(' ');
}
}
}
// Extract ISBN from the text
if let Some(isbn) = crate::books::extract_isbn_from_text(&extracted_text)
&& let Ok(normalized) = crate::books::normalize_isbn(&isbn)
{
book_meta.isbn13 = Some(normalized);
book_meta.isbn = Some(isbn);
}
// Set format
book_meta.format = Some("pdf".to_string());
meta.book_metadata = Some(book_meta);
Ok(meta)
}
@ -86,7 +135,7 @@ fn pdf_object_to_string(obj: &lopdf::Object) -> Option<String> {
}
fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
let doc = epub::doc::EpubDoc::new(path)
let mut doc = epub::doc::EpubDoc::new(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("EPUB parse: {e}")))?;
let mut meta = ExtractedMetadata {
@ -96,18 +145,131 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
..Default::default()
};
let mut book_meta = crate::model::ExtractedBookMetadata::default();
// Extract basic metadata
if let Some(lang) = doc.mdata("language") {
meta.extra
.insert("language".to_string(), lang.value.clone());
book_meta.language = Some(lang.value.clone());
}
if let Some(publisher) = doc.mdata("publisher") {
meta.extra
.insert("publisher".to_string(), publisher.value.clone());
book_meta.publisher = Some(publisher.value.clone());
}
if let Some(date) = doc.mdata("date") {
meta.extra.insert("date".to_string(), date.value.clone());
// Try to parse as YYYY-MM-DD or just YYYY
if let Ok(parsed_date) = chrono::NaiveDate::parse_from_str(&date.value, "%Y-%m-%d") {
book_meta.publication_date = Some(parsed_date);
} else if let Ok(year) = date.value.parse::<i32>() {
book_meta.publication_date = chrono::NaiveDate::from_ymd_opt(year, 1, 1);
}
}
// Extract authors - iterate through all metadata items
let mut authors = Vec::new();
let mut position = 0;
for item in &doc.metadata {
if item.property == "creator" || item.property == "dc:creator" {
let mut author = crate::model::AuthorInfo::new(item.value.clone());
author.position = position;
position += 1;
// Check for file-as in refinements
if let Some(file_as_ref) = item.refinement("file-as") {
author.file_as = Some(file_as_ref.value.clone());
}
// Check for role in refinements
if let Some(role_ref) = item.refinement("role") {
author.role = role_ref.value.clone();
}
authors.push(author);
}
}
book_meta.authors = authors;
// Extract ISBNs from identifiers
let mut identifiers = std::collections::HashMap::new();
for item in &doc.metadata {
if item.property == "identifier" || item.property == "dc:identifier" {
// Try to get scheme from refinements
let scheme = item
.refinement("identifier-type")
.map(|r| r.value.to_lowercase());
let id_type = match scheme.as_deref() {
Some("isbn") => "isbn",
Some("isbn-10") | Some("isbn10") => "isbn",
Some("isbn-13") | Some("isbn13") => "isbn13",
Some("asin") => "asin",
Some("doi") => "doi",
_ => {
// Fallback: detect from value pattern
if item.value.len() == 10
|| item.value.len() == 13
|| item.value.contains('-') && item.value.len() < 20
{
"isbn"
} else {
"other"
}
}
};
// Try to normalize ISBN
if (id_type == "isbn" || id_type == "isbn13")
&& let Ok(normalized) = crate::books::normalize_isbn(&item.value)
{
book_meta.isbn13 = Some(normalized.clone());
book_meta.isbn = Some(item.value.clone());
}
identifiers
.entry(id_type.to_string())
.or_insert_with(Vec::new)
.push(item.value.clone());
}
}
book_meta.identifiers = identifiers;
// Extract Calibre series metadata by parsing the content.opf file
// Try common OPF locations
let opf_paths = vec!["OEBPS/content.opf", "content.opf", "OPS/content.opf"];
let mut opf_data = None;
for path in opf_paths {
if let Some(data) = doc.get_resource_str_by_path(path) {
opf_data = Some(data);
break;
}
}
if let Some(opf_content) = opf_data {
// Look for <meta name="calibre:series" content="Series Name"/>
if let Some(series_start) = opf_content.find("name=\"calibre:series\"")
&& let Some(content_start) = opf_content[series_start..].find("content=\"")
{
let after_content = &opf_content[series_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"') {
book_meta.series_name = Some(after_content[..quote_end].to_string());
}
}
// Look for <meta name="calibre:series_index" content="1.0"/>
if let Some(index_start) = opf_content.find("name=\"calibre:series_index\"")
&& let Some(content_start) = opf_content[index_start..].find("content=\"")
{
let after_content = &opf_content[index_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"')
&& let Ok(index) = after_content[..quote_end].parse::<f64>()
{
book_meta.series_index = Some(index);
}
}
}
// Set format
book_meta.format = Some("epub".to_string());
meta.book_metadata = Some(book_meta);
Ok(meta)
}

View file

@ -9,6 +9,7 @@ use std::path::Path;
use crate::error::Result;
use crate::media_type::MediaType;
use crate::model::ExtractedBookMetadata;
#[derive(Debug, Clone, Default)]
pub struct ExtractedMetadata {
@ -20,6 +21,7 @@ pub struct ExtractedMetadata {
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub extra: HashMap<String, String>,
pub book_metadata: Option<ExtractedBookMetadata>,
}
pub trait MetadataExtractor: Send + Sync {

View file

@ -324,6 +324,22 @@ impl AuthorInfo {
}
}
/// Book metadata extracted from files (without database-specific fields)
///
/// Produced by the format-specific extractors (PDF, EPUB) and carried on
/// `ExtractedMetadata::book_metadata`; unlike the persisted form it has no
/// media-item id or timestamps.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ExtractedBookMetadata {
/// ISBN exactly as found in the source file (may be ISBN-10 or ISBN-13).
pub isbn: Option<String>,
/// Normalized ISBN-13, set when the raw ISBN could be normalized.
pub isbn13: Option<String>,
/// Publisher name (e.g. EPUB `publisher` metadata).
pub publisher: Option<String>,
/// Language tag as found in the file's metadata — format not normalized
/// here; presumably a BCP-47/ISO code, TODO confirm against extractors.
pub language: Option<String>,
/// Page count (set from the PDF page table when available).
pub page_count: Option<i32>,
/// Publication date, parsed from "YYYY-MM-DD" or a bare year.
pub publication_date: Option<chrono::NaiveDate>,
/// Series name (e.g. Calibre `calibre:series` OPF metadata).
pub series_name: Option<String>,
/// Position within the series (Calibre allows fractional indices).
pub series_index: Option<f64>,
/// Lowercase container format, e.g. "pdf" or "epub".
pub format: Option<String>,
/// Authors in display order; each carries its own `position`.
pub authors: Vec<AuthorInfo>,
/// Identifier map: type ("isbn", "isbn13", "asin", "doi", "other") ->
/// all values of that type found in the file.
pub identifiers: HashMap<String, Vec<String>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReadingProgress {
pub media_id: MediaId,

View file

@ -124,12 +124,11 @@ fn parse_date_value(s: &str) -> Option<DateValue> {
"last-year" | "lastyear" => Some(DateValue::LastYear),
other => {
// Try to parse "last-Nd" format (e.g., "last-7d", "last-30d")
if let Some(rest) = other.strip_prefix("last-") {
if let Some(days_str) = rest.strip_suffix('d') {
if let Ok(days) = days_str.parse::<u32>() {
return Some(DateValue::DaysAgo(days));
}
}
if let Some(rest) = other.strip_prefix("last-")
&& let Some(days_str) = rest.strip_suffix('d')
&& let Ok(days) = days_str.parse::<u32>()
{
return Some(DateValue::DaysAgo(days));
}
None
}
@ -237,13 +236,13 @@ fn field_match(input: &mut &str) -> ModalResult<SearchQuery> {
}
// Check for date queries on created/modified fields
if field == "created" || field == "modified" {
if let Some(date_val) = parse_date_value(&value) {
return SearchQuery::DateQuery {
field,
value: date_val,
};
}
if (field == "created" || field == "modified")
&& let Some(date_val) = parse_date_value(&value)
{
return SearchQuery::DateQuery {
field,
value: date_val,
};
}
// Default: simple field match

View file

@ -144,6 +144,7 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn list_media_paths(&self) -> Result<Vec<(MediaId, std::path::PathBuf, ContentHash)>>;
// Batch metadata update
#[allow(clippy::too_many_arguments)]
async fn batch_update_media(
&self,
ids: &[MediaId],
@ -446,6 +447,69 @@ pub trait StorageBackend: Send + Sync + 'static {
/// List all active sessions (optionally filtered by username)
async fn list_active_sessions(&self, username: Option<&str>) -> Result<Vec<SessionData>>;
// Book Management Methods
/// Upsert book metadata for a media item
async fn upsert_book_metadata(&self, metadata: &crate::model::BookMetadata) -> Result<()>;
/// Get book metadata for a media item
async fn get_book_metadata(
&self,
media_id: MediaId,
) -> Result<Option<crate::model::BookMetadata>>;
/// Add an author to a book
async fn add_book_author(
&self,
media_id: MediaId,
author: &crate::model::AuthorInfo,
) -> Result<()>;
/// Get all authors for a book
async fn get_book_authors(&self, media_id: MediaId) -> Result<Vec<crate::model::AuthorInfo>>;
/// List all distinct authors with book counts
async fn list_all_authors(&self, pagination: &Pagination) -> Result<Vec<(String, u64)>>;
/// List all series with book counts
async fn list_series(&self) -> Result<Vec<(String, u64)>>;
/// Get all books in a series, ordered by series_index
async fn get_series_books(&self, series_name: &str) -> Result<Vec<MediaItem>>;
/// Update reading progress for a user and book
async fn update_reading_progress(
&self,
user_id: uuid::Uuid,
media_id: MediaId,
current_page: i32,
) -> Result<()>;
/// Get reading progress for a user and book
async fn get_reading_progress(
&self,
user_id: uuid::Uuid,
media_id: MediaId,
) -> Result<Option<crate::model::ReadingProgress>>;
/// Get reading list for a user filtered by status
async fn get_reading_list(
&self,
user_id: uuid::Uuid,
status: Option<crate::model::ReadingStatus>,
) -> Result<Vec<MediaItem>>;
/// Search books with book-specific criteria
async fn search_books(
&self,
isbn: Option<&str>,
author: Option<&str>,
series: Option<&str>,
publisher: Option<&str>,
language: Option<&str>,
pagination: &Pagination,
) -> Result<Vec<MediaItem>>;
}
/// Comprehensive library statistics.

View file

@ -3445,6 +3445,476 @@ impl StorageBackend for PostgresBackend {
})
.collect())
}
// Book Management Methods
/// Insert or update the `book_metadata` row for `metadata.media_id`, and
/// fully replace its author and identifier rows, atomically.
///
/// Replace semantics: existing `book_authors` / `book_identifiers` rows
/// are deleted before the new sets are inserted, so stored state always
/// mirrors `metadata` exactly after a successful call.
async fn upsert_book_metadata(&self, metadata: &crate::model::BookMetadata) -> Result<()> {
let mut client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
// Every write below commits (or rolls back) as one unit.
let tx = client.transaction().await?;
// Upsert book_metadata
tx.execute(
"INSERT INTO book_metadata (
media_id, isbn, isbn13, publisher, language, page_count,
publication_date, series_name, series_index, format
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
ON CONFLICT(media_id) DO UPDATE SET
isbn = $2, isbn13 = $3, publisher = $4, language = $5,
page_count = $6, publication_date = $7, series_name = $8,
series_index = $9, format = $10, updated_at = NOW()",
&[
&metadata.media_id.0,
&metadata.isbn,
&metadata.isbn13,
&metadata.publisher,
&metadata.language,
&metadata.page_count,
&metadata.publication_date,
&metadata.series_name,
&metadata.series_index,
&metadata.format,
],
)
.await?;
// Clear existing authors and identifiers
tx.execute(
"DELETE FROM book_authors WHERE media_id = $1",
&[&metadata.media_id.0],
)
.await?;
tx.execute(
"DELETE FROM book_identifiers WHERE media_id = $1",
&[&metadata.media_id.0],
)
.await?;
// Insert authors
for author in &metadata.authors {
tx.execute(
"INSERT INTO book_authors (media_id, author_name, author_sort, role, position)
VALUES ($1, $2, $3, $4, $5)",
&[
&metadata.media_id.0,
&author.name,
&author.file_as,
&author.role,
&author.position,
],
)
.await?;
}
// Insert identifiers
for (id_type, values) in &metadata.identifiers {
for value in values {
tx.execute(
"INSERT INTO book_identifiers (media_id, identifier_type, identifier_value)
VALUES ($1, $2, $3)",
&[&metadata.media_id.0, &id_type, &value],
)
.await?;
}
}
// Nothing is visible to readers until this commit succeeds.
tx.commit().await?;
Ok(())
}
/// Fetch the full book metadata for `media_id`, including authors
/// (ordered by `position`) and the grouped identifier map.
///
/// Returns `Ok(None)` when no `book_metadata` row exists for the item.
async fn get_book_metadata(
    &self,
    media_id: MediaId,
) -> Result<Option<crate::model::BookMetadata>> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    // Base row; bail out early when the book has no metadata at all.
    let row = client
        .query_opt(
            "SELECT isbn, isbn13, publisher, language, page_count,
            publication_date, series_name, series_index, format,
            created_at, updated_at
            FROM book_metadata WHERE media_id = $1",
            &[&media_id.0],
        )
        .await?;
    // `let else` instead of the is_none()/unwrap() pair: no panic path.
    let Some(row) = row else {
        return Ok(None);
    };
    // Authors, kept in their declared display ordering.
    let author_rows = client
        .query(
            "SELECT author_name, author_sort, role, position
            FROM book_authors WHERE media_id = $1 ORDER BY position",
            &[&media_id.0],
        )
        .await?;
    let authors: Vec<crate::model::AuthorInfo> = author_rows
        .iter()
        .map(|r| crate::model::AuthorInfo {
            name: r.get(0),
            file_as: r.get(1),
            role: r.get(2),
            position: r.get(3),
        })
        .collect();
    // Identifier rows are grouped into type -> [values].
    let id_rows = client
        .query(
            "SELECT identifier_type, identifier_value
            FROM book_identifiers WHERE media_id = $1",
            &[&media_id.0],
        )
        .await?;
    let mut identifiers: std::collections::HashMap<String, Vec<String>> =
        std::collections::HashMap::new();
    for r in id_rows {
        let id_type: String = r.get(0);
        let value: String = r.get(1);
        identifiers.entry(id_type).or_default().push(value);
    }
    Ok(Some(crate::model::BookMetadata {
        media_id,
        isbn: row.get(0),
        isbn13: row.get(1),
        publisher: row.get(2),
        language: row.get(3),
        page_count: row.get(4),
        publication_date: row.get(5),
        series_name: row.get(6),
        series_index: row.get(7),
        format: row.get(8),
        authors,
        identifiers,
        created_at: row.get(9),
        updated_at: row.get(10),
    }))
}
/// Insert a single author row for a book; if an author with the same
/// (media_id, author_name, role) already exists, refresh its sort key
/// and display position instead.
async fn add_book_author(
    &self,
    media_id: MediaId,
    author: &crate::model::AuthorInfo,
) -> Result<()> {
    let sql = "INSERT INTO book_authors (media_id, author_name, author_sort, role, position)
            VALUES ($1, $2, $3, $4, $5)
            ON CONFLICT(media_id, author_name, role) DO UPDATE SET
            author_sort = $3, position = $5";
    let db = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    db.execute(
        sql,
        &[
            &media_id.0,
            &author.name,
            &author.file_as,
            &author.role,
            &author.position,
        ],
    )
    .await?;
    Ok(())
}
/// Return every author row for the given media item, in stored display
/// order (`position` ascending).
async fn get_book_authors(&self, media_id: MediaId) -> Result<Vec<crate::model::AuthorInfo>> {
    let db = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let rows = db
        .query(
            "SELECT author_name, author_sort, role, position
            FROM book_authors WHERE media_id = $1 ORDER BY position",
            &[&media_id.0],
        )
        .await?;
    // Build the result imperatively; row count is known up front.
    let mut authors = Vec::with_capacity(rows.len());
    for row in &rows {
        authors.push(crate::model::AuthorInfo {
            name: row.get(0),
            file_as: row.get(1),
            role: row.get(2),
            position: row.get(3),
        });
    }
    Ok(authors)
}
/// List distinct author names paired with how many books each appears on,
/// most-published first, ties broken alphabetically. Paginated.
async fn list_all_authors(&self, pagination: &Pagination) -> Result<Vec<(String, u64)>> {
    let limit = pagination.limit as i64;
    let offset = pagination.offset as i64;
    let db = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let rows = db
        .query(
            "SELECT author_name, COUNT(DISTINCT media_id) as book_count
            FROM book_authors
            GROUP BY author_name
            ORDER BY book_count DESC, author_name
            LIMIT $1 OFFSET $2",
            &[&limit, &offset],
        )
        .await?;
    let mut out = Vec::with_capacity(rows.len());
    for row in &rows {
        let name: String = row.get(0);
        let count: i64 = row.get(1);
        out.push((name, count as u64));
    }
    Ok(out)
}
/// List every named series together with the number of books it contains,
/// alphabetically by series name. Books without a series are excluded.
async fn list_series(&self) -> Result<Vec<(String, u64)>> {
    let db = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let rows = db
        .query(
            "SELECT series_name, COUNT(*) as book_count
            FROM book_metadata
            WHERE series_name IS NOT NULL
            GROUP BY series_name
            ORDER BY series_name",
            &[],
        )
        .await?;
    let mut out = Vec::with_capacity(rows.len());
    for row in &rows {
        let name: String = row.get(0);
        let count: i64 = row.get(1);
        out.push((name, count as u64));
    }
    Ok(out)
}
/// Return all books belonging to `series_name`, ordered by their series
/// index (then title as a tie-breaker).
async fn get_series_books(&self, series_name: &str) -> Result<Vec<MediaItem>> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
// Inner join: only media items that actually have a book_metadata row
// can belong to a series.
let rows = client
.query(
"SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash,
m.file_size, m.title, m.artist, m.album, m.genre, m.year,
m.duration_secs, m.description, m.thumbnail_path, m.file_mtime,
m.created_at, m.updated_at
FROM media_items m
INNER JOIN book_metadata b ON m.id = b.media_id
WHERE b.series_name = $1
ORDER BY b.series_index, m.title",
&[&series_name],
)
.await?;
// row_to_media_item is fallible; collect short-circuits on first error.
rows.iter().map(row_to_media_item).collect()
}
/// Record the page a user has reached in a book.
///
/// NOTE(review): reading progress is piggybacked on the `watch_history`
/// table — the page number is stored (as f64) in `progress_secs`. Callers
/// reading progress back must apply the same interpretation (see
/// `get_reading_progress`).
async fn update_reading_progress(
&self,
user_id: uuid::Uuid,
media_id: MediaId,
current_page: i32,
) -> Result<()> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
// Upsert keyed on (user_id, media_id); last_watched_at doubles as the
// "last read" timestamp.
client
.execute(
"INSERT INTO watch_history (user_id, media_id, progress_secs, last_watched_at)
VALUES ($1, $2, $3, NOW())
ON CONFLICT(user_id, media_id) DO UPDATE SET
progress_secs = $3, last_watched_at = NOW()",
&[&user_id, &media_id.0, &(current_page as f64)],
)
.await?;
Ok(())
}
/// Read back a user's progress in a book, computing a completion
/// percentage when the book's page count is known.
///
/// Progress lives in `watch_history.progress_secs` (page number stored as
/// f64 by `update_reading_progress`); the page count comes from
/// `book_metadata` via a left join, so it may be absent.
async fn get_reading_progress(
    &self,
    user_id: uuid::Uuid,
    media_id: MediaId,
) -> Result<Option<crate::model::ReadingProgress>> {
    let db = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let row = db
        .query_opt(
            "SELECT wh.progress_secs, bm.page_count, wh.last_watched_at
            FROM watch_history wh
            LEFT JOIN book_metadata bm ON wh.media_id = bm.media_id
            WHERE wh.user_id = $1 AND wh.media_id = $2",
            &[&user_id, &media_id.0],
        )
        .await?;
    let Some(record) = row else {
        return Ok(None);
    };
    let current_page = record.get::<_, f64>(0) as i32;
    let total_pages: Option<i32> = record.get(1);
    // Percentage is only meaningful with a known, positive page count;
    // otherwise report 0. Clamp at 100 in case the page runs past the end.
    let progress_percent = total_pages
        .filter(|&total| total > 0)
        .map_or(0.0, |total| {
            (current_page as f64 / total as f64 * 100.0).min(100.0)
        });
    Ok(Some(crate::model::ReadingProgress {
        media_id,
        user_id,
        current_page,
        total_pages,
        progress_percent,
        last_read_at: record.get(2),
    }))
}
/// Return the user's reading list, optionally filtered by status.
///
/// Currently a stub: always returns an empty list regardless of arguments.
async fn get_reading_list(
&self,
_user_id: uuid::Uuid,
_status: Option<crate::model::ReadingStatus>,
) -> Result<Vec<MediaItem>> {
// TODO: Implement reading list with explicit status tracking
// For now, return empty list as this requires additional schema
Ok(Vec::new())
}
/// Search books by optional ISBN / author / series / publisher / language
/// filters, paginated and ordered by title.
///
/// tokio-postgres parameter slices cannot easily be built dynamically from
/// borrowed optionals, so three query shapes are handled:
/// - all five filters present: one fully parameterized query;
/// - no filters at all: plain paginated listing;
/// - anything in between: dynamic query that currently honours only the
///   ISBN filter — TODO(review): extend to the remaining filters.
#[allow(clippy::too_many_arguments)]
async fn search_books(
    &self,
    isbn: Option<&str>,
    author: Option<&str>,
    series: Option<&str>,
    publisher: Option<&str>,
    language: Option<&str>,
    pagination: &Pagination,
) -> Result<Vec<MediaItem>> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let rows = if let (Some(i), Some(a), Some(s), Some(p), Some(l)) =
        (isbn, author, series, publisher, language)
    {
        // Substring match for author/series/publisher; exact match for
        // isbn (either column) and language.
        let author_pattern = format!("%{}%", a);
        let series_pattern = format!("%{}%", s);
        let publisher_pattern = format!("%{}%", p);
        client
            .query(
                "SELECT DISTINCT m.id, m.path, m.file_name, m.media_type, m.content_hash,
                m.file_size, m.title, m.artist, m.album, m.genre, m.year,
                m.duration_secs, m.description, m.thumbnail_path, m.file_mtime,
                m.created_at, m.updated_at
                FROM media_items m
                INNER JOIN book_metadata bm ON m.id = bm.media_id
                INNER JOIN book_authors ba ON m.id = ba.media_id
                WHERE (bm.isbn = $1 OR bm.isbn13 = $1) AND ba.author_name ILIKE $2
                AND bm.series_name ILIKE $3 AND bm.publisher ILIKE $4 AND bm.language = $5
                ORDER BY m.title LIMIT $6 OFFSET $7",
                &[
                    &i,
                    &author_pattern,
                    &series_pattern,
                    &publisher_pattern,
                    &l,
                    &(pagination.limit as i64),
                    &(pagination.offset as i64),
                ],
            )
            .await?
    } else if isbn.is_none()
        && author.is_none()
        && series.is_none()
        && publisher.is_none()
        && language.is_none()
    {
        // No filters
        client
            .query(
                "SELECT DISTINCT m.id, m.path, m.file_name, m.media_type, m.content_hash,
                m.file_size, m.title, m.artist, m.album, m.genre, m.year,
                m.duration_secs, m.description, m.thumbnail_path, m.file_mtime,
                m.created_at, m.updated_at
                FROM media_items m
                INNER JOIN book_metadata bm ON m.id = bm.media_id
                ORDER BY m.title LIMIT $1 OFFSET $2",
                &[&(pagination.limit as i64), &(pagination.offset as i64)],
            )
            .await?
    } else {
        // Partial filter combinations (dynamic query).
        let mut query =
            "SELECT DISTINCT m.id, m.path, m.file_name, m.media_type, m.content_hash,
            m.file_size, m.title, m.artist, m.album, m.genre, m.year,
            m.duration_secs, m.description, m.thumbnail_path, m.file_mtime,
            m.created_at, m.updated_at
            FROM media_items m
            INNER JOIN book_metadata bm ON m.id = bm.media_id WHERE 1=1"
                .to_string();
        if let Some(i) = isbn {
            // Placeholders: $1 = isbn, $2 = limit, $3 = offset.
            query.push_str(" AND (bm.isbn = $1 OR bm.isbn13 = $1)");
            query.push_str(" ORDER BY m.title LIMIT $2 OFFSET $3");
            client
                .query(
                    &query,
                    &[&i, &(pagination.limit as i64), &(pagination.offset as i64)],
                )
                .await?
        } else {
            // BUG FIX: without the ISBN clause only two parameters are
            // bound, so the placeholders must be $1/$2. Previously this
            // path appended "LIMIT $2 OFFSET $3" unconditionally, which
            // postgres rejected at execution time whenever isbn was None.
            query.push_str(" ORDER BY m.title LIMIT $1 OFFSET $2");
            client
                .query(
                    &query,
                    &[&(pagination.limit as i64), &(pagination.offset as i64)],
                )
                .await?
        }
    };
    let items: Result<Vec<_>> = rows.iter().map(row_to_media_item).collect();
    items
}
}
impl PostgresBackend {

View file

@ -1734,11 +1734,7 @@ impl StorageBackend for SqliteBackend {
[],
|r| r.get(0),
)?;
let avg_size: u64 = if total_media > 0 {
total_size / total_media
} else {
0
};
let avg_size: u64 = total_size.checked_div(total_media).unwrap_or(0);
// Media count by type
let mut stmt = db.prepare("SELECT media_type, COUNT(*) FROM media_items GROUP BY media_type ORDER BY COUNT(*) DESC")?;
@ -3801,6 +3797,543 @@ impl StorageBackend for SqliteBackend {
.map_err(|_| PinakesError::Database("list_active_sessions timed out".into()))?
.map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))?
}
// Book Management Methods
/// SQLite counterpart of the Postgres upsert: insert-or-update the
/// `book_metadata` row, then fully replace all author and identifier rows
/// for the media item, in one transaction.
async fn upsert_book_metadata(&self, metadata: &crate::model::BookMetadata) -> Result<()> {
let conn = self.conn.clone();
// Clone every field up front: the spawn_blocking closure must be
// 'static, so it cannot borrow from `metadata`.
let media_id_str = metadata.media_id.to_string();
let isbn = metadata.isbn.clone();
let isbn13 = metadata.isbn13.clone();
let publisher = metadata.publisher.clone();
let language = metadata.language.clone();
let page_count = metadata.page_count;
// Dates are stored as TEXT ("YYYY-MM-DD") in SQLite.
let publication_date = metadata.publication_date.map(|d| d.to_string());
let series_name = metadata.series_name.clone();
let series_index = metadata.series_index;
let format = metadata.format.clone();
let authors = metadata.authors.clone();
let identifiers = metadata.identifiers.clone();
let fut = tokio::task::spawn_blocking(move || {
let mut conn = conn.lock().unwrap();
let tx = conn.transaction()?;
// Upsert book_metadata
tx.execute(
"INSERT INTO book_metadata (
media_id, isbn, isbn13, publisher, language, page_count,
publication_date, series_name, series_index, format
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)
ON CONFLICT(media_id) DO UPDATE SET
isbn = ?2, isbn13 = ?3, publisher = ?4, language = ?5,
page_count = ?6, publication_date = ?7, series_name = ?8,
series_index = ?9, format = ?10, updated_at = datetime('now')",
rusqlite::params![
media_id_str,
isbn,
isbn13,
publisher,
language,
page_count,
publication_date,
series_name,
series_index,
format
],
)?;
// Clear existing authors and identifiers
tx.execute(
"DELETE FROM book_authors WHERE media_id = ?1",
[&media_id_str],
)?;
tx.execute(
"DELETE FROM book_identifiers WHERE media_id = ?1",
[&media_id_str],
)?;
// Insert authors
for author in &authors {
tx.execute(
"INSERT INTO book_authors (media_id, author_name, author_sort, role, position)
VALUES (?1, ?2, ?3, ?4, ?5)",
rusqlite::params![
media_id_str,
author.name,
author.file_as,
author.role,
author.position
],
)?;
}
// Insert identifiers
for (id_type, values) in &identifiers {
for value in values {
tx.execute(
"INSERT INTO book_identifiers (media_id, identifier_type, identifier_value)
VALUES (?1, ?2, ?3)",
rusqlite::params![media_id_str, id_type, value],
)?;
}
}
tx.commit()?;
Ok::<_, rusqlite::Error>(())
});
// Guard against a wedged connection: give the blocking task 30 seconds.
tokio::time::timeout(std::time::Duration::from_secs(30), fut)
.await
.map_err(|_| PinakesError::Database("upsert_book_metadata timed out".into()))?
.map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??;
Ok(())
}
/// Fetch book metadata (plus authors and identifiers) for `media_id` from
/// SQLite, or `Ok(None)` when no `book_metadata` row exists.
async fn get_book_metadata(
    &self,
    media_id: MediaId,
) -> Result<Option<crate::model::BookMetadata>> {
    let conn = self.conn.clone();
    let media_id_str = media_id.to_string();
    let fut = tokio::task::spawn_blocking(move || {
        // Parse the created_at/updated_at TEXT columns. Rows written by
        // upsert_book_metadata use datetime('now'), i.e. "%Y-%m-%d %H:%M:%S"
        // (UTC) — NOT RFC 3339. The previous parse_from_rfc3339(..).unwrap()
        // therefore panicked on every such row. Accept both formats and fall
        // back to the Unix epoch rather than panicking.
        let parse_ts = |s: &str| {
            chrono::DateTime::parse_from_rfc3339(s)
                .map(|dt| dt.with_timezone(&chrono::Utc))
                .or_else(|_| {
                    chrono::NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S")
                        .map(|dt| dt.and_utc())
                })
                .unwrap_or_default()
        };
        let conn = conn.lock().unwrap();
        // Base book_metadata row; bail out early when absent.
        let metadata_row = conn
            .query_row(
                "SELECT isbn, isbn13, publisher, language, page_count,
                publication_date, series_name, series_index, format,
                created_at, updated_at
                FROM book_metadata WHERE media_id = ?1",
                [&media_id_str],
                |row| {
                    Ok((
                        row.get::<_, Option<String>>(0)?,
                        row.get::<_, Option<String>>(1)?,
                        row.get::<_, Option<String>>(2)?,
                        row.get::<_, Option<String>>(3)?,
                        row.get::<_, Option<i32>>(4)?,
                        row.get::<_, Option<String>>(5)?,
                        row.get::<_, Option<String>>(6)?,
                        row.get::<_, Option<f64>>(7)?,
                        row.get::<_, Option<String>>(8)?,
                        row.get::<_, String>(9)?,
                        row.get::<_, String>(10)?,
                    ))
                },
            )
            .optional()?;
        // `let else` instead of is_none()/unwrap(): no panic path.
        let Some((
            isbn,
            isbn13,
            publisher,
            language,
            page_count,
            publication_date,
            series_name,
            series_index,
            format,
            created_at,
            updated_at,
        )) = metadata_row
        else {
            return Ok::<_, rusqlite::Error>(None);
        };
        // Authors, ordered by stored display position.
        let mut stmt = conn.prepare(
            "SELECT author_name, author_sort, role, position
            FROM book_authors WHERE media_id = ?1 ORDER BY position",
        )?;
        let authors: Vec<crate::model::AuthorInfo> = stmt
            .query_map([&media_id_str], |row| {
                Ok(crate::model::AuthorInfo {
                    name: row.get(0)?,
                    file_as: row.get(1)?,
                    role: row.get(2)?,
                    position: row.get(3)?,
                })
            })?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        // Identifiers grouped as type -> [values].
        let mut stmt = conn.prepare(
            "SELECT identifier_type, identifier_value
            FROM book_identifiers WHERE media_id = ?1",
        )?;
        let mut identifiers: std::collections::HashMap<String, Vec<String>> =
            std::collections::HashMap::new();
        for row in stmt.query_map([&media_id_str], |row| {
            Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
        })? {
            let (id_type, value) = row?;
            identifiers.entry(id_type).or_default().push(value);
        }
        // publication_date is stored as "YYYY-MM-DD" text (see upsert).
        let parsed_date = publication_date
            .and_then(|d| chrono::NaiveDate::parse_from_str(&d, "%Y-%m-%d").ok());
        Ok(Some(crate::model::BookMetadata {
            media_id,
            isbn,
            isbn13,
            publisher,
            language,
            page_count,
            publication_date: parsed_date,
            series_name,
            series_index,
            format,
            authors,
            identifiers,
            created_at: parse_ts(&created_at),
            updated_at: parse_ts(&updated_at),
        }))
    });
    Ok(
        tokio::time::timeout(std::time::Duration::from_secs(10), fut)
            .await
            .map_err(|_| PinakesError::Database("get_book_metadata timed out".into()))?
            .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??,
    )
}
/// Insert or update a single author row for `media_id`.
///
/// Upserts on the `(media_id, author_name, role)` unique key so that
/// re-imports refresh `author_sort` and `position` instead of duplicating
/// rows.
async fn add_book_author(
    &self,
    media_id: MediaId,
    author: &crate::model::AuthorInfo,
) -> Result<()> {
    // Clone the handle and owned copies of the inputs so the blocking
    // closure is 'static.
    let conn = self.conn.clone();
    let media_id_str = media_id.to_string();
    let author_clone = author.clone();
    // rusqlite is synchronous; run the write off the async executor.
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        conn.execute(
            "INSERT INTO book_authors (media_id, author_name, author_sort, role, position)
             VALUES (?1, ?2, ?3, ?4, ?5)
             ON CONFLICT(media_id, author_name, role) DO UPDATE SET
             author_sort = ?3, position = ?5",
            rusqlite::params![
                media_id_str,
                author_clone.name,
                author_clone.file_as,
                author_clone.role,
                author_clone.position
            ],
        )?;
        Ok::<_, rusqlite::Error>(())
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    tokio::time::timeout(std::time::Duration::from_secs(5), fut)
        .await
        .map_err(|_| PinakesError::Database("add_book_author timed out".into()))?
        .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??;
    Ok(())
}
/// Fetch all authors attached to a media item, ordered by their `position`.
async fn get_book_authors(&self, media_id: MediaId) -> Result<Vec<crate::model::AuthorInfo>> {
    let conn = self.conn.clone();
    let media_id_str = media_id.to_string();
    // rusqlite is synchronous; run the query off the async executor.
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        let mut stmt = conn.prepare(
            "SELECT author_name, author_sort, role, position
             FROM book_authors WHERE media_id = ?1 ORDER BY position",
        )?;
        // Collect into Result<Vec<_>> so the first row error aborts the scan.
        let authors: Vec<crate::model::AuthorInfo> = stmt
            .query_map([&media_id_str], |row| {
                Ok(crate::model::AuthorInfo {
                    name: row.get(0)?,
                    file_as: row.get(1)?,
                    role: row.get(2)?,
                    position: row.get(3)?,
                })
            })?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        Ok::<_, rusqlite::Error>(authors)
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    Ok(tokio::time::timeout(std::time::Duration::from_secs(5), fut)
        .await
        .map_err(|_| PinakesError::Database("get_book_authors timed out".into()))?
        .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??)
}
/// List distinct authors with the number of books credited to each,
/// ordered by book count (descending) then name, and paginated.
async fn list_all_authors(&self, pagination: &Pagination) -> Result<Vec<(String, u64)>> {
    let conn = self.conn.clone();
    // Copy the pagination values out so the blocking closure is 'static.
    let offset = pagination.offset;
    let limit = pagination.limit;
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        let mut stmt = conn.prepare(
            "SELECT author_name, COUNT(DISTINCT media_id) as book_count
             FROM book_authors
             GROUP BY author_name
             ORDER BY book_count DESC, author_name
             LIMIT ?1 OFFSET ?2",
        )?;
        // SQLite binds integers as i64; COUNT() is non-negative, so the
        // cast back to u64 cannot lose information.
        let authors: Vec<(String, u64)> = stmt
            .query_map([limit as i64, offset as i64], |row| {
                Ok((row.get(0)?, row.get::<_, i64>(1)? as u64))
            })?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        Ok::<_, rusqlite::Error>(authors)
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    Ok(
        tokio::time::timeout(std::time::Duration::from_secs(10), fut)
            .await
            .map_err(|_| PinakesError::Database("list_all_authors timed out".into()))?
            .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??,
    )
}
/// List every series name with the number of books it contains,
/// ordered alphabetically. Books without a series are excluded.
async fn list_series(&self) -> Result<Vec<(String, u64)>> {
    let conn = self.conn.clone();
    // rusqlite is synchronous; run the query off the async executor.
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        let mut stmt = conn.prepare(
            "SELECT series_name, COUNT(*) as book_count
             FROM book_metadata
             WHERE series_name IS NOT NULL
             GROUP BY series_name
             ORDER BY series_name",
        )?;
        // COUNT() is non-negative, so the i64 -> u64 cast is lossless.
        let series: Vec<(String, u64)> = stmt
            .query_map([], |row| Ok((row.get(0)?, row.get::<_, i64>(1)? as u64)))?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        Ok::<_, rusqlite::Error>(series)
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    Ok(
        tokio::time::timeout(std::time::Duration::from_secs(10), fut)
            .await
            .map_err(|_| PinakesError::Database("list_series timed out".into()))?
            .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??,
    )
}
/// Fetch all media items belonging to the named series (exact match),
/// ordered by series index and then title.
async fn get_series_books(&self, series_name: &str) -> Result<Vec<MediaItem>> {
    let conn = self.conn.clone();
    // Own the name so the blocking closure is 'static.
    let series = series_name.to_string();
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        // Column list must stay in sync with `row_to_media_item`.
        let mut stmt = conn.prepare(
            "SELECT m.id, m.path, m.file_name, m.media_type, m.content_hash,
             m.file_size, m.title, m.artist, m.album, m.genre, m.year,
             m.duration_secs, m.description, m.thumbnail_path, m.file_mtime,
             m.created_at, m.updated_at
             FROM media_items m
             INNER JOIN book_metadata b ON m.id = b.media_id
             WHERE b.series_name = ?1
             ORDER BY b.series_index, m.title",
        )?;
        let items = stmt
            .query_map([&series], row_to_media_item)?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        Ok::<_, rusqlite::Error>(items)
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    Ok(
        tokio::time::timeout(std::time::Duration::from_secs(10), fut)
            .await
            .map_err(|_| PinakesError::Database("get_series_books timed out".into()))?
            .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??,
    )
}
/// Record a user's current page for a book (upsert per user/media pair).
///
/// Books piggyback on the `watch_history` table: `progress_secs` stores the
/// page number (as REAL) instead of a playback position, and
/// `last_watched_at` is refreshed with SQLite's `datetime('now')`.
async fn update_reading_progress(
    &self,
    user_id: uuid::Uuid,
    media_id: MediaId,
    current_page: i32,
) -> Result<()> {
    // Reuse watch_history table: progress_secs stores current page for books
    let conn = self.conn.clone();
    let user_id_str = user_id.to_string();
    let media_id_str = media_id.to_string();
    // rusqlite is synchronous; run the write off the async executor.
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        conn.execute(
            "INSERT INTO watch_history (user_id, media_id, progress_secs, last_watched_at)
             VALUES (?1, ?2, ?3, datetime('now'))
             ON CONFLICT(user_id, media_id) DO UPDATE SET
             progress_secs = ?3, last_watched_at = datetime('now')",
            rusqlite::params![user_id_str, media_id_str, current_page as f64],
        )?;
        Ok::<_, rusqlite::Error>(())
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    tokio::time::timeout(std::time::Duration::from_secs(5), fut)
        .await
        .map_err(|_| PinakesError::Database("update_reading_progress timed out".into()))?
        .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??;
    Ok(())
}
/// Fetch a user's reading progress for a book, if any has been recorded.
///
/// Reads from `watch_history` (where `progress_secs` doubles as the current
/// page for books — see `update_reading_progress`) and left-joins
/// `book_metadata` for the total page count so a percentage can be derived.
async fn get_reading_progress(
    &self,
    user_id: uuid::Uuid,
    media_id: MediaId,
) -> Result<Option<crate::model::ReadingProgress>> {
    let conn = self.conn.clone();
    let user_id_str = user_id.to_string();
    let media_id_str = media_id.to_string();
    // rusqlite is synchronous; run the query off the async executor.
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        let result = conn
            .query_row(
                "SELECT wh.progress_secs, bm.page_count, wh.last_watched_at
                 FROM watch_history wh
                 LEFT JOIN book_metadata bm ON wh.media_id = bm.media_id
                 WHERE wh.user_id = ?1 AND wh.media_id = ?2",
                [&user_id_str, &media_id_str],
                |row| {
                    // progress_secs stores the page number as REAL; truncate
                    // back to an integer page.
                    let current_page = row.get::<_, f64>(0)? as i32;
                    let total_pages = row.get::<_, Option<i32>>(1)?;
                    let last_read_str = row.get::<_, String>(2)?;
                    Ok((current_page, total_pages, last_read_str))
                },
            )
            .optional()?;
        Ok::<_, rusqlite::Error>(result.map(|(current_page, total_pages, last_read_str)| {
            // BUGFIX: `last_watched_at` is written with SQLite's
            // datetime('now'), which produces "YYYY-MM-DD HH:MM:SS" (UTC) —
            // NOT RFC 3339 — so the previous
            // `parse_from_rfc3339(...).unwrap()` panicked on every row this
            // storage layer wrote. Try RFC 3339 first (in case another
            // writer stores it), then the SQLite format interpreted as UTC,
            // and never panic on a malformed timestamp.
            let last_read_at = chrono::DateTime::parse_from_rfc3339(&last_read_str)
                .map(|dt| dt.with_timezone(&chrono::Utc))
                .or_else(|_| {
                    chrono::NaiveDateTime::parse_from_str(&last_read_str, "%Y-%m-%d %H:%M:%S")
                        .map(|naive| naive.and_utc())
                })
                .unwrap_or_else(|_| chrono::Utc::now());
            crate::model::ReadingProgress {
                media_id,
                user_id,
                current_page,
                total_pages,
                // Guard against missing/zero page counts and clamp to 100%.
                progress_percent: match total_pages {
                    Some(total) if total > 0 => {
                        (current_page as f64 / total as f64 * 100.0).min(100.0)
                    }
                    _ => 0.0,
                },
                last_read_at,
            }
        }))
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    Ok(tokio::time::timeout(std::time::Duration::from_secs(5), fut)
        .await
        .map_err(|_| PinakesError::Database("get_reading_progress timed out".into()))?
        .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??)
}
/// Fetch the user's explicit reading list, optionally filtered by status.
///
/// Not yet implemented: explicit status tracking needs dedicated schema, so
/// this currently always yields an empty list regardless of the arguments.
async fn get_reading_list(
    &self,
    _user_id: uuid::Uuid,
    _status: Option<crate::model::ReadingStatus>,
) -> Result<Vec<MediaItem>> {
    // TODO: add schema + queries for explicit reading-status tracking
    Ok(vec![])
}
/// Search books by any combination of ISBN, author, series, publisher and
/// language, paginated and ordered by title.
///
/// The SQL is assembled dynamically: each supplied filter appends one
/// condition (and, for `author`, an extra join) together with its bind
/// parameters, so the order of `params` must mirror the order in which
/// conditions are pushed below.
#[allow(clippy::too_many_arguments)]
async fn search_books(
    &self,
    isbn: Option<&str>,
    author: Option<&str>,
    series: Option<&str>,
    publisher: Option<&str>,
    language: Option<&str>,
    pagination: &Pagination,
) -> Result<Vec<MediaItem>> {
    let conn = self.conn.clone();
    // Own the filter strings so the blocking closure is 'static.
    let isbn = isbn.map(String::from);
    let author = author.map(String::from);
    let series = series.map(String::from);
    let publisher = publisher.map(String::from);
    let language = language.map(String::from);
    let offset = pagination.offset;
    let limit = pagination.limit;
    let fut = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        // DISTINCT because the optional book_authors join can multiply rows.
        // Column list must stay in sync with `row_to_media_item`.
        let mut query = String::from(
            "SELECT DISTINCT m.id, m.path, m.file_name, m.media_type, m.content_hash,
             m.file_size, m.title, m.artist, m.album, m.genre, m.year,
             m.duration_secs, m.description, m.thumbnail_path, m.file_mtime,
             m.created_at, m.updated_at
             FROM media_items m
             INNER JOIN book_metadata bm ON m.id = bm.media_id",
        );
        let mut conditions = Vec::new();
        // Boxed because the parameter types differ (String vs i64).
        let mut params: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();
        if let Some(ref i) = isbn {
            // One value bound twice: matches either the ISBN-10 or ISBN-13 column.
            conditions.push("(bm.isbn = ? OR bm.isbn13 = ?)");
            params.push(Box::new(i.clone()));
            params.push(Box::new(i.clone()));
        }
        if let Some(ref a) = author {
            // Author filtering needs the join; only add it when requested.
            query.push_str(" INNER JOIN book_authors ba ON m.id = ba.media_id");
            conditions.push("ba.author_name LIKE ?");
            params.push(Box::new(format!("%{}%", a)));
        }
        if let Some(ref s) = series {
            conditions.push("bm.series_name LIKE ?");
            params.push(Box::new(format!("%{}%", s)));
        }
        if let Some(ref p) = publisher {
            conditions.push("bm.publisher LIKE ?");
            params.push(Box::new(format!("%{}%", p)));
        }
        if let Some(ref l) = language {
            // Language is an exact match, unlike the substring filters above.
            conditions.push("bm.language = ?");
            params.push(Box::new(l.clone()));
        }
        if !conditions.is_empty() {
            query.push_str(" WHERE ");
            query.push_str(&conditions.join(" AND "));
        }
        // Pagination parameters are always bound last.
        query.push_str(" ORDER BY m.title LIMIT ? OFFSET ?");
        params.push(Box::new(limit as i64));
        params.push(Box::new(offset as i64));
        let params_refs: Vec<&dyn rusqlite::ToSql> =
            params.iter().map(|p| p.as_ref()).collect();
        let mut stmt = conn.prepare(&query)?;
        let items = stmt
            .query_map(&*params_refs, row_to_media_item)?
            .collect::<rusqlite::Result<Vec<_>>>()?;
        Ok::<_, rusqlite::Error>(items)
    });
    // Bound the wait so a wedged connection cannot hang the caller forever.
    Ok(
        tokio::time::timeout(std::time::Duration::from_secs(10), fut)
            .await
            .map_err(|_| PinakesError::Database("search_books timed out".into()))?
            .map_err(|e: tokio::task::JoinError| PinakesError::Database(e.to_string()))??,
    )
}
}
// Needed for `query_row(...).optional()`

View file

@ -6,7 +6,7 @@
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use thiserror::Error;
pub mod manifest;
@ -161,7 +161,7 @@ pub trait MediaTypeProvider: Plugin {
fn supported_media_types(&self) -> Vec<MediaTypeDefinition>;
/// Check if this plugin can handle the given file
async fn can_handle(&self, path: &PathBuf, mime_type: Option<&str>) -> PluginResult<bool>;
async fn can_handle(&self, path: &Path, mime_type: Option<&str>) -> PluginResult<bool>;
}
/// Definition of a custom media type
@ -190,7 +190,7 @@ pub struct MediaTypeDefinition {
#[async_trait]
pub trait MetadataExtractor: Plugin {
/// Extract metadata from a file
async fn extract_metadata(&self, path: &PathBuf) -> PluginResult<ExtractedMetadata>;
async fn extract_metadata(&self, path: &Path) -> PluginResult<ExtractedMetadata>;
/// Get the media types this extractor supports
fn supported_types(&self) -> Vec<String>;
@ -223,8 +223,8 @@ pub trait ThumbnailGenerator: Plugin {
/// Generate a thumbnail for the given file
async fn generate_thumbnail(
&self,
path: &PathBuf,
output_path: &PathBuf,
path: &Path,
output_path: &Path,
options: ThumbnailOptions,
) -> PluginResult<ThumbnailInfo>;

View file

@ -98,7 +98,7 @@ impl PluginManifest {
}
/// Parse a manifest from TOML string
pub fn from_str(content: &str) -> Result<Self, ManifestError> {
pub fn parse_str(content: &str) -> Result<Self, ManifestError> {
let manifest: Self = toml::from_str(content)?;
manifest.validate()?;
Ok(manifest)
@ -223,7 +223,7 @@ wasm = "plugin.wasm"
read = ["/tmp/pinakes-thumbnails"]
"#;
let manifest = PluginManifest::from_str(toml).unwrap();
let manifest = PluginManifest::parse_str(toml).unwrap();
assert_eq!(manifest.plugin.name, "heif-support");
assert_eq!(manifest.plugin.version, "1.0.0");
assert_eq!(manifest.plugin.kind.len(), 2);
@ -242,7 +242,7 @@ kind = ["media_type"]
wasm = "plugin.wasm"
"#;
assert!(PluginManifest::from_str(toml).is_err());
assert!(PluginManifest::parse_str(toml).is_err());
}
#[test]
@ -258,6 +258,6 @@ kind = ["invalid_kind"]
wasm = "plugin.wasm"
"#;
assert!(PluginManifest::from_str(toml).is_err());
assert!(PluginManifest::parse_str(toml).is_err());
}
}

View file

@ -100,6 +100,8 @@ pub fn create_router_with_tls(
.route("/media/{id}", get(routes::media::get_media))
.route("/media/{id}/thumbnail", get(routes::media::get_thumbnail))
.route("/media/{media_id}/tags", get(routes::tags::get_media_tags))
// Books API
.nest("/books", routes::books::routes())
.route("/tags", get(routes::tags::list_tags))
.route("/tags/{id}", get(routes::tags::get_tag))
.route("/collections", get(routes::collections::list_collections))

View file

@ -109,7 +109,7 @@ async fn main() -> Result<()> {
.server
.api_key
.as_ref()
.map_or(false, |k| !k.is_empty());
.is_some_and(|k| !k.is_empty());
let has_accounts = !config.accounts.users.is_empty();
if !has_api_key && !has_accounts {
tracing::error!("⚠️ No authentication method configured!");
@ -425,7 +425,6 @@ async fn main() -> Result<()> {
}
}
};
();
drop(cancel);
})
},

View file

@ -0,0 +1,315 @@
use axum::{
Json, Router,
extract::{Path, Query, State},
http::StatusCode,
response::IntoResponse,
routing::{get, put},
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use pinakes_core::{
error::PinakesError,
model::{AuthorInfo, BookMetadata, MediaId, Pagination, ReadingProgress, ReadingStatus},
};
use crate::{dto::MediaResponse, error::ApiError, state::AppState};
/// Book metadata response DTO returned by the `/books` endpoints.
#[derive(Debug, Serialize, Deserialize)]
pub struct BookMetadataResponse {
    /// Media item this metadata belongs to.
    pub media_id: Uuid,
    /// ISBN-10, if known.
    pub isbn: Option<String>,
    /// ISBN-13, if known.
    pub isbn13: Option<String>,
    pub publisher: Option<String>,
    /// Language code as stored in the catalog.
    pub language: Option<String>,
    pub page_count: Option<i32>,
    /// Publication date rendered as a date string (see `From<BookMetadata>`).
    pub publication_date: Option<String>,
    pub series_name: Option<String>,
    /// Position within the series; fractional values allowed (e.g. 1.5).
    pub series_index: Option<f64>,
    /// File format label (e.g. epub/pdf) — as stored, not re-derived here.
    pub format: Option<String>,
    /// Contributing authors in display order.
    pub authors: Vec<AuthorResponse>,
    /// Additional identifiers keyed by scheme (may hold several values each).
    pub identifiers: std::collections::HashMap<String, Vec<String>>,
}
impl From<BookMetadata> for BookMetadataResponse {
fn from(meta: BookMetadata) -> Self {
Self {
media_id: meta.media_id.0,
isbn: meta.isbn,
isbn13: meta.isbn13,
publisher: meta.publisher,
language: meta.language,
page_count: meta.page_count,
publication_date: meta.publication_date.map(|d| d.to_string()),
series_name: meta.series_name,
series_index: meta.series_index,
format: meta.format,
authors: meta.authors.into_iter().map(AuthorResponse::from).collect(),
identifiers: meta.identifiers,
}
}
}
/// Author response DTO
#[derive(Debug, Serialize, Deserialize)]
pub struct AuthorResponse {
    /// Display name of the author.
    pub name: String,
    /// Contribution role (e.g. author, editor) — as stored in the catalog.
    pub role: String,
    /// Sort-friendly form of the name ("file as"), if available.
    pub file_as: Option<String>,
    /// Ordering index among the book's contributors.
    pub position: i32,
}
impl From<AuthorInfo> for AuthorResponse {
fn from(author: AuthorInfo) -> Self {
Self {
name: author.name,
role: author.role,
file_as: author.file_as,
position: author.position,
}
}
}
/// Reading progress response DTO
#[derive(Debug, Serialize, Deserialize)]
pub struct ReadingProgressResponse {
    /// Book being read.
    pub media_id: Uuid,
    /// User the progress belongs to.
    pub user_id: Uuid,
    /// Last recorded page.
    pub current_page: i32,
    /// Total page count, if known from book metadata.
    pub total_pages: Option<i32>,
    /// Completion percentage as computed by the storage layer.
    pub progress_percent: f64,
    /// Last read timestamp rendered as RFC 3339 (see `From<ReadingProgress>`).
    pub last_read_at: String,
}
impl From<ReadingProgress> for ReadingProgressResponse {
    /// Convert the internal progress record into the wire DTO, serializing
    /// the timestamp as RFC 3339.
    fn from(progress: ReadingProgress) -> Self {
        let last_read_at = progress.last_read_at.to_rfc3339();
        Self {
            media_id: progress.media_id.0,
            user_id: progress.user_id,
            current_page: progress.current_page,
            total_pages: progress.total_pages,
            progress_percent: progress.progress_percent,
            last_read_at,
        }
    }
}
/// Update reading progress request body for `PUT /books/{id}/progress`.
#[derive(Debug, Deserialize)]
pub struct UpdateProgressRequest {
    /// Page the reader is currently on.
    pub current_page: i32,
}
/// Search books query parameters (`GET /books`). All filters are optional
/// and combined with AND by the storage layer.
#[derive(Debug, Deserialize)]
pub struct SearchBooksQuery {
    /// Matches either the ISBN-10 or ISBN-13 column exactly.
    pub isbn: Option<String>,
    /// Author name, substring match.
    pub author: Option<String>,
    /// Series name, substring match.
    pub series: Option<String>,
    /// Publisher name, substring match.
    pub publisher: Option<String>,
    /// Language code, exact match.
    pub language: Option<String>,
    /// Pagination offset; defaults to 0 when omitted.
    #[serde(default = "default_offset")]
    pub offset: u64,
    /// Page size; defaults to 50 when omitted.
    #[serde(default = "default_limit")]
    pub limit: u64,
}
/// Serde default: listings start at the first row.
fn default_offset() -> u64 {
    0
}

/// Serde default: listings are capped at 50 rows per page.
fn default_limit() -> u64 {
    50
}
/// Series summary DTO for `GET /books/series`.
#[derive(Debug, Serialize)]
pub struct SeriesSummary {
    /// Series name.
    pub name: String,
    /// Number of books in the series.
    pub book_count: u64,
}
/// Author summary DTO for `GET /books/authors`.
#[derive(Debug, Serialize)]
pub struct AuthorSummary {
    /// Author name.
    pub name: String,
    /// Number of distinct books credited to this author.
    pub book_count: u64,
}
/// Get book metadata by media ID.
///
/// Returns the book metadata DTO, or a `PinakesError::NotFound` (via
/// `ApiError`) when the media item has no book metadata row.
pub async fn get_book_metadata(
    State(state): State<AppState>,
    Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
    let media_id = MediaId(media_id);
    let metadata = state
        .storage
        .get_book_metadata(media_id)
        .await?
        // ok_or_else: only allocate the error String on the miss path.
        .ok_or_else(|| {
            ApiError(PinakesError::NotFound("Book metadata not found".to_string()))
        })?;
    Ok(Json(BookMetadataResponse::from(metadata)))
}
/// List all books with optional search filters
pub async fn list_books(
State(state): State<AppState>,
Query(query): Query<SearchBooksQuery>,
) -> Result<impl IntoResponse, ApiError> {
let pagination = Pagination {
offset: query.offset,
limit: query.limit,
sort: None,
};
let items = state
.storage
.search_books(
query.isbn.as_deref(),
query.author.as_deref(),
query.series.as_deref(),
query.publisher.as_deref(),
query.language.as_deref(),
&pagination,
)
.await?;
let response: Vec<MediaResponse> = items.into_iter().map(MediaResponse::from).collect();
Ok(Json(response))
}
/// List every series together with the number of books it contains.
pub async fn list_series(State(state): State<AppState>) -> Result<impl IntoResponse, ApiError> {
    let summaries: Vec<SeriesSummary> = state
        .storage
        .list_series()
        .await?
        .into_iter()
        .map(|(name, book_count)| SeriesSummary { name, book_count })
        .collect();
    Ok(Json(summaries))
}
/// Return the books belonging to the named series.
pub async fn get_series_books(
    State(state): State<AppState>,
    Path(series_name): Path<String>,
) -> Result<impl IntoResponse, ApiError> {
    let books = state.storage.get_series_books(&series_name).await?;
    let payload: Vec<MediaResponse> = books.into_iter().map(Into::into).collect();
    Ok(Json(payload))
}
/// List authors together with how many books each is credited on, paginated.
pub async fn list_authors(
    State(state): State<AppState>,
    Query(pagination): Query<Pagination>,
) -> Result<impl IntoResponse, ApiError> {
    let summaries: Vec<AuthorSummary> = state
        .storage
        .list_all_authors(&pagination)
        .await?
        .into_iter()
        .map(|(name, book_count)| AuthorSummary { name, book_count })
        .collect();
    Ok(Json(summaries))
}
/// Return the books credited to the named author, paginated. Delegates to
/// the book search with only the author filter set.
pub async fn get_author_books(
    State(state): State<AppState>,
    Path(author_name): Path<String>,
    Query(pagination): Query<Pagination>,
) -> Result<impl IntoResponse, ApiError> {
    let books = state
        .storage
        .search_books(None, Some(&author_name), None, None, None, &pagination)
        .await?;
    let payload: Vec<MediaResponse> = books.into_iter().map(MediaResponse::from).collect();
    Ok(Json(payload))
}
/// Get reading progress for a book.
///
/// Returns `PinakesError::NotFound` (via `ApiError`) when no progress row
/// exists for this user/book pair.
pub async fn get_reading_progress(
    State(state): State<AppState>,
    Path(media_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
    // TODO: Get user_id from auth context. NOTE(review): with a fresh random
    // UUID per request this lookup can never match a stored row, so the
    // endpoint currently always returns 404 — wire to the session user.
    let user_id = Uuid::new_v4(); // Placeholder
    let media_id = MediaId(media_id);
    let progress = state
        .storage
        .get_reading_progress(user_id, media_id)
        .await?
        // ok_or_else: only allocate the error String on the miss path.
        .ok_or_else(|| {
            ApiError(PinakesError::NotFound(
                "Reading progress not found".to_string(),
            ))
        })?;
    Ok(Json(ReadingProgressResponse::from(progress)))
}
/// Update reading progress for a book
pub async fn update_reading_progress(
State(state): State<AppState>,
Path(media_id): Path<Uuid>,
Json(req): Json<UpdateProgressRequest>,
) -> Result<impl IntoResponse, ApiError> {
// TODO: Get user_id from auth context
let user_id = Uuid::new_v4(); // Placeholder
let media_id = MediaId(media_id);
state
.storage
.update_reading_progress(user_id, media_id, req.current_page)
.await?;
Ok(StatusCode::NO_CONTENT)
}
/// Get the user's reading list, optionally filtered by reading status.
pub async fn get_reading_list(
    State(state): State<AppState>,
    Query(params): Query<ReadingListQuery>,
) -> Result<impl IntoResponse, ApiError> {
    // TODO: Get user_id from auth context
    let user_id = Uuid::new_v4(); // Placeholder
    let items = state.storage.get_reading_list(user_id, params.status).await?;
    let payload: Vec<MediaResponse> = items.into_iter().map(Into::into).collect();
    Ok(Json(payload))
}
/// Query parameters for `GET /books/reading-list`.
#[derive(Debug, Deserialize)]
pub struct ReadingListQuery {
    /// Optional reading-status filter; `None` returns the full list.
    pub status: Option<ReadingStatus>,
}
/// Build the books router, mounted under `/books` by the top-level router.
///
/// BUGFIX: uses `{param}` path-capture syntax to match the rest of this
/// application's router (e.g. `/media/{id}`); axum 0.8 rejects the old
/// `/:param` colon syntax at router construction time, so the previous
/// routes would have panicked on startup. The GET and PUT progress handlers
/// are also merged into a single registration of `/{id}/progress`.
pub fn routes() -> Router<AppState> {
    Router::new()
        // Browse routes
        .route("/", get(list_books))
        .route("/series", get(list_series))
        .route("/series/{name}", get(get_series_books))
        .route("/authors", get(list_authors))
        .route("/authors/{name}/books", get(get_author_books))
        // Metadata routes
        .route("/{id}/metadata", get(get_book_metadata))
        // Reading progress routes (GET and PUT share one path)
        .route(
            "/{id}/progress",
            get(get_reading_progress).put(update_reading_progress),
        )
        .route("/reading-list", get(get_reading_list))
}

View file

@ -75,10 +75,7 @@ pub async fn health(State(state): State<AppState>) -> Json<HealthResponse> {
response.database = Some(db_health);
// Check filesystem health (root directories)
let roots = match state.storage.list_root_dirs().await {
Ok(r) => r,
Err(_) => Vec::new(),
};
let roots: Vec<std::path::PathBuf> = state.storage.list_root_dirs().await.unwrap_or_default();
let roots_accessible = roots.iter().filter(|r| r.exists()).count();
if roots_accessible < roots.len() {
response.status = "degraded".to_string();

View file

@ -1,6 +1,7 @@
pub mod analytics;
pub mod audit;
pub mod auth;
pub mod books;
pub mod collections;
pub mod config;
pub mod database;

View file

@ -122,10 +122,10 @@ pub fn App() -> Element {
// Check system preference using JavaScript
let result =
document::eval(r#"window.matchMedia('(prefers-color-scheme: dark)').matches"#);
if let Ok(val) = result.await {
if let Some(prefers_dark) = val.as_bool() {
system_prefers_dark.set(prefers_dark);
}
if let Ok(val) = result.await
&& let Some(prefers_dark) = val.as_bool()
{
system_prefers_dark.set(prefers_dark);
}
});
});
@ -581,7 +581,7 @@ pub fn App() -> Element {
{
let (completed, total) = *import_progress.read();
let has_progress = total > 0;
let pct = if total > 0 { (completed * 100) / total } else { 0 };
let pct = (completed * 100).checked_div(total).unwrap_or(0);
let current = import_current_file.read().clone();
let queue_len = import_queue.read().len();
rsx! {
@ -1408,38 +1408,57 @@ pub fn App() -> Element {
// Check if already importing - if so, add to queue
// Extract directory name from path
// Check if already importing - if so, add to queue
// Get preview files if available for per-file progress
// Use parallel import with per-batch progress
// Show first file in batch as current
// Process batch in parallel
// Update progress after batch
// Fallback: use server-side directory import (no per-file progress)
// Check if already importing - if so, add to queue
// Update progress from scan status
// Check if already importing - if so, add to queue
// Process files in parallel batches for better performance
// Show first file in batch as current
// Process batch in parallel
// Update progress after batch
// Extended import state
// Extract directory name from path
// Check if already importing - if so, add to queue
if *import_in_progress.read() {
// Get preview files if available for per-file progress
// Use parallel import with per-batch progress
// Show first file in batch as current
// Process batch in parallel
// Update progress after batch
// Fallback: use server-side directory import (no per-file progress)
// Check if already importing - if so, add to queue
// Update progress from scan status
// Check if already importing - if so, add to queue
// Process files in parallel batches for better performance
// Show first file in batch as current
// Process batch in parallel
// Update progress after batch
// Extended import state
import_queue.write().push(file_name);
show_toast("Added to import queue".into(), false);
return;
@ -1547,8 +1566,6 @@ pub fn App() -> Element {
if let Some(first_path) = chunk.first() {
let file_name = first_path
.rsplit('/')
.next()
.unwrap_or(first_path);
@ -1679,10 +1696,7 @@ pub fn App() -> Element {
Ok(status) => {
let done = !status.scanning;
import_progress
.set((
status.files_processed as usize,
status.files_found as usize,
));
.set((status.files_processed, status.files_found));
if status.files_found > 0 {
import_current_file
.set(
@ -1752,8 +1766,6 @@ pub fn App() -> Element {
if let Some(first_path) = chunk.first() {
let file_name = first_path
.rsplit('/')
.next()
.unwrap_or(first_path);

View file

@ -54,7 +54,7 @@ pub fn Import(
{
let (completed, total) = import_progress;
let has_progress = total > 0;
let pct = if total > 0 { (completed * 100) / total } else { 0 };
let pct = (completed * 100).checked_div(total).unwrap_or(0);
let queue_count = import_queue.len();
rsx! {
div { class: "import-status-panel",

View file

@ -612,6 +612,9 @@ pub fn Library(
let card_click = {
let id = item.id.clone();
move |_| on_select.call(id.clone())

View file

@ -79,6 +79,7 @@ pub fn Statistics(
if !s.media_by_type.is_empty() {
div { class: "card mt-16",
h4 { class: "card-title", "Media by Type" }