various: remove dead code; fix skipped tests

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9100489be899f9e9fbd32f6aca3080196a6a6964
This commit is contained in:
raf 2026-02-05 00:18:02 +03:00
commit cfdc3d0622
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
18 changed files with 1445 additions and 28 deletions

7
Cargo.lock generated
View file

@ -4837,6 +4837,7 @@ dependencies = [
"tokio-util", "tokio-util",
"toml 0.9.11+spec-1.1.0", "toml 0.9.11+spec-1.1.0",
"tracing", "tracing",
"urlencoding",
"uuid", "uuid",
"walkdir", "walkdir",
"wasmtime", "wasmtime",
@ -7329,6 +7330,12 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]] [[package]]
name = "utf-8" name = "utf-8"
version = "0.7.6" version = "0.7.6"

View file

@ -39,6 +39,7 @@ reqwest = { workspace = true }
argon2 = { workspace = true } argon2 = { workspace = true }
regex = { workspace = true } regex = { workspace = true }
moka = { version = "0.12", features = ["future"] } moka = { version = "0.12", features = ["future"] }
urlencoding = "2.1"
# Plugin system # Plugin system
pinakes-plugin-api = { path = "../pinakes-plugin-api" } pinakes-plugin-api = { path = "../pinakes-plugin-api" }

View file

@ -0,0 +1,233 @@
use chrono::Utc;
use uuid::Uuid;
use crate::error::{PinakesError, Result};
use crate::model::MediaItem;
use super::googlebooks::GoogleBooksClient;
use super::openlibrary::OpenLibraryClient;
use super::{EnrichmentSourceType, ExternalMetadata, MetadataEnricher};
/// Book enricher that tries OpenLibrary first, then falls back to Google Books
pub struct BookEnricher {
    // Primary metadata source; tried first for both ISBN lookups and searches.
    openlibrary: OpenLibraryClient,
    // Fallback source; takes an optional API key supplied at construction.
    googlebooks: GoogleBooksClient,
}
impl BookEnricher {
pub fn new(google_api_key: Option<String>) -> Self {
Self {
openlibrary: OpenLibraryClient::new(),
googlebooks: GoogleBooksClient::new(google_api_key),
}
}
/// Try to enrich from OpenLibrary first
pub async fn try_openlibrary(&self, isbn: &str) -> Result<Option<ExternalMetadata>> {
match self.openlibrary.fetch_by_isbn(isbn).await {
Ok(book) => {
let metadata_json = serde_json::to_string(&book)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),
media_id: crate::model::MediaId(Uuid::nil()), // Will be set by caller
source: EnrichmentSourceType::OpenLibrary,
external_id: None,
metadata_json,
confidence: calculate_openlibrary_confidence(&book),
last_updated: Utc::now(),
}))
}
Err(_) => Ok(None),
}
}
/// Try to enrich from Google Books
pub async fn try_googlebooks(&self, isbn: &str) -> Result<Option<ExternalMetadata>> {
match self.googlebooks.fetch_by_isbn(isbn).await {
Ok(books) if !books.is_empty() => {
let book = &books[0];
let metadata_json = serde_json::to_string(book)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),
media_id: crate::model::MediaId(Uuid::nil()), // Will be set by caller
source: EnrichmentSourceType::GoogleBooks,
external_id: Some(book.id.clone()),
metadata_json,
confidence: calculate_googlebooks_confidence(&book.volume_info),
last_updated: Utc::now(),
}))
}
_ => Ok(None),
}
}
/// Try to enrich by searching with title and author
pub async fn enrich_by_search(
&self,
title: &str,
author: Option<&str>,
) -> Result<Option<ExternalMetadata>> {
// Try OpenLibrary search first
if let Ok(results) = self.openlibrary.search(title, author).await
&& let Some(result) = results.first()
{
let metadata_json = serde_json::to_string(result)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
return Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),
media_id: crate::model::MediaId(Uuid::nil()),
source: EnrichmentSourceType::OpenLibrary,
external_id: result.key.clone(),
metadata_json,
confidence: 0.6, // Lower confidence for search results
last_updated: Utc::now(),
}));
}
// Fall back to Google Books
if let Ok(results) = self.googlebooks.search(title, author).await
&& let Some(book) = results.first()
{
let metadata_json = serde_json::to_string(book)
.map_err(|e| PinakesError::External(format!("Failed to serialize metadata: {}", e)))?;
return Ok(Some(ExternalMetadata {
id: Uuid::new_v4(),
media_id: crate::model::MediaId(Uuid::nil()),
source: EnrichmentSourceType::GoogleBooks,
external_id: Some(book.id.clone()),
metadata_json,
confidence: 0.6,
last_updated: Utc::now(),
}));
}
Ok(None)
}
}
#[async_trait::async_trait]
impl MetadataEnricher for BookEnricher {
    fn source(&self) -> EnrichmentSourceType {
        // OpenLibrary is the preferred (first-tried) source for books.
        EnrichmentSourceType::OpenLibrary
    }

    async fn enrich(&self, item: &MediaItem) -> Result<Option<ExternalMetadata>> {
        // Without a title there is nothing to look up or search on.
        let Some(title) = item.title.as_deref() else {
            return Ok(None);
        };

        // Prefer an exact ISBN lookup when the title text embeds an ISBN.
        if let Some(isbn) = crate::books::extract_isbn_from_text(title) {
            if let Some(mut metadata) = self.try_openlibrary(&isbn).await? {
                metadata.media_id = item.id;
                return Ok(Some(metadata));
            }
            if let Some(mut metadata) = self.try_googlebooks(&isbn).await? {
                metadata.media_id = item.id;
                return Ok(Some(metadata));
            }
        }

        // Otherwise fall back to a title/author search.
        self.enrich_by_search(title, item.artist.as_deref()).await
    }
}
/// Calculate confidence score for OpenLibrary metadata.
///
/// Starts from a 0.5 base and adds a fixed bonus per populated field,
/// capped at 1.0.
pub fn calculate_openlibrary_confidence(book: &super::openlibrary::OpenLibraryBook) -> f64 {
    let mut score: f64 = 0.5; // Base score
    // (field-present, bonus) pairs, applied in a fixed order.
    let bonuses = [
        (book.title.is_some(), 0.1),
        (!book.authors.is_empty(), 0.1),
        (!book.publishers.is_empty(), 0.05),
        (book.publish_date.is_some(), 0.05),
        (book.description.is_some(), 0.1),
        (!book.covers.is_empty(), 0.1),
    ];
    for (present, bonus) in bonuses {
        if present {
            score += bonus;
        }
    }
    score.min(1.0)
}
/// Calculate confidence score for Google Books metadata.
///
/// Starts from a 0.5 base and adds a fixed bonus per populated field,
/// capped at 1.0.
pub fn calculate_googlebooks_confidence(info: &super::googlebooks::VolumeInfo) -> f64 {
    let mut score: f64 = 0.5; // Base score
    // (field-present, bonus) pairs, applied in a fixed order.
    let bonuses = [
        (info.title.is_some(), 0.1),
        (!info.authors.is_empty(), 0.1),
        (info.publisher.is_some(), 0.05),
        (info.published_date.is_some(), 0.05),
        (info.description.is_some(), 0.1),
        (info.image_links.is_some(), 0.1),
    ];
    for (present, bonus) in bonuses {
        if present {
            score += bonus;
        }
    }
    score.min(1.0)
}
#[cfg(test)]
mod tests {
    use super::*;

    // A book with only a title should score exactly base (0.5) + title bonus (0.1).
    #[test]
    fn test_openlibrary_confidence_calculation() {
        let book = super::super::openlibrary::OpenLibraryBook {
            title: Some("Test Book".to_string()),
            subtitle: None,
            authors: vec![],
            publishers: vec![],
            publish_date: None,
            number_of_pages: None,
            subjects: vec![],
            covers: vec![],
            isbn_10: vec![],
            isbn_13: vec![],
            series: vec![],
            description: None,
            languages: vec![],
        };
        let confidence = calculate_openlibrary_confidence(&book);
        assert_eq!(confidence, 0.6); // 0.5 base + 0.1 for title
    }

    // Same check for the Google Books scorer, using Default for the empty fields.
    #[test]
    fn test_googlebooks_confidence_calculation() {
        let info = super::super::googlebooks::VolumeInfo {
            title: Some("Test Book".to_string()),
            ..Default::default()
        };
        let confidence = calculate_googlebooks_confidence(&info);
        assert_eq!(confidence, 0.6); // 0.5 base + 0.1 for title
    }
}

View file

@ -0,0 +1,283 @@
use serde::{Deserialize, Serialize};
use crate::error::{PinakesError, Result};
/// Google Books API client for book metadata enrichment
pub struct GoogleBooksClient {
    // Shared HTTP client (10s timeout, "Pinakes/1.0" user agent; see `new`).
    client: reqwest::Client,
    // Optional API key, appended as a `key` query parameter when present.
    api_key: Option<String>,
}
impl GoogleBooksClient {
    /// Build a client with a 10 second timeout and a `Pinakes/1.0` user agent.
    /// `api_key`, when present, is appended to API requests.
    pub fn new(api_key: Option<String>) -> Self {
        Self {
            client: reqwest::Client::builder()
                .user_agent("Pinakes/1.0")
                .timeout(std::time::Duration::from_secs(10))
                .build()
                .expect("Failed to build HTTP client"),
            api_key,
        }
    }

    /// Shared GET + status check + JSON decode for the volumes endpoints.
    ///
    /// `op` names the operation for error messages (unified across callers).
    /// The API key, if configured, is appended here so callers can't forget it.
    async fn get_volumes(&self, mut url: String, op: &str) -> Result<Vec<GoogleBook>> {
        if let Some(ref key) = self.api_key {
            url.push_str(&format!("&key={}", key));
        }
        let response = self
            .client
            .get(&url)
            .send()
            .await
            .map_err(|e| PinakesError::External(format!("{op} failed: {e}")))?;
        if !response.status().is_success() {
            return Err(PinakesError::External(format!(
                "{op} returned status: {}",
                response.status()
            )));
        }
        let volumes: GoogleBooksResponse = response
            .json()
            .await
            .map_err(|e| PinakesError::External(format!("{op}: failed to parse response: {e}")))?;
        Ok(volumes.items)
    }

    /// Fetch book metadata by ISBN.
    ///
    /// Returns an empty vector when no volume matches.
    pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<Vec<GoogleBook>> {
        // Percent-encode defensively; normalized ISBNs are digits-only, but
        // callers may pass raw user input.
        let url = format!(
            "https://www.googleapis.com/books/v1/volumes?q=isbn:{}",
            urlencoding::encode(isbn)
        );
        self.get_volumes(url, "Google Books request").await
    }

    /// Search for books by title and (optionally) author, capped at 5 results.
    pub async fn search(&self, title: &str, author: Option<&str>) -> Result<Vec<GoogleBook>> {
        let mut query = format!("intitle:{}", urlencoding::encode(title));
        if let Some(author) = author {
            // `+` separates terms in the Google Books `q` parameter.
            query.push_str(&format!("+inauthor:{}", urlencoding::encode(author)));
        }
        let url = format!(
            "https://www.googleapis.com/books/v1/volumes?q={}&maxResults=5",
            query
        );
        self.get_volumes(url, "Google Books search").await
    }

    /// Download cover image from Google Books.
    ///
    /// Rewrites the thumbnail link to a higher zoom level and strips the
    /// page-curl effect before downloading; returns the raw image bytes.
    pub async fn fetch_cover(&self, image_link: &str) -> Result<Vec<u8>> {
        // Replace thumbnail link with higher resolution if possible
        let high_res_link = image_link
            .replace("&zoom=1", "&zoom=2")
            .replace("&edge=curl", "");
        let response = self
            .client
            .get(&high_res_link)
            .send()
            .await
            .map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
        if !response.status().is_success() {
            return Err(PinakesError::External(format!(
                "Cover download returned status: {}",
                response.status()
            )));
        }
        response
            .bytes()
            .await
            .map(|b| b.to_vec())
            .map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
    }
}
/// Top-level envelope returned by the Google Books volumes endpoints.
///
/// The API uses camelCase JSON keys (e.g. `totalItems`); without
/// `rename_all = "camelCase"` those fields would silently deserialize to
/// their defaults.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GoogleBooksResponse {
    /// Matching volumes; the key is absent when there are no results.
    #[serde(default)]
    pub items: Vec<GoogleBook>,
    /// Total result count reported by the API (`totalItems`).
    #[serde(default)]
    pub total_items: i32,
}
/// A single volume entry from a Google Books response.
///
/// `rename_all = "camelCase"` is required: the API sends `volumeInfo`,
/// which would otherwise never match the snake_case field name.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GoogleBook {
    /// Google's opaque volume id.
    pub id: String,
    /// Descriptive metadata (`volumeInfo`); defaulted when absent.
    #[serde(default)]
    pub volume_info: VolumeInfo,
}
/// Descriptive metadata for a Google Books volume.
///
/// All fields are optional/defaulted because the API omits keys freely.
/// `rename_all = "camelCase"` maps the API's keys (`publishedDate`,
/// `pageCount`, `imageLinks`, `industryIdentifiers`, …) onto the
/// snake_case fields; without it every multi-word field stays empty.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct VolumeInfo {
    #[serde(default)]
    pub title: Option<String>,
    #[serde(default)]
    pub subtitle: Option<String>,
    #[serde(default)]
    pub authors: Vec<String>,
    #[serde(default)]
    pub publisher: Option<String>,
    // `publishedDate` in the JSON.
    #[serde(default)]
    pub published_date: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    // `pageCount` in the JSON.
    #[serde(default)]
    pub page_count: Option<i32>,
    #[serde(default)]
    pub categories: Vec<String>,
    // `averageRating` in the JSON.
    #[serde(default)]
    pub average_rating: Option<f64>,
    // `ratingsCount` in the JSON.
    #[serde(default)]
    pub ratings_count: Option<i32>,
    // `imageLinks` in the JSON.
    #[serde(default)]
    pub image_links: Option<ImageLinks>,
    #[serde(default)]
    pub language: Option<String>,
    // `industryIdentifiers` in the JSON.
    #[serde(default)]
    pub industry_identifiers: Vec<IndustryIdentifier>,
    // `mainCategory` in the JSON.
    #[serde(default)]
    pub main_category: Option<String>,
}
/// Cover image URLs at various resolutions, from `volumeInfo.imageLinks`.
///
/// `rename_all = "camelCase"` maps `smallThumbnail`/`extraLarge` onto the
/// snake_case fields; without it those two would never populate.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ImageLinks {
    // `smallThumbnail` in the JSON.
    #[serde(default)]
    pub small_thumbnail: Option<String>,
    #[serde(default)]
    pub thumbnail: Option<String>,
    #[serde(default)]
    pub small: Option<String>,
    #[serde(default)]
    pub medium: Option<String>,
    #[serde(default)]
    pub large: Option<String>,
    // `extraLarge` in the JSON.
    #[serde(default)]
    pub extra_large: Option<String>,
}
impl ImageLinks {
    /// Return the highest-resolution link available, scanning from
    /// `extra_large` down to `small_thumbnail`; `None` when no link is set.
    pub fn best_link(&self) -> Option<&String> {
        [
            &self.extra_large,
            &self.large,
            &self.medium,
            &self.small,
            &self.thumbnail,
            &self.small_thumbnail,
        ]
        .into_iter()
        .find_map(|link| link.as_ref())
    }
}
/// One identifier entry (ISBN-10, ISBN-13, …) from a volume's metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IndustryIdentifier {
    // JSON key is "type", a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub identifier_type: String,
    // The identifier value itself, e.g. the ISBN digits.
    pub identifier: String,
}
impl IndustryIdentifier {
    /// True when this entry is an ISBN-13 (`type == "ISBN_13"`).
    pub fn is_isbn13(&self) -> bool {
        matches!(self.identifier_type.as_str(), "ISBN_13")
    }

    /// True when this entry is an ISBN-10 (`type == "ISBN_10"`).
    pub fn is_isbn10(&self) -> bool {
        matches!(self.identifier_type.as_str(), "ISBN_10")
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Construction stores the key verbatim and performs no I/O.
    #[test]
    fn test_googlebooks_client_creation() {
        let client = GoogleBooksClient::new(None);
        assert!(client.api_key.is_none());
        let client_with_key = GoogleBooksClient::new(Some("test-key".to_string()));
        assert_eq!(client_with_key.api_key, Some("test-key".to_string()));
    }

    // `best_link` prefers larger sizes: large wins over medium/thumbnail here
    // because extra_large is unset.
    #[test]
    fn test_image_links_best_link() {
        let links = ImageLinks {
            small_thumbnail: Some("small.jpg".to_string()),
            thumbnail: Some("thumb.jpg".to_string()),
            small: None,
            medium: Some("medium.jpg".to_string()),
            large: Some("large.jpg".to_string()),
            extra_large: None,
        };
        assert_eq!(links.best_link(), Some(&"large.jpg".to_string()));
    }

    // The two type predicates are mutually exclusive on well-formed entries.
    #[test]
    fn test_industry_identifier_type_checks() {
        let isbn13 = IndustryIdentifier {
            identifier_type: "ISBN_13".to_string(),
            identifier: "9780123456789".to_string(),
        };
        assert!(isbn13.is_isbn13());
        assert!(!isbn13.is_isbn10());
        let isbn10 = IndustryIdentifier {
            identifier_type: "ISBN_10".to_string(),
            identifier: "0123456789".to_string(),
        };
        assert!(!isbn10.is_isbn13());
        assert!(isbn10.is_isbn10());
    }
}

View file

@ -1,7 +1,10 @@
//! Metadata enrichment from external sources. //! Metadata enrichment from external sources.
pub mod books;
pub mod googlebooks;
pub mod lastfm; pub mod lastfm;
pub mod musicbrainz; pub mod musicbrainz;
pub mod openlibrary;
pub mod tmdb; pub mod tmdb;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
@ -32,6 +35,10 @@ pub enum EnrichmentSourceType {
Tmdb, Tmdb,
#[serde(rename = "lastfm")] #[serde(rename = "lastfm")]
LastFm, LastFm,
#[serde(rename = "openlibrary")]
OpenLibrary,
#[serde(rename = "googlebooks")]
GoogleBooks,
} }
impl std::fmt::Display for EnrichmentSourceType { impl std::fmt::Display for EnrichmentSourceType {
@ -40,6 +47,8 @@ impl std::fmt::Display for EnrichmentSourceType {
Self::MusicBrainz => "musicbrainz", Self::MusicBrainz => "musicbrainz",
Self::Tmdb => "tmdb", Self::Tmdb => "tmdb",
Self::LastFm => "lastfm", Self::LastFm => "lastfm",
Self::OpenLibrary => "openlibrary",
Self::GoogleBooks => "googlebooks",
}; };
write!(f, "{s}") write!(f, "{s}")
} }
@ -53,6 +62,8 @@ impl std::str::FromStr for EnrichmentSourceType {
"musicbrainz" => Ok(Self::MusicBrainz), "musicbrainz" => Ok(Self::MusicBrainz),
"tmdb" => Ok(Self::Tmdb), "tmdb" => Ok(Self::Tmdb),
"lastfm" => Ok(Self::LastFm), "lastfm" => Ok(Self::LastFm),
"openlibrary" => Ok(Self::OpenLibrary),
"googlebooks" => Ok(Self::GoogleBooks),
_ => Err(format!("unknown enrichment source: {s}")), _ => Err(format!("unknown enrichment source: {s}")),
} }
} }

View file

@ -0,0 +1,283 @@
use serde::{Deserialize, Serialize};
use crate::error::{PinakesError, Result};
/// OpenLibrary API client for book metadata enrichment
pub struct OpenLibraryClient {
    // Shared HTTP client (10s timeout, "Pinakes/1.0" user agent; see `new`).
    client: reqwest::Client,
    // API host, set to "https://openlibrary.org" in `new`.
    base_url: String,
}

impl Default for OpenLibraryClient {
    // The client takes no configuration, so Default is simply `new()`.
    fn default() -> Self {
        Self::new()
    }
}
impl OpenLibraryClient {
pub fn new() -> Self {
Self {
client: reqwest::Client::builder()
.user_agent("Pinakes/1.0")
.timeout(std::time::Duration::from_secs(10))
.build()
.expect("Failed to build HTTP client"),
base_url: "https://openlibrary.org".to_string(),
}
}
/// Fetch book metadata by ISBN
pub async fn fetch_by_isbn(&self, isbn: &str) -> Result<OpenLibraryBook> {
let url = format!("{}/isbn/{}.json", self.base_url, isbn);
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("OpenLibrary request failed: {}", e)))?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
"OpenLibrary returned status: {}",
response.status()
)));
}
response
.json::<OpenLibraryBook>()
.await
.map_err(|e| PinakesError::External(format!("Failed to parse OpenLibrary response: {}", e)))
}
/// Search for books by title and author
pub async fn search(&self, title: &str, author: Option<&str>) -> Result<Vec<OpenLibrarySearchResult>> {
let mut url = format!("{}/search.json?title={}", self.base_url, urlencoding::encode(title));
if let Some(author) = author {
url.push_str(&format!("&author={}", urlencoding::encode(author)));
}
url.push_str("&limit=5");
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("OpenLibrary search failed: {}", e)))?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
"OpenLibrary search returned status: {}",
response.status()
)));
}
let search_response: OpenLibrarySearchResponse = response
.json()
.await
.map_err(|e| PinakesError::External(format!("Failed to parse search results: {}", e)))?;
Ok(search_response.docs)
}
/// Fetch cover image by cover ID
pub async fn fetch_cover(&self, cover_id: i64, size: CoverSize) -> Result<Vec<u8>> {
let size_str = match size {
CoverSize::Small => "S",
CoverSize::Medium => "M",
CoverSize::Large => "L",
};
let url = format!(
"https://covers.openlibrary.org/b/id/{}-{}.jpg",
cover_id, size_str
);
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
"Cover download returned status: {}",
response.status()
)));
}
response
.bytes()
.await
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
}
/// Fetch cover by ISBN
pub async fn fetch_cover_by_isbn(&self, isbn: &str, size: CoverSize) -> Result<Vec<u8>> {
let size_str = match size {
CoverSize::Small => "S",
CoverSize::Medium => "M",
CoverSize::Large => "L",
};
let url = format!(
"https://covers.openlibrary.org/b/isbn/{}-{}.jpg",
isbn, size_str
);
let response = self
.client
.get(&url)
.send()
.await
.map_err(|e| PinakesError::External(format!("Cover download failed: {}", e)))?;
if !response.status().is_success() {
return Err(PinakesError::External(format!(
"Cover download returned status: {}",
response.status()
)));
}
response
.bytes()
.await
.map(|b| b.to_vec())
.map_err(|e| PinakesError::External(format!("Failed to read cover data: {}", e)))
}
}
/// Cover sizes understood by covers.openlibrary.org (S/M/L URL suffixes).
#[derive(Debug, Clone, Copy)]
pub enum CoverSize {
    Small, // 256x256
    Medium, // 600x800
    Large, // Original
}
/// An edition record from the OpenLibrary `/isbn/{isbn}.json` endpoint.
///
/// Every field is optional/defaulted because OpenLibrary records are sparse
/// and omit keys freely. Field names match the API's snake_case keys.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenLibraryBook {
    #[serde(default)]
    pub title: Option<String>,
    #[serde(default)]
    pub subtitle: Option<String>,
    // Author references (keys into /authors/...), not author names.
    #[serde(default)]
    pub authors: Vec<AuthorRef>,
    #[serde(default)]
    pub publishers: Vec<String>,
    #[serde(default)]
    pub publish_date: Option<String>,
    #[serde(default)]
    pub number_of_pages: Option<i32>,
    #[serde(default)]
    pub subjects: Vec<String>,
    // Numeric cover ids usable with the covers.openlibrary.org endpoints.
    #[serde(default)]
    pub covers: Vec<i64>,
    #[serde(default)]
    pub isbn_10: Vec<String>,
    #[serde(default)]
    pub isbn_13: Vec<String>,
    #[serde(default)]
    pub series: Vec<String>,
    // OpenLibrary sends descriptions as either a bare string or {"value": …}.
    #[serde(default)]
    pub description: Option<StringOrObject>,
    #[serde(default)]
    pub languages: Vec<LanguageRef>,
}
/// Reference to an OpenLibrary author record (a `/authors/...` key).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthorRef {
    pub key: String,
}

/// Reference to an OpenLibrary language record (a `/languages/...` key).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LanguageRef {
    pub key: String,
}
/// A JSON value that is either a bare string or `{"value": "..."}`.
///
/// OpenLibrary uses both shapes for description fields; `untagged` lets
/// serde try each variant in order.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum StringOrObject {
    String(String),
    Object { value: String },
}

impl StringOrObject {
    /// Borrow the inner text regardless of which JSON shape it came from.
    pub fn as_str(&self) -> &str {
        match self {
            Self::String(s) => s,
            Self::Object { value } => value,
        }
    }
}
/// Envelope returned by OpenLibrary's `/search.json` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenLibrarySearchResponse {
    // Matching documents (one per work).
    #[serde(default)]
    pub docs: Vec<OpenLibrarySearchResult>,
    // Total hit count reported by the API.
    #[serde(default)]
    pub num_found: i32,
}

/// One search hit from `/search.json`; field names match the API's keys.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenLibrarySearchResult {
    // Work key, e.g. "/works/OL...W" — hedged: not shown in SOURCE, verify.
    #[serde(default)]
    pub key: Option<String>,
    #[serde(default)]
    pub title: Option<String>,
    #[serde(default)]
    pub author_name: Vec<String>,
    #[serde(default)]
    pub first_publish_year: Option<i32>,
    #[serde(default)]
    pub publisher: Vec<String>,
    #[serde(default)]
    pub isbn: Vec<String>,
    // Cover id usable with the covers endpoint.
    #[serde(default)]
    pub cover_i: Option<i64>,
    #[serde(default)]
    pub subject: Vec<String>,
}
#[cfg(test)]
mod tests {
    use super::*;

    // Plain #[test]: construction performs no I/O and nothing is awaited,
    // so the previous #[tokio::test] async wrapper was unnecessary.
    #[test]
    fn test_openlibrary_client_creation() {
        let client = OpenLibraryClient::new();
        assert_eq!(client.base_url, "https://openlibrary.org");
    }

    // Descriptions arrive as either a bare string or {"value": …}; both
    // shapes must parse through the untagged enum.
    #[test]
    fn test_string_or_object_parsing() {
        let string_desc: StringOrObject = serde_json::from_str(r#""Simple description""#).unwrap();
        assert_eq!(string_desc.as_str(), "Simple description");
        let object_desc: StringOrObject = serde_json::from_str(r#"{"value": "Object description"}"#).unwrap();
        assert_eq!(object_desc.as_str(), "Object description");
    }
}

View file

@ -54,6 +54,9 @@ pub enum PinakesError {
#[error("path not allowed: {0}")] #[error("path not allowed: {0}")]
PathNotAllowed(String), PathNotAllowed(String),
#[error("external API error: {0}")]
External(String),
} }
impl From<rusqlite::Error> for PinakesError { impl From<rusqlite::Error> for PinakesError {

View file

@ -276,6 +276,149 @@ fn generate_heic_thumbnail(source: &Path, dest: &Path, config: &ThumbnailConfig)
} }
} }
/// Cover size variants for book covers.
#[derive(Debug, Clone, Copy)]
pub enum CoverSize {
    Tiny,     // 64x64 - for map markers, timeline
    Grid,     // 320x320 - for library grid view
    Preview,  // 1024x1024 - for quick fullscreen preview
    Original, // Full size - original cover
}

impl CoverSize {
    /// Target (width, height) for the variant; `None` means no resizing.
    /// All resized variants are square, so a single side length suffices.
    pub fn dimensions(&self) -> Option<(u32, u32)> {
        let side = match self {
            CoverSize::Tiny => 64,
            CoverSize::Grid => 320,
            CoverSize::Preview => 1024,
            CoverSize::Original => return None, // keep original dimensions
        };
        Some((side, side))
    }

    /// On-disk file name for this variant inside a media item's cover dir.
    pub fn filename(&self) -> &'static str {
        match self {
            CoverSize::Tiny => "tiny.jpg",
            CoverSize::Grid => "grid.jpg",
            CoverSize::Preview => "preview.jpg",
            CoverSize::Original => "original.jpg",
        }
    }
}
/// Generate multi-resolution covers for a book.
///
/// Decodes `source_image` once, then writes one JPEG per `CoverSize` variant
/// into `<covers_dir>/<media_id>/`, returning each (size, path) pair.
/// Resized variants are encoded at quality 90; the "original" variant is
/// re-encoded (not copied) at quality 95.
pub fn generate_book_covers(
    media_id: MediaId,
    source_image: &[u8],
    covers_dir: &Path,
) -> Result<Vec<(CoverSize, PathBuf)>> {
    // Create cover directory for this media item
    let media_cover_dir = covers_dir.join(media_id.to_string());
    std::fs::create_dir_all(&media_cover_dir)?;
    let img = image::load_from_memory(source_image)
        .map_err(|e| PinakesError::MetadataExtraction(format!("cover image load: {e}")))?;
    let mut results = Vec::new();
    // Generate each size variant
    for size in [
        CoverSize::Tiny,
        CoverSize::Grid,
        CoverSize::Preview,
        CoverSize::Original,
    ] {
        let cover_path = media_cover_dir.join(size.filename());
        match size.dimensions() {
            Some((width, height)) => {
                // Generate thumbnail at the variant's target dimensions
                let thumb = img.thumbnail(width, height);
                let mut output = std::fs::File::create(&cover_path)?;
                let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 90);
                thumb
                    .write_with_encoder(encoder)
                    .map_err(|e| PinakesError::MetadataExtraction(format!("cover encode: {e}")))?;
            }
            None => {
                // Save original (re-encoded as JPEG at higher quality)
                let mut output = std::fs::File::create(&cover_path)?;
                let encoder = image::codecs::jpeg::JpegEncoder::new_with_quality(&mut output, 95);
                img.write_with_encoder(encoder)
                    .map_err(|e| PinakesError::MetadataExtraction(format!("cover encode: {e}")))?;
            }
        }
        results.push((size, cover_path));
    }
    Ok(results)
}
/// Extract full-size cover from an EPUB file.
///
/// Returns `Ok(Some(bytes))` when a cover resource is found, `Ok(None)` when
/// the EPUB has no recognizable cover, and `Err` only if the EPUB itself
/// cannot be opened.
pub fn extract_epub_cover(epub_path: &Path) -> Result<Option<Vec<u8>>> {
    let mut doc = epub::doc::EpubDoc::new(epub_path)
        .map_err(|e| PinakesError::MetadataExtraction(format!("EPUB open: {e}")))?;
    // Try to get the cover image declared in the EPUB metadata first
    if let Some(cover_id) = doc.get_cover_id()
        && let Some((cover_data, _mime)) = doc.get_resource(&cover_id)
    {
        return Ok(Some(cover_data));
    }
    // Fallback: look for common cover image filenames inside the archive
    let cover_names = ["cover.jpg", "cover.jpeg", "cover.png", "Cover.jpg", "Cover.jpeg", "Cover.png"];
    for name in &cover_names {
        if let Some(data) = doc.get_resource_by_path(name) {
            return Ok(Some(data));
        }
    }
    Ok(None)
}
/// Extract full-size cover from a PDF file (first page).
///
/// Shells out to `pdftoppm` (must be on PATH) to render page 1 as a JPEG
/// scaled to 1200px, reads the temp file back, and deletes it. Returns
/// `Ok(None)` if pdftoppm succeeded but produced no output file.
pub fn extract_pdf_cover(pdf_path: &Path) -> Result<Option<Vec<u8>>> {
    // Use pdftoppm to extract the first page at high resolution
    let pdftoppm = "pdftoppm";
    let temp_dir = std::env::temp_dir();
    // Unique prefix so concurrent extractions don't collide in the temp dir.
    let temp_prefix = temp_dir.join(format!("pdf_cover_{}", uuid::Uuid::new_v4()));
    let status = Command::new(pdftoppm)
        .args(["-jpeg", "-f", "1", "-l", "1", "-scale-to", "1200"])
        .arg(pdf_path)
        .arg(&temp_prefix)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status()
        .map_err(|e| PinakesError::MetadataExtraction(format!("pdftoppm: {e}")))?;
    if !status.success() {
        return Err(PinakesError::MetadataExtraction(format!(
            "pdftoppm exited with status {}",
            status
        )));
    }
    // pdftoppm outputs files like prefix-1.jpg
    let output_path = format!("{}-1.jpg", temp_prefix.display());
    let output_pathbuf = PathBuf::from(&output_path);
    if output_pathbuf.exists() {
        let data = std::fs::read(&output_pathbuf)?;
        // Best-effort cleanup; failure to remove the temp file is not fatal.
        let _ = std::fs::remove_file(&output_pathbuf);
        Ok(Some(data))
    } else {
        Ok(None)
    }
}
/// Returns the default covers directory under the data dir
/// (i.e. `<data_dir>/covers`).
pub fn default_covers_dir() -> PathBuf {
    crate::config::Config::default_data_dir().join("covers")
}
/// Returns the default thumbnail directory under the data dir. /// Returns the default thumbnail directory under the data dir.
pub fn default_thumbnail_dir() -> PathBuf { pub fn default_thumbnail_dir() -> PathBuf {
crate::config::Config::default_data_dir().join("thumbnails") crate::config::Config::default_data_dir().join("thumbnails")

View file

@ -0,0 +1,199 @@
use pinakes_core::books::{extract_isbn_from_text, normalize_isbn, parse_author_file_as};
use pinakes_core::enrichment::books::BookEnricher;
use pinakes_core::enrichment::googlebooks::GoogleBooksClient;
use pinakes_core::enrichment::openlibrary::OpenLibraryClient;
use pinakes_core::thumbnail::{extract_epub_cover, generate_book_covers, CoverSize};
// normalize_isbn should canonicalize any valid input to a bare ISBN-13
// and reject garbage.
#[test]
fn test_isbn_normalization() {
    // Valid ISBN-10 to ISBN-13 conversion (The Hobbit)
    let result = normalize_isbn("0547928220");
    assert!(result.is_ok());
    let isbn13 = result.unwrap();
    assert_eq!(isbn13.len(), 13);
    assert!(isbn13.starts_with("978"));
    // Valid ISBN-13 should return itself
    let result = normalize_isbn("9780547928227");
    assert!(result.is_ok());
    assert_eq!(result.unwrap(), "9780547928227");
    // ISBN with hyphens should be normalized
    let result = normalize_isbn("978-0-547-92822-7");
    assert!(result.is_ok());
    assert_eq!(result.unwrap(), "9780547928227");
    // Invalid ISBN
    let result = normalize_isbn("invalid");
    assert!(result.is_err());
}
// extract_isbn_from_text should find hyphenated ISBN-13s and ISBN-10s
// embedded in prose, and return None when no ISBN is present.
#[test]
fn test_isbn_extraction_from_text() {
    let text = "This book has ISBN-13: 978-0-123-45678-9 in the middle.";
    let result = extract_isbn_from_text(text);
    assert!(result.is_some());
    let isbn = result.unwrap();
    assert!(isbn.contains("978"));
    let text_isbn10 = "Old format ISBN: 0-123-45678-9";
    let result = extract_isbn_from_text(text_isbn10);
    assert!(result.is_some());
    let text_no_isbn = "This text has no ISBN at all.";
    let result = extract_isbn_from_text(text_no_isbn);
    assert!(result.is_none());
}
// parse_author_file_as converts "First [Middle…] Last" to "Last, First …"
// and leaves single names and already-inverted names alone.
#[test]
fn test_author_file_as_parsing() {
    // Standard name: "First Last" -> "Last, First"
    let result = parse_author_file_as("John Smith");
    assert_eq!(result, "Smith, John");
    // Single name
    let result = parse_author_file_as("Shakespeare");
    assert_eq!(result, "Shakespeare");
    // Multiple middle names
    let result = parse_author_file_as("John Ronald Reuel Tolkien");
    assert_eq!(result, "Tolkien, John Ronald Reuel");
    // Already in "Last, First" format
    let result = parse_author_file_as("Tolkien, J.R.R.");
    assert_eq!(result, "Tolkien, J.R.R.");
}
// Smoke tests: constructors must not panic or perform I/O.
#[test]
fn test_book_enricher_creation() {
    let enricher = BookEnricher::new(None);
    // Just verify it can be created
    drop(enricher);
    let enricher_with_key = BookEnricher::new(Some("test-api-key".to_string()));
    drop(enricher_with_key);
}

#[test]
fn test_openlibrary_client_creation() {
    let client = OpenLibraryClient::new();
    // Verify client is created successfully
    drop(client);
}

#[test]
fn test_googlebooks_client_creation() {
    let client = GoogleBooksClient::new(None);
    drop(client);
    let client_with_key = GoogleBooksClient::new(Some("test-key".to_string()));
    drop(client_with_key);
}
// Pin the variant -> dimensions mapping (Original means "no resize").
#[test]
fn test_cover_size_dimensions() {
    assert_eq!(CoverSize::Tiny.dimensions(), Some((64, 64)));
    assert_eq!(CoverSize::Grid.dimensions(), Some((320, 320)));
    assert_eq!(CoverSize::Preview.dimensions(), Some((1024, 1024)));
    assert_eq!(CoverSize::Original.dimensions(), None);
}

// Pin the variant -> on-disk filename mapping.
#[test]
fn test_cover_size_filenames() {
    assert_eq!(CoverSize::Tiny.filename(), "tiny.jpg");
    assert_eq!(CoverSize::Grid.filename(), "grid.jpg");
    assert_eq!(CoverSize::Preview.filename(), "preview.jpg");
    assert_eq!(CoverSize::Original.filename(), "original.jpg");
}
// Note: The following tests would require actual EPUB files and network access,
// so they're marked as ignored by default. Run with --ignored to execute them.
#[test]
#[ignore]
fn test_epub_cover_extraction() {
    // This test requires a real EPUB file
    // Create a test EPUB file path
    let epub_path = std::path::PathBuf::from("test_fixtures/sample.epub");
    if !epub_path.exists() {
        // Skip if test fixture doesn't exist
        return;
    }
    let result = extract_epub_cover(&epub_path);
    // Should either succeed with Some(data) or None if no cover found
    assert!(result.is_ok());
}
// End-to-end cover generation against a synthetic in-memory PNG; ignored
// by default because it does real image encoding and filesystem writes.
#[test]
#[ignore]
fn test_book_cover_generation() {
    // This test requires a sample image
    use tempfile::tempdir;
    // Create a minimal 100x100 red PNG in memory
    let mut img_data = Vec::new();
    {
        use image::{ImageBuffer, Rgb};
        let img: ImageBuffer<Rgb<u8>, Vec<u8>> = ImageBuffer::from_fn(100, 100, |_, _| Rgb([255u8, 0u8, 0u8]));
        img.write_to(&mut std::io::Cursor::new(&mut img_data), image::ImageFormat::Png)
            .unwrap();
    }
    let temp_dir = tempdir().unwrap();
    let media_id = pinakes_core::model::MediaId::new();
    let result = generate_book_covers(media_id, &img_data, temp_dir.path());
    assert!(result.is_ok());
    let covers = result.unwrap();
    assert_eq!(covers.len(), 4); // tiny, grid, preview, original
    // Verify all cover files exist
    for (size, path) in &covers {
        assert!(path.exists(), "Cover {:?} should exist at {:?}", size, path);
    }
}
// Live network tests against the real APIs; ignored by default. They
// deliberately tolerate Err so CI without network access stays green.
#[tokio::test]
#[ignore]
async fn test_openlibrary_isbn_fetch() {
    // This test requires network access
    let client = OpenLibraryClient::new();
    // Use a known ISBN for "The Hobbit"
    let result = client.fetch_by_isbn("9780547928227").await;
    // Should either succeed or fail gracefully
    // We don't assert success because network might not be available
    match result {
        Ok(book) => {
            assert!(book.title.is_some());
        }
        Err(_) => {
            // Network error or book not found - acceptable in tests
        }
    }
}

#[tokio::test]
#[ignore]
async fn test_googlebooks_isbn_fetch() {
    // This test requires network access
    let client = GoogleBooksClient::new(None);
    // Use a known ISBN
    let result = client.fetch_by_isbn("9780547928227").await;
    match result {
        Ok(books) => {
            if !books.is_empty() {
                assert!(books[0].volume_info.title.is_some());
            }
        }
        Err(_) => {
            // Network error - acceptable in tests
        }
    }
}

View file

@ -301,15 +301,15 @@ pub struct ReadingListQuery {
pub fn routes() -> Router<AppState> { pub fn routes() -> Router<AppState> {
Router::new() Router::new()
// Metadata routes // Metadata routes
.route("/:id/metadata", get(get_book_metadata)) .route("/{id}/metadata", get(get_book_metadata))
// Browse routes // Browse routes
.route("/", get(list_books)) .route("/", get(list_books))
.route("/series", get(list_series)) .route("/series", get(list_series))
.route("/series/:name", get(get_series_books)) .route("/series/{name}", get(get_series_books))
.route("/authors", get(list_authors)) .route("/authors", get(list_authors))
.route("/authors/:name/books", get(get_author_books)) .route("/authors/{name}/books", get(get_author_books))
// Reading progress routes // Reading progress routes
.route("/:id/progress", get(get_reading_progress)) .route("/{id}/progress", get(get_reading_progress))
.route("/:id/progress", put(update_reading_progress)) .route("/{id}/progress", put(update_reading_progress))
.route("/reading-list", get(get_reading_list)) .route("/reading-list", get(get_reading_list))
} }

View file

@ -212,15 +212,13 @@ fn handle_api_result(state: &mut AppState, result: ApiResult) {
state.search_selected = Some(0); state.search_selected = Some(0);
} }
} }
ApiResult::Tags(tags) => { ApiResult::AllTags(tags) => {
// All tags in the system (for Tags view)
state.tags = tags; state.tags = tags;
if !state.tags.is_empty() { if !state.tags.is_empty() {
state.tag_selected = Some(0); state.tag_selected = Some(0);
} }
} }
ApiResult::AllTags(tags) => {
state.all_tags = tags;
}
ApiResult::Collections(cols) => { ApiResult::Collections(cols) => {
state.collections = cols; state.collections = cols;
if !state.collections.is_empty() { if !state.collections.is_empty() {
@ -623,6 +621,16 @@ async fn handle_action(
match client.list_jobs().await { match client.list_jobs().await {
Ok(jobs) => { Ok(jobs) => {
tracing::debug!("Found {} background jobs", jobs.len()); tracing::debug!("Found {} background jobs", jobs.len());
for job in &jobs {
tracing::debug!(
"Job {}: kind={:?}, status={:?}, created={}, updated={}",
job.id,
job.kind,
job.status,
job.created_at,
job.updated_at
);
}
} }
Err(e) => tracing::warn!("Failed to list jobs: {}", e), Err(e) => tracing::warn!("Failed to list jobs: {}", e),
} }
@ -728,7 +736,7 @@ async fn handle_action(
} }
View::Tags => match client.list_tags().await { View::Tags => match client.list_tags().await {
Ok(tags) => { Ok(tags) => {
if let Err(e) = tx.send(AppEvent::ApiResult(ApiResult::Tags(tags))) { if let Err(e) = tx.send(AppEvent::ApiResult(ApiResult::AllTags(tags))) {
tracing::warn!("failed to send event: {e}"); tracing::warn!("failed to send event: {e}");
} }
} }

View file

@ -102,9 +102,7 @@ pub struct DuplicateGroupResponse {
} }
/// Background job response from the API. /// Background job response from the API.
/// Fields are used for deserialization; the job count is logged in the Database view.
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
pub struct JobResponse { pub struct JobResponse {
pub id: String, pub id: String,
pub kind: serde_json::Value, pub kind: serde_json::Value,

View file

@ -11,11 +11,9 @@ pub enum AppEvent {
} }
#[derive(Debug)] #[derive(Debug)]
#[allow(dead_code)]
pub enum ApiResult { pub enum ApiResult {
MediaList(Vec<crate::client::MediaResponse>), MediaList(Vec<crate::client::MediaResponse>),
SearchResults(crate::client::SearchResponse), SearchResults(crate::client::SearchResponse),
Tags(Vec<crate::client::TagResponse>),
AllTags(Vec<crate::client::TagResponse>), AllTags(Vec<crate::client::TagResponse>),
Collections(Vec<crate::client::CollectionResponse>), Collections(Vec<crate::client::CollectionResponse>),
ImportDone(crate::client::ImportResponse), ImportDone(crate::client::ImportResponse),

View file

@ -293,7 +293,6 @@ pub struct CreateSavedSearchRequest {
pub sort_order: Option<String>, pub sort_order: Option<String>,
} }
#[allow(dead_code)]
impl ApiClient { impl ApiClient {
pub fn new(base_url: &str, api_key: Option<&str>) -> Self { pub fn new(base_url: &str, api_key: Option<&str>) -> Self {
let mut headers = header::HeaderMap::new(); let mut headers = header::HeaderMap::new();
@ -1124,3 +1123,40 @@ impl ApiClient {
.unwrap_or_else(|_| Client::new()); .unwrap_or_else(|_| Client::new());
} }
} }
#[cfg(test)]
mod tests {
    use super::*;

    /// The configured base URL is reported back unchanged.
    #[test]
    fn test_base_url() {
        let api = ApiClient::new("http://localhost:3000", None);
        assert_eq!(api.base_url(), "http://localhost:3000");
    }

    /// Stream URLs follow `{base}/api/v1/media/{id}/stream`.
    #[test]
    fn test_stream_url() {
        let api = ApiClient::new("http://localhost:3000", None);
        assert_eq!(
            api.stream_url("test-id-123"),
            "http://localhost:3000/api/v1/media/test-id-123/stream"
        );
    }

    /// Thumbnail URLs follow `{base}/api/v1/media/{id}/thumbnail`.
    #[test]
    fn test_thumbnail_url() {
        let api = ApiClient::new("http://localhost:3000", None);
        assert_eq!(
            api.thumbnail_url("test-id-456"),
            "http://localhost:3000/api/v1/media/test-id-456/thumbnail"
        );
    }

    /// Supplying an API key must not alter the reported base URL.
    #[test]
    fn test_client_creation_with_api_key() {
        let api = ApiClient::new("http://localhost:3000", Some("test-key"));
        assert_eq!(api.base_url(), "http://localhost:3000");
    }

    /// A trailing slash on the configured URL does not leak into `base_url()`.
    #[test]
    fn test_base_url_trailing_slash() {
        let api = ApiClient::new("http://localhost:3000/", None);
        assert_eq!(api.base_url(), "http://localhost:3000");
    }
}

View file

@ -5,7 +5,7 @@ use super::markdown_viewer::MarkdownViewer;
use super::media_player::MediaPlayer; use super::media_player::MediaPlayer;
use super::pdf_viewer::PdfViewer; use super::pdf_viewer::PdfViewer;
use super::utils::{format_duration, format_size, media_category, type_badge_class}; use super::utils::{format_duration, format_size, media_category, type_badge_class};
use crate::client::{MediaResponse, MediaUpdateEvent, TagResponse}; use crate::client::{ApiClient, MediaResponse, MediaUpdateEvent, TagResponse};
#[component] #[component]
pub fn Detail( pub fn Detail(
@ -100,9 +100,11 @@ pub fn Detail(
let has_system_fields = !system_fields.is_empty(); let has_system_fields = !system_fields.is_empty();
let has_user_fields = !user_fields.is_empty(); let has_user_fields = !user_fields.is_empty();
// Media preview URLs // Media preview URLs - use ApiClient methods for consistent URL building
let stream_url = format!("{}/api/v1/media/{}/stream", server_url, media.id); let client = ApiClient::new(&server_url, None);
let thumbnail_url = format!("{}/api/v1/media/{}/thumbnail", server_url, media.id); tracing::trace!("Using API base URL: {}", client.base_url());
let stream_url = client.stream_url(&media.id);
let thumbnail_url = client.thumbnail_url(&media.id);
let category = media_category(&media.media_type); let category = media_category(&media.media_type);
let has_thumbnail = media.has_thumbnail; let has_thumbnail = media.has_thumbnail;

View file

@ -42,19 +42,16 @@ impl Default for PlayQueue {
impl PlayQueue { impl PlayQueue {
/// Check if the queue is empty. /// Check if the queue is empty.
#[allow(dead_code)]
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.items.is_empty() self.items.is_empty()
} }
/// Get the current item in the queue. /// Get the current item in the queue.
#[allow(dead_code)]
pub fn current(&self) -> Option<&QueueItem> { pub fn current(&self) -> Option<&QueueItem> {
self.items.get(self.current_index) self.items.get(self.current_index)
} }
/// Advance to the next item based on repeat mode. /// Advance to the next item based on repeat mode.
#[allow(dead_code)]
pub fn next(&mut self) -> Option<&QueueItem> { pub fn next(&mut self) -> Option<&QueueItem> {
if self.items.is_empty() { if self.items.is_empty() {
return None; return None;
@ -77,7 +74,6 @@ impl PlayQueue {
} }
/// Go to the previous item based on repeat mode. /// Go to the previous item based on repeat mode.
#[allow(dead_code)]
pub fn previous(&mut self) -> Option<&QueueItem> { pub fn previous(&mut self) -> Option<&QueueItem> {
if self.items.is_empty() { if self.items.is_empty() {
return None; return None;
@ -91,13 +87,11 @@ impl PlayQueue {
} }
/// Add an item to the queue. /// Add an item to the queue.
#[allow(dead_code)]
pub fn add(&mut self, item: QueueItem) { pub fn add(&mut self, item: QueueItem) {
self.items.push(item); self.items.push(item);
} }
/// Remove an item from the queue by index. /// Remove an item from the queue by index.
#[allow(dead_code)]
pub fn remove(&mut self, index: usize) { pub fn remove(&mut self, index: usize) {
if index < self.items.len() { if index < self.items.len() {
self.items.remove(index); self.items.remove(index);
@ -108,14 +102,12 @@ impl PlayQueue {
} }
/// Clear all items from the queue. /// Clear all items from the queue.
#[allow(dead_code)]
pub fn clear(&mut self) { pub fn clear(&mut self) {
self.items.clear(); self.items.clear();
self.current_index = 0; self.current_index = 0;
} }
/// Toggle between repeat modes: Off -> All -> One -> Off. /// Toggle between repeat modes: Off -> All -> One -> Off.
#[allow(dead_code)]
pub fn toggle_repeat(&mut self) { pub fn toggle_repeat(&mut self) {
self.repeat = match self.repeat { self.repeat = match self.repeat {
RepeatMode::Off => RepeatMode::All, RepeatMode::Off => RepeatMode::All,
@ -125,7 +117,6 @@ impl PlayQueue {
} }
/// Toggle shuffle mode on/off. /// Toggle shuffle mode on/off.
#[allow(dead_code)]
pub fn toggle_shuffle(&mut self) { pub fn toggle_shuffle(&mut self) {
self.shuffle = !self.shuffle; self.shuffle = !self.shuffle;
} }
@ -536,3 +527,178 @@ pub fn QueuePanel(
} }
} }
} }
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a minimal audio `QueueItem` test fixture.
    ///
    /// The queue operations under test only inspect `media_id`, so the
    /// optional fields are left empty; this replaces seven near-identical
    /// hand-rolled struct literals.
    fn item(media_id: &str, title: &str) -> QueueItem {
        QueueItem {
            media_id: media_id.to_string(),
            title: title.to_string(),
            artist: None,
            duration_secs: None,
            media_type: "audio".to_string(),
            stream_url: format!("/stream/{media_id}"),
            thumbnail_url: None,
        }
    }

    #[test]
    fn test_play_queue_is_empty() {
        // A freshly-constructed queue holds no items.
        let queue = PlayQueue::default();
        assert!(queue.is_empty());
    }

    #[test]
    fn test_play_queue_add() {
        let mut queue = PlayQueue::default();
        queue.add(item("test1", "Test Song"));
        assert!(!queue.is_empty());
        assert_eq!(queue.items.len(), 1);
    }

    #[test]
    fn test_play_queue_current() {
        let mut queue = PlayQueue::default();
        // An empty queue has no current item.
        assert!(queue.current().is_none());
        queue.add(item("test1", "Test Song"));
        assert!(queue.current().is_some());
        assert_eq!(queue.current().unwrap().media_id, "test1");
    }

    #[test]
    fn test_play_queue_next() {
        let mut queue = PlayQueue::default();
        queue.repeat = RepeatMode::Off;
        queue.add(item("test1", "Song 1"));
        queue.add(item("test2", "Song 2"));
        // From the first item, next() advances to the second.
        let next = queue.next();
        assert!(next.is_some());
        assert_eq!(next.unwrap().media_id, "test2");
    }

    #[test]
    fn test_play_queue_previous() {
        let mut queue = PlayQueue::default();
        queue.add(item("test1", "Song 1"));
        queue.add(item("test2", "Song 2"));
        queue.current_index = 1;
        // From the second item, previous() steps back to the first.
        let prev = queue.previous();
        assert!(prev.is_some());
        assert_eq!(prev.unwrap().media_id, "test1");
    }

    #[test]
    fn test_play_queue_remove() {
        let mut queue = PlayQueue::default();
        queue.add(item("test1", "Song 1"));
        queue.add(item("test2", "Song 2"));
        // Removing the first item shifts the remaining one down.
        queue.remove(0);
        assert_eq!(queue.items.len(), 1);
        assert_eq!(queue.items[0].media_id, "test2");
    }

    #[test]
    fn test_play_queue_clear() {
        let mut queue = PlayQueue::default();
        queue.add(item("test1", "Song 1"));
        // clear() empties the queue and resets the cursor.
        queue.clear();
        assert!(queue.is_empty());
        assert_eq!(queue.current_index, 0);
    }

    #[test]
    fn test_play_queue_toggle_repeat() {
        // Repeat cycles Off -> All -> One -> Off.
        let mut queue = PlayQueue::default();
        assert_eq!(queue.repeat, RepeatMode::Off);
        queue.toggle_repeat();
        assert_eq!(queue.repeat, RepeatMode::All);
        queue.toggle_repeat();
        assert_eq!(queue.repeat, RepeatMode::One);
        queue.toggle_repeat();
        assert_eq!(queue.repeat, RepeatMode::Off);
    }

    #[test]
    fn test_play_queue_toggle_shuffle() {
        // Shuffle is a plain boolean toggle.
        let mut queue = PlayQueue::default();
        assert!(!queue.shuffle);
        queue.toggle_shuffle();
        assert!(queue.shuffle);
        queue.toggle_shuffle();
        assert!(!queue.shuffle);
    }
}

View file

@ -0,0 +1,25 @@
# Manifest for the HEIF/HEIC image support plugin.
[plugin]
name = "heif-support"
version = "1.0.0"
# Pinakes plugin API version this manifest targets.
api_version = "1.0"
author = "Pinakes Team"
description = "HEIF/HEIC image format support with metadata extraction and thumbnail generation"
homepage = "https://github.com/pinakes/pinakes"
license = "MIT OR Apache-2.0"
# Roles this plugin registers with the host.
kind = ["media_type", "metadata_extractor", "thumbnail_generator"]
# Compiled WebAssembly artifact loaded by the plugin runtime.
[plugin.binary]
wasm = "heif_support.wasm"
# Resource limits for the plugin's execution — presumably enforced by the
# host sandbox; confirm against the plugin runtime.
[capabilities]
max_memory_mb = 256
max_cpu_time_secs = 30
# Filesystem paths the plugin is allowed to read from / write to.
[capabilities.filesystem]
read = ["/tmp/pinakes-input"]
write = ["/tmp/pinakes-output"]
# User-configurable options with their types, defaults, and descriptions.
[config]
extract_exif = { type = "boolean", default = true, description = "Extract EXIF metadata from HEIF images" }
generate_thumbnails = { type = "boolean", default = true, description = "Generate thumbnails for HEIF images" }
thumbnail_quality = { type = "integer", default = 85, description = "JPEG quality for thumbnails (1-100)" }

View file

@ -0,0 +1,21 @@
# Manifest for the Markdown frontmatter metadata-extraction plugin.
[plugin]
name = "markdown-metadata"
version = "1.0.0"
# Pinakes plugin API version this manifest targets.
api_version = "1.0"
author = "Pinakes Team"
description = "Extract metadata from Markdown files with YAML frontmatter"
homepage = "https://github.com/pinakes/pinakes"
license = "MIT OR Apache-2.0"
# Roles this plugin registers with the host.
kind = ["metadata_extractor"]
# Compiled WebAssembly artifact loaded by the plugin runtime.
[plugin.binary]
wasm = "markdown_metadata.wasm"
[capabilities]
# No filesystem or network access needed
# Plugin operates on provided content
# User-configurable options with their types, defaults, and descriptions.
[config]
extract_tags = { type = "boolean", default = true, description = "Extract tags from YAML frontmatter" }
parse_yaml = { type = "boolean", default = true, description = "Parse YAML frontmatter" }
max_file_size = { type = "integer", default = 10485760, description = "Maximum file size in bytes (10MB)" }