treewide: fix various UI bugs; optimize crypto dependencies & format

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If8fe8b38c1d9c4fecd40ff71f88d2ae06a6a6964
This commit is contained in:
raf 2026-02-10 12:56:05 +03:00
commit 3ccddce7fd
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
178 changed files with 58342 additions and 54241 deletions

View file

@ -1,81 +1,91 @@
use std::path::Path;
use lofty::file::{AudioFile, TaggedFileExt};
use lofty::tag::Accessor;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaType};
use lofty::{
file::{AudioFile, TaggedFileExt},
tag::Accessor,
};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct AudioExtractor;
impl MetadataExtractor for AudioExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let tagged_file = lofty::read_from_path(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("audio metadata: {e}")))?;
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let tagged_file = lofty::read_from_path(path).map_err(|e| {
PinakesError::MetadataExtraction(format!("audio metadata: {e}"))
})?;
let mut meta = ExtractedMetadata::default();
let mut meta = ExtractedMetadata::default();
if let Some(tag) = tagged_file
.primary_tag()
.or_else(|| tagged_file.first_tag())
{
meta.title = tag.title().map(|s| s.to_string());
meta.artist = tag.artist().map(|s| s.to_string());
meta.album = tag.album().map(|s| s.to_string());
meta.genre = tag.genre().map(|s| s.to_string());
meta.year = tag.date().map(|ts| ts.year as i32);
}
if let Some(tag) = tagged_file
.primary_tag()
.or_else(|| tagged_file.first_tag())
{
if let Some(track) = tag.track() {
meta.extra
.insert("track_number".to_string(), track.to_string());
}
if let Some(disc) = tag.disk() {
meta.extra
.insert("disc_number".to_string(), disc.to_string());
}
if let Some(comment) = tag.comment() {
meta.extra
.insert("comment".to_string(), comment.to_string());
}
}
let properties = tagged_file.properties();
let duration = properties.duration();
if !duration.is_zero() {
meta.duration_secs = Some(duration.as_secs_f64());
}
if let Some(bitrate) = properties.audio_bitrate() {
meta.extra
.insert("bitrate".to_string(), format!("{bitrate} kbps"));
}
if let Some(sample_rate) = properties.sample_rate() {
meta.extra
.insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
}
if let Some(channels) = properties.channels() {
meta.extra
.insert("channels".to_string(), channels.to_string());
}
Ok(meta)
if let Some(tag) = tagged_file
.primary_tag()
.or_else(|| tagged_file.first_tag())
{
meta.title = tag.title().map(|s| s.to_string());
meta.artist = tag.artist().map(|s| s.to_string());
meta.album = tag.album().map(|s| s.to_string());
meta.genre = tag.genre().map(|s| s.to_string());
meta.year = tag.date().map(|ts| ts.year as i32);
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp3),
MediaType::Builtin(BuiltinMediaType::Flac),
MediaType::Builtin(BuiltinMediaType::Ogg),
MediaType::Builtin(BuiltinMediaType::Wav),
MediaType::Builtin(BuiltinMediaType::Aac),
MediaType::Builtin(BuiltinMediaType::Opus),
]
if let Some(tag) = tagged_file
.primary_tag()
.or_else(|| tagged_file.first_tag())
{
if let Some(track) = tag.track() {
meta
.extra
.insert("track_number".to_string(), track.to_string());
}
if let Some(disc) = tag.disk() {
meta
.extra
.insert("disc_number".to_string(), disc.to_string());
}
if let Some(comment) = tag.comment() {
meta
.extra
.insert("comment".to_string(), comment.to_string());
}
}
let properties = tagged_file.properties();
let duration = properties.duration();
if !duration.is_zero() {
meta.duration_secs = Some(duration.as_secs_f64());
}
if let Some(bitrate) = properties.audio_bitrate() {
meta
.extra
.insert("bitrate".to_string(), format!("{bitrate} kbps"));
}
if let Some(sample_rate) = properties.sample_rate() {
meta
.extra
.insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
}
if let Some(channels) = properties.channels() {
meta
.extra
.insert("channels".to_string(), channels.to_string());
}
Ok(meta)
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp3),
MediaType::Builtin(BuiltinMediaType::Flac),
MediaType::Builtin(BuiltinMediaType::Ogg),
MediaType::Builtin(BuiltinMediaType::Wav),
MediaType::Builtin(BuiltinMediaType::Aac),
MediaType::Builtin(BuiltinMediaType::Opus),
]
}
}

View file

@ -1,358 +1,367 @@
use std::path::Path;
use crate::error::{PinakesError, Result};
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::{PinakesError, Result},
media_type::{BuiltinMediaType, MediaType},
};
pub struct DocumentExtractor;
impl MetadataExtractor for DocumentExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Builtin(BuiltinMediaType::Pdf)) => extract_pdf(path),
Some(MediaType::Builtin(BuiltinMediaType::Epub)) => extract_epub(path),
Some(MediaType::Builtin(BuiltinMediaType::Djvu)) => extract_djvu(path),
_ => Ok(ExtractedMetadata::default()),
}
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Builtin(BuiltinMediaType::Pdf)) => extract_pdf(path),
Some(MediaType::Builtin(BuiltinMediaType::Epub)) => extract_epub(path),
Some(MediaType::Builtin(BuiltinMediaType::Djvu)) => extract_djvu(path),
_ => Ok(ExtractedMetadata::default()),
}
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Pdf),
MediaType::Builtin(BuiltinMediaType::Epub),
MediaType::Builtin(BuiltinMediaType::Djvu),
]
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Pdf),
MediaType::Builtin(BuiltinMediaType::Epub),
MediaType::Builtin(BuiltinMediaType::Djvu),
]
}
}
fn extract_pdf(path: &Path) -> Result<ExtractedMetadata> {
let doc = lopdf::Document::load(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("PDF load: {e}")))?;
let doc = lopdf::Document::load(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("PDF load: {e}")))?;
let mut meta = ExtractedMetadata::default();
let mut book_meta = crate::model::ExtractedBookMetadata::default();
let mut meta = ExtractedMetadata::default();
let mut book_meta = crate::model::ExtractedBookMetadata::default();
// Find the Info dictionary via the trailer
if let Ok(info_ref) = doc.trailer.get(b"Info") {
let info_obj = if let Ok(reference) = info_ref.as_reference() {
doc.get_object(reference).ok()
} else {
Some(info_ref)
};
// Find the Info dictionary via the trailer
if let Ok(info_ref) = doc.trailer.get(b"Info") {
let info_obj = if let Ok(reference) = info_ref.as_reference() {
doc.get_object(reference).ok()
} else {
Some(info_ref)
};
if let Some(obj) = info_obj
&& let Ok(dict) = obj.as_dict()
{
if let Ok(title) = dict.get(b"Title") {
meta.title = pdf_object_to_string(title);
}
if let Ok(author) = dict.get(b"Author") {
let author_str = pdf_object_to_string(author);
meta.artist = author_str.clone();
// Parse multiple authors if separated by semicolon, comma, or "and"
if let Some(authors_str) = author_str {
let author_names: Vec<String> = authors_str
.split(&[';', ','][..])
.flat_map(|part| part.split(" and "))
.map(|name| name.trim().to_string())
.filter(|name| !name.is_empty())
.collect();
book_meta.authors = author_names
.into_iter()
.enumerate()
.map(|(pos, name)| {
let mut author = crate::model::AuthorInfo::new(name);
author.position = pos as i32;
author
})
.collect();
}
}
if let Ok(subject) = dict.get(b"Subject") {
meta.description = pdf_object_to_string(subject);
}
if let Ok(creator) = dict.get(b"Creator") {
meta.extra.insert(
"creator".to_string(),
pdf_object_to_string(creator).unwrap_or_default(),
);
}
if let Ok(producer) = dict.get(b"Producer") {
meta.extra.insert(
"producer".to_string(),
pdf_object_to_string(producer).unwrap_or_default(),
);
}
}
}
// Page count
let pages = doc.get_pages();
let page_count = pages.len();
if page_count > 0 {
book_meta.page_count = Some(page_count as i32);
}
// Try to extract ISBN from first few pages
// Extract text from up to the first 5 pages and search for ISBN patterns
let mut extracted_text = String::new();
let max_pages = page_count.min(5);
for (_page_num, page_id) in pages.iter().take(max_pages) {
if let Ok(content) = doc.get_page_content(*page_id) {
// PDF content streams contain raw operators, but may have text strings
if let Ok(text) = std::str::from_utf8(&content) {
extracted_text.push_str(text);
extracted_text.push(' ');
}
}
}
// Extract ISBN from the text
if let Some(isbn) = crate::books::extract_isbn_from_text(&extracted_text)
&& let Ok(normalized) = crate::books::normalize_isbn(&isbn)
if let Some(obj) = info_obj
&& let Ok(dict) = obj.as_dict()
{
book_meta.isbn13 = Some(normalized);
book_meta.isbn = Some(isbn);
if let Ok(title) = dict.get(b"Title") {
meta.title = pdf_object_to_string(title);
}
if let Ok(author) = dict.get(b"Author") {
let author_str = pdf_object_to_string(author);
meta.artist = author_str.clone();
// Parse multiple authors if separated by semicolon, comma, or "and"
if let Some(authors_str) = author_str {
let author_names: Vec<String> = authors_str
.split(&[';', ','][..])
.flat_map(|part| part.split(" and "))
.map(|name| name.trim().to_string())
.filter(|name| !name.is_empty())
.collect();
book_meta.authors = author_names
.into_iter()
.enumerate()
.map(|(pos, name)| {
let mut author = crate::model::AuthorInfo::new(name);
author.position = pos as i32;
author
})
.collect();
}
}
if let Ok(subject) = dict.get(b"Subject") {
meta.description = pdf_object_to_string(subject);
}
if let Ok(creator) = dict.get(b"Creator") {
meta.extra.insert(
"creator".to_string(),
pdf_object_to_string(creator).unwrap_or_default(),
);
}
if let Ok(producer) = dict.get(b"Producer") {
meta.extra.insert(
"producer".to_string(),
pdf_object_to_string(producer).unwrap_or_default(),
);
}
}
}
// Set format
book_meta.format = Some("pdf".to_string());
// Page count
let pages = doc.get_pages();
let page_count = pages.len();
if page_count > 0 {
book_meta.page_count = Some(page_count as i32);
}
meta.book_metadata = Some(book_meta);
Ok(meta)
// Try to extract ISBN from first few pages
// Extract text from up to the first 5 pages and search for ISBN patterns
let mut extracted_text = String::new();
let max_pages = page_count.min(5);
for (_page_num, page_id) in pages.iter().take(max_pages) {
if let Ok(content) = doc.get_page_content(*page_id) {
// PDF content streams contain raw operators, but may have text strings
if let Ok(text) = std::str::from_utf8(&content) {
extracted_text.push_str(text);
extracted_text.push(' ');
}
}
}
// Extract ISBN from the text
if let Some(isbn) = crate::books::extract_isbn_from_text(&extracted_text)
&& let Ok(normalized) = crate::books::normalize_isbn(&isbn)
{
book_meta.isbn13 = Some(normalized);
book_meta.isbn = Some(isbn);
}
// Set format
book_meta.format = Some("pdf".to_string());
meta.book_metadata = Some(book_meta);
Ok(meta)
}
fn pdf_object_to_string(obj: &lopdf::Object) -> Option<String> {
match obj {
lopdf::Object::String(bytes, _) => Some(String::from_utf8_lossy(bytes).into_owned()),
lopdf::Object::Name(name) => Some(String::from_utf8_lossy(name).into_owned()),
_ => None,
}
match obj {
lopdf::Object::String(bytes, _) => {
Some(String::from_utf8_lossy(bytes).into_owned())
},
lopdf::Object::Name(name) => {
Some(String::from_utf8_lossy(name).into_owned())
},
_ => None,
}
}
fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
let mut doc = epub::doc::EpubDoc::new(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("EPUB parse: {e}")))?;
let mut doc = epub::doc::EpubDoc::new(path).map_err(|e| {
PinakesError::MetadataExtraction(format!("EPUB parse: {e}"))
})?;
let mut meta = ExtractedMetadata {
title: doc.mdata("title").map(|item| item.value.clone()),
artist: doc.mdata("creator").map(|item| item.value.clone()),
description: doc.mdata("description").map(|item| item.value.clone()),
..Default::default()
};
let mut meta = ExtractedMetadata {
title: doc.mdata("title").map(|item| item.value.clone()),
artist: doc.mdata("creator").map(|item| item.value.clone()),
description: doc.mdata("description").map(|item| item.value.clone()),
..Default::default()
};
let mut book_meta = crate::model::ExtractedBookMetadata::default();
let mut book_meta = crate::model::ExtractedBookMetadata::default();
// Extract basic metadata
if let Some(lang) = doc.mdata("language") {
book_meta.language = Some(lang.value.clone());
// Extract basic metadata
if let Some(lang) = doc.mdata("language") {
book_meta.language = Some(lang.value.clone());
}
if let Some(publisher) = doc.mdata("publisher") {
book_meta.publisher = Some(publisher.value.clone());
}
if let Some(date) = doc.mdata("date") {
// Try to parse as YYYY-MM-DD or just YYYY
if let Ok(parsed_date) =
chrono::NaiveDate::parse_from_str(&date.value, "%Y-%m-%d")
{
book_meta.publication_date = Some(parsed_date);
} else if let Ok(year) = date.value.parse::<i32>() {
book_meta.publication_date = chrono::NaiveDate::from_ymd_opt(year, 1, 1);
}
if let Some(publisher) = doc.mdata("publisher") {
book_meta.publisher = Some(publisher.value.clone());
}
// Extract authors - iterate through all metadata items
let mut authors = Vec::new();
let mut position = 0;
for item in &doc.metadata {
if item.property == "creator" || item.property == "dc:creator" {
let mut author = crate::model::AuthorInfo::new(item.value.clone());
author.position = position;
position += 1;
// Check for file-as in refinements
if let Some(file_as_ref) = item.refinement("file-as") {
author.file_as = Some(file_as_ref.value.clone());
}
// Check for role in refinements
if let Some(role_ref) = item.refinement("role") {
author.role = role_ref.value.clone();
}
authors.push(author);
}
if let Some(date) = doc.mdata("date") {
// Try to parse as YYYY-MM-DD or just YYYY
if let Ok(parsed_date) = chrono::NaiveDate::parse_from_str(&date.value, "%Y-%m-%d") {
book_meta.publication_date = Some(parsed_date);
} else if let Ok(year) = date.value.parse::<i32>() {
book_meta.publication_date = chrono::NaiveDate::from_ymd_opt(year, 1, 1);
}
}
book_meta.authors = authors;
// Extract ISBNs from identifiers
let mut identifiers = std::collections::HashMap::new();
for item in &doc.metadata {
if item.property == "identifier" || item.property == "dc:identifier" {
// Try to get scheme from refinements
let scheme = item
.refinement("identifier-type")
.map(|r| r.value.to_lowercase());
let id_type = match scheme.as_deref() {
Some("isbn") => "isbn",
Some("isbn-10") | Some("isbn10") => "isbn",
Some("isbn-13") | Some("isbn13") => "isbn13",
Some("asin") => "asin",
Some("doi") => "doi",
_ => {
// Fallback: detect from value pattern
if item.value.len() == 10
|| item.value.len() == 13
|| item.value.contains('-') && item.value.len() < 20
{
"isbn"
} else {
"other"
}
},
};
// Try to normalize ISBN
if (id_type == "isbn" || id_type == "isbn13")
&& let Ok(normalized) = crate::books::normalize_isbn(&item.value)
{
book_meta.isbn13 = Some(normalized.clone());
book_meta.isbn = Some(item.value.clone());
}
identifiers
.entry(id_type.to_string())
.or_insert_with(Vec::new)
.push(item.value.clone());
}
}
book_meta.identifiers = identifiers;
// Extract Calibre series metadata by parsing the content.opf file
// Try common OPF locations
let opf_paths = vec!["OEBPS/content.opf", "content.opf", "OPS/content.opf"];
let mut opf_data = None;
for path in opf_paths {
if let Some(data) = doc.get_resource_str_by_path(path) {
opf_data = Some(data);
break;
}
}
if let Some(opf_content) = opf_data {
// Look for <meta name="calibre:series" content="Series Name"/>
if let Some(series_start) = opf_content.find("name=\"calibre:series\"")
&& let Some(content_start) =
opf_content[series_start..].find("content=\"")
{
let after_content = &opf_content[series_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"') {
book_meta.series_name = Some(after_content[..quote_end].to_string());
}
}
// Extract authors - iterate through all metadata items
let mut authors = Vec::new();
let mut position = 0;
for item in &doc.metadata {
if item.property == "creator" || item.property == "dc:creator" {
let mut author = crate::model::AuthorInfo::new(item.value.clone());
author.position = position;
position += 1;
// Check for file-as in refinements
if let Some(file_as_ref) = item.refinement("file-as") {
author.file_as = Some(file_as_ref.value.clone());
}
// Check for role in refinements
if let Some(role_ref) = item.refinement("role") {
author.role = role_ref.value.clone();
}
authors.push(author);
}
// Look for <meta name="calibre:series_index" content="1.0"/>
if let Some(index_start) = opf_content.find("name=\"calibre:series_index\"")
&& let Some(content_start) = opf_content[index_start..].find("content=\"")
{
let after_content = &opf_content[index_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"')
&& let Ok(index) = after_content[..quote_end].parse::<f64>()
{
book_meta.series_index = Some(index);
}
}
book_meta.authors = authors;
}
// Extract ISBNs from identifiers
let mut identifiers = std::collections::HashMap::new();
for item in &doc.metadata {
if item.property == "identifier" || item.property == "dc:identifier" {
// Try to get scheme from refinements
let scheme = item
.refinement("identifier-type")
.map(|r| r.value.to_lowercase());
// Set format
book_meta.format = Some("epub".to_string());
let id_type = match scheme.as_deref() {
Some("isbn") => "isbn",
Some("isbn-10") | Some("isbn10") => "isbn",
Some("isbn-13") | Some("isbn13") => "isbn13",
Some("asin") => "asin",
Some("doi") => "doi",
_ => {
// Fallback: detect from value pattern
if item.value.len() == 10
|| item.value.len() == 13
|| item.value.contains('-') && item.value.len() < 20
{
"isbn"
} else {
"other"
}
}
};
// Try to normalize ISBN
if (id_type == "isbn" || id_type == "isbn13")
&& let Ok(normalized) = crate::books::normalize_isbn(&item.value)
{
book_meta.isbn13 = Some(normalized.clone());
book_meta.isbn = Some(item.value.clone());
}
identifiers
.entry(id_type.to_string())
.or_insert_with(Vec::new)
.push(item.value.clone());
}
}
book_meta.identifiers = identifiers;
// Extract Calibre series metadata by parsing the content.opf file
// Try common OPF locations
let opf_paths = vec!["OEBPS/content.opf", "content.opf", "OPS/content.opf"];
let mut opf_data = None;
for path in opf_paths {
if let Some(data) = doc.get_resource_str_by_path(path) {
opf_data = Some(data);
break;
}
}
if let Some(opf_content) = opf_data {
// Look for <meta name="calibre:series" content="Series Name"/>
if let Some(series_start) = opf_content.find("name=\"calibre:series\"")
&& let Some(content_start) = opf_content[series_start..].find("content=\"")
{
let after_content = &opf_content[series_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"') {
book_meta.series_name = Some(after_content[..quote_end].to_string());
}
}
// Look for <meta name="calibre:series_index" content="1.0"/>
if let Some(index_start) = opf_content.find("name=\"calibre:series_index\"")
&& let Some(content_start) = opf_content[index_start..].find("content=\"")
{
let after_content = &opf_content[index_start + content_start + 9..];
if let Some(quote_end) = after_content.find('"')
&& let Ok(index) = after_content[..quote_end].parse::<f64>()
{
book_meta.series_index = Some(index);
}
}
}
// Set format
book_meta.format = Some("epub".to_string());
meta.book_metadata = Some(book_meta);
Ok(meta)
meta.book_metadata = Some(book_meta);
Ok(meta)
}
fn extract_djvu(path: &Path) -> Result<ExtractedMetadata> {
// DjVu files contain metadata in SEXPR (S-expression) format within
// ANTa/ANTz chunks, or in the DIRM chunk. We parse the raw bytes to
// extract any metadata fields we can find.
let data = std::fs::read(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("DjVu read: {e}")))?;
// DjVu files contain metadata in SEXPR (S-expression) format within
// ANTa/ANTz chunks, or in the DIRM chunk. We parse the raw bytes to
// extract any metadata fields we can find.
let data = std::fs::read(path)
.map_err(|e| PinakesError::MetadataExtraction(format!("DjVu read: {e}")))?;
let mut meta = ExtractedMetadata::default();
let mut meta = ExtractedMetadata::default();
// DjVu files start with "AT&T" magic followed by FORM:DJVU or FORM:DJVM
if data.len() < 16 {
return Ok(meta);
// DjVu files start with "AT&T" magic followed by FORM:DJVU or FORM:DJVM
if data.len() < 16 {
return Ok(meta);
}
// Search for metadata annotations in the file. DjVu metadata is stored
// as S-expressions like (metadata (key "value") ...) within ANTa chunks.
let content = String::from_utf8_lossy(&data);
// Look for (metadata ...) blocks
if let Some(meta_start) = content.find("(metadata") {
let remainder = &content[meta_start..];
// Extract key-value pairs like (title "Some Title")
extract_djvu_field(remainder, "title", &mut meta.title);
extract_djvu_field(remainder, "author", &mut meta.artist);
let mut desc = None;
extract_djvu_field(remainder, "subject", &mut desc);
if desc.is_none() {
extract_djvu_field(remainder, "description", &mut desc);
}
meta.description = desc;
let mut year_str = None;
extract_djvu_field(remainder, "year", &mut year_str);
if let Some(ref y) = year_str {
meta.year = y.parse().ok();
}
// Search for metadata annotations in the file. DjVu metadata is stored
// as S-expressions like (metadata (key "value") ...) within ANTa chunks.
let content = String::from_utf8_lossy(&data);
// Look for (metadata ...) blocks
if let Some(meta_start) = content.find("(metadata") {
let remainder = &content[meta_start..];
// Extract key-value pairs like (title "Some Title")
extract_djvu_field(remainder, "title", &mut meta.title);
extract_djvu_field(remainder, "author", &mut meta.artist);
let mut desc = None;
extract_djvu_field(remainder, "subject", &mut desc);
if desc.is_none() {
extract_djvu_field(remainder, "description", &mut desc);
}
meta.description = desc;
let mut year_str = None;
extract_djvu_field(remainder, "year", &mut year_str);
if let Some(ref y) = year_str {
meta.year = y.parse().ok();
}
let mut creator = None;
extract_djvu_field(remainder, "creator", &mut creator);
if let Some(c) = creator {
meta.extra.insert("creator".to_string(), c);
}
let mut creator = None;
extract_djvu_field(remainder, "creator", &mut creator);
if let Some(c) = creator {
meta.extra.insert("creator".to_string(), c);
}
}
// Also check for booklet-style metadata that some DjVu encoders write
// outside the metadata SEXPR
if meta.title.is_none()
&& let Some(title_start) = content.find("(bookmarks")
{
let remainder = &content[title_start..];
// First bookmark title is often the document title
if let Some(q1) = remainder.find('"') {
let after_q1 = &remainder[q1 + 1..];
if let Some(q2) = after_q1.find('"') {
let val = &after_q1[..q2];
if !val.is_empty() {
meta.title = Some(val.to_string());
}
}
// Also check for booklet-style metadata that some DjVu encoders write
// outside the metadata SEXPR
if meta.title.is_none()
&& let Some(title_start) = content.find("(bookmarks")
{
let remainder = &content[title_start..];
// First bookmark title is often the document title
if let Some(q1) = remainder.find('"') {
let after_q1 = &remainder[q1 + 1..];
if let Some(q2) = after_q1.find('"') {
let val = &after_q1[..q2];
if !val.is_empty() {
meta.title = Some(val.to_string());
}
}
}
}
Ok(meta)
Ok(meta)
}
fn extract_djvu_field(sexpr: &str, key: &str, out: &mut Option<String>) {
// Look for patterns like (key "value") in the S-expression
let pattern = format!("({key}");
if let Some(start) = sexpr.find(&pattern) {
let remainder = &sexpr[start + pattern.len()..];
// Find the quoted value
if let Some(q1) = remainder.find('"') {
let after_q1 = &remainder[q1 + 1..];
if let Some(q2) = after_q1.find('"') {
let val = &after_q1[..q2];
if !val.is_empty() {
*out = Some(val.to_string());
}
}
// Look for patterns like (key "value") in the S-expression
let pattern = format!("({key}");
if let Some(start) = sexpr.find(&pattern) {
let remainder = &sexpr[start + pattern.len()..];
// Find the quoted value
if let Some(q1) = remainder.find('"') {
let after_q1 = &remainder[q1 + 1..];
if let Some(q2) = after_q1.find('"') {
let val = &after_q1[..q2];
if !val.is_empty() {
*out = Some(val.to_string());
}
}
}
}
}

View file

@ -1,263 +1,297 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::Result,
media_type::{BuiltinMediaType, MediaType},
};
pub struct ImageExtractor;
impl MetadataExtractor for ImageExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let mut meta = ExtractedMetadata::default();
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let mut meta = ExtractedMetadata::default();
let file = std::fs::File::open(path)?;
let mut buf_reader = std::io::BufReader::new(&file);
let file = std::fs::File::open(path)?;
let mut buf_reader = std::io::BufReader::new(&file);
let exif_data = match exif::Reader::new().read_from_container(&mut buf_reader) {
Ok(exif) => exif,
Err(_) => return Ok(meta),
};
let exif_data =
match exif::Reader::new().read_from_container(&mut buf_reader) {
Ok(exif) => exif,
Err(_) => return Ok(meta),
};
// Image dimensions
if let Some(width) = exif_data
.get_field(exif::Tag::PixelXDimension, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::ImageWidth, exif::In::PRIMARY))
&& let Some(w) = field_to_u32(width)
{
meta.extra.insert("width".to_string(), w.to_string());
}
if let Some(height) = exif_data
.get_field(exif::Tag::PixelYDimension, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::ImageLength, exif::In::PRIMARY))
&& let Some(h) = field_to_u32(height)
{
meta.extra.insert("height".to_string(), h.to_string());
}
// Camera make and model - set both in top-level fields and extra
if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY) {
let val = make.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_make = Some(val.clone());
meta.extra.insert("camera_make".to_string(), val);
}
}
if let Some(model) = exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY) {
let val = model.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_model = Some(val.clone());
meta.extra.insert("camera_model".to_string(), val);
}
}
// Date taken - parse EXIF date format (YYYY:MM:DD HH:MM:SS)
if let Some(date) = exif_data
.get_field(exif::Tag::DateTimeOriginal, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::DateTime, exif::In::PRIMARY))
{
let val = date.display_value().to_string();
if !val.is_empty() {
// Try parsing EXIF format: "YYYY:MM:DD HH:MM:SS"
if let Some(dt) = parse_exif_datetime(&val) {
meta.date_taken = Some(dt);
}
meta.extra.insert("date_taken".to_string(), val);
}
}
// GPS coordinates - set both in top-level fields and extra
if let (Some(lat), Some(lat_ref), Some(lon), Some(lon_ref)) = (
exif_data.get_field(exif::Tag::GPSLatitude, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLatitudeRef, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLongitude, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLongitudeRef, exif::In::PRIMARY),
) && let (Some(lat_val), Some(lon_val)) =
(dms_to_decimal(lat, lat_ref), dms_to_decimal(lon, lon_ref))
{
meta.latitude = Some(lat_val);
meta.longitude = Some(lon_val);
meta.extra
.insert("gps_latitude".to_string(), format!("{lat_val:.6}"));
meta.extra
.insert("gps_longitude".to_string(), format!("{lon_val:.6}"));
}
// Exposure info
if let Some(iso) =
exif_data.get_field(exif::Tag::PhotographicSensitivity, exif::In::PRIMARY)
{
let val = iso.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("iso".to_string(), val);
}
}
if let Some(exposure) = exif_data.get_field(exif::Tag::ExposureTime, exif::In::PRIMARY) {
let val = exposure.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("exposure_time".to_string(), val);
}
}
if let Some(aperture) = exif_data.get_field(exif::Tag::FNumber, exif::In::PRIMARY) {
let val = aperture.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("f_number".to_string(), val);
}
}
if let Some(focal) = exif_data.get_field(exif::Tag::FocalLength, exif::In::PRIMARY) {
let val = focal.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("focal_length".to_string(), val);
}
}
// Lens model
if let Some(lens) = exif_data.get_field(exif::Tag::LensModel, exif::In::PRIMARY) {
let val = lens.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.extra
.insert("lens_model".to_string(), val.trim_matches('"').to_string());
}
}
// Flash
if let Some(flash) = exif_data.get_field(exif::Tag::Flash, exif::In::PRIMARY) {
let val = flash.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("flash".to_string(), val);
}
}
// Orientation
if let Some(orientation) = exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY) {
let val = orientation.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("orientation".to_string(), val);
}
}
// Software
if let Some(software) = exif_data.get_field(exif::Tag::Software, exif::In::PRIMARY) {
let val = software.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("software".to_string(), val);
}
}
// Image description as title
if let Some(desc) = exif_data.get_field(exif::Tag::ImageDescription, exif::In::PRIMARY) {
let val = desc.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.title = Some(val.trim_matches('"').to_string());
}
}
// Artist
if let Some(artist) = exif_data.get_field(exif::Tag::Artist, exif::In::PRIMARY) {
let val = artist.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.artist = Some(val.trim_matches('"').to_string());
}
}
// Copyright as description
if let Some(copyright) = exif_data.get_field(exif::Tag::Copyright, exif::In::PRIMARY) {
let val = copyright.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.description = Some(val.trim_matches('"').to_string());
}
}
Ok(meta)
// Image dimensions
if let Some(width) = exif_data
.get_field(exif::Tag::PixelXDimension, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::ImageWidth, exif::In::PRIMARY))
&& let Some(w) = field_to_u32(width)
{
meta.extra.insert("width".to_string(), w.to_string());
}
if let Some(height) = exif_data
.get_field(exif::Tag::PixelYDimension, exif::In::PRIMARY)
.or_else(|| {
exif_data.get_field(exif::Tag::ImageLength, exif::In::PRIMARY)
})
&& let Some(h) = field_to_u32(height)
{
meta.extra.insert("height".to_string(), h.to_string());
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Jpeg),
MediaType::Builtin(BuiltinMediaType::Png),
MediaType::Builtin(BuiltinMediaType::Gif),
MediaType::Builtin(BuiltinMediaType::Webp),
MediaType::Builtin(BuiltinMediaType::Avif),
MediaType::Builtin(BuiltinMediaType::Tiff),
MediaType::Builtin(BuiltinMediaType::Bmp),
// RAW formats (TIFF-based, kamadak-exif handles these)
MediaType::Builtin(BuiltinMediaType::Cr2),
MediaType::Builtin(BuiltinMediaType::Nef),
MediaType::Builtin(BuiltinMediaType::Arw),
MediaType::Builtin(BuiltinMediaType::Dng),
MediaType::Builtin(BuiltinMediaType::Orf),
MediaType::Builtin(BuiltinMediaType::Rw2),
// HEIC
MediaType::Builtin(BuiltinMediaType::Heic),
]
// Camera make and model - set both in top-level fields and extra
if let Some(make) = exif_data.get_field(exif::Tag::Make, exif::In::PRIMARY)
{
let val = make.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_make = Some(val.clone());
meta.extra.insert("camera_make".to_string(), val);
}
}
if let Some(model) =
exif_data.get_field(exif::Tag::Model, exif::In::PRIMARY)
{
let val = model.display_value().to_string().trim().to_string();
if !val.is_empty() {
meta.camera_model = Some(val.clone());
meta.extra.insert("camera_model".to_string(), val);
}
}
// Date taken - parse EXIF date format (YYYY:MM:DD HH:MM:SS)
if let Some(date) = exif_data
.get_field(exif::Tag::DateTimeOriginal, exif::In::PRIMARY)
.or_else(|| exif_data.get_field(exif::Tag::DateTime, exif::In::PRIMARY))
{
let val = date.display_value().to_string();
if !val.is_empty() {
// Try parsing EXIF format: "YYYY:MM:DD HH:MM:SS"
if let Some(dt) = parse_exif_datetime(&val) {
meta.date_taken = Some(dt);
}
meta.extra.insert("date_taken".to_string(), val);
}
}
// GPS coordinates - set both in top-level fields and extra
if let (Some(lat), Some(lat_ref), Some(lon), Some(lon_ref)) = (
exif_data.get_field(exif::Tag::GPSLatitude, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLatitudeRef, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLongitude, exif::In::PRIMARY),
exif_data.get_field(exif::Tag::GPSLongitudeRef, exif::In::PRIMARY),
) && let (Some(lat_val), Some(lon_val)) =
(dms_to_decimal(lat, lat_ref), dms_to_decimal(lon, lon_ref))
{
meta.latitude = Some(lat_val);
meta.longitude = Some(lon_val);
meta
.extra
.insert("gps_latitude".to_string(), format!("{lat_val:.6}"));
meta
.extra
.insert("gps_longitude".to_string(), format!("{lon_val:.6}"));
}
// Exposure info
if let Some(iso) =
exif_data.get_field(exif::Tag::PhotographicSensitivity, exif::In::PRIMARY)
{
let val = iso.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("iso".to_string(), val);
}
}
if let Some(exposure) =
exif_data.get_field(exif::Tag::ExposureTime, exif::In::PRIMARY)
{
let val = exposure.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("exposure_time".to_string(), val);
}
}
if let Some(aperture) =
exif_data.get_field(exif::Tag::FNumber, exif::In::PRIMARY)
{
let val = aperture.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("f_number".to_string(), val);
}
}
if let Some(focal) =
exif_data.get_field(exif::Tag::FocalLength, exif::In::PRIMARY)
{
let val = focal.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("focal_length".to_string(), val);
}
}
// Lens model
if let Some(lens) =
exif_data.get_field(exif::Tag::LensModel, exif::In::PRIMARY)
{
let val = lens.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta
.extra
.insert("lens_model".to_string(), val.trim_matches('"').to_string());
}
}
// Flash
if let Some(flash) =
exif_data.get_field(exif::Tag::Flash, exif::In::PRIMARY)
{
let val = flash.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("flash".to_string(), val);
}
}
// Orientation
if let Some(orientation) =
exif_data.get_field(exif::Tag::Orientation, exif::In::PRIMARY)
{
let val = orientation.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("orientation".to_string(), val);
}
}
// Software
if let Some(software) =
exif_data.get_field(exif::Tag::Software, exif::In::PRIMARY)
{
let val = software.display_value().to_string();
if !val.is_empty() {
meta.extra.insert("software".to_string(), val);
}
}
// Image description as title
if let Some(desc) =
exif_data.get_field(exif::Tag::ImageDescription, exif::In::PRIMARY)
{
let val = desc.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.title = Some(val.trim_matches('"').to_string());
}
}
// Artist
if let Some(artist) =
exif_data.get_field(exif::Tag::Artist, exif::In::PRIMARY)
{
let val = artist.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.artist = Some(val.trim_matches('"').to_string());
}
}
// Copyright as description
if let Some(copyright) =
exif_data.get_field(exif::Tag::Copyright, exif::In::PRIMARY)
{
let val = copyright.display_value().to_string();
if !val.is_empty() && val != "\"\"" {
meta.description = Some(val.trim_matches('"').to_string());
}
}
Ok(meta)
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Jpeg),
MediaType::Builtin(BuiltinMediaType::Png),
MediaType::Builtin(BuiltinMediaType::Gif),
MediaType::Builtin(BuiltinMediaType::Webp),
MediaType::Builtin(BuiltinMediaType::Avif),
MediaType::Builtin(BuiltinMediaType::Tiff),
MediaType::Builtin(BuiltinMediaType::Bmp),
// RAW formats (TIFF-based, kamadak-exif handles these)
MediaType::Builtin(BuiltinMediaType::Cr2),
MediaType::Builtin(BuiltinMediaType::Nef),
MediaType::Builtin(BuiltinMediaType::Arw),
MediaType::Builtin(BuiltinMediaType::Dng),
MediaType::Builtin(BuiltinMediaType::Orf),
MediaType::Builtin(BuiltinMediaType::Rw2),
// HEIC
MediaType::Builtin(BuiltinMediaType::Heic),
]
}
}
/// Extract the first numeric value of an EXIF field as `u32`.
///
/// Handles both `Long` (u32) and `Short` (u16) EXIF value types; any other
/// value type yields `None`.
fn field_to_u32(field: &exif::Field) -> Option<u32> {
  // Defect fixed: the body previously contained the same `match` twice
  // (interleaved diff duplication); only one is needed.
  match &field.value {
    exif::Value::Long(v) => v.first().copied(),
    exif::Value::Short(v) => v.first().map(|&x| u32::from(x)),
    _ => None,
  }
}
fn dms_to_decimal(dms_field: &exif::Field, ref_field: &exif::Field) -> Option<f64> {
if let exif::Value::Rational(ref rationals) = dms_field.value
&& rationals.len() >= 3
{
let degrees = rationals[0].to_f64();
let minutes = rationals[1].to_f64();
let seconds = rationals[2].to_f64();
let mut decimal = degrees + minutes / 60.0 + seconds / 3600.0;
fn dms_to_decimal(
dms_field: &exif::Field,
ref_field: &exif::Field,
) -> Option<f64> {
if let exif::Value::Rational(ref rationals) = dms_field.value
&& rationals.len() >= 3
{
let degrees = rationals[0].to_f64();
let minutes = rationals[1].to_f64();
let seconds = rationals[2].to_f64();
let mut decimal = degrees + minutes / 60.0 + seconds / 3600.0;
let ref_str = ref_field.display_value().to_string();
if ref_str.contains('S') || ref_str.contains('W') {
decimal = -decimal;
}
return Some(decimal);
let ref_str = ref_field.display_value().to_string();
if ref_str.contains('S') || ref_str.contains('W') {
decimal = -decimal;
}
None
return Some(decimal);
}
None
}
/// Parse an EXIF datetime string into a UTC timestamp.
///
/// EXIF uses `"YYYY:MM:DD HH:MM:SS"`; an ISO-like `"YYYY-MM-DD HH:MM:SS"` is
/// accepted as a fallback. Surrounding whitespace and quotes are stripped.
/// Returns `None` when neither format matches.
///
/// NOTE(review): EXIF timestamps carry no timezone; treating them as UTC is an
/// assumption — confirm this matches how `date_taken` is displayed.
fn parse_exif_datetime(s: &str) -> Option<chrono::DateTime<chrono::Utc>> {
  use chrono::NaiveDateTime;

  // EXIF string values are often quoted by the display formatter.
  let s = s.trim().trim_matches('"');

  // Try standard EXIF format first.
  if let Ok(dt) = NaiveDateTime::parse_from_str(s, "%Y:%m:%d %H:%M:%S") {
    return Some(dt.and_utc());
  }

  // Fall back to the ISO-like variant some writers emit.
  if let Ok(dt) = NaiveDateTime::parse_from_str(s, "%Y-%m-%d %H:%M:%S") {
    return Some(dt.and_utc());
  }

  None
}
/// Generate a perceptual hash for an image file.
///
/// Uses the `DoubleGradient` hash algorithm for robust similarity detection.
/// Returns a base64-encoded hash string, or `None` if the image cannot be
/// opened or decoded.
pub fn generate_perceptual_hash(path: &Path) -> Option<String> {
  use image_hasher::{HashAlg, HasherConfig};

  // Open and decode the image; unsupported or corrupt files yield None.
  let img = image::open(path).ok()?;

  // DoubleGradient is a good trade-off for finding visually similar images.
  let hasher = HasherConfig::new()
    .hash_alg(HashAlg::DoubleGradient)
    .hash_size(8, 8) // 64-bit hash
    .to_hasher();

  let hash = hasher.hash_image(&img);

  // Base64 keeps the stored hash compact. (Previous doc comment claimed
  // hex encoding; the code has always emitted base64.)
  Some(hash.to_base64())
}

View file

@ -1,43 +1,45 @@
use std::path::Path;
use crate::error::Result;
use crate::media_type::{BuiltinMediaType, MediaType};
use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
error::Result,
media_type::{BuiltinMediaType, MediaType},
};
pub struct MarkdownExtractor;
impl MetadataExtractor for MarkdownExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let content = std::fs::read_to_string(path)?;
let parsed = gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(&content);
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
let content = std::fs::read_to_string(path)?;
let parsed =
gray_matter::Matter::<gray_matter::engine::YAML>::new().parse(&content);
let mut meta = ExtractedMetadata::default();
let mut meta = ExtractedMetadata::default();
if let Some(data) = parsed.ok().and_then(|p| p.data)
&& let gray_matter::Pod::Hash(map) = data
{
if let Some(gray_matter::Pod::String(title)) = map.get("title") {
meta.title = Some(title.clone());
}
if let Some(gray_matter::Pod::String(author)) = map.get("author") {
meta.artist = Some(author.clone());
}
if let Some(gray_matter::Pod::String(desc)) = map.get("description") {
meta.description = Some(desc.clone());
}
if let Some(gray_matter::Pod::String(date)) = map.get("date") {
meta.extra.insert("date".to_string(), date.clone());
}
}
Ok(meta)
if let Some(data) = parsed.ok().and_then(|p| p.data)
&& let gray_matter::Pod::Hash(map) = data
{
if let Some(gray_matter::Pod::String(title)) = map.get("title") {
meta.title = Some(title.clone());
}
if let Some(gray_matter::Pod::String(author)) = map.get("author") {
meta.artist = Some(author.clone());
}
if let Some(gray_matter::Pod::String(desc)) = map.get("description") {
meta.description = Some(desc.clone());
}
if let Some(gray_matter::Pod::String(date)) = map.get("date") {
meta.extra.insert("date".to_string(), date.clone());
}
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Markdown),
MediaType::Builtin(BuiltinMediaType::PlainText),
]
}
Ok(meta)
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Markdown),
MediaType::Builtin(BuiltinMediaType::PlainText),
]
}
}

View file

@ -4,53 +4,57 @@ pub mod image;
pub mod markdown;
pub mod video;
use std::collections::HashMap;
use std::path::Path;
use std::{collections::HashMap, path::Path};
use crate::error::Result;
use crate::media_type::MediaType;
use crate::model::ExtractedBookMetadata;
use crate::{
error::Result,
media_type::MediaType,
model::ExtractedBookMetadata,
};
#[derive(Debug, Clone, Default)]
pub struct ExtractedMetadata {
pub title: Option<String>,
pub artist: Option<String>,
pub album: Option<String>,
pub genre: Option<String>,
pub year: Option<i32>,
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub extra: HashMap<String, String>,
pub book_metadata: Option<ExtractedBookMetadata>,
pub title: Option<String>,
pub artist: Option<String>,
pub album: Option<String>,
pub genre: Option<String>,
pub year: Option<i32>,
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub extra: HashMap<String, String>,
pub book_metadata: Option<ExtractedBookMetadata>,
// Photo-specific metadata
pub date_taken: Option<chrono::DateTime<chrono::Utc>>,
pub latitude: Option<f64>,
pub longitude: Option<f64>,
pub camera_make: Option<String>,
pub camera_model: Option<String>,
pub rating: Option<i32>,
// Photo-specific metadata
pub date_taken: Option<chrono::DateTime<chrono::Utc>>,
pub latitude: Option<f64>,
pub longitude: Option<f64>,
pub camera_make: Option<String>,
pub camera_model: Option<String>,
pub rating: Option<i32>,
}
/// A pluggable metadata extractor for one or more media types.
pub trait MetadataExtractor: Send + Sync {
  /// Extract metadata from the file at `path`.
  fn extract(&self, path: &Path) -> Result<ExtractedMetadata>;

  /// The media types this extractor can handle.
  fn supported_types(&self) -> Vec<MediaType>;
}
pub fn extract_metadata(path: &Path, media_type: MediaType) -> Result<ExtractedMetadata> {
let extractors: Vec<Box<dyn MetadataExtractor>> = vec![
Box::new(audio::AudioExtractor),
Box::new(document::DocumentExtractor),
Box::new(video::VideoExtractor),
Box::new(markdown::MarkdownExtractor),
Box::new(image::ImageExtractor),
];
pub fn extract_metadata(
path: &Path,
media_type: MediaType,
) -> Result<ExtractedMetadata> {
let extractors: Vec<Box<dyn MetadataExtractor>> = vec![
Box::new(audio::AudioExtractor),
Box::new(document::DocumentExtractor),
Box::new(video::VideoExtractor),
Box::new(markdown::MarkdownExtractor),
Box::new(image::ImageExtractor),
];
for extractor in &extractors {
if extractor.supported_types().contains(&media_type) {
return extractor.extract(path);
}
for extractor in &extractors {
if extractor.supported_types().contains(&media_type) {
return extractor.extract(path);
}
}
Ok(ExtractedMetadata::default())
Ok(ExtractedMetadata::default())
}

View file

@ -1,118 +1,128 @@
use std::path::Path;

use super::{ExtractedMetadata, MetadataExtractor};
use crate::{
  error::{PinakesError, Result},
  media_type::{BuiltinMediaType, MediaType},
};
pub struct VideoExtractor;
impl MetadataExtractor for VideoExtractor {
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Builtin(BuiltinMediaType::Mkv)) => extract_mkv(path),
Some(MediaType::Builtin(BuiltinMediaType::Mp4)) => extract_mp4(path),
_ => Ok(ExtractedMetadata::default()),
}
fn extract(&self, path: &Path) -> Result<ExtractedMetadata> {
match MediaType::from_path(path) {
Some(MediaType::Builtin(BuiltinMediaType::Mkv)) => extract_mkv(path),
Some(MediaType::Builtin(BuiltinMediaType::Mp4)) => extract_mp4(path),
_ => Ok(ExtractedMetadata::default()),
}
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp4),
MediaType::Builtin(BuiltinMediaType::Mkv),
]
}
fn supported_types(&self) -> Vec<MediaType> {
vec![
MediaType::Builtin(BuiltinMediaType::Mp4),
MediaType::Builtin(BuiltinMediaType::Mkv),
]
}
}
fn extract_mkv(path: &Path) -> Result<ExtractedMetadata> {
let file = std::fs::File::open(path)?;
let mkv = matroska::Matroska::open(file)
.map_err(|e| PinakesError::MetadataExtraction(format!("MKV parse: {e}")))?;
let file = std::fs::File::open(path)?;
let mkv = matroska::Matroska::open(file)
.map_err(|e| PinakesError::MetadataExtraction(format!("MKV parse: {e}")))?;
let mut meta = ExtractedMetadata {
title: mkv.info.title.clone(),
duration_secs: mkv.info.duration.map(|dur| dur.as_secs_f64()),
..Default::default()
};
let mut meta = ExtractedMetadata {
title: mkv.info.title.clone(),
duration_secs: mkv.info.duration.map(|dur| dur.as_secs_f64()),
..Default::default()
};
// Extract resolution and codec info from tracks
for track in &mkv.tracks {
match &track.settings {
matroska::Settings::Video(v) => {
meta.extra.insert(
"resolution".to_string(),
format!("{}x{}", v.pixel_width, v.pixel_height),
);
if !track.codec_id.is_empty() {
meta.extra
.insert("video_codec".to_string(), track.codec_id.clone());
}
}
matroska::Settings::Audio(a) => {
meta.extra.insert(
"sample_rate".to_string(),
format!("{} Hz", a.sample_rate as u32),
);
meta.extra
.insert("channels".to_string(), a.channels.to_string());
if !track.codec_id.is_empty() {
meta.extra
.insert("audio_codec".to_string(), track.codec_id.clone());
}
}
_ => {}
// Extract resolution and codec info from tracks
for track in &mkv.tracks {
match &track.settings {
matroska::Settings::Video(v) => {
meta.extra.insert(
"resolution".to_string(),
format!("{}x{}", v.pixel_width, v.pixel_height),
);
if !track.codec_id.is_empty() {
meta
.extra
.insert("video_codec".to_string(), track.codec_id.clone());
}
},
matroska::Settings::Audio(a) => {
meta.extra.insert(
"sample_rate".to_string(),
format!("{} Hz", a.sample_rate as u32),
);
meta
.extra
.insert("channels".to_string(), a.channels.to_string());
if !track.codec_id.is_empty() {
meta
.extra
.insert("audio_codec".to_string(), track.codec_id.clone());
}
},
_ => {},
}
}
Ok(meta)
Ok(meta)
}
/// Extract MP4 metadata (tags plus audio stream properties) via lofty.
fn extract_mp4(path: &Path) -> Result<ExtractedMetadata> {
  use lofty::{
    file::{AudioFile, TaggedFileExt},
    tag::Accessor,
  };

  let tagged_file = lofty::read_from_path(path).map_err(|e| {
    PinakesError::MetadataExtraction(format!("MP4 metadata: {e}"))
  })?;

  let mut meta = ExtractedMetadata::default();

  // Prefer the container's primary tag, falling back to any tag present.
  if let Some(tag) = tagged_file
    .primary_tag()
    .or_else(|| tagged_file.first_tag())
  {
    meta.title = tag.title().map(|s| s.to_string());
    meta.artist = tag.artist().map(|s| s.to_string());
    meta.album = tag.album().map(|s| s.to_string());
    meta.genre = tag.genre().map(|s| s.to_string());
    meta.year = tag.date().map(|ts| ts.year as i32);
  }

  // Stream properties: skip a zero duration, which lofty reports when the
  // duration is unknown.
  let properties = tagged_file.properties();
  let duration = properties.duration();
  if !duration.is_zero() {
    meta.duration_secs = Some(duration.as_secs_f64());
  }

  if let Some(bitrate) = properties.audio_bitrate() {
    meta
      .extra
      .insert("audio_bitrate".to_string(), format!("{bitrate} kbps"));
  }
  if let Some(sample_rate) = properties.sample_rate() {
    meta
      .extra
      .insert("sample_rate".to_string(), format!("{sample_rate} Hz"));
  }
  if let Some(channels) = properties.channels() {
    meta
      .extra
      .insert("channels".to_string(), channels.to_string());
  }

  Ok(meta)
}