chore: bump deps; fix clippy lints & cleanup

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I4c4815ad145650a07f108614034d2e996a6a6964
This commit is contained in:
raf 2026-03-02 17:05:28 +03:00
commit cd1161ee5d
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
41 changed files with 1283 additions and 740 deletions

View file

@ -438,13 +438,123 @@ async fn main() -> Result<()> {
}
},
JobKind::Enrich { media_ids } => {
// Enrichment job placeholder
use pinakes_core::{
enrichment::{
MetadataEnricher,
books::BookEnricher,
lastfm::LastFmEnricher,
musicbrainz::MusicBrainzEnricher,
tmdb::TmdbEnricher,
},
media_type::MediaCategory,
};
let enrich_cfg = &config.enrichment;
let mut enrichers: Vec<Box<dyn MetadataEnricher>> = Vec::new();
if enrich_cfg.enabled {
if enrich_cfg.sources.musicbrainz.enabled {
enrichers.push(Box::new(MusicBrainzEnricher::new()));
}
if let (true, Some(key)) = (
enrich_cfg.sources.tmdb.enabled,
enrich_cfg.sources.tmdb.api_key.clone(),
) {
enrichers.push(Box::new(TmdbEnricher::new(key)));
}
if let (true, Some(key)) = (
enrich_cfg.sources.lastfm.enabled,
enrich_cfg.sources.lastfm.api_key.clone(),
) {
enrichers.push(Box::new(LastFmEnricher::new(key)));
}
// BookEnricher handles documents/epub. No dedicated config
// key is required; the Google Books key is optional.
enrichers.push(Box::new(BookEnricher::new(None)));
}
let total = media_ids.len();
let mut enriched: usize = 0;
let mut errors: usize = 0;
'items: for media_id in media_ids {
if cancel.is_cancelled() {
break 'items;
}
let item = match storage.get_media(media_id).await {
Ok(i) => i,
Err(e) => {
tracing::warn!(
%media_id,
error = %e,
"enrich: failed to fetch media item"
);
errors += 1;
continue;
},
};
// Select enrichers appropriate for this media category.
let category = item.media_type.category();
for enricher in &enrichers {
let source = enricher.source();
use pinakes_core::enrichment::EnrichmentSourceType;
let applicable = match source {
EnrichmentSourceType::MusicBrainz
| EnrichmentSourceType::LastFm => {
category == MediaCategory::Audio
},
EnrichmentSourceType::Tmdb => {
category == MediaCategory::Video
},
EnrichmentSourceType::OpenLibrary
| EnrichmentSourceType::GoogleBooks => {
category == MediaCategory::Document
},
};
if !applicable {
continue;
}
match enricher.enrich(&item).await {
Ok(Some(meta)) => {
if let Err(e) = storage.store_external_metadata(&meta).await
{
tracing::warn!(
%media_id,
%source,
error = %e,
"enrich: failed to store external metadata"
);
errors += 1;
} else {
enriched += 1;
}
},
Ok(None) => {},
Err(e) => {
tracing::warn!(
%media_id,
%source,
error = %e,
"enrich: enricher returned error"
);
errors += 1;
},
}
}
}
JobQueue::complete(
&jobs,
job_id,
serde_json::json!({"media_ids": media_ids.len(), "status": "not_implemented"}),
)
.await;
&jobs,
job_id,
serde_json::json!({
"total": total,
"enriched": enriched,
"errors": errors,
}),
)
.await;
},
JobKind::CleanupAnalytics => {
let before = chrono::Utc::now() - chrono::Duration::days(90);
@ -460,6 +570,27 @@ async fn main() -> Result<()> {
Err(e) => JobQueue::fail(&jobs, job_id, e.to_string()).await,
}
},
// Permanently delete trashed items older than the retention window.
JobKind::TrashPurge => {
    let retention_days = config.trash.retention_days;
    // Anything trashed before this cutoff is eligible for deletion.
    let cutoff =
        chrono::Utc::now() - chrono::Duration::days(retention_days as i64);
    match storage.purge_old_trash(cutoff).await {
        Err(e) => {
            tracing::error!(error = %e, "failed to purge trash");
            JobQueue::fail(&jobs, job_id, e.to_string()).await;
        },
        Ok(count) => {
            tracing::info!(count, "purged {} items from trash", count);
            let summary = serde_json::json!({
                "purged": count,
                "retention_days": retention_days
            });
            JobQueue::complete(&jobs, job_id, summary).await;
        },
    }
},
};
drop(cancel);
})