pinakes: import in parallel; various UI improvements

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I1eb47cd79cd4145c56af966f6756fe1d6a6a6964
This commit is contained in:
raf 2026-02-03 10:31:20 +03:00
commit 116fe7b059
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
42 changed files with 4316 additions and 316 deletions

View file

@ -46,6 +46,8 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn get_media(&self, id: MediaId) -> Result<MediaItem>;
async fn count_media(&self) -> Result<u64>;
async fn get_media_by_hash(&self, hash: &ContentHash) -> Result<Option<MediaItem>>;
/// Get a media item by its file path (used for incremental scanning)
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>>;
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>>;
async fn update_media(&self, item: &MediaItem) -> Result<()>;
async fn delete_media(&self, id: MediaId) -> Result<()>;
@ -232,6 +234,59 @@ pub trait StorageBackend: Send + Sync + 'static {
root_path: &str,
) -> Result<()>;
/// Check if a user has access to a specific media item based on library permissions.
/// Returns the permission level if access is granted, or an error if denied.
/// Admin users (role=admin) bypass library checks and have full access.
async fn check_library_access(
    &self,
    user_id: crate::users::UserId,
    media_id: crate::model::MediaId,
) -> Result<crate::users::LibraryPermission> {
    // Default implementation: get the media item's path and check against user's library access
    let media = self.get_media(media_id).await?;
    let path_str = media.path.to_string_lossy().to_string();
    // Get user's library permissions
    let libraries = self.get_user_libraries(user_id).await?;
    // If user has no library restrictions, they have no access (unless they're admin)
    // This default impl requires at least one matching library permission
    //
    // NOTE(review): the doc comment above says admins bypass library checks, but this
    // default body performs no role check — presumably a concrete backend overrides
    // this method (or get_user_libraries grants admins every root); confirm.
    //
    // NOTE(review): plain string starts_with also matches sibling directories that
    // share a prefix (e.g. "/media/music2" vs root "/media/music") — verify that
    // library root_path values are normalized (trailing separator) upstream.
    for lib in &libraries {
        if path_str.starts_with(&lib.root_path) {
            // First matching library wins; permission level comes from that grant.
            return Ok(lib.permission);
        }
    }
    // No matching grant: deny with an explicit authorization error rather than Ok.
    Err(crate::error::PinakesError::Authorization(format!(
        "user {} has no access to media {}",
        user_id, media_id
    )))
}
/// Check if a user has at least read access to a media item.
///
/// Any failure from the underlying access check — including a plain
/// "no access" authorization error — is reported as `false` rather than
/// propagated to the caller.
async fn has_media_read_access(
    &self,
    user_id: crate::users::UserId,
    media_id: crate::model::MediaId,
) -> Result<bool> {
    let access = self.check_library_access(user_id, media_id).await;
    Ok(access.map(|perm| perm.can_read()).unwrap_or(false))
}
/// Check if a user has write access to a media item.
///
/// Errors from the underlying access check (including ordinary
/// authorization denials) collapse into `false`; the method itself
/// never fails.
async fn has_media_write_access(
    &self,
    user_id: crate::users::UserId,
    media_id: crate::model::MediaId,
) -> Result<bool> {
    Ok(matches!(
        self.check_library_access(user_id, media_id).await,
        Ok(perm) if perm.can_write()
    ))
}
// ===== Ratings =====
async fn rate_media(
&self,

View file

@ -114,6 +114,7 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
.get::<_, Option<String>>("thumbnail_path")
.map(PathBuf::from),
custom_fields: HashMap::new(),
file_mtime: row.get("file_mtime"),
created_at: row.get("created_at"),
updated_at: row.get("updated_at"),
})
@ -198,11 +199,61 @@ fn build_search_inner(
if text.is_empty() {
return Ok("TRUE".to_string());
}
let idx = *offset;
// Combine FTS with trigram similarity and ILIKE for comprehensive fuzzy matching
// This allows partial matches like "mus" -> "music"
let idx_fts = *offset;
*offset += 1;
let idx_prefix = *offset;
*offset += 1;
let idx_ilike = *offset;
*offset += 1;
let idx_sim_title = *offset;
*offset += 1;
let idx_sim_artist = *offset;
*offset += 1;
let idx_sim_album = *offset;
*offset += 1;
let idx_sim_filename = *offset;
*offset += 1;
// Sanitize for tsquery prefix matching
let sanitized = text.replace(['&', '|', '!', '(', ')', ':', '*', '\\', '\''], "");
let prefix_query = if sanitized.contains(' ') {
// For multi-word, join with & and add :* to last word
let words: Vec<&str> = sanitized.split_whitespace().collect();
if let Some((last, rest)) = words.split_last() {
let prefix_parts: Vec<String> = rest.iter().map(|w| w.to_string()).collect();
if prefix_parts.is_empty() {
format!("{}:*", last)
} else {
format!("{} & {}:*", prefix_parts.join(" & "), last)
}
} else {
format!("{}:*", sanitized)
}
} else {
format!("{}:*", sanitized)
};
params.push(Box::new(text.clone()));
params.push(Box::new(prefix_query));
params.push(Box::new(format!("%{}%", text)));
params.push(Box::new(text.clone()));
params.push(Box::new(text.clone()));
params.push(Box::new(text.clone()));
params.push(Box::new(text.clone()));
Ok(format!(
"search_vector @@ plainto_tsquery('english', ${idx})"
"(\
search_vector @@ plainto_tsquery('english', ${idx_fts}) OR \
search_vector @@ to_tsquery('english', ${idx_prefix}) OR \
LOWER(COALESCE(title, '')) LIKE LOWER(${idx_ilike}) OR \
LOWER(COALESCE(file_name, '')) LIKE LOWER(${idx_ilike}) OR \
similarity(COALESCE(title, ''), ${idx_sim_title}) > 0.3 OR \
similarity(COALESCE(artist, ''), ${idx_sim_artist}) > 0.3 OR \
similarity(COALESCE(album, ''), ${idx_sim_album}) > 0.3 OR \
similarity(COALESCE(file_name, ''), ${idx_sim_filename}) > 0.25\
)"
))
}
SearchQuery::Prefix(term) => {
@ -214,14 +265,31 @@ fn build_search_inner(
Ok(format!("search_vector @@ to_tsquery('english', ${idx})"))
}
SearchQuery::Fuzzy(term) => {
// Use trigram similarity on multiple fields
let idx_title = *offset;
*offset += 1;
let idx_artist = *offset;
*offset += 1;
let idx_album = *offset;
*offset += 1;
let idx_filename = *offset;
*offset += 1;
let idx_ilike = *offset;
*offset += 1;
params.push(Box::new(term.clone()));
params.push(Box::new(term.clone()));
params.push(Box::new(term.clone()));
params.push(Box::new(term.clone()));
params.push(Box::new(format!("%{}%", term)));
Ok(format!(
"(similarity(COALESCE(title, ''), ${idx_title}) > 0.3 OR similarity(COALESCE(artist, ''), ${idx_artist}) > 0.3)"
"(\
similarity(COALESCE(title, ''), ${idx_title}) > 0.3 OR \
similarity(COALESCE(artist, ''), ${idx_artist}) > 0.3 OR \
similarity(COALESCE(album, ''), ${idx_album}) > 0.3 OR \
similarity(COALESCE(file_name, ''), ${idx_filename}) > 0.25 OR \
LOWER(COALESCE(title, '')) LIKE LOWER(${idx_ilike}) OR \
LOWER(COALESCE(file_name, '')) LIKE LOWER(${idx_ilike})\
)"
))
}
SearchQuery::FieldMatch { field, value } => {
@ -277,6 +345,86 @@ fn build_search_inner(
let frag = build_search_inner(inner, offset, params, type_filters, tag_filters)?;
Ok(format!("NOT ({frag})"))
}
SearchQuery::RangeQuery { field, start, end } => {
let col = match field.as_str() {
"year" => "year",
"size" | "file_size" => "file_size",
"duration" => "duration_secs",
_ => return Ok("TRUE".to_string()), // Unknown field, ignore
};
match (start, end) {
(Some(s), Some(e)) => {
let idx_start = *offset;
*offset += 1;
let idx_end = *offset;
*offset += 1;
params.push(Box::new(*s));
params.push(Box::new(*e));
Ok(format!("({col} >= ${idx_start} AND {col} <= ${idx_end})"))
}
(Some(s), None) => {
let idx = *offset;
*offset += 1;
params.push(Box::new(*s));
Ok(format!("{col} >= ${idx}"))
}
(None, Some(e)) => {
let idx = *offset;
*offset += 1;
params.push(Box::new(*e));
Ok(format!("{col} <= ${idx}"))
}
(None, None) => Ok("TRUE".to_string()),
}
}
SearchQuery::CompareQuery { field, op, value } => {
let col = match field.as_str() {
"year" => "year",
"size" | "file_size" => "file_size",
"duration" => "duration_secs",
_ => return Ok("TRUE".to_string()), // Unknown field, ignore
};
let op_sql = match op {
crate::search::CompareOp::GreaterThan => ">",
crate::search::CompareOp::GreaterOrEqual => ">=",
crate::search::CompareOp::LessThan => "<",
crate::search::CompareOp::LessOrEqual => "<=",
};
let idx = *offset;
*offset += 1;
params.push(Box::new(*value));
Ok(format!("{col} {op_sql} ${idx}"))
}
SearchQuery::DateQuery { field, value } => {
let col = match field.as_str() {
"created" => "created_at",
"modified" | "updated" => "updated_at",
_ => return Ok("TRUE".to_string()),
};
Ok(date_value_to_postgres_expr(col, value))
}
}
}
/// Convert a DateValue to a PostgreSQL datetime comparison expression.
///
/// `col` is interpolated directly into the SQL; callers must pass a
/// known-safe column name (no user input).
fn date_value_to_postgres_expr(col: &str, value: &crate::search::DateValue) -> String {
    use crate::search::DateValue;
    // Open-ended period beginning at the current truncation boundary.
    let since_start = |unit: &str| format!("{col} >= date_trunc('{unit}', CURRENT_DATE)");
    // Previous full period: [boundary - interval, boundary).
    let previous = |unit: &str, interval: &str| {
        format!(
            "{col} >= date_trunc('{unit}', CURRENT_DATE) - INTERVAL '{interval}' AND {col} < date_trunc('{unit}', CURRENT_DATE)"
        )
    };
    match value {
        DateValue::Today => format!("{col}::date = CURRENT_DATE"),
        DateValue::Yesterday => format!("{col}::date = CURRENT_DATE - INTERVAL '1 day'"),
        DateValue::ThisWeek => since_start("week"),
        DateValue::LastWeek => previous("week", "7 days"),
        DateValue::ThisMonth => since_start("month"),
        DateValue::LastMonth => previous("month", "1 month"),
        DateValue::ThisYear => since_start("year"),
        DateValue::LastYear => previous("year", "1 year"),
        DateValue::DaysAgo(days) => format!("{col} >= CURRENT_DATE - INTERVAL '{days} days'"),
    }
}
@ -478,7 +626,7 @@ impl StorageBackend for PostgresBackend {
.query_opt(
"SELECT id, path, file_name, media_type, content_hash, file_size,
title, artist, album, genre, year, duration_secs, description,
thumbnail_path, created_at, updated_at
thumbnail_path, file_mtime, created_at, updated_at
FROM media_items WHERE content_hash = $1",
&[&hash.0],
)
@ -494,6 +642,34 @@ impl StorageBackend for PostgresBackend {
}
}
/// Look up a media item by its stored file path.
///
/// Returns `Ok(None)` when no row matches. On a hit, custom fields are
/// hydrated with a follow-up query before the item is returned.
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>> {
    // Paths are persisted as text, so compare against the lossy string form.
    let lookup = path.to_string_lossy().to_string();
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let maybe_row = client
        .query_opt(
            "SELECT id, path, file_name, media_type, content_hash, file_size,
                    title, artist, album, genre, year, duration_secs, description,
                    thumbnail_path, file_mtime, created_at, updated_at
             FROM media_items WHERE path = $1",
            &[&lookup],
        )
        .await?;
    if let Some(r) = maybe_row {
        let mut item = row_to_media_item(&r)?;
        item.custom_fields = self.get_custom_fields(item.id).await?;
        Ok(Some(item))
    } else {
        Ok(None)
    }
}
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>> {
let client = self
.pool
@ -671,6 +847,59 @@ impl StorageBackend for PostgresBackend {
Ok(count as u64)
}
// ---- Batch Operations ----
/// Delete many media items in a single statement.
///
/// Returns the number of rows actually removed; an empty id slice is a
/// no-op and short-circuits without touching the pool.
async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> {
    if ids.is_empty() {
        return Ok(0);
    }
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    // `id = ANY($1)` with a uuid[] parameter deletes the whole batch in one round trip.
    let id_array: Vec<Uuid> = ids.iter().map(|media_id| media_id.0).collect();
    let deleted = client
        .execute("DELETE FROM media_items WHERE id = ANY($1)", &[&id_array])
        .await?;
    Ok(deleted)
}
/// Attach every tag in `tag_ids` to every media item in `media_ids`.
///
/// Returns the number of newly inserted links; pairs that already exist
/// are skipped by `ON CONFLICT DO NOTHING` and are NOT counted. Empty
/// input on either side is a no-op.
async fn batch_tag_media(&self, media_ids: &[MediaId], tag_ids: &[Uuid]) -> Result<u64> {
    if media_ids.is_empty() || tag_ids.is_empty() {
        return Ok(0);
    }
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    // Build the cross product of (media, tag) pairs. The final size is known
    // up front, so preallocate instead of growing both vectors repeatedly.
    let pair_count = media_ids.len() * tag_ids.len();
    let mut media_uuids = Vec::with_capacity(pair_count);
    let mut tag_uuids = Vec::with_capacity(pair_count);
    for mid in media_ids {
        for tid in tag_ids {
            media_uuids.push(mid.0);
            tag_uuids.push(*tid);
        }
    }
    // UNNEST zips the two arrays positionally into rows for a single batched INSERT.
    let rows = client
        .execute(
            "INSERT INTO media_tags (media_id, tag_id)
             SELECT * FROM UNNEST($1::uuid[], $2::uuid[])
             ON CONFLICT DO NOTHING",
            &[&media_uuids, &tag_uuids],
        )
        .await?;
    Ok(rows)
}
// ---- Tags ----
async fn create_tag(&self, name: &str, parent_id: Option<Uuid>) -> Result<Tag> {
@ -3155,6 +3384,9 @@ fn query_has_fts(query: &SearchQuery) -> bool {
SearchQuery::FieldMatch { .. } => false,
SearchQuery::TypeFilter(_) => false,
SearchQuery::TagFilter(_) => false,
SearchQuery::RangeQuery { .. } => false,
SearchQuery::CompareQuery { .. } => false,
SearchQuery::DateQuery { .. } => false,
SearchQuery::And(children) | SearchQuery::Or(children) => {
children.iter().any(query_has_fts)
}
@ -3173,7 +3405,7 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
None
} else {
let idx = *offset;
*offset += 1;
*offset += 7; // FullText now uses 7 params (fts, prefix, ilike, sim_title, sim_artist, sim_album, sim_filename)
Some(idx)
}
}
@ -3183,7 +3415,7 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
Some(idx)
}
SearchQuery::Fuzzy(_) => {
*offset += 2; // fuzzy uses two params
*offset += 5; // Fuzzy now uses 5 params (sim_title, sim_artist, sim_album, sim_filename, ilike)
None
}
SearchQuery::FieldMatch { .. } => {
@ -3191,6 +3423,21 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
None
}
SearchQuery::TypeFilter(_) | SearchQuery::TagFilter(_) => None,
SearchQuery::RangeQuery { start, end, .. } => {
// Range queries use 0-2 params depending on bounds
if start.is_some() {
*offset += 1;
}
if end.is_some() {
*offset += 1;
}
None
}
SearchQuery::CompareQuery { .. } => {
*offset += 1;
None
}
SearchQuery::DateQuery { .. } => None, // No params, uses inline SQL
SearchQuery::And(children) | SearchQuery::Or(children) => {
for child in children {
if let Some(idx) = find_inner(child, offset) {
@ -3255,10 +3502,15 @@ mod tests {
let mut offset = 1;
let mut params: Vec<Box<dyn ToSql + Sync + Send>> = Vec::new();
let (clause, types, tags) = build_search_clause(&query, &mut offset, &mut params).unwrap();
assert_eq!(clause, "search_vector @@ plainto_tsquery('english', $1)");
// Fuzzy search combines FTS, prefix, ILIKE, and trigram similarity
assert!(clause.contains("plainto_tsquery"));
assert!(clause.contains("to_tsquery"));
assert!(clause.contains("LIKE"));
assert!(clause.contains("similarity"));
assert!(types.is_empty());
assert!(tags.is_empty());
assert_eq!(offset, 2);
// FullText now uses 7 parameters
assert_eq!(offset, 8);
}
#[test]

View file

@ -111,6 +111,8 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
.get::<_, Option<String>>("thumbnail_path")?
.map(PathBuf::from),
custom_fields: HashMap::new(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
created_at: parse_datetime(&created_str),
updated_at: parse_datetime(&updated_str),
})
@ -312,18 +314,22 @@ fn load_custom_fields_batch(db: &Connection, items: &mut [MediaItem]) -> rusqlit
/// Translate a `SearchQuery` into components that can be assembled into SQL.
///
/// Returns `(fts_expr, where_clauses, join_clauses)` where:
/// Returns `(fts_expr, like_terms, where_clauses, join_clauses, params)` where:
/// - `fts_expr` is an FTS5 MATCH expression (may be empty),
/// - `like_terms` are search terms for LIKE fallback matching,
/// - `where_clauses` are extra WHERE predicates (e.g. type filters),
/// - `join_clauses` are extra JOIN snippets (e.g. tag filters).
/// - `params` are bind parameter values corresponding to `?` placeholders in
/// where_clauses and join_clauses.
fn search_query_to_fts(query: &SearchQuery) -> (String, Vec<String>, Vec<String>, Vec<String>) {
fn search_query_to_fts(
    query: &SearchQuery,
) -> (String, Vec<String>, Vec<String>, Vec<String>, Vec<String>) {
    // Accumulators threaded through the recursive builder.
    let (mut wheres, mut joins, mut params, mut like_terms) =
        (Vec::new(), Vec::new(), Vec::new(), Vec::new());
    let fts = build_fts_expr(query, &mut wheres, &mut joins, &mut params, &mut like_terms);
    // Note the return order: like_terms comes second, before the SQL fragments.
    (fts, like_terms, wheres, joins, params)
}
fn build_fts_expr(
@ -331,21 +337,35 @@ fn build_fts_expr(
wheres: &mut Vec<String>,
joins: &mut Vec<String>,
params: &mut Vec<String>,
like_terms: &mut Vec<String>,
) -> String {
match query {
SearchQuery::FullText(text) => {
if text.is_empty() {
String::new()
} else {
sanitize_fts_token(text)
// Collect term for LIKE fallback matching
like_terms.push(text.clone());
// Add implicit prefix matching for better partial matches
// This allows "mus" to match "music", "musician", etc.
let sanitized = sanitize_fts_token(text);
// If it's a single word, add prefix matching
if !sanitized.contains(' ') && !sanitized.contains('"') {
format!("{}*", sanitized)
} else {
// For phrases, use as-is but also add NEAR for proximity
sanitized
}
}
}
SearchQuery::Prefix(prefix) => {
like_terms.push(prefix.clone());
format!("{}*", sanitize_fts_token(prefix))
}
SearchQuery::Fuzzy(term) => {
// FTS5 does not natively support fuzzy; fall back to prefix match
// FTS5 does not natively support fuzzy; use prefix match
// as a best-effort approximation.
like_terms.push(term.clone());
format!("{}*", sanitize_fts_token(term))
}
SearchQuery::FieldMatch { field, value } => {
@ -355,7 +375,7 @@ fn build_fts_expr(
format!("{safe_field}:{safe_value}")
}
SearchQuery::Not(inner) => {
let inner_expr = build_fts_expr(inner, wheres, joins, params);
let inner_expr = build_fts_expr(inner, wheres, joins, params, like_terms);
if inner_expr.is_empty() {
String::new()
} else {
@ -365,7 +385,7 @@ fn build_fts_expr(
SearchQuery::And(terms) => {
let parts: Vec<String> = terms
.iter()
.map(|t| build_fts_expr(t, wheres, joins, params))
.map(|t| build_fts_expr(t, wheres, joins, params, like_terms))
.filter(|s| !s.is_empty())
.collect();
parts.join(" ")
@ -373,7 +393,7 @@ fn build_fts_expr(
SearchQuery::Or(terms) => {
let parts: Vec<String> = terms
.iter()
.map(|t| build_fts_expr(t, wheres, joins, params))
.map(|t| build_fts_expr(t, wheres, joins, params, like_terms))
.filter(|s| !s.is_empty())
.collect();
if parts.len() <= 1 {
@ -399,6 +419,82 @@ fn build_fts_expr(
params.push(tag_name.clone());
String::new()
}
SearchQuery::RangeQuery { field, start, end } => {
let col = match field.as_str() {
"year" => "m.year",
"size" | "file_size" => "m.file_size",
"duration" => "m.duration_secs",
_ => return String::new(), // Unknown field, ignore
};
match (start, end) {
(Some(s), Some(e)) => {
wheres.push(format!("{col} >= ? AND {col} <= ?"));
params.push(s.to_string());
params.push(e.to_string());
}
(Some(s), None) => {
wheres.push(format!("{col} >= ?"));
params.push(s.to_string());
}
(None, Some(e)) => {
wheres.push(format!("{col} <= ?"));
params.push(e.to_string());
}
(None, None) => {}
}
String::new()
}
SearchQuery::CompareQuery { field, op, value } => {
let col = match field.as_str() {
"year" => "m.year",
"size" | "file_size" => "m.file_size",
"duration" => "m.duration_secs",
_ => return String::new(), // Unknown field, ignore
};
let op_sql = match op {
crate::search::CompareOp::GreaterThan => ">",
crate::search::CompareOp::GreaterOrEqual => ">=",
crate::search::CompareOp::LessThan => "<",
crate::search::CompareOp::LessOrEqual => "<=",
};
wheres.push(format!("{col} {op_sql} ?"));
params.push(value.to_string());
String::new()
}
SearchQuery::DateQuery { field, value } => {
let col = match field.as_str() {
"created" => "m.created_at",
"modified" | "updated" => "m.updated_at",
_ => return String::new(),
};
let sql = date_value_to_sqlite_expr(col, value);
if !sql.is_empty() {
wheres.push(sql);
}
String::new()
}
}
}
/// Convert a DateValue to a SQLite datetime comparison expression.
///
/// `col` is interpolated directly into the SQL; callers must pass a
/// known-safe column name (no user input).
fn date_value_to_sqlite_expr(col: &str, value: &crate::search::DateValue) -> String {
    use crate::search::DateValue;
    // Open-ended lower bound relative to 'now' with a single modifier.
    let since = |modifier: &str| format!("{col} >= datetime('now', '{modifier}')");
    match value {
        DateValue::Today => format!("date({col}) = date('now')"),
        DateValue::Yesterday => format!("date({col}) = date('now', '-1 day')"),
        // Week boundaries need two modifiers, so these arms stay explicit.
        DateValue::ThisWeek => format!("{col} >= datetime('now', 'weekday 0', '-7 days')"),
        DateValue::LastWeek => format!(
            "{col} >= datetime('now', 'weekday 0', '-14 days') AND {col} < datetime('now', 'weekday 0', '-7 days')"
        ),
        DateValue::ThisMonth => since("start of month"),
        DateValue::LastMonth => format!(
            "{col} >= datetime('now', 'start of month', '-1 month') AND {col} < datetime('now', 'start of month')"
        ),
        DateValue::ThisYear => since("start of year"),
        DateValue::LastYear => format!(
            "{col} >= datetime('now', 'start of year', '-1 year') AND {col} < datetime('now', 'start of year')"
        ),
        DateValue::DaysAgo(days) => since(&format!("-{days} days")),
    }
}
@ -514,8 +610,8 @@ impl StorageBackend for SqliteBackend {
db.execute(
"INSERT INTO media_items (id, path, file_name, media_type, content_hash, \
file_size, title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16)",
thumbnail_path, file_mtime, created_at, updated_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17)",
params![
item.id.0.to_string(),
item.path.to_string_lossy().as_ref(),
@ -533,6 +629,7 @@ impl StorageBackend for SqliteBackend {
item.thumbnail_path
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
item.file_mtime,
item.created_at.to_rfc3339(),
item.updated_at.to_rfc3339(),
],
@ -566,7 +663,7 @@ impl StorageBackend for SqliteBackend {
let mut stmt = db.prepare(
"SELECT id, path, file_name, media_type, content_hash, file_size, \
title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at FROM media_items WHERE id = ?1",
thumbnail_path, file_mtime, created_at, updated_at FROM media_items WHERE id = ?1",
)?;
let mut item = stmt
.query_row(params![id.0.to_string()], row_to_media_item)
@ -593,7 +690,7 @@ impl StorageBackend for SqliteBackend {
let mut stmt = db.prepare(
"SELECT id, path, file_name, media_type, content_hash, file_size, \
title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at FROM media_items WHERE content_hash = ?1",
thumbnail_path, file_mtime, created_at, updated_at FROM media_items WHERE content_hash = ?1",
)?;
let result = stmt
.query_row(params![hash.0], row_to_media_item)
@ -609,6 +706,32 @@ impl StorageBackend for SqliteBackend {
.map_err(|e| PinakesError::Database(e.to_string()))?
}
/// Look up a media item by its stored file path (SQLite backend).
///
/// The blocking rusqlite work runs on the blocking thread pool. Returns
/// `Ok(None)` when no row matches; on a hit, custom fields are loaded
/// synchronously before the item is returned.
async fn get_media_by_path(&self, path: &std::path::Path) -> Result<Option<MediaItem>> {
    // Own the string form so it can move into the blocking closure.
    let path_owned = path.to_string_lossy().to_string();
    let conn = Arc::clone(&self.conn);
    tokio::task::spawn_blocking(move || {
        let db = conn
            .lock()
            .map_err(|e| PinakesError::Database(e.to_string()))?;
        let mut stmt = db.prepare(
            "SELECT id, path, file_name, media_type, content_hash, file_size, \
             title, artist, album, genre, year, duration_secs, description, \
             thumbnail_path, file_mtime, created_at, updated_at FROM media_items WHERE path = ?1",
        )?;
        let found = stmt
            .query_row(params![path_owned], row_to_media_item)
            .optional()?;
        match found {
            Some(mut item) => {
                item.custom_fields = load_custom_fields_sync(&db, item.id)?;
                Ok(Some(item))
            }
            None => Ok(None),
        }
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))?
}
async fn list_media(&self, pagination: &Pagination) -> Result<Vec<MediaItem>> {
let pagination = pagination.clone();
let conn = Arc::clone(&self.conn);
@ -630,7 +753,7 @@ impl StorageBackend for SqliteBackend {
let sql = format!(
"SELECT id, path, file_name, media_type, content_hash, file_size, \
title, artist, album, genre, year, duration_secs, description, \
thumbnail_path, created_at, updated_at FROM media_items \
thumbnail_path, file_mtime, created_at, updated_at FROM media_items \
ORDER BY {order_by} LIMIT ?1 OFFSET ?2"
);
let mut stmt = db.prepare(&sql)?;
@ -658,7 +781,7 @@ impl StorageBackend for SqliteBackend {
"UPDATE media_items SET path = ?2, file_name = ?3, media_type = ?4, \
content_hash = ?5, file_size = ?6, title = ?7, artist = ?8, album = ?9, \
genre = ?10, year = ?11, duration_secs = ?12, description = ?13, \
thumbnail_path = ?14, updated_at = ?15 WHERE id = ?1",
thumbnail_path = ?14, file_mtime = ?15, updated_at = ?16 WHERE id = ?1",
params![
item.id.0.to_string(),
item.path.to_string_lossy().as_ref(),
@ -676,6 +799,7 @@ impl StorageBackend for SqliteBackend {
item.thumbnail_path
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
item.file_mtime,
item.updated_at.to_rfc3339(),
],
)?;
@ -1067,7 +1191,7 @@ impl StorageBackend for SqliteBackend {
.lock()
.map_err(|e| PinakesError::Database(e.to_string()))?;
let (fts_expr, where_clauses, join_clauses, bind_params) =
let (fts_expr, _like_terms, where_clauses, join_clauses, bind_params) =
search_query_to_fts(&request.query);
let use_fts = !fts_expr.is_empty();
@ -1309,16 +1433,30 @@ impl StorageBackend for SqliteBackend {
}
async fn batch_delete_media(&self, ids: &[MediaId]) -> Result<u64> {
if ids.is_empty() {
return Ok(0);
}
let ids: Vec<String> = ids.iter().map(|id| id.0.to_string()).collect();
let conn = Arc::clone(&self.conn);
tokio::task::spawn_blocking(move || {
let db = conn
.lock()
.map_err(|e| PinakesError::Database(e.to_string()))?;
// Use IN clause for batch delete - much faster than individual deletes
// SQLite has a limit of ~500-1000 items in IN clause, so chunk if needed
const CHUNK_SIZE: usize = 500;
db.execute_batch("BEGIN IMMEDIATE")?;
let mut count = 0u64;
for id in &ids {
let rows = db.execute("DELETE FROM media_items WHERE id = ?1", params![id])?;
for chunk in ids.chunks(CHUNK_SIZE) {
let placeholders: Vec<String> =
(1..=chunk.len()).map(|i| format!("?{}", i)).collect();
let sql = format!(
"DELETE FROM media_items WHERE id IN ({})",
placeholders.join(", ")
);
let params: Vec<&dyn rusqlite::ToSql> =
chunk.iter().map(|s| s as &dyn rusqlite::ToSql).collect();
let rows = db.execute(&sql, params.as_slice())?;
count += rows as u64;
}
db.execute_batch("COMMIT")?;
@ -1329,6 +1467,9 @@ impl StorageBackend for SqliteBackend {
}
async fn batch_tag_media(&self, media_ids: &[MediaId], tag_ids: &[Uuid]) -> Result<u64> {
if media_ids.is_empty() || tag_ids.is_empty() {
return Ok(0);
}
let media_ids: Vec<String> = media_ids.iter().map(|id| id.0.to_string()).collect();
let tag_ids: Vec<String> = tag_ids.iter().map(|id| id.to_string()).collect();
let conn = Arc::clone(&self.conn);
@ -1337,13 +1478,14 @@ impl StorageBackend for SqliteBackend {
.lock()
.map_err(|e| PinakesError::Database(e.to_string()))?;
db.execute_batch("BEGIN IMMEDIATE")?;
// Prepare statement once for reuse
let mut stmt = db.prepare_cached(
"INSERT OR IGNORE INTO media_tags (media_id, tag_id) VALUES (?1, ?2)",
)?;
let mut count = 0u64;
for mid in &media_ids {
for tid in &tag_ids {
db.execute(
"INSERT OR IGNORE INTO media_tags (media_id, tag_id) VALUES (?1, ?2)",
params![mid, tid],
)?;
stmt.execute(params![mid, tid])?;
count += 1;
}
}