treewide: replace std hashers with rustc_hash alternatives; fix clippy

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I766c36cb53d3d7f9e85b91a67c4131a66a6a6964
This commit is contained in:
raf 2026-03-19 22:34:30 +03:00
commit c6efd3661f
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
53 changed files with 343 additions and 394 deletions

View file

@@ -1,11 +1,11 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::{Arc, Mutex},
};
use chrono::{DateTime, NaiveDateTime, Utc};
use rusqlite::{Connection, Row, params};
use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@@ -142,7 +142,7 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")?
.map(PathBuf::from),
custom_fields: HashMap::new(), // loaded separately
custom_fields: FxHashMap::default(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
@@ -358,7 +358,7 @@ fn load_user_profile_sync(
fn load_custom_fields_sync(
db: &Connection,
media_id: MediaId,
) -> rusqlite::Result<HashMap<String, CustomField>> {
) -> rusqlite::Result<FxHashMap<String, CustomField>> {
let mut stmt = db.prepare(
"SELECT field_name, field_type, field_value FROM custom_fields WHERE \
media_id = ?1",
@@ -372,7 +372,7 @@ fn load_custom_fields_sync(
value,
}))
})?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@@ -409,8 +409,8 @@ fn load_custom_fields_batch(
Ok((mid_str, name, ft_str, value))
})?;
let mut fields_map: HashMap<String, HashMap<String, CustomField>> =
HashMap::new();
let mut fields_map: FxHashMap<String, FxHashMap<String, CustomField>> =
FxHashMap::default();
for r in rows {
let (mid_str, name, ft_str, value) = r?;
fields_map
@@ -1762,7 +1762,7 @@ impl StorageBackend for SqliteBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<HashMap<String, CustomField>> {
) -> Result<FxHashMap<String, CustomField>> {
let conn = Arc::clone(&self.conn);
tokio::task::spawn_blocking(move || {
let map = {
@@ -1783,7 +1783,7 @@ impl StorageBackend for SqliteBackend {
}))
})?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@@ -2093,8 +2093,8 @@ impl StorageBackend for SqliteBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
let mut grouped_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut grouped_indices: rustc_hash::FxHashSet<usize> =
rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@@ -5265,8 +5265,8 @@ impl StorageBackend for SqliteBackend {
"SELECT identifier_type, identifier_value
FROM book_identifiers WHERE media_id = ?1",
)?;
let mut identifiers: std::collections::HashMap<String, Vec<String>> =
std::collections::HashMap::new();
let mut identifiers: FxHashMap<String, Vec<String>> =
FxHashMap::default();
for row in stmt.query_map([&media_id_str], |row| {
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
})? {
@@ -8336,13 +8336,13 @@ impl StorageBackend for SqliteBackend {
let conn = conn.lock().map_err(|e| PinakesError::Database(format!("connection mutex poisoned: {e}")))?;
let mut nodes = Vec::new();
let mut edges = Vec::new();
let mut node_ids = std::collections::HashSet::new();
let mut node_ids = rustc_hash::FxHashSet::default();
// Get nodes - either all markdown files or those connected to center
if let Some(center_id) = center_id_str {
// BFS to find connected nodes within depth
let mut frontier = vec![center_id.clone()];
let mut visited = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center_id);
for _ in 0..depth {