treewide: replace std hashers with rustc_hash alternatives; fix clippy

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I766c36cb53d3d7f9e85b91a67c4131a66a6a6964
This commit is contained in:
raf 2026-03-19 22:34:30 +03:00
commit c6efd3661f
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
53 changed files with 343 additions and 394 deletions

View file

@@ -1,9 +1,10 @@
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use chrono::Utc;
use deadpool_postgres::{Config as PoolConfig, Pool, Runtime};
use native_tls::TlsConnector;
use postgres_native_tls::MakeTlsConnector;
use rustc_hash::FxHashMap;
use tokio_postgres::{NoTls, Row, types::ToSql};
use uuid::Uuid;
@@ -215,7 +216,7 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")
.map(PathBuf::from),
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: row.get("file_mtime"),
// Photo-specific fields
@@ -922,8 +923,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -1596,8 +1597,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -1759,8 +1760,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -1894,7 +1895,7 @@ impl StorageBackend for PostgresBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<HashMap<String, CustomField>> {
) -> Result<FxHashMap<String, CustomField>> {
let client = self
.pool
.get()
@@ -1909,7 +1910,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for row in &rows {
let name: String = row.get("field_name");
let ft_str: String = row.get("field_type");
@@ -1988,8 +1989,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -2066,8 +2067,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -2089,8 +2090,8 @@ impl StorageBackend for PostgresBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
let mut grouped_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut grouped_indices: rustc_hash::FxHashSet<usize> =
rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@@ -2952,8 +2953,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -3365,8 +3366,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -3553,8 +3554,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -3623,8 +3624,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -4448,8 +4449,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut identifiers: std::collections::HashMap<String, Vec<String>> =
std::collections::HashMap::new();
let mut identifiers: FxHashMap<String, Vec<String>> = FxHashMap::default();
for r in id_rows {
let id_type: String = r.get(0);
let value: String = r.get(1);
@@ -7031,11 +7031,11 @@ impl StorageBackend for PostgresBackend {
let depth = depth.min(5); // Limit depth
let mut nodes = Vec::new();
let mut edges = Vec::new();
let node_ids: std::collections::HashSet<String> =
let node_ids: rustc_hash::FxHashSet<String> =
if let Some(center) = center_id {
// BFS to find connected nodes within depth
let mut frontier = vec![center.0.to_string()];
let mut visited = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center.0.to_string());
for _ in 0..depth {
@@ -7099,7 +7099,7 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let mut collected = std::collections::HashSet::new();
let mut collected = rustc_hash::FxHashSet::default();
for row in rows {
let id: String = row.get(0);
collected.insert(id);