treewide: replace std hashers with rustc_hash alternatives; fix clippy

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I766c36cb53d3d7f9e85b91a67c4131a66a6a6964
raf 2026-03-19 22:34:30 +03:00
commit f831e58723
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
53 changed files with 343 additions and 394 deletions
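The whole commit follows one substitution pattern. A minimal standalone sketch of it (illustrative code, not part of the diff): `FxHashMap`/`FxHashSet` are type aliases over the std collections with the fast, non-DoS-resistant `FxHasher`, so every existing method keeps working; only construction differs, because `new()` and `From<[(K, V); N]>` are defined solely for the default `RandomState` hasher.

```rust
// Sketch of the drop-in substitution applied throughout this commit.
// FxHashMap<K, V> is just HashMap<K, V, FxBuildHasher>, so all HashMap
// methods keep working; only construction changes.
use rustc_hash::{FxHashMap, FxHashSet};

fn main() {
    // Before: let mut m = std::collections::HashMap::new();
    let mut m: FxHashMap<String, u32> = FxHashMap::default();
    m.insert("a".to_string(), 1);

    // Before: HashMap::from([(k, v)]). The From<[(K, V); N]> impl is tied
    // to RandomState, so Fx maps are built from an iterator instead.
    let pairs: FxHashMap<&str, &str> = FxHashMap::from_iter([("k", "v")]);

    let mut seen: FxHashSet<u32> = FxHashSet::default();
    seen.insert(1);

    println!("{} {} {}", m.len(), pairs.len(), seen.len());
}
```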

View file

@@ -42,6 +42,7 @@ regex = { workspace = true }
moka = { workspace = true }
urlencoding = { workspace = true }
image_hasher = { workspace = true }
+rustc-hash = { workspace = true }
# Plugin system
pinakes-plugin-api.workspace = true

View file

@@ -1494,6 +1494,8 @@ impl Default for Config {
#[cfg(test)]
mod tests {
+use rustc_hash::FxHashMap;
use super::*;
fn test_config_with_concurrency(concurrency: usize) -> Config {
@@ -1549,7 +1551,7 @@ mod tests {
// HashMap lookup. This avoids unsafe std::env::set_var and is
// thread-safe for parallel test execution.
fn test_lookup<'a>(
-vars: &'a std::collections::HashMap<&str, &str>,
+vars: &'a FxHashMap<&str, &str>,
) -> impl Fn(&str) -> crate::error::Result<String> + 'a {
move |name| {
vars
@@ -1565,24 +1567,21 @@ mod tests {
#[test]
fn test_expand_env_var_simple() {
-let vars =
-std::collections::HashMap::from([("TEST_VAR_SIMPLE", "test_value")]);
+let vars = FxHashMap::from_iter([("TEST_VAR_SIMPLE", "test_value")]);
let result = expand_env_vars("$TEST_VAR_SIMPLE", test_lookup(&vars));
assert_eq!(result.unwrap(), "test_value");
}
#[test]
fn test_expand_env_var_braces() {
-let vars =
-std::collections::HashMap::from([("TEST_VAR_BRACES", "test_value")]);
+let vars = FxHashMap::from_iter([("TEST_VAR_BRACES", "test_value")]);
let result = expand_env_vars("${TEST_VAR_BRACES}", test_lookup(&vars));
assert_eq!(result.unwrap(), "test_value");
}
#[test]
fn test_expand_env_var_embedded() {
-let vars =
-std::collections::HashMap::from([("TEST_VAR_EMBEDDED", "value")]);
+let vars = FxHashMap::from_iter([("TEST_VAR_EMBEDDED", "value")]);
let result =
expand_env_vars("prefix_${TEST_VAR_EMBEDDED}_suffix", test_lookup(&vars));
assert_eq!(result.unwrap(), "prefix_value_suffix");
@@ -1590,15 +1589,14 @@ mod tests {
#[test]
fn test_expand_env_var_multiple() {
-let vars =
-std::collections::HashMap::from([("VAR1", "value1"), ("VAR2", "value2")]);
+let vars = FxHashMap::from_iter([("VAR1", "value1"), ("VAR2", "value2")]);
let result = expand_env_vars("${VAR1}_${VAR2}", test_lookup(&vars));
assert_eq!(result.unwrap(), "value1_value2");
}
#[test]
fn test_expand_env_var_missing() {
-let vars = std::collections::HashMap::new();
+let vars = FxHashMap::default();
let result = expand_env_vars("${NONEXISTENT_VAR}", test_lookup(&vars));
assert!(result.is_err());
assert!(
@@ -1611,7 +1609,7 @@ mod tests {
#[test]
fn test_expand_env_var_empty_name() {
-let vars = std::collections::HashMap::new();
+let vars = FxHashMap::default();
let result = expand_env_vars("${}", test_lookup(&vars));
assert!(result.is_err());
assert!(
@@ -1624,31 +1622,28 @@ mod tests {
#[test]
fn test_expand_env_var_escaped() {
-let vars = std::collections::HashMap::new();
+let vars = FxHashMap::default();
let result = expand_env_vars("\\$NOT_A_VAR", test_lookup(&vars));
assert_eq!(result.unwrap(), "$NOT_A_VAR");
}
#[test]
fn test_expand_env_var_no_vars() {
-let vars = std::collections::HashMap::new();
+let vars = FxHashMap::default();
let result = expand_env_vars("plain_text", test_lookup(&vars));
assert_eq!(result.unwrap(), "plain_text");
}
#[test]
fn test_expand_env_var_underscore() {
-let vars = std::collections::HashMap::from([("TEST_VAR_NAME", "value")]);
+let vars = FxHashMap::from_iter([("TEST_VAR_NAME", "value")]);
let result = expand_env_vars("$TEST_VAR_NAME", test_lookup(&vars));
assert_eq!(result.unwrap(), "value");
}
#[test]
fn test_expand_env_var_mixed_syntax() {
-let vars = std::collections::HashMap::from([
-("VAR1_MIXED", "v1"),
-("VAR2_MIXED", "v2"),
-]);
+let vars = FxHashMap::from_iter([("VAR1_MIXED", "v1"), ("VAR2_MIXED", "v2")]);
let result =
expand_env_vars("$VAR1_MIXED and ${VAR2_MIXED}", test_lookup(&vars));
assert_eq!(result.unwrap(), "v1 and v2");

View file

@@ -254,7 +254,7 @@ pub async fn import_file_with_options(
duration_secs: extracted.duration_secs,
description: extracted.description,
thumbnail_path: thumb_path,
-custom_fields: std::collections::HashMap::new(),
+custom_fields: rustc_hash::FxHashMap::default(),
file_mtime: current_mtime,
// Photo-specific metadata from extraction

View file

@@ -1,8 +1,6 @@
-use std::{
-collections::{HashMap, HashSet},
-path::{Path, PathBuf},
-};
+use std::path::{Path, PathBuf};
+use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
@@ -96,8 +94,8 @@ pub async fn detect_orphans(
let mut orphaned_ids = Vec::new();
// Build hash index: ContentHash -> Vec<(MediaId, PathBuf)>
-let mut hash_index: HashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
-HashMap::new();
+let mut hash_index: FxHashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
+FxHashMap::default();
for (id, path, hash) in &media_paths {
hash_index
.entry(hash.clone())
@@ -138,12 +136,12 @@ pub async fn detect_orphans(
fn detect_moved_files(
orphaned_ids: &[MediaId],
media_paths: &[(MediaId, PathBuf, ContentHash)],
-hash_index: &HashMap<ContentHash, Vec<(MediaId, PathBuf)>>,
+hash_index: &FxHashMap<ContentHash, Vec<(MediaId, PathBuf)>>,
) -> Vec<(MediaId, PathBuf, PathBuf)> {
let mut moved = Vec::new();
// Build lookup map for orphaned items: MediaId -> (PathBuf, ContentHash)
-let orphaned_map: HashMap<MediaId, (PathBuf, ContentHash)> = media_paths
+let orphaned_map: FxHashMap<MediaId, (PathBuf, ContentHash)> = media_paths
.iter()
.filter(|(id, ..)| orphaned_ids.contains(id))
.map(|(id, path, hash)| (*id, (path.clone(), hash.clone())))
@@ -184,7 +182,7 @@ async fn detect_untracked_files(
}
// Build set of tracked paths for fast lookup
-let tracked_paths: HashSet<PathBuf> = media_paths
+let tracked_paths: FxHashSet<PathBuf> = media_paths
.iter()
.map(|(_, path, _)| path.clone())
.collect();
@@ -198,7 +196,7 @@ async fn detect_untracked_files(
];
// Walk filesystem for each root in parallel (limit concurrency to 4)
-let mut filesystem_paths = HashSet::new();
+let mut filesystem_paths = FxHashSet::default();
let mut tasks = tokio::task::JoinSet::new();
for root in roots {
@@ -322,8 +320,7 @@ pub async fn verify_integrity(
let paths_to_check: Vec<(MediaId, PathBuf, ContentHash)> =
if let Some(ids) = media_ids {
-let id_set: std::collections::HashSet<MediaId> =
-ids.iter().copied().collect();
+let id_set: FxHashSet<MediaId> = ids.iter().copied().collect();
all_paths
.into_iter()
.filter(|(id, ..)| id_set.contains(id))
@@ -383,7 +380,7 @@ pub async fn cleanup_orphaned_thumbnails(
thumbnail_dir: &Path,
) -> Result<usize> {
let media_paths = storage.list_media_paths().await?;
-let known_ids: std::collections::HashSet<String> = media_paths
+let known_ids: FxHashSet<String> = media_paths
.iter()
.map(|(id, ..)| id.0.to_string())
.collect();
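The hash-index construction in `detect_orphans`, isolated as a sketch (simplified types; the `MediaId`/`ContentHash` stand-ins are hypothetical):

```rust
use rustc_hash::FxHashMap;

type MediaId = u64;        // stand-in for the crate's MediaId
type ContentHash = String; // stand-in for the crate's ContentHash

fn main() {
    let media_paths: Vec<(MediaId, String, ContentHash)> = vec![
        (1, "/library/a.mp4".into(), "hash-a".into()),
        (2, "/library/b.mp4".into(), "hash-a".into()), // duplicate content
        (3, "/library/c.mp4".into(), "hash-c".into()),
    ];

    // ContentHash -> Vec<(MediaId, path)>: entry().or_default() builds the
    // one-to-many index in a single pass, as the hunk above does.
    let mut hash_index: FxHashMap<ContentHash, Vec<(MediaId, String)>> =
        FxHashMap::default();
    for (id, path, hash) in &media_paths {
        hash_index
            .entry(hash.clone())
            .or_default()
            .push((*id, path.clone()));
    }

    assert_eq!(hash_index["hash-a"].len(), 2);
}
```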

View file

@@ -1,6 +1,7 @@
-use std::{collections::HashMap, path::PathBuf, sync::Arc};
+use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
+use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tokio::sync::{RwLock, mpsc};
@@ -71,8 +72,8 @@ struct WorkerItem {
}
pub struct JobQueue {
-jobs: Arc<RwLock<HashMap<Uuid, Job>>>,
-cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>>,
+jobs: Arc<RwLock<FxHashMap<Uuid, Job>>>,
+cancellations: Arc<RwLock<FxHashMap<Uuid, CancellationToken>>>,
tx: mpsc::Sender<WorkerItem>,
}
@@ -94,7 +95,7 @@ impl JobQueue {
Uuid,
JobKind,
CancellationToken,
-Arc<RwLock<HashMap<Uuid, Job>>>,
+Arc<RwLock<FxHashMap<Uuid, Job>>>,
) -> tokio::task::JoinHandle<()>
+ Send
+ Sync
@@ -102,10 +103,10 @@ impl JobQueue {
{
let (tx, rx) = mpsc::channel::<WorkerItem>(256);
let rx = Arc::new(tokio::sync::Mutex::new(rx));
-let jobs: Arc<RwLock<HashMap<Uuid, Job>>> =
-Arc::new(RwLock::new(HashMap::new()));
-let cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>> =
-Arc::new(RwLock::new(HashMap::new()));
+let jobs: Arc<RwLock<FxHashMap<Uuid, Job>>> =
+Arc::new(RwLock::new(FxHashMap::default()));
+let cancellations: Arc<RwLock<FxHashMap<Uuid, CancellationToken>>> =
+Arc::new(RwLock::new(FxHashMap::default()));
let executor = Arc::new(executor);
@@ -261,7 +262,7 @@ impl JobQueue {
/// Update a job's progress. Called by executors.
pub async fn update_progress(
-jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
+jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
progress: f32,
message: String,
@@ -275,7 +276,7 @@ impl JobQueue {
/// Mark a job as completed.
pub async fn complete(
-jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
+jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
result: Value,
) {
@@ -288,7 +289,7 @@ impl JobQueue {
/// Mark a job as failed.
pub async fn fail(
-jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
+jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
error: String,
) {
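A reduced sketch of the shared job-map shape the queue uses: tokio's async `RwLock` around an `FxHashMap` behind an `Arc`. The `Job` struct here is a hypothetical stand-in, and the sketch assumes tokio with the `rt` and `macros` features plus uuid with `v4`:

```rust
use std::sync::Arc;
use rustc_hash::FxHashMap;
use tokio::sync::RwLock;
use uuid::Uuid;

#[derive(Debug)]
struct Job {
    progress: f32,
    message: String,
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let jobs: Arc<RwLock<FxHashMap<Uuid, Job>>> =
        Arc::new(RwLock::new(FxHashMap::default()));

    let id = Uuid::new_v4();
    jobs.write().await.insert(id, Job { progress: 0.0, message: String::new() });

    // Executors update through the shared handle, as update_progress does.
    if let Some(job) = jobs.write().await.get_mut(&id) {
        job.progress = 0.5;
        job.message = "halfway".to_string();
    }

    assert_eq!(jobs.read().await[&id].progress, 0.5);
}
```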

View file

@@ -352,7 +352,7 @@ pub fn resolve_link_candidates(
}
// 4. Remove duplicates while preserving order
-let mut seen = std::collections::HashSet::new();
+let mut seen = rustc_hash::FxHashSet::default();
candidates.retain(|p| seen.insert(p.clone()));
candidates
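The dedupe idiom in `resolve_link_candidates`, as a standalone sketch: `HashSet::insert` returns false for values already present, so `retain` keeps only first occurrences while preserving order.

```rust
use rustc_hash::FxHashSet;

fn main() {
    let mut candidates = vec!["a.md", "b.md", "a.md", "c.md", "b.md"];

    // insert() returns true only the first time a value is seen, so retain
    // drops later duplicates without disturbing the original order.
    let mut seen = FxHashSet::default();
    candidates.retain(|p| seen.insert(*p));

    assert_eq!(candidates, ["a.md", "b.md", "c.md"]);
}
```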

View file

@@ -1,8 +1,7 @@
//! Media type registry for managing both built-in and custom media types
-use std::collections::HashMap;
use anyhow::{Result, anyhow};
+use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use super::MediaCategory;
@@ -33,10 +32,10 @@ pub struct MediaTypeDescriptor {
#[derive(Debug, Clone)]
pub struct MediaTypeRegistry {
/// Map of media type ID to descriptor
-types: HashMap<String, MediaTypeDescriptor>,
+types: FxHashMap<String, MediaTypeDescriptor>,
/// Map of extension to media type ID
-extension_map: HashMap<String, String>,
+extension_map: FxHashMap<String, String>,
}
impl MediaTypeRegistry {
@@ -44,8 +43,8 @@ impl MediaTypeRegistry {
#[must_use]
pub fn new() -> Self {
Self {
-types: HashMap::new(),
-extension_map: HashMap::new(),
+types: FxHashMap::default(),
+extension_map: FxHashMap::default(),
}
}

View file

@@ -190,7 +190,7 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
book_meta.authors = authors;
// Extract ISBNs from identifiers
-let mut identifiers = std::collections::HashMap::new();
+let mut identifiers = rustc_hash::FxHashMap::default();
for item in &doc.metadata {
if item.property == "identifier" || item.property == "dc:identifier" {
// Try to get scheme from refinements

View file

@@ -4,7 +4,9 @@ pub mod image;
pub mod markdown;
pub mod video;
-use std::{collections::HashMap, path::Path};
+use std::path::Path;
+use rustc_hash::FxHashMap;
use crate::{error::Result, media_type::MediaType, model::BookMetadata};
@@ -17,7 +19,7 @@ pub struct ExtractedMetadata {
pub year: Option<i32>,
pub duration_secs: Option<f64>,
pub description: Option<String>,
-pub extra: HashMap<String, String>,
+pub extra: FxHashMap<String, String>,
pub book_metadata: Option<BookMetadata>,
// Photo-specific metadata

View file

@@ -1,6 +1,7 @@
-use std::{collections::HashMap, fmt, path::PathBuf};
+use std::{fmt, path::PathBuf};
use chrono::{DateTime, Utc};
+use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@@ -129,7 +130,7 @@ pub struct MediaItem {
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub thumbnail_path: Option<PathBuf>,
-pub custom_fields: HashMap<String, CustomField>,
+pub custom_fields: FxHashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental
/// scanning
pub file_mtime: Option<i64>,
@@ -434,7 +435,7 @@ pub struct BookMetadata {
pub series_index: Option<f64>,
pub format: Option<String>,
pub authors: Vec<AuthorInfo>,
-pub identifiers: HashMap<String, Vec<String>>,
+pub identifiers: FxHashMap<String, Vec<String>>,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
@@ -454,7 +455,7 @@ impl Default for BookMetadata {
series_index: None,
format: None,
authors: Vec::new(),
-identifiers: HashMap::new(),
+identifiers: FxHashMap::default(),
created_at: now,
updated_at: now,
}

View file

@@ -186,17 +186,19 @@ impl PluginManager {
fn resolve_load_order(
manifests: &[pinakes_plugin_api::PluginManifest],
) -> Vec<pinakes_plugin_api::PluginManifest> {
-use std::collections::{HashMap, HashSet, VecDeque};
+use std::collections::VecDeque;
+use rustc_hash::{FxHashMap, FxHashSet};
// Index manifests by name for O(1) lookup
-let by_name: HashMap<&str, usize> = manifests
+let by_name: FxHashMap<&str, usize> = manifests
.iter()
.enumerate()
.map(|(i, m)| (m.plugin.name.as_str(), i))
.collect();
// Check for missing dependencies and warn early
-let known: HashSet<&str> = by_name.keys().copied().collect();
+let known: FxHashSet<&str> = by_name.keys().copied().collect();
for manifest in manifests {
for dep in &manifest.plugin.dependencies {
if !known.contains(dep.as_str()) {
@@ -250,7 +252,7 @@ impl PluginManager {
// Anything not in `result` is part of a cycle or has a missing dep
if result.len() < manifests.len() {
-let loaded: HashSet<&str> =
+let loaded: FxHashSet<&str> =
result.iter().map(|m| m.plugin.name.as_str()).collect();
for manifest in manifests {
if !loaded.contains(manifest.plugin.name.as_str()) {
@@ -669,9 +671,9 @@ impl PluginManager {
/// none declare theme extensions.
pub async fn list_ui_theme_extensions(
&self,
-) -> std::collections::HashMap<String, String> {
+) -> rustc_hash::FxHashMap<String, String> {
let registry = self.registry.read().await;
-let mut merged = std::collections::HashMap::new();
+let mut merged = rustc_hash::FxHashMap::default();
for plugin in registry.list_all() {
if !plugin.enabled {
continue;
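A compact sketch of the dependency-ordering approach the `resolve_load_order` hunks suggest: a Kahn-style BFS over names, with `FxHashMap`/`FxHashSet` for the indexes. The simplified manifest type and field names here are illustrative, not the crate's API:

```rust
use std::collections::VecDeque;
use rustc_hash::{FxHashMap, FxHashSet};

struct Manifest {
    name: &'static str,
    dependencies: Vec<&'static str>,
}

fn resolve_load_order(manifests: &[Manifest]) -> Vec<&'static str> {
    // Index by name for O(1) lookups, as the diff does.
    let by_name: FxHashMap<&str, usize> =
        manifests.iter().enumerate().map(|(i, m)| (m.name, i)).collect();
    let known: FxHashSet<&str> = by_name.keys().copied().collect();

    // In-degree = number of known dependencies not yet loaded.
    let mut indegree: Vec<usize> = manifests
        .iter()
        .map(|m| m.dependencies.iter().filter(|d| known.contains(*d)).count())
        .collect();

    // dependents[i] lists the manifests that depend on manifest i.
    let mut dependents: Vec<Vec<usize>> = vec![Vec::new(); manifests.len()];
    for (i, m) in manifests.iter().enumerate() {
        for dep in &m.dependencies {
            if let Some(&j) = by_name.get(dep) {
                dependents[j].push(i);
            }
        }
    }

    let mut queue: VecDeque<usize> =
        (0..manifests.len()).filter(|&i| indegree[i] == 0).collect();
    let mut order = Vec::new();
    while let Some(i) = queue.pop_front() {
        order.push(manifests[i].name);
        for &d in &dependents[i] {
            indegree[d] -= 1;
            if indegree[d] == 0 {
                queue.push_back(d);
            }
        }
    }
    // Anything missing from `order` is part of a cycle, matching the
    // "part of a cycle or has a missing dep" check in the diff.
    order
}

fn main() {
    let manifests = [
        Manifest { name: "themes", dependencies: vec!["base"] },
        Manifest { name: "base", dependencies: vec![] },
    ];
    assert_eq!(resolve_load_order(&manifests), ["base", "themes"]);
}
```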

View file

@@ -13,12 +13,12 @@
//! priority 100. A circuit breaker disables plugins after consecutive failures.
use std::{
-collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
time::{Duration, Instant},
};
+use rustc_hash::FxHashMap;
use tokio::sync::RwLock;
use tracing::{debug, info, warn};
@@ -75,22 +75,22 @@ struct CachedCapabilities {
/// Keyed by `(kind, plugin_id)` -> list of supported type strings.
/// Separate entries for each kind avoid collisions when a plugin
/// implements both `metadata_extractor` and `thumbnail_generator`.
-supported_types: HashMap<(String, String), Vec<String>>,
+supported_types: FxHashMap<(String, String), Vec<String>>,
/// `plugin_id` -> list of interested event type strings
-interested_events: HashMap<String, Vec<String>>,
+interested_events: FxHashMap<String, Vec<String>>,
/// `plugin_id` -> list of media type definitions (for `MediaTypeProvider`)
-media_type_definitions: HashMap<String, Vec<PluginMediaTypeDefinition>>,
+media_type_definitions: FxHashMap<String, Vec<PluginMediaTypeDefinition>>,
/// `plugin_id` -> list of theme definitions (for `ThemeProvider`)
-theme_definitions: HashMap<String, Vec<PluginThemeDefinition>>,
+theme_definitions: FxHashMap<String, Vec<PluginThemeDefinition>>,
}
impl CachedCapabilities {
fn new() -> Self {
Self {
-supported_types: HashMap::new(),
-interested_events: HashMap::new(),
-media_type_definitions: HashMap::new(),
-theme_definitions: HashMap::new(),
+supported_types: FxHashMap::default(),
+interested_events: FxHashMap::default(),
+media_type_definitions: FxHashMap::default(),
+theme_definitions: FxHashMap::default(),
}
}
}
@@ -101,7 +101,7 @@ pub struct PluginPipeline {
manager: Arc<PluginManager>,
timeouts: PluginTimeoutConfig,
max_consecutive_failures: u32,
-health: RwLock<HashMap<String, PluginHealth>>,
+health: RwLock<FxHashMap<String, PluginHealth>>,
capabilities: RwLock<CachedCapabilities>,
}
@@ -117,7 +117,7 @@ impl PluginPipeline {
manager,
timeouts,
max_consecutive_failures,
-health: RwLock::new(HashMap::new()),
+health: RwLock::new(FxHashMap::default()),
capabilities: RwLock::new(CachedCapabilities::new()),
}
}
@@ -826,7 +826,7 @@ impl PluginPipeline {
}
// Deduplicate by ID, keeping the highest-scoring entry
-let mut seen: HashMap<String, usize> = HashMap::new();
+let mut seen: FxHashMap<String, usize> = FxHashMap::default();
let mut deduped: Vec<SearchResultItem> = Vec::new();
for item in all_results {
if let Some(&idx) = seen.get(&item.id) {
@@ -1363,7 +1363,7 @@ mod tests {
year: Some(2024),
duration_secs: None,
description: None,
-extra: HashMap::new(),
+extra: FxHashMap::default(),
};
merge_metadata(&mut base, &resp);
@@ -1379,7 +1379,7 @@ mod tests {
let mut base = ExtractedMetadata::default();
base.extra.insert("key1".to_string(), "val1".to_string());
-let mut extra = HashMap::new();
+let mut extra = FxHashMap::default();
extra.insert("key2".to_string(), "val2".to_string());
extra.insert("key1".to_string(), "overwritten".to_string());

View file

@@ -1,9 +1,10 @@
//! Plugin registry for managing loaded plugins
-use std::{collections::HashMap, path::PathBuf};
+use std::path::PathBuf;
use anyhow::{Result, anyhow};
use pinakes_plugin_api::{PluginManifest, PluginMetadata};
+use rustc_hash::FxHashMap;
use super::runtime::WasmPlugin;
@@ -21,7 +22,7 @@ pub struct RegisteredPlugin {
/// Plugin registry maintains the state of all loaded plugins
pub struct PluginRegistry {
/// Map of plugin ID to registered plugin
-plugins: HashMap<String, RegisteredPlugin>,
+plugins: FxHashMap<String, RegisteredPlugin>,
}
impl PluginRegistry {
@@ -29,7 +30,7 @@ impl PluginRegistry {
#[must_use]
pub fn new() -> Self {
Self {
-plugins: HashMap::new(),
+plugins: FxHashMap::default(),
}
}
@@ -156,9 +157,8 @@ impl Default for PluginRegistry {
#[cfg(test)]
mod tests {
-use std::collections::HashMap;
use pinakes_plugin_api::{Capabilities, manifest::ManifestCapabilities};
+use rustc_hash::FxHashMap;
use super::*;
@@ -181,7 +181,7 @@ mod tests {
priority: 0,
},
capabilities: ManifestCapabilities::default(),
-config: HashMap::new(),
+config: FxHashMap::default(),
ui: Default::default(),
};

View file

@@ -4,8 +4,9 @@
//! Requests are serialized to JSON, passed to the plugin, and responses
//! are deserialized from JSON written by the plugin via `host_set_result`.
-use std::{collections::HashMap, path::PathBuf};
+use std::path::PathBuf;
+use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
/// Request to check if a plugin can handle a file
@@ -55,7 +56,7 @@ pub struct ExtractMetadataResponse {
#[serde(default)]
pub description: Option<String>,
#[serde(default)]
-pub extra: HashMap<String, String>,
+pub extra: FxHashMap<String, String>,
}
/// Request to generate a thumbnail
@@ -140,7 +141,7 @@ pub struct PluginThemeDefinition {
#[derive(Debug, Clone, Deserialize)]
pub struct LoadThemeResponse {
pub css: Option<String>,
-pub colors: HashMap<String, String>,
+pub colors: FxHashMap<String, String>,
}
#[cfg(test)]
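One reason these protocol structs can switch hashers freely: serde's map impls are generic over the hash builder (any `S: BuildHasher + Default`), so `FxHashMap` fields derive `Deserialize` exactly like std maps. A minimal sketch, assuming serde's `derive` feature and `serde_json`:

```rust
use rustc_hash::FxHashMap;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct LoadThemeResponse {
    css: Option<String>,
    // Deserialize for HashMap<K, V, S> only requires S: BuildHasher + Default,
    // which FxHashMap's hash builder satisfies; no extra attributes needed.
    colors: FxHashMap<String, String>,
}

fn main() {
    let json = r#"{"css": null, "colors": {"bg": "#000000"}}"#;
    let resp: LoadThemeResponse = serde_json::from_str(json).unwrap();
    assert_eq!(resp.colors["bg"], "#000000");
}
```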

View file

@@ -272,7 +272,7 @@ impl Default for WasmPlugin {
context: PluginContext {
data_dir: std::env::temp_dir(),
cache_dir: std::env::temp_dir(),
-config: std::collections::HashMap::new(),
+config: Default::default(),
capabilities: Default::default(),
},
}
@@ -774,8 +774,6 @@ impl HostFunctions {
#[cfg(test)]
mod tests {
-use std::collections::HashMap;
use pinakes_plugin_api::PluginContext;
use super::*;
@@ -795,7 +793,7 @@ mod tests {
let context = PluginContext {
data_dir: "/tmp/data".into(),
cache_dir: "/tmp/cache".into(),
-config: HashMap::new(),
+config: Default::default(),
capabilities,
};

View file

@@ -5,6 +5,7 @@ pub mod sqlite;
use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
+use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@@ -221,7 +222,7 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn get_custom_fields(
&self,
media_id: MediaId,
-) -> Result<std::collections::HashMap<String, CustomField>>;
+) -> Result<FxHashMap<String, CustomField>>;
/// Delete a custom field from a media item by name.
async fn delete_custom_field(

View file

@@ -1,9 +1,10 @@
-use std::{collections::HashMap, path::PathBuf};
+use std::path::PathBuf;
use chrono::Utc;
use deadpool_postgres::{Config as PoolConfig, Pool, Runtime};
use native_tls::TlsConnector;
use postgres_native_tls::MakeTlsConnector;
+use rustc_hash::FxHashMap;
use tokio_postgres::{NoTls, Row, types::ToSql};
use uuid::Uuid;
@@ -215,7 +216,7 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")
.map(PathBuf::from),
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: row.get("file_mtime"),
// Photo-specific fields
@@ -922,8 +923,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -1596,8 +1597,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -1759,8 +1760,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -1894,7 +1895,7 @@ impl StorageBackend for PostgresBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
-) -> Result<HashMap<String, CustomField>> {
+) -> Result<FxHashMap<String, CustomField>> {
let client = self
.pool
.get()
@@ -1909,7 +1910,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
-let mut map = HashMap::new();
+let mut map = FxHashMap::default();
for row in &rows {
let name: String = row.get("field_name");
let ft_str: String = row.get("field_type");
@@ -1988,8 +1989,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -2066,8 +2067,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -2089,8 +2090,8 @@ impl StorageBackend for PostgresBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
-let mut grouped_indices: std::collections::HashSet<usize> =
-std::collections::HashSet::new();
+let mut grouped_indices: rustc_hash::FxHashSet<usize> =
+rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@@ -2952,8 +2953,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -3365,8 +3366,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -3553,8 +3554,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -3623,8 +3624,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
-let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
-HashMap::new();
+let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@@ -4448,8 +4449,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
-let mut identifiers: std::collections::HashMap<String, Vec<String>> =
-std::collections::HashMap::new();
+let mut identifiers: FxHashMap<String, Vec<String>> = FxHashMap::default();
for r in id_rows {
let id_type: String = r.get(0);
let value: String = r.get(1);
@@ -7031,11 +7031,11 @@ impl StorageBackend for PostgresBackend {
let depth = depth.min(5); // Limit depth
let mut nodes = Vec::new();
let mut edges = Vec::new();
-let node_ids: std::collections::HashSet<String> =
+let node_ids: rustc_hash::FxHashSet<String> =
if let Some(center) = center_id {
// BFS to find connected nodes within depth
let mut frontier = vec![center.0.to_string()];
-let mut visited = std::collections::HashSet::new();
+let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center.0.to_string());
for _ in 0..depth {
@@ -7099,7 +7099,7 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
-let mut collected = std::collections::HashSet::new();
+let mut collected = rustc_hash::FxHashSet::default();
for row in rows {
let id: String = row.get(0);
collected.insert(id);
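The BFS shape used by both backends' graph queries, reduced to a sketch over an in-memory adjacency map (hypothetical data; the real code queries links per frontier from SQL):

```rust
use rustc_hash::{FxHashMap, FxHashSet};

fn main() {
    // node -> neighbors; stands in for the per-frontier SQL link queries.
    let adjacency: FxHashMap<&str, Vec<&str>> = FxHashMap::from_iter([
        ("center", vec!["a", "b"]),
        ("a", vec!["c"]),
    ]);

    let depth = 5; // the diff caps depth at 5
    let mut frontier = vec!["center"];
    let mut visited = FxHashSet::default();
    visited.insert("center");

    // Expand one hop per iteration; visited.insert() acts as both the
    // cycle guard and the membership test.
    for _ in 0..depth {
        let mut next = Vec::new();
        for node in frontier.drain(..) {
            for &neighbor in adjacency.get(node).into_iter().flatten() {
                if visited.insert(neighbor) {
                    next.push(neighbor);
                }
            }
        }
        if next.is_empty() {
            break;
        }
        frontier = next;
    }

    assert!(visited.contains("c")); // reachable within the depth limit
}
```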

View file

@@ -1,11 +1,11 @@
use std::{
-collections::HashMap,
path::{Path, PathBuf},
sync::{Arc, Mutex},
};
use chrono::{DateTime, NaiveDateTime, Utc};
use rusqlite::{Connection, Row, params};
+use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@@ -142,7 +142,7 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")?
.map(PathBuf::from),
-custom_fields: HashMap::new(), // loaded separately
+custom_fields: FxHashMap::default(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
@@ -358,7 +358,7 @@ fn load_user_profile_sync(
fn load_custom_fields_sync(
db: &Connection,
media_id: MediaId,
-) -> rusqlite::Result<HashMap<String, CustomField>> {
+) -> rusqlite::Result<FxHashMap<String, CustomField>> {
let mut stmt = db.prepare(
"SELECT field_name, field_type, field_value FROM custom_fields WHERE \
media_id = ?1",
@@ -372,7 +372,7 @@ fn load_custom_fields_sync(
value,
}))
})?;
-let mut map = HashMap::new();
+let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@@ -409,8 +409,8 @@ fn load_custom_fields_batch(
Ok((mid_str, name, ft_str, value))
})?;
-let mut fields_map: HashMap<String, HashMap<String, CustomField>> =
-HashMap::new();
+let mut fields_map: FxHashMap<String, FxHashMap<String, CustomField>> =
+FxHashMap::default();
for r in rows {
let (mid_str, name, ft_str, value) = r?;
fields_map
@@ -1762,7 +1762,7 @@ impl StorageBackend for SqliteBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
-) -> Result<HashMap<String, CustomField>> {
+) -> Result<FxHashMap<String, CustomField>> {
let conn = Arc::clone(&self.conn);
tokio::task::spawn_blocking(move || {
let map = {
@@ -1783,7 +1783,7 @@ impl StorageBackend for SqliteBackend {
}))
})?;
-let mut map = HashMap::new();
+let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@@ -2093,8 +2093,8 @@ impl StorageBackend for SqliteBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
-let mut grouped_indices: std::collections::HashSet<usize> =
-std::collections::HashSet::new();
+let mut grouped_indices: rustc_hash::FxHashSet<usize> =
+rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@@ -5265,8 +5265,8 @@ impl StorageBackend for SqliteBackend {
"SELECT identifier_type, identifier_value
FROM book_identifiers WHERE media_id = ?1",
)?;
-let mut identifiers: std::collections::HashMap<String, Vec<String>> =
-std::collections::HashMap::new();
+let mut identifiers: FxHashMap<String, Vec<String>> =
+FxHashMap::default();
for row in stmt.query_map([&media_id_str], |row| {
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
})? {
@@ -8336,13 +8336,13 @@ impl StorageBackend for SqliteBackend {
let conn = conn.lock().map_err(|e| PinakesError::Database(format!("connection mutex poisoned: {e}")))?;
let mut nodes = Vec::new();
let mut edges = Vec::new();
-let mut node_ids = std::collections::HashSet::new();
+let mut node_ids = rustc_hash::FxHashSet::default();
// Get nodes - either all markdown files or those connected to center
if let Some(center_id) = center_id_str {
// BFS to find connected nodes within depth
let mut frontier = vec![center_id.clone()];
-let mut visited = std::collections::HashSet::new();
+let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center_id);
for _ in 0..depth {

View file

@@ -1,12 +1,12 @@
//! Transcoding service for media files using `FFmpeg`.
use std::{
-collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
};
use chrono::{DateTime, Utc};
+use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tokio::sync::{RwLock, Semaphore};
use uuid::Uuid;
@@ -94,7 +94,7 @@ impl TranscodeStatus {
/// Service managing transcoding sessions and `FFmpeg` invocations.
pub struct TranscodeService {
pub config: TranscodingConfig,
-pub sessions: Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
+pub sessions: Arc<RwLock<FxHashMap<Uuid, TranscodeSession>>>,
semaphore: Arc<Semaphore>,
}
@@ -103,7 +103,7 @@ impl TranscodeService {
pub fn new(config: TranscodingConfig) -> Self {
let max_concurrent = config.max_concurrent.max(1);
Self {
-sessions: Arc::new(RwLock::new(HashMap::new())),
+sessions: Arc::new(RwLock::new(FxHashMap::default())),
semaphore: Arc::new(Semaphore::new(max_concurrent)),
config,
}
@@ -481,7 +481,7 @@ fn get_ffmpeg_args(
/// Run `FFmpeg` as a child process, parsing progress from stdout.
async fn run_ffmpeg(
args: &[String],
-sessions: &Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
+sessions: &Arc<RwLock<FxHashMap<Uuid, TranscodeSession>>>,
session_id: Uuid,
duration_secs: Option<f64>,
cancel: Arc<tokio::sync::Notify>,

View file

@@ -3,7 +3,7 @@
//! Handles file uploads, metadata extraction, and `MediaItem` creation
//! for files stored in managed content-addressable storage.
-use std::{collections::HashMap, path::Path};
+use std::path::Path;
use chrono::Utc;
use tokio::io::AsyncRead;
@@ -85,7 +85,7 @@ pub async fn process_upload<R: AsyncRead + Unpin>(
duration_secs: extracted.as_ref().and_then(|m| m.duration_secs),
description: extracted.as_ref().and_then(|m| m.description.clone()),
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: rustc_hash::FxHashMap::default(),
file_mtime: None,
date_taken: extracted.as_ref().and_then(|m| m.date_taken),
latitude: extracted.as_ref().and_then(|m| m.latitude),

View file

@@ -1,8 +1,7 @@
//! User management and authentication
-use std::collections::HashMap;
use chrono::{DateTime, Utc};
+use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@@ -78,7 +77,7 @@ pub struct UserPreferences {
pub auto_play: bool,
/// Custom preferences (extensible)
-pub custom: HashMap<String, serde_json::Value>,
+pub custom: FxHashMap<String, serde_json::Value>,
}
/// Library access permission

View file

@@ -3,13 +3,14 @@
// the test suite
#![allow(dead_code)]
-use std::{collections::HashMap, path::PathBuf, sync::Arc};
+use std::{path::PathBuf, sync::Arc};
use pinakes_core::{
media_type::{BuiltinMediaType, MediaType},
model::{ContentHash, MediaId, MediaItem, StorageMode},
storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend},
};
+use rustc_hash::FxHashMap;
use tempfile::TempDir;
use uuid::Uuid;
@@ -46,7 +47,7 @@ pub fn make_test_media(hash: &str) -> MediaItem {
duration_secs: Some(120.0),
description: None,
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@@ -83,7 +84,7 @@ pub fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
duration_secs: None,
description: None,
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@@ -121,7 +122,7 @@ pub fn make_test_markdown_item(id: MediaId) -> MediaItem {
duration_secs: None,
description: Some("Test markdown note".to_string()),
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,

View file

@@ -1,6 +1,5 @@
-use std::collections::HashMap;
use pinakes_core::{model::*, storage::StorageBackend};
+use rustc_hash::FxHashMap;
mod common;
use common::{make_test_media, setup};
@@ -28,7 +27,7 @@ async fn test_media_crud() {
duration_secs: None,
description: Some("A test file".to_string()),
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@@ -120,7 +119,7 @@ async fn test_tags() {
duration_secs: Some(180.0),
description: None,
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@@ -191,7 +190,7 @@ async fn test_collections() {
duration_secs: None,
description: None,
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@@ -252,7 +251,7 @@ async fn test_custom_fields() {
duration_secs: None,
description: None,
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@@ -334,7 +333,7 @@ async fn test_search() {
duration_secs: None,
description: None,
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@@ -479,7 +478,7 @@ async fn test_library_statistics_with_data() {
duration_secs: Some(120.0),
description: None,
thumbnail_path: None,
-custom_fields: HashMap::new(),
+custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,