treewide: replace std hashers with rustc_hash alternatives; fix clippy

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I766c36cb53d3d7f9e85b91a67c4131a66a6a6964
This commit is contained in:
raf 2026-03-19 22:34:30 +03:00
commit f831e58723
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
53 changed files with 343 additions and 394 deletions

BIN
Cargo.lock generated

Binary file not shown.

View file

@ -18,50 +18,28 @@ pinakes-plugin-api = { path = "./crates/pinakes-plugin-api" }
pinakes-ui = { path = "./crates/pinakes-ui" }
pinakes-tui = { path = "./crates/pinakes-tui" }
# Async runtime
tokio = { version = "1.49.0", features = ["full"] }
tokio-util = { version = "0.7.18", features = ["rt"] }
# Serialization
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
toml = "1.0.3"
# CLI argument parsing
clap = { version = "4.5.60", features = ["derive", "env"] }
# Date/time
chrono = { version = "0.4.44", features = ["serde"] }
# IDs
uuid = { version = "1.21.0", features = ["v7", "serde"] }
# Error handling
thiserror = "2.0.18"
anyhow = "1.0.102"
# Logging
tracing = "0.1.44"
tracing-subscriber = { version = "0.3.22", features = ["env-filter", "json"] }
# Hashing
blake3 = "1.8.3"
rustc-hash = "2.1.1"
# Cryptographic signatures (plugin verification)
ed25519-dalek = { version = "2.1.1", features = ["std"] }
# Metadata extraction
lofty = "0.23.2"
lopdf = "0.39.0"
epub = "2.1.5"
matroska = "0.30.0"
gray_matter = "0.3.2"
kamadak-exif = "0.6.1"
# Database - SQLite
rusqlite = { version = "=0.37.0", features = ["bundled", "column_decltype"] }
# Database - PostgreSQL
tokio-postgres = { version = "0.7.16", features = [
"with-uuid-1",
"with-chrono-0_4",
@ -71,44 +49,24 @@ deadpool-postgres = "0.14.1"
postgres-types = { version = "0.2.12", features = ["derive"] }
postgres-native-tls = "0.5.2"
native-tls = "0.2.18"
# Migrations
refinery = { version = "0.9.0", features = ["rusqlite", "tokio-postgres"] }
# Filesystem
walkdir = "2.5.0"
notify = { version = "8.2.0", features = ["macos_fsevent"] }
# Search parser
winnow = "0.7.14"
# HTTP server
axum = { version = "0.8.8", features = ["macros", "multipart"] }
axum-server = { version = "0.8.0" }
tower = "0.5.3"
tower-http = { version = "0.6.8", features = ["cors", "trace", "set-header"] }
governor = "0.10.4"
tower_governor = "0.8.0"
# HTTP client
reqwest = { version = "0.13.2", features = ["json", "query", "blocking"] }
url = "2.5"
# TUI
ratatui = "0.30.0"
crossterm = "0.29.0"
# Desktop/Web UI
dioxus = { version = "0.7.3", features = ["desktop", "router"] }
dioxus-core = { version = "0.7.3" }
# Async trait (dyn-compatible async methods)
async-trait = "0.1.89"
# Async utilities
futures = "0.3.32"
# Image processing (thumbnails)
image = { version = "0.25.9", default-features = false, features = [
"jpeg",
"png",
@ -117,15 +75,9 @@ image = { version = "0.25.9", default-features = false, features = [
"tiff",
"bmp",
] }
# Markdown rendering
pulldown-cmark = "0.13.1"
ammonia = "4.1.2"
# Password hashing
argon2 = { version = "0.5.3", features = ["std"] }
# Misc
mime_guess = "2.0.5"
regex = "1.12.3"
dioxus-free-icons = { version = "0.10.0", features = ["font-awesome-solid"] }
@ -137,12 +89,8 @@ urlencoding = "2.1.3"
image_hasher = "3.1.1"
percent-encoding = "2.3.2"
http = "1.4.0"
# WASM runtime for plugins
wasmtime = { version = "42.0.1", features = ["component-model"] }
wit-bindgen = "0.53.1"
# Misc
tempfile = "3.26.0"
# See:

View file

@ -42,6 +42,7 @@ regex = { workspace = true }
moka = { workspace = true }
urlencoding = { workspace = true }
image_hasher = { workspace = true }
rustc-hash = { workspace = true }
# Plugin system
pinakes-plugin-api.workspace = true

View file

@ -1494,6 +1494,8 @@ impl Default for Config {
#[cfg(test)]
mod tests {
use rustc_hash::FxHashMap;
use super::*;
fn test_config_with_concurrency(concurrency: usize) -> Config {
@ -1549,7 +1551,7 @@ mod tests {
// HashMap lookup. This avoids unsafe std::env::set_var and is
// thread-safe for parallel test execution.
fn test_lookup<'a>(
vars: &'a std::collections::HashMap<&str, &str>,
vars: &'a FxHashMap<&str, &str>,
) -> impl Fn(&str) -> crate::error::Result<String> + 'a {
move |name| {
vars
@ -1565,24 +1567,21 @@ mod tests {
#[test]
fn test_expand_env_var_simple() {
let vars =
std::collections::HashMap::from([("TEST_VAR_SIMPLE", "test_value")]);
let vars = FxHashMap::from([("TEST_VAR_SIMPLE", "test_value")]);
let result = expand_env_vars("$TEST_VAR_SIMPLE", test_lookup(&vars));
assert_eq!(result.unwrap(), "test_value");
}
#[test]
fn test_expand_env_var_braces() {
let vars =
std::collections::HashMap::from([("TEST_VAR_BRACES", "test_value")]);
let vars = FxHashMap::from([("TEST_VAR_BRACES", "test_value")]);
let result = expand_env_vars("${TEST_VAR_BRACES}", test_lookup(&vars));
assert_eq!(result.unwrap(), "test_value");
}
#[test]
fn test_expand_env_var_embedded() {
let vars =
std::collections::HashMap::from([("TEST_VAR_EMBEDDED", "value")]);
let vars = FxHashMap::from([("TEST_VAR_EMBEDDED", "value")]);
let result =
expand_env_vars("prefix_${TEST_VAR_EMBEDDED}_suffix", test_lookup(&vars));
assert_eq!(result.unwrap(), "prefix_value_suffix");
@ -1590,15 +1589,14 @@ mod tests {
#[test]
fn test_expand_env_var_multiple() {
let vars =
std::collections::HashMap::from([("VAR1", "value1"), ("VAR2", "value2")]);
let vars = FxHashMap::from([("VAR1", "value1"), ("VAR2", "value2")]);
let result = expand_env_vars("${VAR1}_${VAR2}", test_lookup(&vars));
assert_eq!(result.unwrap(), "value1_value2");
}
#[test]
fn test_expand_env_var_missing() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("${NONEXISTENT_VAR}", test_lookup(&vars));
assert!(result.is_err());
assert!(
@ -1611,7 +1609,7 @@ mod tests {
#[test]
fn test_expand_env_var_empty_name() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("${}", test_lookup(&vars));
assert!(result.is_err());
assert!(
@ -1624,31 +1622,28 @@ mod tests {
#[test]
fn test_expand_env_var_escaped() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("\\$NOT_A_VAR", test_lookup(&vars));
assert_eq!(result.unwrap(), "$NOT_A_VAR");
}
#[test]
fn test_expand_env_var_no_vars() {
let vars = std::collections::HashMap::new();
let vars = FxHashMap::default();
let result = expand_env_vars("plain_text", test_lookup(&vars));
assert_eq!(result.unwrap(), "plain_text");
}
#[test]
fn test_expand_env_var_underscore() {
let vars = std::collections::HashMap::from([("TEST_VAR_NAME", "value")]);
let vars = FxHashMap::from([("TEST_VAR_NAME", "value")]);
let result = expand_env_vars("$TEST_VAR_NAME", test_lookup(&vars));
assert_eq!(result.unwrap(), "value");
}
#[test]
fn test_expand_env_var_mixed_syntax() {
let vars = std::collections::HashMap::from([
("VAR1_MIXED", "v1"),
("VAR2_MIXED", "v2"),
]);
let vars = FxHashMap::from([("VAR1_MIXED", "v1"), ("VAR2_MIXED", "v2")]);
let result =
expand_env_vars("$VAR1_MIXED and ${VAR2_MIXED}", test_lookup(&vars));
assert_eq!(result.unwrap(), "v1 and v2");

View file

@ -254,7 +254,7 @@ pub async fn import_file_with_options(
duration_secs: extracted.duration_secs,
description: extracted.description,
thumbnail_path: thumb_path,
custom_fields: std::collections::HashMap::new(),
custom_fields: rustc_hash::FxHashMap::default(),
file_mtime: current_mtime,
// Photo-specific metadata from extraction

View file

@ -1,8 +1,6 @@
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
@ -96,8 +94,8 @@ pub async fn detect_orphans(
let mut orphaned_ids = Vec::new();
// Build hash index: ContentHash -> Vec<(MediaId, PathBuf)>
let mut hash_index: HashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
HashMap::new();
let mut hash_index: FxHashMap<ContentHash, Vec<(MediaId, PathBuf)>> =
FxHashMap::default();
for (id, path, hash) in &media_paths {
hash_index
.entry(hash.clone())
@ -138,12 +136,12 @@ pub async fn detect_orphans(
fn detect_moved_files(
orphaned_ids: &[MediaId],
media_paths: &[(MediaId, PathBuf, ContentHash)],
hash_index: &HashMap<ContentHash, Vec<(MediaId, PathBuf)>>,
hash_index: &FxHashMap<ContentHash, Vec<(MediaId, PathBuf)>>,
) -> Vec<(MediaId, PathBuf, PathBuf)> {
let mut moved = Vec::new();
// Build lookup map for orphaned items: MediaId -> (PathBuf, ContentHash)
let orphaned_map: HashMap<MediaId, (PathBuf, ContentHash)> = media_paths
let orphaned_map: FxHashMap<MediaId, (PathBuf, ContentHash)> = media_paths
.iter()
.filter(|(id, ..)| orphaned_ids.contains(id))
.map(|(id, path, hash)| (*id, (path.clone(), hash.clone())))
@ -184,7 +182,7 @@ async fn detect_untracked_files(
}
// Build set of tracked paths for fast lookup
let tracked_paths: HashSet<PathBuf> = media_paths
let tracked_paths: FxHashSet<PathBuf> = media_paths
.iter()
.map(|(_, path, _)| path.clone())
.collect();
@ -198,7 +196,7 @@ async fn detect_untracked_files(
];
// Walk filesystem for each root in parallel (limit concurrency to 4)
let mut filesystem_paths = HashSet::new();
let mut filesystem_paths = FxHashSet::default();
let mut tasks = tokio::task::JoinSet::new();
for root in roots {
@ -322,8 +320,7 @@ pub async fn verify_integrity(
let paths_to_check: Vec<(MediaId, PathBuf, ContentHash)> =
if let Some(ids) = media_ids {
let id_set: std::collections::HashSet<MediaId> =
ids.iter().copied().collect();
let id_set: FxHashSet<MediaId> = ids.iter().copied().collect();
all_paths
.into_iter()
.filter(|(id, ..)| id_set.contains(id))
@ -383,7 +380,7 @@ pub async fn cleanup_orphaned_thumbnails(
thumbnail_dir: &Path,
) -> Result<usize> {
let media_paths = storage.list_media_paths().await?;
let known_ids: std::collections::HashSet<String> = media_paths
let known_ids: FxHashSet<String> = media_paths
.iter()
.map(|(id, ..)| id.0.to_string())
.collect();

View file

@ -1,6 +1,7 @@
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tokio::sync::{RwLock, mpsc};
@ -71,8 +72,8 @@ struct WorkerItem {
}
pub struct JobQueue {
jobs: Arc<RwLock<HashMap<Uuid, Job>>>,
cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>>,
jobs: Arc<RwLock<FxHashMap<Uuid, Job>>>,
cancellations: Arc<RwLock<FxHashMap<Uuid, CancellationToken>>>,
tx: mpsc::Sender<WorkerItem>,
}
@ -94,7 +95,7 @@ impl JobQueue {
Uuid,
JobKind,
CancellationToken,
Arc<RwLock<HashMap<Uuid, Job>>>,
Arc<RwLock<FxHashMap<Uuid, Job>>>,
) -> tokio::task::JoinHandle<()>
+ Send
+ Sync
@ -102,10 +103,10 @@ impl JobQueue {
{
let (tx, rx) = mpsc::channel::<WorkerItem>(256);
let rx = Arc::new(tokio::sync::Mutex::new(rx));
let jobs: Arc<RwLock<HashMap<Uuid, Job>>> =
Arc::new(RwLock::new(HashMap::new()));
let cancellations: Arc<RwLock<HashMap<Uuid, CancellationToken>>> =
Arc::new(RwLock::new(HashMap::new()));
let jobs: Arc<RwLock<FxHashMap<Uuid, Job>>> =
Arc::new(RwLock::new(FxHashMap::default()));
let cancellations: Arc<RwLock<FxHashMap<Uuid, CancellationToken>>> =
Arc::new(RwLock::new(FxHashMap::default()));
let executor = Arc::new(executor);
@ -261,7 +262,7 @@ impl JobQueue {
/// Update a job's progress. Called by executors.
pub async fn update_progress(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
progress: f32,
message: String,
@ -275,7 +276,7 @@ impl JobQueue {
/// Mark a job as completed.
pub async fn complete(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
result: Value,
) {
@ -288,7 +289,7 @@ impl JobQueue {
/// Mark a job as failed.
pub async fn fail(
jobs: &Arc<RwLock<HashMap<Uuid, Job>>>,
jobs: &Arc<RwLock<FxHashMap<Uuid, Job>>>,
id: Uuid,
error: String,
) {

View file

@ -352,7 +352,7 @@ pub fn resolve_link_candidates(
}
// 4. Remove duplicates while preserving order
let mut seen = std::collections::HashSet::new();
let mut seen = rustc_hash::FxHashSet::default();
candidates.retain(|p| seen.insert(p.clone()));
candidates

View file

@ -1,8 +1,7 @@
//! Media type registry for managing both built-in and custom media types
use std::collections::HashMap;
use anyhow::{Result, anyhow};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use super::MediaCategory;
@ -33,10 +32,10 @@ pub struct MediaTypeDescriptor {
#[derive(Debug, Clone)]
pub struct MediaTypeRegistry {
/// Map of media type ID to descriptor
types: HashMap<String, MediaTypeDescriptor>,
types: FxHashMap<String, MediaTypeDescriptor>,
/// Map of extension to media type ID
extension_map: HashMap<String, String>,
extension_map: FxHashMap<String, String>,
}
impl MediaTypeRegistry {
@ -44,8 +43,8 @@ impl MediaTypeRegistry {
#[must_use]
pub fn new() -> Self {
Self {
types: HashMap::new(),
extension_map: HashMap::new(),
types: FxHashMap::default(),
extension_map: FxHashMap::default(),
}
}

View file

@ -190,7 +190,7 @@ fn extract_epub(path: &Path) -> Result<ExtractedMetadata> {
book_meta.authors = authors;
// Extract ISBNs from identifiers
let mut identifiers = std::collections::HashMap::new();
let mut identifiers = rustc_hash::FxHashMap::default();
for item in &doc.metadata {
if item.property == "identifier" || item.property == "dc:identifier" {
// Try to get scheme from refinements

View file

@ -4,7 +4,9 @@ pub mod image;
pub mod markdown;
pub mod video;
use std::{collections::HashMap, path::Path};
use std::path::Path;
use rustc_hash::FxHashMap;
use crate::{error::Result, media_type::MediaType, model::BookMetadata};
@ -17,7 +19,7 @@ pub struct ExtractedMetadata {
pub year: Option<i32>,
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub extra: HashMap<String, String>,
pub extra: FxHashMap<String, String>,
pub book_metadata: Option<BookMetadata>,
// Photo-specific metadata

View file

@ -1,6 +1,7 @@
use std::{collections::HashMap, fmt, path::PathBuf};
use std::{fmt, path::PathBuf};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@ -129,7 +130,7 @@ pub struct MediaItem {
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub thumbnail_path: Option<PathBuf>,
pub custom_fields: HashMap<String, CustomField>,
pub custom_fields: FxHashMap<String, CustomField>,
/// File modification time (Unix timestamp in seconds), used for incremental
/// scanning
pub file_mtime: Option<i64>,
@ -434,7 +435,7 @@ pub struct BookMetadata {
pub series_index: Option<f64>,
pub format: Option<String>,
pub authors: Vec<AuthorInfo>,
pub identifiers: HashMap<String, Vec<String>>,
pub identifiers: FxHashMap<String, Vec<String>>,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
@ -454,7 +455,7 @@ impl Default for BookMetadata {
series_index: None,
format: None,
authors: Vec::new(),
identifiers: HashMap::new(),
identifiers: FxHashMap::default(),
created_at: now,
updated_at: now,
}

View file

@ -186,17 +186,19 @@ impl PluginManager {
fn resolve_load_order(
manifests: &[pinakes_plugin_api::PluginManifest],
) -> Vec<pinakes_plugin_api::PluginManifest> {
use std::collections::{HashMap, HashSet, VecDeque};
use std::collections::VecDeque;
use rustc_hash::{FxHashMap, FxHashSet};
// Index manifests by name for O(1) lookup
let by_name: HashMap<&str, usize> = manifests
let by_name: FxHashMap<&str, usize> = manifests
.iter()
.enumerate()
.map(|(i, m)| (m.plugin.name.as_str(), i))
.collect();
// Check for missing dependencies and warn early
let known: HashSet<&str> = by_name.keys().copied().collect();
let known: FxHashSet<&str> = by_name.keys().copied().collect();
for manifest in manifests {
for dep in &manifest.plugin.dependencies {
if !known.contains(dep.as_str()) {
@ -250,7 +252,7 @@ impl PluginManager {
// Anything not in `result` is part of a cycle or has a missing dep
if result.len() < manifests.len() {
let loaded: HashSet<&str> =
let loaded: FxHashSet<&str> =
result.iter().map(|m| m.plugin.name.as_str()).collect();
for manifest in manifests {
if !loaded.contains(manifest.plugin.name.as_str()) {
@ -669,9 +671,9 @@ impl PluginManager {
/// none declare theme extensions.
pub async fn list_ui_theme_extensions(
&self,
) -> std::collections::HashMap<String, String> {
) -> rustc_hash::FxHashMap<String, String> {
let registry = self.registry.read().await;
let mut merged = std::collections::HashMap::new();
let mut merged = rustc_hash::FxHashMap::default();
for plugin in registry.list_all() {
if !plugin.enabled {
continue;

View file

@ -13,12 +13,12 @@
//! priority 100. A circuit breaker disables plugins after consecutive failures.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
time::{Duration, Instant},
};
use rustc_hash::FxHashMap;
use tokio::sync::RwLock;
use tracing::{debug, info, warn};
@ -75,22 +75,22 @@ struct CachedCapabilities {
/// Keyed by `(kind, plugin_id)` -> list of supported type strings.
/// Separate entries for each kind avoid collisions when a plugin
/// implements both `metadata_extractor` and `thumbnail_generator`.
supported_types: HashMap<(String, String), Vec<String>>,
supported_types: FxHashMap<(String, String), Vec<String>>,
/// `plugin_id` -> list of interested event type strings
interested_events: HashMap<String, Vec<String>>,
interested_events: FxHashMap<String, Vec<String>>,
/// `plugin_id` -> list of media type definitions (for `MediaTypeProvider`)
media_type_definitions: HashMap<String, Vec<PluginMediaTypeDefinition>>,
media_type_definitions: FxHashMap<String, Vec<PluginMediaTypeDefinition>>,
/// `plugin_id` -> list of theme definitions (for `ThemeProvider`)
theme_definitions: HashMap<String, Vec<PluginThemeDefinition>>,
theme_definitions: FxHashMap<String, Vec<PluginThemeDefinition>>,
}
impl CachedCapabilities {
fn new() -> Self {
Self {
supported_types: HashMap::new(),
interested_events: HashMap::new(),
media_type_definitions: HashMap::new(),
theme_definitions: HashMap::new(),
supported_types: FxHashMap::default(),
interested_events: FxHashMap::default(),
media_type_definitions: FxHashMap::default(),
theme_definitions: FxHashMap::default(),
}
}
}
@ -101,7 +101,7 @@ pub struct PluginPipeline {
manager: Arc<PluginManager>,
timeouts: PluginTimeoutConfig,
max_consecutive_failures: u32,
health: RwLock<HashMap<String, PluginHealth>>,
health: RwLock<FxHashMap<String, PluginHealth>>,
capabilities: RwLock<CachedCapabilities>,
}
@ -117,7 +117,7 @@ impl PluginPipeline {
manager,
timeouts,
max_consecutive_failures,
health: RwLock::new(HashMap::new()),
health: RwLock::new(FxHashMap::default()),
capabilities: RwLock::new(CachedCapabilities::new()),
}
}
@ -826,7 +826,7 @@ impl PluginPipeline {
}
// Deduplicate by ID, keeping the highest-scoring entry
let mut seen: HashMap<String, usize> = HashMap::new();
let mut seen: FxHashMap<String, usize> = FxHashMap::default();
let mut deduped: Vec<SearchResultItem> = Vec::new();
for item in all_results {
if let Some(&idx) = seen.get(&item.id) {
@ -1363,7 +1363,7 @@ mod tests {
year: Some(2024),
duration_secs: None,
description: None,
extra: HashMap::new(),
extra: FxHashMap::default(),
};
merge_metadata(&mut base, &resp);
@ -1379,7 +1379,7 @@ mod tests {
let mut base = ExtractedMetadata::default();
base.extra.insert("key1".to_string(), "val1".to_string());
let mut extra = HashMap::new();
let mut extra = FxHashMap::default();
extra.insert("key2".to_string(), "val2".to_string());
extra.insert("key1".to_string(), "overwritten".to_string());

View file

@ -1,9 +1,10 @@
//! Plugin registry for managing loaded plugins
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use anyhow::{Result, anyhow};
use pinakes_plugin_api::{PluginManifest, PluginMetadata};
use rustc_hash::FxHashMap;
use super::runtime::WasmPlugin;
@ -21,7 +22,7 @@ pub struct RegisteredPlugin {
/// Plugin registry maintains the state of all loaded plugins
pub struct PluginRegistry {
/// Map of plugin ID to registered plugin
plugins: HashMap<String, RegisteredPlugin>,
plugins: FxHashMap<String, RegisteredPlugin>,
}
impl PluginRegistry {
@ -29,7 +30,7 @@ impl PluginRegistry {
#[must_use]
pub fn new() -> Self {
Self {
plugins: HashMap::new(),
plugins: FxHashMap::default(),
}
}
@ -156,9 +157,8 @@ impl Default for PluginRegistry {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use pinakes_plugin_api::{Capabilities, manifest::ManifestCapabilities};
use rustc_hash::FxHashMap;
use super::*;
@ -181,7 +181,7 @@ mod tests {
priority: 0,
},
capabilities: ManifestCapabilities::default(),
config: HashMap::new(),
config: FxHashMap::default(),
ui: Default::default(),
};

View file

@ -4,8 +4,9 @@
//! Requests are serialized to JSON, passed to the plugin, and responses
//! are deserialized from JSON written by the plugin via `host_set_result`.
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
/// Request to check if a plugin can handle a file
@ -55,7 +56,7 @@ pub struct ExtractMetadataResponse {
#[serde(default)]
pub description: Option<String>,
#[serde(default)]
pub extra: HashMap<String, String>,
pub extra: FxHashMap<String, String>,
}
/// Request to generate a thumbnail
@ -140,7 +141,7 @@ pub struct PluginThemeDefinition {
#[derive(Debug, Clone, Deserialize)]
pub struct LoadThemeResponse {
pub css: Option<String>,
pub colors: HashMap<String, String>,
pub colors: FxHashMap<String, String>,
}
#[cfg(test)]

View file

@ -272,7 +272,7 @@ impl Default for WasmPlugin {
context: PluginContext {
data_dir: std::env::temp_dir(),
cache_dir: std::env::temp_dir(),
config: std::collections::HashMap::new(),
config: Default::default(),
capabilities: Default::default(),
},
}
@ -774,8 +774,6 @@ impl HostFunctions {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use pinakes_plugin_api::PluginContext;
use super::*;
@ -795,7 +793,7 @@ mod tests {
let context = PluginContext {
data_dir: "/tmp/data".into(),
cache_dir: "/tmp/cache".into(),
config: HashMap::new(),
config: Default::default(),
capabilities,
};

View file

@ -5,6 +5,7 @@ pub mod sqlite;
use std::{path::PathBuf, sync::Arc};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@ -221,7 +222,7 @@ pub trait StorageBackend: Send + Sync + 'static {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<std::collections::HashMap<String, CustomField>>;
) -> Result<FxHashMap<String, CustomField>>;
/// Delete a custom field from a media item by name.
async fn delete_custom_field(

View file

@ -1,9 +1,10 @@
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use chrono::Utc;
use deadpool_postgres::{Config as PoolConfig, Pool, Runtime};
use native_tls::TlsConnector;
use postgres_native_tls::MakeTlsConnector;
use rustc_hash::FxHashMap;
use tokio_postgres::{NoTls, Row, types::ToSql};
use uuid::Uuid;
@ -215,7 +216,7 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")
.map(PathBuf::from),
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: row.get("file_mtime"),
// Photo-specific fields
@ -922,8 +923,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -1596,8 +1597,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -1759,8 +1760,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -1894,7 +1895,7 @@ impl StorageBackend for PostgresBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<HashMap<String, CustomField>> {
) -> Result<FxHashMap<String, CustomField>> {
let client = self
.pool
.get()
@ -1909,7 +1910,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for row in &rows {
let name: String = row.get("field_name");
let ft_str: String = row.get("field_type");
@ -1988,8 +1989,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -2066,8 +2067,8 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -2089,8 +2090,8 @@ impl StorageBackend for PostgresBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
let mut grouped_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut grouped_indices: rustc_hash::FxHashSet<usize> =
rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@ -2952,8 +2953,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -3365,8 +3366,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -3553,8 +3554,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -3623,8 +3624,8 @@ impl StorageBackend for PostgresBackend {
&[&ids],
)
.await?;
let mut cf_map: HashMap<Uuid, HashMap<String, CustomField>> =
HashMap::new();
let mut cf_map: FxHashMap<Uuid, FxHashMap<String, CustomField>> =
FxHashMap::default();
for row in &cf_rows {
let mid: Uuid = row.get("media_id");
let name: String = row.get("field_name");
@ -4448,8 +4449,7 @@ impl StorageBackend for PostgresBackend {
)
.await?;
let mut identifiers: std::collections::HashMap<String, Vec<String>> =
std::collections::HashMap::new();
let mut identifiers: FxHashMap<String, Vec<String>> = FxHashMap::default();
for r in id_rows {
let id_type: String = r.get(0);
let value: String = r.get(1);
@ -7031,11 +7031,11 @@ impl StorageBackend for PostgresBackend {
let depth = depth.min(5); // Limit depth
let mut nodes = Vec::new();
let mut edges = Vec::new();
let node_ids: std::collections::HashSet<String> =
let node_ids: rustc_hash::FxHashSet<String> =
if let Some(center) = center_id {
// BFS to find connected nodes within depth
let mut frontier = vec![center.0.to_string()];
let mut visited = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center.0.to_string());
for _ in 0..depth {
@ -7099,7 +7099,7 @@ impl StorageBackend for PostgresBackend {
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let mut collected = std::collections::HashSet::new();
let mut collected = rustc_hash::FxHashSet::default();
for row in rows {
let id: String = row.get(0);
collected.insert(id);

View file

@ -1,11 +1,11 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::{Arc, Mutex},
};
use chrono::{DateTime, NaiveDateTime, Utc};
use rusqlite::{Connection, Row, params};
use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@ -142,7 +142,7 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
thumbnail_path: row
.get::<_, Option<String>>("thumbnail_path")?
.map(PathBuf::from),
custom_fields: HashMap::new(), // loaded separately
custom_fields: FxHashMap::default(), // loaded separately
// file_mtime may not be present in all queries, so handle gracefully
file_mtime: row.get::<_, Option<i64>>("file_mtime").unwrap_or(None),
@ -358,7 +358,7 @@ fn load_user_profile_sync(
fn load_custom_fields_sync(
db: &Connection,
media_id: MediaId,
) -> rusqlite::Result<HashMap<String, CustomField>> {
) -> rusqlite::Result<FxHashMap<String, CustomField>> {
let mut stmt = db.prepare(
"SELECT field_name, field_type, field_value FROM custom_fields WHERE \
media_id = ?1",
@ -372,7 +372,7 @@ fn load_custom_fields_sync(
value,
}))
})?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@ -409,8 +409,8 @@ fn load_custom_fields_batch(
Ok((mid_str, name, ft_str, value))
})?;
let mut fields_map: HashMap<String, HashMap<String, CustomField>> =
HashMap::new();
let mut fields_map: FxHashMap<String, FxHashMap<String, CustomField>> =
FxHashMap::default();
for r in rows {
let (mid_str, name, ft_str, value) = r?;
fields_map
@ -1762,7 +1762,7 @@ impl StorageBackend for SqliteBackend {
async fn get_custom_fields(
&self,
media_id: MediaId,
) -> Result<HashMap<String, CustomField>> {
) -> Result<FxHashMap<String, CustomField>> {
let conn = Arc::clone(&self.conn);
tokio::task::spawn_blocking(move || {
let map = {
@ -1783,7 +1783,7 @@ impl StorageBackend for SqliteBackend {
}))
})?;
let mut map = HashMap::new();
let mut map = FxHashMap::default();
for r in rows {
let (name, field) = r?;
map.insert(name, field);
@ -2093,8 +2093,8 @@ impl StorageBackend for SqliteBackend {
// Compare each pair and build groups
let mut groups: Vec<Vec<MediaItem>> = Vec::new();
let mut grouped_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut grouped_indices: rustc_hash::FxHashSet<usize> =
rustc_hash::FxHashSet::default();
for i in 0..items.len() {
if grouped_indices.contains(&i) {
@ -5265,8 +5265,8 @@ impl StorageBackend for SqliteBackend {
"SELECT identifier_type, identifier_value
FROM book_identifiers WHERE media_id = ?1",
)?;
let mut identifiers: std::collections::HashMap<String, Vec<String>> =
std::collections::HashMap::new();
let mut identifiers: FxHashMap<String, Vec<String>> =
FxHashMap::default();
for row in stmt.query_map([&media_id_str], |row| {
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
})? {
@ -8336,13 +8336,13 @@ impl StorageBackend for SqliteBackend {
let conn = conn.lock().map_err(|e| PinakesError::Database(format!("connection mutex poisoned: {e}")))?;
let mut nodes = Vec::new();
let mut edges = Vec::new();
let mut node_ids = std::collections::HashSet::new();
let mut node_ids = rustc_hash::FxHashSet::default();
// Get nodes - either all markdown files or those connected to center
if let Some(center_id) = center_id_str {
// BFS to find connected nodes within depth
let mut frontier = vec![center_id.clone()];
let mut visited = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
visited.insert(center_id);
for _ in 0..depth {

View file

@ -1,12 +1,12 @@
//! Transcoding service for media files using `FFmpeg`.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tokio::sync::{RwLock, Semaphore};
use uuid::Uuid;
@ -94,7 +94,7 @@ impl TranscodeStatus {
/// Service managing transcoding sessions and `FFmpeg` invocations.
pub struct TranscodeService {
pub config: TranscodingConfig,
pub sessions: Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
pub sessions: Arc<RwLock<FxHashMap<Uuid, TranscodeSession>>>,
semaphore: Arc<Semaphore>,
}
@ -103,7 +103,7 @@ impl TranscodeService {
pub fn new(config: TranscodingConfig) -> Self {
let max_concurrent = config.max_concurrent.max(1);
Self {
sessions: Arc::new(RwLock::new(HashMap::new())),
sessions: Arc::new(RwLock::new(FxHashMap::default())),
semaphore: Arc::new(Semaphore::new(max_concurrent)),
config,
}
@ -481,7 +481,7 @@ fn get_ffmpeg_args(
/// Run `FFmpeg` as a child process, parsing progress from stdout.
async fn run_ffmpeg(
args: &[String],
sessions: &Arc<RwLock<HashMap<Uuid, TranscodeSession>>>,
sessions: &Arc<RwLock<FxHashMap<Uuid, TranscodeSession>>>,
session_id: Uuid,
duration_secs: Option<f64>,
cancel: Arc<tokio::sync::Notify>,

View file

@ -3,7 +3,7 @@
//! Handles file uploads, metadata extraction, and `MediaItem` creation
//! for files stored in managed content-addressable storage.
use std::{collections::HashMap, path::Path};
use std::path::Path;
use chrono::Utc;
use tokio::io::AsyncRead;
@ -85,7 +85,7 @@ pub async fn process_upload<R: AsyncRead + Unpin>(
duration_secs: extracted.as_ref().and_then(|m| m.duration_secs),
description: extracted.as_ref().and_then(|m| m.description.clone()),
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: rustc_hash::FxHashMap::default(),
file_mtime: None,
date_taken: extracted.as_ref().and_then(|m| m.date_taken),
latitude: extracted.as_ref().and_then(|m| m.latitude),

View file

@ -1,8 +1,7 @@
//! User management and authentication
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@ -78,7 +77,7 @@ pub struct UserPreferences {
pub auto_play: bool,
/// Custom preferences (extensible)
pub custom: HashMap<String, serde_json::Value>,
pub custom: FxHashMap<String, serde_json::Value>,
}
/// Library access permission

View file

@ -3,13 +3,14 @@
// the test suite
#![allow(dead_code)]
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use std::{path::PathBuf, sync::Arc};
use pinakes_core::{
media_type::{BuiltinMediaType, MediaType},
model::{ContentHash, MediaId, MediaItem, StorageMode},
storage::{DynStorageBackend, StorageBackend, sqlite::SqliteBackend},
};
use rustc_hash::FxHashMap;
use tempfile::TempDir;
use uuid::Uuid;
@ -46,7 +47,7 @@ pub fn make_test_media(hash: &str) -> MediaItem {
duration_secs: Some(120.0),
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -83,7 +84,7 @@ pub fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -121,7 +122,7 @@ pub fn make_test_markdown_item(id: MediaId) -> MediaItem {
duration_secs: None,
description: Some("Test markdown note".to_string()),
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,

View file

@ -1,6 +1,5 @@
use std::collections::HashMap;
use pinakes_core::{model::*, storage::StorageBackend};
use rustc_hash::FxHashMap;
mod common;
use common::{make_test_media, setup};
@ -28,7 +27,7 @@ async fn test_media_crud() {
duration_secs: None,
description: Some("A test file".to_string()),
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -120,7 +119,7 @@ async fn test_tags() {
duration_secs: Some(180.0),
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -191,7 +190,7 @@ async fn test_collections() {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -252,7 +251,7 @@ async fn test_custom_fields() {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -334,7 +333,7 @@ async fn test_search() {
duration_secs: None,
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,
@ -479,7 +478,7 @@ async fn test_library_statistics_with_data() {
duration_secs: Some(120.0),
description: None,
thumbnail_path: None,
custom_fields: HashMap::new(),
custom_fields: FxHashMap::default(),
file_mtime: None,
date_taken: None,
latitude: None,

View file

@ -19,6 +19,7 @@ toml = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
mime_guess = { workspace = true }
rustc-hash = { workspace = true }
# WASM bridge types
wit-bindgen = { workspace = true, optional = true }

View file

@ -4,12 +4,10 @@
//! Plugins can extend Pinakes by implementing one or more of the provided
//! traits.
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use async_trait::async_trait;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
@ -74,7 +72,7 @@ pub struct PluginContext {
pub cache_dir: PathBuf,
/// Plugin configuration from manifest
pub config: HashMap<String, serde_json::Value>,
pub config: FxHashMap<String, serde_json::Value>,
/// Capabilities granted to the plugin
pub capabilities: Capabilities,
@ -160,7 +158,7 @@ pub struct PluginMetadata {
pub struct HealthStatus {
pub healthy: bool,
pub message: Option<String>,
pub metrics: HashMap<String, f64>,
pub metrics: FxHashMap<String, f64>,
}
/// Trait for plugins that provide custom media type support
@ -227,7 +225,7 @@ pub struct ExtractedMetadata {
pub bitrate_kbps: Option<u32>,
/// Custom metadata fields specific to this file type
pub custom_fields: HashMap<String, serde_json::Value>,
pub custom_fields: FxHashMap<String, serde_json::Value>,
/// Tags extracted from the file
pub tags: Vec<String>,
@ -301,14 +299,14 @@ pub struct SearchIndexItem {
pub content: Option<String>,
pub tags: Vec<String>,
pub media_type: String,
pub metadata: HashMap<String, serde_json::Value>,
pub metadata: FxHashMap<String, serde_json::Value>,
}
/// Search query
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchQuery {
pub query_text: String,
pub filters: HashMap<String, serde_json::Value>,
pub filters: FxHashMap<String, serde_json::Value>,
pub limit: usize,
pub offset: usize,
}
@ -360,7 +358,7 @@ pub enum EventType {
pub struct Event {
pub event_type: EventType,
pub timestamp: String,
pub data: HashMap<String, serde_json::Value>,
pub data: FxHashMap<String, serde_json::Value>,
}
/// Trait for plugins that provide UI themes
@ -387,7 +385,7 @@ pub struct ThemeDefinition {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Theme {
pub id: String,
pub colors: HashMap<String, String>,
pub fonts: HashMap<String, String>,
pub colors: FxHashMap<String, String>,
pub fonts: FxHashMap<String, String>,
pub custom_css: Option<String>,
}

View file

@ -1,7 +1,8 @@
//! Plugin manifest parsing and validation
use std::{collections::HashMap, path::Path};
use std::path::Path;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
@ -23,7 +24,7 @@ pub struct PluginManifest {
pub capabilities: ManifestCapabilities,
#[serde(default)]
pub config: HashMap<String, toml::Value>,
pub config: FxHashMap<String, toml::Value>,
/// UI pages provided by this plugin
#[serde(default)]
@ -49,8 +50,8 @@ pub struct UiSection {
/// CSS custom property overrides provided by this plugin.
/// Keys are property names (e.g. `--accent-color`), values are CSS values.
/// The host applies these to `document.documentElement` on startup.
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub theme_extensions: HashMap<String, String>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub theme_extensions: FxHashMap<String, String>,
}
impl UiSection {
@ -709,7 +710,7 @@ gap = 16
"/api/v1/media".to_string(),
"/api/plugins/my-plugin/data".to_string(),
],
theme_extensions: HashMap::new(),
theme_extensions: FxHashMap::default(),
};
assert!(section.validate().is_ok());
}
@ -720,7 +721,7 @@ gap = 16
pages: vec![],
widgets: vec![],
required_endpoints: vec!["/not-api/something".to_string()],
theme_extensions: HashMap::new(),
theme_extensions: FxHashMap::default(),
};
assert!(section.validate().is_err());
}
@ -731,7 +732,7 @@ gap = 16
pages: vec![],
widgets: vec![],
required_endpoints: vec!["/api/ok".to_string(), "no-slash".to_string()],
theme_extensions: HashMap::new(),
theme_extensions: FxHashMap::default(),
};
let err = section.validate().unwrap_err();
assert!(

View file

@ -49,8 +49,7 @@
//! Array indices use the same notation: `"items.0.title"`.
//! ```
use std::collections::HashMap;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
@ -133,12 +132,12 @@ pub struct UiPage {
pub root_element: UiElement,
/// Named data sources available to this page
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub data_sources: HashMap<String, DataSource>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub data_sources: FxHashMap<String, DataSource>,
/// Named actions available to this page (referenced by `ActionRef::Name`)
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub actions: HashMap<String, ActionDefinition>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub actions: FxHashMap<String, ActionDefinition>,
}
impl UiPage {
@ -204,8 +203,8 @@ impl UiPage {
/// Validates that there are no cycles in Transform data source dependencies
fn validate_no_cycles(&self) -> SchemaResult<()> {
let mut visited = std::collections::HashSet::new();
let mut stack = std::collections::HashSet::new();
let mut visited = rustc_hash::FxHashSet::default();
let mut stack = rustc_hash::FxHashSet::default();
for name in self.data_sources.keys() {
Self::dfs_check_cycles(self, name, &mut visited, &mut stack)?;
@ -218,8 +217,8 @@ impl UiPage {
fn dfs_check_cycles(
&self,
name: &str,
visited: &mut std::collections::HashSet<String>,
stack: &mut std::collections::HashSet<String>,
visited: &mut rustc_hash::FxHashSet<String>,
stack: &mut rustc_hash::FxHashSet<String>,
) -> SchemaResult<()> {
if stack.contains(name) {
return Err(SchemaError::ValidationError(format!(
@ -1451,8 +1450,8 @@ pub struct ActionDefinition {
pub path: String,
/// Action parameters (merged with form data on submit)
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub params: HashMap<String, serde_json::Value>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
pub params: FxHashMap<String, serde_json::Value>,
/// Success message
#[serde(skip_serializing_if = "Option::is_none")]
@ -1509,7 +1508,7 @@ impl Default for ActionDefinition {
Self {
method: default_http_method(),
path: String::new(),
params: HashMap::new(),
params: FxHashMap::default(),
success_message: None,
error_message: None,
navigate_to: None,
@ -1543,8 +1542,8 @@ pub enum DataSource {
path: String,
/// Query parameters
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
params: HashMap<String, Expression>,
#[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
params: FxHashMap<String, Expression>,
/// Polling interval in seconds (0 = no polling)
#[serde(default)]
@ -1839,7 +1838,7 @@ mod tests {
let valid = DataSource::Endpoint {
method: HttpMethod::Get,
path: "/api/test".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -1848,7 +1847,7 @@ mod tests {
let invalid = DataSource::Endpoint {
method: HttpMethod::Get,
path: "api/test".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -1898,8 +1897,8 @@ mod tests {
page_size: 0,
row_actions: vec![],
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
let refs = page.referenced_data_sources();
@ -1918,8 +1917,8 @@ mod tests {
columns: 13,
gap: 16,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
assert!(page.validate().is_err());
@ -1937,8 +1936,8 @@ mod tests {
content: TextContent::Static("Title".to_string()),
id: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
assert!(page.validate().is_err());
@ -2005,7 +2004,7 @@ mod tests {
let bad = DataSource::Endpoint {
method: HttpMethod::Get,
path: "/not-api/something".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -2017,7 +2016,7 @@ mod tests {
let bad = DataSource::Endpoint {
method: HttpMethod::Get,
path: "/api/v1/../admin".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
poll_interval: 0,
transform: None,
};
@ -2078,7 +2077,7 @@ mod tests {
#[test]
fn test_link_validation_rejects_unsafe_href() {
use std::collections::HashMap as HM;
use rustc_hash::FxHashMap as HM;
let page = UiPage {
id: "p".to_string(),
title: "P".to_string(),
@ -2089,15 +2088,15 @@ mod tests {
href: "javascript:alert(1)".to_string(),
external: false,
},
data_sources: HM::new(),
actions: HM::new(),
data_sources: HM::default(),
actions: HM::default(),
};
assert!(page.validate().is_err());
}
#[test]
fn test_reserved_route_rejected() {
use std::collections::HashMap as HM;
use rustc_hash::FxHashMap as HM;
let page = UiPage {
id: "search-page".to_string(),
title: "Search".to_string(),
@ -2108,8 +2107,8 @@ mod tests {
gap: 0,
padding: None,
},
data_sources: HM::new(),
actions: HM::new(),
data_sources: HM::default(),
actions: HM::default(),
};
let err = page.validate().unwrap_err();
assert!(

View file

@ -343,7 +343,7 @@ impl SchemaValidator {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use rustc_hash::FxHashMap;
use super::*;
use crate::UiElement;
@ -359,8 +359,8 @@ mod tests {
gap: 0,
padding: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
}
}

View file

@ -1,7 +1,6 @@
//! WASM bridge types and helpers for plugin communication
use std::collections::HashMap;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
/// Memory allocation info for passing data between host and plugin
@ -93,7 +92,7 @@ pub struct LogMessage {
pub level: LogLevel,
pub target: String,
pub message: String,
pub fields: HashMap<String, String>,
pub fields: FxHashMap<String, String>,
}
/// HTTP request parameters
@ -101,7 +100,7 @@ pub struct LogMessage {
pub struct HttpRequest {
pub method: String,
pub url: String,
pub headers: HashMap<String, String>,
pub headers: FxHashMap<String, String>,
pub body: Option<Vec<u8>>,
}
@ -109,7 +108,7 @@ pub struct HttpRequest {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HttpResponse {
pub status: u16,
pub headers: HashMap<String, String>,
pub headers: FxHashMap<String, String>,
pub body: Vec<u8>,
}

View file

@ -1,4 +1,4 @@
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use async_trait::async_trait;
use pinakes_plugin_api::{
@ -25,6 +25,7 @@ use pinakes_plugin_api::{
ThumbnailOptions,
wasm::{HttpRequest, HttpResponse, LogLevel, LogMessage},
};
use rustc_hash::FxHashMap;
struct TestPlugin {
initialized: bool,
@ -41,7 +42,7 @@ impl TestPlugin {
health_status: HealthStatus {
healthy: true,
message: Some("OK".to_string()),
metrics: HashMap::new(),
metrics: FxHashMap::default(),
},
metadata: PluginMetadata {
id: "test-plugin".to_string(),
@ -82,7 +83,7 @@ async fn test_plugin_context_creation() {
let context = PluginContext {
data_dir: PathBuf::from("/data/test-plugin"),
cache_dir: PathBuf::from("/cache/test-plugin"),
config: HashMap::from([
config: FxHashMap::from([
("enabled".to_string(), serde_json::json!(true)),
("max_items".to_string(), serde_json::json!(100)),
]),
@ -119,7 +120,7 @@ async fn test_plugin_context_fields() {
let context = PluginContext {
data_dir: PathBuf::from("/custom/data"),
cache_dir: PathBuf::from("/custom/cache"),
config: HashMap::new(),
config: FxHashMap::default(),
capabilities: Capabilities::default(),
};
@ -137,7 +138,7 @@ async fn test_plugin_lifecycle() {
let context = PluginContext {
data_dir: PathBuf::from("/data"),
cache_dir: PathBuf::from("/cache"),
config: HashMap::new(),
config: FxHashMap::default(),
capabilities: Capabilities::default(),
};
plugin.initialize(context).await.unwrap();
@ -164,7 +165,7 @@ async fn test_extracted_metadata_structure() {
file_size_bytes: Some(1_500_000),
codec: Some("h264".to_string()),
bitrate_kbps: Some(5000),
custom_fields: HashMap::from([
custom_fields: FxHashMap::from([
("color_space".to_string(), serde_json::json!("sRGB")),
("orientation".to_string(), serde_json::json!(90)),
]),
@ -182,7 +183,7 @@ async fn test_extracted_metadata_structure() {
async fn test_search_query_serialization() {
let query = SearchQuery {
query_text: "nature landscape".to_string(),
filters: HashMap::from([
filters: FxHashMap::from([
("type".to_string(), serde_json::json!("image")),
("year".to_string(), serde_json::json!(2023)),
]),
@ -329,7 +330,7 @@ async fn test_event_serialization() {
let event = Event {
event_type: EventType::MediaImported,
timestamp: "2024-01-15T10:00:00Z".to_string(),
data: HashMap::from([
data: FxHashMap::from([
("path".to_string(), serde_json::json!("/media/test.jpg")),
("size".to_string(), serde_json::json!(1024)),
]),
@ -347,7 +348,7 @@ async fn test_http_request_serialization() {
let request = HttpRequest {
method: "GET".to_string(),
url: "https://api.example.com/data".to_string(),
headers: HashMap::from([
headers: FxHashMap::from([
("Authorization".to_string(), "Bearer token".to_string()),
("Content-Type".to_string(), "application/json".to_string()),
]),
@ -366,7 +367,7 @@ async fn test_http_request_serialization() {
async fn test_http_response_serialization() {
let response = HttpResponse {
status: 200,
headers: HashMap::from([(
headers: FxHashMap::from([(
"Content-Type".to_string(),
"application/json".to_string(),
)]),
@ -386,7 +387,7 @@ async fn test_log_message_serialization() {
level: LogLevel::Info,
target: "plugin::metadata".to_string(),
message: "Metadata extraction complete".to_string(),
fields: HashMap::from([
fields: FxHashMap::from([
("file_count".to_string(), "42".to_string()),
("duration_ms".to_string(), "150".to_string()),
]),
@ -453,7 +454,7 @@ async fn test_search_index_item_serialization() {
"photos".to_string(),
],
media_type: "image/jpeg".to_string(),
metadata: HashMap::from([
metadata: FxHashMap::from([
("camera".to_string(), serde_json::json!("Canon EOS R5")),
("location".to_string(), serde_json::json!("Beach")),
]),
@ -474,7 +475,7 @@ async fn test_health_status_variants() {
let healthy = HealthStatus {
healthy: true,
message: Some("All systems operational".to_string()),
metrics: HashMap::from([
metrics: FxHashMap::from([
("items_processed".to_string(), 1000.0),
("avg_process_time_ms".to_string(), 45.5),
]),
@ -484,7 +485,7 @@ async fn test_health_status_variants() {
let unhealthy = HealthStatus {
healthy: false,
message: Some("Database connection failed".to_string()),
metrics: HashMap::new(),
metrics: FxHashMap::default(),
};
assert!(!unhealthy.healthy);
assert_eq!(
@ -571,7 +572,7 @@ async fn test_extracted_metadata_default() {
async fn test_search_query_structure() {
let query = SearchQuery {
query_text: "test query".to_string(),
filters: HashMap::new(),
filters: FxHashMap::default(),
limit: 10,
offset: 0,
};

View file

@ -3,8 +3,6 @@
//! Renderer-level behaviour (e.g., Dioxus components) is out of scope here;
//! that requires a Dioxus runtime and belongs in pinakes-ui tests.
use std::collections::HashMap;
use pinakes_plugin_api::{
DataSource,
HttpMethod,
@ -26,8 +24,8 @@ fn make_page(id: &str, route: &str) -> UiPage {
gap: 0,
padding: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: Default::default(),
actions: Default::default(),
}
}

View file

@ -31,6 +31,7 @@ blake3 = { workspace = true }
rand = { workspace = true }
percent-encoding = { workspace = true }
http = { workspace = true }
rustc-hash = { workspace = true }
[lints]
workspace = true

View file

@ -1,9 +1,7 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use chrono::{DateTime, Utc};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@ -52,7 +50,7 @@ pub struct MediaResponse {
pub duration_secs: Option<f64>,
pub description: Option<String>,
pub has_thumbnail: bool,
pub custom_fields: HashMap<String, CustomFieldResponse>,
pub custom_fields: FxHashMap<String, CustomFieldResponse>,
// Photo-specific metadata
pub date_taken: Option<DateTime<Utc>>,

View file

@ -17,6 +17,7 @@ use pinakes_core::{
ReadingStatus,
},
};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
@ -41,7 +42,7 @@ pub struct BookMetadataResponse {
pub series_index: Option<f64>,
pub format: Option<String>,
pub authors: Vec<AuthorResponse>,
pub identifiers: std::collections::HashMap<String, Vec<String>>,
pub identifiers: FxHashMap<String, Vec<String>>,
}
impl From<BookMetadata> for BookMetadataResponse {

View file

@ -3,6 +3,7 @@ use axum::{
extract::{Path, Query, State},
};
use pinakes_core::{model::MediaId, storage::DynStorageBackend};
use rustc_hash::FxHashMap;
use uuid::Uuid;
use crate::{
@ -1249,7 +1250,7 @@ pub async fn empty_trash(
pub async fn permanent_delete_media(
State(state): State<AppState>,
Path(id): Path<Uuid>,
Query(params): Query<std::collections::HashMap<String, String>>,
Query(params): Query<FxHashMap<String, String>>,
) -> Result<Json<serde_json::Value>, ApiError> {
let media_id = MediaId(id);
let permanent = params.get("permanent").is_some_and(|v| v == "true");

View file

@ -1,5 +1,3 @@
use std::collections::HashMap;
use axum::{
Json,
Router,
@ -91,8 +89,10 @@ pub async fn get_timeline(
.collect();
// Group by the requested period
let mut groups: HashMap<String, Vec<pinakes_core::model::MediaItem>> =
HashMap::new();
let mut groups: rustc_hash::FxHashMap<
String,
Vec<pinakes_core::model::MediaItem>,
> = rustc_hash::FxHashMap::default();
for photo in photos {
if let Some(date_taken) = photo.date_taken {

View file

@ -1,10 +1,11 @@
use std::{collections::HashMap, sync::Arc};
use std::sync::Arc;
use axum::{
Json,
extract::{Path, State},
};
use pinakes_core::plugin::PluginManager;
use rustc_hash::FxHashMap;
use crate::{
dto::{
@ -194,7 +195,7 @@ pub async fn emit_plugin_event(
/// List merged CSS custom property overrides from all enabled plugins
pub async fn list_plugin_ui_theme_extensions(
State(state): State<AppState>,
) -> Result<Json<HashMap<String, String>>, ApiError> {
) -> Result<Json<FxHashMap<String, String>>, ApiError> {
let plugin_manager = require_plugin_manager(&state)?;
Ok(Json(plugin_manager.list_ui_theme_extensions().await))
}

View file

@ -51,7 +51,8 @@ pub async fn create_saved_search(
));
}
if let Some(ref sort) = req.sort_order
&& !VALID_SORT_ORDERS.contains(&sort.as_str()) {
&& !VALID_SORT_ORDERS.contains(&sort.as_str())
{
return Err(ApiError(
pinakes_core::error::PinakesError::InvalidOperation(format!(
"sort_order must be one of: {}",

View file

@ -18,6 +18,7 @@ tracing-subscriber = { workspace = true }
reqwest = { workspace = true }
ratatui = { workspace = true }
crossterm = { workspace = true }
rustc-hash = { workspace = true }
[lints]
workspace = true

View file

@ -1,4 +1,4 @@
use std::{collections::HashSet, time::Duration};
use std::time::Duration;
use anyhow::Result;
use crossterm::{
@ -6,6 +6,7 @@ use crossterm::{
terminal::{self, EnterAlternateScreen, LeaveAlternateScreen},
};
use ratatui::{Terminal, backend::CrosstermBackend};
use rustc_hash::FxHashSet;
use crate::{
client::{
@ -74,7 +75,7 @@ pub struct AppState {
pub total_media_count: u64,
pub server_url: String,
// Multi-select support
pub selected_items: HashSet<String>,
pub selected_items: FxHashSet<String>,
pub selection_mode: bool,
pub pending_batch_delete: bool,
// Duplicates view
@ -178,7 +179,7 @@ impl AppState {
total_media_count: 0,
server_url: server_url.to_string(),
// Multi-select
selected_items: HashSet::new(),
selected_items: FxHashSet::default(),
selection_mode: false,
pending_batch_delete: false,
}

View file

@ -1,7 +1,6 @@
use std::collections::HashMap;
use anyhow::Result;
use reqwest::Client;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
#[derive(Clone)]
@ -28,7 +27,7 @@ pub struct MediaResponse {
pub description: Option<String>,
#[serde(default)]
pub has_thumbnail: bool,
pub custom_fields: HashMap<String, CustomFieldResponse>,
pub custom_fields: FxHashMap<String, CustomFieldResponse>,
pub created_at: String,
pub updated_at: String,
}

View file

@ -28,6 +28,7 @@ gloo-timers = { workspace = true }
rand = { workspace = true }
urlencoding = { workspace = true }
pinakes-plugin-api = { workspace = true }
rustc-hash = { workspace = true }
[lints]
workspace = true

View file

@ -1,7 +1,6 @@
use std::collections::HashMap;
use anyhow::Result;
use reqwest::{Client, header};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
/// Payload for import events: (path, tag_ids, new_tags, collection_id)
@ -66,7 +65,7 @@ pub struct MediaResponse {
pub description: Option<String>,
#[serde(default)]
pub has_thumbnail: bool,
pub custom_fields: HashMap<String, CustomFieldResponse>,
pub custom_fields: FxHashMap<String, CustomFieldResponse>,
pub created_at: String,
pub updated_at: String,
#[serde(default)]
@ -395,7 +394,7 @@ pub struct BookMetadataResponse {
pub format: Option<String>,
pub authors: Vec<BookAuthorResponse>,
#[serde(default)]
pub identifiers: HashMap<String, Vec<String>>,
pub identifiers: FxHashMap<String, Vec<String>>,
}
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
@ -1680,7 +1679,7 @@ impl ApiClient {
/// Returns a map of CSS property names to values.
pub async fn get_plugin_ui_theme_extensions(
&self,
) -> Result<HashMap<String, String>> {
) -> Result<FxHashMap<String, String>> {
Ok(
self
.client

View file

@ -1,9 +1,8 @@
//! Graph visualization component for markdown note connections.
//!
//! Renders a force-directed graph showing connections between notes.
use std::collections::HashMap;
use dioxus::prelude::*;
use rustc_hash::FxHashMap;
use crate::client::{
ApiClient,
@ -298,7 +297,7 @@ fn ForceDirectedGraph(
// Create id to position map
let nodes_read = physics_nodes.read();
let id_to_pos: HashMap<&str, (f64, f64)> = nodes_read
let id_to_pos: FxHashMap<&str, (f64, f64)> = nodes_read
.iter()
.map(|n| (n.id.as_str(), (n.x, n.y)))
.collect();

View file

@ -1,6 +1,5 @@
use std::collections::HashSet;
use dioxus::prelude::*;
use rustc_hash::FxHashSet;
use super::utils::{format_size, type_badge_class};
use crate::client::{
@ -50,7 +49,7 @@ pub fn Import(
let mut filter_max_size = use_signal(|| 0u64); // 0 means no limit
// File selection state
let mut selected_file_paths = use_signal(HashSet::<String>::new);
let mut selected_file_paths = use_signal(FxHashSet::<String>::default);
let current_mode = *import_mode.read();
@ -475,7 +474,7 @@ pub fn Import(
button {
class: "btn btn-sm btn-ghost",
onclick: move |_| {
selected_file_paths.set(HashSet::new());
selected_file_paths.set(FxHashSet::default());
},
"Deselect All"
}
@ -496,12 +495,12 @@ pub fn Import(
let filtered_paths = filtered_paths.clone();
move |_| {
if all_filtered_selected {
let filtered_set: HashSet<String> = filtered_paths
let filtered_set: FxHashSet<String> = filtered_paths
.iter()
.cloned()
.collect();
let sel = selected_file_paths.read().clone();
let new_sel: HashSet<String> = sel
let new_sel: FxHashSet<String> = sel
.difference(&filtered_set)
.cloned()
.collect();
@ -599,7 +598,7 @@ pub fn Import(
let new_tags = parse_new_tags(&new_tags_input.read());
let col_id = selected_collection.read().clone();
on_import_batch.call((paths, tag_ids, new_tags, col_id));
selected_file_paths.set(HashSet::new());
selected_file_paths.set(FxHashSet::default());
selected_tags.set(Vec::new());
new_tags_input.set(String::new());
selected_collection.set(None);
@ -644,7 +643,7 @@ pub fn Import(
selected_tags.set(Vec::new());
new_tags_input.set(String::new());
selected_collection.set(None);
selected_file_paths.set(HashSet::new());
selected_file_paths.set(FxHashSet::default());
}
}
},

View file

@ -316,6 +316,10 @@ fn escape_html_attr(text: &str) -> String {
/// Sanitize HTML using ammonia with a safe allowlist.
/// This prevents XSS attacks by removing dangerous elements and attributes.
#[expect(
clippy::disallowed_types,
reason = "ammonia::Builder requires std HashSet"
)]
fn sanitize_html(html: &str) -> String {
use std::collections::HashSet;

View file

@ -3,8 +3,6 @@
//! This module provides the action execution system that handles
//! user interactions with plugin UI elements.
use std::collections::HashMap;
use pinakes_plugin_api::{
ActionDefinition,
ActionRef,
@ -12,6 +10,7 @@ use pinakes_plugin_api::{
SpecialAction,
UiElement,
};
use rustc_hash::FxHashMap;
use super::data::to_reqwest_method;
use crate::client::ApiClient;
@ -48,7 +47,7 @@ pub enum ActionResult {
pub async fn execute_action(
client: &ApiClient,
action_ref: &ActionRef,
page_actions: &HashMap<String, ActionDefinition>,
page_actions: &FxHashMap<String, ActionDefinition>,
form_data: Option<&serde_json::Value>,
) -> Result<ActionResult, String> {
match action_ref {
@ -224,7 +223,8 @@ mod tests {
async fn test_named_action_unknown_returns_none() {
let client = crate::client::ApiClient::default();
let action_ref = ActionRef::Name("my-action".to_string());
let result = execute_action(&client, &action_ref, &HashMap::new(), None)
let result =
execute_action(&client, &action_ref, &FxHashMap::default(), None)
.await
.unwrap();
assert!(matches!(result, ActionResult::None));
@ -235,11 +235,11 @@ mod tests {
use pinakes_plugin_api::ActionDefinition;
let client = crate::client::ApiClient::default();
let mut page_actions = HashMap::new();
let mut page_actions = FxHashMap::default();
page_actions.insert("do-thing".to_string(), ActionDefinition {
method: pinakes_plugin_api::HttpMethod::Post,
path: "/api/v1/nonexistent-endpoint".to_string(),
params: HashMap::new(),
params: FxHashMap::default(),
success_message: None,
error_message: None,
navigate_to: None,
@ -267,7 +267,8 @@ mod tests {
let client = crate::client::ApiClient::default();
let action_ref = ActionRef::Special(SpecialAction::Refresh);
let result = execute_action(&client, &action_ref, &HashMap::new(), None)
let result =
execute_action(&client, &action_ref, &FxHashMap::default(), None)
.await
.unwrap();
assert!(matches!(result, ActionResult::Refresh));
@ -281,7 +282,8 @@ mod tests {
let action_ref = ActionRef::Special(SpecialAction::Navigate {
to: "/dashboard".to_string(),
});
let result = execute_action(&client, &action_ref, &HashMap::new(), None)
let result =
execute_action(&client, &action_ref, &FxHashMap::default(), None)
.await
.unwrap();
assert!(
@ -299,7 +301,8 @@ mod tests {
key: "count".to_string(),
value: expr.clone(),
});
let result = execute_action(&client, &action_ref, &HashMap::new(), None)
let result =
execute_action(&client, &action_ref, &FxHashMap::default(), None)
.await
.unwrap();
match result {
@ -317,7 +320,8 @@ mod tests {
let client = crate::client::ApiClient::default();
let action_ref = ActionRef::Special(SpecialAction::CloseModal);
let result = execute_action(&client, &action_ref, &HashMap::new(), None)
let result =
execute_action(&client, &action_ref, &FxHashMap::default(), None)
.await
.unwrap();
assert!(matches!(result, ActionResult::CloseModal));

View file

@ -2,14 +2,12 @@
//!
//! Provides data fetching and caching for plugin data sources.
use std::{
collections::{HashMap, HashSet},
time::Duration,
};
use std::time::Duration;
use dioxus::prelude::*;
use dioxus_core::Task;
use pinakes_plugin_api::{DataSource, Expression, HttpMethod};
use rustc_hash::{FxHashMap, FxHashSet};
use super::expr::{evaluate_expression, value_to_display_string};
use crate::client::ApiClient;
@ -17,9 +15,9 @@ use crate::client::ApiClient;
/// Cached data for a plugin page
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct PluginPageData {
data: HashMap<String, serde_json::Value>,
loading: HashSet<String>,
errors: HashMap<String, String>,
data: FxHashMap<String, serde_json::Value>,
loading: FxHashSet<String>,
errors: FxHashMap<String, String>,
}
impl PluginPageData {
@ -105,7 +103,7 @@ async fn fetch_endpoint(
client: &ApiClient,
path: &str,
method: HttpMethod,
params: &HashMap<String, Expression>,
params: &FxHashMap<String, Expression>,
ctx: &serde_json::Value,
allowed_endpoints: &[String],
) -> Result<serde_json::Value, String> {
@ -174,9 +172,9 @@ async fn fetch_endpoint(
/// Returns an error if any data source fails to fetch
pub async fn fetch_page_data(
client: &ApiClient,
data_sources: &HashMap<String, DataSource>,
data_sources: &FxHashMap<String, DataSource>,
allowed_endpoints: &[String],
) -> Result<HashMap<String, serde_json::Value>, String> {
) -> Result<FxHashMap<String, serde_json::Value>, String> {
// Group non-Transform sources into dedup groups.
//
// For Endpoint sources, two entries are in the same group when they share
@ -300,7 +298,7 @@ pub async fn fetch_page_data(
})
.collect();
let mut results: HashMap<String, serde_json::Value> = HashMap::new();
let mut results: FxHashMap<String, serde_json::Value> = FxHashMap::default();
for group_result in futures::future::join_all(futs).await {
for (name, value) in group_result? {
results.insert(name, value);
@ -375,7 +373,7 @@ pub async fn fetch_page_data(
/// immediate re-fetch outside of the polling interval.
pub fn use_plugin_data(
client: Signal<ApiClient>,
data_sources: HashMap<String, DataSource>,
data_sources: FxHashMap<String, DataSource>,
refresh: Signal<u32>,
allowed_endpoints: Vec<String>,
) -> Signal<PluginPageData> {
@ -564,7 +562,7 @@ mod tests {
use crate::client::ApiClient;
let client = ApiClient::default();
let mut sources = HashMap::new();
let mut sources = FxHashMap::default();
sources.insert("nums".to_string(), DataSource::Static {
value: serde_json::json!([1, 2, 3]),
});
@ -586,7 +584,7 @@ mod tests {
use crate::client::ApiClient;
let client = ApiClient::default();
let mut sources = HashMap::new();
let mut sources = FxHashMap::default();
// The Transform expression accesses "raw" from the context
sources.insert("derived".to_string(), DataSource::Transform {
source_name: "raw".to_string(),
@ -611,7 +609,7 @@ mod tests {
use crate::client::ApiClient;
let client = ApiClient::default();
let mut sources = HashMap::new();
let mut sources = FxHashMap::default();
sources.insert("raw".to_string(), DataSource::Static {
value: serde_json::json!(42),
});
@ -634,7 +632,7 @@ mod tests {
use crate::client::ApiClient;
let client = ApiClient::default();
let mut sources = HashMap::new();
let mut sources = FxHashMap::default();
// Two Static sources with the same payload; dedup is for Endpoint sources,
// but both names must appear in the output regardless.
sources.insert("a".to_string(), DataSource::Static {
@ -662,7 +660,7 @@ mod tests {
use crate::client::ApiClient;
let client = ApiClient::default();
let mut sources = HashMap::new();
let mut sources = FxHashMap::default();
// Two endpoints with identical (path, method, params=empty) but different
// transforms. Both should produce the same error when the path is blocked.
sources.insert("x".to_string(), DataSource::Endpoint {
@ -707,7 +705,7 @@ mod tests {
use crate::client::ApiClient;
let client = ApiClient::default();
let mut sources = HashMap::new();
let mut sources = FxHashMap::default();
sources.insert("raw_data".to_string(), DataSource::Static {
value: serde_json::json!({"count": 42, "name": "test"}),
});
@ -741,7 +739,7 @@ mod tests {
use crate::client::ApiClient;
let client = ApiClient::default();
let mut sources = HashMap::new();
let mut sources = FxHashMap::default();
sources.insert("items".to_string(), DataSource::Endpoint {
path: "/api/v1/media".to_string(),
method: HttpMethod::Get,

View file

@ -16,10 +16,9 @@
//! }
//! ```
use std::collections::HashMap;
use dioxus::prelude::*;
use pinakes_plugin_api::{UiPage, UiWidget};
use rustc_hash::FxHashMap;
use crate::client::ApiClient;
@ -43,11 +42,11 @@ pub struct PluginRegistry {
/// API client for fetching pages from server
client: ApiClient,
/// Cached pages: (`plugin_id`, `page_id`) -> `PluginPage`
pages: HashMap<(String, String), PluginPage>,
pages: FxHashMap<(String, String), PluginPage>,
/// Cached widgets: (`plugin_id`, `widget_id`) -> `UiWidget`
widgets: Vec<(String, UiWidget)>,
/// Merged CSS custom property overrides from all enabled plugins
theme_vars: HashMap<String, String>,
theme_vars: FxHashMap<String, String>,
}
impl PluginRegistry {
@ -55,14 +54,14 @@ impl PluginRegistry {
pub fn new(client: ApiClient) -> Self {
Self {
client,
pages: HashMap::new(),
pages: FxHashMap::default(),
widgets: Vec::new(),
theme_vars: HashMap::new(),
theme_vars: FxHashMap::default(),
}
}
/// Get merged CSS custom property overrides from all loaded plugins.
pub fn theme_vars(&self) -> &HashMap<String, String> {
pub fn theme_vars(&self) -> &FxHashMap<String, String> {
&self.theme_vars
}
@ -230,8 +229,8 @@ mod tests {
gap: 16,
padding: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
}
}
@ -491,8 +490,8 @@ mod tests {
gap: 16,
padding: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
registry.register_page("test-plugin".to_string(), invalid_page, vec![]);
@ -517,8 +516,8 @@ mod tests {
gap: 0,
padding: None,
},
data_sources: HashMap::new(),
actions: HashMap::new(),
data_sources: FxHashMap::default(),
actions: FxHashMap::default(),
};
registry.register_page("p".to_string(), invalid_page, vec![]);
assert_eq!(registry.all_pages().len(), 0);

View file

@ -4,8 +4,6 @@
//! elements. Data-driven elements resolve their data from a [`PluginPageData`]
//! context that is populated by the `use_plugin_data` hook.
use std::collections::HashMap;
use dioxus::prelude::*;
use pinakes_plugin_api::{
ActionDefinition,
@ -23,6 +21,7 @@ use pinakes_plugin_api::{
UiElement,
UiPage,
};
use rustc_hash::{FxHashMap, FxHashSet};
use super::{
actions::execute_action,
@ -49,13 +48,13 @@ pub struct RenderContext {
pub navigate: Signal<Option<String>>,
pub refresh: Signal<u32>,
pub modal: Signal<Option<UiElement>>,
pub local_state: Signal<HashMap<String, serde_json::Value>>,
pub local_state: Signal<FxHashMap<String, serde_json::Value>>,
}
/// Build the expression evaluation context from page data and local state.
fn build_ctx(
data: &PluginPageData,
local_state: &HashMap<String, serde_json::Value>,
local_state: &FxHashMap<String, serde_json::Value>,
) -> serde_json::Value {
let mut base = data.as_json();
if let serde_json::Value::Object(ref mut obj) = base {
@ -101,7 +100,7 @@ pub fn PluginViewRenderer(props: PluginViewProps) -> Element {
let mut navigate = use_signal(|| None::<String>);
let refresh = use_signal(|| 0u32);
let mut modal = use_signal(|| None::<UiElement>);
let local_state = use_signal(HashMap::<String, serde_json::Value>::new);
let local_state = use_signal(FxHashMap::<String, serde_json::Value>::default);
let ctx = RenderContext {
client: props.client,
feedback,
@ -169,7 +168,7 @@ struct PluginTabsProps {
tabs: Vec<TabDefinition>,
default_tab: usize,
data: PluginPageData,
actions: HashMap<String, ActionDefinition>,
actions: FxHashMap<String, ActionDefinition>,
ctx: RenderContext,
}
@ -232,7 +231,7 @@ struct PluginDataTableProps {
page_size: usize,
row_actions: Vec<pinakes_plugin_api::RowAction>,
data: PluginPageData,
actions: HashMap<String, ActionDefinition>,
actions: FxHashMap<String, ActionDefinition>,
ctx: RenderContext,
}
@ -472,7 +471,7 @@ fn PluginDataTable(props: PluginDataTableProps) -> Element {
pub fn render_element(
element: &UiElement,
data: &PluginPageData,
actions: &HashMap<String, ActionDefinition>,
actions: &FxHashMap<String, ActionDefinition>,
ctx: RenderContext,
) -> Element {
match element {
@ -1188,7 +1187,7 @@ fn render_chart_data(
Some(serde_json::Value::Array(arr)) if !arr.is_empty() => {
if arr.first().map(|v| v.is_object()).unwrap_or(false) {
// Object rows: collect unique keys preserving insertion order
let mut seen = std::collections::HashSet::new();
let mut seen = FxHashSet::default();
let cols: Vec<String> = arr
.iter()
.filter_map(|r| r.as_object())

View file

@ -4,10 +4,9 @@
//! predefined locations. Unlike full pages, widgets have no data sources of
//! their own and render with empty data context.
use std::collections::HashMap;
use dioxus::prelude::*;
use pinakes_plugin_api::{ActionDefinition, UiWidget, widget_location};
use rustc_hash::FxHashMap;
use super::{
data::PluginPageData,
@ -120,7 +119,7 @@ pub fn WidgetViewRenderer(props: WidgetViewRendererProps) -> Element {
let navigate = use_signal(|| None::<String>);
let refresh = use_signal(|| 0u32);
let modal = use_signal(|| None::<pinakes_plugin_api::UiElement>);
let local_state = use_signal(HashMap::<String, serde_json::Value>::new);
let local_state = use_signal(FxHashMap::<String, serde_json::Value>::default);
let ctx = RenderContext {
client: props.client,
feedback,
@ -129,7 +128,7 @@ pub fn WidgetViewRenderer(props: WidgetViewRendererProps) -> Element {
modal,
local_state,
};
let empty_actions: HashMap<String, ActionDefinition> = HashMap::new();
let empty_actions: FxHashMap<String, ActionDefinition> = FxHashMap::default();
rsx! {
div {
class: "plugin-widget",
@ -142,6 +141,8 @@ pub fn WidgetViewRenderer(props: WidgetViewRendererProps) -> Element {
#[cfg(test)]
mod tests {
use rustc_hash::FxHashSet;
use super::*;
#[test]
@ -159,7 +160,7 @@ mod tests {
WidgetLocation::SettingsSection,
];
let strings: Vec<&str> = locations.iter().map(|l| l.as_str()).collect();
let unique: std::collections::HashSet<_> = strings.iter().collect();
let unique: FxHashSet<_> = strings.iter().collect();
assert_eq!(
strings.len(),
unique.len(),