various: markdown improvements

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I81fda8247814da19eed1e76dbe97bd5b6a6a6964
This commit is contained in:
raf 2026-02-05 15:39:05 +03:00
commit 80a8b5c7ca
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
23 changed files with 3458 additions and 30 deletions

91
Cargo.lock generated
View file

@ -43,6 +43,19 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "ammonia"
version = "4.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6"
dependencies = [
"cssparser 0.35.0",
"html5ever 0.35.0",
"maplit",
"tendril",
"url",
]
[[package]]
name = "android_system_properties"
version = "0.1.5"
@ -1175,6 +1188,19 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "cssparser"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e901edd733a1472f944a45116df3f846f54d37e67e68640ac8bb69689aca2aa"
dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa",
"phf 0.11.3",
"smallvec",
]
[[package]]
name = "cssparser-macros"
version = "0.6.1"
@ -2925,8 +2951,19 @@ checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c"
dependencies = [
"log",
"mac",
"markup5ever",
"match_token",
"markup5ever 0.14.1",
"match_token 0.1.0",
]
[[package]]
name = "html5ever"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4"
dependencies = [
"log",
"markup5ever 0.35.0",
"match_token 0.35.0",
]
[[package]]
@ -3539,8 +3576,8 @@ version = "0.8.8-speedreader"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02cb977175687f33fa4afa0c95c112b987ea1443e5a51c8f8ff27dc618270cc2"
dependencies = [
"cssparser",
"html5ever",
"cssparser 0.29.6",
"html5ever 0.29.1",
"indexmap",
"selectors",
]
@ -3871,6 +3908,12 @@ dependencies = [
"syn 2.0.114",
]
[[package]]
name = "maplit"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
[[package]]
name = "markup5ever"
version = "0.14.1"
@ -3885,6 +3928,17 @@ dependencies = [
"tendril",
]
[[package]]
name = "markup5ever"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3"
dependencies = [
"log",
"tendril",
"web_atoms",
]
[[package]]
name = "match_token"
version = "0.1.0"
@ -3896,6 +3950,17 @@ dependencies = [
"syn 2.0.114",
]
[[package]]
name = "match_token"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.114",
]
[[package]]
name = "matchers"
version = "0.2.0"
@ -4974,6 +5039,7 @@ dependencies = [
name = "pinakes-ui"
version = "0.1.0"
dependencies = [
"ammonia",
"anyhow",
"chrono",
"clap",
@ -4981,6 +5047,7 @@ dependencies = [
"futures",
"gray_matter",
"pulldown-cmark",
"regex",
"reqwest",
"rfd",
"serde",
@ -6096,7 +6163,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c37578180969d00692904465fb7f6b3d50b9a2b952b87c23d0e2e5cb5013416"
dependencies = [
"bitflags 1.3.2",
"cssparser",
"cssparser 0.29.6",
"derive_more 0.99.20",
"fxhash",
"log",
@ -8162,6 +8229,18 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "web_atoms"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414"
dependencies = [
"phf 0.11.3",
"phf_codegen 0.11.3",
"string_cache",
"string_cache_codegen",
]
[[package]]
name = "webbrowser"
version = "1.0.6"
@ -8957,7 +9036,7 @@ dependencies = [
"dpi",
"dunce",
"gtk",
"html5ever",
"html5ever 0.29.1",
"http",
"javascriptcore-rs",
"jni",

View file

@ -6,7 +6,8 @@ use tracing::info;
use crate::audit;
use crate::error::{PinakesError, Result};
use crate::hash::compute_file_hash;
use crate::media_type::MediaType;
use crate::links;
use crate::media_type::{BuiltinMediaType, MediaType};
use crate::metadata;
use crate::model::*;
use crate::storage::DynStorageBackend;
@ -168,6 +169,9 @@ pub async fn import_file_with_options(
None
};
// Check if this is a markdown file for link extraction
let is_markdown = media_type == MediaType::Builtin(BuiltinMediaType::Markdown);
let item = MediaItem {
id: media_id,
path: path.clone(),
@ -206,10 +210,25 @@ pub async fn import_file_with_options(
// New items are not deleted
deleted_at: None,
// Links will be extracted separately
links_extracted_at: None,
};
storage.insert_media(&item).await?;
// Extract and store markdown links for markdown files
if is_markdown {
if let Err(e) = extract_and_store_links(storage, media_id, &path).await {
tracing::warn!(
media_id = %media_id,
path = %path.display(),
error = %e,
"failed to extract markdown links"
);
}
}
// Store extracted extra metadata as custom fields
for (key, value) in &extracted.extra {
let field = CustomField {
@ -372,3 +391,44 @@ pub async fn import_directory_with_options(
Ok(results)
}
/// Extract markdown links from a file and store them in the database.
///
/// Reads the file, runs the extractors from `crate::links`, replaces any
/// previously stored links for `media_id`, and stamps `links_extracted_at`.
///
/// Fix: the old code only cleared existing links when the new extraction
/// found at least one link. On a re-import of a file whose links were all
/// removed, stale rows from the previous import survived. We now always
/// clear before (optionally) saving.
async fn extract_and_store_links(
    storage: &DynStorageBackend,
    media_id: MediaId,
    path: &Path,
) -> Result<()> {
    // Read file content; surface a descriptive I/O error on failure.
    let content = tokio::fs::read_to_string(path).await.map_err(|e| {
        PinakesError::Io(std::io::Error::new(
            std::io::ErrorKind::Other,
            format!("failed to read markdown file for link extraction: {e}"),
        ))
    })?;
    // Extract links (wikilinks, embeds, internal markdown links).
    let extracted_links = links::extract_links(media_id, &content);
    // Always drop previously stored links first so a re-import cannot
    // leave stale rows behind, even when the file now contains no links.
    storage.clear_links_for_media(media_id).await?;
    if !extracted_links.is_empty() {
        storage.save_markdown_links(media_id, &extracted_links).await?;
    }
    // Record that extraction ran for this item.
    storage.mark_links_extracted(media_id).await?;
    tracing::debug!(
        media_id = %media_id,
        link_count = extracted_links.len(),
        "extracted markdown links"
    );
    Ok(())
}

View file

@ -12,6 +12,7 @@ pub mod hash;
pub mod import;
pub mod integrity;
pub mod jobs;
pub mod links;
pub mod managed_storage;
pub mod media_type;
pub mod metadata;

View file

@ -0,0 +1,456 @@
//! Markdown link extraction and management for Obsidian-style bidirectional links.
//!
//! This module provides:
//! - Wikilink extraction (`[[target]]` and `[[target|display]]`)
//! - Embed extraction (`![[target]]`)
//! - Markdown link extraction (`[text](path)` for internal links)
//! - Link resolution strategies
//! - Context extraction for backlink previews
use std::path::Path;
use regex::Regex;
use uuid::Uuid;
use crate::error::Result;
use crate::model::{LinkType, MarkdownLink, MediaId};
/// Configuration for context extraction around links:
/// how many characters of surrounding text are kept before/after a link
/// when building the preview snippet (see `extract_context`).
const CONTEXT_CHARS_BEFORE: usize = 50;
const CONTEXT_CHARS_AFTER: usize = 50;
/// Extract all markdown links from file content.
///
/// This extracts:
/// - Wikilinks: `[[target]]` and `[[target|display text]]`
/// - Embeds: `![[target]]`
/// - Markdown links: `[text](path)` (internal paths only, no http/https)
pub fn extract_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
    // Each extractor scans the content independently; results are
    // concatenated in wikilink -> embed -> markdown-link order.
    let mut collected = extract_wikilinks(source_media_id, content);
    collected.extend(extract_embeds(source_media_id, content));
    collected.extend(extract_markdown_links(source_media_id, content));
    collected
}
/// Extract wikilinks from content.
/// Matches: `[[target]]` or `[[target|display text]]` but NOT `![[...]]` (embeds)
fn extract_wikilinks(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
    // [[target]] with an optional |display part. Embeds (preceded by `!`)
    // are filtered out manually below.
    let pattern = Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap();
    let mut found = Vec::new();
    for (idx, line) in content.lines().enumerate() {
        for caps in pattern.captures_iter(line) {
            let whole = caps.get(0).unwrap();
            // `![[...]]` is an embed, handled by extract_embeds — skip it
            // here. `!` is ASCII, so slicing at the match start is safe.
            if line[..whole.start()].ends_with('!') {
                continue;
            }
            let raw_target = caps.get(1).unwrap().as_str().trim();
            let display = caps.get(2).map(|m| m.as_str().trim().to_string());
            let snippet = extract_context(content, idx, whole.start(), whole.end());
            found.push(MarkdownLink {
                id: Uuid::now_v7(),
                source_media_id,
                target_path: raw_target.to_string(),
                target_media_id: None, // resolved in a later pass
                link_type: LinkType::Wikilink,
                // Fall back to the target itself when no |display was given.
                link_text: display.or_else(|| Some(raw_target.to_string())),
                line_number: Some(idx as i32 + 1), // 1-indexed
                context: Some(snippet),
                created_at: chrono::Utc::now(),
            });
        }
    }
    found
}
/// Extract embeds from content.
/// Matches: `![[target]]`
fn extract_embeds(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
    // Same shape as the wikilink pattern, but anchored on the leading `!`.
    let pattern = Regex::new(r"!\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap();
    let mut found = Vec::new();
    for (idx, line) in content.lines().enumerate() {
        for caps in pattern.captures_iter(line) {
            let whole = caps.get(0).unwrap();
            let raw_target = caps.get(1).unwrap().as_str().trim();
            let display = caps.get(2).map(|m| m.as_str().trim().to_string());
            let snippet = extract_context(content, idx, whole.start(), whole.end());
            found.push(MarkdownLink {
                id: Uuid::now_v7(),
                source_media_id,
                target_path: raw_target.to_string(),
                target_media_id: None, // resolved in a later pass
                link_type: LinkType::Embed,
                link_text: display.or_else(|| Some(raw_target.to_string())),
                line_number: Some(idx as i32 + 1), // 1-indexed
                context: Some(snippet),
                created_at: chrono::Utc::now(),
            });
        }
    }
    found
}
/// Extract markdown links from content.
/// Matches: `[text](path)` but only for internal paths — any target with a
/// URI scheme (`http://`, `mailto:`, `tel:`, `obsidian://`, ...) or a
/// protocol-relative `//host` prefix is treated as external and skipped.
fn extract_markdown_links(source_media_id: MediaId, content: &str) -> Vec<MarkdownLink> {
    let re = Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap();
    let mut links = Vec::new();
    for (line_num, line) in content.lines().enumerate() {
        for cap in re.captures_iter(line) {
            let full_match = cap.get(0).unwrap();
            let text = cap.get(1).unwrap().as_str().trim();
            let path = cap.get(2).unwrap().as_str().trim();
            // Skip external links. The old explicit http/https/mailto/ftp
            // list let other schemes (tel:, obsidian://, //host) through.
            if is_external_target(path) {
                continue;
            }
            // Skip anchor-only links
            if path.starts_with('#') {
                continue;
            }
            // Remove any anchor from the path for resolution
            let target_path = path.split('#').next().unwrap_or(path);
            let context = extract_context(content, line_num, full_match.start(), full_match.end());
            links.push(MarkdownLink {
                id: Uuid::now_v7(),
                source_media_id,
                target_path: target_path.to_string(),
                target_media_id: None,
                link_type: LinkType::MarkdownLink,
                link_text: Some(text.to_string()),
                line_number: Some(line_num as i32 + 1),
                context: Some(context),
                created_at: chrono::Utc::now(),
            });
        }
    }
    links
}

/// True when `path` points outside the vault: protocol-relative (`//host`)
/// or carrying a RFC 3986-style URI scheme before the first `:`.
/// A single leading letter followed by `:` (e.g. `C:\notes`) is treated as
/// a Windows drive, not a scheme.
fn is_external_target(path: &str) -> bool {
    if path.starts_with("//") {
        return true;
    }
    if let Some(colon) = path.find(':') {
        let scheme = &path[..colon];
        let starts_alpha = matches!(scheme.chars().next(), Some(c) if c.is_ascii_alphabetic());
        let scheme_chars = scheme
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '+' || c == '.' || c == '-');
        if scheme.len() > 1 && starts_alpha && scheme_chars {
            return true;
        }
    }
    false
}
/// Extract surrounding context for a link.
///
/// Returns the link's line, truncated to the configured budget; a short
/// line (when not the first line) is padded with its neighbouring lines.
/// The `_start`/`_end` byte offsets are currently unused.
fn extract_context(content: &str, line_num: usize, _start: usize, _end: usize) -> String {
    let all_lines: Vec<&str> = content.lines().collect();
    let current = match all_lines.get(line_num) {
        Some(l) => *l,
        None => return String::new(),
    };
    let budget = CONTEXT_CHARS_BEFORE + CONTEXT_CHARS_AFTER;
    if current.len() < 30 && line_num > 0 {
        // Short line: merge the trimmed neighbours around it for context.
        let before = all_lines.get(line_num - 1).copied().unwrap_or("");
        let after = all_lines.get(line_num + 1).copied().unwrap_or("");
        let merged = format!("{} {} {}", before.trim(), current.trim(), after.trim());
        merged.chars().take(budget + 20).collect()
    } else if current.len() > budget {
        // Long line: keep only the first `budget` characters.
        current.chars().take(budget).collect()
    } else {
        current.to_string()
    }
}
/// Link resolution strategies for finding target media items.
///
/// NOTE(review): these variants are not consumed by `resolve_link_candidates`
/// in this module (which always tries every strategy in order) — presumably
/// intended for callers that report or restrict how a link was resolved;
/// confirm against call sites.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ResolutionStrategy {
/// Direct path match
DirectPath,
/// Relative to source directory
RelativeToSource,
/// Filename with .md extension added
FilenameWithMd,
/// Filename-only search (Obsidian-style)
FilenameOnly,
}
/// Resolve a link target to possible file paths.
///
/// Returns a list of candidate paths to check, in order of preference:
/// path-like targets first, then siblings of the source file (with `.md`
/// variants), then `<target>.md` under each root directory.
pub fn resolve_link_candidates(
    target: &str,
    source_path: &Path,
    root_dirs: &[std::path::PathBuf],
) -> Vec<std::path::PathBuf> {
    let target = target.trim();
    let mut out: Vec<std::path::PathBuf> = Vec::new();

    // 1. Path-like targets (containing a separator) are tried verbatim,
    //    either absolutely or under every root directory.
    if target.contains('/') || target.contains('\\') {
        let as_path = std::path::PathBuf::from(target);
        if as_path.is_absolute() {
            out.push(as_path);
        } else {
            out.extend(root_dirs.iter().map(|root| root.join(&as_path)));
        }
    }

    // 2. Relative to the directory containing the source file.
    if let Some(dir) = source_path.parent() {
        let sibling = dir.join(target);
        out.push(sibling.clone());
        if !target.ends_with(".md") {
            // `with_extension` replaces any existing extension...
            out.push(sibling.with_extension("md"));
            // ...while this variant appends `.md` to the full file name.
            let mut appended = sibling.clone();
            appended.set_file_name(format!(
                "{}.md",
                sibling.file_name().unwrap_or_default().to_string_lossy()
            ));
            out.push(appended);
        }
    }

    // 3. `<target>.md` directly under each root (Obsidian-style lookup).
    let md_name = if target.ends_with(".md") {
        target.to_string()
    } else {
        format!("{}.md", target)
    };
    out.extend(root_dirs.iter().map(|root| root.join(&md_name)));

    // Drop duplicates while preserving first-occurrence order.
    let mut seen = std::collections::HashSet::new();
    out.retain(|candidate| seen.insert(candidate.clone()));
    out
}
/// Extract frontmatter aliases from markdown content.
///
/// Obsidian uses the `aliases` field in frontmatter to define alternative names
/// for a note that can be used in wikilinks. Both an array of strings and a
/// single bare string are accepted; anything else yields an empty list.
pub fn extract_aliases(content: &str) -> Result<Vec<String>> {
    let matter = gray_matter::Matter::<gray_matter::engine::YAML>::new();
    // Parse failures and missing frontmatter both collapse to "no aliases".
    let frontmatter = matter.parse(content).ok().and_then(|doc| doc.data);
    let aliases = match frontmatter {
        Some(gray_matter::Pod::Hash(fields)) => match fields.get("aliases") {
            // List form: keep only the string entries.
            Some(gray_matter::Pod::Array(items)) => items
                .iter()
                .filter_map(|item| match item {
                    gray_matter::Pod::String(s) => Some(s.clone()),
                    _ => None,
                })
                .collect(),
            // A bare string is treated as a single alias.
            Some(gray_matter::Pod::String(s)) => vec![s.clone()],
            _ => Vec::new(),
        },
        _ => Vec::new(),
    };
    Ok(aliases)
}
#[cfg(test)]
mod tests {
use super::*;
// All extraction tests share a dummy (nil-UUID) source media id; the
// extractors only thread it through, so its value is irrelevant.
fn test_media_id() -> MediaId {
MediaId(Uuid::nil())
}
#[test]
fn test_extract_simple_wikilink() {
let content = "This is a [[simple link]] in text.";
let links = extract_links(test_media_id(), content);
assert_eq!(links.len(), 1);
assert_eq!(links[0].target_path, "simple link");
assert_eq!(links[0].link_type, LinkType::Wikilink);
// With no |display part, link_text falls back to the target itself.
assert_eq!(links[0].link_text, Some("simple link".to_string()));
}
#[test]
fn test_extract_wikilink_with_display() {
let content = "Check out [[target note|this article]] for more.";
let links = extract_links(test_media_id(), content);
assert_eq!(links.len(), 1);
assert_eq!(links[0].target_path, "target note");
assert_eq!(links[0].link_text, Some("this article".to_string()));
}
#[test]
fn test_extract_embed() {
let content = "Here is an image: ![[image.png]]";
let links = extract_links(test_media_id(), content);
assert_eq!(links.len(), 1);
assert_eq!(links[0].target_path, "image.png");
assert_eq!(links[0].link_type, LinkType::Embed);
}
#[test]
fn test_extract_markdown_link() {
let content = "Read [the documentation](docs/README.md) for details.";
let links = extract_links(test_media_id(), content);
assert_eq!(links.len(), 1);
assert_eq!(links[0].target_path, "docs/README.md");
assert_eq!(links[0].link_type, LinkType::MarkdownLink);
assert_eq!(links[0].link_text, Some("the documentation".to_string()));
}
#[test]
fn test_skip_external_links() {
// External schemes must never land in the internal link table.
let content = "Visit [our site](https://example.com) or [email us](mailto:test@test.com).";
let links = extract_links(test_media_id(), content);
assert!(links.is_empty());
}
#[test]
fn test_multiple_links() {
let content = r#"
# My Note
This links to [[Note A]] and also [[Note B|Note B Title]].
We also have a markdown link to [config](./config.md).
And an embedded image: ![[diagram.png]]
"#;
let links = extract_links(test_media_id(), content);
assert_eq!(links.len(), 4);
let types: Vec<_> = links.iter().map(|l| l.link_type).collect();
assert!(types.contains(&LinkType::Wikilink));
assert!(types.contains(&LinkType::Embed));
assert!(types.contains(&LinkType::MarkdownLink));
}
#[test]
fn test_line_numbers() {
// line_number is 1-indexed.
let content = "Line 1\n[[link on line 2]]\nLine 3";
let links = extract_links(test_media_id(), content);
assert_eq!(links.len(), 1);
assert_eq!(links[0].line_number, Some(2));
}
#[test]
fn test_resolve_candidates() {
let source_path = std::path::Path::new("/notes/projects/readme.md");
let root_dirs = vec![std::path::PathBuf::from("/notes")];
let candidates = resolve_link_candidates("My Note", source_path, &root_dirs);
// Should include relative path and .md variations
assert!(!candidates.is_empty());
assert!(candidates
.iter()
.any(|p| p.to_string_lossy().contains("My Note.md")));
}
#[test]
fn test_extract_aliases() {
let content = r#"---
title: My Note
aliases:
- Alternative Name
- Another Alias
---
# Content here
"#;
let aliases = extract_aliases(content).unwrap();
assert_eq!(aliases, vec!["Alternative Name", "Another Alias"]);
}
#[test]
fn test_extract_single_alias() {
// Obsidian also accepts a single bare string for `aliases`.
let content = r#"---
title: My Note
aliases: Single Alias
---
# Content
"#;
let aliases = extract_aliases(content).unwrap();
assert_eq!(aliases, vec!["Single Alias"]);
}
#[test]
fn test_wikilink_not_matching_embed() {
// The wikilink extractor must not double-count `![[...]]` embeds.
let content = "A wikilink [[note]] and an embed ![[image.png]]";
let links = extract_links(test_media_id(), content);
assert_eq!(links.len(), 2);
let wikilinks: Vec<_> = links
.iter()
.filter(|l| l.link_type == LinkType::Wikilink)
.collect();
let embeds: Vec<_> = links
.iter()
.filter(|l| l.link_type == LinkType::Embed)
.collect();
assert_eq!(wikilinks.len(), 1);
assert_eq!(embeds.len(), 1);
assert_eq!(wikilinks[0].target_path, "note");
assert_eq!(embeds[0].target_path, "image.png");
}
}

View file

@ -154,6 +154,9 @@ pub struct MediaItem {
/// Soft delete timestamp. If set, the item is in the trash.
pub deleted_at: Option<DateTime<Utc>>,
/// When markdown links were last extracted from this file.
pub links_extracted_at: Option<DateTime<Utc>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -486,3 +489,100 @@ impl fmt::Display for ReadingStatus {
}
}
}
// ===== Markdown Links (Obsidian-style) =====
/// Type of markdown link.
///
/// The serde `snake_case` names ("wikilink", "markdown_link", "embed") are
/// the same strings produced by `Display` and accepted by `FromStr`, which
/// are used when storing the type as text.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LinkType {
/// Wikilink: [[target]] or [[target|display]]
Wikilink,
/// Markdown link: [text](path)
MarkdownLink,
/// Embed: ![[target]]
Embed,
}
impl fmt::Display for LinkType {
    /// Render the snake_case form; must stay in sync with `FromStr`
    /// and the serde `rename_all = "snake_case"` attribute on the enum.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            Self::Wikilink => "wikilink",
            Self::MarkdownLink => "markdown_link",
            Self::Embed => "embed",
        };
        f.write_str(name)
    }
}
impl std::str::FromStr for LinkType {
    type Err = String;

    /// Parse the snake_case form produced by `Display`, case-insensitively.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.to_lowercase();
        match normalized.as_str() {
            "wikilink" => Ok(Self::Wikilink),
            "markdown_link" => Ok(Self::MarkdownLink),
            "embed" => Ok(Self::Embed),
            // Report the caller's original spelling, not the lowercased one.
            _ => Err(format!("unknown link type: {}", s)),
        }
    }
}
/// A markdown link extracted from a file
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MarkdownLink {
// Unique id of this link record
pub id: Uuid,
// Media item whose content contains the link
pub source_media_id: MediaId,
/// Raw link target as written in the source (wikilink name or path)
pub target_path: String,
/// Resolved target media_id (None if unresolved)
pub target_media_id: Option<MediaId>,
// Wikilink / markdown link / embed
pub link_type: LinkType,
/// Display text for the link
pub link_text: Option<String>,
/// Line number in source file (1-indexed)
pub line_number: Option<i32>,
/// Surrounding text for backlink preview
pub context: Option<String>,
// When this link record was created (extraction time)
pub created_at: DateTime<Utc>,
}
/// Information about a backlink (incoming link), joined with details of the
/// source item so callers can render a preview without another lookup.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BacklinkInfo {
// Id of the underlying MarkdownLink record
pub link_id: Uuid,
// The item that contains the link
pub source_id: MediaId,
// Title of the linking item, if it has one
pub source_title: Option<String>,
// Path of the linking item
pub source_path: String,
// Display text of the link
pub link_text: Option<String>,
// 1-indexed line number in the linking file
pub line_number: Option<i32>,
// Surrounding text for preview
pub context: Option<String>,
pub link_type: LinkType,
}
/// Graph data for visualization: the full node/edge set returned by a
/// storage backend's `get_graph_data`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct GraphData {
pub nodes: Vec<GraphNode>,
pub edges: Vec<GraphEdge>,
}
/// A node in the graph visualization
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GraphNode {
// Media item id as a string (matches GraphEdge.source/target)
pub id: String,
// Display label (title when available, otherwise file name)
pub label: String,
pub title: Option<String>,
pub media_type: String,
/// Number of outgoing links from this node
pub link_count: u32,
/// Number of incoming links to this node
pub backlink_count: u32,
}
/// An edge (link) in the graph visualization; `source`/`target` are media
/// item id strings referring to `GraphNode.id`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GraphEdge {
pub source: String,
pub target: String,
pub link_type: LinkType,
}

View file

@ -789,6 +789,45 @@ pub trait StorageBackend: Send + Sync + 'static {
/// Count items in trash.
async fn count_trash(&self) -> Result<u64>;
// ===== Markdown Links (Obsidian-style) =====
/// Save extracted markdown links for a media item.
/// This replaces any existing links for the source media.
async fn save_markdown_links(
&self,
media_id: MediaId,
links: &[crate::model::MarkdownLink],
) -> Result<()>;
/// Get outgoing links from a media item (resolved or not).
async fn get_outgoing_links(&self, media_id: MediaId) -> Result<Vec<crate::model::MarkdownLink>>;
/// Get backlinks (incoming links) to a media item, joined with source
/// item details for preview rendering.
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>>;
/// Clear all links for a media item.
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()>;
/// Get graph data for visualization.
///
/// If `center_id` is provided, returns nodes within `depth` hops of that node.
/// If `center_id` is None, returns the entire graph (limited by internal max).
async fn get_graph_data(
&self,
center_id: Option<MediaId>,
depth: u32,
) -> Result<crate::model::GraphData>;
/// Resolve unresolved links by matching target_path against media item paths.
/// Returns the number of links that were resolved.
/// Links that already have a target are left untouched.
async fn resolve_links(&self) -> Result<u64>;
/// Update the links_extracted_at timestamp for a media item.
async fn mark_links_extracted(&self, media_id: MediaId) -> Result<()>;
/// Get count of unresolved links (links where target_media_id is NULL).
async fn count_unresolved_links(&self) -> Result<u64>;
}
/// Comprehensive library statistics.

View file

@ -200,6 +200,9 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
// Trash support
deleted_at: row.try_get("deleted_at").ok().flatten(),
// Markdown links extraction timestamp
links_extracted_at: row.try_get("links_extracted_at").ok().flatten(),
})
}
@ -6036,6 +6039,425 @@ impl StorageBackend for PostgresBackend {
let count: i64 = row.get(0);
Ok(count as u64)
}
// ===== Markdown Links (Obsidian-style) =====
/// Save extracted markdown links for a media item, replacing any existing
/// links from the same source.
///
/// Fix: the delete and the per-link inserts previously ran as independent
/// statements, so a mid-loop failure deleted the old links and left a
/// partial new set. They now run inside one transaction; the INSERT is also
/// prepared once instead of being re-parsed per link.
async fn save_markdown_links(
    &self,
    media_id: MediaId,
    links: &[crate::model::MarkdownLink],
) -> Result<()> {
    let mut client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let media_id_str = media_id.0.to_string();
    // Either the full replacement happens or nothing changes.
    let tx = client
        .transaction()
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    // Delete existing links for this source
    tx.execute(
        "DELETE FROM markdown_links WHERE source_media_id = $1",
        &[&media_id_str],
    )
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))?;
    // Prepare the insert once; executed once per link below.
    let insert = tx
        .prepare(
            "INSERT INTO markdown_links (
id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)",
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    for link in links {
        // UUIDs are stored as text columns.
        let target_media_id = link.target_media_id.map(|id| id.0.to_string());
        tx.execute(
            &insert,
            &[
                &link.id.to_string(),
                &media_id_str,
                &link.target_path,
                &target_media_id,
                &link.link_type.to_string(),
                &link.link_text,
                &link.line_number,
                &link.context,
                &link.created_at,
            ],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    }
    tx.commit()
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    Ok(())
}
async fn get_outgoing_links(&self, media_id: MediaId) -> Result<Vec<crate::model::MarkdownLink>> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let media_id_str = media_id.0.to_string();
let rows = client
.query(
"SELECT id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
FROM markdown_links
WHERE source_media_id = $1
ORDER BY line_number",
&[&media_id_str],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let mut links = Vec::new();
for row in rows {
links.push(row_to_markdown_link(&row)?);
}
Ok(links)
}
/// Fetch incoming links for a media item, joined with the linking item's
/// title and path so callers can render backlink previews directly.
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let target = media_id.0.to_string();
    let rows = client
        .query(
            "SELECT l.id, l.source_media_id, m.title, m.path,
l.link_text, l.line_number, l.context, l.link_type
FROM markdown_links l
JOIN media_items m ON l.source_media_id = m.id
WHERE l.target_media_id = $1
ORDER BY m.title, l.line_number",
            &[&target],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    let mut backlinks = Vec::with_capacity(rows.len());
    for row in &rows {
        // UUID columns are stored as text; parse them back.
        let raw_link_id: String = row.get(0);
        let raw_source_id: String = row.get(1);
        let raw_link_type: String = row.get(7);
        backlinks.push(crate::model::BacklinkInfo {
            link_id: Uuid::parse_str(&raw_link_id)
                .map_err(|e| PinakesError::Database(e.to_string()))?,
            source_id: MediaId(
                Uuid::parse_str(&raw_source_id)
                    .map_err(|e| PinakesError::Database(e.to_string()))?,
            ),
            source_title: row.get(2),
            source_path: row.get(3),
            link_text: row.get(4),
            line_number: row.get(5),
            context: row.get(6),
            // Unknown type strings fall back to Wikilink instead of erroring.
            link_type: raw_link_type
                .parse()
                .unwrap_or(crate::model::LinkType::Wikilink),
        });
    }
    Ok(backlinks)
}
/// Remove every outgoing link recorded for the given source media item.
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    client
        .execute(
            "DELETE FROM markdown_links WHERE source_media_id = $1",
            &[&media_id.0.to_string()],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    Ok(())
}
/// Build graph data for visualization.
///
/// With a `center_id`, performs a breadth-first expansion over both link
/// directions up to `depth` hops (clamped to 5); without one, seeds the
/// node set from up to 500 non-deleted markdown items. Edges are only
/// emitted between nodes present in the final node set.
///
/// NOTE(review): node metadata and per-node link counts are fetched with
/// one query per node (N+1); fine for small graphs, worth batching if
/// libraries grow large.
async fn get_graph_data(
&self,
center_id: Option<MediaId>,
depth: u32,
) -> Result<crate::model::GraphData> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let depth = depth.min(5); // Limit depth
let mut nodes = Vec::new();
let mut edges = Vec::new();
let mut node_ids: std::collections::HashSet<String> = std::collections::HashSet::new();
if let Some(center) = center_id {
// BFS to find connected nodes within depth
let mut frontier = vec![center.0.to_string()];
let mut visited = std::collections::HashSet::new();
visited.insert(center.0.to_string());
for _ in 0..depth {
if frontier.is_empty() {
break;
}
let mut next_frontier = Vec::new();
for node_id in &frontier {
// Get outgoing links
let rows = client
.query(
"SELECT target_media_id FROM markdown_links
WHERE source_media_id = $1 AND target_media_id IS NOT NULL",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let id: String = row.get(0);
if !visited.contains(&id) {
visited.insert(id.clone());
next_frontier.push(id);
}
}
// Get incoming links (the graph is expanded in both directions)
let rows = client
.query(
"SELECT source_media_id FROM markdown_links
WHERE target_media_id = $1",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let id: String = row.get(0);
if !visited.contains(&id) {
visited.insert(id.clone());
next_frontier.push(id);
}
}
}
frontier = next_frontier;
}
node_ids = visited;
} else {
// Get all markdown files with links (limit to 500)
let rows = client
.query(
"SELECT DISTINCT id FROM media_items
WHERE media_type = 'markdown' AND deleted_at IS NULL
LIMIT 500",
&[],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let id: String = row.get(0);
node_ids.insert(id);
}
}
// Build nodes with metadata; ids that no longer exist in media_items
// are silently dropped (query_opt returning None skips the node).
for node_id in &node_ids {
let row = client
.query_opt(
"SELECT id, COALESCE(title, file_name) as label, title, media_type
FROM media_items WHERE id = $1",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
if let Some(row) = row {
let id: String = row.get(0);
let label: String = row.get(1);
let title: Option<String> = row.get(2);
let media_type: String = row.get(3);
// Count outgoing links
let link_count_row = client
.query_one(
"SELECT COUNT(*) FROM markdown_links WHERE source_media_id = $1",
&[&id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let link_count: i64 = link_count_row.get(0);
// Count incoming links
let backlink_count_row = client
.query_one(
"SELECT COUNT(*) FROM markdown_links WHERE target_media_id = $1",
&[&id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let backlink_count: i64 = backlink_count_row.get(0);
nodes.push(crate::model::GraphNode {
id: id.clone(),
label,
title,
media_type,
link_count: link_count as u32,
backlink_count: backlink_count as u32,
});
}
}
// Build edges; only keep edges whose target is also in the node set.
for node_id in &node_ids {
let rows = client
.query(
"SELECT source_media_id, target_media_id, link_type
FROM markdown_links
WHERE source_media_id = $1 AND target_media_id IS NOT NULL",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let source: String = row.get(0);
let target: String = row.get(1);
let link_type_str: String = row.get(2);
if node_ids.contains(&target) {
edges.push(crate::model::GraphEdge {
source,
target,
// Unknown type strings fall back to Wikilink.
link_type: link_type_str
.parse()
.unwrap_or(crate::model::LinkType::Wikilink),
});
}
}
}
Ok(crate::model::GraphData { nodes, edges })
}
/// Resolve unresolved links (NULL target_media_id) by matching
/// `target_path` against media item paths, then file names. Returns the
/// total number of rows updated by both passes.
///
/// NOTE(review): the `LIMIT 1` subqueries have no `ORDER BY`, so when
/// several non-deleted items share a path or file name the chosen target
/// is arbitrary — confirm this is acceptable or add a deterministic order.
async fn resolve_links(&self) -> Result<u64> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
// Strategy 1: Exact path match
let result1 = client
.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
)",
&[],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
// Strategy 2: Filename match — accepts the name as-is, with `.md`
// appended, or with a `.md` suffix stripped from the stored file name.
let result2 = client
.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
)",
&[],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
Ok(result1 + result2)
}
/// Stamp the current time into `links_extracted_at` for a media item.
async fn mark_links_extracted(&self, media_id: MediaId) -> Result<()> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let extracted_at = chrono::Utc::now();
    let id_text = media_id.0.to_string();
    client
        .execute(
            "UPDATE media_items SET links_extracted_at = $1 WHERE id = $2",
            &[&extracted_at, &id_text],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    Ok(())
}
/// Count links whose target has not yet been resolved to a media item.
async fn count_unresolved_links(&self) -> Result<u64> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let row = client
        .query_one(
            "SELECT COUNT(*) FROM markdown_links WHERE target_media_id IS NULL",
            &[],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    // COUNT(*) is never negative, so the i64 -> u64 cast is safe.
    let total: i64 = row.get(0);
    Ok(total as u64)
}
}
impl PostgresBackend {
@ -6329,6 +6751,37 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
find_inner(query, &mut offset).unwrap_or(1)
}
/// Convert a database row into a [`crate::model::MarkdownLink`].
///
/// Column order must match the SELECT lists used by the link queries:
/// id, source_media_id, target_path, target_media_id, link_type,
/// link_text, line_number, context, created_at.
fn row_to_markdown_link(row: &Row) -> Result<crate::model::MarkdownLink> {
    let raw_id: String = row.get(0);
    let raw_source: String = row.get(1);
    let target_path: String = row.get(2);
    let raw_target: Option<String> = row.get(3);
    let raw_link_type: String = row.get(4);
    let link_text: Option<String> = row.get(5);
    let line_number: Option<i32> = row.get(6);
    let context: Option<String> = row.get(7);
    let created_at: chrono::DateTime<Utc> = row.get(8);
    let id = Uuid::parse_str(&raw_id).map_err(|e| PinakesError::Database(e.to_string()))?;
    let source = Uuid::parse_str(&raw_source).map_err(|e| PinakesError::Database(e.to_string()))?;
    // An unparsable target id is treated as "unresolved" rather than an error.
    let target_media_id = raw_target.and_then(|s| Uuid::parse_str(&s).ok()).map(MediaId);
    // Unknown link types fall back to a wikilink.
    let link_type = raw_link_type
        .parse()
        .unwrap_or(crate::model::LinkType::Wikilink);
    Ok(crate::model::MarkdownLink {
        id,
        source_media_id: MediaId(source),
        target_path,
        target_media_id,
        link_type,
        link_text,
        line_number,
        context,
        created_at,
    })
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -160,6 +160,14 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
.flatten()
.and_then(|s| DateTime::parse_from_rfc3339(&s).ok())
.map(|dt| dt.with_timezone(&Utc)),
// Markdown links extraction timestamp
links_extracted_at: row
.get::<_, Option<String>>("links_extracted_at")
.ok()
.flatten()
.and_then(|s| DateTime::parse_from_rfc3339(&s).ok())
.map(|dt| dt.with_timezone(&Utc)),
})
}
@ -6379,6 +6387,428 @@ impl StorageBackend for SqliteBackend {
Ok(count)
}
// ===== Markdown Links (Obsidian-style) =====
/// Replace all stored markdown links originating from `media_id`.
///
/// The previous link set for the source is deleted and the new set is
/// inserted. Both steps now run inside a single transaction, so a failed
/// insert can no longer leave the source with its old links deleted
/// (previously delete and inserts were separate auto-committed statements).
async fn save_markdown_links(
    &self,
    media_id: MediaId,
    links: &[crate::model::MarkdownLink],
) -> Result<()> {
    let conn = self.conn.clone();
    let media_id_str = media_id.0.to_string();
    // Owned copy so the link data can move into the blocking task.
    let links: Vec<_> = links.to_vec();
    tokio::task::spawn_blocking(move || {
        let mut conn = conn.lock().unwrap();
        // Atomic delete-then-insert: commits only if every row succeeds;
        // dropping the transaction on error rolls everything back.
        let tx = conn.transaction()?;
        tx.execute(
            "DELETE FROM markdown_links WHERE source_media_id = ?1",
            [&media_id_str],
        )?;
        {
            let mut stmt = tx.prepare(
                "INSERT INTO markdown_links (
id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
            )?;
            for link in &links {
                stmt.execute(params![
                    link.id.to_string(),
                    media_id_str,
                    link.target_path,
                    link.target_media_id.map(|id| id.0.to_string()),
                    link.link_type.to_string(),
                    link.link_text,
                    link.line_number,
                    link.context,
                    link.created_at.to_rfc3339(),
                ])?;
            }
        }
        tx.commit()?;
        Ok::<_, rusqlite::Error>(())
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(())
}
async fn get_outgoing_links(&self, media_id: MediaId) -> Result<Vec<crate::model::MarkdownLink>> {
let conn = self.conn.clone();
let media_id_str = media_id.0.to_string();
let links = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
let mut stmt = conn.prepare(
"SELECT id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
FROM markdown_links
WHERE source_media_id = ?1
ORDER BY line_number"
)?;
let rows = stmt.query_map([&media_id_str], |row| {
row_to_markdown_link(row)
})?;
let mut links = Vec::new();
for row in rows {
links.push(row?);
}
Ok::<_, rusqlite::Error>(links)
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(links)
}
/// Fetch all links pointing *to* `media_id`, joined with each source
/// item's title and path, ordered by source title then line number.
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>> {
let conn = self.conn.clone();
let media_id_str = media_id.0.to_string();
// Blocking rusqlite work runs off the async executor.
let backlinks = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
let mut stmt = conn.prepare(
"SELECT l.id, l.source_media_id, m.title, m.path,
l.link_text, l.line_number, l.context, l.link_type
FROM markdown_links l
JOIN media_items m ON l.source_media_id = m.id
WHERE l.target_media_id = ?1
ORDER BY m.title, l.line_number"
)?;
let rows = stmt.query_map([&media_id_str], |row| {
// Column order mirrors the SELECT list above.
let link_id_str: String = row.get(0)?;
let source_id_str: String = row.get(1)?;
let source_title: Option<String> = row.get(2)?;
let source_path: String = row.get(3)?;
let link_text: Option<String> = row.get(4)?;
let line_number: Option<i32> = row.get(5)?;
let context: Option<String> = row.get(6)?;
let link_type_str: String = row.get(7)?;
Ok(crate::model::BacklinkInfo {
link_id: parse_uuid(&link_id_str)?,
source_id: MediaId(parse_uuid(&source_id_str)?),
source_title,
source_path,
link_text,
line_number,
context,
// Unknown link types fall back to a wikilink.
link_type: link_type_str.parse().unwrap_or(crate::model::LinkType::Wikilink),
})
})?;
let mut backlinks = Vec::new();
for row in rows {
backlinks.push(row?);
}
Ok::<_, rusqlite::Error>(backlinks)
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(backlinks)
}
/// Remove every stored markdown link whose source is `media_id`.
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()> {
    let conn = self.conn.clone();
    let source_id = media_id.0.to_string();
    tokio::task::spawn_blocking(move || {
        let guard = conn.lock().unwrap();
        guard.execute(
            "DELETE FROM markdown_links WHERE source_media_id = ?1",
            [&source_id],
        )?;
        Ok::<_, rusqlite::Error>(())
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(())
}
/// Build the link graph for visualization.
///
/// With a `center_id`, performs a breadth-first traversal over both
/// outgoing and incoming links up to `depth` hops (capped at 5) and
/// returns only the reachable nodes. Without one, returns up to 500
/// markdown items as nodes. Edges are only emitted between nodes that
/// are both present in the selected node set.
///
/// NOTE(review): the node-metadata and edge statements are re-prepared on
/// every loop iteration; hoisting `conn.prepare` out of the loops would
/// avoid redundant SQL compilation for large graphs.
async fn get_graph_data(
&self,
center_id: Option<MediaId>,
depth: u32,
) -> Result<crate::model::GraphData> {
let conn = self.conn.clone();
let center_id_str = center_id.map(|id| id.0.to_string());
let depth = depth.min(5); // Limit depth to prevent huge queries
let graph_data = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
let mut nodes = Vec::new();
let mut edges = Vec::new();
let mut node_ids = std::collections::HashSet::new();
// Get nodes - either all markdown files or those connected to center
if let Some(center_id) = center_id_str {
// BFS to find connected nodes within depth
let mut frontier = vec![center_id.clone()];
let mut visited = std::collections::HashSet::new();
visited.insert(center_id.clone());
for _ in 0..depth {
let mut next_frontier = Vec::new();
for node_id in &frontier {
// Get outgoing links
let mut stmt = conn.prepare(
"SELECT target_media_id FROM markdown_links
WHERE source_media_id = ?1 AND target_media_id IS NOT NULL"
)?;
let rows = stmt.query_map([node_id], |row| {
let id: String = row.get(0)?;
Ok(id)
})?;
for row in rows {
let id = row?;
if !visited.contains(&id) {
visited.insert(id.clone());
next_frontier.push(id);
}
}
// Get incoming links
let mut stmt = conn.prepare(
"SELECT source_media_id FROM markdown_links
WHERE target_media_id = ?1"
)?;
let rows = stmt.query_map([node_id], |row| {
let id: String = row.get(0)?;
Ok(id)
})?;
for row in rows {
let id = row?;
if !visited.contains(&id) {
visited.insert(id.clone());
next_frontier.push(id);
}
}
}
// Advance the BFS one hop.
frontier = next_frontier;
}
node_ids = visited;
} else {
// Get all markdown files with links (limit to 500 for performance)
let mut stmt = conn.prepare(
"SELECT DISTINCT id FROM media_items
WHERE media_type = 'markdown' AND deleted_at IS NULL
LIMIT 500"
)?;
let rows = stmt.query_map([], |row| {
let id: String = row.get(0)?;
Ok(id)
})?;
for row in rows {
node_ids.insert(row?);
}
}
// Build nodes with metadata
for node_id in &node_ids {
let mut stmt = conn.prepare(
"SELECT id, COALESCE(title, file_name) as label, title, media_type
FROM media_items WHERE id = ?1"
)?;
// A node whose media item no longer exists is silently dropped.
if let Ok((id, label, title, media_type)) = stmt.query_row([node_id], |row| {
Ok((
row.get::<_, String>(0)?,
row.get::<_, String>(1)?,
row.get::<_, Option<String>>(2)?,
row.get::<_, String>(3)?,
))
}) {
// Count outgoing links
let link_count: i64 = conn.query_row(
"SELECT COUNT(*) FROM markdown_links WHERE source_media_id = ?1",
[&id],
|row| row.get(0),
)?;
// Count incoming links
let backlink_count: i64 = conn.query_row(
"SELECT COUNT(*) FROM markdown_links WHERE target_media_id = ?1",
[&id],
|row| row.get(0),
)?;
nodes.push(crate::model::GraphNode {
id: id.clone(),
label,
title,
media_type,
link_count: link_count as u32,
backlink_count: backlink_count as u32,
});
}
}
// Build edges
for node_id in &node_ids {
let mut stmt = conn.prepare(
"SELECT source_media_id, target_media_id, link_type
FROM markdown_links
WHERE source_media_id = ?1 AND target_media_id IS NOT NULL"
)?;
let rows = stmt.query_map([node_id], |row| {
let source: String = row.get(0)?;
let target: String = row.get(1)?;
let link_type_str: String = row.get(2)?;
Ok((source, target, link_type_str))
})?;
for row in rows {
let (source, target, link_type_str) = row?;
// Only keep edges whose target is also part of the node set.
if node_ids.contains(&target) {
edges.push(crate::model::GraphEdge {
source,
target,
link_type: link_type_str.parse().unwrap_or(crate::model::LinkType::Wikilink),
});
}
}
}
Ok::<_, rusqlite::Error>(crate::model::GraphData { nodes, edges })
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(graph_data)
}
/// Resolve `target_media_id` for markdown links that are still unresolved,
/// returning the total number of link rows updated.
///
/// Two strategies run in order:
/// 1. Exact match of `target_path` against `media_items.path`.
/// 2. Obsidian-style filename match (target with/without a `.md` extension).
async fn resolve_links(&self) -> Result<u64> {
let conn = self.conn.clone();
let count = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
// Find unresolved links and try to resolve them
// Strategy 1: Exact path match
let updated1 = conn.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
)",
[],
)?;
// Strategy 2: Filename match (Obsidian-style)
// Match target_path to file_name (with or without .md extension)
// NOTE(review): REPLACE strips every '.md' occurrence, not just a
// trailing extension — confirm vault filenames never contain '.md'
// mid-name.
let updated2 = conn.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
)",
[],
)?;
// Total rows updated across both strategies.
Ok::<_, rusqlite::Error>((updated1 + updated2) as u64)
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(count)
}
/// Stamp `links_extracted_at` with the current UTC time for `media_id`.
async fn mark_links_extracted(&self, media_id: MediaId) -> Result<()> {
    let conn = self.conn.clone();
    let id = media_id.0.to_string();
    // Timestamp is stored as an RFC 3339 string in SQLite.
    let stamp = chrono::Utc::now().to_rfc3339();
    tokio::task::spawn_blocking(move || {
        let guard = conn.lock().unwrap();
        guard.execute(
            "UPDATE media_items SET links_extracted_at = ?1 WHERE id = ?2",
            params![stamp, id],
        )?;
        Ok::<_, rusqlite::Error>(())
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(())
}
async fn count_unresolved_links(&self) -> Result<u64> {
let conn = self.conn.clone();
let count = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
let count: i64 = conn.query_row(
"SELECT COUNT(*) FROM markdown_links WHERE target_media_id IS NULL",
[],
|row| row.get(0),
)?;
Ok::<_, rusqlite::Error>(count as u64)
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(count)
}
}
/// Parse a `markdown_links` row into a [`crate::model::MarkdownLink`].
///
/// Column order must match the SELECT lists used by the link queries:
/// id, source_media_id, target_path, target_media_id, link_type,
/// link_text, line_number, context, created_at.
fn row_to_markdown_link(row: &Row) -> rusqlite::Result<crate::model::MarkdownLink> {
let id_str: String = row.get(0)?;
let source_id_str: String = row.get(1)?;
let target_path: String = row.get(2)?;
let target_id: Option<String> = row.get(3)?;
let link_type_str: String = row.get(4)?;
let link_text: Option<String> = row.get(5)?;
let line_number: Option<i32> = row.get(6)?;
let context: Option<String> = row.get(7)?;
let created_at_str: String = row.get(8)?;
Ok(crate::model::MarkdownLink {
id: parse_uuid(&id_str)?,
source_media_id: MediaId(parse_uuid(&source_id_str)?),
target_path,
// An unparsable target id is treated as "unresolved" rather than an error.
target_media_id: target_id
.and_then(|s| Uuid::parse_str(&s).ok())
.map(MediaId),
// Unknown link types fall back to a wikilink.
link_type: link_type_str
.parse()
.unwrap_or(crate::model::LinkType::Wikilink),
link_text,
line_number,
context,
created_at: parse_datetime(&created_at_str),
})
}
// Helper function to parse a share row

View file

@ -98,6 +98,7 @@ pub async fn process_upload<R: AsyncRead + Unpin>(
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
};
// Store the media item

View file

@ -57,6 +57,7 @@ pub fn make_test_media(hash: &str) -> MediaItem {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
}
}
@ -93,5 +94,6 @@ pub fn create_test_media_item(path: PathBuf, hash: &str) -> MediaItem {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
}
}

View file

@ -47,6 +47,7 @@ async fn test_media_crud() {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
};
// Insert
@ -138,6 +139,7 @@ async fn test_tags() {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
};
storage.insert_media(&item).await.unwrap();
storage.tag_media(id, parent.id).await.unwrap();
@ -203,6 +205,7 @@ async fn test_collections() {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
};
storage.insert_media(&item).await.unwrap();
@ -263,6 +266,7 @@ async fn test_custom_fields() {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
};
storage.insert_media(&item).await.unwrap();
@ -342,6 +346,7 @@ async fn test_search() {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
};
storage.insert_media(&item).await.unwrap();
}
@ -486,6 +491,7 @@ async fn test_library_statistics_with_data() {
created_at: now,
updated_at: now,
deleted_at: None,
links_extracted_at: None,
};
storage.insert_media(&item).await.unwrap();

View file

@ -231,7 +231,17 @@ pub fn create_router_with_tls(
.route(
"/notifications/shares",
get(routes::shares::get_notifications),
);
)
// Markdown notes/links (read)
.route(
"/media/{id}/backlinks",
get(routes::notes::get_backlinks),
)
.route(
"/media/{id}/outgoing-links",
get(routes::notes::get_outgoing_links),
)
.nest("/notes", routes::notes::routes());
// Write routes: Editor+ required
let editor_routes = Router::new()
@ -281,6 +291,11 @@ pub fn create_router_with_tls(
"/media/{id}/custom-fields/{name}",
delete(routes::media::delete_custom_field),
)
// Markdown notes/links (write)
.route(
"/media/{id}/reindex-links",
post(routes::notes::reindex_links),
)
.route("/tags", post(routes::tags::create_tag))
.route("/tags/{id}", delete(routes::tags::delete_tag))
.route("/media/{media_id}/tags", post(routes::tags::tag_media))

View file

@ -12,6 +12,7 @@ pub mod health;
pub mod integrity;
pub mod jobs;
pub mod media;
pub mod notes;
pub mod photos;
pub mod playlists;
pub mod plugins;

View file

@ -0,0 +1,316 @@
//! API endpoints for Obsidian-style markdown notes features.
//!
//! Provides endpoints for:
//! - Backlinks (what links to this note)
//! - Outgoing links (what this note links to)
//! - Graph visualization data
//! - Link reindexing
use axum::{
extract::{Path, Query, State},
routing::{get, post},
Json, Router,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use pinakes_core::model::{BacklinkInfo, GraphData, GraphEdge, GraphNode, MarkdownLink, MediaId};
use crate::{error::ApiError, state::AppState};
// ===== Response DTOs =====
/// Response for backlinks query
#[derive(Debug, Serialize)]
pub struct BacklinksResponse {
/// The incoming links, one entry per stored link row.
pub backlinks: Vec<BacklinkItem>,
/// Convenience count; equals `backlinks.len()`.
pub count: usize,
}
/// Individual backlink item
#[derive(Debug, Serialize)]
pub struct BacklinkItem {
/// Identifier of the link row itself.
pub link_id: Uuid,
/// Media item the link originates from.
pub source_id: Uuid,
/// Title of the source item, when one is set.
pub source_title: Option<String>,
/// Filesystem path of the source item.
pub source_path: String,
/// Display text of the link, if any.
pub link_text: Option<String>,
/// Line number of the link in the source file, when known.
pub line_number: Option<i32>,
/// Surrounding text snippet, when captured.
pub context: Option<String>,
/// Link kind rendered as a string (e.g. wikilink).
pub link_type: String,
}
// Convert the storage-layer backlink record into the API DTO.
impl From<BacklinkInfo> for BacklinkItem {
fn from(info: BacklinkInfo) -> Self {
Self {
link_id: info.link_id,
source_id: info.source_id.0,
source_title: info.source_title,
source_path: info.source_path,
link_text: info.link_text,
line_number: info.line_number,
context: info.context,
link_type: info.link_type.to_string(),
}
}
}
/// Response for outgoing links query
#[derive(Debug, Serialize)]
pub struct OutgoingLinksResponse {
/// Links found in the note, resolved and unresolved alike.
pub links: Vec<OutgoingLinkItem>,
/// Convenience count; equals `links.len()`.
pub count: usize,
}
/// Individual outgoing link item
#[derive(Debug, Serialize)]
pub struct OutgoingLinkItem {
/// Identifier of the link row.
pub id: Uuid,
/// Raw link target as written in the note.
pub target_path: String,
/// Resolved target media item, when resolution succeeded.
pub target_id: Option<Uuid>,
/// Display text of the link, if any.
pub link_text: Option<String>,
/// Line number of the link in the source file, when known.
pub line_number: Option<i32>,
/// Link kind rendered as a string.
pub link_type: String,
/// True exactly when `target_id` is present.
pub is_resolved: bool,
}
// Convert the storage-layer link record into the API DTO.
impl From<MarkdownLink> for OutgoingLinkItem {
fn from(link: MarkdownLink) -> Self {
Self {
id: link.id,
target_path: link.target_path,
target_id: link.target_media_id.map(|id| id.0),
link_text: link.link_text,
line_number: link.line_number,
link_type: link.link_type.to_string(),
// Resolved means the target matched a media item.
is_resolved: link.target_media_id.is_some(),
}
}
}
/// Response for graph visualization
#[derive(Debug, Serialize)]
pub struct GraphResponse {
/// Nodes of the link graph.
pub nodes: Vec<GraphNodeResponse>,
/// Directed edges between the nodes.
pub edges: Vec<GraphEdgeResponse>,
/// Convenience count; equals `nodes.len()`.
pub node_count: usize,
/// Convenience count; equals `edges.len()`.
pub edge_count: usize,
}
/// Graph node for visualization
#[derive(Debug, Serialize)]
pub struct GraphNodeResponse {
/// Media item identifier (stringified).
pub id: String,
/// Display label for the node.
pub label: String,
/// Item title, when one is set.
pub title: Option<String>,
/// Media type of the item.
pub media_type: String,
/// Number of outgoing links.
pub link_count: u32,
/// Number of incoming links.
pub backlink_count: u32,
}
// Convert the storage-layer graph node into the API DTO.
impl From<GraphNode> for GraphNodeResponse {
fn from(node: GraphNode) -> Self {
Self {
id: node.id,
label: node.label,
title: node.title,
media_type: node.media_type,
link_count: node.link_count,
backlink_count: node.backlink_count,
}
}
}
/// Graph edge for visualization
#[derive(Debug, Serialize)]
pub struct GraphEdgeResponse {
/// Id of the node the link originates from.
pub source: String,
/// Id of the node the link points to.
pub target: String,
/// Link kind rendered as a string.
pub link_type: String,
}
// Convert the storage-layer graph edge into the API DTO.
impl From<GraphEdge> for GraphEdgeResponse {
fn from(edge: GraphEdge) -> Self {
Self {
source: edge.source,
target: edge.target,
link_type: edge.link_type.to_string(),
}
}
}
// Convert the full graph, recording counts before the vectors are consumed.
impl From<GraphData> for GraphResponse {
fn from(data: GraphData) -> Self {
let node_count = data.nodes.len();
let edge_count = data.edges.len();
Self {
nodes: data.nodes.into_iter().map(GraphNodeResponse::from).collect(),
edges: data.edges.into_iter().map(GraphEdgeResponse::from).collect(),
node_count,
edge_count,
}
}
}
/// Query parameters for graph endpoint
#[derive(Debug, Deserialize)]
pub struct GraphQuery {
/// Center node ID (optional, if not provided returns entire graph)
pub center: Option<Uuid>,
/// Depth of traversal from center (default: 2, max: 5)
#[serde(default = "default_depth")]
pub depth: u32,
}
/// Serde default for [`GraphQuery::depth`].
fn default_depth() -> u32 {
2
}
/// Response for reindex operation
#[derive(Debug, Serialize)]
pub struct ReindexResponse {
/// Human-readable outcome of the reindex request.
pub message: String,
/// Number of links extracted from the note's content.
pub links_extracted: usize,
}
/// Response for link resolution
#[derive(Debug, Serialize)]
pub struct ResolveLinksResponse {
/// Number of link rows resolved by this call.
pub resolved_count: u64,
}
/// Response for unresolved links count
#[derive(Debug, Serialize)]
pub struct UnresolvedLinksResponse {
/// Number of links whose target is still unresolved.
pub count: u64,
}
// ===== Handlers =====
/// Get backlinks (incoming links) to a media item.
///
/// GET /api/v1/media/{id}/backlinks
pub async fn get_backlinks(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<BacklinksResponse>, ApiError> {
    let raw = state.storage.get_backlinks(MediaId(id)).await?;
    let count = raw.len();
    // Map storage records into the API DTO shape.
    let backlinks = raw.into_iter().map(BacklinkItem::from).collect();
    Ok(Json(BacklinksResponse { backlinks, count }))
}
/// Get outgoing links from a media item.
///
/// GET /api/v1/media/{id}/outgoing-links
pub async fn get_outgoing_links(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<OutgoingLinksResponse>, ApiError> {
    let raw = state.storage.get_outgoing_links(MediaId(id)).await?;
    let count = raw.len();
    // Map storage records into the API DTO shape.
    let links = raw.into_iter().map(OutgoingLinkItem::from).collect();
    Ok(Json(OutgoingLinksResponse { links, count }))
}
/// Get graph data for visualization.
///
/// GET /api/v1/notes/graph?center={uuid}&depth={n}
pub async fn get_graph(
    State(state): State<AppState>,
    Query(params): Query<GraphQuery>,
) -> Result<Json<GraphResponse>, ApiError> {
    // Clamp traversal depth to the documented maximum of 5.
    let depth = params.depth.min(5);
    let center = params.center.map(MediaId);
    let data = state.storage.get_graph_data(center, depth).await?;
    Ok(Json(data.into()))
}
/// Re-extract links from a media item.
///
/// POST /api/v1/media/{id}/reindex-links
///
/// Non-markdown items are skipped with a success response rather than an
/// error. For markdown items the file is re-read from disk, links are
/// re-extracted and saved, the item is marked as extracted, and a global
/// resolution pass is run.
pub async fn reindex_links(
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Result<Json<ReindexResponse>, ApiError> {
let media_id = MediaId(id);
// Get the media item to read its content
let media = state.storage.get_media(media_id).await?;
// Only process markdown files
use pinakes_core::media_type::{BuiltinMediaType, MediaType};
match &media.media_type {
MediaType::Builtin(BuiltinMediaType::Markdown) => {}
_ => {
// Deliberately a 200 response, not an error: reindex is a no-op
// for non-markdown media.
return Ok(Json(ReindexResponse {
message: "Skipped: not a markdown file".to_string(),
links_extracted: 0,
}));
}
}
// Read the file content
let content = tokio::fs::read_to_string(&media.path).await.map_err(|e| {
ApiError::internal(format!("Failed to read file: {}", e))
})?;
// Extract links
let links = pinakes_core::links::extract_links(media_id, &content);
let links_count = links.len();
// Save links to database
state.storage.save_markdown_links(media_id, &links).await?;
// Mark as extracted
state.storage.mark_links_extracted(media_id).await?;
// Try to resolve any unresolved links
state.storage.resolve_links().await?;
Ok(Json(ReindexResponse {
message: "Links extracted successfully".to_string(),
links_extracted: links_count,
}))
}
/// Resolve all unresolved links in the database.
///
/// POST /api/v1/notes/resolve-links
pub async fn resolve_links(
    State(state): State<AppState>,
) -> Result<Json<ResolveLinksResponse>, ApiError> {
    let resolved_count = state.storage.resolve_links().await?;
    let body = ResolveLinksResponse { resolved_count };
    Ok(Json(body))
}
/// Get count of unresolved links.
///
/// GET /api/v1/notes/unresolved-count
pub async fn get_unresolved_count(
State(state): State<AppState>,
) -> Result<Json<UnresolvedLinksResponse>, ApiError> {
let count = state.storage.count_unresolved_links().await?;
Ok(Json(UnresolvedLinksResponse { count }))
}
/// Create the routes for notes/links functionality.
///
/// These are mounted under `/notes`; the per-media link endpoints
/// (`/media/{id}/backlinks` etc.) are registered on the main router.
pub fn routes() -> Router<AppState> {
Router::new()
.route("/graph", get(get_graph))
.route("/resolve-links", post(resolve_links))
.route("/unresolved-count", get(get_unresolved_count))
}

View file

@ -20,3 +20,5 @@ futures = { workspace = true }
rfd = "0.17"
pulldown-cmark = { workspace = true }
gray_matter = { workspace = true }
regex = { workspace = true }
ammonia = "4"

View file

@ -6,7 +6,7 @@ use futures::future::join_all;
use crate::client::*;
use crate::components::{
audit, collections, database, detail, duplicates, import, library,
audit, collections, database, detail, duplicates, graph_view, import, library,
media_player::PlayQueue,
search, settings, statistics, tags, tasks,
};
@ -29,6 +29,7 @@ enum View {
Tasks,
Settings,
Database,
Graph,
}
impl View {
@ -46,6 +47,7 @@ impl View {
Self::Tasks => "Tasks",
Self::Settings => "Settings",
Self::Database => "Database",
Self::Graph => "Note Graph",
}
}
}
@ -564,6 +566,14 @@ pub fn App() -> Element {
span { class: "nav-icon", "\u{1f4ca}" }
span { class: "nav-item-text", "Statistics" }
}
button {
class: if *current_view.read() == View::Graph { "nav-item active" } else { "nav-item" },
onclick: move |_| {
current_view.set(View::Graph);
},
span { class: "nav-icon", "\u{1f578}" }
span { class: "nav-item-text", "Graph" }
}
button {
class: if *current_view.read() == View::Tasks { "nav-item active" } else { "nav-item" },
onclick: {
@ -1310,6 +1320,25 @@ pub fn App() -> Element {
show_toast("Added to queue".into(), false);
}
},
on_navigate_to_media: {
let client = client.read().clone();
move |media_id: String| {
let client = client.clone();
spawn(async move {
match client.get_media(&media_id).await {
Ok(media) => {
// Load tags for the new media
if let Ok(mtags) = client.get_media_tags(&media_id).await {
media_tags.set(mtags);
}
selected_media.set(Some(media));
auto_play_media.set(false);
}
Err(e) => show_toast(format!("Failed to load linked note: {e}"), true),
}
});
}
},
}
},
None => rsx! {
@ -2305,6 +2334,33 @@ pub fn App() -> Element {
},
}
}
View::Graph => {
rsx! {
graph_view::GraphView {
client: client.read().clone(),
center_id: None,
on_navigate: {
let client = client.read().clone();
move |media_id: String| {
let client = client.clone();
spawn(async move {
match client.get_media(&media_id).await {
Ok(media) => {
// Load tags for the media
if let Ok(mtags) = client.get_media_tags(&media_id).await {
media_tags.set(mtags);
}
selected_media.set(Some(media));
current_view.set(View::Detail);
}
Err(e) => show_toast(format!("Failed to load: {e}"), true),
}
});
}
},
}
}
}
}
}
}

View file

@ -277,6 +277,74 @@ pub struct DatabaseStatsResponse {
pub backend_name: String,
}
// ── Markdown Notes/Links Response Types ──
/// Backlinks (incoming links) for a media item, as returned by the server.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct BacklinksResponse {
/// Incoming links, one entry per link row.
pub backlinks: Vec<BacklinkItem>,
/// Convenience count reported by the server.
pub count: usize,
}
/// A single incoming link, with its source item's title and path.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct BacklinkItem {
/// Identifier of the link row.
pub link_id: String,
/// Media item the link originates from.
pub source_id: String,
/// Title of the source item, when set.
pub source_title: Option<String>,
/// Filesystem path of the source item.
pub source_path: String,
/// Display text of the link, if any.
pub link_text: Option<String>,
/// Line number of the link in the source file, when known.
pub line_number: Option<i32>,
/// Surrounding text snippet, when captured.
pub context: Option<String>,
/// Link kind as a string.
pub link_type: String,
}
/// Outgoing links for a media item, as returned by the server.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct OutgoingLinksResponse {
/// Links found in the note.
pub links: Vec<OutgoingLinkItem>,
/// Convenience count reported by the server.
pub count: usize,
}
/// A single outgoing link.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct OutgoingLinkItem {
/// Identifier of the link row.
pub id: String,
/// Raw link target as written in the note.
pub target_path: String,
/// Resolved target media item id, when resolution succeeded.
pub target_id: Option<String>,
/// Display text of the link, if any.
pub link_text: Option<String>,
/// Line number of the link in the source file, when known.
pub line_number: Option<i32>,
/// Link kind as a string.
pub link_type: String,
/// Whether the link was resolved to a media item.
pub is_resolved: bool,
}
/// Link-graph payload for visualization.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct GraphResponse {
/// Graph nodes.
pub nodes: Vec<GraphNodeResponse>,
/// Directed edges between nodes.
pub edges: Vec<GraphEdgeResponse>,
/// Convenience count reported by the server.
pub node_count: usize,
/// Convenience count reported by the server.
pub edge_count: usize,
}
/// A node of the link graph.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct GraphNodeResponse {
/// Media item identifier.
pub id: String,
/// Display label.
pub label: String,
/// Item title, when set.
pub title: Option<String>,
/// Media type of the item.
pub media_type: String,
/// Number of outgoing links.
pub link_count: u32,
/// Number of incoming links.
pub backlink_count: u32,
}
/// A directed edge of the link graph.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct GraphEdgeResponse {
/// Source node id.
pub source: String,
/// Target node id.
pub target: String,
/// Link kind as a string.
pub link_type: String,
}
/// Result of a reindex-links request.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct ReindexLinksResponse {
/// Human-readable outcome message.
pub message: String,
/// Number of links extracted.
pub links_extracted: usize,
}
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct SavedSearchResponse {
pub id: String,
@ -1073,6 +1141,85 @@ impl ApiClient {
Ok(())
}
// ── Markdown Notes/Links ──
/// Get backlinks (incoming links) to a media item.
pub async fn get_backlinks(&self, id: &str) -> Result<BacklinksResponse> {
    let endpoint = self.url(&format!("/media/{id}/backlinks"));
    let response = self.client.get(endpoint).send().await?.error_for_status()?;
    Ok(response.json().await?)
}
/// Get outgoing links from a media item.
pub async fn get_outgoing_links(&self, id: &str) -> Result<OutgoingLinksResponse> {
    let endpoint = self.url(&format!("/media/{id}/outgoing-links"));
    let response = self.client.get(endpoint).send().await?.error_for_status()?;
    Ok(response.json().await?)
}
/// Get graph data for visualization.
pub async fn get_graph(&self, center_id: Option<&str>, depth: Option<u32>) -> Result<GraphResponse> {
    // Assemble the optional query string from the provided filters.
    let mut params = Vec::new();
    if let Some(center) = center_id {
        params.push(format!("center={}", center));
    }
    if let Some(d) = depth {
        params.push(format!("depth={}", d));
    }
    let mut url = self.url("/notes/graph");
    if !params.is_empty() {
        url = format!("{}?{}", url, params.join("&"));
    }
    let response = self.client.get(&url).send().await?.error_for_status()?;
    Ok(response.json().await?)
}
/// Re-extract links from a media item.
pub async fn reindex_links(&self, id: &str) -> Result<ReindexLinksResponse> {
    let endpoint = self.url(&format!("/media/{id}/reindex-links"));
    let response = self.client.post(endpoint).send().await?.error_for_status()?;
    Ok(response.json().await?)
}
/// Get count of unresolved links.
pub async fn get_unresolved_links_count(&self) -> Result<u64> {
    // Minimal deserialization target for the `{ "count": N }` payload.
    #[derive(Deserialize)]
    struct CountResp {
        count: u64,
    }
    let body: CountResp = self
        .client
        .get(self.url("/notes/unresolved-count"))
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    Ok(body.count)
}
pub fn set_token(&mut self, token: &str) {
let mut headers = header::HeaderMap::new();
if let Ok(val) = header::HeaderValue::from_str(&format!("Bearer {token}")) {

View file

@ -0,0 +1,345 @@
//! Backlinks panel component for showing incoming links to a note.
use dioxus::prelude::*;
use crate::client::{ApiClient, BacklinkItem, BacklinksResponse};
/// Panel displaying backlinks (incoming links) to a media item.
///
/// Fetches the backlinks for `media_id` on mount, renders them as a
/// collapsible list, and offers a reindex button that re-extracts the
/// note's links on the server and then refreshes the list.
#[component]
pub fn BacklinksPanel(
media_id: String,
client: ApiClient,
on_navigate: EventHandler<String>,
) -> Element {
// Fetched backlinks payload; `None` until the first request completes.
let mut backlinks = use_signal(|| Option::<BacklinksResponse>::None);
let mut loading = use_signal(|| true);
let mut error = use_signal(|| Option::<String>::None);
// Panel starts expanded.
let mut collapsed = use_signal(|| false);
let mut reindexing = use_signal(|| false);
let mut reindex_message = use_signal(|| Option::<(String, bool)>::None); // (message, is_error)
// Fetch backlinks function
let fetch_backlinks = {
let client = client.clone();
let id = media_id.clone();
move || {
let client = client.clone();
let id = id.clone();
spawn(async move {
loading.set(true);
error.set(None);
match client.get_backlinks(&id).await {
Ok(resp) => {
backlinks.set(Some(resp));
}
Err(e) => {
error.set(Some(format!("Failed to load backlinks: {e}")));
}
}
loading.set(false);
});
}
};
// Fetch backlinks on mount
let fetch_on_mount = fetch_backlinks.clone();
use_effect(move || {
fetch_on_mount();
});
// Reindex links handler
let on_reindex = {
let client = client.clone();
let id = media_id.clone();
let fetch_backlinks = fetch_backlinks.clone();
move |evt: MouseEvent| {
evt.stop_propagation(); // Don't toggle collapse
let client = client.clone();
let id = id.clone();
let fetch_backlinks = fetch_backlinks.clone();
spawn(async move {
reindexing.set(true);
reindex_message.set(None);
match client.reindex_links(&id).await {
Ok(resp) => {
reindex_message.set(Some((
format!("Reindexed: {} links extracted", resp.links_extracted),
false,
)));
// Refresh backlinks after reindex
fetch_backlinks();
}
Err(e) => {
reindex_message.set(Some((format!("Reindex failed: {e}"), true)));
}
}
reindexing.set(false);
});
}
};
// Snapshot signal state for rendering.
let is_loading = *loading.read();
let is_collapsed = *collapsed.read();
let is_reindexing = *reindexing.read();
let backlink_data = backlinks.read();
let count = backlink_data.as_ref().map(|b| b.count).unwrap_or(0);
rsx! {
div { class: "backlinks-panel",
// Header with toggle
div {
class: "backlinks-header",
onclick: move |_| {
let current = *collapsed.read();
collapsed.set(!current);
},
span { class: "backlinks-toggle",
if is_collapsed { "\u{25b6}" } else { "\u{25bc}" }
}
span { class: "backlinks-title", "Backlinks" }
span { class: "backlinks-count", "({count})" }
// Reindex button
button {
class: "backlinks-reindex-btn",
title: "Re-extract links from this note",
disabled: is_reindexing,
onclick: on_reindex,
if is_reindexing {
span { class: "spinner-tiny" }
} else {
"\u{21bb}" // Refresh symbol
}
}
}
if !is_collapsed {
div { class: "backlinks-content",
// Show reindex message if present
if let Some((ref msg, is_err)) = *reindex_message.read() {
div {
class: if is_err { "backlinks-message error" } else { "backlinks-message success" },
"{msg}"
}
}
if is_loading {
div { class: "backlinks-loading",
div { class: "spinner-small" }
"Loading backlinks..."
}
}
if let Some(ref err) = *error.read() {
div { class: "backlinks-error", "{err}" }
}
// Only show results once loading is done and no error occurred.
if !is_loading && error.read().is_none() {
if let Some(ref data) = *backlink_data {
if data.backlinks.is_empty() {
div { class: "backlinks-empty",
"No other notes link to this one."
}
} else {
ul { class: "backlinks-list",
for backlink in &data.backlinks {
BacklinkItemView {
backlink: backlink.clone(),
on_navigate: on_navigate.clone(),
}
}
}
}
}
}
}
}
}
}
}
/// Individual backlink item view.
///
/// Renders one entry of the backlinks list: the source note's title (falling
/// back to its path), a badge with the link type, and — when available — the
/// surrounding context snippet with its line number. Clicking the item fires
/// `on_navigate` with the source note's id.
#[component]
fn BacklinkItemView(
    backlink: BacklinkItem,
    on_navigate: EventHandler<String>,
) -> Element {
    // Clone fields up front so the rsx! event closure below can own them.
    let source_id = backlink.source_id.clone();
    // Prefer the human-readable title; fall back to the source path.
    let title = backlink
        .source_title
        .clone()
        .unwrap_or_else(|| backlink.source_path.clone());
    let context = backlink.context.clone();
    let line_number = backlink.line_number;
    let link_type = backlink.link_type.clone();
    rsx! {
        li {
            class: "backlink-item",
            onclick: move |_| on_navigate.call(source_id.clone()),
            div { class: "backlink-source",
                span { class: "backlink-title", "{title}" }
                span { class: "backlink-type-badge backlink-type-{link_type}", "{link_type}" }
            }
            // Context snippet (with optional line number) only renders when present.
            if let Some(ref ctx) = context {
                div { class: "backlink-context",
                    if let Some(ln) = line_number {
                        span { class: "backlink-line", "L{ln}: " }
                    }
                    "\"{ctx}\""
                }
            }
        }
    }
}
/// Outgoing links panel showing what this note links to.
///
/// Fetches the note's outgoing links once on mount, renders them as a
/// collapsible list with per-link resolution state, and additionally surfaces
/// the vault-wide unresolved-link count when it is non-zero.
#[component]
pub fn OutgoingLinksPanel(
    media_id: String,
    client: ApiClient,
    on_navigate: EventHandler<String>,
) -> Element {
    // Fetched payload; `None` until the first request completes.
    let mut links = use_signal(|| Option::<crate::client::OutgoingLinksResponse>::None);
    let mut loading = use_signal(|| true);
    let mut error = use_signal(|| Option::<String>::None);
    let mut collapsed = use_signal(|| true); // Collapsed by default
    // Global (all-notes) unresolved count, fetched after the per-note links.
    let mut global_unresolved = use_signal(|| Option::<u64>::None);
    // Fetch outgoing links on mount
    let id = media_id.clone();
    let client_clone = client.clone();
    use_effect(move || {
        let id = id.clone();
        let client = client_clone.clone();
        spawn(async move {
            loading.set(true);
            error.set(None);
            match client.get_outgoing_links(&id).await {
                Ok(resp) => {
                    links.set(Some(resp));
                }
                Err(e) => {
                    error.set(Some(format!("Failed to load links: {e}")));
                }
            }
            loading.set(false);
            // Also fetch global unresolved count. A failure here is silently
            // ignored so the panel still shows the per-note data.
            if let Ok(count) = client.get_unresolved_links_count().await {
                global_unresolved.set(Some(count));
            }
        });
    });
    let is_loading = *loading.read();
    let is_collapsed = *collapsed.read();
    let link_data = links.read();
    let count = link_data.as_ref().map(|l| l.count).unwrap_or(0);
    // Number of links in THIS note that are unresolved; drives the header badge.
    let unresolved_in_note = link_data
        .as_ref()
        .map(|l| l.links.iter().filter(|link| !link.is_resolved).count())
        .unwrap_or(0);
    rsx! {
        div { class: "outgoing-links-panel",
            // Header with toggle
            div {
                class: "outgoing-links-header",
                onclick: move |_| {
                    let current = *collapsed.read();
                    collapsed.set(!current);
                },
                span { class: "outgoing-links-toggle",
                    if is_collapsed { "\u{25b6}" } else { "\u{25bc}" }
                }
                span { class: "outgoing-links-title", "Outgoing Links" }
                span { class: "outgoing-links-count", "({count})" }
                if unresolved_in_note > 0 {
                    span {
                        class: "outgoing-links-unresolved-badge",
                        title: "Unresolved links in this note",
                        "{unresolved_in_note} unresolved"
                    }
                }
            }
            if !is_collapsed {
                div { class: "outgoing-links-content",
                    if is_loading {
                        div { class: "outgoing-links-loading",
                            div { class: "spinner-small" }
                            "Loading links..."
                        }
                    }
                    if let Some(ref err) = *error.read() {
                        div { class: "outgoing-links-error", "{err}" }
                    }
                    if !is_loading && error.read().is_none() {
                        if let Some(ref data) = *link_data {
                            if data.links.is_empty() {
                                div { class: "outgoing-links-empty",
                                    "This note has no outgoing links."
                                }
                            } else {
                                ul { class: "outgoing-links-list",
                                    for link in &data.links {
                                        OutgoingLinkItemView {
                                            link: link.clone(),
                                            on_navigate: on_navigate.clone(),
                                        }
                                    }
                                }
                            }
                        }
                        // Show global unresolved count if any
                        if let Some(global_count) = *global_unresolved.read() {
                            if global_count > 0 {
                                div { class: "outgoing-links-global-unresolved",
                                    span { class: "unresolved-icon", "\u{26a0}" }
                                    " {global_count} unresolved links across all notes"
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// Individual outgoing link item view.
///
/// Shows the link's display text (falling back to the raw target path), a
/// link-type badge, and an "unresolved" badge when the target note does not
/// exist. Clicking navigates only when a concrete target id is present.
#[component]
fn OutgoingLinkItemView(
    link: crate::client::OutgoingLinkItem,
    on_navigate: EventHandler<String>,
) -> Element {
    let target_id = link.target_id.clone();
    let target_path = link.target_path.clone();
    let link_text = link.link_text.clone();
    let is_resolved = link.is_resolved;
    let link_type = link.link_type.clone();
    // Prefer explicit link text; otherwise show the target path itself.
    let display_text = link_text.unwrap_or_else(|| target_path.clone());
    let resolved_class = if is_resolved { "resolved" } else { "unresolved" };
    rsx! {
        li {
            class: "outgoing-link-item {resolved_class}",
            onclick: move |_| {
                // Unresolved links carry no target id, so the click is a no-op.
                if let Some(ref id) = target_id {
                    on_navigate.call(id.clone());
                }
            },
            div { class: "outgoing-link-target",
                span { class: "outgoing-link-text", "{display_text}" }
                span { class: "outgoing-link-type-badge link-type-{link_type}", "{link_type}" }
                if !is_resolved {
                    span { class: "unresolved-badge", "unresolved" }
                }
            }
        }
    }
}

View file

@ -1,5 +1,6 @@
use dioxus::prelude::*;
use super::backlinks_panel::{BacklinksPanel, OutgoingLinksPanel};
use super::image_viewer::ImageViewer;
use super::markdown_viewer::MarkdownViewer;
use super::media_player::{MediaPlayer, PlayQueue, QueueItem, QueuePanel};
@ -23,6 +24,7 @@ pub fn Detail(
on_set_custom_field: EventHandler<(String, String, String, String)>,
on_delete_custom_field: EventHandler<(String, String)>,
on_delete: EventHandler<String>,
#[props(default)] on_navigate_to_media: Option<EventHandler<String>>,
#[props(default)] on_queue_select: Option<EventHandler<usize>>,
#[props(default)] on_queue_remove: Option<EventHandler<usize>>,
#[props(default)] on_queue_clear: Option<EventHandler<()>>,
@ -751,6 +753,43 @@ pub fn Detail(
}
}
// Backlinks and outgoing links panels for markdown/text files
if category == "text" {
{
let client_for_backlinks = client.clone();
let client_for_outgoing = client.clone();
let media_id_for_backlinks = id.clone();
let media_id_for_outgoing = id.clone();
let nav_handler = on_navigate_to_media;
rsx! {
BacklinksPanel {
media_id: media_id_for_backlinks,
client: client_for_backlinks,
on_navigate: {
let handler = nav_handler;
move |target_id: String| {
if let Some(ref h) = handler {
h.call(target_id);
}
}
},
}
OutgoingLinksPanel {
media_id: media_id_for_outgoing,
client: client_for_outgoing,
on_navigate: {
let handler = nav_handler;
move |target_id: String| {
if let Some(ref h) = handler {
h.call(target_id);
}
}
},
}
}
}
}
// Image viewer overlay
if *show_image_viewer.read() {
ImageViewer {

View file

@ -0,0 +1,295 @@
//! Graph visualization component for markdown note connections.
//!
//! Renders a force-directed graph showing connections between notes.
//! Uses a simple SVG-based rendering approach (no D3.js dependency).
use dioxus::prelude::*;
use crate::client::{ApiClient, GraphEdgeResponse, GraphNodeResponse, GraphResponse};
/// Graph view component showing note connections.
///
/// Loads the link graph from the API (optionally centered on one note,
/// limited to a user-selectable traversal depth) and renders it via
/// `GraphSvg`. Single-clicking a node opens the details panel; a
/// double-click fires `on_navigate` with the node's id.
#[component]
pub fn GraphView(
    client: ApiClient,
    center_id: Option<String>,
    on_navigate: EventHandler<String>,
) -> Element {
    let mut graph_data = use_signal(|| Option::<GraphResponse>::None);
    let mut loading = use_signal(|| true);
    let mut error = use_signal(|| Option::<String>::None);
    // Traversal depth of the graph query; adjustable from the toolbar select.
    let mut depth = use_signal(|| 2u32);
    // Id of the node whose details panel is currently open, if any.
    let mut selected_node = use_signal(|| Option::<String>::None);
    // Fetch graph data
    let center = center_id.clone();
    // NOTE(review): `depth` is read here, OUTSIDE the effect closure. Confirm
    // that changing the depth selector actually re-runs this fetch in the
    // Dioxus version in use; if effects only track signals read inside the
    // closure, this may serve stale-depth data.
    let d = *depth.read();
    let client_clone = client.clone();
    use_effect(move || {
        let center = center.clone();
        let client = client_clone.clone();
        spawn(async move {
            loading.set(true);
            error.set(None);
            match client.get_graph(center.as_deref(), Some(d)).await {
                Ok(resp) => {
                    graph_data.set(Some(resp));
                }
                Err(e) => {
                    error.set(Some(format!("Failed to load graph: {e}")));
                }
            }
            loading.set(false);
        });
    });
    let is_loading = *loading.read();
    let current_depth = *depth.read();
    let data = graph_data.read();
    rsx! {
        div { class: "graph-view",
            // Toolbar
            div { class: "graph-toolbar",
                span { class: "graph-title", "Note Graph" }
                div { class: "graph-controls",
                    label { "Depth: " }
                    select {
                        value: "{current_depth}",
                        onchange: move |evt| {
                            if let Ok(d) = evt.value().parse::<u32>() {
                                depth.set(d);
                            }
                        },
                        option { value: "1", "1" }
                        option { value: "2", "2" }
                        option { value: "3", "3" }
                        option { value: "4", "4" }
                        option { value: "5", "5" }
                    }
                }
                if let Some(ref data) = *data {
                    div { class: "graph-stats",
                        "{data.node_count} nodes, {data.edge_count} edges"
                    }
                }
            }
            // Graph container
            div { class: "graph-container",
                if is_loading {
                    div { class: "graph-loading",
                        div { class: "spinner" }
                        "Loading graph..."
                    }
                }
                if let Some(ref err) = *error.read() {
                    div { class: "graph-error", "{err}" }
                }
                if !is_loading && error.read().is_none() {
                    if let Some(ref graph) = *data {
                        if graph.nodes.is_empty() {
                            div { class: "graph-empty",
                                "No linked notes found. Start creating links between your notes!"
                            }
                        } else {
                            GraphSvg {
                                nodes: graph.nodes.clone(),
                                edges: graph.edges.clone(),
                                selected_node: selected_node.clone(),
                                on_node_click: move |id: String| {
                                    selected_node.set(Some(id.clone()));
                                },
                                on_node_double_click: move |id: String| {
                                    on_navigate.call(id);
                                },
                            }
                        }
                    }
                }
            }
            // Node details panel (only when a node is selected and still in the graph)
            if let Some(ref node_id) = *selected_node.read() {
                if let Some(ref graph) = *data {
                    if let Some(node) = graph.nodes.iter().find(|n| &n.id == node_id) {
                        NodeDetailsPanel {
                            node: node.clone(),
                            on_close: move |_| selected_node.set(None),
                            on_navigate: move |id| {
                                on_navigate.call(id);
                            },
                        }
                    }
                }
            }
        }
    }
}
/// SVG-based graph rendering.
///
/// Lays all nodes out on a single circle (no force simulation), draws the
/// edges underneath the nodes, and scales each node's radius with its
/// combined outgoing + incoming link count.
#[component]
fn GraphSvg(
    nodes: Vec<GraphNodeResponse>,
    edges: Vec<GraphEdgeResponse>,
    selected_node: Signal<Option<String>>,
    on_node_click: EventHandler<String>,
    on_node_double_click: EventHandler<String>,
) -> Element {
    // Simple circular layout for nodes
    let node_count = nodes.len();
    let width: f64 = 800.0;
    let height: f64 = 600.0;
    let center_x = width / 2.0;
    let center_y = height / 2.0;
    // Keep a 60px margin so node labels are not clipped at the SVG edge.
    let radius = (width.min(height) / 2.0) - 60.0;
    // Calculate node positions in a circle, evenly spaced by index.
    // (With zero nodes the range is empty, so the 0/0 division never runs.)
    let positions: Vec<(f64, f64)> = (0..node_count)
        .map(|i| {
            let angle = (i as f64 / node_count as f64) * 2.0 * std::f64::consts::PI;
            let x = center_x + radius * angle.cos();
            let y = center_y + radius * angle.sin();
            (x, y)
        })
        .collect();
    // Create a map from node id to position so each edge can look up both endpoints.
    let id_to_pos: std::collections::HashMap<&str, (f64, f64)> = nodes
        .iter()
        .enumerate()
        .map(|(i, n)| (n.id.as_str(), positions[i]))
        .collect();
    let selected = selected_node.read();
    rsx! {
        svg {
            class: "graph-svg",
            width: "{width}",
            height: "{height}",
            view_box: "0 0 {width} {height}",
            // Draw edges first (so they appear behind nodes)
            g { class: "graph-edges",
                for edge in &edges {
                    // Edges whose endpoints are not in the node set are skipped.
                    if let (Some(&(x1, y1)), Some(&(x2, y2))) = (
                        id_to_pos.get(edge.source.as_str()),
                        id_to_pos.get(edge.target.as_str())
                    ) {
                        line {
                            class: "graph-edge edge-type-{edge.link_type}",
                            x1: "{x1}",
                            y1: "{y1}",
                            x2: "{x2}",
                            y2: "{y2}",
                            stroke: "#888",
                            stroke_width: "1",
                            marker_end: "url(#arrowhead)",
                        }
                    }
                }
            }
            // Arrow marker definition
            defs {
                marker {
                    id: "arrowhead",
                    marker_width: "10",
                    marker_height: "7",
                    ref_x: "10",
                    ref_y: "3.5",
                    orient: "auto",
                    polygon {
                        points: "0 0, 10 3.5, 0 7",
                        fill: "#888",
                    }
                }
            }
            // Draw nodes
            g { class: "graph-nodes",
                for (i, node) in nodes.iter().enumerate() {
                    {
                        let (x, y) = positions[i];
                        // Two owned copies of the id: one per event closure.
                        let node_id = node.id.clone();
                        let node_id2 = node.id.clone();
                        let label = node.label.clone();
                        let is_selected = selected.as_ref() == Some(&node.id);
                        // Radius grows with connectivity, capped at 30px.
                        let node_size = 8.0 + (node.link_count + node.backlink_count) as f64 * 2.0;
                        let node_size = node_size.min(30.0);
                        rsx! {
                            g {
                                class: if is_selected { "graph-node selected" } else { "graph-node" },
                                onclick: move |_| on_node_click.call(node_id.clone()),
                                ondoubleclick: move |_| on_node_double_click.call(node_id2.clone()),
                                circle {
                                    cx: "{x}",
                                    cy: "{y}",
                                    r: "{node_size}",
                                    fill: if is_selected { "#2196f3" } else { "#4caf50" },
                                    stroke: if is_selected { "#1565c0" } else { "#388e3c" },
                                    stroke_width: "2",
                                }
                                // Label is placed just below the circle.
                                text {
                                    x: "{x}",
                                    y: "{y + node_size + 15.0}",
                                    text_anchor: "middle",
                                    font_size: "12",
                                    fill: "#333",
                                    "{label}"
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// Panel showing details about the selected node.
///
/// Displays the node's label, optional title, and outgoing/incoming link
/// counts. "Open Note" fires `on_navigate` with the node's id; the close
/// button fires `on_close`.
#[component]
fn NodeDetailsPanel(
    node: GraphNodeResponse,
    on_close: EventHandler<()>,
    on_navigate: EventHandler<String>,
) -> Element {
    // Owned copy of the id for the "Open Note" click closure.
    let node_id = node.id.clone();
    rsx! {
        div { class: "node-details-panel",
            div { class: "node-details-header",
                h3 { "{node.label}" }
                button {
                    class: "close-btn",
                    onclick: move |_| on_close.call(()),
                    "\u{2715}"
                }
            }
            div { class: "node-details-content",
                if let Some(ref title) = node.title {
                    p { class: "node-title", "{title}" }
                }
                div { class: "node-stats",
                    span { class: "stat",
                        "Outgoing: "
                        strong { "{node.link_count}" }
                    }
                    span { class: "stat",
                        "Incoming: "
                        strong { "{node.backlink_count}" }
                    }
                }
                button {
                    class: "btn btn-primary",
                    onclick: move |_| on_navigate.call(node_id.clone()),
                    "Open Note"
                }
            }
        }
    }
}

View file

@ -1,7 +1,14 @@
use dioxus::prelude::*;
/// Event handler for wikilink clicks. Called with the target note name.
pub type WikilinkClickHandler = EventHandler<String>;
#[component]
pub fn MarkdownViewer(content_url: String, media_type: String) -> Element {
pub fn MarkdownViewer(
content_url: String,
media_type: String,
#[props(default)] on_wikilink_click: Option<WikilinkClickHandler>,
) -> Element {
let mut rendered_html = use_signal(String::new);
let mut frontmatter_html = use_signal(|| Option::<String>::None);
let mut loading = use_signal(|| true);
@ -133,6 +140,9 @@ fn pod_to_display(pod: &gray_matter::Pod) -> String {
fn render_markdown(text: &str) -> String {
use pulldown_cmark::{Options, Parser, html};
// First, convert wikilinks to standard markdown links
let text_with_links = convert_wikilinks(text);
let mut options = Options::empty();
options.insert(Options::ENABLE_TABLES);
options.insert(Options::ENABLE_STRIKETHROUGH);
@ -140,12 +150,47 @@ fn render_markdown(text: &str) -> String {
options.insert(Options::ENABLE_FOOTNOTES);
options.insert(Options::ENABLE_HEADING_ATTRIBUTES);
let parser = Parser::new_ext(text, options);
let parser = Parser::new_ext(&text_with_links, options);
let mut html_output = String::new();
html::push_html(&mut html_output, parser);
// Strip script tags for safety
strip_script_tags(&html_output)
// Sanitize HTML using ammonia with a safe allowlist
sanitize_html(&html_output)
}
/// Convert wikilinks [[target]] and [[target|display]] to styled HTML links.
/// Uses data attributes only - no inline JavaScript for security.
///
/// Embeds (`![[target]]`) are rewritten first into placeholder spans, so the
/// plain-wikilink pass below cannot re-match their inner text.
fn convert_wikilinks(text: &str) -> String {
    use regex::Regex;
    use std::sync::LazyLock;

    // Compile both patterns once per process instead of on every call:
    // recompiling a Regex per invocation is a well-known hot-path cost, and
    // this function runs for every markdown render.
    // Match embeds ![[target]] or ![[target|alt]].
    static EMBED_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"!\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap());
    // Match wikilinks [[target]] or [[target|display]].
    static WIKILINK_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"\[\[([^\]|]+)(?:\|([^\]]+))?\]\]").unwrap());

    // Embeds first: convert to a placeholder span.
    let text = EMBED_RE.replace_all(text, |caps: &regex::Captures| {
        let target = caps.get(1).unwrap().as_str().trim();
        // Fall back to the target itself when no |alt part was given.
        let alt = caps.get(2).map(|m| m.as_str().trim()).unwrap_or(target);
        format!(
            "<span class=\"wikilink-embed\" data-target=\"{}\" title=\"Embed: {}\">[Embed: {}]</span>",
            escape_html_attr(target),
            escape_html_attr(target),
            escape_html(alt)
        )
    });
    // Plain wikilinks: a styled link carrying data attributes only — no
    // inline JavaScript. Event handling is done via event delegation in the
    // frontend.
    let text = WIKILINK_RE.replace_all(&text, |caps: &regex::Captures| {
        let target = caps.get(1).unwrap().as_str().trim();
        let display = caps.get(2).map(|m| m.as_str().trim()).unwrap_or(target);
        format!(
            "<a href=\"#wikilink\" class=\"wikilink\" data-wikilink-target=\"{}\">{}</a>",
            escape_html_attr(target),
            escape_html(display)
        )
    });
    text.to_string()
}
fn render_plaintext(text: &str) -> String {
@ -153,6 +198,7 @@ fn render_plaintext(text: &str) -> String {
format!("<pre><code>{escaped}</code></pre>")
}
/// Escape text for display in HTML content.
fn escape_html(text: &str) -> String {
text.replace('&', "&amp;")
.replace('<', "&lt;")
@ -160,21 +206,59 @@ fn escape_html(text: &str) -> String {
.replace('"', "&quot;")
}
fn strip_script_tags(html: &str) -> String {
// Simple removal of <script> tags
let mut result = html.to_string();
while let Some(start) = result.to_lowercase().find("<script") {
if let Some(end) = result.to_lowercase()[start..].find("</script>") {
result = format!(
"{}{}",
&result[..start],
&result[start + end + "</script>".len()..]
);
} else {
// Malformed script tag - remove to end
result = result[..start].to_string();
break;
}
}
result
/// Escape text for use in HTML attributes (includes single quotes).
///
/// Escapes `&`, `<`, `>`, `"` and `'` so the result can be embedded inside a
/// double- or single-quoted attribute value without breaking out of it.
fn escape_html_attr(text: &str) -> String {
    // Single pass over the characters; escaped entities grow the output, so
    // reserve at least the input length up front.
    let mut escaped = String::with_capacity(text.len());
    for ch in text.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            '"' => escaped.push_str("&quot;"),
            '\'' => escaped.push_str("&#x27;"),
            other => escaped.push(other),
        }
    }
    escaped
}
/// Sanitize HTML using ammonia with a safe allowlist.
/// This prevents XSS attacks by removing dangerous elements and attributes.
fn sanitize_html(html: &str) -> String {
    use ammonia::Builder;
    use std::collections::HashSet;
    // Build a custom sanitizer that allows safe markdown elements
    // but strips all event handlers and dangerous elements.
    let mut builder = Builder::default();
    // Tags commonly produced by markdown rendering: headings, lists, tables,
    // inline formatting, images, details/summary, plus `input` for task-list
    // checkboxes. Anything else (script, style, iframe, ...) is dropped.
    let allowed_tags: HashSet<&str> = [
        "a", "abbr", "acronym", "b", "blockquote", "br", "code", "dd", "del",
        "details", "div", "dl", "dt", "em", "h1", "h2", "h3", "h4", "h5", "h6",
        "hr", "i", "img", "ins", "kbd", "li", "mark", "ol", "p", "pre", "q",
        "s", "samp", "small", "span", "strong", "sub", "summary", "sup",
        "table", "tbody", "td", "tfoot", "th", "thead", "tr", "u", "ul", "var",
        // Task list support
        "input",
    ]
    .into_iter()
    .collect();
    // Attributes allowed on ANY of the tags above (generic allowlist).
    // `on*` event handlers are rejected simply because they are not listed.
    let allowed_attrs: HashSet<&str> = [
        "href", "src", "alt", "title", "class", "id", "name",
        "width", "height", "align", "valign",
        "colspan", "rowspan", "scope",
        // Data attributes for wikilinks (safe - no code execution)
        "data-target", "data-wikilink-target",
        // Task list checkbox support
        "type", "checked", "disabled",
    ]
    .into_iter()
    .collect();
    builder
        .tags(allowed_tags)
        .generic_attributes(allowed_attrs)
        // Restrict absolute URLs to these schemes (rejects javascript:, data:).
        // NOTE(review): relative/fragment URLs such as "#wikilink" are governed
        // by ammonia's separate url_relative policy, not this scheme list —
        // confirm the default passes them through in the pinned ammonia version.
        .url_schemes(["http", "https", "mailto"].into_iter().collect())
        // Harden anchors that may point at external pages.
        .link_rel(Some("noopener noreferrer"))
        // Also drop HTML comments from the output.
        .strip_comments(true)
        .clean(html)
        .to_string()
}

View file

@ -1,9 +1,11 @@
pub mod audit;
pub mod backlinks_panel;
pub mod breadcrumb;
pub mod collections;
pub mod database;
pub mod detail;
pub mod duplicates;
pub mod graph_view;
pub mod image_viewer;
pub mod import;
pub mod library;

View file

@ -3129,4 +3129,503 @@ ul li { padding: 3px 0; font-size: 12px; color: var(--text-1); }
.theme-light .pdf-container {
background: #e8e8e8;
}
/* ── Backlinks Panel ── */
.backlinks-panel,
.outgoing-links-panel {
background: var(--bg-2);
border: 1px solid var(--border);
border-radius: var(--radius);
margin-top: 16px;
overflow: hidden;
}
.backlinks-header,
.outgoing-links-header {
display: flex;
align-items: center;
gap: 8px;
padding: 10px 14px;
background: var(--bg-3);
cursor: pointer;
user-select: none;
transition: background 0.1s;
}
.backlinks-header:hover,
.outgoing-links-header:hover {
background: rgba(255, 255, 255, 0.04);
}
.backlinks-toggle,
.outgoing-links-toggle {
font-size: 10px;
color: var(--text-2);
width: 12px;
text-align: center;
}
.backlinks-title,
.outgoing-links-title {
font-size: 12px;
font-weight: 600;
color: var(--text-0);
flex: 1;
}
.backlinks-count,
.outgoing-links-count {
font-size: 11px;
color: var(--text-2);
}
.backlinks-reindex-btn {
display: flex;
align-items: center;
justify-content: center;
width: 22px;
height: 22px;
padding: 0;
margin-left: auto;
background: transparent;
border: 1px solid var(--border);
border-radius: var(--radius-sm);
color: var(--text-2);
font-size: 12px;
cursor: pointer;
transition: background 0.1s, color 0.1s, border-color 0.1s;
}
.backlinks-reindex-btn:hover:not(:disabled) {
background: var(--bg-2);
color: var(--text-0);
border-color: var(--border-strong);
}
.backlinks-reindex-btn:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.spinner-tiny {
width: 10px;
height: 10px;
border: 1.5px solid var(--border);
border-top-color: var(--accent);
border-radius: 50%;
animation: spin 0.7s linear infinite;
}
.backlinks-message {
padding: 8px 12px;
margin-bottom: 10px;
border-radius: var(--radius-sm);
font-size: 11px;
}
.backlinks-message.success {
background: rgba(62, 201, 122, 0.08);
border: 1px solid rgba(62, 201, 122, 0.2);
color: var(--success);
}
.backlinks-message.error {
background: rgba(228, 88, 88, 0.06);
border: 1px solid rgba(228, 88, 88, 0.2);
color: var(--error);
}
.outgoing-links-unresolved-badge {
margin-left: 8px;
padding: 2px 8px;
border-radius: 10px;
font-size: 10px;
font-weight: 500;
background: rgba(212, 160, 55, 0.12);
color: var(--warning);
}
.outgoing-links-global-unresolved {
display: flex;
align-items: center;
gap: 6px;
margin-top: 12px;
padding: 10px 12px;
background: rgba(212, 160, 55, 0.06);
border: 1px solid rgba(212, 160, 55, 0.15);
border-radius: var(--radius-sm);
font-size: 11px;
color: var(--text-2);
}
.outgoing-links-global-unresolved .unresolved-icon {
color: var(--warning);
}
.backlinks-content,
.outgoing-links-content {
padding: 12px;
border-top: 1px solid var(--border-subtle);
}
.backlinks-loading,
.outgoing-links-loading {
display: flex;
align-items: center;
gap: 8px;
padding: 12px;
color: var(--text-2);
font-size: 12px;
}
.backlinks-error,
.outgoing-links-error {
padding: 8px 12px;
background: rgba(228, 88, 88, 0.06);
border: 1px solid rgba(228, 88, 88, 0.2);
border-radius: var(--radius-sm);
font-size: 12px;
color: var(--error);
}
.backlinks-empty,
.outgoing-links-empty {
padding: 16px;
text-align: center;
color: var(--text-2);
font-size: 12px;
font-style: italic;
}
.backlinks-list,
.outgoing-links-list {
list-style: none;
padding: 0;
margin: 0;
display: flex;
flex-direction: column;
gap: 6px;
}
.backlink-item,
.outgoing-link-item {
padding: 10px 12px;
background: var(--bg-0);
border: 1px solid var(--border-subtle);
border-radius: var(--radius-sm);
cursor: pointer;
transition: background 0.1s, border-color 0.1s;
}
.backlink-item:hover,
.outgoing-link-item:hover {
background: var(--bg-1);
border-color: var(--border);
}
.backlink-source,
.outgoing-link-target {
display: flex;
align-items: center;
gap: 8px;
margin-bottom: 4px;
}
.backlink-title,
.outgoing-link-text {
font-size: 13px;
font-weight: 500;
color: var(--text-0);
flex: 1;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.backlink-type-badge,
.outgoing-link-type-badge {
display: inline-block;
padding: 1px 6px;
border-radius: 8px;
font-size: 9px;
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.03em;
}
.backlink-type-wikilink,
.link-type-wikilink {
background: rgba(124, 126, 245, 0.1);
color: var(--accent-text);
}
.backlink-type-embed,
.link-type-embed {
background: rgba(139, 92, 246, 0.1);
color: #9d8be0;
}
.backlink-type-markdown_link,
.link-type-markdown_link {
background: rgba(59, 120, 200, 0.1);
color: #6ca0d4;
}
.backlink-context {
font-size: 11px;
color: var(--text-2);
line-height: 1.4;
overflow: hidden;
text-overflow: ellipsis;
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
}
.backlink-line {
color: var(--text-1);
font-weight: 500;
}
.unresolved-badge {
padding: 1px 6px;
border-radius: 8px;
font-size: 9px;
font-weight: 600;
background: rgba(212, 160, 55, 0.1);
color: var(--warning);
}
.outgoing-link-item.unresolved {
opacity: 0.7;
border-style: dashed;
}
.spinner-small {
width: 14px;
height: 14px;
border: 2px solid var(--border);
border-top-color: var(--accent);
border-radius: 50%;
animation: spin 0.7s linear infinite;
}
/* ── Graph View ── */
.graph-view {
display: flex;
flex-direction: column;
height: 100%;
background: var(--bg-1);
border-radius: var(--radius);
overflow: hidden;
}
.graph-toolbar {
display: flex;
align-items: center;
gap: 16px;
padding: 12px 16px;
background: var(--bg-2);
border-bottom: 1px solid var(--border);
}
.graph-title {
font-size: 14px;
font-weight: 600;
color: var(--text-0);
}
.graph-controls {
display: flex;
align-items: center;
gap: 8px;
font-size: 12px;
color: var(--text-1);
}
.graph-controls select {
padding: 4px 20px 4px 8px;
font-size: 11px;
background: var(--bg-3);
}
.graph-stats {
margin-left: auto;
font-size: 11px;
color: var(--text-2);
}
.graph-container {
flex: 1;
position: relative;
display: flex;
align-items: center;
justify-content: center;
overflow: hidden;
background: var(--bg-0);
}
.graph-loading,
.graph-error,
.graph-empty {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
gap: 12px;
padding: 48px;
color: var(--text-2);
font-size: 13px;
text-align: center;
}
.graph-svg {
max-width: 100%;
max-height: 100%;
}
.graph-edges line {
stroke: var(--border-strong);
stroke-width: 1;
opacity: 0.6;
}
.graph-edges line.edge-type-wikilink {
stroke: var(--accent);
}
.graph-edges line.edge-type-embed {
stroke: #9d8be0;
stroke-dasharray: 4 2;
}
.graph-nodes .graph-node {
cursor: pointer;
}
.graph-nodes .graph-node circle {
fill: #4caf50;
stroke: #388e3c;
stroke-width: 2;
transition: fill 0.15s, stroke 0.15s;
}
.graph-nodes .graph-node:hover circle {
fill: #66bb6a;
}
.graph-nodes .graph-node.selected circle {
fill: var(--accent);
stroke: #5456d6;
}
.graph-nodes .graph-node text {
fill: var(--text-1);
font-size: 11px;
pointer-events: none;
}
/* ── Node Details Panel ── */
.node-details-panel {
position: absolute;
top: 16px;
right: 16px;
width: 280px;
background: var(--bg-2);
border: 1px solid var(--border);
border-radius: var(--radius);
box-shadow: var(--shadow);
z-index: 10;
}
.node-details-header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 12px 14px;
border-bottom: 1px solid var(--border-subtle);
}
.node-details-header h3 {
font-size: 13px;
font-weight: 600;
color: var(--text-0);
margin: 0;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.node-details-panel .close-btn {
background: none;
border: none;
color: var(--text-2);
cursor: pointer;
font-size: 14px;
padding: 2px 6px;
line-height: 1;
}
.node-details-panel .close-btn:hover {
color: var(--text-0);
}
.node-details-content {
padding: 14px;
}
.node-details-content .node-title {
font-size: 12px;
color: var(--text-1);
margin-bottom: 12px;
}
.node-stats {
display: flex;
gap: 16px;
margin-bottom: 12px;
}
.node-stats .stat {
font-size: 12px;
color: var(--text-2);
}
.node-stats .stat strong {
color: var(--text-0);
}
/* ── Wikilink Styles (in markdown) ── */
.wikilink {
color: var(--accent-text);
text-decoration: none;
border-bottom: 1px dashed var(--accent);
cursor: pointer;
transition: border-color 0.1s, color 0.1s;
}
.wikilink:hover {
color: var(--accent);
border-bottom-style: solid;
}
.wikilink-embed {
display: inline-block;
padding: 2px 8px;
background: rgba(139, 92, 246, 0.08);
border: 1px dashed rgba(139, 92, 246, 0.3);
border-radius: var(--radius-sm);
color: #9d8be0;
font-size: 12px;
cursor: default;
}
/* ── Light theme adjustments for links and graph ── */
.theme-light .graph-nodes .graph-node text {
fill: var(--text-0);
}
.theme-light .graph-edges line {
stroke: rgba(0, 0, 0, 0.2);
}
"#;