various: markdown improvements

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I81fda8247814da19eed1e76dbe97bd5b6a6a6964
This commit is contained in:
raf 2026-02-05 15:39:05 +03:00
commit 80a8b5c7ca
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
23 changed files with 3458 additions and 30 deletions

View file

@ -789,6 +789,45 @@ pub trait StorageBackend: Send + Sync + 'static {
/// Count items in trash.
async fn count_trash(&self) -> Result<u64>;
// ===== Markdown Links (Obsidian-style) =====
/// Save extracted markdown links for a media item.
/// This replaces any existing links for the source media, so callers can
/// re-extract a file and call this without clearing first.
async fn save_markdown_links(
&self,
media_id: MediaId,
links: &[crate::model::MarkdownLink],
) -> Result<()>;
/// Get outgoing links from a media item, ordered by line number.
async fn get_outgoing_links(&self, media_id: MediaId) -> Result<Vec<crate::model::MarkdownLink>>;
/// Get backlinks (incoming links) to a media item, i.e. links from other
/// items whose resolved target is `media_id`.
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>>;
/// Clear all links for a media item (links where it is the source).
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()>;
/// Get graph data for visualization.
///
/// If `center_id` is provided, returns nodes within `depth` hops of that node.
/// If `center_id` is None, returns the entire graph (limited by internal max).
async fn get_graph_data(
&self,
center_id: Option<MediaId>,
depth: u32,
) -> Result<crate::model::GraphData>;
/// Resolve unresolved links by matching target_path against media item paths.
/// Returns the number of links that were resolved.
async fn resolve_links(&self) -> Result<u64>;
/// Update the links_extracted_at timestamp for a media item.
async fn mark_links_extracted(&self, media_id: MediaId) -> Result<()>;
/// Get count of unresolved links (links where target_media_id is NULL).
async fn count_unresolved_links(&self) -> Result<u64>;
}
/// Comprehensive library statistics.

View file

@ -200,6 +200,9 @@ fn row_to_media_item(row: &Row) -> Result<MediaItem> {
// Trash support
deleted_at: row.try_get("deleted_at").ok().flatten(),
// Markdown links extraction timestamp
links_extracted_at: row.try_get("links_extracted_at").ok().flatten(),
})
}
@ -6036,6 +6039,425 @@ impl StorageBackend for PostgresBackend {
let count: i64 = row.get(0);
Ok(count as u64)
}
// ===== Markdown Links (Obsidian-style) =====
/// Save extracted markdown links for a media item, replacing any existing
/// links for the source media.
///
/// The delete + inserts run inside a single transaction so a failure
/// partway through cannot leave the link set half-replaced (the original
/// implementation issued each statement independently).
async fn save_markdown_links(
    &self,
    media_id: MediaId,
    links: &[crate::model::MarkdownLink],
) -> Result<()> {
    let mut client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let media_id_str = media_id.0.to_string();
    let tx = client
        .transaction()
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    // Delete existing links for this source
    tx.execute(
        "DELETE FROM markdown_links WHERE source_media_id = $1",
        &[&media_id_str],
    )
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))?;
    // Prepare once; executing a prepared statement per link avoids
    // re-parsing the INSERT text on every iteration.
    let insert = tx
        .prepare(
            "INSERT INTO markdown_links (
id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)",
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    for link in links {
        let target_media_id = link.target_media_id.map(|id| id.0.to_string());
        tx.execute(
            &insert,
            &[
                &link.id.to_string(),
                &media_id_str,
                &link.target_path,
                &target_media_id,
                &link.link_type.to_string(),
                &link.link_text,
                &link.line_number,
                &link.context,
                &link.created_at,
            ],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    }
    tx.commit()
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    Ok(())
}
async fn get_outgoing_links(&self, media_id: MediaId) -> Result<Vec<crate::model::MarkdownLink>> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let media_id_str = media_id.0.to_string();
let rows = client
.query(
"SELECT id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
FROM markdown_links
WHERE source_media_id = $1
ORDER BY line_number",
&[&media_id_str],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let mut links = Vec::new();
for row in rows {
links.push(row_to_markdown_link(&row)?);
}
Ok(links)
}
/// Fetch backlinks: links stored on other items whose resolved target is
/// `media_id`, joined with the linking document's title and path.
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let media_id_str = media_id.0.to_string();
    let rows = client
        .query(
            "SELECT l.id, l.source_media_id, m.title, m.path,
l.link_text, l.line_number, l.context, l.link_type
FROM markdown_links l
JOIN media_items m ON l.source_media_id = m.id
WHERE l.target_media_id = $1
ORDER BY m.title, l.line_number",
            &[&media_id_str],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    rows.iter()
        .map(|row| {
            let raw_link_id: String = row.get(0);
            let raw_source_id: String = row.get(1);
            let raw_link_type: String = row.get(7);
            Ok(crate::model::BacklinkInfo {
                link_id: Uuid::parse_str(&raw_link_id)
                    .map_err(|e| PinakesError::Database(e.to_string()))?,
                source_id: MediaId(
                    Uuid::parse_str(&raw_source_id)
                        .map_err(|e| PinakesError::Database(e.to_string()))?,
                ),
                source_title: row.get(2),
                source_path: row.get(3),
                link_text: row.get(4),
                line_number: row.get(5),
                context: row.get(6),
                // Unknown link-type strings degrade to the wikilink variant.
                link_type: raw_link_type
                    .parse()
                    .unwrap_or(crate::model::LinkType::Wikilink),
            })
        })
        .collect()
}
/// Remove every stored link whose source is this media item.
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let id_text = media_id.0.to_string();
    client
        .execute(
            "DELETE FROM markdown_links WHERE source_media_id = $1",
            &[&id_text],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    Ok(())
}
/// Build graph data for visualization.
///
/// With a `center_id`, performs a breadth-first expansion (both link
/// directions) up to `depth` hops; without one, seeds the node set from up
/// to 500 markdown items. Nodes then get per-item metadata and link counts,
/// and edges are emitted only between nodes that are both in the set.
async fn get_graph_data(
&self,
center_id: Option<MediaId>,
depth: u32,
) -> Result<crate::model::GraphData> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
let depth = depth.min(5); // Limit depth
let mut nodes = Vec::new();
let mut edges = Vec::new();
let mut node_ids: std::collections::HashSet<String> = std::collections::HashSet::new();
if let Some(center) = center_id {
// BFS to find connected nodes within depth
let mut frontier = vec![center.0.to_string()];
let mut visited = std::collections::HashSet::new();
visited.insert(center.0.to_string());
for _ in 0..depth {
if frontier.is_empty() {
break;
}
let mut next_frontier = Vec::new();
// NOTE(review): this issues two queries per frontier node; fine for
// shallow/small graphs, consider a batched `= ANY($1)` query if this
// becomes hot.
for node_id in &frontier {
// Get outgoing links
let rows = client
.query(
"SELECT target_media_id FROM markdown_links
WHERE source_media_id = $1 AND target_media_id IS NOT NULL",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let id: String = row.get(0);
if !visited.contains(&id) {
visited.insert(id.clone());
next_frontier.push(id);
}
}
// Get incoming links
let rows = client
.query(
"SELECT source_media_id FROM markdown_links
WHERE target_media_id = $1",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let id: String = row.get(0);
if !visited.contains(&id) {
visited.insert(id.clone());
next_frontier.push(id);
}
}
}
frontier = next_frontier;
}
node_ids = visited;
} else {
// Get all markdown files with links (limit to 500)
let rows = client
.query(
"SELECT DISTINCT id FROM media_items
WHERE media_type = 'markdown' AND deleted_at IS NULL
LIMIT 500",
&[],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let id: String = row.get(0);
node_ids.insert(id);
}
}
// Build nodes with metadata.
// NOTE(review): three queries per node (N+1 pattern); bounded by the
// 500-node cap above, but a join/aggregate would cut round trips.
for node_id in &node_ids {
let row = client
.query_opt(
"SELECT id, COALESCE(title, file_name) as label, title, media_type
FROM media_items WHERE id = $1",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
// A node id with no matching media row (e.g. deleted) is silently skipped.
if let Some(row) = row {
let id: String = row.get(0);
let label: String = row.get(1);
let title: Option<String> = row.get(2);
let media_type: String = row.get(3);
// Count outgoing links
let link_count_row = client
.query_one(
"SELECT COUNT(*) FROM markdown_links WHERE source_media_id = $1",
&[&id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let link_count: i64 = link_count_row.get(0);
// Count incoming links
let backlink_count_row = client
.query_one(
"SELECT COUNT(*) FROM markdown_links WHERE target_media_id = $1",
&[&id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
let backlink_count: i64 = backlink_count_row.get(0);
nodes.push(crate::model::GraphNode {
id: id.clone(),
label,
title,
media_type,
link_count: link_count as u32,
backlink_count: backlink_count as u32,
});
}
}
// Build edges; only edges whose target is also in the node set are kept,
// so the returned graph is self-contained.
for node_id in &node_ids {
let rows = client
.query(
"SELECT source_media_id, target_media_id, link_type
FROM markdown_links
WHERE source_media_id = $1 AND target_media_id IS NOT NULL",
&[node_id],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
for row in rows {
let source: String = row.get(0);
let target: String = row.get(1);
let link_type_str: String = row.get(2);
if node_ids.contains(&target) {
edges.push(crate::model::GraphEdge {
source,
target,
link_type: link_type_str
.parse()
.unwrap_or(crate::model::LinkType::Wikilink),
});
}
}
}
Ok(crate::model::GraphData { nodes, edges })
}
/// Resolve unresolved links by matching `target_path` against media items.
///
/// Two passes: exact path match first, then Obsidian-style filename match.
/// Returns the total number of rows updated across both passes.
async fn resolve_links(&self) -> Result<u64> {
let client = self
.pool
.get()
.await
.map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
// Strategy 1: Exact path match.
// NOTE(review): the inner `LIMIT 1` has no ORDER BY, so when several
// items share a path the chosen target is arbitrary — confirm paths are
// unique, or add a deterministic ORDER BY.
let result1 = client
.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
)",
&[],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
// Strategy 2: Filename match (with or without the .md extension).
// NOTE(review): REPLACE(file_name, '.md', '') strips EVERY '.md'
// occurrence, not just a trailing extension (e.g. 'a.md.md' -> 'a');
// a suffix-only strip would be stricter — verify intended behavior.
let result2 = client
.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
)",
&[],
)
.await
.map_err(|e| PinakesError::Database(e.to_string()))?;
Ok(result1 + result2)
}
/// Stamp the media item with the current time so callers can tell its
/// links have been (re-)extracted.
async fn mark_links_extracted(&self, media_id: MediaId) -> Result<()> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let id_text = media_id.0.to_string();
    let stamped_at = chrono::Utc::now();
    client
        .execute(
            "UPDATE media_items SET links_extracted_at = $1 WHERE id = $2",
            &[&stamped_at, &id_text],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    Ok(())
}
/// Count links that have not yet been matched to a media item
/// (target_media_id still NULL).
async fn count_unresolved_links(&self) -> Result<u64> {
    let client = self
        .pool
        .get()
        .await
        .map_err(|e| PinakesError::Database(format!("pool error: {e}")))?;
    let row = client
        .query_one(
            "SELECT COUNT(*) FROM markdown_links WHERE target_media_id IS NULL",
            &[],
        )
        .await
        .map_err(|e| PinakesError::Database(e.to_string()))?;
    // COUNT(*) is never negative, so the cast cannot lose information.
    let unresolved: i64 = row.get(0);
    Ok(unresolved as u64)
}
}
impl PostgresBackend {
@ -6329,6 +6751,37 @@ fn find_first_fts_param(query: &SearchQuery) -> i32 {
find_inner(query, &mut offset).unwrap_or(1)
}
// Helper function to parse a markdown link row.
// Columns are positional and must match the SELECT lists of the
// markdown-link queries: id, source_media_id, target_path, target_media_id,
// link_type, link_text, line_number, context, created_at.
fn row_to_markdown_link(row: &Row) -> Result<crate::model::MarkdownLink> {
    // Shared UUID parse that maps failures onto the backend error type.
    let parse = |s: &str| Uuid::parse_str(s).map_err(|e| PinakesError::Database(e.to_string()));
    let raw_id: String = row.get(0);
    let raw_source: String = row.get(1);
    let raw_target: Option<String> = row.get(3);
    let raw_type: String = row.get(4);
    Ok(crate::model::MarkdownLink {
        id: parse(&raw_id)?,
        source_media_id: MediaId(parse(&raw_source)?),
        target_path: row.get(2),
        // NULL or malformed target ids are treated as "unresolved".
        target_media_id: raw_target
            .and_then(|s| Uuid::parse_str(&s).ok())
            .map(MediaId),
        // Unknown link-type strings fall back to the wikilink variant.
        link_type: raw_type
            .parse()
            .unwrap_or(crate::model::LinkType::Wikilink),
        link_text: row.get(5),
        line_number: row.get(6),
        context: row.get(7),
        created_at: row.get(8),
    })
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -160,6 +160,14 @@ fn row_to_media_item(row: &Row) -> rusqlite::Result<MediaItem> {
.flatten()
.and_then(|s| DateTime::parse_from_rfc3339(&s).ok())
.map(|dt| dt.with_timezone(&Utc)),
// Markdown links extraction timestamp
links_extracted_at: row
.get::<_, Option<String>>("links_extracted_at")
.ok()
.flatten()
.and_then(|s| DateTime::parse_from_rfc3339(&s).ok())
.map(|dt| dt.with_timezone(&Utc)),
})
}
@ -6379,6 +6387,428 @@ impl StorageBackend for SqliteBackend {
Ok(count)
}
// ===== Markdown Links (Obsidian-style) =====
/// Save extracted markdown links for a media item, replacing any existing
/// links for the source media.
///
/// The delete + inserts now run inside a single transaction so a failure
/// partway through cannot leave the link set half-replaced (previously each
/// statement committed independently).
async fn save_markdown_links(
    &self,
    media_id: MediaId,
    links: &[crate::model::MarkdownLink],
) -> Result<()> {
    let conn = self.conn.clone();
    let media_id_str = media_id.0.to_string();
    // Owned copy so the data can move into the blocking task.
    let links: Vec<_> = links.to_vec();
    tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        // `unchecked_transaction` because the connection sits behind a
        // Mutex and only a shared reference is available here.
        let tx = conn.unchecked_transaction()?;
        // Delete existing links for this source
        tx.execute(
            "DELETE FROM markdown_links WHERE source_media_id = ?1",
            [&media_id_str],
        )?;
        {
            // Prepare once, re-bind per link.
            let mut stmt = tx.prepare(
                "INSERT INTO markdown_links (
id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
            )?;
            for link in &links {
                stmt.execute(params![
                    link.id.to_string(),
                    media_id_str,
                    link.target_path,
                    link.target_media_id.map(|id| id.0.to_string()),
                    link.link_type.to_string(),
                    link.link_text,
                    link.line_number,
                    link.context,
                    // Timestamps are stored as RFC 3339 text in this backend.
                    link.created_at.to_rfc3339(),
                ])?;
            }
        }
        tx.commit()?;
        Ok::<_, rusqlite::Error>(())
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(())
}
async fn get_outgoing_links(&self, media_id: MediaId) -> Result<Vec<crate::model::MarkdownLink>> {
let conn = self.conn.clone();
let media_id_str = media_id.0.to_string();
let links = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
let mut stmt = conn.prepare(
"SELECT id, source_media_id, target_path, target_media_id,
link_type, link_text, line_number, context, created_at
FROM markdown_links
WHERE source_media_id = ?1
ORDER BY line_number"
)?;
let rows = stmt.query_map([&media_id_str], |row| {
row_to_markdown_link(row)
})?;
let mut links = Vec::new();
for row in rows {
links.push(row?);
}
Ok::<_, rusqlite::Error>(links)
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(links)
}
async fn get_backlinks(&self, media_id: MediaId) -> Result<Vec<crate::model::BacklinkInfo>> {
let conn = self.conn.clone();
let media_id_str = media_id.0.to_string();
let backlinks = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
let mut stmt = conn.prepare(
"SELECT l.id, l.source_media_id, m.title, m.path,
l.link_text, l.line_number, l.context, l.link_type
FROM markdown_links l
JOIN media_items m ON l.source_media_id = m.id
WHERE l.target_media_id = ?1
ORDER BY m.title, l.line_number"
)?;
let rows = stmt.query_map([&media_id_str], |row| {
let link_id_str: String = row.get(0)?;
let source_id_str: String = row.get(1)?;
let source_title: Option<String> = row.get(2)?;
let source_path: String = row.get(3)?;
let link_text: Option<String> = row.get(4)?;
let line_number: Option<i32> = row.get(5)?;
let context: Option<String> = row.get(6)?;
let link_type_str: String = row.get(7)?;
Ok(crate::model::BacklinkInfo {
link_id: parse_uuid(&link_id_str)?,
source_id: MediaId(parse_uuid(&source_id_str)?),
source_title,
source_path,
link_text,
line_number,
context,
link_type: link_type_str.parse().unwrap_or(crate::model::LinkType::Wikilink),
})
})?;
let mut backlinks = Vec::new();
for row in rows {
backlinks.push(row?);
}
Ok::<_, rusqlite::Error>(backlinks)
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(backlinks)
}
/// Drop every stored link originating from this media item.
async fn clear_links_for_media(&self, media_id: MediaId) -> Result<()> {
    let conn = self.conn.clone();
    let id_text = media_id.0.to_string();
    tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        conn.execute(
            "DELETE FROM markdown_links WHERE source_media_id = ?1",
            [&id_text],
        )
        .map(|_| ())
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(())
}
/// Build graph data for visualization.
///
/// With a `center_id`, performs a breadth-first expansion (both link
/// directions) up to `depth` hops; without one, seeds the node set from up
/// to 500 markdown items. Nodes then get per-item metadata and link counts,
/// and edges are emitted only between nodes that are both in the set.
async fn get_graph_data(
    &self,
    center_id: Option<MediaId>,
    depth: u32,
) -> Result<crate::model::GraphData> {
    let conn = self.conn.clone();
    let center_id_str = center_id.map(|id| id.0.to_string());
    let depth = depth.min(5); // Limit depth to prevent huge queries
    let graph_data = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        let mut nodes = Vec::new();
        let mut edges = Vec::new();
        let mut node_ids = std::collections::HashSet::new();
        // Get nodes - either all markdown files or those connected to center
        if let Some(center_id) = center_id_str {
            // BFS to find connected nodes within depth
            let mut frontier = vec![center_id.clone()];
            let mut visited = std::collections::HashSet::new();
            visited.insert(center_id.clone());
            for _ in 0..depth {
                // Stop early once there is nothing left to expand.
                // (Added for parity with the Postgres implementation.)
                if frontier.is_empty() {
                    break;
                }
                let mut next_frontier = Vec::new();
                for node_id in &frontier {
                    // Get outgoing links
                    let mut stmt = conn.prepare(
                        "SELECT target_media_id FROM markdown_links
WHERE source_media_id = ?1 AND target_media_id IS NOT NULL",
                    )?;
                    let rows = stmt.query_map([node_id], |row| {
                        let id: String = row.get(0)?;
                        Ok(id)
                    })?;
                    for row in rows {
                        let id = row?;
                        if !visited.contains(&id) {
                            visited.insert(id.clone());
                            next_frontier.push(id);
                        }
                    }
                    // Get incoming links
                    let mut stmt = conn.prepare(
                        "SELECT source_media_id FROM markdown_links
WHERE target_media_id = ?1",
                    )?;
                    let rows = stmt.query_map([node_id], |row| {
                        let id: String = row.get(0)?;
                        Ok(id)
                    })?;
                    for row in rows {
                        let id = row?;
                        if !visited.contains(&id) {
                            visited.insert(id.clone());
                            next_frontier.push(id);
                        }
                    }
                }
                frontier = next_frontier;
            }
            node_ids = visited;
        } else {
            // Get all markdown files with links (limit to 500 for performance)
            let mut stmt = conn.prepare(
                "SELECT DISTINCT id FROM media_items
WHERE media_type = 'markdown' AND deleted_at IS NULL
LIMIT 500",
            )?;
            let rows = stmt.query_map([], |row| {
                let id: String = row.get(0)?;
                Ok(id)
            })?;
            for row in rows {
                node_ids.insert(row?);
            }
        }
        // Build nodes with metadata. A node id with no matching media row
        // (e.g. deleted) is silently skipped.
        for node_id in &node_ids {
            let mut stmt = conn.prepare(
                "SELECT id, COALESCE(title, file_name) as label, title, media_type
FROM media_items WHERE id = ?1",
            )?;
            if let Ok((id, label, title, media_type)) = stmt.query_row([node_id], |row| {
                Ok((
                    row.get::<_, String>(0)?,
                    row.get::<_, String>(1)?,
                    row.get::<_, Option<String>>(2)?,
                    row.get::<_, String>(3)?,
                ))
            }) {
                // Count outgoing links
                let link_count: i64 = conn.query_row(
                    "SELECT COUNT(*) FROM markdown_links WHERE source_media_id = ?1",
                    [&id],
                    |row| row.get(0),
                )?;
                // Count incoming links
                let backlink_count: i64 = conn.query_row(
                    "SELECT COUNT(*) FROM markdown_links WHERE target_media_id = ?1",
                    [&id],
                    |row| row.get(0),
                )?;
                nodes.push(crate::model::GraphNode {
                    id: id.clone(),
                    label,
                    title,
                    media_type,
                    link_count: link_count as u32,
                    backlink_count: backlink_count as u32,
                });
            }
        }
        // Build edges; only edges whose target is also in the node set are
        // kept, so the returned graph is self-contained.
        for node_id in &node_ids {
            let mut stmt = conn.prepare(
                "SELECT source_media_id, target_media_id, link_type
FROM markdown_links
WHERE source_media_id = ?1 AND target_media_id IS NOT NULL",
            )?;
            let rows = stmt.query_map([node_id], |row| {
                let source: String = row.get(0)?;
                let target: String = row.get(1)?;
                let link_type_str: String = row.get(2)?;
                Ok((source, target, link_type_str))
            })?;
            for row in rows {
                let (source, target, link_type_str) = row?;
                if node_ids.contains(&target) {
                    edges.push(crate::model::GraphEdge {
                        source,
                        target,
                        link_type: link_type_str.parse().unwrap_or(crate::model::LinkType::Wikilink),
                    });
                }
            }
        }
        Ok::<_, rusqlite::Error>(crate::model::GraphData { nodes, edges })
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(graph_data)
}
/// Resolve unresolved links by matching `target_path` against media items.
///
/// Two passes: exact path match first, then Obsidian-style filename match.
/// Returns the total number of rows updated across both passes.
async fn resolve_links(&self) -> Result<u64> {
let conn = self.conn.clone();
let count = tokio::task::spawn_blocking(move || {
let conn = conn.lock().unwrap();
// Find unresolved links and try to resolve them
// Strategy 1: Exact path match.
// NOTE(review): the inner `LIMIT 1` has no ORDER BY, so when several
// items share a path the chosen target is arbitrary — confirm paths
// are unique, or add a deterministic ORDER BY.
let updated1 = conn.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE path = markdown_links.target_path
AND deleted_at IS NULL
)",
[],
)?;
// Strategy 2: Filename match (Obsidian-style)
// Match target_path to file_name (with or without .md extension)
// NOTE(review): REPLACE(file_name, '.md', '') strips EVERY '.md'
// occurrence, not just a trailing extension (e.g. 'a.md.md' -> 'a');
// a suffix-only strip would be stricter — verify intended behavior.
let updated2 = conn.execute(
"UPDATE markdown_links
SET target_media_id = (
SELECT id FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
LIMIT 1
)
WHERE target_media_id IS NULL
AND EXISTS (
SELECT 1 FROM media_items
WHERE (file_name = markdown_links.target_path
OR file_name = markdown_links.target_path || '.md'
OR REPLACE(file_name, '.md', '') = markdown_links.target_path)
AND deleted_at IS NULL
)",
[],
)?;
Ok::<_, rusqlite::Error>((updated1 + updated2) as u64)
})
.await
.map_err(|e| PinakesError::Database(e.to_string()))??;
Ok(count)
}
/// Stamp the media item with the current time (RFC 3339 text, matching the
/// backend's timestamp storage) so callers can tell its links were extracted.
async fn mark_links_extracted(&self, media_id: MediaId) -> Result<()> {
    let conn = self.conn.clone();
    let id_text = media_id.0.to_string();
    let stamp = chrono::Utc::now().to_rfc3339();
    tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        conn.execute(
            "UPDATE media_items SET links_extracted_at = ?1 WHERE id = ?2",
            params![stamp, id_text],
        )
        .map(|_| ())
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(())
}
/// Count links that have not yet been matched to a media item
/// (target_media_id still NULL).
async fn count_unresolved_links(&self) -> Result<u64> {
    let conn = self.conn.clone();
    let total = tokio::task::spawn_blocking(move || {
        let conn = conn.lock().unwrap();
        // COUNT(*) is never negative, so the cast cannot lose information.
        conn.query_row(
            "SELECT COUNT(*) FROM markdown_links WHERE target_media_id IS NULL",
            [],
            |row| row.get::<_, i64>(0),
        )
        .map(|n| n as u64)
    })
    .await
    .map_err(|e| PinakesError::Database(e.to_string()))??;
    Ok(total)
}
}
// Helper function to parse a markdown link row.
// Columns are positional and must match the SELECT lists of the
// markdown-link queries: id, source_media_id, target_path, target_media_id,
// link_type, link_text, line_number, context, created_at.
fn row_to_markdown_link(row: &Row) -> rusqlite::Result<crate::model::MarkdownLink> {
    let raw_id: String = row.get(0)?;
    let raw_source: String = row.get(1)?;
    let raw_target: Option<String> = row.get(3)?;
    let raw_type: String = row.get(4)?;
    let raw_created: String = row.get(8)?;
    Ok(crate::model::MarkdownLink {
        id: parse_uuid(&raw_id)?,
        source_media_id: MediaId(parse_uuid(&raw_source)?),
        target_path: row.get(2)?,
        // NULL or malformed target ids are treated as "unresolved".
        target_media_id: raw_target
            .and_then(|s| Uuid::parse_str(&s).ok())
            .map(MediaId),
        // Unknown link-type strings fall back to the wikilink variant.
        link_type: raw_type
            .parse()
            .unwrap_or(crate::model::LinkType::Wikilink),
        link_text: row.get(5)?,
        line_number: row.get(6)?,
        context: row.get(7)?,
        created_at: parse_datetime(&raw_created),
    })
}
// Helper function to parse a share row