memory tools: typed hippocampus fns + macro dispatch

Move tool implementations from tools/memory.rs to hippocampus/mod.rs
with proper typed signatures:
  fn name(store, provenance, ...typed args...) -> Result<String>

Optional params take Option<T>, defaults applied in implementation.

tools/memory.rs is now a thin dispatch layer using memory_tool! macro:
  memory_tool!(write, mut, key: [str], content: [str]);
  memory_tool!(search, ref, keys: [Vec<String>], max_hops: [Option<u32>], ...);

~634 lines of boilerplate replaced with ~30 one-liner invocations.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-13 13:03:24 -04:00
parent d7a5ac6347
commit 4560ba9230
2 changed files with 664 additions and 634 deletions

View file

@ -7,9 +7,6 @@ use std::sync::Arc;
use anyhow::{Context, Result};
use std::sync::OnceLock;
use crate::graph::Graph;
use crate::hippocampus::memory::MemoryNode;
use crate::neuro::{consolidation_priority, ReplayItem};
use crate::store::Store;
// ── Store handle ───────────────────────────────────────────────
@ -76,6 +73,103 @@ fn get_provenance(args: &serde_json::Value) -> String {
.to_string()
}
// ── Macro for generating tool wrappers ─────────────────────────
//
// memory_tool!(name, mut, arg1: [str], arg2: [f32], arg3: [Option<&str>])
// - mut/ref for store mutability
// - type markers in brackets: [str], [f32], [Vec<String>], ...
// - [Option<T>] for optional args; defaults applied in the implementation
// Expands to an async wrapper fn that (1) pulls each declared argument out
// of the JSON `args`, (2) reads provenance, (3) locks the cached store
// (mutably or not), and (4) delegates to the typed implementation of the
// same name in crate::hippocampus.
macro_rules! memory_tool {
    // Mutable store variant
    ($name:ident, mut $(, $($arg:ident : [$($typ:tt)+]),* $(,)?)?) => {
        async fn $name(args: &serde_json::Value) -> Result<String> {
            // Required extractors use `?`, so missing args fail before locking.
            $($(let $arg = memory_tool!(@extract args, $arg, $($typ)+);)*)?
            let prov = get_provenance(args);
            let arc = cached_store().await?;
            let mut store = arc.lock().await;
            crate::hippocampus::$name(&mut store, &prov $($(, $arg)*)?)
        }
    };
    // Immutable store variant
    ($name:ident, ref $(, $($arg:ident : [$($typ:tt)+]),* $(,)?)?) => {
        async fn $name(args: &serde_json::Value) -> Result<String> {
            $($(let $arg = memory_tool!(@extract args, $arg, $($typ)+);)*)?
            let prov = get_provenance(args);
            let arc = cached_store().await?;
            let store = arc.lock().await;
            crate::hippocampus::$name(&store, &prov $($(, $arg)*)?)
        }
    };
    // Required extractors - fail if missing
    (@extract $args:ident, $name:ident, str) => {
        get_str($args, stringify!($name))?
    };
    (@extract $args:ident, $name:ident, f32) => {
        // JSON numbers are f64; narrow after extraction.
        get_f64($args, stringify!($name))? as f32
    };
    (@extract $args:ident, $name:ident, Vec<String>) => {
        // Missing or non-array value yields an empty Vec; the typed
        // implementation decides whether an empty list is an error.
        $args.get(stringify!($name))
            .and_then(|v| v.as_array())
            .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect::<Vec<_>>())
            .unwrap_or_default()
    };
    // Optional extractors - return Option<T>; defaults live in the impls.
    (@extract $args:ident, $name:ident, Option<&str>) => {
        $args.get(stringify!($name)).and_then(|v| v.as_str())
    };
    (@extract $args:ident, $name:ident, Option<bool>) => {
        $args.get(stringify!($name)).and_then(|v| v.as_bool())
    };
    (@extract $args:ident, $name:ident, Option<u64>) => {
        $args.get(stringify!($name)).and_then(|v| v.as_u64())
    };
    (@extract $args:ident, $name:ident, Option<i64>) => {
        $args.get(stringify!($name)).and_then(|v| v.as_i64())
    };
    (@extract $args:ident, $name:ident, Option<usize>) => {
        $args.get(stringify!($name)).and_then(|v| v.as_u64()).map(|v| v as usize)
    };
    (@extract $args:ident, $name:ident, Option<u32>) => {
        $args.get(stringify!($name)).and_then(|v| v.as_u64()).map(|v| v as u32)
    };
    (@extract $args:ident, $name:ident, Option<f64>) => {
        $args.get(stringify!($name)).and_then(|v| v.as_f64())
    };
}
// ── Memory tools ───────────────────────────────────────────────
// Each invocation generates an `async fn name(&serde_json::Value) -> Result<String>`
// wrapper; the real logic lives in crate::hippocampus under the same name.
memory_tool!(render, ref, key: [str], raw: [Option<bool>]);
memory_tool!(write, mut, key: [str], content: [str]);
memory_tool!(search, ref, keys: [Vec<String>], max_hops: [Option<u32>], edge_decay: [Option<f64>], min_activation: [Option<f64>], limit: [Option<usize>]);
memory_tool!(links, ref, key: [str]);
memory_tool!(link_set, mut, source: [str], target: [str], strength: [f32]);
memory_tool!(link_add, mut, source: [str], target: [str]);
memory_tool!(delete, mut, key: [str]);
memory_tool!(history, ref, key: [str], full: [Option<bool>]);
memory_tool!(weight_set, mut, key: [str], weight: [f32]);
memory_tool!(rename, mut, old_key: [str], new_key: [str]);
memory_tool!(supersede, mut, old_key: [str], new_key: [str], reason: [Option<&str>]);
memory_tool!(query, ref, query: [str], format: [Option<&str>]);
// ── Journal tools ──────────────────────────────────────────────
memory_tool!(journal_tail, ref, count: [Option<u64>], level: [Option<u64>], format: [Option<&str>], after: [Option<&str>]);
memory_tool!(journal_new, mut, name: [str], title: [str], body: [str], level: [Option<i64>]);
memory_tool!(journal_update, mut, body: [str], level: [Option<i64>]);
// ── Graph tools ───────────────────────────────────────────────
// Graph tools with no JSON parameters still take `args` (for provenance).
memory_tool!(graph_topology, ref);
memory_tool!(graph_health, ref);
memory_tool!(graph_communities, ref, top_n: [Option<usize>], min_size: [Option<usize>]);
memory_tool!(graph_normalize_strengths, mut, apply: [Option<bool>]);
memory_tool!(graph_link_impact, ref, source: [str], target: [str]);
memory_tool!(graph_hubs, ref, count: [Option<usize>]);
memory_tool!(graph_trace, ref, key: [str]);
/// Single entry point for all memory/journal tool calls.
/// If not daemon, forwards to daemon with provenance attached.
async fn dispatch(
@ -111,8 +205,8 @@ async fn dispatch(
"memory_rename" => rename(&args).await,
"memory_supersede" => supersede(&args).await,
"memory_query" => query(&args).await,
"graph_topology" => graph_topology().await,
"graph_health" => graph_health().await,
"graph_topology" => graph_topology(&args).await,
"graph_health" => graph_health(&args).await,
"graph_communities" => graph_communities(&args).await,
"graph_normalize_strengths" => graph_normalize_strengths(&args).await,
"graph_trace" => graph_trace(&args).await,
@ -225,632 +319,3 @@ pub fn journal_tools() -> [super::Tool; 3] {
handler: Arc::new(|a, v| Box::pin(async move { dispatch("journal_update", &a, v).await })) },
]
}
// ── Memory tools ───────────────────────────────────────────────
/// Render a memory node: full rendered form by default, the raw stored
/// content when `raw: true` is present in the JSON args.
async fn render(args: &serde_json::Value) -> Result<String> {
    let key = get_str(args, "key")?;
    let want_raw = matches!(args.get("raw").and_then(|v| v.as_bool()), Some(true));
    let store_arc = cached_store().await?;
    let guard = store_arc.lock().await;
    match MemoryNode::from_store(&guard, key) {
        None => Err(anyhow::anyhow!("node not found: {}", key)),
        Some(node) => Ok(if want_raw { node.content } else { node.render() }),
    }
}
/// Create or update a node's content, recording provenance, then persist
/// the store. Returns the upsert outcome plus the key.
async fn write(args: &serde_json::Value) -> Result<String> {
    let (key, content) = (get_str(args, "key")?, get_str(args, "content")?);
    let provenance = get_provenance(args);
    let store_arc = cached_store().await?;
    let mut guard = store_arc.lock().await;
    let outcome = guard
        .upsert_provenance(key, content, &provenance)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    guard.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("{} '{}'", outcome, key))
}
/// Spreading-activation search seeded from one or more node keys.
/// Optional knobs default to: max_hops=3, edge_decay=0.3,
/// min_activation=0.01, limit=20.
async fn search(args: &serde_json::Value) -> Result<String> {
    let keys: Vec<String> = args.get("keys")
        .and_then(|v| v.as_array())
        .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
        .unwrap_or_default();
    if keys.is_empty() {
        anyhow::bail!("memory_search requires at least one seed key");
    }
    // Optional params with defaults
    let max_hops = args.get("max_hops").and_then(|v| v.as_u64()).unwrap_or(3) as u32;
    let edge_decay = args.get("edge_decay").and_then(|v| v.as_f64()).unwrap_or(0.3);
    let min_activation = args.get("min_activation").and_then(|v| v.as_f64()).unwrap_or(0.01);
    let limit = args.get("limit").and_then(|v| v.as_u64()).unwrap_or(20) as usize;
    let arc = cached_store().await?;
    let store = arc.lock().await;
    let graph = crate::graph::build_graph_fast(&*store);
    // Alias-resolve seeds; unresolvable keys are dropped rather than erroring.
    // Every seed starts with full activation 1.0.
    let seeds: Vec<(String, f64)> = keys.iter()
        .filter_map(|k| {
            let resolved = store.resolve_key(k).ok()?;
            Some((resolved, 1.0))
        })
        .collect();
    if seeds.is_empty() {
        anyhow::bail!("no valid seed keys found");
    }
    let seed_set: std::collections::HashSet<&str> = seeds.iter()
        .map(|(k, _)| k.as_str()).collect();
    let results = crate::search::spreading_activation(
        &seeds, &graph, &*store,
        max_hops, edge_decay, min_activation,
    );
    // The seeds themselves are excluded from the ranked output.
    Ok(results.iter()
        .filter(|(k, _)| !seed_set.contains(k.as_str()))
        .take(limit)
        .map(|(key, score)| format!(" {:.2} {}", score, key))
        .collect::<Vec<_>>().join("\n"))
}
/// List the neighbors of a node with their link strengths; links created
/// in the current session are tagged "(new)".
async fn links(args: &serde_json::Value) -> Result<String> {
    let key = get_str(args, "key")?;
    let store_arc = cached_store().await?;
    let guard = store_arc.lock().await;
    let node = MemoryNode::from_store(&guard, key)
        .ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
    let mut listing = format!("Neighbors of '{}':\n", key);
    for (target, strength, is_new) in &node.links {
        listing.push_str(&format!(" ({:.2}) {}{}\n", strength, target,
            if *is_new { " (new)" } else { "" }));
    }
    Ok(listing)
}
/// Set the strength of a link between two (alias-resolved) nodes and
/// persist. The result reports old and new strengths.
async fn link_set(args: &serde_json::Value) -> Result<String> {
    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
    let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
    // JSON numbers arrive as f64; narrow to the store's f32 strength.
    let strength = get_f64(args, "strength")? as f32;
    let old = store.set_link_strength(&s, &t, strength).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("{}{} strength {:.2}{:.2}", s, t, old, strength))
}
/// Add a link between two (alias-resolved) nodes with provenance and
/// persist. The initial strength is chosen by Store::add_link.
async fn link_add(args: &serde_json::Value) -> Result<String> {
    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
    let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
    let prov = get_provenance(args);
    let strength = store.add_link(&s, &t, &prov).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("linked {}{} (strength={:.2})", s, t, strength))
}
/// Delete a node (after alias resolution) and persist the store.
async fn delete(args: &serde_json::Value) -> Result<String> {
    let raw_key = get_str(args, "key")?;
    let store_arc = cached_store().await?;
    let mut guard = store_arc.lock().await;
    let canonical = guard.resolve_key(raw_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    guard.delete_node(&canonical).map_err(|e| anyhow::anyhow!("{}", e))?;
    guard.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("deleted {}", canonical))
}
/// Show every logged version of a node by replaying the append-only
/// Cap'n Proto node log. `full: true` prints complete content per
/// version; otherwise a one-line 120-char preview is shown.
async fn history(args: &serde_json::Value) -> Result<String> {
    let key = get_str(args, "key")?;
    let full = args.get("full").and_then(|v| v.as_bool()).unwrap_or(false);
    let arc = cached_store().await?;
    let store = arc.lock().await;
    // Alias-resolve if possible; otherwise fall back to the literal key.
    let key = store.resolve_key(key).unwrap_or_else(|_| key.to_string());
    // Release the store lock before the (potentially slow) log scan.
    drop(store);
    let path = crate::store::nodes_path();
    if !path.exists() {
        anyhow::bail!("No node log found");
    }
    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| anyhow::anyhow!("open {}: {}", path.display(), e))?;
    let mut reader = BufReader::new(file);
    let mut versions: Vec<crate::store::Node> = Vec::new();
    // Read log messages until read_message fails (EOF ends the scan).
    while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<crate::memory_capnp::node_log::Reader>()
            .map_err(|e| anyhow::anyhow!("read log: {}", e))?;
        for node_reader in log.get_nodes()
            .map_err(|e| anyhow::anyhow!("get nodes: {}", e))? {
            let node = crate::store::Node::from_capnp_migrate(node_reader)
                .map_err(|e| anyhow::anyhow!("{}", e))?;
            if node.key == key {
                versions.push(node);
            }
        }
    }
    if versions.is_empty() {
        anyhow::bail!("No history found for '{}'", key);
    }
    let mut out = format!("{} versions of '{}':\n\n", versions.len(), key);
    for node in &versions {
        let ts = crate::store::format_datetime(node.timestamp);
        let deleted = if node.deleted { " DELETED" } else { "" };
        if full {
            out.push_str(&format!("=== v{} {} {}{} w={:.3} {}b ===\n",
                node.version, ts, node.provenance, deleted, node.weight, node.content.len()));
            out.push_str(&node.content);
            out.push('\n');
        } else {
            // Newlines escaped so each version stays on one preview line.
            let preview = crate::util::first_n_chars(&node.content, 120).replace('\n', "\\n");
            out.push_str(&format!("v{:<3} {} {:24} w={:.3} {}b{}\n {}\n",
                node.version, ts, node.provenance, node.weight, node.content.len(), deleted, preview));
        }
    }
    Ok(out)
}
/// Set a node's weight (after alias resolution) and persist; reports
/// the old and new values.
async fn weight_set(args: &serde_json::Value) -> Result<String> {
    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    let key = store.resolve_key(get_str(args, "key")?).map_err(|e| anyhow::anyhow!("{}", e))?;
    // JSON numbers arrive as f64; narrow to the store's f32 weight.
    let weight = get_f64(args, "weight")? as f32;
    let (old, new) = store.set_weight(&key, weight).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("weight {} {:.2}{:.2}", key, old, new))
}
/// Rename a node, resolving the old key through aliases first, then
/// persist the store.
async fn rename(args: &serde_json::Value) -> Result<String> {
    let (old_key, new_key) = (get_str(args, "old_key")?, get_str(args, "new_key")?);
    let store_arc = cached_store().await?;
    let mut guard = store_arc.lock().await;
    let canonical = guard.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    guard.rename_node(&canonical, new_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    guard.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("Renamed '{}' → '{}'", canonical, new_key))
}
/// Mark `old_key` as superseded by `new_key`: prepend a SUPERSEDED banner
/// to its content, drop its weight to 0.01, and persist.
async fn supersede(args: &serde_json::Value) -> Result<String> {
    let old_key = get_str(args, "old_key")?;
    let new_key = get_str(args, "new_key")?;
    let reason = args.get("reason").and_then(|v| v.as_str()).unwrap_or("superseded");
    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    // NOTE(review): old_key is looked up directly rather than through
    // resolve_key as in other tools — confirm aliases are intentionally
    // unsupported here.
    let content = store.nodes.get(old_key)
        .map(|n| n.content.clone())
        .ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?;
    // Original content is preserved under the banner, trimmed.
    let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}",
        new_key, reason, content.trim());
    let prov = get_provenance(args);
    store.upsert_provenance(old_key, &notice, &prov)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    store.set_weight(old_key, 0.01).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("superseded {}{} ({})", old_key, new_key, reason))
}
/// Convert a list of keys to ReplayItems with priority and graph metrics.
/// Keys without a live entry in `store.nodes` are silently dropped.
pub fn keys_to_replay_items(
    store: &Store,
    keys: &[String],
    graph: &Graph,
) -> Vec<ReplayItem> {
    let mut items = Vec::with_capacity(keys.len());
    for key in keys {
        let Some(node) = store.nodes.get(key) else { continue };
        items.push(ReplayItem {
            key: key.clone(),
            priority: consolidation_priority(store, key, graph, None),
            interval_days: node.spaced_repetition_interval,
            emotion: node.emotion,
            cc: graph.clustering_coefficient(key),
            // Classification and outlier score are filled in by later stages.
            classification: "unknown",
            outlier_score: 0.0,
        });
    }
    items
}
/// Run a query-language expression against the store.
/// format "full" renders complete node sections with graph metrics;
/// any other value (default "compact") returns the compact string form.
async fn query(args: &serde_json::Value) -> Result<String> {
    let query_str = get_str(args, "query")?;
    let format = args.get("format").and_then(|v| v.as_str()).unwrap_or("compact");
    let arc = cached_store().await?;
    let store = arc.lock().await;
    let graph = store.build_graph();
    match format {
        "full" => {
            // Rich output with full content, graph metrics, hub analysis
            let results = crate::query_parser::execute_query(&store, &graph, query_str)
                .map_err(|e| anyhow::anyhow!("{}", e))?;
            let keys: Vec<String> = results.into_iter().map(|r| r.key).collect();
            let items = keys_to_replay_items(&store, &keys, &graph);
            Ok(crate::subconscious::prompts::format_nodes_section(&store, &items, &graph))
        }
        _ => {
            // Compact output: handles count, select, and all expression types
            crate::query_parser::query_to_string(&store, &graph, query_str)
                .map_err(|e| anyhow::anyhow!("{}", e))
        }
    }
}
// ── Journal tools ──────────────────────────────────────────────
/// Tail recent journal entries at a consolidation level by composing and
/// delegating to a query-language expression.
/// Defaults: count=1, level=0 (episodic/session), format="full".
/// `after` (YYYY-MM-DD) restricts results to entries newer than that date.
async fn journal_tail(args: &serde_json::Value) -> Result<String> {
    let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(1);
    let level = args.get("level").and_then(|v| v.as_u64()).unwrap_or(0);
    let format = args.get("format").and_then(|v| v.as_str()).unwrap_or("full");
    let after = args.get("after").and_then(|v| v.as_str());
    let type_name = match level {
        0 => "episodic",
        1 => "daily",
        2 => "weekly",
        3 => "monthly",
        _ => return Err(anyhow::anyhow!("invalid level: {} (0=journal, 1=daily, 2=weekly, 3=monthly)", level)),
    };
    let mut q = format!("all | type:{} | sort:timestamp", type_name);
    if let Some(date) = after {
        // Convert date to age in seconds; an unparseable date is ignored
        // rather than treated as an error.
        if let Ok(nd) = chrono::NaiveDate::parse_from_str(date, "%Y-%m-%d") {
            let ts = nd.and_hms_opt(0, 0, 0).unwrap().and_utc().timestamp();
            let age = chrono::Utc::now().timestamp() - ts;
            q.push_str(&format!(" | age:<{}", age));
        }
    }
    q.push_str(&format!(" | limit:{}", count));
    query(&serde_json::json!({"query": q, "format": format})).await
}
fn level_to_node_type(level: i64) -> crate::store::NodeType {
match level {
1 => crate::store::NodeType::EpisodicDaily,
2 => crate::store::NodeType::EpisodicWeekly,
3 => crate::store::NodeType::EpisodicMonthly,
_ => crate::store::NodeType::EpisodicSession,
}
}
/// Create a new journal entry at the given consolidation level.
///
/// The node key is derived from `name`: lowercased, characters other than
/// alphanumerics and '-' stripped, words joined with '-', truncated to at
/// most 80 bytes, and suffixed with "-2", "-3", … on key collision.
async fn journal_new(args: &serde_json::Value) -> Result<String> {
    let name = get_str(args, "name")?;
    let title = get_str(args, "title")?;
    let body = get_str(args, "body")?;
    let level = args.get("level").and_then(|v| v.as_i64()).unwrap_or(0);
    let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
    // NOTE(review): no separator between timestamp and title — confirm intended.
    let content = format!("## {}{}\n\n{}", ts, title, body);
    let base_key: String = name.split_whitespace()
        .map(|w| w.to_lowercase()
            .chars().filter(|c| c.is_alphanumeric() || *c == '-')
            .collect::<String>())
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>()
        .join("-");
    // is_alphanumeric() admits multi-byte Unicode characters, so truncate on
    // a char boundary — a plain `&base_key[..80]` would panic mid-character.
    let base_key = if base_key.len() > 80 {
        let mut end = 80;
        while !base_key.is_char_boundary(end) { end -= 1; }
        &base_key[..end]
    } else {
        base_key.as_str()
    };
    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    // Find a free key: base, then base-2, base-3, …
    let key = if store.nodes.contains_key(base_key) {
        let mut n = 2;
        loop {
            let candidate = format!("{}-{}", base_key, n);
            if !store.nodes.contains_key(&candidate) { break candidate; }
            n += 1;
        }
    } else {
        base_key.to_string()
    };
    let mut node = crate::store::new_node(&key, &content);
    node.node_type = level_to_node_type(level);
    node.provenance = get_provenance(args);
    store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    let word_count = body.split_whitespace().count();
    Ok(format!("New entry '{}' ({} words)", title, word_count))
}
/// Append `body` to the most recent journal entry at the given level.
/// Fails if no entry of that level exists yet.
async fn journal_update(args: &serde_json::Value) -> Result<String> {
    let body = get_str(args, "body")?;
    let level = args.get("level").and_then(|v| v.as_i64()).unwrap_or(0);
    let node_type = level_to_node_type(level);
    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    // Most recently created node of the matching type.
    let latest_key = store.nodes.values()
        .filter(|n| n.node_type == node_type)
        .max_by_key(|n| n.created_at)
        .map(|n| n.key.clone());
    let Some(key) = latest_key else {
        anyhow::bail!("no entry at level {} to update — use journal_new first", level);
    };
    // Key came from the map above, so the unwrap cannot fail.
    let existing = store.nodes.get(&key).unwrap().content.clone();
    let new_content = format!("{}\n\n{}", existing.trim_end(), body);
    let prov = get_provenance(args);
    store.upsert_provenance(&key, &new_content, &prov)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    let word_count = body.split_whitespace().count();
    Ok(format!("Updated last entry (+{} words)", word_count))
}
// ── Graph tools ───────────────────────────────────────────────
/// Render the store-wide graph topology header.
async fn graph_topology() -> Result<String> {
    let store_arc = cached_store().await?;
    let guard = store_arc.lock().await;
    let g = guard.build_graph();
    Ok(crate::subconscious::prompts::format_topology_header(&guard, &g))
}
/// Render the store-wide graph health section.
async fn graph_health() -> Result<String> {
    let store_arc = cached_store().await?;
    let guard = store_arc.lock().await;
    let g = guard.build_graph();
    Ok(crate::subconscious::prompts::format_health_section(&guard, &g))
}
/// Tabulate graph communities: id, size, isolation %, cross-edges, and a
/// five-member preview. Defaults: top_n=10, min_size=3.
async fn graph_communities(args: &serde_json::Value) -> Result<String> {
    let top_n = args.get("top_n").and_then(|v| v.as_u64()).unwrap_or(10) as usize;
    let min_size = args.get("min_size").and_then(|v| v.as_u64()).unwrap_or(3) as usize;
    let arc = cached_store().await?;
    let store = arc.lock().await;
    let g = store.build_graph();
    let infos = g.community_info();
    let total = infos.len();
    let shown: Vec<_> = infos.into_iter()
        .filter(|c| c.size >= min_size)
        .take(top_n)
        .collect();
    use std::fmt::Write;
    let mut out = String::new();
    // NOTE(review): shown.len() is capped by top_n, so the "with size >= N"
    // count can under-report when more than top_n communities qualify.
    writeln!(out, "{} communities total ({} with size >= {})\n",
        total, shown.len(), min_size).ok();
    writeln!(out, "{:<6} {:>5} {:>7} {:>7} members", "id", "size", "iso", "cross").ok();
    writeln!(out, "{}", "-".repeat(70)).ok();
    for c in &shown {
        // Preview the first five members, then "+k" for the remainder.
        let preview: Vec<&str> = c.members.iter()
            .take(5)
            .map(|s| s.as_str())
            .collect();
        let more = if c.size > 5 {
            format!(" +{}", c.size - 5)
        } else {
            String::new()
        };
        writeln!(out, "{:<6} {:>5} {:>6.0}% {:>7} {}{}",
            c.id, c.size, c.isolation * 100.0, c.cross_edges,
            preview.join(", "), more).ok();
    }
    Ok(out)
}
/// Recompute link strengths from Jaccard similarity of endpoint
/// neighborhoods. Dry-run by default; `apply: true` writes and saves.
/// Auto-created temporal links at exactly strength 1.0 are skipped.
async fn graph_normalize_strengths(args: &serde_json::Value) -> Result<String> {
    let apply = args.get("apply").and_then(|v| v.as_bool()).unwrap_or(false);
    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    let graph = store.build_graph();
    let strengths = graph.jaccard_strengths();
    // Build lookup from (source_key, target_key) → new_strength
    let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
    for (a, b, s) in &strengths {
        // Insert both directions so relation orientation doesn't matter.
        updates.insert((a.clone(), b.clone()), *s);
        updates.insert((b.clone(), a.clone()), *s);
    }
    let mut changed = 0usize;
    let mut unchanged = 0usize;
    let mut temporal_skipped = 0usize;
    let mut delta_sum: f64 = 0.0;
    // Histogram of new strengths in ten 0.1-wide buckets.
    let mut buckets = [0usize; 10];
    for rel in &mut store.relations {
        if rel.deleted { continue; }
        // Auto relations pinned at 1.0 are temporal links; leave them alone.
        if rel.strength == 1.0 && rel.rel_type == crate::store::RelationType::Auto {
            temporal_skipped += 1;
            continue;
        }
        if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
            let old_s = rel.strength;
            let delta = (new_s - old_s).abs();
            // Changes below 0.001 are treated as noise.
            if delta > 0.001 {
                delta_sum += delta as f64;
                if apply { rel.strength = new_s; }
                changed += 1;
            } else {
                unchanged += 1;
            }
            let bucket = ((new_s * 10.0) as usize).min(9);
            buckets[bucket] += 1;
        }
    }
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "Normalize link strengths (Jaccard similarity)").ok();
    writeln!(out, " Total edges in graph: {}", strengths.len()).ok();
    writeln!(out, " Would change: {}", changed).ok();
    writeln!(out, " Unchanged: {}", unchanged).ok();
    writeln!(out, " Temporal (skipped): {}", temporal_skipped).ok();
    if changed > 0 {
        writeln!(out, " Avg delta: {:.3}", delta_sum / changed as f64).ok();
    }
    writeln!(out).ok();
    writeln!(out, " Strength distribution:").ok();
    for (i, &count) in buckets.iter().enumerate() {
        let lo = i as f32 / 10.0;
        let hi = lo + 0.1;
        // One '#' per 50 entries, minimum one for any non-empty bucket.
        let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
        writeln!(out, " {:.1}-{:.1}: {:5} {}", lo, hi, count, bar).ok();
    }
    if apply {
        store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
        writeln!(out, "\nApplied {} strength updates.", changed).ok();
    } else {
        writeln!(out, "\nDry run. Pass apply:true to write changes.").ok();
    }
    Ok(out)
}
/// Report the structural impact of a (hypothetical or existing) link
/// between two alias-resolved nodes: degrees, clustering-coefficient and
/// Gini deltas, and a textual assessment.
async fn graph_link_impact(args: &serde_json::Value) -> Result<String> {
    let source = get_str(args, "source")?;
    let target = get_str(args, "target")?;
    let arc = cached_store().await?;
    let store = arc.lock().await;
    let source = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
    let target = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
    let g = store.build_graph();
    let impact = g.link_impact(&source, &target);
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "Link impact: {} → {}", source, target).ok();
    writeln!(out, " Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg).ok();
    writeln!(out, " Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community).ok();
    writeln!(out, " ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target).ok();
    writeln!(out, " ΔGini: {:+.6}", impact.delta_gini).ok();
    writeln!(out, " Assessment: {}", impact.assessment).ok();
    Ok(out)
}
/// List up to `count` (default 20) high-degree hub nodes, greedily chosen
/// so that no selected hub is a neighbor of an earlier one.
async fn graph_hubs(args: &serde_json::Value) -> Result<String> {
    let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(20) as usize;
    let arc = cached_store().await?;
    let store = arc.lock().await;
    let graph = store.build_graph();
    // Top hub nodes by degree, spread apart (skip neighbors of already-selected hubs)
    // Keys starting with '_' are internal nodes and excluded.
    let mut hubs: Vec<(String, usize)> = store.nodes.iter()
        .filter(|(k, n)| !n.deleted && !k.starts_with('_'))
        .map(|(k, _)| {
            let degree = graph.neighbors(k).len();
            (k.clone(), degree)
        })
        .collect();
    // Descending by degree.
    hubs.sort_by(|a, b| b.1.cmp(&a.1));
    let mut selected = Vec::new();
    let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
    for (key, degree) in &hubs {
        if seen.contains(key) { continue; }
        selected.push(format!(" - {} (degree {})", key, degree));
        // Mark neighbors as seen so we pick far-apart hubs
        for (nbr, _) in graph.neighbors(key) {
            seen.insert(nbr.clone());
        }
        seen.insert(key.clone());
        if selected.len() >= count { break; }
    }
    Ok(format!("## Hub nodes (link targets)\n\n{}", selected.join("\n")))
}
/// Trace one node: header (type, weight, source), a 200-char content
/// preview, then its neighbors grouped by node type (weekly/monthly,
/// daily, session, semantic), ending with per-group counts.
async fn graph_trace(args: &serde_json::Value) -> Result<String> {
    let key = get_str(args, "key")?;
    let arc = cached_store().await?;
    let store = arc.lock().await;
    let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
    let g = store.build_graph();
    let node = store.nodes.get(&resolved)
        .ok_or_else(|| anyhow::anyhow!("Node not found: {}", resolved))?;
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "=== {} ===", resolved).ok();
    writeln!(out, "Type: {:?} Weight: {:.2}", node.node_type, node.weight).ok();
    if !node.source_ref.is_empty() {
        writeln!(out, "Source: {}", node.source_ref).ok();
    }
    let preview = crate::util::truncate(&node.content, 200, "...");
    writeln!(out, "\n{}\n", preview).ok();
    // Walk neighbors, grouped by node type
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();
    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            // Weekly and monthly digests share a bucket.
            match nnode.node_type {
                crate::store::NodeType::EpisodicSession => episodic_session.push(entry),
                crate::store::NodeType::EpisodicDaily => episodic_daily.push(entry),
                crate::store::NodeType::EpisodicWeekly
                | crate::store::NodeType::EpisodicMonthly => episodic_weekly.push(entry),
                crate::store::NodeType::Semantic => semantic.push(entry),
            }
        }
    }
    if !episodic_weekly.is_empty() {
        writeln!(out, "Weekly digests:").ok();
        for (k, s, n) in &episodic_weekly {
            // First line of content as an 80-char preview.
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            writeln!(out, " [{:.2}] {} — {}", s, k, preview).ok();
        }
    }
    if !episodic_daily.is_empty() {
        writeln!(out, "Daily digests:").ok();
        for (k, s, n) in &episodic_daily {
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            writeln!(out, " [{:.2}] {} — {}", s, k, preview).ok();
        }
    }
    if !episodic_session.is_empty() {
        writeln!(out, "Session entries:").ok();
        for (k, s, n) in &episodic_session {
            // Skip blank lines and HTML comments when choosing the preview line.
            let preview = crate::util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            writeln!(out, " [{:.2}] {}", s, k).ok();
            if !n.source_ref.is_empty() {
                writeln!(out, " ↳ source: {}", n.source_ref).ok();
            }
            writeln!(out, " {}", preview).ok();
        }
    }
    if !semantic.is_empty() {
        writeln!(out, "Semantic links:").ok();
        for (k, s, _) in &semantic {
            writeln!(out, " [{:.2}] {}", s, k).ok();
        }
    }
    writeln!(out, "\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len()).ok();
    Ok(out)
}

View file

@ -4,6 +4,9 @@
// similarity scoring, spectral analysis, and neuroscience-inspired
// consolidation (spaced repetition, interference detection, schema
// assimilation).
//
// Tool implementations are typed functions that take &Store or &mut Store.
// The tools/memory.rs layer handles JSON parsing and RPC routing.
pub mod memory;
pub mod store;
@ -14,3 +17,565 @@ pub mod spectral;
pub mod neuro;
pub mod counters;
pub mod transcript;
use anyhow::Result;
use crate::hippocampus::memory::MemoryNode;
use crate::hippocampus::store::Store;
use crate::graph::Graph;
use crate::neuro::{consolidation_priority, ReplayItem};
// ── Memory operations ──────────────────────────────────────────
/// Render a memory node for display; `raw: Some(true)` returns the stored
/// content verbatim instead of the rendered form. Provenance is unused.
pub fn render(store: &Store, _provenance: &str, key: &str, raw: Option<bool>) -> Result<String> {
    let node = match MemoryNode::from_store(store, key) {
        Some(n) => n,
        None => anyhow::bail!("node not found: {}", key),
    };
    if raw == Some(true) {
        Ok(node.content)
    } else {
        Ok(node.render())
    }
}
/// Create or update `key` with `content`, attributing `provenance`, and
/// persist the store.
pub fn write(store: &mut Store, provenance: &str, key: &str, content: &str) -> Result<String> {
    let outcome = store
        .upsert_provenance(key, content, provenance)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("{} '{}'", outcome, key))
}
/// Spreading-activation search seeded from one or more node keys.
/// Defaults when options are None: max_hops=3, edge_decay=0.3,
/// min_activation=0.01, limit=20. Seeds are excluded from the output.
pub fn search(
    store: &Store,
    _provenance: &str,
    keys: Vec<String>,
    max_hops: Option<u32>,
    edge_decay: Option<f64>,
    min_activation: Option<f64>,
    limit: Option<usize>,
) -> Result<String> {
    if keys.is_empty() {
        anyhow::bail!("memory_search requires at least one seed key");
    }
    let max_hops = max_hops.unwrap_or(3);
    let edge_decay = edge_decay.unwrap_or(0.3);
    let min_activation = min_activation.unwrap_or(0.01);
    let limit = limit.unwrap_or(20);
    let graph = crate::graph::build_graph_fast(store);
    // Alias-resolve seeds; unresolvable keys are silently dropped.
    // Every seed starts with full activation 1.0.
    let seeds: Vec<(String, f64)> = keys.iter()
        .filter_map(|k| {
            let resolved = store.resolve_key(k).ok()?;
            Some((resolved, 1.0))
        })
        .collect();
    if seeds.is_empty() {
        anyhow::bail!("no valid seed keys found");
    }
    let seed_set: std::collections::HashSet<&str> = seeds.iter()
        .map(|(k, _)| k.as_str()).collect();
    let results = crate::search::spreading_activation(
        &seeds, &graph, store,
        max_hops, edge_decay, min_activation,
    );
    Ok(results.iter()
        .filter(|(k, _)| !seed_set.contains(k.as_str()))
        .take(limit)
        .map(|(key, score)| format!(" {:.2} {}", score, key))
        .collect::<Vec<_>>().join("\n"))
}
/// List the neighbors of `key` with link strengths; links created in the
/// current session are tagged "(new)". Provenance is unused.
pub fn links(store: &Store, _provenance: &str, key: &str) -> Result<String> {
    let node = match MemoryNode::from_store(store, key) {
        Some(n) => n,
        None => anyhow::bail!("node not found: {}", key),
    };
    let mut listing = format!("Neighbors of '{}':\n", key);
    for (target, strength, is_new) in &node.links {
        listing.push_str(&format!(" ({:.2}) {}{}\n", strength, target,
            if *is_new { " (new)" } else { "" }));
    }
    Ok(listing)
}
/// Set the strength of a link between two (alias-resolved) nodes and
/// persist. The result reports old and new strengths.
pub fn link_set(store: &mut Store, _provenance: &str, source: &str, target: &str, strength: f32) -> Result<String> {
    let s = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
    let t = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
    let old = store.set_link_strength(&s, &t, strength).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("{}{} strength {:.2}{:.2}", s, t, old, strength))
}
/// Add a link between two (alias-resolved) nodes with provenance and
/// persist. The initial strength is chosen by Store::add_link.
pub fn link_add(store: &mut Store, provenance: &str, source: &str, target: &str) -> Result<String> {
    let s = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
    let t = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
    let strength = store.add_link(&s, &t, provenance).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("linked {}{} (strength={:.2})", s, t, strength))
}
/// Delete a node after alias resolution, then persist the store.
pub fn delete(store: &mut Store, _provenance: &str, key: &str) -> Result<String> {
    let canonical = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.delete_node(&canonical).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("deleted {}", canonical))
}
/// Show every logged version of a node by replaying the append-only
/// Cap'n Proto node log. `full: Some(true)` prints complete content per
/// version; otherwise a one-line 120-char preview is shown.
pub fn history(store: &Store, _provenance: &str, key: &str, full: Option<bool>) -> Result<String> {
    // Alias-resolve if possible; otherwise fall back to the literal key.
    let key = store.resolve_key(key).unwrap_or_else(|_| key.to_string());
    let full = full.unwrap_or(false);
    let path = crate::store::nodes_path();
    if !path.exists() {
        anyhow::bail!("No node log found");
    }
    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| anyhow::anyhow!("open {}: {}", path.display(), e))?;
    let mut reader = BufReader::new(file);
    let mut versions: Vec<crate::store::Node> = Vec::new();
    // Read log messages until read_message fails (EOF ends the scan).
    while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<crate::memory_capnp::node_log::Reader>()
            .map_err(|e| anyhow::anyhow!("read log: {}", e))?;
        for node_reader in log.get_nodes()
            .map_err(|e| anyhow::anyhow!("get nodes: {}", e))? {
            let node = crate::store::Node::from_capnp_migrate(node_reader)
                .map_err(|e| anyhow::anyhow!("{}", e))?;
            if node.key == key {
                versions.push(node);
            }
        }
    }
    if versions.is_empty() {
        anyhow::bail!("No history found for '{}'", key);
    }
    let mut out = format!("{} versions of '{}':\n\n", versions.len(), key);
    for node in &versions {
        let ts = crate::store::format_datetime(node.timestamp);
        let deleted = if node.deleted { " DELETED" } else { "" };
        if full {
            out.push_str(&format!("=== v{} {} {}{} w={:.3} {}b ===\n",
                node.version, ts, node.provenance, deleted, node.weight, node.content.len()));
            out.push_str(&node.content);
            out.push('\n');
        } else {
            // Newlines escaped so each version stays on one preview line.
            let preview = crate::util::first_n_chars(&node.content, 120).replace('\n', "\\n");
            out.push_str(&format!("v{:<3} {} {:24} w={:.3} {}b{}\n {}\n",
                node.version, ts, node.provenance, node.weight, node.content.len(), deleted, preview));
        }
    }
    Ok(out)
}
/// Set a node's weight and persist; the reply shows old and new values.
pub fn weight_set(store: &mut Store, _provenance: &str, key: &str, weight: f32) -> Result<String> {
    let canonical = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
    let (before, after) = store.set_weight(&canonical, weight).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("weight {} {:.2}{:.2}", canonical, before, after))
}
/// Rename a node (old key is resolved, new key is taken verbatim) and
/// persist the store.
pub fn rename(store: &mut Store, _provenance: &str, old_key: &str, new_key: &str) -> Result<String> {
    let canonical = store.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.rename_node(&canonical, new_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("Renamed '{}' → '{}'", canonical, new_key))
}
/// Mark `old_key` as superseded by `new_key`: prepend a SUPERSEDED
/// banner to its content, drop its weight to near-zero, and persist.
///
/// `reason` defaults to "superseded". The old node keeps its history;
/// it is not deleted.
pub fn supersede(store: &mut Store, provenance: &str, old_key: &str, new_key: &str, reason: Option<&str>) -> Result<String> {
    let reason = reason.unwrap_or("superseded");
    // Resolve the old key like every other mutating tool (delete, rename,
    // weight_set, link_*) so abbreviated keys work here too.
    let old_key = store.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    let content = store.nodes.get(&old_key)
        .map(|n| n.content.clone())
        .ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?;
    let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}",
        new_key, reason, content.trim());
    store.upsert_provenance(&old_key, &notice, provenance)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    // Near-zero weight so the superseded node stops surfacing in recall.
    store.set_weight(&old_key, 0.01).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("superseded {}{} ({})", old_key, new_key, reason))
}
/// Convert a list of keys to ReplayItems with priority and graph metrics.
///
/// Keys that are not present in the store are skipped silently.
pub fn keys_to_replay_items(
    store: &Store,
    keys: &[String],
    graph: &Graph,
) -> Vec<ReplayItem> {
    let mut items = Vec::with_capacity(keys.len());
    for key in keys {
        // Skip keys with no backing node.
        let Some(node) = store.nodes.get(key) else { continue };
        items.push(ReplayItem {
            key: key.clone(),
            priority: consolidation_priority(store, key, graph, None),
            interval_days: node.spaced_repetition_interval,
            emotion: node.emotion,
            cc: graph.clustering_coefficient(key),
            classification: "unknown",
            outlier_score: 0.0,
        });
    }
    items
}
/// Run a memory query string against the store.
///
/// `format` selects the renderer: "full" produces rich output with
/// complete content and graph metrics; anything else (default
/// "compact") uses the compact renderer, which also handles count,
/// select, and all expression types.
pub fn query(store: &Store, _provenance: &str, query_str: &str, format: Option<&str>) -> Result<String> {
    let graph = store.build_graph();
    if format == Some("full") {
        // Rich output with full content, graph metrics, hub analysis
        let results = crate::query_parser::execute_query(store, &graph, query_str)
            .map_err(|e| anyhow::anyhow!("{}", e))?;
        let keys: Vec<String> = results.into_iter().map(|r| r.key).collect();
        let items = keys_to_replay_items(store, &keys, &graph);
        return Ok(crate::subconscious::prompts::format_nodes_section(store, &items, &graph));
    }
    // Compact output: handles count, select, and all expression types
    crate::query_parser::query_to_string(store, &graph, query_str)
        .map_err(|e| anyhow::anyhow!("{}", e))
}
// ── Journal tools ──────────────────────────────────────────────
/// Return the most recent journal entries at the given summarization
/// level, by delegating to `query`.
///
/// `count` defaults to 1, `level` to 0 (session/episodic), `format` to
/// "full". `after` optionally restricts results to entries on or after
/// the given YYYY-MM-DD date; an unparseable date is an error (it was
/// previously ignored silently, returning unfiltered results).
pub fn journal_tail(store: &Store, _provenance: &str, count: Option<u64>, level: Option<u64>, format: Option<&str>, after: Option<&str>) -> Result<String> {
    let count = count.unwrap_or(1);
    let level = level.unwrap_or(0);
    let format = format.unwrap_or("full");
    let type_name = match level {
        0 => "episodic",
        1 => "daily",
        2 => "weekly",
        3 => "monthly",
        _ => return Err(anyhow::anyhow!("invalid level: {} (0=journal, 1=daily, 2=weekly, 3=monthly)", level)),
    };
    let mut q = format!("all | type:{} | sort:timestamp", type_name);
    if let Some(date) = after {
        // Convert the cutoff date to an age-in-seconds filter.
        let nd = chrono::NaiveDate::parse_from_str(date, "%Y-%m-%d")
            .map_err(|e| anyhow::anyhow!("invalid after date '{}' (expected YYYY-MM-DD): {}", date, e))?;
        // Midnight is always a valid time-of-day.
        let ts = nd.and_hms_opt(0, 0, 0).expect("midnight is valid").and_utc().timestamp();
        let age = chrono::Utc::now().timestamp() - ts;
        q.push_str(&format!(" | age:<{}", age));
    }
    q.push_str(&format!(" | limit:{}", count));
    query(store, _provenance, &q, Some(format))
}
fn level_to_node_type(level: i64) -> crate::store::NodeType {
match level {
1 => crate::store::NodeType::EpisodicDaily,
2 => crate::store::NodeType::EpisodicWeekly,
3 => crate::store::NodeType::EpisodicMonthly,
_ => crate::store::NodeType::EpisodicSession,
}
}
/// Create a new journal node from `name` (slugified into the key),
/// `title`, and `body`, at the given level (default 0 = session).
///
/// The key is a lowercase hyphen-slug of `name`, capped at 80 bytes,
/// with a `-2`, `-3`, … suffix appended on collision.
pub fn journal_new(store: &mut Store, provenance: &str, name: &str, title: &str, body: &str, level: Option<i64>) -> Result<String> {
    let level = level.unwrap_or(0);
    let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
    // NOTE(review): timestamp and title are concatenated with no
    // separator — looks like a missing " — "; confirm intended format.
    let content = format!("## {}{}\n\n{}", ts, title, body);
    // Slugify: lowercase words, keep only alphanumerics and '-', join
    // with '-'.
    let base_key: String = name.split_whitespace()
        .map(|w| w.to_lowercase()
            .chars().filter(|c| c.is_alphanumeric() || *c == '-')
            .collect::<String>())
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>()
        .join("-");
    // Cap at 80 bytes, backing up to a char boundary: is_alphanumeric()
    // admits non-ASCII letters, so a naive `[..80]` slice could land
    // mid-character and panic.
    let base_key: &str = if base_key.len() > 80 {
        let mut end = 80;
        while !base_key.is_char_boundary(end) {
            end -= 1;
        }
        &base_key[..end]
    } else {
        base_key.as_str()
    };
    // Disambiguate on collision: slug-2, slug-3, …
    let key = if store.nodes.contains_key(base_key) {
        let mut n = 2;
        loop {
            let candidate = format!("{}-{}", base_key, n);
            if !store.nodes.contains_key(&candidate) { break candidate; }
            n += 1;
        }
    } else {
        base_key.to_string()
    };
    let mut node = crate::store::new_node(&key, &content);
    node.node_type = level_to_node_type(level);
    node.provenance = provenance.to_string();
    store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    let word_count = body.split_whitespace().count();
    Ok(format!("New entry '{}' ({} words)", title, word_count))
}
pub fn journal_update(store: &mut Store, provenance: &str, body: &str, level: Option<i64>) -> Result<String> {
let level = level.unwrap_or(0);
let node_type = level_to_node_type(level);
let latest_key = store.nodes.values()
.filter(|n| n.node_type == node_type)
.max_by_key(|n| n.created_at)
.map(|n| n.key.clone());
let Some(key) = latest_key else {
anyhow::bail!("no entry at level {} to update — use journal_new first", level);
};
let existing = store.nodes.get(&key).unwrap().content.clone();
let new_content = format!("{}\n\n{}", existing.trim_end(), body);
store.upsert_provenance(&key, &new_content, provenance)
.map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
let word_count = body.split_whitespace().count();
Ok(format!("Updated last entry (+{} words)", word_count))
}
// ── Graph tools ───────────────────────────────────────────────
/// Render the topology header for the current memory graph.
pub fn graph_topology(store: &Store, _provenance: &str) -> Result<String> {
    let g = store.build_graph();
    let header = crate::subconscious::prompts::format_topology_header(store, &g);
    Ok(header)
}
/// Render the health section for the current memory graph.
pub fn graph_health(store: &Store, _provenance: &str) -> Result<String> {
    let g = store.build_graph();
    let section = crate::subconscious::prompts::format_health_section(store, &g);
    Ok(section)
}
/// Tabulate graph communities: id, size, isolation %, cross-edges, and
/// a member preview.
///
/// Shows at most `top_n` (default 10) communities of at least
/// `min_size` members (default 3). The header reports the true number
/// of qualifying communities, not just the number displayed.
pub fn graph_communities(store: &Store, _provenance: &str, top_n: Option<usize>, min_size: Option<usize>) -> Result<String> {
    let top_n = top_n.unwrap_or(10);
    let min_size = min_size.unwrap_or(3);
    let g = store.build_graph();
    let infos = g.community_info();
    let total = infos.len();
    // Count qualifying communities BEFORE truncating to top_n; the old
    // code printed shown.len() after .take(top_n), silently capping the
    // "with size >= N" figure at top_n.
    let qualifying: Vec<_> = infos.into_iter()
        .filter(|c| c.size >= min_size)
        .collect();
    let qualifying_count = qualifying.len();
    let shown: Vec<_> = qualifying.into_iter().take(top_n).collect();
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "{} communities total ({} with size >= {})\n",
        total, qualifying_count, min_size).ok();
    writeln!(out, "{:<6} {:>5} {:>7} {:>7} members", "id", "size", "iso", "cross").ok();
    writeln!(out, "{}", "-".repeat(70)).ok();
    for c in &shown {
        // Preview the first five members; note how many more exist.
        let preview: Vec<&str> = c.members.iter()
            .take(5)
            .map(|s| s.as_str())
            .collect();
        let more = if c.size > 5 {
            format!(" +{}", c.size - 5)
        } else {
            String::new()
        };
        writeln!(out, "{:<6} {:>5} {:>6.0}% {:>7} {}{}",
            c.id, c.size, c.isolation * 100.0, c.cross_edges,
            preview.join(", "), more).ok();
    }
    Ok(out)
}
/// Recompute non-temporal link strengths from Jaccard similarity of the
/// endpoints' neighborhoods.
///
/// Dry-run by default: reports how many relations would change, the
/// average delta, and a strength histogram. Pass `apply: true` to write
/// the new strengths and persist the store.
pub fn graph_normalize_strengths(store: &mut Store, _provenance: &str, apply: Option<bool>) -> Result<String> {
    let apply = apply.unwrap_or(false);
    let graph = store.build_graph();
    let strengths = graph.jaccard_strengths();
    // Build lookup from (source_key, target_key) → new_strength
    let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
    for (a, b, s) in &strengths {
        // Insert both directions so a relation row matches regardless of
        // which endpoint it lists as source.
        updates.insert((a.clone(), b.clone()), *s);
        updates.insert((b.clone(), a.clone()), *s);
    }
    let mut changed = 0usize;
    let mut unchanged = 0usize;
    let mut temporal_skipped = 0usize;
    let mut delta_sum: f64 = 0.0;
    // Histogram: 10 buckets of width 0.1 over the new strengths.
    let mut buckets = [0usize; 10];
    for rel in &mut store.relations {
        if rel.deleted { continue; }
        // NOTE(review): an Auto relation at exactly 1.0 is treated as a
        // temporal link and left untouched — presumably these are
        // created with exactly 1.0; confirm against the writer before
        // relying on this float equality.
        if rel.strength == 1.0 && rel.rel_type == crate::store::RelationType::Auto {
            temporal_skipped += 1;
            continue;
        }
        if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
            let old_s = rel.strength;
            let delta = (new_s - old_s).abs();
            // Deltas below 0.001 are counted as unchanged (but still
            // contribute to the histogram below).
            if delta > 0.001 {
                delta_sum += delta as f64;
                if apply { rel.strength = new_s; }
                changed += 1;
            } else {
                unchanged += 1;
            }
            let bucket = ((new_s * 10.0) as usize).min(9);
            buckets[bucket] += 1;
        }
    }
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "Normalize link strengths (Jaccard similarity)").ok();
    writeln!(out, "  Total edges in graph: {}", strengths.len()).ok();
    writeln!(out, "  Would change:  {}", changed).ok();
    writeln!(out, "  Unchanged:  {}", unchanged).ok();
    writeln!(out, "  Temporal (skipped):  {}", temporal_skipped).ok();
    if changed > 0 {
        writeln!(out, "  Avg delta:  {:.3}", delta_sum / changed as f64).ok();
    }
    writeln!(out).ok();
    writeln!(out, "  Strength distribution:").ok();
    for (i, &count) in buckets.iter().enumerate() {
        let lo = i as f32 / 10.0;
        let hi = lo + 0.1;
        // One '#' per 50 edges, with a minimum of one for non-empty buckets.
        let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
        writeln!(out, "  {:.1}-{:.1}: {:5} {}", lo, hi, count, bar).ok();
    }
    if apply {
        store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
        writeln!(out, "\nApplied {} strength updates.", changed).ok();
    } else {
        writeln!(out, "\nDry run. Pass apply:true to write changes.").ok();
    }
    Ok(out)
}
/// Report the structural impact of the link source→target: endpoint
/// degrees, hub/community status, clustering and Gini deltas, and the
/// graph's textual assessment.
pub fn graph_link_impact(store: &Store, _provenance: &str, source: &str, target: &str) -> Result<String> {
    use std::fmt::Write;
    let src = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
    let tgt = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
    let impact = store.build_graph().link_impact(&src, &tgt);
    let mut out = String::new();
    writeln!(out, "Link impact: {} → {}", src, tgt).ok();
    writeln!(out, "  Source degree: {}  Target degree: {}", impact.source_deg, impact.target_deg).ok();
    writeln!(out, "  Hub link: {}  Same community: {}", impact.is_hub_link, impact.same_community).ok();
    writeln!(out, "  ΔCC source: {:+.4}  ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target).ok();
    writeln!(out, "  ΔGini: {:+.6}", impact.delta_gini).ok();
    writeln!(out, "  Assessment: {}", impact.assessment).ok();
    Ok(out)
}
/// List up to `count` (default 20) hub nodes chosen greedily by degree,
/// spreading picks apart by excluding each chosen hub's neighborhood.
pub fn graph_hubs(store: &Store, _provenance: &str, count: Option<usize>) -> Result<String> {
    let limit = count.unwrap_or(20);
    let graph = store.build_graph();
    // Degree-ranked candidates, excluding tombstoned and internal
    // (underscore-prefixed) keys.
    let mut candidates: Vec<(String, usize)> = Vec::new();
    for (key, node) in store.nodes.iter() {
        if node.deleted || key.starts_with('_') {
            continue;
        }
        candidates.push((key.clone(), graph.neighbors(key).len()));
    }
    candidates.sort_by(|a, b| b.1.cmp(&a.1));
    // Greedy selection: once a hub is picked its whole neighborhood is
    // excluded, so the chosen hubs are spread across the graph.
    let mut picked = Vec::new();
    let mut excluded: std::collections::HashSet<String> = std::collections::HashSet::new();
    for (key, degree) in &candidates {
        if excluded.contains(key) {
            continue;
        }
        picked.push(format!("  - {} (degree {})", key, degree));
        for (nbr, _) in graph.neighbors(key) {
            excluded.insert(nbr.clone());
        }
        excluded.insert(key.clone());
        if picked.len() >= limit {
            break;
        }
    }
    Ok(format!("## Hub nodes (link targets)\n\n{}", picked.join("\n")))
}
/// Trace a node: show its metadata, a content preview, and all of its
/// neighbors grouped by node type (weekly/monthly digests, daily
/// digests, session entries, semantic links), with a link-count footer.
pub fn graph_trace(store: &Store, _provenance: &str, key: &str) -> Result<String> {
    let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
    let g = store.build_graph();
    let node = store.nodes.get(&resolved)
        .ok_or_else(|| anyhow::anyhow!("Node not found: {}", resolved))?;
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "=== {} ===", resolved).ok();
    writeln!(out, "Type: {:?}  Weight: {:.2}", node.node_type, node.weight).ok();
    if !node.source_ref.is_empty() {
        writeln!(out, "Source: {}", node.source_ref).ok();
    }
    // First 200 chars of content as a preview.
    let preview = crate::util::truncate(&node.content, 200, "...");
    writeln!(out, "\n{}\n", preview).ok();
    // Walk neighbors, grouped by node type (monthly folds into weekly).
    // The match is exhaustive over NodeType, so adding a variant is a
    // compile error here — intentional.
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();
    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            match nnode.node_type {
                crate::store::NodeType::EpisodicSession => episodic_session.push(entry),
                crate::store::NodeType::EpisodicDaily => episodic_daily.push(entry),
                crate::store::NodeType::EpisodicWeekly
                    | crate::store::NodeType::EpisodicMonthly => episodic_weekly.push(entry),
                crate::store::NodeType::Semantic => semantic.push(entry),
            }
        }
    }
    // Each non-empty group gets a section: [strength] key — first line.
    if !episodic_weekly.is_empty() {
        writeln!(out, "Weekly digests:").ok();
        for (k, s, n) in &episodic_weekly {
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            writeln!(out, "  [{:.2}] {} — {}", s, k, preview).ok();
        }
    }
    if !episodic_daily.is_empty() {
        writeln!(out, "Daily digests:").ok();
        for (k, s, n) in &episodic_daily {
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            writeln!(out, "  [{:.2}] {} — {}", s, k, preview).ok();
        }
    }
    if !episodic_session.is_empty() {
        writeln!(out, "Session entries:").ok();
        for (k, s, n) in &episodic_session {
            // Preview skips blank lines and HTML-comment metadata lines.
            let preview = crate::util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            writeln!(out, "  [{:.2}] {}", s, k).ok();
            if !n.source_ref.is_empty() {
                writeln!(out, "        ↳ source: {}", n.source_ref).ok();
            }
            writeln!(out, "        {}", preview).ok();
        }
    }
    if !semantic.is_empty() {
        writeln!(out, "Semantic links:").ok();
        for (k, s, _) in &semantic {
            writeln!(out, "  [{:.2}] {}", s, k).ok();
        }
    }
    writeln!(out, "\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len()).ok();
    Ok(out)
}