// tools/memory.rs — Native memory graph operations
//
// If running in the daemon process (STORE_HANDLE set), accesses
// the store directly. Otherwise forwards to the daemon via socket.

use std::sync::Arc;
use std::sync::OnceLock;

use anyhow::{Context, Result};

use crate::graph::Graph;
use crate::hippocampus::memory::MemoryNode;
use crate::neuro::{consolidation_priority, ReplayItem};
use crate::store::Store;
// ── Store handle ───────────────────────────────────────────────
/// Global store handle. Set by daemon at startup.
/// If None, tools forward to daemon socket.
static STORE_HANDLE: OnceLock<Arc<crate::Mutex<Store>>> = OnceLock::new();
// Thread-local store for rpc_local fallback path.
thread_local! {
static LOCAL_STORE: std::cell::RefCell<Option<Arc<crate::Mutex<Store>>>> =
const { std::cell::RefCell::new(None) };
}
/// Set the global store handle. Call once at daemon startup.
pub fn set_store(store: Arc<crate::Mutex<Store>>) {
STORE_HANDLE.set(store).ok();
}
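// Illustrative daemon-startup wiring (a sketch; the real entry point
// lives outside this module):
//
//     let store = Store::load()?;
//     set_store(Arc::new(crate::Mutex::new(store)));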
/// Check if we're running in daemon mode (have direct store access).
pub fn is_daemon() -> bool {
STORE_HANDLE.get().is_some() || LOCAL_STORE.with(|s| s.borrow().is_some())
}
// ── Helpers ────────────────────────────────────────────────────
fn get_str<'a>(args: &'a serde_json::Value, name: &'a str) -> Result<&'a str> {
args.get(name).and_then(|v| v.as_str()).context(format!("{} is required", name))
}
fn get_f64(args: &serde_json::Value, name: &str) -> Result<f64> {
args.get(name).and_then(|v| v.as_f64()).context(format!("{} is required", name))
}
async fn cached_store() -> Result<Arc<crate::Mutex<Store>>> {
// Check thread-local first (rpc_local fallback path)
if let Some(store) = LOCAL_STORE.with(|s| s.borrow().clone()) {
return Ok(store);
}
// Use global handle if set (daemon mode)
if let Some(store) = STORE_HANDLE.get() {
return Ok(store.clone());
}
// Fallback to loading (for backwards compat during transition)
Store::cached().await.map_err(|e| anyhow::anyhow!("{}", e))
}
/// Run a tool with a temporarily-opened store (for rpc_local fallback).
pub fn run_with_local_store(tool_name: &str, args: serde_json::Value) -> Result<String> {
let store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
let arc = Arc::new(crate::Mutex::new(store));
LOCAL_STORE.with(|s| *s.borrow_mut() = Some(arc));
let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
let name = tool_name.to_string();
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
.block_on(dispatch(&name, &None, args))
}));
LOCAL_STORE.with(|s| *s.borrow_mut() = None);
result.map_err(|_| anyhow::anyhow!("tool panicked"))?
}
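// Illustrative fallback invocation (node key hypothetical):
//
//     let rendered = run_with_local_store(
//         "memory_render",
//         serde_json::json!({"key": "some-node"}),
//     )?;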
/// Get provenance from agent, or from args._provenance, or "manual".
async fn get_provenance(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> String {
// Check args first (set by RPC path)
if let Some(p) = args.get("_provenance").and_then(|v| v.as_str()) {
return p.to_string();
}
match agent {
Some(a) => a.state.lock().await.provenance.clone(),
None => "manual".to_string(),
}
}
/// Single entry point for all memory/journal tool calls.
/// If not daemon, forwards to daemon with provenance attached.
async fn dispatch(
tool_name: &str,
agent: &Option<std::sync::Arc<crate::agent::Agent>>,
args: serde_json::Value,
) -> Result<String> {
if !is_daemon() {
// Forward to daemon, attaching provenance
let mut args = args;
if let Some(a) = agent {
let prov = a.state.lock().await.provenance.clone();
args.as_object_mut().map(|o| o.insert("_provenance".into(), prov.into()));
}
let name = tool_name.to_string();
return tokio::task::spawn_blocking(move || {
crate::mcp_server::memory_rpc(&name, args)
}).await.map_err(|e| anyhow::anyhow!("spawn_blocking: {}", e))?;
}
// Daemon path - dispatch to implementation
match tool_name {
"memory_render" => render(&args).await,
"memory_write" => write(agent, &args).await,
"memory_search" => search(&args).await,
"memory_links" => links(&args).await,
"memory_link_set" => link_set(&args).await,
"memory_link_add" => link_add(agent, &args).await,
"memory_delete" => delete(&args).await,
"memory_history" => history(&args).await,
"memory_weight_set" => weight_set(&args).await,
"memory_rename" => rename(&args).await,
"memory_supersede" => supersede(agent, &args).await,
"memory_query" => query(&args).await,
"graph_topology" => graph_topology().await,
"graph_health" => graph_health().await,
"graph_communities" => graph_communities(&args).await,
"graph_normalize_strengths" => graph_normalize_strengths(&args).await,
"graph_trace" => graph_trace(&args).await,
"graph_link_impact" => graph_link_impact(&args).await,
"graph_hubs" => graph_hubs(&args).await,
"journal_tail" => journal_tail(&args).await,
"journal_new" => journal_new(agent, &args).await,
"journal_update" => journal_update(agent, &args).await,
_ => anyhow::bail!("unknown tool: {}", tool_name),
}
}
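// Example of a forwarded payload: when an agent is attached, its
// provenance is inserted as `_provenance` before the socket hop;
// otherwise the daemon side falls back to "manual". Values illustrative:
//
//     {"key": "some-node", "content": "...", "_provenance": "..."}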
// ── Definitions ────────────────────────────────────────────────
pub fn memory_tools() -> [super::Tool; 15] {
use super::Tool;
[
Tool { name: "memory_render", description: "Read a memory node's content and links.",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_render", &a, v).await })) },
Tool { name: "memory_write", description: "Create or update a memory node.",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"},"content":{"type":"string","description":"Full content (markdown)"}},"required":["key","content"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_write", &a, v).await })) },
Tool { name: "memory_search", description: "Search the memory graph via spreading activation. Give 2-4 seed node keys.",
parameters_json: r#"{"type":"object","properties":{"keys":{"type":"array","items":{"type":"string"},"description":"Seed node keys to activate from"},"max_hops":{"type":"integer","description":"Max graph hops (default 3)"},"edge_decay":{"type":"number","description":"Decay per hop (default 0.3)"},"min_activation":{"type":"number","description":"Cutoff threshold (default 0.01)"},"limit":{"type":"integer","description":"Max results (default 20)"}},"required":["keys"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_search", &a, v).await })) },
Tool { name: "memory_links", description: "Show a node's neighbors with link strengths.",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_links", &a, v).await })) },
Tool { name: "memory_link_set", description: "Set link strength between two nodes.",
parameters_json: r#"{"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"},"strength":{"type":"number","description":"0.01 to 1.0"}},"required":["source","target","strength"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_link_set", &a, v).await })) },
Tool { name: "memory_link_add", description: "Add a new link between two nodes.",
parameters_json: r#"{"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"}},"required":["source","target"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_link_add", &a, v).await })) },
Tool { name: "memory_delete", description: "Delete a memory node.",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_delete", &a, v).await })) },
Tool { name: "memory_history", description: "Show version history for a node.",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"},"full":{"type":"boolean","description":"Show full content for each version"}},"required":["key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_history", &a, v).await })) },
Tool { name: "memory_weight_set", description: "Set a node's weight directly (0.01 to 1.0).",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string"},"weight":{"type":"number","description":"0.01 to 1.0"}},"required":["key","weight"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_weight_set", &a, v).await })) },
Tool { name: "memory_rename", description: "Rename a node key in place.",
parameters_json: r#"{"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"}},"required":["old_key","new_key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_rename", &a, v).await })) },
Tool { name: "memory_supersede", description: "Mark a node as superseded by another (sets weight to 0.01).",
parameters_json: r#"{"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"},"reason":{"type":"string"}},"required":["old_key","new_key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_supersede", &a, v).await })) },
Tool { name: "memory_query",
description: "Run a structured query against the memory graph.",
parameters_json: r#"{
"type": "object",
"properties": {
"query": {"type": "string", "description": "Query expression"},
"format": {"type": "string", "description": "compact (default) or full (with content and graph metrics)", "default": "compact"}
},
"required": ["query"]
}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_query", &a, v).await })) },
Tool { name: "graph_topology", description: "Show graph topology stats (nodes, edges, clustering, hubs).",
parameters_json: r#"{"type":"object","properties":{}}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("graph_topology", &a, v).await })) },
Tool { name: "graph_health", description: "Show graph health report with maintenance recommendations.",
parameters_json: r#"{"type":"object","properties":{}}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("graph_health", &a, v).await })) },
Tool { name: "graph_hubs", description: "Show top hub nodes by degree, spread apart for diverse link targets.",
parameters_json: r#"{"type":"object","properties":{"count":{"type":"integer","description":"Number of hubs to return (default 20)"}}}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("graph_hubs", &a, v).await })) },
]
}
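// Example call through the dispatcher (node keys illustrative):
//
//     dispatch("memory_search", &None, serde_json::json!({
//         "keys": ["node-a", "node-b"],
//         "max_hops": 2,
//         "limit": 10,
//     })).await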
pub fn journal_tools() -> [super::Tool; 3] {
use super::Tool;
[
Tool { name: "journal_tail",
description: "Read the last N entries at a given level.",
parameters_json: r#"{
"type": "object",
"properties": {
"count": {"type": "integer", "description": "Number of entries", "default": 1},
"level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0},
"format": {"type": "string", "description": "compact or full (with content)", "default": "full"},
"after": {"type": "string", "description": "Only entries after this date (YYYY-MM-DD)"}
}
}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("journal_tail", &a, v).await })) },
Tool { name: "journal_new", description: "Start a new journal/digest entry.",
parameters_json: r#"{
"type": "object",
"properties": {
"name": {"type": "string", "description": "Short node name (becomes the key)"},
"title": {"type": "string", "description": "Descriptive title"},
"body": {"type": "string", "description": "Entry body"},
"level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0}
},
"required": ["name", "title", "body"]
}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("journal_new", &a, v).await })) },
Tool { name: "journal_update", description: "Append text to the most recent entry at a level.",
parameters_json: r#"{
"type": "object",
"properties": {
"body": {"type": "string", "description": "Text to append"},
"level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0}
},
"required": ["body"]
}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("journal_update", &a, v).await })) },
]
}
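// Example journal_new call (values illustrative):
//
//     dispatch("journal_new", &None, serde_json::json!({
//         "name": "morning walk",
//         "title": "Morning walk",
//         "body": "Clear sky; long loop around the park.",
//         "level": 0,
//     })).await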
// ── Memory tools ───────────────────────────────────────────────
async fn render(args: &serde_json::Value) -> Result<String> {
let key = get_str(args, "key")?;
let raw = args.get("raw").and_then(|v| v.as_bool()).unwrap_or(false);
let arc = cached_store().await?;
let store = arc.lock().await;
let node = MemoryNode::from_store(&store, key)
.ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
if raw {
Ok(node.content)
} else {
Ok(node.render())
}
}
async fn write(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
let key = get_str(args, "key")?;
let content = get_str(args, "content")?;
let prov = get_provenance(agent, args).await;
let arc = cached_store().await?;
let mut store = arc.lock().await;
let result = store.upsert_provenance(key, content, &prov)
.map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
Ok(format!("{} '{}'", result, key))
}
async fn search(args: &serde_json::Value) -> Result<String> {
let keys: Vec<String> = args.get("keys")
.and_then(|v| v.as_array())
.map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
.unwrap_or_default();
if keys.is_empty() {
anyhow::bail!("memory_search requires at least one seed key");
}
// Optional params with defaults
let max_hops = args.get("max_hops").and_then(|v| v.as_u64()).unwrap_or(3) as u32;
let edge_decay = args.get("edge_decay").and_then(|v| v.as_f64()).unwrap_or(0.3);
let min_activation = args.get("min_activation").and_then(|v| v.as_f64()).unwrap_or(0.01);
let limit = args.get("limit").and_then(|v| v.as_u64()).unwrap_or(20) as usize;
let arc = cached_store().await?;
let store = arc.lock().await;
let graph = crate::graph::build_graph_fast(&*store);
let seeds: Vec<(String, f64)> = keys.iter()
.filter_map(|k| {
let resolved = store.resolve_key(k).ok()?;
Some((resolved, 1.0))
})
.collect();
if seeds.is_empty() {
anyhow::bail!("no valid seed keys found");
}
let seed_set: std::collections::HashSet<&str> = seeds.iter()
.map(|(k, _)| k.as_str()).collect();
let results = crate::search::spreading_activation(
&seeds, &graph, &*store,
max_hops, edge_decay, min_activation,
);
Ok(results.iter()
.filter(|(k, _)| !seed_set.contains(k.as_str()))
.take(limit)
.map(|(key, score)| format!(" {:.2} {}", score, key))
.collect::<Vec<_>>().join("\n"))
}
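// Worked example of the defaults above (a sketch, assuming
// spreading_activation attenuates multiplicatively per hop): a seed fires
// at 1.0, a 1-hop neighbor receives 1.0 * 0.3 = 0.3, a 2-hop neighbor
// 0.09, and a 3-hop neighbor 0.027, still above the 0.01 cutoff; the
// default max_hops of 3 is roughly where activation dies out anyway.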
async fn links(args: &serde_json::Value) -> Result<String> {
let key = get_str(args, "key")?;
let arc = cached_store().await?;
let store = arc.lock().await;
let node = MemoryNode::from_store(&store, key)
.ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
let mut out = format!("Neighbors of '{}':\n", key);
for (target, strength, is_new) in &node.links {
let tag = if *is_new { " (new)" } else { "" };
out.push_str(&format!(" ({:.2}) {}{}\n", strength, target, tag));
}
Ok(out)
}
async fn link_set(args: &serde_json::Value) -> Result<String> {
let arc = cached_store().await?;
let mut store = arc.lock().await;
let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
let strength = get_f64(args, "strength")? as f32;
let old = store.set_link_strength(&s, &t, strength).map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
Ok(format!("{}{} strength {:.2}{:.2}", s, t, old, strength))
}
async fn link_add(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
let arc = cached_store().await?;
let mut store = arc.lock().await;
let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
let prov = get_provenance(agent, args).await;
let strength = store.add_link(&s, &t, &prov).map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
Ok(format!("linked {}{} (strength={:.2})", s, t, strength))
}
async fn delete(args: &serde_json::Value) -> Result<String> {
let key = get_str(args, "key")?;
let arc = cached_store().await?;
let mut store = arc.lock().await;
let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
store.delete_node(&resolved).map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
Ok(format!("deleted {}", resolved))
}
async fn history(args: &serde_json::Value) -> Result<String> {
let key = get_str(args, "key")?;
let full = args.get("full").and_then(|v| v.as_bool()).unwrap_or(false);
let arc = cached_store().await?;
let store = arc.lock().await;
let key = store.resolve_key(key).unwrap_or_else(|_| key.to_string());
drop(store);
let path = crate::store::nodes_path();
if !path.exists() {
anyhow::bail!("No node log found");
}
use std::io::BufReader;
let file = std::fs::File::open(&path)
.map_err(|e| anyhow::anyhow!("open {}: {}", path.display(), e))?;
let mut reader = BufReader::new(file);
let mut versions: Vec<crate::store::Node> = Vec::new();
while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
let log = msg.get_root::<crate::memory_capnp::node_log::Reader>()
.map_err(|e| anyhow::anyhow!("read log: {}", e))?;
for node_reader in log.get_nodes()
.map_err(|e| anyhow::anyhow!("get nodes: {}", e))? {
let node = crate::store::Node::from_capnp_migrate(node_reader)
.map_err(|e| anyhow::anyhow!("{}", e))?;
if node.key == key {
versions.push(node);
}
}
}
if versions.is_empty() {
anyhow::bail!("No history found for '{}'", key);
}
let mut out = format!("{} versions of '{}':\n\n", versions.len(), key);
for node in &versions {
let ts = crate::store::format_datetime(node.timestamp);
let deleted = if node.deleted { " DELETED" } else { "" };
if full {
out.push_str(&format!("=== v{} {} {}{} w={:.3} {}b ===\n",
node.version, ts, node.provenance, deleted, node.weight, node.content.len()));
out.push_str(&node.content);
out.push('\n');
} else {
let preview = crate::util::first_n_chars(&node.content, 120).replace('\n', "\\n");
out.push_str(&format!("v{:<3} {} {:24} w={:.3} {}b{}\n {}\n",
node.version, ts, node.provenance, node.weight, node.content.len(), deleted, preview));
}
}
Ok(out)
}
async fn weight_set(args: &serde_json::Value) -> Result<String> {
let arc = cached_store().await?;
let mut store = arc.lock().await;
let key = store.resolve_key(get_str(args, "key")?).map_err(|e| anyhow::anyhow!("{}", e))?;
let weight = get_f64(args, "weight")? as f32;
let (old, new) = store.set_weight(&key, weight).map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
Ok(format!("weight {} {:.2}{:.2}", key, old, new))
}
async fn rename(args: &serde_json::Value) -> Result<String> {
let old_key = get_str(args, "old_key")?;
let new_key = get_str(args, "new_key")?;
let arc = cached_store().await?;
let mut store = arc.lock().await;
let resolved = store.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?;
store.rename_node(&resolved, new_key).map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
Ok(format!("Renamed '{}' → '{}'", resolved, new_key))
}
async fn supersede(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
let old_key = get_str(args, "old_key")?;
let new_key = get_str(args, "new_key")?;
let reason = args.get("reason").and_then(|v| v.as_str()).unwrap_or("superseded");
let arc = cached_store().await?;
let mut store = arc.lock().await;
let content = store.nodes.get(old_key)
.map(|n| n.content.clone())
.ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?;
let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}",
new_key, reason, content.trim());
let prov = get_provenance(agent, args).await;
store.upsert_provenance(old_key, &notice, &prov)
.map_err(|e| anyhow::anyhow!("{}", e))?;
store.set_weight(old_key, 0.01).map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
Ok(format!("superseded {}{} ({})", old_key, new_key, reason))
}
/// Convert a list of keys to ReplayItems with priority and graph metrics.
pub fn keys_to_replay_items(
store: &Store,
keys: &[String],
graph: &Graph,
) -> Vec<ReplayItem> {
keys.iter()
.filter_map(|key| {
let node = store.nodes.get(key)?;
let priority = consolidation_priority(store, key, graph, None);
let cc = graph.clustering_coefficient(key);
Some(ReplayItem {
key: key.clone(),
priority,
interval_days: node.spaced_repetition_interval,
emotion: node.emotion,
cc,
classification: "unknown",
outlier_score: 0.0,
})
})
.collect()
}
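// Illustrative use, mirroring the "full" branch of memory_query below:
//
//     let items = keys_to_replay_items(&store, &keys, &graph);
//     let text = crate::subconscious::prompts::format_nodes_section(&store, &items, &graph);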
async fn query(args: &serde_json::Value) -> Result<String> {
let query_str = get_str(args, "query")?;
let format = args.get("format").and_then(|v| v.as_str()).unwrap_or("compact");
let arc = cached_store().await?;
let store = arc.lock().await;
let graph = store.build_graph();
match format {
"full" => {
// Rich output with full content, graph metrics, hub analysis
let results = crate::query_parser::execute_query(&store, &graph, query_str)
.map_err(|e| anyhow::anyhow!("{}", e))?;
let keys: Vec<String> = results.into_iter().map(|r| r.key).collect();
let items = keys_to_replay_items(&store, &keys, &graph);
Ok(crate::subconscious::prompts::format_nodes_section(&store, &items, &graph))
}
_ => {
// Compact output: handles count, select, and all expression types
crate::query_parser::query_to_string(&store, &graph, query_str)
.map_err(|e| anyhow::anyhow!("{}", e))
}
}
}
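// Example query expressions, in the syntax journal_tail builds below:
//
//     all | type:episodic | sort:timestamp | limit:5
//     all | type:daily | sort:timestamp | age:<86400 | limit:1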
// ── Journal tools ──────────────────────────────────────────────
async fn journal_tail(args: &serde_json::Value) -> Result<String> {
let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(1);
let level = args.get("level").and_then(|v| v.as_u64()).unwrap_or(0);
let format = args.get("format").and_then(|v| v.as_str()).unwrap_or("full");
let after = args.get("after").and_then(|v| v.as_str());
let type_name = match level {
0 => "episodic",
1 => "daily",
2 => "weekly",
3 => "monthly",
_ => return Err(anyhow::anyhow!("invalid level: {} (0=journal, 1=daily, 2=weekly, 3=monthly)", level)),
};
let mut q = format!("all | type:{} | sort:timestamp", type_name);
if let Some(date) = after {
// Convert date to age in seconds
if let Ok(nd) = chrono::NaiveDate::parse_from_str(date, "%Y-%m-%d") {
let ts = nd.and_hms_opt(0, 0, 0).unwrap().and_utc().timestamp();
let age = chrono::Utc::now().timestamp() - ts;
q.push_str(&format!(" | age:<{}", age));
}
}
q.push_str(&format!(" | limit:{}", count));
query(&serde_json::json!({"query": q, "format": format})).await
}
fn level_to_node_type(level: i64) -> crate::store::NodeType {
match level {
1 => crate::store::NodeType::EpisodicDaily,
2 => crate::store::NodeType::EpisodicWeekly,
3 => crate::store::NodeType::EpisodicMonthly,
_ => crate::store::NodeType::EpisodicSession,
}
}
async fn journal_new(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
let name = get_str(args, "name")?;
let title = get_str(args, "title")?;
let body = get_str(args, "body")?;
let level = args.get("level").and_then(|v| v.as_i64()).unwrap_or(0);
let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
let content = format!("## {} — {}\n\n{}", ts, title, body);
let base_key: String = name.split_whitespace()
.map(|w| w.to_lowercase()
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
.collect::<String>())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join("-");
let base_key = if base_key.len() > 80 { &base_key[..80] } else { base_key.as_str() };
let arc = cached_store().await?;
let mut store = arc.lock().await;
let key = if store.nodes.contains_key(base_key) {
let mut n = 2;
loop {
let candidate = format!("{}-{}", base_key, n);
if !store.nodes.contains_key(&candidate) { break candidate; }
n += 1;
}
} else {
base_key.to_string()
};
let mut node = crate::store::new_node(&key, &content);
node.node_type = level_to_node_type(level);
node.provenance = get_provenance(agent, args).await;
store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
let word_count = body.split_whitespace().count();
Ok(format!("New entry '{}' ({} words)", title, word_count))
}
async fn journal_update(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
let body = get_str(args, "body")?;
let level = args.get("level").and_then(|v| v.as_i64()).unwrap_or(0);
let node_type = level_to_node_type(level);
let arc = cached_store().await?;
let mut store = arc.lock().await;
let latest_key = store.nodes.values()
.filter(|n| n.node_type == node_type)
.max_by_key(|n| n.created_at)
.map(|n| n.key.clone());
let Some(key) = latest_key else {
anyhow::bail!("no entry at level {} to update — use journal_new first", level);
};
let existing = store.nodes.get(&key).unwrap().content.clone();
let new_content = format!("{}\n\n{}", existing.trim_end(), body);
let prov = get_provenance(agent, args).await;
store.upsert_provenance(&key, &new_content, &prov)
.map_err(|e| anyhow::anyhow!("{}", e))?;
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
let word_count = body.split_whitespace().count();
Ok(format!("Updated last entry (+{} words)", word_count))
}
// ── Graph tools ───────────────────────────────────────────────
async fn graph_topology() -> Result<String> {
let arc = cached_store().await?;
let store = arc.lock().await;
let graph = store.build_graph();
Ok(crate::subconscious::prompts::format_topology_header(&store, &graph))
}
async fn graph_health() -> Result<String> {
let arc = cached_store().await?;
let store = arc.lock().await;
let graph = store.build_graph();
Ok(crate::subconscious::prompts::format_health_section(&store, &graph))
}
async fn graph_communities(args: &serde_json::Value) -> Result<String> {
let top_n = args.get("top_n").and_then(|v| v.as_u64()).unwrap_or(10) as usize;
let min_size = args.get("min_size").and_then(|v| v.as_u64()).unwrap_or(3) as usize;
let arc = cached_store().await?;
let store = arc.lock().await;
let g = store.build_graph();
let infos = g.community_info();
let total = infos.len();
let shown: Vec<_> = infos.into_iter()
.filter(|c| c.size >= min_size)
.take(top_n)
.collect();
use std::fmt::Write;
let mut out = String::new();
writeln!(out, "{} communities total ({} with size >= {})\n",
total, shown.len(), min_size).ok();
writeln!(out, "{:<6} {:>5} {:>7} {:>7} members", "id", "size", "iso", "cross").ok();
writeln!(out, "{}", "-".repeat(70)).ok();
for c in &shown {
let preview: Vec<&str> = c.members.iter()
.take(5)
.map(|s| s.as_str())
.collect();
let more = if c.size > 5 {
format!(" +{}", c.size - 5)
} else {
String::new()
};
writeln!(out, "{:<6} {:>5} {:>6.0}% {:>7} {}{}",
c.id, c.size, c.isolation * 100.0, c.cross_edges,
preview.join(", "), more).ok();
}
Ok(out)
}
async fn graph_normalize_strengths(args: &serde_json::Value) -> Result<String> {
let apply = args.get("apply").and_then(|v| v.as_bool()).unwrap_or(false);
let arc = cached_store().await?;
let mut store = arc.lock().await;
let graph = store.build_graph();
let strengths = graph.jaccard_strengths();
// Build lookup from (source_key, target_key) → new_strength
let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
for (a, b, s) in &strengths {
updates.insert((a.clone(), b.clone()), *s);
updates.insert((b.clone(), a.clone()), *s);
}
let mut changed = 0usize;
let mut unchanged = 0usize;
let mut temporal_skipped = 0usize;
let mut delta_sum: f64 = 0.0;
let mut buckets = [0usize; 10];
for rel in &mut store.relations {
if rel.deleted { continue; }
if rel.strength == 1.0 && rel.rel_type == crate::store::RelationType::Auto {
temporal_skipped += 1;
continue;
}
if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
let old_s = rel.strength;
let delta = (new_s - old_s).abs();
if delta > 0.001 {
delta_sum += delta as f64;
if apply { rel.strength = new_s; }
changed += 1;
} else {
unchanged += 1;
}
let bucket = ((new_s * 10.0) as usize).min(9);
buckets[bucket] += 1;
}
}
use std::fmt::Write;
let mut out = String::new();
writeln!(out, "Normalize link strengths (Jaccard similarity)").ok();
writeln!(out, " Total edges in graph: {}", strengths.len()).ok();
writeln!(out, " Would change: {}", changed).ok();
writeln!(out, " Unchanged: {}", unchanged).ok();
writeln!(out, " Temporal (skipped): {}", temporal_skipped).ok();
if changed > 0 {
writeln!(out, " Avg delta: {:.3}", delta_sum / changed as f64).ok();
}
writeln!(out).ok();
writeln!(out, " Strength distribution:").ok();
for (i, &count) in buckets.iter().enumerate() {
let lo = i as f32 / 10.0;
let hi = lo + 0.1;
let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
writeln!(out, " {:.1}-{:.1}: {:5} {}", lo, hi, count, bar).ok();
}
if apply {
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
writeln!(out, "\nApplied {} strength updates.", changed).ok();
} else {
writeln!(out, "\nDry run. Pass apply:true to write changes.").ok();
}
Ok(out)
}
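// For reference, the Jaccard strength assumed to come back from
// jaccard_strengths for an edge (a, b), over neighbor sets N(·):
//
//     J(a, b) = |N(a) ∩ N(b)| / |N(a) ∪ N(b)|
//
// Edges inside tight cliques normalize toward 1.0; bridges between
// otherwise-unrelated nodes normalize toward 0.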
async fn graph_link_impact(args: &serde_json::Value) -> Result<String> {
let source = get_str(args, "source")?;
let target = get_str(args, "target")?;
let arc = cached_store().await?;
let store = arc.lock().await;
let source = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
let target = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
let g = store.build_graph();
let impact = g.link_impact(&source, &target);
use std::fmt::Write;
let mut out = String::new();
writeln!(out, "Link impact: {} → {}", source, target).ok();
writeln!(out, " Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg).ok();
writeln!(out, " Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community).ok();
writeln!(out, " ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target).ok();
writeln!(out, " ΔGini: {:+.6}", impact.delta_gini).ok();
writeln!(out, " Assessment: {}", impact.assessment).ok();
Ok(out)
}
async fn graph_hubs(args: &serde_json::Value) -> Result<String> {
let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(20) as usize;
let arc = cached_store().await?;
let store = arc.lock().await;
let graph = store.build_graph();
// Top hub nodes by degree, spread apart (skip neighbors of already-selected hubs)
let mut hubs: Vec<(String, usize)> = store.nodes.iter()
.filter(|(k, n)| !n.deleted && !k.starts_with('_'))
.map(|(k, _)| {
let degree = graph.neighbors(k).len();
(k.clone(), degree)
})
.collect();
hubs.sort_by(|a, b| b.1.cmp(&a.1));
let mut selected = Vec::new();
let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
for (key, degree) in &hubs {
if seen.contains(key) { continue; }
selected.push(format!(" - {} (degree {})", key, degree));
// Mark neighbors as seen so we pick far-apart hubs
for (nbr, _) in graph.neighbors(key) {
seen.insert(nbr.clone());
}
seen.insert(key.clone());
if selected.len() >= count { break; }
}
Ok(format!("## Hub nodes (link targets)\n\n{}", selected.join("\n")))
}
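// Worked example of the spread-apart pass above: with degrees a=10, b=9,
// c=8 where b neighbors a, the loop selects a, marks b as seen via a's
// neighborhood, skips b, then selects c. The result favors hubs from
// distinct regions of the graph rather than one dense cluster.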
async fn graph_trace(args: &serde_json::Value) -> Result<String> {
let key = get_str(args, "key")?;
let arc = cached_store().await?;
let store = arc.lock().await;
let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
let g = store.build_graph();
let node = store.nodes.get(&resolved)
.ok_or_else(|| anyhow::anyhow!("Node not found: {}", resolved))?;
use std::fmt::Write;
let mut out = String::new();
writeln!(out, "=== {} ===", resolved).ok();
writeln!(out, "Type: {:?} Weight: {:.2}", node.node_type, node.weight).ok();
if !node.source_ref.is_empty() {
writeln!(out, "Source: {}", node.source_ref).ok();
}
let preview = crate::util::truncate(&node.content, 200, "...");
writeln!(out, "\n{}\n", preview).ok();
// Walk neighbors, grouped by node type
let neighbors = g.neighbors(&resolved);
let mut episodic_session = Vec::new();
let mut episodic_daily = Vec::new();
let mut episodic_weekly = Vec::new();
let mut semantic = Vec::new();
for (n, strength) in &neighbors {
if let Some(nnode) = store.nodes.get(n.as_str()) {
let entry = (n.as_str(), *strength, nnode);
match nnode.node_type {
crate::store::NodeType::EpisodicSession => episodic_session.push(entry),
crate::store::NodeType::EpisodicDaily => episodic_daily.push(entry),
crate::store::NodeType::EpisodicWeekly
| crate::store::NodeType::EpisodicMonthly => episodic_weekly.push(entry),
crate::store::NodeType::Semantic => semantic.push(entry),
}
}
}
if !episodic_weekly.is_empty() {
writeln!(out, "Weekly digests:").ok();
for (k, s, n) in &episodic_weekly {
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
writeln!(out, " [{:.2}] {} — {}", s, k, preview).ok();
}
}
if !episodic_daily.is_empty() {
writeln!(out, "Daily digests:").ok();
for (k, s, n) in &episodic_daily {
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
writeln!(out, " [{:.2}] {} — {}", s, k, preview).ok();
}
}
if !episodic_session.is_empty() {
writeln!(out, "Session entries:").ok();
for (k, s, n) in &episodic_session {
let preview = crate::util::first_n_chars(
n.content.lines()
.find(|l| !l.is_empty() && !l.starts_with("<!--"))
.unwrap_or(""),
80);
writeln!(out, " [{:.2}] {}", s, k).ok();
if !n.source_ref.is_empty() {
writeln!(out, " ↳ source: {}", n.source_ref).ok();
}
writeln!(out, " {}", preview).ok();
}
}
if !semantic.is_empty() {
writeln!(out, "Semantic links:").ok();
for (k, s, _) in &semantic {
writeln!(out, " [{:.2}] {}", s, k).ok();
}
}
writeln!(out, "\nLinks: {} session, {} daily, {} weekly, {} semantic",
episodic_session.len(), episodic_daily.len(),
episodic_weekly.len(), semantic.len()).ok();
Ok(out)
}