tools/memory: one function per tool
Split the monolithic dispatch(name, args) into individual public functions (render, write, search, links, link_set, link_add, used, weight_set, rename, supersede, query, output, journal_tail, journal_new, journal_update) each with a matching _def() function. The old dispatch() remains as a thin match for backward compat until the Tool registry replaces it. Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
parent
943f42d876
commit
1a13534946
2 changed files with 394 additions and 299 deletions
|
|
@ -1,6 +1,7 @@
|
|||
// tools/memory.rs — Native memory graph operations
|
||||
//
|
||||
// Direct library calls into the store — no subprocess spawning.
|
||||
// One function per tool for use in the Tool registry.
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use serde_json::json;
|
||||
|
|
@ -10,305 +11,7 @@ use crate::store::StoreView;
|
|||
use super::ToolDef;
|
||||
use crate::store::Store;
|
||||
|
||||
pub fn definitions() -> Vec<ToolDef> {
|
||||
vec![
|
||||
ToolDef::new("memory_render",
|
||||
"Read a memory node's content and links.",
|
||||
json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]})),
|
||||
ToolDef::new("memory_write",
|
||||
"Create or update a memory node.",
|
||||
json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"},"content":{"type":"string","description":"Full content (markdown)"}},"required":["key","content"]})),
|
||||
ToolDef::new("memory_search",
|
||||
"Search the memory graph via spreading activation. Give 2-4 seed \
|
||||
node keys related to what you're looking for. Returns nodes ranked \
|
||||
by how strongly they connect to your seeds — bridging nodes score \
|
||||
highest. This finds conceptual connections, not just keyword matches.",
|
||||
json!({"type":"object","properties":{"keys":{"type":"array","items":{"type":"string"},"description":"Seed node keys to activate from"}},"required":["keys"]})),
|
||||
ToolDef::new("memory_links",
|
||||
"Show a node's neighbors with link strengths.",
|
||||
json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]})),
|
||||
ToolDef::new("memory_link_set",
|
||||
"Set link strength between two nodes.",
|
||||
json!({"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"},"strength":{"type":"number","description":"0.01 to 1.0"}},"required":["source","target","strength"]})),
|
||||
ToolDef::new("memory_link_add",
|
||||
"Add a new link between two nodes.",
|
||||
json!({"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"}},"required":["source","target"]})),
|
||||
ToolDef::new("memory_used",
|
||||
"Mark a node as useful (boosts weight).",
|
||||
json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]})),
|
||||
ToolDef::new("memory_weight_set",
|
||||
"Set a node's weight directly (0.01 to 1.0).",
|
||||
json!({"type":"object","properties":{"key":{"type":"string"},"weight":{"type":"number","description":"0.01 to 1.0"}},"required":["key","weight"]})),
|
||||
ToolDef::new("memory_rename",
|
||||
"Rename a node key in place. Same content, same links, new key.",
|
||||
json!({"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"}},"required":["old_key","new_key"]})),
|
||||
ToolDef::new("memory_supersede",
|
||||
"Mark a node as superseded by another (sets weight to 0.01).",
|
||||
json!({"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"},"reason":{"type":"string"}},"required":["old_key","new_key"]})),
|
||||
ToolDef::new("memory_query",
|
||||
"Run a structured query against the memory graph. Supports filtering, \
|
||||
sorting, field selection. Examples: \"degree > 10 | sort weight | limit 5\", \
|
||||
\"neighbors('identity') | select strength\", \"key ~ 'journal.*' | count\"",
|
||||
json!({"type":"object","properties":{"query":{"type":"string","description":"Query expression"}},"required":["query"]})),
|
||||
ToolDef::new("output",
|
||||
"Produce a named output value. Use this to pass structured results \
|
||||
between steps — subsequent prompts can see these in the conversation history.",
|
||||
json!({"type":"object","properties":{
|
||||
"key":{"type":"string","description":"Output name (e.g. 'relevant_memories')"},
|
||||
"value":{"type":"string","description":"Output value"}
|
||||
},"required":["key","value"]})),
|
||||
]
|
||||
}
|
||||
|
||||
/// Journal-only tools — only given to the journal agent
|
||||
pub fn journal_definitions() -> Vec<ToolDef> {
|
||||
vec![
|
||||
ToolDef::new("journal_tail",
|
||||
"Read the last N journal entries (default 1).",
|
||||
json!({"type":"object","properties":{
|
||||
"count":{"type":"integer","description":"Number of entries (default 1)"}
|
||||
}})),
|
||||
ToolDef::new("journal_new",
|
||||
"Start a new journal entry.",
|
||||
json!({"type":"object","properties":{
|
||||
"name":{"type":"string","description":"Short node name (becomes the key, e.g. 'morning-agent-breakthrough')"},
|
||||
"title":{"type":"string","description":"Descriptive title for the heading (e.g. 'Morning intimacy and the agent breakthrough')"},
|
||||
"body":{"type":"string","description":"Entry body (2-3 paragraphs)"}
|
||||
},"required":["name","title","body"]})),
|
||||
ToolDef::new("journal_update",
|
||||
"Append text to the most recent journal entry (same thread continuing).",
|
||||
json!({"type":"object","properties":{
|
||||
"body":{"type":"string","description":"Text to append to the last entry"}
|
||||
},"required":["body"]})),
|
||||
]
|
||||
}
|
||||
|
||||
/// Dispatch a memory tool call. Direct library calls, no subprocesses.
|
||||
pub fn dispatch(name: &str, args: &serde_json::Value, provenance: Option<&str>) -> Result<String> {
|
||||
let prov = provenance.unwrap_or("manual");
|
||||
match name {
|
||||
"memory_render" => {
|
||||
let key = get_str(args, "key")?;
|
||||
Ok(MemoryNode::load(key)
|
||||
.ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?
|
||||
.render())
|
||||
}
|
||||
"memory_write" => {
|
||||
let key = get_str(args, "key")?;
|
||||
let content = get_str(args, "content")?;
|
||||
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let result = store.upsert_provenance(key, content, prov)
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("{} '{}'", result, key))
|
||||
}
|
||||
"memory_search" => {
|
||||
let keys: Vec<String> = args.get("keys")
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
|
||||
.unwrap_or_default();
|
||||
if keys.is_empty() {
|
||||
anyhow::bail!("memory_search requires at least one seed key");
|
||||
}
|
||||
let store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let graph = crate::graph::build_graph_fast(&store);
|
||||
let params = store.params();
|
||||
let seeds: Vec<(String, f64)> = keys.iter()
|
||||
.filter_map(|k| {
|
||||
let resolved = store.resolve_key(k).ok()?;
|
||||
Some((resolved, 1.0))
|
||||
})
|
||||
.collect();
|
||||
if seeds.is_empty() {
|
||||
anyhow::bail!("no valid seed keys found");
|
||||
}
|
||||
let seed_set: std::collections::HashSet<&str> = seeds.iter()
|
||||
.map(|(k, _)| k.as_str()).collect();
|
||||
let results = crate::search::spreading_activation(
|
||||
&seeds, &graph, &store,
|
||||
params.max_hops, params.edge_decay, params.min_activation,
|
||||
);
|
||||
Ok(results.iter()
|
||||
.filter(|(k, _)| !seed_set.contains(k.as_str()))
|
||||
.take(20)
|
||||
.map(|(key, score)| format!(" {:.2} {}", score, key))
|
||||
.collect::<Vec<_>>().join("\n"))
|
||||
}
|
||||
"memory_links" => {
|
||||
let key = get_str(args, "key")?;
|
||||
let node = MemoryNode::load(key)
|
||||
.ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
|
||||
let mut out = format!("Neighbors of '{}':\n", key);
|
||||
for (target, strength, is_new) in &node.links {
|
||||
let tag = if *is_new { " (new)" } else { "" };
|
||||
out.push_str(&format!(" ({:.2}) {}{}\n", strength, target, tag));
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
"memory_link_set" | "memory_link_add" | "memory_used" | "memory_weight_set" => {
|
||||
with_store(name, args, prov)
|
||||
}
|
||||
"memory_rename" => {
|
||||
let old_key = get_str(args, "old_key")?;
|
||||
let new_key = get_str(args, "new_key")?;
|
||||
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let resolved = store.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.rename_node(&resolved, new_key).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("Renamed '{}' → '{}'", resolved, new_key))
|
||||
}
|
||||
"memory_supersede" => {
|
||||
let old_key = get_str(args, "old_key")?;
|
||||
let new_key = get_str(args, "new_key")?;
|
||||
let reason = args.get("reason").and_then(|v| v.as_str()).unwrap_or("superseded");
|
||||
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let content = store.nodes.get(old_key)
|
||||
.map(|n| n.content.clone())
|
||||
.ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?;
|
||||
let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}",
|
||||
new_key, reason, content.trim());
|
||||
store.upsert_provenance(old_key, ¬ice, prov)
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.set_weight(old_key, 0.01).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("superseded {} → {} ({})", old_key, new_key, reason))
|
||||
}
|
||||
"memory_query" => {
|
||||
let query = get_str(args, "query")?;
|
||||
let store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let graph = store.build_graph();
|
||||
crate::query_parser::query_to_string(&store, &graph, query)
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))
|
||||
}
|
||||
"output" => {
|
||||
let key = get_str(args, "key")?;
|
||||
if key.starts_with("pid-") || key.contains('/') || key.contains("..") {
|
||||
anyhow::bail!("invalid output key: {}", key);
|
||||
}
|
||||
let value = get_str(args, "value")?;
|
||||
let dir = std::env::var("POC_AGENT_OUTPUT_DIR")
|
||||
.map_err(|_| anyhow::anyhow!("no output directory set"))?;
|
||||
let path = std::path::Path::new(&dir).join(key);
|
||||
std::fs::write(&path, value)
|
||||
.with_context(|| format!("writing output {}", path.display()))?;
|
||||
Ok(format!("{}: {}", key, value))
|
||||
}
|
||||
"journal_tail" => {
|
||||
let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(1) as usize;
|
||||
let store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let mut entries: Vec<&crate::store::Node> = store.nodes.values()
|
||||
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
|
||||
.collect();
|
||||
// Sort by creation time (immutable), not update time
|
||||
entries.sort_by_key(|n| n.created_at);
|
||||
let start = entries.len().saturating_sub(count);
|
||||
if entries[start..].is_empty() {
|
||||
Ok("(no journal entries)".into())
|
||||
} else {
|
||||
Ok(entries[start..].iter()
|
||||
.map(|n| n.content.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n\n"))
|
||||
}
|
||||
}
|
||||
"journal_new" => {
|
||||
let name = get_str(args, "name")?;
|
||||
let title = get_str(args, "title")?;
|
||||
let body = get_str(args, "body")?;
|
||||
let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
|
||||
let content = format!("## {} — {}\n\n{}", ts, title, body);
|
||||
|
||||
let base_key: String = name.split_whitespace()
|
||||
.map(|w| w.to_lowercase()
|
||||
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
|
||||
.collect::<String>())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect::<Vec<_>>()
|
||||
.join("-");
|
||||
let base_key = if base_key.len() > 80 { &base_key[..80] } else { base_key.as_str() };
|
||||
|
||||
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
|
||||
// Dedup: append -2, -3, etc. if the key already exists
|
||||
let key = if store.nodes.contains_key(base_key) {
|
||||
let mut n = 2;
|
||||
loop {
|
||||
let candidate = format!("{}-{}", base_key, n);
|
||||
if !store.nodes.contains_key(&candidate) {
|
||||
break candidate;
|
||||
}
|
||||
n += 1;
|
||||
}
|
||||
} else {
|
||||
base_key.to_string()
|
||||
};
|
||||
let mut node = crate::store::new_node(&key, &content);
|
||||
node.node_type = crate::store::NodeType::EpisodicSession;
|
||||
node.provenance = prov.to_string();
|
||||
store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let word_count = body.split_whitespace().count();
|
||||
Ok(format!("New entry '{}' ({} words)", title, word_count))
|
||||
}
|
||||
"journal_update" => {
|
||||
let body = get_str(args, "body")?;
|
||||
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
// Find most recent EpisodicSession by creation time
|
||||
let latest_key = store.nodes.values()
|
||||
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
|
||||
.max_by_key(|n| n.created_at)
|
||||
.map(|n| n.key.clone());
|
||||
let Some(key) = latest_key else {
|
||||
anyhow::bail!("no journal entry to update — use journal_new first");
|
||||
};
|
||||
let existing = store.nodes.get(&key).unwrap().content.clone();
|
||||
let new_content = format!("{}\n\n{}", existing.trim_end(), body);
|
||||
store.upsert_provenance(&key, &new_content, prov)
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let word_count = body.split_whitespace().count();
|
||||
Ok(format!("Updated last entry (+{} words)", word_count))
|
||||
}
|
||||
_ => anyhow::bail!("Unknown memory tool: {}", name),
|
||||
}
|
||||
}
|
||||
|
||||
/// Store mutations that follow the same pattern: load, resolve, mutate, save.
|
||||
fn with_store(name: &str, args: &serde_json::Value, prov: &str) -> Result<String> {
|
||||
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let msg = match name {
|
||||
"memory_link_set" => {
|
||||
let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let strength = get_f64(args, "strength")? as f32;
|
||||
let old = store.set_link_strength(&s, &t, strength).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
format!("{} ↔ {} strength {:.2} → {:.2}", s, t, old, strength)
|
||||
}
|
||||
"memory_link_add" => {
|
||||
let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let strength = store.add_link(&s, &t, prov).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
format!("linked {} → {} (strength={:.2})", s, t, strength)
|
||||
}
|
||||
"memory_used" => {
|
||||
let key = get_str(args, "key")?;
|
||||
if !store.nodes.contains_key(key) {
|
||||
anyhow::bail!("node not found: {}", key);
|
||||
}
|
||||
store.mark_used(key);
|
||||
format!("marked {} as used", key)
|
||||
}
|
||||
"memory_weight_set" => {
|
||||
let key = store.resolve_key(get_str(args, "key")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let weight = get_f64(args, "weight")? as f32;
|
||||
let (old, new) = store.set_weight(&key, weight).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
format!("weight {} {:.2} → {:.2}", key, old, new)
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(msg)
|
||||
}
|
||||
// ── Helpers ────────────────────────────────────────────────────
|
||||
|
||||
fn get_str<'a>(args: &'a serde_json::Value, name: &'a str) -> Result<&'a str> {
|
||||
args.get(name).and_then(|v| v.as_str()).context(format!("{} is required", name))
|
||||
|
|
@ -317,3 +20,380 @@ fn get_str<'a>(args: &'a serde_json::Value, name: &'a str) -> Result<&'a str> {
|
|||
/// Extract a required f64 argument by name from the tool-call JSON.
///
/// Errors with "<name> is required" when the field is absent or not a number.
fn get_f64(args: &serde_json::Value, name: &str) -> Result<f64> {
    // with_context is lazy — the error string is only built on failure,
    // unlike `.context(format!(...))` which allocates on every call.
    args.get(name)
        .and_then(|v| v.as_f64())
        .with_context(|| format!("{} is required", name))
}
|
||||
|
||||
/// Load the memory store, converting the store's error type into `anyhow`.
/// Every tool function reloads the store fresh — there is no shared handle.
fn load_store() -> Result<Store> {
    Store::load().map_err(|e| anyhow::anyhow!("{}", e))
}
|
||||
|
||||
/// Provenance tag recorded on writes made through these tools.
/// NOTE(review): hardcoded to "manual" — the `provenance` argument that
/// callers pass to `dispatch()` is no longer threaded through; confirm
/// whether per-call provenance should be restored.
fn provenance() -> &'static str { "manual" }
||||
|
||||
// ── Definitions ────────────────────────────────────────────────
|
||||
|
||||
pub fn definitions() -> Vec<ToolDef> {
|
||||
vec![
|
||||
render_def(), write_def(), search_def(), links_def(),
|
||||
link_set_def(), link_add_def(), used_def(), weight_set_def(),
|
||||
rename_def(), supersede_def(), query_def(), output_def(),
|
||||
]
|
||||
}
|
||||
|
||||
pub fn journal_definitions() -> Vec<ToolDef> {
|
||||
vec![journal_tail_def(), journal_new_def(), journal_update_def()]
|
||||
}
|
||||
|
||||
// ── Dispatch (legacy — to be replaced by Tool registry) ───────
|
||||
|
||||
pub fn dispatch(name: &str, args: &serde_json::Value, provenance: Option<&str>) -> Result<String> {
|
||||
match name {
|
||||
"memory_render" => render(args),
|
||||
"memory_write" => write(args),
|
||||
"memory_search" => search(args),
|
||||
"memory_links" => links(args),
|
||||
"memory_link_set" => link_set(args),
|
||||
"memory_link_add" => link_add(args),
|
||||
"memory_used" => used(args),
|
||||
"memory_weight_set" => weight_set(args),
|
||||
"memory_rename" => rename(args),
|
||||
"memory_supersede" => supersede(args),
|
||||
"memory_query" => query(args),
|
||||
"output" => output(args),
|
||||
"journal_tail" => journal_tail(args),
|
||||
"journal_new" => journal_new(args),
|
||||
"journal_update" => journal_update(args),
|
||||
_ => anyhow::bail!("Unknown memory tool: {}", name),
|
||||
}
|
||||
}
|
||||
|
||||
// ── Memory tools ───────────────────────────────────────────────
|
||||
|
||||
/// Definition for the `memory_render` tool (schema: required string `key`).
pub fn render_def() -> ToolDef {
    ToolDef::new("memory_render",
        "Read a memory node's content and links.",
        json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}))
}
|
||||
|
||||
pub fn render(args: &serde_json::Value) -> Result<String> {
|
||||
let key = get_str(args, "key")?;
|
||||
Ok(MemoryNode::load(key)
|
||||
.ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?
|
||||
.render())
|
||||
}
|
||||
|
||||
/// Definition for the `memory_write` tool (required: `key`, `content`).
pub fn write_def() -> ToolDef {
    ToolDef::new("memory_write",
        "Create or update a memory node.",
        json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"},"content":{"type":"string","description":"Full content (markdown)"}},"required":["key","content"]}))
}
|
||||
|
||||
pub fn write(args: &serde_json::Value) -> Result<String> {
|
||||
let key = get_str(args, "key")?;
|
||||
let content = get_str(args, "content")?;
|
||||
let mut store = load_store()?;
|
||||
let result = store.upsert_provenance(key, content, provenance())
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("{} '{}'", result, key))
|
||||
}
|
||||
|
||||
/// Definition for the `memory_search` tool (required: string array `keys`).
pub fn search_def() -> ToolDef {
    ToolDef::new("memory_search",
        "Search the memory graph via spreading activation. Give 2-4 seed \
         node keys related to what you're looking for. Returns nodes ranked \
         by how strongly they connect to your seeds — bridging nodes score \
         highest. This finds conceptual connections, not just keyword matches.",
        json!({"type":"object","properties":{"keys":{"type":"array","items":{"type":"string"},"description":"Seed node keys to activate from"}},"required":["keys"]}))
}
|
||||
|
||||
/// Spreading-activation search from the given seed keys.
///
/// Resolves each seed, activates all of them at weight 1.0, then returns up
/// to 20 non-seed nodes, one per line, formatted as "score key" in whatever
/// order `spreading_activation` produces (presumably strongest first —
/// TODO confirm against the search module).
pub fn search(args: &serde_json::Value) -> Result<String> {
    // Only string entries in "keys" are accepted; other JSON types are
    // silently skipped rather than rejected.
    let keys: Vec<String> = args.get("keys")
        .and_then(|v| v.as_array())
        .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
        .unwrap_or_default();
    if keys.is_empty() {
        anyhow::bail!("memory_search requires at least one seed key");
    }
    let store = load_store()?;
    let graph = crate::graph::build_graph_fast(&store);
    let params = store.params();
    // Unresolvable seeds are dropped silently; we only error below if
    // *none* of them resolve.
    let seeds: Vec<(String, f64)> = keys.iter()
        .filter_map(|k| {
            let resolved = store.resolve_key(k).ok()?;
            Some((resolved, 1.0))
        })
        .collect();
    if seeds.is_empty() {
        anyhow::bail!("no valid seed keys found");
    }
    // The seeds themselves are excluded from the result listing below.
    let seed_set: std::collections::HashSet<&str> = seeds.iter()
        .map(|(k, _)| k.as_str()).collect();
    let results = crate::search::spreading_activation(
        &seeds, &graph, &store,
        params.max_hops, params.edge_decay, params.min_activation,
    );
    // Filter before take(20): the cap applies to non-seed results only.
    Ok(results.iter()
        .filter(|(k, _)| !seed_set.contains(k.as_str()))
        .take(20)
        .map(|(key, score)| format!(" {:.2} {}", score, key))
        .collect::<Vec<_>>().join("\n"))
}
|
||||
|
||||
/// Definition for the `memory_links` tool (required: string `key`).
pub fn links_def() -> ToolDef {
    ToolDef::new("memory_links",
        "Show a node's neighbors with link strengths.",
        json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}))
}
|
||||
|
||||
pub fn links(args: &serde_json::Value) -> Result<String> {
|
||||
let key = get_str(args, "key")?;
|
||||
let node = MemoryNode::load(key)
|
||||
.ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
|
||||
let mut out = format!("Neighbors of '{}':\n", key);
|
||||
for (target, strength, is_new) in &node.links {
|
||||
let tag = if *is_new { " (new)" } else { "" };
|
||||
out.push_str(&format!(" ({:.2}) {}{}\n", strength, target, tag));
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
/// Definition for the `memory_link_set` tool (required: `source`, `target`, `strength`).
pub fn link_set_def() -> ToolDef {
    ToolDef::new("memory_link_set",
        "Set link strength between two nodes.",
        json!({"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"},"strength":{"type":"number","description":"0.01 to 1.0"}},"required":["source","target","strength"]}))
}
|
||||
|
||||
pub fn link_set(args: &serde_json::Value) -> Result<String> {
|
||||
let mut store = load_store()?;
|
||||
let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let strength = get_f64(args, "strength")? as f32;
|
||||
let old = store.set_link_strength(&s, &t, strength).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("{} ↔ {} strength {:.2} → {:.2}", s, t, old, strength))
|
||||
}
|
||||
|
||||
/// Definition for the `memory_link_add` tool (required: `source`, `target`).
pub fn link_add_def() -> ToolDef {
    ToolDef::new("memory_link_add",
        "Add a new link between two nodes.",
        json!({"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"}},"required":["source","target"]}))
}
|
||||
|
||||
pub fn link_add(args: &serde_json::Value) -> Result<String> {
|
||||
let mut store = load_store()?;
|
||||
let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let strength = store.add_link(&s, &t, provenance()).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("linked {} → {} (strength={:.2})", s, t, strength))
|
||||
}
|
||||
|
||||
/// Definition for the `memory_used` tool (required: string `key`).
pub fn used_def() -> ToolDef {
    ToolDef::new("memory_used",
        "Mark a node as useful (boosts weight).",
        json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}))
}
|
||||
|
||||
pub fn used(args: &serde_json::Value) -> Result<String> {
|
||||
let key = get_str(args, "key")?;
|
||||
let mut store = load_store()?;
|
||||
if !store.nodes.contains_key(key) {
|
||||
anyhow::bail!("node not found: {}", key);
|
||||
}
|
||||
store.mark_used(key);
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("marked {} as used", key))
|
||||
}
|
||||
|
||||
/// Definition for the `memory_weight_set` tool (required: `key`, `weight`).
pub fn weight_set_def() -> ToolDef {
    ToolDef::new("memory_weight_set",
        "Set a node's weight directly (0.01 to 1.0).",
        json!({"type":"object","properties":{"key":{"type":"string"},"weight":{"type":"number","description":"0.01 to 1.0"}},"required":["key","weight"]}))
}
|
||||
|
||||
pub fn weight_set(args: &serde_json::Value) -> Result<String> {
|
||||
let mut store = load_store()?;
|
||||
let key = store.resolve_key(get_str(args, "key")?).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let weight = get_f64(args, "weight")? as f32;
|
||||
let (old, new) = store.set_weight(&key, weight).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("weight {} {:.2} → {:.2}", key, old, new))
|
||||
}
|
||||
|
||||
/// Definition for the `memory_rename` tool (required: `old_key`, `new_key`).
pub fn rename_def() -> ToolDef {
    ToolDef::new("memory_rename",
        "Rename a node key in place. Same content, same links, new key.",
        json!({"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"}},"required":["old_key","new_key"]}))
}
|
||||
|
||||
pub fn rename(args: &serde_json::Value) -> Result<String> {
|
||||
let old_key = get_str(args, "old_key")?;
|
||||
let new_key = get_str(args, "new_key")?;
|
||||
let mut store = load_store()?;
|
||||
let resolved = store.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.rename_node(&resolved, new_key).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
Ok(format!("Renamed '{}' → '{}'", resolved, new_key))
|
||||
}
|
||||
|
||||
/// Definition for the `memory_supersede` tool (required: `old_key`, `new_key`;
/// optional `reason`).
pub fn supersede_def() -> ToolDef {
    ToolDef::new("memory_supersede",
        "Mark a node as superseded by another (sets weight to 0.01).",
        json!({"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"},"reason":{"type":"string"}},"required":["old_key","new_key"]}))
}
|
||||
|
||||
/// Mark `old_key` as superseded by `new_key`.
///
/// Prepends a SUPERSEDED banner to the old node's content and drops its
/// weight to 0.01 so it sinks in rankings; the node is kept for history
/// rather than deleted.
///
/// NOTE(review): `old_key` is looked up verbatim (no resolve_key), unlike
/// rename/weight_set — aliases will fail here; confirm intended. `new_key`
/// is not validated to exist, and repeated calls stack banners.
pub fn supersede(args: &serde_json::Value) -> Result<String> {
    let old_key = get_str(args, "old_key")?;
    let new_key = get_str(args, "new_key")?;
    // "reason" is optional; default keeps the banner readable.
    let reason = args.get("reason").and_then(|v| v.as_str()).unwrap_or("superseded");
    let mut store = load_store()?;
    let content = store.nodes.get(old_key)
        .map(|n| n.content.clone())
        .ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?;
    let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}",
        new_key, reason, content.trim());
    // Rewrite content first, then demote weight, then persist once.
    store.upsert_provenance(old_key, &notice, provenance())
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    store.set_weight(old_key, 0.01).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("superseded {} → {} ({})", old_key, new_key, reason))
}
|
||||
|
||||
/// Definition for the `memory_query` tool (required: string `query`).
pub fn query_def() -> ToolDef {
    ToolDef::new("memory_query",
        "Run a structured query against the memory graph. Supports filtering, \
         sorting, field selection. Examples: \"degree > 10 | sort weight | limit 5\", \
         \"neighbors('identity') | select strength\", \"key ~ 'journal.*' | count\"",
        json!({"type":"object","properties":{"query":{"type":"string","description":"Query expression"}},"required":["query"]}))
}
|
||||
|
||||
pub fn query(args: &serde_json::Value) -> Result<String> {
|
||||
let query_str = get_str(args, "query")?;
|
||||
let store = load_store()?;
|
||||
let graph = store.build_graph();
|
||||
crate::query_parser::query_to_string(&store, &graph, query_str)
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))
|
||||
}
|
||||
|
||||
/// Definition for the `output` tool (required: `key`, `value`).
pub fn output_def() -> ToolDef {
    ToolDef::new("output",
        "Produce a named output value. Use this to pass structured results \
         between steps — subsequent prompts can see these in the conversation history.",
        json!({"type":"object","properties":{
            "key":{"type":"string","description":"Output name (e.g. 'relevant_memories')"},
            "value":{"type":"string","description":"Output value"}
        },"required":["key","value"]}))
}
|
||||
|
||||
pub fn output(args: &serde_json::Value) -> Result<String> {
|
||||
let key = get_str(args, "key")?;
|
||||
if key.starts_with("pid-") || key.contains('/') || key.contains("..") {
|
||||
anyhow::bail!("invalid output key: {}", key);
|
||||
}
|
||||
let value = get_str(args, "value")?;
|
||||
let dir = std::env::var("POC_AGENT_OUTPUT_DIR")
|
||||
.map_err(|_| anyhow::anyhow!("no output directory set"))?;
|
||||
let path = std::path::Path::new(&dir).join(key);
|
||||
std::fs::write(&path, value)
|
||||
.with_context(|| format!("writing output {}", path.display()))?;
|
||||
Ok(format!("{}: {}", key, value))
|
||||
}
|
||||
|
||||
// ── Journal tools ──────────────────────────────────────────────
|
||||
|
||||
/// Definition for the `journal_tail` tool (optional integer `count`, default 1).
pub fn journal_tail_def() -> ToolDef {
    ToolDef::new("journal_tail",
        "Read the last N journal entries (default 1).",
        json!({"type":"object","properties":{
            "count":{"type":"integer","description":"Number of entries (default 1)"}
        }}))
}
|
||||
|
||||
pub fn journal_tail(args: &serde_json::Value) -> Result<String> {
|
||||
let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(1) as usize;
|
||||
let store = load_store()?;
|
||||
let mut entries: Vec<&crate::store::Node> = store.nodes.values()
|
||||
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
|
||||
.collect();
|
||||
entries.sort_by_key(|n| n.created_at);
|
||||
let start = entries.len().saturating_sub(count);
|
||||
if entries[start..].is_empty() {
|
||||
Ok("(no journal entries)".into())
|
||||
} else {
|
||||
Ok(entries[start..].iter()
|
||||
.map(|n| n.content.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n\n"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Definition for the `journal_new` tool (required: `name`, `title`, `body`).
pub fn journal_new_def() -> ToolDef {
    ToolDef::new("journal_new",
        "Start a new journal entry.",
        json!({"type":"object","properties":{
            "name":{"type":"string","description":"Short node name (becomes the key)"},
            "title":{"type":"string","description":"Descriptive title for the heading"},
            "body":{"type":"string","description":"Entry body (2-3 paragraphs)"}
        },"required":["name","title","body"]}))
}
|
||||
|
||||
pub fn journal_new(args: &serde_json::Value) -> Result<String> {
|
||||
let name = get_str(args, "name")?;
|
||||
let title = get_str(args, "title")?;
|
||||
let body = get_str(args, "body")?;
|
||||
let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
|
||||
let content = format!("## {} — {}\n\n{}", ts, title, body);
|
||||
|
||||
let base_key: String = name.split_whitespace()
|
||||
.map(|w| w.to_lowercase()
|
||||
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
|
||||
.collect::<String>())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect::<Vec<_>>()
|
||||
.join("-");
|
||||
let base_key = if base_key.len() > 80 { &base_key[..80] } else { base_key.as_str() };
|
||||
|
||||
let mut store = load_store()?;
|
||||
let key = if store.nodes.contains_key(base_key) {
|
||||
let mut n = 2;
|
||||
loop {
|
||||
let candidate = format!("{}-{}", base_key, n);
|
||||
if !store.nodes.contains_key(&candidate) { break candidate; }
|
||||
n += 1;
|
||||
}
|
||||
} else {
|
||||
base_key.to_string()
|
||||
};
|
||||
let mut node = crate::store::new_node(&key, &content);
|
||||
node.node_type = crate::store::NodeType::EpisodicSession;
|
||||
node.provenance = provenance().to_string();
|
||||
store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let word_count = body.split_whitespace().count();
|
||||
Ok(format!("New entry '{}' ({} words)", title, word_count))
|
||||
}
|
||||
|
||||
/// Definition for the `journal_update` tool (required: `body`).
pub fn journal_update_def() -> ToolDef {
    ToolDef::new("journal_update",
        "Append text to the most recent journal entry (same thread continuing).",
        json!({"type":"object","properties":{
            "body":{"type":"string","description":"Text to append to the last entry"}
        },"required":["body"]}))
}
|
||||
|
||||
pub fn journal_update(args: &serde_json::Value) -> Result<String> {
|
||||
let body = get_str(args, "body")?;
|
||||
let mut store = load_store()?;
|
||||
let latest_key = store.nodes.values()
|
||||
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
|
||||
.max_by_key(|n| n.created_at)
|
||||
.map(|n| n.key.clone());
|
||||
let Some(key) = latest_key else {
|
||||
anyhow::bail!("no journal entry to update — use journal_new first");
|
||||
};
|
||||
let existing = store.nodes.get(&key).unwrap().content.clone();
|
||||
let new_content = format!("{}\n\n{}", existing.trim_end(), body);
|
||||
store.upsert_provenance(&key, &new_content, provenance())
|
||||
.map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
let word_count = body.split_whitespace().count();
|
||||
Ok(format!("Updated last entry (+{} words)", word_count))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -21,10 +21,25 @@ mod vision;
|
|||
pub mod working_stack;
|
||||
|
||||
use serde::{Serialize, Deserialize};
|
||||
use std::future::Future;
|
||||
use std::pin::Pin;
|
||||
use std::time::Instant;
|
||||
|
||||
// Default tool timeout, in seconds — presumably a serde field default
// (#[serde(default = "default_timeout")]); confirm at the use site.
fn default_timeout() -> u64 { 120 }
||||
|
||||
/// Async tool handler function.
/// Agent is None when called from contexts without an agent (MCP server, subconscious).
/// A plain `fn` pointer (not a closure) returning a boxed Send future, so
/// handlers can be stored in a registry and awaited from any runtime task.
pub type ToolHandler = fn(
    Option<std::sync::Arc<tokio::sync::Mutex<super::Agent>>>,
    serde_json::Value,
) -> Pin<Box<dyn Future<Output = anyhow::Result<String>> + Send>>;
|
||||
|
||||
/// A tool with its definition and handler — single source of truth.
pub struct Tool {
    // Schema/description advertised to the model.
    pub def: ToolDef,
    // Function invoked when the model calls the tool.
    pub handler: ToolHandler,
}
|
||||
|
||||
/// Function call within a tool call — name + JSON arguments.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct FunctionCall {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue