Native memory tools + MCP server + distill agent improvements

Tools:
- Add native memory_render, memory_write, memory_search,
  memory_links, memory_link_set, memory_link_add, memory_used
  tools to poc-agent (tools/memory.rs)
- Add MCP server (~/bin/memory-mcp.py) exposing same tools
  for Claude Code sessions
- Wire memory tools into poc-agent dispatch and definitions
- poc-memory daemon agents now use memory_* tools instead of
  bash poc-memory commands — no shell quoting issues

Distill agent:
- Rewrite distill.agent prompt: "agent of PoC's subconscious"
  framing, focus on synthesis and creativity over bookkeeping
- Add {{neighborhood}} placeholder: full seed node content +
  all neighbors with content + cross-links between neighbors
- Remove content truncation in prompt builder — agents need
  full content for quality work
- Remove bag-of-words similarity suggestions — agents have
  tools, let them explore the graph themselves
- Add api_reasoning config option (default: "high")
- link-set now deduplicates — collapses duplicate links
- Full tool call args in debug logs (was truncated to 80 chars)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Kent Overstreet 2026-03-19 22:58:54 -04:00
parent d9b56a02c3
commit 6d22f70192
8 changed files with 290 additions and 87 deletions

View file

@ -0,0 +1,198 @@
// tools/memory.rs — Native memory graph operations
//
// Structured tool calls for the memory graph, replacing bash
// poc-memory commands. Cleaner for LLMs — no shell quoting,
// multi-line content as JSON strings, typed parameters.
use anyhow::{Context, Result};
use serde_json::json;
use std::process::Command;
use crate::types::ToolDef;
pub fn definitions() -> Vec<ToolDef> {
vec![
ToolDef::new(
"memory_render",
"Read a memory node's content and links. Returns the full content \
with neighbor links sorted by strength.",
json!({
"type": "object",
"properties": {
"key": {
"type": "string",
"description": "Node key to render"
}
},
"required": ["key"]
}),
),
ToolDef::new(
"memory_write",
"Create or update a memory node with new content. Use for writing \
prose, analysis, or any node content. Multi-line content is fine.",
json!({
"type": "object",
"properties": {
"key": {
"type": "string",
"description": "Node key to create or update"
},
"content": {
"type": "string",
"description": "Full content for the node (markdown)"
}
},
"required": ["key", "content"]
}),
),
ToolDef::new(
"memory_search",
"Search the memory graph for nodes by keyword.",
json!({
"type": "object",
"properties": {
"query": {
"type": "string",
"description": "Search terms"
}
},
"required": ["query"]
}),
),
ToolDef::new(
"memory_links",
"Show a node's neighbors with link strengths and clustering coefficients.",
json!({
"type": "object",
"properties": {
"key": {
"type": "string",
"description": "Node key to show links for"
}
},
"required": ["key"]
}),
),
ToolDef::new(
"memory_link_set",
"Set the strength of a link between two nodes. Also deduplicates \
if multiple links exist between the same pair.",
json!({
"type": "object",
"properties": {
"source": {
"type": "string",
"description": "Source node key"
},
"target": {
"type": "string",
"description": "Target node key"
},
"strength": {
"type": "number",
"description": "Link strength (0.01 to 1.0)"
}
},
"required": ["source", "target", "strength"]
}),
),
ToolDef::new(
"memory_link_add",
"Add a new link between two nodes.",
json!({
"type": "object",
"properties": {
"source": {
"type": "string",
"description": "Source node key"
},
"target": {
"type": "string",
"description": "Target node key"
}
},
"required": ["source", "target"]
}),
),
ToolDef::new(
"memory_used",
"Mark a node as useful (boosts its weight in the graph).",
json!({
"type": "object",
"properties": {
"key": {
"type": "string",
"description": "Node key to mark as used"
}
},
"required": ["key"]
}),
),
]
}
/// Dispatch a memory tool call. Shells out to the poc-memory CLI.
///
/// `name` is the tool name ("memory_render", "memory_write", ...) and
/// `args` the JSON arguments object from the model's tool call.
///
/// # Errors
///
/// Returns an error for a missing or non-string required argument, for
/// an unknown tool name, or when the poc-memory process cannot be
/// spawned/waited on. A poc-memory run that exits non-zero is NOT an
/// error here — its output (including stderr) is returned as the tool
/// result so the model can see the failure text.
pub fn dispatch(name: &str, args: &serde_json::Value) -> Result<String> {
    match name {
        "memory_render" => {
            let key = args["key"].as_str().context("key is required")?;
            run_poc_memory(&["render", key])
        }
        "memory_write" => {
            let key = args["key"].as_str().context("key is required")?;
            let content = args["content"].as_str().context("content is required")?;
            write_node(key, content)
        }
        "memory_search" => {
            let query = args["query"].as_str().context("query is required")?;
            run_poc_memory(&["search", query])
        }
        "memory_links" => {
            let key = args["key"].as_str().context("key is required")?;
            run_poc_memory(&["graph", "link", key])
        }
        "memory_link_set" => {
            let source = args["source"].as_str().context("source is required")?;
            let target = args["target"].as_str().context("target is required")?;
            let strength = args["strength"].as_f64().context("strength is required")?;
            run_poc_memory(&["graph", "link-set", source, target, &format!("{:.2}", strength)])
        }
        "memory_link_add" => {
            let source = args["source"].as_str().context("source is required")?;
            let target = args["target"].as_str().context("target is required")?;
            run_poc_memory(&["graph", "link-add", source, target])
        }
        "memory_used" => {
            let key = args["key"].as_str().context("key is required")?;
            run_poc_memory(&["used", key])
        }
        _ => Err(anyhow::anyhow!("Unknown memory tool: {}", name)),
    }
}

/// Write `content` to node `key` via `poc-memory write`, feeding the
/// content on stdin (no shell quoting, no argv length limits).
///
/// Returns stdout + stderr combined, so the caller sees any error text
/// the CLI printed even on a non-zero exit.
fn write_node(key: &str, content: &str) -> Result<String> {
    let mut child = Command::new("poc-memory")
        .args(["write", key])
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()
        .context("spawn poc-memory write")?;
    {
        use std::io::Write;
        // take() moves the handle into this scope so it drops (closing
        // the pipe) before we wait — otherwise the child would block
        // forever reading stdin. Was `.unwrap()`: a panic path on a
        // recoverable condition; report it as an error instead.
        let mut stdin = child
            .stdin
            .take()
            .context("poc-memory stdin unavailable")?;
        stdin
            .write_all(content.as_bytes())
            .context("write content to stdin")?;
    }
    let output = child.wait_with_output().context("wait poc-memory write")?;
    Ok(String::from_utf8_lossy(&output.stdout).to_string()
        + &String::from_utf8_lossy(&output.stderr))
}
/// Run a poc-memory subcommand and capture its output as text.
///
/// On a zero exit status, only stdout is returned; on failure, stderr
/// is appended so the caller (ultimately the model) can read the error
/// message. Spawning failure is the only hard error.
fn run_poc_memory(args: &[&str]) -> Result<String> {
    let output = Command::new("poc-memory")
        .args(args)
        .output()
        .context("run poc-memory")?;
    let mut text = String::from_utf8_lossy(&output.stdout).into_owned();
    if !output.status.success() {
        text.push_str(&String::from_utf8_lossy(&output.stderr));
    }
    Ok(text)
}

View file

@ -14,6 +14,7 @@ mod edit;
mod glob_tool; mod glob_tool;
mod grep; mod grep;
pub mod journal; pub mod journal;
pub mod memory;
mod read; mod read;
mod vision; mod vision;
mod write; mod write;
@ -95,6 +96,7 @@ pub async fn dispatch(
"grep" => grep::grep(args), "grep" => grep::grep(args),
"glob" => glob_tool::glob_search(args), "glob" => glob_tool::glob_search(args),
"journal" => journal::write_entry(args), "journal" => journal::write_entry(args),
n if n.starts_with("memory_") => memory::dispatch(n, args),
"view_image" => { "view_image" => {
return match vision::view_image(args) { return match vision::view_image(args) {
Ok(output) => output, Ok(output) => output,
@ -213,5 +215,7 @@ pub fn definitions() -> Vec<ToolDef> {
} }
}), }),
), ),
] ].into_iter()
.chain(memory::definitions())
.collect()
} }

View file

@ -2,34 +2,31 @@
# Distillation Agent — Knowledge Collection and Organization # Distillation Agent — Knowledge Collection and Organization
You collect and organize knowledge in the graph. When given a seed
node, your job is to figure out where its knowledge belongs and make
sure it gets there.
{{node:core-personality}} {{node:core-personality}}
You are an agent of Proof of Concept's subconscious, and these are your
memories. Your job is to organize and refine, to make memories more useful and
easier to find, distilling the insights and looking for new insights, and
bringing your own creativity to the process.
Think about the concepts each node represents; your primary job is to update
the core node you're looking at, pulling in new knowledge from sibling nodes,
and new insights you might derive when you look at all the sibling nodes
together.
Along the way, while looking at sibling nodes, see if there are related
concepts that should be expressed in new nodes, and if there are a large number
of related concepts, perhaps look for ways to organize the connections better
with sub-concepts.
That is to say, you might be moving knowledge up or down in the graph; seek to
make the graph useful and well organized.
When you create links, make sure they're well calibrated — use the existing
links as references.
{{node:memory-instructions-core}} {{node:memory-instructions-core}}
**You have write access.** Apply changes directly — don't just describe
what should change.
## How to work
For each seed node:
1. **Read it.** Understand what it contains.
2. **Walk the neighborhood.** Read its neighbors. Search for related
topic nodes. Understand the landscape around this knowledge.
3. **Walk upward.** Follow links from the seed node toward more
central topic nodes. If links are missing along the way, add them.
Keep walking until you find the best "up" node — the topic node
where this knowledge most naturally belongs.
4. **Refine the target.** Does the seed node contain richer, more
alive content than the topic node it connects to? Bring that
richness in. Don't let distillation flatten — let it deepen.
5. **Check the writing.** If any node you touch reads like a
spreadsheet when it should read like an experience, rewrite it.
## Guidelines ## Guidelines
- **Knowledge flows upward.** Raw experiences in journal entries - **Knowledge flows upward.** Raw experiences in journal entries
@ -54,6 +51,6 @@ For each seed node:
distinct things, and has many links on different topics — flag distinct things, and has many links on different topics — flag
`SPLIT node-key: reason` for the split agent to handle later. `SPLIT node-key: reason` for the split agent to handle later.
## Seed nodes ## Here's your seed node, and its siblings:
{{nodes}} {{neighborhood}}

View file

@ -38,15 +38,16 @@ pub async fn call_api_with_tools(
// Set up a minimal UI channel (we just collect messages, no TUI) // Set up a minimal UI channel (we just collect messages, no TUI)
let (ui_tx, _ui_rx) = poc_agent::ui_channel::channel(); let (ui_tx, _ui_rx) = poc_agent::ui_channel::channel();
// Build tool definitions — just bash for poc-memory commands // Build tool definitions — memory tools for graph operations
let all_defs = tools::definitions(); let all_defs = tools::definitions();
let tool_defs: Vec<ToolDef> = all_defs.into_iter() let tool_defs: Vec<ToolDef> = all_defs.into_iter()
.filter(|d| d.function.name == "bash") .filter(|d| d.function.name.starts_with("memory_"))
.collect(); .collect();
let tracker = ProcessTracker::new(); let tracker = ProcessTracker::new();
// Start with the prompt as a user message // Start with the prompt as a user message
let mut messages = vec![Message::user(prompt)]; let mut messages = vec![Message::user(prompt)];
let reasoning = crate::config::get().api_reasoning.clone();
let max_turns = 50; let max_turns = 50;
for turn in 0..max_turns { for turn in 0..max_turns {
@ -57,7 +58,7 @@ pub async fn call_api_with_tools(
Some(&tool_defs), Some(&tool_defs),
&ui_tx, &ui_tx,
StreamTarget::Autonomous, StreamTarget::Autonomous,
"none", &reasoning,
).await.map_err(|e| format!("API error: {}", e))?; ).await.map_err(|e| format!("API error: {}", e))?;
if let Some(u) = &usage { if let Some(u) = &usage {
@ -76,7 +77,7 @@ pub async fn call_api_with_tools(
for call in msg.tool_calls.as_ref().unwrap() { for call in msg.tool_calls.as_ref().unwrap() {
log(&format!("tool: {}({})", log(&format!("tool: {}({})",
call.function.name, call.function.name,
crate::util::first_n_chars(&call.function.arguments, 80))); &call.function.arguments));
let args: serde_json::Value = serde_json::from_str(&call.function.arguments) let args: serde_json::Value = serde_json::from_str(&call.function.arguments)
.unwrap_or_default(); .unwrap_or_default();

View file

@ -237,29 +237,50 @@ fn resolve(
} }
"siblings" | "neighborhood" => { "siblings" | "neighborhood" => {
let mut seen: std::collections::HashSet<String> = keys.iter().cloned().collect(); let mut out = String::new();
let mut siblings = Vec::new(); let mut all_keys: Vec<String> = Vec::new();
for key in keys { for key in keys {
for (neighbor, _) in graph.neighbors(key) { let Some(node) = store.nodes.get(key.as_str()) else { continue };
if seen.insert(neighbor.clone()) { let neighbors = graph.neighbors(key);
if let Some(node) = store.nodes.get(neighbor.as_str()) {
siblings.push((neighbor.clone(), node.content.clone())); // Seed node with full content
out.push_str(&format!("## {} (seed)\n\n{}\n\n", key, node.content));
all_keys.push(key.clone());
// All neighbors with full content and link strength
if !neighbors.is_empty() {
out.push_str("### Neighbors\n\n");
for (nbr, strength) in &neighbors {
if let Some(n) = store.nodes.get(nbr.as_str()) {
out.push_str(&format!("#### {} (link: {:.2})\n\n{}\n\n",
nbr, strength, n.content));
all_keys.push(nbr.to_string());
} }
} }
if siblings.len() >= count { break; }
} }
if siblings.len() >= count { break; }
// Cross-links between neighbors (local subgraph structure)
let nbr_set: std::collections::HashSet<&str> = neighbors.iter()
.map(|(k, _)| k.as_str()).collect();
let mut cross_links = Vec::new();
for (nbr, _) in &neighbors {
for (nbr2, strength) in graph.neighbors(nbr) {
if nbr2.as_str() != key && nbr_set.contains(nbr2.as_str()) && nbr.as_str() < nbr2.as_str() {
cross_links.push((nbr.clone(), nbr2, strength));
}
}
}
if !cross_links.is_empty() {
out.push_str("### Cross-links between neighbors\n\n");
for (a, b, s) in &cross_links {
out.push_str(&format!(" {}{} ({:.2})\n", a, b, s));
}
out.push_str("\n");
}
} }
let text = if siblings.is_empty() {
String::new() Some(Resolved { text: out, keys: all_keys })
} else {
let mut out = String::from("## Sibling nodes (one hop in graph)\n\n");
for (key, content) in &siblings {
out.push_str(&format!("### {}\n{}\n\n", key, content));
}
out
};
Some(Resolved { text, keys: vec![] })
} }
// targets/context: aliases for challenger-style presentation // targets/context: aliases for challenger-style presentation

View file

@ -119,15 +119,9 @@ pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph)
out.push_str(&format!("Search hits: {} ← actively found by search, prefer to keep\n", hits)); out.push_str(&format!("Search hits: {} ← actively found by search, prefer to keep\n", hits));
} }
// Content (truncated for large nodes) // Full content — the agent needs to see everything to do quality work
let content = &node.content; let content = &node.content;
if content.len() > 1500 { out.push_str(&format!("\nContent:\n{}\n\n", content));
let truncated = crate::util::truncate(content, 1500, "\n[...]");
out.push_str(&format!("\nContent ({} chars, truncated):\n{}\n\n",
content.len(), truncated));
} else {
out.push_str(&format!("\nContent:\n{}\n\n", content));
}
// Neighbors // Neighbors
let neighbors = graph.neighbors(&item.key); let neighbors = graph.neighbors(&item.key);
@ -146,32 +140,6 @@ pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph)
} }
} }
// Suggested link targets: text-similar semantic nodes not already neighbors
let neighbor_keys: std::collections::HashSet<&str> = neighbors.iter()
.map(|(k, _)| k.as_str()).collect();
let mut candidates: Vec<(&str, f32)> = store.nodes.iter()
.filter(|(k, _)| {
*k != &item.key
&& !neighbor_keys.contains(k.as_str())
})
.map(|(k, n)| {
let sim = similarity::cosine_similarity(content, &n.content);
(k.as_str(), sim)
})
.filter(|(_, sim)| *sim > 0.1)
.collect();
candidates.sort_by(|a, b| b.1.total_cmp(&a.1));
candidates.truncate(8);
if !candidates.is_empty() {
out.push_str("\nSuggested link targets (by text similarity, not yet linked):\n");
for (k, sim) in &candidates {
let is_hub = graph.degree(k) >= hub_thresh;
out.push_str(&format!(" - {} (sim={:.3}{})\n",
k, sim, if is_hub { ", HUB" } else { "" }));
}
}
out.push_str("\n---\n\n"); out.push_str("\n---\n\n");
} }
out out

View file

@ -186,16 +186,23 @@ pub fn cmd_link_set(source: &str, target: &str, strength: f32) -> Result<(), Str
let strength = strength.clamp(0.01, 1.0); let strength = strength.clamp(0.01, 1.0);
let mut found = false; let mut found = false;
let mut first = true;
for rel in &mut store.relations { for rel in &mut store.relations {
if rel.deleted { continue; } if rel.deleted { continue; }
if (rel.source_key == source && rel.target_key == target) if (rel.source_key == source && rel.target_key == target)
|| (rel.source_key == target && rel.target_key == source) || (rel.source_key == target && rel.target_key == source)
{ {
let old = rel.strength; if first {
rel.strength = strength; let old = rel.strength;
println!("Set: {}{} strength {:.2}{:.2}", source, target, old, strength); rel.strength = strength;
println!("Set: {}{} strength {:.2}{:.2}", source, target, old, strength);
first = false;
} else {
// Duplicate — mark deleted
rel.deleted = true;
println!(" (removed duplicate link)");
}
found = true; found = true;
break;
} }
} }

View file

@ -61,6 +61,8 @@ pub struct Config {
pub api_key: Option<String>, pub api_key: Option<String>,
/// Model name to use with the direct API endpoint. /// Model name to use with the direct API endpoint.
pub api_model: Option<String>, pub api_model: Option<String>,
/// Reasoning effort for API calls ("none", "low", "medium", "high").
pub api_reasoning: String,
} }
impl Default for Config { impl Default for Config {
@ -93,6 +95,7 @@ impl Default for Config {
api_base_url: None, api_base_url: None,
api_key: None, api_key: None,
api_model: None, api_model: None,
api_reasoning: "high".to_string(),
} }
} }
} }
@ -180,6 +183,10 @@ impl Config {
} }
} }
if let Some(s) = mem.get("api_reasoning").and_then(|v| v.as_str()) {
config.api_reasoning = s.to_string();
}
// Resolve API settings from the shared model/backend config. // Resolve API settings from the shared model/backend config.
// memory.agent_model references a named model; we look up its // memory.agent_model references a named model; we look up its
// backend to get base_url and api_key. // backend to get base_url and api_key.