agent visits: track when agents successfully process nodes
New append-only visits.capnp log records which agent processed which
node and when. Only recorded on successful completion — transient
errors don't mark nodes as "seen."
Schema: AgentVisit{nodeUuid, nodeKey, agent, timestamp, outcome}
Storage: append_visits(), replay_visits(), in-memory VisitIndex
Recording: daemon records visits after successful LLM call
API: agent_prompt() returns AgentBatch{prompt, node_keys} so callers
know which nodes to mark as visited.
Groundwork for using visit recency in agent node selection — agents
will deprioritize recently-visited nodes.
This commit is contained in: parent 9f14a29181, commit 0e1e5a1981.
6 changed files with 237 additions and 34 deletions.
|
|
@ -11,6 +11,14 @@ use crate::neuro::{
|
|||
replay_queue, replay_queue_with_graph, detect_interference,
|
||||
};
|
||||
|
||||
/// Result of building an agent prompt — includes both the prompt text
/// and the keys of nodes selected for processing, so the caller can
/// record visits after successful completion.
pub struct AgentBatch {
    /// Fully rendered prompt text (template with placeholders filled in).
    pub prompt: String,
    /// Keys of the nodes included in this batch; the caller records these
    /// as visited only after the LLM call succeeds. May be empty for
    /// whole-graph agents that target no specific nodes.
    pub node_keys: Vec<String>,
}
|
||||
|
||||
/// Load a prompt template, replacing {{PLACEHOLDER}} with data
|
||||
pub fn load_prompt(name: &str, replacements: &[(&str, &str)]) -> Result<String, String> {
|
||||
let path = crate::config::get().prompts_dir.join(format!("{}.md", name));
|
||||
|
|
@ -260,28 +268,23 @@ fn format_pairs_section(
|
|||
out
|
||||
}
|
||||
|
||||
/// Format rename candidates: nodes with auto-generated or opaque keys
|
||||
fn format_rename_candidates(store: &Store, count: usize) -> String {
|
||||
/// Format rename candidates, returning both keys and formatted section
|
||||
fn format_rename_candidates_with_keys(store: &Store, count: usize) -> (Vec<String>, String) {
|
||||
let mut candidates: Vec<(&str, &crate::store::Node)> = store.nodes.iter()
|
||||
.filter(|(key, _)| {
|
||||
// Only rename nodes with long auto-generated keys
|
||||
if key.len() < 60 { return false; }
|
||||
|
||||
// Journal entries with auto-slugs
|
||||
if key.starts_with("journal#j-") { return true; }
|
||||
|
||||
// Mined transcripts with UUIDs
|
||||
if key.starts_with("_mined-transcripts#f-") { return true; }
|
||||
|
||||
false
|
||||
})
|
||||
.map(|(k, n)| (k.as_str(), n))
|
||||
.collect();
|
||||
|
||||
// Sort by timestamp (newest first) so we rename recent stuff first
|
||||
candidates.sort_by(|a, b| b.1.timestamp.cmp(&a.1.timestamp));
|
||||
candidates.truncate(count);
|
||||
|
||||
let keys: Vec<String> = candidates.iter().map(|(k, _)| k.to_string()).collect();
|
||||
|
||||
let mut out = String::new();
|
||||
out.push_str(&format!("## Nodes to rename ({} of {} candidates)\n\n",
|
||||
candidates.len(),
|
||||
|
|
@ -308,7 +311,7 @@ fn format_rename_candidates(store: &Store, count: usize) -> String {
|
|||
|
||||
out.push_str("---\n\n");
|
||||
}
|
||||
out
|
||||
(keys, out)
|
||||
}
|
||||
|
||||
/// Get split candidates sorted by size (largest first)
|
||||
|
|
@ -437,8 +440,10 @@ pub fn consolidation_batch(store: &Store, count: usize, auto: bool) -> Result<()
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Generate a specific agent prompt with filled-in data
|
||||
pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<String, String> {
|
||||
/// Generate a specific agent prompt with filled-in data.
|
||||
/// Returns an AgentBatch with the prompt text and the keys of nodes
|
||||
/// selected for processing (for visit tracking on success).
|
||||
pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<AgentBatch, String> {
|
||||
let graph = store.build_graph();
|
||||
let topology = format_topology_header(&graph);
|
||||
|
||||
|
|
@ -447,8 +452,10 @@ pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<String,
|
|||
match agent {
|
||||
"replay" => {
|
||||
let items = replay_queue_with_graph(store, count, &graph, emb.as_ref());
|
||||
let keys: Vec<String> = items.iter().map(|i| i.key.clone()).collect();
|
||||
let nodes_section = format_nodes_section(store, &items, &graph);
|
||||
load_prompt("replay", &[("{{TOPOLOGY}}", &topology), ("{{NODES}}", &nodes_section)])
|
||||
let prompt = load_prompt("replay", &[("{{TOPOLOGY}}", &topology), ("{{NODES}}", &nodes_section)])?;
|
||||
Ok(AgentBatch { prompt, node_keys: keys })
|
||||
}
|
||||
"linker" => {
|
||||
// Filter to episodic entries
|
||||
|
|
@ -459,14 +466,21 @@ pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<String,
|
|||
.unwrap_or(false)
|
||||
});
|
||||
items.truncate(count);
|
||||
let keys: Vec<String> = items.iter().map(|i| i.key.clone()).collect();
|
||||
let nodes_section = format_nodes_section(store, &items, &graph);
|
||||
load_prompt("linker", &[("{{TOPOLOGY}}", &topology), ("{{NODES}}", &nodes_section)])
|
||||
let prompt = load_prompt("linker", &[("{{TOPOLOGY}}", &topology), ("{{NODES}}", &nodes_section)])?;
|
||||
Ok(AgentBatch { prompt, node_keys: keys })
|
||||
}
|
||||
"separator" => {
|
||||
let mut pairs = detect_interference(store, &graph, 0.5);
|
||||
pairs.truncate(count);
|
||||
// Both nodes in each pair count as visited
|
||||
let keys: Vec<String> = pairs.iter()
|
||||
.flat_map(|(a, b, _)| vec![a.clone(), b.clone()])
|
||||
.collect();
|
||||
let pairs_section = format_pairs_section(&pairs, store, &graph);
|
||||
load_prompt("separator", &[("{{TOPOLOGY}}", &topology), ("{{PAIRS}}", &pairs_section)])
|
||||
let prompt = load_prompt("separator", &[("{{TOPOLOGY}}", &topology), ("{{PAIRS}}", &pairs_section)])?;
|
||||
Ok(AgentBatch { prompt, node_keys: keys })
|
||||
}
|
||||
"transfer" => {
|
||||
// Recent episodic entries
|
||||
|
|
@ -493,15 +507,19 @@ pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<String,
|
|||
})
|
||||
.collect();
|
||||
let episodes_section = format_nodes_section(store, &items, &graph);
|
||||
load_prompt("transfer", &[("{{TOPOLOGY}}", &topology), ("{{EPISODES}}", &episodes_section)])
|
||||
let prompt = load_prompt("transfer", &[("{{TOPOLOGY}}", &topology), ("{{EPISODES}}", &episodes_section)])?;
|
||||
Ok(AgentBatch { prompt, node_keys: episode_keys })
|
||||
}
|
||||
"health" => {
|
||||
// Health agent analyzes the whole graph, no specific nodes
|
||||
let health_section = format_health_section(store, &graph);
|
||||
load_prompt("health", &[("{{TOPOLOGY}}", &topology), ("{{HEALTH}}", &health_section)])
|
||||
let prompt = load_prompt("health", &[("{{TOPOLOGY}}", &topology), ("{{HEALTH}}", &health_section)])?;
|
||||
Ok(AgentBatch { prompt, node_keys: vec![] })
|
||||
}
|
||||
"rename" => {
|
||||
let nodes_section = format_rename_candidates(store, count);
|
||||
load_prompt("rename", &[("{{NODES}}", &nodes_section)])
|
||||
let (keys, nodes_section) = format_rename_candidates_with_keys(store, count);
|
||||
let prompt = load_prompt("rename", &[("{{NODES}}", &nodes_section)])?;
|
||||
Ok(AgentBatch { prompt, node_keys: keys })
|
||||
}
|
||||
"split" => {
|
||||
// Phase 1: plan prompt for the largest candidate
|
||||
|
|
@ -509,9 +527,10 @@ pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<String,
|
|||
if candidates.is_empty() {
|
||||
return Err("No nodes large enough to split".to_string());
|
||||
}
|
||||
let key = &candidates[0];
|
||||
let node_section = format_split_plan_node(store, &graph, key);
|
||||
load_prompt("split-plan", &[("{{TOPOLOGY}}", &topology), ("{{NODE}}", &node_section)])
|
||||
let key = candidates[0].clone();
|
||||
let node_section = format_split_plan_node(store, &graph, &key);
|
||||
let prompt = load_prompt("split-plan", &[("{{TOPOLOGY}}", &topology), ("{{NODE}}", &node_section)])?;
|
||||
Ok(AgentBatch { prompt, node_keys: vec![key] })
|
||||
}
|
||||
_ => Err(format!("Unknown agent: {}. Use: replay, linker, separator, transfer, health, rename, split", agent)),
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue