agents: placeholder-based prompt templates, port remaining 4 agents

Replace the formatter dispatch with a generic {{placeholder}} lookup
system. Placeholders in prompt templates are resolved at runtime from
a table: topology, nodes, episodes, health, pairs, rename, split.

The query in the header selects what to operate on (keys for visit
tracking); placeholders pull in formatted context. Placeholders that
produce their own node selection (pairs, rename) contribute keys back.

Port health, separator, rename, and split agents to .agent files.
All 7 agents now use the config-driven path.
This commit is contained in:
ProofOfConcept 2026-03-10 15:50:54 -04:00
parent b4e674806d
commit 16c749f798
6 changed files with 436 additions and 36 deletions

View file

@ -1,14 +1,21 @@
// Agent definitions: self-contained JSON files with query + prompt.
// Agent definitions: self-contained files with query + prompt template.
//
// Each agent is a .json file in the agents/ directory containing:
// - query: pipeline expression for node selection
// - prompt: the full prompt template with {{TOPOLOGY}} and {{NODES}} placeholders
// - model, schedule metadata
// Each agent is a file in the agents/ directory:
// - First line: JSON header (agent, query, model, schedule)
// - After blank line: prompt template with {{placeholder}} lookups
//
// This replaces the hardcoded per-agent node selection in prompts.rs.
// Agents that need custom generators or formatters (separator, split)
// stay in prompts.rs until the pipeline can express their logic.
// Placeholders are resolved at runtime:
// {{topology}} — graph topology header
// {{nodes}} — query results formatted as node sections
// {{episodes}} — alias for {{nodes}}
// {{health}} — graph health report
// {{pairs}} — interference pairs from detect_interference
// {{rename}} — rename candidates
// {{split}} — split detail for the first query result
//
// The query selects what to operate on; placeholders pull in context.
use crate::graph::Graph;
use crate::neuro::{consolidation_priority, ReplayItem};
use crate::search;
use crate::store::Store;
@ -31,6 +38,7 @@ pub struct AgentDef {
#[derive(Deserialize)]
struct AgentHeader {
agent: String,
#[serde(default)]
query: String,
#[serde(default = "default_model")]
model: String,
@ -80,7 +88,6 @@ pub fn load_defs() -> Vec<AgentDef> {
/// Look up a single agent definition by name.
pub fn get_def(name: &str) -> Option<AgentDef> {
let dir = agents_dir();
// Try both extensions
for ext in ["agent", "md"] {
let path = dir.join(format!("{}.{}", name, ext));
if let Ok(content) = std::fs::read_to_string(&path) {
@ -92,7 +99,100 @@ pub fn get_def(name: &str) -> Option<AgentDef> {
load_defs().into_iter().find(|d| d.agent == name)
}
/// Run a config-driven agent: query → format → fill prompt template.
/// Result of resolving a placeholder: text + any affected node keys.
struct Resolved {
// Replacement text spliced into the prompt template in place of {{name}}.
text: String,
// Node keys this placeholder selected on its own (e.g. pairs, rename),
// reported back to the caller for visit tracking. Left empty when the
// placeholder only formats keys already chosen by the header query.
keys: Vec<String>,
}
/// Resolve a single {{placeholder}} by name.
/// Returns the replacement text and any node keys it produced (for visit tracking).
///
/// `keys` holds the results of the agent's header query; `count` caps how
/// many items placeholders that do their own node selection (pairs, rename)
/// may produce. Returns `None` only for an unrecognized placeholder name —
/// the caller renders that as "(unknown: …)".
fn resolve(
    name: &str,
    store: &Store,
    graph: &Graph,
    keys: &[String],
    count: usize,
) -> Option<Resolved> {
    match name {
        "topology" => Some(Resolved {
            text: super::prompts::format_topology_header_pub(graph),
            keys: vec![],
        }),
        "nodes" | "episodes" => {
            let items = keys_to_replay_items(store, keys, graph);
            Some(Resolved {
                text: super::prompts::format_nodes_section_pub(store, &items, graph),
                keys: vec![], // keys already tracked from query
            })
        }
        "health" => Some(Resolved {
            text: super::prompts::format_health_section_pub(store, graph),
            keys: vec![],
        }),
        "pairs" => {
            // Pairs come from interference detection rather than the header
            // query, so their node keys must be reported back for tracking.
            let mut pairs = crate::neuro::detect_interference(store, graph, 0.5);
            pairs.truncate(count);
            let pair_keys: Vec<String> = pairs
                .iter()
                .flat_map(|(a, b, _)| vec![a.clone(), b.clone()])
                .collect();
            Some(Resolved {
                text: super::prompts::format_pairs_section_pub(&pairs, store, graph),
                keys: pair_keys,
            })
        }
        "rename" => {
            let (rename_keys, section) = super::prompts::format_rename_candidates_pub(store, count);
            Some(Resolved { text: section, keys: rename_keys })
        }
        "split" => {
            // Fix: previously this arm returned None (`keys.first()?`) when
            // the query selected no nodes, which the caller rendered as the
            // misleading "(unknown: split)". Report the real cause instead.
            let text = match keys.first() {
                Some(key) => super::prompts::format_split_plan_node_pub(store, graph, key),
                None => "(split: query selected no node)".to_string(),
            };
            Some(Resolved {
                text,
                keys: vec![], // key already tracked from query
            })
        }
        _ => None,
    }
}
/// Resolve all {{placeholder}} patterns in a prompt template.
/// Returns the resolved text and all node keys collected from placeholders.
///
/// Scanning resumes *after* each replacement, so text inserted by a
/// placeholder is never re-scanned. (The previous version restarted
/// `find("{{")` from the beginning of the string every iteration, which
/// re-expanded any `{{placeholder}}` appearing inside resolved content —
/// node text, health report, etc. — and could loop without bound.)
fn resolve_placeholders(
    template: &str,
    store: &Store,
    graph: &Graph,
    keys: &[String],
    count: usize,
) -> (String, Vec<String>) {
    let mut result = template.to_string();
    let mut extra_keys = Vec::new();
    // Byte offset past which the next placeholder search starts.
    let mut pos = 0;
    while let Some(rel_start) = result[pos..].find("{{") {
        let start = pos + rel_start;
        // No closing braces: leave the rest of the template untouched.
        let Some(rel_end) = result[start + 2..].find("}}") else { break };
        let end = start + 2 + rel_end;
        let name = result[start + 2..end].trim().to_lowercase();
        let replacement = match resolve(&name, store, graph, keys, count) {
            Some(resolved) => {
                extra_keys.extend(resolved.keys);
                resolved.text
            }
            None => format!("(unknown: {})", name),
        };
        result.replace_range(start..end + 2, &replacement);
        // Skip over the inserted text; never re-scan it.
        pos = start + replacement.len();
    }
    (result, extra_keys)
}
/// Run a config-driven agent: query → resolve placeholders → prompt.
// NOTE(review): this span is a diff rendering — pre- and post-refactor lines
// are interleaved below, and the hunk header hides part of the signature
// (the `count` parameter used in the body is not visible here). The
// post-refactor flow appears to be: run query if present → resolve
// placeholders → merge query keys with placeholder keys — TODO confirm
// against the actual file.
pub fn run_agent(
store: &Store,
def: &AgentDef,
@ -100,40 +200,36 @@ pub fn run_agent(
) -> Result<super::prompts::AgentBatch, String> {
let graph = store.build_graph();
// Parse and run the query pipeline
// NOTE(review): the next line is the removed (old) unconditional parse; the
// new code parses inside the `if !def.query.is_empty()` branch below.
let mut stages = search::Stage::parse_pipeline(&def.query)?;
// Run the query if present
let keys = if !def.query.is_empty() {
let mut stages = search::Stage::parse_pipeline(&def.query)?;
// Append a Limit(count) stage unless the query already has one.
let has_limit = stages.iter().any(|s|
matches!(s, search::Stage::Transform(search::Transform::Limit(_))));
if !has_limit {
stages.push(search::Stage::Transform(search::Transform::Limit(count)));
}
let results = search::run_query(&stages, vec![], &graph, store, false, count);
if results.is_empty() {
return Err(format!("{}: query returned no results", def.agent));
}
results.into_iter().map(|(k, _)| k).collect::<Vec<_>>()
} else {
// Query-less agents rely entirely on placeholders for context.
vec![]
};
// NOTE(review): lines from here through the first `Ok(...)` are the removed
// (old) formatter-dispatch path, superseded by resolve_placeholders above.
let has_limit = stages.iter().any(|s| matches!(s, search::Stage::Transform(search::Transform::Limit(_))));
if !has_limit {
stages.push(search::Stage::Transform(search::Transform::Limit(count)));
}
let (prompt, extra_keys) = resolve_placeholders(&def.prompt, store, &graph, &keys, count);
let results = search::run_query(&stages, vec![], &graph, store, false, count);
if results.is_empty() {
return Err(format!("{}: query returned no results", def.agent));
}
let keys: Vec<String> = results.iter().map(|(k, _)| k.clone()).collect();
let items: Vec<ReplayItem> = keys_to_replay_items(store, &keys, &graph);
// Fill placeholders in the embedded prompt
let topology = super::prompts::format_topology_header_pub(&graph);
let nodes_section = super::prompts::format_nodes_section_pub(store, &items, &graph);
let prompt = def.prompt
.replace("{{TOPOLOGY}}", &topology)
.replace("{{NODES}}", &nodes_section)
.replace("{{EPISODES}}", &nodes_section);
Ok(super::prompts::AgentBatch { prompt, node_keys: keys })
// Merge query keys with any keys produced by placeholder resolution
let mut all_keys = keys;
all_keys.extend(extra_keys);
Ok(super::prompts::AgentBatch { prompt, node_keys: all_keys })
}
/// Convert a list of keys to ReplayItems with priority and graph metrics.
pub fn keys_to_replay_items(
store: &Store,
keys: &[String],
graph: &crate::graph::Graph,
graph: &Graph,
) -> Vec<ReplayItem> {
keys.iter()
.filter_map(|key| {