156 lines
4.9 KiB
Rust
156 lines
4.9 KiB
Rust
|
|
// Agent definitions: self-contained files with a JSON header (query + config) and a prompt body.
|
||
|
|
//
|
||
|
|
// Each agent is a .json file in the agents/ directory containing:
|
||
|
|
// - query: pipeline expression for node selection
|
||
|
|
// - prompt: the full prompt template with {{TOPOLOGY}} and {{NODES}} placeholders
|
||
|
|
// - model, schedule metadata
|
||
|
|
//
|
||
|
|
// This replaces the hardcoded per-agent node selection in prompts.rs.
|
||
|
|
// Agents that need custom generators or formatters (separator, split)
|
||
|
|
// stay in prompts.rs until the pipeline can express their logic.
|
||
|
|
|
||
|
|
use crate::neuro::{consolidation_priority, ReplayItem};
|
||
|
|
use crate::search;
|
||
|
|
use crate::store::Store;
|
||
|
|
|
||
|
|
use serde::Deserialize;
|
||
|
|
|
||
|
|
use std::path::PathBuf;
|
||
|
|
|
||
|
|
/// Agent definition: config (from JSON header) + prompt (raw markdown body).
#[derive(Clone, Debug)]
pub struct AgentDef {
    /// Agent name; matched against the file stem in `get_def` fallback lookups.
    pub agent: String,
    /// Pipeline expression for node selection (parsed by `search::Stage::parse_pipeline`).
    pub query: String,
    /// Prompt template body; may contain {{TOPOLOGY}}, {{NODES}} and {{EPISODES}} placeholders.
    pub prompt: String,
    /// Model identifier; defaults to "sonnet" when the header omits it.
    pub model: String,
    /// Schedule metadata; defaults to the empty string when the header omits it.
    pub schedule: String,
}
|
||
|
|
|
||
|
|
/// The JSON header portion of an agent file (everything before the first
/// blank line — see `parse_agent_file`).
#[derive(Deserialize)]
struct AgentHeader {
    agent: String,
    query: String,
    /// Falls back to `default_model()` ("sonnet") when absent.
    #[serde(default = "default_model")]
    model: String,
    /// Falls back to `String::default()` (empty) when absent.
    #[serde(default)]
    schedule: String,
}
|
||
|
|
|
||
|
|
/// Default model name used when an agent header omits the `model` field.
fn default_model() -> String {
    String::from("sonnet")
}
|
||
|
|
|
||
|
|
/// Parse an agent file: first line is JSON config, rest is the prompt.
|
||
|
|
fn parse_agent_file(content: &str) -> Option<AgentDef> {
|
||
|
|
let (header_str, prompt) = content.split_once("\n\n")?;
|
||
|
|
let header: AgentHeader = serde_json::from_str(header_str.trim()).ok()?;
|
||
|
|
Some(AgentDef {
|
||
|
|
agent: header.agent,
|
||
|
|
query: header.query,
|
||
|
|
prompt: prompt.to_string(),
|
||
|
|
model: header.model,
|
||
|
|
schedule: header.schedule,
|
||
|
|
})
|
||
|
|
}
|
||
|
|
|
||
|
|
fn agents_dir() -> PathBuf {
|
||
|
|
let repo = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("agents");
|
||
|
|
if repo.is_dir() { return repo; }
|
||
|
|
crate::store::memory_dir().join("agents")
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Load all agent definitions.
|
||
|
|
pub fn load_defs() -> Vec<AgentDef> {
|
||
|
|
let dir = agents_dir();
|
||
|
|
let Ok(entries) = std::fs::read_dir(&dir) else { return Vec::new() };
|
||
|
|
|
||
|
|
entries
|
||
|
|
.filter_map(|e| e.ok())
|
||
|
|
.filter(|e| {
|
||
|
|
let p = e.path();
|
||
|
|
p.extension().map(|x| x == "agent" || x == "md").unwrap_or(false)
|
||
|
|
})
|
||
|
|
.filter_map(|e| {
|
||
|
|
let content = std::fs::read_to_string(e.path()).ok()?;
|
||
|
|
parse_agent_file(&content)
|
||
|
|
})
|
||
|
|
.collect()
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Look up a single agent definition by name.
|
||
|
|
pub fn get_def(name: &str) -> Option<AgentDef> {
|
||
|
|
let dir = agents_dir();
|
||
|
|
// Try both extensions
|
||
|
|
for ext in ["agent", "md"] {
|
||
|
|
let path = dir.join(format!("{}.{}", name, ext));
|
||
|
|
if let Ok(content) = std::fs::read_to_string(&path) {
|
||
|
|
if let Some(def) = parse_agent_file(&content) {
|
||
|
|
return Some(def);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
}
|
||
|
|
load_defs().into_iter().find(|d| d.agent == name)
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Run a config-driven agent: query → format → fill prompt template.
|
||
|
|
pub fn run_agent(
|
||
|
|
store: &Store,
|
||
|
|
def: &AgentDef,
|
||
|
|
count: usize,
|
||
|
|
) -> Result<super::prompts::AgentBatch, String> {
|
||
|
|
let graph = store.build_graph();
|
||
|
|
|
||
|
|
// Parse and run the query pipeline
|
||
|
|
let mut stages = search::Stage::parse_pipeline(&def.query)?;
|
||
|
|
|
||
|
|
let has_limit = stages.iter().any(|s| matches!(s, search::Stage::Transform(search::Transform::Limit(_))));
|
||
|
|
if !has_limit {
|
||
|
|
stages.push(search::Stage::Transform(search::Transform::Limit(count)));
|
||
|
|
}
|
||
|
|
|
||
|
|
let results = search::run_query(&stages, vec![], &graph, store, false, count);
|
||
|
|
|
||
|
|
if results.is_empty() {
|
||
|
|
return Err(format!("{}: query returned no results", def.agent));
|
||
|
|
}
|
||
|
|
|
||
|
|
let keys: Vec<String> = results.iter().map(|(k, _)| k.clone()).collect();
|
||
|
|
let items: Vec<ReplayItem> = keys_to_replay_items(store, &keys, &graph);
|
||
|
|
|
||
|
|
// Fill placeholders in the embedded prompt
|
||
|
|
let topology = super::prompts::format_topology_header_pub(&graph);
|
||
|
|
let nodes_section = super::prompts::format_nodes_section_pub(store, &items, &graph);
|
||
|
|
|
||
|
|
let prompt = def.prompt
|
||
|
|
.replace("{{TOPOLOGY}}", &topology)
|
||
|
|
.replace("{{NODES}}", &nodes_section)
|
||
|
|
.replace("{{EPISODES}}", &nodes_section);
|
||
|
|
|
||
|
|
Ok(super::prompts::AgentBatch { prompt, node_keys: keys })
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Convert a list of keys to ReplayItems with priority and graph metrics.
|
||
|
|
pub fn keys_to_replay_items(
|
||
|
|
store: &Store,
|
||
|
|
keys: &[String],
|
||
|
|
graph: &crate::graph::Graph,
|
||
|
|
) -> Vec<ReplayItem> {
|
||
|
|
keys.iter()
|
||
|
|
.filter_map(|key| {
|
||
|
|
let node = store.nodes.get(key)?;
|
||
|
|
let priority = consolidation_priority(store, key, graph, None);
|
||
|
|
let cc = graph.clustering_coefficient(key);
|
||
|
|
|
||
|
|
Some(ReplayItem {
|
||
|
|
key: key.clone(),
|
||
|
|
priority,
|
||
|
|
interval_days: node.spaced_repetition_interval,
|
||
|
|
emotion: node.emotion,
|
||
|
|
cc,
|
||
|
|
classification: "unknown",
|
||
|
|
outlier_score: 0.0,
|
||
|
|
})
|
||
|
|
})
|
||
|
|
.collect()
|
||
|
|
}
|