agent run: add --target flag to run agents on specific nodes

Adds run_one_agent_with_keys() which bypasses the agent's query and
uses explicitly provided node keys. This allows testing agents on
specific graph neighborhoods:

  poc-memory agent run linker --target bcachefs --debug

This commit is contained in:
ProofOfConcept 2026-03-17 00:24:24 -04:00
parent 1aad6d90af
commit 8b959fb68d
3 changed files with 50 additions and 5 deletions

View file

@ -646,6 +646,31 @@ pub fn run_and_apply_with_log(
///
/// This is the common pipeline shared by the knowledge loop, consolidation pipeline,
/// and daemon. Callers handle action application (with or without depth tracking).
/// Run an agent against an explicit set of target node keys, bypassing the
/// node selection the agent's own query would normally produce.
///
/// Looks up the agent's `.agent` definition, resolves the placeholders in its
/// prompt using the caller-supplied `keys` (placeholder resolution may pull in
/// additional keys, which are included in the batch), then forwards the
/// assembled batch to the shared execution pipeline.
///
/// # Errors
/// Returns `Err` when no `.agent` definition exists for `agent_name`, or when
/// the downstream pipeline fails.
pub fn run_one_agent_with_keys(
    store: &mut Store,
    agent_name: &str,
    keys: &[String],
    count: usize,
    llm_tag: &str,
    log: &dyn Fn(&str),
    debug: bool,
) -> Result<AgentResult, String> {
    let Some(def) = super::defs::get_def(agent_name) else {
        return Err(format!("no .agent file for {}", agent_name));
    };
    log(&format!("targeting: {}", keys.join(", ")));

    // Placeholder resolution runs over the current graph and may reference
    // extra neighbors beyond the explicit targets.
    let graph = store.build_graph();
    let (prompt, extra_keys) =
        super::defs::resolve_placeholders(&def.prompt, store, &graph, keys, count);

    // Batch carries the explicit targets first, then any keys the
    // placeholder resolution added.
    let node_keys: Vec<String> = keys.iter().cloned().chain(extra_keys).collect();
    let batch = super::prompts::AgentBatch { prompt, node_keys };
    run_one_agent_inner(store, agent_name, &def, batch, llm_tag, log, debug)
}
pub fn run_one_agent(
store: &mut Store,
agent_name: &str,
@ -660,6 +685,18 @@ pub fn run_one_agent(
log("building prompt");
let agent_batch = super::defs::run_agent(store, &def, batch_size)?;
run_one_agent_inner(store, agent_name, &def, agent_batch, llm_tag, log, debug)
}
fn run_one_agent_inner(
store: &mut Store,
agent_name: &str,
def: &super::defs::AgentDef,
agent_batch: super::prompts::AgentBatch,
_llm_tag: &str,
log: &dyn Fn(&str),
debug: bool,
) -> Result<AgentResult, String> {
let prompt_kb = agent_batch.prompt.len() / 1024;
let tools_desc = if def.tools.is_empty() { "no tools".into() }
else { format!("{} tools", def.tools.len()) };
@ -679,7 +716,7 @@ pub fn run_one_agent(
log(&format!("log: {}", log_path.display()));
log("calling LLM");
let output = llm::call_for_def(&def, &agent_batch.prompt)?;
let output = llm::call_for_def(def, &agent_batch.prompt)?;
// Append response to same log file
use std::io::Write;