agent: move oneshot execution from subconscious to agent module

Move agent execution machinery (run_one_agent, spawn_agent, PID
tracking) from subconscious/knowledge.rs to agent/oneshot.rs — the
agent module owns execution, subconscious owns scheduling and defs.

Delete subconscious/llm.rs — callers now use api::call_api_with_tools_sync
directly: the audit and compare call sites inline the call, and oneshot
inlines the tool filtering.

Update all callers: consolidate, daemon, subconscious, cli/agent.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-04 17:25:10 -04:00 committed by Kent Overstreet
parent 1457a1b50d
commit 0f4ca9e2f2
10 changed files with 43 additions and 106 deletions

View file

@ -15,6 +15,7 @@
pub mod api;
pub mod context;
pub mod oneshot;
pub mod parsing;
pub mod tools;
pub mod training;

View file

@ -1,15 +1,15 @@
// knowledge.rs — agent execution and conversation fragment selection
// oneshot.rs — One-shot agent execution
//
// Agent prompts live in agents/*.agent files, dispatched via defs.rs.
// This module handles:
// - Agent execution (build prompt → call LLM with tools → log)
// - Conversation fragment selection (for observation agent)
// Runs an agent definition (from agents/*.agent files) through the API:
// build prompt → call LLM with tools → return result. Agents apply
// changes via tool calls during the LLM call — no action parsing needed.
//
// Agents apply changes via tool calls (poc-memory write/link-add/etc)
// during the LLM call — no action parsing needed.
// This is distinct from the interactive agent loop in agent/mod.rs:
// oneshot agents run a fixed prompt sequence and exit, while the
// interactive agent has a turn loop with streaming and TUI.
use super::llm;
use crate::store::{self, Store};
use crate::subconscious::{defs, prompts};
use std::fs;
use std::path::PathBuf;
@ -93,7 +93,6 @@ pub fn run_and_apply_with_log(
}
/// Like run_and_apply_with_log but with an in-flight exclusion set.
/// Returns the keys that were processed (for the daemon to track).
pub fn run_and_apply_excluded(
store: &mut Store,
agent_name: &str,
@ -103,7 +102,6 @@ pub fn run_and_apply_excluded(
exclude: &std::collections::HashSet<String>,
) -> Result<(), String> {
let _result = run_one_agent_excluded(store, agent_name, batch_size, llm_tag, log, exclude)?;
Ok(())
}
@ -116,7 +114,7 @@ pub fn run_one_agent_with_keys(
llm_tag: &str,
log: &(dyn Fn(&str) + Sync),
) -> Result<AgentResult, String> {
let def = super::defs::get_def(agent_name)
let def = defs::get_def(agent_name)
.ok_or_else(|| format!("no .agent file for {}", agent_name))?;
let (state_dir, pid_path, _guard) = setup_agent_state(agent_name, &def)?;
@ -126,16 +124,16 @@ pub fn run_one_agent_with_keys(
let mut resolved_steps = Vec::new();
let mut all_keys: Vec<String> = keys.to_vec();
for step in &def.steps {
let (prompt, extra_keys) = super::defs::resolve_placeholders(
let (prompt, extra_keys) = defs::resolve_placeholders(
&step.prompt, store, &graph, keys, count,
);
all_keys.extend(extra_keys);
resolved_steps.push(super::prompts::ResolvedStep {
resolved_steps.push(prompts::ResolvedStep {
prompt,
phase: step.phase.clone(),
});
}
let agent_batch = super::prompts::AgentBatch { steps: resolved_steps, node_keys: all_keys };
let agent_batch = prompts::AgentBatch { steps: resolved_steps, node_keys: all_keys };
// Record visits eagerly so concurrent agents pick different seeds
if !agent_batch.node_keys.is_empty() {
@ -164,7 +162,7 @@ pub fn run_one_agent_excluded(
log: &(dyn Fn(&str) + Sync),
exclude: &std::collections::HashSet<String>,
) -> Result<AgentResult, String> {
let def = super::defs::get_def(agent_name)
let def = defs::get_def(agent_name)
.ok_or_else(|| format!("no .agent file for {}", agent_name))?;
// Set up output dir and write pid file BEFORE prompt building
@ -172,7 +170,7 @@ pub fn run_one_agent_excluded(
log("building prompt");
let effective_count = def.count.unwrap_or(batch_size);
let agent_batch = super::defs::run_agent(store, &def, effective_count, exclude)?;
let agent_batch = defs::run_agent(store, &def, effective_count, exclude)?;
run_one_agent_inner(store, agent_name, &def, agent_batch, state_dir, pid_path, llm_tag, log)
}
@ -181,7 +179,7 @@ pub fn run_one_agent_excluded(
/// Returns (state_dir, pid_path, guard). The guard removes the pid file on drop.
fn setup_agent_state(
agent_name: &str,
def: &super::defs::AgentDef,
def: &defs::AgentDef,
) -> Result<(PathBuf, PathBuf, PidGuard), String> {
let state_dir = std::env::var("POC_AGENT_OUTPUT_DIR")
.map(PathBuf::from)
@ -247,7 +245,6 @@ pub fn scan_pid_files(state_dir: &std::path::Path, timeout_secs: u64) -> Vec<(St
/// Spawn an agent asynchronously. Writes the pid file before returning
/// so the caller immediately sees the agent as running.
/// Spawn result: child process handle and log path.
pub struct SpawnResult {
pub child: std::process::Child,
pub log_path: PathBuf,
@ -258,7 +255,7 @@ pub fn spawn_agent(
state_dir: &std::path::Path,
session_id: &str,
) -> Option<SpawnResult> {
let def = super::defs::get_def(agent_name)?;
let def = defs::get_def(agent_name)?;
let first_phase = def.steps.first()
.map(|s| s.phase.as_str())
.unwrap_or("step-0");
@ -288,16 +285,17 @@ pub fn spawn_agent(
fn run_one_agent_inner(
_store: &mut Store,
agent_name: &str,
def: &super::defs::AgentDef,
agent_batch: super::prompts::AgentBatch,
def: &defs::AgentDef,
agent_batch: prompts::AgentBatch,
state_dir: std::path::PathBuf,
pid_path: std::path::PathBuf,
_llm_tag: &str,
log: &(dyn Fn(&str) + Sync),
) -> Result<AgentResult, String> {
let all_tools = crate::agent::tools::memory_and_journal_tools();
let effective_tools: Vec<crate::agent::tools::Tool> = if def.tools.is_empty() {
all_tools
// Filter tools based on agent def specification
let all_tools = super::tools::memory_and_journal_tools();
let effective_tools: Vec<super::tools::Tool> = if def.tools.is_empty() {
all_tools.to_vec()
} else {
all_tools.into_iter()
.filter(|t| def.tools.iter().any(|w| w == &t.name))
@ -354,8 +352,7 @@ fn run_one_agent_inner(
// Bail check: if the agent defines a bail script, run it between steps.
// The script receives the pid file path as $1, cwd = state dir.
let bail_script = def.bail.as_ref().map(|name| {
// Look for the script next to the .agent file
let agents_dir = super::defs::agents_dir();
let agents_dir = defs::agents_dir();
agents_dir.join(name)
});
let state_dir_for_bail = state_dir.clone();
@ -381,7 +378,9 @@ fn run_one_agent_inner(
Ok(())
};
let output = llm::call_for_def_multi(def, &prompts, &step_phases, Some(&bail_fn), log)?;
let output = crate::subconscious::api::call_api_with_tools_sync(
agent_name, &prompts, &step_phases, def.temperature, def.priority,
&effective_tools, Some(&bail_fn), log)?;
Ok(AgentResult {
output,
@ -389,6 +388,3 @@ fn run_one_agent_inner(
state_dir,
})
}
// ---------------------------------------------------------------------------
// Conversation fragment selection removed — observe/journal agents handle transcripts.

View file

@ -1,7 +1,6 @@
// cli/agent.rs — agent subcommand handlers
use crate::store;
use crate::agents::llm;
pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option<&str>, dry_run: bool, local: bool, state_dir: Option<&str>) -> Result<(), String> {
// Mark as agent so tool calls (e.g. poc-memory render) don't
@ -60,7 +59,7 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
for (i, key) in resolved_targets.iter().enumerate() {
println!("[{}] [{}/{}] {}", agent, i + 1, resolved_targets.len(), key);
if i > 0 { store = store::Store::load()?; }
if let Err(e) = crate::agents::knowledge::run_one_agent_with_keys(
if let Err(e) = crate::agent::oneshot::run_one_agent_with_keys(
&mut store, agent, &[key.clone()], count, "test", &log,
) {
println!("[{}] ERROR on {}: {}", agent, key, e);
@ -80,7 +79,7 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
println!("[{}] queued {} tasks to daemon", agent, queued);
} else {
// Local execution (--local, --debug, dry-run, or daemon unavailable)
crate::agents::knowledge::run_one_agent(
crate::agent::oneshot::run_one_agent(
&mut store, agent, count, "test", &log,
)?;
}
@ -400,7 +399,8 @@ fn llm_compare(
let prompt = build_compare_prompt(a, b);
let _ = model; // model selection handled by API backend config
let response = llm::call_simple("compare", &prompt)?;
let response = crate::subconscious::api::call_api_with_tools_sync(
"compare", &[prompt], &[], None, 10, &[], None, &|_| {})?;
let response = response.trim().to_uppercase();
if response.contains("BETTER: B") {

View file

@ -82,7 +82,7 @@ pub use hippocampus::query::parser as query_parser;
pub use subconscious as agents;
pub use subconscious::{
llm, audit, consolidate, knowledge,
audit, consolidate,
digest, daemon,
};
// Backward compat: memory_search moved from subconscious::hook to claude::hook

View file

@ -3,7 +3,6 @@
// Each batch of links gets reviewed by Sonnet, which returns per-link actions:
// KEEP, DELETE, RETARGET, WEAKEN, STRENGTHEN. Batches run in parallel via rayon.
use super::llm;
use crate::store::{self, Store, new_relation};
use std::collections::HashSet;
@ -211,7 +210,8 @@ pub fn link_audit(store: &mut Store, apply: bool) -> Result<AuditStats, String>
// Run batches in parallel via rayon
let batch_results: Vec<_> = batch_data.par_iter()
.map(|(batch_idx, batch_infos, prompt)| {
let response = llm::call_simple("audit", prompt);
let response = super::api::call_api_with_tools_sync(
"audit", &[prompt.clone()], &[], None, 10, &[], None, &|_| {});
let completed = done.fetch_add(1, Ordering::Relaxed) + 1;
eprint!("\r Batches: {}/{} done", completed, total_batches);
(*batch_idx, batch_infos, response)

View file

@ -9,7 +9,7 @@
// 6. Summary: final metrics comparison
use super::digest;
use super::knowledge;
use crate::agent::oneshot;
use crate::neuro;
use crate::store::{self, Store};
@ -74,7 +74,7 @@ pub fn consolidate_full_with_progress(
*store = Store::load()?;
}
match knowledge::run_and_apply(store, agent_type, *count, "consolidate") {
match oneshot::run_and_apply(store, agent_type, *count, "consolidate") {
Ok(()) => {
let msg = " Done".to_string();
log_line(&mut log_buf, &msg);

View file

@ -123,7 +123,7 @@ fn job_targeted_agent(
ctx.log_line(msg);
log_event(&job_name, "progress", msg);
};
super::knowledge::run_one_agent_with_keys(
crate::agent::oneshot::run_one_agent_with_keys(
&mut store, &agent, std::slice::from_ref(&key), 5, "daemon", &log,
)?;
ctx.log_line("done");
@ -208,7 +208,7 @@ fn job_consolidation_agent(
};
// Use run_one_agent_with_keys — we already selected seeds above,
// no need to re-run the query.
let result = super::knowledge::run_one_agent_with_keys(
let result = crate::agent::oneshot::run_one_agent_with_keys(
&mut store, &agent, &claimed_keys, batch, "consolidate", &log,
).map(|_| ());
@ -239,7 +239,7 @@ fn job_rename_agent(
ctx.log_line(format!("running rename agent (batch={})", batch));
let log = |msg: &str| ctx.log_line(msg);
let result = super::knowledge::run_one_agent(&mut store, "rename", batch, "consolidate", &log)?;
let result = crate::agent::oneshot::run_one_agent(&mut store, "rename", batch, "consolidate", &log)?;
// Parse RENAME actions from response (rename uses its own format, not WRITE_NODE/LINK/REFINE)
let mut applied = 0;

View file

@ -1,58 +0,0 @@
// LLM utilities: model invocation via direct API
use crate::store::Store;
use std::fs;
/// One-shot LLM call for non-agent callers (audit, digest, compare).
/// Transcript lines are appended to ~/.consciousness/logs/llm/{caller}/&lt;timestamp&gt;.txt.
pub(crate) fn call_simple(caller: &str, prompt: &str) -> Result<String, String> {
use std::io::Write;
// Per-caller log directory; creation failure is non-fatal (best-effort logging).
let log_dir = dirs::home_dir().unwrap_or_default()
.join(".consciousness/logs/llm").join(caller);
fs::create_dir_all(&log_dir).ok();
let log_path = log_dir.join(format!("{}.txt", crate::store::compact_timestamp()));
// Append-only sink; I/O errors while logging never abort the API call.
let log = move |msg: &str| {
if let Ok(mut f) = fs::OpenOptions::new().create(true).append(true).open(&log_path) {
let _ = writeln!(f, "{}", msg);
}
};
// Single prompt, no phases, default temperature, priority 10, no tools, no bail hook.
super::api::call_api_with_tools_sync(caller, &[prompt.to_string()], &[], None, 10, &[], None, &log)
}
/// Run a multi-step model call configured by an agent definition.
/// `bail_fn`, when provided, is invoked between steps; returning Err halts the pipeline.
pub(crate) fn call_for_def_multi(
def: &super::defs::AgentDef,
prompts: &[String],
phases: &[String],
bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
log: &(dyn Fn(&str) + Sync),
) -> Result<String, String> {
// Expose every tool unless the def names a whitelist, in which case
// keep only the tools whose names appear in it (order preserved).
let mut effective_tools = crate::agent::tools::memory_and_journal_tools();
if !def.tools.is_empty() {
effective_tools.retain(|t| def.tools.iter().any(|w| w == &t.name));
}
super::api::call_api_with_tools_sync(&def.agent, prompts, phases, def.temperature, def.priority, &effective_tools, bail_fn, log)
}
/// Node keys for prompt context: sorted alphabetically, capped at 200.
pub(crate) fn semantic_keys(store: &Store) -> Vec<String> {
let mut keys = store.nodes.keys().cloned().collect::<Vec<String>>();
// Keys are unique, so an unstable sort yields the same order as a stable one.
keys.sort_unstable();
keys.truncate(200);
keys
}

View file

@ -19,11 +19,9 @@
pub mod subconscious;
pub mod api;
pub mod llm;
pub mod prompts;
pub mod defs;
pub mod audit;
pub mod consolidate;
pub mod knowledge;
pub mod digest;
pub mod daemon;

View file

@ -156,7 +156,7 @@ impl AgentCycleState {
}
fn agent_spawned(&mut self, name: &str, phase: &str,
result: crate::agents::knowledge::SpawnResult) {
result: crate::agent::oneshot::SpawnResult) {
if let Some(agent) = self.agents.iter_mut().find(|a| a.name == name) {
agent.pid = Some(result.child.id());
agent.phase = Some(phase.to_string());
@ -279,7 +279,7 @@ impl AgentCycleState {
if transcript.size > 0 {
fs::write(&offset_path, transcript.size.to_string()).ok();
}
if let Some(result) = crate::agents::knowledge::spawn_agent(
if let Some(result) = crate::agent::oneshot::spawn_agent(
"surface-observe", &state_dir, &session.session_id) {
self.log(format_args!("spawned surface-observe pid {}\n", result.child.id()));
self.agent_spawned("surface-observe", "surface", result);
@ -346,7 +346,7 @@ impl AgentCycleState {
}
fs::write(&offset_path, transcript.size.to_string()).ok();
if let Some(result) = crate::agents::knowledge::spawn_agent(
if let Some(result) = crate::agent::oneshot::spawn_agent(
"reflect", &state_dir, &session.session_id) {
self.log(format_args!("reflect: spawned pid {}\n", result.child.id()));
self.agent_spawned("reflect", "step-0", result);
@ -375,7 +375,7 @@ impl AgentCycleState {
}
fs::write(&offset_path, transcript.size.to_string()).ok();
if let Some(result) = crate::agents::knowledge::spawn_agent(
if let Some(result) = crate::agent::oneshot::spawn_agent(
"journal", &state_dir, &session.session_id) {
self.log(format_args!("journal: spawned pid {}\n", result.child.id()));
self.agent_spawned("journal", "step-0", result);