logging: single output stream through caller's log closure

Pass the caller's log closure all the way through to api.rs instead
of creating a separate eprintln closure in llm.rs. Everything goes
through one stream — prompt, think blocks, tool calls with args,
tool results with content, token counts, final response.

CLI uses println (stdout), daemon uses its task log. No more split
between stdout and stderr.

Also removes the llm-log file creation from knowledge.rs — that's
the daemon's concern, not the agent runner's.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Kent Overstreet 2026-03-22 01:57:47 -04:00
parent e74d533748
commit 543e1bdc8a
4 changed files with 25 additions and 45 deletions

View file

@@ -39,7 +39,7 @@ pub fn run_and_apply_with_log(
agent_name: &str,
batch_size: usize,
llm_tag: &str,
log: &dyn Fn(&str),
log: &(dyn Fn(&str) + Sync),
) -> Result<(), String> {
run_and_apply_excluded(store, agent_name, batch_size, llm_tag, log, &Default::default())
}
@@ -51,7 +51,7 @@ pub fn run_and_apply_excluded(
agent_name: &str,
batch_size: usize,
llm_tag: &str,
log: &dyn Fn(&str),
log: &(dyn Fn(&str) + Sync),
exclude: &std::collections::HashSet<String>,
) -> Result<(), String> {
let result = run_one_agent_excluded(store, agent_name, batch_size, llm_tag, log, false, exclude)?;
@@ -71,7 +71,7 @@ pub fn run_one_agent_with_keys(
keys: &[String],
count: usize,
llm_tag: &str,
log: &dyn Fn(&str),
log: &(dyn Fn(&str) + Sync),
debug: bool,
) -> Result<AgentResult, String> {
let def = super::defs::get_def(agent_name)
@@ -99,7 +99,7 @@ pub fn run_one_agent(
agent_name: &str,
batch_size: usize,
llm_tag: &str,
log: &dyn Fn(&str),
log: &(dyn Fn(&str) + Sync),
debug: bool,
) -> Result<AgentResult, String> {
run_one_agent_excluded(store, agent_name, batch_size, llm_tag, log, debug, &Default::default())
@@ -111,7 +111,7 @@ pub fn run_one_agent_excluded(
agent_name: &str,
batch_size: usize,
llm_tag: &str,
log: &dyn Fn(&str),
log: &(dyn Fn(&str) + Sync),
debug: bool,
exclude: &std::collections::HashSet<String>,
) -> Result<AgentResult, String> {
@@ -131,7 +131,7 @@ fn run_one_agent_inner(
def: &super::defs::AgentDef,
agent_batch: super::prompts::AgentBatch,
_llm_tag: &str,
log: &dyn Fn(&str),
log: &(dyn Fn(&str) + Sync),
debug: bool,
) -> Result<AgentResult, String> {
let prompt_kb = agent_batch.prompt.len() / 1024;
@@ -163,25 +163,10 @@ fn run_one_agent_inner(
log(&format!(" node: {}", key));
}
// Single log file: prompt then response
let log_dir = store::memory_dir().join("llm-logs").join(agent_name);
fs::create_dir_all(&log_dir).ok();
let log_path = log_dir.join(format!("{}.txt", store::compact_timestamp()));
let prompt_section = format!("=== PROMPT ===\n\n{}\n\n=== CALLING LLM ===\n", agent_batch.prompt);
fs::write(&log_path, &prompt_section).ok();
if debug { print!("{}", prompt_section); }
log(&format!("log: {}", log_path.display()));
log(&format!("=== PROMPT ===\n\n{}\n\n=== CALLING LLM ===", agent_batch.prompt));
log("calling LLM");
let output = llm::call_for_def(def, &agent_batch.prompt)?;
let output = llm::call_for_def(def, &agent_batch.prompt, log)?;
// Append response to same log file
use std::io::Write;
let response_section = format!("\n=== RESPONSE ===\n\n{}\n", output);
if let Ok(mut f) = fs::OpenOptions::new().append(true).open(&log_path) {
write!(f, "{}", response_section).ok();
}
if debug { print!("{}", response_section); }
log(&format!("response {}KB", output.len() / 1024));
Ok(AgentResult {