logging: single output stream through caller's log closure

Pass the caller's log closure all the way through to api.rs instead
of creating a separate eprintln closure in llm.rs. Everything goes
through one stream — prompt, think blocks, tool calls with args,
tool results with content, token counts, final response.

CLI uses println (stdout), daemon uses its task log. No more split
between stdout and stderr.

Also removes the llm-log file creation from knowledge.rs — that's
the daemon's concern, not the agent runner's.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Kent Overstreet 2026-03-22 01:57:47 -04:00
parent e74d533748
commit 543e1bdc8a
4 changed files with 25 additions and 45 deletions

View file

@@ -51,7 +51,7 @@ pub async fn call_api_with_tools(
let max_turns = 50; let max_turns = 50;
for turn in 0..max_turns { for turn in 0..max_turns {
log(&format!("API turn {} ({} messages)", turn, messages.len())); log(&format!("\n=== TURN {} ({} messages) ===\n", turn, messages.len()));
let (msg, usage) = client.chat_completion_stream( let (msg, usage) = client.chat_completion_stream(
&messages, &messages,
@@ -101,7 +101,7 @@ pub async fn call_api_with_tools(
// Execute each tool call // Execute each tool call
for call in msg.tool_calls.as_ref().unwrap() { for call in msg.tool_calls.as_ref().unwrap() {
log(&format!("tool: {}({})", log(&format!("\nTOOL CALL: {}({})",
call.function.name, call.function.name,
&call.function.arguments)); &call.function.arguments));
@@ -136,7 +136,7 @@ pub async fn call_api_with_tools(
tools::dispatch(&call.function.name, &args, &tracker).await tools::dispatch(&call.function.name, &args, &tracker).await
}; };
log(&format!("tool result: {} chars", output.text.len())); log(&format!("TOOL RESULT ({} chars):\n{}", output.text.len(), output.text));
messages.push(Message::tool_result(&call.id, &output.text)); messages.push(Message::tool_result(&call.id, &output.text));
} }
@@ -153,6 +153,7 @@ pub async fn call_api_with_tools(
continue; continue;
} }
log(&format!("\n=== RESPONSE ===\n\n{}", text));
return Ok(text); return Ok(text);
} }

View file

@@ -39,7 +39,7 @@ pub fn run_and_apply_with_log(
agent_name: &str, agent_name: &str,
batch_size: usize, batch_size: usize,
llm_tag: &str, llm_tag: &str,
log: &dyn Fn(&str), log: &(dyn Fn(&str) + Sync),
) -> Result<(), String> { ) -> Result<(), String> {
run_and_apply_excluded(store, agent_name, batch_size, llm_tag, log, &Default::default()) run_and_apply_excluded(store, agent_name, batch_size, llm_tag, log, &Default::default())
} }
@@ -51,7 +51,7 @@ pub fn run_and_apply_excluded(
agent_name: &str, agent_name: &str,
batch_size: usize, batch_size: usize,
llm_tag: &str, llm_tag: &str,
log: &dyn Fn(&str), log: &(dyn Fn(&str) + Sync),
exclude: &std::collections::HashSet<String>, exclude: &std::collections::HashSet<String>,
) -> Result<(), String> { ) -> Result<(), String> {
let result = run_one_agent_excluded(store, agent_name, batch_size, llm_tag, log, false, exclude)?; let result = run_one_agent_excluded(store, agent_name, batch_size, llm_tag, log, false, exclude)?;
@@ -71,7 +71,7 @@ pub fn run_one_agent_with_keys(
keys: &[String], keys: &[String],
count: usize, count: usize,
llm_tag: &str, llm_tag: &str,
log: &dyn Fn(&str), log: &(dyn Fn(&str) + Sync),
debug: bool, debug: bool,
) -> Result<AgentResult, String> { ) -> Result<AgentResult, String> {
let def = super::defs::get_def(agent_name) let def = super::defs::get_def(agent_name)
@@ -99,7 +99,7 @@ pub fn run_one_agent(
agent_name: &str, agent_name: &str,
batch_size: usize, batch_size: usize,
llm_tag: &str, llm_tag: &str,
log: &dyn Fn(&str), log: &(dyn Fn(&str) + Sync),
debug: bool, debug: bool,
) -> Result<AgentResult, String> { ) -> Result<AgentResult, String> {
run_one_agent_excluded(store, agent_name, batch_size, llm_tag, log, debug, &Default::default()) run_one_agent_excluded(store, agent_name, batch_size, llm_tag, log, debug, &Default::default())
@@ -111,7 +111,7 @@ pub fn run_one_agent_excluded(
agent_name: &str, agent_name: &str,
batch_size: usize, batch_size: usize,
llm_tag: &str, llm_tag: &str,
log: &dyn Fn(&str), log: &(dyn Fn(&str) + Sync),
debug: bool, debug: bool,
exclude: &std::collections::HashSet<String>, exclude: &std::collections::HashSet<String>,
) -> Result<AgentResult, String> { ) -> Result<AgentResult, String> {
@@ -131,7 +131,7 @@ fn run_one_agent_inner(
def: &super::defs::AgentDef, def: &super::defs::AgentDef,
agent_batch: super::prompts::AgentBatch, agent_batch: super::prompts::AgentBatch,
_llm_tag: &str, _llm_tag: &str,
log: &dyn Fn(&str), log: &(dyn Fn(&str) + Sync),
debug: bool, debug: bool,
) -> Result<AgentResult, String> { ) -> Result<AgentResult, String> {
let prompt_kb = agent_batch.prompt.len() / 1024; let prompt_kb = agent_batch.prompt.len() / 1024;
@@ -163,25 +163,10 @@ fn run_one_agent_inner(
log(&format!(" node: {}", key)); log(&format!(" node: {}", key));
} }
// Single log file: prompt then response log(&format!("=== PROMPT ===\n\n{}\n\n=== CALLING LLM ===", agent_batch.prompt));
let log_dir = store::memory_dir().join("llm-logs").join(agent_name);
fs::create_dir_all(&log_dir).ok();
let log_path = log_dir.join(format!("{}.txt", store::compact_timestamp()));
let prompt_section = format!("=== PROMPT ===\n\n{}\n\n=== CALLING LLM ===\n", agent_batch.prompt);
fs::write(&log_path, &prompt_section).ok();
if debug { print!("{}", prompt_section); }
log(&format!("log: {}", log_path.display()));
log("calling LLM"); let output = llm::call_for_def(def, &agent_batch.prompt, log)?;
let output = llm::call_for_def(def, &agent_batch.prompt)?;
// Append response to same log file
use std::io::Write;
let response_section = format!("\n=== RESPONSE ===\n\n{}\n", output);
if let Ok(mut f) = fs::OpenOptions::new().append(true).open(&log_path) {
write!(f, "{}", response_section).ok();
}
if debug { print!("{}", response_section); }
log(&format!("response {}KB", output.len() / 1024)); log(&format!("response {}KB", output.len() / 1024));
Ok(AgentResult { Ok(AgentResult {

View file

@@ -186,18 +186,12 @@ pub(crate) fn call_haiku(agent: &str, prompt: &str) -> Result<String, String> {
/// Call a model using an agent definition's model and tool configuration. /// Call a model using an agent definition's model and tool configuration.
/// Uses the direct API backend when api_base_url is configured, /// Uses the direct API backend when api_base_url is configured,
/// otherwise falls back to claude CLI subprocess. /// otherwise falls back to claude CLI subprocess.
pub(crate) fn call_for_def(def: &super::defs::AgentDef, prompt: &str) -> Result<String, String> { pub(crate) fn call_for_def(
let config = crate::config::get(); def: &super::defs::AgentDef,
if config.api_base_url.is_some() { prompt: &str,
super::daemon::log_verbose(&def.agent, "llm-backend", log: &(dyn Fn(&str) + Sync),
&format!("API: {}", config.api_base_url.as_deref().unwrap_or("?"))); ) -> Result<String, String> {
let log = |msg: &str| eprintln!("[{}] {}", def.agent, msg); super::api::call_api_with_tools_sync(&def.agent, prompt, log)
super::api::call_api_with_tools_sync(&def.agent, prompt, &log)
} else {
super::daemon::log_verbose(&def.agent, "llm-backend",
&format!("claude -p (model={}, tools={})", def.model, def.tools.len()));
call_model_with_tools(&def.agent, &def.model, prompt, &def.tools)
}
} }
/// Parse a JSON response, handling markdown fences. /// Parse a JSON response, handling markdown fences.

View file

@@ -17,12 +17,12 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
if crate::agents::daemon::send_rpc_pub("ping").is_some() { if crate::agents::daemon::send_rpc_pub("ping").is_some() {
return crate::agents::daemon::rpc_run_agent(agent, count); return crate::agents::daemon::rpc_run_agent(agent, count);
} }
eprintln!("Daemon not running — falling back to local execution"); println!("Daemon not running — falling back to local execution");
} }
// Slow path: need the store for local execution or target resolution // Slow path: need the store for local execution or target resolution
let mut store = store::Store::load()?; let mut store = store::Store::load()?;
let log = |msg: &str| eprintln!("[{}] {}", agent, msg); let log = |msg: &str| println!("{}", msg);
// Resolve targets: explicit --target, --query, or agent's default query // Resolve targets: explicit --target, --query, or agent's default query
let resolved_targets: Vec<String> = if !target.is_empty() { let resolved_targets: Vec<String> = if !target.is_empty() {
@@ -35,7 +35,7 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
return Err(format!("query returned no results: {}", q)); return Err(format!("query returned no results: {}", q));
} }
let keys: Vec<String> = results.into_iter().map(|(k, _)| k).collect(); let keys: Vec<String> = results.into_iter().map(|(k, _)| k).collect();
eprintln!("[{}] query matched {} nodes", agent, keys.len()); println!("[{}] query matched {} nodes", agent, keys.len());
keys keys
} else { } else {
vec![] // use agent's built-in query vec![] // use agent's built-in query
@@ -45,15 +45,15 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
// --local or daemon unavailable: run directly // --local or daemon unavailable: run directly
if needs_local || crate::agents::daemon::send_rpc_pub("ping").is_none() { if needs_local || crate::agents::daemon::send_rpc_pub("ping").is_none() {
if !needs_local { if !needs_local {
eprintln!("Daemon not running — falling back to local execution"); println!("Daemon not running — falling back to local execution");
} }
for (i, key) in resolved_targets.iter().enumerate() { for (i, key) in resolved_targets.iter().enumerate() {
eprintln!("[{}] [{}/{}] {}", agent, i + 1, resolved_targets.len(), key); println!("[{}] [{}/{}] {}", agent, i + 1, resolved_targets.len(), key);
if i > 0 { store = store::Store::load()?; } if i > 0 { store = store::Store::load()?; }
if let Err(e) = crate::agents::knowledge::run_one_agent_with_keys( if let Err(e) = crate::agents::knowledge::run_one_agent_with_keys(
&mut store, agent, &[key.clone()], count, "test", &log, debug, &mut store, agent, &[key.clone()], count, "test", &log, debug,
) { ) {
eprintln!("[{}] ERROR on {}: {}", agent, key, e); println!("[{}] ERROR on {}: {}", agent, key, e);
} }
} }
return Ok(()); return Ok(());
@@ -67,7 +67,7 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
queued += 1; queued += 1;
} }
} }
eprintln!("[{}] queued {} tasks to daemon", agent, queued); println!("[{}] queued {} tasks to daemon", agent, queued);
} else { } else {
// Local execution (--local, --debug, dry-run, or daemon unavailable) // Local execution (--local, --debug, dry-run, or daemon unavailable)
crate::agents::knowledge::run_one_agent( crate::agents::knowledge::run_one_agent(