diff --git a/src/agent/api/mod.rs b/src/agent/api/mod.rs
index fdc3136..33c8526 100644
--- a/src/agent/api/mod.rs
+++ b/src/agent/api/mod.rs
@@ -130,7 +130,7 @@ impl ApiClient {
&reasoning_effort, sampling, priority,
).await;
if let Err(e) = result {
- let _ = tx.send(StreamEvent::Error(e.to_string()));
+ let _ = tx.send(StreamEvent::Error(e.to_string()));
}
});
@@ -218,10 +218,10 @@ pub(crate) async fn send_and_check(
let payload_size = serde_json::to_string(body)
.map(|s| s.len())
.unwrap_or(0);
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"request: {}K payload, {}",
payload_size / 1024, debug_label,
- )));
+ );
}
let mut req = client
@@ -262,24 +262,24 @@ pub(crate) async fn send_and_check(
"x-request-id",
] {
if let Some(val) = headers.get(name) {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"header {}: {}",
name,
val.to_str().unwrap_or("?")
- )));
+ );
}
}
}
if !status.is_success() {
let body = response.text().await.unwrap_or_default();
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"HTTP {} after {:.1}s ({}): {}",
status,
elapsed.as_secs_f64(),
url,
&body[..body.len().min(500)]
- )));
+ );
if let Some(json) = request_json {
let log_dir = dirs::home_dir()
.unwrap_or_default()
@@ -288,20 +288,20 @@ pub(crate) async fn send_and_check(
let ts = chrono::Local::now().format("%Y%m%dT%H%M%S");
let path = log_dir.join(format!("{}.json", ts));
if std::fs::write(&path, json).is_ok() {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"saved failed request to {} (HTTP {})", path.display(), status
- )));
+ );
}
}
anyhow::bail!("HTTP {} ({}): {}", status, url, &body[..body.len().min(1000)]);
}
if debug {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"connected in {:.1}s (HTTP {})",
elapsed.as_secs_f64(),
status.as_u16()
- )));
+ );
}
Ok(response)
@@ -353,9 +353,9 @@ impl SseReader {
let ts = chrono::Local::now().format("%Y%m%dT%H%M%S");
let path = log_dir.join(format!("{}.json", ts));
if std::fs::write(&path, json).is_ok() {
- let _ = self.ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"saved failed request to {} ({})", path.display(), reason
- )));
+ );
}
}
@@ -396,10 +396,10 @@ impl SseReader {
} else {
json_str.to_string()
};
- let _ = self.ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"SSE parse error (#{}) {}: {}",
self.sse_parse_errors, e, preview
- )));
+ );
}
continue;
}
@@ -431,7 +431,7 @@ impl SseReader {
self.sse_lines_parsed,
e, buf_preview,
);
- let _ = self.ui_tx.send(UiMessage::Debug(msg.clone()));
+ dbglog!("{}", msg);
self.save_failed_request(&msg);
return Err(e.into());
}
@@ -450,7 +450,7 @@ impl SseReader {
self.stream_start.elapsed().as_secs_f64(),
buf_preview,
);
- let _ = self.ui_tx.send(UiMessage::Debug(msg.clone()));
+ dbglog!("{}", msg);
self.save_failed_request(&msg);
anyhow::bail!(
"stream timeout: no data for {}s ({} chunks received)",
@@ -535,43 +535,43 @@ pub(crate) fn log_diagnostics(
let debug = std::env::var("POC_DEBUG").is_ok();
if reasoning_chars > 0 && reasoning_effort == "none" {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"note: {} chars leaked reasoning (suppressed from display)",
reasoning_chars
- )));
+ );
}
if content_len == 0 && tool_count == 0 {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"WARNING: empty response (finish: {:?}, chunks: {}, reasoning: {}, \
parse_errors: {}, empty_deltas: {}, {:.1}s)",
finish_reason, chunks_received, reasoning_chars,
sse_parse_errors, empty_deltas, total_elapsed.as_secs_f64()
- )));
+ );
}
if finish_reason.is_none() && chunks_received > 0 {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"WARNING: stream ended without finish_reason ({} chunks, {} content chars)",
chunks_received, content_len
- )));
+ );
}
if sse_parse_errors > 0 {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"WARNING: {} SSE parse errors out of {} lines",
sse_parse_errors, sse_lines_parsed
- )));
+ );
}
if debug {
if let Some(u) = usage {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"tokens: {} prompt + {} completion = {} total",
u.prompt_tokens, u.completion_tokens, u.total_tokens
- )));
+ );
}
let ttft = first_content_at
.map(|d| format!("{:.1}s", d.as_secs_f64()))
.unwrap_or_else(|| "none".to_string());
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"stream: {:.1}s total, TTFT={}, {} chunks, {} SSE lines, \
{} content chars, {} reasoning chars, {} tools, \
finish={:?}",
@@ -583,13 +583,13 @@ pub(crate) fn log_diagnostics(
reasoning_chars,
tool_count,
finish_reason,
- )));
+ );
if !tools.is_empty() {
for (i, tc) in tools.iter().enumerate() {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
" tool[{}]: {} (id: {}, {} arg chars)",
i, tc.function.name, tc.id, tc.function.arguments.len()
- )));
+ );
}
}
}
@@ -699,7 +699,8 @@ pub async fn collect_stream(
}
}
StreamEvent::Reasoning(text) => {
- let _ = ui_tx.send(UiMessage::Reasoning(text));
+ // TODO: reasoning tokens → context entries
+ let _ = text;
}
StreamEvent::ToolCallDelta { index, id, call_type, name, arguments } => {
while tool_calls.len() <= index {
diff --git a/src/agent/api/openai.rs b/src/agent/api/openai.rs
index 0956e4d..df59d36 100644
--- a/src/agent/api/openai.rs
+++ b/src/agent/api/openai.rs
@@ -86,9 +86,9 @@ pub(super) async fn stream_events(
while let Some(event) = reader.next_event(&mut response).await? {
if let Some(err_msg) = event["error"]["message"].as_str() {
let raw = event["error"]["metadata"]["raw"].as_str().unwrap_or("");
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"API error in stream: {}", err_msg
- )));
+ );
anyhow::bail!("API error in stream: {} {}", err_msg, raw);
}
@@ -96,16 +96,16 @@ pub(super) async fn stream_events(
Ok(c) => c,
Err(e) => {
let preview = event.to_string();
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"unparseable SSE event ({}): {}",
e, &preview[..preview.len().min(300)]
- )));
+ );
continue;
}
};
if let Some(ref u) = chunk.usage {
- let _ = tx.send(StreamEvent::Usage(u.clone()));
+ let _ = tx.send(StreamEvent::Usage(u.clone()));
usage = chunk.usage;
}
@@ -126,7 +126,7 @@ pub(super) async fn stream_events(
reasoning_chars += r.len();
has_reasoning = true;
if !r.is_empty() {
- let _ = tx.send(StreamEvent::Reasoning(r.clone()));
+ let _ = tx.send(StreamEvent::Reasoning(r.clone()));
}
}
if let Some(ref r) = choice.delta.reasoning_details {
@@ -143,7 +143,7 @@ pub(super) async fn stream_events(
first_content_at = Some(reader.stream_start.elapsed());
}
content_len += text_delta.len();
- let _ = tx.send(StreamEvent::Content(text_delta.clone()));
+ let _ = tx.send(StreamEvent::Content(text_delta.clone()));
}
if let Some(ref tc_deltas) = choice.delta.tool_calls {
diff --git a/src/agent/mod.rs b/src/agent/mod.rs
index ed1bc84..e56652c 100644
--- a/src/agent/mod.rs
+++ b/src/agent/mod.rs
@@ -182,7 +182,7 @@ impl Agent {
if !jnl.is_empty() {
msgs.push(Message::user(jnl));
}
- msgs.extend(self.context.entries.iter().map(|e| e.api_message().clone()));
+ msgs.extend(self.context.entries.iter().map(|e| e.api_message().clone()));
msgs
}
@@ -263,7 +263,7 @@ impl Agent {
me.push_message(Message::user(format!(
"\n--- subconscious reflection ---\n{}\n",
reflection.trim(),
- )));
+ )));
}
// Collect completed background tool handles — remove from active list
@@ -350,7 +350,7 @@ impl Agent {
let _ = ui_tx.send(UiMessage::Info(format!(
"[context overflow — compacting and retrying ({}/2)]",
overflow_retries,
- )));
+ );
me.compact();
continue;
}
@@ -359,7 +359,7 @@ impl Agent {
let _ = ui_tx.send(UiMessage::Info(format!(
"[stream error: {} — retrying ({}/2)]",
e, empty_retries,
- )));
+ );
drop(me);
tokio::time::sleep(std::time::Duration::from_secs(2)).await;
continue;
@@ -395,10 +395,10 @@ impl Agent {
if !has_content && !has_tools {
if empty_retries < 2 {
empty_retries += 1;
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"empty response, injecting nudge and retrying ({}/2)",
empty_retries,
- )));
+ );
me.push_message(Message::user(
"[system] Your previous response was empty. \
Please respond with text or use a tool."
diff --git a/src/subconscious/api.rs b/src/subconscious/api.rs
index ae9dcc5..98c510f 100644
--- a/src/subconscious/api.rs
+++ b/src/subconscious/api.rs
@@ -118,19 +118,6 @@ pub async fn call_api_with_tools(
u.prompt_tokens, u.completion_tokens));
}
- // Drain reasoning tokens from the UI channel into the log
- {
- let mut reasoning_buf = String::new();
- while let Ok(ui_msg) = ui_rx.try_recv() {
- if let crate::user::ui_channel::UiMessage::Reasoning(r) = ui_msg {
- reasoning_buf.push_str(&r);
- }
- }
- if !reasoning_buf.is_empty() {
- log(&format!("\n{}\n", reasoning_buf.trim()));
- }
- }
-
let has_content = msg.content.is_some();
let has_tools = msg.tool_calls.as_ref().is_some_and(|tc| !tc.is_empty());
diff --git a/src/subconscious/learn.rs b/src/subconscious/learn.rs
index c34c1cb..a5a4c3c 100644
--- a/src/subconscious/learn.rs
+++ b/src/subconscious/learn.rs
@@ -204,36 +204,31 @@ pub async fn score_memories(
});
}
- let _ = ui_tx.send(UiMessage::Info(format!(
- "[scoring {} memories × {} responses]", memory_keys.len(), response_indices.len(),
- )));
let http = http_client();
let range = 0..context.entries.len();
- let _ = ui_tx.send(UiMessage::Activity("scoring baseline...".into()));
let baseline = call_score(&http, client, &build_messages(context, range.clone(), Filter::None)).await?;
let total = memory_keys.len();
let mut matrix: Vec> = Vec::new();
for (mem_idx, key) in memory_keys.iter().enumerate() {
- let _ = ui_tx.send(UiMessage::Activity(format!(
+ dbglog!(
"scoring {}/{}: {}...", mem_idx + 1, total, key,
- )));
+ );
let msgs = build_messages(context, range.clone(), Filter::SkipKey(key));
match call_score(&http, client, &msgs).await {
Ok(without) => matrix.push(divergence(&baseline, &without)),
Err(e) => {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"[training] {} FAILED: {:#}", key, e,
- )));
+ );
matrix.push(vec![0.0; baseline.len()]);
}
}
}
- let _ = ui_tx.send(UiMessage::Activity(String::new()));
let memory_weights: Vec<(String, f64)> = memory_keys.iter()
.zip(matrix.iter())
@@ -295,9 +290,7 @@ pub async fn score_memory(
}
let http = http_client();
- let _ = ui_tx.send(UiMessage::Activity(format!("scoring memory: {}...", key)));
let (divs, _) = score_divergence(&http, client, context, range, Filter::SkipKey(key)).await?;
- let _ = ui_tx.send(UiMessage::Activity(String::new()));
Ok(divs.iter().sum())
}
@@ -365,16 +358,16 @@ pub async fn score_memories_incremental(
Ok((divs, _)) => {
let n_responses = divs.len();
let max_div = divs.iter().cloned().fold(0.0f64, f64::max);
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"[scoring] {} max:{:.3} ({} responses)", key, max_div, n_responses,
- )));
+ );
// TODO: update graph weight once normalization is figured out
results.push((key.clone(), max_div));
}
Err(e) => {
- let _ = ui_tx.send(UiMessage::Debug(format!(
+ dbglog!(
"[scoring] {} FAILED: {:#}", key, e,
- )));
+ );
}
}
}
@@ -408,9 +401,7 @@ pub async fn score_finetune(
}
let http = http_client();
- let _ = ui_tx.send(UiMessage::Activity("scoring for fine-tuning...".into()));
let (divs, _) = score_divergence(&http, client, context, range, Filter::SkipAllMemories).await?;
- let _ = ui_tx.send(UiMessage::Activity(String::new()));
let mut results: Vec<(usize, f64)> = response_positions.iter()
.enumerate()
diff --git a/src/user/chat.rs b/src/user/chat.rs
index 7ebd32f..d6eccdc 100644
--- a/src/user/chat.rs
+++ b/src/user/chat.rs
@@ -602,13 +602,6 @@ impl InteractScreen {
self.autonomous.append_text(text);
}
},
- UiMessage::Reasoning(text) => {
- self.autonomous.current_color = Color::DarkGray;
- self.autonomous.append_text(text);
- }
- UiMessage::Debug(text) => {
- self.tools.push_line(format!("[debug] {}", text), Color::DarkGray);
- }
UiMessage::Info(text) => {
self.conversation.push_line(text.clone(), Color::Cyan);
}
diff --git a/src/user/ui_channel.rs b/src/user/ui_channel.rs
index f9792e9..eccf607 100644
--- a/src/user/ui_channel.rs
+++ b/src/user/ui_channel.rs
@@ -74,12 +74,6 @@ pub enum UiMessage {
/// Streaming text delta — routed to conversation or autonomous pane.
TextDelta(String, StreamTarget),
- /// Reasoning/thinking tokens from the model (internal monologue).
- Reasoning(String),
-
- /// Debug message (only shown when POC_DEBUG is set).
- Debug(String),
-
/// Informational message — goes to conversation pane (command output, etc).
Info(String),