Kill StatusUpdate, Activity, DmnAnnotation, ContextInfoUpdate, AgentUpdate

Status bar reads directly from Agent and MindState on each render tick.
Activity is now a field on Agent — set by agent code directly, read by
UI via try_lock. DmnAnnotation, ContextInfoUpdate, AgentUpdate were
already dead (no senders).

UiMessage is down to 4 variants: TextDelta, Reasoning, Debug, Info.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-05 21:34:27 -04:00
parent 1745e03550
commit eafc2887a3
7 changed files with 32 additions and 76 deletions

View file

@ -638,7 +638,7 @@ pub async fn collect_stream(
match event { match event {
StreamEvent::Content(text) => { StreamEvent::Content(text) => {
if first_content { if first_content {
let _ = ui_tx.send(UiMessage::Activity("streaming...".into())); if let Ok(mut ag) = agent.try_lock() { ag.activity = "streaming...".into(); }
first_content = false; first_content = false;
} }
content.push_str(&text); content.push_str(&text);

View file

@ -28,7 +28,7 @@ use context::{ConversationEntry, ContextState, ContextBudget};
use tools::{summarize_args, working_stack}; use tools::{summarize_args, working_stack};
use crate::mind::log::ConversationLog; use crate::mind::log::ConversationLog;
use crate::user::ui_channel::{ContextSection, SharedContextState, StreamTarget, StatusInfo, UiMessage, UiSender}; use crate::user::ui_channel::{ContextSection, SharedContextState, StreamTarget, UiMessage, UiSender};
use crate::subconscious::learn; use crate::subconscious::learn;
/// Result of a single agent turn. /// Result of a single agent turn.
@ -77,6 +77,8 @@ pub struct Agent {
pub temperature: f32, pub temperature: f32,
pub top_p: f32, pub top_p: f32,
pub top_k: u32, pub top_k: u32,
/// Live activity indicator — read by UI on render tick.
pub activity: String,
/// Control tool flags — set by tool handlers, consumed by turn loop. /// Control tool flags — set by tool handlers, consumed by turn loop.
pub pending_yield: bool, pub pending_yield: bool,
pub pending_model_switch: Option<String>, pub pending_model_switch: Option<String>,
@ -145,6 +147,7 @@ impl Agent {
temperature: 0.6, temperature: 0.6,
top_p: 0.95, top_p: 0.95,
top_k: 20, top_k: 20,
activity: String::new(),
pending_yield: false, pending_yield: false,
pending_model_switch: None, pending_model_switch: None,
pending_dmn_pause: false, pending_dmn_pause: false,
@ -294,7 +297,6 @@ impl Agent {
me.apply_tool_result(&call, output, ui_tx, &mut bg_ds); me.apply_tool_result(&call, output, ui_tx, &mut bg_ds);
} }
me.push_message(Message::user(user_input)); me.push_message(Message::user(user_input));
let _ = ui_tx.send(UiMessage::AgentUpdate(me.agent_cycles.snapshots()));
} }
tools tools
@ -305,11 +307,10 @@ impl Agent {
let mut ds = DispatchState::new(); let mut ds = DispatchState::new();
loop { loop {
let _ = ui_tx.send(UiMessage::Activity("thinking...".into()));
// --- Lock 2: assemble messages, start stream --- // --- Lock 2: assemble messages, start stream ---
let (mut rx, _stream_guard) = { let (mut rx, _stream_guard) = {
let me = agent.lock().await; let mut me = agent.lock().await;
me.activity = "thinking...".into();
let api_messages = me.assemble_api_messages(); let api_messages = me.assemble_api_messages();
let sampling = api::SamplingParams { let sampling = api::SamplingParams {
temperature: me.temperature, temperature: me.temperature,
@ -363,13 +364,13 @@ impl Agent {
tokio::time::sleep(std::time::Duration::from_secs(2)).await; tokio::time::sleep(std::time::Duration::from_secs(2)).await;
continue; continue;
} }
let _ = ui_tx.send(UiMessage::Activity(String::new())); me.activity.clear();
return Err(err); return Err(err);
} }
if finish_reason.as_deref() == Some("error") { if finish_reason.as_deref() == Some("error") {
let detail = if content.is_empty() { "no details".into() } else { content }; let detail = if content.is_empty() { "no details".into() } else { content };
let _ = ui_tx.send(UiMessage::Activity(String::new())); me.activity.clear();
return Err(anyhow::anyhow!("model stream error: {}", detail)); return Err(anyhow::anyhow!("model stream error: {}", detail));
} }
@ -386,16 +387,6 @@ impl Agent {
if let Some(usage) = &usage { if let Some(usage) = &usage {
me.last_prompt_tokens = usage.prompt_tokens; me.last_prompt_tokens = usage.prompt_tokens;
me.publish_context_state(); me.publish_context_state();
let _ = ui_tx.send(UiMessage::StatusUpdate(StatusInfo {
dmn_state: String::new(),
dmn_turns: 0,
dmn_max_turns: 0,
prompt_tokens: usage.prompt_tokens,
completion_tokens: usage.completion_tokens,
model: me.client.model.clone(),
turn_tools: 0,
context_budget: me.budget().status_string(),
}));
} }
// Empty response — nudge and retry // Empty response — nudge and retry
@ -468,8 +459,8 @@ impl Agent {
// Genuinely text-only response // Genuinely text-only response
let text = msg.content_text().to_string(); let text = msg.content_text().to_string();
let _ = ui_tx.send(UiMessage::Activity(String::new()));
let mut me = agent.lock().await; let mut me = agent.lock().await;
me.activity.clear();
me.push_message(msg); me.push_message(msg);
// Drain pending control flags // Drain pending control flags
@ -501,15 +492,15 @@ impl Agent {
Ok(v) => v, Ok(v) => v,
Err(e) => { Err(e) => {
let err = format!("Error: malformed tool call arguments: {e}"); let err = format!("Error: malformed tool call arguments: {e}");
let _ = ui_tx.send(UiMessage::Activity(format!("rejected: {} (bad args)", call.function.name)));
let mut me = agent.lock().await; let mut me = agent.lock().await;
me.activity = format!("rejected: {} (bad args)", call.function.name);
me.apply_tool_result(call, err, ui_tx, ds); me.apply_tool_result(call, err, ui_tx, ds);
return; return;
} }
}; };
let args_summary = summarize_args(&call.function.name, &args); let args_summary = summarize_args(&call.function.name, &args);
let _ = ui_tx.send(UiMessage::Activity(format!("calling: {}", call.function.name))); agent.lock().await.activity = format!("calling: {}", call.function.name);
// Spawn tool, track it // Spawn tool, track it
let call_clone = call.clone(); let call_clone = call.clone();

View file

@ -328,7 +328,7 @@ impl Mind {
(ag.context.clone(), ag.client_clone()) (ag.context.clone(), ag.client_clone())
}; };
let result = learn::score_memories_incremental( let result = learn::score_memories_incremental(
&context, max_age as i64, response_window, &client, &ui_tx, &context, max_age as i64, response_window, &client, &ui_tx, &agent,
).await; ).await;
{ {
let mut ag = agent.lock().await; let mut ag = agent.lock().await;

View file

@ -317,6 +317,7 @@ pub async fn score_memories_incremental(
response_window: usize, response_window: usize,
client: &ApiClient, client: &ApiClient,
ui_tx: &UiSender, ui_tx: &UiSender,
agent: &std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>,
) -> anyhow::Result<Vec<(String, f64)>> { ) -> anyhow::Result<Vec<(String, f64)>> {
let now = chrono::Utc::now().timestamp(); let now = chrono::Utc::now().timestamp();
@ -359,7 +360,7 @@ pub async fn score_memories_incremental(
continue; continue;
} }
let _ = ui_tx.send(UiMessage::Activity(format!("scoring memory: {}...", key))); if let Ok(mut ag) = agent.try_lock() { ag.activity = format!("scoring memory: {}...", key); }
match score_divergence(&http, client, context, range, Filter::SkipKey(key)).await { match score_divergence(&http, client, context, range, Filter::SkipKey(key)).await {
Ok((divs, _)) => { Ok((divs, _)) => {
let n_responses = divs.len(); let n_responses = divs.len();
@ -378,7 +379,7 @@ pub async fn score_memories_incremental(
} }
} }
let _ = ui_tx.send(UiMessage::Activity(String::new())); if let Ok(mut ag) = agent.try_lock() { ag.activity.clear(); }
Ok(results) Ok(results)
} }

View file

@ -602,31 +602,6 @@ impl InteractScreen {
self.autonomous.append_text(text); self.autonomous.append_text(text);
} }
}, },
UiMessage::DmnAnnotation(text) => {
self.autonomous.push_line(text.clone(), Color::Yellow);
self.turn_started = Some(std::time::Instant::now());
self.needs_assistant_marker = true;
app.status.turn_tools = 0;
}
UiMessage::StatusUpdate(info) => {
if !info.dmn_state.is_empty() {
app.status.dmn_state = info.dmn_state.clone();
app.status.dmn_turns = info.dmn_turns;
app.status.dmn_max_turns = info.dmn_max_turns;
}
if info.prompt_tokens > 0 { app.status.prompt_tokens = info.prompt_tokens; }
if !info.model.is_empty() { app.status.model = info.model.clone(); }
if !info.context_budget.is_empty() { app.status.context_budget = info.context_budget.clone(); }
}
UiMessage::Activity(text) => {
if text.is_empty() {
self.call_started = None;
} else if app.activity.is_empty() || self.call_started.is_none() {
self.call_started = Some(std::time::Instant::now());
self.call_timeout_secs = crate::config::get().api_stream_timeout_secs;
}
app.activity = text.clone();
}
UiMessage::Reasoning(text) => { UiMessage::Reasoning(text) => {
self.autonomous.current_color = Color::DarkGray; self.autonomous.current_color = Color::DarkGray;
self.autonomous.append_text(text); self.autonomous.append_text(text);
@ -637,8 +612,6 @@ impl InteractScreen {
UiMessage::Info(text) => { UiMessage::Info(text) => {
self.conversation.push_line(text.clone(), Color::Cyan); self.conversation.push_line(text.clone(), Color::Cyan);
} }
UiMessage::ContextInfoUpdate(info) => { app.context_info = Some(info.clone()); }
UiMessage::AgentUpdate(agents) => { app.agent_state = agents.clone(); }
_ => {} _ => {}
} }
} }
@ -919,6 +892,22 @@ impl ScreenView for InteractScreen {
// Sync state from agent // Sync state from agent
self.sync_from_agent(); self.sync_from_agent();
// Read status from agent + mind state
if let Ok(agent) = self.agent.try_lock() {
app.status.prompt_tokens = agent.last_prompt_tokens();
app.status.model = agent.model().to_string();
app.status.context_budget = agent.budget().status_string();
if !agent.activity.is_empty() {
app.activity = agent.activity.clone();
}
}
{
let mind = self.shared_mind.lock().unwrap();
app.status.dmn_state = mind.dmn.label().to_string();
app.status.dmn_turns = mind.dmn_turns;
app.status.dmn_max_turns = mind.max_dmn_turns;
}
// Draw // Draw
self.draw_main(frame, area, app); self.draw_main(frame, area, app);
None None

View file

@ -315,14 +315,6 @@ fn diff_mind_state(
dirty: &mut bool, dirty: &mut bool,
) { ) {
if cur.dmn.label() != prev.dmn.label() || cur.dmn_turns != prev.dmn_turns { if cur.dmn.label() != prev.dmn.label() || cur.dmn_turns != prev.dmn_turns {
let _ = ui_tx.send(UiMessage::StatusUpdate(ui_channel::StatusInfo {
dmn_state: cur.dmn.label().to_string(),
dmn_turns: cur.dmn_turns,
dmn_max_turns: cur.max_dmn_turns,
prompt_tokens: 0, completion_tokens: 0,
model: String::new(), turn_tools: 0,
context_budget: String::new(),
}));
*dirty = true; *dirty = true;
} }
// Input consumed — Mind started a turn with it // Input consumed — Mind started a turn with it

View file

@ -74,15 +74,6 @@ pub enum UiMessage {
/// Streaming text delta — routed to conversation or autonomous pane. /// Streaming text delta — routed to conversation or autonomous pane.
TextDelta(String, StreamTarget), TextDelta(String, StreamTarget),
/// DMN state annotation: [dmn: foraging (3/20)].
DmnAnnotation(String),
/// Status bar update.
StatusUpdate(StatusInfo),
/// Live activity indicator for the status bar.
Activity(String),
/// Reasoning/thinking tokens from the model (internal monologue). /// Reasoning/thinking tokens from the model (internal monologue).
Reasoning(String), Reasoning(String),
@ -92,11 +83,6 @@ pub enum UiMessage {
/// Informational message — goes to conversation pane (command output, etc). /// Informational message — goes to conversation pane (command output, etc).
Info(String), Info(String),
/// Context loading details — stored for the debug screen.
ContextInfoUpdate(ContextInfo),
/// Agent cycle state update — refreshes the F2 agents screen.
AgentUpdate(Vec<crate::subconscious::subconscious::AgentSnapshot>),
} }
/// Sender that fans out to both the TUI (mpsc) and observers (broadcast). /// Sender that fans out to both the TUI (mpsc) and observers (broadcast).
@ -128,6 +114,3 @@ pub fn channel() -> (UiSender, UiReceiver) {
let (observe_tx, _) = broadcast::channel(1024); let (observe_tx, _) = broadcast::channel(1024);
(UiSender { tui: tui_tx, observe: observe_tx }, tui_rx) (UiSender { tui: tui_tx, observe: observe_tx }, tui_rx)
} }
/// Replay a restored session into the TUI panes so the user can see
/// conversation history immediately on restart. Shows user input,