Kill TextDelta, Info — UiMessage is dead. RAII ActivityGuards replace all status feedback

Streaming text now goes directly to agent entries via append_streaming().
sync_from_agent diffs the growing entry each tick. The streaming entry
is popped when the response completes; build_response_message pushes
the final version.

All status feedback uses RAII ActivityGuards:
- push_activity() for long-running work (thinking, streaming, scoring)
- notify() for instant feedback (compacted, DMN state changes, commands)
- On Drop, guards mark their entry "(complete)"; it lingers 5s, then expires
- expire_activities() cleans up timed-out notifications on render tick

UiMessage enum reduced to a single Info variant with zero sends.
The channel infrastructure remains for now (Mind/Agent still take
UiSender in signatures) — mechanical cleanup for a follow-up.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-05 22:18:07 -04:00
parent e7914e3d58
commit cfddb55ed9
9 changed files with 201 additions and 186 deletions

View file

@ -20,7 +20,7 @@ use tokio::sync::mpsc;
use crate::agent::tools::{self as agent_tools, summarize_args, ActiveToolCall}; use crate::agent::tools::{self as agent_tools, summarize_args, ActiveToolCall};
pub use types::ToolCall; pub use types::ToolCall;
use crate::user::ui_channel::{UiMessage, UiSender, StreamTarget}; use crate::user::ui_channel::UiSender;
/// A JoinHandle that aborts its task when dropped. /// A JoinHandle that aborts its task when dropped.
pub struct AbortOnDrop(tokio::task::JoinHandle<()>); pub struct AbortOnDrop(tokio::task::JoinHandle<()>);
@ -130,7 +130,7 @@ impl ApiClient {
&reasoning_effort, sampling, priority, &reasoning_effort, sampling, priority,
).await; ).await;
if let Err(e) = result { if let Err(e) = result {
let _ = tx.send(StreamEvent::Error(e.to_string(); let _ = tx.send(StreamEvent::Error(e.to_string()));
} }
}); });
@ -207,7 +207,6 @@ pub(crate) async fn send_and_check(
body: &impl serde::Serialize, body: &impl serde::Serialize,
auth_header: (&str, &str), auth_header: (&str, &str),
extra_headers: &[(&str, &str)], extra_headers: &[(&str, &str)],
ui_tx: &UiSender,
debug_label: &str, debug_label: &str,
request_json: Option<&str>, request_json: Option<&str>,
) -> Result<reqwest::Response> { ) -> Result<reqwest::Response> {
@ -619,8 +618,6 @@ pub struct StreamResult {
/// - UI forwarding (text deltas, reasoning, tool call notifications) /// - UI forwarding (text deltas, reasoning, tool call notifications)
pub async fn collect_stream( pub async fn collect_stream(
rx: &mut mpsc::UnboundedReceiver<StreamEvent>, rx: &mut mpsc::UnboundedReceiver<StreamEvent>,
ui_tx: &UiSender,
target: StreamTarget,
agent: &std::sync::Arc<tokio::sync::Mutex<super::Agent>>, agent: &std::sync::Arc<tokio::sync::Mutex<super::Agent>>,
active_tools: &crate::user::ui_channel::SharedActiveTools, active_tools: &crate::user::ui_channel::SharedActiveTools,
) -> StreamResult { ) -> StreamResult {
@ -633,12 +630,13 @@ pub async fn collect_stream(
let mut error = None; let mut error = None;
let mut first_content = true; let mut first_content = true;
let mut display_buf = String::new(); let mut display_buf = String::new();
let mut _streaming_guard: Option<super::ActivityGuard> = None;
while let Some(event) = rx.recv().await { while let Some(event) = rx.recv().await {
match event { match event {
StreamEvent::Content(text) => { StreamEvent::Content(text) => {
if first_content { if first_content {
if let Ok(mut ag) = agent.try_lock() { ag.activity = "streaming...".into(); } _streaming_guard = Some(super::start_activity(agent, "streaming...").await);
first_content = false; first_content = false;
} }
content.push_str(&text); content.push_str(&text);
@ -683,7 +681,7 @@ pub async fn collect_stream(
if let Some(pos) = display_buf.find("<tool_call>") { if let Some(pos) = display_buf.find("<tool_call>") {
let before = &display_buf[..pos]; let before = &display_buf[..pos];
if !before.is_empty() { if !before.is_empty() {
let _ = ui_tx.send(UiMessage::TextDelta(before.to_string(), target)); if let Ok(mut ag) = agent.try_lock() { ag.append_streaming(before); }
} }
display_buf.clear(); display_buf.clear();
in_tool_call = true; in_tool_call = true;
@ -693,7 +691,7 @@ pub async fn collect_stream(
if safe > 0 { if safe > 0 {
let flush = display_buf[..safe].to_string(); let flush = display_buf[..safe].to_string();
display_buf = display_buf[safe..].to_string(); display_buf = display_buf[safe..].to_string();
let _ = ui_tx.send(UiMessage::TextDelta(flush, target)); if let Ok(mut ag) = agent.try_lock() { ag.append_streaming(&flush); }
} }
} }
} }

View file

@ -9,7 +9,7 @@ use reqwest::Client;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use super::types::*; use super::types::*;
use crate::user::ui_channel::{UiMessage, UiSender}; use crate::user::ui_channel::UiSender;
use super::StreamEvent; use super::StreamEvent;
/// Stream SSE events from an OpenAI-compatible endpoint, sending /// Stream SSE events from an OpenAI-compatible endpoint, sending
@ -66,7 +66,6 @@ pub(super) async fn stream_events(
&request, &request,
("Authorization", &format!("Bearer {}", api_key)), ("Authorization", &format!("Bearer {}", api_key)),
&[], &[],
ui_tx,
&debug_label, &debug_label,
request_json.as_deref(), request_json.as_deref(),
) )
@ -105,7 +104,7 @@ pub(super) async fn stream_events(
}; };
if let Some(ref u) = chunk.usage { if let Some(ref u) = chunk.usage {
let _ = tx.send(StreamEvent::Usage(u.clone(); let _ = tx.send(StreamEvent::Usage(u.clone()));
usage = chunk.usage; usage = chunk.usage;
} }
@ -126,7 +125,7 @@ pub(super) async fn stream_events(
reasoning_chars += r.len(); reasoning_chars += r.len();
has_reasoning = true; has_reasoning = true;
if !r.is_empty() { if !r.is_empty() {
let _ = tx.send(StreamEvent::Reasoning(r.clone(); let _ = tx.send(StreamEvent::Reasoning(r.clone()));
} }
} }
if let Some(ref r) = choice.delta.reasoning_details { if let Some(ref r) = choice.delta.reasoning_details {
@ -143,7 +142,7 @@ pub(super) async fn stream_events(
first_content_at = Some(reader.stream_start.elapsed()); first_content_at = Some(reader.stream_start.elapsed());
} }
content_len += text_delta.len(); content_len += text_delta.len();
let _ = tx.send(StreamEvent::Content(text_delta.clone(); let _ = tx.send(StreamEvent::Content(text_delta.clone()));
} }
if let Some(ref tc_deltas) = choice.delta.tool_calls { if let Some(ref tc_deltas) = choice.delta.tool_calls {

View file

@ -202,6 +202,15 @@ impl Message {
self.content.as_ref().map_or("", |c| c.as_text()) self.content.as_ref().map_or("", |c| c.as_text())
} }
/// Append text to existing content (for streaming).
pub fn append_content(&mut self, text: &str) {
    if let Some(MessageContent::Text(existing)) = self.content.as_mut() {
        // Plain-text content: grow it in place.
        existing.push_str(text);
    } else if self.content.is_none() {
        // First chunk: create the text content.
        self.content = Some(MessageContent::Text(text.to_string()));
    }
    // Multimodal (Parts) content is intentionally left untouched.
}
pub fn role_str(&self) -> &str { pub fn role_str(&self) -> &str {
match self.role { match self.role {
Role::System => "system", Role::System => "system",

View file

@ -28,9 +28,82 @@ use context::{ConversationEntry, ContextState, ContextBudget};
use tools::{summarize_args, working_stack}; use tools::{summarize_args, working_stack};
use crate::mind::log::ConversationLog; use crate::mind::log::ConversationLog;
use crate::user::ui_channel::{ContextSection, SharedContextState, StreamTarget, UiMessage, UiSender}; use crate::user::ui_channel::{ContextSection, SharedContextState, StreamTarget, UiSender};
use crate::subconscious::learn; use crate::subconscious::learn;
// --- Activity tracking (RAII guards) ---
/// A single status-feedback item shown in the UI activity area.
pub struct ActivityEntry {
    /// Monotonically increasing id allocated by `Agent::push_activity`/`notify`.
    pub id: u64,
    /// Human-readable status text; "(complete)" is appended when a guard drops.
    pub label: String,
    /// When the activity was registered. NOTE(review): not read in this chunk —
    /// presumably used for elapsed-time display; confirm against the renderer.
    pub started: std::time::Instant,
    /// Instant after which `expire_activities` removes this entry.
    pub expires_at: std::time::Instant,
}
/// RAII guard — marks the activity "(complete)" on drop, starts expiry timer.
///
/// Hold this for the duration of the work; dropping it (including on early
/// return) triggers the completion marking in its `Drop` impl.
pub struct ActivityGuard {
    /// Shared handle to the owning agent; locked best-effort (`try_lock`) in `Drop`.
    agent: Arc<tokio::sync::Mutex<Agent>>,
    /// Id of the `ActivityEntry` this guard controls.
    id: u64,
}
/// How long a completed activity (or a notification) stays visible before expiry.
const ACTIVITY_LINGER: std::time::Duration = std::time::Duration::from_secs(5);

impl Drop for ActivityGuard {
    fn drop(&mut self) {
        // Best-effort: `Drop` cannot await the async mutex, so if the agent is
        // currently locked, `try_lock` fails and the entry is NOT marked
        // complete — it then lives until its original (1h) expiry.
        // NOTE(review): confirm this degradation is acceptable.
        if let Ok(mut ag) = self.agent.try_lock() {
            if let Some(entry) = ag.activities.iter_mut().find(|a| a.id == self.id) {
                entry.label.push_str(" (complete)");
                entry.expires_at = std::time::Instant::now() + ACTIVITY_LINGER;
            }
        }
    }
}
impl Agent {
    /// Register an activity, returns its ID. Caller creates the guard.
    ///
    /// The entry carries a long (1h) fallback expiry; the guard's `Drop`
    /// rewrites it to the short linger window on completion.
    pub fn push_activity(&mut self, label: impl Into<String>) -> u64 {
        self.push_entry(label.into(), std::time::Duration::from_secs(3600))
    }

    /// Push a notification — auto-expires after 5 seconds (no guard needed).
    pub fn notify(&mut self, label: impl Into<String>) {
        self.push_entry(label.into(), ACTIVITY_LINGER);
    }

    /// Remove expired activities. Called on render tick and before each push.
    pub fn expire_activities(&mut self) {
        let now = std::time::Instant::now();
        self.activities.retain(|a| a.expires_at > now);
    }

    /// Shared implementation for `push_activity`/`notify`: expire stale
    /// entries, allocate the next id, and append an entry that lives for
    /// `ttl`. Returns the new entry's id.
    fn push_entry(&mut self, label: String, ttl: std::time::Duration) -> u64 {
        self.expire_activities();
        let id = self.next_activity_id;
        self.next_activity_id += 1;
        let now = std::time::Instant::now();
        self.activities.push(ActivityEntry {
            id,
            label,
            started: now,
            expires_at: now + ttl,
        });
        id
    }
}
/// Create an activity guard from outside the lock.
pub fn activity_guard(agent: &Arc<tokio::sync::Mutex<Agent>>, id: u64) -> ActivityGuard {
    // Explicit Arc::clone makes the cheap refcount bump obvious.
    let agent = Arc::clone(agent);
    ActivityGuard { agent, id }
}
/// Convenience: lock, push activity, unlock, return guard.
pub async fn start_activity(agent: &Arc<tokio::sync::Mutex<Agent>>, label: impl Into<String>) -> ActivityGuard {
    // Scope the lock so it is released before the guard is built.
    let id = {
        let mut ag = agent.lock().await;
        ag.push_activity(label)
    };
    ActivityGuard { agent: Arc::clone(agent), id }
}
/// Result of a single agent turn. /// Result of a single agent turn.
pub struct TurnResult { pub struct TurnResult {
/// The text response (already sent through UI channel). /// The text response (already sent through UI channel).
@ -77,8 +150,9 @@ pub struct Agent {
pub temperature: f32, pub temperature: f32,
pub top_p: f32, pub top_p: f32,
pub top_k: u32, pub top_k: u32,
/// Live activity indicator — read by UI on render tick. /// Active activities — RAII guards auto-remove on drop.
pub activity: String, pub activities: Vec<ActivityEntry>,
next_activity_id: u64,
/// Control tool flags — set by tool handlers, consumed by turn loop. /// Control tool flags — set by tool handlers, consumed by turn loop.
pub pending_yield: bool, pub pending_yield: bool,
pub pending_model_switch: Option<String>, pub pending_model_switch: Option<String>,
@ -147,7 +221,8 @@ impl Agent {
temperature: 0.6, temperature: 0.6,
top_p: 0.95, top_p: 0.95,
top_k: 20, top_k: 20,
activity: String::new(), activities: Vec::new(),
next_activity_id: 0,
pending_yield: false, pending_yield: false,
pending_model_switch: None, pending_model_switch: None,
pending_dmn_pause: false, pending_dmn_pause: false,
@ -182,7 +257,7 @@ impl Agent {
if !jnl.is_empty() { if !jnl.is_empty() {
msgs.push(Message::user(jnl)); msgs.push(Message::user(jnl));
} }
msgs.extend(self.context.entries.iter().map(|e| e.api_message().clone(); msgs.extend(self.context.entries.iter().map(|e| e.api_message().clone()));
msgs msgs
} }
@ -218,9 +293,22 @@ impl Agent {
self.context.entries.push(entry); self.context.entries.push(entry);
} }
/// Push a context-only message (system prompt, identity context, /// Append streaming text to the last entry (creating a partial
/// journal summaries). Not logged — these are reconstructed on /// assistant entry if needed). Called by collect_stream per token batch.
/// every startup/compaction. pub fn append_streaming(&mut self, text: &str) {
if let Some(entry) = self.context.entries.last_mut() {
let msg = entry.message_mut();
if msg.role == Role::Assistant {
msg.append_content(text);
return;
}
}
// No assistant entry yet — push a new partial one
self.context.entries.push(ConversationEntry::Message(
Message::assistant(text),
));
}
pub fn budget(&self) -> ContextBudget { pub fn budget(&self) -> ContextBudget {
let count_str = |s: &str| self.tokenizer.encode_with_special_tokens(s).len(); let count_str = |s: &str| self.tokenizer.encode_with_special_tokens(s).len();
let count_msg = |m: &Message| crate::agent::context::msg_token_count(&self.tokenizer, m); let count_msg = |m: &Message| crate::agent::context::msg_token_count(&self.tokenizer, m);
@ -263,7 +351,7 @@ impl Agent {
me.push_message(Message::user(format!( me.push_message(Message::user(format!(
"<system-reminder>\n--- subconscious reflection ---\n{}\n</system-reminder>", "<system-reminder>\n--- subconscious reflection ---\n{}\n</system-reminder>",
reflection.trim(), reflection.trim(),
); )));
} }
// Collect completed background tool handles — remove from active list // Collect completed background tool handles — remove from active list
@ -308,9 +396,9 @@ impl Agent {
loop { loop {
// --- Lock 2: assemble messages, start stream --- // --- Lock 2: assemble messages, start stream ---
let _thinking = start_activity(&agent, "thinking...").await;
let (mut rx, _stream_guard) = { let (mut rx, _stream_guard) = {
let mut me = agent.lock().await; let me = agent.lock().await;
me.activity = "thinking...".into();
let api_messages = me.assemble_api_messages(); let api_messages = me.assemble_api_messages();
let sampling = api::SamplingParams { let sampling = api::SamplingParams {
temperature: me.temperature, temperature: me.temperature,
@ -330,7 +418,7 @@ impl Agent {
// --- Stream loop (no lock) --- // --- Stream loop (no lock) ---
let sr = api::collect_stream( let sr = api::collect_stream(
&mut rx, ui_tx, target, &agent, &active_tools, &mut rx, &agent, &active_tools,
).await; ).await;
let api::StreamResult { let api::StreamResult {
content, tool_calls, usage, finish_reason, content, tool_calls, usage, finish_reason,
@ -347,39 +435,37 @@ impl Agent {
let err = anyhow::anyhow!("{}", e); let err = anyhow::anyhow!("{}", e);
if crate::agent::context::is_context_overflow(&err) && overflow_retries < 2 { if crate::agent::context::is_context_overflow(&err) && overflow_retries < 2 {
overflow_retries += 1; overflow_retries += 1;
let _ = ui_tx.send(UiMessage::Info(format!( me.notify(format!("context overflow — retrying ({}/2)", overflow_retries));
"[context overflow — compacting and retrying ({}/2)]",
overflow_retries,
);
me.compact(); me.compact();
continue; continue;
} }
if crate::agent::context::is_stream_error(&err) && empty_retries < 2 { if crate::agent::context::is_stream_error(&err) && empty_retries < 2 {
empty_retries += 1; empty_retries += 1;
let _ = ui_tx.send(UiMessage::Info(format!( me.notify(format!("stream error — retrying ({}/2)", empty_retries));
"[stream error: {} — retrying ({}/2)]",
e, empty_retries,
);
drop(me); drop(me);
tokio::time::sleep(std::time::Duration::from_secs(2)).await; tokio::time::sleep(std::time::Duration::from_secs(2)).await;
continue; continue;
} }
me.activity.clear();
return Err(err); return Err(err);
} }
if finish_reason.as_deref() == Some("error") { if finish_reason.as_deref() == Some("error") {
let detail = if content.is_empty() { "no details".into() } else { content }; let detail = if content.is_empty() { "no details".into() } else { content };
me.activity.clear();
return Err(anyhow::anyhow!("model stream error: {}", detail)); return Err(anyhow::anyhow!("model stream error: {}", detail));
} }
// Flush remaining display buffer // Flush remaining display buffer to streaming entry
if !in_tool_call && !display_buf.is_empty() { if !in_tool_call && !display_buf.is_empty() {
let _ = ui_tx.send(UiMessage::TextDelta(display_buf, target)); me.append_streaming(&display_buf);
} }
if !content.is_empty() && !in_tool_call {
let _ = ui_tx.send(UiMessage::TextDelta("\n".to_string(), target)); // Pop the streaming entry — the proper entry gets pushed below
// via build_response_message which handles tool calls, leaked
// tool calls, etc. sync_from_agent handles the swap.
if let Some(entry) = me.context.entries.last() {
if entry.message().role == Role::Assistant && entry.message().timestamp.is_none() {
me.context.entries.pop();
}
} }
let msg = api::build_response_message(content, tool_calls); let msg = api::build_response_message(content, tool_calls);
@ -460,7 +546,6 @@ impl Agent {
// Genuinely text-only response // Genuinely text-only response
let text = msg.content_text().to_string(); let text = msg.content_text().to_string();
let mut me = agent.lock().await; let mut me = agent.lock().await;
me.activity.clear();
me.push_message(msg); me.push_message(msg);
// Drain pending control flags // Drain pending control flags
@ -492,15 +577,15 @@ impl Agent {
Ok(v) => v, Ok(v) => v,
Err(e) => { Err(e) => {
let err = format!("Error: malformed tool call arguments: {e}"); let err = format!("Error: malformed tool call arguments: {e}");
let _act = start_activity(agent, format!("rejected: {} (bad args)", call.function.name)).await;
let mut me = agent.lock().await; let mut me = agent.lock().await;
me.activity = format!("rejected: {} (bad args)", call.function.name);
me.apply_tool_result(call, err, ui_tx, ds); me.apply_tool_result(call, err, ui_tx, ds);
return; return;
} }
}; };
let args_summary = summarize_args(&call.function.name, &args); let args_summary = summarize_args(&call.function.name, &args);
agent.lock().await.activity = format!("calling: {}", call.function.name); let _calling = start_activity(agent, format!("calling: {}", call.function.name)).await;
// Spawn tool, track it // Spawn tool, track it
let call_clone = call.clone(); let call_clone = call.clone();

View file

@ -259,6 +259,7 @@ impl Mind {
let mut ag = self.agent.lock().await; let mut ag = self.agent.lock().await;
if ag.last_prompt_tokens() > threshold { if ag.last_prompt_tokens() > threshold {
ag.compact(); ag.compact();
ag.notify("compacted");
} }
} }
MindCommand::Score => { MindCommand::Score => {
@ -381,7 +382,7 @@ impl Mind {
let _ = self.turn_watch.send(false); let _ = self.turn_watch.send(false);
if let Some(name) = model_switch { if let Some(name) = model_switch {
crate::user::chat::cmd_switch_model(&self.agent, &name, &self.ui_tx).await; crate::user::chat::cmd_switch_model(&self.agent, &name).await;
} }
// Post-turn maintenance // Post-turn maintenance

View file

@ -17,7 +17,7 @@
use crate::agent::api::ApiClient; use crate::agent::api::ApiClient;
use crate::agent::api::types::*; use crate::agent::api::types::*;
use crate::agent::context::{ConversationEntry, ContextState}; use crate::agent::context::{ConversationEntry, ContextState};
use crate::user::ui_channel::{UiMessage, UiSender}; use crate::user::ui_channel::UiSender;
const SCORE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(120); const SCORE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(120);
@ -353,7 +353,7 @@ pub async fn score_memories_incremental(
continue; continue;
} }
if let Ok(mut ag) = agent.try_lock() { ag.activity = format!("scoring memory: {}...", key); } let _scoring = crate::agent::start_activity(agent, format!("scoring: {}", key)).await;
match score_divergence(&http, client, context, range, Filter::SkipKey(key)).await { match score_divergence(&http, client, context, range, Filter::SkipKey(key)).await {
Ok((divs, _)) => { Ok((divs, _)) => {
let n_responses = divs.len(); let n_responses = divs.len();
@ -361,7 +361,6 @@ pub async fn score_memories_incremental(
dbglog!( dbglog!(
"[scoring] {} max:{:.3} ({} responses)", key, max_div, n_responses, "[scoring] {} max:{:.3} ({} responses)", key, max_div, n_responses,
); );
// TODO: update graph weight once normalization is figured out
results.push((key.clone(), max_div)); results.push((key.clone(), max_div));
} }
Err(e) => { Err(e) => {
@ -372,7 +371,6 @@ pub async fn score_memories_incremental(
} }
} }
if let Ok(mut ag) = agent.try_lock() { ag.activity.clear(); }
Ok(results) Ok(results)
} }

View file

@ -16,7 +16,7 @@ use super::{
App, HotkeyAction, ScreenAction, ScreenView, App, HotkeyAction, ScreenAction, ScreenView,
screen_legend, screen_legend,
}; };
use crate::user::ui_channel::{UiMessage, StreamTarget}; use crate::user::ui_channel::StreamTarget;
use crate::mind::MindCommand; use crate::mind::MindCommand;
// --- Slash command table --- // --- Slash command table ---
@ -35,13 +35,15 @@ fn commands() -> Vec<SlashCommand> { vec![
SlashCommand { name: "/new", help: "Start fresh session (saves current)", SlashCommand { name: "/new", help: "Start fresh session (saves current)",
handler: |s, _| { let _ = s.mind_tx.send(MindCommand::NewSession); } }, handler: |s, _| { let _ = s.mind_tx.send(MindCommand::NewSession); } },
SlashCommand { name: "/save", help: "Save session to disk", SlashCommand { name: "/save", help: "Save session to disk",
handler: |s, _| { let _ = s.ui_tx.send(UiMessage::Info("Conversation is saved automatically.".into())); } }, handler: |s, _| {
if let Ok(mut ag) = s.agent.try_lock() { ag.notify("saved"); }
} },
SlashCommand { name: "/retry", help: "Re-run last turn", SlashCommand { name: "/retry", help: "Re-run last turn",
handler: |s, _| { handler: |s, _| {
let agent = s.agent.clone(); let agent = s.agent.clone();
let mind_tx = s.mind_tx.clone(); let mind_tx = s.mind_tx.clone();
let ui_tx = s.ui_tx.clone();
tokio::spawn(async move { tokio::spawn(async move {
let _act = crate::agent::start_activity(&agent, "retrying...").await;
let mut ag = agent.lock().await; let mut ag = agent.lock().await;
let entries = ag.entries_mut(); let entries = ag.entries_mut();
let mut last_user_text = None; let mut last_user_text = None;
@ -53,36 +55,29 @@ fn commands() -> Vec<SlashCommand> { vec![
entries.pop(); entries.pop();
} }
drop(ag); drop(ag);
match last_user_text { if let Some(text) = last_user_text {
Some(text) => { let _ = mind_tx.send(MindCommand::Turn(text, StreamTarget::Conversation));
let preview = &text[..text.len().min(60)];
let _ = ui_tx.send(UiMessage::Info(format!("(retrying: {}...)", preview)));
let _ = mind_tx.send(MindCommand::Turn(text, StreamTarget::Conversation));
}
None => {
let _ = ui_tx.send(UiMessage::Info("(nothing to retry)".into()));
}
} }
}); });
} }, } },
SlashCommand { name: "/model", help: "Show/switch model (/model <name>)", SlashCommand { name: "/model", help: "Show/switch model (/model <name>)",
handler: |s, arg| { handler: |s, arg| {
if arg.is_empty() { if arg.is_empty() {
if let Ok(ag) = s.agent.try_lock() { if let Ok(mut ag) = s.agent.try_lock() {
let _ = s.ui_tx.send(UiMessage::Info(format!("Current model: {}", ag.model())));
let names = ag.app_config.model_names(); let names = ag.app_config.model_names();
if !names.is_empty() { let label = if names.is_empty() {
let _ = s.ui_tx.send(UiMessage::Info(format!("Available: {}", names.join(", ")))); format!("model: {}", ag.model())
} } else {
} else { format!("model: {} ({})", ag.model(), names.join(", "))
let _ = s.ui_tx.send(UiMessage::Info("(busy)".into())); };
ag.notify(label);
} }
} else { } else {
let agent = s.agent.clone(); let agent = s.agent.clone();
let ui_tx = s.ui_tx.clone();
let name = arg.to_string(); let name = arg.to_string();
tokio::spawn(async move { tokio::spawn(async move {
cmd_switch_model(&agent, &name, &ui_tx).await; let _act = crate::agent::start_activity(&agent, format!("switching to {}...", name)).await;
cmd_switch_model(&agent, &name).await;
}); });
} }
} }, } },
@ -91,16 +86,16 @@ fn commands() -> Vec<SlashCommand> { vec![
SlashCommand { name: "/dmn", help: "Show DMN state", SlashCommand { name: "/dmn", help: "Show DMN state",
handler: |s, _| { handler: |s, _| {
let st = s.shared_mind.lock().unwrap(); let st = s.shared_mind.lock().unwrap();
let _ = s.ui_tx.send(UiMessage::Info(format!( if let Ok(mut ag) = s.agent.try_lock() {
"DMN: {:?} ({}/{})", st.dmn, st.dmn_turns, st.max_dmn_turns, ag.notify(format!("DMN: {:?} ({}/{})", st.dmn, st.dmn_turns, st.max_dmn_turns));
))); }
} }, } },
SlashCommand { name: "/sleep", help: "Put DMN to sleep", SlashCommand { name: "/sleep", help: "Put DMN to sleep",
handler: |s, _| { handler: |s, _| {
let mut st = s.shared_mind.lock().unwrap(); let mut st = s.shared_mind.lock().unwrap();
st.dmn = crate::mind::dmn::State::Resting { since: std::time::Instant::now() }; st.dmn = crate::mind::dmn::State::Resting { since: std::time::Instant::now() };
st.dmn_turns = 0; st.dmn_turns = 0;
let _ = s.ui_tx.send(UiMessage::Info("DMN sleeping.".into())); if let Ok(mut ag) = s.agent.try_lock() { ag.notify("DMN sleeping"); }
} }, } },
SlashCommand { name: "/wake", help: "Wake DMN to foraging", SlashCommand { name: "/wake", help: "Wake DMN to foraging",
handler: |s, _| { handler: |s, _| {
@ -108,17 +103,17 @@ fn commands() -> Vec<SlashCommand> { vec![
if matches!(st.dmn, crate::mind::dmn::State::Off) { crate::mind::dmn::set_off(false); } if matches!(st.dmn, crate::mind::dmn::State::Off) { crate::mind::dmn::set_off(false); }
st.dmn = crate::mind::dmn::State::Foraging; st.dmn = crate::mind::dmn::State::Foraging;
st.dmn_turns = 0; st.dmn_turns = 0;
let _ = s.ui_tx.send(UiMessage::Info("DMN foraging.".into())); if let Ok(mut ag) = s.agent.try_lock() { ag.notify("DMN foraging"); }
} }, } },
SlashCommand { name: "/pause", help: "Full stop — no autonomous ticks (Ctrl+P)", SlashCommand { name: "/pause", help: "Full stop — no autonomous ticks (Ctrl+P)",
handler: |s, _| { handler: |s, _| {
let mut st = s.shared_mind.lock().unwrap(); let mut st = s.shared_mind.lock().unwrap();
st.dmn = crate::mind::dmn::State::Paused; st.dmn = crate::mind::dmn::State::Paused;
st.dmn_turns = 0; st.dmn_turns = 0;
let _ = s.ui_tx.send(UiMessage::Info("DMN paused.".into())); if let Ok(mut ag) = s.agent.try_lock() { ag.notify("DMN paused"); }
} }, } },
SlashCommand { name: "/help", help: "Show this help", SlashCommand { name: "/help", help: "Show this help",
handler: |s, _| { send_help(&s.ui_tx); } }, handler: |s, _| { notify_help(&s.agent); } },
]} ]}
fn dispatch_command(input: &str) -> Option<SlashCommand> { fn dispatch_command(input: &str) -> Option<SlashCommand> {
@ -130,14 +125,13 @@ fn dispatch_command(input: &str) -> Option<SlashCommand> {
pub async fn cmd_switch_model( pub async fn cmd_switch_model(
agent: &std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>, agent: &std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>,
name: &str, name: &str,
ui_tx: &crate::user::ui_channel::UiSender,
) { ) {
let resolved = { let resolved = {
let ag = agent.lock().await; let ag = agent.lock().await;
match ag.app_config.resolve_model(name) { match ag.app_config.resolve_model(name) {
Ok(r) => r, Ok(r) => r,
Err(e) => { Err(e) => {
let _ = ui_tx.send(UiMessage::Info(format!("{}", e))); agent.lock().await.notify(format!("model error: {}", e));
return; return;
} }
} }
@ -154,28 +148,21 @@ pub async fn cmd_switch_model(
if prompt_changed { if prompt_changed {
ag.prompt_file = resolved.prompt_file.clone(); ag.prompt_file = resolved.prompt_file.clone();
ag.compact(); ag.compact();
let _ = ui_tx.send(UiMessage::Info(format!( ag.notify(format!("switched to {} (recompacted)", resolved.model_id));
"Switched to {} ({}) — prompt: {}, recompacted",
name, resolved.model_id, resolved.prompt_file,
)));
} else { } else {
let _ = ui_tx.send(UiMessage::Info(format!( ag.notify(format!("switched to {}", resolved.model_id));
"Switched to {} ({})", name, resolved.model_id,
)));
} }
} }
pub(crate) fn send_help(ui_tx: &crate::user::ui_channel::UiSender) { fn notify_help(agent: &std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>) {
for cmd in &commands() { if let Ok(mut ag) = agent.try_lock() {
let _ = ui_tx.send(UiMessage::Info(format!(" {:12} {}", cmd.name, cmd.help))); let mut help = String::new();
for cmd in &commands() {
help.push_str(&format!("{:12} {}\n", cmd.name, cmd.help));
}
help.push_str("Keys: Tab ^Up/Down PgUp/Down Mouse Esc ^P ^R ^K");
ag.notify(help);
} }
let _ = ui_tx.send(UiMessage::Info(String::new()));
let _ = ui_tx.send(UiMessage::Info(
"Keys: Tab=pane ^Up/Down=scroll PgUp/PgDn=scroll Mouse=click/scroll".into(),
));
let _ = ui_tx.send(UiMessage::Info(
" Alt+Enter=newline Esc=interrupt ^P=pause ^R=reasoning ^K=kill".into(),
));
} }
/// Turn marker for the conversation pane gutter. /// Turn marker for the conversation pane gutter.
@ -410,7 +397,6 @@ pub(crate) struct InteractScreen {
agent: std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>, agent: std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>,
shared_mind: std::sync::Arc<crate::mind::SharedMindState>, shared_mind: std::sync::Arc<crate::mind::SharedMindState>,
mind_tx: tokio::sync::mpsc::UnboundedSender<crate::mind::MindCommand>, mind_tx: tokio::sync::mpsc::UnboundedSender<crate::mind::MindCommand>,
ui_tx: crate::user::ui_channel::UiSender,
} }
impl InteractScreen { impl InteractScreen {
@ -418,7 +404,6 @@ impl InteractScreen {
agent: std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>, agent: std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>,
shared_mind: std::sync::Arc<crate::mind::SharedMindState>, shared_mind: std::sync::Arc<crate::mind::SharedMindState>,
mind_tx: tokio::sync::mpsc::UnboundedSender<crate::mind::MindCommand>, mind_tx: tokio::sync::mpsc::UnboundedSender<crate::mind::MindCommand>,
ui_tx: crate::user::ui_channel::UiSender,
) -> Self { ) -> Self {
Self { Self {
autonomous: PaneState::new(true), autonomous: PaneState::new(true),
@ -439,7 +424,6 @@ impl InteractScreen {
agent, agent,
shared_mind, shared_mind,
mind_tx, mind_tx,
ui_tx,
} }
} }
@ -576,7 +560,9 @@ impl InteractScreen {
if let Some(cmd) = dispatch_command(input) { if let Some(cmd) = dispatch_command(input) {
(cmd.handler)(self, &input[cmd.name.len()..].trim_start()); (cmd.handler)(self, &input[cmd.name.len()..].trim_start());
} else { } else {
let _ = self.ui_tx.send(UiMessage::Info(format!("Unknown command: {}", input.split_whitespace().next().unwrap_or(input)))); if let Ok(mut ag) = self.agent.try_lock() {
ag.notify(format!("unknown: {}", input.split_whitespace().next().unwrap_or(input)));
}
} }
return; return;
} }
@ -585,29 +571,6 @@ impl InteractScreen {
self.shared_mind.lock().unwrap().input.push(input.to_string()); self.shared_mind.lock().unwrap().input.push(input.to_string());
} }
/// Process a UiMessage — update pane state.
pub fn handle_ui_message(&mut self, msg: &UiMessage, app: &mut App) {
match msg {
UiMessage::TextDelta(text, target) => match target {
StreamTarget::Conversation => {
if self.needs_assistant_marker {
self.conversation.pending_marker = Marker::Assistant;
self.needs_assistant_marker = false;
}
self.conversation.current_color = Color::Reset;
self.conversation.append_text(text);
}
StreamTarget::Autonomous => {
self.autonomous.current_color = Color::Reset;
self.autonomous.append_text(text);
}
},
UiMessage::Info(text) => {
self.conversation.push_line(text.clone(), Color::Cyan);
}
_ => {}
}
}
fn scroll_active_up(&mut self, n: u16) { fn scroll_active_up(&mut self, n: u16) {
match self.active_pane { match self.active_pane {
@ -886,13 +849,14 @@ impl ScreenView for InteractScreen {
self.sync_from_agent(); self.sync_from_agent();
// Read status from agent + mind state // Read status from agent + mind state
if let Ok(agent) = self.agent.try_lock() { if let Ok(mut agent) = self.agent.try_lock() {
agent.expire_activities();
app.status.prompt_tokens = agent.last_prompt_tokens(); app.status.prompt_tokens = agent.last_prompt_tokens();
app.status.model = agent.model().to_string(); app.status.model = agent.model().to_string();
app.status.context_budget = agent.budget().status_string(); app.status.context_budget = agent.budget().status_string();
if !agent.activity.is_empty() { app.activity = agent.activities.last()
app.activity = agent.activity.clone(); .map(|a| a.label.clone())
} .unwrap_or_default();
} }
{ {
let mind = self.shared_mind.lock().unwrap(); let mind = self.shared_mind.lock().unwrap();

View file

@ -17,7 +17,6 @@ use std::time::Duration;
use crate::mind::MindCommand; use crate::mind::MindCommand;
use crate::user::{self as tui}; use crate::user::{self as tui};
use crate::user::ui_channel::UiMessage;
// --- TUI infrastructure (moved from tui/mod.rs) --- // --- TUI infrastructure (moved from tui/mod.rs) ---
@ -211,14 +210,14 @@ pub async fn start(cli: crate::user::CliArgs) -> Result<()> {
s.spawn(async { s.spawn(async {
result = run( result = run(
tui::App::new(String::new(), shared_context, shared_active_tools), tui::App::new(String::new(), shared_context, shared_active_tools),
&mind, mind_tx, ui_tx, ui_rx, &mind, mind_tx,
).await; ).await;
}); });
}); });
result result
} }
fn hotkey_cycle_reasoning(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender) { fn hotkey_cycle_reasoning(mind: &crate::mind::Mind) {
if let Ok(mut ag) = mind.agent.try_lock() { if let Ok(mut ag) = mind.agent.try_lock() {
let next = match ag.reasoning_effort.as_str() { let next = match ag.reasoning_effort.as_str() {
"none" => "low", "none" => "low",
@ -232,31 +231,26 @@ fn hotkey_cycle_reasoning(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender
"high" => "high (full monologue)", "high" => "high (full monologue)",
_ => next, _ => next,
}; };
let _ = ui_tx.send(UiMessage::Info(format!("Reasoning: {} — ^R to cycle", label))); ag.notify(format!("reasoning: {}", label));
} else {
let _ = ui_tx.send(UiMessage::Info(
"(agent busy — reasoning change takes effect next turn)".into(),
));
} }
} }
async fn hotkey_kill_processes(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender) { async fn hotkey_kill_processes(mind: &crate::mind::Mind) {
let active_tools = mind.agent.lock().await.active_tools.clone(); let mut ag = mind.agent.lock().await;
let active_tools = ag.active_tools.clone();
let mut tools = active_tools.lock().unwrap(); let mut tools = active_tools.lock().unwrap();
if tools.is_empty() { if tools.is_empty() {
let _ = ui_tx.send(UiMessage::Info("(no running tool calls)".into())); ag.notify("no running tools");
} else { } else {
let count = tools.len();
for entry in tools.drain(..) { for entry in tools.drain(..) {
let elapsed = entry.started.elapsed();
let _ = ui_tx.send(UiMessage::Info(format!(
" killing {} ({:.0}s): {}", entry.name, elapsed.as_secs_f64(), entry.detail,
)));
entry.handle.abort(); entry.handle.abort();
} }
ag.notify(format!("killed {} tools", count));
} }
} }
fn hotkey_cycle_autonomy(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender) { fn hotkey_cycle_autonomy(mind: &crate::mind::Mind) {
let mut s = mind.shared.lock().unwrap(); let mut s = mind.shared.lock().unwrap();
let label = match &s.dmn { let label = match &s.dmn {
crate::mind::dmn::State::Engaged | crate::mind::dmn::State::Working | crate::mind::dmn::State::Foraging => { crate::mind::dmn::State::Engaged | crate::mind::dmn::State::Working | crate::mind::dmn::State::Foraging => {
@ -280,7 +274,9 @@ fn hotkey_cycle_autonomy(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender)
}; };
s.dmn_turns = 0; s.dmn_turns = 0;
drop(s); drop(s);
let _ = ui_tx.send(UiMessage::Info(format!("DMN → {} (Ctrl+P to cycle)", label))); if let Ok(mut ag) = mind.agent.try_lock() {
ag.notify(format!("DMN → {}", label));
}
} }
fn hotkey_adjust_sampling(mind: &crate::mind::Mind, param: usize, delta: f32) { fn hotkey_adjust_sampling(mind: &crate::mind::Mind, param: usize, delta: f32) {
@ -294,24 +290,10 @@ fn hotkey_adjust_sampling(mind: &crate::mind::Mind, param: usize, delta: f32) {
} }
} }
/// Send a one-line summary of the loaded context to the UI channel.
///
/// Gathers the configured context groups, resolves which instruction and
/// memory files contribute to the prompt via
/// `crate::mind::identity::context_file_info`, then emits a single
/// `UiMessage::Info` line reporting total context size (in KiB of chars,
/// i.e. summed lengths / 1024) plus the file counts.
///
/// The send result is deliberately ignored (`let _ =`): if the UI receiver
/// is gone, this status line is best-effort and safe to drop.
pub fn send_context_info(config: &crate::config::SessionConfig, ui_tx: &ui_channel::UiSender) {
    let context_groups = crate::config::get().context_groups.clone();
    let (instruction_files, memory_files) = crate::mind::identity::context_file_info(
        &config.prompt_file,
        // Memory project is optional; pass through as Option<&str>.
        config.app.memory_project.as_deref(),
        &context_groups,
    );
    let _ = ui_tx.send(UiMessage::Info(format!(
        " context: {}K chars ({} config, {} memory files)",
        config.context_parts.iter().map(|(_, c)| c.len()).sum::<usize>() / 1024,
        instruction_files.len(), memory_files.len(),
    )));
}
fn diff_mind_state( fn diff_mind_state(
cur: &crate::mind::MindState, cur: &crate::mind::MindState,
prev: &crate::mind::MindState, prev: &crate::mind::MindState,
ui_tx: &ui_channel::UiSender,
dirty: &mut bool, dirty: &mut bool,
) { ) {
if cur.dmn.label() != prev.dmn.label() || cur.dmn_turns != prev.dmn_turns { if cur.dmn.label() != prev.dmn.label() || cur.dmn_turns != prev.dmn_turns {
@ -325,15 +307,9 @@ fn diff_mind_state(
*dirty = true; *dirty = true;
} }
if cur.scoring_in_flight != prev.scoring_in_flight { if cur.scoring_in_flight != prev.scoring_in_flight {
if !cur.scoring_in_flight && prev.scoring_in_flight {
let _ = ui_tx.send(UiMessage::Info("[scoring complete]".into()));
}
*dirty = true; *dirty = true;
} }
if cur.compaction_in_flight != prev.compaction_in_flight { if cur.compaction_in_flight != prev.compaction_in_flight {
if !cur.compaction_in_flight && prev.compaction_in_flight {
let _ = ui_tx.send(UiMessage::Info("[compacted]".into()));
}
*dirty = true; *dirty = true;
} }
} }
@ -342,8 +318,6 @@ pub async fn run(
mut app: tui::App, mut app: tui::App,
mind: &crate::mind::Mind, mind: &crate::mind::Mind,
mind_tx: tokio::sync::mpsc::UnboundedSender<MindCommand>, mind_tx: tokio::sync::mpsc::UnboundedSender<MindCommand>,
ui_tx: ui_channel::UiSender,
mut ui_rx: ui_channel::UiReceiver,
) -> Result<()> { ) -> Result<()> {
let agent = &mind.agent; let agent = &mind.agent;
let shared_mind = &mind.shared; let shared_mind = &mind.shared;
@ -362,9 +336,8 @@ pub async fn run(
} }
let notify_rx = crate::thalamus::channels::subscribe_all(); let notify_rx = crate::thalamus::channels::subscribe_all();
// InteractScreen held separately for UiMessage routing
let mut interact = crate::user::chat::InteractScreen::new( let mut interact = crate::user::chat::InteractScreen::new(
mind.agent.clone(), mind.shared.clone(), mind_tx.clone(), ui_tx.clone(), mind.agent.clone(), mind.shared.clone(), mind_tx.clone(),
); );
// Overlay screens: F2=conscious, F3=subconscious, F4=unconscious, F5=thalamus // Overlay screens: F2=conscious, F3=subconscious, F4=unconscious, F5=thalamus
let mut screens: Vec<Box<dyn tui::ScreenView>> = vec![ let mut screens: Vec<Box<dyn tui::ScreenView>> = vec![
@ -387,7 +360,7 @@ pub async fn run(
terminal.hide_cursor()?; terminal.hide_cursor()?;
let _ = ui_tx.send(UiMessage::Info("consciousness v0.3 (tui)".into())); if let Ok(mut ag) = agent.try_lock() { ag.notify("consciousness v0.3"); }
// Initial render // Initial render
terminal.draw(|f| { terminal.draw(|f| {
@ -453,9 +426,9 @@ pub async fn run(
// One-time: mark startup done after Mind init // One-time: mark startup done after Mind init
if !startup_done { if !startup_done {
if let Ok(ag) = agent.try_lock() { if let Ok(mut ag) = agent.try_lock() {
// sync_from_agent handles conversation replay let model = ag.model().to_string();
let _ = ui_tx.send(UiMessage::Info(format!(" model: {}", ag.model()))); ag.notify(format!("model: {}", model));
startup_done = true; startup_done = true;
dirty = true; dirty = true;
} }
@ -464,7 +437,7 @@ pub async fn run(
// Diff MindState — generate UI messages from changes // Diff MindState — generate UI messages from changes
{ {
let cur = shared_mind.lock().unwrap(); let cur = shared_mind.lock().unwrap();
diff_mind_state(&cur, &prev_mind, &ui_tx, &mut dirty); diff_mind_state(&cur, &prev_mind, &mut dirty);
prev_mind = cur.clone(); prev_mind = cur.clone();
} }
@ -482,27 +455,18 @@ pub async fn run(
dirty = true; dirty = true;
} }
Some(msg) = ui_rx.recv() => {
interact.handle_ui_message(&msg, &mut app);
dirty = true;
}
} }
// Handle hotkey actions // Handle hotkey actions
let actions: Vec<HotkeyAction> = app.hotkey_actions.drain(..).collect(); let actions: Vec<HotkeyAction> = app.hotkey_actions.drain(..).collect();
for action in actions { for action in actions {
match action { match action {
HotkeyAction::CycleReasoning => hotkey_cycle_reasoning(mind, &ui_tx), HotkeyAction::CycleReasoning => hotkey_cycle_reasoning(mind),
HotkeyAction::KillProcess => hotkey_kill_processes(mind, &ui_tx).await, HotkeyAction::KillProcess => hotkey_kill_processes(mind).await,
HotkeyAction::Interrupt => { let _ = mind_tx.send(MindCommand::Interrupt); } HotkeyAction::Interrupt => { let _ = mind_tx.send(MindCommand::Interrupt); }
HotkeyAction::CycleAutonomy => hotkey_cycle_autonomy(mind, &ui_tx), HotkeyAction::CycleAutonomy => hotkey_cycle_autonomy(mind),
HotkeyAction::AdjustSampling(param, delta) => hotkey_adjust_sampling(mind, param, delta), HotkeyAction::AdjustSampling(param, delta) => hotkey_adjust_sampling(mind, param, delta),
} }
}
// Drain UiMessages to interact screen
while let Ok(msg) = ui_rx.try_recv() {
interact.handle_ui_message(&msg, &mut app);
dirty = true; dirty = true;
} }

View file

@ -71,9 +71,6 @@ pub struct ContextInfo {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[allow(dead_code)] #[allow(dead_code)]
pub enum UiMessage { pub enum UiMessage {
/// Streaming text delta — routed to conversation or autonomous pane.
TextDelta(String, StreamTarget),
/// Informational message — goes to conversation pane (command output, etc). /// Informational message — goes to conversation pane (command output, etc).
Info(String), Info(String),