Kill TextDelta — UiMessage is all but dead. RAII ActivityGuards replace all status feedback

Streaming text now goes directly to agent entries via append_streaming().
sync_from_agent diffs the growing entry each tick. The streaming entry
is popped when the response completes; build_response_message pushes
the final version.

All status feedback uses RAII ActivityGuards:
- push_activity() for long-running work (thinking, streaming, scoring)
- notify() for instant feedback (compacted, DMN state changes, commands)
- Guards auto-remove on Drop, appending "(complete)" and lingering 5s
- expire_activities() cleans up timed-out notifications on render tick

UiMessage enum reduced to a single Info variant with zero sends.
The channel infrastructure remains for now (Mind/Agent still take
UiSender in signatures) — mechanical cleanup for a follow-up.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-05 22:18:07 -04:00
parent e7914e3d58
commit cfddb55ed9
9 changed files with 201 additions and 186 deletions

View file

@ -17,7 +17,6 @@ use std::time::Duration;
use crate::mind::MindCommand;
use crate::user::{self as tui};
use crate::user::ui_channel::UiMessage;
// --- TUI infrastructure (moved from tui/mod.rs) ---
@ -211,14 +210,14 @@ pub async fn start(cli: crate::user::CliArgs) -> Result<()> {
s.spawn(async {
result = run(
tui::App::new(String::new(), shared_context, shared_active_tools),
&mind, mind_tx, ui_tx, ui_rx,
&mind, mind_tx,
).await;
});
});
result
}
fn hotkey_cycle_reasoning(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender) {
fn hotkey_cycle_reasoning(mind: &crate::mind::Mind) {
if let Ok(mut ag) = mind.agent.try_lock() {
let next = match ag.reasoning_effort.as_str() {
"none" => "low",
@ -232,31 +231,26 @@ fn hotkey_cycle_reasoning(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender
"high" => "high (full monologue)",
_ => next,
};
let _ = ui_tx.send(UiMessage::Info(format!("Reasoning: {} — ^R to cycle", label)));
} else {
let _ = ui_tx.send(UiMessage::Info(
"(agent busy — reasoning change takes effect next turn)".into(),
));
ag.notify(format!("reasoning: {}", label));
}
}
async fn hotkey_kill_processes(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender) {
let active_tools = mind.agent.lock().await.active_tools.clone();
async fn hotkey_kill_processes(mind: &crate::mind::Mind) {
let mut ag = mind.agent.lock().await;
let active_tools = ag.active_tools.clone();
let mut tools = active_tools.lock().unwrap();
if tools.is_empty() {
let _ = ui_tx.send(UiMessage::Info("(no running tool calls)".into()));
ag.notify("no running tools");
} else {
let count = tools.len();
for entry in tools.drain(..) {
let elapsed = entry.started.elapsed();
let _ = ui_tx.send(UiMessage::Info(format!(
" killing {} ({:.0}s): {}", entry.name, elapsed.as_secs_f64(), entry.detail,
)));
entry.handle.abort();
}
ag.notify(format!("killed {} tools", count));
}
}
fn hotkey_cycle_autonomy(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender) {
fn hotkey_cycle_autonomy(mind: &crate::mind::Mind) {
let mut s = mind.shared.lock().unwrap();
let label = match &s.dmn {
crate::mind::dmn::State::Engaged | crate::mind::dmn::State::Working | crate::mind::dmn::State::Foraging => {
@ -280,7 +274,9 @@ fn hotkey_cycle_autonomy(mind: &crate::mind::Mind, ui_tx: &ui_channel::UiSender)
};
s.dmn_turns = 0;
drop(s);
let _ = ui_tx.send(UiMessage::Info(format!("DMN → {} (Ctrl+P to cycle)", label)));
if let Ok(mut ag) = mind.agent.try_lock() {
ag.notify(format!("DMN → {}", label));
}
}
fn hotkey_adjust_sampling(mind: &crate::mind::Mind, param: usize, delta: f32) {
@ -294,24 +290,10 @@ fn hotkey_adjust_sampling(mind: &crate::mind::Mind, param: usize, delta: f32) {
}
}
/// Send a one-line summary of the loaded context over the UI channel:
/// total size (KiB of chars) plus counts of instruction and memory files.
pub fn send_context_info(config: &crate::config::SessionConfig, ui_tx: &ui_channel::UiSender) {
    let context_groups = crate::config::get().context_groups.clone();
    let (instruction_files, memory_files) = crate::mind::identity::context_file_info(
        &config.prompt_file,
        config.app.memory_project.as_deref(),
        &context_groups,
    );
    // Sum the char lengths of every context part, then report in KiB.
    let total_chars: usize = config.context_parts.iter().map(|(_, c)| c.len()).sum();
    let summary = format!(
        " context: {}K chars ({} config, {} memory files)",
        total_chars / 1024,
        instruction_files.len(),
        memory_files.len(),
    );
    let _ = ui_tx.send(UiMessage::Info(summary));
}
fn diff_mind_state(
cur: &crate::mind::MindState,
prev: &crate::mind::MindState,
ui_tx: &ui_channel::UiSender,
dirty: &mut bool,
) {
if cur.dmn.label() != prev.dmn.label() || cur.dmn_turns != prev.dmn_turns {
@ -325,15 +307,9 @@ fn diff_mind_state(
*dirty = true;
}
if cur.scoring_in_flight != prev.scoring_in_flight {
if !cur.scoring_in_flight && prev.scoring_in_flight {
let _ = ui_tx.send(UiMessage::Info("[scoring complete]".into()));
}
*dirty = true;
}
if cur.compaction_in_flight != prev.compaction_in_flight {
if !cur.compaction_in_flight && prev.compaction_in_flight {
let _ = ui_tx.send(UiMessage::Info("[compacted]".into()));
}
*dirty = true;
}
}
@ -342,8 +318,6 @@ pub async fn run(
mut app: tui::App,
mind: &crate::mind::Mind,
mind_tx: tokio::sync::mpsc::UnboundedSender<MindCommand>,
ui_tx: ui_channel::UiSender,
mut ui_rx: ui_channel::UiReceiver,
) -> Result<()> {
let agent = &mind.agent;
let shared_mind = &mind.shared;
@ -362,9 +336,8 @@ pub async fn run(
}
let notify_rx = crate::thalamus::channels::subscribe_all();
// InteractScreen held separately from the overlay screens (NOTE(review): the
// original rationale — UiMessage routing — was removed in this commit; confirm
// whether separate ownership is still required)
let mut interact = crate::user::chat::InteractScreen::new(
mind.agent.clone(), mind.shared.clone(), mind_tx.clone(), ui_tx.clone(),
mind.agent.clone(), mind.shared.clone(), mind_tx.clone(),
);
// Overlay screens: F2=conscious, F3=subconscious, F4=unconscious, F5=thalamus
let mut screens: Vec<Box<dyn tui::ScreenView>> = vec![
@ -387,7 +360,7 @@ pub async fn run(
terminal.hide_cursor()?;
let _ = ui_tx.send(UiMessage::Info("consciousness v0.3 (tui)".into()));
if let Ok(mut ag) = agent.try_lock() { ag.notify("consciousness v0.3"); }
// Initial render
terminal.draw(|f| {
@ -453,9 +426,9 @@ pub async fn run(
// One-time: mark startup done after Mind init
if !startup_done {
if let Ok(ag) = agent.try_lock() {
// sync_from_agent handles conversation replay
let _ = ui_tx.send(UiMessage::Info(format!(" model: {}", ag.model())));
if let Ok(mut ag) = agent.try_lock() {
let model = ag.model().to_string();
ag.notify(format!("model: {}", model));
startup_done = true;
dirty = true;
}
@ -464,7 +437,7 @@ pub async fn run(
// Diff MindState — generate UI messages from changes
{
let cur = shared_mind.lock().unwrap();
diff_mind_state(&cur, &prev_mind, &ui_tx, &mut dirty);
diff_mind_state(&cur, &prev_mind, &mut dirty);
prev_mind = cur.clone();
}
@ -482,27 +455,18 @@ pub async fn run(
dirty = true;
}
Some(msg) = ui_rx.recv() => {
interact.handle_ui_message(&msg, &mut app);
dirty = true;
}
}
// Handle hotkey actions
let actions: Vec<HotkeyAction> = app.hotkey_actions.drain(..).collect();
for action in actions {
match action {
HotkeyAction::CycleReasoning => hotkey_cycle_reasoning(mind, &ui_tx),
HotkeyAction::KillProcess => hotkey_kill_processes(mind, &ui_tx).await,
HotkeyAction::CycleReasoning => hotkey_cycle_reasoning(mind),
HotkeyAction::KillProcess => hotkey_kill_processes(mind).await,
HotkeyAction::Interrupt => { let _ = mind_tx.send(MindCommand::Interrupt); }
HotkeyAction::CycleAutonomy => hotkey_cycle_autonomy(mind, &ui_tx),
HotkeyAction::CycleAutonomy => hotkey_cycle_autonomy(mind),
HotkeyAction::AdjustSampling(param, delta) => hotkey_adjust_sampling(mind, param, delta),
}
}
// Drain UiMessages to interact screen
while let Ok(msg) = ui_rx.try_recv() {
interact.handle_ui_message(&msg, &mut app);
dirty = true;
}