Delete ui_channel.rs — relocate types, remove all UiMessage/UiSender plumbing

Types relocated:
- StreamTarget → mind/mod.rs (Mind decides Conversation vs Autonomous)
- SharedActiveTools + shared_active_tools() → agent/tools/mod.rs
- ContextSection + SharedContextState → agent/context.rs (already there)
- StatusInfo + ContextInfo → user/mod.rs (UI display state)

Removed UiSender from: Agent::turn, Mind, learn.rs, all function signatures.
The entire message-passing layer is gone. All state flows through
Agent fields (activities, entries, streaming) read by the UI via try_lock.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-05 22:34:48 -04:00
parent cfddb55ed9
commit f390fa1617
11 changed files with 72 additions and 165 deletions

View file

@ -20,7 +20,6 @@ use tokio::sync::mpsc;
use crate::agent::tools::{self as agent_tools, summarize_args, ActiveToolCall}; use crate::agent::tools::{self as agent_tools, summarize_args, ActiveToolCall};
pub use types::ToolCall; pub use types::ToolCall;
use crate::user::ui_channel::UiSender;
/// A JoinHandle that aborts its task when dropped. /// A JoinHandle that aborts its task when dropped.
pub struct AbortOnDrop(tokio::task::JoinHandle<()>); pub struct AbortOnDrop(tokio::task::JoinHandle<()>);
@ -107,7 +106,6 @@ impl ApiClient {
&self, &self,
messages: &[Message], messages: &[Message],
tools: &[agent_tools::Tool], tools: &[agent_tools::Tool],
ui_tx: &UiSender,
reasoning_effort: &str, reasoning_effort: &str,
sampling: SamplingParams, sampling: SamplingParams,
priority: Option<i32>, priority: Option<i32>,
@ -119,14 +117,13 @@ impl ApiClient {
let messages = messages.to_vec(); let messages = messages.to_vec();
let tools_json = tools_to_json_str(tools); let tools_json = tools_to_json_str(tools);
let tools_value: serde_json::Value = serde_json::from_str(&tools_json).unwrap_or_default(); let tools_value: serde_json::Value = serde_json::from_str(&tools_json).unwrap_or_default();
let ui_tx = ui_tx.clone();
let reasoning_effort = reasoning_effort.to_string(); let reasoning_effort = reasoning_effort.to_string();
let base_url = self.base_url.clone(); let base_url = self.base_url.clone();
let handle = tokio::spawn(async move { let handle = tokio::spawn(async move {
let result = openai::stream_events( let result = openai::stream_events(
&client, &base_url, &api_key, &model, &client, &base_url, &api_key, &model,
&messages, &tools_value, &tx, &ui_tx, &messages, &tools_value, &tx,
&reasoning_effort, sampling, priority, &reasoning_effort, sampling, priority,
).await; ).await;
if let Err(e) = result { if let Err(e) = result {
@ -141,13 +138,12 @@ impl ApiClient {
&self, &self,
messages: &[Message], messages: &[Message],
tools: &[agent_tools::Tool], tools: &[agent_tools::Tool],
ui_tx: &UiSender,
reasoning_effort: &str, reasoning_effort: &str,
sampling: SamplingParams, sampling: SamplingParams,
priority: Option<i32>, priority: Option<i32>,
) -> Result<(Message, Option<Usage>)> { ) -> Result<(Message, Option<Usage>)> {
// Use the event stream and accumulate into a message. // Use the event stream and accumulate into a message.
let (mut rx, _handle) = self.start_stream(messages, tools, ui_tx, reasoning_effort, sampling, priority); let (mut rx, _handle) = self.start_stream(messages, tools, reasoning_effort, sampling, priority);
let mut content = String::new(); let mut content = String::new();
let mut tool_calls: Vec<ToolCall> = Vec::new(); let mut tool_calls: Vec<ToolCall> = Vec::new();
let mut usage = None; let mut usage = None;
@ -319,14 +315,13 @@ pub(crate) struct SseReader {
pub sse_lines_parsed: u64, pub sse_lines_parsed: u64,
pub sse_parse_errors: u64, pub sse_parse_errors: u64,
debug: bool, debug: bool,
ui_tx: UiSender,
done: bool, done: bool,
/// Serialized request payload — saved to disk on errors for replay debugging. /// Serialized request payload — saved to disk on errors for replay debugging.
pub(crate) request_json: Option<String>, pub(crate) request_json: Option<String>,
} }
impl SseReader { impl SseReader {
pub(crate) fn new(ui_tx: &UiSender) -> Self { pub(crate) fn new() -> Self {
Self { Self {
line_buf: String::new(), line_buf: String::new(),
chunk_timeout: Duration::from_secs(crate::config::get().api_stream_timeout_secs), chunk_timeout: Duration::from_secs(crate::config::get().api_stream_timeout_secs),
@ -335,7 +330,6 @@ impl SseReader {
sse_lines_parsed: 0, sse_lines_parsed: 0,
sse_parse_errors: 0, sse_parse_errors: 0,
debug: std::env::var("POC_DEBUG").is_ok(), debug: std::env::var("POC_DEBUG").is_ok(),
ui_tx: ui_tx.clone(),
done: false, done: false,
request_json: None, request_json: None,
} }
@ -516,7 +510,6 @@ pub fn build_response_message(
/// Log stream diagnostics. Shared by both backends. /// Log stream diagnostics. Shared by both backends.
pub(crate) fn log_diagnostics( pub(crate) fn log_diagnostics(
ui_tx: &UiSender,
content_len: usize, content_len: usize,
tool_count: usize, tool_count: usize,
reasoning_chars: usize, reasoning_chars: usize,
@ -619,7 +612,7 @@ pub struct StreamResult {
pub async fn collect_stream( pub async fn collect_stream(
rx: &mut mpsc::UnboundedReceiver<StreamEvent>, rx: &mut mpsc::UnboundedReceiver<StreamEvent>,
agent: &std::sync::Arc<tokio::sync::Mutex<super::Agent>>, agent: &std::sync::Arc<tokio::sync::Mutex<super::Agent>>,
active_tools: &crate::user::ui_channel::SharedActiveTools, active_tools: &crate::agent::tools::SharedActiveTools,
) -> StreamResult { ) -> StreamResult {
let mut content = String::new(); let mut content = String::new();
let mut tool_calls: Vec<ToolCall> = Vec::new(); let mut tool_calls: Vec<ToolCall> = Vec::new();

View file

@ -9,7 +9,6 @@ use reqwest::Client;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use super::types::*; use super::types::*;
use crate::user::ui_channel::UiSender;
use super::StreamEvent; use super::StreamEvent;
/// Stream SSE events from an OpenAI-compatible endpoint, sending /// Stream SSE events from an OpenAI-compatible endpoint, sending
@ -23,7 +22,6 @@ pub(super) async fn stream_events(
messages: &[Message], messages: &[Message],
tools_json: &serde_json::Value, tools_json: &serde_json::Value,
tx: &mpsc::UnboundedSender<StreamEvent>, tx: &mpsc::UnboundedSender<StreamEvent>,
ui_tx: &UiSender,
reasoning_effort: &str, reasoning_effort: &str,
sampling: super::SamplingParams, sampling: super::SamplingParams,
priority: Option<i32>, priority: Option<i32>,
@ -71,7 +69,7 @@ pub(super) async fn stream_events(
) )
.await?; .await?;
let mut reader = super::SseReader::new(ui_tx); let mut reader = super::SseReader::new();
reader.request_json = request_json; reader.request_json = request_json;
let mut content_len: usize = 0; let mut content_len: usize = 0;
@ -167,7 +165,6 @@ pub(super) async fn stream_events(
let total_elapsed = reader.stream_start.elapsed(); let total_elapsed = reader.stream_start.elapsed();
super::log_diagnostics( super::log_diagnostics(
ui_tx,
content_len, content_len,
tool_call_count, tool_call_count,
reasoning_chars, reasoning_chars,

View file

@ -28,7 +28,8 @@ use context::{ConversationEntry, ContextState, ContextBudget};
use tools::{summarize_args, working_stack}; use tools::{summarize_args, working_stack};
use crate::mind::log::ConversationLog; use crate::mind::log::ConversationLog;
use crate::user::ui_channel::{ContextSection, SharedContextState, StreamTarget, UiSender}; use crate::agent::context::{ContextSection, SharedContextState};
use crate::mind::StreamTarget;
use crate::subconscious::learn; use crate::subconscious::learn;
// --- Activity tracking (RAII guards) --- // --- Activity tracking (RAII guards) ---
@ -177,7 +178,7 @@ pub struct Agent {
/// TODO: move to Session — it's session-level, not agent-level. /// TODO: move to Session — it's session-level, not agent-level.
pub agent_cycles: crate::subconscious::subconscious::AgentCycleState, pub agent_cycles: crate::subconscious::subconscious::AgentCycleState,
/// Shared active tools — Agent writes, TUI reads. /// Shared active tools — Agent writes, TUI reads.
pub active_tools: crate::user::ui_channel::SharedActiveTools, pub active_tools: crate::agent::tools::SharedActiveTools,
} }
fn render_journal(entries: &[context::JournalEntry]) -> String { fn render_journal(entries: &[context::JournalEntry]) -> String {
@ -199,7 +200,7 @@ impl Agent {
prompt_file: String, prompt_file: String,
conversation_log: Option<ConversationLog>, conversation_log: Option<ConversationLog>,
shared_context: SharedContextState, shared_context: SharedContextState,
active_tools: crate::user::ui_channel::SharedActiveTools, active_tools: tools::SharedActiveTools,
) -> Self { ) -> Self {
let tokenizer = tiktoken_rs::cl100k_base() let tokenizer = tiktoken_rs::cl100k_base()
.expect("failed to load cl100k_base tokenizer"); .expect("failed to load cl100k_base tokenizer");
@ -325,7 +326,6 @@ impl Agent {
pub async fn turn( pub async fn turn(
agent: Arc<tokio::sync::Mutex<Agent>>, agent: Arc<tokio::sync::Mutex<Agent>>,
user_input: &str, user_input: &str,
ui_tx: &UiSender,
target: StreamTarget, target: StreamTarget,
) -> Result<TurnResult> { ) -> Result<TurnResult> {
// --- Pre-loop setup (lock 1): agent cycle, memories, user input --- // --- Pre-loop setup (lock 1): agent cycle, memories, user input ---
@ -382,7 +382,7 @@ impl Agent {
let mut me = agent.lock().await; let mut me = agent.lock().await;
let mut bg_ds = DispatchState::new(); let mut bg_ds = DispatchState::new();
for (call, output) in bg_results { for (call, output) in bg_results {
me.apply_tool_result(&call, output, ui_tx, &mut bg_ds); me.apply_tool_result(&call, output, &mut bg_ds);
} }
me.push_message(Message::user(user_input)); me.push_message(Message::user(user_input));
} }
@ -408,7 +408,6 @@ impl Agent {
me.client.start_stream( me.client.start_stream(
&api_messages, &api_messages,
&me.tools, &me.tools,
ui_tx,
&me.reasoning_effort, &me.reasoning_effort,
sampling, sampling,
None, None,
@ -522,7 +521,7 @@ impl Agent {
// Reacquire to apply results // Reacquire to apply results
let mut me = agent.lock().await; let mut me = agent.lock().await;
for (call, output) in results { for (call, output) in results {
me.apply_tool_result(&call, output, ui_tx, &mut ds); me.apply_tool_result(&call, output, &mut ds);
} }
me.publish_context_state(); me.publish_context_state();
continue; continue;
@ -536,7 +535,7 @@ impl Agent {
// Drop lock before tool dispatch // Drop lock before tool dispatch
for call in &calls { for call in &calls {
Agent::dispatch_tool_call_unlocked( Agent::dispatch_tool_call_unlocked(
&agent, &active_tools, call, ui_tx, &mut ds, &agent, &active_tools, call, &mut ds,
).await; ).await;
} }
continue; continue;
@ -568,9 +567,8 @@ impl Agent {
/// Used by `turn()` which manages its own locking. /// Used by `turn()` which manages its own locking.
async fn dispatch_tool_call_unlocked( async fn dispatch_tool_call_unlocked(
agent: &Arc<tokio::sync::Mutex<Agent>>, agent: &Arc<tokio::sync::Mutex<Agent>>,
active_tools: &crate::user::ui_channel::SharedActiveTools, active_tools: &crate::agent::tools::SharedActiveTools,
call: &ToolCall, call: &ToolCall,
ui_tx: &UiSender,
ds: &mut DispatchState, ds: &mut DispatchState,
) { ) {
let args: serde_json::Value = match serde_json::from_str(&call.function.arguments) { let args: serde_json::Value = match serde_json::from_str(&call.function.arguments) {
@ -579,7 +577,7 @@ impl Agent {
let err = format!("Error: malformed tool call arguments: {e}"); let err = format!("Error: malformed tool call arguments: {e}");
let _act = start_activity(agent, format!("rejected: {} (bad args)", call.function.name)).await; let _act = start_activity(agent, format!("rejected: {} (bad args)", call.function.name)).await;
let mut me = agent.lock().await; let mut me = agent.lock().await;
me.apply_tool_result(call, err, ui_tx, ds); me.apply_tool_result(call, err, ds);
return; return;
} }
}; };
@ -613,7 +611,7 @@ impl Agent {
if let Ok((call, output)) = entry.handle.await { if let Ok((call, output)) = entry.handle.await {
// Brief lock to apply result // Brief lock to apply result
let mut me = agent.lock().await; let mut me = agent.lock().await;
me.apply_tool_result(&call, output, ui_tx, ds); me.apply_tool_result(&call, output, ds);
} }
} }
@ -622,7 +620,6 @@ impl Agent {
&mut self, &mut self,
call: &ToolCall, call: &ToolCall,
output: String, output: String,
ui_tx: &UiSender,
ds: &mut DispatchState, ds: &mut DispatchState,
) { ) {
let args: serde_json::Value = let args: serde_json::Value =

View file

@ -22,6 +22,7 @@ pub mod working_stack;
use std::future::Future; use std::future::Future;
use std::pin::Pin; use std::pin::Pin;
use std::sync::Arc;
use std::time::Instant; use std::time::Instant;
fn default_timeout() -> u64 { 120 } fn default_timeout() -> u64 { 120 }
@ -70,6 +71,13 @@ pub struct ActiveToolCall {
pub handle: tokio::task::JoinHandle<(ToolCall, String)>, pub handle: tokio::task::JoinHandle<(ToolCall, String)>,
} }
/// Shared active tool calls — agent spawns, TUI reads metadata / aborts.
pub type SharedActiveTools = Arc<std::sync::Mutex<Vec<ActiveToolCall>>>;

/// Construct an empty shared active-tools list.
pub fn shared_active_tools() -> SharedActiveTools {
    // Arc<Mutex<Vec<_>>> has a Default impl producing exactly an empty list.
    SharedActiveTools::default()
}
/// Truncate output if it exceeds max length, appending a truncation notice. /// Truncate output if it exceeds max length, appending a truncation notice.
pub fn truncate_output(mut s: String, max: usize) -> String { pub fn truncate_output(mut s: String, max: usize) -> String {
if s.len() > max { if s.len() > max {

View file

@ -25,7 +25,14 @@ use crate::agent::{Agent, TurnResult};
use crate::agent::api::ApiClient; use crate::agent::api::ApiClient;
use crate::config::{AppConfig, SessionConfig}; use crate::config::{AppConfig, SessionConfig};
use crate::subconscious::learn; use crate::subconscious::learn;
use crate::user::ui_channel::{self, StreamTarget}; /// Which pane streaming text should go to.
/// Which pane streaming text should go to — decided by Mind when it starts a turn.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StreamTarget {
    /// User-initiated turn — text goes to conversation pane.
    Conversation,
    /// DMN-initiated turn — text goes to autonomous pane.
    Autonomous,
}
/// Compaction threshold — context is rebuilt when prompt tokens exceed this. /// Compaction threshold — context is rebuilt when prompt tokens exceed this.
fn compaction_threshold(app: &AppConfig) -> u32 { fn compaction_threshold(app: &AppConfig) -> u32 {
@ -192,7 +199,6 @@ pub struct Mind {
pub agent: Arc<tokio::sync::Mutex<Agent>>, pub agent: Arc<tokio::sync::Mutex<Agent>>,
pub shared: Arc<SharedMindState>, pub shared: Arc<SharedMindState>,
pub config: SessionConfig, pub config: SessionConfig,
ui_tx: ui_channel::UiSender,
turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>, turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>,
turn_watch: tokio::sync::watch::Sender<bool>, turn_watch: tokio::sync::watch::Sender<bool>,
bg_tx: mpsc::UnboundedSender<BgEvent>, bg_tx: mpsc::UnboundedSender<BgEvent>,
@ -203,11 +209,10 @@ pub struct Mind {
impl Mind { impl Mind {
pub fn new( pub fn new(
config: SessionConfig, config: SessionConfig,
ui_tx: ui_channel::UiSender,
turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>, turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>,
) -> Self { ) -> Self {
let shared_context = ui_channel::shared_context_state(); let shared_context = crate::agent::context::shared_context_state();
let shared_active_tools = ui_channel::shared_active_tools(); let shared_active_tools = crate::agent::tools::shared_active_tools();
let client = ApiClient::new(&config.api_base, &config.api_key, &config.model); let client = ApiClient::new(&config.api_base, &config.api_key, &config.model);
let conversation_log = log::ConversationLog::new( let conversation_log = log::ConversationLog::new(
@ -233,7 +238,7 @@ impl Mind {
sup.load_config(); sup.load_config();
sup.ensure_running(); sup.ensure_running();
Self { agent, shared, config, ui_tx, turn_tx, turn_watch, bg_tx, Self { agent, shared, config, turn_tx, turn_watch, bg_tx,
bg_rx: std::sync::Mutex::new(Some(bg_rx)), _supervisor: sup } bg_rx: std::sync::Mutex::new(Some(bg_rx)), _supervisor: sup }
} }
@ -303,10 +308,9 @@ impl Mind {
self.shared.lock().unwrap().turn_active = true; self.shared.lock().unwrap().turn_active = true;
let _ = self.turn_watch.send(true); let _ = self.turn_watch.send(true);
let agent = self.agent.clone(); let agent = self.agent.clone();
let ui_tx = self.ui_tx.clone();
let result_tx = self.turn_tx.clone(); let result_tx = self.turn_tx.clone();
self.shared.lock().unwrap().turn_handle = Some(tokio::spawn(async move { self.shared.lock().unwrap().turn_handle = Some(tokio::spawn(async move {
let result = Agent::turn(agent, &input, &ui_tx, target).await; let result = Agent::turn(agent, &input, target).await;
let _ = result_tx.send((result, target)).await; let _ = result_tx.send((result, target)).await;
})); }));
} }
@ -317,7 +321,6 @@ impl Mind {
pub fn start_memory_scoring(&self) { pub fn start_memory_scoring(&self) {
let agent = self.agent.clone(); let agent = self.agent.clone();
let bg_tx = self.bg_tx.clone(); let bg_tx = self.bg_tx.clone();
let ui_tx = self.ui_tx.clone();
let cfg = crate::config::get(); let cfg = crate::config::get();
let max_age = cfg.scoring_interval_secs; let max_age = cfg.scoring_interval_secs;
let response_window = cfg.scoring_response_window; let response_window = cfg.scoring_response_window;
@ -329,7 +332,7 @@ impl Mind {
(ag.context.clone(), ag.client_clone()) (ag.context.clone(), ag.client_clone())
}; };
let result = learn::score_memories_incremental( let result = learn::score_memories_incremental(
&context, max_age as i64, response_window, &client, &ui_tx, &agent, &context, max_age as i64, response_window, &client, &agent,
).await; ).await;
{ {
let mut ag = agent.lock().await; let mut ag = agent.lock().await;

View file

@ -42,9 +42,6 @@ pub async fn call_api_with_tools(
) -> Result<String, String> { ) -> Result<String, String> {
let client = get_client()?; let client = get_client()?;
// Set up a UI channel — we drain reasoning tokens into the log
let (ui_tx, mut ui_rx) = crate::user::ui_channel::channel();
// Tools are already filtered by the caller // Tools are already filtered by the caller
// Provenance tracks which agent:phase is making writes. // Provenance tracks which agent:phase is making writes.
// Updated between steps by the bail function via set_provenance(). // Updated between steps by the bail function via set_provenance().
@ -75,7 +72,6 @@ pub async fn call_api_with_tools(
match client.chat_completion_stream_temp( match client.chat_completion_stream_temp(
&messages, &messages,
tools, tools,
&ui_tx,
&reasoning, &reasoning,
sampling, sampling,
Some(priority), Some(priority),

View file

@ -17,7 +17,6 @@
use crate::agent::api::ApiClient; use crate::agent::api::ApiClient;
use crate::agent::api::types::*; use crate::agent::api::types::*;
use crate::agent::context::{ConversationEntry, ContextState}; use crate::agent::context::{ConversationEntry, ContextState};
use crate::user::ui_channel::UiSender;
const SCORE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(120); const SCORE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(120);
@ -181,7 +180,6 @@ impl MemoryScore {
pub async fn score_memories( pub async fn score_memories(
context: &ContextState, context: &ContextState,
client: &ApiClient, client: &ApiClient,
ui_tx: &UiSender,
) -> anyhow::Result<MemoryScore> { ) -> anyhow::Result<MemoryScore> {
let mut memory_keys: Vec<String> = context.entries.iter() let mut memory_keys: Vec<String> = context.entries.iter()
.filter_map(|e| match e { .filter_map(|e| match e {
@ -272,7 +270,6 @@ pub async fn score_memory(
context: &ContextState, context: &ContextState,
key: &str, key: &str,
client: &ApiClient, client: &ApiClient,
ui_tx: &UiSender,
) -> anyhow::Result<f64> { ) -> anyhow::Result<f64> {
const RESPONSE_WINDOW: usize = 50; const RESPONSE_WINDOW: usize = 50;
@ -309,7 +306,6 @@ pub async fn score_memories_incremental(
max_age_secs: i64, max_age_secs: i64,
response_window: usize, response_window: usize,
client: &ApiClient, client: &ApiClient,
ui_tx: &UiSender,
agent: &std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>, agent: &std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>,
) -> anyhow::Result<Vec<(String, f64)>> { ) -> anyhow::Result<Vec<(String, f64)>> {
let now = chrono::Utc::now().timestamp(); let now = chrono::Utc::now().timestamp();
@ -387,7 +383,6 @@ pub async fn score_finetune(
context: &ContextState, context: &ContextState,
count: usize, count: usize,
client: &ApiClient, client: &ApiClient,
ui_tx: &UiSender,
) -> anyhow::Result<Vec<(usize, f64)>> { ) -> anyhow::Result<Vec<(usize, f64)>> {
let range = context.entries.len().saturating_sub(count)..context.entries.len(); let range = context.entries.len().saturating_sub(count)..context.entries.len();

View file

@ -16,7 +16,7 @@ use super::{
App, HotkeyAction, ScreenAction, ScreenView, App, HotkeyAction, ScreenAction, ScreenView,
screen_legend, screen_legend,
}; };
use crate::user::ui_channel::StreamTarget; use crate::mind::StreamTarget;
use crate::mind::MindCommand; use crate::mind::MindCommand;
// --- Slash command table --- // --- Slash command table ---

View file

@ -13,6 +13,7 @@ use ratatui::{
}; };
use super::{App, ScreenAction, ScreenView, screen_legend}; use super::{App, ScreenAction, ScreenView, screen_legend};
use crate::agent::context::ContextSection;
pub(crate) struct ConsciousScreen { pub(crate) struct ConsciousScreen {
scroll: u16, scroll: u16,
@ -25,12 +26,12 @@ impl ConsciousScreen {
Self { scroll: 0, selected: None, expanded: std::collections::HashSet::new() } Self { scroll: 0, selected: None, expanded: std::collections::HashSet::new() }
} }
fn read_context_state(&self, app: &App) -> Vec<crate::user::ui_channel::ContextSection> { fn read_context_state(&self, app: &App) -> Vec<ContextSection> {
app.shared_context.read().map_or_else(|_| Vec::new(), |s| s.clone()) app.shared_context.read().map_or_else(|_| Vec::new(), |s| s.clone())
} }
fn item_count(&self, context_state: &[crate::user::ui_channel::ContextSection]) -> usize { fn item_count(&self, context_state: &[ContextSection]) -> usize {
fn count_section(section: &crate::user::ui_channel::ContextSection, expanded: &std::collections::HashSet<usize>, idx: &mut usize) -> usize { fn count_section(section: &ContextSection, expanded: &std::collections::HashSet<usize>, idx: &mut usize) -> usize {
let my_idx = *idx; let my_idx = *idx;
*idx += 1; *idx += 1;
let mut total = 1; let mut total = 1;
@ -63,7 +64,7 @@ impl ConsciousScreen {
fn render_section( fn render_section(
&self, &self,
section: &crate::user::ui_channel::ContextSection, section: &ContextSection,
depth: usize, depth: usize,
lines: &mut Vec<Line>, lines: &mut Vec<Line>,
idx: &mut usize, idx: &mut usize,

View file

@ -8,7 +8,6 @@ pub mod context;
pub mod subconscious; pub mod subconscious;
pub mod unconscious; pub mod unconscious;
pub mod thalamus; pub mod thalamus;
pub mod ui_channel;
use anyhow::Result; use anyhow::Result;
use ratatui::crossterm::event::{Event, EventStream, KeyEventKind}; use ratatui::crossterm::event::{Event, EventStream, KeyEventKind};
@ -30,7 +29,33 @@ use ratatui::{
}; };
use std::io; use std::io;
use crate::user::ui_channel::{ContextInfo, SharedContextState, StatusInfo}; use crate::agent::context::SharedContextState;
/// Status info for the bottom status bar.
#[derive(Debug, Clone)]
pub struct StatusInfo {
    // DMN display fields — initialized to "resting"/0/20 in App::new.
    pub dmn_state: String,
    pub dmn_turns: u32,
    pub dmn_max_turns: u32,
    // Token counters shown in the bar — source of the counts not visible here; TODO confirm.
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    /// Active model name.
    pub model: String,
    /// Number of tool calls dispatched in the current turn.
    pub turn_tools: u32,
    /// Context window budget breakdown (e.g. "id:8% mem:25% jnl:30% conv:37%").
    pub context_budget: String,
}
/// Context loading details for the debug screen.
#[derive(Debug, Clone)]
pub struct ContextInfo {
    /// Active model name.
    pub model: String,
    /// Models the backend reports as available.
    pub available_models: Vec<String>,
    /// Path of the system prompt file.
    pub prompt_file: String,
    /// Backend label (semantics not visible here — presumably the API flavor; confirm).
    pub backend: String,
    /// (name, size) pairs for loaded instruction files — size presumably chars; confirm at producer.
    pub instruction_files: Vec<(String, usize)>,
    /// (name, size) pairs for loaded memory files.
    pub memory_files: Vec<(String, usize)>,
    /// Character count of the rendered system prompt.
    pub system_prompt_chars: usize,
    /// Character count of the rendered context message.
    pub context_message_chars: usize,
}
/// Build the screen legend from screen labels. /// Build the screen legend from screen labels.
pub(crate) fn screen_legend_from(interact: &dyn ScreenView, screens: &[Box<dyn ScreenView>]) -> String { pub(crate) fn screen_legend_from(interact: &dyn ScreenView, screens: &[Box<dyn ScreenView>]) -> String {
@ -100,7 +125,7 @@ pub struct App {
pub temperature: f32, pub temperature: f32,
pub top_p: f32, pub top_p: f32,
pub top_k: u32, pub top_k: u32,
pub(crate) active_tools: crate::user::ui_channel::SharedActiveTools, pub(crate) active_tools: crate::agent::tools::SharedActiveTools,
pub should_quit: bool, pub should_quit: bool,
pub submitted: Vec<String>, pub submitted: Vec<String>,
pub hotkey_actions: Vec<HotkeyAction>, pub hotkey_actions: Vec<HotkeyAction>,
@ -112,7 +137,7 @@ pub struct App {
} }
impl App { impl App {
pub fn new(model: String, shared_context: SharedContextState, active_tools: crate::user::ui_channel::SharedActiveTools) -> Self { pub fn new(model: String, shared_context: SharedContextState, active_tools: crate::agent::tools::SharedActiveTools) -> Self {
Self { Self {
status: StatusInfo { status: StatusInfo {
dmn_state: "resting".into(), dmn_turns: 0, dmn_max_turns: 20, dmn_state: "resting".into(), dmn_turns: 0, dmn_max_turns: 20,
@ -189,11 +214,10 @@ pub async fn start(cli: crate::user::CliArgs) -> Result<()> {
unsafe { std::env::set_var("POC_DEBUG", "1") }; unsafe { std::env::set_var("POC_DEBUG", "1") };
} }
let (ui_tx, ui_rx) = ui_channel::channel();
let (turn_tx, turn_rx) = tokio::sync::mpsc::channel(1); let (turn_tx, turn_rx) = tokio::sync::mpsc::channel(1);
let (mind_tx, mind_rx) = tokio::sync::mpsc::unbounded_channel(); let (mind_tx, mind_rx) = tokio::sync::mpsc::unbounded_channel();
let mind = crate::mind::Mind::new(config, ui_tx.clone(), turn_tx); let mind = crate::mind::Mind::new(config, turn_tx);
let shared_context = mind.agent.lock().await.shared_context.clone(); let shared_context = mind.agent.lock().await.shared_context.clone();
let shared_active_tools = mind.agent.lock().await.active_tools.clone(); let shared_active_tools = mind.agent.lock().await.active_tools.clone();

View file

@ -1,107 +0,0 @@
// ui_channel.rs — Output routing for TUI panes
//
// All output from the agent (streaming text, tool calls, status updates)
// goes through a UiMessage enum sent over an mpsc channel. The TUI
// receives these messages and routes them to the appropriate pane.
//
// This replaces direct stdout/stderr printing throughout the codebase.
// The agent and API client never touch the terminal directly — they
// just send messages that the TUI renders where appropriate.
//
// The channel also fans out to a broadcast channel so the observation
// socket (observe.rs) can subscribe without touching the main path.
use std::sync::Arc;
use tokio::sync::{broadcast, mpsc};
// Re-export context types that moved to agent::context
pub use crate::agent::context::{ContextSection, SharedContextState, shared_context_state};
// ActiveToolCall lives in agent::tools — re-export for TUI access
pub use crate::agent::tools::ActiveToolCall;
/// Shared active tool calls — agent spawns, TUI reads metadata / aborts.
pub type SharedActiveTools = Arc<std::sync::Mutex<Vec<ActiveToolCall>>>;

/// Construct an empty shared active-tools list.
pub fn shared_active_tools() -> SharedActiveTools {
    Arc::new(std::sync::Mutex::new(Vec::new()))
}
/// Which pane streaming text should go to.
// NOTE: relocated to mind/mod.rs by this commit (Mind decides Conversation vs Autonomous).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StreamTarget {
    /// User-initiated turn — text goes to conversation pane.
    Conversation,
    /// DMN-initiated turn — text goes to autonomous pane.
    Autonomous,
}
/// Status info for the bottom status bar.
// NOTE: relocated to user/mod.rs by this commit (UI display state).
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct StatusInfo {
    // DMN display fields shown in the status bar.
    pub dmn_state: String,
    pub dmn_turns: u32,
    pub dmn_max_turns: u32,
    // Token counters — source of the counts not visible here; TODO confirm.
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    /// Active model name.
    pub model: String,
    /// Number of tool calls dispatched in the current turn.
    pub turn_tools: u32,
    /// Context window budget breakdown (e.g. "id:8% mem:25% jnl:30% conv:37%").
    pub context_budget: String,
}
/// Context loading details for the debug screen.
// NOTE: relocated to user/mod.rs by this commit.
#[derive(Debug, Clone)]
pub struct ContextInfo {
    /// Active model name.
    pub model: String,
    /// Models the backend reports as available.
    pub available_models: Vec<String>,
    /// Path of the system prompt file.
    pub prompt_file: String,
    /// Backend label — semantics not visible here; confirm at producer.
    pub backend: String,
    /// (name, size) pairs — size presumably chars; confirm.
    #[allow(dead_code)]
    pub instruction_files: Vec<(String, usize)>,
    /// (name, size) pairs for loaded memory files.
    #[allow(dead_code)]
    pub memory_files: Vec<(String, usize)>,
    /// Character count of the rendered system prompt.
    pub system_prompt_chars: usize,
    /// Character count of the rendered context message.
    pub context_message_chars: usize,
}
/// Messages sent from agent/API to the TUI for rendering.
// NOTE: this enum (and the whole channel layer around it) is deleted by this
// commit — state now flows through Agent fields read by the UI via try_lock.
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub enum UiMessage {
    /// Informational message — goes to conversation pane (command output, etc).
    Info(String),
}
/// Sender that fans out to both the TUI (mpsc) and observers (broadcast).
#[derive(Clone)]
pub struct UiSender {
    // Primary path: unbounded channel drained by the TUI.
    tui: mpsc::UnboundedSender<UiMessage>,
    // Fan-out path for the observation socket (observe.rs); capacity 1024 per channel().
    observe: broadcast::Sender<UiMessage>,
}

impl UiSender {
    /// Send `msg` to the TUI, also fanning a copy out to observers.
    /// Errors only if the TUI receiving half has been dropped.
    pub fn send(&self, msg: UiMessage) -> Result<(), mpsc::error::SendError<UiMessage>> {
        // Broadcast to observers (ignore errors — no subscribers is fine)
        let _ = self.observe.send(msg.clone());
        self.tui.send(msg)
    }

    /// Subscribe to the broadcast side (for the observation socket).
    pub fn subscribe(&self) -> broadcast::Receiver<UiMessage> {
        self.observe.subscribe()
    }
}
/// Convenience type for the receiving half.
pub type UiReceiver = mpsc::UnboundedReceiver<UiMessage>;

/// Build a connected `UiSender`/`UiReceiver` pair.
///
/// The sender also carries a broadcast side (capacity 1024) for the
/// observation socket; its initial receiver is immediately dropped.
pub fn channel() -> (UiSender, UiReceiver) {
    let (observe, _initial_rx) = broadcast::channel(1024);
    let (tui, rx) = mpsc::unbounded_channel();
    (UiSender { tui, observe }, rx)
}