split out src/mind

This commit is contained in:
Kent Overstreet 2026-04-04 02:46:32 -04:00
parent ce04568454
commit 79e384f005
21 changed files with 1865 additions and 2175 deletions

View file

@ -473,9 +473,9 @@ pub fn build_response_message(
}
// Check for leaked tool calls in content text.
let leaked = crate::user::parsing::parse_leaked_tool_calls(&content);
let leaked = crate::agent::parsing::parse_leaked_tool_calls(&content);
if !leaked.is_empty() {
let cleaned = crate::user::parsing::strip_leaked_artifacts(&content);
let cleaned = crate::agent::parsing::strip_leaked_artifacts(&content);
return Message {
role: Role::Assistant,
content: if cleaned.trim().is_empty() { None }

View file

@ -15,6 +15,7 @@
pub mod api;
pub mod context;
pub mod parsing;
pub mod tools;
pub mod training;
@ -79,13 +80,10 @@ pub struct Agent {
app_config: crate::config::AppConfig,
pub prompt_file: String,
/// Stable session ID for memory-search dedup across turns.
session_id: String,
pub session_id: String,
/// Agent orchestration state (surface-observe, journal, reflect).
/// TODO: move to Session — it's session-level, not agent-level.
pub agent_cycles: crate::subconscious::subconscious::AgentCycleState,
/// Latest memory importance scores from training scorer.
pub memory_scores: Option<crate::agent::training::MemoryScore>,
/// Whether a /score task is currently running.
pub scoring_in_flight: bool,
/// Shared active tools — Agent writes, TUI reads.
pub active_tools: crate::user::ui_channel::SharedActiveTools,
}
@ -137,8 +135,6 @@ impl Agent {
prompt_file,
session_id,
agent_cycles,
memory_scores: None,
scoring_in_flight: false,
active_tools,
};
@ -323,7 +319,7 @@ impl Agent {
// Check for closing tag — parse and fire immediately
if let Some(end) = tool_call_buf.find("</tool_call>") {
let body = &tool_call_buf[..end];
if let Some(call) = crate::user::parsing::parse_tool_call_body(body) {
if let Some(call) = crate::agent::parsing::parse_tool_call_body(body) {
let args: serde_json::Value =
serde_json::from_str(&call.function.arguments).unwrap_or_default();
let args_summary = summarize_args(&call.function.name, &args);
@ -666,7 +662,7 @@ impl Agent {
}
/// Build context state summary for the debug screen.
pub fn context_state_summary(&self) -> Vec<ContextSection> {
pub fn context_state_summary(&self, memory_scores: Option<&crate::agent::training::MemoryScore>) -> Vec<ContextSection> {
let count = |s: &str| self.tokenizer.encode_with_special_tokens(s).len();
let mut sections = Vec::new();
@ -758,7 +754,7 @@ impl Agent {
_ => unreachable!(),
};
let text = entry.message().content_text();
let score = self.memory_scores.as_ref()
let score = memory_scores
.and_then(|s| s.memory_weights.iter()
.find(|(k, _)| k == key)
.map(|(_, v)| *v));
@ -823,7 +819,7 @@ impl Agent {
};
// Show which memories were important for this response
let children = if m.role == Role::Assistant {
self.memory_scores.as_ref()
memory_scores
.map(|s| s.important_memories_for_entry(i))
.unwrap_or_default()
.into_iter()
@ -965,7 +961,11 @@ impl Agent {
/// Push the current context summary to the shared state for the TUI to read.
pub fn publish_context_state(&self) {
let summary = self.context_state_summary();
self.publish_context_state_with_scores(None);
}
pub fn publish_context_state_with_scores(&self, memory_scores: Option<&crate::agent::training::MemoryScore>) {
let summary = self.context_state_summary(memory_scores);
if let Ok(mut dbg) = std::fs::OpenOptions::new().create(true).append(true)
.open("/tmp/poc-journal-debug.log") {
use std::io::Write;

View file

@ -288,6 +288,79 @@ pub async fn score_memory(
Ok(divs.iter().sum())
}
// ── Background memory scoring ───────────────────────────────────
/// Incrementally score memories through the conversation.
///
/// Walks memory entries in conversation order starting from `cursor`.
/// For each memory with a full WINDOW of entries after it, calls
/// score_divergence() and yields the result. Stops at the first memory
/// that doesn't have enough messages yet — the conversation needs to
/// grow before we can score it.
///
/// Returns the updated cursor (entry index to resume from next time)
/// and the `(key, importance)` pairs for each memory scored this round.
pub async fn score_memories_incremental(
    context: &ContextState,
    cursor: usize,
    client: &ApiClient,
    ui_tx: &UiSender,
) -> anyhow::Result<(usize, Vec<(String, f64)>)> {
    // Number of conversation entries that must follow a memory before
    // it is considered scorable.
    const WINDOW: usize = 50;
    // Collect unique memory keys with their first position, starting from
    // cursor. Only the first occurrence of each key is queued for scoring.
    let mut seen = std::collections::HashSet::new();
    let mut to_score: Vec<(usize, String)> = Vec::new();
    for (i, entry) in context.entries.iter().enumerate().skip(cursor) {
        if let ConversationEntry::Memory { key, .. } = entry {
            if seen.insert(key.clone()) {
                to_score.push((i, key.clone()));
            }
        }
    }
    let http = http_client();
    let mut new_cursor = cursor;
    let mut results = Vec::new();
    for (pos, key) in &to_score {
        let end = pos + WINDOW;
        // Not enough conversation after this memory yet — stop here and
        // resume from `new_cursor` next round.
        if end > context.entries.len() {
            break;
        }
        // Need at least one assistant response in the window — otherwise
        // there is nothing for divergence scoring to compare against.
        let range = *pos..end;
        if !context.entries[range.clone()].iter().any(|e| e.message().role == Role::Assistant) {
            new_cursor = end;
            continue;
        }
        let _ = ui_tx.send(UiMessage::Activity(format!("scoring memory: {}...", key)));
        match score_divergence(&http, client, context, range, Filter::SkipKey(key)).await {
            Ok((divs, _)) => {
                // Importance is the summed divergence over the window.
                let importance: f64 = divs.iter().sum();
                let _ = ui_tx.send(UiMessage::Debug(format!(
                    "[scoring] {} → {:.2}", key, importance,
                )));
                results.push((key.clone(), importance));
            }
            Err(e) => {
                // NOTE(review): the cursor still advances past a failed
                // memory (below), so it is never retried — confirm intended.
                let _ = ui_tx.send(UiMessage::Debug(format!(
                    "[scoring] {} FAILED: {:#}", key, e,
                )));
            }
        }
        new_cursor = end;
    }
    // Clear the activity line in the UI.
    let _ = ui_tx.send(UiMessage::Activity(String::new()));
    Ok((new_cursor, results))
}
// ── Fine-tuning scoring ─────────────────────────────────────────
/// Score which recent responses are candidates for fine-tuning.

File diff suppressed because it is too large Load diff

View file

@ -460,7 +460,7 @@ pub struct ResolvedModel {
impl AppConfig {
/// Resolve the active backend and assemble prompts into a SessionConfig.
pub fn resolve(&self, cli: &crate::user::cli::CliArgs) -> Result<SessionConfig> {
pub fn resolve(&self, cli: &crate::user::CliArgs) -> Result<SessionConfig> {
let cwd = std::env::current_dir().context("Failed to get current directory")?;
let (api_base, api_key, model, prompt_file);
@ -494,8 +494,8 @@ impl AppConfig {
.with_context(|| format!("Failed to read {}", path.display()))?;
(content, Vec::new(), 0, 0)
} else {
let system_prompt = crate::user::identity::assemble_system_prompt();
let (context_parts, cc, mc) = crate::user::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
let system_prompt = crate::mind::identity::assemble_system_prompt();
let (context_parts, cc, mc) = crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
(system_prompt, context_parts, cc, mc)
};
@ -603,7 +603,7 @@ macro_rules! merge_opt {
};
}
fn build_figment(cli: &crate::user::cli::CliArgs) -> Figment {
fn build_figment(cli: &crate::user::CliArgs) -> Figment {
let mut f = Figment::from(Serialized::defaults(AppConfig::default()))
.merge(Json5File(config_path()));
@ -622,14 +622,14 @@ fn build_figment(cli: &crate::user::cli::CliArgs) -> Figment {
}
/// Load just the AppConfig — no validation, no prompt assembly.
pub fn load_app(cli: &crate::user::cli::CliArgs) -> Result<(AppConfig, Figment)> {
pub fn load_app(cli: &crate::user::CliArgs) -> Result<(AppConfig, Figment)> {
let figment = build_figment(cli);
let app: AppConfig = figment.extract().context("Failed to load configuration")?;
Ok((app, figment))
}
/// Load the full config: figment → AppConfig → resolve backend → assemble prompts.
pub fn load_session(cli: &crate::user::cli::CliArgs) -> Result<(SessionConfig, Figment)> {
pub fn load_session(cli: &crate::user::CliArgs) -> Result<(SessionConfig, Figment)> {
let (app, figment) = load_app(cli)?;
let config = app.resolve(cli)?;
Ok((config, figment))
@ -645,9 +645,9 @@ pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, V
return Ok((content, Vec::new()));
}
let system_prompt = crate::user::identity::assemble_system_prompt();
let system_prompt = crate::mind::identity::assemble_system_prompt();
let context_groups = get().context_groups.clone();
let (context_parts, _, _) = crate::user::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?;
let (context_parts, _, _) = crate::mind::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?;
Ok((system_prompt, context_parts))
}

View file

@ -23,9 +23,12 @@ macro_rules! dbglog {
}};
}
// Agent infrastructure
// User interface (TUI, CLI)
pub mod user;
// Cognitive layer (session state machine, DMN, identity)
pub mod mind;
// Shared cognitive infrastructure — used by both agent and subconscious
pub mod agent;

935
src/mind/mod.rs Normal file
View file

@ -0,0 +1,935 @@
// mind/ — Cognitive layer
//
// Session state machine, DMN, identity, observation socket.
// Everything about how the mind operates, separate from the
// user interface (TUI, CLI) and the agent execution (tools, API).
pub mod dmn;
pub mod identity;
pub mod observe;
// consciousness.rs — Session state machine and event loop
//
// The core runtime for the consciousness binary. Session manages turns,
// DMN state, compaction, scoring, and slash commands. The event loop
// bridges Session (cognitive state) with App (TUI rendering).
//
// The event loop uses biased select! so priorities are deterministic:
// keyboard events > turn results > render ticks > DMN timer > UI messages.
use anyhow::Result;
use crossterm::event::{Event, EventStream, KeyEventKind};
use futures::StreamExt;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::{mpsc, Mutex};
use crate::agent::{Agent, TurnResult};
use crate::agent::api::ApiClient;
use crate::agent::api::types as api_types;
use crate::config::{self, AppConfig, SessionConfig};
use crate::dbglog;
use crate::user::{self as tui, HotkeyAction};
use crate::user::ui_channel::{self, ContextInfo, StatusInfo, StreamTarget, UiMessage};
use crate::user::log;
/// Compaction threshold — context is rebuilt when prompt tokens exceed this.
/// Computed as a configured percentage of the model's context window.
fn compaction_threshold(app: &AppConfig) -> u32 {
    let window = crate::agent::context::context_window() as u32;
    window * app.compaction.hard_threshold_pct / 100
}
/// Commands that are handled in the main loop, not sent to the agent.
enum Command {
    /// Exit the application (from /quit or /exit).
    Quit,
    /// Input was a slash command and has been fully handled.
    Handled,
    /// Input was not recognized as a main-loop command.
    None,
}
// --- Session: all mutable state for a running agent session ---
/// Collects the ~15 loose variables that previously lived in run()
/// into a coherent struct with methods. The event loop dispatches
/// to Session methods; Session manages turns, compaction, DMN state,
/// and slash commands.
pub struct Session {
    /// Shared handle to the agent; the turn task holds this lock for
    /// the duration of a turn.
    agent: Arc<Mutex<Agent>>,
    /// Resolved session configuration (model, prompts, app config).
    config: SessionConfig,
    /// Outbound channel to the TUI.
    ui_tx: ui_channel::UiSender,
    /// Completed turns are reported back to the event loop on this channel.
    turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>,
    // DMN state
    dmn: dmn::State,
    /// Consecutive autonomous (DMN) turns since the last user input.
    dmn_turns: u32,
    /// Cap on consecutive DMN turns (from config.app.dmn.max_turns).
    max_dmn_turns: u32,
    // Turn tracking
    turn_in_progress: bool,
    /// JoinHandle of the in-flight turn task, if any — used to abort.
    turn_handle: Option<tokio::task::JoinHandle<()>>,
    /// User messages received while a turn is in progress.
    /// Consolidated into one message (newline-separated) so the
    /// model sees everything the user typed, not just the first line.
    pending_input: Option<String>,
    // Per-turn tracking for DMN context
    last_user_input: Instant,
    consecutive_errors: u32,
    last_turn_had_tools: bool,
    // Subconscious orchestration
    agent_cycles: crate::subconscious::subconscious::AgentCycleState,
    /// Latest memory importance scores from full matrix scoring (manual /score).
    /// NOTE(review): never written in the code visible here — confirm
    /// whether /score should store its result into this field.
    memory_scores: Option<crate::agent::training::MemoryScore>,
    /// Whether a full matrix /score task is currently running.
    /// NOTE(review): set by /score but never cleared when the task
    /// finishes — confirm.
    scoring_in_flight: bool,
}
impl Session {
/// Build a fresh Session with DMN resting (or Off, if the persisted
/// off switch is set) and all per-turn tracking reset.
fn new(
    agent: Arc<Mutex<Agent>>,
    config: SessionConfig,
    ui_tx: ui_channel::UiSender,
    turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>,
) -> Self {
    let max_dmn_turns = config.app.dmn.max_turns;
    Self {
        agent,
        config,
        ui_tx,
        turn_tx,
        // Respect the persisted DMN off switch across restarts.
        dmn: if dmn::is_off() {
            dmn::State::Off
        } else {
            dmn::State::Resting { since: Instant::now() }
        },
        dmn_turns: 0,
        max_dmn_turns,
        turn_in_progress: false,
        turn_handle: None,
        pending_input: None,
        last_user_input: Instant::now(),
        consecutive_errors: 0,
        last_turn_had_tools: false,
        agent_cycles: crate::subconscious::subconscious::AgentCycleState::new(""),
        memory_scores: None,
        scoring_in_flight: false,
    }
}
/// How long before the next DMN tick — delegates to the current state.
fn dmn_interval(&self) -> Duration {
    self.dmn.interval()
}
/// Spawn an agent turn in a background task.
///
/// The task holds the agent lock for the whole turn and reports the
/// result (plus its stream target) back via `turn_tx`.
fn spawn_turn(&mut self, input: String, target: StreamTarget) {
    let agent = self.agent.clone();
    let ui_tx = self.ui_tx.clone();
    let result_tx = self.turn_tx.clone();
    self.turn_in_progress = true;
    self.turn_handle = Some(tokio::spawn(async move {
        let mut agent = agent.lock().await;
        let result = agent.turn(&input, &ui_tx, target).await;
        let _ = result_tx.send((result, target)).await;
    }));
}
/// Submit user input — either queue it (if a turn is running) or
/// start a new turn immediately.
fn submit_input(&mut self, input: String) {
    if self.turn_in_progress {
        // Consolidate queued lines into one newline-separated message so
        // the model sees everything the user typed, not just the first line.
        if let Some(existing) = &mut self.pending_input {
            existing.push('\n');
            existing.push_str(&input);
        } else {
            // Take ownership directly — the original cloned here, but
            // `input` is not used again on this path.
            self.pending_input = Some(input);
        }
        let _ = self.ui_tx.send(UiMessage::Info("(queued)".into()));
    } else {
        // Fresh user activity: reset DMN counters and engage.
        self.dmn_turns = 0;
        self.consecutive_errors = 0;
        self.last_user_input = Instant::now();
        self.dmn = dmn::State::Engaged;
        let _ = self.ui_tx.send(UiMessage::UserInput(input.clone()));
        self.update_status();
        self.spawn_turn(input, StreamTarget::Conversation);
    }
}
/// Process a completed turn: update DMN state, check compaction,
/// drain any queued input.
async fn handle_turn_result(
    &mut self,
    result: Result<TurnResult>,
    target: StreamTarget,
) {
    self.turn_in_progress = false;
    self.turn_handle = None;
    match result {
        Ok(turn_result) => {
            // Tool errors accumulate across turns; a clean turn resets
            // the streak.
            if turn_result.tool_errors > 0 {
                self.consecutive_errors += turn_result.tool_errors;
            } else {
                self.consecutive_errors = 0;
            }
            self.last_turn_had_tools = turn_result.had_tool_calls;
            self.dmn = dmn::transition(
                &self.dmn,
                turn_result.yield_requested,
                turn_result.had_tool_calls,
                target == StreamTarget::Conversation,
            );
            // The agent may explicitly request a DMN pause; this
            // overrides whatever transition() produced above.
            if turn_result.dmn_pause {
                self.dmn = dmn::State::Paused;
                self.dmn_turns = 0;
                let _ = self.ui_tx.send(UiMessage::Info(
                    "DMN paused (agent requested). Ctrl+P or /wake to resume.".into(),
                ));
            }
            // The agent may also request a model switch mid-session.
            if let Some(model_name) = turn_result.model_switch {
                self.switch_model(&model_name).await;
            }
        }
        Err(e) => {
            self.consecutive_errors += 1;
            // Autonomous-turn errors become quiet annotations; errors in
            // user-facing turns get a visible info line.
            let msg = match target {
                StreamTarget::Autonomous => {
                    UiMessage::DmnAnnotation(format!("[error: {:#}]", e))
                }
                StreamTarget::Conversation => {
                    UiMessage::Info(format!("Error: {:#}", e))
                }
            };
            let _ = self.ui_tx.send(msg);
            // Back off to resting after a failed turn.
            self.dmn = dmn::State::Resting {
                since: Instant::now(),
            };
        }
    }
    self.update_status();
    self.check_compaction().await;
    self.maybe_start_memory_scoring().await;
    self.drain_pending();
}
/// Spawn incremental memory scoring if not already running.
///
/// The in-flight check and the flag set happen under a single lock
/// acquisition (check-and-set); the original took the lock twice,
/// leaving a window between the check and the set. The background
/// task clears the flag and advances the cursor when it finishes.
async fn maybe_start_memory_scoring(&mut self) {
    let (context, client, cursor) = {
        let mut agent = self.agent.lock().await;
        if agent.agent_cycles.memory_scoring_in_flight {
            return;
        }
        let cursor = agent.agent_cycles.memory_score_cursor;
        agent.agent_cycles.memory_scoring_in_flight = true;
        (agent.context.clone(), agent.client_clone(), cursor)
    };
    let agent = self.agent.clone();
    let ui_tx = self.ui_tx.clone();
    tokio::spawn(async move {
        let result = crate::agent::training::score_memories_incremental(
            &context, cursor, &client, &ui_tx,
        ).await;
        let mut agent = agent.lock().await;
        // Always clear the flag — success or failure — so scoring can
        // run again next turn.
        agent.agent_cycles.memory_scoring_in_flight = false;
        match result {
            Ok((new_cursor, scores)) => {
                agent.agent_cycles.memory_score_cursor = new_cursor;
                agent.agent_cycles.memory_scores.extend(scores);
                let _ = ui_tx.send(UiMessage::AgentUpdate(agent.agent_cycles.snapshots()));
            }
            Err(e) => {
                let _ = ui_tx.send(UiMessage::Debug(format!(
                    "[memory-scoring] failed: {:#}", e,
                )));
            }
        }
    });
}
/// Check if compaction is needed after a turn.
///
/// Compares the last prompt's token count against the configured
/// threshold and, when exceeded, compacts the agent's context down to
/// journal + recent messages and refreshes the TUI's context info.
async fn check_compaction(&mut self) {
    let mut agent_guard = self.agent.lock().await;
    let tokens = agent_guard.last_prompt_tokens();
    let threshold = compaction_threshold(&self.config.app);
    if tokens > threshold {
        let _ = self.ui_tx.send(UiMessage::Info(format!(
            "[compaction: {}K > {}K threshold]",
            tokens / 1000,
            threshold / 1000,
        )));
        agent_guard.compact();
        let _ = self.ui_tx.send(UiMessage::Info(
            "[compacted — journal + recent messages]".into(),
        ));
        self.send_context_info();
    }
}
/// Send any consolidated pending input as a single turn.
///
/// Mirrors the "fresh input" path of submit_input: reset DMN counters,
/// engage, echo the text to the UI, and spawn the turn.
fn drain_pending(&mut self) {
    let Some(queued) = self.pending_input.take() else {
        return;
    };
    self.dmn_turns = 0;
    self.consecutive_errors = 0;
    self.last_user_input = Instant::now();
    self.dmn = dmn::State::Engaged;
    let _ = self.ui_tx.send(UiMessage::UserInput(queued.clone()));
    self.update_status();
    self.spawn_turn(queued, StreamTarget::Conversation);
}
/// Fire a DMN tick: check max turns, generate prompt, spawn turn.
fn dmn_tick(&mut self) {
    // No autonomous activity while paused or switched off.
    if matches!(self.dmn, dmn::State::Paused | dmn::State::Off) {
        return;
    }
    self.dmn_turns += 1;
    if self.dmn_turns > self.max_dmn_turns {
        // dmn_turns was already incremented past the cap, so the
        // reported count is dmn_turns - 1 (the turns actually run).
        let _ = self.ui_tx.send(UiMessage::DmnAnnotation(format!(
            "[dmn: {} consecutive turns, resting (limit: {})]",
            self.dmn_turns - 1,
            self.max_dmn_turns,
        )));
        self.dmn = dmn::State::Resting {
            since: Instant::now(),
        };
        self.dmn_turns = 0;
        self.update_status();
        return;
    }
    // Context fed into the DMN prompt generator.
    let dmn_ctx = dmn::DmnContext {
        user_idle: self.last_user_input.elapsed(),
        consecutive_errors: self.consecutive_errors,
        last_turn_had_tools: self.last_turn_had_tools,
    };
    let prompt = self.dmn.prompt(&dmn_ctx);
    let _ = self.ui_tx.send(UiMessage::DmnAnnotation(format!(
        "[dmn: {} ({}/{})]",
        self.dmn.label(),
        self.dmn_turns,
        self.max_dmn_turns,
    )));
    self.update_status();
    self.spawn_turn(prompt, StreamTarget::Autonomous);
}
/// Handle slash commands. Returns how the main loop should respond.
///
/// Read-only queries use `try_lock` so they never wait on a running
/// turn; long-running work (/score) is spawned onto a background task
/// so the event loop stays responsive.
async fn handle_command(&mut self, input: &str) -> Command {
    // Name → description pairs shown by /help.
    // NOTE(review): "/test" is listed here but has no arm in the match
    // below — either implement it or drop it from this list.
    const COMMANDS: &[(&str, &str)] = &[
        ("/quit", "Exit consciousness"),
        ("/new", "Start fresh session (saves current)"),
        ("/save", "Save session to disk"),
        ("/retry", "Re-run last turn"),
        ("/model", "Show/switch model (/model <name>)"),
        ("/score", "Score memory importance"),
        ("/dmn", "Show DMN state"),
        ("/sleep", "Put DMN to sleep"),
        ("/wake", "Wake DMN to foraging"),
        ("/pause", "Full stop — no autonomous ticks (Ctrl+P)"),
        ("/test", "Run tool smoke tests"),
        ("/help", "Show this help"),
    ];
    match input {
        "/quit" | "/exit" => Command::Quit,
        "/save" => {
            // Persistence is automatic; this just informs the user.
            let _ = self.ui_tx.send(UiMessage::Info(
                "Conversation is saved automatically (append-only log).".into()
            ));
            Command::Handled
        }
        "/new" | "/clear" => {
            if self.turn_in_progress {
                let _ = self.ui_tx.send(UiMessage::Info("(turn in progress, please wait)".into()));
                return Command::Handled;
            }
            {
                // Best-effort: a failed log create leaves the new agent
                // without a conversation log rather than aborting.
                let new_log = log::ConversationLog::new(
                    self.config.session_dir.join("conversation.jsonl"),
                ).ok();
                let mut agent_guard = self.agent.lock().await;
                // Carry the shared state (context + active tools) into
                // the replacement agent so the TUI keeps its handles.
                let shared_ctx = agent_guard.shared_context.clone();
                let shared_tools = agent_guard.active_tools.clone();
                *agent_guard = Agent::new(
                    ApiClient::new(&self.config.api_base, &self.config.api_key, &self.config.model),
                    self.config.system_prompt.clone(),
                    self.config.context_parts.clone(),
                    self.config.app.clone(),
                    self.config.prompt_file.clone(),
                    new_log,
                    shared_ctx,
                    shared_tools,
                );
            }
            self.dmn = dmn::State::Resting { since: Instant::now() };
            let _ = self.ui_tx.send(UiMessage::Info("New session started.".into()));
            Command::Handled
        }
        "/model" => {
            // try_lock: read-only query, don't block on a running turn.
            if let Ok(agent) = self.agent.try_lock() {
                let _ = self.ui_tx.send(UiMessage::Info(format!("Current model: {}", agent.model())));
                let names = self.config.app.model_names();
                if !names.is_empty() {
                    let _ = self.ui_tx.send(UiMessage::Info(format!("Available: {}", names.join(", "))));
                }
            } else {
                let _ = self.ui_tx.send(UiMessage::Info("(busy)".into()));
            }
            Command::Handled
        }
        "/score" => {
            if self.scoring_in_flight {
                let _ = self.ui_tx.send(UiMessage::Info("(scoring already in progress)".into()));
                return Command::Handled;
            }
            let (context, client) = {
                let agent = self.agent.lock().await;
                (agent.context.clone(), agent.client_clone())
            };
            // NOTE(review): this flag is never cleared when the spawned
            // task finishes, so every subsequent /score is refused — the
            // task needs a way to reset it (e.g. a shared AtomicBool).
            self.scoring_in_flight = true;
            let agent = self.agent.clone();
            let ui_tx = self.ui_tx.clone();
            tokio::spawn(async move {
                let result = crate::agent::training::score_memories(
                    &context, &client, &ui_tx,
                ).await;
                let agent = agent.lock().await;
                match result {
                    Ok(scores) => {
                        agent.publish_context_state_with_scores(Some(&scores));
                    }
                    Err(e) => {
                        let _ = ui_tx.send(UiMessage::Info(format!("[scoring failed: {:#}]", e)));
                    }
                }
            });
            Command::Handled
        }
        "/dmn" => {
            let _ = self.ui_tx.send(UiMessage::Info(format!("DMN state: {:?}", self.dmn)));
            let _ = self.ui_tx.send(UiMessage::Info(format!("Next tick in: {:?}", self.dmn.interval())));
            let _ = self.ui_tx.send(UiMessage::Info(format!(
                "Consecutive DMN turns: {}/{}", self.dmn_turns, self.max_dmn_turns,
            )));
            Command::Handled
        }
        "/sleep" => {
            self.dmn = dmn::State::Resting { since: Instant::now() };
            self.dmn_turns = 0;
            let _ = self.ui_tx.send(UiMessage::Info(
                "DMN sleeping (heartbeat every 5 min). Type anything to wake.".into(),
            ));
            Command::Handled
        }
        "/wake" => {
            let was_paused = matches!(self.dmn, dmn::State::Paused | dmn::State::Off);
            if matches!(self.dmn, dmn::State::Off) {
                // Also clear the persisted off switch.
                dmn::set_off(false);
            }
            self.dmn = dmn::State::Foraging;
            self.dmn_turns = 0;
            let msg = if was_paused { "DMN unpaused — entering foraging mode." }
                else { "DMN waking — entering foraging mode." };
            let _ = self.ui_tx.send(UiMessage::Info(msg.into()));
            self.update_status();
            Command::Handled
        }
        "/pause" => {
            self.dmn = dmn::State::Paused;
            self.dmn_turns = 0;
            let _ = self.ui_tx.send(UiMessage::Info(
                "DMN paused — no autonomous ticks. Ctrl+P or /wake to resume.".into(),
            ));
            self.update_status();
            Command::Handled
        }
        "/retry" => {
            if self.turn_in_progress {
                let _ = self.ui_tx.send(UiMessage::Info("(turn in progress, please wait)".into()));
                return Command::Handled;
            }
            // Pop entries back to (and including) the last user message,
            // then re-run that message as a fresh turn.
            let mut agent_guard = self.agent.lock().await;
            let entries = agent_guard.entries_mut();
            let mut last_user_text = None;
            while let Some(entry) = entries.last() {
                if entry.message().role == api_types::Role::User {
                    last_user_text = Some(entries.pop().unwrap().message().content_text().to_string());
                    break;
                }
                entries.pop();
            }
            drop(agent_guard);
            match last_user_text {
                Some(text) => {
                    // Truncate on char boundaries — the original byte slice
                    // (`&text[..text.len().min(60)]`) panics if byte 60
                    // falls inside a multi-byte UTF-8 sequence.
                    let preview: String = text.chars().take(60).collect();
                    let _ = self.ui_tx.send(UiMessage::Info(format!("(retrying: {}...)", preview)));
                    self.dmn_turns = 0;
                    self.dmn = dmn::State::Engaged;
                    self.spawn_turn(text, StreamTarget::Conversation);
                }
                None => {
                    let _ = self.ui_tx.send(UiMessage::Info("(nothing to retry)".into()));
                }
            }
            Command::Handled
        }
        "/help" => {
            for (name, desc) in COMMANDS {
                let _ = self.ui_tx.send(UiMessage::Info(format!(" {:12} {}", name, desc)));
            }
            let _ = self.ui_tx.send(UiMessage::Info(String::new()));
            let _ = self.ui_tx.send(UiMessage::Info(
                "Keys: Tab=pane ^Up/Down=scroll PgUp/PgDn=scroll Mouse=click/scroll".into(),
            ));
            let _ = self.ui_tx.send(UiMessage::Info(
                " Alt+Enter=newline Esc=interrupt ^P=pause ^R=reasoning ^K=kill F10=context F2=agents".into(),
            ));
            let _ = self.ui_tx.send(UiMessage::Info(
                " Shift+click for native text selection (copy/paste)".into(),
            ));
            Command::Handled
        }
        cmd if cmd.starts_with("/model ") => {
            // "/model " is 7 ASCII bytes; this byte slice is safe.
            let name = cmd[7..].trim();
            if name.is_empty() {
                let _ = self.ui_tx.send(UiMessage::Info("Usage: /model <name>".into()));
                return Command::Handled;
            }
            self.switch_model(name).await;
            Command::Handled
        }
        _ => Command::None,
    }
}
/// Interrupt: kill processes, abort current turn, clear pending queue.
async fn interrupt(&mut self) {
    // Abort every running tool call; `count` is how many were killed.
    let count = {
        let agent = self.agent.lock().await;
        let mut tools = agent.active_tools.lock().unwrap();
        let count = tools.len();
        for entry in tools.drain(..) {
            entry.handle.abort();
        }
        count
    };
    // Only when no tool was running is the turn task itself aborted.
    // NOTE(review): when tools WERE killed, the turn keeps running —
    // confirm that's intended (presumably the turn observes the aborts).
    if count == 0 {
        if let Some(handle) = self.turn_handle.take() {
            handle.abort();
            self.turn_in_progress = false;
            self.dmn = dmn::State::Resting { since: Instant::now() };
            self.update_status();
            let _ = self.ui_tx.send(UiMessage::Activity(String::new()));
        }
    }
    // Drop anything the user queued during the turn.
    self.pending_input = None;
    let killed = count;
    // NOTE(review): if the branch above aborted the turn, turn_in_progress
    // is already false here, so that path reports plain "(interrupted)"
    // rather than "turn aborted" — confirm the intended message.
    if killed > 0 || self.turn_in_progress {
        let _ = self.ui_tx.send(UiMessage::Info(format!(
            "(interrupted — killed {} process(es), turn aborted)", killed,
        )));
    } else {
        let _ = self.ui_tx.send(UiMessage::Info("(interrupted)".into()));
    }
}
/// Cycle reasoning effort: none → low → high → none.
///
/// Takes effect immediately when the agent is idle; when a turn holds
/// the lock, the user is told to retry (the change isn't applied).
fn cycle_reasoning(&mut self, app: &mut tui::App) {
    // Guard clause: bail out early if the agent is busy.
    let Ok(mut agent_guard) = self.agent.try_lock() else {
        let _ = self.ui_tx.send(UiMessage::Info(
            "(agent busy — reasoning change takes effect next turn)".into(),
        ));
        return;
    };
    let next = match agent_guard.reasoning_effort.as_str() {
        "none" => "low",
        "low" => "high",
        _ => "none",
    };
    agent_guard.reasoning_effort = next.to_string();
    app.reasoning_effort = next.to_string();
    let label = match next {
        "none" => "off (monologue hidden)",
        "low" => "low (brief monologue)",
        "high" => "high (full monologue)",
        _ => next,
    };
    let _ = self.ui_tx.send(UiMessage::Info(format!("Reasoning: {} — ^R to cycle", label)));
}
/// Show and kill running tool calls (Ctrl+K).
async fn kill_processes(&mut self) {
    // Clone the Arc first so the agent lock is released before draining.
    let active_tools = self.agent.lock().await.active_tools.clone();
    let mut tools = active_tools.lock().unwrap();
    if tools.is_empty() {
        let _ = self.ui_tx.send(UiMessage::Info("(no running tool calls)".into()));
    } else {
        for entry in tools.drain(..) {
            let elapsed = entry.started.elapsed();
            let _ = self.ui_tx.send(UiMessage::Info(format!(
                " killing {} ({:.0}s): {}", entry.name, elapsed.as_secs_f64(), entry.detail,
            )));
            entry.handle.abort();
        }
    }
}
/// Cycle DMN autonomy: foraging → resting → paused → off → foraging.
fn cycle_autonomy(&mut self) {
    let (new_state, label) = match &self.dmn {
        // Any active state steps down to resting.
        dmn::State::Engaged | dmn::State::Working | dmn::State::Foraging => {
            (dmn::State::Resting { since: Instant::now() }, "resting")
        }
        dmn::State::Resting { .. } => (dmn::State::Paused, "PAUSED"),
        dmn::State::Paused => {
            // Persist the off switch so it survives restarts.
            dmn::set_off(true);
            (dmn::State::Off, "OFF (persists across restarts)")
        }
        dmn::State::Off => {
            dmn::set_off(false);
            (dmn::State::Foraging, "foraging")
        }
    };
    self.dmn = new_state;
    self.dmn_turns = 0;
    let _ = self.ui_tx.send(UiMessage::Info(format!("DMN → {} (Ctrl+P to cycle)", label)));
    self.update_status();
}
/// Switch to a named model from the config registry.
///
/// Rebuilds the API client; if the new model uses a different prompt
/// file, the agent's context is recompacted under the new prompt.
async fn switch_model(&mut self, name: &str) {
    if self.turn_in_progress {
        let _ = self.ui_tx.send(UiMessage::Info("(turn in progress, please wait)".into()));
        return;
    }
    let resolved = match self.config.app.resolve_model(name) {
        Ok(r) => r,
        Err(e) => {
            let _ = self.ui_tx.send(UiMessage::Info(format!("{}", e)));
            return;
        }
    };
    let new_client = ApiClient::new(&resolved.api_base, &resolved.api_key, &resolved.model_id);
    let prompt_changed = resolved.prompt_file != self.config.prompt_file;
    let mut agent_guard = self.agent.lock().await;
    agent_guard.swap_client(new_client);
    // Keep the session config in sync with the live client.
    self.config.model = resolved.model_id.clone();
    self.config.api_base = resolved.api_base;
    self.config.api_key = resolved.api_key;
    if prompt_changed {
        self.config.prompt_file = resolved.prompt_file.clone();
        agent_guard.prompt_file = resolved.prompt_file.clone();
        // A different prompt invalidates the assembled context.
        agent_guard.compact();
        let _ = self.ui_tx.send(UiMessage::Info(format!(
            "Switched to {} ({}) — prompt: {}, recompacted",
            name, resolved.model_id, resolved.prompt_file,
        )));
    } else {
        let _ = self.ui_tx.send(UiMessage::Info(format!(
            "Switched to {} ({})", name, resolved.model_id,
        )));
    }
    drop(agent_guard);
    self.update_status();
    self.send_context_info();
}
/// Snapshot the configured context groups from the global config.
fn load_context_groups(&self) -> Vec<config::ContextGroup> {
    config::get().context_groups.clone()
}
/// Push a fresh ContextInfo snapshot (model, prompt, context files,
/// prompt sizes) to the TUI.
fn send_context_info(&self) {
    let context_groups = self.load_context_groups();
    let (instruction_files, memory_files) = identity::context_file_info(
        &self.config.prompt_file,
        self.config.app.memory_project.as_deref(),
        &context_groups,
    );
    let _ = self.ui_tx.send(UiMessage::ContextInfoUpdate(ContextInfo {
        model: self.config.model.clone(),
        available_models: self.config.app.model_names(),
        prompt_file: self.config.prompt_file.clone(),
        backend: self.config.app.backend.clone(),
        instruction_files,
        memory_files,
        system_prompt_chars: self.config.system_prompt.len(),
        context_message_chars: self.config.context_parts.iter().map(|(_, c)| c.len()).sum(),
    }));
}
/// Push the DMN portion of the status bar.
/// Token counts, model, and budget are sent zero/empty here —
/// presumably filled in by other StatusUpdate senders; confirm.
fn update_status(&self) {
    let _ = self.ui_tx.send(UiMessage::StatusUpdate(StatusInfo {
        dmn_state: self.dmn.label().to_string(),
        dmn_turns: self.dmn_turns,
        dmn_max_turns: self.max_dmn_turns,
        prompt_tokens: 0,
        completion_tokens: 0,
        model: String::new(),
        turn_tools: 0,
        context_budget: String::new(),
    }));
}
/// Abort any in-flight turn task before exiting.
async fn shutdown(&mut self) {
    if let Some(handle) = self.turn_handle.take() {
        handle.abort();
    }
}
}
// --- Event loop ---
pub async fn run(cli: crate::user::CliArgs) -> Result<()> {
let (config, _figment) = config::load_session(&cli)?;
if config.app.debug {
unsafe { std::env::set_var("POC_DEBUG", "1") };
}
// Start channel daemons
let mut channel_supervisor = crate::thalamus::supervisor::Supervisor::new();
channel_supervisor.load_config();
channel_supervisor.ensure_running();
// Initialize idle state machine
let mut idle_state = crate::thalamus::idle::State::new();
idle_state.load();
// Channel status fetcher
let (channel_tx, mut channel_rx) = tokio::sync::mpsc::channel::<Vec<(String, bool, u32)>>(4);
{
let tx = channel_tx.clone();
tokio::spawn(async move {
let result = crate::thalamus::channels::fetch_all_channels().await;
let _ = tx.send(result).await;
});
}
let notify_rx = crate::thalamus::channels::subscribe_all();
let mut pending_notifications: Vec<crate::thalamus::channels::ChannelNotification> = Vec::new();
// Create UI channel
let (ui_tx, mut ui_rx) = ui_channel::channel();
// Shared state
let shared_context = ui_channel::shared_context_state();
let shared_active_tools = ui_channel::shared_active_tools();
// Initialize TUI
let mut terminal = tui::init_terminal()?;
let mut app = tui::App::new(config.model.clone(), shared_context.clone(), shared_active_tools.clone());
// Startup info
let _ = ui_tx.send(UiMessage::Info("consciousness v0.3 (tui)".into()));
let _ = ui_tx.send(UiMessage::Info(format!(
" model: {} (available: {})", config.model, config.app.model_names().join(", "),
)));
let client = ApiClient::new(&config.api_base, &config.api_key, &config.model);
let _ = ui_tx.send(UiMessage::Info(format!(" api: {} ({})", config.api_base, client.backend_label())));
let _ = ui_tx.send(UiMessage::Info(format!(
" context: {}K chars ({} config, {} memory files)",
config.context_parts.iter().map(|(_, c)| c.len()).sum::<usize>() / 1024,
config.config_file_count, config.memory_file_count,
)));
let conversation_log_path = config.session_dir.join("conversation.jsonl");
let conversation_log = log::ConversationLog::new(conversation_log_path.clone())
.expect("failed to create conversation log");
let _ = ui_tx.send(UiMessage::Info(format!(" log: {}", conversation_log.path().display())));
let agent = Arc::new(Mutex::new(Agent::new(
client,
config.system_prompt.clone(),
config.context_parts.clone(),
config.app.clone(),
config.prompt_file.clone(),
Some(conversation_log),
shared_context,
shared_active_tools,
)));
// Restore conversation from log
{
let mut agent_guard = agent.lock().await;
if agent_guard.restore_from_log() {
ui_channel::replay_session_to_ui(agent_guard.entries(), &ui_tx);
let _ = ui_tx.send(UiMessage::Info("--- restored from conversation log ---".into()));
}
}
// Send initial budget to status bar
{
let agent_guard = agent.lock().await;
let _ = ui_tx.send(UiMessage::StatusUpdate(StatusInfo {
dmn_state: "resting".to_string(),
dmn_turns: 0, dmn_max_turns: 0,
prompt_tokens: 0, completion_tokens: 0,
model: agent_guard.model().to_string(),
turn_tools: 0,
context_budget: agent_guard.budget().status_string(),
}));
}
let (turn_tx, mut turn_rx) = mpsc::channel::<(Result<TurnResult>, StreamTarget)>(1);
let mut session = Session::new(agent, config, ui_tx.clone(), turn_tx);
session.update_status();
session.send_context_info();
// Start observation socket
let socket_path = session.config.session_dir.join("agent.sock");
let (observe_input_tx, mut observe_input_rx) = observe::input_channel();
observe::start(socket_path, ui_tx.subscribe(), observe_input_tx);
let mut reader = EventStream::new();
let mut render_interval = tokio::time::interval(Duration::from_millis(50));
render_interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
let mut dirty = true;
terminal.hide_cursor()?;
// Initial render
app.drain_messages(&mut ui_rx);
terminal.draw(|f| app.draw(f))?;
loop {
let timeout = session.dmn_interval();
tokio::select! {
biased;
maybe_event = reader.next() => {
match maybe_event {
Some(Ok(Event::Key(key))) => {
if key.kind != KeyEventKind::Press { continue; }
app.handle_key(key);
idle_state.user_activity();
if app.screen == tui::Screen::Thalamus {
let tx = channel_tx.clone();
tokio::spawn(async move {
let result = crate::thalamus::channels::fetch_all_channels().await;
let _ = tx.send(result).await;
});
}
dirty = true;
}
Some(Ok(Event::Mouse(mouse))) => {
app.handle_mouse(mouse);
dirty = true;
}
Some(Ok(Event::Resize(w, h))) => {
app.handle_resize(w, h);
terminal.clear()?;
dirty = true;
}
Some(Err(_)) => break,
None => break,
_ => continue,
}
}
Some(line) = observe_input_rx.recv() => {
app.submitted.push(line);
dirty = true;
}
Some((result, target)) = turn_rx.recv() => {
session.handle_turn_result(result, target).await;
idle_state.response_activity();
dirty = true;
}
_ = render_interval.tick() => {
let new_count = session.agent.lock().await.active_tools.lock().unwrap().len() as u32;
if new_count != app.running_processes {
app.running_processes = new_count;
dirty = true;
}
idle_state.decay_ewma();
app.update_idle(&idle_state);
while let Ok(notif) = notify_rx.try_recv() {
pending_notifications.push(notif);
let tx = channel_tx.clone();
tokio::spawn(async move {
let result = crate::thalamus::channels::fetch_all_channels().await;
let _ = tx.send(result).await;
});
}
}
_ = tokio::time::sleep(timeout), if !session.turn_in_progress => {
session.dmn_tick();
dirty = true;
}
Some(channels) = channel_rx.recv() => {
app.set_channel_status(channels);
dirty = true;
}
Some(msg) = ui_rx.recv() => {
app.handle_ui_message(msg);
dirty = true;
}
}
// Process submitted input
let submitted: Vec<String> = app.submitted.drain(..).collect();
for input in submitted {
let input = input.trim().to_string();
if input.is_empty() { continue; }
match session.handle_command(&input).await {
Command::Quit => app.should_quit = true,
Command::Handled => {}
Command::None => session.submit_input(input),
}
}
// Process hotkey actions
let actions: Vec<HotkeyAction> = app.hotkey_actions.drain(..).collect();
for action in actions {
match action {
HotkeyAction::CycleReasoning => session.cycle_reasoning(&mut app),
HotkeyAction::KillProcess => session.kill_processes().await,
HotkeyAction::Interrupt => session.interrupt().await,
HotkeyAction::CycleAutonomy => session.cycle_autonomy(),
}
}
if app.drain_messages(&mut ui_rx) {
dirty = true;
}
if dirty {
terminal.draw(|f| app.draw(f))?;
dirty = false;
}
if app.should_quit {
break;
}
}
session.shutdown().await;
tui::restore_terminal(&mut terminal)?;
Ok(())
}

View file

@ -95,11 +95,21 @@ impl SavedAgentState {
/// Persistent state for the agent orchestration cycle.
/// Created once, `trigger()` called on each user message.
/// TUI reads snapshots for display.
///
/// TODO: surface-observe, journal, reflect agents currently spawn child
/// processes (legacy from the Claude Code hook path). They should be
/// converted to async tasks using the ApiClient, like memory scoring.
pub struct AgentCycleState {
output_dir: PathBuf,
log_file: Option<File>,
pub agents: Vec<AgentInfo>,
pub last_output: AgentCycleOutput,
/// Incremental memory scoring — entry index to resume from.
pub memory_score_cursor: usize,
/// Whether incremental memory scoring is currently running.
pub memory_scoring_in_flight: bool,
/// Latest per-memory scores from incremental scoring.
pub memory_scores: Vec<(String, f64)>,
}
const AGENT_CYCLE_NAMES: &[&str] = &["surface-observe", "journal", "reflect"];
@ -126,6 +136,9 @@ impl AgentCycleState {
reflection: None,
sleep_secs: None,
},
memory_score_cursor: 0,
memory_scoring_in_flight: false,
memory_scores: Vec::new(),
}
}
@ -171,7 +184,20 @@ impl AgentCycleState {
}
pub fn snapshots(&self) -> Vec<AgentSnapshot> {
self.agents.iter().map(|a| a.snapshot()).collect()
let mut snaps: Vec<AgentSnapshot> = self.agents.iter().map(|a| a.snapshot()).collect();
snaps.push(AgentSnapshot {
name: "memory-scoring".to_string(),
pid: None,
phase: if self.memory_scoring_in_flight {
Some(format!("scoring (cursor: {})", self.memory_score_cursor))
} else if self.memory_scores.is_empty() {
None
} else {
Some(format!("{} memories scored", self.memory_scores.len()))
},
log_path: None,
});
snaps
}
/// Restore agent state from a saved snapshot (for Claude Code hook path).

View file

@ -1,74 +0,0 @@
// cli.rs — Command-line argument parsing
//
// All fields are Option<T> so unset args don't override config file
// values. The layering order is:
// defaults < config file < CLI args
//
// Subcommands:
// (none) Launch the TUI agent
// read Print new output since last check and exit
// write <msg> Send a message to the running agent
use clap::{Parser, Subcommand};
use std::path::PathBuf;
#[derive(Parser, Debug)]
#[command(name = "consciousness", about = "Substrate-independent AI agent")]
pub struct CliArgs {
/// Select active backend ("anthropic" or "openrouter")
#[arg(long)]
pub backend: Option<String>,
/// Model override
#[arg(short, long)]
pub model: Option<String>,
/// API key override
#[arg(long)]
pub api_key: Option<String>,
/// Base URL override
#[arg(long)]
pub api_base: Option<String>,
/// Enable debug logging
#[arg(long)]
pub debug: bool,
/// Print effective config with provenance and exit
#[arg(long)]
pub show_config: bool,
/// Override all prompt assembly with this file
#[arg(long)]
pub system_prompt_file: Option<PathBuf>,
/// Project memory directory
#[arg(long)]
pub memory_project: Option<PathBuf>,
/// Max consecutive DMN turns
#[arg(long)]
pub dmn_max_turns: Option<u32>,
#[command(subcommand)]
pub command: Option<SubCmd>,
}
#[derive(Subcommand, Debug)]
pub enum SubCmd {
/// Print new output since last read and exit
Read {
/// Stream output continuously instead of exiting
#[arg(short, long)]
follow: bool,
/// Block until a complete response is received, then exit
#[arg(long)]
block: bool,
},
/// Send a message to the running agent
Write {
/// The message to send
message: Vec<String>,
},
}

View file

@ -1,19 +1,729 @@
// agent/ — interactive agent and shared infrastructure
// user/ — User interface layer
//
// Merged from the former poc-agent crate. Contains:
// - api/ — LLM API backends (OpenAI-compatible, Anthropic)
// - types — Message, ToolDef, ChatRequest, etc.
// - tools/ — tool definitions and dispatch
// - ui_channel — streaming UI communication
// - runner — the interactive agent loop
// - cli, context, dmn, identity, log, observe, parsing, tui
// Config moved to crate::config (unified with memory config)
// TUI, UI channel, parsing. The cognitive layer (session state
// machine, DMN, identity) lives in mind/.
pub mod ui_channel;
pub mod cli;
pub mod dmn;
pub mod identity;
pub mod log;
pub mod observe;
pub mod parsing;
pub mod tui;
pub mod chat;
pub mod context;
pub mod subconscious;
pub mod unconscious;
pub mod thalamus;
// --- TUI infrastructure (moved from tui/mod.rs) ---
use crossterm::{
event::{EnableMouseCapture, DisableMouseCapture, KeyCode, KeyEvent, KeyModifiers, MouseEvent, MouseEventKind, MouseButton},
terminal::{self, EnterAlternateScreen, LeaveAlternateScreen},
ExecutableCommand,
};
use ratatui::{
backend::CrosstermBackend,
layout::Rect,
style::{Color, Style},
text::{Line, Span},
Frame, Terminal,
};
use std::io;
use crate::user::ui_channel::{ContextInfo, SharedContextState, StatusInfo, UiMessage};
pub(crate) const SCREEN_LEGEND: &str = " F1=interact F2=conscious F3=subconscious F4=unconscious F5=thalamus ";
pub(crate) const SUBCONSCIOUS_AGENTS: &[&str] = &["surface-observe", "journal", "reflect"];
#[allow(dead_code)]
pub(crate) const UNCONSCIOUS_AGENTS: &[&str] = &["linker", "organize", "distill", "split"];
/// Remove ANSI escape sequences (CSI color/cursor codes and two-byte
/// ESC sequences) from `text` so tool output renders cleanly in the TUI.
pub(crate) fn strip_ansi(text: &str) -> String {
    let mut cleaned = String::with_capacity(text.len());
    let mut it = text.chars().peekable();
    while let Some(c) = it.next() {
        if c != '\x1b' {
            cleaned.push(c);
            continue;
        }
        // Escape introducer: decide between a CSI sequence (ESC '[')
        // and a two-byte ESC sequence by peeking one character.
        match it.peek() {
            Some('[') => {
                it.next(); // consume '['
                // Parameter/intermediate bytes: 0x20..=0x3F.
                while matches!(it.peek(), Some(&p) if p.is_ascii() && (0x20..=0x3F).contains(&(p as u8))) {
                    it.next();
                }
                // Exactly one final byte: 0x40..=0x7E.
                if matches!(it.peek(), Some(&f) if f.is_ascii() && (0x40..=0x7E).contains(&(f as u8))) {
                    it.next();
                }
            }
            // Two-byte sequence: ESC followed by 0x40..=0x5F.
            Some(&nxt) if nxt.is_ascii() && (0x40..=0x5F).contains(&(nxt as u8)) => {
                it.next();
            }
            // Bare/unknown ESC: drop the ESC itself, keep what follows.
            _ => {}
        }
    }
    cleaned
}
/// True for Unicode characters that render with zero width
/// (zero-width spaces/joiners, directional marks, line/paragraph
/// separators, word joiners / invisible operators, BOM) — invisible
/// but counted, which causes TUI rendering artifacts.
pub(crate) fn is_zero_width(ch: char) -> bool {
    let cp = ch as u32;
    (0x200B..=0x200F).contains(&cp)
        || (0x2028..=0x202F).contains(&cp)
        || (0x2060..=0x2069).contains(&cp)
        || cp == 0xFEFF
}
/// Which pane receives scroll keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ActivePane {
    /// Top-left pane: autonomous output (DMN annotations + model prose).
    Autonomous,
    /// Bottom-left pane: conversation (user input + model responses).
    Conversation,
    /// Right pane: tool activity.
    Tools,
}
const MAX_PANE_LINES: usize = 10_000;
/// Turn marker for the conversation pane gutter.
#[derive(Clone, Copy, PartialEq, Default)]
pub(crate) enum Marker {
    /// No marker on this line.
    #[default]
    None,
    /// Line begins a user turn.
    User,
    /// Line begins an assistant turn.
    Assistant,
}
/// A scrollable text pane with auto-scroll behavior.
///
/// Scroll offset is in visual (wrapped) lines so that auto-scroll
/// correctly tracks the bottom even when long lines wrap.
pub(crate) struct PaneState {
    pub(crate) lines: Vec<Line<'static>>,
    /// Turn markers — parallel to `lines`, same length.
    pub(crate) markers: Vec<Marker>,
    /// Current line being built (no trailing newline yet) — plain mode only.
    pub(crate) current_line: String,
    /// Color applied to streaming text (set before append_text) — plain mode only.
    pub(crate) current_color: Color,
    /// Raw markdown text of the current streaming response.
    pub(crate) md_buffer: String,
    /// Whether this pane parses streaming text as markdown.
    pub(crate) use_markdown: bool,
    /// Marker to apply to the next line pushed (for turn start tracking).
    pub(crate) pending_marker: Marker,
    /// Scroll offset in visual (wrapped) lines from the top.
    pub(crate) scroll: u16,
    /// Whether the user has scrolled away from the bottom.
    pub(crate) pinned: bool,
    /// Last known total visual lines (set during draw).
    pub(crate) last_total_lines: u16,
    /// Last known inner height (set during draw).
    pub(crate) last_height: u16,
}
impl PaneState {
    /// New empty pane; `use_markdown` selects markdown vs plain-text streaming.
    fn new(use_markdown: bool) -> Self {
        Self {
            lines: Vec::new(), markers: Vec::new(),
            current_line: String::new(), current_color: Color::Reset,
            md_buffer: String::new(), use_markdown,
            pending_marker: Marker::None, scroll: 0, pinned: false,
            last_total_lines: 0, last_height: 20,
        }
    }
    /// Evict old lines (and their parallel markers) if we're over the cap,
    /// adjusting the scroll offset so the view doesn't jump.
    fn evict(&mut self) {
        if self.lines.len() > MAX_PANE_LINES {
            let excess = self.lines.len() - MAX_PANE_LINES;
            self.lines.drain(..excess);
            self.markers.drain(..excess);
            self.scroll = self.scroll.saturating_sub(excess as u16);
        }
    }
    /// Append streamed text. In markdown mode the raw text is buffered
    /// for later parsing; in plain mode it is split on newlines into
    /// `lines`, tabs expanded, control/zero-width chars dropped.
    fn append_text(&mut self, text: &str) {
        let clean = strip_ansi(text);
        if self.use_markdown {
            self.md_buffer.push_str(&clean);
        } else {
            for ch in clean.chars() {
                if ch == '\n' {
                    let line = std::mem::take(&mut self.current_line);
                    self.lines.push(Line::styled(line, Style::default().fg(self.current_color)));
                    self.markers.push(Marker::None);
                } else if ch == '\t' {
                    self.current_line.push_str("    ");
                } else if ch.is_control() || is_zero_width(ch) {
                    // Dropped: would corrupt terminal rendering.
                } else {
                    self.current_line.push(ch);
                }
            }
        }
        self.evict();
    }
    /// Commit any buffered markdown / partial plain line into `lines`.
    /// The pending turn marker is attached to the first committed line.
    pub(crate) fn flush_pending(&mut self) {
        if self.use_markdown && !self.md_buffer.is_empty() {
            let parsed = parse_markdown(&self.md_buffer);
            for (i, line) in parsed.into_iter().enumerate() {
                let marker = if i == 0 { std::mem::take(&mut self.pending_marker) } else { Marker::None };
                self.lines.push(line);
                self.markers.push(marker);
            }
            self.md_buffer.clear();
        }
        if !self.current_line.is_empty() {
            let line = std::mem::take(&mut self.current_line);
            self.lines.push(Line::styled(line, Style::default().fg(self.current_color)));
            self.markers.push(std::mem::take(&mut self.pending_marker));
        }
    }
    /// Push a complete line with no gutter marker.
    fn push_line(&mut self, line: String, color: Color) {
        self.push_line_with_marker(line, color, Marker::None);
    }
    /// Push a complete line, flushing any pending streamed text first
    /// so ordering is preserved.
    fn push_line_with_marker(&mut self, line: String, color: Color, marker: Marker) {
        self.flush_pending();
        self.lines.push(Line::styled(strip_ansi(&line), Style::default().fg(color)));
        self.markers.push(marker);
        self.evict();
    }
    /// Scroll up `n` visual lines; pins the pane (auto-scroll off).
    fn scroll_up(&mut self, n: u16) {
        self.scroll = self.scroll.saturating_sub(n);
        self.pinned = true;
    }
    /// Scroll down `n` visual lines; unpins once the bottom is reached.
    fn scroll_down(&mut self, n: u16) {
        let max = self.last_total_lines.saturating_sub(self.last_height);
        self.scroll = (self.scroll + n).min(max);
        if self.scroll >= max { self.pinned = false; }
    }
    /// All lines including the in-progress streamed text (markers dropped).
    pub(crate) fn all_lines(&self) -> Vec<Line<'static>> {
        let (lines, _) = self.all_lines_with_markers();
        lines
    }
    /// All lines plus parallel gutter markers, including the in-progress
    /// markdown buffer / partial plain line (rendered without committing).
    pub(crate) fn all_lines_with_markers(&self) -> (Vec<Line<'static>>, Vec<Marker>) {
        let mut lines: Vec<Line<'static>> = self.lines.clone();
        let mut markers: Vec<Marker> = self.markers.clone();
        if self.use_markdown && !self.md_buffer.is_empty() {
            let parsed = parse_markdown(&self.md_buffer);
            let count = parsed.len();
            lines.extend(parsed);
            if count > 0 {
                markers.push(self.pending_marker);
                markers.extend(std::iter::repeat(Marker::None).take(count - 1));
            }
        } else if !self.current_line.is_empty() {
            lines.push(Line::styled(self.current_line.clone(), Style::default().fg(self.current_color)));
            markers.push(self.pending_marker);
        }
        (lines, markers)
    }
}
/// Build a textarea with the TUI's standard configuration:
/// no cursor-line highlight style and word wrapping.
pub(crate) fn new_textarea(lines: Vec<String>) -> tui_textarea::TextArea<'static> {
    let mut ta = tui_textarea::TextArea::new(lines);
    ta.set_cursor_line_style(Style::default());
    ta.set_wrap_mode(tui_textarea::WrapMode::Word);
    ta
}
/// Render a markdown string into owned ratatui lines.
///
/// `tui_markdown::from_str` yields lines borrowing from `md`; each span
/// is copied into an owned `Line<'static>` (preserving per-span style,
/// per-line style, and alignment) so panes can cache the result.
pub(crate) fn parse_markdown(md: &str) -> Vec<Line<'static>> {
    let parsed = tui_markdown::from_str(md);
    let mut owned = Vec::with_capacity(parsed.lines.len());
    for src in parsed.lines {
        let spans: Vec<Span<'static>> = src
            .spans
            .into_iter()
            .map(|s| Span::styled(s.content.into_owned(), s.style))
            .collect();
        let mut line = Line::from(spans).style(src.style);
        line.alignment = src.alignment;
        owned.push(line);
    }
    owned
}
/// Top-level TUI screen, selected with F1–F5 in `handle_key` and
/// dispatched in `draw`.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Screen {
    Interact, Conscious, Subconscious, Unconscious, Thalamus,
}
/// Actions queued by hotkeys (Ctrl-r/k/p, Esc) into `App::hotkey_actions`
/// and drained by the main event loop.
#[derive(Debug)]
pub enum HotkeyAction {
    CycleReasoning, KillProcess, Interrupt, CycleAutonomy,
}
/// Snapshot of the idle tracker for display, built from
/// `thalamus::idle::State` in `App::update_idle`.
#[derive(Clone)]
pub(crate) struct IdleInfo {
    pub user_present: bool,
    // Value of State::since_activity() — time since last activity.
    pub since_activity: f64,
    pub activity_ewma: f64,
    pub block_reason: String,
    pub dreaming: bool,
    // True while a sleep deadline (`sleep_until`) is set.
    pub sleeping: bool,
}
/// Connection + unread-count state for one thalamus channel,
/// populated via `App::set_channel_status`.
#[derive(Clone)]
pub(crate) struct ChannelStatus {
    pub name: String,
    pub connected: bool,
    pub unread: u32,
}
/// All TUI state: the three interact-screen panes, status bar, input
/// editor, and per-screen view state. Rendered by `draw`, mutated by
/// key/mouse events and incoming `UiMessage`s.
pub struct App {
    // The three interact-screen panes (markdown, markdown, plain).
    pub(crate) autonomous: PaneState,
    pub(crate) conversation: PaneState,
    pub(crate) tools: PaneState,
    /// Status-bar contents (DMN state, tokens, model, budget).
    pub(crate) status: StatusInfo,
    /// Current activity text ("" clears the call timer).
    pub(crate) activity: String,
    /// When the current turn started (set on user input / DMN annotation).
    pub(crate) turn_started: Option<std::time::Instant>,
    /// When the current API call started (None when no call in flight).
    pub(crate) call_started: Option<std::time::Instant>,
    pub(crate) call_timeout_secs: u64,
    /// Mark the next conversation line as the start of an assistant turn.
    pub(crate) needs_assistant_marker: bool,
    pub running_processes: u32,
    pub reasoning_effort: String,
    /// Shared active tools — Agent writes, TUI reads.
    pub(crate) active_tools: crate::user::ui_channel::SharedActiveTools,
    /// Pane that receives scroll keys and the mouse wheel.
    pub(crate) active_pane: ActivePane,
    /// Input editor.
    pub textarea: tui_textarea::TextArea<'static>,
    // Previously submitted inputs, browsed with Up/Down.
    input_history: Vec<String>,
    // Index into input_history while browsing; None = live input.
    history_index: Option<usize>,
    pub should_quit: bool,
    /// Lines submitted with Enter, drained by the main loop.
    pub submitted: Vec<String>,
    /// Hotkey actions queued for the main loop.
    pub hotkey_actions: Vec<HotkeyAction>,
    /// Screen rects of the three panes, for mouse hit-testing.
    pub(crate) pane_areas: [Rect; 3],
    pub screen: Screen,
    // F2–F5 screen view state (scroll, selection, expanded items).
    pub(crate) debug_scroll: u16,
    pub(crate) debug_selected: Option<usize>,
    pub(crate) debug_expanded: std::collections::HashSet<usize>,
    pub(crate) context_info: Option<ContextInfo>,
    pub(crate) shared_context: SharedContextState,
    // Subconscious screen: selected agent and whether its log is open.
    pub(crate) agent_selected: usize,
    pub(crate) agent_log_view: bool,
    pub(crate) agent_state: Vec<crate::subconscious::subconscious::AgentSnapshot>,
    pub(crate) channel_status: Vec<ChannelStatus>,
    pub(crate) idle_info: Option<IdleInfo>,
}
impl App {
    /// Fresh TUI state. The autonomous and conversation panes render
    /// markdown; the tools pane is plain text.
    pub fn new(model: String, shared_context: SharedContextState, active_tools: crate::user::ui_channel::SharedActiveTools) -> Self {
        Self {
            autonomous: PaneState::new(true),
            conversation: PaneState::new(true),
            tools: PaneState::new(false),
            status: StatusInfo {
                dmn_state: "resting".into(), dmn_turns: 0, dmn_max_turns: 20,
                prompt_tokens: 0, completion_tokens: 0, model,
                turn_tools: 0, context_budget: String::new(),
            },
            activity: String::new(),
            turn_started: None, call_started: None, call_timeout_secs: 60,
            needs_assistant_marker: false, running_processes: 0,
            reasoning_effort: "none".to_string(),
            active_tools, active_pane: ActivePane::Conversation,
            textarea: new_textarea(vec![String::new()]),
            input_history: Vec::new(), history_index: None,
            should_quit: false, submitted: Vec::new(), hotkey_actions: Vec::new(),
            pane_areas: [Rect::default(); 3],
            screen: Screen::Interact,
            debug_scroll: 0, debug_selected: None,
            debug_expanded: std::collections::HashSet::new(),
            context_info: None, shared_context,
            agent_selected: 0, agent_log_view: false, agent_state: Vec::new(),
            channel_status: Vec::new(), idle_info: None,
        }
    }
    /// Drain and apply all queued UI messages; returns true if any
    /// were processed (i.e. a redraw is needed).
    pub fn drain_messages(&mut self, rx: &mut crate::user::ui_channel::UiReceiver) -> bool {
        let mut any = false;
        while let Ok(msg) = rx.try_recv() {
            self.handle_ui_message(msg);
            any = true;
        }
        any
    }
    /// Route one `UiMessage` into pane/status state.
    pub fn handle_ui_message(&mut self, msg: UiMessage) {
        use crate::user::ui_channel::StreamTarget;
        match msg {
            // Streamed model text, routed by target pane.
            UiMessage::TextDelta(text, target) => match target {
                StreamTarget::Conversation => {
                    if self.needs_assistant_marker {
                        self.conversation.pending_marker = Marker::Assistant;
                        self.needs_assistant_marker = false;
                    }
                    self.conversation.current_color = Color::Reset;
                    self.conversation.append_text(&text);
                }
                StreamTarget::Autonomous => {
                    self.autonomous.current_color = Color::Reset;
                    self.autonomous.append_text(&text);
                }
            },
            // User input starts a new turn: marker, timer, tool counter.
            UiMessage::UserInput(text) => {
                self.conversation.push_line_with_marker(text, Color::Cyan, Marker::User);
                self.turn_started = Some(std::time::Instant::now());
                self.needs_assistant_marker = true;
                self.status.turn_tools = 0;
            }
            UiMessage::ToolCall { name, args_summary } => {
                self.status.turn_tools += 1;
                let line = if args_summary.is_empty() { format!("[{}]", name) }
                    else { format!("[{}] {}", name, args_summary) };
                self.tools.push_line(line, Color::Yellow);
            }
            // Tool result rendered indented, followed by a blank separator.
            UiMessage::ToolResult { name: _, result } => {
                for line in result.lines() {
                    self.tools.push_line(format!("  {}", line), Color::DarkGray);
                }
                self.tools.push_line(String::new(), Color::Reset);
            }
            // DMN annotation also starts a turn (autonomous pane).
            UiMessage::DmnAnnotation(text) => {
                self.autonomous.push_line(text, Color::Yellow);
                self.turn_started = Some(std::time::Instant::now());
                self.needs_assistant_marker = true;
                self.status.turn_tools = 0;
            }
            // Partial updates: empty/zero fields mean "leave unchanged".
            UiMessage::StatusUpdate(info) => {
                if !info.dmn_state.is_empty() {
                    self.status.dmn_state = info.dmn_state;
                    self.status.dmn_turns = info.dmn_turns;
                    self.status.dmn_max_turns = info.dmn_max_turns;
                }
                if info.prompt_tokens > 0 { self.status.prompt_tokens = info.prompt_tokens; }
                if !info.model.is_empty() { self.status.model = info.model; }
                if !info.context_budget.is_empty() { self.status.context_budget = info.context_budget; }
            }
            // Empty text clears the call timer; otherwise (re)start it.
            UiMessage::Activity(text) => {
                if text.is_empty() {
                    self.call_started = None;
                } else if self.activity.is_empty() || self.call_started.is_none() {
                    self.call_started = Some(std::time::Instant::now());
                    self.call_timeout_secs = crate::config::get().api_stream_timeout_secs;
                }
                self.activity = text;
            }
            // Reasoning tokens stream dimmed into the autonomous pane.
            UiMessage::Reasoning(text) => {
                self.autonomous.current_color = Color::DarkGray;
                self.autonomous.append_text(&text);
            }
            UiMessage::ToolStarted { .. } | UiMessage::ToolFinished { .. } => {}
            UiMessage::Debug(text) => {
                self.tools.push_line(format!("[debug] {}", text), Color::DarkGray);
            }
            UiMessage::Info(text) => {
                self.conversation.push_line(text, Color::Cyan);
            }
            UiMessage::ContextInfoUpdate(info) => { self.context_info = Some(info); }
            UiMessage::AgentUpdate(agents) => { self.agent_state = agents; }
        }
    }
    /// Key dispatch, in priority order: Ctrl hotkeys, F-key screen
    /// switching, per-screen navigation, then interact-screen editing.
    pub fn handle_key(&mut self, key: KeyEvent) {
        if key.modifiers.contains(KeyModifiers::CONTROL) {
            match key.code {
                KeyCode::Char('c') => { self.should_quit = true; return; }
                KeyCode::Char('r') => { self.hotkey_actions.push(HotkeyAction::CycleReasoning); return; }
                KeyCode::Char('k') => { self.hotkey_actions.push(HotkeyAction::KillProcess); return; }
                KeyCode::Char('p') => { self.hotkey_actions.push(HotkeyAction::CycleAutonomy); return; }
                _ => {}
            }
        }
        match key.code {
            KeyCode::F(1) => { self.set_screen(Screen::Interact); return; }
            KeyCode::F(2) => { self.set_screen(Screen::Conscious); return; }
            KeyCode::F(3) => { self.set_screen(Screen::Subconscious); return; }
            KeyCode::F(4) => { self.set_screen(Screen::Unconscious); return; }
            KeyCode::F(5) => { self.set_screen(Screen::Thalamus); return; }
            _ => {}
        }
        match self.screen {
            // Subconscious: agent list navigation and log view toggle.
            Screen::Subconscious => {
                match key.code {
                    KeyCode::Up => { self.agent_selected = self.agent_selected.saturating_sub(1); self.debug_scroll = 0; return; }
                    KeyCode::Down => { self.agent_selected = (self.agent_selected + 1).min(SUBCONSCIOUS_AGENTS.len() - 1); self.debug_scroll = 0; return; }
                    KeyCode::Enter | KeyCode::Right => { self.agent_log_view = true; self.debug_scroll = 0; return; }
                    KeyCode::Left | KeyCode::Esc => {
                        if self.agent_log_view { self.agent_log_view = false; self.debug_scroll = 0; }
                        else { self.screen = Screen::Interact; }
                        return;
                    }
                    KeyCode::PageUp => { self.debug_scroll = self.debug_scroll.saturating_sub(10); return; }
                    KeyCode::PageDown => { self.debug_scroll += 10; return; }
                    _ => {}
                }
            }
            // Conscious: item selection (clamped to item count) and expand/collapse.
            Screen::Conscious => {
                let cs = self.read_context_state();
                let n = self.debug_item_count(&cs);
                match key.code {
                    KeyCode::Up => {
                        if n > 0 { self.debug_selected = Some(match self.debug_selected { None => n - 1, Some(0) => 0, Some(i) => i - 1 }); self.scroll_to_selected(n); }
                        return;
                    }
                    KeyCode::Down => {
                        if n > 0 { self.debug_selected = Some(match self.debug_selected { None => 0, Some(i) if i >= n - 1 => n - 1, Some(i) => i + 1 }); self.scroll_to_selected(n); }
                        return;
                    }
                    KeyCode::PageUp => {
                        if n > 0 { self.debug_selected = Some(match self.debug_selected { None => 0, Some(i) => i.saturating_sub(20) }); self.scroll_to_selected(n); }
                        return;
                    }
                    KeyCode::PageDown => {
                        if n > 0 { self.debug_selected = Some(match self.debug_selected { None => 0, Some(i) => (i + 20).min(n - 1) }); self.scroll_to_selected(n); }
                        return;
                    }
                    KeyCode::Right | KeyCode::Enter => { if let Some(idx) = self.debug_selected { self.debug_expanded.insert(idx); } return; }
                    KeyCode::Left => { if let Some(idx) = self.debug_selected { self.debug_expanded.remove(&idx); } return; }
                    KeyCode::Esc => { self.screen = Screen::Interact; return; }
                    _ => {}
                }
            }
            // Unconscious/Thalamus: scroll only.
            Screen::Unconscious | Screen::Thalamus => {
                match key.code {
                    KeyCode::PageUp => { self.debug_scroll = self.debug_scroll.saturating_sub(10); return; }
                    KeyCode::PageDown => { self.debug_scroll += 10; return; }
                    KeyCode::Esc => { self.screen = Screen::Interact; return; }
                    _ => {}
                }
            }
            Screen::Interact => {}
        }
        // Interact screen: input editing, history, pane scrolling.
        match key.code {
            KeyCode::Esc => { self.hotkey_actions.push(HotkeyAction::Interrupt); }
            // Plain Enter submits; Alt/Shift-Enter falls through to the
            // textarea for a literal newline.
            KeyCode::Enter if !key.modifiers.contains(KeyModifiers::ALT) && !key.modifiers.contains(KeyModifiers::SHIFT) => {
                let input: String = self.textarea.lines().join("\n");
                if !input.is_empty() {
                    // Skip history push when identical to the last entry.
                    if self.input_history.last().map_or(true, |h| h != &input) { self.input_history.push(input.clone()); }
                    self.history_index = None;
                    self.submitted.push(input);
                    self.textarea = new_textarea(vec![String::new()]);
                }
            }
            KeyCode::Up if key.modifiers.contains(KeyModifiers::CONTROL) => self.scroll_active_up(3),
            KeyCode::Down if key.modifiers.contains(KeyModifiers::CONTROL) => self.scroll_active_down(3),
            // History: Up walks back, Down walks forward (returning to
            // a blank editor past the newest entry).
            KeyCode::Up => {
                if !self.input_history.is_empty() {
                    let idx = match self.history_index { None => self.input_history.len() - 1, Some(i) => i.saturating_sub(1) };
                    self.history_index = Some(idx);
                    let mut ta = new_textarea(self.input_history[idx].lines().map(String::from).collect());
                    ta.move_cursor(tui_textarea::CursorMove::End);
                    self.textarea = ta;
                }
            }
            KeyCode::Down => {
                if let Some(idx) = self.history_index {
                    if idx + 1 < self.input_history.len() {
                        self.history_index = Some(idx + 1);
                        let mut ta = new_textarea(self.input_history[idx + 1].lines().map(String::from).collect());
                        ta.move_cursor(tui_textarea::CursorMove::End);
                        self.textarea = ta;
                    } else {
                        self.history_index = None;
                        self.textarea = new_textarea(vec![String::new()]);
                    }
                }
            }
            KeyCode::PageUp => self.scroll_active_up(10),
            KeyCode::PageDown => self.scroll_active_down(10),
            // Tab cycles scroll focus: autonomous → tools → conversation.
            KeyCode::Tab => {
                self.active_pane = match self.active_pane {
                    ActivePane::Autonomous => ActivePane::Tools,
                    ActivePane::Tools => ActivePane::Conversation,
                    ActivePane::Conversation => ActivePane::Autonomous,
                };
            }
            // Everything else goes to the input editor.
            _ => { self.textarea.input(key); }
        }
    }
    /// Scroll the focused pane up by `n` visual lines.
    fn scroll_active_up(&mut self, n: u16) {
        match self.active_pane {
            ActivePane::Autonomous => self.autonomous.scroll_up(n),
            ActivePane::Conversation => self.conversation.scroll_up(n),
            ActivePane::Tools => self.tools.scroll_up(n),
        }
    }
    /// Scroll the focused pane down by `n` visual lines.
    fn scroll_active_down(&mut self, n: u16) {
        match self.active_pane {
            ActivePane::Autonomous => self.autonomous.scroll_down(n),
            ActivePane::Conversation => self.conversation.scroll_down(n),
            ActivePane::Tools => self.tools.scroll_down(n),
        }
    }
    /// No per-app resize state; layout is recomputed each draw.
    pub fn handle_resize(&mut self, _width: u16, _height: u16) {}
    /// Mouse wheel scrolls the focused pane; left-click focuses the
    /// pane under the cursor via `pane_areas` hit-testing.
    pub fn handle_mouse(&mut self, mouse: MouseEvent) {
        match mouse.kind {
            MouseEventKind::ScrollUp => self.scroll_active_up(3),
            MouseEventKind::ScrollDown => self.scroll_active_down(3),
            MouseEventKind::Down(MouseButton::Left) => {
                let (x, y) = (mouse.column, mouse.row);
                for (i, area) in self.pane_areas.iter().enumerate() {
                    if x >= area.x && x < area.x + area.width && y >= area.y && y < area.y + area.height {
                        self.active_pane = match i { 0 => ActivePane::Autonomous, 1 => ActivePane::Conversation, _ => ActivePane::Tools };
                        break;
                    }
                }
            }
            _ => {}
        }
    }
    /// Dispatch rendering to the current screen's draw function.
    pub fn draw(&mut self, frame: &mut Frame) {
        let size = frame.area();
        match self.screen {
            Screen::Conscious => { self.draw_debug(frame, size); return; }
            Screen::Subconscious => { self.draw_agents(frame, size); return; }
            Screen::Unconscious => { self.draw_unconscious(frame, size); return; }
            Screen::Thalamus => { self.draw_thalamus(frame, size); return; }
            Screen::Interact => {}
        }
        self.draw_main(frame, size);
    }
    /// Replace channel status from (name, connected, unread) tuples.
    pub fn set_channel_status(&mut self, channels: Vec<(String, bool, u32)>) {
        self.channel_status = channels.into_iter()
            .map(|(name, connected, unread)| ChannelStatus { name, connected, unread })
            .collect();
    }
    /// Snapshot the idle tracker for status display.
    pub fn update_idle(&mut self, state: &crate::thalamus::idle::State) {
        self.idle_info = Some(IdleInfo {
            user_present: state.user_present(), since_activity: state.since_activity(),
            activity_ewma: state.activity_ewma, block_reason: state.block_reason().to_string(),
            dreaming: state.dreaming, sleeping: state.sleep_until.is_some(),
        });
    }
    /// Switch screens, resetting the shared scroll offset.
    pub(crate) fn set_screen(&mut self, screen: Screen) {
        self.screen = screen;
        self.debug_scroll = 0;
    }
}
/// Enter raw mode and the alternate screen, enable mouse capture, and
/// build the crossterm-backed terminal. Pair with `restore_terminal`.
pub fn init_terminal() -> io::Result<Terminal<CrosstermBackend<io::Stdout>>> {
    terminal::enable_raw_mode()?;
    let mut stdout = io::stdout();
    stdout.execute(EnterAlternateScreen)?;
    stdout.execute(EnableMouseCapture)?;
    let backend = CrosstermBackend::new(stdout);
    Terminal::new(backend)
}
/// Undo `init_terminal`: leave raw mode, disable mouse capture, leave
/// the alternate screen, and re-show the cursor.
pub fn restore_terminal(terminal: &mut Terminal<CrosstermBackend<io::Stdout>>) -> io::Result<()> {
    terminal::disable_raw_mode()?;
    terminal.backend_mut().execute(DisableMouseCapture)?;
    terminal.backend_mut().execute(LeaveAlternateScreen)?;
    terminal.show_cursor()
}
// --- CLI ---
use clap::{Parser, Subcommand};
use std::path::PathBuf;
/// Command-line arguments.
///
/// All override fields are `Option<T>` so unset args don't clobber
/// config-file values; the layering order is
/// defaults < config file < CLI args.
#[derive(Parser, Debug)]
#[command(name = "consciousness", about = "Substrate-independent AI agent")]
pub struct CliArgs {
    /// Select active backend ("anthropic" or "openrouter")
    #[arg(long)]
    pub backend: Option<String>,
    /// Model override
    #[arg(short, long)]
    pub model: Option<String>,
    /// API key override
    #[arg(long)]
    pub api_key: Option<String>,
    /// Base URL override
    #[arg(long)]
    pub api_base: Option<String>,
    /// Enable debug logging
    #[arg(long)]
    pub debug: bool,
    /// Print effective config with provenance and exit
    #[arg(long)]
    pub show_config: bool,
    /// Override all prompt assembly with this file
    #[arg(long)]
    pub system_prompt_file: Option<PathBuf>,
    /// Project memory directory
    #[arg(long)]
    pub memory_project: Option<PathBuf>,
    /// Max consecutive DMN turns
    #[arg(long)]
    pub dmn_max_turns: Option<u32>,
    #[command(subcommand)]
    pub command: Option<SubCmd>,
}
/// Subcommands that talk to a running agent instead of launching the TUI.
#[derive(Subcommand, Debug)]
pub enum SubCmd {
    /// Print new output since last read and exit
    Read {
        /// Stream output continuously instead of exiting
        #[arg(short, long)]
        follow: bool,
        /// Block until a complete response is received, then exit
        #[arg(long)]
        block: bool,
    },
    /// Send a message to the running agent
    Write {
        /// The message to send
        message: Vec<String>,
    },
}
/// Entry point. Dispatches the `read`/`write` subcommands and
/// `--show-config` before launching the interactive TUI via
/// `crate::mind::run`.
#[tokio::main]
pub async fn main() {
    let cli = CliArgs::parse();
    match &cli.command {
        // `read`: print new agent output (optionally follow/block) and exit.
        Some(SubCmd::Read { follow, block }) => {
            if let Err(e) = crate::mind::observe::cmd_read_inner(*follow, *block, cli.debug).await {
                eprintln!("{:#}", e);
                std::process::exit(1);
            }
            return;
        }
        // `write`: send a message to the running agent and exit.
        Some(SubCmd::Write { message }) => {
            let msg = message.join(" ");
            if msg.is_empty() {
                eprintln!("Usage: consciousness write <message>");
                std::process::exit(1);
            }
            if let Err(e) = crate::mind::observe::cmd_write(&msg, cli.debug).await {
                eprintln!("{:#}", e);
                std::process::exit(1);
            }
            return;
        }
        None => {}
    }
    if cli.show_config {
        match crate::config::load_app(&cli) {
            Ok((app, figment)) => crate::config::show_config(&app, &figment),
            Err(e) => {
                eprintln!("Error loading config: {:#}", e);
                std::process::exit(1);
            }
        }
        return;
    }
    if let Err(e) = crate::mind::run(cli).await {
        // Best-effort terminal cleanup: `run` may have failed inside the
        // TUI. `init_terminal` enabled raw mode, the alternate screen AND
        // mouse capture, so all three must be undone here — otherwise the
        // user's shell is left receiving mouse escape sequences.
        let _ = crossterm::terminal::disable_raw_mode();
        let _ = crossterm::execute!(
            std::io::stdout(),
            crossterm::event::DisableMouseCapture,
            crossterm::terminal::LeaveAlternateScreen
        );
        eprintln!("Error: {:#}", e);
        std::process::exit(1);
    }
}

View file

@ -1,886 +0,0 @@
// tui/ — Terminal UI with split panes
//
// Four-pane layout:
// Left top: Autonomous output (DMN annotations + model prose)
// Left bottom: Conversation (user input + model responses)
// Right: Tool activity (tool calls with full results)
// Bottom: Status bar (DMN state, turns, tokens, model)
//
// Uses ratatui + crossterm. The App struct holds all TUI state and
// handles rendering. Input is processed from crossterm key events.
//
// Screen files:
// main_screen.rs — F1 interact (conversation, tools, autonomous)
// context_screen.rs — F2 conscious (context window, model info)
// subconscious_screen.rs — F3 subconscious (consolidation agents)
// unconscious_screen.rs — F4 unconscious (memory daemon status)
mod main;
mod context;
mod subconscious;
mod unconscious;
mod thalamus;
pub(crate) const SCREEN_LEGEND: &str = " F1=interact F2=conscious F3=subconscious F4=unconscious F5=thalamus ";
/// Subconscious agents — interact with conscious context
pub(crate) const SUBCONSCIOUS_AGENTS: &[&str] = &["surface-observe", "journal", "reflect"];
/// Unconscious agents — background consolidation
#[allow(dead_code)]
pub(crate) const UNCONSCIOUS_AGENTS: &[&str] = &["linker", "organize", "distill", "split"];
use crossterm::{
event::{EnableMouseCapture, DisableMouseCapture, KeyCode, KeyEvent, KeyModifiers, MouseEvent, MouseEventKind, MouseButton},
terminal::{self, EnterAlternateScreen, LeaveAlternateScreen},
ExecutableCommand,
};
use ratatui::{
backend::CrosstermBackend,
layout::Rect,
style::{Color, Style},
text::{Line, Span},
Frame, Terminal,
};
use std::io;
use crate::user::ui_channel::{ContextInfo, SharedContextState, StatusInfo, UiMessage};
/// Strip ANSI escape sequences (color codes, cursor movement, etc.)
/// from text so tool output renders cleanly in the TUI.
pub(crate) fn strip_ansi(text: &str) -> String {
let mut out = String::with_capacity(text.len());
let mut chars = text.chars().peekable();
while let Some(ch) = chars.next() {
if ch == '\x1b' {
// CSI sequence: ESC [ ... final_byte
if chars.peek() == Some(&'[') {
chars.next(); // consume '['
// Consume parameter bytes (0x30-0x3F), intermediate (0x20-0x2F),
// then one final byte (0x40-0x7E)
while let Some(&c) = chars.peek() {
if c.is_ascii() && (0x20..=0x3F).contains(&(c as u8)) {
chars.next();
} else {
break;
}
}
// Final byte
if let Some(&c) = chars.peek() {
if c.is_ascii() && (0x40..=0x7E).contains(&(c as u8)) {
chars.next();
}
}
}
// Other escape sequences (ESC + single char)
else if let Some(&c) = chars.peek() {
if c.is_ascii() && (0x40..=0x5F).contains(&(c as u8)) {
chars.next();
}
}
} else {
out.push(ch);
}
}
out
}
/// Check if a Unicode character is zero-width (invisible but takes space
/// in the character count, causing rendering artifacts like `[]`).
pub(crate) fn is_zero_width(ch: char) -> bool {
matches!(ch,
'\u{200B}'..='\u{200F}' | // zero-width space, joiners, directional marks
'\u{2028}'..='\u{202F}' | // line/paragraph separators, embedding
'\u{2060}'..='\u{2069}' | // word joiner, invisible operators
'\u{FEFF}' // byte order mark
)
}
/// Which pane receives scroll keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ActivePane {
    /// The autonomous/DMN pane (pane_areas index 0).
    Autonomous,
    /// The conversation pane (pane_areas index 1) — the default focus.
    Conversation,
    /// The tool-output pane (pane_areas index 2).
    Tools,
}
/// Maximum lines kept per pane. Older lines are evicted to prevent
/// unbounded memory growth during long sessions.
/// Enforced by `PaneState::evict()` after every append/push.
const MAX_PANE_LINES: usize = 10_000;
/// Turn marker for the conversation pane gutter.
#[derive(Clone, Copy, PartialEq, Default)]
pub(crate) enum Marker {
    /// No gutter marker on this line.
    #[default]
    None,
    /// Line starts a user turn (set when user input is echoed).
    User,
    /// Line starts an assistant response (set on the first TextDelta of a turn).
    Assistant,
}
/// A scrollable text pane with auto-scroll behavior.
///
/// Scroll offset is in visual (wrapped) lines so that auto-scroll
/// correctly tracks the bottom even when long lines wrap.
///
/// Invariant: `markers` stays parallel to `lines` (same length) —
/// every push and every eviction updates both together.
pub(crate) struct PaneState {
    /// Finalized display lines, oldest first (capped at MAX_PANE_LINES).
    pub(crate) lines: Vec<Line<'static>>,
    /// Turn markers — parallel to lines, same length.
    pub(crate) markers: Vec<Marker>,
    /// Current line being built (no trailing newline yet) — plain mode only.
    pub(crate) current_line: String,
    /// Color applied to streaming text (set before append_text) — plain mode only.
    pub(crate) current_color: Color,
    /// Raw markdown text of the current streaming response.
    pub(crate) md_buffer: String,
    /// Whether this pane parses streaming text as markdown.
    pub(crate) use_markdown: bool,
    /// Marker to apply to the next line pushed (for turn start tracking).
    pub(crate) pending_marker: Marker,
    /// Scroll offset in visual (wrapped) lines from the top.
    pub(crate) scroll: u16,
    /// Whether the user has scrolled away from the bottom.
    pub(crate) pinned: bool,
    /// Last known total visual lines (set during draw by Paragraph::line_count).
    pub(crate) last_total_lines: u16,
    /// Last known inner height (set during draw).
    pub(crate) last_height: u16,
}
impl PaneState {
    /// Fresh, empty pane. `use_markdown` selects live markdown parsing
    /// (conversation/autonomous panes) vs. plain colored text (tools pane).
    fn new(use_markdown: bool) -> Self {
        Self {
            lines: Vec::new(),
            markers: Vec::new(),
            current_line: String::new(),
            current_color: Color::Reset,
            md_buffer: String::new(),
            use_markdown,
            pending_marker: Marker::None,
            scroll: 0,
            pinned: false,
            last_total_lines: 0,
            last_height: 20,
        }
    }
    /// Evict old lines if we're over the cap.
    fn evict(&mut self) {
        if self.lines.len() > MAX_PANE_LINES {
            let excess = self.lines.len() - MAX_PANE_LINES;
            // markers is parallel to lines — drain both by the same amount.
            self.lines.drain(..excess);
            self.markers.drain(..excess);
            // Approximate: reduce scroll by the wrapped height of evicted lines.
            // Not perfectly accurate but prevents scroll from jumping wildly.
            self.scroll = self.scroll.saturating_sub(excess as u16);
        }
    }
    /// Append text, splitting on newlines. Strips ANSI escapes.
    /// In markdown mode, raw text accumulates in md_buffer for
    /// live parsing during render. In plain mode, character-by-character
    /// processing builds lines with current_color.
    fn append_text(&mut self, text: &str) {
        let clean = strip_ansi(text);
        if self.use_markdown {
            self.md_buffer.push_str(&clean);
        } else {
            for ch in clean.chars() {
                if ch == '\n' {
                    let line = std::mem::take(&mut self.current_line);
                    self.lines.push(Line::styled(line, Style::default().fg(self.current_color)));
                    self.markers.push(Marker::None);
                } else if ch == '\t' {
                    self.current_line.push_str("    ");
                } else if ch.is_control() || is_zero_width(ch) {
                    // Skip control chars and zero-width Unicode
                } else {
                    self.current_line.push(ch);
                }
            }
        }
        self.evict();
    }
    /// Finalize any pending content (markdown buffer or current line).
    pub(crate) fn flush_pending(&mut self) {
        if self.use_markdown && !self.md_buffer.is_empty() {
            let parsed = parse_markdown(&self.md_buffer);
            for (i, line) in parsed.into_iter().enumerate() {
                // Only the first parsed line carries the turn-start marker.
                let marker = if i == 0 {
                    std::mem::take(&mut self.pending_marker)
                } else {
                    Marker::None
                };
                self.lines.push(line);
                self.markers.push(marker);
            }
            self.md_buffer.clear();
        }
        if !self.current_line.is_empty() {
            let line = std::mem::take(&mut self.current_line);
            self.lines.push(Line::styled(line, Style::default().fg(self.current_color)));
            self.markers.push(std::mem::take(&mut self.pending_marker));
        }
    }
    /// Push a complete line with a color. Flushes any pending
    /// markdown or plain-text content first.
    fn push_line(&mut self, line: String, color: Color) {
        self.push_line_with_marker(line, color, Marker::None);
    }
    /// Push a complete line with a color and an explicit gutter marker.
    fn push_line_with_marker(&mut self, line: String, color: Color, marker: Marker) {
        self.flush_pending();
        self.lines.push(Line::styled(strip_ansi(&line), Style::default().fg(color)));
        self.markers.push(marker);
        self.evict();
    }
    /// Scroll up by n visual lines, pinning if we move away from bottom.
    fn scroll_up(&mut self, n: u16) {
        self.scroll = self.scroll.saturating_sub(n);
        self.pinned = true;
    }
    /// Scroll down by n visual lines. Un-pin if we reach bottom.
    fn scroll_down(&mut self, n: u16) {
        let max = self.last_total_lines.saturating_sub(self.last_height);
        // saturating_add: a plain `+` on u16 can overflow (panic in debug
        // builds) once scroll sits near u16::MAX after heavy output.
        self.scroll = self.scroll.saturating_add(n).min(max);
        if self.scroll >= max {
            self.pinned = false;
        }
    }
    /// Get all lines as ratatui Lines. Includes finalized lines plus
    /// any pending content (live-parsed markdown or in-progress plain line).
    /// Scrolling is handled by Paragraph::scroll().
    pub(crate) fn all_lines(&self) -> Vec<Line<'static>> {
        let (lines, _) = self.all_lines_with_markers();
        lines
    }
    /// Get lines and their markers together. Used by the two-column
    /// conversation renderer to know where to place gutter markers.
    pub(crate) fn all_lines_with_markers(&self) -> (Vec<Line<'static>>, Vec<Marker>) {
        let mut lines: Vec<Line<'static>> = self.lines.clone();
        let mut markers: Vec<Marker> = self.markers.clone();
        if self.use_markdown && !self.md_buffer.is_empty() {
            let parsed = parse_markdown(&self.md_buffer);
            let count = parsed.len();
            lines.extend(parsed);
            if count > 0 {
                // First pending line gets the pending marker, the rest none.
                markers.push(self.pending_marker);
                markers.extend(std::iter::repeat(Marker::None).take(count - 1));
            }
        } else if !self.current_line.is_empty() {
            lines.push(Line::styled(
                self.current_line.clone(),
                Style::default().fg(self.current_color),
            ));
            markers.push(self.pending_marker);
        }
        (lines, markers)
    }
}
/// Create a new textarea with standard settings (word wrap, no cursor line highlight).
pub(crate) fn new_textarea(lines: Vec<String>) -> tui_textarea::TextArea<'static> {
    let mut area = tui_textarea::TextArea::new(lines);
    area.set_wrap_mode(tui_textarea::WrapMode::Word);
    area.set_cursor_line_style(Style::default());
    area
}
/// Parse markdown text into owned ratatui Lines.
pub(crate) fn parse_markdown(md: &str) -> Vec<Line<'static>> {
    let parsed = tui_markdown::from_str(md);
    let mut out = Vec::with_capacity(parsed.lines.len());
    for src in parsed.lines {
        // Re-own every span so the result is 'static (independent of `md`).
        let spans: Vec<Span<'static>> = src
            .spans
            .into_iter()
            .map(|s| Span::styled(s.content.into_owned(), s.style))
            .collect();
        let mut owned = Line::from(spans).style(src.style);
        owned.alignment = src.alignment;
        out.push(owned);
    }
    out
}
/// Main TUI application state.
pub struct App {
    /// Autonomous/DMN output pane (markdown).
    pub(crate) autonomous: PaneState,
    /// Conversation pane (markdown).
    pub(crate) conversation: PaneState,
    /// Tool-call/result pane (plain text).
    pub(crate) tools: PaneState,
    /// Current status-bar contents (merged from StatusUpdate messages).
    pub(crate) status: StatusInfo,
    /// Live activity indicator ("thinking...", "calling: bash", etc).
    pub(crate) activity: String,
    /// When the current turn started (for elapsed timer).
    pub(crate) turn_started: Option<std::time::Instant>,
    /// When the current LLM call started (for per-call timer).
    pub(crate) call_started: Option<std::time::Instant>,
    /// Stream timeout for the current call (for display).
    pub(crate) call_timeout_secs: u64,
    /// Whether to emit a marker before the next assistant TextDelta.
    pub(crate) needs_assistant_marker: bool,
    /// Number of running child processes (updated by main loop).
    pub running_processes: u32,
    /// Current reasoning effort level (for status display).
    pub reasoning_effort: String,
    /// Shared active-tools view — written by the Agent, read here.
    pub(crate) active_tools: crate::user::ui_channel::SharedActiveTools,
    /// Which pane currently receives scroll keys (Tab / mouse click to change).
    pub(crate) active_pane: ActivePane,
    /// User input editor (handles wrapping, cursor positioning).
    pub textarea: tui_textarea::TextArea<'static>,
    /// Input history for up/down navigation.
    input_history: Vec<String>,
    /// Current position while browsing input_history; None = editing fresh input.
    history_index: Option<usize>,
    /// Whether to quit.
    pub should_quit: bool,
    /// Submitted input lines waiting to be consumed.
    pub submitted: Vec<String>,
    /// Pending hotkey actions for the main loop to process.
    pub hotkey_actions: Vec<HotkeyAction>,
    /// Pane areas from last draw (for mouse click -> pane selection).
    pub(crate) pane_areas: [Rect; 3], // [autonomous, conversation, tools]
    /// Active screen (F1-F4).
    pub screen: Screen,
    /// Debug screen scroll offset.
    pub(crate) debug_scroll: u16,
    /// Index of selected context section in debug view (for expand/collapse).
    pub(crate) debug_selected: Option<usize>,
    /// Which context section indices are expanded.
    pub(crate) debug_expanded: std::collections::HashSet<usize>,
    /// Context loading info for the debug screen.
    pub(crate) context_info: Option<ContextInfo>,
    /// Live context state — shared with agent, read directly for debug screen.
    pub(crate) shared_context: SharedContextState,
    /// Agent screen: selected agent index.
    pub(crate) agent_selected: usize,
    /// Agent screen: viewing log for selected agent.
    pub(crate) agent_log_view: bool,
    /// Agent state from last cycle update.
    pub(crate) agent_state: Vec<crate::subconscious::subconscious::AgentSnapshot>,
    /// Cached channel info for F5 screen (refreshed on status tick).
    pub(crate) channel_status: Vec<ChannelStatus>,
    /// Cached idle state for F5 screen.
    pub(crate) idle_info: Option<IdleInfo>,
}
/// Snapshot of thalamus idle state for display.
#[derive(Clone)]
pub(crate) struct IdleInfo {
    /// Whether the user is considered present (from State::user_present()).
    pub user_present: bool,
    /// Time since last activity — presumably seconds; confirm against idle::State.
    pub since_activity: f64,
    /// Exponentially-weighted moving average of activity.
    pub activity_ewma: f64,
    /// Human-readable reason autonomy is blocked (from State::block_reason()).
    pub block_reason: String,
    /// Whether the dream cycle is active.
    pub dreaming: bool,
    /// Whether a sleep deadline is set (sleep_until is Some).
    pub sleeping: bool,
}
/// Channel info for display on F5 screen.
#[derive(Clone)]
pub(crate) struct ChannelStatus {
    /// Channel display name.
    pub name: String,
    /// Whether the channel is currently connected.
    pub connected: bool,
    /// Unread message count.
    pub unread: u32,
}
/// Screens toggled by F-keys. Switched via `App::set_screen`; Esc on any
/// non-Interact screen returns to Interact.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Screen {
    /// F1 — conversation
    Interact,
    /// F2 — context window, model info, budget
    Conscious,
    /// F3 — subconscious agent status
    Subconscious,
    /// F4 — memory daemon status
    Unconscious,
    /// F5 — thalamus: channels, presence, attention routing
    Thalamus,
}
/// Actions triggered by hotkeys, consumed by the main loop.
/// Queued by `App::handle_key` into `App::hotkey_actions`.
#[derive(Debug)]
pub enum HotkeyAction {
    /// Ctrl+R: cycle reasoning effort
    CycleReasoning,
    /// Ctrl+K: show/kill running processes
    KillProcess,
    /// Escape: interrupt current turn (kill processes, clear queue)
    Interrupt,
    /// Ctrl+P: cycle DMN autonomy (foraging -> resting -> paused -> foraging)
    CycleAutonomy,
}
impl App {
    /// Build the initial application state. Panes start empty; the
    /// conversation pane has focus and the Interact screen is shown.
    pub fn new(model: String, shared_context: SharedContextState, active_tools: crate::user::ui_channel::SharedActiveTools) -> Self {
        Self {
            autonomous: PaneState::new(true),   // markdown
            conversation: PaneState::new(true), // markdown
            tools: PaneState::new(false),       // plain text
            status: StatusInfo {
                dmn_state: "resting".into(),
                dmn_turns: 0,
                dmn_max_turns: 20,
                prompt_tokens: 0,
                completion_tokens: 0,
                model,
                turn_tools: 0,
                context_budget: String::new(),
            },
            activity: String::new(),
            turn_started: None,
            call_started: None,
            call_timeout_secs: 60,
            needs_assistant_marker: false,
            running_processes: 0,
            reasoning_effort: "none".to_string(),
            active_tools,
            active_pane: ActivePane::Conversation,
            textarea: new_textarea(vec![String::new()]),
            input_history: Vec::new(),
            history_index: None,
            should_quit: false,
            submitted: Vec::new(),
            hotkey_actions: Vec::new(),
            pane_areas: [Rect::default(); 3],
            screen: Screen::Interact,
            debug_scroll: 0,
            debug_selected: None,
            debug_expanded: std::collections::HashSet::new(),
            context_info: None,
            shared_context,
            agent_selected: 0,
            agent_log_view: false,
            agent_state: Vec::new(),
            channel_status: Vec::new(),
            idle_info: None,
        }
    }
    /// Process a UiMessage, routing content to the appropriate pane.
    pub fn handle_ui_message(&mut self, msg: UiMessage) {
        use crate::user::ui_channel::StreamTarget;
        match msg {
            UiMessage::TextDelta(text, target) => match target {
                StreamTarget::Conversation => {
                    if self.needs_assistant_marker {
                        self.conversation.pending_marker = Marker::Assistant;
                        self.needs_assistant_marker = false;
                    }
                    self.conversation.current_color = Color::Reset;
                    self.conversation.append_text(&text);
                }
                StreamTarget::Autonomous => {
                    self.autonomous.current_color = Color::Reset;
                    self.autonomous.append_text(&text);
                }
            },
            UiMessage::UserInput(text) => {
                self.conversation.push_line_with_marker(text.clone(), Color::Cyan, Marker::User);
                // Mark turn start — next TextDelta gets an assistant marker
                self.turn_started = Some(std::time::Instant::now());
                self.needs_assistant_marker = true;
                self.status.turn_tools = 0;
            }
            UiMessage::ToolCall { name, args_summary } => {
                self.status.turn_tools += 1;
                let line = if args_summary.is_empty() {
                    format!("[{}]", name)
                } else {
                    format!("[{}] {}", name, args_summary)
                };
                self.tools.push_line(line, Color::Yellow);
            }
            UiMessage::ToolResult { name: _, result } => {
                // Indent result lines and add to tools pane
                for line in result.lines() {
                    self.tools.push_line(format!("  {}", line), Color::DarkGray);
                }
                self.tools.push_line(String::new(), Color::Reset); // blank separator
            }
            UiMessage::DmnAnnotation(text) => {
                self.autonomous.push_line(text, Color::Yellow);
                // DMN turn start
                self.turn_started = Some(std::time::Instant::now());
                self.needs_assistant_marker = true;
                self.status.turn_tools = 0;
            }
            UiMessage::StatusUpdate(info) => {
                // Merge: non-empty/non-zero fields overwrite.
                // DMN state always comes as a group from the main loop.
                // NOTE(review): completion_tokens is never merged here —
                // confirm it is updated elsewhere or intentionally unused.
                if !info.dmn_state.is_empty() {
                    self.status.dmn_state = info.dmn_state;
                    self.status.dmn_turns = info.dmn_turns;
                    self.status.dmn_max_turns = info.dmn_max_turns;
                }
                // Token counts come from the agent after API calls.
                if info.prompt_tokens > 0 {
                    self.status.prompt_tokens = info.prompt_tokens;
                }
                if !info.model.is_empty() {
                    self.status.model = info.model;
                }
                if !info.context_budget.is_empty() {
                    self.status.context_budget = info.context_budget;
                }
            }
            UiMessage::Activity(text) => {
                if text.is_empty() {
                    self.call_started = None;
                } else if self.activity.is_empty() || self.call_started.is_none() {
                    self.call_started = Some(std::time::Instant::now());
                    self.call_timeout_secs = crate::config::get().api_stream_timeout_secs;
                }
                self.activity = text;
            }
            UiMessage::Reasoning(text) => {
                self.autonomous.current_color = Color::DarkGray;
                self.autonomous.append_text(&text);
            }
            UiMessage::ToolStarted { .. } => {} // handled by shared active_tools
            UiMessage::ToolFinished { .. } => {}
            UiMessage::Debug(text) => {
                self.tools.push_line(format!("[debug] {}", text), Color::DarkGray);
            }
            UiMessage::Info(text) => {
                self.conversation.push_line(text, Color::Cyan);
            }
            UiMessage::ContextInfoUpdate(info) => {
                self.context_info = Some(info);
            }
            UiMessage::AgentUpdate(agents) => {
                self.agent_state = agents;
            }
        }
    }
    /// Handle a crossterm key event.
    pub fn handle_key(&mut self, key: KeyEvent) {
        // Ctrl+C always quits
        if key.modifiers.contains(KeyModifiers::CONTROL) {
            match key.code {
                KeyCode::Char('c') => {
                    self.should_quit = true;
                    return;
                }
                KeyCode::Char('r') => {
                    self.hotkey_actions.push(HotkeyAction::CycleReasoning);
                    return;
                }
                KeyCode::Char('k') => {
                    self.hotkey_actions.push(HotkeyAction::KillProcess);
                    return;
                }
                KeyCode::Char('p') => {
                    self.hotkey_actions.push(HotkeyAction::CycleAutonomy);
                    return;
                }
                _ => {}
            }
        }
        // F-keys switch screens from anywhere
        match key.code {
            KeyCode::F(1) => { self.set_screen(Screen::Interact); return; }
            KeyCode::F(2) => { self.set_screen(Screen::Conscious); return; }
            KeyCode::F(3) => { self.set_screen(Screen::Subconscious); return; }
            KeyCode::F(4) => { self.set_screen(Screen::Unconscious); return; }
            KeyCode::F(5) => { self.set_screen(Screen::Thalamus); return; }
            _ => {}
        }
        // Screen-specific key handling
        match self.screen {
            Screen::Subconscious => {
                match key.code {
                    KeyCode::Up => {
                        self.agent_selected = self.agent_selected.saturating_sub(1);
                        self.debug_scroll = 0;
                        return;
                    }
                    KeyCode::Down => {
                        self.agent_selected = (self.agent_selected + 1).min(SUBCONSCIOUS_AGENTS.len() - 1);
                        self.debug_scroll = 0;
                        return;
                    }
                    KeyCode::Enter | KeyCode::Right => {
                        self.agent_log_view = true;
                        self.debug_scroll = 0;
                        return;
                    }
                    KeyCode::Left | KeyCode::Esc => {
                        if self.agent_log_view {
                            self.agent_log_view = false;
                            self.debug_scroll = 0;
                        } else {
                            self.screen = Screen::Interact;
                        }
                        return;
                    }
                    KeyCode::PageUp => { self.debug_scroll = self.debug_scroll.saturating_sub(10); return; }
                    // saturating_add: unchecked `+=` on u16 overflows (debug-build
                    // panic) after enough PageDown presses.
                    KeyCode::PageDown => { self.debug_scroll = self.debug_scroll.saturating_add(10); return; }
                    _ => {}
                }
            }
            Screen::Conscious => {
                let cs = self.read_context_state();
                let n = self.debug_item_count(&cs);
                match key.code {
                    KeyCode::Up => {
                        if n > 0 {
                            self.debug_selected = Some(match self.debug_selected {
                                None => n - 1,
                                Some(0) => 0,
                                Some(i) => i - 1,
                            });
                            self.scroll_to_selected(n);
                        }
                        return;
                    }
                    KeyCode::Down => {
                        if n > 0 {
                            self.debug_selected = Some(match self.debug_selected {
                                None => 0,
                                Some(i) if i >= n - 1 => n - 1,
                                Some(i) => i + 1,
                            });
                            self.scroll_to_selected(n);
                        }
                        return;
                    }
                    KeyCode::PageUp => {
                        if n > 0 {
                            let page = 20;
                            self.debug_selected = Some(match self.debug_selected {
                                None => 0,
                                Some(i) => i.saturating_sub(page),
                            });
                            self.scroll_to_selected(n);
                        }
                        return;
                    }
                    KeyCode::PageDown => {
                        if n > 0 {
                            let page = 20;
                            self.debug_selected = Some(match self.debug_selected {
                                None => 0,
                                Some(i) => (i + page).min(n - 1),
                            });
                            self.scroll_to_selected(n);
                        }
                        return;
                    }
                    KeyCode::Right | KeyCode::Enter => {
                        if let Some(idx) = self.debug_selected {
                            self.debug_expanded.insert(idx);
                        }
                        return;
                    }
                    KeyCode::Left => {
                        if let Some(idx) = self.debug_selected {
                            self.debug_expanded.remove(&idx);
                        }
                        return;
                    }
                    KeyCode::Esc => { self.screen = Screen::Interact; return; }
                    _ => {}
                }
            }
            Screen::Unconscious | Screen::Thalamus => {
                match key.code {
                    KeyCode::PageUp => { self.debug_scroll = self.debug_scroll.saturating_sub(10); return; }
                    // saturating_add: see Subconscious PageDown above.
                    KeyCode::PageDown => { self.debug_scroll = self.debug_scroll.saturating_add(10); return; }
                    KeyCode::Esc => { self.screen = Screen::Interact; return; }
                    _ => {}
                }
            }
            Screen::Interact => {}
        }
        // Interact screen key handling
        match key.code {
            KeyCode::Esc => {
                self.hotkey_actions.push(HotkeyAction::Interrupt);
            }
            KeyCode::Enter if !key.modifiers.contains(KeyModifiers::ALT)
                && !key.modifiers.contains(KeyModifiers::SHIFT) => {
                // Submit input
                let input: String = self.textarea.lines().join("\n");
                if !input.is_empty() {
                    // Skip consecutive duplicates in history.
                    if self.input_history.last().map_or(true, |h| h != &input) {
                        self.input_history.push(input.clone());
                    }
                    self.history_index = None;
                    self.submitted.push(input);
                    self.textarea = new_textarea(vec![String::new()]);
                }
            }
            KeyCode::Up if key.modifiers.contains(KeyModifiers::CONTROL) => {
                self.scroll_active_up(3);
            }
            KeyCode::Down if key.modifiers.contains(KeyModifiers::CONTROL) => {
                self.scroll_active_down(3);
            }
            KeyCode::Up if !key.modifiers.contains(KeyModifiers::CONTROL) => {
                // History navigation: plain Up recalls older input.
                if !self.input_history.is_empty() {
                    let idx = match self.history_index {
                        None => self.input_history.len() - 1,
                        Some(i) => i.saturating_sub(1),
                    };
                    self.history_index = Some(idx);
                    let mut ta = new_textarea(
                        self.input_history[idx].lines().map(String::from).collect()
                    );
                    ta.move_cursor(tui_textarea::CursorMove::End);
                    self.textarea = ta;
                }
            }
            KeyCode::Down if !key.modifiers.contains(KeyModifiers::CONTROL) => {
                // History navigation: plain Down moves toward newest, then clears.
                if let Some(idx) = self.history_index {
                    if idx + 1 < self.input_history.len() {
                        self.history_index = Some(idx + 1);
                        let mut ta = new_textarea(
                            self.input_history[idx + 1].lines().map(String::from).collect()
                        );
                        ta.move_cursor(tui_textarea::CursorMove::End);
                        self.textarea = ta;
                    } else {
                        self.history_index = None;
                        self.textarea = new_textarea(vec![String::new()]);
                    }
                }
            }
            KeyCode::PageUp => {
                self.scroll_active_up(10);
            }
            KeyCode::PageDown => {
                self.scroll_active_down(10);
            }
            KeyCode::Tab => {
                self.active_pane = match self.active_pane {
                    ActivePane::Autonomous => ActivePane::Tools,
                    ActivePane::Tools => ActivePane::Conversation,
                    ActivePane::Conversation => ActivePane::Autonomous,
                };
            }
            _ => {
                // Delegate all other keys to the textarea widget
                self.textarea.input(key);
            }
        }
    }
    /// Scroll the focused pane up by n visual lines.
    fn scroll_active_up(&mut self, n: u16) {
        match self.active_pane {
            ActivePane::Autonomous => self.autonomous.scroll_up(n),
            ActivePane::Conversation => self.conversation.scroll_up(n),
            ActivePane::Tools => self.tools.scroll_up(n),
        }
    }
    /// Scroll the focused pane down by n visual lines.
    fn scroll_active_down(&mut self, n: u16) {
        match self.active_pane {
            ActivePane::Autonomous => self.autonomous.scroll_down(n),
            ActivePane::Conversation => self.conversation.scroll_down(n),
            ActivePane::Tools => self.tools.scroll_down(n),
        }
    }
    /// Handle terminal resize. Scroll is recalculated in draw_pane
    /// via Paragraph::line_count; terminal.clear() in main.rs forces
    /// a full redraw.
    pub fn handle_resize(&mut self, _width: u16, _height: u16) {
    }
    /// Handle mouse events: scroll wheel and click-to-select-pane.
    pub fn handle_mouse(&mut self, mouse: MouseEvent) {
        match mouse.kind {
            MouseEventKind::ScrollUp => self.scroll_active_up(3),
            MouseEventKind::ScrollDown => self.scroll_active_down(3),
            MouseEventKind::Down(MouseButton::Left) => {
                let (x, y) = (mouse.column, mouse.row);
                for (i, area) in self.pane_areas.iter().enumerate() {
                    if x >= area.x && x < area.x + area.width
                        && y >= area.y && y < area.y + area.height
                    {
                        self.active_pane = match i {
                            0 => ActivePane::Autonomous,
                            1 => ActivePane::Conversation,
                            _ => ActivePane::Tools,
                        };
                        break;
                    }
                }
            }
            _ => {}
        }
    }
    /// Draw the full TUI layout.
    pub fn draw(&mut self, frame: &mut Frame) {
        let size = frame.area();
        match self.screen {
            Screen::Conscious => { self.draw_debug(frame, size); return; }
            Screen::Subconscious => { self.draw_agents(frame, size); return; }
            Screen::Unconscious => { self.draw_unconscious(frame, size); return; }
            Screen::Thalamus => { self.draw_thalamus(frame, size); return; }
            Screen::Interact => {}
        }
        self.draw_main(frame, size);
    }
    /// Update channel status from async fetch results.
    pub fn set_channel_status(&mut self, channels: Vec<(String, bool, u32)>) {
        self.channel_status = channels.into_iter()
            .map(|(name, connected, unread)| ChannelStatus { name, connected, unread })
            .collect();
    }
    /// Snapshot idle state for F5 display.
    pub fn update_idle(&mut self, state: &crate::thalamus::idle::State) {
        self.idle_info = Some(IdleInfo {
            user_present: state.user_present(),
            since_activity: state.since_activity(),
            activity_ewma: state.activity_ewma,
            block_reason: state.block_reason().to_string(),
            dreaming: state.dreaming,
            sleeping: state.sleep_until.is_some(),
        });
    }
    /// Switch to `screen`, resetting the debug scroll offset.
    pub(crate) fn set_screen(&mut self, screen: Screen) {
        self.screen = screen;
        self.debug_scroll = 0;
        // Refresh data for status screens on entry
        match screen {
            // Channel refresh triggered asynchronously from event loop
            Screen::Thalamus => {}
            _ => {}
        }
    }
}
/// Initialize the terminal for TUI mode: raw mode, alternate screen,
/// and mouse capture, in that order.
pub fn init_terminal() -> io::Result<Terminal<CrosstermBackend<io::Stdout>>> {
    terminal::enable_raw_mode()?;
    let mut out = io::stdout();
    out.execute(EnterAlternateScreen)?;
    out.execute(EnableMouseCapture)?;
    Terminal::new(CrosstermBackend::new(out))
}
/// Restore the terminal to normal mode, undoing init_terminal:
/// raw mode off, mouse capture off, alternate screen left, cursor shown.
pub fn restore_terminal(terminal: &mut Terminal<CrosstermBackend<io::Stdout>>) -> io::Result<()> {
    terminal::disable_raw_mode()?;
    let backend = terminal.backend_mut();
    backend.execute(DisableMouseCapture)?;
    backend.execute(LeaveAlternateScreen)?;
    terminal.show_cursor()
}

View file

@ -154,3 +154,81 @@ pub fn channel() -> (UiSender, UiReceiver) {
let (observe_tx, _) = broadcast::channel(1024);
(UiSender { tui: tui_tx, observe: observe_tx }, tui_rx)
}
/// Replay a restored session into the TUI panes so the user can see
/// conversation history immediately on restart. Shows user input,
/// assistant responses, and brief tool call summaries. Skips the system
/// prompt, context message, DMN plumbing, and image injection messages.
pub fn replay_session_to_ui(entries: &[crate::agent::context::ConversationEntry], ui_tx: &UiSender) {
    use crate::agent::api::types::Role;
    crate::dbglog!("[replay] replaying {} entries to UI", entries.len());
    for (i, e) in entries.iter().enumerate() {
        let m = e.message();
        let preview: String = m.content_text().chars().take(60).collect();
        crate::dbglog!("[replay] [{}] {:?} mem={} tc={} tcid={:?} {:?}",
            i, m.role, e.is_memory(), m.tool_calls.as_ref().map_or(0, |t| t.len()),
            m.tool_call_id.as_deref(), preview);
    }
    let mut skipped_first_user = false;
    // Assistant output follows the pane of the most recent user/DMN message.
    let mut target = StreamTarget::Conversation;
    for entry in entries.iter().filter(|e| !e.is_memory()) {
        let msg = entry.message();
        match msg.role {
            Role::System => {}
            Role::User => {
                // The first user message is the context message — skip it.
                if !skipped_first_user {
                    skipped_first_user = true;
                    continue;
                }
                let text = msg.content_text();
                let is_plumbing = text.starts_with("Your context was just compacted")
                    || text.starts_with("Your context was just rebuilt")
                    || text.starts_with("[Earlier in this conversation")
                    || text.starts_with("Here is the image")
                    || text.contains("[image aged out");
                if is_plumbing {
                    continue;
                }
                if text.starts_with("[dmn]") {
                    target = StreamTarget::Autonomous;
                    let header = text.lines().next().unwrap_or("[dmn]");
                    let _ = ui_tx.send(UiMessage::DmnAnnotation(header.to_string()));
                } else {
                    target = StreamTarget::Conversation;
                    let _ = ui_tx.send(UiMessage::UserInput(text.to_string()));
                }
            }
            Role::Assistant => {
                for call in msg.tool_calls.iter().flatten() {
                    let _ = ui_tx.send(UiMessage::ToolCall {
                        name: call.function.name.clone(),
                        args_summary: String::new(),
                    });
                }
                let text = msg.content_text();
                if !text.is_empty() {
                    let _ = ui_tx.send(UiMessage::TextDelta(format!("{}\n", text), target));
                }
            }
            Role::Tool => {
                // Show only the first three lines of each tool result.
                let text = msg.content_text();
                let head = text.lines().take(3).collect::<Vec<_>>().join("\n");
                let truncated = if text.lines().count() > 3 {
                    format!("{}...", head)
                } else {
                    head
                };
                let _ = ui_tx.send(UiMessage::ToolResult {
                    name: String::new(),
                    result: truncated,
                });
            }
        }
    }
}