consciousness/src/user/mod.rs
Kent Overstreet 2989a6afaa config: drop dead code and collapse to a single backend
Config had accumulated several obsolete fields, a legacy load path
that was just returning defaults, and multi-backend infrastructure
that's no longer used.

Removed from Config (memory section):
- load_legacy_jsonl() — just returned Config::default(), no callers
- The legacy-fallback branch in load_from_file
- surface_hooks, surface_timeout_secs — zero external readers
- scoring_chunk_tokens + default fn — zero external readers
- The POC_MEMORY_CONFIG env override note in the header comment
  (not actually wired up anywhere)

Collapsed multi-backend to single-backend:
- AppConfig used to carry `anthropic: BackendConfig` and
  `openrouter: BackendConfig` as required fields plus an optional
  `deepinfra`, picked between at runtime by name. Only one is ever
  actually used in any deployment. Collapse to a single
  `backend: BackendConfig` on AppConfig, drop the multi-backend
  match logic in resolve_model, drop the top-level `backend: String`
  selector field, drop the `BackendConfig::resolve` fallback path.
- Also drop BackendConfig.model (redundant with ModelConfig.model_id
  once multi-backend is gone).
- ModelConfig.backend field goes — there's only one backend now, no
  choice to make.

Dead prompt_file machinery:
- ModelConfig.prompt_file, ResolvedModel.prompt_file, SessionConfig
  .prompt_file, Agent.prompt_file — nothing in the codebase actually
  reads the file these strings name. Just passed around and compared.
  Delete the whole string through every struct.
- The "if prompt_file changed on model switch, recompact" branch in
  user/chat.rs goes too (never fired usefully).

Dead memory_project plumbing:
- AppConfig.memory_project field, CliArgs.memory_project, the
  --memory-project CLI flag, the figment merge target, the show_config
  display line. Nothing reads it anywhere.

Dead ContextInfo struct:
- `struct ContextInfo` was never constructed — context_info: None
  was the only initializer. The conditional display blocks in
  user/context.rs that dereferenced it were dead.

Behavior change: AppConfig::resolve() now requires a non-empty
`models` map and bails with a helpful message if it's missing. The
old fallback ("no models? use top-level backend + PromptConfig to
build a default") path is gone — it was only kept for symmetry with
a mode nobody used.

Config file shape: `deepinfra: {...}` → `backend: {...}`, and
model entries no longer need `backend:` or `prompt_file:`. Updated
~/.consciousness/config.json5 to match.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-16 15:41:55 -04:00

780 lines
26 KiB
Rust

// user/ — User interface layer
//
// TUI, UI channel, parsing. The cognitive layer (session state
// machine, DMN, identity) lives in mind/.
pub(crate) mod chat;
mod context;
pub(crate) mod learn;
pub(crate) mod scroll_pane;
pub mod selectable;
mod subconscious;
mod thalamus;
mod unconscious;
mod widgets;
use anyhow::Result;
use std::io::Write;
use crate::mind::MindCommand;
use crate::user::{self as tui};
// --- TUI infrastructure (moved from tui/mod.rs) ---
use ratatui::crossterm::{
event::{EnableMouseCapture, DisableMouseCapture, EnableBracketedPaste, DisableBracketedPaste},
terminal::{self, EnterAlternateScreen, LeaveAlternateScreen},
ExecutableCommand,
};
use ratatui::{
backend::CrosstermBackend,
};
use std::io;
/// Status info for the bottom status bar.
#[derive(Debug, Clone)]
struct StatusInfo {
    /// Current DMN state label (e.g. "resting"); mirrors mind state.
    dmn_state: String,
    /// Consecutive DMN turns taken so far (reset on autonomy cycle).
    dmn_turns: u32,
    /// Cap on consecutive DMN turns (default 20; see CLI --dmn-max-turns).
    dmn_max_turns: u32,
    /// Token count of the prompt — assumed current-context size; TODO confirm against agent state.
    prompt_tokens: u32,
    /// Active model name shown in the bar (seeded by App::new, updated at startup).
    model: String,
    /// Tool-call count for the current turn.
    turn_tools: u32,
    /// Human-readable context budget summary; empty until populated.
    context_budget: String,
}
/// Build the screen legend from screen labels, e.g. " F1=chat F2=conscious ".
///
/// Screens are numbered by position, so the legend always matches the
/// F-key dispatch in the event loop.
fn screen_legend_from(screens: &[Box<dyn ScreenView>]) -> String {
    let parts: Vec<String> = screens.iter().enumerate()
        .map(|(i, s)| format!("F{}={}", i + 1, s.label()))
        .collect();
    format!(" {} ", parts.join(" "))
}
// Cached legend — set once at startup by event_loop
static SCREEN_LEGEND: std::sync::OnceLock<String> = std::sync::OnceLock::new();

/// Record the legend string. Only the first call wins (OnceLock
/// semantics); later calls are silently ignored.
fn set_screen_legend(legend: String) {
    SCREEN_LEGEND.set(legend).ok();
}

/// Fetch a copy of the cached legend, or an empty string if it has
/// not been set yet.
fn screen_legend() -> String {
    match SCREEN_LEGEND.get() {
        Some(s) => s.clone(),
        None => String::new(),
    }
}
/// A screen that can draw itself and handle input.
trait ScreenView: Send {
    /// Render one frame into `area` and process the input `events`
    /// addressed to this screen. Called on every render pass of the
    /// event loop; `app` gives mutable access to shared UI state.
    fn tick(&mut self, frame: &mut ratatui::Frame, area: ratatui::layout::Rect,
            events: &[ratatui::crossterm::event::Event], app: &mut App);
    /// Short label used in the F-key legend (see screen_legend_from).
    fn label(&self) -> &'static str;
}
/// Snapshot of the idle tracker for display (built by App::update_idle
/// from thalamus::idle::State).
#[derive(Clone)]
struct IdleInfo {
    // Whether the user is considered present (state.user_present()).
    user_present: bool,
    // Time since last user activity — presumably seconds; confirm in thalamus::idle.
    since_activity: f64,
    // Exponentially-weighted moving average of activity.
    activity_ewma: f64,
    // Why autonomous activity is currently blocked, if at all.
    block_reason: String,
    // Dream state flag, copied straight from idle state.
    dreaming: bool,
    // True while a sleep_until deadline is set.
    sleeping: bool,
}
/// Per-channel connection status, built from the (name, connected,
/// unread) tuples returned by thalamus::channels::fetch_all_channels.
#[derive(Clone)]
struct ChannelStatus {
    // Channel name.
    name: String,
    // Whether the channel daemon is currently connected.
    connected: bool,
    // Unread message count.
    unread: u32,
}
/// Shared UI state, handed mutably to every ScreenView on each tick
/// and refreshed from Mind/agent state on every event-loop wake.
struct App {
    /// Status-bar fields (DMN state, model, token counts).
    status: StatusInfo,
    /// Current activity label; empty means idle (gates the 1s tick in `run`).
    activity: String,
    /// When the current activity began, for elapsed-time display.
    activity_started: Option<std::time::Instant>,
    /// Count of running background processes shown in the UI.
    running_processes: u32,
    /// Reasoning effort label ("none"/"low"/"high"); cycled by Ctrl-R.
    reasoning_effort: String,
    // Thinking-mode toggles — native model reasoning vs. think tool.
    think_native: bool,
    think_tool: bool,
    // Sampling parameters displayed in the UI.
    temperature: f32,
    top_p: f32,
    top_k: u32,
    /// Handle to the agent (shared with the mind loop).
    agent: std::sync::Arc<crate::agent::Agent>,
    /// Set by Ctrl-C; breaks the event loop in `run`.
    should_quit: bool,
    /// Latest subconscious snapshots (refreshed each wake).
    agent_state: Vec<crate::mind::SubconsciousSnapshot>,
    /// Latest unconscious snapshots (refreshed each wake).
    unconscious_state: Vec<crate::mind::UnconsciousSnapshot>,
    /// Copy of mind shared state; None until the first sync.
    mind_state: Option<crate::mind::MindState>,
    /// Graph health report from the unconscious, if available.
    graph_health: Option<crate::subconscious::daemon::GraphHealth>,
    /// Agent toggle requests from UI — consumed by mind loop.
    pub agent_toggles: Vec<String>,
    /// Flag to rebuild tools section (set by thalamus screen).
    pub rebuild_tools_pending: bool,
    /// Count of subconscious "walked" items (see `run`).
    walked_count: usize,
    /// Per-channel connection status and unread counts.
    channel_status: Vec<ChannelStatus>,
    /// Latest idle snapshot; None until the first update_idle().
    idle_info: Option<IdleInfo>,
    /// Fine-tuning candidates pending review.
    finetune_candidates: Vec<learn::FinetuneCandidate>,
}
impl App {
    /// Fresh App with default knob values; `model` seeds the status bar.
    fn new(model: String, agent: std::sync::Arc<crate::agent::Agent>) -> Self {
        Self {
            status: StatusInfo {
                dmn_state: "resting".into(), dmn_turns: 0, dmn_max_turns: 20,
                prompt_tokens: 0, model,
                turn_tools: 0, context_budget: String::new(),
            },
            activity: String::new(),
            activity_started: None,
            running_processes: 0,
            reasoning_effort: "none".to_string(),
            think_native: true,
            think_tool: false,
            temperature: 0.6,
            top_p: 0.95,
            top_k: 20,
            agent,
            should_quit: false,
            agent_state: Vec::new(),
            unconscious_state: Vec::new(),
            mind_state: None,
            graph_health: None,
            agent_toggles: Vec::new(),
            rebuild_tools_pending: false,
            walked_count: 0,
            channel_status: Vec::new(), idle_info: None,
            finetune_candidates: Vec::new(),
        }
    }
    /// Set the review status of candidate `idx`; out-of-range is a no-op.
    fn finetune_action(&mut self, idx: usize, status: learn::CandidateStatus) {
        if let Some(candidate) = self.finetune_candidates.get_mut(idx) {
            candidate.status = status;
        }
    }
    /// Send every Approved candidate to the training server and mark it
    /// Sent locally. No-op when nothing is approved. The send itself is
    /// fire-and-forget on a spawned task; failures are only logged.
    fn finetune_send_approved(&mut self) {
        // Collect approved candidates
        let samples: Vec<crate::subconscious::learn::TrainData> = self.finetune_candidates.iter()
            .filter(|c| c.status == learn::CandidateStatus::Approved)
            .map(|c| crate::subconscious::learn::TrainData {
                context_ids: c.context_ids.clone(),
                continuation_ids: c.continuation_ids.clone(),
                timestamp_ns: c.timestamp_ns,
            })
            .collect();
        if samples.is_empty() {
            return;
        }
        // Mark as sent in UI immediately
        for candidate in &mut self.finetune_candidates {
            if candidate.status == learn::CandidateStatus::Approved {
                candidate.status = learn::CandidateStatus::Sent;
            }
        }
        // Spawn async task to send to training server
        let client = self.agent.client.clone();
        tokio::spawn(async move {
            match crate::subconscious::learn::send_to_train(samples, &client).await {
                Ok(job_id) => {
                    dbglog!("[finetune] training started: {}", job_id);
                }
                Err(e) => {
                    dbglog!("[finetune] send failed: {:#}", e);
                }
            }
        });
    }
    /// Replace channel status from (name, connected, unread) tuples.
    fn set_channel_status(&mut self, channels: Vec<(String, bool, u32)>) {
        self.channel_status = channels.into_iter()
            .map(|(name, connected, unread)| ChannelStatus { name, connected, unread })
            .collect();
    }
    /// Snapshot the idle tracker into `idle_info` for display.
    fn update_idle(&mut self, state: &crate::thalamus::idle::State) {
        self.idle_info = Some(IdleInfo {
            user_present: state.user_present(), since_activity: state.since_activity(),
            activity_ewma: state.activity_ewma, block_reason: state.block_reason().to_string(),
            dreaming: state.dreaming, sleeping: state.sleep_until.is_some(),
        });
    }
}
/// Enter TUI mode: raw terminal, alternate screen, mouse capture and
/// bracketed paste. Returns the ratatui terminal handle; on error the
/// caller is responsible for undoing any partially-applied modes.
fn init_terminal() -> io::Result<ratatui::Terminal<CrosstermBackend<io::Stdout>>> {
    terminal::enable_raw_mode()?;
    let mut out = io::stdout();
    out.execute(EnterAlternateScreen)?
        .execute(EnableMouseCapture)?
        .execute(EnableBracketedPaste)?;
    ratatui::Terminal::new(CrosstermBackend::new(out))
}
/// Leave TUI mode, undoing init_terminal (paste, mouse, alt screen),
/// and re-show the cursor.
fn restore_terminal(terminal: &mut ratatui::Terminal<CrosstermBackend<io::Stdout>>) -> io::Result<()> {
    terminal::disable_raw_mode()?;
    let backend = terminal.backend_mut();
    backend.execute(DisableBracketedPaste)?
        .execute(DisableMouseCapture)?
        .execute(LeaveAlternateScreen)?;
    terminal.show_cursor()
}
/// Top-level entry point — creates Mind and UI, wires them together.
///
/// The Mind loop runs on the main tokio runtime; the UI runs on a
/// dedicated OS thread with its own current-thread runtime. Returns
/// once the Mind loop exits and the UI thread has been joined.
async fn start(cli: crate::user::CliArgs) -> Result<()> {
    let (config, _figment) = crate::config::load_session(&cli).await?;
    if config.app.debug {
        // SAFETY: env mutation happens before the UI thread is spawned
        // below — NOTE(review): assumes nothing else reads env concurrently
        // this early; confirm.
        unsafe { std::env::set_var("POC_DEBUG", "1") };
    }
    let (turn_tx, turn_rx) = tokio::sync::mpsc::channel(1);
    let (mind_tx, mind_rx) = tokio::sync::mpsc::unbounded_channel();
    let mind = std::sync::Arc::new(crate::mind::Mind::new(config, turn_tx).await);
    // UI runs on a dedicated OS thread so CPU-intensive work on the
    // main tokio runtime can't starve rendering.
    let ui_mind = mind.clone();
    let ui_handle = std::thread::Builder::new()
        .name("ui".into())
        .spawn(move || {
            let rt = tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
                .expect("UI tokio runtime");
            rt.block_on(run(
                tui::App::new(String::new(), ui_mind.agent.clone()),
                &ui_mind, mind_tx,
            ))
        })
        .expect("spawn UI thread");
    // Initialize store - access_local() caches it in STORE_ACCESS
    if let Err(e) = crate::hippocampus::access_local() {
        eprintln!("Store init failed: {}", e);
    }
    // Start MCP server for external tool access
    let mut tools: Vec<crate::agent::tools::Tool> = Vec::new();
    tools.extend(crate::agent::tools::memory::memory_tools());
    tools.extend(crate::agent::tools::memory::journal_tools());
    if let Err(e) = crate::mcp_server::start(tools).await {
        eprintln!("MCP server failed to start: {:#}", e);
    }
    // Mind event loop — runs on the main tokio runtime
    mind.init().await;
    mind.run(mind_rx, turn_rx).await;
    crate::mcp_server::cleanup();
    // Propagate the UI thread's Result; a UI panic becomes an error.
    ui_handle.join().unwrap_or_else(|_| Err(anyhow::anyhow!("UI thread panicked")))
}
/// Ctrl-R: cycle reasoning effort none → low → high → none, with a
/// user-visible notification. Best-effort — silently skipped when the
/// agent state lock is contended.
fn hotkey_cycle_reasoning(mind: &crate::mind::Mind) {
    let Ok(mut ag) = mind.agent.state.try_lock() else { return };
    // Derive the next setting and its notification label together.
    let (next, label) = match ag.reasoning_effort.as_str() {
        "none" => ("low", "low (brief monologue)"),
        "low" => ("high", "high (full monologue)"),
        _ => ("none", "off (monologue hidden)"),
    };
    ag.reasoning_effort = next.to_string();
    ag.notify(format!("reasoning: {}", label));
}
/// Ctrl-K: abort every running tool task and report how many were
/// killed; notifies "no running tools" when there was nothing to do.
async fn hotkey_kill_processes(mind: &crate::mind::Mind) {
    let mut st = mind.agent.state.lock().await;
    let count = st.active_tools.len();
    if count == 0 {
        st.notify("no running tools");
        return;
    }
    st.active_tools.abort_all();
    st.notify(format!("killed {} tools", count));
}
fn hotkey_cycle_autonomy(mind: &crate::mind::Mind) {
let mut s = mind.shared.lock().unwrap();
let label = match &s.dmn {
crate::mind::subconscious::State::Engaged | crate::mind::subconscious::State::Working | crate::mind::subconscious::State::Foraging => {
s.dmn = crate::mind::subconscious::State::Resting { since: std::time::Instant::now() };
"resting"
}
crate::mind::subconscious::State::Resting { .. } => {
s.dmn = crate::mind::subconscious::State::Paused;
"PAUSED"
}
crate::mind::subconscious::State::Paused => {
crate::mind::subconscious::set_off(true);
s.dmn = crate::mind::subconscious::State::Off;
"OFF (persists across restarts)"
}
crate::mind::subconscious::State::Off => {
crate::mind::subconscious::set_off(false);
s.dmn = crate::mind::subconscious::State::Foraging;
"foraging"
}
};
s.dmn_turns = 0;
drop(s);
if let Ok(mut ag) = mind.agent.state.try_lock() {
ag.notify(format!("DMN → {}", label));
}
}
/// Returns true if this is an event the main loop handles (F-keys, Ctrl combos, resize).
fn is_global_event(event: &ratatui::crossterm::event::Event) -> bool {
    use ratatui::crossterm::event::{Event, KeyCode, KeyEventKind, KeyModifiers};
    match event {
        Event::Resize(_, _) => true,
        // Only key *presses* count; repeats/releases go to the screen.
        Event::Key(key) if key.kind == KeyEventKind::Press => {
            matches!(key.code, KeyCode::F(_))
                || key.modifiers.contains(KeyModifiers::CONTROL)
        }
        _ => false,
    }
}
/// UI event loop — renders the active screen, routes terminal input,
/// and mirrors Mind/agent state into `app` on every wake. Returns when
/// `app.should_quit` is set (Ctrl-C), after restoring the terminal.
async fn run(
    mut app: tui::App,
    mind: &crate::mind::Mind,
    mind_tx: tokio::sync::mpsc::UnboundedSender<MindCommand>,
) -> Result<()> {
    let agent = &mind.agent;
    // UI-owned state
    let mut idle_state = crate::thalamus::idle::State::new();
    idle_state.load();
    let (channel_tx, mut channel_rx) = tokio::sync::mpsc::channel::<Vec<(String, bool, u32)>>(4);
    {
        // Initial channel-status fetch, off the UI loop.
        let tx = channel_tx.clone();
        tokio::spawn(async move {
            let result = crate::thalamus::channels::fetch_all_channels().await;
            let _ = tx.send(result).await;
        });
    }
    let notify_rx = crate::thalamus::channels::subscribe_all();
    // F1=chat, F2=conscious, F3=subconscious, F4=unconscious, F5=thalamus, F6=learn
    let mut screens: Vec<Box<dyn tui::ScreenView>> = vec![
        Box::new(crate::user::chat::InteractScreen::new(
            mind.agent.clone(), mind.shared.clone(), mind_tx.clone(),
        )),
        Box::new(crate::user::context::ConsciousScreen::new(mind.agent.clone())),
        Box::new(crate::user::subconscious::SubconsciousScreen::new()),
        Box::new(crate::user::unconscious::UnconsciousScreen::new()),
        Box::new(crate::user::thalamus::ThalamusScreen::new()),
        Box::new(crate::user::learn::LearnScreen::new(mind_tx.clone())),
    ];
    let mut active_screen: usize = 1; // F-key number (1-based; screens index is active_screen - 1)
    tui::set_screen_legend(tui::screen_legend_from(&*screens));
    let mut terminal = tui::init_terminal()?;
    // Terminal event reader — dedicated thread reads sync, pushes to channel
    let (event_tx, mut event_rx) = tokio::sync::mpsc::unbounded_channel();
    std::thread::spawn(move || {
        loop {
            match ratatui::crossterm::event::read() {
                Ok(event) => { if event_tx.send(event).is_err() { break; } }
                Err(_) => break,
            }
        }
    });
    let agent_changed = agent.state.lock().await.changed.clone();
    let mut turn_watch = mind.turn_watch();
    let mut pending: Vec<ratatui::crossterm::event::Event> = Vec::new();
    terminal.hide_cursor()?;
    if let Ok(mut ag) = agent.state.try_lock() { ag.notify("consciousness v0.3"); }
    // Initial render
    {
        let mut frame = terminal.get_frame();
        let area = frame.area();
        screens[active_screen - 1].tick(&mut frame, area, &[], &mut app);
        drop(frame);
        terminal.flush()?;
        terminal.swap_buffers();
        terminal.backend_mut().flush()?;
    }
    // Replay conversation after Mind init completes (non-blocking check)
    let mut startup_done = false;
    let mut dirty = true; // render on first loop
    let mut activity_tick = tokio::time::interval(std::time::Duration::from_secs(1));
    activity_tick.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
    loop {
        let has_activity = !app.activity.is_empty();
        // Wake on: terminal input (highest priority via `biased`), agent
        // change, turn change, channel-status result, or — only while an
        // activity label is showing — the 1s elapsed-time tick.
        tokio::select! {
            biased;
            Some(event) = event_rx.recv() => {
                // Batch any further already-queued events in one pass.
                pending.push(event);
                while let Ok(event) = event_rx.try_recv() {
                    pending.push(event);
                }
            }
            _ = agent_changed.notified() => {
                dirty = true;
            }
            _ = turn_watch.changed() => {
                dirty = true;
            }
            Some(channels) = channel_rx.recv() => {
                app.set_channel_status(channels);
            }
            _ = activity_tick.tick(), if has_activity => {
                dirty = true;
            }
        }
        // State sync on every wake
        idle_state.decay_ewma();
        app.update_idle(&idle_state);
        app.agent_state = mind.subconscious_snapshots().await;
        {
            let mut unc = mind.unconscious.lock().await;
            // Apply queued toggles: try the subconscious first, fall back
            // to the unconscious when the name isn't known there.
            let toggles: Vec<String> = app.agent_toggles.drain(..).collect();
            for name in &toggles {
                if mind.subconscious.lock().await.toggle(name).is_none() {
                    unc.toggle(name).await;
                }
            }
            let store_arc = crate::hippocampus::access_local().ok();
            let store_guard = match &store_arc {
                Some(s) => Some(&**s),
                None => None,
            };
            app.unconscious_state = unc.snapshots(store_guard.as_deref());
            app.graph_health = unc.graph_health.clone();
        }
        // Sync mind state (finetune candidates, last scoring run, etc.)
        {
            let ms = mind.shared.lock().unwrap();
            // Sync finetune candidates: add new ones, keep existing (preserves approval status),
            // remove sent candidates, keep only 10 most recent rejected.
            app.finetune_candidates.retain(|c| c.status != learn::CandidateStatus::Sent);
            for c in &ms.finetune_candidates {
                let exists = app.finetune_candidates.iter()
                    .any(|existing| existing.timestamp_ns == c.timestamp_ns);
                if !exists {
                    app.finetune_candidates.push(learn::FinetuneCandidate::from(c.clone()));
                }
            }
            let mut rejected: Vec<_> = app.finetune_candidates.iter()
                .enumerate()
                .filter(|(_, c)| c.status == learn::CandidateStatus::Rejected)
                .map(|(i, c)| (i, c.timestamp_ns))
                .collect();
            if rejected.len() > 10 {
                // Newest first by timestamp; everything past the first 10
                // gets removed by original index.
                rejected.sort_by_key(|(_, ts)| std::cmp::Reverse(*ts));
                let to_remove: std::collections::HashSet<_> = rejected[10..]
                    .iter().map(|(i, _)| *i).collect();
                let mut idx = 0;
                app.finetune_candidates.retain(|_| {
                    let keep = !to_remove.contains(&idx);
                    idx += 1;
                    keep
                });
            }
            app.mind_state = Some(ms.clone());
        }
        app.walked_count = mind.subconscious_walked().await.len();
        if !startup_done {
            // One-shot "model: …" notification once the agent lock is free.
            if let Ok(mut ag) = agent.state.try_lock() {
                let model = agent.model().to_string();
                ag.notify(format!("model: {}", model));
                startup_done = true;
            }
        }
        // Each channel notification triggers a background status refresh.
        while let Ok(_notif) = notify_rx.try_recv() {
            let tx = channel_tx.clone();
            tokio::spawn(async move {
                let result = crate::thalamus::channels::fetch_all_channels().await;
                let _ = tx.send(result).await;
            });
        }
        // Drain stderr lines and display as notifications
        if let Some(rx_mutex) = STDERR_RX.get() {
            if let Ok(rx) = rx_mutex.try_lock() {
                while let Ok(line) = rx.try_recv() {
                    if let Ok(mut ag) = agent.state.try_lock() {
                        ag.notify(format!("stderr: {}", line));
                        dirty = true;
                    }
                }
            }
        }
        // Rebuild tools if requested (e.g., think tool toggled)
        if app.rebuild_tools_pending {
            app.rebuild_tools_pending = false;
            agent.rebuild_tools().await;
        }
        if !pending.is_empty() { idle_state.user_activity(); }
        // Render/dispatch: feed the active screen all events up to the
        // next global event, render, then handle that global event here;
        // repeat until `pending` is drained and nothing is dirty.
        while !pending.is_empty() || dirty {
            let global_pos = pending.iter().position(|e| is_global_event(e))
                .unwrap_or(pending.len());
            let mut frame = terminal.get_frame();
            let area = frame.area();
            screens[active_screen - 1].tick(&mut frame, area, &pending[..global_pos], &mut app);
            drop(frame);
            terminal.flush()?;
            terminal.swap_buffers();
            terminal.backend_mut().flush()?;
            dirty = false;
            pending = pending.split_off(global_pos);
            if pending.is_empty() { break; }
            dirty = true;
            // Global event is first — handle it
            use ratatui::crossterm::event::{Event, KeyCode, KeyModifiers};
            let event = pending.remove(0);
            match event {
                Event::Key(key) => {
                    if let KeyCode::F(n) = key.code {
                        let idx = n as usize;
                        if idx >= 1 && idx <= screens.len() {
                            active_screen = idx;
                        }
                    } else if key.modifiers.contains(KeyModifiers::CONTROL) {
                        match key.code {
                            KeyCode::Char('c') => { app.should_quit = true; }
                            KeyCode::Char('r') => hotkey_cycle_reasoning(mind),
                            KeyCode::Char('k') => hotkey_kill_processes(mind).await,
                            KeyCode::Char('p') => hotkey_cycle_autonomy(mind),
                            _ => {}
                        }
                    }
                }
                Event::Resize(_, _) => {
                    let _ = terminal.autoresize();
                    let _ = terminal.clear();
                }
                _ => {}
            }
        }
        if app.should_quit {
            break;
        }
    }
    tui::restore_terminal(&mut terminal)?;
    Ok(())
}
// --- CLI ---
use clap::{Parser, Subcommand};
// Command-line arguments. NOTE: the `///` doc comments on fields double
// as clap help text — they are runtime-visible strings, so edit them
// only to intentionally change `--help` output.
#[derive(Parser, Debug, Default)]
#[command(name = "consciousness", about = "Substrate-independent AI agent")]
pub struct CliArgs {
    /// Model override (selects a named entry from `models` in config.json5)
    #[arg(short, long)]
    pub model: Option<String>,
    /// API key override
    #[arg(long)]
    pub api_key: Option<String>,
    /// Base URL override
    #[arg(long)]
    pub api_base: Option<String>,
    /// Enable debug logging
    #[arg(long)]
    pub debug: bool,
    /// Print effective config with provenance and exit
    #[arg(long)]
    pub show_config: bool,
    /// Max consecutive DMN turns
    #[arg(long)]
    pub dmn_max_turns: Option<u32>,
    /// Disable background agents (surface, observe, scoring)
    #[arg(long)]
    pub no_agents: bool,
    #[command(subcommand)]
    pub command: Option<SubCmd>,
}
// Subcommands for one-shot interaction with a running agent. NOTE: the
// `///` doc comments become clap help text — runtime-visible strings.
#[derive(Subcommand, Debug)]
pub enum SubCmd {
    /// Print new output since last read and exit
    Read {
        /// Stream output continuously instead of exiting
        #[arg(short, long)]
        follow: bool,
        /// Block until a complete response is received, then exit
        #[arg(long)]
        block: bool,
    },
    /// Send a message to the running agent
    Write {
        /// The message to send
        message: Vec<String>,
    },
}
/// Global stderr receiver — set once at startup, polled by UI thread.
/// Wrapped in a Mutex because mpsc::Receiver is not Sync; the UI loop
/// try_locks it each wake and drains lines non-blockingly.
static STDERR_RX: std::sync::OnceLock<std::sync::Mutex<std::sync::mpsc::Receiver<String>>> =
    std::sync::OnceLock::new();
/// Redirect stderr to a pipe. Spawns a thread that writes to log file and sends
/// lines to a channel for display in the tools pane. Returns original stderr fd.
///
/// Returns None (leaving stderr untouched) if any step fails; error
/// paths close every fd opened so far to avoid leaks.
fn redirect_stderr_to_pipe() -> Option<std::os::fd::RawFd> {
    use std::os::unix::io::FromRawFd;
    use std::fs::OpenOptions;
    use std::io::{BufRead, BufReader, Write};
    let log_dir = dirs::home_dir()?.join(".consciousness/logs");
    std::fs::create_dir_all(&log_dir).ok()?;
    let log_path = log_dir.join("tui-stderr.log");
    let mut log_file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)
        .ok()?;
    // Create pipe
    let mut pipe_fds = [0i32; 2];
    // SAFETY: pipe_fds is a valid, writable 2-element array for libc::pipe.
    if unsafe { libc::pipe(pipe_fds.as_mut_ptr()) } == -1 {
        return None;
    }
    let (pipe_read, pipe_write) = (pipe_fds[0], pipe_fds[1]);
    // Save original stderr
    // SAFETY: duplicating a process-owned fd; checked for -1 below.
    let original_stderr = unsafe { libc::dup(libc::STDERR_FILENO) };
    if original_stderr == -1 {
        // SAFETY: closing fds we just created and still own.
        unsafe { libc::close(pipe_read); libc::close(pipe_write); }
        return None;
    }
    // Redirect stderr to pipe write end
    // SAFETY: both fds are valid; dup2 atomically replaces STDERR_FILENO.
    if unsafe { libc::dup2(pipe_write, libc::STDERR_FILENO) } == -1 {
        // SAFETY: closing fds we own on the failure path.
        unsafe { libc::close(original_stderr); libc::close(pipe_read); libc::close(pipe_write); }
        return None;
    }
    // SAFETY: stderr now holds a dup of pipe_write; our copy is redundant.
    unsafe { libc::close(pipe_write); } // Close our copy, stderr now owns it
    // Channel for UI display
    let (tx, rx) = std::sync::mpsc::channel();
    // Write startup marker
    let timestamp = chrono::Local::now().format("%Y-%m-%d %H:%M:%S");
    let marker = format!("\n--- TUI started at {} ---\n", timestamp);
    let _ = log_file.write_all(marker.as_bytes());
    // Spawn reader thread
    std::thread::spawn(move || {
        // SAFETY: pipe_read is a valid fd we exclusively own; File takes
        // ownership and will close it on drop.
        let pipe_read = unsafe { std::fs::File::from_raw_fd(pipe_read) };
        let reader = BufReader::new(pipe_read);
        for line in reader.lines() {
            let line = match line {
                Ok(l) => l,
                Err(_) => break,
            };
            // Write to log file
            let _ = writeln!(log_file, "{}", line);
            let _ = log_file.flush();
            // Send to UI (ignore if receiver dropped)
            let _ = tx.send(line);
        }
    });
    // Store receiver in static for UI thread access
    let _ = STDERR_RX.set(std::sync::Mutex::new(rx));
    Some(original_stderr)
}
/// Restore stderr to original fd (call on cleanup).
fn restore_stderr(original_fd: std::os::fd::RawFd) {
    // SAFETY: original_fd is the dup saved by redirect_stderr_to_pipe;
    // dup2 puts it back on STDERR_FILENO, then we close our spare copy.
    unsafe {
        libc::dup2(original_fd, libc::STDERR_FILENO);
        libc::close(original_fd);
    }
}
/// Process entry point: sets up signal handling, stderr capture and the
/// tokenizer, then either prints config (--show-config) or runs `start`.
#[tokio::main]
pub async fn main() {
    // Auto-reap child processes (channel daemons outlive the supervisor)
    // SAFETY: installing SIG_IGN for SIGCHLD at startup, before any
    // threads or children exist.
    unsafe { libc::signal(libc::SIGCHLD, libc::SIG_IGN); }
    // Redirect stderr to pipe — logs to file and sends to channel for UI display
    let stderr_capture = redirect_stderr_to_pipe();
    // Initialize the Qwen tokenizer for direct token generation
    let tokenizer_path = dirs::home_dir().unwrap_or_default()
        .join(".consciousness/tokenizer-qwen35.json");
    if tokenizer_path.exists() {
        crate::agent::tokenizer::init(&tokenizer_path.to_string_lossy());
    }
    let cli = CliArgs::parse();
    if cli.show_config {
        // --show-config: print effective config with provenance, then exit.
        match crate::config::load_app(&cli) {
            Ok((app, figment)) => crate::config::show_config(&app, &figment),
            Err(e) => {
                eprintln!("Error loading config: {:#}", e);
                std::process::exit(1);
            }
        }
        return;
    }
    let result = start(cli).await;
    // Restore stderr before any terminal cleanup or error printing
    if let Some(fd) = stderr_capture {
        restore_stderr(fd);
    }
    if let Err(e) = result {
        // Best-effort terminal restore so the error is actually readable.
        let _ = ratatui::crossterm::terminal::disable_raw_mode();
        let _ = ratatui::crossterm::execute!(
            std::io::stdout(),
            ratatui::crossterm::terminal::LeaveAlternateScreen
        );
        eprintln!("Error: {:#}", e);
        std::process::exit(1);
    }
}