// mind/ — Cognitive layer // // Mind state machine, DMN, identity, observation socket. // Everything about how the mind operates, separate from the // user interface (TUI, CLI) and the agent execution (tools, API). pub mod dmn; pub mod identity; pub mod log; // consciousness.rs — Mind state machine and event loop // // The core runtime for the consciousness binary. Mind manages turns, // DMN state, compaction, scoring, and slash commands. The event loop // bridges Mind (cognitive state) with App (TUI rendering). // // The event loop uses biased select! so priorities are deterministic: // keyboard events > turn results > render ticks > DMN timer > UI messages. use anyhow::Result; use std::sync::Arc; use std::time::Instant; use tokio::sync::mpsc; use crate::agent::{Agent, TurnResult}; use crate::agent::api::ApiClient; use crate::config::{AppConfig, SessionConfig}; use crate::subconscious::learn; pub use dmn::{SubconsciousSnapshot, Subconscious}; use crate::agent::context::ConversationEntry; /// Load persisted memory scores from disk and apply to Memory entries. use crate::agent::context::ContextSection; fn load_memory_scores(section: &mut ContextSection, path: &std::path::Path) { let data = match std::fs::read_to_string(path) { Ok(d) => d, Err(_) => return, }; let scores: std::collections::BTreeMap = match serde_json::from_str(&data) { Ok(s) => s, Err(_) => return, }; let mut applied = 0; for i in 0..section.len() { if let ConversationEntry::Memory { key, .. } = §ion.entries()[i].entry { if let Some(&s) = scores.get(key.as_str()) { section.set_score(i, Some(s)); applied += 1; } } } if applied > 0 { dbglog!("[scoring] loaded {} scores from {}", applied, path.display()); } } /// Save all memory scores to disk. fn save_memory_scores(section: &ContextSection, path: &std::path::Path) { let scores: std::collections::BTreeMap = section.entries().iter() .filter_map(|ce| { if let ConversationEntry::Memory { key, score: Some(s), .. 
} = &ce.entry { Some((key.clone(), *s)) } else { None } }) .collect(); if let Ok(json) = serde_json::to_string_pretty(&scores) { let _ = std::fs::write(path, json); dbglog!("[scoring] saved {} scores to {}", scores.len(), path.display()); } } /// Which pane streaming text should go to. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum StreamTarget { /// User-initiated turn — text goes to conversation pane. Conversation, /// DMN-initiated turn — text goes to autonomous pane. Autonomous, } /// Compaction threshold — context is rebuilt when prompt tokens exceed this. fn compaction_threshold(app: &AppConfig) -> u32 { (crate::agent::context::context_window() as u32) * app.compaction.hard_threshold_pct / 100 } /// Shared state between Mind and UI. pub struct MindState { /// Pending user input — UI pushes, Mind consumes after turn completes. pub input: Vec, /// True while a turn is in progress. pub turn_active: bool, /// DMN state pub dmn: dmn::State, pub dmn_turns: u32, pub max_dmn_turns: u32, /// Whether memory scoring is running. pub scoring_in_flight: bool, /// Whether compaction is running. pub compaction_in_flight: bool, /// Per-turn tracking pub last_user_input: Instant, pub consecutive_errors: u32, pub last_turn_had_tools: bool, /// Handle to the currently running turn task. pub turn_handle: Option>, } impl Clone for MindState { fn clone(&self) -> Self { Self { input: self.input.clone(), turn_active: self.turn_active, dmn: self.dmn.clone(), dmn_turns: self.dmn_turns, max_dmn_turns: self.max_dmn_turns, scoring_in_flight: self.scoring_in_flight, compaction_in_flight: self.compaction_in_flight, last_user_input: self.last_user_input, consecutive_errors: self.consecutive_errors, last_turn_had_tools: self.last_turn_had_tools, turn_handle: None, // Not cloned — only Mind's loop uses this } } } /// What should happen after a state transition. 
pub enum MindCommand {
    /// Run compaction check
    Compact,
    /// Run memory scoring
    Score,
    /// Abort current turn, kill processes
    Interrupt,
    /// Reset session
    NewSession,
    /// Nothing to do
    None,
}

impl MindState {
    /// Fresh state. DMN starts Resting unless disabled via config,
    /// in which case it is Off and dmn_tick never fires.
    pub fn new(max_dmn_turns: u32) -> Self {
        Self {
            input: Vec::new(),
            turn_active: false,
            dmn: if dmn::is_off() {
                dmn::State::Off
            } else {
                dmn::State::Resting { since: Instant::now() }
            },
            dmn_turns: 0,
            max_dmn_turns,
            scoring_in_flight: false,
            compaction_in_flight: false,
            last_user_input: Instant::now(),
            consecutive_errors: 0,
            last_turn_had_tools: false,
            turn_handle: None,
        }
    }

    /// Consume pending user input if no turn is active.
    /// Returns the text to send; caller is responsible for pushing it
    /// into the Agent's context and starting the turn.
    ///
    /// Side effects: clears the queue, resets DMN turn/error counters,
    /// refreshes the idle timestamp, and marks the DMN Engaged.
    fn take_pending_input(&mut self) -> Option<String> {
        if self.turn_active || self.input.is_empty() {
            return None;
        }
        let text = self.input.join("\n");
        self.input.clear();
        self.dmn_turns = 0;
        self.consecutive_errors = 0;
        self.last_user_input = Instant::now();
        self.dmn = dmn::State::Engaged;
        Some(text)
    }

    /// Process turn completion, return model switch name if requested.
    ///
    /// On success: accumulate tool errors (or reset the streak on a clean
    /// turn), advance the DMN state machine, and honor a dmn_pause request
    /// (which overrides whatever `dmn::transition` chose).
    /// On failure: count one error and drop the DMN back to Resting.
    fn complete_turn(
        &mut self,
        result: &Result<TurnResult>,
        target: StreamTarget,
    ) -> Option<String> {
        self.turn_active = false;
        match result {
            Ok(turn_result) => {
                if turn_result.tool_errors > 0 {
                    self.consecutive_errors += turn_result.tool_errors;
                } else {
                    self.consecutive_errors = 0;
                }
                self.last_turn_had_tools = turn_result.had_tool_calls;
                self.dmn = dmn::transition(
                    &self.dmn,
                    turn_result.yield_requested,
                    turn_result.had_tool_calls,
                    target == StreamTarget::Conversation,
                );
                if turn_result.dmn_pause {
                    self.dmn = dmn::State::Paused;
                    self.dmn_turns = 0;
                }
                turn_result.model_switch.clone()
            }
            Err(_) => {
                self.consecutive_errors += 1;
                self.dmn = dmn::State::Resting { since: Instant::now() };
                None
            }
        }
    }

    /// DMN tick — returns a prompt and target if we should run a turn.
fn dmn_tick(&mut self) -> Option<(String, StreamTarget)> { if matches!(self.dmn, dmn::State::Paused | dmn::State::Off) { return None; } self.dmn_turns += 1; if self.dmn_turns > self.max_dmn_turns { self.dmn = dmn::State::Resting { since: Instant::now() }; self.dmn_turns = 0; return None; } let dmn_ctx = dmn::DmnContext { user_idle: self.last_user_input.elapsed(), consecutive_errors: self.consecutive_errors, last_turn_had_tools: self.last_turn_had_tools, }; let prompt = self.dmn.prompt(&dmn_ctx); Some((prompt, StreamTarget::Autonomous)) } fn interrupt(&mut self) { self.input.clear(); self.dmn = dmn::State::Resting { since: Instant::now() }; } } /// Background task completion events. enum BgEvent { ScoringDone, } // --- Mind: cognitive state machine --- pub type SharedMindState = std::sync::Mutex; pub struct Mind { pub agent: Arc>, pub shared: Arc, pub config: SessionConfig, subconscious: tokio::sync::Mutex, turn_tx: mpsc::Sender<(Result, StreamTarget)>, turn_watch: tokio::sync::watch::Sender, bg_tx: mpsc::UnboundedSender, bg_rx: std::sync::Mutex>>, _supervisor: crate::thalamus::supervisor::Supervisor, } impl Mind { pub fn new( config: SessionConfig, turn_tx: mpsc::Sender<(Result, StreamTarget)>, ) -> Self { let shared_active_tools = crate::agent::tools::shared_active_tools(); let client = ApiClient::new(&config.api_base, &config.api_key, &config.model); let conversation_log = log::ConversationLog::new( config.session_dir.join("conversation.jsonl"), ).ok(); let ag = Agent::new( client, config.system_prompt.clone(), config.context_parts.clone(), config.app.clone(), config.prompt_file.clone(), conversation_log, shared_active_tools, ); let agent = Arc::new(tokio::sync::Mutex::new(ag)); let shared = Arc::new(std::sync::Mutex::new(MindState::new(config.app.dmn.max_turns))); let (turn_watch, _) = tokio::sync::watch::channel(false); let (bg_tx, bg_rx) = mpsc::unbounded_channel(); let mut sup = crate::thalamus::supervisor::Supervisor::new(); sup.load_config(); 
sup.ensure_running(); Self { agent, shared, config, subconscious: tokio::sync::Mutex::new(Subconscious::new()), turn_tx, turn_watch, bg_tx, bg_rx: std::sync::Mutex::new(Some(bg_rx)), _supervisor: sup } } /// Initialize — restore log, start daemons and background agents. pub async fn subconscious_snapshots(&self) -> Vec { // Lock ordering: subconscious → store (store is bottom-most). let sub = self.subconscious.lock().await; let store = crate::store::Store::cached().await.ok(); let store_guard = match &store { Some(s) => Some(s.lock().await), None => None, }; sub.snapshots(store_guard.as_deref()) } pub async fn subconscious_walked(&self) -> Vec { self.subconscious.lock().await.walked() } pub async fn init(&self) { // Restore conversation let mut ag = self.agent.lock().await; ag.restore_from_log(); // Restore persisted memory scores let scores_path = self.config.session_dir.join("memory-scores.json"); load_memory_scores(&mut ag.context.conversation, &scores_path); ag.changed.notify_one(); drop(ag); // Load persistent subconscious state let state_path = self.config.session_dir.join("subconscious-state.json"); self.subconscious.lock().await.set_state_path(state_path); } pub fn turn_watch(&self) -> tokio::sync::watch::Receiver { self.turn_watch.subscribe() } /// Execute an Action from a MindState method. async fn run_commands(&self, cmds: Vec) { for cmd in cmds { match cmd { MindCommand::None => {} MindCommand::Compact => { let threshold = compaction_threshold(&self.config.app) as usize; let mut ag = self.agent.lock().await; if ag.context.total_tokens() > threshold { ag.compact(); ag.notify("compacted"); } } MindCommand::Score => { let mut s = self.shared.lock().unwrap(); if !s.scoring_in_flight { s.scoring_in_flight = true; drop(s); self.start_memory_scoring(); } } MindCommand::Interrupt => { self.shared.lock().unwrap().interrupt(); let ag = self.agent.lock().await; let mut tools = ag.active_tools.lock().unwrap(); for entry in tools.drain(..) 
{ entry.handle.abort(); } drop(tools); drop(ag); if let Some(h) = self.shared.lock().unwrap().turn_handle.take() { h.abort(); } self.shared.lock().unwrap().turn_active = false; let _ = self.turn_watch.send(false); } MindCommand::NewSession => { { let mut s = self.shared.lock().unwrap(); s.dmn = dmn::State::Resting { since: Instant::now() }; s.dmn_turns = 0; } let new_log = log::ConversationLog::new( self.config.session_dir.join("conversation.jsonl"), ).ok(); let mut ag = self.agent.lock().await; let shared_tools = ag.active_tools.clone(); *ag = Agent::new( ApiClient::new(&self.config.api_base, &self.config.api_key, &self.config.model), self.config.system_prompt.clone(), self.config.context_parts.clone(), self.config.app.clone(), self.config.prompt_file.clone(), new_log, shared_tools, ); } } } } pub fn start_memory_scoring(&self) { let agent = self.agent.clone(); let bg_tx = self.bg_tx.clone(); let scores_path = self.config.session_dir.join("memory-scores.json"); let cfg = crate::config::get(); let max_age = cfg.scoring_interval_secs; let response_window = cfg.scoring_response_window; tokio::spawn(async move { let (context, client) = { let mut ag = agent.lock().await; if ag.memory_scoring_in_flight { return; } ag.memory_scoring_in_flight = true; (ag.context.clone(), ag.client_clone()) }; let result = learn::score_memories_incremental( &context, max_age as i64, response_window, &client, &agent, ).await; { let mut ag = agent.lock().await; ag.memory_scoring_in_flight = false; if let Ok(ref scores) = result { // Write scores onto Memory entries for (key, weight) in scores { for i in 0..ag.context.conversation.len() { if let ConversationEntry::Memory { key: k, .. 
} = &ag.context.conversation.entries()[i].entry { if k == key { ag.context.conversation.set_score(i, Some(*weight)); } } } } // Persist all scores to disk save_memory_scores(&ag.context.conversation, &scores_path); } } let _ = bg_tx.send(BgEvent::ScoringDone); }); } async fn start_turn(&self, text: &str, target: StreamTarget) { { let mut ag = self.agent.lock().await; match target { StreamTarget::Conversation => { ag.push_message(crate::agent::api::Message::user(text)); } StreamTarget::Autonomous => { let mut msg = crate::agent::api::Message::user(text); msg.stamp(); ag.push_entry(crate::agent::context::ConversationEntry::Dmn(msg)); } } // Compact if over budget before sending let threshold = compaction_threshold(&self.config.app) as usize; if ag.context.total_tokens() > threshold { ag.compact(); ag.notify("compacted"); } } self.shared.lock().unwrap().turn_active = true; let _ = self.turn_watch.send(true); let agent = self.agent.clone(); let result_tx = self.turn_tx.clone(); self.shared.lock().unwrap().turn_handle = Some(tokio::spawn(async move { let result = Agent::turn(agent).await; let _ = result_tx.send((result, target)).await; })); } pub async fn shutdown(&self) { if let Some(handle) = self.shared.lock().unwrap().turn_handle.take() { handle.abort(); } } /// Mind event loop — locks MindState, calls state methods, executes actions. pub async fn run( &self, mut input_rx: tokio::sync::mpsc::UnboundedReceiver, mut turn_rx: mpsc::Receiver<(Result, StreamTarget)>, ) { let mut bg_rx = self.bg_rx.lock().unwrap().take() .expect("Mind::run() called twice"); loop { let timeout = self.shared.lock().unwrap().dmn.interval(); let turn_active = self.shared.lock().unwrap().turn_active; let mut cmds = Vec::new(); tokio::select! 
{ biased; cmd = input_rx.recv() => { match cmd { Some(cmd) => cmds.push(cmd), None => break, // UI shut down } } Some(bg) = bg_rx.recv() => { match bg { BgEvent::ScoringDone => { self.shared.lock().unwrap().scoring_in_flight = false; } } } Some((result, target)) = turn_rx.recv() => { self.shared.lock().unwrap().turn_handle = None; let model_switch = self.shared.lock().unwrap().complete_turn(&result, target); let _ = self.turn_watch.send(false); if let Some(name) = model_switch { crate::user::chat::cmd_switch_model(&self.agent, &name).await; } // Post-turn maintenance { let mut ag = self.agent.lock().await; ag.age_out_images(); } cmds.push(MindCommand::Compact); if !self.config.no_agents { cmds.push(MindCommand::Score); } } _ = tokio::time::sleep(timeout), if !turn_active => { let tick = self.shared.lock().unwrap().dmn_tick(); if let Some((prompt, target)) = tick { self.start_turn(&prompt, target).await; } } } // Subconscious: collect finished results, trigger due agents if !self.config.no_agents { let mut sub = self.subconscious.lock().await; sub.collect_results(&self.agent).await; sub.trigger(&self.agent).await; } // Check for pending user input → push to agent context and start turn let pending = self.shared.lock().unwrap().take_pending_input(); if let Some(text) = pending { self.start_turn(&text, StreamTarget::Conversation).await; } self.run_commands(cmds).await; } } }