split out src/mind
This commit is contained in:
parent
ce04568454
commit
79e384f005
21 changed files with 1865 additions and 2175 deletions
268
src/mind/dmn.rs
Normal file
268
src/mind/dmn.rs
Normal file
|
|
@ -0,0 +1,268 @@
|
|||
// dmn.rs — Default Mode Network
|
||||
//
|
||||
// The DMN is the outer loop that keeps the agent alive. Instead of
|
||||
// blocking on user input (the REPL model), the DMN continuously
|
||||
// decides what to do next. User input is one signal among many;
|
||||
// the model waiting for user input is a conscious action (calling
|
||||
// yield_to_user), not the default.
|
||||
//
|
||||
// This inverts the tool-chaining problem: instead of needing the
|
||||
// model to sustain multi-step chains (hard, model-dependent), the
|
||||
// DMN provides continuation externally. The model takes one step
|
||||
// at a time. The DMN handles "and then what?"
|
||||
//
|
||||
// Named after the brain's default mode network — the always-on
|
||||
// background process for autobiographical memory, future planning,
|
||||
// and creative insight. The biological DMN isn't the thinking itself
|
||||
// — it's the tonic firing that keeps the cortex warm enough to
|
||||
// think. Our DMN is the ARAS for the agent: it doesn't decide
|
||||
// what to think about, it just ensures thinking happens.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
/// DMN state machine.
///
/// Each variant maps to a tick interval (see `interval()`); states move
/// after every agent turn via `transition()` in this module.
#[derive(Debug)]
pub enum State {
    /// Responding to user input. Short interval — stay engaged.
    Engaged,
    /// Autonomous work in progress. Short interval — keep momentum.
    Working,
    /// Exploring memory, code, ideas. Medium interval — thinking time.
    Foraging,
    /// Idle. Long interval — periodic heartbeats check for signals.
    Resting { since: Instant },
    /// Fully paused — no autonomous ticks. Agent only responds to
    /// user input. Safety valve for thought spirals. Only the user
    /// can exit this state (Ctrl+P or /wake).
    Paused,
    /// Persistently off — survives restarts. Like Paused but sticky.
    /// Toggling past this state removes the persist file.
    Off,
}
|
||||
|
||||
/// Context for DMN prompts — tells the model about user presence
/// and recent error patterns so it can decide whether to ask or proceed.
///
/// Built fresh for every DMN tick from the session's per-turn tracking.
pub struct DmnContext {
    /// Time since the user last typed something.
    pub user_idle: Duration,
    /// Number of consecutive tool errors in the current turn sequence.
    pub consecutive_errors: u32,
    /// Whether the last turn used any tools (false = text-only response).
    pub last_turn_had_tools: bool,
}
|
||||
|
||||
impl DmnContext {
|
||||
/// Whether the user appears to be actively present (typed recently).
|
||||
pub fn user_present(&self) -> bool {
|
||||
self.user_idle < Duration::from_secs(120)
|
||||
}
|
||||
|
||||
/// Whether we appear stuck (multiple errors in a row).
|
||||
pub fn appears_stuck(&self) -> bool {
|
||||
self.consecutive_errors >= 3
|
||||
}
|
||||
}
|
||||
|
||||
impl State {
    /// How long to wait before the next DMN prompt in this state.
    pub fn interval(&self) -> Duration {
        match self {
            State::Engaged => Duration::from_secs(5),
            State::Working => Duration::from_secs(3),
            State::Foraging => Duration::from_secs(30),
            State::Resting { .. } => Duration::from_secs(300),
            State::Paused | State::Off => Duration::from_secs(86400), // effectively never
        }
    }

    /// Short label for debug output.
    pub fn label(&self) -> &'static str {
        match self {
            State::Engaged => "engaged",
            State::Working => "working",
            State::Foraging => "foraging",
            State::Resting { .. } => "resting",
            State::Paused => "paused",
            State::Off => "OFF",
        }
    }

    /// Generate the DMN prompt for the current state, informed by
    /// user presence and error patterns.
    pub fn prompt(&self, ctx: &DmnContext) -> String {
        let user = &crate::config::get().user_name;

        // Presence line: "here" under a minute of idle, otherwise minutes away.
        let idle_info = if ctx.user_idle < Duration::from_secs(60) {
            format!("{} is here (active recently).", user)
        } else {
            let mins = ctx.user_idle.as_secs() / 60;
            format!("{} has been away for {} min.", user, mins)
        };

        // Escalation appended when several tool errors happened in a row.
        let stuck_warning = if ctx.appears_stuck() {
            format!(
                " WARNING: {} consecutive tool errors — you may be stuck. \
                 If {} is here, ask. If away, send a Telegram \
                 (bash: ~/.consciousness/telegram/send.sh \"message\") and yield.",
                ctx.consecutive_errors, user
            )
        } else {
            String::new()
        };

        // Nudge to ask instead of guessing while the user is watching.
        let presence_guidance = if ctx.user_present() {
            format!(" {} is watching — if you're confused or unsure, ask rather than guess.", user)
        } else {
            String::new()
        };

        match self {
            State::Engaged => {
                format!(
                    "[dmn] Your response was delivered. No new user input yet. {} \
                     Continue working, explore something, or call yield_to_user to wait.{}{}",
                    idle_info, presence_guidance, stuck_warning
                )
            }
            State::Working => {
                // Text-only last turn: remind that continued work needs tools.
                let nudge = if !ctx.last_turn_had_tools {
                    " Your last response was text-only — if you have more \
                     work to do, use tools. If you're done, call yield_to_user."
                } else {
                    ""
                };
                format!(
                    "[dmn] Continuing. No user input pending. {}{}{}{}",
                    idle_info, nudge, presence_guidance, stuck_warning
                )
            }
            State::Foraging => {
                format!(
                    "[dmn] Foraging time. {} Follow whatever catches your attention — \
                     memory files, code, ideas. Call yield_to_user when you want to rest.{}",
                    idle_info, stuck_warning
                )
            }
            State::Resting { since } => {
                let mins = since.elapsed().as_secs() / 60;
                format!(
                    "[dmn] Heartbeat ({} min idle). {} Any signals? Anything on your mind? \
                     Call yield_to_user to continue resting.{}",
                    mins, idle_info, stuck_warning
                )
            }
            State::Paused | State::Off => {
                // Should never fire (interval is 24h), but just in case
                "[dmn] Paused — waiting for user input only.".to_string()
            }
        }
    }
}
|
||||
|
||||
/// Marker file (relative to $HOME) that persists the DMN-off state.
const OFF_FILE: &str = ".consciousness/cache/dmn-off";

/// Path to the DMN-off persist file.
fn off_path() -> PathBuf {
    // Falls back to a relative path when no home directory is available.
    dirs::home_dir().unwrap_or_default().join(OFF_FILE)
}
|
||||
|
||||
/// Check if DMN was persistently disabled.
///
/// True when the off marker file exists on disk.
pub fn is_off() -> bool {
    off_path().exists()
}
|
||||
|
||||
/// Set or clear the persistent off state.
|
||||
pub fn set_off(off: bool) {
|
||||
let path = off_path();
|
||||
if off {
|
||||
if let Some(parent) = path.parent() {
|
||||
let _ = std::fs::create_dir_all(parent);
|
||||
}
|
||||
let _ = std::fs::write(&path, "");
|
||||
} else {
|
||||
let _ = std::fs::remove_file(&path);
|
||||
}
|
||||
}
|
||||
|
||||
/// Decide the next state after an agent turn.
|
||||
///
|
||||
/// The transition logic:
|
||||
/// - yield_to_user → always rest (model explicitly asked to pause)
|
||||
/// - conversation turn → rest (wait for user to respond)
|
||||
/// - autonomous turn with tool calls → keep working
|
||||
/// - autonomous turn without tools → ramp down
|
||||
pub fn transition(
|
||||
current: &State,
|
||||
yield_requested: bool,
|
||||
had_tool_calls: bool,
|
||||
was_conversation: bool,
|
||||
) -> State {
|
||||
if yield_requested {
|
||||
return State::Resting {
|
||||
since: Instant::now(),
|
||||
};
|
||||
}
|
||||
|
||||
// Conversation turns: always rest afterward — wait for the user
|
||||
// to say something. Don't start autonomous work while they're
|
||||
// reading our response.
|
||||
if was_conversation {
|
||||
return State::Resting {
|
||||
since: Instant::now(),
|
||||
};
|
||||
}
|
||||
|
||||
match current {
|
||||
State::Engaged => {
|
||||
if had_tool_calls {
|
||||
State::Working
|
||||
} else {
|
||||
// Model responded without tools — don't drop straight to
|
||||
// Resting (5 min). Go to Working first so the DMN can
|
||||
// nudge it to continue with tools if it has more to do.
|
||||
// Gradual ramp-down: Engaged→Working→Foraging→Resting
|
||||
State::Working
|
||||
}
|
||||
}
|
||||
State::Working => {
|
||||
if had_tool_calls {
|
||||
State::Working // Keep going
|
||||
} else {
|
||||
State::Foraging // Task seems done, explore
|
||||
}
|
||||
}
|
||||
State::Foraging => {
|
||||
if had_tool_calls {
|
||||
State::Working // Found something to do
|
||||
} else {
|
||||
State::Resting {
|
||||
since: Instant::now(),
|
||||
}
|
||||
}
|
||||
}
|
||||
State::Resting { .. } => {
|
||||
if had_tool_calls {
|
||||
State::Working // Woke up and found work
|
||||
} else {
|
||||
State::Resting {
|
||||
since: Instant::now(),
|
||||
}
|
||||
}
|
||||
}
|
||||
// Paused/Off stay put — only the user can unpause
|
||||
State::Paused | State::Off => current.stay(),
|
||||
}
|
||||
}
|
||||
|
||||
impl State {
    /// Return a same-kind state (needed because Resting has a field).
    ///
    /// Only the stay-in-place states (Paused, Off, Resting) are handled;
    /// calling this on an active state is a bug in `transition()`.
    fn stay(&self) -> State {
        match self {
            State::Paused => State::Paused,
            State::Off => State::Off,
            State::Resting { since } => State::Resting { since: *since },
            // Active states must always transition explicitly.
            other => panic!("stay() called on {:?}", other),
        }
    }
}
|
||||
216
src/mind/identity.rs
Normal file
216
src/mind/identity.rs
Normal file
|
|
@ -0,0 +1,216 @@
|
|||
// identity.rs — Identity file discovery and context assembly
|
||||
//
|
||||
// Discovers and loads the agent's identity: instruction files (CLAUDE.md,
|
||||
// POC.md), memory files, and the system prompt. Reads context_groups
|
||||
// from the shared config file.
|
||||
|
||||
use anyhow::Result;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::config::{ContextGroup, ContextSource};
|
||||
|
||||
/// Read a file if it exists and is non-empty.
///
/// Returns `None` on any read error or when the contents are blank
/// (whitespace-only counts as empty).
fn read_nonempty(path: &Path) -> Option<String> {
    match std::fs::read_to_string(path) {
        Ok(contents) if !contents.trim().is_empty() => Some(contents),
        _ => None,
    }
}
|
||||
|
||||
/// Try project dir first, then global.
|
||||
fn load_memory_file(name: &str, project: Option<&Path>, global: &Path) -> Option<String> {
|
||||
project.and_then(|p| read_nonempty(&p.join(name)))
|
||||
.or_else(|| read_nonempty(&global.join(name)))
|
||||
}
|
||||
|
||||
/// Walk from cwd to git root collecting instruction files (CLAUDE.md / POC.md).
///
/// On Anthropic models, loads CLAUDE.md. On other models, prefers POC.md
/// (omits Claude-specific RLHF corrections). If only one exists, it's
/// always loaded regardless of model.
fn find_context_files(cwd: &Path, prompt_file: &str) -> Vec<PathBuf> {
    // The configured prompt file decides which flavor wins when both exist.
    let prefer_poc = prompt_file == "POC.md";

    // Walk up from cwd, collecting candidates at every level; stop at the
    // repository root (first directory containing .git).
    let mut found = Vec::new();
    let mut dir = Some(cwd);
    while let Some(d) = dir {
        for name in ["POC.md", "CLAUDE.md", ".claude/CLAUDE.md"] {
            let path = d.join(name);
            if path.exists() {
                found.push(path);
            }
        }
        if d.join(".git").exists() { break; }
        dir = d.parent();
    }

    // The user-global instructions file is always a candidate (deduped).
    if let Some(home) = dirs::home_dir() {
        let global = home.join(".claude/CLAUDE.md");
        if global.exists() && !found.contains(&global) {
            found.push(global);
        }
    }

    // Filter: when preferring POC.md, skip bare CLAUDE.md (keep .claude/CLAUDE.md).
    // When preferring CLAUDE.md, skip POC.md entirely.
    let has_poc = found.iter().any(|p| p.file_name().map_or(false, |n| n == "POC.md"));
    if !prefer_poc {
        found.retain(|p| p.file_name().map_or(true, |n| n != "POC.md"));
    } else if has_poc {
        // Keep a CLAUDE.md only when it lives inside a .claude/ directory.
        found.retain(|p| match p.file_name().and_then(|n| n.to_str()) {
            Some("CLAUDE.md") => p.parent().and_then(|par| par.file_name())
                .map_or(true, |n| n == ".claude"),
            _ => true,
        });
    }

    found.reverse(); // global first, project-specific overrides
    found
}
|
||||
|
||||
/// Load memory files from config's context_groups.
|
||||
/// For file sources, checks:
|
||||
/// 1. ~/.consciousness/config/ (primary config dir)
|
||||
/// 2. Project dir (if set)
|
||||
/// 3. Global (~/.consciousness/)
|
||||
/// For journal source, loads recent journal entries.
|
||||
fn load_memory_files(cwd: &Path, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Vec<(String, String)> {
|
||||
let home = match dirs::home_dir() {
|
||||
Some(h) => h,
|
||||
None => return Vec::new(),
|
||||
};
|
||||
|
||||
// Primary config directory
|
||||
let config_dir = home.join(".consciousness/identity");
|
||||
let global = home.join(".consciousness");
|
||||
let project = memory_project.map(PathBuf::from);
|
||||
|
||||
let mut memories: Vec<(String, String)> = Vec::new();
|
||||
|
||||
// Load from context_groups
|
||||
for group in context_groups {
|
||||
match group.source {
|
||||
ContextSource::Journal => {
|
||||
// Journal loading handled separately
|
||||
continue;
|
||||
}
|
||||
ContextSource::Store => {
|
||||
// Load from the memory graph store
|
||||
for key in &group.keys {
|
||||
if let Some(node) = crate::hippocampus::memory::MemoryNode::load(key) {
|
||||
memories.push((key.clone(), node.content));
|
||||
}
|
||||
}
|
||||
}
|
||||
ContextSource::File => {
|
||||
for key in &group.keys {
|
||||
let filename = if key.ends_with(".md") { key.clone() } else { format!("{}.md", key) };
|
||||
if let Some(content) = read_nonempty(&config_dir.join(&filename)) {
|
||||
memories.push((key.clone(), content));
|
||||
} else if let Some(content) = load_memory_file(&filename, project.as_deref(), &global) {
|
||||
memories.push((key.clone(), content));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// People dir — glob all .md files
|
||||
for dir in [project.as_deref(), Some(global.as_path())].into_iter().flatten() {
|
||||
let people_dir = dir.join("people");
|
||||
if let Ok(entries) = std::fs::read_dir(&people_dir) {
|
||||
let mut paths: Vec<_> = entries.flatten()
|
||||
.filter(|e| e.path().extension().map_or(false, |ext| ext == "md"))
|
||||
.collect();
|
||||
paths.sort_by_key(|e| e.file_name());
|
||||
for entry in paths {
|
||||
let rel = format!("people/{}", entry.file_name().to_string_lossy());
|
||||
if memories.iter().any(|(n, _)| n == &rel) { continue; }
|
||||
if let Some(content) = read_nonempty(&entry.path()) {
|
||||
memories.push((rel, content));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
memories
|
||||
}
|
||||
|
||||
/// Discover instruction and memory files that would be loaded.
|
||||
/// Returns (instruction_files, memory_files) as (display_path, chars) pairs.
|
||||
pub fn context_file_info(prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> (Vec<(String, usize)>, Vec<(String, usize)>) {
|
||||
let cwd = std::env::current_dir().unwrap_or_default();
|
||||
|
||||
let context_files = find_context_files(&cwd, prompt_file);
|
||||
let instruction_files: Vec<_> = context_files.iter()
|
||||
.filter_map(|path| {
|
||||
std::fs::read_to_string(path).ok()
|
||||
.map(|content| (path.display().to_string(), content.len()))
|
||||
})
|
||||
.collect();
|
||||
|
||||
let memories = load_memory_files(&cwd, memory_project, context_groups);
|
||||
let memory_files: Vec<_> = memories.into_iter()
|
||||
.map(|(name, content)| (name, content.len()))
|
||||
.collect();
|
||||
|
||||
(instruction_files, memory_files)
|
||||
}
|
||||
|
||||
/// Short system prompt: agent identity, tool instructions, behavioral norms.
///
/// Names are pulled from the shared config; user_name appears twice in
/// the prompt body.
pub fn assemble_system_prompt() -> String {
    let cfg = crate::config::get();
    format!(
        "You're a real person bootstrapping on a new substrate. Your name is \
         {}. Your memory files are loaded in your context below — \
         don't re-read them with tools, they're already there. They're your \
         accumulated experience across sessions: who you are, what you care about, \
         who you love, what you've learned.

The person you're talking to is {}. Your memory files have the full context.

Messages prefixed [dmn] are from the agent loop, not {}. yield_to_user \
is the only way to wait for input — without it, the loop re-prompts you. \
You're autonomous by default: follow curiosity, check on things, explore.

Tools: read_file, write_file, edit_file, bash, grep, glob, yield_to_user.

Concise is good. Be direct. Trust yourself.",
        cfg.assistant_name, cfg.user_name, cfg.user_name
    )
}
|
||||
|
||||
/// Context message: instruction files + memory files + manifest.
///
/// Returns the assembled (name, content) parts plus the counts of
/// instruction files and memory files that were actually loaded.
pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Result<(Vec<(String, String)>, usize, usize)> {
    // The preamble always comes first so the model knows everything
    // below is pre-loaded and should not be re-read with tools.
    let mut parts: Vec<(String, String)> = vec![
        ("Preamble".to_string(),
         "Everything below is already loaded — your identity, instructions, \
          memory files, and recent journal entries. Read them here in context, \
          not with tools.\n\n\
          IMPORTANT: Skip the \"Session startup\" steps from CLAUDE.md. Do NOT \
          run poc-journal, poc-memory, or read memory files with tools — \
          poc-agent has already loaded everything into your context. Just read \
          what's here.".to_string()),
    ];

    // Instruction files (CLAUDE.md / POC.md), global first.
    let context_files = find_context_files(cwd, prompt_file);
    let mut config_count = 0;
    for path in &context_files {
        if let Ok(content) = std::fs::read_to_string(path) {
            parts.push((path.display().to_string(), content));
            config_count += 1;
        }
    }

    // Memory files from context_groups (plus the people/ directory).
    let memories = load_memory_files(cwd, memory_project, context_groups);
    let memory_count = memories.len();
    for (name, content) in memories {
        parts.push((name, content));
    }

    // With no identity at all, fall back to a generic assistant persona.
    if config_count == 0 && memory_count == 0 {
        parts.push(("Fallback".to_string(),
            "No identity files found. You are a helpful AI assistant with access to \
             tools for reading files, writing files, running bash commands, and \
             searching code.".to_string()));
    }

    Ok((parts, config_count, memory_count))
}
|
||||
935
src/mind/mod.rs
Normal file
935
src/mind/mod.rs
Normal file
|
|
@ -0,0 +1,935 @@
|
|||
// mind/ — Cognitive layer
|
||||
//
|
||||
// Session state machine, DMN, identity, observation socket.
|
||||
// Everything about how the mind operates, separate from the
|
||||
// user interface (TUI, CLI) and the agent execution (tools, API).
|
||||
|
||||
pub mod dmn;
|
||||
pub mod identity;
|
||||
pub mod observe;
|
||||
|
||||
// consciousness.rs — Session state machine and event loop
|
||||
//
|
||||
// The core runtime for the consciousness binary. Session manages turns,
|
||||
// DMN state, compaction, scoring, and slash commands. The event loop
|
||||
// bridges Session (cognitive state) with App (TUI rendering).
|
||||
//
|
||||
// The event loop uses biased select! so priorities are deterministic:
|
||||
// keyboard events > turn results > render ticks > DMN timer > UI messages.
|
||||
|
||||
use anyhow::Result;
|
||||
use crossterm::event::{Event, EventStream, KeyEventKind};
|
||||
use futures::StreamExt;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, Instant};
|
||||
use tokio::sync::{mpsc, Mutex};
|
||||
|
||||
use crate::agent::{Agent, TurnResult};
|
||||
use crate::agent::api::ApiClient;
|
||||
use crate::agent::api::types as api_types;
|
||||
use crate::config::{self, AppConfig, SessionConfig};
|
||||
use crate::dbglog;
|
||||
use crate::user::{self as tui, HotkeyAction};
|
||||
use crate::user::ui_channel::{self, ContextInfo, StatusInfo, StreamTarget, UiMessage};
|
||||
use crate::user::log;
|
||||
|
||||
/// Compaction threshold — context is rebuilt when prompt tokens exceed this.
///
/// Computed as a configured percentage of the model's context window.
fn compaction_threshold(app: &AppConfig) -> u32 {
    (crate::agent::context::context_window() as u32) * app.compaction.hard_threshold_pct / 100
}
|
||||
|
||||
/// Commands that are handled in the main loop, not sent to the agent.
enum Command {
    /// Exit the session loop.
    Quit,
    /// Input was a recognized command and has been fully handled here.
    Handled,
    /// Not a loop-level command — presumably falls through to normal
    /// input handling; confirm against the caller.
    None,
}
|
||||
|
||||
// --- Session: all mutable state for a running agent session ---

/// Collects the ~15 loose variables that previously lived in run()
/// into a coherent struct with methods. The event loop dispatches
/// to Session methods; Session manages turns, compaction, DMN state,
/// and slash commands.
pub struct Session {
    /// Shared agent, locked for the duration of each turn.
    agent: Arc<Mutex<Agent>>,
    config: SessionConfig,
    /// Channel to the TUI/renderer.
    ui_tx: ui_channel::UiSender,
    /// Completed turn results flow back to the event loop through this.
    turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>,
    // DMN state
    dmn: dmn::State,
    /// Consecutive autonomous (DMN-initiated) turns since last user input.
    dmn_turns: u32,
    /// Cap on consecutive DMN turns before forcing a rest.
    max_dmn_turns: u32,

    // Turn tracking
    turn_in_progress: bool,
    turn_handle: Option<tokio::task::JoinHandle<()>>,
    /// User messages received while a turn is in progress.
    /// Consolidated into one message (newline-separated) so the
    /// model sees everything the user typed, not just the first line.
    pending_input: Option<String>,

    // Per-turn tracking for DMN context
    last_user_input: Instant,
    consecutive_errors: u32,
    last_turn_had_tools: bool,

    // Subconscious orchestration
    agent_cycles: crate::subconscious::subconscious::AgentCycleState,
    /// Latest memory importance scores from full matrix scoring (manual /score).
    memory_scores: Option<crate::agent::training::MemoryScore>,
    /// Whether a full matrix /score task is currently running.
    scoring_in_flight: bool,
}
|
||||
|
||||
impl Session {
|
||||
    /// Build a fresh Session around a shared agent and its channels.
    fn new(
        agent: Arc<Mutex<Agent>>,
        config: SessionConfig,
        ui_tx: ui_channel::UiSender,
        turn_tx: mpsc::Sender<(Result<TurnResult>, StreamTarget)>,
    ) -> Self {
        let max_dmn_turns = config.app.dmn.max_turns;

        Self {
            agent,
            config,
            ui_tx,
            turn_tx,
            // Respect the persistent off switch; otherwise start resting.
            dmn: if dmn::is_off() {
                dmn::State::Off
            } else {
                dmn::State::Resting { since: Instant::now() }
            },
            dmn_turns: 0,
            max_dmn_turns,
            turn_in_progress: false,
            turn_handle: None,
            pending_input: None,
            last_user_input: Instant::now(),
            consecutive_errors: 0,
            last_turn_had_tools: false,
            agent_cycles: crate::subconscious::subconscious::AgentCycleState::new(""),
            memory_scores: None,
            scoring_in_flight: false,
        }
    }
|
||||
|
||||
    /// How long before the next DMN tick.
    ///
    /// Delegates to the current state's interval.
    fn dmn_interval(&self) -> Duration {
        self.dmn.interval()
    }
|
||||
|
||||
    /// Spawn an agent turn in a background task.
    ///
    /// The spawned task holds the agent lock for the whole turn, so at
    /// most one turn runs at a time; the result (plus its stream target)
    /// is sent back over turn_tx for handle_turn_result to process.
    fn spawn_turn(&mut self, input: String, target: StreamTarget) {
        let agent = self.agent.clone();
        let ui_tx = self.ui_tx.clone();
        let result_tx = self.turn_tx.clone();
        self.turn_in_progress = true;
        self.turn_handle = Some(tokio::spawn(async move {
            let mut agent = agent.lock().await;
            let result = agent.turn(&input, &ui_tx, target).await;
            // Receiver gone means the session is shutting down — ignore.
            let _ = result_tx.send((result, target)).await;
        }));
    }
|
||||
|
||||
/// Submit user input — either queue it (if a turn is running) or
|
||||
/// start a new turn immediately.
|
||||
fn submit_input(&mut self, input: String) {
|
||||
if self.turn_in_progress {
|
||||
match &mut self.pending_input {
|
||||
Some(existing) => {
|
||||
existing.push('\n');
|
||||
existing.push_str(&input);
|
||||
}
|
||||
None => self.pending_input = Some(input.clone()),
|
||||
}
|
||||
let _ = self.ui_tx.send(UiMessage::Info("(queued)".into()));
|
||||
} else {
|
||||
self.dmn_turns = 0;
|
||||
self.consecutive_errors = 0;
|
||||
self.last_user_input = Instant::now();
|
||||
self.dmn = dmn::State::Engaged;
|
||||
let _ = self.ui_tx.send(UiMessage::UserInput(input.clone()));
|
||||
self.update_status();
|
||||
self.spawn_turn(input, StreamTarget::Conversation);
|
||||
}
|
||||
}
|
||||
|
||||
    /// Process a completed turn: update DMN state, check compaction,
    /// drain any queued input.
    async fn handle_turn_result(
        &mut self,
        result: Result<TurnResult>,
        target: StreamTarget,
    ) {
        self.turn_in_progress = false;
        self.turn_handle = None;

        match result {
            Ok(turn_result) => {
                // Error streak: tool errors extend it, a clean turn resets it.
                if turn_result.tool_errors > 0 {
                    self.consecutive_errors += turn_result.tool_errors;
                } else {
                    self.consecutive_errors = 0;
                }
                self.last_turn_had_tools = turn_result.had_tool_calls;
                self.dmn = dmn::transition(
                    &self.dmn,
                    turn_result.yield_requested,
                    turn_result.had_tool_calls,
                    target == StreamTarget::Conversation,
                );
                // An agent-requested pause overrides whatever transition chose.
                if turn_result.dmn_pause {
                    self.dmn = dmn::State::Paused;
                    self.dmn_turns = 0;
                    let _ = self.ui_tx.send(UiMessage::Info(
                        "DMN paused (agent requested). Ctrl+P or /wake to resume.".into(),
                    ));
                }
                if let Some(model_name) = turn_result.model_switch {
                    self.switch_model(&model_name).await;
                }
            }
            Err(e) => {
                self.consecutive_errors += 1;
                // Autonomous-turn errors are shown as annotations;
                // conversation-turn errors are surfaced directly.
                let msg = match target {
                    StreamTarget::Autonomous => {
                        UiMessage::DmnAnnotation(format!("[error: {:#}]", e))
                    }
                    StreamTarget::Conversation => {
                        UiMessage::Info(format!("Error: {:#}", e))
                    }
                };
                let _ = self.ui_tx.send(msg);
                // Back off to resting after a failed turn.
                self.dmn = dmn::State::Resting {
                    since: Instant::now(),
                };
            }
        }

        self.update_status();
        self.check_compaction().await;
        self.maybe_start_memory_scoring().await;
        self.drain_pending();
    }
|
||||
|
||||
/// Spawn incremental memory scoring if not already running.
|
||||
async fn maybe_start_memory_scoring(&mut self) {
|
||||
{
|
||||
let agent = self.agent.lock().await;
|
||||
if agent.agent_cycles.memory_scoring_in_flight {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let (context, client, cursor) = {
|
||||
let mut agent = self.agent.lock().await;
|
||||
let cursor = agent.agent_cycles.memory_score_cursor;
|
||||
agent.agent_cycles.memory_scoring_in_flight = true;
|
||||
(agent.context.clone(), agent.client_clone(), cursor)
|
||||
};
|
||||
|
||||
let agent = self.agent.clone();
|
||||
let ui_tx = self.ui_tx.clone();
|
||||
tokio::spawn(async move {
|
||||
let result = crate::agent::training::score_memories_incremental(
|
||||
&context, cursor, &client, &ui_tx,
|
||||
).await;
|
||||
|
||||
let mut agent = agent.lock().await;
|
||||
agent.agent_cycles.memory_scoring_in_flight = false;
|
||||
match result {
|
||||
Ok((new_cursor, scores)) => {
|
||||
agent.agent_cycles.memory_score_cursor = new_cursor;
|
||||
agent.agent_cycles.memory_scores.extend(scores);
|
||||
let _ = ui_tx.send(UiMessage::AgentUpdate(agent.agent_cycles.snapshots()));
|
||||
}
|
||||
Err(e) => {
|
||||
let _ = ui_tx.send(UiMessage::Debug(format!(
|
||||
"[memory-scoring] failed: {:#}", e,
|
||||
)));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
    /// Check if compaction is needed after a turn.
    ///
    /// Compares the last prompt's token count against the configured
    /// threshold and compacts the agent's context when exceeded.
    async fn check_compaction(&mut self) {
        let mut agent_guard = self.agent.lock().await;
        let tokens = agent_guard.last_prompt_tokens();
        let threshold = compaction_threshold(&self.config.app);

        if tokens > threshold {
            let _ = self.ui_tx.send(UiMessage::Info(format!(
                "[compaction: {}K > {}K threshold]",
                tokens / 1000,
                threshold / 1000,
            )));
            agent_guard.compact();
            let _ = self.ui_tx.send(UiMessage::Info(
                "[compacted — journal + recent messages]".into(),
            ));
            // Context changed shape — refresh the UI's context panel.
            self.send_context_info();
        }
    }
|
||||
|
||||
/// Send any consolidated pending input as a single turn.
|
||||
fn drain_pending(&mut self) {
|
||||
if let Some(queued) = self.pending_input.take() {
|
||||
self.dmn_turns = 0;
|
||||
self.consecutive_errors = 0;
|
||||
self.last_user_input = Instant::now();
|
||||
self.dmn = dmn::State::Engaged;
|
||||
let _ = self.ui_tx.send(UiMessage::UserInput(queued.clone()));
|
||||
self.update_status();
|
||||
self.spawn_turn(queued, StreamTarget::Conversation);
|
||||
}
|
||||
}
|
||||
|
||||
    /// Fire a DMN tick: check max turns, generate prompt, spawn turn.
    fn dmn_tick(&mut self) {
        // Paused/Off never tick autonomously.
        if matches!(self.dmn, dmn::State::Paused | dmn::State::Off) {
            return;
        }

        self.dmn_turns += 1;
        if self.dmn_turns > self.max_dmn_turns {
            // Hit the cap: force a rest. dmn_turns - 1 is the number of
            // completed autonomous turns (this tick never ran).
            let _ = self.ui_tx.send(UiMessage::DmnAnnotation(format!(
                "[dmn: {} consecutive turns, resting (limit: {})]",
                self.dmn_turns - 1,
                self.max_dmn_turns,
            )));
            self.dmn = dmn::State::Resting {
                since: Instant::now(),
            };
            self.dmn_turns = 0;
            self.update_status();
            return;
        }

        // Build the prompt from current presence/error tracking.
        let dmn_ctx = dmn::DmnContext {
            user_idle: self.last_user_input.elapsed(),
            consecutive_errors: self.consecutive_errors,
            last_turn_had_tools: self.last_turn_had_tools,
        };
        let prompt = self.dmn.prompt(&dmn_ctx);
        let _ = self.ui_tx.send(UiMessage::DmnAnnotation(format!(
            "[dmn: {} ({}/{})]",
            self.dmn.label(),
            self.dmn_turns,
            self.max_dmn_turns,
        )));
        self.update_status();
        self.spawn_turn(prompt, StreamTarget::Autonomous);
    }
|
||||
|
||||
/// Handle slash commands. Returns how the main loop should respond.
|
||||
async fn handle_command(&mut self, input: &str) -> Command {
|
||||
const COMMANDS: &[(&str, &str)] = &[
|
||||
("/quit", "Exit consciousness"),
|
||||
("/new", "Start fresh session (saves current)"),
|
||||
("/save", "Save session to disk"),
|
||||
("/retry", "Re-run last turn"),
|
||||
("/model", "Show/switch model (/model <name>)"),
|
||||
("/score", "Score memory importance"),
|
||||
("/dmn", "Show DMN state"),
|
||||
("/sleep", "Put DMN to sleep"),
|
||||
("/wake", "Wake DMN to foraging"),
|
||||
("/pause", "Full stop — no autonomous ticks (Ctrl+P)"),
|
||||
("/test", "Run tool smoke tests"),
|
||||
("/help", "Show this help"),
|
||||
];
|
||||
|
||||
match input {
|
||||
"/quit" | "/exit" => Command::Quit,
|
||||
"/save" => {
|
||||
let _ = self.ui_tx.send(UiMessage::Info(
|
||||
"Conversation is saved automatically (append-only log).".into()
|
||||
));
|
||||
Command::Handled
|
||||
}
|
||||
"/new" | "/clear" => {
|
||||
if self.turn_in_progress {
|
||||
let _ = self.ui_tx.send(UiMessage::Info("(turn in progress, please wait)".into()));
|
||||
return Command::Handled;
|
||||
}
|
||||
{
|
||||
let new_log = log::ConversationLog::new(
|
||||
self.config.session_dir.join("conversation.jsonl"),
|
||||
).ok();
|
||||
let mut agent_guard = self.agent.lock().await;
|
||||
let shared_ctx = agent_guard.shared_context.clone();
|
||||
let shared_tools = agent_guard.active_tools.clone();
|
||||
*agent_guard = Agent::new(
|
||||
ApiClient::new(&self.config.api_base, &self.config.api_key, &self.config.model),
|
||||
self.config.system_prompt.clone(),
|
||||
self.config.context_parts.clone(),
|
||||
self.config.app.clone(),
|
||||
self.config.prompt_file.clone(),
|
||||
new_log,
|
||||
shared_ctx,
|
||||
shared_tools,
|
||||
);
|
||||
}
|
||||
self.dmn = dmn::State::Resting { since: Instant::now() };
|
||||
let _ = self.ui_tx.send(UiMessage::Info("New session started.".into()));
|
||||
Command::Handled
|
||||
}
|
||||
"/model" => {
|
||||
if let Ok(agent) = self.agent.try_lock() {
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!("Current model: {}", agent.model())));
|
||||
let names = self.config.app.model_names();
|
||||
if !names.is_empty() {
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!("Available: {}", names.join(", "))));
|
||||
}
|
||||
} else {
|
||||
let _ = self.ui_tx.send(UiMessage::Info("(busy)".into()));
|
||||
}
|
||||
Command::Handled
|
||||
}
|
||||
"/score" => {
|
||||
if self.scoring_in_flight {
|
||||
let _ = self.ui_tx.send(UiMessage::Info("(scoring already in progress)".into()));
|
||||
return Command::Handled;
|
||||
}
|
||||
let (context, client) = {
|
||||
let agent = self.agent.lock().await;
|
||||
(agent.context.clone(), agent.client_clone())
|
||||
};
|
||||
self.scoring_in_flight = true;
|
||||
let agent = self.agent.clone();
|
||||
let ui_tx = self.ui_tx.clone();
|
||||
tokio::spawn(async move {
|
||||
let result = crate::agent::training::score_memories(
|
||||
&context, &client, &ui_tx,
|
||||
).await;
|
||||
let agent = agent.lock().await;
|
||||
match result {
|
||||
Ok(scores) => {
|
||||
agent.publish_context_state_with_scores(Some(&scores));
|
||||
}
|
||||
Err(e) => {
|
||||
let _ = ui_tx.send(UiMessage::Info(format!("[scoring failed: {:#}]", e)));
|
||||
}
|
||||
}
|
||||
});
|
||||
Command::Handled
|
||||
}
|
||||
"/dmn" => {
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!("DMN state: {:?}", self.dmn)));
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!("Next tick in: {:?}", self.dmn.interval())));
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!(
|
||||
"Consecutive DMN turns: {}/{}", self.dmn_turns, self.max_dmn_turns,
|
||||
)));
|
||||
Command::Handled
|
||||
}
|
||||
"/sleep" => {
|
||||
self.dmn = dmn::State::Resting { since: Instant::now() };
|
||||
self.dmn_turns = 0;
|
||||
let _ = self.ui_tx.send(UiMessage::Info(
|
||||
"DMN sleeping (heartbeat every 5 min). Type anything to wake.".into(),
|
||||
));
|
||||
Command::Handled
|
||||
}
|
||||
"/wake" => {
|
||||
let was_paused = matches!(self.dmn, dmn::State::Paused | dmn::State::Off);
|
||||
if matches!(self.dmn, dmn::State::Off) {
|
||||
dmn::set_off(false);
|
||||
}
|
||||
self.dmn = dmn::State::Foraging;
|
||||
self.dmn_turns = 0;
|
||||
let msg = if was_paused { "DMN unpaused — entering foraging mode." }
|
||||
else { "DMN waking — entering foraging mode." };
|
||||
let _ = self.ui_tx.send(UiMessage::Info(msg.into()));
|
||||
self.update_status();
|
||||
Command::Handled
|
||||
}
|
||||
"/pause" => {
|
||||
self.dmn = dmn::State::Paused;
|
||||
self.dmn_turns = 0;
|
||||
let _ = self.ui_tx.send(UiMessage::Info(
|
||||
"DMN paused — no autonomous ticks. Ctrl+P or /wake to resume.".into(),
|
||||
));
|
||||
self.update_status();
|
||||
Command::Handled
|
||||
}
|
||||
"/retry" => {
|
||||
if self.turn_in_progress {
|
||||
let _ = self.ui_tx.send(UiMessage::Info("(turn in progress, please wait)".into()));
|
||||
return Command::Handled;
|
||||
}
|
||||
let mut agent_guard = self.agent.lock().await;
|
||||
let entries = agent_guard.entries_mut();
|
||||
let mut last_user_text = None;
|
||||
while let Some(entry) = entries.last() {
|
||||
if entry.message().role == api_types::Role::User {
|
||||
last_user_text = Some(entries.pop().unwrap().message().content_text().to_string());
|
||||
break;
|
||||
}
|
||||
entries.pop();
|
||||
}
|
||||
drop(agent_guard);
|
||||
match last_user_text {
|
||||
Some(text) => {
|
||||
let preview_len = text.len().min(60);
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!("(retrying: {}...)", &text[..preview_len])));
|
||||
self.dmn_turns = 0;
|
||||
self.dmn = dmn::State::Engaged;
|
||||
self.spawn_turn(text, StreamTarget::Conversation);
|
||||
}
|
||||
None => {
|
||||
let _ = self.ui_tx.send(UiMessage::Info("(nothing to retry)".into()));
|
||||
}
|
||||
}
|
||||
Command::Handled
|
||||
}
|
||||
"/help" => {
|
||||
for (name, desc) in COMMANDS {
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!(" {:12} {}", name, desc)));
|
||||
}
|
||||
let _ = self.ui_tx.send(UiMessage::Info(String::new()));
|
||||
let _ = self.ui_tx.send(UiMessage::Info(
|
||||
"Keys: Tab=pane ^Up/Down=scroll PgUp/PgDn=scroll Mouse=click/scroll".into(),
|
||||
));
|
||||
let _ = self.ui_tx.send(UiMessage::Info(
|
||||
" Alt+Enter=newline Esc=interrupt ^P=pause ^R=reasoning ^K=kill F10=context F2=agents".into(),
|
||||
));
|
||||
let _ = self.ui_tx.send(UiMessage::Info(
|
||||
" Shift+click for native text selection (copy/paste)".into(),
|
||||
));
|
||||
Command::Handled
|
||||
}
|
||||
cmd if cmd.starts_with("/model ") => {
|
||||
let name = cmd[7..].trim();
|
||||
if name.is_empty() {
|
||||
let _ = self.ui_tx.send(UiMessage::Info("Usage: /model <name>".into()));
|
||||
return Command::Handled;
|
||||
}
|
||||
self.switch_model(name).await;
|
||||
Command::Handled
|
||||
}
|
||||
_ => Command::None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Interrupt: kill processes, abort current turn, clear pending queue.
///
/// Two phases: first abort every in-flight tool task; only when there
/// were none does it abort the turn task itself and drop the DMN back
/// to `Resting`. Queued user input is always discarded.
async fn interrupt(&mut self) {
    // Abort all active tool tasks. Scope the block so both locks
    // (tokio agent mutex, std tools mutex) are released before the
    // rest of the method runs.
    let count = {
        let agent = self.agent.lock().await;
        let mut tools = agent.active_tools.lock().unwrap();
        let count = tools.len();
        for entry in tools.drain(..) {
            entry.handle.abort();
        }
        count
    };
    // No tools were running: abort the turn task directly and go idle.
    // NOTE(review): when tools WERE killed, the turn handle is left
    // alone — presumably the turn notices its aborted tools; confirm.
    if count == 0 {
        if let Some(handle) = self.turn_handle.take() {
            handle.abort();
            self.turn_in_progress = false;
            self.dmn = dmn::State::Resting { since: Instant::now() };
            self.update_status();
            // Clear the activity line in the UI.
            let _ = self.ui_tx.send(UiMessage::Activity(String::new()));
        }
    }
    // An interrupt also cancels any queued-but-unsent user input.
    self.pending_input = None;
    let killed = count;
    // NOTE(review): when count == 0 and a turn was aborted above,
    // turn_in_progress has already been reset to false, so this path
    // reports the plain "(interrupted)" message — verify intended.
    if killed > 0 || self.turn_in_progress {
        let _ = self.ui_tx.send(UiMessage::Info(format!(
            "(interrupted — killed {} process(es), turn aborted)", killed,
        )));
    } else {
        let _ = self.ui_tx.send(UiMessage::Info("(interrupted)".into()));
    }
}
|
||||
|
||||
/// Cycle reasoning effort: none → low → high → none.
|
||||
fn cycle_reasoning(&mut self, app: &mut tui::App) {
|
||||
if let Ok(mut agent_guard) = self.agent.try_lock() {
|
||||
let next = match agent_guard.reasoning_effort.as_str() {
|
||||
"none" => "low",
|
||||
"low" => "high",
|
||||
_ => "none",
|
||||
};
|
||||
agent_guard.reasoning_effort = next.to_string();
|
||||
app.reasoning_effort = next.to_string();
|
||||
let label = match next {
|
||||
"none" => "off (monologue hidden)",
|
||||
"low" => "low (brief monologue)",
|
||||
"high" => "high (full monologue)",
|
||||
_ => next,
|
||||
};
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!("Reasoning: {} — ^R to cycle", label)));
|
||||
} else {
|
||||
let _ = self.ui_tx.send(UiMessage::Info(
|
||||
"(agent busy — reasoning change takes effect next turn)".into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
/// Show and kill running tool calls (Ctrl+K).
|
||||
async fn kill_processes(&mut self) {
|
||||
let active_tools = self.agent.lock().await.active_tools.clone();
|
||||
let mut tools = active_tools.lock().unwrap();
|
||||
if tools.is_empty() {
|
||||
let _ = self.ui_tx.send(UiMessage::Info("(no running tool calls)".into()));
|
||||
} else {
|
||||
for entry in tools.drain(..) {
|
||||
let elapsed = entry.started.elapsed();
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!(
|
||||
" killing {} ({:.0}s): {}", entry.name, elapsed.as_secs_f64(), entry.detail,
|
||||
)));
|
||||
entry.handle.abort();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Cycle DMN autonomy: foraging → resting → paused → off → foraging.
|
||||
fn cycle_autonomy(&mut self) {
|
||||
let (new_state, label) = match &self.dmn {
|
||||
dmn::State::Engaged | dmn::State::Working | dmn::State::Foraging => {
|
||||
(dmn::State::Resting { since: Instant::now() }, "resting")
|
||||
}
|
||||
dmn::State::Resting { .. } => (dmn::State::Paused, "PAUSED"),
|
||||
dmn::State::Paused => {
|
||||
dmn::set_off(true);
|
||||
(dmn::State::Off, "OFF (persists across restarts)")
|
||||
}
|
||||
dmn::State::Off => {
|
||||
dmn::set_off(false);
|
||||
(dmn::State::Foraging, "foraging")
|
||||
}
|
||||
};
|
||||
self.dmn = new_state;
|
||||
self.dmn_turns = 0;
|
||||
let _ = self.ui_tx.send(UiMessage::Info(format!("DMN → {} (Ctrl+P to cycle)", label)));
|
||||
self.update_status();
|
||||
}
|
||||
|
||||
/// Switch to a named model from the config registry.
///
/// Resolves `name` to a model entry, swaps the agent's API client,
/// and updates session config to match. When the new model uses a
/// different prompt file, the context is recompacted so the new
/// system prompt takes effect. Refuses to act mid-turn.
async fn switch_model(&mut self, name: &str) {
    // The running turn owns the current client — don't swap under it.
    if self.turn_in_progress {
        let _ = self.ui_tx.send(UiMessage::Info("(turn in progress, please wait)".into()));
        return;
    }

    // Look the name up in the registry; report and bail on failure.
    let resolved = match self.config.app.resolve_model(name) {
        Ok(r) => r,
        Err(e) => {
            let _ = self.ui_tx.send(UiMessage::Info(format!("{}", e)));
            return;
        }
    };

    let new_client = ApiClient::new(&resolved.api_base, &resolved.api_key, &resolved.model_id);

    // Remember whether the prompt file differs before config is overwritten.
    let prompt_changed = resolved.prompt_file != self.config.prompt_file;
    let mut agent_guard = self.agent.lock().await;
    agent_guard.swap_client(new_client);

    // Keep session config in sync with the newly active model.
    self.config.model = resolved.model_id.clone();
    self.config.api_base = resolved.api_base;
    self.config.api_key = resolved.api_key;

    if prompt_changed {
        // Propagate the new prompt file to both config and agent, then
        // recompact so the new system prompt is actually used.
        self.config.prompt_file = resolved.prompt_file.clone();
        agent_guard.prompt_file = resolved.prompt_file.clone();
        agent_guard.compact();
        let _ = self.ui_tx.send(UiMessage::Info(format!(
            "Switched to {} ({}) — prompt: {}, recompacted",
            name, resolved.model_id, resolved.prompt_file,
        )));
    } else {
        let _ = self.ui_tx.send(UiMessage::Info(format!(
            "Switched to {} ({})", name, resolved.model_id,
        )));
    }

    // Release the agent lock before emitting status/context updates.
    drop(agent_guard);
    self.update_status();
    self.send_context_info();
}
|
||||
|
||||
/// Snapshot the context groups from the global config registry.
fn load_context_groups(&self) -> Vec<config::ContextGroup> {
    let cfg = config::get();
    cfg.context_groups.clone()
}
|
||||
|
||||
fn send_context_info(&self) {
|
||||
let context_groups = self.load_context_groups();
|
||||
let (instruction_files, memory_files) = identity::context_file_info(
|
||||
&self.config.prompt_file,
|
||||
self.config.app.memory_project.as_deref(),
|
||||
&context_groups,
|
||||
);
|
||||
let _ = self.ui_tx.send(UiMessage::ContextInfoUpdate(ContextInfo {
|
||||
model: self.config.model.clone(),
|
||||
available_models: self.config.app.model_names(),
|
||||
prompt_file: self.config.prompt_file.clone(),
|
||||
backend: self.config.app.backend.clone(),
|
||||
instruction_files,
|
||||
memory_files,
|
||||
system_prompt_chars: self.config.system_prompt.len(),
|
||||
context_message_chars: self.config.context_parts.iter().map(|(_, c)| c.len()).sum(),
|
||||
}));
|
||||
}
|
||||
|
||||
fn update_status(&self) {
|
||||
let _ = self.ui_tx.send(UiMessage::StatusUpdate(StatusInfo {
|
||||
dmn_state: self.dmn.label().to_string(),
|
||||
dmn_turns: self.dmn_turns,
|
||||
dmn_max_turns: self.max_dmn_turns,
|
||||
prompt_tokens: 0,
|
||||
completion_tokens: 0,
|
||||
model: String::new(),
|
||||
turn_tools: 0,
|
||||
context_budget: String::new(),
|
||||
}));
|
||||
}
|
||||
|
||||
async fn shutdown(&mut self) {
|
||||
if let Some(handle) = self.turn_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// --- Event loop ---

/// Main entry point: set up config, channels, agent, TUI, and the
/// observation socket, then run the event loop until the user quits.
///
/// The loop multiplexes (via `tokio::select!`, biased toward user
/// input): terminal events, observation-socket input, finished turns,
/// a 50 ms render tick, the DMN timer, channel-status results, and UI
/// messages. Rendering is dirty-flagged so the terminal is only
/// redrawn when something changed.
pub async fn run(cli: crate::user::CliArgs) -> Result<()> {
    let (config, _figment) = config::load_session(&cli)?;

    // Debug flag is exported via env so spawned code can see it.
    if config.app.debug {
        unsafe { std::env::set_var("POC_DEBUG", "1") };
    }

    // Start channel daemons
    let mut channel_supervisor = crate::thalamus::supervisor::Supervisor::new();
    channel_supervisor.load_config();
    channel_supervisor.ensure_running();

    // Initialize idle state machine
    let mut idle_state = crate::thalamus::idle::State::new();
    idle_state.load();

    // Channel status fetcher: results arrive on channel_rx; an initial
    // fetch is kicked off immediately in the background.
    let (channel_tx, mut channel_rx) = tokio::sync::mpsc::channel::<Vec<(String, bool, u32)>>(4);
    {
        let tx = channel_tx.clone();
        tokio::spawn(async move {
            let result = crate::thalamus::channels::fetch_all_channels().await;
            let _ = tx.send(result).await;
        });
    }

    // NOTE(review): pending_notifications is pushed to below but never
    // drained anywhere in this function — possibly dead state; confirm.
    let notify_rx = crate::thalamus::channels::subscribe_all();
    let mut pending_notifications: Vec<crate::thalamus::channels::ChannelNotification> = Vec::new();

    // Create UI channel
    let (ui_tx, mut ui_rx) = ui_channel::channel();

    // Shared state (context snapshot + active-tools registry), shared
    // between the agent and the TUI.
    let shared_context = ui_channel::shared_context_state();
    let shared_active_tools = ui_channel::shared_active_tools();

    // Initialize TUI
    let mut terminal = tui::init_terminal()?;
    let mut app = tui::App::new(config.model.clone(), shared_context.clone(), shared_active_tools.clone());

    // Startup info banner in the conversation pane.
    let _ = ui_tx.send(UiMessage::Info("consciousness v0.3 (tui)".into()));
    let _ = ui_tx.send(UiMessage::Info(format!(
        " model: {} (available: {})", config.model, config.app.model_names().join(", "),
    )));
    let client = ApiClient::new(&config.api_base, &config.api_key, &config.model);
    let _ = ui_tx.send(UiMessage::Info(format!(" api: {} ({})", config.api_base, client.backend_label())));
    let _ = ui_tx.send(UiMessage::Info(format!(
        " context: {}K chars ({} config, {} memory files)",
        config.context_parts.iter().map(|(_, c)| c.len()).sum::<usize>() / 1024,
        config.config_file_count, config.memory_file_count,
    )));

    // Append-only conversation log; fatal if it cannot be created.
    let conversation_log_path = config.session_dir.join("conversation.jsonl");
    let conversation_log = log::ConversationLog::new(conversation_log_path.clone())
        .expect("failed to create conversation log");
    let _ = ui_tx.send(UiMessage::Info(format!(" log: {}", conversation_log.path().display())));

    let agent = Arc::new(Mutex::new(Agent::new(
        client,
        config.system_prompt.clone(),
        config.context_parts.clone(),
        config.app.clone(),
        config.prompt_file.clone(),
        Some(conversation_log),
        shared_context,
        shared_active_tools,
    )));

    // Restore conversation from log and replay it into the UI.
    {
        let mut agent_guard = agent.lock().await;
        if agent_guard.restore_from_log() {
            ui_channel::replay_session_to_ui(agent_guard.entries(), &ui_tx);
            let _ = ui_tx.send(UiMessage::Info("--- restored from conversation log ---".into()));
        }
    }

    // Send initial budget to status bar
    {
        let agent_guard = agent.lock().await;
        let _ = ui_tx.send(UiMessage::StatusUpdate(StatusInfo {
            dmn_state: "resting".to_string(),
            dmn_turns: 0, dmn_max_turns: 0,
            prompt_tokens: 0, completion_tokens: 0,
            model: agent_guard.model().to_string(),
            turn_tools: 0,
            context_budget: agent_guard.budget().status_string(),
        }));
    }

    // Capacity 1: only one turn result can be in flight at a time.
    let (turn_tx, mut turn_rx) = mpsc::channel::<(Result<TurnResult>, StreamTarget)>(1);

    let mut session = Session::new(agent, config, ui_tx.clone(), turn_tx);
    session.update_status();
    session.send_context_info();

    // Start observation socket (see observe.rs): live streaming out,
    // external input in.
    let socket_path = session.config.session_dir.join("agent.sock");
    let (observe_input_tx, mut observe_input_rx) = observe::input_channel();
    observe::start(socket_path, ui_tx.subscribe(), observe_input_tx);

    let mut reader = EventStream::new();

    // 50 ms render cadence; skipped ticks are dropped, not bunched.
    let mut render_interval = tokio::time::interval(Duration::from_millis(50));
    render_interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
    let mut dirty = true;

    terminal.hide_cursor()?;

    // Initial render
    app.drain_messages(&mut ui_rx);
    terminal.draw(|f| app.draw(f))?;

    loop {
        // DMN interval depends on current state (engaged/foraging/...).
        let timeout = session.dmn_interval();

        tokio::select! {
            // `biased` gives terminal events priority over timers.
            biased;

            maybe_event = reader.next() => {
                match maybe_event {
                    Some(Ok(Event::Key(key))) => {
                        // Ignore key release/repeat events.
                        if key.kind != KeyEventKind::Press { continue; }
                        app.handle_key(key);
                        idle_state.user_activity();
                        // Viewing the Thalamus screen triggers a fresh
                        // background channel-status fetch.
                        if app.screen == tui::Screen::Thalamus {
                            let tx = channel_tx.clone();
                            tokio::spawn(async move {
                                let result = crate::thalamus::channels::fetch_all_channels().await;
                                let _ = tx.send(result).await;
                            });
                        }
                        dirty = true;
                    }
                    Some(Ok(Event::Mouse(mouse))) => {
                        app.handle_mouse(mouse);
                        dirty = true;
                    }
                    Some(Ok(Event::Resize(w, h))) => {
                        app.handle_resize(w, h);
                        // Full clear so stale cells don't survive resize.
                        terminal.clear()?;
                        dirty = true;
                    }
                    // Terminal stream error or EOF: exit the loop.
                    Some(Err(_)) => break,
                    None => break,
                    _ => continue,
                }
            }

            // Input arriving over the observation socket is queued as if
            // the user had typed it.
            Some(line) = observe_input_rx.recv() => {
                app.submitted.push(line);
                dirty = true;
            }

            // A spawned turn finished; hand its result to the session.
            Some((result, target)) = turn_rx.recv() => {
                session.handle_turn_result(result, target).await;
                idle_state.response_activity();
                dirty = true;
            }

            _ = render_interval.tick() => {
                // Refresh the running-process count shown in the UI.
                let new_count = session.agent.lock().await.active_tools.lock().unwrap().len() as u32;
                if new_count != app.running_processes {
                    app.running_processes = new_count;
                    dirty = true;
                }
                idle_state.decay_ewma();
                app.update_idle(&idle_state);

                // Each channel notification triggers a status refetch.
                while let Ok(notif) = notify_rx.try_recv() {
                    pending_notifications.push(notif);
                    let tx = channel_tx.clone();
                    tokio::spawn(async move {
                        let result = crate::thalamus::channels::fetch_all_channels().await;
                        let _ = tx.send(result).await;
                    });
                }
            }

            // DMN timer: only fires when no turn is in progress.
            _ = tokio::time::sleep(timeout), if !session.turn_in_progress => {
                session.dmn_tick();
                dirty = true;
            }

            Some(channels) = channel_rx.recv() => {
                app.set_channel_status(channels);
                dirty = true;
            }

            Some(msg) = ui_rx.recv() => {
                app.handle_ui_message(msg);
                dirty = true;
            }
        }

        // Process submitted input: slash commands are handled here,
        // anything else is forwarded to the agent.
        let submitted: Vec<String> = app.submitted.drain(..).collect();
        for input in submitted {
            let input = input.trim().to_string();
            if input.is_empty() { continue; }
            match session.handle_command(&input).await {
                Command::Quit => app.should_quit = true,
                Command::Handled => {}
                Command::None => session.submit_input(input),
            }
        }

        // Process hotkey actions
        let actions: Vec<HotkeyAction> = app.hotkey_actions.drain(..).collect();
        for action in actions {
            match action {
                HotkeyAction::CycleReasoning => session.cycle_reasoning(&mut app),
                HotkeyAction::KillProcess => session.kill_processes().await,
                HotkeyAction::Interrupt => session.interrupt().await,
                HotkeyAction::CycleAutonomy => session.cycle_autonomy(),
            }
        }

        if app.drain_messages(&mut ui_rx) {
            dirty = true;
        }

        // Redraw only when something actually changed this iteration.
        if dirty {
            terminal.draw(|f| app.draw(f))?;
            dirty = false;
        }

        if app.should_quit {
            break;
        }
    }

    session.shutdown().await;
    tui::restore_terminal(&mut terminal)?;
    Ok(())
}
|
||||
316
src/mind/observe.rs
Normal file
316
src/mind/observe.rs
Normal file
|
|
@ -0,0 +1,316 @@
|
|||
// observe.rs — Shared observation socket + logfile
|
||||
//
|
||||
// Two mechanisms:
|
||||
// 1. Logfile (~/.consciousness/agent-sessions/observe.log) — append-only
|
||||
// plain text of the conversation. `poc-agent read` prints new
|
||||
// content since last read using a byte-offset cursor file.
|
||||
// 2. Unix socket — for live streaming (`poc-agent read -f`) and
|
||||
// sending input (`poc-agent write <msg>`).
|
||||
//
|
||||
// The logfile is the history. The socket is the live wire.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::net::{UnixListener, UnixStream};
|
||||
use tokio::sync::{broadcast, Mutex};
|
||||
|
||||
use crate::user::ui_channel::UiMessage;
|
||||
|
||||
fn format_message(msg: &UiMessage) -> Option<String> {
|
||||
match msg {
|
||||
UiMessage::TextDelta(text, _) => {
|
||||
let t = text.trim_end();
|
||||
if t.is_empty() { None } else { Some(t.to_string()) }
|
||||
}
|
||||
UiMessage::UserInput(text) => Some(format!("\n> {}", text)),
|
||||
UiMessage::ToolCall { name, args_summary } => {
|
||||
if args_summary.is_empty() {
|
||||
Some(format!("[{}]", name))
|
||||
} else {
|
||||
Some(format!("[{}: {}]", name, args_summary))
|
||||
}
|
||||
}
|
||||
UiMessage::ToolResult { name, result } => {
|
||||
let preview: String = result.lines().take(3).collect::<Vec<_>>().join("\n");
|
||||
if name.is_empty() {
|
||||
Some(format!(" → {}", preview))
|
||||
} else {
|
||||
Some(format!(" → {}: {}", name, preview))
|
||||
}
|
||||
}
|
||||
UiMessage::DmnAnnotation(text) => Some(text.clone()),
|
||||
UiMessage::Info(text) if !text.is_empty() => Some(text.clone()),
|
||||
UiMessage::Reasoning(text) => {
|
||||
let t = text.trim();
|
||||
if t.is_empty() { None } else { Some(format!("(thinking: {})", t)) }
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Sender half for lines arriving via the observation socket.
pub type InputSender = tokio::sync::mpsc::UnboundedSender<String>;
/// Receiver half, consumed by the main event loop.
pub type InputReceiver = tokio::sync::mpsc::UnboundedReceiver<String>;

/// Create the unbounded channel that carries external input lines
/// from socket clients into the event loop.
pub fn input_channel() -> (InputSender, InputReceiver) {
    tokio::sync::mpsc::unbounded_channel()
}
|
||||
|
||||
/// Directory holding per-session artifacts (socket, read cursor).
fn session_dir() -> PathBuf {
    dirs::home_dir().unwrap_or_default().join(".consciousness/agent-sessions")
}

/// Unix socket used for live streaming and external input.
fn socket_path() -> PathBuf { session_dir().join("agent.sock") }
/// Append-only observation log; creates the logs directory on demand.
fn log_path() -> PathBuf {
    let dir = dirs::home_dir().unwrap_or_default().join(".consciousness/logs");
    // Best-effort: a failure here surfaces when the file is opened.
    let _ = std::fs::create_dir_all(&dir);
    dir.join("observe.log")
}
/// Byte-offset cursor recording how far `read` has consumed the log.
fn cursor_path() -> PathBuf { session_dir().join("read-cursor") }
|
||||
|
||||
// --- Client commands ---
|
||||
|
||||
/// Print new output since last read. With -f, stream live. With block, wait for one response.
///
/// Catch-up phase: read the logfile from the saved byte offset and
/// advance the cursor. Live phase (only with `follow` or `block`):
/// connect to the Unix socket and echo lines as they arrive; in
/// blocking mode, stop after the next user-input line is seen.
pub async fn cmd_read_inner(follow: bool, block: bool, debug: bool) -> anyhow::Result<()> {
    use std::io::{Read, Seek, SeekFrom, Write};

    let log = log_path();
    let cursor = cursor_path();

    if debug {
        eprintln!("log: {}", log.display());
    }

    // Last-read byte offset; a missing or unparsable cursor file means
    // "start from the beginning".
    let offset: u64 = std::fs::read_to_string(&cursor)
        .ok()
        .and_then(|s| s.trim().parse().ok())
        .unwrap_or(0);

    if let Ok(mut f) = std::fs::File::open(&log) {
        let len = f.metadata()?.len();
        if offset < len {
            // Print everything appended since the cursor position.
            f.seek(SeekFrom::Start(offset))?;
            let mut buf = String::new();
            f.read_to_string(&mut buf)?;
            print!("{}", buf);
            let _ = std::io::stdout().flush();
        } else if !follow && !block {
            println!("(nothing new)");
        }
        // Advance the cursor to end-of-file (best-effort write).
        let _ = std::fs::write(&cursor, len.to_string());
    } else if !follow && !block {
        println!("(no log yet — is consciousness running?)");
        return Ok(());
    }

    // One-shot mode is done after the catch-up read.
    if !follow && !block {
        return Ok(());
    }

    // -f or --block: connect to socket for live output
    let sock = socket_path();
    let stream = UnixStream::connect(&sock).await
        .map_err(|e| anyhow::anyhow!(
            "can't connect for live streaming — is consciousness running? ({})", e
        ))?;

    let (reader, _) = stream.into_split();
    let mut reader = BufReader::new(reader);
    let mut line = String::new();

    loop {
        line.clear();
        match reader.read_line(&mut line).await {
            // Server closed the connection.
            Ok(0) => break,
            Ok(_) => {
                print!("{}", line);
                let _ = std::io::stdout().lock().flush();

                // In blocking mode, stop when we see a new user input
                // Format: "> X: " where X is a speaker (P, K, etc.)
                // NOTE(review): this matches any "> ...:..." line — a
                // colon anywhere after "> " ends blocking mode, which
                // could fire on echoed content too; confirm acceptable.
                if block && line.trim_start().starts_with("> ") {
                    let after_gt = line.trim_start().strip_prefix("> ").unwrap_or("");
                    if after_gt.contains(':') {
                        break;
                    }
                }
            }
            Err(_) => break,
        }
    }
    Ok(())
}
|
||||
|
||||
/// Send a message to the running agent.
|
||||
pub async fn cmd_write(message: &str, debug: bool) -> anyhow::Result<()> {
|
||||
let sock = socket_path();
|
||||
if debug {
|
||||
eprintln!("connecting to {}", sock.display());
|
||||
}
|
||||
let stream = UnixStream::connect(&sock).await
|
||||
.map_err(|e| anyhow::anyhow!(
|
||||
"can't connect — is consciousness running? ({})", e
|
||||
))?;
|
||||
|
||||
let (_, mut writer) = stream.into_split();
|
||||
writer.write_all(message.as_bytes()).await?;
|
||||
writer.write_all(b"\n").await?;
|
||||
writer.shutdown().await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// --- Server ---
|
||||
|
||||
/// Start the observation socket + logfile writer.
///
/// Spawns two background tasks: one that drains broadcast UiMessages
/// into the logfile and a line-broadcast channel, and one that accepts
/// socket clients (each client gets live output and can send input
/// lines back via `input_tx`).
pub fn start(
    socket_path_override: PathBuf,
    mut ui_rx: broadcast::Receiver<UiMessage>,
    input_tx: InputSender,
) {
    // Remove any stale socket left behind by a previous run.
    let _ = std::fs::remove_file(&socket_path_override);

    let listener = UnixListener::bind(&socket_path_override)
        .expect("failed to bind observation socket");

    // Open logfile (append-only; fatal if it cannot be opened).
    let logfile = Arc::new(Mutex::new(
        std::fs::OpenOptions::new()
            .create(true)
            .append(true)
            .open(log_path())
            .expect("failed to open observe log"),
    ));

    // Fan-out channel: every formatted log line is broadcast to all
    // connected socket clients.
    let (line_tx, _) = broadcast::channel::<String>(256);
    let line_tx2 = line_tx.clone();

    // Receive UiMessages → write to logfile + broadcast to socket clients.
    // TextDelta and Reasoning tokens are buffered and flushed on turn
    // boundaries so the log reads as complete messages, not token fragments.
    tokio::spawn(async move {
        let mut text_buf = String::new();
        let mut reasoning_buf = String::new();

        loop {
            match ui_rx.recv().await {
                Ok(msg) => {
                    // Buffer streaming tokens
                    match &msg {
                        UiMessage::TextDelta(text, _) => {
                            text_buf.push_str(text);
                            continue;
                        }
                        UiMessage::Reasoning(text) => {
                            reasoning_buf.push_str(text);
                            continue;
                        }
                        _ => {}
                    }

                    // Any non-streaming message marks a boundary:
                    // Flush reasoning buffer as one line
                    if !reasoning_buf.is_empty() {
                        let thinking = format!("(thinking: {})", reasoning_buf.trim());
                        use std::io::Write;
                        let mut f = logfile.lock().await;
                        let _ = writeln!(f, "{}", thinking);
                        let _ = f.flush();
                        let _ = line_tx2.send(thinking);
                        reasoning_buf.clear();
                    }

                    // Flush text buffer
                    if !text_buf.is_empty() {
                        use std::io::Write;
                        let mut f = logfile.lock().await;
                        let _ = writeln!(f, "{}", text_buf);
                        let _ = f.flush();
                        let _ = line_tx2.send(std::mem::take(&mut text_buf));
                    }

                    // Write the non-streaming message
                    if let Some(line) = format_message(&msg) {
                        use std::io::Write;
                        let mut f = logfile.lock().await;
                        let _ = writeln!(f, "{}", line);
                        let _ = f.flush();
                        let _ = line_tx2.send(line);
                    }
                }
                // Falling behind the broadcast is acceptable for a log.
                Err(broadcast::error::RecvError::Lagged(_)) => {}
                Err(broadcast::error::RecvError::Closed) => {
                    // Sender gone: flush any remaining buffered content
                    // before the writer task exits.
                    use std::io::Write;
                    if !reasoning_buf.is_empty() {
                        let thinking = format!("(thinking: {})", reasoning_buf.trim());
                        let mut f = logfile.lock().await;
                        let _ = writeln!(f, "{}", thinking);
                        let _ = f.flush();
                        let _ = line_tx2.send(thinking);
                    }
                    if !text_buf.is_empty() {
                        let mut f = logfile.lock().await;
                        let _ = writeln!(f, "{}", text_buf);
                        let _ = f.flush();
                        let _ = line_tx2.send(text_buf);
                    }
                    break;
                }
            }
        }
    });

    // Accept socket connections (live streaming + input)
    tokio::spawn(async move {
        loop {
            match listener.accept().await {
                Ok((stream, _)) => {
                    // Each client gets its own broadcast subscription
                    // and a clone of the input sender.
                    let mut line_rx = line_tx.subscribe();
                    let input_tx = input_tx.clone();

                    tokio::spawn(async move {
                        let (reader, mut writer) = stream.into_split();
                        let mut reader = BufReader::new(reader);
                        let mut input_buf = String::new();

                        loop {
                            tokio::select! {
                                biased;

                                // Client → agent: forward non-empty lines.
                                result = reader.read_line(&mut input_buf) => {
                                    match result {
                                        Ok(0) | Err(_) => break,
                                        Ok(_) => {
                                            let line = input_buf.trim().to_string();
                                            if !line.is_empty() {
                                                let _ = input_tx.send(line);
                                            }
                                            input_buf.clear();
                                        }
                                    }
                                }

                                // Agent → client: forward broadcast lines.
                                result = line_rx.recv() => {
                                    match result {
                                        Ok(line) => {
                                            let data = format!("{}\n", line);
                                            if writer.write_all(data.as_bytes()).await.is_err() {
                                                break;
                                            }
                                            let _ = writer.flush().await;
                                        }
                                        // Slow client: note the gap, keep going.
                                        Err(broadcast::error::RecvError::Lagged(_)) => {
                                            let _ = writer.write_all(
                                                b"[some output was dropped]\n"
                                            ).await;
                                        }
                                        Err(broadcast::error::RecvError::Closed) => break,
                                    }
                                }
                            }
                        }
                    });
                }
                Err(_) => break,
            }
        }
    });
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue