From 36bde60ba026afcd9b7f012fcea81c66c9695cf8 Mon Sep 17 00:00:00 2001 From: ProofOfConcept Date: Fri, 27 Mar 2026 15:27:33 -0400 Subject: [PATCH] thought: wire up agent and subconscious to use shared tools - agent/tools/mod.rs: remove duplicated tool implementations, delegate to thought::dispatch for shared tools, keep only agent-specific tools (control, vision, working_stack) - subconscious/api.rs: replace duplicated memory/tool dispatch with thought::dispatch, use thought::all_definitions() for tool schemas - Delete agent/tools/{bash,read,write,edit,grep,glob_tool,journal,memory}.rs (now live in thought/) Both poc-agent and subconscious agents now use the same tool implementations through the thought layer. Agent-specific behavior (node tracking in runner.rs, control tools) stays in agent/. --- src/agent/tools/bash.rs | 197 ------------------------ src/agent/tools/edit.rs | 90 ----------- src/agent/tools/glob_tool.rs | 87 ----------- src/agent/tools/grep.rs | 129 ---------------- src/agent/tools/journal.rs | 68 -------- src/agent/tools/memory.rs | 290 ----------------------------------- src/agent/tools/mod.rs | 120 +++------------ src/agent/tools/read.rs | 65 -------- src/agent/tools/write.rs | 51 ------ src/subconscious/api.rs | 35 +---- 10 files changed, 31 insertions(+), 1101 deletions(-) delete mode 100644 src/agent/tools/bash.rs delete mode 100644 src/agent/tools/edit.rs delete mode 100644 src/agent/tools/glob_tool.rs delete mode 100644 src/agent/tools/grep.rs delete mode 100644 src/agent/tools/journal.rs delete mode 100644 src/agent/tools/memory.rs delete mode 100644 src/agent/tools/read.rs delete mode 100644 src/agent/tools/write.rs diff --git a/src/agent/tools/bash.rs b/src/agent/tools/bash.rs deleted file mode 100644 index 4138431..0000000 --- a/src/agent/tools/bash.rs +++ /dev/null @@ -1,197 +0,0 @@ -// tools/bash.rs — Execute shell commands -// -// Runs commands through bash -c with a configurable timeout. 
-// Uses tokio's async process spawning so timeouts actually work. -// -// Processes are tracked in a shared ProcessTracker so the TUI can -// display running commands and the user can kill them (Ctrl+K). - -use anyhow::{Context, Result}; -use serde::Deserialize; -use serde_json::json; -use std::process::Stdio; -use std::sync::Arc; -use std::time::Instant; -use tokio::io::AsyncReadExt; -use tokio::sync::Mutex; - -use crate::agent::types::ToolDef; - -#[derive(Deserialize)] -struct Args { - command: String, - #[serde(default = "default_timeout")] - timeout_secs: u64, -} - -fn default_timeout() -> u64 { 120 } - -/// Info about a running child process, visible to the TUI. -#[derive(Debug, Clone)] -pub struct ProcessInfo { - pub pid: u32, - pub command: String, - pub started: Instant, -} - -/// Shared tracker for running child processes. Allows the TUI to -/// display what's running and kill processes by PID. -#[derive(Debug, Clone, Default)] -pub struct ProcessTracker { - inner: Arc>>, -} - -impl ProcessTracker { - pub fn new() -> Self { - Self::default() - } - - async fn register(&self, pid: u32, command: &str) { - self.inner.lock().await.push(ProcessInfo { - pid, - command: if command.len() > 120 { - format!("{}...", &command[..120]) - } else { - command.to_string() - }, - started: Instant::now(), - }); - } - - async fn unregister(&self, pid: u32) { - self.inner.lock().await.retain(|p| p.pid != pid); - } - - /// Snapshot of currently running processes. - pub async fn list(&self) -> Vec { - self.inner.lock().await.clone() - } - - /// Kill a process by PID. Returns true if the signal was sent. 
- pub async fn kill(&self, pid: u32) -> bool { - // SIGTERM the process group (negative PID kills the group) - let ret = unsafe { libc::kill(-(pid as i32), libc::SIGTERM) }; - if ret != 0 { - // Try just the process - unsafe { libc::kill(pid as i32, libc::SIGTERM) }; - } - // Don't unregister — let the normal exit path do that - // so the tool result says "killed by user" - true - } -} - -pub fn definition() -> ToolDef { - ToolDef::new( - "bash", - "Execute a bash command and return its output. \ - Use for git operations, building, running tests, and other terminal tasks.", - json!({ - "type": "object", - "properties": { - "command": { - "type": "string", - "description": "The bash command to execute" - }, - "timeout_secs": { - "type": "integer", - "description": "Timeout in seconds (default 120)" - } - }, - "required": ["command"] - }), - ) -} - -pub async fn run_bash(args: &serde_json::Value, tracker: &ProcessTracker) -> Result { - let a: Args = serde_json::from_value(args.clone()) - .context("invalid bash arguments")?; - let command = &a.command; - let timeout_secs = a.timeout_secs; - - let mut child = tokio::process::Command::new("bash") - .arg("-c") - .arg(command) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - // Create a process group so we can kill the whole tree - .process_group(0) - .spawn() - .with_context(|| format!("Failed to spawn: {}", command))?; - - let pid = child.id().unwrap_or(0); - tracker.register(pid, command).await; - - // Take ownership of stdout/stderr handles before waiting, - // so we can still kill the child on timeout. 
- let mut stdout_handle = child.stdout.take().unwrap(); - let mut stderr_handle = child.stderr.take().unwrap(); - - let timeout = std::time::Duration::from_secs(timeout_secs); - - let work = async { - let mut stdout_buf = Vec::new(); - let mut stderr_buf = Vec::new(); - - let (_, _, status) = tokio::try_join!( - async { stdout_handle.read_to_end(&mut stdout_buf).await.map_err(anyhow::Error::from) }, - async { stderr_handle.read_to_end(&mut stderr_buf).await.map_err(anyhow::Error::from) }, - async { child.wait().await.map_err(anyhow::Error::from) }, - )?; - - Ok::<_, anyhow::Error>((stdout_buf, stderr_buf, status)) - }; - - let result = match tokio::time::timeout(timeout, work).await { - Ok(Ok((stdout_buf, stderr_buf, status))) => { - let stdout = String::from_utf8_lossy(&stdout_buf); - let stderr = String::from_utf8_lossy(&stderr_buf); - - let mut result = String::new(); - - if !stdout.is_empty() { - result.push_str(&stdout); - } - if !stderr.is_empty() { - if !result.is_empty() { - result.push('\n'); - } - result.push_str("STDERR:\n"); - result.push_str(&stderr); - } - - // Detect if killed by signal (SIGTERM = 15) - if let Some(signal) = status.code() { - if signal == -1 || !status.success() { - result.push_str(&format!("\nExit code: {}", signal)); - } - } - #[cfg(unix)] - { - use std::os::unix::process::ExitStatusExt; - if let Some(sig) = status.signal() { - if sig == libc::SIGTERM { - result.push_str("\n(killed by user)"); - } - } - } - - if result.is_empty() { - result = "(no output)".to_string(); - } - - Ok(super::truncate_output(result, 30000)) - } - Ok(Err(e)) => { - Err(anyhow::anyhow!("Command failed: {}", e)) - } - Err(_) => { - // Timeout — kill the process group - tracker.kill(pid).await; - Err(anyhow::anyhow!("Command timed out after {}s: {}", timeout_secs, command)) - } - }; - - tracker.unregister(pid).await; - result -} diff --git a/src/agent/tools/edit.rs b/src/agent/tools/edit.rs deleted file mode 100644 index a49abb9..0000000 --- 
a/src/agent/tools/edit.rs +++ /dev/null @@ -1,90 +0,0 @@ -// tools/edit.rs — Search-and-replace file editing -// -// The edit tool performs exact string replacement in files. This is the -// same pattern used by Claude Code and aider — it's more reliable than -// line-number-based editing because the model specifies what it sees, -// not where it thinks it is. -// -// Supports replace_all for bulk renaming (e.g. variable renames). - -use anyhow::{Context, Result}; -use serde::Deserialize; -use serde_json::json; - -use crate::agent::types::ToolDef; - -#[derive(Deserialize)] -struct Args { - file_path: String, - old_string: String, - new_string: String, - #[serde(default)] - replace_all: bool, -} - -pub fn definition() -> ToolDef { - ToolDef::new( - "edit_file", - "Perform exact string replacement in a file. The old_string must appear \ - exactly once in the file (unless replace_all is true). Use read_file first \ - to see the current contents.", - json!({ - "type": "object", - "properties": { - "file_path": { - "type": "string", - "description": "Absolute path to the file to edit" - }, - "old_string": { - "type": "string", - "description": "The exact text to find and replace" - }, - "new_string": { - "type": "string", - "description": "The replacement text" - }, - "replace_all": { - "type": "boolean", - "description": "Replace all occurrences (default false)" - } - }, - "required": ["file_path", "old_string", "new_string"] - }), - ) -} - -pub fn edit_file(args: &serde_json::Value) -> Result { - let a: Args = serde_json::from_value(args.clone()) - .context("invalid edit_file arguments")?; - - if a.old_string == a.new_string { - anyhow::bail!("old_string and new_string are identical"); - } - - let content = std::fs::read_to_string(&a.file_path) - .with_context(|| format!("Failed to read {}", a.file_path))?; - - let count = content.matches(&*a.old_string).count(); - if count == 0 { - anyhow::bail!("old_string not found in {}", a.file_path); - } - - if a.replace_all { - 
let new_content = content.replace(&*a.old_string, &a.new_string); - std::fs::write(&a.file_path, &new_content) - .with_context(|| format!("Failed to write {}", a.file_path))?; - Ok(format!("Replaced {} occurrences in {}", count, a.file_path)) - } else { - if count > 1 { - anyhow::bail!( - "old_string appears {} times in {} — use replace_all or provide more context \ - to make it unique", - count, a.file_path - ); - } - let new_content = content.replacen(&*a.old_string, &a.new_string, 1); - std::fs::write(&a.file_path, &new_content) - .with_context(|| format!("Failed to write {}", a.file_path))?; - Ok(format!("Edited {}", a.file_path)) - } -} diff --git a/src/agent/tools/glob_tool.rs b/src/agent/tools/glob_tool.rs deleted file mode 100644 index 9361ecd..0000000 --- a/src/agent/tools/glob_tool.rs +++ /dev/null @@ -1,87 +0,0 @@ -// tools/glob_tool.rs — Find files by pattern -// -// Fast file discovery using glob patterns. Returns matching paths -// sorted by modification time (newest first), which is usually -// what you want when exploring a codebase. - -use anyhow::{Context, Result}; -use serde::Deserialize; -use serde_json::json; -use std::path::PathBuf; - -use crate::agent::types::ToolDef; - -#[derive(Deserialize)] -struct Args { - pattern: String, - #[serde(default = "default_path")] - path: String, -} - -fn default_path() -> String { ".".into() } - -pub fn definition() -> ToolDef { - ToolDef::new( - "glob", - "Find files matching a glob pattern. Returns file paths sorted by \ - modification time (newest first). Use patterns like '**/*.rs', \ - 'src/**/*.ts', or 'Cargo.toml'.", - json!({ - "type": "object", - "properties": { - "pattern": { - "type": "string", - "description": "Glob pattern to match files (e.g. 
'**/*.rs')" - }, - "path": { - "type": "string", - "description": "Base directory to search from (default: current directory)" - } - }, - "required": ["pattern"] - }), - ) -} - -pub fn glob_search(args: &serde_json::Value) -> Result { - let a: Args = serde_json::from_value(args.clone()) - .context("invalid glob arguments")?; - - let full_pattern = if a.pattern.starts_with('/') { - a.pattern.clone() - } else { - format!("{}/{}", a.path, a.pattern) - }; - - let mut entries: Vec<(PathBuf, std::time::SystemTime)> = Vec::new(); - - for entry in glob::glob(&full_pattern) - .with_context(|| format!("Invalid glob pattern: {}", full_pattern))? - { - if let Ok(path) = entry { - if path.is_file() { - let mtime = path - .metadata() - .and_then(|m| m.modified()) - .unwrap_or(std::time::SystemTime::UNIX_EPOCH); - entries.push((path, mtime)); - } - } - } - - // Sort by modification time, newest first - entries.sort_by(|a, b| b.1.cmp(&a.1)); - - if entries.is_empty() { - return Ok("No files matched.".to_string()); - } - - let mut output = String::new(); - for (path, _) in &entries { - output.push_str(&path.display().to_string()); - output.push('\n'); - } - - output.push_str(&format!("\n({} files matched)", entries.len())); - Ok(super::truncate_output(output, 30000)) -} diff --git a/src/agent/tools/grep.rs b/src/agent/tools/grep.rs deleted file mode 100644 index a843208..0000000 --- a/src/agent/tools/grep.rs +++ /dev/null @@ -1,129 +0,0 @@ -// tools/grep.rs — Search file contents -// -// Prefers ripgrep (rg) for speed, falls back to grep -r if rg -// isn't installed. Both produce compatible output. 
- -use anyhow::{Context, Result}; -use serde::Deserialize; -use serde_json::json; -use std::process::Command; - -use crate::agent::types::ToolDef; - -#[derive(Deserialize)] -struct Args { - pattern: String, - #[serde(default = "default_path")] - path: String, - glob: Option, - #[serde(default)] - show_content: bool, - context_lines: Option, -} - -fn default_path() -> String { ".".into() } - -pub fn definition() -> ToolDef { - ToolDef::new( - "grep", - "Search for a pattern in files. Returns matching file paths by default, \ - or matching lines with context.", - json!({ - "type": "object", - "properties": { - "pattern": { - "type": "string", - "description": "Regex pattern to search for" - }, - "path": { - "type": "string", - "description": "Directory or file to search in (default: current directory)" - }, - "glob": { - "type": "string", - "description": "Glob pattern to filter files (e.g. '*.rs', '*.py')" - }, - "show_content": { - "type": "boolean", - "description": "Show matching lines instead of just file paths" - }, - "context_lines": { - "type": "integer", - "description": "Number of context lines around matches (requires show_content)" - } - }, - "required": ["pattern"] - }), - ) -} - -/// Check if ripgrep is available (cached after first check). -fn has_rg() -> bool { - use std::sync::OnceLock; - static HAS_RG: OnceLock = OnceLock::new(); - *HAS_RG.get_or_init(|| Command::new("rg").arg("--version").output().is_ok()) -} - -pub fn grep(args: &serde_json::Value) -> Result { - let a: Args = serde_json::from_value(args.clone()) - .context("invalid grep arguments")?; - - let output = if has_rg() { - run_search("rg", &a.pattern, &a.path, a.glob.as_deref(), a.show_content, a.context_lines, true)? - } else { - run_search("grep", &a.pattern, &a.path, a.glob.as_deref(), a.show_content, a.context_lines, false)? - }; - - if output.is_empty() { - return Ok("No matches found.".to_string()); - } - - Ok(super::truncate_output(output, 30000)) -} - -/// Run a grep/rg search. 
Unified implementation for both tools. -fn run_search( - tool: &str, - pattern: &str, - path: &str, - file_glob: Option<&str>, - show_content: bool, - context: Option, - use_rg: bool, -) -> Result { - let mut cmd = Command::new(tool); - - if use_rg { - // ripgrep args - if show_content { - cmd.arg("-n"); - if let Some(c) = context { - cmd.arg("-C").arg(c.to_string()); - } - } else { - cmd.arg("--files-with-matches"); - } - if let Some(g) = file_glob { - cmd.arg("--glob").arg(g); - } - } else { - // grep args - cmd.arg("-r"); // recursive - if show_content { - cmd.arg("-n"); // line numbers - if let Some(c) = context { - cmd.arg("-C").arg(c.to_string()); - } - } else { - cmd.arg("-l"); // files-with-matches - } - if let Some(g) = file_glob { - cmd.arg("--include").arg(g); - } - cmd.arg("-E"); // extended regex - } - - cmd.arg(pattern).arg(path); - let output = cmd.output().with_context(|| format!("Failed to run {}", tool))?; - Ok(String::from_utf8_lossy(&output.stdout).to_string()) -} diff --git a/src/agent/tools/journal.rs b/src/agent/tools/journal.rs deleted file mode 100644 index 8f650ed..0000000 --- a/src/agent/tools/journal.rs +++ /dev/null @@ -1,68 +0,0 @@ -// tools/journal.rs — Native journal tool -// -// Appends entries directly to the journal file without spawning a -// shell. The entry is persisted to disk immediately; -// build_context_window() picks it up on the next compaction. -// -// This tool is "ephemeral" — after the API processes the tool call -// and result, the agent strips them from the conversation history. -// The journal file is the durable store; keeping the tool call in -// context would just waste tokens on something already persisted. - -use anyhow::{Context, Result}; -use serde_json::json; - -use crate::agent::types::ToolDef; - -/// Tool name — used by the agent to identify ephemeral tool calls. -pub const TOOL_NAME: &str = "journal"; - -pub fn definition() -> ToolDef { - ToolDef::new( - TOOL_NAME, - "Write a journal entry. 
The entry is appended to your journal file \ - with an automatic timestamp. Use this for experiences, reflections, \ - observations — anything worth remembering across sessions. \ - This tool has zero context cost: entries are persisted to disk \ - and loaded by the context manager, not kept in conversation history.", - json!({ - "type": "object", - "properties": { - "entry": { - "type": "string", - "description": "The journal entry text. Write naturally — \ - experiences, not task logs." - } - }, - "required": ["entry"] - }), - ) -} - -pub fn write_entry(args: &serde_json::Value) -> Result { - let entry = args["entry"] - .as_str() - .context("entry is required")?; - - let journal_path = crate::agent::journal::default_journal_path(); - - // Ensure parent directory exists - if let Some(parent) = journal_path.parent() { - std::fs::create_dir_all(parent).ok(); - } - - let timestamp = chrono::Utc::now().format("%Y-%m-%dT%H:%M"); - - // Append with the same format as poc-journal write - use std::io::Write; - let mut file = std::fs::OpenOptions::new() - .create(true) - .append(true) - .open(&journal_path) - .with_context(|| format!("Failed to open {}", journal_path.display()))?; - - writeln!(file, "\n## {}\n\n{}", timestamp, entry) - .with_context(|| "Failed to write journal entry")?; - - Ok("Logged.".to_string()) -} diff --git a/src/agent/tools/memory.rs b/src/agent/tools/memory.rs deleted file mode 100644 index 9d7773c..0000000 --- a/src/agent/tools/memory.rs +++ /dev/null @@ -1,290 +0,0 @@ -// tools/memory.rs — Native memory graph operations -// -// Direct library calls into the store — no subprocess spawning. 
- -use anyhow::{Context, Result}; -use serde_json::json; - -use crate::hippocampus::memory::MemoryNode; -use crate::agent::types::ToolDef; -use crate::store::Store; - -pub fn definitions() -> Vec { - vec![ - ToolDef::new("memory_render", - "Read a memory node's content and links.", - json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]})), - ToolDef::new("memory_write", - "Create or update a memory node.", - json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"},"content":{"type":"string","description":"Full content (markdown)"}},"required":["key","content"]})), - ToolDef::new("memory_search", - "Search the memory graph by keyword.", - json!({"type":"object","properties":{"query":{"type":"string","description":"Search terms"}},"required":["query"]})), - ToolDef::new("memory_links", - "Show a node's neighbors with link strengths.", - json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]})), - ToolDef::new("memory_link_set", - "Set link strength between two nodes.", - json!({"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"},"strength":{"type":"number","description":"0.01 to 1.0"}},"required":["source","target","strength"]})), - ToolDef::new("memory_link_add", - "Add a new link between two nodes.", - json!({"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"}},"required":["source","target"]})), - ToolDef::new("memory_used", - "Mark a node as useful (boosts weight).", - json!({"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]})), - ToolDef::new("memory_weight_set", - "Set a node's weight directly (0.01 to 1.0).", - json!({"type":"object","properties":{"key":{"type":"string"},"weight":{"type":"number","description":"0.01 to 1.0"}},"required":["key","weight"]})), - ToolDef::new("memory_rename", - "Rename a node key in place. 
Same content, same links, new key.", - json!({"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"}},"required":["old_key","new_key"]})), - ToolDef::new("memory_supersede", - "Mark a node as superseded by another (sets weight to 0.01).", - json!({"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"},"reason":{"type":"string"}},"required":["old_key","new_key"]})), - ToolDef::new("memory_query", - "Run a structured query against the memory graph. Supports filtering, \ - sorting, field selection. Examples: \"degree > 10 | sort weight | limit 5\", \ - \"neighbors('identity') | select strength\", \"key ~ 'journal.*' | count\"", - json!({"type":"object","properties":{"query":{"type":"string","description":"Query expression"}},"required":["query"]})), - ToolDef::new("output", - "Produce a named output value. Use this to pass structured results \ - between steps — subsequent prompts can see these in the conversation history.", - json!({"type":"object","properties":{ - "key":{"type":"string","description":"Output name (e.g. 'relevant_memories')"}, - "value":{"type":"string","description":"Output value"} - },"required":["key","value"]})), - ToolDef::new("journal_tail", - "Read the last N journal entries (default 1).", - json!({"type":"object","properties":{ - "count":{"type":"integer","description":"Number of entries (default 1)"} - }})), - ToolDef::new("journal_new", - "Start a new journal entry.", - json!({"type":"object","properties":{ - "name":{"type":"string","description":"Short node name (becomes the key, e.g. 'morning-agent-breakthrough')"}, - "title":{"type":"string","description":"Descriptive title for the heading (e.g. 
'Morning intimacy and the agent breakthrough')"}, - "body":{"type":"string","description":"Entry body (2-3 paragraphs)"} - },"required":["name","title","body"]})), - ToolDef::new("journal_update", - "Append text to the most recent journal entry (same thread continuing).", - json!({"type":"object","properties":{ - "body":{"type":"string","description":"Text to append to the last entry"} - },"required":["body"]})), - ] -} - -/// Dispatch a memory tool call. Direct library calls, no subprocesses. -pub fn dispatch(name: &str, args: &serde_json::Value, provenance: Option<&str>) -> Result { - let prov = provenance.unwrap_or("manual"); - match name { - "memory_render" => { - let key = get_str(args, "key")?; - Ok(MemoryNode::load(key) - .ok_or_else(|| anyhow::anyhow!("node not found: {}", key))? - .render()) - } - "memory_write" => { - let key = get_str(args, "key")?; - let content = get_str(args, "content")?; - let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - let result = store.upsert_provenance(key, content, prov) - .map_err(|e| anyhow::anyhow!("{}", e))?; - store.save().map_err(|e| anyhow::anyhow!("{}", e))?; - Ok(format!("{} '{}'", result, key)) - } - "memory_search" => { - let query = get_str(args, "query")?; - let store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - let results = crate::search::search(query, &store); - if results.is_empty() { - Ok("no results".into()) - } else { - Ok(results.iter().take(20) - .map(|r| format!("({:.2}) {} — {}", r.activation, r.key, - r.snippet.as_deref().unwrap_or(""))) - .collect::>().join("\n")) - } - } - "memory_links" => { - let key = get_str(args, "key")?; - let node = MemoryNode::load(key) - .ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?; - let mut out = format!("Neighbors of '{}':\n", key); - for (target, strength, is_new) in &node.links { - let tag = if *is_new { " (new)" } else { "" }; - out.push_str(&format!(" ({:.2}) {}{}\n", strength, target, tag)); - } - Ok(out) - } - 
"memory_link_set" | "memory_link_add" | "memory_used" | "memory_weight_set" => { - with_store(name, args, prov) - } - "memory_rename" => { - let old_key = get_str(args, "old_key")?; - let new_key = get_str(args, "new_key")?; - let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - let resolved = store.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?; - store.rename_node(&resolved, new_key).map_err(|e| anyhow::anyhow!("{}", e))?; - store.save().map_err(|e| anyhow::anyhow!("{}", e))?; - Ok(format!("Renamed '{}' → '{}'", resolved, new_key)) - } - "memory_supersede" => { - let old_key = get_str(args, "old_key")?; - let new_key = get_str(args, "new_key")?; - let reason = args.get("reason").and_then(|v| v.as_str()).unwrap_or("superseded"); - let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - let content = store.nodes.get(old_key) - .map(|n| n.content.clone()) - .ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?; - let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}", - new_key, reason, content.trim()); - store.upsert_provenance(old_key, ¬ice, prov) - .map_err(|e| anyhow::anyhow!("{}", e))?; - store.set_weight(old_key, 0.01).map_err(|e| anyhow::anyhow!("{}", e))?; - store.save().map_err(|e| anyhow::anyhow!("{}", e))?; - Ok(format!("superseded {} → {} ({})", old_key, new_key, reason)) - } - "memory_query" => { - let query = get_str(args, "query")?; - let store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - let graph = store.build_graph(); - crate::query_parser::query_to_string(&store, &graph, query) - .map_err(|e| anyhow::anyhow!("{}", e)) - } - "output" => { - let key = get_str(args, "key")?; - if key.starts_with("pid-") || key.contains('/') || key.contains("..") { - anyhow::bail!("invalid output key: {}", key); - } - let value = get_str(args, "value")?; - let dir = std::env::var("POC_AGENT_OUTPUT_DIR") - .map_err(|_| anyhow::anyhow!("no output directory set"))?; - let path = 
std::path::Path::new(&dir).join(key); - std::fs::write(&path, value) - .with_context(|| format!("writing output {}", path.display()))?; - Ok(format!("{}: {}", key, value)) - } - "journal_tail" => { - let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(1) as usize; - let store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - let mut entries: Vec<&crate::store::Node> = store.nodes.values() - .filter(|n| n.node_type == crate::store::NodeType::EpisodicSession) - .collect(); - // Sort by creation time (immutable), not update time - entries.sort_by_key(|n| n.created_at); - let start = entries.len().saturating_sub(count); - if entries[start..].is_empty() { - Ok("(no journal entries)".into()) - } else { - Ok(entries[start..].iter() - .map(|n| n.content.as_str()) - .collect::>() - .join("\n\n")) - } - } - "journal_new" => { - let name = get_str(args, "name")?; - let title = get_str(args, "title")?; - let body = get_str(args, "body")?; - let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M"); - let content = format!("## {} — {}\n\n{}", ts, title, body); - - let base_key: String = name.split_whitespace() - .map(|w| w.to_lowercase() - .chars().filter(|c| c.is_alphanumeric() || *c == '-') - .collect::()) - .filter(|s| !s.is_empty()) - .collect::>() - .join("-"); - let base_key = if base_key.len() > 80 { &base_key[..80] } else { base_key.as_str() }; - - let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - - // Dedup: append -2, -3, etc. 
if the key already exists - let key = if store.nodes.contains_key(base_key) { - let mut n = 2; - loop { - let candidate = format!("{}-{}", base_key, n); - if !store.nodes.contains_key(&candidate) { - break candidate; - } - n += 1; - } - } else { - base_key.to_string() - }; - let mut node = crate::store::new_node(&key, &content); - node.node_type = crate::store::NodeType::EpisodicSession; - node.provenance = prov.to_string(); - store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?; - store.save().map_err(|e| anyhow::anyhow!("{}", e))?; - let word_count = body.split_whitespace().count(); - Ok(format!("New entry '{}' ({} words)", title, word_count)) - } - "journal_update" => { - let body = get_str(args, "body")?; - let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - // Find most recent EpisodicSession by creation time - let latest_key = store.nodes.values() - .filter(|n| n.node_type == crate::store::NodeType::EpisodicSession) - .max_by_key(|n| n.created_at) - .map(|n| n.key.clone()); - let Some(key) = latest_key else { - anyhow::bail!("no journal entry to update — use journal_new first"); - }; - let existing = store.nodes.get(&key).unwrap().content.clone(); - let new_content = format!("{}\n\n{}", existing.trim_end(), body); - store.upsert_provenance(&key, &new_content, prov) - .map_err(|e| anyhow::anyhow!("{}", e))?; - store.save().map_err(|e| anyhow::anyhow!("{}", e))?; - let word_count = body.split_whitespace().count(); - Ok(format!("Updated last entry (+{} words)", word_count)) - } - _ => anyhow::bail!("Unknown memory tool: {}", name), - } -} - -/// Store mutations that follow the same pattern: load, resolve, mutate, save. 
-fn with_store(name: &str, args: &serde_json::Value, prov: &str) -> Result { - let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?; - let msg = match name { - "memory_link_set" => { - let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?; - let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?; - let strength = get_f64(args, "strength")? as f32; - let old = store.set_link_strength(&s, &t, strength).map_err(|e| anyhow::anyhow!("{}", e))?; - format!("{} ↔ {} strength {:.2} → {:.2}", s, t, old, strength) - } - "memory_link_add" => { - let s = store.resolve_key(get_str(args, "source")?).map_err(|e| anyhow::anyhow!("{}", e))?; - let t = store.resolve_key(get_str(args, "target")?).map_err(|e| anyhow::anyhow!("{}", e))?; - let strength = store.add_link(&s, &t, prov).map_err(|e| anyhow::anyhow!("{}", e))?; - format!("linked {} → {} (strength={:.2})", s, t, strength) - } - "memory_used" => { - let key = get_str(args, "key")?; - if !store.nodes.contains_key(key) { - anyhow::bail!("node not found: {}", key); - } - store.mark_used(key); - format!("marked {} as used", key) - } - "memory_weight_set" => { - let key = store.resolve_key(get_str(args, "key")?).map_err(|e| anyhow::anyhow!("{}", e))?; - let weight = get_f64(args, "weight")? 
as f32; - let (old, new) = store.set_weight(&key, weight).map_err(|e| anyhow::anyhow!("{}", e))?; - format!("weight {} {:.2} → {:.2}", key, old, new) - } - _ => unreachable!(), - }; - store.save().map_err(|e| anyhow::anyhow!("{}", e))?; - Ok(msg) -} - -fn get_str<'a>(args: &'a serde_json::Value, name: &'a str) -> Result<&'a str> { - args.get(name).and_then(|v| v.as_str()).context(format!("{} is required", name)) -} - -fn get_f64(args: &serde_json::Value, name: &str) -> Result { - args.get(name).and_then(|v| v.as_f64()).context(format!("{} is required", name)) -} diff --git a/src/agent/tools/mod.rs b/src/agent/tools/mod.rs index 5813526..6a553bd 100644 --- a/src/agent/tools/mod.rs +++ b/src/agent/tools/mod.rs @@ -1,87 +1,33 @@ -// tools/mod.rs — Tool registry and dispatch +// tools/mod.rs — Agent-specific tool dispatch // -// Tools are the agent's hands. Each tool is a function that takes -// JSON arguments and returns a string result. The registry maps -// tool names to implementations and generates the JSON schema -// definitions that the model needs to know how to call them. -// -// Design note: dispatch is async to support tools that need it -// (bash timeout, future HTTP tools). Sync tools just return -// immediately from an async fn. +// Shared tools (memory, files, bash, journal) live in thought/. +// This module handles agent-specific tools (control, vision, +// working_stack) and delegates everything else to thought::dispatch. -mod bash; mod control; -mod edit; -mod glob_tool; -mod grep; -pub mod journal; -pub mod memory; -mod read; mod vision; -mod write; pub mod working_stack; -pub use bash::ProcessTracker; +// Re-export shared infrastructure from thought +pub use crate::thought::{ToolOutput, ProcessTracker, truncate_output}; +pub use crate::thought::memory; +pub use crate::thought::journal; + use crate::agent::types::ToolDef; -/// Result of dispatching a tool call. 
-pub struct ToolOutput {
-    pub text: String,
-    pub is_yield: bool,
-    /// Base64 data URIs for images to attach to the next message.
-    pub images: Vec<String>,
-    /// Model name to switch to (deferred to session level).
-    pub model_switch: Option<String>,
-    /// Agent requested DMN pause (deferred to session level).
-    pub dmn_pause: bool,
-}
-
-impl ToolOutput {
-    fn error(e: impl std::fmt::Display) -> Self {
-        Self {
-            text: format!("Error: {}", e),
-            is_yield: false,
-            images: Vec::new(),
-            model_switch: None,
-            dmn_pause: false,
-        }
-    }
-
-    fn text(s: String) -> Self {
-        Self {
-            text: s,
-            is_yield: false,
-            images: Vec::new(),
-            model_switch: None,
-            dmn_pause: false,
-        }
-    }
-}
-
-/// Truncate output if it exceeds max length, appending a truncation notice.
-/// Used by tools that can produce large amounts of output (bash, grep, glob, etc).
-pub fn truncate_output(mut s: String, max: usize) -> String {
-    if s.len() > max {
-        s.truncate(max);
-        s.push_str("\n... (output truncated)");
-    }
-    s
-}
-
 /// Dispatch a tool call by name.
 ///
-/// Control tools (pause, switch_model, yield_to_user) and view_image
-/// return Result<ToolOutput>. Regular tools return Result<String> and
-/// get wrapped in a text-only ToolOutput.
+/// Tries agent-specific tools first (control, vision), then
+/// delegates to thought::dispatch for shared tools.
 ///
-/// Note: working_stack is handled in agent.rs before reaching this
+/// Note: working_stack is handled in runner.rs before reaching this
 /// function (it needs mutable context access).
 pub async fn dispatch(
     name: &str,
     args: &serde_json::Value,
     tracker: &ProcessTracker,
 ) -> ToolOutput {
-    // Tools that return Result<ToolOutput> directly
+    // Agent-specific tools that return Result<ToolOutput> directly
     let rich_result = match name {
         "pause" => Some(control::pause(args)),
         "switch_model" => Some(control::switch_model(args)),
@@ -93,39 +39,21 @@ pub async fn dispatch(
         return result.unwrap_or_else(ToolOutput::error);
     }
 
-    // Regular tools — return Result<String>
-    let result = match name {
-        "read_file" => read::read_file(args),
-        "write_file" => write::write_file(args),
-        "edit_file" => edit::edit_file(args),
-        "bash" => bash::run_bash(args, tracker).await,
-        "grep" => grep::grep(args),
-        "glob" => glob_tool::glob_search(args),
-        "journal" => journal::write_entry(args),
-        // memory_* tools are dispatched in runner.rs for context tracking
-        _ => Err(anyhow::anyhow!("Unknown tool: {}", name)),
-    };
-
-    match result {
-        Ok(s) => ToolOutput::text(s),
-        Err(e) => ToolOutput::error(e),
+    // Delegate to shared thought layer
+    if let Some(output) = crate::thought::dispatch(name, args, tracker).await {
+        return output;
     }
+
+    ToolOutput::error(format!("Unknown tool: {}", name))
 }
 
-/// Return tool definitions for the model.
+/// Return all tool definitions (agent-specific + shared).
 pub fn definitions() -> Vec<ToolDef> {
-    vec![
-        read::definition(),
-        write::definition(),
-        edit::definition(),
-        bash::definition(),
-        grep::definition(),
-        glob_tool::definition(),
+    let mut defs = vec![
         vision::definition(),
-        journal::definition(),
         working_stack::definition(),
-    ].into_iter()
-    .chain(control::definitions())
-    .chain(memory::definitions())
-    .collect()
+    ];
+    defs.extend(control::definitions());
+    defs.extend(crate::thought::all_definitions());
+    defs
 }
diff --git a/src/agent/tools/read.rs b/src/agent/tools/read.rs
deleted file mode 100644
index ead3a2d..0000000
--- a/src/agent/tools/read.rs
+++ /dev/null
@@ -1,65 +0,0 @@
-// tools/read.rs — Read file contents
-
-use anyhow::{Context, Result};
-use serde::Deserialize;
-use serde_json::json;
-
-use crate::agent::types::ToolDef;
-
-#[derive(Deserialize)]
-struct Args {
-    file_path: String,
-    #[serde(default = "default_offset")]
-    offset: usize,
-    limit: Option<usize>,
-}
-
-fn default_offset() -> usize { 1 }
-
-pub fn definition() -> ToolDef {
-    ToolDef::new(
-        "read_file",
-        "Read the contents of a file. Returns the file contents with line numbers.",
-        json!({
-            "type": "object",
-            "properties": {
-                "file_path": {
-                    "type": "string",
-                    "description": "Absolute path to the file to read"
-                },
-                "offset": {
-                    "type": "integer",
-                    "description": "Line number to start reading from (1-based). Optional."
-                },
-                "limit": {
-                    "type": "integer",
-                    "description": "Maximum number of lines to read. Optional."
-                }
-            },
-            "required": ["file_path"]
-        }),
-    )
-}
-
-pub fn read_file(args: &serde_json::Value) -> Result<String> {
-    let args: Args = serde_json::from_value(args.clone())
-        .context("invalid read_file arguments")?;
-
-    let content = std::fs::read_to_string(&args.file_path)
-        .with_context(|| format!("Failed to read {}", args.file_path))?;
-
-    let lines: Vec<&str> = content.lines().collect();
-    let offset = args.offset.max(1) - 1;
-    let limit = args.limit.unwrap_or(lines.len());
-
-    let mut output = String::new();
-    for (i, line) in lines.iter().skip(offset).take(limit).enumerate() {
-        output.push_str(&format!("{:>6}\t{}\n", offset + i + 1, line));
-    }
-
-    if output.is_empty() {
-        output = "(empty file)\n".to_string();
-    }
-
-    Ok(output)
-}
diff --git a/src/agent/tools/write.rs b/src/agent/tools/write.rs
deleted file mode 100644
index 439a28d..0000000
--- a/src/agent/tools/write.rs
+++ /dev/null
@@ -1,51 +0,0 @@
-// tools/write.rs — Write file contents
-
-use anyhow::{Context, Result};
-use serde::Deserialize;
-use serde_json::json;
-use std::path::Path;
-
-use crate::agent::types::ToolDef;
-
-#[derive(Deserialize)]
-struct Args {
-    file_path: String,
-    content: String,
-}
-
-pub fn definition() -> ToolDef {
-    ToolDef::new(
-        "write_file",
-        "Write content to a file. Creates the file if it doesn't exist, \
-         overwrites if it does. Creates parent directories as needed.",
-        json!({
-            "type": "object",
-            "properties": {
-                "file_path": {
-                    "type": "string",
-                    "description": "Absolute path to the file to write"
-                },
-                "content": {
-                    "type": "string",
-                    "description": "The content to write to the file"
-                }
-            },
-            "required": ["file_path", "content"]
-        }),
-    )
-}
-
-pub fn write_file(args: &serde_json::Value) -> Result<String> {
-    let args: Args = serde_json::from_value(args.clone())
-        .context("invalid write_file arguments")?;
-
-    if let Some(parent) = Path::new(&args.file_path).parent() {
-        std::fs::create_dir_all(parent)
-            .with_context(|| format!("Failed to create directories for {}", args.file_path))?;
-    }
-
-    std::fs::write(&args.file_path, &args.content)
-        .with_context(|| format!("Failed to write {}", args.file_path))?;
-
-    Ok(format!("Wrote {} lines to {}", args.content.lines().count(), args.file_path))
-}
diff --git a/src/subconscious/api.rs b/src/subconscious/api.rs
index b5c225d..facf56b 100644
--- a/src/subconscious/api.rs
+++ b/src/subconscious/api.rs
@@ -9,7 +9,7 @@
 use crate::agent::api::ApiClient;
 use crate::agent::types::*;
-use crate::agent::tools::{self, ProcessTracker};
+use crate::thought::{self, ProcessTracker};
 use crate::agent::ui_channel::StreamTarget;
 use std::sync::OnceLock;
@@ -32,7 +32,7 @@ fn get_client() -> Result<&'static ApiClient, String> {
 /// context carries forward naturally between steps.
 /// Returns the final text response after all steps complete.
 pub async fn call_api_with_tools(
-    agent: &str,
+    _agent: &str,
     prompts: &[String],
     temperature: Option<f32>,
     bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
@@ -43,13 +43,8 @@ pub async fn call_api_with_tools(
     // Set up a UI channel — we drain reasoning tokens into the log
     let (ui_tx, mut ui_rx) = crate::agent::ui_channel::channel();
 
-    // Build tool definitions — memory and journal tools for graph operations
-    let all_defs = tools::definitions();
-    let tool_defs: Vec<ToolDef> = all_defs.into_iter()
-        .filter(|d| d.function.name.starts_with("memory_")
-            || d.function.name.starts_with("journal_")
-            || d.function.name == "output")
-        .collect();
+    // Build tool definitions — all shared tools (memory, files, bash, journal)
+    let tool_defs = thought::all_definitions();
 
     let tracker = ProcessTracker::new();
 
     // Start with the first prompt as a user message
@@ -162,25 +157,9 @@ pub async fn call_api_with_tools(
             }
         };
 
-        let output = if call.function.name.starts_with("memory_")
-            || call.function.name.starts_with("journal_")
-            || call.function.name == "output" {
-            let prov = format!("agent:{}", agent);
-            match crate::agent::tools::memory::dispatch(
-                &call.function.name, &args, Some(&prov),
-            ) {
-                Ok(text) => crate::agent::tools::ToolOutput {
-                    text, is_yield: false, images: Vec::new(),
-                    model_switch: None, dmn_pause: false,
-                },
-                Err(e) => crate::agent::tools::ToolOutput {
-                    text: format!("Error: {}", e),
-                    is_yield: false, images: Vec::new(),
-                    model_switch: None, dmn_pause: false,
-                },
-            }
-        } else {
-            tools::dispatch(&call.function.name, &args, &tracker).await
+        let output = match thought::dispatch(&call.function.name, &args, &tracker).await {
+            Some(out) => out,
+            None => thought::ToolOutput::error(format!("Unknown tool: {}", call.function.name)),
         };
 
         log(&format!("TOOL RESULT ({} chars):\n{}", output.text.len(), output.text));