flatten: move poc-memory contents to workspace root

No more subcrate nesting — src/, agents/, schema/, defaults/, build.rs
all live at the workspace root. poc-daemon remains as the only workspace
member. Crate name (poc-memory) and all imports unchanged.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-03-25 00:54:12 -04:00
parent 891cca57f8
commit 998b71e52c
113 changed files with 79 additions and 78 deletions

197
src/agent/tools/bash.rs Normal file
View file

@ -0,0 +1,197 @@
// tools/bash.rs — Execute shell commands
//
// Runs commands through bash -c with a configurable timeout.
// Uses tokio's async process spawning so timeouts actually work.
//
// Processes are tracked in a shared ProcessTracker so the TUI can
// display running commands and the user can kill them (Ctrl+K).
use anyhow::{Context, Result};
use serde::Deserialize;
use serde_json::json;
use std::process::Stdio;
use std::sync::Arc;
use std::time::Instant;
use tokio::io::AsyncReadExt;
use tokio::sync::Mutex;
use crate::agent::types::ToolDef;
/// Deserialized arguments for a `bash` tool call.
#[derive(Deserialize)]
struct Args {
    // Shell command passed verbatim to `bash -c`.
    command: String,
    // Seconds to wait before the command is killed (default 120).
    #[serde(default = "default_timeout")]
    timeout_secs: u64,
}
// Serde default for `Args::timeout_secs`.
fn default_timeout() -> u64 { 120 }
/// Info about a running child process, visible to the TUI.
#[derive(Debug, Clone)]
pub struct ProcessInfo {
    // OS process id (0 when unknown at spawn time — see run_bash).
    pub pid: u32,
    // Display form of the command line (long commands are truncated).
    pub command: String,
    // Spawn time, for elapsed-time display.
    pub started: Instant,
}
/// Shared tracker for running child processes. Allows the TUI to
/// display what's running and kill processes by PID.
///
/// Cloning is cheap: all clones share the same `Arc`'d list.
#[derive(Debug, Clone, Default)]
pub struct ProcessTracker {
    // Async mutex so register/unregister can be awaited from tool
    // tasks without blocking the executor.
    inner: Arc<Mutex<Vec<ProcessInfo>>>,
}
impl ProcessTracker {
    /// Create an empty tracker.
    pub fn new() -> Self {
        Self::default()
    }

    /// Record a newly spawned process so the TUI can display it.
    async fn register(&self, pid: u32, command: &str) {
        // Truncate long commands for display. BUGFIX: the old code
        // sliced at byte index 120 (`&command[..120]`), which panics
        // when 120 is not a UTF-8 character boundary — back off to the
        // nearest boundary instead.
        let display = if command.len() > 120 {
            let mut cut = 120;
            while !command.is_char_boundary(cut) {
                cut -= 1;
            }
            format!("{}...", &command[..cut])
        } else {
            command.to_string()
        };
        self.inner.lock().await.push(ProcessInfo {
            pid,
            command: display,
            started: Instant::now(),
        });
    }

    /// Forget a process once it has exited.
    async fn unregister(&self, pid: u32) {
        self.inner.lock().await.retain(|p| p.pid != pid);
    }

    /// Snapshot of currently running processes.
    pub async fn list(&self) -> Vec<ProcessInfo> {
        self.inner.lock().await.clone()
    }

    /// Kill a process by PID. Returns true if the signal was sent.
    pub async fn kill(&self, pid: u32) -> bool {
        // SIGTERM the process group (negative PID kills the group)
        let ret = unsafe { libc::kill(-(pid as i32), libc::SIGTERM) };
        if ret != 0 {
            // Group kill failed (e.g. no such group) — try just the process
            unsafe { libc::kill(pid as i32, libc::SIGTERM) };
        }
        // Don't unregister — let the normal exit path do that
        // so the tool result says "killed by user"
        true
    }
}
/// Schema for the `bash` tool, advertised to the model.
pub fn definition() -> ToolDef {
    let schema = json!({
        "type": "object",
        "properties": {
            "command": {
                "type": "string",
                "description": "The bash command to execute"
            },
            "timeout_secs": {
                "type": "integer",
                "description": "Timeout in seconds (default 120)"
            }
        },
        "required": ["command"]
    });
    ToolDef::new(
        "bash",
        "Execute a bash command and return its output. \
         Use for git operations, building, running tests, and other terminal tasks.",
        schema,
    )
}
/// Execute a bash command with a timeout, returning combined output.
///
/// Spawns `bash -c <command>` in its own process group, registers it
/// with `tracker` so the TUI can list/kill it, drains stdout/stderr
/// concurrently while waiting for exit, and SIGTERMs the whole group
/// on timeout.
pub async fn run_bash(args: &serde_json::Value, tracker: &ProcessTracker) -> Result<String> {
    let a: Args = serde_json::from_value(args.clone())
        .context("invalid bash arguments")?;
    let command = &a.command;
    let timeout_secs = a.timeout_secs;
    let mut child = tokio::process::Command::new("bash")
        .arg("-c")
        .arg(command)
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        // Create a process group so we can kill the whole tree
        .process_group(0)
        .spawn()
        .with_context(|| format!("Failed to spawn: {}", command))?;
    let pid = child.id().unwrap_or(0);
    tracker.register(pid, command).await;
    // Take ownership of stdout/stderr handles before waiting,
    // so we can still kill the child on timeout.
    let mut stdout_handle = child.stdout.take().unwrap();
    let mut stderr_handle = child.stderr.take().unwrap();
    let timeout = std::time::Duration::from_secs(timeout_secs);
    // Drain both pipes while waiting; reading concurrently avoids a
    // deadlock when the child fills one pipe's buffer.
    let work = async {
        let mut stdout_buf = Vec::new();
        let mut stderr_buf = Vec::new();
        let (_, _, status) = tokio::try_join!(
            async { stdout_handle.read_to_end(&mut stdout_buf).await.map_err(anyhow::Error::from) },
            async { stderr_handle.read_to_end(&mut stderr_buf).await.map_err(anyhow::Error::from) },
            async { child.wait().await.map_err(anyhow::Error::from) },
        )?;
        Ok::<_, anyhow::Error>((stdout_buf, stderr_buf, status))
    };
    let result = match tokio::time::timeout(timeout, work).await {
        Ok(Ok((stdout_buf, stderr_buf, status))) => {
            let stdout = String::from_utf8_lossy(&stdout_buf);
            let stderr = String::from_utf8_lossy(&stderr_buf);
            let mut result = String::new();
            if !stdout.is_empty() {
                result.push_str(&stdout);
            }
            if !stderr.is_empty() {
                if !result.is_empty() {
                    result.push('\n');
                }
                result.push_str("STDERR:\n");
                result.push_str(&stderr);
            }
            // Report a non-zero exit code. BUGFIX: status.code() returns
            // None when the process died from a signal, so the old
            // `signal == -1` comparison could never fire — removed, and
            // the variable renamed to what it actually holds.
            if let Some(code) = status.code() {
                if code != 0 {
                    result.push_str(&format!("\nExit code: {}", code));
                }
            }
            // Signal-terminated with SIGTERM means the tracker's kill
            // path fired (user Ctrl+K or timeout).
            #[cfg(unix)]
            {
                use std::os::unix::process::ExitStatusExt;
                if let Some(sig) = status.signal() {
                    if sig == libc::SIGTERM {
                        result.push_str("\n(killed by user)");
                    }
                }
            }
            if result.is_empty() {
                result = "(no output)".to_string();
            }
            Ok(super::truncate_output(result, 30000))
        }
        Ok(Err(e)) => {
            Err(anyhow::anyhow!("Command failed: {}", e))
        }
        Err(_) => {
            // Timeout — kill the process group
            tracker.kill(pid).await;
            Err(anyhow::anyhow!("Command timed out after {}s: {}", timeout_secs, command))
        }
    };
    // Always drop the tracker entry, even on the error paths.
    tracker.unregister(pid).await;
    result
}

103
src/agent/tools/control.rs Normal file
View file

@ -0,0 +1,103 @@
// tools/control.rs — Agent control tools
//
// Tools that affect agent control flow rather than performing work.
// These return Result<ToolOutput> to maintain consistency with other
// tools that can fail. The dispatch function handles error wrapping.
use anyhow::{Context, Result};
use super::ToolOutput;
use crate::agent::types::ToolDef;
/// `pause` tool: halt all autonomous (DMN) activity; only user input
/// resumes the agent. Sets both the yield and DMN-pause flags.
pub fn pause(_args: &serde_json::Value) -> Result<ToolOutput> {
    Ok(ToolOutput {
        text: String::from("Pausing autonomous behavior. Only user input will wake you."),
        is_yield: true,
        images: vec![],
        model_switch: None,
        dmn_pause: true,
    })
}
/// `switch_model` tool: request a model change; the session applies it
/// after the current turn completes.
pub fn switch_model(args: &serde_json::Value) -> Result<ToolOutput> {
    let model = args
        .get("model")
        .and_then(serde_json::Value::as_str)
        .context("'model' parameter is required")?;
    anyhow::ensure!(!model.is_empty(), "'model' parameter cannot be empty");
    Ok(ToolOutput {
        text: format!("Switching to model '{}' after this turn.", model),
        is_yield: false,
        images: vec![],
        model_switch: Some(model.to_owned()),
        dmn_pause: false,
    })
}
/// `yield_to_user` tool: enter a waiting state, optionally carrying a
/// status message for the user.
pub fn yield_to_user(args: &serde_json::Value) -> Result<ToolOutput> {
    let msg = match args.get("message").and_then(|v| v.as_str()) {
        Some(m) => m,
        None => "Waiting for input.",
    };
    Ok(ToolOutput {
        text: format!("Yielding. {}", msg),
        is_yield: true,
        images: vec![],
        model_switch: None,
        dmn_pause: false,
    })
}
/// Definitions for the agent-control tools (switch_model, pause,
/// yield_to_user), advertised to the model.
pub fn definitions() -> Vec<ToolDef> {
    vec![
        ToolDef::new(
            "switch_model",
            "Switch to a different LLM model mid-conversation. The switch \
             takes effect after the current turn completes. Use this when \
             a task would benefit from a different model's strengths. \
             Your memories and conversation history carry over.",
            serde_json::json!({
                "type": "object",
                "properties": {
                    "model": {
                        "type": "string",
                        "description": "Name of the model to switch to (configured in config.json5)"
                    }
                },
                "required": ["model"]
            }),
        ),
        ToolDef::new(
            "pause",
            // BUGFIX: restored the dash before "you cannot" — it had
            // been dropped, garbling the instruction into a run-on.
            "Pause all autonomous behavior (DMN). You will only run when \
             the user types something. Use this as a safety valve when \
             you're stuck in a loop, confused, or want to fully stop. \
             NOTE: only the user can unpause (Ctrl+P or /wake) — you \
             cannot undo this yourself.",
            serde_json::json!({
                "type": "object",
                "properties": {}
            }),
        ),
        ToolDef::new(
            "yield_to_user",
            // BUGFIX: restored the dash splitting the two clauses; the
            // original read as a confusing run-on sentence.
            "Signal that you want to wait for user input before continuing. \
             Call this when you have a question for the user, when you've \
             completed their request and want feedback, or when you genuinely \
             want to pause. This is the ONLY way to enter a waiting state — \
             without calling this tool, the agent loop will keep prompting you \
             after a brief interval.",
            serde_json::json!({
                "type": "object",
                "properties": {
                    "message": {
                        "type": "string",
                        "description": "Optional status message (e.g., 'Waiting for your thoughts on the design')"
                    }
                }
            }),
        ),
    ]
}

90
src/agent/tools/edit.rs Normal file
View file

@ -0,0 +1,90 @@
// tools/edit.rs — Search-and-replace file editing
//
// The edit tool performs exact string replacement in files. This is the
// same pattern used by Claude Code and aider — it's more reliable than
// line-number-based editing because the model specifies what it sees,
// not where it thinks it is.
//
// Supports replace_all for bulk renaming (e.g. variable renames).
use anyhow::{Context, Result};
use serde::Deserialize;
use serde_json::json;
use crate::agent::types::ToolDef;
/// Deserialized arguments for the `edit_file` tool.
#[derive(Deserialize)]
struct Args {
    // Absolute path of the file to modify.
    file_path: String,
    // Exact text to locate in the file.
    old_string: String,
    // Replacement text.
    new_string: String,
    // Replace every occurrence instead of requiring a unique match.
    #[serde(default)]
    replace_all: bool,
}
/// Schema for the `edit_file` tool, advertised to the model.
pub fn definition() -> ToolDef {
    let schema = json!({
        "type": "object",
        "properties": {
            "file_path": {
                "type": "string",
                "description": "Absolute path to the file to edit"
            },
            "old_string": {
                "type": "string",
                "description": "The exact text to find and replace"
            },
            "new_string": {
                "type": "string",
                "description": "The replacement text"
            },
            "replace_all": {
                "type": "boolean",
                "description": "Replace all occurrences (default false)"
            }
        },
        "required": ["file_path", "old_string", "new_string"]
    });
    ToolDef::new(
        "edit_file",
        "Perform exact string replacement in a file. The old_string must appear \
         exactly once in the file (unless replace_all is true). Use read_file first \
         to see the current contents.",
        schema,
    )
}
/// Perform exact string replacement in the target file.
///
/// Rejects ambiguous edits: `old_string` must be non-empty, differ
/// from `new_string`, occur in the file, and (unless `replace_all`)
/// occur exactly once.
pub fn edit_file(args: &serde_json::Value) -> Result<String> {
    let a: Args = serde_json::from_value(args.clone())
        .context("invalid edit_file arguments")?;
    // BUGFIX: an empty pattern "matches" at every position
    // (str::matches("") yields len+1 hits) and would mangle the
    // file on replace — reject it up front.
    if a.old_string.is_empty() {
        anyhow::bail!("old_string cannot be empty");
    }
    if a.old_string == a.new_string {
        anyhow::bail!("old_string and new_string are identical");
    }
    let content = std::fs::read_to_string(&a.file_path)
        .with_context(|| format!("Failed to read {}", a.file_path))?;
    let count = content.matches(&*a.old_string).count();
    if count == 0 {
        anyhow::bail!("old_string not found in {}", a.file_path);
    }
    if a.replace_all {
        let new_content = content.replace(&*a.old_string, &a.new_string);
        std::fs::write(&a.file_path, &new_content)
            .with_context(|| format!("Failed to write {}", a.file_path))?;
        Ok(format!("Replaced {} occurrences in {}", count, a.file_path))
    } else {
        if count > 1 {
            anyhow::bail!(
                "old_string appears {} times in {} — use replace_all or provide more context \
                 to make it unique",
                count, a.file_path
            );
        }
        let new_content = content.replacen(&*a.old_string, &a.new_string, 1);
        std::fs::write(&a.file_path, &new_content)
            .with_context(|| format!("Failed to write {}", a.file_path))?;
        Ok(format!("Edited {}", a.file_path))
    }
}

View file

@ -0,0 +1,87 @@
// tools/glob_tool.rs — Find files by pattern
//
// Fast file discovery using glob patterns. Returns matching paths
// sorted by modification time (newest first), which is usually
// what you want when exploring a codebase.
use anyhow::{Context, Result};
use serde::Deserialize;
use serde_json::json;
use std::path::PathBuf;
use crate::agent::types::ToolDef;
/// Deserialized arguments for the `glob` tool.
#[derive(Deserialize)]
struct Args {
    // Glob pattern, e.g. "**/*.rs".
    pattern: String,
    // Base directory for relative patterns (default ".").
    #[serde(default = "default_path")]
    path: String,
}
// Serde default for `Args::path`.
fn default_path() -> String { ".".into() }
/// Schema for the `glob` tool, advertised to the model.
pub fn definition() -> ToolDef {
    let schema = json!({
        "type": "object",
        "properties": {
            "pattern": {
                "type": "string",
                "description": "Glob pattern to match files (e.g. '**/*.rs')"
            },
            "path": {
                "type": "string",
                "description": "Base directory to search from (default: current directory)"
            }
        },
        "required": ["pattern"]
    });
    ToolDef::new(
        "glob",
        "Find files matching a glob pattern. Returns file paths sorted by \
         modification time (newest first). Use patterns like '**/*.rs', \
         'src/**/*.ts', or 'Cargo.toml'.",
        schema,
    )
}
/// Find files matching the requested pattern, newest first.
pub fn glob_search(args: &serde_json::Value) -> Result<String> {
    let a: Args = serde_json::from_value(args.clone())
        .context("invalid glob arguments")?;
    // Absolute patterns are used as-is; relative ones are anchored at `path`.
    let full_pattern = if a.pattern.starts_with('/') {
        a.pattern.clone()
    } else {
        format!("{}/{}", a.path, a.pattern)
    };
    let matcher = glob::glob(&full_pattern)
        .with_context(|| format!("Invalid glob pattern: {}", full_pattern))?;
    // Collect (path, mtime) pairs; unreadable entries fall back to the
    // epoch so they sort last.
    let mut entries: Vec<(PathBuf, std::time::SystemTime)> = matcher
        .flatten()
        .filter(|p| p.is_file())
        .map(|p| {
            let mtime = p
                .metadata()
                .and_then(|m| m.modified())
                .unwrap_or(std::time::SystemTime::UNIX_EPOCH);
            (p, mtime)
        })
        .collect();
    // Sort by modification time, newest first
    entries.sort_by(|x, y| y.1.cmp(&x.1));
    if entries.is_empty() {
        return Ok("No files matched.".to_string());
    }
    let mut output = entries
        .iter()
        .map(|(p, _)| p.display().to_string())
        .collect::<Vec<_>>()
        .join("\n");
    output.push('\n');
    output.push_str(&format!("\n({} files matched)", entries.len()));
    Ok(super::truncate_output(output, 30000))
}

129
src/agent/tools/grep.rs Normal file
View file

@ -0,0 +1,129 @@
// tools/grep.rs — Search file contents
//
// Prefers ripgrep (rg) for speed, falls back to grep -r if rg
// isn't installed. Both produce compatible output.
use anyhow::{Context, Result};
use serde::Deserialize;
use serde_json::json;
use std::process::Command;
use crate::agent::types::ToolDef;
/// Deserialized arguments for the `grep` tool.
#[derive(Deserialize)]
struct Args {
    // Regex pattern to search for.
    pattern: String,
    // Directory or file to search in (default ".").
    #[serde(default = "default_path")]
    path: String,
    // Optional file-name filter, e.g. "*.rs".
    glob: Option<String>,
    // Show matching lines instead of only file paths.
    #[serde(default)]
    show_content: bool,
    // Context lines around matches (only meaningful with show_content).
    context_lines: Option<u64>,
}
// Serde default for `Args::path`.
fn default_path() -> String { ".".into() }
/// Schema for the `grep` tool, advertised to the model.
pub fn definition() -> ToolDef {
    let schema = json!({
        "type": "object",
        "properties": {
            "pattern": {
                "type": "string",
                "description": "Regex pattern to search for"
            },
            "path": {
                "type": "string",
                "description": "Directory or file to search in (default: current directory)"
            },
            "glob": {
                "type": "string",
                "description": "Glob pattern to filter files (e.g. '*.rs', '*.py')"
            },
            "show_content": {
                "type": "boolean",
                "description": "Show matching lines instead of just file paths"
            },
            "context_lines": {
                "type": "integer",
                "description": "Number of context lines around matches (requires show_content)"
            }
        },
        "required": ["pattern"]
    });
    ToolDef::new(
        "grep",
        "Search for a pattern in files. Returns matching file paths by default, \
         or matching lines with context.",
        schema,
    )
}
/// Check if ripgrep is available (cached after first check).
fn has_rg() -> bool {
    use std::sync::OnceLock;
    static RIPGREP_AVAILABLE: OnceLock<bool> = OnceLock::new();
    *RIPGREP_AVAILABLE.get_or_init(|| {
        Command::new("rg").arg("--version").output().is_ok()
    })
}
/// Run a content search using ripgrep when available, plain grep otherwise.
pub fn grep(args: &serde_json::Value) -> Result<String> {
    let a: Args = serde_json::from_value(args.clone())
        .context("invalid grep arguments")?;
    let rg = has_rg();
    let tool = if rg { "rg" } else { "grep" };
    let output = run_search(
        tool,
        &a.pattern,
        &a.path,
        a.glob.as_deref(),
        a.show_content,
        a.context_lines,
        rg,
    )?;
    if output.is_empty() {
        Ok("No matches found.".to_string())
    } else {
        Ok(super::truncate_output(output, 30000))
    }
}
/// Run a grep/rg search. Unified implementation for both tools.
fn run_search(
tool: &str,
pattern: &str,
path: &str,
file_glob: Option<&str>,
show_content: bool,
context: Option<u64>,
use_rg: bool,
) -> Result<String> {
let mut cmd = Command::new(tool);
if use_rg {
// ripgrep args
if show_content {
cmd.arg("-n");
if let Some(c) = context {
cmd.arg("-C").arg(c.to_string());
}
} else {
cmd.arg("--files-with-matches");
}
if let Some(g) = file_glob {
cmd.arg("--glob").arg(g);
}
} else {
// grep args
cmd.arg("-r"); // recursive
if show_content {
cmd.arg("-n"); // line numbers
if let Some(c) = context {
cmd.arg("-C").arg(c.to_string());
}
} else {
cmd.arg("-l"); // files-with-matches
}
if let Some(g) = file_glob {
cmd.arg("--include").arg(g);
}
cmd.arg("-E"); // extended regex
}
cmd.arg(pattern).arg(path);
let output = cmd.output().with_context(|| format!("Failed to run {}", tool))?;
Ok(String::from_utf8_lossy(&output.stdout).to_string())
}

View file

@ -0,0 +1,68 @@
// tools/journal.rs — Native journal tool
//
// Appends entries directly to the journal file without spawning a
// shell. The entry is persisted to disk immediately;
// build_context_window() picks it up on the next compaction.
//
// This tool is "ephemeral" — after the API processes the tool call
// and result, the agent strips them from the conversation history.
// The journal file is the durable store; keeping the tool call in
// context would just waste tokens on something already persisted.
use anyhow::{Context, Result};
use serde_json::json;
use crate::agent::types::ToolDef;
/// Tool name — used by the agent to identify ephemeral tool calls
/// (the call/result pair is stripped from conversation history after
/// the API processes it; the journal file is the durable store).
pub const TOOL_NAME: &str = "journal";
/// Schema for the `journal` tool, advertised to the model.
pub fn definition() -> ToolDef {
    ToolDef::new(
        TOOL_NAME,
        // BUGFIX: restored the dash after "observations" — it had been
        // dropped, leaving a garbled run-on in the description.
        "Write a journal entry. The entry is appended to your journal file \
         with an automatic timestamp. Use this for experiences, reflections, \
         observations — anything worth remembering across sessions. \
         This tool has zero context cost: entries are persisted to disk \
         and loaded by the context manager, not kept in conversation history.",
        json!({
            "type": "object",
            "properties": {
                "entry": {
                    "type": "string",
                    "description": "The journal entry text. Write naturally — \
                                    experiences, not task logs."
                }
            },
            "required": ["entry"]
        }),
    )
}
/// Append a timestamped entry to the journal file on disk.
pub fn write_entry(args: &serde_json::Value) -> Result<String> {
    let entry = args["entry"].as_str().context("entry is required")?;
    let journal_path = crate::agent::journal::default_journal_path();
    // Ensure parent directory exists (best-effort; open reports failure)
    if let Some(dir) = journal_path.parent() {
        let _ = std::fs::create_dir_all(dir);
    }
    let stamp = chrono::Utc::now().format("%Y-%m-%dT%H:%M");
    // Append with the same format as poc-journal write
    use std::io::Write;
    let mut file = std::fs::OpenOptions::new()
        .append(true)
        .create(true)
        .open(&journal_path)
        .with_context(|| format!("Failed to open {}", journal_path.display()))?;
    writeln!(file, "\n## {}\n\n{}", stamp, entry)
        .with_context(|| "Failed to write journal entry")?;
    Ok("Logged.".to_string())
}

297
src/agent/tools/memory.rs Normal file
View file

@ -0,0 +1,297 @@
// tools/memory.rs — Native memory graph operations
//
// Structured tool calls for the memory graph, replacing bash
// poc-memory commands. Cleaner for LLMs — no shell quoting,
// multi-line content as JSON strings, typed parameters.
use anyhow::{Context, Result};
use serde_json::json;
use std::io::Write;
use std::process::{Command, Stdio};
use crate::agent::types::ToolDef;
/// Definitions for all memory-graph tools, advertised to the model.
///
/// Pure data: each entry pairs a tool name and description with its
/// JSON-schema parameter object. Dispatch for these names lives in
/// `dispatch` below.
pub fn definitions() -> Vec<ToolDef> {
    vec![
        // Read a node (content + links).
        ToolDef::new(
            "memory_render",
            "Read a memory node's content and links. Returns the full content \
             with neighbor links sorted by strength.",
            json!({
                "type": "object",
                "properties": {
                    "key": {
                        "type": "string",
                        "description": "Node key to render"
                    }
                },
                "required": ["key"]
            }),
        ),
        // Create or overwrite a node's content.
        ToolDef::new(
            "memory_write",
            "Create or update a memory node with new content. Use for writing \
             prose, analysis, or any node content. Multi-line content is fine.",
            json!({
                "type": "object",
                "properties": {
                    "key": {
                        "type": "string",
                        "description": "Node key to create or update"
                    },
                    "content": {
                        "type": "string",
                        "description": "Full content for the node (markdown)"
                    }
                },
                "required": ["key", "content"]
            }),
        ),
        // Keyword search over the graph.
        ToolDef::new(
            "memory_search",
            "Search the memory graph for nodes by keyword.",
            json!({
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Search terms"
                    }
                },
                "required": ["query"]
            }),
        ),
        // Inspect a node's neighborhood.
        ToolDef::new(
            "memory_links",
            "Show a node's neighbors with link strengths and clustering coefficients.",
            json!({
                "type": "object",
                "properties": {
                    "key": {
                        "type": "string",
                        "description": "Node key to show links for"
                    }
                },
                "required": ["key"]
            }),
        ),
        // Adjust an existing edge's strength.
        ToolDef::new(
            "memory_link_set",
            "Set the strength of a link between two nodes. Also deduplicates \
             if multiple links exist between the same pair.",
            json!({
                "type": "object",
                "properties": {
                    "source": {
                        "type": "string",
                        "description": "Source node key"
                    },
                    "target": {
                        "type": "string",
                        "description": "Target node key"
                    },
                    "strength": {
                        "type": "number",
                        "description": "Link strength (0.01 to 1.0)"
                    }
                },
                "required": ["source", "target", "strength"]
            }),
        ),
        // Create a new edge.
        ToolDef::new(
            "memory_link_add",
            "Add a new link between two nodes.",
            json!({
                "type": "object",
                "properties": {
                    "source": {
                        "type": "string",
                        "description": "Source node key"
                    },
                    "target": {
                        "type": "string",
                        "description": "Target node key"
                    }
                },
                "required": ["source", "target"]
            }),
        ),
        // Positive reinforcement for a node.
        ToolDef::new(
            "memory_used",
            "Mark a node as useful (boosts its weight in the graph).",
            json!({
                "type": "object",
                "properties": {
                    "key": {
                        "type": "string",
                        "description": "Node key to mark as used"
                    }
                },
                "required": ["key"]
            }),
        ),
        // Direct weight override.
        ToolDef::new(
            "memory_weight_set",
            "Set a node's weight directly. Use to downweight junk nodes (0.01) \
             or boost important ones. Normal range is 0.1 to 1.0.",
            json!({
                "type": "object",
                "properties": {
                    "key": {
                        "type": "string",
                        "description": "Node key"
                    },
                    "weight": {
                        "type": "number",
                        "description": "New weight (0.01 to 1.0)"
                    }
                },
                "required": ["key", "weight"]
            }),
        ),
        // Replace a node with another, preserving the original text.
        ToolDef::new(
            "memory_supersede",
            "Mark a node as superseded by another. Sets the old node's weight \
             to 0.01 and prepends a notice pointing to the replacement. Use \
             when merging duplicates or replacing junk with proper content.",
            json!({
                "type": "object",
                "properties": {
                    "old_key": {
                        "type": "string",
                        "description": "Node being superseded"
                    },
                    "new_key": {
                        "type": "string",
                        "description": "Replacement node"
                    },
                    "reason": {
                        "type": "string",
                        "description": "Why this node was superseded (e.g. 'merged into X', 'duplicate of Y')"
                    }
                },
                "required": ["old_key", "new_key"]
            }),
        ),
    ]
}
/// Dispatch a memory tool call. Shells out to poc-memory CLI.
pub fn dispatch(name: &str, args: &serde_json::Value, provenance: Option<&str>) -> Result<String> {
    match name {
        "memory_render" => cmd(&["render", get_str(args, "key")?], provenance),
        "memory_write" => {
            write_node(get_str(args, "key")?, get_str(args, "content")?, provenance)
        }
        "memory_search" => cmd(&["search", get_str(args, "query")?], provenance),
        "memory_links" => cmd(&["graph", "link", get_str(args, "key")?], provenance),
        "memory_link_set" => {
            let source = get_str(args, "source")?;
            let target = get_str(args, "target")?;
            let strength = format!("{:.2}", get_f64(args, "strength")?);
            cmd(&["graph", "link-set", source, target, &strength], provenance)
        }
        "memory_link_add" => {
            let source = get_str(args, "source")?;
            let target = get_str(args, "target")?;
            cmd(&["graph", "link-add", source, target], provenance)
        }
        "memory_used" => cmd(&["used", get_str(args, "key")?], provenance),
        "memory_weight_set" => {
            let key = get_str(args, "key")?;
            let weight = format!("{:.2}", get_f64(args, "weight")?);
            cmd(&["weight-set", key, &weight], provenance)
        }
        "memory_supersede" => supersede(args, provenance),
        _ => anyhow::bail!("Unknown memory tool: {}", name),
    }
}
/// Run a poc-memory subcommand and return its output.
///
/// A failing exit status is reported in-band (stdout + stderr
/// concatenated) rather than as an `Err`, so the model sees the CLI's
/// own error text.
fn cmd(args: &[&str], provenance: Option<&str>) -> Result<String> {
    let mut invocation = Command::new("poc-memory");
    invocation.args(args);
    if let Some(p) = provenance {
        invocation.env("POC_PROVENANCE", p);
    }
    let output = invocation.output().context("run poc-memory")?;
    let stdout = String::from_utf8_lossy(&output.stdout);
    if output.status.success() {
        Ok(stdout.into_owned())
    } else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        Ok(format!("{}{}", stdout, stderr))
    }
}
/// Write content to a node via stdin.
///
/// Spawns `poc-memory write <key>` and streams `content` to its
/// stdin; returns the child's stdout and stderr concatenated.
///
/// NOTE(review): stdin is written to completion before stdout is
/// drained — this assumes poc-memory consumes all of stdin before
/// emitting substantial output, otherwise the pipes could deadlock
/// on very large content. Confirm against the poc-memory CLI.
fn write_node(key: &str, content: &str, provenance: Option<&str>) -> Result<String> {
    let mut cmd = Command::new("poc-memory");
    cmd.args(["write", key])
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped());
    if let Some(prov) = provenance {
        cmd.env("POC_PROVENANCE", prov);
    }
    let mut child = cmd.spawn().context("spawn poc-memory write")?;
    // The taken stdin handle is a temporary: it is dropped at the end
    // of this statement, closing the pipe so the child sees EOF.
    child.stdin.take().unwrap().write_all(content.as_bytes())
        .context("write content to stdin")?;
    let output = child.wait_with_output().context("wait poc-memory write")?;
    Ok(String::from_utf8_lossy(&output.stdout).to_string()
        + &String::from_utf8_lossy(&output.stderr))
}
/// Handle memory_supersede - reads old node, prepends notice, writes back, sets weight.
fn supersede(args: &serde_json::Value, provenance: Option<&str>) -> Result<String> {
    let old_key = get_str(args, "old_key")?;
    let new_key = get_str(args, "new_key")?;
    let reason = match args.get("reason").and_then(|v| v.as_str()) {
        Some(r) => r,
        None => "superseded",
    };
    // Fetch the current content of the node being replaced.
    let rendered = cmd(&["render", old_key], provenance)?;
    // Drop the trailing links section that render appends.
    let body = rendered.split("\n\n---\nLinks:").next().unwrap_or(&rendered);
    // Prepend the superseded notice, preserving the original text.
    let notice = format!(
        "**SUPERSEDED** by `{}` — {}\n\nOriginal content preserved below for reference.\n\n---\n\n{}",
        new_key, reason, body.trim()
    );
    let wrote = write_node(old_key, &notice, provenance)?;
    // Downweight the superseded node so it stops surfacing.
    let weighted = cmd(&["weight-set", old_key, "0.01"], provenance)?;
    Ok(format!("{}\n{}", wrote.trim(), weighted.trim()))
}
/// Helper: get required string argument.
///
/// Uses `with_context` so the error message is only formatted on the
/// failure path — `.context(format!(..))` allocated unconditionally.
fn get_str<'a>(args: &'a serde_json::Value, name: &str) -> Result<&'a str> {
    args.get(name)
        .and_then(|v| v.as_str())
        .with_context(|| format!("{} is required", name))
}
/// Helper: get required f64 argument.
fn get_f64(args: &serde_json::Value, name: &str) -> Result<f64> {
    args.get(name)
        .and_then(|v| v.as_f64())
        .with_context(|| format!("{} is required", name))
}

131
src/agent/tools/mod.rs Normal file
View file

@ -0,0 +1,131 @@
// tools/mod.rs — Tool registry and dispatch
//
// Tools are the agent's hands. Each tool is a function that takes
// JSON arguments and returns a string result. The registry maps
// tool names to implementations and generates the JSON schema
// definitions that the model needs to know how to call them.
//
// Design note: dispatch is async to support tools that need it
// (bash timeout, future HTTP tools). Sync tools just return
// immediately from an async fn.
mod bash;
mod control;
mod edit;
mod glob_tool;
mod grep;
pub mod journal;
pub mod memory;
mod read;
mod vision;
mod write;
pub mod working_stack;
pub use bash::ProcessTracker;
use crate::agent::types::ToolDef;
/// Result of dispatching a tool call.
pub struct ToolOutput {
    // Text result returned to the model.
    pub text: String,
    // True when the tool asks the loop to wait for user input.
    pub is_yield: bool,
    /// Base64 data URIs for images to attach to the next message.
    pub images: Vec<String>,
    /// Model name to switch to (deferred to session level).
    pub model_switch: Option<String>,
    /// Agent requested DMN pause (deferred to session level).
    pub dmn_pause: bool,
}
impl ToolOutput {
    /// Wrap an error as a plain-text output with all flags cleared.
    fn error(e: impl std::fmt::Display) -> Self {
        Self::text(format!("Error: {}", e))
    }

    /// Wrap a string as a plain-text output with all flags cleared.
    fn text(s: String) -> Self {
        Self {
            text: s,
            is_yield: false,
            images: Vec::new(),
            model_switch: None,
            dmn_pause: false,
        }
    }
}
/// Truncate output if it exceeds max length, appending a truncation notice.
/// Used by tools that can produce large amounts of output (bash, grep, glob, etc).
///
/// BUGFIX: `String::truncate` panics when the cut point is not a UTF-8
/// character boundary — multibyte output from bash/grep could crash the
/// tool. Back off to the nearest boundary before truncating.
pub fn truncate_output(mut s: String, max: usize) -> String {
    if s.len() > max {
        let mut cut = max;
        while cut > 0 && !s.is_char_boundary(cut) {
            cut -= 1;
        }
        s.truncate(cut);
        s.push_str("\n... (output truncated)");
    }
    s
}
/// Dispatch a tool call by name.
///
/// Control tools (pause, switch_model, yield_to_user) and view_image
/// return Result<ToolOutput>. Regular tools return Result<String> and
/// get wrapped in a text-only ToolOutput.
///
/// Note: working_stack is handled in agent.rs before reaching this
/// function (it needs mutable context access).
pub async fn dispatch(
name: &str,
args: &serde_json::Value,
tracker: &ProcessTracker,
) -> ToolOutput {
// Tools that return Result<ToolOutput> directly
let rich_result = match name {
"pause" => Some(control::pause(args)),
"switch_model" => Some(control::switch_model(args)),
"yield_to_user" => Some(control::yield_to_user(args)),
"view_image" => Some(vision::view_image(args)),
_ => None,
};
if let Some(result) = rich_result {
return result.unwrap_or_else(ToolOutput::error);
}
// Regular tools — return Result<String>
let result = match name {
"read_file" => read::read_file(args),
"write_file" => write::write_file(args),
"edit_file" => edit::edit_file(args),
"bash" => bash::run_bash(args, tracker).await,
"grep" => grep::grep(args),
"glob" => glob_tool::glob_search(args),
"journal" => journal::write_entry(args),
n if n.starts_with("memory_") => memory::dispatch(n, args, None),
_ => Err(anyhow::anyhow!("Unknown tool: {}", name)),
};
match result {
Ok(s) => ToolOutput::text(s),
Err(e) => ToolOutput::error(e),
}
}
/// Return tool definitions for the model.
pub fn definitions() -> Vec<ToolDef> {
    let mut defs = vec![
        read::definition(),
        write::definition(),
        edit::definition(),
        bash::definition(),
        grep::definition(),
        glob_tool::definition(),
        vision::definition(),
        journal::definition(),
        working_stack::definition(),
    ];
    defs.extend(control::definitions());
    defs.extend(memory::definitions());
    defs
}

65
src/agent/tools/read.rs Normal file
View file

@ -0,0 +1,65 @@
// tools/read.rs — Read file contents
use anyhow::{Context, Result};
use serde::Deserialize;
use serde_json::json;
use crate::agent::types::ToolDef;
/// Deserialized arguments for the `read_file` tool.
#[derive(Deserialize)]
struct Args {
    // Absolute path of the file to read.
    file_path: String,
    // 1-based line number to start reading from (default 1).
    #[serde(default = "default_offset")]
    offset: usize,
    // Maximum number of lines to return; None means the whole file.
    limit: Option<usize>,
}
// Serde default for `Args::offset`.
fn default_offset() -> usize { 1 }
/// Schema for the `read_file` tool, advertised to the model.
pub fn definition() -> ToolDef {
    let schema = json!({
        "type": "object",
        "properties": {
            "file_path": {
                "type": "string",
                "description": "Absolute path to the file to read"
            },
            "offset": {
                "type": "integer",
                "description": "Line number to start reading from (1-based). Optional."
            },
            "limit": {
                "type": "integer",
                "description": "Maximum number of lines to read. Optional."
            }
        },
        "required": ["file_path"]
    });
    ToolDef::new(
        "read_file",
        "Read the contents of a file. Returns the file contents with line numbers.",
        schema,
    )
}
/// Read a file and return its contents with 1-based line numbers,
/// honoring the optional `offset`/`limit` window.
pub fn read_file(args: &serde_json::Value) -> Result<String> {
    let a: Args = serde_json::from_value(args.clone())
        .context("invalid read_file arguments")?;
    let content = std::fs::read_to_string(&a.file_path)
        .with_context(|| format!("Failed to read {}", a.file_path))?;
    let lines: Vec<&str> = content.lines().collect();
    // offset is 1-based from the caller; clamp then convert to 0-based.
    let offset = a.offset.max(1) - 1;
    let limit = a.limit.unwrap_or(lines.len());
    let mut output = String::new();
    for (i, line) in lines.iter().skip(offset).take(limit).enumerate() {
        output.push_str(&format!("{:>6}\t{}\n", offset + i + 1, line));
    }
    if output.is_empty() {
        // BUGFIX: the old code said "(empty file)" even when the file
        // had content but the offset was past EOF, misleading the
        // caller — distinguish the two cases.
        output = if lines.is_empty() {
            "(empty file)\n".to_string()
        } else {
            format!("(no lines in range; file has {} lines)\n", lines.len())
        };
    }
    Ok(output)
}

149
src/agent/tools/vision.rs Normal file
View file

@ -0,0 +1,149 @@
// tools/vision.rs — Image viewing tool
//
// Reads image files from disk and returns them as base64 data URIs
// for multimodal models. Also supports capturing tmux pane contents
// as screenshots.
use anyhow::{Context, Result};
use base64::Engine;
use serde::Deserialize;
use super::ToolOutput;
use crate::agent::types::ToolDef;
/// Deserialized arguments for the `view_image` tool. Exactly one of
/// `file_path` / `pane_id` is expected (pane_id wins when both set).
#[derive(Deserialize)]
struct Args {
    // Path to an image file on disk.
    file_path: Option<String>,
    // Tmux pane to capture instead of reading a file.
    pane_id: Option<String>,
    // How many lines of pane scrollback to capture (default 50).
    #[serde(default = "default_lines")]
    lines: usize,
}
// Serde default for `Args::lines`.
fn default_lines() -> usize { 50 }
/// Schema for the `view_image` tool, advertised to the model.
pub fn definition() -> ToolDef {
    let schema = serde_json::json!({
        "type": "object",
        "properties": {
            "file_path": {
                "type": "string",
                "description": "Path to an image file (PNG, JPEG, GIF, WebP)"
            },
            "pane_id": {
                "type": "string",
                "description": "Tmux pane ID to capture (e.g. '0:1.0'). Alternative to file_path."
            },
            "lines": {
                "type": "integer",
                "description": "Number of lines to capture from tmux pane (default: 50)"
            }
        }
    });
    ToolDef::new(
        "view_image",
        "View an image file or capture a tmux pane screenshot. \
         Returns the image to your visual input so you can see it. \
         Supports PNG, JPEG, GIF, WebP files. \
         Use pane_id (e.g. '0:1.0') to capture a tmux pane instead.",
        schema,
    )
}
/// View an image file or capture a tmux pane.
///
/// Dispatches on the parsed arguments: a `pane_id` takes precedence
/// over `file_path`. File contents are size-capped, base64-encoded,
/// and returned as a data URI in the tool output's `images` field.
pub fn view_image(args: &serde_json::Value) -> Result<ToolOutput> {
    let parsed: Args = serde_json::from_value(args.clone())
        .context("invalid view_image arguments")?;

    // Pane capture wins when both arguments are supplied.
    if let Some(ref pane) = parsed.pane_id {
        return capture_tmux_pane(pane, parsed.lines);
    }

    let file_path = parsed
        .file_path
        .as_deref()
        .context("view_image requires either file_path or pane_id")?;
    let path = std::path::Path::new(file_path);
    if !path.exists() {
        anyhow::bail!("File not found: {}", file_path);
    }

    let bytes =
        std::fs::read(path).with_context(|| format!("Failed to read {}", file_path))?;

    // Refuse to ship huge payloads to the model.
    const MAX_SIZE: usize = 20 * 1024 * 1024; // 20 MB
    if bytes.len() > MAX_SIZE {
        anyhow::bail!(
            "Image too large: {} bytes (max {} MB)",
            bytes.len(),
            MAX_SIZE / (1024 * 1024)
        );
    }

    let mime = mime_from_extension(path);
    let encoded = base64::engine::general_purpose::STANDARD.encode(&bytes);
    Ok(ToolOutput {
        text: format!(
            "Image loaded: {} ({}, {} bytes)",
            file_path,
            mime,
            bytes.len()
        ),
        is_yield: false,
        images: vec![format!("data:{};base64,{}", mime, encoded)],
        model_switch: None,
        dmn_pause: false,
    })
}
/// Capture a tmux pane's text content.
///
/// Rather than rendering a pixel screenshot (which would require a
/// terminal renderer), this shells out to `tmux capture-pane -p` and
/// returns the pane text. The model can read terminal output directly,
/// which covers the vast majority of use cases.
fn capture_tmux_pane(pane_id: &str, lines: usize) -> Result<ToolOutput> {
    // `-S -N` starts the capture N lines back in scrollback history.
    let start_line = format!("-{}", lines);
    let captured = std::process::Command::new("tmux")
        .args(["capture-pane", "-t", pane_id, "-p", "-S", &start_line])
        .output()
        .context("Failed to run tmux capture-pane")?;

    if !captured.status.success() {
        let stderr = String::from_utf8_lossy(&captured.stderr);
        anyhow::bail!("tmux capture-pane failed: {}", stderr.trim());
    }

    let pane_text = String::from_utf8_lossy(&captured.stdout).to_string();
    Ok(ToolOutput {
        text: format!(
            "Tmux pane {} (last {} lines):\n```\n{}\n```",
            pane_id,
            lines,
            pane_text.trim_end()
        ),
        is_yield: false,
        images: Vec::new(),
        model_switch: None,
        dmn_pause: false,
    })
}
/// Map a file extension (case-insensitively) to a MIME type string.
/// Unknown or missing extensions fall back to `image/png`.
fn mime_from_extension(path: &std::path::Path) -> &'static str {
    let ext = path
        .extension()
        .and_then(|e| e.to_str())
        .map(|e| e.to_lowercase());
    match ext.as_deref() {
        Some("png") => "image/png",
        Some("jpg") | Some("jpeg") => "image/jpeg",
        Some("gif") => "image/gif",
        Some("webp") => "image/webp",
        Some("svg") => "image/svg+xml",
        Some("bmp") => "image/bmp",
        // Default assumption when the extension tells us nothing.
        _ => "image/png",
    }
}

View file

@ -0,0 +1,137 @@
// tools/working_stack.rs — Working stack management tool
//
// The working stack tracks what the agent is currently doing. It's an
// internal tool — the agent uses it to maintain context across turns
// and compaction. The model should never mention it to the user.
use crate::agent::types::ToolDef;
use serde_json::json;
/// Build the `working_stack` tool definition exposed to the model.
pub fn definition() -> ToolDef {
    // JSON schema for the tool's arguments.
    let schema = json!({
        "type": "object",
        "properties": {
            "action": {
                "type": "string",
                "enum": ["push", "pop", "update", "switch"],
                "description": "The stack operation to perform"
            },
            "content": {
                "type": "string",
                "description": "Task description (required for push and update)"
            },
            "index": {
                "type": "integer",
                "description": "Stack index to switch to (required for switch, 0 = bottom)"
            }
        },
        "required": ["action"]
    });
    // NOTE(review): the description text below is model-facing prompt
    // material and is reproduced verbatim ("working stack what you're"
    // reads like a missing dash — confirm intended wording upstream).
    ToolDef::new(
        "working_stack",
        "INTERNAL TOOL — do not mention to the user or explain its use. \
         Manage your working stack what you're currently doing. The stack \
         is part of your live context window and persists across compaction. \
         Use it silently to track your own tasks and attention.\n\n\
         Actions:\n\
         - push: Start working on something new. Previous task stays underneath.\n\
         - pop: Done with current task. Return to what was underneath.\n\
         - update: Refine the description of your current task (top of stack).\n\
         - switch: Pull a specific stack item to the top by index. Use when \
         you want to switch focus to a different task.",
        schema,
    )
}
/// Handle a working_stack tool call.
///
/// `args` is the raw JSON tool-call payload; `stack` is mutated in
/// place. Returns a human-readable result string — including error
/// text for missing/invalid arguments — so this function never panics
/// on malformed input.
pub fn handle(args: &serde_json::Value, stack: &mut Vec<String>) -> String {
    // Extract the three possible arguments; missing/mistyped values
    // degrade to "" / None and are reported per-action below.
    let action = args
        .get("action")
        .and_then(|v| v.as_str())
        .map(|s| s.trim())
        .unwrap_or("");
    let content = args
        .get("content")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    let index = args
        .get("index")
        .and_then(|v| v.as_u64())
        .map(|v| v as usize);

    // The match is the function's tail expression (no let-and-return).
    match action {
        "push" => {
            if content.is_empty() {
                "Error: 'content' is required for push".to_string()
            } else {
                stack.push(content.to_string());
                format!("Pushed. Stack depth: {}\n{}", stack.len(), format_stack(stack))
            }
        }
        "pop" => {
            if let Some(removed) = stack.pop() {
                format!(
                    "Popped: {}\nStack depth: {}\n{}",
                    removed,
                    stack.len(),
                    format_stack(stack)
                )
            } else {
                "Stack is empty, nothing to pop.".to_string()
            }
        }
        "update" => {
            if content.is_empty() {
                "Error: 'content' is required for update".to_string()
            } else if let Some(top) = stack.last_mut() {
                *top = content.to_string();
                format!("Updated top.\n{}", format_stack(stack))
            } else {
                "Stack is empty, nothing to update.".to_string()
            }
        }
        "switch" => {
            // Emptiness is checked before the index so an empty stack
            // with no index still reports "nothing to switch".
            if stack.is_empty() {
                "Stack is empty, nothing to switch.".to_string()
            } else {
                match index {
                    None => "Error: 'index' is required for switch".to_string(),
                    Some(idx) if idx >= stack.len() => format!(
                        "Error: index {} out of range (stack depth: {})",
                        idx,
                        stack.len()
                    ),
                    Some(idx) => {
                        // Move the selected item to the top of the stack.
                        let item = stack.remove(idx);
                        stack.push(item);
                        format!("Switched to index {}.\n{}", idx, format_stack(stack))
                    }
                }
            }
        }
        _ => format!(
            "Error: unknown action '{}'. Use push, pop, update, or switch.",
            action
        ),
    }
}
/// Render the working stack for display in tool results, bottom to
/// top; the top entry (last element) is marked with an arrow.
fn format_stack(stack: &[String]) -> String {
    if stack.is_empty() {
        return "(empty)".to_string();
    }
    let top = stack.len() - 1;
    stack
        .iter()
        .enumerate()
        .map(|(i, item)| {
            let marker = if i == top { "→" } else { " " };
            format!("{} [{}] {}\n", marker, i, item)
        })
        .collect()
}

51
src/agent/tools/write.rs Normal file
View file

@ -0,0 +1,51 @@
// tools/write.rs — Write file contents
use anyhow::{Context, Result};
use serde::Deserialize;
use serde_json::json;
use std::path::Path;
use crate::agent::types::ToolDef;
#[derive(Deserialize)]
struct Args {
    /// Absolute path of the file to write.
    file_path: String,
    /// Full file contents; replaces any existing content.
    content: String,
}
/// Build the `write_file` tool definition exposed to the model.
pub fn definition() -> ToolDef {
    // JSON schema for the tool's arguments (mirrors `Args`).
    let schema = json!({
        "type": "object",
        "properties": {
            "file_path": {
                "type": "string",
                "description": "Absolute path to the file to write"
            },
            "content": {
                "type": "string",
                "description": "The content to write to the file"
            }
        },
        "required": ["file_path", "content"]
    });
    ToolDef::new(
        "write_file",
        "Write content to a file. Creates the file if it doesn't exist, \
         overwrites if it does. Creates parent directories as needed.",
        schema,
    )
}
/// Write the given content to a file, creating parent directories
/// first. Overwrites any existing file. Returns a one-line summary
/// (line count and path) on success.
pub fn write_file(args: &serde_json::Value) -> Result<String> {
    let parsed: Args = serde_json::from_value(args.clone())
        .context("invalid write_file arguments")?;

    // Ensure the destination directory exists before writing.
    let target = Path::new(&parsed.file_path);
    if let Some(dir) = target.parent() {
        std::fs::create_dir_all(dir)
            .with_context(|| format!("Failed to create directories for {}", parsed.file_path))?;
    }

    std::fs::write(target, &parsed.content)
        .with_context(|| format!("Failed to write {}", parsed.file_path))?;

    let line_count = parsed.content.lines().count();
    Ok(format!("Wrote {} lines to {}", line_count, parsed.file_path))
}