poc-agent: read context_groups from config instead of hardcoded list

- Remove MEMORY_FILES constant from identity.rs
- Add ContextGroup struct for deserializing from config
- Load context_groups from ~/.config/poc-agent/config.json5
- Check ~/.config/poc-agent/ first for identity files, then project/global
- Debug screen now shows what's actually configured

This eliminates the hardcoded duplication and makes the debug output
match what's in the config file.
This commit is contained in:
Kent Overstreet 2026-03-24 01:53:28 -04:00
parent 966219720a
commit aa46b1d5a6
9 changed files with 346 additions and 654 deletions

View file

@ -276,7 +276,8 @@ impl AppConfig {
(content, Vec::new(), 0, 0)
} else {
let system_prompt = crate::identity::assemble_system_prompt();
let (context_parts, cc, mc) = crate::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref())?;
let context_groups = load_context_groups();
let (context_parts, cc, mc) = crate::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
(system_prompt, context_parts, cc, mc)
};
@ -362,6 +363,27 @@ pub fn load(cli: &CliArgs) -> Result<(Config, Figment)> {
Ok((config, figment))
}
/// Load context_groups from the shared config file.
///
/// Reads `~/.config/poc-agent/config.json5` (falling back to `./` when the
/// home directory is unknown), parses it as JSON5, and deserializes the
/// `memory.context_groups` array. Any failure — missing file, parse error,
/// absent key, schema mismatch — yields an empty list so startup never
/// fails on a bad config.
fn load_context_groups() -> Vec<crate::identity::ContextGroup> {
    let config_path = dirs::home_dir()
        .unwrap_or_else(|| std::path::PathBuf::from("."))
        .join(".config/poc-agent/config.json5");
    // Combinator chain replaces a four-level if-let pyramid; every fallible
    // step collapses to None and the final unwrap_or_default gives Vec::new().
    std::fs::read_to_string(&config_path)
        .ok()
        .and_then(|content| json5::from_str::<serde_json::Value>(&content).ok())
        .and_then(|config| {
            let groups = config.get("memory")?.get("context_groups")?.clone();
            serde_json::from_value(groups).ok()
        })
        .unwrap_or_default()
}
/// Re-assemble prompts for a specific model's prompt file.
pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, Vec<(String, String)>)> {
let cwd = std::env::current_dir().context("Failed to get current directory")?;
@ -373,7 +395,8 @@ pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, V
}
let system_prompt = crate::identity::assemble_system_prompt();
let (context_parts, _, _) = crate::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref())?;
let context_groups = load_context_groups();
let (context_parts, _, _) = crate::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?;
Ok((system_prompt, context_parts))
}

View file

@ -1,25 +1,21 @@
// identity.rs — Identity file discovery and context assembly
//
// Discovers and loads the agent's identity: instruction files (CLAUDE.md,
// POC.md), memory files, and the system prompt. Pure functions — no
// config dependency.
// POC.md), memory files, and the system prompt. Reads context_groups
// from the shared config file.
use anyhow::Result;
use serde::Deserialize;
use std::path::{Path, PathBuf};
/// Memory files to load, in priority order. Project dir is checked
/// first, then global (~/.claude/memory/).
const MEMORY_FILES: &[&str] = &[
// Identity
"identity.md", "MEMORY.md", "reflections.md", "interests.md",
"inner-life.md", "differentiation.md",
// Work context
"scratch.md", "default-mode-network.md",
// Reference
"excession-notes.md", "look-to-windward-notes.md",
// Technical
"kernel-patterns.md", "polishing-approaches.md", "rust-conversion.md", "github-bugs.md",
];
/// One group of context entries, deserialized from the config file's
/// `memory.context_groups` list.
#[derive(Debug, Clone, Deserialize)]
pub struct ContextGroup {
    /// Display label for the group.
    pub label: String,
    /// Memory keys in this group; for file sources each key maps to `<key>.md`.
    #[serde(default)]
    pub keys: Vec<String>,
    /// Where entries come from: "file" (also the default when absent) or "journal".
    #[serde(default)]
    pub source: Option<String>, // "file" or "journal"
}
/// Read a file if it exists and is non-empty.
fn read_nonempty(path: &Path) -> Option<String> {
@ -77,24 +73,51 @@ fn find_context_files(cwd: &Path, prompt_file: &str) -> Vec<PathBuf> {
found
}
/// Load memory files from project and global dirs, plus people/ glob.
fn load_memory_files(cwd: &Path, memory_project: Option<&Path>) -> Vec<(String, String)> {
/// Load memory files from config's context_groups.
/// For file sources, checks:
/// 1. ~/.config/poc-agent/ (primary config dir)
/// 2. Project dir (if set)
/// 3. Global (~/.claude/memory/)
/// For journal source, loads recent journal entries.
fn load_memory_files(cwd: &Path, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Vec<(String, String)> {
let home = match dirs::home_dir() {
Some(h) => h,
None => return Vec::new(),
};
// Primary config directory
let config_dir = home.join(".config/poc-agent");
let global = home.join(".claude/memory");
let project = memory_project
.map(PathBuf::from)
.or_else(|| find_project_memory_dir(cwd, &home));
let mut memories: Vec<(String, String)> = MEMORY_FILES.iter()
.filter_map(|name| {
load_memory_file(name, project.as_deref(), &global)
.map(|content| (name.to_string(), content))
})
.collect();
let mut memories: Vec<(String, String)> = Vec::new();
// Load from context_groups
for group in context_groups {
match group.source.as_deref() {
Some("journal") => {
// Journal loading handled separately
continue;
}
Some("file") | None => {
// File source - load each key as a file
for key in &group.keys {
let filename = format!("{}.md", key);
// Try config dir first, then project, then global
if let Some(content) = read_nonempty(&config_dir.join(&filename)) {
memories.push((key.clone(), content));
} else if let Some(content) = load_memory_file(&filename, project.as_deref(), &global) {
memories.push((key.clone(), content));
}
}
}
Some(other) => {
eprintln!("Unknown context group source: {}", other);
}
}
}
// People dir — glob all .md files
for dir in [project.as_deref(), Some(global.as_path())].into_iter().flatten() {
@ -114,16 +137,6 @@ fn load_memory_files(cwd: &Path, memory_project: Option<&Path>) -> Vec<(String,
}
}
// Global scratch (if different from project scratch)
let global_scratch = global.join("scratch.md");
if project.as_deref().map_or(true, |p| p.join("scratch.md") != global_scratch) {
if let Some(content) = read_nonempty(&global_scratch) {
if !memories.iter().any(|(n, _)| n == "scratch.md") {
memories.push(("global/scratch.md".to_string(), content));
}
}
}
memories
}
@ -152,7 +165,7 @@ fn find_project_memory_dir(cwd: &Path, home: &Path) -> Option<PathBuf> {
/// Discover instruction and memory files that would be loaded.
/// Returns (instruction_files, memory_files) as (display_path, chars) pairs.
pub fn context_file_info(prompt_file: &str, memory_project: Option<&Path>) -> (Vec<(String, usize)>, Vec<(String, usize)>) {
pub fn context_file_info(prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> (Vec<(String, usize)>, Vec<(String, usize)>) {
let cwd = std::env::current_dir().unwrap_or_default();
let context_files = find_context_files(&cwd, prompt_file);
@ -163,7 +176,7 @@ pub fn context_file_info(prompt_file: &str, memory_project: Option<&Path>) -> (V
})
.collect();
let memories = load_memory_files(&cwd, memory_project);
let memories = load_memory_files(&cwd, memory_project, context_groups);
let memory_files: Vec<_> = memories.into_iter()
.map(|(name, content)| (name, content.len()))
.collect();
@ -194,7 +207,7 @@ Concise is good. Be direct. Trust yourself."
}
/// Context message: instruction files + memory files + manifest.
pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>) -> Result<(Vec<(String, String)>, usize, usize)> {
pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Result<(Vec<(String, String)>, usize, usize)> {
let mut parts: Vec<(String, String)> = vec![
("Preamble".to_string(),
"Everything below is already loaded — your identity, instructions, \
@ -215,7 +228,7 @@ pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: O
}
}
let memories = load_memory_files(cwd, memory_project);
let memories = load_memory_files(cwd, memory_project, context_groups);
let memory_count = memories.len();
for (name, content) in memories {
parts.push((name, content));

View file

@ -843,11 +843,34 @@ impl Session {
self.send_context_info();
}
/// Load context_groups from the shared config file.
///
/// Reads `~/.config/poc-agent/config.json5`, parses it as JSON5, and
/// deserializes `memory.context_groups`. Any read/parse/deserialize failure
/// yields an empty list so context assembly degrades gracefully.
fn load_context_groups(&self) -> Vec<identity::ContextGroup> {
    let config_path = dirs::home_dir()
        .unwrap_or_else(|| PathBuf::from("."))
        .join(".config/poc-agent/config.json5");
    // Flattened from a nested if-let pyramid: each fallible step becomes
    // None and the final unwrap_or_default produces the empty Vec.
    std::fs::read_to_string(&config_path)
        .ok()
        .and_then(|content| json5::from_str::<serde_json::Value>(&content).ok())
        .and_then(|config| {
            let groups = config.get("memory")?.get("context_groups")?.clone();
            serde_json::from_value(groups).ok()
        })
        .unwrap_or_default()
}
/// Send context loading info to the TUI debug screen.
fn send_context_info(&self) {
let context_groups = self.load_context_groups();
let (instruction_files, memory_files) = identity::context_file_info(
&self.config.prompt_file,
self.config.app.memory_project.as_deref(),
&context_groups,
);
let _ = self.ui_tx.send(UiMessage::ContextInfoUpdate(ContextInfo {
model: self.config.model.clone(),

View file

@ -179,7 +179,11 @@ pub fn call_api_with_tools_sync(
.enable_all()
.build()
.map_err(|e| format!("tokio runtime: {}", e))?;
rt.block_on(call_api_with_tools(agent, prompt, log))
let prov = format!("agent:{}", agent);
rt.block_on(
crate::store::TASK_PROVENANCE.scope(prov,
call_api_with_tools(agent, prompt, log))
)
}).join().unwrap()
})
}

View file

@ -8,13 +8,12 @@
use clap::Parser;
use poc_memory::search::{self, AlgoStage};
use poc_memory::store;
use std::collections::{BTreeMap, HashSet};
use std::fs;
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::{Duration, SystemTime};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
#[derive(Parser)]
#[command(name = "memory-search")]
@ -27,27 +26,59 @@ struct Args {
#[arg(short, long)]
debug: bool,
/// Show session state: seen set, returned memories, compaction info
/// Show session state: seen set info
#[arg(long)]
seen: bool,
/// Max results from search pipeline (filtered by seen set before injection)
#[arg(long, default_value = "50")]
max_results: usize,
/// Search query (bypasses stashed input, uses this as the prompt)
#[arg(long, short)]
query: Option<String>,
/// Algorithm pipeline stages: e.g. spread spectral,k=20 spread,max_hops=4
/// Default: spread.
/// Algorithm pipeline stages
pipeline: Vec<String>,
}
const STASH_PATH: &str = "/tmp/claude-memory-search/last-input.json";
/// Current wall-clock time as whole seconds since the UNIX epoch.
///
/// # Panics
/// Panics only if the system clock reports a time before the epoch,
/// which indicates a misconfigured host rather than a recoverable error.
fn now_secs() -> u64 {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock is set before the UNIX epoch")
        .as_secs()
}
/// Max bytes per context chunk (hook output limit is ~10K chars)
const CHUNK_SIZE: usize = 9000;
/// Per-session state parsed from the hook's JSON input, plus the
/// directory where this session's state files live.
struct Session {
    // Session id from the hook JSON's "session_id" field (never empty).
    session_id: String,
    // Transcript JSONL path from "transcript_path" (may be empty).
    transcript_path: String,
    // State directory (/tmp/claude-memory-search) holding per-session files.
    state_dir: PathBuf,
}
impl Session {
    /// Build session state from hook input.
    ///
    /// In hook mode the JSON payload is read from stdin and stashed to
    /// STASH_PATH for later debug replays; otherwise the stash is replayed.
    /// Returns None when input is missing, unparseable, or lacks a session id.
    fn load(hook: bool) -> Option<Self> {
        let state_dir = PathBuf::from("/tmp/claude-memory-search");
        fs::create_dir_all(&state_dir).ok();
        let input;
        if hook {
            let mut raw = String::new();
            io::stdin().read_to_string(&mut raw).unwrap_or_default();
            fs::write(STASH_PATH, &raw).ok();
            input = raw;
        } else {
            input = fs::read_to_string(STASH_PATH).ok()?;
        }
        let parsed: serde_json::Value = serde_json::from_str(&input).ok()?;
        let id = parsed["session_id"].as_str().unwrap_or("");
        if id.is_empty() {
            return None;
        }
        let transcript = parsed["transcript_path"].as_str().unwrap_or("");
        Some(Session {
            session_id: id.to_string(),
            transcript_path: transcript.to_string(),
            state_dir,
        })
    }

    /// Path of a per-session state file: `<state_dir>/<prefix>-<session_id>`.
    fn path(&self, prefix: &str) -> PathBuf {
        let mut name = String::from(prefix);
        name.push('-');
        name.push_str(&self.session_id);
        self.state_dir.join(name)
    }
}
fn main() {
// Daemon agent calls set POC_AGENT=1 — skip memory search.
if std::env::var("POC_AGENT").is_ok() {
@ -67,91 +98,49 @@ fn main() {
return;
}
let input = if args.hook {
// Hook mode: read from stdin, stash for later debug runs
let mut buf = String::new();
io::stdin().read_to_string(&mut buf).unwrap_or_default();
fs::create_dir_all("/tmp/claude-memory-search").ok();
fs::write(STASH_PATH, &buf).ok();
buf
} else {
// All other modes: replay stashed input
fs::read_to_string(STASH_PATH).unwrap_or_else(|_| {
eprintln!("No stashed input at {}", STASH_PATH);
std::process::exit(1);
})
};
let Some(session) = Session::load(args.hook) else { return };
let debug = args.debug || !args.hook;
let json: serde_json::Value = match serde_json::from_str(&input) {
Ok(v) => v,
Err(_) => return,
};
let prompt = json["prompt"].as_str().unwrap_or("");
let session_id = json["session_id"].as_str().unwrap_or("");
if session_id.is_empty() {
return;
}
let state_dir = PathBuf::from("/tmp/claude-memory-search");
fs::create_dir_all(&state_dir).ok();
// Detect post-compaction reload via mmap backward scan
let transcript_path = json["transcript_path"].as_str().unwrap_or("");
let is_compaction = poc_memory::transcript::detect_new_compaction(
&state_dir, session_id, transcript_path,
&session.state_dir, &session.session_id, &session.transcript_path,
);
// First prompt or post-compaction: load full context
let cookie_path = state_dir.join(format!("cookie-{}", session_id));
let cookie_path = session.path("cookie");
let is_first = !cookie_path.exists();
if is_first || is_compaction {
// Rotate seen set: current → prev (for surface agent navigation roots)
let seen_path = state_dir.join(format!("seen-{}", session_id));
let seen_prev_path = state_dir.join(format!("seen-prev-{}", session_id));
let returned_path = state_dir.join(format!("returned-{}", session_id));
// Rotate seen set on compaction, clear on first
if is_compaction {
fs::rename(&seen_path, &seen_prev_path).ok();
fs::rename(&session.path("seen"), &session.path("seen-prev")).ok();
} else {
fs::remove_file(&seen_path).ok();
fs::remove_file(&seen_prev_path).ok();
fs::remove_file(&session.path("seen")).ok();
fs::remove_file(&session.path("seen-prev")).ok();
}
fs::remove_file(&returned_path).ok();
fs::remove_file(&session.path("returned")).ok();
}
if debug {
println!("[memory-search] session={} is_first={} is_compaction={}", session_id, is_first, is_compaction);
println!("[memory-search] session={} is_first={} is_compaction={}",
session.session_id, is_first, is_compaction);
}
if is_first || is_compaction {
// Create/touch the cookie
let cookie = if is_first {
let c = generate_cookie();
fs::write(&cookie_path, &c).ok();
c
} else {
fs::read_to_string(&cookie_path).unwrap_or_default().trim().to_string()
};
if is_first {
fs::write(&cookie_path, generate_cookie()).ok();
}
if debug { println!("[memory-search] loading full context"); }
// Load full memory context, chunk it, print first chunk, save rest
if let Ok(output) = Command::new("poc-memory").args(["admin", "load-context"]).output() {
if output.status.success() {
let ctx = String::from_utf8_lossy(&output.stdout).to_string();
if !ctx.trim().is_empty() {
// Extract keys from all chunks for seen set
let mut ctx_seen = load_seen(&state_dir, session_id);
let mut ctx_seen = load_seen(&session.state_dir, &session.session_id);
for line in ctx.lines() {
if line.starts_with("--- ") && line.ends_with(" ---") {
let inner = &line[4..line.len() - 4];
if let Some(paren) = inner.rfind(" (") {
let key = inner[..paren].trim();
mark_seen(&state_dir, session_id, key, &mut ctx_seen);
mark_seen(&session.state_dir, &session.session_id, key, &mut ctx_seen);
}
}
}
@ -162,208 +151,26 @@ fn main() {
ctx.len(), chunks.len());
}
// Print first chunk
if let Some(first) = chunks.first() {
if args.hook {
print!("{}", first);
if args.hook { print!("{}", first); }
}
}
// Save remaining chunks for drip-feeding
save_pending_chunks(&state_dir, session_id, &chunks[1..]);
save_pending_chunks(&session.state_dir, &session.session_id, &chunks[1..]);
}
}
}
let _ = cookie;
} else {
// Not first call: drip-feed next pending chunk
if let Some(chunk) = pop_pending_chunk(&state_dir, session_id) {
} else if let Some(chunk) = pop_pending_chunk(&session.state_dir, &session.session_id) {
if debug {
println!("[memory-search] drip-feeding pending chunk: {} bytes", chunk.len());
}
if args.hook { print!("{}", chunk); }
}
// Surface agent: consume previous result, inject memories, spawn next run
if args.hook {
print!("{}", chunk);
}
}
surface_agent_cycle(&session);
}
// Search requires a prompt (PostToolUse events don't have one)
if prompt.is_empty() {
return;
}
// Skip system/AFK prompts
for prefix in &["is AFK", "You're on your own", "IRC mention"] {
if prompt.starts_with(prefix) {
return;
}
}
let store = match store::Store::load() {
Ok(s) => s,
Err(_) => return,
};
// Search for node keys in last ~150k tokens of transcript
if debug { println!("[memory-search] transcript: {}", transcript_path); }
let mut terms = extract_weighted_terms(transcript_path, 150_000, &store);
// Also extract terms from the prompt itself (handles fresh sessions
// and queries about topics not yet mentioned in the transcript)
let prompt_terms = search::extract_query_terms(prompt, 8);
if !prompt_terms.is_empty() {
if debug { println!("[memory-search] prompt terms: {}", prompt_terms); }
for word in prompt_terms.split_whitespace() {
let lower = word.to_lowercase();
// Prompt terms get weight 1.0 (same as direct mention)
terms.entry(lower).or_insert(1.0);
}
}
// Boost node keys that appear as substrings in the current prompt.
// Makes explicit mentions strong seeds for spread — the graph
// determines what gets pulled in, this just ensures the seed fires.
{
let prompt_lower = prompt.to_lowercase();
for (key, node) in &store.nodes {
if node.deleted { continue; }
let key_lower = key.to_lowercase();
if key_lower.len() < 5 { continue; }
if prompt_lower.contains(&key_lower) {
*terms.entry(key_lower).or_insert(0.0) += 10.0;
if debug { println!("[memory-search] prompt key boost: {} (+10.0)", key); }
}
}
}
if debug {
println!("[memory-search] {} terms total", terms.len());
let mut by_weight: Vec<_> = terms.iter().collect();
by_weight.sort_by(|a, b| b.1.total_cmp(a.1));
for (term, weight) in by_weight.iter().take(20) {
println!(" {:.3} {}", weight, term);
}
}
if terms.is_empty() {
if debug { println!("[memory-search] no terms found, done"); }
return;
}
// Parse algorithm pipeline
let pipeline: Vec<AlgoStage> = if args.pipeline.is_empty() {
// Default: just spreading activation
vec![AlgoStage::parse("spread").unwrap()]
} else {
let mut stages = Vec::new();
for arg in &args.pipeline {
match AlgoStage::parse(arg) {
Ok(s) => stages.push(s),
Err(e) => {
eprintln!("error: {}", e);
std::process::exit(1);
}
}
}
stages
};
if debug {
let names: Vec<String> = pipeline.iter().map(|s| format!("{}", s.algo)).collect();
println!("[memory-search] pipeline: {}", names.join(""));
}
// Extract seeds from terms
let graph = poc_memory::graph::build_graph_fast(&store);
let (seeds, direct_hits) = search::match_seeds(&terms, &store);
if seeds.is_empty() {
if debug { println!("[memory-search] no seeds matched, done"); }
return;
}
if debug {
println!("[memory-search] {} seeds", seeds.len());
let mut sorted = seeds.clone();
sorted.sort_by(|a, b| b.1.total_cmp(&a.1));
for (key, score) in sorted.iter().take(20) {
println!(" {:.4} {}", score, key);
}
}
let raw_results = search::run_pipeline(&pipeline, seeds, &graph, &store, debug, args.max_results);
let results: Vec<search::SearchResult> = raw_results.into_iter()
.map(|(key, activation)| {
let is_direct = direct_hits.contains(&key);
search::SearchResult { key, activation, is_direct, snippet: None }
}).collect();
if debug {
println!("[memory-search] {} search results", results.len());
for r in results.iter().take(10) {
let marker = if r.is_direct { "" } else { " " };
println!(" {} [{:.4}] {}", marker, r.activation, r.key);
}
}
if results.is_empty() {
if debug { println!("[memory-search] no results, done"); }
return;
}
let mut seen = load_seen(&state_dir, session_id);
if debug { println!("[memory-search] {} keys in seen set", seen.len()); }
// Format results like poc-memory search output
let search_output = search::format_results(&results);
let cookie = fs::read_to_string(&cookie_path).unwrap_or_default().trim().to_string();
let mut result_output = String::new();
let mut count = 0;
let max_entries = 3;
for line in search_output.lines() {
if count >= max_entries { break; }
let trimmed = line.trim();
if trimmed.is_empty() { continue; }
if let Some(key) = extract_key_from_line(trimmed) {
if seen.contains(&key) { continue; }
mark_seen(&state_dir, session_id, &key, &mut seen);
mark_returned(&state_dir, session_id, &key);
result_output.push_str(line);
result_output.push('\n');
count += 1;
} else if count > 0 {
result_output.push_str(line);
result_output.push('\n');
}
}
if count == 0 {
if debug { println!("[memory-search] all results already seen"); }
return;
}
if args.hook {
println!("Recalled memories [{}]:", cookie);
}
print!("{}", result_output);
// Record search hits with daemon (fire-and-forget)
let hit_keys: Vec<&str> = results.iter().map(|r| r.key.as_str()).collect();
if debug { println!("[memory-search] recording {} search hits", hit_keys.len()); }
match poc_memory::agents::daemon::rpc_record_hits(&hit_keys) {
Ok(()) => { if debug { println!("[memory-search] hits recorded"); } }
Err(e) => { if debug { println!("[memory-search] hit recording failed: {}", e); } }
}
// Clean up stale state files (opportunistic)
cleanup_stale_files(&state_dir, Duration::from_secs(86400));
cleanup_stale_files(&session.state_dir, Duration::from_secs(86400));
}
/// Direct query mode: search for a term without hook/stash machinery.
@ -416,7 +223,7 @@ fn run_query_mode(query: &str, args: &Args) {
.collect()
};
let max_results = args.max_results.max(25);
let max_results = 50;
let results = search::run_pipeline(&pipeline, seeds, &graph, &store, true, max_results);
println!("\n[query] top {} results:", results.len().min(25));
@ -502,132 +309,16 @@ fn pop_pending_chunk(dir: &Path, session_id: &str) -> Option<String> {
Some(content)
}
/// Reverse-scan the transcript JSONL, extracting text from user/assistant
/// messages until we accumulate `max_tokens` tokens of text content.
/// Then search for all node keys as substrings, weighted by position.
fn extract_weighted_terms(
path: &str,
max_tokens: usize,
store: &poc_memory::store::Store,
) -> BTreeMap<String, f64> {
if path.is_empty() { return BTreeMap::new(); }
let content = match fs::read_to_string(path) {
Ok(c) => c,
Err(_) => return BTreeMap::new(),
};
// Collect text from messages, scanning backwards, until token budget hit
let mut message_texts: Vec<String> = Vec::new();
let mut token_count = 0;
for line in content.lines().rev() {
if token_count >= max_tokens { break; }
let obj: serde_json::Value = match serde_json::from_str(line) {
Ok(v) => v,
Err(_) => continue,
};
let msg_type = obj.get("type").and_then(|v| v.as_str()).unwrap_or("");
if msg_type != "user" && msg_type != "assistant" { continue; }
let mut msg_text = String::new();
let msg = obj.get("message").unwrap_or(&obj);
match msg.get("content") {
Some(serde_json::Value::String(s)) => {
msg_text.push_str(s);
}
Some(serde_json::Value::Array(arr)) => {
for block in arr {
if block.get("type").and_then(|v| v.as_str()) == Some("text") {
if let Some(t) = block.get("text").and_then(|v| v.as_str()) {
msg_text.push(' ');
msg_text.push_str(t);
}
}
}
}
_ => {}
}
token_count += msg_text.len() / 4;
message_texts.push(msg_text);
}
// Reverse so oldest is first (position weighting: later = more recent = higher)
message_texts.reverse();
let all_text = message_texts.join(" ").to_lowercase();
let text_len = all_text.len();
if text_len == 0 { return BTreeMap::new(); }
// Search for each node key as a substring (casefolded), accumulate position-weighted score
let mut terms = BTreeMap::new();
for (key, _node) in &store.nodes {
let key_folded = key.to_lowercase();
let mut pos = 0;
while let Some(found) = all_text[pos..].find(&key_folded) {
let abs_pos = pos + found;
let weight = (abs_pos + 1) as f64 / text_len as f64;
*terms.entry(key_folded.clone()).or_insert(0.0) += weight;
pos = abs_pos + key_folded.len();
}
}
terms
}
/// Pull the memory key out of a formatted search-result line.
///
/// Result lines look like `  [0.1234] key-name (c3, m2)`: the key sits
/// between the `"] "` separator and an optional `" (c"` suffix. Returns
/// None when the separator is absent or the extracted key is empty.
fn extract_key_from_line(line: &str) -> Option<String> {
    let (_, rest) = line.split_once("] ")?;
    let end = rest.find(" (c").unwrap_or(rest.len());
    let key = rest[..end].trim();
    (!key.is_empty()).then(|| key.to_string())
}
/// Generate a random 12-hex-character cookie for tagging a context load.
/// (A simple-format UUIDv4 is 32 ASCII hex chars; we keep the first 12.)
fn generate_cookie() -> String {
    uuid::Uuid::new_v4()
        .as_simple()
        .to_string()
        .chars()
        .take(12)
        .collect()
}
/// Extract the key from one line of a seen file.
///
/// Current format is "TIMESTAMP\tKEY"; legacy lines are just the bare key,
/// in which case the whole line is returned unchanged.
fn parse_seen_line(line: &str) -> &str {
    match line.split_once('\t') {
        Some((_, key)) => key,
        None => line,
    }
}
/// Load the most recently surfaced memory keys, sorted newest-first, capped at `limit`.
/// Used to give the surface agent navigation roots.
fn load_recent_seen(dir: &Path, session_id: &str, limit: usize) -> Vec<String> {
    // Gather (timestamp, key) pairs from the current and pre-compaction
    // seen files; lines without a tab separator are ignored.
    let mut entries: Vec<(String, String)> = ["", "-prev"]
        .iter()
        .flat_map(|suffix| {
            let path = dir.join(format!("seen{}-{}", suffix, session_id));
            fs::read_to_string(&path)
                .unwrap_or_default()
                .lines()
                .filter_map(|line| {
                    let (ts, key) = line.split_once('\t')?;
                    Some((ts.to_string(), key.to_string()))
                })
                .collect::<Vec<_>>()
        })
        .collect();
    // Newest first (stable, so current-file entries win timestamp ties),
    // then keep only the first occurrence of each key, up to `limit`.
    entries.sort_by(|a, b| b.0.cmp(&a.0));
    let mut dedup = HashSet::new();
    let mut keys = Vec::new();
    for (_, key) in entries {
        if keys.len() >= limit {
            break;
        }
        if dedup.insert(key.clone()) {
            keys.push(key);
        }
    }
    keys
}
fn load_seen(dir: &Path, session_id: &str) -> HashSet<String> {
let path = dir.join(format!("seen-{}", session_id));
if path.exists() {
@ -652,75 +343,122 @@ fn mark_seen(dir: &Path, session_id: &str, key: &str, seen: &mut HashSet<String>
}
}
/// Append `key` to this session's returned-keys file, skipping duplicates.
///
/// Best-effort: open/write failures are silently ignored, matching the
/// other state-file helpers in this module.
fn mark_returned(dir: &Path, session_id: &str, key: &str) {
    // Compare &str to &String directly instead of allocating key.to_string()
    // just to call Vec::contains.
    if load_returned(dir, session_id).iter().any(|k| k == key) {
        return;
    }
    let path = dir.join(format!("returned-{}", session_id));
    if let Ok(mut f) = fs::OpenOptions::new().create(true).append(true).open(path) {
        writeln!(f, "{}", key).ok();
    }
}
fn load_returned(dir: &Path, session_id: &str) -> Vec<String> {
let path = dir.join(format!("returned-{}", session_id));
if path.exists() {
let mut seen = HashSet::new();
fs::read_to_string(path)
.unwrap_or_default()
.lines()
.filter(|s| !s.is_empty())
.filter(|s| seen.insert(s.to_string()))
.map(|s| s.to_string())
.collect()
} else {
Vec::new()
/// Surface agent lifecycle: check if previous agent finished, consume results,
/// render and inject unseen memories, spawn next agent run.
///
/// State lives in two per-session files: `surface-result-<id>` (the agent's
/// captured stdout) and `surface-pid-<id>` ("PID\tSTART_TS"). If a previous
/// agent is still running and within its timeout, this is a no-op.
fn surface_agent_cycle(session: &Session) {
    let result_path = session.state_dir.join(format!("surface-result-{}", session.session_id));
    let pid_path = session.state_dir.join(format!("surface-pid-{}", session.session_id));
    // Seconds before a still-running agent is killed; config override or 120s default.
    let surface_timeout = poc_memory::config::get()
        .surface_timeout_secs
        .unwrap_or(120) as u64;
    // Check if previous agent is done
    let agent_done = match fs::read_to_string(&pid_path) {
        Ok(content) => {
            // Pid file format: "PID\tSTART_TS"; malformed fields parse to 0.
            let parts: Vec<&str> = content.split('\t').collect();
            let pid: u32 = parts.first().and_then(|s| s.trim().parse().ok()).unwrap_or(0);
            let start_ts: u64 = parts.get(1).and_then(|s| s.trim().parse().ok()).unwrap_or(0);
            if pid == 0 { true }
            else {
                // kill(pid, 0) delivers no signal — it only probes process existence.
                let alive = unsafe { libc::kill(pid as i32, 0) == 0 };
                if !alive { true }
                else if now_secs().saturating_sub(start_ts) > surface_timeout {
                    // Agent overran its budget: terminate it and treat it as done.
                    unsafe { libc::kill(pid as i32, libc::SIGTERM); }
                    true
                } else { false }
            }
        }
        // No pid file — nothing is running.
        Err(_) => true,
    };
    if !agent_done { return; }
    // Consume result
    if let Ok(result) = fs::read_to_string(&result_path) {
        if !result.trim().is_empty() {
            // The agent's verdict marker is expected among the last few
            // non-empty lines of its output (tail_lines is newest-first).
            let tail_lines: Vec<&str> = result.lines().rev()
                .filter(|l| !l.trim().is_empty()).take(8).collect();
            let has_new = tail_lines.iter().any(|l| l.starts_with("NEW RELEVANT MEMORIES:"));
            let has_none = tail_lines.iter().any(|l| l.starts_with("NO NEW RELEVANT MEMORIES"));
            if has_new {
                // Keys follow the marker, one per line, optionally "- " bulleted.
                let after_marker = result.rsplit_once("NEW RELEVANT MEMORIES:")
                    .map(|(_, rest)| rest).unwrap_or("");
                let keys: Vec<String> = after_marker.lines()
                    .map(|l| l.trim().trim_start_matches("- ").trim().to_string())
                    .filter(|l| !l.is_empty()).collect();
                // Render and inject unseen keys
                let mut seen = load_seen(&session.state_dir, &session.session_id);
                let seen_path = session.path("seen");
                for key in &keys {
                    // Skip keys already surfaced this session.
                    if !seen.insert(key.clone()) { continue; }
                    if let Ok(output) = Command::new("poc-memory").args(["render", key]).output() {
                        if output.status.success() {
                            let content = String::from_utf8_lossy(&output.stdout);
                            if !content.trim().is_empty() {
                                // Inject into hook output and record in the seen file.
                                println!("--- {} (surfaced) ---", key);
                                print!("{}", content);
                                if let Ok(mut f) = fs::OpenOptions::new()
                                    .create(true).append(true).open(&seen_path) {
                                    let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M:%S");
                                    writeln!(f, "{}\t{}", ts, key).ok();
                                }
                            }
                        }
                    }
                }
            } else if !has_none {
                // Neither marker found — log the unexpected output for inspection.
                let log_dir = poc_memory::store::memory_dir().join("logs");
                fs::create_dir_all(&log_dir).ok();
                let log_path = log_dir.join("surface-errors.log");
                if let Ok(mut f) = fs::OpenOptions::new().create(true).append(true).open(&log_path) {
                    let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M:%S");
                    // tail_lines is reversed, so first() is the final output line.
                    let last = tail_lines.first().unwrap_or(&"");
                    let _ = writeln!(f, "[{}] unexpected surface output: {}", ts, last);
                }
            }
        }
    }
    fs::remove_file(&result_path).ok();
    fs::remove_file(&pid_path).ok();
    // Spawn next surface agent
    if let Ok(output_file) = fs::File::create(&result_path) {
        if let Ok(child) = Command::new("poc-memory")
            .args(["agent", "run", "surface", "--count", "1", "--local"])
            .env("POC_SESSION_ID", &session.session_id)
            .stdout(output_file)
            .stderr(std::process::Stdio::null())
            .spawn()
        {
            let pid = child.id();
            let ts = now_secs();
            // Record pid + start time so the next cycle can check liveness/timeout.
            if let Ok(mut f) = fs::File::create(&pid_path) {
                write!(f, "{}\t{}", pid, ts).ok();
            }
        }
    }
}
fn show_seen() {
let state_dir = PathBuf::from("/tmp/claude-memory-search");
// Read stashed input for session_id
let input = match fs::read_to_string(STASH_PATH) {
Ok(s) => s,
Err(_) => {
eprintln!("No stashed input at {}", STASH_PATH);
let Some(session) = Session::load(false) else {
eprintln!("No session state available");
return;
}
};
let json: serde_json::Value = match serde_json::from_str(&input) {
Ok(v) => v,
Err(_) => {
eprintln!("Failed to parse stashed input");
return;
}
};
let session_id = json["session_id"].as_str().unwrap_or("");
if session_id.is_empty() {
eprintln!("No session_id in stashed input");
return;
}
let transcript_path = json["transcript_path"].as_str().unwrap_or("");
println!("Session: {}", session.session_id);
println!("Session: {}", session_id);
let cookie_path = state_dir.join(format!("cookie-{}", session_id));
if let Ok(cookie) = fs::read_to_string(&cookie_path) {
if let Ok(cookie) = fs::read_to_string(&session.path("cookie")) {
println!("Cookie: {}", cookie.trim());
}
// Show last compaction info
let compaction_path = state_dir.join(format!("compaction-{}", session_id));
match fs::read_to_string(&compaction_path) {
Ok(offset_str) => {
let offset: u64 = offset_str.trim().parse().unwrap_or(0);
// Try to get a timestamp from the compaction offset in the transcript
let ts = if !transcript_path.is_empty() && offset > 0 {
poc_memory::transcript::compaction_timestamp(transcript_path, offset)
} else {
None
};
match fs::read_to_string(&session.path("compaction")) {
Ok(s) => {
let offset: u64 = s.trim().parse().unwrap_or(0);
let ts = poc_memory::transcript::compaction_timestamp(&session.transcript_path, offset);
match ts {
Some(t) => println!("Last compaction: offset {} ({})", offset, t),
None => println!("Last compaction: offset {}", offset),
@ -729,52 +467,21 @@ fn show_seen() {
Err(_) => println!("Last compaction: none detected"),
}
// Pending chunks
let chunks_dir = state_dir.join(format!("chunks-{}", session_id));
let pending = fs::read_dir(&chunks_dir).ok()
.map(|d| d.flatten().count())
.unwrap_or(0);
let pending = fs::read_dir(&session.path("chunks")).ok()
.map(|d| d.flatten().count()).unwrap_or(0);
if pending > 0 {
println!("Pending chunks: {}", pending);
}
let returned = load_returned(&state_dir, session_id);
let returned_set: HashSet<_> = returned.iter().cloned().collect();
for (label, suffix) in [("Current seen set", ""), ("Previous seen set (pre-compaction)", "-prev")] {
let path = session.state_dir.join(format!("seen{}-{}", suffix, session.session_id));
let content = fs::read_to_string(&path).unwrap_or_default();
let lines: Vec<&str> = content.lines().filter(|s| !s.is_empty()).collect();
if lines.is_empty() { continue; }
let print_seen_file = |label: &str, path: &std::path::Path| {
let lines: Vec<String> = fs::read_to_string(path)
.unwrap_or_default()
.lines()
.filter(|s| !s.is_empty())
.map(|s| s.to_string())
.collect();
if lines.is_empty() { return; }
let context_keys: Vec<_> = lines.iter()
.map(|l| parse_seen_line(l).to_string())
.filter(|k| !returned_set.contains(k))
.collect();
let search_keys: Vec<_> = lines.iter()
.map(|l| parse_seen_line(l).to_string())
.filter(|k| returned_set.contains(k))
.collect();
println!("\n{} ({} total):", label, lines.len());
if !context_keys.is_empty() {
println!(" Context-loaded ({}):", context_keys.len());
for key in &context_keys { println!(" {}", key); }
println!("\n{} ({}):", label, lines.len());
for line in &lines { println!(" {}", line); }
}
if !search_keys.is_empty() {
println!(" Search-returned ({}):", search_keys.len());
for key in &search_keys { println!(" {}", key); }
}
};
let current_path = state_dir.join(format!("seen-{}", session_id));
let prev_path = state_dir.join(format!("seen-prev-{}", session_id));
print_seen_file("Current seen set", &current_path);
print_seen_file("Previous seen set (pre-compaction)", &prev_path);
}
fn cleanup_stale_files(dir: &Path, max_age: Duration) {

View file

@ -163,140 +163,6 @@ Keep it narrative, not a task log."
}
}
/// Surface agent cycle: consume previous result, spawn next run.
/// Called from both UserPromptSubmit and PostToolUse.
///
/// State lives under /tmp/claude-memory-search, keyed by session id:
/// - `surface-result-<sid>`: stdout of the last surface agent run
/// - `surface-pid-<sid>`:    "<pid>\t<start_ts>" of the running agent
/// - `seen-<sid>` / `returned-<sid>`: append-only logs of surfaced keys
///
/// Best-effort by design: every I/O failure is ignored rather than
/// propagated, so a broken state dir never blocks the hook.
fn surface_agent_cycle(hook: &Value) {
    let session_id = hook["session_id"].as_str().unwrap_or("");
    if session_id.is_empty() { return; }
    let state_dir = PathBuf::from("/tmp/claude-memory-search");
    let result_path = state_dir.join(format!("surface-result-{}", session_id));
    let pid_path = state_dir.join(format!("surface-pid-{}", session_id));
    // Hard deadline for a single agent run; configurable, default 120s.
    let surface_timeout = poc_memory::config::get()
        .surface_timeout_secs
        .unwrap_or(120) as u64;
    // Decide whether the previous agent run is finished. The pid file holds
    // "<pid>\t<start_ts>"; a missing or unparsable file counts as done.
    let agent_done = match fs::read_to_string(&pid_path) {
        Ok(content) => {
            let parts: Vec<&str> = content.split('\t').collect();
            let pid: u32 = parts.first()
                .and_then(|s| s.trim().parse().ok())
                .unwrap_or(0);
            let start_ts: u64 = parts.get(1)
                .and_then(|s| s.trim().parse().ok())
                .unwrap_or(0);
            if pid == 0 {
                // Corrupt pid file — treat the run as finished.
                true
            } else {
                // SAFETY: kill with signal 0 only probes for process
                // existence; it sends no signal.
                let alive = unsafe { libc::kill(pid as i32, 0) == 0 };
                if !alive {
                    true
                } else {
                    // Still running: enforce the timeout so a hung agent
                    // can't wedge the cycle forever.
                    let elapsed = now_secs().saturating_sub(start_ts);
                    if elapsed > surface_timeout {
                        // SAFETY: best-effort SIGTERM to a pid we just
                        // probed; failure is ignored.
                        unsafe { libc::kill(pid as i32, libc::SIGTERM); }
                        true
                    } else {
                        false
                    }
                }
            }
        }
        // No pid file at all: nothing in flight, safe to spawn.
        Err(_) => true,
    };
    if agent_done {
        // Consume the previous run's output, if any.
        if let Ok(result) = fs::read_to_string(&result_path) {
            if !result.trim().is_empty() {
                // Only the last few non-empty lines matter: the agent is
                // expected to end with a NEW/NO-NEW marker line.
                let tail_lines: Vec<&str> = result.lines().rev()
                    .filter(|l| !l.trim().is_empty())
                    .take(8)
                    .collect();
                let has_new = tail_lines.iter()
                    .any(|l| l.starts_with("NEW RELEVANT MEMORIES:"));
                let has_none = tail_lines.iter()
                    .any(|l| l.starts_with("NO NEW RELEVANT MEMORIES"));
                if has_new {
                    // Everything after the last marker is a "- key" list.
                    let after_marker = result.rsplit_once("NEW RELEVANT MEMORIES:")
                        .map(|(_, rest)| rest)
                        .unwrap_or("");
                    let keys: Vec<&str> = after_marker.lines()
                        .map(|l| l.trim().trim_start_matches("- ").trim())
                        .filter(|l| !l.is_empty())
                        .collect();
                    if !keys.is_empty() {
                        for key in &keys {
                            // Render each surfaced memory and inject it into
                            // the conversation via stdout.
                            if let Ok(output) = Command::new("poc-memory")
                                .args(["render", key])
                                .output()
                            {
                                if output.status.success() {
                                    let content = String::from_utf8_lossy(&output.stdout);
                                    if !content.trim().is_empty() {
                                        println!("--- {} (surfaced) ---", key);
                                        print!("{}", content);
                                        // Record in the seen set (timestamped)
                                        // so it isn't surfaced again.
                                        let seen_path = state_dir.join(format!("seen-{}", session_id));
                                        if let Ok(mut f) = fs::OpenOptions::new()
                                            .create(true).append(true).open(&seen_path)
                                        {
                                            use std::io::Write;
                                            let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M:%S");
                                            let _ = writeln!(f, "{}\t{}", ts, key);
                                        }
                                        // Also mark it as search/agent-returned
                                        // (vs context-loaded) for the debug view.
                                        let returned_path = state_dir.join(format!("returned-{}", session_id));
                                        if let Ok(mut f) = fs::OpenOptions::new()
                                            .create(true).append(true).open(&returned_path)
                                        {
                                            use std::io::Write;
                                            let _ = writeln!(f, "{}", key);
                                        }
                                    }
                                }
                            }
                        }
                    }
                } else if !has_none {
                    // Neither marker present: the agent emitted something
                    // unexpected. Log the last line for later diagnosis
                    // (tail_lines is reversed, so .first() is the last line).
                    let log_dir = poc_memory::store::memory_dir().join("logs");
                    fs::create_dir_all(&log_dir).ok();
                    let log_path = log_dir.join("surface-errors.log");
                    if let Ok(mut f) = fs::OpenOptions::new()
                        .create(true).append(true).open(&log_path)
                    {
                        use std::io::Write;
                        let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M:%S");
                        let last = tail_lines.first().unwrap_or(&"");
                        let _ = writeln!(f, "[{}] unexpected surface output: {}",
                            ts, last);
                    }
                }
            }
        }
        // Clear consumed state before starting the next cycle.
        fs::remove_file(&result_path).ok();
        fs::remove_file(&pid_path).ok();
        // Spawn next surface agent
        if let Ok(output_file) = fs::File::create(&result_path) {
            if let Ok(child) = Command::new("poc-memory")
                .args(["agent", "run", "surface", "--count", "1", "--local"])
                .env("POC_SESSION_ID", session_id)
                .stdout(output_file)
                .stderr(std::process::Stdio::null())
                .spawn()
            {
                use std::io::Write;
                // Persist pid + start time so the next invocation can
                // detect liveness and enforce the timeout.
                let pid = child.id();
                let ts = now_secs();
                if let Ok(mut f) = fs::File::create(&pid_path) {
                    let _ = write!(f, "{}\t{}", pid, ts);
                }
            }
        }
    }
}
fn main() {
let mut input = String::new();
@ -351,7 +217,6 @@ fn main() {
maybe_trigger_observation(t);
}
surface_agent_cycle(&hook);
}
"PostToolUse" => {
// Drip-feed pending context chunks from initial load
@ -379,7 +244,6 @@ fn main() {
check_context(t, true);
}
surface_agent_cycle(&hook);
}
"Stop" => {
let stop_hook_active = hook["stop_hook_active"].as_bool().unwrap_or(false);

View file

@ -37,6 +37,7 @@ pub use parse::{MemoryUnit, parse_units};
pub use view::{StoreView, AnyView};
pub use persist::fsck;
pub use persist::strip_md_keys;
pub use ops::TASK_PROVENANCE;
use crate::graph::{self, Graph};

View file

@ -7,11 +7,18 @@ use super::types::*;
use std::collections::{HashMap, HashSet};
/// Task-local provenance override consumed by `current_provenance` below;
/// takes priority over the `POC_PROVENANCE` env var.
tokio::task_local! {
    /// Task-scoped provenance for agent writes. Set by the daemon before
    /// running an agent's tool calls, so all writes within that task are
    /// automatically attributed to the agent.
    pub static TASK_PROVENANCE: String;
}
/// Provenance priority: task_local (agent context) > env var > "manual".
fn current_provenance() -> String {
Provenance::from_env()
.map(|p| p.label().to_string())
.unwrap_or_else(|| "manual".to_string())
TASK_PROVENANCE.try_with(|p| p.clone())
.or_else(|_| std::env::var("POC_PROVENANCE").map_err(|_| ()))
.unwrap_or_else(|_| "manual".to_string())
}
impl Store {

50
scripts/provision-mistralrs.sh Executable file
View file

@ -0,0 +1,50 @@
#!/bin/bash
# provision-mistralrs.sh — Set up mistral.rs on a RunPod GPU instance
#
# Alternative to vLLM for inference. Pure Rust, more debuggable,
# OpenAI-compatible API. Testing whether it fixes the IncompleteMessage
# errors we're seeing with vLLM on large payloads.
#
# Usage: ssh into your RunPod instance and run this script.
# Runs on port 8001 to coexist with vLLM on 8000.
set -euo pipefail

MODEL="${MODEL:-Qwen/Qwen3.5-27B}"
PORT="${PORT:-8001}"

echo "=== mistral.rs provisioning ==="
echo "Model: $MODEL"
echo "Port: $PORT"
echo ""

# --- Verify GPU ---
# Guarded: under `set -e`, a missing nvidia-smi would abort the whole script
# with an unhelpful "command not found". Warn and continue instead so the
# rest of provisioning still runs (and the failure mode is obvious).
if command -v nvidia-smi >/dev/null 2>&1; then
    echo "GPU status:"
    nvidia-smi --query-gpu=name,memory.total,memory.free --format=csv,noheader
else
    echo "WARNING: nvidia-smi not found — no GPU visible on this instance" >&2
fi
echo ""

# --- Install mistral.rs ---
echo "Installing mistral.rs..."
curl --proto '=https' --tlsv1.2 -sSf \
    https://raw.githubusercontent.com/EricLBuehler/mistral.rs/master/install.sh | sh

# --- Use persistent storage for model cache ---
# /workspace persists across RunPod restarts; avoids re-downloading weights.
export HF_HOME="${HF_HOME:-/workspace/huggingface}"
mkdir -p "$HF_HOME"

# --- Run hardware tune first ---
echo "Running hardware benchmark..."
mistralrs tune

# --- Start server ---
echo ""
echo "Starting mistral.rs server on port $PORT..."
echo "API: http://0.0.0.0:$PORT/v1"
echo "UI: http://0.0.0.0:$PORT/ui"
echo ""

# Run in foreground (use screen/tmux to background)
mistralrs serve \
    --ui \
    --port "$PORT" \
    -m "$MODEL"