migrate agent output to capnp store, add provenance tracking

All agent output now goes to the store as nodes instead of
markdown/JSON files. Each node carries a Provenance enum identifying
which agent created it (AgentDigest, AgentConsolidate, AgentFactMine,
AgentKnowledgeObservation, etc — 14 variants total).

Store changes:
- upsert_provenance() method for agent-created nodes
- Provenance enum expanded from 5 to 14 variants

Agent changes:
- digest: daily digests written to store nodes (keys like daily-YYYY-MM-DD.md)
- consolidate: reports/actions/logs stored as _consolidation-* nodes
- knowledge: depth DB and agent output stored as _knowledge-* nodes
- enrich: experience-mine results go directly to store
- llm: --no-session-persistence prevents transcript accumulation

Deleted: 14 Python/shell scripts replaced by Rust implementations.
This commit is contained in:
ProofOfConcept 2026-03-05 15:30:57 -05:00
parent e37f819dd2
commit 552d255dc3
23 changed files with 1381 additions and 4095 deletions

View file

@ -15,59 +15,40 @@ use crate::llm::{call_sonnet, parse_json_response};
use crate::neuro;
use crate::store::{self, Store, new_relation};
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use crate::util::memory_subdir;
/// Simple append-only log writer for consolidate-full.
// NOTE(review): legacy file-based writer removed in this commit — the NEW code
// accumulates log text in a String via log_line() and stores it as a node.
// The impl's closing brace is elided by the diff view.
struct LogWriter {
// On-disk log file path; the file is created (truncated) in new().
path: PathBuf,
}
impl LogWriter {
// Create/truncate the log file at `path`; I/O errors are stringified.
fn new(path: &Path) -> Result<Self, String> {
fs::write(path, "").map_err(|e| format!("create log: {}", e))?;
Ok(LogWriter { path: path.to_path_buf() })
}
// Append one line to the file. Reopens the file on every call — one open
// per line, acceptable for a low-volume progress log.
fn write(&mut self, line: &str) -> Result<(), String> {
let mut f = fs::OpenOptions::new()
.append(true)
.open(&self.path)
.map_err(|e| format!("open log: {}", e))?;
writeln!(f, "{}", line)
.map_err(|e| format!("write log: {}", e))
}
/// Append `line` to the in-memory log buffer, terminated with a newline.
fn log_line(buf: &mut String, line: &str) {
    *buf += line;
    *buf += "\n";
}
/// Run the full autonomous consolidation pipeline with logging.
pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
let start = std::time::Instant::now();
let log_path = memory_subdir("agent-results")?.join("consolidate-full.log");
let mut log = LogWriter::new(&log_path)?;
let log_key = format!("_consolidate-log-{}",
store::format_datetime(store::now_epoch()).replace([':', '-', 'T'], ""));
let mut log_buf = String::new();
log.write("=== CONSOLIDATE FULL ===")?;
log.write(&format!("Started: {}", store::format_datetime(store::now_epoch())))?;
log.write(&format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()))?;
log.write("")?;
log_line(&mut log_buf, "=== CONSOLIDATE FULL ===");
log_line(&mut log_buf, &format!("Started: {}", store::format_datetime(store::now_epoch())));
log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
log_line(&mut log_buf, "");
// --- Step 1: Plan ---
log.write("--- Step 1: Plan ---")?;
log_line(&mut log_buf, "--- Step 1: Plan ---");
let plan = neuro::consolidation_plan(store);
let plan_text = neuro::format_plan(&plan);
log.write(&plan_text)?;
log_line(&mut log_buf, &plan_text);
println!("{}", plan_text);
let total_agents = plan.replay_count + plan.linker_count
+ plan.separator_count + plan.transfer_count
+ if plan.run_health { 1 } else { 0 };
log.write(&format!("Total agents to run: {}", total_agents))?;
log_line(&mut log_buf, &format!("Total agents to run: {}", total_agents));
// --- Step 2: Execute agents ---
log.write("\n--- Step 2: Execute agents ---")?;
let mut reports: Vec<PathBuf> = Vec::new();
log_line(&mut log_buf, "\n--- Step 2: Execute agents ---");
let mut reports: Vec<String> = Vec::new();
let mut agent_num = 0usize;
let mut agent_errors = 0usize;
@ -121,7 +102,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
format!("[{}/{}] {}", agent_num, runs.len(), agent_type)
};
log.write(&format!("\n{}", label))?;
log_line(&mut log_buf, &format!("\n{}", label));
println!("{}", label);
// Reload store to pick up changes from previous agents
@ -133,191 +114,173 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
Ok(p) => p,
Err(e) => {
let msg = format!(" ERROR building prompt: {}", e);
log.write(&msg)?;
log_line(&mut log_buf, &msg);
eprintln!("{}", msg);
agent_errors += 1;
continue;
}
};
log.write(&format!(" Prompt: {} chars (~{} tokens)",
prompt.len(), prompt.len() / 4))?;
log_line(&mut log_buf, &format!(" Prompt: {} chars (~{} tokens)",
prompt.len(), prompt.len() / 4));
let response = match call_sonnet(&prompt, 300) {
Ok(r) => r,
Err(e) => {
let msg = format!(" ERROR from Sonnet: {}", e);
log.write(&msg)?;
log_line(&mut log_buf, &msg);
eprintln!("{}", msg);
agent_errors += 1;
continue;
}
};
// Save report
// Store report as a node
let ts = store::format_datetime(store::now_epoch())
.replace([':', '-', 'T'], "");
let report_name = format!("consolidation-{}-{}.md", agent_type, ts);
let report_path = memory_subdir("agent-results")?.join(&report_name);
fs::write(&report_path, &response)
.map_err(|e| format!("write report: {}", e))?;
reports.push(report_path.clone());
let report_key = format!("_consolidation-{}-{}", agent_type, ts);
store.upsert_provenance(&report_key, &response,
store::Provenance::AgentConsolidate).ok();
reports.push(report_key.clone());
let msg = format!(" Done: {} lines → {}", response.lines().count(), report_name);
log.write(&msg)?;
let msg = format!(" Done: {} lines → {}", response.lines().count(), report_key);
log_line(&mut log_buf, &msg);
println!("{}", msg);
}
log.write(&format!("\nAgents complete: {} run, {} errors",
agent_num - agent_errors, agent_errors))?;
log_line(&mut log_buf, &format!("\nAgents complete: {} run, {} errors",
agent_num - agent_errors, agent_errors));
// --- Step 3: Apply consolidation actions ---
log.write("\n--- Step 3: Apply consolidation actions ---")?;
log_line(&mut log_buf, "\n--- Step 3: Apply consolidation actions ---");
println!("\n--- Applying consolidation actions ---");
*store = Store::load()?;
if reports.is_empty() {
log.write(" No reports to apply.")?;
log_line(&mut log_buf, " No reports to apply.");
} else {
match apply_consolidation(store, true, None) {
Ok(()) => log.write(" Applied.")?,
Ok(()) => log_line(&mut log_buf, " Applied."),
Err(e) => {
let msg = format!(" ERROR applying consolidation: {}", e);
log.write(&msg)?;
log_line(&mut log_buf, &msg);
eprintln!("{}", msg);
}
}
}
// --- Step 3b: Link orphans ---
log.write("\n--- Step 3b: Link orphans ---")?;
log_line(&mut log_buf, "\n--- Step 3b: Link orphans ---");
println!("\n--- Linking orphan nodes ---");
*store = Store::load()?;
let (lo_orphans, lo_added) = neuro::link_orphans(store, 2, 3, 0.15);
log.write(&format!(" {} orphans, {} links added", lo_orphans, lo_added))?;
log_line(&mut log_buf, &format!(" {} orphans, {} links added", lo_orphans, lo_added));
// --- Step 3c: Cap degree ---
log.write("\n--- Step 3c: Cap degree ---")?;
log_line(&mut log_buf, "\n--- Step 3c: Cap degree ---");
println!("\n--- Capping node degree ---");
*store = Store::load()?;
match store.cap_degree(50) {
Ok((hubs, pruned)) => {
store.save()?;
log.write(&format!(" {} hubs capped, {} edges pruned", hubs, pruned))?;
log_line(&mut log_buf, &format!(" {} hubs capped, {} edges pruned", hubs, pruned));
}
Err(e) => log.write(&format!(" ERROR: {}", e))?,
Err(e) => log_line(&mut log_buf, &format!(" ERROR: {}", e)),
}
// --- Step 4: Digest auto ---
log.write("\n--- Step 4: Digest auto ---")?;
log_line(&mut log_buf, "\n--- Step 4: Digest auto ---");
println!("\n--- Generating missing digests ---");
*store = Store::load()?;
match digest::digest_auto(store) {
Ok(()) => log.write(" Digests done.")?,
Ok(()) => log_line(&mut log_buf, " Digests done."),
Err(e) => {
let msg = format!(" ERROR in digest auto: {}", e);
log.write(&msg)?;
log_line(&mut log_buf, &msg);
eprintln!("{}", msg);
}
}
// --- Step 5: Apply digest links ---
log.write("\n--- Step 5: Apply digest links ---")?;
log_line(&mut log_buf, "\n--- Step 5: Apply digest links ---");
println!("\n--- Applying digest links ---");
*store = Store::load()?;
let links = digest::parse_all_digest_links()?;
let links = digest::parse_all_digest_links(store);
let (applied, skipped, fallbacks) = digest::apply_digest_links(store, &links);
store.save()?;
log.write(&format!(" {} links applied, {} skipped, {} fallbacks",
applied, skipped, fallbacks))?;
log_line(&mut log_buf, &format!(" {} links applied, {} skipped, {} fallbacks",
applied, skipped, fallbacks));
// --- Step 6: Summary ---
let elapsed = start.elapsed();
log.write("\n--- Summary ---")?;
log.write(&format!("Finished: {}", store::format_datetime(store::now_epoch())))?;
log.write(&format!("Duration: {:.0}s", elapsed.as_secs_f64()))?;
log_line(&mut log_buf, "\n--- Summary ---");
log_line(&mut log_buf, &format!("Finished: {}", store::format_datetime(store::now_epoch())));
log_line(&mut log_buf, &format!("Duration: {:.0}s", elapsed.as_secs_f64()));
*store = Store::load()?;
log.write(&format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()))?;
log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
let summary = format!(
"\n=== CONSOLIDATE FULL COMPLETE ===\n\
Duration: {:.0}s\n\
Agents: {} run, {} errors\n\
Nodes: {} Relations: {}\n\
Log: {}\n",
Nodes: {} Relations: {}\n",
elapsed.as_secs_f64(),
agent_num - agent_errors, agent_errors,
store.nodes.len(), store.relations.len(),
log_path.display(),
);
log.write(&summary)?;
log_line(&mut log_buf, &summary);
println!("{}", summary);
// Store the log as a node
store.upsert_provenance(&log_key, &log_buf,
store::Provenance::AgentConsolidate).ok();
store.save()?;
Ok(())
}
/// Find the most recent set of consolidation reports.
// NOTE(review): this span is a diff overlay — the OLD file-scanning version
// (removed) and the NEW store-backed version (added) are interleaved below.
// OLD: scan the agent-results directory for consolidation-*.md files.
fn find_consolidation_reports() -> Result<Vec<PathBuf>, String> {
let dir = memory_subdir("agent-results")?;
let mut reports: Vec<PathBuf> = fs::read_dir(&dir)
.map(|entries| {
entries.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| {
p.file_name()
.and_then(|n| n.to_str())
.map(|n| n.starts_with("consolidation-") && n.ends_with(".md"))
.unwrap_or(false)
})
.collect()
})
// Directory read errors are swallowed: an unreadable dir yields no reports.
.unwrap_or_default();
// Sort descending so the newest timestamped report comes first.
reports.sort();
reports.reverse();
/// Find the most recent set of consolidation report keys from the store.
// NEW: filter store node keys by the _consolidation- prefix instead of files.
fn find_consolidation_reports(store: &Store) -> Vec<String> {
let mut keys: Vec<&String> = store.nodes.keys()
.filter(|k| k.starts_with("_consolidation-"))
.collect();
// Sort descending so keys[0] carries the newest timestamp suffix.
keys.sort();
keys.reverse();
if reports.is_empty() { return Ok(reports); }
if keys.is_empty() { return Vec::new(); }
// Group by timestamp (last segment of stem before .md)
let latest_ts = reports[0].file_stem()
.and_then(|s| s.to_str())
.unwrap_or("")
.rsplit('-').next().unwrap_or("")
.to_string();
// Group by timestamp (last segment after last '-')
let latest_ts = keys[0].rsplit('-').next().unwrap_or("").to_string();
// Keep only the reports from the newest run (matching timestamp suffix).
reports.retain(|r| {
r.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("")
.ends_with(latest_ts.as_str())
});
Ok(reports)
keys.into_iter()
.filter(|k| k.ends_with(&latest_ts))
.cloned()
.collect()
}
// Build the Sonnet prompt from the latest consolidation reports.
// NOTE(review): diff overlay — the OLD signature (removed) read report files
// from disk; the NEW signature (added) reads report nodes from the store.
fn build_consolidation_prompt(reports: &[PathBuf]) -> Result<String, String> {
fn build_consolidation_prompt(store: &Store, report_keys: &[String]) -> Result<String, String> {
let mut report_text = String::new();
// OLD loop: one report per file path; a read failure is a hard error.
for r in reports {
let content = fs::read_to_string(r)
.map_err(|e| format!("read {}: {}", r.display(), e))?;
// NEW loop: a missing store key degrades to an empty report body.
for key in report_keys {
let content = store.nodes.get(key)
.map(|n| n.content.as_str())
.unwrap_or("");
// Concatenate each report under a "## Report:" heading, separated by a rule.
report_text.push_str(&format!("\n{}\n## Report: {}\n\n{}\n",
"=".repeat(60),
r.file_stem().and_then(|s| s.to_str()).unwrap_or(""),
content));
"=".repeat(60), key, content));
}
// Substitute the combined text into the "consolidation" prompt template.
neuro::load_prompt("consolidation", &[("{{REPORTS}}", &report_text)])
}
/// Run the full apply-consolidation pipeline.
pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Option<&str>) -> Result<(), String> {
let reports = if let Some(path) = report_file {
vec![PathBuf::from(path)]
pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_key: Option<&str>) -> Result<(), String> {
let reports = if let Some(key) = report_key {
vec![key.to_string()]
} else {
find_consolidation_reports()?
find_consolidation_reports(store)
};
if reports.is_empty() {
@ -328,11 +291,11 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio
println!("Found {} reports:", reports.len());
for r in &reports {
println!(" {}", r.file_name().and_then(|s| s.to_str()).unwrap_or("?"));
println!(" {}", r);
}
println!("\nExtracting actions from reports...");
let prompt = build_consolidation_prompt(&reports)?;
let prompt = build_consolidation_prompt(store, &reports)?;
println!(" Prompt: {} chars", prompt.len());
let response = call_sonnet(&prompt, 300)?;
@ -343,14 +306,14 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio
println!(" {} actions extracted", actions.len());
// Save actions
// Store actions in the store
let timestamp = store::format_datetime(store::now_epoch())
.replace([':', '-'], "");
let actions_path = memory_subdir("agent-results")?
.join(format!("consolidation-actions-{}.json", timestamp));
fs::write(&actions_path, serde_json::to_string_pretty(&actions_value).unwrap())
.map_err(|e| format!("write {}: {}", actions_path.display(), e))?;
println!(" Saved: {}", actions_path.display());
let actions_key = format!("_consolidation-actions-{}", timestamp);
let actions_json = serde_json::to_string_pretty(&actions_value).unwrap();
store.upsert_provenance(&actions_key, &actions_json,
store::Provenance::AgentConsolidate).ok();
println!(" Stored: {}", actions_key);
let link_actions: Vec<_> = actions.iter()
.filter(|a| a.get("action").and_then(|v| v.as_str()) == Some("link"))