migrate agent output to capnp store, add provenance tracking

All agent output now goes to the store as nodes instead of
markdown/JSON files. Each node carries a Provenance enum identifying
which agent created it (AgentDigest, AgentConsolidate, AgentFactMine,
AgentKnowledgeObservation, etc — 14 variants total).

Store changes:
- upsert_provenance() method for agent-created nodes
- Provenance enum expanded from 5 to 14 variants

Agent changes:
- digest: writes to store nodes (daily-YYYY-MM-DD.md etc)
- consolidate: reports/actions/logs stored as _consolidation-* nodes
- knowledge: depth DB and agent output stored as _knowledge-* nodes
- enrich: experience-mine results go directly to store
- llm: --no-session-persistence prevents transcript accumulation

Deleted: 14 Python/shell scripts replaced by Rust implementations.
This commit is contained in:
ProofOfConcept 2026-03-05 15:30:57 -05:00
parent e37f819dd2
commit 552d255dc3
23 changed files with 1381 additions and 4095 deletions

View file

@ -8,13 +8,10 @@
use crate::llm::{call_sonnet, semantic_keys};
use crate::store::{self, Store, new_relation};
use crate::neuro;
use crate::util::memory_subdir;
use chrono::{Datelike, Duration, Local, NaiveDate};
use regex::Regex;
use std::collections::BTreeSet;
use std::fs;
use std::path::{Path, PathBuf};
// --- Digest level descriptors ---
@ -113,19 +110,24 @@ const MONTHLY: DigestLevel = DigestLevel {
const LEVELS: &[&DigestLevel] = &[&DAILY, &WEEKLY, &MONTHLY];
/// Store key for a digest node: "daily-2026-03-04.md", "weekly-2026-W09.md", etc.
/// Matches the key format from the old import_file() path.
fn digest_node_key(level_name: &str, label: &str) -> String {
    // Build "<level>-<label>.md" directly; preallocate to avoid regrowth
    // (level + '-' + label + ".md" = len + len + 4 bytes).
    let mut key = String::with_capacity(level_name.len() + label.len() + 4);
    key.push_str(level_name);
    key.push('-');
    key.push_str(label);
    key.push_str(".md");
    key
}
// --- Input gathering ---
/// Load child digest files from the episodic directory.
fn load_child_digests(prefix: &str, labels: &[String]) -> Result<Vec<(String, String)>, String> {
let dir = memory_subdir("episodic")?;
/// Load child digest content from the store.
fn load_child_digests(store: &Store, prefix: &str, labels: &[String]) -> Vec<(String, String)> {
let mut digests = Vec::new();
for label in labels {
let path = dir.join(format!("{}-{}.md", prefix, label));
if let Ok(content) = fs::read_to_string(&path) {
digests.push((label.clone(), content));
let key = digest_node_key(prefix, label);
if let Some(node) = store.nodes.get(&key) {
digests.push((label.clone(), node.content.clone()));
}
}
Ok(digests)
digests
}
/// Unified: gather inputs for any digest level.
@ -142,7 +144,7 @@ fn gather(level: &DigestLevel, store: &Store, arg: &str) -> Result<(String, Vec<
.collect::<BTreeSet<_>>()
.into_iter()
.collect();
load_child_digests(child_name, &child_labels)?
load_child_digests(store, child_name, &child_labels)
} else {
// Leaf level: scan store for journal entries matching label
let date_re = Regex::new(&format!(
@ -227,14 +229,10 @@ fn generate_digest(
println!(" Calling Sonnet...");
let digest = call_sonnet(&prompt, level.timeout)?;
let output_path = memory_subdir("episodic")?
.join(format!("{}-{}.md", level.name, label));
fs::write(&output_path, &digest)
.map_err(|e| format!("write {}: {}", output_path.display(), e))?;
println!(" Written: {}", output_path.display());
store.import_file(&output_path)?;
let key = digest_node_key(level.name, label);
store.upsert_provenance(&key, &digest, store::Provenance::AgentDigest)?;
store.save()?;
println!(" Stored: {}", key);
println!(" Done: {} lines", digest.lines().count());
Ok(())
@ -254,7 +252,6 @@ pub fn generate(store: &mut Store, level_name: &str, arg: &str) -> Result<(), St
pub fn digest_auto(store: &mut Store) -> Result<(), String> {
let today = Local::now().format("%Y-%m-%d").to_string();
let epi = memory_subdir("episodic")?;
// Collect all dates with journal entries
let date_re = Regex::new(r"^\d{4}-\d{2}-\d{2}").unwrap();
@ -277,7 +274,8 @@ pub fn digest_auto(store: &mut Store) -> Result<(), String> {
for arg in &candidates {
let (label, inputs) = gather(level, store, arg)?;
if epi.join(format!("{}-{}.md", level.name, label)).exists() {
let key = digest_node_key(level.name, &label);
if store.nodes.contains_key(&key) {
skipped += 1;
continue;
}
@ -357,21 +355,8 @@ fn normalize_link_key(raw: &str) -> String {
key
}
/// Parse the Links section from a single digest file.
fn parse_digest_file_links(path: &Path) -> Vec<DigestLink> {
let content = match fs::read_to_string(path) {
Ok(c) => c,
Err(_) => return Vec::new(),
};
let digest_name = path.file_stem()
.and_then(|s| s.to_str())
.unwrap_or("");
let digest_key = format!("{}.md", digest_name);
let filename = path.file_name()
.and_then(|s| s.to_str())
.unwrap_or("")
.to_string();
/// Parse the Links section from a digest node's content.
fn parse_digest_node_links(key: &str, content: &str) -> Vec<DigestLink> {
let link_re = Regex::new(r"^-\s+(.+?)\s*[→↔←]\s*(.+?)(?:\s*\((.+?)\))?\s*$").unwrap();
let header_re = Regex::new(r"^##\s+Links").unwrap();
@ -399,8 +384,8 @@ fn parse_digest_file_links(path: &Path) -> Vec<DigestLink> {
let mut target = normalize_link_key(raw_target);
// Replace self-references with digest key
if source.is_empty() { source = digest_key.clone(); }
if target.is_empty() { target = digest_key.clone(); }
if source.is_empty() { source = key.to_string(); }
if target.is_empty() { target = key.to_string(); }
// Handle "this daily/weekly/monthly" in raw text
let raw_s_lower = raw_source.to_lowercase();
@ -408,49 +393,39 @@ fn parse_digest_file_links(path: &Path) -> Vec<DigestLink> {
if raw_s_lower.contains("this daily") || raw_s_lower.contains("this weekly")
|| raw_s_lower.contains("this monthly")
{
source = digest_key.clone();
source = key.to_string();
}
if raw_t_lower.contains("this daily") || raw_t_lower.contains("this weekly")
|| raw_t_lower.contains("this monthly")
{
target = digest_key.clone();
target = key.to_string();
}
// Skip NEW: and self-links
if source.starts_with("NEW:") || target.starts_with("NEW:") { continue; }
if source == target { continue; }
links.push(DigestLink { source, target, reason, file: filename.clone() });
links.push(DigestLink { source, target, reason, file: key.to_string() });
}
}
links
}
/// Parse links from all digest files in the episodic dir.
pub fn parse_all_digest_links() -> Result<Vec<DigestLink>, String> {
let dir = memory_subdir("episodic")?;
/// Parse links from all digest nodes in the store.
pub fn parse_all_digest_links(store: &Store) -> Vec<DigestLink> {
let mut all_links = Vec::new();
for pattern in &["daily-*.md", "weekly-*.md", "monthly-*.md"] {
if let Ok(entries) = fs::read_dir(&dir) {
let mut files: Vec<PathBuf> = entries
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| {
p.file_name()
.and_then(|n| n.to_str())
.map(|n| {
let prefix = pattern.split('*').next().unwrap_or("");
n.starts_with(prefix) && n.ends_with(".md")
})
.unwrap_or(false)
})
.collect();
files.sort();
for path in files {
all_links.extend(parse_digest_file_links(&path));
}
let mut digest_keys: Vec<&String> = store.nodes.keys()
.filter(|k| k.starts_with("daily-")
|| k.starts_with("weekly-")
|| k.starts_with("monthly-"))
.collect();
digest_keys.sort();
for key in digest_keys {
if let Some(node) = store.nodes.get(key) {
all_links.extend(parse_digest_node_links(key, &node.content));
}
}
@ -458,7 +433,7 @@ pub fn parse_all_digest_links() -> Result<Vec<DigestLink>, String> {
let mut seen = std::collections::HashSet::new();
all_links.retain(|link| seen.insert((link.source.clone(), link.target.clone())));
Ok(all_links)
all_links
}
/// Apply parsed digest links to the store.