provenance: convert from enum to freeform string

The Provenance enum couldn't represent agents defined outside the
source code. Replace it with a Text field in the capnp schema so any
agent can write its own provenance label (e.g. "extractor:write",
"rename:tombstone") without a code change.

Schema: rename old enum fields to provenanceOld, add new Text
provenance fields. Old enum kept for reading legacy records.
Migration: from_capnp_migrate() falls back to old enum when the
new text field is empty.

Also adds `poc-memory tail` command for viewing recent store writes.

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
Author: Kent Overstreet
Date: 2026-03-11 01:19:52 -04:00
parent de204e3075
commit d76b14dfcd
14 changed files with 160 additions and 67 deletions

View file

@@ -178,7 +178,7 @@ pub fn consolidate_full_with_progress(
// Store the log as a node
store.upsert_provenance(&log_key, &log_buf,
store::Provenance::AgentConsolidate).ok();
"consolidate:write").ok();
store.save()?;
Ok(())

View file

@@ -343,7 +343,7 @@ fn job_split_one(
continue;
}
store.upsert_provenance(child_key, content,
crate::store::Provenance::AgentConsolidate)?;
"consolidate:write")?;
let uuid = store.nodes.get(child_key.as_str()).unwrap().uuid;
child_uuids.push((uuid, child_key.clone()));
ctx.log_line(&format!(" created: {} ({} chars)", child_key, content.len()));
@@ -905,7 +905,7 @@ pub fn run_daemon() -> Result<(), String> {
let seg_key = format!("{}.{}", fname_key, i);
let content = format!("Migrated from whole-file key for {}", path_str);
let mut node = crate::store::new_node(&seg_key, &content);
node.provenance = crate::store::Provenance::AgentExperienceMine;
node.provenance = "experience-mine:write".to_string();
match store.upsert_node(node) {
Ok(()) => ok += 1,
Err(e) => {

View file

@@ -224,7 +224,7 @@ fn generate_digest(
let digest = call_sonnet("digest", &prompt)?;
let key = digest_node_key(level.name, label);
store.upsert_provenance(&key, &digest, store::Provenance::AgentDigest)?;
store.upsert_provenance(&key, &digest, "digest:write")?;
store.save()?;
println!(" Stored: {}", key);

View file

@@ -241,7 +241,7 @@ pub fn experience_mine(
let seg_key = format!("{}.{}", transcript_filename_key(jsonl_path), idx);
if !store.nodes.contains_key(&seg_key) {
let mut node = new_node(&seg_key, &format!("Backfilled from {}", dedup_key));
node.provenance = store::Provenance::AgentExperienceMine;
node.provenance = "experience-mine:write".to_string();
let _ = store.upsert_node(node);
store.save()?;
}
@@ -351,7 +351,7 @@ pub fn experience_mine(
// Write to store — use event timestamp, not mining time
let mut node = new_node(&key, &full_content);
node.node_type = store::NodeType::EpisodicSession;
node.provenance = store::Provenance::AgentExperienceMine;
node.provenance = "experience-mine:write".to_string();
if !ts.is_empty() {
if let Some(epoch) = parse_timestamp_to_epoch(ts) {
node.created_at = epoch;
@@ -371,7 +371,7 @@ pub fn experience_mine(
// Per-segment key: the daemon writes the whole-file key when all segments are done
let seg_key = format!("{}.{}", transcript_filename_key(jsonl_path), idx);
let mut node = new_node(&seg_key, &dedup_content);
node.provenance = store::Provenance::AgentExperienceMine;
node.provenance = "experience-mine:write".to_string();
let _ = store.upsert_node(node);
}
None => {
@@ -379,7 +379,7 @@ pub fn experience_mine(
// file may grow with new compaction segments later — the daemon handles
// writing the whole-file filename key after verifying all segments are done)
let mut node = new_node(&dedup_key, &dedup_content);
node.provenance = store::Provenance::AgentExperienceMine;
node.provenance = "experience-mine:write".to_string();
let _ = store.upsert_node(node);
}
}

View file

@@ -8,7 +8,7 @@
use crate::config;
use super::llm;
use super::transcript;
use crate::store::{self, Provenance};
use crate::store;
use serde::{Deserialize, Serialize};
use std::collections::HashSet;
@@ -266,7 +266,7 @@ pub fn mine_and_store(
eprintln!(" Merging facts into existing node: {}", existing_key);
if let Some(node) = store.nodes.get(existing_key.as_str()) {
let merged = format!("{}\n\n{}", node.content, json);
store.upsert_provenance(&existing_key, &merged, Provenance::AgentFactMine)?;
store.upsert_provenance(&existing_key, &merged, "fact-mine:write")?;
store.save()?;
return Ok(facts.len());
}
@@ -275,7 +275,7 @@ pub fn mine_and_store(
}
};
store.upsert_provenance(&key, &json, Provenance::AgentFactMine)?;
store.upsert_provenance(&key, &json, "fact-mine:write")?;
store.save()?;
eprintln!(" Stored {} facts as {}", facts.len(), key);

View file

@@ -217,7 +217,7 @@ impl DepthDb {
pub fn save(&self, store: &mut Store) {
if let Ok(json) = serde_json::to_string(&self.depths) {
store.upsert_provenance(DEPTH_DB_KEY, &json,
store::Provenance::AgentKnowledgeObservation).ok();
"observation:write").ok();
}
}
@@ -295,12 +295,11 @@ pub fn apply_action(
timestamp: &str,
depth: i32,
) -> bool {
let provenance = agent_provenance(agent);
match &action.kind {
ActionKind::WriteNode { key, content, .. } => {
let stamped = stamp_content(content, agent, timestamp, depth);
store.upsert_provenance(key, &stamped, provenance).is_ok()
let prov = format!("{}:write", agent);
store.upsert_provenance(key, &stamped, &prov).is_ok()
}
ActionKind::Link { source, target } => {
if has_edge(store, source, target) {
@@ -320,15 +319,17 @@ pub fn apply_action(
0.3,
source, target,
);
rel.provenance = provenance;
rel.provenance = format!("{}:link", agent);
store.add_relation(rel).is_ok()
}
ActionKind::Refine { key, content } => {
let stamped = stamp_content(content, agent, timestamp, depth);
store.upsert_provenance(key, &stamped, provenance).is_ok()
let prov = format!("{}:refine", agent);
store.upsert_provenance(key, &stamped, &prov).is_ok()
}
ActionKind::Demote { key } => {
if let Some(node) = store.nodes.get_mut(key) {
node.provenance = format!("{}:demote", agent);
node.weight = (node.weight * 0.5).max(0.05);
true
} else {
@@ -338,13 +339,13 @@ pub fn apply_action(
}
}
fn agent_provenance(agent: &str) -> store::Provenance {
fn agent_provenance(agent: &str) -> String {
match agent {
"observation" => store::Provenance::AgentKnowledgeObservation,
"extractor" | "pattern" => store::Provenance::AgentKnowledgePattern,
"connector" => store::Provenance::AgentKnowledgeConnector,
"challenger" => store::Provenance::AgentKnowledgeChallenger,
_ => store::Provenance::Agent,
"observation" => "agent:knowledge-observation".to_string(),
"extractor" | "pattern" => "agent:knowledge-pattern".to_string(),
"connector" => "agent:knowledge-connector".to_string(),
"challenger" => "agent:knowledge-challenger".to_string(),
_ => format!("agent:{}", agent),
}
}
@@ -587,7 +588,7 @@ pub fn run_one_agent(
let ts = store::compact_timestamp();
let report_key = format!("_{}-{}-{}", llm_tag, agent_name, ts);
let provenance = agent_provenance(agent_name);
store.upsert_provenance(&report_key, &output, provenance).ok();
store.upsert_provenance(&report_key, &output, &provenance).ok();
let actions = parse_all_actions(&output);
let no_ops = count_no_ops(&output);
@@ -824,7 +825,7 @@ pub fn run_knowledge_loop(config: &KnowledgeLoopConfig) -> Result<Vec<CycleResul
if let Ok(json) = serde_json::to_string_pretty(&history) {
store = Store::load()?;
store.upsert_provenance(&key, &json,
store::Provenance::AgentKnowledgeObservation).ok();
"observation:write").ok();
depth_db.save(&mut store);
store.save()?;
}