provenance: convert from enum to freeform string

The Provenance enum couldn't represent agents defined outside the
source code. Replace it with a Text field in the capnp schema so any
agent can write its own provenance label (e.g. "extractor:write",
"rename:tombstone") without a code change.

Schema: rename old enum fields to provenanceOld, add new Text
provenance fields. Old enum kept for reading legacy records.
Migration: from_capnp_migrate() falls back to old enum when the
new text field is empty.

Also adds `poc-memory tail` command for viewing recent store writes.

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-03-11 01:19:52 -04:00
parent de204e3075
commit d76b14dfcd
14 changed files with 160 additions and 67 deletions

View file

@ -12,7 +12,7 @@ struct ContentNode {
version @1 :UInt32; # monotonic per UUID, latest wins version @1 :UInt32; # monotonic per UUID, latest wins
timestamp @2 :Int64; # unix epoch seconds timestamp @2 :Int64; # unix epoch seconds
nodeType @3 :NodeType; nodeType @3 :NodeType;
provenance @4 :Provenance; provenanceOld @4 :Provenance; # deprecated — use provenance (@21)
key @5 :Text; # "identity.md#boundaries" human-readable key @5 :Text; # "identity.md#boundaries" human-readable
content @6 :Text; # markdown blob content @6 :Text; # markdown blob
weight @7 :Float32; weight @7 :Float32;
@ -38,6 +38,10 @@ struct ContentNode {
# Stable creation timestamp (unix epoch seconds). Set once when the # Stable creation timestamp (unix epoch seconds). Set once when the
# node is first created; never changes on rename or content update. # node is first created; never changes on rename or content update.
createdAt @20 :Int64; createdAt @20 :Int64;
# Freeform provenance string: "extractor:write", "rename:tombstone", etc.
provenance @21 :Text;
} }
enum NodeType { enum NodeType {
@ -79,12 +83,13 @@ struct Relation {
timestamp @2 :Int64; # unix epoch seconds timestamp @2 :Int64; # unix epoch seconds
source @3 :Data; # content node UUID source @3 :Data; # content node UUID
target @4 :Data; # content node UUID target @4 :Data; # content node UUID
relType @5 :RelationType; relType @5 :RelationType;
strength @6 :Float32; # manual=1.0, auto=0.1-0.7 strength @6 :Float32; # manual=1.0, auto=0.1-0.7
provenance @7 :Provenance; provenanceOld @7 :Provenance; # deprecated — use provenance (@11)
deleted @8 :Bool; # soft delete deleted @8 :Bool; # soft delete
sourceKey @9 :Text; # human-readable source key (for debugging) sourceKey @9 :Text; # human-readable source key (for debugging)
targetKey @10 :Text; # human-readable target key (for debugging) targetKey @10 :Text; # human-readable target key (for debugging)
provenance @11 :Text; # freeform provenance string
} }
enum RelationType { enum RelationType {

View file

@ -178,7 +178,7 @@ pub fn consolidate_full_with_progress(
// Store the log as a node // Store the log as a node
store.upsert_provenance(&log_key, &log_buf, store.upsert_provenance(&log_key, &log_buf,
store::Provenance::AgentConsolidate).ok(); "consolidate:write").ok();
store.save()?; store.save()?;
Ok(()) Ok(())

View file

@ -343,7 +343,7 @@ fn job_split_one(
continue; continue;
} }
store.upsert_provenance(child_key, content, store.upsert_provenance(child_key, content,
crate::store::Provenance::AgentConsolidate)?; "consolidate:write")?;
let uuid = store.nodes.get(child_key.as_str()).unwrap().uuid; let uuid = store.nodes.get(child_key.as_str()).unwrap().uuid;
child_uuids.push((uuid, child_key.clone())); child_uuids.push((uuid, child_key.clone()));
ctx.log_line(&format!(" created: {} ({} chars)", child_key, content.len())); ctx.log_line(&format!(" created: {} ({} chars)", child_key, content.len()));
@ -905,7 +905,7 @@ pub fn run_daemon() -> Result<(), String> {
let seg_key = format!("{}.{}", fname_key, i); let seg_key = format!("{}.{}", fname_key, i);
let content = format!("Migrated from whole-file key for {}", path_str); let content = format!("Migrated from whole-file key for {}", path_str);
let mut node = crate::store::new_node(&seg_key, &content); let mut node = crate::store::new_node(&seg_key, &content);
node.provenance = crate::store::Provenance::AgentExperienceMine; node.provenance = "experience-mine:write".to_string();
match store.upsert_node(node) { match store.upsert_node(node) {
Ok(()) => ok += 1, Ok(()) => ok += 1,
Err(e) => { Err(e) => {

View file

@ -224,7 +224,7 @@ fn generate_digest(
let digest = call_sonnet("digest", &prompt)?; let digest = call_sonnet("digest", &prompt)?;
let key = digest_node_key(level.name, label); let key = digest_node_key(level.name, label);
store.upsert_provenance(&key, &digest, store::Provenance::AgentDigest)?; store.upsert_provenance(&key, &digest, "digest:write")?;
store.save()?; store.save()?;
println!(" Stored: {}", key); println!(" Stored: {}", key);

View file

@ -241,7 +241,7 @@ pub fn experience_mine(
let seg_key = format!("{}.{}", transcript_filename_key(jsonl_path), idx); let seg_key = format!("{}.{}", transcript_filename_key(jsonl_path), idx);
if !store.nodes.contains_key(&seg_key) { if !store.nodes.contains_key(&seg_key) {
let mut node = new_node(&seg_key, &format!("Backfilled from {}", dedup_key)); let mut node = new_node(&seg_key, &format!("Backfilled from {}", dedup_key));
node.provenance = store::Provenance::AgentExperienceMine; node.provenance = "experience-mine:write".to_string();
let _ = store.upsert_node(node); let _ = store.upsert_node(node);
store.save()?; store.save()?;
} }
@ -351,7 +351,7 @@ pub fn experience_mine(
// Write to store — use event timestamp, not mining time // Write to store — use event timestamp, not mining time
let mut node = new_node(&key, &full_content); let mut node = new_node(&key, &full_content);
node.node_type = store::NodeType::EpisodicSession; node.node_type = store::NodeType::EpisodicSession;
node.provenance = store::Provenance::AgentExperienceMine; node.provenance = "experience-mine:write".to_string();
if !ts.is_empty() { if !ts.is_empty() {
if let Some(epoch) = parse_timestamp_to_epoch(ts) { if let Some(epoch) = parse_timestamp_to_epoch(ts) {
node.created_at = epoch; node.created_at = epoch;
@ -371,7 +371,7 @@ pub fn experience_mine(
// Per-segment key: the daemon writes the whole-file key when all segments are done // Per-segment key: the daemon writes the whole-file key when all segments are done
let seg_key = format!("{}.{}", transcript_filename_key(jsonl_path), idx); let seg_key = format!("{}.{}", transcript_filename_key(jsonl_path), idx);
let mut node = new_node(&seg_key, &dedup_content); let mut node = new_node(&seg_key, &dedup_content);
node.provenance = store::Provenance::AgentExperienceMine; node.provenance = "experience-mine:write".to_string();
let _ = store.upsert_node(node); let _ = store.upsert_node(node);
} }
None => { None => {
@ -379,7 +379,7 @@ pub fn experience_mine(
// file may grow with new compaction segments later — the daemon handles // file may grow with new compaction segments later — the daemon handles
// writing the whole-file filename key after verifying all segments are done) // writing the whole-file filename key after verifying all segments are done)
let mut node = new_node(&dedup_key, &dedup_content); let mut node = new_node(&dedup_key, &dedup_content);
node.provenance = store::Provenance::AgentExperienceMine; node.provenance = "experience-mine:write".to_string();
let _ = store.upsert_node(node); let _ = store.upsert_node(node);
} }
} }

View file

@ -8,7 +8,7 @@
use crate::config; use crate::config;
use super::llm; use super::llm;
use super::transcript; use super::transcript;
use crate::store::{self, Provenance}; use crate::store;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashSet; use std::collections::HashSet;
@ -266,7 +266,7 @@ pub fn mine_and_store(
eprintln!(" Merging facts into existing node: {}", existing_key); eprintln!(" Merging facts into existing node: {}", existing_key);
if let Some(node) = store.nodes.get(existing_key.as_str()) { if let Some(node) = store.nodes.get(existing_key.as_str()) {
let merged = format!("{}\n\n{}", node.content, json); let merged = format!("{}\n\n{}", node.content, json);
store.upsert_provenance(&existing_key, &merged, Provenance::AgentFactMine)?; store.upsert_provenance(&existing_key, &merged, "fact-mine:write")?;
store.save()?; store.save()?;
return Ok(facts.len()); return Ok(facts.len());
} }
@ -275,7 +275,7 @@ pub fn mine_and_store(
} }
}; };
store.upsert_provenance(&key, &json, Provenance::AgentFactMine)?; store.upsert_provenance(&key, &json, "fact-mine:write")?;
store.save()?; store.save()?;
eprintln!(" Stored {} facts as {}", facts.len(), key); eprintln!(" Stored {} facts as {}", facts.len(), key);

View file

@ -217,7 +217,7 @@ impl DepthDb {
pub fn save(&self, store: &mut Store) { pub fn save(&self, store: &mut Store) {
if let Ok(json) = serde_json::to_string(&self.depths) { if let Ok(json) = serde_json::to_string(&self.depths) {
store.upsert_provenance(DEPTH_DB_KEY, &json, store.upsert_provenance(DEPTH_DB_KEY, &json,
store::Provenance::AgentKnowledgeObservation).ok(); "observation:write").ok();
} }
} }
@ -295,12 +295,11 @@ pub fn apply_action(
timestamp: &str, timestamp: &str,
depth: i32, depth: i32,
) -> bool { ) -> bool {
let provenance = agent_provenance(agent);
match &action.kind { match &action.kind {
ActionKind::WriteNode { key, content, .. } => { ActionKind::WriteNode { key, content, .. } => {
let stamped = stamp_content(content, agent, timestamp, depth); let stamped = stamp_content(content, agent, timestamp, depth);
store.upsert_provenance(key, &stamped, provenance).is_ok() let prov = format!("{}:write", agent);
store.upsert_provenance(key, &stamped, &prov).is_ok()
} }
ActionKind::Link { source, target } => { ActionKind::Link { source, target } => {
if has_edge(store, source, target) { if has_edge(store, source, target) {
@ -320,15 +319,17 @@ pub fn apply_action(
0.3, 0.3,
source, target, source, target,
); );
rel.provenance = provenance; rel.provenance = format!("{}:link", agent);
store.add_relation(rel).is_ok() store.add_relation(rel).is_ok()
} }
ActionKind::Refine { key, content } => { ActionKind::Refine { key, content } => {
let stamped = stamp_content(content, agent, timestamp, depth); let stamped = stamp_content(content, agent, timestamp, depth);
store.upsert_provenance(key, &stamped, provenance).is_ok() let prov = format!("{}:refine", agent);
store.upsert_provenance(key, &stamped, &prov).is_ok()
} }
ActionKind::Demote { key } => { ActionKind::Demote { key } => {
if let Some(node) = store.nodes.get_mut(key) { if let Some(node) = store.nodes.get_mut(key) {
node.provenance = format!("{}:demote", agent);
node.weight = (node.weight * 0.5).max(0.05); node.weight = (node.weight * 0.5).max(0.05);
true true
} else { } else {
@ -338,13 +339,13 @@ pub fn apply_action(
} }
} }
fn agent_provenance(agent: &str) -> store::Provenance { fn agent_provenance(agent: &str) -> String {
match agent { match agent {
"observation" => store::Provenance::AgentKnowledgeObservation, "observation" => "agent:knowledge-observation".to_string(),
"extractor" | "pattern" => store::Provenance::AgentKnowledgePattern, "extractor" | "pattern" => "agent:knowledge-pattern".to_string(),
"connector" => store::Provenance::AgentKnowledgeConnector, "connector" => "agent:knowledge-connector".to_string(),
"challenger" => store::Provenance::AgentKnowledgeChallenger, "challenger" => "agent:knowledge-challenger".to_string(),
_ => store::Provenance::Agent, _ => format!("agent:{}", agent),
} }
} }
@ -587,7 +588,7 @@ pub fn run_one_agent(
let ts = store::compact_timestamp(); let ts = store::compact_timestamp();
let report_key = format!("_{}-{}-{}", llm_tag, agent_name, ts); let report_key = format!("_{}-{}-{}", llm_tag, agent_name, ts);
let provenance = agent_provenance(agent_name); let provenance = agent_provenance(agent_name);
store.upsert_provenance(&report_key, &output, provenance).ok(); store.upsert_provenance(&report_key, &output, &provenance).ok();
let actions = parse_all_actions(&output); let actions = parse_all_actions(&output);
let no_ops = count_no_ops(&output); let no_ops = count_no_ops(&output);
@ -824,7 +825,7 @@ pub fn run_knowledge_loop(config: &KnowledgeLoopConfig) -> Result<Vec<CycleResul
if let Ok(json) = serde_json::to_string_pretty(&history) { if let Ok(json) = serde_json::to_string_pretty(&history) {
store = Store::load()?; store = Store::load()?;
store.upsert_provenance(&key, &json, store.upsert_provenance(&key, &json,
store::Provenance::AgentKnowledgeObservation).ok(); "observation:write").ok();
depth_db.save(&mut store); depth_db.save(&mut store);
store.save()?; store.save()?;
} }

View file

@ -366,6 +366,15 @@ enum Command {
/// Node key /// Node key
key: Vec<String>, key: Vec<String>,
}, },
/// Show most recent writes to the node log
Tail {
/// Number of entries (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)]
full: bool,
},
/// Upsert node content from stdin /// Upsert node content from stdin
Write { Write {
/// Node key /// Node key
@ -567,6 +576,8 @@ fn main() {
Command::Render { key } => cmd_render(&key), Command::Render { key } => cmd_render(&key),
Command::History { full, key } Command::History { full, key }
=> cmd_history(&key, full), => cmd_history(&key, full),
Command::Tail { n, full }
=> cmd_tail(n, full),
Command::Write { key } => cmd_write(&key), Command::Write { key } => cmd_write(&key),
Command::Import { files } Command::Import { files }
=> cmd_import(&files), => cmd_import(&files),
@ -2017,10 +2028,10 @@ fn cmd_render(key: &[String]) -> Result<(), String> {
} }
let key = key.join(" "); let key = key.join(" ");
let store = store::Store::load()?; let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?; let bare = store::strip_md_suffix(&key);
let node = store.nodes.get(&resolved) let node = store.nodes.get(&bare)
.ok_or_else(|| format!("Node not found: {}", resolved))?; .ok_or_else(|| format!("Node not found: {}", bare))?;
print!("{}", node.content); print!("{}", node.content);
Ok(()) Ok(())
@ -2052,7 +2063,7 @@ fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
.map_err(|e| format!("read log: {}", e))?; .map_err(|e| format!("read log: {}", e))?;
for node_reader in log.get_nodes() for node_reader in log.get_nodes()
.map_err(|e| format!("get nodes: {}", e))? { .map_err(|e| format!("get nodes: {}", e))? {
let node = store::Node::from_capnp(node_reader)?; let node = store::Node::from_capnp_migrate(node_reader)?;
if node.key == key { if node.key == key {
versions.push(node); versions.push(node);
} }
@ -2073,13 +2084,13 @@ fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
let content_len = node.content.len(); let content_len = node.content.len();
if full { if full {
eprintln!("=== v{} {} {} w={:.3} {}b ===", eprintln!("=== v{} {} {} w={:.3} {}b ===",
node.version, ts, node.provenance.label(), node.weight, content_len); node.version, ts, node.provenance, node.weight, content_len);
eprintln!("{}", node.content); eprintln!("{}", node.content);
} else { } else {
let preview = util::first_n_chars(&node.content, 120); let preview = util::first_n_chars(&node.content, 120);
let preview = preview.replace('\n', "\\n"); let preview = preview.replace('\n', "\\n");
eprintln!(" v{:<3} {} {:24} w={:.3} {}b", eprintln!(" v{:<3} {} {:24} w={:.3} {}b",
node.version, ts, node.provenance.label(), node.weight, content_len); node.version, ts, node.provenance, node.weight, content_len);
eprintln!(" {}", preview); eprintln!(" {}", preview);
} }
} }
@ -2087,7 +2098,7 @@ fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
if !full { if !full {
if let Some(latest) = versions.last() { if let Some(latest) = versions.last() {
eprintln!("\n--- Latest content (v{}, {}) ---", eprintln!("\n--- Latest content (v{}, {}) ---",
latest.version, latest.provenance.label()); latest.version, latest.provenance);
print!("{}", latest.content); print!("{}", latest.content);
} }
} }
@ -2095,6 +2106,56 @@ fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
Ok(()) Ok(())
} }
fn cmd_tail(n: usize, full: bool) -> Result<(), String> {
let path = store::nodes_path();
if !path.exists() {
return Err("No node log found".into());
}
use std::io::BufReader;
let file = std::fs::File::open(&path)
.map_err(|e| format!("open {}: {}", path.display(), e))?;
let mut reader = BufReader::new(file);
// Read all entries, keep last N
let mut entries: Vec<store::Node> = Vec::new();
while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
let log = msg.get_root::<poc_memory::memory_capnp::node_log::Reader>()
.map_err(|e| format!("read log: {}", e))?;
for node_reader in log.get_nodes()
.map_err(|e| format!("get nodes: {}", e))? {
let node = store::Node::from_capnp_migrate(node_reader)?;
entries.push(node);
}
}
let start = entries.len().saturating_sub(n);
for node in &entries[start..] {
let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
store::format_datetime(node.timestamp)
} else {
format!("(raw:{})", node.timestamp)
};
let del = if node.deleted { " [DELETED]" } else { "" };
if full {
eprintln!("--- {} (v{}) {} via {} w={:.3}{} ---",
node.key, node.version, ts, node.provenance, node.weight, del);
eprintln!("{}\n", node.content);
} else {
let preview = util::first_n_chars(&node.content, 100).replace('\n', "\\n");
eprintln!(" {} v{} w={:.2}{}",
ts, node.version, node.weight, del);
eprintln!(" {} via {}", node.key, node.provenance);
if !preview.is_empty() {
eprintln!(" {}", preview);
}
eprintln!();
}
}
Ok(())
}
fn cmd_write(key: &[String]) -> Result<(), String> { fn cmd_write(key: &[String]) -> Result<(), String> {
if key.is_empty() { if key.is_empty() {
return Err("write requires a key (reads content from stdin)".into()); return Err("write requires a key (reads content from stdin)".into());
@ -2218,7 +2279,7 @@ fn cmd_journal_write(text: &[String]) -> Result<(), String> {
let mut node = store::new_node(&key, &content); let mut node = store::new_node(&key, &content);
node.node_type = store::NodeType::EpisodicSession; node.node_type = store::NodeType::EpisodicSession;
node.provenance = store::Provenance::Journal; node.provenance = "journal".to_string();
if let Some(src) = source_ref { if let Some(src) = source_ref {
node.source_ref = src; node.source_ref = src;
} }

View file

@ -12,7 +12,7 @@
// Old files are preserved as backup. Run once. // Old files are preserved as backup. Run once.
use crate::store::{ use crate::store::{
self, Store, Node, NodeType, Provenance, RelationType, self, Store, Node, NodeType, RelationType,
parse_units, new_relation, parse_units, new_relation,
}; };
@ -194,7 +194,7 @@ pub fn migrate() -> Result<(), String> {
} else { } else {
NodeType::Semantic NodeType::Semantic
}, },
provenance: Provenance::Manual, provenance: "manual".to_string(),
key: key.clone(), key: key.clone(),
content, content,
weight: old_entry.weight as f32, weight: old_entry.weight as f32,
@ -233,7 +233,7 @@ pub fn migrate() -> Result<(), String> {
} else { } else {
NodeType::Semantic NodeType::Semantic
}, },
provenance: Provenance::Manual, provenance: "manual".to_string(),
key: key.clone(), key: key.clone(),
content: unit.content.clone(), content: unit.content.clone(),
weight: 0.7, weight: 0.7,

View file

@ -173,7 +173,7 @@ fn resolve_field(field: &str, key: &str, store: &Store, graph: &Graph) -> Option
"weight" => Some(Value::Num(node.weight as f64)), "weight" => Some(Value::Num(node.weight as f64)),
"category" => None, // vestigial, kept for query compat "category" => None, // vestigial, kept for query compat
"node_type" => Some(Value::Str(node_type_label(node.node_type).to_string())), "node_type" => Some(Value::Str(node_type_label(node.node_type).to_string())),
"provenance" => Some(Value::Str(node.provenance.label().to_string())), "provenance" => Some(Value::Str(node.provenance.clone())),
"emotion" => Some(Value::Num(node.emotion as f64)), "emotion" => Some(Value::Num(node.emotion as f64)),
"retrievals" => Some(Value::Num(node.retrievals as f64)), "retrievals" => Some(Value::Num(node.retrievals as f64)),
"uses" => Some(Value::Num(node.uses as f64)), "uses" => Some(Value::Num(node.uses as f64)),

View file

@ -38,7 +38,7 @@
// Stages are parsed from strings and composed via the -p flag or // Stages are parsed from strings and composed via the -p flag or
// pipe-separated in agent definitions. // pipe-separated in agent definitions.
use crate::store::{Store, StoreView, NodeType, Provenance}; use crate::store::{Store, StoreView, NodeType};
use crate::graph::Graph; use crate::graph::Graph;
use crate::spectral; use crate::spectral;
@ -147,7 +147,7 @@ pub enum Filter {
Weight(Cmp), Weight(Cmp),
Age(Cmp), // vs now - timestamp (seconds) Age(Cmp), // vs now - timestamp (seconds)
ContentLen(Cmp), ContentLen(Cmp),
Provenance(Provenance), Provenance(String),
NotVisited { agent: String, duration: i64 }, // seconds NotVisited { agent: String, duration: i64 }, // seconds
Visited { agent: String }, Visited { agent: String },
Negated(Box<Filter>), Negated(Box<Filter>),
@ -280,9 +280,7 @@ impl Stage {
"age" => Stage::Filter(Filter::Age(parse_cmp(value)?)), "age" => Stage::Filter(Filter::Age(parse_cmp(value)?)),
"content-len" => Stage::Filter(Filter::ContentLen(parse_cmp(value)?)), "content-len" => Stage::Filter(Filter::ContentLen(parse_cmp(value)?)),
"provenance" => { "provenance" => {
let prov = Provenance::from_label(value) Stage::Filter(Filter::Provenance(value.to_string()))
.ok_or_else(|| format!("unknown provenance: {}", value))?;
Stage::Filter(Filter::Provenance(prov))
} }
"not-visited" => { "not-visited" => {
let (agent, dur) = value.split_once(',') let (agent, dur) = value.split_once(',')
@ -363,7 +361,7 @@ impl fmt::Display for Filter {
Filter::Weight(c) => write!(f, "weight:{}", c), Filter::Weight(c) => write!(f, "weight:{}", c),
Filter::Age(c) => write!(f, "age:{}", c), Filter::Age(c) => write!(f, "age:{}", c),
Filter::ContentLen(c) => write!(f, "content-len:{}", c), Filter::ContentLen(c) => write!(f, "content-len:{}", c),
Filter::Provenance(p) => write!(f, "provenance:{}", p.label()), Filter::Provenance(p) => write!(f, "provenance:{}", p),
Filter::NotVisited { agent, duration } => write!(f, "not-visited:{},{}s", agent, duration), Filter::NotVisited { agent, duration } => write!(f, "not-visited:{},{}s", agent, duration),
Filter::Visited { agent } => write!(f, "visited:{}", agent), Filter::Visited { agent } => write!(f, "visited:{}", agent),
Filter::Negated(inner) => write!(f, "!{}", inner), Filter::Negated(inner) => write!(f, "!{}", inner),

View file

@ -37,13 +37,15 @@ impl Store {
/// Provenance is determined by the POC_PROVENANCE env var if set, /// Provenance is determined by the POC_PROVENANCE env var if set,
/// otherwise defaults to Manual. /// otherwise defaults to Manual.
pub fn upsert(&mut self, key: &str, content: &str) -> Result<&'static str, String> { pub fn upsert(&mut self, key: &str, content: &str) -> Result<&'static str, String> {
let prov = Provenance::from_env().unwrap_or(Provenance::Manual); let prov = Provenance::from_env()
self.upsert_provenance(key, content, prov) .map(|p| p.label().to_string())
.unwrap_or_else(|| "manual".to_string());
self.upsert_provenance(key, content, &prov)
} }
/// Upsert with explicit provenance (for agent-created nodes). /// Upsert with explicit provenance (for agent-created nodes).
/// Holds StoreLock across refresh + check + write to prevent duplicate UUIDs. /// Holds StoreLock across refresh + check + write to prevent duplicate UUIDs.
pub fn upsert_provenance(&mut self, key: &str, content: &str, provenance: Provenance) -> Result<&'static str, String> { pub fn upsert_provenance(&mut self, key: &str, content: &str, provenance: &str) -> Result<&'static str, String> {
let _lock = StoreLock::acquire()?; let _lock = StoreLock::acquire()?;
self.refresh_nodes()?; self.refresh_nodes()?;
@ -53,14 +55,14 @@ impl Store {
} }
let mut node = existing.clone(); let mut node = existing.clone();
node.content = content.to_string(); node.content = content.to_string();
node.provenance = provenance; node.provenance = provenance.to_string();
node.version += 1; node.version += 1;
self.append_nodes_unlocked(std::slice::from_ref(&node))?; self.append_nodes_unlocked(std::slice::from_ref(&node))?;
self.nodes.insert(key.to_string(), node); self.nodes.insert(key.to_string(), node);
Ok("updated") Ok("updated")
} else { } else {
let mut node = new_node(key, content); let mut node = new_node(key, content);
node.provenance = provenance; node.provenance = provenance.to_string();
self.append_nodes_unlocked(std::slice::from_ref(&node))?; self.append_nodes_unlocked(std::slice::from_ref(&node))?;
self.uuid_to_key.insert(node.uuid, node.key.clone()); self.uuid_to_key.insert(node.uuid, node.key.clone());
self.nodes.insert(key.to_string(), node); self.nodes.insert(key.to_string(), node);

View file

@ -117,7 +117,7 @@ impl Store {
.map_err(|e| format!("read node log: {}", e))?; .map_err(|e| format!("read node log: {}", e))?;
for node_reader in log.get_nodes() for node_reader in log.get_nodes()
.map_err(|e| format!("get nodes: {}", e))? { .map_err(|e| format!("get nodes: {}", e))? {
let node = Node::from_capnp(node_reader)?; let node = Node::from_capnp_migrate(node_reader)?;
let existing_version = self.nodes.get(&node.key) let existing_version = self.nodes.get(&node.key)
.map(|n| n.version) .map(|n| n.version)
.unwrap_or(0); .unwrap_or(0);
@ -164,7 +164,7 @@ impl Store {
.map_err(|e| format!("read relation log: {}", e))?; .map_err(|e| format!("read relation log: {}", e))?;
for rel_reader in log.get_relations() for rel_reader in log.get_relations()
.map_err(|e| format!("get relations: {}", e))? { .map_err(|e| format!("get relations: {}", e))? {
let rel = Relation::from_capnp(rel_reader)?; let rel = Relation::from_capnp_migrate(rel_reader)?;
let existing_version = by_uuid.get(&rel.uuid) let existing_version = by_uuid.get(&rel.uuid)
.map(|r| r.version) .map(|r| r.version)
.unwrap_or(0); .unwrap_or(0);
@ -199,7 +199,7 @@ impl Store {
.map_err(|e| format!("read node log: {}", e))?; .map_err(|e| format!("read node log: {}", e))?;
for node_reader in log.get_nodes() for node_reader in log.get_nodes()
.map_err(|e| format!("get nodes: {}", e))? { .map_err(|e| format!("get nodes: {}", e))? {
let node = Node::from_capnp(node_reader)?; let node = Node::from_capnp_migrate(node_reader)?;
let dominated = by_uuid.get(&node.uuid) let dominated = by_uuid.get(&node.uuid)
.map(|n| node.version >= n.version) .map(|n| node.version >= n.version)
.unwrap_or(true); .unwrap_or(true);
@ -276,7 +276,7 @@ impl Store {
.map_err(|e| format!("read node log delta: {}", e))?; .map_err(|e| format!("read node log delta: {}", e))?;
for node_reader in log.get_nodes() for node_reader in log.get_nodes()
.map_err(|e| format!("get nodes delta: {}", e))? { .map_err(|e| format!("get nodes delta: {}", e))? {
let node = Node::from_capnp(node_reader)?; let node = Node::from_capnp_migrate(node_reader)?;
let dominated = self.nodes.get(&node.key) let dominated = self.nodes.get(&node.key)
.map(|n| node.version >= n.version) .map(|n| node.version >= n.version)
.unwrap_or(true); .unwrap_or(true);

View file

@ -190,7 +190,7 @@ pub struct Node {
pub version: u32, pub version: u32,
pub timestamp: i64, pub timestamp: i64,
pub node_type: NodeType, pub node_type: NodeType,
pub provenance: Provenance, pub provenance: String,
pub key: String, pub key: String,
pub content: String, pub content: String,
pub weight: f32, pub weight: f32,
@ -233,7 +233,7 @@ pub struct Relation {
pub target: [u8; 16], pub target: [u8; 16],
pub rel_type: RelationType, pub rel_type: RelationType,
pub strength: f32, pub strength: f32,
pub provenance: Provenance, pub provenance: String,
pub deleted: bool, pub deleted: bool,
pub source_key: String, pub source_key: String,
pub target_key: String, pub target_key: String,
@ -338,25 +338,51 @@ capnp_enum!(RelationType, memory_capnp::RelationType,
capnp_message!(Node, capnp_message!(Node,
reader: memory_capnp::content_node::Reader<'_>, reader: memory_capnp::content_node::Reader<'_>,
builder: memory_capnp::content_node::Builder<'_>, builder: memory_capnp::content_node::Builder<'_>,
text: [key, content, source_ref, created, state_tag], text: [key, content, source_ref, created, state_tag, provenance],
uuid: [uuid], uuid: [uuid],
prim: [version, timestamp, weight, emotion, deleted, prim: [version, timestamp, weight, emotion, deleted,
retrievals, uses, wrongs, last_replayed, retrievals, uses, wrongs, last_replayed,
spaced_repetition_interval, position, created_at], spaced_repetition_interval, position, created_at],
enm: [node_type: NodeType, provenance: Provenance], enm: [node_type: NodeType],
skip: [community_id, clustering_coefficient, degree], skip: [community_id, clustering_coefficient, degree],
); );
impl Node {
/// Read from capnp with migration: if the new provenance text field
/// is empty (old record), fall back to the deprecated provenanceOld enum.
pub fn from_capnp_migrate(r: memory_capnp::content_node::Reader<'_>) -> Result<Self, String> {
let mut node = Self::from_capnp(r)?;
if node.provenance.is_empty() {
if let Ok(old) = r.get_provenance_old() {
node.provenance = Provenance::from_capnp(old).label().to_string();
}
}
Ok(node)
}
}
capnp_message!(Relation, capnp_message!(Relation,
reader: memory_capnp::relation::Reader<'_>, reader: memory_capnp::relation::Reader<'_>,
builder: memory_capnp::relation::Builder<'_>, builder: memory_capnp::relation::Builder<'_>,
text: [source_key, target_key], text: [source_key, target_key, provenance],
uuid: [uuid, source, target], uuid: [uuid, source, target],
prim: [version, timestamp, strength, deleted], prim: [version, timestamp, strength, deleted],
enm: [rel_type: RelationType, provenance: Provenance], enm: [rel_type: RelationType],
skip: [], skip: [],
); );
impl Relation {
pub fn from_capnp_migrate(r: memory_capnp::relation::Reader<'_>) -> Result<Self, String> {
let mut rel = Self::from_capnp(r)?;
if rel.provenance.is_empty() {
if let Ok(old) = r.get_provenance_old() {
rel.provenance = Provenance::from_capnp(old).label().to_string();
}
}
Ok(rel)
}
}
#[derive(Clone, Debug, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)] #[archive(check_bytes)]
pub struct RetrievalEvent { pub struct RetrievalEvent {
@ -480,7 +506,7 @@ pub fn new_node(key: &str, content: &str) -> Node {
version: 1, version: 1,
timestamp: now_epoch(), timestamp: now_epoch(),
node_type: NodeType::Semantic, node_type: NodeType::Semantic,
provenance: Provenance::Manual, provenance: "manual".to_string(),
key: key.to_string(), key: key.to_string(),
content: content.to_string(), content: content.to_string(),
weight: 0.7, weight: 0.7,
@ -551,7 +577,7 @@ pub fn new_relation(
target: target_uuid, target: target_uuid,
rel_type, rel_type,
strength, strength,
provenance: Provenance::Manual, provenance: "manual".to_string(),
deleted: false, deleted: false,
source_key: source_key.to_string(), source_key: source_key.to_string(),
target_key: target_key.to_string(), target_key: target_key.to_string(),