split capnp_store.rs into src/store/ module hierarchy

capnp_store.rs (1772 lines) → four focused modules:
  store/types.rs  — types, macros, constants, path helpers
  store/parse.rs  — markdown parsing (MemoryUnit, parse_units)
  store/view.rs   — StoreView trait, MmapView, AnyView
  store/mod.rs    — Store impl methods, re-exports

new_node/new_relation become free functions in types.rs.
All callers updated: capnp_store:: → store::
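
A rough sketch of how the new src/store/mod.rs could wire this together, inferred only from the module list above (the exact re-export set is an assumption, not the actual file contents):

    mod parse;   // MemoryUnit, parse_units
    mod types;   // core types, capnp macros, path/time helpers, new_node/new_relation
    mod view;    // StoreView trait, MmapView, AnyView

    pub use parse::{parse_units, MemoryUnit};
    pub use types::{
        Category, Node, NodeType, Provenance, Relation, RelationType, RetrievalEvent,
        new_node, new_relation,
        memory_dir, now_epoch, today, epoch_to_local,
        format_date, format_datetime, format_datetime_space,
    };
    pub use view::{AnyView, MmapView, StoreView};

    // Store itself (load/save/upsert/build_graph/...) stays in mod.rs.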
ProofOfConcept 2026-03-03 12:56:15 -05:00
parent e34c0ccf4c
commit 635da6d3e2
11 changed files with 980 additions and 978 deletions


@@ -8,7 +8,7 @@
// 4. Writes the digest to the store + episodic dir
// 5. Extracts links and saves agent results
-use crate::capnp_store::{self, Store};
+use crate::store::{self, Store, new_node, new_relation};
use crate::neuro;
use regex::Regex;
@@ -19,7 +19,7 @@ use std::path::{Path, PathBuf};
use std::process::Command;
fn memory_dir() -> PathBuf {
-capnp_store::memory_dir()
+store::memory_dir()
}
fn episodic_dir() -> PathBuf {
@@ -261,7 +261,7 @@ fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
let mut dates = Vec::new();
for i in 0..7 {
let day_epoch = monday_epoch + (i * 86400);
-let (dy, dm, dd, _, _, _) = capnp_store::epoch_to_local(day_epoch as f64);
+let (dy, dm, dd, _, _, _) = store::epoch_to_local(day_epoch as f64);
dates.push(format!("{:04}-{:02}-{:02}", dy, dm, dd));
}
@@ -439,7 +439,7 @@ fn weeks_in_month(year: i32, month: u32) -> Vec<String> {
let mut d = 1u32;
loop {
let epoch = date_to_epoch(year, month, d);
-let (_, _, _, _, _, _) = capnp_store::epoch_to_local(epoch as f64);
+let (_, _, _, _, _, _) = store::epoch_to_local(epoch as f64);
// Check if we're still in the target month
let mut tm: libc::tm = unsafe { std::mem::zeroed() };
let secs = epoch as libc::time_t;
@@ -551,8 +551,8 @@ Use ONLY keys from the semantic memory list below.
pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String> {
let (year, month) = if month_arg.is_empty() {
-let now = capnp_store::now_epoch();
-let (y, m, _, _, _, _) = capnp_store::epoch_to_local(now);
+let now = store::now_epoch();
+let (y, m, _, _, _, _) = store::epoch_to_local(now);
(y, m)
} else {
let parts: Vec<&str> = month_arg.split('-').collect();
@@ -617,7 +617,7 @@ pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String
/// (needs weeklies). Skips today (incomplete day). Skips already-existing
/// digests.
pub fn digest_auto(store: &mut Store) -> Result<(), String> {
-let today = capnp_store::today();
+let today = store::today();
let epi = episodic_dir();
// --- Phase 1: find dates with journal entries but no daily digest ---
@@ -707,7 +707,7 @@ pub fn digest_auto(store: &mut Store) -> Result<(), String> {
// A month is "ready" if the month is before the current month and at
// least one weekly digest exists for it.
-let (cur_y, cur_m, _, _, _, _) = capnp_store::epoch_to_local(capnp_store::now_epoch());
+let (cur_y, cur_m, _, _, _, _) = store::epoch_to_local(store::now_epoch());
let mut months_seen: std::collections::BTreeSet<(i32, u32)> = std::collections::BTreeSet::new();
for date in &daily_dates_done {
@@ -782,7 +782,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
let mut log = LogWriter::new(&log_path)?;
log.write("=== CONSOLIDATE FULL ===")?;
-log.write(&format!("Started: {}", capnp_store::format_datetime(capnp_store::now_epoch())))?;
+log.write(&format!("Started: {}", store::format_datetime(store::now_epoch())))?;
log.write(&format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()))?;
log.write("")?;
@@ -890,7 +890,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
};
// Save report
-let ts = capnp_store::format_datetime(capnp_store::now_epoch())
+let ts = store::format_datetime(store::now_epoch())
.replace([':', '-', 'T'], "");
let report_name = format!("consolidation-{}-{}.md", agent_type, ts);
let report_path = agent_results_dir().join(&report_name);
@@ -973,7 +973,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
// --- Step 6: Summary ---
let elapsed = start.elapsed();
log.write("\n--- Summary ---")?;
-log.write(&format!("Finished: {}", capnp_store::format_datetime(capnp_store::now_epoch())))?;
+log.write(&format!("Finished: {}", store::format_datetime(store::now_epoch())))?;
log.write(&format!("Duration: {:.0}s", elapsed.as_secs_f64()))?;
*store = Store::load()?;
log.write(&format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()))?;
@@ -1238,9 +1238,9 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, us
None => { skipped += 1; continue; }
};
-let rel = Store::new_relation(
+let rel = new_relation(
source_uuid, target_uuid,
-capnp_store::RelationType::Link,
+store::RelationType::Link,
0.5,
&source, &target,
);
@@ -1512,9 +1512,9 @@ pub fn journal_enrich(
None => continue,
};
-let rel = Store::new_relation(
+let rel = new_relation(
source_uuid, target_uuid,
-capnp_store::RelationType::Link,
+store::RelationType::Link,
0.5,
&source_key, &resolved,
);
@@ -1525,7 +1525,7 @@
}
// Save result to agent-results
-let timestamp = capnp_store::format_datetime(capnp_store::now_epoch())
+let timestamp = store::format_datetime(store::now_epoch())
.replace([':', '-'], "");
let result_file = agent_results_dir()
.join(format!("{}.json", timestamp));
@@ -1658,7 +1658,7 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio
println!(" {} actions extracted", actions.len());
// Save actions
-let timestamp = capnp_store::format_datetime(capnp_store::now_epoch())
+let timestamp = store::format_datetime(store::now_epoch())
.replace([':', '-'], "");
let actions_path = agent_results_dir()
.join(format!("consolidation-actions-{}.json", timestamp));
@@ -1747,9 +1747,9 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio
let source_uuid = match store.nodes.get(&source) { Some(n) => n.uuid, None => { skipped += 1; continue; } };
let target_uuid = match store.nodes.get(&target) { Some(n) => n.uuid, None => { skipped += 1; continue; } };
-let rel = Store::new_relation(
+let rel = new_relation(
source_uuid, target_uuid,
-capnp_store::RelationType::Auto,
+store::RelationType::Auto,
0.5,
&source, &target,
);
@@ -2110,9 +2110,9 @@ pub fn link_audit(store: &mut Store, apply: bool) -> Result<AuditStats, String>
// Create new
if target_uuid != [0u8; 16] {
-let new_rel = Store::new_relation(
+let new_rel = new_relation(
source_uuid, target_uuid,
-capnp_store::RelationType::Auto,
+store::RelationType::Auto,
old_strength,
&source_key, new_target,
);
@@ -2303,9 +2303,9 @@ pub fn experience_mine(
}
// Write to store
-let mut node = Store::new_node(&key, &full_content);
-node.node_type = capnp_store::NodeType::EpisodicSession;
-node.category = capnp_store::Category::Observation;
+let mut node = new_node(&key, &full_content);
+node.node_type = store::NodeType::EpisodicSession;
+node.category = store::Category::Observation;
let _ = store.upsert_node(node);
count += 1;
@@ -2315,8 +2315,8 @@
// Record this transcript as mined (even if count == 0, to prevent re-runs)
let dedup_content = format!("Mined {} ({} entries)", jsonl_path, count);
-let mut dedup_node = Store::new_node(&dedup_key, &dedup_content);
-dedup_node.category = capnp_store::Category::Task;
+let mut dedup_node = new_node(&dedup_key, &dedup_content);
+dedup_node.category = store::Category::Task;
let _ = store.upsert_node(dedup_node);
if count > 0 {


@@ -7,7 +7,7 @@
// connections), but relation type and direction are preserved for
// specific queries.
-use crate::capnp_store::{Store, RelationType, StoreView};
+use crate::store::{Store, RelationType, StoreView};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet, VecDeque};
@@ -589,8 +589,8 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
let cats = store.category_counts();
// Snapshot current metrics and log
-let now = crate::capnp_store::now_epoch();
-let date = crate::capnp_store::format_datetime_space(now);
+let now = crate::store::now_epoch();
+let date = crate::store::format_datetime_space(now);
let snap = MetricsSnapshot {
timestamp: now,
date: date.clone(),


@@ -13,7 +13,7 @@
// Neuroscience-inspired: spaced repetition replay, emotional gating,
// interference detection, schema assimilation, reconsolidation.
-mod capnp_store;
+mod store;
mod digest;
mod graph;
mod search;
@@ -201,14 +201,14 @@ Commands:
}
fn cmd_search(args: &[String]) -> Result<(), String> {
-use capnp_store::StoreView;
+use store::StoreView;
if args.is_empty() {
return Err("Usage: poc-memory search QUERY [QUERY...]".into());
}
let query = args.join(" ");
-let view = capnp_store::AnyView::load()?;
+let view = store::AnyView::load()?;
let results = search::search(&query, &view);
if results.is_empty() {
@@ -217,7 +217,7 @@ fn cmd_search(args: &[String]) -> Result<(), String> {
}
// Log retrieval to a small append-only file (avoid 6MB state.bin rewrite)
-capnp_store::Store::log_retrieval_static(&query,
+store::Store::log_retrieval_static(&query,
&results.iter().map(|r| r.key.clone()).collect::<Vec<_>>());
// Show text results
@@ -275,7 +275,7 @@ fn cmd_search(args: &[String]) -> Result<(), String> {
}
fn cmd_init() -> Result<(), String> {
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let count = store.init_from_markdown()?;
store.save()?;
println!("Indexed {} memory units", count);
@@ -287,7 +287,7 @@ fn cmd_migrate() -> Result<(), String> {
}
fn cmd_health() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let g = store.build_graph();
let health = graph::health_report(&g, &store);
println!("{}", health);
@@ -295,7 +295,7 @@ fn cmd_health() -> Result<(), String> {
}
fn cmd_status() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let node_count = store.nodes.len();
let rel_count = store.relations.len();
let categories = store.category_counts();
@@ -316,7 +316,7 @@ fn cmd_status() -> Result<(), String> {
}
fn cmd_graph() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let g = store.build_graph();
println!("Top nodes by degree:");
query::run_query(&store, &g,
@@ -328,7 +328,7 @@ fn cmd_used(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory used KEY".into());
}
let key = args.join(" ");
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
store.mark_used(&resolved);
store.save()?;
@@ -342,7 +342,7 @@ fn cmd_wrong(args: &[String]) -> Result<(), String> {
}
let key = &args[0];
let ctx = if args.len() > 1 { Some(args[1..].join(" ")) } else { None };
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let resolved = store.resolve_key(key)?;
store.mark_wrong(&resolved, ctx.as_deref());
store.save()?;
@@ -355,7 +355,7 @@ fn cmd_gap(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory gap DESCRIPTION".into());
}
let desc = args.join(" ");
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
store.record_gap(&desc);
store.save()?;
println!("Recorded gap: {}", desc);
@@ -368,7 +368,7 @@ fn cmd_categorize(args: &[String]) -> Result<(), String> {
}
let key = &args[0];
let cat = &args[1];
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let resolved = store.resolve_key(key)?;
store.categorize(&resolved, cat)?;
store.save()?;
@@ -377,7 +377,7 @@ fn cmd_categorize(args: &[String]) -> Result<(), String> {
}
fn cmd_fix_categories() -> Result<(), String> {
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let before = format!("{:?}", store.category_counts());
let (changed, kept) = store.fix_categories()?;
store.save()?;
@@ -392,7 +392,7 @@ fn cmd_link_orphans(args: &[String]) -> Result<(), String> {
let min_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(2);
let links_per: usize = args.get(1).and_then(|s| s.parse().ok()).unwrap_or(3);
let sim_thresh: f32 = args.get(2).and_then(|s| s.parse().ok()).unwrap_or(0.15);
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
orphans, links, min_deg, links_per, sim_thresh);
@@ -401,7 +401,7 @@ fn cmd_link_orphans(args: &[String]) -> Result<(), String> {
fn cmd_cap_degree(args: &[String]) -> Result<(), String> {
let max_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(50);
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let (hubs, pruned) = store.cap_degree(max_deg)?;
store.save()?;
println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
@@ -409,7 +409,7 @@ fn cmd_cap_degree(args: &[String]) -> Result<(), String> {
}
fn cmd_decay() -> Result<(), String> {
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let (decayed, pruned) = store.decay();
store.save()?;
println!("Decayed {} nodes, pruned {} below threshold", decayed, pruned);
@@ -436,7 +436,7 @@ fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
}
}
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
if let Some(agent_name) = agent {
// Generate a specific agent prompt
@@ -449,7 +449,7 @@ fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
}
fn cmd_log() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
for event in store.retrieval_log.iter().rev().take(20) {
println!("[{}] q=\"{}\"{} results",
event.timestamp, event.query, event.results.len());
@@ -461,7 +461,7 @@ fn cmd_log() -> Result<(), String> {
}
fn cmd_params() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
println!("decay_factor: {}", store.params.decay_factor);
println!("use_boost: {}", store.params.use_boost);
println!("prune_threshold: {}", store.params.prune_threshold);
@@ -476,7 +476,7 @@ fn cmd_link(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory link KEY".into());
}
let key = args.join(" ");
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
let g = store.build_graph();
println!("Neighbors of '{}':", resolved);
@@ -496,7 +496,7 @@ fn cmd_replay_queue(args: &[String]) -> Result<(), String> {
_ => { i += 1; }
}
}
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let queue = neuro::replay_queue(&store, count);
println!("Replay queue ({} items):", queue.len());
for (i, item) in queue.iter().enumerate() {
@@ -508,14 +508,14 @@ fn cmd_replay_queue(args: &[String]) -> Result<(), String> {
}
fn cmd_consolidate_session() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let plan = neuro::consolidation_plan(&store);
println!("{}", neuro::format_plan(&plan));
Ok(())
}
fn cmd_consolidate_full() -> Result<(), String> {
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
digest::consolidate_full(&mut store)
}
@@ -533,14 +533,14 @@ fn cmd_triangle_close(args: &[String]) -> Result<(), String> {
println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
min_degree, sim_threshold, max_per_hub);
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
Ok(())
}
fn cmd_daily_check() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let report = neuro::daily_check(&store);
print!("{}", report);
Ok(())
@@ -550,7 +550,7 @@ fn cmd_link_add(args: &[String]) -> Result<(), String> {
if args.len() < 2 {
return Err("Usage: poc-memory link-add SOURCE TARGET [REASON]".into());
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let source = store.resolve_key(&args[0])?;
let target = store.resolve_key(&args[1])?;
let reason = if args.len() > 2 { args[2..].join(" ") } else { String::new() };
@@ -577,9 +577,9 @@ fn cmd_link_add(args: &[String]) -> Result<(), String> {
return Ok(());
}
-let rel = capnp_store::Store::new_relation(
+let rel = store::new_relation(
source_uuid, target_uuid,
-capnp_store::RelationType::Auto,
+store::RelationType::Auto,
0.5,
&source, &target,
);
@@ -596,7 +596,7 @@ fn cmd_link_impact(args: &[String]) -> Result<(), String> {
if args.len() < 2 {
return Err("Usage: poc-memory link-impact SOURCE TARGET".into());
}
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let source = store.resolve_key(&args[0])?;
let target = store.resolve_key(&args[1])?;
let g = store.build_graph();
@@ -622,7 +622,7 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
return Ok(());
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let mut applied = 0;
let mut errors = 0;
@@ -718,9 +718,9 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
None => continue,
};
-let rel = capnp_store::Store::new_relation(
+let rel = store::new_relation(
source_uuid, target_uuid,
-capnp_store::RelationType::Link,
+store::RelationType::Link,
0.5,
&source_key, &resolved,
);
@@ -757,13 +757,13 @@ fn cmd_digest(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory digest daily|weekly|monthly|auto [DATE]".into());
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let date_arg = args.get(1).map(|s| s.as_str()).unwrap_or("");
match args[0].as_str() {
"daily" => {
let date = if date_arg.is_empty() {
-capnp_store::format_date(capnp_store::now_epoch())
+store::format_date(store::now_epoch())
} else {
date_arg.to_string()
};
@@ -771,7 +771,7 @@
}
"weekly" => {
let date = if date_arg.is_empty() {
-capnp_store::format_date(capnp_store::now_epoch())
+store::format_date(store::now_epoch())
} else {
date_arg.to_string()
};
@@ -803,7 +803,7 @@ fn cmd_digest_links(args: &[String]) -> Result<(), String> {
return Ok(());
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
Ok(())
@@ -823,7 +823,7 @@ fn cmd_journal_enrich(args: &[String]) -> Result<(), String> {
return Err(format!("JSONL not found: {}", jsonl_path));
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
digest::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
}
@@ -839,7 +839,7 @@ fn cmd_experience_mine(args: &[String]) -> Result<(), String> {
return Err(format!("JSONL not found: {}", jsonl_path));
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let count = digest::experience_mine(&mut store, &jsonl_path)?;
println!("Done: {} new entries mined.", count);
Ok(())
@@ -851,7 +851,7 @@ fn cmd_apply_consolidation(args: &[String]) -> Result<(), String> {
.find(|w| w[0] == "--report")
.map(|w| w[1].as_str());
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
digest::apply_consolidation(&mut store, do_apply, report_file)
}
@@ -861,7 +861,7 @@ fn cmd_differentiate(args: &[String]) -> Result<(), String> {
.find(|a| !a.starts_with("--"))
.map(|s| s.as_str());
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
if let Some(key) = key_arg {
// Differentiate a specific hub
@@ -918,7 +918,7 @@ fn cmd_differentiate(args: &[String]) -> Result<(), String> {
fn cmd_link_audit(args: &[String]) -> Result<(), String> {
let apply = args.iter().any(|a| a == "--apply");
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let stats = digest::link_audit(&mut store, apply)?;
println!("\n{}", "=".repeat(60));
println!("Link audit complete:");
@@ -933,7 +933,7 @@ fn cmd_trace(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory trace KEY".into());
}
let key = args.join(" ");
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
let g = store.build_graph();
@@ -968,13 +968,13 @@ fn cmd_trace(args: &[String]) -> Result<(), String> {
if let Some(nnode) = store.nodes.get(n.as_str()) {
let entry = (n.as_str(), *strength, nnode);
match nnode.node_type {
-capnp_store::NodeType::EpisodicSession =>
+store::NodeType::EpisodicSession =>
episodic_session.push(entry),
-capnp_store::NodeType::EpisodicDaily =>
+store::NodeType::EpisodicDaily =>
episodic_daily.push(entry),
-capnp_store::NodeType::EpisodicWeekly =>
+store::NodeType::EpisodicWeekly =>
episodic_weekly.push(entry),
-capnp_store::NodeType::Semantic =>
+store::NodeType::Semantic =>
semantic.push(entry),
}
}
@@ -1029,7 +1029,7 @@ fn cmd_spectral(args: &[String]) -> Result<(), String> {
let k: usize = args.first()
.and_then(|s| s.parse().ok())
.unwrap_or(30);
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let g = graph::build_graph(&store);
let result = spectral::decompose(&g, k);
spectral::print_summary(&result, &g);
@@ -1040,7 +1040,7 @@ fn cmd_spectral_save(args: &[String]) -> Result<(), String> {
let k: usize = args.first()
.and_then(|s| s.parse().ok())
.unwrap_or(20);
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let g = graph::build_graph(&store);
let result = spectral::decompose(&g, k);
let emb = spectral::to_embedding(&result);
@@ -1080,7 +1080,7 @@ fn cmd_spectral_positions(args: &[String]) -> Result<(), String> {
.and_then(|s| s.parse().ok())
.unwrap_or(30);
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let emb = spectral::load_embedding()?;
// Build communities fresh from graph (don't rely on cached node fields)
@@ -1135,7 +1135,7 @@ fn cmd_spectral_suggest(args: &[String]) -> Result<(), String> {
.and_then(|s| s.parse().ok())
.unwrap_or(20);
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let emb = spectral::load_embedding()?;
let g = store.build_graph();
let communities = g.communities();
@@ -1186,13 +1186,13 @@ fn cmd_spectral_suggest(args: &[String]) -> Result<(), String> {
}
fn cmd_list_keys() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let g = store.build_graph();
query::run_query(&store, &g, "* | sort key asc")
}
fn cmd_list_edges() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
for rel in &store.relations {
println!("{}\t{}\t{:.2}\t{:?}",
rel.source_key, rel.target_key, rel.strength, rel.rel_type);
@@ -1201,7 +1201,7 @@ fn cmd_list_edges() -> Result<(), String> {
}
fn cmd_dump_json() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let json = serde_json::to_string_pretty(&store)
.map_err(|e| format!("serialize: {}", e))?;
println!("{}", json);
@@ -1213,7 +1213,7 @@ fn cmd_node_delete(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory node-delete KEY".into());
}
let key = args.join(" ");
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
store.delete_node(&resolved)?;
store.save()?;
@@ -1222,8 +1222,8 @@ fn cmd_node_delete(args: &[String]) -> Result<(), String> {
}
fn cmd_load_context() -> Result<(), String> {
-let store = capnp_store::Store::load()?;
-let now = capnp_store::now_epoch();
+let store = store::Store::load()?;
+let now = store::now_epoch();
let seven_days = 7.0 * 24.0 * 3600.0;
println!("=== FULL MEMORY LOAD (session start) ===");
@@ -1268,7 +1268,7 @@ fn cmd_load_context() -> Result<(), String> {
// Parse date from key: journal.md#j-2026-02-21-17-45-...
// Cutoff = today minus 7 days as YYYY-MM-DD string for lexicographic compare
let cutoff_secs = now - seven_days;
-let cutoff_date = capnp_store::format_date(cutoff_secs);
+let cutoff_date = store::format_date(cutoff_secs);
let date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2})").unwrap();
let mut journal_nodes: Vec<_> = store.nodes.values()
@@ -1306,7 +1306,7 @@ fn cmd_render(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory render KEY".into());
}
let key = args.join(" ");
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
let node = store.nodes.get(&resolved)
@@ -1330,7 +1330,7 @@ fn cmd_write(args: &[String]) -> Result<(), String> {
return Err("No content on stdin".into());
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let result = store.upsert(&key, &content)?;
match result {
"unchanged" => println!("No change: '{}'", key),
@@ -1348,7 +1348,7 @@ fn cmd_import(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory import FILE [FILE...]".into());
}
-let mut store = capnp_store::Store::load()?;
+let mut store = store::Store::load()?;
let mut total_new = 0;
let mut total_updated = 0;
@@ -1357,7 +1357,7 @@ fn cmd_import(args: &[String]) -> Result<(), String> {
let resolved = if path.exists() {
path
} else {
-let mem_path = capnp_store::memory_dir().join(arg);
+let mem_path = store::memory_dir().join(arg);
if !mem_path.exists() {
eprintln!("File not found: {}", arg);
continue;
@@ -1377,7 +1377,7 @@ fn cmd_import(args: &[String]) -> Result<(), String> {
}
fn cmd_export(args: &[String]) -> Result<(), String> {
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let export_all = args.iter().any(|a| a == "--all");
let targets: Vec<String> = if export_all {
@@ -1401,7 +1401,7 @@ fn cmd_export(args: &[String]) -> Result<(), String> {
}).collect()
};
-let mem_dir = capnp_store::memory_dir();
+let mem_dir = store::memory_dir();
for file_key in &targets {
match store.export_to_markdown(file_key) {
@@ -1426,7 +1426,7 @@ fn cmd_journal_write(args: &[String]) -> Result<(), String> {
let text = args.join(" ");
// Generate timestamp and slug
-let timestamp = capnp_store::format_datetime(capnp_store::now_epoch());
+let timestamp = store::format_datetime(store::now_epoch());
// Slug: lowercase first ~6 words, hyphenated, truncated
let slug: String = text.split_whitespace()
@@ -1446,11 +1446,11 @@ fn cmd_journal_write(args: &[String]) -> Result<(), String> {
// Find source ref (most recently modified .jsonl transcript)
let source_ref = find_current_transcript();
-let mut store = capnp_store::Store::load()?;
-let mut node = capnp_store::Store::new_node(&key, &content);
-node.node_type = capnp_store::NodeType::EpisodicSession;
-node.provenance = capnp_store::Provenance::Journal;
+let mut store = store::Store::load()?;
+let mut node = store::new_node(&key, &content);
+node.node_type = store::NodeType::EpisodicSession;
+node.provenance = store::Provenance::Journal;
if let Some(src) = source_ref {
node.source_ref = src;
}
@@ -1475,7 +1475,7 @@ fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
}
}
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
// Collect journal nodes, sorted by date extracted from content or key
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
@@ -1493,7 +1493,7 @@ fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
s
}
};
-let extract_sort_key = |node: &capnp_store::Node| -> String {
+let extract_sort_key = |node: &store::Node| -> String {
// Try key first (journal.md#j-2026-02-28t23-11-...)
if let Some(caps) = key_date_re.captures(&node.key) {
return normalize_date(&caps[1]);
@@ -1565,7 +1565,7 @@ fn cmd_interference(args: &[String]) -> Result<(), String> {
_ => { i += 1; }
}
}
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let g = store.build_graph();
let pairs = neuro::detect_interference(&store, &g, threshold);
@@ -1597,7 +1597,7 @@ Pipe stages:\n \
}
let query_str = args.join(" ");
-let store = capnp_store::Store::load()?;
+let store = store::Store::load()?;
let graph = store.build_graph();
query::run_query(&store, &graph, &query_str)
}


@@ -11,9 +11,9 @@
//
// Old files are preserved as backup. Run once.
-use crate::capnp_store::{
+use crate::store::{
self, Store, Node, Category, NodeType, Provenance, RelationType,
-parse_units,
+parse_units, new_relation,
};
use serde::Deserialize;
@@ -149,7 +149,7 @@ pub fn migrate() -> Result<(), String> {
old_store.entries.len(), old_store.retrieval_log.len());
// Scan markdown files to get content + edges
-let mut units_by_key: HashMap<String, capnp_store::MemoryUnit> = HashMap::new();
+let mut units_by_key: HashMap<String, store::MemoryUnit> = HashMap::new();
scan_markdown_dir(&memory_dir, &mut units_by_key)?;
eprintln!("Scanned {} markdown units", units_by_key.len());
@@ -168,7 +168,7 @@ pub fn migrate() -> Result<(), String> {
// Migrate retrieval log
store.retrieval_log = old_store.retrieval_log.iter().map(|e| {
-capnp_store::RetrievalEvent {
+store::RetrievalEvent {
query: e.query.clone(),
timestamp: e.timestamp.clone(),
results: e.results.clone(),
@@ -197,7 +197,7 @@ pub fn migrate() -> Result<(), String> {
let node = Node {
uuid,
version: 1,
-timestamp: capnp_store::now_epoch(),
+timestamp: store::now_epoch(),
node_type: if key.contains("journal") {
NodeType::EpisodicSession
} else {
@@ -236,7 +236,7 @@ pub fn migrate() -> Result<(), String> {
let node = Node {
uuid,
version: 1,
-timestamp: capnp_store::now_epoch(),
+timestamp: store::now_epoch(),
node_type: if key.contains("journal") {
NodeType::EpisodicSession
} else {
@@ -291,12 +291,12 @@ pub fn migrate() -> Result<(), String> {
};
// Avoid duplicate relations
-let exists = all_relations.iter().any(|r: &capnp_store::Relation|
+let exists = all_relations.iter().any(|r: &store::Relation|
(r.source == source_uuid && r.target == target_uuid) ||
(r.source == target_uuid && r.target == source_uuid));
if exists { continue; }
-all_relations.push(Store::new_relation(
+all_relations.push(new_relation(
source_uuid, target_uuid,
RelationType::Link, 1.0,
key, link,
@@ -310,7 +310,7 @@ pub fn migrate() -> Result<(), String> {
None => continue,
};
-all_relations.push(Store::new_relation(
+all_relations.push(new_relation(
cause_uuid, source_uuid,
RelationType::Causal, 1.0,
cause, key,
@@ -349,7 +349,7 @@ pub fn migrate() -> Result<(), String> {
fn scan_markdown_dir(
dir: &Path,
-units: &mut HashMap<String, capnp_store::MemoryUnit>,
+units: &mut HashMap<String, store::MemoryUnit>,
) -> Result<(), String> {
let entries = fs::read_dir(dir)
.map_err(|e| format!("read dir {}: {}", dir.display(), e))?;


@@ -4,13 +4,12 @@
// interference detection, emotional gating, consolidation priority
// scoring, and the agent consolidation harness.
-use crate::capnp_store::Store;
+use crate::store::{Store, new_relation, now_epoch};
use crate::graph::{self, Graph};
use crate::similarity;
use crate::spectral::{self, SpectralEmbedding, SpectralPosition};
use std::collections::HashMap;
-use crate::capnp_store::now_epoch;
const SECS_PER_DAY: f64 = 86400.0;
@@ -524,7 +523,7 @@ pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<String,
let mut items = replay_queue_with_graph(store, count * 2, &graph, emb.as_ref());
items.retain(|item| {
store.nodes.get(&item.key)
-.map(|n| matches!(n.node_type, crate::capnp_store::NodeType::EpisodicSession))
+.map(|n| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
.unwrap_or(false)
|| item.key.contains("journal")
|| item.key.contains("session")
@@ -776,8 +775,8 @@ pub fn daily_check(store: &Store) -> String {
}
// Log this snapshot too
-let now = crate::capnp_store::now_epoch();
-let date = crate::capnp_store::format_datetime_space(now);
+let now = crate::store::now_epoch();
+let date = crate::store::format_datetime_space(now);
graph::save_metrics_snapshot(&graph::MetricsSnapshot {
timestamp: now, date,
nodes: graph.nodes().len(),
@@ -963,9 +962,9 @@ pub fn apply_differentiation(
}
// Create new section→neighbor relation
-let new_rel = Store::new_relation(
+let new_rel = new_relation(
section_uuid, neighbor_uuid,
-crate::capnp_store::RelationType::Auto,
+crate::store::RelationType::Auto,
0.5,
&mv.to_section, &mv.neighbor_key,
);
@@ -1067,9 +1066,9 @@ pub fn triangle_close(
let uuid_a = match store.nodes.get(a) { Some(n) => n.uuid, None => continue };
let uuid_b = match store.nodes.get(b) { Some(n) => n.uuid, None => continue };
-let rel = Store::new_relation(
+let rel = new_relation(
uuid_a, uuid_b,
-crate::capnp_store::RelationType::Auto,
+crate::store::RelationType::Auto,
sim * 0.5, // scale by similarity
a, b,
);
@@ -1144,9 +1143,9 @@ pub fn link_orphans(
None => continue,
};
-let rel = Store::new_relation(
+let rel = new_relation(
orphan_uuid, target_uuid,
-crate::capnp_store::RelationType::Auto,
+crate::store::RelationType::Auto,
sim * 0.5,
orphan_key, target_key,
);


@@ -21,7 +21,7 @@
// key ~ 'journal.*' AND degree > 10 | count
// * | sort weight asc | limit 20
-use crate::capnp_store::{NodeType, Provenance, RelationType, Store};
+use crate::store::{NodeType, Provenance, RelationType, Store};
use crate::graph::Graph;
use regex::Regex;
use std::collections::BTreeMap;


@@ -4,7 +4,7 @@
// supports circumscription parameter for blending associative vs
// causal walks, and benefits from community-aware result grouping.
-use crate::capnp_store::StoreView;
+use crate::store::StoreView;
use crate::graph::Graph;
use std::collections::{HashMap, HashSet, VecDeque};

File diff suppressed because it is too large.

src/store/parse.rs (new file, 167 lines)

@@ -0,0 +1,167 @@
// Markdown parsing for memory files
//
// Splits markdown files into MemoryUnit structs based on `<!-- mem: ... -->`
// markers. Each marker starts a new section; content before the first marker
// becomes the file-level unit. Links and causal edges are extracted from
// both marker attributes and inline markdown links.
use super::NodeType;
use regex::Regex;
use std::collections::HashMap;
use std::path::Path;
use std::sync::OnceLock;
pub struct MemoryUnit {
pub key: String,
pub content: String,
pub marker_links: Vec<String>,
pub md_links: Vec<String>,
pub causes: Vec<String>,
pub state: Option<String>,
pub source_ref: Option<String>,
}
pub fn classify_filename(filename: &str) -> NodeType {
if filename.starts_with("daily-") { NodeType::EpisodicDaily }
else if filename.starts_with("weekly-") { NodeType::EpisodicWeekly }
else if filename == "journal.md" { NodeType::EpisodicSession }
else { NodeType::Semantic }
}
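// For example (illustrative filenames): "daily-2026-03-01.md" classifies as
// EpisodicDaily, "weekly-2026-03-02.md" as EpisodicWeekly, "journal.md" as
// EpisodicSession, and anything else such as "tools.md" as Semantic.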
pub fn parse_units(filename: &str, content: &str) -> Vec<MemoryUnit> {
static MARKER_RE: OnceLock<Regex> = OnceLock::new();
static SOURCE_RE: OnceLock<Regex> = OnceLock::new();
static MD_LINK_RE: OnceLock<Regex> = OnceLock::new();
let marker_re = MARKER_RE.get_or_init(||
Regex::new(r"<!--\s*mem:\s*((?:id|links|tags|causes|state)\s*=\s*[^\s].*?)-->").unwrap());
let source_re = SOURCE_RE.get_or_init(||
Regex::new(r"<!--\s*source:\s*(.+?)\s*-->").unwrap());
let md_link_re = MD_LINK_RE.get_or_init(||
Regex::new(r"\[[^\]]*\]\(([^)]*\.md(?:#[^)]*)?)\)").unwrap());
let markers: Vec<_> = marker_re.captures_iter(content)
.map(|cap| {
let full_match = cap.get(0).unwrap();
let attrs_str = &cap[1];
(full_match.start(), full_match.end(), parse_marker_attrs(attrs_str))
})
.collect();
let find_source = |text: &str| -> Option<String> {
source_re.captures(text).map(|c| c[1].trim().to_string())
};
if markers.is_empty() {
let source_ref = find_source(content);
let md_links = extract_md_links(content, md_link_re, filename);
return vec![MemoryUnit {
key: filename.to_string(),
content: content.to_string(),
marker_links: Vec::new(),
md_links,
causes: Vec::new(),
state: None,
source_ref,
}];
}
let mut units = Vec::new();
let first_start = markers[0].0;
let pre_content = content[..first_start].trim();
if !pre_content.is_empty() {
let source_ref = find_source(pre_content);
let md_links = extract_md_links(pre_content, md_link_re, filename);
units.push(MemoryUnit {
key: filename.to_string(),
content: pre_content.to_string(),
marker_links: Vec::new(),
md_links,
causes: Vec::new(),
state: None,
source_ref,
});
}
for (i, (_, end, attrs)) in markers.iter().enumerate() {
let unit_end = if i + 1 < markers.len() {
markers[i + 1].0
} else {
content.len()
};
let unit_content = content[*end..unit_end].trim();
let id = attrs.get("id").cloned().unwrap_or_default();
let key = if id.is_empty() {
format!("{}#unnamed-{}", filename, i)
} else {
format!("{}#{}", filename, id)
};
let marker_links = attrs.get("links")
.map(|l| l.split(',').map(|s| normalize_link(s.trim(), filename)).collect())
.unwrap_or_default();
let causes = attrs.get("causes")
.map(|l| l.split(',').map(|s| normalize_link(s.trim(), filename)).collect())
.unwrap_or_default();
let state = attrs.get("state").cloned();
let source_ref = find_source(unit_content);
let md_links = extract_md_links(unit_content, md_link_re, filename);
units.push(MemoryUnit {
key,
content: unit_content.to_string(),
marker_links,
md_links,
causes,
state,
source_ref,
});
}
units
}
fn parse_marker_attrs(attrs_str: &str) -> HashMap<String, String> {
static ATTR_RE: OnceLock<Regex> = OnceLock::new();
let attr_re = ATTR_RE.get_or_init(|| Regex::new(r"(\w+)\s*=\s*(\S+)").unwrap());
let mut attrs = HashMap::new();
for cap in attr_re.captures_iter(attrs_str) {
attrs.insert(cap[1].to_string(), cap[2].to_string());
}
attrs
}
fn extract_md_links(content: &str, re: &Regex, source_file: &str) -> Vec<String> {
re.captures_iter(content)
.map(|cap| normalize_link(&cap[1], source_file))
.filter(|link| !link.starts_with(source_file) || link.contains('#'))
.collect()
}
pub fn normalize_link(target: &str, source_file: &str) -> String {
if target.starts_with('#') {
return format!("{}{}", source_file, target);
}
let (path_part, fragment) = if let Some(hash_pos) = target.find('#') {
(&target[..hash_pos], Some(&target[hash_pos..]))
} else {
(target, None)
};
let basename = Path::new(path_part)
.file_name()
.map(|f| f.to_string_lossy().to_string())
.unwrap_or_else(|| path_part.to_string());
match fragment {
Some(frag) => format!("{}{}", basename, frag),
None => basename,
}
}
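
A quick illustration of parse_units and normalize_link with invented file and key names (an editorial sketch, not part of parse.rs):

    let content = "Intro text stored under the file-level key.\n\
        <!-- mem: id=setup links=tools.md,journal.md#j-1 state=open -->\n\
        Section body for the setup unit.";
    let units = parse_units("memory.md", content);
    assert_eq!(units.len(), 2);                   // file-level unit + one marker unit
    assert_eq!(units[0].key, "memory.md");        // content before the first marker
    assert_eq!(units[1].key, "memory.md#setup");  // filename + marker id
    assert_eq!(units[1].marker_links, vec!["tools.md".to_string(), "journal.md#j-1".to_string()]);
    assert_eq!(units[1].state.as_deref(), Some("open"));
    // Relative paths collapse to their basename; fragments are preserved:
    assert_eq!(normalize_link("docs/notes.md#setup", "journal.md"), "notes.md#setup");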

src/store/types.rs (new file, 480 lines)

@@ -0,0 +1,480 @@
// Core types for the memory store
//
// Node, Relation, enums, Params, and supporting types. Also contains
// the capnp serialization macros that generate bidirectional conversion.
use crate::memory_capnp;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use std::collections::HashMap;
use std::env;
use std::fs;
use std::os::unix::io::AsRawFd;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
// ---------------------------------------------------------------------------
// Capnp serialization macros
//
// Declarative mapping between Rust types and capnp generated types.
// Adding a field to the schema means adding it in one place below;
// both read and write are generated from the same declaration.
// ---------------------------------------------------------------------------
/// Generate to_capnp/from_capnp conversion methods for an enum.
macro_rules! capnp_enum {
($rust_type:ident, $capnp_type:path, [$($variant:ident),+ $(,)?]) => {
impl $rust_type {
pub(crate) fn to_capnp(&self) -> $capnp_type {
match self {
$(Self::$variant => <$capnp_type>::$variant,)+
}
}
pub(crate) fn from_capnp(v: $capnp_type) -> Self {
match v {
$(<$capnp_type>::$variant => Self::$variant,)+
}
}
}
};
}
/// Generate from_capnp/to_capnp methods for a struct with capnp serialization.
/// Fields are grouped by serialization kind:
/// text - capnp Text fields (String in Rust)
/// uuid - capnp Data fields ([u8; 16] in Rust)
/// prim - copy types (u32, f32, f64, bool)
/// enm - enums with to_capnp/from_capnp methods
/// skip - Rust-only fields not in capnp (set to Default on read)
macro_rules! capnp_message {
(
$struct:ident,
reader: $reader:ty,
builder: $builder:ty,
text: [$($tf:ident),* $(,)?],
uuid: [$($uf:ident),* $(,)?],
prim: [$($pf:ident),* $(,)?],
enm: [$($ef:ident: $et:ident),* $(,)?],
skip: [$($sf:ident),* $(,)?] $(,)?
) => {
impl $struct {
pub(crate) fn from_capnp(r: $reader) -> Result<Self, String> {
paste::paste! {
Ok(Self {
$($tf: read_text(r.[<get_ $tf>]()),)*
$($uf: read_uuid(r.[<get_ $uf>]()),)*
$($pf: r.[<get_ $pf>](),)*
$($ef: $et::from_capnp(
r.[<get_ $ef>]().map_err(|_| concat!("bad ", stringify!($ef)))?
),)*
$($sf: Default::default(),)*
})
}
}
pub(crate) fn to_capnp(&self, mut b: $builder) {
paste::paste! {
$(b.[<set_ $tf>](&self.$tf);)*
$(b.[<set_ $uf>](&self.$uf);)*
$(b.[<set_ $pf>](self.$pf);)*
$(b.[<set_ $ef>](self.$ef.to_capnp());)*
}
}
}
};
}
// Data dir: ~/.claude/memory/
pub fn memory_dir() -> PathBuf {
PathBuf::from(env::var("HOME").expect("HOME not set"))
.join(".claude/memory")
}
pub(crate) fn nodes_path() -> PathBuf { memory_dir().join("nodes.capnp") }
pub(crate) fn relations_path() -> PathBuf { memory_dir().join("relations.capnp") }
pub(crate) fn state_path() -> PathBuf { memory_dir().join("state.bin") }
pub(crate) fn snapshot_path() -> PathBuf { memory_dir().join("snapshot.rkyv") }
fn lock_path() -> PathBuf { memory_dir().join(".store.lock") }
/// RAII file lock using flock(2). Dropped when scope exits.
pub(crate) struct StoreLock {
_file: fs::File,
}
impl StoreLock {
pub(crate) fn acquire() -> Result<Self, String> {
let path = lock_path();
let file = fs::OpenOptions::new()
.create(true).truncate(false).write(true).open(&path)
.map_err(|e| format!("open lock {}: {}", path.display(), e))?;
// Blocking exclusive lock
let ret = unsafe { libc::flock(file.as_raw_fd(), libc::LOCK_EX) };
if ret != 0 {
return Err(format!("flock: {}", std::io::Error::last_os_error()));
}
Ok(StoreLock { _file: file })
}
// Lock released automatically when _file is dropped (flock semantics)
}
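// Usage sketch (illustrative; the actual call sites are elsewhere in this commit):
//
//     let _guard = StoreLock::acquire()?;
//     // ... read, mutate, and rewrite nodes.capnp / relations.capnp ...
//     // guard dropped at end of scope => flock(LOCK_EX) released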
pub fn now_epoch() -> f64 {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs_f64()
}
/// Convert epoch seconds to broken-down local time components.
/// Returns (year, month, day, hour, minute, second).
pub fn epoch_to_local(epoch: f64) -> (i32, u32, u32, u32, u32, u32) {
// Use libc localtime_r for timezone-correct conversion
let secs = epoch as libc::time_t;
let mut tm: libc::tm = unsafe { std::mem::zeroed() };
unsafe { libc::localtime_r(&secs, &mut tm) };
(
tm.tm_year + 1900,
(tm.tm_mon + 1) as u32,
tm.tm_mday as u32,
tm.tm_hour as u32,
tm.tm_min as u32,
tm.tm_sec as u32,
)
}
/// Format epoch as "YYYY-MM-DD"
pub fn format_date(epoch: f64) -> String {
let (y, m, d, _, _, _) = epoch_to_local(epoch);
format!("{:04}-{:02}-{:02}", y, m, d)
}
/// Format epoch as "YYYY-MM-DDTHH:MM"
pub fn format_datetime(epoch: f64) -> String {
let (y, m, d, h, min, _) = epoch_to_local(epoch);
format!("{:04}-{:02}-{:02}T{:02}:{:02}", y, m, d, h, min)
}
/// Format epoch as "YYYY-MM-DD HH:MM"
pub fn format_datetime_space(epoch: f64) -> String {
let (y, m, d, h, min, _) = epoch_to_local(epoch);
format!("{:04}-{:02}-{:02} {:02}:{:02}", y, m, d, h, min)
}
pub fn today() -> String {
format_date(now_epoch())
}
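// Example output (assuming a local time of 2025-07-14 09:30:05):
//     format_date(epoch)           -> "2025-07-14"
//     format_datetime(epoch)       -> "2025-07-14T09:30"
//     format_datetime_space(epoch) -> "2025-07-14 09:30"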
// In-memory node representation
#[derive(Clone, Debug, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub struct Node {
pub uuid: [u8; 16],
pub version: u32,
pub timestamp: f64,
pub node_type: NodeType,
pub provenance: Provenance,
pub key: String,
pub content: String,
pub weight: f32,
pub category: Category,
pub emotion: f32,
pub deleted: bool,
pub source_ref: String,
pub created: String,
pub retrievals: u32,
pub uses: u32,
pub wrongs: u32,
pub state_tag: String,
pub last_replayed: f64,
pub spaced_repetition_interval: u32,
// Position within file (section index, for export ordering)
#[serde(default)]
pub position: u32,
// Derived fields (not in capnp, computed from graph)
#[serde(default)]
pub community_id: Option<u32>,
#[serde(default)]
pub clustering_coefficient: Option<f32>,
#[serde(default)]
pub degree: Option<u32>,
}
#[derive(Clone, Debug, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub struct Relation {
pub uuid: [u8; 16],
pub version: u32,
pub timestamp: f64,
pub source: [u8; 16],
pub target: [u8; 16],
pub rel_type: RelationType,
pub strength: f32,
pub provenance: Provenance,
pub deleted: bool,
pub source_key: String,
pub target_key: String,
}
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub enum NodeType {
EpisodicSession,
EpisodicDaily,
EpisodicWeekly,
Semantic,
}
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub enum Provenance {
Manual,
Journal,
Agent,
Dream,
Derived,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub enum Category {
General,
Core,
Technical,
Observation,
Task,
}
impl Category {
pub fn decay_factor(&self, base: f64) -> f64 {
match self {
Category::Core => 1.0 - (1.0 - base) * 0.2,
Category::Technical => 1.0 - (1.0 - base) * 0.5,
Category::General => base,
Category::Observation => 1.0 - (1.0 - base) * 1.5,
Category::Task => 1.0 - (1.0 - base) * 2.5,
}
}
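// Worked example: with the default decay_factor of 0.95 as `base`, the
// effective per-cycle factors are Core 0.99, Technical 0.975, General 0.95,
// Observation 0.925, Task 0.875; Core memories decay slowest, Task fastest.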
pub fn label(&self) -> &str {
match self {
Category::Core => "core",
Category::Technical => "tech",
Category::General => "gen",
Category::Observation => "obs",
Category::Task => "task",
}
}
pub fn from_str(s: &str) -> Option<Self> {
match s {
"core" => Some(Category::Core),
"tech" | "technical" => Some(Category::Technical),
"gen" | "general" => Some(Category::General),
"obs" | "observation" => Some(Category::Observation),
"task" => Some(Category::Task),
_ => None,
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub enum RelationType {
Link,
Causal,
Auto,
}
capnp_enum!(NodeType, memory_capnp::NodeType,
[EpisodicSession, EpisodicDaily, EpisodicWeekly, Semantic]);
capnp_enum!(Provenance, memory_capnp::Provenance,
[Manual, Journal, Agent, Dream, Derived]);
capnp_enum!(Category, memory_capnp::Category,
[General, Core, Technical, Observation, Task]);
capnp_enum!(RelationType, memory_capnp::RelationType,
[Link, Causal, Auto]);
capnp_message!(Node,
reader: memory_capnp::content_node::Reader<'_>,
builder: memory_capnp::content_node::Builder<'_>,
text: [key, content, source_ref, created, state_tag],
uuid: [uuid],
prim: [version, timestamp, weight, emotion, deleted,
retrievals, uses, wrongs, last_replayed,
spaced_repetition_interval, position],
enm: [node_type: NodeType, provenance: Provenance, category: Category],
skip: [community_id, clustering_coefficient, degree],
);
capnp_message!(Relation,
reader: memory_capnp::relation::Reader<'_>,
builder: memory_capnp::relation::Builder<'_>,
text: [source_key, target_key],
uuid: [uuid, source, target],
prim: [version, timestamp, strength, deleted],
enm: [rel_type: RelationType, provenance: Provenance],
skip: [],
);
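// For illustration, the Node invocation above expands to roughly:
//
//     impl Node {
//         pub(crate) fn from_capnp(r: content_node::Reader<'_>) -> Result<Self, String> {
//             Ok(Self {
//                 key: read_text(r.get_key()),                 // text group
//                 uuid: read_uuid(r.get_uuid()),               // uuid group
//                 version: r.get_version(),                    // prim group
//                 node_type: NodeType::from_capnp(
//                     r.get_node_type().map_err(|_| "bad node_type")?),
//                 community_id: Default::default(),            // skip group
//                 // ... remaining fields follow the same per-group pattern
//             })
//         }
//         pub(crate) fn to_capnp(&self, mut b: content_node::Builder<'_>) {
//             b.set_key(&self.key);
//             b.set_uuid(&self.uuid);
//             b.set_version(self.version);
//             b.set_node_type(self.node_type.to_capnp());
//             // ... skip fields are never written
//         }
//     }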
#[derive(Clone, Debug, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub struct RetrievalEvent {
pub query: String,
pub timestamp: String,
pub results: Vec<String>,
pub used: Option<Vec<String>>,
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub struct Params {
pub default_weight: f64,
pub decay_factor: f64,
pub use_boost: f64,
pub prune_threshold: f64,
pub edge_decay: f64,
pub max_hops: u32,
pub min_activation: f64,
}
impl Default for Params {
fn default() -> Self {
Params {
default_weight: 0.7,
decay_factor: 0.95,
use_boost: 0.15,
prune_threshold: 0.1,
edge_decay: 0.3,
max_hops: 3,
min_activation: 0.05,
}
}
}
// Gap record — something we looked for but didn't find
#[derive(Clone, Debug, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub struct GapRecord {
pub description: String,
pub timestamp: String,
}
// The full in-memory store
#[derive(Default, Serialize, Deserialize)]
pub struct Store {
pub nodes: HashMap<String, Node>, // key → latest node
#[serde(skip)]
pub uuid_to_key: HashMap<[u8; 16], String>, // uuid → key (rebuilt from nodes)
pub relations: Vec<Relation>, // all active relations
pub retrieval_log: Vec<RetrievalEvent>,
pub gaps: Vec<GapRecord>,
pub params: Params,
}
/// Snapshot for mmap: full store state minus retrieval_log (which
/// is append-only in retrieval.log). rkyv zero-copy serialization
/// lets us mmap this and access archived data without deserialization.
#[derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive(check_bytes)]
pub(crate) struct Snapshot {
pub(crate) nodes: HashMap<String, Node>,
pub(crate) relations: Vec<Relation>,
pub(crate) gaps: Vec<GapRecord>,
pub(crate) params: Params,
}
// rkyv snapshot header: 32 bytes (multiple of 16 for alignment after mmap)
// [0..4] magic "RKV\x01"
// [4..8] format version (u32 LE)
// [8..16] nodes.capnp file size (u64 LE) — staleness check
// [16..24] relations.capnp file size (u64 LE)
// [24..32] rkyv data length (u64 LE)
pub(crate) const RKYV_MAGIC: [u8; 4] = *b"RKV\x01";
pub(crate) const RKYV_HEADER_LEN: usize = 32;
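// Write-side sketch of this header (the actual snapshot writer is elsewhere in
// this commit and may differ in detail; nodes_size, rels_size, and rkyv_bytes
// stand in for the measured file sizes and the serialized payload):
//
//     let mut header = [0u8; RKYV_HEADER_LEN];
//     header[0..4].copy_from_slice(&RKYV_MAGIC);
//     header[4..8].copy_from_slice(&1u32.to_le_bytes());             // format version
//     header[8..16].copy_from_slice(&nodes_size.to_le_bytes());      // u64: nodes.capnp size
//     header[16..24].copy_from_slice(&rels_size.to_le_bytes());      // u64: relations.capnp size
//     header[24..32].copy_from_slice(&(rkyv_bytes.len() as u64).to_le_bytes());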
// state.bin header: magic + log file sizes for staleness detection.
// File sizes are race-free for append-only logs (they only grow),
// unlike mtimes which race with concurrent writers.
pub(crate) const CACHE_MAGIC: [u8; 4] = *b"POC\x01";
pub(crate) const CACHE_HEADER_LEN: usize = 4 + 8 + 8; // magic + nodes_size + rels_size
// Cap'n Proto serialization helpers
/// Read a capnp text field, returning empty string on any error
pub(crate) fn read_text(result: capnp::Result<capnp::text::Reader>) -> String {
result.ok()
.and_then(|t| t.to_str().ok())
.unwrap_or("")
.to_string()
}
/// Read a capnp data field as [u8; 16]. Returns all zeroes if the field is missing or shorter than 16 bytes.
pub(crate) fn read_uuid(result: capnp::Result<&[u8]>) -> [u8; 16] {
let mut out = [0u8; 16];
if let Ok(data) = result {
if data.len() >= 16 {
out.copy_from_slice(&data[..16]);
}
}
out
}
/// Create a new node with defaults
pub fn new_node(key: &str, content: &str) -> Node {
Node {
uuid: *Uuid::new_v4().as_bytes(),
version: 1,
timestamp: now_epoch(),
node_type: NodeType::Semantic,
provenance: Provenance::Manual,
key: key.to_string(),
content: content.to_string(),
weight: 0.7,
category: Category::General,
emotion: 0.0,
deleted: false,
source_ref: String::new(),
created: today(),
retrievals: 0,
uses: 0,
wrongs: 0,
state_tag: String::new(),
last_replayed: 0.0,
spaced_repetition_interval: 1,
position: 0,
community_id: None,
clustering_coefficient: None,
degree: None,
}
}
/// Create a new relation
pub fn new_relation(
source_uuid: [u8; 16],
target_uuid: [u8; 16],
rel_type: RelationType,
strength: f32,
source_key: &str,
target_key: &str,
) -> Relation {
Relation {
uuid: *Uuid::new_v4().as_bytes(),
version: 1,
timestamp: now_epoch(),
source: source_uuid,
target: target_uuid,
rel_type,
strength,
provenance: Provenance::Manual,
deleted: false,
source_key: source_key.to_string(),
target_key: target_key.to_string(),
}
}
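// Usage sketch (keys and content are illustrative; inserting into the Store
// happens elsewhere):
//
//     let a = new_node("store-split", "capnp_store.rs was split into store/ modules");
//     let b = new_node("rkyv-snapshot", "mmap'd rkyv snapshot gives zero-copy reads");
//     let link = new_relation(a.uuid, b.uuid, RelationType::Link, 1.0, &a.key, &b.key);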

191
src/store/view.rs Normal file
View file

@ -0,0 +1,191 @@
// Read-only access abstractions for the memory store
//
// StoreView: trait abstracting over owned Store and zero-copy MmapView.
// MmapView: mmap'd rkyv snapshot for sub-millisecond read-only access.
// AnyView: enum dispatch selecting fastest available view at runtime.
use super::types::*;
use std::fs;
// ---------------------------------------------------------------------------
// StoreView: read-only access trait for search and graph code.
//
// Abstracts over owned Store and zero-copy MmapView so the same
// spreading-activation and graph code works with either.
// ---------------------------------------------------------------------------
pub trait StoreView {
/// Iterate all nodes. Callback receives (key, content, weight).
fn for_each_node<F: FnMut(&str, &str, f32)>(&self, f: F);
/// Iterate all relations. Callback receives (source_key, target_key, strength, rel_type).
fn for_each_relation<F: FnMut(&str, &str, f32, RelationType)>(&self, f: F);
/// Node weight by key, or the default weight if missing.
fn node_weight(&self, key: &str) -> f64;
/// Node content by key.
fn node_content(&self, key: &str) -> Option<&str>;
/// Search/graph parameters.
fn params(&self) -> Params;
}
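// Sketch of the intended pattern (hypothetical helper, not part of this file):
// generic code written against StoreView runs unchanged on Store and MmapView.
//
//     fn total_weight<V: StoreView>(view: &V) -> f64 {
//         let mut sum = 0.0;
//         view.for_each_node(|_key, _content, w| sum += w as f64);
//         sum
//     }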
impl StoreView for Store {
fn for_each_node<F: FnMut(&str, &str, f32)>(&self, mut f: F) {
for (key, node) in &self.nodes {
f(key, &node.content, node.weight);
}
}
fn for_each_relation<F: FnMut(&str, &str, f32, RelationType)>(&self, mut f: F) {
for rel in &self.relations {
if rel.deleted { continue; }
f(&rel.source_key, &rel.target_key, rel.strength, rel.rel_type);
}
}
fn node_weight(&self, key: &str) -> f64 {
self.nodes.get(key).map(|n| n.weight as f64).unwrap_or(self.params.default_weight)
}
fn node_content(&self, key: &str) -> Option<&str> {
self.nodes.get(key).map(|n| n.content.as_str())
}
fn params(&self) -> Params {
self.params
}
}
// ---------------------------------------------------------------------------
// MmapView: zero-copy store access via mmap'd rkyv snapshot.
//
// Holds the mmap alive; all string reads go directly into the mapped
// pages without allocation. open() returns None if the snapshot is missing or stale.
// ---------------------------------------------------------------------------
pub struct MmapView {
mmap: memmap2::Mmap,
_file: fs::File,
data_offset: usize,
data_len: usize,
}
impl MmapView {
/// Try to open a fresh rkyv snapshot. Returns None if missing or stale.
pub fn open() -> Option<Self> {
let path = snapshot_path();
let file = fs::File::open(&path).ok()?;
let mmap = unsafe { memmap2::Mmap::map(&file) }.ok()?;
if mmap.len() < RKYV_HEADER_LEN { return None; }
if mmap[..4] != RKYV_MAGIC { return None; }
let nodes_size = fs::metadata(nodes_path()).map(|m| m.len()).unwrap_or(0);
let rels_size = fs::metadata(relations_path()).map(|m| m.len()).unwrap_or(0);
let cached_nodes = u64::from_le_bytes(mmap[8..16].try_into().unwrap());
let cached_rels = u64::from_le_bytes(mmap[16..24].try_into().unwrap());
let data_len = u64::from_le_bytes(mmap[24..32].try_into().unwrap()) as usize;
if cached_nodes != nodes_size || cached_rels != rels_size { return None; }
if mmap.len() < RKYV_HEADER_LEN + data_len { return None; }
Some(MmapView { mmap, _file: file, data_offset: RKYV_HEADER_LEN, data_len })
}
fn snapshot(&self) -> &ArchivedSnapshot {
let data = &self.mmap[self.data_offset..self.data_offset + self.data_len];
unsafe { rkyv::archived_root::<Snapshot>(data) }
}
}
impl StoreView for MmapView {
fn for_each_node<F: FnMut(&str, &str, f32)>(&self, mut f: F) {
let snap = self.snapshot();
for (key, node) in snap.nodes.iter() {
f(&key, &node.content, node.weight);
}
}
fn for_each_relation<F: FnMut(&str, &str, f32, RelationType)>(&self, mut f: F) {
let snap = self.snapshot();
for rel in snap.relations.iter() {
if rel.deleted { continue; }
let rt = match rel.rel_type {
ArchivedRelationType::Link => RelationType::Link,
ArchivedRelationType::Causal => RelationType::Causal,
ArchivedRelationType::Auto => RelationType::Auto,
};
f(&rel.source_key, &rel.target_key, rel.strength, rt);
}
}
fn node_weight(&self, key: &str) -> f64 {
let snap = self.snapshot();
snap.nodes.get(key)
.map(|n| n.weight as f64)
.unwrap_or(snap.params.default_weight)
}
fn node_content(&self, key: &str) -> Option<&str> {
let snap = self.snapshot();
snap.nodes.get(key).map(|n| &*n.content)
}
fn params(&self) -> Params {
let p = &self.snapshot().params;
Params {
default_weight: p.default_weight,
decay_factor: p.decay_factor,
use_boost: p.use_boost,
prune_threshold: p.prune_threshold,
edge_decay: p.edge_decay,
max_hops: p.max_hops,
min_activation: p.min_activation,
}
}
}
// ---------------------------------------------------------------------------
// AnyView: enum dispatch for read-only access.
//
// MmapView when the snapshot is fresh, owned Store as fallback.
// The match on each call is a single, well-predicted branch, so dispatch cost is negligible.
// ---------------------------------------------------------------------------
pub enum AnyView {
Mmap(MmapView),
Owned(Store),
}
impl AnyView {
/// Load the fastest available view: mmap snapshot or owned store.
pub fn load() -> Result<Self, String> {
if let Some(mv) = MmapView::open() {
Ok(AnyView::Mmap(mv))
} else {
Ok(AnyView::Owned(Store::load()?))
}
}
}
impl StoreView for AnyView {
fn for_each_node<F: FnMut(&str, &str, f32)>(&self, f: F) {
match self { AnyView::Mmap(v) => v.for_each_node(f), AnyView::Owned(s) => s.for_each_node(f) }
}
fn for_each_relation<F: FnMut(&str, &str, f32, RelationType)>(&self, f: F) {
match self { AnyView::Mmap(v) => v.for_each_relation(f), AnyView::Owned(s) => s.for_each_relation(f) }
}
fn node_weight(&self, key: &str) -> f64 {
match self { AnyView::Mmap(v) => v.node_weight(key), AnyView::Owned(s) => s.node_weight(key) }
}
fn node_content(&self, key: &str) -> Option<&str> {
match self { AnyView::Mmap(v) => v.node_content(key), AnyView::Owned(s) => s.node_content(key) }
}
fn params(&self) -> Params {
match self { AnyView::Mmap(v) => v.params(), AnyView::Owned(s) => s.params() }
}
}
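// Read-path sketch (hypothetical caller, in a function returning Result):
//
//     let view = AnyView::load()?;
//     let mut hits = Vec::new();
//     view.for_each_node(|key, content, _w| {
//         if content.contains("mmap") { hits.push(key.to_string()); }
//     });
//     // `hits` now holds matching keys whether the data came from the
//     // rkyv snapshot or from a full Store load.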