// poc-memory v0.4.0: graph-structured memory with consolidation pipeline
// Rust core:
// - Cap'n Proto append-only storage (nodes + relations)
// - Graph algorithms: clustering coefficient, community detection,
//   schema fit, small-world metrics, interference detection
// - BM25 text similarity with Porter stemming
// - Spaced repetition replay queue
// - Commands: search, init, health, status, graph, categorize,
//   link-add, link-impact, decay, consolidate-session, etc.
// Python scripts:
// - Episodic digest pipeline: daily/weekly/monthly-digest.py
// - retroactive-digest.py for backfilling
// - consolidation-agents.py: 3 parallel Sonnet agents
// - apply-consolidation.py: structured action extraction + apply
// - digest-link-parser.py: extract ~400 explicit links from digests
// - content-promotion-agent.py: promote episodic obs to semantic files
// - bulk-categorize.py: categorize all nodes via single Sonnet call
// - consolidation-loop.py: multi-round automated consolidation
// Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
// 2026-02-28 22:17:00 -05:00
|
|
|
#![allow(dead_code)]
|
|
|
|
|
// poc-memory: graph-structured memory with append-only Cap'n Proto storage
|
|
|
|
|
//
|
|
|
|
|
// Architecture:
|
|
|
|
|
// nodes.capnp - append-only content node log
|
|
|
|
|
// relations.capnp - append-only relation log
|
|
|
|
|
// state.bin - derived KV cache (rebuilt from logs when stale)
|
|
|
|
|
//
|
|
|
|
|
// Graph algorithms: clustering coefficient, community detection (label
|
|
|
|
|
// propagation), schema fit scoring, small-world metrics, consolidation
|
|
|
|
|
// priority. Text similarity via BM25 with Porter stemming.
|
|
|
|
|
//
|
|
|
|
|
// Neuroscience-inspired: spaced repetition replay, emotional gating,
|
|
|
|
|
// interference detection, schema assimilation, reconsolidation.
|
|
|
|
|
|
|
|
|
|
mod capnp_store;
|
|
|
|
|
mod graph;
|
|
|
|
|
mod search;
|
|
|
|
|
mod similarity;
|
|
|
|
|
mod migrate;
|
|
|
|
|
mod neuro;
|
|
|
|
|
|
|
|
|
|
/// Generated Cap'n Proto bindings for the memory schema.
///
/// The build script compiles `memory.capnp` and writes the generated
/// Rust into `OUT_DIR/schema/memory_capnp.rs`; this module splices that
/// file into the crate at compile time.
pub mod memory_capnp {
    include!(concat!(env!("OUT_DIR"), "/schema/memory_capnp.rs"));
}
|
|
|
|
|
|
|
|
|
|
use std::env;
|
|
|
|
|
use std::process;
|
|
|
|
|
|
|
|
|
|
fn main() {
|
|
|
|
|
let args: Vec<String> = env::args().collect();
|
|
|
|
|
if args.len() < 2 {
|
|
|
|
|
usage();
|
|
|
|
|
process::exit(1);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let result = match args[1].as_str() {
|
|
|
|
|
"search" => cmd_search(&args[2..]),
|
|
|
|
|
"init" => cmd_init(),
|
|
|
|
|
"migrate" => cmd_migrate(),
|
|
|
|
|
"health" => cmd_health(),
|
|
|
|
|
"status" => cmd_status(),
|
|
|
|
|
"graph" => cmd_graph(),
|
|
|
|
|
"used" => cmd_used(&args[2..]),
|
|
|
|
|
"wrong" => cmd_wrong(&args[2..]),
|
|
|
|
|
"gap" => cmd_gap(&args[2..]),
|
|
|
|
|
"categorize" => cmd_categorize(&args[2..]),
|
|
|
|
|
"decay" => cmd_decay(),
|
|
|
|
|
"consolidate-batch" => cmd_consolidate_batch(&args[2..]),
|
|
|
|
|
"log" => cmd_log(),
|
|
|
|
|
"params" => cmd_params(),
|
|
|
|
|
"link" => cmd_link(&args[2..]),
|
|
|
|
|
"replay-queue" => cmd_replay_queue(&args[2..]),
|
|
|
|
|
"interference" => cmd_interference(&args[2..]),
|
|
|
|
|
"link-add" => cmd_link_add(&args[2..]),
|
|
|
|
|
"link-impact" => cmd_link_impact(&args[2..]),
|
|
|
|
|
"consolidate-session" => cmd_consolidate_session(),
|
|
|
|
|
"daily-check" => cmd_daily_check(),
|
|
|
|
|
"apply-agent" => cmd_apply_agent(&args[2..]),
|
|
|
|
|
"digest" => cmd_digest(&args[2..]),
|
|
|
|
|
"trace" => cmd_trace(&args[2..]),
|
2026-02-28 22:30:03 -05:00
|
|
|
"list-keys" => cmd_list_keys(),
|
|
|
|
|
"list-edges" => cmd_list_edges(),
|
|
|
|
|
"dump-json" => cmd_dump_json(),
|
2026-02-28 22:40:17 -05:00
|
|
|
"node-delete" => cmd_node_delete(&args[2..]),
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
_ => {
|
|
|
|
|
eprintln!("Unknown command: {}", args[1]);
|
|
|
|
|
usage();
|
|
|
|
|
process::exit(1);
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
if let Err(e) = result {
|
|
|
|
|
eprintln!("Error: {}", e);
|
|
|
|
|
process::exit(1);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Print the full command reference to stderr.
fn usage() {
    eprintln!("poc-memory v0.4.0 — graph-structured memory store

Commands:
  search QUERY [QUERY...]      Search memory (AND logic across terms)
  init                         Scan markdown files, index all memory units
  migrate                      Migrate from old weights.json system
  health                       Report graph metrics (CC, communities, small-world)
  status                       Summary of memory state
  graph                        Show graph structure overview
  used KEY                     Mark a memory as useful (boosts weight)
  wrong KEY [CONTEXT]          Mark a memory as wrong/irrelevant
  gap DESCRIPTION              Record a gap in memory coverage
  categorize KEY CATEGORY      Reassign category (core/tech/gen/obs/task)
  decay                        Apply daily weight decay
  consolidate-batch [--count N] [--auto]
                               Run agent consolidation on priority nodes
  log                          Show recent retrieval log
  params                       Show current parameters
  link N                       Interactive graph walk from search result N
  replay-queue [--count N]     Show spaced repetition replay queue
  interference [--threshold F]
                               Detect potentially confusable memory pairs
  link-add SOURCE TARGET [REASON]
                               Add a link between two nodes
  link-impact SOURCE TARGET    Simulate adding an edge, report topology impact
  consolidate-session          Analyze metrics, plan agent allocation
  daily-check                  Brief metrics check (for cron/notifications)
  apply-agent [--all]          Import pending agent results into the graph
  digest daily [DATE]          Generate daily episodic digest (default: today)
  digest weekly [DATE]         Generate weekly digest (any date in target week)
  trace KEY                    Walk temporal links: semantic ↔ episodic ↔ conversation
  list-keys                    List all node keys (one per line)
  list-edges                   List all edges (tsv: source target strength type)
  dump-json                    Dump entire store as JSON
  node-delete KEY              Soft-delete a node (appends deleted version to log)");
}
|
|
|
|
|
|
|
|
|
|
fn cmd_search(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.is_empty() {
|
|
|
|
|
return Err("Usage: poc-memory search QUERY [QUERY...]".into());
|
|
|
|
|
}
|
|
|
|
|
let query = args.join(" ");
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let results = search::search(&query, &store);
|
|
|
|
|
|
|
|
|
|
if results.is_empty() {
|
|
|
|
|
eprintln!("No results for '{}'", query);
|
|
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Log retrieval
|
|
|
|
|
store.log_retrieval(&query, &results.iter().map(|r| r.key.clone()).collect::<Vec<_>>());
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
|
|
|
|
for (i, r) in results.iter().enumerate().take(15) {
|
|
|
|
|
let marker = if r.is_direct { "→" } else { " " };
|
|
|
|
|
let weight = store.node_weight(&r.key).unwrap_or(0.0);
|
|
|
|
|
print!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
|
|
|
|
|
if let Some(community) = store.node_community(&r.key) {
|
|
|
|
|
print!(" (c{})", community);
|
|
|
|
|
}
|
|
|
|
|
println!();
|
|
|
|
|
if let Some(ref snippet) = r.snippet {
|
|
|
|
|
println!(" {}", snippet);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_init() -> Result<(), String> {
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let count = store.init_from_markdown()?;
|
|
|
|
|
store.save()?;
|
|
|
|
|
println!("Indexed {} memory units", count);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_migrate() -> Result<(), String> {
|
|
|
|
|
migrate::migrate()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_health() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
let health = graph::health_report(&g, &store);
|
|
|
|
|
println!("{}", health);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_status() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let node_count = store.nodes.len();
|
|
|
|
|
let rel_count = store.relations.len();
|
|
|
|
|
let categories = store.category_counts();
|
|
|
|
|
|
|
|
|
|
println!("Nodes: {} Relations: {}", node_count, rel_count);
|
|
|
|
|
println!("Categories: core={} tech={} gen={} obs={} task={}",
|
|
|
|
|
categories.get("core").unwrap_or(&0),
|
|
|
|
|
categories.get("tech").unwrap_or(&0),
|
|
|
|
|
categories.get("gen").unwrap_or(&0),
|
|
|
|
|
categories.get("obs").unwrap_or(&0),
|
|
|
|
|
categories.get("task").unwrap_or(&0),
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
println!("Graph edges: {} Communities: {}",
|
|
|
|
|
g.edge_count(), g.community_count());
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_graph() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
// Show top-10 highest degree nodes
|
|
|
|
|
let mut degrees: Vec<_> = g.nodes().iter()
|
|
|
|
|
.map(|k| (k.clone(), g.degree(k)))
|
|
|
|
|
.collect();
|
|
|
|
|
degrees.sort_by(|a, b| b.1.cmp(&a.1));
|
|
|
|
|
|
|
|
|
|
println!("Top nodes by degree:");
|
|
|
|
|
for (key, deg) in degrees.iter().take(10) {
|
|
|
|
|
let cc = g.clustering_coefficient(key);
|
|
|
|
|
println!(" {:40} deg={:3} cc={:.3}", key, deg, cc);
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_used(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.is_empty() {
|
|
|
|
|
return Err("Usage: poc-memory used KEY".into());
|
|
|
|
|
}
|
|
|
|
|
let key = args.join(" ");
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
store.mark_used(&resolved);
|
|
|
|
|
store.save()?;
|
|
|
|
|
println!("Marked '{}' as used", resolved);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_wrong(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.is_empty() {
|
|
|
|
|
return Err("Usage: poc-memory wrong KEY [CONTEXT]".into());
|
|
|
|
|
}
|
|
|
|
|
let key = &args[0];
|
|
|
|
|
let ctx = if args.len() > 1 { Some(args[1..].join(" ")) } else { None };
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let resolved = store.resolve_key(key)?;
|
|
|
|
|
store.mark_wrong(&resolved, ctx.as_deref());
|
|
|
|
|
store.save()?;
|
|
|
|
|
println!("Marked '{}' as wrong", resolved);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_gap(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.is_empty() {
|
|
|
|
|
return Err("Usage: poc-memory gap DESCRIPTION".into());
|
|
|
|
|
}
|
|
|
|
|
let desc = args.join(" ");
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
store.record_gap(&desc);
|
|
|
|
|
store.save()?;
|
|
|
|
|
println!("Recorded gap: {}", desc);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_categorize(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.len() < 2 {
|
|
|
|
|
return Err("Usage: poc-memory categorize KEY CATEGORY".into());
|
|
|
|
|
}
|
|
|
|
|
let key = &args[0];
|
|
|
|
|
let cat = &args[1];
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let resolved = store.resolve_key(key)?;
|
|
|
|
|
store.categorize(&resolved, cat)?;
|
|
|
|
|
store.save()?;
|
|
|
|
|
println!("Set '{}' category to {}", resolved, cat);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_decay() -> Result<(), String> {
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let (decayed, pruned) = store.decay();
|
|
|
|
|
store.save()?;
|
|
|
|
|
println!("Decayed {} nodes, pruned {} below threshold", decayed, pruned);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
|
|
|
|
|
let mut count = 5usize;
|
|
|
|
|
let mut auto = false;
|
|
|
|
|
let mut agent: Option<String> = None;
|
|
|
|
|
let mut i = 0;
|
|
|
|
|
while i < args.len() {
|
|
|
|
|
match args[i].as_str() {
|
|
|
|
|
"--count" if i + 1 < args.len() => {
|
|
|
|
|
count = args[i + 1].parse().map_err(|_| "invalid count")?;
|
|
|
|
|
i += 2;
|
|
|
|
|
}
|
|
|
|
|
"--auto" => { auto = true; i += 1; }
|
|
|
|
|
"--agent" if i + 1 < args.len() => {
|
|
|
|
|
agent = Some(args[i + 1].clone());
|
|
|
|
|
i += 2;
|
|
|
|
|
}
|
|
|
|
|
_ => { i += 1; }
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
|
|
|
|
|
if let Some(agent_name) = agent {
|
|
|
|
|
// Generate a specific agent prompt
|
|
|
|
|
let prompt = neuro::agent_prompt(&store, &agent_name, count)?;
|
|
|
|
|
println!("{}", prompt);
|
|
|
|
|
Ok(())
|
|
|
|
|
} else {
|
|
|
|
|
neuro::consolidation_batch(&store, count, auto)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_log() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
for event in store.retrieval_log.iter().rev().take(20) {
|
|
|
|
|
println!("[{}] q=\"{}\" → {} results",
|
|
|
|
|
event.timestamp, event.query, event.results.len());
|
|
|
|
|
for r in &event.results {
|
|
|
|
|
println!(" {}", r);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_params() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
println!("decay_factor: {}", store.params.decay_factor);
|
|
|
|
|
println!("use_boost: {}", store.params.use_boost);
|
|
|
|
|
println!("prune_threshold: {}", store.params.prune_threshold);
|
|
|
|
|
println!("edge_decay: {}", store.params.edge_decay);
|
|
|
|
|
println!("max_hops: {}", store.params.max_hops);
|
|
|
|
|
println!("min_activation: {}", store.params.min_activation);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_link(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.is_empty() {
|
|
|
|
|
return Err("Usage: poc-memory link KEY".into());
|
|
|
|
|
}
|
|
|
|
|
let key = args.join(" ");
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
println!("Neighbors of '{}':", resolved);
|
|
|
|
|
let neighbors = g.neighbors(&resolved);
|
|
|
|
|
for (i, (n, strength)) in neighbors.iter().enumerate() {
|
|
|
|
|
let cc = g.clustering_coefficient(n);
|
|
|
|
|
println!(" {:2}. [{:.2}] {} (cc={:.3})", i + 1, strength, n, cc);
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_replay_queue(args: &[String]) -> Result<(), String> {
|
|
|
|
|
let mut count = 10usize;
|
|
|
|
|
let mut i = 0;
|
|
|
|
|
while i < args.len() {
|
|
|
|
|
match args[i].as_str() {
|
|
|
|
|
"--count" if i + 1 < args.len() => {
|
|
|
|
|
count = args[i + 1].parse().map_err(|_| "invalid count")?;
|
|
|
|
|
i += 2;
|
|
|
|
|
}
|
|
|
|
|
_ => { i += 1; }
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let queue = neuro::replay_queue(&store, count);
|
|
|
|
|
println!("Replay queue ({} items):", queue.len());
|
|
|
|
|
for (i, item) in queue.iter().enumerate() {
|
|
|
|
|
println!(" {:2}. [{:.3}] {} (interval={}d, emotion={:.1})",
|
|
|
|
|
i + 1, item.priority, item.key, item.interval_days, item.emotion);
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_consolidate_session() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let plan = neuro::consolidation_plan(&store);
|
|
|
|
|
println!("{}", neuro::format_plan(&plan));
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_daily_check() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let report = neuro::daily_check(&store);
|
|
|
|
|
print!("{}", report);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_link_add(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.len() < 2 {
|
|
|
|
|
return Err("Usage: poc-memory link-add SOURCE TARGET [REASON]".into());
|
|
|
|
|
}
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let source = store.resolve_key(&args[0])?;
|
|
|
|
|
let target = store.resolve_key(&args[1])?;
|
|
|
|
|
let reason = if args.len() > 2 { args[2..].join(" ") } else { String::new() };
|
|
|
|
|
|
|
|
|
|
// Find UUIDs
|
|
|
|
|
let source_uuid = store.nodes.get(&source)
|
|
|
|
|
.map(|n| n.uuid)
|
|
|
|
|
.ok_or_else(|| format!("source not found: {}", source))?;
|
|
|
|
|
let target_uuid = store.nodes.get(&target)
|
|
|
|
|
.map(|n| n.uuid)
|
|
|
|
|
.ok_or_else(|| format!("target not found: {}", target))?;
|
|
|
|
|
|
|
|
|
|
// Check if link already exists
|
|
|
|
|
let exists = store.relations.iter().any(|r|
|
|
|
|
|
r.source_key == source && r.target_key == target && !r.deleted
|
|
|
|
|
);
|
|
|
|
|
if exists {
|
|
|
|
|
println!("Link already exists: {} → {}", source, target);
|
|
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let rel = capnp_store::Store::new_relation(
|
|
|
|
|
source_uuid, target_uuid,
|
|
|
|
|
capnp_store::RelationType::Auto,
|
|
|
|
|
0.5,
|
|
|
|
|
&source, &target,
|
|
|
|
|
);
|
|
|
|
|
store.add_relation(rel)?;
|
|
|
|
|
if !reason.is_empty() {
|
|
|
|
|
println!("+ {} → {} ({})", source, target, reason);
|
|
|
|
|
} else {
|
|
|
|
|
println!("+ {} → {}", source, target);
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_link_impact(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.len() < 2 {
|
|
|
|
|
return Err("Usage: poc-memory link-impact SOURCE TARGET".into());
|
|
|
|
|
}
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let source = store.resolve_key(&args[0])?;
|
|
|
|
|
let target = store.resolve_key(&args[1])?;
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
let impact = g.link_impact(&source, &target);
|
|
|
|
|
|
|
|
|
|
println!("Link impact: {} → {}", source, target);
|
|
|
|
|
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
|
|
|
|
|
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
|
|
|
|
|
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
|
|
|
|
|
println!(" ΔGini: {:+.6}", impact.delta_gini);
|
|
|
|
|
println!(" Assessment: {}", impact.assessment);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
|
|
|
|
|
let home = env::var("HOME").unwrap_or_default();
|
|
|
|
|
let results_dir = std::path::PathBuf::from(&home)
|
|
|
|
|
.join(".claude/memory/agent-results");
|
|
|
|
|
|
|
|
|
|
if !results_dir.exists() {
|
|
|
|
|
println!("No agent results directory");
|
|
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let mut applied = 0;
|
|
|
|
|
let mut errors = 0;
|
|
|
|
|
|
|
|
|
|
let process_all = args.iter().any(|a| a == "--all");
|
|
|
|
|
|
|
|
|
|
// Find .json result files
|
|
|
|
|
let mut files: Vec<_> = std::fs::read_dir(&results_dir)
|
|
|
|
|
.map_err(|e| format!("read results dir: {}", e))?
|
|
|
|
|
.filter_map(|e| e.ok())
|
|
|
|
|
.filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
|
|
|
|
|
.collect();
|
|
|
|
|
files.sort_by_key(|e| e.path());
|
|
|
|
|
|
|
|
|
|
for entry in &files {
|
|
|
|
|
let path = entry.path();
|
|
|
|
|
let content = match std::fs::read_to_string(&path) {
|
|
|
|
|
Ok(c) => c,
|
|
|
|
|
Err(e) => {
|
|
|
|
|
eprintln!(" Skip {}: {}", path.display(), e);
|
|
|
|
|
errors += 1;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let data: serde_json::Value = match serde_json::from_str(&content) {
|
|
|
|
|
Ok(d) => d,
|
|
|
|
|
Err(e) => {
|
|
|
|
|
eprintln!(" Skip {}: parse error: {}", path.display(), e);
|
|
|
|
|
errors += 1;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Check for agent_result with links
|
|
|
|
|
let agent_result = data.get("agent_result").or(Some(&data));
|
|
|
|
|
let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
|
|
|
|
|
Some(l) => l,
|
|
|
|
|
None => continue,
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let entry_text = data.get("entry_text")
|
|
|
|
|
.and_then(|v| v.as_str())
|
|
|
|
|
.unwrap_or("");
|
|
|
|
|
let source_start = agent_result
|
|
|
|
|
.and_then(|r| r.get("source_start"))
|
|
|
|
|
.and_then(|v| v.as_u64());
|
|
|
|
|
let source_end = agent_result
|
|
|
|
|
.and_then(|r| r.get("source_end"))
|
|
|
|
|
.and_then(|v| v.as_u64());
|
|
|
|
|
|
|
|
|
|
println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
|
|
|
|
|
if let (Some(start), Some(end)) = (source_start, source_end) {
|
|
|
|
|
println!(" Source: L{}-L{}", start, end);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for link in links {
|
|
|
|
|
let target = match link.get("target").and_then(|v| v.as_str()) {
|
|
|
|
|
Some(t) => t,
|
|
|
|
|
None => continue,
|
|
|
|
|
};
|
|
|
|
|
let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
|
|
|
|
|
|
|
|
|
|
// Skip NOTE: targets (new topics, not existing nodes)
|
|
|
|
|
if target.starts_with("NOTE:") {
|
|
|
|
|
println!(" NOTE: {} — {}", &target[5..], reason);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Try to resolve the target key and link from journal entry
|
|
|
|
|
let resolved = match store.resolve_key(target) {
|
|
|
|
|
Ok(r) => r,
|
|
|
|
|
Err(_) => {
|
|
|
|
|
println!(" SKIP {} (not found in graph)", target);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let source_key = match find_journal_node(&store, entry_text) {
|
|
|
|
|
Some(k) => k,
|
|
|
|
|
None => {
|
|
|
|
|
println!(" SKIP {} (no matching journal node)", target);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Get UUIDs for both nodes
|
|
|
|
|
let source_uuid = match store.nodes.get(&source_key) {
|
|
|
|
|
Some(n) => n.uuid,
|
|
|
|
|
None => continue,
|
|
|
|
|
};
|
|
|
|
|
let target_uuid = match store.nodes.get(&resolved) {
|
|
|
|
|
Some(n) => n.uuid,
|
|
|
|
|
None => continue,
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let rel = capnp_store::Store::new_relation(
|
|
|
|
|
source_uuid, target_uuid,
|
|
|
|
|
capnp_store::RelationType::Link,
|
|
|
|
|
0.5,
|
|
|
|
|
&source_key, &resolved,
|
|
|
|
|
);
|
|
|
|
|
if let Err(e) = store.add_relation(rel) {
|
|
|
|
|
eprintln!(" Error adding relation: {}", e);
|
|
|
|
|
errors += 1;
|
|
|
|
|
} else {
|
|
|
|
|
println!(" LINK {} → {} ({})", source_key, resolved, reason);
|
|
|
|
|
applied += 1;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Move processed file to avoid re-processing
|
|
|
|
|
if !process_all {
|
|
|
|
|
let done_dir = results_dir.join("done");
|
|
|
|
|
std::fs::create_dir_all(&done_dir).ok();
|
|
|
|
|
let dest = done_dir.join(path.file_name().unwrap());
|
|
|
|
|
std::fs::rename(&path, &dest).ok();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if applied > 0 {
|
|
|
|
|
store.save()?;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
println!("\nApplied {} links ({} errors, {} files processed)",
|
|
|
|
|
applied, errors, files.len());
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Find the journal node that best matches the given entry text
|
|
|
|
|
fn find_journal_node(store: &capnp_store::Store, entry_text: &str) -> Option<String> {
|
|
|
|
|
if entry_text.is_empty() {
|
|
|
|
|
return None;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Extract keywords from entry text
|
|
|
|
|
let words: Vec<&str> = entry_text.split_whitespace()
|
|
|
|
|
.filter(|w| w.len() > 5)
|
|
|
|
|
.take(5)
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
// Find journal nodes whose content matches the most keywords
|
|
|
|
|
let mut best_key = None;
|
|
|
|
|
let mut best_score = 0;
|
|
|
|
|
|
|
|
|
|
for (key, node) in &store.nodes {
|
|
|
|
|
if !key.starts_with("journal.md#") {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
let content_lower = node.content.to_lowercase();
|
|
|
|
|
let score: usize = words.iter()
|
|
|
|
|
.filter(|w| content_lower.contains(&w.to_lowercase()))
|
|
|
|
|
.count();
|
|
|
|
|
if score > best_score {
|
|
|
|
|
best_score = score;
|
|
|
|
|
best_key = Some(key.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
best_key
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// `digest` command: run the Python digest pipeline.
///
/// `digest daily [DATE]` / `digest weekly [DATE]` invoke the matching
/// script under `~/poc/memory/scripts`, forwarding the optional date.
fn cmd_digest(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory digest daily [DATE] | weekly [DATE]".into());
    }

    let home = env::var("HOME").unwrap_or_default();
    let scripts_dir = std::path::PathBuf::from(&home).join("poc/memory/scripts");

    // The two digest types differ only in which script they run, so map
    // the subcommand to a script name and share one runner (the original
    // duplicated the whole invocation for each branch).
    let script = match args[0].as_str() {
        "daily" => "daily-digest.py",
        "weekly" => "weekly-digest.py",
        other => return Err(format!("Unknown digest type: {}. Use: daily, weekly", other)),
    };
    run_digest_script(&scripts_dir, script, args.get(1))
}

/// Run one digest script via python3, forwarding the optional DATE arg.
/// Returns Err if the process can't be spawned or exits non-zero.
fn run_digest_script(
    scripts_dir: &std::path::Path,
    script: &str,
    date: Option<&String>,
) -> Result<(), String> {
    let mut cmd = std::process::Command::new("python3");
    cmd.arg(scripts_dir.join(script));
    if let Some(d) = date {
        cmd.arg(d);
    }
    // Unset CLAUDECODE so nested claude calls inside the script behave.
    cmd.env_remove("CLAUDECODE");
    let status = cmd.status()
        .map_err(|e| format!("run {}: {}", script, e))?;
    if !status.success() {
        return Err(format!("{} failed", script));
    }
    Ok(())
}
|
|
|
|
|
|
|
|
|
|
fn cmd_trace(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.is_empty() {
|
|
|
|
|
return Err("Usage: poc-memory trace KEY".into());
|
|
|
|
|
}
|
|
|
|
|
let key = args.join(" ");
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
let node = store.nodes.get(&resolved)
|
|
|
|
|
.ok_or_else(|| format!("Node not found: {}", resolved))?;
|
|
|
|
|
|
|
|
|
|
// Display the node itself
|
|
|
|
|
println!("=== {} ===", resolved);
|
|
|
|
|
println!("Type: {:?} Category: {} Weight: {:.2}",
|
|
|
|
|
node.node_type, node.category.label(), node.weight);
|
|
|
|
|
if !node.source_ref.is_empty() {
|
|
|
|
|
println!("Source: {}", node.source_ref);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Show content preview
|
|
|
|
|
let preview = if node.content.len() > 200 {
|
|
|
|
|
let end = node.content.floor_char_boundary(200);
|
|
|
|
|
format!("{}...", &node.content[..end])
|
|
|
|
|
} else {
|
|
|
|
|
node.content.clone()
|
|
|
|
|
};
|
|
|
|
|
println!("\n{}\n", preview);
|
|
|
|
|
|
|
|
|
|
// Walk neighbors, grouped by node type
|
|
|
|
|
let neighbors = g.neighbors(&resolved);
|
|
|
|
|
let mut episodic_session = Vec::new();
|
|
|
|
|
let mut episodic_daily = Vec::new();
|
|
|
|
|
let mut episodic_weekly = Vec::new();
|
|
|
|
|
let mut semantic = Vec::new();
|
|
|
|
|
|
|
|
|
|
for (n, strength) in &neighbors {
|
|
|
|
|
if let Some(nnode) = store.nodes.get(n.as_str()) {
|
|
|
|
|
match nnode.node_type {
|
|
|
|
|
capnp_store::NodeType::EpisodicSession =>
|
|
|
|
|
episodic_session.push((n.clone(), *strength, nnode)),
|
|
|
|
|
capnp_store::NodeType::EpisodicDaily =>
|
|
|
|
|
episodic_daily.push((n.clone(), *strength, nnode)),
|
|
|
|
|
capnp_store::NodeType::EpisodicWeekly =>
|
|
|
|
|
episodic_weekly.push((n.clone(), *strength, nnode)),
|
|
|
|
|
capnp_store::NodeType::Semantic =>
|
|
|
|
|
semantic.push((n.clone(), *strength, nnode)),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !episodic_weekly.is_empty() {
|
|
|
|
|
println!("Weekly digests:");
|
|
|
|
|
for (k, s, n) in &episodic_weekly {
|
|
|
|
|
let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
|
|
|
|
|
println!(" [{:.2}] {} — {}", s, k, preview);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !episodic_daily.is_empty() {
|
|
|
|
|
println!("Daily digests:");
|
|
|
|
|
for (k, s, n) in &episodic_daily {
|
|
|
|
|
let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
|
|
|
|
|
println!(" [{:.2}] {} — {}", s, k, preview);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !episodic_session.is_empty() {
|
|
|
|
|
println!("Session entries:");
|
|
|
|
|
for (k, s, n) in &episodic_session {
|
|
|
|
|
let preview = n.content.lines()
|
|
|
|
|
.find(|l| !l.is_empty() && !l.starts_with("<!--"))
|
|
|
|
|
.unwrap_or("").chars().take(80).collect::<String>();
|
|
|
|
|
println!(" [{:.2}] {}", s, k);
|
|
|
|
|
if !n.source_ref.is_empty() {
|
|
|
|
|
println!(" ↳ source: {}", n.source_ref);
|
|
|
|
|
}
|
|
|
|
|
println!(" {}", preview);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !semantic.is_empty() {
|
|
|
|
|
println!("Semantic links:");
|
|
|
|
|
for (k, s, _) in &semantic {
|
|
|
|
|
println!(" [{:.2}] {}", s, k);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Summary
|
|
|
|
|
println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
|
|
|
|
|
episodic_session.len(), episodic_daily.len(),
|
|
|
|
|
episodic_weekly.len(), semantic.len());
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
// 2026-02-28 22:30:03 -05:00
|
|
|
fn cmd_list_keys() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let mut keys: Vec<_> = store.nodes.keys().collect();
|
|
|
|
|
keys.sort();
|
|
|
|
|
for key in keys {
|
|
|
|
|
println!("{}", key);
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_list_edges() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
for rel in &store.relations {
|
|
|
|
|
println!("{}\t{}\t{:.2}\t{:?}",
|
|
|
|
|
rel.source_key, rel.target_key, rel.strength, rel.rel_type);
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn cmd_dump_json() -> Result<(), String> {
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let json = serde_json::to_string_pretty(&store)
|
|
|
|
|
.map_err(|e| format!("serialize: {}", e))?;
|
|
|
|
|
println!("{}", json);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
// 2026-02-28 22:40:17 -05:00
|
|
|
fn cmd_node_delete(args: &[String]) -> Result<(), String> {
|
|
|
|
|
if args.is_empty() {
|
|
|
|
|
return Err("Usage: poc-memory node-delete KEY".into());
|
|
|
|
|
}
|
|
|
|
|
let key = args.join(" ");
|
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
|
|
|
|
|
let updated = if let Some(node) = store.nodes.get_mut(&resolved) {
|
|
|
|
|
node.deleted = true;
|
|
|
|
|
node.version += 1;
|
|
|
|
|
Some(node.clone())
|
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
if let Some(node) = updated {
|
|
|
|
|
store.append_nodes(&[node])?;
|
|
|
|
|
store.nodes.remove(&resolved);
|
|
|
|
|
store.save()?;
|
|
|
|
|
println!("Deleted '{}'", resolved);
|
|
|
|
|
Ok(())
|
|
|
|
|
} else {
|
|
|
|
|
Err(format!("No node '{}'", resolved))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
// poc-memory v0.4.0: graph-structured memory with consolidation pipeline
// Rust core:
// - Cap'n Proto append-only storage (nodes + relations)
// - Graph algorithms: clustering coefficient, community detection,
//   schema fit, small-world metrics, interference detection
// - BM25 text similarity with Porter stemming
// - Spaced repetition replay queue
// - Commands: search, init, health, status, graph, categorize,
//   link-add, link-impact, decay, consolidate-session, etc.
// Python scripts:
// - Episodic digest pipeline: daily/weekly/monthly-digest.py
// - retroactive-digest.py for backfilling
// - consolidation-agents.py: 3 parallel Sonnet agents
// - apply-consolidation.py: structured action extraction + apply
// - digest-link-parser.py: extract ~400 explicit links from digests
// - content-promotion-agent.py: promote episodic obs to semantic files
// - bulk-categorize.py: categorize all nodes via single Sonnet call
// - consolidation-loop.py: multi-round automated consolidation
// Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
// 2026-02-28 22:17:00 -05:00
|
|
|
fn cmd_interference(args: &[String]) -> Result<(), String> {
|
|
|
|
|
let mut threshold = 0.4f32;
|
|
|
|
|
let mut i = 0;
|
|
|
|
|
while i < args.len() {
|
|
|
|
|
match args[i].as_str() {
|
|
|
|
|
"--threshold" if i + 1 < args.len() => {
|
|
|
|
|
threshold = args[i + 1].parse().map_err(|_| "invalid threshold")?;
|
|
|
|
|
i += 2;
|
|
|
|
|
}
|
|
|
|
|
_ => { i += 1; }
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
let pairs = neuro::detect_interference(&store, &g, threshold);
|
|
|
|
|
|
|
|
|
|
if pairs.is_empty() {
|
|
|
|
|
println!("No interfering pairs above threshold {:.2}", threshold);
|
|
|
|
|
} else {
|
|
|
|
|
println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
|
|
|
|
|
for (a, b, sim) in &pairs {
|
|
|
|
|
println!(" [{:.3}] {} ↔ {}", sim, a, b);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|