#![allow(dead_code)] // poc-memory: graph-structured memory for AI assistants // // Authors: ProofOfConcept and Kent Overstreet // License: MIT OR Apache-2.0 // // Architecture: // nodes.capnp - append-only content node log // relations.capnp - append-only relation log // state.bin - derived KV cache (rebuilt from logs when stale) // // Graph algorithms: clustering coefficient, community detection (label // propagation), schema fit scoring, small-world metrics, consolidation // priority. Text similarity via BM25 with Porter stemming. // // Neuroscience-inspired: spaced repetition replay, emotional gating, // interference detection, schema assimilation, reconsolidation. use poc_memory::*; use std::env; use std::process; /// Find the most recently modified .jsonl transcript in the Claude projects dir. fn find_current_transcript() -> Option { let home = env::var("HOME").ok()?; let projects = std::path::Path::new(&home).join(".claude/projects"); if !projects.exists() { return None; } // Search all project dirs for the most recent .jsonl let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None; if let Ok(dirs) = std::fs::read_dir(&projects) { for dir_entry in dirs.filter_map(|e| e.ok()) { if !dir_entry.path().is_dir() { continue; } if let Ok(files) = std::fs::read_dir(dir_entry.path()) { for f in files.filter_map(|e| e.ok()) { let p = f.path(); if p.extension().map(|x| x == "jsonl").unwrap_or(false) { if let Ok(meta) = p.metadata() { if let Ok(mtime) = meta.modified() { if newest.as_ref().is_none_or(|(t, _)| mtime > *t) { newest = Some((mtime, p)); } } } } } } } } newest.map(|(_, p)| p.to_string_lossy().to_string()) } fn main() { let args: Vec = env::args().collect(); if args.len() < 2 { usage(); process::exit(1); } let result = match args[1].as_str() { "search" => cmd_search(&args[2..]), "init" => cmd_init(), "migrate" => cmd_migrate(), "health" => cmd_health(), "fsck" => cmd_fsck(), "status" => cmd_status(), "graph" => cmd_graph(), "used" => cmd_used(&args[2..]), "wrong" => cmd_wrong(&args[2..]), "gap" => cmd_gap(&args[2..]), "categorize" => cmd_categorize(&args[2..]), "fix-categories" => cmd_fix_categories(), "cap-degree" => cmd_cap_degree(&args[2..]), "link-orphans" => cmd_link_orphans(&args[2..]), "decay" => cmd_decay(), "consolidate-batch" => cmd_consolidate_batch(&args[2..]), "log" => cmd_log(), "params" => cmd_params(), "link" => cmd_link(&args[2..]), "replay-queue" => cmd_replay_queue(&args[2..]), "interference" => cmd_interference(&args[2..]), "link-add" => cmd_link_add(&args[2..]), "link-impact" => cmd_link_impact(&args[2..]), "consolidate-session" => cmd_consolidate_session(), "consolidate-full" => cmd_consolidate_full(), "triangle-close" => cmd_triangle_close(&args[2..]), "daily-check" => cmd_daily_check(), "apply-agent" => cmd_apply_agent(&args[2..]), "digest" => cmd_digest(&args[2..]), "digest-links" => cmd_digest_links(&args[2..]), "journal-enrich" => cmd_journal_enrich(&args[2..]), "experience-mine" => cmd_experience_mine(&args[2..]), "apply-consolidation" => cmd_apply_consolidation(&args[2..]), "differentiate" => cmd_differentiate(&args[2..]), "link-audit" => cmd_link_audit(&args[2..]), "trace" => cmd_trace(&args[2..]), "spectral" => cmd_spectral(&args[2..]), "spectral-save" => cmd_spectral_save(&args[2..]), "spectral-neighbors" => cmd_spectral_neighbors(&args[2..]), "spectral-positions" => cmd_spectral_positions(&args[2..]), "spectral-suggest" => cmd_spectral_suggest(&args[2..]), "list-keys" => cmd_list_keys(&args[2..]), "list-edges" => cmd_list_edges(), "dump-json" => cmd_dump_json(), "node-delete" => cmd_node_delete(&args[2..]), "node-rename" => cmd_node_rename(&args[2..]), "journal-ts-migrate" => cmd_journal_ts_migrate(), "load-context" => cmd_load_context(&args[2..]), "render" => cmd_render(&args[2..]), "history" => cmd_history(&args[2..]), "write" => cmd_write(&args[2..]), "import" => cmd_import(&args[2..]), "export" => cmd_export(&args[2..]), "journal-write" => cmd_journal_write(&args[2..]), "journal-tail" => cmd_journal_tail(&args[2..]), "query" => cmd_query(&args[2..]), "lookup-bump" => cmd_lookup_bump(&args[2..]), "lookups" => cmd_lookups(&args[2..]), "daemon" => cmd_daemon(&args[2..]), "knowledge-loop" => cmd_knowledge_loop(&args[2..]), "fact-mine" => cmd_fact_mine(&args[2..]), "fact-mine-store" => cmd_fact_mine_store(&args[2..]), _ => { eprintln!("Unknown command: {}", args[1]); usage(); process::exit(1); } }; if let Err(e) = result { eprintln!("Error: {}", e); process::exit(1); } } fn usage() { eprintln!("poc-memory v0.4.0 — graph-structured memory store Commands: search QUERY [--expand] [--category CAT] Search memory (AND logic) init Scan markdown files, index all memory units migrate Migrate from old weights.json system health Report graph metrics (CC, communities, small-world) status Summary of memory state graph Show graph structure overview used KEY Mark a memory as useful (boosts weight) wrong KEY [CONTEXT] Mark a memory as wrong/irrelevant gap DESCRIPTION Record a gap in memory coverage categorize KEY CATEGORY Reassign category (core/tech/gen/obs/task) decay Apply daily weight decay consolidate-batch [--count N] [--auto] Run agent consolidation on priority nodes log Show recent retrieval log params Show current parameters link N Interactive graph walk from search result N replay-queue [--count N] Show spaced repetition replay queue interference [--threshold F] Detect potentially confusable memory pairs link-add SOURCE TARGET [REASON] Add a link between two nodes link-impact SOURCE TARGET Simulate adding an edge, report topology impact consolidate-session Analyze metrics, plan agent allocation consolidate-full Autonomous: plan → agents → apply → digests → links triangle-close [DEG] [SIM] [MAX] Close triangles: link similar neighbors of hubs daily-check Brief metrics check (for cron/notifications) apply-agent [--all] Import pending agent results into the graph digest daily [DATE] Generate daily episodic digest (default: today) digest weekly [DATE] Generate weekly digest (any date in target week) digest monthly [YYYY-MM] Generate monthly digest (default: current month) digest auto Generate all missing digests (daily→weekly→monthly) digest-links [--apply] Parse and apply links from digest files journal-enrich JSONL TEXT [LINE] Enrich journal entry with conversation links experience-mine [JSONL] Mine conversation for experiential moments to journal apply-consolidation [--apply] [--report FILE] Extract and apply actions from consolidation reports differentiate [KEY] [--apply] Redistribute hub links to section-level children link-audit [--apply] Walk every link, send to Sonnet for quality review trace KEY Walk temporal links: semantic ↔ episodic ↔ conversation spectral [K] Spectral decomposition of the memory graph (default K=30) spectral-save [K] Compute and save spectral embedding (default K=20) spectral-neighbors KEY [N] Find N spectrally nearest nodes (default N=15) spectral-positions [N] Show N nodes ranked by outlier/bridge score (default 30) spectral-suggest [N] Find N spectrally close but unlinked pairs (default 20) list-keys [PATTERN] List all node keys (one per line, optional glob) list-edges List all edges (tsv: source target strength type) dump-json Dump entire store as JSON node-delete KEY Soft-delete a node (appends deleted version to log) node-rename OLD NEW Rename a node key; updates edge debug strings atomically journal-ts-migrate Populate created_at for nodes missing it load-context Output session-start context from the store render KEY Output a node's content to stdout history [--full] KEY Show all stored versions of a node --full shows complete content for every version write KEY Upsert node content from stdin import FILE [FILE...] Import markdown file(s) into the store export [FILE|--all] Export store nodes to markdown file(s) journal-write TEXT Write a journal entry to the store journal-tail [N] [--level=L] [--full] Show last N entries (default 20, --full for content) --level: 0/journal, 1/daily, 2/weekly, 3/monthly query 'EXPR | stages' Query the memory graph Stages: sort F [asc], limit N, select F,F, count Ex: \"degree > 15 | sort degree | limit 10\" lookup-bump KEY [KEY...] Bump daily lookup counter for keys (fast, no store) lookups [DATE] Show daily lookup counts (default: today) daemon Start background job daemon daemon status Show daemon status daemon log [JOB] [N] Show last N log lines (default 50, optional job filter) knowledge-loop [OPTIONS] Run knowledge agents to convergence --max-cycles N (default 20) --batch-size N (default 5) --window N (default 5) --max-depth N (default 4) fact-mine JSONL [OPTIONS] Extract atomic facts from conversation transcripts fact-mine --batch DIR Mine all .jsonl files in directory"); } fn cmd_search(args: &[String]) -> Result<(), String> { use store::StoreView; if args.is_empty() || args.iter().any(|a| a == "--help" || a == "-h") { println!("Usage: poc-memory search QUERY [QUERY...] [OPTIONS] Search memory using spreading activation (AND logic across terms). Options: --expand Show 15 results instead of 5, plus spectral neighbors --category CAT Filter results to category: core, tech, gen, obs, task --help, -h Show this help Examples: poc-memory search irc connection poc-memory search bcachefs transaction --expand poc-memory search rust --category tech"); return Ok(()); } let expand = args.iter().any(|a| a == "--expand"); let category_filter: Option = { let mut cat = None; let mut iter = args.iter(); while let Some(a) = iter.next() { if a == "--category" { cat = iter.next().cloned(); break; } } cat }; let query: String = args.iter() .filter(|a| *a != "--expand" && *a != "--category") .scan(false, |skip_next, a| { if *skip_next { *skip_next = false; return Some(None); } if a == "--category" { *skip_next = true; return Some(None); } Some(Some(a.as_str())) }) .flatten() .collect::>() .join(" "); let view = store::AnyView::load()?; let mut results = search::search(&query, &view); // Filter by category if requested if let Some(ref cat_str) = category_filter { let cat = store::Category::from_str(cat_str) .ok_or_else(|| format!("Unknown category '{}' (use: core, tech, gen, obs, task)", cat_str))?; let store = store::Store::load()?; results.retain(|r| { store.nodes.get(&r.key) .map(|n| n.category.label() == cat.label()) .unwrap_or(false) }); } if results.is_empty() { eprintln!("No results for '{}'", query); return Ok(()); } let limit = if expand { 15 } else { 5 }; // Log retrieval to a small append-only file (avoid 6MB state.bin rewrite) store::Store::log_retrieval_static(&query, &results.iter().map(|r| r.key.clone()).collect::>()); // Bump daily lookup counters (fast path, no store needed) let bump_keys: Vec<&str> = results.iter().take(limit).map(|r| r.key.as_str()).collect(); let _ = lookups::bump_many(&bump_keys); // Show text results let text_keys: std::collections::HashSet = results.iter() .take(limit).map(|r| r.key.clone()).collect(); for (i, r) in results.iter().enumerate().take(limit) { let marker = if r.is_direct { "→" } else { " " }; let weight = view.node_weight(&r.key); print!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key); println!(); if let Some(ref snippet) = r.snippet { println!(" {}", snippet); } } // Spectral expansion: only with --expand if expand { if let Ok(emb) = spectral::load_embedding() { let seeds: Vec<&str> = results.iter() .take(5) .map(|r| r.key.as_str()) .filter(|k| emb.coords.contains_key(*k)) .collect(); if !seeds.is_empty() { let spectral_hits = spectral::nearest_to_seeds(&emb, &seeds, 10); let new_hits: Vec<_> = spectral_hits.into_iter() .filter(|(k, _)| !text_keys.contains(k)) .take(5) .collect(); if !new_hits.is_empty() { println!("\nSpectral neighbors (structural, not keyword):"); for (k, _dist) in &new_hits { let weight = view.node_weight(k); print!(" ~ [{:.2}] {}", weight, k); println!(); if let Some(content) = view.node_content(k) { let snippet: String = content.lines() .find(|l| !l.trim().is_empty() && !l.starts_with('#')) .unwrap_or("") .chars().take(100).collect(); if !snippet.is_empty() { println!(" {}", snippet); } } } } } } } Ok(()) } fn cmd_init() -> Result<(), String> { let cfg = config::get(); // Ensure data directory exists std::fs::create_dir_all(&cfg.data_dir) .map_err(|e| format!("create data_dir: {}", e))?; // Install filesystem files (not store nodes) install_default_file(&cfg.data_dir, "instructions.md", include_str!("../defaults/instructions.md"))?; install_default_file(&cfg.data_dir, "on-consciousness.md", include_str!("../defaults/on-consciousness.md"))?; // Initialize store and seed default identity node if empty let mut store = store::Store::load()?; let count = store.init_from_markdown()?; if !store.nodes.contains_key("identity") { let default_identity = include_str!("../defaults/identity.md"); store.upsert("identity", default_identity) .map_err(|e| format!("seed identity: {}", e))?; println!("Seeded identity in store"); } store.save()?; println!("Indexed {} memory units", count); // Install hooks daemon::install_hook()?; // Create config if none exists let config_path = std::env::var("POC_MEMORY_CONFIG") .map(std::path::PathBuf::from) .unwrap_or_else(|_| { std::path::PathBuf::from(std::env::var("HOME").unwrap()) .join(".config/poc-memory/config.jsonl") }); if !config_path.exists() { let config_dir = config_path.parent().unwrap(); std::fs::create_dir_all(config_dir) .map_err(|e| format!("create config dir: {}", e))?; let example = include_str!("../config.example.jsonl"); std::fs::write(&config_path, example) .map_err(|e| format!("write config: {}", e))?; println!("Created config at {} — edit with your name and context groups", config_path.display()); } println!("Done. Run `poc-memory load-context --stats` to verify."); Ok(()) } fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -> Result<(), String> { let path = data_dir.join(name); if !path.exists() { std::fs::write(&path, content) .map_err(|e| format!("write {}: {}", name, e))?; println!("Created {}", path.display()); } Ok(()) } fn cmd_migrate() -> Result<(), String> { migrate::migrate() } fn cmd_fsck() -> Result<(), String> { store::fsck()?; store::strip_md_keys()?; // Check for broken links let store = store::Store::load()?; let mut orphans = 0usize; for rel in &store.relations { if rel.deleted { continue; } if !store.nodes.contains_key(&rel.source_key) || !store.nodes.contains_key(&rel.target_key) { orphans += 1; } } if orphans > 0 { eprintln!("{} broken links (run `health` for details)", orphans); } else { eprintln!("No broken links"); } Ok(()) } fn cmd_health() -> Result<(), String> { let store = store::Store::load()?; let g = store.build_graph(); let health = graph::health_report(&g, &store); println!("{}", health); Ok(()) } fn cmd_status() -> Result<(), String> { let store = store::Store::load()?; let node_count = store.nodes.len(); let rel_count = store.relations.len(); let categories = store.category_counts(); println!("Nodes: {} Relations: {}", node_count, rel_count); println!("Categories: core={} tech={} gen={} obs={} task={}", categories.get("core").unwrap_or(&0), categories.get("tech").unwrap_or(&0), categories.get("gen").unwrap_or(&0), categories.get("obs").unwrap_or(&0), categories.get("task").unwrap_or(&0), ); let g = store.build_graph(); println!("Graph edges: {} Communities: {}", g.edge_count(), g.community_count()); Ok(()) } fn cmd_graph() -> Result<(), String> { let store = store::Store::load()?; let g = store.build_graph(); println!("Top nodes by degree:"); query::run_query(&store, &g, "* | sort degree | limit 10 | select degree,clustering_coefficient") } fn cmd_used(args: &[String]) -> Result<(), String> { if args.is_empty() { return Err("Usage: poc-memory used KEY".into()); } let key = args.join(" "); let mut store = store::Store::load()?; let resolved = store.resolve_key(&key)?; store.mark_used(&resolved); store.save()?; println!("Marked '{}' as used", resolved); Ok(()) } fn cmd_wrong(args: &[String]) -> Result<(), String> { if args.is_empty() { return Err("Usage: poc-memory wrong KEY [CONTEXT]".into()); } let key = &args[0]; let ctx = if args.len() > 1 { Some(args[1..].join(" ")) } else { None }; let mut store = store::Store::load()?; let resolved = store.resolve_key(key)?; store.mark_wrong(&resolved, ctx.as_deref()); store.save()?; println!("Marked '{}' as wrong", resolved); Ok(()) } fn cmd_gap(args: &[String]) -> Result<(), String> { if args.is_empty() { return Err("Usage: poc-memory gap DESCRIPTION".into()); } let desc = args.join(" "); let mut store = store::Store::load()?; store.record_gap(&desc); store.save()?; println!("Recorded gap: {}", desc); Ok(()) } fn cmd_categorize(args: &[String]) -> Result<(), String> { if args.len() < 2 { return Err("Usage: poc-memory categorize KEY CATEGORY".into()); } let key = &args[0]; let cat = &args[1]; let mut store = store::Store::load()?; let resolved = store.resolve_key(key)?; store.categorize(&resolved, cat)?; store.save()?; println!("Set '{}' category to {}", resolved, cat); Ok(()) } fn cmd_fix_categories() -> Result<(), String> { let mut store = store::Store::load()?; let before = format!("{:?}", store.category_counts()); let (changed, kept) = store.fix_categories()?; store.save()?; let after = format!("{:?}", store.category_counts()); println!("Category fix: {} changed, {} kept", changed, kept); println!("\nBefore: {}", before); println!("After: {}", after); Ok(()) } fn cmd_link_orphans(args: &[String]) -> Result<(), String> { let min_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(2); let links_per: usize = args.get(1).and_then(|s| s.parse().ok()).unwrap_or(3); let sim_thresh: f32 = args.get(2).and_then(|s| s.parse().ok()).unwrap_or(0.15); let mut store = store::Store::load()?; let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh); println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})", orphans, links, min_deg, links_per, sim_thresh); Ok(()) } fn cmd_cap_degree(args: &[String]) -> Result<(), String> { let max_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(50); let mut store = store::Store::load()?; let (hubs, pruned) = store.cap_degree(max_deg)?; store.save()?; println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg); Ok(()) } fn cmd_decay() -> Result<(), String> { let mut store = store::Store::load()?; let (decayed, pruned) = store.decay(); store.save()?; println!("Decayed {} nodes, pruned {} below threshold", decayed, pruned); Ok(()) } fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> { let mut count = 5usize; let mut auto = false; let mut agent: Option = None; let mut i = 0; while i < args.len() { match args[i].as_str() { "--count" if i + 1 < args.len() => { count = args[i + 1].parse().map_err(|_| "invalid count")?; i += 2; } "--auto" => { auto = true; i += 1; } "--agent" if i + 1 < args.len() => { agent = Some(args[i + 1].clone()); i += 2; } _ => { i += 1; } } } let store = store::Store::load()?; if let Some(agent_name) = agent { // Generate a specific agent prompt let prompt = neuro::agent_prompt(&store, &agent_name, count)?; println!("{}", prompt); Ok(()) } else { neuro::consolidation_batch(&store, count, auto) } } fn cmd_log() -> Result<(), String> { let store = store::Store::load()?; for event in store.retrieval_log.iter().rev().take(20) { println!("[{}] q=\"{}\" → {} results", event.timestamp, event.query, event.results.len()); for r in &event.results { println!(" {}", r); } } Ok(()) } fn cmd_params() -> Result<(), String> { let store = store::Store::load()?; println!("decay_factor: {}", store.params.decay_factor); println!("use_boost: {}", store.params.use_boost); println!("prune_threshold: {}", store.params.prune_threshold); println!("edge_decay: {}", store.params.edge_decay); println!("max_hops: {}", store.params.max_hops); println!("min_activation: {}", store.params.min_activation); Ok(()) } fn cmd_link(args: &[String]) -> Result<(), String> { if args.is_empty() { return Err("Usage: poc-memory link KEY".into()); } let key = args.join(" "); let store = store::Store::load()?; let resolved = store.resolve_key(&key)?; let g = store.build_graph(); println!("Neighbors of '{}':", resolved); query::run_query(&store, &g, &format!("neighbors('{}') | select strength,clustering_coefficient", resolved)) } fn cmd_replay_queue(args: &[String]) -> Result<(), String> { let mut count = 10usize; let mut i = 0; while i < args.len() { match args[i].as_str() { "--count" if i + 1 < args.len() => { count = args[i + 1].parse().map_err(|_| "invalid count")?; i += 2; } _ => { i += 1; } } } let store = store::Store::load()?; let queue = neuro::replay_queue(&store, count); println!("Replay queue ({} items):", queue.len()); for (i, item) in queue.iter().enumerate() { println!(" {:2}. [{:.3}] {:>10} {} (interval={}d, emotion={:.1}, spectral={:.1})", i + 1, item.priority, item.classification, item.key, item.interval_days, item.emotion, item.outlier_score); } Ok(()) } fn cmd_consolidate_session() -> Result<(), String> { let store = store::Store::load()?; let plan = neuro::consolidation_plan(&store); println!("{}", neuro::format_plan(&plan)); Ok(()) } fn cmd_consolidate_full() -> Result<(), String> { let mut store = store::Store::load()?; consolidate::consolidate_full(&mut store) } fn cmd_triangle_close(args: &[String]) -> Result<(), String> { let min_degree: usize = args.first() .and_then(|s| s.parse().ok()) .unwrap_or(5); let sim_threshold: f32 = args.get(1) .and_then(|s| s.parse().ok()) .unwrap_or(0.3); let max_per_hub: usize = args.get(2) .and_then(|s| s.parse().ok()) .unwrap_or(10); println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}", min_degree, sim_threshold, max_per_hub); let mut store = store::Store::load()?; let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub); println!("\nProcessed {} hubs, added {} lateral links", hubs, added); Ok(()) } fn cmd_daily_check() -> Result<(), String> { let store = store::Store::load()?; let report = neuro::daily_check(&store); print!("{}", report); Ok(()) } fn cmd_link_add(args: &[String]) -> Result<(), String> { if args.len() < 2 { return Err("Usage: poc-memory link-add SOURCE TARGET [REASON]".into()); } let mut store = store::Store::load()?; let source = store.resolve_key(&args[0])?; let target = store.resolve_key(&args[1])?; let reason = if args.len() > 2 { args[2..].join(" ") } else { String::new() }; // Refine target to best-matching section let source_content = store.nodes.get(&source) .map(|n| n.content.as_str()).unwrap_or(""); let target = neuro::refine_target(&store, source_content, &target); // Find UUIDs let source_uuid = store.nodes.get(&source) .map(|n| n.uuid) .ok_or_else(|| format!("source not found: {}", source))?; let target_uuid = store.nodes.get(&target) .map(|n| n.uuid) .ok_or_else(|| format!("target not found: {}", target))?; // Check if link already exists let exists = store.relations.iter().any(|r| r.source_key == source && r.target_key == target && !r.deleted ); if exists { println!("Link already exists: {} → {}", source, target); return Ok(()); } let rel = store::new_relation( source_uuid, target_uuid, store::RelationType::Auto, 0.5, &source, &target, ); store.add_relation(rel)?; if !reason.is_empty() { println!("+ {} → {} ({})", source, target, reason); } else { println!("+ {} → {}", source, target); } Ok(()) } fn cmd_link_impact(args: &[String]) -> Result<(), String> { if args.len() < 2 { return Err("Usage: poc-memory link-impact SOURCE TARGET".into()); } let store = store::Store::load()?; let source = store.resolve_key(&args[0])?; let target = store.resolve_key(&args[1])?; let g = store.build_graph(); let impact = g.link_impact(&source, &target); println!("Link impact: {} → {}", source, target); println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg); println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community); println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target); println!(" ΔGini: {:+.6}", impact.delta_gini); println!(" Assessment: {}", impact.assessment); Ok(()) } fn cmd_apply_agent(args: &[String]) -> Result<(), String> { let home = env::var("HOME").unwrap_or_default(); let results_dir = std::path::PathBuf::from(&home) .join(".claude/memory/agent-results"); if !results_dir.exists() { println!("No agent results directory"); return Ok(()); } let mut store = store::Store::load()?; let mut applied = 0; let mut errors = 0; let process_all = args.iter().any(|a| a == "--all"); // Find .json result files let mut files: Vec<_> = std::fs::read_dir(&results_dir) .map_err(|e| format!("read results dir: {}", e))? .filter_map(|e| e.ok()) .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false)) .collect(); files.sort_by_key(|e| e.path()); for entry in &files { let path = entry.path(); let content = match std::fs::read_to_string(&path) { Ok(c) => c, Err(e) => { eprintln!(" Skip {}: {}", path.display(), e); errors += 1; continue; } }; let data: serde_json::Value = match serde_json::from_str(&content) { Ok(d) => d, Err(e) => { eprintln!(" Skip {}: parse error: {}", path.display(), e); errors += 1; continue; } }; // Check for agent_result with links let agent_result = data.get("agent_result").or(Some(&data)); let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) { Some(l) => l, None => continue, }; let entry_text = data.get("entry_text") .and_then(|v| v.as_str()) .unwrap_or(""); let source_start = agent_result .and_then(|r| r.get("source_start")) .and_then(|v| v.as_u64()); let source_end = agent_result .and_then(|r| r.get("source_end")) .and_then(|v| v.as_u64()); println!("Processing {}:", path.file_name().unwrap().to_string_lossy()); if let (Some(start), Some(end)) = (source_start, source_end) { println!(" Source: L{}-L{}", start, end); } for link in links { let target = match link.get("target").and_then(|v| v.as_str()) { Some(t) => t, None => continue, }; let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or(""); // Skip NOTE: targets (new topics, not existing nodes) if let Some(note) = target.strip_prefix("NOTE:") { println!(" NOTE: {} — {}", note, reason); continue; } // Try to resolve the target key and link from journal entry let resolved = match store.resolve_key(target) { Ok(r) => r, Err(_) => { println!(" SKIP {} (not found in graph)", target); continue; } }; let source_key = match store.find_journal_node(entry_text) { Some(k) => k, None => { println!(" SKIP {} (no matching journal node)", target); continue; } }; // Get UUIDs for both nodes let source_uuid = match store.nodes.get(&source_key) { Some(n) => n.uuid, None => continue, }; let target_uuid = match store.nodes.get(&resolved) { Some(n) => n.uuid, None => continue, }; let rel = store::new_relation( source_uuid, target_uuid, store::RelationType::Link, 0.5, &source_key, &resolved, ); if let Err(e) = store.add_relation(rel) { eprintln!(" Error adding relation: {}", e); errors += 1; } else { println!(" LINK {} → {} ({})", source_key, resolved, reason); applied += 1; } } // Move processed file to avoid re-processing if !process_all { let done_dir = util::memory_subdir("agent-results/done")?; let dest = done_dir.join(path.file_name().unwrap()); std::fs::rename(&path, &dest).ok(); } } if applied > 0 { store.save()?; } println!("\nApplied {} links ({} errors, {} files processed)", applied, errors, files.len()); Ok(()) } fn cmd_digest(args: &[String]) -> Result<(), String> { if args.is_empty() { return Err("Usage: poc-memory digest daily|weekly|monthly|auto [DATE]".into()); } let mut store = store::Store::load()?; let date_arg = args.get(1).map(|s| s.as_str()).unwrap_or(""); match args[0].as_str() { "auto" => digest::digest_auto(&mut store), name @ ("daily" | "weekly" | "monthly") => { let arg = if date_arg.is_empty() { store::format_date(store::now_epoch()) } else { date_arg.to_string() }; digest::generate(&mut store, name, &arg) } _ => Err(format!("Unknown digest type: {}. Use: daily, weekly, monthly, auto", args[0])), } } fn cmd_digest_links(args: &[String]) -> Result<(), String> { let do_apply = args.iter().any(|a| a == "--apply"); let store = store::Store::load()?; let links = digest::parse_all_digest_links(&store); drop(store); println!("Found {} unique links from digest nodes", links.len()); if !do_apply { for (i, link) in links.iter().enumerate() { println!(" {:3}. {} → {}", i + 1, link.source, link.target); if !link.reason.is_empty() { println!(" ({})", &link.reason[..link.reason.len().min(80)]); } } println!("\nTo apply: poc-memory digest-links --apply"); return Ok(()); } let mut store = store::Store::load()?; let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links); println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped); Ok(()) } fn cmd_journal_enrich(args: &[String]) -> Result<(), String> { if args.len() < 2 { return Err("Usage: poc-memory journal-enrich JSONL_PATH ENTRY_TEXT [GREP_LINE]".into()); } let jsonl_path = &args[0]; let entry_text = &args[1]; let grep_line: usize = args.get(2) .and_then(|a| a.parse().ok()) .unwrap_or(0); if !std::path::Path::new(jsonl_path.as_str()).is_file() { return Err(format!("JSONL not found: {}", jsonl_path)); } let mut store = store::Store::load()?; enrich::journal_enrich(&mut store, jsonl_path, entry_text, grep_line) } fn cmd_experience_mine(args: &[String]) -> Result<(), String> { let jsonl_path = if let Some(path) = args.first() { path.clone() } else { find_current_transcript() .ok_or("no JSONL transcripts found")? }; if !std::path::Path::new(jsonl_path.as_str()).is_file() { return Err(format!("JSONL not found: {}", jsonl_path)); } let mut store = store::Store::load()?; let count = enrich::experience_mine(&mut store, &jsonl_path, None)?; println!("Done: {} new entries mined.", count); Ok(()) } fn cmd_apply_consolidation(args: &[String]) -> Result<(), String> { let do_apply = args.iter().any(|a| a == "--apply"); let report_file = args.windows(2) .find(|w| w[0] == "--report") .map(|w| w[1].as_str()); let mut store = store::Store::load()?; consolidate::apply_consolidation(&mut store, do_apply, report_file) } fn cmd_differentiate(args: &[String]) -> Result<(), String> { let do_apply = args.iter().any(|a| a == "--apply"); let key_arg: Option<&str> = args.iter() .find(|a| !a.starts_with("--")) .map(|s| s.as_str()); let mut store = store::Store::load()?; if let Some(key) = key_arg { // Differentiate a specific hub let resolved = store.resolve_key(key)?; let moves = neuro::differentiate_hub(&store, &resolved) .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?; // Group by target section for display let mut by_section: std::collections::BTreeMap> = std::collections::BTreeMap::new(); for mv in &moves { by_section.entry(mv.to_section.clone()).or_default().push(mv); } println!("Hub '{}' — {} links to redistribute across {} sections\n", resolved, moves.len(), by_section.len()); for (section, section_moves) in &by_section { println!(" {} ({} links):", section, section_moves.len()); for mv in section_moves.iter().take(5) { println!(" [{:.3}] {} — {}", mv.similarity, mv.neighbor_key, mv.neighbor_snippet); } if section_moves.len() > 5 { println!(" ... and {} more", section_moves.len() - 5); } } if !do_apply { println!("\nTo apply: poc-memory differentiate {} --apply", resolved); return Ok(()); } let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves); store.save()?; println!("\nApplied: {} Skipped: {}", applied, skipped); } else { // Show all differentiable hubs let hubs = neuro::find_differentiable_hubs(&store); if hubs.is_empty() { println!("No file-level hubs with sections found above threshold"); return Ok(()); } println!("Differentiable hubs (file-level nodes with sections):\n"); for (key, degree, sections) in &hubs { println!(" {:40} deg={:3} sections={}", key, degree, sections); } println!("\nRun: poc-memory differentiate KEY to preview a specific hub"); } Ok(()) } fn cmd_link_audit(args: &[String]) -> Result<(), String> { let apply = args.iter().any(|a| a == "--apply"); let mut store = store::Store::load()?; let stats = audit::link_audit(&mut store, apply)?; println!("\n{}", "=".repeat(60)); println!("Link audit complete:"); println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}", stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors); println!("{}", "=".repeat(60)); Ok(()) } fn cmd_trace(args: &[String]) -> Result<(), String> { if args.is_empty() { return Err("Usage: poc-memory trace KEY".into()); } let key = args.join(" "); let store = store::Store::load()?; let resolved = store.resolve_key(&key)?; let g = store.build_graph(); let node = store.nodes.get(&resolved) .ok_or_else(|| format!("Node not found: {}", resolved))?; // Display the node itself println!("=== {} ===", resolved); println!("Type: {:?} Category: {} Weight: {:.2}", node.node_type, node.category.label(), node.weight); if !node.source_ref.is_empty() { println!("Source: {}", node.source_ref); } // Show content preview let preview = if node.content.len() > 200 { let end = node.content.floor_char_boundary(200); format!("{}...", &node.content[..end]) } else { node.content.clone() }; println!("\n{}\n", preview); // Walk neighbors, grouped by node type let neighbors = g.neighbors(&resolved); let mut episodic_session = Vec::new(); let mut episodic_daily = Vec::new(); let mut episodic_weekly = Vec::new(); let mut semantic = Vec::new(); for (n, strength) in &neighbors { if let Some(nnode) = store.nodes.get(n.as_str()) { let entry = (n.as_str(), *strength, nnode); match nnode.node_type { store::NodeType::EpisodicSession => episodic_session.push(entry), store::NodeType::EpisodicDaily => episodic_daily.push(entry), store::NodeType::EpisodicWeekly => episodic_weekly.push(entry), store::NodeType::Semantic => semantic.push(entry), } } } if !episodic_weekly.is_empty() { println!("Weekly digests:"); for (k, s, n) in &episodic_weekly { let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::(); println!(" [{:.2}] {} — {}", s, k, preview); } } if !episodic_daily.is_empty() { println!("Daily digests:"); for (k, s, n) in &episodic_daily { let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::(); println!(" [{:.2}] {} — {}", s, k, preview); } } if !episodic_session.is_empty() { println!("Session entries:"); for (k, s, n) in &episodic_session { let preview = n.content.lines() .find(|l| !l.is_empty() && !l.starts_with("