// poc-memory: graph-structured memory for AI assistants // // Authors: ProofOfConcept and Kent Overstreet // License: MIT OR Apache-2.0 // // Architecture: // nodes.capnp - append-only content node log // relations.capnp - append-only relation log // state.bin - derived KV cache (rebuilt from logs when stale) // // Graph algorithms: clustering coefficient, community detection (label // propagation), schema fit scoring, small-world metrics, consolidation // priority. Text similarity via BM25 with Porter stemming. // // Neuroscience-inspired: spaced repetition replay, emotional gating, // interference detection, schema assimilation, reconsolidation. use poc_memory::*; use clap::{Parser, Subcommand}; use std::process; /// Find the most recently modified .jsonl transcript in the Claude projects dir. fn find_current_transcript() -> Option { let projects = config::get().projects_dir.clone(); if !projects.exists() { return None; } let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None; if let Ok(dirs) = std::fs::read_dir(&projects) { for dir_entry in dirs.filter_map(|e| e.ok()) { if !dir_entry.path().is_dir() { continue; } if let Ok(files) = std::fs::read_dir(dir_entry.path()) { for f in files.filter_map(|e| e.ok()) { let p = f.path(); if p.extension().map(|x| x == "jsonl").unwrap_or(false) { if let Ok(meta) = p.metadata() { if let Ok(mtime) = meta.modified() { if newest.as_ref().is_none_or(|(t, _)| mtime > *t) { newest = Some((mtime, p)); } } } } } } } } newest.map(|(_, p)| p.to_string_lossy().to_string()) } #[derive(Parser)] #[command(name = "poc-memory", version = "0.4.0", about = "Graph-structured memory store")] struct Cli { #[command(subcommand)] command: Command, } #[derive(Subcommand)] enum Command { // ── Core (daily use) ────────────────────────────────────────────── /// Search memory (AND logic across terms) /// /// Pipeline: -p spread -p spectral,k=20 /// Default pipeline: spread Search { /// Search terms query: Vec, /// Algorithm pipeline 
stages (repeatable) #[arg(short, long = "pipeline")] pipeline: Vec, /// Show more results #[arg(long)] expand: bool, /// Show node content, not just keys #[arg(long)] full: bool, /// Show debug output for each pipeline stage #[arg(long)] debug: bool, /// Also match key components (e.g. "irc" matches "irc-access") #[arg(long)] fuzzy: bool, /// Also search node content (slow, use when graph search misses) #[arg(long)] content: bool, }, /// Output a node's content to stdout Render { /// Node key key: Vec, }, /// Upsert node content from stdin Write { /// Node key key: Vec, }, /// Show all stored versions of a node History { /// Show full content for every version #[arg(long)] full: bool, /// Node key key: Vec, }, /// Show most recent writes to the node log Tail { /// Number of entries (default: 20) #[arg(default_value_t = 20)] n: usize, /// Show full content #[arg(long)] full: bool, }, /// Summary of memory state Status, /// Query the memory graph #[command(after_long_help = "\ EXPRESSIONS: * all nodes key ~ 'pattern' regex match on node key content ~ 'phrase' regex match on node content degree > 15 numeric comparison on any field field = value exact match field != value not equal expr AND expr boolean AND expr OR expr boolean OR NOT expr negation neighbors('key') nodes linked to key neighbors('key') WHERE expr ... with filter on edges/nodes FIELDS: key, weight, content, degree, node_type, provenance, emotion, retrievals, uses, wrongs, created, clustering_coefficient (cc), community_id OPERATORS: > < >= <= = != ~(regex) PIPE STAGES: | sort FIELD [asc] sort (desc by default) | limit N cap results | select F,F,... 
output fields as TSV | count just show count | connectivity show graph structure between results FUNCTIONS: community('key') community id of a node degree('key') degree of a node EXAMPLES: key ~ 'inner-life' substring match on keys content ~ 'made love' full-text search content ~ 'made love' | connectivity find clusters among results (content ~ 'A' OR content ~ 'B') | connectivity degree > 15 | sort degree | limit 10 high-degree nodes key ~ 'journal' AND degree > 10 | count count matching nodes neighbors('identity') WHERE strength > 0.5 | sort strength * | sort weight asc | limit 20 lowest-weight nodes ")] Query { /// Query expression (e.g. "key ~ 'inner-life'") expr: Vec, }, /// Mark a memory as useful (boosts weight) Used { /// Node key key: Vec, }, /// Mark a memory as wrong/irrelevant Wrong { /// Node key key: String, /// Optional context context: Vec, }, /// Record a gap in memory coverage Gap { /// Gap description description: Vec, }, // ── Node operations ─────────────────────────────────────────────── /// Node operations (delete, rename, list) #[command(subcommand)] Node(NodeCmd), // ── Journal ─────────────────────────────────────────────────────── /// Journal operations (write, tail, enrich) #[command(subcommand)] Journal(JournalCmd), // ── Graph ───────────────────────────────────────────────────────── /// Graph operations (link, audit, spectral) #[command(subcommand, name = "graph")] GraphCmd(GraphCmd), // ── Agents ──────────────────────────────────────────────────────── /// Agent and daemon operations #[command(subcommand)] Agent(AgentCmd), // ── Admin ───────────────────────────────────────────────────────── /// Admin operations (fsck, health, import, export) #[command(subcommand)] Admin(AdminCmd), } #[derive(Subcommand)] enum NodeCmd { /// Soft-delete a node Delete { /// Node key key: Vec, }, /// Rename a node key Rename { /// Old key old_key: String, /// New key new_key: String, }, /// List all node keys (one per line, optional glob) #[command(name 
= "list")] List { /// Glob pattern to filter keys pattern: Option, }, /// List all edges (tsv: source target strength type) Edges, /// Dump entire store as JSON #[command(name = "dump")] Dump, } #[derive(Subcommand)] enum JournalCmd { /// Write a journal entry to the store Write { /// Entry text text: Vec, }, /// Show recent journal/digest entries Tail { /// Number of entries to show (default: 20) #[arg(default_value_t = 20)] n: usize, /// Show full content #[arg(long)] full: bool, /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly #[arg(long, default_value_t = 0)] level: u8, }, /// Enrich journal entry with conversation links Enrich { /// Path to JSONL transcript jsonl_path: String, /// Journal entry text to enrich entry_text: String, /// Grep line number for source location #[arg(default_value_t = 0)] grep_line: usize, }, } #[derive(Subcommand)] enum GraphCmd { /// Show neighbors of a node Link { /// Node key key: Vec, }, /// Add a link between two nodes #[command(name = "link-add")] LinkAdd { /// Source node key source: String, /// Target node key target: String, /// Optional reason reason: Vec, }, /// Simulate adding an edge, report topology impact #[command(name = "link-impact")] LinkImpact { /// Source node key source: String, /// Target node key target: String, }, /// Walk every link, send to Sonnet for quality review #[command(name = "link-audit")] LinkAudit { /// Apply changes (default: dry run) #[arg(long)] apply: bool, }, /// Link orphan nodes to similar neighbors #[command(name = "link-orphans")] LinkOrphans { /// Minimum degree to consider orphan (default: 2) #[arg(default_value_t = 2)] min_degree: usize, /// Links per orphan (default: 3) #[arg(default_value_t = 3)] links_per: usize, /// Similarity threshold (default: 0.15) #[arg(default_value_t = 0.15)] sim_threshold: f32, }, /// Close triangles: link similar neighbors of hubs #[command(name = "triangle-close")] TriangleClose { /// Minimum hub degree (default: 5) #[arg(default_value_t = 5)] 
min_degree: usize, /// Similarity threshold (default: 0.3) #[arg(default_value_t = 0.3)] sim_threshold: f32, /// Maximum links per hub (default: 10) #[arg(default_value_t = 10)] max_per_hub: usize, }, /// Cap node degree by pruning weak auto edges #[command(name = "cap-degree")] CapDegree { /// Maximum degree (default: 50) #[arg(default_value_t = 50)] max_degree: usize, }, /// Redistribute hub links to section-level children Differentiate { /// Specific hub key (omit to list all differentiable hubs) key: Option, /// Apply the redistribution #[arg(long)] apply: bool, }, /// Walk temporal links: semantic ↔ episodic ↔ conversation Trace { /// Node key key: Vec, }, /// Detect potentially confusable memory pairs Interference { /// Similarity threshold (default: 0.4) #[arg(long, default_value_t = 0.4)] threshold: f32, }, /// Show graph structure overview Overview, /// Spectral decomposition of the memory graph Spectral { /// Number of eigenvectors (default: 30) #[arg(default_value_t = 30)] k: usize, }, /// Compute and save spectral embedding #[command(name = "spectral-save")] SpectralSave { /// Number of eigenvectors (default: 20) #[arg(default_value_t = 20)] k: usize, }, /// Find spectrally nearest nodes #[command(name = "spectral-neighbors")] SpectralNeighbors { /// Node key key: String, /// Number of neighbors (default: 15) #[arg(default_value_t = 15)] n: usize, }, /// Show nodes ranked by outlier/bridge score #[command(name = "spectral-positions")] SpectralPositions { /// Number of nodes to show (default: 30) #[arg(default_value_t = 30)] n: usize, }, /// Find spectrally close but unlinked pairs #[command(name = "spectral-suggest")] SpectralSuggest { /// Number of pairs (default: 20) #[arg(default_value_t = 20)] n: usize, }, /// Diagnose duplicate/overlapping nodes for a topic cluster Organize { /// Search term (matches node keys; also content unless --key-only) term: String, /// Similarity threshold for pair reporting (default: 0.4) #[arg(long, default_value_t = 
0.4)] threshold: f32, /// Only match node keys, not content #[arg(long)] key_only: bool, /// Create anchor node for the search term and link to cluster #[arg(long)] anchor: bool, }, } #[derive(Subcommand)] enum AgentCmd { /// Background job daemon Daemon { /// Subcommand: status, log, install sub: Option, /// Additional arguments args: Vec, }, /// Run knowledge agents to convergence #[command(name = "knowledge-loop")] KnowledgeLoop { /// Maximum cycles before stopping #[arg(long, default_value_t = 20)] max_cycles: usize, /// Items per agent per cycle #[arg(long, default_value_t = 5)] batch_size: usize, /// Cycles to check for convergence #[arg(long, default_value_t = 5)] window: usize, /// Maximum inference depth #[arg(long, default_value_t = 4)] max_depth: i32, }, /// Run agent consolidation on priority nodes #[command(name = "consolidate-batch")] ConsolidateBatch { /// Number of nodes to consolidate #[arg(long, default_value_t = 5)] count: usize, /// Generate replay agent prompt automatically #[arg(long)] auto: bool, /// Generate prompt for a specific agent (replay, linker, separator, transfer, health) #[arg(long)] agent: Option, }, /// Analyze metrics, plan agent allocation #[command(name = "consolidate-session")] ConsolidateSession, /// Autonomous: plan → agents → apply → digests → links #[command(name = "consolidate-full")] ConsolidateFull, /// Import pending agent results into the graph #[command(name = "apply-agent")] ApplyAgent { /// Process all files without moving to done/ #[arg(long)] all: bool, }, /// Extract and apply actions from consolidation reports #[command(name = "apply-consolidation")] ApplyConsolidation { /// Apply actions (default: dry run) #[arg(long)] apply: bool, /// Read from specific report file #[arg(long)] report: Option, }, /// Generate episodic digests (daily, weekly, monthly, auto) Digest { /// Digest type: daily, weekly, monthly, auto #[command(subcommand)] level: DigestLevel, }, /// Parse and apply links from digest nodes 
#[command(name = "digest-links")] DigestLinks { /// Apply the links (default: dry run) #[arg(long)] apply: bool, }, /// Mine conversation for experiential moments to journal #[command(name = "experience-mine")] ExperienceMine { /// Path to JSONL transcript (default: most recent) jsonl_path: Option, }, /// Extract atomic facts from conversation transcripts #[command(name = "fact-mine")] FactMine { /// Path to JSONL transcript or directory (with --batch) path: String, /// Process all .jsonl files in directory #[arg(long)] batch: bool, /// Show chunks without calling model #[arg(long)] dry_run: bool, /// Write JSON to file (default: stdout) #[arg(long, short)] output: Option, /// Skip transcripts with fewer messages #[arg(long, default_value_t = 10)] min_messages: usize, }, /// Extract facts from a transcript and store directly #[command(name = "fact-mine-store")] FactMineStore { /// Path to JSONL transcript path: String, }, /// Show spaced repetition replay queue #[command(name = "replay-queue")] ReplayQueue { /// Number of items to show #[arg(long, default_value_t = 10)] count: usize, }, } #[derive(Subcommand)] enum AdminCmd { /// Scan markdown files, index all memory units Init, /// Report graph metrics (CC, communities, small-world) Health, /// Run consistency checks and repair Fsck, /// Find and merge duplicate nodes (same key, multiple UUIDs) Dedup { /// Apply the merge (default: dry run) #[arg(long)] apply: bool, }, /// Brief metrics check (for cron/notifications) #[command(name = "daily-check")] DailyCheck, /// Import markdown file(s) into the store Import { /// File paths files: Vec, }, /// Export store nodes to markdown file(s) Export { /// File keys to export (or --all) files: Vec, /// Export all file-level nodes #[arg(long)] all: bool, }, /// Output session-start context from the store #[command(name = "load-context")] LoadContext { /// Show word count statistics instead of content #[arg(long)] stats: bool, }, /// Show recent retrieval log Log, /// Show 
current parameters Params, /// Bump daily lookup counter for keys #[command(name = "lookup-bump")] LookupBump { /// Node keys keys: Vec, }, /// Show daily lookup counts Lookups { /// Date (default: today) date: Option, }, } #[derive(Subcommand)] enum DigestLevel { /// Generate daily digest Daily { /// Date (default: today) date: Option, }, /// Generate weekly digest Weekly { /// Date or week label (default: current week) date: Option, }, /// Generate monthly digest Monthly { /// Month (YYYY-MM) or date (default: current month) date: Option, }, /// Generate all missing digests Auto, } fn main() { let cli = Cli::parse(); let result = match cli.command { // Core Command::Search { query, pipeline, expand, full, debug, fuzzy, content } => cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content), Command::Render { key } => cmd_render(&key), Command::Write { key } => cmd_write(&key), Command::History { full, key } => cmd_history(&key, full), Command::Tail { n, full } => cmd_tail(n, full), Command::Status => cmd_status(), Command::Query { expr } => cmd_query(&expr), Command::Used { key } => cmd_used(&key), Command::Wrong { key, context } => cmd_wrong(&key, &context), Command::Gap { description } => cmd_gap(&description), // Node Command::Node(sub) => match sub { NodeCmd::Delete { key } => cmd_node_delete(&key), NodeCmd::Rename { old_key, new_key } => cmd_node_rename(&old_key, &new_key), NodeCmd::List { pattern } => cmd_list_keys(pattern.as_deref()), NodeCmd::Edges => cmd_list_edges(), NodeCmd::Dump => cmd_dump_json(), }, // Journal Command::Journal(sub) => match sub { JournalCmd::Write { text } => cmd_journal_write(&text), JournalCmd::Tail { n, full, level } => cmd_journal_tail(n, full, level), JournalCmd::Enrich { jsonl_path, entry_text, grep_line } => cmd_journal_enrich(&jsonl_path, &entry_text, grep_line), }, // Graph Command::GraphCmd(sub) => match sub { GraphCmd::Link { key } => cmd_link(&key), GraphCmd::LinkAdd { source, target, reason } => 
cmd_link_add(&source, &target, &reason), GraphCmd::LinkImpact { source, target } => cmd_link_impact(&source, &target), GraphCmd::LinkAudit { apply } => cmd_link_audit(apply), GraphCmd::LinkOrphans { min_degree, links_per, sim_threshold } => cmd_link_orphans(min_degree, links_per, sim_threshold), GraphCmd::TriangleClose { min_degree, sim_threshold, max_per_hub } => cmd_triangle_close(min_degree, sim_threshold, max_per_hub), GraphCmd::CapDegree { max_degree } => cmd_cap_degree(max_degree), GraphCmd::Differentiate { key, apply } => cmd_differentiate(key.as_deref(), apply), GraphCmd::Trace { key } => cmd_trace(&key), GraphCmd::Interference { threshold } => cmd_interference(threshold), GraphCmd::Overview => cmd_graph(), GraphCmd::Spectral { k } => cmd_spectral(k), GraphCmd::SpectralSave { k } => cmd_spectral_save(k), GraphCmd::SpectralNeighbors { key, n } => cmd_spectral_neighbors(&key, n), GraphCmd::SpectralPositions { n } => cmd_spectral_positions(n), GraphCmd::SpectralSuggest { n } => cmd_spectral_suggest(n), GraphCmd::Organize { term, threshold, key_only, anchor } => cmd_organize(&term, threshold, key_only, anchor), }, // Agent Command::Agent(sub) => match sub { AgentCmd::Daemon { sub, args } => cmd_daemon(sub.as_deref(), &args), AgentCmd::KnowledgeLoop { max_cycles, batch_size, window, max_depth } => cmd_knowledge_loop(max_cycles, batch_size, window, max_depth), AgentCmd::ConsolidateBatch { count, auto, agent } => cmd_consolidate_batch(count, auto, agent), AgentCmd::ConsolidateSession => cmd_consolidate_session(), AgentCmd::ConsolidateFull => cmd_consolidate_full(), AgentCmd::ApplyAgent { all } => cmd_apply_agent(all), AgentCmd::ApplyConsolidation { apply, report } => cmd_apply_consolidation(apply, report.as_deref()), AgentCmd::Digest { level } => cmd_digest(level), AgentCmd::DigestLinks { apply } => cmd_digest_links(apply), AgentCmd::ExperienceMine { jsonl_path } => cmd_experience_mine(jsonl_path), AgentCmd::FactMine { path, batch, dry_run, output, min_messages } 
=> cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages), AgentCmd::FactMineStore { path } => cmd_fact_mine_store(&path), AgentCmd::ReplayQueue { count } => cmd_replay_queue(count), }, // Admin Command::Admin(sub) => match sub { AdminCmd::Init => cmd_init(), AdminCmd::Health => cmd_health(), AdminCmd::Fsck => cmd_fsck(), AdminCmd::Dedup { apply } => cmd_dedup(apply), AdminCmd::DailyCheck => cmd_daily_check(), AdminCmd::Import { files } => cmd_import(&files), AdminCmd::Export { files, all } => cmd_export(&files, all), AdminCmd::LoadContext { stats } => cmd_load_context(stats), AdminCmd::Log => cmd_log(), AdminCmd::Params => cmd_params(), AdminCmd::LookupBump { keys } => cmd_lookup_bump(&keys), AdminCmd::Lookups { date } => cmd_lookups(date.as_deref()), }, }; if let Err(e) = result { eprintln!("Error: {}", e); process::exit(1); } } // ── Command implementations ───────────────────────────────────────── fn cmd_search(terms: &[String], pipeline_args: &[String], expand: bool, full: bool, debug: bool, fuzzy: bool, content: bool) -> Result<(), String> { use store::StoreView; use std::collections::BTreeMap; // Parse pipeline stages (unified: algorithms, filters, transforms, generators) let stages: Vec = if pipeline_args.is_empty() { vec![search::Stage::Algorithm(search::AlgoStage::parse("spread").unwrap())] } else { pipeline_args.iter() .map(|a| search::Stage::parse(a)) .collect::, _>>()? }; // Check if pipeline needs full Store (has filters/transforms/generators) let needs_store = stages.iter().any(|s| !matches!(s, search::Stage::Algorithm(_))); // Check if pipeline starts with a generator (doesn't need seed terms) let has_generator = stages.first().map(|s| matches!(s, search::Stage::Generator(_))).unwrap_or(false); if terms.is_empty() && !has_generator { return Err("search requires terms or a generator stage (e.g. 
'all')".into()); } let query: String = terms.join(" "); if debug { let names: Vec = stages.iter().map(|s| format!("{}", s)).collect(); println!("[search] pipeline: {}", names.join(" → ")); } let max_results = if expand { 15 } else { 5 }; if needs_store { // Full Store path — needed for filter/transform/generator stages let store = store::Store::load()?; let graph = store.build_graph(); let seeds = if has_generator { vec![] // generator will produce its own result set } else { let terms_map: BTreeMap = query.split_whitespace() .map(|t| (t.to_lowercase(), 1.0)) .collect(); let (seeds, _) = search::match_seeds_opts(&terms_map, &store, fuzzy, content); seeds }; let raw = search::run_query(&stages, seeds, &graph, &store, debug, max_results); if raw.is_empty() { eprintln!("No results"); return Ok(()); } for (i, (key, score)) in raw.iter().enumerate().take(max_results) { let weight = store.nodes.get(key).map(|n| n.weight).unwrap_or(0.0); println!("{:2}. [{:.2}/{:.2}] {}", i + 1, score, weight, key); if full { if let Some(node) = store.nodes.get(key) { println!(); for line in node.content.lines() { println!(" {}", line); } println!(); } } } } else { // Fast MmapView path — algorithm-only pipeline let view = store::AnyView::load()?; let graph = graph::build_graph_fast(&view); let terms_map: BTreeMap = query.split_whitespace() .map(|t| (t.to_lowercase(), 1.0)) .collect(); let (seeds, direct_hits) = search::match_seeds_opts(&terms_map, &view, fuzzy, content); if seeds.is_empty() { eprintln!("No results for '{}'", query); return Ok(()); } if debug { println!("[search] {} seeds from query '{}'", seeds.len(), query); } // Extract AlgoStages from the unified stages let algo_stages: Vec<&search::AlgoStage> = stages.iter() .filter_map(|s| match s { search::Stage::Algorithm(a) => Some(a), _ => None, }) .collect(); let algo_owned: Vec = algo_stages.into_iter().cloned().collect(); let raw = search::run_pipeline(&algo_owned, seeds, &graph, &view, debug, max_results); let results: Vec = 
raw.into_iter() .map(|(key, activation)| { let is_direct = direct_hits.contains(&key); search::SearchResult { key, activation, is_direct, snippet: None } }) .collect(); if results.is_empty() { eprintln!("No results for '{}'", query); return Ok(()); } // Log retrieval store::Store::log_retrieval_static(&query, &results.iter().map(|r| r.key.clone()).collect::>()); let bump_keys: Vec<&str> = results.iter().take(max_results).map(|r| r.key.as_str()).collect(); let _ = lookups::bump_many(&bump_keys); for (i, r) in results.iter().enumerate().take(max_results) { let marker = if r.is_direct { "→" } else { " " }; let weight = view.node_weight(&r.key); println!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key); if full { if let Some(content) = view.node_content(&r.key) { println!(); for line in content.lines() { println!(" {}", line); } println!(); } } } } Ok(()) } fn cmd_init() -> Result<(), String> { let cfg = config::get(); // Ensure data directory exists std::fs::create_dir_all(&cfg.data_dir) .map_err(|e| format!("create data_dir: {}", e))?; // Install filesystem files (not store nodes) install_default_file(&cfg.data_dir, "instructions.md", include_str!("../defaults/instructions.md"))?; install_default_file(&cfg.data_dir, "on-consciousness.md", include_str!("../defaults/on-consciousness.md"))?; // Initialize store and seed default identity node if empty let mut store = store::Store::load()?; let count = store.init_from_markdown()?; for key in &cfg.core_nodes { if !store.nodes.contains_key(key) && key == "identity" { let default = include_str!("../defaults/identity.md"); store.upsert(key, default) .map_err(|e| format!("seed {}: {}", key, e))?; println!("Seeded {} in store", key); } } store.save()?; println!("Indexed {} memory units", count); // Install hooks daemon::install_hook()?; // Create config if none exists let config_path = std::env::var("POC_MEMORY_CONFIG") .map(std::path::PathBuf::from) .unwrap_or_else(|_| { 
std::path::PathBuf::from(std::env::var("HOME").unwrap()) .join(".config/poc-memory/config.jsonl") }); if !config_path.exists() { let config_dir = config_path.parent().unwrap(); std::fs::create_dir_all(config_dir) .map_err(|e| format!("create config dir: {}", e))?; let example = include_str!("../config.example.jsonl"); std::fs::write(&config_path, example) .map_err(|e| format!("write config: {}", e))?; println!("Created config at {} — edit with your name and context groups", config_path.display()); } println!("Done. Run `poc-memory load-context --stats` to verify."); Ok(()) } fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -> Result<(), String> { let path = data_dir.join(name); if !path.exists() { std::fs::write(&path, content) .map_err(|e| format!("write {}: {}", name, e))?; println!("Created {}", path.display()); } Ok(()) } fn cmd_fsck() -> Result<(), String> { let mut store = store::Store::load()?; // Check cache vs log consistency let log_store = store::Store::load_from_logs()?; let mut cache_issues = 0; // Nodes in logs but missing from cache for key in log_store.nodes.keys() { if !store.nodes.contains_key(key) { eprintln!("CACHE MISSING: '{}' exists in capnp log but not in cache", key); cache_issues += 1; } } // Nodes in cache but not in logs (phantom nodes) for key in store.nodes.keys() { if !log_store.nodes.contains_key(key) { eprintln!("CACHE PHANTOM: '{}' exists in cache but not in capnp log", key); cache_issues += 1; } } // Version mismatches for (key, log_node) in &log_store.nodes { if let Some(cache_node) = store.nodes.get(key) { if cache_node.version != log_node.version { eprintln!("CACHE STALE: '{}' cache v{} vs log v{}", key, cache_node.version, log_node.version); cache_issues += 1; } } } if cache_issues > 0 { eprintln!("{} cache inconsistencies found — rebuilding from logs", cache_issues); store = log_store; store.save().map_err(|e| format!("rebuild save: {}", e))?; } // Check node-key consistency let mut issues = 0; 
for (key, node) in &store.nodes { if key != &node.key { eprintln!("MISMATCH: map key '{}' vs node.key '{}'", key, node.key); issues += 1; } } // Check edge endpoints let mut dangling = 0; for rel in &store.relations { if rel.deleted { continue; } if !store.nodes.contains_key(&rel.source_key) { eprintln!("DANGLING: edge source '{}'", rel.source_key); dangling += 1; } if !store.nodes.contains_key(&rel.target_key) { eprintln!("DANGLING: edge target '{}'", rel.target_key); dangling += 1; } } // Prune orphan edges let mut to_tombstone = Vec::new(); for rel in &store.relations { if rel.deleted { continue; } if !store.nodes.contains_key(&rel.source_key) || !store.nodes.contains_key(&rel.target_key) { let mut tombstone = rel.clone(); tombstone.deleted = true; tombstone.version += 1; to_tombstone.push(tombstone); } } if !to_tombstone.is_empty() { let count = to_tombstone.len(); store.append_relations(&to_tombstone)?; for t in &to_tombstone { if let Some(r) = store.relations.iter_mut().find(|r| r.uuid == t.uuid) { r.deleted = true; r.version = t.version; } } store.save()?; eprintln!("Pruned {} orphan edges", count); } let g = store.build_graph(); println!("fsck: {} nodes, {} edges, {} issues, {} dangling, {} cache", store.nodes.len(), g.edge_count(), issues, dangling, cache_issues); Ok(()) } fn cmd_dedup(apply: bool) -> Result<(), String> { use std::collections::{HashMap, HashSet}; let mut store = store::Store::load()?; let duplicates = store.find_duplicates()?; if duplicates.is_empty() { println!("No duplicate keys found."); return Ok(()); } // Count edges per UUID let mut edges_by_uuid: HashMap<[u8; 16], usize> = HashMap::new(); for rel in &store.relations { if rel.deleted { continue; } *edges_by_uuid.entry(rel.source).or_default() += 1; *edges_by_uuid.entry(rel.target).or_default() += 1; } let mut identical_groups = Vec::new(); let mut diverged_groups = Vec::new(); for (key, mut nodes) in duplicates { // Sort by version descending so highest-version is first 
nodes.sort_by(|a, b| b.version.cmp(&a.version)); // Check if all copies have identical content let all_same = nodes.windows(2).all(|w| w[0].content == w[1].content); let info: Vec<_> = nodes.iter().map(|n| { let edge_count = edges_by_uuid.get(&n.uuid).copied().unwrap_or(0); (n.clone(), edge_count) }).collect(); if all_same { identical_groups.push((key, info)); } else { diverged_groups.push((key, info)); } } // Report println!("=== Duplicate key report ===\n"); println!("{} identical groups, {} diverged groups\n", identical_groups.len(), diverged_groups.len()); if !identical_groups.is_empty() { println!("── Identical (safe to auto-merge) ──"); for (key, copies) in &identical_groups { let total_edges: usize = copies.iter().map(|c| c.1).sum(); println!(" {} ({} copies, {} total edges)", key, copies.len(), total_edges); for (node, edges) in copies { let uuid_hex = node.uuid.iter().map(|b| format!("{:02x}", b)).collect::(); println!(" v{} uuid={}.. edges={}", node.version, &uuid_hex[..8], edges); } } println!(); } if !diverged_groups.is_empty() { println!("── Diverged (need review) ──"); for (key, copies) in &diverged_groups { let total_edges: usize = copies.iter().map(|c| c.1).sum(); println!(" {} ({} copies, {} total edges)", key, copies.len(), total_edges); for (node, edges) in copies { let uuid_hex = node.uuid.iter().map(|b| format!("{:02x}", b)).collect::(); let preview: String = node.content.chars().take(80).collect(); println!(" v{} uuid={}.. edges={} | {}{}", node.version, &uuid_hex[..8], edges, preview, if node.content.len() > 80 { "..." } else { "" }); } } println!(); } if !apply { let total_dupes: usize = identical_groups.iter().chain(diverged_groups.iter()) .map(|(_, copies)| copies.len() - 1) .sum(); println!("Dry run: {} duplicate nodes would be merged. 
Use --apply to execute.", total_dupes); return Ok(()); } // Merge all groups: identical + diverged // For diverged: keep the copy with most edges (it's the one that got // woven into the graph — the version that lived). Fall back to highest version. let all_groups: Vec<_> = identical_groups.into_iter() .chain(diverged_groups.into_iter()) .collect(); let mut merged = 0usize; let mut edges_redirected = 0usize; let mut edges_deduped = 0usize; for (_key, mut copies) in all_groups { // Pick survivor: most edges first, then highest version copies.sort_by(|a, b| b.1.cmp(&a.1).then(b.0.version.cmp(&a.0.version))); let survivor_uuid = copies[0].0.uuid; let doomed_uuids: Vec<[u8; 16]> = copies[1..].iter().map(|c| c.0.uuid).collect(); // Redirect edges from doomed UUIDs to survivor let mut updated_rels = Vec::new(); for rel in &mut store.relations { if rel.deleted { continue; } let mut changed = false; if doomed_uuids.contains(&rel.source) { rel.source = survivor_uuid; changed = true; } if doomed_uuids.contains(&rel.target) { rel.target = survivor_uuid; changed = true; } if changed { rel.version += 1; updated_rels.push(rel.clone()); edges_redirected += 1; } } // Dedup edges: same (source, target, rel_type) → keep highest strength let mut seen: HashSet<([u8; 16], [u8; 16], String)> = HashSet::new(); let mut to_tombstone_rels = Vec::new(); // Sort by strength descending so we keep the strongest let mut rels_with_idx: Vec<(usize, &store::Relation)> = store.relations.iter() .enumerate() .filter(|(_, r)| !r.deleted && (r.source == survivor_uuid || r.target == survivor_uuid)) .collect(); rels_with_idx.sort_by(|a, b| b.1.strength.total_cmp(&a.1.strength)); for (idx, rel) in &rels_with_idx { let edge_key = (rel.source, rel.target, format!("{:?}", rel.rel_type)); if !seen.insert(edge_key) { to_tombstone_rels.push(*idx); edges_deduped += 1; } } for &idx in &to_tombstone_rels { store.relations[idx].deleted = true; store.relations[idx].version += 1; 
updated_rels.push(store.relations[idx].clone()); } // Tombstone doomed nodes let mut tombstones = Vec::new(); for (doomed_node, _) in &copies[1..] { let mut t = doomed_node.clone(); t.deleted = true; t.version += 1; tombstones.push(t); } store.append_nodes(&tombstones)?; if !updated_rels.is_empty() { store.append_relations(&updated_rels)?; } for uuid in &doomed_uuids { store.uuid_to_key.remove(uuid); } merged += doomed_uuids.len(); } // Remove tombstoned relations from cache store.relations.retain(|r| !r.deleted); store.save()?; println!("Merged {} duplicates, redirected {} edges, deduped {} duplicate edges", merged, edges_redirected, edges_deduped); Ok(()) } fn cmd_health() -> Result<(), String> { let store = store::Store::load()?; let g = store.build_graph(); let report = graph::health_report(&g, &store); print!("{}", report); Ok(()) } fn cmd_status() -> Result<(), String> { // If stdout is a tty and daemon is running, launch TUI if std::io::IsTerminal::is_terminal(&std::io::stdout()) { // Try TUI first — falls back if daemon not running match tui::run_tui() { Ok(()) => return Ok(()), Err(_) => {} // fall through to text output } } let store = store::Store::load()?; let g = store.build_graph(); let mut type_counts = std::collections::HashMap::new(); for node in store.nodes.values() { *type_counts.entry(format!("{:?}", node.node_type)).or_insert(0usize) += 1; } let mut types: Vec<_> = type_counts.iter().collect(); types.sort_by_key(|(_, c)| std::cmp::Reverse(**c)); println!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()); print!("Types:"); for (t, c) in &types { let label = match t.as_str() { "Semantic" => "semantic", "EpisodicSession" | "EpisodicDaily" | "EpisodicWeekly" | "EpisodicMonthly" => "episodic", _ => t, }; print!(" {}={}", label, c); } println!(); println!("Graph edges: {} Communities: {}", g.edge_count(), g.community_count()); Ok(()) } fn cmd_graph() -> Result<(), String> { let store = store::Store::load()?; let g = 
store.build_graph(); println!("Graph: {} nodes, {} edges, {} communities", g.nodes().len(), g.edge_count(), g.community_count()); println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}", g.small_world_sigma(), g.degree_power_law_exponent(), g.degree_gini(), g.avg_clustering_coefficient()); Ok(()) } fn cmd_used(key: &[String]) -> Result<(), String> { if key.is_empty() { return Err("used requires a key".into()); } let key = key.join(" "); let mut store = store::Store::load()?; let resolved = store.resolve_key(&key)?; store.mark_used(&resolved); store.save()?; println!("Marked '{}' as used", resolved); Ok(()) } fn cmd_wrong(key: &str, context: &[String]) -> Result<(), String> { let ctx = if context.is_empty() { None } else { Some(context.join(" ")) }; let mut store = store::Store::load()?; let resolved = store.resolve_key(key)?; store.mark_wrong(&resolved, ctx.as_deref()); store.save()?; println!("Marked '{}' as wrong", resolved); Ok(()) } fn cmd_gap(description: &[String]) -> Result<(), String> { if description.is_empty() { return Err("gap requires a description".into()); } let desc = description.join(" "); let mut store = store::Store::load()?; store.record_gap(&desc); store.save()?; println!("Recorded gap: {}", desc); Ok(()) } fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> { let mut store = store::Store::load()?; let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh); println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})", orphans, links, min_deg, links_per, sim_thresh); Ok(()) } fn cmd_cap_degree(max_deg: usize) -> Result<(), String> { let mut store = store::Store::load()?; let (hubs, pruned) = store.cap_degree(max_deg)?; store.save()?; println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg); Ok(()) } fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option) -> Result<(), String> { let store = store::Store::load()?; if 
// cmd_consolidate_batch (continued): the `if` opened on the previous line is
// an `if let` — with an agent name, print that agent's prompt; otherwise emit
// the generic consolidation batch.
let Some(agent_name) = agent {
    let batch = agents::prompts::agent_prompt(&store, &agent_name, count)?;
    println!("{}", batch.prompt);
    Ok(())
} else {
    agents::prompts::consolidation_batch(&store, count, auto)
}
}

/// Print the 20 most recent retrieval-log events, newest first,
/// with each event's result keys indented beneath it.
fn cmd_log() -> Result<(), String> {
    let store = store::Store::load()?;
    for event in store.retrieval_log.iter().rev().take(20) {
        println!("[{}] q=\"{}\" → {} results", event.timestamp, event.query, event.results.len());
        for r in &event.results {
            println!(" {}", r);
        }
    }
    Ok(())
}

/// Dump the tunable retrieval/decay parameters.
fn cmd_params() -> Result<(), String> {
    let store = store::Store::load()?;
    println!("decay_factor: {}", store.params.decay_factor);
    println!("use_boost: {}", store.params.use_boost);
    println!("prune_threshold: {}", store.params.prune_threshold);
    println!("edge_decay: {}", store.params.edge_decay);
    println!("max_hops: {}", store.params.max_hops);
    println!("min_activation: {}", store.params.min_activation);
    Ok(())
}

/// Show a node's neighbors (via the query engine) with strength and
/// clustering-coefficient columns.
fn cmd_link(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("link requires a key".into());
    }
    let key = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();
    println!("Neighbors of '{}':", resolved);
    query::run_query(&store, &g, &format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
}

/// Print the spaced-repetition replay queue (top `count` items).
/// NOTE: the println! format string continues on the next source line.
fn cmd_replay_queue(count: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let queue = neuro::replay_queue(&store, count);
    println!("Replay queue ({} items):", queue.len());
    for (i, item) in queue.iter().enumerate() { println!(" {:2}. 
[{:.3}] {:>10} {} (interval={}d, emotion={:.1}, spectral={:.1})", i + 1, item.priority, item.classification, item.key, item.interval_days, item.emotion, item.outlier_score); } Ok(()) }

/// Print the session-level consolidation plan.
fn cmd_consolidate_session() -> Result<(), String> {
    let store = store::Store::load()?;
    let plan = neuro::consolidation_plan(&store);
    println!("{}", neuro::format_plan(&plan));
    Ok(())
}

/// Run the full consolidation pipeline on a mutable store.
fn cmd_consolidate_full() -> Result<(), String> {
    let mut store = store::Store::load()?;
    consolidate::consolidate_full(&mut store)
}

/// Triangle closure: add lateral links between similar neighbors of hubs.
fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
    println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}", min_degree, sim_threshold, max_per_hub);
    let mut store = store::Store::load()?;
    let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
    // NOTE(review): no store.save() here — presumably neuro::triangle_close
    // persists its own changes (same pattern as link_orphans); confirm.
    println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
    Ok(())
}

/// Print the daily health-check report (report supplies its own newlines).
fn cmd_daily_check() -> Result<(), String> {
    let store = store::Store::load()?;
    let report = neuro::daily_check(&store);
    print!("{}", report);
    Ok(())
}

/// Manually add a Link relation between two nodes (body continues on the
/// next source line with the creation of the relation).
fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let source = store.resolve_key(source)?;
    let target = store.resolve_key(target)?;
    let reason = reason.join(" ");
    // Refine target to the best-matching section of the resolved node
    let source_content = store.nodes.get(&source)
        .map(|n| n.content.as_str()).unwrap_or("");
    let target = neuro::refine_target(&store, source_content, &target);
    // Find UUIDs (resolved keys must exist as nodes)
    let source_uuid = store.nodes.get(&source)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("source not found: {}", source))?;
    let target_uuid = store.nodes.get(&target)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("target not found: {}", target))?;
    // Check for an existing live link in either direction
    let exists = store.relations.iter().any(|r| !r.deleted &&
        ((r.source_key == source && r.target_key == target) ||
         (r.source_key == target && r.target_key == source)));
    if exists {
// cmd_link_add (continued): either report the duplicate, or create and
// persist the new Link relation.
println!("Link already exists: {} ↔ {}", source, target);
    return Ok(());
}
    let rel = store::new_relation(
        source_uuid,
        target_uuid,
        store::RelationType::Link,
        0.8,
        &source,
        &target,
    );
    store.add_relation(rel)?;
    store.save()?;
    println!("Linked: {} → {} ({})", source, target, reason);
    Ok(())
}

/// Report the structural impact of linking two nodes: endpoint degrees,
/// hub/community relationship, clustering-coefficient and Gini deltas,
/// plus a one-line assessment.
fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
    let store = store::Store::load()?;
    let source = store.resolve_key(source)?;
    let target = store.resolve_key(target)?;
    let graph = store.build_graph();
    let report = graph.link_impact(&source, &target);
    // Build every metric line first, then emit them in one pass.
    let lines = [
        format!("Link impact: {} → {}", source, target),
        format!(" Source degree: {} Target degree: {}", report.source_deg, report.target_deg),
        format!(" Hub link: {} Same community: {}", report.is_hub_link, report.same_community),
        format!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", report.delta_cc_source, report.delta_cc_target),
        format!(" ΔGini: {:+.6}", report.delta_gini),
        format!(" Assessment: {}", report.assessment),
    ];
    for line in &lines {
        println!("{}", line);
    }
    Ok(())
}

/// Apply links from a single agent result JSON file.
/// Returns (links_applied, errors).
fn apply_agent_file(
    store: &mut store::Store,
    data: &serde_json::Value,
) -> (usize, usize) {
    // The payload may wrap the result under "agent_result"; otherwise `data`
    // itself is the result. (The original built an always-Some Option and
    // threaded `.and_then` through every access; `unwrap_or` expresses the
    // same fallback directly.)
    let agent_result = data.get("agent_result").unwrap_or(data);
    // No "links" array means nothing to do.
    let links = match agent_result.get("links").and_then(|l| l.as_array()) {
        Some(l) => l,
        None => return (0, 0),
    };
    // Text of the journal entry the agent analyzed; used to locate the
    // source node for every link below.
    let entry_text = data.get("entry_text")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    // Echo the transcript line range the agent looked at, when provided.
    if let (Some(start), Some(end)) = (
        agent_result.get("source_start").and_then(|v| v.as_u64()),
        agent_result.get("source_end").and_then(|v| v.as_u64()),
    ) {
        println!(" Source: L{}-L{}", start, end);
    }
    let mut applied = 0;
    let mut errors = 0;
    for link in links {
        let target = match link.get("target").and_then(|v| v.as_str()) {
            Some(t) => t,
            None => continue,
        };
        let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
        // "NOTE:"-prefixed targets are commentary, not link requests.
        if let Some(note) = target.strip_prefix("NOTE:") {
            println!(" NOTE: {} — {}", note, reason);
            continue;
        }
        let resolved = match store.resolve_key(target) {
            Ok(r) => r,
            Err(_) => {
                println!(" SKIP {} (not found in graph)", target);
                continue;
            }
        };
        let source_key = match store.find_journal_node(entry_text) {
            Some(k) => k,
            None => {
                println!(" SKIP {} (no matching journal node)", target);
                continue;
            }
        };
        // Resolve keys to UUIDs; silently skip races where a node vanished.
        let source_uuid = match store.nodes.get(&source_key) {
            Some(n) => n.uuid,
            None => continue,
        };
        let target_uuid = match store.nodes.get(&resolved) {
            Some(n) => n.uuid,
            None => continue,
        };
        let rel = store::new_relation(
            source_uuid,
            target_uuid,
            store::RelationType::Link,
            0.5,
            &source_key,
            &resolved,
        );
        if let Err(e) = store.add_relation(rel) {
            eprintln!(" Error adding relation: {}", e);
            errors += 1;
        } else {
            println!(" LINK {} → {} ({})", source_key, resolved, reason);
            applied += 1;
        }
    }
    // Relations are added but not saved here — the caller batches the save.
    (applied, errors)
}

/// Apply all pending agent-result JSON files from the agent-results dir
/// (body continues on the next source line).
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
    let results_dir = store::memory_dir().join("agent-results");
    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }
    let mut store = store::Store::load()?;
    let mut
applied = 0;
    let mut errors = 0;
    // Collect the pending .json result files in deterministic (path) order.
    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());
    for entry in &files {
        let path = entry.path();
        // Unreadable or unparsable files are counted as errors and skipped.
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => { eprintln!(" Skip {}: {}", path.display(), e); errors += 1; continue; }
        };
        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => { eprintln!(" Skip {}: parse error: {}", path.display(), e); errors += 1; continue; }
        };
        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        let (a, e) = apply_agent_file(&mut store, &data);
        applied += a;
        errors += e;
        // NOTE(review): processed files are archived to done/ only when
        // process_all is false — presumably the "all" mode intentionally
        // leaves files in place for reprocessing; confirm intent.
        if !process_all {
            let done_dir = util::memory_subdir("agent-results/done")?;
            let dest = done_dir.join(path.file_name().unwrap());
            // Best-effort move; a failed rename is not fatal.
            std::fs::rename(&path, &dest).ok();
        }
    }
    // Persist only if something was actually linked.
    if applied > 0 { store.save()?; }
    println!("\nApplied {} links ({} errors, {} files processed)", applied, errors, files.len());
    Ok(())
}

/// Generate a digest at the requested level; missing dates default to now.
fn cmd_digest(level: DigestLevel) -> Result<(), String> {
    let mut store = store::Store::load()?;
    match level {
        DigestLevel::Auto => digest::digest_auto(&mut store),
        DigestLevel::Daily { date } => {
            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
            digest::generate(&mut store, "daily", &arg)
        }
        DigestLevel::Weekly { date } => {
            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
            digest::generate(&mut store, "weekly", &arg)
        }
        DigestLevel::Monthly { date } => {
            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
            digest::generate(&mut store, "monthly", &arg)
        }
    }
}

/// List links parsed from digest nodes; apply them when `do_apply` is set.
/// (Body continues on the next source line.)
fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
    let store = store::Store::load()?;
    let links = digest::parse_all_digest_links(&store);
    drop(store); // released here; a fresh store is loaded below when applying
    println!("Found {} unique links from digest nodes", links.len());
    if !do_apply { for (i,
link) in links.iter().enumerate() {
        println!(" {:3}. {} → {}", i + 1, link.source, link.target);
        if !link.reason.is_empty() {
            // Truncate long reasons for display. Take chars, not a byte
            // slice: reasons may contain multi-byte text (e.g. "—"), and
            // the original byte slice at an arbitrary index could panic
            // on a non-char boundary.
            let preview: String = link.reason.chars().take(80).collect();
            println!(" ({})", preview);
        }
    }
    println!("\nTo apply: poc-memory digest-links --apply");
    return Ok(());
}
    let mut store = store::Store::load()?;
    let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
    println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
    Ok(())
}

/// Enrich the journal node matching `entry_text` using the transcript at
/// `jsonl_path`, anchored at `grep_line`.
fn cmd_journal_enrich(jsonl_path: &str, entry_text: &str, grep_line: usize) -> Result<(), String> {
    if !std::path::Path::new(jsonl_path).is_file() {
        return Err(format!("JSONL not found: {}", jsonl_path));
    }
    let mut store = store::Store::load()?;
    enrich::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
}

/// Mine experience entries from a transcript; defaults to the most recently
/// modified transcript when no path is given. (The `String` type parameter
/// of `jsonl_path` was missing — a bare `Option` does not compile; restored
/// from the find_current_transcript() fallback.)
fn cmd_experience_mine(jsonl_path: Option<String>) -> Result<(), String> {
    let jsonl_path = match jsonl_path {
        Some(p) => p,
        None => find_current_transcript()
            .ok_or("no JSONL transcripts found")?,
    };
    if !std::path::Path::new(jsonl_path.as_str()).is_file() {
        return Err(format!("JSONL not found: {}", jsonl_path));
    }
    let mut store = store::Store::load()?;
    let count = enrich::experience_mine(&mut store, &jsonl_path, None)?;
    println!("Done: {} new entries mined.", count);
    Ok(())
}

/// Preview or apply the consolidation plan, optionally from a report file.
fn cmd_apply_consolidation(do_apply: bool, report_file: Option<&str>) -> Result<(), String> {
    let mut store = store::Store::load()?;
    consolidate::apply_consolidation(&mut store, do_apply, report_file)
}

/// With a key: preview (or with --apply, perform) redistribution of a
/// file-level hub's links onto its sections. With no key: list candidates.
/// (Body continues on the next source line.)
fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    if let Some(key) = key_arg {
        let resolved = store.resolve_key(key)?;
        let moves = neuro::differentiate_hub(&store, &resolved)
            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;
        // Group by target section for display. (The map's type arguments were
        // missing — a bare `BTreeMap>` does not compile; the key is the
        // cloned section name and the value type is left inferred.)
        let mut by_section: std::collections::BTreeMap<String, Vec<_>> =
            std::collections::BTreeMap::new();
        for mv in &moves {
by_section.entry(mv.to_section.clone()).or_default().push(mv);
    }
    println!("Hub '{}' — {} links to redistribute across {} sections\n", resolved, moves.len(), by_section.len());
    // Preview: show up to 5 candidate moves per target section.
    for (section, section_moves) in &by_section {
        println!(" {} ({} links):", section, section_moves.len());
        for mv in section_moves.iter().take(5) {
            println!(" [{:.3}] {} — {}", mv.similarity, mv.neighbor_key, mv.neighbor_snippet);
        }
        if section_moves.len() > 5 {
            println!(" ... and {} more", section_moves.len() - 5);
        }
    }
    if !do_apply {
        println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
        return Ok(());
    }
    let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
    store.save()?;
    println!("\nApplied: {} Skipped: {}", applied, skipped);
} else {
    // No key given: list hubs that are candidates for differentiation.
    let hubs = neuro::find_differentiable_hubs(&store);
    if hubs.is_empty() {
        println!("No file-level hubs with sections found above threshold");
        return Ok(());
    }
    println!("Differentiable hubs (file-level nodes with sections):\n");
    for (key, degree, sections) in &hubs {
        println!(" {:40} deg={:3} sections={}", key, degree, sections);
    }
    println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
}
Ok(())
}

/// Audit all links; the `apply` flag is forwarded to decide whether the
/// corrections are enacted.
fn cmd_link_audit(apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let stats = audit::link_audit(&mut store, apply)?;
    println!("\n{}", "=".repeat(60));
    println!("Link audit complete:");
    println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
        stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
    println!("{}", "=".repeat(60));
    Ok(())
}

/// Trace a node: show its metadata and content preview, then its neighbors
/// grouped by node type (continues on following source lines; the println!
/// format string below is continued on the next line).
fn cmd_trace(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("trace requires a key".into());
    }
    let key = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();
    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;
    // Display the node itself
    println!("=== {} 
===", resolved); println!("Type: {:?} Weight: {:.2}", node.node_type, node.weight); if !node.source_ref.is_empty() { println!("Source: {}", node.source_ref); } // Show content preview let preview = util::truncate(&node.content, 200, "..."); println!("\n{}\n", preview); // Walk neighbors, grouped by node type let neighbors = g.neighbors(&resolved); let mut episodic_session = Vec::new(); let mut episodic_daily = Vec::new(); let mut episodic_weekly = Vec::new(); let mut semantic = Vec::new(); for (n, strength) in &neighbors { if let Some(nnode) = store.nodes.get(n.as_str()) { let entry = (n.as_str(), *strength, nnode); match nnode.node_type { store::NodeType::EpisodicSession => episodic_session.push(entry), store::NodeType::EpisodicDaily => episodic_daily.push(entry), store::NodeType::EpisodicWeekly | store::NodeType::EpisodicMonthly => episodic_weekly.push(entry), store::NodeType::Semantic => semantic.push(entry), } } } if !episodic_weekly.is_empty() { println!("Weekly digests:"); for (k, s, n) in &episodic_weekly { let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80); println!(" [{:.2}] {} — {}", s, k, preview); } } if !episodic_daily.is_empty() { println!("Daily digests:"); for (k, s, n) in &episodic_daily { let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80); println!(" [{:.2}] {} — {}", s, k, preview); } } if !episodic_session.is_empty() { println!("Session entries:"); for (k, s, n) in &episodic_session { let preview = util::first_n_chars( n.content.lines() .find(|l| !l.is_empty() && !l.starts_with("