// poc-memory: graph-structured memory for AI assistants // // Authors: ProofOfConcept and Kent Overstreet // License: MIT OR Apache-2.0 // // Architecture: // nodes.capnp - append-only content node log // relations.capnp - append-only relation log // state.bin - derived KV cache (rebuilt from logs when stale) // // Graph algorithms: clustering coefficient, community detection (label // propagation), schema fit scoring, small-world metrics, consolidation // priority. Text similarity via BM25 with Porter stemming. // // Neuroscience-inspired: spaced repetition replay, emotional gating, // interference detection, schema assimilation, reconsolidation. use poc_memory::*; use clap::{Parser, Subcommand}; use std::process; /// Find the most recently modified .jsonl transcript in the Claude projects dir. fn find_current_transcript() -> Option { let projects = config::get().projects_dir.clone(); if !projects.exists() { return None; } let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None; if let Ok(dirs) = std::fs::read_dir(&projects) { for dir_entry in dirs.filter_map(|e| e.ok()) { if !dir_entry.path().is_dir() { continue; } if let Ok(files) = std::fs::read_dir(dir_entry.path()) { for f in files.filter_map(|e| e.ok()) { let p = f.path(); if p.extension().map(|x| x == "jsonl").unwrap_or(false) { if let Ok(meta) = p.metadata() { if let Ok(mtime) = meta.modified() { if newest.as_ref().is_none_or(|(t, _)| mtime > *t) { newest = Some((mtime, p)); } } } } } } } } newest.map(|(_, p)| p.to_string_lossy().to_string()) } #[derive(Parser)] #[command(name = "poc-memory", version = "0.4.0", about = "Graph-structured memory store")] struct Cli { #[command(subcommand)] command: Command, } #[derive(Subcommand)] enum Command { // ── Core (daily use) ────────────────────────────────────────────── /// Search memory (AND logic across terms) /// /// Pipeline: -p spread -p spectral,k=20 /// Default pipeline: spread Search { /// Search terms query: Vec, /// Algorithm pipeline 
stages (repeatable) #[arg(short, long = "pipeline")] pipeline: Vec, /// Show more results #[arg(long)] expand: bool, /// Show node content, not just keys #[arg(long)] full: bool, /// Show debug output for each pipeline stage #[arg(long)] debug: bool, /// Also match key components (e.g. "irc" matches "irc-access") #[arg(long)] fuzzy: bool, /// Also search node content (slow, use when graph search misses) #[arg(long)] content: bool, }, /// Output a node's content to stdout Render { /// Node key key: Vec, }, /// Upsert node content from stdin Write { /// Node key key: Vec, }, /// Show all stored versions of a node History { /// Show full content for every version #[arg(long)] full: bool, /// Node key key: Vec, }, /// Show most recent writes to the node log Tail { /// Number of entries (default: 20) #[arg(default_value_t = 20)] n: usize, /// Show full content #[arg(long)] full: bool, }, /// Summary of memory state Status, /// Query the memory graph #[command(after_long_help = "\ EXPRESSIONS: * all nodes key ~ 'pattern' regex match on node key content ~ 'phrase' regex match on node content degree > 15 numeric comparison on any field field = value exact match field != value not equal expr AND expr boolean AND expr OR expr boolean OR NOT expr negation neighbors('key') nodes linked to key neighbors('key') WHERE expr ... with filter on edges/nodes FIELDS: key, weight, content, degree, node_type, provenance, emotion, retrievals, uses, wrongs, created, clustering_coefficient (cc), community_id OPERATORS: > < >= <= = != ~(regex) PIPE STAGES: | sort FIELD [asc] sort (desc by default) | limit N cap results | select F,F,... 
output fields as TSV | count just show count | connectivity show graph structure between results FUNCTIONS: community('key') community id of a node degree('key') degree of a node EXAMPLES: key ~ 'inner-life' substring match on keys content ~ 'made love' full-text search content ~ 'made love' | connectivity find clusters among results (content ~ 'A' OR content ~ 'B') | connectivity degree > 15 | sort degree | limit 10 high-degree nodes key ~ 'journal' AND degree > 10 | count count matching nodes neighbors('identity') WHERE strength > 0.5 | sort strength * | sort weight asc | limit 20 lowest-weight nodes ")] Query { /// Query expression (e.g. "key ~ 'inner-life'") expr: Vec, }, /// Mark a memory as useful (boosts weight) Used { /// Node key key: Vec, }, /// Mark a memory as wrong/irrelevant Wrong { /// Node key key: String, /// Optional context context: Vec, }, /// Mark a search result as not relevant (weakens edges that led to it) #[command(name = "not-relevant")] NotRelevant { /// Node key that was not relevant key: String, }, /// Mark a node as not useful (weakens node weight, not edges) #[command(name = "not-useful")] NotUseful { /// Node key key: String, }, /// Record a gap in memory coverage Gap { /// Gap description description: Vec, }, // ── Node operations ─────────────────────────────────────────────── /// Node operations (delete, rename, list) #[command(subcommand)] Node(NodeCmd), // ── Journal ─────────────────────────────────────────────────────── /// Journal operations (write, tail, enrich) #[command(subcommand)] Journal(JournalCmd), // ── Graph ───────────────────────────────────────────────────────── /// Graph operations (link, audit, spectral) #[command(subcommand, name = "graph")] GraphCmd(GraphCmd), // ── Cursor (spatial memory) ────────────────────────────────────── /// Navigate the memory graph with a persistent cursor #[command(subcommand)] Cursor(CursorCmd), // ── Agents ──────────────────────────────────────────────────────── /// Agent and 
daemon operations #[command(subcommand)] Agent(AgentCmd), // ── Admin ───────────────────────────────────────────────────────── /// Admin operations (fsck, health, import, export) #[command(subcommand)] Admin(AdminCmd), } #[derive(Subcommand)] enum NodeCmd { /// Soft-delete a node Delete { /// Node key key: Vec, }, /// Rename a node key Rename { /// Old key old_key: String, /// New key new_key: String, }, /// List all node keys (one per line, optional glob) #[command(name = "list")] List { /// Glob pattern to filter keys pattern: Option, }, /// List all edges (tsv: source target strength type) Edges, /// Dump entire store as JSON #[command(name = "dump")] Dump, } #[derive(Subcommand)] enum CursorCmd { /// Show current cursor position with context Show, /// Set cursor to a node key Set { /// Node key key: Vec, }, /// Move cursor forward in time Forward, /// Move cursor backward in time Back, /// Move up the digest hierarchy (journal→daily→weekly→monthly) Up, /// Move down the digest hierarchy (to first child) Down, /// Clear the cursor Clear, } #[derive(Subcommand)] enum JournalCmd { /// Write a journal entry to the store Write { /// Entry text text: Vec, }, /// Show recent journal/digest entries Tail { /// Number of entries to show (default: 20) #[arg(default_value_t = 20)] n: usize, /// Show full content #[arg(long)] full: bool, /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly #[arg(long, default_value_t = 0)] level: u8, }, /// Enrich journal entry with conversation links Enrich { /// Path to JSONL transcript jsonl_path: String, /// Journal entry text to enrich entry_text: String, /// Grep line number for source location #[arg(default_value_t = 0)] grep_line: usize, }, } #[derive(Subcommand)] enum GraphCmd { /// Show neighbors of a node Link { /// Node key key: Vec, }, /// Add a link between two nodes #[command(name = "link-add")] LinkAdd { /// Source node key source: String, /// Target node key target: String, /// Optional reason reason: Vec, }, /// 
Simulate adding an edge, report topology impact #[command(name = "link-impact")] LinkImpact { /// Source node key source: String, /// Target node key target: String, }, /// Walk every link, send to Sonnet for quality review #[command(name = "link-audit")] LinkAudit { /// Apply changes (default: dry run) #[arg(long)] apply: bool, }, /// Link orphan nodes to similar neighbors #[command(name = "link-orphans")] LinkOrphans { /// Minimum degree to consider orphan (default: 2) #[arg(default_value_t = 2)] min_degree: usize, /// Links per orphan (default: 3) #[arg(default_value_t = 3)] links_per: usize, /// Similarity threshold (default: 0.15) #[arg(default_value_t = 0.15)] sim_threshold: f32, }, /// Close triangles: link similar neighbors of hubs #[command(name = "triangle-close")] TriangleClose { /// Minimum hub degree (default: 5) #[arg(default_value_t = 5)] min_degree: usize, /// Similarity threshold (default: 0.3) #[arg(default_value_t = 0.3)] sim_threshold: f32, /// Maximum links per hub (default: 10) #[arg(default_value_t = 10)] max_per_hub: usize, }, /// Cap node degree by pruning weak auto edges #[command(name = "cap-degree")] CapDegree { /// Maximum degree (default: 50) #[arg(default_value_t = 50)] max_degree: usize, }, /// Set link strengths from neighborhood overlap (Jaccard similarity) #[command(name = "normalize-strengths")] NormalizeStrengths { /// Apply changes (default: dry run) #[arg(long)] apply: bool, }, /// Redistribute hub links to section-level children Differentiate { /// Specific hub key (omit to list all differentiable hubs) key: Option, /// Apply the redistribution #[arg(long)] apply: bool, }, /// Walk temporal links: semantic ↔ episodic ↔ conversation Trace { /// Node key key: Vec, }, /// Detect potentially confusable memory pairs Interference { /// Similarity threshold (default: 0.4) #[arg(long, default_value_t = 0.4)] threshold: f32, }, /// Show graph structure overview Overview, /// Spectral decomposition of the memory graph Spectral { /// 
Number of eigenvectors (default: 30) #[arg(default_value_t = 30)] k: usize, }, /// Compute and save spectral embedding #[command(name = "spectral-save")] SpectralSave { /// Number of eigenvectors (default: 20) #[arg(default_value_t = 20)] k: usize, }, /// Find spectrally nearest nodes #[command(name = "spectral-neighbors")] SpectralNeighbors { /// Node key key: String, /// Number of neighbors (default: 15) #[arg(default_value_t = 15)] n: usize, }, /// Show nodes ranked by outlier/bridge score #[command(name = "spectral-positions")] SpectralPositions { /// Number of nodes to show (default: 30) #[arg(default_value_t = 30)] n: usize, }, /// Find spectrally close but unlinked pairs #[command(name = "spectral-suggest")] SpectralSuggest { /// Number of pairs (default: 20) #[arg(default_value_t = 20)] n: usize, }, /// Diagnose duplicate/overlapping nodes for a topic cluster Organize { /// Search term (matches node keys; also content unless --key-only) term: String, /// Similarity threshold for pair reporting (default: 0.4) #[arg(long, default_value_t = 0.4)] threshold: f32, /// Only match node keys, not content #[arg(long)] key_only: bool, /// Create anchor node for the search term and link to cluster #[arg(long)] anchor: bool, }, } #[derive(Subcommand)] enum DaemonCmd { /// Start the daemon (default) Start, /// Show daemon status Status, /// Show daemon log Log { /// Job name to filter by job: Option, /// Number of lines to show #[arg(long, default_value_t = 20)] lines: usize, }, /// Install systemd service Install, /// Trigger consolidation via daemon Consolidate, /// Run an agent via the daemon Run { /// Agent name (e.g. 
organize, replay, linker) #[arg(default_value = "replay")] agent: String, /// Batch size #[arg(default_value_t = 1)] count: usize, }, /// Interactive TUI Tui, } #[derive(Subcommand)] enum AgentCmd { /// Background job daemon #[command(subcommand)] Daemon(DaemonCmd), /// Run knowledge agents to convergence #[command(name = "knowledge-loop")] KnowledgeLoop { /// Maximum cycles before stopping #[arg(long, default_value_t = 20)] max_cycles: usize, /// Items per agent per cycle #[arg(long, default_value_t = 5)] batch_size: usize, /// Cycles to check for convergence #[arg(long, default_value_t = 5)] window: usize, /// Maximum inference depth #[arg(long, default_value_t = 4)] max_depth: i32, }, /// Run agent consolidation on priority nodes #[command(name = "consolidate-batch")] ConsolidateBatch { /// Number of nodes to consolidate #[arg(long, default_value_t = 5)] count: usize, /// Generate replay agent prompt automatically #[arg(long)] auto: bool, /// Generate prompt for a specific agent (replay, linker, separator, transfer, health) #[arg(long)] agent: Option, }, /// Analyze metrics, plan agent allocation #[command(name = "consolidate-session")] ConsolidateSession, /// Autonomous: plan → agents → apply → digests → links #[command(name = "consolidate-full")] ConsolidateFull, /// Import pending agent results into the graph #[command(name = "apply-agent")] ApplyAgent { /// Process all files without moving to done/ #[arg(long)] all: bool, }, /// Extract and apply actions from consolidation reports #[command(name = "apply-consolidation")] ApplyConsolidation { /// Apply actions (default: dry run) #[arg(long)] apply: bool, /// Read from specific report file #[arg(long)] report: Option, }, /// Generate episodic digests (daily, weekly, monthly, auto) Digest { /// Digest type: daily, weekly, monthly, auto #[command(subcommand)] level: DigestLevel, }, /// Parse and apply links from digest nodes #[command(name = "digest-links")] DigestLinks { /// Apply the links (default: dry run) 
#[arg(long)] apply: bool, }, /// Mine conversation for experiential moments to journal #[command(name = "experience-mine")] ExperienceMine { /// Path to JSONL transcript (default: most recent) jsonl_path: Option, }, /// Extract atomic facts from conversation transcripts #[command(name = "fact-mine")] FactMine { /// Path to JSONL transcript or directory (with --batch) path: String, /// Process all .jsonl files in directory #[arg(long)] batch: bool, /// Show chunks without calling model #[arg(long)] dry_run: bool, /// Write JSON to file (default: stdout) #[arg(long, short)] output: Option, /// Skip transcripts with fewer messages #[arg(long, default_value_t = 10)] min_messages: usize, }, /// Extract facts from a transcript and store directly #[command(name = "fact-mine-store")] FactMineStore { /// Path to JSONL transcript path: String, }, /// Show spaced repetition replay queue #[command(name = "replay-queue")] ReplayQueue { /// Number of items to show #[arg(long, default_value_t = 10)] count: usize, }, } #[derive(Subcommand)] enum AdminCmd { /// Scan markdown files, index all memory units Init, /// Report graph metrics (CC, communities, small-world) Health, /// Run consistency checks and repair Fsck, /// Find and merge duplicate nodes (same key, multiple UUIDs) Dedup { /// Apply the merge (default: dry run) #[arg(long)] apply: bool, }, /// Bulk rename: replace a character in all keys #[command(name = "bulk-rename")] BulkRename { /// Character to replace from: String, /// Replacement character to: String, /// Apply changes (default: dry run) #[arg(long)] apply: bool, }, /// Brief metrics check (for cron/notifications) #[command(name = "daily-check")] DailyCheck, /// Import markdown file(s) into the store Import { /// File paths files: Vec, }, /// Export store nodes to markdown file(s) Export { /// File keys to export (or --all) files: Vec, /// Export all file-level nodes #[arg(long)] all: bool, }, /// Output session-start context from the store #[command(name = 
"load-context")]
    LoadContext {
        /// Show word count statistics instead of content
        #[arg(long)]
        stats: bool,
    },
    /// Show recent retrieval log
    Log,
    /// Show current parameters
    Params,
    /// Bump daily lookup counter for keys
    #[command(name = "lookup-bump")]
    LookupBump {
        /// Node keys
        keys: Vec<String>,
    },
    /// Show daily lookup counts
    Lookups {
        /// Date (default: today)
        date: Option<String>,
    },
}

#[derive(Subcommand)]
enum DigestLevel {
    /// Generate daily digest
    Daily {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Generate weekly digest
    Weekly {
        /// Date or week label (default: current week)
        date: Option<String>,
    },
    /// Generate monthly digest
    Monthly {
        /// Month (YYYY-MM) or date (default: current month)
        date: Option<String>,
    },
    /// Generate all missing digests
    Auto,
}

/// Print help with subcommands expanded to show nested commands.
fn print_help() {
    use clap::CommandFactory;
    let cmd = Cli::command();
    println!("poc-memory - graph-structured memory store");
    // NOTE(review): the placeholders in this usage line were eaten by the
    // same angle-bracket stripping that removed the generics; "<command>
    // [<args>]" is the evident original — confirm wording.
    println!("usage: poc-memory <command> [<args>]\n");
    for sub in cmd.get_subcommands() {
        if sub.get_name() == "help" { continue }
        let children: Vec<_> = sub.get_subcommands()
            .filter(|c| c.get_name() != "help")
            .collect();
        if !children.is_empty() {
            for child in &children {
                let about = child.get_about().map(|s| s.to_string()).unwrap_or_default();
                let full = format!("{} {}", sub.get_name(), child.get_name());
                // Recurse one more level for daemon subcommands etc.
let grandchildren: Vec<_> = child.get_subcommands() .filter(|c| c.get_name() != "help") .collect(); if !grandchildren.is_empty() { for gc in grandchildren { let gc_about = gc.get_about().map(|s| s.to_string()).unwrap_or_default(); let gc_full = format!("{} {}", full, gc.get_name()); println!(" {:<34}{gc_about}", gc_full); } } else { println!(" {:<34}{about}", full); } } } else { let about = sub.get_about().map(|s| s.to_string()).unwrap_or_default(); println!(" {:<34}{about}", sub.get_name()); } } } fn main() { // Handle --help ourselves for expanded subcommand display let args: Vec = std::env::args().collect(); if args.len() <= 1 || args.iter().any(|a| a == "--help" || a == "-h") && args.len() == 2 { print_help(); return; } let cli = Cli::parse(); let result = match cli.command { // Core Command::Search { query, pipeline, expand, full, debug, fuzzy, content } => cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content), Command::Render { key } => cli::node::cmd_render(&key), Command::Write { key } => cli::node::cmd_write(&key), Command::History { full, key } => cli::node::cmd_history(&key, full), Command::Tail { n, full } => cmd_tail(n, full), Command::Status => cmd_status(), Command::Query { expr } => cmd_query(&expr), Command::Used { key } => cli::node::cmd_used(&key), Command::Wrong { key, context } => cli::node::cmd_wrong(&key, &context), Command::NotRelevant { key } => cli::node::cmd_not_relevant(&key), Command::NotUseful { key } => cli::node::cmd_not_useful(&key), Command::Gap { description } => cli::node::cmd_gap(&description), // Node Command::Node(sub) => match sub { NodeCmd::Delete { key } => cli::node::cmd_node_delete(&key), NodeCmd::Rename { old_key, new_key } => cli::node::cmd_node_rename(&old_key, &new_key), NodeCmd::List { pattern } => cli::node::cmd_list_keys(pattern.as_deref()), NodeCmd::Edges => cli::node::cmd_list_edges(), NodeCmd::Dump => cli::node::cmd_dump_json(), }, // Journal Command::Journal(sub) => match sub { JournalCmd::Write { 
text } => cmd_journal_write(&text),
            JournalCmd::Tail { n, full, level } => cmd_journal_tail(n, full, level),
            JournalCmd::Enrich { jsonl_path, entry_text, grep_line } =>
                cli::agent::cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
        },
        // Graph
        Command::GraphCmd(sub) => match sub {
            GraphCmd::Link { key } => cli::graph::cmd_link(&key),
            GraphCmd::LinkAdd { source, target, reason } =>
                cli::graph::cmd_link_add(&source, &target, &reason),
            GraphCmd::LinkImpact { source, target } =>
                cli::graph::cmd_link_impact(&source, &target),
            GraphCmd::LinkAudit { apply } => cli::graph::cmd_link_audit(apply),
            GraphCmd::LinkOrphans { min_degree, links_per, sim_threshold } =>
                cli::graph::cmd_link_orphans(min_degree, links_per, sim_threshold),
            GraphCmd::TriangleClose { min_degree, sim_threshold, max_per_hub } =>
                cli::graph::cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
            GraphCmd::CapDegree { max_degree } => cli::graph::cmd_cap_degree(max_degree),
            GraphCmd::NormalizeStrengths { apply } => cli::graph::cmd_normalize_strengths(apply),
            GraphCmd::Differentiate { key, apply } =>
                cli::graph::cmd_differentiate(key.as_deref(), apply),
            GraphCmd::Trace { key } => cli::graph::cmd_trace(&key),
            GraphCmd::Interference { threshold } => cli::graph::cmd_interference(threshold),
            GraphCmd::Overview => cli::graph::cmd_graph(),
            GraphCmd::Spectral { k } => cli::graph::cmd_spectral(k),
            GraphCmd::SpectralSave { k } => cli::graph::cmd_spectral_save(k),
            GraphCmd::SpectralNeighbors { key, n } => cli::graph::cmd_spectral_neighbors(&key, n),
            GraphCmd::SpectralPositions { n } => cli::graph::cmd_spectral_positions(n),
            GraphCmd::SpectralSuggest { n } => cli::graph::cmd_spectral_suggest(n),
            GraphCmd::Organize { term, threshold, key_only, anchor } =>
                cli::graph::cmd_organize(&term, threshold, key_only, anchor),
        },
        // Cursor
        Command::Cursor(sub) => cmd_cursor(sub),
        // Agent
        Command::Agent(sub) => match sub {
            AgentCmd::Daemon(sub) => cmd_daemon(sub),
            AgentCmd::KnowledgeLoop { max_cycles, batch_size, window,
max_depth } =>
                cli::agent::cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
            AgentCmd::ConsolidateBatch { count, auto, agent } =>
                cli::agent::cmd_consolidate_batch(count, auto, agent),
            AgentCmd::ConsolidateSession => cli::agent::cmd_consolidate_session(),
            AgentCmd::ConsolidateFull => cli::agent::cmd_consolidate_full(),
            AgentCmd::ApplyAgent { all } => cmd_apply_agent(all),
            AgentCmd::ApplyConsolidation { apply, report } =>
                cli::agent::cmd_apply_consolidation(apply, report.as_deref()),
            AgentCmd::Digest { level } => cmd_digest(level),
            AgentCmd::DigestLinks { apply } => cli::agent::cmd_digest_links(apply),
            AgentCmd::ExperienceMine { jsonl_path } => cmd_experience_mine(jsonl_path),
            AgentCmd::FactMine { path, batch, dry_run, output, min_messages } =>
                cli::agent::cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
            AgentCmd::FactMineStore { path } => cli::agent::cmd_fact_mine_store(&path),
            AgentCmd::ReplayQueue { count } => cli::agent::cmd_replay_queue(count),
        },
        // Admin
        Command::Admin(sub) => match sub {
            AdminCmd::Init => cli::admin::cmd_init(),
            AdminCmd::Health => cli::admin::cmd_health(),
            AdminCmd::Fsck => cli::admin::cmd_fsck(),
            AdminCmd::Dedup { apply } => cli::admin::cmd_dedup(apply),
            AdminCmd::BulkRename { from, to, apply } =>
                cli::admin::cmd_bulk_rename(&from, &to, apply),
            AdminCmd::DailyCheck => cli::admin::cmd_daily_check(),
            AdminCmd::Import { files } => cli::admin::cmd_import(&files),
            AdminCmd::Export { files, all } => cli::admin::cmd_export(&files, all),
            AdminCmd::LoadContext { stats } => cmd_load_context(stats),
            AdminCmd::Log => cmd_log(),
            AdminCmd::Params => cmd_params(),
            AdminCmd::LookupBump { keys } => cli::node::cmd_lookup_bump(&keys),
            AdminCmd::Lookups { date } => cli::node::cmd_lookups(date.as_deref()),
        },
    };
    if let Err(e) = result {
        eprintln!("Error: {}", e);
        process::exit(1);
    }
}

// ── Command implementations ─────────────────────────────────────────

fn cmd_search(terms: &[String], pipeline_args: &[String], expand: bool,
full: bool, debug: bool, fuzzy: bool, content: bool) -> Result<(), String> {
    use store::StoreView;
    use std::collections::BTreeMap;
    // Parse pipeline stages (unified: algorithms, filters, transforms, generators)
    let stages: Vec<search::Stage> = if pipeline_args.is_empty() {
        vec![search::Stage::Algorithm(search::AlgoStage::parse("spread").unwrap())]
    } else {
        pipeline_args.iter()
            .map(|a| search::Stage::parse(a))
            .collect::<Result<Vec<_>, _>>()?
    };
    // Check if pipeline needs full Store (has filters/transforms/generators)
    let needs_store = stages.iter().any(|s| !matches!(s, search::Stage::Algorithm(_)));
    // Check if pipeline starts with a generator (doesn't need seed terms)
    let has_generator = stages.first().map(|s| matches!(s, search::Stage::Generator(_))).unwrap_or(false);
    if terms.is_empty() && !has_generator {
        return Err("search requires terms or a generator stage (e.g. 'all')".into());
    }
    let query: String = terms.join(" ");
    if debug {
        let names: Vec<String> = stages.iter().map(|s| format!("{}", s)).collect();
        println!("[search] pipeline: {}", names.join(" → "));
    }
    let max_results = if expand { 15 } else { 5 };
    if needs_store {
        // Full Store path — needed for filter/transform/generator stages
        let store = store::Store::load()?;
        let graph = store.build_graph();
        let seeds = if has_generator {
            vec![] // generator will produce its own result set
        } else {
            // Element types are pinned by match_seeds_opts's signature.
            let terms_map: BTreeMap<_, _> = query.split_whitespace()
                .map(|t| (t.to_lowercase(), 1.0))
                .collect();
            let (seeds, _) = search::match_seeds_opts(&terms_map, &store, fuzzy, content);
            seeds
        };
        let raw = search::run_query(&stages, seeds, &graph, &store, debug, max_results);
        if raw.is_empty() {
            eprintln!("No results");
            return Ok(());
        }
        for (i, (key, score)) in raw.iter().enumerate().take(max_results) {
            let weight = store.nodes.get(key).map(|n| n.weight).unwrap_or(0.0);
            println!("{:2}. \
[{:.2}/{:.2}] {}", i + 1, score, weight, key); if full { if let Some(node) = store.nodes.get(key) { println!(); for line in node.content.lines() { println!(" {}", line); } println!(); } } } } else { // Fast MmapView path — algorithm-only pipeline let view = store::AnyView::load()?; let graph = graph::build_graph_fast(&view); let terms_map: BTreeMap = query.split_whitespace() .map(|t| (t.to_lowercase(), 1.0)) .collect(); let (seeds, direct_hits) = search::match_seeds_opts(&terms_map, &view, fuzzy, content); if seeds.is_empty() { eprintln!("No results for '{}'", query); return Ok(()); } if debug { println!("[search] {} seeds from query '{}'", seeds.len(), query); } // Extract AlgoStages from the unified stages let algo_stages: Vec<&search::AlgoStage> = stages.iter() .filter_map(|s| match s { search::Stage::Algorithm(a) => Some(a), _ => None, }) .collect(); let algo_owned: Vec = algo_stages.into_iter().cloned().collect(); let raw = search::run_pipeline(&algo_owned, seeds, &graph, &view, debug, max_results); let results: Vec = raw.into_iter() .map(|(key, activation)| { let is_direct = direct_hits.contains(&key); search::SearchResult { key, activation, is_direct, snippet: None } }) .collect(); if results.is_empty() { eprintln!("No results for '{}'", query); return Ok(()); } // Log retrieval store::Store::log_retrieval_static(&query, &results.iter().map(|r| r.key.clone()).collect::>()); let bump_keys: Vec<&str> = results.iter().take(max_results).map(|r| r.key.as_str()).collect(); let _ = lookups::bump_many(&bump_keys); for (i, r) in results.iter().enumerate().take(max_results) { let marker = if r.is_direct { "→" } else { " " }; let weight = view.node_weight(&r.key); println!("{}{:2}. 
[{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
            if full {
                if let Some(content) = view.node_content(&r.key) {
                    println!();
                    for line in content.lines() {
                        println!(" {}", line);
                    }
                    println!();
                }
            }
        }
    }
    Ok(())
}

/// Write `content` to `data_dir/name` only if the file does not already exist.
fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -> Result<(), String> {
    let path = data_dir.join(name);
    if !path.exists() {
        std::fs::write(&path, content)
            .map_err(|e| format!("write {}: {}", name, e))?;
        println!("Created {}", path.display());
    }
    Ok(())
}

/// Summarize store/graph state; prefers the TUI when stdout is a terminal.
fn cmd_status() -> Result<(), String> {
    // If stdout is a tty and daemon is running, launch TUI
    if std::io::IsTerminal::is_terminal(&std::io::stdout()) {
        // Try TUI first — falls back if daemon not running
        match tui::run_tui() {
            Ok(()) => return Ok(()),
            Err(_) => {} // fall through to text output
        }
    }
    let store = store::Store::load()?;
    let g = store.build_graph();
    // Count nodes per NodeType, keyed by the Debug rendering of the type.
    let mut type_counts = std::collections::HashMap::new();
    for node in store.nodes.values() {
        *type_counts.entry(format!("{:?}", node.node_type)).or_insert(0usize) += 1;
    }
    let mut types: Vec<_> = type_counts.iter().collect();
    types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
    println!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len());
    print!("Types:");
    for (t, c) in &types {
        // Collapse the episodic variants into one user-facing label.
        let label = match t.as_str() {
            "Semantic" => "semantic",
            "EpisodicSession" | "EpisodicDaily" | "EpisodicWeekly" | "EpisodicMonthly" => "episodic",
            _ => t,
        };
        print!(" {}={}", label, c);
    }
    println!();
    println!("Graph edges: {} Communities: {}", g.edge_count(), g.community_count());
    Ok(())
}

/// Print the 20 most recent retrieval-log events, newest first.
fn cmd_log() -> Result<(), String> {
    let store = store::Store::load()?;
    for event in store.retrieval_log.iter().rev().take(20) {
        println!("[{}] q=\"{}\" → {} results", event.timestamp, event.query, event.results.len());
        for r in &event.results {
            println!(" {}", r);
        }
    }
    Ok(())
}

/// Print the current tuning parameters.
fn cmd_params() -> Result<(), String> {
    let store = store::Store::load()?;
    println!("decay_factor: {}", store.params.decay_factor);
    println!("use_boost: {}",
store.params.use_boost);
    println!("prune_threshold: {}", store.params.prune_threshold);
    println!("edge_decay: {}", store.params.edge_decay);
    println!("max_hops: {}", store.params.max_hops);
    println!("min_activation: {}", store.params.min_activation);
    Ok(())
}

/// Apply links from a single agent result JSON file.
/// Returns (links_applied, errors).
fn apply_agent_file(
    store: &mut store::Store,
    data: &serde_json::Value,
) -> (usize, usize) {
    // Results may be nested under "agent_result" or sit at the top level.
    let agent_result = data.get("agent_result").or(Some(data));
    let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
        Some(l) => l,
        None => return (0, 0),
    };
    let entry_text = data.get("entry_text")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    if let (Some(start), Some(end)) = (
        agent_result.and_then(|r| r.get("source_start")).and_then(|v| v.as_u64()),
        agent_result.and_then(|r| r.get("source_end")).and_then(|v| v.as_u64()),
    ) {
        println!(" Source: L{}-L{}", start, end);
    }
    let mut applied = 0;
    let mut errors = 0;
    for link in links {
        let target = match link.get("target").and_then(|v| v.as_str()) {
            Some(t) => t,
            None => continue,
        };
        let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
        // "NOTE:" targets are informational only — print and skip.
        if let Some(note) = target.strip_prefix("NOTE:") {
            println!(" NOTE: {} — {}", note, reason);
            continue;
        }
        let resolved = match store.resolve_key(target) {
            Ok(r) => r,
            Err(_) => {
                println!(" SKIP {} (not found in graph)", target);
                continue;
            }
        };
        let source_key = match store.find_journal_node(entry_text) {
            Some(k) => k,
            None => {
                println!(" SKIP {} (no matching journal node)", target);
                continue;
            }
        };
        let source_uuid = match store.nodes.get(&source_key) {
            Some(n) => n.uuid,
            None => continue,
        };
        let target_uuid = match store.nodes.get(&resolved) {
            Some(n) => n.uuid,
            None => continue,
        };
        let rel = store::new_relation(
            source_uuid,
            target_uuid,
            store::RelationType::Link,
            0.5,
            &source_key,
            &resolved,
        );
        if let Err(e) = store.add_relation(rel) {
            eprintln!(" Error adding relation: {}", e);
            errors += 1;
        } else {
println!(" LINK {} → {} ({})", source_key, resolved, reason);
            applied += 1;
        }
    }
    (applied, errors)
}

/// Collect (key, content) pairs for one configured context group.
fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
    match group.source {
        config::ContextSource::Journal => {
            let mut entries = Vec::new();
            let now = store::now_epoch();
            let window: i64 = cfg.journal_days as i64 * 24 * 3600;
            let cutoff = now - window;
            let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
            // Best timestamp for a journal node: created_at if set, else a
            // date embedded in the key, else the raw timestamp field.
            let journal_ts = |n: &store::Node| -> i64 {
                if n.created_at > 0 {
                    return n.created_at;
                }
                if let Some(caps) = key_date_re.captures(&n.key) {
                    use chrono::{NaiveDate, TimeZone, Local};
                    if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
                        if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
                            return dt.timestamp();
                        }
                    }
                }
                n.timestamp
            };
            let mut journal_nodes: Vec<_> = store.nodes.values()
                .filter(|n| n.node_type == store::NodeType::EpisodicSession && journal_ts(n) >= cutoff)
                .collect();
            journal_nodes.sort_by_key(|n| journal_ts(n));
            // Keep only the newest `journal_max` entries (list is ascending).
            let max = cfg.journal_max;
            let skip = journal_nodes.len().saturating_sub(max);
            for node in journal_nodes.iter().skip(skip) {
                entries.push((node.key.clone(), node.content.clone()));
            }
            entries
        }
        config::ContextSource::File => {
            group.keys.iter().filter_map(|key| {
                let content = std::fs::read_to_string(cfg.data_dir.join(key)).ok()?;
                if content.trim().is_empty() { return None; }
                Some((key.clone(), content.trim().to_string()))
            }).collect()
        }
        config::ContextSource::Store => {
            group.keys.iter().filter_map(|key| {
                let content = store.render_file(key)?;
                if content.trim().is_empty() { return None; }
                Some((key.clone(), content.trim().to_string()))
            }).collect()
        }
    }
}

/// Emit the session-start context block (or, with `stats`, a word-count table).
fn cmd_load_context(stats: bool) -> Result<(), String> {
    let cfg = config::get();
    let store = store::Store::load()?;
    if stats {
        let mut total_words = 0;
        let mut total_entries = 0;
        println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS");
        println!("{}",
"-".repeat(42)); for group in &cfg.context_groups { let entries = get_group_content(group, &store, cfg); let words: usize = entries.iter() .map(|(_, c)| c.split_whitespace().count()) .sum(); let count = entries.len(); println!("{:<25} {:>6} {:>8}", group.label, count, words); total_words += words; total_entries += count; } println!("{}", "-".repeat(42)); println!("{:<25} {:>6} {:>8}", "TOTAL", total_entries, total_words); return Ok(()); } println!("=== MEMORY SYSTEM ({}) ===", cfg.assistant_name); println!(); for group in &cfg.context_groups { let entries = get_group_content(group, &store, cfg); if !entries.is_empty() && group.source == config::ContextSource::Journal { println!("--- recent journal entries ({}/{}) ---", entries.len(), cfg.journal_max); } for (key, content) in entries { if group.source == config::ContextSource::Journal { println!("## {}", key); } else { println!("--- {} ({}) ---", key, group.label); } println!("{}\n", content); } } println!("=== END MEMORY LOAD ==="); Ok(()) } fn cmd_cursor(sub: CursorCmd) -> Result<(), String> { match sub { CursorCmd::Show => { let store = store::Store::load()?; cursor::show(&store) } CursorCmd::Set { key } => { if key.is_empty() { return Err("cursor set requires a key".into()); } let key = key.join(" "); let store = store::Store::load()?; let bare = store::strip_md_suffix(&key); if !store.nodes.contains_key(&bare) { return Err(format!("Node not found: {}", bare)); } cursor::set(&bare)?; cursor::show(&store) } CursorCmd::Forward => { let store = store::Store::load()?; cursor::move_temporal(&store, true) } CursorCmd::Back => { let store = store::Store::load()?; cursor::move_temporal(&store, false) } CursorCmd::Up => { let store = store::Store::load()?; cursor::move_up(&store) } CursorCmd::Down => { let store = store::Store::load()?; cursor::move_down(&store) } CursorCmd::Clear => cursor::clear(), } } fn cmd_tail(n: usize, full: bool) -> Result<(), String> { let path = store::nodes_path(); if !path.exists() { return 
Err("No node log found".into()); } use std::io::BufReader; let file = std::fs::File::open(&path) .map_err(|e| format!("open {}: {}", path.display(), e))?; let mut reader = BufReader::new(file); // Read all entries, keep last N let mut entries: Vec = Vec::new(); while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) { let log = msg.get_root::() .map_err(|e| format!("read log: {}", e))?; for node_reader in log.get_nodes() .map_err(|e| format!("get nodes: {}", e))? { let node = store::Node::from_capnp_migrate(node_reader)?; entries.push(node); } } let start = entries.len().saturating_sub(n); for node in &entries[start..] { let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 { store::format_datetime(node.timestamp) } else { format!("(raw:{})", node.timestamp) }; let del = if node.deleted { " [DELETED]" } else { "" }; if full { eprintln!("--- {} (v{}) {} via {} w={:.3}{} ---", node.key, node.version, ts, node.provenance, node.weight, del); eprintln!("{}\n", node.content); } else { let preview = util::first_n_chars(&node.content, 100).replace('\n', "\\n"); eprintln!(" {} v{} w={:.2}{}", ts, node.version, node.weight, del); eprintln!(" {} via {}", node.key, node.provenance); if !preview.is_empty() { eprintln!(" {}", preview); } eprintln!(); } } Ok(()) } fn cmd_journal_write(text: &[String]) -> Result<(), String> { if text.is_empty() { return Err("journal-write requires text".into()); } let text = text.join(" "); let timestamp = store::format_datetime(store::now_epoch()); let slug: String = text.split_whitespace() .take(6) .map(|w| w.to_lowercase() .chars().filter(|c| c.is_alphanumeric() || *c == '-') .collect::()) .collect::>() .join("-"); let slug = if slug.len() > 50 { &slug[..50] } else { &slug }; let key = format!("journal#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug); let content = format!("## {}\n\n{}", timestamp, text); let source_ref = find_current_transcript(); let mut store = 
store::Store::load()?; let mut node = store::new_node(&key, &content); node.node_type = store::NodeType::EpisodicSession; node.provenance = "journal".to_string(); if let Some(src) = source_ref { node.source_ref = src; } store.upsert_node(node)?; store.save()?; let word_count = text.split_whitespace().count(); println!("Appended entry at {} ({} words)", timestamp, word_count); Ok(()) } fn cmd_journal_tail(n: usize, full: bool, level: u8) -> Result<(), String> { let store = store::Store::load()?; if level == 0 { journal_tail_entries(&store, n, full) } else { let node_type = match level { 1 => store::NodeType::EpisodicDaily, 2 => store::NodeType::EpisodicWeekly, _ => store::NodeType::EpisodicMonthly, }; journal_tail_digests(&store, node_type, n, full) } } fn journal_tail_entries(store: &store::Store, n: usize, full: bool) -> Result<(), String> { let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap(); let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap(); let normalize_date = |s: &str| -> String { let s = s.replace('t', "T"); if s.len() >= 16 { format!("{}T{}", &s[..10], s[11..].replace('-', ":")) } else { s } }; let extract_sort = |node: &store::Node| -> (i64, String) { if node.created_at > 0 { return (node.created_at, store::format_datetime(node.created_at)); } if let Some(caps) = key_date_re.captures(&node.key) { return (0, normalize_date(&caps[1])); } if let Some(caps) = date_re.captures(&node.content) { return (0, normalize_date(&caps[1])); } (node.timestamp, store::format_datetime(node.timestamp)) }; let mut journal: Vec<_> = store.nodes.values() .filter(|node| node.node_type == store::NodeType::EpisodicSession) .collect(); journal.sort_by(|a, b| { let (at, as_) = extract_sort(a); let (bt, bs) = extract_sort(b); if at > 0 && bt > 0 { at.cmp(&bt) } else { as_.cmp(&bs) } }); let skip = if journal.len() > n { journal.len() - n } else { 0 }; for node in journal.iter().skip(skip) { let (_, ts) = 
extract_sort(node);
        let title = extract_title(&node.content);
        if full {
            println!("--- [{}] {} ---\n{}\n", ts, title, node.content);
        } else {
            println!("[{}] {}", ts, title);
        }
    }
    Ok(())
}

/// Print the last `n` consolidated digests of one node type, oldest first.
/// With `full`, each digest's whole content is shown under its key.
fn journal_tail_digests(store: &store::Store, node_type: store::NodeType, n: usize, full: bool) -> Result<(), String> {
    let mut selected: Vec<_> = store.nodes.values()
        .filter(|node| node.node_type == node_type)
        .collect();
    // Chronological where both timestamps exist; key order otherwise.
    selected.sort_by(|a, b| match (a.timestamp > 0, b.timestamp > 0) {
        (true, true) => a.timestamp.cmp(&b.timestamp),
        _ => a.key.cmp(&b.key),
    });
    let start = selected.len().saturating_sub(n);
    for node in &selected[start..] {
        let label = &node.key;
        let title = extract_title(&node.content);
        if full {
            println!("--- [{}] {} ---\n{}\n", label, title, node.content);
        } else {
            println!("[{}] {}", label, title);
        }
    }
    Ok(())
}

/// Derive a one-line title from node content: the first non-blank line that
/// is not a short timestamp, stripped of markdown heading markers and
/// truncated to 67 chars. Falls back to "(untitled)".
fn extract_title(content: &str) -> String {
    let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
    content
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        // Skip lines that are just a timestamp (short and date-shaped).
        .find(|line| !(date_re.is_match(line) && line.len() < 25))
        .map(|line| {
            if let Some(h) = line.strip_prefix("## ") {
                h.to_string()
            } else if let Some(h) = line.strip_prefix("# ") {
                h.to_string()
            } else {
                util::truncate(line, 67, "...")
            }
        })
        .unwrap_or_else(|| String::from("(untitled)"))
}

/// Run a graph query expression against the loaded store.
fn cmd_query(expr: &[String]) -> Result<(), String> {
    let query_str = match expr {
        [] => return Err("query requires an expression (try: poc-memory query --help)".into()),
        parts => parts.join(" "),
    };
    let store = store::Store::load()?;
    let graph = store.build_graph();
    query::run_query(&store, &graph, &query_str)
}

/// Apply all pending agent result files from the agent-results directory.
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
    let results_dir = store::memory_dir().join("agent-results");
    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }
    let mut store = store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;
    let mut files: Vec<_> =
std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    // Deterministic processing order.
    files.sort_by_key(|e| e.path());
    for entry in &files {
        let path = entry.path();
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!(" Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!(" Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        let (a, e) = apply_agent_file(&mut store, &data);
        applied += a;
        errors += e;
        // NOTE(review): result files are archived to done/ only when
        // process_all is FALSE — confirm this inversion is intentional
        // (it looks like --all may be meant to leave files in place).
        if !process_all {
            let done_dir = crate::util::memory_subdir("agent-results/done")?;
            let dest = done_dir.join(path.file_name().unwrap());
            // Best-effort move; a failed rename just leaves the file pending.
            std::fs::rename(&path, &dest).ok();
        }
    }
    if applied > 0 {
        store.save()?;
    }
    println!("\nApplied {} links ({} errors, {} files processed)", applied, errors, files.len());
    Ok(())
}

/// Dispatch digest generation. `Auto` lets the digest module decide what is
/// due; the explicit levels default their date argument to today.
fn cmd_digest(level: DigestLevel) -> Result<(), String> {
    let mut store = store::Store::load()?;
    // The three explicit levels share identical handling — only the level
    // string differs — so collapse them to one code path.
    let (kind, date) = match level {
        DigestLevel::Auto => return digest::digest_auto(&mut store),
        DigestLevel::Daily { date } => ("daily", date),
        DigestLevel::Weekly { date } => ("weekly", date),
        DigestLevel::Monthly { date } => ("monthly", date),
    };
    let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
    digest::generate(&mut store, kind, &arg)
}

/// Mine experience entries from a Claude transcript JSONL file, defaulting
/// to the most recently modified transcript when no path is given.
// The parameter's generic argument was stripped in extraction; restored to
// Option<String>, grounded by `Some(p) => p` being used as a String
// (`.as_str()`) and find_current_transcript() returning Option<String>.
fn cmd_experience_mine(jsonl_path: Option<String>) -> Result<(), String> {
    let jsonl_path = match jsonl_path {
        Some(p) => p,
        None => find_current_transcript()
            .ok_or("no JSONL transcripts found")?,
    };
    if !std::path::Path::new(jsonl_path.as_str()).is_file() {
        return Err(format!("JSONL not found: {}", jsonl_path));
    }
    let mut store = store::Store::load()?;
    let count = crate::enrich::experience_mine(&mut store, &jsonl_path, None)?;
    println!("Done: {} new entries mined.", count);
    Ok(())
}

/// Dispatch daemon control subcommands to the daemon/tui modules.
fn cmd_daemon(sub: DaemonCmd) -> Result<(), String> {
    match sub {
        DaemonCmd::Start => daemon::run_daemon(),
        DaemonCmd::Status => daemon::show_status(),
        DaemonCmd::Log { job, lines } => daemon::show_log(job.as_deref(), lines),
        DaemonCmd::Install => daemon::install_service(),
        DaemonCmd::Consolidate => daemon::rpc_consolidate(),
        DaemonCmd::Run { agent, count } => daemon::rpc_run_agent(&agent, count),
        DaemonCmd::Tui => tui::run_tui(),
    }
}