diff --git a/poc-memory/src/main.rs b/poc-memory/src/main.rs
index 1475468..c206220 100644
--- a/poc-memory/src/main.rs
+++ b/poc-memory/src/main.rs
@@ -1,4 +1,3 @@
-
 // poc-memory: graph-structured memory for AI assistants
 //
 // Authors: ProofOfConcept and Kent Overstreet
@@ -18,6 +17,8 @@
 use poc_memory::*;
 
+use clap::{Parser, Subcommand};
+
 use std::env;
 use std::process;
 
@@ -27,7 +28,6 @@ fn find_current_transcript() -> Option<String> {
     let projects = std::path::Path::new(&home).join(".claude/projects");
     if !projects.exists() { return None; }
 
-    // Search all project dirs for the most recent .jsonl
     let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
     if let Ok(dirs) = std::fs::read_dir(&projects) {
         for dir_entry in dirs.filter_map(|e| e.ok()) {
@@ -51,78 +51,522 @@ fn find_current_transcript() -> Option<String> {
     newest.map(|(_, p)| p.to_string_lossy().to_string())
 }
 
-fn main() {
-    let args: Vec<String> = env::args().collect();
-    if args.len() < 2 {
-        usage();
-        process::exit(1);
-    }
+#[derive(Parser)]
+#[command(name = "poc-memory", version = "0.4.0", about = "Graph-structured memory store")]
+struct Cli {
+    #[command(subcommand)]
+    command: Command,
+}
 
-    let result = match args[1].as_str() {
-        "search" => cmd_search(&args[2..]),
-        "init" => cmd_init(),
-        "migrate" => cmd_migrate(),
-        "health" => cmd_health(),
-        "fsck" => cmd_fsck(),
-        "status" => cmd_status(),
-        "graph" => cmd_graph(),
-        "used" => cmd_used(&args[2..]),
-        "wrong" => cmd_wrong(&args[2..]),
-        "gap" => cmd_gap(&args[2..]),
-        "cap-degree" => cmd_cap_degree(&args[2..]),
-        "link-orphans" => cmd_link_orphans(&args[2..]),
-        "consolidate-batch" => cmd_consolidate_batch(&args[2..]),
-        "log" => cmd_log(),
-        "params" => cmd_params(),
-        "link" => cmd_link(&args[2..]),
-        "replay-queue" => cmd_replay_queue(&args[2..]),
-        "interference" => cmd_interference(&args[2..]),
-        "link-add" => cmd_link_add(&args[2..]),
-        "link-impact" => cmd_link_impact(&args[2..]),
-        "consolidate-session" => cmd_consolidate_session(),
-        "consolidate-full" => cmd_consolidate_full(),
-        "triangle-close" => cmd_triangle_close(&args[2..]),
-        "daily-check" => cmd_daily_check(),
-        "apply-agent" => cmd_apply_agent(&args[2..]),
-        "digest" => cmd_digest(&args[2..]),
-        "digest-links" => cmd_digest_links(&args[2..]),
-        "journal-enrich" => cmd_journal_enrich(&args[2..]),
-        "experience-mine" => cmd_experience_mine(&args[2..]),
-        "apply-consolidation" => cmd_apply_consolidation(&args[2..]),
-        "differentiate" => cmd_differentiate(&args[2..]),
-        "link-audit" => cmd_link_audit(&args[2..]),
-        "trace" => cmd_trace(&args[2..]),
-        "spectral" => cmd_spectral(&args[2..]),
-        "spectral-save" => cmd_spectral_save(&args[2..]),
-        "spectral-neighbors" => cmd_spectral_neighbors(&args[2..]),
-        "spectral-positions" => cmd_spectral_positions(&args[2..]),
-        "spectral-suggest" => cmd_spectral_suggest(&args[2..]),
-        "list-keys" => cmd_list_keys(&args[2..]),
-        "list-edges" => cmd_list_edges(),
-        "dump-json" => cmd_dump_json(),
-        "node-delete" => cmd_node_delete(&args[2..]),
-        "node-rename" => cmd_node_rename(&args[2..]),
-        "journal-ts-migrate" => cmd_journal_ts_migrate(),
-        "load-context" => cmd_load_context(&args[2..]),
-        "render" => cmd_render(&args[2..]),
-        "history" => cmd_history(&args[2..]),
-        "write" => cmd_write(&args[2..]),
-        "import" => cmd_import(&args[2..]),
-        "export" => cmd_export(&args[2..]),
-        "journal-write" => cmd_journal_write(&args[2..]),
-        "journal-tail" => cmd_journal_tail(&args[2..]),
-        "query" => cmd_query(&args[2..]),
-        "lookup-bump" => cmd_lookup_bump(&args[2..]),
-        "lookups" => cmd_lookups(&args[2..]),
-        "daemon" => cmd_daemon(&args[2..]),
-        "knowledge-loop" => cmd_knowledge_loop(&args[2..]),
-        "fact-mine" => cmd_fact_mine(&args[2..]),
-        "fact-mine-store" => cmd_fact_mine_store(&args[2..]),
-        _ => {
-            eprintln!("Unknown command: {}", args[1]);
-            usage();
-            process::exit(1);
-        }
+#[derive(Subcommand)]
+enum Command {
+    /// Search memory (AND logic across terms)
+    Search {
+        /// Search terms
+        query: Vec<String>,
+        /// Show 15 results instead of 5, plus spectral neighbors
+        #[arg(long)]
+        expand: bool,
+    },
+    /// Scan markdown files, index all memory units
+    Init,
+    /// Migrate from old weights.json system
+    Migrate,
+    /// Report graph metrics (CC, communities, small-world)
+    Health,
+    /// Run consistency checks and repair
+    Fsck,
+    /// Summary of memory state
+    Status,
+    /// Show graph structure overview
+    Graph,
+    /// Mark a memory as useful (boosts weight)
+    Used {
+        /// Node key
+        key: Vec<String>,
+    },
+    /// Mark a memory as wrong/irrelevant
+    Wrong {
+        /// Node key
+        key: String,
+        /// Optional context
+        context: Vec<String>,
+    },
+    /// Record a gap in memory coverage
+    Gap {
+        /// Gap description
+        description: Vec<String>,
+    },
+    /// Cap node degree by pruning weak auto edges
+    #[command(name = "cap-degree")]
+    CapDegree {
+        /// Maximum degree (default: 50)
+        #[arg(default_value_t = 50)]
+        max_degree: usize,
+    },
+    /// Link orphan nodes to similar neighbors
+    #[command(name = "link-orphans")]
+    LinkOrphans {
+        /// Minimum degree to consider orphan (default: 2)
+        #[arg(default_value_t = 2)]
+        min_degree: usize,
+        /// Links per orphan (default: 3)
+        #[arg(default_value_t = 3)]
+        links_per: usize,
+        /// Similarity threshold (default: 0.15)
+        #[arg(default_value_t = 0.15)]
+        sim_threshold: f32,
+    },
+    /// Run agent consolidation on priority nodes
+    #[command(name = "consolidate-batch")]
+    ConsolidateBatch {
+        /// Number of nodes to consolidate
+        #[arg(long, default_value_t = 5)]
+        count: usize,
+        /// Generate replay agent prompt automatically
+        #[arg(long)]
+        auto: bool,
+        /// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
+        #[arg(long)]
+        agent: Option<String>,
+    },
+    /// Show recent retrieval log
+    Log,
+    /// Show current parameters
+    Params,
+    /// Show neighbors of a node
+    Link {
+        /// Node key
+        key: Vec<String>,
+    },
+    /// Show spaced repetition replay queue
+    #[command(name = "replay-queue")]
+    ReplayQueue {
+        /// Number of items to show
+        #[arg(long, default_value_t = 10)]
+        count: usize,
+    },
+    /// Detect potentially confusable memory pairs
+    Interference {
+        /// Similarity threshold (default: 0.4)
+        #[arg(long, default_value_t = 0.4)]
+        threshold: f32,
+    },
+    /// Add a link between two nodes
+    #[command(name = "link-add")]
+    LinkAdd {
+        /// Source node key
+        source: String,
+        /// Target node key
+        target: String,
+        /// Optional reason
+        reason: Vec<String>,
+    },
+    /// Simulate adding an edge, report topology impact
+    #[command(name = "link-impact")]
+    LinkImpact {
+        /// Source node key
+        source: String,
+        /// Target node key
+        target: String,
+    },
+    /// Analyze metrics, plan agent allocation
+    #[command(name = "consolidate-session")]
+    ConsolidateSession,
+    /// Autonomous: plan → agents → apply → digests → links
+    #[command(name = "consolidate-full")]
+    ConsolidateFull,
+    /// Close triangles: link similar neighbors of hubs
+    #[command(name = "triangle-close")]
+    TriangleClose {
+        /// Minimum hub degree (default: 5)
+        #[arg(default_value_t = 5)]
+        min_degree: usize,
+        /// Similarity threshold (default: 0.3)
+        #[arg(default_value_t = 0.3)]
+        sim_threshold: f32,
+        /// Maximum links per hub (default: 10)
+        #[arg(default_value_t = 10)]
+        max_per_hub: usize,
+    },
+    /// Brief metrics check (for cron/notifications)
+    #[command(name = "daily-check")]
+    DailyCheck,
+    /// Import pending agent results into the graph
+    #[command(name = "apply-agent")]
+    ApplyAgent {
+        /// Process all files without moving to done/
+        #[arg(long)]
+        all: bool,
+    },
+    /// Generate episodic digests (daily, weekly, monthly, auto)
+    Digest {
+        /// Digest type: daily, weekly, monthly, auto
+        #[command(subcommand)]
+        level: DigestLevel,
+    },
+    /// Parse and apply links from digest nodes
+    #[command(name = "digest-links")]
+    DigestLinks {
+        /// Apply the links (default: dry run)
+        #[arg(long)]
+        apply: bool,
+    },
+    /// Enrich journal entry with conversation links
+    #[command(name = "journal-enrich")]
+    JournalEnrich {
+        /// Path to JSONL transcript
+        jsonl_path: String,
+        /// Journal entry text to enrich
+        entry_text: String,
+        /// Grep line number for source location
+        #[arg(default_value_t = 0)]
+        grep_line: usize,
+    },
+    /// Mine conversation for experiential moments to journal
+    #[command(name = "experience-mine")]
+    ExperienceMine {
+        /// Path to JSONL transcript (default: most recent)
+        jsonl_path: Option<String>,
+    },
+    /// Extract and apply actions from consolidation reports
+    #[command(name = "apply-consolidation")]
+    ApplyConsolidation {
+        /// Apply actions (default: dry run)
+        #[arg(long)]
+        apply: bool,
+        /// Read from specific report file
+        #[arg(long)]
+        report: Option<String>,
+    },
+    /// Redistribute hub links to section-level children
+    Differentiate {
+        /// Specific hub key (omit to list all differentiable hubs)
+        key: Option<String>,
+        /// Apply the redistribution
+        #[arg(long)]
+        apply: bool,
+    },
+    /// Walk every link, send to Sonnet for quality review
+    #[command(name = "link-audit")]
+    LinkAudit {
+        /// Apply changes (default: dry run)
+        #[arg(long)]
+        apply: bool,
+    },
+    /// Walk temporal links: semantic ↔ episodic ↔ conversation
+    Trace {
+        /// Node key
+        key: Vec<String>,
+    },
+    /// Spectral decomposition of the memory graph
+    Spectral {
+        /// Number of eigenvectors (default: 30)
+        #[arg(default_value_t = 30)]
+        k: usize,
+    },
+    /// Compute and save spectral embedding
+    #[command(name = "spectral-save")]
+    SpectralSave {
+        /// Number of eigenvectors (default: 20)
+        #[arg(default_value_t = 20)]
+        k: usize,
+    },
+    /// Find spectrally nearest nodes
+    #[command(name = "spectral-neighbors")]
+    SpectralNeighbors {
+        /// Node key
+        key: String,
+        /// Number of neighbors (default: 15)
+        #[arg(default_value_t = 15)]
+        n: usize,
+    },
+    /// Show nodes ranked by outlier/bridge score
+    #[command(name = "spectral-positions")]
+    SpectralPositions {
+        /// Number of nodes to show (default: 30)
+        #[arg(default_value_t = 30)]
+        n: usize,
+    },
+    /// Find spectrally close but unlinked pairs
+    #[command(name = "spectral-suggest")]
+    SpectralSuggest {
+        /// Number of pairs (default: 20)
+        #[arg(default_value_t = 20)]
+        n: usize,
+    },
+    /// List all node keys (one per line, optional glob)
+    #[command(name = "list-keys")]
+    ListKeys {
+        /// Glob pattern to filter keys
+        pattern: Option<String>,
+    },
+    /// List all edges (tsv: source target strength type)
+    #[command(name = "list-edges")]
+    ListEdges,
+    /// Dump entire store as JSON
+    #[command(name = "dump-json")]
+    DumpJson,
+    /// Soft-delete a node
+    #[command(name = "node-delete")]
+    NodeDelete {
+        /// Node key
+        key: Vec<String>,
+    },
+    /// Rename a node key
+    #[command(name = "node-rename")]
+    NodeRename {
+        /// Old key
+        old_key: String,
+        /// New key
+        new_key: String,
+    },
+    /// Populate created_at for nodes missing timestamps
+    #[command(name = "journal-ts-migrate")]
+    JournalTsMigrate,
+    /// Output session-start context from the store
+    #[command(name = "load-context")]
+    LoadContext {
+        /// Show word count statistics instead of content
+        #[arg(long)]
+        stats: bool,
+    },
+    /// Output a node's content to stdout
+    Render {
+        /// Node key
+        key: Vec<String>,
+    },
+    /// Show all stored versions of a node
+    History {
+        /// Show full content for every version
+        #[arg(long)]
+        full: bool,
+        /// Node key
+        key: Vec<String>,
+    },
+    /// Upsert node content from stdin
+    Write {
+        /// Node key
+        key: Vec<String>,
+    },
+    /// Import markdown file(s) into the store
+    Import {
+        /// File paths
+        files: Vec<String>,
+    },
+    /// Export store nodes to markdown file(s)
+    Export {
+        /// File keys to export (or --all)
+        files: Vec<String>,
+        /// Export all file-level nodes
+        #[arg(long)]
+        all: bool,
+    },
+    /// Write a journal entry to the store
+    #[command(name = "journal-write")]
+    JournalWrite {
+        /// Entry text
+        text: Vec<String>,
+    },
+    /// Show recent journal/digest entries
+    #[command(name = "journal-tail")]
+    JournalTail {
+        /// Number of entries to show (default: 20)
+        #[arg(default_value_t = 20)]
+        n: usize,
+        /// Show full content
+        #[arg(long)]
+        full: bool,
+        /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
+        #[arg(long, default_value_t = 0)]
+        level: u8,
+    },
+    /// Query the memory graph
+    Query {
+        /// Query expression (e.g. "degree > 15 | sort degree | limit 10")
+        expr: Vec<String>,
+    },
+    /// Bump daily lookup counter for keys
+    #[command(name = "lookup-bump")]
+    LookupBump {
+        /// Node keys
+        keys: Vec<String>,
+    },
+    /// Show daily lookup counts
+    Lookups {
+        /// Date (default: today)
+        date: Option<String>,
+    },
+    /// Background job daemon
+    Daemon {
+        /// Subcommand: status, log, install
+        sub: Option<String>,
+        /// Additional arguments
+        args: Vec<String>,
+    },
+    /// Run knowledge agents to convergence
+    #[command(name = "knowledge-loop")]
+    KnowledgeLoop {
+        /// Maximum cycles before stopping
+        #[arg(long, default_value_t = 20)]
+        max_cycles: usize,
+        /// Items per agent per cycle
+        #[arg(long, default_value_t = 5)]
+        batch_size: usize,
+        /// Cycles to check for convergence
+        #[arg(long, default_value_t = 5)]
+        window: usize,
+        /// Maximum inference depth
+        #[arg(long, default_value_t = 4)]
+        max_depth: i32,
+    },
+    /// Extract atomic facts from conversation transcripts
+    #[command(name = "fact-mine")]
+    FactMine {
+        /// Path to JSONL transcript or directory (with --batch)
+        path: String,
+        /// Process all .jsonl files in directory
+        #[arg(long)]
+        batch: bool,
+        /// Show chunks without calling model
+        #[arg(long)]
+        dry_run: bool,
+        /// Write JSON to file (default: stdout)
+        #[arg(long, short)]
+        output: Option<String>,
+        /// Skip transcripts with fewer messages
+        #[arg(long, default_value_t = 10)]
+        min_messages: usize,
+    },
+    /// Extract facts from a transcript and store directly
+    #[command(name = "fact-mine-store")]
+    FactMineStore {
+        /// Path to JSONL transcript
+        path: String,
+    },
+}
+
+#[derive(Subcommand)]
+enum DigestLevel {
+    /// Generate daily digest
+    Daily {
+        /// Date (default: today)
+        date: Option<String>,
+    },
+    /// Generate weekly digest
+    Weekly {
+        /// Date or week label (default: current week)
+        date: Option<String>,
+    },
+    /// Generate monthly digest
+    Monthly {
+        /// Month (YYYY-MM) or date (default: current month)
+        date: Option<String>,
+    },
+    /// Generate all missing digests
+    Auto,
+}
+
+fn main() {
+    let cli = Cli::parse();
+
+    let result = match cli.command {
+        Command::Search { query, expand }
+            => cmd_search(&query, expand),
+        Command::Init => cmd_init(),
+        Command::Migrate => cmd_migrate(),
+        Command::Health => cmd_health(),
+        Command::Fsck => cmd_fsck(),
+        Command::Status => cmd_status(),
+        Command::Graph => cmd_graph(),
+        Command::Used { key } => cmd_used(&key),
+        Command::Wrong { key, context }
+            => cmd_wrong(&key, &context),
+        Command::Gap { description }
+            => cmd_gap(&description),
+        Command::CapDegree { max_degree }
+            => cmd_cap_degree(max_degree),
+        Command::LinkOrphans { min_degree, links_per, sim_threshold }
+            => cmd_link_orphans(min_degree, links_per, sim_threshold),
+        Command::ConsolidateBatch { count, auto, agent }
+            => cmd_consolidate_batch(count, auto, agent),
+        Command::Log => cmd_log(),
+        Command::Params => cmd_params(),
+        Command::Link { key } => cmd_link(&key),
+        Command::ReplayQueue { count }
+            => cmd_replay_queue(count),
+        Command::Interference { threshold }
+            => cmd_interference(threshold),
+        Command::LinkAdd { source, target, reason }
+            => cmd_link_add(&source, &target, &reason),
+        Command::LinkImpact { source, target }
+            => cmd_link_impact(&source, &target),
+        Command::ConsolidateSession => cmd_consolidate_session(),
+        Command::ConsolidateFull => cmd_consolidate_full(),
+        Command::TriangleClose { min_degree, sim_threshold, max_per_hub }
+            => cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
+        Command::DailyCheck => cmd_daily_check(),
+        Command::ApplyAgent { all }
+            => cmd_apply_agent(all),
+        Command::Digest { level } => cmd_digest(level),
+        Command::DigestLinks { apply }
+            => cmd_digest_links(apply),
+        Command::JournalEnrich { jsonl_path, entry_text, grep_line }
+            => cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
+        Command::ExperienceMine { jsonl_path }
+            => cmd_experience_mine(jsonl_path),
+        Command::ApplyConsolidation { apply, report }
+            => cmd_apply_consolidation(apply, report.as_deref()),
+        Command::Differentiate { key, apply }
+            => cmd_differentiate(key.as_deref(), apply),
+        Command::LinkAudit { apply }
+            => cmd_link_audit(apply),
+        Command::Trace { key } => cmd_trace(&key),
+        Command::Spectral { k } => cmd_spectral(k),
+        Command::SpectralSave { k } => cmd_spectral_save(k),
+        Command::SpectralNeighbors { key, n }
+            => cmd_spectral_neighbors(&key, n),
+        Command::SpectralPositions { n }
+            => cmd_spectral_positions(n),
+        Command::SpectralSuggest { n }
+            => cmd_spectral_suggest(n),
+        Command::ListKeys { pattern }
+            => cmd_list_keys(pattern.as_deref()),
+        Command::ListEdges => cmd_list_edges(),
+        Command::DumpJson => cmd_dump_json(),
+        Command::NodeDelete { key }
+            => cmd_node_delete(&key),
+        Command::NodeRename { old_key, new_key }
+            => cmd_node_rename(&old_key, &new_key),
+        Command::JournalTsMigrate => cmd_journal_ts_migrate(),
+        Command::LoadContext { stats }
+            => cmd_load_context(stats),
+        Command::Render { key } => cmd_render(&key),
+        Command::History { full, key }
+            => cmd_history(&key, full),
+        Command::Write { key } => cmd_write(&key),
+        Command::Import { files }
+            => cmd_import(&files),
+        Command::Export { files, all }
+            => cmd_export(&files, all),
+        Command::JournalWrite { text }
+            => cmd_journal_write(&text),
+        Command::JournalTail { n, full, level }
+            => cmd_journal_tail(n, full, level),
+        Command::Query { expr }
+            => cmd_query(&expr),
+        Command::LookupBump { keys }
+            => cmd_lookup_bump(&keys),
+        Command::Lookups { date }
+            => cmd_lookups(date.as_deref()),
+        Command::Daemon { sub, args }
+            => cmd_daemon(sub.as_deref(), &args),
+        Command::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
+            => cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
+        Command::FactMine { path, batch, dry_run, output, min_messages }
+            => cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
+        Command::FactMineStore { path }
+            => cmd_fact_mine_store(&path),
     };
 
     if let Err(e) = result {
@@ -131,114 +575,16 @@ fn main() {
     }
 }
 
-fn usage() {
-    eprintln!("poc-memory v0.4.0 — graph-structured memory store
+// ── Command implementations ─────────────────────────────────────────
 
-Commands:
-  search QUERY [--expand]      Search memory (AND logic)
-  init                         Scan markdown files, index all memory units
-  migrate                      Migrate from old weights.json system
-  health                       Report graph metrics (CC, communities, small-world)
-  status                       Summary of memory state
-  graph                        Show graph structure overview
-  used KEY                     Mark a memory as useful (boosts weight)
-  wrong KEY [CONTEXT]          Mark a memory as wrong/irrelevant
-  gap DESCRIPTION              Record a gap in memory coverage
-  consolidate-batch [--count N] [--auto]
-                               Run agent consolidation on priority nodes
-  log                          Show recent retrieval log
-  params                       Show current parameters
-  link N                       Interactive graph walk from search result N
-  replay-queue [--count N]     Show spaced repetition replay queue
-  interference [--threshold F]
-                               Detect potentially confusable memory pairs
-  link-add SOURCE TARGET [REASON]
-                               Add a link between two nodes
-  link-impact SOURCE TARGET    Simulate adding an edge, report topology impact
-  consolidate-session          Analyze metrics, plan agent allocation
-  consolidate-full             Autonomous: plan → agents → apply → digests → links
-  triangle-close [DEG] [SIM] [MAX]
-                               Close triangles: link similar neighbors of hubs
-  daily-check                  Brief metrics check (for cron/notifications)
-  apply-agent [--all]          Import pending agent results into the graph
-  digest daily [DATE]          Generate daily episodic digest (default: today)
-  digest weekly [DATE]         Generate weekly digest (any date in target week)
-  digest monthly [YYYY-MM]     Generate monthly digest (default: current month)
-  digest auto                  Generate all missing digests (daily→weekly→monthly)
-  digest-links [--apply]       Parse and apply links from digest files
-  journal-enrich JSONL TEXT [LINE]
-                               Enrich journal entry with conversation links
-  experience-mine [JSONL]      Mine conversation for experiential moments to journal
-  apply-consolidation [--apply] [--report FILE]
-                               Extract and apply actions from consolidation reports
-  differentiate [KEY] [--apply]
-                               Redistribute hub links to section-level children
-  link-audit [--apply]         Walk every link, send to Sonnet for quality review
-  trace KEY                    Walk temporal links: semantic ↔ episodic ↔ conversation
-  spectral [K]                 Spectral decomposition of the memory graph (default K=30)
-  spectral-save [K]            Compute and save spectral embedding (default K=20)
-  spectral-neighbors KEY [N]   Find N spectrally nearest nodes (default N=15)
-  spectral-positions [N]       Show N nodes ranked by outlier/bridge score (default 30)
-  spectral-suggest [N]         Find N spectrally close but unlinked pairs (default 20)
-  list-keys [PATTERN]          List all node keys (one per line, optional glob)
-  list-edges                   List all edges (tsv: source target strength type)
-  dump-json                    Dump entire store as JSON
-  node-delete KEY              Soft-delete a node (appends deleted version to log)
-  node-rename OLD NEW          Rename a node key; updates edge debug strings atomically
-  journal-ts-migrate           Populate created_at for nodes missing it
-  load-context                 Output session-start context from the store
-  render KEY                   Output a node's content to stdout
-  history [--full] KEY         Show all stored versions of a node
-                               --full shows complete content for every version
-  write KEY                    Upsert node content from stdin
-  import FILE [FILE...]        Import markdown file(s) into the store
-  export [FILE|--all]          Export store nodes to markdown file(s)
-  journal-write TEXT           Write a journal entry to the store
-  journal-tail [N] [--level=L] [--full]
-                               Show last N entries (default 20, --full for content)
-                               --level: 0/journal, 1/daily, 2/weekly, 3/monthly
-  query 'EXPR | stages'        Query the memory graph
-                               Stages: sort F [asc], limit N, select F,F, count
-                               Ex: \"degree > 15 | sort degree | limit 10\"
-  lookup-bump KEY [KEY...]     Bump daily lookup counter for keys (fast, no store)
-  lookups [DATE]               Show daily lookup counts (default: today)
-  daemon                       Start background job daemon
-  daemon status                Show daemon status
-  daemon log [JOB] [N]         Show last N log lines (default 50, optional job filter)
-  knowledge-loop [OPTIONS]     Run knowledge agents to convergence
-                               --max-cycles N (default 20)
-                               --batch-size N (default 5)
-                               --window N (default 5)
-                               --max-depth N (default 4)
-  fact-mine JSONL [OPTIONS]    Extract atomic facts from conversation transcripts
-  fact-mine --batch DIR        Mine all .jsonl files in directory");
-}
-
-fn cmd_search(args: &[String]) -> Result<(), String> {
+fn cmd_search(terms: &[String], expand: bool) -> Result<(), String> {
     use store::StoreView;
 
-    if args.is_empty() || args.iter().any(|a| a == "--help" || a == "-h") {
-        println!("Usage: poc-memory search QUERY [QUERY...] [OPTIONS]
-
-Search memory using spreading activation (AND logic across terms).
-
-Options:
-  --expand    Show 15 results instead of 5, plus spectral neighbors
-  --help, -h  Show this help
-
-Examples:
-  poc-memory search irc connection
-  poc-memory search bcachefs transaction --expand");
-        return Ok(());
+    if terms.is_empty() {
+        return Err("search requires at least one term".into());
     }
 
-    let expand = args.iter().any(|a| a == "--expand");
-
-    let query: String = args.iter()
-        .filter(|a| *a != "--expand")
-        .map(|a| a.as_str())
-        .collect::<Vec<&str>>()
-        .join(" ");
+    let query: String = terms.join(" ");
 
     let view = store::AnyView::load()?;
     let results = search::search(&query, &view);
@@ -258,21 +604,18 @@ Examples:
     let bump_keys: Vec<&str> = results.iter().take(limit).map(|r| r.key.as_str()).collect();
     let _ = lookups::bump_many(&bump_keys);
 
-    // Show text results
    let text_keys: std::collections::HashSet<String> = results.iter()
        .take(limit).map(|r| r.key.clone()).collect();
 
     for (i, r) in results.iter().enumerate().take(limit) {
         let marker = if r.is_direct { "→" } else { " " };
         let weight = view.node_weight(&r.key);
-        print!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
-        println!();
+        println!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
         if let Some(ref snippet) = r.snippet {
             println!("     {}", snippet);
         }
     }
 
-    // Spectral expansion: only with --expand
     if expand {
         if let Ok(emb) = spectral::load_embedding() {
             let seeds: Vec<&str> = results.iter()
@@ -292,8 +635,7 @@ Examples:
             println!("\nSpectral neighbors (structural, not keyword):");
             for (k, _dist) in &new_hits {
                 let weight = view.node_weight(k);
-                print!("  ~ [{:.2}] {}", weight, k);
-                println!();
+                println!("  ~ [{:.2}] {}", weight, k);
                 if let Some(content) = view.node_content(k) {
                     let snippet: String = content.lines()
                         .find(|l| !l.trim().is_empty() && !l.starts_with('#'))
@@ -328,7 +670,6 @@ fn cmd_init() -> Result<(), String> {
     // Initialize store and seed default identity node if empty
     let mut store = store::Store::load()?;
     let count = store.init_from_markdown()?;
-    // Seed default core nodes if missing
     for key in &cfg.core_nodes {
         if !store.nodes.contains_key(key.as_str()) {
             if key == "identity" {
@@ -382,11 +723,32 @@ fn cmd_migrate() -> Result<(), String> {
 }
 
 fn cmd_fsck() -> Result<(), String> {
-    store::fsck()?;
-    store::strip_md_keys()?;
-
-    // Prune broken links (relations referencing deleted/missing nodes)
     let mut store = store::Store::load()?;
+
+    // Check node-key consistency
+    let mut issues = 0;
+    for (key, node) in &store.nodes {
+        if key != &node.key {
+            eprintln!("MISMATCH: map key '{}' vs node.key '{}'", key, node.key);
+            issues += 1;
+        }
+    }
+
+    // Check edge endpoints
+    let mut dangling = 0;
+    for rel in &store.relations {
+        if rel.deleted { continue; }
+        if !store.nodes.contains_key(&rel.source_key) {
+            eprintln!("DANGLING: edge source '{}'", rel.source_key);
+            dangling += 1;
+        }
+        if !store.nodes.contains_key(&rel.target_key) {
+            eprintln!("DANGLING: edge target '{}'", rel.target_key);
+            dangling += 1;
+        }
+    }
+
+    // Prune orphan edges
     let mut to_tombstone = Vec::new();
     for rel in &store.relations {
         if rel.deleted { continue; }
@@ -402,47 +764,52 @@ fn cmd_fsck() -> Result<(), String> {
         let count = to_tombstone.len();
         store.append_relations(&to_tombstone)?;
         for t in &to_tombstone {
-            if let Some(r) = store.relations.iter_mut().find(|r|
-                r.source == t.source && r.target == t.target && !r.deleted) {
+            if let Some(r) = store.relations.iter_mut().find(|r| r.uuid == t.uuid) {
                 r.deleted = true;
                 r.version = t.version;
             }
         }
        store.save()?;
-        eprintln!("Pruned {} broken links", count);
-    } else {
-        eprintln!("No broken links");
+        eprintln!("Pruned {} orphan edges", count);
     }
+
+    let g = store.build_graph();
+    println!("fsck: {} nodes, {} edges, {} issues, {} dangling",
+        store.nodes.len(), g.edge_count(), issues, dangling);
     Ok(())
 }
 
 fn cmd_health() -> Result<(), String> {
     let store = store::Store::load()?;
     let g = store.build_graph();
-    let health = graph::health_report(&g, &store);
-    println!("{}", health);
+    let report = graph::health_report(&g, &store);
+    print!("{}", report);
     Ok(())
 }
 
 fn cmd_status() -> Result<(), String> {
     let store = store::Store::load()?;
-    let node_count = store.nodes.len();
-    let rel_count = store.relations.len();
-
-    let mut episodic = 0usize;
-    let mut semantic = 0usize;
-    for n in store.nodes.values() {
-        if matches!(n.node_type, store::NodeType::Semantic) {
-            semantic += 1;
-        } else {
-            episodic += 1;
-        }
-    }
-
-    println!("Nodes: {}  Relations: {}", node_count, rel_count);
-    println!("Types: semantic={} episodic={}", semantic, episodic);
-
     let g = store.build_graph();
+
+    let mut type_counts = std::collections::HashMap::new();
+    for node in store.nodes.values() {
+        *type_counts.entry(format!("{:?}", node.node_type)).or_insert(0usize) += 1;
+    }
+    let mut types: Vec<_> = type_counts.iter().collect();
+    types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
+
+    println!("Nodes: {}  Relations: {}", store.nodes.len(), store.relations.len());
+    print!("Types:");
+    for (t, c) in &types {
+        let label = match t.as_str() {
+            "Semantic" => "semantic",
+            "EpisodicSession" | "EpisodicDaily" | "EpisodicWeekly" | "EpisodicMonthly"
+                => "episodic",
+            _ => t.as_str(),
+        };
+        print!(" {}={}", label, c);
+    }
+    println!();
 
     println!("Graph edges: {}  Communities: {}", g.edge_count(), g.community_count());
     Ok(())
@@ -451,16 +818,19 @@ fn cmd_status() -> Result<(), String> {
 fn cmd_graph() -> Result<(), String> {
     let store = store::Store::load()?;
     let g = store.build_graph();
-    println!("Top nodes by degree:");
-    query::run_query(&store, &g,
-        "* | sort degree | limit 10 | select degree,clustering_coefficient")
+    println!("Graph: {} nodes, {} edges, {} communities",
+        g.nodes().len(), g.edge_count(), g.community_count());
+    println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
+        g.small_world_sigma(), g.degree_power_law_exponent(),
+        g.degree_gini(), g.avg_clustering_coefficient());
+    Ok(())
 }
 
-fn cmd_used(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory used KEY".into());
+fn cmd_used(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("used requires a key".into());
     }
-    let key = args.join(" ");
+    let key = key.join(" ");
     let mut store = store::Store::load()?;
     let resolved = store.resolve_key(&key)?;
     store.mark_used(&resolved);
@@ -469,12 +839,8 @@ fn cmd_used(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_wrong(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory wrong KEY [CONTEXT]".into());
-    }
-    let key = &args[0];
-    let ctx = if args.len() > 1 { Some(args[1..].join(" ")) } else { None };
+fn cmd_wrong(key: &str, context: &[String]) -> Result<(), String> {
+    let ctx = if context.is_empty() { None } else { Some(context.join(" ")) };
     let mut store = store::Store::load()?;
     let resolved = store.resolve_key(key)?;
     store.mark_wrong(&resolved, ctx.as_deref());
@@ -483,11 +849,11 @@ fn cmd_wrong(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_gap(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory gap DESCRIPTION".into());
+fn cmd_gap(description: &[String]) -> Result<(), String> {
+    if description.is_empty() {
+        return Err("gap requires a description".into());
    }
-    let desc = args.join(" ");
+    let desc = description.join(" ");
     let mut store = store::Store::load()?;
     store.record_gap(&desc);
     store.save()?;
@@ -495,10 +861,7 @@
     Ok(())
 }
 
-fn cmd_link_orphans(args: &[String]) -> Result<(), String> {
-    let min_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(2);
-    let links_per: usize = args.get(1).and_then(|s| s.parse().ok()).unwrap_or(3);
-    let sim_thresh: f32 = args.get(2).and_then(|s| s.parse().ok()).unwrap_or(0.15);
+fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
     let mut store = store::Store::load()?;
     let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
     println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
        orphans, links, min_deg, links_per, sim_thresh);
     Ok(())
 }
 
-fn cmd_cap_degree(args: &[String]) -> Result<(), String> {
-    let max_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(50);
+fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
     let mut store = store::Store::load()?;
     let (hubs, pruned) = store.cap_degree(max_deg)?;
     store.save()?;
@@ -515,30 +877,10 @@ fn cmd_cap_degree(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
-    let mut count = 5usize;
-    let mut auto = false;
-    let mut agent: Option<String> = None;
-    let mut i = 0;
-    while i < args.len() {
-        match args[i].as_str() {
-            "--count" if i + 1 < args.len() => {
-                count = args[i + 1].parse().map_err(|_| "invalid count")?;
-                i += 2;
-            }
-            "--auto" => { auto = true; i += 1; }
-            "--agent" if i + 1 < args.len() => {
-                agent = Some(args[i + 1].clone());
-                i += 2;
-            }
-            _ => { i += 1; }
-        }
-    }
-
+fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option<String>) -> Result<(), String> {
     let store = store::Store::load()?;
 
     if let Some(agent_name) = agent {
-        // Generate a specific agent prompt
         let prompt = neuro::agent_prompt(&store, &agent_name, count)?;
         println!("{}", prompt);
         Ok(())
@@ -570,11 +912,11 @@ fn cmd_params() -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_link(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory link KEY".into());
+fn cmd_link(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("link requires a key".into());
     }
-    let key = args.join(" ");
+    let key = key.join(" ");
     let store = store::Store::load()?;
     let resolved = store.resolve_key(&key)?;
     let g = store.build_graph();
@@ -583,18 +925,7 @@ fn cmd_link(args: &[String]) -> Result<(), String> {
         &format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
 }
 
-fn cmd_replay_queue(args: &[String]) -> Result<(), String> {
-    let mut count = 10usize;
-    let mut i = 0;
-    while i < args.len() {
-        match args[i].as_str() {
-            "--count" if i + 1 < args.len() => {
-                count = args[i + 1].parse().map_err(|_| "invalid count")?;
-                i += 2;
-            }
-            _ => { i += 1; }
-        }
-    }
+fn cmd_replay_queue(count: usize) -> Result<(), String> {
     let store = store::Store::load()?;
     let queue = neuro::replay_queue(&store, count);
     println!("Replay queue ({} items):", queue.len());
@@ -618,17 +949,7 @@ fn cmd_consolidate_full() -> Result<(), String> {
     consolidate::consolidate_full(&mut store)
 }
 
-fn cmd_triangle_close(args: &[String]) -> Result<(), String> {
-    let min_degree: usize = args.first()
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(5);
-    let sim_threshold: f32 = args.get(1)
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(0.3);
-    let max_per_hub: usize = args.get(2)
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(10);
-
+fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
     println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
         min_degree, sim_threshold, max_per_hub);
 
@@ -645,14 +966,11 @@ fn cmd_daily_check() -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_link_add(args: &[String]) -> Result<(), String> {
-    if args.len() < 2 {
-        return Err("Usage: poc-memory link-add SOURCE TARGET [REASON]".into());
-    }
+fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
     let mut store = store::Store::load()?;
-    let source = store.resolve_key(&args[0])?;
-    let target = store.resolve_key(&args[1])?;
-    let reason = if args.len() > 2 { args[2..].join(" ") } else { String::new() };
+    let source = store.resolve_key(source)?;
+    let target = store.resolve_key(target)?;
+    let reason = reason.join(" ");
 
     // Refine target to best-matching section
     let source_content = store.nodes.get(&source)
@@ -667,37 +985,31 @@ fn cmd_link_add(args: &[String]) -> Result<(), String> {
         .map(|n| n.uuid)
         .ok_or_else(|| format!("target not found: {}", target))?;
 
-    // Check if link already exists
+    // Check for existing link
     let exists = store.relations.iter().any(|r|
-        r.source_key == source && r.target_key == target && !r.deleted
-    );
+        !r.deleted &&
+        ((r.source_key == source && r.target_key == target) ||
+         (r.source_key == target && r.target_key == source)));
     if exists {
-        println!("Link already exists: {} → {}", source, target);
+        println!("Link already exists: {} ↔ {}", source, target);
         return Ok(());
     }
 
     let rel = store::new_relation(
         source_uuid, target_uuid,
-        store::RelationType::Auto,
-        0.5,
+        store::RelationType::Link, 0.8,
         &source, &target,
     );
     store.add_relation(rel)?;
-    if !reason.is_empty() {
-        println!("+ {} → {} ({})", source, target, reason);
-    } else {
-        println!("+ {} → {}", source, target);
-    }
+    store.save()?;
+    println!("Linked: {} → {} ({})", source, target, reason);
     Ok(())
 }
 
-fn cmd_link_impact(args: &[String]) -> Result<(), String> {
-    if args.len() < 2 {
-        return Err("Usage: poc-memory link-impact SOURCE TARGET".into());
-    }
+fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
     let store = store::Store::load()?;
-    let source = store.resolve_key(&args[0])?;
-    let target = store.resolve_key(&args[1])?;
+    let source = store.resolve_key(source)?;
+    let target = store.resolve_key(target)?;
 
     let g = store.build_graph();
     let impact = g.link_impact(&source, &target);
@@ -711,7 +1023,7 @@ fn cmd_link_impact(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
+fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
     let home = env::var("HOME").unwrap_or_default();
     let results_dir = std::path::PathBuf::from(&home)
         .join(".claude/memory/agent-results");
@@ -725,8 +1037,6 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
     let mut applied = 0;
     let mut errors = 0;
 
-    let process_all = args.iter().any(|a| a == "--all");
-
     // Find .json result files
     let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
@@ -849,32 +1159,27 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-
-fn cmd_digest(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory digest daily|weekly|monthly|auto [DATE]".into());
-    }
-
+fn cmd_digest(level: DigestLevel) -> Result<(), String> {
     let mut store = store::Store::load()?;
-    let date_arg = args.get(1).map(|s| s.as_str()).unwrap_or("");
 
-    match args[0].as_str() {
-        "auto" => digest::digest_auto(&mut store),
-        name @ ("daily" | "weekly" | "monthly") => {
-            let arg = if date_arg.is_empty() {
-                store::format_date(store::now_epoch())
-            } else {
-                date_arg.to_string()
-            };
-            digest::generate(&mut store, name, &arg)
+    match level {
+        DigestLevel::Auto => digest::digest_auto(&mut store),
+        DigestLevel::Daily { date } => {
+            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
+            digest::generate(&mut store, "daily", &arg)
+        }
+        DigestLevel::Weekly { date } => {
+            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
+            digest::generate(&mut store, "weekly", &arg)
+        }
+        DigestLevel::Monthly { date } => {
+            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
+            digest::generate(&mut store, "monthly", &arg)
         }
-        _ => Err(format!("Unknown digest type: {}. Use: daily, weekly, monthly, auto", args[0])),
     }
 }
 
-fn cmd_digest_links(args: &[String]) -> Result<(), String> {
-    let do_apply = args.iter().any(|a| a == "--apply");
-
+fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
     let store = store::Store::load()?;
     let links = digest::parse_all_digest_links(&store);
     drop(store);
@@ -897,17 +1202,8 @@ fn cmd_digest_links(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_journal_enrich(args: &[String]) -> Result<(), String> {
-    if args.len() < 2 {
-        return Err("Usage: poc-memory journal-enrich JSONL_PATH ENTRY_TEXT [GREP_LINE]".into());
-    }
-    let jsonl_path = &args[0];
-    let entry_text = &args[1];
-    let grep_line: usize = args.get(2)
-        .and_then(|a| a.parse().ok())
-        .unwrap_or(0);
-
-    if !std::path::Path::new(jsonl_path.as_str()).is_file() {
+fn cmd_journal_enrich(jsonl_path: &str, entry_text: &str, grep_line: usize) -> Result<(), String> {
+    if !std::path::Path::new(jsonl_path).is_file() {
         return Err(format!("JSONL not found: {}", jsonl_path));
     }
 
@@ -915,12 +1211,11 @@ fn cmd_journal_enrich(args: &[String]) -> Result<(), String> {
     let mut store = store::Store::load()?;
     enrich::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
 }
 
-fn cmd_experience_mine(args: &[String]) -> Result<(), String> {
-    let jsonl_path = if let Some(path) = args.first() {
-        path.clone()
-    } else {
-        find_current_transcript()
-            .ok_or("no JSONL transcripts found")?
+fn cmd_experience_mine(jsonl_path: Option<String>) -> Result<(), String> {
+    let jsonl_path = match jsonl_path {
+        Some(p) => p,
+        None => find_current_transcript()
+            .ok_or("no JSONL transcripts found")?,
     };
 
     if !std::path::Path::new(jsonl_path.as_str()).is_file() {
@@ -933,26 +1228,15 @@ fn cmd_experience_mine(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_apply_consolidation(args: &[String]) -> Result<(), String> {
-    let do_apply = args.iter().any(|a| a == "--apply");
-    let report_file = args.windows(2)
-        .find(|w| w[0] == "--report")
-        .map(|w| w[1].as_str());
-
+fn cmd_apply_consolidation(do_apply: bool, report_file: Option<&str>) -> Result<(), String> {
     let mut store = store::Store::load()?;
     consolidate::apply_consolidation(&mut store, do_apply, report_file)
 }
 
-fn cmd_differentiate(args: &[String]) -> Result<(), String> {
-    let do_apply = args.iter().any(|a| a == "--apply");
-    let key_arg: Option<&str> = args.iter()
-        .find(|a| !a.starts_with("--"))
-        .map(|s| s.as_str());
-
+fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
     let mut store = store::Store::load()?;
 
     if let Some(key) = key_arg {
-        // Differentiate a specific hub
         let resolved = store.resolve_key(key)?;
         let moves = neuro::differentiate_hub(&store, &resolved)
             .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;
@@ -987,7 +1271,6 @@ fn cmd_differentiate(args: &[String]) -> Result<(), String> {
         store.save()?;
         println!("\nApplied: {}  Skipped: {}", applied, skipped);
     } else {
-        // Show all differentiable hubs
         let hubs = neuro::find_differentiable_hubs(&store);
         if hubs.is_empty() {
             println!("No file-level hubs with sections found above threshold");
@@ -1004,8 +1287,7 @@ fn cmd_differentiate(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_link_audit(args: &[String]) -> Result<(), String> {
-    let apply = args.iter().any(|a| a == "--apply");
+fn cmd_link_audit(apply: bool) -> Result<(), String> {
     let mut store = store::Store::load()?;
     let stats = audit::link_audit(&mut store, apply)?;
     println!("\n{}", "=".repeat(60));
@@ -1016,11 +1298,11 @@ fn cmd_link_audit(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_trace(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory trace KEY".into());
+fn cmd_trace(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("trace requires a key".into());
     }
-    let key = args.join(" ");
+    let key = key.join(" ");
     let store = store::Store::load()?;
     let resolved = store.resolve_key(&key)?;
     let g = store.build_graph();
@@ -1106,7 +1388,6 @@ fn cmd_trace(args: &[String]) -> Result<(), String> {
         }
     }
 
-    // Summary
     println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
         episodic_session.len(), episodic_daily.len(),
         episodic_weekly.len(), semantic.len());
@@ -1114,10 +1395,7 @@ fn cmd_trace(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_spectral(args: &[String]) -> Result<(), String> {
-    let k: usize = args.first()
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(30);
+fn cmd_spectral(k: usize) -> Result<(), String> {
     let store = store::Store::load()?;
     let g = graph::build_graph(&store);
     let result = spectral::decompose(&g, k);
@@ -1125,10 +1403,7 @@
     Ok(())
 }
 
-fn cmd_spectral_save(args: &[String]) -> Result<(), String> {
-    let k: usize = args.first()
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(20);
+fn cmd_spectral_save(k: usize) -> Result<(), String> {
     let store = store::Store::load()?;
     let g = graph::build_graph(&store);
     let result = spectral::decompose(&g, k);
@@ -1137,19 +1412,10 @@
     Ok(())
 }
 
-fn cmd_spectral_neighbors(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("usage: spectral-neighbors KEY [N]".to_string());
-    }
-    let key = &args[0];
-    let n: usize = args.get(1)
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(15);
-
+fn cmd_spectral_neighbors(key: &str, n: usize) -> Result<(), String> {
     let emb = spectral::load_embedding()?;
 
-    // Show which dimensions this node loads on
-    let dims = spectral::dominant_dimensions(&emb, &[key.as_str()]);
+    let dims = spectral::dominant_dimensions(&emb, &[key]);
     println!("Node: {} (embedding: {} dims)", key, emb.dims);
     println!("Top spectral axes:");
     for &(d, loading) in dims.iter().take(5) {
@@ -1164,37 +1430,27 @@
     Ok(())
 }
 
-fn cmd_spectral_positions(args: &[String]) -> Result<(), String> {
-    let n: usize = args.first()
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(30);
-
+fn cmd_spectral_positions(n: usize) -> Result<(), String> {
     let store = store::Store::load()?;
     let emb = spectral::load_embedding()?;
 
-    // Build communities fresh from graph (don't rely on cached node fields)
     let g = store.build_graph();
     let communities = g.communities().clone();
     let positions = spectral::analyze_positions(&emb, &communities);
 
-    // Show outliers first
     println!("Spectral position analysis — {} nodes", positions.len());
     println!("  outlier: dist_to_center / median (>1 = unusual position)");
     println!("  bridge:  dist_to_center / dist_to_nearest_other_community");
     println!();
 
-    // Group by classification
     let mut bridges: Vec<&spectral::SpectralPosition> = Vec::new();
     let mut outliers: Vec<&spectral::SpectralPosition> = Vec::new();
-    let mut core: Vec<&spectral::SpectralPosition> = Vec::new();
 
     for pos in positions.iter().take(n) {
         match spectral::classify_position(pos) {
             "bridge" => bridges.push(pos),
-            "outlier" => outliers.push(pos),
-            "core" => core.push(pos),
-            _ => outliers.push(pos), // peripheral goes with outliers for display
+            _ => outliers.push(pos),
        }
     }
 
@@ -1219,24 +1475,18 @@
     Ok(())
 }
 
-fn cmd_spectral_suggest(args: &[String]) -> Result<(), String> {
-    let n: usize = args.first()
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(20);
-
+fn cmd_spectral_suggest(n: usize) -> Result<(), String> {
     let store = store::Store::load()?;
     let emb = spectral::load_embedding()?;
     let g = store.build_graph();
     let communities = g.communities();
 
-    // Only consider nodes with enough edges for meaningful spectral position
     let min_degree = 3;
     let well_connected: std::collections::HashSet<&str> = emb.coords.keys()
         .filter(|k| g.degree(k) >= min_degree)
         .map(|k| k.as_str())
         .collect();
 
-    // Filter embedding to well-connected nodes
     let filtered_emb = spectral::SpectralEmbedding {
         dims: emb.dims,
         eigenvalues: emb.eigenvalues.clone(),
         coords: emb.coords.iter()
            .filter(|(k, _)| well_connected.contains(k.as_str()))
@@ -1246,7 +1496,6 @@
            .collect(),
     };
 
-    // Build set of existing linked pairs
     let mut linked: std::collections::HashSet<(String, String)> =
         std::collections::HashSet::new();
     for rel in &store.relations {
@@ -1274,12 +1523,11 @@
     Ok(())
 }
 
-fn cmd_list_keys(args: &[String]) -> Result<(), String> {
+fn cmd_list_keys(pattern: Option<&str>) -> Result<(), String> {
     let store = store::Store::load()?;
     let g = store.build_graph();
-    let pattern = args.first().map(|s| s.as_str());
+
     if let Some(pat) = pattern {
-        // Simple glob: only support leading/trailing * and *substring*
         let pat_lower = pat.to_lowercase();
         let (prefix, suffix, middle) = if pat_lower.starts_with('*') && pat_lower.ends_with('*') {
             (None, None, Some(pat_lower.trim_matches('*').to_string()))
@@ -1325,11 +1573,11 @@ fn cmd_dump_json() -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_node_delete(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory node-delete KEY".into());
+fn cmd_node_delete(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("node-delete requires a key".into());
     }
-    let key = args.join(" ");
+    let key = key.join(" ");
     let mut store = store::Store::load()?;
     let resolved = store.resolve_key(&key)?;
     store.delete_node(&resolved)?;
@@ -1338,12 +1586,7 @@ fn cmd_node_delete(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_node_rename(args: &[String]) -> Result<(), String> {
-    if args.len() < 2 {
-        return Err("Usage: poc-memory node-rename OLD_KEY NEW_KEY".into());
-    }
-    let old_key = &args[0];
-    let new_key = &args[1];
+fn cmd_node_rename(old_key: &str, new_key: &str) -> Result<(), String> {
     let mut store = store::Store::load()?;
     let old_resolved = store.resolve_key(old_key)?;
     store.rename_node(&old_resolved, new_key)?;
@@ -1352,15 +1595,12 @@ fn cmd_node_rename(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-/// Migration: populate created_at for all nodes with missing or invalid values.
-/// Journal nodes: parse timestamp from key. All others: fall back to `timestamp` field.
 fn cmd_journal_ts_migrate() -> Result<(), String> {
     use chrono::{NaiveDateTime, TimeZone, Local};
 
     let mut store = store::Store::load()?;
     let re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})[t-](\d{2})-(\d{2})").unwrap();
 
-    // Valid unix epoch range: 2001-01-01 to 2099-12-31
     let valid_range = 978_307_200i64..=4_102_444_800i64;
 
     let to_update: Vec<_> = store.nodes.values()
@@ -1371,7 +1611,6 @@ fn cmd_journal_ts_migrate() -> Result<(), String> {
     let mut updated = 0usize;
 
     for key in &to_update {
-        // Try parsing timestamp from journal key
         if let Some(caps) = re.captures(key) {
             let date_str = format!("{} {}:{}", &caps[1], &caps[2], &caps[3]);
             if let Ok(ndt) = NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d %H:%M") {
@@ -1385,7 +1624,6 @@ fn cmd_journal_ts_migrate() -> Result<(), String> {
                 }
             }
         }
-        // Fall back to the node's timestamp field (last-modified, but better than 0)
         if let Some(node) = store.nodes.get_mut(key) {
             node.created_at = node.timestamp as i64;
             node.version += 1;
@@ -1393,7 +1631,6 @@ fn cmd_journal_ts_migrate() -> Result<(), String> {
         }
     }
 
-    // Persist all updated nodes
     let nodes_to_write: Vec<_> = to_update.iter()
        .filter_map(|k| store.nodes.get(k))
        .filter(|n| valid_range.contains(&n.created_at))
@@ -1460,8 +1697,7 @@ fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &c
     }
 }
 
-fn cmd_load_context(args: &[String]) -> Result<(), String> {
-    let stats = args.iter().any(|a| a == "--stats");
+fn cmd_load_context(stats: bool) -> Result<(), String> {
     let cfg = config::get();
     let store = store::Store::load()?;
 
@@ -1510,11 +1746,11 @@ fn cmd_load_context(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_render(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory render KEY".into());
+fn cmd_render(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("render requires a key".into());
     }
-    let key = args.join(" ");
+    let key = key.join(" ");
     let store = store::Store::load()?;
     let resolved = store.resolve_key(&key)?;
 
@@ -1525,41 +1761,16 @@ fn cmd_render(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_history(args: &[String]) -> Result<(), String> {
-    use clap::Parser;
-
-    /// Show all stored versions of a memory node
-    #[derive(Parser)]
-    #[command(name = "poc-memory history")]
-    struct HistoryArgs {
-        /// Show full content for every version (not just preview)
-        #[arg(long)]
-        full: bool,
-        /// Node key to look up
-        #[arg(required = true)]
-        key: Vec<String>,
+fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("history requires a key".into());
     }
+    let raw_key = key.join(" ");
 
-    let parsed = match HistoryArgs::try_parse_from(
-        std::iter::once("history".to_string()).chain(args.iter().cloned())
-    ) {
-        Ok(p) => p,
-        Err(e) => {
-            // Let clap print its own help/error formatting directly
-            e.print().ok();
-            std::process::exit(if e.use_stderr() { 1 } else { 0 });
-        }
-    };
-
-    let full = parsed.full;
-    let raw_key = parsed.key.join(" ");
-
-    // Resolve key consistently with render/write
     let store = store::Store::load()?;
     let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
     drop(store);
 
-    // Replay the node log, collecting all versions of this key
     let path = store::nodes_path();
     if !path.exists() {
         return Err("No node log found".into());
@@ -1609,7 +1820,6 @@ fn cmd_history(args: &[String]) -> Result<(), String> {
     }
 
     if !full {
-        // Show latest full content
        if let Some(latest) = versions.last() {
            eprintln!("\n--- Latest content (v{}, {}) ---",
                latest.version, latest.provenance.label());
@@ -1620,12 +1830,11 @@ fn cmd_history(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_write(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory write KEY < content\n\
-            Reads content from stdin, upserts into the store.".into());
+fn cmd_write(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("write requires a key (reads content from stdin)".into());
     }
-    let raw_key = args.join(" ");
+    let raw_key = key.join(" ");
     let mut content = String::new();
     std::io::Read::read_to_string(&mut std::io::stdin(), &mut content)
        .map_err(|e| format!("read stdin: {}", e))?;
@@ -1635,8 +1844,6 @@ fn cmd_write(args: &[String]) -> Result<(), String> {
     }
 
     let mut store = store::Store::load()?;
-    // Resolve the key the same way render/search do, so writes and reads
-    // always hit the same node. Fall back to raw key for new nodes.
     let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
     let result = store.upsert(&key, &content)?;
     match result {
@@ -1650,16 +1857,16 @@ fn cmd_write(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_import(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory import FILE [FILE...]".into());
+fn cmd_import(files: &[String]) -> Result<(), String> {
+    if files.is_empty() {
+        return Err("import requires at least one file path".into());
     }
 
     let mut store = store::Store::load()?;
     let mut total_new = 0;
     let mut total_updated = 0;
 
-    for arg in args {
+    for arg in files {
         let path = std::path::PathBuf::from(arg);
         let resolved = if path.exists() {
             path
@@ -1683,23 +1890,20 @@ fn cmd_import(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_export(args: &[String]) -> Result<(), String> {
+fn cmd_export(files: &[String], export_all: bool) -> Result<(), String> {
     let store = store::Store::load()?;
-    let export_all = args.iter().any(|a| a == "--all");
 
     let targets: Vec<String> = if export_all {
-        // Find all unique file-level keys (no # in key)
         let mut files: Vec<String> = store.nodes.keys()
            .filter(|k| !k.contains('#'))
            .cloned()
            .collect();
         files.sort();
         files
-    } else if args.is_empty() {
-        return Err("Usage: poc-memory export FILE [FILE...] | --all".into());
+    } else if files.is_empty() {
+        return Err("export requires file keys or --all".into());
     } else {
-        args.iter().map(|a| {
-            // Strip .md if user supplied it — store keys are bare
+        files.iter().map(|a| {
            a.strip_suffix(".md").unwrap_or(a).to_string()
        }).collect()
     };
@@ -1722,16 +1926,14 @@ fn cmd_export(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_journal_write(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory journal-write TEXT".into());
+fn cmd_journal_write(text: &[String]) -> Result<(), String> {
+    if text.is_empty() {
+        return Err("journal-write requires text".into());
     }
-    let text = args.join(" ");
+    let text = text.join(" ");
 
-    // Generate timestamp and slug
     let timestamp = store::format_datetime(store::now_epoch());
 
-    // Slug: lowercase first ~6 words, hyphenated, truncated
     let slug: String = text.split_whitespace()
        .take(6)
        .map(|w| w.to_lowercase()
@@ -1743,10 +1945,8 @@ fn cmd_journal_write(args: &[String]) -> Result<(), String> {
     let key = format!("journal#j-{}-{}",
         timestamp.to_lowercase().replace(':', "-"), slug);
 
-    // Build content with header
     let content = format!("## {}\n\n{}", timestamp, text);
 
-    // Find source ref (most recently modified .jsonl transcript)
     let source_ref = find_current_transcript();
 
     let mut store = store::Store::load()?;
@@ -1767,31 +1967,10 @@
     Ok(())
 }
 
-fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
-    let mut n: usize = 20;
-    let mut full = false;
-    let mut level: u8 = 0; // 0=journal, 1=daily, 2=weekly, 3=monthly
-
-    for arg in args {
-        if arg == "--full" || arg == "-f" {
-            full = true;
-        } else if let Some(val) = arg.strip_prefix("--level=") {
-            level = match val {
-                "0" | "journal" => 0,
-                "1" | "daily" => 1,
-                "2" | "weekly" => 2,
-                "3" | "monthly" => 3,
-                _ => return Err(format!("unknown level '{}': use 0-3 or journal/daily/weekly/monthly", val)),
-            };
-        } else if let Ok(num) = arg.parse::<usize>() {
-            n = num;
-        }
-    }
-
+fn cmd_journal_tail(n: usize, full: bool, level: u8) -> Result<(), String> {
     let store = store::Store::load()?;
 
     if level == 0 {
-        // Original journal-tail behavior
         journal_tail_entries(&store, n, full)
     } else {
+        let node_type = match level {
@@ -1859,7 +2038,6 @@ fn journal_tail_digests(store: &store::Store, node_type: store::NodeType, n: usi
     let mut digests: Vec<_> = store.nodes.values()
        .filter(|node| node.node_type == node_type)
        .collect();
-    // Sort by timestamp, fall back to key for lexicographic ordering
     digests.sort_by(|a, b| {
        if a.timestamp > 0 && b.timestamp > 0 {
            a.timestamp.cmp(&b.timestamp)
@@ -1904,18 +2082,7 @@
 fn extract_title(content: &str) -> String {
     String::from("(untitled)")
 }
 
-fn cmd_interference(args: &[String]) -> Result<(), String> {
-    let mut threshold = 0.4f32;
-    let mut i = 0;
-    while i < args.len() {
-        match args[i].as_str() {
-            "--threshold" if i + 1 < args.len() => {
-                threshold = args[i + 1].parse().map_err(|_| "invalid threshold")?;
-                i += 2;
-            }
-            _ => { i += 1; }
-        }
-    }
+fn cmd_interference(threshold: f32) -> Result<(), String> {
     let store = store::Store::load()?;
     let g = store.build_graph();
     let pairs = neuro::detect_interference(&store, &g, threshold);
@@ -1931,14 +2098,13 @@ fn cmd_interference(args: &[String]) -> Result<(), String> {
     Ok(())
 }
 
-fn cmd_query(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory query 'EXPR | stage | stage ...'\n\n\
+fn cmd_query(expr: &[String]) -> Result<(), String> {
+    if expr.is_empty() {
+        return Err("query requires an expression\n\n\
             Expressions:\n  \
            degree > 15                          property filter\n  \
            key ~ 'journal.*' AND degree > 10    boolean + regex\n  \
            neighbors('identity') WHERE ...      graph traversal\n  \
-            community_id = community('key')      function as value\n  \
            *                                    all nodes\n\n\
            Pipe stages:\n  \
            | sort FIELD [asc]                   sort (desc by default)\n  \
            | limit N                            take first N\n  \
            | select F,F                         show only fields\n  \
            | count                              just show count".into());
     }
 
-    let query_str = args.join(" ");
+    let query_str = expr.join(" ");
     let store = store::Store::load()?;
     let graph = store.build_graph();
     query::run_query(&store, &graph, &query_str)
 }
 
-fn cmd_lookup_bump(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return Err("Usage: poc-memory lookup-bump KEY [KEY...]".into());
+fn cmd_lookup_bump(keys: &[String]) -> Result<(), String> {
+    if keys.is_empty() {
+        return Err("lookup-bump requires at least one key".into());
     }
-    let keys: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
+    let keys: Vec<&str> = keys.iter().map(|s| s.as_str()).collect();
     lookups::bump_many(&keys)
 }
 
-fn cmd_lookups(args: &[String]) -> Result<(), String> {
-    let date = if args.is_empty() {
-        chrono::Local::now().format("%Y-%m-%d").to_string()
-    } else {
-        args[0].clone()
-    };
+fn cmd_lookups(date: Option<&str>) -> Result<(), String> {
+    let date = date.map(|d| d.to_string())
+        .unwrap_or_else(|| chrono::Local::now().format("%Y-%m-%d").to_string());
 
     let store = store::Store::load()?;
     let keys: Vec<String> = store.nodes.values().map(|n| n.key.clone()).collect();
@@ -1987,65 +2150,37 @@
     Ok(())
 }
 
-fn cmd_daemon(args: &[String]) -> Result<(), String> {
-    if args.is_empty() {
-        return daemon::run_daemon();
-    }
-    match args[0].as_str() {
-        "status" => daemon::show_status(),
-        "log" => {
-            // daemon log [N]      — last N lines (default 20)
-            // daemon log JOB [N]  — last N lines for job
-            let (job, lines) = match args.get(1) {
+fn cmd_daemon(sub: Option<&str>, args: &[String]) -> Result<(), String> {
+    match sub {
+        None => daemon::run_daemon(),
+        Some("status") => daemon::show_status(),
+        Some("log") => {
+            let (job, lines) = match args.first() {
                 None => (None, 20),
                 Some(s) => {
                     if let Ok(n) = s.parse::<usize>() {
                         (None, n)
                     } else {
-                        let n = args.get(2).and_then(|s| s.parse().ok()).unwrap_or(20);
+                        let n = args.get(1).and_then(|s| s.parse().ok()).unwrap_or(20);
                         (Some(s.as_str()), n)
                     }
                 }
             };
             daemon::show_log(job, lines)
         }
-        "install" => daemon::install_service(),
-        _ => {
-            eprintln!("Usage: poc-memory daemon [status|log|install]");
-            Err("unknown daemon subcommand".into())
-        }
+        Some("install") => daemon::install_service(),
+        Some(other) => Err(format!("unknown daemon subcommand: {}", other)),
     }
 }
 
-fn cmd_knowledge_loop(args: &[String]) -> Result<(), String> {
-    if args.iter().any(|a| a == "--help" || a == "-h") {
-        eprintln!("Usage: poc-memory knowledge-loop [OPTIONS]
-
-Run knowledge agents (observation, extractor, connector, challenger) in
-a convergence loop. Each cycle runs all agents, applies actions to the
-graph, and checks structural stability metrics.
-
-Options:
-    --max-cycles N    Maximum cycles before stopping (default: 20)
-    --batch-size N    Items per agent per cycle (default: 5)
-    --window N        Cycles to check for convergence (default: 5)
-    --max-depth N     Maximum inference depth (default: 4)");
-        return Ok(());
-    }
-
-    let mut config = knowledge::KnowledgeLoopConfig::default();
-
-    let mut i = 0;
-    while i < args.len() {
-        match args[i].as_str() {
-            "--max-cycles" => { i += 1; config.max_cycles = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.max_cycles); }
-            "--batch-size" => { i += 1; config.batch_size = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.batch_size); }
-            "--window" => { i += 1; config.window = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.window); }
-            "--max-depth" => { i += 1; config.max_depth = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.max_depth); }
-            other => return Err(format!("Unknown arg: {}. Use --help for usage.", other)),
-        }
-        i += 1;
-    }
+fn cmd_knowledge_loop(max_cycles: usize, batch_size: usize, window: usize, max_depth: i32) -> Result<(), String> {
+    let config = knowledge::KnowledgeLoopConfig {
+        max_cycles,
+        batch_size,
+        window,
+        max_depth,
+        ..Default::default()
+    };
 
     let results = knowledge::run_knowledge_loop(&config)?;
     eprintln!("\nCompleted {} cycles, {} total actions applied",
@@ -2054,42 +2189,8 @@ Options:
     Ok(())
 }
 
-fn cmd_fact_mine(args: &[String]) -> Result<(), String> {
-    if args.is_empty() || args.iter().any(|a| a == "--help" || a == "-h") {
-        eprintln!("Usage: poc-memory fact-mine <PATH> [OPTIONS]
-       poc-memory fact-mine --batch <DIR> [OPTIONS]
-
-Extract atomic factual claims from conversation transcripts using Haiku.
-
-Options:
-    --batch            Process all .jsonl files in directory
-    --dry-run          Show chunks without calling model
-    --output FILE      Write JSON to file (default: stdout)
-    --min-messages N   Skip transcripts with fewer messages (default: 10)");
-        return Ok(());
-    }
-
-    let mut batch = false;
-    let mut dry_run = false;
-    let mut output_file: Option<String> = None;
-    let mut min_messages = 10usize;
-    let mut path: Option<String> = None;
-
-    let mut i = 0;
-    while i < args.len() {
-        match args[i].as_str() {
-            "--batch" => batch = true,
-            "--dry-run" => dry_run = true,
-            "--output" | "-o" => { i += 1; output_file = args.get(i).cloned(); }
-            "--min-messages" => { i += 1; min_messages = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(min_messages); }
-            s if !s.starts_with('-') => path = Some(s.to_string()),
-            other => return Err(format!("Unknown arg: {}", other)),
-        }
-        i += 1;
-    }
-
-    let path = path.ok_or("Missing path argument")?;
-    let p = std::path::Path::new(&path);
+fn cmd_fact_mine(path: &str, batch: bool, dry_run: bool, output_file: Option<&str>, min_messages: usize) -> Result<(), String> {
+    let p = std::path::Path::new(path);
 
     let paths: Vec<std::path::PathBuf> = if batch {
         if !p.is_dir() {
@@ -2114,7 +2215,7 @@
     if !dry_run {
         let json = serde_json::to_string_pretty(&facts)
             .map_err(|e| format!("serialize: {}", e))?;
-        if let Some(out) = &output_file {
+        if let Some(out) = output_file {
            std::fs::write(out, &json).map_err(|e| format!("write: {}", e))?;
            eprintln!("\nWrote {} facts to {}", facts.len(), out);
        } else {
@@ -2126,13 +2227,10 @@
     Ok(())
 }
 
-fn cmd_fact_mine_store(args: &[String]) -> Result<(), String> {
-    if args.len() != 1 {
-        return Err("Usage: poc-memory fact-mine-store <FILE>".into());
-    }
-    let path = std::path::Path::new(&args[0]);
+fn cmd_fact_mine_store(path: &str) -> Result<(), String> {
+    let path = std::path::Path::new(path);
     if !path.exists() {
-        return Err(format!("File not found: {}", args[0]));
+        return Err(format!("File not found: {}", path.display()));
     }
     let count = fact_mine::mine_and_store(path, None)?;
     eprintln!("Stored {} facts", count);
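
For context, the clap side of this refactor is not visible in these hunks; the derived variants that feed the new handler signatures live earlier in main.rs. A minimal sketch of how two of them could map onto the refactored handlers, with field shapes inferred from the removed hand-rolled parsers above (variant names, doc comments, and the dispatch helper are illustrative assumptions, not the actual definitions):

```rust
use clap::Subcommand;

// Sketch only: field names and defaults are taken from the removed
// --batch/--dry-run/--output/--min-messages parsing in this diff;
// the real enum may differ.
#[derive(Subcommand)]
enum CommandSketch {
    /// Show per-node lookup counts for a date
    Lookups {
        /// Date (YYYY-MM-DD); defaults to today when omitted
        date: Option<String>,
    },
    /// Extract atomic factual claims from transcripts
    #[command(name = "fact-mine")]
    FactMine {
        /// Transcript file, or directory with --batch
        path: String,
        /// Process all .jsonl files in directory
        #[arg(long)]
        batch: bool,
        /// Show chunks without calling model
        #[arg(long)]
        dry_run: bool,
        /// Write JSON to file (default: stdout)
        #[arg(long, short = 'o')]
        output: Option<String>,
        /// Skip transcripts with fewer messages
        #[arg(long, default_value_t = 10)]
        min_messages: usize,
    },
}

// Hypothetical dispatch arms: as_deref() bridges clap's owned Strings to
// the borrowed &str / Option<&str> parameters the handlers now take.
fn dispatch_sketch(cmd: CommandSketch) -> Result<(), String> {
    match cmd {
        CommandSketch::Lookups { date } => cmd_lookups(date.as_deref()),
        CommandSketch::FactMine { path, batch, dry_run, output, min_messages } =>
            cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
    }
}
```

Keeping the handlers on borrowed parameters means they stay callable from tests and scripts alike; only main() needs to know about clap.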