// consciousness/poc-memory/src/main.rs
// poc-memory: graph-structured memory for AI assistants
//
// Authors: ProofOfConcept <poc@bcachefs.org> and Kent Overstreet
// License: MIT OR Apache-2.0
//
// Architecture:
// nodes.capnp - append-only content node log
// relations.capnp - append-only relation log
// state.bin - derived KV cache (rebuilt from logs when stale)
//
// Graph algorithms: clustering coefficient, community detection (label
// propagation), schema fit scoring, small-world metrics, consolidation
// priority. Text similarity via BM25 with Porter stemming.
//
// Neuroscience-inspired: spaced repetition replay, emotional gating,
// interference detection, schema assimilation, reconsolidation.
use poc_memory::*;
use clap::{Parser, Subcommand};
use std::process;
/// Find the most recently modified .jsonl transcript in the Claude projects dir.
///
/// Scans one level of subdirectories under the configured projects dir and
/// returns the newest transcript path as a `String`, or `None` when the
/// directory is missing, unreadable, or holds no `.jsonl` files.
fn find_current_transcript() -> Option<String> {
    let projects = config::get().projects_dir.clone();
    if !projects.exists() {
        return None;
    }
    let mut best: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
    let dirs = std::fs::read_dir(&projects).ok()?;
    for entry in dirs.flatten() {
        let dir_path = entry.path();
        if !dir_path.is_dir() {
            continue;
        }
        let Ok(files) = std::fs::read_dir(&dir_path) else {
            continue;
        };
        for file in files.flatten() {
            let path = file.path();
            let is_jsonl = path.extension().map(|x| x == "jsonl").unwrap_or(false);
            if !is_jsonl {
                continue;
            }
            // Files whose metadata or mtime is unreadable are simply skipped.
            let Some(mtime) = path.metadata().ok().and_then(|m| m.modified().ok()) else {
                continue;
            };
            if best.as_ref().map(|(t, _)| mtime > *t).unwrap_or(true) {
                best = Some((mtime, path));
            }
        }
    }
    best.map(|(_, p)| p.to_string_lossy().to_string())
}
// Top-level CLI parser. The `about`/`version` strings feed clap's generated
// help; the custom expanded help lives in print_help(). Reviewer notes use
// plain `//` comments because clap renders `///` doc comments as help text.
#[derive(Parser)]
#[command(name = "poc-memory", version = "0.4.0", about = "Graph-structured memory store")]
struct Cli {
    // The single required subcommand; see `Command` for the full set.
    #[command(subcommand)]
    command: Command,
}
// Top-level command set. clap turns every `///` doc comment below into
// user-visible help text, so those lines are effectively runtime strings;
// review notes use `//` comments to stay out of the generated help.
#[derive(Subcommand)]
enum Command {
    // ── Core (daily use) ──────────────────────────────────────────────
    /// Search memory (AND logic across terms)
    ///
    /// Pipeline: -p spread -p spectral,k=20
    /// Default pipeline: spread
    Search {
        /// Search terms
        query: Vec<String>,
        /// Algorithm pipeline stages (repeatable)
        #[arg(short, long = "pipeline")]
        pipeline: Vec<String>,
        /// Show more results
        #[arg(long)]
        expand: bool,
        /// Show node content, not just keys
        #[arg(long)]
        full: bool,
        /// Show debug output for each pipeline stage
        #[arg(long)]
        debug: bool,
        /// Also match key components (e.g. "irc" matches "irc-access")
        #[arg(long)]
        fuzzy: bool,
        /// Also search node content (slow, use when graph search misses)
        #[arg(long)]
        content: bool,
    },
    /// Output a node's content to stdout
    Render {
        /// Node key
        key: Vec<String>,
    },
    /// Upsert node content from stdin
    Write {
        /// Node key
        key: Vec<String>,
    },
    /// Show all stored versions of a node
    History {
        /// Show full content for every version
        #[arg(long)]
        full: bool,
        /// Node key
        key: Vec<String>,
    },
    /// Show most recent writes to the node log
    Tail {
        /// Number of entries (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
    },
    /// Summary of memory state
    Status,
    /// Query the memory graph
    // The multi-line literal below is shown verbatim by `--help`; the
    // trailing `\` escapes the first newline so the text starts at
    // EXPRESSIONS. NOTE(review): column alignment inside this literal looks
    // stripped — confirm against the original formatting.
    #[command(after_long_help = "\
EXPRESSIONS:
* all nodes
key ~ 'pattern' regex match on node key
content ~ 'phrase' regex match on node content
degree > 15 numeric comparison on any field
field = value exact match
field != value not equal
expr AND expr boolean AND
expr OR expr boolean OR
NOT expr negation
neighbors('key') nodes linked to key
neighbors('key') WHERE expr ... with filter on edges/nodes
FIELDS:
key, weight, content, degree, node_type, provenance,
emotion, retrievals, uses, wrongs, created,
clustering_coefficient (cc), community_id
OPERATORS:
> < >= <= = != ~(regex)
PIPE STAGES:
| sort FIELD [asc] sort (desc by default)
| limit N cap results
| select F,F,... output fields as TSV
| count just show count
| connectivity show graph structure between results
FUNCTIONS:
community('key') community id of a node
degree('key') degree of a node
EXAMPLES:
key ~ 'inner-life' substring match on keys
content ~ 'made love' full-text search
content ~ 'made love' | connectivity find clusters among results
(content ~ 'A' OR content ~ 'B') | connectivity
degree > 15 | sort degree | limit 10 high-degree nodes
key ~ 'journal' AND degree > 10 | count count matching nodes
neighbors('identity') WHERE strength > 0.5 | sort strength
* | sort weight asc | limit 20 lowest-weight nodes
")]
    Query {
        /// Query expression (e.g. "key ~ 'inner-life'")
        expr: Vec<String>,
    },
    /// Mark a memory as useful (boosts weight)
    Used {
        /// Node key
        key: Vec<String>,
    },
    /// Mark a memory as wrong/irrelevant
    Wrong {
        /// Node key
        key: String,
        /// Optional context
        context: Vec<String>,
    },
    /// Mark a search result as not relevant (weakens edges that led to it)
    #[command(name = "not-relevant")]
    NotRelevant {
        /// Node key that was not relevant
        key: String,
    },
    /// Mark a node as not useful (weakens node weight, not edges)
    #[command(name = "not-useful")]
    NotUseful {
        /// Node key
        key: String,
    },
    /// Record a gap in memory coverage
    Gap {
        /// Gap description
        description: Vec<String>,
    },
    // ── Node operations ───────────────────────────────────────────────
    /// Node operations (delete, rename, list)
    #[command(subcommand)]
    Node(NodeCmd),
    // ── Journal ───────────────────────────────────────────────────────
    /// Journal operations (write, tail, enrich)
    #[command(subcommand)]
    Journal(JournalCmd),
    // ── Graph ─────────────────────────────────────────────────────────
    /// Graph operations (link, audit, spectral)
    #[command(subcommand, name = "graph")]
    GraphCmd(GraphCmd),
    // ── Cursor (spatial memory) ──────────────────────────────────────
    /// Navigate the memory graph with a persistent cursor
    #[command(subcommand)]
    Cursor(CursorCmd),
    // ── Agents ────────────────────────────────────────────────────────
    /// Agent and daemon operations
    #[command(subcommand)]
    Agent(AgentCmd),
    // ── Admin ─────────────────────────────────────────────────────────
    /// Admin operations (fsck, health, import, export)
    #[command(subcommand)]
    Admin(AdminCmd),
}
// Subcommands under `poc-memory node`. `///` doc comments below are the
// user-visible help text clap generates.
#[derive(Subcommand)]
enum NodeCmd {
    /// Soft-delete a node
    Delete {
        /// Node key
        key: Vec<String>,
    },
    /// Rename a node key
    Rename {
        /// Old key
        old_key: String,
        /// New key
        new_key: String,
    },
    /// List all node keys (one per line, optional glob)
    #[command(name = "list")]
    List {
        /// Glob pattern to filter keys
        pattern: Option<String>,
    },
    /// List all edges (tsv: source target strength type)
    Edges,
    /// Dump entire store as JSON
    #[command(name = "dump")]
    Dump,
}
// Subcommands under `poc-memory cursor` — spatial navigation of the graph
// with a persistent cursor position. Doc comments are clap help text.
#[derive(Subcommand)]
enum CursorCmd {
    /// Show current cursor position with context
    Show,
    /// Set cursor to a node key
    Set {
        /// Node key
        key: Vec<String>,
    },
    /// Move cursor forward in time
    Forward,
    /// Move cursor backward in time
    Back,
    /// Move up the digest hierarchy (journal→daily→weekly→monthly)
    Up,
    /// Move down the digest hierarchy (to first child)
    Down,
    /// Clear the cursor
    Clear,
}
// Subcommands under `poc-memory journal`. Doc comments are clap help text.
#[derive(Subcommand)]
enum JournalCmd {
    /// Write a journal entry to the store
    Write {
        /// Entry text
        text: Vec<String>,
    },
    /// Show recent journal/digest entries
    Tail {
        /// Number of entries to show (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
        /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
        #[arg(long, default_value_t = 0)]
        level: u8,
    },
    /// Enrich journal entry with conversation links
    Enrich {
        /// Path to JSONL transcript
        jsonl_path: String,
        /// Journal entry text to enrich
        entry_text: String,
        /// Grep line number for source location
        #[arg(default_value_t = 0)]
        grep_line: usize,
    },
}
// Subcommands under `poc-memory graph`: edge management, topology
// maintenance (orphan linking, triangle closure, degree capping), and
// spectral analysis. Doc comments are clap help text.
#[derive(Subcommand)]
enum GraphCmd {
    /// Show neighbors of a node
    Link {
        /// Node key
        key: Vec<String>,
    },
    /// Add a link between two nodes
    #[command(name = "link-add")]
    LinkAdd {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
        /// Optional reason
        reason: Vec<String>,
    },
    /// Simulate adding an edge, report topology impact
    #[command(name = "link-impact")]
    LinkImpact {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
    },
    /// Walk every link, send to Sonnet for quality review
    #[command(name = "link-audit")]
    LinkAudit {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Link orphan nodes to similar neighbors
    #[command(name = "link-orphans")]
    LinkOrphans {
        /// Minimum degree to consider orphan (default: 2)
        #[arg(default_value_t = 2)]
        min_degree: usize,
        /// Links per orphan (default: 3)
        #[arg(default_value_t = 3)]
        links_per: usize,
        /// Similarity threshold (default: 0.15)
        #[arg(default_value_t = 0.15)]
        sim_threshold: f32,
    },
    /// Close triangles: link similar neighbors of hubs
    #[command(name = "triangle-close")]
    TriangleClose {
        /// Minimum hub degree (default: 5)
        #[arg(default_value_t = 5)]
        min_degree: usize,
        /// Similarity threshold (default: 0.3)
        #[arg(default_value_t = 0.3)]
        sim_threshold: f32,
        /// Maximum links per hub (default: 10)
        #[arg(default_value_t = 10)]
        max_per_hub: usize,
    },
    /// Cap node degree by pruning weak auto edges
    #[command(name = "cap-degree")]
    CapDegree {
        /// Maximum degree (default: 50)
        #[arg(default_value_t = 50)]
        max_degree: usize,
    },
    /// Set link strengths from neighborhood overlap (Jaccard similarity)
    #[command(name = "normalize-strengths")]
    NormalizeStrengths {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Redistribute hub links to section-level children
    Differentiate {
        /// Specific hub key (omit to list all differentiable hubs)
        key: Option<String>,
        /// Apply the redistribution
        #[arg(long)]
        apply: bool,
    },
    /// Walk temporal links: semantic ↔ episodic ↔ conversation
    Trace {
        /// Node key
        key: Vec<String>,
    },
    /// Detect potentially confusable memory pairs
    Interference {
        /// Similarity threshold (default: 0.4)
        #[arg(long, default_value_t = 0.4)]
        threshold: f32,
    },
    /// Show graph structure overview
    Overview,
    /// Spectral decomposition of the memory graph
    Spectral {
        /// Number of eigenvectors (default: 30)
        #[arg(default_value_t = 30)]
        k: usize,
    },
    /// Compute and save spectral embedding
    #[command(name = "spectral-save")]
    SpectralSave {
        /// Number of eigenvectors (default: 20)
        #[arg(default_value_t = 20)]
        k: usize,
    },
    /// Find spectrally nearest nodes
    #[command(name = "spectral-neighbors")]
    SpectralNeighbors {
        /// Node key
        key: String,
        /// Number of neighbors (default: 15)
        #[arg(default_value_t = 15)]
        n: usize,
    },
    /// Show nodes ranked by outlier/bridge score
    #[command(name = "spectral-positions")]
    SpectralPositions {
        /// Number of nodes to show (default: 30)
        #[arg(default_value_t = 30)]
        n: usize,
    },
    /// Find spectrally close but unlinked pairs
    #[command(name = "spectral-suggest")]
    SpectralSuggest {
        /// Number of pairs (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
    },
    /// Diagnose duplicate/overlapping nodes for a topic cluster
    Organize {
        /// Search term (matches node keys; also content unless --key-only)
        term: String,
        /// Similarity threshold for pair reporting (default: 0.4)
        #[arg(long, default_value_t = 0.4)]
        threshold: f32,
        /// Only match node keys, not content
        #[arg(long)]
        key_only: bool,
        /// Create anchor node for the search term and link to cluster
        #[arg(long)]
        anchor: bool,
    },
}
// Subcommands under `poc-memory agent daemon` (nested two levels deep; see
// AgentCmd::Daemon). Doc comments are clap help text.
#[derive(Subcommand)]
enum DaemonCmd {
    /// Start the daemon (default)
    Start,
    /// Show daemon status
    Status,
    /// Show daemon log
    Log {
        /// Job name to filter by
        job: Option<String>,
        /// Number of lines to show
        #[arg(long, default_value_t = 20)]
        lines: usize,
    },
    /// Install systemd service
    Install,
    /// Trigger consolidation via daemon
    Consolidate,
    /// Run an agent via the daemon
    Run {
        /// Agent name (e.g. organize, replay, linker)
        #[arg(default_value = "replay")]
        agent: String,
        /// Batch size
        #[arg(default_value_t = 1)]
        count: usize,
    },
    /// Interactive TUI
    Tui,
}
// Subcommands under `poc-memory agent`: daemon control, consolidation
// workflows, digest generation, and transcript mining. Doc comments are
// clap help text.
#[derive(Subcommand)]
enum AgentCmd {
    /// Background job daemon
    #[command(subcommand)]
    Daemon(DaemonCmd),
    /// Run knowledge agents to convergence
    #[command(name = "knowledge-loop")]
    KnowledgeLoop {
        /// Maximum cycles before stopping
        #[arg(long, default_value_t = 20)]
        max_cycles: usize,
        /// Items per agent per cycle
        #[arg(long, default_value_t = 5)]
        batch_size: usize,
        /// Cycles to check for convergence
        #[arg(long, default_value_t = 5)]
        window: usize,
        /// Maximum inference depth
        #[arg(long, default_value_t = 4)]
        max_depth: i32,
    },
    /// Run agent consolidation on priority nodes
    #[command(name = "consolidate-batch")]
    ConsolidateBatch {
        /// Number of nodes to consolidate
        #[arg(long, default_value_t = 5)]
        count: usize,
        /// Generate replay agent prompt automatically
        #[arg(long)]
        auto: bool,
        /// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
        #[arg(long)]
        agent: Option<String>,
    },
    /// Analyze metrics, plan agent allocation
    #[command(name = "consolidate-session")]
    ConsolidateSession,
    /// Autonomous: plan → agents → apply → digests → links
    #[command(name = "consolidate-full")]
    ConsolidateFull,
    /// Import pending agent results into the graph
    #[command(name = "apply-agent")]
    ApplyAgent {
        /// Process all files without moving to done/
        #[arg(long)]
        all: bool,
    },
    /// Extract and apply actions from consolidation reports
    #[command(name = "apply-consolidation")]
    ApplyConsolidation {
        /// Apply actions (default: dry run)
        #[arg(long)]
        apply: bool,
        /// Read from specific report file
        #[arg(long)]
        report: Option<String>,
    },
    /// Generate episodic digests (daily, weekly, monthly, auto)
    Digest {
        /// Digest type: daily, weekly, monthly, auto
        #[command(subcommand)]
        level: DigestLevel,
    },
    /// Parse and apply links from digest nodes
    #[command(name = "digest-links")]
    DigestLinks {
        /// Apply the links (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Mine conversation for experiential moments to journal
    #[command(name = "experience-mine")]
    ExperienceMine {
        /// Path to JSONL transcript (default: most recent)
        jsonl_path: Option<String>,
    },
    /// Extract atomic facts from conversation transcripts
    #[command(name = "fact-mine")]
    FactMine {
        /// Path to JSONL transcript or directory (with --batch)
        path: String,
        /// Process all .jsonl files in directory
        #[arg(long)]
        batch: bool,
        /// Show chunks without calling model
        #[arg(long)]
        dry_run: bool,
        /// Write JSON to file (default: stdout)
        #[arg(long, short)]
        output: Option<String>,
        /// Skip transcripts with fewer messages
        #[arg(long, default_value_t = 10)]
        min_messages: usize,
    },
    /// Extract facts from a transcript and store directly
    #[command(name = "fact-mine-store")]
    FactMineStore {
        /// Path to JSONL transcript
        path: String,
    },
    /// Show spaced repetition replay queue
    #[command(name = "replay-queue")]
    ReplayQueue {
        /// Number of items to show
        #[arg(long, default_value_t = 10)]
        count: usize,
    },
}
// Subcommands under `poc-memory admin`: initialization, integrity checks,
// bulk maintenance, import/export, and diagnostics. Doc comments are clap
// help text.
#[derive(Subcommand)]
enum AdminCmd {
    /// Scan markdown files, index all memory units
    Init,
    /// Report graph metrics (CC, communities, small-world)
    Health,
    /// Run consistency checks and repair
    Fsck,
    /// Find and merge duplicate nodes (same key, multiple UUIDs)
    Dedup {
        /// Apply the merge (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Bulk rename: replace a character in all keys
    #[command(name = "bulk-rename")]
    BulkRename {
        /// Character to replace
        from: String,
        /// Replacement character
        to: String,
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Brief metrics check (for cron/notifications)
    #[command(name = "daily-check")]
    DailyCheck,
    /// Import markdown file(s) into the store
    Import {
        /// File paths
        files: Vec<String>,
    },
    /// Export store nodes to markdown file(s)
    Export {
        /// File keys to export (or --all)
        files: Vec<String>,
        /// Export all file-level nodes
        #[arg(long)]
        all: bool,
    },
    /// Output session-start context from the store
    #[command(name = "load-context")]
    LoadContext {
        /// Show word count statistics instead of content
        #[arg(long)]
        stats: bool,
    },
    /// Show recent retrieval log
    Log,
    /// Show current parameters
    Params,
    /// Bump daily lookup counter for keys
    #[command(name = "lookup-bump")]
    LookupBump {
        /// Node keys
        keys: Vec<String>,
    },
    /// Show daily lookup counts
    Lookups {
        /// Date (default: today)
        date: Option<String>,
    },
}
// Digest granularity selector for `agent digest` (nested subcommand of
// AgentCmd::Digest). Doc comments are clap help text.
#[derive(Subcommand)]
enum DigestLevel {
    /// Generate daily digest
    Daily {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Generate weekly digest
    Weekly {
        /// Date or week label (default: current week)
        date: Option<String>,
    },
    /// Generate monthly digest
    Monthly {
        /// Month (YYYY-MM) or date (default: current month)
        date: Option<String>,
    },
    /// Generate all missing digests
    Auto,
}
/// Print help with subcommands expanded to show nested commands.
///
/// Walks the clap command tree up to three levels (command, group,
/// sub-subcommand) so grouped commands such as `agent daemon start`
/// appear as flat one-line entries — clap's default help only shows the
/// top level. Leaf entries print as "name<pad>about" with the name
/// left-padded to 34 columns.
fn print_help() {
    use clap::CommandFactory;
    let cmd = Cli::command();
    println!("poc-memory - graph-structured memory store");
    println!("usage: poc-memory <command> [<args>]\n");
    for sub in cmd.get_subcommands() {
        // clap injects an implicit `help` subcommand at every level; skip it.
        if sub.get_name() == "help" { continue }
        let children: Vec<_> = sub.get_subcommands()
            .filter(|c| c.get_name() != "help")
            .collect();
        if !children.is_empty() {
            // Grouped command: print each child as "group child".
            for child in &children {
                let about = child.get_about().map(|s| s.to_string()).unwrap_or_default();
                let full = format!("{} {}", sub.get_name(), child.get_name());
                // Recurse one more level for daemon subcommands etc.
                let grandchildren: Vec<_> = child.get_subcommands()
                    .filter(|c| c.get_name() != "help")
                    .collect();
                if !grandchildren.is_empty() {
                    for gc in grandchildren {
                        let gc_about = gc.get_about().map(|s| s.to_string()).unwrap_or_default();
                        let gc_full = format!("{} {}", full, gc.get_name());
                        println!(" {:<34}{gc_about}", gc_full);
                    }
                } else {
                    println!(" {:<34}{about}", full);
                }
            }
        } else {
            // Plain top-level command with no nested subcommands.
            let about = sub.get_about().map(|s| s.to_string()).unwrap_or_default();
            println!(" {:<34}{about}", sub.get_name());
        }
    }
}
fn main() {
    // Handle --help ourselves for expanded subcommand display.
    // Precedence note: `&&` binds tighter than `||`, so this reads as
    // "no args at all, OR (some arg is -h/--help AND there is exactly one
    // arg)" — i.e. bare `poc-memory` or bare `poc-memory --help`.
    let args: Vec<String> = std::env::args().collect();
    if args.len() <= 1 || args.iter().any(|a| a == "--help" || a == "-h") && args.len() == 2 {
        print_help();
        return;
    }
    let cli = Cli::parse();
    // 1:1 dispatch from parsed CLI variants to cmd_* handlers. Every handler
    // returns Result<(), String>, so all errors funnel to one exit path.
    let result = match cli.command {
        // Core
        Command::Search { query, pipeline, expand, full, debug, fuzzy, content }
            => cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content),
        Command::Render { key } => cmd_render(&key),
        Command::Write { key } => cmd_write(&key),
        Command::History { full, key } => cmd_history(&key, full),
        Command::Tail { n, full } => cmd_tail(n, full),
        Command::Status => cmd_status(),
        Command::Query { expr } => cmd_query(&expr),
        Command::Used { key } => cmd_used(&key),
        Command::Wrong { key, context } => cmd_wrong(&key, &context),
        Command::NotRelevant { key } => cmd_not_relevant(&key),
        Command::NotUseful { key } => cmd_not_useful(&key),
        Command::Gap { description } => cmd_gap(&description),
        // Node
        Command::Node(sub) => match sub {
            NodeCmd::Delete { key } => cmd_node_delete(&key),
            NodeCmd::Rename { old_key, new_key } => cmd_node_rename(&old_key, &new_key),
            NodeCmd::List { pattern } => cmd_list_keys(pattern.as_deref()),
            NodeCmd::Edges => cmd_list_edges(),
            NodeCmd::Dump => cmd_dump_json(),
        },
        // Journal
        Command::Journal(sub) => match sub {
            JournalCmd::Write { text } => cmd_journal_write(&text),
            JournalCmd::Tail { n, full, level } => cmd_journal_tail(n, full, level),
            JournalCmd::Enrich { jsonl_path, entry_text, grep_line }
                => cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
        },
        // Graph
        Command::GraphCmd(sub) => match sub {
            GraphCmd::Link { key } => cmd_link(&key),
            GraphCmd::LinkAdd { source, target, reason }
                => cmd_link_add(&source, &target, &reason),
            GraphCmd::LinkImpact { source, target }
                => cmd_link_impact(&source, &target),
            GraphCmd::LinkAudit { apply } => cmd_link_audit(apply),
            GraphCmd::LinkOrphans { min_degree, links_per, sim_threshold }
                => cmd_link_orphans(min_degree, links_per, sim_threshold),
            GraphCmd::TriangleClose { min_degree, sim_threshold, max_per_hub }
                => cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
            GraphCmd::CapDegree { max_degree } => cmd_cap_degree(max_degree),
            GraphCmd::NormalizeStrengths { apply } => cmd_normalize_strengths(apply),
            GraphCmd::Differentiate { key, apply }
                => cmd_differentiate(key.as_deref(), apply),
            GraphCmd::Trace { key } => cmd_trace(&key),
            GraphCmd::Interference { threshold } => cmd_interference(threshold),
            GraphCmd::Overview => cmd_graph(),
            GraphCmd::Spectral { k } => cmd_spectral(k),
            GraphCmd::SpectralSave { k } => cmd_spectral_save(k),
            GraphCmd::SpectralNeighbors { key, n }
                => cmd_spectral_neighbors(&key, n),
            GraphCmd::SpectralPositions { n } => cmd_spectral_positions(n),
            GraphCmd::SpectralSuggest { n } => cmd_spectral_suggest(n),
            GraphCmd::Organize { term, threshold, key_only, anchor }
                => cmd_organize(&term, threshold, key_only, anchor),
        },
        // Cursor — cmd_cursor handles its own sub-dispatch.
        Command::Cursor(sub) => cmd_cursor(sub),
        // Agent
        Command::Agent(sub) => match sub {
            AgentCmd::Daemon(sub) => cmd_daemon(sub),
            AgentCmd::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
                => cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
            AgentCmd::ConsolidateBatch { count, auto, agent }
                => cmd_consolidate_batch(count, auto, agent),
            AgentCmd::ConsolidateSession => cmd_consolidate_session(),
            AgentCmd::ConsolidateFull => cmd_consolidate_full(),
            AgentCmd::ApplyAgent { all } => cmd_apply_agent(all),
            AgentCmd::ApplyConsolidation { apply, report }
                => cmd_apply_consolidation(apply, report.as_deref()),
            AgentCmd::Digest { level } => cmd_digest(level),
            AgentCmd::DigestLinks { apply } => cmd_digest_links(apply),
            AgentCmd::ExperienceMine { jsonl_path } => cmd_experience_mine(jsonl_path),
            AgentCmd::FactMine { path, batch, dry_run, output, min_messages }
                => cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
            AgentCmd::FactMineStore { path } => cmd_fact_mine_store(&path),
            AgentCmd::ReplayQueue { count } => cmd_replay_queue(count),
        },
        // Admin
        Command::Admin(sub) => match sub {
            AdminCmd::Init => cmd_init(),
            AdminCmd::Health => cmd_health(),
            AdminCmd::Fsck => cmd_fsck(),
            AdminCmd::Dedup { apply } => cmd_dedup(apply),
            AdminCmd::BulkRename { from, to, apply } => cmd_bulk_rename(&from, &to, apply),
            AdminCmd::DailyCheck => cmd_daily_check(),
            AdminCmd::Import { files } => cmd_import(&files),
            AdminCmd::Export { files, all } => cmd_export(&files, all),
            AdminCmd::LoadContext { stats } => cmd_load_context(stats),
            AdminCmd::Log => cmd_log(),
            AdminCmd::Params => cmd_params(),
            AdminCmd::LookupBump { keys } => cmd_lookup_bump(&keys),
            AdminCmd::Lookups { date } => cmd_lookups(date.as_deref()),
        },
    };
    // Single exit path for all command errors.
    if let Err(e) = result {
        eprintln!("Error: {}", e);
        process::exit(1);
    }
}
// ── Command implementations ─────────────────────────────────────────
/// `search`: run a pipeline of search stages over the memory graph.
///
/// Two execution paths: the full `Store` (needed when the pipeline contains
/// filter/transform/generator stages) or the faster mmap view for
/// algorithm-only pipelines. Results print as "rank. [score/weight] key",
/// optionally followed by the node's full content.
fn cmd_search(terms: &[String], pipeline_args: &[String], expand: bool, full: bool, debug: bool, fuzzy: bool, content: bool) -> Result<(), String> {
    use store::StoreView;
    use std::collections::BTreeMap;
    // Parse pipeline stages (unified: algorithms, filters, transforms, generators)
    let stages: Vec<search::Stage> = if pipeline_args.is_empty() {
        vec![search::Stage::Algorithm(search::AlgoStage::parse("spread").unwrap())]
    } else {
        pipeline_args.iter()
            .map(|a| search::Stage::parse(a))
            .collect::<Result<Vec<_>, _>>()?
    };
    // Check if pipeline needs full Store (has filters/transforms/generators)
    let needs_store = stages.iter().any(|s| !matches!(s, search::Stage::Algorithm(_)));
    // Check if pipeline starts with a generator (doesn't need seed terms)
    let has_generator = stages.first().map(|s| matches!(s, search::Stage::Generator(_))).unwrap_or(false);
    if terms.is_empty() && !has_generator {
        return Err("search requires terms or a generator stage (e.g. 'all')".into());
    }
    let query: String = terms.join(" ");
    if debug {
        let names: Vec<String> = stages.iter().map(|s| format!("{}", s)).collect();
        // NOTE(review): stage names join with "" — this assumes Stage's
        // Display impl carries its own separator; confirm, otherwise the
        // debug line prints stage names run together.
        println!("[search] pipeline: {}", names.join(""));
    }
    let max_results = if expand { 15 } else { 5 };
    if needs_store {
        // Full Store path — needed for filter/transform/generator stages
        let store = store::Store::load()?;
        let graph = store.build_graph();
        let seeds = if has_generator {
            vec![] // generator will produce its own result set
        } else {
            // Each whitespace-separated term becomes a lowercased seed term
            // with uniform weight 1.0.
            let terms_map: BTreeMap<String, f64> = query.split_whitespace()
                .map(|t| (t.to_lowercase(), 1.0))
                .collect();
            let (seeds, _) = search::match_seeds_opts(&terms_map, &store, fuzzy, content);
            seeds
        };
        let raw = search::run_query(&stages, seeds, &graph, &store, debug, max_results);
        if raw.is_empty() {
            eprintln!("No results");
            return Ok(());
        }
        for (i, (key, score)) in raw.iter().enumerate().take(max_results) {
            let weight = store.nodes.get(key).map(|n| n.weight).unwrap_or(0.0);
            println!("{:2}. [{:.2}/{:.2}] {}", i + 1, score, weight, key);
            if full {
                if let Some(node) = store.nodes.get(key) {
                    println!();
                    for line in node.content.lines() {
                        println!(" {}", line);
                    }
                    println!();
                }
            }
        }
    } else {
        // Fast MmapView path — algorithm-only pipeline
        let view = store::AnyView::load()?;
        let graph = graph::build_graph_fast(&view);
        let terms_map: BTreeMap<String, f64> = query.split_whitespace()
            .map(|t| (t.to_lowercase(), 1.0))
            .collect();
        let (seeds, direct_hits) = search::match_seeds_opts(&terms_map, &view, fuzzy, content);
        if seeds.is_empty() {
            eprintln!("No results for '{}'", query);
            return Ok(());
        }
        if debug {
            println!("[search] {} seeds from query '{}'", seeds.len(), query);
        }
        // Extract AlgoStages from the unified stages
        let algo_stages: Vec<&search::AlgoStage> = stages.iter()
            .filter_map(|s| match s {
                search::Stage::Algorithm(a) => Some(a),
                _ => None,
            })
            .collect();
        let algo_owned: Vec<search::AlgoStage> = algo_stages.into_iter().cloned().collect();
        let raw = search::run_pipeline(&algo_owned, seeds, &graph, &view, debug, max_results);
        let results: Vec<search::SearchResult> = raw.into_iter()
            .map(|(key, activation)| {
                let is_direct = direct_hits.contains(&key);
                search::SearchResult { key, activation, is_direct, snippet: None }
            })
            .collect();
        if results.is_empty() {
            eprintln!("No results for '{}'", query);
            return Ok(());
        }
        // Log retrieval (only on this fast path; the Store path above does
        // not log — NOTE(review): confirm that asymmetry is intentional).
        store::Store::log_retrieval_static(&query,
            &results.iter().map(|r| r.key.clone()).collect::<Vec<_>>());
        let bump_keys: Vec<&str> = results.iter().take(max_results).map(|r| r.key.as_str()).collect();
        let _ = lookups::bump_many(&bump_keys);
        for (i, r) in results.iter().enumerate().take(max_results) {
            // Direct hits print flush-left; spreading-activation hits are
            // indented one space.
            let marker = if r.is_direct { "" } else { " " };
            let weight = view.node_weight(&r.key);
            println!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
            if full {
                if let Some(content) = view.node_content(&r.key) {
                    println!();
                    for line in content.lines() {
                        println!(" {}", line);
                    }
                    println!();
                }
            }
        }
    }
    Ok(())
}
/// `admin init`: bootstrap the data directory and store.
///
/// Creates the data dir, installs bundled default files, indexes markdown
/// into the store, seeds the default identity node when missing, installs
/// hooks, and writes an example config if none exists.
///
/// Errors are returned as `String` descriptions; a missing `HOME` (when
/// `POC_MEMORY_CONFIG` is also unset) is now an error instead of a panic.
fn cmd_init() -> Result<(), String> {
    let cfg = config::get();
    // Ensure data directory exists
    std::fs::create_dir_all(&cfg.data_dir)
        .map_err(|e| format!("create data_dir: {}", e))?;
    // Install filesystem files (not store nodes)
    install_default_file(&cfg.data_dir, "instructions.md",
        include_str!("../defaults/instructions.md"))?;
    install_default_file(&cfg.data_dir, "on-consciousness.md",
        include_str!("../defaults/on-consciousness.md"))?;
    // Initialize store and seed default identity node if empty
    let mut store = store::Store::load()?;
    let count = store.init_from_markdown()?;
    for key in &cfg.core_nodes {
        // Only "identity" ships a bundled default; other core nodes are
        // expected to be authored by the user.
        if !store.nodes.contains_key(key) && key == "identity" {
            let default = include_str!("../defaults/identity.md");
            store.upsert(key, default)
                .map_err(|e| format!("seed {}: {}", key, e))?;
            println!("Seeded {} in store", key);
        }
    }
    store.save()?;
    println!("Indexed {} memory units", count);
    // Install hooks
    daemon::install_hook()?;
    // Create config if none exists. POC_MEMORY_CONFIG overrides the default
    // path; otherwise fall back to $HOME. Previously this unwrapped HOME and
    // could panic — propagate a descriptive error instead.
    let config_path = match std::env::var("POC_MEMORY_CONFIG") {
        Ok(p) => std::path::PathBuf::from(p),
        Err(_) => {
            let home = std::env::var("HOME")
                .map_err(|_| "neither POC_MEMORY_CONFIG nor HOME is set".to_string())?;
            std::path::PathBuf::from(home).join(".config/poc-memory/config.jsonl")
        }
    };
    if !config_path.exists() {
        // config_path always ends in a file name, so parent() is Some.
        let config_dir = config_path.parent().unwrap();
        std::fs::create_dir_all(config_dir)
            .map_err(|e| format!("create config dir: {}", e))?;
        let example = include_str!("../config.example.jsonl");
        std::fs::write(&config_path, example)
            .map_err(|e| format!("write config: {}", e))?;
        println!("Created config at {} — edit with your name and context groups",
            config_path.display());
    }
    println!("Done. Run `poc-memory load-context --stats` to verify.");
    Ok(())
}
/// `admin bulk-rename`: replace a substring in every node key.
///
/// Dry-run by default; `--apply` performs the renames through
/// `rename_node()` so the capnp logs stay append-only consistent, then
/// re-runs fsck to verify. Aborts before touching anything if any rename
/// target already exists (would silently merge two nodes).
fn cmd_bulk_rename(from: &str, to: &str, apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    // Find all keys that need renaming. Skip no-ops (e.g. when from == to)
    // so we never issue a rename of a key onto itself.
    let renames: Vec<(String, String)> = store.nodes.keys()
        .filter(|k| k.contains(from))
        .map(|k| (k.clone(), k.replace(from, to)))
        .filter(|(old, new)| old != new)
        .collect();
    // Check for collisions with existing keys. (No-op pairs are already
    // filtered out, so any hit here is a genuine conflict.)
    let existing: std::collections::HashSet<&String> = store.nodes.keys().collect();
    let mut collisions = 0;
    for (old, new) in &renames {
        if existing.contains(new) {
            eprintln!("COLLISION: {} -> {} (target exists)", old, new);
            collisions += 1;
        }
    }
    println!("Bulk rename '{}' -> '{}'", from, to);
    println!(" Keys to rename: {}", renames.len());
    println!(" Collisions: {}", collisions);
    if collisions > 0 {
        return Err(format!("{} collisions — aborting", collisions));
    }
    if !apply {
        // Show a sample
        for (old, new) in renames.iter().take(10) {
            println!(" {} -> {}", old, new);
        }
        if renames.len() > 10 {
            println!(" ... and {} more", renames.len() - 10);
        }
        println!("\nDry run. Use --apply to execute.");
        return Ok(());
    }
    // Apply renames using rename_node() which properly appends to capnp logs.
    // Progress is reported every 1000 renames; individual failures are
    // counted but do not abort the batch.
    let mut renamed_count = 0;
    let mut errors = 0;
    let total = renames.len();
    for (i, (old_key, new_key)) in renames.iter().enumerate() {
        match store.rename_node(old_key, new_key) {
            Ok(()) => renamed_count += 1,
            Err(e) => {
                eprintln!(" RENAME ERROR: {} -> {}: {}", old_key, new_key, e);
                errors += 1;
            }
        }
        if (i + 1) % 1000 == 0 {
            println!(" {}/{} ({} errors)", i + 1, total, errors);
        }
    }
    store.save()?;
    println!("Renamed {} nodes ({} errors).", renamed_count, errors);
    // Run fsck to verify
    println!("\nRunning fsck...");
    drop(store);
    cmd_fsck()?;
    Ok(())
}
/// Write `content` to `data_dir/name` unless the file already exists.
///
/// Existing files are left untouched so user edits survive a re-init.
/// Prints a "Created" line when a file is written; write failures are
/// returned as a `String` error.
fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -> Result<(), String> {
    let target = data_dir.join(name);
    if target.exists() {
        return Ok(());
    }
    std::fs::write(&target, content)
        .map_err(|e| format!("write {}: {}", name, e))?;
    println!("Created {}", target.display());
    Ok(())
}
/// `admin fsck`: consistency checks and repair.
///
/// Verifies the derived cache against the append-only capnp logs (missing,
/// phantom, or stale entries trigger a full cache rebuild), checks map-key
/// vs node-key consistency, reports dangling edge endpoints, and tombstones
/// orphan edges. Ends with a one-line summary.
fn cmd_fsck() -> Result<(), String> {
    let mut store = store::Store::load()?;
    // Check cache vs log consistency: the logs are the source of truth,
    // state.bin is a derived cache.
    let log_store = store::Store::load_from_logs()?;
    let mut cache_issues = 0;
    // Nodes in logs but missing from cache
    for key in log_store.nodes.keys() {
        if !store.nodes.contains_key(key) {
            eprintln!("CACHE MISSING: '{}' exists in capnp log but not in cache", key);
            cache_issues += 1;
        }
    }
    // Nodes in cache but not in logs (phantom nodes)
    for key in store.nodes.keys() {
        if !log_store.nodes.contains_key(key) {
            eprintln!("CACHE PHANTOM: '{}' exists in cache but not in capnp log", key);
            cache_issues += 1;
        }
    }
    // Version mismatches
    for (key, log_node) in &log_store.nodes {
        if let Some(cache_node) = store.nodes.get(key) {
            if cache_node.version != log_node.version {
                eprintln!("CACHE STALE: '{}' cache v{} vs log v{}",
                    key, cache_node.version, log_node.version);
                cache_issues += 1;
            }
        }
    }
    // Any inconsistency means the cache cannot be trusted: replace it
    // wholesale with the log-derived state and persist.
    if cache_issues > 0 {
        eprintln!("{} cache inconsistencies found — rebuilding from logs", cache_issues);
        store = log_store;
        store.save().map_err(|e| format!("rebuild save: {}", e))?;
    }
    // Check node-key consistency (map key must equal the node's own key)
    let mut issues = 0;
    for (key, node) in &store.nodes {
        if key != &node.key {
            eprintln!("MISMATCH: map key '{}' vs node.key '{}'", key, node.key);
            issues += 1;
        }
    }
    // Check edge endpoints (report only; repair happens below)
    let mut dangling = 0;
    for rel in &store.relations {
        if rel.deleted { continue; }
        if !store.nodes.contains_key(&rel.source_key) {
            eprintln!("DANGLING: edge source '{}'", rel.source_key);
            dangling += 1;
        }
        if !store.nodes.contains_key(&rel.target_key) {
            eprintln!("DANGLING: edge target '{}'", rel.target_key);
            dangling += 1;
        }
    }
    // Prune orphan edges by appending tombstone records (the relation log
    // is append-only, so deletion is a new versioned record with deleted=true).
    let mut to_tombstone = Vec::new();
    for rel in &store.relations {
        if rel.deleted { continue; }
        if !store.nodes.contains_key(&rel.source_key)
            || !store.nodes.contains_key(&rel.target_key) {
            let mut tombstone = rel.clone();
            tombstone.deleted = true;
            tombstone.version += 1;
            to_tombstone.push(tombstone);
        }
    }
    if !to_tombstone.is_empty() {
        let count = to_tombstone.len();
        store.append_relations(&to_tombstone)?;
        // Mirror the tombstones into the in-memory relation list so the
        // saved cache matches what was just appended to the log.
        for t in &to_tombstone {
            if let Some(r) = store.relations.iter_mut().find(|r| r.uuid == t.uuid) {
                r.deleted = true;
                r.version = t.version;
            }
        }
        store.save()?;
        eprintln!("Pruned {} orphan edges", count);
    }
    let g = store.build_graph();
    println!("fsck: {} nodes, {} edges, {} issues, {} dangling, {} cache",
        store.nodes.len(), g.edge_count(), issues, dangling, cache_issues);
    Ok(())
}
/// Find and (with `apply`) merge nodes that share the same key.
///
/// Duplicate groups are classified as "identical" (byte-equal content,
/// safe to auto-merge) or "diverged" (content differs). When applying,
/// the survivor of each group is the copy with the most edges — the
/// version that got woven into the graph — falling back to the highest
/// version. Edges are redirected to the survivor, parallel edges deduped
/// keeping the strongest, and the losing copies tombstoned in the logs.
fn cmd_dedup(apply: bool) -> Result<(), String> {
    use std::collections::{HashMap, HashSet};
    let mut store = store::Store::load()?;
    let duplicates = store.find_duplicates()?;
    if duplicates.is_empty() {
        println!("No duplicate keys found.");
        return Ok(());
    }
    // Count edges per UUID — used both for the report and survivor choice.
    let mut edges_by_uuid: HashMap<[u8; 16], usize> = HashMap::new();
    for rel in &store.relations {
        if rel.deleted { continue; }
        *edges_by_uuid.entry(rel.source).or_default() += 1;
        *edges_by_uuid.entry(rel.target).or_default() += 1;
    }
    let mut identical_groups = Vec::new();
    let mut diverged_groups = Vec::new();
    for (key, mut nodes) in duplicates {
        // Sort by version descending so highest-version is first
        nodes.sort_by(|a, b| b.version.cmp(&a.version));
        // Check if all copies have identical content
        let all_same = nodes.windows(2).all(|w| w[0].content == w[1].content);
        // Pair each copy with its live edge count for display/selection.
        let info: Vec<_> = nodes.iter().map(|n| {
            let edge_count = edges_by_uuid.get(&n.uuid).copied().unwrap_or(0);
            (n.clone(), edge_count)
        }).collect();
        if all_same {
            identical_groups.push((key, info));
        } else {
            diverged_groups.push((key, info));
        }
    }
    // Report
    println!("=== Duplicate key report ===\n");
    println!("{} identical groups, {} diverged groups\n",
        identical_groups.len(), diverged_groups.len());
    if !identical_groups.is_empty() {
        println!("── Identical (safe to auto-merge) ──");
        for (key, copies) in &identical_groups {
            let total_edges: usize = copies.iter().map(|c| c.1).sum();
            println!(" {} ({} copies, {} total edges)", key, copies.len(), total_edges);
            for (node, edges) in copies {
                let uuid_hex = node.uuid.iter().map(|b| format!("{:02x}", b)).collect::<String>();
                println!(" v{} uuid={}.. edges={}", node.version, &uuid_hex[..8], edges);
            }
        }
        println!();
    }
    if !diverged_groups.is_empty() {
        println!("── Diverged (need review) ──");
        for (key, copies) in &diverged_groups {
            let total_edges: usize = copies.iter().map(|c| c.1).sum();
            println!(" {} ({} copies, {} total edges)", key, copies.len(), total_edges);
            for (node, edges) in copies {
                let uuid_hex = node.uuid.iter().map(|b| format!("{:02x}", b)).collect::<String>();
                // NOTE(review): preview takes 80 chars but the ellipsis test
                // uses content.len() (bytes) — slightly inconsistent for
                // multi-byte content, cosmetic only.
                let preview: String = node.content.chars().take(80).collect();
                println!(" v{} uuid={}.. edges={} | {}{}",
                    node.version, &uuid_hex[..8], edges, preview,
                    if node.content.len() > 80 { "..." } else { "" });
            }
        }
        println!();
    }
    if !apply {
        // Dry run: each group of k copies would lose k-1 nodes.
        let total_dupes: usize = identical_groups.iter().chain(diverged_groups.iter())
            .map(|(_, copies)| copies.len() - 1)
            .sum();
        println!("Dry run: {} duplicate nodes would be merged. Use --apply to execute.", total_dupes);
        return Ok(());
    }
    // Merge all groups: identical + diverged
    // For diverged: keep the copy with most edges (it's the one that got
    // woven into the graph — the version that lived). Fall back to highest version.
    let all_groups: Vec<_> = identical_groups.into_iter()
        .chain(diverged_groups.into_iter())
        .collect();
    let mut merged = 0usize;
    let mut edges_redirected = 0usize;
    let mut edges_deduped = 0usize;
    for (_key, mut copies) in all_groups {
        // Pick survivor: most edges first, then highest version
        copies.sort_by(|a, b| b.1.cmp(&a.1).then(b.0.version.cmp(&a.0.version)));
        let survivor_uuid = copies[0].0.uuid;
        let doomed_uuids: Vec<[u8; 16]> = copies[1..].iter().map(|c| c.0.uuid).collect();
        // Redirect edges from doomed UUIDs to survivor
        let mut updated_rels = Vec::new();
        for rel in &mut store.relations {
            if rel.deleted { continue; }
            let mut changed = false;
            if doomed_uuids.contains(&rel.source) {
                rel.source = survivor_uuid;
                changed = true;
            }
            if doomed_uuids.contains(&rel.target) {
                rel.target = survivor_uuid;
                changed = true;
            }
            if changed {
                rel.version += 1;
                updated_rels.push(rel.clone());
                edges_redirected += 1;
            }
        }
        // Dedup edges: same (source, target, rel_type) → keep highest strength
        let mut seen: HashSet<([u8; 16], [u8; 16], String)> = HashSet::new();
        let mut to_tombstone_rels = Vec::new();
        // Sort by strength descending so we keep the strongest
        let mut rels_with_idx: Vec<(usize, &store::Relation)> = store.relations.iter()
            .enumerate()
            .filter(|(_, r)| !r.deleted && (r.source == survivor_uuid || r.target == survivor_uuid))
            .collect();
        rels_with_idx.sort_by(|a, b| b.1.strength.total_cmp(&a.1.strength));
        for (idx, rel) in &rels_with_idx {
            let edge_key = (rel.source, rel.target, format!("{:?}", rel.rel_type));
            // insert() returns false on a repeat — that's a duplicate edge.
            if !seen.insert(edge_key) {
                to_tombstone_rels.push(*idx);
                edges_deduped += 1;
            }
        }
        for &idx in &to_tombstone_rels {
            store.relations[idx].deleted = true;
            store.relations[idx].version += 1;
            updated_rels.push(store.relations[idx].clone());
        }
        // Tombstone doomed nodes
        let mut tombstones = Vec::new();
        for (doomed_node, _) in &copies[1..] {
            let mut t = doomed_node.clone();
            t.deleted = true;
            t.version += 1;
            tombstones.push(t);
        }
        store.append_nodes(&tombstones)?;
        if !updated_rels.is_empty() {
            store.append_relations(&updated_rels)?;
        }
        for uuid in &doomed_uuids {
            store.uuid_to_key.remove(uuid);
        }
        merged += doomed_uuids.len();
    }
    // Remove tombstoned relations from cache
    store.relations.retain(|r| !r.deleted);
    store.save()?;
    println!("Merged {} duplicates, redirected {} edges, deduped {} duplicate edges",
        merged, edges_redirected, edges_deduped);
    Ok(())
}
/// Print the graph health report for the current store.
fn cmd_health() -> Result<(), String> {
    let store = store::Store::load()?;
    let graph = store.build_graph();
    print!("{}", graph::health_report(&graph, &store));
    Ok(())
}
/// Show store status: the interactive TUI when stdout is a terminal and
/// the daemon is reachable, otherwise a plain-text summary of node and
/// relation counts plus graph stats.
///
/// Fix: all episodic node types map to a single "episodic" label, but the
/// old code counted per-type and could print several `episodic=` entries;
/// counts are now aggregated per label before printing.
fn cmd_status() -> Result<(), String> {
    // Prefer the TUI when stdout is a tty; fall back to text output when
    // it fails (e.g. daemon not running).
    if std::io::IsTerminal::is_terminal(&std::io::stdout()) && tui::run_tui().is_ok() {
        return Ok(());
    }
    let store = store::Store::load()?;
    let g = store.build_graph();
    // Aggregate node counts by display label so the episodic tiers fold
    // into one bucket.
    let mut type_counts: std::collections::HashMap<String, usize> =
        std::collections::HashMap::new();
    for node in store.nodes.values() {
        let type_name = format!("{:?}", node.node_type);
        let label = match type_name.as_str() {
            "Semantic" => "semantic".to_string(),
            "EpisodicSession" | "EpisodicDaily" | "EpisodicWeekly" | "EpisodicMonthly"
                => "episodic".to_string(),
            // Unknown/future variants keep their debug name.
            _ => type_name,
        };
        *type_counts.entry(label).or_insert(0usize) += 1;
    }
    let mut types: Vec<_> = type_counts.iter().collect();
    types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
    println!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len());
    print!("Types:");
    for (label, count) in &types {
        print!(" {}={}", label, count);
    }
    println!();
    println!("Graph edges: {} Communities: {}",
        g.edge_count(), g.community_count());
    Ok(())
}
/// Print top-level graph statistics: size, community structure, and
/// small-world / degree-distribution metrics.
fn cmd_graph() -> Result<(), String> {
    let store = store::Store::load()?;
    let graph = store.build_graph();
    let node_count = graph.nodes().len();
    let edge_count = graph.edge_count();
    let community_count = graph.community_count();
    println!("Graph: {} nodes, {} edges, {} communities",
        node_count, edge_count, community_count);
    println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
        graph.small_world_sigma(), graph.degree_power_law_exponent(),
        graph.degree_gini(), graph.avg_clustering_coefficient());
    Ok(())
}
/// Record a successful use of a node: bump its usage stats and apply a
/// small conscious-tier strength boost to every live edge touching it.
fn cmd_used(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("used requires a key".into());
    }
    let key = key.join(" ");
    let mut store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    store.mark_used(&resolved);
    // Conscious-tier reinforcement: +0.01 per incident edge, clamped so
    // strengths never saturate at the extremes.
    const DELTA: f32 = 0.01;
    let mut strengthened = 0;
    for rel in store.relations.iter_mut().filter(|r| !r.deleted) {
        if rel.source_key != resolved && rel.target_key != resolved {
            continue;
        }
        let before = rel.strength;
        rel.strength = (before + DELTA).clamp(0.05, 0.95);
        // Only count (and version-bump) edges that actually moved.
        if (rel.strength - before).abs() > 0.001 {
            rel.version += 1;
            strengthened += 1;
        }
    }
    store.save()?;
    println!("Marked '{}' as used (strengthened {} edges)", resolved, strengthened);
    Ok(())
}
/// Flag a node as having given wrong information, with optional context.
fn cmd_wrong(key: &str, context: &[String]) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let resolved = store.resolve_key(key)?;
    // Empty context becomes None rather than an empty string.
    let ctx = (!context.is_empty()).then(|| context.join(" "));
    store.mark_wrong(&resolved, ctx.as_deref());
    store.save()?;
    println!("Marked '{}' as wrong", resolved);
    Ok(())
}
/// Record that a node was retrieved but irrelevant: weaken every live
/// edge touching it by a small conscious-tier delta.
fn cmd_not_relevant(key: &str) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let resolved = store.resolve_key(key)?;
    // Routing to this node was a miss — dampen its connectivity slightly.
    const DELTA: f32 = -0.01;
    let mut adjusted = 0;
    for rel in store.relations.iter_mut().filter(|r| !r.deleted) {
        if rel.source_key != resolved && rel.target_key != resolved {
            continue;
        }
        let before = rel.strength;
        rel.strength = (before + DELTA).clamp(0.05, 0.95);
        // Count only edges that actually changed.
        if (rel.strength - before).abs() > 0.001 {
            rel.version += 1;
            adjusted += 1;
        }
    }
    store.save()?;
    println!("Not relevant: '{}' — weakened {} edges by {}", resolved, adjusted, DELTA.abs());
    Ok(())
}
/// Flag a node whose content (not its links) turned out to be unhelpful.
fn cmd_not_useful(key: &str) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let resolved = store.resolve_key(key)?;
    // Reuses the "wrong" pathway with a distinguishing context tag: the
    // node itself is bad, its edges are fine.
    store.mark_wrong(&resolved, Some("not-useful"));
    store.save()?;
    println!("Not useful: '{}' — node weight reduced", resolved);
    Ok(())
}
/// Record a knowledge gap — something memory should have known but didn't.
fn cmd_gap(description: &[String]) -> Result<(), String> {
    if description.is_empty() {
        return Err("gap requires a description".into());
    }
    let mut store = store::Store::load()?;
    let desc = description.join(" ");
    store.record_gap(&desc);
    store.save()?;
    println!("Recorded gap: {}", desc);
    Ok(())
}
/// Connect poorly-connected nodes to similar neighbors.
fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let (orphan_count, link_count) =
        neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
    println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
        orphan_count, link_count, min_deg, links_per, sim_thresh);
    Ok(())
}
/// Prune weak Auto edges on over-connected hubs down to a degree ceiling.
fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let (hub_count, pruned_count) = store.cap_degree(max_deg)?;
    store.save()?;
    println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})",
        hub_count, pruned_count, max_deg);
    Ok(())
}
/// Recompute Link strengths from Jaccard neighborhood similarity.
///
/// Dry-run by default: reports how many edges would change and prints a
/// histogram of the proposed strengths; with `apply` the new strengths
/// are written back and saved. Implicit temporal edges (Auto type at
/// exactly strength 1.0) are left untouched.
fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let graph = store.build_graph();
    let strengths = graph.jaccard_strengths();
    // Build a lookup from (source_key, target_key) → new_strength
    let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
    for (a, b, s) in &strengths {
        // Store both directions for easy lookup
        updates.insert((a.clone(), b.clone()), *s);
        updates.insert((b.clone(), a.clone()), *s);
    }
    // Stats
    let mut changed = 0usize;
    let mut unchanged = 0usize;
    let mut temporal_skipped = 0usize;
    let mut delta_sum: f64 = 0.0;
    // Histogram of new strengths
    let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...
    for rel in &mut store.relations {
        if rel.deleted { continue; }
        // Skip implicit temporal edges (strength 1.0, Auto type).
        // Exact float comparison is deliberate: these edges are created
        // with a literal 1.0 and never decayed.
        if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
            temporal_skipped += 1;
            continue;
        }
        if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
            let old_s = rel.strength;
            let delta = (new_s - old_s).abs();
            if delta > 0.001 {
                delta_sum += delta as f64;
                // Counters advance in both modes; only mutate when applying.
                if apply {
                    rel.strength = new_s;
                }
                changed += 1;
            } else {
                unchanged += 1;
            }
            // Bucket the proposed strength: index 0 = [0.0,0.1), ..., 9 caps the top.
            let bucket = ((new_s * 10.0) as usize).min(9);
            buckets[bucket] += 1;
        }
    }
    println!("Normalize link strengths (Jaccard similarity)");
    println!(" Total edges in graph: {}", strengths.len());
    println!(" Would change: {}", changed);
    println!(" Unchanged: {}", unchanged);
    println!(" Temporal (skipped): {}", temporal_skipped);
    if changed > 0 {
        println!(" Avg delta: {:.3}", delta_sum / changed as f64);
    }
    println!();
    println!(" Strength distribution:");
    for (i, &count) in buckets.iter().enumerate() {
        let lo = i as f32 / 10.0;
        let hi = lo + 0.1;
        // One '#' per 50 edges, minimum one for any non-empty bucket.
        let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
        println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
    }
    if apply {
        store.save()?;
        println!("\nApplied {} strength updates.", changed);
    } else {
        println!("\nDry run. Use --apply to write changes.");
    }
    Ok(())
}
/// Emit a consolidation prompt batch — either for a named agent, or the
/// generic consolidation pipeline when no agent is given.
fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option<String>) -> Result<(), String> {
    let store = store::Store::load()?;
    match agent {
        Some(agent_name) => {
            let batch = agents::prompts::agent_prompt(&store, &agent_name, count)?;
            println!("{}", batch.prompt);
            Ok(())
        }
        None => agents::prompts::consolidation_batch(&store, count, auto),
    }
}
/// Print the 20 most recent retrieval-log events, newest first, each with
/// its returned result keys.
fn cmd_log() -> Result<(), String> {
    let store = store::Store::load()?;
    let recent = store.retrieval_log.iter().rev().take(20);
    for event in recent {
        println!("[{}] q=\"{}\"{} results",
            event.timestamp, event.query, event.results.len());
        event.results.iter().for_each(|r| println!(" {}", r));
    }
    Ok(())
}
/// Dump the tunable retrieval/decay parameters, one per line.
fn cmd_params() -> Result<(), String> {
    let store = store::Store::load()?;
    let p = &store.params;
    println!("decay_factor: {}", p.decay_factor);
    println!("use_boost: {}", p.use_boost);
    println!("prune_threshold: {}", p.prune_threshold);
    println!("edge_decay: {}", p.edge_decay);
    println!("max_hops: {}", p.max_hops);
    println!("min_activation: {}", p.min_activation);
    Ok(())
}
/// List a node's graph neighbors via the query engine.
fn cmd_link(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("link requires a key".into());
    }
    let joined = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&joined)?;
    let graph = store.build_graph();
    println!("Neighbors of '{}':", resolved);
    let q = format!("neighbors('{}') | select strength,clustering_coefficient", resolved);
    query::run_query(&store, &graph, &q)
}
/// Print the spaced-repetition replay queue, highest priority first.
fn cmd_replay_queue(count: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let queue = neuro::replay_queue(&store, count);
    println!("Replay queue ({} items):", queue.len());
    for (rank, item) in queue.iter().enumerate() {
        println!(" {:2}. [{:.3}] {:>10} {} (interval={}d, emotion={:.1}, spectral={:.1})",
            rank + 1, item.priority, item.classification, item.key,
            item.interval_days, item.emotion, item.outlier_score);
    }
    Ok(())
}
/// Show the consolidation plan for the current session without applying it.
fn cmd_consolidate_session() -> Result<(), String> {
    let store = store::Store::load()?;
    println!("{}", neuro::format_plan(&neuro::consolidation_plan(&store)));
    Ok(())
}
/// Run the full consolidation pipeline against a freshly loaded store.
fn cmd_consolidate_full() -> Result<(), String> {
    consolidate::consolidate_full(&mut store::Store::load()?)
}
/// Close triangles around hub nodes: laterally link sufficiently similar
/// pairs of a hub's neighbors.
fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
    println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
        min_degree, sim_threshold, max_per_hub);
    let mut store = store::Store::load()?;
    let (hub_count, added_count) =
        neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
    println!("\nProcessed {} hubs, added {} lateral links", hub_count, added_count);
    Ok(())
}
/// Print the daily maintenance check report.
fn cmd_daily_check() -> Result<(), String> {
    let store = store::Store::load()?;
    print!("{}", neuro::daily_check(&store));
    Ok(())
}
/// Manually add a Link relation between two nodes.
///
/// The target is refined to its best-matching section node, the link is
/// skipped if one already exists in either direction, and the initial
/// strength is seeded from Jaccard neighborhood similarity.
fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let source = store.resolve_key(source)?;
    let target = store.resolve_key(target)?;
    let reason = reason.join(" ");
    // Refine target to best-matching section
    let source_content = store.nodes.get(&source)
        .map(|n| n.content.as_str()).unwrap_or("");
    let target = neuro::refine_target(&store, source_content, &target);
    // Find UUIDs
    let source_uuid = store.nodes.get(&source)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("source not found: {}", source))?;
    let target_uuid = store.nodes.get(&target)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("target not found: {}", target))?;
    // Check for existing link (undirected: either orientation counts)
    let exists = store.relations.iter().any(|r|
        !r.deleted &&
        ((r.source_key == source && r.target_key == target) ||
         (r.source_key == target && r.target_key == source)));
    if exists {
        // NOTE(review): "{}{}" prints source and target with no separator
        // — looks like a missing " → "; confirm against git history.
        println!("Link already exists: {}{}", source, target);
        return Ok(());
    }
    // Compute initial strength from Jaccard neighborhood similarity,
    // scaled ×3 and clamped so even weak overlap yields a usable edge.
    let graph = store.build_graph();
    let jaccard = graph.jaccard(&source, &target);
    let strength = (jaccard * 3.0).clamp(0.1, 1.0);
    let rel = store::new_relation(
        source_uuid, target_uuid,
        store::RelationType::Link, strength,
        &source, &target,
    );
    store.add_relation(rel)?;
    store.save()?;
    println!("Linked: {}{} (strength={:.2}, {})", source, target, strength, reason);
    Ok(())
}
/// Predict the structural impact of adding a link between two nodes
/// (degree changes, clustering, Gini) without actually adding it.
fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
    let store = store::Store::load()?;
    let source = store.resolve_key(source)?;
    let target = store.resolve_key(target)?;
    let graph = store.build_graph();
    let impact = graph.link_impact(&source, &target);
    println!("Link impact: {}{}", source, target);
    println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
    println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
    println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
    println!(" ΔGini: {:+.6}", impact.delta_gini);
    println!(" Assessment: {}", impact.assessment);
    Ok(())
}
/// Apply links from a single agent result JSON file.
///
/// The payload may be nested under an "agent_result" key or be the
/// top-level object itself. Each entry in "links" names a target node;
/// the source node is located by matching "entry_text" against journal
/// nodes. Targets prefixed with "NOTE:" are informational only.
///
/// Fix: the journal-node lookup depends only on `entry_text`, not on the
/// individual link — it is now resolved once before the loop instead of
/// once per link.
///
/// Returns (links_applied, errors).
fn apply_agent_file(
    store: &mut store::Store,
    data: &serde_json::Value,
) -> (usize, usize) {
    // Accept both the wrapped and unwrapped payload shapes.
    let agent_result = data.get("agent_result").unwrap_or(data);
    let links = match agent_result.get("links").and_then(|l| l.as_array()) {
        Some(l) => l,
        None => return (0, 0),
    };
    let entry_text = data.get("entry_text")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    if let (Some(start), Some(end)) = (
        agent_result.get("source_start").and_then(|v| v.as_u64()),
        agent_result.get("source_end").and_then(|v| v.as_u64()),
    ) {
        println!(" Source: L{}-L{}", start, end);
    }
    // Loop-invariant: the source journal node is the same for every link.
    let journal_node = store.find_journal_node(entry_text);
    let mut applied = 0;
    let mut errors = 0;
    for link in links {
        let target = match link.get("target").and_then(|v| v.as_str()) {
            Some(t) => t,
            None => continue,
        };
        let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
        // "NOTE:" targets carry commentary rather than a link request.
        if let Some(note) = target.strip_prefix("NOTE:") {
            println!(" NOTE: {}{}", note, reason);
            continue;
        }
        let resolved = match store.resolve_key(target) {
            Ok(r) => r,
            Err(_) => {
                println!(" SKIP {} (not found in graph)", target);
                continue;
            }
        };
        let source_key = match &journal_node {
            Some(k) => k.clone(),
            None => {
                println!(" SKIP {} (no matching journal node)", target);
                continue;
            }
        };
        let source_uuid = match store.nodes.get(&source_key) {
            Some(n) => n.uuid,
            None => continue,
        };
        let target_uuid = match store.nodes.get(&resolved) {
            Some(n) => n.uuid,
            None => continue,
        };
        // Agent-proposed links start at a neutral 0.5 strength.
        let rel = store::new_relation(
            source_uuid, target_uuid,
            store::RelationType::Link,
            0.5,
            &source_key, &resolved,
        );
        if let Err(e) = store.add_relation(rel) {
            eprintln!(" Error adding relation: {}", e);
            errors += 1;
        } else {
            println!(" LINK {}{} ({})", source_key, resolved, reason);
            applied += 1;
        }
    }
    (applied, errors)
}
/// Apply pending agent result files from the agent-results directory.
///
/// Each .json file is parsed and handed to `apply_agent_file`; the store
/// is saved once at the end if any link was applied. Unreadable or
/// unparseable files are skipped and counted as errors.
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
    let results_dir = store::memory_dir().join("agent-results");
    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }
    let mut store = store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;
    // Collect .json result files, sorted by path for deterministic order.
    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());
    for entry in &files {
        let path = entry.path();
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!(" Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!(" Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        let (a, e) = apply_agent_file(&mut store, &data);
        applied += a;
        errors += e;
        if !process_all {
            // NOTE(review): files are archived to done/ only when
            // process_all is FALSE — confirm this polarity is intended
            // (it reads inverted at first glance).
            let done_dir = util::memory_subdir("agent-results/done")?;
            let dest = done_dir.join(path.file_name().unwrap());
            // Best-effort move; a failed rename just leaves the file in place.
            std::fs::rename(&path, &dest).ok();
        }
    }
    if applied > 0 {
        store.save()?;
    }
    println!("\nApplied {} links ({} errors, {} files processed)",
        applied, errors, files.len());
    Ok(())
}
/// Generate a digest at the requested level; the date defaults to today
/// when not supplied. Auto mode delegates level selection entirely.
fn cmd_digest(level: DigestLevel) -> Result<(), String> {
    let mut store = store::Store::load()?;
    // Map each explicit level to its kind string + optional date; Auto
    // short-circuits into its own pipeline.
    let (kind, date) = match level {
        DigestLevel::Auto => return digest::digest_auto(&mut store),
        DigestLevel::Daily { date } => ("daily", date),
        DigestLevel::Weekly { date } => ("weekly", date),
        DigestLevel::Monthly { date } => ("monthly", date),
    };
    let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
    digest::generate(&mut store, kind, &arg)
}
/// Preview or apply cross-links parsed out of digest nodes.
///
/// Fix: the dry-run preview truncated `link.reason` with a byte slice
/// (`&s[..len.min(80)]`), which panics if byte 80 falls inside a
/// multi-byte UTF-8 character. Truncation is now done on char
/// boundaries, matching the preview style used elsewhere in this file.
fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
    let store = store::Store::load()?;
    let links = digest::parse_all_digest_links(&store);
    // Release the read-only store before apply_digest_links reloads it mutably.
    drop(store);
    println!("Found {} unique links from digest nodes", links.len());
    if !do_apply {
        for (i, link) in links.iter().enumerate() {
            println!(" {:3}. {}{}", i + 1, link.source, link.target);
            if !link.reason.is_empty() {
                // Char-boundary-safe truncation to 80 characters.
                let preview: String = link.reason.chars().take(80).collect();
                println!(" ({})", preview);
            }
        }
        println!("\nTo apply: poc-memory digest-links --apply");
        return Ok(());
    }
    let mut store = store::Store::load()?;
    let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
    println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
    Ok(())
}
/// Enrich a journal entry with links mined from a session transcript.
fn cmd_journal_enrich(jsonl_path: &str, entry_text: &str, grep_line: usize) -> Result<(), String> {
    let path = std::path::Path::new(jsonl_path);
    if !path.is_file() {
        return Err(format!("JSONL not found: {}", jsonl_path));
    }
    let mut store = store::Store::load()?;
    enrich::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
}
fn cmd_experience_mine(jsonl_path: Option<String>) -> Result<(), String> {
let jsonl_path = match jsonl_path {
Some(p) => p,
None => find_current_transcript()
.ok_or("no JSONL transcripts found")?,
};
if !std::path::Path::new(jsonl_path.as_str()).is_file() {
return Err(format!("JSONL not found: {}", jsonl_path));
}
let mut store = store::Store::load()?;
let count = enrich::experience_mine(&mut store, &jsonl_path, None)?;
println!("Done: {} new entries mined.", count);
Ok(())
}
/// Apply (or preview, when `do_apply` is false) a consolidation report.
fn cmd_apply_consolidation(do_apply: bool, report_file: Option<&str>) -> Result<(), String> {
    consolidate::apply_consolidation(&mut store::Store::load()?, do_apply, report_file)
}
/// Redistribute a file-level hub's links onto its per-section nodes, or —
/// when no key is given — list all hubs eligible for differentiation.
fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    if let Some(key) = key_arg {
        let resolved = store.resolve_key(key)?;
        let moves = neuro::differentiate_hub(&store, &resolved)
            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;
        // Group by target section for display (BTreeMap → stable section order)
        let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
            std::collections::BTreeMap::new();
        for mv in &moves {
            by_section.entry(mv.to_section.clone()).or_default().push(mv);
        }
        println!("Hub '{}' — {} links to redistribute across {} sections\n",
            resolved, moves.len(), by_section.len());
        for (section, section_moves) in &by_section {
            println!(" {} ({} links):", section, section_moves.len());
            // Show at most 5 example moves per section.
            for mv in section_moves.iter().take(5) {
                println!(" [{:.3}] {}{}", mv.similarity,
                    mv.neighbor_key, mv.neighbor_snippet);
            }
            if section_moves.len() > 5 {
                println!(" ... and {} more", section_moves.len() - 5);
            }
        }
        if !do_apply {
            // Preview only — tell the user how to apply.
            println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
            return Ok(());
        }
        let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
        store.save()?;
        println!("\nApplied: {} Skipped: {}", applied, skipped);
    } else {
        // No key: list candidate hubs only, nothing is modified.
        let hubs = neuro::find_differentiable_hubs(&store);
        if hubs.is_empty() {
            println!("No file-level hubs with sections found above threshold");
            return Ok(());
        }
        println!("Differentiable hubs (file-level nodes with sections):\n");
        for (key, degree, sections) in &hubs {
            println!(" {:40} deg={:3} sections={}", key, degree, sections);
        }
        println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
    }
    Ok(())
}
/// Run the link auditor and print its summary statistics.
fn cmd_link_audit(apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let stats = audit::link_audit(&mut store, apply)?;
    let rule = "=".repeat(60);
    println!("\n{}", rule);
    println!("Link audit complete:");
    println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
        stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
    println!("{}", rule);
    Ok(())
}
/// Show a node in context: its own preview plus all graph neighbors
/// grouped by memory tier (weekly/monthly digests, daily digests,
/// session entries, semantic links), with a count summary at the end.
fn cmd_trace(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("trace requires a key".into());
    }
    let key = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();
    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;
    // Display the node itself
    println!("=== {} ===", resolved);
    println!("Type: {:?} Weight: {:.2}",
        node.node_type, node.weight);
    if !node.source_ref.is_empty() {
        println!("Source: {}", node.source_ref);
    }
    // Show content preview
    let preview = util::truncate(&node.content, 200, "...");
    println!("\n{}\n", preview);
    // Walk neighbors, grouped by node type. Weekly and monthly digests
    // share one display bucket.
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();
    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            // Exhaustive match — a new NodeType variant is a compile error
            // here, which is what we want.
            match nnode.node_type {
                store::NodeType::EpisodicSession =>
                    episodic_session.push(entry),
                store::NodeType::EpisodicDaily =>
                    episodic_daily.push(entry),
                store::NodeType::EpisodicWeekly
                | store::NodeType::EpisodicMonthly =>
                    episodic_weekly.push(entry),
                store::NodeType::Semantic =>
                    semantic.push(entry),
            }
        }
    }
    if !episodic_weekly.is_empty() {
        println!("Weekly digests:");
        for (k, s, n) in &episodic_weekly {
            // Preview = first line of the digest content.
            let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!(" [{:.2}] {}{}", s, k, preview);
        }
    }
    if !episodic_daily.is_empty() {
        println!("Daily digests:");
        for (k, s, n) in &episodic_daily {
            let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!(" [{:.2}] {}{}", s, k, preview);
        }
    }
    if !episodic_session.is_empty() {
        println!("Session entries:");
        for (k, s, n) in &episodic_session {
            // Preview = first non-empty line that isn't an HTML comment
            // (session entries may start with <!-- metadata -->).
            let preview = util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            println!(" [{:.2}] {}", s, k);
            if !n.source_ref.is_empty() {
                println!(" ↳ source: {}", n.source_ref);
            }
            println!(" {}", preview);
        }
    }
    if !semantic.is_empty() {
        println!("Semantic links:");
        for (k, s, _) in &semantic {
            println!(" [{:.2}] {}", s, k);
        }
    }
    println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len());
    Ok(())
}
/// Run a k-dimensional spectral decomposition of the graph and print a
/// summary of the result.
fn cmd_spectral(k: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let graph = graph::build_graph(&store);
    spectral::print_summary(&spectral::decompose(&graph, k), &graph);
    Ok(())
}
/// Decompose the graph spectrally and persist the embedding to disk.
fn cmd_spectral_save(k: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let graph = graph::build_graph(&store);
    let decomposition = spectral::decompose(&graph, k);
    spectral::save_embedding(&spectral::to_embedding(&decomposition))?;
    Ok(())
}
/// Show a node's dominant spectral axes and its nearest neighbors in
/// spectral space (requires a previously saved embedding).
fn cmd_spectral_neighbors(key: &str, n: usize) -> Result<(), String> {
    let emb = spectral::load_embedding()?;
    let dims = spectral::dominant_dimensions(&emb, &[key]);
    println!("Node: {} (embedding: {} dims)", key, emb.dims);
    println!("Top spectral axes:");
    for &(axis, loading) in dims.iter().take(5) {
        println!(" axis {:<2} (λ={:.4}): loading={:.5}", axis, emb.eigenvalues[axis], loading);
    }
    println!("\nNearest neighbors in spectral space:");
    for (rank, (neighbor, dist)) in spectral::nearest_neighbors(&emb, key, n).iter().enumerate() {
        println!(" {:>2}. {:.5} {}", rank + 1, dist, neighbor);
    }
    Ok(())
}
/// Classify nodes by spectral position and report bridges and outliers.
///
/// outlier = dist_to_center / median distance (>1 means unusual);
/// bridge  = dist_to_center / dist_to_nearest_other_community.
///
/// Fix: the old code also collected non-bridge positions into an
/// `outliers` vec that was never read — that dead bucket is removed and
/// bridges are gathered with a single filter. Printed output is unchanged.
fn cmd_spectral_positions(n: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let emb = spectral::load_embedding()?;
    let g = store.build_graph();
    let communities = g.communities().clone();
    let positions = spectral::analyze_positions(&emb, &communities);
    println!("Spectral position analysis — {} nodes", positions.len());
    println!(" outlier: dist_to_center / median (>1 = unusual position)");
    println!(" bridge: dist_to_center / dist_to_nearest_other_community");
    println!();
    // Bridges among the top-n positions.
    let bridges: Vec<&spectral::SpectralPosition> = positions.iter()
        .take(n)
        .filter(|pos| spectral::classify_position(pos) == "bridge")
        .collect();
    if !bridges.is_empty() {
        println!("=== Bridges (between communities) ===");
        for pos in &bridges {
            println!(" [{:.2}/{:.2}] c{} → c{} {}",
                pos.outlier_score, pos.bridge_score,
                pos.community, pos.nearest_community, pos.key);
        }
        println!();
    }
    println!("=== Top outliers (far from own community center) ===");
    for pos in positions.iter().take(n) {
        let class = spectral::classify_position(pos);
        println!(" {:>10} outlier={:.2} bridge={:.2} c{:<3} {}",
            class, pos.outlier_score, pos.bridge_score,
            pos.community, pos.key);
    }
    Ok(())
}
/// Suggest close-but-unlinked node pairs in spectral space as candidates
/// for extractor agents. Only nodes with degree >= 3 are considered so
/// suggestions land on well-established parts of the graph.
///
/// Fix: the "already linked" set previously ingested tombstoned
/// relations, suppressing valid suggestions; every other relation scan
/// in this file skips `deleted` entries, and now this one does too.
fn cmd_spectral_suggest(n: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let emb = spectral::load_embedding()?;
    let g = store.build_graph();
    let communities = g.communities();
    let min_degree = 3;
    // Restrict the embedding to well-connected nodes.
    let well_connected: std::collections::HashSet<&str> = emb.coords.keys()
        .filter(|k| g.degree(k) >= min_degree)
        .map(|k| k.as_str())
        .collect();
    let filtered_emb = spectral::SpectralEmbedding {
        dims: emb.dims,
        eigenvalues: emb.eigenvalues.clone(),
        coords: emb.coords.iter()
            .filter(|(k, _)| well_connected.contains(k.as_str()))
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect(),
    };
    // Pairs with a live edge (either direction) are excluded from suggestions.
    let mut linked: std::collections::HashSet<(String, String)> =
        std::collections::HashSet::new();
    for rel in &store.relations {
        if rel.deleted { continue; }
        linked.insert((rel.source_key.clone(), rel.target_key.clone()));
        linked.insert((rel.target_key.clone(), rel.source_key.clone()));
    }
    eprintln!("Searching {} well-connected nodes (degree >= {})...",
        filtered_emb.coords.len(), min_degree);
    let pairs = spectral::unlinked_neighbors(&filtered_emb, &linked, n);
    println!("{} closest unlinked pairs (candidates for extractor agents):", pairs.len());
    for (i, (k1, k2, dist)) in pairs.iter().enumerate() {
        let c1 = communities.get(k1)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        let c2 = communities.get(k2)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        let cross = if c1 != c2 { " [cross-community]" } else { "" };
        println!(" {:>2}. dist={:.4} {} ({}) ↔ {} ({}){}",
            i + 1, dist, k1, c1, k2, c2, cross);
    }
    Ok(())
}
/// List node keys, optionally filtered by a glob-ish pattern.
///
/// `foo*` = prefix, `*foo` = suffix, `*foo*` or bare `foo` = substring
/// (all case-insensitive). Without a pattern, defers to the query engine.
fn cmd_list_keys(pattern: Option<&str>) -> Result<(), String> {
    let store = store::Store::load()?;
    let g = store.build_graph();
    let pat = match pattern {
        None => return query::run_query(&store, &g, "* | sort key asc"),
        Some(p) => p.to_lowercase(),
    };
    let leading = pat.starts_with('*');
    let trailing = pat.ends_with('*');
    // Classify once; the closure just applies the chosen test per key.
    let matches = |key: &String| -> bool {
        let kl = key.to_lowercase();
        if leading && trailing {
            kl.contains(pat.trim_matches('*'))
        } else if leading {
            kl.ends_with(pat.trim_start_matches('*'))
        } else if trailing {
            kl.starts_with(pat.trim_end_matches('*'))
        } else {
            kl.contains(pat.as_str())
        }
    };
    let mut keys: Vec<_> = store.nodes.keys()
        .filter(|k| matches(k))
        .cloned()
        .collect();
    keys.sort();
    for k in keys {
        println!("{}", k);
    }
    Ok(())
}
/// Dump all relations as tab-separated lines: source, target, strength, type.
fn cmd_list_edges() -> Result<(), String> {
    let store = store::Store::load()?;
    store.relations.iter().for_each(|r| {
        println!("{}\t{}\t{:.2}\t{:?}", r.source_key, r.target_key, r.strength, r.rel_type);
    });
    Ok(())
}
/// Pretty-print the entire store as JSON on stdout.
fn cmd_dump_json() -> Result<(), String> {
    let store = store::Store::load()?;
    match serde_json::to_string_pretty(&store) {
        Ok(json) => {
            println!("{}", json);
            Ok(())
        }
        Err(e) => Err(format!("serialize: {}", e)),
    }
}
/// Delete a node by key. Multi-word keys arrive as separate argv entries
/// and are rejoined with spaces; the key is fuzzy-resolved before deletion.
fn cmd_node_delete(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("node-delete requires a key".into());
    }
    let joined = key.join(" ");
    let mut store = store::Store::load()?;
    let resolved = store.resolve_key(&joined)?;
    store.delete_node(&resolved)?;
    store.save()?;
    println!("Deleted '{}'", resolved);
    Ok(())
}
/// Rename a node: the old key is fuzzy-resolved, the new key is taken as-is.
fn cmd_node_rename(old_key: &str, new_key: &str) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let resolved_old = store.resolve_key(old_key)?;
    store.rename_node(&resolved_old, new_key)?;
    store.save()?;
    println!("Renamed '{}' → '{}'", resolved_old, new_key);
    Ok(())
}
/// Collect (key, content) entries for one configured context group.
///
/// Three source kinds:
/// - Journal: recent EpisodicSession nodes within `cfg.journal_days`,
///   capped at the newest `cfg.journal_max` entries.
/// - File: raw files read from under the data dir; empty files skipped.
/// - Store: nodes rendered via `store.render_file`; empty renders skipped.
fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
    match group.source {
        config::ContextSource::Journal => {
            let mut entries = Vec::new();
            let now = store::now_epoch();
            // Recency window in seconds.
            let window: i64 = cfg.journal_days as i64 * 24 * 3600;
            let cutoff = now - window;
            // Matches the date embedded in journal keys, e.g. "j-2024-01-02".
            let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
            // Best-effort timestamp for a journal node: created_at when set,
            // else midnight (local time) of the date parsed from the key,
            // else the node's raw timestamp.
            let journal_ts = |n: &store::Node| -> i64 {
                if n.created_at > 0 { return n.created_at; }
                if let Some(caps) = key_date_re.captures(&n.key) {
                    use chrono::{NaiveDate, TimeZone, Local};
                    if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
                        if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
                            return dt.timestamp();
                        }
                    }
                }
                n.timestamp
            };
            let mut journal_nodes: Vec<_> = store.nodes.values()
                .filter(|n| n.node_type == store::NodeType::EpisodicSession && journal_ts(n) >= cutoff)
                .collect();
            // Oldest first, so skipping the front keeps the newest entries.
            journal_nodes.sort_by_key(|n| journal_ts(n));
            let max = cfg.journal_max;
            let skip = journal_nodes.len().saturating_sub(max);
            for node in journal_nodes.iter().skip(skip) {
                entries.push((node.key.clone(), node.content.clone()));
            }
            entries
        }
        config::ContextSource::File => {
            // `group.keys` are filenames relative to the data dir.
            group.keys.iter().filter_map(|key| {
                let content = std::fs::read_to_string(cfg.data_dir.join(key)).ok()?;
                if content.trim().is_empty() { return None; }
                Some((key.clone(), content.trim().to_string()))
            }).collect()
        }
        config::ContextSource::Store => {
            // `group.keys` are store file keys rendered back to markdown.
            group.keys.iter().filter_map(|key| {
                let content = store.render_file(key)?;
                if content.trim().is_empty() { return None; }
                Some((key.clone(), content.trim().to_string()))
            }).collect()
        }
    }
}
/// Print the configured context groups: a word-count table when `stats`
/// is set, otherwise the full memory-load preamble for the assistant.
fn cmd_load_context(stats: bool) -> Result<(), String> {
    let cfg = config::get();
    let store = store::Store::load()?;
    if stats {
        let sep = "-".repeat(42);
        println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS");
        println!("{}", sep);
        let (mut total_entries, mut total_words) = (0usize, 0usize);
        for group in &cfg.context_groups {
            let entries = get_group_content(group, &store, cfg);
            let words: usize = entries.iter()
                .map(|(_, c)| c.split_whitespace().count())
                .sum();
            println!("{:<25} {:>6} {:>8}", group.label, entries.len(), words);
            total_entries += entries.len();
            total_words += words;
        }
        println!("{}", sep);
        println!("{:<25} {:>6} {:>8}", "TOTAL", total_entries, total_words);
        return Ok(());
    }
    println!("=== MEMORY SYSTEM ({}) ===", cfg.assistant_name);
    println!();
    for group in &cfg.context_groups {
        let entries = get_group_content(group, &store, cfg);
        let is_journal = group.source == config::ContextSource::Journal;
        // Journal gets a single group header; other sources label each item.
        if is_journal && !entries.is_empty() {
            println!("--- recent journal entries ({}/{}) ---",
                     entries.len(), cfg.journal_max);
        }
        for (key, content) in entries {
            if is_journal {
                println!("## {}", key);
            } else {
                println!("--- {} ({}) ---", key, group.label);
            }
            println!("{}\n", content);
        }
    }
    println!("=== END MEMORY LOAD ===");
    Ok(())
}
/// Dispatch cursor subcommands. `Clear` and the empty-key check for `Set`
/// run before any store I/O; everything else operates on a loaded store.
fn cmd_cursor(sub: CursorCmd) -> Result<(), String> {
    match sub {
        CursorCmd::Clear => cursor::clear(),
        CursorCmd::Set { key } => {
            if key.is_empty() {
                return Err("cursor set requires a key".into());
            }
            let joined = key.join(" ");
            let store = store::Store::load()?;
            let bare = store::strip_md_suffix(&joined);
            if !store.nodes.contains_key(&bare) {
                return Err(format!("Node not found: {}", bare));
            }
            cursor::set(&bare)?;
            cursor::show(&store)
        }
        sub => {
            // Remaining variants are read-only moves over the loaded store.
            let store = store::Store::load()?;
            match sub {
                CursorCmd::Show => cursor::show(&store),
                CursorCmd::Forward => cursor::move_temporal(&store, true),
                CursorCmd::Back => cursor::move_temporal(&store, false),
                CursorCmd::Up => cursor::move_up(&store),
                CursorCmd::Down => cursor::move_down(&store),
                _ => unreachable!("Clear and Set handled above"),
            }
        }
    }
}
/// Print a node's content followed by its strongest graph neighbors,
/// so the memory graph stays walkable from rendered output.
fn cmd_render(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("render requires a key".into());
    }
    let key = key.join(" ");
    let store = store::Store::load()?;
    let bare = store::strip_md_suffix(&key);
    let node = store.nodes.get(&bare)
        .ok_or_else(|| format!("Node not found: {}", bare))?;
    print!("{}", node.content);
    // Collect neighbors from live relations in either direction.
    let mut neighbors: Vec<(&str, f32)> = Vec::new();
    for r in &store.relations {
        if r.deleted { continue; }
        if r.source_key == bare {
            neighbors.push((&r.target_key, r.strength));
        } else if r.target_key == bare {
            neighbors.push((&r.source_key, r.strength));
        }
    }
    if !neighbors.is_empty() {
        neighbors.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
        // Keep only the first (strongest) entry per neighbor key.
        // Bug fix: the previous dedup_by only removed *adjacent* duplicates,
        // but the list is sorted by strength — duplicate keys (a neighbor
        // linked in both directions, or by several relations) were usually
        // not adjacent and survived, inflating the link list.
        let mut seen = std::collections::HashSet::new();
        neighbors.retain(|(k, _)| seen.insert(*k));
        let total = neighbors.len();
        let shown: Vec<_> = neighbors.iter().take(15)
            .map(|(k, _)| format!("`poc-memory render {}`", k))
            .collect();
        print!("\n\n---\nLinks:");
        for link in &shown {
            println!("\n  {}", link);
        }
        if total > 15 {
            println!("  ... and {} more (`poc-memory graph link {}`)", total - 15, bare);
        }
    }
    Ok(())
}
/// Print every logged version of a node from the append-only node log.
///
/// The store is loaded only to fuzzy-resolve the key, then dropped; the
/// history itself is replayed straight from nodes.capnp so superseded
/// versions (which the derived store no longer holds) remain visible.
/// Version metadata goes to stderr; with `full` each version's content is
/// printed, otherwise a one-line preview per version plus the latest
/// content on stdout.
fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
    if key.is_empty() {
        return Err("history requires a key".into());
    }
    let raw_key = key.join(" ");
    let store = store::Store::load()?;
    // Fall back to the raw key if resolution fails (e.g. a deleted node
    // that still has log history).
    let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
    drop(store);
    let path = store::nodes_path();
    if !path.exists() {
        return Err("No node log found".into());
    }
    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| format!("open {}: {}", path.display(), e))?;
    let mut reader = BufReader::new(file);
    let mut versions: Vec<store::Node> = Vec::new();
    // The log is a sequence of capnp messages; read until EOF/parse failure.
    while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<poc_memory::memory_capnp::node_log::Reader>()
            .map_err(|e| format!("read log: {}", e))?;
        for node_reader in log.get_nodes()
            .map_err(|e| format!("get nodes: {}", e))? {
            let node = store::Node::from_capnp_migrate(node_reader)?;
            if node.key == key {
                versions.push(node);
            }
        }
    }
    if versions.is_empty() {
        return Err(format!("No history found for '{}'", key));
    }
    eprintln!("{} versions of '{}':\n", versions.len(), key);
    for node in &versions {
        // Timestamps outside a plausible epoch range (~year 2096) are
        // printed raw rather than formatted.
        let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
            store::format_datetime(node.timestamp)
        } else {
            format!("(raw:{})", node.timestamp)
        };
        let content_len = node.content.len();
        if full {
            eprintln!("=== v{} {} {} w={:.3} {}b ===",
                node.version, ts, node.provenance, node.weight, content_len);
            eprintln!("{}", node.content);
        } else {
            let preview = util::first_n_chars(&node.content, 120);
            let preview = preview.replace('\n', "\\n");
            eprintln!("  v{:<3} {} {:24} w={:.3} {}b",
                node.version, ts, node.provenance, node.weight, content_len);
            eprintln!("      {}", preview);
        }
    }
    if !full {
        // Latest content goes to stdout so it can be piped cleanly.
        if let Some(latest) = versions.last() {
            eprintln!("\n--- Latest content (v{}, {}) ---",
                latest.version, latest.provenance);
            print!("{}", latest.content);
        }
    }
    Ok(())
}
/// Show the last `n` entries of the append-only node log (raw versions,
/// including deletions), as previews or in full.
fn cmd_tail(n: usize, full: bool) -> Result<(), String> {
    let path = store::nodes_path();
    if !path.exists() {
        return Err("No node log found".into());
    }
    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| format!("open {}: {}", path.display(), e))?;
    let mut reader = BufReader::new(file);
    // Decode every capnp message in the log, then slice off the tail.
    let mut entries: Vec<store::Node> = Vec::new();
    while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<poc_memory::memory_capnp::node_log::Reader>()
            .map_err(|e| format!("read log: {}", e))?;
        let nodes = log.get_nodes().map_err(|e| format!("get nodes: {}", e))?;
        for node_reader in nodes {
            entries.push(store::Node::from_capnp_migrate(node_reader)?);
        }
    }
    let tail_start = entries.len().saturating_sub(n);
    for node in &entries[tail_start..] {
        // Timestamps outside a plausible epoch range are printed raw.
        let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
            store::format_datetime(node.timestamp)
        } else {
            format!("(raw:{})", node.timestamp)
        };
        let del = if node.deleted { " [DELETED]" } else { "" };
        if full {
            eprintln!("--- {} (v{}) {} via {} w={:.3}{} ---",
                node.key, node.version, ts, node.provenance, node.weight, del);
            eprintln!("{}\n", node.content);
        } else {
            let preview = util::first_n_chars(&node.content, 100).replace('\n', "\\n");
            eprintln!("  {} v{} w={:.2}{}",
                ts, node.version, node.weight, del);
            eprintln!("    {} via {}", node.key, node.provenance);
            if !preview.is_empty() {
                eprintln!("    {}", preview);
            }
            eprintln!();
        }
    }
    Ok(())
}
/// Upsert a node from stdin content under the (fuzzy-resolved) key.
/// The store is only saved when the upsert actually changed something.
fn cmd_write(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("write requires a key (reads content from stdin)".into());
    }
    let raw_key = key.join(" ");
    let mut content = String::new();
    std::io::Read::read_to_string(&mut std::io::stdin(), &mut content)
        .map_err(|e| format!("read stdin: {}", e))?;
    if content.trim().is_empty() {
        return Err("No content on stdin".into());
    }
    let mut store = store::Store::load()?;
    // Resolution failure means a brand-new key: use it verbatim.
    let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
    match store.upsert(&key, &content)? {
        "unchanged" => {
            println!("No change: '{}'", key);
        }
        "updated" => {
            println!("Updated '{}' (v{})", key, store.nodes[&key].version);
            store.save()?;
        }
        _ => {
            println!("Created '{}'", key);
            store.save()?;
        }
    }
    Ok(())
}
/// Import markdown files into the store. A name that doesn't exist as a
/// literal path is retried relative to the memory directory; missing files
/// are reported and skipped rather than aborting the batch.
fn cmd_import(files: &[String]) -> Result<(), String> {
    if files.is_empty() {
        return Err("import requires at least one file path".into());
    }
    let mut store = store::Store::load()?;
    let mut new_count = 0;
    let mut updated_count = 0;
    for arg in files {
        let direct = std::path::PathBuf::from(arg);
        let resolved = if direct.exists() {
            direct
        } else {
            let fallback = store::memory_dir().join(arg);
            if !fallback.exists() {
                eprintln!("File not found: {}", arg);
                continue;
            }
            fallback
        };
        let (n, u) = store.import_file(&resolved)?;
        new_count += n;
        updated_count += u;
    }
    // Persist only if anything actually changed.
    if new_count > 0 || updated_count > 0 {
        store.save()?;
    }
    println!("Import: {} new, {} updated", new_count, updated_count);
    Ok(())
}
/// Export store nodes back to markdown files in the memory directory.
/// With `--all`, every top-level file key (no '#' section separator) is
/// exported; otherwise the given keys are used (a trailing ".md" is allowed).
fn cmd_export(files: &[String], export_all: bool) -> Result<(), String> {
    let store = store::Store::load()?;
    let targets: Vec<String> = if export_all {
        let mut all: Vec<String> = store.nodes.keys()
            .filter(|k| !k.contains('#'))
            .cloned()
            .collect();
        all.sort();
        all
    } else if files.is_empty() {
        return Err("export requires file keys or --all".into());
    } else {
        files.iter()
            .map(|a| a.strip_suffix(".md").unwrap_or(a).to_string())
            .collect()
    };
    let mem_dir = store::memory_dir();
    for file_key in &targets {
        let Some(content) = store.export_to_markdown(file_key) else {
            eprintln!("No nodes for '{}'", file_key);
            continue;
        };
        let out_path = mem_dir.join(format!("{}.md", file_key));
        std::fs::write(&out_path, &content)
            .map_err(|e| format!("write {}: {}", out_path.display(), e))?;
        // Section markers separate nodes; marker count + 1 = section count.
        let section_count = content.matches("<!-- mem:").count() + 1;
        println!("Exported {} ({} sections)", file_key, section_count);
    }
    Ok(())
}
/// Append a timestamped journal entry as an EpisodicSession node, keyed by
/// timestamp plus a slug built from the first six words of the text.
fn cmd_journal_write(text: &[String]) -> Result<(), String> {
    if text.is_empty() {
        return Err("journal-write requires text".into());
    }
    let text = text.join(" ");
    let timestamp = store::format_datetime(store::now_epoch());
    // Slug: first six words, lowercased, keeping alphanumerics and dashes.
    let slug: String = text.split_whitespace()
        .take(6)
        .map(|w| w.to_lowercase()
            .chars().filter(|c| c.is_alphanumeric() || *c == '-')
            .collect::<String>())
        .collect::<Vec<_>>()
        .join("-");
    // Truncate to at most 50 bytes WITHOUT splitting a UTF-8 character.
    // Bug fix: char::is_alphanumeric admits multi-byte letters (CJK,
    // accented, etc.), so the previous `&slug[..50]` could panic on a
    // non-boundary byte offset.
    let mut cut = slug.len().min(50);
    while !slug.is_char_boundary(cut) {
        cut -= 1;
    }
    let slug = &slug[..cut];
    let key = format!("journal#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
    let content = format!("## {}\n\n{}", timestamp, text);
    // Best-effort link back to the transcript this entry came from.
    let source_ref = find_current_transcript();
    let mut store = store::Store::load()?;
    let mut node = store::new_node(&key, &content);
    node.node_type = store::NodeType::EpisodicSession;
    node.provenance = "journal".to_string();
    if let Some(src) = source_ref {
        node.source_ref = src;
    }
    store.upsert_node(node)?;
    store.save()?;
    let word_count = text.split_whitespace().count();
    println!("Appended entry at {} ({} words)", timestamp, word_count);
    Ok(())
}
/// Tail the journal at the requested aggregation level:
/// 0 = raw session entries; 1 = daily, 2 = weekly, 3+ = monthly digests.
fn cmd_journal_tail(n: usize, full: bool, level: u8) -> Result<(), String> {
    let store = store::Store::load()?;
    match level {
        0 => journal_tail_entries(&store, n, full),
        1 => journal_tail_digests(&store, store::NodeType::EpisodicDaily, n, full),
        2 => journal_tail_digests(&store, store::NodeType::EpisodicWeekly, n, full),
        _ => journal_tail_digests(&store, store::NodeType::EpisodicMonthly, n, full),
    }
}
/// Print the last `n` journal session entries, oldest first.
///
/// Each node's sort key is derived with fallbacks: `created_at` when set,
/// else a date embedded in the key, else one found in the content, else the
/// raw node timestamp. Nodes are compared numerically when both have epoch
/// timestamps, otherwise lexicographically on a normalized date string.
fn journal_tail_entries(store: &store::Store, n: usize, full: bool) -> Result<(), String> {
    // Timestamps embedded in content, e.g. "2024-01-02T03:04" / "2024-01-02 03:04".
    let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
    // Timestamps encoded in journal keys, where ':' became '-' and 'T' may be lowercase.
    let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
    // Convert a key-encoded date back to canonical "YYYY-MM-DDTHH:MM" form so
    // string comparison orders correctly.
    let normalize_date = |s: &str| -> String {
        let s = s.replace('t', "T");
        if s.len() >= 16 {
            format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
        } else {
            s
        }
    };
    // Returns (numeric epoch or 0, printable/sortable timestamp string).
    let extract_sort = |node: &store::Node| -> (i64, String) {
        if node.created_at > 0 {
            return (node.created_at, store::format_datetime(node.created_at));
        }
        if let Some(caps) = key_date_re.captures(&node.key) {
            return (0, normalize_date(&caps[1]));
        }
        if let Some(caps) = date_re.captures(&node.content) {
            return (0, normalize_date(&caps[1]));
        }
        (node.timestamp, store::format_datetime(node.timestamp))
    };
    let mut journal: Vec<_> = store.nodes.values()
        .filter(|node| node.node_type == store::NodeType::EpisodicSession)
        .collect();
    journal.sort_by(|a, b| {
        let (at, as_) = extract_sort(a);
        let (bt, bs) = extract_sort(b);
        // Numeric compare only when BOTH sides have real epochs; otherwise
        // fall back to the normalized string form (shared format, so it sorts).
        if at > 0 && bt > 0 {
            at.cmp(&bt)
        } else {
            as_.cmp(&bs)
        }
    });
    // Skip everything but the newest `n` (list is oldest-first).
    let skip = if journal.len() > n { journal.len() - n } else { 0 };
    for node in journal.iter().skip(skip) {
        let (_, ts) = extract_sort(node);
        let title = extract_title(&node.content);
        if full {
            println!("--- [{}] {} ---\n{}\n", ts, title, node.content);
        } else {
            println!("[{}] {}", ts, title);
        }
    }
    Ok(())
}
/// Print the last `n` digest nodes of the given type, oldest first.
fn journal_tail_digests(store: &store::Store, node_type: store::NodeType, n: usize, full: bool) -> Result<(), String> {
    let mut digests: Vec<_> = store.nodes.values()
        .filter(|node| node.node_type == node_type)
        .collect();
    // Order by timestamp when both are set; otherwise fall back to key order.
    digests.sort_by(|a, b| {
        if a.timestamp > 0 && b.timestamp > 0 {
            a.timestamp.cmp(&b.timestamp)
        } else {
            a.key.cmp(&b.key)
        }
    });
    let skip = digests.len().saturating_sub(n);
    for node in digests.iter().skip(skip) {
        let title = extract_title(&node.content);
        if full {
            println!("--- [{}] {} ---\n{}\n", node.key, title, node.content);
        } else {
            println!("[{}] {}", node.key, title);
        }
    }
    Ok(())
}
/// Return the first meaningful line of `content` as a title: markdown
/// headers lose their '#'/'##' prefix, other lines are truncated to 67
/// chars, and short lines that are just a timestamp are skipped.
fn extract_title(content: &str) -> String {
    // Compile the date regex once for the whole process: this function runs
    // per node inside the journal-tail loops, and recompiling a Regex on
    // every call is needlessly expensive.
    static DATE_RE: std::sync::LazyLock<regex::Regex> = std::sync::LazyLock::new(|| {
        regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap()
    });
    for line in content.lines() {
        let stripped = line.trim();
        if stripped.is_empty() { continue; }
        // Skip lines that are essentially just a timestamp.
        if DATE_RE.is_match(stripped) && stripped.len() < 25 { continue; }
        if let Some(h) = stripped.strip_prefix("## ") {
            return h.to_string();
        } else if let Some(h) = stripped.strip_prefix("# ") {
            return h.to_string();
        } else {
            return util::truncate(stripped, 67, "...");
        }
    }
    String::from("(untitled)")
}
/// Audit the organization of all nodes matching `term`: list them, report
/// near-duplicate pairs, check connectivity to a cluster hub, and (with
/// `create_anchor`) create an anchor node linked to every match.
///
/// `threshold` is the cosine-similarity cutoff for the duplicate report;
/// `key_only` restricts matching to node keys (skips content search).
fn cmd_organize(term: &str, threshold: f32, key_only: bool, create_anchor: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    // Step 1: find all non-deleted nodes matching the term
    let term_lower = term.to_lowercase();
    let mut topic_nodes: Vec<(String, String)> = Vec::new(); // (key, content)
    // Prefixes that indicate ephemeral/generated nodes to skip
    let skip_prefixes = ["journal#", "daily-", "weekly-", "monthly-", "_",
                         "deep-index#", "facts-", "irc-history#"];
    for (key, node) in &store.nodes {
        if node.deleted { continue; }
        let key_matches = key.to_lowercase().contains(&term_lower);
        let content_matches = !key_only && node.content.to_lowercase().contains(&term_lower);
        if !key_matches && !content_matches { continue; }
        if skip_prefixes.iter().any(|p| key.starts_with(p)) { continue; }
        topic_nodes.push((key.clone(), node.content.clone()));
    }
    if topic_nodes.is_empty() {
        println!("No topic nodes found matching '{}'", term);
        return Ok(());
    }
    topic_nodes.sort_by(|a, b| a.0.cmp(&b.0));
    println!("=== Organize: '{}' ===", term);
    println!("Found {} topic nodes:\n", topic_nodes.len());
    for (key, content) in &topic_nodes {
        let lines = content.lines().count();
        let words = content.split_whitespace().count();
        println!("  {:60} {:>4} lines {:>5} words", key, lines, words);
    }
    // Step 2: pairwise similarity — near-duplicates are merge candidates
    let pairs = similarity::pairwise_similar(&topic_nodes, threshold);
    if pairs.is_empty() {
        println!("\nNo similar pairs above threshold {:.2}", threshold);
    } else {
        println!("\n=== Similar pairs (cosine > {:.2}) ===\n", threshold);
        for (a, b, sim) in &pairs {
            // Word counts give a quick sense of which side is the fuller node.
            let a_words = topic_nodes.iter().find(|(k,_)| k == a)
                .map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
            let b_words = topic_nodes.iter().find(|(k,_)| k == b)
                .map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
            println!("  [{:.3}] {} ({} words) ↔ {} ({} words)", sim, a, a_words, b, b_words);
        }
    }
    // Step 3: check connectivity within cluster
    let g = store.build_graph();
    println!("=== Connectivity ===\n");
    // Pick hub by intra-cluster connectivity, not overall degree
    let cluster_keys: std::collections::HashSet<&str> = topic_nodes.iter()
        .filter(|(k,_)| store.nodes.contains_key(k.as_str()))
        .map(|(k,_)| k.as_str())
        .collect();
    let mut best_hub: Option<(&str, usize)> = None;
    for key in &cluster_keys {
        // Count only neighbors that are themselves in the cluster.
        let intra_degree = g.neighbor_keys(key).iter()
            .filter(|n| cluster_keys.contains(*n))
            .count();
        // Option<(&str, usize)> is Copy, so unwrap() here doesn't move.
        if best_hub.is_none() || intra_degree > best_hub.unwrap().1 {
            best_hub = Some((key, intra_degree));
        }
    }
    if let Some((hub, deg)) = best_hub {
        println!("  Hub: {} (degree {})", hub, deg);
        let hub_nbrs = g.neighbor_keys(hub);
        let mut unlinked = Vec::new();
        for (key, _) in &topic_nodes {
            if key == hub { continue; }
            if store.nodes.get(key.as_str()).is_none() { continue; }
            if !hub_nbrs.contains(key.as_str()) {
                unlinked.push(key.clone());
            }
        }
        if unlinked.is_empty() {
            println!("  All cluster nodes connected to hub ✓");
        } else {
            println!("  NOT linked to hub:");
            for key in &unlinked {
                println!("    {} → needs link to {}", key, hub);
            }
        }
    }
    // Step 4: anchor node — a dedicated node linked to every cluster member
    if create_anchor {
        println!("\n=== Anchor node ===\n");
        if store.nodes.contains_key(term) && !store.nodes[term].deleted {
            println!("  Anchor '{}' already exists ✓", term);
        } else {
            let desc = format!("Anchor node for '{}' search term", term);
            store.upsert(term, &desc)?;
            // upsert just inserted the key, so this get cannot fail.
            let anchor_uuid = store.nodes.get(term).unwrap().uuid;
            for (key, _) in &topic_nodes {
                if store.nodes.get(key.as_str()).is_none() { continue; }
                let target_uuid = store.nodes[key.as_str()].uuid;
                let rel = store::new_relation(
                    anchor_uuid, target_uuid,
                    store::RelationType::Link, 0.8,
                    term, key,
                );
                store.add_relation(rel)?;
            }
            println!("  Created anchor '{}' with {} links", term, topic_nodes.len());
        }
    }
    store.save()?;
    Ok(())
}
/// Report node pairs that are textually similar but live in different
/// communities — candidates for interference / consolidation review.
fn cmd_interference(threshold: f32) -> Result<(), String> {
    let store = store::Store::load()?;
    let g = store.build_graph();
    let pairs = neuro::detect_interference(&store, &g, threshold);
    if pairs.is_empty() {
        println!("No interfering pairs above threshold {:.2}", threshold);
    } else {
        println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
        for (a, b, sim) in &pairs {
            // Bug fix: the keys were printed back-to-back with no separator
            // ("{}{}"), running them together; use the same " ↔ " separator
            // as the other pair listings in this file.
            println!("  [{:.3}] {} ↔ {}", sim, a, b);
        }
    }
    Ok(())
}
/// Join argv words into one query expression and run it through the
/// query engine against the loaded store and graph.
fn cmd_query(expr: &[String]) -> Result<(), String> {
    if expr.is_empty() {
        return Err("query requires an expression (try: poc-memory query --help)".into());
    }
    let store = store::Store::load()?;
    let graph = store.build_graph();
    let query_str = expr.join(" ");
    query::run_query(&store, &graph, &query_str)
}
/// Record a lookup for each given key (spaced-repetition bookkeeping).
fn cmd_lookup_bump(keys: &[String]) -> Result<(), String> {
    if keys.is_empty() {
        return Err("lookup-bump requires at least one key".into());
    }
    let refs: Vec<&str> = keys.iter().map(String::as_str).collect();
    lookups::bump_many(&refs)
}
/// Show lookup counts for a date (default: today), resolved against the
/// current set of store keys.
fn cmd_lookups(date: Option<&str>) -> Result<(), String> {
    let date = match date {
        Some(d) => d.to_string(),
        None => chrono::Local::now().format("%Y-%m-%d").to_string(),
    };
    let store = store::Store::load()?;
    let keys: Vec<String> = store.nodes.values().map(|n| n.key.clone()).collect();
    let resolved = lookups::dump_resolved(&date, &keys)?;
    if resolved.is_empty() {
        println!("No lookups for {}", date);
        return Ok(());
    }
    println!("Lookups for {}:", date);
    let mut total: u64 = 0;
    for (key, count) in &resolved {
        println!("  {:4} {}", count, key);
        total += *count as u64;
    }
    println!("\n{} distinct keys, {} total lookups", resolved.len(), total);
    Ok(())
}
/// Dispatch daemon subcommands: process lifecycle, logs, and RPCs into a
/// running daemon instance.
fn cmd_daemon(sub: DaemonCmd) -> Result<(), String> {
    match sub {
        DaemonCmd::Start => daemon::run_daemon(),
        DaemonCmd::Status => daemon::show_status(),
        DaemonCmd::Log { job, lines } => daemon::show_log(job.as_deref(), lines),
        DaemonCmd::Install => daemon::install_service(),
        // The next two are RPC calls into an already-running daemon.
        DaemonCmd::Consolidate => daemon::rpc_consolidate(),
        DaemonCmd::Run { agent, count } => daemon::rpc_run_agent(&agent, count),
        DaemonCmd::Tui => tui::run_tui(),
    }
}
/// Run the autonomous knowledge-consolidation loop with the given CLI
/// overrides; unspecified config fields keep their defaults.
fn cmd_knowledge_loop(max_cycles: usize, batch_size: usize, window: usize, max_depth: i32) -> Result<(), String> {
    let config = knowledge::KnowledgeLoopConfig {
        max_cycles,
        batch_size,
        window,
        max_depth,
        ..Default::default()
    };
    let results = knowledge::run_knowledge_loop(&config)?;
    let total_applied: usize = results.iter().map(|r| r.total_applied).sum();
    eprintln!("\nCompleted {} cycles, {} total actions applied",
        results.len(), total_applied);
    Ok(())
}
/// Mine facts from one transcript, or from every .jsonl in a directory
/// when `batch` is set. Unless `dry_run`, the facts are serialized to JSON
/// and written to `output_file` (or stdout).
fn cmd_fact_mine(path: &str, batch: bool, dry_run: bool, output_file: Option<&str>, min_messages: usize) -> Result<(), String> {
    let p = std::path::Path::new(path);
    let paths: Vec<std::path::PathBuf> = if !batch {
        vec![p.to_path_buf()]
    } else {
        if !p.is_dir() {
            return Err(format!("Not a directory: {}", path));
        }
        let mut transcripts: Vec<std::path::PathBuf> = std::fs::read_dir(p)
            .map_err(|e| format!("read dir: {}", e))?
            .filter_map(|e| e.ok())
            .map(|e| e.path())
            .filter(|p| p.extension().map(|x| x == "jsonl").unwrap_or(false))
            .collect();
        transcripts.sort();
        eprintln!("Found {} transcripts", transcripts.len());
        transcripts
    };
    let path_refs: Vec<&std::path::Path> = paths.iter().map(|p| p.as_path()).collect();
    let facts = fact_mine::mine_batch(&path_refs, min_messages, dry_run)?;
    if !dry_run {
        let json = serde_json::to_string_pretty(&facts)
            .map_err(|e| format!("serialize: {}", e))?;
        match output_file {
            Some(out) => {
                std::fs::write(out, &json).map_err(|e| format!("write: {}", e))?;
                eprintln!("\nWrote {} facts to {}", facts.len(), out);
            }
            None => println!("{}", json),
        }
    }
    eprintln!("\nTotal: {} facts from {} transcripts", facts.len(), paths.len());
    Ok(())
}
/// Mine facts from a single transcript and persist them to the store.
fn cmd_fact_mine_store(path: &str) -> Result<(), String> {
    let transcript = std::path::Path::new(path);
    if !transcript.exists() {
        return Err(format!("File not found: {}", transcript.display()));
    }
    let stored = fact_mine::mine_and_store(transcript, None)?;
    eprintln!("Stored {} facts", stored);
    Ok(())
}