// consciousness/src/main.rs
#![allow(dead_code)]
// poc-memory: graph-structured memory for AI assistants
//
// Authors: ProofOfConcept <poc@bcachefs.org> and Kent Overstreet
// License: MIT OR Apache-2.0
//
// Architecture:
// nodes.capnp - append-only content node log
// relations.capnp - append-only relation log
// state.bin - derived KV cache (rebuilt from logs when stale)
//
// Graph algorithms: clustering coefficient, community detection (label
// propagation), schema fit scoring, small-world metrics, consolidation
// priority. Text similarity via BM25 with Porter stemming.
//
// Neuroscience-inspired: spaced repetition replay, emotional gating,
// interference detection, schema assimilation, reconsolidation.
use poc_memory::*;
use std::env;
use std::process;
/// Find the most recently modified .jsonl transcript in the Claude projects dir.
fn find_current_transcript() -> Option<String> {
let home = env::var("HOME").ok()?;
let projects = std::path::Path::new(&home).join(".claude/projects");
if !projects.exists() { return None; }
// Search all project dirs for the most recent .jsonl
let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
if let Ok(dirs) = std::fs::read_dir(&projects) {
for dir_entry in dirs.filter_map(|e| e.ok()) {
if !dir_entry.path().is_dir() { continue; }
if let Ok(files) = std::fs::read_dir(dir_entry.path()) {
for f in files.filter_map(|e| e.ok()) {
let p = f.path();
if p.extension().map(|x| x == "jsonl").unwrap_or(false) {
if let Ok(meta) = p.metadata() {
if let Ok(mtime) = meta.modified() {
if newest.as_ref().is_none_or(|(t, _)| mtime > *t) {
newest = Some((mtime, p));
}
}
}
}
}
}
}
}
newest.map(|(_, p)| p.to_string_lossy().to_string())
}
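/// Entry point: dispatch the first CLI argument to its command handler;
/// unknown or missing commands print usage and exit non-zero.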
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
usage();
process::exit(1);
}
let result = match args[1].as_str() {
"search" => cmd_search(&args[2..]),
"init" => cmd_init(),
"migrate" => cmd_migrate(),
"health" => cmd_health(),
"fsck" => cmd_fsck(),
"status" => cmd_status(),
"graph" => cmd_graph(),
"used" => cmd_used(&args[2..]),
"wrong" => cmd_wrong(&args[2..]),
"gap" => cmd_gap(&args[2..]),
"categorize" => cmd_categorize(&args[2..]),
"fix-categories" => cmd_fix_categories(),
"cap-degree" => cmd_cap_degree(&args[2..]),
"link-orphans" => cmd_link_orphans(&args[2..]),
"decay" => cmd_decay(),
"consolidate-batch" => cmd_consolidate_batch(&args[2..]),
"log" => cmd_log(),
"params" => cmd_params(),
"link" => cmd_link(&args[2..]),
"replay-queue" => cmd_replay_queue(&args[2..]),
"interference" => cmd_interference(&args[2..]),
"link-add" => cmd_link_add(&args[2..]),
"link-impact" => cmd_link_impact(&args[2..]),
"consolidate-session" => cmd_consolidate_session(),
"consolidate-full" => cmd_consolidate_full(),
"triangle-close" => cmd_triangle_close(&args[2..]),
"daily-check" => cmd_daily_check(),
"apply-agent" => cmd_apply_agent(&args[2..]),
"digest" => cmd_digest(&args[2..]),
"digest-links" => cmd_digest_links(&args[2..]),
"journal-enrich" => cmd_journal_enrich(&args[2..]),
"experience-mine" => cmd_experience_mine(&args[2..]),
"apply-consolidation" => cmd_apply_consolidation(&args[2..]),
"differentiate" => cmd_differentiate(&args[2..]),
"link-audit" => cmd_link_audit(&args[2..]),
"trace" => cmd_trace(&args[2..]),
"spectral" => cmd_spectral(&args[2..]),
"spectral-save" => cmd_spectral_save(&args[2..]),
"spectral-neighbors" => cmd_spectral_neighbors(&args[2..]),
"spectral-positions" => cmd_spectral_positions(&args[2..]),
"spectral-suggest" => cmd_spectral_suggest(&args[2..]),
"list-keys" => cmd_list_keys(&args[2..]),
"list-edges" => cmd_list_edges(),
"dump-json" => cmd_dump_json(),
"node-delete" => cmd_node_delete(&args[2..]),
"node-rename" => cmd_node_rename(&args[2..]),
"journal-ts-migrate" => cmd_journal_ts_migrate(),
"load-context" => cmd_load_context(&args[2..]),
"render" => cmd_render(&args[2..]),
"history" => cmd_history(&args[2..]),
"write" => cmd_write(&args[2..]),
"import" => cmd_import(&args[2..]),
"export" => cmd_export(&args[2..]),
"journal-write" => cmd_journal_write(&args[2..]),
"journal-tail" => cmd_journal_tail(&args[2..]),
"query" => cmd_query(&args[2..]),
"lookup-bump" => cmd_lookup_bump(&args[2..]),
"lookups" => cmd_lookups(&args[2..]),
"daemon" => cmd_daemon(&args[2..]),
"knowledge-loop" => cmd_knowledge_loop(&args[2..]),
"fact-mine" => cmd_fact_mine(&args[2..]),
"fact-mine-store" => cmd_fact_mine_store(&args[2..]),
_ => {
eprintln!("Unknown command: {}", args[1]);
usage();
process::exit(1);
}
};
if let Err(e) = result {
eprintln!("Error: {}", e);
process::exit(1);
}
}
fn usage() {
eprintln!("poc-memory v0.4.0 — graph-structured memory store
Commands:
search QUERY [--expand] [--category CAT] Search memory (AND logic)
init Scan markdown files, index all memory units
migrate Migrate from old weights.json system
health Report graph metrics (CC, communities, small-world)
status Summary of memory state
graph Show graph structure overview
used KEY Mark a memory as useful (boosts weight)
wrong KEY [CONTEXT] Mark a memory as wrong/irrelevant
gap DESCRIPTION Record a gap in memory coverage
categorize KEY CATEGORY Reassign category (core/tech/gen/obs/task)
decay Apply daily weight decay
consolidate-batch [--count N] [--auto]
Run agent consolidation on priority nodes
log Show recent retrieval log
params Show current parameters
link N Interactive graph walk from search result N
replay-queue [--count N] Show spaced repetition replay queue
interference [--threshold F]
Detect potentially confusable memory pairs
link-add SOURCE TARGET [REASON]
Add a link between two nodes
link-impact SOURCE TARGET Simulate adding an edge, report topology impact
consolidate-session Analyze metrics, plan agent allocation
consolidate-full Autonomous: plan → agents → apply → digests → links
triangle-close [DEG] [SIM] [MAX]
Close triangles: link similar neighbors of hubs
daily-check Brief metrics check (for cron/notifications)
apply-agent [--all] Import pending agent results into the graph
digest daily [DATE] Generate daily episodic digest (default: today)
digest weekly [DATE] Generate weekly digest (any date in target week)
digest monthly [YYYY-MM] Generate monthly digest (default: current month)
digest auto Generate all missing digests (daily → weekly → monthly)
digest-links [--apply] Parse and apply links from digest files
journal-enrich JSONL TEXT [LINE]
Enrich journal entry with conversation links
experience-mine [JSONL] Mine conversation for experiential moments to journal
apply-consolidation [--apply] [--report FILE]
Extract and apply actions from consolidation reports
differentiate [KEY] [--apply]
Redistribute hub links to section-level children
link-audit [--apply] Walk every link, send to Sonnet for quality review
trace KEY Walk temporal links: semantic → episodic → conversation
spectral [K] Spectral decomposition of the memory graph (default K=30)
spectral-save [K] Compute and save spectral embedding (default K=20)
spectral-neighbors KEY [N] Find N spectrally nearest nodes (default N=15)
spectral-positions [N] Show N nodes ranked by outlier/bridge score (default 30)
spectral-suggest [N] Find N spectrally close but unlinked pairs (default 20)
list-keys [PATTERN] List all node keys (one per line, optional glob)
list-edges List all edges (tsv: source target strength type)
dump-json Dump entire store as JSON
node-delete KEY Soft-delete a node (appends deleted version to log)
node-rename OLD NEW Rename a node key; updates edge debug strings atomically
journal-ts-migrate Populate created_at for nodes missing it
load-context Output session-start context from the store
render KEY Output a node's content to stdout
history [--full] KEY Show all stored versions of a node
--full shows complete content for every version
write KEY Upsert node content from stdin
import FILE [FILE...] Import markdown file(s) into the store
export [FILE|--all] Export store nodes to markdown file(s)
journal-write TEXT Write a journal entry to the store
journal-tail [N] [--level=L] [--full]
Show last N entries (default 20, --full for content)
--level: 0/journal, 1/daily, 2/weekly, 3/monthly
query 'EXPR | stages' Query the memory graph
Stages: sort F [asc], limit N, select F,F, count
Ex: \"degree > 15 | sort degree | limit 10\"
lookup-bump KEY [KEY...] Bump daily lookup counter for keys (fast, no store)
lookups [DATE] Show daily lookup counts (default: today)
daemon Start background job daemon
daemon status Show daemon status
daemon log [JOB] [N] Show last N log lines (default 50, optional job filter)
knowledge-loop [OPTIONS] Run knowledge agents to convergence
--max-cycles N (default 20)
--batch-size N (default 5)
--window N (default 5)
--max-depth N (default 4)
fact-mine JSONL [OPTIONS] Extract atomic facts from conversation transcripts
fact-mine --batch DIR Mine all .jsonl files in directory");
}
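/// Search the store with spreading activation (AND logic across terms),
/// optionally filtered by category; logs the retrieval, bumps daily lookup
/// counters, and with --expand appends spectral (structural) neighbors.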
fn cmd_search(args: &[String]) -> Result<(), String> {
use store::StoreView;
if args.is_empty() || args.iter().any(|a| a == "--help" || a == "-h") {
println!("Usage: poc-memory search QUERY [QUERY...] [OPTIONS]
Search memory using spreading activation (AND logic across terms).
Options:
--expand Show 15 results instead of 5, plus spectral neighbors
--category CAT Filter results to category: core, tech, gen, obs, task
--help, -h Show this help
Examples:
poc-memory search irc connection
poc-memory search bcachefs transaction --expand
poc-memory search rust --category tech");
return Ok(());
}
let expand = args.iter().any(|a| a == "--expand");
let category_filter: Option<String> = {
let mut cat = None;
let mut iter = args.iter();
while let Some(a) = iter.next() {
if a == "--category" {
cat = iter.next().cloned();
break;
}
}
cat
};
let query: String = args.iter()
.filter(|a| *a != "--expand" && *a != "--category")
.scan(false, |skip_next, a| {
if *skip_next { *skip_next = false; return Some(None); }
if a == "--category" { *skip_next = true; return Some(None); }
Some(Some(a.as_str()))
})
.flatten()
.collect::<Vec<_>>()
.join(" ");
let view = store::AnyView::load()?;
let mut results = search::search(&query, &view);
// Filter by category if requested
if let Some(ref cat_str) = category_filter {
let cat = store::Category::from_str(cat_str)
.ok_or_else(|| format!("Unknown category '{}' (use: core, tech, gen, obs, task)", cat_str))?;
let store = store::Store::load()?;
results.retain(|r| {
store.nodes.get(&r.key)
.map(|n| n.category.label() == cat.label())
.unwrap_or(false)
});
}
if results.is_empty() {
eprintln!("No results for '{}'", query);
return Ok(());
}
let limit = if expand { 15 } else { 5 };
// Log retrieval to a small append-only file (avoid 6MB state.bin rewrite)
store::Store::log_retrieval_static(&query,
&results.iter().map(|r| r.key.clone()).collect::<Vec<_>>());
// Bump daily lookup counters (fast path, no store needed)
let bump_keys: Vec<&str> = results.iter().take(limit).map(|r| r.key.as_str()).collect();
let _ = lookups::bump_many(&bump_keys);
// Show text results
let text_keys: std::collections::HashSet<String> = results.iter()
.take(limit).map(|r| r.key.clone()).collect();
for (i, r) in results.iter().enumerate().take(limit) {
let marker = if r.is_direct { "" } else { " " };
let weight = view.node_weight(&r.key);
print!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
println!();
if let Some(ref snippet) = r.snippet {
println!(" {}", snippet);
}
}
// Spectral expansion: only with --expand
if expand {
if let Ok(emb) = spectral::load_embedding() {
let seeds: Vec<&str> = results.iter()
.take(5)
.map(|r| r.key.as_str())
.filter(|k| emb.coords.contains_key(*k))
.collect();
if !seeds.is_empty() {
let spectral_hits = spectral::nearest_to_seeds(&emb, &seeds, 10);
let new_hits: Vec<_> = spectral_hits.into_iter()
.filter(|(k, _)| !text_keys.contains(k))
.take(5)
.collect();
if !new_hits.is_empty() {
println!("\nSpectral neighbors (structural, not keyword):");
for (k, _dist) in &new_hits {
let weight = view.node_weight(k);
print!(" ~ [{:.2}] {}", weight, k);
println!();
if let Some(content) = view.node_content(k) {
let snippet: String = content.lines()
.find(|l| !l.trim().is_empty() && !l.starts_with('#'))
.unwrap_or("")
.chars().take(100).collect();
if !snippet.is_empty() {
println!(" {}", snippet);
}
}
}
}
}
}
}
Ok(())
}
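/// First-run setup: create the data dir, install default markdown files,
/// index existing markdown into the store, seed the identity node if
/// missing, install hooks, and write an example config if none exists.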
fn cmd_init() -> Result<(), String> {
let cfg = config::get();
// Ensure data directory exists
std::fs::create_dir_all(&cfg.data_dir)
.map_err(|e| format!("create data_dir: {}", e))?;
// Install filesystem files (not store nodes)
install_default_file(&cfg.data_dir, "instructions.md",
include_str!("../defaults/instructions.md"))?;
install_default_file(&cfg.data_dir, "on-consciousness.md",
include_str!("../defaults/on-consciousness.md"))?;
// Initialize store and seed default identity node if empty
let mut store = store::Store::load()?;
let count = store.init_from_markdown()?;
if !store.nodes.contains_key("identity") {
let default_identity = include_str!("../defaults/identity.md");
store.upsert("identity", default_identity)
.map_err(|e| format!("seed identity: {}", e))?;
println!("Seeded identity in store");
}
store.save()?;
println!("Indexed {} memory units", count);
// Install hooks
daemon::install_hook()?;
// Create config if none exists
let config_path = std::env::var("POC_MEMORY_CONFIG")
.map(std::path::PathBuf::from)
.unwrap_or_else(|_| {
std::path::PathBuf::from(std::env::var("HOME").unwrap())
.join(".config/poc-memory/config.jsonl")
});
if !config_path.exists() {
let config_dir = config_path.parent().unwrap();
std::fs::create_dir_all(config_dir)
.map_err(|e| format!("create config dir: {}", e))?;
let example = include_str!("../config.example.jsonl");
std::fs::write(&config_path, example)
.map_err(|e| format!("write config: {}", e))?;
println!("Created config at {} — edit with your name and context groups",
config_path.display());
}
println!("Done. Run `poc-memory load-context --stats` to verify.");
Ok(())
}
fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -> Result<(), String> {
let path = data_dir.join(name);
if !path.exists() {
std::fs::write(&path, content)
.map_err(|e| format!("write {}: {}", name, e))?;
println!("Created {}", path.display());
}
Ok(())
}
fn cmd_migrate() -> Result<(), String> {
migrate::migrate()
}
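/// Consistency check: run store-level fsck, strip stale .md keys, then
/// count relations whose source or target node no longer exists.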
fn cmd_fsck() -> Result<(), String> {
store::fsck()?;
store::strip_md_keys()?;
// Check for broken links
let store = store::Store::load()?;
let mut orphans = 0usize;
for rel in &store.relations {
if rel.deleted { continue; }
if !store.nodes.contains_key(&rel.source_key)
|| !store.nodes.contains_key(&rel.target_key) {
orphans += 1;
}
}
if orphans > 0 {
eprintln!("{} broken links (run `health` for details)", orphans);
} else {
eprintln!("No broken links");
}
Ok(())
}
fn cmd_health() -> Result<(), String> {
let store = store::Store::load()?;
let g = store.build_graph();
let health = graph::health_report(&g, &store);
println!("{}", health);
Ok(())
}
fn cmd_status() -> Result<(), String> {
let store = store::Store::load()?;
let node_count = store.nodes.len();
let rel_count = store.relations.len();
let categories = store.category_counts();
println!("Nodes: {} Relations: {}", node_count, rel_count);
println!("Categories: core={} tech={} gen={} obs={} task={}",
categories.get("core").unwrap_or(&0),
categories.get("tech").unwrap_or(&0),
categories.get("gen").unwrap_or(&0),
categories.get("obs").unwrap_or(&0),
categories.get("task").unwrap_or(&0),
);
let g = store.build_graph();
println!("Graph edges: {} Communities: {}",
g.edge_count(), g.community_count());
Ok(())
}
fn cmd_graph() -> Result<(), String> {
let store = store::Store::load()?;
let g = store.build_graph();
println!("Top nodes by degree:");
query::run_query(&store, &g,
"* | sort degree | limit 10 | select degree,clustering_coefficient")
}
fn cmd_used(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory used KEY".into());
}
let key = args.join(" ");
let mut store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
store.mark_used(&resolved);
store.save()?;
println!("Marked '{}' as used", resolved);
Ok(())
}
fn cmd_wrong(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory wrong KEY [CONTEXT]".into());
}
let key = &args[0];
let ctx = if args.len() > 1 { Some(args[1..].join(" ")) } else { None };
let mut store = store::Store::load()?;
let resolved = store.resolve_key(key)?;
store.mark_wrong(&resolved, ctx.as_deref());
store.save()?;
println!("Marked '{}' as wrong", resolved);
Ok(())
}
fn cmd_gap(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory gap DESCRIPTION".into());
}
let desc = args.join(" ");
let mut store = store::Store::load()?;
store.record_gap(&desc);
store.save()?;
println!("Recorded gap: {}", desc);
Ok(())
}
fn cmd_categorize(args: &[String]) -> Result<(), String> {
if args.len() < 2 {
return Err("Usage: poc-memory categorize KEY CATEGORY".into());
}
let key = &args[0];
let cat = &args[1];
let mut store = store::Store::load()?;
let resolved = store.resolve_key(key)?;
store.categorize(&resolved, cat)?;
store.save()?;
println!("Set '{}' category to {}", resolved, cat);
Ok(())
}
fn cmd_fix_categories() -> Result<(), String> {
let mut store = store::Store::load()?;
let before = format!("{:?}", store.category_counts());
let (changed, kept) = store.fix_categories()?;
store.save()?;
let after = format!("{:?}", store.category_counts());
println!("Category fix: {} changed, {} kept", changed, kept);
println!("\nBefore: {}", before);
println!("After: {}", after);
Ok(())
}
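/// Connect under-linked nodes: give nodes below the minimum degree up to
/// links_per new edges to their most text-similar peers above sim_thresh.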
fn cmd_link_orphans(args: &[String]) -> Result<(), String> {
let min_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(2);
let links_per: usize = args.get(1).and_then(|s| s.parse().ok()).unwrap_or(3);
let sim_thresh: f32 = args.get(2).and_then(|s| s.parse().ok()).unwrap_or(0.15);
let mut store = store::Store::load()?;
let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
orphans, links, min_deg, links_per, sim_thresh);
Ok(())
}
fn cmd_cap_degree(args: &[String]) -> Result<(), String> {
let max_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(50);
let mut store = store::Store::load()?;
let (hubs, pruned) = store.cap_degree(max_deg)?;
store.save()?;
println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
Ok(())
}
fn cmd_decay() -> Result<(), String> {
let mut store = store::Store::load()?;
let (decayed, pruned) = store.decay();
store.save()?;
println!("Decayed {} nodes, pruned {} below threshold", decayed, pruned);
Ok(())
}
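/// Consolidation batch: parse --count/--auto/--agent, then either print a
/// single agent's prompt or run consolidation over priority nodes.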
fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
let mut count = 5usize;
let mut auto = false;
let mut agent: Option<String> = None;
let mut i = 0;
while i < args.len() {
match args[i].as_str() {
"--count" if i + 1 < args.len() => {
count = args[i + 1].parse().map_err(|_| "invalid count")?;
i += 2;
}
"--auto" => { auto = true; i += 1; }
"--agent" if i + 1 < args.len() => {
agent = Some(args[i + 1].clone());
i += 2;
}
_ => { i += 1; }
}
}
let store = store::Store::load()?;
if let Some(agent_name) = agent {
// Generate a specific agent prompt
let prompt = neuro::agent_prompt(&store, &agent_name, count)?;
println!("{}", prompt);
Ok(())
} else {
neuro::consolidation_batch(&store, count, auto)
}
}
fn cmd_log() -> Result<(), String> {
let store = store::Store::load()?;
for event in store.retrieval_log.iter().rev().take(20) {
println!("[{}] q=\"{}\"{} results",
event.timestamp, event.query, event.results.len());
for r in &event.results {
println!(" {}", r);
}
}
Ok(())
}
fn cmd_params() -> Result<(), String> {
let store = store::Store::load()?;
println!("decay_factor: {}", store.params.decay_factor);
println!("use_boost: {}", store.params.use_boost);
println!("prune_threshold: {}", store.params.prune_threshold);
println!("edge_decay: {}", store.params.edge_decay);
println!("max_hops: {}", store.params.max_hops);
println!("min_activation: {}", store.params.min_activation);
Ok(())
}
fn cmd_link(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory link KEY".into());
}
let key = args.join(" ");
let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
let g = store.build_graph();
println!("Neighbors of '{}':", resolved);
query::run_query(&store, &g,
&format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
}
fn cmd_replay_queue(args: &[String]) -> Result<(), String> {
let mut count = 10usize;
let mut i = 0;
while i < args.len() {
match args[i].as_str() {
"--count" if i + 1 < args.len() => {
count = args[i + 1].parse().map_err(|_| "invalid count")?;
i += 2;
}
_ => { i += 1; }
}
}
let store = store::Store::load()?;
let queue = neuro::replay_queue(&store, count);
println!("Replay queue ({} items):", queue.len());
for (i, item) in queue.iter().enumerate() {
println!(" {:2}. [{:.3}] {:>10} {} (interval={}d, emotion={:.1}, spectral={:.1})",
i + 1, item.priority, item.classification, item.key,
item.interval_days, item.emotion, item.outlier_score);
}
Ok(())
}
fn cmd_consolidate_session() -> Result<(), String> {
let store = store::Store::load()?;
let plan = neuro::consolidation_plan(&store);
println!("{}", neuro::format_plan(&plan));
Ok(())
}
fn cmd_consolidate_full() -> Result<(), String> {
let mut store = store::Store::load()?;
consolidate::consolidate_full(&mut store)
}
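/// Triangle closure: for each hub of at least min_degree, link pairs of
/// its neighbors whose similarity clears sim_threshold (capped per hub),
/// which tends to raise the graph's clustering coefficient.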
fn cmd_triangle_close(args: &[String]) -> Result<(), String> {
let min_degree: usize = args.first()
.and_then(|s| s.parse().ok())
.unwrap_or(5);
let sim_threshold: f32 = args.get(1)
.and_then(|s| s.parse().ok())
.unwrap_or(0.3);
let max_per_hub: usize = args.get(2)
.and_then(|s| s.parse().ok())
.unwrap_or(10);
println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
min_degree, sim_threshold, max_per_hub);
let mut store = store::Store::load()?;
let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
Ok(())
}
fn cmd_daily_check() -> Result<(), String> {
let store = store::Store::load()?;
let report = neuro::daily_check(&store);
print!("{}", report);
Ok(())
}
fn cmd_link_add(args: &[String]) -> Result<(), String> {
if args.len() < 2 {
return Err("Usage: poc-memory link-add SOURCE TARGET [REASON]".into());
}
let mut store = store::Store::load()?;
let source = store.resolve_key(&args[0])?;
let target = store.resolve_key(&args[1])?;
let reason = if args.len() > 2 { args[2..].join(" ") } else { String::new() };
// Refine target to best-matching section
let source_content = store.nodes.get(&source)
.map(|n| n.content.as_str()).unwrap_or("");
let target = neuro::refine_target(&store, source_content, &target);
// Find UUIDs
let source_uuid = store.nodes.get(&source)
.map(|n| n.uuid)
.ok_or_else(|| format!("source not found: {}", source))?;
let target_uuid = store.nodes.get(&target)
.map(|n| n.uuid)
.ok_or_else(|| format!("target not found: {}", target))?;
// Check if link already exists
let exists = store.relations.iter().any(|r|
r.source_key == source && r.target_key == target && !r.deleted
);
if exists {
println!("Link already exists: {}{}", source, target);
return Ok(());
}
let rel = store::new_relation(
source_uuid, target_uuid,
store::RelationType::Auto,
0.5,
&source, &target,
);
store.add_relation(rel)?;
if !reason.is_empty() {
println!("+ {}{} ({})", source, target, reason);
} else {
println!("+ {}{}", source, target);
}
Ok(())
}
fn cmd_link_impact(args: &[String]) -> Result<(), String> {
if args.len() < 2 {
return Err("Usage: poc-memory link-impact SOURCE TARGET".into());
}
let store = store::Store::load()?;
let source = store.resolve_key(&args[0])?;
let target = store.resolve_key(&args[1])?;
let g = store.build_graph();
let impact = g.link_impact(&source, &target);
println!("Link impact: {}{}", source, target);
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
println!(" ΔGini: {:+.6}", impact.delta_gini);
println!(" Assessment: {}", impact.assessment);
Ok(())
}
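/// Import agent result JSON files: for each suggested link, resolve the
/// target key, find the journal node the entry came from, add a Link
/// relation, and move processed files to done/ unless --all is given.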
fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
let home = env::var("HOME").unwrap_or_default();
let results_dir = std::path::PathBuf::from(&home)
.join(".claude/memory/agent-results");
if !results_dir.exists() {
println!("No agent results directory");
return Ok(());
}
let mut store = store::Store::load()?;
let mut applied = 0;
let mut errors = 0;
let process_all = args.iter().any(|a| a == "--all");
// Find .json result files
let mut files: Vec<_> = std::fs::read_dir(&results_dir)
.map_err(|e| format!("read results dir: {}", e))?
.filter_map(|e| e.ok())
.filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
.collect();
files.sort_by_key(|e| e.path());
for entry in &files {
let path = entry.path();
let content = match std::fs::read_to_string(&path) {
Ok(c) => c,
Err(e) => {
eprintln!(" Skip {}: {}", path.display(), e);
errors += 1;
continue;
}
};
let data: serde_json::Value = match serde_json::from_str(&content) {
Ok(d) => d,
Err(e) => {
eprintln!(" Skip {}: parse error: {}", path.display(), e);
errors += 1;
continue;
}
};
// Check for agent_result with links
let agent_result = data.get("agent_result").or(Some(&data));
let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
Some(l) => l,
None => continue,
};
let entry_text = data.get("entry_text")
.and_then(|v| v.as_str())
.unwrap_or("");
let source_start = agent_result
.and_then(|r| r.get("source_start"))
.and_then(|v| v.as_u64());
let source_end = agent_result
.and_then(|r| r.get("source_end"))
.and_then(|v| v.as_u64());
println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
if let (Some(start), Some(end)) = (source_start, source_end) {
println!(" Source: L{}-L{}", start, end);
}
for link in links {
let target = match link.get("target").and_then(|v| v.as_str()) {
Some(t) => t,
None => continue,
};
let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
// Skip NOTE: targets (new topics, not existing nodes)
if let Some(note) = target.strip_prefix("NOTE:") {
println!(" NOTE: {}{}", note, reason);
continue;
}
// Try to resolve the target key and link from journal entry
let resolved = match store.resolve_key(target) {
Ok(r) => r,
Err(_) => {
println!(" SKIP {} (not found in graph)", target);
continue;
}
};
let source_key = match store.find_journal_node(entry_text) {
Some(k) => k,
None => {
println!(" SKIP {} (no matching journal node)", target);
continue;
}
};
// Get UUIDs for both nodes
let source_uuid = match store.nodes.get(&source_key) {
Some(n) => n.uuid,
None => continue,
};
let target_uuid = match store.nodes.get(&resolved) {
Some(n) => n.uuid,
None => continue,
};
let rel = store::new_relation(
source_uuid, target_uuid,
store::RelationType::Link,
0.5,
&source_key, &resolved,
);
if let Err(e) = store.add_relation(rel) {
eprintln!(" Error adding relation: {}", e);
errors += 1;
} else {
println!(" LINK {}{} ({})", source_key, resolved, reason);
applied += 1;
}
}
// Move processed file to avoid re-processing
if !process_all {
let done_dir = util::memory_subdir("agent-results/done")?;
let dest = done_dir.join(path.file_name().unwrap());
std::fs::rename(&path, &dest).ok();
}
}
if applied > 0 {
store.save()?;
}
println!("\nApplied {} links ({} errors, {} files processed)",
applied, errors, files.len());
Ok(())
}
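/// Generate episodic digests: "auto" fills in every missing daily, weekly,
/// and monthly digest; otherwise generate one digest for the given date
/// (defaulting to today).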
fn cmd_digest(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory digest daily|weekly|monthly|auto [DATE]".into());
}
let mut store = store::Store::load()?;
let date_arg = args.get(1).map(|s| s.as_str()).unwrap_or("");
match args[0].as_str() {
"auto" => digest::digest_auto(&mut store),
name @ ("daily" | "weekly" | "monthly") => {
let arg = if date_arg.is_empty() {
store::format_date(store::now_epoch())
} else {
date_arg.to_string()
};
digest::generate(&mut store, name, &arg)
}
_ => Err(format!("Unknown digest type: {}. Use: daily, weekly, monthly, auto", args[0])),
}
}
fn cmd_digest_links(args: &[String]) -> Result<(), String> {
let do_apply = args.iter().any(|a| a == "--apply");
let store = store::Store::load()?;
let links = digest::parse_all_digest_links(&store);
drop(store);
println!("Found {} unique links from digest nodes", links.len());
if !do_apply {
for (i, link) in links.iter().enumerate() {
println!(" {:3}. {}{}", i + 1, link.source, link.target);
if !link.reason.is_empty() {
println!(" ({})", &link.reason[..link.reason.len().min(80)]);
}
}
println!("\nTo apply: poc-memory digest-links --apply");
return Ok(());
}
let mut store = store::Store::load()?;
let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
Ok(())
}
fn cmd_journal_enrich(args: &[String]) -> Result<(), String> {
if args.len() < 2 {
return Err("Usage: poc-memory journal-enrich JSONL_PATH ENTRY_TEXT [GREP_LINE]".into());
}
let jsonl_path = &args[0];
let entry_text = &args[1];
let grep_line: usize = args.get(2)
.and_then(|a| a.parse().ok())
.unwrap_or(0);
if !std::path::Path::new(jsonl_path.as_str()).is_file() {
return Err(format!("JSONL not found: {}", jsonl_path));
}
let mut store = store::Store::load()?;
enrich::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
}
fn cmd_experience_mine(args: &[String]) -> Result<(), String> {
let jsonl_path = if let Some(path) = args.first() {
path.clone()
} else {
find_current_transcript()
.ok_or("no JSONL transcripts found")?
};
if !std::path::Path::new(jsonl_path.as_str()).is_file() {
return Err(format!("JSONL not found: {}", jsonl_path));
}
let mut store = store::Store::load()?;
let count = enrich::experience_mine(&mut store, &jsonl_path, None)?;
println!("Done: {} new entries mined.", count);
Ok(())
}
fn cmd_apply_consolidation(args: &[String]) -> Result<(), String> {
let do_apply = args.iter().any(|a| a == "--apply");
let report_file = args.windows(2)
.find(|w| w[0] == "--report")
.map(|w| w[1].as_str());
let mut store = store::Store::load()?;
consolidate::apply_consolidation(&mut store, do_apply, report_file)
}
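/// Hub differentiation: with a KEY, preview (or with --apply, perform)
/// moving a file-level hub's links onto its best-matching section
/// children; with no key, list all hubs eligible for differentiation.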
fn cmd_differentiate(args: &[String]) -> Result<(), String> {
let do_apply = args.iter().any(|a| a == "--apply");
let key_arg: Option<&str> = args.iter()
.find(|a| !a.starts_with("--"))
.map(|s| s.as_str());
let mut store = store::Store::load()?;
if let Some(key) = key_arg {
// Differentiate a specific hub
let resolved = store.resolve_key(key)?;
let moves = neuro::differentiate_hub(&store, &resolved)
.ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;
// Group by target section for display
let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
std::collections::BTreeMap::new();
for mv in &moves {
by_section.entry(mv.to_section.clone()).or_default().push(mv);
}
println!("Hub '{}' — {} links to redistribute across {} sections\n",
resolved, moves.len(), by_section.len());
for (section, section_moves) in &by_section {
println!(" {} ({} links):", section, section_moves.len());
for mv in section_moves.iter().take(5) {
println!(" [{:.3}] {}{}", mv.similarity,
mv.neighbor_key, mv.neighbor_snippet);
}
if section_moves.len() > 5 {
println!(" ... and {} more", section_moves.len() - 5);
}
}
if !do_apply {
println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
return Ok(());
}
let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
store.save()?;
println!("\nApplied: {} Skipped: {}", applied, skipped);
} else {
// Show all differentiable hubs
let hubs = neuro::find_differentiable_hubs(&store);
if hubs.is_empty() {
println!("No file-level hubs with sections found above threshold");
return Ok(());
}
println!("Differentiable hubs (file-level nodes with sections):\n");
for (key, degree, sections) in &hubs {
println!(" {:40} deg={:3} sections={}", key, degree, sections);
}
println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
}
Ok(())
}
fn cmd_link_audit(args: &[String]) -> Result<(), String> {
let apply = args.iter().any(|a| a == "--apply");
let mut store = store::Store::load()?;
let stats = audit::link_audit(&mut store, apply)?;
println!("\n{}", "=".repeat(60));
println!("Link audit complete:");
println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
println!("{}", "=".repeat(60));
Ok(())
}
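/// Temporal trace: print a node's metadata and a content preview, then
/// its neighbors grouped by type (weekly digests, daily digests, session
/// entries, semantic links) so the episodic chain is visible at a glance.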
fn cmd_trace(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory trace KEY".into());
}
let key = args.join(" ");
let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
let g = store.build_graph();
let node = store.nodes.get(&resolved)
.ok_or_else(|| format!("Node not found: {}", resolved))?;
// Display the node itself
println!("=== {} ===", resolved);
println!("Type: {:?} Category: {} Weight: {:.2}",
node.node_type, node.category.label(), node.weight);
if !node.source_ref.is_empty() {
println!("Source: {}", node.source_ref);
}
// Show content preview
let preview = if node.content.len() > 200 {
// floor_char_boundary is nightly-only; step back to a valid boundary manually
let mut end = 200;
while !node.content.is_char_boundary(end) { end -= 1; }
format!("{}...", &node.content[..end])
} else {
node.content.clone()
};
println!("\n{}\n", preview);
// Walk neighbors, grouped by node type
let neighbors = g.neighbors(&resolved);
let mut episodic_session = Vec::new();
let mut episodic_daily = Vec::new();
let mut episodic_weekly = Vec::new();
let mut semantic = Vec::new();
for (n, strength) in &neighbors {
if let Some(nnode) = store.nodes.get(n.as_str()) {
let entry = (n.as_str(), *strength, nnode);
match nnode.node_type {
store::NodeType::EpisodicSession =>
episodic_session.push(entry),
store::NodeType::EpisodicDaily =>
episodic_daily.push(entry),
store::NodeType::EpisodicWeekly =>
episodic_weekly.push(entry),
store::NodeType::Semantic =>
semantic.push(entry),
}
}
}
if !episodic_weekly.is_empty() {
println!("Weekly digests:");
for (k, s, n) in &episodic_weekly {
let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
println!(" [{:.2}] {}{}", s, k, preview);
}
}
if !episodic_daily.is_empty() {
println!("Daily digests:");
for (k, s, n) in &episodic_daily {
let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
println!(" [{:.2}] {}{}", s, k, preview);
}
}
if !episodic_session.is_empty() {
println!("Session entries:");
for (k, s, n) in &episodic_session {
let preview = n.content.lines()
.find(|l| !l.is_empty() && !l.starts_with("<!--"))
.unwrap_or("").chars().take(80).collect::<String>();
println!(" [{:.2}] {}", s, k);
if !n.source_ref.is_empty() {
println!(" ↳ source: {}", n.source_ref);
}
println!(" {}", preview);
}
}
if !semantic.is_empty() {
println!("Semantic links:");
for (k, s, _) in &semantic {
println!(" [{:.2}] {}", s, k);
}
}
// Summary
println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
episodic_session.len(), episodic_daily.len(),
episodic_weekly.len(), semantic.len());
Ok(())
}
fn cmd_spectral(args: &[String]) -> Result<(), String> {
let k: usize = args.first()
.and_then(|s| s.parse().ok())
.unwrap_or(30);
let store = store::Store::load()?;
let g = graph::build_graph(&store);
let result = spectral::decompose(&g, k);
spectral::print_summary(&result, &g);
Ok(())
}
fn cmd_spectral_save(args: &[String]) -> Result<(), String> {
let k: usize = args.first()
.and_then(|s| s.parse().ok())
.unwrap_or(20);
let store = store::Store::load()?;
let g = graph::build_graph(&store);
let result = spectral::decompose(&g, k);
let emb = spectral::to_embedding(&result);
spectral::save_embedding(&emb)?;
Ok(())
}
fn cmd_spectral_neighbors(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("usage: spectral-neighbors KEY [N]".to_string());
}
let key = &args[0];
let n: usize = args.get(1)
.and_then(|s| s.parse().ok())
.unwrap_or(15);
let emb = spectral::load_embedding()?;
// Show which dimensions this node loads on
let dims = spectral::dominant_dimensions(&emb, &[key.as_str()]);
println!("Node: {} (embedding: {} dims)", key, emb.dims);
println!("Top spectral axes:");
for &(d, loading) in dims.iter().take(5) {
println!(" axis {:<2} (λ={:.4}): loading={:.5}", d, emb.eigenvalues[d], loading);
}
println!("\nNearest neighbors in spectral space:");
let neighbors = spectral::nearest_neighbors(&emb, key, n);
for (i, (k, dist)) in neighbors.iter().enumerate() {
println!(" {:>2}. {:.5} {}", i + 1, dist, k);
}
Ok(())
}
fn cmd_spectral_positions(args: &[String]) -> Result<(), String> {
let n: usize = args.first()
.and_then(|s| s.parse().ok())
.unwrap_or(30);
let store = store::Store::load()?;
let emb = spectral::load_embedding()?;
// Build communities fresh from graph (don't rely on cached node fields)
let g = store.build_graph();
let communities = g.communities().clone();
let positions = spectral::analyze_positions(&emb, &communities);
// Show outliers first
println!("Spectral position analysis — {} nodes", positions.len());
println!(" outlier: dist_to_center / median (>1 = unusual position)");
println!(" bridge: dist_to_center / dist_to_nearest_other_community");
println!();
// Group by classification
let mut bridges: Vec<&spectral::SpectralPosition> = Vec::new();
let mut outliers: Vec<&spectral::SpectralPosition> = Vec::new();
let mut core: Vec<&spectral::SpectralPosition> = Vec::new();
for pos in positions.iter().take(n) {
match spectral::classify_position(pos) {
"bridge" => bridges.push(pos),
"outlier" => outliers.push(pos),
"core" => core.push(pos),
_ => outliers.push(pos), // peripheral goes with outliers for display
}
}
if !bridges.is_empty() {
println!("=== Bridges (between communities) ===");
for pos in &bridges {
println!(" [{:.2}/{:.2}] c{} → c{} {}",
pos.outlier_score, pos.bridge_score,
pos.community, pos.nearest_community, pos.key);
}
println!();
}
println!("=== Top outliers (far from own community center) ===");
for pos in positions.iter().take(n) {
let class = spectral::classify_position(pos);
println!(" {:>10} outlier={:.2} bridge={:.2} c{:<3} {}",
class, pos.outlier_score, pos.bridge_score,
pos.community, pos.key);
}
Ok(())
}
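/// Link suggestions: among nodes with degree >= 3, report the spectrally
/// closest pairs that share no edge; candidates for extractor agents.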
fn cmd_spectral_suggest(args: &[String]) -> Result<(), String> {
let n: usize = args.first()
.and_then(|s| s.parse().ok())
.unwrap_or(20);
let store = store::Store::load()?;
let emb = spectral::load_embedding()?;
let g = store.build_graph();
let communities = g.communities();
// Only consider nodes with enough edges for meaningful spectral position
let min_degree = 3;
let well_connected: std::collections::HashSet<&str> = emb.coords.keys()
.filter(|k| g.degree(k) >= min_degree)
.map(|k| k.as_str())
.collect();
// Filter embedding to well-connected nodes
let filtered_emb = spectral::SpectralEmbedding {
dims: emb.dims,
eigenvalues: emb.eigenvalues.clone(),
coords: emb.coords.iter()
.filter(|(k, _)| well_connected.contains(k.as_str()))
.map(|(k, v)| (k.clone(), v.clone()))
.collect(),
};
// Build set of existing linked pairs
let mut linked: std::collections::HashSet<(String, String)> =
std::collections::HashSet::new();
for rel in &store.relations {
linked.insert((rel.source_key.clone(), rel.target_key.clone()));
linked.insert((rel.target_key.clone(), rel.source_key.clone()));
}
eprintln!("Searching {} well-connected nodes (degree >= {})...",
filtered_emb.coords.len(), min_degree);
let pairs = spectral::unlinked_neighbors(&filtered_emb, &linked, n);
println!("{} closest unlinked pairs (candidates for extractor agents):", pairs.len());
for (i, (k1, k2, dist)) in pairs.iter().enumerate() {
let c1 = communities.get(k1)
.map(|c| format!("c{}", c))
.unwrap_or_else(|| "?".into());
let c2 = communities.get(k2)
.map(|c| format!("c{}", c))
.unwrap_or_else(|| "?".into());
let cross = if c1 != c2 { " [cross-community]" } else { "" };
println!(" {:>2}. dist={:.4} {} ({}) ↔ {} ({}){}",
i + 1, dist, k1, c1, k2, c2, cross);
}
Ok(())
}
fn cmd_list_keys(args: &[String]) -> Result<(), String> {
let store = store::Store::load()?;
let g = store.build_graph();
let pattern = args.first().map(|s| s.as_str());
if let Some(pat) = pattern {
// Simple glob: only support leading/trailing * and *substring*
let pat_lower = pat.to_lowercase();
let (prefix, suffix, middle) = if pat_lower.starts_with('*') && pat_lower.ends_with('*') {
(None, None, Some(pat_lower.trim_matches('*').to_string()))
} else if pat_lower.starts_with('*') {
(None, Some(pat_lower.trim_start_matches('*').to_string()), None)
} else if pat_lower.ends_with('*') {
(Some(pat_lower.trim_end_matches('*').to_string()), None, None)
} else {
(None, None, Some(pat_lower.clone()))
};
let mut keys: Vec<_> = store.nodes.keys()
.filter(|k| {
let kl = k.to_lowercase();
if let Some(ref m) = middle { kl.contains(m.as_str()) }
else if let Some(ref p) = prefix { kl.starts_with(p.as_str()) }
else if let Some(ref s) = suffix { kl.ends_with(s.as_str()) }
else { true }
})
.cloned()
.collect();
keys.sort();
for k in keys { println!("{}", k); }
Ok(())
} else {
query::run_query(&store, &g, "* | sort key asc")
}
}
fn cmd_list_edges() -> Result<(), String> {
let store = store::Store::load()?;
for rel in &store.relations {
println!("{}\t{}\t{:.2}\t{:?}",
rel.source_key, rel.target_key, rel.strength, rel.rel_type);
}
Ok(())
}
fn cmd_dump_json() -> Result<(), String> {
let store = store::Store::load()?;
let json = serde_json::to_string_pretty(&store)
.map_err(|e| format!("serialize: {}", e))?;
println!("{}", json);
Ok(())
}
fn cmd_node_delete(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory node-delete KEY".into());
}
let key = args.join(" ");
let mut store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
store.delete_node(&resolved)?;
store.save()?;
println!("Deleted '{}'", resolved);
Ok(())
}
fn cmd_node_rename(args: &[String]) -> Result<(), String> {
if args.len() < 2 {
return Err("Usage: poc-memory node-rename OLD_KEY NEW_KEY".into());
}
let old_key = &args[0];
let new_key = &args[1];
let mut store = store::Store::load()?;
let old_resolved = store.resolve_key(old_key)?;
store.rename_node(&old_resolved, new_key)?;
store.save()?;
println!("Renamed '{}' → '{}'", old_resolved, new_key);
Ok(())
}
/// Migration: populate created_at for all nodes with missing or invalid values.
/// Journal nodes: parse timestamp from key. All others: fall back to `timestamp` field.
fn cmd_journal_ts_migrate() -> Result<(), String> {
use chrono::{NaiveDateTime, TimeZone, Local};
let mut store = store::Store::load()?;
let re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})[t-](\d{2})-(\d{2})").unwrap();
// Valid unix epoch range: 2001-01-01 to 2099-12-31
let valid_range = 978_307_200i64..=4_102_444_800i64;
let to_update: Vec<_> = store.nodes.values()
.filter(|n| !valid_range.contains(&n.created_at))
.map(|n| n.key.clone())
.collect();
let mut updated = 0usize;
for key in &to_update {
// Try parsing timestamp from journal key
if let Some(caps) = re.captures(key) {
let date_str = format!("{} {}:{}", &caps[1], &caps[2], &caps[3]);
if let Ok(ndt) = NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d %H:%M") {
if let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
if let Some(node) = store.nodes.get_mut(key) {
node.created_at = dt.timestamp();
node.version += 1;
}
updated += 1;
continue;
}
}
}
// Fall back to the node's timestamp field (last-modified, but better than 0)
if let Some(node) = store.nodes.get_mut(key) {
node.created_at = node.timestamp as i64;
node.version += 1;
updated += 1;
}
}
// Persist all updated nodes
let nodes_to_write: Vec<_> = to_update.iter()
.filter_map(|k| store.nodes.get(k))
.filter(|n| valid_range.contains(&n.created_at))
.cloned()
.collect();
if !nodes_to_write.is_empty() {
store.append_nodes(&nodes_to_write)?;
store.save()?;
}
println!("journal-ts-migrate: updated {}/{}", updated, to_update.len());
Ok(())
}
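/// Collect (key, content) pairs for one context group: recent journal
/// entries inside the configured day window, raw files from the data dir,
/// or rendered store nodes, depending on the group's configured source.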
fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
match group.source {
config::ContextSource::Journal => {
let mut entries = Vec::new();
let now = store::now_epoch();
let window: i64 = cfg.journal_days as i64 * 24 * 3600;
let cutoff = now - window;
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
let journal_ts = |n: &store::Node| -> i64 {
if n.created_at > 0 { return n.created_at; }
if let Some(caps) = key_date_re.captures(&n.key) {
use chrono::{NaiveDate, TimeZone, Local};
if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
return dt.timestamp();
}
}
}
n.timestamp
};
let mut journal_nodes: Vec<_> = store.nodes.values()
.filter(|n| n.node_type == store::NodeType::EpisodicSession && journal_ts(n) >= cutoff)
.collect();
journal_nodes.sort_by_key(|n| journal_ts(n));
let max = cfg.journal_max;
let skip = journal_nodes.len().saturating_sub(max);
for node in journal_nodes.iter().skip(skip) {
entries.push((node.key.clone(), node.content.clone()));
}
entries
}
config::ContextSource::File => {
group.keys.iter().filter_map(|key| {
let content = std::fs::read_to_string(cfg.data_dir.join(key)).ok()?;
if content.trim().is_empty() { return None; }
Some((key.clone(), content.trim().to_string()))
}).collect()
}
config::ContextSource::Store => {
group.keys.iter().filter_map(|key| {
let content = store.render_file(key)?;
if content.trim().is_empty() { return None; }
Some((key.clone(), content.trim().to_string()))
}).collect()
}
}
}
fn cmd_load_context(args: &[String]) -> Result<(), String> {
let stats = args.iter().any(|a| a == "--stats");
let cfg = config::get();
let store = store::Store::load()?;
if stats {
let mut total_words = 0;
let mut total_entries = 0;
println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS");
println!("{}", "-".repeat(42));
for group in &cfg.context_groups {
let entries = get_group_content(group, &store, cfg);
let words: usize = entries.iter()
.map(|(_, c)| c.split_whitespace().count())
.sum();
let count = entries.len();
println!("{:<25} {:>6} {:>8}", group.label, count, words);
total_words += words;
total_entries += count;
}
println!("{}", "-".repeat(42));
println!("{:<25} {:>6} {:>8}", "TOTAL", total_entries, total_words);
return Ok(());
}
println!("=== MEMORY SYSTEM ({}) ===", cfg.assistant_name);
println!();
for group in &cfg.context_groups {
let entries = get_group_content(group, &store, cfg);
if !entries.is_empty() && group.source == config::ContextSource::Journal {
println!("--- recent journal entries ({}/{}) ---",
entries.len(), cfg.journal_max);
}
for (key, content) in entries {
if group.source == config::ContextSource::Journal {
println!("## {}", key);
} else {
println!("--- {} ({}) ---", key, group.label);
}
println!("{}\n", content);
}
}
println!("=== END MEMORY LOAD ===");
Ok(())
}
fn cmd_render(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory render KEY".into());
}
let key = args.join(" ");
let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
let node = store.nodes.get(&resolved)
.ok_or_else(|| format!("Node not found: {}", resolved))?;
print!("{}", node.content);
Ok(())
}
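/// Show every stored version of a node by replaying the append-only
/// Cap'n Proto node log; previews by default, full content with --full.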
fn cmd_history(args: &[String]) -> Result<(), String> {
use clap::Parser;
/// Show all stored versions of a memory node
#[derive(Parser)]
#[command(name = "poc-memory history")]
struct HistoryArgs {
/// Show full content for every version (not just preview)
#[arg(long)]
full: bool,
/// Node key to look up
#[arg(required = true)]
key: Vec<String>,
}
let parsed = match HistoryArgs::try_parse_from(
std::iter::once("history".to_string()).chain(args.iter().cloned())
) {
Ok(p) => p,
Err(e) => {
// Let clap print its own help/error formatting directly
e.print().ok();
std::process::exit(if e.use_stderr() { 1 } else { 0 });
}
};
let full = parsed.full;
let raw_key = parsed.key.join(" ");
// Resolve key consistently with render/write
let store = store::Store::load()?;
let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
drop(store);
// Replay the node log, collecting all versions of this key
let path = store::nodes_path();
if !path.exists() {
return Err("No node log found".into());
}
use std::io::BufReader;
let file = std::fs::File::open(&path)
.map_err(|e| format!("open {}: {}", path.display(), e))?;
let mut reader = BufReader::new(file);
let mut versions: Vec<store::Node> = Vec::new();
while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
let log = msg.get_root::<poc_memory::memory_capnp::node_log::Reader>()
.map_err(|e| format!("read log: {}", e))?;
for node_reader in log.get_nodes()
.map_err(|e| format!("get nodes: {}", e))? {
let node = store::Node::from_capnp(node_reader)?;
if node.key == key {
versions.push(node);
}
}
}
if versions.is_empty() {
return Err(format!("No history found for '{}'", key));
}
eprintln!("{} versions of '{}':\n", versions.len(), key);
for node in &versions {
let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
store::format_datetime(node.timestamp)
} else {
format!("(raw:{})", node.timestamp)
};
let content_len = node.content.len();
if full {
eprintln!("=== v{} {} {} w={:.3} {}b ===",
node.version, ts, node.provenance.label(), node.weight, content_len);
eprintln!("{}", node.content);
} else {
let preview: String = node.content.chars().take(120).collect();
let preview = preview.replace('\n', "\\n");
eprintln!(" v{:<3} {} {:24} w={:.3} {}b",
node.version, ts, node.provenance.label(), node.weight, content_len);
eprintln!(" {}", preview);
}
}
if !full {
// Show latest full content
if let Some(latest) = versions.last() {
eprintln!("\n--- Latest content (v{}, {}) ---",
latest.version, latest.provenance.label());
print!("{}", latest.content);
}
}
Ok(())
}
fn cmd_write(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory write KEY < content\n\
Reads content from stdin, upserts into the store.".into());
}
let raw_key = args.join(" ");
let mut content = String::new();
std::io::Read::read_to_string(&mut std::io::stdin(), &mut content)
.map_err(|e| format!("read stdin: {}", e))?;
if content.trim().is_empty() {
return Err("No content on stdin".into());
}
let mut store = store::Store::load()?;
// Resolve the key the same way render/search do, so writes and reads
// always hit the same node. Fall back to raw key for new nodes.
let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
let result = store.upsert(&key, &content)?;
match result {
"unchanged" => println!("No change: '{}'", key),
"updated" => println!("Updated '{}' (v{})", key, store.nodes[&key].version),
_ => println!("Created '{}'", key),
}
if result != "unchanged" {
store.save()?;
}
Ok(())
}
fn cmd_import(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory import FILE [FILE...]".into());
}
let mut store = store::Store::load()?;
let mut total_new = 0;
let mut total_updated = 0;
for arg in args {
let path = std::path::PathBuf::from(arg);
let resolved = if path.exists() {
path
} else {
let mem_path = store::memory_dir().join(arg);
if !mem_path.exists() {
eprintln!("File not found: {}", arg);
continue;
}
mem_path
};
let (n, u) = store.import_file(&resolved)?;
total_new += n;
total_updated += u;
}
if total_new > 0 || total_updated > 0 {
store.save()?;
}
println!("Import: {} new, {} updated", total_new, total_updated);
Ok(())
}
fn cmd_export(args: &[String]) -> Result<(), String> {
let store = store::Store::load()?;
let export_all = args.iter().any(|a| a == "--all");
let targets: Vec<String> = if export_all {
// Find all unique file-level keys (no # in key)
let mut files: Vec<String> = store.nodes.keys()
.filter(|k| !k.contains('#'))
.cloned()
.collect();
files.sort();
files
} else if args.is_empty() {
return Err("Usage: poc-memory export FILE [FILE...] | --all".into());
} else {
args.iter().map(|a| {
// Strip .md if user supplied it — store keys are bare
a.strip_suffix(".md").unwrap_or(a).to_string()
}).collect()
};
let mem_dir = store::memory_dir();
for file_key in &targets {
match store.export_to_markdown(file_key) {
Some(content) => {
let out_path = mem_dir.join(format!("{}.md", file_key));
std::fs::write(&out_path, &content)
.map_err(|e| format!("write {}: {}", out_path.display(), e))?;
let section_count = content.matches("<!-- mem:").count() + 1;
println!("Exported {} ({} sections)", file_key, section_count);
}
None => eprintln!("No nodes for '{}'", file_key),
}
}
Ok(())
}
fn cmd_journal_write(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory journal-write TEXT".into());
}
let text = args.join(" ");
// Generate timestamp and slug
let timestamp = store::format_datetime(store::now_epoch());
// Slug: lowercase first ~6 words, hyphenated, truncated
let slug: String = text.split_whitespace()
.take(6)
.map(|w| w.to_lowercase()
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
.collect::<String>())
.collect::<Vec<_>>()
.join("-");
let slug = if slug.len() > 50 {
// Back off to a char boundary: Unicode lowercasing can produce multibyte chars
let mut end = 50;
while !slug.is_char_boundary(end) { end -= 1; }
&slug[..end]
} else {
&slug[..]
};
let key = format!("journal#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
// Build content with header
let content = format!("## {}\n\n{}", timestamp, text);
// Find source ref (most recently modified .jsonl transcript)
let source_ref = find_current_transcript();
let mut store = store::Store::load()?;
let mut node = store::new_node(&key, &content);
node.node_type = store::NodeType::EpisodicSession;
node.provenance = store::Provenance::Journal;
if let Some(src) = source_ref {
node.source_ref = src;
}
store.upsert_node(node)?;
store.save()?;
let word_count = text.split_whitespace().count();
println!("Appended entry at {} ({} words)", timestamp, word_count);
Ok(())
}
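/// Tail the journal: --level selects raw session entries (0) or daily/
/// weekly/monthly digests (1-3); prints the last N, and --full includes
/// each entry's complete content.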
fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
let mut n: usize = 20;
let mut full = false;
let mut level: u8 = 0; // 0=journal, 1=daily, 2=weekly, 3=monthly
for arg in args {
if arg == "--full" || arg == "-f" {
full = true;
} else if let Some(val) = arg.strip_prefix("--level=") {
level = match val {
"0" | "journal" => 0,
"1" | "daily" => 1,
"2" | "weekly" => 2,
"3" | "monthly" => 3,
_ => return Err(format!("unknown level '{}': use 0-3 or journal/daily/weekly/monthly", val)),
};
} else if let Ok(num) = arg.parse::<usize>() {
n = num;
}
}
let store = store::Store::load()?;
if level == 0 {
// Original journal-tail behavior
journal_tail_entries(&store, n, full)
} else {
let prefix = match level {
1 => "daily-",
2 => "weekly-",
_ => "monthly-",
};
journal_tail_digests(&store, prefix, n, full)
}
}
fn journal_tail_entries(store: &store::Store, n: usize, full: bool) -> Result<(), String> {
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
let normalize_date = |s: &str| -> String {
let s = s.replace('t', "T");
if s.len() >= 16 {
format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
} else {
s
}
};
let extract_sort = |node: &store::Node| -> (i64, String) {
if node.created_at > 0 {
return (node.created_at, store::format_datetime(node.created_at));
}
if let Some(caps) = key_date_re.captures(&node.key) {
return (0, normalize_date(&caps[1]));
}
if let Some(caps) = date_re.captures(&node.content) {
return (0, normalize_date(&caps[1]));
}
(node.timestamp, store::format_datetime(node.timestamp))
};
let mut journal: Vec<_> = store.nodes.values()
.filter(|node| node.node_type == store::NodeType::EpisodicSession)
.collect();
journal.sort_by(|a, b| {
let (at, as_) = extract_sort(a);
let (bt, bs) = extract_sort(b);
if at > 0 && bt > 0 {
at.cmp(&bt)
} else {
as_.cmp(&bs)
}
});
let skip = if journal.len() > n { journal.len() - n } else { 0 };
for node in journal.iter().skip(skip) {
let (_, ts) = extract_sort(node);
let title = extract_title(&node.content);
if full {
println!("--- [{}] {} ---\n{}\n", ts, title, node.content);
} else {
println!("[{}] {}", ts, title);
}
}
Ok(())
}
fn journal_tail_digests(store: &store::Store, prefix: &str, n: usize, full: bool) -> Result<(), String> {
let mut digests: Vec<_> = store.nodes.values()
.filter(|node| node.key.starts_with(prefix))
.collect();
// Sort by key — the date/week label sorts lexicographically
digests.sort_by(|a, b| a.key.cmp(&b.key));
let skip = if digests.len() > n { digests.len() - n } else { 0 };
for node in digests.iter().skip(skip) {
let label = node.key.strip_prefix(prefix)
.unwrap_or(&node.key);
let title = extract_title(&node.content);
if full {
println!("--- [{}] {} ---\n{}\n", label, title, node.content);
} else {
println!("[{}] {}", label, title);
}
}
Ok(())
}
fn extract_title(content: &str) -> String {
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
for line in content.lines() {
let stripped = line.trim();
if stripped.is_empty() { continue; }
if date_re.is_match(stripped) && stripped.len() < 25 { continue; }
if stripped.starts_with("## ") {
return stripped[3..].to_string();
} else if stripped.starts_with("# ") {
return stripped[2..].to_string();
} else {
return if stripped.len() > 70 {
let mut end = 67;
while !stripped.is_char_boundary(end) { end -= 1; }
format!("{}...", &stripped[..end])
} else {
stripped.to_string()
};
}
}
String::from("(untitled)")
}
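/// List pairs of nodes that are textually similar (above --threshold,
/// default 0.4) but sit in different communities, the combination
/// neuro::detect_interference flags as interfering.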
fn cmd_interference(args: &[String]) -> Result<(), String> {
let mut threshold = 0.4f32;
let mut i = 0;
while i < args.len() {
match args[i].as_str() {
"--threshold" if i + 1 < args.len() => {
threshold = args[i + 1].parse().map_err(|_| "invalid threshold")?;
i += 2;
}
_ => { i += 1; }
}
}
let store = store::Store::load()?;
let g = store.build_graph();
let pairs = neuro::detect_interference(&store, &g, threshold);
if pairs.is_empty() {
println!("No interfering pairs above threshold {:.2}", threshold);
} else {
println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
for (a, b, sim) in &pairs {
println!(" [{:.3}] {}{}", sim, a, b);
}
}
Ok(())
}
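/// Run a query expression with optional pipe stages over the graph; the
/// usage text (printed when called with no arguments) documents the
/// expression grammar and stages.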
fn cmd_query(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory query 'EXPR | stage | stage ...'\n\n\
Expressions:\n \
degree > 15 property filter\n \
key ~ 'journal.*' AND degree > 10 boolean + regex\n \
neighbors('identity') WHERE ... graph traversal\n \
community_id = community('key') function as value\n \
* all nodes\n\n\
Pipe stages:\n \
| sort FIELD [asc] sort (desc by default)\n \
| limit N cap results\n \
| select F,F,... output fields as TSV\n \
| count just show count".into());
}
let query_str = args.join(" ");
let store = store::Store::load()?;
let graph = store.build_graph();
query::run_query(&store, &graph, &query_str)
}
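/// Record a lookup hit for each KEY, feeding the per-day counters that
/// `lookups` reports.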
fn cmd_lookup_bump(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return Err("Usage: poc-memory lookup-bump KEY [KEY...]".into());
}
let keys: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
lookups::bump_many(&keys)
}
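/// Show lookup counts for a date (default: today), resolved against the
/// current set of node keys.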
fn cmd_lookups(args: &[String]) -> Result<(), String> {
let date = if args.is_empty() {
chrono::Local::now().format("%Y-%m-%d").to_string()
} else {
args[0].clone()
};
let store = store::Store::load()?;
let keys: Vec<String> = store.nodes.values().map(|n| n.key.clone()).collect();
let resolved = lookups::dump_resolved(&date, &keys)?;
if resolved.is_empty() {
println!("No lookups for {}", date);
return Ok(());
}
println!("Lookups for {}:", date);
for (key, count) in &resolved {
println!(" {:4} {}", count, key);
}
println!("\n{} distinct keys, {} total lookups",
resolved.len(),
resolved.iter().map(|(_, c)| *c as u64).sum::<u64>());
Ok(())
}
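/// With no arguments, run the daemon; subcommands inspect its status,
/// tail its per-job log, or install it as a service.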
fn cmd_daemon(args: &[String]) -> Result<(), String> {
if args.is_empty() {
return daemon::run_daemon();
}
match args[0].as_str() {
"status" => daemon::show_status(),
"log" => {
// daemon log [N] — last N lines (default 20)
// daemon log JOB [N] — last N lines for job
let (job, lines) = match args.get(1) {
None => (None, 20),
Some(s) => {
if let Ok(n) = s.parse::<usize>() {
(None, n)
} else {
let n = args.get(2).and_then(|s| s.parse().ok()).unwrap_or(20);
(Some(s.as_str()), n)
}
}
};
daemon::show_log(job, lines)
}
"install" => daemon::install_service(),
_ => {
eprintln!("Usage: poc-memory daemon [status|log|install]");
Err("unknown daemon subcommand".into())
}
}
}
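/// Drive the knowledge-agent convergence loop; flags map onto
/// knowledge::KnowledgeLoopConfig (see --help for defaults).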
fn cmd_knowledge_loop(args: &[String]) -> Result<(), String> {
if args.iter().any(|a| a == "--help" || a == "-h") {
eprintln!("Usage: poc-memory knowledge-loop [OPTIONS]
Run knowledge agents (observation, extractor, connector, challenger) in
a convergence loop. Each cycle runs all agents, applies actions to the
graph, and checks structural stability metrics.
Options:
--max-cycles N Maximum cycles before stopping (default: 20)
--batch-size N Items per agent per cycle (default: 5)
--window N Cycles to check for convergence (default: 5)
--max-depth N Maximum inference depth (default: 4)");
return Ok(());
}
let mut config = knowledge::KnowledgeLoopConfig::default();
let mut i = 0;
while i < args.len() {
match args[i].as_str() {
"--max-cycles" => { i += 1; config.max_cycles = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.max_cycles); }
"--batch-size" => { i += 1; config.batch_size = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.batch_size); }
"--window" => { i += 1; config.window = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.window); }
"--max-depth" => { i += 1; config.max_depth = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(config.max_depth); }
other => return Err(format!("Unknown arg: {}. Use --help for usage.", other)),
}
i += 1;
}
let results = knowledge::run_knowledge_loop(&config)?;
eprintln!("\nCompleted {} cycles, {} total actions applied",
results.len(),
results.iter().map(|r| r.total_applied).sum::<usize>());
Ok(())
}
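/// Extract atomic factual claims from one transcript or a directory of
/// them; emits pretty-printed JSON to stdout or --output FILE.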
fn cmd_fact_mine(args: &[String]) -> Result<(), String> {
if args.is_empty() || args.iter().any(|a| a == "--help" || a == "-h") {
eprintln!("Usage: poc-memory fact-mine <JSONL> [OPTIONS]
poc-memory fact-mine --batch <DIR> [OPTIONS]
Extract atomic factual claims from conversation transcripts using Haiku.
Options:
--batch Process all .jsonl files in directory
--dry-run Show chunks without calling model
--output FILE Write JSON to file (default: stdout)
--min-messages N Skip transcripts with fewer messages (default: 10)");
return Ok(());
}
let mut batch = false;
let mut dry_run = false;
let mut output_file: Option<String> = None;
let mut min_messages = 10usize;
let mut path: Option<String> = None;
let mut i = 0;
while i < args.len() {
match args[i].as_str() {
"--batch" => batch = true,
"--dry-run" => dry_run = true,
"--output" | "-o" => { i += 1; output_file = args.get(i).cloned(); }
"--min-messages" => { i += 1; min_messages = args.get(i).and_then(|s| s.parse().ok()).unwrap_or(min_messages); }
s if !s.starts_with('-') => path = Some(s.to_string()),
other => return Err(format!("Unknown arg: {}", other)),
}
i += 1;
}
let path = path.ok_or("Missing path argument")?;
let p = std::path::Path::new(&path);
let paths: Vec<std::path::PathBuf> = if batch {
if !p.is_dir() {
return Err(format!("Not a directory: {}", path));
}
let mut files: Vec<_> = std::fs::read_dir(p)
.map_err(|e| format!("read dir: {}", e))?
.filter_map(|e| e.ok())
.map(|e| e.path())
.filter(|p| p.extension().map(|x| x == "jsonl").unwrap_or(false))
.collect();
files.sort();
eprintln!("Found {} transcripts", files.len());
files
} else {
vec![p.to_path_buf()]
};
let path_refs: Vec<&std::path::Path> = paths.iter().map(|p| p.as_path()).collect();
let facts = fact_mine::mine_batch(&path_refs, min_messages, dry_run)?;
if !dry_run {
let json = serde_json::to_string_pretty(&facts)
.map_err(|e| format!("serialize: {}", e))?;
if let Some(out) = &output_file {
std::fs::write(out, &json).map_err(|e| format!("write: {}", e))?;
eprintln!("\nWrote {} facts to {}", facts.len(), out);
} else {
println!("{}", json);
}
}
eprintln!("\nTotal: {} facts from {} transcripts", facts.len(), paths.len());
Ok(())
}
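/// Mine facts from a single transcript and write them straight into the
/// store.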
fn cmd_fact_mine_store(args: &[String]) -> Result<(), String> {
if args.len() != 1 {
return Err("Usage: poc-memory fact-mine-store <JSONL>".into());
}
let path = std::path::Path::new(&args[0]);
if !path.exists() {
return Err(format!("File not found: {}", args[0]));
}
let count = fact_mine::mine_and_store(path, None)?;
eprintln!("Stored {} facts", count);
Ok(())
}