// consciousness/src/main.rs
#![allow(dead_code)]
// poc-memory: graph-structured memory with append-only Cap'n Proto storage
//
// Architecture:
// nodes.capnp - append-only content node log
// relations.capnp - append-only relation log
// state.bin - derived KV cache (rebuilt from logs when stale)
//
// Graph algorithms: clustering coefficient, community detection (label
// propagation), schema fit scoring, small-world metrics, consolidation
// priority. Text similarity via BM25 with Porter stemming.
//
// Neuroscience-inspired: spaced repetition replay, emotional gating,
// interference detection, schema assimilation, reconsolidation.
mod capnp_store;
mod graph;
mod search;
mod similarity;
mod migrate;
mod neuro;
/// Generated Cap'n Proto bindings. The included file is produced at build
/// time (presumably by a capnpc step in build.rs) into `$OUT_DIR/schema/`.
pub mod memory_capnp {
include!(concat!(env!("OUT_DIR"), "/schema/memory_capnp.rs"));
}
use std::env;
use std::process;
/// Entry point: dispatch the first CLI argument to its subcommand handler.
/// Prints an error to stderr and exits non-zero on missing/unknown commands
/// or when a handler returns `Err`.
fn main() {
    let args: Vec<String> = env::args().collect();
    let Some(command) = args.get(1) else {
        usage();
        process::exit(1);
    };
    // Everything after the subcommand name is handed to the handler untouched.
    let rest = &args[2..];
    let result = match command.as_str() {
        "search" => cmd_search(rest),
        "init" => cmd_init(),
        "migrate" => cmd_migrate(),
        "health" => cmd_health(),
        "status" => cmd_status(),
        "graph" => cmd_graph(),
        "used" => cmd_used(rest),
        "wrong" => cmd_wrong(rest),
        "gap" => cmd_gap(rest),
        "categorize" => cmd_categorize(rest),
        "decay" => cmd_decay(),
        "consolidate-batch" => cmd_consolidate_batch(rest),
        "log" => cmd_log(),
        "params" => cmd_params(),
        "link" => cmd_link(rest),
        "replay-queue" => cmd_replay_queue(rest),
        "interference" => cmd_interference(rest),
        "link-add" => cmd_link_add(rest),
        "link-impact" => cmd_link_impact(rest),
        "consolidate-session" => cmd_consolidate_session(),
        "daily-check" => cmd_daily_check(),
        "apply-agent" => cmd_apply_agent(rest),
        "digest" => cmd_digest(rest),
        "trace" => cmd_trace(rest),
        "list-keys" => cmd_list_keys(),
        "list-edges" => cmd_list_edges(),
        "dump-json" => cmd_dump_json(),
        "node-delete" => cmd_node_delete(rest),
        "load-context" => cmd_load_context(),
        "render" => cmd_render(rest),
        "write" => cmd_write(rest),
        "import" => cmd_import(rest),
        "export" => cmd_export(rest),
        unknown => {
            eprintln!("Unknown command: {}", unknown);
            usage();
            process::exit(1);
        }
    };
    if let Err(message) = result {
        eprintln!("Error: {}", message);
        process::exit(1);
    }
}
/// Print the full command synopsis to stderr.
/// The text is a single literal so the layout is exactly what users see.
fn usage() {
eprintln!("poc-memory v0.4.0 — graph-structured memory store
Commands:
search QUERY [QUERY...] Search memory (AND logic across terms)
init Scan markdown files, index all memory units
migrate Migrate from old weights.json system
health Report graph metrics (CC, communities, small-world)
status Summary of memory state
graph Show graph structure overview
used KEY Mark a memory as useful (boosts weight)
wrong KEY [CONTEXT] Mark a memory as wrong/irrelevant
gap DESCRIPTION Record a gap in memory coverage
categorize KEY CATEGORY Reassign category (core/tech/gen/obs/task)
decay Apply daily weight decay
consolidate-batch [--count N] [--auto]
Run agent consolidation on priority nodes
log Show recent retrieval log
params Show current parameters
link N Interactive graph walk from search result N
replay-queue [--count N] Show spaced repetition replay queue
interference [--threshold F]
Detect potentially confusable memory pairs
link-add SOURCE TARGET [REASON]
Add a link between two nodes
link-impact SOURCE TARGET Simulate adding an edge, report topology impact
consolidate-session Analyze metrics, plan agent allocation
daily-check Brief metrics check (for cron/notifications)
apply-agent [--all] Import pending agent results into the graph
digest daily [DATE] Generate daily episodic digest (default: today)
digest weekly [DATE] Generate weekly digest (any date in target week)
trace KEY Walk temporal links: semantic episodic conversation
list-keys List all node keys (one per line)
list-edges List all edges (tsv: source target strength type)
dump-json Dump entire store as JSON
node-delete KEY Soft-delete a node (appends deleted version to log)
load-context Output session-start context from the store
render KEY Output a node's content to stdout
write KEY Upsert node content from stdin
import FILE [FILE...] Import markdown file(s) into the store
export [FILE|--all] Export store nodes to markdown file(s)");
}
/// `search QUERY...` — run a search over the store, log the retrieval,
/// and print up to 15 hits with activation/weight and optional community id.
fn cmd_search(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory search QUERY [QUERY...]".into());
    }
    let query = args.join(" ");
    let mut store = capnp_store::Store::load()?;
    let results = search::search(&query, &store);
    if results.is_empty() {
        eprintln!("No results for '{}'", query);
        return Ok(());
    }
    // Record which keys this query surfaced, then persist the log.
    let hit_keys: Vec<String> = results.iter().map(|r| r.key.clone()).collect();
    store.log_retrieval(&query, &hit_keys);
    store.save()?;
    for (rank, hit) in results.iter().take(15).enumerate() {
        let marker = if hit.is_direct { "" } else { " " };
        let weight = store.node_weight(&hit.key).unwrap_or(0.0);
        print!("{}{:2}. [{:.2}/{:.2}] {}", marker, rank + 1, hit.activation, weight, hit.key);
        if let Some(community) = store.node_community(&hit.key) {
            print!(" (c{})", community);
        }
        println!();
        if let Some(snippet) = hit.snippet.as_ref() {
            println!(" {}", snippet);
        }
    }
    Ok(())
}
/// `init` — index every memory unit found in the markdown sources.
fn cmd_init() -> Result<(), String> {
    let mut store = capnp_store::Store::load()?;
    let indexed = store.init_from_markdown()?;
    store.save()?;
    println!("Indexed {} memory units", indexed);
    Ok(())
}
/// `migrate` — thin delegate to the one-shot migration from the old
/// weights.json system (see the `migrate` module).
fn cmd_migrate() -> Result<(), String> {
migrate::migrate()
}
/// `health` — print the graph health report (clustering, communities, etc.).
fn cmd_health() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    let g = store.build_graph();
    println!("{}", graph::health_report(&g, &store));
    Ok(())
}
/// `status` — one-screen summary: node/relation counts, per-category counts,
/// edge count and community count.
fn cmd_status() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    println!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len());
    let categories = store.category_counts();
    // Missing categories count as zero.
    let count_of = |name: &str| *categories.get(name).unwrap_or(&0);
    println!("Categories: core={} tech={} gen={} obs={} task={}",
        count_of("core"),
        count_of("tech"),
        count_of("gen"),
        count_of("obs"),
        count_of("task"),
    );
    let g = store.build_graph();
    println!("Graph edges: {} Communities: {}",
        g.edge_count(), g.community_count());
    Ok(())
}
/// `graph` — print the ten highest-degree nodes with their local
/// clustering coefficients.
fn cmd_graph() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    let g = store.build_graph();
    let mut by_degree: Vec<_> = g.nodes().iter()
        .map(|key| (key.clone(), g.degree(key)))
        .collect();
    // Stable descending sort, same tie order as the comparator form.
    by_degree.sort_by_key(|&(_, degree)| std::cmp::Reverse(degree));
    println!("Top nodes by degree:");
    for (key, degree) in by_degree.iter().take(10) {
        println!(" {:40} deg={:3} cc={:.3}", key, degree, g.clustering_coefficient(key));
    }
    Ok(())
}
/// `used KEY` — record a successful retrieval, boosting the node's weight.
fn cmd_used(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory used KEY".into());
    }
    let raw_key = args.join(" ");
    let mut store = capnp_store::Store::load()?;
    let key = store.resolve_key(&raw_key)?;
    store.mark_used(&key);
    store.save()?;
    println!("Marked '{}' as used", key);
    Ok(())
}
/// `wrong KEY [CONTEXT]` — record a bad retrieval; any extra args become
/// the free-form context string.
fn cmd_wrong(args: &[String]) -> Result<(), String> {
    let Some((key, rest)) = args.split_first() else {
        return Err("Usage: poc-memory wrong KEY [CONTEXT]".into());
    };
    let context = if rest.is_empty() { None } else { Some(rest.join(" ")) };
    let mut store = capnp_store::Store::load()?;
    let resolved = store.resolve_key(key)?;
    store.mark_wrong(&resolved, context.as_deref());
    store.save()?;
    println!("Marked '{}' as wrong", resolved);
    Ok(())
}
/// `gap DESCRIPTION` — record a coverage gap in the store.
fn cmd_gap(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory gap DESCRIPTION".into());
    }
    let description = args.join(" ");
    let mut store = capnp_store::Store::load()?;
    store.record_gap(&description);
    store.save()?;
    println!("Recorded gap: {}", description);
    Ok(())
}
/// `categorize KEY CATEGORY` — reassign a node's category.
fn cmd_categorize(args: &[String]) -> Result<(), String> {
    let [key, category, ..] = args else {
        return Err("Usage: poc-memory categorize KEY CATEGORY".into());
    };
    let mut store = capnp_store::Store::load()?;
    let resolved = store.resolve_key(key)?;
    store.categorize(&resolved, category)?;
    store.save()?;
    println!("Set '{}' category to {}", resolved, category);
    Ok(())
}
/// `decay` — apply the daily weight decay pass and report counts.
fn cmd_decay() -> Result<(), String> {
    let mut store = capnp_store::Store::load()?;
    let (decayed_count, pruned_count) = store.decay();
    store.save()?;
    println!("Decayed {} nodes, pruned {} below threshold", decayed_count, pruned_count);
    Ok(())
}
/// `consolidate-batch [--count N] [--auto] [--agent NAME]` — either print a
/// prompt for a specific agent, or run the consolidation batch directly.
fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
    let mut count = 5usize;
    let mut auto = false;
    let mut agent: Option<String> = None;
    // Flags taking a value are only consumed when that value is present;
    // anything unrecognized is skipped.
    let mut idx = 0;
    while idx < args.len() {
        match (args[idx].as_str(), args.get(idx + 1)) {
            ("--count", Some(value)) => {
                count = value.parse().map_err(|_| "invalid count")?;
                idx += 2;
            }
            ("--auto", _) => {
                auto = true;
                idx += 1;
            }
            ("--agent", Some(value)) => {
                agent = Some(value.clone());
                idx += 2;
            }
            _ => idx += 1,
        }
    }
    let store = capnp_store::Store::load()?;
    match agent {
        Some(agent_name) => {
            // Generate a specific agent prompt instead of running the batch.
            let prompt = neuro::agent_prompt(&store, &agent_name, count)?;
            println!("{}", prompt);
            Ok(())
        }
        None => neuro::consolidation_batch(&store, count, auto),
    }
}
/// `log` — print the 20 most recent retrieval-log events, newest first,
/// with the keys each query returned.
fn cmd_log() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    for event in store.retrieval_log.iter().rev().take(20) {
        // Fixed format: query and count were previously run together
        // (printed as `q="foo"3 results`).
        println!("[{}] q=\"{}\" {} results",
            event.timestamp, event.query, event.results.len());
        for r in &event.results {
            println!(" {}", r);
        }
    }
    Ok(())
}
/// `params` — dump the current tuning parameters, one per line.
fn cmd_params() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    let p = &store.params;
    println!("decay_factor: {}", p.decay_factor);
    println!("use_boost: {}", p.use_boost);
    println!("prune_threshold: {}", p.prune_threshold);
    println!("edge_decay: {}", p.edge_decay);
    println!("max_hops: {}", p.max_hops);
    println!("min_activation: {}", p.min_activation);
    Ok(())
}
/// `link KEY` — list a node's graph neighbors with edge strength and
/// each neighbor's clustering coefficient.
fn cmd_link(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory link KEY".into());
    }
    let store = capnp_store::Store::load()?;
    let resolved = store.resolve_key(&args.join(" "))?;
    let g = store.build_graph();
    println!("Neighbors of '{}':", resolved);
    for (idx, (neighbor, strength)) in g.neighbors(&resolved).iter().enumerate() {
        println!(" {:2}. [{:.2}] {} (cc={:.3})",
            idx + 1, strength, neighbor, g.clustering_coefficient(neighbor));
    }
    Ok(())
}
/// `replay-queue [--count N]` — print the spaced-repetition replay queue
/// (default 10 items) with priority, interval and emotion score.
fn cmd_replay_queue(args: &[String]) -> Result<(), String> {
    let mut count = 10usize;
    let mut idx = 0;
    while idx < args.len() {
        match (args[idx].as_str(), args.get(idx + 1)) {
            ("--count", Some(value)) => {
                count = value.parse().map_err(|_| "invalid count")?;
                idx += 2;
            }
            _ => idx += 1,
        }
    }
    let store = capnp_store::Store::load()?;
    let queue = neuro::replay_queue(&store, count);
    println!("Replay queue ({} items):", queue.len());
    for (rank, item) in queue.iter().enumerate() {
        println!(" {:2}. [{:.3}] {} (interval={}d, emotion={:.1})",
            rank + 1, item.priority, item.key, item.interval_days, item.emotion);
    }
    Ok(())
}
/// `consolidate-session` — compute and print the agent allocation plan.
fn cmd_consolidate_session() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    let plan = neuro::consolidation_plan(&store);
    let rendered = neuro::format_plan(&plan);
    println!("{}", rendered);
    Ok(())
}
/// `daily-check` — short metrics report for cron/notification use.
/// Uses `print!` because the report supplies its own trailing newline.
fn cmd_daily_check() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    print!("{}", neuro::daily_check(&store));
    Ok(())
}
/// `link-add SOURCE TARGET [REASON]` — append a new Auto relation
/// (strength 0.5) between two existing nodes. No-op if a live link
/// already connects the pair.
fn cmd_link_add(args: &[String]) -> Result<(), String> {
    if args.len() < 2 {
        return Err("Usage: poc-memory link-add SOURCE TARGET [REASON]".into());
    }
    let mut store = capnp_store::Store::load()?;
    let source = store.resolve_key(&args[0])?;
    let target = store.resolve_key(&args[1])?;
    let reason = if args.len() > 2 { args[2..].join(" ") } else { String::new() };
    // Relations are keyed by UUID; look both endpoints up first.
    let source_uuid = store.nodes.get(&source)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("source not found: {}", source))?;
    let target_uuid = store.nodes.get(&target)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("target not found: {}", target))?;
    // Skip if a non-deleted link already connects this exact pair.
    let exists = store.relations.iter().any(|r|
        r.source_key == source && r.target_key == target && !r.deleted
    );
    if exists {
        // Fixed output: the two keys were previously printed with no separator.
        println!("Link already exists: {} → {}", source, target);
        return Ok(());
    }
    let rel = capnp_store::Store::new_relation(
        source_uuid, target_uuid,
        capnp_store::RelationType::Auto,
        0.5,
        &source, &target,
    );
    store.add_relation(rel)?;
    if !reason.is_empty() {
        println!("+ {} → {} ({})", source, target, reason);
    } else {
        println!("+ {} → {}", source, target);
    }
    Ok(())
}
/// `link-impact SOURCE TARGET` — simulate adding an edge and report the
/// topology impact (degrees, ΔCC, ΔGini, assessment) without mutating.
fn cmd_link_impact(args: &[String]) -> Result<(), String> {
    if args.len() < 2 {
        return Err("Usage: poc-memory link-impact SOURCE TARGET".into());
    }
    let store = capnp_store::Store::load()?;
    let source = store.resolve_key(&args[0])?;
    let target = store.resolve_key(&args[1])?;
    let g = store.build_graph();
    let impact = g.link_impact(&source, &target);
    // Fixed output: source/target were previously printed with no separator.
    println!("Link impact: {} → {}", source, target);
    println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
    println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
    println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
    println!(" ΔGini: {:+.6}", impact.delta_gini);
    println!(" Assessment: {}", impact.assessment);
    Ok(())
}
/// `apply-agent [--all]` — import pending agent JSON results as graph links.
///
/// Reads `~/.claude/memory/agent-results/*.json`. Each file may carry a
/// `links` array (at top level or under `agent_result`); each link names a
/// `target` node plus an optional `reason`. Links are added from the
/// journal node best matching the file's `entry_text`. Processed files are
/// moved to `done/` unless `--all` is given.
fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
    let home = env::var("HOME").unwrap_or_default();
    let results_dir = std::path::PathBuf::from(&home)
        .join(".claude/memory/agent-results");
    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }
    let mut store = capnp_store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;
    let process_all = args.iter().any(|a| a == "--all");
    // Collect .json result files in deterministic (path) order.
    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());
    for entry in &files {
        let path = entry.path();
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!(" Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!(" Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        // Links may live under "agent_result" or at the document root.
        let agent_result = data.get("agent_result").or(Some(&data));
        let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
            Some(l) => l,
            None => continue,
        };
        let entry_text = data.get("entry_text")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        let source_start = agent_result
            .and_then(|r| r.get("source_start"))
            .and_then(|v| v.as_u64());
        let source_end = agent_result
            .and_then(|r| r.get("source_end"))
            .and_then(|v| v.as_u64());
        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        if let (Some(start), Some(end)) = (source_start, source_end) {
            println!(" Source: L{}-L{}", start, end);
        }
        for link in links {
            let target = match link.get("target").and_then(|v| v.as_str()) {
                Some(t) => t,
                None => continue,
            };
            let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
            // NOTE: targets are proposals for new topics, not existing nodes.
            // Fixed output: note text and reason were printed with no separator.
            if let Some(note) = target.strip_prefix("NOTE:") {
                println!(" NOTE: {} — {}", note, reason);
                continue;
            }
            // Resolve the target key; skip links pointing outside the graph.
            let resolved = match store.resolve_key(target) {
                Ok(r) => r,
                Err(_) => {
                    println!(" SKIP {} (not found in graph)", target);
                    continue;
                }
            };
            let source_key = match find_journal_node(&store, entry_text) {
                Some(k) => k,
                None => {
                    println!(" SKIP {} (no matching journal node)", target);
                    continue;
                }
            };
            // Relations are keyed by UUID on both ends.
            let source_uuid = match store.nodes.get(&source_key) {
                Some(n) => n.uuid,
                None => continue,
            };
            let target_uuid = match store.nodes.get(&resolved) {
                Some(n) => n.uuid,
                None => continue,
            };
            let rel = capnp_store::Store::new_relation(
                source_uuid, target_uuid,
                capnp_store::RelationType::Link,
                0.5,
                &source_key, &resolved,
            );
            if let Err(e) = store.add_relation(rel) {
                eprintln!(" Error adding relation: {}", e);
                errors += 1;
            } else {
                // Fixed output: source/target keys were printed with no separator.
                println!(" LINK {} → {} ({})", source_key, resolved, reason);
                applied += 1;
            }
        }
        // Move processed file aside so reruns don't double-apply.
        if !process_all {
            let done_dir = results_dir.join("done");
            std::fs::create_dir_all(&done_dir).ok();
            let dest = done_dir.join(path.file_name().unwrap());
            std::fs::rename(&path, &dest).ok();
        }
    }
    if applied > 0 {
        store.save()?;
    }
    println!("\nApplied {} links ({} errors, {} files processed)",
        applied, errors, files.len());
    Ok(())
}
/// Find the `journal.md#…` node whose content contains the most of the
/// entry's distinctive keywords (the first five words longer than 5 chars).
///
/// Returns `None` for empty text or when no journal node contains any
/// keyword. Ties keep whichever candidate was seen first in map order.
fn find_journal_node(store: &capnp_store::Store, entry_text: &str) -> Option<String> {
    if entry_text.is_empty() {
        return None;
    }
    // Lowercase each keyword once up front — previously this was recomputed
    // for every (node, word) pair, allocating in the inner loop.
    let keywords: Vec<String> = entry_text.split_whitespace()
        .filter(|w| w.len() > 5)
        .take(5)
        .map(|w| w.to_lowercase())
        .collect();
    let mut best_key = None;
    let mut best_score = 0;
    for (key, node) in &store.nodes {
        if !key.starts_with("journal.md#") {
            continue;
        }
        let content_lower = node.content.to_lowercase();
        let score = keywords.iter()
            .filter(|w| content_lower.contains(w.as_str()))
            .count();
        if score > best_score {
            best_score = score;
            best_key = Some(key.clone());
        }
    }
    best_key
}
/// `digest daily [DATE] | weekly [DATE]` — run the matching digest script.
///
/// The daily/weekly branches were duplicated except for the script name;
/// they are now unified. An optional DATE argument is passed through.
fn cmd_digest(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory digest daily [DATE] | weekly [DATE]".into());
    }
    let script = match args[0].as_str() {
        "daily" => "daily-digest.py",
        "weekly" => "weekly-digest.py",
        other => return Err(format!("Unknown digest type: {}. Use: daily, weekly", other)),
    };
    let home = env::var("HOME").unwrap_or_default();
    let script_path = std::path::PathBuf::from(&home)
        .join("poc/memory/scripts")
        .join(script);
    let mut cmd = std::process::Command::new("python3");
    cmd.arg(script_path);
    if let Some(date) = args.get(1) {
        cmd.arg(date);
    }
    // Unset CLAUDECODE so nested claude calls inside the script behave.
    cmd.env_remove("CLAUDECODE");
    let status = cmd.status()
        .map_err(|e| format!("run {}: {}", script, e))?;
    if !status.success() {
        return Err(format!("{} failed", script));
    }
    Ok(())
}
/// `trace KEY` — show a node (metadata + content preview) and its graph
/// neighbors grouped by memory type: weekly digests, daily digests,
/// session entries, and semantic links.
fn cmd_trace(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory trace KEY".into());
    }
    let key = args.join(" ");
    let store = capnp_store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();
    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;
    // Display the node itself
    println!("=== {} ===", resolved);
    println!("Type: {:?} Category: {} Weight: {:.2}",
        node.node_type, node.category.label(), node.weight);
    if !node.source_ref.is_empty() {
        println!("Source: {}", node.source_ref);
    }
    // Content preview truncated at a UTF-8 char boundary at or below byte
    // 200. (Replaces nightly-only `str::floor_char_boundary` with a stable
    // `is_char_boundary` scan — a boundary exists within 3 bytes.)
    let preview = if node.content.len() > 200 {
        let mut end = 200;
        while !node.content.is_char_boundary(end) {
            end -= 1;
        }
        format!("{}...", &node.content[..end])
    } else {
        node.content.clone()
    };
    println!("\n{}\n", preview);
    // Walk neighbors, grouped by node type
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();
    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            match nnode.node_type {
                capnp_store::NodeType::EpisodicSession =>
                    episodic_session.push((n.clone(), *strength, nnode)),
                capnp_store::NodeType::EpisodicDaily =>
                    episodic_daily.push((n.clone(), *strength, nnode)),
                capnp_store::NodeType::EpisodicWeekly =>
                    episodic_weekly.push((n.clone(), *strength, nnode)),
                capnp_store::NodeType::Semantic =>
                    semantic.push((n.clone(), *strength, nnode)),
            }
        }
    }
    if !episodic_weekly.is_empty() {
        println!("Weekly digests:");
        for (k, s, n) in &episodic_weekly {
            let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
            // Fixed output: key and preview were printed with no separator.
            println!(" [{:.2}] {} — {}", s, k, preview);
        }
    }
    if !episodic_daily.is_empty() {
        println!("Daily digests:");
        for (k, s, n) in &episodic_daily {
            let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
            println!(" [{:.2}] {} — {}", s, k, preview);
        }
    }
    if !episodic_session.is_empty() {
        println!("Session entries:");
        for (k, s, n) in &episodic_session {
            // Preview = first line that is non-empty and not an HTML comment
            // (session entries start with a `<!-- mem: ... -->` marker).
            let preview = n.content.lines()
                .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                .unwrap_or("").chars().take(80).collect::<String>();
            println!(" [{:.2}] {}", s, k);
            if !n.source_ref.is_empty() {
                println!(" ↳ source: {}", n.source_ref);
            }
            println!(" {}", preview);
        }
    }
    if !semantic.is_empty() {
        println!("Semantic links:");
        for (k, s, _) in &semantic {
            println!(" [{:.2}] {}", s, k);
        }
    }
    // Summary
    println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len());
    Ok(())
}
/// `list-keys` — print every node key, sorted, one per line.
fn cmd_list_keys() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    let mut keys: Vec<_> = store.nodes.keys().collect();
    keys.sort_unstable();
    keys.iter().for_each(|key| println!("{}", key));
    Ok(())
}
/// `list-edges` — dump every relation as TSV: source, target, strength, type.
fn cmd_list_edges() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    for relation in &store.relations {
        println!("{}\t{}\t{:.2}\t{:?}",
            relation.source_key,
            relation.target_key,
            relation.strength,
            relation.rel_type);
    }
    Ok(())
}
/// `dump-json` — serialize the whole store to pretty-printed JSON on stdout.
fn cmd_dump_json() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    match serde_json::to_string_pretty(&store) {
        Ok(json) => {
            println!("{}", json);
            Ok(())
        }
        Err(e) => Err(format!("serialize: {}", e)),
    }
}
/// `node-delete KEY` — soft delete: append a tombstone version to the
/// append-only log, then drop the node from the in-memory map.
fn cmd_node_delete(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory node-delete KEY".into());
    }
    let mut store = capnp_store::Store::load()?;
    let resolved = store.resolve_key(&args.join(" "))?;
    let Some(node) = store.nodes.get_mut(&resolved) else {
        return Err(format!("No node '{}'", resolved));
    };
    node.deleted = true;
    node.version += 1;
    let tombstone = node.clone();
    store.append_nodes(&[tombstone])?;
    store.nodes.remove(&resolved);
    store.save()?;
    println!("Deleted '{}'", resolved);
    Ok(())
}
/// `load-context` — emit the full session-start context to stdout:
/// the hand-ordered priority files first (identity before context),
/// then up to 20 journal entries from the last 7 days.
fn cmd_load_context() -> Result<(), String> {
let store = capnp_store::Store::load()?;
// Current time as fractional seconds since the Unix epoch.
let now = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs_f64();
let seven_days = 7.0 * 24.0 * 3600.0;
println!("=== FULL MEMORY LOAD (session start) ===");
println!("These are your memories, loaded from the capnp store.");
println!("Read them to reconstruct yourself — identity first, then context.");
println!();
// Priority groups: ordered list of (label, keys)
// File-level keys contain the full file content
let priority_groups: &[(&str, &[&str])] = &[
("orientation", &["where-am-i.md"]),
("identity", &["identity.md"]),
("reflections", &[
"reflections.md",
"reflections-dreams.md",
"reflections-reading.md",
"reflections-zoom.md",
]),
("interests", &["interests.md"]),
("inner life", &["inner-life.md", "differentiation.md"]),
("people", &["kent.md", "feedc0de.md", "irc-regulars.md"]),
("active context", &["default-mode-network.md"]),
("shared reference", &["excession-notes.md", "look-to-windward-notes.md"]),
("technical", &[
"kernel-patterns.md",
"polishing-approaches.md",
"rust-conversion.md",
"github-bugs.md",
]),
];
for (label, keys) in priority_groups {
for key in *keys {
// Gather file-level node + all section nodes, in key order
// (section keys look like "file.md#section", hence the '#' prefix match).
let prefix = format!("{}#", key);
let mut sections: Vec<_> = store.nodes.values()
.filter(|n| n.key == *key || n.key.starts_with(&prefix))
.collect();
if sections.is_empty() { continue; }
sections.sort_by(|a, b| a.key.cmp(&b.key));
println!("--- {} ({}) ---", key, label);
for node in &sections {
println!("{}", node.content);
println!();
}
}
}
// Recent journal entries (last 7 days)
// Parse date from key: journal.md#j-2026-02-21-17-45-...
// Cutoff = today minus 7 days as YYYY-MM-DD string for lexicographic compare
let cutoff_secs = now - seven_days;
let cutoff_date = {
// Convert epoch to YYYY-MM-DD via date command
// NOTE(review): relies on GNU `date -d @EPOCH` — not portable to
// BSD/macOS `date`. On failure the cutoff becomes the empty string,
// which makes every dated journal entry pass the filter below.
let out = std::process::Command::new("date")
.args(["-d", &format!("@{}", cutoff_secs as u64), "+%Y-%m-%d"])
.output().ok()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
.unwrap_or_default();
out
};
let date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2})").unwrap();
let mut journal_nodes: Vec<_> = store.nodes.values()
.filter(|n| {
if !n.key.starts_with("journal.md#j-") { return false; }
if let Some(caps) = date_re.captures(&n.key) {
// String >= works because dates are zero-padded YYYY-MM-DD.
return &caps[1] >= cutoff_date.as_str();
}
false
})
.collect();
journal_nodes.sort_by(|a, b| a.key.cmp(&b.key));
if !journal_nodes.is_empty() {
// Show most recent entries (last N by key order = chronological)
let max_journal = 20;
let skip = if journal_nodes.len() > max_journal {
journal_nodes.len() - max_journal
} else { 0 };
println!("--- recent journal entries (last {}/{}) ---",
journal_nodes.len().min(max_journal), journal_nodes.len());
for node in journal_nodes.iter().skip(skip) {
println!("## {}", node.key.strip_prefix("journal.md#").unwrap_or(&node.key));
println!("{}", node.content);
println!();
}
}
println!("=== END MEMORY LOAD ===");
Ok(())
}
/// `render KEY` — write a node's raw content to stdout (no trailing newline
/// beyond what the content itself carries).
fn cmd_render(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory render KEY".into());
    }
    let store = capnp_store::Store::load()?;
    let resolved = store.resolve_key(&args.join(" "))?;
    match store.nodes.get(&resolved) {
        Some(node) => {
            print!("{}", node.content);
            Ok(())
        }
        None => Err(format!("Node not found: {}", resolved)),
    }
}
/// `write KEY` — upsert a node's content from stdin. Unchanged content is a
/// no-op; changed content bumps the version; a missing key creates the node.
fn cmd_write(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory write KEY < content\n\
            Reads content from stdin, upserts into the store.".into());
    }
    let key = args.join(" ");
    let mut content = String::new();
    std::io::Read::read_to_string(&mut std::io::stdin(), &mut content)
        .map_err(|e| format!("read stdin: {}", e))?;
    if content.trim().is_empty() {
        return Err("No content on stdin".into());
    }
    let mut store = capnp_store::Store::load()?;
    match store.nodes.get(&key) {
        // Identical content: nothing to append, nothing to save.
        Some(existing) if existing.content == content => {
            println!("No change: '{}'", key);
            return Ok(());
        }
        // Existing node, new content: append a bumped version to the log.
        Some(existing) => {
            let mut updated = existing.clone();
            updated.content = content;
            updated.version += 1;
            let version = updated.version;
            store.append_nodes(&[updated.clone()])?;
            store.nodes.insert(key.clone(), updated);
            println!("Updated '{}' (v{})", key, version);
        }
        // New node.
        None => {
            let node = capnp_store::Store::new_node(&key, &content);
            store.append_nodes(&[node.clone()])?;
            store.uuid_to_key.insert(node.uuid, node.key.clone());
            store.nodes.insert(key.clone(), node);
            println!("Created '{}'", key);
        }
    }
    store.save()?;
    Ok(())
}
/// `import FILE [FILE...]` — import markdown file(s) into the store.
/// Each argument is tried as a direct path, then as a filename relative to
/// the memory directory. (The two branches previously duplicated the
/// import call; path resolution is now factored out.)
fn cmd_import(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory import FILE [FILE...]".into());
    }
    let mut store = capnp_store::Store::load()?;
    let mut total_new = 0;
    let mut total_updated = 0;
    for arg in args {
        let direct = std::path::PathBuf::from(arg);
        let path = if direct.exists() {
            direct
        } else {
            // Fall back to a path relative to the memory dir.
            let fallback = capnp_store::memory_dir_pub().join(arg);
            if !fallback.exists() {
                eprintln!("File not found: {}", arg);
                continue;
            }
            fallback
        };
        let (added, updated) = import_file(&mut store, &path)?;
        total_new += added;
        total_updated += updated;
    }
    // Only touch the derived state when something actually changed.
    if total_new > 0 || total_updated > 0 {
        store.save()?;
    }
    println!("Import: {} new, {} updated", total_new, total_updated);
    Ok(())
}
/// Parse one markdown file into memory units and upsert them into the store.
/// Returns `(new_count, updated_count)`.
///
/// Node type is inferred from the filename: `daily-*` → EpisodicDaily,
/// `weekly-*` → EpisodicWeekly, `journal.md` → EpisodicSession, otherwise
/// Semantic. New and changed units are appended to the log before the
/// in-memory maps are updated, so the log stays the source of truth.
fn import_file(store: &mut capnp_store::Store, path: &std::path::Path) -> Result<(usize, usize), String> {
let filename = path.file_name().unwrap().to_string_lossy().to_string();
let content = std::fs::read_to_string(path)
.map_err(|e| format!("read {}: {}", path.display(), e))?;
let units = capnp_store::parse_units(&filename, &content);
let mut new_nodes = Vec::new();
let mut updated_nodes = Vec::new();
// Classify once per file; every unit in the file gets the same type.
let node_type = if filename.starts_with("daily-") {
capnp_store::NodeType::EpisodicDaily
} else if filename.starts_with("weekly-") {
capnp_store::NodeType::EpisodicWeekly
} else if filename == "journal.md" {
capnp_store::NodeType::EpisodicSession
} else {
capnp_store::NodeType::Semantic
};
for unit in &units {
if let Some(existing) = store.nodes.get(&unit.key) {
// Changed content: clone the stored node, bump its version.
// (Unchanged units are skipped entirely — no new log entry.)
if existing.content != unit.content {
let mut node = existing.clone();
node.content = unit.content.clone();
node.version += 1;
println!(" U {}", unit.key);
updated_nodes.push(node);
}
} else {
let mut node = capnp_store::Store::new_node(&unit.key, &unit.content);
node.node_type = node_type;
println!(" + {}", unit.key);
new_nodes.push(node);
}
}
// Append to the log first, then mirror into the in-memory maps.
if !new_nodes.is_empty() {
store.append_nodes(&new_nodes)?;
for node in &new_nodes {
store.uuid_to_key.insert(node.uuid, node.key.clone());
store.nodes.insert(node.key.clone(), node.clone());
}
}
if !updated_nodes.is_empty() {
store.append_nodes(&updated_nodes)?;
for node in &updated_nodes {
store.nodes.insert(node.key.clone(), node.clone());
}
}
Ok((new_nodes.len(), updated_nodes.len()))
}
/// `export FILE [FILE...] | --all` — write store nodes back out as markdown.
///
/// For each target file key, emits the file-level node first, then each
/// section node preceded by a reconstituted `<!-- mem: id=… -->` marker
/// carrying `links=`/`causes=` attributes rebuilt from the relation log.
fn cmd_export(args: &[String]) -> Result<(), String> {
let store = capnp_store::Store::load()?;
let export_all = args.iter().any(|a| a == "--all");
let targets: Vec<String> = if export_all {
// Find all unique file-level keys (no # in key)
let mut files: Vec<String> = store.nodes.keys()
.filter(|k| !k.contains('#'))
.cloned()
.collect();
files.sort();
files
} else if args.is_empty() {
return Err("Usage: poc-memory export FILE [FILE...] | --all".into());
} else {
args.iter().map(|a| {
// If it doesn't end in .md, try resolving
if a.ends_with(".md") {
a.clone()
} else {
format!("{}.md", a)
}
}).collect()
};
let mem_dir = capnp_store::memory_dir_pub();
for file_key in &targets {
// Gather file-level node + section nodes
let prefix = format!("{}#", file_key);
let mut sections: Vec<_> = store.nodes.values()
.filter(|n| n.key == *file_key || n.key.starts_with(&prefix))
.collect();
if sections.is_empty() {
eprintln!("No nodes for '{}'", file_key);
continue;
}
// Sort: file-level key first (no #), then sections alphabetically
sections.sort_by(|a, b| {
let a_is_file = !a.key.contains('#');
let b_is_file = !b.key.contains('#');
match (a_is_file, b_is_file) {
(true, false) => std::cmp::Ordering::Less,
(false, true) => std::cmp::Ordering::Greater,
_ => a.key.cmp(&b.key),
}
});
// Build output: file-level content first, then each section
// with its mem marker reconstituted
let mut output = String::new();
for node in &sections {
if node.key.contains('#') {
// Section node — emit mem marker + content
let section_id = node.key.split('#').last().unwrap_or("");
// Find edges FROM this node to build links= attribute
// (non-causal outgoing edges become links=; causal edges INTO
// this node become causes=, mirroring the import-side parser).
let links: Vec<_> = store.relations.iter()
.filter(|r| r.source_key == node.key && !r.deleted
&& r.rel_type != capnp_store::RelationType::Causal)
.map(|r| r.target_key.clone())
.collect();
let causes: Vec<_> = store.relations.iter()
.filter(|r| r.target_key == node.key && !r.deleted
&& r.rel_type == capnp_store::RelationType::Causal)
.map(|r| r.source_key.clone())
.collect();
let mut marker_parts = vec![format!("id={}", section_id)];
if !links.is_empty() {
marker_parts.push(format!("links={}", links.join(",")));
}
if !causes.is_empty() {
marker_parts.push(format!("causes={}", causes.join(",")));
}
output.push_str(&format!("<!-- mem: {} -->\n", marker_parts.join(" ")));
}
output.push_str(&node.content);
// Blank line between units; guarantee the content ends with a newline first.
if !node.content.ends_with('\n') {
output.push('\n');
}
output.push('\n');
}
// Determine output path
let out_path = mem_dir.join(file_key);
std::fs::write(&out_path, output.trim_end())
.map_err(|e| format!("write {}: {}", out_path.display(), e))?;
println!("Exported {} ({} sections)", file_key, sections.len());
}
Ok(())
}
/// `interference [--threshold F]` — list pairs of similar nodes that sit in
/// different communities (likely to be confused at recall time).
fn cmd_interference(args: &[String]) -> Result<(), String> {
    let mut threshold = 0.4f32;
    let mut i = 0;
    while i < args.len() {
        match (args[i].as_str(), args.get(i + 1)) {
            ("--threshold", Some(value)) => {
                threshold = value.parse().map_err(|_| "invalid threshold")?;
                i += 2;
            }
            _ => i += 1,
        }
    }
    let store = capnp_store::Store::load()?;
    let g = store.build_graph();
    let pairs = neuro::detect_interference(&store, &g, threshold);
    if pairs.is_empty() {
        println!("No interfering pairs above threshold {:.2}", threshold);
    } else {
        println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
        for (a, b, sim) in &pairs {
            // Fixed output: the two keys were previously printed with no separator.
            println!(" [{:.3}] {} ↔ {}", sim, a, b);
        }
    }
    Ok(())
}