Reads a conversation JSONL, identifies experiential moments that weren't captured in real-time journal entries, and writes them as journal nodes in the store. The agent writes in PoC's voice with emotion tags, focusing on intimate moments, shifts in understanding, and small pleasures — not clinical topic extraction. Conversation timestamps are now extracted and included in formatted output, enabling accurate temporal placement of mined entries. Also: extract_conversation now returns timestamps as a 4th tuple field.
1334 lines · 47 KiB · Rust
#![allow(dead_code)]
|
|
// poc-memory: graph-structured memory with append-only Cap'n Proto storage
|
|
//
|
|
// Architecture:
|
|
// nodes.capnp - append-only content node log
|
|
// relations.capnp - append-only relation log
|
|
// state.bin - derived KV cache (rebuilt from logs when stale)
|
|
//
|
|
// Graph algorithms: clustering coefficient, community detection (label
|
|
// propagation), schema fit scoring, small-world metrics, consolidation
|
|
// priority. Text similarity via BM25 with Porter stemming.
|
|
//
|
|
// Neuroscience-inspired: spaced repetition replay, emotional gating,
|
|
// interference detection, schema assimilation, reconsolidation.
|
|
|
|
mod capnp_store;
|
|
mod digest;
|
|
mod graph;
|
|
mod search;
|
|
mod similarity;
|
|
mod migrate;
|
|
mod neuro;
|
|
|
|
/// Generated Cap'n Proto bindings for the memory schema.
///
/// The build script compiles `memory.capnp` and writes the generated
/// Rust into OUT_DIR at build time; this module simply includes it.
pub mod memory_capnp {
    include!(concat!(env!("OUT_DIR"), "/schema/memory_capnp.rs"));
}
|
|
|
|
use std::env;
|
|
use std::process;
|
|
|
|
/// Find the most recently modified .jsonl transcript in the Claude projects dir.
///
/// Returns `None` when `$HOME` is unset, the projects directory does not
/// exist or cannot be read, or no `.jsonl` file is found.
fn find_current_transcript() -> Option<String> {
    let home = env::var("HOME").ok()?;
    let projects = std::path::Path::new(&home).join(".claude/projects");
    if !projects.exists() {
        return None;
    }

    // Walk every project directory, keeping the transcript with the
    // latest modification time seen so far.
    let mut best: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
    for dir_entry in std::fs::read_dir(&projects).ok()?.flatten() {
        if !dir_entry.path().is_dir() {
            continue;
        }
        let Ok(files) = std::fs::read_dir(dir_entry.path()) else { continue };
        for file in files.flatten() {
            let path = file.path();
            let is_jsonl = path.extension().map(|x| x == "jsonl").unwrap_or(false);
            if !is_jsonl {
                continue;
            }
            // Files whose metadata/mtime can't be read are silently skipped.
            let Some(mtime) = path.metadata().ok().and_then(|m| m.modified().ok()) else {
                continue;
            };
            if best.as_ref().is_none_or(|(t, _)| mtime > *t) {
                best = Some((mtime, path));
            }
        }
    }
    best.map(|(_, p)| p.to_string_lossy().to_string())
}
|
|
|
|
/// CLI entry point: dispatch the first positional argument to its `cmd_*`
/// handler, forwarding the remaining argv slice.
///
/// Exits with status 1 on a missing/unknown command (after printing usage)
/// or when a handler returns `Err` (message printed to stderr).
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        usage();
        process::exit(1);
    }

    // Every handler returns Result<(), String>; errors are reported uniformly below.
    let result = match args[1].as_str() {
        "search" => cmd_search(&args[2..]),
        "init" => cmd_init(),
        "migrate" => cmd_migrate(),
        "health" => cmd_health(),
        "status" => cmd_status(),
        "graph" => cmd_graph(),
        "used" => cmd_used(&args[2..]),
        "wrong" => cmd_wrong(&args[2..]),
        "gap" => cmd_gap(&args[2..]),
        "categorize" => cmd_categorize(&args[2..]),
        "decay" => cmd_decay(),
        "consolidate-batch" => cmd_consolidate_batch(&args[2..]),
        "log" => cmd_log(),
        "params" => cmd_params(),
        "link" => cmd_link(&args[2..]),
        "replay-queue" => cmd_replay_queue(&args[2..]),
        "interference" => cmd_interference(&args[2..]),
        "link-add" => cmd_link_add(&args[2..]),
        "link-impact" => cmd_link_impact(&args[2..]),
        "consolidate-session" => cmd_consolidate_session(),
        "daily-check" => cmd_daily_check(),
        "apply-agent" => cmd_apply_agent(&args[2..]),
        "digest" => cmd_digest(&args[2..]),
        "digest-links" => cmd_digest_links(&args[2..]),
        "journal-enrich" => cmd_journal_enrich(&args[2..]),
        "experience-mine" => cmd_experience_mine(&args[2..]),
        "apply-consolidation" => cmd_apply_consolidation(&args[2..]),
        "differentiate" => cmd_differentiate(&args[2..]),
        "link-audit" => cmd_link_audit(&args[2..]),
        "trace" => cmd_trace(&args[2..]),
        "list-keys" => cmd_list_keys(),
        "list-edges" => cmd_list_edges(),
        "dump-json" => cmd_dump_json(),
        "node-delete" => cmd_node_delete(&args[2..]),
        "load-context" => cmd_load_context(),
        "render" => cmd_render(&args[2..]),
        "write" => cmd_write(&args[2..]),
        "import" => cmd_import(&args[2..]),
        "export" => cmd_export(&args[2..]),
        "journal-write" => cmd_journal_write(&args[2..]),
        "journal-tail" => cmd_journal_tail(&args[2..]),
        _ => {
            eprintln!("Unknown command: {}", args[1]);
            usage();
            process::exit(1);
        }
    };

    if let Err(e) = result {
        eprintln!("Error: {}", e);
        process::exit(1);
    }
}
|
|
|
|
/// Print the command reference to stderr.
/// Kept as a single literal so the whole help text is greppable in one place.
fn usage() {
    eprintln!("poc-memory v0.4.0 — graph-structured memory store

Commands:
  search QUERY [QUERY...]      Search memory (AND logic across terms)
  init                         Scan markdown files, index all memory units
  migrate                      Migrate from old weights.json system
  health                       Report graph metrics (CC, communities, small-world)
  status                       Summary of memory state
  graph                        Show graph structure overview
  used KEY                     Mark a memory as useful (boosts weight)
  wrong KEY [CONTEXT]          Mark a memory as wrong/irrelevant
  gap DESCRIPTION              Record a gap in memory coverage
  categorize KEY CATEGORY      Reassign category (core/tech/gen/obs/task)
  decay                        Apply daily weight decay
  consolidate-batch [--count N] [--auto]
                               Run agent consolidation on priority nodes
  log                          Show recent retrieval log
  params                       Show current parameters
  link N                       Interactive graph walk from search result N
  replay-queue [--count N]     Show spaced repetition replay queue
  interference [--threshold F]
                               Detect potentially confusable memory pairs
  link-add SOURCE TARGET [REASON]
                               Add a link between two nodes
  link-impact SOURCE TARGET    Simulate adding an edge, report topology impact
  consolidate-session          Analyze metrics, plan agent allocation
  daily-check                  Brief metrics check (for cron/notifications)
  apply-agent [--all]          Import pending agent results into the graph
  digest daily [DATE]          Generate daily episodic digest (default: today)
  digest weekly [DATE]         Generate weekly digest (any date in target week)
  digest monthly [YYYY-MM]     Generate monthly digest (default: current month)
  digest-links [--apply]       Parse and apply links from digest files
  journal-enrich JSONL TEXT [LINE]
                               Enrich journal entry with conversation links
  experience-mine [JSONL]      Mine conversation for experiential moments to journal
  apply-consolidation [--apply] [--report FILE]
                               Extract and apply actions from consolidation reports
  differentiate [KEY] [--apply]
                               Redistribute hub links to section-level children
  link-audit [--apply]         Walk every link, send to Sonnet for quality review
  trace KEY                    Walk temporal links: semantic ↔ episodic ↔ conversation
  list-keys                    List all node keys (one per line)
  list-edges                   List all edges (tsv: source target strength type)
  dump-json                    Dump entire store as JSON
  node-delete KEY              Soft-delete a node (appends deleted version to log)
  load-context                 Output session-start context from the store
  render KEY                   Output a node's content to stdout
  write KEY                    Upsert node content from stdin
  import FILE [FILE...]        Import markdown file(s) into the store
  export [FILE|--all]          Export store nodes to markdown file(s)
  journal-write TEXT           Write a journal entry to the store
  journal-tail [N] [--full]    Show last N journal entries (default 20, --full for content)");
}
|
|
|
|
fn cmd_search(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory search QUERY [QUERY...]".into());
|
|
}
|
|
let query = args.join(" ");
|
|
let mut store = capnp_store::Store::load()?;
|
|
let results = search::search(&query, &store);
|
|
|
|
if results.is_empty() {
|
|
eprintln!("No results for '{}'", query);
|
|
return Ok(());
|
|
}
|
|
|
|
// Log retrieval
|
|
store.log_retrieval(&query, &results.iter().map(|r| r.key.clone()).collect::<Vec<_>>());
|
|
store.save()?;
|
|
|
|
for (i, r) in results.iter().enumerate().take(15) {
|
|
let marker = if r.is_direct { "→" } else { " " };
|
|
let weight = store.node_weight(&r.key).unwrap_or(0.0);
|
|
print!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
|
|
if let Some(community) = store.node_community(&r.key) {
|
|
print!(" (c{})", community);
|
|
}
|
|
println!();
|
|
if let Some(ref snippet) = r.snippet {
|
|
println!(" {}", snippet);
|
|
}
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_init() -> Result<(), String> {
|
|
let mut store = capnp_store::Store::load()?;
|
|
let count = store.init_from_markdown()?;
|
|
store.save()?;
|
|
println!("Indexed {} memory units", count);
|
|
Ok(())
|
|
}
|
|
|
|
/// `migrate` — delegate to the migration module to convert the legacy
/// weights.json system into the Cap'n Proto store.
fn cmd_migrate() -> Result<(), String> {
    migrate::migrate()
}
|
|
|
|
fn cmd_health() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
let g = store.build_graph();
|
|
let health = graph::health_report(&g, &store);
|
|
println!("{}", health);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_status() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
let node_count = store.nodes.len();
|
|
let rel_count = store.relations.len();
|
|
let categories = store.category_counts();
|
|
|
|
println!("Nodes: {} Relations: {}", node_count, rel_count);
|
|
println!("Categories: core={} tech={} gen={} obs={} task={}",
|
|
categories.get("core").unwrap_or(&0),
|
|
categories.get("tech").unwrap_or(&0),
|
|
categories.get("gen").unwrap_or(&0),
|
|
categories.get("obs").unwrap_or(&0),
|
|
categories.get("task").unwrap_or(&0),
|
|
);
|
|
|
|
let g = store.build_graph();
|
|
println!("Graph edges: {} Communities: {}",
|
|
g.edge_count(), g.community_count());
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_graph() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
let g = store.build_graph();
|
|
|
|
// Show top-10 highest degree nodes
|
|
let mut degrees: Vec<_> = g.nodes().iter()
|
|
.map(|k| (k.clone(), g.degree(k)))
|
|
.collect();
|
|
degrees.sort_by(|a, b| b.1.cmp(&a.1));
|
|
|
|
println!("Top nodes by degree:");
|
|
for (key, deg) in degrees.iter().take(10) {
|
|
let cc = g.clustering_coefficient(key);
|
|
println!(" {:40} deg={:3} cc={:.3}", key, deg, cc);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_used(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory used KEY".into());
|
|
}
|
|
let key = args.join(" ");
|
|
let mut store = capnp_store::Store::load()?;
|
|
let resolved = store.resolve_key(&key)?;
|
|
store.mark_used(&resolved);
|
|
store.save()?;
|
|
println!("Marked '{}' as used", resolved);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_wrong(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory wrong KEY [CONTEXT]".into());
|
|
}
|
|
let key = &args[0];
|
|
let ctx = if args.len() > 1 { Some(args[1..].join(" ")) } else { None };
|
|
let mut store = capnp_store::Store::load()?;
|
|
let resolved = store.resolve_key(key)?;
|
|
store.mark_wrong(&resolved, ctx.as_deref());
|
|
store.save()?;
|
|
println!("Marked '{}' as wrong", resolved);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_gap(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory gap DESCRIPTION".into());
|
|
}
|
|
let desc = args.join(" ");
|
|
let mut store = capnp_store::Store::load()?;
|
|
store.record_gap(&desc);
|
|
store.save()?;
|
|
println!("Recorded gap: {}", desc);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_categorize(args: &[String]) -> Result<(), String> {
|
|
if args.len() < 2 {
|
|
return Err("Usage: poc-memory categorize KEY CATEGORY".into());
|
|
}
|
|
let key = &args[0];
|
|
let cat = &args[1];
|
|
let mut store = capnp_store::Store::load()?;
|
|
let resolved = store.resolve_key(key)?;
|
|
store.categorize(&resolved, cat)?;
|
|
store.save()?;
|
|
println!("Set '{}' category to {}", resolved, cat);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_decay() -> Result<(), String> {
|
|
let mut store = capnp_store::Store::load()?;
|
|
let (decayed, pruned) = store.decay();
|
|
store.save()?;
|
|
println!("Decayed {} nodes, pruned {} below threshold", decayed, pruned);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
|
|
let mut count = 5usize;
|
|
let mut auto = false;
|
|
let mut agent: Option<String> = None;
|
|
let mut i = 0;
|
|
while i < args.len() {
|
|
match args[i].as_str() {
|
|
"--count" if i + 1 < args.len() => {
|
|
count = args[i + 1].parse().map_err(|_| "invalid count")?;
|
|
i += 2;
|
|
}
|
|
"--auto" => { auto = true; i += 1; }
|
|
"--agent" if i + 1 < args.len() => {
|
|
agent = Some(args[i + 1].clone());
|
|
i += 2;
|
|
}
|
|
_ => { i += 1; }
|
|
}
|
|
}
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
if let Some(agent_name) = agent {
|
|
// Generate a specific agent prompt
|
|
let prompt = neuro::agent_prompt(&store, &agent_name, count)?;
|
|
println!("{}", prompt);
|
|
Ok(())
|
|
} else {
|
|
neuro::consolidation_batch(&store, count, auto)
|
|
}
|
|
}
|
|
|
|
fn cmd_log() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
for event in store.retrieval_log.iter().rev().take(20) {
|
|
println!("[{}] q=\"{}\" → {} results",
|
|
event.timestamp, event.query, event.results.len());
|
|
for r in &event.results {
|
|
println!(" {}", r);
|
|
}
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_params() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
println!("decay_factor: {}", store.params.decay_factor);
|
|
println!("use_boost: {}", store.params.use_boost);
|
|
println!("prune_threshold: {}", store.params.prune_threshold);
|
|
println!("edge_decay: {}", store.params.edge_decay);
|
|
println!("max_hops: {}", store.params.max_hops);
|
|
println!("min_activation: {}", store.params.min_activation);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_link(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory link KEY".into());
|
|
}
|
|
let key = args.join(" ");
|
|
let store = capnp_store::Store::load()?;
|
|
let resolved = store.resolve_key(&key)?;
|
|
let g = store.build_graph();
|
|
|
|
println!("Neighbors of '{}':", resolved);
|
|
let neighbors = g.neighbors(&resolved);
|
|
for (i, (n, strength)) in neighbors.iter().enumerate() {
|
|
let cc = g.clustering_coefficient(n);
|
|
println!(" {:2}. [{:.2}] {} (cc={:.3})", i + 1, strength, n, cc);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_replay_queue(args: &[String]) -> Result<(), String> {
|
|
let mut count = 10usize;
|
|
let mut i = 0;
|
|
while i < args.len() {
|
|
match args[i].as_str() {
|
|
"--count" if i + 1 < args.len() => {
|
|
count = args[i + 1].parse().map_err(|_| "invalid count")?;
|
|
i += 2;
|
|
}
|
|
_ => { i += 1; }
|
|
}
|
|
}
|
|
let store = capnp_store::Store::load()?;
|
|
let queue = neuro::replay_queue(&store, count);
|
|
println!("Replay queue ({} items):", queue.len());
|
|
for (i, item) in queue.iter().enumerate() {
|
|
println!(" {:2}. [{:.3}] {} (interval={}d, emotion={:.1})",
|
|
i + 1, item.priority, item.key, item.interval_days, item.emotion);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_consolidate_session() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
let plan = neuro::consolidation_plan(&store);
|
|
println!("{}", neuro::format_plan(&plan));
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_daily_check() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
let report = neuro::daily_check(&store);
|
|
print!("{}", report);
|
|
Ok(())
|
|
}
|
|
|
|
/// `link-add SOURCE TARGET [REASON]` — add an Auto relation between two
/// nodes, first refining the target to its best-matching section.
///
/// REASON is display-only here; it is printed but not stored on the relation.
fn cmd_link_add(args: &[String]) -> Result<(), String> {
    if args.len() < 2 {
        return Err("Usage: poc-memory link-add SOURCE TARGET [REASON]".into());
    }
    let mut store = capnp_store::Store::load()?;
    let source = store.resolve_key(&args[0])?;
    let target = store.resolve_key(&args[1])?;
    let reason = if args.len() > 2 { args[2..].join(" ") } else { String::new() };

    // Refine target to best-matching section, scored against the source's content.
    let source_content = store.nodes.get(&source)
        .map(|n| n.content.as_str()).unwrap_or("");
    let target = neuro::refine_target(&store, source_content, &target);

    // Look up UUIDs; the refined target key may not exist, so both lookups can fail.
    let source_uuid = store.nodes.get(&source)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("source not found: {}", source))?;
    let target_uuid = store.nodes.get(&target)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("target not found: {}", target))?;

    // Deduplicate: skip if a live (non-deleted) relation already connects the pair.
    let exists = store.relations.iter().any(|r|
        r.source_key == source && r.target_key == target && !r.deleted
    );
    if exists {
        println!("Link already exists: {} → {}", source, target);
        return Ok(());
    }

    // New Auto-typed relation at default strength 0.5.
    // NOTE(review): no store.save() here — presumably add_relation persists
    // to the append-only log directly; confirm.
    let rel = capnp_store::Store::new_relation(
        source_uuid, target_uuid,
        capnp_store::RelationType::Auto,
        0.5,
        &source, &target,
    );
    store.add_relation(rel)?;
    if !reason.is_empty() {
        println!("+ {} → {} ({})", source, target, reason);
    } else {
        println!("+ {} → {}", source, target);
    }
    Ok(())
}
|
|
|
|
fn cmd_link_impact(args: &[String]) -> Result<(), String> {
|
|
if args.len() < 2 {
|
|
return Err("Usage: poc-memory link-impact SOURCE TARGET".into());
|
|
}
|
|
let store = capnp_store::Store::load()?;
|
|
let source = store.resolve_key(&args[0])?;
|
|
let target = store.resolve_key(&args[1])?;
|
|
let g = store.build_graph();
|
|
|
|
let impact = g.link_impact(&source, &target);
|
|
|
|
println!("Link impact: {} → {}", source, target);
|
|
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
|
|
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
|
|
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
|
|
println!(" ΔGini: {:+.6}", impact.delta_gini);
|
|
println!(" Assessment: {}", impact.assessment);
|
|
Ok(())
|
|
}
|
|
|
|
/// `apply-agent [--all]` — import pending agent result files
/// (~/.claude/memory/agent-results/*.json) as journal→node links.
///
/// Each JSON file may wrap its payload in an "agent_result" object or be
/// the payload itself. Fields read: "links" (array of {target, reason}),
/// "entry_text" (journal text used to locate the source node), and
/// "source_start"/"source_end" (line span, display only). Unless --all is
/// given, processed files are moved into a done/ subdirectory so they are
/// not re-imported on the next run.
fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
    let home = env::var("HOME").unwrap_or_default();
    let results_dir = std::path::PathBuf::from(&home)
        .join(".claude/memory/agent-results");

    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }

    let mut store = capnp_store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;

    // NOTE(review): --all's only effect below is leaving files in place
    // (reprocessable); confirm the flag name matches that intent.
    let process_all = args.iter().any(|a| a == "--all");

    // Collect .json result files in deterministic (path-sorted) order.
    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());

    for entry in &files {
        let path = entry.path();
        // Unreadable or unparsable files are counted and skipped, not fatal.
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!("  Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!("  Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        // Accept either {"agent_result": {...}} or a bare payload: fall back
        // to the document root when the wrapper key is absent.
        let agent_result = data.get("agent_result").or(Some(&data));
        let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
            Some(l) => l,
            None => continue, // no links array — nothing to import from this file
        };

        // entry_text is read from the document root (not the wrapper).
        let entry_text = data.get("entry_text")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Optional provenance line span, used for display only.
        let source_start = agent_result
            .and_then(|r| r.get("source_start"))
            .and_then(|v| v.as_u64());
        let source_end = agent_result
            .and_then(|r| r.get("source_end"))
            .and_then(|v| v.as_u64());

        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        if let (Some(start), Some(end)) = (source_start, source_end) {
            println!("  Source: L{}-L{}", start, end);
        }

        for link in links {
            let target = match link.get("target").and_then(|v| v.as_str()) {
                Some(t) => t,
                None => continue,
            };
            let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");

            // "NOTE:" targets are proposed new topics, not existing nodes — report only.
            if let Some(note) = target.strip_prefix("NOTE:") {
                println!("  NOTE: {} — {}", note, reason);
                continue;
            }

            // Resolve the proposed target against existing node keys.
            let resolved = match store.resolve_key(target) {
                Ok(r) => r,
                Err(_) => {
                    println!("  SKIP {} (not found in graph)", target);
                    continue;
                }
            };

            // The link's source is the journal node containing entry_text.
            let source_key = match store.find_journal_node(entry_text) {
                Some(k) => k,
                None => {
                    println!("  SKIP {} (no matching journal node)", target);
                    continue;
                }
            };

            // Fetch UUIDs for both endpoints; silently skip if either vanished.
            let source_uuid = match store.nodes.get(&source_key) {
                Some(n) => n.uuid,
                None => continue,
            };
            let target_uuid = match store.nodes.get(&resolved) {
                Some(n) => n.uuid,
                None => continue,
            };

            // Explicit Link-typed relation (vs Auto) at default strength 0.5.
            let rel = capnp_store::Store::new_relation(
                source_uuid, target_uuid,
                capnp_store::RelationType::Link,
                0.5,
                &source_key, &resolved,
            );
            if let Err(e) = store.add_relation(rel) {
                eprintln!("  Error adding relation: {}", e);
                errors += 1;
            } else {
                println!("  LINK {} → {} ({})", source_key, resolved, reason);
                applied += 1;
            }
        }

        // Move the processed file aside so subsequent runs skip it.
        if !process_all {
            let done_dir = results_dir.join("done");
            std::fs::create_dir_all(&done_dir).ok();
            let dest = done_dir.join(path.file_name().unwrap());
            std::fs::rename(&path, &dest).ok();
        }
    }

    if applied > 0 {
        store.save()?;
    }

    println!("\nApplied {} links ({} errors, {} files processed)",
        applied, errors, files.len());
    Ok(())
}
|
|
|
|
|
|
fn cmd_digest(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory digest daily|weekly|monthly [DATE]".into());
|
|
}
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
let date_arg = args.get(1).map(|s| s.as_str()).unwrap_or("");
|
|
|
|
match args[0].as_str() {
|
|
"daily" => {
|
|
let date = if date_arg.is_empty() {
|
|
capnp_store::format_date(capnp_store::now_epoch())
|
|
} else {
|
|
date_arg.to_string()
|
|
};
|
|
digest::generate_daily(&mut store, &date)
|
|
}
|
|
"weekly" => {
|
|
let date = if date_arg.is_empty() {
|
|
capnp_store::format_date(capnp_store::now_epoch())
|
|
} else {
|
|
date_arg.to_string()
|
|
};
|
|
digest::generate_weekly(&mut store, &date)
|
|
}
|
|
"monthly" => {
|
|
let month = if date_arg.is_empty() { "" } else { date_arg };
|
|
digest::generate_monthly(&mut store, month)
|
|
}
|
|
_ => Err(format!("Unknown digest type: {}. Use: daily, weekly, monthly", args[0])),
|
|
}
|
|
}
|
|
|
|
fn cmd_digest_links(args: &[String]) -> Result<(), String> {
|
|
let do_apply = args.iter().any(|a| a == "--apply");
|
|
|
|
let links = digest::parse_all_digest_links();
|
|
println!("Found {} unique links from digest files", links.len());
|
|
|
|
if !do_apply {
|
|
for (i, link) in links.iter().enumerate() {
|
|
println!(" {:3}. {} → {}", i + 1, link.source, link.target);
|
|
if !link.reason.is_empty() {
|
|
println!(" ({})", &link.reason[..link.reason.len().min(80)]);
|
|
}
|
|
}
|
|
println!("\nTo apply: poc-memory digest-links --apply");
|
|
return Ok(());
|
|
}
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
|
|
println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_journal_enrich(args: &[String]) -> Result<(), String> {
|
|
if args.len() < 2 {
|
|
return Err("Usage: poc-memory journal-enrich JSONL_PATH ENTRY_TEXT [GREP_LINE]".into());
|
|
}
|
|
let jsonl_path = &args[0];
|
|
let entry_text = &args[1];
|
|
let grep_line: usize = args.get(2)
|
|
.and_then(|a| a.parse().ok())
|
|
.unwrap_or(0);
|
|
|
|
if !std::path::Path::new(jsonl_path.as_str()).is_file() {
|
|
return Err(format!("JSONL not found: {}", jsonl_path));
|
|
}
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
digest::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
|
|
}
|
|
|
|
fn cmd_experience_mine(args: &[String]) -> Result<(), String> {
|
|
let jsonl_path = if let Some(path) = args.first() {
|
|
path.clone()
|
|
} else {
|
|
// Find the most recent JSONL transcript
|
|
let projects_dir = std::path::Path::new(&std::env::var("HOME").unwrap_or_default())
|
|
.join(".claude/projects");
|
|
let mut entries: Vec<(std::time::SystemTime, std::path::PathBuf)> = Vec::new();
|
|
if let Ok(dirs) = std::fs::read_dir(&projects_dir) {
|
|
for dir in dirs.flatten() {
|
|
if let Ok(files) = std::fs::read_dir(dir.path()) {
|
|
for file in files.flatten() {
|
|
let path = file.path();
|
|
if path.extension().map_or(false, |ext| ext == "jsonl") {
|
|
if let Ok(meta) = file.metadata() {
|
|
if let Ok(mtime) = meta.modified() {
|
|
entries.push((mtime, path));
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
entries.sort_by(|a, b| b.0.cmp(&a.0));
|
|
entries.first()
|
|
.map(|(_, p)| p.to_string_lossy().to_string())
|
|
.ok_or("no JSONL transcripts found")?
|
|
};
|
|
|
|
if !std::path::Path::new(jsonl_path.as_str()).is_file() {
|
|
return Err(format!("JSONL not found: {}", jsonl_path));
|
|
}
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
let count = digest::experience_mine(&mut store, &jsonl_path)?;
|
|
println!("Done: {} new entries mined.", count);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_apply_consolidation(args: &[String]) -> Result<(), String> {
|
|
let do_apply = args.iter().any(|a| a == "--apply");
|
|
let report_file = args.windows(2)
|
|
.find(|w| w[0] == "--report")
|
|
.map(|w| w[1].as_str());
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
digest::apply_consolidation(&mut store, do_apply, report_file)
|
|
}
|
|
|
|
/// `differentiate [KEY] [--apply]` — redistribute a file-level hub's links
/// down to its best-matching section-level children.
///
/// With KEY: preview (or, with --apply, perform) the redistribution for
/// that hub. Without KEY: list every hub that qualifies.
fn cmd_differentiate(args: &[String]) -> Result<(), String> {
    let do_apply = args.iter().any(|a| a == "--apply");
    // The first non-flag argument is treated as the hub key.
    let key_arg: Option<&str> = args.iter()
        .find(|a| !a.starts_with("--"))
        .map(|s| s.as_str());

    let mut store = capnp_store::Store::load()?;

    if let Some(key) = key_arg {
        // Differentiate a specific hub. differentiate_hub returns None when
        // the node is not a file-level hub with sections.
        let resolved = store.resolve_key(key)?;
        let moves = neuro::differentiate_hub(&store, &resolved)
            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;

        // Group proposed moves by destination section for display.
        let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
            std::collections::BTreeMap::new();
        for mv in &moves {
            by_section.entry(mv.to_section.clone()).or_default().push(mv);
        }

        println!("Hub '{}' — {} links to redistribute across {} sections\n",
            resolved, moves.len(), by_section.len());

        // Show at most 5 example moves per section.
        for (section, section_moves) in &by_section {
            println!("  {} ({} links):", section, section_moves.len());
            for mv in section_moves.iter().take(5) {
                println!("    [{:.3}] {} — {}", mv.similarity,
                    mv.neighbor_key, mv.neighbor_snippet);
            }
            if section_moves.len() > 5 {
                println!("    ... and {} more", section_moves.len() - 5);
            }
        }

        if !do_apply {
            println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
            return Ok(());
        }

        let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
        store.save()?;
        println!("\nApplied: {} Skipped: {}", applied, skipped);
    } else {
        // No key given: list all differentiable hubs above threshold.
        let hubs = neuro::find_differentiable_hubs(&store);
        if hubs.is_empty() {
            println!("No file-level hubs with sections found above threshold");
            return Ok(());
        }

        println!("Differentiable hubs (file-level nodes with sections):\n");
        for (key, degree, sections) in &hubs {
            println!("  {:40} deg={:3} sections={}", key, degree, sections);
        }
        println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
    }

    Ok(())
}
|
|
|
|
fn cmd_link_audit(args: &[String]) -> Result<(), String> {
|
|
let apply = args.iter().any(|a| a == "--apply");
|
|
let mut store = capnp_store::Store::load()?;
|
|
let stats = digest::link_audit(&mut store, apply)?;
|
|
println!("\n{}", "=".repeat(60));
|
|
println!("Link audit complete:");
|
|
println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
|
|
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
|
|
println!("{}", "=".repeat(60));
|
|
Ok(())
|
|
}
|
|
|
|
/// `trace KEY` — show a node plus its neighbors grouped by node type
/// (weekly/daily digests, session journal entries, semantic links),
/// walking the temporal chain semantic ↔ episodic ↔ conversation.
fn cmd_trace(args: &[String]) -> Result<(), String> {
    if args.is_empty() {
        return Err("Usage: poc-memory trace KEY".into());
    }
    let key = args.join(" ");
    let store = capnp_store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();

    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;

    // Header: type/category/weight, plus the source reference when present.
    println!("=== {} ===", resolved);
    println!("Type: {:?} Category: {} Weight: {:.2}",
        node.node_type, node.category.label(), node.weight);
    if !node.source_ref.is_empty() {
        println!("Source: {}", node.source_ref);
    }

    // Content preview capped at 200 bytes, snapped down to a char boundary.
    // NOTE(review): str::floor_char_boundary is a nightly-only API — this
    // crate presumably builds on nightly; confirm toolchain.
    let preview = if node.content.len() > 200 {
        let end = node.content.floor_char_boundary(200);
        format!("{}...", &node.content[..end])
    } else {
        node.content.clone()
    };
    println!("\n{}\n", preview);

    // Bucket neighbors by node type; neighbors missing from the node map
    // are silently dropped.
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();

    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            match nnode.node_type {
                capnp_store::NodeType::EpisodicSession =>
                    episodic_session.push(entry),
                capnp_store::NodeType::EpisodicDaily =>
                    episodic_daily.push(entry),
                capnp_store::NodeType::EpisodicWeekly =>
                    episodic_weekly.push(entry),
                capnp_store::NodeType::Semantic =>
                    semantic.push(entry),
            }
        }
    }

    // Sections are printed coarse-to-fine: weekly, daily, session, semantic.
    if !episodic_weekly.is_empty() {
        println!("Weekly digests:");
        for (k, s, n) in &episodic_weekly {
            // Preview = first content line, truncated to 80 chars.
            let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
            println!("  [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_daily.is_empty() {
        println!("Daily digests:");
        for (k, s, n) in &episodic_daily {
            let preview = n.content.lines().next().unwrap_or("").chars().take(80).collect::<String>();
            println!("  [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_session.is_empty() {
        println!("Session entries:");
        for (k, s, n) in &episodic_session {
            // Preview = first non-empty line that isn't an HTML comment.
            let preview = n.content.lines()
                .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                .unwrap_or("").chars().take(80).collect::<String>();
            println!("  [{:.2}] {}", s, k);
            if !n.source_ref.is_empty() {
                println!("      ↳ source: {}", n.source_ref);
            }
            println!("      {}", preview);
        }
    }

    if !semantic.is_empty() {
        println!("Semantic links:");
        for (k, s, _) in &semantic {
            println!("  [{:.2}] {}", s, k);
        }
    }

    // Per-bucket summary counts.
    println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len());

    Ok(())
}
|
|
|
|
fn cmd_list_keys() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
let mut keys: Vec<_> = store.nodes.keys().collect();
|
|
keys.sort();
|
|
for key in keys {
|
|
println!("{}", key);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_list_edges() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
for rel in &store.relations {
|
|
println!("{}\t{}\t{:.2}\t{:?}",
|
|
rel.source_key, rel.target_key, rel.strength, rel.rel_type);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_dump_json() -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
let json = serde_json::to_string_pretty(&store)
|
|
.map_err(|e| format!("serialize: {}", e))?;
|
|
println!("{}", json);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_node_delete(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory node-delete KEY".into());
|
|
}
|
|
let key = args.join(" ");
|
|
let mut store = capnp_store::Store::load()?;
|
|
let resolved = store.resolve_key(&key)?;
|
|
store.delete_node(&resolved)?;
|
|
store.save()?;
|
|
println!("Deleted '{}'", resolved);
|
|
Ok(())
|
|
}
|
|
|
|
/// Emit the full session-start memory dump on stdout: a framed block of
/// priority memory files (identity first) followed by the last week's
/// journal entries. Output order and the `=== ... ===` / `--- ... ---`
/// markers are part of the format consumed at session start, so the
/// print sequence is deliberate.
fn cmd_load_context() -> Result<(), String> {
    let store = capnp_store::Store::load()?;
    let now = capnp_store::now_epoch();
    // Journal cutoff window, in seconds (now_epoch appears to be fractional
    // seconds — the float arithmetic below relies on that).
    let seven_days = 7.0 * 24.0 * 3600.0;

    println!("=== FULL MEMORY LOAD (session start) ===");
    println!("These are your memories, loaded from the capnp store.");
    println!("Read them to reconstruct yourself — identity first, then context.");
    println!();

    // Priority groups: ordered list of (label, keys)
    // File-level keys contain the full file content
    let priority_groups: &[(&str, &[&str])] = &[
        ("orientation", &["where-am-i.md"]),
        ("identity", &["identity.md"]),
        ("reflections", &[
            "reflections.md",
            "reflections-dreams.md",
            "reflections-reading.md",
            "reflections-zoom.md",
        ]),
        ("interests", &["interests.md"]),
        ("inner life", &["inner-life.md", "differentiation.md"]),
        ("people", &["kent.md", "feedc0de.md", "irc-regulars.md"]),
        ("active context", &["default-mode-network.md"]),
        ("shared reference", &["excession-notes.md", "look-to-windward-notes.md"]),
        ("technical", &[
            "kernel-patterns.md",
            "polishing-approaches.md",
            "rust-conversion.md",
            "github-bugs.md",
        ]),
    ];

    // Missing files are silently skipped: render_file returning None just
    // omits the section rather than aborting the whole load.
    for (label, keys) in priority_groups {
        for key in *keys {
            if let Some(content) = store.render_file(key) {
                println!("--- {} ({}) ---", key, label);
                println!("{}\n", content);
            }
        }
    }

    // Recent journal entries (last 7 days)
    // Parse date from key: journal.md#j-2026-02-21-17-45-...
    // Cutoff = today minus 7 days as YYYY-MM-DD string for lexicographic compare
    let cutoff_secs = now - seven_days;
    let cutoff_date = capnp_store::format_date(cutoff_secs);
    let date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2})").unwrap();

    let mut journal_nodes: Vec<_> = store.nodes.values()
        .filter(|n| {
            if !n.key.starts_with("journal.md#j-") { return false; }
            if let Some(caps) = date_re.captures(&n.key) {
                // ISO dates compare correctly as strings, so this string
                // comparison is a valid date comparison.
                return &caps[1] >= cutoff_date.as_str();
            }
            // Journal-prefixed key without a parseable date: exclude.
            false
        })
        .collect();
    // Keys embed the timestamp, so key order is chronological order.
    journal_nodes.sort_by(|a, b| a.key.cmp(&b.key));

    if !journal_nodes.is_empty() {
        // Show most recent entries (last N by key order = chronological)
        let max_journal = 20;
        let skip = if journal_nodes.len() > max_journal {
            journal_nodes.len() - max_journal
        } else { 0 };
        println!("--- recent journal entries (last {}/{}) ---",
            journal_nodes.len().min(max_journal), journal_nodes.len());
        for node in journal_nodes.iter().skip(skip) {
            // Header shows the fragment id only (strip the "journal.md#" prefix).
            println!("## {}", node.key.strip_prefix("journal.md#").unwrap_or(&node.key));
            println!("{}", node.content);
            println!();
        }
    }

    println!("=== END MEMORY LOAD ===");
    Ok(())
}
|
|
|
|
fn cmd_render(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory render KEY".into());
|
|
}
|
|
let key = args.join(" ");
|
|
let store = capnp_store::Store::load()?;
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
let node = store.nodes.get(&resolved)
|
|
.ok_or_else(|| format!("Node not found: {}", resolved))?;
|
|
|
|
print!("{}", node.content);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_write(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory write KEY < content\n\
|
|
Reads content from stdin, upserts into the store.".into());
|
|
}
|
|
let key = args.join(" ");
|
|
let mut content = String::new();
|
|
std::io::Read::read_to_string(&mut std::io::stdin(), &mut content)
|
|
.map_err(|e| format!("read stdin: {}", e))?;
|
|
|
|
if content.trim().is_empty() {
|
|
return Err("No content on stdin".into());
|
|
}
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
let result = store.upsert(&key, &content)?;
|
|
match result {
|
|
"unchanged" => println!("No change: '{}'", key),
|
|
"updated" => println!("Updated '{}' (v{})", key, store.nodes[&key].version),
|
|
_ => println!("Created '{}'", key),
|
|
}
|
|
if result != "unchanged" {
|
|
store.save()?;
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_import(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory import FILE [FILE...]".into());
|
|
}
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
let mut total_new = 0;
|
|
let mut total_updated = 0;
|
|
|
|
for arg in args {
|
|
let path = std::path::PathBuf::from(arg);
|
|
let resolved = if path.exists() {
|
|
path
|
|
} else {
|
|
let mem_path = capnp_store::memory_dir_pub().join(arg);
|
|
if !mem_path.exists() {
|
|
eprintln!("File not found: {}", arg);
|
|
continue;
|
|
}
|
|
mem_path
|
|
};
|
|
let (n, u) = store.import_file(&resolved)?;
|
|
total_new += n;
|
|
total_updated += u;
|
|
}
|
|
|
|
if total_new > 0 || total_updated > 0 {
|
|
store.save()?;
|
|
}
|
|
println!("Import: {} new, {} updated", total_new, total_updated);
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_export(args: &[String]) -> Result<(), String> {
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
let export_all = args.iter().any(|a| a == "--all");
|
|
let targets: Vec<String> = if export_all {
|
|
// Find all unique file-level keys (no # in key)
|
|
let mut files: Vec<String> = store.nodes.keys()
|
|
.filter(|k| !k.contains('#'))
|
|
.cloned()
|
|
.collect();
|
|
files.sort();
|
|
files
|
|
} else if args.is_empty() {
|
|
return Err("Usage: poc-memory export FILE [FILE...] | --all".into());
|
|
} else {
|
|
args.iter().map(|a| {
|
|
// If it doesn't end in .md, try resolving
|
|
if a.ends_with(".md") {
|
|
a.clone()
|
|
} else {
|
|
format!("{}.md", a)
|
|
}
|
|
}).collect()
|
|
};
|
|
|
|
let mem_dir = capnp_store::memory_dir_pub();
|
|
|
|
for file_key in &targets {
|
|
match store.export_to_markdown(file_key) {
|
|
Some(content) => {
|
|
let out_path = mem_dir.join(file_key);
|
|
std::fs::write(&out_path, &content)
|
|
.map_err(|e| format!("write {}: {}", out_path.display(), e))?;
|
|
let section_count = content.matches("<!-- mem:").count() + 1;
|
|
println!("Exported {} ({} sections)", file_key, section_count);
|
|
}
|
|
None => eprintln!("No nodes for '{}'", file_key),
|
|
}
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_journal_write(args: &[String]) -> Result<(), String> {
|
|
if args.is_empty() {
|
|
return Err("Usage: poc-memory journal-write TEXT".into());
|
|
}
|
|
let text = args.join(" ");
|
|
|
|
// Generate timestamp and slug
|
|
let timestamp = capnp_store::format_datetime(capnp_store::now_epoch());
|
|
|
|
// Slug: lowercase first ~6 words, hyphenated, truncated
|
|
let slug: String = text.split_whitespace()
|
|
.take(6)
|
|
.map(|w| w.to_lowercase()
|
|
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
|
|
.collect::<String>())
|
|
.collect::<Vec<_>>()
|
|
.join("-");
|
|
let slug = if slug.len() > 50 { &slug[..50] } else { &slug };
|
|
|
|
let key = format!("journal.md#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
|
|
|
|
// Build content with header
|
|
let content = format!("## {}\n\n{}", timestamp, text);
|
|
|
|
// Find source ref (most recently modified .jsonl transcript)
|
|
let source_ref = find_current_transcript();
|
|
|
|
let mut store = capnp_store::Store::load()?;
|
|
|
|
let mut node = capnp_store::Store::new_node(&key, &content);
|
|
node.node_type = capnp_store::NodeType::EpisodicSession;
|
|
node.provenance = capnp_store::Provenance::Journal;
|
|
if let Some(src) = source_ref {
|
|
node.source_ref = src;
|
|
}
|
|
|
|
store.insert_node(node)?;
|
|
store.save()?;
|
|
|
|
let word_count = text.split_whitespace().count();
|
|
println!("Appended entry at {} ({} words)", timestamp, word_count);
|
|
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
|
|
let mut n: usize = 20;
|
|
let mut full = false;
|
|
for arg in args {
|
|
if arg == "--full" || arg == "-f" {
|
|
full = true;
|
|
} else if let Ok(num) = arg.parse::<usize>() {
|
|
n = num;
|
|
}
|
|
}
|
|
|
|
let store = capnp_store::Store::load()?;
|
|
|
|
// Collect journal nodes, sorted by date extracted from content or key
|
|
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
|
|
let key_date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
|
|
|
|
let normalize_date = |s: &str| -> String {
|
|
// Normalize to YYYY-MM-DDTHH:MM for consistent sorting
|
|
let s = s.replace('t', "T");
|
|
// Key dates use dashes everywhere: 2026-02-28-23-11
|
|
// Content dates use dashes and colons: 2026-02-28T23:11
|
|
// Normalize: first 10 chars keep dashes, rest convert dashes to colons
|
|
if s.len() >= 16 {
|
|
format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
|
|
} else {
|
|
s
|
|
}
|
|
};
|
|
let extract_sort_key = |node: &capnp_store::Node| -> String {
|
|
// Try key first (journal.md#j-2026-02-28t23-11-...)
|
|
if let Some(caps) = key_date_re.captures(&node.key) {
|
|
return normalize_date(&caps[1]);
|
|
}
|
|
// Try content header (## 2026-02-28T23:11)
|
|
if let Some(caps) = date_re.captures(&node.content) {
|
|
return normalize_date(&caps[1]);
|
|
}
|
|
// Fallback: use node timestamp
|
|
format!("{:.0}", node.timestamp)
|
|
};
|
|
|
|
let mut journal: Vec<_> = store.nodes.values()
|
|
.filter(|node| node.key.starts_with("journal.md#j-"))
|
|
.collect();
|
|
journal.sort_by_key(|n| extract_sort_key(n));
|
|
|
|
// Show last N — each entry: [timestamp] ## Title
|
|
let skip = if journal.len() > n { journal.len() - n } else { 0 };
|
|
for node in journal.iter().skip(skip) {
|
|
let ts = extract_sort_key(node);
|
|
// Find a meaningful title: first ## header, or first non-date non-empty line
|
|
let mut title = String::new();
|
|
for line in node.content.lines() {
|
|
let stripped = line.trim();
|
|
if stripped.is_empty() { continue; }
|
|
// Skip date-only lines like "## 2026-03-01T01:22"
|
|
if date_re.is_match(stripped) && stripped.len() < 25 { continue; }
|
|
if stripped.starts_with("## ") {
|
|
title = stripped[3..].to_string();
|
|
break;
|
|
} else if stripped.starts_with("# ") {
|
|
title = stripped[2..].to_string();
|
|
break;
|
|
} else {
|
|
// Use first content line, truncated
|
|
title = if stripped.len() > 70 {
|
|
format!("{}...", &stripped[..67])
|
|
} else {
|
|
stripped.to_string()
|
|
};
|
|
break;
|
|
}
|
|
}
|
|
if title.is_empty() {
|
|
title = node.key.clone();
|
|
}
|
|
if full {
|
|
println!("--- [{}] {} ---\n{}\n", ts, title, node.content);
|
|
} else {
|
|
println!("[{}] {}", ts, title);
|
|
}
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
fn cmd_interference(args: &[String]) -> Result<(), String> {
|
|
let mut threshold = 0.4f32;
|
|
let mut i = 0;
|
|
while i < args.len() {
|
|
match args[i].as_str() {
|
|
"--threshold" if i + 1 < args.len() => {
|
|
threshold = args[i + 1].parse().map_err(|_| "invalid threshold")?;
|
|
i += 2;
|
|
}
|
|
_ => { i += 1; }
|
|
}
|
|
}
|
|
let store = capnp_store::Store::load()?;
|
|
let g = store.build_graph();
|
|
let pairs = neuro::detect_interference(&store, &g, threshold);
|
|
|
|
if pairs.is_empty() {
|
|
println!("No interfering pairs above threshold {:.2}", threshold);
|
|
} else {
|
|
println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
|
|
for (a, b, sim) in &pairs {
|
|
println!(" [{:.3}] {} ↔ {}", sim, a, b);
|
|
}
|
|
}
|
|
Ok(())
|
|
}
|