2026-03-05 15:41:35 -05:00
|
|
|
|
// poc-memory: graph-structured memory for AI assistants
|
|
|
|
|
|
//
|
|
|
|
|
|
// Authors: ProofOfConcept <poc@bcachefs.org> and Kent Overstreet
|
|
|
|
|
|
// License: MIT OR Apache-2.0
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
//
|
|
|
|
|
|
// Architecture:
|
|
|
|
|
|
// nodes.capnp - append-only content node log
|
|
|
|
|
|
// relations.capnp - append-only relation log
|
|
|
|
|
|
// state.bin - derived KV cache (rebuilt from logs when stale)
|
|
|
|
|
|
//
|
|
|
|
|
|
// Graph algorithms: clustering coefficient, community detection (label
|
|
|
|
|
|
// propagation), schema fit scoring, small-world metrics, consolidation
|
|
|
|
|
|
// priority. Text similarity via BM25 with Porter stemming.
|
|
|
|
|
|
//
|
|
|
|
|
|
// Neuroscience-inspired: spaced repetition replay, emotional gating,
|
|
|
|
|
|
// interference detection, schema assimilation, reconsolidation.
|
|
|
|
|
|
|
2026-03-05 22:43:50 -05:00
|
|
|
|
use poc_memory::*;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
use clap::{Parser, Subcommand};
|
|
|
|
|
|
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
use std::process;
|
|
|
|
|
|
|
2026-02-28 23:44:44 -05:00
|
|
|
|
/// Find the most recently modified .jsonl transcript in the Claude projects dir.
|
|
|
|
|
|
fn find_current_transcript() -> Option<String> {
|
2026-03-08 21:16:52 -04:00
|
|
|
|
let projects = config::get().projects_dir.clone();
|
2026-02-28 23:44:44 -05:00
|
|
|
|
if !projects.exists() { return None; }
|
|
|
|
|
|
|
|
|
|
|
|
let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
|
|
|
|
|
|
if let Ok(dirs) = std::fs::read_dir(&projects) {
|
|
|
|
|
|
for dir_entry in dirs.filter_map(|e| e.ok()) {
|
|
|
|
|
|
if !dir_entry.path().is_dir() { continue; }
|
|
|
|
|
|
if let Ok(files) = std::fs::read_dir(dir_entry.path()) {
|
|
|
|
|
|
for f in files.filter_map(|e| e.ok()) {
|
|
|
|
|
|
let p = f.path();
|
|
|
|
|
|
if p.extension().map(|x| x == "jsonl").unwrap_or(false) {
|
|
|
|
|
|
if let Ok(meta) = p.metadata() {
|
|
|
|
|
|
if let Ok(mtime) = meta.modified() {
|
2026-02-28 23:47:11 -05:00
|
|
|
|
if newest.as_ref().is_none_or(|(t, _)| mtime > *t) {
|
2026-02-28 23:44:44 -05:00
|
|
|
|
newest = Some((mtime, p));
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
newest.map(|(_, p)| p.to_string_lossy().to_string())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// Top-level CLI argument container: just the required subcommand.
#[derive(Parser)]
#[command(name = "poc-memory", version = "0.4.0", about = "Graph-structured memory store")]
struct Cli {
    /// Which operation to run; see `Command` for the full list.
    #[command(subcommand)]
    command: Command,
}
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// All poc-memory subcommands. Multi-word commands carry an explicit
/// `#[command(name = "...")]` so the CLI uses kebab-case; variants dispatch
/// 1:1 to the `cmd_*` functions in `main`.
#[derive(Subcommand)]
enum Command {
    /// Search memory (AND logic across terms)
    ///
    /// Pipeline: -p spread -p spectral,k=20
    /// Default pipeline: spread
    Search {
        /// Search terms
        query: Vec<String>,
        /// Algorithm pipeline stages (repeatable)
        #[arg(short, long = "pipeline")]
        pipeline: Vec<String>,
        /// Show more results
        #[arg(long)]
        expand: bool,
        /// Show node content, not just keys
        #[arg(long)]
        full: bool,
        /// Show debug output for each pipeline stage
        #[arg(long)]
        debug: bool,
    },
    /// Scan markdown files, index all memory units
    Init,
    /// Migrate from old weights.json system
    Migrate,
    /// Report graph metrics (CC, communities, small-world)
    Health,
    /// Run consistency checks and repair
    Fsck,
    /// Summary of memory state
    Status,
    /// Show graph structure overview
    Graph,
    /// Mark a memory as useful (boosts weight)
    Used {
        /// Node key
        key: Vec<String>,
    },
    /// Mark a memory as wrong/irrelevant
    Wrong {
        /// Node key
        key: String,
        /// Optional context
        context: Vec<String>,
    },
    /// Record a gap in memory coverage
    Gap {
        /// Gap description
        description: Vec<String>,
    },
    /// Cap node degree by pruning weak auto edges
    #[command(name = "cap-degree")]
    CapDegree {
        /// Maximum degree (default: 50)
        #[arg(default_value_t = 50)]
        max_degree: usize,
    },
    /// Link orphan nodes to similar neighbors
    #[command(name = "link-orphans")]
    LinkOrphans {
        /// Minimum degree to consider orphan (default: 2)
        #[arg(default_value_t = 2)]
        min_degree: usize,
        /// Links per orphan (default: 3)
        #[arg(default_value_t = 3)]
        links_per: usize,
        /// Similarity threshold (default: 0.15)
        #[arg(default_value_t = 0.15)]
        sim_threshold: f32,
    },
    /// Run agent consolidation on priority nodes
    #[command(name = "consolidate-batch")]
    ConsolidateBatch {
        /// Number of nodes to consolidate
        #[arg(long, default_value_t = 5)]
        count: usize,
        /// Generate replay agent prompt automatically
        #[arg(long)]
        auto: bool,
        /// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
        #[arg(long)]
        agent: Option<String>,
    },
    /// Show recent retrieval log
    Log,
    /// Show current parameters
    Params,
    /// Show neighbors of a node
    Link {
        /// Node key
        key: Vec<String>,
    },
    /// Show spaced repetition replay queue
    #[command(name = "replay-queue")]
    ReplayQueue {
        /// Number of items to show
        #[arg(long, default_value_t = 10)]
        count: usize,
    },
    /// Detect potentially confusable memory pairs
    Interference {
        /// Similarity threshold (default: 0.4)
        #[arg(long, default_value_t = 0.4)]
        threshold: f32,
    },
    /// Add a link between two nodes
    #[command(name = "link-add")]
    LinkAdd {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
        /// Optional reason
        reason: Vec<String>,
    },
    /// Simulate adding an edge, report topology impact
    #[command(name = "link-impact")]
    LinkImpact {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
    },
    /// Analyze metrics, plan agent allocation
    #[command(name = "consolidate-session")]
    ConsolidateSession,
    /// Autonomous: plan → agents → apply → digests → links
    #[command(name = "consolidate-full")]
    ConsolidateFull,
    /// Close triangles: link similar neighbors of hubs
    #[command(name = "triangle-close")]
    TriangleClose {
        /// Minimum hub degree (default: 5)
        #[arg(default_value_t = 5)]
        min_degree: usize,
        /// Similarity threshold (default: 0.3)
        #[arg(default_value_t = 0.3)]
        sim_threshold: f32,
        /// Maximum links per hub (default: 10)
        #[arg(default_value_t = 10)]
        max_per_hub: usize,
    },
    /// Brief metrics check (for cron/notifications)
    #[command(name = "daily-check")]
    DailyCheck,
    /// Import pending agent results into the graph
    #[command(name = "apply-agent")]
    ApplyAgent {
        /// Process all files without moving to done/
        #[arg(long)]
        all: bool,
    },
    /// Generate episodic digests (daily, weekly, monthly, auto)
    Digest {
        /// Digest type: daily, weekly, monthly, auto
        #[command(subcommand)]
        level: DigestLevel,
    },
    /// Parse and apply links from digest nodes
    #[command(name = "digest-links")]
    DigestLinks {
        /// Apply the links (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Enrich journal entry with conversation links
    #[command(name = "journal-enrich")]
    JournalEnrich {
        /// Path to JSONL transcript
        jsonl_path: String,
        /// Journal entry text to enrich
        entry_text: String,
        /// Grep line number for source location
        #[arg(default_value_t = 0)]
        grep_line: usize,
    },
    /// Mine conversation for experiential moments to journal
    #[command(name = "experience-mine")]
    ExperienceMine {
        /// Path to JSONL transcript (default: most recent)
        jsonl_path: Option<String>,
    },
    /// Extract and apply actions from consolidation reports
    #[command(name = "apply-consolidation")]
    ApplyConsolidation {
        /// Apply actions (default: dry run)
        #[arg(long)]
        apply: bool,
        /// Read from specific report file
        #[arg(long)]
        report: Option<String>,
    },
    /// Redistribute hub links to section-level children
    Differentiate {
        /// Specific hub key (omit to list all differentiable hubs)
        key: Option<String>,
        /// Apply the redistribution
        #[arg(long)]
        apply: bool,
    },
    /// Walk every link, send to Sonnet for quality review
    #[command(name = "link-audit")]
    LinkAudit {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Walk temporal links: semantic ↔ episodic ↔ conversation
    Trace {
        /// Node key
        key: Vec<String>,
    },
    /// Spectral decomposition of the memory graph
    Spectral {
        /// Number of eigenvectors (default: 30)
        #[arg(default_value_t = 30)]
        k: usize,
    },
    /// Compute and save spectral embedding
    #[command(name = "spectral-save")]
    SpectralSave {
        /// Number of eigenvectors (default: 20)
        #[arg(default_value_t = 20)]
        k: usize,
    },
    /// Find spectrally nearest nodes
    #[command(name = "spectral-neighbors")]
    SpectralNeighbors {
        /// Node key
        key: String,
        /// Number of neighbors (default: 15)
        #[arg(default_value_t = 15)]
        n: usize,
    },
    /// Show nodes ranked by outlier/bridge score
    #[command(name = "spectral-positions")]
    SpectralPositions {
        /// Number of nodes to show (default: 30)
        #[arg(default_value_t = 30)]
        n: usize,
    },
    /// Find spectrally close but unlinked pairs
    #[command(name = "spectral-suggest")]
    SpectralSuggest {
        /// Number of pairs (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
    },
    /// List all node keys (one per line, optional glob)
    #[command(name = "list-keys")]
    ListKeys {
        /// Glob pattern to filter keys
        pattern: Option<String>,
    },
    /// List all edges (tsv: source target strength type)
    #[command(name = "list-edges")]
    ListEdges,
    /// Dump entire store as JSON
    #[command(name = "dump-json")]
    DumpJson,
    /// Soft-delete a node
    #[command(name = "node-delete")]
    NodeDelete {
        /// Node key
        key: Vec<String>,
    },
    /// Rename a node key
    #[command(name = "node-rename")]
    NodeRename {
        /// Old key
        old_key: String,
        /// New key
        new_key: String,
    },
    /// Populate created_at for nodes missing timestamps
    #[command(name = "journal-ts-migrate")]
    JournalTsMigrate,
    /// Output session-start context from the store
    #[command(name = "load-context")]
    LoadContext {
        /// Show word count statistics instead of content
        #[arg(long)]
        stats: bool,
    },
    /// Output a node's content to stdout
    Render {
        /// Node key
        key: Vec<String>,
    },
    /// Show all stored versions of a node
    History {
        /// Show full content for every version
        #[arg(long)]
        full: bool,
        /// Node key
        key: Vec<String>,
    },
    /// Upsert node content from stdin
    Write {
        /// Node key
        key: Vec<String>,
    },
    /// Import markdown file(s) into the store
    Import {
        /// File paths
        files: Vec<String>,
    },
    /// Export store nodes to markdown file(s)
    Export {
        /// File keys to export (or --all)
        files: Vec<String>,
        /// Export all file-level nodes
        #[arg(long)]
        all: bool,
    },
    /// Write a journal entry to the store
    #[command(name = "journal-write")]
    JournalWrite {
        /// Entry text
        text: Vec<String>,
    },
    /// Show recent journal/digest entries
    #[command(name = "journal-tail")]
    JournalTail {
        /// Number of entries to show (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
        /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
        #[arg(long, default_value_t = 0)]
        level: u8,
    },
    /// Query the memory graph
    Query {
        /// Query expression (e.g. "degree > 15 | sort degree | limit 10")
        expr: Vec<String>,
    },
    /// Bump daily lookup counter for keys
    #[command(name = "lookup-bump")]
    LookupBump {
        /// Node keys
        keys: Vec<String>,
    },
    /// Show daily lookup counts
    Lookups {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Background job daemon
    Daemon {
        /// Subcommand: status, log, install
        sub: Option<String>,
        /// Additional arguments
        args: Vec<String>,
    },
    /// Run knowledge agents to convergence
    #[command(name = "knowledge-loop")]
    KnowledgeLoop {
        /// Maximum cycles before stopping
        #[arg(long, default_value_t = 20)]
        max_cycles: usize,
        /// Items per agent per cycle
        #[arg(long, default_value_t = 5)]
        batch_size: usize,
        /// Cycles to check for convergence
        #[arg(long, default_value_t = 5)]
        window: usize,
        /// Maximum inference depth
        #[arg(long, default_value_t = 4)]
        max_depth: i32,
    },
    /// Extract atomic facts from conversation transcripts
    #[command(name = "fact-mine")]
    FactMine {
        /// Path to JSONL transcript or directory (with --batch)
        path: String,
        /// Process all .jsonl files in directory
        #[arg(long)]
        batch: bool,
        /// Show chunks without calling model
        #[arg(long)]
        dry_run: bool,
        /// Write JSON to file (default: stdout)
        #[arg(long, short)]
        output: Option<String>,
        /// Skip transcripts with fewer messages
        #[arg(long, default_value_t = 10)]
        min_messages: usize,
    },
    /// Extract facts from a transcript and store directly
    #[command(name = "fact-mine-store")]
    FactMineStore {
        /// Path to JSONL transcript
        path: String,
    },
}
|
|
|
|
|
|
|
|
|
|
|
|
/// Nested subcommand for `Digest`: which digest granularity to generate.
/// Each level takes an optional date-ish argument; `Auto` backfills whatever
/// is missing.
#[derive(Subcommand)]
enum DigestLevel {
    /// Generate daily digest
    Daily {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Generate weekly digest
    Weekly {
        /// Date or week label (default: current week)
        date: Option<String>,
    },
    /// Generate monthly digest
    Monthly {
        /// Month (YYYY-MM) or date (default: current month)
        date: Option<String>,
    },
    /// Generate all missing digests
    Auto,
}
|
|
|
|
|
|
|
|
|
|
|
|
/// Entry point: parse CLI args, dispatch to the matching `cmd_*` handler,
/// and translate any `Err(String)` into a message on stderr plus exit code 1.
fn main() {
    let cli = Cli::parse();

    // Every handler returns Result<(), String>; the match funnels all of
    // them into one `result` so error reporting happens in a single place.
    let result = match cli.command {
        Command::Search { query, pipeline, expand, full, debug }
            => cmd_search(&query, &pipeline, expand, full, debug),
        Command::Init => cmd_init(),
        Command::Migrate => cmd_migrate(),
        Command::Health => cmd_health(),
        Command::Fsck => cmd_fsck(),
        Command::Status => cmd_status(),
        Command::Graph => cmd_graph(),
        Command::Used { key } => cmd_used(&key),
        Command::Wrong { key, context }
            => cmd_wrong(&key, &context),
        Command::Gap { description }
            => cmd_gap(&description),
        Command::CapDegree { max_degree }
            => cmd_cap_degree(max_degree),
        Command::LinkOrphans { min_degree, links_per, sim_threshold }
            => cmd_link_orphans(min_degree, links_per, sim_threshold),
        Command::ConsolidateBatch { count, auto, agent }
            => cmd_consolidate_batch(count, auto, agent),
        Command::Log => cmd_log(),
        Command::Params => cmd_params(),
        Command::Link { key } => cmd_link(&key),
        Command::ReplayQueue { count }
            => cmd_replay_queue(count),
        Command::Interference { threshold }
            => cmd_interference(threshold),
        Command::LinkAdd { source, target, reason }
            => cmd_link_add(&source, &target, &reason),
        Command::LinkImpact { source, target }
            => cmd_link_impact(&source, &target),
        Command::ConsolidateSession => cmd_consolidate_session(),
        Command::ConsolidateFull => cmd_consolidate_full(),
        Command::TriangleClose { min_degree, sim_threshold, max_per_hub }
            => cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
        Command::DailyCheck => cmd_daily_check(),
        Command::ApplyAgent { all }
            => cmd_apply_agent(all),
        Command::Digest { level } => cmd_digest(level),
        Command::DigestLinks { apply }
            => cmd_digest_links(apply),
        Command::JournalEnrich { jsonl_path, entry_text, grep_line }
            => cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
        Command::ExperienceMine { jsonl_path }
            => cmd_experience_mine(jsonl_path),
        Command::ApplyConsolidation { apply, report }
            => cmd_apply_consolidation(apply, report.as_deref()),
        Command::Differentiate { key, apply }
            => cmd_differentiate(key.as_deref(), apply),
        Command::LinkAudit { apply }
            => cmd_link_audit(apply),
        Command::Trace { key } => cmd_trace(&key),
        Command::Spectral { k } => cmd_spectral(k),
        Command::SpectralSave { k } => cmd_spectral_save(k),
        Command::SpectralNeighbors { key, n }
            => cmd_spectral_neighbors(&key, n),
        Command::SpectralPositions { n }
            => cmd_spectral_positions(n),
        Command::SpectralSuggest { n }
            => cmd_spectral_suggest(n),
        Command::ListKeys { pattern }
            => cmd_list_keys(pattern.as_deref()),
        Command::ListEdges => cmd_list_edges(),
        Command::DumpJson => cmd_dump_json(),
        Command::NodeDelete { key }
            => cmd_node_delete(&key),
        Command::NodeRename { old_key, new_key }
            => cmd_node_rename(&old_key, &new_key),
        Command::JournalTsMigrate => cmd_journal_ts_migrate(),
        Command::LoadContext { stats }
            => cmd_load_context(stats),
        Command::Render { key } => cmd_render(&key),
        Command::History { full, key }
            => cmd_history(&key, full),
        Command::Write { key } => cmd_write(&key),
        Command::Import { files }
            => cmd_import(&files),
        Command::Export { files, all }
            => cmd_export(&files, all),
        Command::JournalWrite { text }
            => cmd_journal_write(&text),
        Command::JournalTail { n, full, level }
            => cmd_journal_tail(n, full, level),
        Command::Query { expr }
            => cmd_query(&expr),
        Command::LookupBump { keys }
            => cmd_lookup_bump(&keys),
        Command::Lookups { date }
            => cmd_lookups(date.as_deref()),
        Command::Daemon { sub, args }
            => cmd_daemon(sub.as_deref(), &args),
        Command::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
            => cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
        Command::FactMine { path, batch, dry_run, output, min_messages }
            => cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
        Command::FactMineStore { path }
            => cmd_fact_mine_store(&path),
    };

    // Uniform error path: print to stderr and exit non-zero for scripts/cron.
    if let Err(e) = result {
        eprintln!("Error: {}", e);
        process::exit(1);
    }
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
// ── Command implementations ─────────────────────────────────────────
|
2026-03-05 10:23:57 -05:00
|
|
|
|
|
2026-03-09 01:25:42 -04:00
|
|
|
|
fn cmd_search(terms: &[String], pipeline_args: &[String], expand: bool, full: bool, debug: bool) -> Result<(), String> {
|
2026-03-08 21:04:45 -04:00
|
|
|
|
use store::StoreView;
|
2026-03-09 01:19:04 -04:00
|
|
|
|
use std::collections::BTreeMap;
|
2026-03-05 10:23:57 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
if terms.is_empty() {
|
|
|
|
|
|
return Err("search requires at least one term".into());
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
2026-03-03 18:44:44 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let query: String = terms.join(" ");
|
2026-03-03 01:33:31 -05:00
|
|
|
|
|
2026-03-09 01:19:04 -04:00
|
|
|
|
// Parse pipeline (default: spread)
|
|
|
|
|
|
let pipeline: Vec<search::AlgoStage> = if pipeline_args.is_empty() {
|
|
|
|
|
|
vec![search::AlgoStage::parse("spread").unwrap()]
|
|
|
|
|
|
} else {
|
|
|
|
|
|
pipeline_args.iter()
|
|
|
|
|
|
.map(|a| search::AlgoStage::parse(a))
|
|
|
|
|
|
.collect::<Result<Vec<_>, _>>()?
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
if debug {
|
|
|
|
|
|
let names: Vec<String> = pipeline.iter().map(|s| format!("{}", s.algo)).collect();
|
|
|
|
|
|
println!("[search] pipeline: {}", names.join(" → "));
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let view = store::AnyView::load()?;
|
2026-03-09 01:19:04 -04:00
|
|
|
|
let graph = graph::build_graph_fast(&view);
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
|
2026-03-09 01:19:04 -04:00
|
|
|
|
// Build equal-weight terms from query
|
|
|
|
|
|
let terms: BTreeMap<String, f64> = query.split_whitespace()
|
|
|
|
|
|
.map(|t| (t.to_lowercase(), 1.0))
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
let (seeds, direct_hits) = search::match_seeds(&terms, &view);
|
|
|
|
|
|
|
|
|
|
|
|
if seeds.is_empty() {
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
eprintln!("No results for '{}'", query);
|
|
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-09 01:19:04 -04:00
|
|
|
|
if debug {
|
|
|
|
|
|
println!("[search] {} seeds from query '{}'", seeds.len(), query);
|
|
|
|
|
|
for (key, score) in &seeds {
|
|
|
|
|
|
println!(" {:.4} {}", score, key);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
let max_results = if expand { 15 } else { 5 };
|
|
|
|
|
|
let raw = search::run_pipeline(&pipeline, seeds, &graph, &view, debug, max_results);
|
2026-03-03 18:44:44 -05:00
|
|
|
|
|
2026-03-09 01:19:04 -04:00
|
|
|
|
let results: Vec<search::SearchResult> = raw.into_iter()
|
|
|
|
|
|
.map(|(key, activation)| {
|
|
|
|
|
|
let is_direct = direct_hits.contains(&key);
|
|
|
|
|
|
search::SearchResult { key, activation, is_direct, snippet: None }
|
|
|
|
|
|
})
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
if results.is_empty() {
|
|
|
|
|
|
eprintln!("No results for '{}'", query);
|
|
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Log retrieval
|
2026-03-03 12:56:15 -05:00
|
|
|
|
store::Store::log_retrieval_static(&query,
|
2026-03-03 01:33:31 -05:00
|
|
|
|
&results.iter().map(|r| r.key.clone()).collect::<Vec<_>>());
|
|
|
|
|
|
|
2026-03-09 01:19:04 -04:00
|
|
|
|
let bump_keys: Vec<&str> = results.iter().take(max_results).map(|r| r.key.as_str()).collect();
|
2026-03-03 18:36:25 -05:00
|
|
|
|
let _ = lookups::bump_many(&bump_keys);
|
|
|
|
|
|
|
2026-03-09 01:19:04 -04:00
|
|
|
|
for (i, r) in results.iter().enumerate().take(max_results) {
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let marker = if r.is_direct { "→" } else { " " };
|
2026-03-03 01:33:31 -05:00
|
|
|
|
let weight = view.node_weight(&r.key);
|
2026-03-08 21:04:45 -04:00
|
|
|
|
println!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
|
2026-03-09 01:25:42 -04:00
|
|
|
|
if full {
|
|
|
|
|
|
if let Some(content) = view.node_content(&r.key) {
|
|
|
|
|
|
println!();
|
|
|
|
|
|
for line in content.lines() {
|
|
|
|
|
|
println!(" {}", line);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!();
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-03-03 01:33:31 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_init() -> Result<(), String> {
|
2026-03-05 16:17:49 -05:00
|
|
|
|
let cfg = config::get();
|
|
|
|
|
|
|
|
|
|
|
|
// Ensure data directory exists
|
|
|
|
|
|
std::fs::create_dir_all(&cfg.data_dir)
|
|
|
|
|
|
.map_err(|e| format!("create data_dir: {}", e))?;
|
|
|
|
|
|
|
2026-03-05 16:42:10 -05:00
|
|
|
|
// Install filesystem files (not store nodes)
|
2026-03-05 16:21:13 -05:00
|
|
|
|
install_default_file(&cfg.data_dir, "instructions.md",
|
|
|
|
|
|
include_str!("../defaults/instructions.md"))?;
|
2026-03-05 16:42:10 -05:00
|
|
|
|
install_default_file(&cfg.data_dir, "on-consciousness.md",
|
|
|
|
|
|
include_str!("../defaults/on-consciousness.md"))?;
|
2026-03-05 16:21:13 -05:00
|
|
|
|
|
2026-03-05 16:27:19 -05:00
|
|
|
|
// Initialize store and seed default identity node if empty
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let count = store.init_from_markdown()?;
|
2026-03-08 20:25:09 -04:00
|
|
|
|
for key in &cfg.core_nodes {
|
2026-03-08 21:13:02 -04:00
|
|
|
|
if !store.nodes.contains_key(key) && key == "identity" {
|
|
|
|
|
|
let default = include_str!("../defaults/identity.md");
|
|
|
|
|
|
store.upsert(key, default)
|
|
|
|
|
|
.map_err(|e| format!("seed {}: {}", key, e))?;
|
|
|
|
|
|
println!("Seeded {} in store", key);
|
2026-03-08 20:25:09 -04:00
|
|
|
|
}
|
2026-03-05 16:27:19 -05:00
|
|
|
|
}
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Indexed {} memory units", count);
|
2026-03-05 16:17:49 -05:00
|
|
|
|
|
|
|
|
|
|
// Install hooks
|
|
|
|
|
|
daemon::install_hook()?;
|
|
|
|
|
|
|
2026-03-05 16:21:13 -05:00
|
|
|
|
// Create config if none exists
|
2026-03-05 16:17:49 -05:00
|
|
|
|
let config_path = std::env::var("POC_MEMORY_CONFIG")
|
|
|
|
|
|
.map(std::path::PathBuf::from)
|
|
|
|
|
|
.unwrap_or_else(|_| {
|
|
|
|
|
|
std::path::PathBuf::from(std::env::var("HOME").unwrap())
|
|
|
|
|
|
.join(".config/poc-memory/config.jsonl")
|
|
|
|
|
|
});
|
|
|
|
|
|
if !config_path.exists() {
|
|
|
|
|
|
let config_dir = config_path.parent().unwrap();
|
|
|
|
|
|
std::fs::create_dir_all(config_dir)
|
|
|
|
|
|
.map_err(|e| format!("create config dir: {}", e))?;
|
|
|
|
|
|
let example = include_str!("../config.example.jsonl");
|
|
|
|
|
|
std::fs::write(&config_path, example)
|
|
|
|
|
|
.map_err(|e| format!("write config: {}", e))?;
|
|
|
|
|
|
println!("Created config at {} — edit with your name and context groups",
|
|
|
|
|
|
config_path.display());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("Done. Run `poc-memory load-context --stats` to verify.");
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-05 16:21:13 -05:00
|
|
|
|
/// Install a bundled default file at `data_dir/name` unless it already
/// exists — user-edited files are never overwritten. Prints a notice when a
/// file is actually created.
fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -> Result<(), String> {
    let target = data_dir.join(name);
    if target.exists() {
        // Already installed (possibly user-modified): leave untouched.
        return Ok(());
    }
    std::fs::write(&target, content)
        .map_err(|e| format!("write {}: {}", name, e))?;
    println!("Created {}", target.display());
    Ok(())
}
|
|
|
|
|
|
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
/// Run on-disk format migrations. Thin CLI wrapper: all logic lives in the
/// `migrate` module.
fn cmd_migrate() -> Result<(), String> {
    migrate::migrate()
}
|
|
|
|
|
|
|
2026-03-08 18:31:19 -04:00
|
|
|
|
fn cmd_fsck() -> Result<(), String> {
|
2026-03-08 20:07:07 -04:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-08 21:04:45 -04:00
|
|
|
|
|
|
|
|
|
|
// Check node-key consistency
|
|
|
|
|
|
let mut issues = 0;
|
|
|
|
|
|
for (key, node) in &store.nodes {
|
|
|
|
|
|
if key != &node.key {
|
|
|
|
|
|
eprintln!("MISMATCH: map key '{}' vs node.key '{}'", key, node.key);
|
|
|
|
|
|
issues += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Check edge endpoints
|
|
|
|
|
|
let mut dangling = 0;
|
|
|
|
|
|
for rel in &store.relations {
|
|
|
|
|
|
if rel.deleted { continue; }
|
|
|
|
|
|
if !store.nodes.contains_key(&rel.source_key) {
|
|
|
|
|
|
eprintln!("DANGLING: edge source '{}'", rel.source_key);
|
|
|
|
|
|
dangling += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
if !store.nodes.contains_key(&rel.target_key) {
|
|
|
|
|
|
eprintln!("DANGLING: edge target '{}'", rel.target_key);
|
|
|
|
|
|
dangling += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Prune orphan edges
|
2026-03-08 20:07:07 -04:00
|
|
|
|
let mut to_tombstone = Vec::new();
|
2026-03-08 19:41:26 -04:00
|
|
|
|
for rel in &store.relations {
|
|
|
|
|
|
if rel.deleted { continue; }
|
|
|
|
|
|
if !store.nodes.contains_key(&rel.source_key)
|
|
|
|
|
|
|| !store.nodes.contains_key(&rel.target_key) {
|
2026-03-08 20:07:07 -04:00
|
|
|
|
let mut tombstone = rel.clone();
|
|
|
|
|
|
tombstone.deleted = true;
|
|
|
|
|
|
tombstone.version += 1;
|
|
|
|
|
|
to_tombstone.push(tombstone);
|
2026-03-08 19:41:26 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-03-08 20:07:07 -04:00
|
|
|
|
if !to_tombstone.is_empty() {
|
|
|
|
|
|
let count = to_tombstone.len();
|
|
|
|
|
|
store.append_relations(&to_tombstone)?;
|
|
|
|
|
|
for t in &to_tombstone {
|
2026-03-08 21:04:45 -04:00
|
|
|
|
if let Some(r) = store.relations.iter_mut().find(|r| r.uuid == t.uuid) {
|
2026-03-08 20:07:07 -04:00
|
|
|
|
r.deleted = true;
|
|
|
|
|
|
r.version = t.version;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
store.save()?;
|
2026-03-08 21:04:45 -04:00
|
|
|
|
eprintln!("Pruned {} orphan edges", count);
|
2026-03-08 19:41:26 -04:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("fsck: {} nodes, {} edges, {} issues, {} dangling",
|
|
|
|
|
|
store.nodes.len(), g.edge_count(), issues, dangling);
|
2026-03-08 19:41:26 -04:00
|
|
|
|
Ok(())
|
2026-03-08 18:31:19 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
fn cmd_health() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let g = store.build_graph();
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let report = graph::health_report(&g, &store);
|
|
|
|
|
|
print!("{}", report);
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_status() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let g = store.build_graph();
|
2026-03-08 21:04:45 -04:00
|
|
|
|
|
|
|
|
|
|
let mut type_counts = std::collections::HashMap::new();
|
|
|
|
|
|
for node in store.nodes.values() {
|
|
|
|
|
|
*type_counts.entry(format!("{:?}", node.node_type)).or_insert(0usize) += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
let mut types: Vec<_> = type_counts.iter().collect();
|
|
|
|
|
|
types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
|
|
|
|
|
|
|
|
|
|
|
|
println!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len());
|
|
|
|
|
|
print!("Types:");
|
|
|
|
|
|
for (t, c) in &types {
|
|
|
|
|
|
let label = match t.as_str() {
|
|
|
|
|
|
"Semantic" => "semantic",
|
|
|
|
|
|
"EpisodicSession" | "EpisodicDaily" | "EpisodicWeekly" | "EpisodicMonthly"
|
|
|
|
|
|
=> "episodic",
|
|
|
|
|
|
_ => t,
|
|
|
|
|
|
};
|
|
|
|
|
|
print!(" {}={}", label, c);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!();
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
println!("Graph edges: {} Communities: {}",
|
|
|
|
|
|
g.edge_count(), g.community_count());
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_graph() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let g = store.build_graph();
|
2026-03-08 21:04:45 -04:00
|
|
|
|
println!("Graph: {} nodes, {} edges, {} communities",
|
|
|
|
|
|
g.nodes().len(), g.edge_count(), g.community_count());
|
|
|
|
|
|
println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
|
|
|
|
|
|
g.small_world_sigma(), g.degree_power_law_exponent(),
|
|
|
|
|
|
g.degree_gini(), g.avg_clustering_coefficient());
|
|
|
|
|
|
Ok(())
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_used(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("used requires a key".into());
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let key = key.join(" ");
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
|
store.mark_used(&resolved);
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Marked '{}' as used", resolved);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_wrong(key: &str, context: &[String]) -> Result<(), String> {
|
|
|
|
|
|
let ctx = if context.is_empty() { None } else { Some(context.join(" ")) };
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let resolved = store.resolve_key(key)?;
|
|
|
|
|
|
store.mark_wrong(&resolved, ctx.as_deref());
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Marked '{}' as wrong", resolved);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_gap(description: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if description.is_empty() {
|
|
|
|
|
|
return Err("gap requires a description".into());
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let desc = description.join(" ");
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
store.record_gap(&desc);
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Recorded gap: {}", desc);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-01 08:18:07 -05:00
|
|
|
|
let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
|
|
|
|
|
|
println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
|
|
|
|
|
|
orphans, links, min_deg, links_per, sim_thresh);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-01 08:18:07 -05:00
|
|
|
|
let (hubs, pruned) = store.cap_degree(max_deg)?;
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option<String>) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
|
|
|
|
|
|
if let Some(agent_name) = agent {
|
move LLM-dependent modules into agents/ subdir
Separate the agent layer (everything that calls external LLMs or
orchestrates sequences of such calls) from core graph infrastructure.
agents/: llm, prompts, audit, consolidate, knowledge, enrich,
fact_mine, digest, daemon
Root: store/, graph, spectral, search, similarity, lookups, query,
config, util, migrate, neuro/ (scoring + rewrite)
Re-exports at crate root preserve backwards compatibility so
`crate::llm`, `crate::digest` etc. continue to work.
2026-03-08 21:27:41 -04:00
|
|
|
|
let prompt = agents::prompts::agent_prompt(&store, &agent_name, count)?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
println!("{}", prompt);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
} else {
|
move LLM-dependent modules into agents/ subdir
Separate the agent layer (everything that calls external LLMs or
orchestrates sequences of such calls) from core graph infrastructure.
agents/: llm, prompts, audit, consolidate, knowledge, enrich,
fact_mine, digest, daemon
Root: store/, graph, spectral, search, similarity, lookups, query,
config, util, migrate, neuro/ (scoring + rewrite)
Re-exports at crate root preserve backwards compatibility so
`crate::llm`, `crate::digest` etc. continue to work.
2026-03-08 21:27:41 -04:00
|
|
|
|
agents::prompts::consolidation_batch(&store, count, auto)
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_log() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
for event in store.retrieval_log.iter().rev().take(20) {
|
|
|
|
|
|
println!("[{}] q=\"{}\" → {} results",
|
|
|
|
|
|
event.timestamp, event.query, event.results.len());
|
|
|
|
|
|
for r in &event.results {
|
|
|
|
|
|
println!(" {}", r);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_params() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
println!("decay_factor: {}", store.params.decay_factor);
|
|
|
|
|
|
println!("use_boost: {}", store.params.use_boost);
|
|
|
|
|
|
println!("prune_threshold: {}", store.params.prune_threshold);
|
|
|
|
|
|
println!("edge_decay: {}", store.params.edge_decay);
|
|
|
|
|
|
println!("max_hops: {}", store.params.max_hops);
|
|
|
|
|
|
println!("min_activation: {}", store.params.min_activation);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_link(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("link requires a key".into());
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let key = key.join(" ");
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("Neighbors of '{}':", resolved);
|
2026-03-03 12:07:04 -05:00
|
|
|
|
query::run_query(&store, &g,
|
|
|
|
|
|
&format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_replay_queue(count: usize) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let queue = neuro::replay_queue(&store, count);
|
|
|
|
|
|
println!("Replay queue ({} items):", queue.len());
|
|
|
|
|
|
for (i, item) in queue.iter().enumerate() {
|
2026-03-03 01:33:31 -05:00
|
|
|
|
println!(" {:2}. [{:.3}] {:>10} {} (interval={}d, emotion={:.1}, spectral={:.1})",
|
|
|
|
|
|
i + 1, item.priority, item.classification, item.key,
|
|
|
|
|
|
item.interval_days, item.emotion, item.outlier_score);
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_consolidate_session() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let plan = neuro::consolidation_plan(&store);
|
|
|
|
|
|
println!("{}", neuro::format_plan(&plan));
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-01 07:14:03 -05:00
|
|
|
|
fn cmd_consolidate_full() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-03 17:18:18 -05:00
|
|
|
|
consolidate::consolidate_full(&mut store)
|
2026-03-01 07:14:03 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
|
2026-03-01 07:35:29 -05:00
|
|
|
|
println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
|
|
|
|
|
|
min_degree, sim_threshold, max_per_hub);
|
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-01 07:35:29 -05:00
|
|
|
|
let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
|
|
|
|
|
|
println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
fn cmd_daily_check() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let report = neuro::daily_check(&store);
|
|
|
|
|
|
print!("{}", report);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
|
|
|
|
|
let reason = reason.join(" ");
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
|
2026-03-01 00:33:46 -05:00
|
|
|
|
// Refine target to best-matching section
|
|
|
|
|
|
let source_content = store.nodes.get(&source)
|
|
|
|
|
|
.map(|n| n.content.as_str()).unwrap_or("");
|
|
|
|
|
|
let target = neuro::refine_target(&store, source_content, &target);
|
|
|
|
|
|
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
// Find UUIDs
|
|
|
|
|
|
let source_uuid = store.nodes.get(&source)
|
|
|
|
|
|
.map(|n| n.uuid)
|
|
|
|
|
|
.ok_or_else(|| format!("source not found: {}", source))?;
|
|
|
|
|
|
let target_uuid = store.nodes.get(&target)
|
|
|
|
|
|
.map(|n| n.uuid)
|
|
|
|
|
|
.ok_or_else(|| format!("target not found: {}", target))?;
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
// Check for existing link
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let exists = store.relations.iter().any(|r|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
!r.deleted &&
|
|
|
|
|
|
((r.source_key == source && r.target_key == target) ||
|
|
|
|
|
|
(r.source_key == target && r.target_key == source)));
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
if exists {
|
2026-03-08 21:04:45 -04:00
|
|
|
|
println!("Link already exists: {} ↔ {}", source, target);
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let rel = store::new_relation(
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
source_uuid, target_uuid,
|
2026-03-08 21:04:45 -04:00
|
|
|
|
store::RelationType::Link, 0.8,
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
&source, &target,
|
|
|
|
|
|
);
|
|
|
|
|
|
store.add_relation(rel)?;
|
2026-03-08 21:04:45 -04:00
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Linked: {} → {} ({})", source, target, reason);
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
|
|
let impact = g.link_impact(&source, &target);
|
|
|
|
|
|
|
|
|
|
|
|
println!("Link impact: {} → {}", source, target);
|
|
|
|
|
|
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
|
|
|
|
|
|
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
|
|
|
|
|
|
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
|
|
|
|
|
|
println!(" ΔGini: {:+.6}", impact.delta_gini);
|
|
|
|
|
|
println!(" Assessment: {}", impact.assessment);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:16:52 -04:00
|
|
|
|
/// Apply links from a single agent result JSON file.
|
|
|
|
|
|
/// Returns (links_applied, errors).
|
|
|
|
|
|
fn apply_agent_file(
|
|
|
|
|
|
store: &mut store::Store,
|
|
|
|
|
|
data: &serde_json::Value,
|
|
|
|
|
|
) -> (usize, usize) {
|
|
|
|
|
|
let agent_result = data.get("agent_result").or(Some(data));
|
|
|
|
|
|
let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
|
|
|
|
|
|
Some(l) => l,
|
|
|
|
|
|
None => return (0, 0),
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
let entry_text = data.get("entry_text")
|
|
|
|
|
|
.and_then(|v| v.as_str())
|
|
|
|
|
|
.unwrap_or("");
|
|
|
|
|
|
|
|
|
|
|
|
if let (Some(start), Some(end)) = (
|
|
|
|
|
|
agent_result.and_then(|r| r.get("source_start")).and_then(|v| v.as_u64()),
|
|
|
|
|
|
agent_result.and_then(|r| r.get("source_end")).and_then(|v| v.as_u64()),
|
|
|
|
|
|
) {
|
|
|
|
|
|
println!(" Source: L{}-L{}", start, end);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
let mut applied = 0;
|
|
|
|
|
|
let mut errors = 0;
|
|
|
|
|
|
|
|
|
|
|
|
for link in links {
|
|
|
|
|
|
let target = match link.get("target").and_then(|v| v.as_str()) {
|
|
|
|
|
|
Some(t) => t,
|
|
|
|
|
|
None => continue,
|
|
|
|
|
|
};
|
|
|
|
|
|
let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
|
|
|
|
|
|
|
|
|
|
|
|
if let Some(note) = target.strip_prefix("NOTE:") {
|
|
|
|
|
|
println!(" NOTE: {} — {}", note, reason);
|
|
|
|
|
|
continue;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
let resolved = match store.resolve_key(target) {
|
|
|
|
|
|
Ok(r) => r,
|
|
|
|
|
|
Err(_) => {
|
|
|
|
|
|
println!(" SKIP {} (not found in graph)", target);
|
|
|
|
|
|
continue;
|
|
|
|
|
|
}
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
let source_key = match store.find_journal_node(entry_text) {
|
|
|
|
|
|
Some(k) => k,
|
|
|
|
|
|
None => {
|
|
|
|
|
|
println!(" SKIP {} (no matching journal node)", target);
|
|
|
|
|
|
continue;
|
|
|
|
|
|
}
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
let source_uuid = match store.nodes.get(&source_key) {
|
|
|
|
|
|
Some(n) => n.uuid,
|
|
|
|
|
|
None => continue,
|
|
|
|
|
|
};
|
|
|
|
|
|
let target_uuid = match store.nodes.get(&resolved) {
|
|
|
|
|
|
Some(n) => n.uuid,
|
|
|
|
|
|
None => continue,
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
let rel = store::new_relation(
|
|
|
|
|
|
source_uuid, target_uuid,
|
|
|
|
|
|
store::RelationType::Link,
|
|
|
|
|
|
0.5,
|
|
|
|
|
|
&source_key, &resolved,
|
|
|
|
|
|
);
|
|
|
|
|
|
if let Err(e) = store.add_relation(rel) {
|
|
|
|
|
|
eprintln!(" Error adding relation: {}", e);
|
|
|
|
|
|
errors += 1;
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!(" LINK {} → {} ({})", source_key, resolved, reason);
|
|
|
|
|
|
applied += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
(applied, errors)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// Process agent-result JSON files from `<memory>/agent-results`, applying
/// the links each file proposes via `apply_agent_file`.
///
/// Archiving: when `process_all` is false, each processed file is moved into
/// `agent-results/done`; when true, files are left in place.
/// NOTE(review): archiving only when NOT `process_all` looks inverted at
/// first glance — confirm this is the intended semantics.
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
    let results_dir = store::memory_dir().join("agent-results");

    // Nothing to do if the directory was never created.
    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }

    let mut store = store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;

    // Collect only *.json entries, sorted by path for deterministic order.
    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());

    for entry in &files {
        let path = entry.path();
        // Unreadable or malformed files count as errors but do not abort
        // the batch.
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!("  Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!("  Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        let (a, e) = apply_agent_file(&mut store, &data);
        applied += a;
        errors += e;

        if !process_all {
            // Archive the processed file; rename failure is deliberately
            // ignored (best-effort move).
            let done_dir = util::memory_subdir("agent-results/done")?;
            let dest = done_dir.join(path.file_name().unwrap());
            std::fs::rename(&path, &dest).ok();
        }
    }

    // Persist only if at least one link was actually added.
    if applied > 0 {
        store.save()?;
    }

    println!("\nApplied {} links ({} errors, {} files processed)",
        applied, errors, files.len());
    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_digest(level: DigestLevel) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
match level {
|
|
|
|
|
|
DigestLevel::Auto => digest::digest_auto(&mut store),
|
|
|
|
|
|
DigestLevel::Daily { date } => {
|
|
|
|
|
|
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
|
|
|
|
|
digest::generate(&mut store, "daily", &arg)
|
|
|
|
|
|
}
|
|
|
|
|
|
DigestLevel::Weekly { date } => {
|
|
|
|
|
|
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
|
|
|
|
|
digest::generate(&mut store, "weekly", &arg)
|
|
|
|
|
|
}
|
|
|
|
|
|
DigestLevel::Monthly { date } => {
|
|
|
|
|
|
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
|
|
|
|
|
digest::generate(&mut store, "monthly", &arg)
|
2026-02-28 23:58:05 -05:00
|
|
|
|
}
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
|
2026-03-05 15:30:57 -05:00
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let links = digest::parse_all_digest_links(&store);
|
|
|
|
|
|
drop(store);
|
|
|
|
|
|
println!("Found {} unique links from digest nodes", links.len());
|
2026-03-01 00:10:03 -05:00
|
|
|
|
|
|
|
|
|
|
if !do_apply {
|
|
|
|
|
|
for (i, link) in links.iter().enumerate() {
|
|
|
|
|
|
println!(" {:3}. {} → {}", i + 1, link.source, link.target);
|
|
|
|
|
|
if !link.reason.is_empty() {
|
|
|
|
|
|
println!(" ({})", &link.reason[..link.reason.len().min(80)]);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
println!("\nTo apply: poc-memory digest-links --apply");
|
|
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-01 00:10:03 -05:00
|
|
|
|
let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
|
|
|
|
|
|
println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_journal_enrich(jsonl_path: &str, entry_text: &str, grep_line: usize) -> Result<(), String> {
|
|
|
|
|
|
if !std::path::Path::new(jsonl_path).is_file() {
|
2026-03-01 00:10:03 -05:00
|
|
|
|
return Err(format!("JSONL not found: {}", jsonl_path));
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-03 17:18:18 -05:00
|
|
|
|
enrich::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
|
2026-03-01 00:10:03 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_experience_mine(jsonl_path: Option<String>) -> Result<(), String> {
|
|
|
|
|
|
let jsonl_path = match jsonl_path {
|
|
|
|
|
|
Some(p) => p,
|
|
|
|
|
|
None => find_current_transcript()
|
|
|
|
|
|
.ok_or("no JSONL transcripts found")?,
|
2026-03-01 01:47:31 -05:00
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
if !std::path::Path::new(jsonl_path.as_str()).is_file() {
|
|
|
|
|
|
return Err(format!("JSONL not found: {}", jsonl_path));
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-07 12:01:38 -05:00
|
|
|
|
let count = enrich::experience_mine(&mut store, &jsonl_path, None)?;
|
2026-03-01 01:47:31 -05:00
|
|
|
|
println!("Done: {} new entries mined.", count);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_apply_consolidation(do_apply: bool, report_file: Option<&str>) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-03 17:18:18 -05:00
|
|
|
|
consolidate::apply_consolidation(&mut store, do_apply, report_file)
|
2026-03-01 00:10:03 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// Differentiate a file-level hub: redistribute its links across its
/// sections.
///
/// With a key: preview the proposed link moves (grouped by target section),
/// and apply + save them when `do_apply` is set. Without a key: list all
/// hubs eligible for differentiation.
fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;

    if let Some(key) = key_arg {
        let resolved = store.resolve_key(key)?;
        // `differentiate_hub` returns None when the node is not a hub with
        // sections — surface that as a user-facing error.
        let moves = neuro::differentiate_hub(&store, &resolved)
            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;

        // Group by target section for display
        let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
            std::collections::BTreeMap::new();
        for mv in &moves {
            by_section.entry(mv.to_section.clone()).or_default().push(mv);
        }

        println!("Hub '{}' — {} links to redistribute across {} sections\n",
            resolved, moves.len(), by_section.len());

        for (section, section_moves) in &by_section {
            println!("  {} ({} links):", section, section_moves.len());
            // Show at most five moves per section to keep the preview short.
            for mv in section_moves.iter().take(5) {
                println!("    [{:.3}] {} — {}", mv.similarity,
                    mv.neighbor_key, mv.neighbor_snippet);
            }
            if section_moves.len() > 5 {
                println!("    ... and {} more", section_moves.len() - 5);
            }
        }

        // Preview-only path: tell the user how to actually apply.
        if !do_apply {
            println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
            return Ok(());
        }

        let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
        store.save()?;
        println!("\nApplied: {} Skipped: {}", applied, skipped);
    } else {
        // No key given: list candidate hubs instead.
        let hubs = neuro::find_differentiable_hubs(&store);
        if hubs.is_empty() {
            println!("No file-level hubs with sections found above threshold");
            return Ok(());
        }

        println!("Differentiable hubs (file-level nodes with sections):\n");
        for (key, degree, sections) in &hubs {
            println!("  {:40} deg={:3} sections={}", key, degree, sections);
        }
        println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
    }

    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_link_audit(apply: bool) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-03 17:18:18 -05:00
|
|
|
|
let stats = audit::link_audit(&mut store, apply)?;
|
2026-03-01 00:48:44 -05:00
|
|
|
|
println!("\n{}", "=".repeat(60));
|
|
|
|
|
|
println!("Link audit complete:");
|
|
|
|
|
|
println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
|
|
|
|
|
|
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
|
|
|
|
|
|
println!("{}", "=".repeat(60));
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// Trace a node: show the node itself plus its neighbors, grouped into
/// session entries, daily digests, weekly/monthly digests, and semantic
/// links.
///
/// `key` is the CLI's remaining args, joined with spaces to form one lookup
/// key; an empty slice is an error.
fn cmd_trace(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("trace requires a key".into());
    }
    // Multi-word keys arrive as separate CLI args; rejoin them.
    let key = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();

    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;

    // Display the node itself
    println!("=== {} ===", resolved);
    println!("Type: {:?} Weight: {:.2}",
        node.node_type, node.weight);
    if !node.source_ref.is_empty() {
        println!("Source: {}", node.source_ref);
    }

    // Show content preview
    let preview = util::truncate(&node.content, 200, "...");
    println!("\n{}\n", preview);

    // Walk neighbors, grouped by node type
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    // Weekly and monthly digests share one display bucket (see match below).
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();

    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            match nnode.node_type {
                store::NodeType::EpisodicSession =>
                    episodic_session.push(entry),
                store::NodeType::EpisodicDaily =>
                    episodic_daily.push(entry),
                store::NodeType::EpisodicWeekly
                | store::NodeType::EpisodicMonthly =>
                    episodic_weekly.push(entry),
                store::NodeType::Semantic =>
                    semantic.push(entry),
            }
        }
    }

    if !episodic_weekly.is_empty() {
        println!("Weekly digests:");
        for (k, s, n) in &episodic_weekly {
            // Preview: first line of content, capped at 80 chars.
            let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!("  [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_daily.is_empty() {
        println!("Daily digests:");
        for (k, s, n) in &episodic_daily {
            let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!("  [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_session.is_empty() {
        println!("Session entries:");
        for (k, s, n) in &episodic_session {
            // Session previews skip blank lines and HTML-comment headers.
            let preview = util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            println!("  [{:.2}] {}", s, k);
            if !n.source_ref.is_empty() {
                println!("    ↳ source: {}", n.source_ref);
            }
            println!("    {}", preview);
        }
    }

    if !semantic.is_empty() {
        println!("Semantic links:");
        for (k, s, _) in &semantic {
            println!("  [{:.2}] {}", s, k);
        }
    }

    println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len());

    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_spectral(k: usize) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
2026-03-03 01:33:31 -05:00
|
|
|
|
let g = graph::build_graph(&store);
|
|
|
|
|
|
let result = spectral::decompose(&g, k);
|
|
|
|
|
|
spectral::print_summary(&result, &g);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_spectral_save(k: usize) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
2026-03-03 01:33:31 -05:00
|
|
|
|
let g = graph::build_graph(&store);
|
|
|
|
|
|
let result = spectral::decompose(&g, k);
|
|
|
|
|
|
let emb = spectral::to_embedding(&result);
|
|
|
|
|
|
spectral::save_embedding(&emb)?;
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// `spectral-neighbors` command: for one node, show which spectral axes it
/// loads most heavily on, then its `n` nearest neighbors in the saved
/// spectral embedding space.
///
/// Requires a previously saved embedding (see `cmd_spectral_save`);
/// `load_embedding` fails otherwise.
fn cmd_spectral_neighbors(key: &str, n: usize) -> Result<(), String> {
    let emb = spectral::load_embedding()?;

    // Axes where this node's coordinates are largest in magnitude.
    let dims = spectral::dominant_dimensions(&emb, &[key]);
    println!("Node: {} (embedding: {} dims)", key, emb.dims);
    println!("Top spectral axes:");
    // Only the top 5 axes are shown, each with its eigenvalue and loading.
    for &(d, loading) in dims.iter().take(5) {
        println!(" axis {:<2} (λ={:.4}): loading={:.5}", d, emb.eigenvalues[d], loading);
    }

    println!("\nNearest neighbors in spectral space:");
    let neighbors = spectral::nearest_neighbors(&emb, key, n);
    // 1-based rank, distance, then the neighbor's key.
    for (i, (k, dist)) in neighbors.iter().enumerate() {
        println!(" {:>2}. {:.5} {}", i + 1, dist, k);
    }
    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_spectral_positions(n: usize) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
2026-03-03 01:33:31 -05:00
|
|
|
|
let emb = spectral::load_embedding()?;
|
|
|
|
|
|
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
let communities = g.communities().clone();
|
|
|
|
|
|
|
|
|
|
|
|
let positions = spectral::analyze_positions(&emb, &communities);
|
|
|
|
|
|
|
|
|
|
|
|
println!("Spectral position analysis — {} nodes", positions.len());
|
|
|
|
|
|
println!(" outlier: dist_to_center / median (>1 = unusual position)");
|
|
|
|
|
|
println!(" bridge: dist_to_center / dist_to_nearest_other_community");
|
|
|
|
|
|
println!();
|
|
|
|
|
|
|
|
|
|
|
|
let mut bridges: Vec<&spectral::SpectralPosition> = Vec::new();
|
|
|
|
|
|
let mut outliers: Vec<&spectral::SpectralPosition> = Vec::new();
|
|
|
|
|
|
|
|
|
|
|
|
for pos in positions.iter().take(n) {
|
|
|
|
|
|
match spectral::classify_position(pos) {
|
|
|
|
|
|
"bridge" => bridges.push(pos),
|
2026-03-08 21:04:45 -04:00
|
|
|
|
_ => outliers.push(pos),
|
2026-03-03 01:33:31 -05:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if !bridges.is_empty() {
|
|
|
|
|
|
println!("=== Bridges (between communities) ===");
|
|
|
|
|
|
for pos in &bridges {
|
|
|
|
|
|
println!(" [{:.2}/{:.2}] c{} → c{} {}",
|
|
|
|
|
|
pos.outlier_score, pos.bridge_score,
|
|
|
|
|
|
pos.community, pos.nearest_community, pos.key);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!();
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("=== Top outliers (far from own community center) ===");
|
|
|
|
|
|
for pos in positions.iter().take(n) {
|
|
|
|
|
|
let class = spectral::classify_position(pos);
|
|
|
|
|
|
println!(" {:>10} outlier={:.2} bridge={:.2} c{:<3} {}",
|
|
|
|
|
|
class, pos.outlier_score, pos.bridge_score,
|
|
|
|
|
|
pos.community, pos.key);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// `spectral-suggest` command: propose link candidates by finding the `n`
/// closest pairs of nodes in spectral space that are NOT already linked.
/// Restricted to "well-connected" nodes (degree >= 3) so suggestions are
/// anchored in established parts of the graph.
fn cmd_spectral_suggest(n: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let emb = spectral::load_embedding()?;
    let g = store.build_graph();
    let communities = g.communities();

    // Only consider nodes with enough existing edges; sparse nodes have
    // unreliable spectral coordinates.
    let min_degree = 3;
    let well_connected: std::collections::HashSet<&str> = emb.coords.keys()
        .filter(|k| g.degree(k) >= min_degree)
        .map(|k| k.as_str())
        .collect();

    // Rebuild the embedding with only the well-connected subset of
    // coordinates; eigenvalues/dims carry over unchanged.
    let filtered_emb = spectral::SpectralEmbedding {
        dims: emb.dims,
        eigenvalues: emb.eigenvalues.clone(),
        coords: emb.coords.iter()
            .filter(|(k, _)| well_connected.contains(k.as_str()))
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect(),
    };

    // Symmetric set of already-linked key pairs: both (a,b) and (b,a) are
    // inserted so lookups don't depend on edge direction.
    let mut linked: std::collections::HashSet<(String, String)> =
        std::collections::HashSet::new();
    for rel in &store.relations {
        linked.insert((rel.source_key.clone(), rel.target_key.clone()));
        linked.insert((rel.target_key.clone(), rel.source_key.clone()));
    }

    // Progress note goes to stderr so stdout stays machine-consumable.
    eprintln!("Searching {} well-connected nodes (degree >= {})...",
        filtered_emb.coords.len(), min_degree);
    let pairs = spectral::unlinked_neighbors(&filtered_emb, &linked, n);

    println!("{} closest unlinked pairs (candidates for extractor agents):", pairs.len());
    for (i, (k1, k2, dist)) in pairs.iter().enumerate() {
        // Community labels rendered as "cN"; "?" when a key has no
        // community assignment.
        let c1 = communities.get(k1)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        let c2 = communities.get(k2)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        let cross = if c1 != c2 { " [cross-community]" } else { "" };
        println!(" {:>2}. dist={:.4} {} ({}) ↔ {} ({}){}",
            i + 1, dist, k1, c1, k2, c2, cross);
    }

    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// `list-keys` command: print node keys, optionally filtered by a simple
/// glob-ish pattern. Matching is case-insensitive.
///
/// Pattern forms:
///   `*sub*` or bare `sub` -> substring match
///   `*suffix`             -> suffix match
///   `prefix*`             -> prefix match
/// A `*` in the middle of a pattern is not interpreted as a wildcard.
/// With no pattern, delegates to the query engine's sorted listing.
fn cmd_list_keys(pattern: Option<&str>) -> Result<(), String> {
    let store = store::Store::load()?;
    let g = store.build_graph();

    if let Some(pat) = pattern {
        let pat_lower = pat.to_lowercase();
        // Exactly one of (prefix, suffix, middle) is Some, chosen by
        // where the asterisks sit. A bare pattern behaves like *pat*.
        let (prefix, suffix, middle) = if pat_lower.starts_with('*') && pat_lower.ends_with('*') {
            (None, None, Some(pat_lower.trim_matches('*').to_string()))
        } else if pat_lower.starts_with('*') {
            (None, Some(pat_lower.trim_start_matches('*').to_string()), None)
        } else if pat_lower.ends_with('*') {
            (Some(pat_lower.trim_end_matches('*').to_string()), None, None)
        } else {
            (None, None, Some(pat_lower.clone()))
        };
        let mut keys: Vec<_> = store.nodes.keys()
            .filter(|k| {
                let kl = k.to_lowercase();
                if let Some(ref m) = middle { kl.contains(m.as_str()) }
                else if let Some(ref p) = prefix { kl.starts_with(p.as_str()) }
                else if let Some(ref s) = suffix { kl.ends_with(s.as_str()) }
                else { true }
            })
            .cloned()
            .collect();
        keys.sort();
        for k in keys { println!("{}", k); }
        Ok(())
    } else {
        // No pattern: full listing via the query engine, sorted by key.
        query::run_query(&store, &g, "* | sort key asc")
    }
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_list_edges() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
2026-02-28 22:30:03 -05:00
|
|
|
|
for rel in &store.relations {
|
|
|
|
|
|
println!("{}\t{}\t{:.2}\t{:?}",
|
|
|
|
|
|
rel.source_key, rel.target_key, rel.strength, rel.rel_type);
|
|
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fn cmd_dump_json() -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
2026-02-28 22:30:03 -05:00
|
|
|
|
let json = serde_json::to_string_pretty(&store)
|
|
|
|
|
|
.map_err(|e| format!("serialize: {}", e))?;
|
|
|
|
|
|
println!("{}", json);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_node_delete(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("node-delete requires a key".into());
|
2026-02-28 22:40:17 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let key = key.join(" ");
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-02-28 22:40:17 -05:00
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
2026-02-28 23:49:43 -05:00
|
|
|
|
store.delete_node(&resolved)?;
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Deleted '{}'", resolved);
|
|
|
|
|
|
Ok(())
|
2026-02-28 22:40:17 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_node_rename(old_key: &str, new_key: &str) -> Result<(), String> {
|
2026-03-05 10:23:57 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let old_resolved = store.resolve_key(old_key)?;
|
|
|
|
|
|
store.rename_node(&old_resolved, new_key)?;
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Renamed '{}' → '{}'", old_resolved, new_key);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// `journal-ts-migrate` command: one-shot repair of nodes whose
/// `created_at` timestamp is bogus (outside a sane epoch range).
///
/// For each bad node, first try to recover the real time from the key's
/// embedded `j-YYYY-MM-DD[t-]HH-MM` stamp (interpreted in local time);
/// otherwise fall back to the node's append-log `timestamp`. Repaired
/// nodes are re-appended to the log and the derived state is saved.
fn cmd_journal_ts_migrate() -> Result<(), String> {
    use chrono::{NaiveDateTime, TimeZone, Local};

    let mut store = store::Store::load()?;
    // Captures: date, hour, minute from journal keys like
    // "j-2026-03-05t15-41" or "j-2026-03-05-15-41".
    let re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})[t-](\d{2})-(\d{2})").unwrap();

    // Sanity window for epoch seconds: 978_307_200 = 2001-01-01,
    // 4_102_444_800 = 2100-01-01. Anything outside is considered corrupt.
    let valid_range = 978_307_200i64..=4_102_444_800i64;

    // Collect keys first so we can take `get_mut` borrows in the loop.
    let to_update: Vec<_> = store.nodes.values()
        .filter(|n| !valid_range.contains(&n.created_at))
        .map(|n| n.key.clone())
        .collect();

    let mut updated = 0usize;

    for key in &to_update {
        // Preferred source: the timestamp embedded in the key itself.
        if let Some(caps) = re.captures(key) {
            let date_str = format!("{} {}:{}", &caps[1], &caps[2], &caps[3]);
            if let Ok(ndt) = NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d %H:%M") {
                // `earliest()` picks a concrete instant when the local
                // time is ambiguous (DST transitions); None if invalid.
                if let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
                    if let Some(node) = store.nodes.get_mut(key) {
                        node.created_at = dt.timestamp();
                        node.version += 1;
                    }
                    updated += 1;
                    continue;
                }
            }
        }
        // Fallback: copy the log-append timestamp into created_at.
        if let Some(node) = store.nodes.get_mut(key) {
            node.created_at = node.timestamp;
            node.version += 1;
            updated += 1;
        }
    }

    // Only re-append nodes whose created_at is now within the sane range
    // (a fallback could still have produced a bad value).
    let nodes_to_write: Vec<_> = to_update.iter()
        .filter_map(|k| store.nodes.get(k))
        .filter(|n| valid_range.contains(&n.created_at))
        .cloned()
        .collect();

    if !nodes_to_write.is_empty() {
        store.append_nodes(&nodes_to_write)?;
        store.save()?;
    }

    println!("journal-ts-migrate: updated {}/{}", updated, to_update.len());
    Ok(())
}
|
|
|
|
|
|
|
2026-03-05 16:10:46 -05:00
|
|
|
|
/// Gather the (key, content) entries for one configured context group.
///
/// Three source kinds:
/// - `Journal`: recent `EpisodicSession` nodes within the configured
///   day window, capped to the newest `journal_max` entries.
/// - `File`: read each configured key as a file under `cfg.data_dir`.
/// - `Store`: render each configured key via the store.
/// Empty (after trim) content is skipped for File/Store sources.
fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
    match group.source {
        config::ContextSource::Journal => {
            let mut entries = Vec::new();
            let now = store::now_epoch();
            // Window in seconds: journal_days worth of 24h days.
            let window: i64 = cfg.journal_days as i64 * 24 * 3600;
            let cutoff = now - window;
            let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();

            // Best-effort timestamp for a journal node, in priority order:
            // created_at if set, else the date embedded in the key
            // (midnight local time), else the raw log timestamp.
            let journal_ts = |n: &store::Node| -> i64 {
                if n.created_at > 0 { return n.created_at; }
                if let Some(caps) = key_date_re.captures(&n.key) {
                    use chrono::{NaiveDate, TimeZone, Local};
                    if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
                        if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
                            return dt.timestamp();
                        }
                    }
                }
                n.timestamp
            };

            let mut journal_nodes: Vec<_> = store.nodes.values()
                .filter(|n| n.node_type == store::NodeType::EpisodicSession && journal_ts(n) >= cutoff)
                .collect();
            // Oldest-first so the skip below drops the oldest entries.
            journal_nodes.sort_by_key(|n| journal_ts(n));

            // Keep only the newest `journal_max` entries.
            let max = cfg.journal_max;
            let skip = journal_nodes.len().saturating_sub(max);
            for node in journal_nodes.iter().skip(skip) {
                entries.push((node.key.clone(), node.content.clone()));
            }
            entries
        }
        config::ContextSource::File => {
            // Keys are relative file paths under the data directory;
            // unreadable or blank files are silently skipped.
            group.keys.iter().filter_map(|key| {
                let content = std::fs::read_to_string(cfg.data_dir.join(key)).ok()?;
                if content.trim().is_empty() { return None; }
                Some((key.clone(), content.trim().to_string()))
            }).collect()
        }
        config::ContextSource::Store => {
            // Keys are store file-keys rendered to text; missing or blank
            // renders are skipped.
            group.keys.iter().filter_map(|key| {
                let content = store.render_file(key)?;
                if content.trim().is_empty() { return None; }
                Some((key.clone(), content.trim().to_string()))
            }).collect()
        }
    }
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// `load-context` command: emit the full session-start context (all
/// configured context groups) to stdout, or — with `stats` — just a table
/// of per-group item/word counts.
///
/// The plain output format is consumed by the session-startup hook, so
/// the headers/delimiters here are part of the external contract.
fn cmd_load_context(stats: bool) -> Result<(), String> {
    let cfg = config::get();
    let store = store::Store::load()?;

    if stats {
        // Summary mode: count entries and whitespace-separated words per
        // group, print an aligned table, and exit without dumping content.
        let mut total_words = 0;
        let mut total_entries = 0;
        println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS");
        println!("{}", "-".repeat(42));

        for group in &cfg.context_groups {
            let entries = get_group_content(group, &store, cfg);
            let words: usize = entries.iter()
                .map(|(_, c)| c.split_whitespace().count())
                .sum();
            let count = entries.len();
            println!("{:<25} {:>6} {:>8}", group.label, count, words);
            total_words += words;
            total_entries += count;
        }

        println!("{}", "-".repeat(42));
        println!("{:<25} {:>6} {:>8}", "TOTAL", total_entries, total_words);
        return Ok(());
    }

    println!("=== MEMORY SYSTEM ({}) ===", cfg.assistant_name);
    println!();

    for group in &cfg.context_groups {
        let entries = get_group_content(group, &store, cfg);
        // Journal groups get one shared banner showing how many of the
        // configured maximum made the cut.
        if !entries.is_empty() && group.source == config::ContextSource::Journal {
            println!("--- recent journal entries ({}/{}) ---",
                entries.len(), cfg.journal_max);
        }
        for (key, content) in entries {
            // Journal entries use markdown headers; everything else gets
            // a delimiter line naming the key and its group.
            if group.source == config::ContextSource::Journal {
                println!("## {}", key);
            } else {
                println!("--- {} ({}) ---", key, group.label);
            }
            println!("{}\n", content);
        }
    }

    println!("=== END MEMORY LOAD ===");
    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_render(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("render requires a key".into());
|
add load-context and render commands
load-context replaces the shell hook's file-by-file cat approach.
Queries the capnp store directly for all session-start context:
orientation, identity, reflections, interests, inner life, people,
active context, shared reference, technical, and recent journal.
Sections are gathered per-file and output in priority order.
Journal entries filtered to last 7 days by key-embedded date,
capped at 20 most recent.
render outputs a single node's content to stdout.
The load-memory.sh hook now delegates entirely to
`poc-memory load-context` — capnp store is the single source
of truth for session startup context.
2026-02-28 22:53:39 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let key = key.join(" ");
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
add load-context and render commands
load-context replaces the shell hook's file-by-file cat approach.
Queries the capnp store directly for all session-start context:
orientation, identity, reflections, interests, inner life, people,
active context, shared reference, technical, and recent journal.
Sections are gathered per-file and output in priority order.
Journal entries filtered to last 7 days by key-embedded date,
capped at 20 most recent.
render outputs a single node's content to stdout.
The load-memory.sh hook now delegates entirely to
`poc-memory load-context` — capnp store is the single source
of truth for session startup context.
2026-02-28 22:53:39 -05:00
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
|
|
|
|
|
|
|
let node = store.nodes.get(&resolved)
|
|
|
|
|
|
.ok_or_else(|| format!("Node not found: {}", resolved))?;
|
|
|
|
|
|
|
|
|
|
|
|
print!("{}", node.content);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// `history` command: replay the append-only node log and print every
/// recorded version of one key — a short per-version listing (plus the
/// latest content) by default, or full content per version with `full`.
///
/// Metadata goes to stderr; only the latest content (non-full mode) goes
/// to stdout, so the command pipes cleanly.
fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
    if key.is_empty() {
        return Err("history requires a key".into());
    }
    let raw_key = key.join(" ");

    // Resolve abbreviations via the loaded store, then drop it — the log
    // below is read directly from disk, not through the store.
    let store = store::Store::load()?;
    let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
    drop(store);

    let path = store::nodes_path();
    if !path.exists() {
        return Err("No node log found".into());
    }

    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| format!("open {}: {}", path.display(), e))?;
    let mut reader = BufReader::new(file);

    // Stream Cap'n Proto messages until read fails (normally EOF); each
    // message is a batch of appended nodes. Matching keys accumulate in
    // log order, i.e. oldest version first.
    let mut versions: Vec<store::Node> = Vec::new();
    while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<poc_memory::memory_capnp::node_log::Reader>()
            .map_err(|e| format!("read log: {}", e))?;
        for node_reader in log.get_nodes()
            .map_err(|e| format!("get nodes: {}", e))? {
            let node = store::Node::from_capnp(node_reader)?;
            if node.key == key {
                versions.push(node);
            }
        }
    }

    if versions.is_empty() {
        return Err(format!("No history found for '{}'", key));
    }

    eprintln!("{} versions of '{}':\n", versions.len(), key);
    for node in &versions {
        // Render timestamps only when they look like plausible epoch
        // seconds (< ~2096); otherwise show the raw value for debugging.
        let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
            store::format_datetime(node.timestamp)
        } else {
            format!("(raw:{})", node.timestamp)
        };
        let content_len = node.content.len();
        if full {
            eprintln!("=== v{} {} {} w={:.3} {}b ===",
                node.version, ts, node.provenance.label(), node.weight, content_len);
            eprintln!("{}", node.content);
        } else {
            // Single-line preview: first 120 chars, newlines escaped.
            let preview = util::first_n_chars(&node.content, 120);
            let preview = preview.replace('\n', "\\n");
            eprintln!(" v{:<3} {} {:24} w={:.3} {}b",
                node.version, ts, node.provenance.label(), node.weight, content_len);
            eprintln!(" {}", preview);
        }
    }

    // Compact mode ends with the newest version's full content on stdout.
    if !full {
        if let Some(latest) = versions.last() {
            eprintln!("\n--- Latest content (v{}, {}) ---",
                latest.version, latest.provenance.label());
            print!("{}", latest.content);
        }
    }

    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_write(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("write requires a key (reads content from stdin)".into());
|
2026-02-28 23:00:52 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let raw_key = key.join(" ");
|
2026-02-28 23:00:52 -05:00
|
|
|
|
let mut content = String::new();
|
|
|
|
|
|
std::io::Read::read_to_string(&mut std::io::stdin(), &mut content)
|
|
|
|
|
|
.map_err(|e| format!("read stdin: {}", e))?;
|
|
|
|
|
|
|
|
|
|
|
|
if content.trim().is_empty() {
|
|
|
|
|
|
return Err("No content on stdin".into());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-03-08 19:41:26 -04:00
|
|
|
|
let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
|
2026-02-28 23:49:43 -05:00
|
|
|
|
let result = store.upsert(&key, &content)?;
|
|
|
|
|
|
match result {
|
|
|
|
|
|
"unchanged" => println!("No change: '{}'", key),
|
|
|
|
|
|
"updated" => println!("Updated '{}' (v{})", key, store.nodes[&key].version),
|
|
|
|
|
|
_ => println!("Created '{}'", key),
|
|
|
|
|
|
}
|
|
|
|
|
|
if result != "unchanged" {
|
|
|
|
|
|
store.save()?;
|
2026-02-28 23:00:52 -05:00
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// `import` command: import one or more markdown files into the store.
///
/// Each argument is tried first as a literal path, then as a path under
/// the memory directory; unfound files warn on stderr and are skipped.
/// The store is saved once at the end, and only if anything changed.
fn cmd_import(files: &[String]) -> Result<(), String> {
    if files.is_empty() {
        return Err("import requires at least one file path".into());
    }

    let mut store = store::Store::load()?;
    let mut total_new = 0;
    let mut total_updated = 0;

    for arg in files {
        let path = std::path::PathBuf::from(arg);
        // Resolution order: literal path, then memory_dir/<arg>.
        let resolved = if path.exists() {
            path
        } else {
            let mem_path = store::memory_dir().join(arg);
            if !mem_path.exists() {
                // Missing file is non-fatal: warn and continue with the rest.
                eprintln!("File not found: {}", arg);
                continue;
            }
            mem_path
        };
        // import_file returns (new, updated) node counts for this file.
        let (n, u) = store.import_file(&resolved)?;
        total_new += n;
        total_updated += u;
    }

    if total_new > 0 || total_updated > 0 {
        store.save()?;
    }
    println!("Import: {} new, {} updated", total_new, total_updated);
    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// `export` command: render store file-keys back to markdown files in the
/// memory directory.
///
/// With `export_all`, every top-level key (no '#' section separator) is
/// exported, sorted; otherwise the given keys are used, with any `.md`
/// suffix stripped so both "foo" and "foo.md" work. Keys that render to
/// nothing are reported on stderr but don't abort the run.
fn cmd_export(files: &[String], export_all: bool) -> Result<(), String> {
    let store = store::Store::load()?;

    let targets: Vec<String> = if export_all {
        // File-level keys only — '#' marks section nodes within a file.
        let mut files: Vec<String> = store.nodes.keys()
            .filter(|k| !k.contains('#'))
            .cloned()
            .collect();
        files.sort();
        files
    } else if files.is_empty() {
        return Err("export requires file keys or --all".into());
    } else {
        files.iter().map(|a| {
            a.strip_suffix(".md").unwrap_or(a).to_string()
        }).collect()
    };

    let mem_dir = store::memory_dir();

    for file_key in &targets {
        match store.export_to_markdown(file_key) {
            Some(content) => {
                let out_path = mem_dir.join(format!("{}.md", file_key));
                std::fs::write(&out_path, &content)
                    .map_err(|e| format!("write {}: {}", out_path.display(), e))?;
                // Sections are delimited by "<!-- mem:" markers; the first
                // section has no marker, hence the +1.
                let section_count = content.matches("<!-- mem:").count() + 1;
                println!("Exported {} ({} sections)", file_key, section_count);
            }
            None => eprintln!("No nodes for '{}'", file_key),
        }
    }

    Ok(())
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_journal_write(text: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if text.is_empty() {
|
|
|
|
|
|
return Err("journal-write requires text".into());
|
2026-02-28 23:13:17 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let text = text.join(" ");
|
2026-02-28 23:13:17 -05:00
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let timestamp = store::format_datetime(store::now_epoch());
|
2026-02-28 23:13:17 -05:00
|
|
|
|
|
|
|
|
|
|
let slug: String = text.split_whitespace()
|
|
|
|
|
|
.take(6)
|
|
|
|
|
|
.map(|w| w.to_lowercase()
|
|
|
|
|
|
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
|
|
|
|
|
|
.collect::<String>())
|
|
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
|
|
.join("-");
|
|
|
|
|
|
let slug = if slug.len() > 50 { &slug[..50] } else { &slug };
|
|
|
|
|
|
|
2026-03-08 19:41:26 -04:00
|
|
|
|
let key = format!("journal#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
|
2026-02-28 23:13:17 -05:00
|
|
|
|
|
|
|
|
|
|
let content = format!("## {}\n\n{}", timestamp, text);
|
|
|
|
|
|
|
2026-02-28 23:44:44 -05:00
|
|
|
|
let source_ref = find_current_transcript();
|
2026-02-28 23:13:17 -05:00
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut store = store::Store::load()?;
|
2026-02-28 23:13:17 -05:00
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let mut node = store::new_node(&key, &content);
|
|
|
|
|
|
node.node_type = store::NodeType::EpisodicSession;
|
|
|
|
|
|
node.provenance = store::Provenance::Journal;
|
2026-02-28 23:49:43 -05:00
|
|
|
|
if let Some(src) = source_ref {
|
|
|
|
|
|
node.source_ref = src;
|
2026-02-28 23:13:17 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-03 12:38:52 -05:00
|
|
|
|
store.upsert_node(node)?;
|
2026-02-28 23:13:17 -05:00
|
|
|
|
store.save()?;
|
|
|
|
|
|
|
|
|
|
|
|
let word_count = text.split_whitespace().count();
|
|
|
|
|
|
println!("Appended entry at {} ({} words)", timestamp, word_count);
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_journal_tail(n: usize, full: bool, level: u8) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
2026-02-28 23:13:17 -05:00
|
|
|
|
|
2026-03-06 15:08:02 -05:00
|
|
|
|
if level == 0 {
|
|
|
|
|
|
journal_tail_entries(&store, n, full)
|
|
|
|
|
|
} else {
|
query by NodeType instead of key prefix
Replace key prefix matching (journal#j-, daily-, weekly-, monthly-)
with NodeType filters (EpisodicSession, EpisodicDaily, EpisodicWeekly,
EpisodicMonthly) for all queries: journal-tail, digest gathering,
digest auto-detection, experience mining dedup, and find_journal_node.
Add EpisodicMonthly to NodeType enum and capnp schema.
Key naming conventions (journal#j-TIMESTAMP-slug, daily-DATE, etc.)
are retained for key generation — the fix is about how we find nodes,
not how we name them.
Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
2026-03-08 20:14:37 -04:00
|
|
|
|
let node_type = match level {
|
|
|
|
|
|
1 => store::NodeType::EpisodicDaily,
|
|
|
|
|
|
2 => store::NodeType::EpisodicWeekly,
|
|
|
|
|
|
_ => store::NodeType::EpisodicMonthly,
|
2026-03-06 15:08:02 -05:00
|
|
|
|
};
|
query by NodeType instead of key prefix
Replace key prefix matching (journal#j-, daily-, weekly-, monthly-)
with NodeType filters (EpisodicSession, EpisodicDaily, EpisodicWeekly,
EpisodicMonthly) for all queries: journal-tail, digest gathering,
digest auto-detection, experience mining dedup, and find_journal_node.
Add EpisodicMonthly to NodeType enum and capnp schema.
Key naming conventions (journal#j-TIMESTAMP-slug, daily-DATE, etc.)
are retained for key generation — the fix is about how we find nodes,
not how we name them.
Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
2026-03-08 20:14:37 -04:00
|
|
|
|
journal_tail_digests(&store, node_type, n, full)
|
2026-03-06 15:08:02 -05:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// Print the newest `n` session-level journal entries, oldest first.
///
/// Entries may predate reliable `created_at` timestamps, so ordering
/// falls back through: created_at, a datetime embedded in the key, a
/// datetime found in the content, and finally the raw log timestamp.
/// `full` prints whole entries; otherwise one summary line each.
fn journal_tail_entries(store: &store::Store, n: usize, full: bool) -> Result<(), String> {
    // Datetime inside content, e.g. "2026-03-05T15:41" or with a space.
    let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
    // Datetime embedded in a journal key, e.g. "j-2026-03-05t15-41".
    let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();

    // Canonicalize either captured form to "YYYY-MM-DDTHH:MM" so string
    // comparison sorts chronologically. Input is digits/dashes/'t' only
    // (from the regexes above), so the byte slicing here is safe.
    let normalize_date = |s: &str| -> String {
        let s = s.replace('t', "T");
        if s.len() >= 16 {
            format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
        } else {
            s
        }
    };

    // Sort key for a node: (epoch, display-string). Epoch 0 means "only
    // the string form is trustworthy" — see the comparator below.
    let extract_sort = |node: &store::Node| -> (i64, String) {
        if node.created_at > 0 {
            return (node.created_at, store::format_datetime(node.created_at));
        }
        if let Some(caps) = key_date_re.captures(&node.key) {
            return (0, normalize_date(&caps[1]));
        }
        if let Some(caps) = date_re.captures(&node.content) {
            return (0, normalize_date(&caps[1]));
        }
        (node.timestamp, store::format_datetime(node.timestamp))
    };

    let mut journal: Vec<_> = store.nodes.values()
        .filter(|node| node.node_type == store::NodeType::EpisodicSession)
        .collect();
    // Compare by epoch when both sides have one; otherwise fall back to
    // the normalized datetime string (lexicographic == chronological).
    journal.sort_by(|a, b| {
        let (at, as_) = extract_sort(a);
        let (bt, bs) = extract_sort(b);
        if at > 0 && bt > 0 {
            at.cmp(&bt)
        } else {
            as_.cmp(&bs)
        }
    });

    // Keep only the newest n (tail of the oldest-first ordering).
    let skip = if journal.len() > n { journal.len() - n } else { 0 };
    for node in journal.iter().skip(skip) {
        let (_, ts) = extract_sort(node);
        let title = extract_title(&node.content);
        if full {
            println!("--- [{}] {} ---\n{}\n", ts, title, node.content);
        } else {
            println!("[{}] {}", ts, title);
        }
    }
    Ok(())
}
|
2026-02-28 23:13:17 -05:00
|
|
|
|
|
query by NodeType instead of key prefix
Replace key prefix matching (journal#j-, daily-, weekly-, monthly-)
with NodeType filters (EpisodicSession, EpisodicDaily, EpisodicWeekly,
EpisodicMonthly) for all queries: journal-tail, digest gathering,
digest auto-detection, experience mining dedup, and find_journal_node.
Add EpisodicMonthly to NodeType enum and capnp schema.
Key naming conventions (journal#j-TIMESTAMP-slug, daily-DATE, etc.)
are retained for key generation — the fix is about how we find nodes,
not how we name them.
Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
2026-03-08 20:14:37 -04:00
|
|
|
|
fn journal_tail_digests(store: &store::Store, node_type: store::NodeType, n: usize, full: bool) -> Result<(), String> {
|
2026-03-06 15:08:02 -05:00
|
|
|
|
let mut digests: Vec<_> = store.nodes.values()
|
query by NodeType instead of key prefix
Replace key prefix matching (journal#j-, daily-, weekly-, monthly-)
with NodeType filters (EpisodicSession, EpisodicDaily, EpisodicWeekly,
EpisodicMonthly) for all queries: journal-tail, digest gathering,
digest auto-detection, experience mining dedup, and find_journal_node.
Add EpisodicMonthly to NodeType enum and capnp schema.
Key naming conventions (journal#j-TIMESTAMP-slug, daily-DATE, etc.)
are retained for key generation — the fix is about how we find nodes,
not how we name them.
Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
2026-03-08 20:14:37 -04:00
|
|
|
|
.filter(|node| node.node_type == node_type)
|
2026-03-06 15:08:02 -05:00
|
|
|
|
.collect();
|
query by NodeType instead of key prefix
Replace key prefix matching (journal#j-, daily-, weekly-, monthly-)
with NodeType filters (EpisodicSession, EpisodicDaily, EpisodicWeekly,
EpisodicMonthly) for all queries: journal-tail, digest gathering,
digest auto-detection, experience mining dedup, and find_journal_node.
Add EpisodicMonthly to NodeType enum and capnp schema.
Key naming conventions (journal#j-TIMESTAMP-slug, daily-DATE, etc.)
are retained for key generation — the fix is about how we find nodes,
not how we name them.
Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
2026-03-08 20:14:37 -04:00
|
|
|
|
digests.sort_by(|a, b| {
|
|
|
|
|
|
if a.timestamp > 0 && b.timestamp > 0 {
|
|
|
|
|
|
a.timestamp.cmp(&b.timestamp)
|
|
|
|
|
|
} else {
|
|
|
|
|
|
a.key.cmp(&b.key)
|
|
|
|
|
|
}
|
|
|
|
|
|
});
|
2026-03-06 15:08:02 -05:00
|
|
|
|
|
|
|
|
|
|
let skip = if digests.len() > n { digests.len() - n } else { 0 };
|
|
|
|
|
|
for node in digests.iter().skip(skip) {
|
query by NodeType instead of key prefix
Replace key prefix matching (journal#j-, daily-, weekly-, monthly-)
with NodeType filters (EpisodicSession, EpisodicDaily, EpisodicWeekly,
EpisodicMonthly) for all queries: journal-tail, digest gathering,
digest auto-detection, experience mining dedup, and find_journal_node.
Add EpisodicMonthly to NodeType enum and capnp schema.
Key naming conventions (journal#j-TIMESTAMP-slug, daily-DATE, etc.)
are retained for key generation — the fix is about how we find nodes,
not how we name them.
Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
2026-03-08 20:14:37 -04:00
|
|
|
|
let label = &node.key;
|
2026-03-06 15:08:02 -05:00
|
|
|
|
let title = extract_title(&node.content);
|
|
|
|
|
|
if full {
|
|
|
|
|
|
println!("--- [{}] {} ---\n{}\n", label, title, node.content);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!("[{}] {}", label, title);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-02-28 23:13:17 -05:00
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-06 15:08:02 -05:00
|
|
|
|
fn extract_title(content: &str) -> String {
|
|
|
|
|
|
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
|
|
|
|
|
|
for line in content.lines() {
|
|
|
|
|
|
let stripped = line.trim();
|
|
|
|
|
|
if stripped.is_empty() { continue; }
|
|
|
|
|
|
if date_re.is_match(stripped) && stripped.len() < 25 { continue; }
|
2026-03-08 21:13:02 -04:00
|
|
|
|
if let Some(h) = stripped.strip_prefix("## ") {
|
|
|
|
|
|
return h.to_string();
|
|
|
|
|
|
} else if let Some(h) = stripped.strip_prefix("# ") {
|
|
|
|
|
|
return h.to_string();
|
2026-03-06 15:08:02 -05:00
|
|
|
|
} else {
|
2026-03-08 21:13:02 -04:00
|
|
|
|
return util::truncate(stripped, 67, "...");
|
2026-03-06 15:08:02 -05:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
String::from("(untitled)")
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_interference(threshold: f32) -> Result<(), String> {
|
2026-03-03 12:56:15 -05:00
|
|
|
|
let store = store::Store::load()?;
|
poc-memory v0.4.0: graph-structured memory with consolidation pipeline
Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
link-add, link-impact, decay, consolidate-session, etc.
Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-02-28 22:17:00 -05:00
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
let pairs = neuro::detect_interference(&store, &g, threshold);
|
|
|
|
|
|
|
|
|
|
|
|
if pairs.is_empty() {
|
|
|
|
|
|
println!("No interfering pairs above threshold {:.2}", threshold);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
|
|
|
|
|
|
for (a, b, sim) in &pairs {
|
|
|
|
|
|
println!(" [{:.3}] {} ↔ {}", sim, a, b);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
query: peg-based query language for ad-hoc graph exploration
poc-memory query "degree > 15"
poc-memory query "key ~ 'journal.*' AND degree > 10"
poc-memory query "neighbors('identity.md') WHERE strength > 0.5"
poc-memory query "community_id = community('identity.md')" --fields degree,category
Grammar-driven: the peg definition IS the language spec. Supports
boolean logic (AND/OR/NOT), numeric and string comparison, regex
match (~), graph traversal (neighbors() with WHERE), and function
calls (community(), degree()). Output flags: --fields, --sort,
--limit, --count.
New dependency: peg 0.8 (~68KB, 2 tiny deps).
2026-03-03 10:55:30 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
/// Run an ad-hoc graph query expressed in the peg-based query language.
///
/// `expr` is the raw CLI tokens; they are joined with spaces to form the
/// query string, so shell quoting of the whole expression is optional.
/// With no tokens, returns a usage error describing the expression and
/// pipe-stage syntax.
fn cmd_query(expr: &[String]) -> Result<(), String> {
    if expr.is_empty() {
        // Usage text doubles as the quick reference for the language.
        return Err("query requires an expression\n\n\
            Expressions:\n \
            degree > 15 property filter\n \
            key ~ 'journal.*' AND degree > 10 boolean + regex\n \
            neighbors('identity') WHERE ... graph traversal\n \
            * all nodes\n\n\
            Pipe stages:\n \
            | sort FIELD [asc] sort (desc by default)\n \
            | limit N cap results\n \
            | select F,F,... output fields as TSV\n \
            | count just show count".into());
    }

    // Join tokens so `poc-memory query degree > 15` works unquoted.
    let query_str = expr.join(" ");
    let store = store::Store::load()?;
    let graph = store.build_graph();
    query::run_query(&store, &graph, &query_str)
}
|
2026-03-03 18:36:25 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_lookup_bump(keys: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if keys.is_empty() {
|
|
|
|
|
|
return Err("lookup-bump requires at least one key".into());
|
2026-03-03 18:36:25 -05:00
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let keys: Vec<&str> = keys.iter().map(|s| s.as_str()).collect();
|
2026-03-03 18:36:25 -05:00
|
|
|
|
lookups::bump_many(&keys)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_lookups(date: Option<&str>) -> Result<(), String> {
|
|
|
|
|
|
let date = date.map(|d| d.to_string())
|
|
|
|
|
|
.unwrap_or_else(|| chrono::Local::now().format("%Y-%m-%d").to_string());
|
2026-03-03 18:36:25 -05:00
|
|
|
|
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let keys: Vec<String> = store.nodes.values().map(|n| n.key.clone()).collect();
|
|
|
|
|
|
let resolved = lookups::dump_resolved(&date, &keys)?;
|
|
|
|
|
|
|
|
|
|
|
|
if resolved.is_empty() {
|
|
|
|
|
|
println!("No lookups for {}", date);
|
|
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("Lookups for {}:", date);
|
|
|
|
|
|
for (key, count) in &resolved {
|
|
|
|
|
|
println!(" {:4} {}", count, key);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!("\n{} distinct keys, {} total lookups",
|
|
|
|
|
|
resolved.len(),
|
|
|
|
|
|
resolved.iter().map(|(_, c)| *c as u64).sum::<u64>());
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
2026-03-05 13:18:00 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_daemon(sub: Option<&str>, args: &[String]) -> Result<(), String> {
|
|
|
|
|
|
match sub {
|
|
|
|
|
|
None => daemon::run_daemon(),
|
|
|
|
|
|
Some("status") => daemon::show_status(),
|
|
|
|
|
|
Some("log") => {
|
|
|
|
|
|
let (job, lines) = match args.first() {
|
2026-03-05 15:30:57 -05:00
|
|
|
|
None => (None, 20),
|
|
|
|
|
|
Some(s) => {
|
|
|
|
|
|
if let Ok(n) = s.parse::<usize>() {
|
|
|
|
|
|
(None, n)
|
|
|
|
|
|
} else {
|
2026-03-08 21:04:45 -04:00
|
|
|
|
let n = args.get(1).and_then(|s| s.parse().ok()).unwrap_or(20);
|
2026-03-05 15:30:57 -05:00
|
|
|
|
(Some(s.as_str()), n)
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
};
|
2026-03-05 13:18:00 -05:00
|
|
|
|
daemon::show_log(job, lines)
|
|
|
|
|
|
}
|
2026-03-08 21:04:45 -04:00
|
|
|
|
Some("install") => daemon::install_service(),
|
|
|
|
|
|
Some(other) => Err(format!("unknown daemon subcommand: {}", other)),
|
2026-03-05 13:18:00 -05:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-03-05 15:30:57 -05:00
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_knowledge_loop(max_cycles: usize, batch_size: usize, window: usize, max_depth: i32) -> Result<(), String> {
|
|
|
|
|
|
let config = knowledge::KnowledgeLoopConfig {
|
|
|
|
|
|
max_cycles,
|
|
|
|
|
|
batch_size,
|
|
|
|
|
|
window,
|
|
|
|
|
|
max_depth,
|
|
|
|
|
|
..Default::default()
|
|
|
|
|
|
};
|
2026-03-05 15:30:57 -05:00
|
|
|
|
|
|
|
|
|
|
let results = knowledge::run_knowledge_loop(&config)?;
|
|
|
|
|
|
eprintln!("\nCompleted {} cycles, {} total actions applied",
|
|
|
|
|
|
results.len(),
|
|
|
|
|
|
results.iter().map(|r| r.total_applied).sum::<usize>());
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_fact_mine(path: &str, batch: bool, dry_run: bool, output_file: Option<&str>, min_messages: usize) -> Result<(), String> {
|
|
|
|
|
|
let p = std::path::Path::new(path);
|
2026-03-05 15:30:57 -05:00
|
|
|
|
|
|
|
|
|
|
let paths: Vec<std::path::PathBuf> = if batch {
|
|
|
|
|
|
if !p.is_dir() {
|
|
|
|
|
|
return Err(format!("Not a directory: {}", path));
|
|
|
|
|
|
}
|
|
|
|
|
|
let mut files: Vec<_> = std::fs::read_dir(p)
|
|
|
|
|
|
.map_err(|e| format!("read dir: {}", e))?
|
|
|
|
|
|
.filter_map(|e| e.ok())
|
|
|
|
|
|
.map(|e| e.path())
|
|
|
|
|
|
.filter(|p| p.extension().map(|x| x == "jsonl").unwrap_or(false))
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
files.sort();
|
|
|
|
|
|
eprintln!("Found {} transcripts", files.len());
|
|
|
|
|
|
files
|
|
|
|
|
|
} else {
|
|
|
|
|
|
vec![p.to_path_buf()]
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
let path_refs: Vec<&std::path::Path> = paths.iter().map(|p| p.as_path()).collect();
|
|
|
|
|
|
let facts = fact_mine::mine_batch(&path_refs, min_messages, dry_run)?;
|
|
|
|
|
|
|
|
|
|
|
|
if !dry_run {
|
|
|
|
|
|
let json = serde_json::to_string_pretty(&facts)
|
|
|
|
|
|
.map_err(|e| format!("serialize: {}", e))?;
|
2026-03-08 21:04:45 -04:00
|
|
|
|
if let Some(out) = output_file {
|
2026-03-05 15:30:57 -05:00
|
|
|
|
std::fs::write(out, &json).map_err(|e| format!("write: {}", e))?;
|
|
|
|
|
|
eprintln!("\nWrote {} facts to {}", facts.len(), out);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!("{}", json);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
eprintln!("\nTotal: {} facts from {} transcripts", facts.len(), paths.len());
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-08 21:04:45 -04:00
|
|
|
|
fn cmd_fact_mine_store(path: &str) -> Result<(), String> {
|
|
|
|
|
|
let path = std::path::Path::new(path);
|
2026-03-05 15:30:57 -05:00
|
|
|
|
if !path.exists() {
|
2026-03-08 21:04:45 -04:00
|
|
|
|
return Err(format!("File not found: {}", path.display()));
|
2026-03-05 15:30:57 -05:00
|
|
|
|
}
|
2026-03-08 18:31:19 -04:00
|
|
|
|
let count = fact_mine::mine_and_store(path, None)?;
|
2026-03-05 15:30:57 -05:00
|
|
|
|
eprintln!("Stored {} facts", count);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|