// forked from kent/consciousness
//
// Run an agent on nodes matching a query:
//   poc-memory agent run linker --query 'key ~ "bcachefs" | limit 10'
// Resolves the query to node keys, then passes all as seeds to the agent.
// For large batches, should be queued to daemon (future work).
// poc-memory: graph-structured memory for AI assistants
//
// Authors: ProofOfConcept <poc@bcachefs.org> and Kent Overstreet
// License: MIT OR Apache-2.0
//
// Architecture:
//   nodes.capnp     - append-only content node log
//   relations.capnp - append-only relation log
//   state.bin       - derived KV cache (rebuilt from logs when stale)
//
// Graph algorithms: clustering coefficient, community detection (label
// propagation), schema fit scoring, small-world metrics, consolidation
// priority. Text similarity via BM25 with Porter stemming.
//
// Neuroscience-inspired: spaced repetition replay, emotional gating,
// interference detection, schema assimilation, reconsolidation.
|
use poc_memory::*;
|
|
|
|
use clap::{Parser, Subcommand};
|
|
|
|
use std::process;
|
|
|
|
/// Find the most recently modified .jsonl transcript in the Claude projects dir.
|
|
#[derive(Parser)]
|
|
#[command(name = "poc-memory", version = "0.4.0", about = "Graph-structured memory store")]
|
|
struct Cli {
|
|
#[command(subcommand)]
|
|
command: Command,
|
|
}
|
|
|
|
// Top-level CLI commands. Doc comments (`///`) double as clap help text
// and are therefore user-visible; `//` comments are internal notes only.
#[derive(Subcommand)]
enum Command {
    // ── Core (daily use) ──────────────────────────────────────────────

    /// Search memory (AND logic across terms)
    ///
    /// Pipeline: -p spread -p spectral,k=20
    /// Default pipeline: spread
    Search {
        /// Search terms
        query: Vec<String>,
        /// Algorithm pipeline stages (repeatable)
        #[arg(short, long = "pipeline")]
        pipeline: Vec<String>,
        /// Show more results
        #[arg(long)]
        expand: bool,
        /// Show node content, not just keys
        #[arg(long)]
        full: bool,
        /// Show debug output for each pipeline stage
        #[arg(long)]
        debug: bool,
        /// Also match key components (e.g. "irc" matches "irc-access")
        #[arg(long)]
        fuzzy: bool,
        /// Also search node content (slow, use when graph search misses)
        #[arg(long)]
        content: bool,
    },
    /// Output a node's content to stdout
    Render {
        /// Node key
        key: Vec<String>,
    },
    /// Upsert node content from stdin
    Write {
        /// Node key
        key: Vec<String>,
    },
    /// Show all stored versions of a node
    History {
        /// Show full content for every version
        #[arg(long)]
        full: bool,
        /// Node key
        key: Vec<String>,
    },
    /// Show most recent writes to the node log
    Tail {
        /// Number of entries (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
    },
    /// Summary of memory state
    Status,
    /// Query the memory graph
    // after_long_help is only shown for `--help` (not `-h`); the string
    // below is user-visible output documenting the query mini-language.
    #[command(after_long_help = "\
EXPRESSIONS:
* all nodes
key ~ 'pattern' regex match on node key
content ~ 'phrase' regex match on node content
degree > 15 numeric comparison on any field
field = value exact match
field != value not equal
expr AND expr boolean AND
expr OR expr boolean OR
NOT expr negation
neighbors('key') nodes linked to key
neighbors('key') WHERE expr ... with filter on edges/nodes

FIELDS:
key, weight, content, degree, node_type, provenance,
emotion, retrievals, uses, wrongs, created,
clustering_coefficient (cc), community_id

OPERATORS:
> < >= <= = != ~(regex)

PIPE STAGES:
| sort FIELD [asc] sort (desc by default)
| limit N cap results
| select F,F,... output fields as TSV
| count just show count
| connectivity show graph structure between results

FUNCTIONS:
community('key') community id of a node
degree('key') degree of a node

EXAMPLES:
key ~ 'inner-life' substring match on keys
content ~ 'made love' full-text search
content ~ 'made love' | connectivity find clusters among results
(content ~ 'A' OR content ~ 'B') | connectivity
degree > 15 | sort degree | limit 10 high-degree nodes
key ~ 'journal' AND degree > 10 | count count matching nodes
neighbors('identity') WHERE strength > 0.5 | sort strength
* | sort weight asc | limit 20 lowest-weight nodes
")]
    Query {
        /// Query expression (e.g. "key ~ 'inner-life'")
        expr: Vec<String>,
    },
    /// Mark a memory as useful (boosts weight)
    Used {
        /// Node key
        key: Vec<String>,
    },
    /// Mark a memory as wrong/irrelevant
    Wrong {
        /// Node key
        key: String,
        /// Optional context
        context: Vec<String>,
    },
    /// Mark a search result as not relevant (weakens edges that led to it)
    #[command(name = "not-relevant")]
    NotRelevant {
        /// Node key that was not relevant
        key: String,
    },
    /// Mark a node as not useful (weakens node weight, not edges)
    #[command(name = "not-useful")]
    NotUseful {
        /// Node key
        key: String,
    },
    /// Record a gap in memory coverage
    Gap {
        /// Gap description
        description: Vec<String>,
    },

    // ── Node operations ───────────────────────────────────────────────

    /// Node operations (delete, rename, list)
    #[command(subcommand)]
    Node(NodeCmd),

    // ── Journal ───────────────────────────────────────────────────────

    /// Journal operations (write, tail, enrich)
    #[command(subcommand)]
    Journal(JournalCmd),

    // ── Graph ─────────────────────────────────────────────────────────

    /// Graph operations (link, audit, spectral)
    // Variant is named GraphCmd to avoid clashing with the enum; the
    // user-facing name is forced to "graph" here.
    #[command(subcommand, name = "graph")]
    GraphCmd(GraphCmd),

    // ── Cursor (spatial memory) ──────────────────────────────────────

    /// Navigate the memory graph with a persistent cursor
    #[command(subcommand)]
    Cursor(CursorCmd),

    // ── Agents ────────────────────────────────────────────────────────

    /// Agent and daemon operations
    #[command(subcommand)]
    Agent(AgentCmd),

    // ── Admin ─────────────────────────────────────────────────────────

    /// Admin operations (fsck, health, import, export)
    #[command(subcommand)]
    Admin(AdminCmd),
}
|
|
|
|
// `node` subcommands: direct node-level maintenance operations.
#[derive(Subcommand)]
enum NodeCmd {
    /// Soft-delete a node
    Delete {
        /// Node key
        key: Vec<String>,
    },
    /// Rename a node key
    Rename {
        /// Old key
        old_key: String,
        /// New key
        new_key: String,
    },
    /// List all node keys (one per line, optional glob)
    #[command(name = "list")]
    List {
        /// Glob pattern to filter keys
        pattern: Option<String>,
    },
    /// List all edges (tsv: source target strength type)
    Edges,
    /// Dump entire store as JSON
    #[command(name = "dump")]
    Dump,
}
|
|
|
|
// `cursor` subcommands: a persistent position in the graph that can be
// moved along temporal links and up/down the digest hierarchy.
#[derive(Subcommand)]
enum CursorCmd {
    /// Show current cursor position with context
    Show,
    /// Set cursor to a node key
    Set {
        /// Node key
        key: Vec<String>,
    },
    /// Move cursor forward in time
    Forward,
    /// Move cursor backward in time
    Back,
    /// Move up the digest hierarchy (journal→daily→weekly→monthly)
    Up,
    /// Move down the digest hierarchy (to first child)
    Down,
    /// Clear the cursor
    Clear,
}
|
|
|
|
// `journal` subcommands: episodic entries and their enrichment.
#[derive(Subcommand)]
enum JournalCmd {
    /// Write a journal entry to the store
    Write {
        /// Entry text
        text: Vec<String>,
    },
    /// Show recent journal/digest entries
    Tail {
        /// Number of entries to show (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
        /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
        #[arg(long, default_value_t = 0)]
        level: u8,
    },
    /// Enrich journal entry with conversation links
    Enrich {
        /// Path to JSONL transcript
        jsonl_path: String,
        /// Journal entry text to enrich
        entry_text: String,
        /// Grep line number for source location
        #[arg(default_value_t = 0)]
        grep_line: usize,
    },
}
|
|
|
|
// `graph` subcommands: edge maintenance, topology analysis, and the
// spectral-embedding family of tools.
#[derive(Subcommand)]
enum GraphCmd {
    /// Show neighbors of a node
    Link {
        /// Node key
        key: Vec<String>,
    },
    /// Add a link between two nodes
    #[command(name = "link-add")]
    LinkAdd {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
        /// Optional reason
        reason: Vec<String>,
    },
    /// Simulate adding an edge, report topology impact
    #[command(name = "link-impact")]
    LinkImpact {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
    },
    /// Walk every link, send to Sonnet for quality review
    #[command(name = "link-audit")]
    LinkAudit {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Link orphan nodes to similar neighbors
    #[command(name = "link-orphans")]
    LinkOrphans {
        /// Minimum degree to consider orphan (default: 2)
        #[arg(default_value_t = 2)]
        min_degree: usize,
        /// Links per orphan (default: 3)
        #[arg(default_value_t = 3)]
        links_per: usize,
        /// Similarity threshold (default: 0.15)
        #[arg(default_value_t = 0.15)]
        sim_threshold: f32,
    },
    /// Close triangles: link similar neighbors of hubs
    #[command(name = "triangle-close")]
    TriangleClose {
        /// Minimum hub degree (default: 5)
        #[arg(default_value_t = 5)]
        min_degree: usize,
        /// Similarity threshold (default: 0.3)
        #[arg(default_value_t = 0.3)]
        sim_threshold: f32,
        /// Maximum links per hub (default: 10)
        #[arg(default_value_t = 10)]
        max_per_hub: usize,
    },
    /// Cap node degree by pruning weak auto edges
    #[command(name = "cap-degree")]
    CapDegree {
        /// Maximum degree (default: 50)
        #[arg(default_value_t = 50)]
        max_degree: usize,
    },
    /// Set link strengths from neighborhood overlap (Jaccard similarity)
    #[command(name = "normalize-strengths")]
    NormalizeStrengths {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Redistribute hub links to section-level children
    Differentiate {
        /// Specific hub key (omit to list all differentiable hubs)
        key: Option<String>,
        /// Apply the redistribution
        #[arg(long)]
        apply: bool,
    },
    /// Walk temporal links: semantic ↔ episodic ↔ conversation
    Trace {
        /// Node key
        key: Vec<String>,
    },
    /// Detect potentially confusable memory pairs
    Interference {
        /// Similarity threshold (default: 0.4)
        #[arg(long, default_value_t = 0.4)]
        threshold: f32,
    },
    /// Show graph structure overview
    Overview,
    /// Spectral decomposition of the memory graph
    Spectral {
        /// Number of eigenvectors (default: 30)
        #[arg(default_value_t = 30)]
        k: usize,
    },
    /// Compute and save spectral embedding
    #[command(name = "spectral-save")]
    SpectralSave {
        /// Number of eigenvectors (default: 20)
        #[arg(default_value_t = 20)]
        k: usize,
    },
    /// Find spectrally nearest nodes
    #[command(name = "spectral-neighbors")]
    SpectralNeighbors {
        /// Node key
        key: String,
        /// Number of neighbors (default: 15)
        #[arg(default_value_t = 15)]
        n: usize,
    },
    /// Show nodes ranked by outlier/bridge score
    #[command(name = "spectral-positions")]
    SpectralPositions {
        /// Number of nodes to show (default: 30)
        #[arg(default_value_t = 30)]
        n: usize,
    },
    /// Find spectrally close but unlinked pairs
    #[command(name = "spectral-suggest")]
    SpectralSuggest {
        /// Number of pairs (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
    },
    /// Diagnose duplicate/overlapping nodes for a topic cluster
    Organize {
        /// Search term (matches node keys; also content unless --key-only)
        term: String,
        /// Similarity threshold for pair reporting (default: 0.4)
        #[arg(long, default_value_t = 0.4)]
        threshold: f32,
        /// Only match node keys, not content
        #[arg(long)]
        key_only: bool,
        /// Create anchor node for the search term and link to cluster
        #[arg(long)]
        anchor: bool,
    },
}
|
|
|
|
// `agent daemon` subcommands: lifecycle and RPC entry points for the
// background job daemon (nested under AgentCmd::Daemon).
#[derive(Subcommand)]
enum DaemonCmd {
    /// Start the daemon (default)
    Start,
    /// Show daemon status
    Status,
    /// Show daemon log
    Log {
        /// Job name to filter by
        job: Option<String>,
        /// Number of lines to show
        #[arg(long, default_value_t = 20)]
        lines: usize,
    },
    /// Install systemd service
    Install,
    /// Trigger consolidation via daemon
    Consolidate,
    /// Run an agent via the daemon
    Run {
        /// Agent name (e.g. organize, replay, linker)
        #[arg(default_value = "replay")]
        agent: String,
        /// Batch size
        #[arg(default_value_t = 1)]
        count: usize,
    },
    /// Interactive TUI
    Tui,
}
|
|
|
|
// `agent` subcommands: LLM-driven consolidation, mining, digesting, and
// the daemon that schedules them.
#[derive(Subcommand)]
enum AgentCmd {
    /// Background job daemon
    #[command(subcommand)]
    Daemon(DaemonCmd),
    /// Run knowledge agents to convergence
    #[command(name = "knowledge-loop")]
    KnowledgeLoop {
        /// Maximum cycles before stopping
        #[arg(long, default_value_t = 20)]
        max_cycles: usize,
        /// Items per agent per cycle
        #[arg(long, default_value_t = 5)]
        batch_size: usize,
        /// Cycles to check for convergence
        #[arg(long, default_value_t = 5)]
        window: usize,
        /// Maximum inference depth
        #[arg(long, default_value_t = 4)]
        max_depth: i32,
    },
    /// Run agent consolidation on priority nodes
    #[command(name = "consolidate-batch")]
    ConsolidateBatch {
        /// Number of nodes to consolidate
        #[arg(long, default_value_t = 5)]
        count: usize,
        /// Generate replay agent prompt automatically
        #[arg(long)]
        auto: bool,
        /// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
        #[arg(long)]
        agent: Option<String>,
    },
    /// Analyze metrics, plan agent allocation
    #[command(name = "consolidate-session")]
    ConsolidateSession,
    /// Autonomous: plan → agents → apply → digests → links
    #[command(name = "consolidate-full")]
    ConsolidateFull,
    /// Import pending agent results into the graph
    #[command(name = "apply-agent")]
    ApplyAgent {
        /// Process all files without moving to done/
        #[arg(long)]
        all: bool,
    },
    /// Extract and apply actions from consolidation reports
    #[command(name = "apply-consolidation")]
    ApplyConsolidation {
        /// Apply actions (default: dry run)
        #[arg(long)]
        apply: bool,
        /// Read from specific report file
        #[arg(long)]
        report: Option<String>,
    },
    /// Generate episodic digests (daily, weekly, monthly, auto)
    Digest {
        /// Digest type: daily, weekly, monthly, auto
        #[command(subcommand)]
        level: DigestLevel,
    },
    /// Parse and apply links from digest nodes
    #[command(name = "digest-links")]
    DigestLinks {
        /// Apply the links (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Mine conversation for experiential moments to journal
    // Retained for CLI compatibility; the handler now reports removal.
    #[command(name = "experience-mine")]
    ExperienceMine {
        /// Path to JSONL transcript (default: most recent)
        jsonl_path: Option<String>,
    },
    /// Extract atomic facts from conversation transcripts
    #[command(name = "fact-mine")]
    FactMine {
        /// Path to JSONL transcript or directory (with --batch)
        path: String,
        /// Process all .jsonl files in directory
        #[arg(long)]
        batch: bool,
        /// Show chunks without calling model
        #[arg(long)]
        dry_run: bool,
        /// Write JSON to file (default: stdout)
        #[arg(long, short)]
        output: Option<String>,
        /// Skip transcripts with fewer messages
        #[arg(long, default_value_t = 10)]
        min_messages: usize,
    },
    /// Extract facts from a transcript and store directly
    #[command(name = "fact-mine-store")]
    FactMineStore {
        /// Path to JSONL transcript
        path: String,
    },
    /// Run a single agent by name
    Run {
        /// Agent name (e.g. observation, linker, distill)
        agent: String,
        /// Batch size (number of seed nodes/fragments)
        #[arg(long, default_value_t = 5)]
        count: usize,
        /// Target specific node keys (overrides agent's query)
        #[arg(long)]
        target: Vec<String>,
        /// Run agent on each result of a query (e.g. 'key ~ "bcachefs" | limit 10')
        #[arg(long)]
        query: Option<String>,
        /// Dry run — set POC_MEMORY_DRY_RUN=1 so mutations are no-ops
        #[arg(long)]
        dry_run: bool,
        /// Debug — print full prompt and response
        #[arg(long)]
        debug: bool,
    },
    /// Show spaced repetition replay queue
    #[command(name = "replay-queue")]
    ReplayQueue {
        /// Number of items to show
        #[arg(long, default_value_t = 10)]
        count: usize,
    },
    /// Evaluate agent quality by LLM-sorted ranking
    #[command(name = "evaluate")]
    Evaluate {
        /// Number of pairwise matchups to run
        #[arg(long, default_value_t = 30)]
        matchups: usize,
        /// Model to use for comparison (haiku or sonnet)
        #[arg(long, default_value = "haiku")]
        model: String,
        /// Show example comparison prompt without calling LLM
        #[arg(long)]
        dry_run: bool,
    },
}
|
|
|
|
// `admin` subcommands: store lifecycle, integrity, import/export, and
// low-frequency maintenance.
#[derive(Subcommand)]
enum AdminCmd {
    /// Scan markdown files, index all memory units
    Init,
    /// Report graph metrics (CC, communities, small-world)
    Health,
    /// Run consistency checks and repair
    Fsck,
    /// Find and merge duplicate nodes (same key, multiple UUIDs)
    Dedup {
        /// Apply the merge (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Bulk rename: replace a character in all keys
    #[command(name = "bulk-rename")]
    BulkRename {
        /// Character to replace
        from: String,
        /// Replacement character
        to: String,
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Brief metrics check (for cron/notifications)
    #[command(name = "daily-check")]
    DailyCheck,
    /// Import markdown file(s) into the store
    Import {
        /// File paths
        files: Vec<String>,
    },
    /// Export store nodes to markdown file(s)
    Export {
        /// File keys to export (or --all)
        files: Vec<String>,
        /// Export all file-level nodes
        #[arg(long)]
        all: bool,
    },
    /// Output session-start context from the store
    #[command(name = "load-context")]
    LoadContext {
        /// Show word count statistics instead of content
        #[arg(long)]
        stats: bool,
    },
    /// Show recent retrieval log
    Log,
    /// Show current parameters
    Params,
    /// Bump daily lookup counter for keys
    #[command(name = "lookup-bump")]
    LookupBump {
        /// Node keys
        keys: Vec<String>,
    },
    /// Show daily lookup counts
    Lookups {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Migrate transcript stub nodes to progress log
    #[command(name = "migrate-transcript-progress")]
    MigrateTranscriptProgress,
}
|
|
|
|
// Nested subcommand of `agent digest`: which digest period to generate.
#[derive(Subcommand)]
enum DigestLevel {
    /// Generate daily digest
    Daily {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Generate weekly digest
    Weekly {
        /// Date or week label (default: current week)
        date: Option<String>,
    },
    /// Generate monthly digest
    Monthly {
        /// Month (YYYY-MM) or date (default: current month)
        date: Option<String>,
    },
    /// Generate all missing digests
    Auto,
}
|
|
|
|
/// Print help with subcommands expanded to show nested commands.
|
|
fn print_help() {
|
|
use clap::CommandFactory;
|
|
let cmd = Cli::command();
|
|
|
|
println!("poc-memory - graph-structured memory store");
|
|
println!("usage: poc-memory <command> [<args>]\n");
|
|
|
|
for sub in cmd.get_subcommands() {
|
|
if sub.get_name() == "help" { continue }
|
|
let children: Vec<_> = sub.get_subcommands()
|
|
.filter(|c| c.get_name() != "help")
|
|
.collect();
|
|
if !children.is_empty() {
|
|
for child in &children {
|
|
let about = child.get_about().map(|s| s.to_string()).unwrap_or_default();
|
|
let full = format!("{} {}", sub.get_name(), child.get_name());
|
|
// Recurse one more level for daemon subcommands etc.
|
|
let grandchildren: Vec<_> = child.get_subcommands()
|
|
.filter(|c| c.get_name() != "help")
|
|
.collect();
|
|
if !grandchildren.is_empty() {
|
|
for gc in grandchildren {
|
|
let gc_about = gc.get_about().map(|s| s.to_string()).unwrap_or_default();
|
|
let gc_full = format!("{} {}", full, gc.get_name());
|
|
println!(" {:<34}{gc_about}", gc_full);
|
|
}
|
|
} else {
|
|
println!(" {:<34}{about}", full);
|
|
}
|
|
}
|
|
} else {
|
|
let about = sub.get_about().map(|s| s.to_string()).unwrap_or_default();
|
|
println!(" {:<34}{about}", sub.get_name());
|
|
}
|
|
}
|
|
}
|
|
|
|
fn main() {
    // Handle --help ourselves for expanded subcommand display
    let args: Vec<String> = std::env::args().collect();
    // Precedence note: `&&` binds tighter than `||`, so this reads as
    // "no arguments at all, OR exactly one argument and it is -h/--help".
    // Subcommand help (e.g. `poc-memory search --help`) still goes
    // through clap's own handling below.
    if args.len() <= 1 || args.iter().any(|a| a == "--help" || a == "-h") && args.len() == 2 {
        print_help();
        return;
    }

    let cli = Cli::parse();

    // Dispatch: every arm returns Result<(), String>; errors are printed
    // uniformly after the match and turn into exit code 1.
    let result = match cli.command {
        // Core
        Command::Search { query, pipeline, expand, full, debug, fuzzy, content }
            => cli::misc::cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content),
        Command::Render { key } => cli::node::cmd_render(&key),
        Command::Write { key } => cli::node::cmd_write(&key),
        Command::History { full, key } => cli::node::cmd_history(&key, full),
        Command::Tail { n, full } => cli::journal::cmd_tail(n, full),
        Command::Status => cli::misc::cmd_status(),
        Command::Query { expr } => cli::misc::cmd_query(&expr),
        Command::Used { key } => cli::node::cmd_used(&key),
        Command::Wrong { key, context } => cli::node::cmd_wrong(&key, &context),
        Command::NotRelevant { key } => cli::node::cmd_not_relevant(&key),
        Command::NotUseful { key } => cli::node::cmd_not_useful(&key),
        Command::Gap { description } => cli::node::cmd_gap(&description),

        // Node
        Command::Node(sub) => match sub {
            NodeCmd::Delete { key } => cli::node::cmd_node_delete(&key),
            NodeCmd::Rename { old_key, new_key } => cli::node::cmd_node_rename(&old_key, &new_key),
            NodeCmd::List { pattern } => cli::node::cmd_list_keys(pattern.as_deref()),
            NodeCmd::Edges => cli::node::cmd_list_edges(),
            NodeCmd::Dump => cli::node::cmd_dump_json(),
        },

        // Journal
        Command::Journal(sub) => match sub {
            JournalCmd::Write { text } => cli::journal::cmd_journal_write(&text),
            JournalCmd::Tail { n, full, level } => cli::journal::cmd_journal_tail(n, full, level),
            JournalCmd::Enrich { jsonl_path, entry_text, grep_line }
                => cli::agent::cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
        },

        // Graph
        Command::GraphCmd(sub) => match sub {
            GraphCmd::Link { key } => cli::graph::cmd_link(&key),
            GraphCmd::LinkAdd { source, target, reason }
                => cli::graph::cmd_link_add(&source, &target, &reason),
            GraphCmd::LinkImpact { source, target }
                => cli::graph::cmd_link_impact(&source, &target),
            GraphCmd::LinkAudit { apply } => cli::graph::cmd_link_audit(apply),
            GraphCmd::LinkOrphans { min_degree, links_per, sim_threshold }
                => cli::graph::cmd_link_orphans(min_degree, links_per, sim_threshold),
            GraphCmd::TriangleClose { min_degree, sim_threshold, max_per_hub }
                => cli::graph::cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
            GraphCmd::CapDegree { max_degree } => cli::graph::cmd_cap_degree(max_degree),
            GraphCmd::NormalizeStrengths { apply } => cli::graph::cmd_normalize_strengths(apply),
            GraphCmd::Differentiate { key, apply }
                => cli::graph::cmd_differentiate(key.as_deref(), apply),
            GraphCmd::Trace { key } => cli::graph::cmd_trace(&key),
            GraphCmd::Interference { threshold } => cli::graph::cmd_interference(threshold),
            GraphCmd::Overview => cli::graph::cmd_graph(),
            GraphCmd::Spectral { k } => cli::graph::cmd_spectral(k),
            GraphCmd::SpectralSave { k } => cli::graph::cmd_spectral_save(k),
            GraphCmd::SpectralNeighbors { key, n }
                => cli::graph::cmd_spectral_neighbors(&key, n),
            GraphCmd::SpectralPositions { n } => cli::graph::cmd_spectral_positions(n),
            GraphCmd::SpectralSuggest { n } => cli::graph::cmd_spectral_suggest(n),
            GraphCmd::Organize { term, threshold, key_only, anchor }
                => cli::graph::cmd_organize(&term, threshold, key_only, anchor),
        },

        // Cursor
        Command::Cursor(sub) => cmd_cursor(sub),

        // Agent
        Command::Agent(sub) => match sub {
            AgentCmd::Daemon(sub) => cmd_daemon(sub),
            AgentCmd::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
                => cli::agent::cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
            AgentCmd::ConsolidateBatch { count, auto, agent }
                => cli::agent::cmd_consolidate_batch(count, auto, agent),
            AgentCmd::ConsolidateSession => cli::agent::cmd_consolidate_session(),
            AgentCmd::ConsolidateFull => cli::agent::cmd_consolidate_full(),
            AgentCmd::ApplyAgent { all } => cmd_apply_agent(all),
            AgentCmd::ApplyConsolidation { apply, report }
                => cli::agent::cmd_apply_consolidation(apply, report.as_deref()),
            AgentCmd::Digest { level } => cmd_digest(level),
            AgentCmd::DigestLinks { apply } => cli::agent::cmd_digest_links(apply),
            AgentCmd::ExperienceMine { jsonl_path } => cmd_experience_mine(jsonl_path),
            AgentCmd::FactMine { path, batch, dry_run, output, min_messages }
                => cli::agent::cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
            AgentCmd::FactMineStore { path } => cli::agent::cmd_fact_mine_store(&path),
            AgentCmd::Run { agent, count, target, query, dry_run, debug }
                => cli::agent::cmd_run_agent(&agent, count, &target, query.as_deref(), dry_run, debug),
            AgentCmd::ReplayQueue { count } => cli::agent::cmd_replay_queue(count),
            AgentCmd::Evaluate { matchups, model, dry_run }
                => cli::agent::cmd_evaluate_agents(matchups, &model, dry_run),
        },

        // Admin
        Command::Admin(sub) => match sub {
            AdminCmd::Init => cli::admin::cmd_init(),
            AdminCmd::Health => cli::admin::cmd_health(),
            AdminCmd::Fsck => cli::admin::cmd_fsck(),
            AdminCmd::Dedup { apply } => cli::admin::cmd_dedup(apply),
            AdminCmd::BulkRename { from, to, apply } => cli::admin::cmd_bulk_rename(&from, &to, apply),
            AdminCmd::DailyCheck => cli::admin::cmd_daily_check(),
            AdminCmd::Import { files } => cli::admin::cmd_import(&files),
            AdminCmd::Export { files, all } => cli::admin::cmd_export(&files, all),
            AdminCmd::LoadContext { stats } => cli::misc::cmd_load_context(stats),
            AdminCmd::Log => cli::misc::cmd_log(),
            AdminCmd::Params => cli::misc::cmd_params(),
            AdminCmd::LookupBump { keys } => cli::node::cmd_lookup_bump(&keys),
            AdminCmd::Lookups { date } => cli::node::cmd_lookups(date.as_deref()),
            // Immediately-invoked closure so `?` can be used inline for
            // this one small inline implementation.
            AdminCmd::MigrateTranscriptProgress => (|| -> Result<(), String> {
                let mut store = store::Store::load()?;
                let count = store.migrate_transcript_progress()?;
                println!("Migrated {} transcript segment markers", count);
                Ok(())
            })()
        },
    };

    if let Err(e) = result {
        eprintln!("Error: {}", e);
        process::exit(1);
    }
}
|
|
|
|
// ── Command implementations ─────────────────────────────────────────
|
|
|
|
/// Apply links from a single agent result JSON file.
|
|
/// Returns (links_applied, errors).
|
|
fn apply_agent_file(
|
|
store: &mut store::Store,
|
|
data: &serde_json::Value,
|
|
) -> (usize, usize) {
|
|
let agent_result = data.get("agent_result").or(Some(data));
|
|
let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
|
|
Some(l) => l,
|
|
None => return (0, 0),
|
|
};
|
|
|
|
let entry_text = data.get("entry_text")
|
|
.and_then(|v| v.as_str())
|
|
.unwrap_or("");
|
|
|
|
if let (Some(start), Some(end)) = (
|
|
agent_result.and_then(|r| r.get("source_start")).and_then(|v| v.as_u64()),
|
|
agent_result.and_then(|r| r.get("source_end")).and_then(|v| v.as_u64()),
|
|
) {
|
|
println!(" Source: L{}-L{}", start, end);
|
|
}
|
|
|
|
let mut applied = 0;
|
|
let mut errors = 0;
|
|
|
|
for link in links {
|
|
let target = match link.get("target").and_then(|v| v.as_str()) {
|
|
Some(t) => t,
|
|
None => continue,
|
|
};
|
|
let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
|
|
|
|
if let Some(note) = target.strip_prefix("NOTE:") {
|
|
println!(" NOTE: {} — {}", note, reason);
|
|
continue;
|
|
}
|
|
|
|
let resolved = match store.resolve_key(target) {
|
|
Ok(r) => r,
|
|
Err(_) => {
|
|
println!(" SKIP {} (not found in graph)", target);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let source_key = match store.find_journal_node(entry_text) {
|
|
Some(k) => k,
|
|
None => {
|
|
println!(" SKIP {} (no matching journal node)", target);
|
|
continue;
|
|
}
|
|
};
|
|
|
|
let source_uuid = match store.nodes.get(&source_key) {
|
|
Some(n) => n.uuid,
|
|
None => continue,
|
|
};
|
|
let target_uuid = match store.nodes.get(&resolved) {
|
|
Some(n) => n.uuid,
|
|
None => continue,
|
|
};
|
|
|
|
let rel = store::new_relation(
|
|
source_uuid, target_uuid,
|
|
store::RelationType::Link,
|
|
0.5,
|
|
&source_key, &resolved,
|
|
);
|
|
if let Err(e) = store.add_relation(rel) {
|
|
eprintln!(" Error adding relation: {}", e);
|
|
errors += 1;
|
|
} else {
|
|
println!(" LINK {} → {} ({})", source_key, resolved, reason);
|
|
applied += 1;
|
|
}
|
|
}
|
|
|
|
(applied, errors)
|
|
}
|
|
|
|
/// Import pending agent result files from <memory>/agent-results.
///
/// Reads every *.json file (in path order), applies its links via
/// `apply_agent_file`, and — unless `process_all` is set — moves each
/// processed file into agent-results/done/ so it is not re-applied.
/// The store is saved only if at least one link was applied.
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
    let results_dir = store::memory_dir().join("agent-results");

    // Nothing to do if no agent has ever produced results.
    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }

    let mut store = store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;

    // Collect only .json entries; sort by path for deterministic order.
    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());

    for entry in &files {
        let path = entry.path();
        // Unreadable or unparsable files are counted as errors and
        // skipped; they are NOT moved to done/, so they can be retried.
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!(" Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!(" Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        let (a, e) = apply_agent_file(&mut store, &data);
        applied += a;
        errors += e;

        // Archive the file so a later run does not re-apply it; rename
        // failure is deliberately ignored (best-effort).
        if !process_all {
            let done_dir = crate::util::memory_subdir("agent-results/done")?;
            let dest = done_dir.join(path.file_name().unwrap());
            std::fs::rename(&path, &dest).ok();
        }
    }

    // Avoid rewriting state when nothing changed.
    if applied > 0 {
        store.save()?;
    }

    println!("\nApplied {} links ({} errors, {} files processed)",
        applied, errors, files.len());
    Ok(())
}
|
|
|
|
fn cmd_digest(level: DigestLevel) -> Result<(), String> {
|
|
let mut store = store::Store::load()?;
|
|
|
|
match level {
|
|
DigestLevel::Auto => digest::digest_auto(&mut store),
|
|
DigestLevel::Daily { date } => {
|
|
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
|
digest::generate(&mut store, "daily", &arg)
|
|
}
|
|
DigestLevel::Weekly { date } => {
|
|
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
|
digest::generate(&mut store, "weekly", &arg)
|
|
}
|
|
DigestLevel::Monthly { date } => {
|
|
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
|
digest::generate(&mut store, "monthly", &arg)
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Stub for the removed `experience-mine` command.
///
/// The subcommand is kept in the CLI for compatibility, but invoking it
/// always fails with a pointer to the replacement workflow.
fn cmd_experience_mine(_jsonl_path: Option<String>) -> Result<(), String> {
    let msg = "experience-mine has been removed — use the observation agent instead.";
    Err(msg.to_string())
}
|
|
|
|
/// Dispatch `agent daemon` subcommands to the daemon (and TUI) modules.
fn cmd_daemon(sub: DaemonCmd) -> Result<(), String> {
    match sub {
        DaemonCmd::Start => daemon::run_daemon(),
        DaemonCmd::Status => daemon::show_status(),
        DaemonCmd::Log { job, lines } => daemon::show_log(job.as_deref(), lines),
        DaemonCmd::Install => daemon::install_service(),
        // Consolidate/Run go over the daemon's RPC channel rather than
        // executing in-process.
        DaemonCmd::Consolidate => daemon::rpc_consolidate(),
        DaemonCmd::Run { agent, count } => daemon::rpc_run_agent(&agent, count),
        DaemonCmd::Tui => tui::run_tui(),
    }
}
|
|
|
|
/// Dispatch `cursor` subcommands.
///
/// Each navigation arm loads a fresh read-only view of the store; the
/// cursor position itself is persisted by the cursor module, so no arm
/// saves the store.
fn cmd_cursor(sub: CursorCmd) -> Result<(), String> {
    match sub {
        CursorCmd::Show => {
            let store = crate::store::Store::load()?;
            cursor::show(&store)
        }
        CursorCmd::Set { key } => {
            if key.is_empty() {
                return Err("cursor set requires a key".into());
            }
            // Multi-word keys arrive as separate args; rejoin them.
            let key = key.join(" ");
            let store = crate::store::Store::load()?;
            // Accept "foo.md" style keys by stripping the suffix, and
            // validate the node exists before persisting the cursor.
            let bare = crate::store::strip_md_suffix(&key);
            if !store.nodes.contains_key(&bare) {
                return Err(format!("Node not found: {}", bare));
            }
            cursor::set(&bare)?;
            cursor::show(&store)
        }
        CursorCmd::Forward => {
            let store = crate::store::Store::load()?;
            cursor::move_temporal(&store, true)
        }
        CursorCmd::Back => {
            let store = crate::store::Store::load()?;
            cursor::move_temporal(&store, false)
        }
        CursorCmd::Up => {
            let store = crate::store::Store::load()?;
            cursor::move_up(&store)
        }
        CursorCmd::Down => {
            let store = crate::store::Store::load()?;
            cursor::move_down(&store)
        }
        CursorCmd::Clear => cursor::clear(),
    }
}
|
|
|