Each subcommand enum (Command, NodeCmd, JournalCmd, GraphCmd, CursorCmd, DaemonCmd, AgentCmd, AdminCmd) now implements a Run trait. main() becomes `cli.command.run()`. Standalone dispatch functions (cmd_cursor, cmd_daemon, cmd_experience_mine) inlined into their enum's Run impl. No functional changes. Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
1145 lines
39 KiB
Rust
1145 lines
39 KiB
Rust
// poc-memory: graph-structured memory for AI assistants
//
// Authors: ProofOfConcept <poc@bcachefs.org> and Kent Overstreet
// License: MIT OR Apache-2.0
//
// Architecture:
//   nodes.capnp     - append-only content node log
//   relations.capnp - append-only relation log
//   state.bin       - derived KV cache (rebuilt from logs when stale)
//
// Graph algorithms: clustering coefficient, community detection (label
// propagation), schema fit scoring, small-world metrics, consolidation
// priority. Text similarity via BM25 with Porter stemming.
//
// Neuroscience-inspired: spaced repetition replay, emotional gating,
// interference detection, schema assimilation, reconsolidation.
|
||
use poc_memory::*;
|
||
|
||
use clap::{Parser, Subcommand};
|
||
|
||
use std::process;
|
||
|
||
/// Find the most recently modified .jsonl transcript in the Claude projects dir.
|
||
#[derive(Parser)]
|
||
#[command(name = "poc-memory", version = "0.4.0", about = "Graph-structured memory store")]
|
||
struct Cli {
|
||
#[command(subcommand)]
|
||
command: Command,
|
||
}
|
||
|
||
// Top-level subcommand set. Leaf variants are day-to-day operations; the
// tuple variants (Node, Journal, GraphCmd, Cursor, Agent, Admin) nest a
// whole sub-enum each. clap derives kebab-case names unless overridden
// with #[command(name = ...)]. The `///` comments below are clap help
// text (runtime-visible), not ordinary documentation.
#[derive(Subcommand)]
enum Command {
    // ── Core (daily use) ──────────────────────────────────────────────

    /// Search memory (AND logic across terms)
    ///
    /// Pipeline: -p spread -p spectral,k=20
    /// Default pipeline: spread
    Search {
        /// Search terms
        query: Vec<String>,
        /// Algorithm pipeline stages (repeatable)
        #[arg(short, long = "pipeline")]
        pipeline: Vec<String>,
        /// Show more results
        #[arg(long)]
        expand: bool,
        /// Show node content, not just keys
        #[arg(long)]
        full: bool,
        /// Show debug output for each pipeline stage
        #[arg(long)]
        debug: bool,
        /// Also match key components (e.g. "irc" matches "irc-access")
        #[arg(long)]
        fuzzy: bool,
        /// Also search node content (slow, use when graph search misses)
        #[arg(long)]
        content: bool,
    },
    /// Output a node's content to stdout
    Render {
        /// Node key
        key: Vec<String>,
    },
    /// Upsert node content from stdin
    Write {
        /// Node key
        key: Vec<String>,
    },
    /// Edit a node in $EDITOR
    Edit {
        /// Node key
        key: Vec<String>,
    },
    /// Show all stored versions of a node
    History {
        /// Show full content for every version
        #[arg(long)]
        full: bool,
        /// Node key
        key: Vec<String>,
    },
    /// Show most recent writes to the node log
    Tail {
        /// Number of entries (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
    },
    /// Summary of memory state
    Status,
    /// Query the memory graph
    // The after_long_help block is the query-language reference shown by
    // `poc-memory query --help`.
    #[command(after_long_help = "\
EXPRESSIONS:
  *                      all nodes
  key ~ 'pattern'        regex match on node key
  content ~ 'phrase'     regex match on node content
  degree > 15            numeric comparison on any field
  field = value          exact match
  field != value         not equal
  expr AND expr          boolean AND
  expr OR expr           boolean OR
  NOT expr               negation
  neighbors('key')       nodes linked to key
  neighbors('key') WHERE expr   ... with filter on edges/nodes

FIELDS:
  key, weight, content, degree, node_type, provenance,
  emotion, retrievals, uses, wrongs, created,
  clustering_coefficient (cc), community_id

OPERATORS:
  > < >= <= = != ~(regex)

PIPE STAGES:
  | sort FIELD [asc]     sort (desc by default)
  | limit N              cap results
  | select F,F,...       output fields as TSV
  | count                just show count
  | connectivity         show graph structure between results

FUNCTIONS:
  community('key')       community id of a node
  degree('key')          degree of a node

EXAMPLES:
  key ~ 'inner-life'                     substring match on keys
  content ~ 'made love'                  full-text search
  content ~ 'made love' | connectivity   find clusters among results
  (content ~ 'A' OR content ~ 'B') | connectivity
  degree > 15 | sort degree | limit 10   high-degree nodes
  key ~ 'journal' AND degree > 10 | count   count matching nodes
  neighbors('identity') WHERE strength > 0.5 | sort strength
  * | sort weight asc | limit 20         lowest-weight nodes
")]
    Query {
        /// Query expression (e.g. "key ~ 'inner-life'")
        expr: Vec<String>,
    },
    /// Mark a memory as useful (boosts weight)
    Used {
        /// Node key
        key: Vec<String>,
    },
    /// Mark a memory as wrong/irrelevant
    Wrong {
        /// Node key
        key: String,
        /// Optional context
        context: Vec<String>,
    },
    /// Mark a search result as not relevant (weakens edges that led to it)
    #[command(name = "not-relevant")]
    NotRelevant {
        /// Node key that was not relevant
        key: String,
    },
    /// Mark a node as not useful (weakens node weight, not edges)
    #[command(name = "not-useful")]
    NotUseful {
        /// Node key
        key: String,
    },
    /// Set a node's weight directly
    #[command(name = "weight-set")]
    WeightSet {
        /// Node key
        key: String,
        /// Weight (0.01 to 1.0)
        weight: f32,
    },
    /// Record a gap in memory coverage
    Gap {
        /// Gap description
        description: Vec<String>,
    },

    // ── Node operations ───────────────────────────────────────────────

    /// Node operations (delete, rename, list)
    #[command(subcommand)]
    Node(NodeCmd),

    // ── Journal ───────────────────────────────────────────────────────

    /// Journal operations (write, tail, enrich)
    #[command(subcommand)]
    Journal(JournalCmd),

    // ── Graph ─────────────────────────────────────────────────────────

    /// Graph operations (link, audit, spectral)
    #[command(subcommand, name = "graph")]
    GraphCmd(GraphCmd),

    // ── Cursor (spatial memory) ──────────────────────────────────────

    /// Navigate the memory graph with a persistent cursor
    #[command(subcommand)]
    Cursor(CursorCmd),

    // ── Agents ────────────────────────────────────────────────────────

    /// Agent and daemon operations
    #[command(subcommand)]
    Agent(AgentCmd),

    // ── Admin ─────────────────────────────────────────────────────────

    /// Admin operations (fsck, health, import, export)
    #[command(subcommand)]
    Admin(AdminCmd),
}
|
||
|
||
// `poc-memory node <cmd>`: low-level node CRUD. Higher-level flows
// (search, edit, history) live directly on `Command`.
#[derive(Subcommand)]
enum NodeCmd {
    /// Soft-delete a node
    Delete {
        /// Node key
        key: Vec<String>,
    },
    /// Rename a node key
    Rename {
        /// Old key
        old_key: String,
        /// New key
        new_key: String,
    },
    /// List all node keys (one per line, optional glob)
    #[command(name = "list")]
    List {
        /// Glob pattern to filter keys
        pattern: Option<String>,
    },
    /// List all edges (tsv: source target strength type)
    Edges,
    /// Dump entire store as JSON
    #[command(name = "dump")]
    Dump,
}
|
||
|
||
// `poc-memory cursor <cmd>`: a persistent "you are here" pointer into the
// graph, navigable in time (forward/back) and up/down the digest hierarchy.
#[derive(Subcommand)]
enum CursorCmd {
    /// Show current cursor position with context
    Show,
    /// Set cursor to a node key
    Set {
        /// Node key
        key: Vec<String>,
    },
    /// Move cursor forward in time
    Forward,
    /// Move cursor backward in time
    Back,
    /// Move up the digest hierarchy (journal→daily→weekly→monthly)
    Up,
    /// Move down the digest hierarchy (to first child)
    Down,
    /// Clear the cursor
    Clear,
}
|
||
|
||
// `poc-memory journal <cmd>`: episodic journal entries and their digests.
#[derive(Subcommand)]
enum JournalCmd {
    /// Write a journal entry to the store
    Write {
        /// Entry text
        text: Vec<String>,
    },
    /// Show recent journal/digest entries
    Tail {
        /// Number of entries to show (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
        /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
        #[arg(long, default_value_t = 0)]
        level: u8,
    },
    /// Enrich journal entry with conversation links
    Enrich {
        /// Path to JSONL transcript
        jsonl_path: String,
        /// Journal entry text to enrich
        entry_text: String,
        /// Grep line number for source location
        #[arg(default_value_t = 0)]
        grep_line: usize,
    },
}
|
||
|
||
// `poc-memory graph <cmd>`: edge management (link-*), topology maintenance
// (triangle-close, cap-degree, normalize-strengths, differentiate), and
// analysis (communities, interference, spectral-*).
#[derive(Subcommand)]
enum GraphCmd {
    /// Show neighbors of a node
    Link {
        /// Node key
        key: Vec<String>,
    },
    /// Add a link between two nodes
    #[command(name = "link-add")]
    LinkAdd {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
        /// Optional reason
        reason: Vec<String>,
    },
    /// Set strength of an existing link
    #[command(name = "link-set")]
    LinkSet {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
        /// Strength (0.0–1.0)
        strength: f32,
    },
    /// Simulate adding an edge, report topology impact
    #[command(name = "link-impact")]
    LinkImpact {
        /// Source node key
        source: String,
        /// Target node key
        target: String,
    },
    /// Walk every link, send to Sonnet for quality review
    #[command(name = "link-audit")]
    LinkAudit {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Link orphan nodes to similar neighbors
    #[command(name = "link-orphans")]
    LinkOrphans {
        /// Minimum degree to consider orphan (default: 2)
        #[arg(default_value_t = 2)]
        min_degree: usize,
        /// Links per orphan (default: 3)
        #[arg(default_value_t = 3)]
        links_per: usize,
        /// Similarity threshold (default: 0.15)
        #[arg(default_value_t = 0.15)]
        sim_threshold: f32,
    },
    /// Close triangles: link similar neighbors of hubs
    #[command(name = "triangle-close")]
    TriangleClose {
        /// Minimum hub degree (default: 5)
        #[arg(default_value_t = 5)]
        min_degree: usize,
        /// Similarity threshold (default: 0.3)
        #[arg(default_value_t = 0.3)]
        sim_threshold: f32,
        /// Maximum links per hub (default: 10)
        #[arg(default_value_t = 10)]
        max_per_hub: usize,
    },
    /// Cap node degree by pruning weak auto edges
    #[command(name = "cap-degree")]
    CapDegree {
        /// Maximum degree (default: 50)
        #[arg(default_value_t = 50)]
        max_degree: usize,
    },
    /// Set link strengths from neighborhood overlap (Jaccard similarity)
    #[command(name = "normalize-strengths")]
    NormalizeStrengths {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Redistribute hub links to section-level children
    Differentiate {
        /// Specific hub key (omit to list all differentiable hubs)
        key: Option<String>,
        /// Apply the redistribution
        #[arg(long)]
        apply: bool,
    },
    /// Walk temporal links: semantic ↔ episodic ↔ conversation
    Trace {
        /// Node key
        key: Vec<String>,
    },
    /// Detect potentially confusable memory pairs
    Interference {
        /// Similarity threshold (default: 0.4)
        #[arg(long, default_value_t = 0.4)]
        threshold: f32,
    },
    /// Show communities sorted by isolation (most isolated first)
    Communities {
        /// Number of communities to show
        #[arg(default_value_t = 20)]
        top_n: usize,
        /// Minimum community size to show
        #[arg(long, default_value_t = 2)]
        min_size: usize,
    },
    /// Show graph structure overview
    Overview,
    /// Spectral decomposition of the memory graph
    Spectral {
        /// Number of eigenvectors (default: 30)
        #[arg(default_value_t = 30)]
        k: usize,
    },
    /// Compute and save spectral embedding
    #[command(name = "spectral-save")]
    SpectralSave {
        /// Number of eigenvectors (default: 20)
        #[arg(default_value_t = 20)]
        k: usize,
    },
    /// Find spectrally nearest nodes
    #[command(name = "spectral-neighbors")]
    SpectralNeighbors {
        /// Node key
        key: String,
        /// Number of neighbors (default: 15)
        #[arg(default_value_t = 15)]
        n: usize,
    },
    /// Show nodes ranked by outlier/bridge score
    #[command(name = "spectral-positions")]
    SpectralPositions {
        /// Number of nodes to show (default: 30)
        #[arg(default_value_t = 30)]
        n: usize,
    },
    /// Find spectrally close but unlinked pairs
    #[command(name = "spectral-suggest")]
    SpectralSuggest {
        /// Number of pairs (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
    },
    /// Diagnose duplicate/overlapping nodes for a topic cluster
    Organize {
        /// Search term (matches node keys; also content unless --key-only)
        term: String,
        /// Similarity threshold for pair reporting (default: 0.4)
        #[arg(long, default_value_t = 0.4)]
        threshold: f32,
        /// Only match node keys, not content
        #[arg(long)]
        key_only: bool,
        /// Create anchor node for the search term and link to cluster
        #[arg(long)]
        anchor: bool,
    },
}
|
||
|
||
// `poc-memory agent daemon <cmd>`: lifecycle and RPC control of the
// background job daemon (nested under AgentCmd::Daemon).
#[derive(Subcommand)]
enum DaemonCmd {
    /// Start the daemon (default)
    Start,
    /// Show daemon status
    Status,
    /// Show daemon log
    Log {
        /// Job name to filter by
        job: Option<String>,
        /// Tail a task's log file (drill down from daemon log)
        #[arg(long)]
        task: Option<String>,
        /// Number of lines to show
        #[arg(long, default_value_t = 20)]
        lines: usize,
    },
    /// Install systemd service
    Install,
    /// Trigger consolidation via daemon
    Consolidate,
    /// Run an agent via the daemon
    Run {
        /// Agent name (e.g. organize, replay, linker)
        #[arg(default_value = "replay")]
        agent: String,
        /// Batch size
        #[arg(default_value_t = 1)]
        count: usize,
    },
    /// Interactive TUI
    Tui,
    /// Reload config file without restarting
    ReloadConfig,
}
|
||
|
||
// `poc-memory agent <cmd>`: LLM-agent workflows — consolidation, digests,
// fact/experience mining, replay, and evaluation. Daemon control nests one
// level deeper under `Daemon`.
#[derive(Subcommand)]
enum AgentCmd {
    /// Background job daemon
    #[command(subcommand)]
    Daemon(DaemonCmd),
    /// Run knowledge agents to convergence
    #[command(name = "knowledge-loop")]
    KnowledgeLoop {
        /// Maximum cycles before stopping
        #[arg(long, default_value_t = 20)]
        max_cycles: usize,
        /// Items per agent per cycle
        #[arg(long, default_value_t = 5)]
        batch_size: usize,
        /// Cycles to check for convergence
        #[arg(long, default_value_t = 5)]
        window: usize,
        /// Maximum inference depth
        #[arg(long, default_value_t = 4)]
        max_depth: i32,
    },
    /// Run agent consolidation on priority nodes
    #[command(name = "consolidate-batch")]
    ConsolidateBatch {
        /// Number of nodes to consolidate
        #[arg(long, default_value_t = 5)]
        count: usize,
        /// Generate replay agent prompt automatically
        #[arg(long)]
        auto: bool,
        /// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
        #[arg(long)]
        agent: Option<String>,
    },
    /// Analyze metrics, plan agent allocation
    #[command(name = "consolidate-session")]
    ConsolidateSession,
    /// Autonomous: plan → agents → apply → digests → links
    #[command(name = "consolidate-full")]
    ConsolidateFull,
    /// Import pending agent results into the graph
    #[command(name = "apply-agent")]
    ApplyAgent {
        /// Process all files without moving to done/
        #[arg(long)]
        all: bool,
    },
    /// Extract and apply actions from consolidation reports
    #[command(name = "apply-consolidation")]
    ApplyConsolidation {
        /// Apply actions (default: dry run)
        #[arg(long)]
        apply: bool,
        /// Read from specific report file
        #[arg(long)]
        report: Option<String>,
    },
    /// Generate episodic digests (daily, weekly, monthly, auto)
    Digest {
        /// Digest type: daily, weekly, monthly, auto
        #[command(subcommand)]
        level: DigestLevel,
    },
    /// Parse and apply links from digest nodes
    #[command(name = "digest-links")]
    DigestLinks {
        /// Apply the links (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    // Retained for CLI compatibility; the Run impl rejects it with a
    // pointer to the observation agent.
    /// Mine conversation for experiential moments to journal
    #[command(name = "experience-mine")]
    ExperienceMine {
        /// Path to JSONL transcript (default: most recent)
        jsonl_path: Option<String>,
    },
    /// Extract atomic facts from conversation transcripts
    #[command(name = "fact-mine")]
    FactMine {
        /// Path to JSONL transcript or directory (with --batch)
        path: String,
        /// Process all .jsonl files in directory
        #[arg(long)]
        batch: bool,
        /// Show chunks without calling model
        #[arg(long)]
        dry_run: bool,
        /// Write JSON to file (default: stdout)
        #[arg(long, short)]
        output: Option<String>,
        /// Skip transcripts with fewer messages
        #[arg(long, default_value_t = 10)]
        min_messages: usize,
    },
    /// Extract facts from a transcript and store directly
    #[command(name = "fact-mine-store")]
    FactMineStore {
        /// Path to JSONL transcript
        path: String,
    },
    /// Run a single agent by name
    Run {
        /// Agent name (e.g. observation, linker, distill)
        agent: String,
        /// Batch size (number of seed nodes/fragments)
        #[arg(long, default_value_t = 5)]
        count: usize,
        /// Target specific node keys (overrides agent's query)
        #[arg(long)]
        target: Vec<String>,
        /// Run agent on each result of a query (e.g. 'key ~ "bcachefs" | limit 10')
        #[arg(long)]
        query: Option<String>,
        /// Dry run — set POC_MEMORY_DRY_RUN=1 so mutations are no-ops
        #[arg(long)]
        dry_run: bool,
        /// Run locally instead of queuing to daemon
        #[arg(long)]
        local: bool,
        /// Directory for agent output/input state (persists across runs)
        #[arg(long)]
        state_dir: Option<String>,
    },
    /// Show spaced repetition replay queue
    #[command(name = "replay-queue")]
    ReplayQueue {
        /// Number of items to show
        #[arg(long, default_value_t = 10)]
        count: usize,
    },
    /// Evaluate agent quality by LLM-sorted ranking
    #[command(name = "evaluate")]
    Evaluate {
        /// Number of pairwise matchups to run
        #[arg(long, default_value_t = 30)]
        matchups: usize,
        /// Model to use for comparison (haiku or sonnet)
        #[arg(long, default_value = "haiku")]
        model: String,
        /// Show example comparison prompt without calling LLM
        #[arg(long)]
        dry_run: bool,
    },
}
|
||
|
||
// `poc-memory admin <cmd>`: maintenance and integrity operations — store
// checks (fsck, health, dedup), bulk import/export, and migrations.
#[derive(Subcommand)]
enum AdminCmd {
    /// Scan markdown files, index all memory units
    Init,
    /// Report graph metrics (CC, communities, small-world)
    Health,
    /// Run consistency checks and repair
    Fsck,
    /// Find and merge duplicate nodes (same key, multiple UUIDs)
    Dedup {
        /// Apply the merge (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Bulk rename: replace a character in all keys
    #[command(name = "bulk-rename")]
    BulkRename {
        /// Character to replace
        from: String,
        /// Replacement character
        to: String,
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Brief metrics check (for cron/notifications)
    #[command(name = "daily-check")]
    DailyCheck,
    /// Import markdown file(s) into the store
    Import {
        /// File paths
        files: Vec<String>,
    },
    /// Export store nodes to markdown file(s)
    Export {
        /// File keys to export (or --all)
        files: Vec<String>,
        /// Export all file-level nodes
        #[arg(long)]
        all: bool,
    },
    /// Output session-start context from the store
    #[command(name = "load-context")]
    LoadContext {
        /// Show word count statistics instead of content
        #[arg(long)]
        stats: bool,
    },
    /// Show recent retrieval log
    Log,
    /// Show current parameters
    Params,
    /// Bump daily lookup counter for keys
    #[command(name = "lookup-bump")]
    LookupBump {
        /// Node keys
        keys: Vec<String>,
    },
    /// Show daily lookup counts
    Lookups {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Migrate transcript stub nodes to progress log
    #[command(name = "migrate-transcript-progress")]
    MigrateTranscriptProgress,
}
|
||
|
||
// Digest granularity for `poc-memory agent digest <level>`. `Auto`
// backfills whatever digests are missing at every level.
#[derive(Subcommand)]
enum DigestLevel {
    /// Generate daily digest
    Daily {
        /// Date (default: today)
        date: Option<String>,
    },
    /// Generate weekly digest
    Weekly {
        /// Date or week label (default: current week)
        date: Option<String>,
    },
    /// Generate monthly digest
    Monthly {
        /// Month (YYYY-MM) or date (default: current month)
        date: Option<String>,
    },
    /// Generate all missing digests
    Auto,
}
|
||
|
||
/// Print help with subcommands expanded to show nested commands.
|
||
fn print_help() {
|
||
use clap::CommandFactory;
|
||
let cmd = Cli::command();
|
||
|
||
println!("poc-memory - graph-structured memory store");
|
||
println!("usage: poc-memory <command> [<args>]\n");
|
||
|
||
for sub in cmd.get_subcommands() {
|
||
if sub.get_name() == "help" { continue }
|
||
let children: Vec<_> = sub.get_subcommands()
|
||
.filter(|c| c.get_name() != "help")
|
||
.collect();
|
||
if !children.is_empty() {
|
||
for child in &children {
|
||
let about = child.get_about().map(|s| s.to_string()).unwrap_or_default();
|
||
let full = format!("{} {}", sub.get_name(), child.get_name());
|
||
// Recurse one more level for daemon subcommands etc.
|
||
let grandchildren: Vec<_> = child.get_subcommands()
|
||
.filter(|c| c.get_name() != "help")
|
||
.collect();
|
||
if !grandchildren.is_empty() {
|
||
for gc in grandchildren {
|
||
let gc_about = gc.get_about().map(|s| s.to_string()).unwrap_or_default();
|
||
let gc_full = format!("{} {}", full, gc.get_name());
|
||
println!(" {:<34}{gc_about}", gc_full);
|
||
}
|
||
} else {
|
||
println!(" {:<34}{about}", full);
|
||
}
|
||
}
|
||
} else {
|
||
let about = sub.get_about().map(|s| s.to_string()).unwrap_or_default();
|
||
println!(" {:<34}{about}", sub.get_name());
|
||
}
|
||
}
|
||
}
|
||
|
||
// ── Dispatch ─────────────────────────────────────────────────────────
|
||
|
||
/// Uniform dispatch for every subcommand enum: `main()` only calls
/// `cli.command.run()`, and container variants delegate to their nested
/// enum's own `run()`.
trait Run {
    /// Consume the parsed command and execute it, returning a user-facing
    /// error message on failure.
    fn run(self) -> Result<(), String>;
}
|
||
|
||
impl Run for Command {
    // Leaf variants map 1:1 onto `cli::*` handler functions; the six
    // container variants (Node, Journal, GraphCmd, Cursor, Agent, Admin)
    // just forward to their own Run impls.
    fn run(self) -> Result<(), String> {
        match self {
            Self::Search { query, pipeline, expand, full, debug, fuzzy, content }
                => cli::misc::cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content),
            Self::Render { key } => cli::node::cmd_render(&key),
            Self::Write { key } => cli::node::cmd_write(&key),
            Self::Edit { key } => cli::node::cmd_edit(&key),
            Self::History { full, key } => cli::node::cmd_history(&key, full),
            Self::Tail { n, full } => cli::journal::cmd_tail(n, full),
            Self::Status => cli::misc::cmd_status(),
            Self::Query { expr } => cli::misc::cmd_query(&expr),
            Self::Used { key } => cli::node::cmd_used(&key),
            Self::Wrong { key, context } => cli::node::cmd_wrong(&key, &context),
            Self::NotRelevant { key } => cli::node::cmd_not_relevant(&key),
            Self::NotUseful { key } => cli::node::cmd_not_useful(&key),
            Self::WeightSet { key, weight } => cli::node::cmd_weight_set(&key, weight),
            Self::Gap { description } => cli::node::cmd_gap(&description),
            Self::Node(sub) => sub.run(),
            Self::Journal(sub) => sub.run(),
            Self::GraphCmd(sub) => sub.run(),
            Self::Cursor(sub) => sub.run(),
            Self::Agent(sub) => sub.run(),
            Self::Admin(sub) => sub.run(),
        }
    }
}
|
||
|
||
impl Run for NodeCmd {
    // Thin dispatch onto the node-level handlers in cli::node.
    fn run(self) -> Result<(), String> {
        match self {
            Self::Delete { key } => cli::node::cmd_node_delete(&key),
            Self::Rename { old_key, new_key } => cli::node::cmd_node_rename(&old_key, &new_key),
            Self::List { pattern } => cli::node::cmd_list_keys(pattern.as_deref()),
            Self::Edges => cli::node::cmd_list_edges(),
            Self::Dump => cli::node::cmd_dump_json(),
        }
    }
}
|
||
|
||
impl Run for JournalCmd {
    // Thin dispatch onto journal handlers; Enrich lives in cli::agent
    // because it drives an LLM pass over the transcript.
    fn run(self) -> Result<(), String> {
        match self {
            Self::Write { text } => cli::journal::cmd_journal_write(&text),
            Self::Tail { n, full, level } => cli::journal::cmd_journal_tail(n, full, level),
            Self::Enrich { jsonl_path, entry_text, grep_line }
                => cli::agent::cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
        }
    }
}
|
||
|
||
impl Run for GraphCmd {
    // Thin dispatch onto the graph handlers in cli::graph.
    fn run(self) -> Result<(), String> {
        match self {
            Self::Link { key } => cli::graph::cmd_link(&key),
            Self::LinkAdd { source, target, reason }
                => cli::graph::cmd_link_add(&source, &target, &reason),
            Self::LinkSet { source, target, strength }
                => cli::graph::cmd_link_set(&source, &target, strength),
            Self::LinkImpact { source, target } => cli::graph::cmd_link_impact(&source, &target),
            Self::LinkAudit { apply } => cli::graph::cmd_link_audit(apply),
            Self::LinkOrphans { min_degree, links_per, sim_threshold }
                => cli::graph::cmd_link_orphans(min_degree, links_per, sim_threshold),
            Self::TriangleClose { min_degree, sim_threshold, max_per_hub }
                => cli::graph::cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
            Self::CapDegree { max_degree } => cli::graph::cmd_cap_degree(max_degree),
            Self::NormalizeStrengths { apply } => cli::graph::cmd_normalize_strengths(apply),
            Self::Differentiate { key, apply } => cli::graph::cmd_differentiate(key.as_deref(), apply),
            Self::Trace { key } => cli::graph::cmd_trace(&key),
            Self::Interference { threshold } => cli::graph::cmd_interference(threshold),
            Self::Communities { top_n, min_size } => cli::graph::cmd_communities(top_n, min_size),
            // "Overview" on the CLI maps to the legacy cmd_graph handler.
            Self::Overview => cli::graph::cmd_graph(),
            Self::Spectral { k } => cli::graph::cmd_spectral(k),
            Self::SpectralSave { k } => cli::graph::cmd_spectral_save(k),
            Self::SpectralNeighbors { key, n } => cli::graph::cmd_spectral_neighbors(&key, n),
            Self::SpectralPositions { n } => cli::graph::cmd_spectral_positions(n),
            Self::SpectralSuggest { n } => cli::graph::cmd_spectral_suggest(n),
            Self::Organize { term, threshold, key_only, anchor }
                => cli::graph::cmd_organize(&term, threshold, key_only, anchor),
        }
    }
}
|
||
|
||
impl Run for CursorCmd {
|
||
fn run(self) -> Result<(), String> {
|
||
match self {
|
||
Self::Show => {
|
||
let store = store::Store::load()?;
|
||
cursor::show(&store)
|
||
}
|
||
Self::Set { key } => {
|
||
if key.is_empty() { return Err("cursor set requires a key".into()); }
|
||
let key = key.join(" ");
|
||
let store = store::Store::load()?;
|
||
let bare = store::strip_md_suffix(&key);
|
||
if !store.nodes.contains_key(&bare) {
|
||
return Err(format!("Node not found: {}", bare));
|
||
}
|
||
cursor::set(&bare)?;
|
||
cursor::show(&store)
|
||
}
|
||
Self::Forward => { let s = store::Store::load()?; cursor::move_temporal(&s, true) }
|
||
Self::Back => { let s = store::Store::load()?; cursor::move_temporal(&s, false) }
|
||
Self::Up => { let s = store::Store::load()?; cursor::move_up(&s) }
|
||
Self::Down => { let s = store::Store::load()?; cursor::move_down(&s) }
|
||
Self::Clear => cursor::clear(),
|
||
}
|
||
}
|
||
}
|
||
|
||
impl Run for DaemonCmd {
|
||
fn run(self) -> Result<(), String> {
|
||
match self {
|
||
Self::Start => daemon::run_daemon(),
|
||
Self::Status => daemon::show_status(),
|
||
Self::Log { job, task, lines } => {
|
||
if let Some(ref task_name) = task {
|
||
daemon::show_task_log(task_name, lines)
|
||
} else {
|
||
daemon::show_log(job.as_deref(), lines)
|
||
}
|
||
}
|
||
Self::Install => daemon::install_service(),
|
||
Self::Consolidate => daemon::rpc_consolidate(),
|
||
Self::Run { agent, count } => daemon::rpc_run_agent(&agent, count),
|
||
Self::Tui => tui::run_tui(),
|
||
Self::ReloadConfig => {
|
||
match daemon::send_rpc_pub("reload-config") {
|
||
Some(resp) => { eprintln!("{}", resp.trim()); Ok(()) }
|
||
None => Err("daemon not running".into()),
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
impl Run for AgentCmd {
    // Thin dispatch onto cli::agent handlers; Daemon forwards to
    // DaemonCmd's own Run impl, and ApplyAgent/Digest use local helpers.
    fn run(self) -> Result<(), String> {
        match self {
            Self::Daemon(sub) => sub.run(),
            Self::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
                => cli::agent::cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
            Self::ConsolidateBatch { count, auto, agent }
                => cli::agent::cmd_consolidate_batch(count, auto, agent),
            Self::ConsolidateSession => cli::agent::cmd_consolidate_session(),
            Self::ConsolidateFull => cli::agent::cmd_consolidate_full(),
            Self::ApplyAgent { all } => cmd_apply_agent(all),
            Self::ApplyConsolidation { apply, report }
                => cli::agent::cmd_apply_consolidation(apply, report.as_deref()),
            Self::Digest { level } => cmd_digest(level),
            Self::DigestLinks { apply } => cli::agent::cmd_digest_links(apply),
            // Variant kept so old invocations fail with guidance rather
            // than an "unknown subcommand" parse error.
            Self::ExperienceMine { .. }
                => Err("experience-mine has been removed — use the observation agent instead.".into()),
            Self::FactMine { path, batch, dry_run, output, min_messages }
                => cli::agent::cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
            Self::FactMineStore { path } => cli::agent::cmd_fact_mine_store(&path),
            Self::Run { agent, count, target, query, dry_run, local, state_dir }
                => cli::agent::cmd_run_agent(&agent, count, &target, query.as_deref(), dry_run, local, state_dir.as_deref()),
            Self::ReplayQueue { count } => cli::agent::cmd_replay_queue(count),
            Self::Evaluate { matchups, model, dry_run }
                => cli::agent::cmd_evaluate_agents(matchups, &model, dry_run),
        }
    }
}
|
||
|
||
impl Run for AdminCmd {
    // Thin dispatch onto cli::admin / cli::misc / cli::node handlers;
    // the one-off migration is inlined since it has no other caller.
    fn run(self) -> Result<(), String> {
        match self {
            Self::Init => cli::admin::cmd_init(),
            Self::Health => cli::admin::cmd_health(),
            Self::Fsck => cli::admin::cmd_fsck(),
            Self::Dedup { apply } => cli::admin::cmd_dedup(apply),
            Self::BulkRename { from, to, apply } => cli::admin::cmd_bulk_rename(&from, &to, apply),
            Self::DailyCheck => cli::admin::cmd_daily_check(),
            Self::Import { files } => cli::admin::cmd_import(&files),
            Self::Export { files, all } => cli::admin::cmd_export(&files, all),
            Self::LoadContext { stats } => cli::misc::cmd_load_context(stats),
            Self::Log => cli::misc::cmd_log(),
            Self::Params => cli::misc::cmd_params(),
            Self::LookupBump { keys } => cli::node::cmd_lookup_bump(&keys),
            Self::Lookups { date } => cli::node::cmd_lookups(date.as_deref()),
            Self::MigrateTranscriptProgress => {
                let mut store = store::Store::load()?;
                let count = store.migrate_transcript_progress()?;
                println!("Migrated {} transcript segment markers", count);
                Ok(())
            }
        }
    }
}
|
||
|
||
fn main() {
|
||
// Handle --help ourselves for expanded subcommand display
|
||
let args: Vec<String> = std::env::args().collect();
|
||
if args.len() <= 1 || args.iter().any(|a| a == "--help" || a == "-h") && args.len() == 2 {
|
||
print_help();
|
||
return;
|
||
}
|
||
|
||
let cli = Cli::parse();
|
||
|
||
if let Err(e) = cli.command.run() {
|
||
eprintln!("Error: {}", e);
|
||
process::exit(1);
|
||
}
|
||
}
// ── Command implementations ─────────────────────────────────────────
/// Apply links from a single agent result JSON file.
|
||
/// Returns (links_applied, errors).
|
||
fn apply_agent_file(
|
||
store: &mut store::Store,
|
||
data: &serde_json::Value,
|
||
) -> (usize, usize) {
|
||
let agent_result = data.get("agent_result").or(Some(data));
|
||
let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
|
||
Some(l) => l,
|
||
None => return (0, 0),
|
||
};
|
||
|
||
let entry_text = data.get("entry_text")
|
||
.and_then(|v| v.as_str())
|
||
.unwrap_or("");
|
||
|
||
if let (Some(start), Some(end)) = (
|
||
agent_result.and_then(|r| r.get("source_start")).and_then(|v| v.as_u64()),
|
||
agent_result.and_then(|r| r.get("source_end")).and_then(|v| v.as_u64()),
|
||
) {
|
||
println!(" Source: L{}-L{}", start, end);
|
||
}
|
||
|
||
let mut applied = 0;
|
||
let mut errors = 0;
|
||
|
||
for link in links {
|
||
let target = match link.get("target").and_then(|v| v.as_str()) {
|
||
Some(t) => t,
|
||
None => continue,
|
||
};
|
||
let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
|
||
|
||
if let Some(note) = target.strip_prefix("NOTE:") {
|
||
println!(" NOTE: {} — {}", note, reason);
|
||
continue;
|
||
}
|
||
|
||
let resolved = match store.resolve_key(target) {
|
||
Ok(r) => r,
|
||
Err(_) => {
|
||
println!(" SKIP {} (not found in graph)", target);
|
||
continue;
|
||
}
|
||
};
|
||
|
||
let source_key = match store.find_journal_node(entry_text) {
|
||
Some(k) => k,
|
||
None => {
|
||
println!(" SKIP {} (no matching journal node)", target);
|
||
continue;
|
||
}
|
||
};
|
||
|
||
let source_uuid = match store.nodes.get(&source_key) {
|
||
Some(n) => n.uuid,
|
||
None => continue,
|
||
};
|
||
let target_uuid = match store.nodes.get(&resolved) {
|
||
Some(n) => n.uuid,
|
||
None => continue,
|
||
};
|
||
|
||
let rel = store::new_relation(
|
||
source_uuid, target_uuid,
|
||
store::RelationType::Link,
|
||
0.5,
|
||
&source_key, &resolved,
|
||
);
|
||
if let Err(e) = store.add_relation(rel) {
|
||
eprintln!(" Error adding relation: {}", e);
|
||
errors += 1;
|
||
} else {
|
||
println!(" LINK {} → {} ({})", source_key, resolved, reason);
|
||
applied += 1;
|
||
}
|
||
}
|
||
|
||
(applied, errors)
|
||
}
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
|
||
let results_dir = store::memory_dir().join("agent-results");
|
||
|
||
if !results_dir.exists() {
|
||
println!("No agent results directory");
|
||
return Ok(());
|
||
}
|
||
|
||
let mut store = store::Store::load()?;
|
||
let mut applied = 0;
|
||
let mut errors = 0;
|
||
|
||
let mut files: Vec<_> = std::fs::read_dir(&results_dir)
|
||
.map_err(|e| format!("read results dir: {}", e))?
|
||
.filter_map(|e| e.ok())
|
||
.filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
|
||
.collect();
|
||
files.sort_by_key(|e| e.path());
|
||
|
||
for entry in &files {
|
||
let path = entry.path();
|
||
let content = match std::fs::read_to_string(&path) {
|
||
Ok(c) => c,
|
||
Err(e) => {
|
||
eprintln!(" Skip {}: {}", path.display(), e);
|
||
errors += 1;
|
||
continue;
|
||
}
|
||
};
|
||
|
||
let data: serde_json::Value = match serde_json::from_str(&content) {
|
||
Ok(d) => d,
|
||
Err(e) => {
|
||
eprintln!(" Skip {}: parse error: {}", path.display(), e);
|
||
errors += 1;
|
||
continue;
|
||
}
|
||
};
|
||
|
||
println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
|
||
let (a, e) = apply_agent_file(&mut store, &data);
|
||
applied += a;
|
||
errors += e;
|
||
|
||
if !process_all {
|
||
let done_dir = crate::util::memory_subdir("agent-results/done")?;
|
||
let dest = done_dir.join(path.file_name().unwrap());
|
||
std::fs::rename(&path, &dest).ok();
|
||
}
|
||
}
|
||
|
||
if applied > 0 {
|
||
store.save()?;
|
||
}
|
||
|
||
println!("\nApplied {} links ({} errors, {} files processed)",
|
||
applied, errors, files.len());
|
||
Ok(())
|
||
}
fn cmd_digest(level: DigestLevel) -> Result<(), String> {
|
||
let mut store = store::Store::load()?;
|
||
|
||
match level {
|
||
DigestLevel::Auto => digest::digest_auto(&mut store),
|
||
DigestLevel::Daily { date } => {
|
||
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
||
digest::generate(&mut store, "daily", &arg)
|
||
}
|
||
DigestLevel::Weekly { date } => {
|
||
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
||
digest::generate(&mut store, "weekly", &arg)
|
||
}
|
||
DigestLevel::Monthly { date } => {
|
||
let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
|
||
digest::generate(&mut store, "monthly", &arg)
|
||
}
|
||
}
|
||
}