Reorganize subcommands into logical groups

60+ flat subcommands grouped into:
- Core (daily use): search, render, write, history, tail, status, query, used, wrong, gap
- Node: delete, rename, list, edges, dump
- Journal: write, tail, enrich
- Graph: link, audit, spectral, etc.
- Agent: daemon, knowledge-loop, consolidate, digest, etc.
- Admin: init, health, fsck, import, export, etc.

Also: remove dead migration code (migrate.rs and the Migrate/JournalTsMigrate commands),
update memory-search and poc-hook to use the new subcommand paths, and update the daemon
systemd template to use the `agent daemon` path.
This commit is contained in:
ProofOfConcept 2026-03-11 01:32:21 -04:00
parent d76b14dfcd
commit 57c26d8157
5 changed files with 366 additions and 388 deletions

View file

@ -1690,7 +1690,7 @@ After=default.target
[Service] [Service]
Type=simple Type=simple
ExecStart={exe} daemon ExecStart={exe} agent daemon
Restart=on-failure Restart=on-failure
RestartSec=30 RestartSec=30
Environment=HOME={home} Environment=HOME={home}
@ -1748,7 +1748,7 @@ After=default.target
[Service] [Service]
Type=simple Type=simple
ExecStart={exe} daemon ExecStart={exe} agent daemon
Restart=on-failure Restart=on-failure
RestartSec=10 RestartSec=10
Environment=HOME={home} Environment=HOME={home}

View file

@ -126,7 +126,7 @@ fn main() {
if debug { println!("[memory-search] loading full context"); } if debug { println!("[memory-search] loading full context"); }
// Load full memory context, chunk it, print first chunk, save rest // Load full memory context, chunk it, print first chunk, save rest
if let Ok(output) = Command::new("poc-memory").args(["load-context"]).output() { if let Ok(output) = Command::new("poc-memory").args(["admin", "load-context"]).output() {
if output.status.success() { if output.status.success() {
let ctx = String::from_utf8_lossy(&output.stdout).to_string(); let ctx = String::from_utf8_lossy(&output.stdout).to_string();
if !ctx.trim().is_empty() { if !ctx.trim().is_empty() {

View file

@ -115,7 +115,7 @@ fn check_context(transcript: &PathBuf, rate_limit: bool) {
"\ "\
CONTEXT WARNING: Compaction approaching ({usage} tokens). Write a journal entry NOW. CONTEXT WARNING: Compaction approaching ({usage} tokens). Write a journal entry NOW.
Use `poc-memory journal-write \"entry text\"` to save a dated entry covering: Use `poc-memory journal write \"entry text\"` to save a dated entry covering:
- What you're working on and current state (done / in progress / blocked) - What you're working on and current state (done / in progress / blocked)
- Key things learned this session (patterns, debugging insights) - Key things learned this session (patterns, debugging insights)
- Anything half-finished that needs pickup - Anything half-finished that needs pickup

View file

@ -13,7 +13,6 @@ pub mod similarity;
pub mod spectral; pub mod spectral;
pub mod lookups; pub mod lookups;
pub mod query; pub mod query;
pub mod migrate;
pub mod transcript; pub mod transcript;
pub mod neuro; pub mod neuro;
pub mod counters; pub mod counters;

View file

@ -58,6 +58,8 @@ struct Cli {
#[derive(Subcommand)] #[derive(Subcommand)]
enum Command { enum Command {
// ── Core (daily use) ──────────────────────────────────────────────
/// Search memory (AND logic across terms) /// Search memory (AND logic across terms)
/// ///
/// Pipeline: -p spread -p spectral,k=20 /// Pipeline: -p spread -p spectral,k=20
@ -84,24 +86,40 @@ enum Command {
#[arg(long)] #[arg(long)]
content: bool, content: bool,
}, },
/// Scan markdown files, index all memory units /// Output a node's content to stdout
Init, Render {
/// Migrate from old weights.json system /// Node key
Migrate, key: Vec<String>,
/// Report graph metrics (CC, communities, small-world) },
Health, /// Upsert node content from stdin
/// Run consistency checks and repair Write {
Fsck, /// Node key
/// Find and merge duplicate nodes (same key, multiple UUIDs) key: Vec<String>,
Dedup { },
/// Apply the merge (default: dry run) /// Show all stored versions of a node
History {
/// Show full content for every version
#[arg(long)] #[arg(long)]
apply: bool, full: bool,
/// Node key
key: Vec<String>,
},
/// Show most recent writes to the node log
Tail {
/// Number of entries (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)]
full: bool,
}, },
/// Summary of memory state /// Summary of memory state
Status, Status,
/// Show graph structure overview /// Query the memory graph
Graph, Query {
/// Query expression (e.g. "degree > 15 | sort degree | limit 10")
expr: Vec<String>,
},
/// Mark a memory as useful (boosts weight) /// Mark a memory as useful (boosts weight)
Used { Used {
/// Node key /// Node key
@ -119,61 +137,103 @@ enum Command {
/// Gap description /// Gap description
description: Vec<String>, description: Vec<String>,
}, },
/// Cap node degree by pruning weak auto edges
#[command(name = "cap-degree")] // ── Node operations ───────────────────────────────────────────────
CapDegree {
/// Maximum degree (default: 50) /// Node operations (delete, rename, list)
#[arg(default_value_t = 50)] #[command(subcommand)]
max_degree: usize, Node(NodeCmd),
// ── Journal ───────────────────────────────────────────────────────
/// Journal operations (write, tail, enrich)
#[command(subcommand)]
Journal(JournalCmd),
// ── Graph ─────────────────────────────────────────────────────────
/// Graph operations (link, audit, spectral)
#[command(subcommand, name = "graph")]
GraphCmd(GraphCmd),
// ── Agents ────────────────────────────────────────────────────────
/// Agent and daemon operations
#[command(subcommand)]
Agent(AgentCmd),
// ── Admin ─────────────────────────────────────────────────────────
/// Admin operations (fsck, health, import, export)
#[command(subcommand)]
Admin(AdminCmd),
}
#[derive(Subcommand)]
enum NodeCmd {
/// Soft-delete a node
Delete {
/// Node key
key: Vec<String>,
}, },
/// Link orphan nodes to similar neighbors /// Rename a node key
#[command(name = "link-orphans")] Rename {
LinkOrphans { /// Old key
/// Minimum degree to consider orphan (default: 2) old_key: String,
#[arg(default_value_t = 2)] /// New key
min_degree: usize, new_key: String,
/// Links per orphan (default: 3)
#[arg(default_value_t = 3)]
links_per: usize,
/// Similarity threshold (default: 0.15)
#[arg(default_value_t = 0.15)]
sim_threshold: f32,
}, },
/// Run agent consolidation on priority nodes /// List all node keys (one per line, optional glob)
#[command(name = "consolidate-batch")] #[command(name = "list")]
ConsolidateBatch { List {
/// Number of nodes to consolidate /// Glob pattern to filter keys
#[arg(long, default_value_t = 5)] pattern: Option<String>,
count: usize, },
/// Generate replay agent prompt automatically /// List all edges (tsv: source target strength type)
Edges,
/// Dump entire store as JSON
#[command(name = "dump")]
Dump,
}
#[derive(Subcommand)]
enum JournalCmd {
/// Write a journal entry to the store
Write {
/// Entry text
text: Vec<String>,
},
/// Show recent journal/digest entries
Tail {
/// Number of entries to show (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)] #[arg(long)]
auto: bool, full: bool,
/// Generate prompt for a specific agent (replay, linker, separator, transfer, health) /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
#[arg(long)] #[arg(long, default_value_t = 0)]
agent: Option<String>, level: u8,
}, },
/// Show recent retrieval log /// Enrich journal entry with conversation links
Log, Enrich {
/// Show current parameters /// Path to JSONL transcript
Params, jsonl_path: String,
/// Journal entry text to enrich
entry_text: String,
/// Grep line number for source location
#[arg(default_value_t = 0)]
grep_line: usize,
},
}
#[derive(Subcommand)]
enum GraphCmd {
/// Show neighbors of a node /// Show neighbors of a node
Link { Link {
/// Node key /// Node key
key: Vec<String>, key: Vec<String>,
}, },
/// Show spaced repetition replay queue
#[command(name = "replay-queue")]
ReplayQueue {
/// Number of items to show
#[arg(long, default_value_t = 10)]
count: usize,
},
/// Detect potentially confusable memory pairs
Interference {
/// Similarity threshold (default: 0.4)
#[arg(long, default_value_t = 0.4)]
threshold: f32,
},
/// Add a link between two nodes /// Add a link between two nodes
#[command(name = "link-add")] #[command(name = "link-add")]
LinkAdd { LinkAdd {
@ -192,12 +252,26 @@ enum Command {
/// Target node key /// Target node key
target: String, target: String,
}, },
/// Analyze metrics, plan agent allocation /// Walk every link, send to Sonnet for quality review
#[command(name = "consolidate-session")] #[command(name = "link-audit")]
ConsolidateSession, LinkAudit {
/// Autonomous: plan → agents → apply → digests → links /// Apply changes (default: dry run)
#[command(name = "consolidate-full")] #[arg(long)]
ConsolidateFull, apply: bool,
},
/// Link orphan nodes to similar neighbors
#[command(name = "link-orphans")]
LinkOrphans {
/// Minimum degree to consider orphan (default: 2)
#[arg(default_value_t = 2)]
min_degree: usize,
/// Links per orphan (default: 3)
#[arg(default_value_t = 3)]
links_per: usize,
/// Similarity threshold (default: 0.15)
#[arg(default_value_t = 0.15)]
sim_threshold: f32,
},
/// Close triangles: link similar neighbors of hubs /// Close triangles: link similar neighbors of hubs
#[command(name = "triangle-close")] #[command(name = "triangle-close")]
TriangleClose { TriangleClose {
@ -211,55 +285,12 @@ enum Command {
#[arg(default_value_t = 10)] #[arg(default_value_t = 10)]
max_per_hub: usize, max_per_hub: usize,
}, },
/// Brief metrics check (for cron/notifications) /// Cap node degree by pruning weak auto edges
#[command(name = "daily-check")] #[command(name = "cap-degree")]
DailyCheck, CapDegree {
/// Import pending agent results into the graph /// Maximum degree (default: 50)
#[command(name = "apply-agent")] #[arg(default_value_t = 50)]
ApplyAgent { max_degree: usize,
/// Process all files without moving to done/
#[arg(long)]
all: bool,
},
/// Generate episodic digests (daily, weekly, monthly, auto)
Digest {
/// Digest type: daily, weekly, monthly, auto
#[command(subcommand)]
level: DigestLevel,
},
/// Parse and apply links from digest nodes
#[command(name = "digest-links")]
DigestLinks {
/// Apply the links (default: dry run)
#[arg(long)]
apply: bool,
},
/// Enrich journal entry with conversation links
#[command(name = "journal-enrich")]
JournalEnrich {
/// Path to JSONL transcript
jsonl_path: String,
/// Journal entry text to enrich
entry_text: String,
/// Grep line number for source location
#[arg(default_value_t = 0)]
grep_line: usize,
},
/// Mine conversation for experiential moments to journal
#[command(name = "experience-mine")]
ExperienceMine {
/// Path to JSONL transcript (default: most recent)
jsonl_path: Option<String>,
},
/// Extract and apply actions from consolidation reports
#[command(name = "apply-consolidation")]
ApplyConsolidation {
/// Apply actions (default: dry run)
#[arg(long)]
apply: bool,
/// Read from specific report file
#[arg(long)]
report: Option<String>,
}, },
/// Redistribute hub links to section-level children /// Redistribute hub links to section-level children
Differentiate { Differentiate {
@ -269,18 +300,19 @@ enum Command {
#[arg(long)] #[arg(long)]
apply: bool, apply: bool,
}, },
/// Walk every link, send to Sonnet for quality review
#[command(name = "link-audit")]
LinkAudit {
/// Apply changes (default: dry run)
#[arg(long)]
apply: bool,
},
/// Walk temporal links: semantic ↔ episodic ↔ conversation /// Walk temporal links: semantic ↔ episodic ↔ conversation
Trace { Trace {
/// Node key /// Node key
key: Vec<String>, key: Vec<String>,
}, },
/// Detect potentially confusable memory pairs
Interference {
/// Similarity threshold (default: 0.4)
#[arg(long, default_value_t = 0.4)]
threshold: f32,
},
/// Show graph structure overview
Overview,
/// Spectral decomposition of the memory graph /// Spectral decomposition of the memory graph
Spectral { Spectral {
/// Number of eigenvectors (default: 30) /// Number of eigenvectors (default: 30)
@ -317,117 +349,10 @@ enum Command {
#[arg(default_value_t = 20)] #[arg(default_value_t = 20)]
n: usize, n: usize,
}, },
/// List all node keys (one per line, optional glob) }
#[command(name = "list-keys")]
ListKeys { #[derive(Subcommand)]
/// Glob pattern to filter keys enum AgentCmd {
pattern: Option<String>,
},
/// List all edges (tsv: source target strength type)
#[command(name = "list-edges")]
ListEdges,
/// Dump entire store as JSON
#[command(name = "dump-json")]
DumpJson,
/// Soft-delete a node
#[command(name = "node-delete")]
NodeDelete {
/// Node key
key: Vec<String>,
},
/// Rename a node key
#[command(name = "node-rename")]
NodeRename {
/// Old key
old_key: String,
/// New key
new_key: String,
},
/// Populate created_at for nodes missing timestamps
#[command(name = "journal-ts-migrate")]
JournalTsMigrate,
/// Output session-start context from the store
#[command(name = "load-context")]
LoadContext {
/// Show word count statistics instead of content
#[arg(long)]
stats: bool,
},
/// Output a node's content to stdout
Render {
/// Node key
key: Vec<String>,
},
/// Show all stored versions of a node
History {
/// Show full content for every version
#[arg(long)]
full: bool,
/// Node key
key: Vec<String>,
},
/// Show most recent writes to the node log
Tail {
/// Number of entries (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)]
full: bool,
},
/// Upsert node content from stdin
Write {
/// Node key
key: Vec<String>,
},
/// Import markdown file(s) into the store
Import {
/// File paths
files: Vec<String>,
},
/// Export store nodes to markdown file(s)
Export {
/// File keys to export (or --all)
files: Vec<String>,
/// Export all file-level nodes
#[arg(long)]
all: bool,
},
/// Write a journal entry to the store
#[command(name = "journal-write")]
JournalWrite {
/// Entry text
text: Vec<String>,
},
/// Show recent journal/digest entries
#[command(name = "journal-tail")]
JournalTail {
/// Number of entries to show (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)]
full: bool,
/// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
#[arg(long, default_value_t = 0)]
level: u8,
},
/// Query the memory graph
Query {
/// Query expression (e.g. "degree > 15 | sort degree | limit 10")
expr: Vec<String>,
},
/// Bump daily lookup counter for keys
#[command(name = "lookup-bump")]
LookupBump {
/// Node keys
keys: Vec<String>,
},
/// Show daily lookup counts
Lookups {
/// Date (default: today)
date: Option<String>,
},
/// Background job daemon /// Background job daemon
Daemon { Daemon {
/// Subcommand: status, log, install /// Subcommand: status, log, install
@ -451,6 +376,61 @@ enum Command {
#[arg(long, default_value_t = 4)] #[arg(long, default_value_t = 4)]
max_depth: i32, max_depth: i32,
}, },
/// Run agent consolidation on priority nodes
#[command(name = "consolidate-batch")]
ConsolidateBatch {
/// Number of nodes to consolidate
#[arg(long, default_value_t = 5)]
count: usize,
/// Generate replay agent prompt automatically
#[arg(long)]
auto: bool,
/// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
#[arg(long)]
agent: Option<String>,
},
/// Analyze metrics, plan agent allocation
#[command(name = "consolidate-session")]
ConsolidateSession,
/// Autonomous: plan → agents → apply → digests → links
#[command(name = "consolidate-full")]
ConsolidateFull,
/// Import pending agent results into the graph
#[command(name = "apply-agent")]
ApplyAgent {
/// Process all files without moving to done/
#[arg(long)]
all: bool,
},
/// Extract and apply actions from consolidation reports
#[command(name = "apply-consolidation")]
ApplyConsolidation {
/// Apply actions (default: dry run)
#[arg(long)]
apply: bool,
/// Read from specific report file
#[arg(long)]
report: Option<String>,
},
/// Generate episodic digests (daily, weekly, monthly, auto)
Digest {
/// Digest type: daily, weekly, monthly, auto
#[command(subcommand)]
level: DigestLevel,
},
/// Parse and apply links from digest nodes
#[command(name = "digest-links")]
DigestLinks {
/// Apply the links (default: dry run)
#[arg(long)]
apply: bool,
},
/// Mine conversation for experiential moments to journal
#[command(name = "experience-mine")]
ExperienceMine {
/// Path to JSONL transcript (default: most recent)
jsonl_path: Option<String>,
},
/// Extract atomic facts from conversation transcripts /// Extract atomic facts from conversation transcripts
#[command(name = "fact-mine")] #[command(name = "fact-mine")]
FactMine { FactMine {
@ -475,6 +455,67 @@ enum Command {
/// Path to JSONL transcript /// Path to JSONL transcript
path: String, path: String,
}, },
/// Show spaced repetition replay queue
#[command(name = "replay-queue")]
ReplayQueue {
/// Number of items to show
#[arg(long, default_value_t = 10)]
count: usize,
},
}
#[derive(Subcommand)]
enum AdminCmd {
/// Scan markdown files, index all memory units
Init,
/// Report graph metrics (CC, communities, small-world)
Health,
/// Run consistency checks and repair
Fsck,
/// Find and merge duplicate nodes (same key, multiple UUIDs)
Dedup {
/// Apply the merge (default: dry run)
#[arg(long)]
apply: bool,
},
/// Brief metrics check (for cron/notifications)
#[command(name = "daily-check")]
DailyCheck,
/// Import markdown file(s) into the store
Import {
/// File paths
files: Vec<String>,
},
/// Export store nodes to markdown file(s)
Export {
/// File keys to export (or --all)
files: Vec<String>,
/// Export all file-level nodes
#[arg(long)]
all: bool,
},
/// Output session-start context from the store
#[command(name = "load-context")]
LoadContext {
/// Show word count statistics instead of content
#[arg(long)]
stats: bool,
},
/// Show recent retrieval log
Log,
/// Show current parameters
Params,
/// Bump daily lookup counter for keys
#[command(name = "lookup-bump")]
LookupBump {
/// Node keys
keys: Vec<String>,
},
/// Show daily lookup counts
Lookups {
/// Date (default: today)
date: Option<String>,
},
} }
#[derive(Subcommand)] #[derive(Subcommand)]
@ -502,105 +543,98 @@ fn main() {
let cli = Cli::parse(); let cli = Cli::parse();
let result = match cli.command { let result = match cli.command {
// Core
Command::Search { query, pipeline, expand, full, debug, fuzzy, content } Command::Search { query, pipeline, expand, full, debug, fuzzy, content }
=> cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content), => cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content),
Command::Init => cmd_init(),
Command::Migrate => cmd_migrate(),
Command::Health => cmd_health(),
Command::Fsck => cmd_fsck(),
Command::Dedup { apply } => cmd_dedup(apply),
Command::Status => cmd_status(),
Command::Graph => cmd_graph(),
Command::Used { key } => cmd_used(&key),
Command::Wrong { key, context }
=> cmd_wrong(&key, &context),
Command::Gap { description }
=> cmd_gap(&description),
Command::CapDegree { max_degree }
=> cmd_cap_degree(max_degree),
Command::LinkOrphans { min_degree, links_per, sim_threshold }
=> cmd_link_orphans(min_degree, links_per, sim_threshold),
Command::ConsolidateBatch { count, auto, agent }
=> cmd_consolidate_batch(count, auto, agent),
Command::Log => cmd_log(),
Command::Params => cmd_params(),
Command::Link { key } => cmd_link(&key),
Command::ReplayQueue { count }
=> cmd_replay_queue(count),
Command::Interference { threshold }
=> cmd_interference(threshold),
Command::LinkAdd { source, target, reason }
=> cmd_link_add(&source, &target, &reason),
Command::LinkImpact { source, target }
=> cmd_link_impact(&source, &target),
Command::ConsolidateSession => cmd_consolidate_session(),
Command::ConsolidateFull => cmd_consolidate_full(),
Command::TriangleClose { min_degree, sim_threshold, max_per_hub }
=> cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
Command::DailyCheck => cmd_daily_check(),
Command::ApplyAgent { all }
=> cmd_apply_agent(all),
Command::Digest { level } => cmd_digest(level),
Command::DigestLinks { apply }
=> cmd_digest_links(apply),
Command::JournalEnrich { jsonl_path, entry_text, grep_line }
=> cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
Command::ExperienceMine { jsonl_path }
=> cmd_experience_mine(jsonl_path),
Command::ApplyConsolidation { apply, report }
=> cmd_apply_consolidation(apply, report.as_deref()),
Command::Differentiate { key, apply }
=> cmd_differentiate(key.as_deref(), apply),
Command::LinkAudit { apply }
=> cmd_link_audit(apply),
Command::Trace { key } => cmd_trace(&key),
Command::Spectral { k } => cmd_spectral(k),
Command::SpectralSave { k } => cmd_spectral_save(k),
Command::SpectralNeighbors { key, n }
=> cmd_spectral_neighbors(&key, n),
Command::SpectralPositions { n }
=> cmd_spectral_positions(n),
Command::SpectralSuggest { n }
=> cmd_spectral_suggest(n),
Command::ListKeys { pattern }
=> cmd_list_keys(pattern.as_deref()),
Command::ListEdges => cmd_list_edges(),
Command::DumpJson => cmd_dump_json(),
Command::NodeDelete { key }
=> cmd_node_delete(&key),
Command::NodeRename { old_key, new_key }
=> cmd_node_rename(&old_key, &new_key),
Command::JournalTsMigrate => cmd_journal_ts_migrate(),
Command::LoadContext { stats }
=> cmd_load_context(stats),
Command::Render { key } => cmd_render(&key), Command::Render { key } => cmd_render(&key),
Command::History { full, key }
=> cmd_history(&key, full),
Command::Tail { n, full }
=> cmd_tail(n, full),
Command::Write { key } => cmd_write(&key), Command::Write { key } => cmd_write(&key),
Command::Import { files } Command::History { full, key } => cmd_history(&key, full),
=> cmd_import(&files), Command::Tail { n, full } => cmd_tail(n, full),
Command::Export { files, all } Command::Status => cmd_status(),
=> cmd_export(&files, all), Command::Query { expr } => cmd_query(&expr),
Command::JournalWrite { text } Command::Used { key } => cmd_used(&key),
=> cmd_journal_write(&text), Command::Wrong { key, context } => cmd_wrong(&key, &context),
Command::JournalTail { n, full, level } Command::Gap { description } => cmd_gap(&description),
=> cmd_journal_tail(n, full, level),
Command::Query { expr } // Node
=> cmd_query(&expr), Command::Node(sub) => match sub {
Command::LookupBump { keys } NodeCmd::Delete { key } => cmd_node_delete(&key),
=> cmd_lookup_bump(&keys), NodeCmd::Rename { old_key, new_key } => cmd_node_rename(&old_key, &new_key),
Command::Lookups { date } NodeCmd::List { pattern } => cmd_list_keys(pattern.as_deref()),
=> cmd_lookups(date.as_deref()), NodeCmd::Edges => cmd_list_edges(),
Command::Daemon { sub, args } NodeCmd::Dump => cmd_dump_json(),
=> cmd_daemon(sub.as_deref(), &args), },
Command::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
// Journal
Command::Journal(sub) => match sub {
JournalCmd::Write { text } => cmd_journal_write(&text),
JournalCmd::Tail { n, full, level } => cmd_journal_tail(n, full, level),
JournalCmd::Enrich { jsonl_path, entry_text, grep_line }
=> cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
},
// Graph
Command::GraphCmd(sub) => match sub {
GraphCmd::Link { key } => cmd_link(&key),
GraphCmd::LinkAdd { source, target, reason }
=> cmd_link_add(&source, &target, &reason),
GraphCmd::LinkImpact { source, target }
=> cmd_link_impact(&source, &target),
GraphCmd::LinkAudit { apply } => cmd_link_audit(apply),
GraphCmd::LinkOrphans { min_degree, links_per, sim_threshold }
=> cmd_link_orphans(min_degree, links_per, sim_threshold),
GraphCmd::TriangleClose { min_degree, sim_threshold, max_per_hub }
=> cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
GraphCmd::CapDegree { max_degree } => cmd_cap_degree(max_degree),
GraphCmd::Differentiate { key, apply }
=> cmd_differentiate(key.as_deref(), apply),
GraphCmd::Trace { key } => cmd_trace(&key),
GraphCmd::Interference { threshold } => cmd_interference(threshold),
GraphCmd::Overview => cmd_graph(),
GraphCmd::Spectral { k } => cmd_spectral(k),
GraphCmd::SpectralSave { k } => cmd_spectral_save(k),
GraphCmd::SpectralNeighbors { key, n }
=> cmd_spectral_neighbors(&key, n),
GraphCmd::SpectralPositions { n } => cmd_spectral_positions(n),
GraphCmd::SpectralSuggest { n } => cmd_spectral_suggest(n),
},
// Agent
Command::Agent(sub) => match sub {
AgentCmd::Daemon { sub, args } => cmd_daemon(sub.as_deref(), &args),
AgentCmd::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
=> cmd_knowledge_loop(max_cycles, batch_size, window, max_depth), => cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
Command::FactMine { path, batch, dry_run, output, min_messages } AgentCmd::ConsolidateBatch { count, auto, agent }
=> cmd_consolidate_batch(count, auto, agent),
AgentCmd::ConsolidateSession => cmd_consolidate_session(),
AgentCmd::ConsolidateFull => cmd_consolidate_full(),
AgentCmd::ApplyAgent { all } => cmd_apply_agent(all),
AgentCmd::ApplyConsolidation { apply, report }
=> cmd_apply_consolidation(apply, report.as_deref()),
AgentCmd::Digest { level } => cmd_digest(level),
AgentCmd::DigestLinks { apply } => cmd_digest_links(apply),
AgentCmd::ExperienceMine { jsonl_path } => cmd_experience_mine(jsonl_path),
AgentCmd::FactMine { path, batch, dry_run, output, min_messages }
=> cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages), => cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
Command::FactMineStore { path } AgentCmd::FactMineStore { path } => cmd_fact_mine_store(&path),
=> cmd_fact_mine_store(&path), AgentCmd::ReplayQueue { count } => cmd_replay_queue(count),
},
// Admin
Command::Admin(sub) => match sub {
AdminCmd::Init => cmd_init(),
AdminCmd::Health => cmd_health(),
AdminCmd::Fsck => cmd_fsck(),
AdminCmd::Dedup { apply } => cmd_dedup(apply),
AdminCmd::DailyCheck => cmd_daily_check(),
AdminCmd::Import { files } => cmd_import(&files),
AdminCmd::Export { files, all } => cmd_export(&files, all),
AdminCmd::LoadContext { stats } => cmd_load_context(stats),
AdminCmd::Log => cmd_log(),
AdminCmd::Params => cmd_params(),
AdminCmd::LookupBump { keys } => cmd_lookup_bump(&keys),
AdminCmd::Lookups { date } => cmd_lookups(date.as_deref()),
},
}; };
if let Err(e) = result { if let Err(e) = result {
@ -807,10 +841,6 @@ fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -
Ok(()) Ok(())
} }
fn cmd_migrate() -> Result<(), String> {
migrate::migrate()
}
fn cmd_fsck() -> Result<(), String> { fn cmd_fsck() -> Result<(), String> {
let mut store = store::Store::load()?; let mut store = store::Store::load()?;
@ -1871,57 +1901,6 @@ fn cmd_node_rename(old_key: &str, new_key: &str) -> Result<(), String> {
Ok(()) Ok(())
} }
fn cmd_journal_ts_migrate() -> Result<(), String> {
use chrono::{NaiveDateTime, TimeZone, Local};
let mut store = store::Store::load()?;
let re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})[t-](\d{2})-(\d{2})").unwrap();
let valid_range = 978_307_200i64..=4_102_444_800i64;
let to_update: Vec<_> = store.nodes.values()
.filter(|n| !valid_range.contains(&n.created_at))
.map(|n| n.key.clone())
.collect();
let mut updated = 0usize;
for key in &to_update {
if let Some(caps) = re.captures(key) {
let date_str = format!("{} {}:{}", &caps[1], &caps[2], &caps[3]);
if let Ok(ndt) = NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d %H:%M") {
if let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
if let Some(node) = store.nodes.get_mut(key) {
node.created_at = dt.timestamp();
node.version += 1;
}
updated += 1;
continue;
}
}
}
if let Some(node) = store.nodes.get_mut(key) {
node.created_at = node.timestamp;
node.version += 1;
updated += 1;
}
}
let nodes_to_write: Vec<_> = to_update.iter()
.filter_map(|k| store.nodes.get(k))
.filter(|n| valid_range.contains(&n.created_at))
.cloned()
.collect();
if !nodes_to_write.is_empty() {
store.append_nodes(&nodes_to_write)?;
store.save()?;
}
println!("journal-ts-migrate: updated {}/{}", updated, to_update.len());
Ok(())
}
fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> { fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
match group.source { match group.source {
config::ContextSource::Journal => { config::ContextSource::Journal => {