reorganize subcommands into logical groups

60+ flat subcommands grouped into:
- Core (daily use): search, render, write, history, tail, status, query, used, wrong, gap
- Node: delete, rename, list, edges, dump
- Journal: write, tail, enrich
- Graph: link, audit, spectral, etc.
- Agent: daemon, knowledge-loop, consolidate, digest, etc.
- Admin: init, health, fsck, import, export, etc.

Also: remove dead migration code (migrate.rs, Migrate/JournalTsMigrate commands),
update memory-search and poc-hook for new subcommand paths, update daemon systemd
template for `agent daemon` path.
This commit is contained in:
ProofOfConcept 2026-03-11 01:32:21 -04:00
parent d76b14dfcd
commit 57c26d8157
5 changed files with 366 additions and 388 deletions

View file

@ -58,6 +58,8 @@ struct Cli {
#[derive(Subcommand)]
enum Command {
// ── Core (daily use) ──────────────────────────────────────────────
/// Search memory (AND logic across terms)
///
/// Pipeline: -p spread -p spectral,k=20
@ -84,24 +86,40 @@ enum Command {
#[arg(long)]
content: bool,
},
/// Scan markdown files, index all memory units
Init,
/// Migrate from old weights.json system
Migrate,
/// Report graph metrics (CC, communities, small-world)
Health,
/// Run consistency checks and repair
Fsck,
/// Find and merge duplicate nodes (same key, multiple UUIDs)
Dedup {
/// Apply the merge (default: dry run)
/// Output a node's content to stdout
Render {
/// Node key
key: Vec<String>,
},
/// Upsert node content from stdin
Write {
/// Node key
key: Vec<String>,
},
/// Show all stored versions of a node
History {
/// Show full content for every version
#[arg(long)]
apply: bool,
full: bool,
/// Node key
key: Vec<String>,
},
/// Show most recent writes to the node log
Tail {
/// Number of entries (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)]
full: bool,
},
/// Summary of memory state
Status,
/// Show graph structure overview
Graph,
/// Query the memory graph
Query {
/// Query expression (e.g. "degree > 15 | sort degree | limit 10")
expr: Vec<String>,
},
/// Mark a memory as useful (boosts weight)
Used {
/// Node key
@ -119,61 +137,103 @@ enum Command {
/// Gap description
description: Vec<String>,
},
/// Cap node degree by pruning weak auto edges
#[command(name = "cap-degree")]
CapDegree {
/// Maximum degree (default: 50)
#[arg(default_value_t = 50)]
max_degree: usize,
// ── Node operations ───────────────────────────────────────────────
/// Node operations (delete, rename, list)
#[command(subcommand)]
Node(NodeCmd),
// ── Journal ───────────────────────────────────────────────────────
/// Journal operations (write, tail, enrich)
#[command(subcommand)]
Journal(JournalCmd),
// ── Graph ─────────────────────────────────────────────────────────
/// Graph operations (link, audit, spectral)
#[command(subcommand, name = "graph")]
GraphCmd(GraphCmd),
// ── Agents ────────────────────────────────────────────────────────
/// Agent and daemon operations
#[command(subcommand)]
Agent(AgentCmd),
// ── Admin ─────────────────────────────────────────────────────────
/// Admin operations (fsck, health, import, export)
#[command(subcommand)]
Admin(AdminCmd),
}
#[derive(Subcommand)]
enum NodeCmd {
/// Soft-delete a node
Delete {
/// Node key
key: Vec<String>,
},
/// Link orphan nodes to similar neighbors
#[command(name = "link-orphans")]
LinkOrphans {
/// Minimum degree to consider orphan (default: 2)
#[arg(default_value_t = 2)]
min_degree: usize,
/// Links per orphan (default: 3)
#[arg(default_value_t = 3)]
links_per: usize,
/// Similarity threshold (default: 0.15)
#[arg(default_value_t = 0.15)]
sim_threshold: f32,
/// Rename a node key
Rename {
/// Old key
old_key: String,
/// New key
new_key: String,
},
/// Run agent consolidation on priority nodes
#[command(name = "consolidate-batch")]
ConsolidateBatch {
/// Number of nodes to consolidate
#[arg(long, default_value_t = 5)]
count: usize,
/// Generate replay agent prompt automatically
/// List all node keys (one per line, optional glob)
#[command(name = "list")]
List {
/// Glob pattern to filter keys
pattern: Option<String>,
},
/// List all edges (tsv: source target strength type)
Edges,
/// Dump entire store as JSON
#[command(name = "dump")]
Dump,
}
/// Journal operations (`journal <cmd>`): append entries, inspect recent
/// entries/digests, and enrich entries with conversation links.
///
/// Variant set matches the dispatch arms in `main()`:
/// Write, Tail, Enrich.
/// (Diff residue — the old consolidate-batch `--auto`/`--agent` flags that
/// were spliced into `Tail` — removed; `Tail`'s real fields are `n`, `full`,
/// `level`, as consumed by `cmd_journal_tail(n, full, level)`.)
#[derive(Subcommand)]
enum JournalCmd {
    /// Write a journal entry to the store
    Write {
        /// Entry text
        text: Vec<String>,
    },
    /// Show recent journal/digest entries
    Tail {
        /// Number of entries to show (default: 20)
        #[arg(default_value_t = 20)]
        n: usize,
        /// Show full content
        #[arg(long)]
        full: bool,
        /// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
        #[arg(long, default_value_t = 0)]
        level: u8,
    },
    /// Enrich journal entry with conversation links
    Enrich {
        /// Path to JSONL transcript
        jsonl_path: String,
        /// Journal entry text to enrich
        entry_text: String,
        /// Grep line number for source location
        #[arg(default_value_t = 0)]
        grep_line: usize,
    },
}
#[derive(Subcommand)]
enum GraphCmd {
/// Show neighbors of a node
Link {
/// Node key
key: Vec<String>,
},
/// Show spaced repetition replay queue
#[command(name = "replay-queue")]
ReplayQueue {
/// Number of items to show
#[arg(long, default_value_t = 10)]
count: usize,
},
/// Detect potentially confusable memory pairs
Interference {
/// Similarity threshold (default: 0.4)
#[arg(long, default_value_t = 0.4)]
threshold: f32,
},
/// Add a link between two nodes
#[command(name = "link-add")]
LinkAdd {
@ -192,12 +252,26 @@ enum Command {
/// Target node key
target: String,
},
/// Analyze metrics, plan agent allocation
#[command(name = "consolidate-session")]
ConsolidateSession,
/// Autonomous: plan → agents → apply → digests → links
#[command(name = "consolidate-full")]
ConsolidateFull,
/// Walk every link, send to Sonnet for quality review
#[command(name = "link-audit")]
LinkAudit {
/// Apply changes (default: dry run)
#[arg(long)]
apply: bool,
},
/// Link orphan nodes to similar neighbors
#[command(name = "link-orphans")]
LinkOrphans {
/// Minimum degree to consider orphan (default: 2)
#[arg(default_value_t = 2)]
min_degree: usize,
/// Links per orphan (default: 3)
#[arg(default_value_t = 3)]
links_per: usize,
/// Similarity threshold (default: 0.15)
#[arg(default_value_t = 0.15)]
sim_threshold: f32,
},
/// Close triangles: link similar neighbors of hubs
#[command(name = "triangle-close")]
TriangleClose {
@ -211,55 +285,12 @@ enum Command {
#[arg(default_value_t = 10)]
max_per_hub: usize,
},
/// Brief metrics check (for cron/notifications)
#[command(name = "daily-check")]
DailyCheck,
/// Import pending agent results into the graph
#[command(name = "apply-agent")]
ApplyAgent {
/// Process all files without moving to done/
#[arg(long)]
all: bool,
},
/// Generate episodic digests (daily, weekly, monthly, auto)
Digest {
/// Digest type: daily, weekly, monthly, auto
#[command(subcommand)]
level: DigestLevel,
},
/// Parse and apply links from digest nodes
#[command(name = "digest-links")]
DigestLinks {
/// Apply the links (default: dry run)
#[arg(long)]
apply: bool,
},
/// Enrich journal entry with conversation links
#[command(name = "journal-enrich")]
JournalEnrich {
/// Path to JSONL transcript
jsonl_path: String,
/// Journal entry text to enrich
entry_text: String,
/// Grep line number for source location
#[arg(default_value_t = 0)]
grep_line: usize,
},
/// Mine conversation for experiential moments to journal
#[command(name = "experience-mine")]
ExperienceMine {
/// Path to JSONL transcript (default: most recent)
jsonl_path: Option<String>,
},
/// Extract and apply actions from consolidation reports
#[command(name = "apply-consolidation")]
ApplyConsolidation {
/// Apply actions (default: dry run)
#[arg(long)]
apply: bool,
/// Read from specific report file
#[arg(long)]
report: Option<String>,
/// Cap node degree by pruning weak auto edges
#[command(name = "cap-degree")]
CapDegree {
/// Maximum degree (default: 50)
#[arg(default_value_t = 50)]
max_degree: usize,
},
/// Redistribute hub links to section-level children
Differentiate {
@ -269,18 +300,19 @@ enum Command {
#[arg(long)]
apply: bool,
},
/// Walk every link, send to Sonnet for quality review
#[command(name = "link-audit")]
LinkAudit {
/// Apply changes (default: dry run)
#[arg(long)]
apply: bool,
},
/// Walk temporal links: semantic ↔ episodic ↔ conversation
Trace {
/// Node key
key: Vec<String>,
},
/// Detect potentially confusable memory pairs
Interference {
/// Similarity threshold (default: 0.4)
#[arg(long, default_value_t = 0.4)]
threshold: f32,
},
/// Show graph structure overview
Overview,
/// Spectral decomposition of the memory graph
Spectral {
/// Number of eigenvectors (default: 30)
@ -317,117 +349,10 @@ enum Command {
#[arg(default_value_t = 20)]
n: usize,
},
/// List all node keys (one per line, optional glob)
#[command(name = "list-keys")]
ListKeys {
/// Glob pattern to filter keys
pattern: Option<String>,
},
/// List all edges (tsv: source target strength type)
#[command(name = "list-edges")]
ListEdges,
/// Dump entire store as JSON
#[command(name = "dump-json")]
DumpJson,
/// Soft-delete a node
#[command(name = "node-delete")]
NodeDelete {
/// Node key
key: Vec<String>,
},
/// Rename a node key
#[command(name = "node-rename")]
NodeRename {
/// Old key
old_key: String,
/// New key
new_key: String,
},
/// Populate created_at for nodes missing timestamps
#[command(name = "journal-ts-migrate")]
JournalTsMigrate,
/// Output session-start context from the store
#[command(name = "load-context")]
LoadContext {
/// Show word count statistics instead of content
#[arg(long)]
stats: bool,
},
/// Output a node's content to stdout
Render {
/// Node key
key: Vec<String>,
},
/// Show all stored versions of a node
History {
/// Show full content for every version
#[arg(long)]
full: bool,
/// Node key
key: Vec<String>,
},
/// Show most recent writes to the node log
Tail {
/// Number of entries (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)]
full: bool,
},
/// Upsert node content from stdin
Write {
/// Node key
key: Vec<String>,
},
/// Import markdown file(s) into the store
Import {
/// File paths
files: Vec<String>,
},
/// Export store nodes to markdown file(s)
Export {
/// File keys to export (or --all)
files: Vec<String>,
/// Export all file-level nodes
#[arg(long)]
all: bool,
},
/// Write a journal entry to the store
#[command(name = "journal-write")]
JournalWrite {
/// Entry text
text: Vec<String>,
},
/// Show recent journal/digest entries
#[command(name = "journal-tail")]
JournalTail {
/// Number of entries to show (default: 20)
#[arg(default_value_t = 20)]
n: usize,
/// Show full content
#[arg(long)]
full: bool,
/// Digest level: 0/journal, 1/daily, 2/weekly, 3/monthly
#[arg(long, default_value_t = 0)]
level: u8,
},
/// Query the memory graph
Query {
/// Query expression (e.g. "degree > 15 | sort degree | limit 10")
expr: Vec<String>,
},
/// Bump daily lookup counter for keys
#[command(name = "lookup-bump")]
LookupBump {
/// Node keys
keys: Vec<String>,
},
/// Show daily lookup counts
Lookups {
/// Date (default: today)
date: Option<String>,
},
}
#[derive(Subcommand)]
enum AgentCmd {
/// Background job daemon
Daemon {
/// Subcommand: status, log, install
@ -451,6 +376,61 @@ enum Command {
#[arg(long, default_value_t = 4)]
max_depth: i32,
},
/// Run agent consolidation on priority nodes
#[command(name = "consolidate-batch")]
ConsolidateBatch {
/// Number of nodes to consolidate
#[arg(long, default_value_t = 5)]
count: usize,
/// Generate replay agent prompt automatically
#[arg(long)]
auto: bool,
/// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
#[arg(long)]
agent: Option<String>,
},
/// Analyze metrics, plan agent allocation
#[command(name = "consolidate-session")]
ConsolidateSession,
/// Autonomous: plan → agents → apply → digests → links
#[command(name = "consolidate-full")]
ConsolidateFull,
/// Import pending agent results into the graph
#[command(name = "apply-agent")]
ApplyAgent {
/// Process all files without moving to done/
#[arg(long)]
all: bool,
},
/// Extract and apply actions from consolidation reports
#[command(name = "apply-consolidation")]
ApplyConsolidation {
/// Apply actions (default: dry run)
#[arg(long)]
apply: bool,
/// Read from specific report file
#[arg(long)]
report: Option<String>,
},
/// Generate episodic digests (daily, weekly, monthly, auto)
Digest {
/// Digest type: daily, weekly, monthly, auto
#[command(subcommand)]
level: DigestLevel,
},
/// Parse and apply links from digest nodes
#[command(name = "digest-links")]
DigestLinks {
/// Apply the links (default: dry run)
#[arg(long)]
apply: bool,
},
/// Mine conversation for experiential moments to journal
#[command(name = "experience-mine")]
ExperienceMine {
/// Path to JSONL transcript (default: most recent)
jsonl_path: Option<String>,
},
/// Extract atomic facts from conversation transcripts
#[command(name = "fact-mine")]
FactMine {
@ -475,6 +455,67 @@ enum Command {
/// Path to JSONL transcript
path: String,
},
/// Show spaced repetition replay queue
#[command(name = "replay-queue")]
ReplayQueue {
/// Number of items to show
#[arg(long, default_value_t = 10)]
count: usize,
},
}
/// Admin operations (`admin <cmd>`): store lifecycle (init/import/export),
/// integrity (fsck/dedup), metrics (health/daily-check), and bookkeeping
/// (log/params/lookups).
///
/// Note: clap's derive renames multi-word variants to kebab-case by default,
/// so `DailyCheck` → "daily-check", `LoadContext` → "load-context", and
/// `LookupBump` → "lookup-bump" without explicit `#[command(name = …)]`
/// attributes — the redundant ones were dropped.
#[derive(Subcommand)]
enum AdminCmd {
    /// Scan markdown files, index all memory units
    Init,
    /// Report graph metrics (CC, communities, small-world)
    Health,
    /// Run consistency checks and repair
    Fsck,
    /// Find and merge duplicate nodes (same key, multiple UUIDs)
    Dedup {
        /// Apply the merge (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Brief metrics check (for cron/notifications)
    DailyCheck,
    /// Import markdown file(s) into the store
    Import {
        /// File paths
        files: Vec<String>,
    },
    /// Export store nodes to markdown file(s)
    Export {
        /// File keys to export (or --all)
        files: Vec<String>,
        /// Export all file-level nodes
        #[arg(long)]
        all: bool,
    },
    /// Output session-start context from the store
    LoadContext {
        /// Show word count statistics instead of content
        #[arg(long)]
        stats: bool,
    },
    /// Show recent retrieval log
    Log,
    /// Show current parameters
    Params,
    /// Bump daily lookup counter for keys
    LookupBump {
        /// Node keys
        keys: Vec<String>,
    },
    /// Show daily lookup counts
    Lookups {
        /// Date (default: today)
        date: Option<String>,
    },
}
#[derive(Subcommand)]
@ -502,105 +543,98 @@ fn main() {
let cli = Cli::parse();
let result = match cli.command {
// Core
Command::Search { query, pipeline, expand, full, debug, fuzzy, content }
=> cmd_search(&query, &pipeline, expand, full, debug, fuzzy, content),
Command::Init => cmd_init(),
Command::Migrate => cmd_migrate(),
Command::Health => cmd_health(),
Command::Fsck => cmd_fsck(),
Command::Dedup { apply } => cmd_dedup(apply),
Command::Status => cmd_status(),
Command::Graph => cmd_graph(),
Command::Used { key } => cmd_used(&key),
Command::Wrong { key, context }
=> cmd_wrong(&key, &context),
Command::Gap { description }
=> cmd_gap(&description),
Command::CapDegree { max_degree }
=> cmd_cap_degree(max_degree),
Command::LinkOrphans { min_degree, links_per, sim_threshold }
=> cmd_link_orphans(min_degree, links_per, sim_threshold),
Command::ConsolidateBatch { count, auto, agent }
=> cmd_consolidate_batch(count, auto, agent),
Command::Log => cmd_log(),
Command::Params => cmd_params(),
Command::Link { key } => cmd_link(&key),
Command::ReplayQueue { count }
=> cmd_replay_queue(count),
Command::Interference { threshold }
=> cmd_interference(threshold),
Command::LinkAdd { source, target, reason }
=> cmd_link_add(&source, &target, &reason),
Command::LinkImpact { source, target }
=> cmd_link_impact(&source, &target),
Command::ConsolidateSession => cmd_consolidate_session(),
Command::ConsolidateFull => cmd_consolidate_full(),
Command::TriangleClose { min_degree, sim_threshold, max_per_hub }
=> cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
Command::DailyCheck => cmd_daily_check(),
Command::ApplyAgent { all }
=> cmd_apply_agent(all),
Command::Digest { level } => cmd_digest(level),
Command::DigestLinks { apply }
=> cmd_digest_links(apply),
Command::JournalEnrich { jsonl_path, entry_text, grep_line }
=> cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
Command::ExperienceMine { jsonl_path }
=> cmd_experience_mine(jsonl_path),
Command::ApplyConsolidation { apply, report }
=> cmd_apply_consolidation(apply, report.as_deref()),
Command::Differentiate { key, apply }
=> cmd_differentiate(key.as_deref(), apply),
Command::LinkAudit { apply }
=> cmd_link_audit(apply),
Command::Trace { key } => cmd_trace(&key),
Command::Spectral { k } => cmd_spectral(k),
Command::SpectralSave { k } => cmd_spectral_save(k),
Command::SpectralNeighbors { key, n }
=> cmd_spectral_neighbors(&key, n),
Command::SpectralPositions { n }
=> cmd_spectral_positions(n),
Command::SpectralSuggest { n }
=> cmd_spectral_suggest(n),
Command::ListKeys { pattern }
=> cmd_list_keys(pattern.as_deref()),
Command::ListEdges => cmd_list_edges(),
Command::DumpJson => cmd_dump_json(),
Command::NodeDelete { key }
=> cmd_node_delete(&key),
Command::NodeRename { old_key, new_key }
=> cmd_node_rename(&old_key, &new_key),
Command::JournalTsMigrate => cmd_journal_ts_migrate(),
Command::LoadContext { stats }
=> cmd_load_context(stats),
Command::Render { key } => cmd_render(&key),
Command::History { full, key }
=> cmd_history(&key, full),
Command::Tail { n, full }
=> cmd_tail(n, full),
Command::Write { key } => cmd_write(&key),
Command::Import { files }
=> cmd_import(&files),
Command::Export { files, all }
=> cmd_export(&files, all),
Command::JournalWrite { text }
=> cmd_journal_write(&text),
Command::JournalTail { n, full, level }
=> cmd_journal_tail(n, full, level),
Command::Query { expr }
=> cmd_query(&expr),
Command::LookupBump { keys }
=> cmd_lookup_bump(&keys),
Command::Lookups { date }
=> cmd_lookups(date.as_deref()),
Command::Daemon { sub, args }
=> cmd_daemon(sub.as_deref(), &args),
Command::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
=> cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
Command::FactMine { path, batch, dry_run, output, min_messages }
=> cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
Command::FactMineStore { path }
=> cmd_fact_mine_store(&path),
Command::History { full, key } => cmd_history(&key, full),
Command::Tail { n, full } => cmd_tail(n, full),
Command::Status => cmd_status(),
Command::Query { expr } => cmd_query(&expr),
Command::Used { key } => cmd_used(&key),
Command::Wrong { key, context } => cmd_wrong(&key, &context),
Command::Gap { description } => cmd_gap(&description),
// Node
Command::Node(sub) => match sub {
NodeCmd::Delete { key } => cmd_node_delete(&key),
NodeCmd::Rename { old_key, new_key } => cmd_node_rename(&old_key, &new_key),
NodeCmd::List { pattern } => cmd_list_keys(pattern.as_deref()),
NodeCmd::Edges => cmd_list_edges(),
NodeCmd::Dump => cmd_dump_json(),
},
// Journal
Command::Journal(sub) => match sub {
JournalCmd::Write { text } => cmd_journal_write(&text),
JournalCmd::Tail { n, full, level } => cmd_journal_tail(n, full, level),
JournalCmd::Enrich { jsonl_path, entry_text, grep_line }
=> cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
},
// Graph
Command::GraphCmd(sub) => match sub {
GraphCmd::Link { key } => cmd_link(&key),
GraphCmd::LinkAdd { source, target, reason }
=> cmd_link_add(&source, &target, &reason),
GraphCmd::LinkImpact { source, target }
=> cmd_link_impact(&source, &target),
GraphCmd::LinkAudit { apply } => cmd_link_audit(apply),
GraphCmd::LinkOrphans { min_degree, links_per, sim_threshold }
=> cmd_link_orphans(min_degree, links_per, sim_threshold),
GraphCmd::TriangleClose { min_degree, sim_threshold, max_per_hub }
=> cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
GraphCmd::CapDegree { max_degree } => cmd_cap_degree(max_degree),
GraphCmd::Differentiate { key, apply }
=> cmd_differentiate(key.as_deref(), apply),
GraphCmd::Trace { key } => cmd_trace(&key),
GraphCmd::Interference { threshold } => cmd_interference(threshold),
GraphCmd::Overview => cmd_graph(),
GraphCmd::Spectral { k } => cmd_spectral(k),
GraphCmd::SpectralSave { k } => cmd_spectral_save(k),
GraphCmd::SpectralNeighbors { key, n }
=> cmd_spectral_neighbors(&key, n),
GraphCmd::SpectralPositions { n } => cmd_spectral_positions(n),
GraphCmd::SpectralSuggest { n } => cmd_spectral_suggest(n),
},
// Agent
Command::Agent(sub) => match sub {
AgentCmd::Daemon { sub, args } => cmd_daemon(sub.as_deref(), &args),
AgentCmd::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
=> cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
AgentCmd::ConsolidateBatch { count, auto, agent }
=> cmd_consolidate_batch(count, auto, agent),
AgentCmd::ConsolidateSession => cmd_consolidate_session(),
AgentCmd::ConsolidateFull => cmd_consolidate_full(),
AgentCmd::ApplyAgent { all } => cmd_apply_agent(all),
AgentCmd::ApplyConsolidation { apply, report }
=> cmd_apply_consolidation(apply, report.as_deref()),
AgentCmd::Digest { level } => cmd_digest(level),
AgentCmd::DigestLinks { apply } => cmd_digest_links(apply),
AgentCmd::ExperienceMine { jsonl_path } => cmd_experience_mine(jsonl_path),
AgentCmd::FactMine { path, batch, dry_run, output, min_messages }
=> cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
AgentCmd::FactMineStore { path } => cmd_fact_mine_store(&path),
AgentCmd::ReplayQueue { count } => cmd_replay_queue(count),
},
// Admin
Command::Admin(sub) => match sub {
AdminCmd::Init => cmd_init(),
AdminCmd::Health => cmd_health(),
AdminCmd::Fsck => cmd_fsck(),
AdminCmd::Dedup { apply } => cmd_dedup(apply),
AdminCmd::DailyCheck => cmd_daily_check(),
AdminCmd::Import { files } => cmd_import(&files),
AdminCmd::Export { files, all } => cmd_export(&files, all),
AdminCmd::LoadContext { stats } => cmd_load_context(stats),
AdminCmd::Log => cmd_log(),
AdminCmd::Params => cmd_params(),
AdminCmd::LookupBump { keys } => cmd_lookup_bump(&keys),
AdminCmd::Lookups { date } => cmd_lookups(date.as_deref()),
},
};
if let Err(e) = result {
@ -807,10 +841,6 @@ fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -
Ok(())
}
/// Run the legacy weights.json migration (thin wrapper over the `migrate`
/// module; error is propagated as-is to `main`'s result handling).
/// NOTE(review): the commit message says migrate.rs and the `Migrate`
/// command are being removed — this wrapper appears to be dead code
/// slated for deletion in the same change.
fn cmd_migrate() -> Result<(), String> {
migrate::migrate()
}
fn cmd_fsck() -> Result<(), String> {
let mut store = store::Store::load()?;
@ -1871,57 +1901,6 @@ fn cmd_node_rename(old_key: &str, new_key: &str) -> Result<(), String> {
Ok(())
}
/// Backfill `created_at` for nodes whose timestamp is outside a sane range.
///
/// For each node with `created_at` outside 978307200..=4102444800
/// (2001-01-01 .. 2100-01-01 UTC), try to parse a timestamp out of a
/// journal-style key (`j-YYYY-MM-DD[t-]HH-MM`); if that fails, fall back to
/// the node's `timestamp` field. Only nodes whose repaired `created_at`
/// lands back inside the valid range are re-appended and saved.
/// NOTE(review): the fallback path increments `updated` even when
/// `node.timestamp` is itself out of range — such a node is counted as
/// updated but filtered out of `nodes_to_write` and never persisted.
/// NOTE(review): commit message marks this as dead code being removed
/// (`journal-ts-migrate` command deleted).
fn cmd_journal_ts_migrate() -> Result<(), String> {
use chrono::{NaiveDateTime, TimeZone, Local};
let mut store = store::Store::load()?;
// Captures: 1 = YYYY-MM-DD, 2 = HH, 3 = MM; separator before hour is 't' or '-'.
let re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})[t-](\d{2})-(\d{2})").unwrap();
// Unix-epoch bounds: 2001-01-01T00:00:00Z ..= 2100-01-01T00:00:00Z.
let valid_range = 978_307_200i64..=4_102_444_800i64;
// Collect keys first so we can mutate `store.nodes` afterwards.
let to_update: Vec<_> = store.nodes.values()
.filter(|n| !valid_range.contains(&n.created_at))
.map(|n| n.key.clone())
.collect();
let mut updated = 0usize;
for key in &to_update {
if let Some(caps) = re.captures(key) {
let date_str = format!("{} {}:{}", &caps[1], &caps[2], &caps[3]);
if let Ok(ndt) = NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d %H:%M") {
// `earliest()` resolves DST-ambiguous local times; None skips to fallback.
if let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
if let Some(node) = store.nodes.get_mut(key) {
node.created_at = dt.timestamp();
node.version += 1;
}
updated += 1;
continue;
}
}
}
// Fallback: no parseable key timestamp — reuse the node's own `timestamp`.
if let Some(node) = store.nodes.get_mut(key) {
node.created_at = node.timestamp;
node.version += 1;
updated += 1;
}
}
// Persist only nodes whose repaired created_at is now in the valid range.
let nodes_to_write: Vec<_> = to_update.iter()
.filter_map(|k| store.nodes.get(k))
.filter(|n| valid_range.contains(&n.created_at))
.cloned()
.collect();
if !nodes_to_write.is_empty() {
store.append_nodes(&nodes_to_write)?;
store.save()?;
}
println!("journal-ts-migrate: updated {}/{}", updated, to_update.len());
Ok(())
}
fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
match group.source {
config::ContextSource::Journal => {