load-context --stats: word count breakdown by group

Also refactors journal rendering into get_group_content() so all
source types use the same code path, removing the separate
render_journal() function.

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-03-05 16:10:46 -05:00
parent 28b9784e1f
commit 0daf6ffd68

View file

@@ -127,7 +127,7 @@ fn main() {
"node-delete" => cmd_node_delete(&args[2..]), "node-delete" => cmd_node_delete(&args[2..]),
"node-rename" => cmd_node_rename(&args[2..]), "node-rename" => cmd_node_rename(&args[2..]),
"journal-ts-migrate" => cmd_journal_ts_migrate(), "journal-ts-migrate" => cmd_journal_ts_migrate(),
"load-context" => cmd_load_context(), "load-context" => cmd_load_context(&args[2..]),
"render" => cmd_render(&args[2..]), "render" => cmd_render(&args[2..]),
"write" => cmd_write(&args[2..]), "write" => cmd_write(&args[2..]),
"import" => cmd_import(&args[2..]), "import" => cmd_import(&args[2..]),
@@ -1399,10 +1399,13 @@ fn cmd_journal_ts_migrate() -> Result<(), String> {
Ok(()) Ok(())
} }
fn render_journal(store: &store::Store, cfg: &config::Config) { fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
match group.source {
config::ContextSource::Journal => {
let mut entries = Vec::new();
let now = store::now_epoch(); let now = store::now_epoch();
let journal_window: i64 = cfg.journal_days as i64 * 24 * 3600; let window: i64 = cfg.journal_days as i64 * 24 * 3600;
let cutoff_secs = now - journal_window; let cutoff = now - window;
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap(); let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
let journal_ts = |n: &store::Node| -> i64 { let journal_ts = |n: &store::Node| -> i64 {
@@ -1419,58 +1422,79 @@ fn render_journal(store: &store::Store, cfg: &config::Config) {
}; };
let mut journal_nodes: Vec<_> = store.nodes.values() let mut journal_nodes: Vec<_> = store.nodes.values()
.filter(|n| { .filter(|n| n.node_type == store::NodeType::EpisodicSession && journal_ts(n) >= cutoff)
n.node_type == store::NodeType::EpisodicSession
&& journal_ts(n) >= cutoff_secs
})
.collect(); .collect();
journal_nodes.sort_by_key(|n| journal_ts(n)); journal_nodes.sort_by_key(|n| journal_ts(n));
if !journal_nodes.is_empty() { let max = cfg.journal_max;
let max_journal = cfg.journal_max; let skip = journal_nodes.len().saturating_sub(max);
let skip = journal_nodes.len().saturating_sub(max_journal);
println!("--- recent journal entries (last {}/{}) ---",
journal_nodes.len().min(max_journal), journal_nodes.len());
for node in journal_nodes.iter().skip(skip) { for node in journal_nodes.iter().skip(skip) {
println!("## {}", node.key); entries.push((node.key.clone(), node.content.clone()));
println!("{}", node.content); }
println!(); entries
}
config::ContextSource::File => {
group.keys.iter().filter_map(|key| {
let content = std::fs::read_to_string(cfg.data_dir.join(key)).ok()?;
if content.trim().is_empty() { return None; }
Some((key.clone(), content.trim().to_string()))
}).collect()
}
config::ContextSource::Store => {
group.keys.iter().filter_map(|key| {
let content = store.render_file(key)?;
if content.trim().is_empty() { return None; }
Some((key.clone(), content.trim().to_string()))
}).collect()
} }
} }
} }
fn cmd_load_context() -> Result<(), String> { fn cmd_load_context(args: &[String]) -> Result<(), String> {
let stats = args.iter().any(|a| a == "--stats");
let cfg = config::get(); let cfg = config::get();
let store = store::Store::load()?; let store = store::Store::load()?;
if stats {
let mut total_words = 0;
let mut total_entries = 0;
println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS");
println!("{}", "-".repeat(42));
for group in &cfg.context_groups {
let entries = get_group_content(group, &store, cfg);
let words: usize = entries.iter()
.map(|(_, c)| c.split_whitespace().count())
.sum();
let count = entries.len();
println!("{:<25} {:>6} {:>8}", group.label, count, words);
total_words += words;
total_entries += count;
}
println!("{}", "-".repeat(42));
println!("{:<25} {:>6} {:>8}", "TOTAL", total_entries, total_words);
return Ok(());
}
println!("=== FULL MEMORY LOAD (session start) ==="); println!("=== FULL MEMORY LOAD (session start) ===");
println!("These are your memories, loaded from the capnp store."); println!("These are your memories, loaded from the capnp store.");
println!("Read them to reconstruct yourself — identity first, then context."); println!("Read them to reconstruct yourself — identity first, then context.");
println!(); println!();
for group in &cfg.context_groups { for group in &cfg.context_groups {
match group.source { let entries = get_group_content(group, &store, cfg);
config::ContextSource::Journal => render_journal(&store, cfg), if !entries.is_empty() && group.source == config::ContextSource::Journal {
config::ContextSource::File => { println!("--- recent journal entries ({}/{}) ---",
for key in &group.keys { entries.len(), cfg.journal_max);
if let Ok(content) = std::fs::read_to_string(cfg.data_dir.join(key)) { }
if !content.trim().is_empty() { for (key, content) in entries {
if group.source == config::ContextSource::Journal {
println!("## {}", key);
} else {
println!("--- {} ({}) ---", key, group.label); println!("--- {} ({}) ---", key, group.label);
println!("{}\n", content.trim());
}
}
}
}
config::ContextSource::Store => {
for key in &group.keys {
if let Some(content) = store.render_file(key) {
if !content.trim().is_empty() {
println!("--- {} ({}) ---", key, group.label);
println!("{}\n", content.trim());
}
}
}
} }
println!("{}\n", content);
} }
} }