load-context --stats: word count breakdown by group
Also refactors journal rendering into get_group_content() so all source types use the same code path, removing the separate render_journal() function. Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
parent
28b9784e1f
commit
0daf6ffd68
1 changed file with 79 additions and 55 deletions
134
src/main.rs
134
src/main.rs
|
|
@ -127,7 +127,7 @@ fn main() {
|
|||
"node-delete" => cmd_node_delete(&args[2..]),
|
||||
"node-rename" => cmd_node_rename(&args[2..]),
|
||||
"journal-ts-migrate" => cmd_journal_ts_migrate(),
|
||||
"load-context" => cmd_load_context(),
|
||||
"load-context" => cmd_load_context(&args[2..]),
|
||||
"render" => cmd_render(&args[2..]),
|
||||
"write" => cmd_write(&args[2..]),
|
||||
"import" => cmd_import(&args[2..]),
|
||||
|
|
@ -1399,78 +1399,102 @@ fn cmd_journal_ts_migrate() -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn render_journal(store: &store::Store, cfg: &config::Config) {
|
||||
let now = store::now_epoch();
|
||||
let journal_window: i64 = cfg.journal_days as i64 * 24 * 3600;
|
||||
let cutoff_secs = now - journal_window;
|
||||
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
|
||||
fn get_group_content(group: &config::ContextGroup, store: &store::Store, cfg: &config::Config) -> Vec<(String, String)> {
|
||||
match group.source {
|
||||
config::ContextSource::Journal => {
|
||||
let mut entries = Vec::new();
|
||||
let now = store::now_epoch();
|
||||
let window: i64 = cfg.journal_days as i64 * 24 * 3600;
|
||||
let cutoff = now - window;
|
||||
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
|
||||
|
||||
let journal_ts = |n: &store::Node| -> i64 {
|
||||
if n.created_at > 0 { return n.created_at; }
|
||||
if let Some(caps) = key_date_re.captures(&n.key) {
|
||||
use chrono::{NaiveDate, TimeZone, Local};
|
||||
if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
|
||||
if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
|
||||
return dt.timestamp();
|
||||
let journal_ts = |n: &store::Node| -> i64 {
|
||||
if n.created_at > 0 { return n.created_at; }
|
||||
if let Some(caps) = key_date_re.captures(&n.key) {
|
||||
use chrono::{NaiveDate, TimeZone, Local};
|
||||
if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
|
||||
if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
|
||||
return dt.timestamp();
|
||||
}
|
||||
}
|
||||
}
|
||||
n.timestamp
|
||||
};
|
||||
|
||||
let mut journal_nodes: Vec<_> = store.nodes.values()
|
||||
.filter(|n| n.node_type == store::NodeType::EpisodicSession && journal_ts(n) >= cutoff)
|
||||
.collect();
|
||||
journal_nodes.sort_by_key(|n| journal_ts(n));
|
||||
|
||||
let max = cfg.journal_max;
|
||||
let skip = journal_nodes.len().saturating_sub(max);
|
||||
for node in journal_nodes.iter().skip(skip) {
|
||||
entries.push((node.key.clone(), node.content.clone()));
|
||||
}
|
||||
entries
|
||||
}
|
||||
n.timestamp
|
||||
};
|
||||
|
||||
let mut journal_nodes: Vec<_> = store.nodes.values()
|
||||
.filter(|n| {
|
||||
n.node_type == store::NodeType::EpisodicSession
|
||||
&& journal_ts(n) >= cutoff_secs
|
||||
})
|
||||
.collect();
|
||||
journal_nodes.sort_by_key(|n| journal_ts(n));
|
||||
|
||||
if !journal_nodes.is_empty() {
|
||||
let max_journal = cfg.journal_max;
|
||||
let skip = journal_nodes.len().saturating_sub(max_journal);
|
||||
println!("--- recent journal entries (last {}/{}) ---",
|
||||
journal_nodes.len().min(max_journal), journal_nodes.len());
|
||||
for node in journal_nodes.iter().skip(skip) {
|
||||
println!("## {}", node.key);
|
||||
println!("{}", node.content);
|
||||
println!();
|
||||
config::ContextSource::File => {
|
||||
group.keys.iter().filter_map(|key| {
|
||||
let content = std::fs::read_to_string(cfg.data_dir.join(key)).ok()?;
|
||||
if content.trim().is_empty() { return None; }
|
||||
Some((key.clone(), content.trim().to_string()))
|
||||
}).collect()
|
||||
}
|
||||
config::ContextSource::Store => {
|
||||
group.keys.iter().filter_map(|key| {
|
||||
let content = store.render_file(key)?;
|
||||
if content.trim().is_empty() { return None; }
|
||||
Some((key.clone(), content.trim().to_string()))
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cmd_load_context() -> Result<(), String> {
|
||||
fn cmd_load_context(args: &[String]) -> Result<(), String> {
|
||||
let stats = args.iter().any(|a| a == "--stats");
|
||||
let cfg = config::get();
|
||||
let store = store::Store::load()?;
|
||||
|
||||
if stats {
|
||||
let mut total_words = 0;
|
||||
let mut total_entries = 0;
|
||||
println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS");
|
||||
println!("{}", "-".repeat(42));
|
||||
|
||||
for group in &cfg.context_groups {
|
||||
let entries = get_group_content(group, &store, cfg);
|
||||
let words: usize = entries.iter()
|
||||
.map(|(_, c)| c.split_whitespace().count())
|
||||
.sum();
|
||||
let count = entries.len();
|
||||
println!("{:<25} {:>6} {:>8}", group.label, count, words);
|
||||
total_words += words;
|
||||
total_entries += count;
|
||||
}
|
||||
|
||||
println!("{}", "-".repeat(42));
|
||||
println!("{:<25} {:>6} {:>8}", "TOTAL", total_entries, total_words);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("=== FULL MEMORY LOAD (session start) ===");
|
||||
println!("These are your memories, loaded from the capnp store.");
|
||||
println!("Read them to reconstruct yourself — identity first, then context.");
|
||||
println!();
|
||||
|
||||
for group in &cfg.context_groups {
|
||||
match group.source {
|
||||
config::ContextSource::Journal => render_journal(&store, cfg),
|
||||
config::ContextSource::File => {
|
||||
for key in &group.keys {
|
||||
if let Ok(content) = std::fs::read_to_string(cfg.data_dir.join(key)) {
|
||||
if !content.trim().is_empty() {
|
||||
println!("--- {} ({}) ---", key, group.label);
|
||||
println!("{}\n", content.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
config::ContextSource::Store => {
|
||||
for key in &group.keys {
|
||||
if let Some(content) = store.render_file(key) {
|
||||
if !content.trim().is_empty() {
|
||||
println!("--- {} ({}) ---", key, group.label);
|
||||
println!("{}\n", content.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
let entries = get_group_content(group, &store, cfg);
|
||||
if !entries.is_empty() && group.source == config::ContextSource::Journal {
|
||||
println!("--- recent journal entries ({}/{}) ---",
|
||||
entries.len(), cfg.journal_max);
|
||||
}
|
||||
for (key, content) in entries {
|
||||
if group.source == config::ContextSource::Journal {
|
||||
println!("## {}", key);
|
||||
} else {
|
||||
println!("--- {} ({}) ---", key, group.label);
|
||||
}
|
||||
println!("{}\n", content);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue