add journal-write and journal-tail commands
journal-write creates entries directly in the capnp store with auto-generated timestamped keys (journal.md#j-YYYY-MM-DDtHH-MM-slug), episodic session type, and source ref from current transcript. journal-tail sorts entries by date extracted from content headers, falling back to key-embedded dates, then node timestamp. poc-journal shell script now delegates to these commands instead of appending to journal.md. Journal entries are store-first.
This commit is contained in:
parent
7b811125ca
commit
da10dfaeb2
1 changed file with 115 additions and 1 deletion
116
src/main.rs
116
src/main.rs
|
|
@ -68,6 +68,8 @@ fn main() {
|
|||
"write" => cmd_write(&args[2..]),
|
||||
"import" => cmd_import(&args[2..]),
|
||||
"export" => cmd_export(&args[2..]),
|
||||
"journal-write" => cmd_journal_write(&args[2..]),
|
||||
"journal-tail" => cmd_journal_tail(&args[2..]),
|
||||
_ => {
|
||||
eprintln!("Unknown command: {}", args[1]);
|
||||
usage();
|
||||
|
|
@ -121,7 +123,9 @@ Commands:
|
|||
render KEY Output a node's content to stdout
|
||||
write KEY Upsert node content from stdin
|
||||
import FILE [FILE...] Import markdown file(s) into the store
|
||||
export [FILE|--all] Export store nodes to markdown file(s)");
|
||||
export [FILE|--all] Export store nodes to markdown file(s)
|
||||
journal-write TEXT Write a journal entry to the store
|
||||
journal-tail [N] Show last N journal entries (default 20)");
|
||||
}
|
||||
|
||||
fn cmd_search(args: &[String]) -> Result<(), String> {
|
||||
|
|
@ -1147,6 +1151,116 @@ fn cmd_export(args: &[String]) -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_journal_write(args: &[String]) -> Result<(), String> {
|
||||
if args.is_empty() {
|
||||
return Err("Usage: poc-memory journal-write TEXT".into());
|
||||
}
|
||||
let text = args.join(" ");
|
||||
|
||||
// Generate timestamp and slug
|
||||
let timestamp = {
|
||||
let out = std::process::Command::new("date")
|
||||
.arg("+%Y-%m-%dT%H:%M")
|
||||
.output().map_err(|e| format!("date: {}", e))?;
|
||||
String::from_utf8_lossy(&out.stdout).trim().to_string()
|
||||
};
|
||||
|
||||
// Slug: lowercase first ~6 words, hyphenated, truncated
|
||||
let slug: String = text.split_whitespace()
|
||||
.take(6)
|
||||
.map(|w| w.to_lowercase()
|
||||
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
|
||||
.collect::<String>())
|
||||
.collect::<Vec<_>>()
|
||||
.join("-");
|
||||
let slug = if slug.len() > 50 { &slug[..50] } else { &slug };
|
||||
|
||||
let key = format!("journal.md#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
|
||||
|
||||
// Build content with header
|
||||
let content = format!("## {}\n\n{}", timestamp, text);
|
||||
|
||||
// Find source ref (current transcript)
|
||||
let source_ref = {
|
||||
let project_dir = format!(
|
||||
"{}/.claude/projects/-home-kent-bcachefs-tools",
|
||||
std::env::var("HOME").unwrap_or_default()
|
||||
);
|
||||
let dir = std::path::Path::new(&project_dir);
|
||||
if dir.exists() {
|
||||
let mut jsonls: Vec<_> = std::fs::read_dir(dir).ok()
|
||||
.map(|rd| rd.filter_map(|e| e.ok())
|
||||
.filter(|e| e.path().extension().map(|x| x == "jsonl").unwrap_or(false))
|
||||
.collect())
|
||||
.unwrap_or_default();
|
||||
jsonls.sort_by_key(|e| std::cmp::Reverse(
|
||||
e.metadata().ok().and_then(|m| m.modified().ok())
|
||||
));
|
||||
jsonls.first().map(|e| e.path().to_string_lossy().to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
let mut store = capnp_store::Store::load()?;
|
||||
|
||||
let mut node = capnp_store::Store::new_node(&key, &content);
|
||||
node.node_type = capnp_store::NodeType::EpisodicSession;
|
||||
node.provenance = capnp_store::Provenance::Journal;
|
||||
if let Some(ref src) = source_ref {
|
||||
node.source_ref = src.clone();
|
||||
}
|
||||
|
||||
store.append_nodes(&[node.clone()])?;
|
||||
store.uuid_to_key.insert(node.uuid, node.key.clone());
|
||||
store.nodes.insert(key.clone(), node);
|
||||
store.save()?;
|
||||
|
||||
let word_count = text.split_whitespace().count();
|
||||
println!("Appended entry at {} ({} words)", timestamp, word_count);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
|
||||
let n: usize = args.first()
|
||||
.and_then(|a| a.parse().ok())
|
||||
.unwrap_or(20);
|
||||
|
||||
let store = capnp_store::Store::load()?;
|
||||
|
||||
// Collect journal nodes, sorted by date extracted from content or key
|
||||
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
|
||||
let key_date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
|
||||
|
||||
let extract_sort_key = |node: &capnp_store::Node| -> String {
|
||||
// Try content header first (## 2026-02-28T23:11)
|
||||
if let Some(caps) = date_re.captures(&node.content) {
|
||||
return caps[1].to_string();
|
||||
}
|
||||
// Try key (journal.md#j-2026-02-28t23-11-...)
|
||||
if let Some(caps) = key_date_re.captures(&node.key) {
|
||||
return caps[1].replace('t', "T").replace('-', ":");
|
||||
}
|
||||
// Fallback: use node timestamp
|
||||
format!("{:.0}", node.timestamp)
|
||||
};
|
||||
|
||||
let mut journal: Vec<_> = store.nodes.values()
|
||||
.filter(|node| node.key.starts_with("journal.md#j-"))
|
||||
.collect();
|
||||
journal.sort_by_key(|n| extract_sort_key(n));
|
||||
|
||||
// Show last N
|
||||
let skip = if journal.len() > n { journal.len() - n } else { 0 };
|
||||
for node in journal.iter().skip(skip) {
|
||||
println!("{}", node.content);
|
||||
println!();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_interference(args: &[String]) -> Result<(), String> {
|
||||
let mut threshold = 0.4f32;
|
||||
let mut i = 0;
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue