experience-mine: retroactive journaling from conversation transcripts

Reads a conversation JSONL, identifies experiential moments that
weren't captured in real-time journal entries, and writes them as
journal nodes in the store. The agent writes in PoC's voice with
emotion tags, focusing on intimate moments, shifts in understanding,
and small pleasures — not clinical topic extraction.

Conversation timestamps are now extracted and included in formatted
output, enabling accurate temporal placement of mined entries.

Also: extract_conversation now returns timestamps as a 4th tuple field.
This commit is contained in:
ProofOfConcept 2026-03-01 01:47:31 -05:00
parent 515f673251
commit 30d176d455
2 changed files with 236 additions and 5 deletions

View file

@ -91,6 +91,7 @@ fn main() {
"digest" => cmd_digest(&args[2..]),
"digest-links" => cmd_digest_links(&args[2..]),
"journal-enrich" => cmd_journal_enrich(&args[2..]),
"experience-mine" => cmd_experience_mine(&args[2..]),
"apply-consolidation" => cmd_apply_consolidation(&args[2..]),
"differentiate" => cmd_differentiate(&args[2..]),
"link-audit" => cmd_link_audit(&args[2..]),
@ -154,6 +155,7 @@ Commands:
digest-links [--apply] Parse and apply links from digest files
journal-enrich JSONL TEXT [LINE]
Enrich journal entry with conversation links
experience-mine [JSONL] Mine conversation for experiential moments to journal
apply-consolidation [--apply] [--report FILE]
Extract and apply actions from consolidation reports
differentiate [KEY] [--apply]
@ -714,6 +716,46 @@ fn cmd_journal_enrich(args: &[String]) -> Result<(), String> {
digest::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
}
/// `experience-mine [JSONL]` — mine a conversation transcript for
/// experiential moments and write them as journal entries.
///
/// With no argument, falls back to the most recently modified `.jsonl`
/// transcript under `$HOME/.claude/projects` (see `latest_transcript`).
///
/// # Errors
/// Returns an error message when no transcript can be located, the
/// given path is not a file, or the store / mining step fails.
fn cmd_experience_mine(args: &[String]) -> Result<(), String> {
    let jsonl_path = match args.first() {
        Some(path) => path.clone(),
        None => latest_transcript()?,
    };
    if !std::path::Path::new(&jsonl_path).is_file() {
        return Err(format!("JSONL not found: {}", jsonl_path));
    }
    let mut store = capnp_store::Store::load()?;
    let count = digest::experience_mine(&mut store, &jsonl_path)?;
    println!("Done: {} new entries mined.", count);
    Ok(())
}

/// Find the most recently modified `.jsonl` transcript under
/// `$HOME/.claude/projects`, scanning exactly one level of project
/// directories (layout: `projects/<project>/<session>.jsonl`).
///
/// Tracks a running maximum by mtime instead of collecting and sorting
/// every entry — only the newest transcript is ever needed.
///
/// # Errors
/// Returns an error message when no transcript exists (including the
/// case where `HOME` is unset, which makes the scan root match nothing).
fn latest_transcript() -> Result<String, String> {
    // NOTE(review): empty-HOME fallback mirrors the original behavior
    // (scan rooted at "/.claude/projects"); confirm that is intended.
    let home = std::env::var("HOME").unwrap_or_default();
    let projects_dir = std::path::Path::new(&home).join(".claude/projects");
    let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
    if let Ok(dirs) = std::fs::read_dir(&projects_dir) {
        for dir in dirs.flatten() {
            if let Ok(files) = std::fs::read_dir(dir.path()) {
                for file in files.flatten() {
                    let path = file.path();
                    if path.extension().map_or(false, |ext| ext == "jsonl") {
                        // Files with unreadable metadata are silently skipped,
                        // matching the original best-effort scan.
                        if let Ok(mtime) = file.metadata().and_then(|m| m.modified()) {
                            if newest.as_ref().map_or(true, |(t, _)| mtime > *t) {
                                newest = Some((mtime, path));
                            }
                        }
                    }
                }
            }
        }
    }
    newest
        .map(|(_, p)| p.to_string_lossy().into_owned())
        .ok_or_else(|| "no JSONL transcripts found".to_string())
}
fn cmd_apply_consolidation(args: &[String]) -> Result<(), String> {
let do_apply = args.iter().any(|a| a == "--apply");
let report_file = args.windows(2)