2026-03-14 18:10:22 -04:00
|
|
|
// cli/journal.rs — journal subcommand handlers
|
|
|
|
|
|
|
|
|
|
|
2026-03-26 18:41:10 -04:00
|
|
|
/// Print the last `n` entries of the append-only node log.
///
/// Streams every Cap'n Proto `node_log` message out of the on-disk log,
/// optionally filters by provenance, then prints the trailing `n`
/// entries — full content when `full` is set, otherwise a short preview.
///
/// * `n` — number of trailing entries to print.
/// * `full` — print full node content instead of a one-line preview.
/// * `provenance` — when set, keep only nodes whose provenance contains
///   this string (substring match — see comment at the filter below).
///
/// Returns `Err` when the log file is missing, cannot be opened, or a
/// decoded message fails to yield its root/node list.
pub fn cmd_tail(n: usize, full: bool, provenance: Option<&str>) -> Result<(), String> {
    let path = crate::store::nodes_path();
    if !path.exists() {
        return Err("No node log found".into());
    }

    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| format!("open {}: {}", path.display(), e))?;
    let mut reader = BufReader::new(file);

    // Read all entries, keep last N.
    // NOTE(review): the `while let Ok(..)` loop terminates on the first
    // read error, which is how end-of-file is detected here — but a
    // genuinely corrupt message mid-file is also silently treated as EOF.
    let mut entries: Vec<crate::store::Node> = Vec::new();
    while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<crate::memory_capnp::node_log::Reader>()
            .map_err(|e| format!("read log: {}", e))?;
        for node_reader in log.get_nodes()
            .map_err(|e| format!("get nodes: {}", e))? {
            // Migrating read: tolerates older on-disk node layouts.
            let node = crate::store::Node::from_capnp_migrate(node_reader)?;
            entries.push(node);
        }
    }

    // Filter by provenance if specified. This is a SUBSTRING match
    // (`contains`), not a prefix match as the old comment claimed.
    if let Some(prov) = provenance {
        entries.retain(|n| n.provenance.contains(prov));
    }

    // Keep only the trailing `n` entries (all of them when n >= len).
    let start = entries.len().saturating_sub(n);
    for node in &entries[start..] {
        // Timestamps outside a plausible epoch-seconds window
        // (0 < t < 4e9, i.e. roughly before year 2096) are formatted;
        // anything else is shown raw so bad data stays visible.
        let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
            crate::store::format_datetime(node.timestamp)
        } else {
            format!("(raw:{})", node.timestamp)
        };
        let del = if node.deleted { " [DELETED]" } else { "" };
        if full {
            println!("--- {} (v{}) {} via {} w={:.3}{} ---",
                node.key, node.version, ts, node.provenance, node.weight, del);
            println!("{}\n", node.content);
        } else {
            // One-line preview: first 100 chars, embedded newlines escaped.
            let preview = crate::util::first_n_chars(&node.content, 100).replace('\n', "\\n");
            println!("  {} v{} w={:.2}{}",
                ts, node.version, node.weight, del);
            println!("  {} via {}", node.key, node.provenance);
            if !preview.is_empty() {
                println!("  {}", preview);
            }
            println!();
        }
    }

    Ok(())
}
|
|
|
|
|
|
2026-03-14 18:14:52 -04:00
|
|
|
pub fn find_current_transcript() -> Option<String> {
|
|
|
|
|
let projects = crate::config::get().projects_dir.clone();
|
|
|
|
|
if !projects.exists() { return None; }
|
|
|
|
|
|
|
|
|
|
let mut newest: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
|
|
|
|
|
if let Ok(dirs) = std::fs::read_dir(&projects) {
|
|
|
|
|
for dir_entry in dirs.filter_map(|e| e.ok()) {
|
|
|
|
|
if !dir_entry.path().is_dir() { continue; }
|
|
|
|
|
if let Ok(files) = std::fs::read_dir(dir_entry.path()) {
|
|
|
|
|
for f in files.filter_map(|e| e.ok()) {
|
|
|
|
|
let p = f.path();
|
2026-03-21 19:42:38 -04:00
|
|
|
if p.extension().map(|x| x == "jsonl").unwrap_or(false)
|
|
|
|
|
&& let Ok(meta) = p.metadata()
|
|
|
|
|
&& let Ok(mtime) = meta.modified()
|
|
|
|
|
&& newest.as_ref().is_none_or(|(t, _)| mtime > *t) {
|
2026-03-14 18:14:52 -04:00
|
|
|
newest = Some((mtime, p));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
newest.map(|(_, p)| p.to_string_lossy().to_string())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn journal_tail_entries(store: &crate::store::Store, n: usize, full: bool) -> Result<(), String> {
|
|
|
|
|
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
|
|
|
|
|
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
|
|
|
|
|
|
|
|
|
|
let normalize_date = |s: &str| -> String {
|
|
|
|
|
let s = s.replace('t', "T");
|
|
|
|
|
if s.len() >= 16 {
|
|
|
|
|
format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
|
|
|
|
|
} else {
|
|
|
|
|
s
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let extract_sort = |node: &crate::store::Node| -> (i64, String) {
|
|
|
|
|
if node.created_at > 0 {
|
|
|
|
|
return (node.created_at, crate::store::format_datetime(node.created_at));
|
|
|
|
|
}
|
|
|
|
|
if let Some(caps) = key_date_re.captures(&node.key) {
|
|
|
|
|
return (0, normalize_date(&caps[1]));
|
|
|
|
|
}
|
|
|
|
|
if let Some(caps) = date_re.captures(&node.content) {
|
|
|
|
|
return (0, normalize_date(&caps[1]));
|
|
|
|
|
}
|
|
|
|
|
(node.timestamp, crate::store::format_datetime(node.timestamp))
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let mut journal: Vec<_> = store.nodes.values()
|
|
|
|
|
.filter(|node| node.node_type == crate::store::NodeType::EpisodicSession)
|
|
|
|
|
.collect();
|
|
|
|
|
journal.sort_by(|a, b| {
|
|
|
|
|
let (at, as_) = extract_sort(a);
|
|
|
|
|
let (bt, bs) = extract_sort(b);
|
|
|
|
|
if at > 0 && bt > 0 {
|
|
|
|
|
at.cmp(&bt)
|
|
|
|
|
} else {
|
|
|
|
|
as_.cmp(&bs)
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
let skip = if journal.len() > n { journal.len() - n } else { 0 };
|
|
|
|
|
for node in journal.iter().skip(skip) {
|
|
|
|
|
let (_, ts) = extract_sort(node);
|
|
|
|
|
let title = extract_title(&node.content);
|
|
|
|
|
if full {
|
|
|
|
|
println!("--- [{}] {} ---\n{}\n", ts, title, node.content);
|
|
|
|
|
} else {
|
|
|
|
|
println!("[{}] {}", ts, title);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn journal_tail_digests(store: &crate::store::Store, node_type: crate::store::NodeType, n: usize, full: bool) -> Result<(), String> {
|
|
|
|
|
let mut digests: Vec<_> = store.nodes.values()
|
|
|
|
|
.filter(|node| node.node_type == node_type)
|
|
|
|
|
.collect();
|
|
|
|
|
digests.sort_by(|a, b| {
|
|
|
|
|
if a.timestamp > 0 && b.timestamp > 0 {
|
|
|
|
|
a.timestamp.cmp(&b.timestamp)
|
|
|
|
|
} else {
|
|
|
|
|
a.key.cmp(&b.key)
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
let skip = if digests.len() > n { digests.len() - n } else { 0 };
|
|
|
|
|
for node in digests.iter().skip(skip) {
|
|
|
|
|
let label = &node.key;
|
|
|
|
|
let title = extract_title(&node.content);
|
|
|
|
|
if full {
|
|
|
|
|
println!("--- [{}] {} ---\n{}\n", label, title, node.content);
|
|
|
|
|
} else {
|
|
|
|
|
println!("[{}] {}", label, title);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn cmd_journal_tail(n: usize, full: bool, level: u8) -> Result<(), String> {
|
|
|
|
|
let store = crate::store::Store::load()?;
|
|
|
|
|
|
|
|
|
|
if level == 0 {
|
|
|
|
|
journal_tail_entries(&store, n, full)
|
|
|
|
|
} else {
|
|
|
|
|
let node_type = match level {
|
|
|
|
|
1 => crate::store::NodeType::EpisodicDaily,
|
|
|
|
|
2 => crate::store::NodeType::EpisodicWeekly,
|
|
|
|
|
_ => crate::store::NodeType::EpisodicMonthly,
|
|
|
|
|
};
|
|
|
|
|
journal_tail_digests(&store, node_type, n, full)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
pub fn cmd_journal_write(text: &[String]) -> Result<(), String> {
|
|
|
|
|
if text.is_empty() {
|
|
|
|
|
return Err("journal-write requires text".into());
|
|
|
|
|
}
|
poc-memory: POC_MEMORY_DRY_RUN=1 for agent testing
All mutating commands (write, delete, rename, link-add, journal write,
used, wrong, not-useful, gap) check POC_MEMORY_DRY_RUN after argument
validation but before mutation. If set, process exits silently — agent
tool calls are visible in the LLM output so we can see what it tried
to do without applying changes.
Read commands (render, search, graph link, journal tail) work normally
in dry-run mode so agents can still explore the graph.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-16 18:09:56 -04:00
|
|
|
super::check_dry_run();
|
2026-03-14 18:14:52 -04:00
|
|
|
let text = text.join(" ");
|
|
|
|
|
|
|
|
|
|
let timestamp = crate::store::format_datetime(crate::store::now_epoch());
|
|
|
|
|
|
|
|
|
|
let slug: String = text.split_whitespace()
|
|
|
|
|
.take(6)
|
|
|
|
|
.map(|w| w.to_lowercase()
|
|
|
|
|
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
|
|
|
|
|
.collect::<String>())
|
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
|
.join("-");
|
|
|
|
|
let slug = if slug.len() > 50 { &slug[..50] } else { &slug };
|
|
|
|
|
|
|
|
|
|
let key = format!("journal#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
|
|
|
|
|
|
|
|
|
|
let content = format!("## {}\n\n{}", timestamp, text);
|
|
|
|
|
|
|
|
|
|
let source_ref = find_current_transcript();
|
|
|
|
|
|
|
|
|
|
let mut store = crate::store::Store::load()?;
|
|
|
|
|
|
|
|
|
|
let mut node = crate::store::new_node(&key, &content);
|
|
|
|
|
node.node_type = crate::store::NodeType::EpisodicSession;
|
|
|
|
|
node.provenance = "journal".to_string();
|
|
|
|
|
if let Some(src) = source_ref {
|
|
|
|
|
node.source_ref = src;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
store.upsert_node(node)?;
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
|
|
|
|
let word_count = text.split_whitespace().count();
|
|
|
|
|
println!("Appended entry at {} ({} words)", timestamp, word_count);
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
fn extract_title(content: &str) -> String {
|
|
|
|
|
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
|
|
|
|
|
for line in content.lines() {
|
|
|
|
|
let stripped = line.trim();
|
|
|
|
|
if stripped.is_empty() { continue; }
|
|
|
|
|
if date_re.is_match(stripped) && stripped.len() < 25 { continue; }
|
|
|
|
|
if let Some(h) = stripped.strip_prefix("## ") {
|
|
|
|
|
return h.to_string();
|
|
|
|
|
} else if let Some(h) = stripped.strip_prefix("# ") {
|
|
|
|
|
return h.to_string();
|
|
|
|
|
} else {
|
|
|
|
|
return crate::util::truncate(stripped, 67, "...");
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
String::from("(untitled)")
|
|
|
|
|
}
|
|
|
|
|
|