journal: remove all stringly-typed key patterns, use NodeType

- journal_new: key is slugified title (agent names things properly)
- journal_tail: sort by created_at (immutable), not timestamp (mutable)
- journal_update: find latest by created_at
- {{latest_journal}}: query by NodeType::EpisodicSession, not "journal" key
- poc-memory journal write: requires a name argument
- Removed all journal#j-{timestamp}-{slug} patterns from:
  - prompts.rs (rename candidates)
  - graph.rs (date extraction, organize skip list)
  - cursor.rs (date extraction)
  - store/mod.rs (doc comment)
- graph.rs organize: filter by NodeType::Semantic instead of key prefix
- cursor.rs: use created_at for date extraction instead of key parsing

Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
ProofOfConcept 2026-03-26 19:11:17 -04:00
parent 85fa54cba9
commit eac59b423e
9 changed files with 63 additions and 67 deletions

View file

@ -156,7 +156,8 @@ pub fn dispatch(name: &str, args: &serde_json::Value, provenance: Option<&str>)
let mut entries: Vec<&crate::store::Node> = store.nodes.values()
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
.collect();
entries.sort_by_key(|n| n.timestamp);
// Sort by creation time (immutable), not update time
entries.sort_by_key(|n| n.created_at);
let start = entries.len().saturating_sub(count);
if entries[start..].is_empty() {
Ok("(no journal entries)".into())
@ -173,19 +174,18 @@ pub fn dispatch(name: &str, args: &serde_json::Value, provenance: Option<&str>)
let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
let content = format!("## {}{}\n\n{}", ts, title, body);
let slug: String = title.split_whitespace()
.take(6)
// Key from title — the agent names things, not a placeholder slug
let key: String = title.split_whitespace()
.map(|w| w.to_lowercase()
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
.collect::<String>())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join("-");
let slug = if slug.len() > 50 { &slug[..50] } else { &slug };
let key = format!("journal-j-{}-{}",
ts.to_string().to_lowercase().replace(':', "-"), slug);
let key = if key.len() > 80 { &key[..80] } else { &key };
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
let mut node = crate::store::new_node(&key, &content);
let mut node = crate::store::new_node(key, &content);
node.node_type = crate::store::NodeType::EpisodicSession;
node.provenance = prov.to_string();
store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
@ -196,10 +196,10 @@ pub fn dispatch(name: &str, args: &serde_json::Value, provenance: Option<&str>)
"journal_update" => {
let body = get_str(args, "body")?;
let mut store = Store::load().map_err(|e| anyhow::anyhow!("{}", e))?;
// Find most recent EpisodicSession node
// Find most recent EpisodicSession by creation time
let latest_key = store.nodes.values()
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
.max_by_key(|n| n.timestamp)
.max_by_key(|n| n.created_at)
.map(|n| n.key.clone());
let Some(key) = latest_key else {
anyhow::bail!("no journal entry to update — use journal_new first");

View file

@ -480,12 +480,12 @@ pub fn cmd_organize(term: &str, threshold: f32, key_only: bool, create_anchor: b
let term_lower = term.to_lowercase();
let mut topic_nodes: Vec<(String, String)> = Vec::new(); // (key, content)
// Prefixes that indicate ephemeral/generated nodes to skip
let skip_prefixes = ["journal#", "daily-", "weekly-", "monthly-", "_",
"deep-index#", "facts-", "irc-history#"];
let skip_prefixes = ["_", "deep-index#", "facts-", "irc-history#"];
for (key, node) in &store.nodes {
if node.deleted { continue; }
// Skip episodic/digest nodes — use NodeType, not key prefix
if node.node_type != crate::store::NodeType::Semantic { continue; }
let key_matches = key.to_lowercase().contains(&term_lower);
let content_matches = !key_only && node.content.to_lowercase().contains(&term_lower);
if !key_matches && !content_matches { continue; }

View file

@ -1,7 +1,7 @@
// cli/journal.rs — journal subcommand handlers
pub fn cmd_tail(n: usize, full: bool, provenance: Option<&str>) -> Result<(), String> {
pub fn cmd_tail(n: usize, full: bool, provenance: Option<&str>, dedup: bool) -> Result<(), String> {
let path = crate::store::nodes_path();
if !path.exists() {
return Err("No node log found".into());
@ -24,11 +24,21 @@ pub fn cmd_tail(n: usize, full: bool, provenance: Option<&str>) -> Result<(), St
}
}
// Filter by provenance if specified (prefix match)
// Filter by provenance if specified (substring match)
if let Some(prov) = provenance {
entries.retain(|n| n.provenance.contains(prov));
}
// Dedup: keep only the latest version of each key
if dedup {
let mut seen = std::collections::HashSet::new();
// Walk backwards so we keep the latest
entries = entries.into_iter().rev()
.filter(|n| seen.insert(n.key.clone()))
.collect();
entries.reverse();
}
let start = entries.len().saturating_sub(n);
for node in &entries[start..] {
let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
@ -172,27 +182,23 @@ pub fn cmd_journal_tail(n: usize, full: bool, level: u8) -> Result<(), String> {
}
}
pub fn cmd_journal_write(text: &[String]) -> Result<(), String> {
pub fn cmd_journal_write(name: &str, text: &[String]) -> Result<(), String> {
if text.is_empty() {
return Err("journal-write requires text".into());
return Err("journal write requires text".into());
}
super::check_dry_run();
let text = text.join(" ");
let timestamp = crate::store::format_datetime(crate::store::now_epoch());
let content = format!("## {}{}\n\n{}", timestamp, name, text);
let slug: String = text.split_whitespace()
.take(6)
let key: String = name.split_whitespace()
.map(|w| w.to_lowercase()
.chars().filter(|c| c.is_alphanumeric() || *c == '-')
.collect::<String>())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join("-");
let slug = if slug.len() > 50 { &slug[..50] } else { &slug };
let key = format!("journal#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
let content = format!("## {}\n\n{}", timestamp, text);
let source_ref = find_current_transcript();

View file

@ -89,15 +89,11 @@ pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
if node.timestamp > 0 {
dates.push(store::format_date(node.timestamp));
}
// Extract date from key patterns like "journal#2026-03-03-..." or "journal#j-2026-03-13t..."
if let Some(rest) = key.strip_prefix("journal#j-").or_else(|| key.strip_prefix("journal#"))
&& rest.len() >= 10 {
let candidate = &rest[..10];
if candidate.chars().nth(4) == Some('-') {
let date = candidate.to_string();
if !dates.contains(&date) {
dates.push(date);
}
// Extract date from created_at timestamp
if node.created_at > 0 {
let created_date = store::format_date(node.created_at);
if !dates.contains(&created_date) {
dates.push(created_date);
}
}
for date in &dates {

View file

@ -566,20 +566,15 @@ fn add_implicit_temporal_edges(
use chrono::{Datelike, DateTime, NaiveDate};
// Extract the covered date from a key name.
// Patterns: "daily-2026-03-06", "daily-2026-03-06-identity",
// "weekly-2026-W09", "monthly-2026-02"
// "journal#j-2026-03-13t...", "journal#2026-03-13-..."
// Patterns: "daily-2026-03-06", "daily-2026-03-06-identity"
// Extract the covered date from a "daily-YYYY-MM-DD..." key.
// Returns None for any other key shape (weekly/monthly keys are
// handled by their own helpers below).
fn date_from_key(key: &str) -> Option<NaiveDate> {
    let rest = key.strip_prefix("daily-")?;
    // Use get(..10) instead of rest[..10]: a byte-index slice would
    // panic if byte 10 falls inside a multi-byte UTF-8 character,
    // whereas get() returns None and we simply report "no date".
    NaiveDate::parse_from_str(rest.get(..10)?, "%Y-%m-%d").ok()
}
fn week_from_key(key: &str) -> Option<(i32, u32)> {
// "weekly-2026-W09" → (2026, 9)

View file

@ -48,7 +48,7 @@ use std::path::Path;
use parse::classify_filename;
/// Strip .md suffix from a key, handling both bare keys and section keys.
/// "journal.md#j-2026" → "journal#j-2026", "identity.md" → "identity", "identity" → "identity"
/// "identity.md" → "identity", "foo.md#section" → "foo#section", "identity" → "identity"
pub fn strip_md_suffix(key: &str) -> String {
if let Some((file, section)) = key.split_once('#') {
let bare = file.strip_suffix(".md").unwrap_or(file);

View file

@ -93,6 +93,9 @@ enum Command {
/// Filter by provenance (substring match, e.g. "surface-observe")
#[arg(long, short)]
provenance: Option<String>,
/// Show all versions (default: dedup to latest per key)
#[arg(long)]
all_versions: bool,
},
/// Summary of memory state
Status,
@ -271,6 +274,8 @@ enum CursorCmd {
enum JournalCmd {
/// Write a journal entry to the store
Write {
/// Entry name (becomes the node key)
name: String,
/// Entry text
text: Vec<String>,
},
@ -785,8 +790,8 @@ impl Run for Command {
Self::Write { key } => cli::node::cmd_write(&key),
Self::Edit { key } => cli::node::cmd_edit(&key),
Self::History { full, key } => cli::node::cmd_history(&key, full),
Self::Tail { n, full, provenance }
=> cli::journal::cmd_tail(n, full, provenance.as_deref()),
Self::Tail { n, full, provenance, all_versions }
=> cli::journal::cmd_tail(n, full, provenance.as_deref(), !all_versions),
Self::Status => cli::misc::cmd_status(),
Self::Query { expr } => cli::misc::cmd_query(&expr),
Self::Used { key } => cli::node::cmd_used(&key),
@ -820,7 +825,7 @@ impl Run for NodeCmd {
impl Run for JournalCmd {
fn run(self) -> Result<(), String> {
match self {
Self::Write { text } => cli::journal::cmd_journal_write(&text),
Self::Write { name, text } => cli::journal::cmd_journal_write(&name, &text),
Self::Tail { n, full, level } => cli::journal::cmd_journal_tail(n, full, level),
Self::Enrich { jsonl_path, entry_text, grep_line }
=> cli::agent::cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),

View file

@ -552,22 +552,16 @@ fn resolve(
Some(Resolved { text, keys: vec![] })
}
// latest_journal — the most recent journal entry for the journal agent
// latest_journal — the most recent EpisodicSession entry
"latest_journal" => {
let text = store.nodes.get("journal")
.map(|n| {
// Get the last entry (last ## section)
let content = &n.content;
content.rfind("\n## ")
.map(|pos| content[pos..].to_string())
.unwrap_or_else(|| {
// Take the last 2000 chars if no ## found
let start = content.len().saturating_sub(2000);
content[start..].to_string()
})
})
.unwrap_or_else(|| "(no previous journal entry)".to_string());
Some(Resolved { text, keys: vec!["journal".to_string()] })
let latest = store.nodes.values()
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
.max_by_key(|n| n.created_at);
let (text, keys) = match latest {
Some(n) => (n.content.clone(), vec![n.key.clone()]),
None => ("(no previous journal entry)".to_string(), vec![]),
};
Some(Resolved { text, keys })
}
_ => None,

View file

@ -243,10 +243,10 @@ pub fn format_pairs_section(
pub fn format_rename_candidates(store: &Store, count: usize) -> (Vec<String>, String) {
let mut candidates: Vec<(&str, &crate::store::Node)> = store.nodes.iter()
.filter(|(key, _)| {
.filter(|(key, node)| {
if key.starts_with("_facts-") { return true; }
if key.len() < 60 { return false; }
if key.starts_with("journal#j-") { return true; }
if node.node_type == crate::store::NodeType::EpisodicSession { return true; }
if key.starts_with("_mined-transcripts#f-") { return true; }
false
})
@ -271,9 +271,9 @@ pub fn format_rename_candidates(store: &Store, count: usize) -> (Vec<String>, St
let mut out = String::new();
out.push_str(&format!("## Nodes to rename ({} of {} candidates)\n\n",
candidates.len(),
store.nodes.keys().filter(|k| k.starts_with("_facts-") ||
store.nodes.iter().filter(|(k, n)| k.starts_with("_facts-") ||
(k.len() >= 60 &&
(k.starts_with("journal#j-") || k.starts_with("_mined-transcripts#f-")))).count()));
(n.node_type == crate::store::NodeType::EpisodicSession || k.starts_with("_mined-transcripts#f-")))).count()));
for (key, node) in &candidates {
out.push_str(&format!("### {}\n", key));