types: unify all epoch timestamps to i64
All epoch timestamp fields (timestamp, last_replayed, created_at on nodes; timestamp on relations) are now i64. Previously a mix of f64 and i64, which caused type seams and required unnecessary casts.

- Kill now_epoch() -> f64 and now_epoch_i64(); replace with a single now_epoch() -> i64
- All formatting functions take i64
- new_node() sets created_at automatically
- journal-ts-migrate handles all nodes, with a valid_range check to detect garbage from f64->i64 bit reinterpretation
- capnp schema: Float64 -> Int64 for all timestamp fields
This commit is contained in:
parent
b4bbafdf1c
commit
4747004b36
4 changed files with 232 additions and 56 deletions
242
src/main.rs
242
src/main.rs
|
|
@ -114,10 +114,12 @@ fn main() {
|
|||
"spectral-neighbors" => cmd_spectral_neighbors(&args[2..]),
|
||||
"spectral-positions" => cmd_spectral_positions(&args[2..]),
|
||||
"spectral-suggest" => cmd_spectral_suggest(&args[2..]),
|
||||
"list-keys" => cmd_list_keys(),
|
||||
"list-keys" => cmd_list_keys(&args[2..]),
|
||||
"list-edges" => cmd_list_edges(),
|
||||
"dump-json" => cmd_dump_json(),
|
||||
"node-delete" => cmd_node_delete(&args[2..]),
|
||||
"node-rename" => cmd_node_rename(&args[2..]),
|
||||
"journal-ts-migrate" => cmd_journal_ts_migrate(),
|
||||
"load-context" => cmd_load_context(),
|
||||
"render" => cmd_render(&args[2..]),
|
||||
"write" => cmd_write(&args[2..]),
|
||||
|
|
@ -145,7 +147,7 @@ fn usage() {
|
|||
eprintln!("poc-memory v0.4.0 — graph-structured memory store
|
||||
|
||||
Commands:
|
||||
search QUERY [--expand] Search memory (AND logic across terms)
|
||||
search QUERY [--expand] [--category CAT] Search memory (AND logic)
|
||||
init Scan markdown files, index all memory units
|
||||
migrate Migrate from old weights.json system
|
||||
health Report graph metrics (CC, communities, small-world)
|
||||
|
|
@ -192,10 +194,12 @@ Commands:
|
|||
spectral-neighbors KEY [N] Find N spectrally nearest nodes (default N=15)
|
||||
spectral-positions [N] Show N nodes ranked by outlier/bridge score (default 30)
|
||||
spectral-suggest [N] Find N spectrally close but unlinked pairs (default 20)
|
||||
list-keys List all node keys (one per line)
|
||||
list-keys [PATTERN] List all node keys (one per line, optional glob)
|
||||
list-edges List all edges (tsv: source target strength type)
|
||||
dump-json Dump entire store as JSON
|
||||
node-delete KEY Soft-delete a node (appends deleted version to log)
|
||||
node-rename OLD NEW Rename a node key; updates edge debug strings atomically
|
||||
journal-ts-migrate Populate created_at for nodes missing it
|
||||
load-context Output session-start context from the store
|
||||
render KEY Output a node's content to stdout
|
||||
write KEY Upsert node content from stdin
|
||||
|
|
@ -213,19 +217,62 @@ Commands:
|
|||
fn cmd_search(args: &[String]) -> Result<(), String> {
|
||||
use store::StoreView;
|
||||
|
||||
if args.is_empty() {
|
||||
return Err("Usage: poc-memory search QUERY [QUERY...] [--expand]".into());
|
||||
if args.is_empty() || args.iter().any(|a| a == "--help" || a == "-h") {
|
||||
println!("Usage: poc-memory search QUERY [QUERY...] [OPTIONS]
|
||||
|
||||
Search memory using spreading activation (AND logic across terms).
|
||||
|
||||
Options:
|
||||
--expand Show 15 results instead of 5, plus spectral neighbors
|
||||
--category CAT Filter results to category: core, tech, gen, obs, task
|
||||
--help, -h Show this help
|
||||
|
||||
Examples:
|
||||
poc-memory search irc connection
|
||||
poc-memory search bcachefs transaction --expand
|
||||
poc-memory search rust --category tech");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let expand = args.iter().any(|a| a == "--expand");
|
||||
|
||||
let category_filter: Option<String> = {
|
||||
let mut cat = None;
|
||||
let mut iter = args.iter();
|
||||
while let Some(a) = iter.next() {
|
||||
if a == "--category" {
|
||||
cat = iter.next().cloned();
|
||||
break;
|
||||
}
|
||||
}
|
||||
cat
|
||||
};
|
||||
|
||||
let query: String = args.iter()
|
||||
.filter(|a| *a != "--expand")
|
||||
.cloned()
|
||||
.filter(|a| *a != "--expand" && *a != "--category")
|
||||
.scan(false, |skip_next, a| {
|
||||
if *skip_next { *skip_next = false; return Some(None); }
|
||||
if a == "--category" { *skip_next = true; return Some(None); }
|
||||
Some(Some(a.as_str()))
|
||||
})
|
||||
.flatten()
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
|
||||
let view = store::AnyView::load()?;
|
||||
let results = search::search(&query, &view);
|
||||
let mut results = search::search(&query, &view);
|
||||
|
||||
// Filter by category if requested
|
||||
if let Some(ref cat_str) = category_filter {
|
||||
let cat = store::Category::from_str(cat_str)
|
||||
.ok_or_else(|| format!("Unknown category '{}' (use: core, tech, gen, obs, task)", cat_str))?;
|
||||
let store = store::Store::load()?;
|
||||
results.retain(|r| {
|
||||
store.nodes.get(&r.key)
|
||||
.map(|n| n.category.label() == cat.label())
|
||||
.unwrap_or(false)
|
||||
});
|
||||
}
|
||||
|
||||
if results.is_empty() {
|
||||
eprintln!("No results for '{}'", query);
|
||||
|
|
@ -1194,10 +1241,38 @@ fn cmd_spectral_suggest(args: &[String]) -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_list_keys() -> Result<(), String> {
|
||||
fn cmd_list_keys(args: &[String]) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
query::run_query(&store, &g, "* | sort key asc")
|
||||
let pattern = args.first().map(|s| s.as_str());
|
||||
if let Some(pat) = pattern {
|
||||
// Simple glob: only support leading/trailing * and *substring*
|
||||
let pat_lower = pat.to_lowercase();
|
||||
let (prefix, suffix, middle) = if pat_lower.starts_with('*') && pat_lower.ends_with('*') {
|
||||
(None, None, Some(pat_lower.trim_matches('*').to_string()))
|
||||
} else if pat_lower.starts_with('*') {
|
||||
(None, Some(pat_lower.trim_start_matches('*').to_string()), None)
|
||||
} else if pat_lower.ends_with('*') {
|
||||
(Some(pat_lower.trim_end_matches('*').to_string()), None, None)
|
||||
} else {
|
||||
(None, None, Some(pat_lower.clone()))
|
||||
};
|
||||
let mut keys: Vec<_> = store.nodes.keys()
|
||||
.filter(|k| {
|
||||
let kl = k.to_lowercase();
|
||||
if let Some(ref m) = middle { kl.contains(m.as_str()) }
|
||||
else if let Some(ref p) = prefix { kl.starts_with(p.as_str()) }
|
||||
else if let Some(ref s) = suffix { kl.ends_with(s.as_str()) }
|
||||
else { true }
|
||||
})
|
||||
.cloned()
|
||||
.collect();
|
||||
keys.sort();
|
||||
for k in keys { println!("{}", k); }
|
||||
Ok(())
|
||||
} else {
|
||||
query::run_query(&store, &g, "* | sort key asc")
|
||||
}
|
||||
}
|
||||
|
||||
fn cmd_list_edges() -> Result<(), String> {
|
||||
|
|
@ -1230,10 +1305,81 @@ fn cmd_node_delete(args: &[String]) -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_node_rename(args: &[String]) -> Result<(), String> {
|
||||
if args.len() < 2 {
|
||||
return Err("Usage: poc-memory node-rename OLD_KEY NEW_KEY".into());
|
||||
}
|
||||
let old_key = &args[0];
|
||||
let new_key = &args[1];
|
||||
let mut store = store::Store::load()?;
|
||||
let old_resolved = store.resolve_key(old_key)?;
|
||||
store.rename_node(&old_resolved, new_key)?;
|
||||
store.save()?;
|
||||
println!("Renamed '{}' → '{}'", old_resolved, new_key);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Migration: populate created_at for all nodes with missing or invalid values.
|
||||
/// Journal nodes: parse timestamp from key. All others: fall back to `timestamp` field.
|
||||
fn cmd_journal_ts_migrate() -> Result<(), String> {
|
||||
use chrono::{NaiveDateTime, TimeZone, Local};
|
||||
|
||||
let mut store = store::Store::load()?;
|
||||
let re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})[t-](\d{2})-(\d{2})").unwrap();
|
||||
|
||||
// Valid unix epoch range: 2001-01-01 to 2099-12-31
|
||||
let valid_range = 978_307_200i64..=4_102_444_800i64;
|
||||
|
||||
let to_update: Vec<_> = store.nodes.values()
|
||||
.filter(|n| !valid_range.contains(&n.created_at))
|
||||
.map(|n| n.key.clone())
|
||||
.collect();
|
||||
|
||||
let mut updated = 0usize;
|
||||
|
||||
for key in &to_update {
|
||||
// Try parsing timestamp from journal key
|
||||
if let Some(caps) = re.captures(key) {
|
||||
let date_str = format!("{} {}:{}", &caps[1], &caps[2], &caps[3]);
|
||||
if let Ok(ndt) = NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d %H:%M") {
|
||||
if let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
|
||||
if let Some(node) = store.nodes.get_mut(key) {
|
||||
node.created_at = dt.timestamp();
|
||||
node.version += 1;
|
||||
}
|
||||
updated += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Fall back to the node's timestamp field (last-modified, but better than 0)
|
||||
if let Some(node) = store.nodes.get_mut(key) {
|
||||
node.created_at = node.timestamp as i64;
|
||||
node.version += 1;
|
||||
updated += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Persist all updated nodes
|
||||
let nodes_to_write: Vec<_> = to_update.iter()
|
||||
.filter_map(|k| store.nodes.get(k))
|
||||
.filter(|n| valid_range.contains(&n.created_at))
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
if !nodes_to_write.is_empty() {
|
||||
store.append_nodes(&nodes_to_write)?;
|
||||
store.save()?;
|
||||
}
|
||||
|
||||
println!("journal-ts-migrate: updated {}/{}", updated, to_update.len());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_load_context() -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let now = store::now_epoch();
|
||||
let seven_days = 7.0 * 24.0 * 3600.0;
|
||||
let seven_days: i64 = 7 * 24 * 3600;
|
||||
|
||||
println!("=== FULL MEMORY LOAD (session start) ===");
|
||||
println!("These are your memories, loaded from the capnp store.");
|
||||
|
|
@ -1273,23 +1419,32 @@ fn cmd_load_context() -> Result<(), String> {
|
|||
}
|
||||
}
|
||||
|
||||
// Recent journal entries (last 7 days)
|
||||
// Parse date from key: journal.md#j-2026-02-21-17-45-...
|
||||
// Cutoff = today minus 7 days as YYYY-MM-DD string for lexicographic compare
|
||||
// Recent journal entries (last 7 days).
|
||||
// Use created_at if set (rename-safe); fall back to key parsing.
|
||||
let cutoff_secs = now - seven_days;
|
||||
let cutoff_date = store::format_date(cutoff_secs);
|
||||
let date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2})").unwrap();
|
||||
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
|
||||
|
||||
let journal_ts = |n: &store::Node| -> i64 {
|
||||
if n.created_at > 0 { return n.created_at; }
|
||||
// Legacy: parse date from key to approximate epoch
|
||||
if let Some(caps) = key_date_re.captures(&n.key) {
|
||||
use chrono::{NaiveDate, TimeZone, Local};
|
||||
if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
|
||||
if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
|
||||
return dt.timestamp();
|
||||
}
|
||||
}
|
||||
}
|
||||
n.timestamp
|
||||
};
|
||||
|
||||
let mut journal_nodes: Vec<_> = store.nodes.values()
|
||||
.filter(|n| {
|
||||
if !n.key.starts_with("journal.md#j-") { return false; }
|
||||
if let Some(caps) = date_re.captures(&n.key) {
|
||||
return &caps[1] >= cutoff_date.as_str();
|
||||
}
|
||||
false
|
||||
n.node_type == store::NodeType::EpisodicSession
|
||||
&& journal_ts(n) >= cutoff_secs
|
||||
})
|
||||
.collect();
|
||||
journal_nodes.sort_by(|a, b| a.key.cmp(&b.key));
|
||||
journal_nodes.sort_by_key(|n| journal_ts(n));
|
||||
|
||||
if !journal_nodes.is_empty() {
|
||||
// Show most recent entries (last N by key order = chronological)
|
||||
|
|
@ -1300,7 +1455,7 @@ fn cmd_load_context() -> Result<(), String> {
|
|||
println!("--- recent journal entries (last {}/{}) ---",
|
||||
journal_nodes.len().min(max_journal), journal_nodes.len());
|
||||
for node in journal_nodes.iter().skip(skip) {
|
||||
println!("## {}", node.key.strip_prefix("journal.md#").unwrap_or(&node.key));
|
||||
println!("## {}", node.key);
|
||||
println!("{}", node.content);
|
||||
println!();
|
||||
}
|
||||
|
|
@ -1486,44 +1641,53 @@ fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
|
|||
|
||||
let store = store::Store::load()?;
|
||||
|
||||
// Collect journal nodes, sorted by date extracted from content or key
|
||||
// Collect journal nodes (EpisodicSession), sorted by created_at.
|
||||
// Legacy nodes (created_at == 0) fall back to key/content parsing.
|
||||
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
|
||||
let key_date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
|
||||
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
|
||||
|
||||
let normalize_date = |s: &str| -> String {
|
||||
// Normalize to YYYY-MM-DDTHH:MM for consistent sorting
|
||||
let s = s.replace('t', "T");
|
||||
// Key dates use dashes everywhere: 2026-02-28-23-11
|
||||
// Content dates use dashes and colons: 2026-02-28T23:11
|
||||
// Normalize: first 10 chars keep dashes, rest convert dashes to colons
|
||||
if s.len() >= 16 {
|
||||
format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
|
||||
} else {
|
||||
s
|
||||
}
|
||||
};
|
||||
let extract_sort_key = |node: &store::Node| -> String {
|
||||
// Try key first (journal.md#j-2026-02-28t23-11-...)
|
||||
|
||||
// Returns (sort_key, display_string) for a journal node.
|
||||
// Prefer created_at (stable, rename-safe); fall back to key/content.
|
||||
let extract_sort = |node: &store::Node| -> (i64, String) {
|
||||
if node.created_at > 0 {
|
||||
return (node.created_at, store::format_datetime(node.created_at));
|
||||
}
|
||||
// Legacy: parse from key or content
|
||||
if let Some(caps) = key_date_re.captures(&node.key) {
|
||||
return normalize_date(&caps[1]);
|
||||
return (0, normalize_date(&caps[1]));
|
||||
}
|
||||
// Try content header (## 2026-02-28T23:11)
|
||||
if let Some(caps) = date_re.captures(&node.content) {
|
||||
return normalize_date(&caps[1]);
|
||||
return (0, normalize_date(&caps[1]));
|
||||
}
|
||||
// Fallback: use node timestamp
|
||||
format!("{:.0}", node.timestamp)
|
||||
(node.timestamp, store::format_datetime(node.timestamp))
|
||||
};
|
||||
|
||||
let mut journal: Vec<_> = store.nodes.values()
|
||||
.filter(|node| node.key.starts_with("journal.md#j-"))
|
||||
.filter(|node| node.node_type == store::NodeType::EpisodicSession)
|
||||
.collect();
|
||||
journal.sort_by_key(|n| extract_sort_key(n));
|
||||
journal.sort_by(|a, b| {
|
||||
let (at, as_) = extract_sort(a);
|
||||
let (bt, bs) = extract_sort(b);
|
||||
if at > 0 && bt > 0 {
|
||||
at.cmp(&bt)
|
||||
} else {
|
||||
as_.cmp(&bs)
|
||||
}
|
||||
});
|
||||
|
||||
// Show last N — each entry: [timestamp] ## Title
|
||||
let skip = if journal.len() > n { journal.len() - n } else { 0 };
|
||||
for node in journal.iter().skip(skip) {
|
||||
let ts = extract_sort_key(node);
|
||||
let (_, ts) = extract_sort(node);
|
||||
// Find a meaningful title: first ## header, or first non-date non-empty line
|
||||
let mut title = String::new();
|
||||
for line in node.content.lines() {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue