types: unify all epoch timestamps to i64

All epoch timestamp fields (timestamp, last_replayed, created_at on
nodes; timestamp on relations) are now i64. Previously they were a mix
of f64 and i64, which caused type seams and required unnecessary casts.

- Kill now_epoch() -> f64 and now_epoch_i64(), replace with single
  now_epoch() -> i64
- All formatting functions take i64
- new_node() sets created_at automatically
- journal-ts-migrate handles all nodes, with valid_range check to
  detect garbage from f64->i64 bit reinterpretation
- capnp schema: Float64 -> Int64 for all timestamp fields
This commit is contained in:
ProofOfConcept 2026-03-05 10:23:57 -05:00
parent b4bbafdf1c
commit 4747004b36
4 changed files with 232 additions and 56 deletions

View file

@ -10,7 +10,7 @@
struct ContentNode {
uuid @0 :Data; # 16 bytes, random
version @1 :UInt32; # monotonic per UUID, latest wins
timestamp @2 :Float64; # unix epoch
timestamp @2 :Int64; # unix epoch seconds
nodeType @3 :NodeType;
provenance @4 :Provenance;
key @5 :Text; # "identity.md#boundaries" human-readable
@ -29,11 +29,15 @@ struct ContentNode {
stateTag @16 :Text; # cognitive state (warm/open, bright/alert, etc.)
# Spaced repetition
lastReplayed @17 :Float64; # unix epoch
lastReplayed @17 :Int64; # unix epoch seconds
spacedRepetitionInterval @18 :UInt32; # days: 1, 3, 7, 14, 30
# Section ordering within a file
position @19 :UInt32; # 0 = file-level, 1+ = section index
# Stable creation timestamp (unix epoch seconds). Set once when the
# node is first created; never changes on rename or content update.
createdAt @20 :Int64;
}
enum NodeType {
@ -62,7 +66,7 @@ enum Category {
struct Relation {
uuid @0 :Data; # 16 bytes, random
version @1 :UInt32;
timestamp @2 :Float64; # unix epoch
timestamp @2 :Int64; # unix epoch seconds
source @3 :Data; # content node UUID
target @4 :Data; # content node UUID
relType @5 :RelationType;

View file

@ -114,10 +114,12 @@ fn main() {
"spectral-neighbors" => cmd_spectral_neighbors(&args[2..]),
"spectral-positions" => cmd_spectral_positions(&args[2..]),
"spectral-suggest" => cmd_spectral_suggest(&args[2..]),
"list-keys" => cmd_list_keys(),
"list-keys" => cmd_list_keys(&args[2..]),
"list-edges" => cmd_list_edges(),
"dump-json" => cmd_dump_json(),
"node-delete" => cmd_node_delete(&args[2..]),
"node-rename" => cmd_node_rename(&args[2..]),
"journal-ts-migrate" => cmd_journal_ts_migrate(),
"load-context" => cmd_load_context(),
"render" => cmd_render(&args[2..]),
"write" => cmd_write(&args[2..]),
@ -145,7 +147,7 @@ fn usage() {
eprintln!("poc-memory v0.4.0 — graph-structured memory store
Commands:
search QUERY [--expand] Search memory (AND logic across terms)
search QUERY [--expand] [--category CAT] Search memory (AND logic)
init Scan markdown files, index all memory units
migrate Migrate from old weights.json system
health Report graph metrics (CC, communities, small-world)
@ -192,10 +194,12 @@ Commands:
spectral-neighbors KEY [N] Find N spectrally nearest nodes (default N=15)
spectral-positions [N] Show N nodes ranked by outlier/bridge score (default 30)
spectral-suggest [N] Find N spectrally close but unlinked pairs (default 20)
list-keys List all node keys (one per line)
list-keys [PATTERN] List all node keys (one per line, optional glob)
list-edges List all edges (tsv: source target strength type)
dump-json Dump entire store as JSON
node-delete KEY Soft-delete a node (appends deleted version to log)
node-rename OLD NEW Rename a node key; updates edge debug strings atomically
journal-ts-migrate Populate created_at for nodes missing it
load-context Output session-start context from the store
render KEY Output a node's content to stdout
write KEY Upsert node content from stdin
@ -213,19 +217,62 @@ Commands:
fn cmd_search(args: &[String]) -> Result<(), String> {
use store::StoreView;
if args.is_empty() {
return Err("Usage: poc-memory search QUERY [QUERY...] [--expand]".into());
if args.is_empty() || args.iter().any(|a| a == "--help" || a == "-h") {
println!("Usage: poc-memory search QUERY [QUERY...] [OPTIONS]
Search memory using spreading activation (AND logic across terms).
Options:
--expand Show 15 results instead of 5, plus spectral neighbors
--category CAT Filter results to category: core, tech, gen, obs, task
--help, -h Show this help
Examples:
poc-memory search irc connection
poc-memory search bcachefs transaction --expand
poc-memory search rust --category tech");
return Ok(());
}
let expand = args.iter().any(|a| a == "--expand");
let category_filter: Option<String> = {
let mut cat = None;
let mut iter = args.iter();
while let Some(a) = iter.next() {
if a == "--category" {
cat = iter.next().cloned();
break;
}
}
cat
};
let query: String = args.iter()
.filter(|a| *a != "--expand")
.cloned()
.filter(|a| *a != "--expand" && *a != "--category")
.scan(false, |skip_next, a| {
if *skip_next { *skip_next = false; return Some(None); }
if a == "--category" { *skip_next = true; return Some(None); }
Some(Some(a.as_str()))
})
.flatten()
.collect::<Vec<_>>()
.join(" ");
let view = store::AnyView::load()?;
let results = search::search(&query, &view);
let mut results = search::search(&query, &view);
// Filter by category if requested
if let Some(ref cat_str) = category_filter {
let cat = store::Category::from_str(cat_str)
.ok_or_else(|| format!("Unknown category '{}' (use: core, tech, gen, obs, task)", cat_str))?;
let store = store::Store::load()?;
results.retain(|r| {
store.nodes.get(&r.key)
.map(|n| n.category.label() == cat.label())
.unwrap_or(false)
});
}
if results.is_empty() {
eprintln!("No results for '{}'", query);
@ -1194,10 +1241,38 @@ fn cmd_spectral_suggest(args: &[String]) -> Result<(), String> {
Ok(())
}
fn cmd_list_keys() -> Result<(), String> {
fn cmd_list_keys(args: &[String]) -> Result<(), String> {
let store = store::Store::load()?;
let g = store.build_graph();
let pattern = args.first().map(|s| s.as_str());
if let Some(pat) = pattern {
// Simple glob: only support leading/trailing * and *substring*
let pat_lower = pat.to_lowercase();
let (prefix, suffix, middle) = if pat_lower.starts_with('*') && pat_lower.ends_with('*') {
(None, None, Some(pat_lower.trim_matches('*').to_string()))
} else if pat_lower.starts_with('*') {
(None, Some(pat_lower.trim_start_matches('*').to_string()), None)
} else if pat_lower.ends_with('*') {
(Some(pat_lower.trim_end_matches('*').to_string()), None, None)
} else {
(None, None, Some(pat_lower.clone()))
};
let mut keys: Vec<_> = store.nodes.keys()
.filter(|k| {
let kl = k.to_lowercase();
if let Some(ref m) = middle { kl.contains(m.as_str()) }
else if let Some(ref p) = prefix { kl.starts_with(p.as_str()) }
else if let Some(ref s) = suffix { kl.ends_with(s.as_str()) }
else { true }
})
.cloned()
.collect();
keys.sort();
for k in keys { println!("{}", k); }
Ok(())
} else {
query::run_query(&store, &g, "* | sort key asc")
}
}
fn cmd_list_edges() -> Result<(), String> {
@ -1230,10 +1305,81 @@ fn cmd_node_delete(args: &[String]) -> Result<(), String> {
Ok(())
}
/// Rename a node key; the store updates edge debug strings atomically.
fn cmd_node_rename(args: &[String]) -> Result<(), String> {
    // Require the two positional arguments; extras are ignored.
    let (old_key, new_key) = match args {
        [old, new, ..] => (old, new),
        _ => return Err("Usage: poc-memory node-rename OLD_KEY NEW_KEY".into()),
    };
    let mut store = store::Store::load()?;
    // Resolve shorthand/partial keys to the canonical key before renaming.
    let old_resolved = store.resolve_key(old_key)?;
    store.rename_node(&old_resolved, new_key)?;
    store.save()?;
    println!("Renamed '{}' → '{}'", old_resolved, new_key);
    Ok(())
}
/// Migration: populate `created_at` for all nodes with missing or invalid values.
///
/// Journal nodes get their creation time parsed from the key
/// (`j-YYYY-MM-DD[t-]HH-MM`); all other nodes fall back to the `timestamp`
/// field (last-modified — imprecise, but better than zero).
///
/// Nodes whose resulting `created_at` still falls outside `valid_range` are
/// not persisted; the range check guards against garbage left over from the
/// f64 -> i64 bit reinterpretation, so the reported count can exceed the
/// number of nodes actually written.
fn cmd_journal_ts_migrate() -> Result<(), String> {
    use chrono::{NaiveDateTime, TimeZone, Local};
    let mut store = store::Store::load()?;
    let re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})[t-](\d{2})-(\d{2})").unwrap();
    // Valid unix epoch range: 2001-01-01 to 2099-12-31
    let valid_range = 978_307_200i64..=4_102_444_800i64;
    let to_update: Vec<_> = store.nodes.values()
        .filter(|n| !valid_range.contains(&n.created_at))
        .map(|n| n.key.clone())
        .collect();
    let mut updated = 0usize;
    for key in &to_update {
        // Try parsing the creation timestamp from a journal-style key first.
        if let Some(caps) = re.captures(key) {
            let date_str = format!("{} {}:{}", &caps[1], &caps[2], &caps[3]);
            if let Ok(ndt) = NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d %H:%M") {
                if let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
                    if let Some(node) = store.nodes.get_mut(key) {
                        node.created_at = dt.timestamp();
                        node.version += 1;
                        // Count and skip the fallback only when the node was
                        // actually found and updated (previously this counted
                        // even when the lookup failed).
                        updated += 1;
                        continue;
                    }
                }
            }
        }
        // Fall back to the node's timestamp field (last-modified, but better than 0)
        if let Some(node) = store.nodes.get_mut(key) {
            node.created_at = node.timestamp; // timestamp is already i64; no cast
            node.version += 1;
            updated += 1;
        }
    }
    // Persist only the nodes whose new created_at passed the sanity range.
    let nodes_to_write: Vec<_> = to_update.iter()
        .filter_map(|k| store.nodes.get(k))
        .filter(|n| valid_range.contains(&n.created_at))
        .cloned()
        .collect();
    if !nodes_to_write.is_empty() {
        store.append_nodes(&nodes_to_write)?;
        store.save()?;
    }
    println!("journal-ts-migrate: updated {}/{}", updated, to_update.len());
    Ok(())
}
fn cmd_load_context() -> Result<(), String> {
let store = store::Store::load()?;
let now = store::now_epoch();
let seven_days = 7.0 * 24.0 * 3600.0;
let seven_days: i64 = 7 * 24 * 3600;
println!("=== FULL MEMORY LOAD (session start) ===");
println!("These are your memories, loaded from the capnp store.");
@ -1273,23 +1419,32 @@ fn cmd_load_context() -> Result<(), String> {
}
}
// Recent journal entries (last 7 days)
// Parse date from key: journal.md#j-2026-02-21-17-45-...
// Cutoff = today minus 7 days as YYYY-MM-DD string for lexicographic compare
// Recent journal entries (last 7 days).
// Use created_at if set (rename-safe); fall back to key parsing.
let cutoff_secs = now - seven_days;
let cutoff_date = store::format_date(cutoff_secs);
let date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2})").unwrap();
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2})").unwrap();
let journal_ts = |n: &store::Node| -> i64 {
if n.created_at > 0 { return n.created_at; }
// Legacy: parse date from key to approximate epoch
if let Some(caps) = key_date_re.captures(&n.key) {
use chrono::{NaiveDate, TimeZone, Local};
if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") {
if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
return dt.timestamp();
}
}
}
n.timestamp
};
let mut journal_nodes: Vec<_> = store.nodes.values()
.filter(|n| {
if !n.key.starts_with("journal.md#j-") { return false; }
if let Some(caps) = date_re.captures(&n.key) {
return &caps[1] >= cutoff_date.as_str();
}
false
n.node_type == store::NodeType::EpisodicSession
&& journal_ts(n) >= cutoff_secs
})
.collect();
journal_nodes.sort_by(|a, b| a.key.cmp(&b.key));
journal_nodes.sort_by_key(|n| journal_ts(n));
if !journal_nodes.is_empty() {
// Show most recent entries (last N by key order = chronological)
@ -1300,7 +1455,7 @@ fn cmd_load_context() -> Result<(), String> {
println!("--- recent journal entries (last {}/{}) ---",
journal_nodes.len().min(max_journal), journal_nodes.len());
for node in journal_nodes.iter().skip(skip) {
println!("## {}", node.key.strip_prefix("journal.md#").unwrap_or(&node.key));
println!("## {}", node.key);
println!("{}", node.content);
println!();
}
@ -1486,44 +1641,53 @@ fn cmd_journal_tail(args: &[String]) -> Result<(), String> {
let store = store::Store::load()?;
// Collect journal nodes, sorted by date extracted from content or key
// Collect journal nodes (EpisodicSession), sorted by created_at.
// Legacy nodes (created_at == 0) fall back to key/content parsing.
let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
let key_date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
let normalize_date = |s: &str| -> String {
// Normalize to YYYY-MM-DDTHH:MM for consistent sorting
let s = s.replace('t', "T");
// Key dates use dashes everywhere: 2026-02-28-23-11
// Content dates use dashes and colons: 2026-02-28T23:11
// Normalize: first 10 chars keep dashes, rest convert dashes to colons
if s.len() >= 16 {
format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
} else {
s
}
};
let extract_sort_key = |node: &store::Node| -> String {
// Try key first (journal.md#j-2026-02-28t23-11-...)
// Returns (sort_key, display_string) for a journal node.
// Prefer created_at (stable, rename-safe); fall back to key/content.
let extract_sort = |node: &store::Node| -> (i64, String) {
if node.created_at > 0 {
return (node.created_at, store::format_datetime(node.created_at));
}
// Legacy: parse from key or content
if let Some(caps) = key_date_re.captures(&node.key) {
return normalize_date(&caps[1]);
return (0, normalize_date(&caps[1]));
}
// Try content header (## 2026-02-28T23:11)
if let Some(caps) = date_re.captures(&node.content) {
return normalize_date(&caps[1]);
return (0, normalize_date(&caps[1]));
}
// Fallback: use node timestamp
format!("{:.0}", node.timestamp)
(node.timestamp, store::format_datetime(node.timestamp))
};
let mut journal: Vec<_> = store.nodes.values()
.filter(|node| node.key.starts_with("journal.md#j-"))
.filter(|node| node.node_type == store::NodeType::EpisodicSession)
.collect();
journal.sort_by_key(|n| extract_sort_key(n));
journal.sort_by(|a, b| {
let (at, as_) = extract_sort(a);
let (bt, bs) = extract_sort(b);
if at > 0 && bt > 0 {
at.cmp(&bt)
} else {
as_.cmp(&bs)
}
});
// Show last N — each entry: [timestamp] ## Title
let skip = if journal.len() > n { journal.len() - n } else { 0 };
for node in journal.iter().skip(skip) {
let ts = extract_sort_key(node);
let (_, ts) = extract_sort(node);
// Find a meaningful title: first ## header, or first non-date non-empty line
let mut title = String::new();
for line in node.content.lines() {

View file

@ -216,9 +216,10 @@ pub fn migrate() -> Result<(), String> {
uses: old_entry.uses,
wrongs: old_entry.wrongs,
state_tag,
last_replayed: 0.0,
last_replayed: 0,
spaced_repetition_interval: 1,
position: 0,
created_at: 0,
community_id: None,
clustering_coefficient: None,
degree: None,
@ -255,9 +256,10 @@ pub fn migrate() -> Result<(), String> {
uses: 0,
wrongs: 0,
state_tag: unit.state.clone().unwrap_or_default(),
last_replayed: 0.0,
last_replayed: 0,
spaced_repetition_interval: 1,
position: 0,
created_at: 0,
community_id: None,
clustering_coefficient: None,
degree: None,

View file

@ -120,18 +120,18 @@ impl StoreLock {
// Lock released automatically when _file is dropped (flock semantics)
}
/// Current time as unix epoch seconds.
///
/// All timestamps in the store are i64 epoch seconds; this is the single
/// source of "now" for them.
pub fn now_epoch() -> i64 {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        // Failure means the system clock is set before 1970 — a broken
        // environment this store cannot meaningfully run in.
        .expect("system clock is set before the UNIX epoch")
        .as_secs() as i64
}
/// Convert epoch seconds to broken-down local time components.
/// Returns (year, month, day, hour, minute, second).
pub fn epoch_to_local(epoch: f64) -> (i32, u32, u32, u32, u32, u32) {
pub fn epoch_to_local(epoch: i64) -> (i32, u32, u32, u32, u32, u32) {
use chrono::{Datelike, Local, TimeZone, Timelike};
let dt = Local.timestamp_opt(epoch as i64, 0).unwrap();
let dt = Local.timestamp_opt(epoch, 0).unwrap();
(
dt.year(),
dt.month(),
@ -143,19 +143,19 @@ pub fn epoch_to_local(epoch: f64) -> (i32, u32, u32, u32, u32, u32) {
}
/// Format epoch as "YYYY-MM-DD"
pub fn format_date(epoch: f64) -> String {
pub fn format_date(epoch: i64) -> String {
let (y, m, d, _, _, _) = epoch_to_local(epoch);
format!("{:04}-{:02}-{:02}", y, m, d)
}
/// Format epoch as "YYYY-MM-DDTHH:MM"
pub fn format_datetime(epoch: f64) -> String {
pub fn format_datetime(epoch: i64) -> String {
let (y, m, d, h, min, _) = epoch_to_local(epoch);
format!("{:04}-{:02}-{:02}T{:02}:{:02}", y, m, d, h, min)
}
/// Format epoch as "YYYY-MM-DD HH:MM"
pub fn format_datetime_space(epoch: f64) -> String {
pub fn format_datetime_space(epoch: i64) -> String {
let (y, m, d, h, min, _) = epoch_to_local(epoch);
format!("{:04}-{:02}-{:02} {:02}:{:02}", y, m, d, h, min)
}
@ -170,7 +170,7 @@ pub fn today() -> String {
pub struct Node {
pub uuid: [u8; 16],
pub version: u32,
pub timestamp: f64,
pub timestamp: i64,
pub node_type: NodeType,
pub provenance: Provenance,
pub key: String,
@ -185,13 +185,18 @@ pub struct Node {
pub uses: u32,
pub wrongs: u32,
pub state_tag: String,
pub last_replayed: f64,
pub last_replayed: i64,
pub spaced_repetition_interval: u32,
// Position within file (section index, for export ordering)
#[serde(default)]
pub position: u32,
// Stable creation timestamp (unix epoch seconds). Set once at creation;
// never updated on rename or content update. Zero for legacy nodes.
#[serde(default)]
pub created_at: i64,
// Derived fields (not in capnp, computed from graph)
#[serde(default)]
pub community_id: Option<u32>,
@ -206,7 +211,7 @@ pub struct Node {
pub struct Relation {
pub uuid: [u8; 16],
pub version: u32,
pub timestamp: f64,
pub timestamp: i64,
pub source: [u8; 16],
pub target: [u8; 16],
pub rel_type: RelationType,
@ -306,7 +311,7 @@ capnp_message!(Node,
uuid: [uuid],
prim: [version, timestamp, weight, emotion, deleted,
retrievals, uses, wrongs, last_replayed,
spaced_repetition_interval, position],
spaced_repetition_interval, position, created_at],
enm: [node_type: NodeType, provenance: Provenance, category: Category],
skip: [community_id, clustering_coefficient, degree],
);
@ -444,9 +449,10 @@ pub fn new_node(key: &str, content: &str) -> Node {
uses: 0,
wrongs: 0,
state_tag: String::new(),
last_replayed: 0.0,
last_replayed: 0,
spaced_repetition_interval: 1,
position: 0,
created_at: now_epoch(),
community_id: None,
clustering_coefficient: None,
degree: None,