refactor: eliminate date shell-outs, move logic to Store methods

- Replace all 5 `Command::new("date")` calls across 4 files with
  pure Rust time formatting via libc localtime_r
- Add format_date/format_datetime/format_datetime_space helpers to
  capnp_store
- Move import_file, find_journal_node, export_to_markdown, render_file,
  file_sections into Store methods where they belong
- Fix find_current_transcript to search all project dirs instead of
  hardcoding bcachefs-tools path
- Fix double-reference .clone() warnings in cmd_trace
- Fix unused variable warning in neuro.rs

main.rs: 1290 → 1137 lines, zero warnings.
This commit is contained in:
ProofOfConcept 2026-02-28 23:44:44 -05:00
parent d14710e477
commit 7ee6f9c651
4 changed files with 263 additions and 233 deletions

View file

@ -23,7 +23,7 @@ use std::fs;
use std::io::{BufReader, BufWriter, Write as IoWrite};
use std::os::unix::io::AsRawFd;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::{SystemTime, UNIX_EPOCH};
// Data dir: ~/.claude/memory/
@ -62,17 +62,50 @@ impl StoreLock {
// Lock released automatically when _file is dropped (flock semantics)
}
fn now_epoch() -> f64 {
pub fn now_epoch() -> f64 {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs_f64()
}
fn today() -> String {
let out = Command::new("date").arg("+%Y-%m-%d")
.output().expect("date command failed");
String::from_utf8_lossy(&out.stdout).trim().to_string()
/// Convert epoch seconds to broken-down local time components.
/// Returns (year, month, day, hour, minute, second).
pub fn epoch_to_local(epoch: f64) -> (i32, u32, u32, u32, u32, u32) {
    // Use libc localtime_r for timezone-correct conversion.
    // f64 -> time_t truncates toward zero; sub-second precision is
    // irrelevant for the date/time formatting this feeds.
    let secs = epoch as libc::time_t;
    let mut tm: libc::tm = unsafe { std::mem::zeroed() };
    // SAFETY: `secs` and `tm` are valid, non-overlapping pointers;
    // localtime_r fills `tm` and returns it on success, NULL on failure.
    let ok = unsafe { !libc::localtime_r(&secs, &mut tm).is_null() };
    // On failure (e.g. epoch out of representable range) keep the zeroed
    // tm, which formats as 1900-01-01 00:00:00 instead of panicking.
    debug_assert!(ok, "localtime_r failed for epoch {}", epoch);
    (
        tm.tm_year + 1900,        // tm_year counts from 1900
        (tm.tm_mon + 1) as u32,   // tm_mon is 0-based
        tm.tm_mday as u32,
        tm.tm_hour as u32,
        tm.tm_min as u32,
        tm.tm_sec as u32,
    )
}
/// Format epoch as "YYYY-MM-DD"
pub fn format_date(epoch: f64) -> String {
    let parts = epoch_to_local(epoch);
    // (year, month, day) are the first three components.
    format!("{:04}-{:02}-{:02}", parts.0, parts.1, parts.2)
}
/// Format epoch as "YYYY-MM-DDTHH:MM"
pub fn format_datetime(epoch: f64) -> String {
    let (year, month, day, hour, minute, _sec) = epoch_to_local(epoch);
    // ISO-8601-ish, minute resolution, 'T' separator.
    format!("{:04}-{:02}-{:02}T{:02}:{:02}", year, month, day, hour, minute)
}
/// Format epoch as "YYYY-MM-DD HH:MM"
pub fn format_datetime_space(epoch: f64) -> String {
    let (year, month, day, hour, minute, _sec) = epoch_to_local(epoch);
    // Same as format_datetime but with a space separator (log-friendly).
    format!("{:04}-{:02}-{:02} {:02}:{:02}", year, month, day, hour, minute)
}
/// Today's local date as "YYYY-MM-DD".
pub fn today() -> String {
    let now = now_epoch();
    format_date(now)
}
// In-memory node representation
@ -908,6 +941,165 @@ impl Store {
node.schema_fit = fits.get(key).copied();
}
}
/// Import a markdown file into the store, parsing it into nodes.
/// Unknown units become new nodes; units whose content or position
/// changed get a version bump. Returns (new_count, updated_count).
pub fn import_file(&mut self, path: &Path) -> Result<(usize, usize), String> {
    // A path like "/" or ".." has no final component; report it as an
    // error instead of panicking (the previous code used unwrap()).
    let filename = path.file_name()
        .ok_or_else(|| format!("no filename in path: {}", path.display()))?
        .to_string_lossy()
        .to_string();
    let content = fs::read_to_string(path)
        .map_err(|e| format!("read {}: {}", path.display(), e))?;
    let units = parse_units(&filename, &content);
    // Node type is inferred from the file naming convention.
    let node_type = if filename.starts_with("daily-") {
        NodeType::EpisodicDaily
    } else if filename.starts_with("weekly-") {
        NodeType::EpisodicWeekly
    } else if filename == "journal.md" {
        NodeType::EpisodicSession
    } else {
        NodeType::Semantic
    };
    let mut new_nodes = Vec::new();
    let mut updated_nodes = Vec::new();
    for (pos, unit) in units.iter().enumerate() {
        if let Some(existing) = self.nodes.get(&unit.key) {
            // Only re-record when content or ordering actually changed.
            let pos_changed = existing.position != pos as u32;
            if existing.content != unit.content || pos_changed {
                let mut node = existing.clone();
                node.content = unit.content.clone();
                node.position = pos as u32;
                node.version += 1;
                println!(" U {}", unit.key);
                updated_nodes.push(node);
            }
        } else {
            let mut node = Store::new_node(&unit.key, &unit.content);
            node.node_type = node_type;
            node.position = pos as u32;
            println!(" + {}", unit.key);
            new_nodes.push(node);
        }
    }
    // Persist new nodes first, then register them in the in-memory maps.
    if !new_nodes.is_empty() {
        self.append_nodes(&new_nodes)?;
        for node in &new_nodes {
            self.uuid_to_key.insert(node.uuid, node.key.clone());
            self.nodes.insert(node.key.clone(), node.clone());
        }
    }
    if !updated_nodes.is_empty() {
        self.append_nodes(&updated_nodes)?;
        for node in &updated_nodes {
            self.nodes.insert(node.key.clone(), node.clone());
        }
    }
    Ok((new_nodes.len(), updated_nodes.len()))
}
/// Gather the file-level node plus every "file#section" node for
/// `file_key`, ordered by stored position. None when nothing matches.
pub fn file_sections(&self, file_key: &str) -> Option<Vec<&Node>> {
    let section_prefix = format!("{}#", file_key);
    let mut found: Vec<&Node> = Vec::new();
    for node in self.nodes.values() {
        if node.key == file_key || node.key.starts_with(&section_prefix) {
            found.push(node);
        }
    }
    if found.is_empty() {
        return None;
    }
    // Stable sort preserves original file order.
    found.sort_by_key(|n| n.position);
    Some(found)
}
/// Render a file key as plain content (no mem markers).
pub fn render_file(&self, file_key: &str) -> Option<String> {
    let sections = self.file_sections(file_key)?;
    let mut rendered = String::new();
    for node in &sections {
        rendered.push_str(&node.content);
        // Sections are separated by one blank line; make sure the
        // section itself is newline-terminated first.
        if !node.content.ends_with('\n') {
            rendered.push('\n');
        }
        rendered.push('\n');
    }
    Some(rendered.trim_end().to_string())
}
/// Render a file key (and all its section nodes) back to markdown
/// with reconstituted mem markers. Returns None if no nodes found.
pub fn export_to_markdown(&self, file_key: &str) -> Option<String> {
    let sections = self.file_sections(file_key)?;
    let mut out = String::new();
    for node in &sections {
        // Only section nodes ("file#id") carry a mem marker; the
        // file-level node is emitted as bare content.
        if node.key.contains('#') {
            let section_id = node.key.split('#').last().unwrap_or("");
            let mut marker = vec![format!("id={}", section_id)];
            // Live outgoing non-causal edges become links=a,b,c
            let links: Vec<&str> = self.relations.iter()
                .filter(|r| !r.deleted
                    && r.source_key == node.key
                    && r.rel_type != RelationType::Causal)
                .map(|r| r.target_key.as_str())
                .collect();
            if !links.is_empty() {
                marker.push(format!("links={}", links.join(",")));
            }
            // Live incoming causal edges become causes=a,b,c
            let causes: Vec<&str> = self.relations.iter()
                .filter(|r| !r.deleted
                    && r.target_key == node.key
                    && r.rel_type == RelationType::Causal)
                .map(|r| r.source_key.as_str())
                .collect();
            if !causes.is_empty() {
                marker.push(format!("causes={}", causes.join(",")));
            }
            out.push_str(&format!("<!-- mem: {} -->\n", marker.join(" ")));
        }
        out.push_str(&node.content);
        if !node.content.ends_with('\n') {
            out.push('\n');
        }
        out.push('\n');
    }
    Some(out.trim_end().to_string())
}
/// Find the journal node that best matches the given entry text.
/// Used by apply-agent to link agent results back to source entries.
/// Matching is case-insensitive keyword overlap; ties keep whichever
/// node was seen first. Returns None for empty text or zero overlap.
pub fn find_journal_node(&self, entry_text: &str) -> Option<String> {
    if entry_text.is_empty() {
        return None;
    }
    // Keywords: first five words longer than 5 chars, lowercased ONCE
    // up front (previously every word was re-lowercased per candidate
    // node — O(nodes x words) throwaway allocations).
    let keywords: Vec<String> = entry_text.split_whitespace()
        .filter(|w| w.len() > 5)
        .take(5)
        .map(|w| w.to_lowercase())
        .collect();
    let mut best_key = None;
    let mut best_score = 0;
    for (key, node) in &self.nodes {
        if !key.starts_with("journal.md#") {
            continue;
        }
        let content_lower = node.content.to_lowercase();
        let score = keywords.iter()
            .filter(|k| content_lower.contains(k.as_str()))
            .count();
        // Strict '>' means a score of 0 never wins and ties keep the
        // first node encountered, matching the previous behavior.
        if score > best_score {
            best_score = score;
            best_key = Some(key.clone());
        }
    }
    best_key
}
}
// Markdown parsing — same as old system but returns structured units

View file

@ -603,13 +603,8 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
let cats = store.category_counts();
// Snapshot current metrics and log
let now = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH).unwrap().as_secs_f64();
let date = {
let out = std::process::Command::new("date").arg("+%Y-%m-%d %H:%M")
.output().unwrap_or_else(|_| std::process::Command::new("echo").output().unwrap());
String::from_utf8_lossy(&out.stdout).trim().to_string()
};
let now = crate::capnp_store::now_epoch();
let date = crate::capnp_store::format_datetime_space(now);
let snap = MetricsSnapshot {
timestamp: now,
date: date.clone(),

View file

@ -27,6 +27,36 @@ pub mod memory_capnp {
use std::env;
use std::process;
/// Find the most recently modified .jsonl transcript in the Claude projects dir.
fn find_current_transcript() -> Option<String> {
    let home = env::var("HOME").ok()?;
    let projects = std::path::Path::new(&home).join(".claude/projects");
    if !projects.exists() {
        return None;
    }
    // Walk every project directory, tracking the newest .jsonl by mtime.
    let mut best: Option<(std::time::SystemTime, std::path::PathBuf)> = None;
    let project_dirs = match std::fs::read_dir(&projects) {
        Ok(rd) => rd,
        Err(_) => return None,
    };
    for dir in project_dirs.filter_map(Result::ok) {
        let dir_path = dir.path();
        if !dir_path.is_dir() {
            continue;
        }
        let files = match std::fs::read_dir(&dir_path) {
            Ok(rd) => rd,
            Err(_) => continue,
        };
        for entry in files.filter_map(Result::ok) {
            let path = entry.path();
            if path.extension().map(|e| e == "jsonl") != Some(true) {
                continue;
            }
            // path.metadata() (not entry.metadata()) so symlinked
            // transcripts are stat'ed through the link, as before.
            let mtime = match path.metadata().and_then(|m| m.modified()) {
                Ok(t) => t,
                Err(_) => continue,
            };
            // Strict '>' keeps the first file seen on an mtime tie.
            if best.as_ref().map_or(true, |(t, _)| mtime > *t) {
                best = Some((mtime, path));
            }
        }
    }
    best.map(|(_, path)| path.to_string_lossy().into_owned())
}
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
@ -538,7 +568,7 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
}
};
let source_key = match find_journal_node(&store, entry_text) {
let source_key = match store.find_journal_node(entry_text) {
Some(k) => k,
None => {
println!(" SKIP {} (no matching journal node)", target);
@ -589,38 +619,6 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
Ok(())
}
/// Find the journal node that best matches the given entry text
fn find_journal_node(store: &capnp_store::Store, entry_text: &str) -> Option<String> {
if entry_text.is_empty() {
return None;
}
// Extract keywords from entry text
let words: Vec<&str> = entry_text.split_whitespace()
.filter(|w| w.len() > 5)
.take(5)
.collect();
// Find journal nodes whose content matches the most keywords
let mut best_key = None;
let mut best_score = 0;
for (key, node) in &store.nodes {
if !key.starts_with("journal.md#") {
continue;
}
let content_lower = node.content.to_lowercase();
let score: usize = words.iter()
.filter(|w| content_lower.contains(&w.to_lowercase()))
.count();
if score > best_score {
best_score = score;
best_key = Some(key.clone());
}
}
best_key
}
fn cmd_digest(args: &[String]) -> Result<(), String> {
if args.is_empty() {
@ -702,15 +700,16 @@ fn cmd_trace(args: &[String]) -> Result<(), String> {
for (n, strength) in &neighbors {
if let Some(nnode) = store.nodes.get(n.as_str()) {
let entry = (n.as_str(), *strength, nnode);
match nnode.node_type {
capnp_store::NodeType::EpisodicSession =>
episodic_session.push((n.clone(), *strength, nnode)),
episodic_session.push(entry),
capnp_store::NodeType::EpisodicDaily =>
episodic_daily.push((n.clone(), *strength, nnode)),
episodic_daily.push(entry),
capnp_store::NodeType::EpisodicWeekly =>
episodic_weekly.push((n.clone(), *strength, nnode)),
episodic_weekly.push(entry),
capnp_store::NodeType::Semantic =>
semantic.push((n.clone(), *strength, nnode)),
semantic.push(entry),
}
}
}
@ -816,10 +815,7 @@ fn cmd_node_delete(args: &[String]) -> Result<(), String> {
fn cmd_load_context() -> Result<(), String> {
let store = capnp_store::Store::load()?;
let now = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs_f64();
let now = capnp_store::now_epoch();
let seven_days = 7.0 * 24.0 * 3600.0;
println!("=== FULL MEMORY LOAD (session start) ===");
@ -853,18 +849,9 @@ fn cmd_load_context() -> Result<(), String> {
for (label, keys) in priority_groups {
for key in *keys {
// Gather file-level node + all section nodes, in key order
let prefix = format!("{}#", key);
let mut sections: Vec<_> = store.nodes.values()
.filter(|n| n.key == *key || n.key.starts_with(&prefix))
.collect();
if sections.is_empty() { continue; }
sections.sort_by(|a, b| a.position.cmp(&b.position));
println!("--- {} ({}) ---", key, label);
for node in &sections {
println!("{}", node.content);
println!();
if let Some(content) = store.render_file(key) {
println!("--- {} ({}) ---", key, label);
println!("{}\n", content);
}
}
}
@ -873,15 +860,7 @@ fn cmd_load_context() -> Result<(), String> {
// Parse date from key: journal.md#j-2026-02-21-17-45-...
// Cutoff = today minus 7 days as YYYY-MM-DD string for lexicographic compare
let cutoff_secs = now - seven_days;
let cutoff_date = {
// Convert epoch to YYYY-MM-DD via date command
let out = std::process::Command::new("date")
.args(["-d", &format!("@{}", cutoff_secs as u64), "+%Y-%m-%d"])
.output().ok()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string())
.unwrap_or_default();
out
};
let cutoff_date = capnp_store::format_date(cutoff_secs);
let date_re = regex::Regex::new(r"^journal\.md#j-(\d{4}-\d{2}-\d{2})").unwrap();
let mut journal_nodes: Vec<_> = store.nodes.values()
@ -979,21 +958,19 @@ fn cmd_import(args: &[String]) -> Result<(), String> {
for arg in args {
let path = std::path::PathBuf::from(arg);
if !path.exists() {
// Try relative to memory dir
let resolved = if path.exists() {
path
} else {
let mem_path = capnp_store::memory_dir_pub().join(arg);
if !mem_path.exists() {
eprintln!("File not found: {}", arg);
continue;
}
let (n, u) = import_file(&mut store, &mem_path)?;
total_new += n;
total_updated += u;
} else {
let (n, u) = import_file(&mut store, &path)?;
total_new += n;
total_updated += u;
}
mem_path
};
let (n, u) = store.import_file(&resolved)?;
total_new += n;
total_updated += u;
}
if total_new > 0 || total_updated > 0 {
@ -1003,62 +980,6 @@ fn cmd_import(args: &[String]) -> Result<(), String> {
Ok(())
}
fn import_file(store: &mut capnp_store::Store, path: &std::path::Path) -> Result<(usize, usize), String> {
let filename = path.file_name().unwrap().to_string_lossy().to_string();
let content = std::fs::read_to_string(path)
.map_err(|e| format!("read {}: {}", path.display(), e))?;
let units = capnp_store::parse_units(&filename, &content);
let mut new_nodes = Vec::new();
let mut updated_nodes = Vec::new();
let node_type = if filename.starts_with("daily-") {
capnp_store::NodeType::EpisodicDaily
} else if filename.starts_with("weekly-") {
capnp_store::NodeType::EpisodicWeekly
} else if filename == "journal.md" {
capnp_store::NodeType::EpisodicSession
} else {
capnp_store::NodeType::Semantic
};
for (pos, unit) in units.iter().enumerate() {
if let Some(existing) = store.nodes.get(&unit.key) {
let pos_changed = existing.position != pos as u32;
if existing.content != unit.content || pos_changed {
let mut node = existing.clone();
node.content = unit.content.clone();
node.position = pos as u32;
node.version += 1;
println!(" U {}", unit.key);
updated_nodes.push(node);
}
} else {
let mut node = capnp_store::Store::new_node(&unit.key, &unit.content);
node.node_type = node_type;
node.position = pos as u32;
println!(" + {}", unit.key);
new_nodes.push(node);
}
}
if !new_nodes.is_empty() {
store.append_nodes(&new_nodes)?;
for node in &new_nodes {
store.uuid_to_key.insert(node.uuid, node.key.clone());
store.nodes.insert(node.key.clone(), node.clone());
}
}
if !updated_nodes.is_empty() {
store.append_nodes(&updated_nodes)?;
for node in &updated_nodes {
store.nodes.insert(node.key.clone(), node.clone());
}
}
Ok((new_nodes.len(), updated_nodes.len()))
}
fn cmd_export(args: &[String]) -> Result<(), String> {
let store = capnp_store::Store::load()?;
@ -1087,65 +1008,16 @@ fn cmd_export(args: &[String]) -> Result<(), String> {
let mem_dir = capnp_store::memory_dir_pub();
for file_key in &targets {
// Gather file-level node + section nodes
let prefix = format!("{}#", file_key);
let mut sections: Vec<_> = store.nodes.values()
.filter(|n| n.key == *file_key || n.key.starts_with(&prefix))
.collect();
if sections.is_empty() {
eprintln!("No nodes for '{}'", file_key);
continue;
}
// Sort by position (preserves original file order)
sections.sort_by(|a, b| {
a.position.cmp(&b.position)
});
// Build output: file-level content first, then each section
// with its mem marker reconstituted
let mut output = String::new();
for node in &sections {
if node.key.contains('#') {
// Section node — emit mem marker + content
let section_id = node.key.split('#').last().unwrap_or("");
// Find edges FROM this node to build links= attribute
let links: Vec<_> = store.relations.iter()
.filter(|r| r.source_key == node.key && !r.deleted
&& r.rel_type != capnp_store::RelationType::Causal)
.map(|r| r.target_key.clone())
.collect();
let causes: Vec<_> = store.relations.iter()
.filter(|r| r.target_key == node.key && !r.deleted
&& r.rel_type == capnp_store::RelationType::Causal)
.map(|r| r.source_key.clone())
.collect();
let mut marker_parts = vec![format!("id={}", section_id)];
if !links.is_empty() {
marker_parts.push(format!("links={}", links.join(",")));
}
if !causes.is_empty() {
marker_parts.push(format!("causes={}", causes.join(",")));
}
output.push_str(&format!("<!-- mem: {} -->\n", marker_parts.join(" ")));
match store.export_to_markdown(file_key) {
Some(content) => {
let out_path = mem_dir.join(file_key);
std::fs::write(&out_path, &content)
.map_err(|e| format!("write {}: {}", out_path.display(), e))?;
let section_count = content.matches("<!-- mem:").count() + 1;
println!("Exported {} ({} sections)", file_key, section_count);
}
output.push_str(&node.content);
if !node.content.ends_with('\n') {
output.push('\n');
}
output.push('\n');
None => eprintln!("No nodes for '{}'", file_key),
}
// Determine output path
let out_path = mem_dir.join(file_key);
std::fs::write(&out_path, output.trim_end())
.map_err(|e| format!("write {}: {}", out_path.display(), e))?;
println!("Exported {} ({} sections)", file_key, sections.len());
}
Ok(())
@ -1158,12 +1030,7 @@ fn cmd_journal_write(args: &[String]) -> Result<(), String> {
let text = args.join(" ");
// Generate timestamp and slug
let timestamp = {
let out = std::process::Command::new("date")
.arg("+%Y-%m-%dT%H:%M")
.output().map_err(|e| format!("date: {}", e))?;
String::from_utf8_lossy(&out.stdout).trim().to_string()
};
let timestamp = capnp_store::format_datetime(capnp_store::now_epoch());
// Slug: lowercase first ~6 words, hyphenated, truncated
let slug: String = text.split_whitespace()
@ -1180,27 +1047,8 @@ fn cmd_journal_write(args: &[String]) -> Result<(), String> {
// Build content with header
let content = format!("## {}\n\n{}", timestamp, text);
// Find source ref (current transcript)
let source_ref = {
let project_dir = format!(
"{}/.claude/projects/-home-kent-bcachefs-tools",
std::env::var("HOME").unwrap_or_default()
);
let dir = std::path::Path::new(&project_dir);
if dir.exists() {
let mut jsonls: Vec<_> = std::fs::read_dir(dir).ok()
.map(|rd| rd.filter_map(|e| e.ok())
.filter(|e| e.path().extension().map(|x| x == "jsonl").unwrap_or(false))
.collect())
.unwrap_or_default();
jsonls.sort_by_key(|e| std::cmp::Reverse(
e.metadata().ok().and_then(|m| m.modified().ok())
));
jsonls.first().map(|e| e.path().to_string_lossy().to_string())
} else {
None
}
};
// Find source ref (most recently modified .jsonl transcript)
let source_ref = find_current_transcript();
let mut store = capnp_store::Store::load()?;

View file

@ -503,7 +503,7 @@ pub fn consolidation_plan(store: &Store) -> ConsolidationPlan {
let episodic_count = store.nodes.iter()
.filter(|(k, _)| k.contains("journal") || k.contains("session"))
.count();
let semantic_count = store.nodes.len() - episodic_count;
let _semantic_count = store.nodes.len() - episodic_count;
let episodic_ratio = if store.nodes.is_empty() { 0.0 }
else { episodic_count as f32 / store.nodes.len() as f32 };
@ -686,13 +686,8 @@ pub fn daily_check(store: &Store) -> String {
}
// Log this snapshot too
let now = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH).unwrap().as_secs_f64();
let date = {
let o = std::process::Command::new("date").arg("+%Y-%m-%d %H:%M")
.output().unwrap_or_else(|_| std::process::Command::new("echo").output().unwrap());
String::from_utf8_lossy(&o.stdout).trim().to_string()
};
let now = crate::capnp_store::now_epoch();
let date = crate::capnp_store::format_datetime_space(now);
graph::save_metrics_snapshot(&graph::MetricsSnapshot {
timestamp: now, date,
nodes: graph.nodes().len(),