diff --git a/Cargo.toml b/Cargo.toml index 6d4b065..59615a1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,7 @@ serde = { version = "1", features = ["derive"] } serde_json = "1" bincode = "1" regex = "1" +chrono = "0.4" libc = "0.2" faer = "0.24.0" rkyv = { version = "0.7", features = ["validation", "std"] } diff --git a/src/consolidate.rs b/src/consolidate.rs index bf854b2..94e0f9e 100644 --- a/src/consolidate.rs +++ b/src/consolidate.rs @@ -19,11 +19,7 @@ use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; -fn agent_results_dir() -> PathBuf { - let dir = store::memory_dir().join("agent-results"); - fs::create_dir_all(&dir).ok(); - dir -} +use crate::util::memory_subdir; /// Simple append-only log writer for consolidate-full. struct LogWriter { @@ -49,7 +45,7 @@ impl LogWriter { /// Run the full autonomous consolidation pipeline with logging. pub fn consolidate_full(store: &mut Store) -> Result<(), String> { let start = std::time::Instant::now(); - let log_path = agent_results_dir().join("consolidate-full.log"); + let log_path = memory_subdir("agent-results")?.join("consolidate-full.log"); let mut log = LogWriter::new(&log_path)?; log.write("=== CONSOLIDATE FULL ===")?; @@ -162,7 +158,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> { let ts = store::format_datetime(store::now_epoch()) .replace([':', '-', 'T'], ""); let report_name = format!("consolidation-{}-{}.md", agent_type, ts); - let report_path = agent_results_dir().join(&report_name); + let report_path = memory_subdir("agent-results")?.join(&report_name); fs::write(&report_path, &response) .map_err(|e| format!("write report: {}", e))?; reports.push(report_path.clone()); @@ -233,7 +229,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> { println!("\n--- Applying digest links ---"); *store = Store::load()?; - let links = digest::parse_all_digest_links(); + let links = digest::parse_all_digest_links()?; let (applied, skipped, fallbacks) = 
digest::apply_digest_links(store, &links); store.save()?; log.write(&format!(" {} links applied, {} skipped, {} fallbacks", @@ -265,8 +261,8 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> { } /// Find the most recent set of consolidation reports. -fn find_consolidation_reports() -> Vec<PathBuf> { - let dir = agent_results_dir(); +fn find_consolidation_reports() -> Result<Vec<PathBuf>, String> { + let dir = memory_subdir("agent-results")?; let mut reports: Vec<PathBuf> = fs::read_dir(&dir) .map(|entries| { entries.filter_map(|e| e.ok()) @@ -283,7 +279,7 @@ fn find_consolidation_reports() -> Vec<PathBuf> { reports.sort(); reports.reverse(); - if reports.is_empty() { return reports; } + if reports.is_empty() { return Ok(reports); } // Group by timestamp (last segment of stem before .md) let latest_ts = reports[0].file_stem() @@ -299,7 +295,7 @@ fn find_consolidation_reports() -> Vec<PathBuf> { .ends_with(latest_ts.as_str()) }); - reports + Ok(reports) } fn build_consolidation_prompt(reports: &[PathBuf]) -> Result<String, String> { @@ -321,7 +317,7 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio let reports = if let Some(path) = report_file { vec![PathBuf::from(path)] } else { - find_consolidation_reports() + find_consolidation_reports()? }; if reports.is_empty() { @@ -350,7 +346,7 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio // Save actions let timestamp = store::format_datetime(store::now_epoch()) .replace([':', '-'], ""); - let actions_path = agent_results_dir() + let actions_path = memory_subdir("agent-results")?
.join(format!("consolidation-actions-{}.json", timestamp)); fs::write(&actions_path, serde_json::to_string_pretty(&actions_value).unwrap()) .map_err(|e| format!("write {}: {}", actions_path.display(), e))?; diff --git a/src/digest.rs b/src/digest.rs index b7038c1..dbd0424 100644 --- a/src/digest.rs +++ b/src/digest.rs @@ -12,17 +12,7 @@ use regex::Regex; use std::fs; use std::path::{Path, PathBuf}; -fn episodic_dir() -> PathBuf { - let dir = store::memory_dir().join("episodic"); - fs::create_dir_all(&dir).ok(); - dir -} - -fn agent_results_dir() -> PathBuf { - let dir = store::memory_dir().join("agent-results"); - fs::create_dir_all(&dir).ok(); - dir -} +use crate::util::memory_subdir; /// Extract link proposals from digest text (backtick-arrow patterns) fn extract_links(text: &str) -> Vec<(String, String)> { @@ -97,7 +87,7 @@ pub fn generate_daily(store: &mut Store, date: &str) -> Result<(), String> { let digest = call_sonnet(&prompt, 300)?; // Write to episodic dir - let output_path = episodic_dir().join(format!("daily-{}.md", date)); + let output_path = memory_subdir("episodic")?.join(format!("daily-{}.md", date)); fs::write(&output_path, &digest) .map_err(|e| format!("write {}: {}", output_path.display(), e))?; println!(" Written: {}", output_path.display()); @@ -118,7 +108,7 @@ pub fn generate_daily(store: &mut Store, date: &str) -> Result<(), String> { "digest_path": output_path.to_string_lossy(), "links": links_json, }); - let links_path = agent_results_dir().join(format!("daily-{}-links.json", date)); + let links_path = memory_subdir("agent-results")?.join(format!("daily-{}-links.json", date)); let json = serde_json::to_string_pretty(&result) .map_err(|e| format!("serialize: {}", e))?; fs::write(&links_path, json) @@ -135,70 +125,36 @@ pub fn generate_daily(store: &mut Store, date: &str) -> Result<(), String> { /// Get ISO week label and the 7 dates (Mon-Sun) for the week containing `date`. 
fn week_dates(date: &str) -> Result<(String, Vec<String>), String> { - // Parse YYYY-MM-DD - let parts: Vec<&str> = date.split('-').collect(); - if parts.len() != 3 { - return Err(format!("bad date: {}", date)); - } - let y: i32 = parts[0].parse().map_err(|_| "bad year")?; - let m: u32 = parts[1].parse().map_err(|_| "bad month")?; - let d: u32 = parts[2].parse().map_err(|_| "bad day")?; + use chrono::{Datelike, Duration, NaiveDate}; - let (weekday, iso_year, iso_week) = iso_week_info(y, m, d)?; - - let week_label = format!("{}-W{:02}", iso_year, iso_week); + let nd = NaiveDate::parse_from_str(date, "%Y-%m-%d") + .map_err(|e| format!("bad date '{}': {}", date, e))?; + let iso = nd.iso_week(); + let week_label = format!("{}-W{:02}", iso.year(), iso.week()); // Find Monday of this week - let days_since_monday = (weekday + 6) % 7; // weekday: 0=Sun, adjust to Mon=0 - let monday_epoch = date_to_epoch(y, m, d) - (days_since_monday as i64) * 86400; + let days_since_monday = nd.weekday().num_days_from_monday() as i64; + let monday = nd - Duration::days(days_since_monday); - let mut dates = Vec::new(); - for i in 0..7 { - let day_epoch = monday_epoch + (i * 86400); - let (dy, dm, dd, _, _, _) = store::epoch_to_local(day_epoch as f64); - dates.push(format!("{:04}-{:02}-{:02}", dy, dm, dd)); - } + let dates = (0..7) + .map(|i| (monday + Duration::days(i)).format("%Y-%m-%d").to_string()) + .collect(); Ok((week_label, dates)) } -fn date_to_epoch(y: i32, m: u32, d: u32) -> i64 { - let mut tm: libc::tm = unsafe { std::mem::zeroed() }; - tm.tm_year = y - 1900; - tm.tm_mon = (m as i32) - 1; - tm.tm_mday = d as i32; - tm.tm_hour = 12; // noon to avoid DST edge cases - unsafe { libc::mktime(&mut tm) as i64 } -} - /// Returns (weekday 0=Sun, iso_year, iso_week) for a given date.
fn iso_week_info(y: i32, m: u32, d: u32) -> Result<(u32, i32, u32), String> { - let mut tm: libc::tm = unsafe { std::mem::zeroed() }; - tm.tm_year = y - 1900; - tm.tm_mon = (m as i32) - 1; - tm.tm_mday = d as i32; - tm.tm_hour = 12; - let epoch = unsafe { libc::mktime(&mut tm) }; - if epoch == -1 { - return Err(format!("invalid date: {}-{}-{}", y, m, d)); - } - let wday = tm.tm_wday as u32; - - let mut buf = [0u8; 32]; - let fmt = std::ffi::CString::new("%G %V").unwrap(); - let len = unsafe { - libc::strftime(buf.as_mut_ptr() as *mut libc::c_char, buf.len(), fmt.as_ptr(), &tm) - }; - let iso_str = std::str::from_utf8(&buf[..len]).unwrap_or("0 0"); - let iso_parts: Vec<&str> = iso_str.split_whitespace().collect(); - let iso_year: i32 = iso_parts.first().and_then(|s| s.parse().ok()).unwrap_or(y); - let iso_week: u32 = iso_parts.get(1).and_then(|s| s.parse().ok()).unwrap_or(1); - - Ok((wday, iso_year, iso_week)) + use chrono::{Datelike, NaiveDate}; + let date = NaiveDate::from_ymd_opt(y, m, d) + .ok_or_else(|| format!("invalid date: {}-{}-{}", y, m, d))?; + let wday = date.weekday().num_days_from_sunday(); + let iso = date.iso_week(); + Ok((wday, iso.year(), iso.week())) } -fn load_digest_files(prefix: &str, labels: &[String]) -> Vec<(String, String)> { - let dir = episodic_dir(); +fn load_digest_files(prefix: &str, labels: &[String]) -> Result<Vec<(String, String)>, String> { + let dir = memory_subdir("episodic")?; let mut digests = Vec::new(); for label in labels { let path = dir.join(format!("{}-{}.md", prefix, label)); @@ -206,7 +162,7 @@ fn load_digest_files(prefix: &str, labels: &[String]) -> Vec<(String, String)> { digests.push((label.clone(), content)); } } - digests + Ok(digests) } fn build_weekly_prompt(week_label: &str, digests: &[(String, String)], keys: &[String]) -> Result<String, String> { @@ -237,7 +193,7 @@ pub fn generate_weekly(store: &mut Store, date: &str) -> Result<(), String> { let (week_label, dates) = week_dates(date)?; println!("Generating weekly digest for {}...", week_label); -
let digests = load_digest_files("daily", &dates); + let digests = load_digest_files("daily", &dates)?; if digests.is_empty() { println!(" No daily digests found for {}", week_label); println!(" Run `poc-memory digest daily` first for relevant dates"); @@ -254,7 +210,7 @@ pub fn generate_weekly(store: &mut Store, date: &str) -> Result<(), String> { println!(" Calling Sonnet..."); let digest = call_sonnet(&prompt, 300)?; - let output_path = episodic_dir().join(format!("weekly-{}.md", week_label)); + let output_path = memory_subdir("episodic")?.join(format!("weekly-{}.md", week_label)); fs::write(&output_path, &digest) .map_err(|e| format!("write {}: {}", output_path.display(), e))?; println!(" Written: {}", output_path.display()); @@ -269,7 +225,7 @@ pub fn generate_weekly(store: &mut Store, date: &str) -> Result<(), String> { "digest_path": output_path.to_string_lossy(), "daily_digests": digests.iter().map(|(d, _)| d).collect::<Vec<_>>(), }); - let links_path = agent_results_dir().join(format!("weekly-{}-links.json", week_label)); + let links_path = memory_subdir("agent-results")?.join(format!("weekly-{}-links.json", week_label)); fs::write(&links_path, serde_json::to_string_pretty(&result).unwrap()) .map_err(|e| format!("write {}: {}", links_path.display(), e))?; @@ -280,28 +236,13 @@ pub fn generate_weekly(store: &mut Store, date: &str) -> Result<(), String> { // --- Monthly digest --- fn weeks_in_month(year: i32, month: u32) -> Vec<String> { + use chrono::{Datelike, NaiveDate}; let mut weeks = std::collections::BTreeSet::new(); let mut d = 1u32; - loop { - let epoch = date_to_epoch(year, month, d); - let (_, _, _, _, _, _) = store::epoch_to_local(epoch as f64); - // Check if we're still in the target month - let mut tm: libc::tm = unsafe { std::mem::zeroed() }; - let secs = epoch as libc::time_t; - unsafe { libc::localtime_r(&secs, &mut tm) }; - if (tm.tm_mon + 1) as u32 != month || tm.tm_year + 1900 != year { - break; - } - - let mut buf = [0u8; 16]; - let fmt =
std::ffi::CString::new("%G-W%V").unwrap(); - let len = unsafe { - libc::strftime(buf.as_mut_ptr() as *mut libc::c_char, buf.len(), fmt.as_ptr(), &tm) - }; - let week = std::str::from_utf8(&buf[..len]).unwrap_or("").to_string(); - if !week.is_empty() { - weeks.insert(week); - } + while let Some(date) = NaiveDate::from_ymd_opt(year, month, d) { + if date.month() != month { break; } + let iso = date.iso_week(); + weeks.insert(format!("{}-W{:02}", iso.year(), iso.week())); d += 1; } weeks.into_iter().collect() @@ -352,7 +293,7 @@ pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String let week_labels = weeks_in_month(year, month); println!(" Weeks in month: {}", week_labels.join(", ")); - let digests = load_digest_files("weekly", &week_labels); + let digests = load_digest_files("weekly", &week_labels)?; if digests.is_empty() { println!(" No weekly digests found for {}", month_label); println!(" Run `poc-memory digest weekly` first for relevant weeks"); @@ -369,7 +310,7 @@ pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String println!(" Calling Sonnet..."); let digest = call_sonnet(&prompt, 600)?; - let output_path = episodic_dir().join(format!("monthly-{}.md", month_label)); + let output_path = memory_subdir("episodic")?.join(format!("monthly-{}.md", month_label)); fs::write(&output_path, &digest) .map_err(|e| format!("write {}: {}", output_path.display(), e))?; println!(" Written: {}", output_path.display()); @@ -384,7 +325,7 @@ pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String "digest_path": output_path.to_string_lossy(), "weekly_digests": digests.iter().map(|(w, _)| w).collect::<Vec<_>>(), }); - let links_path = agent_results_dir().join(format!("monthly-{}-links.json", month_label)); + let links_path = memory_subdir("agent-results")?.join(format!("monthly-{}-links.json", month_label)); fs::write(&links_path, serde_json::to_string_pretty(&result).unwrap()) .map_err(|e| format!("write {}: {}",
links_path.display(), e))?; @@ -400,7 +341,7 @@ pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String /// digests. pub fn digest_auto(store: &mut Store) -> Result<(), String> { let today = store::today(); - let epi = episodic_dir(); + let epi = memory_subdir("episodic")?; // --- Phase 1: find dates with journal entries but no daily digest --- let date_re = Regex::new(r"^\d{4}-\d{2}-\d{2}").unwrap(); @@ -677,8 +618,8 @@ fn parse_digest_file_links(path: &Path) -> Vec<DigestLink> { } /// Parse links from all digest files in the episodic dir. -pub fn parse_all_digest_links() -> Vec<DigestLink> { - let dir = episodic_dir(); +pub fn parse_all_digest_links() -> Result<Vec<DigestLink>, String> { + let dir = memory_subdir("episodic")?; let mut all_links = Vec::new(); for pattern in &["daily-*.md", "weekly-*.md", "monthly-*.md"] { @@ -707,7 +648,7 @@ pub fn parse_all_digest_links() -> Vec<DigestLink> { let mut seen = std::collections::HashSet::new(); all_links.retain(|link| seen.insert((link.source.clone(), link.target.clone()))); - all_links + Ok(all_links) } /// Apply parsed digest links to the store. diff --git a/src/enrich.rs b/src/enrich.rs index 31d2ed8..02a50de 100644 --- a/src/enrich.rs +++ b/src/enrich.rs @@ -16,11 +16,7 @@ use std::collections::hash_map::DefaultHasher; use std::fs; use std::hash::{Hash, Hasher}; -fn agent_results_dir() -> std::path::PathBuf { - let dir = store::memory_dir().join("agent-results"); - fs::create_dir_all(&dir).ok(); - dir -} +use crate::util::memory_subdir; /// Extract user/assistant messages with line numbers from a JSONL transcript. /// (line_number, role, text, timestamp) @@ -194,7 +190,7 @@ pub fn journal_enrich( // Save result to agent-results let timestamp = store::format_datetime(store::now_epoch()) .replace([':', '-'], ""); - let result_file = agent_results_dir() + let result_file = memory_subdir("agent-results")?
.join(format!("{}.json", timestamp)); let output = serde_json::json!({ "timestamp": timestamp, diff --git a/src/main.rs b/src/main.rs index f7b07fd..6531e1b 100644 --- a/src/main.rs +++ b/src/main.rs @@ -14,6 +14,7 @@ // interference detection, schema assimilation, reconsolidation. mod store; +mod util; mod llm; mod digest; mod audit; @@ -739,8 +740,7 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> { // Move processed file to avoid re-processing if !process_all { - let done_dir = results_dir.join("done"); - std::fs::create_dir_all(&done_dir).ok(); + let done_dir = util::memory_subdir("agent-results/done")?; let dest = done_dir.join(path.file_name().unwrap()); std::fs::rename(&path, &dest).ok(); } @@ -793,7 +793,7 @@ fn cmd_digest(args: &[String]) -> Result<(), String> { fn cmd_digest_links(args: &[String]) -> Result<(), String> { let do_apply = args.iter().any(|a| a == "--apply"); - let links = digest::parse_all_digest_links(); + let links = digest::parse_all_digest_links()?; println!("Found {} unique links from digest files", links.len()); if !do_apply { diff --git a/src/store/types.rs b/src/store/types.rs index 78fa62a..d6bd2ae 100644 --- a/src/store/types.rs +++ b/src/store/types.rs @@ -130,17 +130,15 @@ pub fn now_epoch() -> f64 { /// Convert epoch seconds to broken-down local time components. /// Returns (year, month, day, hour, minute, second). 
pub fn epoch_to_local(epoch: f64) -> (i32, u32, u32, u32, u32, u32) { - // Use libc localtime_r for timezone-correct conversion - let secs = epoch as libc::time_t; - let mut tm: libc::tm = unsafe { std::mem::zeroed() }; - unsafe { libc::localtime_r(&secs, &mut tm) }; + use chrono::{Datelike, Local, TimeZone, Timelike}; + let dt = Local.timestamp_opt(epoch as i64, 0).unwrap(); ( - tm.tm_year + 1900, - (tm.tm_mon + 1) as u32, - tm.tm_mday as u32, - tm.tm_hour as u32, - tm.tm_min as u32, - tm.tm_sec as u32, + dt.year(), + dt.month(), + dt.day(), + dt.hour(), + dt.minute(), + dt.second(), ) } diff --git a/src/util.rs b/src/util.rs new file mode 100644 index 0000000..550cc8b --- /dev/null +++ b/src/util.rs @@ -0,0 +1,14 @@ +// Shared utilities + +use crate::store; + +use std::fs; +use std::path::PathBuf; + +/// Ensure a subdirectory of the memory dir exists and return its path. +pub(crate) fn memory_subdir(name: &str) -> Result<PathBuf, String> { + let dir = store::memory_dir().join(name); + fs::create_dir_all(&dir) + .map_err(|e| format!("create {}: {}", dir.display(), e))?; + Ok(dir) }