split capnp_store.rs into src/store/ module hierarchy
capnp_store.rs (1772 lines) → four focused modules:
- store/types.rs — types, macros, constants, path helpers
- store/parse.rs — markdown parsing (MemoryUnit, parse_units)
- store/view.rs — StoreView trait, MmapView, AnyView
- store/mod.rs — Store impl methods, re-exports

new_node/new_relation become free functions in types.rs. All callers updated: capnp_store:: → store::
This commit is contained in:
parent
e34c0ccf4c
commit
635da6d3e2
11 changed files with 980 additions and 978 deletions
|
|
@ -8,7 +8,7 @@
|
|||
// 4. Writes the digest to the store + episodic dir
|
||||
// 5. Extracts links and saves agent results
|
||||
|
||||
use crate::capnp_store::{self, Store};
|
||||
use crate::store::{self, Store, new_node, new_relation};
|
||||
use crate::neuro;
|
||||
|
||||
use regex::Regex;
|
||||
|
|
@ -19,7 +19,7 @@ use std::path::{Path, PathBuf};
|
|||
use std::process::Command;
|
||||
|
||||
fn memory_dir() -> PathBuf {
|
||||
capnp_store::memory_dir()
|
||||
store::memory_dir()
|
||||
}
|
||||
|
||||
fn episodic_dir() -> PathBuf {
|
||||
|
|
@ -261,7 +261,7 @@ fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
|
|||
let mut dates = Vec::new();
|
||||
for i in 0..7 {
|
||||
let day_epoch = monday_epoch + (i * 86400);
|
||||
let (dy, dm, dd, _, _, _) = capnp_store::epoch_to_local(day_epoch as f64);
|
||||
let (dy, dm, dd, _, _, _) = store::epoch_to_local(day_epoch as f64);
|
||||
dates.push(format!("{:04}-{:02}-{:02}", dy, dm, dd));
|
||||
}
|
||||
|
||||
|
|
@ -439,7 +439,7 @@ fn weeks_in_month(year: i32, month: u32) -> Vec<String> {
|
|||
let mut d = 1u32;
|
||||
loop {
|
||||
let epoch = date_to_epoch(year, month, d);
|
||||
let (_, _, _, _, _, _) = capnp_store::epoch_to_local(epoch as f64);
|
||||
let (_, _, _, _, _, _) = store::epoch_to_local(epoch as f64);
|
||||
// Check if we're still in the target month
|
||||
let mut tm: libc::tm = unsafe { std::mem::zeroed() };
|
||||
let secs = epoch as libc::time_t;
|
||||
|
|
@ -551,8 +551,8 @@ Use ONLY keys from the semantic memory list below.
|
|||
|
||||
pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String> {
|
||||
let (year, month) = if month_arg.is_empty() {
|
||||
let now = capnp_store::now_epoch();
|
||||
let (y, m, _, _, _, _) = capnp_store::epoch_to_local(now);
|
||||
let now = store::now_epoch();
|
||||
let (y, m, _, _, _, _) = store::epoch_to_local(now);
|
||||
(y, m)
|
||||
} else {
|
||||
let parts: Vec<&str> = month_arg.split('-').collect();
|
||||
|
|
@ -617,7 +617,7 @@ pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String
|
|||
/// (needs weeklies). Skips today (incomplete day). Skips already-existing
|
||||
/// digests.
|
||||
pub fn digest_auto(store: &mut Store) -> Result<(), String> {
|
||||
let today = capnp_store::today();
|
||||
let today = store::today();
|
||||
let epi = episodic_dir();
|
||||
|
||||
// --- Phase 1: find dates with journal entries but no daily digest ---
|
||||
|
|
@ -707,7 +707,7 @@ pub fn digest_auto(store: &mut Store) -> Result<(), String> {
|
|||
// A month is "ready" if the month is before the current month and at
|
||||
// least one weekly digest exists for it.
|
||||
|
||||
let (cur_y, cur_m, _, _, _, _) = capnp_store::epoch_to_local(capnp_store::now_epoch());
|
||||
let (cur_y, cur_m, _, _, _, _) = store::epoch_to_local(store::now_epoch());
|
||||
let mut months_seen: std::collections::BTreeSet<(i32, u32)> = std::collections::BTreeSet::new();
|
||||
|
||||
for date in &daily_dates_done {
|
||||
|
|
@ -782,7 +782,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
|
|||
let mut log = LogWriter::new(&log_path)?;
|
||||
|
||||
log.write("=== CONSOLIDATE FULL ===")?;
|
||||
log.write(&format!("Started: {}", capnp_store::format_datetime(capnp_store::now_epoch())))?;
|
||||
log.write(&format!("Started: {}", store::format_datetime(store::now_epoch())))?;
|
||||
log.write(&format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()))?;
|
||||
log.write("")?;
|
||||
|
||||
|
|
@ -890,7 +890,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
|
|||
};
|
||||
|
||||
// Save report
|
||||
let ts = capnp_store::format_datetime(capnp_store::now_epoch())
|
||||
let ts = store::format_datetime(store::now_epoch())
|
||||
.replace([':', '-', 'T'], "");
|
||||
let report_name = format!("consolidation-{}-{}.md", agent_type, ts);
|
||||
let report_path = agent_results_dir().join(&report_name);
|
||||
|
|
@ -973,7 +973,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
|
|||
// --- Step 6: Summary ---
|
||||
let elapsed = start.elapsed();
|
||||
log.write("\n--- Summary ---")?;
|
||||
log.write(&format!("Finished: {}", capnp_store::format_datetime(capnp_store::now_epoch())))?;
|
||||
log.write(&format!("Finished: {}", store::format_datetime(store::now_epoch())))?;
|
||||
log.write(&format!("Duration: {:.0}s", elapsed.as_secs_f64()))?;
|
||||
*store = Store::load()?;
|
||||
log.write(&format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()))?;
|
||||
|
|
@ -1238,9 +1238,9 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, us
|
|||
None => { skipped += 1; continue; }
|
||||
};
|
||||
|
||||
let rel = Store::new_relation(
|
||||
let rel = new_relation(
|
||||
source_uuid, target_uuid,
|
||||
capnp_store::RelationType::Link,
|
||||
store::RelationType::Link,
|
||||
0.5,
|
||||
&source, &target,
|
||||
);
|
||||
|
|
@ -1512,9 +1512,9 @@ pub fn journal_enrich(
|
|||
None => continue,
|
||||
};
|
||||
|
||||
let rel = Store::new_relation(
|
||||
let rel = new_relation(
|
||||
source_uuid, target_uuid,
|
||||
capnp_store::RelationType::Link,
|
||||
store::RelationType::Link,
|
||||
0.5,
|
||||
&source_key, &resolved,
|
||||
);
|
||||
|
|
@ -1525,7 +1525,7 @@ pub fn journal_enrich(
|
|||
}
|
||||
|
||||
// Save result to agent-results
|
||||
let timestamp = capnp_store::format_datetime(capnp_store::now_epoch())
|
||||
let timestamp = store::format_datetime(store::now_epoch())
|
||||
.replace([':', '-'], "");
|
||||
let result_file = agent_results_dir()
|
||||
.join(format!("{}.json", timestamp));
|
||||
|
|
@ -1658,7 +1658,7 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio
|
|||
println!(" {} actions extracted", actions.len());
|
||||
|
||||
// Save actions
|
||||
let timestamp = capnp_store::format_datetime(capnp_store::now_epoch())
|
||||
let timestamp = store::format_datetime(store::now_epoch())
|
||||
.replace([':', '-'], "");
|
||||
let actions_path = agent_results_dir()
|
||||
.join(format!("consolidation-actions-{}.json", timestamp));
|
||||
|
|
@ -1747,9 +1747,9 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Optio
|
|||
let source_uuid = match store.nodes.get(&source) { Some(n) => n.uuid, None => { skipped += 1; continue; } };
|
||||
let target_uuid = match store.nodes.get(&target) { Some(n) => n.uuid, None => { skipped += 1; continue; } };
|
||||
|
||||
let rel = Store::new_relation(
|
||||
let rel = new_relation(
|
||||
source_uuid, target_uuid,
|
||||
capnp_store::RelationType::Auto,
|
||||
store::RelationType::Auto,
|
||||
0.5,
|
||||
&source, &target,
|
||||
);
|
||||
|
|
@ -2110,9 +2110,9 @@ pub fn link_audit(store: &mut Store, apply: bool) -> Result<AuditStats, String>
|
|||
|
||||
// Create new
|
||||
if target_uuid != [0u8; 16] {
|
||||
let new_rel = Store::new_relation(
|
||||
let new_rel = new_relation(
|
||||
source_uuid, target_uuid,
|
||||
capnp_store::RelationType::Auto,
|
||||
store::RelationType::Auto,
|
||||
old_strength,
|
||||
&source_key, new_target,
|
||||
);
|
||||
|
|
@ -2303,9 +2303,9 @@ pub fn experience_mine(
|
|||
}
|
||||
|
||||
// Write to store
|
||||
let mut node = Store::new_node(&key, &full_content);
|
||||
node.node_type = capnp_store::NodeType::EpisodicSession;
|
||||
node.category = capnp_store::Category::Observation;
|
||||
let mut node = new_node(&key, &full_content);
|
||||
node.node_type = store::NodeType::EpisodicSession;
|
||||
node.category = store::Category::Observation;
|
||||
let _ = store.upsert_node(node);
|
||||
count += 1;
|
||||
|
||||
|
|
@ -2315,8 +2315,8 @@ pub fn experience_mine(
|
|||
|
||||
// Record this transcript as mined (even if count == 0, to prevent re-runs)
|
||||
let dedup_content = format!("Mined {} ({} entries)", jsonl_path, count);
|
||||
let mut dedup_node = Store::new_node(&dedup_key, &dedup_content);
|
||||
dedup_node.category = capnp_store::Category::Task;
|
||||
let mut dedup_node = new_node(&dedup_key, &dedup_content);
|
||||
dedup_node.category = store::Category::Task;
|
||||
let _ = store.upsert_node(dedup_node);
|
||||
|
||||
if count > 0 {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue