split capnp_store.rs into src/store/ module hierarchy

capnp_store.rs (1772 lines) → four focused modules:
  store/types.rs  — types, macros, constants, path helpers
  store/parse.rs  — markdown parsing (MemoryUnit, parse_units)
  store/view.rs   — StoreView trait, MmapView, AnyView
  store/mod.rs    — Store impl methods, re-exports

new_node/new_relation become free functions in types.rs.
All callers updated: capnp_store:: → store::
This commit is contained in:
ProofOfConcept 2026-03-03 12:56:15 -05:00
parent e34c0ccf4c
commit 635da6d3e2
11 changed files with 980 additions and 978 deletions

View file

@@ -11,9 +11,9 @@
//
// Old files are preserved as backup. Run once.
-use crate::capnp_store::{
+use crate::store::{
self, Store, Node, Category, NodeType, Provenance, RelationType,
-    parse_units,
+    parse_units, new_relation,
};
use serde::Deserialize;
@@ -149,7 +149,7 @@ pub fn migrate() -> Result<(), String> {
old_store.entries.len(), old_store.retrieval_log.len());
// Scan markdown files to get content + edges
-    let mut units_by_key: HashMap<String, capnp_store::MemoryUnit> = HashMap::new();
+    let mut units_by_key: HashMap<String, store::MemoryUnit> = HashMap::new();
scan_markdown_dir(&memory_dir, &mut units_by_key)?;
eprintln!("Scanned {} markdown units", units_by_key.len());
@@ -168,7 +168,7 @@ pub fn migrate() -> Result<(), String> {
// Migrate retrieval log
store.retrieval_log = old_store.retrieval_log.iter().map(|e| {
-        capnp_store::RetrievalEvent {
+        store::RetrievalEvent {
query: e.query.clone(),
timestamp: e.timestamp.clone(),
results: e.results.clone(),
@@ -197,7 +197,7 @@ pub fn migrate() -> Result<(), String> {
let node = Node {
uuid,
version: 1,
-            timestamp: capnp_store::now_epoch(),
+            timestamp: store::now_epoch(),
node_type: if key.contains("journal") {
NodeType::EpisodicSession
} else {
@@ -236,7 +236,7 @@ pub fn migrate() -> Result<(), String> {
let node = Node {
uuid,
version: 1,
-            timestamp: capnp_store::now_epoch(),
+            timestamp: store::now_epoch(),
node_type: if key.contains("journal") {
NodeType::EpisodicSession
} else {
@@ -291,12 +291,12 @@ pub fn migrate() -> Result<(), String> {
};
// Avoid duplicate relations
-        let exists = all_relations.iter().any(|r: &capnp_store::Relation|
+        let exists = all_relations.iter().any(|r: &store::Relation|
(r.source == source_uuid && r.target == target_uuid) ||
(r.source == target_uuid && r.target == source_uuid));
if exists { continue; }
-        all_relations.push(Store::new_relation(
+        all_relations.push(new_relation(
source_uuid, target_uuid,
RelationType::Link, 1.0,
key, link,
@@ -310,7 +310,7 @@ pub fn migrate() -> Result<(), String> {
None => continue,
};
-        all_relations.push(Store::new_relation(
+        all_relations.push(new_relation(
cause_uuid, source_uuid,
RelationType::Causal, 1.0,
cause, key,
@@ -349,7 +349,7 @@ pub fn migrate() -> Result<(), String> {
fn scan_markdown_dir(
dir: &Path,
-    units: &mut HashMap<String, capnp_store::MemoryUnit>,
+    units: &mut HashMap<String, store::MemoryUnit>,
) -> Result<(), String> {
let entries = fs::read_dir(dir)
.map_err(|e| format!("read dir {}: {}", dir.display(), e))?;