diff --git a/poc-memory/src/audit.rs b/poc-memory/src/agents/audit.rs similarity index 99% rename from poc-memory/src/audit.rs rename to poc-memory/src/agents/audit.rs index f3e5999..f83ee72 100644 --- a/poc-memory/src/audit.rs +++ b/poc-memory/src/agents/audit.rs @@ -3,7 +3,7 @@ // Each batch of links gets reviewed by Sonnet, which returns per-link actions: // KEEP, DELETE, RETARGET, WEAKEN, STRENGTHEN. Batches run in parallel via rayon. -use crate::llm::call_sonnet; +use super::llm::call_sonnet; use crate::store::{self, Store, new_relation}; use std::collections::HashSet; diff --git a/poc-memory/src/consolidate.rs b/poc-memory/src/agents/consolidate.rs similarity index 98% rename from poc-memory/src/consolidate.rs rename to poc-memory/src/agents/consolidate.rs index 2f9f7d7..20487a5 100644 --- a/poc-memory/src/consolidate.rs +++ b/poc-memory/src/agents/consolidate.rs @@ -10,8 +10,8 @@ // // apply_consolidation() processes consolidation reports independently. -use crate::digest; -use crate::llm::{call_sonnet, parse_json_response}; +use super::digest; +use super::llm::{call_sonnet, parse_json_response}; use crate::neuro; use crate::store::{self, Store, new_relation}; @@ -98,7 +98,7 @@ pub fn consolidate_full_with_progress( *store = Store::load()?; } - let prompt = match neuro::agent_prompt(store, agent_type, *count) { + let prompt = match super::prompts::agent_prompt(store, agent_type, *count) { Ok(p) => p, Err(e) => { let msg = format!(" ERROR building prompt: {}", e); @@ -266,7 +266,7 @@ fn build_consolidation_prompt(store: &Store, report_keys: &[String]) -> Result Result<(), TaskError> { ctx.log_line("mining facts"); let p = std::path::Path::new(&path); let progress = |msg: &str| { ctx.set_progress(msg); }; - let count = crate::fact_mine::mine_and_store(p, Some(&progress))?; + let count = super::fact_mine::mine_and_store(p, Some(&progress))?; ctx.log_line(format!("{count} facts stored")); Ok(()) }) @@ -125,7 +125,7 @@ fn job_consolidate(ctx: &ExecutionContext) 
-> Result<(), TaskError> { run_job(ctx, "consolidate", || { ctx.log_line("loading store"); let mut store = crate::store::Store::load()?; - crate::consolidate::consolidate_full_with_progress(&mut store, &|msg| { + super::consolidate::consolidate_full_with_progress(&mut store, &|msg| { ctx.log_line(msg); }) }) @@ -133,13 +133,13 @@ fn job_consolidate(ctx: &ExecutionContext) -> Result<(), TaskError> { fn job_knowledge_loop(ctx: &ExecutionContext) -> Result<(), TaskError> { run_job(ctx, "knowledge-loop", || { - let config = crate::knowledge::KnowledgeLoopConfig { + let config = super::knowledge::KnowledgeLoopConfig { max_cycles: 100, batch_size: 5, ..Default::default() }; ctx.log_line("running agents"); - let results = crate::knowledge::run_knowledge_loop(&config)?; + let results = super::knowledge::run_knowledge_loop(&config)?; ctx.log_line(format!("{} cycles, {} actions", results.len(), results.iter().map(|r| r.total_applied).sum::<usize>())); @@ -329,7 +329,7 @@ pub fn run_daemon() -> Result<(), String> { let stale = find_stale_sessions(); // Load mined transcript keys once for this tick - let mined = crate::enrich::mined_transcript_keys(); + let mined = super::enrich::mined_transcript_keys(); // Limit new tasks per tick — the resource pool gates execution, // but we don't need thousands of task objects in the registry. 
@@ -372,7 +372,7 @@ pub fn run_daemon() -> Result<(), String> { let path_str = session.to_string_lossy().to_string(); // Check for old-style whole-file mined key - let experience_done = crate::enrich::is_transcript_mined_with_keys(&mined, &path_str); + let experience_done = super::enrich::is_transcript_mined_with_keys(&mined, &path_str); if !experience_done { if is_file_open(&session) { @@ -384,11 +384,11 @@ pub fn run_daemon() -> Result<(), String> { let seg_count = if let Some(&cached) = seg_cache.get(&path_str) { cached } else { - let messages = match crate::enrich::extract_conversation(&path_str) { + let messages = match super::enrich::extract_conversation(&path_str) { Ok(m) => m, Err(_) => continue, }; - let count = crate::enrich::split_on_compaction(messages).len(); + let count = super::enrich::split_on_compaction(messages).len(); seg_cache.insert(path_str.clone(), count); count }; @@ -400,7 +400,7 @@ pub fn run_daemon() -> Result<(), String> { } } else { // Multi-segment — find unmined segments - let fname_key = crate::enrich::transcript_filename_key(&path_str); + let fname_key = super::enrich::transcript_filename_key(&path_str); let mut unmined = 0; for i in 0..seg_count { let seg_key = format!("{}.{}", fname_key, i); diff --git a/poc-memory/src/digest.rs b/poc-memory/src/agents/digest.rs similarity index 99% rename from poc-memory/src/digest.rs rename to poc-memory/src/agents/digest.rs index 5509a1f..9d6386b 100644 --- a/poc-memory/src/digest.rs +++ b/poc-memory/src/agents/digest.rs @@ -5,7 +5,7 @@ // summarize weeklies. All three share the same generate/auto-detect // pipeline, parameterized by DigestLevel. 
-use crate::llm::{call_sonnet, semantic_keys}; +use super::llm::{call_sonnet, semantic_keys}; use crate::store::{self, Store, new_relation}; use crate::neuro; @@ -209,7 +209,7 @@ fn generate_digest( .collect::<Vec<_>>() .join(", "); - let prompt = neuro::load_prompt("digest", &[ + let prompt = super::prompts::load_prompt("digest", &[ ("{{LEVEL}}", level.title), ("{{PERIOD}}", level.period), ("{{INPUT_TITLE}}", level.input_title), diff --git a/poc-memory/src/enrich.rs b/poc-memory/src/agents/enrich.rs similarity index 98% rename from poc-memory/src/enrich.rs rename to poc-memory/src/agents/enrich.rs index 559aea6..cd88e52 100644 --- a/poc-memory/src/enrich.rs +++ b/poc-memory/src/agents/enrich.rs @@ -7,7 +7,7 @@ // Both extract conversation from JSONL transcripts, build prompts, call Sonnet, // and apply results to the store. -use crate::llm::{call_sonnet, parse_json_response, semantic_keys}; +use super::llm::{call_sonnet, parse_json_response, semantic_keys}; use crate::neuro; use crate::store::{self, Store, new_node, new_relation}; @@ -174,7 +174,7 @@ fn build_journal_prompt( .collect::<Vec<_>>() .join("\n"); - neuro::load_prompt("journal-enrich", &[ + super::prompts::load_prompt("journal-enrich", &[ ("{{GREP_LINE}}", &grep_line.to_string()), ("{{ENTRY_TEXT}}", entry_text), ("{{KEYS}}", &keys_text), @@ -334,7 +334,7 @@ pub fn experience_mine( .collect::<Vec<_>>() .join("\n"); - let prompt = neuro::load_prompt("experience", &[ + let prompt = super::prompts::load_prompt("experience", &[ ("{{IDENTITY}}", &identity), ("{{RECENT_JOURNAL}}", &recent), ("{{KEYS}}", &keys_text), diff --git a/poc-memory/src/fact_mine.rs b/poc-memory/src/agents/fact_mine.rs similarity index 99% rename from poc-memory/src/fact_mine.rs rename to poc-memory/src/agents/fact_mine.rs index dfadcd6..d4eb701 100644 --- a/poc-memory/src/fact_mine.rs +++ b/poc-memory/src/agents/fact_mine.rs @@ -6,7 +6,7 @@ // Uses Haiku (not Sonnet) for cost efficiency on high-volume extraction. 
use crate::config; -use crate::llm; +use super::llm; use crate::store::{self, Provenance}; use serde::{Deserialize, Serialize}; diff --git a/poc-memory/src/knowledge.rs b/poc-memory/src/agents/knowledge.rs similarity index 99% rename from poc-memory/src/knowledge.rs rename to poc-memory/src/agents/knowledge.rs index a7471ad..06ed246 100644 --- a/poc-memory/src/knowledge.rs +++ b/poc-memory/src/agents/knowledge.rs @@ -11,7 +11,7 @@ // convergence via graph-structural metrics (sigma, CC, communities). use crate::graph::Graph; -use crate::llm; +use super::llm; use crate::spectral; use crate::store::{self, Store, new_relation, RelationType}; @@ -329,7 +329,7 @@ fn agent_provenance(agent: &str) -> store::Provenance { // --------------------------------------------------------------------------- fn load_prompt(name: &str) -> Result { - crate::neuro::load_prompt(name, &[]) + super::prompts::load_prompt(name, &[]) } fn get_graph_topology(store: &Store, graph: &Graph) -> String { diff --git a/poc-memory/src/llm.rs b/poc-memory/src/agents/llm.rs similarity index 100% rename from poc-memory/src/llm.rs rename to poc-memory/src/agents/llm.rs diff --git a/poc-memory/src/agents/mod.rs b/poc-memory/src/agents/mod.rs new file mode 100644 index 0000000..c3c83a1 --- /dev/null +++ b/poc-memory/src/agents/mod.rs @@ -0,0 +1,25 @@ +// Agent layer: LLM-powered operations on the memory graph +// +// Everything here calls external models (Sonnet, Haiku) or orchestrates +// sequences of such calls. The core graph infrastructure (store, graph, +// spectral, search, similarity) lives at the crate root. 
+// +// llm — model invocation, response parsing +// prompts — prompt generation from store data +// audit — link quality review via Sonnet +// consolidate — full consolidation pipeline +// knowledge — knowledge production agents + convergence loop +// enrich — journal enrichment, experience mining +// fact_mine — fact extraction from transcripts +// digest — episodic digest generation (daily/weekly/monthly) +// daemon — background job scheduler + +pub mod llm; +pub mod prompts; +pub mod audit; +pub mod consolidate; +pub mod knowledge; +pub mod enrich; +pub mod fact_mine; +pub mod digest; +pub mod daemon; diff --git a/poc-memory/src/neuro/prompts.rs b/poc-memory/src/agents/prompts.rs similarity index 99% rename from poc-memory/src/neuro/prompts.rs rename to poc-memory/src/agents/prompts.rs index ac8563b..0c58533 100644 --- a/poc-memory/src/neuro/prompts.rs +++ b/poc-memory/src/agents/prompts.rs @@ -6,7 +6,7 @@ use crate::graph::Graph; use crate::similarity; use crate::spectral; -use super::scoring::{ +use crate::neuro::{ ReplayItem, consolidation_priority, replay_queue, replay_queue_with_graph, detect_interference, }; diff --git a/poc-memory/src/lib.rs b/poc-memory/src/lib.rs index ae56baf..13251a1 100644 --- a/poc-memory/src/lib.rs +++ b/poc-memory/src/lib.rs @@ -3,25 +3,27 @@ // Re-exports modules so that memory-search and other binaries // can call library functions directly instead of shelling out. 
+// Core infrastructure pub mod config; pub mod store; pub mod util; -pub mod llm; -pub mod digest; -pub mod audit; -pub mod enrich; -pub mod consolidate; pub mod graph; pub mod search; pub mod similarity; -pub mod migrate; -pub mod neuro; -pub mod query; pub mod spectral; pub mod lookups; -pub mod daemon; -pub mod fact_mine; -pub mod knowledge; +pub mod query; +pub mod migrate; +pub mod neuro; + +// Agent layer (LLM-powered operations) +pub mod agents; + +// Re-export agent submodules at crate root for backwards compatibility +pub use agents::{ + llm, audit, consolidate, knowledge, + enrich, fact_mine, digest, daemon, +}; pub mod memory_capnp { include!(concat!(env!("OUT_DIR"), "/schema/memory_capnp.rs")); diff --git a/poc-memory/src/main.rs b/poc-memory/src/main.rs index f49dc3d..482e0b3 100644 --- a/poc-memory/src/main.rs +++ b/poc-memory/src/main.rs @@ -878,11 +878,11 @@ fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option) -> Res let store = store::Store::load()?; if let Some(agent_name) = agent { - let prompt = neuro::agent_prompt(&store, &agent_name, count)?; + let prompt = agents::prompts::agent_prompt(&store, &agent_name, count)?; println!("{}", prompt); Ok(()) } else { - neuro::consolidation_batch(&store, count, auto) + agents::prompts::consolidation_batch(&store, count, auto) } } diff --git a/poc-memory/src/neuro/mod.rs b/poc-memory/src/neuro/mod.rs index 187a73b..851f8b9 100644 --- a/poc-memory/src/neuro/mod.rs +++ b/poc-memory/src/neuro/mod.rs @@ -5,22 +5,17 @@ // rewrite — graph topology mutations: differentiation, closure, linking mod scoring; -mod prompts; mod rewrite; -// Re-export public API so `neuro::` paths continue to work. 
- pub use scoring::{ - replay_queue, detect_interference, + ReplayItem, + consolidation_priority, + replay_queue, replay_queue_with_graph, + detect_interference, consolidation_plan, format_plan, daily_check, }; -pub use prompts::{ - load_prompt, - consolidation_batch, agent_prompt, -}; - pub use rewrite::{ refine_target, LinkMove, differentiate_hub,