// Consolidation pipeline: plan → agents → maintenance → digests → links
//
// consolidate_full() runs the full autonomous consolidation:
// 1. Plan: analyze metrics, allocate agents
// 2. Execute: run each agent (agents apply changes via tool calls)
// 3. Graph maintenance (orphans, degree cap)
// 4. Digest: generate missing daily/weekly/monthly digests
// 5. Links: apply links extracted from digests
// 6. Summary: final metrics comparison
//
// Every step appends human-readable text to a log buffer, which is persisted
// at the end as a provenance node keyed by a compact timestamp.

use super::digest;
use super::knowledge;
use crate::neuro;
use crate::store::{self, Store};

/// Append `line` to the log buffer, followed by a newline.
///
/// Centralizes newline handling so callers never have to remember a
/// trailing `\n` (callers that want a *leading* blank line still embed
/// their own `\n` at the start of `line`).
fn log_line(buf: &mut String, line: &str) {
    buf.push_str(line);
    buf.push('\n');
}

/// Run the full autonomous consolidation pipeline with logging.
///
/// Convenience wrapper around [`consolidate_full_with_progress`] with a
/// no-op progress callback.
pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
    consolidate_full_with_progress(store, &|_| {})
}

/// Run the full autonomous consolidation pipeline, reporting coarse
/// progress through `on_progress` and mirroring log output to stdout
/// (errors to stderr).
///
/// Steps (see the module header): plan → execute agents → link orphans →
/// cap degree → generate digests → apply digest links → summary.
///
/// The `store` argument is repeatedly *replaced* via `Store::load()` so
/// each step observes changes the previous step persisted out-of-band
/// (agents apply changes via tool calls, per the module header); on
/// return it holds the freshly reloaded, post-pipeline state.
///
/// # Errors
/// Propagates errors from `Store::load`/`Store::save`. Individual agent
/// failures and digest failures are logged and counted, not propagated.
pub fn consolidate_full_with_progress(
    store: &mut Store,
    on_progress: &dyn Fn(&str),
) -> Result<(), String> {
    let start = std::time::Instant::now();
    // Key under which the final log buffer is persisted as a node.
    let log_key = format!("_consolidate-log-{}", store::compact_timestamp());
    let mut log_buf = String::new();
    log_line(&mut log_buf, "=== CONSOLIDATE FULL ===");
    log_line(&mut log_buf, &format!("Started: {}", store::format_datetime(store::now_epoch())));
    // Pre-pipeline metrics, compared against the post-pipeline count in the summary.
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
    log_line(&mut log_buf, "");

    // --- Step 1: Plan ---
    log_line(&mut log_buf, "--- Step 1: Plan ---");
    on_progress("planning");
    let plan = neuro::consolidation_plan(store);
    let plan_text = neuro::format_plan(&plan);
    log_line(&mut log_buf, &plan_text);
    println!("{}", plan_text);
    // NOTE(review): `plan.total()` counts individual agents; the loop below
    // iterates batched *runs* (`runs.len()`), so the two counts can differ.
    let total_agents = plan.total();
    log_line(&mut log_buf, &format!("Total agents to run: {}", total_agents));

    // --- Step 2: Execute agents ---
    log_line(&mut log_buf, "\n--- Step 2: Execute agents ---");
    let mut agent_num = 0usize;    // runs attempted so far (1-based in labels)
    let mut agent_errors = 0usize; // runs that returned Err
    let batch_size = 5; // agents per batched run — presumably tuned; confirm with to_agent_runs
    let runs = plan.to_agent_runs(batch_size);
    for (agent_type, count) in &runs {
        agent_num += 1;
        // Include the batch size in the label only when this run batches agents.
        let label = if *count > 0 {
            format!("[{}/{}] {} (batch={})", agent_num, runs.len(), agent_type, count)
        } else {
            format!("[{}/{}] {}", agent_num, runs.len(), agent_type)
        };
        log_line(&mut log_buf, &format!("\n{}", label));
        on_progress(&label);
        println!("{}", label);
        // Reload store to pick up changes from previous agents
        // (not needed on the first iteration — nothing has run yet).
        if agent_num > 1 {
            *store = Store::load()?;
        }
        match knowledge::run_and_apply(store, agent_type, *count, "consolidate") {
            Ok(()) => {
                let msg = " Done".to_string();
                log_line(&mut log_buf, &msg);
                on_progress(&msg);
                println!("{}", msg);
            }
            Err(e) => {
                // Agent failures are recorded but do not abort the pipeline.
                let msg = format!(" ERROR: {}", e);
                log_line(&mut log_buf, &msg);
                eprintln!("{}", msg);
                agent_errors += 1;
            }
        }
    }
    log_line(&mut log_buf, &format!("\nAgents complete: {} run, {} errors", agent_num - agent_errors, agent_errors));
    store.save()?;

    // --- Step 3: Link orphans ---
    log_line(&mut log_buf, "\n--- Step 3: Link orphans ---");
    on_progress("linking orphans");
    println!("\n--- Linking orphan nodes ---");
    *store = Store::load()?;
    // Magic arguments (2, 3, 0.15) — presumably degree threshold / max links /
    // similarity cutoff; TODO confirm against neuro::link_orphans.
    let (lo_orphans, lo_added) = neuro::link_orphans(store, 2, 3, 0.15);
    log_line(&mut log_buf, &format!(" {} orphans, {} links added", lo_orphans, lo_added));

    // --- Step 3b: Cap degree ---
    log_line(&mut log_buf, "\n--- Step 3b: Cap degree ---");
    on_progress("capping degree");
    println!("\n--- Capping node degree ---");
    *store = Store::load()?;
    // 50 = maximum edges per node before pruning — TODO confirm with cap_degree.
    match store.cap_degree(50) {
        Ok((hubs, pruned)) => {
            // Only persist when capping succeeded; errors leave the store untouched.
            store.save()?;
            log_line(&mut log_buf, &format!(" {} hubs capped, {} edges pruned", hubs, pruned));
        }
        Err(e) => log_line(&mut log_buf, &format!(" ERROR: {}", e)),
    }

    // --- Step 4: Digest auto ---
    log_line(&mut log_buf, "\n--- Step 4: Digest auto ---");
    on_progress("generating digests");
    println!("\n--- Generating missing digests ---");
    *store = Store::load()?;
    match digest::digest_auto(store) {
        Ok(()) => log_line(&mut log_buf, " Digests done."),
        Err(e) => {
            // Digest failures, like agent failures, are logged but non-fatal.
            let msg = format!(" ERROR in digest auto: {}", e);
            log_line(&mut log_buf, &msg);
            eprintln!("{}", msg);
        }
    }

    // --- Step 5: Apply digest links ---
    log_line(&mut log_buf, "\n--- Step 5: Apply digest links ---");
    on_progress("applying digest links");
    println!("\n--- Applying digest links ---");
    *store = Store::load()?;
    let links = digest::parse_all_digest_links(store);
    let (applied, skipped, fallbacks) = digest::apply_digest_links(store, &links);
    store.save()?;
    log_line(&mut log_buf, &format!(" {} links applied, {} skipped, {} fallbacks", applied, skipped, fallbacks));

    // --- Step 6: Summary ---
    let elapsed = start.elapsed();
    log_line(&mut log_buf, "\n--- Summary ---");
    log_line(&mut log_buf, &format!("Finished: {}", store::format_datetime(store::now_epoch())));
    log_line(&mut log_buf, &format!("Duration: {:.0}s", elapsed.as_secs_f64()));
    // Reload once more so the final node/relation counts reflect everything
    // persisted during the pipeline.
    *store = Store::load()?;
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
    let summary = format!(
        "\n=== CONSOLIDATE FULL COMPLETE ===\n\
         Duration: {:.0}s\n\
         Agents: {} run, {} errors\n\
         Nodes: {} Relations: {}\n",
        elapsed.as_secs_f64(),
        agent_num - agent_errors,
        agent_errors,
        store.nodes.len(),
        store.relations.len(),
    );
    log_line(&mut log_buf, &summary);
    println!("{}", summary);
    // Store the log as a node — best-effort: `.ok()` deliberately ignores
    // failure so a logging problem cannot fail the whole consolidation.
    store.upsert_provenance(&log_key, &log_buf, "consolidate:write").ok();
    store.save()?;
    Ok(())
}