// Consolidation pipeline: plan → agents → apply → digests → links
//
// consolidate_full() runs the full autonomous consolidation:
// 1. Plan: analyze metrics, allocate agents
// 2. Execute: run each agent, parse + apply actions inline
// 3. Graph maintenance (orphans, degree cap)
// 4. Digest: generate missing daily/weekly/monthly digests
// 5. Links: apply links extracted from digests
// 6. Summary: final metrics comparison
//
// Actions are parsed directly from agent output using the same parser
// as the knowledge loop (WRITE_NODE, LINK, REFINE), eliminating the
// second LLM call that was previously needed.
//
// History note (from repo): LLM-dependent modules were moved into the
// agents/ subdir (llm, prompts, audit, consolidate, knowledge, enrich,
// fact_mine, digest, daemon), separating the agent layer from core graph
// infrastructure (store/, graph, spectral, search, similarity, lookups,
// query, config, util, migrate, neuro/). Re-exports at the crate root
// preserve backwards compatibility so `crate::llm`, `crate::digest` etc.
// continue to work.
|
|
|
use super::digest;
|
2026-03-10 17:22:53 -04:00
|
|
|
use super::knowledge;
|
2026-03-03 17:18:18 -05:00
|
|
|
use crate::neuro;
|
2026-03-10 17:22:53 -04:00
|
|
|
use crate::store::{self, Store};
|
2026-03-03 17:18:18 -05:00
|
|
|
|
|
|
|
|
|
2026-03-05 15:30:57 -05:00
|
|
|
/// Append `line` to the log buffer, terminated by a newline.
fn log_line(buf: &mut String, line: &str) {
    *buf += line;
    *buf += "\n";
}
|
|
|
|
|
|
|
|
|
|
/// Run the full autonomous consolidation pipeline with logging.
|
2026-03-05 22:16:17 -05:00
|
|
|
/// If `on_progress` is provided, it's called at each significant step.
|
2026-03-03 17:18:18 -05:00
|
|
|
pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
|
2026-03-05 22:16:17 -05:00
|
|
|
consolidate_full_with_progress(store, &|_| {})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Run the full autonomous consolidation pipeline, reporting progress.
///
/// `on_progress` is called with a short human-readable label at each
/// significant step (planning, each agent run, orphan linking, degree
/// capping, digest generation, digest-link application).
///
/// Everything is also appended to an in-memory log buffer which is
/// persisted at the end as a `_consolidate-log-<timestamp>` node via
/// `upsert_provenance`, so the run can be inspected later.
///
/// NOTE(review): the store is reloaded from disk (`Store::load`) before
/// several steps — presumably because agents persist their own changes
/// out-of-band; confirm before reordering any load/save calls.
pub fn consolidate_full_with_progress(
    store: &mut Store,
    on_progress: &dyn Fn(&str),
) -> Result<(), String> {
    let start = std::time::Instant::now();
    // Key under which the run log is stored as a node at the end.
    let log_key = format!("_consolidate-log-{}", store::compact_timestamp());
    let mut log_buf = String::new();

    // Header: starting state, used for before/after comparison in the summary.
    log_line(&mut log_buf, "=== CONSOLIDATE FULL ===");
    log_line(&mut log_buf, &format!("Started: {}", store::format_datetime(store::now_epoch())));
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
    log_line(&mut log_buf, "");

    // --- Step 1: Plan ---
    // Analyze store metrics and decide how many agents of each kind to run.
    log_line(&mut log_buf, "--- Step 1: Plan ---");
    on_progress("planning");
    let plan = neuro::consolidation_plan(store);
    let plan_text = neuro::format_plan(&plan);
    log_line(&mut log_buf, &plan_text);
    println!("{}", plan_text);

    // Health check counts as one extra agent when enabled.
    let total_agents = plan.replay_count + plan.linker_count
        + plan.separator_count + plan.transfer_count
        + if plan.run_health { 1 } else { 0 };
    log_line(&mut log_buf, &format!("Total agents to run: {}", total_agents));

    // --- Step 2: Execute agents ---
    log_line(&mut log_buf, "\n--- Step 2: Execute agents ---");
    let mut agent_num = 0usize;
    let mut agent_errors = 0usize;
    let mut total_applied = 0usize;
    let mut total_actions = 0usize;

    let batch_size = 5;
    let runs = plan.to_agent_runs(batch_size);

    for (agent_type, count) in &runs {
        agent_num += 1;
        // `count == 0` agent runs get no batch annotation in the label.
        let label = if *count > 0 {
            format!("[{}/{}] {} (batch={})", agent_num, runs.len(), agent_type, count)
        } else {
            format!("[{}/{}] {}", agent_num, runs.len(), agent_type)
        };

        log_line(&mut log_buf, &format!("\n{}", label));
        on_progress(&label);
        println!("{}", label);

        // Reload store to pick up changes from previous agents
        if agent_num > 1 {
            *store = Store::load()?;
        }

        // Run the agent and apply its parsed actions inline; a failed agent
        // is logged and counted but does not abort the whole pipeline.
        let (total, applied) = match knowledge::run_and_apply(store, agent_type, *count, "consolidate") {
            Ok(r) => r,
            Err(e) => {
                let msg = format!(" ERROR: {}", e);
                log_line(&mut log_buf, &msg);
                eprintln!("{}", msg);
                agent_errors += 1;
                continue;
            }
        };
        total_actions += total;
        total_applied += applied;

        let msg = format!(" Done: {} actions ({} applied)", total, applied);
        log_line(&mut log_buf, &msg);
        on_progress(&msg);
        println!("{}", msg);
    }

    log_line(&mut log_buf, &format!("\nAgents complete: {} run, {} errors, {} actions ({} applied)",
        agent_num - agent_errors, agent_errors, total_actions, total_applied));
    store.save()?;

    // --- Step 3: Link orphans ---
    log_line(&mut log_buf, "\n--- Step 3: Link orphans ---");
    on_progress("linking orphans");
    println!("\n--- Linking orphan nodes ---");
    *store = Store::load()?;

    // NOTE(review): link_orphans(store, 2, 3, 0.15) — argument meanings not
    // visible here (likely degree threshold / max links / similarity floor);
    // confirm against neuro::link_orphans before changing.
    let (lo_orphans, lo_added) = neuro::link_orphans(store, 2, 3, 0.15);
    log_line(&mut log_buf, &format!(" {} orphans, {} links added", lo_orphans, lo_added));

    // --- Step 3b: Cap degree ---
    log_line(&mut log_buf, "\n--- Step 3b: Cap degree ---");
    on_progress("capping degree");
    println!("\n--- Capping node degree ---");
    *store = Store::load()?;

    // Prune edges on nodes exceeding 50 connections; failure is logged only.
    match store.cap_degree(50) {
        Ok((hubs, pruned)) => {
            store.save()?;
            log_line(&mut log_buf, &format!(" {} hubs capped, {} edges pruned", hubs, pruned));
        }
        Err(e) => log_line(&mut log_buf, &format!(" ERROR: {}", e)),
    }

    // --- Step 4: Digest auto ---
    log_line(&mut log_buf, "\n--- Step 4: Digest auto ---");
    on_progress("generating digests");
    println!("\n--- Generating missing digests ---");
    *store = Store::load()?;

    // Digest failure is non-fatal: logged to buffer and stderr.
    match digest::digest_auto(store) {
        Ok(()) => log_line(&mut log_buf, " Digests done."),
        Err(e) => {
            let msg = format!(" ERROR in digest auto: {}", e);
            log_line(&mut log_buf, &msg);
            eprintln!("{}", msg);
        }
    }

    // --- Step 5: Apply digest links ---
    log_line(&mut log_buf, "\n--- Step 5: Apply digest links ---");
    on_progress("applying digest links");
    println!("\n--- Applying digest links ---");
    *store = Store::load()?;

    let links = digest::parse_all_digest_links(store);
    let (applied, skipped, fallbacks) = digest::apply_digest_links(store, &links);
    store.save()?;
    log_line(&mut log_buf, &format!(" {} links applied, {} skipped, {} fallbacks",
        applied, skipped, fallbacks));

    // --- Step 6: Summary ---
    let elapsed = start.elapsed();
    log_line(&mut log_buf, "\n--- Summary ---");
    log_line(&mut log_buf, &format!("Finished: {}", store::format_datetime(store::now_epoch())));
    log_line(&mut log_buf, &format!("Duration: {:.0}s", elapsed.as_secs_f64()));
    // Final reload so node/relation counts reflect all persisted changes.
    *store = Store::load()?;
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));

    let summary = format!(
        "\n=== CONSOLIDATE FULL COMPLETE ===\n\
        Duration: {:.0}s\n\
        Agents: {} run, {} errors\n\
        Nodes: {} Relations: {}\n",
        elapsed.as_secs_f64(),
        agent_num - agent_errors, agent_errors,
        store.nodes.len(), store.relations.len(),
    );
    log_line(&mut log_buf, &summary);
    println!("{}", summary);

    // Store the log as a node
    // Log persistence is best-effort (`.ok()`): a failure here should not
    // fail an otherwise-successful consolidation run.
    store.upsert_provenance(&log_key, &log_buf,
        store::Provenance::AgentConsolidate).ok();
    store.save()?;

    Ok(())
}
|
|
|
|
|
|
2026-03-10 17:22:53 -04:00
|
|
|
/// Re-parse and apply actions from stored consolidation reports.
|
|
|
|
|
/// This is for manually re-processing reports — during normal consolidation,
|
|
|
|
|
/// actions are applied inline as each agent runs.
|
2026-03-05 15:30:57 -05:00
|
|
|
pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_key: Option<&str>) -> Result<(), String> {
|
2026-03-10 17:22:53 -04:00
|
|
|
let reports: Vec<String> = if let Some(key) = report_key {
|
2026-03-05 15:30:57 -05:00
|
|
|
vec![key.to_string()]
|
2026-03-03 17:18:18 -05:00
|
|
|
} else {
|
2026-03-10 17:22:53 -04:00
|
|
|
// Find the most recent batch of reports
|
|
|
|
|
let mut keys: Vec<&String> = store.nodes.keys()
|
|
|
|
|
.filter(|k| k.starts_with("_consolidation-") && !k.contains("-actions-") && !k.contains("-log-"))
|
|
|
|
|
.collect();
|
|
|
|
|
keys.sort();
|
|
|
|
|
keys.reverse();
|
|
|
|
|
|
|
|
|
|
if keys.is_empty() { return Ok(()); }
|
|
|
|
|
|
|
|
|
|
let latest_ts = keys[0].rsplit('-').next().unwrap_or("").to_string();
|
|
|
|
|
keys.into_iter()
|
|
|
|
|
.filter(|k| k.ends_with(&latest_ts))
|
|
|
|
|
.cloned()
|
|
|
|
|
.collect()
|
2026-03-03 17:18:18 -05:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
if reports.is_empty() {
|
|
|
|
|
println!("No consolidation reports found.");
|
|
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
println!("Found {} reports:", reports.len());
|
2026-03-10 17:22:53 -04:00
|
|
|
let mut all_actions = Vec::new();
|
|
|
|
|
for key in &reports {
|
|
|
|
|
let content = store.nodes.get(key).map(|n| n.content.as_str()).unwrap_or("");
|
|
|
|
|
let actions = knowledge::parse_all_actions(content);
|
|
|
|
|
println!(" {} → {} actions", key, actions.len());
|
|
|
|
|
all_actions.extend(actions);
|
2026-03-03 17:18:18 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !do_apply {
|
2026-03-10 17:22:53 -04:00
|
|
|
println!("\nDRY RUN — {} actions parsed", all_actions.len());
|
|
|
|
|
for action in &all_actions {
|
|
|
|
|
match &action.kind {
|
|
|
|
|
knowledge::ActionKind::Link { source, target } =>
|
|
|
|
|
println!(" LINK {} → {}", source, target),
|
|
|
|
|
knowledge::ActionKind::WriteNode { key, .. } =>
|
|
|
|
|
println!(" WRITE {}", key),
|
|
|
|
|
knowledge::ActionKind::Refine { key, .. } =>
|
|
|
|
|
println!(" REFINE {}", key),
|
2026-03-10 22:57:02 -04:00
|
|
|
knowledge::ActionKind::Demote { key } =>
|
|
|
|
|
println!(" DEMOTE {}", key),
|
2026-03-03 17:18:18 -05:00
|
|
|
}
|
|
|
|
|
}
|
2026-03-10 17:22:53 -04:00
|
|
|
println!("\nTo apply: poc-memory apply-consolidation --apply");
|
2026-03-03 17:18:18 -05:00
|
|
|
return Ok(());
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-10 17:51:32 -04:00
|
|
|
let ts = store::compact_timestamp();
|
2026-03-10 17:22:53 -04:00
|
|
|
let mut applied = 0;
|
|
|
|
|
for action in &all_actions {
|
|
|
|
|
if knowledge::apply_action(store, action, "consolidate", &ts, 0) {
|
|
|
|
|
applied += 1;
|
2026-03-03 17:18:18 -05:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if applied > 0 {
|
|
|
|
|
store.save()?;
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-10 17:22:53 -04:00
|
|
|
println!("Applied: {}/{} actions", applied, all_actions.len());
|
2026-03-03 17:18:18 -05:00
|
|
|
Ok(())
|
|
|
|
|
}
|