neuro: unify consolidation planning, fix threshold drift

The daemon's compute_graph_health had a duplicated copy of the
consolidation planning thresholds that had drifted from the canonical
version (α<2.0 → +7 replay in daemon vs +10 in neuro).

Split consolidation_plan into a shared consolidation_plan_inner(store, detect_interference)
helper, so the daemon can call the new consolidation_plan_quick (which skips the O(n²)
interference detection) while reusing the exact same threshold logic as full consolidation.
This commit is contained in:
ProofOfConcept 2026-03-10 17:55:08 -04:00
parent 945865f594
commit 8ba58ce9cd
3 changed files with 36 additions and 64 deletions

View file

@ -492,9 +492,6 @@ fn job_daily_check(
}
fn compute_graph_health(store: &crate::store::Store) -> GraphHealth {
// Only compute cheap metrics here — interference detection is O(n²)
// and takes minutes. The full plan (with interference) runs during
// consolidation itself.
let graph = store.build_graph();
let snap = crate::graph::current_metrics(&graph);
@ -504,38 +501,8 @@ fn compute_graph_health(store: &crate::store::Store) -> GraphHealth {
let episodic_ratio = if store.nodes.is_empty() { 0.0 }
else { episodic_count as f32 / store.nodes.len() as f32 };
// Estimate plan from cheap metrics only (skip interference)
let mut plan_replay = 3usize; // baseline maintenance
let mut plan_linker = 0usize;
let plan_separator = 0usize; // needs interference, skip for status
let mut plan_transfer = 0usize;
let mut rationale = Vec::new();
if snap.alpha < 2.0 {
plan_replay += 7; plan_linker += 5;
rationale.push(format!("α={:.2}: extreme hub dominance", snap.alpha));
} else if snap.alpha < 2.5 {
plan_replay += 2; plan_linker += 3;
rationale.push(format!("α={:.2}: moderate hub dominance", snap.alpha));
}
if snap.gini > 0.5 {
plan_replay += 3;
rationale.push(format!("gini={:.3}: high inequality", snap.gini));
}
if snap.avg_cc < 0.1 {
plan_replay += 5;
rationale.push(format!("cc={:.3}: very poor integration", snap.avg_cc));
} else if snap.avg_cc < 0.2 {
plan_replay += 2;
rationale.push(format!("cc={:.3}: low integration", snap.avg_cc));
}
if episodic_ratio > 0.6 {
plan_transfer += 10;
rationale.push(format!("episodic={:.0}%: needs extraction", episodic_ratio * 100.0));
} else if episodic_ratio > 0.4 {
plan_transfer += 5;
rationale.push(format!("episodic={:.0}%", episodic_ratio * 100.0));
}
// Use the same planning logic as consolidation (skip O(n²) interference)
let plan = crate::neuro::consolidation_plan_quick(store);
GraphHealth {
nodes: snap.nodes,
@ -546,12 +513,12 @@ fn compute_graph_health(store: &crate::store::Store) -> GraphHealth {
avg_cc: snap.avg_cc,
sigma: snap.sigma,
episodic_ratio,
interference: 0, // not computed in status check
plan_replay,
plan_linker,
plan_separator,
plan_transfer,
plan_rationale: rationale,
interference: 0,
plan_replay: plan.replay_count,
plan_linker: plan.linker_count,
plan_separator: plan.separator_count,
plan_transfer: plan.transfer_count,
plan_rationale: plan.rationale,
computed_at: crate::store::format_datetime_space(crate::store::now_epoch()),
}
}
@ -1054,27 +1021,22 @@ pub fn run_daemon() -> Result<(), String> {
if last.is_none_or(|d| d < today) && gh.is_some() {
log_event("scheduler", "daily-trigger", &today.to_string());
// Use cached graph health for plan (cheap — no O(n²) interference detection).
let (replay, linker, separator, transfer) = match gh {
Some(ref h) => (h.plan_replay, h.plan_linker, h.plan_separator, h.plan_transfer),
None => unreachable!(), // guarded by gh.is_some() above
};
// Use cached graph health plan (from consolidation_plan_quick).
let h = gh.as_ref().unwrap(); // guarded by gh.is_some() above
let plan = crate::neuro::ConsolidationPlan {
replay_count: replay,
linker_count: linker,
separator_count: separator,
transfer_count: transfer,
replay_count: h.plan_replay,
linker_count: h.plan_linker,
separator_count: h.plan_separator,
transfer_count: h.plan_transfer,
run_health: true,
rationale: Vec::new(),
};
let batch_size = 5;
let runs = plan.to_agent_runs(batch_size);
let runs = plan.to_agent_runs(5);
log_event("scheduler", "consolidation-plan",
&format!("{} agents ({}r {}l {}s {}t)",
runs.len(), plan.replay_count, plan.linker_count,
plan.separator_count, plan.transfer_count));
runs.len(), h.plan_replay, h.plan_linker,
h.plan_separator, h.plan_transfer));
// Phase 1: Agent runs (sequential — each reloads store to see prior changes)
let mut prev_agent = None;