consolidation: data-driven agent plan, drop transfer/connector/replay
Replace per-field ConsolidationPlan struct with HashMap<String, usize> counts map. Agent types are no longer hardcoded in the struct — add agents by adding entries to the map. Active agents: linker, organize, distill, separator, split. Removed: transfer (redundant with distill), connector (rethink later), replay (not needed for current graph work). Elo-based budget allocation now iterates the map instead of indexing a fixed array. Status display and TUI adapted to show dynamic agent lists. memory-instructions-core v13: added protected nodes section — agents must not rewrite core-personality, core-personality-detail, or memory-instructions-core. They may add links but not modify content. High-value neighbors should be treated with care. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
d6c26e27fe
commit
d20baafe9d
5 changed files with 116 additions and 224 deletions
|
|
@@ -46,9 +46,7 @@ pub fn consolidate_full_with_progress(
|
|||
log_line(&mut log_buf, &plan_text);
|
||||
println!("{}", plan_text);
|
||||
|
||||
let total_agents = plan.replay_count + plan.linker_count
|
||||
+ plan.separator_count + plan.transfer_count
|
||||
+ if plan.run_health { 1 } else { 0 };
|
||||
let total_agents = plan.total();
|
||||
log_line(&mut log_buf, &format!("Total agents to run: {}", total_agents));
|
||||
|
||||
// --- Step 2: Execute agents ---
|
||||
|
|
|
|||
|
|
@@ -552,13 +552,7 @@ fn compute_graph_health(store: &crate::store::Store) -> GraphHealth {
|
|||
sigma: snap.sigma,
|
||||
episodic_ratio,
|
||||
interference: 0,
|
||||
plan_replay: plan.replay_count,
|
||||
plan_linker: plan.linker_count,
|
||||
plan_separator: plan.separator_count,
|
||||
plan_transfer: plan.transfer_count,
|
||||
plan_organize: plan.organize_count,
|
||||
plan_connector: plan.connector_count,
|
||||
plan_distill: plan.distill_count,
|
||||
plan_counts: plan.counts,
|
||||
plan_rationale: plan.rationale,
|
||||
computed_at: crate::store::format_datetime_space(crate::store::now_epoch()),
|
||||
}
|
||||
|
|
@@ -680,14 +674,8 @@ pub struct GraphHealth {
|
|||
pub episodic_ratio: f32, // episodic/total nodes (target <0.4)
|
||||
pub interference: usize, // interfering pairs (target <50)
|
||||
// Consolidation work estimate from plan
|
||||
pub plan_replay: usize,
|
||||
pub plan_linker: usize,
|
||||
pub plan_separator: usize,
|
||||
pub plan_transfer: usize,
|
||||
pub plan_organize: usize,
|
||||
pub plan_connector: usize,
|
||||
#[serde(default)]
|
||||
pub plan_distill: usize,
|
||||
pub plan_counts: std::collections::HashMap<String, usize>,
|
||||
pub plan_rationale: Vec<String>,
|
||||
pub computed_at: String,
|
||||
}
|
||||
|
|
@@ -1042,22 +1030,18 @@ pub fn run_daemon() -> Result<(), String> {
|
|||
// Use cached graph health plan (from consolidation_plan_quick).
|
||||
let h = gh.as_ref().unwrap(); // guarded by gh.is_some() above
|
||||
let plan = crate::neuro::ConsolidationPlan {
|
||||
replay_count: h.plan_replay,
|
||||
linker_count: h.plan_linker,
|
||||
separator_count: h.plan_separator,
|
||||
transfer_count: h.plan_transfer,
|
||||
organize_count: h.plan_organize,
|
||||
connector_count: h.plan_connector,
|
||||
distill_count: h.plan_distill,
|
||||
counts: h.plan_counts.clone(),
|
||||
run_health: true,
|
||||
rationale: Vec::new(),
|
||||
};
|
||||
let runs = plan.to_agent_runs(5);
|
||||
|
||||
let summary: Vec<String> = h.plan_counts.iter()
|
||||
.filter(|(_, c)| **c > 0)
|
||||
.map(|(a, c)| format!("{}{}", &a[..1], c))
|
||||
.collect();
|
||||
log_event("scheduler", "consolidation-plan",
|
||||
&format!("{} agents ({}r {}l {}s {}t {}d)",
|
||||
runs.len(), h.plan_replay, h.plan_linker,
|
||||
h.plan_separator, h.plan_transfer, h.plan_distill));
|
||||
&format!("{} agents ({})", runs.len(), summary.join(" ")));
|
||||
|
||||
// Phase 1: Agent runs — sequential within type, parallel across types.
|
||||
// Same-type agents chain (they may touch overlapping graph regions),
|
||||
|
|
@@ -1076,10 +1060,10 @@ pub fn run_daemon() -> Result<(), String> {
|
|||
.init(move |ctx| {
|
||||
job_consolidation_agent(ctx, &agent, b, &in_flight_clone)
|
||||
});
|
||||
if let Some(dep) = prev_by_type.get(*agent_type) {
|
||||
if let Some(dep) = prev_by_type.get(agent_type.as_str()) {
|
||||
builder.depend_on(dep);
|
||||
}
|
||||
prev_by_type.insert(agent_type.to_string(), builder.run());
|
||||
prev_by_type.insert(agent_type.clone(), builder.run());
|
||||
}
|
||||
// Orphans phase depends on all agent type chains completing
|
||||
let prev_agent = prev_by_type.into_values().last();
|
||||
|
|
@@ -1501,9 +1485,13 @@ pub fn show_status() -> Result<(), String> {
|
|||
indicator(gh.episodic_ratio, 0.4, false), gh.episodic_ratio * 100.0,
|
||||
gh.sigma);
|
||||
|
||||
let total = gh.plan_replay + gh.plan_linker + gh.plan_separator + gh.plan_transfer + gh.plan_distill + 1;
|
||||
eprintln!(" consolidation plan: {} agents ({}r {}l {}s {}t {}d +health)",
|
||||
total, gh.plan_replay, gh.plan_linker, gh.plan_separator, gh.plan_transfer, gh.plan_distill);
|
||||
let plan_total: usize = gh.plan_counts.values().sum::<usize>() + 1;
|
||||
let plan_summary: Vec<String> = gh.plan_counts.iter()
|
||||
.filter(|(_, c)| **c > 0)
|
||||
.map(|(a, c)| format!("{}{}", &a[..1], c))
|
||||
.collect();
|
||||
eprintln!(" consolidation plan: {} agents ({} +health)",
|
||||
plan_total, plan_summary.join(" "));
|
||||
}
|
||||
eprintln!();
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue