diff --git a/poc-memory/src/agents/daemon.rs b/poc-memory/src/agents/daemon.rs index 4cd2fb4..7616871 100644 --- a/poc-memory/src/agents/daemon.rs +++ b/poc-memory/src/agents/daemon.rs @@ -478,6 +478,8 @@ fn compute_graph_health(store: &crate::store::Store) -> GraphHealth { plan_linker: plan.linker_count, plan_separator: plan.separator_count, plan_transfer: plan.transfer_count, + plan_organize: plan.organize_count, + plan_connector: plan.connector_count, plan_rationale: plan.rationale, computed_at: crate::store::format_datetime_space(crate::store::now_epoch()), } @@ -603,6 +605,8 @@ pub struct GraphHealth { pub plan_linker: usize, pub plan_separator: usize, pub plan_transfer: usize, + pub plan_organize: usize, + pub plan_connector: usize, pub plan_rationale: Vec<String>, pub computed_at: String, } @@ -986,6 +990,8 @@ pub fn run_daemon() -> Result<(), String> { linker_count: h.plan_linker, separator_count: h.plan_separator, transfer_count: h.plan_transfer, + organize_count: h.plan_organize, + connector_count: h.plan_connector, run_health: true, rationale: Vec::new(), }; diff --git a/poc-memory/src/neuro/scoring.rs b/poc-memory/src/neuro/scoring.rs index 39d26fd..6cd0de6 100644 --- a/poc-memory/src/neuro/scoring.rs +++ b/poc-memory/src/neuro/scoring.rs @@ -170,6 +170,8 @@ pub struct ConsolidationPlan { pub linker_count: usize, pub separator_count: usize, pub transfer_count: usize, + pub organize_count: usize, + pub connector_count: usize, pub run_health: bool, pub rationale: Vec<String>, } @@ -183,9 +185,11 @@ impl ConsolidationPlan { } // Build per-type batch lists, then interleave so different agent // types alternate rather than running all-replay-then-all-linker. 
- let types: [(&str, usize); 4] = [ + let types: [(&str, usize); 6] = [ ("linker", self.linker_count), + ("organize", self.organize_count), ("replay", self.replay_count), + ("connector", self.connector_count), ("separator", self.separator_count), ("transfer", self.transfer_count), ]; @@ -251,6 +255,8 @@ fn consolidation_plan_inner(store: &Store, detect_interf: bool) -> Consolidation linker_count: 0, separator_count: 0, transfer_count: 0, + organize_count: 0, + connector_count: 0, run_health: true, rationale: Vec::new(), }; @@ -330,6 +336,28 @@ fn consolidation_plan_inner(store: &Store, detect_interf: bool) -> Consolidation episodic_ratio * 100.0)); } + // Organize: proportional to linker — synthesizes what linker connects + plan.organize_count = plan.linker_count / 2; + plan.rationale.push(format!( + "Organize: {} (half of linker count)", plan.organize_count)); + + // Connector: bridges fragmented communities + let community_count = graph.community_count(); + let nodes_per_community = if community_count > 0 { + store.nodes.len() as f64 / community_count as f64 + } else { 0.0 }; + if nodes_per_community < 5.0 { + plan.connector_count += 20; + plan.rationale.push(format!( + "Communities fragmented ({} communities, {:.1} nodes/community) → 20 connector", + community_count, nodes_per_community)); + } else if nodes_per_community < 10.0 { + plan.connector_count += 10; + plan.rationale.push(format!( + "Communities moderate ({:.1} nodes/community) → 10 connector", + nodes_per_community)); + } + + plan } @@ -365,10 +393,21 @@ pub fn format_plan(plan: &ConsolidationPlan) -> String { if plan.transfer_count > 0 { out.push_str(&format!(" {}. transfer ×{:2} — episodic→semantic extraction\n", step, plan.transfer_count)); + step += 1; + } + if plan.organize_count > 0 { + out.push_str(&format!(" {}. organize ×{:2} — hub creation + knowledge synthesis\n", + step, plan.organize_count)); + step += 1; + } + if plan.connector_count > 0 { + out.push_str(&format!(" {}. 
connector ×{:2} — cross-cluster bridging\n", + step, plan.connector_count)); } let total = plan.replay_count + plan.linker_count + plan.separator_count + plan.transfer_count + + plan.organize_count + plan.connector_count + if plan.run_health { 1 } else { 0 }; out.push_str(&format!("\nTotal agent runs: {}\n", total));