eliminate schema_fit: it's the clustering coefficient

schema_fit was algebraically identical to clustering_coefficient
(both compute 2E/(d*(d-1)) = fraction of connected neighbor pairs).
Remove the redundant function, field, and metrics column.

- Delete schema_fit() and schema_fit_all() from graph.rs
- Remove schema_fit field from Node struct
- Remove avg_schema_fit from MetricsSnapshot (duplicated avg_cc)
- Replace all callers with graph.clustering_coefficient()
- Rename ReplayItem.schema_fit to .cc
- Query: "cc" and "schema_fit" both resolve to the graph clustering coefficient
- Low-CC count folded into health report CC line

Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
ProofOfConcept 2026-03-03 12:21:04 -05:00
parent fb7aa46e03
commit ec8b4b2ed2
5 changed files with 33 additions and 83 deletions

View file

@ -19,12 +19,12 @@ const SECS_PER_DAY: f64 = 86400.0;
/// With spectral data:
/// priority = spectral_displacement × overdue × emotion
/// Without:
/// priority = (1 - schema_fit) × overdue × emotion
/// priority = (1 - cc) × overdue × emotion
///
/// Spectral displacement is the outlier_score clamped and normalized —
/// it measures how far a node sits from its community center in the
/// eigenspace. This is a global signal (considers all graph structure)
/// vs schema_fit which is local (only immediate neighbors).
/// vs CC which is local (only immediate neighbors).
pub fn consolidation_priority(
store: &Store,
key: &str,
@ -44,8 +44,8 @@ pub fn consolidation_priority(
// outlier=5 and outlier=10 matters less than 1 vs 2.
(outlier / 3.0).min(3.0)
} else {
let fit = graph::schema_fit(graph, key) as f64;
1.0 - fit
let cc = graph.clustering_coefficient(key) as f64;
1.0 - cc
};
// Spaced repetition: how overdue is this node for replay?
@ -69,7 +69,7 @@ pub struct ReplayItem {
pub priority: f64,
pub interval_days: u32,
pub emotion: f32,
pub schema_fit: f32,
pub cc: f32,
/// Spectral classification: "bridge", "outlier", "core", "peripheral"
pub classification: &'static str,
/// Raw spectral outlier score (distance / median)
@ -91,8 +91,6 @@ pub fn replay_queue_with_graph(
graph: &Graph,
emb: Option<&SpectralEmbedding>,
) -> Vec<ReplayItem> {
let fits = graph::schema_fit_all(graph);
// Build spectral position map if embedding is available
let positions: HashMap<String, SpectralPosition> = if let Some(emb) = emb {
let communities = graph.communities().clone();
@ -116,14 +114,12 @@ pub fn replay_queue_with_graph(
store, key, graph,
pos.map(|p| p.outlier_score),
);
let fit = fits.get(key).copied().unwrap_or(0.0);
ReplayItem {
key: key.clone(),
priority,
interval_days: node.spaced_repetition_interval,
emotion: node.emotion,
schema_fit: fit,
cc: graph.clustering_coefficient(key),
classification,
outlier_score,
}
@ -178,7 +174,7 @@ pub fn detect_interference(
/// Low fit (<0.2): deep examination needed — new schema seed, bridge, or noise?
pub fn schema_assimilation(store: &Store, key: &str) -> (f32, &'static str) {
let graph = store.build_graph();
let fit = graph::schema_fit(&graph, key);
let fit = graph.clustering_coefficient(key);
let recommendation = if fit > 0.5 {
"auto-integrate"
@ -267,8 +263,8 @@ fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph) -> S
};
out.push_str(&format!("## {} \n", item.key));
out.push_str(&format!("Priority: {:.3} Schema fit: {:.3} Emotion: {:.1} ",
item.priority, item.schema_fit, item.emotion));
out.push_str(&format!("Priority: {:.3} CC: {:.3} Emotion: {:.1} ",
item.priority, item.cc, item.emotion));
out.push_str(&format!("Category: {} Interval: {}d\n",
node.category.label(), node.spaced_repetition_interval));
if item.outlier_score > 0.0 {
@ -485,8 +481,8 @@ pub fn consolidation_batch(store: &Store, count: usize, auto: bool) -> Result<()
let node_type = store.nodes.get(&item.key)
.map(|n| if n.key.contains("journal") { "episodic" } else { "semantic" })
.unwrap_or("?");
println!(" [{:.3}] {} (fit={:.3}, interval={}d, type={})",
item.priority, item.key, item.schema_fit, item.interval_days, node_type);
println!(" [{:.3}] {} (cc={:.3}, interval={}d, type={})",
item.priority, item.key, item.cc, item.interval_days, node_type);
}
// Also show interference pairs
@ -556,13 +552,12 @@ pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<String,
let items: Vec<ReplayItem> = episode_keys.iter()
.filter_map(|k| {
let node = store.nodes.get(k)?;
let fit = graph::schema_fit(&graph, k);
Some(ReplayItem {
key: k.clone(),
priority: consolidation_priority(store, k, &graph, None),
interval_days: node.spaced_repetition_interval,
emotion: node.emotion,
schema_fit: fit,
cc: graph.clustering_coefficient(k),
classification: "unknown",
outlier_score: 0.0,
})
@ -597,12 +592,7 @@ pub fn consolidation_plan(store: &Store) -> ConsolidationPlan {
let graph = store.build_graph();
let alpha = graph.degree_power_law_exponent();
let gini = graph.degree_gini();
let avg_fit = {
let fits = graph::schema_fit_all(&graph);
if fits.is_empty() { 0.0 } else {
fits.values().sum::<f32>() / fits.len() as f32
}
};
let avg_cc = graph.avg_clustering_coefficient();
let interference_pairs = detect_interference(store, &graph, 0.5);
let interference_count = interference_pairs.len();
@ -651,17 +641,17 @@ pub fn consolidation_plan(store: &Store) -> ConsolidationPlan {
gini));
}
// Target: avg schema fit ≥ 0.2
if avg_fit < 0.1 {
// Target: avg CC ≥ 0.2
if avg_cc < 0.1 {
plan.replay_count += 5;
plan.rationale.push(format!(
"Schema fit={:.3} (target ≥0.2): very poor integration → +5 replay",
avg_fit));
} else if avg_fit < 0.2 {
"CC={:.3} (target ≥0.2): very poor integration → +5 replay",
avg_cc));
} else if avg_cc < 0.2 {
plan.replay_count += 2;
plan.rationale.push(format!(
"Schema fit={:.3} (target ≥0.2): low integration → +2 replay",
avg_fit));
"CC={:.3} (target ≥0.2): low integration → +2 replay",
avg_cc));
}
// Interference: >100 pairs is a lot, <10 is clean
@ -748,21 +738,14 @@ pub fn daily_check(store: &Store) -> String {
let gini = graph.degree_gini();
let sigma = graph.small_world_sigma();
let avg_cc = graph.avg_clustering_coefficient();
let avg_fit = {
let fits = graph::schema_fit_all(&graph);
if fits.is_empty() { 0.0 } else {
fits.values().sum::<f32>() / fits.len() as f32
}
};
let history = graph::load_metrics_history();
let prev = history.last();
let mut out = String::from("Memory daily check\n");
// Current state
out.push_str(&format!(" σ={:.1} α={:.2} gini={:.3} cc={:.4} fit={:.3}\n",
sigma, alpha, gini, avg_cc, avg_fit));
out.push_str(&format!(" σ={:.1} α={:.2} gini={:.3} cc={:.4}\n",
sigma, alpha, gini, avg_cc));
// Trend
if let Some(p) = prev {
@ -777,7 +760,7 @@ pub fn daily_check(store: &Store) -> String {
let mut issues = Vec::new();
if alpha < 2.0 { issues.push("hub dominance critical"); }
if gini > 0.5 { issues.push("high inequality"); }
if avg_fit < 0.1 { issues.push("poor integration"); }
if avg_cc < 0.1 { issues.push("poor integration"); }
if d_sigma < -5.0 { issues.push("σ declining"); }
if d_alpha < -0.1 { issues.push("α declining"); }
if d_gini > 0.02 { issues.push("inequality increasing"); }
@ -802,7 +785,6 @@ pub fn daily_check(store: &Store) -> String {
communities: graph.community_count(),
sigma, alpha, gini, avg_cc,
avg_path_length: graph.avg_path_length(),
avg_schema_fit: avg_fit,
});
out