graph_normalize_strengths: convert to RPC tool

Agents can use this to check if edge weights are skewed.
Dry run by default; pass apply:true to write changes.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-12 23:12:42 -04:00
parent a8d91896a2
commit f02a23468e
2 changed files with 78 additions and 71 deletions

View file

@ -123,6 +123,7 @@ async fn dispatch(
"graph_topology" => graph_topology().await,
"graph_health" => graph_health().await,
"graph_communities" => graph_communities(&args).await,
"graph_normalize_strengths" => graph_normalize_strengths(&args).await,
"journal_tail" => journal_tail(&args).await,
"journal_new" => journal_new(agent, &args).await,
"journal_update" => journal_update(agent, &args).await,
@ -614,3 +615,74 @@ async fn graph_communities(args: &serde_json::Value) -> Result<String> {
Ok(out)
}
/// RPC tool wrapper: recompute relation strengths from Jaccard similarity
/// over the current graph.
///
/// Reads the optional boolean `apply` from `args` (defaults to `false`).
/// Without `apply` this is a dry run that only reports what would change;
/// with `apply:true` the recomputed strengths are written back and the
/// store is saved. Implicit temporal edges (strength 1.0, Auto type) are
/// never touched. Returns a human-readable report string.
async fn graph_normalize_strengths(args: &serde_json::Value) -> Result<String> {
    use std::fmt::Write;

    let apply = args
        .get("apply")
        .and_then(serde_json::Value::as_bool)
        .unwrap_or(false);

    let arc = cached_store().await?;
    let mut store = arc.lock().await;
    let graph = store.build_graph();
    let strengths = graph.jaccard_strengths();

    // Lookup table of recomputed strengths, keyed on both edge directions
    // so a stored relation matches regardless of its orientation.
    let mut updates: std::collections::HashMap<(String, String), f32> =
        std::collections::HashMap::new();
    for (a, b, s) in &strengths {
        updates.insert((a.clone(), b.clone()), *s);
        updates.insert((b.clone(), a.clone()), *s);
    }

    // Counters gathered while walking the stored relations.
    let (mut changed, mut unchanged, mut temporal_skipped) = (0usize, 0usize, 0usize);
    let mut delta_sum = 0.0f64;
    // Histogram of the recomputed strengths: 0.0-0.1, 0.1-0.2, ..., 0.9-1.0.
    let mut buckets = [0usize; 10];

    for rel in &mut store.relations {
        if rel.deleted {
            continue;
        }
        // Implicit temporal edges (strength 1.0 + Auto type) are left as-is.
        if rel.strength == 1.0 && rel.rel_type == crate::store::RelationType::Auto {
            temporal_skipped += 1;
            continue;
        }
        if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
            // Compute the delta before any mutation so it reflects the old value.
            let delta = (new_s - rel.strength).abs();
            if delta > 0.001 {
                delta_sum += f64::from(delta);
                if apply {
                    rel.strength = new_s;
                }
                changed += 1;
            } else {
                unchanged += 1;
            }
            buckets[((new_s * 10.0) as usize).min(9)] += 1;
        }
    }

    // Assemble the report.
    let mut out = String::new();
    writeln!(out, "Normalize link strengths (Jaccard similarity)").ok();
    writeln!(out, " Total edges in graph: {}", strengths.len()).ok();
    writeln!(out, " Would change: {}", changed).ok();
    writeln!(out, " Unchanged: {}", unchanged).ok();
    writeln!(out, " Temporal (skipped): {}", temporal_skipped).ok();
    if changed > 0 {
        writeln!(out, " Avg delta: {:.3}", delta_sum / changed as f64).ok();
    }
    writeln!(out).ok();
    writeln!(out, " Strength distribution:").ok();
    for (i, &count) in buckets.iter().enumerate() {
        let lo = i as f32 / 10.0;
        let hi = lo + 0.1;
        // One '#' per 50 edges, plus one so every non-empty bucket is visible.
        let bar_len = if count > 0 { count / 50 + 1 } else { 0 };
        writeln!(out, " {:.1}-{:.1}: {:5} {}", lo, hi, count, "#".repeat(bar_len)).ok();
    }

    if apply {
        store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
        writeln!(out, "\nApplied {} strength updates.", changed).ok();
    } else {
        writeln!(out, "\nDry run. Pass apply:true to write changes.").ok();
    }
    Ok(out)
}

View file

@ -27,77 +27,12 @@ pub fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
}
// NOTE(review): the hunk header above ("@ -27,77 +27,12") records this
// function shrinking from 77 lines to 12, but this rendering has lost the
// diff's +/- markers, so the *removed* in-process implementation and the
// *added* RPC delegation appear concatenated into one body below. Confirm
// the exact split against the actual commit before treating this as source.
pub fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
let mut store = store::Store::load()?;
let graph = store.build_graph();
let strengths = graph.jaccard_strengths();
// Build a lookup from (source_key, target_key) → new_strength
let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
for (a, b, s) in &strengths {
// Store both directions for easy lookup
updates.insert((a.clone(), b.clone()), *s);
updates.insert((b.clone(), a.clone()), *s);
}
// Stats
let mut changed = 0usize;
let mut unchanged = 0usize;
let mut temporal_skipped = 0usize;
let mut delta_sum: f64 = 0.0;
// Histogram of new strengths
let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...
for rel in &mut store.relations {
if rel.deleted { continue; }
// Skip implicit temporal edges (strength 1.0, Auto type)
if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
temporal_skipped += 1;
continue;
}
if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
let old_s = rel.strength;
let delta = (new_s - old_s).abs();
if delta > 0.001 {
delta_sum += delta as f64;
if apply {
rel.strength = new_s;
}
changed += 1;
} else {
unchanged += 1;
}
let bucket = ((new_s * 10.0) as usize).min(9);
buckets[bucket] += 1;
}
}
println!("Normalize link strengths (Jaccard similarity)");
println!(" Total edges in graph: {}", strengths.len());
println!(" Would change: {}", changed);
println!(" Unchanged: {}", unchanged);
println!(" Temporal (skipped): {}", temporal_skipped);
if changed > 0 {
println!(" Avg delta: {:.3}", delta_sum / changed as f64);
}
println!();
println!(" Strength distribution:");
for (i, &count) in buckets.iter().enumerate() {
let lo = i as f32 / 10.0;
let hi = lo + 0.1;
let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
}
if apply {
store.save()?;
println!("\nApplied {} strength updates.", changed);
} else {
println!("\nDry run. Use --apply to write changes.");
}
// NOTE(review): everything from here down looks like the commit's *added*
// lines — the new body that delegates to the RPC tool and prints its
// report — while everything above was presumably removed. TODO confirm.
if apply { super::check_dry_run(); }
let result = crate::mcp_server::memory_rpc(
"graph_normalize_strengths",
serde_json::json!({"apply": apply}),
).map_err(|e| e.to_string())?;
print!("{}", result);
Ok(())
}