cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
// cli/graph.rs — graph subcommand handlers
|
|
|
|
|
|
//
|
|
|
|
|
|
// Extracted from main.rs. All graph-related CLI commands:
|
Delete similarity module, rewrite module, and all text-similarity code
Text cosine similarity was being used as a crutch for operations
the graph structure should handle: interference detection, orphan
linking, triangle closing, hub differentiation. These are all
graph-structural operations that the agents (linker, extractor)
handle with actual semantic understanding.
Removed: similarity.rs (stemming + cosine), rewrite.rs (orphan
linking, triangle closing, hub differentiation), detect_interference,
and all CLI commands and consolidation steps that used them.
-794 lines.
Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-10 15:44:10 -04:00
|
|
|
|
// link, link-add, link-set, link-impact, link-audit, cap-degree,
// normalize-strengths, spread, trace, spectral-*, organize, communities.
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
|
Delete similarity module, rewrite module, and all text-similarity code
Text cosine similarity was being used as a crutch for operations
the graph structure should handle: interference detection, orphan
linking, triangle closing, hub differentiation. These are all
graph-structural operations that the agents (linker, extractor)
handle with actual semantic understanding.
Removed: similarity.rs (stemming + cosine), rewrite.rs (orphan
linking, triangle closing, hub differentiation), detect_interference,
and all CLI commands and consolidation steps that used them.
-794 lines.
Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-10 15:44:10 -04:00
|
|
|
|
use crate::{store, graph};
|
2026-03-31 18:21:01 -04:00
|
|
|
|
use crate::store::StoreView;
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
|
|
|
|
|
|
pub fn cmd_graph() -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("Graph: {} nodes, {} edges, {} communities",
|
|
|
|
|
|
g.nodes().len(), g.edge_count(), g.community_count());
|
|
|
|
|
|
println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
|
|
|
|
|
|
g.small_world_sigma(), g.degree_power_law_exponent(),
|
|
|
|
|
|
g.degree_gini(), g.avg_clustering_coefficient());
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let (hubs, pruned) = store.cap_degree(max_deg)?;
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let graph = store.build_graph();
|
|
|
|
|
|
let strengths = graph.jaccard_strengths();
|
|
|
|
|
|
|
|
|
|
|
|
// Build a lookup from (source_key, target_key) → new_strength
|
|
|
|
|
|
let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
|
|
|
|
|
|
for (a, b, s) in &strengths {
|
|
|
|
|
|
// Store both directions for easy lookup
|
|
|
|
|
|
updates.insert((a.clone(), b.clone()), *s);
|
|
|
|
|
|
updates.insert((b.clone(), a.clone()), *s);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Stats
|
|
|
|
|
|
let mut changed = 0usize;
|
|
|
|
|
|
let mut unchanged = 0usize;
|
|
|
|
|
|
let mut temporal_skipped = 0usize;
|
|
|
|
|
|
let mut delta_sum: f64 = 0.0;
|
|
|
|
|
|
|
|
|
|
|
|
// Histogram of new strengths
|
|
|
|
|
|
let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...
|
|
|
|
|
|
|
|
|
|
|
|
for rel in &mut store.relations {
|
|
|
|
|
|
if rel.deleted { continue; }
|
|
|
|
|
|
|
|
|
|
|
|
// Skip implicit temporal edges (strength 1.0, Auto type)
|
|
|
|
|
|
if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
|
|
|
|
|
|
temporal_skipped += 1;
|
|
|
|
|
|
continue;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
|
|
|
|
|
|
let old_s = rel.strength;
|
|
|
|
|
|
let delta = (new_s - old_s).abs();
|
|
|
|
|
|
if delta > 0.001 {
|
|
|
|
|
|
delta_sum += delta as f64;
|
|
|
|
|
|
if apply {
|
|
|
|
|
|
rel.strength = new_s;
|
|
|
|
|
|
}
|
|
|
|
|
|
changed += 1;
|
|
|
|
|
|
} else {
|
|
|
|
|
|
unchanged += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
let bucket = ((new_s * 10.0) as usize).min(9);
|
|
|
|
|
|
buckets[bucket] += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("Normalize link strengths (Jaccard similarity)");
|
|
|
|
|
|
println!(" Total edges in graph: {}", strengths.len());
|
|
|
|
|
|
println!(" Would change: {}", changed);
|
|
|
|
|
|
println!(" Unchanged: {}", unchanged);
|
|
|
|
|
|
println!(" Temporal (skipped): {}", temporal_skipped);
|
|
|
|
|
|
if changed > 0 {
|
|
|
|
|
|
println!(" Avg delta: {:.3}", delta_sum / changed as f64);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!();
|
|
|
|
|
|
println!(" Strength distribution:");
|
|
|
|
|
|
for (i, &count) in buckets.iter().enumerate() {
|
|
|
|
|
|
let lo = i as f32 / 10.0;
|
|
|
|
|
|
let hi = lo + 0.1;
|
|
|
|
|
|
let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
|
|
|
|
|
|
println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if apply {
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("\nApplied {} strength updates.", changed);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!("\nDry run. Use --apply to write changes.");
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-31 18:21:01 -04:00
|
|
|
|
pub fn cmd_spread(keys: &[String], max_results: usize) -> Result<(), String> {
|
|
|
|
|
|
if keys.is_empty() {
|
|
|
|
|
|
return Err("spread requires at least one seed key".into());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let graph = graph::build_graph_fast(&store);
|
|
|
|
|
|
let params = store.params();
|
|
|
|
|
|
|
|
|
|
|
|
let seeds: Vec<(String, f64)> = keys.iter()
|
|
|
|
|
|
.filter_map(|k| {
|
|
|
|
|
|
let resolved = store.resolve_key(k).ok()?;
|
|
|
|
|
|
Some((resolved, 1.0))
|
|
|
|
|
|
})
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
if seeds.is_empty() {
|
|
|
|
|
|
return Err("no valid seed keys found".into());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
let results = crate::search::spreading_activation(
|
|
|
|
|
|
&seeds, &graph, &store,
|
|
|
|
|
|
params.max_hops, params.edge_decay, params.min_activation,
|
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
|
|
let seed_keys: std::collections::HashSet<&str> = seeds.iter()
|
|
|
|
|
|
.map(|(k, _)| k.as_str())
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
for (key, score) in results.iter()
|
|
|
|
|
|
.filter(|(k, _)| !seed_keys.contains(k.as_str()))
|
|
|
|
|
|
.take(max_results)
|
|
|
|
|
|
{
|
|
|
|
|
|
println!(" {:.2} {}", score, key);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
pub fn cmd_link(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("link requires a key".into());
|
|
|
|
|
|
}
|
|
|
|
|
|
let key = key.join(" ");
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("Neighbors of '{}':", resolved);
|
|
|
|
|
|
crate::query_parser::run_query(&store, &g,
|
|
|
|
|
|
&format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
|
poc-memory: POC_MEMORY_DRY_RUN=1 for agent testing
All mutating commands (write, delete, rename, link-add, journal write,
used, wrong, not-useful, gap) check POC_MEMORY_DRY_RUN after argument
validation but before mutation. If set, process exits silently — agent
tool calls are visible in the LLM output so we can see what it tried
to do without applying changes.
Read commands (render, search, graph link, journal tail) work normally
in dry-run mode so agents can still explore the graph.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-16 18:09:56 -04:00
|
|
|
|
super::check_dry_run();
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
|
|
|
|
|
let reason = reason.join(" ");
|
|
|
|
|
|
|
2026-03-25 01:55:21 -04:00
|
|
|
|
match store.add_link(&source, &target, "manual") {
|
|
|
|
|
|
Ok(strength) => {
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Linked: {} → {} (strength={:.2}, {})", source, target, strength, reason);
|
|
|
|
|
|
}
|
|
|
|
|
|
Err(msg) if msg.contains("already exists") => {
|
|
|
|
|
|
println!("Link already exists: {} ↔ {}", source, target);
|
|
|
|
|
|
}
|
|
|
|
|
|
Err(e) => return Err(e),
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-17 01:39:41 -04:00
|
|
|
|
pub fn cmd_link_set(source: &str, target: &str, strength: f32) -> Result<(), String> {
|
|
|
|
|
|
super::check_dry_run();
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
|
|
|
|
|
|
2026-03-25 01:55:21 -04:00
|
|
|
|
let old = store.set_link_strength(&source, &target, strength)?;
|
|
|
|
|
|
println!("Set: {} ↔ {} strength {:.2} → {:.2}", source, target, old, strength);
|
2026-03-17 01:39:41 -04:00
|
|
|
|
store.save()?;
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
pub fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
|
|
let impact = g.link_impact(&source, &target);
|
|
|
|
|
|
|
|
|
|
|
|
println!("Link impact: {} → {}", source, target);
|
|
|
|
|
|
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
|
|
|
|
|
|
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
|
|
|
|
|
|
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
|
|
|
|
|
|
println!(" ΔGini: {:+.6}", impact.delta_gini);
|
|
|
|
|
|
println!(" Assessment: {}", impact.assessment);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_link_audit(apply: bool) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let stats = crate::audit::link_audit(&mut store, apply)?;
|
|
|
|
|
|
println!("\n{}", "=".repeat(60));
|
|
|
|
|
|
println!("Link audit complete:");
|
|
|
|
|
|
println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
|
|
|
|
|
|
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
|
|
|
|
|
|
println!("{}", "=".repeat(60));
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_trace(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("trace requires a key".into());
|
|
|
|
|
|
}
|
|
|
|
|
|
let key = key.join(" ");
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
|
|
let node = store.nodes.get(&resolved)
|
|
|
|
|
|
.ok_or_else(|| format!("Node not found: {}", resolved))?;
|
|
|
|
|
|
|
|
|
|
|
|
// Display the node itself
|
|
|
|
|
|
println!("=== {} ===", resolved);
|
|
|
|
|
|
println!("Type: {:?} Weight: {:.2}",
|
|
|
|
|
|
node.node_type, node.weight);
|
|
|
|
|
|
if !node.source_ref.is_empty() {
|
|
|
|
|
|
println!("Source: {}", node.source_ref);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Show content preview
|
|
|
|
|
|
let preview = crate::util::truncate(&node.content, 200, "...");
|
|
|
|
|
|
println!("\n{}\n", preview);
|
|
|
|
|
|
|
|
|
|
|
|
// Walk neighbors, grouped by node type
|
|
|
|
|
|
let neighbors = g.neighbors(&resolved);
|
|
|
|
|
|
let mut episodic_session = Vec::new();
|
|
|
|
|
|
let mut episodic_daily = Vec::new();
|
|
|
|
|
|
let mut episodic_weekly = Vec::new();
|
|
|
|
|
|
let mut semantic = Vec::new();
|
|
|
|
|
|
|
|
|
|
|
|
for (n, strength) in &neighbors {
|
|
|
|
|
|
if let Some(nnode) = store.nodes.get(n.as_str()) {
|
|
|
|
|
|
let entry = (n.as_str(), *strength, nnode);
|
|
|
|
|
|
match nnode.node_type {
|
|
|
|
|
|
store::NodeType::EpisodicSession =>
|
|
|
|
|
|
episodic_session.push(entry),
|
|
|
|
|
|
store::NodeType::EpisodicDaily =>
|
|
|
|
|
|
episodic_daily.push(entry),
|
|
|
|
|
|
store::NodeType::EpisodicWeekly
|
|
|
|
|
|
| store::NodeType::EpisodicMonthly =>
|
|
|
|
|
|
episodic_weekly.push(entry),
|
|
|
|
|
|
store::NodeType::Semantic =>
|
|
|
|
|
|
semantic.push(entry),
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if !episodic_weekly.is_empty() {
|
|
|
|
|
|
println!("Weekly digests:");
|
|
|
|
|
|
for (k, s, n) in &episodic_weekly {
|
|
|
|
|
|
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
|
|
|
|
|
|
println!(" [{:.2}] {} — {}", s, k, preview);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if !episodic_daily.is_empty() {
|
|
|
|
|
|
println!("Daily digests:");
|
|
|
|
|
|
for (k, s, n) in &episodic_daily {
|
|
|
|
|
|
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
|
|
|
|
|
|
println!(" [{:.2}] {} — {}", s, k, preview);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if !episodic_session.is_empty() {
|
|
|
|
|
|
println!("Session entries:");
|
|
|
|
|
|
for (k, s, n) in &episodic_session {
|
|
|
|
|
|
let preview = crate::util::first_n_chars(
|
|
|
|
|
|
n.content.lines()
|
|
|
|
|
|
.find(|l| !l.is_empty() && !l.starts_with("<!--"))
|
|
|
|
|
|
.unwrap_or(""),
|
|
|
|
|
|
80);
|
|
|
|
|
|
println!(" [{:.2}] {}", s, k);
|
|
|
|
|
|
if !n.source_ref.is_empty() {
|
|
|
|
|
|
println!(" ↳ source: {}", n.source_ref);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!(" {}", preview);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if !semantic.is_empty() {
|
|
|
|
|
|
println!("Semantic links:");
|
|
|
|
|
|
for (k, s, _) in &semantic {
|
|
|
|
|
|
println!(" [{:.2}] {}", s, k);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
|
|
|
|
|
|
episodic_session.len(), episodic_daily.len(),
|
|
|
|
|
|
episodic_weekly.len(), semantic.len());
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_organize(term: &str, threshold: f32, key_only: bool, create_anchor: bool) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
|
|
|
|
|
|
// Step 1: find all non-deleted nodes matching the term
|
|
|
|
|
|
let term_lower = term.to_lowercase();
|
|
|
|
|
|
let mut topic_nodes: Vec<(String, String)> = Vec::new(); // (key, content)
|
|
|
|
|
|
|
2026-03-26 19:11:17 -04:00
|
|
|
|
let skip_prefixes = ["_", "deep-index#", "facts-", "irc-history#"];
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
|
|
|
|
|
|
for (key, node) in &store.nodes {
|
|
|
|
|
|
if node.deleted { continue; }
|
2026-03-26 19:11:17 -04:00
|
|
|
|
// Skip episodic/digest nodes — use NodeType, not key prefix
|
|
|
|
|
|
if node.node_type != crate::store::NodeType::Semantic { continue; }
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
let key_matches = key.to_lowercase().contains(&term_lower);
|
|
|
|
|
|
let content_matches = !key_only && node.content.to_lowercase().contains(&term_lower);
|
|
|
|
|
|
if !key_matches && !content_matches { continue; }
|
|
|
|
|
|
if skip_prefixes.iter().any(|p| key.starts_with(p)) { continue; }
|
|
|
|
|
|
topic_nodes.push((key.clone(), node.content.clone()));
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if topic_nodes.is_empty() {
|
|
|
|
|
|
println!("No topic nodes found matching '{}'", term);
|
|
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
topic_nodes.sort_by(|a, b| a.0.cmp(&b.0));
|
|
|
|
|
|
|
|
|
|
|
|
println!("=== Organize: '{}' ===", term);
|
|
|
|
|
|
println!("Found {} topic nodes:\n", topic_nodes.len());
|
|
|
|
|
|
for (key, content) in &topic_nodes {
|
|
|
|
|
|
let lines = content.lines().count();
|
|
|
|
|
|
let words = content.split_whitespace().count();
|
|
|
|
|
|
println!(" {:60} {:>4} lines {:>5} words", key, lines, words);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
Delete similarity module, rewrite module, and all text-similarity code
Text cosine similarity was being used as a crutch for operations
the graph structure should handle: interference detection, orphan
linking, triangle closing, hub differentiation. These are all
graph-structural operations that the agents (linker, extractor)
handle with actual semantic understanding.
Removed: similarity.rs (stemming + cosine), rewrite.rs (orphan
linking, triangle closing, hub differentiation), detect_interference,
and all CLI commands and consolidation steps that used them.
-794 lines.
Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-10 15:44:10 -04:00
|
|
|
|
// Step 2: check connectivity within cluster
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("=== Connectivity ===\n");
|
|
|
|
|
|
|
|
|
|
|
|
// Pick hub by intra-cluster connectivity, not overall degree
|
|
|
|
|
|
let cluster_keys: std::collections::HashSet<&str> = topic_nodes.iter()
|
|
|
|
|
|
.filter(|(k,_)| store.nodes.contains_key(k.as_str()))
|
|
|
|
|
|
.map(|(k,_)| k.as_str())
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
let mut best_hub: Option<(&str, usize)> = None;
|
|
|
|
|
|
for key in &cluster_keys {
|
|
|
|
|
|
let intra_degree = g.neighbor_keys(key).iter()
|
|
|
|
|
|
.filter(|n| cluster_keys.contains(*n))
|
|
|
|
|
|
.count();
|
|
|
|
|
|
if best_hub.is_none() || intra_degree > best_hub.unwrap().1 {
|
|
|
|
|
|
best_hub = Some((key, intra_degree));
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if let Some((hub, deg)) = best_hub {
|
|
|
|
|
|
println!(" Hub: {} (degree {})", hub, deg);
|
|
|
|
|
|
let hub_nbrs = g.neighbor_keys(hub);
|
|
|
|
|
|
|
|
|
|
|
|
let mut unlinked = Vec::new();
|
|
|
|
|
|
for (key, _) in &topic_nodes {
|
|
|
|
|
|
if key == hub { continue; }
|
|
|
|
|
|
if store.nodes.get(key.as_str()).is_none() { continue; }
|
|
|
|
|
|
if !hub_nbrs.contains(key.as_str()) {
|
|
|
|
|
|
unlinked.push(key.clone());
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if unlinked.is_empty() {
|
|
|
|
|
|
println!(" All cluster nodes connected to hub ✓");
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!(" NOT linked to hub:");
|
|
|
|
|
|
for key in &unlinked {
|
|
|
|
|
|
println!(" {} → needs link to {}", key, hub);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Step 4: anchor node
|
|
|
|
|
|
if create_anchor {
|
|
|
|
|
|
println!("\n=== Anchor node ===\n");
|
|
|
|
|
|
if store.nodes.contains_key(term) && !store.nodes[term].deleted {
|
|
|
|
|
|
println!(" Anchor '{}' already exists ✓", term);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
let desc = format!("Anchor node for '{}' search term", term);
|
|
|
|
|
|
store.upsert(term, &desc)?;
|
|
|
|
|
|
let anchor_uuid = store.nodes.get(term).unwrap().uuid;
|
|
|
|
|
|
for (key, _) in &topic_nodes {
|
|
|
|
|
|
if store.nodes.get(key.as_str()).is_none() { continue; }
|
|
|
|
|
|
let target_uuid = store.nodes[key.as_str()].uuid;
|
|
|
|
|
|
let rel = store::new_relation(
|
|
|
|
|
|
anchor_uuid, target_uuid,
|
|
|
|
|
|
store::RelationType::Link, 0.8,
|
|
|
|
|
|
term, key,
|
|
|
|
|
|
);
|
|
|
|
|
|
store.add_relation(rel)?;
|
|
|
|
|
|
}
|
|
|
|
|
|
println!(" Created anchor '{}' with {} links", term, topic_nodes.len());
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-20 12:55:14 -04:00
|
|
|
|
/// Show communities sorted by isolation (most isolated first).
|
|
|
|
|
|
/// Useful for finding poorly-integrated knowledge clusters that need
|
|
|
|
|
|
/// organize agents aimed at them.
|
|
|
|
|
|
pub fn cmd_communities(top_n: usize, min_size: usize) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
let infos = g.community_info();
|
|
|
|
|
|
|
|
|
|
|
|
let total = infos.len();
|
|
|
|
|
|
let shown: Vec<_> = infos.into_iter()
|
|
|
|
|
|
.filter(|c| c.size >= min_size)
|
|
|
|
|
|
.take(top_n)
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
println!("{} communities total ({} with size >= {})\n",
|
|
|
|
|
|
total, shown.len(), min_size);
|
|
|
|
|
|
println!("{:<6} {:>5} {:>7} {:>7} members", "id", "size", "iso", "cross");
|
|
|
|
|
|
println!("{}", "-".repeat(70));
|
|
|
|
|
|
|
|
|
|
|
|
for c in &shown {
|
|
|
|
|
|
let preview: Vec<&str> = c.members.iter()
|
|
|
|
|
|
.take(5)
|
|
|
|
|
|
.map(|s| s.as_str())
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
let more = if c.size > 5 {
|
|
|
|
|
|
format!(" +{}", c.size - 5)
|
|
|
|
|
|
} else {
|
|
|
|
|
|
String::new()
|
|
|
|
|
|
};
|
|
|
|
|
|
println!("{:<6} {:>5} {:>6.0}% {:>7} {}{}",
|
|
|
|
|
|
c.id, c.size, c.isolation * 100.0, c.cross_edges,
|
|
|
|
|
|
preview.join(", "), more);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|