cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
// cli/graph.rs — graph subcommand handlers
|
|
|
|
|
|
//
|
|
|
|
|
|
// Extracted from main.rs. All graph-related CLI commands:
|
|
|
|
|
|
// link, link-add, link-impact, link-audit, link-orphans,
|
|
|
|
|
|
// triangle-close, cap-degree, normalize-strengths, differentiate,
|
|
|
|
|
|
// trace, spectral-*, organize, interference.
|
|
|
|
|
|
|
|
|
|
|
|
use crate::{store, graph, neuro, spectral};
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_graph() -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("Graph: {} nodes, {} edges, {} communities",
|
|
|
|
|
|
g.nodes().len(), g.edge_count(), g.community_count());
|
|
|
|
|
|
println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
|
|
|
|
|
|
g.small_world_sigma(), g.degree_power_law_exponent(),
|
|
|
|
|
|
g.degree_gini(), g.avg_clustering_coefficient());
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
|
|
|
|
|
|
println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
|
|
|
|
|
|
orphans, links, min_deg, links_per, sim_thresh);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let (hubs, pruned) = store.cap_degree(max_deg)?;
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let graph = store.build_graph();
|
|
|
|
|
|
let strengths = graph.jaccard_strengths();
|
|
|
|
|
|
|
|
|
|
|
|
// Build a lookup from (source_key, target_key) → new_strength
|
|
|
|
|
|
let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
|
|
|
|
|
|
for (a, b, s) in &strengths {
|
|
|
|
|
|
// Store both directions for easy lookup
|
|
|
|
|
|
updates.insert((a.clone(), b.clone()), *s);
|
|
|
|
|
|
updates.insert((b.clone(), a.clone()), *s);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Stats
|
|
|
|
|
|
let mut changed = 0usize;
|
|
|
|
|
|
let mut unchanged = 0usize;
|
|
|
|
|
|
let mut temporal_skipped = 0usize;
|
|
|
|
|
|
let mut delta_sum: f64 = 0.0;
|
|
|
|
|
|
|
|
|
|
|
|
// Histogram of new strengths
|
|
|
|
|
|
let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...
|
|
|
|
|
|
|
|
|
|
|
|
for rel in &mut store.relations {
|
|
|
|
|
|
if rel.deleted { continue; }
|
|
|
|
|
|
|
|
|
|
|
|
// Skip implicit temporal edges (strength 1.0, Auto type)
|
|
|
|
|
|
if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
|
|
|
|
|
|
temporal_skipped += 1;
|
|
|
|
|
|
continue;
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
|
|
|
|
|
|
let old_s = rel.strength;
|
|
|
|
|
|
let delta = (new_s - old_s).abs();
|
|
|
|
|
|
if delta > 0.001 {
|
|
|
|
|
|
delta_sum += delta as f64;
|
|
|
|
|
|
if apply {
|
|
|
|
|
|
rel.strength = new_s;
|
|
|
|
|
|
}
|
|
|
|
|
|
changed += 1;
|
|
|
|
|
|
} else {
|
|
|
|
|
|
unchanged += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
let bucket = ((new_s * 10.0) as usize).min(9);
|
|
|
|
|
|
buckets[bucket] += 1;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("Normalize link strengths (Jaccard similarity)");
|
|
|
|
|
|
println!(" Total edges in graph: {}", strengths.len());
|
|
|
|
|
|
println!(" Would change: {}", changed);
|
|
|
|
|
|
println!(" Unchanged: {}", unchanged);
|
|
|
|
|
|
println!(" Temporal (skipped): {}", temporal_skipped);
|
|
|
|
|
|
if changed > 0 {
|
|
|
|
|
|
println!(" Avg delta: {:.3}", delta_sum / changed as f64);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!();
|
|
|
|
|
|
println!(" Strength distribution:");
|
|
|
|
|
|
for (i, &count) in buckets.iter().enumerate() {
|
|
|
|
|
|
let lo = i as f32 / 10.0;
|
|
|
|
|
|
let hi = lo + 0.1;
|
|
|
|
|
|
let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
|
|
|
|
|
|
println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if apply {
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("\nApplied {} strength updates.", changed);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!("\nDry run. Use --apply to write changes.");
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_link(key: &[String]) -> Result<(), String> {
|
|
|
|
|
|
if key.is_empty() {
|
|
|
|
|
|
return Err("link requires a key".into());
|
|
|
|
|
|
}
|
|
|
|
|
|
let key = key.join(" ");
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let resolved = store.resolve_key(&key)?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("Neighbors of '{}':", resolved);
|
|
|
|
|
|
crate::query_parser::run_query(&store, &g,
|
|
|
|
|
|
&format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
|
|
|
|
|
|
println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
|
|
|
|
|
|
min_degree, sim_threshold, max_per_hub);
|
|
|
|
|
|
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
|
|
|
|
|
|
println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
|
poc-memory: POC_MEMORY_DRY_RUN=1 for agent testing
All mutating commands (write, delete, rename, link-add, journal write,
used, wrong, not-useful, gap) check POC_MEMORY_DRY_RUN after argument
validation but before mutation. If set, process exits silently — agent
tool calls are visible in the LLM output so we can see what it tried
to do without applying changes.
Read commands (render, search, graph link, journal tail) work normally
in dry-run mode so agents can still explore the graph.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-16 18:09:56 -04:00
|
|
|
|
super::check_dry_run();
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
|
|
|
|
|
let reason = reason.join(" ");
|
|
|
|
|
|
|
|
|
|
|
|
// Refine target to best-matching section
|
|
|
|
|
|
let source_content = store.nodes.get(&source)
|
|
|
|
|
|
.map(|n| n.content.as_str()).unwrap_or("");
|
|
|
|
|
|
let target = neuro::refine_target(&store, source_content, &target);
|
|
|
|
|
|
|
|
|
|
|
|
// Find UUIDs
|
|
|
|
|
|
let source_uuid = store.nodes.get(&source)
|
|
|
|
|
|
.map(|n| n.uuid)
|
|
|
|
|
|
.ok_or_else(|| format!("source not found: {}", source))?;
|
|
|
|
|
|
let target_uuid = store.nodes.get(&target)
|
|
|
|
|
|
.map(|n| n.uuid)
|
|
|
|
|
|
.ok_or_else(|| format!("target not found: {}", target))?;
|
|
|
|
|
|
|
|
|
|
|
|
// Check for existing link
|
|
|
|
|
|
let exists = store.relations.iter().any(|r|
|
|
|
|
|
|
!r.deleted &&
|
|
|
|
|
|
((r.source_key == source && r.target_key == target) ||
|
|
|
|
|
|
(r.source_key == target && r.target_key == source)));
|
|
|
|
|
|
if exists {
|
|
|
|
|
|
println!("Link already exists: {} ↔ {}", source, target);
|
|
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Compute initial strength from Jaccard neighborhood similarity
|
|
|
|
|
|
let graph = store.build_graph();
|
|
|
|
|
|
let jaccard = graph.jaccard(&source, &target);
|
|
|
|
|
|
let strength = (jaccard * 3.0).clamp(0.1, 1.0);
|
|
|
|
|
|
|
|
|
|
|
|
let rel = store::new_relation(
|
|
|
|
|
|
source_uuid, target_uuid,
|
|
|
|
|
|
store::RelationType::Link, strength,
|
|
|
|
|
|
&source, &target,
|
|
|
|
|
|
);
|
|
|
|
|
|
store.add_relation(rel)?;
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
println!("Linked: {} → {} (strength={:.2}, {})", source, target, strength, reason);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-17 01:39:41 -04:00
|
|
|
|
pub fn cmd_link_set(source: &str, target: &str, strength: f32) -> Result<(), String> {
|
|
|
|
|
|
super::check_dry_run();
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
|
|
|
|
|
let strength = strength.clamp(0.01, 1.0);
|
|
|
|
|
|
|
|
|
|
|
|
let mut found = false;
|
2026-03-19 22:58:54 -04:00
|
|
|
|
let mut first = true;
|
2026-03-17 01:39:41 -04:00
|
|
|
|
for rel in &mut store.relations {
|
|
|
|
|
|
if rel.deleted { continue; }
|
|
|
|
|
|
if (rel.source_key == source && rel.target_key == target)
|
|
|
|
|
|
|| (rel.source_key == target && rel.target_key == source)
|
|
|
|
|
|
{
|
2026-03-19 22:58:54 -04:00
|
|
|
|
if first {
|
|
|
|
|
|
let old = rel.strength;
|
|
|
|
|
|
rel.strength = strength;
|
|
|
|
|
|
println!("Set: {} ↔ {} strength {:.2} → {:.2}", source, target, old, strength);
|
|
|
|
|
|
first = false;
|
|
|
|
|
|
} else {
|
|
|
|
|
|
// Duplicate — mark deleted
|
|
|
|
|
|
rel.deleted = true;
|
|
|
|
|
|
println!(" (removed duplicate link)");
|
|
|
|
|
|
}
|
2026-03-17 01:39:41 -04:00
|
|
|
|
found = true;
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if !found {
|
|
|
|
|
|
return Err(format!("No link found between {} and {}", source, target));
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs:
link, link-add, link-impact, link-audit, link-orphans,
triangle-close, cap-degree, normalize-strengths, differentiate,
trace, spectral-*, organize, interference.
main.rs: 3130 → 2518 lines.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-03-14 17:59:46 -04:00
|
|
|
|
pub fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let source = store.resolve_key(source)?;
|
|
|
|
|
|
let target = store.resolve_key(target)?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
|
|
|
|
|
|
let impact = g.link_impact(&source, &target);
|
|
|
|
|
|
|
|
|
|
|
|
println!("Link impact: {} → {}", source, target);
|
|
|
|
|
|
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
|
|
|
|
|
|
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
|
|
|
|
|
|
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
|
|
|
|
|
|
println!(" ΔGini: {:+.6}", impact.delta_gini);
|
|
|
|
|
|
println!(" Assessment: {}", impact.assessment);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// Preview or apply "differentiation" of a file-level hub node: move its
/// links onto the best-matching section nodes.
///
/// With `key_arg = Some(key)`: show the planned link moves for that hub,
/// grouped by target section; with `do_apply` also write them back and
/// save. With `key_arg = None`: list all hubs that qualify.
pub fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;

    if let Some(key) = key_arg {
        let resolved = store.resolve_key(key)?;
        // None from differentiate_hub means the node is not a file-level
        // hub with sections — surfaced as a user-facing error.
        let moves = neuro::differentiate_hub(&store, &resolved)
            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;

        // Group by target section for display (BTreeMap → sorted sections)
        let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
            std::collections::BTreeMap::new();
        for mv in &moves {
            by_section.entry(mv.to_section.clone()).or_default().push(mv);
        }

        println!("Hub '{}' — {} links to redistribute across {} sections\n",
            resolved, moves.len(), by_section.len());

        for (section, section_moves) in &by_section {
            println!("  {} ({} links):", section, section_moves.len());
            // Show at most 5 example moves per section.
            for mv in section_moves.iter().take(5) {
                println!("    [{:.3}] {} — {}", mv.similarity,
                    mv.neighbor_key, mv.neighbor_snippet);
            }
            if section_moves.len() > 5 {
                println!("    ... and {} more", section_moves.len() - 5);
            }
        }

        // Preview mode: stop before mutating anything.
        if !do_apply {
            println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
            return Ok(());
        }

        let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
        store.save()?;
        println!("\nApplied: {}  Skipped: {}", applied, skipped);
    } else {
        // No key given: list candidate hubs instead of acting on one.
        let hubs = neuro::find_differentiable_hubs(&store);
        if hubs.is_empty() {
            println!("No file-level hubs with sections found above threshold");
            return Ok(());
        }

        println!("Differentiable hubs (file-level nodes with sections):\n");
        for (key, degree, sections) in &hubs {
            println!("  {:40} deg={:3} sections={}", key, degree, sections);
        }
        println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
    }

    Ok(())
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_link_audit(apply: bool) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
let stats = crate::audit::link_audit(&mut store, apply)?;
|
|
|
|
|
|
println!("\n{}", "=".repeat(60));
|
|
|
|
|
|
println!("Link audit complete:");
|
|
|
|
|
|
println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
|
|
|
|
|
|
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
|
|
|
|
|
|
println!("{}", "=".repeat(60));
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// Trace a node: print its metadata and a content preview, then its
/// neighbors grouped by memory type (session / daily / weekly+monthly /
/// semantic), ending with a per-group count summary.
pub fn cmd_trace(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("trace requires a key".into());
    }
    let key = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();

    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;

    // Display the node itself
    println!("=== {} ===", resolved);
    println!("Type: {:?}  Weight: {:.2}",
        node.node_type, node.weight);
    if !node.source_ref.is_empty() {
        println!("Source: {}", node.source_ref);
    }

    // Show content preview (first 200 chars, ellipsized)
    let preview = crate::util::truncate(&node.content, 200, "...");
    println!("\n{}\n", preview);

    // Walk neighbors, grouped by node type
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    // NOTE: monthly digests are grouped with weekly here, and are counted
    // under "weekly" in the summary line at the bottom.
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();

    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            match nnode.node_type {
                store::NodeType::EpisodicSession =>
                    episodic_session.push(entry),
                store::NodeType::EpisodicDaily =>
                    episodic_daily.push(entry),
                store::NodeType::EpisodicWeekly
                | store::NodeType::EpisodicMonthly =>
                    episodic_weekly.push(entry),
                store::NodeType::Semantic =>
                    semantic.push(entry),
            }
        }
    }

    if !episodic_weekly.is_empty() {
        println!("Weekly digests:");
        for (k, s, n) in &episodic_weekly {
            // First line of the digest as a short preview (80 chars max).
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!("  [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_daily.is_empty() {
        println!("Daily digests:");
        for (k, s, n) in &episodic_daily {
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!("  [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_session.is_empty() {
        println!("Session entries:");
        for (k, s, n) in &episodic_session {
            // Preview from the first line that is neither empty nor an
            // HTML comment (session entries may start with `<!--` metadata).
            let preview = crate::util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            println!("  [{:.2}] {}", s, k);
            if !n.source_ref.is_empty() {
                println!("      ↳ source: {}", n.source_ref);
            }
            println!("      {}", preview);
        }
    }

    if !semantic.is_empty() {
        println!("Semantic links:");
        for (k, s, _) in &semantic {
            println!("  [{:.2}] {}", s, k);
        }
    }

    println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len());

    Ok(())
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_spectral(k: usize) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let g = graph::build_graph(&store);
|
|
|
|
|
|
let result = spectral::decompose(&g, k);
|
|
|
|
|
|
spectral::print_summary(&result, &g);
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_spectral_save(k: usize) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let g = graph::build_graph(&store);
|
|
|
|
|
|
let result = spectral::decompose(&g, k);
|
|
|
|
|
|
let emb = spectral::to_embedding(&result);
|
|
|
|
|
|
spectral::save_embedding(&emb)?;
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_spectral_neighbors(key: &str, n: usize) -> Result<(), String> {
|
|
|
|
|
|
let emb = spectral::load_embedding()?;
|
|
|
|
|
|
|
|
|
|
|
|
let dims = spectral::dominant_dimensions(&emb, &[key]);
|
|
|
|
|
|
println!("Node: {} (embedding: {} dims)", key, emb.dims);
|
|
|
|
|
|
println!("Top spectral axes:");
|
|
|
|
|
|
for &(d, loading) in dims.iter().take(5) {
|
|
|
|
|
|
println!(" axis {:<2} (λ={:.4}): loading={:.5}", d, emb.eigenvalues[d], loading);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("\nNearest neighbors in spectral space:");
|
|
|
|
|
|
let neighbors = spectral::nearest_neighbors(&emb, key, n);
|
|
|
|
|
|
for (i, (k, dist)) in neighbors.iter().enumerate() {
|
|
|
|
|
|
println!(" {:>2}. {:.5} {}", i + 1, dist, k);
|
|
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_spectral_positions(n: usize) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let emb = spectral::load_embedding()?;
|
|
|
|
|
|
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
let communities = g.communities().clone();
|
|
|
|
|
|
|
|
|
|
|
|
let positions = spectral::analyze_positions(&emb, &communities);
|
|
|
|
|
|
|
|
|
|
|
|
println!("Spectral position analysis — {} nodes", positions.len());
|
|
|
|
|
|
println!(" outlier: dist_to_center / median (>1 = unusual position)");
|
|
|
|
|
|
println!(" bridge: dist_to_center / dist_to_nearest_other_community");
|
|
|
|
|
|
println!();
|
|
|
|
|
|
|
|
|
|
|
|
let mut bridges: Vec<&spectral::SpectralPosition> = Vec::new();
|
|
|
|
|
|
let mut outliers: Vec<&spectral::SpectralPosition> = Vec::new();
|
|
|
|
|
|
|
|
|
|
|
|
for pos in positions.iter().take(n) {
|
|
|
|
|
|
match spectral::classify_position(pos) {
|
|
|
|
|
|
"bridge" => bridges.push(pos),
|
|
|
|
|
|
_ => outliers.push(pos),
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if !bridges.is_empty() {
|
|
|
|
|
|
println!("=== Bridges (between communities) ===");
|
|
|
|
|
|
for pos in &bridges {
|
|
|
|
|
|
println!(" [{:.2}/{:.2}] c{} → c{} {}",
|
|
|
|
|
|
pos.outlier_score, pos.bridge_score,
|
|
|
|
|
|
pos.community, pos.nearest_community, pos.key);
|
|
|
|
|
|
}
|
|
|
|
|
|
println!();
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
println!("=== Top outliers (far from own community center) ===");
|
|
|
|
|
|
for pos in positions.iter().take(n) {
|
|
|
|
|
|
let class = spectral::classify_position(pos);
|
|
|
|
|
|
println!(" {:>10} outlier={:.2} bridge={:.2} c{:<3} {}",
|
|
|
|
|
|
class, pos.outlier_score, pos.bridge_score,
|
|
|
|
|
|
pos.community, pos.key);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_spectral_suggest(n: usize) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let emb = spectral::load_embedding()?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
let communities = g.communities();
|
|
|
|
|
|
|
|
|
|
|
|
let min_degree = 3;
|
|
|
|
|
|
let well_connected: std::collections::HashSet<&str> = emb.coords.keys()
|
|
|
|
|
|
.filter(|k| g.degree(k) >= min_degree)
|
|
|
|
|
|
.map(|k| k.as_str())
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
let filtered_emb = spectral::SpectralEmbedding {
|
|
|
|
|
|
dims: emb.dims,
|
|
|
|
|
|
eigenvalues: emb.eigenvalues.clone(),
|
|
|
|
|
|
coords: emb.coords.iter()
|
|
|
|
|
|
.filter(|(k, _)| well_connected.contains(k.as_str()))
|
|
|
|
|
|
.map(|(k, v)| (k.clone(), v.clone()))
|
|
|
|
|
|
.collect(),
|
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
let mut linked: std::collections::HashSet<(String, String)> =
|
|
|
|
|
|
std::collections::HashSet::new();
|
|
|
|
|
|
for rel in &store.relations {
|
|
|
|
|
|
linked.insert((rel.source_key.clone(), rel.target_key.clone()));
|
|
|
|
|
|
linked.insert((rel.target_key.clone(), rel.source_key.clone()));
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
eprintln!("Searching {} well-connected nodes (degree >= {})...",
|
|
|
|
|
|
filtered_emb.coords.len(), min_degree);
|
|
|
|
|
|
let pairs = spectral::unlinked_neighbors(&filtered_emb, &linked, n);
|
|
|
|
|
|
|
|
|
|
|
|
println!("{} closest unlinked pairs (candidates for extractor agents):", pairs.len());
|
|
|
|
|
|
for (i, (k1, k2, dist)) in pairs.iter().enumerate() {
|
|
|
|
|
|
let c1 = communities.get(k1)
|
|
|
|
|
|
.map(|c| format!("c{}", c))
|
|
|
|
|
|
.unwrap_or_else(|| "?".into());
|
|
|
|
|
|
let c2 = communities.get(k2)
|
|
|
|
|
|
.map(|c| format!("c{}", c))
|
|
|
|
|
|
.unwrap_or_else(|| "?".into());
|
|
|
|
|
|
let cross = if c1 != c2 { " [cross-community]" } else { "" };
|
|
|
|
|
|
println!(" {:>2}. dist={:.4} {} ({}) ↔ {} ({}){}",
|
|
|
|
|
|
i + 1, dist, k1, c1, k2, c2, cross);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_organize(term: &str, threshold: f32, key_only: bool, create_anchor: bool) -> Result<(), String> {
|
|
|
|
|
|
let mut store = store::Store::load()?;
|
|
|
|
|
|
|
|
|
|
|
|
// Step 1: find all non-deleted nodes matching the term
|
|
|
|
|
|
let term_lower = term.to_lowercase();
|
|
|
|
|
|
let mut topic_nodes: Vec<(String, String)> = Vec::new(); // (key, content)
|
|
|
|
|
|
|
|
|
|
|
|
// Prefixes that indicate ephemeral/generated nodes to skip
|
|
|
|
|
|
let skip_prefixes = ["journal#", "daily-", "weekly-", "monthly-", "_",
|
|
|
|
|
|
"deep-index#", "facts-", "irc-history#"];
|
|
|
|
|
|
|
|
|
|
|
|
for (key, node) in &store.nodes {
|
|
|
|
|
|
if node.deleted { continue; }
|
|
|
|
|
|
let key_matches = key.to_lowercase().contains(&term_lower);
|
|
|
|
|
|
let content_matches = !key_only && node.content.to_lowercase().contains(&term_lower);
|
|
|
|
|
|
if !key_matches && !content_matches { continue; }
|
|
|
|
|
|
if skip_prefixes.iter().any(|p| key.starts_with(p)) { continue; }
|
|
|
|
|
|
topic_nodes.push((key.clone(), node.content.clone()));
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if topic_nodes.is_empty() {
|
|
|
|
|
|
println!("No topic nodes found matching '{}'", term);
|
|
|
|
|
|
return Ok(());
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
topic_nodes.sort_by(|a, b| a.0.cmp(&b.0));
|
|
|
|
|
|
|
|
|
|
|
|
println!("=== Organize: '{}' ===", term);
|
|
|
|
|
|
println!("Found {} topic nodes:\n", topic_nodes.len());
|
|
|
|
|
|
for (key, content) in &topic_nodes {
|
|
|
|
|
|
let lines = content.lines().count();
|
|
|
|
|
|
let words = content.split_whitespace().count();
|
|
|
|
|
|
println!(" {:60} {:>4} lines {:>5} words", key, lines, words);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Step 2: pairwise similarity
|
|
|
|
|
|
let pairs = crate::similarity::pairwise_similar(&topic_nodes, threshold);
|
|
|
|
|
|
|
|
|
|
|
|
if pairs.is_empty() {
|
|
|
|
|
|
println!("\nNo similar pairs above threshold {:.2}", threshold);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!("\n=== Similar pairs (cosine > {:.2}) ===\n", threshold);
|
|
|
|
|
|
for (a, b, sim) in &pairs {
|
|
|
|
|
|
let a_words = topic_nodes.iter().find(|(k,_)| k == a)
|
|
|
|
|
|
.map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
|
|
|
|
|
|
let b_words = topic_nodes.iter().find(|(k,_)| k == b)
|
|
|
|
|
|
.map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
|
|
|
|
|
|
|
|
|
|
|
|
println!(" [{:.3}] {} ({} words) ↔ {} ({} words)", sim, a, a_words, b, b_words);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Step 3: check connectivity within cluster
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
println!("=== Connectivity ===\n");
|
|
|
|
|
|
|
|
|
|
|
|
// Pick hub by intra-cluster connectivity, not overall degree
|
|
|
|
|
|
let cluster_keys: std::collections::HashSet<&str> = topic_nodes.iter()
|
|
|
|
|
|
.filter(|(k,_)| store.nodes.contains_key(k.as_str()))
|
|
|
|
|
|
.map(|(k,_)| k.as_str())
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
let mut best_hub: Option<(&str, usize)> = None;
|
|
|
|
|
|
for key in &cluster_keys {
|
|
|
|
|
|
let intra_degree = g.neighbor_keys(key).iter()
|
|
|
|
|
|
.filter(|n| cluster_keys.contains(*n))
|
|
|
|
|
|
.count();
|
|
|
|
|
|
if best_hub.is_none() || intra_degree > best_hub.unwrap().1 {
|
|
|
|
|
|
best_hub = Some((key, intra_degree));
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if let Some((hub, deg)) = best_hub {
|
|
|
|
|
|
println!(" Hub: {} (degree {})", hub, deg);
|
|
|
|
|
|
let hub_nbrs = g.neighbor_keys(hub);
|
|
|
|
|
|
|
|
|
|
|
|
let mut unlinked = Vec::new();
|
|
|
|
|
|
for (key, _) in &topic_nodes {
|
|
|
|
|
|
if key == hub { continue; }
|
|
|
|
|
|
if store.nodes.get(key.as_str()).is_none() { continue; }
|
|
|
|
|
|
if !hub_nbrs.contains(key.as_str()) {
|
|
|
|
|
|
unlinked.push(key.clone());
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if unlinked.is_empty() {
|
|
|
|
|
|
println!(" All cluster nodes connected to hub ✓");
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!(" NOT linked to hub:");
|
|
|
|
|
|
for key in &unlinked {
|
|
|
|
|
|
println!(" {} → needs link to {}", key, hub);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Step 4: anchor node
|
|
|
|
|
|
if create_anchor {
|
|
|
|
|
|
println!("\n=== Anchor node ===\n");
|
|
|
|
|
|
if store.nodes.contains_key(term) && !store.nodes[term].deleted {
|
|
|
|
|
|
println!(" Anchor '{}' already exists ✓", term);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
let desc = format!("Anchor node for '{}' search term", term);
|
|
|
|
|
|
store.upsert(term, &desc)?;
|
|
|
|
|
|
let anchor_uuid = store.nodes.get(term).unwrap().uuid;
|
|
|
|
|
|
for (key, _) in &topic_nodes {
|
|
|
|
|
|
if store.nodes.get(key.as_str()).is_none() { continue; }
|
|
|
|
|
|
let target_uuid = store.nodes[key.as_str()].uuid;
|
|
|
|
|
|
let rel = store::new_relation(
|
|
|
|
|
|
anchor_uuid, target_uuid,
|
|
|
|
|
|
store::RelationType::Link, 0.8,
|
|
|
|
|
|
term, key,
|
|
|
|
|
|
);
|
|
|
|
|
|
store.add_relation(rel)?;
|
|
|
|
|
|
}
|
|
|
|
|
|
println!(" Created anchor '{}' with {} links", term, topic_nodes.len());
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
store.save()?;
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
pub fn cmd_interference(threshold: f32) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
let pairs = neuro::detect_interference(&store, &g, threshold);
|
|
|
|
|
|
|
|
|
|
|
|
if pairs.is_empty() {
|
|
|
|
|
|
println!("No interfering pairs above threshold {:.2}", threshold);
|
|
|
|
|
|
} else {
|
|
|
|
|
|
println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
|
|
|
|
|
|
for (a, b, sim) in &pairs {
|
|
|
|
|
|
println!(" [{:.3}] {} ↔ {}", sim, a, b);
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2026-03-20 12:55:14 -04:00
|
|
|
|
/// Show communities sorted by isolation (most isolated first).
|
|
|
|
|
|
/// Useful for finding poorly-integrated knowledge clusters that need
|
|
|
|
|
|
/// organize agents aimed at them.
|
|
|
|
|
|
pub fn cmd_communities(top_n: usize, min_size: usize) -> Result<(), String> {
|
|
|
|
|
|
let store = store::Store::load()?;
|
|
|
|
|
|
let g = store.build_graph();
|
|
|
|
|
|
let infos = g.community_info();
|
|
|
|
|
|
|
|
|
|
|
|
let total = infos.len();
|
|
|
|
|
|
let shown: Vec<_> = infos.into_iter()
|
|
|
|
|
|
.filter(|c| c.size >= min_size)
|
|
|
|
|
|
.take(top_n)
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
|
|
|
|
println!("{} communities total ({} with size >= {})\n",
|
|
|
|
|
|
total, shown.len(), min_size);
|
|
|
|
|
|
println!("{:<6} {:>5} {:>7} {:>7} members", "id", "size", "iso", "cross");
|
|
|
|
|
|
println!("{}", "-".repeat(70));
|
|
|
|
|
|
|
|
|
|
|
|
for c in &shown {
|
|
|
|
|
|
let preview: Vec<&str> = c.members.iter()
|
|
|
|
|
|
.take(5)
|
|
|
|
|
|
.map(|s| s.as_str())
|
|
|
|
|
|
.collect();
|
|
|
|
|
|
let more = if c.size > 5 {
|
|
|
|
|
|
format!(" +{}", c.size - 5)
|
|
|
|
|
|
} else {
|
|
|
|
|
|
String::new()
|
|
|
|
|
|
};
|
|
|
|
|
|
println!("{:<6} {:>5} {:>6.0}% {:>7} {}{}",
|
|
|
|
|
|
c.id, c.size, c.isolation * 100.0, c.cross_edges,
|
|
|
|
|
|
preview.join(", "), more);
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
|
}
|
|
|
|
|
|
|