consciousness/src/cli/graph.rs
Kent Overstreet 917960cb76 deps: remove faer (224 transitive crates)
Spectral decomposition (eigenvalue computation) removed — it was
only used by the spectral-save CLI command. The spectral embedding
reader and query engine features remain (they load pre-computed
embeddings from disk, no faer needed).

Removes: faer, nano-gemm, private-gemm, and ~220 other transitive
dependencies. Significant build time and artifact size reduction.

Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
2026-04-05 06:39:47 -04:00

562 lines
20 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

// cli/graph.rs — graph subcommand handlers
//
// Extracted from main.rs. All graph-related CLI commands:
// link, link-add, link-impact, link-audit, link-orphans,
// triangle-close, cap-degree, normalize-strengths, differentiate,
// trace, spectral-*, organize, interference.
use crate::{store, graph, neuro};
use crate::store::StoreView;
/// `graph` — print a one-screen structural summary of the memory graph:
/// node/edge/community counts plus small-world and degree-distribution stats.
pub fn cmd_graph() -> Result<(), String> {
    let store = store::Store::load()?;
    let graph = store.build_graph();
    let node_count = graph.nodes().len();
    let edge_count = graph.edge_count();
    let community_count = graph.community_count();
    println!("Graph: {} nodes, {} edges, {} communities",
        node_count, edge_count, community_count);
    println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
        graph.small_world_sigma(),
        graph.degree_power_law_exponent(),
        graph.degree_gini(),
        graph.avg_clustering_coefficient());
    Ok(())
}
/// `link-orphans` — connect under-linked nodes (degree below `min_deg`)
/// to their most similar neighbors, then report what was added.
pub fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let (orphan_count, link_count) =
        neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
    println!(
        "Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
        orphan_count, link_count, min_deg, links_per, sim_thresh
    );
    Ok(())
}
/// `cap-degree` — prune the weakest Auto edges off any hub whose degree
/// exceeds `max_deg`, then persist the store.
pub fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let (hub_count, pruned_count) = store.cap_degree(max_deg)?;
    store.save()?;
    println!(
        "Capped {} hubs, pruned {} weak Auto edges (max_degree={})",
        hub_count, pruned_count, max_deg
    );
    Ok(())
}
/// `normalize-strengths` — recompute every explicit edge's strength from
/// the Jaccard similarity of its endpoints' neighborhoods.
///
/// Dry-run by default: prints what would change plus a strength histogram.
/// With `apply = true`, writes the new strengths back and saves the store.
/// Implicit temporal edges (strength exactly 1.0, type Auto) are skipped.
pub fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
let mut store = store::Store::load()?;
let graph = store.build_graph();
let strengths = graph.jaccard_strengths();
// Build a lookup from (source_key, target_key) → new_strength
let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
for (a, b, s) in &strengths {
// Store both directions for easy lookup
updates.insert((a.clone(), b.clone()), *s);
updates.insert((b.clone(), a.clone()), *s);
}
// Stats — `changed` is counted in dry-run mode too, so the "Would change"
// line below is accurate without mutating anything.
let mut changed = 0usize;
let mut unchanged = 0usize;
let mut temporal_skipped = 0usize;
let mut delta_sum: f64 = 0.0;
// Histogram of new strengths
let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...
for rel in &mut store.relations {
if rel.deleted { continue; }
// Skip implicit temporal edges (strength 1.0, Auto type)
if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
temporal_skipped += 1;
continue;
}
// Only relations whose (source, target) pair appears in the graph's
// Jaccard output are touched; anything else is left as-is.
if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
let old_s = rel.strength;
let delta = (new_s - old_s).abs();
// 0.001 tolerance avoids churn from float rounding noise.
if delta > 0.001 {
delta_sum += delta as f64;
if apply {
rel.strength = new_s;
}
changed += 1;
} else {
unchanged += 1;
}
// Bucket by the NEW strength; .min(9) clamps new_s == 1.0 into the top bucket.
let bucket = ((new_s * 10.0) as usize).min(9);
buckets[bucket] += 1;
}
}
println!("Normalize link strengths (Jaccard similarity)");
println!(" Total edges in graph: {}", strengths.len());
println!(" Would change: {}", changed);
println!(" Unchanged: {}", unchanged);
println!(" Temporal (skipped): {}", temporal_skipped);
if changed > 0 {
println!(" Avg delta: {:.3}", delta_sum / changed as f64);
}
println!();
println!(" Strength distribution:");
for (i, &count) in buckets.iter().enumerate() {
let lo = i as f32 / 10.0;
let hi = lo + 0.1;
// One '#' per 50 edges, plus one so any non-empty bucket shows a mark.
let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
}
if apply {
store.save()?;
println!("\nApplied {} strength updates.", changed);
} else {
println!("\nDry run. Use --apply to write changes.");
}
Ok(())
}
/// `spread` — run spreading activation from one or more seed keys and
/// print the top activated nodes. Seeds themselves are filtered out of
/// the output; keys that fail to resolve are silently dropped.
pub fn cmd_spread(keys: &[String], max_results: usize) -> Result<(), String> {
    if keys.is_empty() {
        return Err("spread requires at least one seed key".into());
    }
    let store = store::Store::load()?;
    let graph = graph::build_graph_fast(&store);
    let params = store.params();
    // Resolve each seed key; unresolvable keys are skipped rather than fatal.
    let mut seeds: Vec<(String, f64)> = Vec::new();
    for k in keys {
        if let Ok(resolved) = store.resolve_key(k) {
            seeds.push((resolved, 1.0));
        }
    }
    if seeds.is_empty() {
        return Err("no valid seed keys found".into());
    }
    let results = crate::search::spreading_activation(
        &seeds, &graph, &store,
        params.max_hops, params.edge_decay, params.min_activation,
    );
    let seed_keys: std::collections::HashSet<&str> =
        seeds.iter().map(|(k, _)| k.as_str()).collect();
    // Print at most `max_results` non-seed hits, in activation order.
    let mut printed = 0usize;
    for (key, score) in &results {
        if seed_keys.contains(key.as_str()) {
            continue;
        }
        if printed == max_results {
            break;
        }
        println!(" {:.2} {}", score, key);
        printed += 1;
    }
    Ok(())
}
/// `link` — show a node's neighbors (with strength and clustering
/// coefficient) via the query engine. The key may be given as multiple
/// words, which are joined with spaces before resolution.
pub fn cmd_link(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("link requires a key".into());
    }
    let joined = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&joined)?;
    let g = store.build_graph();
    println!("Neighbors of '{}':", resolved);
    let query = format!("neighbors('{}') | select strength,clustering_coefficient", resolved);
    crate::query_parser::run_query(&store, &g, &query)
}
/// `triangle-close` — add lateral links between similar neighbors of the
/// same hub (closing open triangles), up to `max_per_hub` per hub.
pub fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
    println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
        min_degree, sim_threshold, max_per_hub);
    let mut store = store::Store::load()?;
    let (hub_count, added_count) =
        neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
    println!("\nProcessed {} hubs, added {} lateral links", hub_count, added_count);
    Ok(())
}
/// `link-add` — manually create a link between two nodes, refining the
/// target to its best-matching section first. An already-existing link is
/// reported but not treated as an error; other failures propagate.
pub fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
    super::check_dry_run();
    let mut store = store::Store::load()?;
    let source = store.resolve_key(source)?;
    let target = store.resolve_key(target)?;
    let reason = reason.join(" ");
    // Use the source node's content to pick the most relevant target section.
    let source_content = match store.nodes.get(&source) {
        Some(node) => node.content.as_str(),
        None => "",
    };
    let target = neuro::refine_target(&store, source_content, &target);
    let outcome = store.add_link(&source, &target, "manual");
    match outcome {
        Ok(strength) => {
            store.save()?;
            println!("Linked: {}{} (strength={:.2}, {})", source, target, strength, reason);
            Ok(())
        }
        // Duplicate links are informational, not fatal.
        Err(msg) if msg.contains("already exists") => {
            println!("Link already exists: {}{}", source, target);
            Ok(())
        }
        Err(e) => Err(e),
    }
}
/// `link-set` — overwrite the strength of an existing link and report the
/// old → new values.
pub fn cmd_link_set(source: &str, target: &str, strength: f32) -> Result<(), String> {
    super::check_dry_run();
    let mut store = store::Store::load()?;
    let src = store.resolve_key(source)?;
    let tgt = store.resolve_key(target)?;
    let previous = store.set_link_strength(&src, &tgt, strength)?;
    println!("Set: {}{} strength {:.2}{:.2}", src, tgt, previous, strength);
    store.save()?;
    Ok(())
}
/// `link-impact` — estimate the structural effect of a (hypothetical or
/// existing) link: degrees, community relation, clustering and Gini deltas,
/// plus a summary assessment.
pub fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
    let store = store::Store::load()?;
    let src = store.resolve_key(source)?;
    let tgt = store.resolve_key(target)?;
    let graph = store.build_graph();
    let report = graph.link_impact(&src, &tgt);
    println!("Link impact: {}{}", src, tgt);
    println!(" Source degree: {} Target degree: {}", report.source_deg, report.target_deg);
    println!(" Hub link: {} Same community: {}", report.is_hub_link, report.same_community);
    println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", report.delta_cc_source, report.delta_cc_target);
    println!(" ΔGini: {:+.6}", report.delta_gini);
    println!(" Assessment: {}", report.assessment);
    Ok(())
}
/// `differentiate` — redistribute a file-level hub's links onto its
/// sections. Without a key, lists candidate hubs. With a key, previews the
/// proposed moves grouped by target section; `--apply` performs them.
pub fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    match key_arg {
        None => {
            // No key: just enumerate hubs worth differentiating.
            let hubs = neuro::find_differentiable_hubs(&store);
            if hubs.is_empty() {
                println!("No file-level hubs with sections found above threshold");
                return Ok(());
            }
            println!("Differentiable hubs (file-level nodes with sections):\n");
            for (key, degree, sections) in &hubs {
                println!(" {:40} deg={:3} sections={}", key, degree, sections);
            }
            println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
        }
        Some(key) => {
            let resolved = store.resolve_key(key)?;
            let moves = neuro::differentiate_hub(&store, &resolved)
                .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;
            // Bucket proposed moves by destination section (BTreeMap keeps
            // the sections in sorted order for display).
            let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
                std::collections::BTreeMap::new();
            for mv in &moves {
                by_section.entry(mv.to_section.clone()).or_default().push(mv);
            }
            println!("Hub '{}' — {} links to redistribute across {} sections\n",
                resolved, moves.len(), by_section.len());
            for (section, section_moves) in &by_section {
                println!(" {} ({} links):", section, section_moves.len());
                // Show at most five moves per section; summarize the rest.
                let shown = section_moves.len().min(5);
                for mv in &section_moves[..shown] {
                    println!(" [{:.3}] {}{}", mv.similarity,
                        mv.neighbor_key, mv.neighbor_snippet);
                }
                if section_moves.len() > shown {
                    println!(" ... and {} more", section_moves.len() - shown);
                }
            }
            if !do_apply {
                println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
                return Ok(());
            }
            let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
            store.save()?;
            println!("\nApplied: {} Skipped: {}", applied, skipped);
        }
    }
    Ok(())
}
/// `link-audit` — run the full link audit and print summary statistics.
/// With `apply = true`, the audit's fixes are written to the store.
pub fn cmd_link_audit(apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let stats = crate::audit::link_audit(&mut store, apply)?;
    let rule = "=".repeat(60);
    println!("\n{}", rule);
    println!("Link audit complete:");
    println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
        stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
    println!("{}", rule);
    Ok(())
}
/// `trace` — display one node in full context: its metadata, a content
/// preview, and its neighbors grouped by node type (weekly/monthly digests,
/// daily digests, session entries, semantic links), with per-group previews
/// and a final link-count summary.
pub fn cmd_trace(key: &[String]) -> Result<(), String> {
if key.is_empty() {
return Err("trace requires a key".into());
}
// Multi-word keys are joined with spaces before resolution.
let key = key.join(" ");
let store = store::Store::load()?;
let resolved = store.resolve_key(&key)?;
let g = store.build_graph();
let node = store.nodes.get(&resolved)
.ok_or_else(|| format!("Node not found: {}", resolved))?;
// Display the node itself
println!("=== {} ===", resolved);
println!("Type: {:?} Weight: {:.2}",
node.node_type, node.weight);
if !node.source_ref.is_empty() {
println!("Source: {}", node.source_ref);
}
// Show content preview
let preview = crate::util::truncate(&node.content, 200, "...");
println!("\n{}\n", preview);
// Walk neighbors, grouped by node type. Weekly and monthly digests are
// binned together into the "weekly" bucket.
let neighbors = g.neighbors(&resolved);
let mut episodic_session = Vec::new();
let mut episodic_daily = Vec::new();
let mut episodic_weekly = Vec::new();
let mut semantic = Vec::new();
for (n, strength) in &neighbors {
// Neighbors missing from the node map (e.g. deleted) are skipped.
if let Some(nnode) = store.nodes.get(n.as_str()) {
let entry = (n.as_str(), *strength, nnode);
match nnode.node_type {
store::NodeType::EpisodicSession =>
episodic_session.push(entry),
store::NodeType::EpisodicDaily =>
episodic_daily.push(entry),
store::NodeType::EpisodicWeekly
| store::NodeType::EpisodicMonthly =>
episodic_weekly.push(entry),
store::NodeType::Semantic =>
semantic.push(entry),
}
}
}
// Each group previews the first line of the neighbor's content,
// truncated to 80 chars.
if !episodic_weekly.is_empty() {
println!("Weekly digests:");
for (k, s, n) in &episodic_weekly {
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
println!(" [{:.2}] {}{}", s, k, preview);
}
}
if !episodic_daily.is_empty() {
println!("Daily digests:");
for (k, s, n) in &episodic_daily {
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
println!(" [{:.2}] {}{}", s, k, preview);
}
}
if !episodic_session.is_empty() {
println!("Session entries:");
for (k, s, n) in &episodic_session {
// Sessions preview the first non-empty line that isn't an HTML comment
// (session files start with <!-- metadata --> headers).
let preview = crate::util::first_n_chars(
n.content.lines()
.find(|l| !l.is_empty() && !l.starts_with("<!--"))
.unwrap_or(""),
80);
println!(" [{:.2}] {}", s, k);
if !n.source_ref.is_empty() {
println!(" ↳ source: {}", n.source_ref);
}
println!(" {}", preview);
}
}
if !semantic.is_empty() {
println!("Semantic links:");
for (k, s, _) in &semantic {
println!(" [{:.2}] {}", s, k);
}
}
println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
episodic_session.len(), episodic_daily.len(),
episodic_weekly.len(), semantic.len());
Ok(())
}
/// `organize` — analyze how well a topic's nodes are organized:
/// 1. collect semantic nodes matching `term` (by key, or by content unless
///    `key_only`),
/// 2. report highly similar pairs above `threshold` (merge candidates),
/// 3. check connectivity to the cluster's best intra-cluster hub,
/// 4. optionally (`create_anchor`) create an anchor node linked to every
///    cluster member.
pub fn cmd_organize(term: &str, threshold: f32, key_only: bool, create_anchor: bool) -> Result<(), String> {
let mut store = store::Store::load()?;
// Step 1: find all non-deleted nodes matching the term
let term_lower = term.to_lowercase();
let mut topic_nodes: Vec<(String, String)> = Vec::new(); // (key, content)
// Infrastructure/index nodes excluded from topic clustering.
let skip_prefixes = ["_", "deep-index#", "facts-", "irc-history#"];
for (key, node) in &store.nodes {
if node.deleted { continue; }
// Skip episodic/digest nodes — use NodeType, not key prefix
if node.node_type != crate::store::NodeType::Semantic { continue; }
let key_matches = key.to_lowercase().contains(&term_lower);
let content_matches = !key_only && node.content.to_lowercase().contains(&term_lower);
if !key_matches && !content_matches { continue; }
if skip_prefixes.iter().any(|p| key.starts_with(p)) { continue; }
topic_nodes.push((key.clone(), node.content.clone()));
}
if topic_nodes.is_empty() {
println!("No topic nodes found matching '{}'", term);
return Ok(());
}
// Deterministic display order regardless of map iteration order.
topic_nodes.sort_by(|a, b| a.0.cmp(&b.0));
println!("=== Organize: '{}' ===", term);
println!("Found {} topic nodes:\n", topic_nodes.len());
for (key, content) in &topic_nodes {
let lines = content.lines().count();
let words = content.split_whitespace().count();
println!(" {:60} {:>4} lines {:>5} words", key, lines, words);
}
// Step 2: pairwise similarity
let pairs = crate::similarity::pairwise_similar(&topic_nodes, threshold);
if pairs.is_empty() {
println!("\nNo similar pairs above threshold {:.2}", threshold);
} else {
println!("\n=== Similar pairs (cosine > {:.2}) ===\n", threshold);
for (a, b, sim) in &pairs {
// Word counts help judge which of a similar pair is the fuller node.
let a_words = topic_nodes.iter().find(|(k,_)| k == a)
.map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
let b_words = topic_nodes.iter().find(|(k,_)| k == b)
.map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
println!(" [{:.3}] {} ({} words) ↔ {} ({} words)", sim, a, a_words, b, b_words);
}
}
// Step 3: check connectivity within cluster
let g = store.build_graph();
println!("=== Connectivity ===\n");
// Pick hub by intra-cluster connectivity, not overall degree
let cluster_keys: std::collections::HashSet<&str> = topic_nodes.iter()
.filter(|(k,_)| store.nodes.contains_key(k.as_str()))
.map(|(k,_)| k.as_str())
.collect();
let mut best_hub: Option<(&str, usize)> = None;
for key in &cluster_keys {
// Degree counted only over neighbors that are also in the cluster.
let intra_degree = g.neighbor_keys(key).iter()
.filter(|n| cluster_keys.contains(*n))
.count();
if best_hub.is_none() || intra_degree > best_hub.unwrap().1 {
best_hub = Some((key, intra_degree));
}
}
if let Some((hub, deg)) = best_hub {
println!(" Hub: {} (degree {})", hub, deg);
let hub_nbrs = g.neighbor_keys(hub);
// List cluster members that lack a direct link to the chosen hub.
let mut unlinked = Vec::new();
for (key, _) in &topic_nodes {
if key == hub { continue; }
if store.nodes.get(key.as_str()).is_none() { continue; }
if !hub_nbrs.contains(key.as_str()) {
unlinked.push(key.clone());
}
}
if unlinked.is_empty() {
println!(" All cluster nodes connected to hub ✓");
} else {
println!(" NOT linked to hub:");
for key in &unlinked {
println!(" {} → needs link to {}", key, hub);
}
}
}
// Step 4: anchor node — an upserted node named after the search term,
// linked (strength 0.8) to every cluster member.
if create_anchor {
println!("\n=== Anchor node ===\n");
if store.nodes.contains_key(term) && !store.nodes[term].deleted {
println!(" Anchor '{}' already exists ✓", term);
} else {
let desc = format!("Anchor node for '{}' search term", term);
store.upsert(term, &desc)?;
let anchor_uuid = store.nodes.get(term).unwrap().uuid;
for (key, _) in &topic_nodes {
if store.nodes.get(key.as_str()).is_none() { continue; }
let target_uuid = store.nodes[key.as_str()].uuid;
let rel = store::new_relation(
anchor_uuid, target_uuid,
store::RelationType::Link, 0.8,
term, key,
);
store.add_relation(rel)?;
}
println!(" Created anchor '{}' with {} links", term, topic_nodes.len());
}
}
// NOTE(review): save() runs even when no anchor was created and nothing
// was modified — presumably a cheap no-op write; confirm intent.
store.save()?;
Ok(())
}
/// `interference` — list pairs of highly similar nodes that sit in
/// different communities (candidates for merging or cross-linking).
pub fn cmd_interference(threshold: f32) -> Result<(), String> {
    let store = store::Store::load()?;
    let g = store.build_graph();
    let pairs = neuro::detect_interference(&store, &g, threshold);
    if pairs.is_empty() {
        println!("No interfering pairs above threshold {:.2}", threshold);
        return Ok(());
    }
    println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
    for (a, b, sim) in &pairs {
        println!(" [{:.3}] {}{}", sim, a, b);
    }
    Ok(())
}
/// Show communities sorted by isolation (most isolated first).
/// Useful for finding poorly-integrated knowledge clusters that need
/// organize agents aimed at them.
///
/// Prints at most `top_n` communities with at least `min_size` members,
/// each with up to five member keys previewed.
pub fn cmd_communities(top_n: usize, min_size: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let g = store.build_graph();
    let infos = g.community_info();
    let total = infos.len();
    // Count the communities that pass the size filter BEFORE truncating to
    // top_n: the previous code reported shown.len(), which understated the
    // qualifying count whenever more than top_n communities qualified.
    let qualifying: Vec<_> = infos.into_iter()
        .filter(|c| c.size >= min_size)
        .collect();
    let qualifying_count = qualifying.len();
    let shown: Vec<_> = qualifying.into_iter()
        .take(top_n)
        .collect();
    println!("{} communities total ({} with size >= {})\n",
        total, qualifying_count, min_size);
    println!("{:<6} {:>5} {:>7} {:>7} members", "id", "size", "iso", "cross");
    println!("{}", "-".repeat(70));
    for c in &shown {
        // Preview the first five member keys; summarize the remainder.
        let preview: Vec<&str> = c.members.iter()
            .take(5)
            .map(|s| s.as_str())
            .collect();
        let more = if c.size > 5 {
            format!(" +{}", c.size - 5)
        } else {
            String::new()
        };
        println!("{:<6} {:>5} {:>6.0}% {:>7} {}{}",
            c.id, c.size, c.isolation * 100.0, c.cross_edges,
            preview.join(", "), more);
    }
    Ok(())
}