diff --git a/poc-memory/src/cli/graph.rs b/poc-memory/src/cli/graph.rs
new file mode 100644
index 0000000..59f6111
--- /dev/null
+++ b/poc-memory/src/cli/graph.rs
@@ -0,0 +1,622 @@
+// cli/graph.rs — graph subcommand handlers
+//
+// Extracted from main.rs. All graph-related CLI commands:
+// link, link-add, link-impact, link-audit, link-orphans,
+// triangle-close, cap-degree, normalize-strengths, differentiate,
+// trace, spectral-*, organize, interference.
+
+// NOTE(review): `graph`, `spectral`, and `StoreView` are not referenced in the
+// visible portion of this file — presumably used by the spectral-*/organize
+// handlers further down; confirm before removing.
+use crate::{store, graph, neuro, spectral};
+use crate::store::StoreView;
+
+/// `graph` subcommand: load the store, build the in-memory graph, and print
+/// summary metrics (node/edge/community counts plus small-world sigma,
+/// power-law exponent, degree Gini, and average clustering coefficient).
+pub fn cmd_graph() -> Result<(), String> {
+    let store = store::Store::load()?;
+    let g = store.build_graph();
+    println!("Graph: {} nodes, {} edges, {} communities",
+        g.nodes().len(), g.edge_count(), g.community_count());
+    println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
+        g.small_world_sigma(), g.degree_power_law_exponent(),
+        g.degree_gini(), g.avg_clustering_coefficient());
+    Ok(())
+}
+
+/// `link-orphans` subcommand: connect under-connected nodes (degree below
+/// `min_deg`) to up to `links_per` similar nodes above `sim_thresh`.
+/// Delegates to `neuro::link_orphans`, which mutates the store.
+/// NOTE(review): the store is mutated but never `save()`d here — presumably
+/// `neuro::link_orphans` persists internally; confirm.
+pub fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
+    let mut store = store::Store::load()?;
+    let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
+    println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
+        orphans, links, min_deg, links_per, sim_thresh);
+    Ok(())
+}
+
+/// `cap-degree` subcommand: prune weak Auto edges from hub nodes whose degree
+/// exceeds `max_deg`, then persist the store.
+pub fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
+    let mut store = store::Store::load()?;
+    let (hubs, pruned) = store.cap_degree(max_deg)?;
+    store.save()?;
+    println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
+    Ok(())
+}
+
+/// `normalize-strengths` subcommand: recompute every relation's strength from
+/// Jaccard neighborhood similarity, print a change report and a 10-bucket
+/// strength histogram. Dry-run by default; writes only when `apply` is true.
+pub fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
+    let mut store = store::Store::load()?;
+    let graph = store.build_graph();
+    let strengths = graph.jaccard_strengths();
+
+    // Build a lookup from (source_key, target_key) → new_strength
+    let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
+    for (a, b, s) in &strengths {
+        // Store both directions for easy lookup
+        updates.insert((a.clone(), b.clone()), *s);
+        updates.insert((b.clone(), a.clone()), *s);
+    }
+
+    // Stats
+    let mut changed = 0usize;
+    let mut unchanged = 0usize;
+    let mut temporal_skipped = 0usize;
+    let mut delta_sum: f64 = 0.0;
+
+    // Histogram of new strengths
+    let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...
+
+    for rel in &mut store.relations {
+        if rel.deleted { continue; }
+
+        // Skip implicit temporal edges (strength 1.0, Auto type)
+        // NOTE(review): exact f32 equality against 1.0 is used as a sentinel;
+        // this only works if temporal edges are created with the literal 1.0
+        // and never re-derived — confirm, or compare with a tolerance.
+        if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
+            temporal_skipped += 1;
+            continue;
+        }
+
+        if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
+            let old_s = rel.strength;
+            let delta = (new_s - old_s).abs();
+            // Deltas below 0.001 count as unchanged (avoids churn on noise).
+            if delta > 0.001 {
+                delta_sum += delta as f64;
+                if apply {
+                    rel.strength = new_s;
+                }
+                changed += 1;
+            } else {
+                unchanged += 1;
+            }
+            // Bucket by first decimal digit; 1.0 clamps into the 0.9-1.0 bucket.
+            let bucket = ((new_s * 10.0) as usize).min(9);
+            buckets[bucket] += 1;
+        }
+    }
+
+    println!("Normalize link strengths (Jaccard similarity)");
+    println!(" Total edges in graph: {}", strengths.len());
+    println!(" Would change: {}", changed);
+    println!(" Unchanged: {}", unchanged);
+    println!(" Temporal (skipped): {}", temporal_skipped);
+    if changed > 0 {
+        println!(" Avg delta: {:.3}", delta_sum / changed as f64);
+    }
+    println!();
+    println!(" Strength distribution:");
+    for (i, &count) in buckets.iter().enumerate() {
+        let lo = i as f32 / 10.0;
+        let hi = lo + 0.1;
+        // One '#' per 50 edges, with a minimum of one for any non-empty bucket.
+        let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
+        println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
+    }
+
+    if apply {
+        store.save()?;
+        println!("\nApplied {} strength updates.", changed);
+    } else {
+        println!("\nDry run. Use --apply to write changes.");
+    }
+
+    Ok(())
+}
+
+/// `link` subcommand: resolve the (possibly multi-word) key and list its
+/// neighbors via the query parser, showing strength and clustering coefficient.
+pub fn cmd_link(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("link requires a key".into());
+    }
+    // Multi-word keys arrive as separate CLI args; rejoin with spaces.
+    let key = key.join(" ");
+    let store = store::Store::load()?;
+    let resolved = store.resolve_key(&key)?;
+    let g = store.build_graph();
+    println!("Neighbors of '{}':", resolved);
+    crate::query_parser::run_query(&store, &g,
+        &format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
+}
+
+/// `triangle-close` subcommand: add lateral links that close triangles around
+/// hubs (degree ≥ `min_degree`), bounded by `sim_threshold` similarity and at
+/// most `max_per_hub` new links per hub. Delegates to `neuro::triangle_close`.
+pub fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
+    println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
+        min_degree, sim_threshold, max_per_hub);
+
+    let mut store = store::Store::load()?;
+    let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
+    println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
+    Ok(())
+}
+
+/// `link-add` subcommand: create an explicit Link relation between two nodes.
+/// Resolves both keys, refines the target to its best-matching section,
+/// deduplicates against existing links in either direction, seeds the strength
+/// from Jaccard similarity (×3, clamped to [0.1, 1.0]), and persists.
+pub fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
+    let mut store = store::Store::load()?;
+    let source = store.resolve_key(source)?;
+    let target = store.resolve_key(target)?;
+    let reason = reason.join(" ");
+
+    // Refine target to best-matching section
+    let source_content = store.nodes.get(&source)
+        .map(|n| n.content.as_str()).unwrap_or("");
+    let target = neuro::refine_target(&store, source_content, &target);
+
+    // Find UUIDs
+    let source_uuid = store.nodes.get(&source)
+        .map(|n| n.uuid)
+        .ok_or_else(|| format!("source not found: {}", source))?;
+    let target_uuid = store.nodes.get(&target)
+        .map(|n| n.uuid)
+        .ok_or_else(|| format!("target not found: {}", target))?;
+
+    // Check for existing link (links are treated as undirected here:
+    // either orientation counts as a duplicate)
+    let exists = store.relations.iter().any(|r|
+        !r.deleted &&
+        ((r.source_key == source && r.target_key == target) ||
+        (r.source_key == target && r.target_key == source)));
+    if exists {
+        println!("Link already exists: {} ↔ {}", source, target);
+        return Ok(());
+    }
+
+    // Compute initial strength from Jaccard neighborhood similarity
+    let graph = store.build_graph();
+    let jaccard = graph.jaccard(&source, &target);
+    let strength = (jaccard * 3.0).clamp(0.1, 1.0);
+
+    let rel = store::new_relation(
+        source_uuid, target_uuid,
+        store::RelationType::Link, strength,
+        &source, &target,
+    );
+    store.add_relation(rel)?;
+    store.save()?;
+    println!("Linked: {} → {} (strength={:.2}, {})", source, target, strength, reason);
+    Ok(())
+}
+
+/// `link-impact` subcommand: report what adding source→target would do to the
+/// graph (degrees, hub/community status, clustering-coefficient deltas, Gini
+/// delta, and an overall assessment) without modifying the store.
+pub fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
+    let store = store::Store::load()?;
+    let source = store.resolve_key(source)?;
+    let target = store.resolve_key(target)?;
+    let g = store.build_graph();
+
+    let impact = g.link_impact(&source, &target);
+
+    println!("Link impact: {} → {}", source, target);
+    println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
+    println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
+    println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
+    println!(" ΔGini: {:+.6}", impact.delta_gini);
+    println!(" Assessment: {}", impact.assessment);
+    Ok(())
+}
+
+/// `differentiate` subcommand: redistribute a file-level hub's links onto its
+/// sections. With a key, previews the per-section moves (applies with
+/// `--apply`); without a key, lists all differentiable hubs.
+pub fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
+    let mut store = store::Store::load()?;
+
+    if let Some(key) = key_arg {
+        let resolved = store.resolve_key(key)?;
+        let moves = neuro::differentiate_hub(&store, &resolved)
+            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;
+
+        // Group by target section for display
+        // NOTE(review): the generic parameters on this BTreeMap have been
+        // stripped in transit (likely angle-bracket mangling); presumably
+        // BTreeMap<String, Vec<&Move-like item>> — restore from the original
+        // source before compiling.
+        let mut by_section: std::collections::BTreeMap> =
+            std::collections::BTreeMap::new();
+        for mv in &moves {
+            by_section.entry(mv.to_section.clone()).or_default().push(mv);
+        }
+
+        println!("Hub '{}' — {} links to redistribute across {} sections\n",
+            resolved, moves.len(), by_section.len());
+
+        for (section, section_moves) in &by_section {
+            println!(" {} ({} links):", section, section_moves.len());
+            // Show at most 5 moves per section, then a "... and N more" line.
+            for mv in section_moves.iter().take(5) {
+                println!(" [{:.3}] {} — {}", mv.similarity,
+                    mv.neighbor_key, mv.neighbor_snippet);
+            }
+            if section_moves.len() > 5 {
+                println!(" ... and {} more", section_moves.len() - 5);
+            }
+        }
+
+        if !do_apply {
+            println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
+            return Ok(());
+        }
+
+        let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
+        store.save()?;
+        println!("\nApplied: {} Skipped: {}", applied, skipped);
+    } else {
+        let hubs = neuro::find_differentiable_hubs(&store);
+        if hubs.is_empty() {
+            println!("No file-level hubs with sections found above threshold");
+            return Ok(());
+        }
+
+        println!("Differentiable hubs (file-level nodes with sections):\n");
+        for (key, degree, sections) in &hubs {
+            println!(" {:40} deg={:3} sections={}", key, degree, sections);
+        }
+        println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
+    }
+
+    Ok(())
+}
+
+/// `link-audit` subcommand: run the full link audit (kept/deleted/retargeted/
+/// weakened/strengthened/errors) and print a summary. Mutations are applied
+/// only when `apply` is true; persistence is handled inside `audit::link_audit`.
+pub fn cmd_link_audit(apply: bool) -> Result<(), String> {
+    let mut store = store::Store::load()?;
+    let stats = crate::audit::link_audit(&mut store, apply)?;
+    println!("\n{}", "=".repeat(60));
+    println!("Link audit complete:");
+    println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
+        stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
+    println!("{}", "=".repeat(60));
+    Ok(())
+}
+
+/// `trace` subcommand: show a node (type, weight, source ref, content preview)
+/// and walk its neighbors grouped by node type (weekly/monthly digests, daily
+/// digests, session entries, semantic nodes).
+pub fn cmd_trace(key: &[String]) -> Result<(), String> {
+    if key.is_empty() {
+        return Err("trace requires a key".into());
+    }
+    let key = key.join(" ");
+    let store = store::Store::load()?;
+    let resolved = store.resolve_key(&key)?;
+    let g = store.build_graph();
+
+    let node = store.nodes.get(&resolved)
+        .ok_or_else(|| format!("Node not found: {}", resolved))?;
+
+    // Display the node itself
+    println!("=== {} ===", resolved);
+    println!("Type: {:?} Weight: {:.2}",
+        node.node_type, node.weight);
+    if !node.source_ref.is_empty() {
+        println!("Source: {}", node.source_ref);
+    }
+
+    // Show content preview
+    let preview = crate::util::truncate(&node.content, 200, "...");
+    println!("\n{}\n", preview);
+
+    // Walk neighbors, grouped by node type
+    let neighbors = g.neighbors(&resolved);
+    let mut episodic_session = Vec::new();
+    let mut episodic_daily = Vec::new();
+    let mut episodic_weekly = Vec::new();
+    let mut semantic = Vec::new();
+
+    for (n, strength) in &neighbors {
+        if let Some(nnode) = store.nodes.get(n.as_str()) {
+            let entry = (n.as_str(), *strength, nnode);
+            // Weekly and Monthly are bucketed together under "weekly".
+            match nnode.node_type {
+                store::NodeType::EpisodicSession =>
+                    episodic_session.push(entry),
+                store::NodeType::EpisodicDaily =>
+                    episodic_daily.push(entry),
+                store::NodeType::EpisodicWeekly
+                | store::NodeType::EpisodicMonthly =>
+                    episodic_weekly.push(entry),
+                store::NodeType::Semantic =>
+                    semantic.push(entry),
+            }
+        }
+    }
+
+    if !episodic_weekly.is_empty() {
+        println!("Weekly digests:");
+        for (k, s, n) in &episodic_weekly {
+            // First line of the content, truncated to 80 chars.
+            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
+            println!(" [{:.2}] {} — {}", s, k, preview);
+        }
+    }
+
+    if !episodic_daily.is_empty() {
+        println!("Daily digests:");
+        for (k, s, n) in &episodic_daily {
+            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
+            println!(" [{:.2}] {} — {}", s, k, preview);
+        }
+    }
+
+    if !episodic_session.is_empty() {
+        println!("Session entries:");
+        for (k, s, n) in &episodic_session {
+            let preview = crate::util::first_n_chars(
+                n.content.lines()
+                    .find(|l| !l.is_empty() && !l.starts_with("
(NOTE(review): the diff chunk is truncated here, mid string literal inside
cmd_trace — the remainder of this function, and any spectral-*/organize/
interference handlers promised by the header, are not visible in this chunk.)