cli: extract graph commands from main.rs into cli/graph.rs
Move 18 graph subcommand handlers (594 lines) out of main.rs: link, link-add, link-impact, link-audit, link-orphans, triangle-close, cap-degree, normalize-strengths, differentiate, trace, spectral-*, organize, interference. main.rs: 3130 → 2518 lines. Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
parent
55715ad998
commit
c8d86e94c1
4 changed files with 649 additions and 630 deletions
622
poc-memory/src/cli/graph.rs
Normal file
622
poc-memory/src/cli/graph.rs
Normal file
|
|
@ -0,0 +1,622 @@
|
|||
// cli/graph.rs — graph subcommand handlers
|
||||
//
|
||||
// Extracted from main.rs. All graph-related CLI commands:
|
||||
// link, link-add, link-impact, link-audit, link-orphans,
|
||||
// triangle-close, cap-degree, normalize-strengths, differentiate,
|
||||
// trace, spectral-*, organize, interference.
|
||||
|
||||
use crate::{store, graph, neuro, spectral};
|
||||
use crate::store::StoreView;
|
||||
|
||||
pub fn cmd_graph() -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
println!("Graph: {} nodes, {} edges, {} communities",
|
||||
g.nodes().len(), g.edge_count(), g.community_count());
|
||||
println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
|
||||
g.small_world_sigma(), g.degree_power_law_exponent(),
|
||||
g.degree_gini(), g.avg_clustering_coefficient());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
|
||||
println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
|
||||
orphans, links, min_deg, links_per, sim_thresh);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
let (hubs, pruned) = store.cap_degree(max_deg)?;
|
||||
store.save()?;
|
||||
println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Recompute every explicit link's strength from Jaccard neighborhood
/// similarity and report the delta distribution. Dry-run by default;
/// `apply` persists the new strengths to the store.
pub fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let graph = store.build_graph();
    let strengths = graph.jaccard_strengths();

    // Build a lookup from (source_key, target_key) → new_strength
    let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
    for (a, b, s) in &strengths {
        // Store both directions for easy lookup
        updates.insert((a.clone(), b.clone()), *s);
        updates.insert((b.clone(), a.clone()), *s);
    }

    // Stats
    let mut changed = 0usize;
    let mut unchanged = 0usize;
    let mut temporal_skipped = 0usize;
    let mut delta_sum: f64 = 0.0;

    // Histogram of new strengths
    let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...

    for rel in &mut store.relations {
        if rel.deleted { continue; }

        // Skip implicit temporal edges (strength 1.0, Auto type).
        // NOTE: exact f32 comparison is intentional — 1.0 is a sentinel
        // value written verbatim, not a computed float.
        if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
            temporal_skipped += 1;
            continue;
        }

        if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
            let old_s = rel.strength;
            let delta = (new_s - old_s).abs();
            // Deltas below 0.001 are treated as float noise, not changes.
            if delta > 0.001 {
                delta_sum += delta as f64;
                if apply {
                    rel.strength = new_s;
                }
                changed += 1;
            } else {
                unchanged += 1;
            }
            // Bucket the *proposed* strength into 10 bins of width 0.1;
            // min(9) folds an exact 1.0 into the top bin.
            let bucket = ((new_s * 10.0) as usize).min(9);
            buckets[bucket] += 1;
        }
    }

    // Summary report (counts are identical in dry-run and apply modes).
    println!("Normalize link strengths (Jaccard similarity)");
    println!(" Total edges in graph: {}", strengths.len());
    println!(" Would change: {}", changed);
    println!(" Unchanged: {}", unchanged);
    println!(" Temporal (skipped): {}", temporal_skipped);
    if changed > 0 {
        println!(" Avg delta: {:.3}", delta_sum / changed as f64);
    }
    println!();
    println!(" Strength distribution:");
    for (i, &count) in buckets.iter().enumerate() {
        let lo = i as f32 / 10.0;
        let hi = lo + 0.1;
        // One '#' per 50 edges, with a minimum of one for non-empty bins.
        let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
        println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
    }

    if apply {
        store.save()?;
        println!("\nApplied {} strength updates.", changed);
    } else {
        println!("\nDry run. Use --apply to write changes.");
    }

    Ok(())
}
|
||||
|
||||
pub fn cmd_link(key: &[String]) -> Result<(), String> {
|
||||
if key.is_empty() {
|
||||
return Err("link requires a key".into());
|
||||
}
|
||||
let key = key.join(" ");
|
||||
let store = store::Store::load()?;
|
||||
let resolved = store.resolve_key(&key)?;
|
||||
let g = store.build_graph();
|
||||
println!("Neighbors of '{}':", resolved);
|
||||
crate::query_parser::run_query(&store, &g,
|
||||
&format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
|
||||
}
|
||||
|
||||
pub fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
|
||||
println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
|
||||
min_degree, sim_threshold, max_per_hub);
|
||||
|
||||
let mut store = store::Store::load()?;
|
||||
let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
|
||||
println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create an explicit Link relation between two nodes.
/// Both keys are fuzzy-resolved; the target is then refined to its
/// best-matching section node before linking. The initial strength is
/// derived from Jaccard neighborhood similarity. `reason` is only echoed
/// in the confirmation message, not stored on the relation here.
pub fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let source = store.resolve_key(source)?;
    let target = store.resolve_key(target)?;
    let reason = reason.join(" ");

    // Refine target to best-matching section (must happen before the
    // UUID lookup, since refinement may pick a different node key)
    let source_content = store.nodes.get(&source)
        .map(|n| n.content.as_str()).unwrap_or("");
    let target = neuro::refine_target(&store, source_content, &target);

    // Find UUIDs
    let source_uuid = store.nodes.get(&source)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("source not found: {}", source))?;
    let target_uuid = store.nodes.get(&target)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("target not found: {}", target))?;

    // Check for existing link in either direction, ignoring tombstones
    let exists = store.relations.iter().any(|r|
        !r.deleted &&
        ((r.source_key == source && r.target_key == target) ||
         (r.source_key == target && r.target_key == source)));
    if exists {
        println!("Link already exists: {} ↔ {}", source, target);
        return Ok(());
    }

    // Compute initial strength from Jaccard neighborhood similarity,
    // scaled ×3 so modest overlap still yields a usable strength and
    // clamped to [0.1, 1.0] so disjoint pairs get a floor value.
    let graph = store.build_graph();
    let jaccard = graph.jaccard(&source, &target);
    let strength = (jaccard * 3.0).clamp(0.1, 1.0);

    let rel = store::new_relation(
        source_uuid, target_uuid,
        store::RelationType::Link, strength,
        &source, &target,
    );
    store.add_relation(rel)?;
    store.save()?;
    println!("Linked: {} → {} (strength={:.2}, {})", source, target, strength, reason);
    Ok(())
}
|
||||
|
||||
pub fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let source = store.resolve_key(source)?;
|
||||
let target = store.resolve_key(target)?;
|
||||
let g = store.build_graph();
|
||||
|
||||
let impact = g.link_impact(&source, &target);
|
||||
|
||||
println!("Link impact: {} → {}", source, target);
|
||||
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
|
||||
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
|
||||
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
|
||||
println!(" ΔGini: {:+.6}", impact.delta_gini);
|
||||
println!(" Assessment: {}", impact.assessment);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Redistribute a file-level hub's links across its section nodes
/// ("differentiation"). With a key: preview the moves, or perform them
/// when `do_apply` is set. Without a key: list all hubs eligible for
/// differentiation.
pub fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;

    if let Some(key) = key_arg {
        let resolved = store.resolve_key(key)?;
        let moves = neuro::differentiate_hub(&store, &resolved)
            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;

        // Group by target section for display (BTreeMap for stable,
        // sorted output order)
        let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
            std::collections::BTreeMap::new();
        for mv in &moves {
            by_section.entry(mv.to_section.clone()).or_default().push(mv);
        }

        println!("Hub '{}' — {} links to redistribute across {} sections\n",
            resolved, moves.len(), by_section.len());

        for (section, section_moves) in &by_section {
            println!(" {} ({} links):", section, section_moves.len());
            // Cap the preview at 5 example moves per section.
            for mv in section_moves.iter().take(5) {
                println!(" [{:.3}] {} — {}", mv.similarity,
                    mv.neighbor_key, mv.neighbor_snippet);
            }
            if section_moves.len() > 5 {
                println!(" ... and {} more", section_moves.len() - 5);
            }
        }

        // Preview mode: stop before mutating anything.
        if !do_apply {
            println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
            return Ok(());
        }

        let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
        store.save()?;
        println!("\nApplied: {} Skipped: {}", applied, skipped);
    } else {
        // No key: discovery mode — list candidate hubs only.
        let hubs = neuro::find_differentiable_hubs(&store);
        if hubs.is_empty() {
            println!("No file-level hubs with sections found above threshold");
            return Ok(());
        }

        println!("Differentiable hubs (file-level nodes with sections):\n");
        for (key, degree, sections) in &hubs {
            println!(" {:40} deg={:3} sections={}", key, degree, sections);
        }
        println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
    }

    Ok(())
}
|
||||
|
||||
pub fn cmd_link_audit(apply: bool) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
let stats = crate::audit::link_audit(&mut store, apply)?;
|
||||
println!("\n{}", "=".repeat(60));
|
||||
println!("Link audit complete:");
|
||||
println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
|
||||
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
|
||||
println!("{}", "=".repeat(60));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Display a node and its neighborhood, grouped by memory type:
/// weekly/monthly digests, daily digests, session entries, and
/// semantic links, each with a short content preview.
pub fn cmd_trace(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("trace requires a key".into());
    }
    let key = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();

    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;

    // Display the node itself
    println!("=== {} ===", resolved);
    println!("Type: {:?} Weight: {:.2}",
        node.node_type, node.weight);
    if !node.source_ref.is_empty() {
        println!("Source: {}", node.source_ref);
    }

    // Show content preview (first 200 chars)
    let preview = crate::util::truncate(&node.content, 200, "...");
    println!("\n{}\n", preview);

    // Walk neighbors, grouped by node type
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();

    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            // Exhaustive match over NodeType; weekly and monthly digests
            // share the "weekly" bucket.
            match nnode.node_type {
                store::NodeType::EpisodicSession =>
                    episodic_session.push(entry),
                store::NodeType::EpisodicDaily =>
                    episodic_daily.push(entry),
                store::NodeType::EpisodicWeekly
                | store::NodeType::EpisodicMonthly =>
                    episodic_weekly.push(entry),
                store::NodeType::Semantic =>
                    semantic.push(entry),
            }
        }
    }

    if !episodic_weekly.is_empty() {
        println!("Weekly digests:");
        for (k, s, n) in &episodic_weekly {
            // First content line serves as the one-line preview.
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!(" [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_daily.is_empty() {
        println!("Daily digests:");
        for (k, s, n) in &episodic_daily {
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!(" [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_session.is_empty() {
        println!("Session entries:");
        for (k, s, n) in &episodic_session {
            // Session previews skip blank lines and lines starting with
            // an HTML comment marker when choosing the preview line.
            let preview = crate::util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            println!(" [{:.2}] {}", s, k);
            if !n.source_ref.is_empty() {
                println!(" ↳ source: {}", n.source_ref);
            }
            println!(" {}", preview);
        }
    }

    if !semantic.is_empty() {
        println!("Semantic links:");
        for (k, s, _) in &semantic {
            println!(" [{:.2}] {}", s, k);
        }
    }

    // Final tally across all four groups.
    println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len());

    Ok(())
}
|
||||
|
||||
pub fn cmd_spectral(k: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = graph::build_graph(&store);
|
||||
let result = spectral::decompose(&g, k);
|
||||
spectral::print_summary(&result, &g);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_spectral_save(k: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = graph::build_graph(&store);
|
||||
let result = spectral::decompose(&g, k);
|
||||
let emb = spectral::to_embedding(&result);
|
||||
spectral::save_embedding(&emb)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_spectral_neighbors(key: &str, n: usize) -> Result<(), String> {
|
||||
let emb = spectral::load_embedding()?;
|
||||
|
||||
let dims = spectral::dominant_dimensions(&emb, &[key]);
|
||||
println!("Node: {} (embedding: {} dims)", key, emb.dims);
|
||||
println!("Top spectral axes:");
|
||||
for &(d, loading) in dims.iter().take(5) {
|
||||
println!(" axis {:<2} (λ={:.4}): loading={:.5}", d, emb.eigenvalues[d], loading);
|
||||
}
|
||||
|
||||
println!("\nNearest neighbors in spectral space:");
|
||||
let neighbors = spectral::nearest_neighbors(&emb, key, n);
|
||||
for (i, (k, dist)) in neighbors.iter().enumerate() {
|
||||
println!(" {:>2}. {:.5} {}", i + 1, dist, k);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_spectral_positions(n: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let emb = spectral::load_embedding()?;
|
||||
|
||||
let g = store.build_graph();
|
||||
let communities = g.communities().clone();
|
||||
|
||||
let positions = spectral::analyze_positions(&emb, &communities);
|
||||
|
||||
println!("Spectral position analysis — {} nodes", positions.len());
|
||||
println!(" outlier: dist_to_center / median (>1 = unusual position)");
|
||||
println!(" bridge: dist_to_center / dist_to_nearest_other_community");
|
||||
println!();
|
||||
|
||||
let mut bridges: Vec<&spectral::SpectralPosition> = Vec::new();
|
||||
let mut outliers: Vec<&spectral::SpectralPosition> = Vec::new();
|
||||
|
||||
for pos in positions.iter().take(n) {
|
||||
match spectral::classify_position(pos) {
|
||||
"bridge" => bridges.push(pos),
|
||||
_ => outliers.push(pos),
|
||||
}
|
||||
}
|
||||
|
||||
if !bridges.is_empty() {
|
||||
println!("=== Bridges (between communities) ===");
|
||||
for pos in &bridges {
|
||||
println!(" [{:.2}/{:.2}] c{} → c{} {}",
|
||||
pos.outlier_score, pos.bridge_score,
|
||||
pos.community, pos.nearest_community, pos.key);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
println!("=== Top outliers (far from own community center) ===");
|
||||
for pos in positions.iter().take(n) {
|
||||
let class = spectral::classify_position(pos);
|
||||
println!(" {:>10} outlier={:.2} bridge={:.2} c{:<3} {}",
|
||||
class, pos.outlier_score, pos.bridge_score,
|
||||
pos.community, pos.key);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Suggest the n closest unlinked node pairs in spectral space as
/// candidates for new links. Only nodes with degree >= 3 are searched,
/// so suggestions connect established regions of the graph.
pub fn cmd_spectral_suggest(n: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let emb = spectral::load_embedding()?;
    let g = store.build_graph();
    let communities = g.communities();

    // Restrict the search space to well-connected nodes.
    let min_degree = 3;
    let well_connected: std::collections::HashSet<&str> = emb.coords.keys()
        .filter(|k| g.degree(k) >= min_degree)
        .map(|k| k.as_str())
        .collect();

    // Clone a filtered copy of the embedding containing only those nodes.
    let filtered_emb = spectral::SpectralEmbedding {
        dims: emb.dims,
        eigenvalues: emb.eigenvalues.clone(),
        coords: emb.coords.iter()
            .filter(|(k, _)| well_connected.contains(k.as_str()))
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect(),
    };

    // Symmetric set of already-linked pairs to exclude from suggestions.
    // NOTE(review): deleted relations are NOT filtered here (unlike the
    // duplicate check in cmd_link_add), so a previously-deleted link
    // still suppresses the pair — confirm whether that is intentional.
    let mut linked: std::collections::HashSet<(String, String)> =
        std::collections::HashSet::new();
    for rel in &store.relations {
        linked.insert((rel.source_key.clone(), rel.target_key.clone()));
        linked.insert((rel.target_key.clone(), rel.source_key.clone()));
    }

    // Progress note goes to stderr so stdout stays machine-parseable.
    eprintln!("Searching {} well-connected nodes (degree >= {})...",
        filtered_emb.coords.len(), min_degree);
    let pairs = spectral::unlinked_neighbors(&filtered_emb, &linked, n);

    println!("{} closest unlinked pairs (candidates for extractor agents):", pairs.len());
    for (i, (k1, k2, dist)) in pairs.iter().enumerate() {
        let c1 = communities.get(k1)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        let c2 = communities.get(k2)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        // Cross-community pairs are flagged explicitly — they bridge
        // otherwise-separate clusters.
        let cross = if c1 != c2 { " [cross-community]" } else { "" };
        println!(" {:>2}. dist={:.4} {} ({}) ↔ {} ({}){}",
            i + 1, dist, k1, c1, k2, c2, cross);
    }

    Ok(())
}
|
||||
|
||||
/// Organize all nodes matching a search term: list them, report
/// pairwise similarity above `threshold`, check cluster connectivity to
/// the best intra-cluster hub, and — when `create_anchor` is set —
/// create an anchor node linked to every match.
pub fn cmd_organize(term: &str, threshold: f32, key_only: bool, create_anchor: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;

    // Step 1: find all non-deleted nodes matching the term
    let term_lower = term.to_lowercase();
    let mut topic_nodes: Vec<(String, String)> = Vec::new(); // (key, content)

    // Prefixes that indicate ephemeral/generated nodes to skip
    let skip_prefixes = ["journal#", "daily-", "weekly-", "monthly-", "_",
        "deep-index#", "facts-", "irc-history#"];

    for (key, node) in &store.nodes {
        if node.deleted { continue; }
        let key_matches = key.to_lowercase().contains(&term_lower);
        // key_only restricts matching to node keys; otherwise node
        // content is searched as well.
        let content_matches = !key_only && node.content.to_lowercase().contains(&term_lower);
        if !key_matches && !content_matches { continue; }
        if skip_prefixes.iter().any(|p| key.starts_with(p)) { continue; }
        topic_nodes.push((key.clone(), node.content.clone()));
    }

    if topic_nodes.is_empty() {
        println!("No topic nodes found matching '{}'", term);
        return Ok(());
    }

    // Sort by key for deterministic output order.
    topic_nodes.sort_by(|a, b| a.0.cmp(&b.0));

    println!("=== Organize: '{}' ===", term);
    println!("Found {} topic nodes:\n", topic_nodes.len());
    for (key, content) in &topic_nodes {
        let lines = content.lines().count();
        let words = content.split_whitespace().count();
        println!(" {:60} {:>4} lines {:>5} words", key, lines, words);
    }

    // Step 2: pairwise similarity
    let pairs = crate::similarity::pairwise_similar(&topic_nodes, threshold);

    if pairs.is_empty() {
        println!("\nNo similar pairs above threshold {:.2}", threshold);
    } else {
        println!("\n=== Similar pairs (cosine > {:.2}) ===\n", threshold);
        for (a, b, sim) in &pairs {
            // Linear scans are fine here; topic clusters are small.
            let a_words = topic_nodes.iter().find(|(k,_)| k == a)
                .map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
            let b_words = topic_nodes.iter().find(|(k,_)| k == b)
                .map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);

            println!(" [{:.3}] {} ({} words) ↔ {} ({} words)", sim, a, a_words, b, b_words);
        }
    }

    // Step 3: check connectivity within cluster
    let g = store.build_graph();
    println!("=== Connectivity ===\n");

    // Pick hub by intra-cluster connectivity, not overall degree
    let cluster_keys: std::collections::HashSet<&str> = topic_nodes.iter()
        .filter(|(k,_)| store.nodes.contains_key(k.as_str()))
        .map(|(k,_)| k.as_str())
        .collect();

    let mut best_hub: Option<(&str, usize)> = None;
    for key in &cluster_keys {
        // Count only neighbors that are themselves in the cluster.
        let intra_degree = g.neighbor_keys(key).iter()
            .filter(|n| cluster_keys.contains(*n))
            .count();
        if best_hub.is_none() || intra_degree > best_hub.unwrap().1 {
            best_hub = Some((key, intra_degree));
        }
    }

    if let Some((hub, deg)) = best_hub {
        println!(" Hub: {} (degree {})", hub, deg);
        let hub_nbrs = g.neighbor_keys(hub);

        // Cluster members not directly linked to the chosen hub.
        let mut unlinked = Vec::new();
        for (key, _) in &topic_nodes {
            if key == hub { continue; }
            if store.nodes.get(key.as_str()).is_none() { continue; }
            if !hub_nbrs.contains(key.as_str()) {
                unlinked.push(key.clone());
            }
        }

        if unlinked.is_empty() {
            println!(" All cluster nodes connected to hub ✓");
        } else {
            println!(" NOT linked to hub:");
            for key in &unlinked {
                println!(" {} → needs link to {}", key, hub);
            }
        }
    }

    // Step 4: anchor node
    if create_anchor {
        println!("\n=== Anchor node ===\n");
        if store.nodes.contains_key(term) && !store.nodes[term].deleted {
            println!(" Anchor '{}' already exists ✓", term);
        } else {
            let desc = format!("Anchor node for '{}' search term", term);
            store.upsert(term, &desc)?;
            let anchor_uuid = store.nodes.get(term).unwrap().uuid;
            // Link the new anchor to every topic node with a fixed 0.8
            // strength.
            for (key, _) in &topic_nodes {
                if store.nodes.get(key.as_str()).is_none() { continue; }
                let target_uuid = store.nodes[key.as_str()].uuid;
                let rel = store::new_relation(
                    anchor_uuid, target_uuid,
                    store::RelationType::Link, 0.8,
                    term, key,
                );
                store.add_relation(rel)?;
            }
            println!(" Created anchor '{}' with {} links", term, topic_nodes.len());
        }
    }

    // NOTE(review): save() runs even when nothing was modified (no
    // anchor created) — presumably a harmless no-op rewrite; confirm.
    store.save()?;
    Ok(())
}
|
||||
|
||||
pub fn cmd_interference(threshold: f32) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
let pairs = neuro::detect_interference(&store, &g, threshold);
|
||||
|
||||
if pairs.is_empty() {
|
||||
println!("No interfering pairs above threshold {:.2}", threshold);
|
||||
} else {
|
||||
println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
|
||||
for (a, b, sim) in &pairs {
|
||||
println!(" [{:.3}] {} ↔ {}", sim, a, b);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
6
poc-memory/src/cli/mod.rs
Normal file
6
poc-memory/src/cli/mod.rs
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
// cli/ — command-line interface handlers
|
||||
//
|
||||
// Split from main.rs for readability. Each module handles a group
|
||||
// of related subcommands.
|
||||
|
||||
pub mod graph;
|
||||
|
|
@ -21,6 +21,9 @@ pub mod neuro;
|
|||
pub mod counters;
|
||||
pub mod cursor;
|
||||
|
||||
// CLI handlers (split from main.rs)
|
||||
pub mod cli;
|
||||
|
||||
// Agent layer (LLM-powered operations)
|
||||
pub mod agents;
|
||||
pub mod tui;
|
||||
|
|
|
|||
|
|
@ -763,31 +763,31 @@ fn main() {
|
|||
|
||||
// Graph
|
||||
Command::GraphCmd(sub) => match sub {
|
||||
GraphCmd::Link { key } => cmd_link(&key),
|
||||
GraphCmd::Link { key } => cli::graph::cmd_link(&key),
|
||||
GraphCmd::LinkAdd { source, target, reason }
|
||||
=> cmd_link_add(&source, &target, &reason),
|
||||
=> cli::graph::cmd_link_add(&source, &target, &reason),
|
||||
GraphCmd::LinkImpact { source, target }
|
||||
=> cmd_link_impact(&source, &target),
|
||||
GraphCmd::LinkAudit { apply } => cmd_link_audit(apply),
|
||||
=> cli::graph::cmd_link_impact(&source, &target),
|
||||
GraphCmd::LinkAudit { apply } => cli::graph::cmd_link_audit(apply),
|
||||
GraphCmd::LinkOrphans { min_degree, links_per, sim_threshold }
|
||||
=> cmd_link_orphans(min_degree, links_per, sim_threshold),
|
||||
=> cli::graph::cmd_link_orphans(min_degree, links_per, sim_threshold),
|
||||
GraphCmd::TriangleClose { min_degree, sim_threshold, max_per_hub }
|
||||
=> cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
|
||||
GraphCmd::CapDegree { max_degree } => cmd_cap_degree(max_degree),
|
||||
GraphCmd::NormalizeStrengths { apply } => cmd_normalize_strengths(apply),
|
||||
=> cli::graph::cmd_triangle_close(min_degree, sim_threshold, max_per_hub),
|
||||
GraphCmd::CapDegree { max_degree } => cli::graph::cmd_cap_degree(max_degree),
|
||||
GraphCmd::NormalizeStrengths { apply } => cli::graph::cmd_normalize_strengths(apply),
|
||||
GraphCmd::Differentiate { key, apply }
|
||||
=> cmd_differentiate(key.as_deref(), apply),
|
||||
GraphCmd::Trace { key } => cmd_trace(&key),
|
||||
GraphCmd::Interference { threshold } => cmd_interference(threshold),
|
||||
GraphCmd::Overview => cmd_graph(),
|
||||
GraphCmd::Spectral { k } => cmd_spectral(k),
|
||||
GraphCmd::SpectralSave { k } => cmd_spectral_save(k),
|
||||
=> cli::graph::cmd_differentiate(key.as_deref(), apply),
|
||||
GraphCmd::Trace { key } => cli::graph::cmd_trace(&key),
|
||||
GraphCmd::Interference { threshold } => cli::graph::cmd_interference(threshold),
|
||||
GraphCmd::Overview => cli::graph::cmd_graph(),
|
||||
GraphCmd::Spectral { k } => cli::graph::cmd_spectral(k),
|
||||
GraphCmd::SpectralSave { k } => cli::graph::cmd_spectral_save(k),
|
||||
GraphCmd::SpectralNeighbors { key, n }
|
||||
=> cmd_spectral_neighbors(&key, n),
|
||||
GraphCmd::SpectralPositions { n } => cmd_spectral_positions(n),
|
||||
GraphCmd::SpectralSuggest { n } => cmd_spectral_suggest(n),
|
||||
=> cli::graph::cmd_spectral_neighbors(&key, n),
|
||||
GraphCmd::SpectralPositions { n } => cli::graph::cmd_spectral_positions(n),
|
||||
GraphCmd::SpectralSuggest { n } => cli::graph::cmd_spectral_suggest(n),
|
||||
GraphCmd::Organize { term, threshold, key_only, anchor }
|
||||
=> cmd_organize(&term, threshold, key_only, anchor),
|
||||
=> cli::graph::cmd_organize(&term, threshold, key_only, anchor),
|
||||
},
|
||||
|
||||
// Cursor
|
||||
|
|
@ -1416,17 +1416,6 @@ fn cmd_status() -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_graph() -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
println!("Graph: {} nodes, {} edges, {} communities",
|
||||
g.nodes().len(), g.edge_count(), g.community_count());
|
||||
println!("σ={:.2} α={:.2} gini={:.3} cc={:.4}",
|
||||
g.small_world_sigma(), g.degree_power_law_exponent(),
|
||||
g.degree_gini(), g.avg_clustering_coefficient());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_used(key: &[String]) -> Result<(), String> {
|
||||
if key.is_empty() {
|
||||
return Err("used requires a key".into());
|
||||
|
|
@ -1512,97 +1501,6 @@ fn cmd_gap(description: &[String]) -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_link_orphans(min_deg: usize, links_per: usize, sim_thresh: f32) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
let (orphans, links) = neuro::link_orphans(&mut store, min_deg, links_per, sim_thresh);
|
||||
println!("Linked {} orphans, added {} connections (min_degree={}, links_per={}, sim>{})",
|
||||
orphans, links, min_deg, links_per, sim_thresh);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_cap_degree(max_deg: usize) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
let (hubs, pruned) = store.cap_degree(max_deg)?;
|
||||
store.save()?;
|
||||
println!("Capped {} hubs, pruned {} weak Auto edges (max_degree={})", hubs, pruned, max_deg);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_normalize_strengths(apply: bool) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
let graph = store.build_graph();
|
||||
let strengths = graph.jaccard_strengths();
|
||||
|
||||
// Build a lookup from (source_key, target_key) → new_strength
|
||||
let mut updates: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
|
||||
for (a, b, s) in &strengths {
|
||||
// Store both directions for easy lookup
|
||||
updates.insert((a.clone(), b.clone()), *s);
|
||||
updates.insert((b.clone(), a.clone()), *s);
|
||||
}
|
||||
|
||||
// Stats
|
||||
let mut changed = 0usize;
|
||||
let mut unchanged = 0usize;
|
||||
let mut temporal_skipped = 0usize;
|
||||
let mut delta_sum: f64 = 0.0;
|
||||
|
||||
// Histogram of new strengths
|
||||
let mut buckets = [0usize; 10]; // 0.0-0.1, 0.1-0.2, ...
|
||||
|
||||
for rel in &mut store.relations {
|
||||
if rel.deleted { continue; }
|
||||
|
||||
// Skip implicit temporal edges (strength 1.0, Auto type)
|
||||
if rel.strength == 1.0 && rel.rel_type == store::RelationType::Auto {
|
||||
temporal_skipped += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(&new_s) = updates.get(&(rel.source_key.clone(), rel.target_key.clone())) {
|
||||
let old_s = rel.strength;
|
||||
let delta = (new_s - old_s).abs();
|
||||
if delta > 0.001 {
|
||||
delta_sum += delta as f64;
|
||||
if apply {
|
||||
rel.strength = new_s;
|
||||
}
|
||||
changed += 1;
|
||||
} else {
|
||||
unchanged += 1;
|
||||
}
|
||||
let bucket = ((new_s * 10.0) as usize).min(9);
|
||||
buckets[bucket] += 1;
|
||||
}
|
||||
}
|
||||
|
||||
println!("Normalize link strengths (Jaccard similarity)");
|
||||
println!(" Total edges in graph: {}", strengths.len());
|
||||
println!(" Would change: {}", changed);
|
||||
println!(" Unchanged: {}", unchanged);
|
||||
println!(" Temporal (skipped): {}", temporal_skipped);
|
||||
if changed > 0 {
|
||||
println!(" Avg delta: {:.3}", delta_sum / changed as f64);
|
||||
}
|
||||
println!();
|
||||
println!(" Strength distribution:");
|
||||
for (i, &count) in buckets.iter().enumerate() {
|
||||
let lo = i as f32 / 10.0;
|
||||
let hi = lo + 0.1;
|
||||
let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
|
||||
println!(" {:.1}-{:.1}: {:5} {}", lo, hi, count, bar);
|
||||
}
|
||||
|
||||
if apply {
|
||||
store.save()?;
|
||||
println!("\nApplied {} strength updates.", changed);
|
||||
} else {
|
||||
println!("\nDry run. Use --apply to write changes.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option<String>) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
|
||||
|
|
@ -1638,19 +1536,6 @@ fn cmd_params() -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_link(key: &[String]) -> Result<(), String> {
|
||||
if key.is_empty() {
|
||||
return Err("link requires a key".into());
|
||||
}
|
||||
let key = key.join(" ");
|
||||
let store = store::Store::load()?;
|
||||
let resolved = store.resolve_key(&key)?;
|
||||
let g = store.build_graph();
|
||||
println!("Neighbors of '{}':", resolved);
|
||||
query::run_query(&store, &g,
|
||||
&format!("neighbors('{}') | select strength,clustering_coefficient", resolved))
|
||||
}
|
||||
|
||||
fn cmd_replay_queue(count: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let queue = neuro::replay_queue(&store, count);
|
||||
|
|
@ -1675,16 +1560,6 @@ fn cmd_consolidate_full() -> Result<(), String> {
|
|||
consolidate::consolidate_full(&mut store)
|
||||
}
|
||||
|
||||
fn cmd_triangle_close(min_degree: usize, sim_threshold: f32, max_per_hub: usize) -> Result<(), String> {
|
||||
println!("Triangle closure: min_degree={}, sim_threshold={}, max_per_hub={}",
|
||||
min_degree, sim_threshold, max_per_hub);
|
||||
|
||||
let mut store = store::Store::load()?;
|
||||
let (hubs, added) = neuro::triangle_close(&mut store, min_degree, sim_threshold, max_per_hub);
|
||||
println!("\nProcessed {} hubs, added {} lateral links", hubs, added);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_daily_check() -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let report = neuro::daily_check(&store);
|
||||
|
|
@ -1692,68 +1567,6 @@ fn cmd_daily_check() -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Manually add a bidirectional link between two nodes.
///
/// Resolves both keys, refines the target to its best-matching section,
/// skips if an equivalent (undeleted) link already exists in either
/// direction, and derives the initial strength from Jaccard neighborhood
/// similarity. `reason` is free text recorded only in the output message.
fn cmd_link_add(source: &str, target: &str, reason: &[String]) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let source = store.resolve_key(source)?;
    let target = store.resolve_key(target)?;
    let reason = reason.join(" ");

    // Refine target to best-matching section (must happen before the UUID
    // lookup and duplicate check, since both use the refined key).
    let source_content = store.nodes.get(&source)
        .map(|n| n.content.as_str()).unwrap_or("");
    let target = neuro::refine_target(&store, source_content, &target);

    // Find UUIDs for both endpoints; fail loudly if either is missing.
    let source_uuid = store.nodes.get(&source)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("source not found: {}", source))?;
    let target_uuid = store.nodes.get(&target)
        .map(|n| n.uuid)
        .ok_or_else(|| format!("target not found: {}", target))?;

    // Check for existing link in either direction, ignoring deleted relations.
    let exists = store.relations.iter().any(|r|
        !r.deleted &&
        ((r.source_key == source && r.target_key == target) ||
         (r.source_key == target && r.target_key == source)));
    if exists {
        println!("Link already exists: {} ↔ {}", source, target);
        return Ok(());
    }

    // Compute initial strength from Jaccard neighborhood similarity,
    // scaled by 3 and clamped to [0.1, 1.0] so even dissimilar pairs get
    // a nonzero starting strength.
    let graph = store.build_graph();
    let jaccard = graph.jaccard(&source, &target);
    let strength = (jaccard * 3.0).clamp(0.1, 1.0);

    let rel = store::new_relation(
        source_uuid, target_uuid,
        store::RelationType::Link, strength,
        &source, &target,
    );
    store.add_relation(rel)?;
    store.save()?;
    println!("Linked: {} → {} (strength={:.2}, {})", source, target, strength, reason);
    Ok(())
}
|
||||
|
||||
fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let source = store.resolve_key(source)?;
|
||||
let target = store.resolve_key(target)?;
|
||||
let g = store.build_graph();
|
||||
|
||||
let impact = g.link_impact(&source, &target);
|
||||
|
||||
println!("Link impact: {} → {}", source, target);
|
||||
println!(" Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg);
|
||||
println!(" Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community);
|
||||
println!(" ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target);
|
||||
println!(" ΔGini: {:+.6}", impact.delta_gini);
|
||||
println!(" Assessment: {}", impact.assessment);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Apply links from a single agent result JSON file.
|
||||
/// Returns (links_applied, errors).
|
||||
fn apply_agent_file(
|
||||
|
|
@ -1969,293 +1782,6 @@ fn cmd_apply_consolidation(do_apply: bool, report_file: Option<&str>) -> Result<
|
|||
consolidate::apply_consolidation(&mut store, do_apply, report_file)
|
||||
}
|
||||
|
||||
/// Redistribute a file-level hub's links onto its best-matching sections.
///
/// With a key: preview (or, with `do_apply`, perform) the link moves for
/// that hub. Without a key: list all hubs eligible for differentiation.
fn cmd_differentiate(key_arg: Option<&str>, do_apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;

    if let Some(key) = key_arg {
        let resolved = store.resolve_key(key)?;
        // None here means the node is not a file-level hub with sections.
        let moves = neuro::differentiate_hub(&store, &resolved)
            .ok_or_else(|| format!("'{}' is not a file-level hub with sections", resolved))?;

        // Group by target section for display (BTreeMap for stable,
        // alphabetical section ordering).
        let mut by_section: std::collections::BTreeMap<String, Vec<&neuro::LinkMove>> =
            std::collections::BTreeMap::new();
        for mv in &moves {
            by_section.entry(mv.to_section.clone()).or_default().push(mv);
        }

        println!("Hub '{}' — {} links to redistribute across {} sections\n",
            resolved, moves.len(), by_section.len());

        for (section, section_moves) in &by_section {
            println!(" {} ({} links):", section, section_moves.len());
            // Show at most 5 moves per section to keep the preview short.
            for mv in section_moves.iter().take(5) {
                println!(" [{:.3}] {} — {}", mv.similarity,
                    mv.neighbor_key, mv.neighbor_snippet);
            }
            if section_moves.len() > 5 {
                println!(" ... and {} more", section_moves.len() - 5);
            }
        }

        // Preview-only unless --apply was passed.
        if !do_apply {
            println!("\nTo apply: poc-memory differentiate {} --apply", resolved);
            return Ok(());
        }

        let (applied, skipped) = neuro::apply_differentiation(&mut store, &moves);
        store.save()?;
        println!("\nApplied: {} Skipped: {}", applied, skipped);
    } else {
        // No key: list every hub that could be differentiated.
        let hubs = neuro::find_differentiable_hubs(&store);
        if hubs.is_empty() {
            println!("No file-level hubs with sections found above threshold");
            return Ok(());
        }

        println!("Differentiable hubs (file-level nodes with sections):\n");
        for (key, degree, sections) in &hubs {
            println!(" {:40} deg={:3} sections={}", key, degree, sections);
        }
        println!("\nRun: poc-memory differentiate KEY to preview a specific hub");
    }

    Ok(())
}
|
||||
|
||||
fn cmd_link_audit(apply: bool) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
let stats = audit::link_audit(&mut store, apply)?;
|
||||
println!("\n{}", "=".repeat(60));
|
||||
println!("Link audit complete:");
|
||||
println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
|
||||
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
|
||||
println!("{}", "=".repeat(60));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Trace a node: print its metadata, a content preview, and its neighbors
/// grouped by node type (weekly/monthly digests, daily digests, session
/// entries, semantic links), ending with per-group counts.
fn cmd_trace(key: &[String]) -> Result<(), String> {
    if key.is_empty() {
        return Err("trace requires a key".into());
    }
    let key = key.join(" ");
    let store = store::Store::load()?;
    let resolved = store.resolve_key(&key)?;
    let g = store.build_graph();

    let node = store.nodes.get(&resolved)
        .ok_or_else(|| format!("Node not found: {}", resolved))?;

    // Display the node itself
    println!("=== {} ===", resolved);
    println!("Type: {:?} Weight: {:.2}",
        node.node_type, node.weight);
    if !node.source_ref.is_empty() {
        println!("Source: {}", node.source_ref);
    }

    // Show content preview (first 200 chars)
    let preview = util::truncate(&node.content, 200, "...");
    println!("\n{}\n", preview);

    // Walk neighbors, grouped by node type. Weekly and monthly digests
    // share one bucket.
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session = Vec::new();
    let mut episodic_daily = Vec::new();
    let mut episodic_weekly = Vec::new();
    let mut semantic = Vec::new();

    for (n, strength) in &neighbors {
        if let Some(nnode) = store.nodes.get(n.as_str()) {
            let entry = (n.as_str(), *strength, nnode);
            match nnode.node_type {
                store::NodeType::EpisodicSession =>
                    episodic_session.push(entry),
                store::NodeType::EpisodicDaily =>
                    episodic_daily.push(entry),
                store::NodeType::EpisodicWeekly
                | store::NodeType::EpisodicMonthly =>
                    episodic_weekly.push(entry),
                store::NodeType::Semantic =>
                    semantic.push(entry),
            }
        }
    }

    // Digests: preview is the first line of content.
    if !episodic_weekly.is_empty() {
        println!("Weekly digests:");
        for (k, s, n) in &episodic_weekly {
            let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!(" [{:.2}] {} — {}", s, k, preview);
        }
    }

    if !episodic_daily.is_empty() {
        println!("Daily digests:");
        for (k, s, n) in &episodic_daily {
            let preview = util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            println!(" [{:.2}] {} — {}", s, k, preview);
        }
    }

    // Session entries: preview skips empty lines and HTML-comment headers.
    if !episodic_session.is_empty() {
        println!("Session entries:");
        for (k, s, n) in &episodic_session {
            let preview = util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            println!(" [{:.2}] {}", s, k);
            if !n.source_ref.is_empty() {
                println!(" ↳ source: {}", n.source_ref);
            }
            println!(" {}", preview);
        }
    }

    // Semantic links: key and strength only, no content preview.
    if !semantic.is_empty() {
        println!("Semantic links:");
        for (k, s, _) in &semantic {
            println!(" [{:.2}] {}", s, k);
        }
    }

    println!("\nLinks: {} session, {} daily, {} weekly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len());

    Ok(())
}
|
||||
|
||||
fn cmd_spectral(k: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = graph::build_graph(&store);
|
||||
let result = spectral::decompose(&g, k);
|
||||
spectral::print_summary(&result, &g);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_spectral_save(k: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = graph::build_graph(&store);
|
||||
let result = spectral::decompose(&g, k);
|
||||
let emb = spectral::to_embedding(&result);
|
||||
spectral::save_embedding(&emb)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_spectral_neighbors(key: &str, n: usize) -> Result<(), String> {
|
||||
let emb = spectral::load_embedding()?;
|
||||
|
||||
let dims = spectral::dominant_dimensions(&emb, &[key]);
|
||||
println!("Node: {} (embedding: {} dims)", key, emb.dims);
|
||||
println!("Top spectral axes:");
|
||||
for &(d, loading) in dims.iter().take(5) {
|
||||
println!(" axis {:<2} (λ={:.4}): loading={:.5}", d, emb.eigenvalues[d], loading);
|
||||
}
|
||||
|
||||
println!("\nNearest neighbors in spectral space:");
|
||||
let neighbors = spectral::nearest_neighbors(&emb, key, n);
|
||||
for (i, (k, dist)) in neighbors.iter().enumerate() {
|
||||
println!(" {:>2}. {:.5} {}", i + 1, dist, k);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_spectral_positions(n: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let emb = spectral::load_embedding()?;
|
||||
|
||||
let g = store.build_graph();
|
||||
let communities = g.communities().clone();
|
||||
|
||||
let positions = spectral::analyze_positions(&emb, &communities);
|
||||
|
||||
println!("Spectral position analysis — {} nodes", positions.len());
|
||||
println!(" outlier: dist_to_center / median (>1 = unusual position)");
|
||||
println!(" bridge: dist_to_center / dist_to_nearest_other_community");
|
||||
println!();
|
||||
|
||||
let mut bridges: Vec<&spectral::SpectralPosition> = Vec::new();
|
||||
let mut outliers: Vec<&spectral::SpectralPosition> = Vec::new();
|
||||
|
||||
for pos in positions.iter().take(n) {
|
||||
match spectral::classify_position(pos) {
|
||||
"bridge" => bridges.push(pos),
|
||||
_ => outliers.push(pos),
|
||||
}
|
||||
}
|
||||
|
||||
if !bridges.is_empty() {
|
||||
println!("=== Bridges (between communities) ===");
|
||||
for pos in &bridges {
|
||||
println!(" [{:.2}/{:.2}] c{} → c{} {}",
|
||||
pos.outlier_score, pos.bridge_score,
|
||||
pos.community, pos.nearest_community, pos.key);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
println!("=== Top outliers (far from own community center) ===");
|
||||
for pos in positions.iter().take(n) {
|
||||
let class = spectral::classify_position(pos);
|
||||
println!(" {:>10} outlier={:.2} bridge={:.2} c{:<3} {}",
|
||||
class, pos.outlier_score, pos.bridge_score,
|
||||
pos.community, pos.key);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Suggest link candidates: the `n` closest unlinked pairs in spectral
/// space, restricted to well-connected nodes (degree >= 3), annotated with
/// community membership and a cross-community marker.
fn cmd_spectral_suggest(n: usize) -> Result<(), String> {
    let store = store::Store::load()?;
    let emb = spectral::load_embedding()?;
    let g = store.build_graph();
    let communities = g.communities();

    // Only consider nodes with enough links for the embedding position to
    // be meaningful.
    let min_degree = 3;
    let well_connected: std::collections::HashSet<&str> = emb.coords.keys()
        .filter(|k| g.degree(k) >= min_degree)
        .map(|k| k.as_str())
        .collect();

    // Build a reduced embedding containing only well-connected nodes.
    let filtered_emb = spectral::SpectralEmbedding {
        dims: emb.dims,
        eigenvalues: emb.eigenvalues.clone(),
        coords: emb.coords.iter()
            .filter(|(k, _)| well_connected.contains(k.as_str()))
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect(),
    };

    // Symmetric set of already-linked pairs so they are excluded from
    // suggestions.
    // NOTE(review): unlike cmd_link_add's duplicate check, this does not
    // skip relations with r.deleted set, so deleted links still suppress
    // suggestions — confirm whether that is intentional.
    let mut linked: std::collections::HashSet<(String, String)> =
        std::collections::HashSet::new();
    for rel in &store.relations {
        linked.insert((rel.source_key.clone(), rel.target_key.clone()));
        linked.insert((rel.target_key.clone(), rel.source_key.clone()));
    }

    // Progress note goes to stderr so stdout stays machine-readable.
    eprintln!("Searching {} well-connected nodes (degree >= {})...",
        filtered_emb.coords.len(), min_degree);
    let pairs = spectral::unlinked_neighbors(&filtered_emb, &linked, n);

    println!("{} closest unlinked pairs (candidates for extractor agents):", pairs.len());
    for (i, (k1, k2, dist)) in pairs.iter().enumerate() {
        let c1 = communities.get(k1)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        let c2 = communities.get(k2)
            .map(|c| format!("c{}", c))
            .unwrap_or_else(|| "?".into());
        let cross = if c1 != c2 { " [cross-community]" } else { "" };
        println!(" {:>2}. dist={:.4} {} ({}) ↔ {} ({}){}",
            i + 1, dist, k1, c1, k2, c2, cross);
    }

    Ok(())
}
|
||||
|
||||
fn cmd_list_keys(pattern: Option<&str>) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
|
|
@ -2873,144 +2399,6 @@ fn extract_title(content: &str) -> String {
|
|||
String::from("(untitled)")
|
||||
}
|
||||
|
||||
fn cmd_organize(term: &str, threshold: f32, key_only: bool, create_anchor: bool) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
|
||||
// Step 1: find all non-deleted nodes matching the term
|
||||
let term_lower = term.to_lowercase();
|
||||
let mut topic_nodes: Vec<(String, String)> = Vec::new(); // (key, content)
|
||||
|
||||
// Prefixes that indicate ephemeral/generated nodes to skip
|
||||
let skip_prefixes = ["journal#", "daily-", "weekly-", "monthly-", "_",
|
||||
"deep-index#", "facts-", "irc-history#"];
|
||||
|
||||
for (key, node) in &store.nodes {
|
||||
if node.deleted { continue; }
|
||||
let key_matches = key.to_lowercase().contains(&term_lower);
|
||||
let content_matches = !key_only && node.content.to_lowercase().contains(&term_lower);
|
||||
if !key_matches && !content_matches { continue; }
|
||||
if skip_prefixes.iter().any(|p| key.starts_with(p)) { continue; }
|
||||
topic_nodes.push((key.clone(), node.content.clone()));
|
||||
}
|
||||
|
||||
if topic_nodes.is_empty() {
|
||||
println!("No topic nodes found matching '{}'", term);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
topic_nodes.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
|
||||
println!("=== Organize: '{}' ===", term);
|
||||
println!("Found {} topic nodes:\n", topic_nodes.len());
|
||||
for (key, content) in &topic_nodes {
|
||||
let lines = content.lines().count();
|
||||
let words = content.split_whitespace().count();
|
||||
println!(" {:60} {:>4} lines {:>5} words", key, lines, words);
|
||||
}
|
||||
|
||||
// Step 2: pairwise similarity
|
||||
let pairs = similarity::pairwise_similar(&topic_nodes, threshold);
|
||||
|
||||
if pairs.is_empty() {
|
||||
println!("\nNo similar pairs above threshold {:.2}", threshold);
|
||||
} else {
|
||||
println!("\n=== Similar pairs (cosine > {:.2}) ===\n", threshold);
|
||||
for (a, b, sim) in &pairs {
|
||||
let a_words = topic_nodes.iter().find(|(k,_)| k == a)
|
||||
.map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
|
||||
let b_words = topic_nodes.iter().find(|(k,_)| k == b)
|
||||
.map(|(_,c)| c.split_whitespace().count()).unwrap_or(0);
|
||||
|
||||
println!(" [{:.3}] {} ({} words) ↔ {} ({} words)", sim, a, a_words, b, b_words);
|
||||
}
|
||||
}
|
||||
|
||||
// Step 3: check connectivity within cluster
|
||||
let g = store.build_graph();
|
||||
println!("=== Connectivity ===\n");
|
||||
|
||||
// Pick hub by intra-cluster connectivity, not overall degree
|
||||
let cluster_keys: std::collections::HashSet<&str> = topic_nodes.iter()
|
||||
.filter(|(k,_)| store.nodes.contains_key(k.as_str()))
|
||||
.map(|(k,_)| k.as_str())
|
||||
.collect();
|
||||
|
||||
let mut best_hub: Option<(&str, usize)> = None;
|
||||
for key in &cluster_keys {
|
||||
let intra_degree = g.neighbor_keys(key).iter()
|
||||
.filter(|n| cluster_keys.contains(*n))
|
||||
.count();
|
||||
if best_hub.is_none() || intra_degree > best_hub.unwrap().1 {
|
||||
best_hub = Some((key, intra_degree));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((hub, deg)) = best_hub {
|
||||
println!(" Hub: {} (degree {})", hub, deg);
|
||||
let hub_nbrs = g.neighbor_keys(hub);
|
||||
|
||||
let mut unlinked = Vec::new();
|
||||
for (key, _) in &topic_nodes {
|
||||
if key == hub { continue; }
|
||||
if store.nodes.get(key.as_str()).is_none() { continue; }
|
||||
if !hub_nbrs.contains(key.as_str()) {
|
||||
unlinked.push(key.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if unlinked.is_empty() {
|
||||
println!(" All cluster nodes connected to hub ✓");
|
||||
} else {
|
||||
println!(" NOT linked to hub:");
|
||||
for key in &unlinked {
|
||||
println!(" {} → needs link to {}", key, hub);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Step 4: anchor node
|
||||
if create_anchor {
|
||||
println!("\n=== Anchor node ===\n");
|
||||
if store.nodes.contains_key(term) && !store.nodes[term].deleted {
|
||||
println!(" Anchor '{}' already exists ✓", term);
|
||||
} else {
|
||||
let desc = format!("Anchor node for '{}' search term", term);
|
||||
store.upsert(term, &desc)?;
|
||||
let anchor_uuid = store.nodes.get(term).unwrap().uuid;
|
||||
for (key, _) in &topic_nodes {
|
||||
if store.nodes.get(key.as_str()).is_none() { continue; }
|
||||
let target_uuid = store.nodes[key.as_str()].uuid;
|
||||
let rel = store::new_relation(
|
||||
anchor_uuid, target_uuid,
|
||||
store::RelationType::Link, 0.8,
|
||||
term, key,
|
||||
);
|
||||
store.add_relation(rel)?;
|
||||
}
|
||||
println!(" Created anchor '{}' with {} links", term, topic_nodes.len());
|
||||
}
|
||||
}
|
||||
|
||||
store.save()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_interference(threshold: f32) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
let pairs = neuro::detect_interference(&store, &g, threshold);
|
||||
|
||||
if pairs.is_empty() {
|
||||
println!("No interfering pairs above threshold {:.2}", threshold);
|
||||
} else {
|
||||
println!("Interfering pairs (similarity > {:.2}, different communities):", threshold);
|
||||
for (a, b, sim) in &pairs {
|
||||
println!(" [{:.3}] {} ↔ {}", sim, a, b);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_query(expr: &[String]) -> Result<(), String> {
|
||||
if expr.is_empty() {
|
||||
return Err("query requires an expression (try: poc-memory query --help)".into());
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue