forked from kent/consciousness
CLI: more RPC conversions, delete obsolete commands
- cmd_health: use graph_health RPC
- cmd_topology: new command using graph_topology RPC
- cmd_status: use graph_topology RPC (type counts folded into topology)
- cmd_run_agent: query resolution via memory_query RPC
- Delete cmd_bulk_rename (one-time migration, obsolete)
- Delete cmd_replay_queue, cmd_digest_links (unconscious agents handle)
- format_topology_header: add type counts, takes &Store now

Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
parent
1f6bfb5915
commit
2ab4aef19f
6 changed files with 58 additions and 173 deletions
|
|
@@ -568,7 +568,7 @@ async fn graph_topology() -> Result<String> {
|
|||
let arc = cached_store().await?;
|
||||
let store = arc.lock().await;
|
||||
let graph = store.build_graph();
|
||||
Ok(crate::subconscious::prompts::format_topology_header(&graph))
|
||||
Ok(crate::subconscious::prompts::format_topology_header(&store, &graph))
|
||||
}
|
||||
|
||||
async fn graph_health() -> Result<String> {
|
||||
|
|
|
|||
116
src/cli/admin.rs
116
src/cli/admin.rs
|
|
@@ -61,73 +61,6 @@ pub fn cmd_init() -> Result<(), String> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_bulk_rename(from: &str, to: &str, apply: bool) -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
|
||||
// Find all keys that need renaming
|
||||
let renames: Vec<(String, String)> = store.nodes.keys()
|
||||
.filter(|k| k.contains(from))
|
||||
.map(|k| (k.clone(), k.replace(from, to)))
|
||||
.collect();
|
||||
|
||||
// Check for collisions
|
||||
let existing: std::collections::HashSet<&String> = store.nodes.keys().collect();
|
||||
let mut collisions = 0;
|
||||
for (old, new) in &renames {
|
||||
if existing.contains(new) && old != new {
|
||||
eprintln!("COLLISION: {} -> {} (target exists)", old, new);
|
||||
collisions += 1;
|
||||
}
|
||||
}
|
||||
|
||||
println!("Bulk rename '{}' -> '{}'", from, to);
|
||||
println!(" Keys to rename: {}", renames.len());
|
||||
println!(" Collisions: {}", collisions);
|
||||
|
||||
if collisions > 0 {
|
||||
return Err(format!("{} collisions — aborting", collisions));
|
||||
}
|
||||
|
||||
if !apply {
|
||||
// Show a sample
|
||||
for (old, new) in renames.iter().take(10) {
|
||||
println!(" {} -> {}", old, new);
|
||||
}
|
||||
if renames.len() > 10 {
|
||||
println!(" ... and {} more", renames.len() - 10);
|
||||
}
|
||||
println!("\nDry run. Use --apply to execute.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Apply renames using rename_node() which properly appends to capnp logs.
|
||||
// Process in batches to avoid holding the lock too long.
|
||||
let mut renamed_count = 0;
|
||||
let mut errors = 0;
|
||||
let total = renames.len();
|
||||
for (i, (old_key, new_key)) in renames.iter().enumerate() {
|
||||
match store.rename_node(old_key, new_key) {
|
||||
Ok(()) => renamed_count += 1,
|
||||
Err(e) => {
|
||||
eprintln!(" RENAME ERROR: {} -> {}: {}", old_key, new_key, e);
|
||||
errors += 1;
|
||||
}
|
||||
}
|
||||
if (i + 1) % 1000 == 0 {
|
||||
println!(" {}/{} ({} errors)", i + 1, total, errors);
|
||||
}
|
||||
}
|
||||
store.save()?;
|
||||
println!("Renamed {} nodes ({} errors).", renamed_count, errors);
|
||||
|
||||
// Run fsck to verify
|
||||
println!("\nRunning fsck...");
|
||||
drop(store);
|
||||
cmd_fsck()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_fsck() -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
|
||||
|
|
@@ -396,10 +329,20 @@ pub fn cmd_dedup(apply: bool) -> Result<(), String> {
|
|||
}
|
||||
|
||||
pub fn cmd_health() -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
let report = crate::graph::health_report(&g, &store);
|
||||
print!("{}", report);
|
||||
let result = crate::mcp_server::memory_rpc(
|
||||
"graph_health",
|
||||
serde_json::json!({}),
|
||||
).map_err(|e| e.to_string())?;
|
||||
print!("{}", result);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_topology() -> Result<(), String> {
|
||||
let result = crate::mcp_server::memory_rpc(
|
||||
"graph_topology",
|
||||
serde_json::json!({}),
|
||||
).map_err(|e| e.to_string())?;
|
||||
print!("{}", result);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
@@ -480,31 +423,10 @@ pub fn cmd_export(files: &[String], export_all: bool) -> Result<(), String> {
|
|||
}
|
||||
|
||||
pub fn cmd_status() -> Result<(), String> {
|
||||
// TUI moved to consciousness binary (F4 unconscious screen)
|
||||
|
||||
let store = crate::store::Store::load()?;
|
||||
let g = store.build_graph();
|
||||
|
||||
let mut type_counts = std::collections::HashMap::new();
|
||||
for node in store.nodes.values() {
|
||||
*type_counts.entry(format!("{:?}", node.node_type)).or_insert(0usize) += 1;
|
||||
}
|
||||
let mut types: Vec<_> = type_counts.iter().collect();
|
||||
types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
|
||||
|
||||
println!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len());
|
||||
print!("Types:");
|
||||
for (t, c) in &types {
|
||||
let label = match t.as_str() {
|
||||
"Semantic" => "semantic",
|
||||
"EpisodicSession" | "EpisodicDaily" | "EpisodicWeekly" | "EpisodicMonthly"
|
||||
=> "episodic",
|
||||
_ => t,
|
||||
};
|
||||
print!(" {}={}", label, c);
|
||||
}
|
||||
println!();
|
||||
println!("Graph edges: {} Communities: {}",
|
||||
g.edge_count(), g.community_count());
|
||||
let result = crate::mcp_server::memory_rpc(
|
||||
"graph_topology",
|
||||
serde_json::json!({}),
|
||||
).map_err(|e| e.to_string())?;
|
||||
print!("{}", result);
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,6 @@
|
|||
// cli/agent.rs — agent subcommand handlers
|
||||
|
||||
use crate::store;
|
||||
use crate::subconscious::digest;
|
||||
|
||||
pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option<&str>, dry_run: bool, _local: bool, state_dir: Option<&str>) -> Result<(), String> {
|
||||
// Mark as agent so tool calls (e.g. poc-memory render) don't
|
||||
|
|
@@ -19,19 +18,22 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
|
|||
unsafe { std::env::set_var("POC_MEMORY_DRY_RUN", "1"); }
|
||||
}
|
||||
|
||||
let mut store = store::Store::load()?;
|
||||
|
||||
// Resolve targets: explicit --target, --query, or agent's default query
|
||||
let resolved_targets: Vec<String> = if !target.is_empty() {
|
||||
target.to_vec()
|
||||
} else if let Some(q) = query {
|
||||
let graph = store.build_graph();
|
||||
let stages = crate::query_parser::parse_stages(q)?;
|
||||
let results = crate::search::run_query(&stages, vec![], &graph, &store, false, count);
|
||||
if results.is_empty() {
|
||||
// Use RPC to resolve query
|
||||
let result = crate::mcp_server::memory_rpc(
|
||||
"memory_query",
|
||||
serde_json::json!({"query": format!("{} | limit:{}", q, count)}),
|
||||
).map_err(|e| e.to_string())?;
|
||||
let keys: Vec<String> = result.lines()
|
||||
.filter(|l| !l.is_empty() && *l != "no results")
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
if keys.is_empty() {
|
||||
return Err(format!("query returned no results: {}", q));
|
||||
}
|
||||
let keys: Vec<String> = results.into_iter().map(|(k, _)| k).collect();
|
||||
println!("[{}] query matched {} nodes", agent, keys.len());
|
||||
keys
|
||||
} else {
|
||||
|
|
@@ -41,7 +43,7 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
|
|||
if !resolved_targets.is_empty() {
|
||||
for (i, key) in resolved_targets.iter().enumerate() {
|
||||
println!("[{}] [{}/{}] {}", agent, i + 1, resolved_targets.len(), key);
|
||||
if i > 0 { store = store::Store::load()?; }
|
||||
let mut store = store::Store::load()?;
|
||||
if let Err(e) = crate::agent::oneshot::run_one_agent(
|
||||
&mut store, agent, count, Some(&[key.clone()]),
|
||||
) {
|
||||
|
|
@@ -50,6 +52,7 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
|
|||
}
|
||||
} else {
|
||||
// Local execution (--local, --debug, dry-run, or daemon unavailable)
|
||||
let mut store = store::Store::load()?;
|
||||
crate::agent::oneshot::run_one_agent(
|
||||
&mut store, agent, count, None,
|
||||
)?;
|
||||
|
|
@@ -57,37 +60,3 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_replay_queue(count: usize) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let queue = crate::neuro::replay_queue(&store, count);
|
||||
println!("Replay queue ({} items):", queue.len());
|
||||
for (i, item) in queue.iter().enumerate() {
|
||||
println!(" {:2}. [{:.3}] {:>10} {} (interval={}d, emotion={:.1}, spectral={:.1})",
|
||||
i + 1, item.priority, item.classification, item.key,
|
||||
item.interval_days, item.emotion, item.outlier_score);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
|
||||
let store = store::Store::load()?;
|
||||
let links = digest::parse_all_digest_links(&store);
|
||||
drop(store);
|
||||
println!("Found {} unique links from digest nodes", links.len());
|
||||
|
||||
if !do_apply {
|
||||
for (i, link) in links.iter().enumerate() {
|
||||
println!(" {:3}. {} → {}", i + 1, link.source, link.target);
|
||||
if !link.reason.is_empty() {
|
||||
println!(" ({})", &link.reason[..link.reason.floor_char_boundary(link.reason.len().min(80))]);
|
||||
}
|
||||
}
|
||||
println!("\nTo apply: poc-memory digest-links --apply");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut store = store::Store::load()?;
|
||||
let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
|
||||
println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
31
src/main.rs
31
src/main.rs
|
|
@@ -274,13 +274,6 @@ enum GraphCmd {
|
|||
|
||||
#[derive(Subcommand)]
|
||||
enum AgentCmd {
|
||||
/// Parse and apply links from digest nodes
|
||||
#[command(name = "digest-links")]
|
||||
DigestLinks {
|
||||
/// Apply the links (default: dry run)
|
||||
#[arg(long)]
|
||||
apply: bool,
|
||||
},
|
||||
/// Run a single agent by name
|
||||
Run {
|
||||
/// Agent name (e.g. observation, linker, distill)
|
||||
|
|
@@ -304,13 +297,6 @@ enum AgentCmd {
|
|||
#[arg(long)]
|
||||
state_dir: Option<String>,
|
||||
},
|
||||
/// Show spaced repetition replay queue
|
||||
#[command(name = "replay-queue")]
|
||||
ReplayQueue {
|
||||
/// Number of items to show
|
||||
#[arg(long, default_value_t = 10)]
|
||||
count: usize,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
|
|
@@ -319,6 +305,8 @@ enum AdminCmd {
|
|||
Init,
|
||||
/// Report graph metrics (CC, communities, small-world)
|
||||
Health,
|
||||
/// Show graph topology with hub warnings
|
||||
Topology,
|
||||
/// Run consistency checks and repair
|
||||
Fsck,
|
||||
/// Find and merge duplicate nodes (same key, multiple UUIDs)
|
||||
|
|
@@ -327,17 +315,6 @@ enum AdminCmd {
|
|||
#[arg(long)]
|
||||
apply: bool,
|
||||
},
|
||||
/// Bulk rename: replace a character in all keys
|
||||
#[command(name = "bulk-rename")]
|
||||
BulkRename {
|
||||
/// Character to replace
|
||||
from: String,
|
||||
/// Replacement character
|
||||
to: String,
|
||||
/// Apply changes (default: dry run)
|
||||
#[arg(long)]
|
||||
apply: bool,
|
||||
},
|
||||
/// Brief metrics check (for cron/notifications)
|
||||
#[command(name = "daily-check")]
|
||||
DailyCheck,
|
||||
|
|
@@ -471,10 +448,8 @@ impl Run for GraphCmd {
|
|||
impl Run for AgentCmd {
|
||||
fn run(self) -> Result<(), String> {
|
||||
match self {
|
||||
Self::DigestLinks { apply } => cli::agent::cmd_digest_links(apply),
|
||||
Self::Run { agent, count, target, query, dry_run, local, state_dir }
|
||||
=> cli::agent::cmd_run_agent(&agent, count, &target, query.as_deref(), dry_run, local, state_dir.as_deref()),
|
||||
Self::ReplayQueue { count } => cli::agent::cmd_replay_queue(count),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@@ -484,9 +459,9 @@ impl Run for AdminCmd {
|
|||
match self {
|
||||
Self::Init => cli::admin::cmd_init(),
|
||||
Self::Health => cli::admin::cmd_health(),
|
||||
Self::Topology => cli::admin::cmd_topology(),
|
||||
Self::Fsck => cli::admin::cmd_fsck(),
|
||||
Self::Dedup { apply } => cli::admin::cmd_dedup(apply),
|
||||
Self::BulkRename { from, to, apply } => cli::admin::cmd_bulk_rename(&from, &to, apply),
|
||||
Self::DailyCheck => cli::admin::cmd_daily_check(),
|
||||
Self::Import { files } => cli::admin::cmd_import(&files),
|
||||
Self::Export { files, all } => cli::admin::cmd_export(&files, all),
|
||||
|
|
|
|||
|
|
@@ -209,7 +209,7 @@ fn resolve(
|
|||
) -> Option<Resolved> {
|
||||
match name {
|
||||
"topology" => Some(Resolved {
|
||||
text: super::prompts::format_topology_header(graph),
|
||||
text: super::prompts::format_topology_header(store, graph),
|
||||
keys: vec![],
|
||||
}),
|
||||
|
||||
|
|
|
|||
|
|
@@ -20,7 +20,7 @@ pub struct AgentBatch {
|
|||
pub node_keys: Vec<String>,
|
||||
}
|
||||
|
||||
pub fn format_topology_header(graph: &Graph) -> String {
|
||||
pub fn format_topology_header(store: &Store, graph: &Graph) -> String {
|
||||
let sigma = graph.small_world_sigma();
|
||||
let alpha = graph.degree_power_law_exponent();
|
||||
let gini = graph.degree_gini();
|
||||
|
|
@@ -28,6 +28,25 @@ pub fn format_topology_header(graph: &Graph) -> String {
|
|||
let n = graph.nodes().len();
|
||||
let e = graph.edge_count();
|
||||
|
||||
// Type counts
|
||||
let mut type_counts: std::collections::HashMap<&str, usize> = std::collections::HashMap::new();
|
||||
for node in store.nodes.values() {
|
||||
let label = match node.node_type {
|
||||
crate::store::NodeType::Semantic => "semantic",
|
||||
crate::store::NodeType::EpisodicSession
|
||||
| crate::store::NodeType::EpisodicDaily
|
||||
| crate::store::NodeType::EpisodicWeekly
|
||||
| crate::store::NodeType::EpisodicMonthly => "episodic",
|
||||
};
|
||||
*type_counts.entry(label).or_default() += 1;
|
||||
}
|
||||
let mut types: Vec<_> = type_counts.iter().collect();
|
||||
types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
|
||||
let type_str: String = types.iter()
|
||||
.map(|(t, c)| format!("{}={}", t, c))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
|
||||
// Identify saturated hubs — nodes with degree well above threshold
|
||||
let threshold = graph.hub_threshold();
|
||||
let mut hubs: Vec<_> = graph.nodes().iter()
|
||||
|
|
@@ -54,13 +73,13 @@ pub fn format_topology_header(graph: &Graph) -> String {
|
|||
|
||||
format!(
|
||||
"## Current graph topology\n\
|
||||
Nodes: {} Edges: {} Communities: {}\n\
|
||||
Nodes: {} Edges: {} Communities: {} Types: {}\n\
|
||||
Small-world σ: {:.1} Power-law α: {:.2} Degree Gini: {:.3}\n\
|
||||
Avg clustering coefficient: {:.4}\n\n\
|
||||
{}\
|
||||
Each node below shows its hub-link ratio (fraction of edges to top-5% degree nodes).\n\
|
||||
Use `poc-memory link-impact SOURCE TARGET` to evaluate proposed links.\n\n",
|
||||
n, e, graph.community_count(), sigma, alpha, gini, avg_cc, hub_list)
|
||||
n, e, graph.community_count(), type_str, sigma, alpha, gini, avg_cc, hub_list)
|
||||
}
|
||||
|
||||
pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph) -> String {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue