agents: port knowledge agents to .agent files with visit tracking
The four knowledge agents (observation, extractor, connector,
challenger) were hardcoded in knowledge.rs with their own node
selection logic that bypassed the query pipeline and visit tracking.
Now they're .agent files like the consolidation agents:
- extractor: not-visited:extractor,7d | sort:priority | limit:20
- observation: uses new {{CONVERSATIONS}} placeholder
- connector: type:semantic | not-visited:connector,7d
- challenger: type:semantic | not-visited:challenger,14d
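For illustration, an .agent file in this scheme pairs a selection query with a
prompt template, roughly like this (hypothetical layout; the real schema is
whatever defs.rs parses):

    # agents/extractor.agent (sketch only; everything but the query line is assumed)
    query: not-visited:extractor,7d | sort:priority | limit:20
    ---
    Graph topology: {{TOPOLOGY}}

    Find recurring patterns across these nodes:
    {{NODES}}

Each pipe-separated clause filters or orders candidates through the normal
query pipeline, replacing the per-agent selection code this commit deletes.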
The knowledge loop's run_cycle dispatches through defs::run_agent
instead of calling hardcoded functions, so all agents get visit
tracking automatically. This means the extractor now sees _facts-*
and _mined-transcripts nodes that it was previously blind to.
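The round-trip per agent, using only the calls visible in the diff below
(a simplified sketch, not the verbatim loop; error handling trimmed):

    // One run_cycle step. AgentBatch fields as shown in the diff.
    let def = super::defs::get_def("extractor").expect("no .agent file");
    let batch = super::defs::run_agent(&store, &def, batch_size)?; // query pipeline selects nodes
    let output = llm::call_sonnet("knowledge", &batch.prompt)?;
    // Recording the visit is what makes not-visited:extractor,7d
    // exclude these same nodes from extractor runs for the next 7 days.
    store.record_agent_visits(&batch.node_keys, "extractor")?;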
~200 lines of dead code removed (old runner functions, spectral
clustering for node selection, per-agent LLM dispatch).
New placeholders in defs.rs:
- {{CONVERSATIONS}} — raw transcript fragments for observation agent
- {{TARGETS}} — alias for {{NODES}} (challenger compatibility)
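Both expand by plain string substitution when defs.rs builds the prompt,
roughly as follows (names and wiring assumed; select_conversation_fragments
is made pub in the diff below for this purpose):

    // Hypothetical expansion inside defs.rs (a sketch, not the shipped code).
    let conversations = select_conversation_fragments(batch_size)
        .iter()
        .map(|(session_id, text)| format!("### Session {}\n\n{}", session_id, text))
        .collect::<Vec<_>>()
        .join("\n\n---\n\n");
    let prompt = template
        .replace("{{CONVERSATIONS}}", &conversations)
        .replace("{{TARGETS}}", &nodes_block) // same text as {{NODES}}
        .replace("{{NODES}}", &nodes_block);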
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
parent 7d6ebbacab
commit 91878d17a0

6 changed files with 542 additions and 224 deletions
@@ -1,14 +1,12 @@
-// knowledge.rs — knowledge production agents and convergence loop
+// knowledge.rs — knowledge agent action parsing, depth tracking, and convergence loop
 //
-// Rust port of knowledge_agents.py + knowledge_loop.py.
-// Four agents mine the memory graph for new knowledge:
-// 1. Observation — extract facts from raw conversations
-// 2. Extractor — find patterns in node clusters
-// 3. Connector — find cross-domain structural connections
-// 4. Challenger — stress-test existing knowledge nodes
-//
-// The loop runs agents in sequence, applies results, measures
-// convergence via graph-structural metrics (sigma, CC, communities).
+// Agent prompts live in agents/*.agent files, dispatched via defs.rs.
+// This module handles:
+// - Action parsing (WRITE_NODE, LINK, REFINE from LLM output)
+// - Inference depth tracking (prevents runaway abstraction)
+// - Action application (write to store with provenance)
+// - Convergence loop (sequences agents, measures graph stability)
+// - Conversation fragment selection (for observation agent)
 
 use crate::graph::Graph;
 use super::llm;
@@ -17,7 +15,7 @@ use crate::store::{self, Store, new_relation, RelationType};
 
 use regex::Regex;
 use serde::{Deserialize, Serialize};
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
 use std::fs;
 use std::path::{Path, PathBuf};
 
@@ -324,15 +322,6 @@ fn agent_provenance(agent: &str) -> store::Provenance {
 // Agent runners
 // ---------------------------------------------------------------------------
 
-fn load_prompt(name: &str) -> Result<String, String> {
-    super::prompts::load_prompt(name, &[])
-}
-
-fn get_graph_topology(store: &Store, graph: &Graph) -> String {
-    format!("Nodes: {} Relations: {}\n", store.nodes.len(), graph.edge_count())
-}
-
-/// Strip <system-reminder> blocks from text
 /// Extract human-readable dialogue from a conversation JSONL
 fn extract_conversation_text(path: &Path, max_chars: usize) -> String {
     let cfg = crate::config::get();
@@ -372,7 +361,7 @@ fn count_dialogue_turns(path: &Path) -> usize {
 }
 
 /// Select conversation fragments for the observation extractor
-fn select_conversation_fragments(n: usize) -> Vec<(String, String)> {
+pub fn select_conversation_fragments(n: usize) -> Vec<(String, String)> {
     let projects = crate::config::get().projects_dir.clone();
     if !projects.exists() { return Vec::new(); }
 
@@ -415,199 +404,6 @@ fn select_conversation_fragments(n: usize) -> Vec<(String, String)> {
     fragments
 }
 
-pub fn run_observation_extractor(store: &Store, graph: &Graph, batch_size: usize) -> Result<String, String> {
-    let template = load_prompt("observation-extractor")?;
-    let topology = get_graph_topology(store, graph);
-    let fragments = select_conversation_fragments(batch_size);
-
-    let mut results = Vec::new();
-    for (i, (session_id, text)) in fragments.iter().enumerate() {
-        eprintln!(" Observation extractor {}/{}: session {}... ({} chars)",
-            i + 1, fragments.len(), &session_id[..session_id.len().min(12)], text.len());
-
-        let prompt = template
-            .replace("{{TOPOLOGY}}", &topology)
-            .replace("{{CONVERSATIONS}}", &format!("### Session {}\n\n{}", session_id, text));
-
-        let response = llm::call_sonnet("knowledge", &prompt)?;
-        results.push(format!("## Session: {}\n\n{}", session_id, response));
-    }
-    Ok(results.join("\n\n---\n\n"))
-}
-
-/// Load spectral embedding from disk
-fn load_spectral_embedding() -> HashMap<String, Vec<f64>> {
-    spectral::load_embedding()
-        .map(|emb| emb.coords)
-        .unwrap_or_default()
-}
-
-fn spectral_distance(embedding: &HashMap<String, Vec<f64>>, a: &str, b: &str) -> f64 {
-    let (Some(va), Some(vb)) = (embedding.get(a), embedding.get(b)) else {
-        return f64::INFINITY;
-    };
-    let dot: f64 = va.iter().zip(vb.iter()).map(|(a, b)| a * b).sum();
-    let norm_a: f64 = va.iter().map(|x| x * x).sum::<f64>().sqrt();
-    let norm_b: f64 = vb.iter().map(|x| x * x).sum::<f64>().sqrt();
-    if norm_a == 0.0 || norm_b == 0.0 {
-        return f64::INFINITY;
-    }
-    1.0 - dot / (norm_a * norm_b)
-}
-
-fn select_extractor_clusters(_store: &Store, n: usize) -> Vec<Vec<String>> {
-    let embedding = load_spectral_embedding();
-    let semantic_keys: Vec<&String> = embedding.keys().collect();
-
-    let cluster_size = 5;
-    let mut used = HashSet::new();
-    let mut clusters = Vec::new();
-
-    for _ in 0..n {
-        let available: Vec<&&String> = semantic_keys.iter()
-            .filter(|k| !used.contains(**k))
-            .collect();
-        if available.len() < cluster_size { break; }
-
-        let seed = available[0];
-        let mut distances: Vec<(f64, &String)> = available.iter()
-            .filter(|k| ***k != *seed)
-            .map(|k| (spectral_distance(&embedding, seed, k), **k))
-            .filter(|(d, _)| d.is_finite())
-            .collect();
-        distances.sort_by(|a, b| a.0.total_cmp(&b.0));
-
-        let cluster: Vec<String> = std::iter::once((*seed).clone())
-            .chain(distances.iter().take(cluster_size - 1).map(|(_, k)| (*k).clone()))
-            .collect();
-        for k in &cluster { used.insert(k.clone()); }
-        clusters.push(cluster);
-    }
-    clusters
-}
-
-pub fn run_extractor(store: &Store, graph: &Graph, batch_size: usize) -> Result<String, String> {
-    let template = load_prompt("extractor")?;
-    let topology = get_graph_topology(store, graph);
-    let clusters = select_extractor_clusters(store, batch_size);
-
-    let mut results = Vec::new();
-    for (i, cluster) in clusters.iter().enumerate() {
-        eprintln!(" Extractor cluster {}/{}: {} nodes", i + 1, clusters.len(), cluster.len());
-
-        let node_texts: Vec<String> = cluster.iter()
-            .filter_map(|key| {
-                let content = store.nodes.get(key)?.content.as_str();
-                Some(format!("### {}\n{}", key, content))
-            })
-            .collect();
-        if node_texts.is_empty() { continue; }
-
-        let prompt = template
-            .replace("{{TOPOLOGY}}", &topology)
-            .replace("{{NODES}}", &node_texts.join("\n\n"));
-
-        let response = llm::call_sonnet("knowledge", &prompt)?;
-        results.push(format!("## Cluster {}: {}...\n\n{}", i + 1,
-            cluster.iter().take(3).cloned().collect::<Vec<_>>().join(", "), response));
-    }
-    Ok(results.join("\n\n---\n\n"))
-}
-
-fn select_connector_pairs(store: &Store, graph: &Graph, n: usize) -> Vec<(Vec<String>, Vec<String>)> {
-    let embedding = load_spectral_embedding();
-    let semantic_keys: Vec<&String> = embedding.keys().collect();
-
-    let mut pairs = Vec::new();
-    let mut used = HashSet::new();
-
-    for seed in semantic_keys.iter().take(n * 10) {
-        if used.contains(*seed) { continue; }
-
-        let mut near: Vec<(f64, &String)> = semantic_keys.iter()
-            .filter(|k| ***k != **seed && !used.contains(**k))
-            .map(|k| (spectral_distance(&embedding, seed, k), *k))
-            .filter(|(d, _)| *d < 0.5 && d.is_finite())
-            .collect();
-        near.sort_by(|a, b| a.0.total_cmp(&b.0));
-
-        for (_, target) in near.iter().take(5) {
-            if !has_edge(store, seed, target) {
-                let _ = graph; // graph available for future use
-                used.insert((*seed).clone());
-                used.insert((*target).clone());
-                pairs.push((vec![(*seed).clone()], vec![(*target).clone()]));
-                break;
-            }
-        }
-        if pairs.len() >= n { break; }
-    }
-    pairs
-}
-
-pub fn run_connector(store: &Store, graph: &Graph, batch_size: usize) -> Result<String, String> {
-    let template = load_prompt("connector")?;
-    let topology = get_graph_topology(store, graph);
-    let pairs = select_connector_pairs(store, graph, batch_size);
-
-    let mut results = Vec::new();
-    for (i, (group_a, group_b)) in pairs.iter().enumerate() {
-        eprintln!(" Connector pair {}/{}", i + 1, pairs.len());
-
-        let nodes_a: Vec<String> = group_a.iter()
-            .filter_map(|k| {
-                let c = store.nodes.get(k)?.content.as_str();
-                Some(format!("### {}\n{}", k, c))
-            })
-            .collect();
-        let nodes_b: Vec<String> = group_b.iter()
-            .filter_map(|k| {
-                let c = store.nodes.get(k)?.content.as_str();
-                Some(format!("### {}\n{}", k, c))
-            })
-            .collect();
-
-        let prompt = template
-            .replace("{{TOPOLOGY}}", &topology)
-            .replace("{{NODES_A}}", &nodes_a.join("\n\n"))
-            .replace("{{NODES_B}}", &nodes_b.join("\n\n"));
-
-        let response = llm::call_sonnet("knowledge", &prompt)?;
-        results.push(format!("## Pair {}: {} ↔ {}\n\n{}",
-            i + 1, group_a.join(", "), group_b.join(", "), response));
-    }
-    Ok(results.join("\n\n---\n\n"))
-}
-
-pub fn run_challenger(store: &Store, graph: &Graph, batch_size: usize) -> Result<String, String> {
-    let template = load_prompt("challenger")?;
-    let topology = get_graph_topology(store, graph);
-
-    let mut candidates: Vec<(&String, usize)> = store.nodes.keys()
-        .map(|k| (k, graph.degree(k)))
-        .collect();
-    candidates.sort_by(|a, b| b.1.cmp(&a.1));
-
-    let mut results = Vec::new();
-    for (i, (key, _)) in candidates.iter().take(batch_size).enumerate() {
-        eprintln!(" Challenger {}/{}: {}", i + 1, batch_size.min(candidates.len()), key);
-
-        let content = match store.nodes.get(key.as_str()) {
-            Some(n) => &n.content,
-            None => continue,
-        };
-
-        let prompt = template
-            .replace("{{TOPOLOGY}}", &topology)
-            .replace("{{NODE_KEY}}", key)
-            .replace("{{NODE_CONTENT}}", content);
-
-        let response = llm::call_sonnet("knowledge", &prompt)?;
-        results.push(format!("## Challenge: {}\n\n{}", key, response));
-    }
-    Ok(results.join("\n\n---\n\n"))
-}
-
 // ---------------------------------------------------------------------------
 // Convergence metrics
 // ---------------------------------------------------------------------------
@@ -771,23 +567,38 @@ fn run_cycle(
     let mut depth_rejected = 0;
     let mut total_applied = 0;
 
-    // Run each agent, rebuilding graph after mutations
+    // Run each agent via .agent file dispatch
    let agent_names = ["observation", "extractor", "connector", "challenger"];
 
     for agent_name in &agent_names {
         eprintln!("\n --- {} (n={}) ---", agent_name, config.batch_size);
 
-        // Rebuild graph to reflect any mutations from previous agents
-        let graph = store.build_graph();
-
-        let output = match *agent_name {
-            "observation" => run_observation_extractor(&store, &graph, config.batch_size),
-            "extractor" => run_extractor(&store, &graph, config.batch_size),
-            "connector" => run_connector(&store, &graph, config.batch_size),
-            "challenger" => run_challenger(&store, &graph, config.batch_size),
-            _ => unreachable!(),
+        let def = match super::defs::get_def(agent_name) {
+            Some(d) => d,
+            None => {
+                eprintln!(" SKIP: no .agent file for {}", agent_name);
+                continue;
+            }
         };
+
+        let agent_batch = match super::defs::run_agent(&store, &def, config.batch_size) {
+            Ok(b) => b,
+            Err(e) => {
+                eprintln!(" ERROR building prompt: {}", e);
+                continue;
+            }
+        };
+
+        eprintln!(" prompt: {} chars ({} nodes)", agent_batch.prompt.len(), agent_batch.node_keys.len());
+        let output = llm::call_sonnet("knowledge", &agent_batch.prompt);
+
+        // Record visits for processed nodes
+        if !agent_batch.node_keys.is_empty() {
+            if let Err(e) = store.record_agent_visits(&agent_batch.node_keys, agent_name) {
+                eprintln!(" visit recording: {}", e);
+            }
+        }
 
         let output = match output {
             Ok(o) => o,
             Err(e) => {