forked from kent/consciousness
Move agent queries inline: {{nodes}} → {{tool: memory_query}}
Add "format": "full" option to memory_query that renders with
full content, graph metrics, and hub analysis (format_nodes_section).
Convert 6 agents (linker, challenger, connector, extractor, replay,
transfer) to inline their queries via {{tool: memory_query}} instead
of separate header query + {{nodes}} placeholder.
Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
parent
96e573f2e5
commit
5b4f497d94
9 changed files with 31 additions and 16 deletions
|
|
@ -238,11 +238,26 @@ async fn supersede(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &s
|
|||
|
||||
/// `memory_query` tool handler: runs a pipeline query against the
/// cached store and renders the results as a string.
///
/// Reads from `args`:
/// - `"query"` (required): the pipeline query string.
/// - `"format"` (optional): output format, defaulting to `"compact"`.
///
/// Format `"full"` parses and runs the pipeline itself, then renders
/// each hit with full content, graph metrics, and hub analysis via
/// `format_nodes_section`; any other value falls through to the
/// compact `query_to_string` rendering.
///
/// Errors from parsing or rendering are flattened into `anyhow` errors.
async fn query(args: &serde_json::Value) -> Result<String> {
    let query_str = get_str(args, "query")?;
    // Unknown format values deliberately fall through to the compact arm.
    let format = args.get("format").and_then(|v| v.as_str()).unwrap_or("compact");
    let arc = cached_store().await?;
    // NOTE(review): graph is built while holding the store lock for the
    // whole request — presumably fine for this tool's call frequency; confirm.
    let store = arc.lock().await;
    let graph = store.build_graph();
    match format {
        "full" => {
            // Rich output with full content, graph metrics, hub analysis
            let stages = crate::search::Stage::parse_pipeline(query_str)
                .map_err(|e| anyhow::anyhow!("{}", e))?;
            // NOTE(review): assumes the trailing `false, 100` are a verbosity
            // flag and a result cap — confirm against run_query's signature.
            let results = crate::search::run_query(&stages, vec![], &graph, &store, false, 100);
            let keys: Vec<String> = results.into_iter().map(|(k, _)| k).collect();
            let items = crate::subconscious::defs::keys_to_replay_items(&store, &keys, &graph);
            Ok(crate::subconscious::prompts::format_nodes_section(&store, &items, &graph))
        }
        _ => {
            // Default/compact rendering of the query results.
            crate::query_parser::query_to_string(&store, &graph, query_str)
                .map_err(|e| anyhow::anyhow!("{}", e))
        }
    }
}
|
||||
|
||||
// ── Journal tools ──────────────────────────────────────────────
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
{"agent": "challenger", "query": "all | type:semantic | not-visited:challenger,14d | sort:priority | limit:10", "schedule": "weekly"}
|
||||
{"agent": "challenger", "schedule": "weekly"}
|
||||
# Challenger Agent — Adversarial Truth-Testing
|
||||
|
||||
|
||||
|
|
@ -52,4 +52,4 @@ For each target node, one of:
|
|||
|
||||
## Target nodes to challenge
|
||||
|
||||
{{NODES}}
|
||||
{{tool: memory_query {"query": "all | type:semantic | not-visited:challenger,14d | sort:priority | limit:10", "format": "full"}}}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
{"agent": "connector", "query": "all | type:semantic | not-visited:connector,7d | sort:priority | limit:20", "schedule": "daily"}
|
||||
{"agent": "connector", "schedule": "daily"}
|
||||
# Connector Agent — Cross-Domain Insight
|
||||
|
||||
|
||||
|
|
@ -83,4 +83,4 @@ you're exploring and you have context to judge them, reweight those too.
|
|||
|
||||
## Nodes to examine for cross-community connections
|
||||
|
||||
{{NODES}}
|
||||
{{tool: memory_query {"query": "all | type:semantic | not-visited:connector,7d | sort:priority | limit:20", "format": "full"}}}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
{"agent": "extractor", "query": "all | not-visited:extractor,7d | sort:priority | limit:3 | spread | not-visited:extractor,7d | limit:20", "schedule": "daily"}
|
||||
{"agent": "extractor", "schedule": "daily"}
|
||||
# Extractor Agent — Knowledge Organizer
|
||||
|
||||
{{tool: memory_render core-personality}}
|
||||
|
|
@ -48,4 +48,4 @@ pattern you've found.
|
|||
|
||||
## Neighborhood nodes
|
||||
|
||||
{{NODES}}
|
||||
{{tool: memory_query {"query": "all | not-visited:extractor,7d | sort:priority | limit:3 | spread | not-visited:extractor,7d | limit:20", "format": "full"}}}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
{"agent":"linker","query":"all | not-visited:linker,7d | sort:isolation*0.7+recency(linker)*0.3 | limit:5","schedule":"daily"}
|
||||
{"agent":"linker","schedule":"daily"}
|
||||
|
||||
# Linker Agent — Relational Binding
|
||||
|
||||
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
## Seed nodes
|
||||
|
||||
{{nodes}}
|
||||
{{tool: memory_query {"query": "all | not-visited:linker,7d | sort:isolation*0.7+recency(linker)*0.3 | limit:5", "format": "full"}}}
|
||||
|
||||
{{tool: memory_render memory-instructions-core-subconscious}}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
{"agent": "replay", "query": "all | !type:daily | !type:weekly | !type:monthly | sort:priority | limit:15", "schedule": "daily"}
|
||||
{"agent": "replay", "schedule": "daily"}
|
||||
# Replay Agent — Hippocampal Replay + Schema Assimilation
|
||||
|
||||
|
||||
|
|
@ -44,4 +44,4 @@ clusters and determine how it fits.
|
|||
|
||||
## Nodes to review
|
||||
|
||||
{{NODES}}
|
||||
{{tool: memory_query {"query": "all | !type:daily | !type:weekly | !type:monthly | sort:priority | limit:15", "format": "full"}}}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
{"agent": "transfer", "query": "all | type:episodic | sort:timestamp | limit:15", "schedule": "daily"}
|
||||
{"agent": "transfer", "schedule": "daily"}
|
||||
# Transfer Agent — Complementary Learning Systems
|
||||
|
||||
{{tool: memory_render core-personality}}
|
||||
|
|
@ -51,4 +51,4 @@ entries, and extract those patterns into semantic nodes.
|
|||
|
||||
## Episodes to process
|
||||
|
||||
{{EPISODES}}
|
||||
{{tool: memory_query {"query": "all | type:episodic | sort:timestamp | limit:15", "format": "full"}}}
|
||||
|
|
|
|||
|
|
@ -845,7 +845,7 @@ pub fn run_agent(
|
|||
}
|
||||
|
||||
/// Convert a list of keys to ReplayItems with priority and graph metrics.
|
||||
fn keys_to_replay_items(
|
||||
pub fn keys_to_replay_items(
|
||||
store: &Store,
|
||||
keys: &[String],
|
||||
graph: &Graph,
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ pub fn format_topology_header(graph: &Graph) -> String {
|
|||
n, e, graph.community_count(), sigma, alpha, gini, avg_cc, hub_list)
|
||||
}
|
||||
|
||||
pub(super) fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph) -> String {
|
||||
pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph) -> String {
|
||||
let hub_thresh = graph.hub_threshold();
|
||||
let mut out = String::new();
|
||||
for item in items {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue