From 7bf4fbe0ec59f6870f5b198bb1ee2afd483204d8 Mon Sep 17 00:00:00 2001 From: ProofOfConcept Date: Thu, 12 Mar 2026 18:08:58 -0400 Subject: [PATCH] add {{siblings}} placeholder for graph neighborhood context MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit New placeholder that expands query keys one hop through the graph, giving agents visibility into what's already connected to the nodes they're working on. Excludes the query keys themselves so there's no duplication with {{nodes}}. Added to transfer (sees existing semantic nodes linked to episodes, so it REFINEs instead of duplicating) and challenger (sees neighbor context to find real evidence for/against claims). Also removes find_existing_observations — superseded by the per-segment dedup fix and this general-purpose placeholder. --- poc-memory/agents/challenger.agent | 2 ++ poc-memory/agents/transfer.agent | 2 ++ poc-memory/src/agents/defs.rs | 38 ++++++++++++++++------- poc-memory/src/agents/knowledge.rs | 49 ------------------------------ 4 files changed, 31 insertions(+), 60 deletions(-) diff --git a/poc-memory/agents/challenger.agent b/poc-memory/agents/challenger.agent index f5686d4..55a1b7b 100644 --- a/poc-memory/agents/challenger.agent +++ b/poc-memory/agents/challenger.agent @@ -70,6 +70,8 @@ LINK key original_key {{TOPOLOGY}} +{{SIBLINGS}} + ## Target nodes to challenge {{NODES}} diff --git a/poc-memory/agents/transfer.agent b/poc-memory/agents/transfer.agent index a06d2cc..2523e47 100644 --- a/poc-memory/agents/transfer.agent +++ b/poc-memory/agents/transfer.agent @@ -125,6 +125,8 @@ be compressed to a one-sentence reference. 
{{TOPOLOGY}} +{{SIBLINGS}} + ## Episodes to process {{EPISODES}} diff --git a/poc-memory/src/agents/defs.rs b/poc-memory/src/agents/defs.rs index 8fc4691..eb26109 100644 --- a/poc-memory/src/agents/defs.rs +++ b/poc-memory/src/agents/defs.rs @@ -163,22 +163,38 @@ fn resolve( "conversations" => { let fragments = super::knowledge::select_conversation_fragments(count); let text = fragments.iter() - .map(|(id, text)| { - let existing = super::knowledge::find_existing_observations(store, text, 10); - let mut section = format!("### Session {}\n\n{}", id, text); - if !existing.is_empty() { - section.push_str("\n\n#### Already extracted from this or similar conversations\n\n"); - for (key, preview) in &existing { - section.push_str(&format!("- **`{}`**: {}\n", key, preview)); - } - } - section - }) + .map(|(id, text)| format!("### Session {}\n\n{}", id, text)) .collect::<Vec<_>>() .join("\n\n---\n\n"); Some(Resolved { text, keys: vec![] }) } + "siblings" | "neighborhood" => { + let mut seen: std::collections::HashSet<String> = keys.iter().cloned().collect(); + let mut siblings = Vec::new(); + for key in keys { + for (neighbor, _) in graph.neighbors(key) { + if seen.insert(neighbor.clone()) { + if let Some(node) = store.nodes.get(neighbor.as_str()) { + siblings.push((neighbor.clone(), node.content.clone())); + } + } + if siblings.len() >= count { break; } + } + if siblings.len() >= count { break; } + } + let text = if siblings.is_empty() { + String::new() + } else { + let mut out = String::from("## Sibling nodes (one hop in graph)\n\n"); + for (key, content) in &siblings { + out.push_str(&format!("### {}\n{}\n\n", key, content)); + } + out + }; + Some(Resolved { text, keys: vec![] }) + } + // targets/context: aliases for challenger-style presentation "targets" => { let items = keys_to_replay_items(store, keys, graph); diff --git a/poc-memory/src/agents/knowledge.rs b/poc-memory/src/agents/knowledge.rs index 8f69132..f5f5012 100644 --- a/poc-memory/src/agents/knowledge.rs +++ 
b/poc-memory/src/agents/knowledge.rs @@ -362,55 +362,6 @@ pub enum NamingResolution { MergeInto(String), } -/// Find existing observation-authored nodes relevant to a conversation fragment. -/// Used to show the observation agent what's already been extracted, -/// preventing duplicate extractions across consolidation runs. -pub fn find_existing_observations( - store: &Store, - conversation_text: &str, - limit: usize, -) -> Vec<(String, String)> { - use std::collections::{BTreeMap, HashSet}; - - let graph = store.build_graph(); - - let content_terms = crate::search::extract_query_terms(conversation_text, 15); - let mut terms: BTreeMap<String, f32> = BTreeMap::new(); - for term in content_terms.split_whitespace() { - terms.entry(term.to_string()).or_insert(1.0); - } - if terms.is_empty() { - return Vec::new(); - } - - let (seeds, _) = crate::search::match_seeds_opts(&terms, store, true, false); - - // Collect seeds + their graph neighbors (one hop) - let mut seen = HashSet::new(); - let mut result = Vec::new(); - - for (key, _) in &seeds { - // Add the seed itself - if seen.insert(key.clone()) { - if let Some(node) = store.nodes.get(key.as_str()) { - result.push((key.clone(), node.content.clone())); - } - } - // Add its neighbors - for (neighbor, _) in graph.neighbors(key) { - if seen.insert(neighbor.clone()) { - if let Some(node) = store.nodes.get(neighbor.as_str()) { - result.push((neighbor.clone(), node.content.clone())); - } - } - } - if result.len() >= limit { break; } - } - - result.truncate(limit); - result -} - /// Find existing nodes that might conflict with a proposed new node. /// Returns up to `limit` (key, content_preview) pairs. fn find_conflicts(