From 39dcf27bd082f6f9be294b9b4eb5218653489537 Mon Sep 17 00:00:00 2001 From: Kent Overstreet Date: Tue, 7 Apr 2026 03:14:24 -0400 Subject: [PATCH] Memory scores on entries, not a separate Vec ConversationEntry::Memory gains score: Option<f64>. The scorer writes scores directly onto entries when results arrive. Removes Agent.memory_scores Vec and the memory_scores parameter from context_state_summary(). Scores are serialized to/from the conversation log as memory_score. Co-Authored-By: Proof of Concept --- src/agent/context.rs | 12 ++++++++--- src/agent/mod.rs | 51 +++++++++----------------------------------- src/mind/dmn.rs | 2 +- src/mind/mod.rs | 17 ++++++++++++--- src/user/chat.rs | 2 +- src/user/context.rs | 2 +- 6 files changed, 36 insertions(+), 50 deletions(-) diff --git a/src/agent/context.rs b/src/agent/context.rs index 0323be0..f6de30c 100644 --- a/src/agent/context.rs +++ b/src/agent/context.rs @@ -186,7 +186,7 @@ pub fn is_stream_error(err: &anyhow::Error) -> bool { #[derive(Debug, Clone, PartialEq)] pub enum ConversationEntry { Message(Message), - Memory { key: String, message: Message }, + Memory { key: String, message: Message, score: Option<f64> }, /// DMN heartbeat/autonomous prompt — evicted aggressively during compaction. 
Dmn(Message), /// Debug/status log line — written to conversation log for tracing, @@ -201,7 +201,7 @@ impl Serialize for ConversationEntry { use serde::ser::SerializeMap; match self { Self::Message(m) | Self::Dmn(m) => m.serialize(s), - Self::Memory { key, message } => { + Self::Memory { key, message, score } => { let json = serde_json::to_value(message).map_err(serde::ser::Error::custom)?; let mut map = s.serialize_map(None)?; if let serde_json::Value::Object(obj) = json { @@ -210,6 +210,9 @@ impl Serialize for ConversationEntry { } } map.serialize_entry("memory_key", key)?; + if let Some(s) = score { + map.serialize_entry("memory_score", s)?; + } map.end() } Self::Log(text) => { @@ -232,8 +235,11 @@ impl<'de> Deserialize<'de> for ConversationEntry { } if let Some(key) = json.as_object_mut().and_then(|o| o.remove("memory_key")) { let key = key.as_str().unwrap_or("").to_string(); + let score = json.as_object_mut() + .and_then(|o| o.remove("memory_score")) + .and_then(|v| v.as_f64()); let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?; - Ok(Self::Memory { key, message }) + Ok(Self::Memory { key, message, score }) } else { let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?; Ok(Self::Message(message)) diff --git a/src/agent/mod.rs b/src/agent/mod.rs index cb897b7..016ce97 100644 --- a/src/agent/mod.rs +++ b/src/agent/mod.rs @@ -29,7 +29,6 @@ use tools::{summarize_args, working_stack}; use crate::mind::log::ConversationLog; use crate::agent::context::ContextSection; -use crate::subconscious::learn; // --- Activity tracking (RAII guards) --- @@ -175,8 +174,6 @@ pub struct Agent { pub generation: u64, /// Whether incremental memory scoring is currently running. pub memory_scoring_in_flight: bool, - /// Latest per-memory scores from incremental scoring. - pub memory_scores: Vec<(String, f64)>, /// Shared active tools — Agent writes, TUI reads. 
pub active_tools: tools::SharedActiveTools, /// Fires when agent state changes — UI wakes on this instead of polling. @@ -225,7 +222,6 @@ impl Agent { session_id, generation: 0, memory_scoring_in_flight: false, - memory_scores: Vec::new(), active_tools, changed: Arc::new(tokio::sync::Notify::new()), }; @@ -265,7 +261,6 @@ impl Agent { session_id: self.session_id.clone(), generation: 0, memory_scoring_in_flight: false, - memory_scores: Vec::new(), active_tools: tools::shared_active_tools(), changed: Arc::new(tokio::sync::Notify::new()), } @@ -648,7 +643,7 @@ impl Agent { if let Some(key) = args.get("key").and_then(|v| v.as_str()) { let mut msg = Message::tool_result(&call.id, &output); msg.stamp(); - self.push_entry(ConversationEntry::Memory { key: key.to_string(), message: msg }); + self.push_entry(ConversationEntry::Memory { key: key.to_string(), message: msg, score: None }); return; } } @@ -657,7 +652,7 @@ } /// Build context state summary for the debug screen. - pub fn context_state_summary(&self, memory_scores: Option<&learn::MemoryScore>) -> Vec<ContextSection> { + pub fn context_state_summary(&self) -> Vec<ContextSection> { let count_msg = |m: &Message| context::msg_token_count(&self.tokenizer, m); let mut sections = Vec::new(); @@ -699,23 +694,13 @@ if !memory_entries.is_empty() { let node_children: Vec<ContextSection> = memory_entries.iter() .map(|entry| { - let key = match entry { - ConversationEntry::Memory { key, .. } => key.as_str(), + let (key, score) = match entry { + ConversationEntry::Memory { key, score, .. 
} => (key.as_str(), *score), _ => unreachable!(), }; - // Show node weight from graph (updated by incremental scorer) - let graph_weight = crate::hippocampus::store::Store::load().ok() - .and_then(|s| s.nodes.get(key).map(|n| n.weight)); - // Show full matrix score if available - let matrix_score = memory_scores - .and_then(|s| s.memory_weights.iter() - .find(|(k, _)| k == key) - .map(|(_, v)| *v)); - let label = match (graph_weight, matrix_score) { - (Some(w), Some(s)) => format!("{} (w:{:.2} score:{:.1})", key, w, s), - (Some(w), None) => format!("{} (w:{:.2})", key, w), - (None, Some(s)) => format!("{} (score:{:.1})", key, s), - (None, None) => key.to_string(), + let label = match score { + Some(s) => format!("{} (score:{:.1})", key, s), + None => key.to_string(), }; ContextSection { name: label, @@ -772,27 +757,11 @@ Role::System => "system".to_string(), } }; - // Show which memories were important for this response - let children = if m.role == Role::Assistant { - memory_scores - .map(|s| s.important_memories_for_entry(i)) - .unwrap_or_default() - .into_iter() - .map(|(key, score)| ContextSection { - name: format!("← {} ({:.1})", key, score), - tokens: 0, - content: String::new(), - children: Vec::new(), - }) - .collect() - } else { - Vec::new() - }; ContextSection { name: format!("[{}] {}: {}", i, role_name, label), tokens, content: text, - children, + children: Vec::new(), } }) .collect(); @@ -986,7 +955,7 @@ self.generation += 1; self.last_prompt_tokens = 0; - let sections = self.context_state_summary(None); + let sections = self.context_state_summary(); dbglog!("[compact] budget: {}", context::sections_budget_string(&sections)); } @@ -1015,7 +984,7 @@ self.compact(); // Estimate prompt tokens from sections so status bar isn't 0 on startup self.last_prompt_tokens = context::sections_used( - &self.context_state_summary(None)) as u32; + &self.context_state_summary()) as u32; true } diff --git a/src/mind/dmn.rs 
b/src/mind/dmn.rs index 1e9c4a0..cfe408e 100644 --- a/src/mind/dmn.rs +++ b/src/mind/dmn.rs @@ -428,7 +428,7 @@ impl Subconscious { )); msg.stamp(); ag.push_entry(ConversationEntry::Memory { - key: key.to_string(), message: msg, + key: key.to_string(), message: msg, score: None, }); } } diff --git a/src/mind/mod.rs b/src/mind/mod.rs index 602fc21..b946318 100644 --- a/src/mind/mod.rs +++ b/src/mind/mod.rs @@ -276,7 +276,7 @@ impl Mind { MindCommand::Compact => { let threshold = compaction_threshold(&self.config.app) as usize; let mut ag = self.agent.lock().await; - let sections = ag.context_state_summary(None); + let sections = ag.context_state_summary(); if crate::agent::context::sections_used(&sections) > threshold { ag.compact(); ag.notify("compacted"); @@ -341,7 +341,18 @@ impl Mind { { let mut ag = agent.lock().await; ag.memory_scoring_in_flight = false; - if let Ok(ref scores) = result { ag.memory_scores = scores.clone(); } + if let Ok(ref scores) = result { + // Write scores onto Memory entries + for (key, weight) in scores { + for entry in &mut ag.context.entries { + if let crate::agent::context::ConversationEntry::Memory { + key: k, score, .. 
+ } = entry { + if k == key { *score = Some(*weight); } + } + } + } + } } let _ = bg_tx.send(BgEvent::ScoringDone); }); @@ -364,7 +375,7 @@ impl Mind { // Compact if over budget before sending let threshold = compaction_threshold(&self.config.app) as usize; let used = crate::agent::context::sections_used( - &ag.context_state_summary(None)); + &ag.context_state_summary()); if used > threshold { ag.compact(); ag.notify("compacted"); diff --git a/src/user/chat.rs b/src/user/chat.rs index bd643e2..53342ec 100644 --- a/src/user/chat.rs +++ b/src/user/chat.rs @@ -863,7 +863,7 @@ impl ScreenView for InteractScreen { agent.expire_activities(); app.status.prompt_tokens = agent.last_prompt_tokens(); app.status.model = agent.model().to_string(); - let sections = agent.context_state_summary(None); + let sections = agent.context_state_summary(); app.status.context_budget = crate::agent::context::sections_budget_string(&sections); app.activity = agent.activities.last() .map(|a| a.label.clone()) diff --git a/src/user/context.rs b/src/user/context.rs index af13932..5d72df2 100644 --- a/src/user/context.rs +++ b/src/user/context.rs @@ -29,7 +29,7 @@ impl ConsciousScreen { fn read_context_state(&self) -> Vec<ContextSection> { match self.agent.try_lock() { - Ok(ag) => ag.context_state_summary(None), + Ok(ag) => ag.context_state_summary(), Err(_) => Vec::new(), } }