Memory scores on entries, not a separate Vec

ConversationEntry::Memory gains score: Option<f64>. The scorer
writes scores directly onto entries when results arrive. This removes
the Agent.memory_scores Vec and the memory_scores parameter from
context_state_summary().

Scores are serialized to/from the conversation log as memory_score.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-07 03:14:24 -04:00
parent 93f5f8b0c7
commit 39dcf27bd0
6 changed files with 36 additions and 50 deletions

View file

@ -186,7 +186,7 @@ pub fn is_stream_error(err: &anyhow::Error) -> bool {
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub enum ConversationEntry { pub enum ConversationEntry {
Message(Message), Message(Message),
Memory { key: String, message: Message }, Memory { key: String, message: Message, score: Option<f64> },
/// DMN heartbeat/autonomous prompt — evicted aggressively during compaction. /// DMN heartbeat/autonomous prompt — evicted aggressively during compaction.
Dmn(Message), Dmn(Message),
/// Debug/status log line — written to conversation log for tracing, /// Debug/status log line — written to conversation log for tracing,
@ -201,7 +201,7 @@ impl Serialize for ConversationEntry {
use serde::ser::SerializeMap; use serde::ser::SerializeMap;
match self { match self {
Self::Message(m) | Self::Dmn(m) => m.serialize(s), Self::Message(m) | Self::Dmn(m) => m.serialize(s),
Self::Memory { key, message } => { Self::Memory { key, message, score } => {
let json = serde_json::to_value(message).map_err(serde::ser::Error::custom)?; let json = serde_json::to_value(message).map_err(serde::ser::Error::custom)?;
let mut map = s.serialize_map(None)?; let mut map = s.serialize_map(None)?;
if let serde_json::Value::Object(obj) = json { if let serde_json::Value::Object(obj) = json {
@ -210,6 +210,9 @@ impl Serialize for ConversationEntry {
} }
} }
map.serialize_entry("memory_key", key)?; map.serialize_entry("memory_key", key)?;
if let Some(s) = score {
map.serialize_entry("memory_score", s)?;
}
map.end() map.end()
} }
Self::Log(text) => { Self::Log(text) => {
@ -232,8 +235,11 @@ impl<'de> Deserialize<'de> for ConversationEntry {
} }
if let Some(key) = json.as_object_mut().and_then(|o| o.remove("memory_key")) { if let Some(key) = json.as_object_mut().and_then(|o| o.remove("memory_key")) {
let key = key.as_str().unwrap_or("").to_string(); let key = key.as_str().unwrap_or("").to_string();
let score = json.as_object_mut()
.and_then(|o| o.remove("memory_score"))
.and_then(|v| v.as_f64());
let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?; let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?;
Ok(Self::Memory { key, message }) Ok(Self::Memory { key, message, score })
} else { } else {
let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?; let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?;
Ok(Self::Message(message)) Ok(Self::Message(message))

View file

@ -29,7 +29,6 @@ use tools::{summarize_args, working_stack};
use crate::mind::log::ConversationLog; use crate::mind::log::ConversationLog;
use crate::agent::context::ContextSection; use crate::agent::context::ContextSection;
use crate::subconscious::learn;
// --- Activity tracking (RAII guards) --- // --- Activity tracking (RAII guards) ---
@ -175,8 +174,6 @@ pub struct Agent {
pub generation: u64, pub generation: u64,
/// Whether incremental memory scoring is currently running. /// Whether incremental memory scoring is currently running.
pub memory_scoring_in_flight: bool, pub memory_scoring_in_flight: bool,
/// Latest per-memory scores from incremental scoring.
pub memory_scores: Vec<(String, f64)>,
/// Shared active tools — Agent writes, TUI reads. /// Shared active tools — Agent writes, TUI reads.
pub active_tools: tools::SharedActiveTools, pub active_tools: tools::SharedActiveTools,
/// Fires when agent state changes — UI wakes on this instead of polling. /// Fires when agent state changes — UI wakes on this instead of polling.
@ -225,7 +222,6 @@ impl Agent {
session_id, session_id,
generation: 0, generation: 0,
memory_scoring_in_flight: false, memory_scoring_in_flight: false,
memory_scores: Vec::new(),
active_tools, active_tools,
changed: Arc::new(tokio::sync::Notify::new()), changed: Arc::new(tokio::sync::Notify::new()),
}; };
@ -265,7 +261,6 @@ impl Agent {
session_id: self.session_id.clone(), session_id: self.session_id.clone(),
generation: 0, generation: 0,
memory_scoring_in_flight: false, memory_scoring_in_flight: false,
memory_scores: Vec::new(),
active_tools: tools::shared_active_tools(), active_tools: tools::shared_active_tools(),
changed: Arc::new(tokio::sync::Notify::new()), changed: Arc::new(tokio::sync::Notify::new()),
} }
@ -648,7 +643,7 @@ impl Agent {
if let Some(key) = args.get("key").and_then(|v| v.as_str()) { if let Some(key) = args.get("key").and_then(|v| v.as_str()) {
let mut msg = Message::tool_result(&call.id, &output); let mut msg = Message::tool_result(&call.id, &output);
msg.stamp(); msg.stamp();
self.push_entry(ConversationEntry::Memory { key: key.to_string(), message: msg }); self.push_entry(ConversationEntry::Memory { key: key.to_string(), message: msg, score: None });
return; return;
} }
} }
@ -657,7 +652,7 @@ impl Agent {
} }
/// Build context state summary for the debug screen. /// Build context state summary for the debug screen.
pub fn context_state_summary(&self, memory_scores: Option<&learn::MemoryScore>) -> Vec<ContextSection> { pub fn context_state_summary(&self) -> Vec<ContextSection> {
let count_msg = |m: &Message| context::msg_token_count(&self.tokenizer, m); let count_msg = |m: &Message| context::msg_token_count(&self.tokenizer, m);
let mut sections = Vec::new(); let mut sections = Vec::new();
@ -699,23 +694,13 @@ impl Agent {
if !memory_entries.is_empty() { if !memory_entries.is_empty() {
let node_children: Vec<ContextSection> = memory_entries.iter() let node_children: Vec<ContextSection> = memory_entries.iter()
.map(|entry| { .map(|entry| {
let key = match entry { let (key, score) = match entry {
ConversationEntry::Memory { key, .. } => key.as_str(), ConversationEntry::Memory { key, score, .. } => (key.as_str(), *score),
_ => unreachable!(), _ => unreachable!(),
}; };
// Show node weight from graph (updated by incremental scorer) let label = match score {
let graph_weight = crate::hippocampus::store::Store::load().ok() Some(s) => format!("{} (score:{:.1})", key, s),
.and_then(|s| s.nodes.get(key).map(|n| n.weight)); None => key.to_string(),
// Show full matrix score if available
let matrix_score = memory_scores
.and_then(|s| s.memory_weights.iter()
.find(|(k, _)| k == key)
.map(|(_, v)| *v));
let label = match (graph_weight, matrix_score) {
(Some(w), Some(s)) => format!("{} (w:{:.2} score:{:.1})", key, w, s),
(Some(w), None) => format!("{} (w:{:.2})", key, w),
(None, Some(s)) => format!("{} (score:{:.1})", key, s),
(None, None) => key.to_string(),
}; };
ContextSection { ContextSection {
name: label, name: label,
@ -772,27 +757,11 @@ impl Agent {
Role::System => "system".to_string(), Role::System => "system".to_string(),
} }
}; };
// Show which memories were important for this response
let children = if m.role == Role::Assistant {
memory_scores
.map(|s| s.important_memories_for_entry(i))
.unwrap_or_default()
.into_iter()
.map(|(key, score)| ContextSection {
name: format!("← {} ({:.1})", key, score),
tokens: 0,
content: String::new(),
children: Vec::new(),
})
.collect()
} else {
Vec::new()
};
ContextSection { ContextSection {
name: format!("[{}] {}: {}", i, role_name, label), name: format!("[{}] {}: {}", i, role_name, label),
tokens, tokens,
content: text, content: text,
children, children: Vec::new(),
} }
}) })
.collect(); .collect();
@ -986,7 +955,7 @@ impl Agent {
self.generation += 1; self.generation += 1;
self.last_prompt_tokens = 0; self.last_prompt_tokens = 0;
let sections = self.context_state_summary(None); let sections = self.context_state_summary();
dbglog!("[compact] budget: {}", context::sections_budget_string(&sections)); dbglog!("[compact] budget: {}", context::sections_budget_string(&sections));
} }
@ -1015,7 +984,7 @@ impl Agent {
self.compact(); self.compact();
// Estimate prompt tokens from sections so status bar isn't 0 on startup // Estimate prompt tokens from sections so status bar isn't 0 on startup
self.last_prompt_tokens = context::sections_used( self.last_prompt_tokens = context::sections_used(
&self.context_state_summary(None)) as u32; &self.context_state_summary()) as u32;
true true
} }

View file

@ -428,7 +428,7 @@ impl Subconscious {
)); ));
msg.stamp(); msg.stamp();
ag.push_entry(ConversationEntry::Memory { ag.push_entry(ConversationEntry::Memory {
key: key.to_string(), message: msg, key: key.to_string(), message: msg, score: None,
}); });
} }
} }

View file

@ -276,7 +276,7 @@ impl Mind {
MindCommand::Compact => { MindCommand::Compact => {
let threshold = compaction_threshold(&self.config.app) as usize; let threshold = compaction_threshold(&self.config.app) as usize;
let mut ag = self.agent.lock().await; let mut ag = self.agent.lock().await;
let sections = ag.context_state_summary(None); let sections = ag.context_state_summary();
if crate::agent::context::sections_used(&sections) > threshold { if crate::agent::context::sections_used(&sections) > threshold {
ag.compact(); ag.compact();
ag.notify("compacted"); ag.notify("compacted");
@ -341,7 +341,18 @@ impl Mind {
{ {
let mut ag = agent.lock().await; let mut ag = agent.lock().await;
ag.memory_scoring_in_flight = false; ag.memory_scoring_in_flight = false;
if let Ok(ref scores) = result { ag.memory_scores = scores.clone(); } if let Ok(ref scores) = result {
// Write scores onto Memory entries
for (key, weight) in scores {
for entry in &mut ag.context.entries {
if let crate::agent::context::ConversationEntry::Memory {
key: k, score, ..
} = entry {
if k == key { *score = Some(*weight); }
}
}
}
}
} }
let _ = bg_tx.send(BgEvent::ScoringDone); let _ = bg_tx.send(BgEvent::ScoringDone);
}); });
@ -364,7 +375,7 @@ impl Mind {
// Compact if over budget before sending // Compact if over budget before sending
let threshold = compaction_threshold(&self.config.app) as usize; let threshold = compaction_threshold(&self.config.app) as usize;
let used = crate::agent::context::sections_used( let used = crate::agent::context::sections_used(
&ag.context_state_summary(None)); &ag.context_state_summary());
if used > threshold { if used > threshold {
ag.compact(); ag.compact();
ag.notify("compacted"); ag.notify("compacted");

View file

@ -863,7 +863,7 @@ impl ScreenView for InteractScreen {
agent.expire_activities(); agent.expire_activities();
app.status.prompt_tokens = agent.last_prompt_tokens(); app.status.prompt_tokens = agent.last_prompt_tokens();
app.status.model = agent.model().to_string(); app.status.model = agent.model().to_string();
let sections = agent.context_state_summary(None); let sections = agent.context_state_summary();
app.status.context_budget = crate::agent::context::sections_budget_string(&sections); app.status.context_budget = crate::agent::context::sections_budget_string(&sections);
app.activity = agent.activities.last() app.activity = agent.activities.last()
.map(|a| a.label.clone()) .map(|a| a.label.clone())

View file

@ -29,7 +29,7 @@ impl ConsciousScreen {
fn read_context_state(&self) -> Vec<ContextSection> { fn read_context_state(&self) -> Vec<ContextSection> {
match self.agent.try_lock() { match self.agent.try_lock() {
Ok(ag) => ag.context_state_summary(None), Ok(ag) => ag.context_state_summary(),
Err(_) => Vec::new(), Err(_) => Vec::new(),
} }
} }