Restore full N×M memory scoring matrix (/score command)
The full matrix scorer was deleted during the AST conversion. Restore it: /score runs score_memories(), which computes divergence for every memory × response pair, stores the MemoryScore on MindState, and displays per-memory weights with bar charts on the F2 screen. Both scoring paths now use ActivityGuard::update() for live progress in the status bar instead of creating a new activity per iteration. Also bumps the score API timeout from 120s to 300s and adds progress logging throughout.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
Signed-off-by: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
parent
f6a6c37435
commit
58cec97e57
6 changed files with 187 additions and 98 deletions
|
|
@ -93,7 +93,14 @@ impl<'de> Deserialize<'de> for NodeLeaf {
|
|||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum AstNode {
|
||||
Leaf(NodeLeaf),
|
||||
Branch { role: Role, children: Vec<AstNode> },
|
||||
Branch {
|
||||
role: Role,
|
||||
children: Vec<AstNode>,
|
||||
/// Per-response memory attribution from full scoring matrix.
|
||||
/// Maps memory key → divergence score for this response.
|
||||
#[serde(default, skip_serializing_if = "std::collections::BTreeMap::is_empty")]
|
||||
memory_scores: std::collections::BTreeMap<String, f64>,
|
||||
},
|
||||
}
|
||||
|
||||
/// The context window: four sections as Vec<AstNode>.
|
||||
|
|
@ -277,13 +284,14 @@ impl AstNode {
|
|||
// -- Branch constructors --------------------------------------------------
|
||||
|
||||
pub fn branch(role: Role, children: Vec<AstNode>) -> Self {
|
||||
Self::Branch { role, children }
|
||||
Self::Branch { role, children, memory_scores: Default::default() }
|
||||
}
|
||||
|
||||
pub fn system_msg(text: impl Into<String>) -> Self {
|
||||
Self::Branch {
|
||||
role: Role::System,
|
||||
children: vec![Self::content(text)],
|
||||
memory_scores: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -291,6 +299,7 @@ impl AstNode {
|
|||
Self::Branch {
|
||||
role: Role::User,
|
||||
children: vec![Self::content(text)],
|
||||
memory_scores: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -306,9 +315,10 @@ impl AstNode {
|
|||
};
|
||||
Self::Leaf(NodeLeaf { token_ids, ..leaf })
|
||||
}
|
||||
Self::Branch { role, children } => Self::Branch {
|
||||
Self::Branch { role, children, memory_scores, .. } => Self::Branch {
|
||||
role,
|
||||
children: children.into_iter().map(|c| c.retokenize()).collect(),
|
||||
memory_scores,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
@ -339,7 +349,7 @@ impl AstNode {
|
|||
pub fn label(&self) -> String {
|
||||
let cfg = crate::config::get();
|
||||
match self {
|
||||
Self::Branch { role, children } => {
|
||||
Self::Branch { role, children, .. } => {
|
||||
let preview = children.first()
|
||||
.and_then(|c| c.leaf())
|
||||
.map(|l| truncate_preview(l.body.text(), 60))
|
||||
|
|
@ -370,7 +380,7 @@ impl AstNode {
|
|||
fn render_into(&self, out: &mut String) {
|
||||
match self {
|
||||
Self::Leaf(leaf) => leaf.body.render_into(out),
|
||||
Self::Branch { role, children } => {
|
||||
Self::Branch { role, children, .. } => {
|
||||
out.push_str(&format!("<|im_start|>{}\n", role.as_str()));
|
||||
for child in children {
|
||||
child.render_into(out);
|
||||
|
|
@ -383,7 +393,7 @@ impl AstNode {
|
|||
fn token_ids_into(&self, out: &mut Vec<u32>) {
|
||||
match self {
|
||||
Self::Leaf(leaf) => out.extend_from_slice(&leaf.token_ids),
|
||||
Self::Branch { role, children } => {
|
||||
Self::Branch { role, children, .. } => {
|
||||
out.push(tokenizer::IM_START);
|
||||
out.extend(tokenizer::encode(&format!("{}\n", role.as_str())));
|
||||
for child in children {
|
||||
|
|
@ -412,7 +422,7 @@ impl Ast for AstNode {
|
|||
fn tokens(&self) -> usize {
|
||||
match self {
|
||||
Self::Leaf(leaf) => leaf.tokens(),
|
||||
Self::Branch { role, children } => {
|
||||
Self::Branch { role, children, .. } => {
|
||||
1 + tokenizer::encode(&format!("{}\n", role.as_str())).len()
|
||||
+ children.iter().map(|c| c.tokens()).sum::<usize>()
|
||||
+ 1 + tokenizer::encode("\n").len()
|
||||
|
|
@ -752,6 +762,7 @@ impl ContextState {
|
|||
pub fn identity(&self) -> &[AstNode] { &self.identity }
|
||||
pub fn journal(&self) -> &[AstNode] { &self.journal }
|
||||
pub fn conversation(&self) -> &[AstNode] { &self.conversation }
|
||||
pub fn conversation_mut(&mut self) -> &mut Vec<AstNode> { &mut self.conversation }
|
||||
|
||||
fn sections(&self) -> [&Vec<AstNode>; 4] {
|
||||
[&self.system, &self.identity, &self.journal, &self.conversation]
|
||||
|
|
|
|||
|
|
@ -137,8 +137,10 @@ impl Clone for MindState {
|
|||
pub enum MindCommand {
|
||||
/// Run compaction check
|
||||
Compact,
|
||||
/// Run memory scoring
|
||||
/// Run incremental memory scoring (auto, after turns)
|
||||
Score,
|
||||
/// Run full N×M memory scoring matrix (/score command)
|
||||
ScoreFull,
|
||||
/// Abort current turn, kill processes
|
||||
Interrupt,
|
||||
/// Reset session
|
||||
|
|
@ -362,6 +364,18 @@ impl Mind {
|
|||
s.scoring_in_flight = true;
|
||||
drop(s);
|
||||
self.start_memory_scoring();
|
||||
} else {
|
||||
dbglog!("[scoring] skipped: scoring_in_flight=true");
|
||||
}
|
||||
}
|
||||
MindCommand::ScoreFull => {
|
||||
let mut s = self.shared.lock().unwrap();
|
||||
if !s.scoring_in_flight {
|
||||
s.scoring_in_flight = true;
|
||||
drop(s);
|
||||
self.start_full_scoring();
|
||||
} else {
|
||||
dbglog!("[scoring-full] skipped: scoring_in_flight=true");
|
||||
}
|
||||
}
|
||||
MindCommand::Interrupt => {
|
||||
|
|
@ -406,7 +420,10 @@ impl Mind {
|
|||
tokio::spawn(async move {
|
||||
let (context, client) = {
|
||||
let mut st = agent.state.lock().await;
|
||||
if st.memory_scoring_in_flight { return; }
|
||||
if st.memory_scoring_in_flight {
|
||||
dbglog!("[scoring] skipped: memory_scoring_in_flight=true");
|
||||
return;
|
||||
}
|
||||
st.memory_scoring_in_flight = true;
|
||||
drop(st);
|
||||
let ctx = agent.context.lock().await.clone();
|
||||
|
|
@ -445,6 +462,28 @@ impl Mind {
|
|||
});
|
||||
}
|
||||
|
||||
/// Run full N×M scoring matrix — scores every memory against every response.
|
||||
pub fn start_full_scoring(&self) {
|
||||
let agent = self.agent.clone();
|
||||
let bg_tx = self.bg_tx.clone();
|
||||
tokio::spawn(async move {
|
||||
{
|
||||
let mut st = agent.state.lock().await;
|
||||
if st.memory_scoring_in_flight {
|
||||
dbglog!("[scoring-full] skipped: memory_scoring_in_flight=true");
|
||||
return;
|
||||
}
|
||||
st.memory_scoring_in_flight = true;
|
||||
}
|
||||
let client = agent.client.clone();
|
||||
match learn::score_memories(&client, &agent).await {
|
||||
Ok(()) => { let _ = bg_tx.send(BgEvent::ScoringDone); }
|
||||
Err(e) => { dbglog!("[scoring-full] FAILED: {:#}", e); }
|
||||
}
|
||||
agent.state.lock().await.memory_scoring_in_flight = false;
|
||||
});
|
||||
}
|
||||
|
||||
async fn start_turn(&self, text: &str, target: StreamTarget) {
|
||||
{
|
||||
match target {
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@
|
|||
use crate::agent::api::ApiClient;
|
||||
use crate::agent::context::{AstNode, Ast, NodeBody, ContextState, Role};
|
||||
|
||||
/// Upper bound on a single score API call. Raised from 120s to 300s because
/// the full-matrix pass issues one sequential call per memory and slower
/// backends were timing out.
const SCORE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(300);
|
||||
|
||||
// ── Message building ────────────────────────────────────────────
|
||||
|
||||
|
|
@ -167,98 +167,92 @@ async fn score_divergence(
|
|||
|
||||
// ── Full matrix scoring (debug screen) ──────────────────────────
|
||||
|
||||
/// Result of scoring one conversation's memory usage.
///
/// Produced by the full N×M scoring pass: each memory is ablated in turn and
/// the resulting per-response divergence from the baseline is recorded.
pub struct MemoryScore {
    /// Aggregate weight per memory: (memory key, summed divergence).
    pub memory_weights: Vec<(String, f64)>,
    /// Aggregate score per response (summed over all memories).
    pub response_scores: Vec<f64>,
    /// Full matrix: divergence[memory_idx][response_idx]
    pub matrix: Vec<Vec<f64>>,
    /// Memory keys, in the same order as the rows of `matrix`.
    pub memory_keys: Vec<String>,
    /// Conversation entry index of each scored response; maps `matrix`
    /// columns back to conversation entries.
    pub response_entry_indices: Vec<usize>,
}

impl MemoryScore {
    /// Memories that measurably influenced the response at conversation
    /// entry `entry_idx`, sorted by descending divergence.
    ///
    /// Scores at or below the 0.01 noise floor are dropped. Returns an empty
    /// Vec when `entry_idx` is not one of the scored responses.
    pub fn important_memories_for_entry(&self, entry_idx: usize) -> Vec<(&str, f64)> {
        // Map the conversation entry index to its matrix column.
        let Some(resp_idx) = self.response_entry_indices.iter().position(|&i| i == entry_idx)
        else { return Vec::new() };

        let mut result: Vec<(&str, f64)> = self.memory_keys.iter()
            .zip(self.matrix.iter())
            .filter_map(|(key, row)| {
                // Missing column (ragged row) counts as zero divergence.
                let score = row.get(resp_idx).copied().unwrap_or(0.0);
                if score > 0.01 { Some((key.as_str(), score)) } else { None }
            })
            .collect();
        // Descending order; NaN compares as Equal so a bad score can't panic.
        result.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
        result
    }
}
|
||||
|
||||
/// Score how important each memory is to the conversation (full matrix).
|
||||
pub async fn score_memories(
|
||||
context: &ContextState,
|
||||
client: &ApiClient,
|
||||
) -> anyhow::Result<MemoryScore> {
|
||||
let mut memory_keys: Vec<String> = context.conversation().iter()
|
||||
agent: &std::sync::Arc<crate::agent::Agent>,
|
||||
) -> anyhow::Result<()> {
|
||||
// Collect memory keys and response indices under a brief lock
|
||||
let (memory_keys, response_indices) = {
|
||||
let ctx = agent.context.lock().await;
|
||||
let mut keys: Vec<String> = ctx.conversation().iter()
|
||||
.filter_map(|node| memory_key(node).map(String::from))
|
||||
.collect();
|
||||
memory_keys.dedup();
|
||||
|
||||
let response_indices: Vec<usize> = context.conversation().iter().enumerate()
|
||||
keys.dedup();
|
||||
let resp: Vec<usize> = ctx.conversation().iter().enumerate()
|
||||
.filter(|(_, node)| is_assistant(node))
|
||||
.map(|(i, _)| i)
|
||||
.collect();
|
||||
(keys, resp)
|
||||
};
|
||||
|
||||
if memory_keys.is_empty() || response_indices.is_empty() {
|
||||
return Ok(MemoryScore {
|
||||
memory_weights: Vec::new(), response_scores: Vec::new(),
|
||||
matrix: Vec::new(), memory_keys: Vec::new(),
|
||||
response_entry_indices: Vec::new(),
|
||||
});
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
||||
let http = http_client();
|
||||
let range = 0..context.conversation().len();
|
||||
|
||||
let baseline = call_score(&http, client, &build_token_ids(context, range.clone(), Filter::None), Some(5)).await?;
|
||||
|
||||
let total = memory_keys.len();
|
||||
let mut matrix: Vec<Vec<f64>> = Vec::new();
|
||||
dbglog!("[scoring-full] starting: {} memories × {} responses",
|
||||
total, response_indices.len());
|
||||
|
||||
let http = http_client();
|
||||
|
||||
let activity = crate::agent::start_activity(agent, "scoring: baseline").await;
|
||||
let baseline_tokens = {
|
||||
let ctx = agent.context.lock().await;
|
||||
build_token_ids(&ctx, 0..ctx.conversation().len(), Filter::None)
|
||||
};
|
||||
let baseline = call_score(&http, client, &baseline_tokens, Some(5)).await?;
|
||||
dbglog!("[scoring-full] baseline done ({} response scores)", baseline.len());
|
||||
|
||||
for (mem_idx, key) in memory_keys.iter().enumerate() {
|
||||
dbglog!(
|
||||
"scoring {}/{}: {}...", mem_idx + 1, total, key,
|
||||
);
|
||||
let msgs = build_token_ids(context, range.clone(), Filter::SkipKey(key));
|
||||
match call_score(&http, client, &msgs, Some(5)).await {
|
||||
Ok(without) => matrix.push(divergence(&baseline, &without)),
|
||||
activity.update(format!("scoring: {}/{}", mem_idx + 1, total)).await;
|
||||
dbglog!("[scoring-full] {}/{}: {}", mem_idx + 1, total, key);
|
||||
let tokens = {
|
||||
let ctx = agent.context.lock().await;
|
||||
build_token_ids(&ctx, 0..ctx.conversation().len(), Filter::SkipKey(key))
|
||||
};
|
||||
let row = match call_score(&http, client, &tokens, Some(5)).await {
|
||||
Ok(without) => {
|
||||
let divs = divergence(&baseline, &without);
|
||||
let max_div = divs.iter().cloned().fold(0.0f64, f64::max);
|
||||
dbglog!("[scoring-full] {}/{}: {} max_div={:.3}",
|
||||
mem_idx + 1, total, key, max_div);
|
||||
divs
|
||||
}
|
||||
Err(e) => {
|
||||
dbglog!(
|
||||
"[training] {} FAILED: {:#}", key, e,
|
||||
);
|
||||
matrix.push(vec![0.0; baseline.len()]);
|
||||
dbglog!("[scoring-full] {}/{}: {} FAILED: {:#}",
|
||||
mem_idx + 1, total, key, e);
|
||||
vec![0.0; baseline.len()]
|
||||
}
|
||||
};
|
||||
// Write this memory's scores to the live AST nodes
|
||||
{
|
||||
let mut ctx = agent.context.lock().await;
|
||||
let mut set_count = 0;
|
||||
|
||||
for (resp_idx, &idx) in response_indices.iter().enumerate() {
|
||||
if idx >= ctx.conversation().len() { continue; }
|
||||
let node = &mut ctx.conversation_mut()[idx];
|
||||
if let AstNode::Branch {
|
||||
role: Role::Assistant, memory_scores, ..
|
||||
} = node {
|
||||
if let Some(&score) = row.get(resp_idx) {
|
||||
if score > 0.01 {
|
||||
memory_scores.insert(key.clone(), score);
|
||||
set_count += 1;
|
||||
} else {
|
||||
memory_scores.remove(key.as_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
let memory_weights: Vec<(String, f64)> = memory_keys.iter()
|
||||
.zip(matrix.iter())
|
||||
.map(|(key, row)| (key.clone(), row.iter().sum()))
|
||||
.collect();
|
||||
|
||||
let mut response_scores = vec![0.0; response_indices.len()];
|
||||
for row in &matrix {
|
||||
for (j, &v) in row.iter().enumerate() {
|
||||
if j < response_scores.len() { response_scores[j] += v; }
|
||||
dbglog!("[scoring-full] {}/{} AST: set={}", mem_idx + 1, total, set_count);
|
||||
}
|
||||
agent.state.lock().await.changed.notify_one();
|
||||
}
|
||||
|
||||
Ok(MemoryScore {
|
||||
memory_weights, response_scores, matrix, memory_keys,
|
||||
response_entry_indices: response_indices,
|
||||
})
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Find the entry index after `start` that contains the Nth assistant response.
|
||||
|
|
@ -365,7 +359,9 @@ where
|
|||
cumulative.push(running);
|
||||
}
|
||||
|
||||
dbglog!("[scoring] total_tokens={}, cutoff={}, {} candidates", total_tokens, token_cutoff, candidates.len());
|
||||
let total = candidates.len();
|
||||
dbglog!("[scoring] total_tokens={}, cutoff={}, {} candidates", total_tokens, token_cutoff, total);
|
||||
let activity = crate::agent::start_activity(agent, format!("scoring: 0/{}", total)).await;
|
||||
|
||||
for (pos, key, _) in &candidates {
|
||||
// Only score memories in the first 60% of the conversation by tokens —
|
||||
|
|
@ -382,7 +378,7 @@ where
|
|||
continue;
|
||||
}
|
||||
|
||||
let _scoring = crate::agent::start_activity(agent, format!("scoring: {}", key)).await;
|
||||
activity.update(format!("scoring: {}/{} {}", scored + 1, total, key)).await;
|
||||
match score_divergence(&http, client, context, range, Filter::SkipKey(key), Some(5)).await {
|
||||
Ok((divs, _)) => {
|
||||
let n_responses = divs.len();
|
||||
|
|
|
|||
|
|
@ -57,8 +57,8 @@ fn commands() -> Vec<SlashCommand> { vec![
|
|||
});
|
||||
}
|
||||
} },
|
||||
SlashCommand { name: "/score", help: "Score memory importance",
|
||||
handler: |s, _| { let _ = s.mind_tx.send(MindCommand::Score); } },
|
||||
SlashCommand { name: "/score", help: "Score memory importance (full matrix)",
|
||||
handler: |s, _| { let _ = s.mind_tx.send(MindCommand::ScoreFull); } },
|
||||
SlashCommand { name: "/dmn", help: "Show DMN state",
|
||||
handler: |s, _| {
|
||||
let st = s.shared_mind.lock().unwrap();
|
||||
|
|
@ -527,7 +527,7 @@ impl InteractScreen {
|
|||
}
|
||||
}
|
||||
}
|
||||
AstNode::Branch { role, children } => {
|
||||
AstNode::Branch { role, children, .. } => {
|
||||
match role {
|
||||
Role::User => {
|
||||
let text: String = children.iter()
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ impl ConsciousScreen {
|
|||
if let AstNode::Leaf(leaf) = node {
|
||||
if let NodeBody::Memory { key, score, text } = leaf.body() {
|
||||
let status = match score {
|
||||
Some(s) => { scored += 1; format!("score: {:.2}", s) }
|
||||
Some(s) => { scored += 1; format!("{:.2}", s) }
|
||||
None => { unscored += 1; String::new() }
|
||||
};
|
||||
mem_children.push(SectionView {
|
||||
|
|
@ -63,7 +63,51 @@ impl ConsciousScreen {
|
|||
});
|
||||
}
|
||||
|
||||
views.push(section_to_view("Conversation", ctx.conversation()));
|
||||
let conv = ctx.conversation();
|
||||
let mut conv_children: Vec<SectionView> = Vec::new();
|
||||
for node in conv {
|
||||
let mut view = SectionView {
|
||||
name: node.label(),
|
||||
tokens: node.tokens(),
|
||||
content: match node {
|
||||
AstNode::Leaf(leaf) => leaf.body().text().to_string(),
|
||||
_ => String::new(),
|
||||
},
|
||||
children: match node {
|
||||
AstNode::Branch { children, .. } => children.iter()
|
||||
.map(|c| SectionView {
|
||||
name: c.label(), tokens: c.tokens(),
|
||||
content: match c { AstNode::Leaf(l) => l.body().text().to_string(), _ => String::new() },
|
||||
children: Vec::new(), status: String::new(),
|
||||
}).collect(),
|
||||
_ => Vec::new(),
|
||||
},
|
||||
status: String::new(),
|
||||
};
|
||||
// Show memory attribution inline as status text
|
||||
if let AstNode::Branch { memory_scores: ms, .. } = node {
|
||||
if !ms.is_empty() {
|
||||
let mut attrs: Vec<(&str, f64)> = ms.iter()
|
||||
.map(|(k, v)| (k.as_str(), *v))
|
||||
.collect();
|
||||
attrs.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
|
||||
let parts: Vec<String> = attrs.iter()
|
||||
.map(|(k, s)| format!("{}({:.1})", k, s))
|
||||
.collect();
|
||||
view.status = format!("← {}", parts.join(" "));
|
||||
}
|
||||
}
|
||||
conv_children.push(view);
|
||||
}
|
||||
let conv_tokens: usize = conv_children.iter().map(|c| c.tokens).sum();
|
||||
views.push(SectionView {
|
||||
name: format!("Conversation ({} entries)", conv_children.len()),
|
||||
tokens: conv_tokens,
|
||||
content: String::new(),
|
||||
children: conv_children,
|
||||
status: String::new(),
|
||||
});
|
||||
|
||||
views
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -187,10 +187,9 @@ impl SubconsciousScreen {
|
|||
agent.context.try_lock().ok()
|
||||
.map(|ctx| {
|
||||
let conv = ctx.conversation();
|
||||
let mut view = section_to_view("Conversation", conv);
|
||||
let view = section_to_view("Conversation", conv);
|
||||
let fork = fork_point.min(view.children.len());
|
||||
view.children = view.children.split_off(fork);
|
||||
vec![view]
|
||||
view.children.into_iter().skip(fork).collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue