Kill Reasoning, Debug, Activity variants — read status from Agent directly

Reasoning tokens: dropped for now; they will land in context entries later.
Debug sends: converted to dbglog! macro (writes to debug.log).
Activity: now a field on Agent, set directly, read by UI via try_lock.
score_memories_incremental takes agent Arc for activity writes.

UiMessage down to 2 variants: TextDelta, Info.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-05 21:45:55 -04:00
parent eafc2887a3
commit e7914e3d58
7 changed files with 54 additions and 88 deletions

View file

@ -204,36 +204,31 @@ pub async fn score_memories(
});
}
let _ = ui_tx.send(UiMessage::Info(format!(
"[scoring {} memories × {} responses]", memory_keys.len(), response_indices.len(),
)));
let http = http_client();
let range = 0..context.entries.len();
let _ = ui_tx.send(UiMessage::Activity("scoring baseline...".into()));
let baseline = call_score(&http, client, &build_messages(context, range.clone(), Filter::None)).await?;
let total = memory_keys.len();
let mut matrix: Vec<Vec<f64>> = Vec::new();
for (mem_idx, key) in memory_keys.iter().enumerate() {
let _ = ui_tx.send(UiMessage::Activity(format!(
dbglog!(
"scoring {}/{}: {}...", mem_idx + 1, total, key,
)));
);
let msgs = build_messages(context, range.clone(), Filter::SkipKey(key));
match call_score(&http, client, &msgs).await {
Ok(without) => matrix.push(divergence(&baseline, &without)),
Err(e) => {
let _ = ui_tx.send(UiMessage::Debug(format!(
dbglog!(
"[training] {} FAILED: {:#}", key, e,
)));
);
matrix.push(vec![0.0; baseline.len()]);
}
}
}
let _ = ui_tx.send(UiMessage::Activity(String::new()));
let memory_weights: Vec<(String, f64)> = memory_keys.iter()
.zip(matrix.iter())
@ -295,9 +290,7 @@ pub async fn score_memory(
}
let http = http_client();
let _ = ui_tx.send(UiMessage::Activity(format!("scoring memory: {}...", key)));
let (divs, _) = score_divergence(&http, client, context, range, Filter::SkipKey(key)).await?;
let _ = ui_tx.send(UiMessage::Activity(String::new()));
Ok(divs.iter().sum())
}
@ -365,16 +358,16 @@ pub async fn score_memories_incremental(
Ok((divs, _)) => {
let n_responses = divs.len();
let max_div = divs.iter().cloned().fold(0.0f64, f64::max);
let _ = ui_tx.send(UiMessage::Debug(format!(
dbglog!(
"[scoring] {} max:{:.3} ({} responses)", key, max_div, n_responses,
)));
);
// TODO: update graph weight once normalization is figured out
results.push((key.clone(), max_div));
}
Err(e) => {
let _ = ui_tx.send(UiMessage::Debug(format!(
dbglog!(
"[scoring] {} FAILED: {:#}", key, e,
)));
);
}
}
}
@ -408,9 +401,7 @@ pub async fn score_finetune(
}
let http = http_client();
let _ = ui_tx.send(UiMessage::Activity("scoring for fine-tuning...".into()));
let (divs, _) = score_divergence(&http, client, context, range, Filter::SkipAllMemories).await?;
let _ = ui_tx.send(UiMessage::Activity(String::new()));
let mut results: Vec<(usize, f64)> = response_positions.iter()
.enumerate()