observation extractor: per-segment dedup using shared transcript helpers
The observation agent was re-extracting the same conversations every consolidation run because select_conversation_fragments had no tracking of what had already been processed.

Extract shared helpers from the fact miner's dedup pattern:

- transcript_key(prefix, path): namespaced key from prefix + filename
- segment_key(base, idx): per-segment key
- keys_with_prefix(prefix): bulk lookup from the store
- unmined_segments(path, prefix, known): find unprocessed segments
- mark_segment(...): mark a segment as processed

Rewrite select_conversation_fragments to use these with the _observed-transcripts prefix. Each compaction segment within a transcript is now tracked independently: new segments from ongoing sessions get picked up, and already-processed segments are skipped.
parent 9d1d690f17
commit 10499a98ea
2 changed files with 121 additions and 63 deletions
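The shared helpers listed above live in `enrich`, and their exact signatures are not shown in this diff. The sketch below only illustrates the key scheme they imply: a transcript key namespaced as `prefix#filename` (the `#` after the prefix is visible in the `keys_with_prefix` call in the diff), a per-segment key derived from that base plus the segment index (the second separator is assumed here), and a bulk prefix lookup. A plain `BTreeMap` stands in for the real store; names and shapes are illustrative, not the actual `enrich` API.

```rust
use std::collections::BTreeMap;
use std::path::Path;

/// Namespaced key for a whole transcript: "<prefix>#<filename>".
/// (Shape inferred from the commit message; not the real enrich signature.)
fn transcript_key(prefix: &str, path: &Path) -> String {
    let name = path
        .file_name()
        .map(|n| n.to_string_lossy().to_string())
        .unwrap_or_default();
    format!("{}#{}", prefix, name)
}

/// Per-segment key: base key plus segment index (separator assumed).
fn segment_key(base: &str, idx: usize) -> String {
    format!("{}#{}", base, idx)
}

/// Bulk lookup of all keys under a prefix, standing in for keys_with_prefix.
fn keys_with_prefix(store: &BTreeMap<String, String>, prefix: &str) -> Vec<String> {
    store.keys().filter(|k| k.starts_with(prefix)).cloned().collect()
}

fn main() {
    let mut store = BTreeMap::new();
    let base = transcript_key("_observed-transcripts", Path::new("abc123.jsonl"));

    // Pretend segment 0 was processed on an earlier run.
    store.insert(segment_key(&base, 0), "observed".to_string());

    let known = keys_with_prefix(&store, "_observed-transcripts#");
    assert!(known.contains(&segment_key(&base, 0)));  // already processed, skipped
    assert!(!known.contains(&segment_key(&base, 1))); // still unmined, picked up
}
```

Under a scheme like this, a later run only sees segment indices that have not yet been written under the `_observed-transcripts#` namespace.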
@@ -611,48 +611,16 @@ pub fn run_one_agent(

// ---------------------------------------------------------------------------

/// Extract human-readable dialogue from a conversation JSONL
fn extract_conversation_text(path: &Path, max_chars: usize) -> String {
    let cfg = crate::config::get();
    let messages = super::transcript::parse_transcript(path).unwrap_or_default();
    let mut fragments = Vec::new();
    let mut total = 0;

const OBSERVED_PREFIX: &str = "_observed-transcripts";

    for msg in &messages {
        let min_len = if msg.role == "user" { 5 } else { 10 };
        if msg.text.len() <= min_len { continue; }

        // Only include external user messages
        if msg.role == "user" {
            if msg.user_type.as_deref() != Some("external") { continue; }
            if msg.text.starts_with("[Request interrupted") { continue; }
        }

        let role = if msg.role == "user" { &cfg.user_name } else { &cfg.assistant_name };
        fragments.push(format!("**{}:** {}", role, msg.text));
        total += msg.text.len();
        if total > max_chars { break; }
    }
    fragments.join("\n\n")
}

/// Count short user messages (dialogue turns) in a JSONL
fn count_dialogue_turns(path: &Path) -> usize {
    let messages = super::transcript::parse_transcript(path).unwrap_or_default();
    messages.iter()
        .filter(|m| m.role == "user"
            && m.user_type.as_deref() == Some("external")
            && m.text.len() > 5
            && m.text.len() < 500
            && !m.text.starts_with("[Request interrupted")
            && !m.text.starts_with("Implement the following"))
        .count()
}

/// Select conversation fragments for the observation extractor
/// Select conversation fragments (per-segment) for the observation extractor.
/// Skips segments already processed, marks selected segments as observed.
pub fn select_conversation_fragments(n: usize) -> Vec<(String, String)> {
    let projects = crate::config::get().projects_dir.clone();
    if !projects.exists() { return Vec::new(); }

    let observed = super::enrich::keys_with_prefix(&format!("{}#", OBSERVED_PREFIX));

    let mut jsonl_files: Vec<PathBuf> = Vec::new();
    if let Ok(dirs) = fs::read_dir(&projects) {
        for dir in dirs.filter_map(|e| e.ok()) {
@@ -672,24 +640,61 @@ pub fn select_conversation_fragments(n: usize) -> Vec<(String, String)>
        }
    }

    let mut scored: Vec<(usize, PathBuf)> = jsonl_files.into_iter()
        .map(|f| (count_dialogue_turns(&f), f))
        .filter(|(turns, _)| *turns >= 10)
        .collect();
    scored.sort_by(|a, b| b.0.cmp(&a.0));

    let mut fragments = Vec::new();
    for (_, f) in scored.iter().take(n * 2) {
        let session_id = f.file_stem()
            .map(|s| s.to_string_lossy().to_string())
            .unwrap_or_else(|| "unknown".into());
        let text = extract_conversation_text(f, 8000);
        if text.len() > 500 {
            fragments.push((session_id, text));
    // Collect unmined segments across all transcripts, keeping the text
    let mut candidates: Vec<(PathBuf, usize, String, String)> = Vec::new();
    for path in &jsonl_files {
        for (seg_idx, messages) in super::enrich::unmined_segments(path, OBSERVED_PREFIX, &observed) {
            let text = format_segment(&messages, 8000);
            if text.len() > 500 {
                let session_id = path.file_stem()
                    .map(|s| s.to_string_lossy().to_string())
                    .unwrap_or_else(|| "unknown".into());
                let id = format!("{}.{}", session_id, seg_idx);
                candidates.push((path.clone(), seg_idx, id, text));
            }
        }
        if fragments.len() >= n { break; }
    }
    fragments

    // Take up to n, mark them, and return the text
    let selected: Vec<_> = candidates.into_iter().take(n).collect();

    if !selected.is_empty() {
        if let Ok(mut store) = crate::store::Store::load() {
            for (path, seg_idx, _, _) in &selected {
                super::enrich::mark_segment(
                    &mut store,
                    &path.to_string_lossy(),
                    OBSERVED_PREFIX,
                    *seg_idx,
                    "agent:knowledge-observation",
                    "observed",
                );
            }
            let _ = store.save();
        }
    }

    selected.into_iter()
        .map(|(_, _, id, text)| (id, text))
        .collect()
}

/// Format a segment's messages into readable text for the observation agent.
fn format_segment(messages: &[(usize, String, String, String)], max_chars: usize) -> String {
    let cfg = crate::config::get();
    let mut fragments = Vec::new();
    let mut total = 0;

    for (_, role, text, _) in messages {
        let min_len = if role == "user" { 5 } else { 10 };
        if text.len() <= min_len { continue; }

        let name = if role == "user" { &cfg.user_name } else { &cfg.assistant_name };
        fragments.push(format!("**{}:** {}", name, text));
        total += text.len();
        if total > max_chars { break; }
    }
    fragments.join("\n\n")
}

// ---------------------------------------------------------------------------
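`super::enrich::unmined_segments` and `mark_segment` are defined outside this hunk (they come from the fact miner's dedup pattern), so the following is only a hedged stand-in for the behaviour the new code relies on: split a transcript's messages into compaction segments, then return only the segments whose per-segment key is not already known. The `Msg` alias mirrors the tuple shape `format_segment` consumes above; the boundary predicate, function names, and key format are assumptions for illustration, not the real `enrich` implementation.

```rust
use std::collections::HashSet;

/// One parsed transcript line: (line index, role, text, user_type),
/// mirroring the tuple shape format_segment consumes in the diff above.
type Msg = (usize, String, String, String);

/// Illustrative stand-in for enrich::unmined_segments: split messages into
/// compaction segments, then keep only segments whose key is not yet known.
/// The boundary predicate is hypothetical; the real splitting lives in enrich.
fn unmined_segments_sketch(
    transcript_name: &str,
    messages: &[Msg],
    prefix: &str,
    known: &HashSet<String>,
    is_boundary: impl Fn(&Msg) -> bool,
) -> Vec<(usize, Vec<Msg>)> {
    // Split on boundaries into ordered segments.
    let mut segments: Vec<Vec<Msg>> = vec![Vec::new()];
    for msg in messages {
        if is_boundary(msg) && !segments.last().unwrap().is_empty() {
            segments.push(Vec::new());
        }
        segments.last_mut().unwrap().push(msg.clone());
    }

    // Keep only non-empty segments whose per-segment key is absent.
    let base = format!("{}#{}", prefix, transcript_name);
    segments.into_iter()
        .enumerate()
        .filter(|(idx, seg)| !seg.is_empty() && !known.contains(&format!("{}#{}", base, idx)))
        .collect()
}

fn main() {
    let msgs: Vec<Msg> = vec![
        (0, "user".into(), "first question".into(), "external".into()),
        (1, "assistant".into(), "answer".into(), String::new()),
        (2, "system".into(), "[compaction]".into(), String::new()),
        (3, "user".into(), "follow-up in a new segment".into(), "external".into()),
    ];
    // Pretend segment 0 of this transcript was marked on an earlier run.
    let known: HashSet<String> =
        ["_observed-transcripts#abc123.jsonl#0".to_string()].into_iter().collect();

    let unmined = unmined_segments_sketch(
        "abc123.jsonl", &msgs, "_observed-transcripts", &known,
        |(_, role, text, _)| role == "system" && text.starts_with("[compaction"),
    );
    // Only the new segment (index 1) comes back; segment 0 is skipped.
    assert_eq!(unmined.len(), 1);
    assert_eq!(unmined[0].0, 1);
}
```

Because segments are indexed in file order, a transcript that keeps growing only adds higher indices; earlier indices stay marked from previous runs and keep being skipped, which is the "new segments get picked up, already-processed segments are skipped" behaviour the commit message describes.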