memory-search --seen: show current and previous seen sets separately
Instead of merging both into one flat list, display them as distinct sections so it's clear what was surfaced in this context vs what came from before compaction. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
134f7308e3
commit
e50d43bbf0
1 changed files with 31 additions and 44 deletions
|
|
@ -737,56 +737,43 @@ fn show_seen() {
|
||||||
println!("Pending chunks: {}", pending);
|
println!("Pending chunks: {}", pending);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read seen file in insertion order (append-only file)
|
|
||||||
let seen_path = state_dir.join(format!("seen-{}", session_id));
|
|
||||||
let seen_lines: Vec<String> = fs::read_to_string(&seen_path)
|
|
||||||
.unwrap_or_default()
|
|
||||||
.lines()
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.map(|s| s.to_string())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let returned = load_returned(&state_dir, session_id);
|
let returned = load_returned(&state_dir, session_id);
|
||||||
let returned_set: HashSet<_> = returned.iter().cloned().collect();
|
let returned_set: HashSet<_> = returned.iter().cloned().collect();
|
||||||
|
|
||||||
// Count context-loaded vs search-returned
|
let print_seen_file = |label: &str, path: &std::path::Path| {
|
||||||
let context_keys: Vec<_> = seen_lines.iter()
|
let lines: Vec<String> = fs::read_to_string(path)
|
||||||
.map(|l| parse_seen_line(l).to_string())
|
.unwrap_or_default()
|
||||||
.filter(|k| !returned_set.contains(k))
|
.lines()
|
||||||
.collect();
|
.filter(|s| !s.is_empty())
|
||||||
let search_keys: Vec<_> = seen_lines.iter()
|
.map(|s| s.to_string())
|
||||||
.map(|l| parse_seen_line(l).to_string())
|
.collect();
|
||||||
.filter(|k| returned_set.contains(k))
|
if lines.is_empty() { return; }
|
||||||
.collect();
|
|
||||||
|
|
||||||
println!("\nSeen set ({} total):", seen_lines.len());
|
let context_keys: Vec<_> = lines.iter()
|
||||||
if !context_keys.is_empty() {
|
.map(|l| parse_seen_line(l).to_string())
|
||||||
println!(" Context-loaded ({}):", context_keys.len());
|
.filter(|k| !returned_set.contains(k))
|
||||||
for key in &context_keys {
|
.collect();
|
||||||
println!(" {}", key);
|
let search_keys: Vec<_> = lines.iter()
|
||||||
}
|
.map(|l| parse_seen_line(l).to_string())
|
||||||
}
|
.filter(|k| returned_set.contains(k))
|
||||||
if !search_keys.is_empty() {
|
.collect();
|
||||||
println!(" Search-returned ({}):", search_keys.len());
|
|
||||||
for key in &search_keys {
|
|
||||||
println!(" {}", key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Show returned keys that aren't in the seen set (bug indicator)
|
println!("\n{} ({} total):", label, lines.len());
|
||||||
let seen_key_set: HashSet<_> = seen_lines.iter()
|
if !context_keys.is_empty() {
|
||||||
.map(|l| parse_seen_line(l).to_string())
|
println!(" Context-loaded ({}):", context_keys.len());
|
||||||
.collect();
|
for key in &context_keys { println!(" {}", key); }
|
||||||
let orphan_returned: Vec<_> = returned.iter()
|
|
||||||
.filter(|k| !seen_key_set.contains(k.as_str()))
|
|
||||||
.collect();
|
|
||||||
if !orphan_returned.is_empty() {
|
|
||||||
println!("\n WARNING: {} returned keys not in seen set (pre-compaction?):",
|
|
||||||
orphan_returned.len());
|
|
||||||
for key in &orphan_returned {
|
|
||||||
println!(" {}", key);
|
|
||||||
}
|
}
|
||||||
}
|
if !search_keys.is_empty() {
|
||||||
|
println!(" Search-returned ({}):", search_keys.len());
|
||||||
|
for key in &search_keys { println!(" {}", key); }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let current_path = state_dir.join(format!("seen-{}", session_id));
|
||||||
|
let prev_path = state_dir.join(format!("seen-prev-{}", session_id));
|
||||||
|
|
||||||
|
print_seen_file("Current seen set", &current_path);
|
||||||
|
print_seen_file("Previous seen set (pre-compaction)", &prev_path);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn cleanup_stale_files(dir: &Path, max_age: Duration) {
|
fn cleanup_stale_files(dir: &Path, max_age: Duration) {
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue