stop filtering journal/digest nodes from knowledge and search

Journal and digest nodes are episodic memory — they should participate
in the graph on the same terms as everything else. Remove all
journal#/daily-/weekly-/monthly-/all-sessions skip filters from
knowledge extraction, connector pairs, challenger, semantic keys, and
link candidate selection. Use the node_type field instead of key-name
matching for episodic/semantic classification.

Operational nodes (MEMORY, where-am-i, work-queue, work-state) are
still filtered — they're system state, not memory.

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-03-08 20:02:01 -04:00
parent b00e09b091
commit 70c0276fa0
4 changed files with 13 additions and 28 deletions

View file

@@ -515,10 +515,10 @@ fn spectral_distance(embedding: &HashMap<String, Vec<f64>>, a: &str, b: &str) ->
 fn select_extractor_clusters(_store: &Store, n: usize) -> Vec<Vec<String>> {
     let embedding = load_spectral_embedding();
-    let skip = ["journal", "MEMORY", "where-am-i", "work-queue"];
+    let skip = ["MEMORY", "where-am-i", "work-queue", "work-state"];
     let semantic_keys: Vec<&String> = embedding.keys()
-        .filter(|k| !k.starts_with("journal#") && !skip.contains(&k.as_str()))
+        .filter(|k| !skip.contains(&k.as_str()))
         .collect();
     let cluster_size = 5;
@@ -578,15 +578,11 @@ pub fn run_extractor(store: &Store, graph: &Graph, batch_size: usize) -> Result<
 fn select_connector_pairs(store: &Store, graph: &Graph, n: usize) -> Vec<(Vec<String>, Vec<String>)> {
     let embedding = load_spectral_embedding();
-    let skip_prefixes = ["journal#", "daily-", "weekly-", "monthly-", "all-sessions"];
-    let skip_exact: HashSet<&str> = ["journal", "MEMORY", "where-am-i",
+    let skip_exact: HashSet<&str> = ["MEMORY", "where-am-i",
         "work-queue", "work-state"].iter().copied().collect();
     let semantic_keys: Vec<&String> = embedding.keys()
-        .filter(|k| {
-            !skip_exact.contains(k.as_str())
-            && !skip_prefixes.iter().any(|p| k.starts_with(p))
-        })
+        .filter(|k| !skip_exact.contains(k.as_str()))
         .collect();
     let mut pairs = Vec::new();
@@ -656,8 +652,7 @@ pub fn run_challenger(store: &Store, graph: &Graph, batch_size: usize) -> Result<
     let mut candidates: Vec<(&String, usize)> = store.nodes.iter()
         .filter(|(k, _)| {
-            !k.starts_with("journal#")
-            && !["journal", "MEMORY", "where-am-i"].contains(&k.as_str())
+            !["MEMORY", "where-am-i", "work-queue", "work-state"].contains(&k.as_str())
         })
         .map(|(k, _)| (k, graph.degree(k)))
         .collect();