migrate more files to use index-based node access

- learn.rs, daemon.rs, graph.rs, digest.rs, prompts.rs
- Convert store.nodes.get() → store.get_node()
- Convert store.nodes.contains_key() → store.contains_key()
- Convert store.nodes.values()/.iter() → all_keys() + get_node() (see the sketch below)
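
The new accessors are fallible (an index read can fail), which is why the
call sites below unwrap with .ok()/.unwrap_or(..). A minimal sketch of the
Store surface these conversions assume; the signatures are inferred from
the call sites, with a toy BTreeMap index and io::Result standing in for
whatever store.rs actually uses:

    use std::collections::BTreeMap;

    #[derive(Clone)]
    struct Node { weight: f32 }

    struct Store { index: BTreeMap<String, Node> }

    impl Store {
        // Ok(None) when the key is absent; Err on an index read failure.
        fn get_node(&self, key: &str) -> std::io::Result<Option<Node>> {
            Ok(self.index.get(key).cloned())
        }
        // Presence check without materializing the node.
        fn contains_key(&self, key: &str) -> std::io::Result<bool> {
            Ok(self.index.contains_key(key))
        }
        // Every live key, for iteration without a full in-memory node map.
        fn all_keys(&self) -> std::io::Result<Vec<String>> {
            Ok(self.index.keys().cloned().collect())
        }
    }

    // The values()/iter() replacement pattern used throughout the diff
    // (count_heavy is a hypothetical caller, not part of this commit):
    fn count_heavy(store: &Store) -> usize {
        let all_keys = store.all_keys().unwrap_or_default();
        all_keys.iter()
            .filter_map(|k| store.get_node(k).ok()?)
            .filter(|n| n.weight >= 0.5)
            .count()
    }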

Co-Authored-By: Proof of Concept <poc@bcachefs.org>

Kent Overstreet 2026-04-13 19:37:11 -04:00
parent fe6450223c
commit af3e41f1d9
5 changed files with 63 additions and 43 deletions

@@ -886,8 +886,8 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
     let mut missing_nodes: HashSet<String> = HashSet::new();
     for rel in &store.relations {
         if rel.deleted { continue; }
-        let s_missing = !store.nodes.contains_key(&rel.source_key);
-        let t_missing = !store.nodes.contains_key(&rel.target_key);
+        let s_missing = !store.contains_key(&rel.source_key).unwrap_or(false);
+        let t_missing = !store.contains_key(&rel.target_key).unwrap_or(false);
         if s_missing || t_missing {
             orphan_edges += 1;
             if s_missing { missing_nodes.insert(rel.source_key.clone()); }
@@ -897,7 +897,9 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
     // NodeType breakdown
     let mut type_counts: HashMap<&str, usize> = HashMap::new();
-    for node in store.nodes.values() {
+    let all_keys = store.all_keys().unwrap_or_default();
+    for key in &all_keys {
+        if let Ok(Some(node)) = store.get_node(key) {
         let label = match node.node_type {
             crate::store::NodeType::EpisodicSession => "episodic",
             crate::store::NodeType::EpisodicDaily => "daily",
@@ -907,6 +909,7 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
         };
         *type_counts.entry(label).or_default() += 1;
     }
+    }
     // Load history for deltas
     let history = load_metrics_history();

@@ -27,11 +27,14 @@ pub fn compute_graph_health(store: &crate::store::Store) -> GraphHealth {
     let graph = store.build_graph();
     let snap = crate::graph::current_metrics(&graph);
-    let episodic_count = store.nodes.iter()
-        .filter(|(_, n)| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
+    let all_keys = store.all_keys().unwrap_or_default();
+    let episodic_count = all_keys.iter()
+        .filter_map(|k| store.get_node(k).ok()?)
+        .filter(|n| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
         .count();
-    let episodic_ratio = if store.nodes.is_empty() { 0.0 }
-        else { episodic_count as f32 / store.nodes.len() as f32 };
+    let total = all_keys.len();
+    let episodic_ratio = if total == 0 { 0.0 }
+        else { episodic_count as f32 / total as f32 };
     // Use the same planning logic as consolidation (skip O(n²) interference)
     let plan = crate::neuro::consolidation_plan_quick(store);

@@ -112,17 +112,21 @@ fn parse_digest_node_links(key: &str, content: &str) -> Vec<DigestLink> {
 pub fn parse_all_digest_links(store: &Store) -> Vec<DigestLink> {
     let mut all_links = Vec::new();
-    let mut digest_keys: Vec<&String> = store.nodes.iter()
-        .filter(|(_, n)| matches!(n.node_type,
+    let all_keys = store.all_keys().unwrap_or_default();
+    let mut digest_keys: Vec<String> = all_keys.into_iter()
+        .filter(|k| {
+            store.get_node(k).ok().flatten()
+                .map(|n| matches!(n.node_type,
                     store::NodeType::EpisodicDaily
                     | store::NodeType::EpisodicWeekly
                     | store::NodeType::EpisodicMonthly))
-        .map(|(k, _)| k)
+                .unwrap_or(false)
+        })
         .collect();
     digest_keys.sort();
-    for key in digest_keys {
-        if let Some(node) = store.nodes.get(key) {
+    for key in &digest_keys {
+        if let Ok(Some(node)) = store.get_node(key) {
             all_links.extend(parse_digest_node_links(key, &node.content));
         }
     }
@@ -178,11 +182,11 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, us
         );
         if exists { skipped += 1; continue; }
-        let source_uuid = match store.nodes.get(&source) {
+        let source_uuid = match store.get_node(&source).ok().flatten() {
             Some(n) => n.uuid,
             None => { skipped += 1; continue; }
         };
-        let target_uuid = match store.nodes.get(&target) {
+        let target_uuid = match store.get_node(&target).ok().flatten() {
             Some(n) => n.uuid,
             None => { skipped += 1; continue; }
         };

@@ -334,7 +334,9 @@ where
     for (i, node) in context.conversation().iter().enumerate() {
         if let Some(key) = memory_key(node) {
             if !seen.insert(key.to_owned()) { continue; }
-            let last_scored = store.nodes.get(key)
+            let last_scored = store.get_node(key)
+                .ok()
+                .flatten()
                 .map(|n| n.last_scored)
                 .unwrap_or(0);
             if now - last_scored >= max_age_secs {

@@ -30,7 +30,9 @@ pub fn format_topology_header(store: &Store, graph: &Graph) -> String {
     // Type counts
     let mut type_counts: std::collections::HashMap<&str, usize> = std::collections::HashMap::new();
-    for node in store.nodes.values() {
+    let all_keys = store.all_keys().unwrap_or_default();
+    for key in &all_keys {
+        if let Ok(Some(node)) = store.get_node(key) {
         let label = match node.node_type {
             crate::store::NodeType::Semantic => "semantic",
             crate::store::NodeType::EpisodicSession
@@ -40,6 +42,7 @@ pub fn format_topology_header(store: &Store, graph: &Graph) -> String {
         };
         *type_counts.entry(label).or_default() += 1;
     }
+    }
     let mut types: Vec<_> = type_counts.iter().collect();
     types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
     let type_str: String = types.iter()
@@ -86,7 +89,7 @@ pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph)
     let hub_thresh = graph.hub_threshold();
     let mut out = String::new();
     for item in items {
-        let node = match store.nodes.get(&item.key) {
+        let node = match store.get_node(&item.key).ok().flatten() {
             Some(n) => n,
             None => continue,
         };
@@ -139,7 +142,9 @@ pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph)
         out.push_str("Neighbors:\n");
         for (n, strength) in neighbors.iter().take(15) {
             let n_cc = graph.clustering_coefficient(n);
-            let n_community = store.nodes.get(n.as_str())
+            let n_community = store.get_node(n)
+                .ok()
+                .flatten()
                 .and_then(|n| n.community_id);
             out.push_str(&format!("  - {} (str={:.2}, cc={:.3}",
                 n, strength, n_cc));
@@ -165,10 +170,13 @@ pub fn format_health_section(store: &Store, graph: &Graph) -> String {
     // Weight histogram
     let mut buckets = [0u32; 10]; // 0.0-0.1, 0.1-0.2, ..., 0.9-1.0
-    for node in store.nodes.values() {
+    let all_keys = store.all_keys().unwrap_or_default();
+    for key in &all_keys {
+        if let Ok(Some(node)) = store.get_node(key) {
         let bucket = ((node.weight * 10.0) as usize).min(9);
         buckets[bucket] += 1;
     }
+    }
     for (i, &count) in buckets.iter().enumerate() {
         let lo = i as f32 / 10.0;
         let hi = (i + 1) as f32 / 10.0;
@@ -177,9 +185,9 @@ pub fn format_health_section(store: &Store, graph: &Graph) -> String {
     }
     // Near-prune nodes
-    let near_prune: Vec<_> = store.nodes.iter()
-        .filter(|(_, n)| n.weight < 0.15)
-        .map(|(k, n)| (k.clone(), n.weight))
+    let near_prune: Vec<_> = all_keys.iter()
+        .filter_map(|k| store.get_node(k).ok()?.map(|n| (k.clone(), n.weight)))
+        .filter(|(_, w)| *w < 0.15)
         .collect();
     if !near_prune.is_empty() {
         out.push_str(&format!("\n## Near-prune nodes ({} total)\n", near_prune.len()));
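
A note on the filter_map closures above: get_node returns
Result<Option<Node>, _>, so .ok() yields Option<Option<Node>> and the ?
operator early-returns None from the closure on the Err case, while
Ok(None) passes through ? as the closure's own None; filter_map drops
both. A standalone sketch of the idiom, with toy types rather than the
real store:

    fn main() {
        let lookups: Vec<Result<Option<i32>, &str>> = vec![
            Ok(Some(3)),   // key found
            Ok(None),      // key absent
            Err("io err"), // index read failed
        ];
        let found: Vec<i32> = lookups.into_iter()
            // .ok() maps Err to None; ? then early-returns None from
            // the closure, and Ok(None) flows through ? as None, so
            // filter_map keeps only the values that were actually found.
            .filter_map(|r| r.ok()?)
            .collect();
        assert_eq!(found, vec![3]);
    }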