migrate more files to use index-based node access

- learn.rs, daemon.rs, graph.rs, digest.rs, prompts.rs
- Convert store.nodes.get() → store.get_node()
- Convert store.nodes.contains_key() → store.contains_key()
- Convert store.nodes.values/iter() → all_keys + get_node

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-13 19:37:11 -04:00
parent fe6450223c
commit af3e41f1d9
5 changed files with 63 additions and 43 deletions

View file

@@ -886,8 +886,8 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
let mut missing_nodes: HashSet<String> = HashSet::new();
for rel in &store.relations {
if rel.deleted { continue; }
let s_missing = !store.nodes.contains_key(&rel.source_key);
let t_missing = !store.nodes.contains_key(&rel.target_key);
let s_missing = !store.contains_key(&rel.source_key).unwrap_or(false);
let t_missing = !store.contains_key(&rel.target_key).unwrap_or(false);
if s_missing || t_missing {
orphan_edges += 1;
if s_missing { missing_nodes.insert(rel.source_key.clone()); }
@@ -897,15 +897,18 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
// NodeType breakdown
let mut type_counts: HashMap<&str, usize> = HashMap::new();
for node in store.nodes.values() {
let label = match node.node_type {
crate::store::NodeType::EpisodicSession => "episodic",
crate::store::NodeType::EpisodicDaily => "daily",
crate::store::NodeType::EpisodicWeekly => "weekly",
crate::store::NodeType::EpisodicMonthly => "monthly",
crate::store::NodeType::Semantic => "semantic",
};
*type_counts.entry(label).or_default() += 1;
let all_keys = store.all_keys().unwrap_or_default();
for key in &all_keys {
if let Ok(Some(node)) = store.get_node(key) {
let label = match node.node_type {
crate::store::NodeType::EpisodicSession => "episodic",
crate::store::NodeType::EpisodicDaily => "daily",
crate::store::NodeType::EpisodicWeekly => "weekly",
crate::store::NodeType::EpisodicMonthly => "monthly",
crate::store::NodeType::Semantic => "semantic",
};
*type_counts.entry(label).or_default() += 1;
}
}
// Load history for deltas

View file

@@ -27,11 +27,14 @@ pub fn compute_graph_health(store: &crate::store::Store) -> GraphHealth {
let graph = store.build_graph();
let snap = crate::graph::current_metrics(&graph);
let episodic_count = store.nodes.iter()
.filter(|(_, n)| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
let all_keys = store.all_keys().unwrap_or_default();
let episodic_count = all_keys.iter()
.filter_map(|k| store.get_node(k).ok()?)
.filter(|n| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
.count();
let episodic_ratio = if store.nodes.is_empty() { 0.0 }
else { episodic_count as f32 / store.nodes.len() as f32 };
let total = all_keys.len();
let episodic_ratio = if total == 0 { 0.0 }
else { episodic_count as f32 / total as f32 };
// Use the same planning logic as consolidation (skip O(n²) interference)
let plan = crate::neuro::consolidation_plan_quick(store);

View file

@@ -112,17 +112,21 @@ fn parse_digest_node_links(key: &str, content: &str) -> Vec<DigestLink> {
pub fn parse_all_digest_links(store: &Store) -> Vec<DigestLink> {
let mut all_links = Vec::new();
let mut digest_keys: Vec<&String> = store.nodes.iter()
.filter(|(_, n)| matches!(n.node_type,
store::NodeType::EpisodicDaily
| store::NodeType::EpisodicWeekly
| store::NodeType::EpisodicMonthly))
.map(|(k, _)| k)
let all_keys = store.all_keys().unwrap_or_default();
let mut digest_keys: Vec<String> = all_keys.into_iter()
.filter(|k| {
store.get_node(k).ok().flatten()
.map(|n| matches!(n.node_type,
store::NodeType::EpisodicDaily
| store::NodeType::EpisodicWeekly
| store::NodeType::EpisodicMonthly))
.unwrap_or(false)
})
.collect();
digest_keys.sort();
for key in digest_keys {
if let Some(node) = store.nodes.get(key) {
for key in &digest_keys {
if let Ok(Some(node)) = store.get_node(key) {
all_links.extend(parse_digest_node_links(key, &node.content));
}
}
@@ -178,11 +182,11 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, us
);
if exists { skipped += 1; continue; }
let source_uuid = match store.nodes.get(&source) {
let source_uuid = match store.get_node(&source).ok().flatten() {
Some(n) => n.uuid,
None => { skipped += 1; continue; }
};
let target_uuid = match store.nodes.get(&target) {
let target_uuid = match store.get_node(&target).ok().flatten() {
Some(n) => n.uuid,
None => { skipped += 1; continue; }
};

View file

@@ -334,7 +334,9 @@ where
for (i, node) in context.conversation().iter().enumerate() {
if let Some(key) = memory_key(node) {
if !seen.insert(key.to_owned()) { continue; }
let last_scored = store.nodes.get(key)
let last_scored = store.get_node(key)
.ok()
.flatten()
.map(|n| n.last_scored)
.unwrap_or(0);
if now - last_scored >= max_age_secs {

View file

@@ -30,15 +30,18 @@ pub fn format_topology_header(store: &Store, graph: &Graph) -> String {
// Type counts
let mut type_counts: std::collections::HashMap<&str, usize> = std::collections::HashMap::new();
for node in store.nodes.values() {
let label = match node.node_type {
crate::store::NodeType::Semantic => "semantic",
crate::store::NodeType::EpisodicSession
| crate::store::NodeType::EpisodicDaily
| crate::store::NodeType::EpisodicWeekly
| crate::store::NodeType::EpisodicMonthly => "episodic",
};
*type_counts.entry(label).or_default() += 1;
let all_keys = store.all_keys().unwrap_or_default();
for key in &all_keys {
if let Ok(Some(node)) = store.get_node(key) {
let label = match node.node_type {
crate::store::NodeType::Semantic => "semantic",
crate::store::NodeType::EpisodicSession
| crate::store::NodeType::EpisodicDaily
| crate::store::NodeType::EpisodicWeekly
| crate::store::NodeType::EpisodicMonthly => "episodic",
};
*type_counts.entry(label).or_default() += 1;
}
}
let mut types: Vec<_> = type_counts.iter().collect();
types.sort_by_key(|(_, c)| std::cmp::Reverse(**c));
@@ -86,7 +89,7 @@ pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph)
let hub_thresh = graph.hub_threshold();
let mut out = String::new();
for item in items {
let node = match store.nodes.get(&item.key) {
let node = match store.get_node(&item.key).ok().flatten() {
Some(n) => n,
None => continue,
};
@@ -139,7 +142,9 @@ pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph)
out.push_str("Neighbors:\n");
for (n, strength) in neighbors.iter().take(15) {
let n_cc = graph.clustering_coefficient(n);
let n_community = store.nodes.get(n.as_str())
let n_community = store.get_node(n)
.ok()
.flatten()
.and_then(|n| n.community_id);
out.push_str(&format!(" - {} (str={:.2}, cc={:.3}",
n, strength, n_cc));
@@ -165,9 +170,12 @@ pub fn format_health_section(store: &Store, graph: &Graph) -> String {
// Weight histogram
let mut buckets = [0u32; 10]; // 0.0-0.1, 0.1-0.2, ..., 0.9-1.0
for node in store.nodes.values() {
let bucket = ((node.weight * 10.0) as usize).min(9);
buckets[bucket] += 1;
let all_keys = store.all_keys().unwrap_or_default();
for key in &all_keys {
if let Ok(Some(node)) = store.get_node(key) {
let bucket = ((node.weight * 10.0) as usize).min(9);
buckets[bucket] += 1;
}
}
for (i, &count) in buckets.iter().enumerate() {
let lo = i as f32 / 10.0;
@@ -177,9 +185,9 @@ pub fn format_health_section(store: &Store, graph: &Graph) -> String {
}
// Near-prune nodes
let near_prune: Vec<_> = store.nodes.iter()
.filter(|(_, n)| n.weight < 0.15)
.map(|(k, n)| (k.clone(), n.weight))
let near_prune: Vec<_> = all_keys.iter()
.filter_map(|k| store.get_node(k).ok()?.map(|n| (k.clone(), n.weight)))
.filter(|(_, w)| *w < 0.15)
.collect();
if !near_prune.is_empty() {
out.push_str(&format!("\n## Near-prune nodes ({} total)\n", near_prune.len()));