migrate more files to use index-based node access

- learn.rs, daemon.rs, graph.rs, digest.rs, prompts.rs
- Convert store.nodes.get() → store.get_node()
- Convert store.nodes.contains_key() → store.contains_key()
- Convert store.nodes.values/iter() → all_keys + get_node

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-13 19:37:11 -04:00
parent fe6450223c
commit af3e41f1d9
5 changed files with 63 additions and 43 deletions

View file

@@ -112,17 +112,21 @@ fn parse_digest_node_links(key: &str, content: &str) -> Vec<DigestLink> {
pub fn parse_all_digest_links(store: &Store) -> Vec<DigestLink> {
let mut all_links = Vec::new();
let mut digest_keys: Vec<&String> = store.nodes.iter()
.filter(|(_, n)| matches!(n.node_type,
store::NodeType::EpisodicDaily
| store::NodeType::EpisodicWeekly
| store::NodeType::EpisodicMonthly))
.map(|(k, _)| k)
let all_keys = store.all_keys().unwrap_or_default();
let mut digest_keys: Vec<String> = all_keys.into_iter()
.filter(|k| {
store.get_node(k).ok().flatten()
.map(|n| matches!(n.node_type,
store::NodeType::EpisodicDaily
| store::NodeType::EpisodicWeekly
| store::NodeType::EpisodicMonthly))
.unwrap_or(false)
})
.collect();
digest_keys.sort();
for key in digest_keys {
if let Some(node) = store.nodes.get(key) {
for key in &digest_keys {
if let Ok(Some(node)) = store.get_node(key) {
all_links.extend(parse_digest_node_links(key, &node.content));
}
}
@@ -178,11 +182,11 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, us
);
if exists { skipped += 1; continue; }
let source_uuid = match store.nodes.get(&source) {
let source_uuid = match store.get_node(&source).ok().flatten() {
Some(n) => n.uuid,
None => { skipped += 1; continue; }
};
let target_uuid = match store.nodes.get(&target) {
let target_uuid = match store.get_node(&target).ok().flatten() {
Some(n) => n.uuid,
None => { skipped += 1; continue; }
};