migrate local.rs and memory.rs to use index

- Add Store::all_keys() method for iteration
- Convert store.nodes.get() → store.get_node()
- Convert store.nodes.contains_key() → store.contains_key()
- Convert store.nodes.values() iteration → all_keys + get_node

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-13 19:34:45 -04:00
parent 7eb86656d4
commit fe6450223c
3 changed files with 46 additions and 27 deletions

View file

@ -77,7 +77,9 @@ pub fn memory_links(store: &Store, _provenance: &str, key: &str) -> Result<Vec<L
.ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
let mut links = Vec::new();
for (target, strength, _is_new) in &node.links {
let node_weight = store.nodes.get(target.as_str())
let node_weight = store.get_node(target)
.ok()
.flatten()
.map(|n| n.weight)
.unwrap_or(0.5);
links.push(LinkInfo {
@ -178,8 +180,9 @@ pub fn memory_rename(store: &mut Store, _provenance: &str, old_key: &str, new_ke
pub fn memory_supersede(store: &mut Store, provenance: &str, old_key: &str, new_key: &str, reason: Option<&str>) -> Result<String> {
let reason = reason.unwrap_or("superseded");
let content = store.nodes.get(old_key)
.map(|n| n.content.clone())
let content = store.get_node(old_key)
.map_err(|e| anyhow::anyhow!("{}", e))?
.map(|n| n.content)
.ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?;
let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}",
new_key, reason, content.trim());
@ -198,7 +201,7 @@ pub fn keys_to_replay_items(
) -> Vec<ReplayItem> {
keys.iter()
.filter_map(|key| {
let node = store.nodes.get(key)?;
let node = store.get_node(key).ok()??;
let priority = consolidation_priority(store, key, graph, None);
let cc = graph.clustering_coefficient(key);
@ -265,12 +268,14 @@ pub fn journal_tail(store: &Store, _provenance: &str, count: Option<u64>, level:
.map(|dt| dt.and_utc().timestamp())
});
let mut entries: Vec<_> = store.nodes.values()
let all_keys = store.all_keys()?;
let mut entries: Vec<_> = all_keys.iter()
.filter_map(|key| store.get_node(key).ok()?)
.filter(|n| n.node_type == node_type)
.filter(|n| after_ts.map(|ts| n.created_at >= ts).unwrap_or(true))
.map(|n| JournalEntry {
key: n.key.clone(),
content: n.content.clone(),
content: n.content,
created_at: n.created_at,
})
.collect();
@ -302,11 +307,11 @@ pub fn journal_new(store: &mut Store, provenance: &str, name: &str, title: &str,
.join("-");
let base_key = if base_key.len() > 80 { &base_key[..80] } else { base_key.as_str() };
let key = if store.nodes.contains_key(base_key) {
let key = if store.contains_key(base_key).unwrap_or(false) {
let mut n = 2;
loop {
let candidate = format!("{}-{}", base_key, n);
if !store.nodes.contains_key(&candidate) { break candidate; }
if !store.contains_key(&candidate).unwrap_or(false) { break candidate; }
n += 1;
}
} else {
@ -324,14 +329,16 @@ pub fn journal_new(store: &mut Store, provenance: &str, name: &str, title: &str,
pub fn journal_update(store: &mut Store, provenance: &str, body: &str, level: Option<i64>) -> Result<String> {
let level = level.unwrap_or(0);
let node_type = level_to_node_type(level);
let latest_key = store.nodes.values()
let all_keys = store.all_keys()?;
let latest_key = all_keys.iter()
.filter_map(|key| store.get_node(key).ok()?)
.filter(|n| n.node_type == node_type)
.max_by_key(|n| n.created_at)
.map(|n| n.key.clone());
let Some(key) = latest_key else {
anyhow::bail!("no entry at level {} to update — use journal_new first", level);
};
let existing = store.nodes.get(&key).unwrap().content.clone();
let existing = store.get_node(&key)?.ok_or_else(|| anyhow::anyhow!("node not found"))?.content;
let new_content = format!("{}\n\n{}", existing.trim_end(), body);
store.upsert_provenance(&key, &new_content, provenance)
.map_err(|e| anyhow::anyhow!("{}", e))?;
@ -479,9 +486,10 @@ pub fn graph_hubs(store: &Store, _provenance: &str, count: Option<usize>) -> Res
let graph = store.build_graph();
// Top hub nodes by degree, spread apart (skip neighbors of already-selected hubs)
let mut hubs: Vec<(String, usize)> = store.nodes.iter()
.filter(|(k, n)| !n.deleted && !k.starts_with('_'))
.map(|(k, _)| {
let all_keys = store.all_keys().unwrap_or_default();
let mut hubs: Vec<(String, usize)> = all_keys.iter()
.filter(|k| !k.starts_with('_'))
.map(|k| {
let degree = graph.neighbors(k).len();
(k.clone(), degree)
})
@ -508,7 +516,7 @@ pub fn graph_trace(store: &Store, _provenance: &str, key: &str) -> Result<String
let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
let g = store.build_graph();
let node = store.nodes.get(&resolved)
let node = store.get_node(&resolved)?
.ok_or_else(|| anyhow::anyhow!("Node not found: {}", resolved))?;
use std::fmt::Write;
@ -525,15 +533,17 @@ pub fn graph_trace(store: &Store, _provenance: &str, key: &str) -> Result<String
// Walk neighbors, grouped by node type
let neighbors = g.neighbors(&resolved);
let mut episodic_session = Vec::new();
let mut episodic_daily = Vec::new();
let mut episodic_weekly = Vec::new();
let mut semantic = Vec::new();
let mut episodic_session: Vec<(String, f32, crate::store::Node)> = Vec::new();
let mut episodic_daily: Vec<(String, f32, crate::store::Node)> = Vec::new();
let mut episodic_weekly: Vec<(String, f32, crate::store::Node)> = Vec::new();
let mut semantic: Vec<(String, f32, crate::store::Node)> = Vec::new();
for (n, strength) in &neighbors {
if let Some(nnode) = store.nodes.get(n.as_str()) {
let entry = (n.as_str(), *strength, nnode);
match nnode.node_type {
if let Ok(Some(nnode)) = store.get_node(n) {
let node_type = nnode.node_type;
let key: String = (*n).clone();
let entry = (key, *strength, nnode);
match node_type {
crate::store::NodeType::EpisodicSession => episodic_session.push(entry),
crate::store::NodeType::EpisodicDaily => episodic_daily.push(entry),
crate::store::NodeType::EpisodicWeekly
@ -547,7 +557,7 @@ pub fn graph_trace(store: &Store, _provenance: &str, key: &str) -> Result<String
writeln!(out, "Weekly digests:").ok();
for (k, s, n) in &episodic_weekly {
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
writeln!(out, " [{:.2}] {} — {}", s, k, preview).ok();
writeln!(out, " [{:.2}] {} — {}", s, &k, preview).ok();
}
}
@ -555,7 +565,7 @@ pub fn graph_trace(store: &Store, _provenance: &str, key: &str) -> Result<String
writeln!(out, "Daily digests:").ok();
for (k, s, n) in &episodic_daily {
let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
writeln!(out, " [{:.2}] {} — {}", s, k, preview).ok();
writeln!(out, " [{:.2}] {} — {}", s, &k, preview).ok();
}
}
@ -567,7 +577,7 @@ pub fn graph_trace(store: &Store, _provenance: &str, key: &str) -> Result<String
.find(|l| !l.is_empty() && !l.starts_with("<!--"))
.unwrap_or(""),
80);
writeln!(out, " [{:.2}] {}", s, k).ok();
writeln!(out, " [{:.2}] {}", s, &k).ok();
if !n.source_ref.is_empty() {
writeln!(out, " ↳ source: {}", n.source_ref).ok();
}

View file

@ -26,7 +26,7 @@ impl MemoryNode {
/// Load from an already-open store.
pub fn from_store(store: &Store, key: &str) -> Option<Self> {
let node = store.nodes.get(key)?;
let node = store.get_node(key).ok()??;
// If set, tag links to nodes created after this timestamp as (new)
let older_than: i64 = std::env::var("POC_MEMORIES_OLDER_THAN")
@ -45,7 +45,9 @@ impl MemoryNode {
continue;
};
let is_new = older_than > 0 && store.nodes.get(neighbor_key.as_str())
let is_new = older_than > 0 && store.get_node(neighbor_key)
.ok()
.flatten()
.map(|n| n.created_at > older_than)
.unwrap_or(false);
@ -61,7 +63,7 @@ impl MemoryNode {
Some(MemoryNode {
key: key.to_string(),
content: node.content.clone(),
content: node.content,
links,
version: node.version,
weight: node.weight,

View file

@ -94,6 +94,13 @@ impl Store {
index::contains_key(db, key)
}
/// Return every node key stored in the index.
///
/// # Errors
/// Fails if the store has not been loaded (no open database handle),
/// or if the underlying index query fails.
pub fn all_keys(&self) -> Result<Vec<String>> {
    match self.db.as_ref() {
        Some(db) => index::all_keys(db),
        None => Err(anyhow::anyhow!("store not loaded")),
    }
}
pub fn resolve_key(&self, target: &str) -> Result<String> {
// Strip .md suffix if present — keys no longer use it
let bare = strip_md_suffix(target);