store: remove nodes and uuid_to_key HashMaps
All node access now goes through index → capnp:

- scoring.rs: consolidation_priority, replay_queue, consolidation_plan
- admin.rs: cmd_init, cmd_fsck, cmd_dedup
- engine.rs: run_generator, eval_filter, run_transform
- parser.rs: resolve_field, execute_query

Added Store::remove_from_index() for dedup cleanup.

The relations Vec remains for now (used for graph building).

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
parent af3e41f1d9
commit 5877fd857a

5 changed files with 65 additions and 47 deletions
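The call sites in the diff below only rely on a small read API on Store. The following is a minimal, self-contained sketch of what that API is assumed to look like, inferred from the `.ok().flatten()`, `.ok()??`, and `all_keys().unwrap_or_default()` usage in the hunks; the Node fields, the StoreResult alias, the BTreeMap-backed index, and the remove_from_index signature are all stand-ins, not the real capnp-backed implementation.

use std::collections::BTreeMap;

// Stand-in result type so the sketch compiles without external crates;
// the real Store presumably has its own error type.
type StoreResult<T> = Result<T, Box<dyn std::error::Error>>;

// Hypothetical Node stand-in; the real one is decoded from capnp.
#[derive(Clone, Debug)]
pub struct Node {
    pub node_type: String,
}

pub struct Store {
    // Stand-in for the on-disk index; a BTreeMap keeps the sketch runnable.
    index: BTreeMap<String, Node>,
}

impl Store {
    // Assumed shape: a lookup that can fail (I/O, decode) and can also miss,
    // hence Result<Option<_>> and the `.ok().flatten()` at call sites.
    pub fn get_node(&self, key: &str) -> StoreResult<Option<Node>> {
        Ok(self.index.get(key).cloned())
    }

    // Assumed shape: fallible listing of every indexed key,
    // hence `all_keys().unwrap_or_default()` at call sites.
    pub fn all_keys(&self) -> StoreResult<Vec<String>> {
        Ok(self.index.keys().cloned().collect())
    }

    // Named in the commit message; this signature is a guess.
    pub fn remove_from_index(&mut self, key: &str) -> StoreResult<()> {
        self.index.remove(key);
        Ok(())
    }
}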
@@ -26,7 +26,7 @@ pub fn consolidation_priority(
     graph: &Graph,
     spectral_outlier: Option<f64>,
 ) -> f64 {
-    let node = match store.nodes.get(key) {
+    let node = match store.get_node(key).ok().flatten() {
         Some(n) => n,
         None => return 0.0,
     };
@@ -97,8 +97,10 @@ pub fn replay_queue_with_graph(
         HashMap::new()
     };
 
-    let mut items: Vec<ReplayItem> = store.nodes.iter()
-        .map(|(key, node)| {
+    let all_keys = store.all_keys().unwrap_or_default();
+    let mut items: Vec<ReplayItem> = all_keys.iter()
+        .filter_map(|key| {
+            let node = store.get_node(key).ok()??;
             let pos = positions.get(key);
             let outlier_score = pos.map(|p| p.outlier_score).unwrap_or(0.0);
             let classification = pos
@@ -109,7 +111,7 @@ pub fn replay_queue_with_graph(
                 store, key, graph,
                 pos.map(|p| p.outlier_score),
             );
-            ReplayItem {
+            Some(ReplayItem {
                 key: key.clone(),
                 priority,
                 interval_days: node.spaced_repetition_interval,
@@ -117,7 +119,7 @@ pub fn replay_queue_with_graph(
                 cc: graph.clustering_coefficient(key),
                 classification,
                 outlier_score,
-            }
+            })
         })
         .collect();
 
@@ -214,11 +216,13 @@ fn consolidation_plan_inner(store: &Store, _detect_interf: bool) -> Consolidatio
     let gini = graph.degree_gini();
     let _avg_cc = graph.avg_clustering_coefficient();
 
-    let episodic_count = store.nodes.iter()
-        .filter(|(_, n)| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
+    let all_keys = store.all_keys().unwrap_or_default();
+    let episodic_count = all_keys.iter()
+        .filter_map(|k| store.get_node(k).ok()?)
+        .filter(|n| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
         .count();
-    let _episodic_ratio = if store.nodes.is_empty() { 0.0 }
-        else { episodic_count as f32 / store.nodes.len() as f32 };
+    let _episodic_ratio = if all_keys.is_empty() { 0.0 }
+        else { episodic_count as f32 / all_keys.len() as f32 };
 
     let mut plan = ConsolidationPlan {
         counts: std::collections::HashMap::new(),
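For reference, the access pattern the hunks above converge on (walk all_keys, load each node, drop errors and misses) looks roughly like this when written against the hypothetical Store sketch near the top of this page. count_nodes_of_type and the main function are illustrative only; the dedup-style cleanup at the end uses the assumed remove_from_index signature.

// Count nodes of one type by walking index keys, mirroring the
// filter_map pattern adopted in scoring.rs above.
fn count_nodes_of_type(store: &Store, wanted: &str) -> usize {
    let all_keys = store.all_keys().unwrap_or_default();
    all_keys
        .iter()
        // A load error or a missing key simply drops the entry.
        .filter_map(|k| store.get_node(k).ok().flatten())
        .filter(|n| n.node_type == wanted)
        .count()
}

fn main() {
    let mut store = Store { index: Default::default() };
    store.index.insert("a".into(), Node { node_type: "EpisodicSession".into() });
    store.index.insert("b".into(), Node { node_type: "Fact".into() });
    assert_eq!(count_nodes_of_type(&store, "EpisodicSession"), 1);

    // Dedup-style cleanup with the (assumed) remove_from_index helper.
    store.remove_from_index("b").unwrap();
    assert_eq!(store.all_keys().unwrap().len(), 1);
}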