fsck: add cache vs log consistency check
Load store from both the cache (rkyv/bincode) and the raw capnp logs, then diff them: missing nodes, phantom nodes, version mismatches. Auto-rebuilds the cache if inconsistencies are found. This would have caught the mysterious 'the-plan' deletion — likely caused by a stale or corrupt snapshot that silently dropped the node while the capnp log still contained it. Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
parent
57c26d8157
commit
5a0a3d038b
2 changed files with 57 additions and 2 deletions
|
|
@ -844,6 +844,41 @@ fn install_default_file(data_dir: &std::path::Path, name: &str, content: &str) -
|
|||
fn cmd_fsck() -> Result<(), String> {
|
||||
let mut store = store::Store::load()?;
|
||||
|
||||
// Check cache vs log consistency
|
||||
let log_store = store::Store::load_from_logs()?;
|
||||
let mut cache_issues = 0;
|
||||
|
||||
// Nodes in logs but missing from cache
|
||||
for key in log_store.nodes.keys() {
|
||||
if !store.nodes.contains_key(key) {
|
||||
eprintln!("CACHE MISSING: '{}' exists in capnp log but not in cache", key);
|
||||
cache_issues += 1;
|
||||
}
|
||||
}
|
||||
// Nodes in cache but not in logs (phantom nodes)
|
||||
for key in store.nodes.keys() {
|
||||
if !log_store.nodes.contains_key(key) {
|
||||
eprintln!("CACHE PHANTOM: '{}' exists in cache but not in capnp log", key);
|
||||
cache_issues += 1;
|
||||
}
|
||||
}
|
||||
// Version mismatches
|
||||
for (key, log_node) in &log_store.nodes {
|
||||
if let Some(cache_node) = store.nodes.get(key) {
|
||||
if cache_node.version != log_node.version {
|
||||
eprintln!("CACHE STALE: '{}' cache v{} vs log v{}",
|
||||
key, cache_node.version, log_node.version);
|
||||
cache_issues += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cache_issues > 0 {
|
||||
eprintln!("{} cache inconsistencies found — rebuilding from logs", cache_issues);
|
||||
store = log_store;
|
||||
store.save().map_err(|e| format!("rebuild save: {}", e))?;
|
||||
}
|
||||
|
||||
// Check node-key consistency
|
||||
let mut issues = 0;
|
||||
for (key, node) in &store.nodes {
|
||||
|
|
@ -893,8 +928,8 @@ fn cmd_fsck() -> Result<(), String> {
|
|||
}
|
||||
|
||||
let g = store.build_graph();
|
||||
println!("fsck: {} nodes, {} edges, {} issues, {} dangling",
|
||||
store.nodes.len(), g.edge_count(), issues, dangling);
|
||||
println!("fsck: {} nodes, {} edges, {} issues, {} dangling, {} cache",
|
||||
store.nodes.len(), g.edge_count(), issues, dangling, cache_issues);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -102,6 +102,26 @@ impl Store {
|
|||
Ok(store)
|
||||
}
|
||||
|
||||
/// Load store directly from capnp logs, bypassing all caches.
|
||||
/// Used by fsck to verify cache consistency.
|
||||
pub fn load_from_logs() -> Result<Store, String> {
|
||||
let nodes_p = nodes_path();
|
||||
let rels_p = relations_path();
|
||||
|
||||
let mut store = Store::default();
|
||||
if nodes_p.exists() {
|
||||
store.replay_nodes(&nodes_p)?;
|
||||
}
|
||||
if rels_p.exists() {
|
||||
store.replay_relations(&rels_p)?;
|
||||
}
|
||||
let visits_p = visits_path();
|
||||
if visits_p.exists() {
|
||||
store.replay_visits(&visits_p)?;
|
||||
}
|
||||
Ok(store)
|
||||
}
|
||||
|
||||
/// Replay node log, keeping latest version per UUID.
|
||||
/// Tracks all UUIDs seen per key to detect duplicates.
|
||||
fn replay_nodes(&mut self, path: &Path) -> Result<(), String> {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue