fsck: detect duplicate keys (different UUIDs, same key)
replay_nodes now tracks all UUIDs per key using a temporary multimap. Warns on duplicates so they can be manually resolved.

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
parent
e2f3a5a364
commit
b00e09b091
1 changed file with 21 additions and 2 deletions
|
|
@ -91,12 +91,16 @@ impl Store {
|
||||||
Ok(store)
|
Ok(store)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Replay node log, keeping latest version per UUID
|
/// Replay node log, keeping latest version per UUID.
|
||||||
|
/// Tracks all UUIDs seen per key to detect duplicates.
|
||||||
fn replay_nodes(&mut self, path: &Path) -> Result<(), String> {
|
fn replay_nodes(&mut self, path: &Path) -> Result<(), String> {
|
||||||
let file = fs::File::open(path)
|
let file = fs::File::open(path)
|
||||||
.map_err(|e| format!("open {}: {}", path.display(), e))?;
|
.map_err(|e| format!("open {}: {}", path.display(), e))?;
|
||||||
let mut reader = BufReader::new(file);
|
let mut reader = BufReader::new(file);
|
||||||
|
|
||||||
|
// Track all non-deleted UUIDs per key to detect duplicates
|
||||||
|
let mut key_uuids: HashMap<String, Vec<[u8; 16]>> = HashMap::new();
|
||||||
|
|
||||||
while let Ok(msg) = serialize::read_message(&mut reader, message::ReaderOptions::new()) {
|
while let Ok(msg) = serialize::read_message(&mut reader, message::ReaderOptions::new()) {
|
||||||
let log = msg.get_root::<memory_capnp::node_log::Reader>()
|
let log = msg.get_root::<memory_capnp::node_log::Reader>()
|
||||||
.map_err(|e| format!("read node log: {}", e))?;
|
.map_err(|e| format!("read node log: {}", e))?;
|
||||||
|
|
@ -110,13 +114,28 @@ impl Store {
|
||||||
if node.deleted {
|
if node.deleted {
|
||||||
self.nodes.remove(&node.key);
|
self.nodes.remove(&node.key);
|
||||||
self.uuid_to_key.remove(&node.uuid);
|
self.uuid_to_key.remove(&node.uuid);
|
||||||
|
if let Some(uuids) = key_uuids.get_mut(&node.key) {
|
||||||
|
uuids.retain(|u| *u != node.uuid);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
self.uuid_to_key.insert(node.uuid, node.key.clone());
|
self.uuid_to_key.insert(node.uuid, node.key.clone());
|
||||||
self.nodes.insert(node.key.clone(), node);
|
self.nodes.insert(node.key.clone(), node.clone());
|
||||||
|
let uuids = key_uuids.entry(node.key).or_default();
|
||||||
|
if !uuids.contains(&node.uuid) {
|
||||||
|
uuids.push(node.uuid);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Report duplicate keys
|
||||||
|
for (key, uuids) in &key_uuids {
|
||||||
|
if uuids.len() > 1 {
|
||||||
|
eprintln!("WARNING: key '{}' has {} UUIDs (duplicate nodes)", key, uuids.len());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue