cleanup: auto-fix clippy warnings in poc-memory
Applied cargo clippy --fix for collapsible_if, manual_char_comparison, and other auto-fixable warnings.
parent 3640de444b
commit 653da40dcd

21 changed files with 99 additions and 149 deletions
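The hunks below show the collapsible_if rewrites: nested if let / if blocks collapsed into let-chains, which presumes a toolchain where let-chains are available. The manual_char_comparison fixes are not in this excerpt; as a rough illustration only (not taken from the diff), that class of fix replaces closure-based character comparisons in pattern positions with the character itself:

// Illustrative sketch, not from poc-memory.
fn first_field(line: &str) -> &str {
    // before the fix: line.split(|c| c == ',').next().unwrap_or("")
    line.split(',').next().unwrap_or("")
}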
@@ -53,13 +53,13 @@ impl Store {
         let nodes_size = fs::metadata(&nodes_p).map(|m| m.len()).unwrap_or(0);
         let rels_size = fs::metadata(&rels_p).map(|m| m.len()).unwrap_or(0);

-        if let Ok(data) = fs::read(&state_p) {
-            if data.len() >= CACHE_HEADER_LEN && data[..4] == CACHE_MAGIC {
+        if let Ok(data) = fs::read(&state_p)
+            && data.len() >= CACHE_HEADER_LEN && data[..4] == CACHE_MAGIC {
             let cached_nodes = u64::from_le_bytes(data[4..12].try_into().unwrap());
             let cached_rels = u64::from_le_bytes(data[12..20].try_into().unwrap());

-            if cached_nodes == nodes_size && cached_rels == rels_size {
-                if let Ok(mut store) = bincode::deserialize::<Store>(&data[CACHE_HEADER_LEN..]) {
+            if cached_nodes == nodes_size && cached_rels == rels_size
+                && let Ok(mut store) = bincode::deserialize::<Store>(&data[CACHE_HEADER_LEN..]) {
                 // Rebuild uuid_to_key (skipped by serde)
                 for (key, node) in &store.nodes {
                     store.uuid_to_key.insert(node.uuid, key.clone());
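The "skipped by serde" comment refers to an index field that is not serialized into the cache, so it has to be repopulated after a bincode load. A minimal sketch of that pattern, with the field types assumed (the real Store presumably maps a Uuid back to the node key):

use std::collections::HashMap;
use serde::{Deserialize, Serialize};

#[derive(Default, Serialize, Deserialize)]
struct StoreSketch {
    nodes: HashMap<String, String>, // stand-in: node key -> node uuid
    #[serde(skip)] // not written to the cache file
    uuid_to_key: HashMap<String, String>, // rebuilt index: uuid -> node key
}

impl StoreSketch {
    // Mirrors the rebuild loop in the hunk above.
    fn rebuild_index(&mut self) {
        for (key, uuid) in &self.nodes {
            self.uuid_to_key.insert(uuid.clone(), key.clone());
        }
    }
}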
@@ -67,16 +67,13 @@ impl Store {
                 store.loaded_nodes_size = nodes_size;
                 store.loaded_rels_size = rels_size;
                 // Bootstrap: write rkyv snapshot if missing
-                if !snapshot_path().exists() {
-                    if let Err(e) = store.save_snapshot(cached_nodes, cached_rels) {
+                if !snapshot_path().exists()
+                    && let Err(e) = store.save_snapshot(cached_nodes, cached_rels) {
                     eprintln!("rkyv bootstrap: {}", e);
                 }
-                }
                 return Ok(store);
             }
-            }
         }
-        }

         // Stale or no cache — rebuild from capnp logs
         let mut store = Store::default();
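For context on the header checks in these two hunks: the state cache apparently begins with a 4-byte magic, then the on-disk nodes and rels file sizes as two little-endian u64 values (so CACHE_HEADER_LEN works out to 20), followed by the bincode-serialized Store. A minimal sketch of the matching write side; the magic value and the helper name are assumptions, not taken from the crate:

use std::fs;
use std::io;
use std::path::Path;

const CACHE_MAGIC: [u8; 4] = *b"PMC1"; // assumed value; only its 4-byte length is implied by the diff
const CACHE_HEADER_LEN: usize = 4 + 8 + 8; // magic + nodes size + rels size

// Hypothetical helper mirroring the read path above.
fn write_state_cache(path: &Path, nodes_size: u64, rels_size: u64, payload: &[u8]) -> io::Result<()> {
    let mut buf = Vec::with_capacity(CACHE_HEADER_LEN + payload.len());
    buf.extend_from_slice(&CACHE_MAGIC);
    buf.extend_from_slice(&nodes_size.to_le_bytes());
    buf.extend_from_slice(&rels_size.to_le_bytes());
    buf.extend_from_slice(payload); // bincode::serialize(&store) output
    fs::write(path, buf)
}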
@@ -513,7 +510,7 @@ impl Store {
     pub fn is_segment_mined(&self, transcript_id: &str, segment_index: u32, agent: &str) -> bool {
         self.transcript_progress
             .get(&(transcript_id.to_string(), segment_index))
-            .map_or(false, |agents| agents.contains(agent))
+            .is_some_and(|agents| agents.contains(agent))
     }

     /// Mark a transcript segment as successfully processed.
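The only change in this hunk is clippy's style fix for Option::map_or(false, ...): is_some_and expresses the same check directly. A standalone example of the equivalence, not taken from the crate:

fn has_agent(agents: Option<&[String]>, agent: &str) -> bool {
    // before: agents.map_or(false, |a| a.iter().any(|x| x.as_str() == agent))
    agents.is_some_and(|a| a.iter().any(|x| x.as_str() == agent))
}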
@@ -529,30 +526,27 @@ impl Store {
     pub fn migrate_transcript_progress(&mut self) -> Result<usize, String> {
         let mut segments = Vec::new();

-        for (key, _node) in &self.nodes {
+        for key in self.nodes.keys() {
             // _observed-transcripts-f-{UUID}.{segment}
             if let Some(rest) = key.strip_prefix("_observed-transcripts-f-") {
-                if let Some((uuid, seg_str)) = rest.rsplit_once('.') {
-                    if let Ok(seg) = seg_str.parse::<u32>() {
+                if let Some((uuid, seg_str)) = rest.rsplit_once('.')
+                    && let Ok(seg) = seg_str.parse::<u32>() {
                     segments.push(new_transcript_segment(uuid, seg, "observation"));
                 }
-                }
             }
             // _mined-transcripts#f-{UUID}.{segment}
             else if let Some(rest) = key.strip_prefix("_mined-transcripts#f-") {
-                if let Some((uuid, seg_str)) = rest.rsplit_once('.') {
-                    if let Ok(seg) = seg_str.parse::<u32>() {
+                if let Some((uuid, seg_str)) = rest.rsplit_once('.')
+                    && let Ok(seg) = seg_str.parse::<u32>() {
                     segments.push(new_transcript_segment(uuid, seg, "experience"));
                 }
-                }
             }
             // _mined-transcripts-f-{UUID}.{segment}
             else if let Some(rest) = key.strip_prefix("_mined-transcripts-f-") {
-                if let Some((uuid, seg_str)) = rest.rsplit_once('.') {
-                    if let Ok(seg) = seg_str.parse::<u32>() {
+                if let Some((uuid, seg_str)) = rest.rsplit_once('.')
+                    && let Ok(seg) = seg_str.parse::<u32>() {
                     segments.push(new_transcript_segment(uuid, seg, "experience"));
                 }
-                }
             }
             // _facts-{UUID} (whole-file, segment 0)
             else if let Some(uuid) = key.strip_prefix("_facts-") {
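The loop in this hunk parses progress keys of the form "{prefix}{UUID}.{segment}". A hypothetical standalone helper (not in the crate) that performs the same extraction with ? instead of nested ifs or let-chains:

// Hypothetical: pull (uuid, segment) out of keys like "_mined-transcripts-f-<UUID>.<segment>".
fn parse_segment_key<'a>(key: &'a str, prefix: &str) -> Option<(&'a str, u32)> {
    let rest = key.strip_prefix(prefix)?;
    let (uuid, seg_str) = rest.rsplit_once('.')?;
    let seg: u32 = seg_str.parse().ok()?;
    Some((uuid, seg))
}

// parse_segment_key("_observed-transcripts-f-abc123.7", "_observed-transcripts-f-")
// yields Some(("abc123", 7)).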