cleanup: auto-fix clippy warnings in poc-memory

Applied cargo clippy --fix for collapsible_if, manual_pattern_char_comparison,
useless_format, and other auto-fixable warnings.
This commit is contained in:
Kent Overstreet 2026-03-21 19:42:38 -04:00
parent 3640de444b
commit 653da40dcd
21 changed files with 99 additions and 149 deletions

View file

@ -76,7 +76,7 @@ pub fn consolidate_full_with_progress(
match knowledge::run_and_apply(store, agent_type, *count, "consolidate") { match knowledge::run_and_apply(store, agent_type, *count, "consolidate") {
Ok(()) => { Ok(()) => {
let msg = format!(" Done"); let msg = " Done".to_string();
log_line(&mut log_buf, &msg); log_line(&mut log_buf, &msg);
on_progress(&msg); on_progress(&msg);
println!("{}", msg); println!("{}", msg);

View file

@ -111,11 +111,10 @@ pub fn get_def(name: &str) -> Option<AgentDef> {
let dir = agents_dir(); let dir = agents_dir();
for ext in ["agent", "md"] { for ext in ["agent", "md"] {
let path = dir.join(format!("{}.{}", name, ext)); let path = dir.join(format!("{}.{}", name, ext));
if let Ok(content) = std::fs::read_to_string(&path) { if let Ok(content) = std::fs::read_to_string(&path)
if let Some(def) = parse_agent_file(&content) { && let Some(def) = parse_agent_file(&content) {
return Some(def); return Some(def);
} }
}
} }
load_defs().into_iter().find(|d| d.agent == name) load_defs().into_iter().find(|d| d.agent == name)
} }
@ -345,7 +344,7 @@ fn resolve(
for (a, b, s) in &cross_links { for (a, b, s) in &cross_links {
out.push_str(&format!(" {}{} ({:.2})\n", a, b, s)); out.push_str(&format!(" {}{} ({:.2})\n", a, b, s));
} }
out.push_str("\n"); out.push('\n');
} }
} }
} }

View file

@ -213,13 +213,11 @@ pub fn select_conversation_fragments(n: usize) -> Vec<(String, String)> {
if let Ok(files) = fs::read_dir(dir.path()) { if let Ok(files) = fs::read_dir(dir.path()) {
for f in files.filter_map(|e| e.ok()) { for f in files.filter_map(|e| e.ok()) {
let p = f.path(); let p = f.path();
if p.extension().map(|x| x == "jsonl").unwrap_or(false) { if p.extension().map(|x| x == "jsonl").unwrap_or(false)
if let Ok(meta) = p.metadata() { && let Ok(meta) = p.metadata()
if meta.len() > 50_000 { && meta.len() > 50_000 {
jsonl_files.push(p); jsonl_files.push(p);
} }
}
}
} }
} }
} }
@ -307,11 +305,10 @@ pub fn mark_observation_done(fragment_ids: &[String]) {
Err(_) => return, Err(_) => return,
}; };
for id in fragment_ids { for id in fragment_ids {
if let Some((session_id, seg_str)) = id.rsplit_once('.') { if let Some((session_id, seg_str)) = id.rsplit_once('.')
if let Ok(seg) = seg_str.parse::<u32>() { && let Ok(seg) = seg_str.parse::<u32>() {
let _ = store.mark_segment_mined(session_id, seg, "observation"); let _ = store.mark_segment_mined(session_id, seg, "observation");
} }
}
} }
} }

View file

@ -216,16 +216,14 @@ pub(crate) fn parse_json_response(response: &str) -> Result<serde_json::Value, S
let re_obj = Regex::new(r"\{[\s\S]*\}").unwrap(); let re_obj = Regex::new(r"\{[\s\S]*\}").unwrap();
let re_arr = Regex::new(r"\[[\s\S]*\]").unwrap(); let re_arr = Regex::new(r"\[[\s\S]*\]").unwrap();
if let Some(m) = re_obj.find(cleaned) { if let Some(m) = re_obj.find(cleaned)
if let Ok(v) = serde_json::from_str(m.as_str()) { && let Ok(v) = serde_json::from_str(m.as_str()) {
return Ok(v); return Ok(v);
} }
} if let Some(m) = re_arr.find(cleaned)
if let Some(m) = re_arr.find(cleaned) { && let Ok(v) = serde_json::from_str(m.as_str()) {
if let Ok(v) = serde_json::from_str(m.as_str()) {
return Ok(v); return Ok(v);
} }
}
let preview = crate::util::first_n_chars(cleaned, 200); let preview = crate::util::first_n_chars(cleaned, 200);
Err(format!("no valid JSON in response: {preview}...")) Err(format!("no valid JSON in response: {preview}..."))

View file

@ -154,13 +154,12 @@ pub fn cmd_fsck() -> Result<(), String> {
} }
// Version mismatches // Version mismatches
for (key, log_node) in &log_store.nodes { for (key, log_node) in &log_store.nodes {
if let Some(cache_node) = store.nodes.get(key) { if let Some(cache_node) = store.nodes.get(key)
if cache_node.version != log_node.version { && cache_node.version != log_node.version {
eprintln!("CACHE STALE: '{}' cache v{} vs log v{}", eprintln!("CACHE STALE: '{}' cache v{} vs log v{}",
key, cache_node.version, log_node.version); key, cache_node.version, log_node.version);
cache_issues += 1; cache_issues += 1;
} }
}
} }
if cache_issues > 0 { if cache_issues > 0 {
@ -310,7 +309,7 @@ pub fn cmd_dedup(apply: bool) -> Result<(), String> {
// For diverged: keep the copy with most edges (it's the one that got // For diverged: keep the copy with most edges (it's the one that got
// woven into the graph — the version that lived). Fall back to highest version. // woven into the graph — the version that lived). Fall back to highest version.
let all_groups: Vec<_> = identical_groups.into_iter() let all_groups: Vec<_> = identical_groups.into_iter()
.chain(diverged_groups.into_iter()) .chain(diverged_groups)
.collect(); .collect();
let mut merged = 0usize; let mut merged = 0usize;

View file

@ -228,13 +228,12 @@ pub fn cmd_evaluate_agents(matchups: usize, model: &str, dry_run: bool) -> Resul
let mut seen = std::collections::HashSet::new(); let mut seen = std::collections::HashSet::new();
for word in report.split_whitespace() { for word in report.split_whitespace() {
let clean = word.trim_matches(|c: char| !c.is_alphanumeric() && c != '-' && c != '_'); let clean = word.trim_matches(|c: char| !c.is_alphanumeric() && c != '-' && c != '_');
if clean.len() > 10 && seen.insert(clean.to_string()) && store.nodes.contains_key(clean) { if clean.len() > 10 && seen.insert(clean.to_string()) && store.nodes.contains_key(clean)
if let Some(node) = store.nodes.get(clean) { && let Some(node) = store.nodes.get(clean) {
let preview = crate::util::truncate(&node.content, 200, "..."); let preview = crate::util::truncate(&node.content, 200, "...");
target_content.push_str(&format!("\n### {}\n{}\n", clean, preview)); target_content.push_str(&format!("\n### {}\n{}\n", clean, preview));
if target_content.len() > 1500 { break; } if target_content.len() > 1500 { break; }
} }
}
} }
let context = format!( let context = format!(

View file

@ -62,15 +62,12 @@ pub fn find_current_transcript() -> Option<String> {
if let Ok(files) = std::fs::read_dir(dir_entry.path()) { if let Ok(files) = std::fs::read_dir(dir_entry.path()) {
for f in files.filter_map(|e| e.ok()) { for f in files.filter_map(|e| e.ok()) {
let p = f.path(); let p = f.path();
if p.extension().map(|x| x == "jsonl").unwrap_or(false) { if p.extension().map(|x| x == "jsonl").unwrap_or(false)
if let Ok(meta) = p.metadata() { && let Ok(meta) = p.metadata()
if let Ok(mtime) = meta.modified() { && let Ok(mtime) = meta.modified()
if newest.as_ref().is_none_or(|(t, _)| mtime > *t) { && newest.as_ref().is_none_or(|(t, _)| mtime > *t) {
newest = Some((mtime, p)); newest = Some((mtime, p));
} }
}
}
}
} }
} }
} }

View file

@ -56,15 +56,14 @@ pub fn cmd_search(terms: &[String], pipeline_args: &[String], expand: bool, full
for (i, (key, score)) in raw.iter().enumerate().take(max_results) { for (i, (key, score)) in raw.iter().enumerate().take(max_results) {
let weight = store.nodes.get(key).map(|n| n.weight).unwrap_or(0.0); let weight = store.nodes.get(key).map(|n| n.weight).unwrap_or(0.0);
println!("{:2}. [{:.2}/{:.2}] {}", i + 1, score, weight, key); println!("{:2}. [{:.2}/{:.2}] {}", i + 1, score, weight, key);
if full { if full
if let Some(node) = store.nodes.get(key) { && let Some(node) = store.nodes.get(key) {
println!(); println!();
for line in node.content.lines() { for line in node.content.lines() {
println!(" {}", line); println!(" {}", line);
} }
println!(); println!();
} }
}
} }
} else { } else {
// Fast MmapView path — algorithm-only pipeline // Fast MmapView path — algorithm-only pipeline
@ -120,15 +119,14 @@ pub fn cmd_search(terms: &[String], pipeline_args: &[String], expand: bool, full
let marker = if r.is_direct { "" } else { " " }; let marker = if r.is_direct { "" } else { " " };
let weight = view.node_weight(&r.key); let weight = view.node_weight(&r.key);
println!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key); println!("{}{:2}. [{:.2}/{:.2}] {}", marker, i + 1, r.activation, weight, r.key);
if full { if full
if let Some(content) = view.node_content(&r.key) { && let Some(content) = view.node_content(&r.key) {
println!(); println!();
for line in content.lines() { for line in content.lines() {
println!(" {}", line); println!(" {}", line);
} }
println!(); println!();
} }
}
} }
} }
@ -219,11 +217,10 @@ fn get_group_content(group: &crate::config::ContextGroup, store: &crate::store::
if n.created_at > 0 { return n.created_at; } if n.created_at > 0 { return n.created_at; }
if let Some(caps) = key_date_re.captures(&n.key) { if let Some(caps) = key_date_re.captures(&n.key) {
use chrono::{NaiveDate, TimeZone, Local}; use chrono::{NaiveDate, TimeZone, Local};
if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d") { if let Ok(d) = NaiveDate::parse_from_str(&caps[1], "%Y-%m-%d")
if let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() { && let Some(dt) = Local.from_local_datetime(&d.and_hms_opt(0, 0, 0).unwrap()).earliest() {
return dt.timestamp(); return dt.timestamp();
} }
}
} }
n.timestamp n.timestamp
}; };

View file

@ -12,7 +12,7 @@ pub mod misc;
/// Exit silently if POC_MEMORY_DRY_RUN=1. /// Exit silently if POC_MEMORY_DRY_RUN=1.
pub fn check_dry_run() { pub fn check_dry_run() {
if std::env::var("POC_MEMORY_DRY_RUN").map_or(false, |v| v == "1" || v == "true") { if std::env::var("POC_MEMORY_DRY_RUN").is_ok_and(|v| v == "1" || v == "true") {
std::process::exit(0); std::process::exit(0);
} }
} }

View file

@ -352,13 +352,12 @@ pub fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
} }
} }
if !full { if !full
if let Some(latest) = versions.last() { && let Some(latest) = versions.last() {
eprintln!("\n--- Latest content (v{}, {}) ---", eprintln!("\n--- Latest content (v{}, {}) ---",
latest.version, latest.provenance); latest.version, latest.provenance);
print!("{}", latest.content); print!("{}", latest.content);
} }
}
Ok(()) Ok(())
} }

View file

@ -17,8 +17,10 @@ static CONFIG: OnceLock<RwLock<Arc<Config>>> = OnceLock::new();
#[derive(Debug, Clone, PartialEq, serde::Deserialize)] #[derive(Debug, Clone, PartialEq, serde::Deserialize)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum ContextSource { pub enum ContextSource {
#[serde(alias = "")] #[serde(alias = "")]
#[default]
Store, Store,
File, File,
Journal, Journal,
@ -33,9 +35,6 @@ pub struct ContextGroup {
pub source: ContextSource, pub source: ContextSource,
} }
impl Default for ContextSource {
fn default() -> Self { Self::Store }
}
#[derive(Debug, Clone, serde::Deserialize)] #[derive(Debug, Clone, serde::Deserialize)]
#[serde(default)] #[serde(default)]
@ -133,8 +132,8 @@ impl Config {
config.llm_concurrency = config.llm_concurrency.max(1); config.llm_concurrency = config.llm_concurrency.max(1);
// Resolve API settings: agent_model → models → backend // Resolve API settings: agent_model → models → backend
if let Some(model_name) = &config.agent_model { if let Some(model_name) = &config.agent_model
if let Some(model_cfg) = root.get("models").and_then(|m| m.get(model_name.as_str())) { && let Some(model_cfg) = root.get("models").and_then(|m| m.get(model_name.as_str())) {
let backend_name = model_cfg.get("backend").and_then(|v| v.as_str()).unwrap_or(""); let backend_name = model_cfg.get("backend").and_then(|v| v.as_str()).unwrap_or("");
let model_id = model_cfg.get("model_id").and_then(|v| v.as_str()).unwrap_or(""); let model_id = model_cfg.get("model_id").and_then(|v| v.as_str()).unwrap_or("");
@ -146,7 +145,6 @@ impl Config {
} }
config.api_model = Some(model_id.to_string()); config.api_model = Some(model_id.to_string());
} }
}
Some(config) Some(config)
} }

View file

@ -75,13 +75,11 @@ pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
// Look for structural links first (digest:structural provenance) // Look for structural links first (digest:structural provenance)
for r in &store.relations { for r in &store.relations {
if r.deleted { continue; } if r.deleted { continue; }
if r.source_key == key { if r.source_key == key
if let Some(target) = store.nodes.get(&r.target_key) { && let Some(target) = store.nodes.get(&r.target_key)
if target.node_type == parent_type { && target.node_type == parent_type {
return Some(r.target_key.clone()); return Some(r.target_key.clone());
} }
}
}
} }
// Fallback: match by date for journal→daily // Fallback: match by date for journal→daily
@ -92,8 +90,8 @@ pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
dates.push(store::format_date(node.timestamp)); dates.push(store::format_date(node.timestamp));
} }
// Extract date from key patterns like "journal#2026-03-03-..." or "journal#j-2026-03-13t..." // Extract date from key patterns like "journal#2026-03-03-..." or "journal#j-2026-03-13t..."
if let Some(rest) = key.strip_prefix("journal#j-").or_else(|| key.strip_prefix("journal#")) { if let Some(rest) = key.strip_prefix("journal#j-").or_else(|| key.strip_prefix("journal#"))
if rest.len() >= 10 { && rest.len() >= 10 {
let candidate = &rest[..10]; let candidate = &rest[..10];
if candidate.chars().nth(4) == Some('-') { if candidate.chars().nth(4) == Some('-') {
let date = candidate.to_string(); let date = candidate.to_string();
@ -102,7 +100,6 @@ pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
} }
} }
} }
}
for date in &dates { for date in &dates {
for prefix in [&format!("daily-{}", date), &format!("digest#daily#{}", date)] { for prefix in [&format!("daily-{}", date), &format!("digest#daily#{}", date)] {
for (k, n) in &store.nodes { for (k, n) in &store.nodes {
@ -133,13 +130,11 @@ pub fn digest_children(store: &Store, key: &str) -> Vec<String> {
let mut children: Vec<(String, i64)> = Vec::new(); let mut children: Vec<(String, i64)> = Vec::new();
for r in &store.relations { for r in &store.relations {
if r.deleted { continue; } if r.deleted { continue; }
if r.target_key == key { if r.target_key == key
if let Some(source) = store.nodes.get(&r.source_key) { && let Some(source) = store.nodes.get(&r.source_key)
if source.node_type == child_type { && source.node_type == child_type {
children.push((r.source_key.clone(), source.timestamp)); children.push((r.source_key.clone(), source.timestamp));
} }
}
}
} }
// Fallback for daily → journal: extract date from key and match // Fallback for daily → journal: extract date from key and match
@ -225,26 +220,23 @@ pub fn show(store: &Store) -> Result<(), String> {
// Temporal context // Temporal context
let (prev, next) = temporal_neighbors(store, &key); let (prev, next) = temporal_neighbors(store, &key);
eprintln!(); eprintln!();
if let Some(ref p) = prev { if let Some(ref p) = prev
if let Some(pn) = store.nodes.get(p) { && let Some(pn) = store.nodes.get(p) {
eprintln!("{}", node_summary(pn)); eprintln!("{}", node_summary(pn));
eprintln!(" `cursor back`"); eprintln!(" `cursor back`");
} }
} if let Some(ref n) = next
if let Some(ref n) = next { && let Some(nn) = store.nodes.get(n) {
if let Some(nn) = store.nodes.get(n) {
eprintln!("{}", node_summary(nn)); eprintln!("{}", node_summary(nn));
eprintln!(" `cursor forward`"); eprintln!(" `cursor forward`");
} }
}
// Hierarchy // Hierarchy
if let Some(ref parent) = digest_parent(store, &key) { if let Some(ref parent) = digest_parent(store, &key)
if let Some(pn) = store.nodes.get(parent) { && let Some(pn) = store.nodes.get(parent) {
eprintln!("{}", node_summary(pn)); eprintln!("{}", node_summary(pn));
eprintln!(" `cursor up`"); eprintln!(" `cursor up`");
} }
}
let children = digest_children(store, &key); let children = digest_children(store, &key);
if !children.is_empty() { if !children.is_empty() {
let count = children.len(); let count = children.len();

View file

@ -572,13 +572,11 @@ fn add_implicit_temporal_edges(
fn date_from_key(key: &str) -> Option<NaiveDate> { fn date_from_key(key: &str) -> Option<NaiveDate> {
// Try extracting YYYY-MM-DD after known prefixes // Try extracting YYYY-MM-DD after known prefixes
for prefix in ["daily-", "journal#j-", "journal#"] { for prefix in ["daily-", "journal#j-", "journal#"] {
if let Some(rest) = key.strip_prefix(prefix) { if let Some(rest) = key.strip_prefix(prefix)
if rest.len() >= 10 { && rest.len() >= 10
if let Ok(d) = NaiveDate::parse_from_str(&rest[..10], "%Y-%m-%d") { && let Ok(d) = NaiveDate::parse_from_str(&rest[..10], "%Y-%m-%d") {
return Some(d); return Some(d);
} }
}
}
} }
None None
} }
@ -650,9 +648,8 @@ fn add_implicit_temporal_edges(
monthlies.sort_by_key(|(_, ym)| *ym); monthlies.sort_by_key(|(_, ym)| *ym);
let add_edge = |adj: &mut HashMap<String, Vec<Edge>>, a: &str, b: &str| { let add_edge = |adj: &mut HashMap<String, Vec<Edge>>, a: &str, b: &str| {
if let Some(edges) = adj.get(a) { if let Some(edges) = adj.get(a)
if edges.iter().any(|e| e.target == b) { return; } && edges.iter().any(|e| e.target == b) { return; }
}
adj.entry(a.to_owned()).or_default().push(Edge { adj.entry(a.to_owned()).or_default().push(Edge {
target: b.to_owned(), target: b.to_owned(),
strength: 1.0, strength: 1.0,

View file

@ -384,11 +384,10 @@ impl Stage {
} }
// Try algorithm parse first (bare words, no colon) // Try algorithm parse first (bare words, no colon)
if !s.contains(':') { if !s.contains(':')
if let Ok(algo) = AlgoStage::parse(s) { && let Ok(algo) = AlgoStage::parse(s) {
return Ok(Stage::Algorithm(algo)); return Ok(Stage::Algorithm(algo));
} }
}
// Algorithm with params: "spread,max_hops=4" (contains comma but no colon) // Algorithm with params: "spread,max_hops=4" (contains comma but no colon)
if s.contains(',') && !s.contains(':') { if s.contains(',') && !s.contains(':') {
@ -748,11 +747,10 @@ pub fn run_transform(
value += 1; value += 1;
} }
for (nbr, _) in graph.neighbors(k) { for (nbr, _) in graph.neighbors(k) {
if input_keys.contains(nbr.as_str()) { if input_keys.contains(nbr.as_str())
if cover_count.get(nbr.as_str()).copied().unwrap_or(0) < REQUIRED_COVERAGE { && cover_count.get(nbr.as_str()).copied().unwrap_or(0) < REQUIRED_COVERAGE {
value += 1; value += 1;
} }
}
} }
(k.clone(), value) (k.clone(), value)
}) })
@ -814,7 +812,7 @@ pub fn match_seeds_opts(
key_map.insert(lkey.clone(), (key.to_owned(), weight as f64)); key_map.insert(lkey.clone(), (key.to_owned(), weight as f64));
// Split key on hyphens, underscores, dots, hashes for component matching // Split key on hyphens, underscores, dots, hashes for component matching
for component in lkey.split(|c: char| c == '-' || c == '_' || c == '.' || c == '#') { for component in lkey.split(['-', '_', '.', '#']) {
if component.len() >= 3 { if component.len() >= 3 {
component_map.entry(component.to_owned()) component_map.entry(component.to_owned())
.or_default() .or_default()
@ -833,8 +831,8 @@ pub fn match_seeds_opts(
} }
// Strategy 2: key component match (0.5× weight) — only when explicitly requested // Strategy 2: key component match (0.5× weight) — only when explicitly requested
if component_match { if component_match
if let Some(matches) = component_map.get(term.as_str()) { && let Some(matches) = component_map.get(term.as_str()) {
for (orig_key, node_weight) in matches { for (orig_key, node_weight) in matches {
let score = term_weight * node_weight * 0.5; let score = term_weight * node_weight * 0.5;
*seed_map.entry(orig_key.clone()).or_insert(0.0) += score; *seed_map.entry(orig_key.clone()).or_insert(0.0) += score;
@ -842,7 +840,6 @@ pub fn match_seeds_opts(
} }
continue; continue;
} }
}
// Strategy 3: content match (0.2× weight) — only when explicitly requested // Strategy 3: content match (0.2× weight) — only when explicitly requested
if content_fallback { if content_fallback {

View file

@ -393,11 +393,10 @@ fn execute_parsed(
for r in &mut results { for r in &mut results {
for f in &needed { for f in &needed {
if !r.fields.contains_key(f) { if !r.fields.contains_key(f)
if let Some(v) = resolve_field(f, &r.key, store, graph) { && let Some(v) = resolve_field(f, &r.key, store, graph) {
r.fields.insert(f.clone(), v); r.fields.insert(f.clone(), v);
} }
}
} }
} }
@ -432,7 +431,7 @@ fn execute_parsed(
.map(|r| (r.key.clone(), graph.degree(&r.key) as f64)) .map(|r| (r.key.clone(), graph.degree(&r.key) as f64))
.collect(); .collect();
let xform = super::engine::Transform::DominatingSet; let xform = super::engine::Transform::DominatingSet;
items = super::engine::run_transform(&xform, items, store, &graph); items = super::engine::run_transform(&xform, items, store, graph);
let keep: std::collections::HashSet<String> = items.into_iter().map(|(k, _)| k).collect(); let keep: std::collections::HashSet<String> = items.into_iter().map(|(k, _)| k).collect();
results.retain(|r| keep.contains(&r.key)); results.retain(|r| keep.contains(&r.key));
} }
@ -611,8 +610,8 @@ fn print_connectivity(results: &[QueryResult], graph: &Graph) {
println!(" {} (degree {})", node, graph.degree(node)); println!(" {} (degree {})", node, graph.degree(node));
} }
// Show a sample path between first two nodes // Show a sample path between first two nodes
if component.len() >= 2 { if component.len() >= 2
if let Some(path) = bfs_path(graph, &component[0], &component[1], max_hops) { && let Some(path) = bfs_path(graph, &component[0], &component[1], max_hops) {
print!(" path: "); print!(" path: ");
for (j, step) in path.iter().enumerate() { for (j, step) in path.iter().enumerate() {
if j > 0 { print!(""); } if j > 0 { print!(""); }
@ -624,17 +623,15 @@ fn print_connectivity(results: &[QueryResult], graph: &Graph) {
} }
println!(); println!();
} }
}
} }
} }
// Suggest link-add commands for islands // Suggest link-add commands for islands
if !islands.is_empty() { if !islands.is_empty()
if let Some(ref hub) = largest_cluster { && let Some(ref hub) = largest_cluster {
println!("\nFix islands:"); println!("\nFix islands:");
for island in &islands { for island in &islands {
println!(" poc-memory graph link-add {} {}", island, hub); println!(" poc-memory graph link-add {} {}", island, hub);
} }
} }
}
} }

View file

@ -77,14 +77,13 @@ pub fn decompose(graph: &Graph, k: usize) -> SpectralResult {
for (i, key) in keys.iter().enumerate() { for (i, key) in keys.iter().enumerate() {
for (neighbor, strength) in graph.neighbors(key) { for (neighbor, strength) in graph.neighbors(key) {
if let Some(&j) = key_to_idx.get(neighbor.as_str()) { if let Some(&j) = key_to_idx.get(neighbor.as_str())
if j > i { // each edge once && j > i { // each edge once
let w = strength as f64; let w = strength as f64;
adj_entries.push((i, j, w)); adj_entries.push((i, j, w));
degree[i] += w; degree[i] += w;
degree[j] += w; degree[j] += w;
} }
}
} }
} }
@ -445,13 +444,12 @@ pub fn analyze_positions(
let mut node_dists: Vec<(String, u32, f64)> = Vec::new(); let mut node_dists: Vec<(String, u32, f64)> = Vec::new();
for (key, coords) in &emb.coords { for (key, coords) in &emb.coords {
if let Some(&comm) = communities.get(key) { if let Some(&comm) = communities.get(key)
if let Some(center) = centers.get(&comm) { && let Some(center) = centers.get(&comm) {
let dist = weighted_distance(coords, center, &weights); let dist = weighted_distance(coords, center, &weights);
by_community.entry(comm).or_default().push(dist); by_community.entry(comm).or_default().push(dist);
node_dists.push((key.clone(), comm, dist)); node_dists.push((key.clone(), comm, dist));
} }
}
} }
// Median distance per community for outlier scoring // Median distance per community for outlier scoring

View file

@ -53,13 +53,13 @@ impl Store {
let nodes_size = fs::metadata(&nodes_p).map(|m| m.len()).unwrap_or(0); let nodes_size = fs::metadata(&nodes_p).map(|m| m.len()).unwrap_or(0);
let rels_size = fs::metadata(&rels_p).map(|m| m.len()).unwrap_or(0); let rels_size = fs::metadata(&rels_p).map(|m| m.len()).unwrap_or(0);
if let Ok(data) = fs::read(&state_p) { if let Ok(data) = fs::read(&state_p)
if data.len() >= CACHE_HEADER_LEN && data[..4] == CACHE_MAGIC { && data.len() >= CACHE_HEADER_LEN && data[..4] == CACHE_MAGIC {
let cached_nodes = u64::from_le_bytes(data[4..12].try_into().unwrap()); let cached_nodes = u64::from_le_bytes(data[4..12].try_into().unwrap());
let cached_rels = u64::from_le_bytes(data[12..20].try_into().unwrap()); let cached_rels = u64::from_le_bytes(data[12..20].try_into().unwrap());
if cached_nodes == nodes_size && cached_rels == rels_size { if cached_nodes == nodes_size && cached_rels == rels_size
if let Ok(mut store) = bincode::deserialize::<Store>(&data[CACHE_HEADER_LEN..]) { && let Ok(mut store) = bincode::deserialize::<Store>(&data[CACHE_HEADER_LEN..]) {
// Rebuild uuid_to_key (skipped by serde) // Rebuild uuid_to_key (skipped by serde)
for (key, node) in &store.nodes { for (key, node) in &store.nodes {
store.uuid_to_key.insert(node.uuid, key.clone()); store.uuid_to_key.insert(node.uuid, key.clone());
@ -67,16 +67,13 @@ impl Store {
store.loaded_nodes_size = nodes_size; store.loaded_nodes_size = nodes_size;
store.loaded_rels_size = rels_size; store.loaded_rels_size = rels_size;
// Bootstrap: write rkyv snapshot if missing // Bootstrap: write rkyv snapshot if missing
if !snapshot_path().exists() { if !snapshot_path().exists()
if let Err(e) = store.save_snapshot(cached_nodes, cached_rels) { && let Err(e) = store.save_snapshot(cached_nodes, cached_rels) {
eprintln!("rkyv bootstrap: {}", e); eprintln!("rkyv bootstrap: {}", e);
} }
}
return Ok(store); return Ok(store);
} }
}
} }
}
// Stale or no cache — rebuild from capnp logs // Stale or no cache — rebuild from capnp logs
let mut store = Store::default(); let mut store = Store::default();
@ -513,7 +510,7 @@ impl Store {
pub fn is_segment_mined(&self, transcript_id: &str, segment_index: u32, agent: &str) -> bool { pub fn is_segment_mined(&self, transcript_id: &str, segment_index: u32, agent: &str) -> bool {
self.transcript_progress self.transcript_progress
.get(&(transcript_id.to_string(), segment_index)) .get(&(transcript_id.to_string(), segment_index))
.map_or(false, |agents| agents.contains(agent)) .is_some_and(|agents| agents.contains(agent))
} }
/// Mark a transcript segment as successfully processed. /// Mark a transcript segment as successfully processed.
@ -529,30 +526,27 @@ impl Store {
pub fn migrate_transcript_progress(&mut self) -> Result<usize, String> { pub fn migrate_transcript_progress(&mut self) -> Result<usize, String> {
let mut segments = Vec::new(); let mut segments = Vec::new();
for (key, _node) in &self.nodes { for key in self.nodes.keys() {
// _observed-transcripts-f-{UUID}.{segment} // _observed-transcripts-f-{UUID}.{segment}
if let Some(rest) = key.strip_prefix("_observed-transcripts-f-") { if let Some(rest) = key.strip_prefix("_observed-transcripts-f-") {
if let Some((uuid, seg_str)) = rest.rsplit_once('.') { if let Some((uuid, seg_str)) = rest.rsplit_once('.')
if let Ok(seg) = seg_str.parse::<u32>() { && let Ok(seg) = seg_str.parse::<u32>() {
segments.push(new_transcript_segment(uuid, seg, "observation")); segments.push(new_transcript_segment(uuid, seg, "observation"));
} }
}
} }
// _mined-transcripts#f-{UUID}.{segment} // _mined-transcripts#f-{UUID}.{segment}
else if let Some(rest) = key.strip_prefix("_mined-transcripts#f-") { else if let Some(rest) = key.strip_prefix("_mined-transcripts#f-") {
if let Some((uuid, seg_str)) = rest.rsplit_once('.') { if let Some((uuid, seg_str)) = rest.rsplit_once('.')
if let Ok(seg) = seg_str.parse::<u32>() { && let Ok(seg) = seg_str.parse::<u32>() {
segments.push(new_transcript_segment(uuid, seg, "experience")); segments.push(new_transcript_segment(uuid, seg, "experience"));
} }
}
} }
// _mined-transcripts-f-{UUID}.{segment} // _mined-transcripts-f-{UUID}.{segment}
else if let Some(rest) = key.strip_prefix("_mined-transcripts-f-") { else if let Some(rest) = key.strip_prefix("_mined-transcripts-f-") {
if let Some((uuid, seg_str)) = rest.rsplit_once('.') { if let Some((uuid, seg_str)) = rest.rsplit_once('.')
if let Ok(seg) = seg_str.parse::<u32>() { && let Ok(seg) = seg_str.parse::<u32>() {
segments.push(new_transcript_segment(uuid, seg, "experience")); segments.push(new_transcript_segment(uuid, seg, "experience"));
} }
}
} }
// _facts-{UUID} (whole-file, segment 0) // _facts-{UUID} (whole-file, segment 0)
else if let Some(uuid) = key.strip_prefix("_facts-") { else if let Some(uuid) = key.strip_prefix("_facts-") {

View file

@ -352,11 +352,10 @@ impl Node {
/// is empty (old record), fall back to the deprecated provenanceOld enum. /// is empty (old record), fall back to the deprecated provenanceOld enum.
pub fn from_capnp_migrate(r: memory_capnp::content_node::Reader<'_>) -> Result<Self, String> { pub fn from_capnp_migrate(r: memory_capnp::content_node::Reader<'_>) -> Result<Self, String> {
let mut node = Self::from_capnp(r)?; let mut node = Self::from_capnp(r)?;
if node.provenance.is_empty() { if node.provenance.is_empty()
if let Ok(old) = r.get_provenance_old() { && let Ok(old) = r.get_provenance_old() {
node.provenance = Provenance::from_capnp(old).label().to_string(); node.provenance = Provenance::from_capnp(old).label().to_string();
} }
}
// Sanitize timestamps: old capnp records have raw offsets instead // Sanitize timestamps: old capnp records have raw offsets instead
// of unix epoch. Anything past year 2100 (~4102444800) is bogus. // of unix epoch. Anything past year 2100 (~4102444800) is bogus.
const MAX_SANE_EPOCH: i64 = 4_102_444_800; const MAX_SANE_EPOCH: i64 = 4_102_444_800;
@ -383,11 +382,10 @@ capnp_message!(Relation,
impl Relation { impl Relation {
pub fn from_capnp_migrate(r: memory_capnp::relation::Reader<'_>) -> Result<Self, String> { pub fn from_capnp_migrate(r: memory_capnp::relation::Reader<'_>) -> Result<Self, String> {
let mut rel = Self::from_capnp(r)?; let mut rel = Self::from_capnp(r)?;
if rel.provenance.is_empty() { if rel.provenance.is_empty()
if let Ok(old) = r.get_provenance_old() { && let Ok(old) = r.get_provenance_old() {
rel.provenance = Provenance::from_capnp(old).label().to_string(); rel.provenance = Provenance::from_capnp(old).label().to_string();
} }
}
Ok(rel) Ok(rel)
} }
} }
@ -503,11 +501,10 @@ pub(crate) fn read_text(result: capnp::Result<capnp::text::Reader>) -> String {
/// Read a capnp data field as [u8; 16], zero-padded /// Read a capnp data field as [u8; 16], zero-padded
pub(crate) fn read_uuid(result: capnp::Result<&[u8]>) -> [u8; 16] { pub(crate) fn read_uuid(result: capnp::Result<&[u8]>) -> [u8; 16] {
let mut out = [0u8; 16]; let mut out = [0u8; 16];
if let Ok(data) = result { if let Ok(data) = result
if data.len() >= 16 { && data.len() >= 16 {
out.copy_from_slice(&data[..16]); out.copy_from_slice(&data[..16]);
} }
}
out out
} }

View file

@ -107,12 +107,10 @@ pub fn find_last_compaction(data: &[u8]) -> Option<usize> {
if let Some(content) = obj.get("message") if let Some(content) = obj.get("message")
.and_then(|m| m.get("content")) .and_then(|m| m.get("content"))
.and_then(|c| c.as_str()) .and_then(|c| c.as_str())
{ && content.starts_with("This session is being continued") {
if content.starts_with("This session is being continued") {
let offset = obj_bytes.as_ptr() as usize - data.as_ptr() as usize; let offset = obj_bytes.as_ptr() as usize - data.as_ptr() as usize;
return Some(offset); return Some(offset);
} }
}
} }
None None

View file

@ -391,7 +391,7 @@ fn render_overview(frame: &mut Frame, app: &App, area: Rect) {
let [health_area, tasks_area] = let [health_area, tasks_area] =
Layout::vertical([Constraint::Length(12), Constraint::Min(0)]).areas(area); Layout::vertical([Constraint::Length(12), Constraint::Min(0)]).areas(area);
if let Some(ref gh) = app.status.as_ref().and_then(|s| s.graph_health.as_ref()) { if let Some(gh) = app.status.as_ref().and_then(|s| s.graph_health.as_ref()) {
render_health(frame, gh, health_area); render_health(frame, gh, health_area);
} else { } else {
let p = Paragraph::new(" No graph health data available") let p = Paragraph::new(" No graph health data available")
@ -689,16 +689,14 @@ fn render_agent_tab(frame: &mut Frame, app: &App, agent_type: &str, area: Rect)
} }
// Error // Error
if matches!(t.status, TaskStatus::Failed) { if matches!(t.status, TaskStatus::Failed)
if let Some(ref r) = t.result { && let Some(ref r) = t.result
if let Some(ref err) = r.error { && let Some(ref err) = r.error {
lines.push(Line::from(vec![ lines.push(Line::from(vec![
Span::styled(" error: ", Style::default().fg(Color::Red)), Span::styled(" error: ", Style::default().fg(Color::Red)),
Span::styled(err.as_str(), Style::default().fg(Color::Red)), Span::styled(err.as_str(), Style::default().fg(Color::Red)),
])); ]));
} }
}
}
lines.push(Line::raw("")); lines.push(Line::raw(""));
} }

View file

@ -63,11 +63,10 @@ pub fn parse_timestamp_to_epoch(ts: &str) -> Option<i64> {
use chrono::{Local, NaiveDateTime, TimeZone}; use chrono::{Local, NaiveDateTime, TimeZone};
let formats = ["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M", "%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M"]; let formats = ["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M", "%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M"];
for fmt in &formats { for fmt in &formats {
if let Ok(ndt) = NaiveDateTime::parse_from_str(ts, fmt) { if let Ok(ndt) = NaiveDateTime::parse_from_str(ts, fmt)
if let Some(dt) = Local.from_local_datetime(&ndt).earliest() { && let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
return Some(dt.timestamp()); return Some(dt.timestamp());
} }
}
} }
None None
} }