TurnResult: remove text field, simplify oneshot loop

- Remove TurnResult.text (was dead code - Agent::turn handles text internally)
- Simplify run_with_backend to just iterate over steps (Agent::turn loops
  for tool calls and handles empty responses internally)
- Change run/run_shared/run_forked_shared to return Result<(), String>
- Remove AgentResult.output field (no callers used it)
- Stub out legacy text-parsing code (audit, compare) that needs redesign
- Update digest.rs to not depend on text return
- Add level parameter to journal_new/journal_update for digest support

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
Author: Kent Overstreet
Date: 2026-04-12 02:04:50 -04:00
parent ef80398466
commit f00532bdb7
11 changed files with 82 additions and 422 deletions

View file

@@ -2,11 +2,12 @@
//
// Each batch of links gets reviewed by Sonnet, which returns per-link actions:
// KEEP, DELETE, RETARGET, WEAKEN, STRENGTHEN. Batches run in parallel via rayon.
//
// TODO: Redesign to use tool-based agent instead of text parsing.
use crate::store::{self, Store, new_relation};
use std::collections::HashSet;
use crate::store::Store;
#[allow(dead_code)]
struct LinkInfo {
rel_idx: usize,
source_key: String,
@@ -26,6 +27,7 @@ pub struct AuditStats {
pub errors: usize,
}
#[allow(dead_code)]
fn build_audit_prompt(batch: &[LinkInfo], batch_num: usize, total_batches: usize) -> String {
let mut prompt = format!(
"You are auditing memory graph links for quality (batch {}/{}).\n\n\
@@ -63,6 +65,7 @@ fn build_audit_prompt(batch: &[LinkInfo], batch_num: usize, total_batches: usize
prompt
}
#[allow(dead_code)]
fn parse_audit_response(response: &str, batch_size: usize) -> Vec<(usize, AuditAction)> {
let mut actions = Vec::new();
@@ -109,6 +112,7 @@ fn parse_audit_response(response: &str, batch_size: usize) -> Vec<(usize, AuditA
actions
}
#[allow(dead_code)]
enum AuditAction {
Keep,
Delete,
@@ -118,7 +122,11 @@ enum AuditAction {
}
/// Run a full link audit: walk every link, batch to Sonnet, apply results.
pub fn link_audit(store: &mut Store, apply: bool) -> Result<AuditStats, String> {
pub fn link_audit(_store: &mut Store, _apply: bool) -> Result<AuditStats, String> {
// TODO: Reimplement to use tool-based agent instead of text parsing
Err("link_audit disabled: needs redesign to use tool-based agent".to_string())
/*
// Collect all non-deleted relations with their info
let mut links: Vec<LinkInfo> = Vec::new();
@@ -330,4 +338,5 @@ pub fn link_audit(store: &mut Store, apply: bool) -> Result<AuditStats, String>
}
Ok(stats)
*/
}

View file

@@ -284,14 +284,13 @@ fn generate_digest(
.filter(|t| def.tools.iter().any(|w| w == &t.name))
.collect()
};
let digest = crate::agent::oneshot::call_api_with_tools_sync(
// Agent writes digest via memory_write tool - we just run it
crate::agent::oneshot::call_api_with_tools_sync(
&def.agent, &prompts, &phases, def.temperature, def.priority,
&tools, None)?;
let key = digest_node_key(level.name, label);
store.upsert_provenance(&key, &digest, "digest:write")?;
// Structural links: connect all source entries to this digest
let key = digest_node_key(level.name, label);
let mut linked = 0;
for source_key in source_keys {
// Skip if link already exists
@@ -299,10 +298,17 @@
!r.deleted && r.source_key == *source_key && r.target_key == key);
if exists { continue; }
// Reload store to pick up agent's writes
*store = Store::load().map_err(|e| format!("reload: {}", e))?;
let source_uuid = store.nodes.get(source_key)
.map(|n| n.uuid).unwrap_or([0u8; 16]);
let target_uuid = store.nodes.get(&key)
.map(|n| n.uuid).unwrap_or([0u8; 16]);
if target_uuid == [0u8; 16] {
println!(" Warning: digest key {} not found after agent run", key);
continue;
}
let mut rel = new_relation(
source_uuid, target_uuid,
store::RelationType::Link, 0.8,
@@ -314,12 +320,10 @@
}
if linked > 0 {
println!(" Linked {} source entries → {}", linked, key);
store.save()?;
}
store.save()?;
println!(" Stored: {}", key);
println!(" Done: {} lines", digest.lines().count());
println!(" Done");
Ok(())
}