consolidate: eliminate second LLM call, apply actions inline

The consolidation pipeline previously made a second Sonnet call to
extract structured JSON actions from agent reports. This was both
wasteful (extra LLM call per consolidation) and lossy (only extracted
links and manual items, ignoring WRITE_NODE/REFINE).

Now actions are parsed and applied inline after each agent runs, using
the same parse_all_actions() parser as the knowledge loop. The daemon
scheduler's separate apply phase is also removed.

Also deletes 8 superseded/orphaned prompt .md files (784 lines) that
have been replaced by .agent files.
This commit is contained in:
ProofOfConcept 2026-03-10 17:22:53 -04:00
parent 42d8e265da
commit f6ea659975
11 changed files with 119 additions and 1024 deletions

View file

@ -149,6 +149,15 @@ fn job_consolidation_agent(
store.upsert_provenance(&report_key, &response,
crate::store::Provenance::AgentConsolidate).ok();
// Parse and apply actions inline
let actions = super::knowledge::parse_all_actions(&response);
let mut applied = 0;
for action in &actions {
if super::knowledge::apply_action(&mut store, action, &agent, &ts, 0) {
applied += 1;
}
}
// Record visits for successfully processed nodes
if !agent_batch.node_keys.is_empty() {
if let Err(e) = store.record_agent_visits(&agent_batch.node_keys, &agent) {
@ -156,7 +165,8 @@ fn job_consolidation_agent(
}
}
ctx.log_line(&format!("done: {} lines → {}", response.lines().count(), report_key));
ctx.log_line(&format!("done: {} actions ({} applied) → {}",
actions.len(), applied, report_key));
Ok(())
})
}
@ -455,16 +465,6 @@ fn job_split_one(
})
}
/// Apply consolidation actions from recent reports.
///
/// Daemon job wrapper: loads the store and delegates the actual work to
/// `consolidate::apply_consolidation`. (This is the job the commit deletes,
/// since actions are now applied inline right after each agent runs.)
fn job_consolidation_apply(ctx: &ExecutionContext) -> Result<(), TaskError> {
// "c-apply" is the job label run_job uses for logging/identification.
run_job(ctx, "c-apply", || {
ctx.log_line("loading store");
// Store::load() failure propagates via `?` and fails the whole job
// (unlike the .ok()-swallowed calls elsewhere in this file).
let mut store = crate::store::Store::load()?;
ctx.log_line("applying consolidation actions");
// NOTE(review): the `true` flag and `None` argument are opaque at this
// call site — presumably "apply for real" and "no report filter";
// confirm against apply_consolidation's signature in consolidate.rs.
super::consolidate::apply_consolidation(&mut store, true, None)
})
}
/// Link orphan nodes (CPU-heavy, no LLM).
fn job_link_orphans(ctx: &ExecutionContext) -> Result<(), TaskError> {
run_job(ctx, "c-orphans", || {
@ -1174,31 +1174,23 @@ pub fn run_daemon() -> Result<(), String> {
prev_agent = Some(builder.run());
}
// Phase 2: Apply actions from agent reports
let mut apply = choir_sched.spawn(format!("c-apply:{}", today))
.resource(&llm_sched)
.retries(1)
.init(move |ctx| job_consolidation_apply(ctx));
if let Some(ref dep) = prev_agent {
apply.depend_on(dep);
}
let apply = apply.run();
// Phase 3: Link orphans (CPU-only, no LLM)
// Phase 2: Link orphans (CPU-only, no LLM)
let mut orphans = choir_sched.spawn(format!("c-orphans:{}", today))
.retries(1)
.init(move |ctx| job_link_orphans(ctx));
orphans.depend_on(&apply);
if let Some(ref dep) = prev_agent {
orphans.depend_on(dep);
}
let orphans = orphans.run();
// Phase 4: Cap degree
// Phase 3: Cap degree
let mut cap = choir_sched.spawn(format!("c-cap:{}", today))
.retries(1)
.init(move |ctx| job_cap_degree(ctx));
cap.depend_on(&orphans);
let cap = cap.run();
// Phase 5: Generate digests
// Phase 4: Generate digests
let mut digest = choir_sched.spawn(format!("c-digest:{}", today))
.resource(&llm_sched)
.retries(1)
@ -1206,7 +1198,7 @@ pub fn run_daemon() -> Result<(), String> {
digest.depend_on(&cap);
let digest = digest.run();
// Phase 6: Apply digest links
// Phase 5: Apply digest links
let mut digest_links = choir_sched.spawn(format!("c-digest-links:{}", today))
.retries(1)
.init(move |ctx| job_digest_links(ctx));