migrate agent output to capnp store, add provenance tracking
All agent output now goes to the store as nodes instead of markdown/JSON files. Each node carries a Provenance enum identifying which agent created it (AgentDigest, AgentConsolidate, AgentFactMine, AgentKnowledgeObservation, etc. — 14 variants total).

Store changes:
- upsert_provenance() method for agent-created nodes
- Provenance enum expanded from 5 to 14 variants

Agent changes:
- digest: writes to store nodes (daily-YYYY-MM-DD.md etc.)
- consolidate: reports/actions/logs stored as _consolidation-* nodes
- knowledge: depth DB and agent output stored as _knowledge-* nodes
- enrich: experience-mine results go directly to store
- llm: --no-session-persistence prevents transcript accumulation

Deleted: 14 Python/shell scripts replaced by Rust implementations.
This commit is contained in:
parent
e37f819dd2
commit
552d255dc3
23 changed files with 1381 additions and 4095 deletions
22
src/llm.rs
22
src/llm.rs
|
|
@ -1,6 +1,6 @@
|
|||
// LLM utilities: Sonnet invocation and response parsing
|
||||
// LLM utilities: model invocation and response parsing
|
||||
//
|
||||
// Shared by digest, audit, enrich, and consolidate modules.
|
||||
// Shared by digest, audit, enrich, consolidate, knowledge, and fact_mine.
|
||||
|
||||
use crate::store::Store;
|
||||
|
||||
|
|
@ -8,8 +8,8 @@ use regex::Regex;
|
|||
use std::fs;
|
||||
use std::process::Command;
|
||||
|
||||
/// Call Sonnet via claude CLI. Returns the response text.
|
||||
pub(crate) fn call_sonnet(prompt: &str, _timeout_secs: u64) -> Result<String, String> {
|
||||
/// Call a model via claude CLI. Returns the response text.
|
||||
fn call_model(model: &str, prompt: &str) -> Result<String, String> {
|
||||
// Write prompt to temp file (claude CLI needs file input for large prompts)
|
||||
// Use thread ID + PID to avoid collisions under parallel rayon calls
|
||||
let tmp = std::env::temp_dir().join(format!("poc-llm-{}-{:?}.txt",
|
||||
|
|
@ -18,7 +18,7 @@ pub(crate) fn call_sonnet(prompt: &str, _timeout_secs: u64) -> Result<String, St
|
|||
.map_err(|e| format!("write temp prompt: {}", e))?;
|
||||
|
||||
let result = Command::new("claude")
|
||||
.args(["-p", "--model", "sonnet", "--tools", ""])
|
||||
.args(["-p", "--model", model, "--tools", "", "--no-session-persistence"])
|
||||
.stdin(fs::File::open(&tmp).map_err(|e| format!("open temp: {}", e))?)
|
||||
.env_remove("CLAUDECODE")
|
||||
.output();
|
||||
|
|
@ -38,7 +38,17 @@ pub(crate) fn call_sonnet(prompt: &str, _timeout_secs: u64) -> Result<String, St
|
|||
}
|
||||
}
|
||||
|
||||
/// Parse a JSON response from Sonnet, handling markdown fences.
|
||||
/// Call Sonnet via claude CLI.
|
||||
pub(crate) fn call_sonnet(prompt: &str, _timeout_secs: u64) -> Result<String, String> {
|
||||
call_model("sonnet", prompt)
|
||||
}
|
||||
|
||||
/// Call Haiku via claude CLI (cheaper, faster — good for high-volume extraction).
|
||||
pub(crate) fn call_haiku(prompt: &str) -> Result<String, String> {
|
||||
call_model("haiku", prompt)
|
||||
}
|
||||
|
||||
/// Parse a JSON response, handling markdown fences.
|
||||
pub(crate) fn parse_json_response(response: &str) -> Result<serde_json::Value, String> {
|
||||
let cleaned = response.trim();
|
||||
let cleaned = cleaned.strip_prefix("```json").unwrap_or(cleaned);
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue