digest: split into focused modules, externalize prompts
digest.rs was 2328 lines containing 6 distinct subsystems. Split into:
- llm.rs: shared LLM utilities (call_sonnet, parse_json_response, semantic_keys)
- audit.rs: link quality audit with parallel Sonnet batching
- enrich.rs: journal enrichment + experience mining
- consolidate.rs: consolidation pipeline + apply
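A minimal sketch of the resulting crate-root declarations (the root file
itself is not in this diff, so the comments are assumptions drawn from the
list above):

    mod audit;       // link quality audit, parallel Sonnet batching
    mod consolidate; // consolidation pipeline + apply
    mod digest;      // temporal digests + date helpers
    mod enrich;      // journal enrichment + experience mining
    mod llm;         // call_sonnet, parse_json_response, semantic_keys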
Externalized all inline prompts to prompts/*.md templates using
neuro::load_prompt with {{PLACEHOLDER}} syntax:
- daily-digest.md, weekly-digest.md, monthly-digest.md
- experience.md, journal-enrich.md, consolidation.md
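A hedged sketch of what that templating amounts to, assuming load_prompt
reads prompts/<name>.md and does plain string substitution; the real
signature lives in the neuro crate and may differ:

    use std::collections::HashMap;
    use std::fs;
    use std::path::Path;

    pub fn load_prompt(name: &str, vars: &HashMap<&str, String>) -> Result<String, String> {
        let path = Path::new("prompts").join(format!("{}.md", name));
        let mut text = fs::read_to_string(&path)
            .map_err(|e| format!("read {}: {}", path.display(), e))?;
        for (key, value) in vars {
            // Replace each {{PLACEHOLDER}} occurrence with its value
            text = text.replace(&format!("{{{{{}}}}}", key), value);
        }
        Ok(text)
    }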
digest.rs retains temporal digest generation (daily/weekly/monthly/auto)
and date helpers. ~940 lines, down from 2328.
Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
parent 3f644609e1
commit 50da0b7b26
13 changed files with 1642 additions and 1582 deletions
src/llm.rs (new file, 87 lines)
@@ -0,0 +1,87 @@
// LLM utilities: Sonnet invocation and response parsing
//
// Shared by digest, audit, enrich, and consolidate modules.

use crate::store::Store;

use regex::Regex;
use std::fs;
use std::process::Command;

/// Call Sonnet via claude CLI. Returns the response text.
pub(crate) fn call_sonnet(prompt: &str, _timeout_secs: u64) -> Result<String, String> {
    // Write prompt to temp file (claude CLI needs file input for large prompts)
    // Use thread ID + PID to avoid collisions under parallel rayon calls
    let tmp = std::env::temp_dir().join(format!("poc-llm-{}-{:?}.txt",
        std::process::id(), std::thread::current().id()));
    fs::write(&tmp, prompt)
        .map_err(|e| format!("write temp prompt: {}", e))?;

    let result = Command::new("claude")
        .args(["-p", "--model", "sonnet", "--tools", ""])
        .stdin(fs::File::open(&tmp).map_err(|e| format!("open temp: {}", e))?)
        .env_remove("CLAUDECODE")
        .output();

    fs::remove_file(&tmp).ok();

    match result {
        Ok(output) => {
            if output.status.success() {
                Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
            } else {
                let stderr = String::from_utf8_lossy(&output.stderr);
                Err(format!("claude exited {}: {}", output.status, stderr.trim()))
            }
        }
        Err(e) => Err(format!("spawn claude: {}", e)),
    }
}

/// Parse a JSON response from Sonnet, handling markdown fences.
pub(crate) fn parse_json_response(response: &str) -> Result<serde_json::Value, String> {
    let cleaned = response.trim();
    let cleaned = cleaned.strip_prefix("```json").unwrap_or(cleaned);
    let cleaned = cleaned.strip_prefix("```").unwrap_or(cleaned);
    let cleaned = cleaned.strip_suffix("```").unwrap_or(cleaned);
    let cleaned = cleaned.trim();

    if let Ok(v) = serde_json::from_str(cleaned) {
        return Ok(v);
    }

    // Try to find JSON object or array
    let re_obj = Regex::new(r"\{[\s\S]*\}").unwrap();
    let re_arr = Regex::new(r"\[[\s\S]*\]").unwrap();

    if let Some(m) = re_obj.find(cleaned) {
        if let Ok(v) = serde_json::from_str(m.as_str()) {
            return Ok(v);
        }
    }
    if let Some(m) = re_arr.find(cleaned) {
        if let Ok(v) = serde_json::from_str(m.as_str()) {
            return Ok(v);
        }
    }

    // Truncate the preview on a char boundary; slicing at byte 200 could
    // panic mid-UTF-8 sequence
    let preview: String = cleaned.chars().take(200).collect();
    Err(format!("no valid JSON in response: {}...", preview))
}

/// Get semantic keys (non-journal, non-system) for prompt context.
pub(crate) fn semantic_keys(store: &Store) -> Vec<String> {
    let mut keys: Vec<String> = store.nodes.keys()
        .filter(|k| {
            !k.starts_with("journal.md#")
                && *k != "journal.md"
                && *k != "MEMORY.md"
                && *k != "where-am-i.md"
                && *k != "work-queue.md"
                && *k != "work-state"
        })
        .cloned()
        .collect();
    keys.sort();
    keys.truncate(200);
    keys
}
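For context, a sketch of how a caller might chain these helpers; the prompt
wording and timeout value are illustrative, not taken from this diff:

    fn summarize_keys(store: &Store) -> Result<serde_json::Value, String> {
        let keys = semantic_keys(store).join("\n");
        let prompt = format!("Return a JSON object grouping these keys by topic:\n{}", keys);
        let response = call_sonnet(&prompt, 120)?;
        parse_json_response(&response)
    }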