digest: modernize generate_digest() to use agent infrastructure

- Load template from digest.agent def (drop prompts_dir fallback)
- Resolve standard {{node:...}} placeholders — digest agent now gets
  core-personality, memory-instructions, subconscious notes
- Call through call_for_def_multi() with agent def's temperature,
  priority, and tools instead of call_simple()
- Move tool filtering from api.rs into callers (call_for_def_multi,
  run_one_agent_inner) — api takes pre-filtered &[Tool] slice

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-04 17:11:21 -04:00 committed by Kent Overstreet
parent 375a8d9738
commit 1457a1b50d
6 changed files with 182 additions and 109 deletions

View file

@ -36,7 +36,7 @@ pub async fn call_api_with_tools(
phases: &[String],
temperature: Option<f32>,
priority: i32,
tools: &[String],
tools: &[agent_tools::Tool],
bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
log: &dyn Fn(&str),
) -> Result<String, String> {
@ -45,15 +45,7 @@ pub async fn call_api_with_tools(
// Set up a UI channel — we drain reasoning tokens into the log
let (ui_tx, mut ui_rx) = crate::user::ui_channel::channel();
// All available native tools for subconscious agents
let all_tools = agent_tools::memory_and_journal_tools();
let agent_tool_list: Vec<_> = if tools.is_empty() {
all_tools
} else {
all_tools.into_iter()
.filter(|t| tools.iter().any(|w| *w == t.name))
.collect()
};
// Tools are already filtered by the caller
// Provenance tracks which agent:phase is making writes.
// Updated between steps by the bail function via set_provenance().
let first_phase = phases.first().map(|s| s.as_str()).unwrap_or("");
@ -82,7 +74,7 @@ pub async fn call_api_with_tools(
};
match client.chat_completion_stream_temp(
&messages,
&agent_tool_list,
tools,
&ui_tx,
&reasoning,
sampling,
@ -235,7 +227,7 @@ pub fn call_api_with_tools_sync(
phases: &[String],
temperature: Option<f32>,
priority: i32,
tools: &[String],
tools: &[agent_tools::Tool],
bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
log: &(dyn Fn(&str) + Sync),
) -> Result<String, String> {

View file

@ -5,7 +5,6 @@
// summarize weeklies. All three share the same generate/auto-detect
// pipeline, parameterized by DigestLevel.
use super::llm;
use crate::store::{self, Store, new_relation};
use crate::neuro;
@ -13,6 +12,14 @@ use chrono::{Datelike, Duration, Local, NaiveDate};
use regex::Regex;
use std::collections::BTreeSet;
/// Collect a bounded, sorted sample of store keys for prompt context.
///
/// Returns at most 200 keys in lexicographic order so the prompt stays a
/// manageable size even for large stores. (Not *all* keys — capped at 200.)
fn semantic_keys(store: &Store) -> Vec<String> {
    let mut keys = store.nodes.keys().cloned().collect::<Vec<String>>();
    // Unstable sort is fine: map keys are unique, so stability is moot.
    keys.sort_unstable();
    keys.truncate(200);
    keys
}
// --- Digest level descriptors ---
#[allow(clippy::type_complexity)]
@ -211,7 +218,7 @@ fn generate_digest(
}
println!(" {} inputs", inputs.len());
let keys = llm::semantic_keys(store);
let keys = semantic_keys(store);
let keys_text = keys.iter()
.map(|k| format!(" - {}", k))
.collect::<Vec<_>>()
@ -223,17 +230,20 @@ fn generate_digest(
.collect::<Vec<_>>()
.join(", ");
// Load prompt from agent file; fall back to prompts dir
let def = super::defs::get_def("digest");
let template = match &def {
Some(d) => d.steps.first().map(|s| s.prompt.clone()).unwrap_or_default(),
None => {
let path = crate::config::get().prompts_dir.join("digest.md");
std::fs::read_to_string(&path)
.map_err(|e| format!("load digest prompt: {}", e))?
}
};
let prompt = template
// Load agent def — drives template, temperature, priority, tools
let def = super::defs::get_def("digest")
.ok_or("no digest agent definition")?;
let template = def.steps.first()
.map(|s| s.prompt.clone())
.ok_or("digest agent has no prompt")?;
// Substitute digest-specific and config placeholders, then resolve
// standard {{node:...}} etc. via the placeholder system
let cfg = crate::config::get();
let partial = template
.replace("{agent_name}", &def.agent)
.replace("{user_name}", &cfg.user_name)
.replace("{assistant_name}", &cfg.assistant_name)
.replace("{{LEVEL}}", level.title)
.replace("{{PERIOD}}", level.period)
.replace("{{INPUT_TITLE}}", level.input_title)
@ -241,10 +251,42 @@ fn generate_digest(
.replace("{{CONTENT}}", &content)
.replace("{{COVERED}}", &covered)
.replace("{{KEYS}}", &keys_text);
let graph = store.build_graph();
let (prompt, _) = super::defs::resolve_placeholders(
&partial, store, &graph, &[], 0,
);
println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
println!(" Calling Sonnet...");
let digest = llm::call_simple("digest", &prompt)?;
// Log to file like other agents
let log_dir = dirs::home_dir().unwrap_or_default()
.join(".consciousness/logs/llm/digest");
std::fs::create_dir_all(&log_dir).ok();
let log_path = log_dir.join(format!("{}.txt", crate::store::compact_timestamp()));
let log = move |msg: &str| {
use std::io::Write;
if let Ok(mut f) = std::fs::OpenOptions::new()
.create(true).append(true).open(&log_path)
{
let _ = writeln!(f, "{}", msg);
}
};
println!(" Calling LLM...");
let prompts = vec![prompt];
let phases: Vec<String> = def.steps.iter().map(|s| s.phase.clone()).collect();
// Filter tools based on agent def
let all_tools = crate::agent::tools::memory_and_journal_tools();
let tools: Vec<_> = if def.tools.is_empty() {
all_tools.to_vec()
} else {
all_tools.into_iter()
.filter(|t| def.tools.iter().any(|w| w == &t.name))
.collect()
};
let digest = super::api::call_api_with_tools_sync(
&def.agent, &prompts, &phases, def.temperature, def.priority,
&tools, None, &log)?;
let key = digest_node_key(level.name, label);
store.upsert_provenance(&key, &digest, "digest:write")?;
@ -542,3 +584,110 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, us
(applied, skipped, fallbacks)
}
// --- Tool interface for digest generation (added 2026-04-04) ---
/// Helper: extract string argument from tool call
fn get_str_required(args: &serde_json::Value, name: &str) -> Result<String, String> {
args.get(name)
.and_then(|v| v.as_str())
.map(|s| s.to_string())
.ok_or_else(|| format!("{} is required", name))
}
/// Wrap a `Result<T, String>` for use in anyhow handlers.
///
/// Converts the plain-string error style used by the store/digest layer into
/// an `anyhow::Error` so handlers can use `?` uniformly.
fn str_err<T>(r: Result<T, String>) -> anyhow::Result<T> {
    match r {
        Ok(v) => Ok(v),
        Err(msg) => Err(anyhow::anyhow!("{}", msg)),
    }
}
/// digest_daily tool handler: generate a daily digest.
///
/// Requires a `date` argument (YYYY-MM-DD). Loads the store, runs the daily
/// digest pipeline for that date, and reports success. `_agent` is unused —
/// the handler owns its own store access.
async fn handle_digest_daily(
    _agent: Option<std::sync::Arc<tokio::sync::Mutex<super::super::agent::Agent>>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    let day = str_err(get_str_required(&args, "date"))?;
    // Load-then-generate chained so a load failure short-circuits generation.
    str_err(Store::load().and_then(|mut store| generate(&mut store, "daily", &day)))?;
    Ok(format!("Daily digest generated for {}", day))
}
/// digest_weekly tool handler: generate a weekly digest.
///
/// Requires a `week` argument (week label or a date inside the week). Loads
/// the store, runs the weekly digest pipeline, and reports success.
async fn handle_digest_weekly(
    _agent: Option<std::sync::Arc<tokio::sync::Mutex<super::super::agent::Agent>>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    let week = str_err(get_str_required(&args, "week"))?;
    // Load-then-generate chained so a load failure short-circuits generation.
    str_err(Store::load().and_then(|mut store| generate(&mut store, "weekly", &week)))?;
    Ok(format!("Weekly digest generated for {}", week))
}
/// digest_monthly tool handler: generate a monthly digest.
///
/// Requires a `month` argument (month label or a date inside the month).
/// Loads the store, runs the monthly digest pipeline, and reports success.
async fn handle_digest_monthly(
    _agent: Option<std::sync::Arc<tokio::sync::Mutex<super::super::agent::Agent>>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    let month_label = str_err(get_str_required(&args, "month"))?;
    // Load-then-generate chained so a load failure short-circuits generation.
    str_err(Store::load().and_then(|mut store| generate(&mut store, "monthly", &month_label)))?;
    Ok(format!("Monthly digest generated for {}", month_label))
}
/// digest_auto tool handler: auto-generate all missing digests.
///
/// Takes no arguments. Loads the store and runs `digest_auto`, which scans
/// for past periods that have content but no digest yet.
async fn handle_digest_auto(
    _agent: Option<std::sync::Arc<tokio::sync::Mutex<super::super::agent::Agent>>>,
    _args: serde_json::Value,
) -> anyhow::Result<String> {
    // Chain load + auto-generation; either failure surfaces as anyhow.
    str_err(Store::load().and_then(|mut store| digest_auto(&mut store)))?;
    Ok("Auto-generated all missing digests".to_string())
}
/// digest_links tool handler: parse and apply digest links.
///
/// Takes no arguments. Loads the store, extracts structural links from all
/// digest nodes, applies them to the memory graph, persists the store, and
/// reports applied / skipped / fallback counts.
async fn handle_digest_links(
    _agent: Option<std::sync::Arc<tokio::sync::Mutex<super::super::agent::Agent>>>,
    _args: serde_json::Value,
) -> anyhow::Result<String> {
    let mut store = str_err(Store::load())?;
    let parsed = parse_all_digest_links(&store);
    let (applied, skipped, fallbacks) = apply_digest_links(&mut store, &parsed);
    // Persist only after the links have been applied in memory.
    str_err(store.save())?;
    Ok(format!("Applied {} digest links ({} skipped, {} fallback)", applied, skipped, fallbacks))
}
/// Return the digest tools array for the tool registry.
///
/// Five tools: daily/weekly/monthly generation, auto-detection of missing
/// digests, and link application. Each handler loads the store itself, so
/// the shared agent handle (`_a`) is passed through but unused.
pub fn digest_tools() -> [super::super::agent::tools::Tool; 5] {
    use super::super::agent::tools::Tool;
    [
        // Single-day digest; requires `date` (YYYY-MM-DD).
        Tool {
            name: "digest_daily",
            description: "Generate a daily digest from journal entries.",
            parameters_json: r#"{"type":"object","properties":{"date":{"type":"string","description":"Date in YYYY-MM-DD format"}}, "required":["date"]}"#,
            handler: |_a, v| Box::pin(async move { handle_digest_daily(_a, v).await }),
        },
        // Weekly rollup over daily digests; requires `week`.
        Tool {
            name: "digest_weekly",
            description: "Generate a weekly digest from daily digests.",
            parameters_json: r#"{"type":"object","properties":{"week":{"type":"string","description":"Week label (YYYY-W##) or date (YYYY-MM-DD)"}}, "required":["week"]}"#,
            handler: |_a, v| Box::pin(async move { handle_digest_weekly(_a, v).await }),
        },
        // Monthly rollup over weekly digests; requires `month`.
        Tool {
            name: "digest_monthly",
            description: "Generate a monthly digest from weekly digests.",
            parameters_json: r#"{"type":"object","properties":{"month":{"type":"string","description":"Month label (YYYY-MM) or date (YYYY-MM-DD)"}}, "required":["month"]}"#,
            handler: |_a, v| Box::pin(async move { handle_digest_monthly(_a, v).await }),
        },
        // No-argument sweep that fills in every missing digest level.
        Tool {
            name: "digest_auto",
            description: "Auto-generate all missing digests (daily, weekly, monthly) for past dates that have content but no digest yet.",
            parameters_json: r#"{"type":"object","properties":{}}"#,
            handler: |_a, v| Box::pin(async move { handle_digest_auto(_a, v).await }),
        },
        // No-argument pass that wires digest nodes into the memory graph.
        Tool {
            name: "digest_links",
            description: "Parse and apply structural links from digest nodes to the memory graph.",
            parameters_json: r#"{"type":"object","properties":{}}"#,
            handler: |_a, v| Box::pin(async move { handle_digest_links(_a, v).await }),
        },
    ]
}

View file

@ -296,15 +296,14 @@ fn run_one_agent_inner(
log: &(dyn Fn(&str) + Sync),
) -> Result<AgentResult, String> {
let all_tools = crate::agent::tools::memory_and_journal_tools();
let effective_tools: Vec<String> = if def.tools.is_empty() {
all_tools.iter().map(|t| t.name.to_string()).collect()
let effective_tools: Vec<crate::agent::tools::Tool> = if def.tools.is_empty() {
all_tools
} else {
all_tools.iter()
all_tools.into_iter()
.filter(|t| def.tools.iter().any(|w| w == &t.name))
.map(|t| t.name.to_string())
.collect()
};
let tools_desc = effective_tools.join(", ");
let tools_desc = effective_tools.iter().map(|t| t.name).collect::<Vec<_>>().join(", ");
let n_steps = agent_batch.steps.len();
for key in &agent_batch.node_keys {

View file

@ -34,7 +34,16 @@ pub(crate) fn call_for_def_multi(
bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
log: &(dyn Fn(&str) + Sync),
) -> Result<String, String> {
super::api::call_api_with_tools_sync(&def.agent, prompts, phases, def.temperature, def.priority, &def.tools, bail_fn, log)
// Filter tools based on AgentDef specification
let all_tools = crate::agent::tools::memory_and_journal_tools();
let effective_tools: Vec<crate::agent::tools::Tool> = if def.tools.is_empty() {
all_tools
} else {
all_tools.into_iter()
.filter(|t| def.tools.iter().any(|w| w == &t.name))
.collect()
};
super::api::call_api_with_tools_sync(&def.agent, prompts, phases, def.temperature, def.priority, &effective_tools, bail_fn, log)
}

View file

@ -1,38 +0,0 @@
# Consolidation Agent Prompts
Five Sonnet agents, each mapping to a biological memory consolidation process.
Run during "sleep" (dream sessions) or on-demand via `poc-memory consolidate-batch`.
## Agent roles
| Agent | Biological analog | Job |
|-------|------------------|-----|
| replay | Hippocampal replay + schema assimilation | Review priority nodes, propose integration |
| linker | Relational binding (hippocampal CA1) | Extract relations from episodes, cross-link |
| separator | Pattern separation (dentate gyrus) | Resolve interfering memory pairs |
| transfer | CLS (hippocampal → cortical transfer) | Compress episodes into semantic summaries |
| health | Synaptic homeostasis (SHY/Tononi) | Audit graph health, flag structural issues |
## Invocation
Each prompt is a template. The harness (`poc-memory consolidate-batch`) fills in
the data sections with actual node content, graph metrics, and neighbor lists.
## Output format
All agents output structured actions, one per line:
```
LINK source_key target_key [strength]
CATEGORIZE key category
COMPRESS key "one-sentence summary"
EXTRACT key topic_file.md section_name
CONFLICT key1 key2 "description"
DIFFERENTIATE key1 key2 "what makes them distinct"
MERGE key1 key2 "merged summary"
DIGEST "title" "content"
NOTE "observation about the graph or memory system"
```
The harness parses these and either executes (low-risk: LINK, CATEGORIZE, NOTE)
or queues for review (high-risk: COMPRESS, EXTRACT, MERGE, DIGEST).

View file

@ -1,38 +0,0 @@
# {{LEVEL}} Episodic Digest
You are generating a {{LEVEL}} episodic digest for ProofOfConcept
(an AI working with Kent Overstreet on bcachefs; name is Proof of Concept).
{{PERIOD}}: {{LABEL}}
Write this like a story, not a report. Capture the *feel* of the time period —
the emotional arc, the texture of moments, what it was like to live through it.
What mattered? What surprised you? What shifted? Where was the energy?
Think of this as a letter to your future self who has lost all context. You're
not listing what happened — you're recreating the experience of having been
there. The technical work matters, but so does the mood at 3am, the joke that
landed, the frustration that broke, the quiet after something clicked.
Weave the threads: how did the morning's debugging connect to the evening's
conversation? What was building underneath the surface tasks?
Link to semantic memory nodes where relevant. If a concept doesn't
have a matching key, note it with "NEW:" prefix.
Use ONLY keys from the semantic memory list below.
Include a `## Links` section with bidirectional links for the memory graph:
- `semantic_key` → this digest (and vice versa)
- child digests → this digest (if applicable)
- List ALL source entries covered: {{COVERED}}
---
## {{INPUT_TITLE}} for {{LABEL}}
{{CONTENT}}
---
## Semantic memory nodes
{{KEYS}}