Modernize digest agent: autonomous with journal_tail levels

Rewrite digest.agent to be fully autonomous — it uses journal_tail
to discover what needs digesting and generates digests during its
run. No more pre-populated {{CONTENT}}/{{LEVEL}} placeholders.

Extend journal_tail with level parameter (0=journal, 1=daily,
2=weekly, 3=monthly) and keys_only mode. Also include node keys
in full output for better agent context.

Remove stale format:"neighborhood" case from memory_query.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
ProofOfConcept 2026-04-10 16:02:52 -04:00
parent 5b4f497d94
commit 568ce417fc
2 changed files with 65 additions and 37 deletions

View file

@@ -81,8 +81,8 @@ pub fn memory_tools() -> [super::Tool; 13] {
pub fn journal_tools() -> [super::Tool; 3] {
use super::Tool;
[
Tool { name: "journal_tail", description: "Read the last N journal entries (default 1).",
parameters_json: r#"{"type":"object","properties":{"count":{"type":"integer","description":"Number of entries (default 1)"}}}"#,
Tool { name: "journal_tail", description: "Read the last N entries at a given level (0=journal, 1=daily, 2=weekly, 3=monthly).",
parameters_json: r#"{"type":"object","properties":{"count":{"type":"integer","description":"Number of entries (default 1)"},"level":{"type":"integer","description":"0=journal, 1=daily digest, 2=weekly, 3=monthly (default 0)"},"keys_only":{"type":"boolean","description":"Return only node keys, not content"}}}"#,
handler: Arc::new(|_a, v| Box::pin(async move { journal_tail(&v).await })) },
Tool { name: "journal_new", description: "Start a new journal entry.",
parameters_json: r#"{"type":"object","properties":{"name":{"type":"string","description":"Short node name (becomes the key)"},"title":{"type":"string","description":"Descriptive title"},"body":{"type":"string","description":"Entry body"}},"required":["name","title","body"]}"#,
@@ -243,13 +243,14 @@ async fn query(args: &serde_json::Value) -> Result<String> {
let store = arc.lock().await;
let graph = store.build_graph();
let stages = crate::search::Stage::parse_pipeline(query_str)
.map_err(|e| anyhow::anyhow!("{}", e))?;
let results = crate::search::run_query(&stages, vec![], &graph, &store, false, 100);
let keys: Vec<String> = results.into_iter().map(|(k, _)| k).collect();
match format {
"full" => {
// Rich output with full content, graph metrics, hub analysis
let stages = crate::search::Stage::parse_pipeline(query_str)
.map_err(|e| anyhow::anyhow!("{}", e))?;
let results = crate::search::run_query(&stages, vec![], &graph, &store, false, 100);
let keys: Vec<String> = results.into_iter().map(|(k, _)| k).collect();
let items = crate::subconscious::defs::keys_to_replay_items(&store, &keys, &graph);
Ok(crate::subconscious::prompts::format_nodes_section(&store, &items, &graph))
}
@@ -263,21 +264,39 @@
// ── Journal tools ──────────────────────────────────────────────
async fn journal_tail(args: &serde_json::Value) -> Result<String> {
use crate::store::NodeType;
let count = args.get("count").and_then(|v| v.as_u64()).unwrap_or(1) as usize;
let level = args.get("level").and_then(|v| v.as_u64()).unwrap_or(0);
let keys_only = args.get("keys_only").and_then(|v| v.as_bool()).unwrap_or(false);
let node_type = match level {
0 => NodeType::EpisodicSession,
1 => NodeType::EpisodicDaily,
2 => NodeType::EpisodicWeekly,
3 => NodeType::EpisodicMonthly,
_ => return Err(anyhow::anyhow!("invalid level: {} (0=journal, 1=daily, 2=weekly, 3=monthly)", level)),
};
let arc = cached_store().await?;
let store = arc.lock().await;
let mut entries: Vec<&crate::store::Node> = store.nodes.values()
.filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
.filter(|n| n.node_type == node_type)
.collect();
entries.sort_by_key(|n| n.created_at);
let start = entries.len().saturating_sub(count);
if entries[start..].is_empty() {
Ok("(no journal entries)".into())
Ok("(no entries)".into())
} else if keys_only {
Ok(entries[start..].iter()
.map(|n| n.key.as_str())
.collect::<Vec<_>>()
.join("\n"))
} else {
Ok(entries[start..].iter()
.map(|n| n.content.as_str())
.map(|n| format!("## {}\n\n{}", n.key, n.content))
.collect::<Vec<_>>()
.join("\n\n"))
.join("\n\n---\n\n"))
}
}