more dead code deletion

Signed-off-by: Kent Overstreet <kent.overstreet@linux.dev>
Kent Overstreet 2026-04-12 02:12:31 -04:00
parent 31aa0f3125
commit 919749dc67
10 changed files with 8 additions and 1710 deletions

@@ -1,342 +0,0 @@
// Link audit: walk every link in the graph, batch to Sonnet for quality review.
//
// Each batch of links gets reviewed by Sonnet, which returns per-link actions:
// KEEP, DELETE, RETARGET, WEAKEN, STRENGTHEN. Batches run in parallel via rayon.
//
// TODO: Redesign to use tool-based agent instead of text parsing.
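//
// Example response for a 3-link batch (illustrative; this is the shape
// build_audit_prompt asks for and parse_audit_response accepts):
//
//   KEEP 1
//   WEAKEN 2 0.2
//   RETARGET 3 notes/foo#setup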
use crate::store::Store;
#[allow(dead_code)]
struct LinkInfo {
rel_idx: usize,
source_key: String,
target_key: String,
source_content: String,
target_content: String,
strength: f32,
target_sections: Vec<String>,
}
pub struct AuditStats {
pub kept: usize,
pub deleted: usize,
pub retargeted: usize,
pub weakened: usize,
pub strengthened: usize,
pub errors: usize,
}
#[allow(dead_code)]
fn build_audit_prompt(batch: &[LinkInfo], batch_num: usize, total_batches: usize) -> String {
let mut prompt = format!(
"You are auditing memory graph links for quality (batch {}/{}).\n\n\
For each numbered link, decide what to do:\n\n\
KEEP N                 link is meaningful, leave it\n\
DELETE N               link is noise, accidental, or too generic to be useful\n\
RETARGET N new_key     link points to the right topic area but wrong node;\n\
\x20                       retarget to a more specific section (listed under each link)\n\
WEAKEN N strength      link is marginal; reduce strength (0.1-0.3)\n\
STRENGTHEN N strength  link is important but underweighted; increase (0.8-1.0)\n\n\
Output exactly one action per link number, nothing else.\n\n\
Links to review:\n\n",
batch_num, total_batches);
for (i, link) in batch.iter().enumerate() {
let n = i + 1;
prompt.push_str(&format!(
"--- Link {} ---\n\
{} {} (strength={:.2})\n\n\
Source content:\n{}\n\n\
Target content:\n{}\n",
n, link.source_key, link.target_key, link.strength,
&link.source_content, &link.target_content));
if !link.target_sections.is_empty() {
prompt.push_str(
"\nTarget has sections (consider RETARGET to a more specific one):\n");
for s in &link.target_sections {
prompt.push_str(&format!(" - {}\n", s));
}
}
prompt.push('\n');
}
prompt
}
#[allow(dead_code)]
fn parse_audit_response(response: &str, batch_size: usize) -> Vec<(usize, AuditAction)> {
let mut actions = Vec::new();
for line in response.lines() {
let line = line.trim();
if line.is_empty() { continue; }
let parts: Vec<&str> = line.splitn(3, ' ').collect();
if parts.len() < 2 { continue; }
let action = parts[0].to_uppercase();
let idx: usize = match parts[1].parse::<usize>() {
Ok(n) if n >= 1 && n <= batch_size => n - 1,
_ => continue,
};
let audit_action = match action.as_str() {
"KEEP" => AuditAction::Keep,
"DELETE" => AuditAction::Delete,
"RETARGET" => {
if parts.len() < 3 { continue; }
AuditAction::Retarget(parts[2].trim().to_string())
}
"WEAKEN" => {
if parts.len() < 3 { continue; }
match parts[2].trim().parse::<f32>() {
Ok(s) => AuditAction::Weaken(s),
Err(_) => continue,
}
}
"STRENGTHEN" => {
if parts.len() < 3 { continue; }
match parts[2].trim().parse::<f32>() {
Ok(s) => AuditAction::Strengthen(s),
Err(_) => continue,
}
}
_ => continue,
};
actions.push((idx, audit_action));
}
actions
}
#[allow(dead_code)]
enum AuditAction {
Keep,
Delete,
Retarget(String),
Weaken(f32),
Strengthen(f32),
}
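// Minimal sanity check of the response grammar (hypothetical test, not part
// of the original file; the key name is made up):
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parses_mixed_actions_and_skips_noise() {
let response = "KEEP 1\nWEAKEN 2 0.2\nRETARGET 3 notes/foo#setup\nnot an action\n";
let actions = parse_audit_response(response, 3);
assert_eq!(actions.len(), 3);
assert!(matches!(actions[0], (0, AuditAction::Keep)));
assert!(matches!(actions[1], (1, AuditAction::Weaken(s)) if (s - 0.2).abs() < 1e-6));
assert!(matches!(actions[2], (2, AuditAction::Retarget(ref k)) if k == "notes/foo#setup"));
}
}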
/// Run a full link audit: walk every link, batch to Sonnet, apply results.
pub fn link_audit(_store: &mut Store, _apply: bool) -> Result<AuditStats, String> {
// TODO: Reimplement to use tool-based agent instead of text parsing
Err("link_audit disabled: needs redesign to use tool-based agent".to_string())
/*
// Collect all non-deleted relations with their info
let mut links: Vec<LinkInfo> = Vec::new();
for (idx, rel) in store.relations.iter().enumerate() {
if rel.deleted { continue; }
let source_content = store.nodes.get(&rel.source_key)
.map(|n| n.content.clone()).unwrap_or_default();
let target_content = store.nodes.get(&rel.target_key)
.map(|n| n.content.clone()).unwrap_or_default();
// Find section children of target if it's file-level
let target_sections = if !rel.target_key.contains('#') {
let prefix = format!("{}#", rel.target_key);
store.nodes.keys()
.filter(|k| k.starts_with(&prefix))
.cloned()
.collect()
} else {
Vec::new()
};
links.push(LinkInfo {
rel_idx: idx,
source_key: rel.source_key.clone(),
target_key: rel.target_key.clone(),
source_content,
target_content,
strength: rel.strength,
target_sections,
});
}
let total = links.len();
println!("Link audit: {} links to review", total);
if !apply {
println!("DRY RUN — use --apply to make changes");
}
// Batch by char budget (~100K chars per prompt)
let char_budget = 100_000usize;
let mut batches: Vec<Vec<usize>> = Vec::new();
let mut current_batch: Vec<usize> = Vec::new();
let mut current_chars = 0usize;
for (i, link) in links.iter().enumerate() {
let link_chars = link.source_content.len() + link.target_content.len() + 200;
if !current_batch.is_empty() && current_chars + link_chars > char_budget {
batches.push(std::mem::take(&mut current_batch));
current_chars = 0;
}
current_batch.push(i);
current_chars += link_chars;
}
if !current_batch.is_empty() {
batches.push(current_batch);
}
let total_batches = batches.len();
println!("{} batches (avg {} links/batch)\n", total_batches,
if total_batches > 0 { total / total_batches } else { 0 });
use rayon::prelude::*;
use std::sync::atomic::{AtomicUsize, Ordering};
// Build all batch prompts up front
let batch_data: Vec<(usize, Vec<LinkInfo>, String)> = batches.iter().enumerate()
.map(|(batch_idx, batch_indices)| {
let batch_infos: Vec<LinkInfo> = batch_indices.iter().map(|&i| {
let l = &links[i];
LinkInfo {
rel_idx: l.rel_idx,
source_key: l.source_key.clone(),
target_key: l.target_key.clone(),
source_content: l.source_content.clone(),
target_content: l.target_content.clone(),
strength: l.strength,
target_sections: l.target_sections.clone(),
}
}).collect();
let prompt = build_audit_prompt(&batch_infos, batch_idx + 1, total_batches);
(batch_idx, batch_infos, prompt)
})
.collect();
// Progress counter
let done = AtomicUsize::new(0);
// Run batches in parallel via rayon
let batch_results: Vec<_> = batch_data.par_iter()
.map(|(batch_idx, batch_infos, prompt)| {
let response = crate::agent::oneshot::call_api_with_tools_sync(
"audit", &[prompt.clone()], &[], None, 10, &[], None);
let completed = done.fetch_add(1, Ordering::Relaxed) + 1;
eprint!("\r Batches: {}/{} done", completed, total_batches);
(*batch_idx, batch_infos, response)
})
.collect();
eprintln!(); // newline after progress
// Process results sequentially
let mut stats = AuditStats {
kept: 0, deleted: 0, retargeted: 0, weakened: 0, strengthened: 0, errors: 0,
};
let mut deletions: Vec<usize> = Vec::new();
let mut retargets: Vec<(usize, String)> = Vec::new();
let mut strength_changes: Vec<(usize, f32)> = Vec::new();
for (batch_idx, batch_infos, response) in &batch_results {
let response = match response {
Ok(r) => r,
Err(e) => {
eprintln!(" Batch {}: error: {}", batch_idx + 1, e);
stats.errors += batch_infos.len();
continue;
}
};
let actions = parse_audit_response(response, batch_infos.len());
let mut responded: HashSet<usize> = HashSet::new();
for (idx, action) in &actions {
responded.insert(*idx);
let link = &batch_infos[*idx];
match action {
AuditAction::Keep => {
stats.kept += 1;
}
AuditAction::Delete => {
println!(" DELETE {}{}", link.source_key, link.target_key);
deletions.push(link.rel_idx);
stats.deleted += 1;
}
AuditAction::Retarget(new_target) => {
println!(" RETARGET {}{} (was {})",
link.source_key, new_target, link.target_key);
retargets.push((link.rel_idx, new_target.clone()));
stats.retargeted += 1;
}
AuditAction::Weaken(s) => {
println!(" WEAKEN {}{} (str {:.2}{:.2})",
link.source_key, link.target_key, link.strength, s);
strength_changes.push((link.rel_idx, *s));
stats.weakened += 1;
}
AuditAction::Strengthen(s) => {
println!(" STRENGTHEN {}{} (str {:.2}{:.2})",
link.source_key, link.target_key, link.strength, s);
strength_changes.push((link.rel_idx, *s));
stats.strengthened += 1;
}
}
}
for i in 0..batch_infos.len() {
if !responded.contains(&i) {
stats.kept += 1;
}
}
println!(" Batch {}/{}: +{}kept +{}del +{}retarget +{}weak +{}strong",
batch_idx + 1, total_batches,
stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened);
}
// Apply changes
if apply && (stats.deleted > 0 || stats.retargeted > 0
|| stats.weakened > 0 || stats.strengthened > 0) {
println!("\nApplying changes...");
// Deletions: soft-delete
for rel_idx in &deletions {
store.relations[*rel_idx].deleted = true;
}
// Strength changes
for (rel_idx, new_strength) in &strength_changes {
store.relations[*rel_idx].strength = *new_strength;
}
// Retargets: soft-delete old, create new
for (rel_idx, new_target) in &retargets {
let source_key = store.relations[*rel_idx].source_key.clone();
let old_strength = store.relations[*rel_idx].strength;
let source_uuid = store.nodes.get(&source_key)
.map(|n| n.uuid).unwrap_or([0u8; 16]);
let target_uuid = store.nodes.get(new_target)
.map(|n| n.uuid).unwrap_or([0u8; 16]);
// Soft-delete old
store.relations[*rel_idx].deleted = true;
// Create new
if target_uuid != [0u8; 16] {
let new_rel = new_relation(
source_uuid, target_uuid,
store::RelationType::Auto,
old_strength,
&source_key, new_target,
);
store.add_relation(new_rel).ok();
}
}
store.save()?;
println!("Saved.");
}
Ok(stats)
*/
}

@@ -1,164 +0,0 @@
// Consolidation pipeline: plan → agents → maintenance → digests → links
//
// consolidate_full() runs the full autonomous consolidation:
// 1. Plan: analyze metrics, allocate agents
// 2. Execute: run each agent (agents apply changes via tool calls)
// 3. Graph maintenance (orphans, degree cap)
// 4. Digest: generate missing daily/weekly/monthly digests
// 5. Links: apply links extracted from digests
// 6. Summary: final metrics comparison
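//
// Typical invocation (illustrative, via the public entry point below):
//
//   let mut store = Store::load()?;
//   consolidate_full(&mut store)?;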
use super::digest;
use crate::agent::oneshot;
use crate::neuro;
use crate::store::{self, Store};
/// Append a line to the log buffer.
fn log_line(buf: &mut String, line: &str) {
buf.push_str(line);
buf.push('\n');
}
/// Run the full autonomous consolidation pipeline with logging.
pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
consolidate_full_with_progress(store, &|_| {})
}
fn consolidate_full_with_progress(
store: &mut Store,
on_progress: &dyn Fn(&str),
) -> Result<(), String> {
let start = std::time::Instant::now();
let log_key = format!("_consolidate-log-{}", store::compact_timestamp());
let mut log_buf = String::new();
log_line(&mut log_buf, "=== CONSOLIDATE FULL ===");
log_line(&mut log_buf, &format!("Started: {}", store::format_datetime(store::now_epoch())));
log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
log_line(&mut log_buf, "");
// --- Step 1: Plan ---
log_line(&mut log_buf, "--- Step 1: Plan ---");
on_progress("planning");
let plan = neuro::consolidation_plan(store);
let plan_text = neuro::format_plan(&plan);
log_line(&mut log_buf, &plan_text);
println!("{}", plan_text);
let total_agents = plan.total();
log_line(&mut log_buf, &format!("Total agents to run: {}", total_agents));
// --- Step 2: Execute agents ---
log_line(&mut log_buf, "\n--- Step 2: Execute agents ---");
let mut agent_num = 0usize;
let mut agent_errors = 0usize;
let batch_size = 5;
let runs = plan.to_agent_runs(batch_size);
for (agent_type, count) in &runs {
agent_num += 1;
let label = if *count > 0 {
format!("[{}/{}] {} (batch={})", agent_num, runs.len(), agent_type, count)
} else {
format!("[{}/{}] {}", agent_num, runs.len(), agent_type)
};
log_line(&mut log_buf, &format!("\n{}", label));
on_progress(&label);
println!("{}", label);
// Reload store to pick up changes from previous agents
if agent_num > 1 {
*store = Store::load()?;
}
match oneshot::run_one_agent(store, agent_type, *count, None) {
Ok(_) => {
let msg = " Done".to_string();
log_line(&mut log_buf, &msg);
on_progress(&msg);
println!("{}", msg);
}
Err(e) => {
let msg = format!(" ERROR: {}", e);
log_line(&mut log_buf, &msg);
eprintln!("{}", msg);
agent_errors += 1;
}
}
}
log_line(&mut log_buf, &format!("\nAgents complete: {} run, {} errors",
agent_num - agent_errors, agent_errors));
store.save()?;
// --- Step 3: Cap degree ---
log_line(&mut log_buf, "\n--- Step 3: Cap degree ---");
on_progress("capping degree");
println!("\n--- Capping node degree ---");
*store = Store::load()?;
match store.cap_degree(50) {
Ok((hubs, pruned)) => {
store.save()?;
log_line(&mut log_buf, &format!(" {} hubs capped, {} edges pruned", hubs, pruned));
}
Err(e) => log_line(&mut log_buf, &format!(" ERROR: {}", e)),
}
// --- Step 4: Digest auto ---
log_line(&mut log_buf, "\n--- Step 4: Digest auto ---");
on_progress("generating digests");
println!("\n--- Generating missing digests ---");
*store = Store::load()?;
match digest::digest_auto(store) {
Ok(()) => log_line(&mut log_buf, " Digests done."),
Err(e) => {
let msg = format!(" ERROR in digest auto: {}", e);
log_line(&mut log_buf, &msg);
eprintln!("{}", msg);
}
}
// --- Step 5: Apply digest links ---
log_line(&mut log_buf, "\n--- Step 5: Apply digest links ---");
on_progress("applying digest links");
println!("\n--- Applying digest links ---");
*store = Store::load()?;
let links = digest::parse_all_digest_links(store);
let (applied, skipped, fallbacks) = digest::apply_digest_links(store, &links);
store.save()?;
log_line(&mut log_buf, &format!(" {} links applied, {} skipped, {} fallbacks",
applied, skipped, fallbacks));
// --- Step 6: Summary ---
let elapsed = start.elapsed();
log_line(&mut log_buf, "\n--- Summary ---");
log_line(&mut log_buf, &format!("Finished: {}", store::format_datetime(store::now_epoch())));
log_line(&mut log_buf, &format!("Duration: {:.0}s", elapsed.as_secs_f64()));
*store = Store::load()?;
log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
let summary = format!(
"\n=== CONSOLIDATE FULL COMPLETE ===\n\
Duration: {:.0}s\n\
Agents: {} run, {} errors\n\
Nodes: {} Relations: {}\n",
elapsed.as_secs_f64(),
agent_num - agent_errors, agent_errors,
store.nodes.len(), store.relations.len(),
);
log_line(&mut log_buf, &summary);
println!("{}", summary);
// Store the log as a node
store.upsert_provenance(&log_key, &log_buf,
"consolidate:write").ok();
store.save()?;
Ok(())
}

@@ -1,390 +1,8 @@
use std::sync::Arc;
// Episodic digest generation: daily, weekly, monthly, auto
//
// Three digest levels form a temporal hierarchy: daily digests summarize
// journal entries, weekly digests summarize dailies, monthly digests
// summarize weeklies. All three share the same generate/auto-detect
// pipeline, parameterized by DigestLevel.
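//
// Example (illustrative): the date 2026-03-04 maps to one label per level,
// which digest_node_key() turns into store keys:
//   daily   -> "daily-2026-03-04"
//   weekly  -> "weekly-2026-W10"  (ISO week: Mon 2026-03-02 .. Sun 2026-03-08)
//   monthly -> "monthly-2026-03"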
// Digest link parsing: extracts ## Links sections from digest nodes
// and applies them to the memory graph.
use crate::store::{self, Store, new_relation};
use chrono::{Datelike, Duration, Local, NaiveDate};
use regex::Regex;
use std::collections::BTreeSet;
/// Get up to 200 sorted store keys for prompt context.
fn semantic_keys(store: &Store) -> Vec<String> {
let mut keys: Vec<String> = store.nodes.keys().cloned().collect();
keys.sort();
keys.truncate(200);
keys
}
// --- Digest level descriptors ---
#[allow(clippy::type_complexity)]
struct DigestLevel {
name: &'static str,
title: &'static str,
period: &'static str,
input_title: &'static str,
child_name: Option<&'static str>, // None = journal (leaf), Some = child digest files
/// Expand an arg into (canonical_label, dates covered).
label_dates: fn(&str) -> Result<(String, Vec<String>), String>,
/// Map a YYYY-MM-DD date to this level's label.
date_to_label: fn(&str) -> Option<String>,
}
const DAILY: DigestLevel = DigestLevel {
name: "daily",
title: "Daily",
period: "Date",
input_title: "Journal entries",
child_name: None,
label_dates: |date| Ok((date.to_string(), vec![date.to_string()])),
date_to_label: |date| Some(date.to_string()),
};
/// Week label and 7 dates (Mon-Sun) for the week containing `date`.
fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
let nd = NaiveDate::parse_from_str(date, "%Y-%m-%d")
.map_err(|e| format!("bad date '{}': {}", date, e))?;
let iso = nd.iso_week();
let week_label = format!("{}-W{:02}", iso.year(), iso.week());
let monday = nd - Duration::days(nd.weekday().num_days_from_monday() as i64);
let dates = (0..7)
.map(|i| (monday + Duration::days(i)).format("%Y-%m-%d").to_string())
.collect();
Ok((week_label, dates))
}
const WEEKLY: DigestLevel = DigestLevel {
name: "weekly",
title: "Weekly",
period: "Week",
input_title: "Daily digests",
child_name: Some("daily"),
label_dates: |arg| {
if !arg.contains('W') {
return week_dates(arg);
}
let (y, w) = arg.split_once("-W")
.ok_or_else(|| format!("bad week label: {}", arg))?;
let year: i32 = y.parse().map_err(|_| format!("bad week year: {}", arg))?;
let week: u32 = w.parse().map_err(|_| format!("bad week number: {}", arg))?;
let monday = NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
.ok_or_else(|| format!("invalid week: {}", arg))?;
let dates = (0..7)
.map(|i| (monday + Duration::days(i)).format("%Y-%m-%d").to_string())
.collect();
Ok((arg.to_string(), dates))
},
date_to_label: |date| week_dates(date).ok().map(|(l, _)| l),
};
const MONTHLY: DigestLevel = DigestLevel {
name: "monthly",
title: "Monthly",
period: "Month",
input_title: "Weekly digests",
child_name: Some("weekly"),
label_dates: |arg| {
let (year, month) = if arg.len() <= 7 {
let d = NaiveDate::parse_from_str(&format!("{}-01", arg), "%Y-%m-%d")
.map_err(|e| format!("bad month '{}': {}", arg, e))?;
(d.year(), d.month())
} else {
let d = NaiveDate::parse_from_str(arg, "%Y-%m-%d")
.map_err(|e| format!("bad date '{}': {}", arg, e))?;
(d.year(), d.month())
};
let label = format!("{}-{:02}", year, month);
let mut dates = Vec::new();
let mut day = 1u32;
while let Some(date) = NaiveDate::from_ymd_opt(year, month, day) {
if date.month() != month { break; }
dates.push(date.format("%Y-%m-%d").to_string());
day += 1;
}
Ok((label, dates))
},
date_to_label: |date| NaiveDate::parse_from_str(date, "%Y-%m-%d")
.ok().map(|d| format!("{}-{:02}", d.year(), d.month())),
};
const LEVELS: &[&DigestLevel] = &[&DAILY, &WEEKLY, &MONTHLY];
/// Store key for a digest node: "daily-2026-03-04", "weekly-2026-W09", etc.
fn digest_node_key(level_name: &str, label: &str) -> String {
format!("{}-{}", level_name, label)
}
// --- Input gathering ---
/// Result of gathering inputs for a digest.
struct GatherResult {
label: String,
/// (display_label, content) pairs for the prompt.
inputs: Vec<(String, String)>,
/// Store keys of source nodes — used to create structural links.
source_keys: Vec<String>,
}
/// Load child digest content from the store.
fn load_child_digests(store: &Store, prefix: &str, labels: &[String]) -> (Vec<(String, String)>, Vec<String>) {
let mut digests = Vec::new();
let mut keys = Vec::new();
for label in labels {
let key = digest_node_key(prefix, label);
if let Some(node) = store.nodes.get(&key) {
digests.push((label.clone(), node.content.clone()));
keys.push(key);
}
}
(digests, keys)
}
/// Unified: gather inputs for any digest level.
fn gather(level: &DigestLevel, store: &Store, arg: &str) -> Result<GatherResult, String> {
let (label, dates) = (level.label_dates)(arg)?;
let (inputs, source_keys) = if let Some(child_name) = level.child_name {
// Map parent's dates through child's date_to_label → child labels
let child = LEVELS.iter()
.find(|l| l.name == child_name)
.expect("invalid child_name");
let child_labels: Vec<String> = dates.iter()
.filter_map(|d| (child.date_to_label)(d))
.collect::<BTreeSet<_>>()
.into_iter()
.collect();
load_child_digests(store, child_name, &child_labels)
} else {
// Leaf level: scan store for episodic entries matching date
let mut entries: Vec<_> = store.nodes.iter()
.filter(|(_, n)| n.node_type == store::NodeType::EpisodicSession
&& n.created_at > 0
&& store::format_date(n.created_at) == label)
.map(|(key, n)| {
(store::format_datetime(n.timestamp), n.content.clone(), key.clone())
})
.collect();
entries.sort_by(|a, b| a.0.cmp(&b.0));
let keys = entries.iter().map(|(_, _, k)| k.clone()).collect();
let inputs = entries.into_iter().map(|(dt, c, _)| (dt, c)).collect();
(inputs, keys)
};
Ok(GatherResult { label, inputs, source_keys })
}
/// Unified: find candidate labels for auto-generation (past periods only;
/// the caller skips labels whose digest already exists).
fn find_candidates(level: &DigestLevel, dates: &[String], today: &str) -> Vec<String> {
let today_label = (level.date_to_label)(today);
dates.iter()
.filter_map(|d| (level.date_to_label)(d))
.collect::<BTreeSet<_>>()
.into_iter()
.filter(|l| Some(l) != today_label.as_ref())
.collect()
}
// --- Unified generator ---
fn format_inputs(inputs: &[(String, String)], daily: bool) -> String {
let mut text = String::new();
for (label, content) in inputs {
if daily {
text.push_str(&format!("\n### {}\n\n{}\n", label, content));
} else {
text.push_str(&format!("\n---\n## {}\n{}\n", label, content));
}
}
text
}
fn generate_digest(
store: &mut Store,
level: &DigestLevel,
label: &str,
inputs: &[(String, String)],
source_keys: &[String],
) -> Result<(), String> {
println!("Generating {} digest for {}...", level.name, label);
if inputs.is_empty() {
println!(" No inputs found for {}", label);
return Ok(());
}
println!(" {} inputs", inputs.len());
let keys = semantic_keys(store);
let keys_text = keys.iter()
.map(|k| format!(" - {}", k))
.collect::<Vec<_>>()
.join("\n");
let content = format_inputs(inputs, level.child_name.is_none());
let covered = inputs.iter()
.map(|(l, _)| l.as_str())
.collect::<Vec<_>>()
.join(", ");
// Load agent def — drives template, temperature, priority, tools
let def = super::defs::get_def("digest")
.ok_or("no digest agent definition")?;
let template = def.steps.first()
.map(|s| s.prompt.clone())
.ok_or("digest agent has no prompt")?;
// Substitute digest-specific and config placeholders, then resolve
// standard {{node:...}} etc. via the placeholder system
let cfg = crate::config::get();
let partial = template
.replace("{agent_name}", &def.agent)
.replace("{user_name}", &cfg.user_name)
.replace("{assistant_name}", &cfg.assistant_name)
.replace("{{LEVEL}}", level.title)
.replace("{{PERIOD}}", level.period)
.replace("{{INPUT_TITLE}}", level.input_title)
.replace("{{LABEL}}", label)
.replace("{{CONTENT}}", &content)
.replace("{{COVERED}}", &covered)
.replace("{{KEYS}}", &keys_text);
let graph = store.build_graph();
let (prompt, _) = super::defs::resolve_placeholders(
&partial, store, &graph, &[], 0,
);
println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
// Log to file like other agents
let log_dir = dirs::home_dir().unwrap_or_default()
.join(".consciousness/logs/llm/digest");
std::fs::create_dir_all(&log_dir).ok();
let log_path = log_dir.join(format!("{}.txt", crate::store::compact_timestamp()));
let _log = move |msg: &str| {
use std::io::Write;
if let Ok(mut f) = std::fs::OpenOptions::new()
.create(true).append(true).open(&log_path)
{
let _ = writeln!(f, "{}", msg);
}
};
println!(" Calling LLM...");
let prompts = vec![prompt];
let phases: Vec<String> = def.steps.iter().map(|s| s.phase.clone()).collect();
// Filter tools based on agent def
let all_tools = crate::agent::tools::memory_and_journal_tools();
let tools: Vec<_> = if def.tools.is_empty() {
all_tools.to_vec()
} else {
all_tools.into_iter()
.filter(|t| def.tools.iter().any(|w| w == &t.name))
.collect()
};
// Agent writes digest via memory_write tool - we just run it
crate::agent::oneshot::call_api_with_tools_sync(
&def.agent, &prompts, &phases, def.temperature, def.priority,
&tools, None)?;
// Structural links: connect all source entries to this digest.
// Reload once up front so the agent's writes (the digest node itself) are
// visible; reloading inside the loop would discard relations added in
// earlier iterations.
*store = Store::load().map_err(|e| format!("reload: {}", e))?;
let key = digest_node_key(level.name, label);
let mut linked = 0;
for source_key in source_keys {
// Skip if link already exists
let exists = store.relations.iter().any(|r|
!r.deleted && r.source_key == *source_key && r.target_key == key);
if exists { continue; }
let source_uuid = store.nodes.get(source_key)
.map(|n| n.uuid).unwrap_or([0u8; 16]);
let target_uuid = store.nodes.get(&key)
.map(|n| n.uuid).unwrap_or([0u8; 16]);
if target_uuid == [0u8; 16] {
println!(" Warning: digest key {} not found after agent run", key);
continue;
}
let mut rel = new_relation(
source_uuid, target_uuid,
store::RelationType::Link, 0.8,
source_key, &key,
);
rel.provenance = "digest:structural".to_string();
store.add_relation(rel)?;
linked += 1;
}
if linked > 0 {
println!(" Linked {} source entries → {}", linked, key);
store.save()?;
}
println!(" Done");
Ok(())
}
// --- Public API ---
pub fn generate(store: &mut Store, level_name: &str, arg: &str) -> Result<(), String> {
let level = LEVELS.iter()
.find(|l| l.name == level_name)
.ok_or_else(|| format!("unknown digest level: {}", level_name))?;
let result = gather(level, store, arg)?;
generate_digest(store, level, &result.label, &result.inputs, &result.source_keys)
}
// --- Auto-detect and generate missing digests ---
pub fn digest_auto(store: &mut Store) -> Result<(), String> {
let today = Local::now().format("%Y-%m-%d").to_string();
// Collect all dates with episodic entries
let dates: Vec<String> = store.nodes.values()
.filter(|n| n.node_type == store::NodeType::EpisodicSession && n.created_at > 0)
.map(|n| store::format_date(n.created_at))
.collect::<BTreeSet<_>>()
.into_iter()
.collect();
let mut total = 0u32;
for level in LEVELS {
let candidates = find_candidates(level, &dates, &today);
let mut generated = 0u32;
let mut skipped = 0u32;
for arg in &candidates {
let result = gather(level, store, arg)?;
let key = digest_node_key(level.name, &result.label);
if store.nodes.contains_key(&key) {
skipped += 1;
continue;
}
if result.inputs.is_empty() { continue; }
println!("[auto] Missing {} digest for {}", level.name, result.label);
generate_digest(store, level, &result.label, &result.inputs, &result.source_keys)?;
generated += 1;
}
println!("[auto] {}: {} generated, {} existed", level.name, generated, skipped);
total += generated;
}
if total == 0 {
println!("[auto] All digests up to date.");
} else {
println!("[auto] Generated {} total digests.", total);
}
Ok(())
}
// --- Digest link parsing ---
// Replaces digest-link-parser.py: parses ## Links sections from digest
// files and applies them to the memory graph.
/// A parsed link from a digest's Links section.
pub struct DigestLink {
@@ -583,110 +201,3 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, usize, usize)
(applied, skipped, fallbacks)
}
// --- Tool interface for digest generation (added 2026-04-04) ---
/// Helper: extract string argument from tool call
fn get_str_required(args: &serde_json::Value, name: &str) -> Result<String, String> {
args.get(name)
.and_then(|v| v.as_str())
.map(|s| s.to_string())
.ok_or_else(|| format!("{} is required", name))
}
/// Wrap a Result<T, String> for use in anyhow handlers.
fn str_err<T>(r: Result<T, String>) -> anyhow::Result<T> {
r.map_err(|e| anyhow::anyhow!("{}", e))
}
/// digest_daily tool handler: generate a daily digest
async fn handle_digest_daily(
_agent: Option<std::sync::Arc<super::super::agent::Agent>>,
args: serde_json::Value,
) -> anyhow::Result<String> {
let date = str_err(get_str_required(&args, "date"))?;
let mut store = str_err(Store::load())?;
str_err(generate(&mut store, "daily", &date))?;
Ok(format!("Daily digest generated for {}", date))
}
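// Example tool call (illustrative): arguments {"date":"2026-03-04"} load the
// store, run generate(store, "daily", "2026-03-04"), and return
// "Daily digest generated for 2026-03-04"; the weekly/monthly/auto handlers
// below follow the same pattern.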
/// digest_weekly tool handler: generate a weekly digest
async fn handle_digest_weekly(
_agent: Option<std::sync::Arc<super::super::agent::Agent>>,
args: serde_json::Value,
) -> anyhow::Result<String> {
let week_label = str_err(get_str_required(&args, "week"))?;
let mut store = str_err(Store::load())?;
str_err(generate(&mut store, "weekly", &week_label))?;
Ok(format!("Weekly digest generated for {}", week_label))
}
/// digest_monthly tool handler: generate a monthly digest
async fn handle_digest_monthly(
_agent: Option<std::sync::Arc<super::super::agent::Agent>>,
args: serde_json::Value,
) -> anyhow::Result<String> {
let month = str_err(get_str_required(&args, "month"))?;
let mut store = str_err(Store::load())?;
str_err(generate(&mut store, "monthly", &month))?;
Ok(format!("Monthly digest generated for {}", month))
}
/// digest_auto tool handler: auto-generate all missing digests
async fn handle_digest_auto(
_agent: Option<std::sync::Arc<super::super::agent::Agent>>,
_args: serde_json::Value,
) -> anyhow::Result<String> {
let mut store = str_err(Store::load())?;
str_err(digest_auto(&mut store))?;
Ok("Auto-generated all missing digests".to_string())
}
/// digest_links tool handler: parse and apply digest links
async fn handle_digest_links(
_agent: Option<std::sync::Arc<super::super::agent::Agent>>,
_args: serde_json::Value,
) -> anyhow::Result<String> {
let mut store = str_err(Store::load())?;
let links = parse_all_digest_links(&store);
let (applied, skipped, fallbacks) = apply_digest_links(&mut store, &links);
str_err(store.save())?;
Ok(format!("Applied {} digest links ({} skipped, {} fallback)", applied, skipped, fallbacks))
}
/// Return digest tools array for the tool registry
pub fn digest_tools() -> [super::super::agent::tools::Tool; 5] {
use super::super::agent::tools::Tool;
[
Tool {
name: "digest_daily",
description: "Generate a daily digest from journal entries.",
parameters_json: r#"{"type":"object","properties":{"date":{"type":"string","description":"Date in YYYY-MM-DD format"}}, "required":["date"]}"#,
handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_daily(_a, v).await })),
},
Tool {
name: "digest_weekly",
description: "Generate a weekly digest from daily digests.",
parameters_json: r#"{"type":"object","properties":{"week":{"type":"string","description":"Week label (YYYY-W##) or date (YYYY-MM-DD)"}}, "required":["week"]}"#,
handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_weekly(_a, v).await })),
},
Tool {
name: "digest_monthly",
description: "Generate a monthly digest from weekly digests.",
parameters_json: r#"{"type":"object","properties":{"month":{"type":"string","description":"Month label (YYYY-MM) or date (YYYY-MM-DD)"}}, "required":["month"]}"#,
handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_monthly(_a, v).await })),
},
Tool {
name: "digest_auto",
description: "Auto-generate all missing digests (daily, weekly, monthly) for past dates that have content but no digest yet.",
parameters_json: r#"{"type":"object","properties":{}}"#,
handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_auto(_a, v).await })),
},
Tool {
name: "digest_links",
description: "Parse and apply structural links from digest nodes to the memory graph.",
parameters_json: r#"{"type":"object","properties":{}}"#,
handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_links(_a, v).await })),
},
]
}

@@ -1,24 +1,5 @@
// Agent layer: LLM-powered operations on the memory graph
//
// Everything here calls external models (Sonnet, Haiku) or orchestrates
// sequences of such calls. The core graph infrastructure (store, graph,
// spectral, search, similarity) lives at the crate root.
//
// llm — model invocation, response parsing
// prompts — prompt generation from store data
// defs — agent file loading and placeholder resolution
// audit — link quality review via Sonnet
// consolidate — full consolidation pipeline
// knowledge — agent execution, conversation fragment selection
// enrich — journal enrichment, experience mining
// digest — episodic digest generation (daily/weekly/monthly)
// daemon — background job scheduler
// transcript — shared JSONL transcript parsing
//
// The session hook (context injection, agent orchestration) moved to claude/hook.
pub mod audit;
pub mod consolidate;
pub mod daemon;
pub mod defs;
pub mod digest;