more dead code deletion

Signed-off-by: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
Kent Overstreet 2026-04-12 02:12:31 -04:00
parent 31aa0f3125
commit 919749dc67
10 changed files with 8 additions and 1710 deletions

View file

@ -1,6 +1,7 @@
// cli/agent.rs — agent subcommand handlers
use crate::store;
use crate::subconscious::digest;
pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option<&str>, dry_run: bool, _local: bool, state_dir: Option<&str>) -> Result<(), String> {
// Mark as agent so tool calls (e.g. poc-memory render) don't
@ -56,23 +57,6 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
Ok(())
}
/// Print consolidation prompts: either a named agent's prompt batch, or the
/// generic consolidation batch when no agent is given.
pub fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option<String>) -> Result<(), String> {
    let store = store::Store::load()?;
    match agent {
        Some(agent_name) => {
            let batch = crate::agents::prompts::agent_prompt(&store, &agent_name, count)?;
            // Step headers are only printed when there is more than one step.
            let multi_step = batch.steps.len() > 1;
            for (step_idx, step) in batch.steps.iter().enumerate() {
                if multi_step {
                    println!("=== STEP {} ({}) ===\n", step_idx + 1, step.phase);
                }
                println!("{}", step.prompt);
            }
            Ok(())
        }
        None => crate::agents::prompts::consolidation_batch(&store, count, auto),
    }
}
pub fn cmd_replay_queue(count: usize) -> Result<(), String> {
let store = store::Store::load()?;
let queue = crate::neuro::replay_queue(&store, count);
@ -85,21 +69,9 @@ pub fn cmd_replay_queue(count: usize) -> Result<(), String> {
Ok(())
}
/// Print the consolidation plan derived from the current store's metrics.
pub fn cmd_consolidate_session() -> Result<(), String> {
    let store = store::Store::load()?;
    let rendered = crate::neuro::format_plan(&crate::neuro::consolidation_plan(&store));
    println!("{}", rendered);
    Ok(())
}
/// Run the full autonomous consolidation pipeline on the loaded store.
/// Thin CLI wrapper over `crate::consolidate::consolidate_full`.
pub fn cmd_consolidate_full() -> Result<(), String> {
    let mut store = store::Store::load()?;
    crate::consolidate::consolidate_full(&mut store)
}
pub fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
let store = store::Store::load()?;
let links = crate::digest::parse_all_digest_links(&store);
let links = digest::parse_all_digest_links(&store);
drop(store);
println!("Found {} unique links from digest nodes", links.len());
@ -115,7 +87,7 @@ pub fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
}
let mut store = store::Store::load()?;
let (applied, skipped, fallbacks) = crate::digest::apply_digest_links(&mut store, &links);
let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
Ok(())
}

View file

@ -202,17 +202,6 @@ pub fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
Ok(())
}
/// Run the link audit and print a summary banner with the resulting stats.
/// `apply` is forwarded to the audit (dry run when false).
pub fn cmd_link_audit(apply: bool) -> Result<(), String> {
    let mut store = store::Store::load()?;
    let stats = crate::audit::link_audit(&mut store, apply)?;
    let rule = "=".repeat(60);
    println!("\n{}", rule);
    println!("Link audit complete:");
    println!(" Kept: {} Deleted: {} Retargeted: {} Weakened: {} Strengthened: {} Errors: {}",
        stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
    println!("{}", rule);
    Ok(())
}
pub fn cmd_trace(key: &[String]) -> Result<(), String> {
if key.is_empty() {
return Err("trace requires a key".into());

View file

@ -1,315 +0,0 @@
// Spatial memory cursor — a persistent pointer into the knowledge graph.
//
// The cursor maintains a "you are here" position that persists across
// sessions. Navigation moves through three dimensions:
// - Temporal: forward/back among same-type nodes by timestamp
// - Hierarchical: up/down the digest tree (journal→daily→weekly→monthly)
// - Spatial: sideways along graph edges to linked nodes
//
// This is the beginning of place cells — the hippocampus doesn't just
// store, it maintains a map. The cursor is the map's current position.
use crate::store::{self, Node, Store};
use std::path::PathBuf;
/// Location of the on-disk cursor file (holds a single node key)
/// inside the memory directory.
fn cursor_path() -> PathBuf {
    store::memory_dir().join("cursor")
}
/// Read the current cursor position (node key), if any.
/// Returns `None` when the cursor file is missing, unreadable, or blank.
pub fn get() -> Option<String> {
    let raw = std::fs::read_to_string(cursor_path()).ok()?;
    let key = raw.trim();
    if key.is_empty() {
        None
    } else {
        Some(key.to_string())
    }
}
/// Set the cursor to a node key (written with a trailing newline).
pub fn set(key: &str) -> Result<(), String> {
    let contents = format!("{}\n", key);
    std::fs::write(cursor_path(), contents)
        .map_err(|e| format!("write cursor: {}", e))
}
/// Clear the cursor. A missing cursor file is not an error.
pub fn clear() -> Result<(), String> {
    let path = cursor_path();
    if !path.exists() {
        return Ok(());
    }
    std::fs::remove_file(&path).map_err(|e| format!("clear cursor: {}", e))
}
/// Temporal neighbors: nodes of the same type, sorted by timestamp.
/// Returns (prev, next) keys relative to the given node. Nodes that are
/// deleted or have no timestamp are excluded from the timeline.
pub(crate) fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<String>) {
    let Some(node) = store.nodes.get(key) else { return (None, None) };
    let wanted = node.node_type;
    // Build the timeline of all live, timestamped nodes of the same type.
    let mut timeline: Vec<(&str, i64)> = store.nodes.iter()
        .filter(|(_, n)| !n.deleted && n.node_type == wanted && n.timestamp > 0)
        .map(|(k, n)| (k.as_str(), n.timestamp))
        .collect();
    timeline.sort_by_key(|&(_, ts)| ts);
    match timeline.iter().position(|&(k, _)| k == key) {
        // The node itself may be absent from the timeline (e.g. no timestamp).
        None => (None, None),
        Some(i) => {
            let prev = i.checked_sub(1).map(|j| timeline[j].0.to_string());
            let next = timeline.get(i + 1).map(|&(k, _)| k.to_string());
            (prev, next)
        }
    }
}
/// Digest hierarchy: find the parent digest for a node.
/// Journal → daily, daily → weekly, weekly → monthly.
///
/// Resolution order matters: an explicit structural relation to a node of
/// the parent type wins; only journal entries fall back to date matching
/// against daily-digest key prefixes.
pub(crate) fn digest_parent(store: &Store, key: &str) -> Option<String> {
    let node = store.nodes.get(key)?;
    // Which type counts as "one level up"; non-episodic nodes have no parent.
    let parent_type = match node.node_type {
        store::NodeType::EpisodicSession => store::NodeType::EpisodicDaily,
        store::NodeType::EpisodicDaily => store::NodeType::EpisodicWeekly,
        store::NodeType::EpisodicWeekly => store::NodeType::EpisodicMonthly,
        _ => return None,
    };
    // Look for structural links first (digest:structural provenance):
    // any outgoing relation whose target already has the parent type.
    for r in &store.relations {
        if r.deleted { continue; }
        if r.source_key == key
            && let Some(target) = store.nodes.get(&r.target_key)
            && target.node_type == parent_type {
            return Some(r.target_key.clone());
        }
    }
    // Fallback: match by date for journal→daily
    if node.node_type == store::NodeType::EpisodicSession {
        // Try extracting date from timestamp first, then from created_at.
        let mut dates = Vec::new();
        if node.timestamp > 0 {
            dates.push(store::format_date(node.timestamp));
        }
        // Extract date from created_at timestamp (deduped against the first).
        if node.created_at > 0 {
            let created_date = store::format_date(node.created_at);
            if !dates.contains(&created_date) {
                dates.push(created_date);
            }
        }
        // Two key layouts exist for dailies ("daily-<date>…" and
        // "digest#daily#<date>…"); accept either prefix.
        for date in &dates {
            for prefix in [&format!("daily-{}", date), &format!("digest#daily#{}", date)] {
                for (k, n) in &store.nodes {
                    if !n.deleted && n.node_type == parent_type && k.starts_with(prefix.as_str()) {
                        return Some(k.clone());
                    }
                }
            }
        }
    }
    None
}
/// Digest children: find nodes that feed into this digest.
/// Monthly → weeklies, weekly → dailies, daily → journal entries.
/// Results are sorted by node timestamp, oldest first.
pub(crate) fn digest_children(store: &Store, key: &str) -> Vec<String> {
    let Some(node) = store.nodes.get(key) else { return vec![] };
    // Which type counts as "one level down"; non-digest nodes have no children.
    let child_type = match node.node_type {
        store::NodeType::EpisodicDaily => store::NodeType::EpisodicSession,
        store::NodeType::EpisodicWeekly => store::NodeType::EpisodicDaily,
        store::NodeType::EpisodicMonthly => store::NodeType::EpisodicWeekly,
        _ => return vec![],
    };
    // Look for structural links (source → this digest)
    let mut children: Vec<(String, i64)> = Vec::new();
    for r in &store.relations {
        if r.deleted { continue; }
        if r.target_key == key
            && let Some(source) = store.nodes.get(&r.source_key)
            && source.node_type == child_type {
            children.push((r.source_key.clone(), source.timestamp));
        }
    }
    // Fallback for daily → journal: extract date from key and match
    if children.is_empty() && node.node_type == store::NodeType::EpisodicDaily {
        // Extract date from keys like "daily-2026-03-13" or "daily-2026-03-13-suffix"
        let date = key.strip_prefix("daily-")
            .or_else(|| key.strip_prefix("digest#daily#"))
            .and_then(|rest| rest.get(..10)); // "YYYY-MM-DD"
        if let Some(date) = date {
            for (k, n) in &store.nodes {
                if n.deleted { continue; }
                // A journal entry belongs to this daily if its timestamp
                // formats to the same calendar date.
                if n.node_type == store::NodeType::EpisodicSession
                    && n.timestamp > 0
                    && store::format_date(n.timestamp) == date
                {
                    children.push((k.clone(), n.timestamp));
                }
            }
        }
    }
    children.sort_by_key(|(_, t)| *t);
    children.into_iter().map(|(k, _)| k).collect()
}
/// Graph neighbors sorted by edge strength, strongest first.
///
/// Collects every non-deleted relation touching `key` in either direction
/// and deduplicates neighbor keys, keeping the strongest edge per neighbor.
///
/// Fix: the previous version called `dedup_by(|a, b| a.0 == b.0)` after
/// sorting by strength. `dedup_by` only removes *adjacent* duplicates, so
/// two edges to the same neighbor with different strengths were usually not
/// adjacent and both survived. We now sort by (key, strength desc) first so
/// duplicates become adjacent, dedup, then sort by strength for output.
pub(crate) fn graph_neighbors(store: &Store, key: &str) -> Vec<(String, f32)> {
    let mut neighbors: Vec<(String, f32)> = Vec::new();
    for r in &store.relations {
        if r.deleted { continue; }
        if r.source_key == key {
            neighbors.push((r.target_key.clone(), r.strength));
        } else if r.target_key == key {
            neighbors.push((r.source_key.clone(), r.strength));
        }
    }
    // Group duplicates: same key adjacent, strongest first within each group.
    neighbors.sort_by(|a, b| {
        a.0.cmp(&b.0)
            .then_with(|| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal))
    });
    // dedup_by keeps the first of each adjacent run — i.e. the strongest edge.
    neighbors.dedup_by(|a, b| a.0 == b.0);
    // Final presentation order: by strength, descending.
    neighbors.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    neighbors
}
/// Format a one-line summary of a node for context display.
/// Shape: "[timestamp] (type) first-line-of-content", content capped at 80 chars.
fn node_summary(node: &Node) -> String {
    let when = match node.timestamp {
        t if t > 0 => store::format_datetime(t),
        _ => String::from("no-date"),
    };
    let kind = match node.node_type {
        store::NodeType::EpisodicSession => "journal",
        store::NodeType::EpisodicDaily => "daily",
        store::NodeType::EpisodicWeekly => "weekly",
        store::NodeType::EpisodicMonthly => "monthly",
        store::NodeType::Semantic => "semantic",
    };
    // First line of content, truncated to 80 characters.
    let headline: String = node.content
        .lines()
        .next()
        .unwrap_or("")
        .chars()
        .take(80)
        .collect();
    format!("[{}] ({}) {}", when, kind, headline)
}
/// Display the cursor position with full context.
///
/// Navigation chrome (header, neighbors, hierarchy, linked nodes) goes to
/// stderr via `eprintln!`; the node's raw content goes to stdout via
/// `print!`, so the content can be piped without the UI noise.
pub fn show(store: &Store) -> Result<(), String> {
    let key = get().ok_or_else(|| "No cursor set. Use `poc-memory cursor set KEY`".to_string())?;
    let node = store.nodes.get(&key)
        .ok_or_else(|| format!("Cursor points to missing node: {}", key))?;
    // Header
    let type_tag = match node.node_type {
        store::NodeType::EpisodicSession => "journal",
        store::NodeType::EpisodicDaily => "daily",
        store::NodeType::EpisodicWeekly => "weekly",
        store::NodeType::EpisodicMonthly => "monthly",
        store::NodeType::Semantic => "semantic",
    };
    if node.timestamp > 0 {
        eprintln!("@ {} [{}]", key, type_tag);
        eprintln!(" {}", store::format_datetime(node.timestamp));
    } else {
        eprintln!("@ {} [{}]", key, type_tag);
    }
    // Temporal context: previous/next node of the same type by timestamp.
    let (prev, next) = temporal_neighbors(store, &key);
    eprintln!();
    if let Some(ref p) = prev
        && let Some(pn) = store.nodes.get(p) {
        eprintln!("{}", node_summary(pn));
        eprintln!(" `cursor back`");
    }
    if let Some(ref n) = next
        && let Some(nn) = store.nodes.get(n) {
        eprintln!("{}", node_summary(nn));
        eprintln!(" `cursor forward`");
    }
    // Hierarchy: parent digest (up) and first child (down).
    if let Some(ref parent) = digest_parent(store, &key)
        && let Some(pn) = store.nodes.get(parent) {
        eprintln!("{}", node_summary(pn));
        eprintln!(" `cursor up`");
    }
    let children = digest_children(store, &key);
    if !children.is_empty() {
        let count = children.len();
        if let Some(first) = children.first().and_then(|k| store.nodes.get(k)) {
            eprintln!("{} children — first: {}", count, node_summary(first));
            eprintln!(" `cursor down`");
        }
    }
    // Graph neighbors (non-temporal): up to 8 linked semantic nodes,
    // already sorted by edge strength by graph_neighbors().
    let neighbors = graph_neighbors(store, &key);
    let semantic: Vec<_> = neighbors.iter()
        .filter(|(k, _)| {
            store.nodes.get(k)
                .map(|n| n.node_type == store::NodeType::Semantic)
                .unwrap_or(false)
        })
        .take(8)
        .collect();
    if !semantic.is_empty() {
        eprintln!();
        eprintln!(" Linked:");
        for (k, strength) in &semantic {
            eprintln!(" [{:.1}] {}", strength, k);
        }
    }
    eprintln!();
    eprintln!("---");
    // Content
    print!("{}", node.content);
    Ok(())
}
/// Move cursor in a temporal direction (`forward` = next by timestamp,
/// otherwise previous), then re-display. Errors if the cursor is unset,
/// dangling, or there is no neighbor in that direction.
pub fn move_temporal(store: &Store, forward: bool) -> Result<(), String> {
    let key = get().ok_or("No cursor set")?;
    if !store.nodes.contains_key(&key) {
        return Err(format!("Cursor points to missing node: {}", key));
    }
    let (prev, next) = temporal_neighbors(store, &key);
    let target = if forward { next } else { prev };
    if let Some(k) = target {
        set(&k)?;
        show(store)
    } else {
        let dir = if forward { "forward" } else { "back" };
        Err(format!("No {} neighbor from {}", dir, key))
    }
}
/// Move cursor up the digest hierarchy, then re-display.
pub fn move_up(store: &Store) -> Result<(), String> {
    let key = get().ok_or("No cursor set")?;
    let Some(parent) = digest_parent(store, &key) else {
        return Err(format!("No parent digest for {}", key));
    };
    set(&parent)?;
    show(store)
}
/// Move cursor down the digest hierarchy (to first child), then re-display.
pub fn move_down(store: &Store) -> Result<(), String> {
    let key = get().ok_or("No cursor set")?;
    let children = digest_children(store, &key);
    let Some(child) = children.first() else {
        return Err(format!("No children for {}", key));
    };
    set(child)?;
    show(store)
}

View file

@ -9,7 +9,6 @@ pub mod memory;
pub mod store;
pub mod graph;
pub mod lookups;
pub mod cursor;
pub mod query;
pub mod spectral;
pub mod neuro;

View file

@ -70,15 +70,9 @@ pub mod channel_capnp {
// Re-exports — all existing crate::X paths keep working
pub use hippocampus::{
store, graph, lookups, cursor, query,
store, graph, lookups, query,
spectral, neuro, counters,
transcript, memory,
};
pub use hippocampus::query::engine as search;
pub use hippocampus::query::parser as query_parser;
pub use subconscious as agents;
pub use subconscious::{
audit, consolidate,
digest,
};
use hippocampus::query::engine as search;
use hippocampus::query::parser as query_parser;

View file

@ -203,12 +203,6 @@ EXAMPLES:
#[command(subcommand, name = "graph")]
GraphCmd(GraphCmd),
// ── Cursor (spatial memory) ──────────────────────────────────────
/// Navigate the memory graph with a persistent cursor
#[command(subcommand)]
Cursor(CursorCmd),
// ── Agents ────────────────────────────────────────────────────────
/// Agent and daemon operations
@ -249,27 +243,6 @@ enum NodeCmd {
Dump,
}
#[derive(Subcommand)]
enum CursorCmd {
/// Show current cursor position with context
Show,
/// Set cursor to a node key
Set {
/// Node key
key: Vec<String>,
},
/// Move cursor forward in time
Forward,
/// Move cursor backward in time
Back,
/// Move up the digest hierarchy (journal→daily→weekly→monthly)
Up,
/// Move down the digest hierarchy (to first child)
Down,
/// Clear the cursor
Clear,
}
#[derive(Subcommand)]
enum JournalCmd {
/// Write a journal entry to the store
@ -291,16 +264,6 @@ enum JournalCmd {
#[arg(long, default_value_t = 0)]
level: u8,
},
/// Enrich journal entry with conversation links
Enrich {
/// Path to JSONL transcript
jsonl_path: String,
/// Journal entry text to enrich
entry_text: String,
/// Grep line number for source location
#[arg(default_value_t = 0)]
grep_line: usize,
},
}
#[derive(Subcommand)]
@ -346,13 +309,6 @@ enum GraphCmd {
/// Target node key
target: String,
},
/// Walk every link, send to Sonnet for quality review
#[command(name = "link-audit")]
LinkAudit {
/// Apply changes (default: dry run)
#[arg(long)]
apply: bool,
},
/// Cap node degree by pruning weak auto edges
#[command(name = "cap-degree")]
CapDegree {
@ -401,64 +357,6 @@ enum GraphCmd {
#[derive(Subcommand)]
enum AgentCmd {
/// Run knowledge agents to convergence
#[command(name = "knowledge-loop")]
KnowledgeLoop {
/// Maximum cycles before stopping
#[arg(long, default_value_t = 20)]
max_cycles: usize,
/// Items per agent per cycle
#[arg(long, default_value_t = 5)]
batch_size: usize,
/// Cycles to check for convergence
#[arg(long, default_value_t = 5)]
window: usize,
/// Maximum inference depth
#[arg(long, default_value_t = 4)]
max_depth: i32,
},
/// Run agent consolidation on priority nodes
#[command(name = "consolidate-batch")]
ConsolidateBatch {
/// Number of nodes to consolidate
#[arg(long, default_value_t = 5)]
count: usize,
/// Generate replay agent prompt automatically
#[arg(long)]
auto: bool,
/// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
#[arg(long)]
agent: Option<String>,
},
/// Analyze metrics, plan agent allocation
#[command(name = "consolidate-session")]
ConsolidateSession,
/// Autonomous: plan → agents → apply → digests → links
#[command(name = "consolidate-full")]
ConsolidateFull,
/// Import pending agent results into the graph
#[command(name = "apply-agent")]
ApplyAgent {
/// Process all files without moving to done/
#[arg(long)]
all: bool,
},
/// Extract and apply actions from consolidation reports
#[command(name = "apply-consolidation")]
ApplyConsolidation {
/// Apply actions (default: dry run)
#[arg(long)]
apply: bool,
/// Read from specific report file
#[arg(long)]
report: Option<String>,
},
/// Generate episodic digests (daily, weekly, monthly, auto)
Digest {
/// Digest type: daily, weekly, monthly, auto
#[command(subcommand)]
level: DigestLevel,
},
/// Parse and apply links from digest nodes
#[command(name = "digest-links")]
DigestLinks {
@ -566,27 +464,6 @@ enum AdminCmd {
MigrateTranscriptProgress,
}
#[derive(Subcommand)]
enum DigestLevel {
/// Generate daily digest
Daily {
/// Date (default: today)
date: Option<String>,
},
/// Generate weekly digest
Weekly {
/// Date or week label (default: current week)
date: Option<String>,
},
/// Generate monthly digest
Monthly {
/// Month (YYYY-MM) or date (default: current month)
date: Option<String>,
},
/// Generate all missing digests
Auto,
}
/// Print help with subcommands expanded to show nested commands.
fn print_help() {
use clap::CommandFactory;
@ -653,7 +530,6 @@ impl Run for Command {
Self::Node(sub) => sub.run(),
Self::Journal(sub) => sub.run(),
Self::GraphCmd(sub) => sub.run(),
Self::Cursor(sub) => sub.run(),
Self::Agent(sub) => sub.run(),
Self::Admin(sub) => sub.run(),
// mcp-schema moved to consciousness-mcp binary
@ -678,8 +554,6 @@ impl Run for JournalCmd {
match self {
Self::Write { name, text } => cli::journal::cmd_journal_write(&name, &text),
Self::Tail { n, full, level } => cli::journal::cmd_journal_tail(n, full, level),
Self::Enrich { jsonl_path, entry_text, grep_line }
=> cli::agent::cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
}
}
}
@ -694,7 +568,6 @@ impl Run for GraphCmd {
Self::LinkSet { source, target, strength }
=> cli::graph::cmd_link_set(&source, &target, strength),
Self::LinkImpact { source, target } => cli::graph::cmd_link_impact(&source, &target),
Self::LinkAudit { apply } => cli::graph::cmd_link_audit(apply),
Self::CapDegree { max_degree } => cli::graph::cmd_cap_degree(max_degree),
Self::NormalizeStrengths { apply } => cli::graph::cmd_normalize_strengths(apply),
Self::Trace { key } => cli::graph::cmd_trace(&key),
@ -706,46 +579,9 @@ impl Run for GraphCmd {
}
}
impl Run for CursorCmd {
fn run(self) -> Result<(), String> {
match self {
Self::Show => {
let store = store::Store::load()?;
cursor::show(&store)
}
Self::Set { key } => {
if key.is_empty() { return Err("cursor set requires a key".into()); }
let key = key.join(" ");
let store = store::Store::load()?;
let bare = store::strip_md_suffix(&key);
if !store.nodes.contains_key(&bare) {
return Err(format!("Node not found: {}", bare));
}
cursor::set(&bare)?;
cursor::show(&store)
}
Self::Forward => { let s = store::Store::load()?; cursor::move_temporal(&s, true) }
Self::Back => { let s = store::Store::load()?; cursor::move_temporal(&s, false) }
Self::Up => { let s = store::Store::load()?; cursor::move_up(&s) }
Self::Down => { let s = store::Store::load()?; cursor::move_down(&s) }
Self::Clear => cursor::clear(),
}
}
}
impl Run for AgentCmd {
fn run(self) -> Result<(), String> {
match self {
Self::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
=> cli::agent::cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
Self::ConsolidateBatch { count, auto, agent }
=> cli::agent::cmd_consolidate_batch(count, auto, agent),
Self::ConsolidateSession => cli::agent::cmd_consolidate_session(),
Self::ConsolidateFull => cli::agent::cmd_consolidate_full(),
Self::ApplyAgent { all } => cmd_apply_agent(all),
Self::ApplyConsolidation { apply, report }
=> cli::agent::cmd_apply_consolidation(apply, report.as_deref()),
Self::Digest { level } => cmd_digest(level),
Self::DigestLinks { apply } => cli::agent::cmd_digest_links(apply),
Self::Run { agent, count, target, query, dry_run, local, state_dir }
=> cli::agent::cmd_run_agent(&agent, count, &target, query.as_deref(), dry_run, local, state_dir.as_deref()),
@ -803,166 +639,3 @@ fn main() {
}
}
// ── Command implementations ─────────────────────────────────────────
/// Apply links from a single agent result JSON file.
/// Returns (links_applied, errors).
///
/// The JSON may either wrap the payload under "agent_result" or be the
/// payload itself; "links" is an array of objects with "target" and
/// optional "reason". The source node is located by matching "entry_text"
/// against journal nodes.
fn apply_agent_file(
    store: &mut store::Store,
    data: &serde_json::Value,
) -> (usize, usize) {
    // Accept both wrapped and bare payloads.
    let agent_result = data.get("agent_result").or(Some(data));
    let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
        Some(l) => l,
        None => return (0, 0),
    };
    // Journal text used to locate the source node for every link below.
    let entry_text = data.get("entry_text")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    // Informational only: echo the source line range if the agent recorded one.
    if let (Some(start), Some(end)) = (
        agent_result.and_then(|r| r.get("source_start")).and_then(|v| v.as_u64()),
        agent_result.and_then(|r| r.get("source_end")).and_then(|v| v.as_u64()),
    ) {
        println!(" Source: L{}-L{}", start, end);
    }
    let mut applied = 0;
    let mut errors = 0;
    for link in links {
        let target = match link.get("target").and_then(|v| v.as_str()) {
            Some(t) => t,
            None => continue,
        };
        let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
        // "NOTE:" targets are free-text remarks, not links — print and move on.
        if let Some(note) = target.strip_prefix("NOTE:") {
            println!(" NOTE: {}{}", note, reason);
            continue;
        }
        // Unresolvable targets and unmatched journal text are skipped,
        // not counted as errors.
        let resolved = match store.resolve_key(target) {
            Ok(r) => r,
            Err(_) => {
                println!(" SKIP {} (not found in graph)", target);
                continue;
            }
        };
        let source_key = match store.find_journal_node(entry_text) {
            Some(k) => k,
            None => {
                println!(" SKIP {} (no matching journal node)", target);
                continue;
            }
        };
        let source_uuid = match store.nodes.get(&source_key) {
            Some(n) => n.uuid,
            None => continue,
        };
        let target_uuid = match store.nodes.get(&resolved) {
            Some(n) => n.uuid,
            None => continue,
        };
        let rel = store::new_relation(
            source_uuid, target_uuid,
            store::RelationType::Link,
            0.5, // fixed default strength for agent-proposed links
            &source_key, &resolved,
        );
        if let Err(e) = store.add_relation(rel) {
            eprintln!(" Error adding relation: {}", e);
            errors += 1;
        } else {
            // NOTE(review): the separator between source and target keys looks
            // lost in rendering (likely an arrow glyph) — confirm the format string.
            println!(" LINK {}{} ({})", source_key, resolved, reason);
            applied += 1;
        }
    }
    (applied, errors)
}
/// Import pending agent result JSON files from `<memory>/agent-results`.
///
/// Each `*.json` file is parsed and its links applied via
/// `apply_agent_file`. Unless `process_all` is set, each processed file is
/// moved into `agent-results/done/`. The store is saved only when at least
/// one link was applied.
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
    let results_dir = store::memory_dir().join("agent-results");
    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }
    let mut store = store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;
    // Only *.json entries, sorted by path for deterministic processing order.
    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());
    for entry in &files {
        let path = entry.path();
        // Unreadable or unparsable files count as errors and are skipped;
        // the remaining files still get processed.
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!(" Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!(" Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };
        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        let (a, e) = apply_agent_file(&mut store, &data);
        applied += a;
        errors += e;
        if !process_all {
            // Archive the processed file; a failed rename is ignored.
            let done_dir = crate::util::memory_subdir("agent-results/done")?;
            let dest = done_dir.join(path.file_name().unwrap());
            std::fs::rename(&path, &dest).ok();
        }
    }
    if applied > 0 {
        store.save()?;
    }
    println!("\nApplied {} links ({} errors, {} files processed)",
        applied, errors, files.len());
    Ok(())
}
/// Generate episodic digests at the requested level.
/// Daily/weekly/monthly default their date argument to today; `Auto`
/// generates all missing digests.
fn cmd_digest(level: DigestLevel) -> Result<(), String> {
    // Default date argument: today's date.
    fn today() -> String {
        store::format_date(store::now_epoch())
    }
    let mut store = store::Store::load()?;
    let (kind, date) = match level {
        DigestLevel::Auto => return digest::digest_auto(&mut store),
        DigestLevel::Daily { date } => ("daily", date),
        DigestLevel::Weekly { date } => ("weekly", date),
        DigestLevel::Monthly { date } => ("monthly", date),
    };
    digest::generate(&mut store, kind, &date.unwrap_or_else(today))
}

View file

@ -1,342 +0,0 @@
// Link audit: walk every link in the graph, batch to Sonnet for quality review.
//
// Each batch of links gets reviewed by Sonnet, which returns per-link actions:
// KEEP, DELETE, RETARGET, WEAKEN, STRENGTHEN. Batches run in parallel via rayon.
//
// TODO: Redesign to use tool-based agent instead of text parsing.
use crate::store::Store;
// Everything the auditor needs to review one relation in isolation.
#[allow(dead_code)]
struct LinkInfo {
    rel_idx: usize,               // index into store.relations
    source_key: String,
    target_key: String,
    source_content: String,       // full node content shown to the reviewer
    target_content: String,
    strength: f32,
    target_sections: Vec<String>, // section-level keys under a file-level target
}
/// Tally of actions taken (or planned, in dry-run) by a link audit.
pub struct AuditStats {
    pub kept: usize,         // links left untouched
    pub deleted: usize,      // links removed as noise
    pub retargeted: usize,   // links repointed to a better node
    pub weakened: usize,     // links whose strength was reduced
    pub strengthened: usize, // links whose strength was increased
    pub errors: usize,       // links that could not be reviewed
}
// Build the review prompt for one batch of links: an instruction header
// describing the five actions, then each link's keys, strength, and both
// node contents, plus candidate sections for RETARGET when available.
#[allow(dead_code)]
fn build_audit_prompt(batch: &[LinkInfo], batch_num: usize, total_batches: usize) -> String {
    let mut prompt = format!(
        "You are auditing memory graph links for quality (batch {}/{}).\n\n\
        For each numbered link, decide what to do:\n\n\
        KEEP N link is meaningful, leave it\n\
        DELETE N link is noise, accidental, or too generic to be useful\n\
        RETARGET N new_key link points to the right topic area but wrong node;\n\
        \x20 retarget to a more specific section (listed under each link)\n\
        WEAKEN N strength link is marginal; reduce strength (0.1-0.3)\n\
        STRENGTHEN N strength link is important but underweighted; increase (0.8-1.0)\n\n\
        Output exactly one action per link number, nothing else.\n\n\
        Links to review:\n\n",
        batch_num, total_batches);
    for (i, link) in batch.iter().enumerate() {
        // Links are numbered 1-based in the prompt to match the response format.
        let n = i + 1;
        prompt.push_str(&format!(
            "--- Link {} ---\n\
            {} {} (strength={:.2})\n\n\
            Source content:\n{}\n\n\
            Target content:\n{}\n",
            n, link.source_key, link.target_key, link.strength,
            &link.source_content, &link.target_content));
        if !link.target_sections.is_empty() {
            prompt.push_str(
                "\nTarget has sections (consider RETARGET to a more specific one):\n");
            for s in &link.target_sections {
                prompt.push_str(&format!(" - {}\n", s));
            }
        }
        prompt.push('\n');
    }
    prompt
}
// Parse "ACTION N [arg]" lines from the model response into (0-based index,
// action) pairs. Lines that are blank, malformed, out of range, or carry an
// unknown verb are silently skipped.
#[allow(dead_code)]
fn parse_audit_response(response: &str, batch_size: usize) -> Vec<(usize, AuditAction)> {
    let mut actions = Vec::new();
    for raw in response.lines() {
        let line = raw.trim();
        if line.is_empty() { continue; }
        // At most three fields: verb, link number, optional argument.
        let mut fields = line.splitn(3, ' ');
        let (Some(verb), Some(num)) = (fields.next(), fields.next()) else { continue };
        let arg = fields.next();
        // Link numbers are 1-based in the response; store 0-based.
        let idx = match num.parse::<usize>() {
            Ok(n) if (1..=batch_size).contains(&n) => n - 1,
            _ => continue,
        };
        let action = match verb.to_uppercase().as_str() {
            "KEEP" => AuditAction::Keep,
            "DELETE" => AuditAction::Delete,
            "RETARGET" => match arg {
                Some(t) => AuditAction::Retarget(t.trim().to_string()),
                None => continue,
            },
            "WEAKEN" => match arg.and_then(|s| s.trim().parse::<f32>().ok()) {
                Some(s) => AuditAction::Weaken(s),
                None => continue,
            },
            "STRENGTHEN" => match arg.and_then(|s| s.trim().parse::<f32>().ok()) {
                Some(s) => AuditAction::Strengthen(s),
                None => continue,
            },
            _ => continue,
        };
        actions.push((idx, action));
    }
    actions
}
// One reviewer decision for a single link, parsed from an
// "ACTION N [arg]" response line.
#[allow(dead_code)]
enum AuditAction {
    Keep,
    Delete,
    Retarget(String), // new target key
    Weaken(f32),      // new, lower strength
    Strengthen(f32),  // new, higher strength
}
/// Run a full link audit: walk every link, batch to Sonnet, apply results.
///
/// Currently disabled: the line-based text protocol (KEEP/DELETE/RETARGET/
/// WEAKEN/STRENGTHEN) is being replaced by a tool-based agent, so this
/// always returns `Err` without touching the store. The previous
/// text-parsing implementation lived here as a ~180-line commented-out
/// block; it has been removed (version control preserves it) — keeping dead
/// code in comments only rots and misleads.
pub fn link_audit(_store: &mut Store, _apply: bool) -> Result<AuditStats, String> {
    // TODO: Reimplement to use tool-based agent instead of text parsing
    Err("link_audit disabled: needs redesign to use tool-based agent".to_string())
}

View file

@ -1,164 +0,0 @@
// Consolidation pipeline: plan → agents → maintenance → digests → links
//
// consolidate_full() runs the full autonomous consolidation:
// 1. Plan: analyze metrics, allocate agents
// 2. Execute: run each agent (agents apply changes via tool calls)
// 3. Graph maintenance (orphans, degree cap)
// 4. Digest: generate missing daily/weekly/monthly digests
// 5. Links: apply links extracted from digests
// 6. Summary: final metrics comparison
use super::digest;
use crate::agent::oneshot;
use crate::neuro;
use crate::store::{self, Store};
/// Append `line` to the log buffer, terminated with a newline.
fn log_line(buf: &mut String, line: &str) {
    // `+=` on String delegates to push_str; keep entries line-oriented
    // with an explicit trailing newline.
    *buf += line;
    buf.push('\n');
}
/// Run the full autonomous consolidation pipeline with logging.
///
/// Convenience wrapper that runs the pipeline with a no-op progress
/// callback; callers that want live status use the `_with_progress`
/// variant directly.
pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
    // Progress sink that discards every update.
    let ignore = |_: &str| {};
    consolidate_full_with_progress(store, &ignore)
}
/// Core implementation of the full consolidation pipeline.
///
/// Runs six sequential steps — plan, agents, degree cap, digests,
/// digest links, summary — appending a human-readable transcript to a
/// log buffer that is finally persisted as a `_consolidate-log-*` node.
///
/// * `store` — the memory store; it is reloaded from disk between steps
///   because agents apply their changes out of band (via tool calls).
/// * `on_progress` — invoked with a short status string before each
///   long-running phase (for UIs); errors are never reported through it.
fn consolidate_full_with_progress(
    store: &mut Store,
    on_progress: &dyn Fn(&str),
) -> Result<(), String> {
    let start = std::time::Instant::now();
    // Unique store key for the log node, derived from the current time.
    let log_key = format!("_consolidate-log-{}", store::compact_timestamp());
    let mut log_buf = String::new();
    log_line(&mut log_buf, "=== CONSOLIDATE FULL ===");
    log_line(&mut log_buf, &format!("Started: {}", store::format_datetime(store::now_epoch())));
    // Opening metrics — compared against the closing metrics in the summary.
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
    log_line(&mut log_buf, "");
    // --- Step 1: Plan ---
    // Analyze store metrics and decide how many agents of each type to run.
    log_line(&mut log_buf, "--- Step 1: Plan ---");
    on_progress("planning");
    let plan = neuro::consolidation_plan(store);
    let plan_text = neuro::format_plan(&plan);
    log_line(&mut log_buf, &plan_text);
    println!("{}", plan_text);
    let total_agents = plan.total();
    log_line(&mut log_buf, &format!("Total agents to run: {}", total_agents));
    // --- Step 2: Execute agents ---
    // Agents apply their own changes; errors are logged and counted but
    // do not abort the pipeline.
    log_line(&mut log_buf, "\n--- Step 2: Execute agents ---");
    let mut agent_num = 0usize;
    let mut agent_errors = 0usize;
    let batch_size = 5;
    let runs = plan.to_agent_runs(batch_size);
    for (agent_type, count) in &runs {
        agent_num += 1;
        // count == 0 means "unbatched"; only show the batch size when set.
        let label = if *count > 0 {
            format!("[{}/{}] {} (batch={})", agent_num, runs.len(), agent_type, count)
        } else {
            format!("[{}/{}] {}", agent_num, runs.len(), agent_type)
        };
        log_line(&mut log_buf, &format!("\n{}", label));
        on_progress(&label);
        println!("{}", label);
        // Reload store to pick up changes from previous agents
        if agent_num > 1 {
            *store = Store::load()?;
        }
        match oneshot::run_one_agent(store, agent_type, *count, None) {
            Ok(_) => {
                let msg = " Done".to_string();
                log_line(&mut log_buf, &msg);
                on_progress(&msg);
                println!("{}", msg);
            }
            Err(e) => {
                let msg = format!(" ERROR: {}", e);
                log_line(&mut log_buf, &msg);
                eprintln!("{}", msg);
                agent_errors += 1;
            }
        }
    }
    log_line(&mut log_buf, &format!("\nAgents complete: {} run, {} errors",
        agent_num - agent_errors, agent_errors));
    store.save()?;
    // --- Step 3: Cap degree ---
    // Prune edges from over-connected hub nodes (max degree 50).
    log_line(&mut log_buf, "\n--- Step 3: Cap degree ---");
    on_progress("capping degree");
    println!("\n--- Capping node degree ---");
    *store = Store::load()?;
    match store.cap_degree(50) {
        Ok((hubs, pruned)) => {
            store.save()?;
            log_line(&mut log_buf, &format!(" {} hubs capped, {} edges pruned", hubs, pruned));
        }
        Err(e) => log_line(&mut log_buf, &format!(" ERROR: {}", e)),
    }
    // --- Step 4: Digest auto ---
    // Generate any missing daily/weekly/monthly digests; best-effort.
    log_line(&mut log_buf, "\n--- Step 4: Digest auto ---");
    on_progress("generating digests");
    println!("\n--- Generating missing digests ---");
    *store = Store::load()?;
    match digest::digest_auto(store) {
        Ok(()) => log_line(&mut log_buf, " Digests done."),
        Err(e) => {
            let msg = format!(" ERROR in digest auto: {}", e);
            log_line(&mut log_buf, &msg);
            eprintln!("{}", msg);
        }
    }
    // --- Step 5: Apply digest links ---
    // Parse ## Links sections out of digest nodes and apply them as edges.
    log_line(&mut log_buf, "\n--- Step 5: Apply digest links ---");
    on_progress("applying digest links");
    println!("\n--- Applying digest links ---");
    *store = Store::load()?;
    let links = digest::parse_all_digest_links(store);
    let (applied, skipped, fallbacks) = digest::apply_digest_links(store, &links);
    store.save()?;
    log_line(&mut log_buf, &format!(" {} links applied, {} skipped, {} fallbacks",
        applied, skipped, fallbacks));
    // --- Step 6: Summary ---
    let elapsed = start.elapsed();
    log_line(&mut log_buf, "\n--- Summary ---");
    log_line(&mut log_buf, &format!("Finished: {}", store::format_datetime(store::now_epoch())));
    log_line(&mut log_buf, &format!("Duration: {:.0}s", elapsed.as_secs_f64()));
    // Reload once more so the closing metrics reflect every step above.
    *store = Store::load()?;
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
    let summary = format!(
        "\n=== CONSOLIDATE FULL COMPLETE ===\n\
        Duration: {:.0}s\n\
        Agents: {} run, {} errors\n\
        Nodes: {} Relations: {}\n",
        elapsed.as_secs_f64(),
        agent_num - agent_errors, agent_errors,
        store.nodes.len(), store.relations.len(),
    );
    log_line(&mut log_buf, &summary);
    println!("{}", summary);
    // Store the log as a node
    // Best-effort (.ok()): a failed log write must not fail the pipeline.
    store.upsert_provenance(&log_key, &log_buf,
        "consolidate:write").ok();
    store.save()?;
    Ok(())
}

View file

@ -1,390 +1,8 @@
use std::sync::Arc;
// Episodic digest generation: daily, weekly, monthly, auto
//
// Three digest levels form a temporal hierarchy: daily digests summarize
// journal entries, weekly digests summarize dailies, monthly digests
// summarize weeklies. All three share the same generate/auto-detect
// pipeline, parameterized by DigestLevel.
// Digest link parsing: extracts ## Links sections from digest nodes
// and applies them to the memory graph.
use crate::store::{self, Store, new_relation};
use chrono::{Datelike, Duration, Local, NaiveDate};
use regex::Regex;
use std::collections::BTreeSet;
/// Get all store keys for prompt context.
///
/// Returns at most 200 keys, sorted lexicographically so the prompt
/// text is stable across runs.
fn semantic_keys(store: &Store) -> Vec<String> {
    let mut keys = Vec::with_capacity(store.nodes.len());
    keys.extend(store.nodes.keys().cloned());
    keys.sort();
    keys.truncate(200);
    keys
}
// --- Digest level descriptors ---
//
// Each digest level (daily/weekly/monthly) is a static table entry;
// the shared gather/generate pipeline is parameterized by these fields.
#[allow(clippy::type_complexity)]
struct DigestLevel {
    /// Machine name; also the store-key prefix ("daily", "weekly", "monthly").
    name: &'static str,
    /// Capitalized name substituted for {{LEVEL}} in the prompt template.
    title: &'static str,
    /// Period word substituted for {{PERIOD}} ("Date", "Week", "Month").
    period: &'static str,
    /// Heading substituted for {{INPUT_TITLE}} ("Journal entries", ...).
    input_title: &'static str,
    child_name: Option<&'static str>, // None = journal (leaf), Some = child digest files
    /// Expand an arg into (canonical_label, dates covered).
    label_dates: fn(&str) -> Result<(String, Vec<String>), String>,
    /// Map a YYYY-MM-DD date to this level's label.
    date_to_label: fn(&str) -> Option<String>,
}
/// Leaf level: a daily digest summarizes that day's journal entries.
const DAILY: DigestLevel = DigestLevel {
    name: "daily",
    title: "Daily",
    period: "Date",
    input_title: "Journal entries",
    child_name: None,
    // A day's canonical label is the date itself, covering just that date.
    label_dates: |date| Ok((date.to_owned(), vec![date.to_owned()])),
    date_to_label: |date| Some(date.to_owned()),
};
/// Week label and 7 dates (Mon-Sun) for the week containing `date`.
fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
    let nd = NaiveDate::parse_from_str(date, "%Y-%m-%d")
        .map_err(|e| format!("bad date '{}': {}", date, e))?;
    // ISO week year may differ from the calendar year near Jan 1 / Dec 31.
    let iso = nd.iso_week();
    let week_label = format!("{}-W{:02}", iso.year(), iso.week());
    // Walk back to Monday, then enumerate the full Mon..Sun range.
    let monday = nd - Duration::days(i64::from(nd.weekday().num_days_from_monday()));
    let mut dates = Vec::with_capacity(7);
    for offset in 0..7 {
        dates.push((monday + Duration::days(offset)).format("%Y-%m-%d").to_string());
    }
    Ok((week_label, dates))
}
/// Middle level: a weekly digest summarizes the week's daily digests.
const WEEKLY: DigestLevel = DigestLevel {
    name: "weekly",
    title: "Weekly",
    period: "Week",
    input_title: "Daily digests",
    child_name: Some("daily"),
    label_dates: |arg| {
        // Accept either a plain YYYY-MM-DD date (delegate to week_dates)
        // or an explicit "YYYY-W##" week label.
        if !arg.contains('W') {
            return week_dates(arg);
        }
        let (y, w) = arg.split_once("-W")
            .ok_or_else(|| format!("bad week label: {}", arg))?;
        let year = y.parse::<i32>().map_err(|_| format!("bad week year: {}", arg))?;
        let week = w.parse::<u32>().map_err(|_| format!("bad week number: {}", arg))?;
        let monday = NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
            .ok_or_else(|| format!("invalid week: {}", arg))?;
        let mut dates = Vec::with_capacity(7);
        for offset in 0..7 {
            dates.push((monday + Duration::days(offset)).format("%Y-%m-%d").to_string());
        }
        Ok((arg.to_owned(), dates))
    },
    date_to_label: |date| week_dates(date).ok().map(|(label, _)| label),
};
/// Top level: a monthly digest summarizes the month's weekly digests.
const MONTHLY: DigestLevel = DigestLevel {
    name: "monthly",
    title: "Monthly",
    period: "Month",
    input_title: "Weekly digests",
    child_name: Some("weekly"),
    label_dates: |arg| {
        // Accept "YYYY-MM" (7 chars or fewer) or a full "YYYY-MM-DD" date.
        let (year, month) = if arg.len() <= 7 {
            let d = NaiveDate::parse_from_str(&format!("{}-01", arg), "%Y-%m-%d")
                .map_err(|e| format!("bad month '{}': {}", arg, e))?;
            (d.year(), d.month())
        } else {
            let d = NaiveDate::parse_from_str(arg, "%Y-%m-%d")
                .map_err(|e| format!("bad date '{}': {}", arg, e))?;
            (d.year(), d.month())
        };
        let label = format!("{}-{:02}", year, month);
        // from_ymd_opt returns None past the month's last valid day,
        // which terminates the enumeration at the month boundary.
        let dates = (1u32..=31)
            .map_while(|day| NaiveDate::from_ymd_opt(year, month, day))
            .map(|d| d.format("%Y-%m-%d").to_string())
            .collect();
        Ok((label, dates))
    },
    date_to_label: |date| NaiveDate::parse_from_str(date, "%Y-%m-%d")
        .ok()
        .map(|d| format!("{}-{:02}", d.year(), d.month())),
};
const LEVELS: &[&DigestLevel] = &[&DAILY, &WEEKLY, &MONTHLY];
/// Store key for a digest node: "daily-2026-03-04", "weekly-2026-W09", etc.
fn digest_node_key(level_name: &str, label: &str) -> String {
    let mut key = String::with_capacity(level_name.len() + 1 + label.len());
    key.push_str(level_name);
    key.push('-');
    key.push_str(label);
    key
}
// --- Input gathering ---
/// Result of gathering inputs for a digest.
struct GatherResult {
    /// Canonical period label ("2026-03-04", "2026-W09", "2026-03").
    label: String,
    /// (display_label, content) pairs for the prompt.
    inputs: Vec<(String, String)>,
    /// Store keys of source nodes — used to create structural links.
    source_keys: Vec<String>,
}
/// Load child digest content from the store.
///
/// Returns the (label, content) pairs that exist alongside their store
/// keys; labels with no stored digest are silently skipped.
fn load_child_digests(store: &Store, prefix: &str, labels: &[String]) -> (Vec<(String, String)>, Vec<String>) {
    labels.iter()
        .filter_map(|label| {
            let key = digest_node_key(prefix, label);
            store.nodes.get(&key)
                .map(|node| ((label.clone(), node.content.clone()), key))
        })
        .unzip()
}
/// Unified: gather inputs for any digest level.
///
/// For levels with a child (weekly, monthly) the inputs are the child
/// digests covering the period; for the leaf (daily) they are the
/// episodic entries created on that date, in chronological order.
fn gather(level: &DigestLevel, store: &Store, arg: &str) -> Result<GatherResult, String> {
    let (label, dates) = (level.label_dates)(arg)?;
    let (inputs, source_keys) = match level.child_name {
        Some(child_name) => {
            // Map this level's dates through the child's date_to_label
            // to get the set of child labels to load.
            let child = LEVELS.iter()
                .find(|l| l.name == child_name)
                .expect("invalid child_name");
            let child_labels: Vec<String> = dates.iter()
                .filter_map(|d| (child.date_to_label)(d))
                .collect::<BTreeSet<_>>()
                .into_iter()
                .collect();
            load_child_digests(store, child_name, &child_labels)
        }
        None => {
            // Leaf level: scan the store for episodic entries on this date.
            let mut entries: Vec<(String, String, String)> = store.nodes.iter()
                .filter(|(_, n)| {
                    n.node_type == store::NodeType::EpisodicSession
                        && n.created_at > 0
                        && store::format_date(n.created_at) == label
                })
                .map(|(key, n)| (store::format_datetime(n.timestamp), n.content.clone(), key.clone()))
                .collect();
            // Chronological order via the formatted timestamp.
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            let mut inputs = Vec::with_capacity(entries.len());
            let mut keys = Vec::with_capacity(entries.len());
            for (dt, content, key) in entries {
                inputs.push((dt, content));
                keys.push(key);
            }
            (inputs, keys)
        }
    };
    Ok(GatherResult { label, inputs, source_keys })
}
/// Unified: find candidate labels for auto-generation (past, not yet generated).
fn find_candidates(level: &DigestLevel, dates: &[String], today: &str) -> Vec<String> {
    // The current (still-incomplete) period is excluded from candidates.
    let today_label = (level.date_to_label)(today);
    let unique: BTreeSet<String> = dates.iter()
        .filter_map(|d| (level.date_to_label)(d))
        .collect();
    unique.into_iter()
        .filter(|label| today_label.as_deref() != Some(label.as_str()))
        .collect()
}
// --- Unified generator ---
/// Concatenate inputs into one prompt section.
///
/// Daily digests render each entry under a "###" heading with a blank
/// line; higher levels separate child digests with a horizontal rule
/// and a "##" heading.
fn format_inputs(inputs: &[(String, String)], daily: bool) -> String {
    inputs.iter()
        .map(|(label, content)| {
            if daily {
                format!("\n### {}\n\n{}\n", label, content)
            } else {
                format!("\n---\n## {}\n{}\n", label, content)
            }
        })
        .collect()
}
/// Generate one digest: build the prompt from the gathered inputs, run
/// the "digest" agent (which writes the digest node itself via its
/// memory_write tool), then add structural links from every source
/// entry to the new digest node.
///
/// * `label` — canonical period label; combined with `level.name` to
///   form the expected digest store key.
/// * `inputs` — (display_label, content) pairs; empty input is a no-op.
/// * `source_keys` — store keys of the entries/digests being summarized.
fn generate_digest(
    store: &mut Store,
    level: &DigestLevel,
    label: &str,
    inputs: &[(String, String)],
    source_keys: &[String],
) -> Result<(), String> {
    println!("Generating {} digest for {}...", level.name, label);
    if inputs.is_empty() {
        println!(" No inputs found for {}", label);
        return Ok(());
    }
    println!(" {} inputs", inputs.len());
    // Bulleted key list gives the agent valid link targets.
    let keys = semantic_keys(store);
    let keys_text = keys.iter()
        .map(|k| format!(" - {}", k))
        .collect::<Vec<_>>()
        .join("\n");
    // Daily (leaf) digests use the journal formatting variant.
    let content = format_inputs(inputs, level.child_name.is_none());
    let covered = inputs.iter()
        .map(|(l, _)| l.as_str())
        .collect::<Vec<_>>()
        .join(", ");
    // Load agent def — drives template, temperature, priority, tools
    let def = super::defs::get_def("digest")
        .ok_or("no digest agent definition")?;
    let template = def.steps.first()
        .map(|s| s.prompt.clone())
        .ok_or("digest agent has no prompt")?;
    // Substitute digest-specific and config placeholders, then resolve
    // standard {{node:...}} etc. via the placeholder system
    let cfg = crate::config::get();
    let partial = template
        .replace("{agent_name}", &def.agent)
        .replace("{user_name}", &cfg.user_name)
        .replace("{assistant_name}", &cfg.assistant_name)
        .replace("{{LEVEL}}", level.title)
        .replace("{{PERIOD}}", level.period)
        .replace("{{INPUT_TITLE}}", level.input_title)
        .replace("{{LABEL}}", label)
        .replace("{{CONTENT}}", &content)
        .replace("{{COVERED}}", &covered)
        .replace("{{KEYS}}", &keys_text);
    let graph = store.build_graph();
    let (prompt, _) = super::defs::resolve_placeholders(
        &partial, store, &graph, &[], 0,
    );
    println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
    // Log to file like other agents
    let log_dir = dirs::home_dir().unwrap_or_default()
        .join(".consciousness/logs/llm/digest");
    std::fs::create_dir_all(&log_dir).ok();
    let log_path = log_dir.join(format!("{}.txt", crate::store::compact_timestamp()));
    // NOTE(review): `_log` is defined but never invoked in this function
    // (underscore-prefixed) — file logging appears disabled here; confirm
    // before removing.
    let _log = move |msg: &str| {
        use std::io::Write;
        if let Ok(mut f) = std::fs::OpenOptions::new()
            .create(true).append(true).open(&log_path)
        {
            let _ = writeln!(f, "{}", msg);
        }
    };
    println!(" Calling LLM...");
    let prompts = vec![prompt];
    let phases: Vec<String> = def.steps.iter().map(|s| s.phase.clone()).collect();
    // Filter tools based on agent def
    let all_tools = crate::agent::tools::memory_and_journal_tools();
    let tools: Vec<_> = if def.tools.is_empty() {
        all_tools.to_vec()
    } else {
        all_tools.into_iter()
            .filter(|t| def.tools.iter().any(|w| w == &t.name))
            .collect()
    };
    // Agent writes digest via memory_write tool - we just run it
    crate::agent::oneshot::call_api_with_tools_sync(
        &def.agent, &prompts, &phases, def.temperature, def.priority,
        &tools, None)?;
    // Structural links: connect all source entries to this digest
    let key = digest_node_key(level.name, label);
    let mut linked = 0;
    for source_key in source_keys {
        // Skip if link already exists
        // NOTE(review): on the first iteration this check runs against the
        // pre-agent in-memory store (the reload happens below) — presumably
        // fine since the agent only adds the digest node; confirm.
        let exists = store.relations.iter().any(|r|
            !r.deleted && r.source_key == *source_key && r.target_key == key);
        if exists { continue; }
        // Reload store to pick up agent's writes
        *store = Store::load().map_err(|e| format!("reload: {}", e))?;
        // All-zero UUID is used as the "not found" sentinel.
        let source_uuid = store.nodes.get(source_key)
            .map(|n| n.uuid).unwrap_or([0u8; 16]);
        let target_uuid = store.nodes.get(&key)
            .map(|n| n.uuid).unwrap_or([0u8; 16]);
        if target_uuid == [0u8; 16] {
            println!(" Warning: digest key {} not found after agent run", key);
            continue;
        }
        let mut rel = new_relation(
            source_uuid, target_uuid,
            store::RelationType::Link, 0.8,
            source_key, &key,
        );
        rel.provenance = "digest:structural".to_string();
        store.add_relation(rel)?;
        linked += 1;
    }
    if linked > 0 {
        println!(" Linked {} source entries → {}", linked, key);
        store.save()?;
    }
    println!(" Done");
    Ok(())
}
// --- Public API ---

/// Generate a digest of `level_name` ("daily"/"weekly"/"monthly") for
/// the period identified by `arg` (a date or period label).
pub fn generate(store: &mut Store, level_name: &str, arg: &str) -> Result<(), String> {
    let level = match LEVELS.iter().find(|l| l.name == level_name) {
        Some(l) => l,
        None => return Err(format!("unknown digest level: {}", level_name)),
    };
    let gathered = gather(level, store, arg)?;
    generate_digest(store, level, &gathered.label, &gathered.inputs, &gathered.source_keys)
}
// --- Auto-detect and generate missing digests ---

/// Scan the store and generate every past digest (daily, weekly,
/// monthly) that has inputs but does not exist yet. The current,
/// still-incomplete period is always skipped.
pub fn digest_auto(store: &mut Store) -> Result<(), String> {
    let today = Local::now().format("%Y-%m-%d").to_string();
    // Distinct dates that have at least one episodic entry.
    let entry_dates: BTreeSet<String> = store.nodes.values()
        .filter(|n| n.node_type == store::NodeType::EpisodicSession && n.created_at > 0)
        .map(|n| store::format_date(n.created_at))
        .collect();
    let dates: Vec<String> = entry_dates.into_iter().collect();
    let mut total = 0u32;
    for level in LEVELS {
        let mut generated = 0u32;
        let mut skipped = 0u32;
        for arg in &find_candidates(level, &dates, &today) {
            let result = gather(level, store, arg)?;
            let key = digest_node_key(level.name, &result.label);
            if store.nodes.contains_key(&key) {
                // Already produced by an earlier run.
                skipped += 1;
                continue;
            }
            if result.inputs.is_empty() {
                continue;
            }
            println!("[auto] Missing {} digest for {}", level.name, result.label);
            generate_digest(store, level, &result.label, &result.inputs, &result.source_keys)?;
            generated += 1;
        }
        println!("[auto] {}: {} generated, {} existed", level.name, generated, skipped);
        total += generated;
    }
    if total == 0 {
        println!("[auto] All digests up to date.");
    } else {
        println!("[auto] Generated {} total digests.", total);
    }
    Ok(())
}
// --- Digest link parsing ---
// Replaces digest-link-parser.py: parses ## Links sections from digest
// files and applies them to the memory graph.
/// A parsed link from a digest's Links section.
pub struct DigestLink {
@ -583,110 +201,3 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, us
(applied, skipped, fallbacks)
}
// --- Tool interface for digest generation (added 2026-04-04) ---
/// Helper: extract string argument from tool call
fn get_str_required(args: &serde_json::Value, name: &str) -> Result<String, String> {
    match args.get(name).and_then(|v| v.as_str()) {
        Some(s) => Ok(s.to_owned()),
        None => Err(format!("{} is required", name)),
    }
}
/// Wrap a Result<T, String> for use in anyhow handlers.
fn str_err<T>(r: Result<T, String>) -> anyhow::Result<T> {
    match r {
        Ok(v) => Ok(v),
        Err(msg) => Err(anyhow::anyhow!("{}", msg)),
    }
}
/// digest_daily tool handler: generate a daily digest
async fn handle_digest_daily(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    // Required "date" argument (YYYY-MM-DD).
    let day = str_err(get_str_required(&args, "date"))?;
    let mut mem = str_err(Store::load())?;
    str_err(generate(&mut mem, "daily", &day))?;
    Ok(format!("Daily digest generated for {}", day))
}
/// digest_weekly tool handler: generate a weekly digest
async fn handle_digest_weekly(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    // Required "week" argument (YYYY-W## or YYYY-MM-DD).
    let week = str_err(get_str_required(&args, "week"))?;
    let mut mem = str_err(Store::load())?;
    str_err(generate(&mut mem, "weekly", &week))?;
    Ok(format!("Weekly digest generated for {}", week))
}
/// digest_monthly tool handler: generate a monthly digest
async fn handle_digest_monthly(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    // Required "month" argument (YYYY-MM or YYYY-MM-DD).
    let month_label = str_err(get_str_required(&args, "month"))?;
    let mut mem = str_err(Store::load())?;
    str_err(generate(&mut mem, "monthly", &month_label))?;
    Ok(format!("Monthly digest generated for {}", month_label))
}
/// digest_auto tool handler: auto-generate all missing digests
async fn handle_digest_auto(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    _args: serde_json::Value,
) -> anyhow::Result<String> {
    let mut mem = str_err(Store::load())?;
    str_err(digest_auto(&mut mem))?;
    Ok(String::from("Auto-generated all missing digests"))
}
/// digest_links tool handler: parse and apply digest links
async fn handle_digest_links(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    _args: serde_json::Value,
) -> anyhow::Result<String> {
    let mut mem = str_err(Store::load())?;
    let parsed = parse_all_digest_links(&mem);
    let (applied, skipped, fallbacks) = apply_digest_links(&mut mem, &parsed);
    str_err(mem.save())?;
    Ok(format!("Applied {} digest links ({} skipped, {} fallback)", applied, skipped, fallbacks))
}
/// Return digest tools array for the tool registry
///
/// Each entry pairs a JSON-schema parameter description with an async
/// handler; the handler futures are boxed and pinned so the registry
/// can store them uniformly behind `Arc`.
pub fn digest_tools() -> [super::super::agent::tools::Tool; 5] {
    use super::super::agent::tools::Tool;
    [
        // Generate one daily digest for an explicit date.
        Tool {
            name: "digest_daily",
            description: "Generate a daily digest from journal entries.",
            parameters_json: r#"{"type":"object","properties":{"date":{"type":"string","description":"Date in YYYY-MM-DD format"}}, "required":["date"]}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_daily(_a, v).await })),
        },
        // Generate one weekly digest; accepts a week label or any date in the week.
        Tool {
            name: "digest_weekly",
            description: "Generate a weekly digest from daily digests.",
            parameters_json: r#"{"type":"object","properties":{"week":{"type":"string","description":"Week label (YYYY-W##) or date (YYYY-MM-DD)"}}, "required":["week"]}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_weekly(_a, v).await })),
        },
        // Generate one monthly digest; accepts a month label or any date in the month.
        Tool {
            name: "digest_monthly",
            description: "Generate a monthly digest from weekly digests.",
            parameters_json: r#"{"type":"object","properties":{"month":{"type":"string","description":"Month label (YYYY-MM) or date (YYYY-MM-DD)"}}, "required":["month"]}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_monthly(_a, v).await })),
        },
        // Fill in every missing past digest at all three levels; no arguments.
        Tool {
            name: "digest_auto",
            description: "Auto-generate all missing digests (daily, weekly, monthly) for past dates that have content but no digest yet.",
            parameters_json: r#"{"type":"object","properties":{}}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_auto(_a, v).await })),
        },
        // Apply ## Links sections from digest nodes to the graph; no arguments.
        Tool {
            name: "digest_links",
            description: "Parse and apply structural links from digest nodes to the memory graph.",
            parameters_json: r#"{"type":"object","properties":{}}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_links(_a, v).await })),
        },
    ]
}

View file

@ -1,24 +1,5 @@
// Agent layer: LLM-powered operations on the memory graph
//
// Everything here calls external models (Sonnet, Haiku) or orchestrates
// sequences of such calls. The core graph infrastructure (store, graph,
// spectral, search, similarity) lives at the crate root.
//
// llm — model invocation, response parsing
// prompts — prompt generation from store data
// defs — agent file loading and placeholder resolution
// audit — link quality review via Sonnet
// consolidate — full consolidation pipeline
// knowledge — agent execution, conversation fragment selection
// enrich — journal enrichment, experience mining
// digest — episodic digest generation (daily/weekly/monthly)
// daemon — background job scheduler
// transcript — shared JSONL transcript parsing
//
// The session hook (context injection, agent orchestration) moved to claude/hook.
pub mod audit;
pub mod consolidate;
pub mod daemon;
pub mod defs;
pub mod digest;