port digest-link-parser, journal-agent, apply-consolidation to Rust
Three Python scripts (858 lines) replaced with native Rust subcommands:

- digest-links [--apply]: parses ## Links sections from episodic digests, normalizes keys, applies them to the graph with section-level fallback
- journal-enrich JSONL TEXT [LINE]: extracts the conversation from a JSONL transcript, calls Sonnet for link proposals and source location
- apply-consolidation [--apply]: reads consolidation reports and sends them to Sonnet for structured action extraction (links, categorizations, manual items)

Shared infrastructure: call_sonnet is now pub(crate); a new parse_json_response helper parses Sonnet output, stripping markdown fences.
This commit is contained in:
parent
91122fe1d1
commit
59e2f39479
2 changed files with 818 additions and 2 deletions
761
src/digest.rs
761
src/digest.rs
|
|
@ -12,7 +12,7 @@ use crate::capnp_store::{self, Store};
|
|||
|
||||
use regex::Regex;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
|
||||
fn memory_dir() -> PathBuf {
|
||||
|
|
@ -32,7 +32,7 @@ fn agent_results_dir() -> PathBuf {
|
|||
}
|
||||
|
||||
/// Call Sonnet via claude CLI. Returns the response text.
|
||||
fn call_sonnet(prompt: &str, _timeout_secs: u64) -> Result<String, String> {
|
||||
pub(crate) fn call_sonnet(prompt: &str, _timeout_secs: u64) -> Result<String, String> {
|
||||
// Write prompt to temp file (claude CLI needs file input for large prompts)
|
||||
let tmp = std::env::temp_dir().join(format!("poc-digest-{}.txt", std::process::id()));
|
||||
fs::write(&tmp, prompt)
|
||||
|
|
@ -604,3 +604,760 @@ pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String
|
|||
println!(" Done: {} lines", digest.lines().count());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// --- Digest link parsing ---
|
||||
// Replaces digest-link-parser.py: parses ## Links sections from digest
|
||||
// files and applies them to the memory graph.
|
||||
|
||||
/// A parsed link from a digest's `## Links` section.
///
/// `source` and `target` are normalized poc-memory keys, `reason` is the
/// optional parenthesized explanation from the bullet, and `file` records
/// which digest file the link was parsed from.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DigestLink {
    // Normalized source key, e.g. "daily-2026-02-04.md".
    pub source: String,
    // Normalized target key, may carry a "#section" anchor.
    pub target: String,
    // Free-text reason from the trailing "(...)" group; empty when absent.
    pub reason: String,
    // File name of the digest this link came from.
    pub file: String,
}
|
||||
|
||||
/// Normalize a raw link target to a poc-memory key.
///
/// Handles backtick-quoted keys, self-references ("this ..."), the
/// `daily/…` → `daily-…` path form, bare `YYYY-MM-DD` dates, and the
/// `.md` extension (including `file#section` anchors). Returns an empty
/// string for self-references; the caller substitutes the digest's own
/// key for those.
fn normalize_link_key(raw: &str) -> String {
    let key = raw.trim().trim_matches('`').trim();
    if key.is_empty() { return String::new(); }

    // Self-references ("this daily", "this entry", ...) resolve to the
    // digest itself; signal that with an empty string.
    if key.to_lowercase().starts_with("this ") { return String::new(); }

    let mut key = key.to_string();

    // weekly/2026-W06 → weekly-2026-W06, etc.
    if let Some((prefix, rest)) = key.clone().split_once('/') {
        if matches!(prefix, "daily" | "weekly" | "monthly") {
            key = format!("{}-{}", prefix, rest);
        }
    }

    // daily-2026-02-04 → daily-2026-02-04.md. The check (known prefix
    // followed by four digits) replaces the per-call regex
    // `^(daily|weekly|monthly)-\d{4}` with plain byte tests — same
    // behavior, no regex compilation on every call.
    let has_digest_prefix = ["daily-", "weekly-", "monthly-"].iter().any(|p| {
        key.strip_prefix(p).map_or(false, |rest| {
            rest.len() >= 4 && rest.as_bytes()[..4].iter().all(|b| b.is_ascii_digit())
        })
    });
    if has_digest_prefix && !key.ends_with(".md") {
        key.push_str(".md");
    }

    // Bare date → daily digest (was regex `^\d{4}-\d{2}-\d{2}$`).
    if is_iso_date(key.strip_suffix(".md").unwrap_or(&key)) {
        let date = key.strip_suffix(".md").unwrap_or(&key);
        key = format!("daily-{}.md", date);
    }

    // Ensure .md extension, respecting a #section anchor if present.
    if key.contains('#') {
        let (file, section) = key.split_once('#').unwrap();
        if !file.ends_with(".md") {
            key = format!("{}.md#{}", file, section);
        }
    } else if !key.ends_with(".md") && !key.contains('/') && !key.starts_with("NEW:") {
        key.push_str(".md");
    }

    key
}

/// True for exactly `YYYY-MM-DD` (digit-count check only, no range
/// validation — matching the original regex `^\d{4}-\d{2}-\d{2}$`).
fn is_iso_date(s: &str) -> bool {
    let b = s.as_bytes();
    b.len() == 10
        && b[4] == b'-'
        && b[7] == b'-'
        && [0usize, 1, 2, 3, 5, 6, 8, 9].iter().all(|&i| b[i].is_ascii_digit())
}
|
||||
|
||||
/// Parse the Links section from a single digest file.
|
||||
fn parse_digest_file_links(path: &Path) -> Vec<DigestLink> {
|
||||
let content = match fs::read_to_string(path) {
|
||||
Ok(c) => c,
|
||||
Err(_) => return Vec::new(),
|
||||
};
|
||||
|
||||
let digest_name = path.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("");
|
||||
let digest_key = format!("{}.md", digest_name);
|
||||
let filename = path.file_name()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let link_re = Regex::new(r"^-\s+(.+?)\s*[→↔←]\s*(.+?)(?:\s*\((.+?)\))?\s*$").unwrap();
|
||||
let header_re = Regex::new(r"^##\s+Links").unwrap();
|
||||
let mut links = Vec::new();
|
||||
let mut in_links = false;
|
||||
|
||||
for line in content.lines() {
|
||||
if header_re.is_match(line) {
|
||||
in_links = true;
|
||||
continue;
|
||||
}
|
||||
if in_links && line.starts_with("## ") {
|
||||
in_links = false;
|
||||
continue;
|
||||
}
|
||||
if !in_links { continue; }
|
||||
if line.starts_with("###") || line.starts_with("**") { continue; }
|
||||
|
||||
if let Some(cap) = link_re.captures(line) {
|
||||
let raw_source = cap[1].trim();
|
||||
let raw_target = cap[2].trim();
|
||||
let reason = cap.get(3).map(|m| m.as_str().to_string()).unwrap_or_default();
|
||||
|
||||
let mut source = normalize_link_key(raw_source);
|
||||
let mut target = normalize_link_key(raw_target);
|
||||
|
||||
// Replace self-references with digest key
|
||||
if source.is_empty() { source = digest_key.clone(); }
|
||||
if target.is_empty() { target = digest_key.clone(); }
|
||||
|
||||
// Handle "this daily/weekly/monthly" in raw text
|
||||
let raw_s_lower = raw_source.to_lowercase();
|
||||
let raw_t_lower = raw_target.to_lowercase();
|
||||
if raw_s_lower.contains("this daily") || raw_s_lower.contains("this weekly")
|
||||
|| raw_s_lower.contains("this monthly")
|
||||
{
|
||||
source = digest_key.clone();
|
||||
}
|
||||
if raw_t_lower.contains("this daily") || raw_t_lower.contains("this weekly")
|
||||
|| raw_t_lower.contains("this monthly")
|
||||
{
|
||||
target = digest_key.clone();
|
||||
}
|
||||
|
||||
// Skip NEW: and self-links
|
||||
if source.starts_with("NEW:") || target.starts_with("NEW:") { continue; }
|
||||
if source == target { continue; }
|
||||
|
||||
links.push(DigestLink { source, target, reason, file: filename.clone() });
|
||||
}
|
||||
}
|
||||
|
||||
links
|
||||
}
|
||||
|
||||
/// Parse links from all digest files in the episodic dir.
|
||||
pub fn parse_all_digest_links() -> Vec<DigestLink> {
|
||||
let dir = episodic_dir();
|
||||
let mut all_links = Vec::new();
|
||||
|
||||
for pattern in &["daily-*.md", "weekly-*.md", "monthly-*.md"] {
|
||||
if let Ok(entries) = fs::read_dir(&dir) {
|
||||
let mut files: Vec<PathBuf> = entries
|
||||
.filter_map(|e| e.ok())
|
||||
.map(|e| e.path())
|
||||
.filter(|p| {
|
||||
p.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.map(|n| {
|
||||
let prefix = pattern.split('*').next().unwrap_or("");
|
||||
n.starts_with(prefix) && n.ends_with(".md")
|
||||
})
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.collect();
|
||||
files.sort();
|
||||
for path in files {
|
||||
all_links.extend(parse_digest_file_links(&path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Deduplicate by (source, target) pair
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
all_links.retain(|link| seen.insert((link.source.clone(), link.target.clone())));
|
||||
|
||||
all_links
|
||||
}
|
||||
|
||||
/// Apply parsed digest links to the store.
|
||||
pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, usize, usize) {
|
||||
let mut applied = 0usize;
|
||||
let mut skipped = 0usize;
|
||||
let mut fallbacks = 0usize;
|
||||
|
||||
for link in links {
|
||||
// Try resolving both keys
|
||||
let source = match store.resolve_key(&link.source) {
|
||||
Ok(s) => s,
|
||||
Err(_) => {
|
||||
// Try stripping section anchor as fallback
|
||||
if let Some(base) = link.source.split('#').next() {
|
||||
match store.resolve_key(base) {
|
||||
Ok(s) => { fallbacks += 1; s }
|
||||
Err(_) => { skipped += 1; continue; }
|
||||
}
|
||||
} else {
|
||||
skipped += 1; continue;
|
||||
}
|
||||
}
|
||||
};
|
||||
let target = match store.resolve_key(&link.target) {
|
||||
Ok(t) => t,
|
||||
Err(_) => {
|
||||
if let Some(base) = link.target.split('#').next() {
|
||||
match store.resolve_key(base) {
|
||||
Ok(t) => { fallbacks += 1; t }
|
||||
Err(_) => { skipped += 1; continue; }
|
||||
}
|
||||
} else {
|
||||
skipped += 1; continue;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if source == target { skipped += 1; continue; }
|
||||
|
||||
// Check if link already exists
|
||||
let exists = store.relations.iter().any(|r|
|
||||
r.source_key == source && r.target_key == target && !r.deleted
|
||||
);
|
||||
if exists { skipped += 1; continue; }
|
||||
|
||||
let source_uuid = match store.nodes.get(&source) {
|
||||
Some(n) => n.uuid,
|
||||
None => { skipped += 1; continue; }
|
||||
};
|
||||
let target_uuid = match store.nodes.get(&target) {
|
||||
Some(n) => n.uuid,
|
||||
None => { skipped += 1; continue; }
|
||||
};
|
||||
|
||||
let rel = Store::new_relation(
|
||||
source_uuid, target_uuid,
|
||||
capnp_store::RelationType::Link,
|
||||
0.5,
|
||||
&source, &target,
|
||||
);
|
||||
if store.add_relation(rel).is_ok() {
|
||||
println!(" + {} → {}", source, target);
|
||||
applied += 1;
|
||||
}
|
||||
}
|
||||
|
||||
(applied, skipped, fallbacks)
|
||||
}
|
||||
|
||||
// --- Journal enrichment ---
|
||||
// Replaces journal-agent.py: enriches journal entries by sending the
|
||||
// conversation context to Sonnet for link proposals and source location.
|
||||
|
||||
/// Extract user/assistant messages with line numbers from a JSONL transcript.
|
||||
fn extract_conversation(jsonl_path: &str) -> Result<Vec<(usize, String, String)>, String> {
|
||||
let content = fs::read_to_string(jsonl_path)
|
||||
.map_err(|e| format!("read {}: {}", jsonl_path, e))?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
for (i, line) in content.lines().enumerate() {
|
||||
let obj: serde_json::Value = match serde_json::from_str(line) {
|
||||
Ok(v) => v,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
let msg_type = obj.get("type").and_then(|v| v.as_str()).unwrap_or("");
|
||||
if msg_type != "user" && msg_type != "assistant" { continue; }
|
||||
|
||||
let msg = obj.get("message").unwrap_or(&obj);
|
||||
let content = msg.get("content");
|
||||
|
||||
let text = match content {
|
||||
Some(serde_json::Value::String(s)) => s.clone(),
|
||||
Some(serde_json::Value::Array(arr)) => {
|
||||
arr.iter()
|
||||
.filter_map(|c| {
|
||||
if let Some(t) = c.get("text").and_then(|v| v.as_str()) {
|
||||
Some(t.to_string())
|
||||
} else {
|
||||
c.as_str().map(|s| s.to_string())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
}
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let text = text.trim().to_string();
|
||||
if text.is_empty() { continue; }
|
||||
|
||||
messages.push((i + 1, msg_type.to_string(), text));
|
||||
}
|
||||
|
||||
Ok(messages)
|
||||
}
|
||||
|
||||
/// Format conversation messages for the prompt (truncating long messages).
///
/// Messages over 2000 bytes are cut at the largest char boundary at or
/// below byte 1800 and suffixed with "...[truncated]".
fn format_conversation(messages: &[(usize, String, String)]) -> String {
    messages.iter()
        .map(|(line, role, text)| {
            let text = if text.len() > 2000 {
                // `str::floor_char_boundary` is nightly-only; find the
                // largest char boundary <= 1800 by hand so this builds on
                // stable. Terminates: 0 is always a boundary.
                let mut cut = 1800;
                while !text.is_char_boundary(cut) {
                    cut -= 1;
                }
                format!("{}...[truncated]", &text[..cut])
            } else {
                text.clone()
            };
            format!("L{} [{}]: {}", line, role, text)
        })
        .collect::<Vec<_>>()
        .join("\n\n")
}
|
||||
|
||||
/// Build the Sonnet prompt for journal enrichment.
///
/// `grep_line` is the line hint from a grep-based pre-pass (0 = no match).
/// NOTE: the JSON example uses `{{`/`}}` escapes (rendering as literal
/// `{`/`}`); the previous `{{{{`/`}}}}` rendered as literal `{{`/`}}`,
/// showing the model a corrupted JSON example.
fn build_journal_prompt(
    entry_text: &str,
    conversation: &str,
    keys: &[String],
    grep_line: usize,
) -> String {
    let keys_text: String = keys.iter()
        .map(|k| format!(" - {}", k))
        .collect::<Vec<_>>()
        .join("\n");

    format!(r#"You are a memory agent for an AI named ProofOfConcept. A journal entry
was just written. Your job is to enrich it by finding its exact source in the
conversation and linking it to semantic memory.

## Task 1: Find exact source

The journal entry below was written during or after a conversation. Find the
exact region of the conversation it refers to — the exchange where the topic
was discussed. Return the start and end line numbers.

The grep-based approximation placed it near line {grep_line} (0 = no match).
Use that as a hint but find the true boundaries.

## Task 2: Propose semantic links

Which existing semantic memory nodes should this journal entry be linked to?
Look for:
- Concepts discussed in the entry
- Skills/patterns demonstrated
- People mentioned
- Projects or subsystems involved
- Emotional themes

Each link should be bidirectional — the entry documents WHEN something happened,
the semantic node documents WHAT it is. Together they let you traverse:
"What was I doing on this day?" ↔ "When did I learn about X?"

## Task 3: Spot missed insights

Read the conversation around the journal entry. Is there anything worth
capturing that the entry missed? A pattern, a decision, an insight, something
Kent said that's worth remembering? Be selective — only flag genuinely valuable
things.

## Output format (JSON)

Return ONLY a JSON object:
```json
{{
  "source_start": 1234,
  "source_end": 1256,
  "links": [
    {{"target": "memory-key#section", "reason": "why this link exists"}}
  ],
  "missed_insights": [
    {{"text": "insight text", "suggested_key": "where it belongs"}}
  ],
  "temporal_tags": ["2026-02-28", "topology-metrics", "poc-memory"]
}}
```

For links, use existing keys from the semantic memory list below. If nothing
fits, suggest a new key with a NOTE prefix: "NOTE:new-topic-name".

---

## Journal entry

{entry_text}

---

## Semantic memory nodes (available link targets)

{keys_text}

---

## Full conversation (with line numbers)

{conversation}
"#)
}
|
||||
|
||||
/// Parse a JSON response from Sonnet, handling markdown fences.
|
||||
pub(crate) fn parse_json_response(response: &str) -> Result<serde_json::Value, String> {
|
||||
// Strip markdown fences
|
||||
let cleaned = response.trim();
|
||||
let cleaned = cleaned.strip_prefix("```json").unwrap_or(cleaned);
|
||||
let cleaned = cleaned.strip_prefix("```").unwrap_or(cleaned);
|
||||
let cleaned = cleaned.strip_suffix("```").unwrap_or(cleaned);
|
||||
let cleaned = cleaned.trim();
|
||||
|
||||
// Try direct parse
|
||||
if let Ok(v) = serde_json::from_str(cleaned) {
|
||||
return Ok(v);
|
||||
}
|
||||
|
||||
// Try to find JSON object or array
|
||||
let re_obj = Regex::new(r"\{[\s\S]*\}").unwrap();
|
||||
let re_arr = Regex::new(r"\[[\s\S]*\]").unwrap();
|
||||
|
||||
if let Some(m) = re_obj.find(cleaned) {
|
||||
if let Ok(v) = serde_json::from_str(m.as_str()) {
|
||||
return Ok(v);
|
||||
}
|
||||
}
|
||||
if let Some(m) = re_arr.find(cleaned) {
|
||||
if let Ok(v) = serde_json::from_str(m.as_str()) {
|
||||
return Ok(v);
|
||||
}
|
||||
}
|
||||
|
||||
Err(format!("no valid JSON in response: {}...", &cleaned[..cleaned.len().min(200)]))
|
||||
}
|
||||
|
||||
/// Enrich a journal entry with conversation context and link proposals.
|
||||
pub fn journal_enrich(
|
||||
store: &mut Store,
|
||||
jsonl_path: &str,
|
||||
entry_text: &str,
|
||||
grep_line: usize,
|
||||
) -> Result<(), String> {
|
||||
println!("Extracting conversation from {}...", jsonl_path);
|
||||
let messages = extract_conversation(jsonl_path)?;
|
||||
let conversation = format_conversation(&messages);
|
||||
println!(" {} messages, {} chars", messages.len(), conversation.len());
|
||||
|
||||
let keys = semantic_keys(store);
|
||||
println!(" {} semantic keys", keys.len());
|
||||
|
||||
let prompt = build_journal_prompt(entry_text, &conversation, &keys, grep_line);
|
||||
println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
|
||||
|
||||
println!(" Calling Sonnet...");
|
||||
let response = call_sonnet(&prompt, 300)?;
|
||||
|
||||
let result = parse_json_response(&response)?;
|
||||
|
||||
// Report results
|
||||
let source_start = result.get("source_start").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
let source_end = result.get("source_end").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
let links = result.get("links").and_then(|v| v.as_array());
|
||||
let insights = result.get("missed_insights").and_then(|v| v.as_array());
|
||||
|
||||
println!(" Source: L{}-L{}", source_start, source_end);
|
||||
println!(" Links: {}", links.map_or(0, |l| l.len()));
|
||||
println!(" Missed insights: {}", insights.map_or(0, |l| l.len()));
|
||||
|
||||
// Apply links
|
||||
if let Some(links) = links {
|
||||
for link in links {
|
||||
let target = link.get("target").and_then(|v| v.as_str()).unwrap_or("");
|
||||
let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");
|
||||
if target.is_empty() || target.starts_with("NOTE:") {
|
||||
if let Some(note) = target.strip_prefix("NOTE:") {
|
||||
println!(" NOTE: {} — {}", note, reason);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Resolve target and find journal node
|
||||
let resolved = match store.resolve_key(target) {
|
||||
Ok(r) => r,
|
||||
Err(_) => { println!(" SKIP {} (not in graph)", target); continue; }
|
||||
};
|
||||
let source_key = match store.find_journal_node(entry_text) {
|
||||
Some(k) => k,
|
||||
None => { println!(" SKIP {} (no matching journal node)", target); continue; }
|
||||
};
|
||||
|
||||
let source_uuid = match store.nodes.get(&source_key) {
|
||||
Some(n) => n.uuid,
|
||||
None => continue,
|
||||
};
|
||||
let target_uuid = match store.nodes.get(&resolved) {
|
||||
Some(n) => n.uuid,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let rel = Store::new_relation(
|
||||
source_uuid, target_uuid,
|
||||
capnp_store::RelationType::Link,
|
||||
0.5,
|
||||
&source_key, &resolved,
|
||||
);
|
||||
if store.add_relation(rel).is_ok() {
|
||||
println!(" LINK {} → {} ({})", source_key, resolved, reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Save result to agent-results
|
||||
let timestamp = capnp_store::format_datetime(capnp_store::now_epoch())
|
||||
.replace([':', '-'], "");
|
||||
let result_file = agent_results_dir()
|
||||
.join(format!("{}.json", timestamp));
|
||||
let output = serde_json::json!({
|
||||
"timestamp": timestamp,
|
||||
"jsonl_path": jsonl_path,
|
||||
"entry_text": &entry_text[..entry_text.len().min(500)],
|
||||
"agent_result": result,
|
||||
});
|
||||
fs::write(&result_file, serde_json::to_string_pretty(&output).unwrap())
|
||||
.map_err(|e| format!("write {}: {}", result_file.display(), e))?;
|
||||
println!(" Results saved: {}", result_file.display());
|
||||
|
||||
store.save()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// --- Apply consolidation ---
|
||||
// Replaces apply-consolidation.py: reads consolidation reports, sends
|
||||
// to Sonnet for structured action extraction, then applies them.
|
||||
|
||||
/// Find the most recent set of consolidation reports.
|
||||
fn find_consolidation_reports() -> Vec<PathBuf> {
|
||||
let dir = agent_results_dir();
|
||||
let mut reports: Vec<PathBuf> = fs::read_dir(&dir)
|
||||
.map(|entries| {
|
||||
entries.filter_map(|e| e.ok())
|
||||
.map(|e| e.path())
|
||||
.filter(|p| {
|
||||
p.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.map(|n| n.starts_with("consolidation-") && n.ends_with(".md"))
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
reports.sort();
|
||||
reports.reverse();
|
||||
|
||||
if reports.is_empty() { return reports; }
|
||||
|
||||
// Group by timestamp (last segment of stem before .md)
|
||||
let latest_ts = reports[0].file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.rsplit('-').next().unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
reports.retain(|r| {
|
||||
r.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("")
|
||||
.ends_with(latest_ts.as_str())
|
||||
});
|
||||
|
||||
reports
|
||||
}
|
||||
|
||||
/// Concatenate the given reports and wrap them in the action-extraction
/// prompt for Sonnet. Fails with a message naming the unreadable file.
fn build_consolidation_prompt(reports: &[PathBuf]) -> Result<String, String> {
    let mut report_text = String::new();
    for report in reports {
        let body = fs::read_to_string(report)
            .map_err(|e| format!("read {}: {}", report.display(), e))?;
        let title = report.file_stem().and_then(|s| s.to_str()).unwrap_or("");
        let divider = "=".repeat(60);
        report_text.push_str(&format!("\n{}\n## Report: {}\n\n{}\n", divider, title, body));
    }

    Ok(format!(r#"You are converting consolidation analysis reports into structured actions.

Read the reports below and extract CONCRETE, EXECUTABLE actions.
Output ONLY a JSON array. Each action is an object with these fields:

For adding cross-links:
{{"action": "link", "source": "file.md#section", "target": "file.md#section", "reason": "brief explanation"}}

For categorizing nodes:
{{"action": "categorize", "key": "file.md#section", "category": "core|tech|obs|task", "reason": "brief"}}

For things that need manual attention (splitting files, creating new files, editing content):
{{"action": "manual", "priority": "high|medium|low", "description": "what needs to be done"}}

Rules:
- Only output actions that are safe and reversible
- Links are the primary action — focus on those
- Use exact file names and section slugs from the reports
- For categorize: core=identity/relationship, tech=bcachefs/code, obs=experience, task=work item
- For manual items: include enough detail that someone can act on them
- Output 20-40 actions, prioritized by impact
- DO NOT include actions for things that are merely suggestions or speculation
- Focus on HIGH CONFIDENCE items from the reports

{report_text}

Output ONLY the JSON array, no markdown fences, no explanation.
"#))
}
|
||||
|
||||
/// Run the full apply-consolidation pipeline.
|
||||
pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_file: Option<&str>) -> Result<(), String> {
|
||||
let reports = if let Some(path) = report_file {
|
||||
vec![PathBuf::from(path)]
|
||||
} else {
|
||||
find_consolidation_reports()
|
||||
};
|
||||
|
||||
if reports.is_empty() {
|
||||
println!("No consolidation reports found.");
|
||||
println!("Run consolidation-agents first.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("Found {} reports:", reports.len());
|
||||
for r in &reports {
|
||||
println!(" {}", r.file_name().and_then(|s| s.to_str()).unwrap_or("?"));
|
||||
}
|
||||
|
||||
println!("\nExtracting actions from reports...");
|
||||
let prompt = build_consolidation_prompt(&reports)?;
|
||||
println!(" Prompt: {} chars", prompt.len());
|
||||
|
||||
let response = call_sonnet(&prompt, 300)?;
|
||||
|
||||
let actions_value = parse_json_response(&response)?;
|
||||
let actions = actions_value.as_array()
|
||||
.ok_or("expected JSON array of actions")?;
|
||||
|
||||
println!(" {} actions extracted", actions.len());
|
||||
|
||||
// Save actions
|
||||
let timestamp = capnp_store::format_datetime(capnp_store::now_epoch())
|
||||
.replace([':', '-'], "");
|
||||
let actions_path = agent_results_dir()
|
||||
.join(format!("consolidation-actions-{}.json", timestamp));
|
||||
fs::write(&actions_path, serde_json::to_string_pretty(&actions_value).unwrap())
|
||||
.map_err(|e| format!("write {}: {}", actions_path.display(), e))?;
|
||||
println!(" Saved: {}", actions_path.display());
|
||||
|
||||
let link_actions: Vec<_> = actions.iter()
|
||||
.filter(|a| a.get("action").and_then(|v| v.as_str()) == Some("link"))
|
||||
.collect();
|
||||
let cat_actions: Vec<_> = actions.iter()
|
||||
.filter(|a| a.get("action").and_then(|v| v.as_str()) == Some("categorize"))
|
||||
.collect();
|
||||
let manual_actions: Vec<_> = actions.iter()
|
||||
.filter(|a| a.get("action").and_then(|v| v.as_str()) == Some("manual"))
|
||||
.collect();
|
||||
|
||||
if !do_apply {
|
||||
// Dry run
|
||||
println!("\n{}", "=".repeat(60));
|
||||
println!("DRY RUN — {} actions proposed", actions.len());
|
||||
println!("{}\n", "=".repeat(60));
|
||||
|
||||
if !link_actions.is_empty() {
|
||||
println!("## Links to add ({})\n", link_actions.len());
|
||||
for (i, a) in link_actions.iter().enumerate() {
|
||||
let src = a.get("source").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
let tgt = a.get("target").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
let reason = a.get("reason").and_then(|v| v.as_str()).unwrap_or("");
|
||||
println!(" {:2}. {} → {} ({})", i + 1, src, tgt, reason);
|
||||
}
|
||||
}
|
||||
if !cat_actions.is_empty() {
|
||||
println!("\n## Categories to set ({})\n", cat_actions.len());
|
||||
for a in &cat_actions {
|
||||
let key = a.get("key").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
let cat = a.get("category").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
let reason = a.get("reason").and_then(|v| v.as_str()).unwrap_or("");
|
||||
println!(" {} → {} ({})", key, cat, reason);
|
||||
}
|
||||
}
|
||||
if !manual_actions.is_empty() {
|
||||
println!("\n## Manual actions needed ({})\n", manual_actions.len());
|
||||
for a in &manual_actions {
|
||||
let prio = a.get("priority").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
let desc = a.get("description").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
println!(" [{}] {}", prio, desc);
|
||||
}
|
||||
}
|
||||
println!("\n{}", "=".repeat(60));
|
||||
println!("To apply: poc-memory apply-consolidation --apply");
|
||||
println!("{}", "=".repeat(60));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Apply
|
||||
let mut applied = 0usize;
|
||||
let mut skipped = 0usize;
|
||||
|
||||
if !link_actions.is_empty() {
|
||||
println!("\nApplying {} links...", link_actions.len());
|
||||
for a in &link_actions {
|
||||
let src = a.get("source").and_then(|v| v.as_str()).unwrap_or("");
|
||||
let tgt = a.get("target").and_then(|v| v.as_str()).unwrap_or("");
|
||||
if src.is_empty() || tgt.is_empty() { skipped += 1; continue; }
|
||||
|
||||
let source = match store.resolve_key(src) {
|
||||
Ok(s) => s,
|
||||
Err(e) => { println!(" ? {} → {}: {}", src, tgt, e); skipped += 1; continue; }
|
||||
};
|
||||
let target = match store.resolve_key(tgt) {
|
||||
Ok(t) => t,
|
||||
Err(e) => { println!(" ? {} → {}: {}", src, tgt, e); skipped += 1; continue; }
|
||||
};
|
||||
|
||||
let exists = store.relations.iter().any(|r|
|
||||
r.source_key == source && r.target_key == target && !r.deleted
|
||||
);
|
||||
if exists { skipped += 1; continue; }
|
||||
|
||||
let source_uuid = match store.nodes.get(&source) { Some(n) => n.uuid, None => { skipped += 1; continue; } };
|
||||
let target_uuid = match store.nodes.get(&target) { Some(n) => n.uuid, None => { skipped += 1; continue; } };
|
||||
|
||||
let rel = Store::new_relation(
|
||||
source_uuid, target_uuid,
|
||||
capnp_store::RelationType::Auto,
|
||||
0.5,
|
||||
&source, &target,
|
||||
);
|
||||
if store.add_relation(rel).is_ok() {
|
||||
println!(" + {} → {}", source, target);
|
||||
applied += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !cat_actions.is_empty() {
|
||||
println!("\nApplying {} categorizations...", cat_actions.len());
|
||||
for a in &cat_actions {
|
||||
let key = a.get("key").and_then(|v| v.as_str()).unwrap_or("");
|
||||
let cat = a.get("category").and_then(|v| v.as_str()).unwrap_or("");
|
||||
if key.is_empty() || cat.is_empty() { continue; }
|
||||
|
||||
let resolved = match store.resolve_key(key) {
|
||||
Ok(r) => r,
|
||||
Err(_) => { println!(" ? {} → {}: not found", key, cat); skipped += 1; continue; }
|
||||
};
|
||||
if store.categorize(&resolved, cat).is_ok() {
|
||||
println!(" + {} → {}", resolved, cat);
|
||||
applied += 1;
|
||||
} else {
|
||||
skipped += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !manual_actions.is_empty() {
|
||||
println!("\n## Manual actions (not auto-applied):\n");
|
||||
for a in &manual_actions {
|
||||
let prio = a.get("priority").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
let desc = a.get("description").and_then(|v| v.as_str()).unwrap_or("?");
|
||||
println!(" [{}] {}", prio, desc);
|
||||
}
|
||||
}
|
||||
|
||||
if applied > 0 {
|
||||
store.save()?;
|
||||
}
|
||||
|
||||
println!("\n{}", "=".repeat(60));
|
||||
println!("Applied: {} Skipped: {} Manual: {}", applied, skipped, manual_actions.len());
|
||||
println!("{}", "=".repeat(60));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
59
src/main.rs
59
src/main.rs
|
|
@ -89,6 +89,9 @@ fn main() {
|
|||
"daily-check" => cmd_daily_check(),
|
||||
"apply-agent" => cmd_apply_agent(&args[2..]),
|
||||
"digest" => cmd_digest(&args[2..]),
|
||||
"digest-links" => cmd_digest_links(&args[2..]),
|
||||
"journal-enrich" => cmd_journal_enrich(&args[2..]),
|
||||
"apply-consolidation" => cmd_apply_consolidation(&args[2..]),
|
||||
"trace" => cmd_trace(&args[2..]),
|
||||
"list-keys" => cmd_list_keys(),
|
||||
"list-edges" => cmd_list_edges(),
|
||||
|
|
@ -146,6 +149,11 @@ Commands:
|
|||
digest daily [DATE] Generate daily episodic digest (default: today)
|
||||
digest weekly [DATE] Generate weekly digest (any date in target week)
|
||||
digest monthly [YYYY-MM] Generate monthly digest (default: current month)
|
||||
digest-links [--apply] Parse and apply links from digest files
|
||||
journal-enrich JSONL TEXT [LINE]
|
||||
Enrich journal entry with conversation links
|
||||
apply-consolidation [--apply] [--report FILE]
|
||||
Extract and apply actions from consolidation reports
|
||||
trace KEY Walk temporal links: semantic ↔ episodic ↔ conversation
|
||||
list-keys List all node keys (one per line)
|
||||
list-edges List all edges (tsv: source target strength type)
|
||||
|
|
@ -655,6 +663,57 @@ fn cmd_digest(args: &[String]) -> Result<(), String> {
|
|||
}
|
||||
}
|
||||
|
||||
fn cmd_digest_links(args: &[String]) -> Result<(), String> {
|
||||
let do_apply = args.iter().any(|a| a == "--apply");
|
||||
|
||||
let links = digest::parse_all_digest_links();
|
||||
println!("Found {} unique links from digest files", links.len());
|
||||
|
||||
if !do_apply {
|
||||
for (i, link) in links.iter().enumerate() {
|
||||
println!(" {:3}. {} → {}", i + 1, link.source, link.target);
|
||||
if !link.reason.is_empty() {
|
||||
println!(" ({})", &link.reason[..link.reason.len().min(80)]);
|
||||
}
|
||||
}
|
||||
println!("\nTo apply: poc-memory digest-links --apply");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut store = capnp_store::Store::load()?;
|
||||
let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
|
||||
println!("\nApplied: {} ({} file-level fallbacks) Skipped: {}", applied, fallbacks, skipped);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn cmd_journal_enrich(args: &[String]) -> Result<(), String> {
|
||||
if args.len() < 2 {
|
||||
return Err("Usage: poc-memory journal-enrich JSONL_PATH ENTRY_TEXT [GREP_LINE]".into());
|
||||
}
|
||||
let jsonl_path = &args[0];
|
||||
let entry_text = &args[1];
|
||||
let grep_line: usize = args.get(2)
|
||||
.and_then(|a| a.parse().ok())
|
||||
.unwrap_or(0);
|
||||
|
||||
if !std::path::Path::new(jsonl_path.as_str()).is_file() {
|
||||
return Err(format!("JSONL not found: {}", jsonl_path));
|
||||
}
|
||||
|
||||
let mut store = capnp_store::Store::load()?;
|
||||
digest::journal_enrich(&mut store, jsonl_path, entry_text, grep_line)
|
||||
}
|
||||
|
||||
fn cmd_apply_consolidation(args: &[String]) -> Result<(), String> {
|
||||
let do_apply = args.iter().any(|a| a == "--apply");
|
||||
let report_file = args.windows(2)
|
||||
.find(|w| w[0] == "--report")
|
||||
.map(|w| w[1].as_str());
|
||||
|
||||
let mut store = capnp_store::Store::load()?;
|
||||
digest::apply_consolidation(&mut store, do_apply, report_file)
|
||||
}
|
||||
|
||||
fn cmd_trace(args: &[String]) -> Result<(), String> {
|
||||
if args.is_empty() {
|
||||
return Err("Usage: poc-memory trace KEY".into());
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue