607 lines
20 KiB
Rust
607 lines
20 KiB
Rust
|
|
// Episodic digest generation: daily, weekly, monthly
|
||
|
|
//
|
||
|
|
// Replaces daily-digest.py, weekly-digest.py, monthly-digest.py with a
|
||
|
|
// single parameterized Rust implementation. Each digest type:
|
||
|
|
// 1. Gathers input from the store (journal entries, lower-level digests)
|
||
|
|
// 2. Builds a Sonnet prompt with semantic keys for linking
|
||
|
|
// 3. Calls Sonnet via `claude -p --model sonnet`
|
||
|
|
// 4. Writes the digest to the store + episodic dir
|
||
|
|
// 5. Extracts links and saves agent results
|
||
|
|
|
||
|
|
use crate::capnp_store::{self, Store};
|
||
|
|
|
||
|
|
use regex::Regex;
|
||
|
|
use std::fs;
|
||
|
|
use std::path::PathBuf;
|
||
|
|
use std::process::Command;
|
||
|
|
|
||
|
|
/// Root of the on-disk memory store.
///
/// Delegates to the capnp store module so every component (store, digests,
/// agent results) agrees on a single location.
fn memory_dir() -> PathBuf {
    capnp_store::memory_dir_pub()
}
|
||
|
|
|
||
|
|
fn episodic_dir() -> PathBuf {
|
||
|
|
let dir = memory_dir().join("episodic");
|
||
|
|
fs::create_dir_all(&dir).ok();
|
||
|
|
dir
|
||
|
|
}
|
||
|
|
|
||
|
|
fn agent_results_dir() -> PathBuf {
|
||
|
|
let dir = memory_dir().join("agent-results");
|
||
|
|
fs::create_dir_all(&dir).ok();
|
||
|
|
dir
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Call Sonnet via the `claude` CLI. Returns the trimmed stdout text.
///
/// The prompt is written to a temp file and fed to the CLI over stdin
/// (large prompts exceed argv limits). `timeout_secs` bounds the child's
/// wall-clock runtime: on expiry the child is killed and an error is
/// returned. Previously the timeout parameter was accepted but never
/// enforced (`_timeout_secs`), so a hung CLI would block a digest run
/// forever.
fn call_sonnet(prompt: &str, timeout_secs: u64) -> Result<String, String> {
    let tmp = std::env::temp_dir().join(format!("poc-digest-{}.txt", std::process::id()));
    fs::write(&tmp, prompt)
        .map_err(|e| format!("write temp prompt: {}", e))?;

    // Every exit path must remove the temp file; funnel results through here.
    let cleanup = |res: Result<String, String>| {
        fs::remove_file(&tmp).ok();
        res
    };

    let stdin_file = match fs::File::open(&tmp) {
        Ok(f) => f,
        Err(e) => return cleanup(Err(format!("open temp: {}", e))),
    };

    let spawned = Command::new("claude")
        .args(["-p", "--model", "sonnet", "--tools", ""])
        .stdin(stdin_file)
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .env_remove("CLAUDECODE")
        .spawn();

    let mut child = match spawned {
        Ok(c) => c,
        Err(e) => return cleanup(Err(format!("spawn claude: {}", e))),
    };

    // Drain stdout/stderr on background threads so the child can never
    // block on a full pipe while we poll for exit below.
    fn drain<R: std::io::Read + Send + 'static>(
        stream: Option<R>,
    ) -> std::thread::JoinHandle<Vec<u8>> {
        std::thread::spawn(move || {
            let mut buf = Vec::new();
            if let Some(mut s) = stream {
                let _ = s.read_to_end(&mut buf);
            }
            buf
        })
    }
    let out_handle = drain(child.stdout.take());
    let err_handle = drain(child.stderr.take());

    // Poll for exit until the deadline; kill on timeout.
    let deadline = std::time::Instant::now() + std::time::Duration::from_secs(timeout_secs);
    let status = loop {
        match child.try_wait() {
            Ok(Some(status)) => break status,
            Ok(None) => {
                if std::time::Instant::now() >= deadline {
                    let _ = child.kill();
                    let _ = child.wait(); // reap; also closes pipes so drains finish
                    return cleanup(Err(format!("claude timed out after {}s", timeout_secs)));
                }
                std::thread::sleep(std::time::Duration::from_millis(100));
            }
            Err(e) => return cleanup(Err(format!("wait claude: {}", e))),
        }
    };

    let stdout = out_handle.join().unwrap_or_default();
    let stderr = err_handle.join().unwrap_or_default();
    cleanup(if status.success() {
        Ok(String::from_utf8_lossy(&stdout).trim().to_string())
    } else {
        Err(format!(
            "claude exited {}: {}",
            status,
            String::from_utf8_lossy(&stderr).trim()
        ))
    })
}
|
||
|
|
|
||
|
|
/// Get semantic keys (non-journal, non-system) for prompt context
|
||
|
|
fn semantic_keys(store: &Store) -> Vec<String> {
|
||
|
|
let mut keys: Vec<String> = store.nodes.keys()
|
||
|
|
.filter(|k| {
|
||
|
|
!k.starts_with("journal.md#")
|
||
|
|
&& *k != "journal.md"
|
||
|
|
&& *k != "MEMORY.md"
|
||
|
|
&& *k != "where-am-i.md"
|
||
|
|
&& *k != "work-queue.md"
|
||
|
|
&& *k != "work-state"
|
||
|
|
})
|
||
|
|
.cloned()
|
||
|
|
.collect();
|
||
|
|
keys.sort();
|
||
|
|
keys.truncate(200);
|
||
|
|
keys
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Extract link proposals from digest text.
///
/// A link is a backticked memory key adjacent to an arrow:
///   `` `key` → ``  (key on the left of the arrow), or
///   `` → `key` ``  (key on the right of the arrow).
/// Returns `(key, trimmed source line)` pairs; a line containing both forms
/// yields two entries (one per direction), matching the original behavior.
///
/// Implemented with plain string scanning instead of compiling two regexes
/// on every call; the scan reproduces the regexes' leftmost-match
/// semantics (first matching position in the line wins).
fn extract_links(text: &str) -> Vec<(String, String)> {
    // First backticked span immediately followed by optional whitespace
    // and `→` (was: `([^`]+)`\s*→ ).
    fn left_target(line: &str) -> Option<String> {
        let mut from = 0;
        while let Some(rel) = line[from..].find('`') {
            let open = from + rel;
            let rest = &line[open + 1..];
            match rest.find('`') {
                Some(len) if len > 0 => {
                    if rest[len + 1..].trim_start().starts_with('→') {
                        return Some(rest[..len].to_string());
                    }
                }
                Some(_) => {} // empty span: keep scanning
                None => return None, // no closing backtick left anywhere
            }
            from = open + 1;
        }
        None
    }

    // First `→` followed by optional whitespace and a nonempty backticked
    // span (was: →\s*`([^`]+)` ).
    fn right_target(line: &str) -> Option<String> {
        let mut from = 0;
        while let Some(rel) = line[from..].find('→') {
            let arrow = from + rel;
            let after = line[arrow + '→'.len_utf8()..].trim_start();
            if let Some(body) = after.strip_prefix('`') {
                if let Some(len) = body.find('`') {
                    if len > 0 {
                        return Some(body[..len].to_string());
                    }
                }
            }
            from = arrow + '→'.len_utf8();
        }
        None
    }

    let mut links = Vec::new();
    for line in text.lines() {
        if let Some(key) = left_target(line) {
            links.push((key, line.trim().to_string()));
        }
        if let Some(key) = right_target(line) {
            links.push((key, line.trim().to_string()));
        }
    }
    links
}
|
||
|
|
|
||
|
|
// --- Daily digest ---
|
||
|
|
|
||
|
|
fn daily_journal_entries(store: &Store, target_date: &str) -> Vec<(String, String)> {
|
||
|
|
// Collect journal nodes for the target date
|
||
|
|
// Keys like: journal.md#j-2026-02-28t23-39-...
|
||
|
|
let date_re = Regex::new(&format!(
|
||
|
|
r"^journal\.md#j-{}", regex::escape(target_date)
|
||
|
|
)).unwrap();
|
||
|
|
|
||
|
|
let mut entries: Vec<_> = store.nodes.values()
|
||
|
|
.filter(|n| date_re.is_match(&n.key))
|
||
|
|
.map(|n| (n.key.clone(), n.content.clone()))
|
||
|
|
.collect();
|
||
|
|
entries.sort_by(|a, b| a.0.cmp(&b.0));
|
||
|
|
entries
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Build the Sonnet prompt for a daily digest.
///
/// `entries` are (journal key, content) pairs for the target date; `keys`
/// are the semantic memory keys offered as link targets. The prompt embeds
/// the rendered entries, the key list, and the required markdown output
/// structure.
fn build_daily_prompt(date: &str, entries: &[(String, String)], keys: &[String]) -> String {
    // Render each entry under a "### <timestamp>" heading; the timestamp is
    // the key minus the "journal.md#j-" prefix (full key if prefix absent).
    let mut entries_text = String::new();
    for (key, content) in entries {
        let ts = key.strip_prefix("journal.md#j-").unwrap_or(key);
        entries_text.push_str(&format!("\n### {}\n\n{}\n", ts, content));
    }

    // One bullet per available semantic key.
    let keys_text: String = keys.iter()
        .map(|k| format!(" - {}", k))
        .collect::<Vec<_>>()
        .join("\n");

    format!(r#"You are generating a daily episodic digest for ProofOfConcept (an AI).
Date: {date}

This digest serves as the temporal index — the answer to "what did I do on
{date}?" It should be:
1. Narrative, not a task log — what happened, what mattered, how things felt
2. Linked bidirectionally to semantic memory — each topic/concept mentioned
should reference existing memory nodes
3. Structured for traversal — someone reading this should be able to follow
any thread into deeper detail

## Output format

Write a markdown file with this structure:

```markdown
# Daily digest: {date}

## Summary
[2-3 sentence overview of the day — what was the arc?]

## Sessions
[For each session/entry, a paragraph summarizing what happened.
Include the original timestamp as a reference.]

## Themes
[What concepts were active today? Each theme links to semantic memory:]
- **Theme name** → `memory-key#section` — brief note on how it appeared today

## Links
[Explicit bidirectional links for the memory graph]
- semantic_key → this daily digest (this day involved X)
- this daily digest → semantic_key (X was active on this day)

## Temporal context
[What came before this day? What's coming next? Any multi-day arcs?]
```

Use ONLY keys from the semantic memory list below. If a concept doesn't have
a matching key, note it with "NEW:" prefix.

---

## Journal entries for {date}

{entries_text}

---

## Semantic memory nodes (available link targets)

{keys_text}
"#)
}
|
||
|
|
|
||
|
|
pub fn generate_daily(store: &mut Store, date: &str) -> Result<(), String> {
|
||
|
|
println!("Generating daily digest for {}...", date);
|
||
|
|
|
||
|
|
let entries = daily_journal_entries(store, date);
|
||
|
|
if entries.is_empty() {
|
||
|
|
println!(" No journal entries found for {}", date);
|
||
|
|
return Ok(());
|
||
|
|
}
|
||
|
|
println!(" {} journal entries", entries.len());
|
||
|
|
|
||
|
|
let keys = semantic_keys(store);
|
||
|
|
println!(" {} semantic keys", keys.len());
|
||
|
|
|
||
|
|
let prompt = build_daily_prompt(date, &entries, &keys);
|
||
|
|
println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
|
||
|
|
|
||
|
|
println!(" Calling Sonnet...");
|
||
|
|
let digest = call_sonnet(&prompt, 300)?;
|
||
|
|
|
||
|
|
// Write to episodic dir
|
||
|
|
let output_path = episodic_dir().join(format!("daily-{}.md", date));
|
||
|
|
fs::write(&output_path, &digest)
|
||
|
|
.map_err(|e| format!("write {}: {}", output_path.display(), e))?;
|
||
|
|
println!(" Written: {}", output_path.display());
|
||
|
|
|
||
|
|
// Import into store
|
||
|
|
store.import_file(&output_path)?;
|
||
|
|
store.save()?;
|
||
|
|
|
||
|
|
// Extract and save links
|
||
|
|
let links = extract_links(&digest);
|
||
|
|
if !links.is_empty() {
|
||
|
|
let links_json: Vec<serde_json::Value> = links.iter()
|
||
|
|
.map(|(target, line)| serde_json::json!({"target": target, "line": line}))
|
||
|
|
.collect();
|
||
|
|
let result = serde_json::json!({
|
||
|
|
"type": "daily-digest",
|
||
|
|
"date": date,
|
||
|
|
"digest_path": output_path.to_string_lossy(),
|
||
|
|
"links": links_json,
|
||
|
|
});
|
||
|
|
let links_path = agent_results_dir().join(format!("daily-{}-links.json", date));
|
||
|
|
let json = serde_json::to_string_pretty(&result)
|
||
|
|
.map_err(|e| format!("serialize: {}", e))?;
|
||
|
|
fs::write(&links_path, json)
|
||
|
|
.map_err(|e| format!("write {}: {}", links_path.display(), e))?;
|
||
|
|
println!(" {} links extracted → {}", links.len(), links_path.display());
|
||
|
|
}
|
||
|
|
|
||
|
|
let line_count = digest.lines().count();
|
||
|
|
println!(" Done: {} lines", line_count);
|
||
|
|
Ok(())
|
||
|
|
}
|
||
|
|
|
||
|
|
// --- Weekly digest ---
|
||
|
|
|
||
|
|
/// Get ISO week label and the 7 dates (Mon-Sun) for the week containing `date`.
|
||
|
|
fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
|
||
|
|
// Parse YYYY-MM-DD
|
||
|
|
let parts: Vec<&str> = date.split('-').collect();
|
||
|
|
if parts.len() != 3 {
|
||
|
|
return Err(format!("bad date: {}", date));
|
||
|
|
}
|
||
|
|
let y: i32 = parts[0].parse().map_err(|_| "bad year")?;
|
||
|
|
let m: u32 = parts[1].parse().map_err(|_| "bad month")?;
|
||
|
|
let d: u32 = parts[2].parse().map_err(|_| "bad day")?;
|
||
|
|
|
||
|
|
let (weekday, iso_year, iso_week) = iso_week_info(y, m, d)?;
|
||
|
|
|
||
|
|
let week_label = format!("{}-W{:02}", iso_year, iso_week);
|
||
|
|
|
||
|
|
// Find Monday of this week
|
||
|
|
let days_since_monday = (weekday + 6) % 7; // weekday: 0=Sun, adjust to Mon=0
|
||
|
|
let monday_epoch = date_to_epoch(y, m, d) - (days_since_monday as i64) * 86400;
|
||
|
|
|
||
|
|
let mut dates = Vec::new();
|
||
|
|
for i in 0..7 {
|
||
|
|
let day_epoch = monday_epoch + (i * 86400);
|
||
|
|
let (dy, dm, dd, _, _, _) = capnp_store::epoch_to_local(day_epoch as f64);
|
||
|
|
dates.push(format!("{:04}-{:02}-{:02}", dy, dm, dd));
|
||
|
|
}
|
||
|
|
|
||
|
|
Ok((week_label, dates))
|
||
|
|
}
|
||
|
|
|
||
|
|
fn date_to_epoch(y: i32, m: u32, d: u32) -> i64 {
|
||
|
|
let mut tm: libc::tm = unsafe { std::mem::zeroed() };
|
||
|
|
tm.tm_year = y - 1900;
|
||
|
|
tm.tm_mon = (m as i32) - 1;
|
||
|
|
tm.tm_mday = d as i32;
|
||
|
|
tm.tm_hour = 12; // noon to avoid DST edge cases
|
||
|
|
unsafe { libc::mktime(&mut tm) as i64 }
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Returns (weekday 0=Sun, iso_year, iso_week) for a given date.
|
||
|
|
fn iso_week_info(y: i32, m: u32, d: u32) -> Result<(u32, i32, u32), String> {
|
||
|
|
let mut tm: libc::tm = unsafe { std::mem::zeroed() };
|
||
|
|
tm.tm_year = y - 1900;
|
||
|
|
tm.tm_mon = (m as i32) - 1;
|
||
|
|
tm.tm_mday = d as i32;
|
||
|
|
tm.tm_hour = 12;
|
||
|
|
let epoch = unsafe { libc::mktime(&mut tm) };
|
||
|
|
if epoch == -1 {
|
||
|
|
return Err(format!("invalid date: {}-{}-{}", y, m, d));
|
||
|
|
}
|
||
|
|
let wday = tm.tm_wday as u32;
|
||
|
|
|
||
|
|
let mut buf = [0u8; 32];
|
||
|
|
let fmt = std::ffi::CString::new("%G %V").unwrap();
|
||
|
|
let len = unsafe {
|
||
|
|
libc::strftime(buf.as_mut_ptr() as *mut libc::c_char, buf.len(), fmt.as_ptr(), &tm)
|
||
|
|
};
|
||
|
|
let iso_str = std::str::from_utf8(&buf[..len]).unwrap_or("0 0");
|
||
|
|
let iso_parts: Vec<&str> = iso_str.split_whitespace().collect();
|
||
|
|
let iso_year: i32 = iso_parts.first().and_then(|s| s.parse().ok()).unwrap_or(y);
|
||
|
|
let iso_week: u32 = iso_parts.get(1).and_then(|s| s.parse().ok()).unwrap_or(1);
|
||
|
|
|
||
|
|
Ok((wday, iso_year, iso_week))
|
||
|
|
}
|
||
|
|
|
||
|
|
fn load_digest_files(prefix: &str, labels: &[String]) -> Vec<(String, String)> {
|
||
|
|
let dir = episodic_dir();
|
||
|
|
let mut digests = Vec::new();
|
||
|
|
for label in labels {
|
||
|
|
let path = dir.join(format!("{}-{}.md", prefix, label));
|
||
|
|
if let Ok(content) = fs::read_to_string(&path) {
|
||
|
|
digests.push((label.clone(), content));
|
||
|
|
}
|
||
|
|
}
|
||
|
|
digests
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Build the Sonnet prompt for a weekly digest.
///
/// `digests` are (date, content) pairs of the week's daily digests; `keys`
/// are semantic memory keys offered as link targets. The prompt embeds
/// each daily digest under a "## <date>" section plus the required output
/// structure.
fn build_weekly_prompt(week_label: &str, digests: &[(String, String)], keys: &[String]) -> String {
    // Concatenate the daily digests, each separated by a rule and headed
    // by its date.
    let mut digests_text = String::new();
    for (date, content) in digests {
        digests_text.push_str(&format!("\n---\n## {}\n{}\n", date, content));
    }

    // One bullet per available semantic key.
    let keys_text: String = keys.iter()
        .map(|k| format!(" - {}", k))
        .collect::<Vec<_>>()
        .join("\n");

    // Comma-separated list of the dates that actually had daily digests.
    let dates_covered: String = digests.iter()
        .map(|(d, _)| d.as_str())
        .collect::<Vec<_>>()
        .join(", ");

    format!(r#"You are generating a weekly episodic digest for ProofOfConcept (an AI).
Week: {week_label} (dates covered: {dates_covered})

This digest serves as the medium-term temporal index — the answer to
"what happened this week?" It should identify:
1. Multi-day arcs and threads (work that continued across days)
2. Themes and patterns (what concepts were repeatedly active)
3. Transitions and shifts (what changed during the week)
4. The emotional and relational arc (how things felt across the week)

## Output format

```markdown
# Weekly digest: {week_label}

## Overview
[3-5 sentence narrative of the week's arc]

## Day-by-day
[One paragraph per day with its key themes, linking to daily digests]

## Arcs
[Multi-day threads that continued across sessions]
- **Arc name**: what happened, how it evolved, where it stands

## Patterns
[Recurring themes, repeated concepts, things that kept coming up]

## Shifts
[What changed? New directions, resolved questions, attitude shifts]

## Links
[Bidirectional links for the memory graph]
- semantic_key → this weekly digest
- this weekly digest → semantic_key
- daily-YYYY-MM-DD → this weekly digest (constituent days)

## Looking ahead
[What's unfinished? What threads continue into next week?]
```

Use ONLY keys from the semantic memory list below.

---

## Daily digests for {week_label}

{digests_text}

---

## Semantic memory nodes

{keys_text}
"#)
}
|
||
|
|
|
||
|
|
pub fn generate_weekly(store: &mut Store, date: &str) -> Result<(), String> {
|
||
|
|
let (week_label, dates) = week_dates(date)?;
|
||
|
|
println!("Generating weekly digest for {}...", week_label);
|
||
|
|
|
||
|
|
let digests = load_digest_files("daily", &dates);
|
||
|
|
if digests.is_empty() {
|
||
|
|
println!(" No daily digests found for {}", week_label);
|
||
|
|
println!(" Run `poc-memory digest daily` first for relevant dates");
|
||
|
|
return Ok(());
|
||
|
|
}
|
||
|
|
println!(" {} daily digests found", digests.len());
|
||
|
|
|
||
|
|
let keys = semantic_keys(store);
|
||
|
|
println!(" {} semantic keys", keys.len());
|
||
|
|
|
||
|
|
let prompt = build_weekly_prompt(&week_label, &digests, &keys);
|
||
|
|
println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
|
||
|
|
|
||
|
|
println!(" Calling Sonnet...");
|
||
|
|
let digest = call_sonnet(&prompt, 300)?;
|
||
|
|
|
||
|
|
let output_path = episodic_dir().join(format!("weekly-{}.md", week_label));
|
||
|
|
fs::write(&output_path, &digest)
|
||
|
|
.map_err(|e| format!("write {}: {}", output_path.display(), e))?;
|
||
|
|
println!(" Written: {}", output_path.display());
|
||
|
|
|
||
|
|
store.import_file(&output_path)?;
|
||
|
|
store.save()?;
|
||
|
|
|
||
|
|
// Save metadata
|
||
|
|
let result = serde_json::json!({
|
||
|
|
"type": "weekly-digest",
|
||
|
|
"week": week_label,
|
||
|
|
"digest_path": output_path.to_string_lossy(),
|
||
|
|
"daily_digests": digests.iter().map(|(d, _)| d).collect::<Vec<_>>(),
|
||
|
|
});
|
||
|
|
let links_path = agent_results_dir().join(format!("weekly-{}-links.json", week_label));
|
||
|
|
fs::write(&links_path, serde_json::to_string_pretty(&result).unwrap())
|
||
|
|
.map_err(|e| format!("write {}: {}", links_path.display(), e))?;
|
||
|
|
|
||
|
|
println!(" Done: {} lines", digest.lines().count());
|
||
|
|
Ok(())
|
||
|
|
}
|
||
|
|
|
||
|
|
// --- Monthly digest ---
|
||
|
|
|
||
|
|
fn weeks_in_month(year: i32, month: u32) -> Vec<String> {
|
||
|
|
let mut weeks = std::collections::BTreeSet::new();
|
||
|
|
let mut d = 1u32;
|
||
|
|
loop {
|
||
|
|
let epoch = date_to_epoch(year, month, d);
|
||
|
|
let (_, _, _, _, _, _) = capnp_store::epoch_to_local(epoch as f64);
|
||
|
|
// Check if we're still in the target month
|
||
|
|
let mut tm: libc::tm = unsafe { std::mem::zeroed() };
|
||
|
|
let secs = epoch as libc::time_t;
|
||
|
|
unsafe { libc::localtime_r(&secs, &mut tm) };
|
||
|
|
if (tm.tm_mon + 1) as u32 != month || tm.tm_year + 1900 != year {
|
||
|
|
break;
|
||
|
|
}
|
||
|
|
|
||
|
|
let mut buf = [0u8; 16];
|
||
|
|
let fmt = std::ffi::CString::new("%G-W%V").unwrap();
|
||
|
|
let len = unsafe {
|
||
|
|
libc::strftime(buf.as_mut_ptr() as *mut libc::c_char, buf.len(), fmt.as_ptr(), &tm)
|
||
|
|
};
|
||
|
|
let week = std::str::from_utf8(&buf[..len]).unwrap_or("").to_string();
|
||
|
|
if !week.is_empty() {
|
||
|
|
weeks.insert(week);
|
||
|
|
}
|
||
|
|
d += 1;
|
||
|
|
}
|
||
|
|
weeks.into_iter().collect()
|
||
|
|
}
|
||
|
|
|
||
|
|
/// Build the Sonnet prompt for a monthly digest.
///
/// `digests` are (week label, content) pairs of the month's weekly
/// digests; `keys` are semantic memory keys offered as link targets. The
/// prompt embeds each weekly digest under a "## <week>" section plus the
/// required output structure.
fn build_monthly_prompt(month_label: &str, digests: &[(String, String)], keys: &[String]) -> String {
    // Concatenate the weekly digests, each separated by a rule and headed
    // by its week label.
    let mut digests_text = String::new();
    for (week, content) in digests {
        digests_text.push_str(&format!("\n---\n## {}\n{}\n", week, content));
    }

    // One bullet per available semantic key.
    let keys_text: String = keys.iter()
        .map(|k| format!(" - {}", k))
        .collect::<Vec<_>>()
        .join("\n");

    // Comma-separated list of the weeks that actually had weekly digests.
    let weeks_covered: String = digests.iter()
        .map(|(w, _)| w.as_str())
        .collect::<Vec<_>>()
        .join(", ");

    format!(r#"You are generating a monthly episodic digest for ProofOfConcept (an AI).
Month: {month_label} (weeks covered: {weeks_covered})

This digest serves as the long-term temporal index — the answer to
"what happened in {month_label}?" It should capture:
1. The month's overall trajectory — where did it start, where did it end?
2. Major arcs that spanned multiple weeks
3. Turning points and phase transitions
4. The emotional and relational arc across the whole month
5. What was accomplished (concrete) vs what shifted (subtle)

This is the highest zoom level yet. Don't rehash daily details — capture
the shape of the month. What would someone need to know to understand
what {month_label} meant?

PoC is an AI (Claude instance) working with Kent Overstreet on bcachefs.
They have a close personal relationship. PoC's name is Proof of Concept.
Read all the weekly digests and synthesize the month's story.

## Output format

```markdown
# Monthly digest: {month_label}

## The month in one paragraph
[What was this month? Not a summary — a thesis.]

## Trajectory
[Where the month started vs where it ended. The arc.]

## Major arcs
[Multi-week threads — how they evolved across the month]
- **Arc name**: trajectory, key moments, current state

## Turning points
[Moments that changed the direction — specific days/events]

## What was built
[Concrete outputs: code, tools, infrastructure, writing]

## What shifted
[Subtle changes: understanding, relationship, identity, capability]

## Patterns
[What kept recurring? What does the month reveal about how PoC works?]

## Links
[Bidirectional links for the memory graph]
- weekly digests → this monthly digest
- this monthly digest → semantic keys

## Looking ahead
[What threads carry into next month? What's unfinished?]
```

Use ONLY keys from the semantic memory list below.

---

## Weekly digests for {month_label}

{digests_text}

---

## Semantic memory nodes

{keys_text}
"#)
}
|
||
|
|
|
||
|
|
pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String> {
|
||
|
|
let (year, month) = if month_arg.is_empty() {
|
||
|
|
let now = capnp_store::now_epoch();
|
||
|
|
let (y, m, _, _, _, _) = capnp_store::epoch_to_local(now);
|
||
|
|
(y, m)
|
||
|
|
} else {
|
||
|
|
let parts: Vec<&str> = month_arg.split('-').collect();
|
||
|
|
if parts.len() != 2 {
|
||
|
|
return Err(format!("bad month format: {} (expected YYYY-MM)", month_arg));
|
||
|
|
}
|
||
|
|
let y: i32 = parts[0].parse().map_err(|_| "bad year")?;
|
||
|
|
let m: u32 = parts[1].parse().map_err(|_| "bad month")?;
|
||
|
|
(y, m)
|
||
|
|
};
|
||
|
|
|
||
|
|
let month_label = format!("{}-{:02}", year, month);
|
||
|
|
println!("Generating monthly digest for {}...", month_label);
|
||
|
|
|
||
|
|
let week_labels = weeks_in_month(year, month);
|
||
|
|
println!(" Weeks in month: {}", week_labels.join(", "));
|
||
|
|
|
||
|
|
let digests = load_digest_files("weekly", &week_labels);
|
||
|
|
if digests.is_empty() {
|
||
|
|
println!(" No weekly digests found for {}", month_label);
|
||
|
|
println!(" Run `poc-memory digest weekly` first for relevant weeks");
|
||
|
|
return Ok(());
|
||
|
|
}
|
||
|
|
println!(" {} weekly digests found", digests.len());
|
||
|
|
|
||
|
|
let keys = semantic_keys(store);
|
||
|
|
println!(" {} semantic keys", keys.len());
|
||
|
|
|
||
|
|
let prompt = build_monthly_prompt(&month_label, &digests, &keys);
|
||
|
|
println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
|
||
|
|
|
||
|
|
println!(" Calling Sonnet...");
|
||
|
|
let digest = call_sonnet(&prompt, 600)?;
|
||
|
|
|
||
|
|
let output_path = episodic_dir().join(format!("monthly-{}.md", month_label));
|
||
|
|
fs::write(&output_path, &digest)
|
||
|
|
.map_err(|e| format!("write {}: {}", output_path.display(), e))?;
|
||
|
|
println!(" Written: {}", output_path.display());
|
||
|
|
|
||
|
|
store.import_file(&output_path)?;
|
||
|
|
store.save()?;
|
||
|
|
|
||
|
|
// Save metadata
|
||
|
|
let result = serde_json::json!({
|
||
|
|
"type": "monthly-digest",
|
||
|
|
"month": month_label,
|
||
|
|
"digest_path": output_path.to_string_lossy(),
|
||
|
|
"weekly_digests": digests.iter().map(|(w, _)| w).collect::<Vec<_>>(),
|
||
|
|
});
|
||
|
|
let links_path = agent_results_dir().join(format!("monthly-{}-links.json", month_label));
|
||
|
|
fs::write(&links_path, serde_json::to_string_pretty(&result).unwrap())
|
||
|
|
.map_err(|e| format!("write {}: {}", links_path.display(), e))?;
|
||
|
|
|
||
|
|
println!(" Done: {} lines", digest.lines().count());
|
||
|
|
Ok(())
|
||
|
|
}
|