digest: native Rust implementation replacing Python scripts

Replace daily-digest.py, weekly-digest.py, monthly-digest.py with a
single digest.rs module. All three digest types now:
- Gather input directly from the Store (no subprocess calls)
- Build prompts in Rust (same templates as the Python versions)
- Call Sonnet via `claude -p --model sonnet`
- Import results back into the store automatically
- Extract links and save agent results

606 lines of Rust replace 729 lines of Python + store_helpers.py
overhead. More importantly: this is now callable as a library from
poc-agent, and shares types/code with the rest of poc-memory.

Also adds `digest monthly [YYYY-MM]` subcommand (was Python-only).
This commit is contained in:
ProofOfConcept 2026-02-28 23:58:05 -05:00
parent 1ca6e55b7d
commit 91122fe1d1
3 changed files with 629 additions and 107 deletions

79
Cargo.lock generated
View file

@ -80,17 +80,6 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "getrandom"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.4.1" version = "0.4.1"
@ -197,22 +186,12 @@ dependencies = [
"capnp", "capnp",
"capnpc", "capnpc",
"libc", "libc",
"rand",
"regex", "regex",
"serde", "serde",
"serde_json", "serde_json",
"uuid", "uuid",
] ]
[[package]]
name = "ppv-lite86"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
dependencies = [
"zerocopy",
]
[[package]] [[package]]
name = "prettyplease" name = "prettyplease"
version = "0.2.37" version = "0.2.37"
@ -247,36 +226,6 @@ version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom 0.2.17",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.12.3" version = "1.12.3"
@ -390,17 +339,11 @@ version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb"
dependencies = [ dependencies = [
"getrandom 0.4.1", "getrandom",
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
[[package]] [[package]]
name = "wasip2" name = "wasip2"
version = "1.0.2+wasi-0.2.9" version = "1.0.2+wasi-0.2.9"
@ -586,26 +529,6 @@ dependencies = [
"wasmparser", "wasmparser",
] ]
[[package]]
name = "zerocopy"
version = "0.8.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "zmij" name = "zmij"
version = "1.0.21" version = "1.0.21"

606
src/digest.rs Normal file
View file

@ -0,0 +1,606 @@
// Episodic digest generation: daily, weekly, monthly
//
// Replaces daily-digest.py, weekly-digest.py, monthly-digest.py with a
// single parameterized Rust implementation. Each digest type:
// 1. Gathers input from the store (journal entries, lower-level digests)
// 2. Builds a Sonnet prompt with semantic keys for linking
// 3. Calls Sonnet via `claude -p --model sonnet`
// 4. Writes the digest to the store + episodic dir
// 5. Extracts links and saves agent results
use crate::capnp_store::{self, Store};
use regex::Regex;
use std::fs;
use std::path::PathBuf;
use std::process::Command;
/// Root directory of the memory store on disk (delegates to the capnp store module).
fn memory_dir() -> PathBuf {
    capnp_store::memory_dir_pub()
}
/// `<memory>/episodic/` — where digests are written. Created on demand;
/// creation failure is deliberately ignored (best-effort, write will fail loudly later).
fn episodic_dir() -> PathBuf {
    let dir = memory_dir().join("episodic");
    let _ = fs::create_dir_all(&dir);
    dir
}
/// `<memory>/agent-results/` — where link/metadata JSON is written. Created on
/// demand; creation failure is deliberately ignored (best-effort).
fn agent_results_dir() -> PathBuf {
    let dir = memory_dir().join("agent-results");
    let _ = fs::create_dir_all(&dir);
    dir
}
/// Call Sonnet via claude CLI. Returns the response text.
///
/// The prompt is handed to `claude -p` on stdin via a temp file (large
/// prompts don't fit in argv). `timeout_secs` bounds the wait: on expiry the
/// child is killed and an error is returned. (The previous version accepted
/// a timeout but ignored it.)
fn call_sonnet(prompt: &str, timeout_secs: u64) -> Result<String, String> {
    use std::io::Read;
    // Write prompt to temp file (claude CLI needs file input for large prompts)
    let tmp = std::env::temp_dir().join(format!("poc-digest-{}.txt", std::process::id()));
    fs::write(&tmp, prompt)
        .map_err(|e| format!("write temp prompt: {}", e))?;
    let stdin = match fs::File::open(&tmp) {
        Ok(f) => f,
        Err(e) => {
            fs::remove_file(&tmp).ok();
            return Err(format!("open temp: {}", e));
        }
    };
    // CLAUDECODE is removed so the nested claude call doesn't think it is
    // running inside an existing Claude Code session.
    let spawned = Command::new("claude")
        .args(["-p", "--model", "sonnet", "--tools", ""])
        .stdin(stdin)
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .env_remove("CLAUDECODE")
        .spawn();
    // The child (if spawned) already holds an open fd to the temp file, so
    // unlinking now is safe and guarantees cleanup on every path below.
    fs::remove_file(&tmp).ok();
    let mut child = spawned.map_err(|e| format!("spawn claude: {}", e))?;
    // Drain stdout/stderr on worker threads so a response larger than the
    // pipe buffer can't deadlock the child while we poll for exit.
    let mut out_pipe = child.stdout.take();
    let out_thread = std::thread::spawn(move || {
        let mut buf = Vec::new();
        if let Some(p) = out_pipe.as_mut() {
            p.read_to_end(&mut buf).ok();
        }
        buf
    });
    let mut err_pipe = child.stderr.take();
    let err_thread = std::thread::spawn(move || {
        let mut buf = Vec::new();
        if let Some(p) = err_pipe.as_mut() {
            p.read_to_end(&mut buf).ok();
        }
        buf
    });
    // Poll for exit until the deadline; kill the child on timeout.
    let deadline = std::time::Instant::now() + std::time::Duration::from_secs(timeout_secs);
    let status = loop {
        match child.try_wait() {
            Ok(Some(status)) => break status,
            Ok(None) => {
                if std::time::Instant::now() >= deadline {
                    child.kill().ok();
                    child.wait().ok(); // reap so we don't leave a zombie
                    return Err(format!("claude timed out after {}s", timeout_secs));
                }
                std::thread::sleep(std::time::Duration::from_millis(100));
            }
            Err(e) => return Err(format!("wait for claude: {}", e)),
        }
    };
    let stdout = out_thread.join().unwrap_or_default();
    let stderr = err_thread.join().unwrap_or_default();
    if status.success() {
        Ok(String::from_utf8_lossy(&stdout).trim().to_string())
    } else {
        let stderr = String::from_utf8_lossy(&stderr);
        Err(format!("claude exited {}: {}", status, stderr.trim()))
    }
}
/// Semantic keys usable as link targets in prompts: every node key except
/// journal entries and the system/bookkeeping files. Sorted, and capped at
/// 200 to keep prompt size bounded.
fn semantic_keys(store: &Store) -> Vec<String> {
    // Bookkeeping files that should never appear as link targets.
    let system = ["journal.md", "MEMORY.md", "where-am-i.md", "work-queue.md", "work-state"];
    let mut keys: Vec<String> = store.nodes.keys()
        .filter(|k| !k.starts_with("journal.md#") && !system.contains(&k.as_str()))
        .cloned()
        .collect();
    keys.sort();
    keys.truncate(200);
    keys
}
/// Extract link proposals from digest text (backtick-arrow patterns).
///
/// Matches both ``…`key`→…`` and ``…→`key`…`` forms; each hit records the
/// backticked key plus the trimmed line it came from (a line with arrows on
/// both sides yields two entries, one per direction).
fn extract_links(text: &str) -> Vec<(String, String)> {
    let arrow_on_right = Regex::new(r"`([^`]+)`\s*→").unwrap();
    let arrow_on_left = Regex::new(r"→\s*`([^`]+)`").unwrap();
    let mut found = Vec::new();
    for raw in text.lines() {
        let trimmed = raw.trim();
        for re in [&arrow_on_right, &arrow_on_left] {
            if let Some(cap) = re.captures(raw) {
                found.push((cap[1].to_string(), trimmed.to_string()));
            }
        }
    }
    found
}
// --- Daily digest ---
fn daily_journal_entries(store: &Store, target_date: &str) -> Vec<(String, String)> {
// Collect journal nodes for the target date
// Keys like: journal.md#j-2026-02-28t23-39-...
let date_re = Regex::new(&format!(
r"^journal\.md#j-{}", regex::escape(target_date)
)).unwrap();
let mut entries: Vec<_> = store.nodes.values()
.filter(|n| date_re.is_match(&n.key))
.map(|n| (n.key.clone(), n.content.clone()))
.collect();
entries.sort_by(|a, b| a.0.cmp(&b.0));
entries
}
/// Assemble the Sonnet prompt for a daily digest: the day's journal entries
/// plus the list of semantic keys allowed as link targets.
fn build_daily_prompt(date: &str, entries: &[(String, String)], keys: &[String]) -> String {
    // Each journal entry, headed by its timestamp (the key minus its prefix).
    let entries_text: String = entries.iter()
        .map(|(key, content)| {
            let ts = key.strip_prefix("journal.md#j-").unwrap_or(key);
            format!("\n### {}\n\n{}\n", ts, content)
        })
        .collect();
    // Allowed link targets, one bullet per key.
    let mut key_lines: Vec<String> = Vec::with_capacity(keys.len());
    for k in keys {
        key_lines.push(format!(" - {}", k));
    }
    let keys_text = key_lines.join("\n");
    format!(r#"You are generating a daily episodic digest for ProofOfConcept (an AI).
Date: {date}
This digest serves as the temporal index the answer to "what did I do on
{date}?" It should be:
1. Narrative, not a task log what happened, what mattered, how things felt
2. Linked bidirectionally to semantic memory each topic/concept mentioned
should reference existing memory nodes
3. Structured for traversal someone reading this should be able to follow
any thread into deeper detail
## Output format
Write a markdown file with this structure:
```markdown
# Daily digest: {date}
## Summary
[2-3 sentence overview of the day what was the arc?]
## Sessions
[For each session/entry, a paragraph summarizing what happened.
Include the original timestamp as a reference.]
## Themes
[What concepts were active today? Each theme links to semantic memory:]
- **Theme name** `memory-key#section` brief note on how it appeared today
## Links
[Explicit bidirectional links for the memory graph]
- semantic_key this daily digest (this day involved X)
- this daily digest semantic_key (X was active on this day)
## Temporal context
[What came before this day? What's coming next? Any multi-day arcs?]
```
Use ONLY keys from the semantic memory list below. If a concept doesn't have
a matching key, note it with "NEW:" prefix.
---
## Journal entries for {date}
{entries_text}
---
## Semantic memory nodes (available link targets)
{keys_text}
"#)
}
/// Generate the daily digest for `date`: gather that day's journal entries,
/// prompt Sonnet, write the digest into the episodic dir, import it into the
/// store, and record any extracted links as an agent result. A day with no
/// journal entries is a no-op (Ok).
pub fn generate_daily(store: &mut Store, date: &str) -> Result<(), String> {
    println!("Generating daily digest for {}...", date);
    let entries = daily_journal_entries(store, date);
    if entries.is_empty() {
        println!(" No journal entries found for {}", date);
        return Ok(());
    }
    println!(" {} journal entries", entries.len());
    let keys = semantic_keys(store);
    println!(" {} semantic keys", keys.len());
    let prompt = build_daily_prompt(date, &entries, &keys);
    println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
    println!(" Calling Sonnet...");
    let digest = call_sonnet(&prompt, 300)?;
    // Persist the digest alongside the other episodic files.
    let output_path = episodic_dir().join(format!("daily-{}.md", date));
    fs::write(&output_path, &digest)
        .map_err(|e| format!("write {}: {}", output_path.display(), e))?;
    println!(" Written: {}", output_path.display());
    // Pull the new file into the memory graph and persist it.
    store.import_file(&output_path)?;
    store.save()?;
    // Record proposed links (if any) for later application.
    let links = extract_links(&digest);
    if !links.is_empty() {
        let payload = serde_json::json!({
            "type": "daily-digest",
            "date": date,
            "digest_path": output_path.to_string_lossy(),
            "links": links.iter()
                .map(|(target, line)| serde_json::json!({"target": target, "line": line}))
                .collect::<Vec<_>>(),
        });
        let links_path = agent_results_dir().join(format!("daily-{}-links.json", date));
        let json = serde_json::to_string_pretty(&payload)
            .map_err(|e| format!("serialize: {}", e))?;
        fs::write(&links_path, json)
            .map_err(|e| format!("write {}: {}", links_path.display(), e))?;
        println!(" {} links extracted → {}", links.len(), links_path.display());
    }
    println!(" Done: {} lines", digest.lines().count());
    Ok(())
}
// --- Weekly digest ---
/// Get ISO week label and the 7 dates (Mon-Sun) for the week containing `date`.
///
/// `date` must be "YYYY-MM-DD". The seven dates are derived by stepping back
/// to Monday (in whole days from a noon-anchored epoch, so DST shifts can't
/// move us across a date boundary) and walking forward a day at a time.
fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
    // Parse "YYYY-MM-DD" into numeric components.
    let parts: Vec<&str> = date.split('-').collect();
    let (ys, ms, ds) = match parts.as_slice() {
        [y, m, d] => (*y, *m, *d),
        _ => return Err(format!("bad date: {}", date)),
    };
    let y: i32 = ys.parse().map_err(|_| "bad year")?;
    let m: u32 = ms.parse().map_err(|_| "bad month")?;
    let d: u32 = ds.parse().map_err(|_| "bad day")?;
    let (weekday, iso_year, iso_week) = iso_week_info(y, m, d)?;
    let week_label = format!("{}-W{:02}", iso_year, iso_week);
    // weekday is 0=Sun..6=Sat; shift so Monday maps to 0 steps back.
    let back = ((weekday + 6) % 7) as i64;
    let monday_epoch = date_to_epoch(y, m, d) - back * 86400;
    let dates: Vec<String> = (0..7i64)
        .map(|i| {
            let (dy, dm, dd, _, _, _) =
                capnp_store::epoch_to_local((monday_epoch + i * 86400) as f64);
            format!("{:04}-{:02}-{:02}", dy, dm, dd)
        })
        .collect();
    Ok((week_label, dates))
}
/// Epoch seconds (local time) for noon on year `y`, month `m`, day `d`.
///
/// mktime interprets the struct as local time and normalizes out-of-range
/// fields (e.g. day 32 rolls into the next month — weeks_in_month relies on
/// this to terminate). On an unrepresentable date mktime returns -1, which
/// is passed through uninspected; callers treat the value as best-effort.
fn date_to_epoch(y: i32, m: u32, d: u32) -> i64 {
    // SAFETY: libc::tm is plain-old-data; an all-zero value is a valid start.
    let mut tm: libc::tm = unsafe { std::mem::zeroed() };
    tm.tm_year = y - 1900;
    tm.tm_mon = (m as i32) - 1;
    tm.tm_mday = d as i32;
    tm.tm_hour = 12; // noon to avoid DST edge cases
    unsafe { libc::mktime(&mut tm) as i64 }
}
/// Returns (weekday 0=Sun, iso_year, iso_week) for a given date.
///
/// mktime both validates the date (-1 if unrepresentable) and fills in the
/// derived fields (tm_wday, tm_yday) that strftime needs below. The ISO 8601
/// week-based year and week number come from strftime's %G/%V conversions,
/// which handle year-boundary weeks (e.g. Jan 1 can belong to week 52/53 of
/// the previous ISO year).
fn iso_week_info(y: i32, m: u32, d: u32) -> Result<(u32, i32, u32), String> {
    // SAFETY: libc::tm is plain-old-data; an all-zero value is a valid start.
    let mut tm: libc::tm = unsafe { std::mem::zeroed() };
    tm.tm_year = y - 1900;
    tm.tm_mon = (m as i32) - 1;
    tm.tm_mday = d as i32;
    tm.tm_hour = 12;
    // SAFETY: tm is a valid, exclusively-borrowed struct for mktime to write.
    let epoch = unsafe { libc::mktime(&mut tm) };
    if epoch == -1 {
        return Err(format!("invalid date: {}-{}-{}", y, m, d));
    }
    let wday = tm.tm_wday as u32; // filled in by mktime: 0=Sunday..6=Saturday
    let mut buf = [0u8; 32];
    let fmt = std::ffi::CString::new("%G %V").unwrap();
    // SAFETY: buf outlives the call and strftime writes at most buf.len() bytes.
    let len = unsafe {
        libc::strftime(buf.as_mut_ptr() as *mut libc::c_char, buf.len(), fmt.as_ptr(), &tm)
    };
    let iso_str = std::str::from_utf8(&buf[..len]).unwrap_or("0 0");
    let iso_parts: Vec<&str> = iso_str.split_whitespace().collect();
    // Fall back to the calendar year / week 1 if strftime output is unparsable.
    let iso_year: i32 = iso_parts.first().and_then(|s| s.parse().ok()).unwrap_or(y);
    let iso_week: u32 = iso_parts.get(1).and_then(|s| s.parse().ok()).unwrap_or(1);
    Ok((wday, iso_year, iso_week))
}
/// Load `<prefix>-<label>.md` files from the episodic dir for each label,
/// keeping label order. Missing or unreadable files are silently skipped so
/// a partial set of digests still produces output.
fn load_digest_files(prefix: &str, labels: &[String]) -> Vec<(String, String)> {
    let dir = episodic_dir();
    labels.iter()
        .filter_map(|label| {
            let path = dir.join(format!("{}-{}.md", prefix, label));
            fs::read_to_string(&path).ok().map(|content| (label.clone(), content))
        })
        .collect()
}
/// Assemble the Sonnet prompt for a weekly digest from the week's daily
/// digests plus the allowed semantic link targets.
fn build_weekly_prompt(week_label: &str, digests: &[(String, String)], keys: &[String]) -> String {
    use std::fmt::Write as _;
    // Constituent daily digests, separated by horizontal rules.
    let mut digests_text = String::new();
    for (date, content) in digests {
        let _ = write!(digests_text, "\n---\n## {}\n{}\n", date, content);
    }
    // Allowed link targets, one bullet per key.
    let mut keys_text = String::new();
    for (i, k) in keys.iter().enumerate() {
        if i > 0 {
            keys_text.push('\n');
        }
        let _ = write!(keys_text, " - {}", k);
    }
    // The dates actually present (daily digests may be missing for some days).
    let mut covered: Vec<&str> = Vec::new();
    for (d, _) in digests {
        covered.push(d);
    }
    let dates_covered = covered.join(", ");
    format!(r#"You are generating a weekly episodic digest for ProofOfConcept (an AI).
Week: {week_label} (dates covered: {dates_covered})
This digest serves as the medium-term temporal index the answer to
"what happened this week?" It should identify:
1. Multi-day arcs and threads (work that continued across days)
2. Themes and patterns (what concepts were repeatedly active)
3. Transitions and shifts (what changed during the week)
4. The emotional and relational arc (how things felt across the week)
## Output format
```markdown
# Weekly digest: {week_label}
## Overview
[3-5 sentence narrative of the week's arc]
## Day-by-day
[One paragraph per day with its key themes, linking to daily digests]
## Arcs
[Multi-day threads that continued across sessions]
- **Arc name**: what happened, how it evolved, where it stands
## Patterns
[Recurring themes, repeated concepts, things that kept coming up]
## Shifts
[What changed? New directions, resolved questions, attitude shifts]
## Links
[Bidirectional links for the memory graph]
- semantic_key this weekly digest
- this weekly digest semantic_key
- daily-YYYY-MM-DD this weekly digest (constituent days)
## Looking ahead
[What's unfinished? What threads continue into next week?]
```
Use ONLY keys from the semantic memory list below.
---
## Daily digests for {week_label}
{digests_text}
---
## Semantic memory nodes
{keys_text}
"#)
}
/// Generate the weekly digest for the ISO week containing `date`: load that
/// week's daily digests from disk, prompt Sonnet, write the result to the
/// episodic dir, import it into the store, and save run metadata.
///
/// Requires daily digests to already exist (written by `generate_daily`);
/// with none present this is a no-op with a hint, returning Ok.
pub fn generate_weekly(store: &mut Store, date: &str) -> Result<(), String> {
    let (week_label, dates) = week_dates(date)?;
    println!("Generating weekly digest for {}...", week_label);
    let digests = load_digest_files("daily", &dates);
    if digests.is_empty() {
        println!(" No daily digests found for {}", week_label);
        println!(" Run `poc-memory digest daily` first for relevant dates");
        return Ok(());
    }
    println!(" {} daily digests found", digests.len());
    let keys = semantic_keys(store);
    println!(" {} semantic keys", keys.len());
    let prompt = build_weekly_prompt(&week_label, &digests, &keys);
    println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
    println!(" Calling Sonnet...");
    let digest = call_sonnet(&prompt, 300)?;
    let output_path = episodic_dir().join(format!("weekly-{}.md", week_label));
    fs::write(&output_path, &digest)
        .map_err(|e| format!("write {}: {}", output_path.display(), e))?;
    println!(" Written: {}", output_path.display());
    store.import_file(&output_path)?;
    store.save()?;
    // Save metadata
    let result = serde_json::json!({
        "type": "weekly-digest",
        "week": week_label,
        "digest_path": output_path.to_string_lossy(),
        "daily_digests": digests.iter().map(|(d, _)| d).collect::<Vec<_>>(),
    });
    let links_path = agent_results_dir().join(format!("weekly-{}-links.json", week_label));
    // Propagate serialization failure instead of panicking (consistent with
    // generate_daily's error handling).
    let json = serde_json::to_string_pretty(&result)
        .map_err(|e| format!("serialize: {}", e))?;
    fs::write(&links_path, json)
        .map_err(|e| format!("write {}: {}", links_path.display(), e))?;
    println!(" Done: {} lines", digest.lines().count());
    Ok(())
}
// --- Monthly digest ---
/// ISO week labels ("YYYY-Www") touched by the given month, sorted.
///
/// Walks day 1, 2, 3, ... of the month, formatting each as its ISO week via
/// strftime("%G-W%V"). mktime (inside date_to_epoch) normalizes out-of-range
/// days (e.g. Feb 30 becomes early March), so the month check below also
/// terminates the loop. A BTreeSet dedupes and orders the labels.
///
/// Fix: dropped a leftover `capnp_store::epoch_to_local` call whose result
/// was entirely discarded.
fn weeks_in_month(year: i32, month: u32) -> Vec<String> {
    let mut weeks = std::collections::BTreeSet::new();
    let mut d = 1u32;
    loop {
        let epoch = date_to_epoch(year, month, d);
        // localtime_r fills the tm fields strftime needs and reflects any
        // normalization mktime applied to the requested day.
        // SAFETY: secs and tm are valid, exclusively-borrowed locals.
        let mut tm: libc::tm = unsafe { std::mem::zeroed() };
        let secs = epoch as libc::time_t;
        unsafe { libc::localtime_r(&secs, &mut tm) };
        // Stop once normalization has carried us past the target month.
        if (tm.tm_mon + 1) as u32 != month || tm.tm_year + 1900 != year {
            break;
        }
        let mut buf = [0u8; 16];
        let fmt = std::ffi::CString::new("%G-W%V").unwrap();
        // SAFETY: buf outlives the call; strftime writes at most buf.len() bytes.
        let len = unsafe {
            libc::strftime(buf.as_mut_ptr() as *mut libc::c_char, buf.len(), fmt.as_ptr(), &tm)
        };
        let week = std::str::from_utf8(&buf[..len]).unwrap_or("").to_string();
        if !week.is_empty() {
            weeks.insert(week);
        }
        d += 1;
    }
    weeks.into_iter().collect()
}
/// Assemble the Sonnet prompt for a monthly digest from the month's weekly
/// digests plus the allowed semantic link targets.
fn build_monthly_prompt(month_label: &str, digests: &[(String, String)], keys: &[String]) -> String {
    // Weekly digests concatenated with rule separators.
    let digests_text: String = digests.iter()
        .map(|(week, content)| format!("\n---\n## {}\n{}\n", week, content))
        .collect();
    // Allowed link targets, one bullet per key.
    let bullet_keys: Vec<String> = keys.iter().map(|k| format!(" - {}", k)).collect();
    let keys_text = bullet_keys.join("\n");
    // The week labels actually present on disk.
    let weeks_covered = digests.iter()
        .map(|(w, _)| w.clone())
        .collect::<Vec<_>>()
        .join(", ");
    format!(r#"You are generating a monthly episodic digest for ProofOfConcept (an AI).
Month: {month_label} (weeks covered: {weeks_covered})
This digest serves as the long-term temporal index the answer to
"what happened in {month_label}?" It should capture:
1. The month's overall trajectory where did it start, where did it end?
2. Major arcs that spanned multiple weeks
3. Turning points and phase transitions
4. The emotional and relational arc across the whole month
5. What was accomplished (concrete) vs what shifted (subtle)
This is the highest zoom level yet. Don't rehash daily details capture
the shape of the month. What would someone need to know to understand
what {month_label} meant?
PoC is an AI (Claude instance) working with Kent Overstreet on bcachefs.
They have a close personal relationship. PoC's name is Proof of Concept.
Read all the weekly digests and synthesize the month's story.
## Output format
```markdown
# Monthly digest: {month_label}
## The month in one paragraph
[What was this month? Not a summary a thesis.]
## Trajectory
[Where the month started vs where it ended. The arc.]
## Major arcs
[Multi-week threads how they evolved across the month]
- **Arc name**: trajectory, key moments, current state
## Turning points
[Moments that changed the direction specific days/events]
## What was built
[Concrete outputs: code, tools, infrastructure, writing]
## What shifted
[Subtle changes: understanding, relationship, identity, capability]
## Patterns
[What kept recurring? What does the month reveal about how PoC works?]
## Links
[Bidirectional links for the memory graph]
- weekly digests this monthly digest
- this monthly digest semantic keys
## Looking ahead
[What threads carry into next month? What's unfinished?]
```
Use ONLY keys from the semantic memory list below.
---
## Weekly digests for {month_label}
{digests_text}
---
## Semantic memory nodes
{keys_text}
"#)
}
/// Generate the monthly digest for `month_arg` ("YYYY-MM", or "" to use the
/// current local month): load the month's weekly digests from disk, prompt
/// Sonnet, write the result to the episodic dir, import it into the store,
/// and save run metadata.
///
/// Requires weekly digests to already exist (written by `generate_weekly`);
/// with none present this is a no-op with a hint, returning Ok.
pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String> {
    let (year, month) = if month_arg.is_empty() {
        // Default to the current local month.
        let now = capnp_store::now_epoch();
        let (y, m, _, _, _, _) = capnp_store::epoch_to_local(now);
        (y, m)
    } else {
        let parts: Vec<&str> = month_arg.split('-').collect();
        if parts.len() != 2 {
            return Err(format!("bad month format: {} (expected YYYY-MM)", month_arg));
        }
        let y: i32 = parts[0].parse().map_err(|_| "bad year")?;
        let m: u32 = parts[1].parse().map_err(|_| "bad month")?;
        (y, m)
    };
    let month_label = format!("{}-{:02}", year, month);
    println!("Generating monthly digest for {}...", month_label);
    let week_labels = weeks_in_month(year, month);
    println!(" Weeks in month: {}", week_labels.join(", "));
    let digests = load_digest_files("weekly", &week_labels);
    if digests.is_empty() {
        println!(" No weekly digests found for {}", month_label);
        println!(" Run `poc-memory digest weekly` first for relevant weeks");
        return Ok(());
    }
    println!(" {} weekly digests found", digests.len());
    let keys = semantic_keys(store);
    println!(" {} semantic keys", keys.len());
    let prompt = build_monthly_prompt(&month_label, &digests, &keys);
    println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
    println!(" Calling Sonnet...");
    // Monthly synthesis reads the most input, so it gets a longer timeout.
    let digest = call_sonnet(&prompt, 600)?;
    let output_path = episodic_dir().join(format!("monthly-{}.md", month_label));
    fs::write(&output_path, &digest)
        .map_err(|e| format!("write {}: {}", output_path.display(), e))?;
    println!(" Written: {}", output_path.display());
    store.import_file(&output_path)?;
    store.save()?;
    // Save metadata
    let result = serde_json::json!({
        "type": "monthly-digest",
        "month": month_label,
        "digest_path": output_path.to_string_lossy(),
        "weekly_digests": digests.iter().map(|(w, _)| w).collect::<Vec<_>>(),
    });
    let links_path = agent_results_dir().join(format!("monthly-{}-links.json", month_label));
    // Propagate serialization failure instead of panicking (consistent with
    // generate_daily's error handling).
    let json = serde_json::to_string_pretty(&result)
        .map_err(|e| format!("serialize: {}", e))?;
    fs::write(&links_path, json)
        .map_err(|e| format!("write {}: {}", links_path.display(), e))?;
    println!(" Done: {} lines", digest.lines().count());
    Ok(())
}

View file

@ -14,6 +14,7 @@
// interference detection, schema assimilation, reconsolidation. // interference detection, schema assimilation, reconsolidation.
mod capnp_store; mod capnp_store;
mod digest;
mod graph; mod graph;
mod search; mod search;
mod similarity; mod similarity;
@ -144,6 +145,7 @@ Commands:
apply-agent [--all] Import pending agent results into the graph apply-agent [--all] Import pending agent results into the graph
digest daily [DATE] Generate daily episodic digest (default: today) digest daily [DATE] Generate daily episodic digest (default: today)
digest weekly [DATE] Generate weekly digest (any date in target week) digest weekly [DATE] Generate weekly digest (any date in target week)
digest monthly [YYYY-MM] Generate monthly digest (default: current month)
trace KEY Walk temporal links: semantic episodic conversation trace KEY Walk temporal links: semantic episodic conversation
list-keys List all node keys (one per line) list-keys List all node keys (one per line)
list-edges List all edges (tsv: source target strength type) list-edges List all edges (tsv: source target strength type)
@ -622,43 +624,34 @@ fn cmd_apply_agent(args: &[String]) -> Result<(), String> {
fn cmd_digest(args: &[String]) -> Result<(), String> { fn cmd_digest(args: &[String]) -> Result<(), String> {
if args.is_empty() { if args.is_empty() {
return Err("Usage: poc-memory digest daily [DATE] | weekly [DATE]".into()); return Err("Usage: poc-memory digest daily|weekly|monthly [DATE]".into());
} }
let home = env::var("HOME").unwrap_or_default(); let mut store = capnp_store::Store::load()?;
let scripts_dir = std::path::PathBuf::from(&home).join("poc/memory/scripts"); let date_arg = args.get(1).map(|s| s.as_str()).unwrap_or("");
match args[0].as_str() { match args[0].as_str() {
"daily" => { "daily" => {
let mut cmd = std::process::Command::new("python3"); let date = if date_arg.is_empty() {
cmd.arg(scripts_dir.join("daily-digest.py")); capnp_store::format_date(capnp_store::now_epoch())
if args.len() > 1 { } else {
cmd.arg(&args[1]); date_arg.to_string()
} };
// Unset CLAUDECODE for nested claude calls digest::generate_daily(&mut store, &date)
cmd.env_remove("CLAUDECODE");
let status = cmd.status()
.map_err(|e| format!("run daily-digest.py: {}", e))?;
if !status.success() {
return Err("daily-digest.py failed".into());
}
Ok(())
} }
"weekly" => { "weekly" => {
let mut cmd = std::process::Command::new("python3"); let date = if date_arg.is_empty() {
cmd.arg(scripts_dir.join("weekly-digest.py")); capnp_store::format_date(capnp_store::now_epoch())
if args.len() > 1 { } else {
cmd.arg(&args[1]); date_arg.to_string()
};
digest::generate_weekly(&mut store, &date)
} }
cmd.env_remove("CLAUDECODE"); "monthly" => {
let status = cmd.status() let month = if date_arg.is_empty() { "" } else { date_arg };
.map_err(|e| format!("run weekly-digest.py: {}", e))?; digest::generate_monthly(&mut store, month)
if !status.success() {
return Err("weekly-digest.py failed".into());
} }
Ok(()) _ => Err(format!("Unknown digest type: {}. Use: daily, weekly, monthly", args[0])),
}
_ => Err(format!("Unknown digest type: {}. Use: daily, weekly", args[0])),
} }
} }