#!/usr/bin/env python3
"""weekly-digest.py — generate a weekly episodic digest from daily digests.

Collects all daily digests for a given week, identifies cross-day patterns
and arcs, and produces a weekly summary. Links to daily digests (up) and
semantic memory (down).

Usage:
    weekly-digest.py [DATE]      # any date in the target week (default: today)
    weekly-digest.py 2026-02-28  # generates digest for week containing Feb 28

Output:
    ~/.claude/memory/episodic/weekly-YYYY-WNN.md
"""
|
import json
|
||
|
|
import os
|
||
|
|
import re
|
||
|
|
import subprocess
|
||
|
|
import sys
|
||
|
|
from datetime import date, datetime, timedelta
|
||
|
|
from pathlib import Path
|
||
|
|
|
||
|
|
# Memory hierarchy roots — everything this script reads and writes lives
# under ~/.claude/memory.
MEMORY_DIR = Path.home() / ".claude" / "memory"
EPISODIC_DIR = MEMORY_DIR / "episodic"            # daily/weekly digest markdown
AGENT_RESULTS_DIR = MEMORY_DIR / "agent-results"  # machine-readable link dumps

# Create both output directories up front so writes in main() cannot fail
# with FileNotFoundError.  Previously only EPISODIC_DIR was created, but
# main() also writes a links JSON into AGENT_RESULTS_DIR.
EPISODIC_DIR.mkdir(parents=True, exist_ok=True)
AGENT_RESULTS_DIR.mkdir(parents=True, exist_ok=True)
def get_week_dates(target: date) -> tuple[str, list[date]]:
    """Return the ISO week label ("YYYY-WNN") for *target* plus every
    date in that week, Monday through Sunday inclusive."""
    calendar = target.isocalendar()
    label = "{}-W{:02d}".format(calendar.year, calendar.week)
    # Monday of the week containing target (weekday() is 0 for Monday).
    week_start = target - timedelta(days=target.weekday())
    week = []
    for offset in range(7):
        week.append(week_start + timedelta(days=offset))
    return label, week
def load_daily_digests(dates: list[date]) -> list[dict]:
    """Load daily digest files for the given dates.

    Dates with no digest file on disk are silently skipped, so the
    result may hold fewer entries than *dates*.  Each entry carries the
    ISO date, the file's full text, and its path as a string.
    """
    found = []
    for day in dates:
        digest_path = EPISODIC_DIR / f"daily-{day.isoformat()}.md"
        if not digest_path.exists():
            continue
        found.append({
            "date": day.isoformat(),
            "content": digest_path.read_text(),
            "path": str(digest_path),
        })
    return found
def get_semantic_keys() -> list[str]:
    """Collect addressable keys for the semantic memory graph.

    Yields one key per markdown file in MEMORY_DIR (its filename) plus
    one "file.md#slug" key per "## " heading inside it.  Bookkeeping
    files (journal, work queue, index) are excluded.
    """
    collected = []
    for md_file in sorted(MEMORY_DIR.glob("*.md")):
        filename = md_file.name
        if filename in ("journal.md", "work-queue.md", "MEMORY.md"):
            continue
        collected.append(filename)
        try:
            with open(md_file) as handle:
                for heading in handle:
                    if not heading.startswith("## "):
                        continue
                    # Slugify the heading text: lowercase, hyphens for
                    # spaces, strip everything else.
                    slug = re.sub(r'[^a-z0-9-]', '',
                                  heading[3:].strip().lower().replace(' ', '-'))
                    if slug:
                        collected.append(f"{filename}#{slug}")
        except Exception:
            # Best-effort: an unreadable file still contributes its
            # filename key, just no heading keys.
            pass
    return collected
def build_weekly_prompt(week_label: str, digests: list[dict],
                        semantic_keys: list[str]) -> str:
    """Assemble the Sonnet prompt for one weekly digest.

    Concatenates the week's daily digests, a capped list of semantic
    memory keys, and the output-format instructions into a single
    prompt string.
    """
    digests_text = "".join(
        f"\n---\n## {entry['date']}\n{entry['content']}\n" for entry in digests
    )
    # Cap the key list to keep the prompt within a sane token budget.
    keys_text = "\n".join(f" - {key}" for key in semantic_keys[:200])
    dates_covered = ", ".join(entry["date"] for entry in digests)

    return f"""You are generating a weekly episodic digest for ProofOfConcept (an AI).
Week: {week_label} (dates covered: {dates_covered})

This digest serves as the medium-term temporal index — the answer to
"what happened this week?" It should identify:
1. Multi-day arcs and threads (work that continued across days)
2. Themes and patterns (what concepts were repeatedly active)
3. Transitions and shifts (what changed during the week)
4. The emotional and relational arc (how things felt across the week)

## Output format

```markdown
# Weekly digest: {week_label}

## Overview
[3-5 sentence narrative of the week's arc]

## Day-by-day
[One paragraph per day with its key themes, linking to daily digests]

## Arcs
[Multi-day threads that continued across sessions]
- **Arc name**: what happened, how it evolved, where it stands

## Patterns
[Recurring themes, repeated concepts, things that kept coming up]

## Shifts
[What changed? New directions, resolved questions, attitude shifts]

## Links
[Bidirectional links for the memory graph]
- semantic_key → this weekly digest
- this weekly digest → semantic_key
- daily-YYYY-MM-DD → this weekly digest (constituent days)

## Looking ahead
[What's unfinished? What threads continue into next week?]
```

Use ONLY keys from the semantic memory list below.

---

## Daily digests for {week_label}

{digests_text}

---

## Semantic memory nodes

{keys_text}
"""
def call_sonnet(prompt: str) -> str:
    """Call Sonnet via the call-sonnet.sh wrapper around the claude CLI.

    The prompt is passed through a temp file rather than a pipe to avoid
    subprocess pipe deadlocks on large prompts.

    Returns the model's response text, or a string starting with
    "Error:" on timeout, non-zero wrapper exit, or any other failure —
    callers check that prefix rather than catching exceptions.
    """
    import tempfile

    # Strip the nested-session marker so the child claude CLI starts clean.
    # NOTE(review): presumably the CLI refuses to run when CLAUDECODE is
    # set — confirm against call-sonnet.sh.
    env = dict(os.environ)
    env.pop("CLAUDECODE", None)

    # Write prompt to temp file — avoids Python subprocess pipe issues
    with tempfile.NamedTemporaryFile(mode='w', suffix='.txt',
                                     delete=False) as f:
        f.write(prompt)
        prompt_file = f.name

    try:
        # The wrapper lives next to this script.
        scripts_dir = os.path.dirname(os.path.abspath(__file__))
        wrapper = os.path.join(scripts_dir, "call-sonnet.sh")

        result = subprocess.run(
            [wrapper, prompt_file],
            capture_output=True,
            text=True,
            timeout=300,
            env=env,
        )
        # BUGFIX: previously stdout was returned unconditionally, so a
        # failed wrapper produced an empty/garbage "digest" that the
        # caller happily wrote to disk.  Surface the failure instead.
        if result.returncode != 0:
            return (f"Error: call-sonnet.sh exited {result.returncode}: "
                    f"{result.stderr.strip()}")
        return result.stdout.strip()
    except subprocess.TimeoutExpired:
        return "Error: Sonnet call timed out"
    except Exception as e:
        return f"Error: {e}"
    finally:
        # Always remove the temp prompt file, success or failure.
        os.unlink(prompt_file)
def main():
    """CLI entry point: build and write the weekly digest for one week."""
    # Any date inside the target week selects that week; default is today.
    anchor = date.fromisoformat(sys.argv[1]) if len(sys.argv) > 1 else date.today()

    week_label, week_dates = get_week_dates(anchor)
    print(f"Generating weekly digest for {week_label}...")

    digests = load_daily_digests(week_dates)
    if not digests:
        # Nothing to summarize — not an error, just nothing to do yet.
        print(f" No daily digests found for {week_label}")
        print(f" Run daily-digest.py first for relevant dates")
        sys.exit(0)
    print(f" {len(digests)} daily digests found")

    semantic_keys = get_semantic_keys()
    print(f" {len(semantic_keys)} semantic keys")

    prompt = build_weekly_prompt(week_label, digests, semantic_keys)
    print(f" Prompt: {len(prompt):,} chars (~{len(prompt)//4:,} tokens)")

    print(" Calling Sonnet...")
    digest = call_sonnet(prompt)
    if digest.startswith("Error:"):
        print(f" {digest}", file=sys.stderr)
        sys.exit(1)

    out_path = EPISODIC_DIR / f"weekly-{week_label}.md"
    out_path.write_text(digest)
    print(f" Written: {out_path}")

    # Save links for poc-memory
    links_path = AGENT_RESULTS_DIR / f"weekly-{week_label}-links.json"
    payload = {
        "type": "weekly-digest",
        "week": week_label,
        "digest_path": str(out_path),
        "daily_digests": [d["path"] for d in digests],
    }
    links_path.write_text(json.dumps(payload, indent=2))
    print(f" Links saved: {links_path}")

    line_count = digest.count("\n") + 1
    print(f" Done: {line_count} lines")
# Run as a script; importing this module only defines helpers (plus the
# directory-creation side effect at module level).
if __name__ == "__main__":
    main()