poc-memory v0.4.0: graph-structured memory with consolidation pipeline

Rust core:
- Cap'n Proto append-only storage (nodes + relations)
- Graph algorithms: clustering coefficient, community detection,
  schema fit, small-world metrics, interference detection
- BM25 text similarity with Porter stemming
- Spaced repetition replay queue
- Commands: search, init, health, status, graph, categorize,
  link-add, link-impact, decay, consolidate-session, etc.

Python scripts:
- Episodic digest pipeline: daily/weekly/monthly-digest.py
- retroactive-digest.py for backfilling
- consolidation-agents.py: 3 parallel Sonnet agents
- apply-consolidation.py: structured action extraction + apply
- digest-link-parser.py: extract ~400 explicit links from digests
- content-promotion-agent.py: promote episodic obs to semantic files
- bulk-categorize.py: categorize all nodes via single Sonnet call
- consolidation-loop.py: multi-round automated consolidation

Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
Commit 23fac4e5fe by ProofOfConcept, 2026-02-28 22:17:00 -05:00 — 35 changed files, 9388 insertions, 0 deletions.

scripts/weekly-digest.py — new executable file, 227 lines (shown below).

@ -0,0 +1,227 @@
#!/usr/bin/env python3
"""weekly-digest.py — generate a weekly episodic digest from daily digests.
Collects all daily digests for a given week, identifies cross-day patterns
and arcs, and produces a weekly summary. Links to daily digests (up) and
semantic memory (down).
Usage:
weekly-digest.py [DATE] # any date in the target week (default: today)
weekly-digest.py 2026-02-28 # generates digest for week containing Feb 28
Output:
~/.claude/memory/episodic/weekly-YYYY-WNN.md
"""
import json
import os
import re
import subprocess
import sys
from datetime import date, datetime, timedelta
from pathlib import Path
# On-disk memory layout: ~/.claude/memory holds semantic *.md files;
# episodic/ holds daily/weekly digests; agent-results/ holds JSON
# side-outputs consumed by the poc-memory ingestion step.
MEMORY_DIR = Path.home() / ".claude" / "memory"
EPISODIC_DIR = MEMORY_DIR / "episodic"
AGENT_RESULTS_DIR = MEMORY_DIR / "agent-results"
# Ensure the episodic directory exists at import time (script-style module).
# NOTE(review): AGENT_RESULTS_DIR is written to later but never created
# here — confirm it exists before main() writes the links JSON.
EPISODIC_DIR.mkdir(parents=True, exist_ok=True)
def get_week_dates(target: date) -> tuple[str, list[date]]:
    """Return the ISO week label ("YYYY-WNN") for *target* and the seven
    dates (Monday through Sunday) of that week, in order."""
    calendar = target.isocalendar()
    label = f"{calendar.year}-W{calendar.week:02d}"
    # target.weekday() is 0 for Monday, so this lands on the week's start.
    week_start = target - timedelta(days=target.weekday())
    return label, [week_start + timedelta(days=offset) for offset in range(7)]
def load_daily_digests(dates: list[date]) -> list[dict]:
    """Read the daily digest markdown file for each date in *dates*.

    Days with no digest file on disk are silently skipped. Returns a
    list of {"date", "content", "path"} dicts in input-date order.
    """
    found = []
    for day in dates:
        digest_path = EPISODIC_DIR / f"daily-{day.isoformat()}.md"
        if not digest_path.exists():
            continue
        found.append({
            "date": day.isoformat(),
            "content": digest_path.read_text(),
            "path": str(digest_path),
        })
    return found
def get_semantic_keys() -> list[str]:
    """Collect link targets from semantic memory.

    Yields one key per markdown file in MEMORY_DIR (by filename) plus
    one "file.md#slug" key per "## " heading inside it. Reading is
    best-effort: an unreadable file still contributes its filename key.
    """
    # These files are operational, not semantic memory nodes.
    skip = {"journal.md", "work-queue.md", "MEMORY.md"}
    # Hoisted + compiled once: strips anything outside [a-z0-9-].
    slug_re = re.compile(r'[^a-z0-9-]')
    keys = []
    for md in sorted(MEMORY_DIR.glob("*.md")):
        name = md.name
        if name in skip:
            continue
        keys.append(name)
        try:
            with open(md) as f:
                for line in f:
                    if line.startswith("## "):
                        # "## My Heading" -> "my-heading"
                        slug = slug_re.sub(
                            '', line[3:].strip().lower().replace(' ', '-'))
                        if slug:
                            keys.append(f"{name}#{slug}")
        except (OSError, UnicodeError):
            # Narrowed from bare `except Exception`: only I/O and decode
            # failures are expected; anything else should surface.
            pass
    return keys
def build_weekly_prompt(week_label: str, digests: list[dict],
                        semantic_keys: list[str]) -> str:
    """Assemble the Sonnet prompt: task instructions, the week's daily
    digests, and the semantic-key vocabulary (capped at 200 keys)."""
    day_blocks = "".join(
        f"\n---\n## {d['date']}\n{d['content']}\n" for d in digests)
    key_list = "\n".join(f" - {k}" for k in semantic_keys[:200])
    covered_dates = ", ".join(d["date"] for d in digests)
    return f"""You are generating a weekly episodic digest for ProofOfConcept (an AI).
Week: {week_label} (dates covered: {covered_dates})
This digest serves as the medium-term temporal index the answer to
"what happened this week?" It should identify:
1. Multi-day arcs and threads (work that continued across days)
2. Themes and patterns (what concepts were repeatedly active)
3. Transitions and shifts (what changed during the week)
4. The emotional and relational arc (how things felt across the week)
## Output format
```markdown
# Weekly digest: {week_label}
## Overview
[3-5 sentence narrative of the week's arc]
## Day-by-day
[One paragraph per day with its key themes, linking to daily digests]
## Arcs
[Multi-day threads that continued across sessions]
- **Arc name**: what happened, how it evolved, where it stands
## Patterns
[Recurring themes, repeated concepts, things that kept coming up]
## Shifts
[What changed? New directions, resolved questions, attitude shifts]
## Links
[Bidirectional links for the memory graph]
- semantic_key this weekly digest
- this weekly digest semantic_key
- daily-YYYY-MM-DD this weekly digest (constituent days)
## Looking ahead
[What's unfinished? What threads continue into next week?]
```
Use ONLY keys from the semantic memory list below.
---
## Daily digests for {week_label}
{day_blocks}
---
## Semantic memory nodes
{key_list}
"""
def call_sonnet(prompt: str) -> str:
    """Send *prompt* to Sonnet via the call-sonnet.sh wrapper script.

    The prompt is written to a temp file (avoids subprocess pipe-size
    issues) and the wrapper next to this script is invoked with a
    5-minute timeout. Returns the model's stdout on success, or a
    string starting with "Error:" on failure — callers test that
    prefix.
    """
    import tempfile
    env = dict(os.environ)
    # Presumably the wrapper must not believe it is running inside a
    # Claude Code session — confirm against call-sonnet.sh.
    env.pop("CLAUDECODE", None)
    # Write prompt to temp file — avoids Python subprocess pipe issues
    with tempfile.NamedTemporaryFile(mode='w', suffix='.txt',
                                     delete=False) as f:
        f.write(prompt)
        prompt_file = f.name
    try:
        scripts_dir = os.path.dirname(os.path.abspath(__file__))
        wrapper = os.path.join(scripts_dir, "call-sonnet.sh")
        result = subprocess.run(
            [wrapper, prompt_file],
            capture_output=True,
            text=True,
            timeout=300,
            env=env,
        )
        # Fix: a failing wrapper was previously reported as (often empty)
        # success; surface the exit status and stderr instead so main()'s
        # "Error:" check catches it.
        if result.returncode != 0:
            stderr = result.stderr.strip()
            return f"Error: wrapper exited {result.returncode}: {stderr}"
        return result.stdout.strip()
    except subprocess.TimeoutExpired:
        return "Error: Sonnet call timed out"
    except Exception as e:
        return f"Error: {e}"
    finally:
        # Always remove the temp prompt file, success or failure.
        os.unlink(prompt_file)
def main():
    """Generate the weekly digest for the week containing argv[1]
    (ISO date, default today), write it to EPISODIC_DIR, and save a
    links JSON for poc-memory ingestion.

    Exits 0 when there is nothing to digest, 1 when the Sonnet call
    fails. Raises ValueError if argv[1] is not an ISO date.
    """
    if len(sys.argv) > 1:
        target = date.fromisoformat(sys.argv[1])
    else:
        target = date.today()
    week_label, week_dates = get_week_dates(target)
    print(f"Generating weekly digest for {week_label}...")
    digests = load_daily_digests(week_dates)
    if not digests:
        # No daily digests yet — not an error, just nothing to do.
        print(f" No daily digests found for {week_label}")
        # Fix: was an f-string with no placeholders.
        print(" Run daily-digest.py first for relevant dates")
        sys.exit(0)
    print(f" {len(digests)} daily digests found")
    semantic_keys = get_semantic_keys()
    print(f" {len(semantic_keys)} semantic keys")
    prompt = build_weekly_prompt(week_label, digests, semantic_keys)
    print(f" Prompt: {len(prompt):,} chars (~{len(prompt)//4:,} tokens)")
    print(" Calling Sonnet...")
    digest = call_sonnet(prompt)
    if digest.startswith("Error:"):
        print(f" {digest}", file=sys.stderr)
        sys.exit(1)
    output_path = EPISODIC_DIR / f"weekly-{week_label}.md"
    with open(output_path, "w") as f:
        f.write(digest)
    print(f" Written: {output_path}")
    # Save links for poc-memory
    # Fix: the module top only creates EPISODIC_DIR; make sure the
    # agent-results directory exists before writing into it.
    AGENT_RESULTS_DIR.mkdir(parents=True, exist_ok=True)
    links_path = AGENT_RESULTS_DIR / f"weekly-{week_label}-links.json"
    with open(links_path, "w") as f:
        json.dump({
            "type": "weekly-digest",
            "week": week_label,
            "digest_path": str(output_path),
            "daily_digests": [d["path"] for d in digests],
        }, f, indent=2)
    print(f" Links saved: {links_path}")
    line_count = len(digest.split("\n"))
    print(f" Done: {line_count} lines")
# Standard script entry guard — run only when executed directly.
if __name__ == "__main__":
    main()