scripts: use capnp store instead of reading markdown directly

Add store_helpers.py with shared helpers that call poc-memory commands
(list-keys, render, journal-tail) instead of globbing ~/.claude/memory/*.md
and parsing section headers.

All 9 Python scripts updated: get_semantic_keys(), get_topic_file_index(),
get_recent_journal(), parse_journal_entries(), read_journal_range(),
collect_topic_stems(), and file preview rendering now go through the store.

This completes the clean switch — no script reads archived markdown files.
This commit is contained in:
ProofOfConcept 2026-02-28 23:32:47 -05:00
parent f20ea4f827
commit d14710e477
10 changed files with 324 additions and 297 deletions

View file

@@ -23,7 +23,6 @@ from datetime import date, datetime
from pathlib import Path
MEMORY_DIR = Path.home() / ".claude" / "memory"
JOURNAL = MEMORY_DIR / "journal.md"
EPISODIC_DIR = MEMORY_DIR / "episodic"
AGENT_RESULTS_DIR = MEMORY_DIR / "agent-results"
@@ -31,41 +30,9 @@ EPISODIC_DIR.mkdir(parents=True, exist_ok=True)
def parse_journal_entries(target_date: str) -> list[dict]:
    """Get journal entries for a given date (YYYY-MM-DD) from the store.

    Delegates to store_helpers, which queries the poc-memory store
    instead of opening journal.md and regex-matching "## <timestamp>"
    section headers (the legacy approach this commit removes).

    Args:
        target_date: Date string in YYYY-MM-DD form to filter entries by.

    Returns:
        A list of entry dicts for the given date, as produced by
        store_helpers.get_journal_entries_by_date.
    """
    # Imported lazily so scripts that never read the journal don't
    # require store_helpers (and its poc-memory dependency) at import time.
    from store_helpers import get_journal_entries_by_date
    return get_journal_entries_by_date(target_date)
def load_agent_results(target_date: str) -> list[dict]:
@@ -90,24 +57,9 @@ def load_agent_results(target_date: str) -> list[dict]:
def get_semantic_keys() -> list[str]:
    """Get all semantic memory keys from the store.

    Replaces the legacy implementation that globbed *.md files under
    the memory directory (skipping journal.md, work-queue.md and
    MEMORY.md) and parsed "## " section headers into "file#slug" keys;
    the store now owns that key derivation.

    Returns:
        A list of semantic memory key strings from store_helpers.
    """
    # Lazy import: keeps store_helpers optional until a caller needs keys,
    # and the alias avoids shadowing this wrapper's own name.
    from store_helpers import get_semantic_keys as _get_keys
    return _get_keys()
def build_digest_prompt(target_date: str, entries: list[dict],