store: strip .md suffix from all keys

Keys were a vestige of the file-based era. resolve_key() appended .md
during lookups while upsert() stored bare keys, creating phantom
duplicate nodes (the instructions bug: writes landed on the bare key
"instructions", but reads resolved to "instructions.md").

- Remove .md normalization from resolve_key, strip instead
- Update all hardcoded key patterns (journal.md# → journal#, etc)
- Add strip_md_keys() migration to fsck: renames nodes and relations
- Add broken link detection to health report
- Delete redirect table (no longer needed)
- Update config defaults and config.jsonl

Migration: run `poc-memory fsck` to rename existing keys.

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-03-08 19:41:26 -04:00
parent 77fc533631
commit 46f8fe662e
12 changed files with 289 additions and 132 deletions

View file

@ -364,11 +364,11 @@ fn cmd_init() -> Result<(), String> {
// Initialize store and seed default identity node if empty
let mut store = store::Store::load()?;
let count = store.init_from_markdown()?;
if !store.nodes.contains_key("identity.md") {
if !store.nodes.contains_key("identity") {
let default_identity = include_str!("../defaults/identity.md");
store.upsert("identity.md", default_identity)
store.upsert("identity", default_identity)
.map_err(|e| format!("seed identity: {}", e))?;
println!("Seeded identity.md in store");
println!("Seeded identity in store");
}
store.save()?;
println!("Indexed {} memory units", count);
@ -413,7 +413,25 @@ fn cmd_migrate() -> Result<(), String> {
}
/// Run store consistency checks, migrate legacy `.md`-suffixed keys to
/// bare keys, and report any relations whose endpoints no longer exist.
///
/// Returns `Err` with a human-readable message if the underlying store
/// operations fail; broken links are reported on stderr, not as errors
/// (run `health` for a detailed per-link report).
fn cmd_fsck() -> Result<(), String> {
	store::fsck()?;
	// One-shot migration: rename "foo.md" keys (and their relations) to "foo".
	store::strip_md_keys()?;
	// Post-migration sanity pass: count relations pointing at missing nodes.
	let store = store::Store::load()?;
	let mut orphans = 0usize;
	for rel in &store.relations {
		// Tombstoned relations are expected to dangle; skip them.
		if rel.deleted { continue; }
		if !store.nodes.contains_key(&rel.source_key)
			|| !store.nodes.contains_key(&rel.target_key) {
			orphans += 1;
		}
	}
	if orphans > 0 {
		eprintln!("{} broken links (run `health` for details)", orphans);
	} else {
		eprintln!("No broken links");
	}
	Ok(())
}
fn cmd_health() -> Result<(), String> {
@ -1582,7 +1600,12 @@ fn cmd_history(args: &[String]) -> Result<(), String> {
};
let full = parsed.full;
let key = parsed.key.join(" ");
let raw_key = parsed.key.join(" ");
// Resolve key consistently with render/write
let store = store::Store::load()?;
let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
drop(store);
// Replay the node log, collecting all versions of this key
let path = store::nodes_path();
@ -1650,7 +1673,7 @@ fn cmd_write(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory write KEY < content\n\
Reads content from stdin, upserts into the store.".into());
}
let key = args.join(" ");
let raw_key = args.join(" ");
let mut content = String::new();
std::io::Read::read_to_string(&mut std::io::stdin(), &mut content)
.map_err(|e| format!("read stdin: {}", e))?;
@ -1660,6 +1683,9 @@ fn cmd_write(args: &[String]) -> Result<(), String> {
}
let mut store = store::Store::load()?;
// Resolve the key the same way render/search do, so writes and reads
// always hit the same node. Fall back to raw key for new nodes.
let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
let result = store.upsert(&key, &content)?;
match result {
"unchanged" => println!("No change: '{}'", key),
@ -1721,12 +1747,8 @@ fn cmd_export(args: &[String]) -> Result<(), String> {
return Err("Usage: poc-memory export FILE [FILE...] | --all".into());
} else {
args.iter().map(|a| {
// If it doesn't end in .md, try resolving
if a.ends_with(".md") {
a.clone()
} else {
format!("{}.md", a)
}
// Strip .md if user supplied it — store keys are bare
a.strip_suffix(".md").unwrap_or(a).to_string()
}).collect()
};
@ -1735,7 +1757,7 @@ fn cmd_export(args: &[String]) -> Result<(), String> {
for file_key in &targets {
match store.export_to_markdown(file_key) {
Some(content) => {
let out_path = mem_dir.join(file_key);
let out_path = mem_dir.join(format!("{}.md", file_key));
std::fs::write(&out_path, &content)
.map_err(|e| format!("write {}: {}", out_path.display(), e))?;
let section_count = content.matches("<!-- mem:").count() + 1;
@ -1767,7 +1789,7 @@ fn cmd_journal_write(args: &[String]) -> Result<(), String> {
.join("-");
let slug = if slug.len() > 50 { &slug[..50] } else { &slug };
let key = format!("journal.md#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
let key = format!("journal#j-{}-{}", timestamp.to_lowercase().replace(':', "-"), slug);
// Build content with header
let content = format!("## {}\n\n{}", timestamp, text);
@ -1891,7 +1913,6 @@ fn journal_tail_digests(store: &store::Store, prefix: &str, n: usize, full: bool
let skip = if digests.len() > n { digests.len() - n } else { 0 };
for node in digests.iter().skip(skip) {
let label = node.key.strip_prefix(prefix)
.and_then(|s| s.strip_suffix(".md"))
.unwrap_or(&node.key);
let title = extract_title(&node.content);
if full {
@ -1959,7 +1980,7 @@ fn cmd_query(args: &[String]) -> Result<(), String> {
Expressions:\n \
degree > 15 property filter\n \
key ~ 'journal.*' AND degree > 10 boolean + regex\n \
neighbors('identity.md') WHERE ... graph traversal\n \
neighbors('identity') WHERE ... graph traversal\n \
community_id = community('key') function as value\n \
* all nodes\n\n\
Pipe stages:\n \