remove decay, fix_categories, and categorize

Graph-wide decay is the wrong approach — node importance should emerge
from graph topology (degree, centrality, usage patterns), not a global
weight field multiplied by a category-specific factor.

Remove: Store::decay(), Store::categorize(), Store::fix_categories(),
Category::decay_factor(), cmd_decay, cmd_categorize, cmd_fix_categories,
job_decay, and all category assignments at node creation time.

Category remains in the schema as a vestigial field (removing it
requires a capnp migration) but no longer affects behavior.

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-03-08 20:22:38 -04:00
parent 804578b977
commit 4bc74ca4a2
6 changed files with 0 additions and 198 deletions

View file

@ -334,9 +334,6 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_key: Option
let link_actions: Vec<_> = actions.iter()
.filter(|a| a.get("action").and_then(|v| v.as_str()) == Some("link"))
.collect();
let cat_actions: Vec<_> = actions.iter()
.filter(|a| a.get("action").and_then(|v| v.as_str()) == Some("categorize"))
.collect();
let manual_actions: Vec<_> = actions.iter()
.filter(|a| a.get("action").and_then(|v| v.as_str()) == Some("manual"))
.collect();
@ -356,15 +353,6 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_key: Option
println!(" {:2}. {}{} ({})", i + 1, src, tgt, reason);
}
}
if !cat_actions.is_empty() {
println!("\n## Categories to set ({})\n", cat_actions.len());
for a in &cat_actions {
let key = a.get("key").and_then(|v| v.as_str()).unwrap_or("?");
let cat = a.get("category").and_then(|v| v.as_str()).unwrap_or("?");
let reason = a.get("reason").and_then(|v| v.as_str()).unwrap_or("");
println!(" {}{} ({})", key, cat, reason);
}
}
if !manual_actions.is_empty() {
println!("\n## Manual actions needed ({})\n", manual_actions.len());
for a in &manual_actions {
@ -425,26 +413,6 @@ pub fn apply_consolidation(store: &mut Store, do_apply: bool, report_key: Option
}
}
if !cat_actions.is_empty() {
println!("\nApplying {} categorizations...", cat_actions.len());
for a in &cat_actions {
let key = a.get("key").and_then(|v| v.as_str()).unwrap_or("");
let cat = a.get("category").and_then(|v| v.as_str()).unwrap_or("");
if key.is_empty() || cat.is_empty() { continue; }
let resolved = match store.resolve_key(key) {
Ok(r) => r,
Err(_) => { println!(" ? {}{}: not found", key, cat); skipped += 1; continue; }
};
if store.categorize(&resolved, cat).is_ok() {
println!(" + {}{}", resolved, cat);
applied += 1;
} else {
skipped += 1;
}
}
}
if !manual_actions.is_empty() {
println!("\n## Manual actions (not auto-applied):\n");
for a in &manual_actions {

View file

@ -121,18 +121,6 @@ fn job_fact_mine(ctx: &ExecutionContext, path: &str) -> Result<(), TaskError> {
})
}
/// Background job: load the store, apply one round of weight decay,
/// persist the result, and log the decayed/pruned counts.
fn job_decay(ctx: &ExecutionContext) -> Result<(), TaskError> {
    run_job(ctx, "decay", || {
        ctx.log_line("loading store");
        let mut graph = crate::store::Store::load()?;
        ctx.log_line("decaying");
        let counts = graph.decay();
        graph.save()?;
        ctx.log_line(&format!("{} decayed, {} pruned", counts.0, counts.1));
        Ok(())
    })
}
fn job_consolidate(ctx: &ExecutionContext) -> Result<(), TaskError> {
run_job(ctx, "consolidate", || {
ctx.log_line("loading store");
@ -499,7 +487,6 @@ pub fn run_daemon() -> Result<(), String> {
for (fname_key, path_str, seg_count) in &mark_transcript_done {
let content = format!("All {} segments mined for {}", seg_count, path_str);
let mut node = crate::store::new_node(fname_key, &content);
node.category = crate::store::Category::Task;
node.provenance = crate::store::Provenance::AgentExperienceMine;
let _ = store.upsert_node(node);
seg_cache.remove(path_str);

View file

@ -307,7 +307,6 @@ pub fn experience_mine(
let fname_key = transcript_filename_key(jsonl_path);
if !store.nodes.contains_key(&fname_key) {
let mut node = new_node(&fname_key, &format!("Backfilled from {}", dedup_key));
node.category = store::Category::Task;
node.provenance = store::Provenance::AgentExperienceMine;
let _ = store.upsert_node(node);
store.save()?;
@ -415,7 +414,6 @@ pub fn experience_mine(
// Write to store — use event timestamp, not mining time
let mut node = new_node(&key, &full_content);
node.node_type = store::NodeType::EpisodicSession;
node.category = store::Category::Observation;
node.provenance = store::Provenance::AgentExperienceMine;
if !ts.is_empty() {
if let Some(epoch) = parse_timestamp_to_epoch(ts) {
@ -441,14 +439,12 @@ pub fn experience_mine(
};
let dedup_content = format!("Mined {} ({} entries)", jsonl_path, count);
let mut fname_node = new_node(&fname_key, &dedup_content);
fname_node.category = store::Category::Task;
fname_node.provenance = store::Provenance::AgentExperienceMine;
let _ = store.upsert_node(fname_node);
// For unsegmented calls, also write the content-hash key for backwards compat
if segment.is_none() {
let mut dedup_node = new_node(&dedup_key, &dedup_content);
dedup_node.category = store::Category::Task;
dedup_node.provenance = store::Provenance::AgentExperienceMine;
let _ = store.upsert_node(dedup_node);
}

View file

@ -69,11 +69,8 @@ fn main() {
"used" => cmd_used(&args[2..]),
"wrong" => cmd_wrong(&args[2..]),
"gap" => cmd_gap(&args[2..]),
"categorize" => cmd_categorize(&args[2..]),
"fix-categories" => cmd_fix_categories(),
"cap-degree" => cmd_cap_degree(&args[2..]),
"link-orphans" => cmd_link_orphans(&args[2..]),
"decay" => cmd_decay(),
"consolidate-batch" => cmd_consolidate_batch(&args[2..]),
"log" => cmd_log(),
"params" => cmd_params(),
@ -147,8 +144,6 @@ Commands:
used KEY Mark a memory as useful (boosts weight)
wrong KEY [CONTEXT] Mark a memory as wrong/irrelevant
gap DESCRIPTION Record a gap in memory coverage
categorize KEY CATEGORY Reassign category (core/tech/gen/obs/task)
decay Apply daily weight decay
consolidate-batch [--count N] [--auto]
Run agent consolidation on priority nodes
log Show recent retrieval log
@ -523,32 +518,6 @@ fn cmd_gap(args: &[String]) -> Result<(), String> {
Ok(())
}
/// CLI handler: reassign a node's category.
/// Expects `KEY CATEGORY` in `args`; resolves the key, applies the
/// category, and saves the store.
fn cmd_categorize(args: &[String]) -> Result<(), String> {
    let (key, cat) = match args {
        [key, cat, ..] => (key, cat),
        _ => return Err("Usage: poc-memory categorize KEY CATEGORY".into()),
    };
    let mut db = store::Store::load()?;
    let full_key = db.resolve_key(key)?;
    db.categorize(&full_key, cat)?;
    db.save()?;
    println!("Set '{}' category to {}", full_key, cat);
    Ok(())
}
/// CLI handler: run the rule-based bulk recategorizer and print the
/// category distribution before and after, plus changed/kept counts.
fn cmd_fix_categories() -> Result<(), String> {
    let mut db = store::Store::load()?;
    // Snapshot the distribution first so the report shows the delta.
    let counts_before = format!("{:?}", db.category_counts());
    let (changed, kept) = db.fix_categories()?;
    db.save()?;
    let counts_after = format!("{:?}", db.category_counts());
    println!("Category fix: {} changed, {} kept", changed, kept);
    println!("\nBefore: {}", counts_before);
    println!("After: {}", counts_after);
    Ok(())
}
fn cmd_link_orphans(args: &[String]) -> Result<(), String> {
let min_deg: usize = args.first().and_then(|s| s.parse().ok()).unwrap_or(2);
let links_per: usize = args.get(1).and_then(|s| s.parse().ok()).unwrap_or(3);
@ -569,14 +538,6 @@ fn cmd_cap_degree(args: &[String]) -> Result<(), String> {
Ok(())
}
/// CLI handler: apply one round of weight decay and report how many
/// nodes were decayed and how many fell below the prune threshold.
fn cmd_decay() -> Result<(), String> {
    let mut db = store::Store::load()?;
    let (decayed, pruned) = db.decay();
    db.save()?;
    println!("Decayed {} nodes, pruned {} below threshold", decayed, pruned);
    Ok(())
}
fn cmd_consolidate_batch(args: &[String]) -> Result<(), String> {
let mut count = 5usize;
let mut auto = false;

View file

@ -172,106 +172,6 @@ impl Store {
});
}
/// Set a node's category from its string form.
///
/// # Errors
/// Returns an error when `cat_str` is not a recognized category name,
/// or when `modify_node` fails for `key`.
pub fn categorize(&mut self, key: &str, cat_str: &str) -> Result<(), String> {
    match Category::from_str(cat_str) {
        Some(cat) => self.modify_node(key, |n| n.category = cat),
        None => Err(format!("Unknown category '{}'. Use: core/tech/gen/obs/task", cat_str)),
    }
}
/// Apply one round of multiplicative weight decay to every node.
///
/// Each weight is scaled by a category-specific factor derived from
/// `params.decay_factor`. Weights that land under `params.prune_threshold`
/// are clamped (floored at 0.01) rather than removed, and counted as
/// pruned. Only nodes whose weight actually changed are persisted, and
/// their version is deliberately not bumped — decay is metadata, not a
/// content change.
///
/// Returns `(decayed, pruned)`.
pub fn decay(&mut self) -> (usize, usize) {
    let base = self.params.decay_factor;
    let floor = self.params.prune_threshold as f32;
    let mut changed = Vec::new();
    let mut near_prune = 0usize;
    for node in self.nodes.values_mut() {
        let previous = node.weight;
        node.weight *= node.category.decay_factor(base) as f32;
        // Clamp near-prune nodes instead of deleting them outright.
        if node.weight < floor {
            node.weight = node.weight.max(0.01);
            near_prune += 1;
        }
        // Persist only real changes (version intentionally untouched).
        if (node.weight - previous).abs() > 1e-6 {
            changed.push(node.clone());
        }
    }
    let decayed = changed.len();
    if !changed.is_empty() {
        let _ = self.append_nodes(&changed);
    }
    (decayed, near_prune)
}
/// Bulk recategorize nodes using rule-based logic.
///
/// Only nodes currently marked `Category::Core` are reconsidered; all
/// others are left untouched and counted as unchanged. The file stem of
/// the node key (the part before '#') is matched against the configured
/// core-node list, then known technical/observation file names and
/// prefixes, falling back to `General`. Changed nodes get a version bump
/// and are persisted via `append_nodes`.
///
/// Returns `(changed, unchanged)` counts.
pub fn fix_categories(&mut self) -> Result<(usize, usize), String> {
    let cfg = crate::config::get();
    let core_files: Vec<&str> = cfg.core_nodes.iter().map(|s| s.as_str()).collect();
    let tech_files = [
        "language-theory", "zoom-navigation",
        "rust-conversion", "poc-architecture",
    ];
    let tech_prefixes = ["design-"];
    let obs_files = [
        "reflections", "reflections-zoom", "differentiation",
        "cognitive-modes", "paper-notes", "inner-life",
        "conversation", "interests", "stuck-toolkit",
    ];
    let obs_prefixes = ["skill-", "worked-example-"];
    let mut changed_nodes = Vec::new();
    let mut unchanged = 0;
    // Single mutable pass over the map: avoids cloning every key up front
    // and the get/get_mut double lookup of the previous implementation.
    // (`cfg` does not borrow `self`, so iter_mut is safe here.)
    for (key, node) in self.nodes.iter_mut() {
        if node.category != Category::Core {
            unchanged += 1;
            continue;
        }
        let file = key.split('#').next().unwrap_or(key);
        let new_cat = if core_files.iter().any(|&f| file == f) {
            None // genuinely core — keep as-is
        } else if tech_files.iter().any(|&f| file == f)
            || tech_prefixes.iter().any(|p| file.starts_with(p))
        {
            Some(Category::Technical)
        } else if obs_files.iter().any(|&f| file == f)
            || obs_prefixes.iter().any(|p| file.starts_with(p))
        {
            Some(Category::Observation)
        } else {
            Some(Category::General)
        };
        if let Some(cat) = new_cat {
            node.category = cat;
            node.version += 1;
            changed_nodes.push(node.clone());
        } else {
            unchanged += 1;
        }
    }
    if !changed_nodes.is_empty() {
        self.append_nodes(&changed_nodes)?;
    }
    Ok((changed_nodes.len(), unchanged))
}
/// Cap node degree by soft-deleting edges from mega-hubs.
pub fn cap_degree(&mut self, max_degree: usize) -> Result<(usize, usize), String> {
let mut node_degree: HashMap<String, usize> = HashMap::new();

View file

@ -305,16 +305,6 @@ pub enum Category {
}
impl Category {
/// Per-category decay multiplier derived from the store-wide `base` factor.
///
/// Each category scales the base weight loss: Core barely decays (20% of
/// the base loss), Technical 50%, General exactly `base`, Observation 150%,
/// and Task fastest at 250%. The result is clamped to be non-negative:
/// with the previous formula, any `base` below 0.6 made the Task factor
/// negative, which would flip the sign of node weights when multiplied in
/// `Store::decay`.
pub fn decay_factor(&self, base: f64) -> f64 {
    // How much weight a General node sheds per decay round.
    let loss = 1.0 - base;
    let factor = match self {
        Category::Core => 1.0 - loss * 0.2,
        Category::Technical => 1.0 - loss * 0.5,
        Category::General => base,
        Category::Observation => 1.0 - loss * 1.5,
        Category::Task => 1.0 - loss * 2.5,
    };
    // Defensive clamp: a negative multiplier would invert weights.
    factor.max(0.0)
}
pub fn label(&self) -> &str {
match self {
Category::Core => "core",