remove Category from the type system
Category was a manually-assigned label with no remaining functional purpose (decay was the only behavior it drove, and that's gone). Remove the enum, its methods, category_counts, the --category search filter, and all category display. The field remains in the capnp schema for backwards compatibility but is no longer read or written. Status and health reports now show NodeType breakdown (semantic, episodic, daily, weekly, monthly) instead of categories. Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
parent
ba30f5b3e4
commit
488fd5a0aa
7 changed files with 45 additions and 120 deletions
26
src/graph.rs
26
src/graph.rs
|
|
@@ -625,8 +625,18 @@ pub fn health_report(graph: &Graph, store: &Store) -> String {
|
|||
}
|
||||
}
|
||||
|
||||
// Category breakdown
|
||||
let cats = store.category_counts();
|
||||
// NodeType breakdown
|
||||
let mut type_counts: std::collections::HashMap<&str, usize> = std::collections::HashMap::new();
|
||||
for node in store.nodes.values() {
|
||||
let label = match node.node_type {
|
||||
crate::store::NodeType::EpisodicSession => "episodic",
|
||||
crate::store::NodeType::EpisodicDaily => "daily",
|
||||
crate::store::NodeType::EpisodicWeekly => "weekly",
|
||||
crate::store::NodeType::EpisodicMonthly => "monthly",
|
||||
crate::store::NodeType::Semantic => "semantic",
|
||||
};
|
||||
*type_counts.entry(label).or_default() += 1;
|
||||
}
|
||||
|
||||
// Load history for deltas
|
||||
let history = load_metrics_history();
|
||||
|
|
@@ -665,16 +675,16 @@ Power-law α: {alpha:.2}{alpha_d} (2=hub-dominated, 3=healthy, >3=egalitarian)
|
|||
Degree Gini: {gini:.3}{gini_d} (0=equal, 1=one-hub)
|
||||
|
||||
Community sizes (top 5): {top5}
|
||||
Categories: core={core} tech={tech} gen={gen} obs={obs} task={task}",
|
||||
Types: semantic={semantic} episodic={episodic} daily={daily} weekly={weekly} monthly={monthly}",
|
||||
top5 = sizes.iter().take(5)
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", "),
|
||||
core = cats.get("core").unwrap_or(&0),
|
||||
tech = cats.get("tech").unwrap_or(&0),
|
||||
gen = cats.get("gen").unwrap_or(&0),
|
||||
obs = cats.get("obs").unwrap_or(&0),
|
||||
task = cats.get("task").unwrap_or(&0),
|
||||
semantic = type_counts.get("semantic").unwrap_or(&0),
|
||||
episodic = type_counts.get("episodic").unwrap_or(&0),
|
||||
daily = type_counts.get("daily").unwrap_or(&0),
|
||||
weekly = type_counts.get("weekly").unwrap_or(&0),
|
||||
monthly = type_counts.get("monthly").unwrap_or(&0),
|
||||
);
|
||||
|
||||
// Orphan edges
|
||||
|
|
|
|||
64
src/main.rs
64
src/main.rs
|
|
@@ -135,7 +135,7 @@ fn usage() {
|
|||
eprintln!("poc-memory v0.4.0 — graph-structured memory store
|
||||
|
||||
Commands:
|
||||
search QUERY [--expand] [--category CAT] Search memory (AND logic)
|
||||
search QUERY [--expand] Search memory (AND logic)
|
||||
init Scan markdown files, index all memory units
|
||||
migrate Migrate from old weights.json system
|
||||
health Report graph metrics (CC, communities, small-world)
|
||||
|
|
@@ -224,55 +224,24 @@ Search memory using spreading activation (AND logic across terms).
|
|||
|
||||
Options:
|
||||
--expand Show 15 results instead of 5, plus spectral neighbors
|
||||
--category CAT Filter results to category: core, tech, gen, obs, task
|
||||
--help, -h Show this help
|
||||
|
||||
Examples:
|
||||
poc-memory search irc connection
|
||||
poc-memory search bcachefs transaction --expand
|
||||
poc-memory search rust --category tech");
|
||||
poc-memory search bcachefs transaction --expand");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let expand = args.iter().any(|a| a == "--expand");
|
||||
|
||||
let category_filter: Option<String> = {
|
||||
let mut cat = None;
|
||||
let mut iter = args.iter();
|
||||
while let Some(a) = iter.next() {
|
||||
if a == "--category" {
|
||||
cat = iter.next().cloned();
|
||||
break;
|
||||
}
|
||||
}
|
||||
cat
|
||||
};
|
||||
|
||||
let query: String = args.iter()
|
||||
.filter(|a| *a != "--expand" && *a != "--category")
|
||||
.scan(false, |skip_next, a| {
|
||||
if *skip_next { *skip_next = false; return Some(None); }
|
||||
if a == "--category" { *skip_next = true; return Some(None); }
|
||||
Some(Some(a.as_str()))
|
||||
})
|
||||
.flatten()
|
||||
.filter(|a| *a != "--expand")
|
||||
.map(|a| a.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
|
||||
let view = store::AnyView::load()?;
|
||||
let mut results = search::search(&query, &view);
|
||||
|
||||
// Filter by category if requested
|
||||
if let Some(ref cat_str) = category_filter {
|
||||
let cat = store::Category::from_str(cat_str)
|
||||
.ok_or_else(|| format!("Unknown category '{}' (use: core, tech, gen, obs, task)", cat_str))?;
|
||||
let store = store::Store::load()?;
|
||||
results.retain(|r| {
|
||||
store.nodes.get(&r.key)
|
||||
.map(|n| n.category.label() == cat.label())
|
||||
.unwrap_or(false)
|
||||
});
|
||||
}
|
||||
let results = search::search(&query, &view);
|
||||
|
||||
if results.is_empty() {
|
||||
eprintln!("No results for '{}'", query);
|
||||
|
|
@@ -459,16 +428,19 @@ fn cmd_status() -> Result<(), String> {
|
|||
let store = store::Store::load()?;
|
||||
let node_count = store.nodes.len();
|
||||
let rel_count = store.relations.len();
|
||||
let categories = store.category_counts();
|
||||
|
||||
let mut episodic = 0usize;
|
||||
let mut semantic = 0usize;
|
||||
for n in store.nodes.values() {
|
||||
if matches!(n.node_type, store::NodeType::Semantic) {
|
||||
semantic += 1;
|
||||
} else {
|
||||
episodic += 1;
|
||||
}
|
||||
}
|
||||
|
||||
println!("Nodes: {} Relations: {}", node_count, rel_count);
|
||||
println!("Categories: core={} tech={} gen={} obs={} task={}",
|
||||
categories.get("core").unwrap_or(&0),
|
||||
categories.get("tech").unwrap_or(&0),
|
||||
categories.get("gen").unwrap_or(&0),
|
||||
categories.get("obs").unwrap_or(&0),
|
||||
categories.get("task").unwrap_or(&0),
|
||||
);
|
||||
println!("Types: semantic={} episodic={}", semantic, episodic);
|
||||
|
||||
let g = store.build_graph();
|
||||
println!("Graph edges: {} Communities: {}",
|
||||
|
|
@@ -1058,8 +1030,8 @@ fn cmd_trace(args: &[String]) -> Result<(), String> {
|
|||
|
||||
// Display the node itself
|
||||
println!("=== {} ===", resolved);
|
||||
println!("Type: {:?} Category: {} Weight: {:.2}",
|
||||
node.node_type, node.category.label(), node.weight);
|
||||
println!("Type: {:?} Weight: {:.2}",
|
||||
node.node_type, node.weight);
|
||||
if !node.source_ref.is_empty() {
|
||||
println!("Source: {}", node.source_ref);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -12,7 +12,7 @@
|
|||
// Old files are preserved as backup. Run once.
|
||||
|
||||
use crate::store::{
|
||||
self, Store, Node, Category, NodeType, Provenance, RelationType,
|
||||
self, Store, Node, NodeType, Provenance, RelationType,
|
||||
parse_units, new_relation,
|
||||
};
|
||||
|
||||
|
|
@@ -108,16 +108,6 @@ fn default_0_3() -> f64 { 0.3 }
|
|||
fn default_3() -> u32 { 3 }
|
||||
fn default_0_05() -> f64 { 0.05 }
|
||||
|
||||
fn parse_old_category(s: &str) -> Category {
|
||||
match s {
|
||||
"Core" | "core" => Category::Core,
|
||||
"Technical" | "technical" | "tech" => Category::Technical,
|
||||
"Observation" | "observation" | "obs" => Category::Observation,
|
||||
"Task" | "task" => Category::Task,
|
||||
_ => Category::General,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn migrate() -> Result<(), String> {
|
||||
let weights_path = home().join(".claude/memory/weights.json");
|
||||
let memory_dir = home().join(".claude/memory");
|
||||
|
|
@@ -207,7 +197,6 @@ pub fn migrate() -> Result<(), String> {
|
|||
key: key.clone(),
|
||||
content,
|
||||
weight: old_entry.weight as f32,
|
||||
category: parse_old_category(&old_entry.category),
|
||||
emotion: 0.0,
|
||||
deleted: false,
|
||||
source_ref: String::new(),
|
||||
|
|
@@ -247,7 +236,6 @@ pub fn migrate() -> Result<(), String> {
|
|||
key: key.clone(),
|
||||
content: unit.content.clone(),
|
||||
weight: 0.7,
|
||||
category: Category::General,
|
||||
emotion: 0.0,
|
||||
deleted: false,
|
||||
source_ref: String::new(),
|
||||
|
|
|
|||
|
|
@@ -85,8 +85,8 @@ fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph) -> S
|
|||
out.push_str(&format!("## {} \n", item.key));
|
||||
out.push_str(&format!("Priority: {:.3} CC: {:.3} Emotion: {:.1} ",
|
||||
item.priority, item.cc, item.emotion));
|
||||
out.push_str(&format!("Category: {} Interval: {}d\n",
|
||||
node.category.label(), node.spaced_repetition_interval));
|
||||
out.push_str(&format!("Interval: {}d\n",
|
||||
node.spaced_repetition_interval));
|
||||
if item.outlier_score > 0.0 {
|
||||
out.push_str(&format!("Spectral: {} (outlier={:.1})\n",
|
||||
item.classification, item.outlier_score));
|
||||
|
|
@@ -254,8 +254,8 @@ fn format_pairs_section(
|
|||
} else {
|
||||
node.content.clone()
|
||||
};
|
||||
out.push_str(&format!("Category: {} Weight: {:.2}\n{}\n",
|
||||
node.category.label(), node.weight, content));
|
||||
out.push_str(&format!("Weight: {:.2}\n{}\n",
|
||||
node.weight, content));
|
||||
}
|
||||
|
||||
// Node B
|
||||
|
|
@@ -267,8 +267,8 @@ fn format_pairs_section(
|
|||
} else {
|
||||
node.content.clone()
|
||||
};
|
||||
out.push_str(&format!("Category: {} Weight: {:.2}\n{}\n",
|
||||
node.category.label(), node.weight, content));
|
||||
out.push_str(&format!("Weight: {:.2}\n{}\n",
|
||||
node.weight, content));
|
||||
}
|
||||
|
||||
out.push_str("\n---\n\n");
|
||||
|
|
|
|||
|
|
@@ -171,7 +171,7 @@ fn resolve_field(field: &str, key: &str, store: &Store, graph: &Graph) -> Option
|
|||
match field {
|
||||
"key" => Some(Value::Str(key.to_string())),
|
||||
"weight" => Some(Value::Num(node.weight as f64)),
|
||||
"category" => Some(Value::Str(node.category.label().to_string())),
|
||||
"category" => None, // vestigial, kept for query compat
|
||||
"node_type" => Some(Value::Str(node_type_label(node.node_type).to_string())),
|
||||
"provenance" => Some(Value::Str(node.provenance.label().to_string())),
|
||||
"emotion" => Some(Value::Num(node.emotion as f64)),
|
||||
|
|
|
|||
|
|
@@ -251,14 +251,6 @@ impl Store {
|
|||
Ok((hubs_capped, to_delete.len()))
|
||||
}
|
||||
|
||||
pub fn category_counts(&self) -> HashMap<&str, usize> {
|
||||
let mut counts = HashMap::new();
|
||||
for node in self.nodes.values() {
|
||||
*counts.entry(node.category.label()).or_insert(0) += 1;
|
||||
}
|
||||
counts
|
||||
}
|
||||
|
||||
/// Update graph-derived fields on all nodes
|
||||
pub fn update_graph_metrics(&mut self) {
|
||||
let g = self.build_graph();
|
||||
|
|
|
|||
|
|
@@ -173,7 +173,6 @@ pub struct Node {
|
|||
pub key: String,
|
||||
pub content: String,
|
||||
pub weight: f32,
|
||||
pub category: Category,
|
||||
pub emotion: f32,
|
||||
pub deleted: bool,
|
||||
pub source_ref: String,
|
||||
|
|
@@ -294,39 +293,6 @@ impl Provenance {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
|
||||
#[archive(check_bytes)]
|
||||
pub enum Category {
|
||||
General,
|
||||
Core,
|
||||
Technical,
|
||||
Observation,
|
||||
Task,
|
||||
}
|
||||
|
||||
impl Category {
|
||||
pub fn label(&self) -> &str {
|
||||
match self {
|
||||
Category::Core => "core",
|
||||
Category::Technical => "tech",
|
||||
Category::General => "gen",
|
||||
Category::Observation => "obs",
|
||||
Category::Task => "task",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_str(s: &str) -> Option<Self> {
|
||||
match s {
|
||||
"core" => Some(Category::Core),
|
||||
"tech" | "technical" => Some(Category::Technical),
|
||||
"gen" | "general" => Some(Category::General),
|
||||
"obs" | "observation" => Some(Category::Observation),
|
||||
"task" => Some(Category::Task),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
|
||||
#[archive(check_bytes)]
|
||||
pub enum RelationType {
|
||||
|
|
@@ -344,8 +310,6 @@ capnp_enum!(Provenance, memory_capnp::Provenance,
|
|||
AgentKnowledgeConnector, AgentKnowledgeChallenger, AgentConsolidate,
|
||||
AgentDigest, AgentFactMine, AgentDecay]);
|
||||
|
||||
capnp_enum!(Category, memory_capnp::Category,
|
||||
[General, Core, Technical, Observation, Task]);
|
||||
|
||||
capnp_enum!(RelationType, memory_capnp::RelationType,
|
||||
[Link, Causal, Auto]);
|
||||
|
|
@@ -358,7 +322,7 @@ capnp_message!(Node,
|
|||
prim: [version, timestamp, weight, emotion, deleted,
|
||||
retrievals, uses, wrongs, last_replayed,
|
||||
spaced_repetition_interval, position, created_at],
|
||||
enm: [node_type: NodeType, provenance: Provenance, category: Category],
|
||||
enm: [node_type: NodeType, provenance: Provenance],
|
||||
skip: [community_id, clustering_coefficient, degree],
|
||||
);
|
||||
|
||||
|
|
@@ -493,7 +457,6 @@ pub fn new_node(key: &str, content: &str) -> Node {
|
|||
key: key.to_string(),
|
||||
content: content.to_string(),
|
||||
weight: 0.7,
|
||||
category: Category::General,
|
||||
emotion: 0.0,
|
||||
deleted: false,
|
||||
source_ref: String::new(),
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue