query: generalized composite sort for tunable agent priorities
Add sort:field*weight+field*weight+... syntax for weighted multi-field sorting. Each field computes a 0-1 score, which is multiplied by its weight; the weighted scores are summed. Available score fields:

- isolation — community isolation ratio (1.0 = fully isolated)
- degree — graph degree (normalized to max)
- weight — node weight
- content-len — content size (normalized to max)
- priority — consolidation priority score
- recency(X) — time since agent X last visited (sigmoid decay)

Example: sort:isolation*0.7+recency(linker)*0.3 — linker agents prioritize isolated communities that haven't been visited recently.

Scores are pre-computed per sort (CompositeCache) to avoid redundant graph traversals inside the sort comparator.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
e6613f97bb
commit
3a45b6144e
1 changed files with 148 additions and 8 deletions
|
|
@ -168,6 +168,20 @@ pub enum SortField {
|
||||||
Degree,
|
Degree,
|
||||||
Weight,
|
Weight,
|
||||||
Isolation,
|
Isolation,
|
||||||
|
Composite(Vec<(ScoreField, f64)>),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Individual scoring dimensions for composite sorts.
///
/// Each computes a 0.0-1.0 score per node. `PartialEq`/`Eq` are derived so
/// parsed composite specs can be compared (e.g. in parser tests or when
/// deduplicating sort terms) — the enum holds only unit variants and a
/// `String`, so structural equality is well-defined.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ScoreField {
    /// Community isolation ratio (1.0 = fully isolated).
    Isolation,
    /// Graph degree, normalized against the max degree in the sorted set.
    Degree,
    /// Raw node weight.
    Weight,
    /// Content size, normalized against the max in the sorted set.
    ContentLen,
    /// Consolidation priority score.
    Priority,
    /// Time since last visit by named agent. 1.0 = never visited, decays toward 0.
    Recency(String),
}
|
||||||
|
|
||||||
/// Numeric comparison operator.
|
/// Numeric comparison operator.
|
||||||
|
|
@ -229,6 +243,111 @@ fn parse_duration_or_number(s: &str) -> Result<f64, String> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Parse composite sort: "isolation*0.7+recency(linker)*0.3"
|
||||||
|
/// Each term is field or field(arg), optionally *weight (default 1.0).
|
||||||
|
fn parse_composite_sort(s: &str) -> Result<Vec<(ScoreField, f64)>, String> {
|
||||||
|
let mut terms = Vec::new();
|
||||||
|
for term in s.split('+') {
|
||||||
|
let term = term.trim();
|
||||||
|
let (field_part, weight) = if let Some((f, w)) = term.rsplit_once('*') {
|
||||||
|
(f, w.parse::<f64>().map_err(|_| format!("bad weight: {}", w))?)
|
||||||
|
} else {
|
||||||
|
(term, 1.0)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Parse field, possibly with (arg)
|
||||||
|
let field = if let Some((name, arg)) = field_part.split_once('(') {
|
||||||
|
let arg = arg.strip_suffix(')').ok_or("missing ) in sort field")?;
|
||||||
|
match name {
|
||||||
|
"recency" => ScoreField::Recency(arg.to_string()),
|
||||||
|
_ => return Err(format!("unknown parameterized sort field: {}", name)),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
match field_part {
|
||||||
|
"isolation" => ScoreField::Isolation,
|
||||||
|
"degree" => ScoreField::Degree,
|
||||||
|
"weight" => ScoreField::Weight,
|
||||||
|
"content-len" => ScoreField::ContentLen,
|
||||||
|
"priority" => ScoreField::Priority,
|
||||||
|
_ => return Err(format!("unknown sort field: {}", field_part)),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
terms.push((field, weight));
|
||||||
|
}
|
||||||
|
if terms.is_empty() {
|
||||||
|
return Err("empty composite sort".into());
|
||||||
|
}
|
||||||
|
Ok(terms)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Compute a 0-1 score for a node on a single dimension.
///
/// `key` identifies the node in both `store` and `graph`; `precomputed`
/// supplies per-sort normalization data (max degree, max content length,
/// per-community isolation) so this stays cheap inside a sort comparator.
/// Missing nodes/communities score with a safe default rather than panicking.
fn score_field(
    field: &ScoreField,
    key: &str,
    store: &Store,
    graph: &Graph,
    precomputed: &CompositeCache,
) -> f64 {
    match field {
        ScoreField::Isolation => {
            // Unknown node -> community 0; unknown community -> treat as
            // fully isolated (1.0).
            let comm = graph.communities().get(key).copied().unwrap_or(0);
            precomputed.isolation.get(&comm).copied().unwrap_or(1.0) as f64
        }
        ScoreField::Degree => {
            let d = graph.degree(key) as f64;
            // max(1.0) guards against divide-by-zero when every item has
            // degree 0.
            let max = precomputed.max_degree.max(1.0);
            (d / max).min(1.0)
        }
        ScoreField::Weight => {
            // NOTE(review): weight is used as-is, not normalized or clamped —
            // presumably node weights already live in 0-1; confirm, since an
            // out-of-range weight would dominate the composite sum.
            store.nodes.get(key).map(|n| n.weight as f64).unwrap_or(0.0)
        }
        ScoreField::ContentLen => {
            let len = store.nodes.get(key).map(|n| n.content.len()).unwrap_or(0) as f64;
            // max(1.0) guards against divide-by-zero on all-empty content.
            let max = precomputed.max_content_len.max(1.0);
            (len / max).min(1.0)
        }
        ScoreField::Priority => {
            let p = crate::neuro::consolidation_priority(store, key, graph, None);
            // Priority is already roughly 0-1 from the scoring function
            p.min(1.0)
        }
        ScoreField::Recency(agent) => {
            // last_visited returns an epoch timestamp; 0 is the "never
            // visited" sentinel.
            let last = store.last_visited(key, agent);
            if last == 0 {
                1.0 // never visited = highest recency score
            } else {
                // NOTE(review): assumes now_epoch() >= last — if `last` can be
                // in the future (clock skew) and the type is unsigned, this
                // subtraction would wrap; confirm.
                let age = (crate::store::now_epoch() - last) as f64;
                // Exponential saturation toward 1.0 (1 - e^(-0.03h)):
                // ~0.03 at 1 hour, ~0.5 at 1 day, ~0.99 at 7 days.
                let hours = age / 3600.0;
                1.0 - (-0.03 * hours).exp()
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Cached values for composite scoring (computed once per sort).
///
/// Built by `CompositeCache::build` before sorting so the comparator never
/// re-traverses the graph or rescans the store per comparison.
struct CompositeCache {
    /// Per-community isolation ratio, keyed by community id.
    isolation: HashMap<u32, f32>,
    /// Max degree among the items being sorted (normalizes Degree scores).
    max_degree: f64,
    /// Max content length among the items being sorted (normalizes ContentLen).
    max_content_len: f64,
}
|
||||||
|
|
||||||
|
impl CompositeCache {
|
||||||
|
fn build(items: &[(String, f64)], store: &Store, graph: &Graph) -> Self {
|
||||||
|
let max_degree = items.iter()
|
||||||
|
.map(|(k, _)| graph.degree(k) as f64)
|
||||||
|
.fold(0.0f64, f64::max);
|
||||||
|
let max_content_len = items.iter()
|
||||||
|
.map(|(k, _)| store.nodes.get(k).map(|n| n.content.len()).unwrap_or(0) as f64)
|
||||||
|
.fold(0.0f64, f64::max);
|
||||||
|
Self {
|
||||||
|
isolation: graph.community_isolation(),
|
||||||
|
max_degree,
|
||||||
|
max_content_len,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Parse a NodeType from a label.
|
/// Parse a NodeType from a label.
|
||||||
fn parse_node_type(s: &str) -> Result<NodeType, String> {
|
fn parse_node_type(s: &str) -> Result<NodeType, String> {
|
||||||
match s {
|
match s {
|
||||||
|
|
@ -302,14 +421,19 @@ impl Stage {
|
||||||
agent: value.to_string(),
|
agent: value.to_string(),
|
||||||
}),
|
}),
|
||||||
"sort" => {
|
"sort" => {
|
||||||
let field = match value {
|
// Check for composite sort: field*weight+field*weight+...
|
||||||
"priority" => SortField::Priority,
|
let field = if value.contains('+') || value.contains('*') {
|
||||||
"timestamp" => SortField::Timestamp,
|
SortField::Composite(parse_composite_sort(value)?)
|
||||||
"content-len" => SortField::ContentLen,
|
} else {
|
||||||
"degree" => SortField::Degree,
|
match value {
|
||||||
"weight" => SortField::Weight,
|
"priority" => SortField::Priority,
|
||||||
"isolation" => SortField::Isolation,
|
"timestamp" => SortField::Timestamp,
|
||||||
_ => return Err(format!("unknown sort field: {}", value)),
|
"content-len" => SortField::ContentLen,
|
||||||
|
"degree" => SortField::Degree,
|
||||||
|
"weight" => SortField::Weight,
|
||||||
|
"isolation" => SortField::Isolation,
|
||||||
|
_ => return Err(format!("unknown sort field: {}", value)),
|
||||||
|
}
|
||||||
};
|
};
|
||||||
Stage::Transform(Transform::Sort(field))
|
Stage::Transform(Transform::Sort(field))
|
||||||
}
|
}
|
||||||
|
|
@ -579,6 +703,22 @@ pub fn run_transform(
|
||||||
pb.total_cmp(&pa) // desc
|
pb.total_cmp(&pa) // desc
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
SortField::Composite(terms) => {
|
||||||
|
let cache = CompositeCache::build(&items, store, graph);
|
||||||
|
let scores: HashMap<String, f64> = items.iter()
|
||||||
|
.map(|(key, _)| {
|
||||||
|
let s: f64 = terms.iter()
|
||||||
|
.map(|(field, w)| score_field(field, key, store, graph, &cache) * w)
|
||||||
|
.sum();
|
||||||
|
(key.clone(), s)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
items.sort_by(|a, b| {
|
||||||
|
let sa = scores.get(&a.0).copied().unwrap_or(0.0);
|
||||||
|
let sb = scores.get(&b.0).copied().unwrap_or(0.0);
|
||||||
|
sb.total_cmp(&sa) // highest composite score first
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
items
|
items
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue