cleanup: remove model name string matching

model_context_window() now reads from config.api_context_window
instead of guessing from model name strings. is_anthropic_model()
replaced with backend == "anthropic" checks. Dead model field
removed from AgentDef/AgentHeader.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-02 14:09:54 -04:00
parent 47c6694b10
commit 078dcf22d0
5 changed files with 15 additions and 32 deletions

View file

@@ -6,15 +6,8 @@ use crate::agent::types::*;
use tiktoken_rs::CoreBPE; use tiktoken_rs::CoreBPE;
/// Look up a model's context window size in tokens. /// Look up a model's context window size in tokens.
pub fn model_context_window(model: &str) -> usize { pub fn model_context_window(_model: &str) -> usize {
let m = model.to_lowercase(); crate::config::get().api_context_window
if m.contains("opus") || m.contains("sonnet") {
200_000
} else if m.contains("qwen") {
131_072
} else {
128_000
}
} }
/// Context budget in tokens: 60% of the model's context window. /// Context budget in tokens: 60% of the model's context window.

View file

@@ -53,6 +53,7 @@ pub struct ContextGroup {
} }
fn default_true() -> bool { true } fn default_true() -> bool { true }
fn default_context_window() -> usize { 128_000 }
fn default_identity_dir() -> PathBuf { fn default_identity_dir() -> PathBuf {
PathBuf::from(std::env::var("HOME").expect("HOME not set")).join(".consciousness/identity") PathBuf::from(std::env::var("HOME").expect("HOME not set")).join(".consciousness/identity")
} }
@@ -85,6 +86,8 @@ pub struct Config {
pub api_key: Option<String>, pub api_key: Option<String>,
#[serde(skip)] #[serde(skip)]
pub api_model: Option<String>, pub api_model: Option<String>,
#[serde(skip, default = "default_context_window")]
pub api_context_window: usize,
/// Used to resolve API settings, not stored on Config /// Used to resolve API settings, not stored on Config
#[serde(default)] #[serde(default)]
agent_model: Option<String>, agent_model: Option<String>,
@@ -134,6 +137,7 @@ impl Default for Config {
api_base_url: None, api_base_url: None,
api_key: None, api_key: None,
api_model: None, api_model: None,
api_context_window: default_context_window(),
agent_model: None, agent_model: None,
api_reasoning: "high".to_string(), api_reasoning: "high".to_string(),
agent_types: vec![ agent_types: vec![
@@ -178,6 +182,9 @@ impl Config {
.and_then(|v| v.as_str()).map(String::from); .and_then(|v| v.as_str()).map(String::from);
} }
config.api_model = Some(model_id.to_string()); config.api_model = Some(model_id.to_string());
if let Some(cw) = model_cfg.get("context_window").and_then(|v| v.as_u64()) {
config.api_context_window = cw as usize;
}
} }
Some(config) Some(config)
@@ -479,7 +486,7 @@ impl AppConfig {
api_base = base; api_base = base;
api_key = key; api_key = key;
model = mdl; model = mdl;
prompt_file = if is_anthropic_model(&model) { prompt_file = if self.backend == "anthropic" {
self.prompts.anthropic.clone() self.prompts.anthropic.clone()
} else { } else {
self.prompts.other.clone() self.prompts.other.clone()
@@ -546,7 +553,7 @@ impl AppConfig {
let prompt_file = model.prompt_file.clone() let prompt_file = model.prompt_file.clone()
.unwrap_or_else(|| { .unwrap_or_else(|| {
if is_anthropic_model(&model.model_id) { if model.backend == "anthropic" {
self.prompts.anthropic.clone() self.prompts.anthropic.clone()
} else { } else {
self.prompts.other.clone() self.prompts.other.clone()
@@ -651,11 +658,6 @@ pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, V
Ok((system_prompt, context_parts)) Ok((system_prompt, context_parts))
} }
fn is_anthropic_model(model: &str) -> bool {
let m = model.to_lowercase();
m.contains("claude") || m.contains("opus") || m.contains("sonnet")
}
pub fn show_config(app: &AppConfig, figment: &Figment) { pub fn show_config(app: &AppConfig, figment: &Figment) {
fn mask(key: &str) -> String { fn mask(key: &str) -> String {
if key.is_empty() { "(not set)".into() } if key.is_empty() { "(not set)".into() }

View file

@@ -39,7 +39,6 @@ pub struct AgentDef {
/// Steps — single-step agents have one entry, multi-step have several. /// Steps — single-step agents have one entry, multi-step have several.
/// Steps are separated by `=== PROMPT ===` in the .agent file. /// Steps are separated by `=== PROMPT ===` in the .agent file.
pub steps: Vec<AgentStep>, pub steps: Vec<AgentStep>,
pub model: String,
pub schedule: String, pub schedule: String,
pub tools: Vec<String>, pub tools: Vec<String>,
pub count: Option<usize>, pub count: Option<usize>,
@@ -60,8 +59,6 @@ struct AgentHeader {
agent: String, agent: String,
#[serde(default)] #[serde(default)]
query: String, query: String,
#[serde(default = "default_model")]
model: String,
#[serde(default)] #[serde(default)]
schedule: String, schedule: String,
#[serde(default)] #[serde(default)]
@@ -87,7 +84,6 @@ struct AgentHeader {
bail: Option<String>, bail: Option<String>,
} }
fn default_model() -> String { "sonnet".into() }
fn default_priority() -> i32 { 10 } fn default_priority() -> i32 { 10 }
/// Parse an agent file: first line is JSON config, rest is the prompt(s). /// Parse an agent file: first line is JSON config, rest is the prompt(s).
@@ -149,7 +145,6 @@ fn parse_agent_file(content: &str) -> Option<AgentDef> {
agent: header.agent, agent: header.agent,
query: header.query, query: header.query,
steps, steps,
model: header.model,
schedule: header.schedule, schedule: header.schedule,
tools: header.tools, tools: header.tools,
count: header.count, count: header.count,

View file

@@ -340,8 +340,8 @@ fn run_one_agent_inner(
}; };
let phases: Vec<&str> = agent_batch.steps.iter().map(|s| s.phase.as_str()).collect(); let phases: Vec<&str> = agent_batch.steps.iter().map(|s| s.phase.as_str()).collect();
log(&format!("{} step(s) {:?}, {}KB initial, model={}, {}, {} nodes, output={}", log(&format!("{} step(s) {:?}, {}KB initial, {}, {} nodes, output={}",
n_steps, phases, first_len / 1024, def.model, tools_desc, n_steps, phases, first_len / 1024, tools_desc,
agent_batch.node_keys.len(), state_dir.display())); agent_batch.node_keys.len(), state_dir.display()));
let prompts: Vec<String> = agent_batch.steps.iter() let prompts: Vec<String> = agent_batch.steps.iter()

View file

@@ -11,15 +11,8 @@ use chrono::{DateTime, Utc};
use tiktoken_rs::CoreBPE; use tiktoken_rs::CoreBPE;
/// Look up a model's context window size in tokens. /// Look up a model's context window size in tokens.
pub fn model_context_window(model: &str) -> usize { pub fn model_context_window(_model: &str) -> usize {
let m = model.to_lowercase(); crate::config::get().api_context_window
if m.contains("opus") || m.contains("sonnet") {
200_000
} else if m.contains("qwen") {
131_072
} else {
128_000
}
} }
/// Context budget in tokens: 60% of the model's context window. /// Context budget in tokens: 60% of the model's context window.