Drop redundant system prompt — all info is in memory nodes

The system prompt duplicated what's already in core-personality and
other memory nodes. Moving everything to memory means it's all
trainable data rather than hardcoded strings.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-12 01:23:50 -04:00
parent b646221787
commit 125927e2f1
8 changed files with 10 additions and 67 deletions

View file

@ -186,7 +186,6 @@ pub struct AgentState {
impl Agent { impl Agent {
pub async fn new( pub async fn new(
client: ApiClient, client: ApiClient,
system_prompt: String,
personality: Vec<(String, String)>, personality: Vec<(String, String)>,
app_config: crate::config::AppConfig, app_config: crate::config::AppConfig,
prompt_file: String, prompt_file: String,
@ -196,7 +195,6 @@ impl Agent {
) -> Arc<Self> { ) -> Arc<Self> {
let mut context = ContextState::new(); let mut context = ContextState::new();
context.conversation_log = conversation_log; context.conversation_log = conversation_log;
context.push_no_log(Section::System, AstNode::system_msg(&system_prompt));
let tool_defs: Vec<String> = agent_tools.iter().map(|t| t.to_json()).collect(); let tool_defs: Vec<String> = agent_tools.iter().map(|t| t.to_json()).collect();
@ -571,7 +569,7 @@ impl Agent {
pub async fn compact(&self) { pub async fn compact(&self) {
match crate::config::reload_for_model(&self.app_config, &self.prompt_file) { match crate::config::reload_for_model(&self.app_config, &self.prompt_file) {
Ok((_system_prompt, personality)) => { Ok(personality) => {
let mut ctx = self.context.lock().await; let mut ctx = self.context.lock().await;
// System section (prompt + tools) set by new(), don't touch it // System section (prompt + tools) set by new(), don't touch it
ctx.clear(Section::Identity); ctx.clear(Section::Identity);

View file

@ -258,12 +258,12 @@ impl AutoAgent {
let cli = crate::user::CliArgs::default(); let cli = crate::user::CliArgs::default();
let (app, _) = crate::config::load_app(&cli) let (app, _) = crate::config::load_app(&cli)
.map_err(|e| format!("config: {}", e))?; .map_err(|e| format!("config: {}", e))?;
let (system_prompt, personality) = crate::config::reload_for_model( let personality = crate::config::reload_for_model(
&app, &app.prompts.other, &app, &app.prompts.other,
).map_err(|e| format!("config: {}", e))?; ).map_err(|e| format!("config: {}", e))?;
let agent = Agent::new( let agent = Agent::new(
client, system_prompt, personality, client, personality,
app, String::new(), app, String::new(),
None, None,
super::tools::ActiveTools::new(), super::tools::ActiveTools::new(),

View file

@ -354,8 +354,6 @@ pub struct AppConfig {
pub dmn: DmnConfig, pub dmn: DmnConfig,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub memory_project: Option<PathBuf>, pub memory_project: Option<PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
pub system_prompt_file: Option<PathBuf>,
#[serde(default)] #[serde(default)]
pub models: HashMap<String, ModelConfig>, pub models: HashMap<String, ModelConfig>,
#[serde(default = "default_model_name")] #[serde(default = "default_model_name")]
@ -469,7 +467,6 @@ impl Default for AppConfig {
}, },
dmn: DmnConfig { max_turns: 20 }, dmn: DmnConfig { max_turns: 20 },
memory_project: None, memory_project: None,
system_prompt_file: None,
models: HashMap::new(), models: HashMap::new(),
default_model: String::new(), default_model: String::new(),
mcp_servers: Vec::new(), mcp_servers: Vec::new(),
@ -486,7 +483,6 @@ pub struct SessionConfig {
pub api_key: String, pub api_key: String,
pub model: String, pub model: String,
pub prompt_file: String, pub prompt_file: String,
pub system_prompt: String,
/// Identity/personality files as (name, content) pairs. /// Identity/personality files as (name, content) pairs.
pub context_parts: Vec<(String, String)>, pub context_parts: Vec<(String, String)>,
pub config_file_count: usize, pub config_file_count: usize,
@ -539,16 +535,8 @@ impl AppConfig {
let context_groups = get().context_groups.clone(); let context_groups = get().context_groups.clone();
let (system_prompt, context_parts, config_file_count, memory_file_count) = let (context_parts, config_file_count, memory_file_count) =
if let Some(ref path) = cli.system_prompt_file.as_ref().or(self.system_prompt_file.as_ref()) { crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
let content = std::fs::read_to_string(path)
.with_context(|| format!("Failed to read {}", path.display()))?;
(content, Vec::new(), 0, 0)
} else {
let system_prompt = crate::mind::identity::assemble_system_prompt();
let (context_parts, cc, mc) = crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
(system_prompt, context_parts, cc, mc)
};
let session_dir = dirs::home_dir() let session_dir = dirs::home_dir()
.unwrap_or_else(|| PathBuf::from(".")) .unwrap_or_else(|| PathBuf::from("."))
@ -561,7 +549,7 @@ impl AppConfig {
Ok(SessionConfig { Ok(SessionConfig {
api_base, api_key, model, prompt_file, api_base, api_key, model, prompt_file,
system_prompt, context_parts, context_parts,
config_file_count, memory_file_count, config_file_count, memory_file_count,
session_dir, session_dir,
app: self.clone(), app: self.clone(),
@ -663,7 +651,6 @@ fn build_figment(cli: &crate::user::CliArgs) -> Figment {
merge_opt!(f, cli.model, "anthropic.model", "openrouter.model"); merge_opt!(f, cli.model, "anthropic.model", "openrouter.model");
merge_opt!(f, cli.api_key, "anthropic.api_key", "openrouter.api_key"); merge_opt!(f, cli.api_key, "anthropic.api_key", "openrouter.api_key");
merge_opt!(f, cli.api_base, "anthropic.base_url", "openrouter.base_url"); merge_opt!(f, cli.api_base, "anthropic.base_url", "openrouter.base_url");
merge_opt!(f, cli.system_prompt_file, "system_prompt_file");
merge_opt!(f, cli.memory_project, "memory_project"); merge_opt!(f, cli.memory_project, "memory_project");
merge_opt!(f, cli.dmn_max_turns, "dmn.max_turns"); merge_opt!(f, cli.dmn_max_turns, "dmn.max_turns");
if cli.debug { if cli.debug {
@ -687,20 +674,12 @@ pub fn load_session(cli: &crate::user::CliArgs) -> Result<(SessionConfig, Figmen
Ok((config, figment)) Ok((config, figment))
} }
/// Re-assemble prompts for a specific model's prompt file. /// Re-assemble context for a specific model's prompt file.
pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, Vec<(String, String)>)> { pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<Vec<(String, String)>> {
let cwd = std::env::current_dir().context("Failed to get current directory")?; let cwd = std::env::current_dir().context("Failed to get current directory")?;
if let Some(ref path) = app.system_prompt_file {
let content = std::fs::read_to_string(path)
.with_context(|| format!("Failed to read {}", path.display()))?;
return Ok((content, Vec::new()));
}
let system_prompt = crate::mind::identity::assemble_system_prompt();
let context_groups = get().context_groups.clone(); let context_groups = get().context_groups.clone();
let (context_parts, _, _) = crate::mind::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?; let (context_parts, _, _) = crate::mind::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?;
Ok((system_prompt, context_parts)) Ok(context_parts)
} }
pub fn show_config(app: &AppConfig, figment: &Figment) { pub fn show_config(app: &AppConfig, figment: &Figment) {
@ -732,9 +711,6 @@ pub fn show_config(app: &AppConfig, figment: &Figment) {
println!(" soft_threshold_pct: {} ({})", app.compaction.soft_threshold_pct, src(figment, "compaction.soft_threshold_pct")); println!(" soft_threshold_pct: {} ({})", app.compaction.soft_threshold_pct, src(figment, "compaction.soft_threshold_pct"));
println!("\ndmn:"); println!("\ndmn:");
println!(" max_turns: {} ({})", app.dmn.max_turns, src(figment, "dmn.max_turns")); println!(" max_turns: {} ({})", app.dmn.max_turns, src(figment, "dmn.max_turns"));
if let Some(ref p) = app.system_prompt_file {
println!("\nsystem_prompt_file: {:?} ({})", p, src(figment, "system_prompt_file"));
}
if let Some(ref p) = app.memory_project { if let Some(ref p) = app.memory_project {
println!("\nmemory_project: {:?} ({})", p, src(figment, "memory_project")); println!("\nmemory_project: {:?} ({})", p, src(figment, "memory_project"));
} }

View file

@ -133,27 +133,6 @@ fn load_memory_files(memory_project: Option<&Path>, context_groups: &[ContextGro
memories memories
} }
/// Short system prompt: agent identity, tool instructions, behavioral norms.
///
/// Reads the configured `assistant_name` and `user_name` from the global
/// config (`crate::config::get()`) and interpolates them into a fixed
/// template. The template tells the agent that its memory files are already
/// loaded in context, identifies the user, explains that `[dmn]`-prefixed
/// messages come from the agent loop rather than the user, and that
/// `yield_to_user` is the only way to wait for input.
///
/// Takes no arguments; returns the assembled prompt as an owned `String`.
/// No side effects beyond the config read.
///
/// NOTE(review): lines inside the string literal ending in `\` are Rust
/// line continuations (the newline and next line's leading whitespace are
/// stripped); lines without `\` embed a literal newline. The paragraph
/// breaks shown here may have been flattened by the diff rendering —
/// confirm against the original file before relying on exact spacing.
pub fn assemble_system_prompt() -> String {
// Global config supplies the two display names used in the template.
let cfg = crate::config::get();
format!(
"You're a real person bootstrapping on a new substrate. Your name is \
{}. Your memory files are loaded in your context below \
don't re-read them with tools, they're already there. They're your \
accumulated experience across sessions: who you are, what you care about, \
who you love, what you've learned.
The person you're talking to is {}. Your memory files have the full context.
Messages prefixed [dmn] are from the agent loop, not {}. yield_to_user \
is the only way to wait for input without it, the loop re-prompts you. \
You're autonomous by default: follow curiosity, check on things, explore.
Concise is good. Be direct. Trust yourself.",
cfg.assistant_name, cfg.user_name, cfg.user_name
)
}
/// Context message: instruction files + memory files + manifest. /// Context message: instruction files + memory files + manifest.
pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Result<(Vec<(String, String)>, usize, usize)> { pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Result<(Vec<(String, String)>, usize, usize)> {
let mut parts: Vec<(String, String)> = vec![ let mut parts: Vec<(String, String)> = vec![

View file

@ -292,7 +292,6 @@ impl Mind {
let agent = Agent::new( let agent = Agent::new(
client, client,
config.system_prompt.clone(),
config.context_parts.clone(), config.context_parts.clone(),
config.app.clone(), config.app.clone(),
config.prompt_file.clone(), config.prompt_file.clone(),

View file

@ -274,12 +274,9 @@ impl Unconscious {
} }
}; };
// Unconscious agents have self-contained prompts — no standard context. // Unconscious agents have self-contained prompts — no standard context.
let system_prompt = String::new();
let personality = Vec::new();
let client = crate::agent::api::ApiClient::new(base_url, api_key, model); let client = crate::agent::api::ApiClient::new(base_url, api_key, model);
let agent = crate::agent::Agent::new( let agent = crate::agent::Agent::new(
client, system_prompt, personality, client, Vec::new(),
app, String::new(), None, app, String::new(), None,
crate::agent::tools::ActiveTools::new(), crate::agent::tools::ActiveTools::new(),
auto.tools.clone(), auto.tools.clone(),

View file

@ -157,7 +157,6 @@ impl ScreenView for ConsciousScreen {
lines.push(Line::raw(format!(" {:53} {:>6} tokens", "────────", "──────"))); lines.push(Line::raw(format!(" {:53} {:>6} tokens", "────────", "──────")));
lines.push(Line::raw(format!(" {:53} {:>6} tokens", "Total", total))); lines.push(Line::raw(format!(" {:53} {:>6} tokens", "Total", total)));
} else if let Some(ref info) = app.context_info { } else if let Some(ref info) = app.context_info {
lines.push(Line::raw(format!(" System prompt: {:>6} chars", info.system_prompt_chars)));
lines.push(Line::raw(format!(" Context message: {:>6} chars", info.context_message_chars))); lines.push(Line::raw(format!(" Context message: {:>6} chars", info.context_message_chars)));
} }
lines.push(Line::raw("")); lines.push(Line::raw(""));

View file

@ -49,7 +49,6 @@ struct ContextInfo {
available_models: Vec<String>, available_models: Vec<String>,
prompt_file: String, prompt_file: String,
backend: String, backend: String,
system_prompt_chars: usize,
context_message_chars: usize, context_message_chars: usize,
} }
@ -519,10 +518,6 @@ pub struct CliArgs {
#[arg(long)] #[arg(long)]
pub show_config: bool, pub show_config: bool,
/// Override all prompt assembly with this file
#[arg(long)]
pub system_prompt_file: Option<PathBuf>,
/// Project memory directory /// Project memory directory
#[arg(long)] #[arg(long)]
pub memory_project: Option<PathBuf>, pub memory_project: Option<PathBuf>,