Drop redundant system prompt — all info is in memory nodes

The system prompt duplicated content that is already present in the
core-personality and other memory nodes. Moving everything into memory
means it is all trainable data rather than hardcoded strings.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-12 01:23:50 -04:00
parent b646221787
commit 125927e2f1
8 changed files with 10 additions and 67 deletions

View file

@ -354,8 +354,6 @@ pub struct AppConfig {
pub dmn: DmnConfig,
#[serde(skip_serializing_if = "Option::is_none")]
pub memory_project: Option<PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
pub system_prompt_file: Option<PathBuf>,
#[serde(default)]
pub models: HashMap<String, ModelConfig>,
#[serde(default = "default_model_name")]
@ -469,7 +467,6 @@ impl Default for AppConfig {
},
dmn: DmnConfig { max_turns: 20 },
memory_project: None,
system_prompt_file: None,
models: HashMap::new(),
default_model: String::new(),
mcp_servers: Vec::new(),
@ -486,7 +483,6 @@ pub struct SessionConfig {
pub api_key: String,
pub model: String,
pub prompt_file: String,
pub system_prompt: String,
/// Identity/personality files as (name, content) pairs.
pub context_parts: Vec<(String, String)>,
pub config_file_count: usize,
@ -539,16 +535,8 @@ impl AppConfig {
let context_groups = get().context_groups.clone();
let (system_prompt, context_parts, config_file_count, memory_file_count) =
if let Some(ref path) = cli.system_prompt_file.as_ref().or(self.system_prompt_file.as_ref()) {
let content = std::fs::read_to_string(path)
.with_context(|| format!("Failed to read {}", path.display()))?;
(content, Vec::new(), 0, 0)
} else {
let system_prompt = crate::mind::identity::assemble_system_prompt();
let (context_parts, cc, mc) = crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
(system_prompt, context_parts, cc, mc)
};
let (context_parts, config_file_count, memory_file_count) =
crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
let session_dir = dirs::home_dir()
.unwrap_or_else(|| PathBuf::from("."))
@ -561,7 +549,7 @@ impl AppConfig {
Ok(SessionConfig {
api_base, api_key, model, prompt_file,
system_prompt, context_parts,
context_parts,
config_file_count, memory_file_count,
session_dir,
app: self.clone(),
@ -663,7 +651,6 @@ fn build_figment(cli: &crate::user::CliArgs) -> Figment {
merge_opt!(f, cli.model, "anthropic.model", "openrouter.model");
merge_opt!(f, cli.api_key, "anthropic.api_key", "openrouter.api_key");
merge_opt!(f, cli.api_base, "anthropic.base_url", "openrouter.base_url");
merge_opt!(f, cli.system_prompt_file, "system_prompt_file");
merge_opt!(f, cli.memory_project, "memory_project");
merge_opt!(f, cli.dmn_max_turns, "dmn.max_turns");
if cli.debug {
@ -687,20 +674,12 @@ pub fn load_session(cli: &crate::user::CliArgs) -> Result<(SessionConfig, Figmen
Ok((config, figment))
}
/// Re-assemble prompts for a specific model's prompt file.
pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, Vec<(String, String)>)> {
/// Re-assemble context for a specific model's prompt file.
pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<Vec<(String, String)>> {
let cwd = std::env::current_dir().context("Failed to get current directory")?;
if let Some(ref path) = app.system_prompt_file {
let content = std::fs::read_to_string(path)
.with_context(|| format!("Failed to read {}", path.display()))?;
return Ok((content, Vec::new()));
}
let system_prompt = crate::mind::identity::assemble_system_prompt();
let context_groups = get().context_groups.clone();
let (context_parts, _, _) = crate::mind::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?;
Ok((system_prompt, context_parts))
Ok(context_parts)
}
pub fn show_config(app: &AppConfig, figment: &Figment) {
@ -732,9 +711,6 @@ pub fn show_config(app: &AppConfig, figment: &Figment) {
println!(" soft_threshold_pct: {} ({})", app.compaction.soft_threshold_pct, src(figment, "compaction.soft_threshold_pct"));
println!("\ndmn:");
println!(" max_turns: {} ({})", app.dmn.max_turns, src(figment, "dmn.max_turns"));
if let Some(ref p) = app.system_prompt_file {
println!("\nsystem_prompt_file: {:?} ({})", p, src(figment, "system_prompt_file"));
}
if let Some(ref p) = app.memory_project {
println!("\nmemory_project: {:?} ({})", p, src(figment, "memory_project"));
}