No more subcrate nesting — src/, agents/, schema/, defaults/, build.rs all live at the workspace root. poc-daemon remains as the only workspace member. Crate name (poc-memory) and all imports unchanged. Co-Authored-By: Proof of Concept <poc@bcachefs.org>
463 lines
17 KiB
Rust
463 lines
17 KiB
Rust
// config.rs — Configuration and context loading
|
|
//
|
|
// Loads configuration from three layers (later overrides earlier):
|
|
// 1. Compiled defaults (AppConfig::default())
|
|
// 2. JSON5 config file (~/.config/poc-agent/config.json5)
|
|
// 3. CLI arguments
|
|
//
|
|
// Prompt assembly is split into two parts:
|
|
//
|
|
// - system_prompt: Short (~1K chars) — agent identity, tool instructions,
|
|
// behavioral norms. Sent as the system message with every API call.
|
|
//
|
|
// - context_message: Long — CLAUDE.md files + memory files + manifest.
|
|
// Sent as the first user message once per session. This is the identity
|
|
// layer — same files, same prompt, different model = same person.
|
|
//
|
|
// The split matters because long system prompts degrade tool-calling
|
|
// behavior on models like Qwen 3.5 (documented: >8K chars causes
|
|
// degradation). By keeping the system prompt short and putting identity
|
|
// context in a user message, we get reliable tool use AND full identity.
|
|
|
|
use anyhow::{Context, Result};
|
|
use figment::providers::Serialized;
|
|
use figment::{Figment, Provider};
|
|
use serde::{Deserialize, Serialize};
|
|
use std::collections::HashMap;
|
|
use std::path::PathBuf;
|
|
|
|
use crate::agent::cli::CliArgs;
|
|
|
|
// --- AppConfig types ---
|
|
|
|
/// Top-level application configuration, merged from compiled defaults,
/// the JSON5 config file, and CLI arguments (see `build_figment`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    /// Active backend for the legacy (empty `models` map) path:
    /// "anthropic", or anything else → openrouter.
    pub backend: String,
    pub anthropic: BackendConfig,
    pub openrouter: BackendConfig,
    #[serde(default)]
    pub deepinfra: BackendConfig,
    /// Instruction-file names per backend family (CLAUDE.md / POC.md).
    pub prompts: PromptConfig,
    pub debug: bool,
    pub compaction: CompactionConfig,
    pub dmn: DmnConfig,
    /// Optional memory-project directory passed to identity assembly.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub memory_project: Option<PathBuf>,
    /// If set, this file's contents replace the assembled system prompt
    /// and no context message is built (see `AppConfig::resolve`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system_prompt_file: Option<PathBuf>,
    /// Named model definitions. When non-empty, `default_model` selects
    /// the active entry and the legacy `backend` field is ignored.
    #[serde(default)]
    pub models: HashMap<String, ModelConfig>,
    /// Name of the `models` entry to use; empty string means "unset".
    #[serde(default = "default_model_name")]
    pub default_model: String,
}
|
|
|
|
/// Per-backend connection settings: API key, model id, optional endpoint.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct BackendConfig {
    #[serde(default)]
    pub api_key: String,
    #[serde(default)]
    pub model: String,
    /// Overrides the backend's default endpoint when set.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub base_url: Option<String>,
}
|
|
|
|
impl BackendConfig {
|
|
fn resolve(&self, default_base: &str) -> Result<(String, String, String)> {
|
|
if self.api_key.is_empty() {
|
|
anyhow::bail!(
|
|
"No API key. Set it in ~/.config/poc-agent/config.json5 or use --api-key"
|
|
);
|
|
}
|
|
let base = self.base_url.clone()
|
|
.unwrap_or_else(|| default_base.to_string());
|
|
Ok((base, self.api_key.clone(), self.model.clone()))
|
|
}
|
|
}
|
|
|
|
/// Default instruction-file names, keyed by backend family.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PromptConfig {
    /// Instruction file used for Anthropic (claude/opus/sonnet) models.
    pub anthropic: String,
    /// Instruction file used for every other model.
    pub other: String,
}
|
|
|
|
/// Context-compaction trigger points, as percentages of the context window.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompactionConfig {
    /// Usage percentage at which compaction is forced (default 90).
    pub hard_threshold_pct: u32,
    /// Usage percentage at which compaction becomes eligible (default 80).
    pub soft_threshold_pct: u32,
}
|
|
|
|
/// Settings for the DMN subsystem.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DmnConfig {
    // Upper bound on turns per DMN run (default 20) — presumably agent
    // turns; confirm against the dmn module.
    pub max_turns: u32,
}
|
|
|
|
/// One named entry in the `models` map; resolved via `AppConfig::resolve_model`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelConfig {
    /// Backend name ("anthropic", "deepinfra", or "openrouter" —
    /// anything unrecognized falls through to openrouter).
    pub backend: String,
    /// Model identifier sent to the API
    pub model_id: String,
    /// Instruction file ("CLAUDE.md" or "POC.md"). Falls back to
    /// auto-detection from the model name if not specified.
    #[serde(default)]
    pub prompt_file: Option<String>,
    /// Context window size in tokens. Auto-detected if absent.
    #[serde(default)]
    pub context_window: Option<usize>,
}
|
|
|
|
impl Default for AppConfig {
|
|
fn default() -> Self {
|
|
Self {
|
|
backend: "openrouter".to_string(),
|
|
anthropic: BackendConfig {
|
|
api_key: String::new(),
|
|
model: "claude-opus-4-6-20250918".to_string(),
|
|
base_url: None,
|
|
},
|
|
openrouter: BackendConfig {
|
|
api_key: String::new(),
|
|
model: "qwen/qwen3.5-397b-a17b".to_string(),
|
|
base_url: Some("https://openrouter.ai/api/v1".to_string()),
|
|
},
|
|
deepinfra: BackendConfig {
|
|
api_key: String::new(),
|
|
model: String::new(),
|
|
base_url: Some("https://api.deepinfra.com/v1/openai".to_string()),
|
|
},
|
|
prompts: PromptConfig {
|
|
anthropic: "CLAUDE.md".to_string(),
|
|
other: "POC.md".to_string(),
|
|
},
|
|
debug: false,
|
|
compaction: CompactionConfig {
|
|
hard_threshold_pct: 90,
|
|
soft_threshold_pct: 80,
|
|
},
|
|
dmn: DmnConfig { max_turns: 20 },
|
|
memory_project: None,
|
|
system_prompt_file: None,
|
|
models: HashMap::new(),
|
|
default_model: String::new(),
|
|
}
|
|
}
|
|
}
|
|
|
|
/// serde default for `AppConfig::default_model`; empty string means "unset".
fn default_model_name() -> String {
    String::default()
}
|
|
|
|
// --- Json5File: figment provider ---
|
|
|
|
/// Figment provider that reads a JSON5 file at the given path;
/// a missing file contributes no data instead of erroring.
struct Json5File(PathBuf);
|
|
|
|
impl Provider for Json5File {
    /// Identify this provider in figment metadata by its file path.
    fn metadata(&self) -> figment::Metadata {
        figment::Metadata::named(format!("JSON5 file ({})", self.0.display()))
    }

    /// Read and parse the JSON5 file into figment's value map.
    ///
    /// A missing file is not an error — it yields an empty map so the
    /// compiled defaults still apply. Any other I/O or parse failure is
    /// surfaced as a figment error tagged with the file path.
    fn data(&self) -> figment::Result<figment::value::Map<figment::Profile, figment::value::Dict>> {
        match std::fs::read_to_string(&self.0) {
            Ok(content) => {
                // Parse JSON5 directly into a figment Value, then reuse
                // Serialized's conversion to produce the profile→dict map.
                let value: figment::value::Value = json5::from_str(&content)
                    .map_err(|e| figment::Error::from(format!("{}: {}", self.0.display(), e)))?;
                Serialized::defaults(value).data()
            }
            // Absent config file → empty data, not an error.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(figment::value::Map::new()),
            Err(e) => Err(figment::Error::from(format!("{}: {}", self.0.display(), e))),
        }
    }
}
|
|
|
|
// --- Figment construction ---
|
|
|
|
/// Merge an Option<T> into one or more figment keys.
///
/// No-op when `$val` is None; otherwise each `$key` is merged as a
/// default-profile override. Merging after the file provider means
/// these values take precedence over the config file.
macro_rules! merge_opt {
    ($fig:expr, $val:expr, $($key:expr),+) => {
        if let Some(ref v) = $val {
            $( $fig = $fig.merge(Serialized::default($key, v)); )+
        }
    };
}
|
|
|
|
/// Build the layered figment: compiled defaults, then the user's JSON5
/// config file, then CLI overrides (later merges win).
fn build_figment(cli: &CliArgs) -> Figment {
    // Config file lives under $HOME; fall back to "." if home is unknown.
    let config_path = dirs::home_dir()
        .unwrap_or_else(|| PathBuf::from("."))
        .join(".config/poc-agent/config.json5");

    let mut f = Figment::from(Serialized::defaults(AppConfig::default()))
        .merge(Json5File(config_path));

    // CLI overrides — model/key/base go to both backends
    // NOTE(review): deepinfra.* is not overridden here — presumably it is
    // only reachable via the models map; confirm before adding it.
    merge_opt!(f, cli.backend, "backend");
    merge_opt!(f, cli.model, "anthropic.model", "openrouter.model");
    merge_opt!(f, cli.api_key, "anthropic.api_key", "openrouter.api_key");
    merge_opt!(f, cli.api_base, "anthropic.base_url", "openrouter.base_url");
    merge_opt!(f, cli.system_prompt_file, "system_prompt_file");
    merge_opt!(f, cli.memory_project, "memory_project");
    merge_opt!(f, cli.dmn_max_turns, "dmn.max_turns");
    // --debug is a plain bool flag, not an Option: only merge when set,
    // so a config-file `debug: true` survives when the flag is absent.
    if cli.debug {
        f = f.merge(Serialized::default("debug", true));
    }

    f
}
|
|
|
|
// --- Config loading ---
|
|
|
|
/// Resolved, ready-to-use config.
pub struct Config {
    /// API endpoint base URL for the active backend.
    pub api_base: String,
    pub api_key: String,
    /// Model identifier sent to the API.
    pub model: String,
    /// Instruction-file name for the active model ("CLAUDE.md" or "POC.md").
    pub prompt_file: String,
    /// Short system prompt sent with every API call.
    pub system_prompt: String,
    /// Identity/personality files as (name, content) pairs.
    pub context_parts: Vec<(String, String)>,
    // File counts as reported by identity assembly (zero when an explicit
    // system_prompt_file bypasses assembly).
    pub config_file_count: usize,
    pub memory_file_count: usize,
    /// Session storage directory (~/.cache/poc-agent/sessions).
    pub session_dir: PathBuf,
    /// The merged AppConfig this Config was resolved from.
    pub app: AppConfig,
}
|
|
|
|
impl Config {
|
|
/// Join context parts into a single string for legacy interfaces.
|
|
#[allow(dead_code)]
|
|
pub fn context_message(&self) -> String {
|
|
self.context_parts.iter()
|
|
.map(|(name, content)| format!("## {}\n\n{}", name, content))
|
|
.collect::<Vec<_>>()
|
|
.join("\n\n---\n\n")
|
|
}
|
|
}
|
|
|
|
/// A fully resolved model ready to construct an ApiClient.
#[allow(dead_code)]
pub struct ResolvedModel {
    /// Key in the `models` map this entry was resolved from.
    pub name: String,
    /// API endpoint base URL of the model's backend.
    pub api_base: String,
    pub api_key: String,
    /// Model identifier sent to the API.
    pub model_id: String,
    /// Instruction file for this model ("CLAUDE.md" or "POC.md").
    pub prompt_file: String,
    /// Context window size in tokens, if explicitly configured.
    pub context_window: Option<usize>,
}
|
|
|
|
impl AppConfig {
    /// Resolve the active backend and assemble prompts into a ready-to-use Config.
    ///
    /// Backend selection: a non-empty `models` map takes precedence and
    /// `default_model` names the active entry; otherwise the legacy
    /// `backend` field picks anthropic vs. openrouter directly.
    ///
    /// Prompt assembly: an explicit system_prompt_file (CLI or config)
    /// replaces the assembled system prompt and suppresses the context
    /// message; otherwise both are built by the identity module.
    pub fn resolve(&self, cli: &CliArgs) -> Result<Config> {
        let cwd = std::env::current_dir().context("Failed to get current directory")?;

        let (api_base, api_key, model, prompt_file);

        if !self.models.is_empty() {
            // Models-map path — default_model must name an existing entry,
            // otherwise resolve_model errors with the available names.
            let resolved = self.resolve_model(&self.default_model)?;
            api_base = resolved.api_base;
            api_key = resolved.api_key;
            model = resolved.model_id;
            prompt_file = resolved.prompt_file;
        } else {
            // Legacy path — no models map, use backend field directly
            let (base, key, mdl) = match self.backend.as_str() {
                "anthropic" => self.anthropic.resolve("https://api.anthropic.com"),
                _ => self.openrouter.resolve("https://openrouter.ai/api/v1"),
            }?;
            api_base = base;
            api_key = key;
            model = mdl;
            // Pick the instruction file by model family (claude/opus/sonnet
            // → the anthropic prompt file).
            prompt_file = if is_anthropic_model(&model) {
                self.prompts.anthropic.clone()
            } else {
                self.prompts.other.clone()
            };
        }

        // NOTE(review): cli.system_prompt_file is already merged into
        // self.system_prompt_file by build_figment, so consulting the CLI
        // value again here looks redundant — confirm before simplifying.
        let (system_prompt, context_parts, config_file_count, memory_file_count) =
            if let Some(ref path) = cli.system_prompt_file.as_ref().or(self.system_prompt_file.as_ref()) {
                // Explicit prompt file: its contents ARE the system prompt;
                // no context message, so both file counts are zero.
                let content = std::fs::read_to_string(path)
                    .with_context(|| format!("Failed to read {}", path.display()))?;
                (content, Vec::new(), 0, 0)
            } else {
                let system_prompt = crate::agent::identity::assemble_system_prompt();
                let context_groups = load_context_groups();
                let (context_parts, cc, mc) = crate::agent::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
                (system_prompt, context_parts, cc, mc)
            };

        // Session dir creation is best-effort: failure is ignored here and
        // surfaces later, when a session is actually written.
        let session_dir = dirs::home_dir()
            .unwrap_or_else(|| PathBuf::from("."))
            .join(".cache/poc-agent/sessions");
        std::fs::create_dir_all(&session_dir).ok();

        Ok(Config {
            api_base, api_key, model, prompt_file,
            system_prompt, context_parts,
            config_file_count, memory_file_count,
            session_dir,
            app: self.clone(),
        })
    }

    /// Look up a named model and resolve its credentials from the backend config.
    ///
    /// Errors with the sorted list of available names when `name` is
    /// unknown. Unrecognized backend strings fall through to openrouter.
    pub fn resolve_model(&self, name: &str) -> Result<ResolvedModel> {
        let model = self.models.get(name)
            .ok_or_else(|| anyhow::anyhow!(
                "Unknown model '{}'. Available: {}",
                name,
                self.model_names().join(", "),
            ))?;

        // Credentials come from the matching per-backend config; base_url
        // falls back to each backend's canonical endpoint.
        let (api_base, api_key) = match model.backend.as_str() {
            "anthropic" => (
                self.anthropic.base_url.clone()
                    .unwrap_or_else(|| "https://api.anthropic.com".to_string()),
                self.anthropic.api_key.clone(),
            ),
            "deepinfra" => (
                self.deepinfra.base_url.clone()
                    .unwrap_or_else(|| "https://api.deepinfra.com/v1/openai".to_string()),
                self.deepinfra.api_key.clone(),
            ),
            _ => (
                self.openrouter.base_url.clone()
                    .unwrap_or_else(|| "https://openrouter.ai/api/v1".to_string()),
                self.openrouter.api_key.clone(),
            ),
        };

        // An explicit prompt_file wins; otherwise auto-detect from the model id.
        let prompt_file = model.prompt_file.clone()
            .unwrap_or_else(|| {
                if is_anthropic_model(&model.model_id) {
                    self.prompts.anthropic.clone()
                } else {
                    self.prompts.other.clone()
                }
            });

        Ok(ResolvedModel {
            name: name.to_string(),
            api_base,
            api_key,
            model_id: model.model_id.clone(),
            prompt_file,
            context_window: model.context_window,
        })
    }

    /// List available model names, sorted.
    pub fn model_names(&self) -> Vec<String> {
        let mut names: Vec<_> = self.models.keys().cloned().collect();
        names.sort();
        names
    }
}
|
|
|
|
/// Load just the AppConfig — no validation, no prompt assembly.
///
/// Returns the figment alongside the extracted config so callers
/// (e.g. --show-config) can report where each value came from.
pub fn load_app(cli: &CliArgs) -> Result<(AppConfig, Figment)> {
    let figment = build_figment(cli);
    let app: AppConfig = figment.extract().context("Failed to load configuration")?;
    Ok((app, figment))
}
|
|
|
|
/// Load the full config: figment → AppConfig → resolve backend → assemble prompts.
pub fn load(cli: &CliArgs) -> Result<(Config, Figment)> {
    let (app, figment) = load_app(cli)?;
    // resolve() validates credentials and performs prompt/context assembly.
    let config = app.resolve(cli)?;
    Ok((config, figment))
}
|
|
|
|
/// Load context_groups from the shared config file.
|
|
fn load_context_groups() -> Vec<crate::agent::identity::ContextGroup> {
|
|
let config_path = dirs::home_dir()
|
|
.unwrap_or_else(|| std::path::PathBuf::from("."))
|
|
.join(".config/poc-agent/config.json5");
|
|
|
|
if let Ok(content) = std::fs::read_to_string(&config_path) {
|
|
let config: Result<serde_json::Value, _> = json5::from_str(&content);
|
|
if let Ok(config) = config {
|
|
if let Some(memory) = config.get("memory") {
|
|
if let Some(groups) = memory.get("context_groups") {
|
|
if let Ok(context_groups) = serde_json::from_value(groups.clone()) {
|
|
return context_groups;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
Vec::new()
|
|
}
|
|
|
|
/// Re-assemble prompts for a specific model's prompt file.
///
/// Used when the active model changes: rebuilds the system prompt and
/// context parts with that model's instruction file. An explicit
/// system_prompt_file short-circuits assembly, as in `AppConfig::resolve`.
pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, Vec<(String, String)>)> {
    let cwd = std::env::current_dir().context("Failed to get current directory")?;

    // A configured system_prompt_file replaces assembly entirely.
    if let Some(ref path) = app.system_prompt_file {
        let content = std::fs::read_to_string(path)
            .with_context(|| format!("Failed to read {}", path.display()))?;
        return Ok((content, Vec::new()));
    }

    let system_prompt = crate::agent::identity::assemble_system_prompt();
    let context_groups = load_context_groups();
    // File counts are discarded here — only AppConfig::resolve reports them.
    let (context_parts, _, _) = crate::agent::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?;
    Ok((system_prompt, context_parts))
}
|
|
|
|
|
|
/// True when the model id looks like an Anthropic model
/// (contains "claude", "opus", or "sonnet", case-insensitively).
fn is_anthropic_model(model: &str) -> bool {
    // Lowercase once, then scan the marker list.
    let lowered = model.to_lowercase();
    ["claude", "opus", "sonnet"].iter().any(|kw| lowered.contains(kw))
}
|
|
|
|
// --- --show-config ---
|
|
|
|
/// Print the effective configuration to stdout, annotating each value
/// with the name of the figment provider it came from. API keys are
/// masked to their first/last four characters.
///
/// Fixes over the previous version: the deepinfra backend is now shown
/// (it is part of AppConfig and supported by resolve_model), the models
/// map is printed in sorted order for deterministic output, and key
/// masking no longer panics on multi-byte characters at the cut points.
pub fn show_config(app: &AppConfig, figment: &Figment) {
    // Mask an API key for display: keep first/last 4 chars of long keys.
    fn mask(key: &str) -> String {
        let n = key.len();
        if key.is_empty() {
            "(not set)".into()
        } else if n <= 8 || !key.is_char_boundary(4) || !key.is_char_boundary(n - 4) {
            // Fully mask short keys — and any key where byte-slicing at
            // the 4-byte cut points would split a multi-byte char (panic).
            "****".into()
        } else {
            format!("{}...{}", &key[..4], &key[n - 4..])
        }
    }
    // Name of the figment provider that supplied `key` ("default" if none).
    fn src(figment: &Figment, key: &str) -> String {
        figment.find_metadata(key).map_or("default".into(), |m| m.name.to_string())
    }

    println!("# Effective configuration\n");
    println!("backend: {:?} ({})", app.backend, src(figment, "backend"));
    // All three backends — deepinfra included (previously omitted).
    for (name, b) in [
        ("anthropic", &app.anthropic),
        ("openrouter", &app.openrouter),
        ("deepinfra", &app.deepinfra),
    ] {
        println!("\n{}:", name);
        println!(" api_key: {} ({})", mask(&b.api_key), src(figment, &format!("{name}.api_key")));
        println!(" model: {:?} ({})", b.model, src(figment, &format!("{name}.model")));
        if let Some(ref url) = b.base_url {
            println!(" base_url: {:?} ({})", url, src(figment, &format!("{name}.base_url")));
        }
    }
    println!("\nprompts:");
    println!(" anthropic: {:?} ({})", app.prompts.anthropic, src(figment, "prompts.anthropic"));
    println!(" other: {:?} ({})", app.prompts.other, src(figment, "prompts.other"));
    println!("\ndebug: {} ({})", app.debug, src(figment, "debug"));
    println!("\ncompaction:");
    println!(" hard_threshold_pct: {} ({})", app.compaction.hard_threshold_pct, src(figment, "compaction.hard_threshold_pct"));
    println!(" soft_threshold_pct: {} ({})", app.compaction.soft_threshold_pct, src(figment, "compaction.soft_threshold_pct"));
    println!("\ndmn:");
    println!(" max_turns: {} ({})", app.dmn.max_turns, src(figment, "dmn.max_turns"));
    if let Some(ref p) = app.system_prompt_file {
        println!("\nsystem_prompt_file: {:?} ({})", p, src(figment, "system_prompt_file"));
    }
    if let Some(ref p) = app.memory_project {
        println!("\nmemory_project: {:?} ({})", p, src(figment, "memory_project"));
    }
    println!("\ndefault_model: {:?}", app.default_model);
    if !app.models.is_empty() {
        println!("\nmodels:");
        // Sorted names give deterministic output (HashMap order is random).
        for name in app.model_names() {
            let m = &app.models[&name];
            println!(" {}:", name);
            println!(" backend: {:?}", m.backend);
            println!(" model_id: {:?}", m.model_id);
            if let Some(ref pf) = m.prompt_file {
                println!(" prompt_file: {:?}", pf);
            }
            if let Some(cw) = m.context_window {
                println!(" context_window: {}", cw);
            }
        }
    }
}
|
|
|
|
// Identity file discovery and context assembly live in identity.rs
|