config: derive Deserialize on Config, eliminate manual field extraction

Config now derives serde::Deserialize with #[serde(default)] for all
fields. Path fields use custom deserialize_path/deserialize_path_opt
for ~ expansion. ContextGroup and ContextSource also derive Deserialize.

try_load_shared() is now 20 lines instead of 100: parse the json5 file,
deserialize the "memory" section via serde directly into Config, then
resolve the API settings from the model/backend cross-reference.

Removes MemoryConfigRaw intermediate struct entirely.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Kent Overstreet 2026-03-20 14:10:57 -04:00
parent f0086e2eaf
commit 378a09a9f8

View file

@ -15,55 +15,58 @@ use std::sync::{Arc, OnceLock, RwLock};
static CONFIG: OnceLock<RwLock<Arc<Config>>> = OnceLock::new();
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, serde::Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ContextSource {
#[serde(alias = "")]
Store,
File,
Journal,
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, serde::Deserialize)]
pub struct ContextGroup {
pub label: String,
#[serde(default)]
pub keys: Vec<String>,
#[serde(default)]
pub source: ContextSource,
}
#[derive(Debug, Clone)]
impl Default for ContextSource {
fn default() -> Self { Self::Store }
}
/// Memory subsystem configuration, deserialized from the "memory" section
/// of the shared json5 config. Every field falls back to `Config::default()`
/// when absent (`#[serde(default)]` on the container); path fields expand a
/// leading `~/` via the `deserialize_path`/`deserialize_path_opt` helpers.
#[derive(Debug, Clone, serde::Deserialize)]
#[serde(default)]
pub struct Config {
    /// Display name for the human user in transcripts/prompts.
    pub user_name: String,
    /// Display name for the AI assistant.
    pub assistant_name: String,
    /// Base directory for memory data (store, logs, status).
    #[serde(deserialize_with = "deserialize_path")]
    pub data_dir: PathBuf,
    /// Directory containing Claude session transcripts.
    #[serde(deserialize_with = "deserialize_path")]
    pub projects_dir: PathBuf,
    /// Core node keys that should never be decayed/deleted.
    pub core_nodes: Vec<String>,
    /// How many days of journal to include in load-context.
    pub journal_days: u32,
    /// Max journal entries to include in load-context.
    pub journal_max: usize,
    /// Ordered context groups for session-start loading.
    pub context_groups: Vec<ContextGroup>,
    /// Max concurrent LLM calls in the daemon.
    pub llm_concurrency: usize,
    /// Total agent runs per consolidation cycle.
    pub agent_budget: usize,
    /// Directory containing prompt templates for agents.
    #[serde(deserialize_with = "deserialize_path")]
    pub prompts_dir: PathBuf,
    /// Separate Claude config dir for background agent work (daemon jobs).
    #[serde(default, deserialize_with = "deserialize_path_opt")]
    pub agent_config_dir: Option<PathBuf>,
    /// OpenAI-compatible API base URL for direct LLM calls.
    /// Resolved from agent_model → models → backend (not in config directly).
    #[serde(skip)]
    pub api_base_url: Option<String>,
    /// API key for the direct API endpoint.
    #[serde(skip)]
    pub api_key: Option<String>,
    /// Model name to use with the direct API endpoint.
    #[serde(skip)]
    pub api_model: Option<String>,
    /// Named model from the shared config, used only to resolve the three
    /// `api_*` fields above — kept private, not part of the public surface.
    #[serde(default)]
    agent_model: Option<String>,
    /// Reasoning effort for API calls ("none", "low", "medium", "high").
    pub api_reasoning: String,
    /// Active agent types for consolidation cycles.
    pub agent_types: Vec<String>,
}
@ -97,6 +100,7 @@ impl Default for Config {
api_base_url: None,
api_key: None,
api_model: None,
agent_model: None,
api_reasoning: "high".to_string(),
agent_types: vec![
"linker".into(), "organize".into(), "distill".into(),
@ -119,105 +123,26 @@ impl Config {
/// Memory settings live in the "memory" section; API settings are
/// resolved from the shared model/backend configuration.
fn try_load_shared() -> Option<Self> {
let home = PathBuf::from(std::env::var("HOME").ok()?);
let path = home.join(".config/poc-agent/config.json5");
let path = PathBuf::from(std::env::var("HOME").ok()?)
.join(".config/poc-agent/config.json5");
let content = std::fs::read_to_string(&path).ok()?;
let root: serde_json::Value = json5::from_str(&content).ok()?;
let mem_value = root.get("memory")?;
let mem = root.get("memory")?;
let mut config = Config::default();
let mut config: Config = serde_json::from_value(mem_value.clone()).ok()?;
config.llm_concurrency = config.llm_concurrency.max(1);
// Memory-specific fields
if let Some(s) = mem.get("user_name").and_then(|v| v.as_str()) {
config.user_name = s.to_string();
}
if let Some(s) = mem.get("assistant_name").and_then(|v| v.as_str()) {
config.assistant_name = s.to_string();
}
if let Some(s) = mem.get("data_dir").and_then(|v| v.as_str()) {
config.data_dir = expand_home(s);
}
if let Some(s) = mem.get("projects_dir").and_then(|v| v.as_str()) {
config.projects_dir = expand_home(s);
}
if let Some(arr) = mem.get("core_nodes").and_then(|v| v.as_array()) {
config.core_nodes = arr.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect();
}
if let Some(d) = mem.get("journal_days").and_then(|v| v.as_u64()) {
config.journal_days = d as u32;
}
if let Some(m) = mem.get("journal_max").and_then(|v| v.as_u64()) {
config.journal_max = m as usize;
}
if let Some(n) = mem.get("llm_concurrency").and_then(|v| v.as_u64()) {
config.llm_concurrency = n.max(1) as usize;
}
if let Some(n) = mem.get("agent_budget").and_then(|v| v.as_u64()) {
config.agent_budget = n as usize;
}
if let Some(s) = mem.get("prompts_dir").and_then(|v| v.as_str()) {
config.prompts_dir = expand_home(s);
}
if let Some(s) = mem.get("agent_config_dir").and_then(|v| v.as_str()) {
config.agent_config_dir = Some(expand_home(s));
}
// Context groups
if let Some(groups) = mem.get("context_groups").and_then(|v| v.as_array()) {
let mut cgs = Vec::new();
for g in groups {
if let Some(label) = g.get("label").and_then(|v| v.as_str()) {
let keys = g.get("keys")
.and_then(|v| v.as_array())
.map(|arr| arr.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect())
.unwrap_or_default();
let source = match g.get("source").and_then(|v| v.as_str()) {
Some("file") => ContextSource::File,
Some("journal") => ContextSource::Journal,
_ => ContextSource::Store,
};
cgs.push(ContextGroup { label: label.to_string(), keys, source });
}
}
if !cgs.is_empty() {
config.context_groups = cgs;
}
}
if let Some(s) = mem.get("api_reasoning").and_then(|v| v.as_str()) {
config.api_reasoning = s.to_string();
}
if let Some(arr) = mem.get("agent_types").and_then(|v| v.as_array()) {
let types: Vec<String> = arr.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect();
if !types.is_empty() {
config.agent_types = types;
}
}
// Resolve API settings from the shared model/backend config.
// memory.agent_model references a named model; we look up its
// backend to get base_url and api_key.
if let Some(model_name) = mem.get("agent_model").and_then(|v| v.as_str()) {
if let Some(model_cfg) = root.get("models")
.and_then(|m| m.get(model_name))
{
// Resolve API settings: agent_model → models → backend
if let Some(model_name) = &config.agent_model {
if let Some(model_cfg) = root.get("models").and_then(|m| m.get(model_name.as_str())) {
let backend_name = model_cfg.get("backend").and_then(|v| v.as_str()).unwrap_or("");
let model_id = model_cfg.get("model_id").and_then(|v| v.as_str()).unwrap_or("");
if let Some(backend) = root.get(backend_name) {
config.api_base_url = backend.get("base_url")
.and_then(|v| v.as_str())
.map(|s| s.to_string());
.and_then(|v| v.as_str()).map(String::from);
config.api_key = backend.get("api_key")
.and_then(|v| v.as_str())
.map(|s| s.to_string());
.and_then(|v| v.as_str()).map(String::from);
}
config.api_model = Some(model_id.to_string());
}
@ -323,6 +248,17 @@ impl Config {
}
}
/// Serde helper: deserialize a string field into a `PathBuf`, expanding a
/// leading `~/` into the user's home directory via `expand_home`.
fn deserialize_path<'de, D: serde::Deserializer<'de>>(d: D) -> Result<PathBuf, D::Error> {
    <String as serde::Deserialize>::deserialize(d).map(|raw| expand_home(&raw))
}
/// Serde helper: like `deserialize_path`, but for optional fields — `None`
/// passes through untouched, `Some(s)` gets `~/` expansion.
fn deserialize_path_opt<'de, D: serde::Deserializer<'de>>(d: D) -> Result<Option<PathBuf>, D::Error> {
    let raw = <Option<String> as serde::Deserialize>::deserialize(d)?;
    Ok(match raw {
        Some(p) => Some(expand_home(&p)),
        None => None,
    })
}
fn expand_home(path: &str) -> PathBuf {
if let Some(rest) = path.strip_prefix("~/") {
PathBuf::from(std::env::var("HOME").expect("HOME not set")).join(rest)