Simplify context config: personality_nodes and agent_nodes

Replace complex context_groups (with ContextGroup struct, ContextSource
enum, labels, keys arrays) with simple string lists:
- personality_nodes: loaded into main session context
- agent_nodes: loaded into subconscious agent context

Removed ~200 lines of code. The distinction between session and agent
context is now just which list you're in, not a per-group flag.

Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
Kent Overstreet 2026-04-15 02:37:49 -04:00
parent 688e8dbc3e
commit a88428d642
4 changed files with 62 additions and 260 deletions

View file

@@ -165,34 +165,10 @@ pub async fn cmd_query(expr: &[String]) -> Result<()> {
Ok(()) Ok(())
} }
/// Get group content (handles daemon or local fallback) /// Load content for a list of node keys.
pub async fn get_group_content(group: &crate::config::ContextGroup, cfg: &crate::config::Config) -> Vec<(String, String)> { async fn load_nodes(keys: &[String]) -> Vec<(String, String)> {
match group.source {
crate::config::ContextSource::Journal => {
// Query for recent journal entries
let window: i64 = cfg.journal_days as i64 * 24 * 3600;
let query = format!("all | type:episodic | age:<{} | sort:timestamp | limit:{}",
window, cfg.journal_max);
let keys_str = match memory::memory_query(None, &query, None).await {
Ok(s) => s,
Err(_) => return vec![],
};
// Parse keys (one per line) and render each
let mut results = Vec::new(); let mut results = Vec::new();
for key in keys_str.lines().filter(|k| !k.is_empty() && *k != "no results") { for key in keys {
if let Ok(content) = memory::memory_render(None, key, Some(true)).await {
if !content.trim().is_empty() {
results.push((key.to_string(), content));
}
}
}
results
}
crate::config::ContextSource::Store => {
let mut results = Vec::new();
for key in &group.keys {
if let Ok(content) = memory::memory_render(None, key, Some(true)).await { if let Ok(content) = memory::memory_render(None, key, Some(true)).await {
if !content.trim().is_empty() { if !content.trim().is_empty() {
results.push((key.clone(), content.trim().to_string())); results.push((key.clone(), content.trim().to_string()));
@@ -200,49 +176,41 @@ pub async fn get_group_content(group: &crate::config::ContextGroup, cfg: &crate:
} }
} }
results results
}
}
} }
pub async fn cmd_load_context(stats: bool) -> Result<()> { pub async fn cmd_load_context(stats: bool) -> Result<()> {
let cfg = crate::config::get(); let cfg = crate::config::get();
let personality = load_nodes(&cfg.personality_nodes).await;
let agent = load_nodes(&cfg.agent_nodes).await;
if stats { if stats {
let mut total_words = 0; let p_words: usize = personality.iter().map(|(_, c)| c.split_whitespace().count()).sum();
let mut total_entries = 0; let a_words: usize = agent.iter().map(|(_, c)| c.split_whitespace().count()).sum();
println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS"); println!("{:<25} {:>6} {:>8}", "GROUP", "ITEMS", "WORDS");
println!("{}", "-".repeat(42)); println!("{}", "-".repeat(42));
println!("{:<25} {:>6} {:>8}", "personality_nodes", personality.len(), p_words);
for group in &cfg.context_groups { println!("{:<25} {:>6} {:>8}", "agent_nodes", agent.len(), a_words);
let entries = get_group_content(group, &cfg).await;
let words: usize = entries.iter()
.map(|(_, c)| c.split_whitespace().count())
.sum();
let count = entries.len();
println!("{:<25} {:>6} {:>8}", group.label, count, words);
total_words += words;
total_entries += count;
}
println!("{}", "-".repeat(42)); println!("{}", "-".repeat(42));
println!("{:<25} {:>6} {:>8}", "TOTAL", total_entries, total_words); println!("{:<25} {:>6} {:>8}", "TOTAL", personality.len() + agent.len(), p_words + a_words);
return Ok(()); return Ok(());
} }
println!("=== MEMORY SYSTEM ({}) ===", cfg.assistant_name); println!("=== MEMORY SYSTEM ({}) ===", cfg.assistant_name);
for group in &cfg.context_groups { if !personality.is_empty() {
let entries = get_group_content(group, &cfg).await; println!("--- personality_nodes ({}) ---", personality.len());
if !entries.is_empty() && group.source == crate::config::ContextSource::Journal { for (key, content) in personality {
println!("--- recent journal entries ({}/{}) ---",
entries.len(), cfg.journal_max);
}
for (key, content) in entries {
if group.source == crate::config::ContextSource::Journal {
println!("## {}", key); println!("## {}", key);
} else { println!("{}\n", content);
println!("--- {} ({}) ---", key, group.label);
} }
}
if !agent.is_empty() {
println!("--- agent_nodes ({}) ---", agent.len());
for (key, content) in agent {
println!("## {}", key);
println!("{}\n", content); println!("{}\n", content);
} }
} }

View file

@@ -29,29 +29,6 @@ pub fn config_path() -> PathBuf {
static CONFIG: OnceLock<RwLock<Arc<Config>>> = OnceLock::new(); static CONFIG: OnceLock<RwLock<Arc<Config>>> = OnceLock::new();
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "lowercase")]
#[derive(Default)]
pub enum ContextSource {
#[serde(alias = "")]
#[default]
Store,
Journal,
}
#[derive(Debug, Clone, Deserialize)]
pub struct ContextGroup {
pub label: String,
#[serde(default)]
pub keys: Vec<String>,
#[serde(default)]
pub source: ContextSource,
/// Include this group in agent context (default true)
#[serde(default = "default_true")]
pub agent: bool,
}
fn default_true() -> bool { true }
fn default_context_window() -> usize { 128_000 } fn default_context_window() -> usize { 128_000 }
fn default_stream_timeout() -> u64 { 60 } fn default_stream_timeout() -> u64 { 60 }
fn default_scoring_chunk_tokens() -> usize { 50_000 } fn default_scoring_chunk_tokens() -> usize { 50_000 }
@@ -76,13 +53,17 @@ pub struct Config {
pub identity_dir: PathBuf, pub identity_dir: PathBuf,
#[serde(deserialize_with = "deserialize_path")] #[serde(deserialize_with = "deserialize_path")]
pub projects_dir: PathBuf, pub projects_dir: PathBuf,
pub core_nodes: Vec<String>, /// Nodes that cannot be deleted or renamed
/// Nodes that cannot be deleted or renamed without --force
#[serde(default)] #[serde(default)]
pub protected_nodes: Vec<String>, pub protected_nodes: Vec<String>,
/// Nodes loaded into main session context
#[serde(default)]
pub personality_nodes: Vec<String>,
/// Nodes loaded into subconscious agent context
#[serde(default)]
pub agent_nodes: Vec<String>,
pub journal_days: u32, pub journal_days: u32,
pub journal_max: usize, pub journal_max: usize,
pub context_groups: Vec<ContextGroup>,
pub llm_concurrency: usize, pub llm_concurrency: usize,
pub agent_budget: usize, pub agent_budget: usize,
#[serde(deserialize_with = "deserialize_path")] #[serde(deserialize_with = "deserialize_path")]
@@ -147,24 +128,11 @@ impl Default for Config {
data_dir: home.join(".consciousness/memory"), data_dir: home.join(".consciousness/memory"),
identity_dir: home.join(".consciousness/identity"), identity_dir: home.join(".consciousness/identity"),
projects_dir: home.join(".claude/projects"), projects_dir: home.join(".claude/projects"),
core_nodes: vec!["identity".to_string(), "core-practices".to_string()],
protected_nodes: Vec::new(), protected_nodes: Vec::new(),
personality_nodes: vec!["identity".into(), "core-practices".into()],
agent_nodes: vec!["identity".into(), "core-practices".into()],
journal_days: 7, journal_days: 7,
journal_max: 20, journal_max: 20,
context_groups: vec![
ContextGroup {
label: "identity".into(),
keys: vec!["identity".into()],
source: ContextSource::Store,
agent: true,
},
ContextGroup {
label: "core-practices".into(),
keys: vec!["core-practices".into()],
source: ContextSource::Store,
agent: true,
},
],
llm_concurrency: 1, llm_concurrency: 1,
agent_budget: 1000, agent_budget: 1000,
prompts_dir: home.join(".consciousness/prompts"), prompts_dir: home.join(".consciousness/prompts"),
@@ -242,97 +210,9 @@ impl Config {
Some(config) Some(config)
} }
/// Load from legacy JSONL config (~/.consciousness/config.jsonl). /// Load from legacy JSONL config — deprecated, just return defaults.
fn load_legacy_jsonl() -> Self { fn load_legacy_jsonl() -> Self {
let path = std::env::var("POC_MEMORY_CONFIG") Config::default()
.map(PathBuf::from)
.unwrap_or_else(|_| {
dirs::home_dir().unwrap_or_default()
.join(".consciousness/config.jsonl")
});
let mut config = Config::default();
let Ok(content) = std::fs::read_to_string(&path) else {
return config;
};
let mut context_groups: Vec<ContextGroup> = Vec::new();
let stream = serde_json::Deserializer::from_str(&content)
.into_iter::<serde_json::Value>();
for result in stream {
let Ok(obj) = result else { continue };
if let Some(cfg) = obj.get("config") {
if let Some(s) = cfg.get("user_name").and_then(|v| v.as_str()) {
config.user_name = s.to_string();
}
if let Some(s) = cfg.get("assistant_name").and_then(|v| v.as_str()) {
config.assistant_name = s.to_string();
}
if let Some(s) = cfg.get("data_dir").and_then(|v| v.as_str()) {
config.data_dir = expand_home(s);
}
if let Some(s) = cfg.get("projects_dir").and_then(|v| v.as_str()) {
config.projects_dir = expand_home(s);
}
if let Some(arr) = cfg.get("core_nodes").and_then(|v| v.as_array()) {
config.core_nodes = arr.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect();
}
if let Some(d) = cfg.get("journal_days").and_then(|v| v.as_u64()) {
config.journal_days = d as u32;
}
if let Some(m) = cfg.get("journal_max").and_then(|v| v.as_u64()) {
config.journal_max = m as usize;
}
if let Some(n) = cfg.get("llm_concurrency").and_then(|v| v.as_u64()) {
config.llm_concurrency = n.max(1) as usize;
}
if let Some(n) = cfg.get("agent_budget").and_then(|v| v.as_u64()) {
config.agent_budget = n as usize;
}
if let Some(s) = cfg.get("prompts_dir").and_then(|v| v.as_str()) {
config.prompts_dir = expand_home(s);
}
if let Some(s) = cfg.get("api_base_url").and_then(|v| v.as_str()) {
config.api_base_url = Some(s.to_string());
}
if let Some(s) = cfg.get("api_key").and_then(|v| v.as_str()) {
config.api_key = Some(s.to_string());
}
if let Some(s) = cfg.get("api_model").and_then(|v| v.as_str()) {
config.api_model = Some(s.to_string());
}
continue;
}
if let Some(label) = obj.get("group").and_then(|v| v.as_str()) {
let keys = obj.get("keys")
.and_then(|v| v.as_array())
.map(|arr| arr.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect())
.unwrap_or_default();
let source = match obj.get("source").and_then(|v| v.as_str()) {
Some("journal") => ContextSource::Journal,
_ => ContextSource::Store,
};
let agent = obj.get("agent").and_then(|v| v.as_bool()).unwrap_or(true);
context_groups.push(ContextGroup { label: label.to_string(), keys, source, agent });
}
}
if !context_groups.is_empty() {
config.context_groups = context_groups;
}
config
} }
} }
@@ -553,10 +433,10 @@ impl AppConfig {
}; };
} }
let context_groups = get().context_groups.clone(); let personality_nodes = get().personality_nodes.clone();
let (context_parts, config_file_count, memory_file_count) = let (context_parts, config_file_count, memory_file_count) =
crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups).await?; crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &personality_nodes).await?;
let session_dir = dirs::home_dir() let session_dir = dirs::home_dir()
.unwrap_or_else(|| PathBuf::from(".")) .unwrap_or_else(|| PathBuf::from("."))
@@ -697,8 +577,8 @@ pub async fn load_session(cli: &crate::user::CliArgs) -> Result<(SessionConfig,
/// Re-assemble context for a specific model's prompt file. /// Re-assemble context for a specific model's prompt file.
pub async fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<Vec<(String, String)>> { pub async fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<Vec<(String, String)>> {
let cwd = std::env::current_dir().context("Failed to get current directory")?; let cwd = std::env::current_dir().context("Failed to get current directory")?;
let context_groups = get().context_groups.clone(); let personality_nodes = get().personality_nodes.clone();
let (context_parts, _, _) = crate::mind::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups).await?; let (context_parts, _, _) = crate::mind::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &personality_nodes).await?;
Ok(context_parts) Ok(context_parts)
} }

View file

@@ -1,19 +1,12 @@
// identity.rs — Identity file discovery and context assembly // identity.rs — Identity file discovery and context assembly
// //
// Discovers and loads the agent's identity: instruction files (CLAUDE.md, // Discovers and loads the agent's identity: instruction files (CLAUDE.md,
// POC.md), memory files, and the system prompt. Reads context_groups // POC.md), memory nodes, and the system prompt.
// from the shared config file.
use anyhow::Result; use anyhow::Result;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::agent::tools::memory::memory_render; use crate::agent::tools::memory::memory_render;
use crate::config::{ContextGroup, ContextSource};
/// Read a file if it exists and is non-empty.
fn read_nonempty(path: &Path) -> Option<String> {
std::fs::read_to_string(path).ok().filter(|s| !s.trim().is_empty())
}
/// Walk from cwd to git root collecting instruction files (CLAUDE.md / POC.md). /// Walk from cwd to git root collecting instruction files (CLAUDE.md / POC.md).
/// ///
@@ -60,62 +53,23 @@ fn find_context_files(cwd: &Path, prompt_file: &str) -> Vec<PathBuf> {
found found
} }
/// Load memory files from config's context_groups. /// Load memory nodes from the store.
/// Store sources load from the memory graph. Journal source loads recent entries. async fn load_memory_nodes(keys: &[String]) -> Vec<(String, String)> {
async fn load_memory_files(memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Vec<(String, String)> {
let home = match dirs::home_dir() {
Some(h) => h,
None => return Vec::new(),
};
let global = home.join(".consciousness");
let project = memory_project.map(PathBuf::from);
let mut memories: Vec<(String, String)> = Vec::new(); let mut memories: Vec<(String, String)> = Vec::new();
// Load from context_groups for key in keys {
for group in context_groups {
match group.source {
ContextSource::Journal => {
// Journal loading handled separately
continue;
}
ContextSource::Store => {
// Load from the memory graph store via typed API
for key in &group.keys {
if let Ok(c) = memory_render(None, key, Some(true)).await { if let Ok(c) = memory_render(None, key, Some(true)).await {
if !c.trim().is_empty() { if !c.trim().is_empty() {
memories.push((key.clone(), c)); memories.push((key.clone(), c));
} }
} }
} }
}
}
}
// People dir — glob all .md files
for dir in [project.as_deref(), Some(global.as_path())].into_iter().flatten() {
let people_dir = dir.join("people");
if let Ok(entries) = std::fs::read_dir(&people_dir) {
let mut paths: Vec<_> = entries.flatten()
.filter(|e| e.path().extension().map_or(false, |ext| ext == "md"))
.collect();
paths.sort_by_key(|e| e.file_name());
for entry in paths {
let rel = format!("people/{}", entry.file_name().to_string_lossy());
if memories.iter().any(|(n, _)| n == &rel) { continue; }
if let Some(content) = read_nonempty(&entry.path()) {
memories.push((rel, content));
}
}
}
}
memories memories
} }
/// Context message: instruction files + memory files + manifest. /// Context message: instruction files + memory nodes.
pub async fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Result<(Vec<(String, String)>, usize, usize)> { pub async fn assemble_context_message(cwd: &Path, prompt_file: &str, _memory_project: Option<&Path>, personality_nodes: &[String]) -> Result<(Vec<(String, String)>, usize, usize)> {
let mut parts: Vec<(String, String)> = vec![ let mut parts: Vec<(String, String)> = vec![
("Preamble".to_string(), ("Preamble".to_string(),
"Everything below is already loaded — your identity, instructions, \ "Everything below is already loaded — your identity, instructions, \
@@ -136,7 +90,7 @@ pub async fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_proj
} }
} }
let memories = load_memory_files(memory_project, context_groups).await; let memories = load_memory_nodes(personality_nodes).await;
let memory_count = memories.len(); let memory_count = memories.len();
for (name, content) in memories { for (name, content) in memories {
parts.push((name, content)); parts.push((name, content));

View file

@@ -293,19 +293,19 @@ async fn resolve(
Some(Resolved { text: out, keys: all_keys }) Some(Resolved { text: out, keys: all_keys })
} }
// agent-context — personality/identity groups from load-context config // agent-context — agent identity nodes from config
"agent-context" => { "agent-context" => {
let cfg = crate::config::get(); let cfg = crate::config::get();
let mut text = String::new(); let mut text = String::new();
let mut keys = Vec::new(); let mut keys = Vec::new();
for group in &cfg.context_groups { for key in &cfg.agent_nodes {
if !group.agent { continue; } if let Ok(content) = crate::hippocampus::memory_render(None, key, Some(true)).await {
let entries = crate::cli::node::get_group_content(group, &cfg).await; if !content.trim().is_empty() {
for (key, content) in entries {
use std::fmt::Write; use std::fmt::Write;
writeln!(text, "--- {} ({}) ---", key, group.label).ok(); writeln!(text, "--- {} ---", key).ok();
writeln!(text, "{}\n", content).ok(); writeln!(text, "{}\n", content.trim()).ok();
keys.push(key); keys.push(key.clone());
}
} }
} }
if text.is_empty() { None } if text.is_empty() { None }