Simplify context config: personality_nodes and agent_nodes

Replace complex context_groups (with ContextGroup struct, ContextSource
enum, labels, keys arrays) with simple string lists:
- personality_nodes: loaded into main session context
- agent_nodes: loaded into subconscious agent context

Removed ~200 lines of code. The distinction between session and agent
context is now just which list you're in, not a per-group flag.

Co-Authored-By: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
Kent Overstreet 2026-04-15 02:37:49 -04:00
parent 688e8dbc3e
commit a88428d642
4 changed files with 62 additions and 260 deletions

View file

@ -1,19 +1,12 @@
// identity.rs — Identity file discovery and context assembly
//
// Discovers and loads the agent's identity: instruction files (CLAUDE.md,
// POC.md), memory files, and the system prompt. Reads context_groups
// from the shared config file.
// POC.md), memory nodes, and the system prompt.
use anyhow::Result;
use std::path::{Path, PathBuf};
use crate::agent::tools::memory::memory_render;
use crate::config::{ContextGroup, ContextSource};
/// Read a file, returning its contents only when it exists and holds
/// non-whitespace text; `None` for missing, unreadable, or blank files.
fn read_nonempty(path: &Path) -> Option<String> {
    match std::fs::read_to_string(path) {
        Ok(text) if !text.trim().is_empty() => Some(text),
        _ => None,
    }
}
/// Walk from cwd to git root collecting instruction files (CLAUDE.md / POC.md).
///
@ -60,53 +53,14 @@ fn find_context_files(cwd: &Path, prompt_file: &str) -> Vec<PathBuf> {
found
}
/// Load memory files from config's context_groups.
/// Store sources load from the memory graph. Journal source loads recent entries.
async fn load_memory_files(memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Vec<(String, String)> {
let home = match dirs::home_dir() {
Some(h) => h,
None => return Vec::new(),
};
let global = home.join(".consciousness");
let project = memory_project.map(PathBuf::from);
/// Load memory nodes from the store.
async fn load_memory_nodes(keys: &[String]) -> Vec<(String, String)> {
let mut memories: Vec<(String, String)> = Vec::new();
// Load from context_groups
for group in context_groups {
match group.source {
ContextSource::Journal => {
// Journal loading handled separately
continue;
}
ContextSource::Store => {
// Load from the memory graph store via typed API
for key in &group.keys {
if let Ok(c) = memory_render(None, key, Some(true)).await {
if !c.trim().is_empty() {
memories.push((key.clone(), c));
}
}
}
}
}
}
// People dir — glob all .md files
for dir in [project.as_deref(), Some(global.as_path())].into_iter().flatten() {
let people_dir = dir.join("people");
if let Ok(entries) = std::fs::read_dir(&people_dir) {
let mut paths: Vec<_> = entries.flatten()
.filter(|e| e.path().extension().map_or(false, |ext| ext == "md"))
.collect();
paths.sort_by_key(|e| e.file_name());
for entry in paths {
let rel = format!("people/{}", entry.file_name().to_string_lossy());
if memories.iter().any(|(n, _)| n == &rel) { continue; }
if let Some(content) = read_nonempty(&entry.path()) {
memories.push((rel, content));
}
for key in keys {
if let Ok(c) = memory_render(None, key, Some(true)).await {
if !c.trim().is_empty() {
memories.push((key.clone(), c));
}
}
}
@ -114,8 +68,8 @@ async fn load_memory_files(memory_project: Option<&Path>, context_groups: &[Cont
memories
}
/// Context message: instruction files + memory files + manifest.
pub async fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Result<(Vec<(String, String)>, usize, usize)> {
/// Context message: instruction files + memory nodes.
pub async fn assemble_context_message(cwd: &Path, prompt_file: &str, _memory_project: Option<&Path>, personality_nodes: &[String]) -> Result<(Vec<(String, String)>, usize, usize)> {
let mut parts: Vec<(String, String)> = vec![
("Preamble".to_string(),
"Everything below is already loaded — your identity, instructions, \
@ -136,7 +90,7 @@ pub async fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_proj
}
}
let memories = load_memory_files(memory_project, context_groups).await;
let memories = load_memory_nodes(personality_nodes).await;
let memory_count = memories.len();
for (name, content) in memories {
parts.push((name, content));