Move API code from user/ to agent/

Signed-off-by: Kent Overstreet <kent.overstreet@linux.dev>
This commit is contained in:
Kent Overstreet 2026-04-04 00:29:11 -04:00
parent 021eafe6da
commit 9bebbcb635
22 changed files with 259 additions and 251 deletions

View file

@ -4,10 +4,30 @@
// Journal entries are loaded from the memory graph store, not from
// a flat file — the parse functions are gone.
use crate::user::types::*;
use std::sync::{Arc, RwLock};
use crate::agent::api::types::*;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use tiktoken_rs::CoreBPE;
/// A section of the context window, possibly with children.
#[derive(Debug, Clone)]
pub struct ContextSection {
    /// Human-readable label for this section (shown in the TUI debug screen).
    pub name: String,
    /// Token count for this section's `content` — presumably excludes
    /// children, which carry their own counts; confirm at call sites.
    pub tokens: usize,
    /// Raw text of the section.
    pub content: String,
    /// Nested subsections, if any.
    pub children: Vec<ContextSection>,
}
/// Shared, live context state — agent writes, TUI reads for the debug screen.
pub type SharedContextState = Arc<RwLock<Vec<ContextSection>>>;

/// Construct an empty [`SharedContextState`], ready to be cloned to both
/// the agent (writer) and the TUI (reader).
pub fn shared_context_state() -> SharedContextState {
    SharedContextState::default()
}
/// A single journal entry with its timestamp and content.
#[derive(Debug, Clone)]
pub struct JournalEntry {
@ -123,3 +143,171 @@ pub fn is_context_overflow(err: &anyhow::Error) -> bool {
/// Returns true when `err` looks like a model stream failure.
///
/// Detection is string-based: the rendered error text is scanned for the
/// "model stream error" marker.
pub fn is_stream_error(err: &anyhow::Error) -> bool {
    let rendered = err.to_string();
    rendered.contains("model stream error")
}
// --- Context state types ---

/// Conversation entry — either a regular message or memory content.
/// Memory entries preserve the original message for KV cache round-tripping.
#[derive(Debug, Clone)]
pub enum ConversationEntry {
    /// A plain conversation message; serializes exactly as `Message` does.
    Message(Message),
    /// Memory content carried as an API-ready message. `key` is written out
    /// as the `memory_key` field by the custom `Serialize` impl.
    Memory { key: String, message: Message },
}
// Custom serde: serialize Memory with a "memory_key" field added to the message,
// plain messages serialize as-is. This keeps the conversation log readable.
impl Serialize for ConversationEntry {
    fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        use serde::ser::SerializeMap;
        match self {
            // Plain message: delegate straight to Message's own Serialize.
            Self::Message(m) => m.serialize(s),
            Self::Memory { key, message } => {
                // Round-trip the message through serde_json so its fields can
                // be spliced into a fresh map alongside the marker field.
                let json = serde_json::to_value(message).map_err(serde::ser::Error::custom)?;
                // Entry count isn't known up front, hence None.
                let mut map = s.serialize_map(None)?;
                if let serde_json::Value::Object(obj) = json {
                    // Copy every message field verbatim.
                    // NOTE(review): if `message` serialized to a non-object,
                    // its content would be silently dropped here — confirm
                    // Message always serializes to a JSON object.
                    for (k, v) in obj {
                        map.serialize_entry(&k, &v)?;
                    }
                }
                // Marker field that Deserialize uses to rebuild Memory.
                map.serialize_entry("memory_key", key)?;
                map.end()
            }
        }
    }
}
impl<'de> Deserialize<'de> for ConversationEntry {
    fn deserialize<D: serde::Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        // Parse into a generic JSON value first so we can probe for the
        // "memory_key" marker that the Serialize impl adds to Memory entries.
        let mut json: serde_json::Value = serde_json::Value::deserialize(d)?;
        if let Some(key) = json.as_object_mut().and_then(|o| o.remove("memory_key")) {
            // Marker present: this was a Memory entry. `remove` strips the
            // marker so the remaining fields parse as a plain Message.
            // NOTE(review): a non-string memory_key silently becomes "" —
            // confirm that is intended rather than a deserialization error.
            let key = key.as_str().unwrap_or("").to_string();
            let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?;
            Ok(Self::Memory { key, message })
        } else {
            // No marker: plain message.
            let message: Message = serde_json::from_value(json).map_err(serde::de::Error::custom)?;
            Ok(Self::Message(message))
        }
    }
}
impl ConversationEntry {
    /// Get the API message for sending to the model.
    ///
    /// Currently identical to [`Self::message`] (the two previously carried
    /// duplicated match arms); kept as a distinct entry point so call sites
    /// that build API requests won't break if the two ever need to diverge
    /// (e.g. rewriting memory entries before sending).
    pub fn api_message(&self) -> &Message {
        self.message()
    }

    /// True when this entry carries memory content rather than a plain message.
    pub fn is_memory(&self) -> bool {
        matches!(self, Self::Memory { .. })
    }

    /// Get a reference to the inner message, regardless of variant.
    pub fn message(&self) -> &Message {
        match self {
            Self::Message(m) => m,
            Self::Memory { message, .. } => message,
        }
    }

    /// Get a mutable reference to the inner message, regardless of variant.
    pub fn message_mut(&mut self) -> &mut Message {
        match self {
            Self::Message(m) => m,
            Self::Memory { message, .. } => message,
        }
    }
}
/// Snapshot of everything that goes into the model's context window.
#[derive(Clone)]
pub struct ContextState {
    /// Base system prompt text.
    pub system_prompt: String,
    /// Named personality sections as `(name, content)` pairs; rendered as
    /// `## name` blocks by `render_context_message`.
    pub personality: Vec<(String, String)>,
    /// Journal entries included in the context.
    pub journal: Vec<JournalEntry>,
    /// Working-stack items. The last item is rendered without an index by
    /// `render_context_message` — presumably the top of the stack; confirm.
    pub working_stack: Vec<String>,
    /// Conversation entries — messages and memory, interleaved in order.
    /// Does NOT include system prompt, personality, or journal.
    pub entries: Vec<ConversationEntry>,
}
// TODO: these should not be hardcoded absolute paths
/// Path to the markdown file holding working-stack usage instructions.
/// Falls back to a relative path when no home directory is available.
pub fn working_stack_instructions_path() -> std::path::PathBuf {
    let home = dirs::home_dir().unwrap_or_default();
    home.join(".consciousness/config/working-stack.md")
}
/// Path to the JSON file the working stack is persisted in.
/// Falls back to a relative path when no home directory is available.
pub fn working_stack_file_path() -> std::path::PathBuf {
    let home = dirs::home_dir().unwrap_or_default();
    home.join(".consciousness/working-stack.json")
}
impl ContextState {
    /// Compute the context budget from typed sources.
    ///
    /// `count_str` / `count_msg` are the caller's tokenizers; `window_tokens`
    /// is the model's context-window size. Memory entries are tallied
    /// separately from plain conversation messages.
    pub fn budget(&self, count_str: &dyn Fn(&str) -> usize,
                  count_msg: &dyn Fn(&Message) -> usize,
                  window_tokens: usize) -> ContextBudget {
        // Identity = system prompt plus every personality section.
        let identity_tokens = count_str(&self.system_prompt)
            + self.personality.iter().map(|(_, content)| count_str(content)).sum::<usize>();
        let journal_tokens: usize = self.journal.iter().map(|entry| count_str(&entry.content)).sum();
        // Split the interleaved entries into memory vs. conversation totals.
        let mut memory_tokens = 0usize;
        let mut conversation_tokens = 0usize;
        for entry in &self.entries {
            let tokens = count_msg(entry.api_message());
            let bucket = if entry.is_memory() { &mut memory_tokens } else { &mut conversation_tokens };
            *bucket += tokens;
        }
        ContextBudget {
            identity_tokens,
            memory_tokens,
            journal_tokens,
            conversation_tokens,
            window_tokens,
        }
    }

    /// Render the personality sections plus the working stack into the
    /// single context message, with `---` separators between sections.
    pub fn render_context_message(&self) -> String {
        let mut sections: Vec<String> = Vec::with_capacity(self.personality.len() + 1);
        for (name, content) in &self.personality {
            sections.push(format!("## {}\n\n{}", name, content));
        }
        // The stack section opens with the on-disk instructions; reading is
        // best-effort — a missing file just yields an empty prefix.
        let mut stack = std::fs::read_to_string(working_stack_instructions_path()).unwrap_or_default();
        stack.push_str("\n## Current stack\n\n");
        if self.working_stack.is_empty() {
            stack.push_str("(empty)\n");
        } else {
            let top = self.working_stack.len() - 1;
            for (i, item) in self.working_stack.iter().enumerate() {
                // The final item is rendered bare; earlier ones get indices.
                if i < top {
                    stack.push_str(&format!(" [{}] {}\n", i, item));
                } else {
                    stack.push_str(&format!("{}\n", item));
                }
            }
        }
        sections.push(stack);
        sections.join("\n\n---\n\n")
    }
}
/// Token accounting for one assembled context, by category.
#[derive(Debug, Clone, Default)]
pub struct ContextBudget {
    /// Tokens for the system prompt plus personality sections.
    pub identity_tokens: usize,
    /// Tokens for memory entries in the conversation.
    pub memory_tokens: usize,
    /// Tokens for journal entries.
    pub journal_tokens: usize,
    /// Tokens for plain (non-memory) conversation messages.
    pub conversation_tokens: usize,
    /// Total size of the model's context window.
    pub window_tokens: usize,
}
impl ContextBudget {
    /// Total tokens consumed across all tracked categories.
    pub fn used(&self) -> usize {
        [
            self.identity_tokens,
            self.memory_tokens,
            self.journal_tokens,
            self.conversation_tokens,
        ]
        .iter()
        .sum()
    }

    /// Tokens still available in the window (clamped at zero, never underflows).
    pub fn free(&self) -> usize {
        self.window_tokens.checked_sub(self.used()).unwrap_or(0)
    }

    /// Compact one-line usage summary (e.g. for a status bar).
    ///
    /// Percentages use integer division, but any non-zero count reports at
    /// least 1% so small-but-present categories never display as 0%.
    /// Returns an empty string when the window size is unknown (zero).
    pub fn status_string(&self) -> String {
        let total = self.window_tokens;
        if total == 0 {
            return String::new();
        }
        let pct = |n: usize| -> usize {
            match n {
                0 => 0,
                n => ((n * 100) / total).max(1),
            }
        };
        format!(
            "id:{}% mem:{}% jnl:{}% conv:{}% free:{}%",
            pct(self.identity_tokens),
            pct(self.memory_tokens),
            pct(self.journal_tokens),
            pct(self.conversation_tokens),
            pct(self.free())
        )
    }
}