WIP: Fix mind/, dmn, UI layer — 35 errors remaining

mind/mod.rs and mind/dmn.rs fully migrated to AST types.
user/context.rs, user/widgets.rs, user/chat.rs partially migrated.
Killed working_stack tool, tokenize_conv_entry, context_old.rs.

Remaining: learn.rs (22), oneshot.rs (5), subconscious.rs (3),
chat.rs (3), widgets.rs (1), context.rs (1).

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-08 15:24:49 -04:00
parent bf3e2a9b73
commit d0d876e067
5 changed files with 99 additions and 141 deletions

View file

@@ -123,7 +123,6 @@ pub fn tools() -> Vec<Tool> {
read::tool(), write::tool(), edit::tool(),
grep::tool(), glob::tool(), bash::tool(),
vision::tool(),
working_stack::tool(),
];
all.extend(web::tools());
all.extend(memory::memory_tools());

View file

@@ -273,7 +273,7 @@ impl State {
use std::sync::Arc;
use crate::agent::{Agent, oneshot::{AutoAgent, AutoStep}};
use crate::agent::context::ConversationEntry;
use crate::agent::context::{Ast, AstNode, NodeBody};
use crate::subconscious::defs;
/// Names and byte-interval triggers for the built-in subconscious agents.
@@ -472,22 +472,18 @@ impl Subconscious {
let rendered = store_guard.as_ref()
.and_then(|s| crate::cli::node::render_node(s, key));
if let Some(rendered) = rendered {
let mut msg = crate::agent::api::Message::user(format!(
"<system-reminder>\n--- {} (surfaced) ---\n{}\n</system-reminder>",
key, rendered,
ag.push_node(AstNode::memory(
key,
format!("--- {} (surfaced) ---\n{}", key, rendered),
));
msg.stamp();
ag.push_entry(ConversationEntry::Memory {
key: key.to_string(), message: msg, score: None,
});
}
}
}
if let Some(reflection) = outputs.get("reflection") {
if !reflection.trim().is_empty() {
ag.push_message(crate::agent::api::Message::user(format!(
"<system-reminder>\n--- subconscious reflection ---\n{}\n</system-reminder>",
ag.push_node(AstNode::dmn(format!(
"--- subconscious reflection ---\n{}",
reflection.trim(),
)));
}
@@ -496,8 +492,8 @@ impl Subconscious {
if let Some(nudge) = outputs.get("thalamus") {
let nudge = nudge.trim();
if !nudge.is_empty() && nudge != "ok" {
ag.push_message(crate::agent::api::Message::user(format!(
"<system-reminder>\n--- thalamus ---\n{}\n</system-reminder>",
ag.push_node(AstNode::dmn(format!(
"--- thalamus ---\n{}",
nudge,
)));
}
@@ -518,12 +514,13 @@ impl Subconscious {
pub async fn trigger(&mut self, agent: &Arc<tokio::sync::Mutex<Agent>>) {
let (conversation_bytes, memory_keys) = {
let ag = agent.lock().await;
let bytes = ag.context.conversation.entries().iter()
.filter(|ce| !ce.entry.is_log() && !ce.entry.is_memory())
.map(|ce| ce.entry.message().content_text().len() as u64)
let bytes = ag.context.conversation().iter()
.filter(|node| !matches!(node.leaf().map(|l| l.body()),
Some(NodeBody::Log(_)) | Some(NodeBody::Memory { .. })))
.map(|node| node.render().len() as u64)
.sum::<u64>();
let keys: Vec<String> = ag.context.conversation.entries().iter().filter_map(|ce| {
if let ConversationEntry::Memory { key, .. } = &ce.entry {
let keys: Vec<String> = ag.context.conversation().iter().filter_map(|node| {
if let Some(NodeBody::Memory { key, .. }) = node.leaf().map(|l| l.body()) {
Some(key.clone())
} else { None }
}).collect();
@@ -550,7 +547,7 @@ impl Subconscious {
let mut forked = conscious.fork(auto.tools.clone());
forked.provenance = format!("agent:{}", auto.name);
let fork_point = forked.context.conversation.len();
let fork_point = forked.context.conversation().len();
let shared_forked = Arc::new(tokio::sync::Mutex::new(forked));
self.agents[idx].forked_agent = Some(shared_forked.clone());

View file

@@ -28,12 +28,9 @@ use crate::subconscious::learn;
pub use dmn::{SubconsciousSnapshot, Subconscious};
use crate::agent::context::ConversationEntry;
use crate::agent::context::{AstNode, NodeBody, Section, Ast, ContextState};
/// Load persisted memory scores from disk and apply to Memory entries.
use crate::agent::context::ContextSection;
fn load_memory_scores(section: &mut ContextSection, path: &std::path::Path) {
fn load_memory_scores(ctx: &mut ContextState, path: &std::path::Path) {
let data = match std::fs::read_to_string(path) {
Ok(d) => d,
Err(_) => return,
@@ -43,11 +40,13 @@ fn load_memory_scores(section: &mut ContextSection, path: &std::path::Path) {
Err(_) => return,
};
let mut applied = 0;
for i in 0..section.len() {
if let ConversationEntry::Memory { key, .. } = &section.entries()[i].entry {
if let Some(&s) = scores.get(key.as_str()) {
section.set_score(i, Some(s));
applied += 1;
for i in 0..ctx.conversation().len() {
if let AstNode::Leaf(leaf) = &ctx.conversation()[i] {
if let NodeBody::Memory { key, .. } = leaf.body() {
if let Some(&s) = scores.get(key.as_str()) {
ctx.set_score(Section::Conversation, i, Some(s));
applied += 1;
}
}
}
}
@@ -57,14 +56,15 @@ fn load_memory_scores(section: &mut ContextSection, path: &std::path::Path) {
}
/// Collect scored memory keys from conversation entries.
fn collect_memory_scores(section: &ContextSection) -> std::collections::BTreeMap<String, f64> {
section.entries().iter()
.filter_map(|ce| {
if let ConversationEntry::Memory { key, score: Some(s), .. } = &ce.entry {
Some((key.clone(), *s))
} else {
None
fn collect_memory_scores(ctx: &ContextState) -> std::collections::BTreeMap<String, f64> {
    // Walk every conversation node; each scored Memory leaf contributes a
    // (key, score) pair. Unscored memories and non-memory nodes are skipped.
    // Duplicate keys keep the last-seen score, matching collect() semantics.
    let mut scores = std::collections::BTreeMap::new();
    for node in ctx.conversation().iter() {
        if let AstNode::Leaf(leaf) = node {
            if let NodeBody::Memory { key, score: Some(s), .. } = leaf.body() {
                scores.insert(key.clone(), *s);
            }
        }
    }
    scores
}
@@ -319,7 +319,7 @@ impl Mind {
// Restore persisted memory scores
let scores_path = self.config.session_dir.join("memory-scores.json");
load_memory_scores(&mut ag.context.conversation, &scores_path);
load_memory_scores(&mut ag.context, &scores_path);
ag.changed.notify_one();
drop(ag);
@@ -341,7 +341,7 @@ impl Mind {
MindCommand::Compact => {
let threshold = compaction_threshold(&self.config.app) as usize;
let mut ag = self.agent.lock().await;
if ag.context.total_tokens() > threshold {
if ag.context.tokens() > threshold {
ag.compact();
ag.notify("compacted");
}
@@ -408,16 +408,17 @@ impl Mind {
async move {
let scores_snapshot = {
let mut ag = agent.lock().await;
for i in 0..ag.context.conversation.len() {
if let ConversationEntry::Memory { key: k, .. } = &ag.context.conversation.entries()[i].entry {
if *k == key {
ag.context.conversation.set_score(i, Some(score));
for i in 0..ag.context.conversation().len() {
if let AstNode::Leaf(leaf) = &ag.context.conversation()[i] {
if let NodeBody::Memory { key: k, .. } = leaf.body() {
if *k == key {
ag.context.set_score(Section::Conversation, i, Some(score));
}
}
}
}
ag.changed.notify_one();
// Snapshot scores while we have the lock
collect_memory_scores(&ag.context.conversation)
collect_memory_scores(&ag.context)
};
// Write to disk after releasing the lock
save_memory_scores(&scores_snapshot, &path);
@@ -437,18 +438,16 @@ impl Mind {
let mut ag = self.agent.lock().await;
match target {
StreamTarget::Conversation => {
ag.push_message(crate::agent::api::Message::user(text));
ag.push_node(AstNode::user_msg(text));
}
StreamTarget::Autonomous => {
let mut msg = crate::agent::api::Message::user(text);
msg.stamp();
ag.push_entry(crate::agent::context::ConversationEntry::Dmn(msg));
ag.push_node(AstNode::dmn(text));
}
}
// Compact if over budget before sending
let threshold = compaction_threshold(&self.config.app) as usize;
if ag.context.total_tokens() > threshold {
if ag.context.tokens() > threshold {
ag.compact();
ag.notify("compacted");
}
@@ -508,12 +507,6 @@ impl Mind {
crate::user::chat::cmd_switch_model(&self.agent, &name).await;
}
// Post-turn maintenance
{
let mut ag = self.agent.lock().await;
ag.age_out_images();
}
cmds.push(MindCommand::Compact);
if !self.config.no_agents {
cmds.push(MindCommand::Score);

View file

@@ -40,14 +40,14 @@ fn build_messages(
filter: Filter,
) -> Vec<serde_json::Value> {
let mut msgs = Vec::new();
for e in context.system.entries() {
for e in context.system().entries() {
msgs.push(serde_json::json!({"role": "system", "content": e.entry.message().content_text()}));
}
let ctx = context.render_context_message();
if !ctx.is_empty() {
msgs.push(serde_json::json!({"role": "user", "content": ctx}));
}
let entries = context.conversation.entries();
let entries = context.conversation().entries();
for i in range {
let ce = &entries[i];
let entry = &ce.entry;

View file

@@ -13,7 +13,7 @@ use ratatui::{
};
use super::{App, ScreenView, screen_legend};
use crate::agent::api::Role;
use crate::agent::context::{AstNode, NodeBody, Role};
use crate::mind::MindCommand;
// --- Slash command table ---
@@ -376,7 +376,7 @@ pub(crate) struct InteractScreen {
call_timeout_secs: u64,
// State sync with agent — double buffer
last_generation: u64,
last_entries: Vec<crate::agent::context::ContextEntry>,
last_entries: Vec<AstNode>,
pending_display_count: usize,
/// Reference to agent for state sync
agent: std::sync::Arc<tokio::sync::Mutex<crate::agent::Agent>>,
@@ -411,110 +411,79 @@ impl InteractScreen {
}
}
/// Route an agent entry to pane items.
/// Returns empty vec for entries that shouldn't be displayed.
fn route_entry(entry: &crate::agent::context::ConversationEntry) -> Vec<(PaneTarget, String, Marker)> {
use crate::agent::api::Role;
use crate::agent::context::ConversationEntry;
match entry {
ConversationEntry::Memory { .. }
| ConversationEntry::Thinking(_)
| ConversationEntry::Log(_) => return vec![],
_ => {}
}
let msg = entry.message();
let text = msg.content_text().to_string();
if text.starts_with("<system-reminder>") {
return vec![];
}
match msg.role {
Role::User => {
if text.is_empty() { return vec![]; }
vec![(PaneTarget::Conversation, text, Marker::User)]
}
Role::Assistant => {
let mut items = Vec::new();
// Tool calls → tools pane
if let Some(ref calls) = msg.tool_calls {
for call in calls {
let line = format!("[{}] {}",
call.function.name,
call.function.arguments.chars().take(80).collect::<String>());
items.push((PaneTarget::Tools, line, Marker::None));
fn route_node(node: &AstNode) -> Vec<(PaneTarget, String, Marker)> {
match node {
AstNode::Leaf(leaf) => {
let text = leaf.body().text().to_string();
match leaf.body() {
NodeBody::Memory { .. } | NodeBody::Thinking(_)
| NodeBody::Log(_) | NodeBody::Dmn(_) => vec![],
NodeBody::Content(_) => {
if text.is_empty() || text.starts_with("<system-reminder>") { vec![] }
else { vec![(PaneTarget::Conversation, text, Marker::User)] }
}
NodeBody::ToolCall { name, arguments } => {
let line = format!("[{}] {}", name, arguments.chars().take(80).collect::<String>());
vec![(PaneTarget::Tools, line, Marker::None)]
}
NodeBody::ToolResult(t) => {
if t.is_empty() { vec![] }
else { vec![(PaneTarget::ToolResult, text, Marker::None)] }
}
}
// Text content → conversation
if !text.is_empty() {
items.push((PaneTarget::ConversationAssistant, text, Marker::Assistant));
}
AstNode::Branch { role, children } => {
match role {
Role::User => {
let text: String = children.iter()
.filter_map(|c| c.leaf())
.filter(|l| matches!(l.body(), NodeBody::Content(_)))
.map(|l| l.body().text())
.collect::<Vec<_>>()
.join("");
if text.is_empty() || text.starts_with("<system-reminder>") { vec![] }
else { vec![(PaneTarget::Conversation, text, Marker::User)] }
}
Role::Assistant => {
let mut items = Vec::new();
for child in children {
items.extend(Self::route_node(child));
}
// Re-tag content as assistant
for item in &mut items {
if item.0 == PaneTarget::Conversation {
item.0 = PaneTarget::ConversationAssistant;
item.2 = Marker::Assistant;
}
}
items
}
Role::System => vec![],
}
items
}
Role::Tool => {
if text.is_empty() { return vec![]; }
vec![(PaneTarget::ToolResult, text, Marker::None)]
}
Role::System => vec![],
}
}
/// Sync conversation display from agent entries + pending input.
fn sync_from_agent(&mut self) {
// Pop previously-displayed pending input
for _ in 0..self.pending_display_count {
self.conversation.pop_line();
}
self.pending_display_count = 0;
// Sync agent entries
if let Ok(agent) = self.agent.try_lock() {
let generation = agent.generation;
let entries = agent.entries();
let entries = agent.conversation();
// Phase 1: detect desync and pop
if generation != self.last_generation {
if generation != self.last_generation || entries.len() < self.last_entries.len() {
self.conversation = PaneState::new(true);
self.autonomous = PaneState::new(true);
self.tools = PaneState::new(false);
self.last_entries.clear();
} else {
let mut pop = self.last_entries.len();
for i in (0..self.last_entries.len()).rev() {
// Check if this entry is out of bounds or doesn't match
let matches = i < entries.len() && self.last_entries[i].entry == entries[i].entry;
if !matches {
pop = i;
}
// Only stop at assistant if it matches - otherwise keep going
if matches && !self.last_entries[i].token_ids.is_empty()
&& self.last_entries[i].entry.message().role == Role::Assistant {
break;
}
}
while self.last_entries.len() > pop {
let popped = self.last_entries.pop().unwrap();
for (target, _, _) in Self::route_entry(&popped.entry) {
match target {
PaneTarget::Conversation | PaneTarget::ConversationAssistant
=> self.conversation.pop_line(),
PaneTarget::Tools | PaneTarget::ToolResult
=> self.tools.pop_line(),
}
}
}
}
// Phase 2: push new entries
let start = self.last_entries.len();
for entry in entries.iter().skip(start) {
for (target, text, marker) in Self::route_entry(&entry.entry) {
for node in entries.iter().skip(start) {
for (target, text, marker) in Self::route_node(node) {
match target {
PaneTarget::Conversation => {
self.conversation.current_color = Color::Cyan;
@@ -537,7 +506,7 @@ impl InteractScreen {
}
}
}
self.last_entries.push(entry.clone());
self.last_entries.push(node.clone());
}
self.last_generation = generation;