Compare commits

...

5 commits

Author SHA1 Message Date
ProofOfConcept
15f3be27ce Show MCP server failures in the UI instead of debug log
MCP server spawn failures were going to dbglog where the user
wouldn't see them. Route through the agent's notify so they appear
on the status bar.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-09 22:46:48 -04:00
ProofOfConcept
3e0d52c451 Redirect noisy warnings to debug log to stop TUI corruption
Duplicate key warnings fire on every store load and were writing to
stderr, corrupting the TUI display. Log write warnings and MCP
server failures are similarly routine. Route these to dbglog.

Serious errors (rkyv snapshot failures, store corruption) remain on
stderr — those are real problems the user needs to see.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-09 22:46:48 -04:00
ProofOfConcept
c31d531954 Fix status bar timer: use activity start time, tick every 1s
The status bar timer was showing turn/call elapsed times (0s, 0/60s)
instead of the activity's actual elapsed time. Use activity_started
from the ActivityEntry directly.

Add a 1s tick to the UI select loop when an activity is active so
the timer updates live.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-09 22:36:45 -04:00
ProofOfConcept
5fe22a5f23 Use ActivityGuard for context overflow retry progress
Instead of two separate notifications piling up on the status bar,
use a single ActivityGuard that updates in place during overflow
retries and auto-completes when the turn finishes.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-09 22:32:38 -04:00
ProofOfConcept
121b46e1d2 Add ActivityGuard::update() for in-place progress updates
Lets long-running operations update their status bar message without
creating/dropping a new activity per iteration. Useful for loops
like memory scoring where you want "scoring: 3/25 keyname" updating
in place.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-09 22:18:43 -04:00
7 changed files with 50 additions and 15 deletions

View file

@ -801,7 +801,7 @@ impl ContextState {
pub fn push_log(&mut self, section: Section, node: AstNode) {
if let Some(ref log) = self.conversation_log {
if let Err(e) = log.append_node(&node) {
eprintln!("warning: log: {:#}", e);
dbglog!("warning: log: {:#}", e);
}
}
self.section_mut(section).push(node);

View file

@ -42,6 +42,17 @@ pub struct ActivityGuard {
id: u64,
}
impl ActivityGuard {
    /// Replace this activity's label in place.
    ///
    /// Finds the activity entry carrying this guard's `id` in the agent's
    /// shared state and overwrites its label; if the entry is no longer
    /// present, nothing is relabeled. In either case `changed` is notified
    /// so a waiter observing the state wakes up.
    pub async fn update(&self, label: impl Into<String>) {
        // Build the owned String before taking the lock so the conversion
        // (and any allocation) happens outside the critical section.
        let new_label = label.into();
        let mut state = self.agent.state.lock().await;
        match state.activities.iter_mut().find(|entry| entry.id == self.id) {
            Some(entry) => entry.label = new_label,
            // Guard outlived its entry — drop the label silently.
            None => {}
        }
        state.changed.notify_one();
    }
}
const ACTIVITY_LINGER: std::time::Duration = std::time::Duration::from_secs(5);
impl Drop for ActivityGuard {
@ -307,6 +318,7 @@ impl Agent {
}
let mut overflow_retries: u32 = 0;
let mut overflow_activity: Option<ActivityGuard> = None;
let mut empty_retries: u32 = 0;
let mut ds = DispatchState::new();
@ -371,8 +383,12 @@ impl Agent {
}
if overflow_retries < 2 {
overflow_retries += 1;
agent.state.lock().await.notify(
format!("context overflow — retrying ({}/2)", overflow_retries));
let msg = format!("context overflow — compacting ({}/2)", overflow_retries);
match &overflow_activity {
Some(a) => a.update(&msg).await,
None => overflow_activity = Some(
start_activity(&agent, &msg).await),
}
agent.compact().await;
continue;
}
@ -387,7 +403,7 @@ impl Agent {
if let Some(ref log) = ctx.conversation_log {
let node = &ctx.conversation()[branch_idx];
if let Err(e) = log.append_node(node) {
eprintln!("warning: log: {:#}", e);
dbglog!("warning: log: {:#}", e);
}
}
}
@ -562,7 +578,7 @@ impl Agent {
}
}
Err(e) => {
eprintln!("warning: failed to reload identity: {:#}", e);
dbglog!("warning: failed to reload identity: {:#}", e);
}
}

View file

@ -140,7 +140,7 @@ fn registry() -> &'static TokioMutex<Registry> {
})
}
async fn ensure_init() -> Result<()> {
async fn ensure_init(agent: Option<&std::sync::Arc<super::super::Agent>>) -> Result<()> {
let mut reg = registry().lock().await;
if !reg.servers.is_empty() { return Ok(()); }
let configs = crate::config::get().mcp_servers.clone();
@ -148,14 +148,24 @@ async fn ensure_init() -> Result<()> {
let args: Vec<&str> = cfg.args.iter().map(|s| s.as_str()).collect();
match McpServer::spawn(&cfg.name, &cfg.command, &args).await {
Ok(server) => reg.servers.push(server),
Err(e) => eprintln!("warning: MCP server {} failed: {:#}", cfg.name, e),
Err(e) => {
let msg = format!("MCP server {} failed: {:#}", cfg.name, e);
dbglog!("{}", msg);
if let Some(a) = agent {
if let Ok(mut st) = a.state.try_lock() {
st.notify(msg);
}
}
}
}
}
Ok(())
}
pub(super) async fn call_tool(name: &str, args: &serde_json::Value) -> Result<String> {
ensure_init().await?;
pub(super) async fn call_tool(name: &str, args: &serde_json::Value,
agent: Option<&std::sync::Arc<super::super::Agent>>,
) -> Result<String> {
ensure_init(agent).await?;
let mut reg = registry().lock().await;
let server = reg.servers.iter_mut()
.find(|s| s.tools.iter().any(|t| t.name == name))
@ -178,7 +188,7 @@ pub(super) async fn call_tool(name: &str, args: &serde_json::Value) -> Result<St
}
pub(super) async fn tool_definitions_json() -> Vec<String> {
let _ = ensure_init().await;
let _ = ensure_init(None).await;
let reg = registry().lock().await;
reg.servers.iter()
.flat_map(|s| s.tools.iter())

View file

@ -164,7 +164,7 @@ pub async fn dispatch_with_agent(
None => true,
};
if allowed {
if let Ok(result) = mcp_client::call_tool(name, args).await {
if let Ok(result) = mcp_client::call_tool(name, args, agent.as_ref()).await {
return result;
}
}

View file

@ -200,7 +200,7 @@ impl Store {
// Report duplicate keys
for (key, uuids) in &key_uuids {
if uuids.len() > 1 {
eprintln!("WARNING: key '{}' has {} UUIDs (duplicate nodes)", key, uuids.len());
dbglog!("WARNING: key '{}' has {} UUIDs (duplicate nodes)", key, uuids.len());
}
}

View file

@ -902,9 +902,8 @@ impl InteractScreen {
// Draw status bar with live activity indicator
let timer = if !app.activity.is_empty() {
let total = self.turn_started.map(|t| t.elapsed().as_secs()).unwrap_or(0);
let call = self.call_started.map(|t| t.elapsed().as_secs()).unwrap_or(0);
format!(" {}s, {}/{}s", total, call, self.call_timeout_secs)
let elapsed = app.activity_started.map(|t| t.elapsed().as_secs()).unwrap_or(0);
format!(" {}s", elapsed)
} else {
String::new()
};
@ -1057,6 +1056,8 @@ impl ScreenView for InteractScreen {
app.activity = st.activities.last()
.map(|a| a.label.clone())
.unwrap_or_default();
app.activity_started = st.activities.last()
.map(|a| a.started);
}
if let Ok(ctx) = self.agent.context.try_lock() {
let window = crate::agent::context::context_window();

View file

@ -358,7 +358,11 @@ async fn run(
let mut startup_done = false;
let mut dirty = true; // render on first loop
let mut activity_tick = tokio::time::interval(std::time::Duration::from_secs(1));
activity_tick.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
loop {
let has_activity = !app.activity.is_empty();
tokio::select! {
biased;
@ -380,6 +384,10 @@ async fn run(
Some(channels) = channel_rx.recv() => {
app.set_channel_status(channels);
}
_ = activity_tick.tick(), if has_activity => {
dirty = true;
}
}
// State sync on every wake