forked from kent/consciousness
refactor: runner owns stream routing, suppress tool call XML from display
Split the streaming pipeline: API backends yield StreamEvents through a channel, the runner reads them and routes to the appropriate UI pane. - Add StreamEvent enum (Content, Reasoning, ToolCallDelta, etc.) - API start_stream() spawns backend as a task, returns event receiver - Runner loops over events, sends content to conversation pane but suppresses <tool_call> XML with a buffered tail for partial tags - OpenAI backend refactored to stream_events() — no more UI coupling - Anthropic backend gets a wrapper that synthesizes events from the existing stream() (TODO: native event streaming) - chat_completion_stream() kept for subconscious agents, reimplemented on top of the event stream - Usage derives Clone Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
parent
912626c5f0
commit
13453606ae
6 changed files with 338 additions and 114 deletions
|
|
@@ -15,8 +15,11 @@ use reqwest::Client;
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::time::Duration;
|
||||
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
use crate::agent::types::*;
|
||||
use crate::agent::ui_channel::{StreamTarget, UiMessage, UiSender};
|
||||
use super::StreamEvent;
|
||||
|
||||
// --- Anthropic wire types ---
|
||||
|
||||
|
|
@@ -653,3 +656,58 @@ pub async fn stream(
|
|||
|
||||
Ok((super::build_response_message(content, tool_calls), usage))
|
||||
}
|
||||
|
||||
/// Wrapper that calls the existing stream() and synthesizes StreamEvents.
|
||||
/// TODO: refactor to emit events during streaming like the OpenAI backend.
|
||||
pub async fn stream_events(
|
||||
client: &Client,
|
||||
api_key: &str,
|
||||
model: &str,
|
||||
messages: &[Message],
|
||||
tools: Option<&[crate::agent::types::ToolDef]>,
|
||||
tx: &mpsc::UnboundedSender<StreamEvent>,
|
||||
ui_tx: &UiSender,
|
||||
reasoning_effort: &str,
|
||||
) -> Result<()> {
|
||||
let (msg, usage) = stream(
|
||||
client, api_key, model, messages, tools,
|
||||
ui_tx, StreamTarget::Conversation, reasoning_effort,
|
||||
).await?;
|
||||
|
||||
// Synthesize events from the completed message.
|
||||
if let Some(text) = msg.content.as_ref().and_then(|c| match c {
|
||||
MessageContent::Text(t) => Some(t.as_str()),
|
||||
_ => None,
|
||||
}) {
|
||||
if !text.is_empty() {
|
||||
let _ = tx.send(StreamEvent::Content(text.to_string()));
|
||||
}
|
||||
}
|
||||
if let Some(ref tcs) = msg.tool_calls {
|
||||
for (i, tc) in tcs.iter().enumerate() {
|
||||
let _ = tx.send(StreamEvent::ToolCallDelta {
|
||||
index: i,
|
||||
id: Some(tc.id.clone()),
|
||||
call_type: Some(tc.call_type.clone()),
|
||||
name: Some(tc.function.name.clone()),
|
||||
arguments: Some(tc.function.arguments.clone()),
|
||||
});
|
||||
}
|
||||
}
|
||||
if let Some(u) = usage {
|
||||
let _ = tx.send(StreamEvent::Usage(u.clone()));
|
||||
let _ = tx.send(StreamEvent::Finished {
|
||||
reason: "stop".into(),
|
||||
prompt_tokens: u.prompt_tokens,
|
||||
completion_tokens: u.completion_tokens,
|
||||
});
|
||||
} else {
|
||||
let _ = tx.send(StreamEvent::Finished {
|
||||
reason: "stop".into(),
|
||||
prompt_tokens: 0,
|
||||
completion_tokens: 0,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue