Reduce pub visibility in agent::api and user modules

api/: make the parsing module private; tighten SamplingParams, StreamEvent,
StreamResult, AbortOnDrop, build_response_message, and collect_stream to pub(crate).
Internal types (ChatRequest, ChunkChoice, Delta, etc.) become pub(crate);
StreamResult fields become pub(crate); parsing functions become pub(super).

user/: make the context, subconscious, unconscious, and thalamus modules private
(only chat needs pub(crate), for access from mind/).

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-07 13:43:25 -04:00
parent f33b1767da
commit c64295ddb2
4 changed files with 29 additions and 29 deletions

View file

@ -7,7 +7,7 @@
// Set POC_DEBUG=1 for verbose per-turn logging.
pub mod http;
pub mod parsing;
mod parsing;
mod types;
mod openai;
@ -26,7 +26,7 @@ use tokio::sync::mpsc;
use crate::agent::tools::{self as agent_tools, summarize_args, ActiveToolCall};
/// A JoinHandle that aborts its task when dropped.
pub struct AbortOnDrop(tokio::task::JoinHandle<()>);
pub(crate) struct AbortOnDrop(tokio::task::JoinHandle<()>);
impl Drop for AbortOnDrop {
fn drop(&mut self) {
@ -36,7 +36,7 @@ impl Drop for AbortOnDrop {
/// Sampling parameters for model generation.
#[derive(Clone, Copy)]
pub struct SamplingParams {
pub(crate) struct SamplingParams {
pub temperature: f32,
pub top_p: f32,
pub top_k: u32,
@ -54,7 +54,7 @@ fn tools_to_json_str(tools: &[agent_tools::Tool]) -> String {
/// Events produced by the streaming API backends.
/// The runner reads these and decides what to display where.
pub enum StreamEvent {
pub(crate) enum StreamEvent {
/// Content token from the model's response.
Content(String),
/// Reasoning/thinking token (internal monologue).
@ -447,7 +447,7 @@ impl SseReader {
/// from models that emit tool calls as text), parse them out and
/// promote them to structured tool_calls. This way all consumers
/// see tool calls uniformly regardless of backend.
pub fn build_response_message(
pub(crate) fn build_response_message(
content: String,
tool_calls: Vec<ToolCall>,
) -> Message {
@ -574,16 +574,16 @@ pub(crate) fn log_diagnostics(
// ---------------------------------------------------------------------------
/// Result of collecting a complete response from the stream.
pub struct StreamResult {
pub content: String,
pub tool_calls: Vec<ToolCall>,
pub usage: Option<Usage>,
pub finish_reason: Option<String>,
pub error: Option<String>,
pub(crate) struct StreamResult {
pub(crate) content: String,
pub(crate) tool_calls: Vec<ToolCall>,
pub(crate) usage: Option<Usage>,
pub(crate) finish_reason: Option<String>,
pub(crate) error: Option<String>,
/// Remaining display buffer (caller should flush if not in a tool call).
pub display_buf: String,
pub(crate) display_buf: String,
/// Whether we were mid-tool-call when the stream ended.
pub in_tool_call: bool,
pub(crate) in_tool_call: bool,
}
/// Collect stream events into a complete response. Handles:
@ -591,7 +591,7 @@ pub struct StreamResult {
/// - Leaked tool call detection and dispatch (Qwen XML in content)
/// - Structured tool call delta assembly (OpenAI-style)
/// - UI forwarding (text deltas, reasoning, tool call notifications)
pub async fn collect_stream(
pub(crate) async fn collect_stream(
rx: &mut mpsc::UnboundedReceiver<StreamEvent>,
agent: &std::sync::Arc<tokio::sync::Mutex<super::Agent>>,
active_tools: &crate::agent::tools::SharedActiveTools,