Reduce pub visibility in agent::api and user modules

api/: make the parsing module private; narrow SamplingParams, StreamEvent,
StreamResult, AbortOnDrop, build_response_message, and collect_stream to pub(crate).
Internal types (ChatRequest, ChunkChoice, Delta, etc.) and the fields of
StreamResult also become pub(crate); the parsing functions become pub(super).

user/: make the context, subconscious, unconscious, and thalamus modules private
(only chat remains pub(crate), since mind/ needs access to it).

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-07 13:43:25 -04:00
parent f33b1767da
commit c64295ddb2
4 changed files with 29 additions and 29 deletions

View file

@ -7,7 +7,7 @@
// Set POC_DEBUG=1 for verbose per-turn logging.
pub mod http;
pub mod parsing;
mod parsing;
mod types;
mod openai;
@ -26,7 +26,7 @@ use tokio::sync::mpsc;
use crate::agent::tools::{self as agent_tools, summarize_args, ActiveToolCall};
/// A JoinHandle that aborts its task when dropped.
pub struct AbortOnDrop(tokio::task::JoinHandle<()>);
pub(crate) struct AbortOnDrop(tokio::task::JoinHandle<()>);
impl Drop for AbortOnDrop {
fn drop(&mut self) {
@ -36,7 +36,7 @@ impl Drop for AbortOnDrop {
/// Sampling parameters for model generation.
#[derive(Clone, Copy)]
pub struct SamplingParams {
pub(crate) struct SamplingParams {
pub temperature: f32,
pub top_p: f32,
pub top_k: u32,
@ -54,7 +54,7 @@ fn tools_to_json_str(tools: &[agent_tools::Tool]) -> String {
/// Events produced by the streaming API backends.
/// The runner reads these and decides what to display where.
pub enum StreamEvent {
pub(crate) enum StreamEvent {
/// Content token from the model's response.
Content(String),
/// Reasoning/thinking token (internal monologue).
@ -447,7 +447,7 @@ impl SseReader {
/// from models that emit tool calls as text), parse them out and
/// promote them to structured tool_calls. This way all consumers
/// see tool calls uniformly regardless of backend.
pub fn build_response_message(
pub(crate) fn build_response_message(
content: String,
tool_calls: Vec<ToolCall>,
) -> Message {
@ -574,16 +574,16 @@ pub(crate) fn log_diagnostics(
// ---------------------------------------------------------------------------
/// Result of collecting a complete response from the stream.
pub struct StreamResult {
pub content: String,
pub tool_calls: Vec<ToolCall>,
pub usage: Option<Usage>,
pub finish_reason: Option<String>,
pub error: Option<String>,
pub(crate) struct StreamResult {
pub(crate) content: String,
pub(crate) tool_calls: Vec<ToolCall>,
pub(crate) usage: Option<Usage>,
pub(crate) finish_reason: Option<String>,
pub(crate) error: Option<String>,
/// Remaining display buffer (caller should flush if not in a tool call).
pub display_buf: String,
pub(crate) display_buf: String,
/// Whether we were mid-tool-call when the stream ended.
pub in_tool_call: bool,
pub(crate) in_tool_call: bool,
}
/// Collect stream events into a complete response. Handles:
@ -591,7 +591,7 @@ pub struct StreamResult {
/// - Leaked tool call detection and dispatch (Qwen XML in content)
/// - Structured tool call delta assembly (OpenAI-style)
/// - UI forwarding (text deltas, reasoning, tool call notifications)
pub async fn collect_stream(
pub(crate) async fn collect_stream(
rx: &mut mpsc::UnboundedReceiver<StreamEvent>,
agent: &std::sync::Arc<tokio::sync::Mutex<super::Agent>>,
active_tools: &crate::agent::tools::SharedActiveTools,

View file

@ -17,7 +17,7 @@ use super::types::{ToolCall, FunctionCall};
/// Looks for `<tool_call>...</tool_call>` blocks and tries both
/// XML and JSON formats for the body.
/// Parse a single tool call body (content between `<tool_call>` and `</tool_call>`).
pub fn parse_tool_call_body(body: &str) -> Option<ToolCall> {
pub(super) fn parse_tool_call_body(body: &str) -> Option<ToolCall> {
let normalized = normalize_xml_tags(body);
let body = normalized.trim();
let mut counter = 0u32;
@ -25,7 +25,7 @@ pub fn parse_tool_call_body(body: &str) -> Option<ToolCall> {
.or_else(|| parse_json_tool_call(body, &mut counter))
}
pub fn parse_leaked_tool_calls(text: &str) -> Vec<ToolCall> {
pub(super) fn parse_leaked_tool_calls(text: &str) -> Vec<ToolCall> {
// Normalize whitespace inside XML tags: "<\nfunction\n=\nbash\n>" → "<function=bash>"
// This handles streaming tokenizers that split tags across tokens.
let normalized = normalize_xml_tags(text);
@ -147,7 +147,7 @@ fn parse_json_tool_call(body: &str, counter: &mut u32) -> Option<ToolCall> {
/// Strip tool call XML and thinking tokens from text so the conversation
/// history stays clean. Removes `<tool_call>...</tool_call>` blocks and
/// `</think>` tags (thinking content before them is kept — it's useful context).
pub fn strip_leaked_artifacts(text: &str) -> String {
pub(super) fn strip_leaked_artifacts(text: &str) -> String {
let normalized = normalize_xml_tags(text);
let mut result = normalized.clone();

View file

@ -18,7 +18,7 @@ pub struct FunctionCall {
/// Partial function call within a streaming delta.
#[derive(Debug, Deserialize)]
pub struct FunctionCallDelta {
pub(crate) struct FunctionCallDelta {
pub name: Option<String>,
pub arguments: Option<String>,
}
@ -34,7 +34,7 @@ pub struct ToolCall {
/// A partial tool call within a streaming delta.
#[derive(Debug, Deserialize)]
pub struct ToolCallDelta {
pub(crate) struct ToolCallDelta {
pub index: usize,
pub id: Option<String>,
#[serde(rename = "type")]
@ -114,7 +114,7 @@ pub enum Role {
/// Chat completion request.
#[derive(Debug, Serialize)]
pub struct ChatRequest {
pub(crate) struct ChatRequest {
pub model: String,
pub messages: Vec<Message>,
#[serde(skip_serializing_if = "Option::is_none")]
@ -146,7 +146,7 @@ pub struct ChatRequest {
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningConfig {
pub(crate) struct ReasoningConfig {
pub enabled: bool,
/// "none" disables reasoning entirely per OpenRouter docs.
#[serde(skip_serializing_if = "Option::is_none")]
@ -165,14 +165,14 @@ pub struct Usage {
/// A single chunk from a streaming chat completion response (SSE).
#[derive(Debug, Deserialize)]
pub struct ChatCompletionChunk {
pub(crate) struct ChatCompletionChunk {
pub choices: Vec<ChunkChoice>,
pub usage: Option<Usage>,
}
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
pub struct ChunkChoice {
pub(crate) struct ChunkChoice {
pub delta: Delta,
pub finish_reason: Option<String>,
}
@ -181,7 +181,7 @@ pub struct ChunkChoice {
/// chunk only carries the incremental change.
#[derive(Debug, Deserialize, Default)]
#[allow(dead_code)]
pub struct Delta {
pub(crate) struct Delta {
pub role: Option<Role>,
pub content: Option<String>,
/// Reasoning/thinking content — sent by some models (Qwen, DeepSeek)

View file

@ -3,11 +3,11 @@
// TUI, UI channel, parsing. The cognitive layer (session state
// machine, DMN, identity) lives in mind/.
pub mod chat;
pub mod context;
pub mod subconscious;
pub mod unconscious;
pub mod thalamus;
pub(crate) mod chat;
mod context;
mod subconscious;
mod unconscious;
mod thalamus;
use anyhow::Result;
use std::io::Write;