Fix build warnings across workspace

- Remove redundant token fields from StreamEvent::Finished (data
  already delivered via Usage event)
- Remove dead hotkey_adjust_sampling, MAX_HISTORY, now()
- Fix unused variable warnings (delta, log)
- Suppress deserialization-only field warnings (jsonrpc, role)
- Make start_stream/chat_completion_stream_temp pub(crate)
- Remove unnecessary pub(crate) re-export of internal types

Remaining warnings are TODO items: SkipIndex (scoring not wired),
notify (MCP notifications not wired).

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-07 13:55:30 -04:00
parent c64295ddb2
commit 9737641c86
10 changed files with 11 additions and 42 deletions

View file

@ -29,7 +29,6 @@ use poc_memory::thalamus::channel_log;
// ── Constants ──────────────────────────────────────────────────
const MAX_HISTORY: usize = 1000;
const RECONNECT_BASE_SECS: u64 = 5;
const RECONNECT_MAX_SECS: u64 = 300;
const PING_INTERVAL_SECS: u64 = 120;
@ -236,12 +235,6 @@ fn append_log(target: &str, nick: &str, text: &str) {
channel_log::append_disk_log(&log_dir(), target, nick, text);
}
/// Current wall-clock time as fractional seconds since the Unix epoch.
/// A system clock set before the epoch yields 0.0 instead of panicking.
fn now() -> f64 {
    let since_epoch = std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH);
    match since_epoch {
        Ok(d) => d.as_secs_f64(),
        // Pre-epoch clock: report 0.0, mirroring Duration::default().
        Err(_) => 0.0,
    }
}
// ── TLS ────────────────────────────────────────────────────────

View file

@ -13,8 +13,6 @@ mod openai;
// Public API types — used outside agent::api
pub use types::{Message, MessageContent, ContentPart, ImageUrl, Role, ToolCall, FunctionCall, Usage};
// Internal types — re-exported for sibling modules within agent/
pub(crate) use types::{ChatRequest, ReasoningConfig, ChatCompletionChunk, ChunkChoice, Delta, ToolCallDelta, FunctionCallDelta};
use anyhow::Result;
use std::time::{Duration, Instant};
@ -72,8 +70,6 @@ pub(crate) enum StreamEvent {
/// Stream finished.
Finished {
reason: String,
prompt_tokens: u32,
completion_tokens: u32,
},
/// Error from the stream.
Error(String),
@ -105,7 +101,7 @@ impl ApiClient {
/// Start a streaming chat completion. Returns a receiver of StreamEvents.
/// The caller (runner) reads events and handles routing to the UI.
///
pub fn start_stream(
pub(crate) fn start_stream(
&self,
messages: &[Message],
tools: &[agent_tools::Tool],
@ -137,7 +133,7 @@ impl ApiClient {
(rx, AbortOnDrop(handle))
}
pub async fn chat_completion_stream_temp(
pub(crate) async fn chat_completion_stream_temp(
&self,
messages: &[Message],
tools: &[agent_tools::Tool],

View file

@ -181,14 +181,7 @@ pub(super) async fn stream_events(
);
let reason = finish_reason.unwrap_or_default();
let (pt, ct) = usage.as_ref()
.map(|u| (u.prompt_tokens, u.completion_tokens))
.unwrap_or((0, 0));
let _ = tx.send(StreamEvent::Finished {
reason,
prompt_tokens: pt,
completion_tokens: ct,
});
let _ = tx.send(StreamEvent::Finished { reason });
Ok(())
}

View file

@ -154,7 +154,6 @@ pub(crate) struct ReasoningConfig {
}
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
pub struct Usage {
pub prompt_tokens: u32,
pub completion_tokens: u32,
@ -171,7 +170,6 @@ pub(crate) struct ChatCompletionChunk {
}
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
pub(crate) struct ChunkChoice {
pub delta: Delta,
pub finish_reason: Option<String>,
@ -180,8 +178,8 @@ pub(crate) struct ChunkChoice {
/// The delta within a streaming chunk. All fields optional because each
/// chunk only carries the incremental change.
#[derive(Debug, Deserialize, Default)]
#[allow(dead_code)]
pub(crate) struct Delta {
#[allow(dead_code)] // present for deserialization
pub role: Option<Role>,
pub content: Option<String>,
/// Reasoning/thinking content — sent by some models (Qwen, DeepSeek)
@ -276,7 +274,6 @@ impl Message {
}
}
#[allow(dead_code)]
pub fn assistant(content: impl Into<String>) -> Self {
Self {
role: Role::Assistant,

View file

@ -14,7 +14,7 @@ use std::fs;
use std::path::PathBuf;
use std::sync::OnceLock;
use super::api::{self, ApiClient, Message, Usage};
use super::api::{ApiClient, Message, Usage};
use super::tools::{self as agent_tools};
use super::Agent;

View file

@ -13,6 +13,7 @@ use std::io::{self, BufRead, Write};
#[derive(Deserialize)]
struct Request {
#[allow(dead_code)]
jsonrpc: String,
method: String,
#[serde(default)]

View file

@ -71,7 +71,7 @@ fn find_context_files(cwd: &Path, prompt_file: &str) -> Vec<PathBuf> {
/// 2. Project dir (if set)
/// 3. Global (~/.consciousness/)
/// For journal source, loads recent journal entries.
fn load_memory_files(cwd: &Path, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Vec<(String, String)> {
fn load_memory_files(memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Vec<(String, String)> {
let home = match dirs::home_dir() {
Some(h) => h,
None => return Vec::new(),
@ -178,7 +178,7 @@ pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: O
}
}
let memories = load_memory_files(cwd, memory_project, context_groups);
let memories = load_memory_files(memory_project, context_groups);
let memory_count = memories.len();
for (name, content) in memories {
parts.push((name, content));

View file

@ -263,7 +263,7 @@ fn generate_digest(
.join(".consciousness/logs/llm/digest");
std::fs::create_dir_all(&log_dir).ok();
let log_path = log_dir.join(format!("{}.txt", crate::store::compact_timestamp()));
let log = move |msg: &str| {
let _log = move |msg: &str| {
use std::io::Write;
if let Ok(mut f) = std::fs::OpenOptions::new()
.create(true).append(true).open(&log_path)

View file

@ -263,17 +263,6 @@ fn hotkey_cycle_autonomy(mind: &crate::mind::Mind) {
}
}
/// Nudge one of the agent's sampling parameters by `delta`.
/// `param` selects the field: 0 = temperature, 1 = top_p, 2 = top_k;
/// any other index is ignored. Silently does nothing if the agent
/// lock is currently held elsewhere (non-blocking try_lock).
fn hotkey_adjust_sampling(mind: &crate::mind::Mind, param: usize, delta: f32) {
    // Best-effort: skip the adjustment rather than block the UI thread.
    let mut ag = match mind.agent.try_lock() {
        Ok(guard) => guard,
        Err(_) => return,
    };
    match param {
        // Each field is clamped to its valid range after the nudge.
        0 => ag.temperature = (ag.temperature + delta).clamp(0.0, 2.0),
        1 => ag.top_p = (ag.top_p + delta).clamp(0.0, 1.0),
        // top_k is integral; apply the delta in f32 space, floor at zero.
        2 => ag.top_k = (ag.top_k as f32 + delta).max(0.0) as u32,
        _ => {}
    }
}
/// Returns true if this is an event the main loop handles (F-keys, Ctrl combos, resize).
fn is_global_event(event: &ratatui::crossterm::event::Event) -> bool {
use ratatui::crossterm::event::{Event, KeyCode, KeyModifiers, KeyEventKind};

View file

@ -35,12 +35,12 @@ impl ScreenView for ThalamusScreen {
KeyCode::Up => { self.sampling_selected = self.sampling_selected.saturating_sub(1); }
KeyCode::Down => { self.sampling_selected = (self.sampling_selected + 1).min(2); }
KeyCode::Right => {
let delta = match self.sampling_selected {
let _delta = match self.sampling_selected {
0 => 0.05, 1 => 0.05, 2 => 5.0, _ => 0.0,
};
}
KeyCode::Left => {
let delta = match self.sampling_selected {
let _delta = match self.sampling_selected {
0 => -0.05, 1 => -0.05, 2 => -5.0, _ => 0.0,
};
}