delete 20 dead public functions across 12 files

Removed functions with zero callers: parse_timestamp_to_epoch,
hash_key, search_weighted_debug, extract_query_terms, format_results,
move_to_neighbor, adjust_edge_strength, update_graph_metrics,
nearest_to_seeds, nystrom_project, chat_completion_stream, cmd_read,
context_message, split_candidates, split_plan_prompt,
split_extract_prompt, log_event_pub, log_verbose, rpc_record_hits,
memory_definitions. -245 lines.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-02 16:21:01 -04:00
parent b0e852a05f
commit 91eb9c95cc
12 changed files with 0 additions and 245 deletions

View file

@ -74,8 +74,6 @@ impl ApiClient {
/// Start a streaming chat completion. Returns a receiver of StreamEvents.
/// The caller (runner) reads events and handles routing to the UI.
///
/// The old `chat_completion_stream` method is kept for the subconscious
/// agents which don't need fine-grained stream control.
pub fn start_stream(
&self,
messages: &[Message],
@ -109,20 +107,6 @@ impl ApiClient {
rx
}
/// Streaming chat completion. Returns the assembled response message
/// plus optional usage stats. Text tokens stream through the UI channel.
///
/// Used by subconscious agents that don't need per-token routing.
pub async fn chat_completion_stream(
&self,
messages: &[Message],
tools: Option<&[ToolDef]>,
ui_tx: &UiSender,
reasoning_effort: &str,
) -> Result<(Message, Option<Usage>)> {
// Delegates to the `_temp` variant with both trailing options left `None`
// (presumably a temperature override and a token cap — confirm at callee).
self.chat_completion_stream_temp(messages, tools, ui_tx, reasoning_effort, None, None).await
}
pub async fn chat_completion_stream_temp(
&self,
messages: &[Message],

View file

@ -70,11 +70,6 @@ fn cursor_path() -> PathBuf { session_dir().join("read-cursor") }
// --- Client commands ---
/// Print new output since last read. With -f, also stream live from socket.
pub async fn cmd_read(follow: bool, debug: bool) -> anyhow::Result<()> {
// Non-blocking variant: the middle argument is `block = false`, so this
// never waits for a response (see cmd_read_inner for the blocking form).
cmd_read_inner(follow, false, debug).await
}
/// Print new output since last read. With -f, stream live. With block, wait for one response.
pub async fn cmd_read_inner(follow: bool, block: bool, debug: bool) -> anyhow::Result<()> {
use std::io::{Read, Seek, SeekFrom, Write};

View file

@ -443,17 +443,6 @@ pub struct SessionConfig {
pub app: AppConfig,
}
impl SessionConfig {
/// Join context parts into a single string for legacy interfaces.
///
/// Each `(name, content)` pair becomes a `## name` section; sections are
/// separated by a `---` horizontal rule.
#[allow(dead_code)]
pub fn context_message(&self) -> String {
    let mut sections = Vec::with_capacity(self.context_parts.len());
    for (name, content) in &self.context_parts {
        sections.push(format!("## {}\n\n{}", name, content));
    }
    sections.join("\n\n---\n\n")
}
}
/// A fully resolved model ready to construct an ApiClient.
#[allow(dead_code)]
pub struct ResolvedModel {

View file

@ -313,13 +313,3 @@ pub fn move_down(store: &Store) -> Result<(), String> {
None => Err(format!("No children for {}", key)),
}
}
/// Move cursor to a graph neighbor by index (from the neighbors list).
///
/// Fails when no cursor is set or the index is past the end of the
/// current node's neighbor list.
pub fn move_to_neighbor(store: &Store, index: usize) -> Result<(), String> {
    let key = get().ok_or("No cursor set")?;
    let neighbors = graph_neighbors(store, &key);
    match neighbors.get(index) {
        Some((target, _)) => {
            set(target)?;
            show(store)
        }
        None => Err(format!(
            "Neighbor index {} out of range (have {})",
            index,
            neighbors.len()
        )),
    }
}

View file

@ -197,8 +197,3 @@ pub fn dump_resolved(date: &str, keys: &[String]) -> Result<Vec<(String, u32)>,
Ok(resolved)
}
/// Hash a key (exposed for testing/external use).
/// Thin public wrapper over the module-private FNV-1a implementation.
pub fn hash_key(key: &str) -> u64 {
fnv1a(key)
}

View file

@ -1441,15 +1441,6 @@ pub fn search_weighted(
search_weighted_inner(terms, store, false, 5)
}
/// Like search_weighted but with debug output and configurable result count.
pub fn search_weighted_debug(
terms: &BTreeMap<String, f64>,
store: &impl StoreView,
max_results: usize,
) -> Vec<SearchResult> {
// Same engine as `search_weighted`; the `true` enables debug tracing.
search_weighted_inner(terms, store, true, max_results)
}
fn search_weighted_inner(
terms: &BTreeMap<String, f64>,
store: &impl StoreView,
@ -1496,41 +1487,3 @@ pub fn search(query: &str, store: &impl StoreView) -> Vec<SearchResult> {
search_weighted(&terms, store)
}
/// Extract meaningful search terms from natural language.
/// Strips common English stop words, returns up to `max_terms` words
/// joined by single spaces.
///
/// Input is lowercased and split on every non-alphanumeric character;
/// a word survives only if it is longer than two bytes and is not in
/// the stop-word list.
pub fn extract_query_terms(text: &str, max_terms: usize) -> String {
    // Common English function words that carry no search signal.
    const STOP_WORDS: &[&str] = &[
        "the", "a", "an", "is", "are", "was", "were", "do", "does", "did",
        "have", "has", "had", "will", "would", "could", "should", "can",
        "may", "might", "shall", "been", "being", "to", "of", "in", "for",
        "on", "with", "at", "by", "from", "as", "but", "or", "and", "not",
        "no", "if", "then", "than", "that", "this", "it", "its", "my",
        "your", "our", "we", "you", "i", "me", "he", "she", "they", "them",
        "what", "how", "why", "when", "where", "about", "just", "let",
        "want", "tell", "show", "think", "know", "see", "look", "make",
        "get", "go", "some", "any", "all", "very", "really", "also", "too",
        "so", "up", "out", "here", "there",
    ];
    text.to_lowercase()
        .split(|c: char| !c.is_alphanumeric())
        // `len() > 2` already excludes the empty fragments produced by
        // consecutive separators, so no separate is_empty() check is
        // needed. NOTE: len() counts bytes, not chars — fine for the
        // English text this targets.
        .filter(|w| w.len() > 2 && !STOP_WORDS.contains(w))
        .take(max_terms)
        .collect::<Vec<_>>()
        .join(" ")
}
/// Format search results as text lines (for hook consumption).
///
/// Direct hits are flush-left; non-direct hits are indented one space.
/// A result's snippet, when present, follows on its own line.
pub fn format_results(results: &[SearchResult]) -> String {
    let mut out = String::new();
    for (rank, result) in results.iter().enumerate() {
        let indent = if result.is_direct { "" } else { " " };
        // NOTE(review): `activation` fills both score slots of the
        // `[a/b]` bracket; the second slot may have been intended for a
        // different score — confirm against the producer of SearchResult.
        let line = format!("{}{:2}. [{:.2}/{:.2}] {}",
            indent, rank + 1, result.activation, result.activation, result.key);
        out.push_str(&line);
        out.push('\n');
        if let Some(snippet) = result.snippet.as_ref() {
            out.push_str(&format!(" {}\n", snippet));
        }
    }
    out
}

View file

@ -287,16 +287,6 @@ pub fn nearest_neighbors(
distances
}
/// Find nearest neighbors to a set of seed nodes (multi-seed query).
/// Returns nodes ranked by minimum distance to any seed.
///
/// Every seed gets uniform weight 1.0 and no link-weight override; see
/// `nearest_to_seeds_weighted` for the general form.
pub fn nearest_to_seeds(
    emb: &SpectralEmbedding,
    seeds: &[&str],
    k: usize,
) -> Vec<(String, f64)> {
    let uniform: Vec<(&str, f64)> = seeds.iter().map(|&s| (s, 1.0)).collect();
    nearest_to_seeds_weighted(emb, &uniform, None, k)
}
/// Find nearest neighbors to weighted seed nodes, using link weights.
///
/// Each seed has a weight (from query term weighting). For candidates
@ -531,35 +521,6 @@ pub fn unlinked_neighbors(
pairs
}
/// Approximate spectral coordinates for a new node using Nyström extension.
///
/// Given a new node's edges to existing nodes, estimate where it would
/// land in spectral space without recomputing the full decomposition:
/// the result is the edge-strength-weighted average of the embedded
/// neighbors' coordinates. Returns `None` when no neighbor is embedded
/// (or the total weight is effectively zero).
pub fn nystrom_project(
    emb: &SpectralEmbedding,
    neighbors: &[(&str, f32)], // (key, edge_strength)
) -> Option<Vec<f64>> {
    let mut acc = vec![0.0f64; emb.dims];
    let mut mass = 0.0f64;
    for &(key, strength) in neighbors {
        // Neighbors without coordinates in the embedding are skipped.
        let Some(coords) = emb.coords.get(key) else { continue };
        let w = f64::from(strength);
        for (i, &c) in coords.iter().enumerate() {
            acc[i] += w * c;
        }
        mass += w;
    }
    // Guard against division by (near-)zero when nothing matched.
    if mass >= 1e-8 {
        Some(acc.iter().map(|s| s / mass).collect())
    } else {
        None
    }
}
/// Classify a spectral position: well-integrated, outlier, bridge, or orphan.
pub fn classify_position(pos: &SpectralPosition) -> &'static str {
if pos.bridge_score > 0.7 {

View file

@ -200,27 +200,6 @@ impl Store {
});
}
/// Adjust edge strength between two nodes by a delta.
/// Clamps to [0.05, 0.95]. Returns (old_strength, new_strength, edges_modified).
///
/// Matches edges in either direction (a→b or b→a), skipping deleted
/// relations. If several relations match, `old`/`new` reflect the last
/// one touched while the count reflects all of them.
pub fn adjust_edge_strength(&mut self, key_a: &str, key_b: &str, delta: f32) -> (f32, f32, usize) {
    let mut old = 0.0f32;
    let mut new = 0.0f32;
    let mut modified = 0;
    for rel in self.relations.iter_mut().filter(|r| !r.deleted) {
        let connects = (rel.source_key == key_a && rel.target_key == key_b)
            || (rel.source_key == key_b && rel.target_key == key_a);
        if !connects {
            continue;
        }
        old = rel.strength;
        rel.strength = (rel.strength + delta).clamp(0.05, 0.95);
        new = rel.strength;
        rel.version += 1;
        modified += 1;
    }
    (old, new, modified)
}
pub fn record_gap(&mut self, desc: &str) {
self.gaps.push(GapRecord {
description: desc.to_string(),
@ -307,18 +286,6 @@ impl Store {
Ok((hubs_capped, to_delete.len()))
}
/// Update graph-derived fields on all nodes.
///
/// Rebuilds the graph from the store, then refreshes each node's
/// community id, clustering coefficient, and degree from it.
pub fn update_graph_metrics(&mut self) {
    let graph = self.build_graph();
    let community_of = graph.communities();
    for (key, node) in self.nodes.iter_mut() {
        node.community_id = community_of.get(key).copied();
        node.clustering_coefficient = Some(graph.clustering_coefficient(key));
        node.degree = Some(graph.degree(key) as u32);
    }
}
/// Set a node's weight directly. Returns (old, new).
pub fn set_weight(&mut self, key: &str, weight: f32) -> Result<(f32, f32), String> {
let weight = weight.clamp(0.01, 1.0);

View file

@ -97,16 +97,6 @@ fn log_event(job: &str, event: &str, detail: &str) {
jobkit::daemon::event_log::log(&logs_dir(), job, event, detail);
}
/// Public wrapper for logging from other agent modules.
/// `log_event` itself is private to this module; this re-exposes it.
pub fn log_event_pub(job: &str, event: &str, detail: &str) {
log_event(job, event, detail);
}
/// Verbose log — only written if verbose logging is enabled.
pub fn log_verbose(job: &str, event: &str, detail: &str) {
// Routed to jobkit's verbose sink — presumably the sink checks the
// verbose-enabled flag itself, so callers can invoke unconditionally
// (confirm in jobkit::daemon::event_log).
jobkit::daemon::event_log::verbose(&crate::config::get().data_dir, job, event, detail);
}
// --- Job functions (direct, no subprocess) ---
static DAEMON_POOL: std::sync::OnceLock<Arc<jobkit::ResourcePool>> = std::sync::OnceLock::new();
@ -1164,16 +1154,6 @@ pub fn rpc_consolidate() -> Result<(), String> {
}
}
/// Record search hits for the given keys (fire-and-forget from memory-search).
///
/// A no-op on an empty key list. Keys are tab-joined into a single
/// `record-hits` RPC command; errors only when the daemon is down.
pub fn rpc_record_hits(keys: &[&str]) -> Result<(), String> {
    if keys.is_empty() {
        return Ok(());
    }
    let cmd = format!("record-hits {}", keys.join("\t"));
    if send_rpc(&cmd).is_some() {
        Ok(())
    } else {
        Err("Daemon not running.".into())
    }
}
pub fn rpc_run_agent(agent: &str, count: usize) -> Result<(), String> {
let cmd = format!("run-agent {} {}", agent, count);
match send_rpc(&cmd) {

View file

@ -335,20 +335,6 @@ pub fn format_rename_targets(store: &Store, keys: &[String]) -> String {
out
}
/// Get split candidates sorted by size (largest first).
///
/// Candidates are non-deleted Semantic nodes whose key does not start
/// with `_` (internal nodes). Ties keep store iteration order (stable sort).
pub fn split_candidates(store: &Store) -> Vec<String> {
    let mut sized: Vec<(&str, usize)> = Vec::new();
    for (key, node) in store.nodes.iter() {
        let eligible = !key.starts_with('_')
            && !node.deleted
            && matches!(node.node_type, crate::store::NodeType::Semantic);
        if eligible {
            sized.push((key.as_str(), node.content.len()));
        }
    }
    sized.sort_by(|a, b| b.1.cmp(&a.1));
    sized.into_iter().map(|(k, _)| k.to_string()).collect()
}
/// Format a single node for split-plan prompt (phase 1)
pub fn format_split_plan_node(store: &Store, graph: &Graph, key: &str) -> String {
let communities = graph.communities();
@ -393,32 +379,6 @@ pub fn format_split_plan_node(store: &Store, graph: &Graph, key: &str) -> String
out
}
/// Build split-plan prompt for a single node (phase 1).
/// Uses the split.agent template with placeholders resolved for the given key.
pub fn split_plan_prompt(store: &Store, key: &str) -> Result<String, String> {
    let def = super::defs::get_def("split")
        .ok_or_else(|| "no split.agent file".to_string())?;
    let graph = store.build_graph();
    // Override the query — we already know exactly which key to split.
    let keys = vec![key.to_string()];
    let first_step = def.steps.first()
        .ok_or_else(|| "split.agent has no steps".to_string())?;
    let (prompt, _) =
        super::defs::resolve_placeholders(&first_step.prompt, store, &graph, &keys, 1);
    Ok(prompt)
}
/// Build split-extract prompt for one child (phase 2).
///
/// Fails when the parent node does not exist; otherwise fills the
/// split-extract template with the child's key/description/sections and
/// the parent's full content.
pub fn split_extract_prompt(store: &Store, parent_key: &str, child_key: &str, child_desc: &str, child_sections: &str) -> Result<String, String> {
    let parent = store.nodes.get(parent_key)
        .ok_or_else(|| format!("No node '{}'", parent_key))?;
    let substitutions = [
        ("{{CHILD_KEY}}", child_key),
        ("{{CHILD_DESC}}", child_desc),
        ("{{CHILD_SECTIONS}}", child_sections),
        ("{{PARENT_CONTENT}}", parent.content.as_str()),
    ];
    load_prompt("split-extract", &substitutions)
}
/// Show consolidation batch status or generate an agent prompt.
pub fn consolidation_batch(store: &Store, count: usize, auto: bool) -> Result<(), String> {
if auto {

View file

@ -120,12 +120,6 @@ pub fn all_definitions() -> Vec<ToolDef> {
defs
}
/// Return only memory tool definitions (no filesystem access).
/// Used by subconscious agents which should not write files.
pub fn memory_definitions() -> Vec<ToolDef> {
// Delegates to the memory module's own definition list unchanged.
memory::definitions()
}
/// Return memory + journal tool definitions.
/// Used by the journal agent only.
pub fn memory_and_journal_definitions() -> Vec<ToolDef> {

View file

@ -57,16 +57,3 @@ pub fn jsonl_append<T: Serialize>(path: &Path, item: &T) -> Result<(), String> {
.map_err(|e| format!("write {}: {}", path.display(), e))
}
/// Parse a timestamp string to unix epoch seconds.
/// Handles: "2026-03-05T19:56:00", "2026-03-05T19:56", "2026-03-05 19:56:00", "2026-03-05 19:56"
pub fn parse_timestamp_to_epoch(ts: &str) -> Option<i64> {
use chrono::{Local, NaiveDateTime, TimeZone};
let formats = ["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M", "%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M"];
for fmt in &formats {
if let Ok(ndt) = NaiveDateTime::parse_from_str(ts, fmt)
&& let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
return Some(dt.timestamp());
}
}
None
}