diff --git a/src/agent/api/mod.rs b/src/agent/api/mod.rs index 6c8c77b..85d0543 100644 --- a/src/agent/api/mod.rs +++ b/src/agent/api/mod.rs @@ -74,8 +74,6 @@ impl ApiClient { /// Start a streaming chat completion. Returns a receiver of StreamEvents. /// The caller (runner) reads events and handles routing to the UI. /// - /// The old `chat_completion_stream` method is kept for the subconscious - /// agents which don't need fine-grained stream control. pub fn start_stream( &self, messages: &[Message], @@ -109,20 +107,6 @@ impl ApiClient { rx } - /// Streaming chat completion. Returns the assembled response message - /// plus optional usage stats. Text tokens stream through the UI channel. - /// - /// Used by subconscious agents that don't need per-token routing. - pub async fn chat_completion_stream( - &self, - messages: &[Message], - tools: Option<&[ToolDef]>, - ui_tx: &UiSender, - reasoning_effort: &str, - ) -> Result<(Message, Option)> { - self.chat_completion_stream_temp(messages, tools, ui_tx, reasoning_effort, None, None).await - } - pub async fn chat_completion_stream_temp( &self, messages: &[Message], diff --git a/src/agent/observe.rs b/src/agent/observe.rs index 4b696cd..5fb7d92 100644 --- a/src/agent/observe.rs +++ b/src/agent/observe.rs @@ -70,11 +70,6 @@ fn cursor_path() -> PathBuf { session_dir().join("read-cursor") } // --- Client commands --- -/// Print new output since last read. With -f, also stream live from socket. -pub async fn cmd_read(follow: bool, debug: bool) -> anyhow::Result<()> { - cmd_read_inner(follow, false, debug).await -} - /// Print new output since last read. With -f, stream live. With block, wait for one response. 
pub async fn cmd_read_inner(follow: bool, block: bool, debug: bool) -> anyhow::Result<()> { use std::io::{Read, Seek, SeekFrom, Write}; diff --git a/src/config.rs b/src/config.rs index 93e0393..c9c1521 100644 --- a/src/config.rs +++ b/src/config.rs @@ -443,17 +443,6 @@ pub struct SessionConfig { pub app: AppConfig, } -impl SessionConfig { - /// Join context parts into a single string for legacy interfaces. - #[allow(dead_code)] - pub fn context_message(&self) -> String { - self.context_parts.iter() - .map(|(name, content)| format!("## {}\n\n{}", name, content)) - .collect::>() - .join("\n\n---\n\n") - } -} - /// A fully resolved model ready to construct an ApiClient. #[allow(dead_code)] pub struct ResolvedModel { diff --git a/src/hippocampus/cursor.rs b/src/hippocampus/cursor.rs index b5f4418..1b59fb9 100644 --- a/src/hippocampus/cursor.rs +++ b/src/hippocampus/cursor.rs @@ -313,13 +313,3 @@ pub fn move_down(store: &Store) -> Result<(), String> { None => Err(format!("No children for {}", key)), } } - -/// Move cursor to a graph neighbor by index (from the neighbors list). -pub fn move_to_neighbor(store: &Store, index: usize) -> Result<(), String> { - let key = get().ok_or("No cursor set")?; - let neighbors = graph_neighbors(store, &key); - let (target, _) = neighbors.get(index) - .ok_or_else(|| format!("Neighbor index {} out of range (have {})", index, neighbors.len()))?; - set(target)?; - show(store) -} diff --git a/src/hippocampus/lookups.rs b/src/hippocampus/lookups.rs index fb0a522..818147f 100644 --- a/src/hippocampus/lookups.rs +++ b/src/hippocampus/lookups.rs @@ -197,8 +197,3 @@ pub fn dump_resolved(date: &str, keys: &[String]) -> Result, Ok(resolved) } - -/// Hash a key (exposed for testing/external use). 
-pub fn hash_key(key: &str) -> u64 { - fnv1a(key) -} diff --git a/src/hippocampus/query/engine.rs b/src/hippocampus/query/engine.rs index 890b879..915ec14 100644 --- a/src/hippocampus/query/engine.rs +++ b/src/hippocampus/query/engine.rs @@ -1441,15 +1441,6 @@ pub fn search_weighted( search_weighted_inner(terms, store, false, 5) } -/// Like search_weighted but with debug output and configurable result count. -pub fn search_weighted_debug( - terms: &BTreeMap, - store: &impl StoreView, - max_results: usize, -) -> Vec { - search_weighted_inner(terms, store, true, max_results) -} - fn search_weighted_inner( terms: &BTreeMap, store: &impl StoreView, @@ -1496,41 +1487,3 @@ pub fn search(query: &str, store: &impl StoreView) -> Vec { search_weighted(&terms, store) } -/// Extract meaningful search terms from natural language. -/// Strips common English stop words, returns up to max_terms words. -pub fn extract_query_terms(text: &str, max_terms: usize) -> String { - const STOP_WORDS: &[&str] = &[ - "the", "a", "an", "is", "are", "was", "were", "do", "does", "did", - "have", "has", "had", "will", "would", "could", "should", "can", - "may", "might", "shall", "been", "being", "to", "of", "in", "for", - "on", "with", "at", "by", "from", "as", "but", "or", "and", "not", - "no", "if", "then", "than", "that", "this", "it", "its", "my", - "your", "our", "we", "you", "i", "me", "he", "she", "they", "them", - "what", "how", "why", "when", "where", "about", "just", "let", - "want", "tell", "show", "think", "know", "see", "look", "make", - "get", "go", "some", "any", "all", "very", "really", "also", "too", - "so", "up", "out", "here", "there", - ]; - - text.to_lowercase() - .split(|c: char| !c.is_alphanumeric()) - .filter(|w| !w.is_empty() && w.len() > 2 && !STOP_WORDS.contains(w)) - .take(max_terms) - .collect::>() - .join(" ") -} - -/// Format search results as text lines (for hook consumption). 
-pub fn format_results(results: &[SearchResult]) -> String { - let mut out = String::new(); - for (i, r) in results.iter().enumerate() { - let marker = if r.is_direct { "→" } else { " " }; - out.push_str(&format!("{}{:2}. [{:.2}/{:.2}] {}", - marker, i + 1, r.activation, r.activation, r.key)); - out.push('\n'); - if let Some(ref snippet) = r.snippet { - out.push_str(&format!(" {}\n", snippet)); - } - } - out -} diff --git a/src/hippocampus/spectral.rs b/src/hippocampus/spectral.rs index 881ffd8..8b90fb6 100644 --- a/src/hippocampus/spectral.rs +++ b/src/hippocampus/spectral.rs @@ -287,16 +287,6 @@ pub fn nearest_neighbors( distances } -/// Find nearest neighbors to a set of seed nodes (multi-seed query). -/// Returns nodes ranked by minimum distance to any seed. -pub fn nearest_to_seeds( - emb: &SpectralEmbedding, - seeds: &[&str], - k: usize, -) -> Vec<(String, f64)> { - nearest_to_seeds_weighted(emb, &seeds.iter().map(|&s| (s, 1.0)).collect::<Vec<_>>(), None, k) -} - /// Find nearest neighbors to weighted seed nodes, using link weights. /// /// Each seed has a weight (from query term weighting). For candidates @@ -531,35 +521,6 @@ pub fn unlinked_neighbors( pairs } -/// Approximate spectral coordinates for a new node using Nyström extension. -/// -/// Given a new node's edges to existing nodes, estimate where it would -/// land in spectral space without recomputing the full decomposition. -/// Uses weighted average of neighbors' coordinates, weighted by edge strength. 
-pub fn nystrom_project( - emb: &SpectralEmbedding, - neighbors: &[(&str, f32)], // (key, edge_strength) -) -> Option<Vec<f64>> { - let mut weighted_sum = vec![0.0f64; emb.dims]; - let mut total_weight = 0.0f64; - - for &(key, strength) in neighbors { - if let Some(coords) = emb.coords.get(key) { - let w = strength as f64; - for (i, &c) in coords.iter().enumerate() { - weighted_sum[i] += w * c; - } - total_weight += w; - } - } - - if total_weight < 1e-8 { - return None; - } - - Some(weighted_sum.iter().map(|s| s / total_weight).collect()) -} - /// Classify a spectral position: well-integrated, outlier, bridge, or orphan. pub fn classify_position(pos: &SpectralPosition) -> &'static str { if pos.bridge_score > 0.7 { diff --git a/src/hippocampus/store/ops.rs b/src/hippocampus/store/ops.rs index e8b21ca..9771cc6 100644 --- a/src/hippocampus/store/ops.rs +++ b/src/hippocampus/store/ops.rs @@ -200,27 +200,6 @@ impl Store { }); } - /// Adjust edge strength between two nodes by a delta. - /// Clamps to [0.05, 0.95]. Returns (old_strength, new_strength, edges_modified). 
- pub fn adjust_edge_strength(&mut self, key_a: &str, key_b: &str, delta: f32) -> (f32, f32, usize) { - let mut old = 0.0f32; - let mut new = 0.0f32; - let mut count = 0; - for rel in &mut self.relations { - if rel.deleted { continue; } - if (rel.source_key == key_a && rel.target_key == key_b) - || (rel.source_key == key_b && rel.target_key == key_a) - { - old = rel.strength; - rel.strength = (rel.strength + delta).clamp(0.05, 0.95); - new = rel.strength; - rel.version += 1; - count += 1; - } - } - (old, new, count) - } - pub fn record_gap(&mut self, desc: &str) { self.gaps.push(GapRecord { description: desc.to_string(), @@ -307,18 +286,6 @@ impl Store { Ok((hubs_capped, to_delete.len())) } - /// Update graph-derived fields on all nodes - pub fn update_graph_metrics(&mut self) { - let g = self.build_graph(); - let communities = g.communities(); - - for (key, node) in &mut self.nodes { - node.community_id = communities.get(key).copied(); - node.clustering_coefficient = Some(g.clustering_coefficient(key)); - node.degree = Some(g.degree(key) as u32); - } - } - /// Set a node's weight directly. Returns (old, new). pub fn set_weight(&mut self, key: &str, weight: f32) -> Result<(f32, f32), String> { let weight = weight.clamp(0.01, 1.0); diff --git a/src/subconscious/daemon.rs b/src/subconscious/daemon.rs index 5966308..8c49644 100644 --- a/src/subconscious/daemon.rs +++ b/src/subconscious/daemon.rs @@ -97,16 +97,6 @@ fn log_event(job: &str, event: &str, detail: &str) { jobkit::daemon::event_log::log(&logs_dir(), job, event, detail); } -/// Public wrapper for logging from other agent modules. -pub fn log_event_pub(job: &str, event: &str, detail: &str) { - log_event(job, event, detail); -} - -/// Verbose log — only written if verbose logging is enabled. 
-pub fn log_verbose(job: &str, event: &str, detail: &str) { - jobkit::daemon::event_log::verbose(&crate::config::get().data_dir, job, event, detail); -} - // --- Job functions (direct, no subprocess) --- static DAEMON_POOL: std::sync::OnceLock> = std::sync::OnceLock::new(); @@ -1164,16 +1154,6 @@ pub fn rpc_consolidate() -> Result<(), String> { } } -/// Record search hits for the given keys (fire-and-forget from memory-search). -pub fn rpc_record_hits(keys: &[&str]) -> Result<(), String> { - if keys.is_empty() { return Ok(()); } - let cmd = format!("record-hits {}", keys.join("\t")); - match send_rpc(&cmd) { - Some(_) => Ok(()), - None => Err("Daemon not running.".into()), - } -} - pub fn rpc_run_agent(agent: &str, count: usize) -> Result<(), String> { let cmd = format!("run-agent {} {}", agent, count); match send_rpc(&cmd) { diff --git a/src/subconscious/prompts.rs b/src/subconscious/prompts.rs index ca7adef..4383484 100644 --- a/src/subconscious/prompts.rs +++ b/src/subconscious/prompts.rs @@ -335,20 +335,6 @@ pub fn format_rename_targets(store: &Store, keys: &[String]) -> String { out } -/// Get split candidates sorted by size (largest first) -pub fn split_candidates(store: &Store) -> Vec { - let mut candidates: Vec<(&str, usize)> = store.nodes.iter() - .filter(|(key, node)| { - !key.starts_with('_') - && !node.deleted - && matches!(node.node_type, crate::store::NodeType::Semantic) - }) - .map(|(k, n)| (k.as_str(), n.content.len())) - .collect(); - candidates.sort_by(|a, b| b.1.cmp(&a.1)); - candidates.into_iter().map(|(k, _)| k.to_string()).collect() -} - /// Format a single node for split-plan prompt (phase 1) pub fn format_split_plan_node(store: &Store, graph: &Graph, key: &str) -> String { let communities = graph.communities(); @@ -393,32 +379,6 @@ pub fn format_split_plan_node(store: &Store, graph: &Graph, key: &str) -> String out } -/// Build split-plan prompt for a single node (phase 1). 
-/// Uses the split.agent template with placeholders resolved for the given key. -pub fn split_plan_prompt(store: &Store, key: &str) -> Result { - let def = super::defs::get_def("split") - .ok_or_else(|| "no split.agent file".to_string())?; - let graph = store.build_graph(); - // Override the query — we have a specific key to split - let keys = vec![key.to_string()]; - let template = def.steps.first().map(|s| &s.prompt).ok_or_else(|| "split.agent has no steps".to_string())?; - let (prompt, _) = super::defs::resolve_placeholders(template, store, &graph, &keys, 1); - Ok(prompt) -} - -/// Build split-extract prompt for one child (phase 2) -pub fn split_extract_prompt(store: &Store, parent_key: &str, child_key: &str, child_desc: &str, child_sections: &str) -> Result { - let parent_content = store.nodes.get(parent_key) - .map(|n| n.content.as_str()) - .ok_or_else(|| format!("No node '{}'", parent_key))?; - load_prompt("split-extract", &[ - ("{{CHILD_KEY}}", child_key), - ("{{CHILD_DESC}}", child_desc), - ("{{CHILD_SECTIONS}}", child_sections), - ("{{PARENT_CONTENT}}", parent_content), - ]) -} - /// Show consolidation batch status or generate an agent prompt. pub fn consolidation_batch(store: &Store, count: usize, auto: bool) -> Result<(), String> { if auto { diff --git a/src/thought/mod.rs b/src/thought/mod.rs index 327e60d..d19caab 100644 --- a/src/thought/mod.rs +++ b/src/thought/mod.rs @@ -120,12 +120,6 @@ pub fn all_definitions() -> Vec { defs } -/// Return only memory tool definitions (no filesystem access). -/// Used by subconscious agents which should not write files. -pub fn memory_definitions() -> Vec { - memory::definitions() -} - /// Return memory + journal tool definitions. /// Used by the journal agent only. 
pub fn memory_and_journal_definitions() -> Vec { diff --git a/src/util.rs b/src/util.rs index 2a51e68..46c0e70 100644 --- a/src/util.rs +++ b/src/util.rs @@ -57,16 +57,3 @@ pub fn jsonl_append(path: &Path, item: &T) -> Result<(), String> { .map_err(|e| format!("write {}: {}", path.display(), e)) } -/// Parse a timestamp string to unix epoch seconds. -/// Handles: "2026-03-05T19:56:00", "2026-03-05T19:56", "2026-03-05 19:56:00", "2026-03-05 19:56" -pub fn parse_timestamp_to_epoch(ts: &str) -> Option<i64> { - use chrono::{Local, NaiveDateTime, TimeZone}; - let formats = ["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M", "%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M"]; - for fmt in &formats { - if let Ok(ndt) = NaiveDateTime::parse_from_str(ts, fmt) - && let Some(dt) = Local.from_local_datetime(&ndt).earliest() { - return Some(dt.timestamp()); - } - } - None -}