Reduce pub visibility: hippocampus, subconscious internals

hippocampus: cursor navigation, transcript parsing, and similarity
functions changed to pub(crate); counters::open() made private.

subconscious: all format_* prompt helpers to pub(super),
load_defs and keys_to_replay_items made private,
consolidate_full_with_progress made private.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-07 17:29:12 -04:00
parent 9737641c86
commit 1f873140ae
7 changed files with 20 additions and 20 deletions

View file

@ -18,7 +18,7 @@ fn db_path() -> PathBuf {
}
/// Open (or create) the counters database.
pub fn open() -> Result<Database, String> {
fn open() -> Result<Database, String> {
Database::create(db_path()).map_err(|e| format!("open counters db: {}", e))
}

View file

@ -43,7 +43,7 @@ pub fn clear() -> Result<(), String> {
/// Temporal neighbors: nodes of the same type, sorted by timestamp.
/// Returns (prev, next) keys relative to the given node.
pub fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<String>) {
pub(crate) fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<String>) {
let Some(node) = store.nodes.get(key) else { return (None, None) };
let node_type = node.node_type;
@ -62,7 +62,7 @@ pub fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<S
/// Digest hierarchy: find the parent digest for a node.
/// Journal → daily, daily → weekly, weekly → monthly.
pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
pub(crate) fn digest_parent(store: &Store, key: &str) -> Option<String> {
let node = store.nodes.get(key)?;
let parent_type = match node.node_type {
@ -112,7 +112,7 @@ pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
/// Digest children: find nodes that feed into this digest.
/// Monthly → weeklies, weekly → dailies, daily → journal entries.
pub fn digest_children(store: &Store, key: &str) -> Vec<String> {
pub(crate) fn digest_children(store: &Store, key: &str) -> Vec<String> {
let Some(node) = store.nodes.get(key) else { return vec![] };
let child_type = match node.node_type {
@ -157,7 +157,7 @@ pub fn digest_children(store: &Store, key: &str) -> Vec<String> {
}
/// Graph neighbors sorted by edge strength.
pub fn graph_neighbors(store: &Store, key: &str) -> Vec<(String, f32)> {
pub(crate) fn graph_neighbors(store: &Store, key: &str) -> Vec<(String, f32)> {
let mut neighbors: Vec<(String, f32)> = Vec::new();
for r in &store.relations {
if r.deleted { continue; }

View file

@ -14,7 +14,7 @@ use std::collections::HashMap;
/// with a reversed-suffix trie: single pass from the end of the word
/// matches the longest applicable suffix in O(suffix_len) instead of
/// O(n_rules).
pub fn stem(word: &str) -> String {
pub(crate) fn stem(word: &str) -> String {
let mut w = word.to_lowercase();
if w.len() <= 3 { return w; }
@ -48,7 +48,7 @@ fn strip_suffix_inplace(word: &mut String, suffix: &str, replacement: &str) {
}
/// Tokenize and stem a text into a term frequency map
pub fn term_frequencies(text: &str) -> HashMap<String, u32> {
pub(crate) fn term_frequencies(text: &str) -> HashMap<String, u32> {
let mut tf = HashMap::new();
for word in text.split(|c: char| !c.is_alphanumeric()) {
if word.len() > 2 {

View file

@ -103,7 +103,7 @@ impl<'a> Iterator for JsonlBackwardIter<'a> {
/// Scans backward for a user-type message whose content starts with
/// "This session is being continued". Returns the byte offset of the
/// JSON object's opening brace.
pub fn find_last_compaction(data: &[u8]) -> Option<usize> {
pub(crate) fn find_last_compaction(data: &[u8]) -> Option<usize> {
let marker = b"This session is being continued";
for obj_bytes in JsonlBackwardIter::new(data) {
@ -135,7 +135,7 @@ pub fn find_last_compaction(data: &[u8]) -> Option<usize> {
/// Find the byte offset of the last compaction in a transcript file.
/// Returns None if the file can't be opened or has no compaction.
pub fn find_last_compaction_in_file(path: &str) -> Option<u64> {
pub(crate) fn find_last_compaction_in_file(path: &str) -> Option<u64> {
if path.is_empty() { return None; }
let file = fs::File::open(path).ok()?;
@ -147,7 +147,7 @@ pub fn find_last_compaction_in_file(path: &str) -> Option<u64> {
}
/// Mmap a transcript file. Returns (Mmap, File) to keep both alive.
pub fn mmap_transcript(path: &str) -> Option<(Mmap, fs::File)> {
pub(crate) fn mmap_transcript(path: &str) -> Option<(Mmap, fs::File)> {
let file = fs::File::open(path).ok()?;
let meta = file.metadata().ok()?;
if meta.len() == 0 { return None; }

View file

@ -25,7 +25,7 @@ pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
consolidate_full_with_progress(store, &|_| {})
}
pub fn consolidate_full_with_progress(
fn consolidate_full_with_progress(
store: &mut Store,
on_progress: &dyn Fn(&str),
) -> Result<(), String> {

View file

@ -163,7 +163,7 @@ pub fn agents_dir() -> PathBuf {
}
/// Load all agent definitions.
pub fn load_defs() -> Vec<AgentDef> {
fn load_defs() -> Vec<AgentDef> {
let dir = agents_dir();
let Ok(entries) = std::fs::read_dir(&dir) else { return Vec::new() };
@ -814,7 +814,7 @@ pub fn run_agent(
}
/// Convert a list of keys to ReplayItems with priority and graph metrics.
pub fn keys_to_replay_items(
fn keys_to_replay_items(
store: &Store,
keys: &[String],
graph: &Graph,

View file

@ -23,7 +23,7 @@ pub struct AgentBatch {
pub node_keys: Vec<String>,
}
pub fn format_topology_header(graph: &Graph) -> String {
pub(super) fn format_topology_header(graph: &Graph) -> String {
let sigma = graph.small_world_sigma();
let alpha = graph.degree_power_law_exponent();
let gini = graph.degree_gini();
@ -66,7 +66,7 @@ pub fn format_topology_header(graph: &Graph) -> String {
n, e, graph.community_count(), sigma, alpha, gini, avg_cc, hub_list)
}
pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph) -> String {
pub(super) fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph) -> String {
let hub_thresh = graph.hub_threshold();
let mut out = String::new();
for item in items {
@ -139,7 +139,7 @@ pub fn format_nodes_section(store: &Store, items: &[ReplayItem], graph: &Graph)
out
}
pub fn format_health_section(store: &Store, graph: &Graph) -> String {
pub(super) fn format_health_section(store: &Store, graph: &Graph) -> String {
use crate::graph;
let health = graph::health_report(graph, store);
@ -195,7 +195,7 @@ pub fn format_health_section(store: &Store, graph: &Graph) -> String {
out
}
pub fn format_pairs_section(
pub(super) fn format_pairs_section(
pairs: &[(String, String, f32)],
store: &Store,
graph: &Graph,
@ -230,7 +230,7 @@ pub fn format_pairs_section(
out
}
pub fn format_rename_candidates(store: &Store, count: usize) -> (Vec<String>, String) {
pub(super) fn format_rename_candidates(store: &Store, count: usize) -> (Vec<String>, String) {
let mut candidates: Vec<(&str, &crate::store::Node)> = store.nodes.iter()
.filter(|(key, node)| {
if key.starts_with("_facts-") { return true; }
@ -293,7 +293,7 @@ pub fn format_rename_candidates(store: &Store, count: usize) -> (Vec<String>, St
}
/// Format specific target keys as rename candidates (for --target mode)
pub fn format_rename_targets(store: &Store, keys: &[String]) -> String {
pub(super) fn format_rename_targets(store: &Store, keys: &[String]) -> String {
let mut out = String::new();
out.push_str(&format!("## Nodes to rename ({} targets)\n\n", keys.len()));
@ -325,7 +325,7 @@ pub fn format_rename_targets(store: &Store, keys: &[String]) -> String {
}
/// Format a single node for split-plan prompt (phase 1)
pub fn format_split_plan_node(store: &Store, graph: &Graph, key: &str) -> String {
pub(super) fn format_split_plan_node(store: &Store, graph: &Graph, key: &str) -> String {
let communities = graph.communities();
let node = match store.nodes.get(key) {
Some(n) => n,