Reduce pub visibility: hippocampus, subconscious internals
hippocampus: cursor navigation, transcript parsing, and similarity functions reduced to pub(crate); counters::open() made private. subconscious: all format_* prompt helpers reduced to pub(super); load_defs, keys_to_replay_items, and consolidate_full_with_progress made private. Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
parent
9737641c86
commit
1f873140ae
7 changed files with 20 additions and 20 deletions
|
|
@ -18,7 +18,7 @@ fn db_path() -> PathBuf {
|
|||
}
|
||||
|
||||
/// Open (or create) the counters database.
|
||||
pub fn open() -> Result<Database, String> {
|
||||
fn open() -> Result<Database, String> {
|
||||
Database::create(db_path()).map_err(|e| format!("open counters db: {}", e))
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ pub fn clear() -> Result<(), String> {
|
|||
|
||||
/// Temporal neighbors: nodes of the same type, sorted by timestamp.
|
||||
/// Returns (prev, next) keys relative to the given node.
|
||||
pub fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<String>) {
|
||||
pub(crate) fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<String>) {
|
||||
let Some(node) = store.nodes.get(key) else { return (None, None) };
|
||||
let node_type = node.node_type;
|
||||
|
||||
|
|
@ -62,7 +62,7 @@ pub fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<S
|
|||
|
||||
/// Digest hierarchy: find the parent digest for a node.
|
||||
/// Journal → daily, daily → weekly, weekly → monthly.
|
||||
pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
|
||||
pub(crate) fn digest_parent(store: &Store, key: &str) -> Option<String> {
|
||||
let node = store.nodes.get(key)?;
|
||||
|
||||
let parent_type = match node.node_type {
|
||||
|
|
@ -112,7 +112,7 @@ pub fn digest_parent(store: &Store, key: &str) -> Option<String> {
|
|||
|
||||
/// Digest children: find nodes that feed into this digest.
|
||||
/// Monthly → weeklies, weekly → dailies, daily → journal entries.
|
||||
pub fn digest_children(store: &Store, key: &str) -> Vec<String> {
|
||||
pub(crate) fn digest_children(store: &Store, key: &str) -> Vec<String> {
|
||||
let Some(node) = store.nodes.get(key) else { return vec![] };
|
||||
|
||||
let child_type = match node.node_type {
|
||||
|
|
@ -157,7 +157,7 @@ pub fn digest_children(store: &Store, key: &str) -> Vec<String> {
|
|||
}
|
||||
|
||||
/// Graph neighbors sorted by edge strength.
|
||||
pub fn graph_neighbors(store: &Store, key: &str) -> Vec<(String, f32)> {
|
||||
pub(crate) fn graph_neighbors(store: &Store, key: &str) -> Vec<(String, f32)> {
|
||||
let mut neighbors: Vec<(String, f32)> = Vec::new();
|
||||
for r in &store.relations {
|
||||
if r.deleted { continue; }
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ use std::collections::HashMap;
|
|||
/// with a reversed-suffix trie: single pass from the end of the word
|
||||
/// matches the longest applicable suffix in O(suffix_len) instead of
|
||||
/// O(n_rules).
|
||||
pub fn stem(word: &str) -> String {
|
||||
pub(crate) fn stem(word: &str) -> String {
|
||||
let mut w = word.to_lowercase();
|
||||
if w.len() <= 3 { return w; }
|
||||
|
||||
|
|
@ -48,7 +48,7 @@ fn strip_suffix_inplace(word: &mut String, suffix: &str, replacement: &str) {
|
|||
}
|
||||
|
||||
/// Tokenize and stem a text into a term frequency map
|
||||
pub fn term_frequencies(text: &str) -> HashMap<String, u32> {
|
||||
pub(crate) fn term_frequencies(text: &str) -> HashMap<String, u32> {
|
||||
let mut tf = HashMap::new();
|
||||
for word in text.split(|c: char| !c.is_alphanumeric()) {
|
||||
if word.len() > 2 {
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@ impl<'a> Iterator for JsonlBackwardIter<'a> {
|
|||
/// Scans backward for a user-type message whose content starts with
|
||||
/// "This session is being continued". Returns the byte offset of the
|
||||
/// JSON object's opening brace.
|
||||
pub fn find_last_compaction(data: &[u8]) -> Option<usize> {
|
||||
pub(crate) fn find_last_compaction(data: &[u8]) -> Option<usize> {
|
||||
let marker = b"This session is being continued";
|
||||
|
||||
for obj_bytes in JsonlBackwardIter::new(data) {
|
||||
|
|
@ -135,7 +135,7 @@ pub fn find_last_compaction(data: &[u8]) -> Option<usize> {
|
|||
|
||||
/// Find the byte offset of the last compaction in a transcript file.
|
||||
/// Returns None if the file can't be opened or has no compaction.
|
||||
pub fn find_last_compaction_in_file(path: &str) -> Option<u64> {
|
||||
pub(crate) fn find_last_compaction_in_file(path: &str) -> Option<u64> {
|
||||
if path.is_empty() { return None; }
|
||||
|
||||
let file = fs::File::open(path).ok()?;
|
||||
|
|
@ -147,7 +147,7 @@ pub fn find_last_compaction_in_file(path: &str) -> Option<u64> {
|
|||
}
|
||||
|
||||
/// Mmap a transcript file. Returns (Mmap, File) to keep both alive.
|
||||
pub fn mmap_transcript(path: &str) -> Option<(Mmap, fs::File)> {
|
||||
pub(crate) fn mmap_transcript(path: &str) -> Option<(Mmap, fs::File)> {
|
||||
let file = fs::File::open(path).ok()?;
|
||||
let meta = file.metadata().ok()?;
|
||||
if meta.len() == 0 { return None; }
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue