remove dead code: unused imports, functions, and fields

- Remove #![allow(dead_code)] from main.rs, fix all revealed warnings
- Delete unused schema_assimilation() from neuro/scoring.rs
- Delete duplicate memory_dir() wrapper from knowledge.rs
- Deduplicate load_prompt: knowledge.rs now calls neuro::load_prompt
- Remove unused timeout field from DigestLevel
- Remove unused imports (regex::Regex, Provenance, AnyView, Write)
- Mark OldEntry fields as #[allow(dead_code)] (needed for deserialization)

Co-Authored-By: ProofOfConcept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-03-08 20:51:56 -04:00
parent fc48ac7c7f
commit d5634c0034
9 changed files with 6 additions and 43 deletions

View file

@@ -338,7 +338,7 @@ pub fn run_daemon() -> Result<(), String> {
     // Load fact-mined keys too
     let fact_keys: HashSet<String> = {
-        use crate::store::{AnyView, StoreView};
+        use crate::store::StoreView;
         let view = crate::store::AnyView::load().ok();
         view.map(|v| {
             let mut keys = HashSet::new();

View file

@@ -20,7 +20,6 @@ struct DigestLevel {
     title: &'static str,
     period: &'static str,
     input_title: &'static str,
-    timeout: u64,
     child_name: Option<&'static str>, // None = journal (leaf), Some = child digest files
     /// Expand an arg into (canonical_label, dates covered).
     label_dates: fn(&str) -> Result<(String, Vec<String>), String>,
@@ -33,7 +32,6 @@ const DAILY: DigestLevel = DigestLevel {
    title: "Daily",
    period: "Date",
    input_title: "Journal entries",
-    timeout: 300,
    child_name: None,
    label_dates: |date| Ok((date.to_string(), vec![date.to_string()])),
    date_to_label: |date| Some(date.to_string()),
@@ -57,7 +55,6 @@ const WEEKLY: DigestLevel = DigestLevel {
    title: "Weekly",
    period: "Week",
    input_title: "Daily digests",
-    timeout: 300,
    child_name: Some("daily"),
    label_dates: |arg| {
        if !arg.contains('W') {
@@ -82,7 +79,6 @@ const MONTHLY: DigestLevel = DigestLevel {
    title: "Monthly",
    period: "Month",
    input_title: "Weekly digests",
-    timeout: 600,
    child_name: Some("weekly"),
    label_dates: |arg| {
        let (year, month) = if arg.len() <= 7 {

View file

@@ -11,7 +11,6 @@ use crate::llm::{call_sonnet, parse_json_response, semantic_keys};
 use crate::neuro;
 use crate::store::{self, Store, new_node, new_relation};
-use regex::Regex;
 use std::collections::hash_map::DefaultHasher;
 use std::collections::HashSet;
 use std::fs;

View file

@@ -21,15 +21,6 @@ use std::collections::{HashMap, HashSet};
 use std::fs;
 use std::path::{Path, PathBuf};
 
-fn memory_dir() -> PathBuf {
-    store::memory_dir()
-}
-
-fn prompts_dir() -> PathBuf {
-    let manifest = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(manifest).join("prompts")
-}
-
 fn projects_dir() -> PathBuf {
     let home = std::env::var("HOME").unwrap_or_else(|_| ".".into());
     PathBuf::from(home).join(".claude/projects")
@@ -339,8 +330,7 @@ fn agent_provenance(agent: &str) -> store::Provenance {
 // ---------------------------------------------------------------------------
 
 fn load_prompt(name: &str) -> Result<String, String> {
-    let path = prompts_dir().join(format!("{}.md", name));
-    fs::read_to_string(&path).map_err(|e| format!("load prompt {}: {}", name, e))
+    crate::neuro::load_prompt(name, &[])
 }
 
 fn get_graph_topology(store: &Store, graph: &Graph) -> String {

View file

@@ -1,4 +1,4 @@
-#![allow(dead_code)]
 // poc-memory: graph-structured memory for AI assistants
 //
 // Authors: ProofOfConcept <poc@bcachefs.org> and Kent Overstreet

View file

@@ -40,6 +40,7 @@ struct OldStore {
 }
 
 #[derive(Deserialize)]
+#[allow(dead_code)] // fields needed for deserialization of old format
 struct OldEntry {
     weight: f64,
     created: String,

View file

@@ -163,29 +163,6 @@ pub fn detect_interference(
         .collect()
 }
 
-/// Schema assimilation scoring for a new node.
-/// Returns how easily the node integrates into existing structure.
-///
-/// High fit (>0.5): auto-link, done
-/// Medium fit (0.2-0.5): agent reviews, proposes links
-/// Low fit (<0.2): deep examination needed — new schema seed, bridge, or noise?
-pub fn schema_assimilation(store: &Store, key: &str) -> (f32, &'static str) {
-    let graph = store.build_graph();
-    let fit = graph.clustering_coefficient(key);
-    let recommendation = if fit > 0.5 {
-        "auto-integrate"
-    } else if fit > 0.2 {
-        "agent-review"
-    } else if graph.degree(key) > 0 {
-        "deep-examine-bridge"
-    } else {
-        "deep-examine-orphan"
-    };
-    (fit, recommendation)
-}
-
 /// Agent allocation from the control loop
 pub struct ConsolidationPlan {
     pub replay_count: usize,

View file

@@ -21,7 +21,7 @@
 //   key ~ 'journal.*' AND degree > 10 | count
 //   * | sort weight asc | limit 20
 
-use crate::store::{NodeType, Provenance, RelationType, Store};
+use crate::store::{NodeType, RelationType, Store};
 use crate::graph::Graph;
 use regex::Regex;
 use std::collections::BTreeMap;

View file

@@ -16,7 +16,7 @@ use capnp::serialize;
 use std::collections::HashMap;
 use std::fs;
-use std::io::{BufReader, BufWriter, Seek, Write as IoWrite};
+use std::io::{BufReader, BufWriter, Seek};
 use std::path::Path;
 
 impl Store {
impl Store { impl Store {