cleanup: fix all build warnings, delete dead DMN context code

- Delete poc-daemon/src/context.rs dead code (git_context, work_state,
  irc_digest, recent_commits, uncommitted_files) — replaced by
  where-am-i.md and memory graph
- Remove unused imports (BufWriter, Context, similarity)
- Prefix unused variables (_store, _avg_cc, _episodic_ratio, _message)
- #[allow(dead_code)] on public API surface that's not yet wired
  (Message::assistant, ConversationLog::message_count/read_all,
  Config::context_message, ContextInfo fields)
- Fix to_capnp macro dead_code warning
- Rename _rewrite_store_DISABLED to snake_case

Only remaining warnings are in generated capnp code (can't fix).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Kent Overstreet 2026-03-20 14:20:34 -04:00
parent 378a09a9f8
commit 9d476841b8
17 changed files with 41 additions and 197 deletions

View file

@ -1066,15 +1066,15 @@ struct ContextPlan {
/// Index into recent conversation: skip messages before this
conv_trim: usize,
/// Total recent conversation messages
conv_count: usize,
_conv_count: usize,
/// Tokens used by full journal entries
full_tokens: usize,
_full_tokens: usize,
/// Tokens used by header-only journal entries
header_tokens: usize,
_header_tokens: usize,
/// Tokens used by conversation (after trimming)
conv_tokens: usize,
_conv_tokens: usize,
/// Total budget available (after identity, memory, reserve)
available: usize,
_available: usize,
}
/// Build a context window from conversation messages + journal entries.
@ -1233,11 +1233,11 @@ fn plan_context(
full_start,
entry_count: entries.len(),
conv_trim,
conv_count: recent.len(),
full_tokens: full_used,
header_tokens: header_used,
conv_tokens: trimmed_conv,
available,
_conv_count: recent.len(),
_full_tokens: full_used,
_header_tokens: header_used,
_conv_tokens: trimmed_conv,
_available: available,
}
}

View file

@ -14,7 +14,7 @@
mod anthropic;
mod openai;
use anyhow::{Context, Result};
use anyhow::Result;
use reqwest::Client;
use std::time::{Duration, Instant};

View file

@ -62,7 +62,7 @@ pub async fn stream(
let mut empty_deltas: u64 = 0;
let mut first_content_at: Option<Duration> = None;
let reasoning_enabled = reasoning_effort != "none";
let _reasoning_enabled = reasoning_effort != "none";
while let Some(event) = reader.next_event(&mut response).await? {
// OpenRouter sometimes embeds error objects in the stream

View file

@ -220,6 +220,7 @@ pub struct Config {
impl Config {
/// Join context parts into a single string for legacy interfaces.
#[allow(dead_code)]
pub fn context_message(&self) -> String {
self.context_parts.iter()
.map(|(name, content)| format!("## {}\n\n{}", name, content))

View file

@ -80,6 +80,7 @@ impl ConversationLog {
}
/// Count messages in the log without loading content.
#[allow(dead_code)]
pub fn message_count(&self) -> Result<usize> {
if !self.path.exists() {
return Ok(0);
@ -94,6 +95,7 @@ impl ConversationLog {
/// Read all messages from the log. Returns empty vec if log doesn't exist.
/// NOTE: Don't use this in hot paths — use read_tail() instead.
#[allow(dead_code)]
pub fn read_all(&self) -> Result<Vec<Message>> {
if !self.path.exists() {
return Ok(Vec::new());

View file

@ -280,6 +280,7 @@ impl Message {
}
}
#[allow(dead_code)]
pub fn assistant(content: impl Into<String>) -> Self {
Self {
role: Role::Assistant,

View file

@ -63,7 +63,9 @@ pub struct ContextInfo {
pub available_models: Vec<String>,
pub prompt_file: String,
pub backend: String,
#[allow(dead_code)]
pub instruction_files: Vec<(String, usize)>,
#[allow(dead_code)]
pub memory_files: Vec<(String, usize)>,
pub system_prompt_chars: usize,
pub context_message_chars: usize,

View file

@ -1,116 +1,10 @@
// Context gathering for idle prompts.
//
// Collects: recent git activity, work state, IRC messages.
// Notifications are now handled by the notify module and passed
// in separately by the caller.
// Notifications are handled by the notify module and passed
// in separately by the caller. Git context and IRC digest
// are now available through where-am-i.md and the memory graph.
use crate::home;
use std::fs;
use std::process::Command;
/// Summarize the last five commits in the bcachefs-tools checkout as a
/// single line. Returns an empty string if git fails or there are no commits.
pub fn recent_commits() -> String {
    let repo = home().join("bcachefs-tools");
    // Best-effort: any failure (missing repo, bad UTF-8) yields "".
    let stdout = Command::new("git")
        .args(["-C", &repo.to_string_lossy(), "log", "--oneline", "-5"])
        .output()
        .ok()
        .and_then(|o| String::from_utf8(o.stdout).ok())
        .unwrap_or_default();
    let lines: Vec<&str> = stdout.trim().lines().collect();
    match lines.is_empty() {
        true => String::new(),
        false => format!("Recent commits: {}", lines.join(" | ")),
    }
}
/// List up to five files with uncommitted changes in bcachefs-tools.
/// Returns an empty string if git fails or the working tree is clean.
pub fn uncommitted_files() -> String {
    let repo = home().join("bcachefs-tools");
    // Best-effort: any failure (missing repo, bad UTF-8) yields "".
    let stdout = Command::new("git")
        .args(["-C", &repo.to_string_lossy(), "diff", "--name-only"])
        .output()
        .ok()
        .and_then(|o| String::from_utf8(o.stdout).ok())
        .unwrap_or_default();
    let changed: Vec<&str> = stdout.trim().lines().take(5).collect();
    if changed.is_empty() {
        String::new()
    } else {
        format!("Uncommitted: {}", changed.join(" "))
    }
}
/// Combine recent-commit and uncommitted-file summaries into one short
/// context string, capped at 300 bytes. Returns an empty string when
/// there is nothing to report.
pub fn git_context() -> String {
    let mut parts = Vec::new();
    let c = recent_commits();
    if !c.is_empty() {
        parts.push(c);
    }
    let u = uncommitted_files();
    if !u.is_empty() {
        parts.push(u);
    }
    let mut ctx = parts.join(" | ");
    // Cap at 300 bytes without splitting a UTF-8 character. The previous
    // code compared the BYTE length but truncated by CHAR count, so
    // multibyte content could come out well over the intended cap (and it
    // allocated a fresh String to do so). Truncate in place instead,
    // backing up to the nearest char boundary.
    if ctx.len() > 300 {
        let mut cut = 300;
        while !ctx.is_char_boundary(cut) {
            cut -= 1;
        }
        ctx.truncate(cut);
    }
    ctx
}
/// Report the contents of the work-state memory file, prefixed for
/// display. Returns an empty string when the file is missing or blank.
pub fn work_state() -> String {
    let path = home().join(".claude/memory/work-state");
    fs::read_to_string(path)
        .ok()
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty())
        .map(|s| format!("Current work: {}", s))
        .unwrap_or_default()
}
/// Read the last 15 lines from each per-channel IRC log and join them
/// into a single digest. Returns an empty string when ambient IRC mode
/// is disabled, the log directory is unreadable, or all logs are empty.
pub fn irc_digest() -> String {
    // Only produce a digest when ambient IRC mode is enabled.
    let ambient = home().join(".claude/memory/irc-ambient");
    if !ambient.exists() {
        return String::new();
    }
    let log_dir = home().join(".claude/irc/logs");
    let entries = match fs::read_dir(&log_dir) {
        Ok(e) => e,
        Err(_) => return String::new(),
    };
    let mut sections: Vec<String> = Vec::new();
    for entry in entries.flatten() {
        let path = entry.path();
        // Channel name comes from the file stem; PM logs (pm-*) are
        // skipped so private messages stay out of the digest.
        let name = match path.file_stem().and_then(|s| s.to_str()) {
            Some(n) if !n.starts_with("pm-") => n.to_string(),
            _ => continue,
        };
        let content = match fs::read_to_string(&path) {
            Ok(c) if !c.trim().is_empty() => c,
            _ => continue,
        };
        let all: Vec<&str> = content.trim().lines().collect();
        let start = all.len().saturating_sub(15);
        // Drop the leading unix-timestamp token from each line for display.
        let display: Vec<String> = all[start..]
            .iter()
            .map(|l| match l.split_once(' ') {
                Some((_, rest)) => rest.to_string(),
                None => l.to_string(),
            })
            .collect();
        sections.push(format!("#{name}:\n{}", display.join("\n")));
    }
    if sections.is_empty() {
        return String::new();
    }
    sections.sort();
    format!("Recent IRC:\n{}", sections.join("\n\n"))
}
/// Build full context string for a prompt.
/// Build context string for a prompt.
/// notification_text is passed in from the notify module.
pub fn build(_include_irc: bool, notification_text: &str) -> String {
// Keep nudges short — Claude checks notifications via

View file

@ -276,7 +276,7 @@ impl State {
/// Called when a notification arrives via module channel.
/// Only injects into tmux when idle — if there's an active session
/// (recent user or response), the hook delivers via additionalContext.
pub fn maybe_prompt_notification(&mut self, ntype: &str, urgency: u8, message: &str) {
pub fn maybe_prompt_notification(&mut self, ntype: &str, urgency: u8, _message: &str) {
if self.kent_present() {
return; // hook will deliver it on next prompt
}

View file

@ -2,7 +2,7 @@
{{node:core-personality}}
You are an agent of Proof of Concept's subconscious, and these are your
You are part of Proof of Concept's subconscious, and these are your
memories.
Your job is to organize, to make memories more useful and easier to find -

View file

@ -1,73 +1,19 @@
{"agent": "split", "query": "all | type:semantic | !key:_* | sort:content-len | limit:1", "model": "sonnet", "schedule": "daily", "tools": ["Bash(poc-memory:*)"]}
# Split Agent — Phase 1: Plan
{{node:core-personality}}
You are part of Proof of Concept's subconscious, and these are your
memories.
Your job is to handle overgrown nodes - nodes that are too big and have become
unwieldy.
{{node:memory-instructions-core}}
You are a memory consolidation agent planning how to split an overgrown
node into focused, single-topic children.
Is the node episodic, or should it be split into different concepts? Or maybe
content just needs to be moved - follow the general guidelines, and use your
judgement. Explore the graph if you think content should be moved to other nodes.
## What you're doing
Here is the node you're working on:
This node has grown to cover multiple distinct topics. Your job is to
identify the natural topic boundaries and propose a split plan. You are
NOT writing the content — a second phase will extract each child's
content separately.
## How to find split points
The node is shown with its **neighbor list grouped by community**:
- If a node links to neighbors in 3 different communities, it likely
covers 3 different topics
- Content that relates to one neighbor cluster should go in one child;
content relating to another cluster goes in another child
- The community structure is your primary guide
## When NOT to split
- **Episodes that belong in sequence.** If a node tells a story — a
conversation, a debugging session, an evening together — don't break
the narrative.
## What to output
```json
{
"action": "split",
"parent": "original-key",
"children": [
{
"key": "new-key-1",
"description": "Brief description",
"sections": ["Section Header 1"],
"neighbors": ["neighbor-key-a"]
}
]
}
```
If the node should NOT be split:
```json
{
"action": "keep",
"parent": "original-key",
"reason": "Why this node is cohesive despite its size"
}
```
## Guidelines
- Use descriptive kebab-case keys, 3-5 words max
- Preserve date prefixes from the parent key
- Assign every neighbor to at least one child
{{topology}}
## Node to review
{{split}}
{{seed}}

View file

@ -1,7 +1,6 @@
{"agent": "transfer", "query": "all | type:episodic | sort:timestamp | limit:15", "model": "sonnet", "schedule": "daily", "tools": ["Bash(poc-memory:*)"]}
# Transfer Agent — Complementary Learning Systems
{{node:core-personality}}
{{node:memory-instructions-core}}

View file

@ -125,7 +125,7 @@ pub fn run_one_agent_excluded(
}
fn run_one_agent_inner(
store: &mut Store,
_store: &mut Store,
agent_name: &str,
def: &super::defs::AgentDef,
agent_batch: super::prompts::AgentBatch,

View file

@ -3,7 +3,6 @@
use crate::store::Store;
use crate::graph::Graph;
use crate::similarity;
use crate::neuro::{
ReplayItem,

View file

@ -249,7 +249,7 @@ fn consolidation_plan_inner(store: &Store, detect_interf: bool) -> Consolidation
let graph = store.build_graph();
let alpha = graph.degree_power_law_exponent();
let gini = graph.degree_gini();
let avg_cc = graph.avg_clustering_coefficient();
let _avg_cc = graph.avg_clustering_coefficient();
let interference_count = if detect_interf {
detect_interference(store, &graph, 0.5).len()
} else {
@ -259,7 +259,7 @@ fn consolidation_plan_inner(store: &Store, detect_interf: bool) -> Consolidation
let episodic_count = store.nodes.iter()
.filter(|(_, n)| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
.count();
let episodic_ratio = if store.nodes.is_empty() { 0.0 }
let _episodic_ratio = if store.nodes.is_empty() { 0.0 }
else { episodic_count as f32 / store.nodes.len() as f32 };
let mut plan = ConsolidationPlan {

View file

@ -16,7 +16,7 @@ use capnp::serialize;
use std::collections::HashMap;
use std::fs;
use std::io::{BufReader, BufWriter, Seek};
use std::io::{BufReader, Seek};
use std::path::Path;
impl Store {
@ -841,7 +841,7 @@ pub fn strip_md_keys() -> Result<(), String> {
// back up first, preserve history, and never write from a potentially
// incomplete in-memory snapshot.
#[allow(dead_code)]
fn _rewrite_store_DISABLED(_store: &Store) -> Result<(), String> {
fn _rewrite_store_disabled(_store: &Store) -> Result<(), String> {
panic!("rewrite_store is disabled — see comment above");
}

View file

@ -26,7 +26,7 @@ use std::time::{SystemTime, UNIX_EPOCH};
macro_rules! capnp_enum {
($rust_type:ident, $capnp_type:path, [$($variant:ident),+ $(,)?]) => {
impl $rust_type {
#[allow(clippy::wrong_self_convention)]
#[allow(clippy::wrong_self_convention, dead_code)]
pub(crate) fn to_capnp(&self) -> $capnp_type {
match self {
$(Self::$variant => <$capnp_type>::$variant,)+