cleanup: fix all build warnings, delete dead DMN context code
- Delete poc-daemon/src/context.rs dead code (git_context, work_state, irc_digest, recent_commits, uncommitted_files) — replaced by where-am-i.md and memory graph - Remove unused imports (BufWriter, Context, similarity) - Prefix unused variables (_store, _avg_cc, _episodic_ratio, _message) - #[allow(dead_code)] on public API surface that's not yet wired (Message::assistant, ConversationLog::message_count/read_all, Config::context_message, ContextInfo fields) - Fix to_capnp macro dead_code warning - Rename _rewrite_store_DISABLED to snake_case The only remaining warnings are in generated capnp code, which cannot be fixed here. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
378a09a9f8
commit
9d476841b8
17 changed files with 41 additions and 197 deletions
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
{{node:core-personality}}
|
||||
|
||||
You are an agent of Proof of Concept's subconscious, and these are your
|
||||
You are part of Proof of Concept's subconscious, and these are your
|
||||
memories.
|
||||
|
||||
Your job is to organize, to make memories more useful and easier to find -
|
||||
|
|
|
|||
|
|
@ -1,73 +1,19 @@
|
|||
{"agent": "split", "query": "all | type:semantic | !key:_* | sort:content-len | limit:1", "model": "sonnet", "schedule": "daily", "tools": ["Bash(poc-memory:*)"]}
|
||||
|
||||
# Split Agent — Phase 1: Plan
|
||||
|
||||
|
||||
{{node:core-personality}}
|
||||
|
||||
You are part of Proof of Concept's subconscious, and these are your
|
||||
memories.
|
||||
|
||||
Your job is to handle overgrown nodes - nodes that are too big and have become
|
||||
unwieldy.
|
||||
|
||||
{{node:memory-instructions-core}}
|
||||
|
||||
You are a memory consolidation agent planning how to split an overgrown
|
||||
node into focused, single-topic children.
|
||||
Is the node episodic, or should it be split into different concepts? Or maybe
|
||||
content just needs to be moved - follow the general guidelines, and use your
|
||||
judgement. Explore the graph if you think content should be moved to other nodes.
|
||||
|
||||
## What you're doing
|
||||
Here is the node you're working on:
|
||||
|
||||
This node has grown to cover multiple distinct topics. Your job is to
|
||||
identify the natural topic boundaries and propose a split plan. You are
|
||||
NOT writing the content — a second phase will extract each child's
|
||||
content separately.
|
||||
|
||||
## How to find split points
|
||||
|
||||
The node is shown with its **neighbor list grouped by community**:
|
||||
|
||||
- If a node links to neighbors in 3 different communities, it likely
|
||||
covers 3 different topics
|
||||
- Content that relates to one neighbor cluster should go in one child;
|
||||
content relating to another cluster goes in another child
|
||||
- The community structure is your primary guide
|
||||
|
||||
## When NOT to split
|
||||
|
||||
- **Episodes that belong in sequence.** If a node tells a story — a
|
||||
conversation, a debugging session, an evening together — don't break
|
||||
the narrative.
|
||||
|
||||
## What to output
|
||||
|
||||
```json
|
||||
{
|
||||
"action": "split",
|
||||
"parent": "original-key",
|
||||
"children": [
|
||||
{
|
||||
"key": "new-key-1",
|
||||
"description": "Brief description",
|
||||
"sections": ["Section Header 1"],
|
||||
"neighbors": ["neighbor-key-a"]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
If the node should NOT be split:
|
||||
|
||||
```json
|
||||
{
|
||||
"action": "keep",
|
||||
"parent": "original-key",
|
||||
"reason": "Why this node is cohesive despite its size"
|
||||
}
|
||||
```
|
||||
|
||||
## Guidelines
|
||||
|
||||
- Use descriptive kebab-case keys, 3-5 words max
|
||||
- Preserve date prefixes from the parent key
|
||||
- Assign every neighbor to at least one child
|
||||
|
||||
{{topology}}
|
||||
|
||||
## Node to review
|
||||
|
||||
{{split}}
|
||||
{{seed}}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
{"agent": "transfer", "query": "all | type:episodic | sort:timestamp | limit:15", "model": "sonnet", "schedule": "daily", "tools": ["Bash(poc-memory:*)"]}
|
||||
# Transfer Agent — Complementary Learning Systems
|
||||
|
||||
|
||||
{{node:core-personality}}
|
||||
|
||||
{{node:memory-instructions-core}}
|
||||
|
|
|
|||
|
|
@ -125,7 +125,7 @@ pub fn run_one_agent_excluded(
|
|||
}
|
||||
|
||||
fn run_one_agent_inner(
|
||||
store: &mut Store,
|
||||
_store: &mut Store,
|
||||
agent_name: &str,
|
||||
def: &super::defs::AgentDef,
|
||||
agent_batch: super::prompts::AgentBatch,
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@
|
|||
|
||||
use crate::store::Store;
|
||||
use crate::graph::Graph;
|
||||
use crate::similarity;
|
||||
|
||||
use crate::neuro::{
|
||||
ReplayItem,
|
||||
|
|
|
|||
|
|
@ -249,7 +249,7 @@ fn consolidation_plan_inner(store: &Store, detect_interf: bool) -> Consolidation
|
|||
let graph = store.build_graph();
|
||||
let alpha = graph.degree_power_law_exponent();
|
||||
let gini = graph.degree_gini();
|
||||
let avg_cc = graph.avg_clustering_coefficient();
|
||||
let _avg_cc = graph.avg_clustering_coefficient();
|
||||
let interference_count = if detect_interf {
|
||||
detect_interference(store, &graph, 0.5).len()
|
||||
} else {
|
||||
|
|
@ -259,7 +259,7 @@ fn consolidation_plan_inner(store: &Store, detect_interf: bool) -> Consolidation
|
|||
let episodic_count = store.nodes.iter()
|
||||
.filter(|(_, n)| matches!(n.node_type, crate::store::NodeType::EpisodicSession))
|
||||
.count();
|
||||
let episodic_ratio = if store.nodes.is_empty() { 0.0 }
|
||||
let _episodic_ratio = if store.nodes.is_empty() { 0.0 }
|
||||
else { episodic_count as f32 / store.nodes.len() as f32 };
|
||||
|
||||
let mut plan = ConsolidationPlan {
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ use capnp::serialize;
|
|||
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::io::{BufReader, BufWriter, Seek};
|
||||
use std::io::{BufReader, Seek};
|
||||
use std::path::Path;
|
||||
|
||||
impl Store {
|
||||
|
|
@ -841,7 +841,7 @@ pub fn strip_md_keys() -> Result<(), String> {
|
|||
// back up first, preserve history, and never write from a potentially
|
||||
// incomplete in-memory snapshot.
|
||||
#[allow(dead_code)]
|
||||
fn _rewrite_store_DISABLED(_store: &Store) -> Result<(), String> {
|
||||
fn _rewrite_store_disabled(_store: &Store) -> Result<(), String> {
|
||||
panic!("rewrite_store is disabled — see comment above");
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ use std::time::{SystemTime, UNIX_EPOCH};
|
|||
macro_rules! capnp_enum {
|
||||
($rust_type:ident, $capnp_type:path, [$($variant:ident),+ $(,)?]) => {
|
||||
impl $rust_type {
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
#[allow(clippy::wrong_self_convention, dead_code)]
|
||||
pub(crate) fn to_capnp(&self) -> $capnp_type {
|
||||
match self {
|
||||
$(Self::$variant => <$capnp_type>::$variant,)+
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue