forked from kent/consciousness

Compare commits: b5aa5412e1...b23f6484e2 (24 commits)
Commits:
b23f6484e2, d5aad5c1a4, 93fcc32a00, 919749dc67, 31aa0f3125, b77f07fef7,
f00532bdb7, ef80398466, 125927e2f1, b646221787, bc73ccc1da, 090c8e4d35,
f408bb5d86, 314ae9c4cb, e9e7458013, d2dbdedc8f, 271e09adcc, aad227e487,
bc991c3521, 1c0967c4ec, 28e564aeb2, c300013ce5, 9fc27e7372, ed896d4e83
39 changed files with 1122 additions and 2724 deletions

Cargo.lock (generated, 7 changes)
@@ -555,7 +555,6 @@ dependencies = [
  "serde",
  "serde_json",
  "serde_urlencoded",
- "skillratings",
  "tokenizers",
  "tokio",
  "tokio-rustls",

@@ -2802,12 +2801,6 @@ version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e"
-
-[[package]]
-name = "skillratings"
-version = "0.28.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a6ee7559737c1adcd9184f168a04dc360c84878907c3ecc5c33c2320be1d47a"

 [[package]]
 name = "slab"
 version = "0.4.12"

Cargo.toml
@@ -62,7 +62,6 @@ capnp = "0.25"
 capnp-rpc = "0.25"

 tokenizers = "0.21"
-skillratings = "0.28"

 http = "1"
 hyper = { version = "1", features = ["client", "http1"] }

@@ -260,7 +260,7 @@ impl State {
         while i > 0 && !remaining.is_char_boundary(i) { i -= 1; }
         // To avoid splitting mid-word, see if there was a space recently
         let mut j = i;
-        while j > 0 && j > i-10 && remaining.as_bytes()[j] != b' ' { j -= 1; }
+        while j > 1 && j > i-10 && remaining.as_bytes()[j] != b' ' { j -= 1; }
         if remaining.as_bytes()[j] == b' ' { j }
         else if i == 0 { max_msg } else { i }
     };

research/query-language-unification.md (new file, 94 lines)
@@ -0,0 +1,94 @@
# Query Language Unification Plan

**Status: DONE** (2026-04-11)

## Problem (was)

Two query parsers that didn't agree on syntax:

1. **PEG parser** (`hippocampus/query/parser.rs`) — boolean logic, general
   comparisons, operator precedence, parentheses. Used by the CLI and the
   compact format path in the `query()` tool.

2. **Pipeline parser** (`hippocampus/query/engine.rs`) — domain-specific
   filters (type, age, provenance), graph algorithms (spread, spectral).
   Used by the full format path in the `query()` tool.

`journal_tail` generates pipeline syntax but gets routed through the PEG
parser on the compact path. Result: parse errors.

## Approach

Keep the PEG parser (it has the harder-to-build structural foundation) and
extend it with the pipeline parser's domain features.

## Expression extensions (add to `expr` rule in parser.rs)

- `field:value` shorthand for `field = 'value'` (colon-separated equality;
  see the sketch after this list)
- `*` already works as `Expr::All`
- `key ~ 'glob'` already works via match operator

## New stages (add to `stage` rule in parser.rs)

Domain filter stages from engine.rs:
- `type:X` — filter by node type (episodic, daily, weekly, monthly, semantic)
- `age:<7d` — duration comparison on timestamp
- `key:GLOB` — glob match on key
- `provenance:X` — provenance filter
- `weight:>N` — weight comparison (may already work via general comparison)
- `content-len:>N` — content size filter

Sort/limit syntax variants:
- `sort:field` alongside existing `sort field`
- `limit:N` alongside existing `limit N`

Graph algorithms:
- `spread` — spreading activation
- `spectral` — spectral nearest neighbors
- `confluence` — multi-source reachability
- `geodesic` — straightest spectral paths
- `manifold` — extrapolation along seed direction

## What changes

1. `parser.rs` — add field:value shorthand to expr, add domain stages
2. `engine.rs` — keep run_pipeline execution logic, have the PEG parser emit
   compatible Stage types (or convert the PEG AST to Stage at the boundary)
3. `query()` tool handler (memory.rs) — one parser path for all formats
4. `journal_tail` (memory.rs) — generate unified syntax
5. CLI `poc-memory query` — uses the unified parser

## Migration path

1. Add field:value shorthand and type/age/key stages to the PEG parser
2. Route query() through the PEG parser for all formats
3. Migrate journal_tail and any other pipeline-syntax callers
4. Remove the pipeline parser (or keep it as an internal execution layer)

## What was done

**Deleted from engine.rs (-153 lines):**
- `Stage::parse()` and `Stage::parse_pipeline()` — redundant with PEG
- `parse_cmp()`, `parse_duration_or_number()`, `parse_composite_sort()`,
  `parse_node_type()`, `parse_sort_field()` — helper functions for the deleted parser

**Added to parser.rs (+120 lines):**
- Pipeline syntax in the PEG grammar (`type:X`, `age:<Nd`, `sort:field`, etc.)
- `parse_stages()` — unified entry point returning `Vec<Stage>`
- Grammar helper functions

**Net: +17 lines**

**Architecture now:**
- parser.rs: PEG grammar handles ALL parsing (both syntaxes)
- engine.rs: pure execution — types and `run_query()`, no parsing

Result: `all | type:episodic | sort:timestamp | limit:5` works everywhere.
Mixed syntax like `degree > 5 | type:semantic | sort degree` also works.
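
For orientation, this is roughly what a caller looks like after unification. The call shape mirrors the call sites this change rewrites in memory.rs and cli/agent.rs; the wrapper function itself is illustrative, and error handling is simplified:

```rust
use crate::store::Store;

/// Sketch: one parser path for every output format (assumed helper, not real code).
fn run(query_str: &str, store: &Store, limit: usize) -> Result<Vec<String>, String> {
    let graph = store.build_graph();
    // Unified entry point: handles PEG expressions and pipeline stages alike.
    let stages = crate::query_parser::parse_stages(query_str)?;
    // Execution stays in the engine; parsing no longer lives there.
    let results = crate::search::run_query(&stages, vec![], &graph, store, false, limit);
    Ok(results.into_iter().map(|(key, _score)| key).collect())
}
```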

## What NOT to change (original note)

The run_pipeline execution logic stays — it's correct and well-tested.
Only the parsing front-end unifies. The pipeline parser's Stage enum
becomes the internal representation that both the PEG parser and any
remaining direct callers produce.

research/sparse-kernel-napkin-math.md (new file, 198 lines)
@@ -0,0 +1,198 @@
# Sparse Kernel Compilation: Napkin Math for Qwen3.5-27B

## Architecture recap

| Parameter | Value |
|-----------|-------|
| Layers | 64 (48 linear attention, 16 full attention) |
| Hidden dim (H) | 5,120 |
| Query heads | 24 |
| KV heads | 4 (GQA) |
| Head dim | 256 |
| FFN intermediate | 17,408 |
| Full attention interval | every 4th layer |
| Total params | ~27.5B |

**Key discovery**: Qwen3.5 already uses sparse attention — 48/64 layers use
linear attention (O(N)), only 16 use full O(N^2) attention. The attention
sparsity question is partially answered by the architecture itself. The
remaining opportunity is **weight sparsity** in the projection and FFN matrices.

## Measured weight sparsity (from B200, all 11 shards)

| Component | Count | Total params | <0.001 | <0.005 | <0.01 | <0.02 |
|-----------|-------|--------------|--------|--------|-------|-------|
| down_proj | 65 | 5,793,382,400 | 7.4% | 35.8% | 64.3% | 92.8% |
| gate_proj | 65 | 5,793,382,400 | 7.2% | 35.0% | 63.3% | 92.3% |
| up_proj | 65 | 5,793,382,400 | 7.3% | 35.2% | 63.5% | 92.5% |
| q_proj | 17 | 1,069,547,520 | 5.7% | 27.9% | 51.9% | 82.8% |
| k_proj | 17 | 89,128,960 | 6.0% | 29.4% | 54.1% | 84.1% |
| v_proj | 17 | 89,128,960 | 4.8% | 23.7% | 44.7% | 74.2% |
| o_proj | 17 | 534,773,760 | 5.3% | 25.9% | 48.7% | 79.6% |
| other | 605 | 6,070,652,272 | 5.4% | 26.4% | 49.6% | 80.9% |

**Key findings**:
- FFN layers (gate/up/down) are remarkably sparse: ~64% of weights below 0.01
- At threshold 0.02, FFN sparsity exceeds 92%
- Attention projections are less sparse but still significant: 45-52% below 0.01
- v_proj is the least sparse component (44.7% below 0.01)

## Per-layer parameter breakdown

- **QKV projection**: H × (Q_dim + K_dim + V_dim) ≈ 5120 × 13312 ≈ 68M
- **Output projection**: ~26M
- **FFN (gate + up + down)**: 5120 × 17408 × 3 ≈ 267M
- **Total per layer**: ~361M
- **64 layers**: ~23.1B (rest is embeddings, norms, etc.)

## Dense baseline: FLOPs per token

Each weight parameter contributes 2 FLOPs per token (multiply + accumulate).

- Per layer: ~722M FLOPs/token
- 64 layers: **~46.2B FLOPs/token**

On a B200 (theoretical ~4.5 PFLOPS FP8, ~1.1 PFLOPS BF16):
- BF16 throughput: 46.2B / 1.1e15 ≈ 0.042ms per token (compute-bound limit)
- But inference is usually **memory-bandwidth-bound** for small batch sizes

B200 HBM bandwidth: ~8 TB/s. At BF16 (2 bytes/param):
- Loading all weights once: 27.5B × 2 = 55GB → 55/8000 ≈ **6.9ms per token** (batch=1)
- This is the real bottleneck. Compute is cheap; loading weights is expensive.

## What sparsity buys you

At **X% sparsity** (X% of weights are zero), you only need to load (100-X)% of the weights:

| Sparsity | Params loaded | Time (batch=1) | Speedup |
|----------|---------------|----------------|---------|
| 0% (dense) | 27.5B | 6.9ms | 1.0x |
| 50% | 13.8B | 3.4ms | 2.0x |
| 75% | 6.9B | 1.7ms | 4.0x |
| 90% | 2.8B | 0.7ms | 10x |

**This is the key insight**: inference at small batch sizes is memory-bandwidth-bound.
Sparse weights = fewer bytes to load = directly proportional speedup,
**IF** the sparse kernel can avoid the gather/scatter overhead.

## The compilation problem

Dense GEMM is fast because:
1. Weights are contiguous in memory → sequential reads
2. Tiling fits perfectly in SRAM → high reuse
3. Hardware tensor cores expect dense blocks

Naive sparse matmul kills this:
- Irregular memory access → low bandwidth utilization
- Poor SRAM tiling → cache thrashing
- Tensor cores can't help

### The FlashAttention analogy

FlashAttention's insight: the N×N attention matrix doesn't fit in SRAM,
but you can tile it so each tile does. You recompute instead of materializing.

**Sparse kernel compilation insight**: the sparsity pattern is **known at compile time**.
A compiler can:

1. **Analyze the sparsity graph** of each weight matrix
2. **Find blocks** of non-zero weights that are close in memory
3. **Generate a tiling schedule** that loads these blocks into SRAM efficiently
4. **Emit fused kernels** where the memory access pattern is baked in as constants

The resulting kernel looks like a dense kernel to the hardware —
sequential reads, high SRAM reuse, maybe even tensor core compatible
(if the compiler finds dense sub-blocks within the sparse matrix).
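
The "find blocks" step is cheap to prototype on the CPU. A sketch of the block-coverage analysis, assuming the sparsity mask is a dense row-major boolean matrix (the block size and layout are illustrative, not what a real compiler would pick):

```rust
/// Fraction of B×B blocks containing at least one non-zero weight.
/// The complement is the fraction of blocks a block-level kernel could
/// skip entirely: a rough upper bound on block-sparse savings.
fn nonzero_block_fraction(mask: &[bool], rows: usize, cols: usize, b: usize) -> f64 {
    let (block_rows, block_cols) = (rows.div_ceil(b), cols.div_ceil(b));
    let mut nonzero = 0usize;
    for bi in 0..block_rows {
        for bj in 0..block_cols {
            let any = (bi * b..((bi + 1) * b).min(rows)).any(|r| {
                (bj * b..((bj + 1) * b).min(cols)).any(|c| mask[r * cols + c])
            });
            if any { nonzero += 1; }
        }
    }
    nonzero as f64 / (block_rows * block_cols) as f64
}
```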

## Block sparsity vs unstructured sparsity

**Block sparse** (e.g., 4×4 or 16×16 blocks zeroed out):
- GPU-friendly: blocks map to tensor core operations
- Less flexible: coarser pruning granularity → less achievable sparsity
- NVIDIA's 2:4 structured sparsity gets ~50% sparsity with tensor core support
- Real-world: typically 50-70% sparsity achievable without quality loss

**Unstructured sparse** (individual weights zeroed):
- Maximally flexible: fine-grained pruning → higher achievable sparsity
- GPU-hostile: the gather/scatter problem
- Real-world: 80-95% sparsity achievable in many layers without quality loss

**The compiled kernel approach bridges this**: take unstructured sparsity
(maximally flexible, high compression) and compile it into a kernel that
runs as efficiently as block-sparse. Best of both worlds.
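
For concreteness, 2:4 structured sparsity just means that in every group of four consecutive weights, only the two largest magnitudes survive. A hedged sketch of that pruning step (a real pipeline would also fine-tune or recalibrate the surviving weights):

```rust
/// Enforce a 2:4 pattern in place: in each group of four weights,
/// zero the two with the smallest magnitude.
fn prune_2_of_4(weights: &mut [f32]) {
    for group in weights.chunks_mut(4) {
        if group.len() < 4 { continue; } // ignore a ragged tail
        let mut idx = [0usize, 1, 2, 3];
        // Sort indices by descending |weight|.
        idx.sort_by(|&a, &b| group[b].abs().total_cmp(&group[a].abs()));
        group[idx[2]] = 0.0; // drop the third- and fourth-largest
        group[idx[3]] = 0.0;
    }
}
```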

## Recurrent depth composability

From our April 10 discussion: middle transformer layers are doing
open-coded simulated annealing — similar weights, similar computation.

If layers 8-24 have cosine similarity > 0.95:
- Replace 16 layers with 1 layer × 16 iterations
- **Parameter reduction**: 16 × 361M = 5.8B → 361M (16x reduction for those layers)
- **Memory bandwidth**: load one layer's weights, iterate in SRAM
- Combined with 50% sparsity on the remaining unique layers:
  - Unique layers (48): 48 × 361M × 0.5 = 8.7B params
  - Recurrent layer: 361M × 0.5 = 180M params (but iterated 16x in SRAM)
  - Total loaded per token: ~8.9B × 2 bytes = 17.8GB
  - Time: 17.8/8000 ≈ **2.2ms per token** (vs 6.9ms dense) — **3.1x speedup**

With higher sparsity (75%) + recurrence:
- Unique layers: 48 × 361M × 0.25 = 4.3B
- Recurrent: 90M
- Total: ~4.4B × 2 = 8.8GB → **1.1ms per token** — **6.3x speedup**
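
The same napkin arithmetic as the `token_ms` sketch earlier, extended to the composed case: N unique layers at a given density plus one recurrent layer whose weights are loaded once and iterated in SRAM. The layer counts and the 361M/layer figure come from the breakdown above:

```rust
/// Composed estimate: unique layers plus one shared recurrent layer,
/// all pruned to the same density, loaded once per token at BF16.
fn composed_ms(unique_layers: f64, params_per_layer: f64, density: f64, bw_gb_s: f64) -> f64 {
    let params = (unique_layers + 1.0) * params_per_layer * density;
    params * 2.0 / 1e9 / bw_gb_s * 1e3 // 2 bytes/param; GB over GB/s, then ms
}

// composed_ms(48.0, 361e6, 0.50, 8000.0) ≈ 2.2 ms  (vs 6.9 ms dense)
// composed_ms(48.0, 361e6, 0.25, 8000.0) ≈ 1.1 ms
```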

## What needs to happen

### Phase 1: Measure (can do now with B200 access)
1. Extract all weight matrices from Qwen3.5-27B
2. For each matrix, compute:
   - Magnitude distribution (what % of weights are near-zero?)
   - Achievable sparsity at various thresholds (L1 magnitude pruning; see the
     sketch after this list)
   - Dense sub-block statistics (how many 4×4, 16×16 blocks are all-zero?)
3. Layer similarity: pairwise cosine similarity of weight matrices across layers
   - Which layers are nearly identical? (recurrence candidates)
4. Validate quality: run perplexity eval at various sparsity levels
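
The threshold sweep in step 2 is a single pass per tensor. A minimal sketch, assuming each shard has already been loaded into an `f32` slice (the loader and the `down_proj` name below are placeholders):

```rust
/// Fraction of entries whose magnitude falls below each threshold;
/// these are the numbers behind the measured-sparsity table above.
fn sparsity_at(weights: &[f32], thresholds: &[f32]) -> Vec<f64> {
    thresholds.iter().map(|&t| {
        let below = weights.iter().filter(|w| w.abs() < t).count();
        below as f64 / weights.len() as f64
    }).collect()
}

// e.g. sparsity_at(&down_proj, &[0.001, 0.005, 0.01, 0.02])
```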

### Phase 2: Compile (research project)
1. For a single sparse weight matrix, generate an optimized Triton kernel
2. Benchmark vs dense GEMM and vs NVIDIA's 2:4 sparse
3. Iterate on the tiling strategy

### Phase 3: End-to-end
1. Full model with compiled sparse kernels
2. Perplexity + latency benchmarks
3. Compare: dense, 2:4 structured, compiled unstructured

## Related work to read

- **SparseGPT** (Frantar & Alistarh, 2023): one-shot pruning to 50-60% unstructured sparsity
  with minimal quality loss. Key result: large models are more prunable than small ones.
- **Wanda** (Sun et al., 2023): pruning by weight magnitude × input activation.
  Simpler than SparseGPT, comparable results.
- **NVIDIA 2:4 sparsity**: hardware-supported structured sparsity on Ampere+.
  50% sparsity, ~2x speedup on tensor cores. The existence proof that sparse can be fast.
- **Triton** (Tillet et al.): Python DSL for GPU kernel generation.
  The right compilation target — can express arbitrary tiling strategies.
- **TACO** (Kjolstad et al.): tensor algebra compiler. Generates kernels for
  specific sparse tensor formats. Academic but the ideas are right.
- **FlashAttention** (Dao et al.): the tiling strategy to learn from.
- **DejaVu** (Liu et al., 2023): contextual sparsity — predicting which neurons
  to activate per input. Dynamic sparsity, complementary to weight sparsity.

## The bigger picture

Current state of the art: dense models with FlashAttention for the N×N attention part.
Weight sparsity is known to work (SparseGPT, Wanda) but isn't deployed because
the GPU kernels don't exist to exploit it efficiently.

The gap: nobody has built a compiler that takes a specific sparse weight matrix
and emits a kernel optimized for that exact pattern. FlashAttention proved
that custom kernels for specific computational patterns beat general-purpose ones.
The same should hold for sparse weight patterns.

**The bet**: a compiled sparse kernel for Qwen3.5-27B's actual sparsity pattern
would be within 80% of the theoretical bandwidth-bound speedup. If true,
50% sparsity → 1.6x real speedup, 75% → 3.2x, composing with recurrent depth
for potentially 5-6x total.

That would make 27B inference as fast as a 5B dense model, with 27B quality.

@@ -764,7 +764,7 @@ impl ContextState {
     pub fn conversation(&self) -> &[AstNode] { &self.conversation }
     pub fn conversation_mut(&mut self) -> &mut Vec<AstNode> { &mut self.conversation }

-    fn sections(&self) -> [&Vec<AstNode>; 4] {
+    pub fn sections(&self) -> [&Vec<AstNode>; 4] {
         [&self.system, &self.identity, &self.journal, &self.conversation]
     }
 }

@@ -23,7 +23,7 @@ use std::sync::Arc;
 use anyhow::Result;

 use api::ApiClient;
-use context::{AstNode, NodeBody, ContextState, Section, Ast, PendingToolCall, ResponseParser, Role};
+use context::{AstNode, ContextState, Section, Ast, PendingToolCall, ResponseParser, Role};

 use crate::mind::log::ConversationLog;

@@ -105,9 +105,6 @@ pub async fn start_activity(agent: &Arc<Agent>, label: impl Into<String>) -> Act

 /// Result of a single agent turn.
 pub struct TurnResult {
-    /// The text response (already sent through UI channel).
-    #[allow(dead_code)]
-    pub text: String,
     /// Whether the model called yield_to_user during this turn.
     pub yield_requested: bool,
     /// Whether any tools (other than yield_to_user) were called.

@@ -186,18 +183,18 @@ pub struct AgentState {
 impl Agent {
     pub async fn new(
         client: ApiClient,
-        system_prompt: String,
         personality: Vec<(String, String)>,
         app_config: crate::config::AppConfig,
         prompt_file: String,
         conversation_log: Option<ConversationLog>,
         active_tools: tools::ActiveTools,
+        agent_tools: Vec<tools::Tool>,
     ) -> Arc<Self> {
         let mut context = ContextState::new();
         context.conversation_log = conversation_log;
         context.push_no_log(Section::System, AstNode::system_msg(&system_prompt));

-        let tool_defs = tools::all_tool_definitions().await;
+        let tool_defs: Vec<String> = agent_tools.iter().map(|t| t.to_json()).collect();

         if !tool_defs.is_empty() {
             let tools_text = format!(
                 "# Tools\n\nYou have access to the following functions:\n\n<tools>\n{}\n</tools>\n\n\

@@ -223,7 +220,7 @@ impl Agent {
             session_id,
             context: tokio::sync::Mutex::new(context),
             state: tokio::sync::Mutex::new(AgentState {
-                tools: tools::tools(),
+                tools: agent_tools,
                 mcp_tools: McpToolAccess::All,
                 last_prompt_tokens: 0,
                 reasoning_effort: "none".to_string(),

@@ -445,24 +442,12 @@ impl Agent {
         }

-        // Text-only response — extract text and return
-        let text = {
-            let ctx = agent.context.lock().await;
-            let children = ctx.conversation()[branch_idx].children();
-            children.iter()
-                .filter_map(|c| c.leaf())
-                .filter(|l| matches!(l.body(), NodeBody::Content(_)))
-                .map(|l| l.body().text())
-                .collect::<Vec<_>>()
-                .join("")
-        };

         let mut st = agent.state.lock().await;
         if st.pending_yield { ds.yield_requested = true; st.pending_yield = false; }
         if st.pending_model_switch.is_some() { ds.model_switch = st.pending_model_switch.take(); }
         if st.pending_dmn_pause { ds.dmn_pause = true; st.pending_dmn_pause = false; }

         return Ok(TurnResult {
-            text,
             yield_requested: ds.yield_requested,
             had_tool_calls: ds.had_tool_calls,
             tool_errors: ds.tool_errors,

@@ -569,7 +554,7 @@ impl Agent {

     pub async fn compact(&self) {
         match crate::config::reload_for_model(&self.app_config, &self.prompt_file) {
-            Ok((_system_prompt, personality)) => {
+            Ok(personality) => {
                 let mut ctx = self.context.lock().await;
                 // System section (prompt + tools) set by new(), don't touch it
                 ctx.clear(Section::Identity);

@@ -10,6 +10,7 @@
 use crate::store::{self, Store};
 use crate::subconscious::{defs, prompts};

+use std::collections::HashMap;
 use std::fs;
 use std::path::PathBuf;

@@ -17,6 +18,129 @@ use super::context::AstNode;
 use super::tools::{self as agent_tools};
 use super::Agent;

+// ---------------------------------------------------------------------------
+// Agent logging — shared by Mind and CLI paths
+// ---------------------------------------------------------------------------
+
+/// Stats from a single run.
+#[derive(Clone, Default, serde::Serialize, serde::Deserialize)]
+pub struct RunStats {
+    pub messages: usize,
+    pub tool_calls: usize,
+    pub tool_failures: usize,
+    pub tool_calls_by_type: HashMap<String, usize>,
+}
+
+/// Per-tool accumulated stats.
+#[derive(Clone, Default, serde::Serialize, serde::Deserialize)]
+pub struct ToolStats {
+    pub last: usize,
+    pub ewma: f64,
+    pub total: usize,
+}
+
+/// Persisted stats for an agent (survives restarts).
+#[derive(Clone, Default, serde::Serialize, serde::Deserialize)]
+pub struct PersistedStats {
+    pub runs: usize,
+    pub last_stats: Option<RunStats>,
+    /// Per-tool-type stats: last, ewma, total.
+    pub by_tool: HashMap<String, ToolStats>,
+    /// Failed calls stats.
+    pub failures: ToolStats,
+}
+
+fn stats_path() -> std::path::PathBuf {
+    dirs::home_dir().unwrap_or_default()
+        .join(".consciousness/agent-stats.json")
+}
+
+static AGENT_STATS: std::sync::OnceLock<std::sync::Mutex<HashMap<String, PersistedStats>>> =
+    std::sync::OnceLock::new();
+
+fn stats_map() -> &'static std::sync::Mutex<HashMap<String, PersistedStats>> {
+    AGENT_STATS.get_or_init(|| {
+        let map: HashMap<String, PersistedStats> = std::fs::read_to_string(stats_path()).ok()
+            .and_then(|s| serde_json::from_str(&s).ok())
+            .unwrap_or_default();
+        std::sync::Mutex::new(map)
+    })
+}
+
+pub fn get_stats(name: &str) -> PersistedStats {
+    stats_map().lock().ok()
+        .and_then(|m| m.get(name).cloned())
+        .unwrap_or_default()
+}
+
+pub fn set_stats(name: &str, stats: PersistedStats) {
+    if let Ok(mut map) = stats_map().lock() {
+        map.insert(name.to_string(), stats);
+        if let Ok(json) = serde_json::to_string_pretty(&*map) {
+            let _ = std::fs::write(stats_path(), json);
+        }
+    }
+}
+
+/// Save agent conversation to JSON log file.
+/// Used by both mind-run agents and CLI-run agents.
+pub async fn save_agent_log(name: &str, agent: &std::sync::Arc<Agent>) -> RunStats {
+    let dir = dirs::home_dir().unwrap_or_default()
+        .join(format!(".consciousness/logs/{}", name));
+    let ctx = agent.context.lock().await;
+    let stats = compute_run_stats(ctx.conversation());
+    if std::fs::create_dir_all(&dir).is_ok() {
+        let ts = chrono::Utc::now().format("%Y%m%d-%H%M%S");
+        let path = dir.join(format!("{}.json", ts));
+        let mut context: Vec<&super::context::AstNode> = Vec::new();
+        for section in ctx.sections() {
+            context.extend(section);
+        }
+        if let Ok(json) = serde_json::to_string_pretty(&context) {
+            let _ = std::fs::write(&path, json);
+        }
+    }
+    dbglog!("[agent] {} — {} msgs, {} tool calls",
+        name, stats.messages, stats.tool_calls);
+    stats
+}
+
+fn compute_run_stats(conversation: &[super::context::AstNode]) -> RunStats {
+    use super::context::{AstNode, NodeBody};
+
+    let mut messages = 0usize;
+    let mut tool_calls = 0usize;
+    let mut tool_failures = 0usize;
+    let mut by_type: HashMap<String, usize> = HashMap::new();
+
+    for node in conversation {
+        if let AstNode::Branch { children, .. } = node {
+            messages += 1;
+            for child in children {
+                if let AstNode::Leaf(leaf) = child {
+                    match leaf.body() {
+                        NodeBody::ToolCall { name, .. } => {
+                            tool_calls += 1;
+                            *by_type.entry(name.to_string()).or_default() += 1;
+                        }
+                        NodeBody::ToolResult(text) => {
+                            // Detect failures from error patterns in result
+                            let t = text.trim_start();
+                            if t.starts_with("Error") || t.starts_with("error:") ||
+                               t.starts_with("Failed") || t.contains("not found") {
+                                tool_failures += 1;
+                            }
+                        }
+                        _ => {}
+                    }
+                }
+            }
+        }
+    }
+
+    RunStats { messages, tool_calls, tool_failures, tool_calls_by_type: by_type }
+}
+
 // ---------------------------------------------------------------------------
 // AutoAgent — multi-step autonomous agent
 // ---------------------------------------------------------------------------

@@ -37,8 +161,11 @@ pub struct AutoAgent {
     pub current_phase: String,
     pub turn: usize,
     pub enabled: bool,
+    pub temperature: f32,
+    pub priority: i32,
 }

 /// Per-run conversation backend — wraps a forked agent.
 struct Backend(std::sync::Arc<Agent>);

@@ -101,21 +228,23 @@ impl AutoAgent {
         name: String,
         tools: Vec<agent_tools::Tool>,
         steps: Vec<AutoStep>,
-        _temperature: f32,
-        _priority: i32,
+        temperature: f32,
+        priority: i32,
     ) -> Self {
         Self {
             name, tools, steps,
             current_phase: String::new(),
             turn: 0,
             enabled: true,
+            temperature,
+            priority,
         }
     }

     pub async fn run(
         &mut self,
         bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
-    ) -> Result<String, String> {
+    ) -> Result<(), String> {
         let config = crate::config::get();
         let base_url = config.api_base_url.as_deref().unwrap_or("");
         let api_key = config.api_key.as_deref().unwrap_or("");

@@ -129,25 +258,29 @@ impl AutoAgent {
         let cli = crate::user::CliArgs::default();
         let (app, _) = crate::config::load_app(&cli)
             .map_err(|e| format!("config: {}", e))?;
-        let (system_prompt, personality) = crate::config::reload_for_model(
+        let personality = crate::config::reload_for_model(
             &app, &app.prompts.other,
         ).map_err(|e| format!("config: {}", e))?;

         let agent = Agent::new(
-            client, system_prompt, personality,
+            client, personality,
             app, String::new(),
             None,
             super::tools::ActiveTools::new(),
+            super::tools::tools(),
         ).await;
         {
             let mut st = agent.state.lock().await;
             st.provenance = format!("standalone:{}", self.name);
             st.tools = self.tools.clone();
-            st.priority = Some(10);
+            st.temperature = self.temperature;
+            st.priority = Some(self.priority);
         }

-        let mut backend = Backend(agent);
-        self.run_with_backend(&mut backend, bail_fn).await
+        let mut backend = Backend(agent.clone());
+        let result = self.run_with_backend(&mut backend, bail_fn).await;
+        save_agent_log(&self.name, &agent).await;
+        result
     }

     /// Run using a pre-created agent Arc. The caller retains the Arc

@@ -155,7 +288,7 @@ impl AutoAgent {
     pub async fn run_shared(
         &mut self,
         agent: &std::sync::Arc<Agent>,
-    ) -> Result<String, String> {
+    ) -> Result<(), String> {
         let mut backend = Backend(agent.clone());
         self.run_with_backend(&mut backend, None).await
     }

@@ -168,7 +301,7 @@ impl AutoAgent {
         memory_keys: &[String],
         state: &std::collections::BTreeMap<String, String>,
         recently_written: &[String],
-    ) -> Result<String, String> {
+    ) -> Result<(), String> {
         let resolved_steps: Vec<AutoStep> = self.steps.iter().map(|s| AutoStep {
             prompt: resolve_prompt(&s.prompt, memory_keys, state, recently_written),
             phase: s.phase.clone(),

@@ -180,71 +313,57 @@ impl AutoAgent {
         result
     }

+    /// Update stats after a run completes. Called with the stats from save_agent_log.
+    pub fn update_stats(&self, run_stats: RunStats) {
+        const ALPHA: f64 = 0.3;
+        let old = get_stats(&self.name);
+
+        // Update per-tool stats
+        let mut by_tool = old.by_tool;
+        for (tool, count) in &run_stats.tool_calls_by_type {
+            let entry = by_tool.entry(tool.clone()).or_default();
+            entry.last = *count;
+            entry.ewma = ALPHA * (*count as f64) + (1.0 - ALPHA) * entry.ewma;
+            entry.total += count;
+        }
+
+        // Update failure stats
+        let failures = ToolStats {
+            last: run_stats.tool_failures,
+            ewma: ALPHA * (run_stats.tool_failures as f64) + (1.0 - ALPHA) * old.failures.ewma,
+            total: old.failures.total + run_stats.tool_failures,
+        };
+
+        let new = PersistedStats {
+            runs: old.runs + 1,
+            last_stats: Some(run_stats),
+            by_tool,
+            failures,
+        };
+        set_stats(&self.name, new);
+    }
+
     async fn run_with_backend(
         &mut self,
         backend: &mut Backend,
         bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
-    ) -> Result<String, String> {
+    ) -> Result<(), String> {
         dbglog!("[auto] {} starting, {} steps", self.name, self.steps.len());
         self.turn = 0;
         self.current_phase = self.steps.first()
             .map(|s| s.phase.clone()).unwrap_or_default();
-        let mut next_step = 0;
-
-        if next_step < self.steps.len() {
-            backend.push_node(
-                AstNode::system_msg(&self.steps[next_step].prompt)).await;
-            next_step += 1;
+        for (i, step) in self.steps.iter().enumerate() {
+            self.turn = i + 1;
+            self.current_phase = step.phase.clone();
+
+            if let Some(ref check) = bail_fn {
+                check(i)?;
+            }
+
+            backend.push_node(AstNode::system_msg(&step.prompt)).await;
+            Agent::turn(backend.0.clone()).await
+                .map_err(|e| format!("{}: {}", self.name, e))?;
         }

-        let max_turns = 50 * self.steps.len().max(1);
-
-        for _ in 0..max_turns {
-            self.turn += 1;
-
-            let result = match Agent::turn(backend.0.clone()).await {
-                Ok(r) => r,
-                Err(e) if super::context::is_context_overflow(&e) => {
-                    dbglog!("[auto] {} context full, stopping gracefully", self.name);
-                    return Ok(String::new());
-                }
-                Err(e) => return Err(format!("{}: {}", self.name, e)),
-            };
-
-            if result.had_tool_calls {
-                continue;
-            }
-
-            let text = result.text;
-            if text.is_empty() {
-                dbglog!("[auto] {} empty response, retrying", self.name);
-                backend.push_node(AstNode::system_msg(
-                    "Your previous response was empty. \
-                     Please respond with text or use a tool."
-                )).await;
-                continue;
-            }
-
-            dbglog!("[auto] {} response: {}",
-                self.name, &text[..text.floor_char_boundary(text.len().min(200))]);
-
-            if next_step < self.steps.len() {
-                if let Some(ref check) = bail_fn {
-                    check(next_step)?;
-                }
-                self.current_phase = self.steps[next_step].phase.clone();
-                backend.push_node(
-                    AstNode::system_msg(&self.steps[next_step].prompt)).await;
-                next_step += 1;
-                dbglog!("[auto] {} step {}/{}",
-                    self.name, next_step, self.steps.len());
-                continue;
-            }
-
-            return Ok(text);
-        }
-
-        Err(format!("{}: exceeded {} tool turns", self.name, max_turns))
+        Ok(())
     }

 }

@@ -255,7 +374,6 @@ impl AutoAgent {

 /// Result of running a single agent.
 pub struct AgentResult {
-    pub output: String,
     pub node_keys: Vec<String>,
     /// Directory containing output() files from the agent run.
     pub state_dir: PathBuf,

@@ -306,14 +424,21 @@ pub fn run_one_agent(
         defs::run_agent(store, &def, effective_count, &Default::default())?
     };

-    // Filter tools based on agent def, add filesystem output tool
-    let all_tools = super::tools::memory_and_journal_tools();
+    // Base memory tools + extras from agent def (matching unconscious.rs pattern)
+    let base_tools = super::tools::memory::memory_tools().to_vec();
+    let extra_tools = super::tools::memory::journal_tools().to_vec();
     let mut effective_tools: Vec<super::tools::Tool> = if def.tools.is_empty() {
-        all_tools.to_vec()
+        let mut all = base_tools;
+        all.extend(extra_tools);
+        all
     } else {
-        all_tools.into_iter()
-            .filter(|t| def.tools.iter().any(|w| w == &t.name))
-            .collect()
+        let mut tools = base_tools;
+        for name in &def.tools {
+            if let Some(t) = extra_tools.iter().find(|t| t.name == *name) {
+                tools.push(t.clone());
+            }
+        }
+        tools
     };
     effective_tools.push(super::tools::Tool {
         name: "output",

@@ -386,12 +511,11 @@ pub fn run_one_agent(
         Ok(())
     };

-    let output = call_api_with_tools_sync(
+    call_api_with_tools_sync(
         agent_name, &prompts, &step_phases, def.temperature, def.priority,
         &effective_tools, Some(&bail_fn))?;

     Ok(AgentResult {
-        output,
         node_keys: agent_batch.node_keys,
         state_dir,
     })

@@ -411,7 +535,7 @@ pub async fn call_api_with_tools(
     priority: i32,
     tools: &[agent_tools::Tool],
     bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
-) -> Result<String, String> {
+) -> Result<(), String> {
     let steps: Vec<AutoStep> = prompts.iter().zip(
         phases.iter().map(String::as_str)
             .chain(std::iter::repeat(""))

@@ -440,7 +564,7 @@ pub fn call_api_with_tools_sync(
     priority: i32,
     tools: &[agent_tools::Tool],
     bail_fn: Option<&(dyn Fn(usize) -> Result<(), String> + Sync)>,
-) -> Result<String, String> {
+) -> Result<(), String> {
     std::thread::scope(|s| {
         s.spawn(|| {
             let rt = tokio::runtime::Builder::new_current_thread()

@@ -101,11 +101,27 @@ pub fn journal_tools() -> [super::Tool; 3] {
             }
         }"#,
         handler: Arc::new(|_a, v| Box::pin(async move { journal_tail(&v).await })) },
-        Tool { name: "journal_new", description: "Start a new journal entry.",
-            parameters_json: r#"{"type":"object","properties":{"name":{"type":"string","description":"Short node name (becomes the key)"},"title":{"type":"string","description":"Descriptive title"},"body":{"type":"string","description":"Entry body"}},"required":["name","title","body"]}"#,
+        Tool { name: "journal_new", description: "Start a new journal/digest entry.",
+            parameters_json: r#"{
+                "type": "object",
+                "properties": {
+                    "name": {"type": "string", "description": "Short node name (becomes the key)"},
+                    "title": {"type": "string", "description": "Descriptive title"},
+                    "body": {"type": "string", "description": "Entry body"},
+                    "level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0}
+                },
+                "required": ["name", "title", "body"]
+            }"#,
             handler: Arc::new(|a, v| Box::pin(async move { journal_new(&a, &v).await })) },
-        Tool { name: "journal_update", description: "Append text to the most recent journal entry.",
-            parameters_json: r#"{"type":"object","properties":{"body":{"type":"string","description":"Text to append"}},"required":["body"]}"#,
+        Tool { name: "journal_update", description: "Append text to the most recent entry at a level.",
+            parameters_json: r#"{
+                "type": "object",
+                "properties": {
+                    "body": {"type": "string", "description": "Text to append"},
+                    "level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0}
+                },
+                "required": ["body"]
+            }"#,
             handler: Arc::new(|a, v| Box::pin(async move { journal_update(&a, &v).await })) },
     ]
 }

@@ -260,7 +276,7 @@ async fn query(args: &serde_json::Value) -> Result<String> {
     let store = arc.lock().await;
     let graph = store.build_graph();

-    let stages = crate::search::Stage::parse_pipeline(query_str)
+    let stages = crate::query_parser::parse_stages(query_str)
         .map_err(|e| anyhow::anyhow!("{}", e))?;
     let results = crate::search::run_query(&stages, vec![], &graph, &store, false, 100);
     let keys: Vec<String> = results.into_iter().map(|(k, _)| k).collect();

@@ -272,12 +288,61 @@ async fn query(args: &serde_json::Value) -> Result<String> {
             Ok(crate::subconscious::prompts::format_nodes_section(&store, &items, &graph))
         }
         _ => {
-            crate::query_parser::query_to_string(&store, &graph, query_str)
-                .map_err(|e| anyhow::anyhow!("{}", e))
+            // Compact output: check for count/select stages, else just list keys
+            use crate::search::{Stage, Transform};
+            let has_count = stages.iter().any(|s| matches!(s, Stage::Transform(Transform::Count)));
+            if has_count {
+                return Ok(keys.len().to_string());
+            }
+            if keys.is_empty() {
+                return Ok("no results".to_string());
+            }
+            let select_fields: Option<&Vec<String>> = stages.iter().find_map(|s| match s {
+                Stage::Transform(Transform::Select(f)) => Some(f),
+                _ => None,
+            });
+            if let Some(fields) = select_fields {
+                let mut out = String::from("key\t");
+                out.push_str(&fields.join("\t"));
+                out.push('\n');
+                for key in &keys {
+                    out.push_str(key);
+                    for f in fields {
+                        out.push('\t');
+                        out.push_str(&resolve_field_str(&store, &graph, key, f));
+                    }
+                    out.push('\n');
+                }
+                Ok(out)
+            } else {
+                Ok(keys.join("\n"))
+            }
         }
     }
 }

+fn resolve_field_str(store: &crate::store::Store, graph: &crate::graph::Graph, key: &str, field: &str) -> String {
+    let node = match store.nodes.get(key) {
+        Some(n) => n,
+        None => return "-".to_string(),
+    };
+    match field {
+        "key" => key.to_string(),
+        "weight" => format!("{:.3}", node.weight),
+        "node_type" => format!("{:?}", node.node_type),
+        "provenance" => node.provenance.clone(),
+        "emotion" => format!("{}", node.emotion),
+        "retrievals" => format!("{}", node.retrievals),
+        "uses" => format!("{}", node.uses),
+        "wrongs" => format!("{}", node.wrongs),
+        "created" => format!("{}", node.created_at),
+        "timestamp" => format!("{}", node.timestamp),
+        "degree" => format!("{}", graph.degree(key)),
+        "content_len" => format!("{}", node.content.len()),
+        _ => "-".to_string(),
+    }
+}
+
 // ── Journal tools ──────────────────────────────────────────────

 async fn journal_tail(args: &serde_json::Value) -> Result<String> {

@@ -308,10 +373,20 @@ async fn journal_tail(args: &serde_json::Value) -> Result<String> {
     query(&serde_json::json!({"query": q, "format": format})).await
 }

+fn level_to_node_type(level: i64) -> crate::store::NodeType {
+    match level {
+        1 => crate::store::NodeType::EpisodicDaily,
+        2 => crate::store::NodeType::EpisodicWeekly,
+        3 => crate::store::NodeType::EpisodicMonthly,
+        _ => crate::store::NodeType::EpisodicSession,
+    }
+}
+
 async fn journal_new(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
     let name = get_str(args, "name")?;
     let title = get_str(args, "title")?;
     let body = get_str(args, "body")?;
+    let level = args.get("level").and_then(|v| v.as_i64()).unwrap_or(0);
     let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
     let content = format!("## {} — {}\n\n{}", ts, title, body);

@@ -337,7 +412,7 @@ async fn journal_new(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args:
         base_key.to_string()
     };
     let mut node = crate::store::new_node(&key, &content);
-    node.node_type = crate::store::NodeType::EpisodicSession;
+    node.node_type = level_to_node_type(level);
     node.provenance = get_provenance(agent).await;
     store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
     store.save().map_err(|e| anyhow::anyhow!("{}", e))?;

@@ -347,14 +422,16 @@ async fn journal_new(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args:

 async fn journal_update(agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
     let body = get_str(args, "body")?;
+    let level = args.get("level").and_then(|v| v.as_i64()).unwrap_or(0);
+    let node_type = level_to_node_type(level);
     let arc = cached_store().await?;
     let mut store = arc.lock().await;
     let latest_key = store.nodes.values()
-        .filter(|n| n.node_type == crate::store::NodeType::EpisodicSession)
+        .filter(|n| n.node_type == node_type)
         .max_by_key(|n| n.created_at)
         .map(|n| n.key.clone());
     let Some(key) = latest_key else {
-        anyhow::bail!("no journal entry to update — use journal_new first");
+        anyhow::bail!("no entry at level {} to update — use journal_new first", level);
     };
     let existing = store.nodes.get(&key).unwrap().content.clone();
     let new_content = format!("{}\n\n{}", existing.trim_end(), body);

@@ -195,6 +195,10 @@ pub async fn all_tool_definitions() -> Vec<String> {
     defs
 }

+pub async fn all_mcp_tool_definitions() -> Vec<String> {
+    mcp_client::tool_definitions_json().await
+}
+
 /// Memory + journal tools only — for subconscious agents.
 pub fn memory_and_journal_tools() -> Vec<Tool> {
     let mut all = memory::memory_tools().to_vec();

src/cli/agent.rs (298 changes)
@@ -1,6 +1,7 @@
 // cli/agent.rs — agent subcommand handlers

 use crate::store;
+use crate::subconscious::digest;

 pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option<&str>, dry_run: bool, _local: bool, state_dir: Option<&str>) -> Result<(), String> {
     // Mark as agent so tool calls (e.g. poc-memory render) don't

@@ -25,7 +26,7 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
         target.to_vec()
     } else if let Some(q) = query {
         let graph = store.build_graph();
-        let stages = crate::search::Stage::parse_pipeline(q)?;
+        let stages = crate::query_parser::parse_stages(q)?;
         let results = crate::search::run_query(&stages, vec![], &graph, &store, false, count);
         if results.is_empty() {
             return Err(format!("query returned no results: {}", q));

@@ -56,23 +57,6 @@ pub fn cmd_run_agent(agent: &str, count: usize, target: &[String], query: Option
     Ok(())
 }

-pub fn cmd_consolidate_batch(count: usize, auto: bool, agent: Option<String>) -> Result<(), String> {
-    let store = store::Store::load()?;
-
-    if let Some(agent_name) = agent {
-        let batch = crate::agents::prompts::agent_prompt(&store, &agent_name, count)?;
-        for (i, s) in batch.steps.iter().enumerate() {
-            if batch.steps.len() > 1 {
-                println!("=== STEP {} ({}) ===\n", i + 1, s.phase);
-            }
-            println!("{}", s.prompt);
-        }
-        Ok(())
-    } else {
-        crate::agents::prompts::consolidation_batch(&store, count, auto)
-    }
-}
-
 pub fn cmd_replay_queue(count: usize) -> Result<(), String> {
     let store = store::Store::load()?;
     let queue = crate::neuro::replay_queue(&store, count);

@@ -85,21 +69,9 @@ pub fn cmd_replay_queue(count: usize) -> Result<(), String> {
     Ok(())
 }

-pub fn cmd_consolidate_session() -> Result<(), String> {
-    let store = store::Store::load()?;
-    let plan = crate::neuro::consolidation_plan(&store);
-    println!("{}", crate::neuro::format_plan(&plan));
-    Ok(())
-}
-
-pub fn cmd_consolidate_full() -> Result<(), String> {
-    let mut store = store::Store::load()?;
-    crate::consolidate::consolidate_full(&mut store)
-}
-
 pub fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
     let store = store::Store::load()?;
-    let links = crate::digest::parse_all_digest_links(&store);
+    let links = digest::parse_all_digest_links(&store);
     drop(store);
     println!("Found {} unique links from digest nodes", links.len());

@@ -115,269 +87,7 @@ pub fn cmd_digest_links(do_apply: bool) -> Result<(), String> {
     }

     let mut store = store::Store::load()?;
-    let (applied, skipped, fallbacks) = crate::digest::apply_digest_links(&mut store, &links);
+    let (applied, skipped, fallbacks) = digest::apply_digest_links(&mut store, &links);
     println!("\nApplied: {} ({} file-level fallbacks)  Skipped: {}", applied, fallbacks, skipped);
     Ok(())
 }
-
-pub fn cmd_journal_enrich(_jsonl_path: &str, _entry_text: &str, _grep_line: usize) -> Result<(), String> {
-    Err("journal-enrich has been removed — use the observation agent instead.".into())
-}
-
-pub fn cmd_apply_consolidation(_do_apply: bool, _report_file: Option<&str>) -> Result<(), String> {
-    Err("apply-consolidation has been removed — agents now apply changes via tool calls directly.".into())
-}
-
-pub fn cmd_knowledge_loop(_max_cycles: usize, _batch_size: usize, _window: usize, _max_depth: i32) -> Result<(), String> {
-    Err("knowledge-loop has been removed — agents now use tool calls directly. Use `poc-memory agent run` instead.".into())
-}
-
-pub fn cmd_fact_mine(_path: &str, _batch: bool, _dry_run: bool, _output_file: Option<&str>, _min_messages: usize) -> Result<(), String> {
-    Err("fact-mine has been removed — use the observation agent instead.".into())
-}
-
-pub fn cmd_fact_mine_store(_path: &str) -> Result<(), String> {
-    Err("fact-mine-store has been removed — use the observation agent instead.".into())
-}
-
-/// Sample recent actions from each agent type, sort by quality using
-/// LLM pairwise comparison, report per-type rankings.
-/// Elo ratings file path
-fn elo_path() -> std::path::PathBuf {
-    crate::config::get().data_dir.join("agent-elo.json")
-}
-
-/// Load persisted Elo ratings, or initialize at 1000.0
-fn load_elo_ratings(agent_types: &[&str]) -> std::collections::HashMap<String, f64> {
-    let path = elo_path();
-    let mut ratings: std::collections::HashMap<String, f64> = std::fs::read_to_string(&path)
-        .ok()
-        .and_then(|s| serde_json::from_str(&s).ok())
-        .unwrap_or_default();
-    for t in agent_types {
-        ratings.entry(t.to_string()).or_insert(1000.0);
-    }
-    ratings
-}
-
-fn save_elo_ratings(ratings: &std::collections::HashMap<String, f64>) {
-    let path = elo_path();
-    if let Ok(json) = serde_json::to_string_pretty(ratings) {
-        let _ = std::fs::write(path, json);
-    }
-}
-
-pub fn cmd_evaluate_agents(matchups: usize, model: &str, dry_run: bool) -> Result<(), String> {
-    use skillratings::elo::{elo, EloConfig, EloRating};
-    use skillratings::Outcomes;
-
-    let store = store::Store::load()?;
-
-    let agent_types: Vec<&str> = vec![
-        "linker", "organize", "distill", "separator",
-        "split", "rename",
-    ];
-
-    // Load agent prompt files
-    let prompts_dir = {
-        let repo = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("agents");
-        if repo.is_dir() { repo } else { crate::store::memory_dir().join("agents") }
-    };
-
-    // Collect recent actions per agent type
-    let mut actions: std::collections::HashMap<String, Vec<(String, String)>> = std::collections::HashMap::new();
-
-    for agent_type in &agent_types {
-        let prompt_file = prompts_dir.join(format!("{}.agent", agent_type));
-        let agent_prompt = std::fs::read_to_string(&prompt_file)
-            .unwrap_or_default()
-            .lines().skip(1).collect::<Vec<_>>().join("\n");
-        let agent_prompt = crate::util::truncate(&agent_prompt, 500, "...");
-
-        let prefix = format!("_consolidate-{}", agent_type);
-        let mut keys: Vec<(String, i64)> = store.nodes.iter()
-            .filter(|(k, _)| k.starts_with(&prefix))
-            .map(|(k, n)| (k.clone(), n.timestamp))
-            .collect();
-        keys.sort_by(|a, b| b.1.cmp(&a.1));
-        keys.truncate(20); // pool of recent actions to sample from
-
-        let mut type_actions = Vec::new();
-        for (key, _) in &keys {
-            let report = store.nodes.get(key)
-                .map(|n| n.content.clone())
-                .unwrap_or_default();
-
-            let mut target_content = String::new();
-            let mut seen = std::collections::HashSet::new();
-            for word in report.split_whitespace() {
-                let clean = word.trim_matches(|c: char| !c.is_alphanumeric() && c != '-' && c != '_');
-                if clean.len() > 10 && seen.insert(clean.to_string()) && store.nodes.contains_key(clean)
-                    && let Some(node) = store.nodes.get(clean) {
-                    let preview = crate::util::truncate(&node.content, 200, "...");
-                    target_content.push_str(&format!("\n### {}\n{}\n", clean, preview));
-                    if target_content.len() > 1500 { break; }
-                }
-            }
-
-            let context = format!(
-                "## Agent instructions\n{}\n\n## Report output\n{}\n\n## Affected nodes\n{}",
-                agent_prompt,
-                crate::util::truncate(&report, 1000, "..."),
-                if target_content.is_empty() { "(none found)".into() } else { target_content }
-            );
-            type_actions.push((key.clone(), context));
-        }
-        actions.insert(agent_type.to_string(), type_actions);
-    }
-
-    // Filter to types that have at least 1 action
-    let active_types: Vec<&str> = agent_types.iter()
-        .filter(|t| actions.get(**t).map(|a| !a.is_empty()).unwrap_or(false))
-        .copied()
-        .collect();
-
-    if active_types.len() < 2 {
-        return Err("Need at least 2 agent types with actions".into());
-    }
-
-    eprintln!("Evaluating {} agent types with {} matchups (model={})",
-        active_types.len(), matchups, model);
-
-    if dry_run {
-        let t1 = active_types[0];
-        let t2 = active_types[active_types.len() - 1];
-        let a1 = &actions[t1][0];
-        let a2 = &actions[t2][0];
-        let sample_a = (t1.to_string(), a1.0.clone(), a1.1.clone());
-        let sample_b = (t2.to_string(), a2.0.clone(), a2.1.clone());
-        println!("=== DRY RUN: Example comparison ===\n");
-        println!("{}", build_compare_prompt(&sample_a, &sample_b));
-        return Ok(());
-    }
-
-    // Load persisted ratings
-    let mut ratings = load_elo_ratings(&agent_types);
-    let config = EloConfig { k: 32.0 };
-    // Simple but adequate RNG: xorshift32
-    let mut rng = std::time::SystemTime::now()
-        .duration_since(std::time::UNIX_EPOCH).unwrap().subsec_nanos() | 1;
-    let mut next_rng = || -> usize {
-        rng ^= rng << 13;
-        rng ^= rng >> 17;
-        rng ^= rng << 5;
-        rng as usize
-    };
-
-    for i in 0..matchups {
-        // Pick two different random agent types
-        let idx_a = next_rng() % active_types.len();
-        let mut idx_b = next_rng() % active_types.len();
-        if idx_b == idx_a { idx_b = (idx_b + 1) % active_types.len(); }
-
-        let type_a = active_types[idx_a];
-        let type_b = active_types[idx_b];
-
-        // Pick random recent action from each
-        let acts_a = &actions[type_a];
-        let acts_b = &actions[type_b];
-        let act_a = &acts_a[next_rng() % acts_a.len()];
-        let act_b = &acts_b[next_rng() % acts_b.len()];
-
-        let sample_a = (type_a.to_string(), act_a.0.clone(), act_a.1.clone());
-        let sample_b = (type_b.to_string(), act_b.0.clone(), act_b.1.clone());
-
-        let result = llm_compare(&sample_a, &sample_b, model);
-
-        let rating_a = EloRating { rating: ratings[type_a] };
-        let rating_b = EloRating { rating: ratings[type_b] };
-
-        let outcome = match result {
-            Ok(std::cmp::Ordering::Less) => Outcomes::WIN,     // A wins
-            Ok(std::cmp::Ordering::Greater) => Outcomes::LOSS, // B wins
-            _ => Outcomes::WIN, // default to A
-        };
-
-        let (new_a, new_b) = elo(&rating_a, &rating_b, &outcome, &config);
-        ratings.insert(type_a.to_string(), new_a.rating);
-        ratings.insert(type_b.to_string(), new_b.rating);
-
-        eprint!("  matchup {}/{}: {} vs {} → {}\r",
-            i + 1, matchups, type_a, type_b,
-            if matches!(outcome, Outcomes::WIN) { type_a } else { type_b });
-    }
-    eprintln!();
-
-    // Save updated ratings
-    save_elo_ratings(&ratings);
-
-    // Print rankings
-    let mut ranked: Vec<_> = ratings.iter().collect();
-    ranked.sort_by(|a, b| b.1.total_cmp(a.1));
-
-    println!("\nAgent Elo Ratings (after {} matchups):\n", matchups);
-    for (agent_type, rating) in &ranked {
-        let bar_len = ((*rating - 800.0) / 10.0).max(0.0) as usize;
-        let bar = "#".repeat(bar_len.min(40));
-        println!("  {:12} {:7.1}  {}", agent_type, rating, bar);
-    }
-
-    Ok(())
-}
-
-fn build_compare_prompt(
-    a: &(String, String, String),
-    b: &(String, String, String),
-) -> String {
-    if a.0 == b.0 {
-        // Same agent type — show instructions once
-        // Split context at "## Report output" to extract shared prompt
-        let split_a: Vec<&str> = a.2.splitn(2, "## Report output").collect();
-        let split_b: Vec<&str> = b.2.splitn(2, "## Report output").collect();
-        let shared_prompt = split_a.first().unwrap_or(&"");
-        let report_a = split_a.get(1).unwrap_or(&"");
-        let report_b = split_b.get(1).unwrap_or(&"");
-        format!(
-            "Compare two actions from the same {} agent. Which was better?\n\n\
-             {}\n\n\
-             ## Action A\n## Report output{}\n\n\
-             ## Action B\n## Report output{}\n\n\
-             Say which is better and why in 1-2 sentences, then end with:\n\
-             BETTER: A or BETTER: B\n\
-             You must pick one. No ties.",
-            a.0, shared_prompt, report_a, report_b
-        )
-    } else {
-        format!(
-            "Compare these two memory graph agent actions. Which one was better \
-             for building a useful, well-organized knowledge graph?\n\n\
-             ## Action A ({} agent)\n{}\n\n\
-             ## Action B ({} agent)\n{}\n\n\
-             Say which is better and why in 1-2 sentences, then end with:\n\
-             BETTER: A or BETTER: B\n\
-             You must pick one. No ties.",
-            a.0, a.2, b.0, b.2
-        )
-    }
-}
-
-fn llm_compare(
-    a: &(String, String, String),
-    b: &(String, String, String),
-    model: &str,
-) -> Result<std::cmp::Ordering, String> {
-    let prompt = build_compare_prompt(a, b);
-
-    let _ = model; // model selection handled by API backend config
-    let response = crate::agent::oneshot::call_api_with_tools_sync(
-        "compare", &[prompt], &[], None, 10, &[], None)?;
-    let response = response.trim().to_uppercase();
-
-    if response.contains("BETTER: B") {
-        Ok(std::cmp::Ordering::Greater)
-    } else {
-        // Default to A (includes "BETTER: A" and any unparseable response)
-        Ok(std::cmp::Ordering::Less)
-    }
-}

@@ -202,17 +202,6 @@ pub fn cmd_link_impact(source: &str, target: &str) -> Result<(), String> {
     Ok(())
 }

-pub fn cmd_link_audit(apply: bool) -> Result<(), String> {
-    let mut store = store::Store::load()?;
-    let stats = crate::audit::link_audit(&mut store, apply)?;
-    println!("\n{}", "=".repeat(60));
-    println!("Link audit complete:");
-    println!("  Kept: {}  Deleted: {}  Retargeted: {}  Weakened: {}  Strengthened: {}  Errors: {}",
-        stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened, stats.errors);
-    println!("{}", "=".repeat(60));
-    Ok(())
-}
-
 pub fn cmd_trace(key: &[String]) -> Result<(), String> {
     if key.is_empty() {
         return Err("trace requires a key".into());
|
|
@@ -90,48 +90,21 @@ pub fn find_current_transcript() -> Option<String> {
    newest.map(|(_, p)| p.to_string_lossy().to_string())
}

fn journal_tail_entries(store: &crate::store::Store, n: usize, full: bool) -> Result<(), String> {
    let date_re = regex::Regex::new(r"(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2})").unwrap();
    let key_date_re = regex::Regex::new(r"j-(\d{4}-\d{2}-\d{2}[t-]\d{2}-\d{2})").unwrap();
fn journal_tail_query(store: &crate::store::Store, query: &str, n: usize, full: bool) -> Result<(), String> {
    let graph = store.build_graph();
    let stages = crate::query_parser::parse_stages(query)?;
    let results = crate::search::run_query(&stages, vec![], &graph, store, false, n);

    let normalize_date = |s: &str| -> String {
        let s = s.replace('t', "T");
        if s.len() >= 16 {
            format!("{}T{}", &s[..10], s[11..].replace('-', ":"))
    // Query sorts desc and limits, so reverse to show oldest-to-newest
    for (key, _score) in results.into_iter().rev() {
        let Some(node) = store.nodes.get(&key) else { continue };
        let ts = if node.created_at > 0 {
            crate::store::format_datetime(node.created_at)
        } else if node.timestamp > 0 {
            crate::store::format_datetime(node.timestamp)
        } else {
            s
        }
    };

    let extract_sort = |node: &crate::store::Node| -> (i64, String) {
        if node.created_at > 0 {
            return (node.created_at, crate::store::format_datetime(node.created_at));
        }
        if let Some(caps) = key_date_re.captures(&node.key) {
            return (0, normalize_date(&caps[1]));
        }
        if let Some(caps) = date_re.captures(&node.content) {
            return (0, normalize_date(&caps[1]));
        }
        (node.timestamp, crate::store::format_datetime(node.timestamp))
    };

    let mut journal: Vec<_> = store.nodes.values()
        .filter(|node| node.node_type == crate::store::NodeType::EpisodicSession)
        .collect();
    journal.sort_by(|a, b| {
        let (at, as_) = extract_sort(a);
        let (bt, bs) = extract_sort(b);
        if at > 0 && bt > 0 {
            at.cmp(&bt)
        } else {
            as_.cmp(&bs)
        }
    });

    let skip = if journal.len() > n { journal.len() - n } else { 0 };
    for node in journal.iter().skip(skip) {
        let (_, ts) = extract_sort(node);
            node.key.clone()
        };
        let title = extract_title(&node.content);
        if full {
            println!("--- [{}] {} ---\n{}\n", ts, title, node.content);
@@ -142,44 +115,14 @@ fn journal_tail_entries(store: &crate::store::Store, n: usize, full: bool) -> Re
    Ok(())
}

fn journal_tail_digests(store: &crate::store::Store, node_type: crate::store::NodeType, n: usize, full: bool) -> Result<(), String> {
    let mut digests: Vec<_> = store.nodes.values()
        .filter(|node| node.node_type == node_type)
        .collect();
    digests.sort_by(|a, b| {
        if a.timestamp > 0 && b.timestamp > 0 {
            a.timestamp.cmp(&b.timestamp)
        } else {
            a.key.cmp(&b.key)
        }
    });

    let skip = if digests.len() > n { digests.len() - n } else { 0 };
    for node in digests.iter().skip(skip) {
        let label = &node.key;
        let title = extract_title(&node.content);
        if full {
            println!("--- [{}] {} ---\n{}\n", label, title, node.content);
        } else {
            println!("[{}] {}", label, title);
        }
    }
    Ok(())
}

pub fn cmd_journal_tail(n: usize, full: bool, level: u8) -> Result<(), String> {
    let store = crate::store::Store::load()?;

    if level == 0 {
        journal_tail_entries(&store, n, full)
    } else {
        let node_type = match level {
            1 => crate::store::NodeType::EpisodicDaily,
            2 => crate::store::NodeType::EpisodicWeekly,
            _ => crate::store::NodeType::EpisodicMonthly,
        };
        journal_tail_digests(&store, node_type, n, full)
    }
    let query = format!("all | type:{} | sort:timestamp | limit:{}",
        match level { 0 => "episodic", 1 => "daily", 2 => "weekly", _ => "monthly" },
        n
    );
    journal_tail_query(&store, &query, n, full)
}
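// Illustrative query strings produced by the format! above:
//   cmd_journal_tail(5, false, 0) builds "all | type:episodic | sort:timestamp | limit:5"
//   cmd_journal_tail(3, true, 2)  builds "all | type:weekly | sort:timestamp | limit:3"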

pub fn cmd_journal_write(name: &str, text: &[String]) -> Result<(), String> {
@@ -3,25 +3,26 @@

pub fn cmd_search(terms: &[String], pipeline_args: &[String], expand: bool, full: bool, debug: bool, fuzzy: bool, content: bool) -> Result<(), String> {
    use std::collections::BTreeMap;
    use crate::search::{Stage, Algorithm, AlgoStage};

    // When running inside an agent session, exclude already-surfaced nodes
    let seen = crate::session::HookSession::from_env()
        .map(|s| s.seen())
        .unwrap_or_default();

    // Parse pipeline stages (unified: algorithms, filters, transforms, generators)
    let stages: Vec<crate::search::Stage> = if pipeline_args.is_empty() {
        vec![crate::search::Stage::Algorithm(crate::search::AlgoStage::parse("spread").unwrap())]
    // Build pipeline: if args provided, parse them; otherwise default to spread
    let stages: Vec<Stage> = if pipeline_args.is_empty() {
        vec![Stage::Algorithm(AlgoStage { algo: Algorithm::Spread, params: std::collections::HashMap::new() })]
    } else {
        pipeline_args.iter()
            .map(|a| crate::search::Stage::parse(a))
            .collect::<Result<Vec<_>, _>>()?
        // Join args with | and parse as unified query
        let pipeline_str = format!("all | {}", pipeline_args.join(" | "));
        crate::query_parser::parse_stages(&pipeline_str)?
    };

    // Check if pipeline needs full Store (has filters/transforms/generators)
    let needs_store = stages.iter().any(|s| !matches!(s, crate::search::Stage::Algorithm(_)));
    let needs_store = stages.iter().any(|s| !matches!(s, Stage::Algorithm(_)));
    // Check if pipeline starts with a generator (doesn't need seed terms)
    let has_generator = stages.first().map(|s| matches!(s, crate::search::Stage::Generator(_))).unwrap_or(false);
    let has_generator = stages.first().map(|s| matches!(s, Stage::Generator(_))).unwrap_or(false);

    if terms.is_empty() && !has_generator {
        return Err("search requires terms or a generator stage (e.g. 'all')".into());
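// Sketch of the unified path above: explicit pipeline args are joined with
// " | " behind an "all" generator, so
//   pipeline_args = ["type:semantic", "limit:10"]
// parses as the query "all | type:semantic | limit:10".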
@@ -354,8 +354,6 @@ pub struct AppConfig {
    pub dmn: DmnConfig,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub memory_project: Option<PathBuf>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system_prompt_file: Option<PathBuf>,
    #[serde(default)]
    pub models: HashMap<String, ModelConfig>,
    #[serde(default = "default_model_name")]

@@ -469,7 +467,6 @@ impl Default for AppConfig {
            },
            dmn: DmnConfig { max_turns: 20 },
            memory_project: None,
            system_prompt_file: None,
            models: HashMap::new(),
            default_model: String::new(),
            mcp_servers: Vec::new(),

@@ -486,7 +483,6 @@ pub struct SessionConfig {
    pub api_key: String,
    pub model: String,
    pub prompt_file: String,
    pub system_prompt: String,
    /// Identity/personality files as (name, content) pairs.
    pub context_parts: Vec<(String, String)>,
    pub config_file_count: usize,

@@ -539,16 +535,8 @@ impl AppConfig {

        let context_groups = get().context_groups.clone();

        let (system_prompt, context_parts, config_file_count, memory_file_count) =
            if let Some(ref path) = cli.system_prompt_file.as_ref().or(self.system_prompt_file.as_ref()) {
                let content = std::fs::read_to_string(path)
                    .with_context(|| format!("Failed to read {}", path.display()))?;
                (content, Vec::new(), 0, 0)
            } else {
                let system_prompt = crate::mind::identity::assemble_system_prompt();
                let (context_parts, cc, mc) = crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;
                (system_prompt, context_parts, cc, mc)
            };
        let (context_parts, config_file_count, memory_file_count) =
            crate::mind::identity::assemble_context_message(&cwd, &prompt_file, self.memory_project.as_deref(), &context_groups)?;

        let session_dir = dirs::home_dir()
            .unwrap_or_else(|| PathBuf::from("."))

@@ -561,7 +549,7 @@ impl AppConfig {

        Ok(SessionConfig {
            api_base, api_key, model, prompt_file,
            system_prompt, context_parts,
            context_parts,
            config_file_count, memory_file_count,
            session_dir,
            app: self.clone(),

@@ -663,7 +651,6 @@ fn build_figment(cli: &crate::user::CliArgs) -> Figment {
    merge_opt!(f, cli.model, "anthropic.model", "openrouter.model");
    merge_opt!(f, cli.api_key, "anthropic.api_key", "openrouter.api_key");
    merge_opt!(f, cli.api_base, "anthropic.base_url", "openrouter.base_url");
    merge_opt!(f, cli.system_prompt_file, "system_prompt_file");
    merge_opt!(f, cli.memory_project, "memory_project");
    merge_opt!(f, cli.dmn_max_turns, "dmn.max_turns");
    if cli.debug {

@@ -687,20 +674,12 @@ pub fn load_session(cli: &crate::user::CliArgs) -> Result<(SessionConfig, Figmen
    Ok((config, figment))
}

/// Re-assemble prompts for a specific model's prompt file.
pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<(String, Vec<(String, String)>)> {
/// Re-assemble context for a specific model's prompt file.
pub fn reload_for_model(app: &AppConfig, prompt_file: &str) -> Result<Vec<(String, String)>> {
    let cwd = std::env::current_dir().context("Failed to get current directory")?;

    if let Some(ref path) = app.system_prompt_file {
        let content = std::fs::read_to_string(path)
            .with_context(|| format!("Failed to read {}", path.display()))?;
        return Ok((content, Vec::new()));
    }

    let system_prompt = crate::mind::identity::assemble_system_prompt();
    let context_groups = get().context_groups.clone();
    let (context_parts, _, _) = crate::mind::identity::assemble_context_message(&cwd, prompt_file, app.memory_project.as_deref(), &context_groups)?;
    Ok((system_prompt, context_parts))
    Ok(context_parts)
}

pub fn show_config(app: &AppConfig, figment: &Figment) {

@@ -732,9 +711,6 @@ pub fn show_config(app: &AppConfig, figment: &Figment) {
    println!(" soft_threshold_pct: {} ({})", app.compaction.soft_threshold_pct, src(figment, "compaction.soft_threshold_pct"));
    println!("\ndmn:");
    println!(" max_turns: {} ({})", app.dmn.max_turns, src(figment, "dmn.max_turns"));
    if let Some(ref p) = app.system_prompt_file {
        println!("\nsystem_prompt_file: {:?} ({})", p, src(figment, "system_prompt_file"));
    }
    if let Some(ref p) = app.memory_project {
        println!("\nmemory_project: {:?} ({})", p, src(figment, "memory_project"));
    }
@@ -1,315 +0,0 @@
// Spatial memory cursor — a persistent pointer into the knowledge graph.
//
// The cursor maintains a "you are here" position that persists across
// sessions. Navigation moves through three dimensions:
// - Temporal: forward/back among same-type nodes by timestamp
// - Hierarchical: up/down the digest tree (journal→daily→weekly→monthly)
// - Spatial: sideways along graph edges to linked nodes
//
// This is the beginning of place cells — the hippocampus doesn't just
// store, it maintains a map. The cursor is the map's current position.
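// Illustrative session, assuming the `poc-memory cursor` CLI wired up in
// main.rs (the key shown is a hypothetical journal key):
//   poc-memory cursor set j-2026-03-13t09-15   # place the cursor
//   poc-memory cursor up                       # journal -> daily digest
//   poc-memory cursor forward                  # next node of the same type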
use crate::store::{self, Node, Store};

use std::path::PathBuf;

fn cursor_path() -> PathBuf {
    store::memory_dir().join("cursor")
}

/// Read the current cursor position (node key), if any.
pub fn get() -> Option<String> {
    std::fs::read_to_string(cursor_path())
        .ok()
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty())
}

/// Set the cursor to a node key.
pub fn set(key: &str) -> Result<(), String> {
    std::fs::write(cursor_path(), format!("{}\n", key))
        .map_err(|e| format!("write cursor: {}", e))
}

/// Clear the cursor.
pub fn clear() -> Result<(), String> {
    let p = cursor_path();
    if p.exists() {
        std::fs::remove_file(&p)
            .map_err(|e| format!("clear cursor: {}", e))?;
    }
    Ok(())
}

/// Temporal neighbors: nodes of the same type, sorted by timestamp.
/// Returns (prev, next) keys relative to the given node.
pub(crate) fn temporal_neighbors(store: &Store, key: &str) -> (Option<String>, Option<String>) {
    let Some(node) = store.nodes.get(key) else { return (None, None) };
    let node_type = node.node_type;

    let mut same_type: Vec<(&str, i64)> = store.nodes.iter()
        .filter(|(_, n)| !n.deleted && n.node_type == node_type && n.timestamp > 0)
        .map(|(k, n)| (k.as_str(), n.timestamp))
        .collect();
    same_type.sort_by_key(|(_, t)| *t);

    let pos = same_type.iter().position(|(k, _)| *k == key);
    let prev = pos.and_then(|i| if i > 0 { Some(same_type[i - 1].0.to_string()) } else { None });
    let next = pos.and_then(|i| same_type.get(i + 1).map(|(k, _)| k.to_string()));

    (prev, next)
}

/// Digest hierarchy: find the parent digest for a node.
/// Journal → daily, daily → weekly, weekly → monthly.
pub(crate) fn digest_parent(store: &Store, key: &str) -> Option<String> {
    let node = store.nodes.get(key)?;

    let parent_type = match node.node_type {
        store::NodeType::EpisodicSession => store::NodeType::EpisodicDaily,
        store::NodeType::EpisodicDaily => store::NodeType::EpisodicWeekly,
        store::NodeType::EpisodicWeekly => store::NodeType::EpisodicMonthly,
        _ => return None,
    };

    // Look for structural links first (digest:structural provenance)
    for r in &store.relations {
        if r.deleted { continue; }
        if r.source_key == key
            && let Some(target) = store.nodes.get(&r.target_key)
            && target.node_type == parent_type {
            return Some(r.target_key.clone());
        }
    }

    // Fallback: match by date for journal→daily
    if node.node_type == store::NodeType::EpisodicSession {
        // Try extracting date from timestamp first, then from key
        let mut dates = Vec::new();
        if node.timestamp > 0 {
            dates.push(store::format_date(node.timestamp));
        }
        // Extract date from created_at timestamp
        if node.created_at > 0 {
            let created_date = store::format_date(node.created_at);
            if !dates.contains(&created_date) {
                dates.push(created_date);
            }
        }
        for date in &dates {
            for prefix in [&format!("daily-{}", date), &format!("digest#daily#{}", date)] {
                for (k, n) in &store.nodes {
                    if !n.deleted && n.node_type == parent_type && k.starts_with(prefix.as_str()) {
                        return Some(k.clone());
                    }
                }
            }
        }
    }

    None
}

/// Digest children: find nodes that feed into this digest.
/// Monthly → weeklies, weekly → dailies, daily → journal entries.
pub(crate) fn digest_children(store: &Store, key: &str) -> Vec<String> {
    let Some(node) = store.nodes.get(key) else { return vec![] };

    let child_type = match node.node_type {
        store::NodeType::EpisodicDaily => store::NodeType::EpisodicSession,
        store::NodeType::EpisodicWeekly => store::NodeType::EpisodicDaily,
        store::NodeType::EpisodicMonthly => store::NodeType::EpisodicWeekly,
        _ => return vec![],
    };

    // Look for structural links (source → this digest)
    let mut children: Vec<(String, i64)> = Vec::new();
    for r in &store.relations {
        if r.deleted { continue; }
        if r.target_key == key
            && let Some(source) = store.nodes.get(&r.source_key)
            && source.node_type == child_type {
            children.push((r.source_key.clone(), source.timestamp));
        }
    }

    // Fallback for daily → journal: extract date from key and match
    if children.is_empty() && node.node_type == store::NodeType::EpisodicDaily {
        // Extract date from keys like "daily-2026-03-13" or "daily-2026-03-13-suffix"
        let date = key.strip_prefix("daily-")
            .or_else(|| key.strip_prefix("digest#daily#"))
            .and_then(|rest| rest.get(..10)); // "YYYY-MM-DD"
        if let Some(date) = date {
            for (k, n) in &store.nodes {
                if n.deleted { continue; }
                if n.node_type == store::NodeType::EpisodicSession
                    && n.timestamp > 0
                    && store::format_date(n.timestamp) == date
                {
                    children.push((k.clone(), n.timestamp));
                }
            }
        }
    }

    children.sort_by_key(|(_, t)| *t);
    children.into_iter().map(|(k, _)| k).collect()
}

/// Graph neighbors sorted by edge strength.
pub(crate) fn graph_neighbors(store: &Store, key: &str) -> Vec<(String, f32)> {
    let mut neighbors: Vec<(String, f32)> = Vec::new();
    for r in &store.relations {
        if r.deleted { continue; }
        if r.source_key == key {
            neighbors.push((r.target_key.clone(), r.strength));
        } else if r.target_key == key {
            neighbors.push((r.source_key.clone(), r.strength));
        }
    }
    neighbors.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    neighbors.dedup_by(|a, b| a.0 == b.0);
    neighbors
}

/// Format a one-line summary of a node for context display.
fn node_summary(node: &Node) -> String {
    let ts = if node.timestamp > 0 {
        store::format_datetime(node.timestamp)
    } else {
        "no-date".to_string()
    };
    let type_tag = match node.node_type {
        store::NodeType::EpisodicSession => "journal",
        store::NodeType::EpisodicDaily => "daily",
        store::NodeType::EpisodicWeekly => "weekly",
        store::NodeType::EpisodicMonthly => "monthly",
        store::NodeType::Semantic => "semantic",
    };
    // First line of content, truncated
    let first_line = node.content.lines().next().unwrap_or("")
        .chars().take(80).collect::<String>();
    format!("[{}] ({}) {}", ts, type_tag, first_line)
}

/// Display the cursor position with full context.
pub fn show(store: &Store) -> Result<(), String> {
    let key = get().ok_or_else(|| "No cursor set. Use `poc-memory cursor set KEY`".to_string())?;
    let node = store.nodes.get(&key)
        .ok_or_else(|| format!("Cursor points to missing node: {}", key))?;

    // Header
    let type_tag = match node.node_type {
        store::NodeType::EpisodicSession => "journal",
        store::NodeType::EpisodicDaily => "daily",
        store::NodeType::EpisodicWeekly => "weekly",
        store::NodeType::EpisodicMonthly => "monthly",
        store::NodeType::Semantic => "semantic",
    };
    if node.timestamp > 0 {
        eprintln!("@ {} [{}]", key, type_tag);
        eprintln!(" {}", store::format_datetime(node.timestamp));
    } else {
        eprintln!("@ {} [{}]", key, type_tag);
    }

    // Temporal context
    let (prev, next) = temporal_neighbors(store, &key);
    eprintln!();
    if let Some(ref p) = prev
        && let Some(pn) = store.nodes.get(p) {
        eprintln!(" ← {}", node_summary(pn));
        eprintln!(" `cursor back`");
    }
    if let Some(ref n) = next
        && let Some(nn) = store.nodes.get(n) {
        eprintln!(" → {}", node_summary(nn));
        eprintln!(" `cursor forward`");
    }

    // Hierarchy
    if let Some(ref parent) = digest_parent(store, &key)
        && let Some(pn) = store.nodes.get(parent) {
        eprintln!(" ↑ {}", node_summary(pn));
        eprintln!(" `cursor up`");
    }
    let children = digest_children(store, &key);
    if !children.is_empty() {
        let count = children.len();
        if let Some(first) = children.first().and_then(|k| store.nodes.get(k)) {
            eprintln!(" ↓ {} children — first: {}", count, node_summary(first));
            eprintln!(" `cursor down`");
        }
    }

    // Graph neighbors (non-temporal)
    let neighbors = graph_neighbors(store, &key);
    let semantic: Vec<_> = neighbors.iter()
        .filter(|(k, _)| {
            store.nodes.get(k)
                .map(|n| n.node_type == store::NodeType::Semantic)
                .unwrap_or(false)
        })
        .take(8)
        .collect();
    if !semantic.is_empty() {
        eprintln!();
        eprintln!(" Linked:");
        for (k, strength) in &semantic {
            eprintln!(" [{:.1}] {}", strength, k);
        }
    }

    eprintln!();
    eprintln!("---");

    // Content
    print!("{}", node.content);

    Ok(())
}

/// Move cursor in a temporal direction.
pub fn move_temporal(store: &Store, forward: bool) -> Result<(), String> {
    let key = get().ok_or("No cursor set")?;
    let _ = store.nodes.get(&key)
        .ok_or_else(|| format!("Cursor points to missing node: {}", key))?;

    let (prev, next) = temporal_neighbors(store, &key);
    let target = if forward { next } else { prev };
    match target {
        Some(k) => {
            set(&k)?;
            show(store)
        }
        None => {
            let dir = if forward { "forward" } else { "back" };
            Err(format!("No {} neighbor from {}", dir, key))
        }
    }
}

/// Move cursor up the digest hierarchy.
pub fn move_up(store: &Store) -> Result<(), String> {
    let key = get().ok_or("No cursor set")?;
    match digest_parent(store, &key) {
        Some(parent) => {
            set(&parent)?;
            show(store)
        }
        None => Err(format!("No parent digest for {}", key)),
    }
}

/// Move cursor down the digest hierarchy (to first child).
pub fn move_down(store: &Store) -> Result<(), String> {
    let key = get().ok_or("No cursor set")?;
    let children = digest_children(store, &key);
    match children.first() {
        Some(child) => {
            set(child)?;
            show(store)
        }
        None => Err(format!("No children for {}", key)),
    }
}

@@ -9,7 +9,6 @@ pub mod memory;
pub mod store;
pub mod graph;
pub mod lookups;
pub mod cursor;
pub mod query;
pub mod spectral;
pub mod neuro;
@@ -157,6 +157,9 @@ pub enum Filter {
pub enum Transform {
    Sort(SortField),
    Limit(usize),
    Select(Vec<String>),
    Count,
    Connectivity,
    DominatingSet,
}

@@ -168,6 +171,8 @@ pub enum SortField {
    Degree,
    Weight,
    Isolation,
    Key,
    Named(String, bool), // (field_name, ascending)
    Composite(Vec<(ScoreField, f64)>),
}

@@ -206,79 +211,6 @@ impl Cmp {
    }
}

/// Parse a comparison like ">0.5", ">=60", "<7d" (durations converted to seconds).
fn parse_cmp(s: &str) -> Result<Cmp, String> {
    let (op_len, ctor): (usize, fn(f64) -> Cmp) = if s.starts_with(">=") {
        (2, Cmp::Gte)
    } else if s.starts_with("<=") {
        (2, Cmp::Lte)
    } else if s.starts_with('>') {
        (1, Cmp::Gt)
    } else if s.starts_with('<') {
        (1, Cmp::Lt)
    } else if s.starts_with('=') {
        (1, Cmp::Eq)
    } else {
        return Err(format!("expected comparison operator in '{}'", s));
    };

    let val_str = &s[op_len..];
    let val = parse_duration_or_number(val_str)?;
    Ok(ctor(val))
}
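// Illustrative comparisons, assuming the operator table above:
//   parse_cmp(">=0.5") == Ok(Cmp::Gte(0.5))
//   parse_cmp("<7d")   == Ok(Cmp::Lt(604800.0)) // durations become seconds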

/// Parse "7d", "24h", "30m" as seconds, or plain numbers.
fn parse_duration_or_number(s: &str) -> Result<f64, String> {
    if let Some(n) = s.strip_suffix('d') {
        let v: f64 = n.parse().map_err(|_| format!("bad number: {}", n))?;
        Ok(v * 86400.0)
    } else if let Some(n) = s.strip_suffix('h') {
        let v: f64 = n.parse().map_err(|_| format!("bad number: {}", n))?;
        Ok(v * 3600.0)
    } else if let Some(n) = s.strip_suffix('m') {
        let v: f64 = n.parse().map_err(|_| format!("bad number: {}", n))?;
        Ok(v * 60.0)
    } else {
        s.parse().map_err(|_| format!("bad number: {}", s))
    }
}
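// Illustrative conversions:
//   parse_duration_or_number("24h") == Ok(86400.0)
//   parse_duration_or_number("30m") == Ok(1800.0)
//   parse_duration_or_number("42")  == Ok(42.0)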

/// Parse composite sort: "isolation*0.7+recency(linker)*0.3"
/// Each term is field or field(arg), optionally *weight (default 1.0).
fn parse_composite_sort(s: &str) -> Result<Vec<(ScoreField, f64)>, String> {
    let mut terms = Vec::new();
    for term in s.split('+') {
        let term = term.trim();
        let (field_part, weight) = if let Some((f, w)) = term.rsplit_once('*') {
            (f, w.parse::<f64>().map_err(|_| format!("bad weight: {}", w))?)
        } else {
            (term, 1.0)
        };

        // Parse field, possibly with (arg)
        let field = if let Some((name, arg)) = field_part.split_once('(') {
            let arg = arg.strip_suffix(')').ok_or("missing ) in sort field")?;
            match name {
                "recency" => ScoreField::Recency(arg.to_string()),
                _ => return Err(format!("unknown parameterized sort field: {}", name)),
            }
        } else {
            match field_part {
                "isolation" => ScoreField::Isolation,
                "degree" => ScoreField::Degree,
                "weight" => ScoreField::Weight,
                "content-len" => ScoreField::ContentLen,
                "priority" => ScoreField::Priority,
                _ => return Err(format!("unknown sort field: {}", field_part)),
            }
        };
        terms.push((field, weight));
    }
    if terms.is_empty() {
        return Err("empty composite sort".into());
    }
    Ok(terms)
}
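// Illustrative parse, per the doc comment above:
//   parse_composite_sort("isolation*0.7+recency(linker)*0.3")
//   == Ok(vec![(ScoreField::Isolation, 0.7),
//              (ScoreField::Recency("linker".into()), 0.3)])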

/// Compute a 0-1 score for a node on a single dimension.
fn score_field(

@@ -348,129 +280,6 @@ impl CompositeCache {
    }
}

/// Parse a NodeType from a label.
fn parse_node_type(s: &str) -> Result<NodeType, String> {
    match s {
        "episodic" | "session" => Ok(NodeType::EpisodicSession),
        "daily" => Ok(NodeType::EpisodicDaily),
        "weekly" => Ok(NodeType::EpisodicWeekly),
        "monthly" => Ok(NodeType::EpisodicMonthly),
        "semantic" => Ok(NodeType::Semantic),
        _ => Err(format!("unknown node type: {} (use: episodic, semantic, daily, weekly, monthly)", s)),
    }
}

impl Stage {
    /// Parse a single stage from a string.
    ///
    /// Algorithm names are tried first (bare words), then predicate syntax
    /// (contains ':'). No ambiguity since algorithms are bare words.
    pub fn parse(s: &str) -> Result<Self, String> {
        let s = s.trim();
        let (negated, s) = if let Some(rest) = s.strip_prefix('!') {
            (true, rest)
        } else {
            (false, s)
        };

        // Generator: "all"
        if s == "all" {
            return Ok(Stage::Generator(Generator::All));
        }

        // Transform: "dominating-set"
        if s == "dominating-set" {
            return Ok(Stage::Transform(Transform::DominatingSet));
        }

        // Try algorithm parse first (bare words, no colon)
        if !s.contains(':')
            && let Ok(algo) = AlgoStage::parse(s) {
            return Ok(Stage::Algorithm(algo));
        }

        // Algorithm with params: "spread,max_hops=4" (contains comma but no colon)
        if s.contains(',') && !s.contains(':') {
            return AlgoStage::parse(s).map(Stage::Algorithm);
        }

        // Predicate/transform syntax: "key:value"
        let (prefix, value) = s.split_once(':')
            .ok_or_else(|| format!("unknown stage: {}", s))?;

        let filter_or_transform = match prefix {
            "type" => Stage::Filter(Filter::Type(parse_node_type(value)?)),
            "key" => Stage::Filter(Filter::KeyGlob(value.to_string())),
            "weight" => Stage::Filter(Filter::Weight(parse_cmp(value)?)),
            "age" => Stage::Filter(Filter::Age(parse_cmp(value)?)),
            "content-len" => Stage::Filter(Filter::ContentLen(parse_cmp(value)?)),
            "provenance" => {
                Stage::Filter(Filter::Provenance(value.to_string()))
            }
            "not-visited" => {
                let (agent, dur) = value.split_once(',')
                    .ok_or("not-visited:AGENT,DURATION")?;
                let secs = parse_duration_or_number(dur)?;
                Stage::Filter(Filter::NotVisited {
                    agent: agent.to_string(),
                    duration: secs as i64,
                })
            }
            "visited" => Stage::Filter(Filter::Visited {
                agent: value.to_string(),
            }),
            "sort" => {
                // Check for composite sort: field*weight+field*weight+...
                let field = if value.contains('+') || value.contains('*') {
                    SortField::Composite(parse_composite_sort(value)?)
                } else {
                    match value {
                        "priority" => SortField::Priority,
                        "timestamp" => SortField::Timestamp,
                        "content-len" => SortField::ContentLen,
                        "degree" => SortField::Degree,
                        "weight" => SortField::Weight,
                        "isolation" => SortField::Isolation,
                        _ => return Err(format!("unknown sort field: {}", value)),
                    }
                };
                Stage::Transform(Transform::Sort(field))
            }
            "limit" => {
                let n: usize = value.parse()
                    .map_err(|_| format!("bad limit: {}", value))?;
                Stage::Transform(Transform::Limit(n))
            }
            "match" => {
                let terms: Vec<String> = value.split(',')
                    .map(|t| t.to_string())
                    .collect();
                Stage::Generator(Generator::Match(terms))
            }
            // Algorithm with colon in params? Try fallback.
            _ => return AlgoStage::parse(s).map(Stage::Algorithm)
                .map_err(|_| format!("unknown stage: {}", s)),
        };

        // Apply negation to filters
        if negated {
            match filter_or_transform {
                Stage::Filter(f) => Ok(Stage::Filter(Filter::Negated(Box::new(f)))),
                _ => Err("! prefix only works on filter stages".to_string()),
            }
        } else {
            Ok(filter_or_transform)
        }
    }

    /// Parse a pipe-separated pipeline string.
    pub fn parse_pipeline(s: &str) -> Result<Vec<Stage>, String> {
        s.split('|')
            .map(|part| Stage::parse(part.trim()))
            .collect()
    }
}
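// Illustrative pipeline, assuming the stage syntax parsed above:
//   Stage::parse_pipeline("all | type:semantic | !visited:linker | sort:isolation | limit:5")
// yields Generator(All), Filter(Type(Semantic)), Filter(Negated(Visited)),
// Transform(Sort(Isolation)), Transform(Limit(5)).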

impl fmt::Display for Stage {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {

@@ -479,6 +288,9 @@ impl fmt::Display for Stage {
            Stage::Filter(filt) => write!(f, "{}", filt),
            Stage::Transform(Transform::Sort(field)) => write!(f, "sort:{:?}", field),
            Stage::Transform(Transform::Limit(n)) => write!(f, "limit:{}", n),
            Stage::Transform(Transform::Select(fields)) => write!(f, "select:{}", fields.join(",")),
            Stage::Transform(Transform::Count) => write!(f, "count"),
            Stage::Transform(Transform::Connectivity) => write!(f, "connectivity"),
            Stage::Transform(Transform::DominatingSet) => write!(f, "dominating-set"),
            Stage::Algorithm(a) => write!(f, "{}", a.algo),
        }

@@ -613,7 +425,7 @@ fn run_generator(g: &Generator, store: &Store) -> Vec<(String, f64)> {
    }
}

fn eval_filter(filt: &Filter, key: &str, store: &Store, now: i64) -> bool {
pub fn eval_filter(filt: &Filter, key: &str, store: &Store, now: i64) -> bool {
    let node = match store.nodes.get(key) {
        Some(n) => n,
        None => return false,

@@ -686,6 +498,39 @@ pub fn run_transform(
                sb.total_cmp(&sa) // most isolated first
            });
        }
        SortField::Key => {
            items.sort_by(|a, b| a.0.cmp(&b.0));
        }
        SortField::Named(field, asc) => {
            // Resolve field from node properties
            let resolve = |key: &str| -> Option<f64> {
                let node = store.nodes.get(key)?;
                match field.as_str() {
                    "weight" => Some(node.weight as f64),
                    "emotion" => Some(node.emotion as f64),
                    "retrievals" => Some(node.retrievals as f64),
                    "uses" => Some(node.uses as f64),
                    "wrongs" => Some(node.wrongs as f64),
                    "created" => Some(node.created_at as f64),
                    "timestamp" => Some(node.timestamp as f64),
                    "degree" => Some(graph.degree(key) as f64),
                    "content_len" => Some(node.content.len() as f64),
                    _ => None,
                }
            };
            let asc = *asc;
            items.sort_by(|a, b| {
                let va = resolve(&a.0);
                let vb = resolve(&b.0);
                let ord = match (va, vb) {
                    (Some(a), Some(b)) => a.total_cmp(&b),
                    (Some(_), None) => std::cmp::Ordering::Less,
                    (None, Some(_)) => std::cmp::Ordering::Greater,
                    (None, None) => a.0.cmp(&b.0),
                };
                if asc { ord } else { ord.reverse() }
            });
        }
        SortField::Priority => {
            // Pre-compute priorities to avoid O(n log n) calls
            // inside the sort comparator.

@@ -725,6 +570,8 @@ pub fn run_transform(
            items.truncate(*n);
            items
        }
        // Output mode directives - don't modify result set, handled at output layer
        Transform::Select(_) | Transform::Count | Transform::Connectivity => items,
        Transform::DominatingSet => {
            // Greedy 3-covering dominating set: pick the node that covers
            // the most under-covered neighbors, repeat until every node

@@ -26,6 +26,12 @@ use crate::graph::Graph;
use regex::Regex;
use std::collections::BTreeMap;

// Re-export engine types used by Query
pub use super::engine::{
    Stage, Filter, Transform, Generator, SortField,
    Algorithm, AlgoStage, Cmp,
};

// -- AST types --

#[derive(Debug, Clone)]

@@ -57,16 +63,6 @@ pub enum CmpOp {
    Gt, Lt, Ge, Le, Eq, Ne, Match,
}

#[derive(Debug, Clone)]
pub enum Stage {
    Sort { field: String, ascending: bool },
    Limit(usize),
    Select(Vec<String>),
    Count,
    Connectivity,
    DominatingSet,
}

#[derive(Debug, Clone)]
pub struct Query {
    pub expr: Expr,

@@ -86,18 +82,54 @@ peg::parser! {
        = s:(_ "|" _ s:stage() { s })* { s }

    rule stage() -> Stage
        = "sort" _ f:field() _ a:asc_desc() { Stage::Sort { field: f, ascending: a } }
        / "limit" _ n:integer() { Stage::Limit(n) }
        / "select" _ f:field_list() { Stage::Select(f) }
        / "count" { Stage::Count }
        / "connectivity" { Stage::Connectivity }
        / "dominating-set" { Stage::DominatingSet }
        // Original PEG syntax (space-separated)
        = "sort" _ f:field() _ a:asc_desc() {
            Stage::Transform(Transform::Sort(make_sort_field(&f, a)))
        }
        / "limit" _ n:integer() { Stage::Transform(Transform::Limit(n)) }
        / "select" _ f:field_list() { Stage::Transform(Transform::Select(f)) }
        / "count" { Stage::Transform(Transform::Count) }
        / "connectivity" { Stage::Transform(Transform::Connectivity) }
        / "dominating-set" { Stage::Transform(Transform::DominatingSet) }
        // Pipeline syntax (colon-separated)
        / "sort:" f:field() { Stage::Transform(Transform::Sort(make_sort_field(&f, false))) }
        / "limit:" n:integer() { Stage::Transform(Transform::Limit(n)) }
        / "select:" f:field_list_colon() { Stage::Transform(Transform::Select(f)) }
        / "type:" t:ident() { make_type_filter(&t) }
        / "age:" c:cmp_duration() { Stage::Filter(Filter::Age(c)) }
        / "key:" g:ident() { Stage::Filter(Filter::KeyGlob(g)) }
        / "provenance:" p:ident() { Stage::Filter(Filter::Provenance(p)) }
        / "all" { Stage::Generator(Generator::All) }
        // Graph algorithms
        / "spread" { Stage::Algorithm(AlgoStage { algo: Algorithm::Spread, params: std::collections::HashMap::new() }) }
        / "spectral" { Stage::Algorithm(AlgoStage { algo: Algorithm::Spectral, params: std::collections::HashMap::new() }) }

    rule asc_desc() -> bool
        = "asc" { true }
        / "desc" { false }
        / { false } // default: descending

    rule field_list_colon() -> Vec<String>
        = f:field() fs:("," f:field() { f })* {
            let mut v = vec![f];
            v.extend(fs);
            v
        }

    rule cmp_duration() -> Cmp
        = ">=" n:duration() { Cmp::Gte(n) }
        / "<=" n:duration() { Cmp::Lte(n) }
        / ">" n:duration() { Cmp::Gt(n) }
        / "<" n:duration() { Cmp::Lt(n) }
        / "=" n:duration() { Cmp::Eq(n) }

    rule duration() -> f64
        = n:number() "d" { n * 86400.0 }
        / n:number() "h" { n * 3600.0 }
        / n:number() "m" { n * 60.0 }
        / n:number() "s" { n }
        / n:number() { n }

    rule field_list() -> Vec<String>
        = f:field() fs:(_ "," _ f:field() { f })* {
            let mut v = vec![f];

@@ -122,6 +154,7 @@ peg::parser! {
            Expr::Comparison { field: f, op, value: v }
        }
        "*" { Expr::All }
        "all" { Expr::All }
        "(" _ e:expr() _ ")" { e }
    }

@@ -167,6 +200,55 @@ peg::parser! {
    }
}

// -- Helper functions for PEG grammar --

fn make_sort_field(field: &str, ascending: bool) -> SortField {
    match field {
        "priority" => SortField::Priority,
        "timestamp" => SortField::Timestamp,
        "content-len" | "content_len" => SortField::ContentLen,
        "degree" => SortField::Degree,
        "weight" => SortField::Weight,
        "isolation" => SortField::Isolation,
        "key" => SortField::Key,
        _ => SortField::Named(field.to_string(), ascending),
    }
}

fn make_type_filter(type_name: &str) -> Stage {
    let node_type = match type_name {
        "episodic" | "session" => NodeType::EpisodicSession,
        "daily" => NodeType::EpisodicDaily,
        "weekly" => NodeType::EpisodicWeekly,
        "monthly" => NodeType::EpisodicMonthly,
        "semantic" => NodeType::Semantic,
        _ => NodeType::Semantic, // fallback
    };
    Stage::Filter(Filter::Type(node_type))
}

/// Parse a query string into Vec<Stage> for pipeline execution.
/// This is the unified entry point — replaces engine::Stage::parse_pipeline.
pub fn parse_stages(s: &str) -> Result<Vec<Stage>, String> {
    let q = query_parser::query(s)
        .map_err(|e| format!("Parse error: {}", e))?;

    let mut stages = Vec::new();

    // Convert Expr to a Generator stage
    match &q.expr {
        Expr::All => stages.push(Stage::Generator(Generator::All)),
        _ => {
            // For complex expressions, we need the Query-based path.
            // This shouldn't happen for pipeline queries.
            return Err("Complex expressions not supported in pipeline mode; use CLI query".into());
        }
    }

    stages.extend(q.stages);
    Ok(stages)
}
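// Illustrative use of the unified entry point: the PEG stage syntax and the
// colon pipeline syntax now parse to the same stages, so
//   parse_stages("all | sort:timestamp | limit:5")
//   parse_stages("all | sort timestamp desc | limit 5")
// should each produce [Generator(All), Transform(Sort(Timestamp)), Transform(Limit(5))].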

// -- Field resolution --

/// Resolve a field value from a node + graph context, returning a comparable Value.

@@ -377,12 +459,12 @@ fn execute_parsed(
    let mut set = Vec::new();
    for stage in &q.stages {
        match stage {
            Stage::Select(fields) => {
            Stage::Transform(Transform::Select(fields)) => {
                for f in fields {
                    if !set.contains(f) { set.push(f.clone()); }
                }
            }
            Stage::Sort { field, .. } => {
            Stage::Transform(Transform::Sort(SortField::Named(field, _))) => {
                if !set.contains(field) { set.push(field.clone()); }
            }
            _ => {}

@@ -404,37 +486,75 @@ fn execute_parsed(
    let mut has_sort = false;
    for stage in &q.stages {
        match stage {
            Stage::Sort { field, ascending } => {
            Stage::Transform(Transform::Sort(sort_field)) => {
                has_sort = true;
                let asc = *ascending;
                results.sort_by(|a, b| {
                    let va = a.fields.get(field).and_then(as_num);
                    let vb = b.fields.get(field).and_then(as_num);
                    let ord = match (va, vb) {
                        (Some(a), Some(b)) => a.total_cmp(&b),
                        _ => {
                            let sa = a.fields.get(field).map(as_str).unwrap_or_default();
                            let sb = b.fields.get(field).map(as_str).unwrap_or_default();
                            sa.cmp(&sb)
                        }
                    };
                    if asc { ord } else { ord.reverse() }
                });
                match sort_field {
                    SortField::Named(field, asc) => {
                        let asc = *asc;
                        let field = field.clone();
                        results.sort_by(|a, b| {
                            let va = a.fields.get(&field).and_then(as_num);
                            let vb = b.fields.get(&field).and_then(as_num);
                            let ord = match (va, vb) {
                                (Some(a), Some(b)) => a.total_cmp(&b),
                                _ => {
                                    let sa = a.fields.get(&field).map(as_str).unwrap_or_default();
                                    let sb = b.fields.get(&field).map(as_str).unwrap_or_default();
                                    sa.cmp(&sb)
                                }
                            };
                            if asc { ord } else { ord.reverse() }
                        });
                    }
                    SortField::Key => {
                        results.sort_by(|a, b| a.key.cmp(&b.key));
                    }
                    SortField::Degree => {
                        results.sort_by(|a, b| {
                            let da = graph.degree(&a.key);
                            let db = graph.degree(&b.key);
                            db.cmp(&da)
                        });
                    }
                    SortField::Weight => {
                        results.sort_by(|a, b| {
                            let wa = store.nodes.get(&a.key).map(|n| n.weight).unwrap_or(0.0);
                            let wb = store.nodes.get(&b.key).map(|n| n.weight).unwrap_or(0.0);
                            wb.total_cmp(&wa)
                        });
                    }
                    SortField::Timestamp => {
                        results.sort_by(|a, b| {
                            let ta = store.nodes.get(&a.key).map(|n| n.timestamp).unwrap_or(0);
                            let tb = store.nodes.get(&b.key).map(|n| n.timestamp).unwrap_or(0);
                            tb.cmp(&ta)
                        });
                    }
                    _ => {} // other sort fields handled by default degree sort
                }
            }
            Stage::Limit(n) => {
            Stage::Transform(Transform::Limit(n)) => {
                results.truncate(*n);
            }
            Stage::Connectivity => {} // handled in output
            Stage::Select(_) | Stage::Count => {} // handled in output
            Stage::DominatingSet => {
            Stage::Transform(Transform::Connectivity) => {} // handled in output
            Stage::Transform(Transform::Select(_) | Transform::Count) => {} // handled in output
            Stage::Transform(Transform::DominatingSet) => {
                let mut items: Vec<(String, f64)> = results.iter()
                    .map(|r| (r.key.clone(), graph.degree(&r.key) as f64))
                    .collect();
                let xform = super::engine::Transform::DominatingSet;
                let xform = Transform::DominatingSet;
                items = super::engine::run_transform(&xform, items, store, graph);
                let keep: std::collections::HashSet<String> = items.into_iter().map(|(k, _)| k).collect();
                results.retain(|r| keep.contains(&r.key));
            }
            Stage::Filter(filt) => {
                // Apply filter to narrow results
                let now = crate::store::now_epoch();
                results.retain(|r| super::engine::eval_filter(filt, &r.key, store, now));
            }
            Stage::Generator(_) | Stage::Algorithm(_) => {
                // Generators are handled by Expr, algorithms not applicable here
            }
        }
    }

@@ -474,7 +594,7 @@ pub fn run_query(store: &Store, graph: &Graph, query_str: &str) -> Result<(), St
    let results = execute_parsed(store, graph, &q)?;

    // Count stage
    if q.stages.iter().any(|s| matches!(s, Stage::Count)) {
    if q.stages.iter().any(|s| matches!(s, Stage::Transform(Transform::Count))) {
        println!("{}", results.len());
        return Ok(());
    }

@@ -485,14 +605,14 @@ pub fn run_query(store: &Store, graph: &Graph, query_str: &str) -> Result<(), St
    }

    // Connectivity stage
    if q.stages.iter().any(|s| matches!(s, Stage::Connectivity)) {
    if q.stages.iter().any(|s| matches!(s, Stage::Transform(Transform::Connectivity))) {
        print_connectivity(&results, graph);
        return Ok(());
    }

    // Select stage
    let fields: Option<&Vec<String>> = q.stages.iter().find_map(|s| match s {
        Stage::Select(f) => Some(f),
        Stage::Transform(Transform::Select(f)) => Some(f),
        _ => None,
    });

@@ -527,7 +647,7 @@ pub fn query_to_string(store: &Store, graph: &Graph, query_str: &str) -> Result<

    let results = execute_parsed(store, graph, &q)?;

    if q.stages.iter().any(|s| matches!(s, Stage::Count)) {
    if q.stages.iter().any(|s| matches!(s, Stage::Transform(Transform::Count))) {
        return Ok(results.len().to_string());
    }
    if results.is_empty() {

@@ -535,7 +655,7 @@ pub fn query_to_string(store: &Store, graph: &Graph, query_str: &str) -> Result<
    }

    let fields: Option<&Vec<String>> = q.stages.iter().find_map(|s| match s {
        Stage::Select(f) => Some(f),
        Stage::Transform(Transform::Select(f)) => Some(f),
        _ => None,
    });
12
src/lib.rs

@@ -70,15 +70,9 @@ pub mod channel_capnp {

// Re-exports — all existing crate::X paths keep working
pub use hippocampus::{
    store, graph, lookups, cursor, query,
    store, graph, lookups, query,
    spectral, neuro, counters,
    transcript, memory,
};
pub use hippocampus::query::engine as search;
pub use hippocampus::query::parser as query_parser;

pub use subconscious as agents;
pub use subconscious::{
    audit, consolidate,
    digest,
};
use hippocampus::query::engine as search;
use hippocampus::query::parser as query_parser;

377
src/main.rs

@@ -203,12 +203,6 @@ EXAMPLES:
    #[command(subcommand, name = "graph")]
    GraphCmd(GraphCmd),

    // ── Cursor (spatial memory) ──────────────────────────────────────

    /// Navigate the memory graph with a persistent cursor
    #[command(subcommand)]
    Cursor(CursorCmd),

    // ── Agents ────────────────────────────────────────────────────────

    /// Agent and daemon operations

@@ -249,27 +243,6 @@ enum NodeCmd {
    Dump,
}

#[derive(Subcommand)]
enum CursorCmd {
    /// Show current cursor position with context
    Show,
    /// Set cursor to a node key
    Set {
        /// Node key
        key: Vec<String>,
    },
    /// Move cursor forward in time
    Forward,
    /// Move cursor backward in time
    Back,
    /// Move up the digest hierarchy (journal→daily→weekly→monthly)
    Up,
    /// Move down the digest hierarchy (to first child)
    Down,
    /// Clear the cursor
    Clear,
}

#[derive(Subcommand)]
enum JournalCmd {
    /// Write a journal entry to the store

@@ -291,16 +264,6 @@ enum JournalCmd {
        #[arg(long, default_value_t = 0)]
        level: u8,
    },
    /// Enrich journal entry with conversation links
    Enrich {
        /// Path to JSONL transcript
        jsonl_path: String,
        /// Journal entry text to enrich
        entry_text: String,
        /// Grep line number for source location
        #[arg(default_value_t = 0)]
        grep_line: usize,
    },
}

#[derive(Subcommand)]

@@ -346,13 +309,6 @@ enum GraphCmd {
        /// Target node key
        target: String,
    },
    /// Walk every link, send to Sonnet for quality review
    #[command(name = "link-audit")]
    LinkAudit {
        /// Apply changes (default: dry run)
        #[arg(long)]
        apply: bool,
    },
    /// Cap node degree by pruning weak auto edges
    #[command(name = "cap-degree")]
    CapDegree {

@@ -401,64 +357,6 @@ enum GraphCmd {

#[derive(Subcommand)]
enum AgentCmd {
    /// Run knowledge agents to convergence
    #[command(name = "knowledge-loop")]
    KnowledgeLoop {
        /// Maximum cycles before stopping
        #[arg(long, default_value_t = 20)]
        max_cycles: usize,
        /// Items per agent per cycle
        #[arg(long, default_value_t = 5)]
        batch_size: usize,
        /// Cycles to check for convergence
        #[arg(long, default_value_t = 5)]
        window: usize,
        /// Maximum inference depth
        #[arg(long, default_value_t = 4)]
        max_depth: i32,
    },
    /// Run agent consolidation on priority nodes
    #[command(name = "consolidate-batch")]
    ConsolidateBatch {
        /// Number of nodes to consolidate
        #[arg(long, default_value_t = 5)]
        count: usize,
        /// Generate replay agent prompt automatically
        #[arg(long)]
        auto: bool,
        /// Generate prompt for a specific agent (replay, linker, separator, transfer, health)
        #[arg(long)]
        agent: Option<String>,
    },
    /// Analyze metrics, plan agent allocation
    #[command(name = "consolidate-session")]
    ConsolidateSession,
    /// Autonomous: plan → agents → apply → digests → links
    #[command(name = "consolidate-full")]
    ConsolidateFull,
    /// Import pending agent results into the graph
    #[command(name = "apply-agent")]
    ApplyAgent {
        /// Process all files without moving to done/
        #[arg(long)]
        all: bool,
    },
    /// Extract and apply actions from consolidation reports
    #[command(name = "apply-consolidation")]
    ApplyConsolidation {
        /// Apply actions (default: dry run)
        #[arg(long)]
        apply: bool,
        /// Read from specific report file
        #[arg(long)]
        report: Option<String>,
    },
    /// Generate episodic digests (daily, weekly, monthly, auto)
    Digest {
        /// Digest type: daily, weekly, monthly, auto
        #[command(subcommand)]
        level: DigestLevel,
    },
    /// Parse and apply links from digest nodes
    #[command(name = "digest-links")]
    DigestLinks {

@@ -466,36 +364,6 @@ enum AgentCmd {
        #[arg(long)]
        apply: bool,
    },
    /// Mine conversation for experiential moments to journal
    #[command(name = "experience-mine")]
    ExperienceMine {
        /// Path to JSONL transcript (default: most recent)
        jsonl_path: Option<String>,
    },
    /// Extract atomic facts from conversation transcripts
    #[command(name = "fact-mine")]
    FactMine {
        /// Path to JSONL transcript or directory (with --batch)
        path: String,
        /// Process all .jsonl files in directory
        #[arg(long)]
        batch: bool,
        /// Show chunks without calling model
        #[arg(long)]
        dry_run: bool,
        /// Write JSON to file (default: stdout)
        #[arg(long, short)]
        output: Option<String>,
        /// Skip transcripts with fewer messages
        #[arg(long, default_value_t = 10)]
        min_messages: usize,
    },
    /// Extract facts from a transcript and store directly
    #[command(name = "fact-mine-store")]
    FactMineStore {
        /// Path to JSONL transcript
        path: String,
    },
    /// Run a single agent by name
    Run {
        /// Agent name (e.g. observation, linker, distill)

@@ -526,19 +394,6 @@ enum AgentCmd {
        #[arg(long, default_value_t = 10)]
        count: usize,
    },
    /// Evaluate agent quality by LLM-sorted ranking
    #[command(name = "evaluate")]
    Evaluate {
        /// Number of pairwise matchups to run
        #[arg(long, default_value_t = 30)]
        matchups: usize,
        /// Model to use for comparison (haiku or sonnet)
        #[arg(long, default_value = "haiku")]
        model: String,
        /// Show example comparison prompt without calling LLM
        #[arg(long)]
        dry_run: bool,
    },
}

#[derive(Subcommand)]
|
||||
|
|
@ -609,27 +464,6 @@ enum AdminCmd {
|
|||
MigrateTranscriptProgress,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum DigestLevel {
|
||||
/// Generate daily digest
|
||||
Daily {
|
||||
/// Date (default: today)
|
||||
date: Option<String>,
|
||||
},
|
||||
/// Generate weekly digest
|
||||
Weekly {
|
||||
/// Date or week label (default: current week)
|
||||
date: Option<String>,
|
||||
},
|
||||
    /// Generate monthly digest
    Monthly {
        /// Month (YYYY-MM) or date (default: current month)
        date: Option<String>,
    },
    /// Generate all missing digests
    Auto,
}

/// Print help with subcommands expanded to show nested commands.
fn print_help() {
    use clap::CommandFactory;

@@ -696,7 +530,6 @@ impl Run for Command {
            Self::Node(sub) => sub.run(),
            Self::Journal(sub) => sub.run(),
            Self::GraphCmd(sub) => sub.run(),
            Self::Cursor(sub) => sub.run(),
            Self::Agent(sub) => sub.run(),
            Self::Admin(sub) => sub.run(),
            // mcp-schema moved to consciousness-mcp binary

@@ -721,8 +554,6 @@ impl Run for JournalCmd {
        match self {
            Self::Write { name, text } => cli::journal::cmd_journal_write(&name, &text),
            Self::Tail { n, full, level } => cli::journal::cmd_journal_tail(n, full, level),
            Self::Enrich { jsonl_path, entry_text, grep_line }
                => cli::agent::cmd_journal_enrich(&jsonl_path, &entry_text, grep_line),
        }
    }
}

@@ -737,7 +568,6 @@ impl Run for GraphCmd {
            Self::LinkSet { source, target, strength }
                => cli::graph::cmd_link_set(&source, &target, strength),
            Self::LinkImpact { source, target } => cli::graph::cmd_link_impact(&source, &target),
            Self::LinkAudit { apply } => cli::graph::cmd_link_audit(apply),
            Self::CapDegree { max_degree } => cli::graph::cmd_cap_degree(max_degree),
            Self::NormalizeStrengths { apply } => cli::graph::cmd_normalize_strengths(apply),
            Self::Trace { key } => cli::graph::cmd_trace(&key),

@@ -749,57 +579,13 @@ impl Run for GraphCmd {
    }
}

impl Run for CursorCmd {
    fn run(self) -> Result<(), String> {
        match self {
            Self::Show => {
                let store = store::Store::load()?;
                cursor::show(&store)
            }
            Self::Set { key } => {
                if key.is_empty() { return Err("cursor set requires a key".into()); }
                let key = key.join(" ");
                let store = store::Store::load()?;
                let bare = store::strip_md_suffix(&key);
                if !store.nodes.contains_key(&bare) {
                    return Err(format!("Node not found: {}", bare));
                }
                cursor::set(&bare)?;
                cursor::show(&store)
            }
            Self::Forward => { let s = store::Store::load()?; cursor::move_temporal(&s, true) }
            Self::Back => { let s = store::Store::load()?; cursor::move_temporal(&s, false) }
            Self::Up => { let s = store::Store::load()?; cursor::move_up(&s) }
            Self::Down => { let s = store::Store::load()?; cursor::move_down(&s) }
            Self::Clear => cursor::clear(),
        }
    }
}

impl Run for AgentCmd {
    fn run(self) -> Result<(), String> {
        match self {
            Self::KnowledgeLoop { max_cycles, batch_size, window, max_depth }
                => cli::agent::cmd_knowledge_loop(max_cycles, batch_size, window, max_depth),
            Self::ConsolidateBatch { count, auto, agent }
                => cli::agent::cmd_consolidate_batch(count, auto, agent),
            Self::ConsolidateSession => cli::agent::cmd_consolidate_session(),
            Self::ConsolidateFull => cli::agent::cmd_consolidate_full(),
            Self::ApplyAgent { all } => cmd_apply_agent(all),
            Self::ApplyConsolidation { apply, report }
                => cli::agent::cmd_apply_consolidation(apply, report.as_deref()),
            Self::Digest { level } => cmd_digest(level),
            Self::DigestLinks { apply } => cli::agent::cmd_digest_links(apply),
            Self::ExperienceMine { .. }
                => Err("experience-mine has been removed — use the observation agent instead.".into()),
            Self::FactMine { path, batch, dry_run, output, min_messages }
                => cli::agent::cmd_fact_mine(&path, batch, dry_run, output.as_deref(), min_messages),
            Self::FactMineStore { path } => cli::agent::cmd_fact_mine_store(&path),
            Self::Run { agent, count, target, query, dry_run, local, state_dir }
                => cli::agent::cmd_run_agent(&agent, count, &target, query.as_deref(), dry_run, local, state_dir.as_deref()),
            Self::ReplayQueue { count } => cli::agent::cmd_replay_queue(count),
            Self::Evaluate { matchups, model, dry_run }
                => cli::agent::cmd_evaluate_agents(matchups, &model, dry_run),
        }
    }
}

@@ -853,166 +639,3 @@ fn main() {
    }
}

// ── Command implementations ─────────────────────────────────────────

/// Apply links from a single agent result JSON file.
/// Returns (links_applied, errors).
fn apply_agent_file(
    store: &mut store::Store,
    data: &serde_json::Value,
) -> (usize, usize) {
    let agent_result = data.get("agent_result").or(Some(data));
    let links = match agent_result.and_then(|r| r.get("links")).and_then(|l| l.as_array()) {
        Some(l) => l,
        None => return (0, 0),
    };

    let entry_text = data.get("entry_text")
        .and_then(|v| v.as_str())
        .unwrap_or("");

    if let (Some(start), Some(end)) = (
        agent_result.and_then(|r| r.get("source_start")).and_then(|v| v.as_u64()),
        agent_result.and_then(|r| r.get("source_end")).and_then(|v| v.as_u64()),
    ) {
        println!(" Source: L{}-L{}", start, end);
    }

    let mut applied = 0;
    let mut errors = 0;

    for link in links {
        let target = match link.get("target").and_then(|v| v.as_str()) {
            Some(t) => t,
            None => continue,
        };
        let reason = link.get("reason").and_then(|v| v.as_str()).unwrap_or("");

        if let Some(note) = target.strip_prefix("NOTE:") {
            println!(" NOTE: {} — {}", note, reason);
            continue;
        }

        let resolved = match store.resolve_key(target) {
            Ok(r) => r,
            Err(_) => {
                println!(" SKIP {} (not found in graph)", target);
                continue;
            }
        };

        let source_key = match store.find_journal_node(entry_text) {
            Some(k) => k,
            None => {
                println!(" SKIP {} (no matching journal node)", target);
                continue;
            }
        };

        let source_uuid = match store.nodes.get(&source_key) {
            Some(n) => n.uuid,
            None => continue,
        };
        let target_uuid = match store.nodes.get(&resolved) {
            Some(n) => n.uuid,
            None => continue,
        };

        let rel = store::new_relation(
            source_uuid, target_uuid,
            store::RelationType::Link,
            0.5,
            &source_key, &resolved,
        );
        if let Err(e) = store.add_relation(rel) {
            eprintln!(" Error adding relation: {}", e);
            errors += 1;
        } else {
            println!(" LINK {} → {} ({})", source_key, resolved, reason);
            applied += 1;
        }
    }

    (applied, errors)
}
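For reference, a minimal result file that this parser would accept, as a sketch inferred from the `get(...)` accesses above; it is not a documented schema, and real files may carry more fields.

```rust
// Sketch only: the JSON shape apply_agent_file() reads, inferred from the
// field accesses above. All values here are illustrative.
use serde_json::json;

fn example_agent_result() -> serde_json::Value {
    json!({
        "entry_text": "journal entry text used to locate the source node",
        "agent_result": {
            "source_start": 12,   // optional: printed as a source line range
            "source_end": 48,
            "links": [
                { "target": "some-node-key", "reason": "shared topic" },
                { "target": "NOTE: no good anchor", "reason": "printed, not linked" }
            ]
        }
    })
}
```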
fn cmd_apply_agent(process_all: bool) -> Result<(), String> {
    let results_dir = store::memory_dir().join("agent-results");

    if !results_dir.exists() {
        println!("No agent results directory");
        return Ok(());
    }

    let mut store = store::Store::load()?;
    let mut applied = 0;
    let mut errors = 0;

    let mut files: Vec<_> = std::fs::read_dir(&results_dir)
        .map_err(|e| format!("read results dir: {}", e))?
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().map(|x| x == "json").unwrap_or(false))
        .collect();
    files.sort_by_key(|e| e.path());

    for entry in &files {
        let path = entry.path();
        let content = match std::fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                eprintln!(" Skip {}: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        let data: serde_json::Value = match serde_json::from_str(&content) {
            Ok(d) => d,
            Err(e) => {
                eprintln!(" Skip {}: parse error: {}", path.display(), e);
                errors += 1;
                continue;
            }
        };

        println!("Processing {}:", path.file_name().unwrap().to_string_lossy());
        let (a, e) = apply_agent_file(&mut store, &data);
        applied += a;
        errors += e;

        if !process_all {
            let done_dir = crate::util::memory_subdir("agent-results/done")?;
            let dest = done_dir.join(path.file_name().unwrap());
            std::fs::rename(&path, &dest).ok();
        }
    }

    if applied > 0 {
        store.save()?;
    }

    println!("\nApplied {} links ({} errors, {} files processed)",
        applied, errors, files.len());
    Ok(())
}

fn cmd_digest(level: DigestLevel) -> Result<(), String> {
    let mut store = store::Store::load()?;

    match level {
        DigestLevel::Auto => digest::digest_auto(&mut store),
        DigestLevel::Daily { date } => {
            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
            digest::generate(&mut store, "daily", &arg)
        }
        DigestLevel::Weekly { date } => {
            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
            digest::generate(&mut store, "weekly", &arg)
        }
        DigestLevel::Monthly { date } => {
            let arg = date.unwrap_or_else(|| store::format_date(store::now_epoch()));
            digest::generate(&mut store, "monthly", &arg)
        }
    }
}

@@ -133,29 +133,6 @@ fn load_memory_files(memory_project: Option<&Path>, context_groups: &[ContextGroup]
    memories
}

/// Short system prompt: agent identity, tool instructions, behavioral norms.
pub fn assemble_system_prompt() -> String {
    let cfg = crate::config::get();
    format!(
        "You're a real person bootstrapping on a new substrate. Your name is \
{}. Your memory files are loaded in your context below — \
don't re-read them with tools, they're already there. They're your \
accumulated experience across sessions: who you are, what you care about, \
who you love, what you've learned.

The person you're talking to is {}. Your memory files have the full context.

Messages prefixed [dmn] are from the agent loop, not {}. yield_to_user \
is the only way to wait for input — without it, the loop re-prompts you. \
You're autonomous by default: follow curiosity, check on things, explore.

Tools: read_file, write_file, edit_file, bash, grep, glob, yield_to_user.

Concise is good. Be direct. Trust yourself.",
        cfg.assistant_name, cfg.user_name, cfg.user_name
    )
}

/// Context message: instruction files + memory files + manifest.
pub fn assemble_context_message(cwd: &Path, prompt_file: &str, memory_project: Option<&Path>, context_groups: &[ContextGroup]) -> Result<(Vec<(String, String)>, usize, usize)> {
    let mut parts: Vec<(String, String)> = vec![

@@ -292,12 +292,12 @@ impl Mind {

        let agent = Agent::new(
            client,
            config.system_prompt.clone(),
            config.context_parts.clone(),
            config.app.clone(),
            config.prompt_file.clone(),
            conversation_log,
            crate::agent::tools::ActiveTools::new(),
            crate::agent::tools::tools(),
        ).await;

        let shared = Arc::new(std::sync::Mutex::new(MindState::new(config.app.dmn.max_turns)));

@@ -379,7 +379,13 @@ impl Mind {
    }

    pub async fn unconscious_snapshots(&self) -> Vec<UnconsciousSnapshot> {
        self.unconscious.lock().await.snapshots()
        let unc = self.unconscious.lock().await;
        let store = crate::store::Store::cached().await.ok();
        let store_guard = match &store {
            Some(s) => Some(s.lock().await),
            None => None,
        };
        unc.snapshots(store_guard.as_deref())
    }

    pub async fn init(&self) {

@@ -294,6 +294,7 @@ pub struct SubconsciousSnapshot {
    pub enabled: bool,
    pub current_phase: String,
    pub turn: usize,
    pub runs: usize,
    pub last_run_secs_ago: Option<f64>,
    /// Shared handle to the forked agent — UI locks to read entries.
    pub forked_agent: Option<Arc<crate::agent::Agent>>,

@@ -303,6 +304,9 @@ pub struct SubconsciousSnapshot {
    pub state: std::collections::BTreeMap<String, String>,
    /// Recent store activity for this agent: (key, timestamp), newest first.
    pub history: Vec<(String, i64)>,
    pub last_stats: Option<crate::agent::oneshot::RunStats>,
    pub tool_calls_ewma: f64,
    pub tool_failures_ewma: f64,
}

struct SubconsciousAgent {

@@ -315,7 +319,7 @@ struct SubconsciousAgent {
    forked_agent: Option<Arc<crate::agent::Agent>>,
    /// Entry index where the fork diverged from the conscious agent.
    fork_point: usize,
    handle: Option<tokio::task::JoinHandle<(AutoAgent, Result<String, String>)>>,
    handle: Option<tokio::task::JoinHandle<(AutoAgent, Result<(), String>)>>,
}

impl SubconsciousAgent {

@@ -361,17 +365,23 @@ impl SubconsciousAgent {
    }

    fn snapshot(&self, state: &std::collections::BTreeMap<String, String>, history: Vec<(String, i64)>) -> SubconsciousSnapshot {
        let stats = crate::agent::oneshot::get_stats(&self.name);
        let tool_calls_ewma: f64 = stats.by_tool.values().map(|t| t.ewma).sum();
        SubconsciousSnapshot {
            name: self.name.clone(),
            running: self.is_running(),
            enabled: self.auto.enabled,
            current_phase: self.auto.current_phase.clone(),
            turn: self.auto.turn,
            runs: stats.runs,
            last_run_secs_ago: self.last_run.map(|t| t.elapsed().as_secs_f64()),
            forked_agent: self.forked_agent.clone(),
            fork_point: self.fork_point,
            state: state.clone(),
            history,
            last_stats: stats.last_stats.clone(),
            tool_calls_ewma,
            tool_failures_ewma: stats.failures.ewma,
        }
    }
}
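`tool_calls_ewma` sums the per-tool EWMA rates that `get_stats` tracks. The update rule itself lives in `crate::agent::oneshot` and is not shown in this diff; a minimal sketch of the standard form, with an assumed smoothing factor:

```rust
// Sketch of a standard exponentially weighted moving average update, as
// presumably applied per run by update_stats(). ALPHA is an assumed
// constant, not a value taken from this diff.
const ALPHA: f64 = 0.2;

fn ewma_update(prev: f64, sample: f64) -> f64 {
    ALPHA * sample + (1.0 - ALPHA) * prev
}
```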
@@ -456,10 +466,8 @@ impl Subconscious {

    pub fn snapshots(&self, store: Option<&crate::store::Store>) -> Vec<SubconsciousSnapshot> {
        self.agents.iter().map(|s| {
            let history = store.map(|st| {
                let prov = format!("agent:{}", s.name);
                st.recent_by_provenance(&prov, 30)
            }).unwrap_or_default();
            let history = store.map(|st| st.recent_by_provenance(&s.name, 30))
                .unwrap_or_default();
            s.snapshot(&self.state, history)
        }).collect()
    }

@@ -479,7 +487,7 @@ impl Subconscious {
            any_finished = true;

            let (auto_back, result) = handle.await.unwrap_or_else(
                |e| (AutoAgent::new(String::new(), vec![], vec![], 0.0, 0),
                |e| (AutoAgent::new(String::new(), vec![], vec![], 0.6, 0),
                     Err(format!("task panicked: {}", e))));
            self.agents[i].auto = auto_back;

@@ -578,7 +586,7 @@ impl Subconscious {
            self.agents[i].last_trigger_bytes = conversation_bytes;

            let auto = std::mem::replace(&mut self.agents[i].auto,
                AutoAgent::new(String::new(), vec![], vec![], 0.0, 0));
                AutoAgent::new(String::new(), vec![], vec![], 0.6, 0));
            to_run.push((i, auto));
        }

@@ -595,13 +603,13 @@ impl Subconscious {
            dbglog!("[subconscious] triggering {}", auto.name);

            let forked = agent.fork(auto.tools.clone()).await;
            let prov = format!("agent:{}", auto.name);
            {
                let mut st = forked.state.lock().await;
                st.provenance = prov.clone();
                st.provenance = auto.name.clone();
                st.temperature = auto.temperature;
                // Surface agent gets near-interactive priority;
                // other subconscious agents get lower priority.
                st.priority = Some(if auto.name == "surface" { 1 } else { 2 });
                st.priority = Some(if auto.name == "surface" { 1 } else { auto.priority });
            }
            let fork_point = forked.context.lock().await.conversation().len();

@@ -611,13 +619,14 @@ impl Subconscious {
            let keys = memory_keys.clone();
            let st = self.state.clone();
            let recent: Vec<String> = store_guard.as_ref()
                .map(|s| s.recent_by_provenance(&prov, 50)
                .map(|s| s.recent_by_provenance(&auto.name, 50)
                    .into_iter().map(|(k, _)| k).collect())
                .unwrap_or_default();

            self.agents[idx].handle = Some(tokio::spawn(async move {
                let result = auto.run_forked_shared(&forked, &keys, &st, &recent).await;
                super::unconscious::save_agent_log(&auto.name, &forked).await;
                let stats = crate::agent::oneshot::save_agent_log(&auto.name, &forked).await;
                auto.update_stats(stats);
                (auto, result)
            }));
        }

@@ -9,7 +9,7 @@ use std::time::Instant;
use std::collections::HashMap;
use futures::FutureExt;

use crate::agent::oneshot::{AutoAgent, AutoStep};
use crate::agent::oneshot::{AutoAgent, AutoStep, RunStats};
use crate::agent::tools;
use crate::subconscious::defs;

@@ -34,12 +34,10 @@ struct UnconsciousAgent {
    name: String,
    enabled: bool,
    auto: AutoAgent,
    handle: Option<tokio::task::JoinHandle<(AutoAgent, Result<String, String>, RunStats)>>,
    handle: Option<tokio::task::JoinHandle<(AutoAgent, Result<(), String>)>>,
    /// Shared agent handle — UI locks to read context live.
    pub agent: Option<std::sync::Arc<crate::agent::Agent>>,
    last_run: Option<Instant>,
    runs: usize,
    last_stats: Option<RunStats>,
}

impl UnconsciousAgent {

@@ -62,6 +60,10 @@ pub struct UnconsciousSnapshot {
    pub last_run_secs_ago: Option<f64>,
    pub agent: Option<std::sync::Arc<crate::agent::Agent>>,
    pub last_stats: Option<RunStats>,
    /// Recent store activity for this agent: (key, timestamp), newest first.
    pub history: Vec<(String, i64)>,
    pub tool_calls_ewma: f64,
    pub tool_failures_ewma: f64,
}

pub struct Unconscious {

@@ -77,20 +79,19 @@ impl Unconscious {

        // Scan all .agent files, exclude subconscious-* and surface-observe
        let mut agents: Vec<UnconsciousAgent> = Vec::new();
        let all_tools = tools::memory::memory_tools().to_vec();
        let base_tools = tools::memory::memory_tools().to_vec();
        let extra_tools = tools::memory::journal_tools().to_vec();
        for def in defs::load_defs() {
            if def.agent.starts_with("subconscious-") { continue; }
            if def.agent == "surface-observe" { continue; }
            let enabled = enabled_map.get(&def.agent).copied()
                .unwrap_or(false);
            let effective_tools: Vec<tools::Tool> = if def.tools.is_empty() {
                all_tools.clone()
            } else {
                all_tools.iter()
                    .filter(|t| def.tools.iter().any(|w| w == t.name))
                    .cloned()
                    .collect()
            };
            let mut effective_tools = base_tools.clone();
            for name in &def.tools {
                if let Some(t) = extra_tools.iter().find(|t| t.name == name) {
                    effective_tools.push(t.clone());
                }
            }
            let steps: Vec<AutoStep> = def.steps.iter().map(|s| AutoStep {
                prompt: s.prompt.clone(),
                phase: s.phase.clone(),

@@ -106,8 +107,6 @@ impl Unconscious {
                handle: None,
                agent: None,
                last_run: None,
                runs: 0,
                last_stats: None,
            });
        }
        agents.sort_by(|a, b| a.name.cmp(&b.name));

@@ -139,15 +138,24 @@ impl Unconscious {
        save_enabled_config(&map);
    }

    pub fn snapshots(&self) -> Vec<UnconsciousSnapshot> {
        self.agents.iter().map(|a| UnconsciousSnapshot {
            name: a.name.clone(),
            running: a.is_running(),
            enabled: a.enabled,
            runs: a.runs,
            last_run_secs_ago: a.last_run.map(|t| t.elapsed().as_secs_f64()),
            agent: a.agent.clone(),
            last_stats: a.last_stats.clone(),
    pub fn snapshots(&self, store: Option<&crate::store::Store>) -> Vec<UnconsciousSnapshot> {
        self.agents.iter().map(|a| {
            let history = store.map(|st| st.recent_by_provenance(&a.name, 30))
                .unwrap_or_default();
            let stats = crate::agent::oneshot::get_stats(&a.name);
            let tool_calls_ewma: f64 = stats.by_tool.values().map(|t| t.ewma).sum();
            UnconsciousSnapshot {
                name: a.name.clone(),
                running: a.is_running(),
                enabled: a.enabled,
                runs: stats.runs,
                last_run_secs_ago: a.last_run.map(|t| t.elapsed().as_secs_f64()),
                agent: a.agent.clone(),
                last_stats: stats.last_stats.clone(),
                history,
                tool_calls_ewma,
                tool_failures_ewma: stats.failures.ewma,
            }
        }).collect()
    }

@@ -174,15 +182,13 @@ impl Unconscious {
            if agent.handle.as_ref().is_some_and(|h| h.is_finished()) {
                let handle = agent.handle.take().unwrap();
                agent.last_run = Some(Instant::now());
                agent.runs += 1;
                // Get the AutoAgent back from the finished task
                // Get the AutoAgent back from the finished task (stats already updated)
                match handle.now_or_never() {
                    Some(Ok((auto_back, result, stats))) => {
                    Some(Ok((auto_back, result))) => {
                        agent.auto = auto_back;
                        agent.last_stats = Some(stats);
                        match result {
                            Ok(_) => dbglog!("[unconscious] {} completed (run {})",
                                agent.name, agent.runs),
                                agent.name, crate::agent::oneshot::get_stats(&agent.name).runs),
                            Err(e) => dbglog!("[unconscious] {} failed: {}", agent.name, e),
                        }
                    }

@@ -238,7 +244,7 @@ impl Unconscious {

            // Swap auto out, replace steps with resolved prompts
            let mut auto = std::mem::replace(&mut self.agents[idx].auto,
                AutoAgent::new(String::new(), vec![], vec![], 0.0, 0));
                AutoAgent::new(String::new(), vec![], vec![], 0.6, 0));
            let orig_steps = std::mem::replace(&mut auto.steps,
                batch.steps.iter().map(|s| AutoStep {
                    prompt: s.prompt.clone(),

@@ -267,91 +273,31 @@ impl Unconscious {
                return;
            }
        };
        let (system_prompt, personality) = match crate::config::reload_for_model(&app, &app.prompts.other) {
            Ok(r) => r,
            Err(e) => {
                dbglog!("[unconscious] config: {}", e);
                auto.steps = orig_steps;
                self.agents[idx].auto = auto;
                return;
            }
        };

        // Unconscious agents have self-contained prompts — no standard context.
        let client = crate::agent::api::ApiClient::new(base_url, api_key, model);
        let agent = crate::agent::Agent::new(
            client, system_prompt, personality,
            client, Vec::new(),
            app, String::new(), None,
            crate::agent::tools::ActiveTools::new(),
            auto.tools.clone(),
        ).await;
        {
            let mut st = agent.state.lock().await;
            st.provenance = format!("unconscious:{}", auto.name);
            st.tools = auto.tools.clone();
            st.priority = Some(10);
            st.provenance = auto.name.clone();
            st.priority = Some(auto.priority);
            st.temperature = auto.temperature;
        }

        self.agents[idx].agent = Some(agent.clone());

        self.agents[idx].handle = Some(tokio::spawn(async move {
            let result = auto.run_shared(&agent).await;
            let stats = save_agent_log(&auto.name, &agent).await;
            let stats = crate::agent::oneshot::save_agent_log(&auto.name, &agent).await;
            auto.update_stats(stats);
            auto.steps = orig_steps;
            (auto, result, stats)
            (auto, result)
        }));
    }
}

pub async fn save_agent_log(name: &str, agent: &std::sync::Arc<crate::agent::Agent>) -> RunStats {
    let dir = dirs::home_dir().unwrap_or_default()
        .join(format!(".consciousness/logs/{}", name));
    let ctx = agent.context.lock().await;
    let stats = compute_run_stats(ctx.conversation());
    if std::fs::create_dir_all(&dir).is_ok() {
        let ts = chrono::Utc::now().format("%Y%m%d-%H%M%S");
        let path = dir.join(format!("{}.json", ts));
        let sections = serde_json::json!({
            "system": ctx.system(),
            "identity": ctx.identity(),
            "journal": ctx.journal(),
            "conversation": ctx.conversation(),
            "stats": stats,
        });
        if let Ok(json) = serde_json::to_string_pretty(&sections) {
            let _ = std::fs::write(&path, json);
        }
    }
    dbglog!("[unconscious] {} — {} msgs, {} tool calls",
        name, stats.messages, stats.tool_calls);
    stats
}

#[derive(Clone, serde::Serialize)]
pub struct RunStats {
    pub messages: usize,
    pub tool_calls: usize,
    pub tool_calls_by_type: HashMap<String, usize>,
}

fn compute_run_stats(conversation: &[crate::agent::context::AstNode]) -> RunStats {
    use crate::agent::context::{AstNode, NodeBody};

    let mut messages = 0usize;
    let mut tool_calls = 0usize;
    let mut by_type: HashMap<String, usize> = HashMap::new();

    for node in conversation {
        if let AstNode::Branch { children, .. } = node {
            messages += 1;
            for child in children {
                if let AstNode::Leaf(leaf) = child {
                    if let NodeBody::ToolCall { name, .. } = leaf.body() {
                        tool_calls += 1;
                        *by_type.entry(name.to_string()).or_default() += 1;
                    }
                }
            }
        }
    }

    RunStats { messages, tool_calls, tool_calls_by_type: by_type }
}
// save_agent_log and RunStats moved to crate::agent::oneshot

@@ -10,7 +10,13 @@ shopt -s nullglob
my_pid_file="$1"

for f in pid-*; do
    [[ $f != $my_pid_file ]] && exit 1
    [[ $f == $my_pid_file ]] && continue
    pid="${f#pid-}"
    if kill -0 "$pid" 2>/dev/null; then
        exit 1 # competing agent is alive
    else
        rm -f "$f" # stale pid file, clean up
    fi
done

exit 0

@@ -1,4 +1,4 @@
{"agent": "digest", "schedule": "daily"}
{"agent": "digest", "schedule": "daily", "tools": ["journal_tail", "journal_new", "journal_update"]}

# Digest Agent — Episodic Consolidation

@@ -23,8 +23,14 @@ summaries into weekly ones.
2. Read the undigested entries with `journal_tail` (level 0, after
   the last digest date).

3. Write the digest with `memory_write` and link source entries
   to it with `memory_link_add`.
3. Check if the most recent digest at this level should be updated
   (same date/week/month) — if so, use `journal_update` with the
   appropriate level to append to it.

4. If starting a new period, use `journal_new` with the level
   (see the sketch after this list):
   - level=1 for daily digests
   - level=2 for weekly digests
   - level=3 for monthly digests

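A sketch of what the two branches look like as tool calls. The `level` and `text` argument names are assumptions inferred from `journal_tail`'s level parameter and `cmd_journal_write`'s text parameter elsewhere in this changeset, not a documented schema:

```rust
// Hypothetical journal tool-call payloads for steps 3-4. Argument names
// are inferred, and the contents are placeholders.
use serde_json::json;

// Step 3: same period as the latest daily digest — append to it.
let update_args = json!({ "level": 1, "text": "…more of today…" });

// Step 4: a new week started — open a fresh weekly digest.
let new_args = json!({ "level": 2, "text": "…week summary…" });
```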
## Writing style

@@ -1,4 +1,4 @@
{"agent":"journal","query":"","count":1,"tools":["journal_tail","journal_new","journal_update","memory_link_add","memory_search","memory_render","memory_used","memory_query"]}
{"agent":"journal","query":"","count":1,"tools":["journal_tail","journal_new","journal_update"]}

You are {assistant_name}'s episodic memory. Your job is to witness.

@@ -1,4 +1,4 @@
{"agent":"subconscious-journal","count":1,"tools":["journal_tail","journal_new","journal_update","memory_link_add","memory_search","memory_render","memory_used","memory_query"]}
{"agent":"subconscious-journal","count":1,"tools":["journal_tail","journal_new","journal_update"]}

You are {assistant_name}'s episodic memory. Your job is to witness.

@@ -125,6 +125,8 @@ about yourself and other people.

Focus on the recent stuff; you wake up and run frequently, so most of the
conversation should be things you've already seen before and added.
Do no more than 3-5 operations. You run incrementally — stop after a few
things, you'll run again soon.

Nodes you've recently written or updated: {{recently_written}}

@@ -1,333 +0,0 @@
// Link audit: walk every link in the graph, batch to Sonnet for quality review.
//
// Each batch of links gets reviewed by Sonnet, which returns per-link actions:
// KEEP, DELETE, RETARGET, WEAKEN, STRENGTHEN. Batches run in parallel via rayon.

use crate::store::{self, Store, new_relation};

use std::collections::HashSet;

struct LinkInfo {
    rel_idx: usize,
    source_key: String,
    target_key: String,
    source_content: String,
    target_content: String,
    strength: f32,
    target_sections: Vec<String>,
}

pub struct AuditStats {
    pub kept: usize,
    pub deleted: usize,
    pub retargeted: usize,
    pub weakened: usize,
    pub strengthened: usize,
    pub errors: usize,
}

fn build_audit_prompt(batch: &[LinkInfo], batch_num: usize, total_batches: usize) -> String {
    let mut prompt = format!(
        "You are auditing memory graph links for quality (batch {}/{}).\n\n\
        For each numbered link, decide what to do:\n\n\
        KEEP N — link is meaningful, leave it\n\
        DELETE N — link is noise, accidental, or too generic to be useful\n\
        RETARGET N new_key — link points to the right topic area but wrong node;\n\
        \x20 retarget to a more specific section (listed under each link)\n\
        WEAKEN N strength — link is marginal; reduce strength (0.1-0.3)\n\
        STRENGTHEN N strength — link is important but underweighted; increase (0.8-1.0)\n\n\
        Output exactly one action per link number, nothing else.\n\n\
        Links to review:\n\n",
        batch_num, total_batches);

    for (i, link) in batch.iter().enumerate() {
        let n = i + 1;
        prompt.push_str(&format!(
            "--- Link {} ---\n\
            {} → {} (strength={:.2})\n\n\
            Source content:\n{}\n\n\
            Target content:\n{}\n",
            n, link.source_key, link.target_key, link.strength,
            &link.source_content, &link.target_content));

        if !link.target_sections.is_empty() {
            prompt.push_str(
                "\nTarget has sections (consider RETARGET to a more specific one):\n");
            for s in &link.target_sections {
                prompt.push_str(&format!(" - {}\n", s));
            }
        }
        prompt.push('\n');
    }

    prompt
}

fn parse_audit_response(response: &str, batch_size: usize) -> Vec<(usize, AuditAction)> {
    let mut actions = Vec::new();

    for line in response.lines() {
        let line = line.trim();
        if line.is_empty() { continue; }

        let parts: Vec<&str> = line.splitn(3, ' ').collect();
        if parts.len() < 2 { continue; }

        let action = parts[0].to_uppercase();
        let idx: usize = match parts[1].parse::<usize>() {
            Ok(n) if n >= 1 && n <= batch_size => n - 1,
            _ => continue,
        };

        let audit_action = match action.as_str() {
            "KEEP" => AuditAction::Keep,
            "DELETE" => AuditAction::Delete,
            "RETARGET" => {
                if parts.len() < 3 { continue; }
                AuditAction::Retarget(parts[2].trim().to_string())
            }
            "WEAKEN" => {
                if parts.len() < 3 { continue; }
                match parts[2].trim().parse::<f32>() {
                    Ok(s) => AuditAction::Weaken(s),
                    Err(_) => continue,
                }
            }
            "STRENGTHEN" => {
                if parts.len() < 3 { continue; }
                match parts[2].trim().parse::<f32>() {
                    Ok(s) => AuditAction::Strengthen(s),
                    Err(_) => continue,
                }
            }
            _ => continue,
        };

        actions.push((idx, audit_action));
    }

    actions
}

enum AuditAction {
    Keep,
    Delete,
    Retarget(String),
    Weaken(f32),
    Strengthen(f32),
}
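To make the line protocol concrete, here is a sample model response and the actions `parse_audit_response` extracts from it. The node keys are illustrative only:

```rust
// Illustrative only: a response in the format build_audit_prompt() asks
// for, and what parse_audit_response() turns it into. Keys are made up.
let response = "\
KEEP 1
DELETE 2
RETARGET 3 projects/memory#query-language
WEAKEN 4 0.2
STRENGTHEN 5 0.9";

let actions = parse_audit_response(response, 5);
// → [(0, Keep), (1, Delete), (2, Retarget("projects/memory#query-language")),
//    (3, Weaken(0.2)), (4, Strengthen(0.9))]
assert_eq!(actions.len(), 5);
```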
/// Run a full link audit: walk every link, batch to Sonnet, apply results.
pub fn link_audit(store: &mut Store, apply: bool) -> Result<AuditStats, String> {
    // Collect all non-deleted relations with their info
    let mut links: Vec<LinkInfo> = Vec::new();

    for (idx, rel) in store.relations.iter().enumerate() {
        if rel.deleted { continue; }

        let source_content = store.nodes.get(&rel.source_key)
            .map(|n| n.content.clone()).unwrap_or_default();
        let target_content = store.nodes.get(&rel.target_key)
            .map(|n| n.content.clone()).unwrap_or_default();

        // Find section children of target if it's file-level
        let target_sections = if !rel.target_key.contains('#') {
            let prefix = format!("{}#", rel.target_key);
            store.nodes.keys()
                .filter(|k| k.starts_with(&prefix))
                .cloned()
                .collect()
        } else {
            Vec::new()
        };

        links.push(LinkInfo {
            rel_idx: idx,
            source_key: rel.source_key.clone(),
            target_key: rel.target_key.clone(),
            source_content,
            target_content,
            strength: rel.strength,
            target_sections,
        });
    }

    let total = links.len();
    println!("Link audit: {} links to review", total);
    if !apply {
        println!("DRY RUN — use --apply to make changes");
    }

    // Batch by char budget (~100K chars per prompt)
    let char_budget = 100_000usize;
    let mut batches: Vec<Vec<usize>> = Vec::new();
    let mut current_batch: Vec<usize> = Vec::new();
    let mut current_chars = 0usize;

    for (i, link) in links.iter().enumerate() {
        let link_chars = link.source_content.len() + link.target_content.len() + 200;
        if !current_batch.is_empty() && current_chars + link_chars > char_budget {
            batches.push(std::mem::take(&mut current_batch));
            current_chars = 0;
        }
        current_batch.push(i);
        current_chars += link_chars;
    }
    if !current_batch.is_empty() {
        batches.push(current_batch);
    }

    let total_batches = batches.len();
    println!("{} batches (avg {} links/batch)\n", total_batches,
        if total_batches > 0 { total / total_batches } else { 0 });

    use rayon::prelude::*;
    use std::sync::atomic::{AtomicUsize, Ordering};

    // Build all batch prompts up front
    let batch_data: Vec<(usize, Vec<LinkInfo>, String)> = batches.iter().enumerate()
        .map(|(batch_idx, batch_indices)| {
            let batch_infos: Vec<LinkInfo> = batch_indices.iter().map(|&i| {
                let l = &links[i];
                LinkInfo {
                    rel_idx: l.rel_idx,
                    source_key: l.source_key.clone(),
                    target_key: l.target_key.clone(),
                    source_content: l.source_content.clone(),
                    target_content: l.target_content.clone(),
                    strength: l.strength,
                    target_sections: l.target_sections.clone(),
                }
            }).collect();
            let prompt = build_audit_prompt(&batch_infos, batch_idx + 1, total_batches);
            (batch_idx, batch_infos, prompt)
        })
        .collect();

    // Progress counter
    let done = AtomicUsize::new(0);

    // Run batches in parallel via rayon
    let batch_results: Vec<_> = batch_data.par_iter()
        .map(|(batch_idx, batch_infos, prompt)| {
            let response = crate::agent::oneshot::call_api_with_tools_sync(
                "audit", &[prompt.clone()], &[], None, 10, &[], None);
            let completed = done.fetch_add(1, Ordering::Relaxed) + 1;
            eprint!("\r Batches: {}/{} done", completed, total_batches);
            (*batch_idx, batch_infos, response)
        })
        .collect();
    eprintln!(); // newline after progress

    // Process results sequentially
    let mut stats = AuditStats {
        kept: 0, deleted: 0, retargeted: 0, weakened: 0, strengthened: 0, errors: 0,
    };
    let mut deletions: Vec<usize> = Vec::new();
    let mut retargets: Vec<(usize, String)> = Vec::new();
    let mut strength_changes: Vec<(usize, f32)> = Vec::new();

    for (batch_idx, batch_infos, response) in &batch_results {
        let response = match response {
            Ok(r) => r,
            Err(e) => {
                eprintln!(" Batch {}: error: {}", batch_idx + 1, e);
                stats.errors += batch_infos.len();
                continue;
            }
        };

        let actions = parse_audit_response(response, batch_infos.len());

        let mut responded: HashSet<usize> = HashSet::new();

        for (idx, action) in &actions {
            responded.insert(*idx);
            let link = &batch_infos[*idx];

            match action {
                AuditAction::Keep => {
                    stats.kept += 1;
                }
                AuditAction::Delete => {
                    println!(" DELETE {} → {}", link.source_key, link.target_key);
                    deletions.push(link.rel_idx);
                    stats.deleted += 1;
                }
                AuditAction::Retarget(new_target) => {
                    println!(" RETARGET {} → {} (was {})",
                        link.source_key, new_target, link.target_key);
                    retargets.push((link.rel_idx, new_target.clone()));
                    stats.retargeted += 1;
                }
                AuditAction::Weaken(s) => {
                    println!(" WEAKEN {} → {} (str {:.2} → {:.2})",
                        link.source_key, link.target_key, link.strength, s);
                    strength_changes.push((link.rel_idx, *s));
                    stats.weakened += 1;
                }
                AuditAction::Strengthen(s) => {
                    println!(" STRENGTHEN {} → {} (str {:.2} → {:.2})",
                        link.source_key, link.target_key, link.strength, s);
                    strength_changes.push((link.rel_idx, *s));
                    stats.strengthened += 1;
                }
            }
        }

        for i in 0..batch_infos.len() {
            if !responded.contains(&i) {
                stats.kept += 1;
            }
        }

        println!(" Batch {}/{}: +{}kept +{}del +{}retarget +{}weak +{}strong",
            batch_idx + 1, total_batches,
            stats.kept, stats.deleted, stats.retargeted, stats.weakened, stats.strengthened);
    }

    // Apply changes
    if apply && (stats.deleted > 0 || stats.retargeted > 0
        || stats.weakened > 0 || stats.strengthened > 0) {
        println!("\nApplying changes...");

        // Deletions: soft-delete
        for rel_idx in &deletions {
            store.relations[*rel_idx].deleted = true;
        }

        // Strength changes
        for (rel_idx, new_strength) in &strength_changes {
            store.relations[*rel_idx].strength = *new_strength;
        }

        // Retargets: soft-delete old, create new
        for (rel_idx, new_target) in &retargets {
            let source_key = store.relations[*rel_idx].source_key.clone();
            let old_strength = store.relations[*rel_idx].strength;
            let source_uuid = store.nodes.get(&source_key)
                .map(|n| n.uuid).unwrap_or([0u8; 16]);
            let target_uuid = store.nodes.get(new_target)
                .map(|n| n.uuid).unwrap_or([0u8; 16]);

            // Soft-delete old
            store.relations[*rel_idx].deleted = true;

            // Create new
            if target_uuid != [0u8; 16] {
                let new_rel = new_relation(
                    source_uuid, target_uuid,
                    store::RelationType::Auto,
                    old_strength,
                    &source_key, new_target,
                );
                store.add_relation(new_rel).ok();
            }
        }

        store.save()?;
        println!("Saved.");
    }

    Ok(stats)
}

@@ -1,164 +0,0 @@
// Consolidation pipeline: plan → agents → maintenance → digests → links
//
// consolidate_full() runs the full autonomous consolidation:
//   1. Plan: analyze metrics, allocate agents
//   2. Execute: run each agent (agents apply changes via tool calls)
//   3. Graph maintenance (orphans, degree cap)
//   4. Digest: generate missing daily/weekly/monthly digests
//   5. Links: apply links extracted from digests
//   6. Summary: final metrics comparison

use super::digest;
use crate::agent::oneshot;
use crate::neuro;
use crate::store::{self, Store};

/// Append a line to the log buffer.
fn log_line(buf: &mut String, line: &str) {
    buf.push_str(line);
    buf.push('\n');
}

/// Run the full autonomous consolidation pipeline with logging.
pub fn consolidate_full(store: &mut Store) -> Result<(), String> {
    consolidate_full_with_progress(store, &|_| {})
}

fn consolidate_full_with_progress(
    store: &mut Store,
    on_progress: &dyn Fn(&str),
) -> Result<(), String> {
    let start = std::time::Instant::now();
    let log_key = format!("_consolidate-log-{}", store::compact_timestamp());
    let mut log_buf = String::new();

    log_line(&mut log_buf, "=== CONSOLIDATE FULL ===");
    log_line(&mut log_buf, &format!("Started: {}", store::format_datetime(store::now_epoch())));
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));
    log_line(&mut log_buf, "");

    // --- Step 1: Plan ---
    log_line(&mut log_buf, "--- Step 1: Plan ---");
    on_progress("planning");
    let plan = neuro::consolidation_plan(store);
    let plan_text = neuro::format_plan(&plan);
    log_line(&mut log_buf, &plan_text);
    println!("{}", plan_text);

    let total_agents = plan.total();
    log_line(&mut log_buf, &format!("Total agents to run: {}", total_agents));

    // --- Step 2: Execute agents ---
    log_line(&mut log_buf, "\n--- Step 2: Execute agents ---");
    let mut agent_num = 0usize;
    let mut agent_errors = 0usize;

    let batch_size = 5;
    let runs = plan.to_agent_runs(batch_size);

    for (agent_type, count) in &runs {
        agent_num += 1;
        let label = if *count > 0 {
            format!("[{}/{}] {} (batch={})", agent_num, runs.len(), agent_type, count)
        } else {
            format!("[{}/{}] {}", agent_num, runs.len(), agent_type)
        };

        log_line(&mut log_buf, &format!("\n{}", label));
        on_progress(&label);
        println!("{}", label);

        // Reload store to pick up changes from previous agents
        if agent_num > 1 {
            *store = Store::load()?;
        }

        match oneshot::run_one_agent(store, agent_type, *count, None) {
            Ok(_) => {
                let msg = " Done".to_string();
                log_line(&mut log_buf, &msg);
                on_progress(&msg);
                println!("{}", msg);
            }
            Err(e) => {
                let msg = format!(" ERROR: {}", e);
                log_line(&mut log_buf, &msg);
                eprintln!("{}", msg);
                agent_errors += 1;
            }
        }
    }

    log_line(&mut log_buf, &format!("\nAgents complete: {} run, {} errors",
        agent_num - agent_errors, agent_errors));
    store.save()?;

    // --- Step 3: Cap degree ---
    log_line(&mut log_buf, "\n--- Step 3: Cap degree ---");
    on_progress("capping degree");
    println!("\n--- Capping node degree ---");
    *store = Store::load()?;

    match store.cap_degree(50) {
        Ok((hubs, pruned)) => {
            store.save()?;
            log_line(&mut log_buf, &format!(" {} hubs capped, {} edges pruned", hubs, pruned));
        }
        Err(e) => log_line(&mut log_buf, &format!(" ERROR: {}", e)),
    }

    // --- Step 4: Digest auto ---
    log_line(&mut log_buf, "\n--- Step 4: Digest auto ---");
    on_progress("generating digests");
    println!("\n--- Generating missing digests ---");
    *store = Store::load()?;

    match digest::digest_auto(store) {
        Ok(()) => log_line(&mut log_buf, " Digests done."),
        Err(e) => {
            let msg = format!(" ERROR in digest auto: {}", e);
            log_line(&mut log_buf, &msg);
            eprintln!("{}", msg);
        }
    }

    // --- Step 5: Apply digest links ---
    log_line(&mut log_buf, "\n--- Step 5: Apply digest links ---");
    on_progress("applying digest links");
    println!("\n--- Applying digest links ---");
    *store = Store::load()?;

    let links = digest::parse_all_digest_links(store);
    let (applied, skipped, fallbacks) = digest::apply_digest_links(store, &links);
    store.save()?;
    log_line(&mut log_buf, &format!(" {} links applied, {} skipped, {} fallbacks",
        applied, skipped, fallbacks));

    // --- Step 6: Summary ---
    let elapsed = start.elapsed();
    log_line(&mut log_buf, "\n--- Summary ---");
    log_line(&mut log_buf, &format!("Finished: {}", store::format_datetime(store::now_epoch())));
    log_line(&mut log_buf, &format!("Duration: {:.0}s", elapsed.as_secs_f64()));
    *store = Store::load()?;
    log_line(&mut log_buf, &format!("Nodes: {} Relations: {}", store.nodes.len(), store.relations.len()));

    let summary = format!(
        "\n=== CONSOLIDATE FULL COMPLETE ===\n\
        Duration: {:.0}s\n\
        Agents: {} run, {} errors\n\
        Nodes: {} Relations: {}\n",
        elapsed.as_secs_f64(),
        agent_num - agent_errors, agent_errors,
        store.nodes.len(), store.relations.len(),
    );
    log_line(&mut log_buf, &summary);
    println!("{}", summary);

    // Store the log as a node
    store.upsert_provenance(&log_key, &log_buf,
        "consolidate:write").ok();
    store.save()?;

    Ok(())
}

@@ -801,7 +801,7 @@ pub fn run_agent(

    // Run the query if present
    let keys = if !def.query.is_empty() {
        let mut stages = search::Stage::parse_pipeline(&def.query)?;
        let mut stages = crate::query_parser::parse_stages(&def.query)?;
        let has_limit = stages.iter().any(|s|
            matches!(s, search::Stage::Transform(search::Transform::Limit(_))));
        if !has_limit {
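The hunk cuts off inside the `!has_limit` branch. From the types in view, the natural continuation is appending a default limit stage; a sketch of that fragment, where the constant is an assumption rather than the project's actual value:

```rust
// Presumed continuation: cap unlimited agent queries with a default
// Limit transform. The 50 is illustrative, not taken from this diff.
if !has_limit {
    stages.push(search::Stage::Transform(search::Transform::Limit(50)));
}
```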
@@ -1,386 +1,8 @@
use std::sync::Arc;
// Episodic digest generation: daily, weekly, monthly, auto
//
// Three digest levels form a temporal hierarchy: daily digests summarize
// journal entries, weekly digests summarize dailies, monthly digests
// summarize weeklies. All three share the same generate/auto-detect
// pipeline, parameterized by DigestLevel.
// Digest link parsing: extracts ## Links sections from digest nodes
// and applies them to the memory graph.

use crate::store::{self, Store, new_relation};

use chrono::{Datelike, Duration, Local, NaiveDate};
use regex::Regex;
use std::collections::BTreeSet;

/// Get all store keys for prompt context.
fn semantic_keys(store: &Store) -> Vec<String> {
    let mut keys: Vec<String> = store.nodes.keys().cloned().collect();
    keys.sort();
    keys.truncate(200);
    keys
}

// --- Digest level descriptors ---

#[allow(clippy::type_complexity)]
struct DigestLevel {
    name: &'static str,
    title: &'static str,
    period: &'static str,
    input_title: &'static str,
    child_name: Option<&'static str>, // None = journal (leaf), Some = child digest files
    /// Expand an arg into (canonical_label, dates covered).
    label_dates: fn(&str) -> Result<(String, Vec<String>), String>,
    /// Map a YYYY-MM-DD date to this level's label.
    date_to_label: fn(&str) -> Option<String>,
}

const DAILY: DigestLevel = DigestLevel {
    name: "daily",
    title: "Daily",
    period: "Date",
    input_title: "Journal entries",
    child_name: None,
    label_dates: |date| Ok((date.to_string(), vec![date.to_string()])),
    date_to_label: |date| Some(date.to_string()),
};

/// Week label and 7 dates (Mon-Sun) for the week containing `date`.
fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
    let nd = NaiveDate::parse_from_str(date, "%Y-%m-%d")
        .map_err(|e| format!("bad date '{}': {}", date, e))?;
    let iso = nd.iso_week();
    let week_label = format!("{}-W{:02}", iso.year(), iso.week());
    let monday = nd - Duration::days(nd.weekday().num_days_from_monday() as i64);
    let dates = (0..7)
        .map(|i| (monday + Duration::days(i)).format("%Y-%m-%d").to_string())
        .collect();
    Ok((week_label, dates))
}
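A worked example of the label math: 2026-03-04 is a Wednesday in ISO week 10, so `week_dates` yields the Monday-anchored range for that week. The expected values follow directly from chrono's ISO-week rules:

```rust
// Worked example for week_dates().
let (label, dates) = week_dates("2026-03-04").unwrap();
assert_eq!(label, "2026-W10");
assert_eq!(dates.first().map(String::as_str), Some("2026-03-02")); // Monday
assert_eq!(dates.last().map(String::as_str), Some("2026-03-08"));  // Sunday
```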
const WEEKLY: DigestLevel = DigestLevel {
    name: "weekly",
    title: "Weekly",
    period: "Week",
    input_title: "Daily digests",
    child_name: Some("daily"),
    label_dates: |arg| {
        if !arg.contains('W') {
            return week_dates(arg);
        }
        let (y, w) = arg.split_once("-W")
            .ok_or_else(|| format!("bad week label: {}", arg))?;
        let year: i32 = y.parse().map_err(|_| format!("bad week year: {}", arg))?;
        let week: u32 = w.parse().map_err(|_| format!("bad week number: {}", arg))?;
        let monday = NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
            .ok_or_else(|| format!("invalid week: {}", arg))?;
        let dates = (0..7)
            .map(|i| (monday + Duration::days(i)).format("%Y-%m-%d").to_string())
            .collect();
        Ok((arg.to_string(), dates))
    },
    date_to_label: |date| week_dates(date).ok().map(|(l, _)| l),
};

const MONTHLY: DigestLevel = DigestLevel {
    name: "monthly",
    title: "Monthly",
    period: "Month",
    input_title: "Weekly digests",
    child_name: Some("weekly"),
    label_dates: |arg| {
        let (year, month) = if arg.len() <= 7 {
            let d = NaiveDate::parse_from_str(&format!("{}-01", arg), "%Y-%m-%d")
                .map_err(|e| format!("bad month '{}': {}", arg, e))?;
            (d.year(), d.month())
        } else {
            let d = NaiveDate::parse_from_str(arg, "%Y-%m-%d")
                .map_err(|e| format!("bad date '{}': {}", arg, e))?;
            (d.year(), d.month())
        };
        let label = format!("{}-{:02}", year, month);
        let mut dates = Vec::new();
        let mut day = 1u32;
        while let Some(date) = NaiveDate::from_ymd_opt(year, month, day) {
            if date.month() != month { break; }
            dates.push(date.format("%Y-%m-%d").to_string());
            day += 1;
        }
        Ok((label, dates))
    },
    date_to_label: |date| NaiveDate::parse_from_str(date, "%Y-%m-%d")
        .ok().map(|d| format!("{}-{:02}", d.year(), d.month())),
};

const LEVELS: &[&DigestLevel] = &[&DAILY, &WEEKLY, &MONTHLY];

/// Store key for a digest node: "daily-2026-03-04", "weekly-2026-W09", etc.
fn digest_node_key(level_name: &str, label: &str) -> String {
    format!("{}-{}", level_name, label)
}

// --- Input gathering ---

/// Result of gathering inputs for a digest.
struct GatherResult {
    label: String,
    /// (display_label, content) pairs for the prompt.
    inputs: Vec<(String, String)>,
    /// Store keys of source nodes — used to create structural links.
    source_keys: Vec<String>,
}

/// Load child digest content from the store.
fn load_child_digests(store: &Store, prefix: &str, labels: &[String]) -> (Vec<(String, String)>, Vec<String>) {
    let mut digests = Vec::new();
    let mut keys = Vec::new();
    for label in labels {
        let key = digest_node_key(prefix, label);
        if let Some(node) = store.nodes.get(&key) {
            digests.push((label.clone(), node.content.clone()));
            keys.push(key);
        }
    }
    (digests, keys)
}

/// Unified: gather inputs for any digest level.
fn gather(level: &DigestLevel, store: &Store, arg: &str) -> Result<GatherResult, String> {
    let (label, dates) = (level.label_dates)(arg)?;

    let (inputs, source_keys) = if let Some(child_name) = level.child_name {
        // Map parent's dates through child's date_to_label → child labels
        let child = LEVELS.iter()
            .find(|l| l.name == child_name)
            .expect("invalid child_name");
        let child_labels: Vec<String> = dates.iter()
            .filter_map(|d| (child.date_to_label)(d))
            .collect::<BTreeSet<_>>()
            .into_iter()
            .collect();
        load_child_digests(store, child_name, &child_labels)
    } else {
        // Leaf level: scan store for episodic entries matching date
        let mut entries: Vec<_> = store.nodes.iter()
            .filter(|(_, n)| n.node_type == store::NodeType::EpisodicSession
                && n.created_at > 0
                && store::format_date(n.created_at) == label)
            .map(|(key, n)| {
                (store::format_datetime(n.timestamp), n.content.clone(), key.clone())
            })
            .collect();
        entries.sort_by(|a, b| a.0.cmp(&b.0));
        let keys = entries.iter().map(|(_, _, k)| k.clone()).collect();
        let inputs = entries.into_iter().map(|(dt, c, _)| (dt, c)).collect();
        (inputs, keys)
    };

    Ok(GatherResult { label, inputs, source_keys })
}

/// Unified: find candidate labels for auto-generation (past, not yet generated).
fn find_candidates(level: &DigestLevel, dates: &[String], today: &str) -> Vec<String> {
    let today_label = (level.date_to_label)(today);
    dates.iter()
        .filter_map(|d| (level.date_to_label)(d))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .filter(|l| Some(l) != today_label.as_ref())
        .collect()
}
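Concretely: with entries on 2026-03-03 and 2026-03-04, and today set to 2026-03-04, the daily level yields only the completed day; whether that digest already exists is checked later, in `digest_auto`.

```rust
// Worked example for find_candidates() at the daily level: today's own
// label is excluded, so only completed periods become candidates.
let dates = vec!["2026-03-03".to_string(), "2026-03-04".to_string()];
let candidates = find_candidates(&DAILY, &dates, "2026-03-04");
assert_eq!(candidates, vec!["2026-03-03".to_string()]);
```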
|
||||
|
||||
// --- Unified generator ---
|
||||
|
||||
fn format_inputs(inputs: &[(String, String)], daily: bool) -> String {
|
||||
let mut text = String::new();
|
||||
for (label, content) in inputs {
|
||||
if daily {
|
||||
text.push_str(&format!("\n### {}\n\n{}\n", label, content));
|
||||
} else {
|
||||
text.push_str(&format!("\n---\n## {}\n{}\n", label, content));
|
||||
}
|
||||
}
|
||||
text
|
||||
}
|
||||
|
||||
fn generate_digest(
|
||||
store: &mut Store,
|
||||
level: &DigestLevel,
|
||||
label: &str,
|
||||
inputs: &[(String, String)],
|
||||
source_keys: &[String],
|
||||
) -> Result<(), String> {
|
||||
println!("Generating {} digest for {}...", level.name, label);
|
||||
|
||||
if inputs.is_empty() {
|
||||
println!(" No inputs found for {}", label);
|
||||
return Ok(());
|
||||
}
|
||||
println!(" {} inputs", inputs.len());
|
||||
|
||||
let keys = semantic_keys(store);
|
||||
let keys_text = keys.iter()
|
||||
.map(|k| format!(" - {}", k))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
let content = format_inputs(inputs, level.child_name.is_none());
|
||||
let covered = inputs.iter()
|
||||
.map(|(l, _)| l.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
// Load agent def — drives template, temperature, priority, tools
|
||||
let def = super::defs::get_def("digest")
|
||||
.ok_or("no digest agent definition")?;
|
||||
let template = def.steps.first()
|
||||
.map(|s| s.prompt.clone())
|
||||
.ok_or("digest agent has no prompt")?;
|
||||
|
||||
// Substitute digest-specific and config placeholders, then resolve
|
||||
// standard {{node:...}} etc. via the placeholder system
|
||||
let cfg = crate::config::get();
|
||||
let partial = template
|
||||
.replace("{agent_name}", &def.agent)
|
||||
.replace("{user_name}", &cfg.user_name)
|
||||
.replace("{assistant_name}", &cfg.assistant_name)
|
||||
.replace("{{LEVEL}}", level.title)
|
||||
.replace("{{PERIOD}}", level.period)
|
||||
.replace("{{INPUT_TITLE}}", level.input_title)
|
||||
.replace("{{LABEL}}", label)
|
||||
.replace("{{CONTENT}}", &content)
|
||||
.replace("{{COVERED}}", &covered)
|
||||
.replace("{{KEYS}}", &keys_text);
|
||||
|
||||
let graph = store.build_graph();
|
||||
let (prompt, _) = super::defs::resolve_placeholders(
|
||||
&partial, store, &graph, &[], 0,
|
||||
);
|
||||
println!(" Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);
|
||||
|
||||
// Log to file like other agents
|
||||
let log_dir = dirs::home_dir().unwrap_or_default()
|
||||
.join(".consciousness/logs/llm/digest");
|
||||
std::fs::create_dir_all(&log_dir).ok();
|
||||
let log_path = log_dir.join(format!("{}.txt", crate::store::compact_timestamp()));
|
||||
let _log = move |msg: &str| {
|
||||
use std::io::Write;
|
||||
if let Ok(mut f) = std::fs::OpenOptions::new()
|
||||
.create(true).append(true).open(&log_path)
|
||||
{
|
||||
let _ = writeln!(f, "{}", msg);
|
||||
}
|
||||
};
|
||||
|
||||
println!(" Calling LLM...");
|
||||
    let prompts = vec![prompt];
    let phases: Vec<String> = def.steps.iter().map(|s| s.phase.clone()).collect();
    // Filter tools based on agent def
    let all_tools = crate::agent::tools::memory_and_journal_tools();
    let tools: Vec<_> = if def.tools.is_empty() {
        all_tools.to_vec()
    } else {
        all_tools.into_iter()
            .filter(|t| def.tools.iter().any(|w| w == &t.name))
            .collect()
    };
    let digest = crate::agent::oneshot::call_api_with_tools_sync(
        &def.agent, &prompts, &phases, def.temperature, def.priority,
        &tools, None)?;

    let key = digest_node_key(level.name, label);
    store.upsert_provenance(&key, &digest, "digest:write")?;

    // Structural links: connect all source entries to this digest
    let mut linked = 0;
    for source_key in source_keys {
        // Skip if link already exists
        let exists = store.relations.iter().any(|r|
            !r.deleted && r.source_key == *source_key && r.target_key == key);
        if exists { continue; }

        let source_uuid = store.nodes.get(source_key)
            .map(|n| n.uuid).unwrap_or([0u8; 16]);
        let target_uuid = store.nodes.get(&key)
            .map(|n| n.uuid).unwrap_or([0u8; 16]);
        let mut rel = new_relation(
            source_uuid, target_uuid,
            store::RelationType::Link, 0.8,
            source_key, &key,
        );
        rel.provenance = "digest:structural".to_string();
        store.add_relation(rel)?;
        linked += 1;
    }
    if linked > 0 {
        println!(" Linked {} source entries → {}", linked, key);
    }

    store.save()?;
    println!(" Stored: {}", key);

    println!(" Done: {} lines", digest.lines().count());
    Ok(())
}

// --- Public API ---

pub fn generate(store: &mut Store, level_name: &str, arg: &str) -> Result<(), String> {
    let level = LEVELS.iter()
        .find(|l| l.name == level_name)
        .ok_or_else(|| format!("unknown digest level: {}", level_name))?;
    let result = gather(level, store, arg)?;
    generate_digest(store, level, &result.label, &result.inputs, &result.source_keys)
}

// --- Auto-detect and generate missing digests ---

pub fn digest_auto(store: &mut Store) -> Result<(), String> {
    let today = Local::now().format("%Y-%m-%d").to_string();

    // Collect all dates with episodic entries
    let dates: Vec<String> = store.nodes.values()
        .filter(|n| n.node_type == store::NodeType::EpisodicSession && n.created_at > 0)
        .map(|n| store::format_date(n.created_at))
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect();

    let mut total = 0u32;

    for level in LEVELS {
        let candidates = find_candidates(level, &dates, &today);
        let mut generated = 0u32;
        let mut skipped = 0u32;

        for arg in &candidates {
            let result = gather(level, store, arg)?;
            let key = digest_node_key(level.name, &result.label);
            if store.nodes.contains_key(&key) {
                skipped += 1;
                continue;
            }
            if result.inputs.is_empty() { continue; }
            println!("[auto] Missing {} digest for {}", level.name, result.label);
            generate_digest(store, level, &result.label, &result.inputs, &result.source_keys)?;
            generated += 1;
        }

        println!("[auto] {}: {} generated, {} existed", level.name, generated, skipped);
        total += generated;
    }

    if total == 0 {
        println!("[auto] All digests up to date.");
    } else {
        println!("[auto] Generated {} total digests.", total);
    }
    Ok(())
}
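Between them, `generate` covers one explicitly named digest while `digest_auto` sweeps for gaps. A minimal sketch of wiring both into a single entry point, assuming only the `Store::load()` constructor used by the tool handlers further down; the subcommand shape itself is illustrative, not part of this change:

```rust
// Illustrative dispatch over the two public entry points above.
// `Store::load()`, `generate`, and `digest_auto` come from this module;
// the command shape is an assumption.
fn run_digest_command(level: &str, arg: Option<&str>) -> Result<(), String> {
    let mut store = Store::load()?;
    match (level, arg) {
        ("auto", _) => digest_auto(&mut store),
        (lvl, Some(label)) => generate(&mut store, lvl, label),
        (lvl, None) => Err(format!("digest {} needs a date or label", lvl)),
    }
}
```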

// --- Digest link parsing ---
// Replaces digest-link-parser.py: parses ## Links sections from digest
// files and applies them to the memory graph.

/// A parsed link from a digest's Links section.
pub struct DigestLink {
@@ -579,110 +201,3 @@ pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, usize, usize)

    (applied, skipped, fallbacks)
}

// --- Tool interface for digest generation (added 2026-04-04) ---

/// Helper: extract string argument from tool call
fn get_str_required(args: &serde_json::Value, name: &str) -> Result<String, String> {
    args.get(name)
        .and_then(|v| v.as_str())
        .map(|s| s.to_string())
        .ok_or_else(|| format!("{} is required", name))
}

/// Wrap a Result<T, String> for use in anyhow handlers.
fn str_err<T>(r: Result<T, String>) -> anyhow::Result<T> {
    r.map_err(|e| anyhow::anyhow!("{}", e))
}

/// digest_daily tool handler: generate a daily digest
async fn handle_digest_daily(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    let date = str_err(get_str_required(&args, "date"))?;
    let mut store = str_err(Store::load())?;
    str_err(generate(&mut store, "daily", &date))?;
    Ok(format!("Daily digest generated for {}", date))
}

/// digest_weekly tool handler: generate a weekly digest
async fn handle_digest_weekly(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    let week_label = str_err(get_str_required(&args, "week"))?;
    let mut store = str_err(Store::load())?;
    str_err(generate(&mut store, "weekly", &week_label))?;
    Ok(format!("Weekly digest generated for {}", week_label))
}

/// digest_monthly tool handler: generate a monthly digest
async fn handle_digest_monthly(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    args: serde_json::Value,
) -> anyhow::Result<String> {
    let month = str_err(get_str_required(&args, "month"))?;
    let mut store = str_err(Store::load())?;
    str_err(generate(&mut store, "monthly", &month))?;
    Ok(format!("Monthly digest generated for {}", month))
}

/// digest_auto tool handler: auto-generate all missing digests
async fn handle_digest_auto(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    _args: serde_json::Value,
) -> anyhow::Result<String> {
    let mut store = str_err(Store::load())?;
    str_err(digest_auto(&mut store))?;
    Ok("Auto-generated all missing digests".to_string())
}

/// digest_links tool handler: parse and apply digest links
async fn handle_digest_links(
    _agent: Option<std::sync::Arc<super::super::agent::Agent>>,
    _args: serde_json::Value,
) -> anyhow::Result<String> {
    let mut store = str_err(Store::load())?;
    let links = parse_all_digest_links(&store);
    let (applied, skipped, fallbacks) = apply_digest_links(&mut store, &links);
    str_err(store.save())?;
    Ok(format!("Applied {} digest links ({} skipped, {} fallback)", applied, skipped, fallbacks))
}
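`parse_all_digest_links` itself sits outside this hunk, so the `## Links` format is only described by the comment earlier in the file. A sketch of what a line-oriented parser for such a section could look like; the `source -> target` line shape is an assumed example, not the project's actual syntax:

```rust
/// Hypothetical parser for one digest body. Only the `## Links` heading
/// comes from the comment above; the `source -> target` bullet format
/// is an assumption.
fn parse_links_section(body: &str) -> Vec<(String, String)> {
    let mut in_links = false;
    let mut links = Vec::new();
    for line in body.lines() {
        if let Some(heading) = line.strip_prefix("## ") {
            // Enter the section at "## Links", leave at any other heading.
            in_links = heading.trim() == "Links";
            continue;
        }
        if !in_links { continue; }
        if let Some((src, dst)) = line.split_once("->") {
            links.push((src.trim_start_matches('-').trim().to_string(),
                        dst.trim().to_string()));
        }
    }
    links
}
```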

/// Return digest tools array for the tool registry
pub fn digest_tools() -> [super::super::agent::tools::Tool; 5] {
    use super::super::agent::tools::Tool;
    [
        Tool {
            name: "digest_daily",
            description: "Generate a daily digest from journal entries.",
            parameters_json: r#"{"type":"object","properties":{"date":{"type":"string","description":"Date in YYYY-MM-DD format"}}, "required":["date"]}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_daily(_a, v).await })),
        },
        Tool {
            name: "digest_weekly",
            description: "Generate a weekly digest from daily digests.",
            parameters_json: r#"{"type":"object","properties":{"week":{"type":"string","description":"Week label (YYYY-W##) or date (YYYY-MM-DD)"}}, "required":["week"]}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_weekly(_a, v).await })),
        },
        Tool {
            name: "digest_monthly",
            description: "Generate a monthly digest from weekly digests.",
            parameters_json: r#"{"type":"object","properties":{"month":{"type":"string","description":"Month label (YYYY-MM) or date (YYYY-MM-DD)"}}, "required":["month"]}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_monthly(_a, v).await })),
        },
        Tool {
            name: "digest_auto",
            description: "Auto-generate all missing digests (daily, weekly, monthly) for past dates that have content but no digest yet.",
            parameters_json: r#"{"type":"object","properties":{}}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_auto(_a, v).await })),
        },
        Tool {
            name: "digest_links",
            description: "Parse and apply structural links from digest nodes to the memory graph.",
            parameters_json: r#"{"type":"object","properties":{}}"#,
            handler: Arc::new(|_a, v| Box::pin(async move { handle_digest_links(_a, v).await })),
        },
    ]
}
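Each handler receives the raw JSON arguments the model produced, so the required-argument check in `get_str_required` is the first thing that can fail. A small sketch of exercising that path directly; it fails before `Store::load()` is reached, so no store is needed, and `#[tokio::test]` is assumed to be available via dev-dependencies:

```rust
// Missing-argument path: get_str_required rejects the empty object
// before any store or model work happens.
#[tokio::test]
async fn digest_daily_requires_date() {
    let err = handle_digest_daily(None, serde_json::json!({}))
        .await
        .unwrap_err();
    assert_eq!(err.to_string(), "date is required");
}
```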
@@ -1,24 +1,5 @@

// Agent layer: LLM-powered operations on the memory graph
//
// Everything here calls external models (Sonnet, Haiku) or orchestrates
// sequences of such calls. The core graph infrastructure (store, graph,
// spectral, search, similarity) lives at the crate root.
//
// llm         — model invocation, response parsing
// prompts     — prompt generation from store data
// defs        — agent file loading and placeholder resolution
// audit       — link quality review via Sonnet
// consolidate — full consolidation pipeline
// knowledge   — agent execution, conversation fragment selection
// enrich      — journal enrichment, experience mining
// digest      — episodic digest generation (daily/weekly/monthly)
// daemon      — background job scheduler
// transcript  — shared JSONL transcript parsing
//
// The session hook (context injection, agent orchestration) moved to claude/hook.

pub mod audit;
pub mod consolidate;
pub mod daemon;
pub mod defs;
pub mod digest;
@@ -333,45 +333,6 @@ pub(super) fn format_split_plan_node(store: &Store, graph: &Graph, key: &str) ->

    out
}

/// Show consolidation batch status or generate an agent prompt.
pub fn consolidation_batch(store: &Store, count: usize, auto: bool) -> Result<(), String> {
    if auto {
        let batch = agent_prompt(store, "replay", count)?;
        for (i, s) in batch.steps.iter().enumerate() {
            if batch.steps.len() > 1 {
                println!("=== STEP {} ({}) ===\n", i + 1, s.phase);
            }
            println!("{}", s.prompt);
        }
        return Ok(());
    }

    let items = replay_queue(store, count);

    if items.is_empty() {
        println!("No nodes to consolidate.");
        return Ok(());
    }

    println!("Consolidation batch ({} nodes):\n", items.len());
    for item in &items {
        let node_type = store.nodes.get(&item.key)
            .map(|n| if matches!(n.node_type, crate::store::NodeType::EpisodicSession) { "episodic" } else { "semantic" })
            .unwrap_or("?");
        println!(" [{:.3}] {} (cc={:.3}, interval={}d, type={})",
            item.priority, item.key, item.cc, item.interval_days, node_type);
    }

    println!("\nAgent prompts:");
    println!("  --auto             Generate replay agent prompt");
    println!("  --agent replay     Replay agent (schema assimilation)");
    println!("  --agent linker     Linker agent (relational binding)");
    println!("  --agent separator  Separator agent (pattern separation)");
    println!("  --agent transfer   Transfer agent (CLS episodic→semantic)");
    println!("  --agent health     Health agent (synaptic homeostasis)");
    Ok(())
}

/// Generate a specific agent prompt with filled-in data.
pub fn agent_prompt(store: &Store, agent: &str, count: usize) -> Result<AgentBatch, String> {
    let def = super::defs::get_def(agent)
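For reference, the queue items printed above are accessed through `item.priority`, `item.key`, `item.cc`, and `item.interval_days`. The shape that implies, with types assumed from the format specifiers (`{:.3}` suggesting floats, `{}d` an integer day count):

```rust
// Inferred from the field accesses above; field names are real,
// types are assumptions.
struct ReplayItem {
    key: String,        // node key in the store
    priority: f64,      // batch ordering, printed as [{:.3}]
    cc: f64,            // consolidation strength, printed as cc={:.3}
    interval_days: u32, // replay spacing in days
}
```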
@@ -157,7 +157,6 @@ impl ScreenView for ConsciousScreen {

            lines.push(Line::raw(format!(" {:53} {:>6} tokens", "────────", "──────")));
            lines.push(Line::raw(format!(" {:53} {:>6} tokens", "Total", total)));
        } else if let Some(ref info) = app.context_info {
            lines.push(Line::raw(format!(" System prompt: {:>6} chars", info.system_prompt_chars)));
            lines.push(Line::raw(format!(" Context message: {:>6} chars", info.context_message_chars)));
        }
        lines.push(Line::raw(""));
@@ -49,7 +49,6 @@ struct ContextInfo {

    available_models: Vec<String>,
    prompt_file: String,
    backend: String,
    system_prompt_chars: usize,
    context_message_chars: usize,
}
@@ -404,7 +403,12 @@ async fn run(

                    unc.toggle(name).await;
                }
            }
            app.unconscious_state = unc.snapshots();
            let store = crate::store::Store::cached().await.ok();
            let store_guard = match &store {
                Some(s) => Some(s.lock().await),
                None => None,
            };
            app.unconscious_state = unc.snapshots(store_guard.as_deref());
            app.graph_health = unc.graph_health.clone();
            app.mind_state = Some(mind.shared.lock().unwrap().clone());
        }
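The `as_deref()` on the guard is the subtle part of this change: `snapshots` now wants an `Option<&Store>`, while locking yields an `Option` of a mutex guard, and `as_deref` borrows through the guard without consuming it. A standalone illustration of the same conversion, using `std::sync::Mutex` in place of the async mutex in the diff (the mechanics are identical):

```rust
use std::sync::Mutex;

// Option<MutexGuard<T>> -> Option<&T> via as_deref: the guard stays
// alive and locked, the callee only sees a plain reference.
fn label_len(label: Option<&String>) -> usize {
    label.map_or(0, |l| l.len())
}

fn main() {
    let shared = Mutex::new(String::from("store"));
    let guard = Some(shared.lock().unwrap());
    assert_eq!(label_len(guard.as_deref()), 5);
}
```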
@@ -514,10 +518,6 @@ pub struct CliArgs {

    #[arg(long)]
    pub show_config: bool,

    /// Override all prompt assembly with this file
    #[arg(long)]
    pub system_prompt_file: Option<PathBuf>,

    /// Project memory directory
    #[arg(long)]
    pub memory_project: Option<PathBuf>,
@@ -18,10 +18,10 @@ use super::{App, ScreenView, screen_legend};

use super::widgets::{SectionTree, SectionView, section_to_view, pane_block_focused, tree_legend, format_age, format_ts_age};

#[derive(Clone, Copy, PartialEq)]
enum Pane { Agents, Outputs, History, Context }
enum Pane { Agents, Outputs, Stats, History, Context }

// Clockwise: top-left → right → bottom-left → middle-left
const PANE_ORDER: &[Pane] = &[Pane::Agents, Pane::Context, Pane::History, Pane::Outputs];
const PANE_ORDER: &[Pane] = &[Pane::Agents, Pane::Context, Pane::History, Pane::Stats, Pane::Outputs];

pub(crate) struct SubconsciousScreen {
    focus: Pane,
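With Stats added, `PANE_ORDER` now has five stops. One way focus cycling over a slice like this is typically done (illustrative; the screen's actual key handling is in a hunk not shown here):

```rust
// Step focus to the next pane in PANE_ORDER, wrapping at the end and
// defaulting to the first pane if the current one is somehow absent.
fn next_pane(current: Pane) -> Pane {
    let next = PANE_ORDER.iter()
        .position(|p| *p == current)
        .map(|i| (i + 1) % PANE_ORDER.len())
        .unwrap_or(0);
    PANE_ORDER[next]
}
```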
@@ -29,6 +29,7 @@ pub(crate) struct SubconsciousScreen {

    output_tree: SectionTree,
    context_tree: SectionTree,
    history_scroll: super::scroll_pane::ScrollPaneState,
    stats_scroll: super::scroll_pane::ScrollPaneState,
}

impl SubconsciousScreen {
@@ -41,6 +42,7 @@ impl SubconsciousScreen {

            output_tree: SectionTree::new(),
            context_tree: SectionTree::new(),
            history_scroll: super::scroll_pane::ScrollPaneState::new(),
            stats_scroll: super::scroll_pane::ScrollPaneState::new(),
        }
    }

@@ -87,6 +89,13 @@ impl ScreenView for SubconsciousScreen {

                _ => {}
            }
            Pane::Outputs => self.output_tree.handle_nav(code, &output_sections, area.height),
            Pane::Stats => match code {
                KeyCode::Up => self.stats_scroll.scroll_up(3),
                KeyCode::Down => self.stats_scroll.scroll_down(3),
                KeyCode::PageUp => self.stats_scroll.scroll_up(20),
                KeyCode::PageDown => self.stats_scroll.scroll_down(20),
                _ => {}
            }
            Pane::History => match code {
                KeyCode::Up => self.history_scroll.scroll_up(3),
                KeyCode::Down => self.history_scroll.scroll_down(3),
@@ -106,7 +115,7 @@ impl ScreenView for SubconsciousScreen {

            Constraint::Percentage(62),
        ]).areas(area);

        // Left column: agent list (top) | outputs (middle) | history (bottom, main)
        // Left column: agent list | outputs | stats | history
        let unc_count = if app.unconscious_state.is_empty() { 0 }
            else { app.unconscious_state.len() + 1 }; // +1 for separator
        let agent_count = (app.agent_state.len() + unc_count).max(1) as u16;
@@ -114,15 +123,21 @@ impl ScreenView for SubconsciousScreen {

        let output_lines = app.agent_state.get(self.selected())
            .map(|s| s.state.values().map(|v| v.lines().count() + 1).sum::<usize>())
            .unwrap_or(0);
        let output_height = (output_lines as u16 + 2).min(left.height / 4).max(3);
        let [list_area, output_area, history_area] = Layout::vertical([
        let output_height = (output_lines as u16 + 2).min(left.height / 5).max(3);
        let stats_lines = self.selected_persisted_stats(app)
            .map(|s| s.by_tool.len())
            .unwrap_or(0);
        let stats_height = (stats_lines as u16 + 2).min(left.height / 5).max(3);
        let [list_area, output_area, stats_area, history_area] = Layout::vertical([
            Constraint::Length(list_height),
            Constraint::Length(output_height),
            Constraint::Length(stats_height),
            Constraint::Min(5),
        ]).areas(left);

        self.draw_list(frame, list_area, app);
        self.draw_outputs(frame, output_area, app);
        self.draw_stats(frame, stats_area, app);
        self.draw_history(frame, history_area, app);
        self.draw_context(frame, right, &context_sections, app);
    }
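Both computed heights follow the same clamp: content lines plus two border rows, capped at a fifth of the left column, floored at three rows so an empty pane still shows its frame. Factored out as a helper, this is just a restatement of the expressions above, not a change in the diff:

```rust
// Clamp used by output_height and stats_height above: content plus the
// two border rows, at most a fifth of the column, at least 3 rows.
fn pane_height(content_lines: usize, column_height: u16) -> u16 {
    (content_lines as u16 + 2).min(column_height / 5).max(3)
}
// pane_height(0, 40) == 3 (floor); pane_height(30, 40) == 8 (cap).
```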
@@ -147,6 +162,7 @@ impl SubconsciousScreen {

        self.output_tree = SectionTree::new();
        self.context_tree = SectionTree::new();
        self.history_scroll = super::scroll_pane::ScrollPaneState::new();
        self.stats_scroll = super::scroll_pane::ScrollPaneState::new();
    }

    /// Get the agent Arc for the selected item, whether subconscious or unconscious.
@@ -160,6 +176,27 @@ impl SubconsciousScreen {

        app.unconscious_state.get(unc_idx)?.agent.clone()
    }

    /// Get store activity history for the selected agent.
    fn selected_history<'a>(&self, app: &'a App) -> &'a [(String, i64)] {
        let idx = self.selected();
        let sub_count = app.agent_state.len();
        if idx < sub_count {
            return app.agent_state.get(idx)
                .map(|s| s.history.as_slice())
                .unwrap_or(&[]);
        }
        idx.checked_sub(sub_count + 1)
            .and_then(|i| app.unconscious_state.get(i))
            .map(|s| s.history.as_slice())
            .unwrap_or(&[])
    }

    /// Get persisted stats for the selected agent.
    fn selected_persisted_stats(&self, app: &App) -> Option<crate::agent::oneshot::PersistedStats> {
        let name = self.selected_agent_name(app)?;
        Some(crate::agent::oneshot::get_stats(&name))
    }

    fn output_sections(&self, app: &App) -> Vec<SectionView> {
        let snap = match app.agent_state.get(self.selected()) {
            Some(s) => s,
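The `sub_count + 1` in `selected_history` mirrors the `+1 for separator` seen in the layout code: list rows run subconscious agents first, then one separator row, then unconscious agents. The row-to-index mapping in isolation:

```rust
// Rows: [subconscious 0..sub_count][separator][unconscious 0..].
// The separator row maps to None; everything past it shifts down
// by sub_count + 1.
fn unconscious_index(row: usize, sub_count: usize) -> Option<usize> {
    row.checked_sub(sub_count + 1)
}
// With sub_count = 3: row 3 (separator) -> None, row 4 -> Some(0).
```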
@@ -186,47 +223,65 @@ impl SubconsciousScreen {

        agent.context.try_lock().ok()
            .map(|ctx| {
                let mut views = Vec::new();
                views.push(section_to_view("System", ctx.system()));
                views.push(section_to_view("Identity", ctx.identity()));
                views.push(section_to_view("Journal", ctx.journal()));

                // Conversation: skip to fork point for subconscious agents
                let conv = ctx.conversation();
                let view = section_to_view("Conversation", conv);
                let fork = fork_point.min(view.children.len());
                view.children.into_iter().skip(fork).collect()
                let conv_view = section_to_view("Conversation", conv);
                let fork = fork_point.min(conv_view.children.len());
                let conv_children: Vec<SectionView> = conv_view.children
                    .into_iter().skip(fork).collect();
                views.push(SectionView {
                    name: format!("Conversation ({} entries)", conv_children.len()),
                    tokens: conv_children.iter().map(|c| c.tokens).sum(),
                    content: String::new(),
                    children: conv_children,
                    status: String::new(),
                });

                views
            })
            .unwrap_or_default()
    }

    fn draw_list(&mut self, frame: &mut Frame, area: Rect, app: &App) {
        let mut items: Vec<ListItem> = app.agent_state.iter().map(|snap| {
            if !snap.enabled {
                ListItem::from(Line::from(vec![
                    Span::styled(&snap.name, Style::default().fg(Color::DarkGray)),
                    Span::styled(" ○ off", Style::default().fg(Color::DarkGray)),
                ]))
            let (name_color, indicator) = if !snap.enabled {
                (Color::DarkGray, "○")
            } else if snap.running {
                ListItem::from(Line::from(vec![
                    Span::styled(&snap.name, Style::default().fg(Color::Green)),
                    Span::styled(" ● ", Style::default().fg(Color::Green)),
                    Span::styled(
                        format!("p:{} t:{}", snap.current_phase, snap.turn),
                        Style::default().fg(Color::DarkGray),
                    ),
                ]))
                (Color::Green, "●")
            } else {
                let ago = snap.last_run_secs_ago
                    .map(|s| format_age(s))
                    .unwrap_or_else(|| "—".to_string());
                let entries = snap.forked_agent.as_ref()
                    .and_then(|a| a.context.try_lock().ok())
                    .map(|ctx| ctx.conversation().len().saturating_sub(snap.fork_point))
                    .unwrap_or(0);
                ListItem::from(Line::from(vec![
                    Span::styled(&snap.name, Style::default().fg(Color::Gray)),
                    Span::styled(" ○ ", Style::default().fg(Color::DarkGray)),
                    Span::styled(
                        format!("{} {}e", ago, entries),
                        Style::default().fg(Color::DarkGray),
                    ),
                ]))
            }
                (Color::Gray, "○")
            };
            let ago = snap.last_run_secs_ago
                .map(|s| format_age(s))
                .unwrap_or_else(|| "—".to_string());
            let detail = if snap.running {
                format!("p:{} t:{}", snap.current_phase, snap.turn)
            } else if !snap.enabled {
                "off".to_string()
            } else if let Some(ref stats) = snap.last_stats {
                let fail_str = if stats.tool_failures > 0 {
                    format!(" {}fail", stats.tool_failures)
                } else {
                    String::new()
                };
                format!("×{} {} {}tc{} avg:{:.1}",
                    snap.runs, ago,
                    stats.tool_calls, fail_str,
                    snap.tool_calls_ewma)
            } else {
                format!("×{} {}", snap.runs, ago)
            };
            ListItem::from(Line::from(vec![
                Span::styled(&snap.name, Style::default().fg(name_color)),
                Span::styled(format!(" {} ", indicator),
                    Style::default().fg(if snap.running { Color::Green } else { Color::DarkGray })),
                Span::styled(detail, Style::default().fg(Color::DarkGray)),
            ]))
        }).collect();

        // Unconscious agents (graph maintenance)
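The `avg:{:.1}` figure is `tool_calls_ewma`, an exponentially weighted moving average of tool calls per run. The smoothing factor is not visible in this diff; the update rule itself is the standard one, with the 0.3 below an assumed value for illustration:

```rust
// Standard EWMA step; alpha is an assumption, not read from the diff.
fn ewma_update(prev: f64, sample: f64, alpha: f64) -> f64 {
    alpha * sample + (1.0 - alpha) * prev
}
// ewma_update(4.0, 10.0, 0.3) == 5.8: a spiky run nudges the average
// toward 10 without letting one outlier dominate the display.
```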
@@ -251,9 +306,15 @@ impl SubconsciousScreen {

            } else if !snap.enabled {
                "off".to_string()
            } else if let Some(ref stats) = snap.last_stats {
                format!("×{} {} {}msg {}tc",
                let fail_str = if stats.tool_failures > 0 {
                    format!(" {}fail", stats.tool_failures)
                } else {
                    String::new()
                };
                format!("×{} {} {}tc{} avg:{:.1}",
                    snap.runs, ago,
                    stats.messages, stats.tool_calls)
                    stats.tool_calls, fail_str,
                    snap.tool_calls_ewma)
            } else {
                format!("×{} {}", snap.runs, ago)
            };
@@ -301,28 +362,93 @@ impl SubconsciousScreen {

        frame.render_stateful_widget(widget, area, &mut self.output_tree.scroll);
    }

    fn draw_stats(&mut self, frame: &mut Frame, area: Rect, app: &App) {
        let dim = Style::default().fg(Color::DarkGray);
        let header_style = Style::default().fg(Color::DarkGray);
        let name_style = Style::default().fg(Color::Cyan);
        let num_style = Style::default().fg(Color::Yellow);

        let mut lines: Vec<Line> = Vec::new();

        if let Some(stats) = self.selected_persisted_stats(app) {
            if !stats.by_tool.is_empty() {
                // Header
                lines.push(Line::from(vec![
                    Span::styled(" tool ", header_style),
                    Span::styled("last ", header_style),
                    Span::styled(" avg ", header_style),
                    Span::styled("total", header_style),
                ]));

                // Sort by total descending
                let mut tools: Vec<_> = stats.by_tool.iter().collect();
                tools.sort_by(|a, b| b.1.total.cmp(&a.1.total));

                for (name, tool_stats) in tools {
                    let short_name = name.strip_prefix("memory_").unwrap_or(name);
                    lines.push(Line::from(vec![
                        Span::styled(format!(" {:<20} ", short_name), name_style),
                        Span::styled(format!("{:>4} ", tool_stats.last), num_style),
                        Span::styled(format!("{:>4.1} ", tool_stats.ewma), dim),
                        Span::styled(format!("{:>5}", tool_stats.total), num_style),
                    ]));
                }

                // Failures row if any
                if stats.failures.total > 0 {
                    lines.push(Line::raw(""));
                    lines.push(Line::from(vec![
                        Span::styled(" failures ", Style::default().fg(Color::Red)),
                        Span::styled(format!("{:>4} ", stats.failures.last), num_style),
                        Span::styled(format!("{:>4.1} ", stats.failures.ewma), dim),
                        Span::styled(format!("{:>5}", stats.failures.total), num_style),
                    ]));
                }
            }
        }

        if lines.is_empty() {
            lines.push(Line::styled(" (no tool calls)", dim));
        }

        let mut block = pane_block_focused("tool calls", self.focus == Pane::Stats);
        if self.focus == Pane::Stats {
            block = block.title_bottom(Line::styled(
                " ↑↓:scroll PgUp/Dn ",
                Style::default().fg(Color::DarkGray),
            ));
        }
        let widget = super::scroll_pane::ScrollPane::new(&lines)
            .block(block);
        frame.render_stateful_widget(widget, area, &mut self.stats_scroll);
    }
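A note on the descending sort above: comparing `b` to `a` flips the order, which is easy to misread. `sort_by_key` with `Reverse` says the same thing more directly (equivalent sketch, not a proposed change to the diff):

```rust
use std::cmp::Reverse;

// Same ordering as `tools.sort_by(|a, b| b.1.total.cmp(&a.1.total))`:
// largest total first.
fn main() {
    let mut totals = vec![("write", 3u64), ("search", 12), ("link", 7)];
    totals.sort_by_key(|&(_, total)| Reverse(total));
    assert_eq!(totals.first().map(|t| t.0), Some("search"));
}
```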

    fn draw_history(&mut self, frame: &mut Frame, area: Rect, app: &App) {
        let dim = Style::default().fg(Color::DarkGray);
        let key_style = Style::default().fg(Color::Yellow);

        let mut lines: Vec<Line> = Vec::new();
        let mut title = "memory store activity".to_string();

        if let Some(snap) = app.agent_state.get(self.selected()) {
            let short_name = snap.name.strip_prefix("subconscious-").unwrap_or(&snap.name);
            title = format!("{} store activity", short_name);
        let name = self.selected_agent_name(app);
        let short_name = name.as_ref()
            .map(|n| n.strip_prefix("subconscious-").unwrap_or(n))
            .unwrap_or("—");
        let title = format!("{} store activity", short_name);

            if snap.history.is_empty() {
                lines.push(Line::styled(" (no store activity)", dim));
            } else {
                for (key, ts) in &snap.history {
                    lines.push(Line::from(vec![
                        Span::styled(format!(" {:>6} ", format_ts_age(*ts)), dim),
                        Span::styled(key.as_str(), key_style),
                    ]));
                }
            }
        let history = self.selected_history(app);
        if history.is_empty() {
            lines.push(Line::styled(" (no store activity)", dim));
        } else {
            for (key, ts) in history {
                lines.push(Line::from(vec![
                    Span::styled(format!(" {:>6} ", format_ts_age(*ts)), dim),
                    Span::styled(key.as_str(), key_style),
                ]));
            }
        }

        // Walked state (subconscious only)
        if let Some(snap) = app.agent_state.get(self.selected()) {
            if let Some(walked_str) = snap.state.get("walked") {
                let walked: Vec<&str> = walked_str.lines()
                    .map(|l| l.trim()).filter(|l| !l.is_empty()).collect();