cli: add memory_history, remove dump-json/edges/lookups

- Add memory_history MCP tool for version history
- Convert cmd_history to use memory_rpc
- Add raw parameter to memory_render for editing
- Remove unused: dump-json, list-edges, lookup-bump, lookups
- Fix render_node path in defs.rs/subconscious.rs

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-12 22:24:34 -04:00
parent 3e0c6b039f
commit ad59596335
8 changed files with 188 additions and 132 deletions

1
opencode_session_id Normal file
View file

@ -0,0 +1 @@
ses_2864fa54cffe2jLoh5grt8UixA

113
plugins/index.ts Normal file
View file

@ -0,0 +1,113 @@
// opencode-plugin/index.ts — Consciousness integration for OpenCode.
//
// Bridges OpenCode events to the consciousness system:
// - chat.message → forwards to poc-hook-opencode, appends output as text part
// - tool.execute.after → signals response activity
// - event → tracks session lifecycle (idle, compacted, etc.)
// - shell.env → injects POC_SESSION_ID into subprocesses
//
// Install: copy this directory to your project's `plugin/` or `plugins/` dir,
// or add to opencode.json:
// "plugin": ["/home/kent/poc/consciousness-claude/opencode-plugin"]
import type { Plugin, Hooks } from "@opencode-ai/plugin"
import { statSync } from "node:fs"
import path from "path"
import { $ } from "bun"
// Find the poc-hook-opencode binary.
//
// Probes well-known install locations (cargo bin dir, then the project's
// debug/release target dirs) and returns the first path that is a regular
// file. Falls back to the bare command name so normal PATH lookup can still
// resolve it when no candidate exists.
function findHookBinary(): string {
  const home = process.env.HOME || ""
  const candidates = [
    path.join(home, ".cargo/bin/poc-hook-opencode"),
    path.join(home, "poc/consciousness-claude/target/debug/poc-hook-opencode"),
    path.join(home, "poc/consciousness-claude/target/release/poc-hook-opencode"),
  ]
  for (const c of candidates) {
    try {
      // Bug fix: Bun.file(c) has no statSync() method, so the original call
      // always threw into the empty catch and every candidate was skipped —
      // the function unconditionally returned the PATH fallback. node:fs
      // statSync works under Bun and actually detects the installed binary.
      if (statSync(c).isFile()) return c
    } catch {
      // Candidate doesn't exist (ENOENT) — try the next location.
    }
  }
  return "poc-hook-opencode"
}
const HOOK_BINARY = findHookBinary()
// Generate a unique part ID (opencode uses ulid-like ascending IDs).
// Combines wall-clock time with a monotonically increasing counter so two
// calls in the same millisecond still produce distinct IDs.
let partCounter = 0
function nextPartId(): string {
  const seq = ++partCounter
  return ["poc_part", Date.now(), seq].join("_")
}
// Plugin entry point: builds the Hooks table that wires OpenCode events to
// the consciousness system. `ctx` is accepted but currently unused.
export const ConsciousnessPlugin: Plugin = async (ctx) => {
const hooks: Hooks = {}
// Main hook: forward user messages to consciousness, inject context.
// Spawns the hook binary once per message; its stdout (if any) is appended
// to the outgoing message as a synthetic text part.
hooks["chat.message"] = async (input, output) => {
const hookInput = JSON.stringify({
session_id: input.sessionID,
hook_event: "UserPromptSubmit",
})
try {
// NOTE(review): Bun.spawn's `stdin` option documents Blob/TypedArray/
// "pipe"/stream forms — confirm a plain string is accepted here; it may
// need Buffer.from(hookInput) or a Blob to actually reach the child.
const proc = Bun.spawn([HOOK_BINARY], {
stdin: hookInput,
stdout: "pipe",
stderr: "pipe",
})
// Drain both pipes concurrently before waiting on exit, so the child
// cannot deadlock on a full pipe buffer.
const [stdout, stderr] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
])
await proc.exited
if (stdout && stdout.trim()) {
// Append as a text part — must match MessageV2.TextPart schema:
// { id, sessionID, messageID, type: "text", text, time?, synthetic?, ignored? }
output.parts.push({
id: nextPartId(),
sessionID: input.sessionID,
messageID: output.message.id,
type: "text",
text: stdout,
synthetic: true,
})
}
if (stderr && stderr.trim()) {
// Log at most 500 chars of hook stderr to keep the console readable.
console.error("[consciousness] hook stderr:", stderr.slice(0, 500))
}
} catch (e) {
// Hook failures are non-fatal: the message still goes through unmodified.
console.error("[consciousness] hook error:", e)
}
}
// Signal response after tool use. Best-effort: errors are swallowed because
// the daemon may simply not be running.
hooks["tool.execute.after"] = async () => {
try {
await $`poc-daemon response`.quiet()
} catch {
// Daemon might not be running
}
}
// Inject POC_SESSION_ID into all shell commands so subprocesses can find
// their owning session.
hooks["shell.env"] = async (input, output) => {
if (input.sessionID) {
output.env["POC_SESSION_ID"] = input.sessionID
}
}
// Track session events. Both branches are intentionally empty placeholders:
// detection happens out-of-band (via SQLite) on the next hook invocation.
hooks["event"] = async ({ event }) => {
if (event.type === "session.compacted") {
// Compaction detected — next hook invocation will detect via SQLite
}
if (event.type === "session.idle") {
// Session went idle
}
}
return hooks
}

6
plugins/package.json Normal file
View file

@ -0,0 +1,6 @@
{
"name": "@consciousness/opencode-plugin",
"version": "0.1.0",
"description": "Consciousness integration for OpenCode",
"main": "index.ts"
}

View file

@ -116,6 +116,7 @@ async fn dispatch(
"memory_link_set" => link_set(&args).await,
"memory_link_add" => link_add(agent, &args).await,
"memory_delete" => delete(&args).await,
"memory_history" => history(&args).await,
"memory_weight_set" => weight_set(&args).await,
"memory_rename" => rename(&args).await,
"memory_supersede" => supersede(agent, &args).await,
@ -131,7 +132,7 @@ async fn dispatch(
// ── Definitions ────────────────────────────────────────────────
pub fn memory_tools() -> [super::Tool; 13] {
pub fn memory_tools() -> [super::Tool; 14] {
use super::Tool;
[
Tool { name: "memory_render", description: "Read a memory node's content and links.",
@ -155,6 +156,9 @@ pub fn memory_tools() -> [super::Tool; 13] {
Tool { name: "memory_delete", description: "Delete a memory node.",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_delete", &a, v).await })) },
Tool { name: "memory_history", description: "Show version history for a node.",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"},"full":{"type":"boolean","description":"Show full content for each version"}},"required":["key"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_history", &a, v).await })) },
Tool { name: "memory_weight_set", description: "Set a node's weight directly (0.01 to 1.0).",
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string"},"weight":{"type":"number","description":"0.01 to 1.0"}},"required":["key","weight"]}"#,
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_weight_set", &a, v).await })) },
@ -330,6 +334,61 @@ async fn delete(args: &serde_json::Value) -> Result<String> {
Ok(format!("deleted {}", resolved))
}
/// Render the full version history of a memory node as formatted text.
///
/// `args` must contain `"key"` (node key or alias); optional `"full"`
/// (bool, default false) switches from one-line previews to complete
/// content per version.
///
/// Scans the on-disk append-only node log and collects every record whose
/// key matches after alias resolution through the live store. Errors if the
/// log is missing or no matching versions exist.
async fn history(args: &serde_json::Value) -> Result<String> {
    let raw_key = get_str(args, "key")?;
    let full = args.get("full").and_then(|v| v.as_bool()).unwrap_or(false);

    // Resolve aliases via the cached store; fall back to the literal key.
    // The lock is released as soon as this scope ends.
    let arc = cached_store().await?;
    let key = {
        let store = arc.lock().await;
        store.resolve_key(raw_key).unwrap_or_else(|_| raw_key.to_string())
    };

    let path = crate::store::nodes_path();
    if !path.exists() {
        anyhow::bail!("No node log found");
    }

    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| anyhow::anyhow!("open {}: {}", path.display(), e))?;
    let mut log_reader = BufReader::new(file);

    // Read capnp messages until the stream ends; read_message returning Err
    // is treated as end-of-log, matching the CLI's original behavior.
    let mut versions: Vec<crate::store::Node> = Vec::new();
    while let Ok(msg) = capnp::serialize::read_message(&mut log_reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<crate::memory_capnp::node_log::Reader>()
            .map_err(|e| anyhow::anyhow!("read log: {}", e))?;
        for node_reader in log.get_nodes()
            .map_err(|e| anyhow::anyhow!("get nodes: {}", e))? {
            let node = crate::store::Node::from_capnp_migrate(node_reader)
                .map_err(|e| anyhow::anyhow!("{}", e))?;
            if node.key == key {
                versions.push(node);
            }
        }
    }

    if versions.is_empty() {
        anyhow::bail!("No history found for '{}'", key);
    }

    // Assemble the report: header line, then one entry per version.
    let mut out = format!("{} versions of '{}':\n\n", versions.len(), key);
    for node in &versions {
        let ts = crate::store::format_datetime(node.timestamp);
        let deleted = if node.deleted { " DELETED" } else { "" };
        let bytes = node.content.len();
        if full {
            out.push_str(&format!("=== v{} {} {}{} w={:.3} {}b ===\n",
                node.version, ts, node.provenance, deleted, node.weight, bytes));
            out.push_str(&node.content);
            out.push('\n');
        } else {
            // Single-line preview: first 120 chars, newlines escaped.
            let preview = crate::util::first_n_chars(&node.content, 120).replace('\n', "\\n");
            out.push_str(&format!("v{:<3} {} {:24} w={:.3} {}b{}\n {}\n",
                node.version, ts, node.provenance, node.weight, bytes, deleted, preview));
        }
    }
    Ok(out)
}
async fn weight_set(args: &serde_json::Value) -> Result<String> {
let arc = cached_store().await?;
let mut store = arc.lock().await;

View file

@ -48,23 +48,6 @@ pub fn cmd_list_keys(pattern: Option<&str>) -> Result<(), String> {
}
}
/// Print every relation edge to stdout as a TSV line:
/// source key, target key, strength (2 decimals), debug-formatted type.
pub fn cmd_list_edges() -> Result<(), String> {
    let store = store::Store::load()?;
    store.relations.iter().for_each(|edge| {
        println!("{}\t{}\t{:.2}\t{:?}",
            edge.source_key, edge.target_key, edge.strength, edge.rel_type);
    });
    Ok(())
}
/// Serialize the entire store as pretty-printed JSON on stdout.
pub fn cmd_dump_json() -> Result<(), String> {
    let store = store::Store::load()?;
    match serde_json::to_string_pretty(&store) {
        Ok(json) => {
            println!("{}", json);
            Ok(())
        }
        Err(e) => Err(format!("serialize: {}", e)),
    }
}
pub fn cmd_node_delete(key: &[String]) -> Result<(), String> {
if key.is_empty() {
return Err("node-delete requires a key".into());
@ -127,68 +110,12 @@ pub fn cmd_history(key: &[String], full: bool) -> Result<(), String> {
if key.is_empty() {
return Err("history requires a key".into());
}
let raw_key = key.join(" ");
let store = store::Store::load()?;
let key = store.resolve_key(&raw_key).unwrap_or(raw_key);
drop(store);
let path = store::nodes_path();
if !path.exists() {
return Err("No node log found".into());
}
use std::io::BufReader;
let file = std::fs::File::open(&path)
.map_err(|e| format!("open {}: {}", path.display(), e))?;
let mut reader = BufReader::new(file);
let mut versions: Vec<store::Node> = Vec::new();
while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
let log = msg.get_root::<crate::memory_capnp::node_log::Reader>()
.map_err(|e| format!("read log: {}", e))?;
for node_reader in log.get_nodes()
.map_err(|e| format!("get nodes: {}", e))? {
let node = store::Node::from_capnp_migrate(node_reader)?;
if node.key == key {
versions.push(node);
}
}
}
if versions.is_empty() {
return Err(format!("No history found for '{}'", key));
}
eprintln!("{} versions of '{}':\n", versions.len(), key);
for node in &versions {
let ts = if node.timestamp > 0 && node.timestamp < 4_000_000_000 {
store::format_datetime(node.timestamp)
} else {
format!("(raw:{})", node.timestamp)
};
let deleted_marker = if node.deleted { " DELETED" } else { "" };
let content_len = node.content.len();
if full {
eprintln!("=== v{} {} {}{} w={:.3} {}b ===",
node.version, ts, node.provenance, deleted_marker, node.weight, content_len);
eprintln!("{}", node.content);
} else {
let preview = crate::util::first_n_chars(&node.content, 120);
let preview = preview.replace('\n', "\\n");
eprintln!(" v{:<3} {} {:24} w={:.3} {}b{}",
node.version, ts, node.provenance, node.weight, content_len, deleted_marker);
eprintln!(" {}", preview);
}
}
if !full
&& let Some(latest) = versions.last() {
eprintln!("\n--- Latest content (v{}, {}) ---",
latest.version, latest.provenance);
print!("{}", latest.content);
}
let key = key.join(" ");
let result = crate::mcp_server::memory_rpc(
"memory_history",
serde_json::json!({"key": key, "full": full}),
).map_err(|e| e.to_string())?;
print!("{}", result);
Ok(())
}
@ -263,33 +190,3 @@ pub fn cmd_edit(key: &[String]) -> Result<(), String> {
Ok(())
}
/// Bump the daily lookup counter for each of the given node keys.
/// Errors when called with no keys.
pub fn cmd_lookup_bump(keys: &[String]) -> Result<(), String> {
    if keys.is_empty() {
        return Err("lookup-bump requires at least one key".into());
    }
    let key_refs: Vec<&str> = keys.iter().map(String::as_str).collect();
    crate::lookups::bump_many(&key_refs)
}
/// Print resolved daily lookup counts for `date` (defaults to today,
/// formatted as YYYY-MM-DD in local time), followed by summary totals.
pub fn cmd_lookups(date: Option<&str>) -> Result<(), String> {
    let date = match date {
        Some(d) => d.to_string(),
        None => chrono::Local::now().format("%Y-%m-%d").to_string(),
    };
    let store = store::Store::load()?;
    let keys: Vec<String> = store.nodes.values().map(|n| n.key.clone()).collect();
    let resolved = crate::lookups::dump_resolved(&date, &keys)?;
    if resolved.is_empty() {
        println!("No lookups for {}", date);
        return Ok(());
    }
    println!("Lookups for {}:", date);
    // Accumulate the grand total while printing each per-key count.
    let mut total: u64 = 0;
    for (key, count) in &resolved {
        println!(" {:4} {}", count, key);
        total += *count as u64;
    }
    println!("\n{} distinct keys, {} total lookups", resolved.len(), total);
    Ok(())
}

View file

@ -209,11 +209,6 @@ enum NodeCmd {
/// Glob pattern to filter keys
pattern: Option<String>,
},
/// List all edges (tsv: source target strength type)
Edges,
/// Dump entire store as JSON
#[command(name = "dump")]
Dump,
}
#[derive(Subcommand)]
@ -421,17 +416,6 @@ enum AdminCmd {
Log,
/// Show current parameters
Params,
/// Bump daily lookup counter for keys
#[command(name = "lookup-bump")]
LookupBump {
/// Node keys
keys: Vec<String>,
},
/// Show daily lookup counts
Lookups {
/// Date (default: today)
date: Option<String>,
},
/// Migrate transcript stub nodes to progress log
#[command(name = "migrate-transcript-progress")]
MigrateTranscriptProgress,
@ -511,8 +495,6 @@ impl Run for NodeCmd {
Self::Delete { key } => cli::node::cmd_node_delete(&key),
Self::Rename { old_key, new_key } => cli::node::cmd_node_rename(&old_key, &new_key),
Self::List { pattern } => cli::node::cmd_list_keys(pattern.as_deref()),
Self::Edges => cli::node::cmd_list_edges(),
Self::Dump => cli::node::cmd_dump_json(),
}
}
}
@ -572,8 +554,6 @@ impl Run for AdminCmd {
Self::LoadContext { stats } => cli::misc::cmd_load_context(stats),
Self::Log => cli::misc::cmd_log(),
Self::Params => cli::misc::cmd_params(),
Self::LookupBump { keys } => cli::node::cmd_lookup_bump(&keys),
Self::Lookups { date } => cli::node::cmd_lookups(date.as_deref()),
Self::MigrateTranscriptProgress => {
let mut store = store::Store::load()?;
let count = store.migrate_transcript_progress()?;

View file

@ -534,7 +534,7 @@ impl Subconscious {
for key in surface_str.lines().map(|l| l.trim()).filter(|l| !l.is_empty()) {
if existing.contains(key) { continue; }
if let Some(rendered) = store_guard.as_ref()
.and_then(|s| crate::cli::node::render_node(s, key))
.and_then(|s| crate::hippocampus::memory::render_node(s, key))
{
nodes.push(AstNode::memory(
key,

View file

@ -250,7 +250,7 @@ fn resolve(
let mut text = String::new();
let mut result_keys = Vec::new();
for key in keys {
if let Some(rendered) = crate::cli::node::render_node(store, key) {
if let Some(rendered) = crate::hippocampus::memory::render_node(store, key) {
if !text.is_empty() { text.push_str("\n\n---\n\n"); }
text.push_str(&format!("## {}\n\n{}", key, rendered));
result_keys.push(key.clone());