memory.rs: clean up store access and tool dispatch
- Single access() function returns StoreAccess enum (Daemon/Client/None)
- OnceLock for daemon store, thread-local RefCell for client socket
- Remove dispatch()
- Tool handlers call jsonargs_* directly
- get_provenance() takes agent ref, no JSON round-trip
- Expose missing graph tools (communities, normalize, link_impact, trace)
- Local tool! macro for cleaner Tool definitions

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
parent
fb46ab095d
commit
9bb07bc26a
2 changed files with 264 additions and 259 deletions
|
|
@@ -1,50 +1,75 @@
|
|||
use std::sync::Arc;
|
||||
// tools/memory.rs — Native memory graph operations
|
||||
//
|
||||
// If running in the daemon process (STORE_HANDLE set), accesses
|
||||
// the store directly. Otherwise forwards to the daemon via socket.
|
||||
// Daemon: calls set_store() at startup for direct store access.
|
||||
// Clients: lazy init tries socket, falls back to local store.
|
||||
|
||||
#![allow(unused_variables)] // macro-generated args for no-param tools
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use std::sync::OnceLock;
|
||||
use std::cell::RefCell;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
|
||||
use crate::store::Store;
|
||||
|
||||
// ── Store handle ───────────────────────────────────────────────
|
||||
// ── Store access ───────────────────────────────────────────────
|
||||
|
||||
/// Global store handle. Set by daemon at startup.
|
||||
/// If None, tools forward to daemon socket.
|
||||
static STORE_HANDLE: OnceLock<Arc<crate::Mutex<Store>>> = OnceLock::new();
|
||||
/// Daemon's store (eager init) or client's fallback local store.
|
||||
static STORE_ACCESS: OnceLock<Option<Arc<crate::Mutex<Store>>>> = OnceLock::new();
|
||||
|
||||
// Thread-local store for rpc_local fallback path.
|
||||
// Client's socket connection (thread-local for lock-free access).
|
||||
thread_local! {
|
||||
static LOCAL_STORE: std::cell::RefCell<Option<Arc<crate::Mutex<Store>>>> =
|
||||
const { std::cell::RefCell::new(None) };
|
||||
static SOCKET_CONN: RefCell<Option<SocketConn>> = const { RefCell::new(None) };
|
||||
}
|
||||
|
||||
/// Set the global store handle. Call once at daemon startup.
|
||||
/// How we access the memory store.
|
||||
enum StoreAccess {
|
||||
Daemon(Arc<crate::Mutex<Store>>), // Direct store access
|
||||
Client, // Socket to daemon (in thread-local)
|
||||
None(String), // Error: couldn't get access
|
||||
}
|
||||
|
||||
/// Set the global store handle. Call once at daemon startup (eager init).
|
||||
pub fn set_store(store: Arc<crate::Mutex<Store>>) {
|
||||
STORE_HANDLE.set(store).ok();
|
||||
STORE_ACCESS.set(Some(store)).ok();
|
||||
}
|
||||
|
||||
/// Check if we're running in daemon mode (have direct store access).
|
||||
pub fn is_daemon() -> bool {
|
||||
STORE_HANDLE.get().is_some() || LOCAL_STORE.with(|s| s.borrow().is_some())
|
||||
/// Get store access: daemon's store, socket, or local fallback.
|
||||
fn access() -> StoreAccess {
|
||||
// Daemon: already set via set_store()
|
||||
if let Some(Some(store)) = STORE_ACCESS.get() {
|
||||
return StoreAccess::Daemon(store.clone());
|
||||
}
|
||||
|
||||
// Client: check if socket already cached in thread-local
|
||||
let have_socket = SOCKET_CONN.with(|cell| cell.borrow().is_some());
|
||||
if have_socket {
|
||||
return StoreAccess::Client;
|
||||
}
|
||||
|
||||
// No socket cached, try connecting
|
||||
if let Ok(conn) = SocketConn::connect() {
|
||||
SOCKET_CONN.with(|cell| *cell.borrow_mut() = Some(conn));
|
||||
return StoreAccess::Client;
|
||||
}
|
||||
|
||||
// Socket failed - try local store as fallback (cached in STORE_ACCESS)
|
||||
let store_opt = STORE_ACCESS.get_or_init(|| {
|
||||
Store::load().ok().map(|s| Arc::new(crate::Mutex::new(s)))
|
||||
});
|
||||
|
||||
match store_opt {
|
||||
Some(store) => StoreAccess::Daemon(store.clone()),
|
||||
None => StoreAccess::None("could not connect to daemon or open store locally".into()),
|
||||
}
|
||||
}
|
||||
|
||||
// ── Socket RPC ─────────────────────────────────────────────────
|
||||
|
||||
use std::sync::Mutex;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub fn socket_path() -> PathBuf {
|
||||
dirs::home_dir()
|
||||
.unwrap_or_default()
|
||||
.join(".consciousness/mcp.sock")
|
||||
}
|
||||
|
||||
// Cached socket connection for RPC forwarding
|
||||
static SOCKET_CONN: OnceLock<Mutex<Option<SocketConn>>> = OnceLock::new();
|
||||
|
||||
struct SocketConn {
|
||||
reader: std::io::BufReader<std::os::unix::net::UnixStream>,
|
||||
writer: std::io::BufWriter<std::os::unix::net::UnixStream>,
|
||||
|
|
@@ -100,44 +125,14 @@ impl SocketConn {
|
|||
}
|
||||
}
|
||||
|
||||
/// Forward a tool call to the daemon socket, or execute locally if daemon is down.
|
||||
/// Used by external processes that don't have direct store access.
|
||||
pub fn memory_rpc(tool_name: &str, args: serde_json::Value) -> Result<String> {
|
||||
let conn_lock = SOCKET_CONN.get_or_init(|| Mutex::new(None));
|
||||
let mut guard = conn_lock.lock().unwrap();
|
||||
|
||||
// Try cached connection first
|
||||
if let Some(conn) = guard.as_mut() {
|
||||
match conn.call(tool_name, &args) {
|
||||
Ok(result) => return Ok(result),
|
||||
Err(_) => {
|
||||
// Connection broken, clear cache and retry
|
||||
*guard = None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try to establish new connection
|
||||
match SocketConn::connect() {
|
||||
Ok(mut conn) => {
|
||||
let result = conn.call(tool_name, &args);
|
||||
*guard = Some(conn);
|
||||
result
|
||||
}
|
||||
Err(_) => {
|
||||
// Socket unavailable - fall back to local store
|
||||
drop(guard); // Release lock before blocking
|
||||
tokio::task::block_in_place(|| {
|
||||
tokio::runtime::Handle::current()
|
||||
.block_on(rpc_local(tool_name, &args))
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute a tool locally when daemon isn't running.
|
||||
async fn rpc_local(tool_name: &str, args: &serde_json::Value) -> Result<String> {
|
||||
run_with_local_store(tool_name, args.clone()).await
|
||||
/// Forward a tool call to the daemon via socket.
|
||||
/// Only valid when access() returns Client.
|
||||
fn memory_rpc(tool_name: &str, args: serde_json::Value) -> Result<String> {
|
||||
SOCKET_CONN.with(|cell| {
|
||||
let mut conn = cell.borrow_mut();
|
||||
let conn = conn.as_mut().expect("access() returned Client but SOCKET_CONN is None");
|
||||
conn.call(tool_name, &args)
|
||||
})
|
||||
}
|
||||
|
||||
// ── Helpers ────────────────────────────────────────────────────
|
||||
|
|
@@ -150,36 +145,12 @@ fn get_f64(args: &serde_json::Value, name: &str) -> Result<f64> {
|
|||
args.get(name).and_then(|v| v.as_f64()).context(format!("{} is required", name))
|
||||
}
|
||||
|
||||
async fn cached_store() -> Result<Arc<crate::Mutex<Store>>> {
|
||||
// Check thread-local first (rpc_local fallback path)
|
||||
if let Some(store) = LOCAL_STORE.with(|s| s.borrow().clone()) {
|
||||
return Ok(store);
|
||||
/// Get provenance from agent state, or "manual".
|
||||
async fn get_provenance(agent: &Option<std::sync::Arc<crate::agent::Agent>>) -> String {
|
||||
match agent {
|
||||
Some(a) => a.state.lock().await.provenance.clone(),
|
||||
None => "manual".to_string(),
|
||||
}
|
||||
// Use global handle if set (daemon mode)
|
||||
if let Some(store) = STORE_HANDLE.get() {
|
||||
return Ok(store.clone());
|
||||
}
|
||||
// Fallback to loading (for backwards compat during transition)
|
||||
Store::cached().await.map_err(|e| anyhow::anyhow!("{}", e))
|
||||
}
|
||||
|
||||
/// Run a tool with a temporarily-opened store (for rpc_local fallback).
|
||||
pub async fn run_with_local_store(tool_name: &str, args: serde_json::Value) -> Result<String> {
|
||||
let store = Store::cached().await.map_err(|e| anyhow::anyhow!("{}", e))?;
|
||||
|
||||
LOCAL_STORE.with(|s| *s.borrow_mut() = Some(store));
|
||||
let result = dispatch(tool_name, &None, args).await;
|
||||
LOCAL_STORE.with(|s| *s.borrow_mut() = None);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Get provenance from args._provenance, or "manual".
|
||||
fn get_provenance(args: &serde_json::Value) -> String {
|
||||
args.get("_provenance")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("manual")
|
||||
.to_string()
|
||||
}
|
||||
|
||||
// ── Macro for generating tool wrappers ─────────────────────────
|
||||
|
|
@@ -275,28 +246,38 @@ macro_rules! memory_tool {
|
|||
// Mutable store variant
|
||||
($name:ident, mut $(, $($arg:ident : [$($typ:tt)+]),* $(,)?)?) => {
|
||||
paste::paste! {
|
||||
async fn [<jsonargs_ $name>](args: &serde_json::Value) -> Result<String> {
|
||||
async fn [<jsonargs_ $name>](agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
|
||||
$($(let $arg = memory_tool!(@extract args, $arg, $($typ)+);)*)?
|
||||
let prov = get_provenance(args);
|
||||
let arc = cached_store().await?;
|
||||
let mut store = arc.lock().await;
|
||||
crate::hippocampus::$name(&mut store, &prov $($(, $arg)*)?)
|
||||
let prov = get_provenance(agent).await;
|
||||
match access() {
|
||||
StoreAccess::Daemon(arc) => {
|
||||
let mut store = arc.lock().await;
|
||||
crate::hippocampus::$name(&mut store, &prov $($(, $arg)*)?)
|
||||
}
|
||||
StoreAccess::Client => anyhow::bail!("jsonargs called in client mode"),
|
||||
StoreAccess::None(err) => anyhow::bail!("{}", err),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn $name(agent: Option<&crate::agent::Agent> $($(, $arg: memory_tool!(@param_type $($typ)+))*)?) -> Result<String> {
|
||||
if !is_daemon() {
|
||||
#[allow(unused_mut)]
|
||||
let mut map = serde_json::Map::new();
|
||||
$($(memory_tool!(@insert_json map, $arg, $($typ)+);)*)?
|
||||
return memory_rpc(stringify!($name), serde_json::Value::Object(map));
|
||||
}
|
||||
let prov = match agent {
|
||||
Some(a) => a.state.lock().await.provenance.clone(),
|
||||
None => "manual".to_string(),
|
||||
};
|
||||
let arc = cached_store().await?;
|
||||
let mut store = arc.lock().await;
|
||||
crate::hippocampus::$name(&mut store, &prov $($(, $arg)*)?)
|
||||
|
||||
match access() {
|
||||
StoreAccess::Daemon(arc) => {
|
||||
let mut store = arc.lock().await;
|
||||
crate::hippocampus::$name(&mut store, &prov $($(, $arg)*)?)
|
||||
}
|
||||
StoreAccess::Client => {
|
||||
#[allow(unused_mut)]
|
||||
let mut map = serde_json::Map::new();
|
||||
$($(memory_tool!(@insert_json map, $arg, $($typ)+);)*)?
|
||||
memory_rpc(stringify!($name), serde_json::Value::Object(map))
|
||||
}
|
||||
StoreAccess::None(err) => anyhow::bail!("{}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@@ -304,28 +285,38 @@ macro_rules! memory_tool {
|
|||
// Immutable store variant
|
||||
($name:ident, ref $(, $($arg:ident : [$($typ:tt)+]),* $(,)?)?) => {
|
||||
paste::paste! {
|
||||
async fn [<jsonargs_ $name>](args: &serde_json::Value) -> Result<String> {
|
||||
async fn [<jsonargs_ $name>](agent: &Option<std::sync::Arc<crate::agent::Agent>>, args: &serde_json::Value) -> Result<String> {
|
||||
$($(let $arg = memory_tool!(@extract args, $arg, $($typ)+);)*)?
|
||||
let prov = get_provenance(args);
|
||||
let arc = cached_store().await?;
|
||||
let store = arc.lock().await;
|
||||
crate::hippocampus::$name(&store, &prov $($(, $arg)*)?)
|
||||
let prov = get_provenance(agent).await;
|
||||
match access() {
|
||||
StoreAccess::Daemon(arc) => {
|
||||
let store = arc.lock().await;
|
||||
crate::hippocampus::$name(&store, &prov $($(, $arg)*)?)
|
||||
}
|
||||
StoreAccess::Client => anyhow::bail!("jsonargs called in client mode"),
|
||||
StoreAccess::None(err) => anyhow::bail!("{}", err),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn $name(agent: Option<&crate::agent::Agent> $($(, $arg: memory_tool!(@param_type $($typ)+))*)?) -> Result<String> {
|
||||
if !is_daemon() {
|
||||
#[allow(unused_mut)]
|
||||
let mut map = serde_json::Map::new();
|
||||
$($(memory_tool!(@insert_json map, $arg, $($typ)+);)*)?
|
||||
return memory_rpc(stringify!($name), serde_json::Value::Object(map));
|
||||
}
|
||||
let prov = match agent {
|
||||
Some(a) => a.state.lock().await.provenance.clone(),
|
||||
None => "manual".to_string(),
|
||||
};
|
||||
let arc = cached_store().await?;
|
||||
let store = arc.lock().await;
|
||||
crate::hippocampus::$name(&store, &prov $($(, $arg)*)?)
|
||||
|
||||
match access() {
|
||||
StoreAccess::Daemon(arc) => {
|
||||
let store = arc.lock().await;
|
||||
crate::hippocampus::$name(&store, &prov $($(, $arg)*)?)
|
||||
}
|
||||
StoreAccess::Client => {
|
||||
#[allow(unused_mut)]
|
||||
let mut map = serde_json::Map::new();
|
||||
$($(memory_tool!(@insert_json map, $arg, $($typ)+);)*)?
|
||||
memory_rpc(stringify!($name), serde_json::Value::Object(map))
|
||||
}
|
||||
StoreAccess::None(err) => anyhow::bail!("{}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@@ -362,152 +353,168 @@ memory_tool!(graph_link_impact, ref, source: [str], target: [str]);
|
|||
memory_tool!(graph_hubs, ref, count: [Option<usize>]);
|
||||
memory_tool!(graph_trace, ref, key: [str]);
|
||||
|
||||
/// Single entry point for all memory/journal tool calls.
|
||||
/// If not daemon, forwards to daemon with provenance attached.
|
||||
async fn dispatch(
|
||||
tool_name: &str,
|
||||
agent: &Option<std::sync::Arc<crate::agent::Agent>>,
|
||||
args: serde_json::Value,
|
||||
) -> Result<String> {
|
||||
let mut args = args;
|
||||
if let Some(a) = agent {
|
||||
let prov = a.state.lock().await.provenance.clone();
|
||||
args.as_object_mut().map(|o| o.insert("_provenance".into(), prov.into()));
|
||||
}
|
||||
|
||||
if !is_daemon() {
|
||||
// Forward to daemon
|
||||
let name = tool_name.to_string();
|
||||
return tokio::task::spawn_blocking(move || {
|
||||
memory_rpc(&name, args)
|
||||
}).await.map_err(|e| anyhow::anyhow!("spawn_blocking: {}", e))?;
|
||||
}
|
||||
|
||||
// Daemon path - dispatch to implementation
|
||||
match tool_name {
|
||||
"memory_render" => jsonargs_memory_render(&args).await,
|
||||
"memory_write" => jsonargs_memory_write(&args).await,
|
||||
"memory_search" => jsonargs_memory_search(&args).await,
|
||||
"memory_links" => jsonargs_memory_links(&args).await,
|
||||
"memory_link_set" => jsonargs_memory_link_set(&args).await,
|
||||
"memory_link_add" => jsonargs_memory_link_add(&args).await,
|
||||
"memory_delete" => jsonargs_memory_delete(&args).await,
|
||||
"memory_history" => jsonargs_memory_history(&args).await,
|
||||
"memory_weight_set" => jsonargs_memory_weight_set(&args).await,
|
||||
"memory_rename" => jsonargs_memory_rename(&args).await,
|
||||
"memory_supersede" => jsonargs_memory_supersede(&args).await,
|
||||
"memory_query" => jsonargs_memory_query(&args).await,
|
||||
"graph_topology" => jsonargs_graph_topology(&args).await,
|
||||
"graph_health" => jsonargs_graph_health(&args).await,
|
||||
"graph_communities" => jsonargs_graph_communities(&args).await,
|
||||
"graph_normalize_strengths" => jsonargs_graph_normalize_strengths(&args).await,
|
||||
"graph_trace" => jsonargs_graph_trace(&args).await,
|
||||
"graph_link_impact" => jsonargs_graph_link_impact(&args).await,
|
||||
"graph_hubs" => jsonargs_graph_hubs(&args).await,
|
||||
"journal_tail" => jsonargs_journal_tail(&args).await,
|
||||
"journal_new" => jsonargs_journal_new(&args).await,
|
||||
"journal_update" => jsonargs_journal_update(&args).await,
|
||||
_ => anyhow::bail!("unknown tool: {}", tool_name),
|
||||
}
|
||||
}
|
||||
|
||||
// ── Definitions ────────────────────────────────────────────────
|
||||
|
||||
pub fn memory_tools() -> [super::Tool; 15] {
|
||||
pub fn memory_tools() -> [super::Tool; 19] {
|
||||
use super::Tool;
|
||||
macro_rules! tool {
|
||||
($name:ident, $desc:expr, $params:expr) => {
|
||||
Tool {
|
||||
name: stringify!($name),
|
||||
description: $desc,
|
||||
parameters_json: $params,
|
||||
handler: Arc::new(|a, v| Box::pin(async move {
|
||||
paste::paste! { [<jsonargs_ $name>](&a, &v).await }
|
||||
})),
|
||||
}
|
||||
};
|
||||
}
|
||||
[
|
||||
Tool { name: "memory_render", description: "Read a memory node's content and links.",
|
||||
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_render", &a, v).await })) },
|
||||
Tool { name: "memory_write", description: "Create or update a memory node.",
|
||||
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"},"content":{"type":"string","description":"Full content (markdown)"}},"required":["key","content"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_write", &a, v).await })) },
|
||||
Tool { name: "memory_search", description: "Search the memory graph via spreading activation. Give 2-4 seed node keys.",
|
||||
parameters_json: r#"{"type":"object","properties":{"keys":{"type":"array","items":{"type":"string"},"description":"Seed node keys to activate from"},"max_hops":{"type":"integer","description":"Max graph hops (default 3)"},"edge_decay":{"type":"number","description":"Decay per hop (default 0.3)"},"min_activation":{"type":"number","description":"Cutoff threshold (default 0.01)"},"limit":{"type":"integer","description":"Max results (default 20)"}},"required":["keys"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_search", &a, v).await })) },
|
||||
Tool { name: "memory_links", description: "Show a node's neighbors with link strengths.",
|
||||
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_links", &a, v).await })) },
|
||||
Tool { name: "memory_link_set", description: "Set link strength between two nodes.",
|
||||
parameters_json: r#"{"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"},"strength":{"type":"number","description":"0.01 to 1.0"}},"required":["source","target","strength"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_link_set", &a, v).await })) },
|
||||
Tool { name: "memory_link_add", description: "Add a new link between two nodes.",
|
||||
parameters_json: r#"{"type":"object","properties":{"source":{"type":"string"},"target":{"type":"string"}},"required":["source","target"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_link_add", &a, v).await })) },
|
||||
Tool { name: "memory_delete", description: "Delete a memory node.",
|
||||
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"}},"required":["key"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_delete", &a, v).await })) },
|
||||
Tool { name: "memory_history", description: "Show version history for a node.",
|
||||
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string","description":"Node key"},"full":{"type":"boolean","description":"Show full content for each version"}},"required":["key"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_history", &a, v).await })) },
|
||||
Tool { name: "memory_weight_set", description: "Set a node's weight directly (0.01 to 1.0).",
|
||||
parameters_json: r#"{"type":"object","properties":{"key":{"type":"string"},"weight":{"type":"number","description":"0.01 to 1.0"}},"required":["key","weight"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_weight_set", &a, v).await })) },
|
||||
Tool { name: "memory_rename", description: "Rename a node key in place.",
|
||||
parameters_json: r#"{"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"}},"required":["old_key","new_key"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_rename", &a, v).await })) },
|
||||
Tool { name: "memory_supersede", description: "Mark a node as superseded by another (sets weight to 0.01).",
|
||||
parameters_json: r#"{"type":"object","properties":{"old_key":{"type":"string"},"new_key":{"type":"string"},"reason":{"type":"string"}},"required":["old_key","new_key"]}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_supersede", &a, v).await })) },
|
||||
Tool { name: "memory_query",
|
||||
description: "Run a structured query against the memory graph.",
|
||||
parameters_json: r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {"type": "string", "description": "Query expression"},
|
||||
"format": {"type": "string", "description": "compact (default) or full (with content and graph metrics)", "default": "compact"}
|
||||
},
|
||||
"required": ["query"]
|
||||
}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("memory_query", &a, v).await })) },
|
||||
Tool { name: "graph_topology", description: "Show graph topology stats (nodes, edges, clustering, hubs).",
|
||||
parameters_json: r#"{"type":"object","properties":{}}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("graph_topology", &a, v).await })) },
|
||||
Tool { name: "graph_health", description: "Show graph health report with maintenance recommendations.",
|
||||
parameters_json: r#"{"type":"object","properties":{}}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("graph_health", &a, v).await })) },
|
||||
Tool { name: "graph_hubs", description: "Show top hub nodes by degree, spread apart for diverse link targets.",
|
||||
parameters_json: r#"{"type":"object","properties":{"count":{"type":"integer","description":"Number of hubs to return (default 20)"}}}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("graph_hubs", &a, v).await })) },
|
||||
tool!(memory_render, "Read a memory node's content and links.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "key": {"type": "string"}, "raw": {"type": "boolean"} },
|
||||
"required": ["key"]
|
||||
}"#),
|
||||
tool!(memory_write, "Create or update a memory node.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "key": {"type": "string"}, "content": {"type": "string"} },
|
||||
"required": ["key", "content"]
|
||||
}"#),
|
||||
tool!(memory_search, "Search via spreading activation from seed keys.", r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"keys": {"type": "array", "items": {"type": "string"}},
|
||||
"max_hops": {"type": "integer"},
|
||||
"edge_decay": {"type": "number"},
|
||||
"min_activation": {"type": "number"},
|
||||
"limit": {"type": "integer"}
|
||||
},
|
||||
"required": ["keys"]
|
||||
}"#),
|
||||
tool!(memory_links, "Show a node's neighbors with link strengths.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "key": {"type": "string"} },
|
||||
"required": ["key"]
|
||||
}"#),
|
||||
tool!(memory_link_set, "Set link strength between two nodes.", r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"source": {"type": "string"},
|
||||
"target": {"type": "string"},
|
||||
"strength": {"type": "number", "description": "0.01 to 1.0"}
|
||||
},
|
||||
"required": ["source", "target", "strength"]
|
||||
}"#),
|
||||
tool!(memory_link_add, "Add a new link between two nodes.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "source": {"type": "string"}, "target": {"type": "string"} },
|
||||
"required": ["source", "target"]
|
||||
}"#),
|
||||
tool!(memory_delete, "Delete a memory node.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "key": {"type": "string"} },
|
||||
"required": ["key"]
|
||||
}"#),
|
||||
tool!(memory_history, "Show version history for a node.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "key": {"type": "string"}, "full": {"type": "boolean"} },
|
||||
"required": ["key"]
|
||||
}"#),
|
||||
tool!(memory_weight_set, "Set a node's weight (0.01 to 1.0).", r#"{
|
||||
"type": "object",
|
||||
"properties": { "key": {"type": "string"}, "weight": {"type": "number"} },
|
||||
"required": ["key", "weight"]
|
||||
}"#),
|
||||
tool!(memory_rename, "Rename a node key.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "old_key": {"type": "string"}, "new_key": {"type": "string"} },
|
||||
"required": ["old_key", "new_key"]
|
||||
}"#),
|
||||
tool!(memory_supersede, "Mark a node as superseded by another.", r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"old_key": {"type": "string"},
|
||||
"new_key": {"type": "string"},
|
||||
"reason": {"type": "string"}
|
||||
},
|
||||
"required": ["old_key", "new_key"]
|
||||
}"#),
|
||||
tool!(memory_query, "Run a structured query against the memory graph.", r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {"type": "string"},
|
||||
"format": {"type": "string", "description": "compact or full"}
|
||||
},
|
||||
"required": ["query"]
|
||||
}"#),
|
||||
tool!(graph_topology, "Show graph topology stats.", r#"{"type": "object"}"#),
|
||||
tool!(graph_health, "Show graph health report.", r#"{"type": "object"}"#),
|
||||
tool!(graph_hubs, "Show top hub nodes by degree.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "count": {"type": "integer"} }
|
||||
}"#),
|
||||
tool!(graph_communities, "Show communities by isolation.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "top_n": {"type": "integer"}, "min_size": {"type": "integer"} }
|
||||
}"#),
|
||||
tool!(graph_normalize_strengths, "Set link strengths from Jaccard similarity.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "apply": {"type": "boolean"} }
|
||||
}"#),
|
||||
tool!(graph_link_impact, "Simulate adding an edge, report impact.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "source": {"type": "string"}, "target": {"type": "string"} },
|
||||
"required": ["source", "target"]
|
||||
}"#),
|
||||
tool!(graph_trace, "Walk temporal links from a node.", r#"{
|
||||
"type": "object",
|
||||
"properties": { "key": {"type": "string"} },
|
||||
"required": ["key"]
|
||||
}"#),
|
||||
]
|
||||
}
|
||||
|
||||
pub fn journal_tools() -> [super::Tool; 3] {
|
||||
use super::Tool;
|
||||
macro_rules! tool {
|
||||
($name:ident, $desc:expr, $params:expr) => {
|
||||
Tool {
|
||||
name: stringify!($name),
|
||||
description: $desc,
|
||||
parameters_json: $params,
|
||||
handler: Arc::new(|a, v| Box::pin(async move {
|
||||
paste::paste! { [<jsonargs_ $name>](&a, &v).await }
|
||||
})),
|
||||
}
|
||||
};
|
||||
}
|
||||
[
|
||||
Tool { name: "journal_tail",
|
||||
description: "Read the last N entries at a given level.",
|
||||
parameters_json: r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"count": {"type": "integer", "description": "Number of entries", "default": 1},
|
||||
"level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0},
|
||||
"format": {"type": "string", "description": "compact or full (with content)", "default": "full"},
|
||||
"after": {"type": "string", "description": "Only entries after this date (YYYY-MM-DD)"}
|
||||
}
|
||||
}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("journal_tail", &a, v).await })) },
|
||||
Tool { name: "journal_new", description: "Start a new journal/digest entry.",
|
||||
parameters_json: r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string", "description": "Short node name (becomes the key)"},
|
||||
"title": {"type": "string", "description": "Descriptive title"},
|
||||
"body": {"type": "string", "description": "Entry body"},
|
||||
"level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0}
|
||||
},
|
||||
"required": ["name", "title", "body"]
|
||||
}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("journal_new", &a, v).await })) },
|
||||
Tool { name: "journal_update", description: "Append text to the most recent entry at a level.",
|
||||
parameters_json: r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"body": {"type": "string", "description": "Text to append"},
|
||||
"level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly", "default": 0}
|
||||
},
|
||||
"required": ["body"]
|
||||
}"#,
|
||||
handler: Arc::new(|a, v| Box::pin(async move { dispatch("journal_update", &a, v).await })) },
|
||||
tool!(journal_tail, "Read the last N entries at a given level.", r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"count": {"type": "integer"},
|
||||
"level": {"type": "integer", "description": "0=journal, 1=daily, 2=weekly, 3=monthly"},
|
||||
"format": {"type": "string", "description": "compact or full"},
|
||||
"after": {"type": "string", "description": "Only entries after this date (YYYY-MM-DD)"}
|
||||
}
|
||||
}"#),
|
||||
tool!(journal_new, "Start a new journal/digest entry.", r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"title": {"type": "string"},
|
||||
"body": {"type": "string"},
|
||||
"level": {"type": "integer"}
|
||||
},
|
||||
"required": ["name", "title", "body"]
|
||||
}"#),
|
||||
tool!(journal_update, "Append text to the most recent entry.", r#"{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"body": {"type": "string"},
|
||||
"level": {"type": "integer"}
|
||||
},
|
||||
"required": ["body"]
|
||||
}"#),
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -13,9 +13,7 @@ use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader, BufWriter};
|
|||
use tokio::net::{UnixListener, UnixStream};
|
||||
|
||||
use crate::agent::tools::Tool;
|
||||
|
||||
// Re-export for backwards compatibility
|
||||
pub use crate::agent::tools::memory::{socket_path, memory_rpc};
|
||||
use crate::agent::tools::memory::socket_path;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue