consciousness/src/hippocampus/local.rs
Kent Overstreet b3d0a3ab25 store: internal locking, remove Arc<Mutex<Store>> wrapper
Store now has internal Mutex for capnp appends and AtomicU64 for
size tracking. All methods take &self. The external Arc<Mutex<Store>>
is replaced with Arc<Store>.

- Store::append_lock protects file appends
- local.rs functions take &Store (not &mut Store)
- access_local() returns Arc<Store>
- All .lock().await calls removed from callers

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
2026-04-13 21:49:54 -04:00

use anyhow::Result;
use super::memory::MemoryNode;
use super::store::Store;
use crate::graph::Graph;
use crate::neuro::{consolidation_priority, ReplayItem};
// ── Memory operations ──────────────────────────────────────────
pub fn memory_render(store: &Store, _provenance: &str, key: &str, raw: Option<bool>) -> Result<String> {
    let node = MemoryNode::from_store(store, key)
        .ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
    if raw.unwrap_or(false) {
        Ok(node.content)
    } else {
        Ok(node.render())
    }
}

pub fn memory_write(store: &Store, provenance: &str, key: &str, content: &str) -> Result<String> {
    let result = store.upsert_provenance(key, content, provenance)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("{} '{}'", result, key))
}

/// Spreading-activation search: activate the seed keys, propagate activation
/// across the link graph, and return the best non-seed matches.
pub fn memory_search(
    store: &Store,
    _provenance: &str,
    keys: Vec<String>,
    max_hops: Option<u32>,
    edge_decay: Option<f64>,
    min_activation: Option<f64>,
    limit: Option<usize>,
) -> Result<String> {
    if keys.is_empty() {
        anyhow::bail!("memory_search requires at least one seed key");
    }
    let max_hops = max_hops.unwrap_or(3);
    let edge_decay = edge_decay.unwrap_or(0.3);
    let min_activation = min_activation.unwrap_or(0.01);
    let limit = limit.unwrap_or(20);
    let graph = crate::graph::build_graph_fast(store);
    let seeds: Vec<(String, f64)> = keys.iter()
        .filter_map(|k| {
            let resolved = store.resolve_key(k).ok()?;
            Some((resolved, 1.0))
        })
        .collect();
    if seeds.is_empty() {
        anyhow::bail!("no valid seed keys found");
    }
    let seed_set: std::collections::HashSet<&str> = seeds.iter()
        .map(|(k, _)| k.as_str()).collect();
    let results = crate::search::spreading_activation(
        &seeds, &graph, store,
        max_hops, edge_decay, min_activation,
    );
    Ok(results.iter()
        .filter(|(k, _)| !seed_set.contains(k.as_str()))
        .take(limit)
        .map(|(key, score)| format!(" {:.2} {}", score, key))
        .collect::<Vec<_>>().join("\n"))
}

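// Hedged usage sketch (not called by production code): a search from one seed,
// leaning on the defaults above (3 hops, 0.3 decay, 0.01 activation floor).
// The provenance string and seed key are placeholders.
#[allow(dead_code)]
fn example_memory_search(store: &Store) -> Result<String> {
    memory_search(store, "example", vec!["some-seed".to_string()], None, None, None, Some(10))
}
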
/// Info about a linked neighbor node.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct LinkInfo {
    pub key: String,
    pub link_strength: f32,
    pub node_weight: f32,
}

/// List a node's links, annotated with each target's node weight.
pub fn memory_links(store: &Store, _provenance: &str, key: &str) -> Result<Vec<LinkInfo>> {
    let node = MemoryNode::from_store(store, key)
        .ok_or_else(|| anyhow::anyhow!("node not found: {}", key))?;
    let mut links = Vec::new();
    for (target, strength, _is_new) in &node.links {
        let node_weight = store.get_node(target)
            .ok()
            .flatten()
            .map(|n| n.weight)
            .unwrap_or(0.5);
        links.push(LinkInfo {
            key: target.clone(),
            link_strength: *strength,
            node_weight,
        });
    }
    Ok(links)
}

pub fn memory_link_set(store: &Store, _provenance: &str, source: &str, target: &str, strength: f32) -> Result<String> {
    let s = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
    let t = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
    let old = store.set_link_strength(&s, &t, strength).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("{}→{} strength {:.2}→{:.2}", s, t, old, strength))
}

pub fn memory_link_add(store: &Store, provenance: &str, source: &str, target: &str) -> Result<String> {
    let s = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
    let t = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
    let strength = store.add_link(&s, &t, provenance).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("linked {}→{} (strength={:.2})", s, t, strength))
}

pub fn memory_delete(store: &Store, _provenance: &str, key: &str) -> Result<String> {
    let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.delete_node(&resolved).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("deleted {}", resolved))
}

/// Show every stored version of a node by replaying the capnp append log.
pub fn memory_history(store: &Store, _provenance: &str, key: &str, full: Option<bool>) -> Result<String> {
    let key = store.resolve_key(key).unwrap_or_else(|_| key.to_string());
    let full = full.unwrap_or(false);
    let path = crate::store::nodes_path();
    if !path.exists() {
        anyhow::bail!("No node log found");
    }
    use std::io::BufReader;
    let file = std::fs::File::open(&path)
        .map_err(|e| anyhow::anyhow!("open {}: {}", path.display(), e))?;
    let mut reader = BufReader::new(file);
    let mut versions: Vec<crate::store::Node> = Vec::new();
    while let Ok(msg) = capnp::serialize::read_message(&mut reader, capnp::message::ReaderOptions::new()) {
        let log = msg.get_root::<crate::memory_capnp::node_log::Reader>()
            .map_err(|e| anyhow::anyhow!("read log: {}", e))?;
        for node_reader in log.get_nodes()
            .map_err(|e| anyhow::anyhow!("get nodes: {}", e))? {
            let node = crate::store::Node::from_capnp_migrate(node_reader)
                .map_err(|e| anyhow::anyhow!("{}", e))?;
            if node.key == key {
                versions.push(node);
            }
        }
    }
    if versions.is_empty() {
        anyhow::bail!("No history found for '{}'", key);
    }
    let mut out = format!("{} versions of '{}':\n\n", versions.len(), key);
    for node in &versions {
        let ts = crate::store::format_datetime(node.timestamp);
        let deleted = if node.deleted { " DELETED" } else { "" };
        if full {
            out.push_str(&format!("=== v{} {} {}{} w={:.3} {}b ===\n",
                node.version, ts, node.provenance, deleted, node.weight, node.content.len()));
            out.push_str(&node.content);
            out.push('\n');
        } else {
            let preview = crate::util::first_n_chars(&node.content, 120).replace('\n', "\\n");
            out.push_str(&format!("v{:<3} {} {:24} w={:.3} {}b{}\n {}\n",
                node.version, ts, node.provenance, node.weight, node.content.len(), deleted, preview));
        }
    }
    Ok(out)
}

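// The log is append-only: every upsert writes a complete node record, so replaying
// it (as memory_history does above) recovers all versions of a key in write order.
// Hedged call sketch with placeholder key and provenance:
#[allow(dead_code)]
fn example_memory_history(store: &Store) -> Result<String> {
    memory_history(store, "example", "some-node-key", Some(true)) // full contents
}
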
pub fn memory_weight_set(store: &Store, _provenance: &str, key: &str, weight: f32) -> Result<String> {
    let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
    let (old, new) = store.set_weight(&resolved, weight).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("weight {} {:.2}→{:.2}", resolved, old, new))
}

pub fn memory_rename(store: &Store, _provenance: &str, old_key: &str, new_key: &str) -> Result<String> {
    let resolved = store.resolve_key(old_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.rename_node(&resolved, new_key).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("Renamed '{}' → '{}'", resolved, new_key))
}

/// Mark a node as superseded by another: prepend a banner to its content and
/// drop its weight to 0.01.
pub fn memory_supersede(store: &Store, provenance: &str, old_key: &str, new_key: &str, reason: Option<&str>) -> Result<String> {
    let reason = reason.unwrap_or("superseded");
    let content = store.get_node(old_key)
        .map_err(|e| anyhow::anyhow!("{}", e))?
        .map(|n| n.content)
        .ok_or_else(|| anyhow::anyhow!("node not found: {}", old_key))?;
    let notice = format!("**SUPERSEDED** by `{}` — {}\n\n---\n\n{}",
        new_key, reason, content.trim());
    store.upsert_provenance(old_key, &notice, provenance)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    store.set_weight(old_key, 0.01).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    Ok(format!("superseded {}→{} ({})", old_key, new_key, reason))
}

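// Hedged sketch: redirect readers of an outdated note to its replacement. The keys
// and reason are placeholders; the function above prepends the SUPERSEDED banner
// and drops the old node's weight to 0.01.
#[allow(dead_code)]
fn example_memory_supersede(store: &Store) -> Result<String> {
    memory_supersede(store, "example", "old-design-note", "new-design-note", Some("redesigned"))
}
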
/// Convert a list of keys to ReplayItems with priority and graph metrics.
pub fn keys_to_replay_items(
    store: &Store,
    keys: &[String],
    graph: &Graph,
) -> Vec<ReplayItem> {
    keys.iter()
        .filter_map(|key| {
            let node = store.get_node(key).ok()??;
            let priority = consolidation_priority(store, key, graph, None);
            let cc = graph.clustering_coefficient(key);
            Some(ReplayItem {
                key: key.clone(),
                priority,
                interval_days: node.spaced_repetition_interval,
                emotion: node.emotion,
                cc,
                classification: "unknown",
                outlier_score: 0.0,
            })
        })
        .collect()
}

/// Run a query against the store. format "full" renders rich per-node output;
/// anything else (default "compact") returns the parser's compact string form.
pub fn memory_query(store: &Store, _provenance: &str, query_str: &str, format: Option<&str>) -> Result<String> {
    let graph = store.build_graph();
    match format.unwrap_or("compact") {
        "full" => {
            // Rich output with full content, graph metrics, hub analysis
            let results = crate::query_parser::execute_query(store, &graph, query_str)
                .map_err(|e| anyhow::anyhow!("{}", e))?;
            let keys: Vec<String> = results.into_iter().map(|r| r.key).collect();
            let items = keys_to_replay_items(store, &keys, &graph);
            Ok(crate::subconscious::prompts::format_nodes_section(store, &items, &graph))
        }
        _ => {
            // Compact output: handles count, select, and all expression types
            crate::query_parser::query_to_string(store, &graph, query_str)
                .map_err(|e| anyhow::anyhow!("{}", e))
        }
    }
}

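// Hedged sketch of the two output modes. The query string is illustrative only;
// the actual syntax is whatever crate::query_parser accepts.
#[allow(dead_code)]
fn example_memory_query(store: &Store) -> Result<String> {
    let compact = memory_query(store, "example", "some query", None)?;
    let full = memory_query(store, "example", "some query", Some("full"))?;
    Ok(format!("{}\n{}", compact, full))
}
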
// ── Journal tools ──────────────────────────────────────────────
/// A journal entry with key, content, and timestamp.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
pub struct JournalEntry {
    pub key: String,
    pub content: String,
    pub created_at: i64,
}

/// Get journal entries, sorted by timestamp (newest first).
/// level: 0=session, 1=daily, 2=weekly, 3=monthly
/// after: only entries after this date (YYYY-MM-DD)
pub fn journal_tail(store: &Store, _provenance: &str, count: Option<u64>, level: Option<u64>, after: Option<&str>) -> Result<Vec<JournalEntry>> {
    let count = count.unwrap_or(10) as usize;
    let level = level.unwrap_or(0);
    let node_type = match level {
        0 => crate::store::NodeType::EpisodicSession,
        1 => crate::store::NodeType::EpisodicDaily,
        2 => crate::store::NodeType::EpisodicWeekly,
        3 => crate::store::NodeType::EpisodicMonthly,
        _ => return Err(anyhow::anyhow!("invalid level: {}", level)),
    };
    let after_ts = after.and_then(|date| {
        chrono::NaiveDate::parse_from_str(date, "%Y-%m-%d").ok()
            .and_then(|nd| nd.and_hms_opt(0, 0, 0))
            .map(|dt| dt.and_utc().timestamp())
    });
    let all_keys = store.all_keys()?;
    let mut entries: Vec<_> = all_keys.iter()
        .filter_map(|key| store.get_node(key).ok()?)
        .filter(|n| n.node_type == node_type)
        .filter(|n| after_ts.map(|ts| n.created_at >= ts).unwrap_or(true))
        .map(|n| JournalEntry {
            key: n.key.clone(),
            content: n.content,
            created_at: n.created_at,
        })
        .collect();
    entries.sort_by_key(|e| std::cmp::Reverse(e.created_at));
    entries.truncate(count);
    Ok(entries)
}

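// Hedged sketch: the five most recent daily digests (level 1) since a date. Note
// the filter above is inclusive: created_at >= midnight UTC of the given day.
#[allow(dead_code)]
fn example_journal_tail(store: &Store) -> Result<Vec<JournalEntry>> {
    journal_tail(store, "example", Some(5), Some(1), Some("2026-01-01"))
}
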
fn level_to_node_type(level: i64) -> crate::store::NodeType {
    match level {
        1 => crate::store::NodeType::EpisodicDaily,
        2 => crate::store::NodeType::EpisodicWeekly,
        3 => crate::store::NodeType::EpisodicMonthly,
        _ => crate::store::NodeType::EpisodicSession,
    }
}

/// Create a journal entry. The key is a slug of `name`, deduplicated with a
/// numeric suffix on collision.
pub fn journal_new(store: &Store, provenance: &str, name: &str, title: &str, body: &str, level: Option<i64>) -> Result<String> {
    let level = level.unwrap_or(0);
    let ts = chrono::Local::now().format("%Y-%m-%dT%H:%M");
    let content = format!("## {} {}\n\n{}", ts, title, body);
    // Slugify: lowercase each word, keep alphanumerics and '-', join with '-'.
    let base_key: String = name.split_whitespace()
        .map(|w| w.to_lowercase()
            .chars().filter(|c| c.is_alphanumeric() || *c == '-')
            .collect::<String>())
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>()
        .join("-");
    let base_key = if base_key.len() > 80 { &base_key[..80] } else { base_key.as_str() };
    let key = if store.contains_key(base_key).unwrap_or(false) {
        let mut n = 2;
        loop {
            let candidate = format!("{}-{}", base_key, n);
            if !store.contains_key(&candidate).unwrap_or(false) { break candidate; }
            n += 1;
        }
    } else {
        base_key.to_string()
    };
    let mut node = crate::store::new_node(&key, &content);
    node.node_type = level_to_node_type(level);
    node.provenance = provenance.to_string();
    store.upsert_node(node).map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    let word_count = body.split_whitespace().count();
    Ok(format!("New entry '{}' ({} words)", title, word_count))
}

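// Hedged sketch of key derivation: the name "Fix BTree Locking!" slugifies to the
// key "fix-btree-locking"; a collision would yield "fix-btree-locking-2", and so on.
#[allow(dead_code)]
fn example_journal_new(store: &Store) -> Result<String> {
    journal_new(store, "example", "Fix BTree Locking!", "Locking notes", "Removed the outer mutex.", Some(0))
}
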
/// Append body text to the most recent entry at the given level.
pub fn journal_update(store: &Store, provenance: &str, body: &str, level: Option<i64>) -> Result<String> {
    let level = level.unwrap_or(0);
    let node_type = level_to_node_type(level);
    let all_keys = store.all_keys()?;
    let latest_key = all_keys.iter()
        .filter_map(|key| store.get_node(key).ok()?)
        .filter(|n| n.node_type == node_type)
        .max_by_key(|n| n.created_at)
        .map(|n| n.key.clone());
    let Some(key) = latest_key else {
        anyhow::bail!("no entry at level {} to update — use journal_new first", level);
    };
    let existing = store.get_node(&key)?.ok_or_else(|| anyhow::anyhow!("node not found"))?.content;
    let new_content = format!("{}\n\n{}", existing.trim_end(), body);
    store.upsert_provenance(&key, &new_content, provenance)
        .map_err(|e| anyhow::anyhow!("{}", e))?;
    store.save().map_err(|e| anyhow::anyhow!("{}", e))?;
    let word_count = body.split_whitespace().count();
    Ok(format!("Updated last entry (+{} words)", word_count))
}

// ── Graph tools ───────────────────────────────────────────────
pub fn graph_topology(store: &Store, _provenance: &str) -> Result<String> {
    let graph = store.build_graph();
    Ok(crate::subconscious::prompts::format_topology_header(store, &graph))
}

pub fn graph_health(store: &Store, _provenance: &str) -> Result<String> {
    let graph = store.build_graph();
    Ok(crate::subconscious::prompts::format_health_section(store, &graph))
}

pub fn graph_communities(store: &Store, _provenance: &str, top_n: Option<usize>, min_size: Option<usize>) -> Result<String> {
    let top_n = top_n.unwrap_or(10);
    let min_size = min_size.unwrap_or(3);
    let g = store.build_graph();
    let infos = g.community_info();
    let total = infos.len();
    let shown: Vec<_> = infos.into_iter()
        .filter(|c| c.size >= min_size)
        .take(top_n)
        .collect();
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "{} communities total (showing {} with size >= {})\n",
        total, shown.len(), min_size).ok();
    writeln!(out, "{:<6} {:>5} {:>7} {:>7} members", "id", "size", "iso", "cross").ok();
    writeln!(out, "{}", "-".repeat(70)).ok();
    for c in &shown {
        let preview: Vec<&str> = c.members.iter()
            .take(5)
            .map(|s| s.as_str())
            .collect();
        let more = if c.size > 5 {
            format!(" +{}", c.size - 5)
        } else {
            String::new()
        };
        writeln!(out, "{:<6} {:>5} {:>6.0}% {:>7} {}{}",
            c.id, c.size, c.isolation * 100.0, c.cross_edges,
            preview.join(", "), more).ok();
    }
    Ok(out)
}

/// Recompute link strengths from Jaccard similarity of neighbor sets.
/// Dry run by default; pass apply=true to write the changes.
pub fn graph_normalize_strengths(store: &Store, _provenance: &str, apply: Option<bool>) -> Result<String> {
    use crate::store::{StoreView, RelationType};
    let apply = apply.unwrap_or(false);
    let graph = store.build_graph();
    let strengths = graph.jaccard_strengths();
    // Build lookup from (source_key, target_key) → new_strength
    let mut target_strengths: std::collections::HashMap<(String, String), f32> = std::collections::HashMap::new();
    for (a, b, s) in &strengths {
        target_strengths.insert((a.clone(), b.clone()), *s);
        target_strengths.insert((b.clone(), a.clone()), *s);
    }
    // Collect edges and compute changes
    let mut to_update: Vec<(String, String, f32)> = Vec::new();
    let mut unchanged = 0usize;
    let mut temporal_skipped = 0usize;
    let mut delta_sum: f64 = 0.0;
    let mut buckets = [0usize; 10];
    store.for_each_relation(|source, target, strength, rel_type| {
        // Skip temporal links
        if strength == 1.0 && rel_type == RelationType::Auto {
            temporal_skipped += 1;
            return;
        }
        if let Some(&new_s) = target_strengths.get(&(source.to_string(), target.to_string())) {
            let delta = (new_s - strength).abs();
            if delta > 0.001 {
                delta_sum += delta as f64;
                to_update.push((source.to_string(), target.to_string(), new_s));
            } else {
                unchanged += 1;
            }
            let bucket = ((new_s * 10.0) as usize).min(9);
            buckets[bucket] += 1;
        }
    });
    let changed = to_update.len();
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "Normalize link strengths (Jaccard similarity)").ok();
    writeln!(out, " Total edges in graph: {}", strengths.len()).ok();
    writeln!(out, " Would change: {}", changed).ok();
    writeln!(out, " Unchanged: {}", unchanged).ok();
    writeln!(out, " Temporal (skipped): {}", temporal_skipped).ok();
    if changed > 0 {
        writeln!(out, " Avg delta: {:.3}", delta_sum / changed as f64).ok();
    }
    writeln!(out).ok();
    writeln!(out, " Strength distribution:").ok();
    for (i, &count) in buckets.iter().enumerate() {
        let lo = i as f32 / 10.0;
        let hi = lo + 0.1;
        let bar = "#".repeat(count / 50 + if count > 0 { 1 } else { 0 });
        writeln!(out, " {:.1}-{:.1}: {:5} {}", lo, hi, count, bar).ok();
    }
    if apply {
        for (source, target, new_strength) in to_update {
            store.set_link_strength(&source, &target, new_strength)?;
        }
        writeln!(out, "\nApplied {} strength updates.", changed).ok();
    } else {
        writeln!(out, "\nDry run. Pass apply:true to write changes.").ok();
    }
    Ok(out)
}

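// The Jaccard strength assigned above, sketched as a standalone formula (the real
// computation lives in Graph::jaccard_strengths): |N(a) ∩ N(b)| / |N(a) ∪ N(b)|
// over the two nodes' neighbor sets.
#[allow(dead_code)]
fn jaccard_sketch(a: &std::collections::HashSet<String>, b: &std::collections::HashSet<String>) -> f32 {
    let inter = a.intersection(b).count() as f32;
    let union = a.union(b).count() as f32;
    if union == 0.0 { 0.0 } else { inter / union }
}
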
pub fn graph_link_impact(store: &Store, _provenance: &str, source: &str, target: &str) -> Result<String> {
    let source = store.resolve_key(source).map_err(|e| anyhow::anyhow!("{}", e))?;
    let target = store.resolve_key(target).map_err(|e| anyhow::anyhow!("{}", e))?;
    let g = store.build_graph();
    let impact = g.link_impact(&source, &target);
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "Link impact: {} → {}", source, target).ok();
    writeln!(out, " Source degree: {} Target degree: {}", impact.source_deg, impact.target_deg).ok();
    writeln!(out, " Hub link: {} Same community: {}", impact.is_hub_link, impact.same_community).ok();
    writeln!(out, " ΔCC source: {:+.4} ΔCC target: {:+.4}", impact.delta_cc_source, impact.delta_cc_target).ok();
    writeln!(out, " ΔGini: {:+.6}", impact.delta_gini).ok();
    writeln!(out, " Assessment: {}", impact.assessment).ok();
    Ok(out)
}

pub fn graph_hubs(store: &Store, _provenance: &str, count: Option<usize>) -> Result<String> {
    let count = count.unwrap_or(20);
    let graph = store.build_graph();
    // Top hub nodes by degree, spread apart (skip neighbors of already-selected hubs)
    let all_keys = store.all_keys().unwrap_or_default();
    let mut hubs: Vec<(String, usize)> = all_keys.iter()
        .filter(|k| !k.starts_with('_'))
        .map(|k| {
            let degree = graph.neighbors(k).len();
            (k.clone(), degree)
        })
        .collect();
    hubs.sort_by(|a, b| b.1.cmp(&a.1));
    let mut selected = Vec::new();
    let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
    for (key, degree) in &hubs {
        if seen.contains(key) { continue; }
        selected.push(format!(" - {} (degree {})", key, degree));
        // Mark neighbors as seen so we pick far-apart hubs
        for (nbr, _) in graph.neighbors(key) {
            seen.insert(nbr.clone());
        }
        seen.insert(key.clone());
        if selected.len() >= count { break; }
    }
    Ok(format!("## Hub nodes (link targets)\n\n{}", selected.join("\n")))
}

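// Selection note: because each pick's neighbors are marked seen, two directly
// linked hubs never both appear, so the picks spread across the graph. E.g. with
// degrees a=10, b=9, c=8 and an a-b edge, the picks are a then c.
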
/// Show a node plus its neighbors, grouped by node type (session, daily,
/// weekly/monthly, semantic).
pub fn graph_trace(store: &Store, _provenance: &str, key: &str) -> Result<String> {
    let resolved = store.resolve_key(key).map_err(|e| anyhow::anyhow!("{}", e))?;
    let g = store.build_graph();
    let node = store.get_node(&resolved)?
        .ok_or_else(|| anyhow::anyhow!("Node not found: {}", resolved))?;
    use std::fmt::Write;
    let mut out = String::new();
    writeln!(out, "=== {} ===", resolved).ok();
    writeln!(out, "Type: {:?} Weight: {:.2}", node.node_type, node.weight).ok();
    if !node.source_ref.is_empty() {
        writeln!(out, "Source: {}", node.source_ref).ok();
    }
    let preview = crate::util::truncate(&node.content, 200, "...");
    writeln!(out, "\n{}\n", preview).ok();
    // Walk neighbors, grouped by node type
    let neighbors = g.neighbors(&resolved);
    let mut episodic_session: Vec<(String, f32, crate::store::Node)> = Vec::new();
    let mut episodic_daily: Vec<(String, f32, crate::store::Node)> = Vec::new();
    let mut episodic_weekly: Vec<(String, f32, crate::store::Node)> = Vec::new();
    let mut semantic: Vec<(String, f32, crate::store::Node)> = Vec::new();
    for (n, strength) in &neighbors {
        if let Ok(Some(nnode)) = store.get_node(n) {
            let node_type = nnode.node_type;
            let key: String = (*n).clone();
            let entry = (key, *strength, nnode);
            match node_type {
                crate::store::NodeType::EpisodicSession => episodic_session.push(entry),
                crate::store::NodeType::EpisodicDaily => episodic_daily.push(entry),
                crate::store::NodeType::EpisodicWeekly
                | crate::store::NodeType::EpisodicMonthly => episodic_weekly.push(entry),
                crate::store::NodeType::Semantic => semantic.push(entry),
            }
        }
    }
    if !episodic_weekly.is_empty() {
        writeln!(out, "Weekly/monthly digests:").ok();
        for (k, s, n) in &episodic_weekly {
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            writeln!(out, " [{:.2}] {} — {}", s, &k, preview).ok();
        }
    }
    if !episodic_daily.is_empty() {
        writeln!(out, "Daily digests:").ok();
        for (k, s, n) in &episodic_daily {
            let preview = crate::util::first_n_chars(n.content.lines().next().unwrap_or(""), 80);
            writeln!(out, " [{:.2}] {} — {}", s, &k, preview).ok();
        }
    }
    if !episodic_session.is_empty() {
        writeln!(out, "Session entries:").ok();
        for (k, s, n) in &episodic_session {
            let preview = crate::util::first_n_chars(
                n.content.lines()
                    .find(|l| !l.is_empty() && !l.starts_with("<!--"))
                    .unwrap_or(""),
                80);
            writeln!(out, " [{:.2}] {}", s, &k).ok();
            if !n.source_ref.is_empty() {
                writeln!(out, " ↳ source: {}", n.source_ref).ok();
            }
            writeln!(out, " {}", preview).ok();
        }
    }
    if !semantic.is_empty() {
        writeln!(out, "Semantic links:").ok();
        for (k, s, _) in &semantic {
            writeln!(out, " [{:.2}] {}", s, k).ok();
        }
    }
    writeln!(out, "\nLinks: {} session, {} daily, {} weekly/monthly, {} semantic",
        episodic_session.len(), episodic_daily.len(),
        episodic_weekly.len(), semantic.len()).ok();
    Ok(out)
}