move Claude Code-specific code from thalamus/ to claude/

Separates the Claude-specific daemon (idle timer, tmux pane detection,
prompt injection, RPC server, session hooks) from the universal
infrastructure (channels, supervisor, notify, daemon protocol).

thalamus/ now contains only substrate-independent code: the channel
client/supervisor, notification system, daemon_capnp protocol, and
shared helpers (now(), home()).

claude/ contains: idle.rs, tmux.rs, context.rs, rpc.rs, config.rs,
hook.rs (moved from subconscious/), plus the daemon CLI and server
startup code from thalamus/mod.rs.

All re-exports preserved for backward compatibility.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-03 19:14:39 -04:00
parent 36afa90cdb
commit dd7f1e3f86
19 changed files with 627 additions and 612 deletions

View file

@ -1,220 +0,0 @@
// memory-search CLI — thin wrapper around poc_memory::memory_search
//
// --hook: run hook logic (for debugging; poc-hook calls the library directly)
// surface/reflect: run agent, parse output, render memories to stdout
// no args: show seen set for current session
use clap::{Parser, Subcommand};
use std::fs;
use std::io::{self, Read};
use std::process::Command;
/// Location of the stashed "last hook input" JSON used to recover the
/// current session when no explicit ID is given.
fn stash_path() -> std::path::PathBuf {
    let sessions = poc_memory::store::memory_dir().join("sessions");
    sessions.join("last-input.json")
}
// CLI arguments. Field-level `///` comments double as clap help text
// (runtime strings), so they are left untouched; notes here use `//`.
#[derive(Parser)]
#[command(name = "memory-search")]
struct Args {
    /// Run hook logic (reads JSON from stdin or stash file)
    #[arg(long)]
    hook: bool,
    /// Session ID (overrides stash file; for multiple concurrent sessions)
    #[arg(long)]
    session: Option<String>,
    // Optional subcommand; when absent, the seen set for the current
    // session is shown instead.
    #[command(subcommand)]
    command: Option<Cmd>,
}
// Subcommands. Variant-level `///` comments are clap help text
// (runtime strings), so they are left untouched.
#[derive(Subcommand)]
enum Cmd {
    /// Run surface agent, parse output, render memories
    Surface,
    /// Run reflect agent, dump output
    Reflect,
}
/// Resolve the hook session: an explicit `--session` ID wins, otherwise
/// fall back to the stashed last-input JSON. None if neither resolves.
fn resolve_session(session_arg: &Option<String>) -> Option<poc_memory::memory_search::HookSession> {
    use poc_memory::memory_search::HookSession;
    match session_arg {
        Some(id) => HookSession::from_id(id.clone()),
        None => {
            let stashed = fs::read_to_string(stash_path()).ok()?;
            HookSession::from_json(&stashed)
        }
    }
}
/// Default (no-subcommand) mode: print the resolved session's debug
/// state — cookie, last compaction, pending chunk count, and seen sets.
fn show_seen(session_arg: &Option<String>) {
    let Some(session) = resolve_session(session_arg) else {
        eprintln!("No session state available (use --session ID)");
        return;
    };
    println!("Session: {}", session.session_id);
    if let Ok(cookie) = fs::read_to_string(&session.path("cookie")) {
        println!("Cookie: {}", cookie.trim());
    }
    // The compaction file stores a byte offset into the transcript; try
    // to resolve it to a human-readable timestamp for display.
    match fs::read_to_string(&session.path("compaction")) {
        Ok(s) => {
            let offset: u64 = s.trim().parse().unwrap_or(0);
            let ts = poc_memory::transcript::compaction_timestamp(&session.transcript_path, offset);
            match ts {
                Some(t) => println!("Last compaction: offset {} ({})", offset, t),
                None => println!("Last compaction: offset {}", offset),
            }
        }
        Err(_) => println!("Last compaction: none detected"),
    }
    // Number of chunk files still queued under the session's chunks dir.
    let pending = fs::read_dir(&session.path("chunks")).ok()
        .map(|d| d.flatten().count()).unwrap_or(0);
    if pending > 0 {
        println!("Pending chunks: {}", pending);
    }
    // Seen sets are line-oriented files keyed by session ID; the "-prev"
    // variant is presumably the set saved before the last compaction.
    for (label, suffix) in [("Current seen set", ""), ("Previous seen set (pre-compaction)", "-prev")] {
        let path = session.state_dir.join(format!("seen{}-{}", suffix, session.session_id));
        let content = fs::read_to_string(&path).unwrap_or_default();
        let lines: Vec<&str> = content.lines().filter(|s| !s.is_empty()).collect();
        if lines.is_empty() { continue; }
        println!("\n{} ({}):", label, lines.len());
        for line in &lines { println!(" {}", line); }
    }
}
/// Run the given agent ("surface" or "reflect") via `poc-memory agent run`,
/// parse its stdout, and render the result for a human.
///
/// Session ID resolution order: `--session` flag, CLAUDE_SESSION_ID env
/// var, then the stashed last-input JSON. Exits the process on failure.
fn run_agent_and_parse(agent: &str, session_arg: &Option<String>) {
    let session_id = session_arg.clone()
        .or_else(|| std::env::var("CLAUDE_SESSION_ID").ok())
        .or_else(|| {
            fs::read_to_string(stash_path()).ok()
                .and_then(|s| poc_memory::memory_search::HookSession::from_json(&s))
                .map(|s| s.session_id)
        })
        .unwrap_or_default();
    if session_id.is_empty() {
        eprintln!("No session ID available (use --session ID, set CLAUDE_SESSION_ID, or run --hook first)");
        std::process::exit(1);
    }
    // Show only an 8-char session prefix (guarding against shorter IDs).
    eprintln!("Running {} agent (session {})...", agent, &session_id[..8.min(session_id.len())]);
    let output = Command::new("poc-memory")
        .args(["agent", "run", agent, "--count", "1", "--local"])
        .env("POC_SESSION_ID", &session_id)
        .output();
    let output = match output {
        Ok(o) => o,
        Err(e) => {
            eprintln!("Failed to run agent: {}", e);
            std::process::exit(1);
        }
    };
    let result = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    // Pass the agent's own diagnostics through.
    if !stderr.is_empty() {
        eprintln!("{}", stderr);
    }
    // Extract the final response — after the last "=== RESPONSE ===" marker
    let response = result.rsplit_once("=== RESPONSE ===")
        .map(|(_, rest)| rest.trim())
        .unwrap_or(result.trim());
    if agent == "reflect" {
        // Reflect: find REFLECTION marker and dump what follows
        if let Some(pos) = response.find("REFLECTION") {
            let after = &response[pos + "REFLECTION".len()..];
            let text = after.trim();
            if !text.is_empty() {
                println!("{}", text);
            }
        } else if response.contains("NO OUTPUT") {
            println!("(no reflection)");
        } else {
            eprintln!("Unexpected output format");
            println!("{}", response);
        }
        return;
    }
    // Surface: parse NEW RELEVANT MEMORIES, render them
    // Only the last few non-empty lines are checked for markers, so stray
    // mid-output mentions of the marker text don't count.
    let tail_lines: Vec<&str> = response.lines().rev()
        .filter(|l| !l.trim().is_empty()).take(8).collect();
    let has_new = tail_lines.iter().any(|l| l.starts_with("NEW RELEVANT MEMORIES:"));
    let has_none = tail_lines.iter().any(|l| l.starts_with("NO NEW RELEVANT MEMORIES"));
    if has_new {
        let after_marker = response.rsplit_once("NEW RELEVANT MEMORIES:")
            .map(|(_, rest)| rest).unwrap_or("");
        // One memory key per line, optionally "- " bulleted; code fences skipped.
        let keys: Vec<String> = after_marker.lines()
            .map(|l| l.trim().trim_start_matches("- ").trim().to_string())
            .filter(|l| !l.is_empty() && !l.starts_with("```")).collect();
        if keys.is_empty() {
            println!("(no memories found)");
            return;
        }
        let Ok(store) = poc_memory::store::Store::load() else {
            eprintln!("Failed to load store");
            return;
        };
        // Render each surfaced memory node in full.
        for key in &keys {
            if let Some(content) = poc_memory::cli::node::render_node(&store, key) {
                if !content.trim().is_empty() {
                    println!("--- {} (surfaced) ---", key);
                    print!("{}", content);
                    println!();
                }
            } else {
                eprintln!(" key not found: {}", key);
            }
        }
    } else if has_none {
        println!("(no new relevant memories)");
    } else {
        eprintln!("Unexpected output format");
        print!("{}", response);
    }
}
/// Dispatch: subcommand > --hook > default seen-set display.
fn main() {
    let args = Args::parse();
    if let Some(cmd) = args.command {
        match cmd {
            Cmd::Surface => run_agent_and_parse("surface", &args.session),
            Cmd::Reflect => run_agent_and_parse("reflect", &args.session),
        }
        return;
    }
    if args.hook {
        // Read from stdin if piped, otherwise from stash
        let input = {
            let mut buf = String::new();
            io::stdin().read_to_string(&mut buf).ok();
            if buf.trim().is_empty() {
                // Nothing piped in — fall back to the last stashed hook input.
                fs::read_to_string(stash_path()).unwrap_or_default()
            } else {
                // Fresh input: stash it (best effort) for later invocations.
                let _ = fs::create_dir_all(stash_path().parent().unwrap());
                let _ = fs::write(stash_path(), &buf);
                buf
            }
        };
        let output = poc_memory::memory_search::run_hook(&input);
        print!("{}", output);
    } else {
        show_seen(&args.session)
    }
}

View file

@ -1,330 +0,0 @@
// parse-claude-conversation: debug tool for inspecting what's in the context window
//
// Two-layer design:
// 1. extract_context_items() — walks JSONL from last compaction, yields
// structured records representing what's in the context window
// 2. format_as_context() — renders those records as they appear to Claude
//
// The transcript is mmap'd and scanned backwards from EOF using brace-depth
// tracking to find complete JSON objects, avoiding a full forward scan of
// what can be a 500MB+ file.
//
// Usage:
// parse-claude-conversation [TRANSCRIPT_PATH]
// parse-claude-conversation --last # use the last stashed session
use clap::Parser;
use memmap2::Mmap;
use poc_memory::transcript::{JsonlBackwardIter, find_last_compaction};
use serde_json::Value;
use std::fs;
// CLI arguments. Field-level `///` comments double as clap help text
// (runtime strings), so they are left untouched; notes here use `//`.
#[derive(Parser)]
#[command(name = "parse-claude-conversation")]
struct Args {
    /// Transcript JSONL path (or --last to use stashed session)
    path: Option<String>,
    /// Use the last stashed session from memory-search
    #[arg(long)]
    last: bool,
    /// Dump raw JSONL objects. Optional integer: number of extra objects
    /// to include before the compaction boundary.
    // Bare `--raw` means "0 extra objects" via default_missing_value.
    #[arg(long, num_args = 0..=1, default_missing_value = "0")]
    raw: Option<usize>,
}
// --- Context extraction ---
/// A single item in the context window, as Claude sees it.
enum ContextItem {
    /// Plain user-typed text (system-reminder sections split out).
    UserText(String),
    /// Contents of a <system-reminder>…</system-reminder> section.
    SystemReminder(String),
    /// Assistant prose output.
    AssistantText(String),
    /// A thinking block; contents elided, only presence recorded.
    AssistantThinking,
    /// A tool invocation: tool name plus raw JSON input.
    ToolUse { name: String, input: String },
    /// Text returned from a tool call.
    ToolResult(String),
}
/// Walk the transcript forward from the last compaction boundary and
/// collect the context items in order. Unparseable lines are skipped.
fn extract_context_items(data: &[u8]) -> Vec<ContextItem> {
    let begin = find_last_compaction(data).unwrap_or(0);
    let mut collected = Vec::new();
    for raw in data[begin..].split(|&b| b == b'\n') {
        if raw.is_empty() {
            continue;
        }
        let Ok(record) = serde_json::from_slice::<Value>(raw) else {
            continue;
        };
        // Only user/assistant records carry context content.
        let content = record.get("message").and_then(|m| m.get("content"));
        match (record.get("type").and_then(|v| v.as_str()), content) {
            (Some("user"), Some(c)) => extract_user_content(c, &mut collected),
            (Some("assistant"), Some(c)) => extract_assistant_content(c, &mut collected),
            _ => {}
        }
    }
    collected
}
/// Fold a user message's content (bare string or block array) into items.
fn extract_user_content(content: &Value, items: &mut Vec<ContextItem>) {
    if let Value::String(s) = content {
        split_system_reminders(s, items, false);
        return;
    }
    let Value::Array(blocks) = content else { return };
    for block in blocks {
        match block.get("type").and_then(|v| v.as_str()) {
            Some("text") => {
                if let Some(t) = block.get("text").and_then(|v| v.as_str()) {
                    split_system_reminders(t, items, false);
                }
            }
            Some("tool_result") => {
                let text = extract_tool_result_text(block);
                if !text.is_empty() {
                    split_system_reminders(&text, items, true);
                }
            }
            _ => {}
        }
    }
}
/// Pull the plain text out of a tool_result block; its content may be a
/// bare string or an array of text blocks (joined with newlines).
fn extract_tool_result_text(block: &Value) -> String {
    match block.get("content") {
        Some(Value::String(s)) => s.clone(),
        Some(Value::Array(blocks)) => blocks
            .iter()
            .filter_map(|b| b.get("text").and_then(|v| v.as_str()))
            .collect::<Vec<_>>()
            .join("\n"),
        _ => String::new(),
    }
}
/// Split text on <system-reminder> tags. Non-reminder text emits UserText
/// or ToolResult depending on `is_tool_result`; empty fragments are dropped.
/// An unterminated open tag consumes the rest of the text as reminder.
fn split_system_reminders(text: &str, items: &mut Vec<ContextItem>, is_tool_result: bool) {
    const OPEN: &str = "<system-reminder>";
    const CLOSE: &str = "</system-reminder>";
    // Emit a non-reminder fragment with the right variant.
    fn push_plain(frag: &str, items: &mut Vec<ContextItem>, as_tool_result: bool) {
        let frag = frag.trim();
        if frag.is_empty() {
            return;
        }
        items.push(if as_tool_result {
            ContextItem::ToolResult(frag.to_string())
        } else {
            ContextItem::UserText(frag.to_string())
        });
    }
    let mut rest = text;
    while let Some(open_at) = rest.find(OPEN) {
        push_plain(&rest[..open_at], items, is_tool_result);
        let body = &rest[open_at + OPEN.len()..];
        match body.find(CLOSE) {
            Some(close_at) => {
                let reminder = body[..close_at].trim();
                if !reminder.is_empty() {
                    items.push(ContextItem::SystemReminder(reminder.to_string()));
                }
                rest = &body[close_at + CLOSE.len()..];
            }
            None => {
                // Unterminated tag: everything after it is reminder text.
                let reminder = body.trim();
                if !reminder.is_empty() {
                    items.push(ContextItem::SystemReminder(reminder.to_string()));
                }
                return;
            }
        }
    }
    push_plain(rest, items, is_tool_result);
}
/// Fold an assistant message's content (bare string or block array) into items.
fn extract_assistant_content(content: &Value, items: &mut Vec<ContextItem>) {
    // Push non-empty trimmed prose as AssistantText.
    fn push_text(raw: &str, items: &mut Vec<ContextItem>) {
        let t = raw.trim();
        if !t.is_empty() {
            items.push(ContextItem::AssistantText(t.to_string()));
        }
    }
    match content {
        Value::String(s) => push_text(s, items),
        Value::Array(blocks) => {
            for block in blocks {
                match block.get("type").and_then(|v| v.as_str()) {
                    Some("text") => {
                        if let Some(t) = block.get("text").and_then(|v| v.as_str()) {
                            push_text(t, items);
                        }
                    }
                    Some("tool_use") => items.push(ContextItem::ToolUse {
                        name: block
                            .get("name")
                            .and_then(|v| v.as_str())
                            .unwrap_or("?")
                            .to_string(),
                        input: block.get("input").map(|v| v.to_string()).unwrap_or_default(),
                    }),
                    Some("thinking") => items.push(ContextItem::AssistantThinking),
                    _ => {}
                }
            }
        }
        _ => {}
    }
}
// --- Formatting layer ---
/// Clip `s` to at most `max` bytes, appending a "...(N total)" suffix
/// when anything was cut.
///
/// `max` is backed off to the nearest char boundary so multi-byte UTF-8
/// input never panics the slice (transcript text is arbitrary Unicode).
fn truncate(s: &str, max: usize) -> String {
    if s.len() <= max {
        return s.to_string();
    }
    // Walk back from `max` to a valid boundary; 0 is always a boundary,
    // so this terminates.
    let mut cut = max;
    while !s.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...({} total)", &s[..cut], s.len())
}
/// Render items roughly as they appear in Claude's context window, one
/// blank line between items. Long payloads are truncated for readability.
fn format_as_context(items: &[ContextItem]) {
    for item in items {
        match item {
            ContextItem::UserText(text) => println!("USER: {}", truncate(text, 300)),
            ContextItem::SystemReminder(text) => {
                println!("<system-reminder>");
                println!("{}", truncate(text, 500));
                println!("</system-reminder>");
            }
            ContextItem::AssistantText(text) => println!("ASSISTANT: {}", truncate(text, 300)),
            ContextItem::AssistantThinking => println!("[thinking]"),
            ContextItem::ToolUse { name, input } => {
                println!("TOOL_USE: {} {}", name, truncate(input, 200))
            }
            ContextItem::ToolResult(text) => println!("TOOL_RESULT: {}", truncate(text, 300)),
        }
        // Blank separator line after every item, matching original spacing.
        println!();
    }
}
/// Entry point: resolve the transcript path, mmap it, then either dump
/// raw JSONL (--raw) or render the extracted context items.
fn main() {
    let args = Args::parse();
    let path = if args.last {
        // --last: recover the transcript path from the stashed hook input.
        let stash_path = dirs::home_dir().unwrap_or_default()
            .join(".consciousness/sessions/last-input.json");
        let stash = fs::read_to_string(&stash_path)
            .expect("No stashed input");
        let json: Value = serde_json::from_str(&stash).expect("Bad JSON");
        json["transcript_path"]
            .as_str()
            .expect("No transcript_path")
            .to_string()
    } else if let Some(p) = args.path {
        p
    } else {
        eprintln!("error: provide a transcript path or --last");
        std::process::exit(1);
    };
    let file = fs::File::open(&path).expect("Can't open transcript");
    // SAFETY: mmap is unsound if the file shrinks while mapped; transcripts
    // are presumably append-only — TODO confirm.
    let mmap = unsafe { Mmap::map(&file).expect("Failed to mmap") };
    eprintln!(
        "Transcript: {} ({:.1} MB)",
        &path,
        mmap.len() as f64 / 1_000_000.0
    );
    let compaction_offset = find_last_compaction(&mmap).unwrap_or(0);
    eprintln!("Compaction at byte offset: {}", compaction_offset);
    if let Some(extra) = args.raw {
        use std::io::Write;
        // Collect `extra` JSON objects before the compaction boundary
        let mut before = Vec::new();
        if extra > 0 && compaction_offset > 0 {
            // Backward scan yields objects nearest the boundary first;
            // file-history-snapshot records are noise and skipped.
            for obj_bytes in JsonlBackwardIter::new(&mmap[..compaction_offset]) {
                if let Ok(obj) = serde_json::from_slice::<Value>(obj_bytes) {
                    let t = obj.get("type").and_then(|v| v.as_str()).unwrap_or("");
                    if t == "file-history-snapshot" { continue; }
                }
                before.push(obj_bytes.to_vec());
                if before.len() >= extra {
                    break;
                }
            }
            // Restore chronological order for printing.
            before.reverse();
        }
        for obj in &before {
            std::io::stdout().write_all(obj).ok();
            println!();
        }
        // Then dump everything from compaction onward
        let region = &mmap[compaction_offset..];
        for line in region.split(|&b| b == b'\n') {
            if line.is_empty() { continue; }
            if let Ok(obj) = serde_json::from_slice::<Value>(line) {
                let t = obj.get("type").and_then(|v| v.as_str()).unwrap_or("");
                if t == "file-history-snapshot" { continue; }
                std::io::stdout().write_all(line).ok();
                println!();
            }
        }
    } else {
        let items = extract_context_items(&mmap);
        eprintln!("Context items: {}", items.len());
        format_as_context(&items);
    }
}

View file

@ -1,14 +0,0 @@
// poc-daemon — backward-compatible entry point
//
// Delegates to the thalamus module in the main crate.
// The daemon is now part of the consciousness binary but this
// entry point is kept for compatibility with existing scripts.
use clap::Parser;
use poc_memory::thalamus;
// Parse the thalamus CLI and delegate to its runner on a single-threaded
// tokio runtime (current_thread keeps this shim binary lightweight).
#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let cli = thalamus::Cli::parse();
    thalamus::run(cli.command).await
}

View file

@ -1,214 +0,0 @@
// Unified Claude Code hook.
//
// Single binary handling all hook events:
// UserPromptSubmit — signal daemon, check notifications, check context
// PostToolUse — check context (rate-limited)
// Stop — signal daemon response
//
// Replaces: record-user-message-time.sh, check-notifications.sh,
// check-context-usage.sh, notify-done.sh, context-check
use serde_json::Value;
use std::fs;
use std::io::{self, Read};
use std::path::PathBuf;
use std::process::Command;
use std::time::{SystemTime, UNIX_EPOCH};
// Token usage above which the compaction warning is printed.
const CONTEXT_THRESHOLD: u64 = 900_000;
// Minimum seconds between rate-limited (PostToolUse) context checks.
const RATE_LIMIT_SECS: u64 = 60;
// Daemon Unix socket path, relative to $HOME.
const SOCK_PATH: &str = ".consciousness/daemon.sock";
/// How many bytes of new transcript before triggering an observation run.
/// Override with POC_OBSERVATION_THRESHOLD env var.
/// Default: 20KB ≈ 5K tokens. The observation agent's chunk_size (in .agent
/// file) controls how much context it actually reads.
fn observation_threshold() -> u64 {
    const DEFAULT: u64 = 20_000;
    match std::env::var("POC_OBSERVATION_THRESHOLD") {
        // An unparseable override falls back to the default.
        Ok(raw) => raw.parse().unwrap_or(DEFAULT),
        Err(_) => DEFAULT,
    }
}
/// Current wall-clock time as seconds since the Unix epoch.
///
/// Returns 0 instead of panicking when the system clock reads before the
/// epoch — a hook binary must never abort on a misconfigured clock.
fn now_secs() -> u64 {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0)
}
/// The user's home directory from $HOME, defaulting to /root when unset.
fn home() -> PathBuf {
    match std::env::var("HOME") {
        Ok(dir) => PathBuf::from(dir),
        Err(_) => PathBuf::from("/root"),
    }
}
/// Fire-and-forget invocation of poc-daemon with the given args; output
/// and spawn failures are deliberately discarded (best effort signaling).
fn daemon_cmd(args: &[&str]) {
    let _ = Command::new("poc-daemon")
        .args(args)
        .stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .status();
}
/// Whether the daemon's Unix socket exists under $HOME (i.e. a daemon
/// is presumably running).
fn daemon_available() -> bool {
    let sock = home().join(SOCK_PATH);
    sock.exists()
}
/// Tell the daemon the user just submitted a prompt, passing the tmux
/// pane id along when one is set so the daemon can associate the pane.
fn signal_user() {
    match std::env::var("TMUX_PANE") {
        Ok(pane) if !pane.is_empty() => daemon_cmd(&["user", &pane]),
        _ => daemon_cmd(&["user"]),
    }
}
/// Tell the daemon Claude finished responding (fired from the Stop hook).
fn signal_response() {
    daemon_cmd(&["response"]);
}
/// If the daemon socket is up, ask it for pending notifications and echo
/// them to stdout (hook stdout is surfaced to Claude).
fn check_notifications() {
    if !daemon_available() {
        return;
    }
    let Ok(out) = Command::new("poc-daemon").arg("notifications").output() else {
        return;
    };
    let text = String::from_utf8_lossy(&out.stdout);
    if text.trim().is_empty() {
        return;
    }
    println!("You have pending notifications:");
    print!("{text}");
}
/// Check if enough new conversation has accumulated to trigger an observation run.
fn maybe_trigger_observation(transcript: &PathBuf) {
let cursor_file = poc_memory::store::memory_dir().join("observation-cursor");
let last_pos: u64 = fs::read_to_string(&cursor_file)
.ok()
.and_then(|s| s.trim().parse().ok())
.unwrap_or(0);
let current_size = transcript.metadata()
.map(|m| m.len())
.unwrap_or(0);
if current_size > last_pos + observation_threshold() {
// Queue observation via daemon RPC
let _ = Command::new("poc-memory")
.args(["agent", "daemon", "run", "observation", "1"])
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.spawn();
eprintln!("[poc-hook] observation triggered ({} new bytes)", current_size - last_pos);
// Update cursor to current position
let _ = fs::write(&cursor_file, current_size.to_string());
}
}
/// Inspect the transcript's most recent token-usage record and, when it
/// crosses CONTEXT_THRESHOLD, print a journal-writing reminder on stdout
/// (hook stdout is surfaced to Claude).
///
/// With `rate_limit` set (PostToolUse fires often), the check runs at most
/// once per RATE_LIMIT_SECS, tracked via a timestamp file under $HOME.
/// Takes `&Path` (callers holding `&PathBuf` coerce automatically).
fn check_context(transcript: &std::path::Path, rate_limit: bool) {
    if rate_limit {
        let rate_file = dirs::home_dir().unwrap_or_default().join(".consciousness/cache/context-check-last");
        if let Ok(s) = fs::read_to_string(&rate_file) {
            if let Ok(last) = s.trim().parse::<u64>() {
                // saturating_sub: a backwards clock step or a future-dated
                // stamp file must not underflow-panic the hook.
                if now_secs().saturating_sub(last) < RATE_LIMIT_SECS {
                    return;
                }
            }
        }
        // Best-effort stamp update; create the cache dir on first use so
        // the write doesn't silently fail forever.
        if let Some(parent) = rate_file.parent() {
            let _ = fs::create_dir_all(parent);
        }
        let _ = fs::write(&rate_file, now_secs().to_string());
    }
    if !transcript.exists() {
        return;
    }
    // NOTE(perf): slurps the whole transcript even though only the last
    // 500 lines are scanned; a tail read would be cheaper for huge files.
    let content = match fs::read_to_string(transcript) {
        Ok(c) => c,
        Err(_) => return,
    };
    // Scan backwards for the newest usage record and sum all token
    // categories to approximate current context occupancy.
    let mut usage: u64 = 0;
    for line in content.lines().rev().take(500) {
        if !line.contains("cache_read_input_tokens") {
            continue;
        }
        if let Ok(v) = serde_json::from_str::<Value>(line) {
            let u = &v["message"]["usage"];
            let input_tokens = u["input_tokens"].as_u64().unwrap_or(0);
            let cache_creation = u["cache_creation_input_tokens"].as_u64().unwrap_or(0);
            let cache_read = u["cache_read_input_tokens"].as_u64().unwrap_or(0);
            usage = input_tokens + cache_creation + cache_read;
            break;
        }
    }
    if usage > CONTEXT_THRESHOLD {
        print!(
            "\
CONTEXT WARNING: Compaction approaching ({usage} tokens). Write a journal entry NOW.
Use `poc-memory journal write \"entry text\"` to save a dated entry covering:
- What you're working on and current state (done / in progress / blocked)
- Key things learned this session (patterns, debugging insights)
- Anything half-finished that needs pickup
Keep it narrative, not a task log."
        );
    }
}
/// Hook entry point: parse the hook JSON from stdin and dispatch on
/// hook_event_name. Unparseable input is silently ignored — a hook must
/// never fail the host.
fn main() {
    let mut input = String::new();
    io::stdin().read_to_string(&mut input).ok();
    let hook: Value = match serde_json::from_str(&input) {
        Ok(v) => v,
        Err(_) => return,
    };
    let hook_type = hook["hook_event_name"].as_str().unwrap_or("unknown");
    // Empty transcript_path is treated as absent.
    let transcript = hook["transcript_path"]
        .as_str()
        .filter(|p| !p.is_empty())
        .map(PathBuf::from);
    // Daemon agent calls set POC_AGENT=1 — skip all signaling.
    // Without this, the daemon's claude -p calls trigger hooks that
    // signal "user active", keeping the idle timer permanently reset.
    if std::env::var("POC_AGENT").is_ok() {
        return;
    }
    match hook_type {
        "UserPromptSubmit" => {
            signal_user();
            check_notifications();
            // Hook stdout is injected into Claude's context.
            print!("{}", poc_memory::memory_search::run_hook(&input));
            if let Some(ref t) = transcript {
                check_context(t, false);
                maybe_trigger_observation(t);
            }
        }
        "PostToolUse" => {
            print!("{}", poc_memory::memory_search::run_hook(&input));
            if let Some(ref t) = transcript {
                // Rate-limited: PostToolUse fires far more often than prompts.
                check_context(t, true);
            }
        }
        "Stop" => {
            // stop_hook_active presumably marks a Stop fired while a stop
            // hook is already running — skipped to avoid double signaling;
            // TODO confirm against Claude Code hook docs.
            let stop_hook_active = hook["stop_hook_active"].as_bool().unwrap_or(false);
            if !stop_hook_active {
                signal_response();
            }
        }
        _ => {}
    }
}

View file

@ -1,65 +0,0 @@
// Test tool for the conversation resolver.
// Usage: POC_SESSION_ID=<id> cargo run --bin test-conversation
// or: cargo run --bin test-conversation -- <transcript-path>
use std::time::Instant;
/// Resolve a transcript (CLI arg, or POC_SESSION_ID lookup under the
/// projects dir), then stream its tail messages with progress reporting,
/// stopping at a 200KB content budget.
fn main() {
    let path = std::env::args().nth(1).unwrap_or_else(|| {
        // No CLI arg: search each project dir for <session_id>.jsonl.
        let session_id = std::env::var("POC_SESSION_ID")
            .expect("pass a transcript path or set POC_SESSION_ID");
        let projects = poc_memory::config::get().projects_dir.clone();
        eprintln!("session: {}", session_id);
        eprintln!("projects dir: {}", projects.display());
        let mut found = None;
        if let Ok(dirs) = std::fs::read_dir(&projects) {
            for dir in dirs.filter_map(|e| e.ok()) {
                let path = dir.path().join(format!("{}.jsonl", session_id));
                eprintln!(" checking: {}", path.display());
                if path.exists() {
                    found = Some(path);
                    break;
                }
            }
        }
        let path = found.expect("transcript not found");
        path.to_string_lossy().to_string()
    });
    let meta = std::fs::metadata(&path).expect("can't stat file");
    eprintln!("transcript: {} ({} bytes)", path, meta.len());
    let t0 = Instant::now();
    let iter = poc_memory::transcript::TailMessages::open(&path)
        .expect("can't open transcript");
    let mut count = 0;
    let mut total_bytes = 0;
    let mut last_report = Instant::now();
    for (role, content, ts) in iter {
        count += 1;
        total_bytes += content.len();
        // Progress heartbeat every ~2s for very large transcripts.
        if last_report.elapsed().as_secs() >= 2 {
            eprintln!(" ... {} messages, {}KB so far ({:.1}s)",
                count, total_bytes / 1024, t0.elapsed().as_secs_f64());
            last_report = Instant::now();
        }
        // Preview the first few messages; timestamp clipped to 19 chars
        // (the "YYYY-MM-DDTHH:MM:SS" prefix when present).
        if count <= 5 {
            let preview: String = content.chars().take(80).collect();
            eprintln!(" [{}] {} {}: {}...",
                count, &ts[..ts.len().min(19)], role, preview);
        }
        if total_bytes >= 200_000 {
            eprintln!(" hit 200KB budget at {} messages", count);
            break;
        }
    }
    let elapsed = t0.elapsed();
    eprintln!("done: {} messages, {}KB in {:.3}s", count, total_bytes / 1024, elapsed.as_secs_f64());
}