flatten: move poc-memory contents to workspace root
No more subcrate nesting — src/, agents/, schema/, defaults/, build.rs all live at the workspace root. poc-daemon remains as the only workspace member. Crate name (poc-memory) and all imports unchanged. Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
parent
891cca57f8
commit
998b71e52c
113 changed files with 79 additions and 78 deletions
72
src/util.rs
Normal file
72
src/util.rs
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
// Shared utilities
|
||||
|
||||
use crate::store;
|
||||
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::Serialize;
|
||||
|
||||
use std::fs;
|
||||
use std::io::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Ensure a subdirectory of the memory dir exists and return its path.
|
||||
pub fn memory_subdir(name: &str) -> Result<PathBuf, String> {
|
||||
let dir = store::memory_dir().join(name);
|
||||
fs::create_dir_all(&dir)
|
||||
.map_err(|e| format!("create {}: {}", dir.display(), e))?;
|
||||
Ok(dir)
|
||||
}
|
||||
|
||||
/// Truncate text to at most `max_len` bytes at a char boundary, appending `suffix`.
/// Returns the original string unchanged if it's already short enough.
///
/// Note: when truncation happens the result is the truncated text *plus*
/// `suffix`, so it may exceed `max_len` bytes (matches the original contract).
pub fn truncate(text: &str, max_len: usize, suffix: &str) -> String {
    if text.len() <= max_len {
        return text.to_string();
    }
    // Walk back from `max_len` to the nearest UTF-8 char boundary so the
    // slice below can never split a codepoint (which would panic).
    // Index 0 is always a boundary, so `find` cannot fail.
    // Stable replacement for the unstable `str::floor_char_boundary`.
    let end = (0..=max_len)
        .rev()
        .find(|&i| text.is_char_boundary(i))
        .unwrap_or(0);
    format!("{}{}", &text[..end], suffix)
}
|
||||
|
||||
/// Take the first `n` chars from a string.
///
/// Counts Unicode scalar values (`char`s), not bytes, so multibyte
/// characters are never split.
pub fn first_n_chars(s: &str, n: usize) -> String {
    let mut out = String::new();
    for (seen, ch) in s.chars().enumerate() {
        if seen == n {
            break;
        }
        out.push(ch);
    }
    out
}
|
||||
|
||||
// ── JSONL helpers ───────────────────────────────────────────────────
|
||||
|
||||
/// Read a JSONL file, deserializing each line. Silently skips bad lines.
|
||||
pub fn jsonl_load<T: DeserializeOwned>(path: &Path) -> Vec<T> {
|
||||
let content = match fs::read_to_string(path) {
|
||||
Ok(c) => c,
|
||||
Err(_) => return Vec::new(),
|
||||
};
|
||||
content.lines()
|
||||
.filter_map(|line| serde_json::from_str(line).ok())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Append one record as a JSON line to a file (create if missing).
|
||||
pub fn jsonl_append<T: Serialize>(path: &Path, item: &T) -> Result<(), String> {
|
||||
let json = serde_json::to_string(item)
|
||||
.map_err(|e| format!("serialize: {}", e))?;
|
||||
let mut f = fs::OpenOptions::new()
|
||||
.create(true).append(true).open(path)
|
||||
.map_err(|e| format!("open {}: {}", path.display(), e))?;
|
||||
writeln!(f, "{}", json)
|
||||
.map_err(|e| format!("write {}: {}", path.display(), e))
|
||||
}
|
||||
|
||||
/// Parse a timestamp string to unix epoch seconds.
|
||||
/// Handles: "2026-03-05T19:56:00", "2026-03-05T19:56", "2026-03-05 19:56:00", "2026-03-05 19:56"
|
||||
pub fn parse_timestamp_to_epoch(ts: &str) -> Option<i64> {
|
||||
use chrono::{Local, NaiveDateTime, TimeZone};
|
||||
let formats = ["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M", "%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M"];
|
||||
for fmt in &formats {
|
||||
if let Ok(ndt) = NaiveDateTime::parse_from_str(ts, fmt)
|
||||
&& let Some(dt) = Local.from_local_datetime(&ndt).earliest() {
|
||||
return Some(dt.timestamp());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue