update tokenizers

This commit is contained in:
Kent Overstreet 2026-04-13 22:39:50 -04:00
parent 4d22a28794
commit be909028a7
3 changed files with 15 additions and 35 deletions

View file

@ -128,16 +128,6 @@ pub fn recent_by_provenance(db: &Database, provenance: &str, limit: usize) -> Re
Ok(results)
}
/// Get node metadata (uuid, node_type, timestamp) from KEY_TO_UUID.
///
/// # Errors
/// Propagates any failure to begin the read transaction, open the
/// table, or perform the lookup. A missing key is not an error: it
/// yields `Ok(None)`.
pub fn get_node_meta(db: &Database, key: &str) -> Result<Option<([u8; 16], u8, i64)>> {
    let txn = db.begin_read()?;
    let table = txn.open_table(KEY_TO_UUID)?;
    // `Option::map` replaces the manual Some/None match; the access guard
    // is consumed inside the closure and only the owned tuple escapes.
    Ok(table.get(key)?.map(|guard| unpack_node_meta(guard.value())))
}
/// Get offset for a node by key.
pub fn get_offset(db: &Database, key: &str) -> Result<Option<u64>> {
let txn = db.begin_read()?;