2026-03-03 17:18:18 -05:00
|
|
|
// Episodic digest generation: daily, weekly, monthly, auto
|
2026-02-28 23:58:05 -05:00
|
|
|
//
|
2026-03-03 17:34:00 -05:00
|
|
|
// Three digest levels form a temporal hierarchy: daily digests summarize
|
|
|
|
|
// journal entries, weekly digests summarize dailies, monthly digests
|
|
|
|
|
// summarize weeklies. All three share the same generate/auto-detect
|
|
|
|
|
// pipeline, parameterized by DigestLevel.
|
2026-03-03 17:18:18 -05:00
|
|
|
|
|
|
|
|
use crate::llm::{call_sonnet, semantic_keys};
use crate::neuro;
use crate::store::{self, Store, new_relation};

use chrono::{Datelike, Duration, Local, NaiveDate};
use regex::Regex;

use std::collections::BTreeSet;
use std::sync::OnceLock;
|
2026-02-28 23:58:05 -05:00
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
// --- Digest level descriptors ---
|
2026-02-28 23:58:05 -05:00
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
/// Descriptor for one digest level (daily / weekly / monthly).
/// The generate/auto-detect pipeline is parameterized entirely by these
/// fields, so all three levels share one code path.
struct DigestLevel {
    name: &'static str,        // machine name; also part of store keys via digest_node_key()
    title: &'static str,       // human-facing level name, fills {{LEVEL}} in the prompt
    period: &'static str,      // period noun for the prompt, fills {{PERIOD}}
    input_title: &'static str, // heading for the inputs section, fills {{INPUT_TITLE}}
    timeout: u64,              // passed to call_sonnet (300/600; presumably seconds — confirm in llm module)
    child_name: Option<&'static str>, // None = journal (leaf), Some = child digest files
    /// Expand an arg into (canonical_label, dates covered).
    label_dates: fn(&str) -> Result<(String, Vec<String>), String>,
    /// Map a YYYY-MM-DD date to this level's label.
    date_to_label: fn(&str) -> Option<String>,
}
|
|
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
/// Leaf level: digests raw journal entries for a single calendar day.
const DAILY: DigestLevel = DigestLevel {
    name: "daily",
    title: "Daily",
    period: "Date",
    input_title: "Journal entries",
    timeout: 300,
    child_name: None,
    // A day's canonical label is the date itself, covering exactly that date.
    label_dates: |date| {
        let owned = date.to_string();
        Ok((owned.clone(), vec![owned]))
    },
    date_to_label: |date| Some(String::from(date)),
};
|
|
|
|
|
|
2026-03-03 18:06:59 -05:00
|
|
|
/// Week label and 7 dates (Mon-Sun) for the week containing `date`.
|
|
|
|
|
fn week_dates(date: &str) -> Result<(String, Vec<String>), String> {
|
|
|
|
|
let nd = NaiveDate::parse_from_str(date, "%Y-%m-%d")
|
|
|
|
|
.map_err(|e| format!("bad date '{}': {}", date, e))?;
|
|
|
|
|
let iso = nd.iso_week();
|
|
|
|
|
let week_label = format!("{}-W{:02}", iso.year(), iso.week());
|
|
|
|
|
let monday = nd - Duration::days(nd.weekday().num_days_from_monday() as i64);
|
|
|
|
|
let dates = (0..7)
|
|
|
|
|
.map(|i| (monday + Duration::days(i)).format("%Y-%m-%d").to_string())
|
|
|
|
|
.collect();
|
|
|
|
|
Ok((week_label, dates))
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
const WEEKLY: DigestLevel = DigestLevel {
|
|
|
|
|
name: "weekly",
|
|
|
|
|
title: "Weekly",
|
|
|
|
|
period: "Week",
|
|
|
|
|
input_title: "Daily digests",
|
|
|
|
|
timeout: 300,
|
2026-03-03 18:04:21 -05:00
|
|
|
child_name: Some("daily"),
|
2026-03-03 18:06:59 -05:00
|
|
|
label_dates: |arg| {
|
|
|
|
|
if !arg.contains('W') {
|
|
|
|
|
return week_dates(arg);
|
|
|
|
|
}
|
|
|
|
|
let (y, w) = arg.split_once("-W")
|
|
|
|
|
.ok_or_else(|| format!("bad week label: {}", arg))?;
|
|
|
|
|
let year: i32 = y.parse().map_err(|_| format!("bad week year: {}", arg))?;
|
|
|
|
|
let week: u32 = w.parse().map_err(|_| format!("bad week number: {}", arg))?;
|
|
|
|
|
let monday = NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
|
|
|
|
|
.ok_or_else(|| format!("invalid week: {}", arg))?;
|
|
|
|
|
let dates = (0..7)
|
|
|
|
|
.map(|i| (monday + Duration::days(i)).format("%Y-%m-%d").to_string())
|
|
|
|
|
.collect();
|
|
|
|
|
Ok((arg.to_string(), dates))
|
|
|
|
|
},
|
2026-03-03 18:04:21 -05:00
|
|
|
date_to_label: |date| week_dates(date).ok().map(|(l, _)| l),
|
2026-03-03 17:34:00 -05:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const MONTHLY: DigestLevel = DigestLevel {
|
|
|
|
|
name: "monthly",
|
|
|
|
|
title: "Monthly",
|
|
|
|
|
period: "Month",
|
|
|
|
|
input_title: "Weekly digests",
|
|
|
|
|
timeout: 600,
|
2026-03-03 18:04:21 -05:00
|
|
|
child_name: Some("weekly"),
|
2026-03-03 18:06:59 -05:00
|
|
|
label_dates: |arg| {
|
|
|
|
|
let (year, month) = if arg.len() <= 7 {
|
|
|
|
|
let d = NaiveDate::parse_from_str(&format!("{}-01", arg), "%Y-%m-%d")
|
|
|
|
|
.map_err(|e| format!("bad month '{}': {}", arg, e))?;
|
|
|
|
|
(d.year(), d.month())
|
|
|
|
|
} else {
|
|
|
|
|
let d = NaiveDate::parse_from_str(arg, "%Y-%m-%d")
|
|
|
|
|
.map_err(|e| format!("bad date '{}': {}", arg, e))?;
|
|
|
|
|
(d.year(), d.month())
|
|
|
|
|
};
|
|
|
|
|
let label = format!("{}-{:02}", year, month);
|
|
|
|
|
let mut dates = Vec::new();
|
|
|
|
|
let mut day = 1u32;
|
|
|
|
|
while let Some(date) = NaiveDate::from_ymd_opt(year, month, day) {
|
|
|
|
|
if date.month() != month { break; }
|
|
|
|
|
dates.push(date.format("%Y-%m-%d").to_string());
|
|
|
|
|
day += 1;
|
|
|
|
|
}
|
|
|
|
|
Ok((label, dates))
|
|
|
|
|
},
|
2026-03-03 18:04:21 -05:00
|
|
|
date_to_label: |date| NaiveDate::parse_from_str(date, "%Y-%m-%d")
|
|
|
|
|
.ok().map(|d| format!("{}-{:02}", d.year(), d.month())),
|
2026-03-03 17:34:00 -05:00
|
|
|
};
|
|
|
|
|
|
2026-03-03 18:06:59 -05:00
|
|
|
/// All digest levels, ordered leaf-to-root; digest_auto iterates in this
/// order so daily digests exist before weekly/monthly consume them.
const LEVELS: &[&DigestLevel] = &[&DAILY, &WEEKLY, &MONTHLY];
|
|
|
|
|
|
2026-03-05 15:30:57 -05:00
|
|
|
/// Store key for a digest node: "daily-2026-03-04.md", "weekly-2026-W09.md", etc.
/// Matches the key format from the old import_file() path.
fn digest_node_key(level_name: &str, label: &str) -> String {
    let mut key = String::with_capacity(level_name.len() + label.len() + 4);
    key.push_str(level_name);
    key.push('-');
    key.push_str(label);
    key.push_str(".md");
    key
}
|
|
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
// --- Input gathering ---
|
2026-02-28 23:58:05 -05:00
|
|
|
|
2026-03-05 15:30:57 -05:00
|
|
|
/// Load child digest content from the store.
|
|
|
|
|
fn load_child_digests(store: &Store, prefix: &str, labels: &[String]) -> Vec<(String, String)> {
|
2026-02-28 23:58:05 -05:00
|
|
|
let mut digests = Vec::new();
|
|
|
|
|
for label in labels {
|
2026-03-05 15:30:57 -05:00
|
|
|
let key = digest_node_key(prefix, label);
|
|
|
|
|
if let Some(node) = store.nodes.get(&key) {
|
|
|
|
|
digests.push((label.clone(), node.content.clone()));
|
2026-02-28 23:58:05 -05:00
|
|
|
}
|
|
|
|
|
}
|
2026-03-05 15:30:57 -05:00
|
|
|
digests
|
2026-02-28 23:58:05 -05:00
|
|
|
}
|
|
|
|
|
|
2026-03-03 18:04:21 -05:00
|
|
|
/// Unified: gather inputs for any digest level.
///
/// Expands `arg` into this level's canonical label and the dates it covers,
/// then collects the (label, content) pairs that feed the digest: child
/// digests for weekly/monthly, raw journal entries for daily.
/// Returns `(label, inputs)`; `inputs` may be empty.
fn gather(level: &DigestLevel, store: &Store, arg: &str) -> Result<(String, Vec<(String, String)>), String> {
    let (label, dates) = (level.label_dates)(arg)?;

    let inputs = if let Some(child_name) = level.child_name {
        // Map parent's dates through child's date_to_label → child labels
        let child = LEVELS.iter()
            .find(|l| l.name == child_name)
            .expect("invalid child_name");
        // BTreeSet both dedups (many dates map to one child label) and
        // yields the labels in sorted order.
        let child_labels: Vec<String> = dates.iter()
            .filter_map(|d| (child.date_to_label)(d))
            .collect::<BTreeSet<_>>()
            .into_iter()
            .collect();
        load_child_digests(store, child_name, &child_labels)
    } else {
        // Leaf level: scan store for journal entries matching label.
        // Keys look like "journal.md#j-<timestamp>"; anchoring the escaped
        // label right after the prefix matches every entry of that day.
        let date_re = Regex::new(&format!(
            r"^journal\.md#j-{}", regex::escape(&label)
        )).unwrap();
        let mut entries: Vec<_> = store.nodes.values()
            .filter(|n| date_re.is_match(&n.key))
            .map(|n| {
                let ts = n.key.strip_prefix("journal.md#j-").unwrap_or(&n.key);
                (ts.to_string(), n.content.clone())
            })
            .collect();
        // Chronological order by the timestamp suffix.
        entries.sort_by(|a, b| a.0.cmp(&b.0));
        entries
    };

    Ok((label, inputs))
}
|
2026-03-03 17:42:50 -05:00
|
|
|
|
2026-03-03 18:04:21 -05:00
|
|
|
/// Unified: find candidate labels for auto-generation (past, not yet generated).
|
|
|
|
|
fn find_candidates(level: &DigestLevel, dates: &[String], today: &str) -> Vec<String> {
|
|
|
|
|
let today_label = (level.date_to_label)(today);
|
|
|
|
|
dates.iter()
|
|
|
|
|
.filter_map(|d| (level.date_to_label)(d))
|
|
|
|
|
.collect::<BTreeSet<_>>()
|
|
|
|
|
.into_iter()
|
|
|
|
|
.filter(|l| Some(l) != today_label.as_ref())
|
2026-03-03 17:48:24 -05:00
|
|
|
.collect()
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
// --- Unified generator ---
|
|
|
|
|
|
|
|
|
|
/// Render gathered inputs as one markdown blob for the prompt.
/// Daily digests get "### <label>" subsections (journal entries);
/// higher levels get "---"-separated "## <label>" sections (child digests).
fn format_inputs(inputs: &[(String, String)], daily: bool) -> String {
    inputs
        .iter()
        .map(|(label, content)| {
            if daily {
                format!("\n### {}\n\n{}\n", label, content)
            } else {
                format!("\n---\n## {}\n{}\n", label, content)
            }
        })
        .collect()
}
|
|
|
|
|
|
|
|
|
|
/// Generate and persist one digest: build the prompt from gathered inputs,
/// call the LLM, and upsert the result into the store under the digest key.
///
/// No-op (Ok) when `inputs` is empty. Errors propagate from prompt loading,
/// the LLM call, and store persistence.
fn generate_digest(
    store: &mut Store,
    level: &DigestLevel,
    label: &str,
    inputs: &[(String, String)],
) -> Result<(), String> {
    println!("Generating {} digest for {}...", level.name, label);

    if inputs.is_empty() {
        println!("  No inputs found for {}", label);
        return Ok(());
    }
    println!("  {} inputs", inputs.len());

    // Existing semantic keys are offered to the model so its Links section
    // can reference real nodes.
    let keys = semantic_keys(store);
    let keys_text = keys.iter()
        .map(|k| format!("  - {}", k))
        .collect::<Vec<_>>()
        .join("\n");

    // child_name == None means daily, which uses the journal-entry layout.
    let content = format_inputs(inputs, level.child_name.is_none());
    // Comma-separated list of input labels, for the {{COVERED}} slot.
    let covered = inputs.iter()
        .map(|(l, _)| l.as_str())
        .collect::<Vec<_>>()
        .join(", ");

    let prompt = neuro::load_prompt("digest", &[
        ("{{LEVEL}}", level.title),
        ("{{PERIOD}}", level.period),
        ("{{INPUT_TITLE}}", level.input_title),
        ("{{LABEL}}", label),
        ("{{CONTENT}}", &content),
        ("{{COVERED}}", &covered),
        ("{{KEYS}}", &keys_text),
    ])?;
    println!("  Prompt: {} chars (~{} tokens)", prompt.len(), prompt.len() / 4);

    println!("  Calling Sonnet...");
    let digest = call_sonnet(&prompt, level.timeout)?;

    // Persist under the canonical digest key, marked as agent-generated,
    // and save immediately so a later failure cannot lose this digest.
    let key = digest_node_key(level.name, label);
    store.upsert_provenance(&key, &digest, store::Provenance::AgentDigest)?;
    store.save()?;
    println!("  Stored: {}", key);

    println!("  Done: {} lines", digest.lines().count());
    Ok(())
}
|
|
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
// --- Public API ---
|
2026-02-28 23:58:05 -05:00
|
|
|
|
2026-03-03 18:04:21 -05:00
|
|
|
pub fn generate(store: &mut Store, level_name: &str, arg: &str) -> Result<(), String> {
|
|
|
|
|
let level = LEVELS.iter()
|
|
|
|
|
.find(|l| l.name == level_name)
|
|
|
|
|
.ok_or_else(|| format!("unknown digest level: {}", level_name))?;
|
|
|
|
|
let (label, inputs) = gather(level, store, arg)?;
|
|
|
|
|
generate_digest(store, level, &label, &inputs)
|
2026-03-03 17:34:00 -05:00
|
|
|
}
|
2026-02-28 23:58:05 -05:00
|
|
|
|
2026-03-03 17:34:00 -05:00
|
|
|
// --- Auto-detect and generate missing digests ---
|
2026-03-01 07:14:03 -05:00
|
|
|
|
|
|
|
|
/// Detect and generate all missing digests, leaf-to-root.
///
/// Scans journal keys for covered dates, derives candidate labels per level
/// (excluding the current period), and generates any digest that does not
/// yet exist. Processing LEVELS in order means dailies created in this run
/// are visible when weeklies gather their children, and so on.
pub fn digest_auto(store: &mut Store) -> Result<(), String> {
    let today = Local::now().format("%Y-%m-%d").to_string();

    // Collect all dates with journal entries
    let date_re = Regex::new(r"^\d{4}-\d{2}-\d{2}").unwrap();
    let dates: Vec<String> = store.nodes.keys()
        .filter_map(|key| {
            key.strip_prefix("journal.md#j-")
                // Length guard makes the byte-slice below safe; the regex
                // guarantees those 10 bytes are an ASCII date.
                .filter(|rest| rest.len() >= 10 && date_re.is_match(rest))
                .map(|rest| rest[..10].to_string())
        })
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect();

    let mut total = 0u32;

    for level in LEVELS {
        let candidates = find_candidates(level, &dates, &today);
        let mut generated = 0u32;
        let mut skipped = 0u32;

        for arg in &candidates {
            // NOTE(review): gather() runs (store scan / child lookup) even
            // for digests that already exist; computing only the label first
            // would avoid that work, at the cost of reordering error paths.
            let (label, inputs) = gather(level, store, arg)?;
            let key = digest_node_key(level.name, &label);
            if store.nodes.contains_key(&key) {
                skipped += 1;
                continue;
            }
            if inputs.is_empty() { continue; }
            println!("[auto] Missing {} digest for {}", level.name, label);
            generate_digest(store, level, &label, &inputs)?;
            generated += 1;
        }

        println!("[auto] {}: {} generated, {} existed", level.name, generated, skipped);
        total += generated;
    }

    if total == 0 {
        println!("[auto] All digests up to date.");
    } else {
        println!("[auto] Generated {} total digests.", total);
    }
    Ok(())
}
|
|
|
|
|
|
2026-03-01 00:10:03 -05:00
|
|
|
// --- Digest link parsing ---
|
|
|
|
|
// Replaces digest-link-parser.py: parses ## Links sections from digest
|
|
|
|
|
// files and applies them to the memory graph.
|
|
|
|
|
|
|
|
|
|
/// A parsed link from a digest's Links section.
pub struct DigestLink {
    /// Normalized key of the left-hand side of the arrow.
    pub source: String,
    /// Normalized key of the right-hand side of the arrow.
    pub target: String,
    /// Parenthesized rationale from the link line; empty when absent.
    pub reason: String,
    /// Store key of the digest node this link was parsed from.
    pub file: String,
}
|
|
|
|
|
|
|
|
|
|
/// Normalize a raw link target to a poc-memory key.
|
|
|
|
|
fn normalize_link_key(raw: &str) -> String {
|
|
|
|
|
let key = raw.trim().trim_matches('`').trim();
|
|
|
|
|
if key.is_empty() { return String::new(); }
|
|
|
|
|
|
|
|
|
|
// Self-references
|
|
|
|
|
let lower = key.to_lowercase();
|
|
|
|
|
if lower.starts_with("this ") { return String::new(); }
|
|
|
|
|
|
|
|
|
|
let mut key = key.to_string();
|
|
|
|
|
|
|
|
|
|
// weekly/2026-W06 → weekly-2026-W06, etc.
|
|
|
|
|
if let Some(pos) = key.find('/') {
|
|
|
|
|
let prefix = &key[..pos];
|
|
|
|
|
if prefix == "daily" || prefix == "weekly" || prefix == "monthly" {
|
|
|
|
|
let rest = &key[pos + 1..];
|
|
|
|
|
key = format!("{}-{}", prefix, rest);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// daily-2026-02-04 → daily-2026-02-04.md
|
|
|
|
|
let re = Regex::new(r"^(daily|weekly|monthly)-\d{4}").unwrap();
|
|
|
|
|
if re.is_match(&key) && !key.ends_with(".md") {
|
|
|
|
|
key.push_str(".md");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Bare date → daily digest
|
|
|
|
|
let date_re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
|
|
|
|
|
if date_re.is_match(key.strip_suffix(".md").unwrap_or(&key)) {
|
|
|
|
|
let date = key.strip_suffix(".md").unwrap_or(&key);
|
|
|
|
|
key = format!("daily-{}.md", date);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Ensure .md extension
|
|
|
|
|
if key.contains('#') {
|
|
|
|
|
let (file, section) = key.split_once('#').unwrap();
|
|
|
|
|
if !file.ends_with(".md") {
|
|
|
|
|
key = format!("{}.md#{}", file, section);
|
|
|
|
|
}
|
|
|
|
|
} else if !key.ends_with(".md") && !key.contains('/') && !key.starts_with("NEW:") {
|
|
|
|
|
key.push_str(".md");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
key
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-05 15:30:57 -05:00
|
|
|
/// Parse the Links section from a digest node's content.
///
/// Scans the "## Links" section (until the next "## " heading) for bullet
/// lines of the form `- A → B (reason)`, normalizes both endpoints, and
/// resolves self-references to the digest's own `key`. NEW: targets and
/// self-links are dropped.
fn parse_digest_node_links(key: &str, content: &str) -> Vec<DigestLink> {

    // NOTE(review): the left side is always treated as source and the right
    // as target, even for the ← arrow — confirm whether ← should swap them.
    let link_re = Regex::new(r"^-\s+(.+?)\s*[→↔←]\s*(.+?)(?:\s*\((.+?)\))?\s*$").unwrap();
    let header_re = Regex::new(r"^##\s+Links").unwrap();
    let mut links = Vec::new();
    let mut in_links = false;

    for line in content.lines() {
        if header_re.is_match(line) {
            in_links = true;
            continue;
        }
        // Any other "## " heading ends the Links section.
        if in_links && line.starts_with("## ") {
            in_links = false;
            continue;
        }
        if !in_links { continue; }
        // Skip sub-headings and bold annotations inside the section.
        if line.starts_with("###") || line.starts_with("**") { continue; }

        if let Some(cap) = link_re.captures(line) {
            let raw_source = cap[1].trim();
            let raw_target = cap[2].trim();
            let reason = cap.get(3).map(|m| m.as_str().to_string()).unwrap_or_default();

            let mut source = normalize_link_key(raw_source);
            let mut target = normalize_link_key(raw_target);

            // Replace self-references with digest key
            // (normalize_link_key returns "" for blank/"this …" targets).
            if source.is_empty() { source = key.to_string(); }
            if target.is_empty() { target = key.to_string(); }

            // Handle "this daily/weekly/monthly" in raw text
            let raw_s_lower = raw_source.to_lowercase();
            let raw_t_lower = raw_target.to_lowercase();
            if raw_s_lower.contains("this daily") || raw_s_lower.contains("this weekly")
                || raw_s_lower.contains("this monthly")
            {
                source = key.to_string();
            }
            if raw_t_lower.contains("this daily") || raw_t_lower.contains("this weekly")
                || raw_t_lower.contains("this monthly")
            {
                target = key.to_string();
            }

            // Skip NEW: and self-links
            if source.starts_with("NEW:") || target.starts_with("NEW:") { continue; }
            if source == target { continue; }

            links.push(DigestLink { source, target, reason, file: key.to_string() });
        }
    }

    links
}
|
|
|
|
|
|
2026-03-05 15:30:57 -05:00
|
|
|
/// Parse links from all digest nodes in the store.
|
|
|
|
|
pub fn parse_all_digest_links(store: &Store) -> Vec<DigestLink> {
|
2026-03-01 00:10:03 -05:00
|
|
|
let mut all_links = Vec::new();
|
|
|
|
|
|
2026-03-05 15:30:57 -05:00
|
|
|
let mut digest_keys: Vec<&String> = store.nodes.keys()
|
|
|
|
|
.filter(|k| k.starts_with("daily-")
|
|
|
|
|
|| k.starts_with("weekly-")
|
|
|
|
|
|| k.starts_with("monthly-"))
|
|
|
|
|
.collect();
|
|
|
|
|
digest_keys.sort();
|
|
|
|
|
|
|
|
|
|
for key in digest_keys {
|
|
|
|
|
if let Some(node) = store.nodes.get(key) {
|
|
|
|
|
all_links.extend(parse_digest_node_links(key, &node.content));
|
2026-03-01 00:10:03 -05:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Deduplicate by (source, target) pair
|
|
|
|
|
let mut seen = std::collections::HashSet::new();
|
|
|
|
|
all_links.retain(|link| seen.insert((link.source.clone(), link.target.clone())));
|
|
|
|
|
|
2026-03-05 15:30:57 -05:00
|
|
|
all_links
|
2026-03-01 00:10:03 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Apply parsed digest links to the store.
|
|
|
|
|
pub fn apply_digest_links(store: &mut Store, links: &[DigestLink]) -> (usize, usize, usize) {
|
|
|
|
|
let mut applied = 0usize;
|
|
|
|
|
let mut skipped = 0usize;
|
|
|
|
|
let mut fallbacks = 0usize;
|
|
|
|
|
|
|
|
|
|
for link in links {
|
|
|
|
|
// Try resolving both keys
|
|
|
|
|
let source = match store.resolve_key(&link.source) {
|
|
|
|
|
Ok(s) => s,
|
|
|
|
|
Err(_) => {
|
|
|
|
|
// Try stripping section anchor as fallback
|
|
|
|
|
if let Some(base) = link.source.split('#').next() {
|
|
|
|
|
match store.resolve_key(base) {
|
|
|
|
|
Ok(s) => { fallbacks += 1; s }
|
|
|
|
|
Err(_) => { skipped += 1; continue; }
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
skipped += 1; continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
let target = match store.resolve_key(&link.target) {
|
|
|
|
|
Ok(t) => t,
|
|
|
|
|
Err(_) => {
|
|
|
|
|
if let Some(base) = link.target.split('#').next() {
|
|
|
|
|
match store.resolve_key(base) {
|
|
|
|
|
Ok(t) => { fallbacks += 1; t }
|
|
|
|
|
Err(_) => { skipped += 1; continue; }
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
skipped += 1; continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2026-03-01 00:33:46 -05:00
|
|
|
// Refine target to best-matching section if available
|
|
|
|
|
let source_content = store.nodes.get(&source)
|
|
|
|
|
.map(|n| n.content.as_str()).unwrap_or("");
|
|
|
|
|
let target = neuro::refine_target(store, source_content, &target);
|
|
|
|
|
|
2026-03-01 00:10:03 -05:00
|
|
|
if source == target { skipped += 1; continue; }
|
|
|
|
|
|
|
|
|
|
// Check if link already exists
|
|
|
|
|
let exists = store.relations.iter().any(|r|
|
|
|
|
|
r.source_key == source && r.target_key == target && !r.deleted
|
|
|
|
|
);
|
|
|
|
|
if exists { skipped += 1; continue; }
|
|
|
|
|
|
|
|
|
|
let source_uuid = match store.nodes.get(&source) {
|
|
|
|
|
Some(n) => n.uuid,
|
|
|
|
|
None => { skipped += 1; continue; }
|
|
|
|
|
};
|
|
|
|
|
let target_uuid = match store.nodes.get(&target) {
|
|
|
|
|
Some(n) => n.uuid,
|
|
|
|
|
None => { skipped += 1; continue; }
|
|
|
|
|
};
|
|
|
|
|
|
2026-03-03 12:56:15 -05:00
|
|
|
let rel = new_relation(
|
2026-03-01 00:10:03 -05:00
|
|
|
source_uuid, target_uuid,
|
2026-03-03 12:56:15 -05:00
|
|
|
store::RelationType::Link,
|
2026-03-01 00:10:03 -05:00
|
|
|
0.5,
|
|
|
|
|
&source, &target,
|
|
|
|
|
);
|
|
|
|
|
if store.add_relation(rel).is_ok() {
|
|
|
|
|
println!(" + {} → {}", source, target);
|
|
|
|
|
applied += 1;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
(applied, skipped, fallbacks)
|
|
|
|
|
}
|