digest: replace method dispatch with fn pointer fields on DigestLevel
The gather() and find_args() methods dispatched on child_prefix via match, duplicating the list of digest levels. Replace with fn pointer fields so each DigestLevel const carries its own behavior directly — no enum-like dispatch needed. Also replaces child_prefix with journal_input bool for format_inputs.
This commit is contained in:
parent
b083cc433c
commit
849c6c4b98
1 changed files with 86 additions and 96 deletions
182
src/digest.rs
182
src/digest.rs
|
|
@ -24,8 +24,10 @@ struct DigestLevel {
|
|||
period: &'static str, // "Date", "Week", "Month"
|
||||
input_title: &'static str,
|
||||
instructions: &'static str,
|
||||
child_prefix: Option<&'static str>,
|
||||
timeout: u64,
|
||||
journal_input: bool, // true for daily (journal entries), false for child digests
|
||||
gather: fn(&Store, &str) -> Result<(String, Vec<(String, String)>), String>,
|
||||
find_args: fn(&[String], &str) -> Vec<String>,
|
||||
}
|
||||
|
||||
const DAILY: DigestLevel = DigestLevel {
|
||||
|
|
@ -67,8 +69,10 @@ Include the original timestamp as a reference.]
|
|||
```
|
||||
|
||||
If a concept doesn't have a matching key, note it with "NEW:" prefix."#,
|
||||
child_prefix: None,
|
||||
timeout: 300,
|
||||
journal_input: true,
|
||||
gather: gather_daily,
|
||||
find_args: find_daily_args,
|
||||
};
|
||||
|
||||
const WEEKLY: DigestLevel = DigestLevel {
|
||||
|
|
@ -113,8 +117,10 @@ const WEEKLY: DigestLevel = DigestLevel {
|
|||
## Looking ahead
|
||||
[What's unfinished? What threads continue into next week?]
|
||||
```"#,
|
||||
child_prefix: Some("daily"),
|
||||
timeout: 300,
|
||||
journal_input: false,
|
||||
gather: gather_weekly,
|
||||
find_args: find_weekly_args,
|
||||
};
|
||||
|
||||
const MONTHLY: DigestLevel = DigestLevel {
|
||||
|
|
@ -173,8 +179,10 @@ Read all the weekly digests and synthesize the month's story.
|
|||
## Looking ahead
|
||||
[What threads carry into next month? What's unfinished?]
|
||||
```"#,
|
||||
child_prefix: Some("weekly"),
|
||||
timeout: 600,
|
||||
journal_input: false,
|
||||
gather: gather_monthly,
|
||||
find_args: find_monthly_args,
|
||||
};
|
||||
|
||||
// --- Input gathering ---
|
||||
|
|
@ -192,95 +200,77 @@ fn load_child_digests(prefix: &str, labels: &[String]) -> Result<Vec<(String, St
|
|||
Ok(digests)
|
||||
}
|
||||
|
||||
impl DigestLevel {
|
||||
/// Find candidate args from journal dates for auto-detection.
|
||||
/// Returns args suitable for passing to gather().
|
||||
fn find_args(&self, dates: &[String], today: &str) -> Vec<String> {
|
||||
match self.child_prefix {
|
||||
None => {
|
||||
// Daily: each date is a candidate, skip today
|
||||
dates.iter()
|
||||
.filter(|d| d.as_str() != today)
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
Some("daily") => {
|
||||
// Weekly: group dates by week, return one date per complete week
|
||||
let mut weeks: BTreeMap<String, String> = BTreeMap::new();
|
||||
for date in dates {
|
||||
if let Ok((wl, _)) = week_dates(date) {
|
||||
weeks.entry(wl).or_insert_with(|| date.clone());
|
||||
}
|
||||
}
|
||||
weeks.into_values()
|
||||
.filter(|date| {
|
||||
week_dates(date).map_or(false, |(_, days)|
|
||||
days.last().unwrap() < &today.to_string())
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
Some(_) => {
|
||||
// Monthly: group dates by month, return labels for past months
|
||||
let now = Local::now();
|
||||
let cur = (now.year(), now.month());
|
||||
let mut months: BTreeSet<(i32, u32)> = BTreeSet::new();
|
||||
for date in dates {
|
||||
if let Ok(nd) = NaiveDate::parse_from_str(date, "%Y-%m-%d") {
|
||||
months.insert((nd.year(), nd.month()));
|
||||
}
|
||||
}
|
||||
months.into_iter()
|
||||
.filter(|ym| *ym < cur)
|
||||
.map(|(y, m)| format!("{}-{:02}", y, m))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Daily digest candidates: every journal date except `today`
/// (today's entries may still be growing, so it is skipped).
fn find_daily_args(dates: &[String], today: &str) -> Vec<String> {
    let mut candidates = Vec::with_capacity(dates.len());
    for date in dates {
        if date != today {
            candidates.push(date.clone());
        }
    }
    candidates
}
|
||||
|
||||
/// Gather inputs for this digest level. Returns (label, inputs).
|
||||
/// For daily: arg is a date, gathers journal entries from store.
|
||||
/// For weekly: arg is any date in the week, computes week label.
|
||||
/// For monthly: arg is "YYYY-MM" (or empty for current month).
|
||||
fn gather(&self, store: &Store, arg: &str) -> Result<(String, Vec<(String, String)>), String> {
|
||||
match self.child_prefix {
|
||||
None => {
|
||||
// Daily: gather journal entries for this date
|
||||
let date_re = Regex::new(&format!(
|
||||
r"^journal\.md#j-{}", regex::escape(arg)
|
||||
)).unwrap();
|
||||
let mut entries: Vec<_> = store.nodes.values()
|
||||
.filter(|n| date_re.is_match(&n.key))
|
||||
.map(|n| {
|
||||
let label = n.key.strip_prefix("journal.md#j-").unwrap_or(&n.key);
|
||||
(label.to_string(), n.content.clone())
|
||||
})
|
||||
.collect();
|
||||
entries.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
Ok((arg.to_string(), entries))
|
||||
}
|
||||
Some("daily") => {
|
||||
// Weekly: compute week from date, load daily digests
|
||||
let (week_label, dates) = week_dates(arg)?;
|
||||
let inputs = load_child_digests("daily", &dates)?;
|
||||
Ok((week_label, inputs))
|
||||
}
|
||||
Some(prefix) => {
|
||||
// Monthly: parse month arg, load weekly digests
|
||||
let (year, month) = if arg.is_empty() {
|
||||
let now = Local::now();
|
||||
(now.year(), now.month())
|
||||
} else {
|
||||
let d = NaiveDate::parse_from_str(&format!("{}-01", arg), "%Y-%m-%d")
|
||||
.map_err(|e| format!("bad month '{}': {} (expected YYYY-MM)", arg, e))?;
|
||||
(d.year(), d.month())
|
||||
};
|
||||
let label = format!("{}-{:02}", year, month);
|
||||
let child_labels = weeks_in_month(year, month);
|
||||
let inputs = load_child_digests(prefix, &child_labels)?;
|
||||
Ok((label, inputs))
|
||||
}
|
||||
fn find_weekly_args(dates: &[String], today: &str) -> Vec<String> {
|
||||
let mut weeks: BTreeMap<String, String> = BTreeMap::new();
|
||||
for date in dates {
|
||||
if let Ok((wl, _)) = week_dates(date) {
|
||||
weeks.entry(wl).or_insert_with(|| date.clone());
|
||||
}
|
||||
}
|
||||
weeks.into_values()
|
||||
.filter(|date| {
|
||||
week_dates(date).map_or(false, |(_, days)|
|
||||
days.last().unwrap() < &today.to_string())
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn find_monthly_args(dates: &[String], _today: &str) -> Vec<String> {
|
||||
let now = Local::now();
|
||||
let cur = (now.year(), now.month());
|
||||
let mut months: BTreeSet<(i32, u32)> = BTreeSet::new();
|
||||
for date in dates {
|
||||
if let Ok(nd) = NaiveDate::parse_from_str(date, "%Y-%m-%d") {
|
||||
months.insert((nd.year(), nd.month()));
|
||||
}
|
||||
}
|
||||
months.into_iter()
|
||||
.filter(|ym| *ym < cur)
|
||||
.map(|(y, m)| format!("{}-{:02}", y, m))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn gather_daily(store: &Store, date: &str) -> Result<(String, Vec<(String, String)>), String> {
|
||||
let date_re = Regex::new(&format!(
|
||||
r"^journal\.md#j-{}", regex::escape(date)
|
||||
)).unwrap();
|
||||
let mut entries: Vec<_> = store.nodes.values()
|
||||
.filter(|n| date_re.is_match(&n.key))
|
||||
.map(|n| {
|
||||
let label = n.key.strip_prefix("journal.md#j-").unwrap_or(&n.key);
|
||||
(label.to_string(), n.content.clone())
|
||||
})
|
||||
.collect();
|
||||
entries.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
Ok((date.to_string(), entries))
|
||||
}
|
||||
|
||||
fn gather_weekly(_store: &Store, date: &str) -> Result<(String, Vec<(String, String)>), String> {
|
||||
let (week_label, dates) = week_dates(date)?;
|
||||
let inputs = load_child_digests("daily", &dates)?;
|
||||
Ok((week_label, inputs))
|
||||
}
|
||||
|
||||
fn gather_monthly(_store: &Store, arg: &str) -> Result<(String, Vec<(String, String)>), String> {
|
||||
let (year, month) = if arg.is_empty() {
|
||||
let now = Local::now();
|
||||
(now.year(), now.month())
|
||||
} else {
|
||||
let d = NaiveDate::parse_from_str(&format!("{}-01", arg), "%Y-%m-%d")
|
||||
.map_err(|e| format!("bad month '{}': {} (expected YYYY-MM)", arg, e))?;
|
||||
(d.year(), d.month())
|
||||
};
|
||||
let label = format!("{}-{:02}", year, month);
|
||||
let child_labels = weeks_in_month(year, month);
|
||||
let inputs = load_child_digests("weekly", &child_labels)?;
|
||||
Ok((label, inputs))
|
||||
}
|
||||
|
||||
// --- Unified generator ---
|
||||
|
|
@ -317,7 +307,7 @@ fn generate_digest(
|
|||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
let content = format_inputs(inputs, level.child_prefix.is_none());
|
||||
let content = format_inputs(inputs, level.journal_input);
|
||||
let covered = inputs.iter()
|
||||
.map(|(l, _)| l.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
|
|
@ -354,17 +344,17 @@ fn generate_digest(
|
|||
// --- Public API ---
|
||||
|
||||
pub fn generate_daily(store: &mut Store, date: &str) -> Result<(), String> {
|
||||
let (label, inputs) = DAILY.gather(store, date)?;
|
||||
let (label, inputs) = (DAILY.gather)(store, date)?;
|
||||
generate_digest(store, &DAILY, &label, &inputs)
|
||||
}
|
||||
|
||||
pub fn generate_weekly(store: &mut Store, date: &str) -> Result<(), String> {
|
||||
let (label, inputs) = WEEKLY.gather(store, date)?;
|
||||
let (label, inputs) = (WEEKLY.gather)(store, date)?;
|
||||
generate_digest(store, &WEEKLY, &label, &inputs)
|
||||
}
|
||||
|
||||
pub fn generate_monthly(store: &mut Store, month_arg: &str) -> Result<(), String> {
|
||||
let (label, inputs) = MONTHLY.gather(store, month_arg)?;
|
||||
let (label, inputs) = (MONTHLY.gather)(store, month_arg)?;
|
||||
generate_digest(store, &MONTHLY, &label, &inputs)
|
||||
}
|
||||
|
||||
|
|
@ -418,12 +408,12 @@ pub fn digest_auto(store: &mut Store) -> Result<(), String> {
|
|||
let mut total = 0u32;
|
||||
|
||||
for level in LEVELS {
|
||||
let args = level.find_args(&dates, &today);
|
||||
let args = (level.find_args)(&dates, &today);
|
||||
let mut generated = 0u32;
|
||||
let mut skipped = 0u32;
|
||||
|
||||
for arg in &args {
|
||||
let (label, inputs) = level.gather(store, arg)?;
|
||||
let (label, inputs) = (level.gather)(store, arg)?;
|
||||
if epi.join(format!("{}-{}.md", level.name, label)).exists() {
|
||||
skipped += 1;
|
||||
continue;
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue