fix(timeline): report true total_events in robot JSON meta

The robot JSON envelope's meta.total_events field was incorrectly
reporting events.len() (the post-limit count), making it identical
to meta.showing. This defeated the purpose of having both fields.

Changes across the pipeline to fix this:

- collect_events now returns (Vec<TimelineEvent>, usize) where the
  second element is the total event count before truncation
- TimelineResult gains a total_events_before_limit field (serde-skipped)
  so the value flows cleanly from collect through to the renderer
- main.rs passes the real total instead of the events.len() workaround

Additional cleanup in this pass:

- Derive PartialEq/Eq/PartialOrd/Ord on TimelineEventType, replacing
  the hand-rolled event_type_discriminant() function. Variant declaration
  order now defines sort tiebreak, documented in a doc comment.
- Validate --since input with a proper LoreError::Other instead of
  silently treating invalid values as None
- Fix ANSI-aware tag column padding with console::pad_str (colored tags
  like "[merged]" were misaligned because ANSI escapes consumed width)
- Remove dead print_timeline_json and infer_max_depth functions that
  were superseded by print_timeline_json_with_meta

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Taylor Eernisse
2026-02-06 09:35:02 -05:00
parent f1cb45a168
commit 32783080f1
5 changed files with 47 additions and 73 deletions

View File

@@ -1,9 +1,9 @@
use console::style;
use console::{Alignment, pad_str, style};
use serde::Serialize;
use crate::Config;
use crate::core::db::create_connection;
use crate::core::error::Result;
use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::{ms_to_iso, parse_since};
@@ -38,7 +38,17 @@ pub fn run_timeline(config: &Config, params: &TimelineParams) -> Result<Timeline
.map(|p| resolve_project(&conn, p))
.transpose()?;
let since_ms = params.since.as_deref().and_then(parse_since);
let since_ms = params
.since
.as_deref()
.map(|s| {
parse_since(s).ok_or_else(|| {
LoreError::Other(format!(
"Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
))
})
})
.transpose()?;
// Stage 1+2: SEED + HYDRATE
let seed_result = seed_timeline(
@@ -60,7 +70,7 @@ pub fn run_timeline(config: &Config, params: &TimelineParams) -> Result<Timeline
)?;
// Stage 4: COLLECT
let events = collect_events(
let (events, total_before_limit) = collect_events(
&conn,
&seed_result.seed_entities,
&expand_result.expanded_entities,
@@ -72,6 +82,7 @@ pub fn run_timeline(config: &Config, params: &TimelineParams) -> Result<Timeline
Ok(TimelineResult {
query: params.query.clone(),
events,
total_events_before_limit: total_before_limit,
seed_entities: seed_result.seed_entities,
expanded_entities: expand_result.expanded_entities,
unresolved_references: expand_result.unresolved_references,
@@ -125,7 +136,8 @@ fn print_timeline_event(event: &TimelineEvent) {
let expanded_marker = if event.is_seed { "" } else { " [expanded]" };
let summary = truncate_summary(&event.summary, 50);
println!("{date} {tag:12} {entity_ref:7} {summary:50} {actor}{expanded_marker}");
let tag_padded = pad_str(&tag, 12, Alignment::Left, None);
println!("{date} {tag_padded} {entity_ref:7} {summary:50} {actor}{expanded_marker}");
// Show snippet for evidence notes
if let TimelineEventType::NoteEvidence { snippet, .. } = &event.event_type
@@ -235,29 +247,6 @@ fn wrap_snippet(text: &str, width: usize) -> Vec<String> {
// ─── Robot JSON output ───────────────────────────────────────────────────────
/// Render timeline as robot-mode JSON in {ok, data, meta} envelope.
///
/// Note: this variant cannot know the caller's CLI flags, so
/// `expand_mentions` is fixed at `false`; `print_timeline_json_with_meta`
/// exists for callers that can supply explicit meta values.
pub fn print_timeline_json(result: &TimelineResult, total_events_before_limit: usize) {
    // Entities seen across both pipeline stages (seed + expand).
    let total_entities = result.seed_entities.len() + result.expanded_entities.len();
    let output = TimelineJsonEnvelope {
        ok: true,
        data: TimelineDataJson::from_result(result),
        meta: TimelineMetaJson {
            search_mode: "lexical".to_owned(),
            expansion_depth: infer_max_depth(&result.expanded_entities),
            // Not recoverable from the result data; hardcoded here.
            expand_mentions: false,
            total_entities,
            // Pre-limit count, distinct from `showing` (post-limit).
            total_events: total_events_before_limit,
            evidence_notes_included: count_evidence_notes(&result.events),
            unresolved_references: result.unresolved_references.len(),
            showing: result.events.len(),
        },
    };
    // Serialization failure is reported on stderr rather than panicking,
    // so robot-mode consumers never receive a partial line on stdout.
    match serde_json::to_string(&output) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing timeline JSON: {e}"),
    }
}
/// Extended version that accepts explicit meta values from the caller.
pub fn print_timeline_json_with_meta(
result: &TimelineResult,
total_events_before_limit: usize,
@@ -482,10 +471,6 @@ struct TimelineMetaJson {
showing: usize,
}
/// Deepest expansion level present in `expanded`; 0 when the slice is empty.
fn infer_max_depth(expanded: &[ExpandedEntityRef]) -> u32 {
    expanded.iter().fold(0, |deepest, e| deepest.max(e.depth))
}
fn count_evidence_notes(events: &[TimelineEvent]) -> usize {
events
.iter()

View File

@@ -26,7 +26,7 @@ impl PartialEq for TimelineEvent {
fn eq(&self, other: &Self) -> bool {
self.timestamp == other.timestamp
&& self.entity_id == other.entity_id
&& self.event_type_discriminant() == other.event_type_discriminant()
&& self.event_type == other.event_type
}
}
@@ -43,31 +43,15 @@ impl Ord for TimelineEvent {
self.timestamp
.cmp(&other.timestamp)
.then_with(|| self.entity_id.cmp(&other.entity_id))
.then_with(|| {
self.event_type_discriminant()
.cmp(&other.event_type_discriminant())
})
}
}
impl TimelineEvent {
fn event_type_discriminant(&self) -> u8 {
match &self.event_type {
TimelineEventType::Created => 0,
TimelineEventType::StateChanged { .. } => 1,
TimelineEventType::LabelAdded { .. } => 2,
TimelineEventType::LabelRemoved { .. } => 3,
TimelineEventType::MilestoneSet { .. } => 4,
TimelineEventType::MilestoneRemoved { .. } => 5,
TimelineEventType::Merged => 6,
TimelineEventType::NoteEvidence { .. } => 7,
TimelineEventType::CrossReferenced { .. } => 8,
}
.then_with(|| self.event_type.cmp(&other.event_type))
}
}
/// Per spec Section 3.3. Serde tagged enum for JSON output.
#[derive(Debug, Clone, Serialize)]
///
/// Variant declaration order defines the sort order within a timestamp+entity
/// tiebreak (Created < StateChanged < LabelAdded < ... < CrossReferenced).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum TimelineEventType {
Created,
@@ -133,6 +117,9 @@ pub struct UnresolvedRef {
pub struct TimelineResult {
pub query: String,
pub events: Vec<TimelineEvent>,
/// Total events before the `--limit` was applied (for meta.total_events vs meta.showing).
#[serde(skip)]
pub total_events_before_limit: usize,
pub seed_entities: Vec<EntityRef>,
pub expanded_entities: Vec<ExpandedEntityRef>,
pub unresolved_references: Vec<UnresolvedRef>,

View File

@@ -17,7 +17,7 @@ pub fn collect_events(
evidence_notes: &[TimelineEvent],
since_ms: Option<i64>,
limit: usize,
) -> Result<Vec<TimelineEvent>> {
) -> Result<(Vec<TimelineEvent>, usize)> {
let mut all_events: Vec<TimelineEvent> = Vec::new();
// Collect events for seed entities
@@ -41,10 +41,13 @@ pub fn collect_events(
all_events.retain(|e| e.timestamp >= since);
}
// Capture total before applying limit (for meta.total_events vs meta.showing)
let total_before_limit = all_events.len();
// Apply limit
all_events.truncate(limit);
Ok(all_events)
Ok((all_events, total_before_limit))
}
/// Collect all events for a single entity.
@@ -480,7 +483,7 @@ mod tests {
let issue_id = insert_issue(&conn, project_id, 1);
let seeds = vec![make_entity_ref("issue", issue_id, 1)];
let events = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
assert_eq!(events.len(), 1);
assert!(matches!(events[0].event_type, TimelineEventType::Created));
assert_eq!(events[0].timestamp, 1000);
@@ -498,7 +501,7 @@ mod tests {
insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 4000);
let seeds = vec![make_entity_ref("issue", issue_id, 1)];
let events = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
// Created + 2 state changes = 3
assert_eq!(events.len(), 3);
@@ -523,7 +526,7 @@ mod tests {
insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000);
let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
let events = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
// Should have Created + Merged (not Created + StateChanged{merged} + Merged)
let merged_count = events
@@ -548,7 +551,7 @@ mod tests {
insert_label_event(&conn, project_id, Some(issue_id), None, "add", None, 2000);
let seeds = vec![make_entity_ref("issue", issue_id, 1)];
let events = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let label_event = events.iter().find(|e| {
matches!(&e.event_type, TimelineEventType::LabelAdded { label } if label == "[deleted label]")
@@ -565,7 +568,7 @@ mod tests {
insert_milestone_event(&conn, project_id, Some(issue_id), None, "add", None, 2000);
let seeds = vec![make_entity_ref("issue", issue_id, 1)];
let events = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let ms_event = events.iter().find(|e| {
matches!(&e.event_type, TimelineEventType::MilestoneSet { milestone } if milestone == "[deleted milestone]")
@@ -585,7 +588,7 @@ mod tests {
let seeds = vec![make_entity_ref("issue", issue_id, 1)];
// Since 4000: should exclude Created (1000) and closed (3000)
let events = collect_events(&conn, &seeds, &[], &[], Some(4000), 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], Some(4000), 100).unwrap();
assert_eq!(events.len(), 1);
assert_eq!(events[0].timestamp, 5000);
}
@@ -612,7 +615,7 @@ mod tests {
make_entity_ref("issue", issue_id, 1),
make_entity_ref("merge_request", mr_id, 10),
];
let events = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
// Verify chronological order
for window in events.windows(2) {
@@ -638,8 +641,10 @@ mod tests {
}
let seeds = vec![make_entity_ref("issue", issue_id, 1)];
let events = collect_events(&conn, &seeds, &[], &[], None, 5).unwrap();
let (events, total) = collect_events(&conn, &seeds, &[], &[], None, 5).unwrap();
assert_eq!(events.len(), 5);
// 20 state changes + 1 created = 21 total before limit
assert_eq!(total, 21);
}
#[test]
@@ -666,7 +671,7 @@ mod tests {
}];
let seeds = vec![make_entity_ref("issue", issue_id, 1)];
let events = collect_events(&conn, &seeds, &[], &evidence, None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &evidence, None, 100).unwrap();
let note_event = events.iter().find(|e| {
matches!(
@@ -688,7 +693,7 @@ mod tests {
insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000);
let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
let events = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
let merged = events
.iter()

View File

@@ -1416,12 +1416,9 @@ fn handle_timeline(
let result = run_timeline(&config, &params)?;
if robot_mode {
// total_events_before_limit: the result already has events truncated,
// but we can compute it from the pipeline if needed. For now, use events.len()
// since collect_events already applied the limit internally.
print_timeline_json_with_meta(
&result,
result.events.len(),
result.total_events_before_limit,
params.depth,
params.expand_mentions,
);

View File

@@ -170,7 +170,7 @@ fn pipeline_seed_expand_collect_end_to_end() {
assert!(total_entities >= 2, "Should have at least issue + MR");
// COLLECT: gather all events
let events = collect_events(
let (events, _) = collect_events(
&conn,
&seed_result.seed_entities,
&expand_result.expanded_entities,
@@ -224,7 +224,7 @@ fn pipeline_empty_query_produces_empty_result() {
let expand_result = expand_timeline(&conn, &seed_result.seed_entities, 1, false, 100).unwrap();
assert!(expand_result.expanded_entities.is_empty());
let events = collect_events(
let (events, _) = collect_events(
&conn,
&seed_result.seed_entities,
&expand_result.expanded_entities,
@@ -259,7 +259,7 @@ fn pipeline_since_filter_excludes_old_events() {
let expand_result = expand_timeline(&conn, &seed_result.seed_entities, 0, false, 100).unwrap();
// Collect with since=5000: should exclude Created(1000) and closed(2000)
let events = collect_events(
let (events, _) = collect_events(
&conn,
&seed_result.seed_entities,
&expand_result.expanded_entities,