feat(timeline): collect and render full discussion threads

Implements the downstream consumption of matched discussions from the seed
phase, completing the discussion thread feature across collect, CLI, and
integration tests.

Collect phase (timeline_collect.rs):
- New collect_discussion_threads() function assembles full threads by
  querying notes for each matched discussion_id, filtering out system notes
  (is_system = 0), ordering chronologically, and capping at THREAD_MAX_NOTES
  with a synthetic "[N more notes not shown]" summary note
- build_entity_lookup() creates a (type, id) -> (iid, path) map from seed
  and expanded entities to provide display metadata for thread events
- Thread timestamp is set to the first note's created_at for correct
  chronological interleaving with other timeline events
- collect_events() gains a matched_discussions parameter; threads are
  collected after entity events and before evidence note merging

CLI rendering (cli/commands/timeline.rs):
- Human mode: threads render with box-drawing borders, bold @author tags,
  date-stamped notes, and word-wrapped bodies (60 char width)
- Robot mode: DiscussionThread serializes as discussion_thread kind with
  note_count, full notes array (note_id, author, body, ISO created_at)
- THREAD tag in yellow for human event tag styling
- TimelineMeta gains discussion_threads_included count

Tests:
- 8 new collect tests: basic thread assembly, system note filtering, empty
  thread skipping, body truncation to THREAD_NOTE_MAX_CHARS, note cap with
  synthetic summary, timestamp from first note, chronological sort position,
  and deduplication of duplicate discussion_ids
- Integration tests updated for new collect_events signature

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
teernisse
2026-02-13 14:18:18 -05:00
parent 0e65202778
commit 2da1a228b3
4 changed files with 590 additions and 14 deletions

View File

@@ -86,6 +86,7 @@ pub async fn run_timeline(config: &Config, params: &TimelineParams) -> Result<Ti
&seed_result.seed_entities,
&expand_result.expanded_entities,
&seed_result.evidence_notes,
&seed_result.matched_discussions,
since_ms,
params.limit,
)?;
@@ -162,6 +163,25 @@ fn print_timeline_event(event: &TimelineEvent) {
);
}
}
// Show full discussion thread
if let TimelineEventType::DiscussionThread { notes, .. } = &event.event_type {
let bar = "\u{2500}".repeat(44);
println!(" \u{2500}\u{2500} Discussion {bar}");
for note in notes {
let note_date = format_date(note.created_at);
let author = note
.author
.as_deref()
.map(|a| format!("@{a}"))
.unwrap_or_else(|| "unknown".to_owned());
println!(" {} ({note_date}):", style(author).bold());
for line in wrap_text(&note.body, 60) {
println!(" {line}");
}
}
println!(" {}", "\u{2500}".repeat(60));
}
}
fn print_timeline_footer(result: &TimelineResult) {
@@ -206,6 +226,7 @@ fn format_event_tag(event_type: &TimelineEventType) -> String {
TimelineEventType::MilestoneRemoved { .. } => style("MILESTONE-").magenta().to_string(),
TimelineEventType::Merged => style("MERGED").cyan().to_string(),
TimelineEventType::NoteEvidence { .. } => style("NOTE").dim().to_string(),
TimelineEventType::DiscussionThread { .. } => style("THREAD").yellow().to_string(),
TimelineEventType::CrossReferenced { .. } => style("REF").dim().to_string(),
}
}
@@ -232,6 +253,28 @@ fn truncate_summary(s: &str, max: usize) -> String {
}
}
/// Greedily wraps `text` into lines of at most `width` bytes, splitting on
/// whitespace. A word longer than `width` is emitted on its own line without
/// being broken. Whitespace-only input yields an empty vector.
fn wrap_text(text: &str, width: usize) -> Vec<String> {
    let mut wrapped: Vec<String> = Vec::new();
    for word in text.split_whitespace() {
        match wrapped.last_mut() {
            // Word still fits on the line being built: append with a space.
            Some(line) if line.len() + 1 + word.len() <= width => {
                line.push(' ');
                line.push_str(word);
            }
            // First word, or the current line is full: start a new line.
            _ => wrapped.push(word.to_owned()),
        }
    }
    wrapped
}
fn wrap_snippet(text: &str, width: usize) -> Vec<String> {
let mut lines = Vec::new();
let mut current = String::new();
@@ -276,6 +319,7 @@ pub fn print_timeline_json_with_meta(
total_entities: result.seed_entities.len() + result.expanded_entities.len(),
total_events: total_events_before_limit,
evidence_notes_included: count_evidence_notes(&result.events),
discussion_threads_included: count_discussion_threads(&result.events),
unresolved_references: result.unresolved_references.len(),
showing: result.events.len(),
},
@@ -473,6 +517,22 @@ fn event_type_to_json(event_type: &TimelineEventType) -> (String, serde_json::Va
"discussion_id": discussion_id,
}),
),
TimelineEventType::DiscussionThread {
discussion_id,
notes,
} => (
"discussion_thread".to_owned(),
serde_json::json!({
"discussion_id": discussion_id,
"note_count": notes.len(),
"notes": notes.iter().map(|n| serde_json::json!({
"note_id": n.note_id,
"author": n.author,
"body": n.body,
"created_at": ms_to_iso(n.created_at),
})).collect::<Vec<_>>(),
}),
),
TimelineEventType::CrossReferenced { target } => (
"cross_referenced".to_owned(),
serde_json::json!({ "target": target }),
@@ -488,6 +548,7 @@ struct TimelineMetaJson {
total_entities: usize,
total_events: usize,
evidence_notes_included: usize,
discussion_threads_included: usize,
unresolved_references: usize,
showing: usize,
}
@@ -498,3 +559,10 @@ fn count_evidence_notes(events: &[TimelineEvent]) -> usize {
.filter(|e| matches!(e.event_type, TimelineEventType::NoteEvidence { .. }))
.count()
}
/// Returns how many of the given timeline events are full discussion threads
/// (used to populate `discussion_threads_included` in the JSON meta block).
fn count_discussion_threads(events: &[TimelineEvent]) -> usize {
    let mut total = 0;
    for event in events {
        if matches!(event.event_type, TimelineEventType::DiscussionThread { .. }) {
            total += 1;
        }
    }
    total
}