diff --git a/src/cli/commands/timeline.rs b/src/cli/commands/timeline.rs index beb3989..470f274 100644 --- a/src/cli/commands/timeline.rs +++ b/src/cli/commands/timeline.rs @@ -86,6 +86,7 @@ pub async fn run_timeline(config: &Config, params: &TimelineParams) -> Result String { TimelineEventType::MilestoneRemoved { .. } => style("MILESTONE-").magenta().to_string(), TimelineEventType::Merged => style("MERGED").cyan().to_string(), TimelineEventType::NoteEvidence { .. } => style("NOTE").dim().to_string(), + TimelineEventType::DiscussionThread { .. } => style("THREAD").yellow().to_string(), TimelineEventType::CrossReferenced { .. } => style("REF").dim().to_string(), } } @@ -232,6 +253,28 @@ fn truncate_summary(s: &str, max: usize) -> String { } } +fn wrap_text(text: &str, width: usize) -> Vec<String> { + let mut lines = Vec::new(); + let mut current = String::new(); + + for word in text.split_whitespace() { + if current.is_empty() { + current = word.to_string(); + } else if current.len() + 1 + word.len() <= width { + current.push(' '); + current.push_str(word); + } else { + lines.push(current); + current = word.to_string(); + } + } + if !current.is_empty() { + lines.push(current); + } + + lines +} + fn wrap_snippet(text: &str, width: usize) -> Vec<String> { let mut lines = Vec::new(); let mut current = String::new(); @@ -276,6 +319,7 @@ pub fn print_timeline_json_with_meta( total_entities: result.seed_entities.len() + result.expanded_entities.len(), total_events: total_events_before_limit, evidence_notes_included: count_evidence_notes(&result.events), + discussion_threads_included: count_discussion_threads(&result.events), unresolved_references: result.unresolved_references.len(), showing: result.events.len(), }, @@ -473,6 +517,22 @@ fn event_type_to_json(event_type: &TimelineEventType) -> (String, serde_json::Va "discussion_id": discussion_id, }), ), + TimelineEventType::DiscussionThread { + discussion_id, + notes, + } => ( + "discussion_thread".to_owned(), + serde_json::json!({ + 
"discussion_id": discussion_id, + "note_count": notes.len(), + "notes": notes.iter().map(|n| serde_json::json!({ + "note_id": n.note_id, + "author": n.author, + "body": n.body, + "created_at": ms_to_iso(n.created_at), + })).collect::<Vec<serde_json::Value>>(), + }), + ), TimelineEventType::CrossReferenced { target } => ( "cross_referenced".to_owned(), serde_json::json!({ "target": target }), @@ -488,6 +548,7 @@ struct TimelineMetaJson { total_entities: usize, total_events: usize, evidence_notes_included: usize, + discussion_threads_included: usize, unresolved_references: usize, showing: usize, } @@ -498,3 +559,10 @@ fn count_evidence_notes(events: &[TimelineEvent]) -> usize { .filter(|e| matches!(e.event_type, TimelineEventType::NoteEvidence { .. })) .count() } + +fn count_discussion_threads(events: &[TimelineEvent]) -> usize { + events + .iter() + .filter(|e| matches!(e.event_type, TimelineEventType::DiscussionThread { .. })) + .count() +} diff --git a/src/core/timeline_collect.rs b/src/core/timeline_collect.rs index 58fe92f..6f45ea6 100644 --- a/src/core/timeline_collect.rs +++ b/src/core/timeline_collect.rs @@ -1,20 +1,27 @@ use rusqlite::Connection; +use std::collections::HashSet; + use crate::core::error::{LoreError, Result}; -use crate::core::timeline::{EntityRef, ExpandedEntityRef, TimelineEvent, TimelineEventType}; +use crate::core::timeline::{ + EntityRef, ExpandedEntityRef, MatchedDiscussion, THREAD_MAX_NOTES, THREAD_NOTE_MAX_CHARS, + ThreadNote, TimelineEvent, TimelineEventType, truncate_to_chars, +}; /// Collect all events for seed and expanded entities, interleave chronologically. /// /// Steps 4-5 of the timeline pipeline: /// 1. For each entity, collect Created, StateChanged, Label, Milestone, Merged events -/// 2. Merge in evidence notes from the seed phase -/// 3. Sort chronologically with stable tiebreak -/// 4. Apply --since filter and --limit +/// 2. Collect discussion threads from matched discussions +/// 3. Merge in evidence notes from the seed phase +/// 4. 
Sort chronologically with stable tiebreak +/// 5. Apply --since filter and --limit pub fn collect_events( conn: &Connection, seed_entities: &[EntityRef], expanded_entities: &[ExpandedEntityRef], evidence_notes: &[TimelineEvent], + matched_discussions: &[MatchedDiscussion], since_ms: Option<i64>, limit: usize, ) -> Result<(Vec<TimelineEvent>, usize)> { @@ -30,6 +37,10 @@ pub fn collect_events( collect_entity_events(conn, &expanded.entity_ref, false, &mut all_events)?; } + // Collect discussion threads + let entity_lookup = build_entity_lookup(seed_entities, expanded_entities); + collect_discussion_threads(conn, matched_discussions, &entity_lookup, &mut all_events)?; + // Add evidence notes from seed phase all_events.extend(evidence_notes.iter().cloned()); @@ -369,6 +380,117 @@ fn entity_id_column(entity: &EntityRef) -> Result<(&'static str, i64)> { } } +/// Lookup key: (entity_type, entity_id) -> (iid, project_path) +type EntityLookup = std::collections::HashMap<(String, i64), (i64, String)>; + +fn build_entity_lookup(seeds: &[EntityRef], expanded: &[ExpandedEntityRef]) -> EntityLookup { + let mut lookup = EntityLookup::new(); + for e in seeds { + lookup.insert( + (e.entity_type.clone(), e.entity_id), + (e.entity_iid, e.project_path.clone()), + ); + } + for exp in expanded { + let e = &exp.entity_ref; + lookup.insert( + (e.entity_type.clone(), e.entity_id), + (e.entity_iid, e.project_path.clone()), + ); + } + lookup +} + +/// Collect full discussion threads for matched discussions. 
+fn collect_discussion_threads( + conn: &Connection, + matched_discussions: &[MatchedDiscussion], + entity_lookup: &EntityLookup, + events: &mut Vec<TimelineEvent>, +) -> Result<()> { + // Deduplicate by discussion_id + let mut seen = HashSet::new(); + + for disc in matched_discussions { + if !seen.insert(disc.discussion_id) { + continue; + } + + let (iid, project_path) = + match entity_lookup.get(&(disc.entity_type.clone(), disc.entity_id)) { + Some(val) => val.clone(), + None => continue, // entity not in seed or expanded set + }; + + let mut stmt = conn.prepare( + "SELECT id, author_username, body, created_at FROM notes + WHERE discussion_id = ?1 AND is_system = 0 + ORDER BY created_at ASC", + )?; + + let rows = stmt.query_map(rusqlite::params![disc.discussion_id], |row| { + Ok(( + row.get::<_, i64>(0)?, // id + row.get::<_, Option<String>>(1)?, // author_username + row.get::<_, Option<String>>(2)?, // body + row.get::<_, i64>(3)?, // created_at + )) + })?; + + let mut notes = Vec::new(); + for row_result in rows { + let (note_id, author, body, created_at) = row_result?; + let body = truncate_to_chars(body.as_deref().unwrap_or(""), THREAD_NOTE_MAX_CHARS); + notes.push(ThreadNote { + note_id, + author, + body, + created_at, + }); + } + + // Skip empty threads (all notes were system notes) + if notes.is_empty() { + continue; + } + + let first_created_at = notes[0].created_at; + + // Cap notes per thread + let total_notes = notes.len(); + if total_notes > THREAD_MAX_NOTES { + notes.truncate(THREAD_MAX_NOTES); + notes.push(ThreadNote { + note_id: -1, + author: None, + body: format!("[{} more notes not shown]", total_notes - THREAD_MAX_NOTES), + created_at: notes.last().map_or(first_created_at, |n| n.created_at), + }); + } + + let note_count = notes.len(); + let actor = notes.first().and_then(|n| n.author.clone()); + + events.push(TimelineEvent { + timestamp: first_created_at, + entity_type: disc.entity_type.clone(), + entity_id: disc.entity_id, + entity_iid: iid, + project_path, + event_type: 
TimelineEventType::DiscussionThread { + discussion_id: disc.discussion_id, + notes, + }, + summary: format!("Discussion ({note_count} notes)"), + actor, + url: None, + is_seed: true, + }); + } + + Ok(()) +} + #[cfg(test)] #[path = "timeline_collect_tests.rs"] mod tests; diff --git a/src/core/timeline_collect_tests.rs b/src/core/timeline_collect_tests.rs index 6e74bef..88265eb 100644 --- a/src/core/timeline_collect_tests.rs +++ b/src/core/timeline_collect_tests.rs @@ -101,7 +101,7 @@ fn test_collect_creation_event() { let issue_id = insert_issue(&conn, project_id, 1); let seeds = vec![make_entity_ref("issue", issue_id, 1)]; - let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], None, 100).unwrap(); assert_eq!(events.len(), 1); assert!(matches!(events[0].event_type, TimelineEventType::Created)); assert_eq!(events[0].timestamp, 1000); @@ -119,7 +119,7 @@ fn test_collect_state_events() { insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 4000); let seeds = vec![make_entity_ref("issue", issue_id, 1)]; - let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], None, 100).unwrap(); // Created + 2 state changes = 3 assert_eq!(events.len(), 3); @@ -144,7 +144,7 @@ fn test_collect_merged_dedup() { insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000); let seeds = vec![make_entity_ref("merge_request", mr_id, 10)]; - let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], None, 100).unwrap(); // Should have Created + Merged (not Created + StateChanged{merged} + Merged) let merged_count = events @@ -169,7 +169,7 @@ fn test_collect_null_label_fallback() { insert_label_event(&conn, project_id, Some(issue_id), None, "add", None, 2000); let seeds = vec![make_entity_ref("issue", 
issue_id, 1)]; - let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], None, 100).unwrap(); let label_event = events.iter().find(|e| { matches!(&e.event_type, TimelineEventType::LabelAdded { label } if label == "[deleted label]") @@ -186,7 +186,7 @@ fn test_collect_null_milestone_fallback() { insert_milestone_event(&conn, project_id, Some(issue_id), None, "add", None, 2000); let seeds = vec![make_entity_ref("issue", issue_id, 1)]; - let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], None, 100).unwrap(); let ms_event = events.iter().find(|e| { matches!(&e.event_type, TimelineEventType::MilestoneSet { milestone } if milestone == "[deleted milestone]") @@ -206,7 +206,7 @@ fn test_collect_since_filter() { let seeds = vec![make_entity_ref("issue", issue_id, 1)]; // Since 4000: should exclude Created (1000) and closed (3000) - let (events, _) = collect_events(&conn, &seeds, &[], &[], Some(4000), 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], Some(4000), 100).unwrap(); assert_eq!(events.len(), 1); assert_eq!(events[0].timestamp, 5000); } @@ -233,7 +233,7 @@ fn test_collect_chronological_sort() { make_entity_ref("issue", issue_id, 1), make_entity_ref("merge_request", mr_id, 10), ]; - let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], None, 100).unwrap(); // Verify chronological order for window in events.windows(2) { @@ -259,7 +259,7 @@ fn test_collect_respects_limit() { } let seeds = vec![make_entity_ref("issue", issue_id, 1)]; - let (events, total) = collect_events(&conn, &seeds, &[], &[], None, 5).unwrap(); + let (events, total) = collect_events(&conn, &seeds, &[], &[], &[], None, 5).unwrap(); assert_eq!(events.len(), 5); // 20 state changes + 1 created = 21 total 
before limit assert_eq!(total, 21); @@ -289,7 +289,7 @@ fn test_collect_evidence_notes_included() { }]; let seeds = vec![make_entity_ref("issue", issue_id, 1)]; - let (events, _) = collect_events(&conn, &seeds, &[], &evidence, None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &evidence, &[], None, 100).unwrap(); let note_event = events.iter().find(|e| { matches!( @@ -311,7 +311,7 @@ fn test_collect_merged_fallback_to_state_event() { insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000); let seeds = vec![make_entity_ref("merge_request", mr_id, 10)]; - let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap(); + let (events, _) = collect_events(&conn, &seeds, &[], &[], &[], None, 100).unwrap(); let merged = events .iter() @@ -319,3 +319,386 @@ fn test_collect_merged_fallback_to_state_event() { assert!(merged.is_some()); assert_eq!(merged.unwrap().timestamp, 5000); } + +// ─── Discussion thread tests ──────────────────────────────────────────────── + +fn insert_discussion( + conn: &Connection, + project_id: i64, + issue_id: Option<i64>, + mr_id: Option<i64>, +) -> i64 { + let noteable_type = if issue_id.is_some() { + "Issue" + } else { + "MergeRequest" + }; + conn.execute( + "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, merge_request_id, noteable_type, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, 0)", + rusqlite::params![format!("disc_{}", rand::random::<u32>()), project_id, issue_id, mr_id, noteable_type], + ) + .unwrap(); + conn.last_insert_rowid() +} + +#[allow(clippy::too_many_arguments)] +fn insert_note( + conn: &Connection, + discussion_id: i64, + project_id: i64, + author: &str, + body: &str, + is_system: bool, + created_at: i64, +) -> i64 { + let gitlab_id: i64 = rand::random::<u32>().into(); + conn.execute( + "INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, author_username, body, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?7, ?7)", + 
rusqlite::params![gitlab_id, discussion_id, project_id, is_system as i32, author, body, created_at], + ) + .unwrap(); + conn.last_insert_rowid() +} + +fn make_matched_discussion( + discussion_id: i64, + entity_type: &str, + entity_id: i64, + project_id: i64, +) -> MatchedDiscussion { + MatchedDiscussion { + discussion_id, + entity_type: entity_type.to_owned(), + entity_id, + project_id, + } +} + +#[test] +fn test_collect_discussion_thread_basic() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + insert_note( + &conn, + disc_id, + project_id, + "alice", + "First note", + false, + 2000, + ); + insert_note(&conn, disc_id, project_id, "bob", "Reply here", false, 3000); + insert_note( + &conn, + disc_id, + project_id, + "alice", + "Follow up", + false, + 4000, + ); + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + let discussions = [make_matched_discussion( + disc_id, "issue", issue_id, project_id, + )]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + let thread = events + .iter() + .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. 
})); + assert!(thread.is_some(), "Should have a DiscussionThread event"); + + let thread = thread.unwrap(); + if let TimelineEventType::DiscussionThread { + discussion_id, + notes, + } = &thread.event_type + { + assert_eq!(*discussion_id, disc_id); + assert_eq!(notes.len(), 3); + assert_eq!(notes[0].author.as_deref(), Some("alice")); + assert_eq!(notes[0].body, "First note"); + assert_eq!(notes[1].author.as_deref(), Some("bob")); + assert_eq!(notes[2].body, "Follow up"); + } else { + panic!("Expected DiscussionThread variant"); + } +} + +#[test] +fn test_collect_discussion_thread_skips_system_notes() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + insert_note( + &conn, + disc_id, + project_id, + "alice", + "User note", + false, + 2000, + ); + insert_note( + &conn, + disc_id, + project_id, + "system", + "added label ~bug", + true, + 3000, + ); + insert_note( + &conn, + disc_id, + project_id, + "bob", + "Another user note", + false, + 4000, + ); + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + let discussions = [make_matched_discussion( + disc_id, "issue", issue_id, project_id, + )]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + let thread = events + .iter() + .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. })); + assert!(thread.is_some()); + + if let TimelineEventType::DiscussionThread { notes, .. 
} = &thread.unwrap().event_type { + assert_eq!(notes.len(), 2, "System notes should be filtered out"); + assert_eq!(notes[0].body, "User note"); + assert_eq!(notes[1].body, "Another user note"); + } else { + panic!("Expected DiscussionThread"); + } +} + +#[test] +fn test_collect_discussion_thread_empty_after_system_filter() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + // Only system notes + insert_note( + &conn, + disc_id, + project_id, + "system", + "added label", + true, + 2000, + ); + insert_note( + &conn, + disc_id, + project_id, + "system", + "removed label", + true, + 3000, + ); + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + let discussions = [make_matched_discussion( + disc_id, "issue", issue_id, project_id, + )]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + let thread_count = events + .iter() + .filter(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. })) + .count(); + assert_eq!( + thread_count, 0, + "All-system-note discussion should produce no thread" + ); +} + +#[test] +fn test_collect_discussion_thread_body_truncation() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + let long_body = "x".repeat(10_000); + insert_note(&conn, disc_id, project_id, "alice", &long_body, false, 2000); + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + let discussions = [make_matched_discussion( + disc_id, "issue", issue_id, project_id, + )]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + let thread = events + .iter() + .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. 
})) + .unwrap(); + + if let TimelineEventType::DiscussionThread { notes, .. } = &thread.event_type { + assert!( + notes[0].body.chars().count() <= crate::core::timeline::THREAD_NOTE_MAX_CHARS, + "Body should be truncated to THREAD_NOTE_MAX_CHARS" + ); + } else { + panic!("Expected DiscussionThread"); + } +} + +#[test] +fn test_collect_discussion_thread_note_cap() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + // Insert 60 notes, exceeding THREAD_MAX_NOTES (50) + for i in 0..60 { + insert_note( + &conn, + disc_id, + project_id, + "alice", + &format!("Note {i}"), + false, + 2000 + i * 100, + ); + } + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + let discussions = [make_matched_discussion( + disc_id, "issue", issue_id, project_id, + )]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + let thread = events + .iter() + .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. })) + .unwrap(); + + if let TimelineEventType::DiscussionThread { notes, .. 
} = &thread.event_type { + // 50 notes + 1 synthetic summary = 51 + assert_eq!( + notes.len(), + crate::core::timeline::THREAD_MAX_NOTES + 1, + "Should cap at THREAD_MAX_NOTES + synthetic summary" + ); + let last = notes.last().unwrap(); + assert!(last.body.contains("more notes not shown")); + } else { + panic!("Expected DiscussionThread"); + } +} + +#[test] +fn test_collect_discussion_thread_timestamp_is_first_note() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + insert_note(&conn, disc_id, project_id, "alice", "First", false, 5000); + insert_note(&conn, disc_id, project_id, "bob", "Second", false, 8000); + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + let discussions = [make_matched_discussion( + disc_id, "issue", issue_id, project_id, + )]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + let thread = events + .iter() + .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. 
})) + .unwrap(); + + assert_eq!( + thread.timestamp, 5000, + "Thread timestamp should be first note's created_at" + ); +} + +#[test] +fn test_collect_discussion_thread_sort_position() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + // Note at t=2000 (between Created at t=1000 and state change at t=3000) + insert_note( + &conn, + disc_id, + project_id, + "alice", + "discussion", + false, + 2000, + ); + insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000); + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + let discussions = [make_matched_discussion( + disc_id, "issue", issue_id, project_id, + )]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + // Expected order: Created(1000), DiscussionThread(2000), StateChanged(3000) + assert!(events.len() >= 3); + assert!(matches!(events[0].event_type, TimelineEventType::Created)); + assert!(matches!( + events[1].event_type, + TimelineEventType::DiscussionThread { .. } + )); + assert!(matches!( + events[2].event_type, + TimelineEventType::StateChanged { .. 
} + )); +} + +#[test] +fn test_collect_discussion_thread_dedup() { + let conn = setup_test_db(); + let project_id = insert_project(&conn); + let issue_id = insert_issue(&conn, project_id, 1); + let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None); + + insert_note(&conn, disc_id, project_id, "alice", "hello", false, 2000); + + let seeds = [make_entity_ref("issue", issue_id, 1)]; + // Same discussion_id twice + let discussions = [ + make_matched_discussion(disc_id, "issue", issue_id, project_id), + make_matched_discussion(disc_id, "issue", issue_id, project_id), + ]; + + let (events, _) = collect_events(&conn, &seeds, &[], &[], &discussions, None, 100).unwrap(); + + let thread_count = events + .iter() + .filter(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. })) + .count(); + assert_eq!( + thread_count, 1, + "Duplicate discussion_id should produce one thread" + ); +} diff --git a/tests/timeline_pipeline_tests.rs b/tests/timeline_pipeline_tests.rs index 05dd8b1..bbd282a 100644 --- a/tests/timeline_pipeline_tests.rs +++ b/tests/timeline_pipeline_tests.rs @@ -177,6 +177,7 @@ async fn pipeline_seed_expand_collect_end_to_end() { &seed_result.seed_entities, &expand_result.expanded_entities, &seed_result.evidence_notes, + &seed_result.matched_discussions, None, 1000, ) @@ -233,6 +234,7 @@ async fn pipeline_empty_query_produces_empty_result() { &seed_result.seed_entities, &expand_result.expanded_entities, &seed_result.evidence_notes, + &seed_result.matched_discussions, None, 1000, ) @@ -270,6 +272,7 @@ async fn pipeline_since_filter_excludes_old_events() { &seed_result.seed_entities, &expand_result.expanded_entities, &seed_result.evidence_notes, + &seed_result.matched_discussions, Some(5000), 1000, )