Files
gitlore/src/core/timeline_collect_tests.rs
teernisse 2da1a228b3 feat(timeline): collect and render full discussion threads
Implements the downstream consumption of matched discussions from the seed
phase, completing the discussion thread feature across collect, CLI, and
integration tests.

Collect phase (timeline_collect.rs):
- New collect_discussion_threads() function assembles full threads by
  querying notes for each matched discussion_id, filtering out system notes
  (is_system = 0), ordering chronologically, and capping at THREAD_MAX_NOTES
  with a synthetic "[N more notes not shown]" summary note
- build_entity_lookup() creates a (type, id) -> (iid, path) map from seed
  and expanded entities to provide display metadata for thread events
- Thread timestamp is set to the first note's created_at for correct
  chronological interleaving with other timeline events
- collect_events() gains a matched_discussions parameter; threads are
  collected after entity events and before evidence note merging

CLI rendering (cli/commands/timeline.rs):
- Human mode: threads render with box-drawing borders, bold @author tags,
  date-stamped notes, and word-wrapped bodies (60 char width)
- Robot mode: DiscussionThread serializes as discussion_thread kind with
  note_count, full notes array (note_id, author, body, ISO created_at)
- THREAD tag in yellow for human event tag styling
- TimelineMeta gains discussion_threads_included count

Tests:
- 8 new collect tests: basic thread assembly, system note filtering, empty
  thread skipping, body truncation to THREAD_NOTE_MAX_CHARS, note cap with
  synthetic summary, timestamp from first note, chronological sort position,
  and deduplication of duplicate discussion_ids
- Integration tests updated for new collect_events signature

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 14:18:36 -05:00

705 lines
22 KiB
Rust

use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;
/// Opens a fresh in-memory SQLite database and applies all schema migrations.
fn setup_test_db() -> Connection {
    let db = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&db).unwrap();
    db
}
/// Inserts the single fixture project (`group/project`) and returns its rowid.
fn insert_project(conn: &Connection) -> i64 {
    let sql = "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')";
    conn.execute(sql, []).unwrap();
    conn.last_insert_rowid()
}
/// Inserts a fixture issue for `project_id` with the given `iid` and returns its rowid.
///
/// The synthetic `gitlab_id` is derived as `iid * 100`. The stored `web_url`
/// now reflects the actual `iid` (it was previously hard-coded to `/issues/1`,
/// which produced misleading URLs for any other iid).
fn insert_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    let web_url = format!("https://gitlab.com/group/project/-/issues/{iid}");
    conn.execute(
        "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, web_url) VALUES (?1, ?2, ?3, 'Auth bug', 'opened', 'alice', 1000, 2000, 3000, ?4)",
        rusqlite::params![iid * 100, project_id, iid, web_url],
    )
    .unwrap();
    conn.last_insert_rowid()
}
/// Inserts a fixture merge request for `project_id` with the given `iid` and
/// optional `merged_at`, returning its rowid.
///
/// State is always 'merged' regardless of `merged_at` — the merged-fallback
/// test relies on a NULL `merged_at` paired with a merged state event.
/// The `web_url` now reflects the actual `iid` (it was previously hard-coded
/// to `/merge_requests/10`).
fn insert_mr(conn: &Connection, project_id: i64, iid: i64, merged_at: Option<i64>) -> i64 {
    let web_url = format!("https://gitlab.com/group/project/-/merge_requests/{iid}");
    conn.execute(
        "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, merged_at, merge_user_username, web_url) VALUES (?1, ?2, ?3, 'Fix auth', 'merged', 'bob', 1000, 5000, 6000, ?4, 'charlie', ?5)",
        rusqlite::params![iid * 100, project_id, iid, merged_at, web_url],
    )
    .unwrap();
    conn.last_insert_rowid()
}
/// Builds an `EntityRef` for the fixture project path `group/project`.
fn make_entity_ref(entity_type: &str, entity_id: i64, iid: i64) -> EntityRef {
    EntityRef {
        entity_type: String::from(entity_type),
        entity_id,
        entity_iid: iid,
        project_path: String::from("group/project"),
    }
}
/// Inserts a resource state event (actor 'alice') targeting either an issue
/// or a merge request, with a random synthetic gitlab_id.
fn insert_state_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    state: &str,
    created_at: i64,
) {
    let event_gid = i64::from(rand::random::<u32>());
    conn.execute(
        "INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, merge_request_id, state, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, 'alice', ?6)",
        rusqlite::params![event_gid, project_id, issue_id, mr_id, state, created_at],
    )
    .unwrap();
}
/// Inserts a resource label event (actor 'alice'); `label_name` may be NULL
/// to exercise the deleted-label fallback path.
fn insert_label_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    action: &str,
    label_name: Option<&str>,
    created_at: i64,
) {
    let event_gid = i64::from(rand::random::<u32>());
    conn.execute(
        "INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, merge_request_id, action, label_name, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, 'alice', ?7)",
        rusqlite::params![event_gid, project_id, issue_id, mr_id, action, label_name, created_at],
    )
    .unwrap();
}
/// Inserts a resource milestone event (actor 'alice'); `milestone_title` may
/// be NULL to exercise the deleted-milestone fallback path.
fn insert_milestone_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    action: &str,
    milestone_title: Option<&str>,
    created_at: i64,
) {
    let event_gid = i64::from(rand::random::<u32>());
    conn.execute(
        "INSERT INTO resource_milestone_events (gitlab_id, project_id, issue_id, merge_request_id, action, milestone_title, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, 'alice', ?7)",
        rusqlite::params![event_gid, project_id, issue_id, mr_id, action, milestone_title, created_at],
    )
    .unwrap();
}
#[test]
fn test_collect_creation_event() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let seeds = vec![make_entity_ref("issue", issue, 1)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], None, 100).unwrap();
    // A lone seeded issue yields exactly one Created event carrying the
    // issue's created_at and author.
    assert_eq!(events.len(), 1);
    let created = &events[0];
    assert!(matches!(created.event_type, TimelineEventType::Created));
    assert_eq!(created.timestamp, 1000);
    assert_eq!(created.actor, Some("alice".to_owned()));
    assert!(created.is_seed);
}
#[test]
fn test_collect_state_events() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    insert_state_event(&db, project, Some(issue), None, "closed", 3000);
    insert_state_event(&db, project, Some(issue), None, "reopened", 4000);
    let seeds = vec![make_entity_ref("issue", issue, 1)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], None, 100).unwrap();
    // Created + 2 state changes = 3
    assert_eq!(events.len(), 3);
    assert!(matches!(events[0].event_type, TimelineEventType::Created));
    // State changes follow in chronological order.
    for (event, expected) in events[1..].iter().zip(["closed", "reopened"]) {
        assert!(matches!(
            &event.event_type,
            TimelineEventType::StateChanged { state } if state == expected
        ));
    }
}
#[test]
fn test_collect_merged_dedup() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let mr = insert_mr(&db, project, 10, Some(5000));
    // Also add a state event for 'merged' — this should NOT produce a StateChanged
    insert_state_event(&db, project, None, Some(mr), "merged", 5000);
    let seeds = vec![make_entity_ref("merge_request", mr, 10)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], None, 100).unwrap();
    // Should have Created + Merged (not Created + StateChanged{merged} + Merged)
    let (mut merged, mut state_merged) = (0usize, 0usize);
    for event in &events {
        match &event.event_type {
            TimelineEventType::Merged => merged += 1,
            TimelineEventType::StateChanged { state } if state == "merged" => state_merged += 1,
            _ => {}
        }
    }
    assert_eq!(merged, 1);
    assert_eq!(state_merged, 0);
}
#[test]
fn test_collect_null_label_fallback() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    // NULL label_name should surface as the "[deleted label]" placeholder.
    insert_label_event(&db, project, Some(issue), None, "add", None, 2000);
    let seeds = vec![make_entity_ref("issue", issue, 1)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], None, 100).unwrap();
    let found = events.iter().any(|e| {
        matches!(&e.event_type, TimelineEventType::LabelAdded { label } if label == "[deleted label]")
    });
    assert!(found);
}
#[test]
fn test_collect_null_milestone_fallback() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    // NULL milestone_title should surface as the "[deleted milestone]" placeholder.
    insert_milestone_event(&db, project, Some(issue), None, "add", None, 2000);
    let seeds = vec![make_entity_ref("issue", issue, 1)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], None, 100).unwrap();
    let found = events.iter().any(|e| {
        matches!(&e.event_type, TimelineEventType::MilestoneSet { milestone } if milestone == "[deleted milestone]")
    });
    assert!(found);
}
#[test]
fn test_collect_since_filter() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    insert_state_event(&db, project, Some(issue), None, "closed", 3000);
    insert_state_event(&db, project, Some(issue), None, "reopened", 5000);
    let seeds = vec![make_entity_ref("issue", issue, 1)];
    // Since 4000: should exclude Created (1000) and closed (3000)
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], Some(4000), 100).unwrap();
    assert_eq!(events.len(), 1);
    let only = &events[0];
    assert_eq!(only.timestamp, 5000);
}
#[test]
fn test_collect_chronological_sort() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let mr = insert_mr(&db, project, 10, Some(4000));
    insert_state_event(&db, project, Some(issue), None, "closed", 3000);
    insert_label_event(&db, project, None, Some(mr), "add", Some("bug"), 2000);
    let seeds = vec![
        make_entity_ref("issue", issue, 1),
        make_entity_ref("merge_request", mr, 10),
    ];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], None, 100).unwrap();
    // Verify chronological order across both entities' events.
    let timestamps: Vec<i64> = events.iter().map(|e| e.timestamp).collect();
    for pair in timestamps.windows(2) {
        assert!(pair[0] <= pair[1]);
    }
}
#[test]
fn test_collect_respects_limit() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    // Spread 20 state events 100 apart starting at t=3000.
    for offset in (0..20).map(|i| i * 100) {
        insert_state_event(&db, project, Some(issue), None, "closed", 3000 + offset);
    }
    let seeds = vec![make_entity_ref("issue", issue, 1)];
    let (events, total) = collect_events(&db, &seeds, &[], &[], &[], None, 5).unwrap();
    assert_eq!(events.len(), 5);
    // 20 state changes + 1 created = 21 total before limit
    assert_eq!(total, 21);
}
#[test]
fn test_collect_evidence_notes_included() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    // Pre-built evidence event passed straight into collect_events.
    let evidence_event = TimelineEvent {
        timestamp: 2500,
        entity_type: "issue".to_owned(),
        entity_id: issue,
        entity_iid: 1,
        project_path: "group/project".to_owned(),
        event_type: TimelineEventType::NoteEvidence {
            note_id: 42,
            snippet: "relevant note".to_owned(),
            discussion_id: Some(1),
        },
        summary: "Note by alice".to_owned(),
        actor: Some("alice".to_owned()),
        url: None,
        is_seed: true,
    };
    let evidence = vec![evidence_event];
    let seeds = vec![make_entity_ref("issue", issue, 1)];
    let (events, _) = collect_events(&db, &seeds, &[], &evidence, &[], None, 100).unwrap();
    // The evidence note must survive merging into the final event list.
    let found = events.iter().any(|e| {
        matches!(
            &e.event_type,
            TimelineEventType::NoteEvidence { note_id, .. } if *note_id == 42
        )
    });
    assert!(found);
}
#[test]
fn test_collect_merged_fallback_to_state_event() {
    let db = setup_test_db();
    let project = insert_project(&db);
    // MR with merged_at = NULL
    let mr = insert_mr(&db, project, 10, None);
    // But has a state event for 'merged'
    insert_state_event(&db, project, None, Some(mr), "merged", 5000);
    let seeds = vec![make_entity_ref("merge_request", mr, 10)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &[], None, 100).unwrap();
    // The merged state event should back-fill the Merged timeline event.
    let merged = events
        .iter()
        .find(|e| matches!(e.event_type, TimelineEventType::Merged))
        .map(|e| e.timestamp);
    assert_eq!(merged, Some(5000));
}
// ─── Discussion thread tests ────────────────────────────────────────────────
/// Inserts a discussion row attached to either an issue or a merge request,
/// with a random gitlab_discussion_id; returns the rowid.
fn insert_discussion(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
) -> i64 {
    // Noteable type follows whichever parent id is supplied.
    let noteable_type = match issue_id {
        Some(_) => "Issue",
        None => "MergeRequest",
    };
    conn.execute(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, merge_request_id, noteable_type, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, 0)",
        rusqlite::params![format!("disc_{}", rand::random::<u32>()), project_id, issue_id, mr_id, noteable_type],
    )
    .unwrap();
    conn.last_insert_rowid()
}
/// Inserts a note on `discussion_id` with a random synthetic gitlab_id;
/// created/updated/last_seen timestamps all share `created_at`.
#[allow(clippy::too_many_arguments)]
fn insert_note(
    conn: &Connection,
    discussion_id: i64,
    project_id: i64,
    author: &str,
    body: &str,
    is_system: bool,
    created_at: i64,
) -> i64 {
    let note_gid = i64::from(rand::random::<u32>());
    let system_flag = i32::from(is_system);
    conn.execute(
        "INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, author_username, body, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?7, ?7)",
        rusqlite::params![note_gid, discussion_id, project_id, system_flag, author, body, created_at],
    )
    .unwrap();
    conn.last_insert_rowid()
}
/// Builds a `MatchedDiscussion` linking a discussion to its parent entity.
fn make_matched_discussion(
    discussion_id: i64,
    entity_type: &str,
    entity_id: i64,
    project_id: i64,
) -> MatchedDiscussion {
    MatchedDiscussion {
        discussion_id,
        entity_type: String::from(entity_type),
        entity_id,
        project_id,
    }
}
#[test]
fn test_collect_discussion_thread_basic() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    insert_note(&db, disc, project, "alice", "First note", false, 2000);
    insert_note(&db, disc, project, "bob", "Reply here", false, 3000);
    insert_note(&db, disc, project, "alice", "Follow up", false, 4000);
    let seeds = [make_entity_ref("issue", issue, 1)];
    let discussions = [make_matched_discussion(disc, "issue", issue, project)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    let thread = events
        .iter()
        .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. }))
        .expect("Should have a DiscussionThread event");
    match &thread.event_type {
        TimelineEventType::DiscussionThread {
            discussion_id,
            notes,
        } => {
            // All three user notes appear, chronologically ordered.
            assert_eq!(*discussion_id, disc);
            assert_eq!(notes.len(), 3);
            assert_eq!(notes[0].author.as_deref(), Some("alice"));
            assert_eq!(notes[0].body, "First note");
            assert_eq!(notes[1].author.as_deref(), Some("bob"));
            assert_eq!(notes[2].body, "Follow up");
        }
        _ => panic!("Expected DiscussionThread variant"),
    }
}
#[test]
fn test_collect_discussion_thread_skips_system_notes() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    insert_note(&db, disc, project, "alice", "User note", false, 2000);
    // System note sandwiched between user notes must be dropped.
    insert_note(&db, disc, project, "system", "added label ~bug", true, 3000);
    insert_note(&db, disc, project, "bob", "Another user note", false, 4000);
    let seeds = [make_entity_ref("issue", issue, 1)];
    let discussions = [make_matched_discussion(disc, "issue", issue, project)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    let thread = events
        .iter()
        .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. }))
        .expect("Expected DiscussionThread");
    match &thread.event_type {
        TimelineEventType::DiscussionThread { notes, .. } => {
            assert_eq!(notes.len(), 2, "System notes should be filtered out");
            assert_eq!(notes[0].body, "User note");
            assert_eq!(notes[1].body, "Another user note");
        }
        _ => panic!("Expected DiscussionThread"),
    }
}
#[test]
fn test_collect_discussion_thread_empty_after_system_filter() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    // Only system notes
    insert_note(&db, disc, project, "system", "added label", true, 2000);
    insert_note(&db, disc, project, "system", "removed label", true, 3000);
    let seeds = [make_entity_ref("issue", issue, 1)];
    let discussions = [make_matched_discussion(disc, "issue", issue, project)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    let has_thread = events
        .iter()
        .any(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. }));
    assert!(
        !has_thread,
        "All-system-note discussion should produce no thread"
    );
}
#[test]
fn test_collect_discussion_thread_body_truncation() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    // 10k-char body comfortably exceeds the per-note cap.
    let oversized = "x".repeat(10_000);
    insert_note(&db, disc, project, "alice", &oversized, false, 2000);
    let seeds = [make_entity_ref("issue", issue, 1)];
    let discussions = [make_matched_discussion(disc, "issue", issue, project)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    let thread = events
        .iter()
        .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. }))
        .unwrap();
    match &thread.event_type {
        TimelineEventType::DiscussionThread { notes, .. } => assert!(
            notes[0].body.chars().count() <= crate::core::timeline::THREAD_NOTE_MAX_CHARS,
            "Body should be truncated to THREAD_NOTE_MAX_CHARS"
        ),
        _ => panic!("Expected DiscussionThread"),
    }
}
#[test]
fn test_collect_discussion_thread_note_cap() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    // Insert 60 notes, exceeding THREAD_MAX_NOTES (50)
    for i in 0..60i64 {
        let body = format!("Note {i}");
        insert_note(&db, disc, project, "alice", &body, false, 2000 + i * 100);
    }
    let seeds = [make_entity_ref("issue", issue, 1)];
    let discussions = [make_matched_discussion(disc, "issue", issue, project)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    let thread = events
        .iter()
        .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. }))
        .unwrap();
    match &thread.event_type {
        TimelineEventType::DiscussionThread { notes, .. } => {
            // 50 notes + 1 synthetic summary = 51
            assert_eq!(
                notes.len(),
                crate::core::timeline::THREAD_MAX_NOTES + 1,
                "Should cap at THREAD_MAX_NOTES + synthetic summary"
            );
            let summary = notes.last().unwrap();
            assert!(summary.body.contains("more notes not shown"));
        }
        _ => panic!("Expected DiscussionThread"),
    }
}
#[test]
fn test_collect_discussion_thread_timestamp_is_first_note() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    insert_note(&db, disc, project, "alice", "First", false, 5000);
    insert_note(&db, disc, project, "bob", "Second", false, 8000);
    let seeds = [make_entity_ref("issue", issue, 1)];
    let discussions = [make_matched_discussion(disc, "issue", issue, project)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    let thread = events
        .iter()
        .find(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. }))
        .unwrap();
    assert_eq!(
        thread.timestamp, 5000,
        "Thread timestamp should be first note's created_at"
    );
}
#[test]
fn test_collect_discussion_thread_sort_position() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    // Note at t=2000 (between Created at t=1000 and state change at t=3000)
    insert_note(&db, disc, project, "alice", "discussion", false, 2000);
    insert_state_event(&db, project, Some(issue), None, "closed", 3000);
    let seeds = [make_entity_ref("issue", issue, 1)];
    let discussions = [make_matched_discussion(disc, "issue", issue, project)];
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    // Expected order: Created(1000), DiscussionThread(2000), StateChanged(3000)
    assert!(events.len() >= 3);
    let kinds_ok = matches!(events[0].event_type, TimelineEventType::Created)
        && matches!(
            events[1].event_type,
            TimelineEventType::DiscussionThread { .. }
        )
        && matches!(
            events[2].event_type,
            TimelineEventType::StateChanged { .. }
        );
    assert!(kinds_ok);
}
#[test]
fn test_collect_discussion_thread_dedup() {
    let db = setup_test_db();
    let project = insert_project(&db);
    let issue = insert_issue(&db, project, 1);
    let disc = insert_discussion(&db, project, Some(issue), None);
    insert_note(&db, disc, project, "alice", "hello", false, 2000);
    let seeds = [make_entity_ref("issue", issue, 1)];
    // Same discussion_id twice
    let discussions: Vec<_> = (0..2)
        .map(|_| make_matched_discussion(disc, "issue", issue, project))
        .collect();
    let (events, _) = collect_events(&db, &seeds, &[], &[], &discussions, None, 100).unwrap();
    let thread_count = events
        .iter()
        .filter(|e| matches!(&e.event_type, TimelineEventType::DiscussionThread { .. }))
        .count();
    assert_eq!(
        thread_count, 1,
        "Duplicate discussion_id should produce one thread"
    );
}