Files
gitlore/tests/timeline_pipeline_tests.rs
teernisse 2da1a228b3 feat(timeline): collect and render full discussion threads
Implements the downstream consumption of matched discussions from the seed
phase, completing the discussion thread feature across collect, CLI, and
integration tests.

Collect phase (timeline_collect.rs):
- New collect_discussion_threads() function assembles full threads by
  querying notes for each matched discussion_id, filtering out system notes
  (is_system = 0), ordering chronologically, and capping at THREAD_MAX_NOTES
  with a synthetic "[N more notes not shown]" summary note
- build_entity_lookup() creates a (type, id) -> (iid, path) map from seed
  and expanded entities to provide display metadata for thread events
- Thread timestamp is set to the first note's created_at for correct
  chronological interleaving with other timeline events
- collect_events() gains a matched_discussions parameter; threads are
  collected after entity events and before evidence note merging

CLI rendering (cli/commands/timeline.rs):
- Human mode: threads render with box-drawing borders, bold @author tags,
  date-stamped notes, and word-wrapped bodies (60 char width)
- Robot mode: DiscussionThread serializes as discussion_thread kind with
  note_count, full notes array (note_id, author, body, ISO created_at)
- THREAD tag in yellow for human event tag styling
- TimelineMeta gains discussion_threads_included count

Tests:
- 8 new collect tests: basic thread assembly, system note filtering, empty
  thread skipping, body truncation to THREAD_NOTE_MAX_CHARS, note cap with
  synthetic summary, timestamp from first note, chronological sort position,
  and deduplication of duplicate discussion_ids
- Integration tests updated for new collect_events signature

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 14:18:36 -05:00

366 lines
12 KiB
Rust

use lore::core::db::{create_connection, run_migrations};
use lore::core::timeline::{TimelineEventType, resolve_entity_ref};
use lore::core::timeline_collect::collect_events;
use lore::core::timeline_expand::expand_timeline;
use lore::core::timeline_seed::seed_timeline;
use rusqlite::Connection;
use std::path::Path;
/// Create an in-memory SQLite database with all migrations applied.
///
/// Each test gets its own isolated connection, so tests may run in
/// parallel without sharing any state.
fn setup_db() -> Connection {
let conn = create_connection(Path::new(":memory:")).unwrap();
run_migrations(&conn).unwrap();
conn
}
/// Insert a project row with a synthetic gitlab.com web URL and return
/// its local rowid. The GitLab-side project id is always 1.
fn insert_project(conn: &Connection, path: &str) -> i64 {
    let web_url = format!("https://gitlab.com/{path}");
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (?1, ?2, ?3)",
        rusqlite::params![1, path, web_url],
    )
    .unwrap();
    conn.last_insert_rowid()
}
/// Insert an issue (state 'opened', author 'alice', fixed timestamps
/// 1000/2000/3000) and return its local rowid.
///
/// The GitLab-side id is derived as `iid * 100` so distinct iids never
/// collide within a test.
fn insert_issue(conn: &Connection, project_id: i64, iid: i64, title: &str) -> i64 {
    let web_url = format!("https://gitlab.com/g/p/-/issues/{iid}");
    conn.execute(
        "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, web_url) VALUES (?1, ?2, ?3, ?4, 'opened', 'alice', 1000, 2000, 3000, ?5)",
        rusqlite::params![iid * 100, project_id, iid, title, web_url],
    )
    .unwrap();
    conn.last_insert_rowid()
}
/// Insert a merge request (state 'merged', author 'bob', merged by
/// 'charlie', created at 1500) and return its local rowid.
///
/// `merged_at` is optional so callers can model unmerged MRs; the
/// GitLab-side id is derived as `iid * 100`.
fn insert_mr(
    conn: &Connection,
    project_id: i64,
    iid: i64,
    title: &str,
    merged_at: Option<i64>,
) -> i64 {
    let web_url = format!("https://gitlab.com/g/p/-/merge_requests/{iid}");
    conn.execute(
        "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, merged_at, merge_user_username, web_url) VALUES (?1, ?2, ?3, ?4, 'merged', 'bob', 1500, 5000, 6000, ?5, 'charlie', ?6)",
        rusqlite::params![iid * 100, project_id, iid, title, merged_at, web_url],
    )
    .unwrap();
    conn.last_insert_rowid()
}
/// Insert an FTS-indexed document for an entity so seed_timeline can
/// match it by full-text query.
///
/// The content hash is derived from `source_id`, giving each
/// (source_type, source_id) pair a distinct, stable hash.
fn insert_document(
    conn: &Connection,
    source_type: &str,
    source_id: i64,
    project_id: i64,
    content: &str,
) {
    let content_hash = format!("hash_{source_id}");
    conn.execute(
        "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) VALUES (?1, ?2, ?3, ?4, ?5)",
        rusqlite::params![source_type, source_id, project_id, content, content_hash],
    )
    .unwrap();
}
/// Record a resolved entity reference (source -> target) attributed to
/// the 'api' source method at t=1000, for expand_timeline to discover.
fn insert_entity_ref(
    conn: &Connection,
    project_id: i64,
    source_type: &str,
    source_id: i64,
    target_type: &str,
    target_id: Option<i64>,
    ref_type: &str,
) {
    const SQL: &str = "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, reference_type, source_method, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, 'api', 1000)";
    conn.execute(
        SQL,
        rusqlite::params![project_id, source_type, source_id, target_type, target_id, ref_type],
    )
    .unwrap();
}
/// Insert a resource state event (closed/reopened/...) for an issue or
/// merge request, attributed to 'alice'.
///
/// `gitlab_id` must be unique per row. The previous implementation drew
/// `rand::random::<u32>()`, which is nondeterministic and can collide
/// across the many events a test run inserts, making tests flaky. A
/// process-wide atomic counter is deterministic and collision-free; it
/// starts high enough to stay clear of the hand-derived ids (`iid * 100`)
/// used elsewhere in this file.
fn insert_state_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    state: &str,
    created_at: i64,
) {
    static NEXT_GITLAB_ID: std::sync::atomic::AtomicI64 =
        std::sync::atomic::AtomicI64::new(10_000_000);
    // Relaxed is sufficient: we only need each fetch_add to yield a
    // distinct value, not any cross-thread ordering.
    let gitlab_id = NEXT_GITLAB_ID.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
    conn.execute(
        "INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, merge_request_id, state, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, 'alice', ?6)",
        rusqlite::params![gitlab_id, project_id, issue_id, mr_id, state, created_at],
    )
    .unwrap();
}
/// Insert an 'add' label event on an issue (merge_request_id is always
/// NULL here), attributed to 'alice'.
///
/// `gitlab_id` must be unique per row. The previous implementation drew
/// `rand::random::<u32>()`, which is nondeterministic and can collide,
/// making tests flaky. A process-wide atomic counter is deterministic
/// and collision-free; its base (20M) is disjoint from the state-event
/// counter and from the hand-derived ids used elsewhere in this file.
fn insert_label_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    label: &str,
    created_at: i64,
) {
    static NEXT_GITLAB_ID: std::sync::atomic::AtomicI64 =
        std::sync::atomic::AtomicI64::new(20_000_000);
    // Relaxed is sufficient: only per-call uniqueness matters.
    let gitlab_id = NEXT_GITLAB_ID.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
    conn.execute(
        "INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, merge_request_id, action, label_name, actor_username, created_at) VALUES (?1, ?2, ?3, NULL, 'add', ?4, 'alice', ?5)",
        rusqlite::params![gitlab_id, project_id, issue_id, label, created_at],
    )
    .unwrap();
}
/// Full pipeline: seed -> expand -> collect for a scenario with an issue
/// that has a closing MR, state changes, and label events.
///
/// Fixture shape: issue #5 is FTS-matched by the query "authentication";
/// MR !10 is linked to it via a 'closes' entity reference so expansion
/// can discover it; one state event (t=3000) and one label event
/// (t=1500) add extra timeline events. Insertion order matters below:
/// rowids returned by the insert helpers are reused as foreign keys.
#[tokio::test]
async fn pipeline_seed_expand_collect_end_to_end() {
let conn = setup_db();
let project_id = insert_project(&conn, "group/project");
// Issue #5: "authentication error in login"
let issue_id = insert_issue(&conn, project_id, 5, "Authentication error in login");
insert_document(
&conn,
"issue",
issue_id,
project_id,
"authentication error in login flow causing 401",
);
// MR !10 closes issue #5
let mr_id = insert_mr(&conn, project_id, 10, "Fix auth bug", Some(4000));
insert_document(
&conn,
"merge_request",
mr_id,
project_id,
"fix authentication error in login module",
);
// The 'closes' reference is what lets expand_timeline walk from the
// seeded issue to the MR.
insert_entity_ref(
&conn,
project_id,
"merge_request",
mr_id,
"issue",
Some(issue_id),
"closes",
);
// State changes on issue
insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000);
// Label added to issue
insert_label_event(&conn, project_id, Some(issue_id), "bug", 1500);
// SEED: find entities matching "authentication"
let seed_result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
.await
.unwrap();
assert!(
!seed_result.seed_entities.is_empty(),
"Seed should find at least one entity"
);
// Verify seeds contain the issue
let has_issue = seed_result
.seed_entities
.iter()
.any(|e| e.entity_type == "issue" && e.entity_iid == 5);
assert!(has_issue, "Seeds should include issue #5");
// EXPAND: discover related entities (MR !10 via closes reference)
let expand_result = expand_timeline(&conn, &seed_result.seed_entities, 1, false, 100).unwrap();
// The MR should appear as an expanded entity (or as a seed if it was also matched)
let total_entities = seed_result.seed_entities.len() + expand_result.expanded_entities.len();
assert!(total_entities >= 2, "Should have at least issue + MR");
// COLLECT: gather all events
let (events, _) = collect_events(
&conn,
&seed_result.seed_entities,
&expand_result.expanded_entities,
&seed_result.evidence_notes,
&seed_result.matched_discussions,
None,
1000,
)
.unwrap();
assert!(!events.is_empty(), "Should have events");
// Verify chronological ordering (collect's contract: ascending timestamps)
for window in events.windows(2) {
assert!(
window[0].timestamp <= window[1].timestamp,
"Events must be chronologically sorted: {} > {}",
window[0].timestamp,
window[1].timestamp
);
}
// Verify expected event types are present
let has_created = events
.iter()
.any(|e| matches!(e.event_type, TimelineEventType::Created));
let has_state_change = events
.iter()
.any(|e| matches!(e.event_type, TimelineEventType::StateChanged { .. }));
let has_label = events
.iter()
.any(|e| matches!(e.event_type, TimelineEventType::LabelAdded { .. }));
let has_merged = events
.iter()
.any(|e| matches!(e.event_type, TimelineEventType::Merged));
assert!(has_created, "Should have Created events");
assert!(has_state_change, "Should have StateChanged events");
assert!(has_label, "Should have LabelAdded events");
// Merged can only come from the expanded MR, proving expansion fed collect.
assert!(has_merged, "Should have Merged event from MR");
}
/// Verify the pipeline handles an empty FTS result gracefully.
#[tokio::test]
async fn pipeline_empty_query_produces_empty_result() {
    let conn = setup_db();
    let _project = insert_project(&conn, "group/project");

    // An empty query matches nothing at the seed phase...
    let seeds = seed_timeline(&conn, None, "", None, None, 50, 10)
        .await
        .unwrap();
    assert!(seeds.seed_entities.is_empty());

    // ...so expansion has nothing to walk...
    let expansion = expand_timeline(&conn, &seeds.seed_entities, 1, false, 100).unwrap();
    assert!(expansion.expanded_entities.is_empty());

    // ...and collection yields zero events rather than erroring.
    let (events, _) = collect_events(
        &conn,
        &seeds.seed_entities,
        &expansion.expanded_entities,
        &seeds.evidence_notes,
        &seeds.matched_discussions,
        None,
        1000,
    )
    .unwrap();
    assert!(events.is_empty());
}
/// Verify since filter propagates through the full pipeline.
#[tokio::test]
async fn pipeline_since_filter_excludes_old_events() {
    let conn = setup_db();
    let project_id = insert_project(&conn, "group/project");
    let issue_id = insert_issue(&conn, project_id, 1, "Deploy failure");
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "deploy failure in staging environment",
    );
    // Two state changes: one before the cutoff (2000), one after (8000).
    insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 2000);
    insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 8000);

    let seeds = seed_timeline(&conn, None, "deploy", None, None, 50, 10)
        .await
        .unwrap();
    let expansion = expand_timeline(&conn, &seeds.seed_entities, 0, false, 100).unwrap();

    // since=5000 must drop both the Created event (issue created_at=1000)
    // and the closed event (2000), leaving only the reopened event.
    let (events, _) = collect_events(
        &conn,
        &seeds.seed_entities,
        &expansion.expanded_entities,
        &seeds.evidence_notes,
        &seeds.matched_discussions,
        Some(5000),
        1000,
    )
    .unwrap();
    assert_eq!(events.len(), 1, "Only the reopened event should survive");
    assert_eq!(events[0].timestamp, 8000);
}
/// Verify unresolved references use Option<i64> for target_iid.
#[tokio::test]
async fn pipeline_unresolved_refs_have_optional_iid() {
    let conn = setup_db();
    let project_id = insert_project(&conn, "group/project");
    let issue_id = insert_issue(&conn, project_id, 1, "Cross-project reference");
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "cross project reference test",
    );
    // Two cross-project references that cannot be resolved locally: one
    // whose target iid was parsed from text (42), one with no iid at all.
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_project_path, target_entity_iid, reference_type, source_method, created_at) VALUES (?1, 'issue', ?2, 'issue', NULL, 'other/repo', 42, 'closes', 'description_parse', 1000)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_project_path, target_entity_iid, reference_type, source_method, created_at) VALUES (?1, 'issue', ?2, 'merge_request', NULL, 'other/repo', NULL, 'related', 'note_parse', 1000)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let seeds = seed_timeline(&conn, None, "cross project", None, None, 50, 10)
        .await
        .unwrap();
    let expansion = expand_timeline(&conn, &seeds.seed_entities, 1, false, 100).unwrap();

    // Both unresolved refs surface, each preserving its own iid-ness.
    assert_eq!(expansion.unresolved_references.len(), 2);
    for unresolved in &expansion.unresolved_references {
        if unresolved.target_type == "issue" {
            assert_eq!(unresolved.target_iid, Some(42));
        } else if unresolved.target_type == "merge_request" {
            assert_eq!(unresolved.target_iid, None);
        } else {
            panic!("unexpected unresolved target type: {}", unresolved.target_type);
        }
    }
}
/// Verify the shared resolve_entity_ref works with and without project scoping.
#[test]
fn shared_resolve_entity_ref_scoping() {
    let conn = setup_db();
    let project_id = insert_project(&conn, "group/project");
    let issue_id = insert_issue(&conn, project_id, 42, "Test issue");

    // Scoped to the owning project: resolves with display metadata.
    let entity = resolve_entity_ref(&conn, "issue", issue_id, Some(project_id))
        .unwrap()
        .expect("issue should resolve within its own project");
    assert_eq!(entity.entity_iid, 42);
    assert_eq!(entity.project_path, "group/project");

    // Unscoped lookup also succeeds.
    let unscoped = resolve_entity_ref(&conn, "issue", issue_id, None).unwrap();
    assert!(unscoped.is_some());

    // A mismatched project filter hides the entity.
    let wrong_project = resolve_entity_ref(&conn, "issue", issue_id, Some(9999)).unwrap();
    assert!(wrong_project.is_none());

    // Unknown entity types resolve to nothing rather than erroring.
    let unknown_type = resolve_entity_ref(&conn, "unknown_type", issue_id, None).unwrap();
    assert!(unknown_type.is_none());

    // Nonexistent ids resolve to nothing.
    let missing = resolve_entity_ref(&conn, "issue", 99999, None).unwrap();
    assert!(missing.is_none());
}