use rusqlite::Connection; use serde::Serialize; use crate::core::error::{LoreError, Result}; use crate::core::project::resolve_project; use crate::core::time::ms_to_iso; use crate::timeline::collect::collect_events; use crate::timeline::seed::seed_timeline_direct; // --------------------------------------------------------------------------- // Types // --------------------------------------------------------------------------- /// Parameters controlling explain behavior. pub struct ExplainParams { pub entity_type: String, pub iid: i64, pub project: Option, pub sections: Option>, pub no_timeline: bool, pub max_decisions: usize, pub since: Option, } #[derive(Debug, Serialize)] pub struct ExplainResult { pub entity: EntitySummary, #[serde(skip_serializing_if = "Option::is_none")] pub description_excerpt: Option, #[serde(skip_serializing_if = "Option::is_none")] pub key_decisions: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub activity: Option, #[serde(skip_serializing_if = "Option::is_none")] pub open_threads: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub related: Option, #[serde(skip_serializing_if = "Option::is_none")] pub timeline_excerpt: Option>, } #[derive(Debug, Serialize)] pub struct EntitySummary { #[serde(rename = "type")] pub entity_type: String, pub iid: i64, pub title: String, pub state: String, pub author: String, pub assignees: Vec, pub labels: Vec, pub created_at: String, pub updated_at: String, pub url: Option, pub status_name: Option, } #[derive(Debug, Serialize)] pub struct KeyDecision { pub timestamp: String, pub actor: String, pub action: String, pub context_note: String, } #[derive(Debug, Serialize)] pub struct ActivitySummary { pub state_changes: usize, pub label_changes: usize, pub notes: usize, pub first_event: Option, pub last_event: Option, } #[derive(Debug, Serialize)] pub struct OpenThread { pub discussion_id: String, #[serde(skip_serializing_if = "Option::is_none")] pub started_by: Option, pub started_at: 
String, pub note_count: usize, pub last_note_at: String, } #[derive(Debug, Serialize)] pub struct RelatedEntities { pub closing_mrs: Vec, pub related_issues: Vec, } #[derive(Debug, Serialize)] pub struct ClosingMrInfo { pub iid: i64, pub title: String, pub state: String, pub web_url: Option, } #[derive(Debug, Serialize)] pub struct RelatedEntityInfo { pub entity_type: String, pub iid: i64, pub title: Option, pub reference_type: String, } #[derive(Debug, Serialize)] pub struct TimelineEventSummary { pub timestamp: String, pub event_type: String, pub actor: Option, pub summary: String, } // --------------------------------------------------------------------------- // Section filtering helper // --------------------------------------------------------------------------- fn should_include(sections: &Option>, name: &str) -> bool { sections .as_ref() .is_none_or(|s| s.iter().any(|sec| sec == name)) } // --------------------------------------------------------------------------- // Entity resolution (copied from show/ patterns — private there) // --------------------------------------------------------------------------- struct ExplainIssueRow { id: i64, iid: i64, title: String, state: String, author_username: String, created_at: i64, updated_at: i64, web_url: Option, project_path: String, status_name: Option, } struct ExplainMrRow { id: i64, iid: i64, title: String, state: String, author_username: String, created_at: i64, updated_at: i64, web_url: Option, project_path: String, } fn find_explain_issue( conn: &Connection, iid: i64, project_filter: Option<&str>, ) -> Result<(EntitySummary, i64, String)> { let (sql, params): (&str, Vec>) = match project_filter { Some(project) => { let project_id = resolve_project(conn, project)?; ( "SELECT i.id, i.iid, i.title, i.state, i.author_username, i.created_at, i.updated_at, i.web_url, p.path_with_namespace, i.status_name FROM issues i JOIN projects p ON i.project_id = p.id WHERE i.iid = ? 
AND i.project_id = ?", vec![Box::new(iid), Box::new(project_id)], ) } None => ( "SELECT i.id, i.iid, i.title, i.state, i.author_username, i.created_at, i.updated_at, i.web_url, p.path_with_namespace, i.status_name FROM issues i JOIN projects p ON i.project_id = p.id WHERE i.iid = ?", vec![Box::new(iid)], ), }; let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect(); let mut stmt = conn.prepare(sql)?; let rows: Vec = stmt .query_map(param_refs.as_slice(), |row| { Ok(ExplainIssueRow { id: row.get(0)?, iid: row.get(1)?, title: row.get(2)?, state: row.get(3)?, author_username: row.get(4)?, created_at: row.get(5)?, updated_at: row.get(6)?, web_url: row.get(7)?, project_path: row.get(8)?, status_name: row.get(9)?, }) })? .collect::, _>>()?; match rows.len() { 0 => Err(LoreError::NotFound(format!("Issue #{iid} not found"))), 1 => { let r = rows.into_iter().next().unwrap(); let local_id = r.id; let project_path = r.project_path.clone(); let labels = get_issue_labels(conn, r.id)?; let assignees = get_issue_assignees(conn, r.id)?; let summary = EntitySummary { entity_type: "issue".to_string(), iid: r.iid, title: r.title, state: r.state, author: r.author_username, assignees, labels, created_at: ms_to_iso(r.created_at), updated_at: ms_to_iso(r.updated_at), url: r.web_url, status_name: r.status_name, }; Ok((summary, local_id, project_path)) } _ => { let projects: Vec = rows.iter().map(|r| r.project_path.clone()).collect(); Err(LoreError::Ambiguous(format!( "Issue #{iid} exists in multiple projects: {}. 
Use --project to specify.", projects.join(", ") ))) } } } fn find_explain_mr( conn: &Connection, iid: i64, project_filter: Option<&str>, ) -> Result<(EntitySummary, i64, String)> { let (sql, params): (&str, Vec>) = match project_filter { Some(project) => { let project_id = resolve_project(conn, project)?; ( "SELECT m.id, m.iid, m.title, m.state, m.author_username, m.created_at, m.updated_at, m.web_url, p.path_with_namespace FROM merge_requests m JOIN projects p ON m.project_id = p.id WHERE m.iid = ? AND m.project_id = ?", vec![Box::new(iid), Box::new(project_id)], ) } None => ( "SELECT m.id, m.iid, m.title, m.state, m.author_username, m.created_at, m.updated_at, m.web_url, p.path_with_namespace FROM merge_requests m JOIN projects p ON m.project_id = p.id WHERE m.iid = ?", vec![Box::new(iid)], ), }; let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect(); let mut stmt = conn.prepare(sql)?; let rows: Vec = stmt .query_map(param_refs.as_slice(), |row| { Ok(ExplainMrRow { id: row.get(0)?, iid: row.get(1)?, title: row.get(2)?, state: row.get(3)?, author_username: row.get(4)?, created_at: row.get(5)?, updated_at: row.get(6)?, web_url: row.get(7)?, project_path: row.get(8)?, }) })? 
.collect::, _>>()?; match rows.len() { 0 => Err(LoreError::NotFound(format!("MR !{iid} not found"))), 1 => { let r = rows.into_iter().next().unwrap(); let local_id = r.id; let project_path = r.project_path.clone(); let labels = get_mr_labels(conn, r.id)?; let assignees = get_mr_assignees(conn, r.id)?; let summary = EntitySummary { entity_type: "merge_request".to_string(), iid: r.iid, title: r.title, state: r.state, author: r.author_username, assignees, labels, created_at: ms_to_iso(r.created_at), updated_at: ms_to_iso(r.updated_at), url: r.web_url, status_name: None, }; Ok((summary, local_id, project_path)) } _ => { let projects: Vec = rows.iter().map(|r| r.project_path.clone()).collect(); Err(LoreError::Ambiguous(format!( "MR !{iid} exists in multiple projects: {}. Use --project to specify.", projects.join(", ") ))) } } } fn get_issue_labels(conn: &Connection, issue_id: i64) -> Result> { let mut stmt = conn.prepare( "SELECT l.name FROM labels l JOIN issue_labels il ON l.id = il.label_id WHERE il.issue_id = ? ORDER BY l.name", )?; let labels: Vec = stmt .query_map([issue_id], |row| row.get(0))? .collect::, _>>()?; Ok(labels) } fn get_issue_assignees(conn: &Connection, issue_id: i64) -> Result> { let mut stmt = conn.prepare( "SELECT username FROM issue_assignees WHERE issue_id = ? ORDER BY username", )?; let assignees: Vec = stmt .query_map([issue_id], |row| row.get(0))? .collect::, _>>()?; Ok(assignees) } fn get_mr_labels(conn: &Connection, mr_id: i64) -> Result> { let mut stmt = conn.prepare( "SELECT l.name FROM labels l JOIN mr_labels ml ON l.id = ml.label_id WHERE ml.merge_request_id = ? ORDER BY l.name", )?; let labels: Vec = stmt .query_map([mr_id], |row| row.get(0))? .collect::, _>>()?; Ok(labels) } fn get_mr_assignees(conn: &Connection, mr_id: i64) -> Result> { let mut stmt = conn.prepare( "SELECT username FROM mr_assignees WHERE merge_request_id = ? ORDER BY username", )?; let assignees: Vec = stmt .query_map([mr_id], |row| row.get(0))? 
.collect::, _>>()?; Ok(assignees) } // --------------------------------------------------------------------------- // Description excerpt helper // --------------------------------------------------------------------------- fn truncate_description(desc: Option<&str>, max_len: usize) -> String { match desc { None | Some("") => "(no description)".to_string(), Some(s) => { if s.len() <= max_len { s.to_string() } else { let boundary = s.floor_char_boundary(max_len); format!("{}...", &s[..boundary]) } } } } // --------------------------------------------------------------------------- // Core: run_explain // --------------------------------------------------------------------------- pub fn run_explain(conn: &Connection, params: &ExplainParams) -> Result { let project_filter = params.project.as_deref(); let (entity_summary, entity_local_id, _project_path, description) = if params.entity_type == "issues" { let (summary, local_id, path) = find_explain_issue(conn, params.iid, project_filter)?; let desc = get_issue_description(conn, local_id)?; (summary, local_id, path, desc) } else { let (summary, local_id, path) = find_explain_mr(conn, params.iid, project_filter)?; let desc = get_mr_description(conn, local_id)?; (summary, local_id, path, desc) }; let description_excerpt = if should_include(¶ms.sections, "description") { Some(truncate_description(description.as_deref(), 500)) } else { None }; let key_decisions = if should_include(¶ms.sections, "key_decisions") { Some(extract_key_decisions( conn, ¶ms.entity_type, entity_local_id, params.since, params.max_decisions, )?) } else { None }; let activity = if should_include(¶ms.sections, "activity") { Some(build_activity_summary( conn, ¶ms.entity_type, entity_local_id, params.since, )?) } else { None }; let open_threads = if should_include(¶ms.sections, "open_threads") { Some(fetch_open_threads( conn, ¶ms.entity_type, entity_local_id, )?) 
} else { None }; let related = if should_include(¶ms.sections, "related") { Some(fetch_related_entities( conn, ¶ms.entity_type, entity_local_id, )?) } else { None }; let timeline_excerpt = if !params.no_timeline && should_include(¶ms.sections, "timeline") { build_timeline_excerpt_from_pipeline(conn, &entity_summary, params) } else { None }; Ok(ExplainResult { entity: entity_summary, description_excerpt, key_decisions, activity, open_threads, related, timeline_excerpt, }) } fn get_issue_description(conn: &Connection, issue_id: i64) -> Result> { let desc: Option = conn.query_row( "SELECT description FROM issues WHERE id = ?", [issue_id], |row| row.get(0), )?; Ok(desc) } fn get_mr_description(conn: &Connection, mr_id: i64) -> Result> { let desc: Option = conn.query_row( "SELECT description FROM merge_requests WHERE id = ?", [mr_id], |row| row.get(0), )?; Ok(desc) } // --------------------------------------------------------------------------- // Key-decisions heuristic (Task 2) // --------------------------------------------------------------------------- struct UnifiedEvent { created_at: i64, actor: String, description: String, } struct NoteRow { body: String, author: String, created_at: i64, } /// 60 minutes in milliseconds — the correlation window for matching /// a non-system note to a preceding state/label event by the same actor. const DECISION_WINDOW_MS: i64 = 60 * 60 * 1000; /// Maximum length (in bytes, snapped to a char boundary) for the /// `context_note` field in a `KeyDecision`. 
const NOTE_TRUNCATE_LEN: usize = 500; fn truncate_note(text: &str, max_len: usize) -> String { if text.len() <= max_len { text.to_string() } else { let boundary = text.floor_char_boundary(max_len); format!("{}...", &text[..boundary]) } } fn id_column_for(entity_type: &str) -> &'static str { if entity_type == "issues" { "issue_id" } else { "merge_request_id" } } fn query_state_events( conn: &Connection, entity_type: &str, entity_id: i64, since: Option, ) -> Result> { let id_col = id_column_for(entity_type); let sql = format!( "SELECT state, actor_username, created_at \ FROM resource_state_events \ WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) \ ORDER BY created_at" ); let mut stmt = conn.prepare(&sql)?; let rows = stmt .query_map(rusqlite::params![entity_id, since], |row| { let state: String = row.get(0)?; let actor: Option = row.get(1)?; let created_at: i64 = row.get(2)?; Ok(UnifiedEvent { created_at, actor: actor.unwrap_or_default(), description: format!("state: {state}"), }) })? .collect::, _>>()?; Ok(rows) } fn query_label_events( conn: &Connection, entity_type: &str, entity_id: i64, since: Option, ) -> Result> { let id_col = id_column_for(entity_type); let sql = format!( "SELECT action, label_name, actor_username, created_at \ FROM resource_label_events \ WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) \ ORDER BY created_at" ); let mut stmt = conn.prepare(&sql)?; let rows = stmt .query_map(rusqlite::params![entity_id, since], |row| { let action: String = row.get(0)?; let label_name: Option = row.get(1)?; let actor: Option = row.get(2)?; let created_at: i64 = row.get(3)?; let prefix = if action == "add" { "+" } else { "-" }; let label = label_name.unwrap_or_else(|| "(unknown)".to_string()); Ok(UnifiedEvent { created_at, actor: actor.unwrap_or_default(), description: format!("label: {prefix}{label}"), }) })? 
.collect::, _>>()?; Ok(rows) } fn query_non_system_notes( conn: &Connection, entity_type: &str, entity_id: i64, since: Option, ) -> Result> { let id_col = id_column_for(entity_type); let sql = format!( "SELECT n.body, n.author_username, n.created_at \ FROM notes n \ JOIN discussions d ON n.discussion_id = d.id \ WHERE d.{id_col} = ?1 AND n.is_system = 0 \ AND (?2 IS NULL OR n.created_at >= ?2) \ ORDER BY n.created_at" ); let mut stmt = conn.prepare(&sql)?; let rows = stmt .query_map(rusqlite::params![entity_id, since], |row| { Ok(NoteRow { body: row.get::<_, Option>(0)?.unwrap_or_default(), author: row.get::<_, Option>(1)?.unwrap_or_default(), created_at: row.get(2)?, }) })? .collect::, _>>()?; Ok(rows) } /// Extract key decisions by correlating state/label events with /// explanatory notes by the same actor within a 60-minute window. pub fn extract_key_decisions( conn: &Connection, entity_type: &str, entity_id: i64, since: Option, max_decisions: usize, ) -> Result> { let mut events = query_state_events(conn, entity_type, entity_id, since)?; let mut label_events = query_label_events(conn, entity_type, entity_id, since)?; events.append(&mut label_events); events.sort_by_key(|e| e.created_at); let notes = query_non_system_notes(conn, entity_type, entity_id, since)?; let mut decisions = Vec::new(); let mut used_notes: Vec = vec![false; notes.len()]; for event in &events { if decisions.len() >= max_decisions { break; } // Find the FIRST unconsumed non-system note by the SAME actor within 60 minutes // AFTER the event. Each note is used at most once to avoid duplicate decisions. 
let matching = notes.iter().enumerate().find(|(i, n)| { !used_notes[*i] && n.author == event.actor && n.created_at >= event.created_at && n.created_at <= event.created_at + DECISION_WINDOW_MS }); if let Some((idx, note)) = matching { used_notes[idx] = true; decisions.push(KeyDecision { timestamp: ms_to_iso(event.created_at), actor: event.actor.clone(), action: event.description.clone(), context_note: truncate_note(¬e.body, NOTE_TRUNCATE_LEN), }); } } Ok(decisions) } // --------------------------------------------------------------------------- // Activity summary (Task 3) // --------------------------------------------------------------------------- fn build_activity_summary( conn: &Connection, entity_type: &str, entity_id: i64, since: Option, ) -> Result { let id_col = id_column_for(entity_type); let state_sql = format!( "SELECT COUNT(*), MIN(created_at), MAX(created_at) \ FROM resource_state_events \ WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2)" ); let (state_count, state_min, state_max): (i64, Option, Option) = conn.query_row(&state_sql, rusqlite::params![entity_id, since], |row| { Ok((row.get(0)?, row.get(1)?, row.get(2)?)) })?; let state_changes = state_count as usize; let label_sql = format!( "SELECT COUNT(*), MIN(created_at), MAX(created_at) \ FROM resource_label_events \ WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2)" ); let (label_count, label_min, label_max): (i64, Option, Option) = conn.query_row(&label_sql, rusqlite::params![entity_id, since], |row| { Ok((row.get(0)?, row.get(1)?, row.get(2)?)) })?; let label_changes = label_count as usize; let notes_sql = format!( "SELECT COUNT(*), MIN(n.created_at), MAX(n.created_at) \ FROM notes n \ JOIN discussions d ON n.discussion_id = d.id \ WHERE d.{id_col} = ?1 AND n.is_system = 0 \ AND (?2 IS NULL OR n.created_at >= ?2)" ); let (notes_count, note_min, note_max): (i64, Option, Option) = conn.query_row(¬es_sql, rusqlite::params![entity_id, since], |row| { Ok((row.get(0)?, row.get(1)?, 
row.get(2)?)) })?; let notes = notes_count as usize; let first_event = [state_min, label_min, note_min] .iter() .copied() .flatten() .min(); let last_event = [state_max, label_max, note_max] .iter() .copied() .flatten() .max(); Ok(ActivitySummary { state_changes, label_changes, notes, first_event: first_event.map(ms_to_iso), last_event: last_event.map(ms_to_iso), }) } // --------------------------------------------------------------------------- // Open threads (Task 3) // --------------------------------------------------------------------------- fn fetch_open_threads( conn: &Connection, entity_type: &str, entity_id: i64, ) -> Result> { let id_col = id_column_for(entity_type); // Single query with scalar subqueries — avoids N+1. let sql = format!( "SELECT d.gitlab_discussion_id, d.first_note_at, d.last_note_at, \ (SELECT COUNT(*) FROM notes n2 \ WHERE n2.discussion_id = d.id AND n2.is_system = 0) AS note_count, \ (SELECT n3.author_username FROM notes n3 \ WHERE n3.discussion_id = d.id \ ORDER BY n3.created_at ASC LIMIT 1) AS started_by \ FROM discussions d \ WHERE d.{id_col} = ?1 \ AND d.resolvable = 1 \ AND d.resolved = 0 \ ORDER BY d.last_note_at DESC" ); let mut stmt = conn.prepare(&sql)?; let threads = stmt .query_map([entity_id], |row| { let count: i64 = row.get(3)?; Ok(OpenThread { discussion_id: row.get(0)?, started_at: ms_to_iso(row.get::<_, i64>(1)?), last_note_at: ms_to_iso(row.get::<_, i64>(2)?), note_count: count as usize, started_by: row.get(4)?, }) })? .collect::, _>>()?; Ok(threads) } // --------------------------------------------------------------------------- // Related entities (Task 3) // --------------------------------------------------------------------------- /// Maps plural entity_type to the entity_references column value. 
fn ref_entity_type(entity_type: &str) -> &str { match entity_type { "issues" => "issue", "mrs" => "merge_request", _ => entity_type, } } fn fetch_related_entities( conn: &Connection, entity_type: &str, entity_id: i64, ) -> Result { let ref_type = ref_entity_type(entity_type); // Closing MRs (only for issues) let closing_mrs = if entity_type == "issues" { let mut stmt = conn.prepare( "SELECT mr.iid, mr.title, mr.state, mr.web_url \ FROM entity_references er \ JOIN merge_requests mr ON mr.id = er.source_entity_id \ WHERE er.target_entity_type = 'issue' \ AND er.target_entity_id = ?1 \ AND er.source_entity_type = 'merge_request' \ AND er.reference_type = 'closes' \ ORDER BY mr.iid", )?; stmt.query_map([entity_id], |row| { Ok(ClosingMrInfo { iid: row.get(0)?, title: row.get(1)?, state: row.get(2)?, web_url: row.get(3)?, }) })? .collect::, _>>()? } else { vec![] }; // Outgoing references (excluding closes, shown above). // Filter out unresolved refs (NULL target_entity_iid) to avoid rusqlite type errors. let mut out_stmt = conn.prepare( "SELECT er.target_entity_type, er.target_entity_iid, er.reference_type, \ COALESCE(i.title, mr.title) as title \ FROM entity_references er \ LEFT JOIN issues i ON er.target_entity_type = 'issue' AND i.id = er.target_entity_id \ LEFT JOIN merge_requests mr ON er.target_entity_type = 'merge_request' AND mr.id = er.target_entity_id \ WHERE er.source_entity_type = ?1 AND er.source_entity_id = ?2 \ AND er.reference_type != 'closes' \ AND er.target_entity_iid IS NOT NULL \ ORDER BY er.target_entity_type, er.target_entity_iid", )?; let outgoing: Vec = out_stmt .query_map(rusqlite::params![ref_type, entity_id], |row| { Ok(RelatedEntityInfo { entity_type: row.get(0)?, iid: row.get(1)?, reference_type: row.get(2)?, title: row.get(3)?, }) })? .collect::, _>>()?; // Incoming references (excluding closes). // COALESCE(i.iid, mr.iid) can be NULL if the source entity was deleted; filter those out. 
let mut in_stmt = conn.prepare( "SELECT er.source_entity_type, COALESCE(i.iid, mr.iid) as iid, er.reference_type, \ COALESCE(i.title, mr.title) as title \ FROM entity_references er \ LEFT JOIN issues i ON er.source_entity_type = 'issue' AND i.id = er.source_entity_id \ LEFT JOIN merge_requests mr ON er.source_entity_type = 'merge_request' AND mr.id = er.source_entity_id \ WHERE er.target_entity_type = ?1 AND er.target_entity_id = ?2 \ AND er.reference_type != 'closes' \ AND COALESCE(i.iid, mr.iid) IS NOT NULL \ ORDER BY er.source_entity_type, COALESCE(i.iid, mr.iid)", )?; let incoming: Vec = in_stmt .query_map(rusqlite::params![ref_type, entity_id], |row| { Ok(RelatedEntityInfo { entity_type: row.get(0)?, iid: row.get(1)?, reference_type: row.get(2)?, title: row.get(3)?, }) })? .collect::, _>>()?; let mut related_issues = outgoing; related_issues.extend(incoming); Ok(RelatedEntities { closing_mrs, related_issues, }) } // --------------------------------------------------------------------------- // Timeline excerpt (Task 4) // --------------------------------------------------------------------------- /// Maximum events in the timeline excerpt. const MAX_TIMELINE_EVENTS: usize = 20; /// Build a timeline excerpt by calling `seed_timeline_direct` + `collect_events`. /// Returns `None` on pipeline errors (timeline is supplementary, not critical). 
fn build_timeline_excerpt_from_pipeline( conn: &Connection, entity: &EntitySummary, params: &ExplainParams, ) -> Option> { let timeline_entity_type = match entity.entity_type.as_str() { "issue" => "issue", "merge_request" => "merge_request", _ => return Some(vec![]), }; let project_id = params .project .as_deref() .and_then(|p| resolve_project(conn, p).ok()); let seed_result = match seed_timeline_direct(conn, timeline_entity_type, params.iid, project_id) { Ok(result) => result, Err(e) => { tracing::warn!("explain: timeline seed failed: {e}"); return Some(vec![]); } }; let (mut events, _total) = match collect_events( conn, &seed_result.seed_entities, &[], &seed_result.evidence_notes, &seed_result.matched_discussions, params.since, MAX_TIMELINE_EVENTS, ) { Ok(result) => result, Err(e) => { tracing::warn!("explain: timeline collect failed: {e}"); return Some(vec![]); } }; events.truncate(MAX_TIMELINE_EVENTS); let summaries = events .iter() .map(|e| TimelineEventSummary { timestamp: ms_to_iso(e.timestamp), event_type: timeline_event_type_label(&e.event_type), actor: e.actor.clone(), summary: e.summary.clone(), }) .collect(); Some(summaries) } fn timeline_event_type_label(event_type: &crate::timeline::TimelineEventType) -> String { use crate::timeline::TimelineEventType; match event_type { TimelineEventType::Created => "created".to_string(), TimelineEventType::StateChanged { state } => format!("state_changed:{state}"), TimelineEventType::LabelAdded { label } => format!("label_added:{label}"), TimelineEventType::LabelRemoved { label } => format!("label_removed:{label}"), TimelineEventType::MilestoneSet { milestone } => format!("milestone_set:{milestone}"), TimelineEventType::MilestoneRemoved { milestone } => { format!("milestone_removed:{milestone}") } TimelineEventType::Merged => "merged".to_string(), TimelineEventType::NoteEvidence { .. } => "note_evidence".to_string(), TimelineEventType::DiscussionThread { .. 
} => "discussion_thread".to_string(), TimelineEventType::CrossReferenced { .. } => "cross_referenced".to_string(), } } // --------------------------------------------------------------------------- // Handler (called from main.rs) // --------------------------------------------------------------------------- #[allow(clippy::too_many_arguments)] pub fn handle_explain( config_override: Option<&str>, entity_type: &str, iid: i64, project: Option<&str>, sections: Option>, no_timeline: bool, max_decisions: usize, since: Option<&str>, robot_mode: bool, ) -> std::result::Result<(), Box> { let start = std::time::Instant::now(); // Normalize singular forms let entity_type = match entity_type { "issue" => "issues", "mr" => "mrs", other => other, }; // Validate sections const VALID_SECTIONS: &[&str] = &[ "entity", "description", "key_decisions", "activity", "open_threads", "related", "timeline", ]; if let Some(ref secs) = sections { for s in secs { if !VALID_SECTIONS.contains(&s.as_str()) { return Err(Box::new(LoreError::Other(format!( "Invalid section '{s}'. 
Valid: {}", VALID_SECTIONS.join(", ") )))); } } } // Parse --since let since_ms = since.and_then(crate::core::time::parse_since); let config = crate::Config::load(config_override)?; let db_path = crate::core::paths::get_db_path(config.storage.db_path.as_deref()); let conn = crate::core::db::create_connection(&db_path)?; let effective_project = config.effective_project(project); let params = ExplainParams { entity_type: entity_type.to_string(), iid, project: effective_project.map(String::from), sections, no_timeline, max_decisions, since: since_ms, }; let result = run_explain(&conn, ¶ms)?; let elapsed_ms = start.elapsed().as_millis() as u64; if robot_mode { print_explain_json(&result, elapsed_ms)?; } else { print_explain(&result); } Ok(()) } // --------------------------------------------------------------------------- // Output rendering (Task 5 fills these in fully) // --------------------------------------------------------------------------- pub fn print_explain_json(result: &ExplainResult, elapsed_ms: u64) -> Result<()> { let response = serde_json::json!({ "ok": true, "data": result, "meta": { "elapsed_ms": elapsed_ms } }); println!( "{}", serde_json::to_string(&response) .map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))? 
); Ok(()) } pub fn print_explain(result: &ExplainResult) { use crate::cli::render::{Icons, Theme}; // Entity header let type_label = match result.entity.entity_type.as_str() { "issue" => "Issue", "merge_request" => "MR", _ => &result.entity.entity_type, }; println!( "{} {} #{} — {}", Icons::info(), Theme::bold().render(type_label), result.entity.iid, Theme::bold().render(&result.entity.title) ); println!( " State: {} Author: {} Created: {}", result.entity.state, result.entity.author, result.entity.created_at ); if !result.entity.assignees.is_empty() { println!(" Assignees: {}", result.entity.assignees.join(", ")); } if !result.entity.labels.is_empty() { println!(" Labels: {}", result.entity.labels.join(", ")); } if let Some(ref url) = result.entity.url { println!(" URL: {url}"); } // Description if let Some(ref desc) = result.description_excerpt { println!("\n{}", Theme::bold().render("Description")); for line in desc.lines() { println!(" {line}"); } } // Key decisions if let Some(ref decisions) = result.key_decisions && !decisions.is_empty() { println!( "\n{} {}", Icons::info(), Theme::bold().render("Key Decisions") ); for d in decisions { println!( " {} {} — {}", Theme::muted().render(&d.timestamp), Theme::bold().render(&d.actor), d.action, ); for line in d.context_note.lines() { println!(" {line}"); } } } // Activity if let Some(ref act) = result.activity { println!("\n{}", Theme::bold().render("Activity")); println!( " {} state changes, {} label changes, {} notes", act.state_changes, act.label_changes, act.notes ); if let Some(ref first) = act.first_event { println!(" First event: {first}"); } if let Some(ref last) = act.last_event { println!(" Last event: {last}"); } } // Open threads if let Some(ref threads) = result.open_threads && !threads.is_empty() { println!( "\n{} {} ({})", Icons::warning(), Theme::bold().render("Open Threads"), threads.len() ); for t in threads { println!( " {} by {} ({} notes, last: {})", t.discussion_id, 
t.started_by.as_deref().unwrap_or("unknown"), t.note_count, t.last_note_at ); } } // Related if let Some(ref related) = result.related && (!related.closing_mrs.is_empty() || !related.related_issues.is_empty()) { println!("\n{}", Theme::bold().render("Related")); for mr in &related.closing_mrs { println!( " {} MR !{} — {} [{}]", Icons::success(), mr.iid, mr.title, mr.state ); } for ri in &related.related_issues { println!( " {} {} #{} — {} ({})", Icons::info(), ri.entity_type, ri.iid, ri.title.as_deref().unwrap_or("(untitled)"), ri.reference_type ); } } // Timeline excerpt if let Some(ref events) = result.timeline_excerpt && !events.is_empty() { println!( "\n{} {} ({} events)", Icons::info(), Theme::bold().render("Timeline"), events.len() ); for e in events { let actor_str = e.actor.as_deref().unwrap_or(""); println!( " {} {} {} {}", Theme::muted().render(&e.timestamp), e.event_type, actor_str, e.summary ); } } } // --------------------------------------------------------------------------- // Tests // --------------------------------------------------------------------------- #[cfg(test)] mod tests { use super::*; fn setup_explain_db() -> (Connection, i64) { let conn = crate::core::db::create_connection(std::path::Path::new(":memory:")).unwrap(); crate::core::db::run_migrations(&conn).unwrap(); conn.execute( "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \ VALUES (100, 'test/project', 'https://gitlab.example.com/test/project')", [], ) .unwrap(); let project_id = conn.last_insert_rowid(); (conn, project_id) } fn insert_test_issue(conn: &Connection, project_id: i64, iid: i64, desc: Option<&str>) -> i64 { conn.execute( "INSERT INTO issues (gitlab_id, iid, project_id, title, state, author_username, \ created_at, updated_at, last_seen_at, description) \ VALUES (?1, ?2, ?3, 'Test Issue', 'opened', 'testuser', \ 1704067200000, 1704153600000, 1704153600000, ?4)", rusqlite::params![iid * 10, iid, project_id, desc], ) .unwrap(); 
conn.last_insert_rowid()
}

/// Inserts a merged MR (gitlab_id = `iid * 10`) with fixed timestamps and
/// returns its local row id.
fn insert_test_mr(db: &Connection, project_id: i64, iid: i64) -> i64 {
    db.execute(
        "INSERT INTO merge_requests (gitlab_id, iid, project_id, title, state, draft, \
         author_username, source_branch, target_branch, created_at, updated_at, \
         merged_at, last_seen_at) \
         VALUES (?1, ?2, ?3, 'Test MR', 'merged', 0, 'testuser', 'feat', 'main', \
         1704067200000, 1704153600000, 1704240000000, 1704153600000)",
        rusqlite::params![iid * 10, iid, project_id],
    )
    .unwrap();
    db.last_insert_rowid()
}

/// Inserts a discussion attached to either an issue or an MR and returns its
/// local row id.
fn insert_test_discussion(
    db: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    gitlab_discussion_id: &str,
) -> i64 {
    // noteable_type mirrors whichever parent id the caller supplied.
    let noteable = match issue_id {
        Some(_) => "Issue",
        None => "MergeRequest",
    };
    db.execute(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, noteable_type, issue_id, \
         merge_request_id, resolvable, resolved, first_note_at, last_note_at, last_seen_at) \
         VALUES (?1, ?2, ?3, ?4, ?5, 0, 0, 1704067200000, 1704153600000, 1704153600000)",
        rusqlite::params![gitlab_discussion_id, project_id, noteable, issue_id, mr_id],
    )
    .unwrap();
    db.last_insert_rowid()
}

/// Inserts a note with a canned body and author at a fixed timestamp.
fn insert_test_note(
    db: &Connection,
    project_id: i64,
    discussion_id: i64,
    gitlab_id: i64,
    is_system: bool,
) {
    db.execute(
        "INSERT INTO notes (gitlab_id, discussion_id, project_id, body, author_username, \
         created_at, updated_at, last_seen_at, is_system) \
         VALUES (?1, ?2, ?3, 'Test note body', 'testuser', \
         1704067200000, 1704067200000, 1704067200000, ?4)",
        rusqlite::params![gitlab_id, discussion_id, project_id, is_system],
    )
    .unwrap();
}

/// Inserts a resource_state_events row. The created_at timestamp doubles as
/// the gitlab_id, so callers must use distinct timestamps per event.
fn insert_test_state_event(
    db: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    state: &str,
    actor: &str,
    created_at: i64,
) {
    db.execute(
        "INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, \
         merge_request_id, state, actor_username, created_at) \
         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
        rusqlite::params![created_at, project_id, issue_id, mr_id, state, actor, created_at],
    )
    .unwrap();
}

#[test]
fn test_explain_issue_basic() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 42, Some("Issue description text"));
    let discussion = insert_test_discussion(&db, project_id, Some(issue_id), None, "disc-001");
    insert_test_note(&db, project_id, discussion, 1001, false);
    insert_test_state_event(
        &db,
        project_id,
        Some(issue_id),
        None,
        "closed",
        "testuser",
        1704100000000,
    );

    let params = ExplainParams {
        entity_type: "issues".to_string(),
        iid: 42,
        project: None,
        sections: None,
        no_timeline: true,
        max_decisions: 10,
        since: None,
    };
    let result = run_explain(&db, &params).unwrap();

    assert_eq!(result.entity.entity_type, "issue");
    assert_eq!(result.entity.iid, 42);
    assert_eq!(result.entity.title, "Test Issue");
    assert_eq!(result.entity.state, "opened");
    assert_eq!(result.entity.author, "testuser");
    // Without a section filter, every section materializes as Some(...).
    assert!(result.description_excerpt.is_some());
    assert!(result.key_decisions.is_some());
    assert!(result.activity.is_some());
    assert!(result.open_threads.is_some());
    assert!(result.related.is_some());
    // timeline_excerpt stays None when no_timeline=true (covered separately).
}

#[test]
fn test_explain_mr() {
    let (db, project_id) = setup_explain_db();
    insert_test_mr(&db, project_id, 99);

    let params = ExplainParams {
        entity_type: "mrs".to_string(),
        iid: 99,
        project: None,
        sections: None,
        no_timeline: true,
        max_decisions: 10,
        since: None,
    };
    let result = run_explain(&db, &params).unwrap();

    assert_eq!(result.entity.entity_type, "merge_request");
    assert_eq!(result.entity.iid, 99);
    assert_eq!(result.entity.title, "Test MR");
    assert_eq!(result.entity.state, "merged");
}

#[test]
fn test_explain_singular_entity_type() {
    let (db, project_id) = setup_explain_db();
    insert_test_issue(&db, project_id, 42, Some("Description"));
    // run_explain expects the already-normalized plural form ("issues");
    // normalization of user input happens in handle_explain, which is tested
    // via the handler logic. Here we only verify the skeleton accepts it.
    let params = ExplainParams {
        entity_type: "issues".to_string(),
        iid: 42,
        project: None,
        sections: None,
        no_timeline: true,
        max_decisions: 10,
        since: None,
    };
    let result = run_explain(&db, &params).unwrap();
    assert_eq!(result.entity.entity_type, "issue");
    assert_eq!(result.entity.iid, 42);
}

#[test]
fn test_explain_description_excerpt() {
    let (db, project_id) = setup_explain_db();
    insert_test_issue(&db, project_id, 43, None);

    let params = ExplainParams {
        entity_type: "issues".to_string(),
        iid: 43,
        project: None,
        sections: None,
        no_timeline: true,
        max_decisions: 10,
        since: None,
    };
    let result = run_explain(&db, &params).unwrap();

    // A NULL description is rendered as a placeholder, not omitted.
    assert_eq!(
        result.description_excerpt.as_deref(),
        Some("(no description)")
    );
}

#[test]
fn test_explain_section_filtering() {
    let (db, project_id) = setup_explain_db();
    insert_test_issue(&db, project_id, 44, Some("Desc"));

    let params = ExplainParams {
        entity_type: "issues".to_string(),
        iid: 44,
        project: None,
        sections: Some(vec!["key_decisions".to_string(), "activity".to_string()]),
        no_timeline: true,
        max_decisions: 10,
        since: None,
    };
    let result = run_explain(&db, &params).unwrap();

    // The entity header is unconditional.
    assert_eq!(result.entity.iid, 44);
    // Requested sections are populated...
    assert!(result.key_decisions.is_some());
    assert!(result.activity.is_some());
    // ...and everything else is suppressed.
    assert!(result.description_excerpt.is_none());
    assert!(result.open_threads.is_none());
    assert!(result.related.is_none());
    assert!(result.timeline_excerpt.is_none());
}

#[test]
fn test_truncate_description() {
    // Missing and empty descriptions both collapse to a placeholder.
    assert_eq!(truncate_description(None, 500), "(no description)");
    assert_eq!(truncate_description(Some(""), 500), "(no description)");
    // Short text passes through unchanged.
    assert_eq!(truncate_description(Some("short"), 500), "short");
    // Over-limit text is cut and suffixed with an ellipsis.
    let long = "a".repeat(600);
    let truncated = truncate_description(Some(&long), 500);
    assert!(truncated.ends_with("..."));
    assert!(truncated.len() <= 504); // 500 + "..."
}

// -----------------------------------------------------------------------
// Test helpers for key-decisions heuristic (Task 2)
// -----------------------------------------------------------------------

/// Inserts a resource_label_events row; created_at doubles as gitlab_id, so
/// callers must use distinct timestamps per event.
#[allow(clippy::too_many_arguments)]
fn insert_test_label_event(
    db: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    action: &str,
    label_name: &str,
    actor: &str,
    created_at: i64,
) {
    db.execute(
        "INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, \
         merge_request_id, action, label_name, actor_username, created_at) \
         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)",
        rusqlite::params![
            created_at, project_id, issue_id, mr_id, action, label_name, actor, created_at
        ],
    )
    .unwrap();
}

/// Inserts a note with caller-controlled body/author/timestamp; created_at,
/// updated_at and last_seen_at all share the same value.
#[allow(clippy::too_many_arguments)]
fn insert_test_note_with(
    db: &Connection,
    project_id: i64,
    discussion_id: i64,
    gitlab_id: i64,
    body: &str,
    author: &str,
    created_at: i64,
    is_system: bool,
) {
    db.execute(
        "INSERT INTO notes (gitlab_id, discussion_id, project_id, body, author_username, \
         created_at, updated_at, last_seen_at, is_system) \
         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?6, ?6, ?7)",
        rusqlite::params![
            gitlab_id, discussion_id, project_id, body, author, created_at, is_system
        ],
    )
    .unwrap();
}

// -----------------------------------------------------------------------
// Key-decisions heuristic tests (Task 2)
// -----------------------------------------------------------------------

#[test]
fn test_explain_key_decision_heuristic() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 50, Some("desc"));

    // A state event at time T...
    let t = 1_704_100_000_000_i64;
    insert_test_state_event(&db, project_id, Some(issue_id), None, "closed", "alice", t);

    // ...followed 30 minutes later (inside the 60-minute window) by a note
    // from the SAME author should be surfaced as one decision.
    let discussion = insert_test_discussion(&db, project_id, Some(issue_id), None, "disc-kd1");
    insert_test_note_with(
        &db,
        project_id,
        discussion,
        5001,
        "Closing because the fix landed in MR !200",
        "alice",
        t + 30 * 60 * 1000,
        false,
    );

    let decisions = extract_key_decisions(&db, "issues", issue_id, None, 10).unwrap();
    assert_eq!(decisions.len(), 1);
    assert_eq!(decisions[0].actor, "alice");
    assert!(decisions[0].action.contains("state:"));
    assert!(decisions[0].action.contains("closed"));
    assert!(decisions[0].context_note.contains("Closing because"));
}

#[test]
fn test_explain_key_decision_ignores_unrelated_notes() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 51, Some("desc"));
    let t = 1_704_100_000_000_i64;

    // State event by alice...
    insert_test_state_event(&db, project_id, Some(issue_id), None, "closed", "alice", t);

    // ...but the nearby note is from bob: a different author must not correlate.
    let discussion = insert_test_discussion(&db, project_id, Some(issue_id), None, "disc-kd2");
    insert_test_note_with(
        &db,
        project_id,
        discussion,
        5002,
        "Some unrelated comment",
        "bob",
        t + 30 * 60 * 1000,
        false,
    );

    let decisions = extract_key_decisions(&db, "issues", issue_id, None, 10).unwrap();
    assert_eq!(decisions.len(), 0);
}

#[test]
fn test_explain_key_decision_label_event() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 52, Some("desc"));
    let t = 1_704_100_000_000_i64;

    // Label-add event plus a correlated note from the same actor.
    insert_test_label_event(&db, project_id, Some(issue_id), None, "add", "bugfix", "alice", t);
    let discussion = insert_test_discussion(&db, project_id, Some(issue_id), None, "disc-kd3");
    insert_test_note_with(
        &db,
        project_id,
        discussion,
        5003,
        "Labeling as bugfix per triage",
        "alice",
        t + 10 * 60 * 1000,
        false,
    );

    let decisions = extract_key_decisions(&db, "issues", issue_id, None, 10).unwrap();
    assert_eq!(decisions.len(), 1);
    assert!(
        decisions[0].action.starts_with("label: +"),
        "Expected action to start with 'label: +', got: {}",
        decisions[0].action
    );
}

#[test]
fn test_explain_max_decisions() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 53, Some("desc"));
    let base_t = 1_704_100_000_000_i64;
    let discussion = insert_test_discussion(&db, project_id, Some(issue_id), None, "disc-kd4");

    // Five correlated event+note pairs, spaced two hours apart so their
    // correlation windows never overlap.
    for i in 0..5 {
        let event_t = base_t + i64::from(i) * 2 * 60 * 60 * 1000;
        let state = if i % 2 == 0 { "closed" } else { "reopened" };
        insert_test_state_event(&db, project_id, Some(issue_id), None, state, "alice", event_t);
        insert_test_note_with(
            &db,
            project_id,
            discussion,
            5010 + i64::from(i),
            &format!("Reason for change {i}"),
            "alice",
            event_t + 10 * 60 * 1000,
            false,
        );
    }

    let decisions = extract_key_decisions(&db, "issues", issue_id, None, 3).unwrap();
    assert_eq!(decisions.len(), 3, "Expected max_decisions=3 to cap at 3");
}

#[test]
fn test_explain_since_scopes_events() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 54, Some("desc"));
    let now = 1_704_200_000_000_i64;
    let sixty_days_ago = now - 60 * 24 * 60 * 60 * 1000;
    let ten_days_ago = now - 10 * 24 * 60 * 60 * 1000;
    let thirty_days_ago = now - 30 * 24 * 60 * 60 * 1000;
    let discussion = insert_test_discussion(&db, project_id, Some(issue_id), None, "disc-kd5");

    // Old pair at T-60d with a correlated note.
    insert_test_state_event(
        &db,
        project_id,
        Some(issue_id),
        None,
        "closed",
        "alice",
        sixty_days_ago,
    );
    insert_test_note_with(
        &db,
        project_id,
        discussion,
        5020,
        "Old closure reason",
        "alice",
        sixty_days_ago + 10 * 60 * 1000,
        false,
    );

    // Recent pair at T-10d with a correlated note.
    insert_test_state_event(
        &db,
        project_id,
        Some(issue_id),
        None,
        "reopened",
        "alice",
        ten_days_ago,
    );
    insert_test_note_with(
        &db,
        project_id,
        discussion,
        5021,
        "Recent reopening reason",
        "alice",
        ten_days_ago + 10 * 60 * 1000,
        false,
    );

    // A since-cutoff of 30 days ago must scope out the old pair.
    let decisions =
        extract_key_decisions(&db, "issues", issue_id, Some(thirty_days_ago), 10).unwrap();
    assert_eq!(decisions.len(), 1,
"Expected only the recent event");
    assert!(decisions[0].context_note.contains("Recent reopening"));
}

// -----------------------------------------------------------------------
// Activity / open threads / related tests (Task 3)
// -----------------------------------------------------------------------

/// Inserts a discussion with explicit resolvable/resolved flags and note
/// timestamps (last_seen_at reuses last_note_at); returns its row id.
#[allow(clippy::too_many_arguments)]
fn insert_resolvable_discussion(
    db: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    gitlab_discussion_id: &str,
    resolvable: bool,
    resolved: bool,
    first_note_at: i64,
    last_note_at: i64,
) -> i64 {
    let noteable = match issue_id {
        Some(_) => "Issue",
        None => "MergeRequest",
    };
    db.execute(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, noteable_type, issue_id, \
         merge_request_id, resolvable, resolved, first_note_at, last_note_at, last_seen_at) \
         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?9)",
        rusqlite::params![
            gitlab_discussion_id,
            project_id,
            noteable,
            issue_id,
            mr_id,
            resolvable,
            resolved,
            first_note_at,
            last_note_at,
        ],
    )
    .unwrap();
    db.last_insert_rowid()
}

#[test]
fn test_explain_open_threads() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 60, Some("desc"));

    // Resolvable + unresolved: must be reported.
    let open_thread = insert_resolvable_discussion(
        &db,
        project_id,
        Some(issue_id),
        None,
        "disc-unresolved",
        true,
        false,
        1_000_000,
        3_000_000,
    );
    insert_test_note_with(&db, project_id, open_thread, 6001, "note1", "alice", 1_000_000, false);
    insert_test_note_with(&db, project_id, open_thread, 6002, "note2", "bob", 2_000_000, false);

    // Resolvable + resolved: must be filtered out.
    let closed_thread = insert_resolvable_discussion(
        &db,
        project_id,
        Some(issue_id),
        None,
        "disc-resolved",
        true,
        true,
        1_500_000,
        2_500_000,
    );
    insert_test_note_with(
        &db,
        project_id,
        closed_thread,
        6003,
        "note3",
        "charlie",
        1_500_000,
        false,
    );

    let threads = fetch_open_threads(&db, "issues", issue_id).unwrap();
    assert_eq!(threads.len(), 1, "Only unresolved thread should appear");
    assert_eq!(threads[0].discussion_id, "disc-unresolved");
    assert_eq!(threads[0].started_by.as_deref(), Some("alice"));
    assert_eq!(threads[0].note_count, 2);
}

#[test]
fn test_explain_activity_summary() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 61, Some("desc"));

    // Two state transitions.
    insert_test_state_event(&db, project_id, Some(issue_id), None, "closed", "alice", 1_000_000);
    insert_test_state_event(&db, project_id, Some(issue_id), None, "reopened", "alice", 5_000_000);
    // One label change.
    insert_test_label_event(&db, project_id, Some(issue_id), None, "add", "bug", "alice", 1_500_000);
    // Three human (non-system) notes.
    let discussion = insert_test_discussion(&db, project_id, Some(issue_id), None, "disc-act");
    for i in 0..3 {
        insert_test_note_with(
            &db,
            project_id,
            discussion,
            7001 + i,
            &format!("comment {i}"),
            "commenter",
            1_100_000 + i * 100_000,
            false,
        );
    }

    let activity = build_activity_summary(&db, "issues", issue_id, None).unwrap();
    assert_eq!(activity.state_changes, 2);
    assert_eq!(activity.label_changes, 1);
    assert_eq!(activity.notes, 3);
    assert!(activity.first_event.is_some());
    assert!(activity.last_event.is_some());
}

#[test]
fn test_explain_activity_with_since() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 62, Some("desc"));

    // One event before the cutoff, one after.
    insert_test_state_event(&db, project_id, Some(issue_id), None, "closed", "alice", 1_000_000);
    insert_test_state_event(&db, project_id, Some(issue_id), None, "reopened", "alice", 5_000_000);

    let activity = build_activity_summary(&db, "issues", issue_id, Some(3_000_000)).unwrap();
    assert_eq!(activity.state_changes, 1, "Only the recent event");
}

#[test]
fn test_explain_related_closing_mrs() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 63, Some("desc"));
    let mr_id = insert_test_mr(&db, project_id, 99);

    // Record that the MR closes this issue.
    db.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, \
         target_entity_type, target_entity_id, target_entity_iid, reference_type, \
         source_method, created_at) \
         VALUES (?, 'merge_request', ?, 'issue', ?, 63, 'closes', 'api', 1000000)",
        rusqlite::params![project_id, mr_id, issue_id],
    )
    .unwrap();

    let related = fetch_related_entities(&db, "issues", issue_id).unwrap();
    assert_eq!(related.closing_mrs.len(), 1);
    assert_eq!(related.closing_mrs[0].iid, 99);
    assert_eq!(related.closing_mrs[0].state, "merged");
}

#[test]
fn test_explain_related_skips_unresolved_refs() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 65, Some("desc"));

    // A cross-project mention that was never resolved (NULL target id/iid).
    db.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, \
         target_entity_type, target_entity_id, target_entity_iid, reference_type, \
         source_method, created_at) \
         VALUES (?, 'issue', ?, 'issue', NULL, NULL, 'mentioned', 'note_parse', 1000000)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    // Must not crash; unresolved refs are simply filtered out.
    let related = fetch_related_entities(&db, "issues", issue_id).unwrap();
    assert!(
        related.related_issues.is_empty(),
        "Unresolved refs (NULL iid) should be excluded"
    );
}

#[test]
fn test_explain_empty_activity() {
    let (db, project_id) = setup_explain_db();
    let issue_id = insert_test_issue(&db, project_id, 64, None);

    // With no events or notes at all, every summary is empty rather than Err.
    let activity = build_activity_summary(&db, "issues", issue_id, None).unwrap();
    assert_eq!(activity.state_changes, 0);
    assert_eq!(activity.label_changes, 0);
    assert_eq!(activity.notes, 0);
    assert!(activity.first_event.is_none());
    assert!(activity.last_event.is_none());

    let threads = fetch_open_threads(&db, "issues", issue_id).unwrap();
    assert!(threads.is_empty());

    let related = fetch_related_entities(&db, "issues", issue_id).unwrap();
    assert!(related.closing_mrs.is_empty());
    assert!(related.related_issues.is_empty());
}
}