Files
gitlore/src/cli/commands/explain.rs
teernisse 20753608e8 fix(cli): flex-col min-width clamping and formatting consistency
- render.rs: clamp flex column width to min(min_flex, natural) instead
  of a hardcoded 20, preventing layout overflow when natural width is
  small; rewrites flex_width test to be terminal-independent
- list/issues.rs: adopt .flex_col() builder on table construction
- list/mrs.rs, list/notes.rs: consolidate multi-line StyledCell::styled
  calls to single-line format
- explain.rs: adopt flex_width() for related-issue title truncation,
  consolidate multi-line formatting
2026-03-13 11:13:40 -04:00

2139 lines
69 KiB
Rust

use rusqlite::Connection;
use serde::Serialize;
use crate::core::error::{LoreError, Result};
use crate::core::project::resolve_project;
use crate::core::time::{iso_to_ms, ms_to_iso};
use crate::timeline::collect::collect_events;
use crate::timeline::seed::seed_timeline_direct;
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/// Parameters controlling explain behavior.
pub struct ExplainParams {
    /// Plural entity kind: "issues" or "mrs" (normalized by `handle_explain`).
    pub entity_type: String,
    /// Project-scoped internal id (issue `#n` / MR `!n`).
    pub iid: i64,
    /// Optional project filter; `None` searches across all projects.
    pub project: Option<String>,
    /// Section names to include; `None` means "all sections".
    pub sections: Option<Vec<String>>,
    /// Suppress the timeline excerpt even when the section is requested.
    pub no_timeline: bool,
    /// Upper bound on entries returned by `extract_key_decisions`.
    pub max_decisions: usize,
    /// Lower time bound (epoch milliseconds) applied to event queries.
    pub since: Option<i64>,
}
/// Top-level payload produced by `run_explain`. Each optional section is
/// `None` when excluded by the section filter and is then omitted from the
/// serialized JSON.
#[derive(Debug, Serialize)]
pub struct ExplainResult {
    /// Core entity header (always present).
    pub entity: EntitySummary,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description_excerpt: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub key_decisions: Option<Vec<KeyDecision>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub activity: Option<ActivitySummary>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub open_threads: Option<Vec<OpenThread>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub related: Option<RelatedEntities>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timeline_excerpt: Option<TimelineExcerpt>,
}
/// Header information for the explained issue or merge request.
/// Timestamps are ISO-8601 strings (converted via `ms_to_iso`).
#[derive(Debug, Serialize)]
pub struct EntitySummary {
    /// "issue" or "merge_request" (serialized as "type").
    #[serde(rename = "type")]
    pub entity_type: String,
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub author: String,
    pub assignees: Vec<String>,
    pub labels: Vec<String>,
    pub created_at: String,
    pub updated_at: String,
    pub url: Option<String>,
    pub project_path: String,
    /// Populated for issues only; always `None` for merge requests.
    pub status_name: Option<String>,
}
/// A state/label change paired with a note that appears to explain it
/// (see `extract_key_decisions`).
#[derive(Debug, Serialize)]
pub struct KeyDecision {
    /// ISO-8601 timestamp of the triggering event.
    pub timestamp: String,
    pub actor: String,
    /// Event description, e.g. "state: closed" or "label: +bug".
    pub action: String,
    /// Body of the correlated non-system note.
    pub context_note: String,
}
/// Aggregate event counts plus the bounding timestamps of observed activity.
#[derive(Debug, Serialize)]
pub struct ActivitySummary {
    pub state_changes: usize,
    pub label_changes: usize,
    /// Count of non-system notes.
    pub notes: usize,
    /// ISO-8601; floored at entity creation (see `build_activity_summary`).
    pub first_event: Option<String>,
    /// ISO-8601 timestamp of the most recent event, if any.
    pub last_event: Option<String>,
}
/// An unresolved, resolvable discussion thread on the entity.
#[derive(Debug, Serialize)]
pub struct OpenThread {
    /// GitLab discussion id.
    pub discussion_id: String,
    /// Author of the thread's first note, if known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub started_by: Option<String>,
    /// ISO-8601 timestamp of the first note.
    pub started_at: String,
    /// Number of non-system notes in the thread.
    pub note_count: usize,
    /// ISO-8601 timestamp of the latest note.
    pub last_note_at: String,
    /// First 200 characters of the first non-system note, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub first_note_excerpt: Option<String>,
}
/// Cross-reference graph around the entity.
#[derive(Debug, Serialize)]
pub struct RelatedEntities {
    /// MRs with a 'closes' reference to this issue; empty for MRs.
    pub closing_mrs: Vec<ClosingMrInfo>,
    /// Outgoing then incoming non-'closes' references.
    pub related_issues: Vec<RelatedEntityInfo>,
}
/// A merge request that closes the explained issue.
#[derive(Debug, Serialize)]
pub struct ClosingMrInfo {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub web_url: Option<String>,
}
/// A single cross-reference edge between this entity and another.
#[derive(Debug, Serialize)]
pub struct RelatedEntityInfo {
    /// "issue" or "merge_request".
    pub entity_type: String,
    pub iid: i64,
    /// Title/state of the other entity; `None` when it could not be joined.
    pub title: Option<String>,
    pub state: Option<String>,
    /// Reference kind from entity_references (never "closes" here).
    pub reference_type: String,
    /// "outgoing" (this entity references) or "incoming" (referenced by).
    pub direction: String,
}
/// Tail of the entity's timeline (most recent events).
#[derive(Debug, Serialize)]
pub struct TimelineExcerpt {
    pub events: Vec<TimelineEventSummary>,
    /// Count of events collected before truncation to `MAX_TIMELINE_EVENTS`.
    pub total_events: usize,
    /// True when `events` holds fewer entries than were collected.
    pub truncated: bool,
}
/// One timeline event, flattened for output.
#[derive(Debug, Serialize)]
pub struct TimelineEventSummary {
    /// ISO-8601 timestamp.
    pub timestamp: String,
    /// Compact label, e.g. "created" or "state_changed:closed".
    pub event_type: String,
    pub actor: Option<String>,
    pub summary: String,
}
// ---------------------------------------------------------------------------
// Section filtering helper
// ---------------------------------------------------------------------------
/// Decide whether a named section should be rendered: true when no section
/// filter was supplied, or when the filter list contains `name`.
fn should_include(sections: &Option<Vec<String>>, name: &str) -> bool {
    match sections {
        None => true,
        Some(list) => list.iter().any(|entry| entry == name),
    }
}
// ---------------------------------------------------------------------------
// Entity resolution (copied from show/ patterns — private there)
// ---------------------------------------------------------------------------
/// Internal row shape for the issue-lookup query in `find_explain_issue`.
/// Timestamps are raw epoch milliseconds; `id` is the local database row id.
struct ExplainIssueRow {
    id: i64,
    iid: i64,
    title: String,
    state: String,
    author_username: String,
    created_at: i64,
    updated_at: i64,
    web_url: Option<String>,
    project_path: String,
    status_name: Option<String>,
}
/// Internal row shape for the MR-lookup query in `find_explain_mr`.
/// Mirrors `ExplainIssueRow` minus the issue-only `status_name` field.
struct ExplainMrRow {
    id: i64,
    iid: i64,
    title: String,
    state: String,
    author_username: String,
    created_at: i64,
    updated_at: i64,
    web_url: Option<String>,
    project_path: String,
}
/// Look up an issue by `iid`, optionally scoped to one project.
///
/// Returns the rendered `EntitySummary`, the issue's local database row id
/// (used by all later section queries), and the project path.
///
/// # Errors
/// - `NotFound` when no issue matches.
/// - `Ambiguous` when the iid exists in multiple projects and no
///   `--project` filter was given.
fn find_explain_issue(
    conn: &Connection,
    iid: i64,
    project_filter: Option<&str>,
) -> Result<(EntitySummary, i64, String)> {
    // Build SQL and a boxed parameter list together so both branches share
    // the single query/mapping path below.
    let (sql, params): (&str, Vec<Box<dyn rusqlite::ToSql>>) = match project_filter {
        Some(project) => {
            let project_id = resolve_project(conn, project)?;
            (
                "SELECT i.id, i.iid, i.title, i.state, i.author_username,
                        i.created_at, i.updated_at, i.web_url, p.path_with_namespace,
                        i.status_name
                 FROM issues i
                 JOIN projects p ON i.project_id = p.id
                 WHERE i.iid = ? AND i.project_id = ?",
                vec![Box::new(iid), Box::new(project_id)],
            )
        }
        None => (
            "SELECT i.id, i.iid, i.title, i.state, i.author_username,
                    i.created_at, i.updated_at, i.web_url, p.path_with_namespace,
                    i.status_name
             FROM issues i
             JOIN projects p ON i.project_id = p.id
             WHERE i.iid = ?",
            vec![Box::new(iid)],
        ),
    };
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(sql)?;
    let rows: Vec<ExplainIssueRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok(ExplainIssueRow {
                id: row.get(0)?,
                iid: row.get(1)?,
                title: row.get(2)?,
                state: row.get(3)?,
                author_username: row.get(4)?,
                created_at: row.get(5)?,
                updated_at: row.get(6)?,
                web_url: row.get(7)?,
                project_path: row.get(8)?,
                status_name: row.get(9)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    // Exactly one match is required; zero and many are distinct errors so
    // the caller can suggest --project for the ambiguous case.
    match rows.len() {
        0 => Err(LoreError::NotFound(format!("Issue #{iid} not found"))),
        1 => {
            let r = rows.into_iter().next().unwrap();
            let local_id = r.id;
            let project_path = r.project_path.clone();
            let labels = get_issue_labels(conn, r.id)?;
            let assignees = get_issue_assignees(conn, r.id)?;
            let summary = EntitySummary {
                entity_type: "issue".to_string(),
                iid: r.iid,
                title: r.title,
                state: r.state,
                author: r.author_username,
                assignees,
                labels,
                created_at: ms_to_iso(r.created_at),
                updated_at: ms_to_iso(r.updated_at),
                url: r.web_url,
                project_path: project_path.clone(),
                status_name: r.status_name,
            };
            Ok((summary, local_id, project_path))
        }
        _ => {
            let projects: Vec<String> = rows.iter().map(|r| r.project_path.clone()).collect();
            Err(LoreError::Ambiguous(format!(
                "Issue #{iid} exists in multiple projects: {}. Use --project to specify.",
                projects.join(", ")
            )))
        }
    }
}
/// Look up a merge request by `iid`, optionally scoped to one project.
/// Mirrors `find_explain_issue`; `status_name` is always `None` for MRs.
///
/// # Errors
/// - `NotFound` when no MR matches.
/// - `Ambiguous` when the iid exists in multiple projects and no
///   `--project` filter was given.
fn find_explain_mr(
    conn: &Connection,
    iid: i64,
    project_filter: Option<&str>,
) -> Result<(EntitySummary, i64, String)> {
    // Build SQL and a boxed parameter list together so both branches share
    // the single query/mapping path below.
    let (sql, params): (&str, Vec<Box<dyn rusqlite::ToSql>>) = match project_filter {
        Some(project) => {
            let project_id = resolve_project(conn, project)?;
            (
                "SELECT m.id, m.iid, m.title, m.state, m.author_username,
                        m.created_at, m.updated_at, m.web_url, p.path_with_namespace
                 FROM merge_requests m
                 JOIN projects p ON m.project_id = p.id
                 WHERE m.iid = ? AND m.project_id = ?",
                vec![Box::new(iid), Box::new(project_id)],
            )
        }
        None => (
            "SELECT m.id, m.iid, m.title, m.state, m.author_username,
                    m.created_at, m.updated_at, m.web_url, p.path_with_namespace
             FROM merge_requests m
             JOIN projects p ON m.project_id = p.id
             WHERE m.iid = ?",
            vec![Box::new(iid)],
        ),
    };
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(sql)?;
    let rows: Vec<ExplainMrRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok(ExplainMrRow {
                id: row.get(0)?,
                iid: row.get(1)?,
                title: row.get(2)?,
                state: row.get(3)?,
                author_username: row.get(4)?,
                created_at: row.get(5)?,
                updated_at: row.get(6)?,
                web_url: row.get(7)?,
                project_path: row.get(8)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    // Exactly one match is required; zero and many are distinct errors.
    match rows.len() {
        0 => Err(LoreError::NotFound(format!("MR !{iid} not found"))),
        1 => {
            let r = rows.into_iter().next().unwrap();
            let local_id = r.id;
            let project_path = r.project_path.clone();
            let labels = get_mr_labels(conn, r.id)?;
            let assignees = get_mr_assignees(conn, r.id)?;
            let summary = EntitySummary {
                entity_type: "merge_request".to_string(),
                iid: r.iid,
                title: r.title,
                state: r.state,
                author: r.author_username,
                assignees,
                labels,
                created_at: ms_to_iso(r.created_at),
                updated_at: ms_to_iso(r.updated_at),
                url: r.web_url,
                project_path: project_path.clone(),
                status_name: None,
            };
            Ok((summary, local_id, project_path))
        }
        _ => {
            let projects: Vec<String> = rows.iter().map(|r| r.project_path.clone()).collect();
            Err(LoreError::Ambiguous(format!(
                "MR !{iid} exists in multiple projects: {}. Use --project to specify.",
                projects.join(", ")
            )))
        }
    }
}
/// Label names attached to an issue, sorted alphabetically.
fn get_issue_labels(conn: &Connection, issue_id: i64) -> Result<Vec<String>> {
    let mut stmt = conn.prepare(
        "SELECT l.name FROM labels l
         JOIN issue_labels il ON l.id = il.label_id
         WHERE il.issue_id = ?
         ORDER BY l.name",
    )?;
    let mut names = Vec::new();
    for name in stmt.query_map([issue_id], |row| row.get::<_, String>(0))? {
        names.push(name?);
    }
    Ok(names)
}
/// Assignee usernames for an issue, sorted alphabetically.
fn get_issue_assignees(conn: &Connection, issue_id: i64) -> Result<Vec<String>> {
    let mut stmt = conn.prepare(
        "SELECT username FROM issue_assignees
         WHERE issue_id = ?
         ORDER BY username",
    )?;
    let mut users = Vec::new();
    for user in stmt.query_map([issue_id], |row| row.get::<_, String>(0))? {
        users.push(user?);
    }
    Ok(users)
}
/// Label names attached to a merge request, sorted alphabetically.
fn get_mr_labels(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut stmt = conn.prepare(
        "SELECT l.name FROM labels l
         JOIN mr_labels ml ON l.id = ml.label_id
         WHERE ml.merge_request_id = ?
         ORDER BY l.name",
    )?;
    let names = stmt
        .query_map([mr_id], |row| row.get::<_, String>(0))?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(names)
}
/// Assignee usernames for a merge request, sorted alphabetically.
fn get_mr_assignees(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut stmt = conn.prepare(
        "SELECT username FROM mr_assignees
         WHERE merge_request_id = ?
         ORDER BY username",
    )?;
    let users = stmt
        .query_map([mr_id], |row| row.get::<_, String>(0))?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(users)
}
// ---------------------------------------------------------------------------
// Description excerpt helper
// ---------------------------------------------------------------------------
/// Render a description for output: the text itself, or the
/// "(no description)" placeholder when absent or empty.
/// (Despite the name, no length truncation is performed.)
fn truncate_description(desc: Option<&str>) -> String {
    desc.filter(|text| !text.is_empty())
        .map(str::to_string)
        .unwrap_or_else(|| "(no description)".to_string())
}
// ---------------------------------------------------------------------------
// Core: run_explain
// ---------------------------------------------------------------------------
/// Orchestrate a full explain: resolve the entity, then assemble each
/// requested section (description, key decisions, activity, open threads,
/// related entities, timeline excerpt). Sections excluded by
/// `params.sections` — or the timeline when `no_timeline` is set — are
/// left as `None` and omitted from serialized output.
pub fn run_explain(conn: &Connection, params: &ExplainParams) -> Result<ExplainResult> {
    let project_filter = params.project.as_deref();
    // Resolve the entity and fetch its description + creation time up front;
    // `entity_local_id` is the database row id used by all section queries.
    let (entity_summary, entity_local_id, _project_path, description, created_at_ms) =
        if params.entity_type == "issues" {
            let (summary, local_id, path) = find_explain_issue(conn, params.iid, project_filter)?;
            let desc = get_issue_description(conn, local_id)?;
            let created_at_ms = get_issue_created_at(conn, local_id)?;
            (summary, local_id, path, desc, created_at_ms)
        } else {
            let (summary, local_id, path) = find_explain_mr(conn, params.iid, project_filter)?;
            let desc = get_mr_description(conn, local_id)?;
            let created_at_ms = get_mr_created_at(conn, local_id)?;
            (summary, local_id, path, desc, created_at_ms)
        };
    let description_excerpt = if should_include(&params.sections, "description") {
        Some(truncate_description(description.as_deref()))
    } else {
        None
    };
    let key_decisions = if should_include(&params.sections, "key_decisions") {
        Some(extract_key_decisions(
            conn,
            &params.entity_type,
            entity_local_id,
            params.since,
            params.max_decisions,
        )?)
    } else {
        None
    };
    let activity = if should_include(&params.sections, "activity") {
        Some(build_activity_summary(
            conn,
            &params.entity_type,
            entity_local_id,
            params.since,
            created_at_ms,
        )?)
    } else {
        None
    };
    let open_threads = if should_include(&params.sections, "open_threads") {
        Some(fetch_open_threads(
            conn,
            &params.entity_type,
            entity_local_id,
        )?)
    } else {
        None
    };
    let related = if should_include(&params.sections, "related") {
        Some(fetch_related_entities(
            conn,
            &params.entity_type,
            entity_local_id,
        )?)
    } else {
        None
    };
    // Timeline is best-effort: the builder returns None on pipeline errors
    // rather than failing the whole explain.
    let timeline_excerpt = if !params.no_timeline && should_include(&params.sections, "timeline") {
        build_timeline_excerpt_from_pipeline(conn, &entity_summary, params)
    } else {
        None
    };
    Ok(ExplainResult {
        entity: entity_summary,
        description_excerpt,
        key_decisions,
        activity,
        open_threads,
        related,
        timeline_excerpt,
    })
}
/// Raw description text for an issue; `None` when the column is NULL.
fn get_issue_description(conn: &Connection, issue_id: i64) -> Result<Option<String>> {
    let query = "SELECT description FROM issues WHERE id = ?";
    Ok(conn.query_row(query, [issue_id], |row| row.get::<_, Option<String>>(0))?)
}
/// Raw description text for a merge request; `None` when the column is NULL.
fn get_mr_description(conn: &Connection, mr_id: i64) -> Result<Option<String>> {
    let query = "SELECT description FROM merge_requests WHERE id = ?";
    Ok(conn.query_row(query, [mr_id], |row| row.get::<_, Option<String>>(0))?)
}
/// Creation timestamp (epoch milliseconds) for an issue.
fn get_issue_created_at(conn: &Connection, issue_id: i64) -> Result<i64> {
    let query = "SELECT created_at FROM issues WHERE id = ?";
    Ok(conn.query_row(query, [issue_id], |row| row.get::<_, i64>(0))?)
}
/// Creation timestamp (epoch milliseconds) for a merge request.
fn get_mr_created_at(conn: &Connection, mr_id: i64) -> Result<i64> {
    let query = "SELECT created_at FROM merge_requests WHERE id = ?";
    Ok(conn.query_row(query, [mr_id], |row| row.get::<_, i64>(0))?)
}
// ---------------------------------------------------------------------------
// Key-decisions heuristic (Task 2)
// ---------------------------------------------------------------------------
/// A state or label event normalized into one shape so both streams can be
/// merged and sorted chronologically in `extract_key_decisions`.
struct UnifiedEvent {
    // Epoch milliseconds.
    created_at: i64,
    // Empty string when the event had no recorded actor.
    actor: String,
    // Human-readable form, e.g. "state: closed" or "label: +bug".
    description: String,
}
/// A non-system note considered as candidate context for a decision.
struct NoteRow {
    body: String,
    // Empty string when the author is unknown.
    author: String,
    // Epoch milliseconds.
    created_at: i64,
}
/// 60 minutes in milliseconds — the correlation window for matching
/// a non-system note to a preceding state/label event by the same actor.
const DECISION_WINDOW_MS: i64 = 60 * 60 * 1000;
/// Maximum length (in bytes, snapped to a char boundary) for the
/// `context_note` field in a `KeyDecision`.
// NOTE(review): currently unused — `extract_key_decisions` stores note
// bodies untruncated. Confirm whether truncation was dropped deliberately
// before removing this constant and `truncate_note`.
#[allow(dead_code)]
const NOTE_TRUNCATE_LEN: usize = 500;
/// Truncate `text` to at most `max_len` bytes, snapping the cut down to the
/// nearest UTF-8 char boundary, and append "..." when anything was removed.
/// Returns the input unchanged when it already fits.
///
/// Implemented with the stable `str::is_char_boundary` walk-back instead of
/// the unstable `floor_char_boundary` API (feature `round_char_boundary`),
/// so this compiles on stable toolchains. Behavior is identical.
#[allow(dead_code)]
fn truncate_note(text: &str, max_len: usize) -> String {
    if text.len() <= max_len {
        return text.to_string();
    }
    // Walk back from max_len until we land on a char boundary; terminates
    // because byte offset 0 is always a boundary.
    let mut boundary = max_len;
    while !text.is_char_boundary(boundary) {
        boundary -= 1;
    }
    format!("{}...", &text[..boundary])
}
/// Foreign-key column linking per-entity event tables back to their parent:
/// "issue_id" for issues, "merge_request_id" otherwise.
fn id_column_for(entity_type: &str) -> &'static str {
    match entity_type {
        "issues" => "issue_id",
        _ => "merge_request_id",
    }
}
/// Chronological state-change events for the entity, each rendered as
/// "state: <state>", optionally bounded below by `since` (epoch ms).
fn query_state_events(
    conn: &Connection,
    entity_type: &str,
    entity_id: i64,
    since: Option<i64>,
) -> Result<Vec<UnifiedEvent>> {
    let id_col = id_column_for(entity_type);
    // `?2 IS NULL OR …` lets one statement serve both the bounded and
    // unbounded cases.
    let sql = format!(
        "SELECT state, actor_username, created_at \
         FROM resource_state_events \
         WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) \
         ORDER BY created_at"
    );
    let mut stmt = conn.prepare(&sql)?;
    let rows = stmt
        .query_map(rusqlite::params![entity_id, since], |row| {
            let state: String = row.get(0)?;
            let actor: Option<String> = row.get(1)?;
            let created_at: i64 = row.get(2)?;
            Ok(UnifiedEvent {
                created_at,
                // Missing actor becomes "" so actor comparison stays simple.
                actor: actor.unwrap_or_default(),
                description: format!("state: {state}"),
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(rows)
}
/// Chronological label add/remove events for the entity, each rendered as
/// "label: +<name>" or "label: -<name>", optionally bounded by `since`.
fn query_label_events(
    conn: &Connection,
    entity_type: &str,
    entity_id: i64,
    since: Option<i64>,
) -> Result<Vec<UnifiedEvent>> {
    let id_col = id_column_for(entity_type);
    let sql = format!(
        "SELECT action, label_name, actor_username, created_at \
         FROM resource_label_events \
         WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) \
         ORDER BY created_at"
    );
    let mut stmt = conn.prepare(&sql)?;
    let rows = stmt
        .query_map(rusqlite::params![entity_id, since], |row| {
            let action: String = row.get(0)?;
            let label_name: Option<String> = row.get(1)?;
            let actor: Option<String> = row.get(2)?;
            let created_at: i64 = row.get(3)?;
            // Any action other than "add" is rendered as a removal.
            let prefix = if action == "add" { "+" } else { "-" };
            let label = label_name.unwrap_or_else(|| "(unknown)".to_string());
            Ok(UnifiedEvent {
                created_at,
                actor: actor.unwrap_or_default(),
                description: format!("label: {prefix}{label}"),
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(rows)
}
/// Non-system notes on the entity in chronological order, optionally
/// bounded below by `since` (epoch ms). NULL bodies/authors become "".
fn query_non_system_notes(
    conn: &Connection,
    entity_type: &str,
    entity_id: i64,
    since: Option<i64>,
) -> Result<Vec<NoteRow>> {
    let id_col = id_column_for(entity_type);
    // Notes link to the entity through their discussion row.
    let sql = format!(
        "SELECT n.body, n.author_username, n.created_at \
         FROM notes n \
         JOIN discussions d ON n.discussion_id = d.id \
         WHERE d.{id_col} = ?1 AND n.is_system = 0 \
         AND (?2 IS NULL OR n.created_at >= ?2) \
         ORDER BY n.created_at"
    );
    let mut stmt = conn.prepare(&sql)?;
    let rows = stmt
        .query_map(rusqlite::params![entity_id, since], |row| {
            Ok(NoteRow {
                body: row.get::<_, Option<String>>(0)?.unwrap_or_default(),
                author: row.get::<_, Option<String>>(1)?.unwrap_or_default(),
                created_at: row.get(2)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(rows)
}
/// Extract key decisions by correlating state/label events with
/// explanatory notes by the same actor within a 60-minute window.
///
/// State and label events are merged and scanned chronologically; for each
/// event, the first not-yet-used non-system note authored by the same actor
/// within `DECISION_WINDOW_MS` after the event becomes its context. At most
/// `max_decisions` entries are returned.
pub fn extract_key_decisions(
    conn: &Connection,
    entity_type: &str,
    entity_id: i64,
    since: Option<i64>,
    max_decisions: usize,
) -> Result<Vec<KeyDecision>> {
    // Merge both event streams into a single chronological list.
    let mut events = query_state_events(conn, entity_type, entity_id, since)?;
    let mut label_events = query_label_events(conn, entity_type, entity_id, since)?;
    events.append(&mut label_events);
    events.sort_by_key(|e| e.created_at);
    let notes = query_non_system_notes(conn, entity_type, entity_id, since)?;
    let mut decisions = Vec::new();
    // Parallel flags marking notes already paired with an event.
    let mut used_notes: Vec<bool> = vec![false; notes.len()];
    for event in &events {
        if decisions.len() >= max_decisions {
            break;
        }
        // Find the FIRST unconsumed non-system note by the SAME actor within 60 minutes
        // AFTER the event. Each note is used at most once to avoid duplicate decisions.
        let matching = notes.iter().enumerate().find(|(i, n)| {
            !used_notes[*i]
                && n.author == event.actor
                && n.created_at >= event.created_at
                && n.created_at <= event.created_at + DECISION_WINDOW_MS
        });
        if let Some((idx, note)) = matching {
            used_notes[idx] = true;
            decisions.push(KeyDecision {
                timestamp: ms_to_iso(event.created_at),
                actor: event.actor.clone(),
                action: event.description.clone(),
                // NOTE(review): stored untruncated even though NOTE_TRUNCATE_LEN /
                // truncate_note exist (dead code) — confirm that is intended.
                context_note: note.body.clone(),
            });
        }
    }
    Ok(decisions)
}
// ---------------------------------------------------------------------------
// Activity summary (Task 3)
// ---------------------------------------------------------------------------
/// Aggregate event counts and bounding timestamps for the activity section.
///
/// Runs three COUNT/MIN/MAX queries (state events, label events, non-system
/// notes), each optionally bounded below by `since`. `first_event` is
/// floored at `created_at_ms`; `last_event` is the overall maximum.
fn build_activity_summary(
    conn: &Connection,
    entity_type: &str,
    entity_id: i64,
    since: Option<i64>,
    created_at_ms: i64,
) -> Result<ActivitySummary> {
    let id_col = id_column_for(entity_type);
    let state_sql = format!(
        "SELECT COUNT(*), MIN(created_at), MAX(created_at) \
         FROM resource_state_events \
         WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2)"
    );
    // MIN/MAX are NULL (→ None) when there are no matching rows.
    let (state_count, state_min, state_max): (i64, Option<i64>, Option<i64>) =
        conn.query_row(&state_sql, rusqlite::params![entity_id, since], |row| {
            Ok((row.get(0)?, row.get(1)?, row.get(2)?))
        })?;
    let state_changes = state_count as usize;
    let label_sql = format!(
        "SELECT COUNT(*), MIN(created_at), MAX(created_at) \
         FROM resource_label_events \
         WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2)"
    );
    let (label_count, label_min, label_max): (i64, Option<i64>, Option<i64>) =
        conn.query_row(&label_sql, rusqlite::params![entity_id, since], |row| {
            Ok((row.get(0)?, row.get(1)?, row.get(2)?))
        })?;
    let label_changes = label_count as usize;
    let notes_sql = format!(
        "SELECT COUNT(*), MIN(n.created_at), MAX(n.created_at) \
         FROM notes n \
         JOIN discussions d ON n.discussion_id = d.id \
         WHERE d.{id_col} = ?1 AND n.is_system = 0 \
         AND (?2 IS NULL OR n.created_at >= ?2)"
    );
    let (notes_count, note_min, note_max): (i64, Option<i64>, Option<i64>) =
        conn.query_row(&notes_sql, rusqlite::params![entity_id, since], |row| {
            Ok((row.get(0)?, row.get(1)?, row.get(2)?))
        })?;
    let notes = notes_count as usize;
    // Floor first_event at created_at — label events can predate entity creation
    // due to bulk operations or API imports
    let first_event = [state_min, label_min, note_min]
        .iter()
        .copied()
        .flatten()
        .min()
        .map(|ts| ts.max(created_at_ms));
    let last_event = [state_max, label_max, note_max]
        .iter()
        .copied()
        .flatten()
        .max();
    Ok(ActivitySummary {
        state_changes,
        label_changes,
        notes,
        first_event: first_event.map(ms_to_iso),
        last_event: last_event.map(ms_to_iso),
    })
}
// ---------------------------------------------------------------------------
// Open threads (Task 3)
// ---------------------------------------------------------------------------
/// Unresolved resolvable discussion threads on the entity, most recently
/// active first.
fn fetch_open_threads(
    conn: &Connection,
    entity_type: &str,
    entity_id: i64,
) -> Result<Vec<OpenThread>> {
    let id_col = id_column_for(entity_type);
    // Single query with scalar subqueries — avoids N+1.
    // NOTE(review): `started_by` takes the first note of ANY kind, while
    // `note_count` and the excerpt filter is_system = 0 — confirm whether a
    // system note should be able to claim thread authorship.
    let sql = format!(
        "SELECT d.gitlab_discussion_id, d.first_note_at, d.last_note_at, \
            (SELECT COUNT(*) FROM notes n2 \
             WHERE n2.discussion_id = d.id AND n2.is_system = 0) AS note_count, \
            (SELECT n3.author_username FROM notes n3 \
             WHERE n3.discussion_id = d.id \
             ORDER BY n3.created_at ASC LIMIT 1) AS started_by, \
            (SELECT SUBSTR(n4.body, 1, 200) FROM notes n4 \
             WHERE n4.discussion_id = d.id AND n4.is_system = 0 \
             ORDER BY n4.created_at ASC LIMIT 1) AS first_note_body \
         FROM discussions d \
         WHERE d.{id_col} = ?1 \
         AND d.resolvable = 1 \
         AND d.resolved = 0 \
         ORDER BY d.last_note_at DESC"
    );
    let mut stmt = conn.prepare(&sql)?;
    let threads = stmt
        .query_map([entity_id], |row| {
            let count: i64 = row.get(3)?;
            let first_note_body: Option<String> = row.get(5)?;
            Ok(OpenThread {
                discussion_id: row.get(0)?,
                started_at: ms_to_iso(row.get::<_, i64>(1)?),
                last_note_at: ms_to_iso(row.get::<_, i64>(2)?),
                note_count: count as usize,
                started_by: row.get(4)?,
                first_note_excerpt: first_note_body,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(threads)
}
// ---------------------------------------------------------------------------
// Related entities (Task 3)
// ---------------------------------------------------------------------------
/// Maps plural entity_type ("issues"/"mrs") to the singular value stored in
/// the entity_references table; unknown values pass through unchanged.
fn ref_entity_type(entity_type: &str) -> &str {
    if entity_type == "issues" {
        "issue"
    } else if entity_type == "mrs" {
        "merge_request"
    } else {
        entity_type
    }
}
/// Collect the cross-reference graph around the entity: MRs that close the
/// issue (issues only), plus outgoing and incoming non-'closes' references.
/// Self-references and rows whose counterpart cannot be resolved are
/// filtered out in SQL.
fn fetch_related_entities(
    conn: &Connection,
    entity_type: &str,
    entity_id: i64,
) -> Result<RelatedEntities> {
    let ref_type = ref_entity_type(entity_type);
    // Closing MRs (only for issues)
    let closing_mrs = if entity_type == "issues" {
        let mut stmt = conn.prepare(
            "SELECT mr.iid, mr.title, mr.state, mr.web_url \
             FROM entity_references er \
             JOIN merge_requests mr ON mr.id = er.source_entity_id \
             WHERE er.target_entity_type = 'issue' \
             AND er.target_entity_id = ?1 \
             AND er.source_entity_type = 'merge_request' \
             AND er.reference_type = 'closes' \
             ORDER BY mr.iid",
        )?;
        stmt.query_map([entity_id], |row| {
            Ok(ClosingMrInfo {
                iid: row.get(0)?,
                title: row.get(1)?,
                state: row.get(2)?,
                web_url: row.get(3)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?
    } else {
        vec![]
    };
    // Outgoing references (excluding closes, shown above).
    // Filter out unresolved refs (NULL target_entity_iid) to avoid rusqlite type errors.
    // Excludes self-references (same type + same local ID).
    let mut out_stmt = conn.prepare(
        "SELECT er.target_entity_type, er.target_entity_iid, er.reference_type, \
            COALESCE(i.title, mr.title) as title, \
            COALESCE(i.state, mr.state) as state \
         FROM entity_references er \
         LEFT JOIN issues i ON er.target_entity_type = 'issue' AND i.id = er.target_entity_id \
         LEFT JOIN merge_requests mr ON er.target_entity_type = 'merge_request' AND mr.id = er.target_entity_id \
         WHERE er.source_entity_type = ?1 AND er.source_entity_id = ?2 \
         AND er.reference_type != 'closes' \
         AND er.target_entity_iid IS NOT NULL \
         AND NOT (er.target_entity_type = ?1 AND er.target_entity_id = ?2) \
         ORDER BY er.target_entity_type, er.target_entity_iid",
    )?;
    let outgoing: Vec<RelatedEntityInfo> = out_stmt
        .query_map(rusqlite::params![ref_type, entity_id], |row| {
            Ok(RelatedEntityInfo {
                entity_type: row.get(0)?,
                iid: row.get(1)?,
                reference_type: row.get(2)?,
                title: row.get(3)?,
                state: row.get(4)?,
                direction: "outgoing".to_string(),
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    // Incoming references (excluding closes).
    // COALESCE(i.iid, mr.iid) can be NULL if the source entity was deleted; filter those out.
    // Excludes self-references (same type + same local ID).
    let mut in_stmt = conn.prepare(
        "SELECT er.source_entity_type, COALESCE(i.iid, mr.iid) as iid, er.reference_type, \
            COALESCE(i.title, mr.title) as title, \
            COALESCE(i.state, mr.state) as state \
         FROM entity_references er \
         LEFT JOIN issues i ON er.source_entity_type = 'issue' AND i.id = er.source_entity_id \
         LEFT JOIN merge_requests mr ON er.source_entity_type = 'merge_request' AND mr.id = er.source_entity_id \
         WHERE er.target_entity_type = ?1 AND er.target_entity_id = ?2 \
         AND er.reference_type != 'closes' \
         AND COALESCE(i.iid, mr.iid) IS NOT NULL \
         AND NOT (er.source_entity_type = ?1 AND er.source_entity_id = ?2) \
         ORDER BY er.source_entity_type, COALESCE(i.iid, mr.iid)",
    )?;
    let incoming: Vec<RelatedEntityInfo> = in_stmt
        .query_map(rusqlite::params![ref_type, entity_id], |row| {
            Ok(RelatedEntityInfo {
                entity_type: row.get(0)?,
                iid: row.get(1)?,
                reference_type: row.get(2)?,
                title: row.get(3)?,
                state: row.get(4)?,
                direction: "incoming".to_string(),
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    // Output order: all outgoing edges first, then incoming.
    let mut related_issues = outgoing;
    related_issues.extend(incoming);
    Ok(RelatedEntities {
        closing_mrs,
        related_issues,
    })
}
// ---------------------------------------------------------------------------
// Timeline excerpt (Task 4)
// ---------------------------------------------------------------------------
/// Maximum events in the timeline excerpt.
const MAX_TIMELINE_EVENTS: usize = 20;
/// Build a timeline excerpt by calling `seed_timeline_direct` + `collect_events`.
/// Returns `None` on pipeline errors (timeline is supplementary, not critical).
///
/// On seed/collect failure this logs a warning and returns an EMPTY excerpt
/// rather than `None`, so callers still render the section header.
fn build_timeline_excerpt_from_pipeline(
    conn: &Connection,
    entity: &EntitySummary,
    params: &ExplainParams,
) -> Option<TimelineExcerpt> {
    // Only issues and MRs have timelines; anything else gets an empty excerpt.
    let timeline_entity_type = match entity.entity_type.as_str() {
        "issue" => "issue",
        "merge_request" => "merge_request",
        _ => {
            return Some(TimelineExcerpt {
                events: vec![],
                total_events: 0,
                truncated: false,
            });
        }
    };
    // Project resolution failures are tolerated: fall back to unscoped seed.
    let project_id = params
        .project
        .as_deref()
        .and_then(|p| resolve_project(conn, p).ok());
    let seed_result = match seed_timeline_direct(conn, timeline_entity_type, params.iid, project_id)
    {
        Ok(result) => result,
        Err(e) => {
            tracing::warn!("explain: timeline seed failed: {e}");
            return Some(TimelineExcerpt {
                events: vec![],
                total_events: 0,
                truncated: false,
            });
        }
    };
    // Request a generous limit from the pipeline — we'll take the tail (most recent)
    let pipeline_limit = 500;
    let (events, _total) = match collect_events(
        conn,
        &seed_result.seed_entities,
        &[],
        &seed_result.evidence_notes,
        &seed_result.matched_discussions,
        params.since,
        pipeline_limit,
    ) {
        Ok(result) => result,
        Err(e) => {
            tracing::warn!("explain: timeline collect failed: {e}");
            return Some(TimelineExcerpt {
                events: vec![],
                total_events: 0,
                truncated: false,
            });
        }
    };
    // NOTE(review): `total_events` is the COLLECTED count (capped by
    // pipeline_limit = 500); the pipeline's own total (`_total`) is
    // discarded — confirm that is the intended reporting semantics.
    let total_events = events.len();
    let truncated = total_events > MAX_TIMELINE_EVENTS;
    // Keep the MOST RECENT events — events are sorted ASC by collect_events,
    // so we skip from the front to keep the tail
    let start = total_events.saturating_sub(MAX_TIMELINE_EVENTS);
    let summaries = events[start..]
        .iter()
        .map(|e| TimelineEventSummary {
            timestamp: ms_to_iso(e.timestamp),
            event_type: timeline_event_type_label(&e.event_type),
            actor: e.actor.clone(),
            summary: e.summary.clone(),
        })
        .collect();
    Some(TimelineExcerpt {
        events: summaries,
        total_events,
        truncated,
    })
}
/// Render a `TimelineEventType` as a compact machine-readable label,
/// e.g. "created", "state_changed:closed", "label_added:bug".
fn timeline_event_type_label(event_type: &crate::timeline::TimelineEventType) -> String {
    use crate::timeline::TimelineEventType as E;
    match event_type {
        E::Created => String::from("created"),
        E::Merged => String::from("merged"),
        E::StateChanged { state } => format!("state_changed:{state}"),
        E::LabelAdded { label } => format!("label_added:{label}"),
        E::LabelRemoved { label } => format!("label_removed:{label}"),
        E::MilestoneSet { milestone } => format!("milestone_set:{milestone}"),
        E::MilestoneRemoved { milestone } => format!("milestone_removed:{milestone}"),
        E::NoteEvidence { .. } => String::from("note_evidence"),
        E::DiscussionThread { .. } => String::from("discussion_thread"),
        E::CrossReferenced { .. } => String::from("cross_referenced"),
    }
}
// ---------------------------------------------------------------------------
// Handler (called from main.rs)
// ---------------------------------------------------------------------------
/// CLI entry point for `explain`: validates inputs, loads config and the
/// database, runs the explain pipeline, and prints the result as either
/// human-readable text or a JSON envelope (`robot_mode`).
#[allow(clippy::too_many_arguments)]
pub fn handle_explain(
    config_override: Option<&str>,
    entity_type: &str,
    iid: i64,
    project: Option<&str>,
    sections: Option<Vec<String>>,
    no_timeline: bool,
    max_decisions: usize,
    since: Option<&str>,
    robot_mode: bool,
) -> std::result::Result<(), Box<dyn std::error::Error>> {
    // Timed from entry so meta.elapsed_ms covers the whole command.
    let start = std::time::Instant::now();
    // Normalize singular forms
    let entity_type = match entity_type {
        "issue" => "issues",
        "mr" => "mrs",
        other => other,
    };
    // Validate sections
    const VALID_SECTIONS: &[&str] = &[
        "entity",
        "description",
        "key_decisions",
        "activity",
        "open_threads",
        "related",
        "timeline",
    ];
    if let Some(ref secs) = sections {
        for s in secs {
            if !VALID_SECTIONS.contains(&s.as_str()) {
                return Err(Box::new(LoreError::Other(format!(
                    "Invalid section '{s}'. Valid: {}",
                    VALID_SECTIONS.join(", ")
                ))));
            }
        }
    }
    // Parse --since
    // NOTE(review): a --since value that fails to parse is silently treated
    // as "no bound" (`and_then` maps it to None) — confirm it should not be
    // a hard error like an invalid section.
    let since_ms = since.and_then(crate::core::time::parse_since);
    let config = crate::Config::load(config_override)?;
    let db_path = crate::core::paths::get_db_path(config.storage.db_path.as_deref());
    let conn = crate::core::db::create_connection(&db_path)?;
    // CLI --project wins over the config default, if any.
    let effective_project = config.effective_project(project);
    let params = ExplainParams {
        entity_type: entity_type.to_string(),
        iid,
        project: effective_project.map(String::from),
        sections,
        no_timeline,
        max_decisions,
        since: since_ms,
    };
    let result = run_explain(&conn, &params)?;
    let elapsed_ms = start.elapsed().as_millis() as u64;
    if robot_mode {
        print_explain_json(&result, elapsed_ms)?;
    } else {
        print_explain(&result);
    }
    Ok(())
}
// ---------------------------------------------------------------------------
// Output rendering (Task 5 fills these in fully)
// ---------------------------------------------------------------------------
/// Print the explain result as a single-line JSON envelope:
/// `{"ok": true, "data": <result>, "meta": {"elapsed_ms": <ms>}}`.
pub fn print_explain_json(result: &ExplainResult, elapsed_ms: u64) -> Result<()> {
    let envelope = serde_json::json!({
        "ok": true,
        "data": result,
        "meta": { "elapsed_ms": elapsed_ms }
    });
    let rendered = serde_json::to_string(&envelope)
        .map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))?;
    println!("{rendered}");
    Ok(())
}
/// Render an [`ExplainResult`] as themed, human-readable terminal output.
///
/// Only the entity header is unconditional; every other section is skipped
/// when its field is `None` (filtered out via `--sections`) or empty.
pub fn print_explain(result: &ExplainResult) {
    use crate::cli::render::{self, Icons, Theme};
    // ISO timestamp -> relative time string; falls back to the raw ISO text
    // when `iso_to_ms` cannot parse it.
    let to_relative = |iso: &str| -> String {
        iso_to_ms(iso)
            .map(render::format_relative_time)
            .unwrap_or_else(|| iso.to_string())
    };
    // ISO timestamp -> short date string; same fallback as `to_relative`.
    let to_date = |iso: &str| -> String {
        iso_to_ms(iso)
            .map(render::format_date)
            .unwrap_or_else(|| iso.to_string())
    };
    // Entity header: choose display label, ref color, and "#iid"/"!iid" form
    // by entity type. Unknown types fall back to the raw type string with an
    // issue-style "#" prefix.
    let (type_label, ref_style, ref_str) = match result.entity.entity_type.as_str() {
        "issue" => (
            "Issue",
            Theme::issue_ref(),
            format!("#{}", result.entity.iid),
        ),
        "merge_request" => ("MR", Theme::mr_ref(), format!("!{}", result.entity.iid)),
        _ => (
            result.entity.entity_type.as_str(),
            Theme::info(),
            format!("#{}", result.entity.iid),
        ),
    };
    let state_style = match result.entity.state.as_str() {
        "opened" => Theme::state_opened(),
        "closed" => Theme::state_closed(),
        "merged" => Theme::state_merged(),
        _ => Theme::dim(),
    };
    // NOTE(review): "{}{}" places the title directly after the ref with no
    // separating space — confirm this is the intended layout (the
    // related-issue lines below use "{} {}" between ref and title).
    println!(
        "{} {} {}{}",
        Icons::info(),
        Theme::bold().render(type_label),
        ref_style.render(&ref_str),
        Theme::bold().render(&result.entity.title)
    );
    // Metadata line: project path, state, author, relative creation time.
    println!(
        " {} {} {} {}",
        Theme::muted().render(&result.entity.project_path),
        state_style.render(&result.entity.state),
        Theme::username().render(&format!("@{}", result.entity.author)),
        Theme::dim().render(&to_relative(&result.entity.created_at)),
    );
    if !result.entity.assignees.is_empty() {
        let styled: Vec<String> = result
            .entity
            .assignees
            .iter()
            .map(|a| Theme::username().render(&format!("@{a}")))
            .collect();
        println!(" Assignees: {}", styled.join(", "));
    }
    if !result.entity.labels.is_empty() {
        println!(
            " Labels: {}",
            Theme::dim().render(&result.entity.labels.join(", "))
        );
    }
    if let Some(ref url) = result.entity.url {
        println!(" {}", Theme::dim().render(url));
    }
    // Description
    if let Some(ref desc) = result.description_excerpt {
        println!("{}", render::section_divider("Description"));
        for line in desc.lines() {
            println!(" {line}");
        }
    }
    // Key decisions
    if let Some(ref decisions) = result.key_decisions
        && !decisions.is_empty()
    {
        println!(
            "{}",
            render::section_divider(&format!("Key Decisions ({})", decisions.len()))
        );
        for d in decisions {
            // NOTE(review): `d.action` is appended with no space after
            // "@actor" — confirm `action` carries its own leading separator
            // (e.g. " state: closed").
            println!(
                " {} {}{}",
                Theme::muted().render(&to_date(&d.timestamp)),
                Theme::username().render(&format!("@{}", d.actor)),
                d.action,
            );
            // The correlated note that explains the decision, indented.
            for line in d.context_note.lines() {
                println!(" {line}");
            }
        }
    }
    // Activity
    if let Some(ref act) = result.activity {
        println!("{}", render::section_divider("Activity"));
        println!(
            " {} state changes, {} label changes, {} notes",
            act.state_changes, act.label_changes, act.notes
        );
        if let Some(ref first) = act.first_event {
            println!(
                " First event: {}",
                Theme::dim().render(&to_relative(first))
            );
        }
        if let Some(ref last) = act.last_event {
            println!(
                " Last event: {}",
                Theme::dim().render(&to_relative(last))
            );
        }
    }
    // Open threads
    if let Some(ref threads) = result.open_threads
        && !threads.is_empty()
    {
        println!(
            "{}",
            render::section_divider(&format!("Open Threads ({})", threads.len()))
        );
        for t in threads {
            println!(
                " {} by {} ({} notes, last: {})",
                Theme::dim().render(&t.discussion_id),
                Theme::username().render(&format!(
                    "@{}",
                    t.started_by.as_deref().unwrap_or("unknown")
                )),
                t.note_count,
                Theme::dim().render(&to_relative(&t.last_note_at))
            );
            if let Some(ref excerpt) = t.first_note_excerpt {
                // Width is terminal-dependent via flex_width; assumes the
                // arguments are (reserved, min_flex) — TODO confirm against
                // render::flex_width's signature.
                let preview = render::truncate(excerpt, render::flex_width(8, 30));
                // Show first line only in human output
                if let Some(line) = preview.lines().next() {
                    println!(" {}", Theme::muted().render(line));
                }
            }
        }
    }
    // Related
    if let Some(ref related) = result.related
        && (!related.closing_mrs.is_empty() || !related.related_issues.is_empty())
    {
        let total = related.closing_mrs.len() + related.related_issues.len();
        println!("{}", render::section_divider(&format!("Related ({total})")));
        for mr in &related.closing_mrs {
            let mr_state = match mr.state.as_str() {
                "merged" => Theme::state_merged(),
                "closed" => Theme::state_closed(),
                "opened" => Theme::state_opened(),
                _ => Theme::dim(),
            };
            // NOTE(review): no space between "!iid" and the title here,
            // unlike the related-issue lines below — confirm intentional.
            println!(
                " {} {}{} {}",
                Icons::success(),
                Theme::mr_ref().render(&format!("!{}", mr.iid)),
                render::truncate(&mr.title, render::flex_width(25, 20)),
                mr_state.render(&format!("[{}]", mr.state))
            );
        }
        for ri in &related.related_issues {
            let state_str = ri
                .state
                .as_deref()
                .map_or(String::new(), |s| format!(" [{s}]"));
            // "incoming" = the other entity references this one; otherwise
            // this entity references the other.
            let arrow = if ri.direction == "incoming" {
                "<-"
            } else {
                "->"
            };
            let (ref_style, ref_prefix) = match ri.entity_type.as_str() {
                "issue" => (Theme::issue_ref(), "#"),
                "merge_request" => (Theme::mr_ref(), "!"),
                _ => (Theme::info(), "#"),
            };
            println!(
                " {arrow} {} {}{state_str} ({})",
                ref_style.render(&format!("{ref_prefix}{}", ri.iid)),
                render::truncate(
                    ri.title.as_deref().unwrap_or("(untitled)"),
                    render::flex_width(30, 20)
                ),
                Theme::dim().render(&ri.reference_type)
            );
        }
    }
    // Timeline excerpt
    if let Some(ref excerpt) = result.timeline_excerpt
        && !excerpt.events.is_empty()
    {
        // Header advertises truncation when fewer events are shown than exist.
        let title = if excerpt.truncated {
            format!(
                "Timeline (showing {} of {})",
                excerpt.events.len(),
                excerpt.total_events
            )
        } else {
            format!("Timeline ({})", excerpt.total_events)
        };
        println!("{}", render::section_divider(&title));
        for e in &excerpt.events {
            // Actor may be absent (e.g. system events); render nothing then.
            let actor_str = e
                .actor
                .as_deref()
                .map(|a| Theme::username().render(&format!("@{a}")))
                .unwrap_or_default();
            println!(
                " {} {} {} {}",
                Theme::muted().render(&to_date(&e.timestamp)),
                e.event_type,
                actor_str,
                e.summary
            );
        }
    }
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
// Unit tests for the explain command: fixtures insert rows directly into an
// in-memory DB, then exercise run_explain and its section helpers.
#[cfg(test)]
mod tests {
    use super::*;
    // Create an in-memory DB with migrations applied and one project row.
    // Returns the connection and the project's rowid.
    fn setup_explain_db() -> (Connection, i64) {
        let conn = crate::core::db::create_connection(std::path::Path::new(":memory:")).unwrap();
        crate::core::db::run_migrations(&conn).unwrap();
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \
            VALUES (100, 'test/project', 'https://gitlab.example.com/test/project')",
            [],
        )
        .unwrap();
        let project_id = conn.last_insert_rowid();
        (conn, project_id)
    }
    // Insert an opened issue "Test Issue" by 'testuser' with fixed timestamps;
    // gitlab_id is derived as iid * 10. Returns the issue rowid.
    fn insert_test_issue(conn: &Connection, project_id: i64, iid: i64, desc: Option<&str>) -> i64 {
        conn.execute(
            "INSERT INTO issues (gitlab_id, iid, project_id, title, state, author_username, \
            created_at, updated_at, last_seen_at, description) \
            VALUES (?1, ?2, ?3, 'Test Issue', 'opened', 'testuser', \
            1704067200000, 1704153600000, 1704153600000, ?4)",
            rusqlite::params![iid * 10, iid, project_id, desc],
        )
        .unwrap();
        conn.last_insert_rowid()
    }
    // Insert a merged MR "Test MR" (feat -> main) with fixed timestamps.
    // Returns the MR rowid.
    fn insert_test_mr(conn: &Connection, project_id: i64, iid: i64) -> i64 {
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, iid, project_id, title, state, draft, \
            author_username, source_branch, target_branch, created_at, updated_at, \
            merged_at, last_seen_at) \
            VALUES (?1, ?2, ?3, 'Test MR', 'merged', 0, 'testuser', 'feat', 'main', \
            1704067200000, 1704153600000, 1704240000000, 1704153600000)",
            rusqlite::params![iid * 10, iid, project_id],
        )
        .unwrap();
        conn.last_insert_rowid()
    }
    // Insert a non-resolvable discussion attached to either an issue or an MR
    // (noteable_type is derived from which id is Some). Returns the rowid.
    fn insert_test_discussion(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        gitlab_discussion_id: &str,
    ) -> i64 {
        conn.execute(
            "INSERT INTO discussions (gitlab_discussion_id, project_id, noteable_type, issue_id, \
            merge_request_id, resolvable, resolved, first_note_at, last_note_at, last_seen_at) \
            VALUES (?1, ?2, ?3, ?4, ?5, 0, 0, 1704067200000, 1704153600000, 1704153600000)",
            rusqlite::params![
                gitlab_discussion_id,
                project_id,
                if issue_id.is_some() {
                    "Issue"
                } else {
                    "MergeRequest"
                },
                issue_id,
                mr_id,
            ],
        )
        .unwrap();
        conn.last_insert_rowid()
    }
    // Insert a note with fixed body/author/timestamps; `is_system` toggles
    // whether it counts as a system note.
    fn insert_test_note(
        conn: &Connection,
        project_id: i64,
        discussion_id: i64,
        gitlab_id: i64,
        is_system: bool,
    ) {
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, body, author_username, \
            created_at, updated_at, last_seen_at, is_system) \
            VALUES (?1, ?2, ?3, 'Test note body', 'testuser', \
            1704067200000, 1704067200000, 1704067200000, ?4)",
            rusqlite::params![gitlab_id, discussion_id, project_id, is_system],
        )
        .unwrap();
    }
    // Insert a resource state event (closed/reopened). Note: gitlab_id reuses
    // `created_at`, so events stay unique only while timestamps differ.
    fn insert_test_state_event(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        state: &str,
        actor: &str,
        created_at: i64,
    ) {
        conn.execute(
            "INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, \
            merge_request_id, state, actor_username, created_at) \
            VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
            rusqlite::params![
                created_at, project_id, issue_id, mr_id, state, actor, created_at,
            ],
        )
        .unwrap();
    }
    // End-to-end: issue with a note + state event; all sections requested and
    // present (timeline suppressed via no_timeline).
    #[test]
    fn test_explain_issue_basic() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 42, Some("Issue description text"));
        let disc_id = insert_test_discussion(&conn, project_id, Some(issue_id), None, "disc-001");
        insert_test_note(&conn, project_id, disc_id, 1001, false);
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "closed",
            "testuser",
            1704100000000,
        );
        let params = ExplainParams {
            entity_type: "issues".to_string(),
            iid: 42,
            project: None,
            sections: None,
            no_timeline: true,
            max_decisions: 10,
            since: None,
        };
        let result = run_explain(&conn, &params).unwrap();
        assert_eq!(result.entity.entity_type, "issue");
        assert_eq!(result.entity.iid, 42);
        assert_eq!(result.entity.title, "Test Issue");
        assert_eq!(result.entity.state, "opened");
        assert_eq!(result.entity.author, "testuser");
        // All sections present (as Some)
        assert!(result.description_excerpt.is_some());
        assert!(result.key_decisions.is_some());
        assert!(result.activity.is_some());
        assert!(result.open_threads.is_some());
        assert!(result.related.is_some());
        // timeline is None when no_timeline=true (tested separately)
    }
    // MR lookup: entity_type "mrs" resolves to a merge_request summary.
    #[test]
    fn test_explain_mr() {
        let (conn, project_id) = setup_explain_db();
        insert_test_mr(&conn, project_id, 99);
        let params = ExplainParams {
            entity_type: "mrs".to_string(),
            iid: 99,
            project: None,
            sections: None,
            no_timeline: true,
            max_decisions: 10,
            since: None,
        };
        let result = run_explain(&conn, &params).unwrap();
        assert_eq!(result.entity.entity_type, "merge_request");
        assert_eq!(result.entity.iid, 99);
        assert_eq!(result.entity.title, "Test MR");
        assert_eq!(result.entity.state, "merged");
    }
    // run_explain accepts the already-normalized plural entity type.
    #[test]
    fn test_explain_singular_entity_type() {
        let (conn, project_id) = setup_explain_db();
        insert_test_issue(&conn, project_id, 42, Some("Description"));
        // Use "issues" (the normalized form) since run_explain expects already-normalized types.
        // The normalization happens in handle_explain, which we test via the handler logic.
        // Here we verify the skeleton works with the normalized form.
        let params = ExplainParams {
            entity_type: "issues".to_string(),
            iid: 42,
            project: None,
            sections: None,
            no_timeline: true,
            max_decisions: 10,
            since: None,
        };
        let result = run_explain(&conn, &params).unwrap();
        assert_eq!(result.entity.entity_type, "issue");
        assert_eq!(result.entity.iid, 42);
    }
    // A NULL description renders as the "(no description)" placeholder.
    #[test]
    fn test_explain_description_excerpt() {
        let (conn, project_id) = setup_explain_db();
        insert_test_issue(&conn, project_id, 43, None);
        let params = ExplainParams {
            entity_type: "issues".to_string(),
            iid: 43,
            project: None,
            sections: None,
            no_timeline: true,
            max_decisions: 10,
            since: None,
        };
        let result = run_explain(&conn, &params).unwrap();
        assert_eq!(
            result.description_excerpt.as_deref(),
            Some("(no description)")
        );
    }
    // --sections filtering: only requested sections are Some; the entity
    // header is always populated.
    #[test]
    fn test_explain_section_filtering() {
        let (conn, project_id) = setup_explain_db();
        insert_test_issue(&conn, project_id, 44, Some("Desc"));
        let params = ExplainParams {
            entity_type: "issues".to_string(),
            iid: 44,
            project: None,
            sections: Some(vec!["key_decisions".to_string(), "activity".to_string()]),
            no_timeline: true,
            max_decisions: 10,
            since: None,
        };
        let result = run_explain(&conn, &params).unwrap();
        // Entity always present
        assert_eq!(result.entity.iid, 44);
        // Selected sections present
        assert!(result.key_decisions.is_some());
        assert!(result.activity.is_some());
        // Unselected sections absent
        assert!(result.description_excerpt.is_none());
        assert!(result.open_threads.is_none());
        assert!(result.related.is_none());
        assert!(result.timeline_excerpt.is_none());
    }
    // truncate_description: placeholder for None/empty, otherwise passes the
    // text through unmodified (despite the function's name).
    #[test]
    fn test_truncate_description() {
        assert_eq!(truncate_description(None), "(no description)");
        assert_eq!(truncate_description(Some("")), "(no description)");
        assert_eq!(truncate_description(Some("short")), "short");
        let long = "a".repeat(600);
        let result = truncate_description(Some(&long));
        assert_eq!(result, long); // no truncation — full description preserved
    }
    // -----------------------------------------------------------------------
    // Test helpers for key-decisions heuristic (Task 2)
    // -----------------------------------------------------------------------
    // Insert a resource label event (add/remove of `label_name`); gitlab_id
    // reuses `created_at`, same uniqueness caveat as state events.
    #[allow(clippy::too_many_arguments)]
    fn insert_test_label_event(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        action: &str,
        label_name: &str,
        actor: &str,
        created_at: i64,
    ) {
        conn.execute(
            "INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, \
            merge_request_id, action, label_name, actor_username, created_at) \
            VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)",
            rusqlite::params![
                created_at, project_id, issue_id, mr_id, action, label_name, actor, created_at
            ],
        )
        .unwrap();
    }
    // Insert a note with caller-controlled body/author/timestamp (created,
    // updated, and last_seen all set to `created_at` via the repeated ?6).
    #[allow(clippy::too_many_arguments)]
    fn insert_test_note_with(
        conn: &Connection,
        project_id: i64,
        discussion_id: i64,
        gitlab_id: i64,
        body: &str,
        author: &str,
        created_at: i64,
        is_system: bool,
    ) {
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, body, author_username, \
            created_at, updated_at, last_seen_at, is_system) \
            VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?6, ?6, ?7)",
            rusqlite::params![
                gitlab_id,
                discussion_id,
                project_id,
                body,
                author,
                created_at,
                is_system
            ],
        )
        .unwrap();
    }
    // -----------------------------------------------------------------------
    // Key-decisions heuristic tests (Task 2)
    // -----------------------------------------------------------------------
    // Same-author note within the correlation window (30min < 60min) attaches
    // to the state event as a key decision.
    #[test]
    fn test_explain_key_decision_heuristic() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 50, Some("desc"));
        // State event at T
        let t = 1_704_100_000_000_i64;
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "closed",
            "alice",
            t,
        );
        // Note by SAME author at T + 30 minutes (within 60min window)
        let disc_id = insert_test_discussion(&conn, project_id, Some(issue_id), None, "disc-kd1");
        insert_test_note_with(
            &conn,
            project_id,
            disc_id,
            5001,
            "Closing because the fix landed in MR !200",
            "alice",
            t + 30 * 60 * 1000,
            false,
        );
        let decisions = extract_key_decisions(&conn, "issues", issue_id, None, 10).unwrap();
        assert_eq!(decisions.len(), 1);
        assert_eq!(decisions[0].actor, "alice");
        assert!(decisions[0].action.contains("state:"));
        assert!(decisions[0].action.contains("closed"));
        assert!(decisions[0].context_note.contains("Closing because"));
    }
    // A note by a different author inside the window must not correlate.
    #[test]
    fn test_explain_key_decision_ignores_unrelated_notes() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 51, Some("desc"));
        let t = 1_704_100_000_000_i64;
        // State event by alice
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "closed",
            "alice",
            t,
        );
        // Note by BOB at T + 30min — different author, should NOT correlate
        let disc_id = insert_test_discussion(&conn, project_id, Some(issue_id), None, "disc-kd2");
        insert_test_note_with(
            &conn,
            project_id,
            disc_id,
            5002,
            "Some unrelated comment",
            "bob",
            t + 30 * 60 * 1000,
            false,
        );
        let decisions = extract_key_decisions(&conn, "issues", issue_id, None, 10).unwrap();
        assert_eq!(decisions.len(), 0);
    }
    // Label events correlate too; the action string is "label: +<name>".
    #[test]
    fn test_explain_key_decision_label_event() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 52, Some("desc"));
        let t = 1_704_100_000_000_i64;
        // Label add event
        insert_test_label_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "add",
            "bugfix",
            "alice",
            t,
        );
        // Correlated note by same actor
        let disc_id = insert_test_discussion(&conn, project_id, Some(issue_id), None, "disc-kd3");
        insert_test_note_with(
            &conn,
            project_id,
            disc_id,
            5003,
            "Labeling as bugfix per triage",
            "alice",
            t + 10 * 60 * 1000,
            false,
        );
        let decisions = extract_key_decisions(&conn, "issues", issue_id, None, 10).unwrap();
        assert_eq!(decisions.len(), 1);
        assert!(
            decisions[0].action.starts_with("label: +"),
            "Expected action to start with 'label: +', got: {}",
            decisions[0].action
        );
    }
    // max_decisions caps the returned list even when more decisions exist.
    #[test]
    fn test_explain_max_decisions() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 53, Some("desc"));
        let base_t = 1_704_100_000_000_i64;
        let disc_id = insert_test_discussion(&conn, project_id, Some(issue_id), None, "disc-kd4");
        // Insert 5 correlated event+note pairs (each 2 hours apart to avoid overlap)
        for i in 0..5 {
            let event_t = base_t + i64::from(i) * 2 * 60 * 60 * 1000;
            insert_test_state_event(
                &conn,
                project_id,
                Some(issue_id),
                None,
                if i % 2 == 0 { "closed" } else { "reopened" },
                "alice",
                event_t,
            );
            insert_test_note_with(
                &conn,
                project_id,
                disc_id,
                5010 + i64::from(i),
                &format!("Reason for change {i}"),
                "alice",
                event_t + 10 * 60 * 1000,
                false,
            );
        }
        let decisions = extract_key_decisions(&conn, "issues", issue_id, None, 3).unwrap();
        assert_eq!(decisions.len(), 3, "Expected max_decisions=3 to cap at 3");
    }
    // A `since` cutoff excludes decisions whose event predates it.
    #[test]
    fn test_explain_since_scopes_events() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 54, Some("desc"));
        let now = 1_704_200_000_000_i64;
        let sixty_days_ago = now - 60 * 24 * 60 * 60 * 1000;
        let ten_days_ago = now - 10 * 24 * 60 * 60 * 1000;
        let thirty_days_ago = now - 30 * 24 * 60 * 60 * 1000;
        let disc_id = insert_test_discussion(&conn, project_id, Some(issue_id), None, "disc-kd5");
        // Old event at T-60d with correlated note
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "closed",
            "alice",
            sixty_days_ago,
        );
        insert_test_note_with(
            &conn,
            project_id,
            disc_id,
            5020,
            "Old closure reason",
            "alice",
            sixty_days_ago + 10 * 60 * 1000,
            false,
        );
        // Recent event at T-10d with correlated note
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "reopened",
            "alice",
            ten_days_ago,
        );
        insert_test_note_with(
            &conn,
            project_id,
            disc_id,
            5021,
            "Recent reopening reason",
            "alice",
            ten_days_ago + 10 * 60 * 1000,
            false,
        );
        // Call with since = 30 days ago — should only get the recent event
        let decisions =
            extract_key_decisions(&conn, "issues", issue_id, Some(thirty_days_ago), 10).unwrap();
        assert_eq!(decisions.len(), 1, "Expected only the recent event");
        assert!(decisions[0].context_note.contains("Recent reopening"));
    }
    // -----------------------------------------------------------------------
    // Activity / open threads / related tests (Task 3)
    // -----------------------------------------------------------------------
    // Like insert_test_discussion but with caller-controlled resolvable /
    // resolved flags and note timestamps (last_seen_at mirrors last_note_at).
    #[allow(clippy::too_many_arguments)]
    fn insert_resolvable_discussion(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        gitlab_discussion_id: &str,
        resolvable: bool,
        resolved: bool,
        first_note_at: i64,
        last_note_at: i64,
    ) -> i64 {
        conn.execute(
            "INSERT INTO discussions (gitlab_discussion_id, project_id, noteable_type, issue_id, \
            merge_request_id, resolvable, resolved, first_note_at, last_note_at, last_seen_at) \
            VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?9)",
            rusqlite::params![
                gitlab_discussion_id,
                project_id,
                if issue_id.is_some() {
                    "Issue"
                } else {
                    "MergeRequest"
                },
                issue_id,
                mr_id,
                resolvable,
                resolved,
                first_note_at,
                last_note_at,
            ],
        )
        .unwrap();
        conn.last_insert_rowid()
    }
    // Only resolvable-and-unresolved discussions surface as open threads;
    // started_by is the earliest note's author and note_count is per-thread.
    #[test]
    fn test_explain_open_threads() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 60, Some("desc"));
        // Unresolved, resolvable discussion
        let disc1 = insert_resolvable_discussion(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "disc-unresolved",
            true,
            false,
            1_000_000,
            3_000_000,
        );
        insert_test_note_with(
            &conn, project_id, disc1, 6001, "note1", "alice", 1_000_000, false,
        );
        insert_test_note_with(
            &conn, project_id, disc1, 6002, "note2", "bob", 2_000_000, false,
        );
        // Resolved discussion (should NOT appear)
        let disc2 = insert_resolvable_discussion(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "disc-resolved",
            true,
            true,
            1_500_000,
            2_500_000,
        );
        insert_test_note_with(
            &conn, project_id, disc2, 6003, "note3", "charlie", 1_500_000, false,
        );
        let threads = fetch_open_threads(&conn, "issues", issue_id).unwrap();
        assert_eq!(threads.len(), 1, "Only unresolved thread should appear");
        assert_eq!(threads[0].discussion_id, "disc-unresolved");
        assert_eq!(threads[0].started_by.as_deref(), Some("alice"));
        assert_eq!(threads[0].note_count, 2);
    }
    // Activity summary tallies state events, label events, and non-system
    // notes, and reports first/last event timestamps.
    #[test]
    fn test_explain_activity_summary() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 61, Some("desc"));
        // 2 state events
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "closed",
            "alice",
            1_000_000,
        );
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "reopened",
            "alice",
            5_000_000,
        );
        // 1 label event
        insert_test_label_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "add",
            "bug",
            "alice",
            1_500_000,
        );
        // 3 non-system notes
        let disc = insert_test_discussion(&conn, project_id, Some(issue_id), None, "disc-act");
        for i in 0..3 {
            insert_test_note_with(
                &conn,
                project_id,
                disc,
                7001 + i,
                &format!("comment {i}"),
                "commenter",
                1_100_000 + i * 100_000,
                false,
            );
        }
        let activity =
            build_activity_summary(&conn, "issues", issue_id, None, 1_704_067_200_000).unwrap();
        assert_eq!(activity.state_changes, 2);
        assert_eq!(activity.label_changes, 1);
        assert_eq!(activity.notes, 3);
        assert!(activity.first_event.is_some());
        assert!(activity.last_event.is_some());
    }
    // `since` scopes the activity counts to events at or after the cutoff.
    #[test]
    fn test_explain_activity_with_since() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 62, Some("desc"));
        // Old event
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "closed",
            "alice",
            1_000_000,
        );
        // Recent event
        insert_test_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "reopened",
            "alice",
            5_000_000,
        );
        let activity = build_activity_summary(
            &conn,
            "issues",
            issue_id,
            Some(3_000_000),
            1_704_067_200_000,
        )
        .unwrap();
        assert_eq!(activity.state_changes, 1, "Only the recent event");
    }
    // A 'closes' entity_reference from an MR to this issue surfaces in
    // related.closing_mrs with the MR's current state.
    #[test]
    fn test_explain_related_closing_mrs() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 63, Some("desc"));
        let mr_id = insert_test_mr(&conn, project_id, 99);
        // Insert a closing reference: MR closes issue
        conn.execute(
            "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, \
            target_entity_type, target_entity_id, target_entity_iid, reference_type, \
            source_method, created_at) \
            VALUES (?, 'merge_request', ?, 'issue', ?, 63, 'closes', 'api', 1000000)",
            rusqlite::params![project_id, mr_id, issue_id],
        )
        .unwrap();
        let related = fetch_related_entities(&conn, "issues", issue_id).unwrap();
        assert_eq!(related.closing_mrs.len(), 1);
        assert_eq!(related.closing_mrs[0].iid, 99);
        assert_eq!(related.closing_mrs[0].state, "merged");
    }
    // References whose target could not be resolved (NULL target iid) are
    // filtered out instead of causing an error.
    #[test]
    fn test_explain_related_skips_unresolved_refs() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 65, Some("desc"));
        // Insert an unresolved cross-project reference (NULL target_entity_iid)
        conn.execute(
            "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, \
            target_entity_type, target_entity_id, target_entity_iid, reference_type, \
            source_method, created_at) \
            VALUES (?, 'issue', ?, 'issue', NULL, NULL, 'mentioned', 'note_parse', 1000000)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();
        // Should NOT crash — unresolved refs are filtered out
        let related = fetch_related_entities(&conn, "issues", issue_id).unwrap();
        assert!(
            related.related_issues.is_empty(),
            "Unresolved refs (NULL iid) should be excluded"
        );
    }
    // Empty-data baseline: all helpers return zeroed/empty results without
    // erroring for an issue with no events, notes, or references.
    #[test]
    fn test_explain_empty_activity() {
        let (conn, project_id) = setup_explain_db();
        let issue_id = insert_test_issue(&conn, project_id, 64, None);
        let activity =
            build_activity_summary(&conn, "issues", issue_id, None, 1_704_067_200_000).unwrap();
        assert_eq!(activity.state_changes, 0);
        assert_eq!(activity.label_changes, 0);
        assert_eq!(activity.notes, 0);
        assert!(activity.first_event.is_none());
        assert!(activity.last_event.is_none());
        let threads = fetch_open_threads(&conn, "issues", issue_id).unwrap();
        assert!(threads.is_empty());
        let related = fetch_related_entities(&conn, "issues", issue_id).unwrap();
        assert!(related.closing_mrs.is_empty());
        assert!(related.related_issues.is_empty());
    }
}