Add `lore who` command with 5 query modes answering collaboration questions using existing DB data (280K notes, 210K discussions, 33K DiffNotes): - Expert: who knows about a file/directory (DiffNote path analysis + MR breadth scoring) - Workload: what is a person working on (assigned issues, authored/reviewing MRs, discussions) - Active: what discussions need attention (unresolved resolvable, global/project-scoped) - Overlap: who else is touching these files (dual author+reviewer role tracking) - Reviews: what review patterns does a person have (prefix-based category extraction) Includes migration 017 (5 composite indexes), CLI skeleton with clap conflicts_with validation, robot JSON output with input+resolved_input reproducibility, human terminal output, and 20 unit tests. All quality gates pass. Closes: bd-1q8z, bd-34rr, bd-2rk9, bd-2ldg, bd-zqpf, bd-s3rc, bd-m7k1, bd-b51e, bd-2711, bd-1rdi, bd-3mj2, bd-tfh3, bd-zibc, bd-g0d5 Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2677 lines
90 KiB
Rust
2677 lines
90 KiB
Rust
use console::style;
|
|
use rusqlite::Connection;
|
|
use serde::Serialize;
|
|
use std::collections::{HashMap, HashSet};
|
|
|
|
use crate::Config;
|
|
use crate::cli::WhoArgs;
|
|
use crate::cli::robot::RobotMeta;
|
|
use crate::core::db::create_connection;
|
|
use crate::core::error::{LoreError, Result};
|
|
use crate::core::paths::get_db_path;
|
|
use crate::core::project::resolve_project;
|
|
use crate::core::time::{ms_to_iso, now_ms, parse_since};
|
|
|
|
// ─── Mode Discrimination ────────────────────────────────────────────────────
|
|
|
|
/// Determines which query mode to run based on args.
/// Path variants own their strings because path normalization produces new `String`s.
/// Username variants borrow from args since no normalization is needed.
///
/// Mode selection (including flag precedence) is performed by `resolve_mode`.
enum WhoMode<'a> {
    /// lore who <file-path> OR lore who --path <path>
    Expert { path: String },
    /// lore who <username>
    Workload { username: &'a str },
    /// lore who <username> --reviews
    Reviews { username: &'a str },
    /// lore who --active
    Active,
    /// lore who --overlap <path>
    Overlap { path: String },
}
|
|
|
|
fn resolve_mode<'a>(args: &'a WhoArgs) -> Result<WhoMode<'a>> {
|
|
// Explicit --path flag always wins (handles root files like README.md,
|
|
// LICENSE, Makefile -- anything without a / that can't be auto-detected)
|
|
if let Some(p) = &args.path {
|
|
return Ok(WhoMode::Expert {
|
|
path: normalize_repo_path(p),
|
|
});
|
|
}
|
|
if args.active {
|
|
return Ok(WhoMode::Active);
|
|
}
|
|
if let Some(path) = &args.overlap {
|
|
return Ok(WhoMode::Overlap {
|
|
path: normalize_repo_path(path),
|
|
});
|
|
}
|
|
if let Some(target) = &args.target {
|
|
let clean = target.strip_prefix('@').unwrap_or(target);
|
|
if args.reviews {
|
|
return Ok(WhoMode::Reviews { username: clean });
|
|
}
|
|
// Disambiguation: if target contains '/', it's a file path.
|
|
// GitLab usernames never contain '/'.
|
|
// Root files (no '/') require --path.
|
|
if target.contains('/') {
|
|
return Ok(WhoMode::Expert {
|
|
path: normalize_repo_path(target),
|
|
});
|
|
}
|
|
return Ok(WhoMode::Workload { username: clean });
|
|
}
|
|
Err(LoreError::Other(
|
|
"Provide a username, file path, --active, or --overlap <path>.\n\n\
|
|
Examples:\n \
|
|
lore who src/features/auth/\n \
|
|
lore who @username\n \
|
|
lore who --active\n \
|
|
lore who --overlap src/features/\n \
|
|
lore who --path README.md\n \
|
|
lore who --path Makefile"
|
|
.to_string(),
|
|
))
|
|
}
|
|
|
|
/// Normalize a user-supplied repo path so it matches stored DiffNote paths.
///
/// Steps, in order:
/// - trims surrounding whitespace
/// - converts `\` to `/` when the input has no `/` at all (Windows paste)
/// - strips any leading "./" segments
/// - strips leading `/` (stored paths are repo-relative)
/// - collapses every run of "//" into a single "/"
fn normalize_repo_path(input: &str) -> String {
    let mut path = input.trim().to_string();
    // A purely backslash-separated path was pasted from Windows; rewrite it.
    // (Mixed separators are left alone.)
    if !path.contains('/') && path.contains('\\') {
        path = path.replace('\\', "/");
    }
    // Drop leading "./" segments one at a time (handles "././a").
    while let Some(rest) = path.strip_prefix("./") {
        path = rest.to_string();
    }
    // Repo-relative: no leading slash.
    path = path.trim_start_matches('/').to_string();
    // Squash duplicate separators until none remain.
    while path.contains("//") {
        path = path.replace("//", "/");
    }
    path
}
|
|
|
|
// ─── Result Types ────────────────────────────────────────────────────────────
|
|
|
|
/// Top-level run result: carries resolved inputs + the mode-specific result.
pub struct WhoRun {
    /// Echo of the fully-resolved query parameters (for reproducible robot output).
    pub resolved_input: WhoResolvedInput,
    /// Mode-specific payload.
    pub result: WhoResult,
}
|
|
|
|
/// Resolved query parameters -- computed once, used for robot JSON reproducibility.
pub struct WhoResolvedInput {
    /// Mode name: "expert", "workload", "reviews", "active", or "overlap".
    pub mode: String,
    /// Project scope, when --project was given.
    pub project_id: Option<i64>,
    /// `path_with_namespace` for `project_id`, looked up once.
    pub project_path: Option<String>,
    /// Lower bound of the time window, epoch milliseconds (None = no window).
    pub since_ms: Option<i64>,
    /// `since_ms` rendered as ISO-8601 for display.
    pub since_iso: Option<String>,
    /// "default" (mode default applied), "explicit" (user provided --since), "none" (no window)
    pub since_mode: String,
    /// Per-list row cap as supplied on the CLI.
    pub limit: u16,
}
|
|
|
|
/// Top-level result enum -- one variant per mode.
pub enum WhoResult {
    /// Who knows about a file/directory.
    Expert(ExpertResult),
    /// What a person is working on.
    Workload(WorkloadResult),
    /// What review patterns a person has.
    Reviews(ReviewsResult),
    /// What discussions need attention.
    Active(ActiveResult),
    /// Who else is touching these files.
    Overlap(OverlapResult),
}
|
|
|
|
// --- Expert ---
|
|
|
|
/// Expert-mode result: ranked users for a path query.
pub struct ExpertResult {
    /// The path as the user supplied it (pre-escaping).
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    /// Rows ordered by score (see `query_expert`), capped at the CLI limit.
    pub experts: Vec<Expert>,
    /// True when more rows existed than the limit allowed.
    pub truncated: bool,
}
|
|
|
|
/// One ranked row in Expert mode.
pub struct Expert {
    pub username: String,
    /// Composite score: review MRs * 20 + authored MRs * 12 + review notes * 1.
    pub score: i64,
    /// Distinct MRs where this user left DiffNotes on the path as a reviewer.
    pub review_mr_count: u32,
    /// Total DiffNotes left as a reviewer on the path.
    pub review_note_count: u32,
    /// Distinct MRs this user authored whose diffs drew notes on the path.
    pub author_mr_count: u32,
    /// Most recent matching note timestamp, epoch milliseconds.
    pub last_seen_ms: i64,
}
|
|
|
|
// --- Workload ---
|
|
|
|
/// Workload-mode result: four independent lists, each individually capped.
pub struct WorkloadResult {
    pub username: String,
    /// Open issues assigned to the user.
    pub assigned_issues: Vec<WorkloadIssue>,
    /// Open MRs the user authored.
    pub authored_mrs: Vec<WorkloadMr>,
    /// Open MRs the user is listed as reviewer on.
    pub reviewing_mrs: Vec<WorkloadMr>,
    /// Unresolved resolvable discussions the user participated in.
    pub unresolved_discussions: Vec<WorkloadDiscussion>,
    // Per-list truncation flags: true when the query produced more than `limit` rows.
    pub assigned_issues_truncated: bool,
    pub authored_mrs_truncated: bool,
    pub reviewing_mrs_truncated: bool,
    pub unresolved_discussions_truncated: bool,
}
|
|
|
|
/// One assigned open issue in Workload mode.
pub struct WorkloadIssue {
    pub iid: i64,
    /// Canonical reference: `group/project#iid`
    pub ref_: String,
    pub title: String,
    pub project_path: String,
    /// Issue `updated_at`, epoch milliseconds (compared against `since_ms`).
    pub updated_at: i64,
}
|
|
|
|
/// One open MR in Workload mode (authored or reviewing).
pub struct WorkloadMr {
    pub iid: i64,
    /// Canonical reference: `group/project!iid`
    pub ref_: String,
    pub title: String,
    /// Draft/WIP flag from the MR row.
    pub draft: bool,
    pub project_path: String,
    /// Populated only for the "reviewing" list; None for authored MRs
    /// (the author is the queried user).
    pub author_username: Option<String>,
    /// MR `updated_at`, epoch milliseconds (compared against `since_ms`).
    pub updated_at: i64,
}
|
|
|
|
/// One unresolved discussion the user participated in (Workload mode).
pub struct WorkloadDiscussion {
    /// "MR" or "Issue" (derived from the discussion's noteable_type).
    pub entity_type: String,
    pub entity_iid: i64,
    /// Canonical reference: `group/project!iid` or `group/project#iid`
    pub ref_: String,
    pub entity_title: String,
    pub project_path: String,
    /// Timestamp of the most recent note in the discussion, epoch milliseconds.
    pub last_note_at: i64,
}
|
|
|
|
// --- Reviews ---
|
|
|
|
/// Reviews-mode result: volume counters plus prefix-derived categories.
pub struct ReviewsResult {
    pub username: String,
    /// All DiffNotes by the user on MRs they did not author, in the window.
    pub total_diffnotes: u32,
    /// Subset of `total_diffnotes` that carried a recognizable `**prefix**`.
    pub categorized_count: u32,
    /// Distinct MRs the user left DiffNotes on.
    pub mrs_reviewed: u32,
    /// Categories sorted by count, descending.
    pub categories: Vec<ReviewCategory>,
}
|
|
|
|
/// One normalized review-comment category (e.g. "nit", "suggestion").
pub struct ReviewCategory {
    /// Normalized prefix name (see `normalize_review_prefix`).
    pub name: String,
    pub count: u32,
    /// Share of `categorized_count`, 0.0-100.0.
    pub percentage: f64,
}
|
|
|
|
// --- Active ---
|
|
|
|
/// Active-mode result: unresolved discussions with recent activity.
pub struct ActiveResult {
    /// Most recently active unresolved discussions, capped at the CLI limit.
    pub discussions: Vec<ActiveDiscussion>,
    /// Count of unresolved discussions *within the time window*, not total across all time.
    pub total_unresolved_in_window: u32,
    /// True when more discussions matched than the limit allowed.
    pub truncated: bool,
}
|
|
|
|
/// One unresolved discussion in Active mode, with participation context.
pub struct ActiveDiscussion {
    /// Internal DB id of the discussion row.
    pub discussion_id: i64,
    /// Raw noteable_type (e.g. "MergeRequest") -- TODO confirm whether this is
    /// normalized to "MR"/"Issue" by the (not shown here) row mapping.
    pub entity_type: String,
    pub entity_iid: i64,
    pub entity_title: String,
    pub project_path: String,
    /// Timestamp of the most recent note, epoch milliseconds.
    pub last_note_at: i64,
    /// Non-system notes in the discussion.
    pub note_count: u32,
    /// Distinct non-system note authors (possibly a capped subset).
    pub participants: Vec<String>,
    /// Full participant count before any capping.
    pub participants_total: u32,
    /// True when `participants` was capped below `participants_total`.
    pub participants_truncated: bool,
}
|
|
|
|
// --- Overlap ---
|
|
|
|
/// Overlap-mode result: users touching the same path recently.
pub struct OverlapResult {
    /// The path as the user supplied it (pre-escaping).
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    /// Users ranked by touch activity, capped at the CLI limit.
    pub users: Vec<OverlapUser>,
    /// True when more users matched than the limit allowed.
    pub truncated: bool,
}
|
|
|
|
/// One user row in Overlap mode, tracking author and reviewer roles separately.
pub struct OverlapUser {
    pub username: String,
    /// Touches in the author role.
    pub author_touch_count: u32,
    /// Touches in the reviewer role.
    pub review_touch_count: u32,
    /// Combined touch total.
    pub touch_count: u32,
    /// Most recent touch, epoch milliseconds.
    pub last_seen_at: i64,
    /// Stable MR references like "group/project!123"
    pub mr_refs: Vec<String>,
    /// Full MR count before any capping of `mr_refs`.
    pub mr_refs_total: u32,
    /// True when `mr_refs` was capped below `mr_refs_total`.
    pub mr_refs_truncated: bool,
}
|
|
|
|
// ─── Entry Point ─────────────────────────────────────────────────────────────
|
|
|
|
/// Main entry point. Resolves mode + resolved inputs once, then dispatches.
|
|
pub fn run_who(config: &Config, args: &WhoArgs) -> Result<WhoRun> {
|
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
|
let conn = create_connection(&db_path)?;
|
|
|
|
let project_id = args
|
|
.project
|
|
.as_deref()
|
|
.map(|p| resolve_project(&conn, p))
|
|
.transpose()?;
|
|
|
|
let project_path = project_id
|
|
.map(|id| lookup_project_path(&conn, id))
|
|
.transpose()?;
|
|
|
|
let mode = resolve_mode(args)?;
|
|
|
|
// since_mode semantics:
|
|
// - expert/reviews/active/overlap: default window applies if args.since is None -> "default"
|
|
// - workload: no default window; args.since None => "none"
|
|
let since_mode_for_defaulted = if args.since.is_some() {
|
|
"explicit"
|
|
} else {
|
|
"default"
|
|
};
|
|
let since_mode_for_workload = if args.since.is_some() {
|
|
"explicit"
|
|
} else {
|
|
"none"
|
|
};
|
|
|
|
match mode {
|
|
WhoMode::Expert { path } => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "6m")?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_expert(&conn, &path, project_id, since_ms, limit)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "expert".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Expert(result),
|
|
})
|
|
}
|
|
WhoMode::Workload { username } => {
|
|
let since_ms = args
|
|
.since
|
|
.as_deref()
|
|
.map(resolve_since_required)
|
|
.transpose()?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_workload(&conn, username, project_id, since_ms, limit)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "workload".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms,
|
|
since_iso: since_ms.map(ms_to_iso),
|
|
since_mode: since_mode_for_workload.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Workload(result),
|
|
})
|
|
}
|
|
WhoMode::Reviews { username } => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "6m")?;
|
|
let result = query_reviews(&conn, username, project_id, since_ms)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "reviews".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Reviews(result),
|
|
})
|
|
}
|
|
WhoMode::Active => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "7d")?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_active(&conn, project_id, since_ms, limit)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "active".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Active(result),
|
|
})
|
|
}
|
|
WhoMode::Overlap { path } => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "30d")?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_overlap(&conn, &path, project_id, since_ms, limit)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "overlap".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Overlap(result),
|
|
})
|
|
}
|
|
}
|
|
}
|
|
|
|
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
|
|
|
/// Look up the project path for a resolved project ID.
|
|
fn lookup_project_path(conn: &Connection, project_id: i64) -> Result<String> {
|
|
conn.query_row(
|
|
"SELECT path_with_namespace FROM projects WHERE id = ?1",
|
|
rusqlite::params![project_id],
|
|
|row| row.get(0),
|
|
)
|
|
.map_err(|e| LoreError::Other(format!("Failed to look up project path: {e}")))
|
|
}
|
|
|
|
/// Parse --since with a default fallback.
|
|
fn resolve_since(input: Option<&str>, default: &str) -> Result<i64> {
|
|
let s = input.unwrap_or(default);
|
|
parse_since(s).ok_or_else(|| {
|
|
LoreError::Other(format!(
|
|
"Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
|
))
|
|
})
|
|
}
|
|
|
|
/// Parse --since without a default (returns error if invalid).
|
|
fn resolve_since_required(input: &str) -> Result<i64> {
|
|
parse_since(input).ok_or_else(|| {
|
|
LoreError::Other(format!(
|
|
"Invalid --since value: '{input}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
|
))
|
|
})
|
|
}
|
|
|
|
// ─── Path Query Construction ─────────────────────────────────────────────────
|
|
|
|
/// Describes how to match a user-supplied path in SQL.
///
/// Produced by `build_path_query`; prefix values are LIKE-escaped, exact
/// values are not (see the comment in `build_path_query`).
struct PathQuery {
    /// The parameter value to bind.
    value: String,
    /// If true: use `LIKE value ESCAPE '\'`. If false: use `= value`.
    is_prefix: bool,
}
|
|
|
|
/// Build a path query from a user-supplied path, with project-scoped DB probes.
///
/// Rules:
/// - If the path ends with `/`, it's a directory prefix -> `escaped_path/%` (LIKE)
/// - If the path is a root path (no `/`) and does NOT end with `/`, treat as exact (=)
/// - Else if the last path segment contains `.`, heuristic suggests file (=)
/// - Two-way DB probe (project-scoped): when heuristics are ambiguous,
///   probe the DB to resolve.
/// - Otherwise, treat as directory prefix -> `escaped_path/%` (LIKE)
///
/// NOTE(review): both probes use `.is_ok()`, so any query error (not just
/// "no rows") is treated as "does not exist" -- confirm that is intended.
fn build_path_query(conn: &Connection, path: &str, project_id: Option<i64>) -> Result<PathQuery> {
    // Trailing slashes are stripped for matching; their presence is kept in
    // `forced_dir` to force directory semantics.
    let trimmed = path.trim_end_matches('/');
    let last_segment = trimmed.rsplit('/').next().unwrap_or(trimmed);
    let is_root = !trimmed.contains('/');
    let forced_dir = path.ends_with('/');
    // Heuristic is now only a fallback; probes decide first when ambiguous.
    let looks_like_file = !forced_dir && (is_root || last_segment.contains('.'));

    // Probe 1: exact file exists (project-scoped via nullable binding)
    let exact_exists = conn
        .query_row(
            "SELECT 1 FROM notes
             WHERE note_type = 'DiffNote'
               AND is_system = 0
               AND position_new_path = ?1
               AND (?2 IS NULL OR project_id = ?2)
             LIMIT 1",
            rusqlite::params![trimmed, project_id],
            |_| Ok(()),
        )
        .is_ok();

    // Probe 2: directory prefix exists (project-scoped).
    // Skipped when the exact probe already hit or the user forced a directory.
    let prefix_exists = if !forced_dir && !exact_exists {
        let escaped = escape_like(trimmed);
        let pat = format!("{escaped}/%");
        conn.query_row(
            "SELECT 1 FROM notes
             WHERE note_type = 'DiffNote'
               AND is_system = 0
               AND position_new_path LIKE ?1 ESCAPE '\\'
               AND (?2 IS NULL OR project_id = ?2)
             LIMIT 1",
            rusqlite::params![pat, project_id],
            |_| Ok(()),
        )
        .is_ok()
    } else {
        false
    };

    // Forced directory always wins; otherwise: exact > prefix > heuristic
    let is_file = if forced_dir {
        false
    } else if exact_exists {
        true
    } else if prefix_exists {
        false
    } else {
        looks_like_file
    };

    if is_file {
        // IMPORTANT: do NOT escape for exact match (=). LIKE metacharacters
        // are not special in `=`, so escaping would produce wrong values.
        Ok(PathQuery {
            value: trimmed.to_string(),
            is_prefix: false,
        })
    } else {
        let escaped = escape_like(trimmed);
        Ok(PathQuery {
            value: format!("{escaped}/%"),
            is_prefix: true,
        })
    }
}
|
|
|
|
/// Escape LIKE metacharacters. All queries using this must include `ESCAPE '\'`.
///
/// Escapes `\`, `%`, and `_` in a single pass (equivalent to escaping the
/// backslash first, then the wildcards).
fn escape_like(input: &str) -> String {
    let mut escaped = String::with_capacity(input.len());
    for ch in input.chars() {
        if matches!(ch, '\\' | '%' | '_') {
            escaped.push('\\');
        }
        escaped.push(ch);
    }
    escaped
}
|
|
|
|
// ─── Query: Expert Mode ─────────────────────────────────────────────────────
|
|
|
|
fn query_expert(
|
|
conn: &Connection,
|
|
path: &str,
|
|
project_id: Option<i64>,
|
|
since_ms: i64,
|
|
limit: usize,
|
|
) -> Result<ExpertResult> {
|
|
let pq = build_path_query(conn, path, project_id)?;
|
|
let limit_plus_one = (limit + 1) as i64;
|
|
|
|
let sql_prefix = "
|
|
WITH activity AS (
|
|
SELECT
|
|
n.author_username AS username,
|
|
'reviewer' AS role,
|
|
COUNT(DISTINCT m.id) AS mr_cnt,
|
|
COUNT(*) AS note_cnt,
|
|
MAX(n.created_at) AS last_seen_at
|
|
FROM notes n
|
|
JOIN discussions d ON n.discussion_id = d.id
|
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
|
WHERE n.note_type = 'DiffNote'
|
|
AND n.is_system = 0
|
|
AND n.author_username IS NOT NULL
|
|
AND (m.author_username IS NULL OR n.author_username != m.author_username)
|
|
AND m.state IN ('opened','merged')
|
|
AND n.position_new_path LIKE ?1 ESCAPE '\\'
|
|
AND n.created_at >= ?2
|
|
AND (?3 IS NULL OR n.project_id = ?3)
|
|
GROUP BY n.author_username
|
|
|
|
UNION ALL
|
|
|
|
SELECT
|
|
m.author_username AS username,
|
|
'author' AS role,
|
|
COUNT(DISTINCT m.id) AS mr_cnt,
|
|
0 AS note_cnt,
|
|
MAX(n.created_at) AS last_seen_at
|
|
FROM merge_requests m
|
|
JOIN discussions d ON d.merge_request_id = m.id
|
|
JOIN notes n ON n.discussion_id = d.id
|
|
WHERE n.note_type = 'DiffNote'
|
|
AND n.is_system = 0
|
|
AND m.author_username IS NOT NULL
|
|
AND n.position_new_path LIKE ?1 ESCAPE '\\'
|
|
AND n.created_at >= ?2
|
|
AND (?3 IS NULL OR n.project_id = ?3)
|
|
GROUP BY m.author_username
|
|
)
|
|
SELECT
|
|
username,
|
|
SUM(CASE WHEN role = 'reviewer' THEN mr_cnt ELSE 0 END) AS review_mr_count,
|
|
SUM(CASE WHEN role = 'reviewer' THEN note_cnt ELSE 0 END) AS review_note_count,
|
|
SUM(CASE WHEN role = 'author' THEN mr_cnt ELSE 0 END) AS author_mr_count,
|
|
MAX(last_seen_at) AS last_seen_at,
|
|
(
|
|
(SUM(CASE WHEN role = 'reviewer' THEN mr_cnt ELSE 0 END) * 20) +
|
|
(SUM(CASE WHEN role = 'author' THEN mr_cnt ELSE 0 END) * 12) +
|
|
(SUM(CASE WHEN role = 'reviewer' THEN note_cnt ELSE 0 END) * 1)
|
|
) AS score
|
|
FROM activity
|
|
GROUP BY username
|
|
ORDER BY score DESC, last_seen_at DESC, username ASC
|
|
LIMIT ?4
|
|
";
|
|
|
|
let sql_exact = "
|
|
WITH activity AS (
|
|
SELECT
|
|
n.author_username AS username,
|
|
'reviewer' AS role,
|
|
COUNT(DISTINCT m.id) AS mr_cnt,
|
|
COUNT(*) AS note_cnt,
|
|
MAX(n.created_at) AS last_seen_at
|
|
FROM notes n
|
|
JOIN discussions d ON n.discussion_id = d.id
|
|
JOIN merge_requests m ON d.merge_request_id = m.id
|
|
WHERE n.note_type = 'DiffNote'
|
|
AND n.is_system = 0
|
|
AND n.author_username IS NOT NULL
|
|
AND (m.author_username IS NULL OR n.author_username != m.author_username)
|
|
AND m.state IN ('opened','merged')
|
|
AND n.position_new_path = ?1
|
|
AND n.created_at >= ?2
|
|
AND (?3 IS NULL OR n.project_id = ?3)
|
|
GROUP BY n.author_username
|
|
|
|
UNION ALL
|
|
|
|
SELECT
|
|
m.author_username AS username,
|
|
'author' AS role,
|
|
COUNT(DISTINCT m.id) AS mr_cnt,
|
|
0 AS note_cnt,
|
|
MAX(n.created_at) AS last_seen_at
|
|
FROM merge_requests m
|
|
JOIN discussions d ON d.merge_request_id = m.id
|
|
JOIN notes n ON n.discussion_id = d.id
|
|
WHERE n.note_type = 'DiffNote'
|
|
AND n.is_system = 0
|
|
AND m.author_username IS NOT NULL
|
|
AND n.position_new_path = ?1
|
|
AND n.created_at >= ?2
|
|
AND (?3 IS NULL OR n.project_id = ?3)
|
|
GROUP BY m.author_username
|
|
)
|
|
SELECT
|
|
username,
|
|
SUM(CASE WHEN role = 'reviewer' THEN mr_cnt ELSE 0 END) AS review_mr_count,
|
|
SUM(CASE WHEN role = 'reviewer' THEN note_cnt ELSE 0 END) AS review_note_count,
|
|
SUM(CASE WHEN role = 'author' THEN mr_cnt ELSE 0 END) AS author_mr_count,
|
|
MAX(last_seen_at) AS last_seen_at,
|
|
(
|
|
(SUM(CASE WHEN role = 'reviewer' THEN mr_cnt ELSE 0 END) * 20) +
|
|
(SUM(CASE WHEN role = 'author' THEN mr_cnt ELSE 0 END) * 12) +
|
|
(SUM(CASE WHEN role = 'reviewer' THEN note_cnt ELSE 0 END) * 1)
|
|
) AS score
|
|
FROM activity
|
|
GROUP BY username
|
|
ORDER BY score DESC, last_seen_at DESC, username ASC
|
|
LIMIT ?4
|
|
";
|
|
|
|
let mut stmt = if pq.is_prefix {
|
|
conn.prepare_cached(sql_prefix)?
|
|
} else {
|
|
conn.prepare_cached(sql_exact)?
|
|
};
|
|
|
|
let experts: Vec<Expert> = stmt
|
|
.query_map(
|
|
rusqlite::params![pq.value, since_ms, project_id, limit_plus_one],
|
|
|row| {
|
|
Ok(Expert {
|
|
username: row.get(0)?,
|
|
review_mr_count: row.get(1)?,
|
|
review_note_count: row.get(2)?,
|
|
author_mr_count: row.get(3)?,
|
|
last_seen_ms: row.get(4)?,
|
|
score: row.get(5)?,
|
|
})
|
|
},
|
|
)?
|
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
|
|
|
let truncated = experts.len() > limit;
|
|
let experts: Vec<Expert> = experts.into_iter().take(limit).collect();
|
|
|
|
Ok(ExpertResult {
|
|
path_query: path.to_string(),
|
|
path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
|
|
experts,
|
|
truncated,
|
|
})
|
|
}
|
|
|
|
// ─── Query: Workload Mode ───────────────────────────────────────────────────
|
|
|
|
fn query_workload(
|
|
conn: &Connection,
|
|
username: &str,
|
|
project_id: Option<i64>,
|
|
since_ms: Option<i64>,
|
|
limit: usize,
|
|
) -> Result<WorkloadResult> {
|
|
let limit_plus_one = (limit + 1) as i64;
|
|
|
|
// Query 1: Open issues assigned to user
|
|
let issues_sql = "SELECT i.iid,
|
|
(p.path_with_namespace || '#' || i.iid) AS ref,
|
|
i.title, p.path_with_namespace, i.updated_at
|
|
FROM issues i
|
|
JOIN issue_assignees ia ON ia.issue_id = i.id
|
|
JOIN projects p ON i.project_id = p.id
|
|
WHERE ia.username = ?1
|
|
AND i.state = 'opened'
|
|
AND (?2 IS NULL OR i.project_id = ?2)
|
|
AND (?3 IS NULL OR i.updated_at >= ?3)
|
|
ORDER BY i.updated_at DESC
|
|
LIMIT ?4";
|
|
|
|
let mut stmt = conn.prepare_cached(issues_sql)?;
|
|
let assigned_issues: Vec<WorkloadIssue> = stmt
|
|
.query_map(
|
|
rusqlite::params![username, project_id, since_ms, limit_plus_one],
|
|
|row| {
|
|
Ok(WorkloadIssue {
|
|
iid: row.get(0)?,
|
|
ref_: row.get(1)?,
|
|
title: row.get(2)?,
|
|
project_path: row.get(3)?,
|
|
updated_at: row.get(4)?,
|
|
})
|
|
},
|
|
)?
|
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
|
|
|
// Query 2: Open MRs authored
|
|
let authored_sql = "SELECT m.iid,
|
|
(p.path_with_namespace || '!' || m.iid) AS ref,
|
|
m.title, m.draft, p.path_with_namespace, m.updated_at
|
|
FROM merge_requests m
|
|
JOIN projects p ON m.project_id = p.id
|
|
WHERE m.author_username = ?1
|
|
AND m.state = 'opened'
|
|
AND (?2 IS NULL OR m.project_id = ?2)
|
|
AND (?3 IS NULL OR m.updated_at >= ?3)
|
|
ORDER BY m.updated_at DESC
|
|
LIMIT ?4";
|
|
let mut stmt = conn.prepare_cached(authored_sql)?;
|
|
let authored_mrs: Vec<WorkloadMr> = stmt
|
|
.query_map(
|
|
rusqlite::params![username, project_id, since_ms, limit_plus_one],
|
|
|row| {
|
|
Ok(WorkloadMr {
|
|
iid: row.get(0)?,
|
|
ref_: row.get(1)?,
|
|
title: row.get(2)?,
|
|
draft: row.get::<_, i32>(3)? != 0,
|
|
project_path: row.get(4)?,
|
|
author_username: None,
|
|
updated_at: row.get(5)?,
|
|
})
|
|
},
|
|
)?
|
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
|
|
|
// Query 3: Open MRs where user is reviewer
|
|
let reviewing_sql = "SELECT m.iid,
|
|
(p.path_with_namespace || '!' || m.iid) AS ref,
|
|
m.title, m.draft, p.path_with_namespace,
|
|
m.author_username, m.updated_at
|
|
FROM merge_requests m
|
|
JOIN mr_reviewers r ON r.merge_request_id = m.id
|
|
JOIN projects p ON m.project_id = p.id
|
|
WHERE r.username = ?1
|
|
AND m.state = 'opened'
|
|
AND (?2 IS NULL OR m.project_id = ?2)
|
|
AND (?3 IS NULL OR m.updated_at >= ?3)
|
|
ORDER BY m.updated_at DESC
|
|
LIMIT ?4";
|
|
let mut stmt = conn.prepare_cached(reviewing_sql)?;
|
|
let reviewing_mrs: Vec<WorkloadMr> = stmt
|
|
.query_map(
|
|
rusqlite::params![username, project_id, since_ms, limit_plus_one],
|
|
|row| {
|
|
Ok(WorkloadMr {
|
|
iid: row.get(0)?,
|
|
ref_: row.get(1)?,
|
|
title: row.get(2)?,
|
|
draft: row.get::<_, i32>(3)? != 0,
|
|
project_path: row.get(4)?,
|
|
author_username: row.get(5)?,
|
|
updated_at: row.get(6)?,
|
|
})
|
|
},
|
|
)?
|
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
|
|
|
// Query 4: Unresolved discussions where user participated
|
|
let disc_sql = "SELECT d.noteable_type,
|
|
COALESCE(i.iid, m.iid) AS entity_iid,
|
|
(p.path_with_namespace ||
|
|
CASE WHEN d.noteable_type = 'MergeRequest' THEN '!' ELSE '#' END ||
|
|
COALESCE(i.iid, m.iid)) AS ref,
|
|
COALESCE(i.title, m.title) AS entity_title,
|
|
p.path_with_namespace,
|
|
d.last_note_at
|
|
FROM discussions d
|
|
JOIN projects p ON d.project_id = p.id
|
|
LEFT JOIN issues i ON d.issue_id = i.id
|
|
LEFT JOIN merge_requests m ON d.merge_request_id = m.id
|
|
WHERE d.resolvable = 1 AND d.resolved = 0
|
|
AND EXISTS (
|
|
SELECT 1 FROM notes n
|
|
WHERE n.discussion_id = d.id
|
|
AND n.author_username = ?1
|
|
AND n.is_system = 0
|
|
)
|
|
AND (?2 IS NULL OR d.project_id = ?2)
|
|
AND (?3 IS NULL OR d.last_note_at >= ?3)
|
|
ORDER BY d.last_note_at DESC
|
|
LIMIT ?4";
|
|
|
|
let mut stmt = conn.prepare_cached(disc_sql)?;
|
|
let unresolved_discussions: Vec<WorkloadDiscussion> = stmt
|
|
.query_map(
|
|
rusqlite::params![username, project_id, since_ms, limit_plus_one],
|
|
|row| {
|
|
let noteable_type: String = row.get(0)?;
|
|
let entity_type = if noteable_type == "MergeRequest" {
|
|
"MR"
|
|
} else {
|
|
"Issue"
|
|
};
|
|
Ok(WorkloadDiscussion {
|
|
entity_type: entity_type.to_string(),
|
|
entity_iid: row.get(1)?,
|
|
ref_: row.get(2)?,
|
|
entity_title: row.get(3)?,
|
|
project_path: row.get(4)?,
|
|
last_note_at: row.get(5)?,
|
|
})
|
|
},
|
|
)?
|
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
|
|
|
// Truncation detection
|
|
let assigned_issues_truncated = assigned_issues.len() > limit;
|
|
let authored_mrs_truncated = authored_mrs.len() > limit;
|
|
let reviewing_mrs_truncated = reviewing_mrs.len() > limit;
|
|
let unresolved_discussions_truncated = unresolved_discussions.len() > limit;
|
|
|
|
let assigned_issues: Vec<WorkloadIssue> = assigned_issues.into_iter().take(limit).collect();
|
|
let authored_mrs: Vec<WorkloadMr> = authored_mrs.into_iter().take(limit).collect();
|
|
let reviewing_mrs: Vec<WorkloadMr> = reviewing_mrs.into_iter().take(limit).collect();
|
|
let unresolved_discussions: Vec<WorkloadDiscussion> =
|
|
unresolved_discussions.into_iter().take(limit).collect();
|
|
|
|
Ok(WorkloadResult {
|
|
username: username.to_string(),
|
|
assigned_issues,
|
|
authored_mrs,
|
|
reviewing_mrs,
|
|
unresolved_discussions,
|
|
assigned_issues_truncated,
|
|
authored_mrs_truncated,
|
|
reviewing_mrs_truncated,
|
|
unresolved_discussions_truncated,
|
|
})
|
|
}
|
|
|
|
// ─── Query: Reviews Mode ────────────────────────────────────────────────────
|
|
|
|
/// Reviews mode: summarize `username`'s review-comment patterns.
///
/// Counts DiffNotes left on MRs the user did not author, the distinct MRs
/// reviewed, and extracts `**prefix**`-style comment categories in SQL,
/// then normalizes/merges them in Rust.
///
/// NOTE(review): all three queries use `m.author_username != ?1`, which by
/// SQL three-valued logic also excludes MRs whose author is NULL;
/// `query_expert` handles NULL authors explicitly -- confirm the difference
/// is intentional.
fn query_reviews(
    conn: &Connection,
    username: &str,
    project_id: Option<i64>,
    since_ms: i64,
) -> Result<ReviewsResult> {
    // Count total DiffNotes by this user on MRs they didn't author
    let total_sql = "SELECT COUNT(*) FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE n.author_username = ?1
        AND n.note_type = 'DiffNote'
        AND n.is_system = 0
        AND m.author_username != ?1
        AND n.created_at >= ?2
        AND (?3 IS NULL OR n.project_id = ?3)";

    let total_diffnotes: u32 = conn.query_row(
        total_sql,
        rusqlite::params![username, since_ms, project_id],
        |row| row.get(0),
    )?;

    // Count distinct MRs reviewed
    let mrs_sql = "SELECT COUNT(DISTINCT m.id) FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE n.author_username = ?1
        AND n.note_type = 'DiffNote'
        AND n.is_system = 0
        AND m.author_username != ?1
        AND n.created_at >= ?2
        AND (?3 IS NULL OR n.project_id = ?3)";

    let mrs_reviewed: u32 = conn.query_row(
        mrs_sql,
        rusqlite::params![username, since_ms, project_id],
        |row| row.get(0),
    )?;

    // Extract prefixed categories: body starts with **prefix**
    // The SUBSTR/INSTR pair pulls out the text between the leading `**` and
    // the next `**`; the LIKE '**%**%' guard guarantees both markers exist.
    let cat_sql = "SELECT
        SUBSTR(ltrim(n.body), 3, INSTR(SUBSTR(ltrim(n.body), 3), '**') - 1) AS raw_prefix,
        COUNT(*) AS cnt
        FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE n.author_username = ?1
        AND n.note_type = 'DiffNote'
        AND n.is_system = 0
        AND m.author_username != ?1
        AND ltrim(n.body) LIKE '**%**%'
        AND n.created_at >= ?2
        AND (?3 IS NULL OR n.project_id = ?3)
        GROUP BY raw_prefix
        ORDER BY cnt DESC";

    let mut stmt = conn.prepare_cached(cat_sql)?;
    let raw_categories: Vec<(String, u32)> = stmt
        .query_map(rusqlite::params![username, since_ms, project_id], |row| {
            Ok((row.get::<_, String>(0)?, row.get(1)?))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Normalize categories: lowercase, strip trailing colon/space,
    // merge nit/nitpick variants, merge (non-blocking) variants.
    // Prefixes that normalize to empty (e.g. "****" bodies) are dropped.
    let mut merged: HashMap<String, u32> = HashMap::new();
    for (raw, count) in &raw_categories {
        let normalized = normalize_review_prefix(raw);
        if !normalized.is_empty() {
            *merged.entry(normalized).or_insert(0) += count;
        }
    }

    let categorized_count: u32 = merged.values().sum();

    let mut categories: Vec<ReviewCategory> = merged
        .into_iter()
        .map(|(name, count)| {
            // Guard against division by zero when nothing was categorized.
            let percentage = if categorized_count > 0 {
                f64::from(count) / f64::from(categorized_count) * 100.0
            } else {
                0.0
            };
            ReviewCategory {
                name,
                count,
                percentage,
            }
        })
        .collect();

    // NOTE(review): ties in `count` inherit HashMap iteration order, so the
    // relative order of tied categories is nondeterministic across runs.
    categories.sort_by(|a, b| b.count.cmp(&a.count));

    Ok(ReviewsResult {
        username: username.to_string(),
        total_diffnotes,
        categorized_count,
        mrs_reviewed,
        categories,
    })
}
|
|
|
|
/// Normalize a raw review prefix like "Suggestion (non-blocking):" into "suggestion".
///
/// Lowercases, strips trailing colons and whitespace, drops any trailing
/// parenthetical, and folds "nitpick" into "nit". May return an empty
/// string (callers skip those).
fn normalize_review_prefix(raw: &str) -> String {
    let lowered = raw.trim().trim_end_matches(':').trim().to_lowercase();

    // Strip "(non-blocking)" and similar parentheticals.
    let base = match lowered.find('(') {
        Some(idx) => lowered[..idx].trim(),
        None => lowered.as_str(),
    };

    // Merge nit/nitpick variants.
    if matches!(base, "nitpick" | "nit") {
        "nit".to_string()
    } else {
        base.to_string()
    }
}
|
|
|
|
// ─── Query: Active Mode ─────────────────────────────────────────────────────
|
|
|
|
/// Query for "active" mode: resolvable, still-unresolved discussions that had
/// a note inside the `since_ms` window, newest activity first.
///
/// Uses two fully static SQL variants (global vs project-scoped) instead of
/// dynamically assembled SQL so `prepare_cached` can reuse prepared
/// statements. Fetches `limit + 1` rows to detect truncation without a
/// second round trip.
fn query_active(
    conn: &Connection,
    project_id: Option<i64>,
    since_ms: i64,
    limit: usize,
) -> Result<ActiveResult> {
    // One extra row acts as the truncation sentinel.
    let limit_plus_one = (limit + 1) as i64;

    // Total unresolved count -- two static variants
    let total_sql_global = "SELECT COUNT(*) FROM discussions d
        WHERE d.resolvable = 1 AND d.resolved = 0
        AND d.last_note_at >= ?1";
    let total_sql_scoped = "SELECT COUNT(*) FROM discussions d
        WHERE d.resolvable = 1 AND d.resolved = 0
        AND d.last_note_at >= ?1
        AND d.project_id = ?2";

    let total_unresolved_in_window: u32 = match project_id {
        None => conn.query_row(total_sql_global, rusqlite::params![since_ms], |row| {
            row.get(0)
        })?,
        Some(pid) => conn.query_row(total_sql_scoped, rusqlite::params![since_ms, pid], |row| {
            row.get(0)
        })?,
    };

    // Active discussions with context -- two static SQL variants.
    // CTEs: `picked` narrows to the newest N unresolved discussions first, so
    // the note aggregations below only touch those rows. GROUP_CONCAT uses
    // X'1F' (ASCII unit separator) as the delimiter so usernames can be split
    // unambiguously in `map_row`.
    let sql_global = "
    WITH picked AS (
        SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
            d.project_id, d.last_note_at
        FROM discussions d
        WHERE d.resolvable = 1 AND d.resolved = 0
        AND d.last_note_at >= ?1
        ORDER BY d.last_note_at DESC
        LIMIT ?2
    ),
    note_counts AS (
        SELECT
            n.discussion_id,
            COUNT(*) AS note_count
        FROM notes n
        JOIN picked p ON p.id = n.discussion_id
        WHERE n.is_system = 0
        GROUP BY n.discussion_id
    ),
    participants AS (
        SELECT
            x.discussion_id,
            GROUP_CONCAT(x.author_username, X'1F') AS participants
        FROM (
            SELECT DISTINCT n.discussion_id, n.author_username
            FROM notes n
            JOIN picked p ON p.id = n.discussion_id
            WHERE n.is_system = 0 AND n.author_username IS NOT NULL
        ) x
        GROUP BY x.discussion_id
    )
    SELECT
        p.id AS discussion_id,
        p.noteable_type,
        COALESCE(i.iid, m.iid) AS entity_iid,
        COALESCE(i.title, m.title) AS entity_title,
        proj.path_with_namespace,
        p.last_note_at,
        COALESCE(nc.note_count, 0) AS note_count,
        COALESCE(pa.participants, '') AS participants
    FROM picked p
    JOIN projects proj ON p.project_id = proj.id
    LEFT JOIN issues i ON p.issue_id = i.id
    LEFT JOIN merge_requests m ON p.merge_request_id = m.id
    LEFT JOIN note_counts nc ON nc.discussion_id = p.id
    LEFT JOIN participants pa ON pa.discussion_id = p.id
    ORDER BY p.last_note_at DESC
    ";

    // Same query plus a project filter in `picked`; LIMIT shifts to ?3.
    let sql_scoped = "
    WITH picked AS (
        SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
            d.project_id, d.last_note_at
        FROM discussions d
        WHERE d.resolvable = 1 AND d.resolved = 0
        AND d.last_note_at >= ?1
        AND d.project_id = ?2
        ORDER BY d.last_note_at DESC
        LIMIT ?3
    ),
    note_counts AS (
        SELECT
            n.discussion_id,
            COUNT(*) AS note_count
        FROM notes n
        JOIN picked p ON p.id = n.discussion_id
        WHERE n.is_system = 0
        GROUP BY n.discussion_id
    ),
    participants AS (
        SELECT
            x.discussion_id,
            GROUP_CONCAT(x.author_username, X'1F') AS participants
        FROM (
            SELECT DISTINCT n.discussion_id, n.author_username
            FROM notes n
            JOIN picked p ON p.id = n.discussion_id
            WHERE n.is_system = 0 AND n.author_username IS NOT NULL
        ) x
        GROUP BY x.discussion_id
    )
    SELECT
        p.id AS discussion_id,
        p.noteable_type,
        COALESCE(i.iid, m.iid) AS entity_iid,
        COALESCE(i.title, m.title) AS entity_title,
        proj.path_with_namespace,
        p.last_note_at,
        COALESCE(nc.note_count, 0) AS note_count,
        COALESCE(pa.participants, '') AS participants
    FROM picked p
    JOIN projects proj ON p.project_id = proj.id
    LEFT JOIN issues i ON p.issue_id = i.id
    LEFT JOIN merge_requests m ON p.merge_request_id = m.id
    LEFT JOIN note_counts nc ON nc.discussion_id = p.id
    LEFT JOIN participants pa ON pa.discussion_id = p.id
    ORDER BY p.last_note_at DESC
    ";

    // Row-mapping closure shared between both variants
    let map_row = |row: &rusqlite::Row| -> rusqlite::Result<ActiveDiscussion> {
        let noteable_type: String = row.get(1)?;
        // Anything that is not a MergeRequest is displayed as an Issue.
        let entity_type = if noteable_type == "MergeRequest" {
            "MR"
        } else {
            "Issue"
        };
        let participants_csv: Option<String> = row.get(7)?;
        // Sort participants for deterministic output -- GROUP_CONCAT order is undefined
        let mut participants: Vec<String> = participants_csv
            .as_deref()
            .filter(|s| !s.is_empty())
            .map(|csv| csv.split('\x1F').map(String::from).collect())
            .unwrap_or_default();
        participants.sort();

        // Cap the participant list to bound payload size; the pre-truncation
        // total is preserved so consumers can tell data was dropped.
        const MAX_PARTICIPANTS: usize = 50;
        let participants_total = participants.len() as u32;
        let participants_truncated = participants.len() > MAX_PARTICIPANTS;
        if participants_truncated {
            participants.truncate(MAX_PARTICIPANTS);
        }

        Ok(ActiveDiscussion {
            discussion_id: row.get(0)?,
            entity_type: entity_type.to_string(),
            entity_iid: row.get(2)?,
            entity_title: row.get(3)?,
            project_path: row.get(4)?,
            last_note_at: row.get(5)?,
            note_count: row.get(6)?,
            participants,
            participants_total,
            participants_truncated,
        })
    };

    // Select variant first, then prepare exactly one statement
    let discussions: Vec<ActiveDiscussion> = match project_id {
        None => {
            let mut stmt = conn.prepare_cached(sql_global)?;
            stmt.query_map(rusqlite::params![since_ms, limit_plus_one], &map_row)?
                .collect::<std::result::Result<Vec<_>, _>>()?
        }
        Some(pid) => {
            let mut stmt = conn.prepare_cached(sql_scoped)?;
            stmt.query_map(rusqlite::params![since_ms, pid, limit_plus_one], &map_row)?
                .collect::<std::result::Result<Vec<_>, _>>()?
        }
    };

    // The sentinel row (if present) only signals truncation; drop it here.
    let truncated = discussions.len() > limit;
    let discussions: Vec<ActiveDiscussion> = discussions.into_iter().take(limit).collect();

    Ok(ActiveResult {
        discussions,
        total_unresolved_in_window,
        truncated,
    })
}
|
|
|
|
// ─── Query: Overlap Mode ────────────────────────────────────────────────────
|
|
|
|
/// Query for "overlap" mode: who else is touching files under `path`.
///
/// Two UNION ALL branches classify each user per MR: "reviewer" (left a
/// DiffNote on someone else's MR touching the path) and "author" (owns an MR
/// that received DiffNotes on the path). Results are merged per user in Rust
/// so one person can carry both roles. `?3 IS NULL OR ... = ?3` lets a single
/// statement serve both the global and project-scoped cases.
fn query_overlap(
    conn: &Connection,
    path: &str,
    project_id: Option<i64>,
    since_ms: i64,
    limit: usize,
) -> Result<OverlapResult> {
    let pq = build_path_query(conn, path, project_id)?;

    // Prefix variant: LIKE with explicit ESCAPE so literal % / _ in paths
    // cannot act as wildcards.
    let sql_prefix = "SELECT username, role, touch_count, last_seen_at, mr_refs FROM (
    SELECT
        n.author_username AS username,
        'reviewer' AS role,
        COUNT(DISTINCT m.id) AS touch_count,
        MAX(n.created_at) AS last_seen_at,
        GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
    FROM notes n
    JOIN discussions d ON n.discussion_id = d.id
    JOIN merge_requests m ON d.merge_request_id = m.id
    JOIN projects p ON m.project_id = p.id
    WHERE n.note_type = 'DiffNote'
      AND n.position_new_path LIKE ?1 ESCAPE '\\'
      AND n.is_system = 0
      AND n.author_username IS NOT NULL
      AND (m.author_username IS NULL OR n.author_username != m.author_username)
      AND m.state IN ('opened','merged')
      AND n.created_at >= ?2
      AND (?3 IS NULL OR n.project_id = ?3)
    GROUP BY n.author_username

    UNION ALL

    SELECT
        m.author_username AS username,
        'author' AS role,
        COUNT(DISTINCT m.id) AS touch_count,
        MAX(n.created_at) AS last_seen_at,
        GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
    FROM merge_requests m
    JOIN discussions d ON d.merge_request_id = m.id
    JOIN notes n ON n.discussion_id = d.id
    JOIN projects p ON m.project_id = p.id
    WHERE n.note_type = 'DiffNote'
      AND n.position_new_path LIKE ?1 ESCAPE '\\'
      AND n.is_system = 0
      AND m.state IN ('opened', 'merged')
      AND m.author_username IS NOT NULL
      AND n.created_at >= ?2
      AND (?3 IS NULL OR n.project_id = ?3)
    GROUP BY m.author_username
    )";

    // Exact variant: identical except the path predicate is equality.
    let sql_exact = "SELECT username, role, touch_count, last_seen_at, mr_refs FROM (
    SELECT
        n.author_username AS username,
        'reviewer' AS role,
        COUNT(DISTINCT m.id) AS touch_count,
        MAX(n.created_at) AS last_seen_at,
        GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
    FROM notes n
    JOIN discussions d ON n.discussion_id = d.id
    JOIN merge_requests m ON d.merge_request_id = m.id
    JOIN projects p ON m.project_id = p.id
    WHERE n.note_type = 'DiffNote'
      AND n.position_new_path = ?1
      AND n.is_system = 0
      AND n.author_username IS NOT NULL
      AND (m.author_username IS NULL OR n.author_username != m.author_username)
      AND m.state IN ('opened','merged')
      AND n.created_at >= ?2
      AND (?3 IS NULL OR n.project_id = ?3)
    GROUP BY n.author_username

    UNION ALL

    SELECT
        m.author_username AS username,
        'author' AS role,
        COUNT(DISTINCT m.id) AS touch_count,
        MAX(n.created_at) AS last_seen_at,
        GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
    FROM merge_requests m
    JOIN discussions d ON d.merge_request_id = m.id
    JOIN notes n ON n.discussion_id = d.id
    JOIN projects p ON m.project_id = p.id
    WHERE n.note_type = 'DiffNote'
      AND n.position_new_path = ?1
      AND n.is_system = 0
      AND m.state IN ('opened', 'merged')
      AND m.author_username IS NOT NULL
      AND n.created_at >= ?2
      AND (?3 IS NULL OR n.project_id = ?3)
    GROUP BY m.author_username
    )";

    let mut stmt = if pq.is_prefix {
        conn.prepare_cached(sql_prefix)?
    } else {
        conn.prepare_cached(sql_exact)?
    };
    let rows: Vec<(String, String, u32, i64, Option<String>)> = stmt
        .query_map(rusqlite::params![pq.value, since_ms, project_id], |row| {
            Ok((
                row.get(0)?,
                row.get(1)?,
                row.get(2)?,
                row.get(3)?,
                row.get(4)?,
            ))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Internal accumulator uses HashSet for MR refs from the start
    struct OverlapAcc {
        username: String,
        author_touch_count: u32,
        review_touch_count: u32,
        touch_count: u32,
        last_seen_at: i64,
        mr_refs: HashSet<String>,
    }

    let mut user_map: HashMap<String, OverlapAcc> = HashMap::new();
    for (username, role, count, last_seen, mr_refs_csv) in &rows {
        // GROUP_CONCAT here uses the default ',' separator.
        // NOTE(review): assumes path_with_namespace never contains a comma --
        // verify against GitLab namespace naming rules.
        let mr_refs: Vec<String> = mr_refs_csv
            .as_deref()
            .map(|csv| csv.split(',').map(|s| s.trim().to_string()).collect())
            .unwrap_or_default();

        let entry = user_map
            .entry(username.clone())
            .or_insert_with(|| OverlapAcc {
                username: username.clone(),
                author_touch_count: 0,
                review_touch_count: 0,
                touch_count: 0,
                last_seen_at: 0,
                mr_refs: HashSet::new(),
            });
        entry.touch_count += count;
        if role == "author" {
            entry.author_touch_count += count;
        } else {
            entry.review_touch_count += count;
        }
        if *last_seen > entry.last_seen_at {
            entry.last_seen_at = *last_seen;
        }
        for r in mr_refs {
            entry.mr_refs.insert(r);
        }
    }

    // Convert accumulators to output structs
    const MAX_MR_REFS_PER_USER: usize = 50;
    let mut users: Vec<OverlapUser> = user_map
        .into_values()
        .map(|a| {
            // Sort before truncating so the kept refs are deterministic.
            let mut mr_refs: Vec<String> = a.mr_refs.into_iter().collect();
            mr_refs.sort();
            let mr_refs_total = mr_refs.len() as u32;
            let mr_refs_truncated = mr_refs.len() > MAX_MR_REFS_PER_USER;
            if mr_refs_truncated {
                mr_refs.truncate(MAX_MR_REFS_PER_USER);
            }
            OverlapUser {
                username: a.username,
                author_touch_count: a.author_touch_count,
                review_touch_count: a.review_touch_count,
                touch_count: a.touch_count,
                last_seen_at: a.last_seen_at,
                mr_refs,
                mr_refs_total,
                mr_refs_truncated,
            }
        })
        .collect();

    // Stable sort with full tie-breakers for deterministic output
    users.sort_by(|a, b| {
        b.touch_count
            .cmp(&a.touch_count)
            .then_with(|| b.last_seen_at.cmp(&a.last_seen_at))
            .then_with(|| a.username.cmp(&b.username))
    });

    let truncated = users.len() > limit;
    users.truncate(limit);

    Ok(OverlapResult {
        path_query: path.to_string(),
        path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
        users,
        truncated,
    })
}
|
|
|
|
/// Format overlap role for display: "A", "R", or "A+R".
|
|
fn format_overlap_role(user: &OverlapUser) -> &'static str {
|
|
match (user.author_touch_count > 0, user.review_touch_count > 0) {
|
|
(true, true) => "A+R",
|
|
(true, false) => "A",
|
|
(false, true) => "R",
|
|
(false, false) => "-",
|
|
}
|
|
}
|
|
|
|
// ─── Human Output ────────────────────────────────────────────────────────────
|
|
|
|
pub fn print_who_human(result: &WhoResult, project_path: Option<&str>) {
|
|
match result {
|
|
WhoResult::Expert(r) => print_expert_human(r, project_path),
|
|
WhoResult::Workload(r) => print_workload_human(r),
|
|
WhoResult::Reviews(r) => print_reviews_human(r),
|
|
WhoResult::Active(r) => print_active_human(r, project_path),
|
|
WhoResult::Overlap(r) => print_overlap_human(r, project_path),
|
|
}
|
|
}
|
|
|
|
/// Print a dim hint when results aggregate across all projects.
|
|
fn print_scope_hint(project_path: Option<&str>) {
|
|
if project_path.is_none() {
|
|
println!(
|
|
" {}",
|
|
style("(aggregated across all projects; use -p to scope)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
/// Human-readable table for "expert" mode: header, match-type hint, then one
/// aligned row per expert (zero counts rendered as "-").
fn print_expert_human(r: &ExpertResult, project_path: Option<&str>) {
    println!();
    println!("{}", style(format!("Experts for {}", r.path_query)).bold());
    // Horizontal rule (U+2500 box-drawing character).
    println!("{}", "\u{2500}".repeat(60));
    println!(
        "  {}",
        style(format!(
            "(matching {} {})",
            r.path_match,
            if r.path_match == "exact" {
                "file"
            } else {
                "directory prefix"
            }
        ))
        .dim()
    );
    print_scope_hint(project_path);
    println!();

    if r.experts.is_empty() {
        println!("  {}", style("No experts found for this path.").dim());
        println!();
        return;
    }

    // Column widths here must stay in sync with the data rows below.
    println!(
        "  {:<16} {:>6} {:>12} {:>6} {:>12} {}",
        style("Username").bold(),
        style("Score").bold(),
        style("Reviewed(MRs)").bold(),
        style("Notes").bold(),
        style("Authored(MRs)").bold(),
        style("Last Seen").bold(),
    );

    for expert in &r.experts {
        // Render zero counts as "-" so the table reads cleanly.
        let reviews = if expert.review_mr_count > 0 {
            expert.review_mr_count.to_string()
        } else {
            "-".to_string()
        };
        let notes = if expert.review_note_count > 0 {
            expert.review_note_count.to_string()
        } else {
            "-".to_string()
        };
        let authored = if expert.author_mr_count > 0 {
            expert.author_mr_count.to_string()
        } else {
            "-".to_string()
        };
        println!(
            "  {:<16} {:>6} {:>12} {:>6} {:>12} {}",
            style(format!("@{}", expert.username)).cyan(),
            expert.score,
            reviews,
            notes,
            authored,
            style(format_relative_time(expert.last_seen_ms)).dim(),
        );
    }
    if r.truncated {
        println!(
            "  {}",
            style("(showing first -n; rerun with a higher --limit)").dim()
        );
    }
    println!();
}
|
|
|
|
/// Human-readable summary for "workload" mode: four optional sections
/// (assigned issues, authored MRs, reviewing MRs, unresolved discussions),
/// each skipped entirely when empty, plus a fallback line when all are empty.
///
/// NOTE(review): the "(truncated; ...)" hint block is repeated four times
/// verbatim -- a small helper would remove the duplication.
fn print_workload_human(r: &WorkloadResult) {
    println!();
    println!(
        "{}",
        style(format!("@{} -- Workload Summary", r.username)).bold()
    );
    println!("{}", "\u{2500}".repeat(60));

    if !r.assigned_issues.is_empty() {
        println!();
        println!(
            "  {} ({})",
            style("Assigned Issues").bold(),
            r.assigned_issues.len()
        );
        for item in &r.assigned_issues {
            println!(
                "    {} {} {}",
                style(&item.ref_).cyan(),
                truncate_str(&item.title, 40),
                style(format_relative_time(item.updated_at)).dim(),
            );
        }
        if r.assigned_issues_truncated {
            println!(
                "    {}",
                style("(truncated; rerun with a higher --limit)").dim()
            );
        }
    }

    if !r.authored_mrs.is_empty() {
        println!();
        println!(
            "  {} ({})",
            style("Authored MRs").bold(),
            r.authored_mrs.len()
        );
        for mr in &r.authored_mrs {
            // Draft MRs get a dim suffix marker.
            let draft = if mr.draft { " [draft]" } else { "" };
            println!(
                "    {} {}{} {}",
                style(&mr.ref_).cyan(),
                truncate_str(&mr.title, 35),
                style(draft).dim(),
                style(format_relative_time(mr.updated_at)).dim(),
            );
        }
        if r.authored_mrs_truncated {
            println!(
                "    {}",
                style("(truncated; rerun with a higher --limit)").dim()
            );
        }
    }

    if !r.reviewing_mrs.is_empty() {
        println!();
        println!(
            "  {} ({})",
            style("Reviewing MRs").bold(),
            r.reviewing_mrs.len()
        );
        for mr in &r.reviewing_mrs {
            // Attribution suffix; omitted when the author is unknown.
            let author = mr
                .author_username
                .as_deref()
                .map(|a| format!(" by @{a}"))
                .unwrap_or_default();
            println!(
                "    {} {}{} {}",
                style(&mr.ref_).cyan(),
                truncate_str(&mr.title, 30),
                style(author).dim(),
                style(format_relative_time(mr.updated_at)).dim(),
            );
        }
        if r.reviewing_mrs_truncated {
            println!(
                "    {}",
                style("(truncated; rerun with a higher --limit)").dim()
            );
        }
    }

    if !r.unresolved_discussions.is_empty() {
        println!();
        println!(
            "  {} ({})",
            style("Unresolved Discussions").bold(),
            r.unresolved_discussions.len()
        );
        for disc in &r.unresolved_discussions {
            println!(
                "    {} {} {} {}",
                style(&disc.entity_type).dim(),
                style(&disc.ref_).cyan(),
                truncate_str(&disc.entity_title, 35),
                style(format_relative_time(disc.last_note_at)).dim(),
            );
        }
        if r.unresolved_discussions_truncated {
            println!(
                "    {}",
                style("(truncated; rerun with a higher --limit)").dim()
            );
        }
    }

    // All sections empty: say so rather than printing nothing.
    if r.assigned_issues.is_empty()
        && r.authored_mrs.is_empty()
        && r.reviewing_mrs.is_empty()
        && r.unresolved_discussions.is_empty()
    {
        println!();
        println!(
            "  {}",
            style("No open work items found for this user.").dim()
        );
    }

    println!();
}
|
|
|
|
/// Human-readable summary for "reviews" mode: totals line, a category table
/// (already sorted by count upstream), and a note about uncategorized
/// DiffNotes that lack the bold-prefix convention.
fn print_reviews_human(r: &ReviewsResult) {
    println!();
    println!(
        "{}",
        style(format!("@{} -- Review Patterns", r.username)).bold()
    );
    println!("{}", "\u{2500}".repeat(60));
    println!();

    if r.total_diffnotes == 0 {
        println!(
            "  {}",
            style("No review comments found for this user.").dim()
        );
        println!();
        return;
    }

    println!(
        "  {} DiffNotes across {} MRs ({} categorized)",
        style(r.total_diffnotes).bold(),
        style(r.mrs_reviewed).bold(),
        style(r.categorized_count).bold(),
    );
    println!();

    if !r.categories.is_empty() {
        println!(
            "  {:<16} {:>6} {:>6}",
            style("Category").bold(),
            style("Count").bold(),
            style("%").bold(),
        );

        for cat in &r.categories {
            // One decimal place on the percentage, matching the JSON output.
            println!(
                "  {:<16} {:>6} {:>5.1}%",
                style(&cat.name).cyan(),
                cat.count,
                cat.percentage,
            );
        }
    }

    // total >= categorized by construction, so this cannot underflow.
    let uncategorized = r.total_diffnotes - r.categorized_count;
    if uncategorized > 0 {
        println!();
        println!(
            "  {} {} uncategorized (no **prefix** convention)",
            style("Note:").dim(),
            uncategorized,
        );
    }

    println!();
}
|
|
|
|
/// Human-readable listing for "active" mode: one line per discussion with a
/// `!`/`#` entity prefix, plus an indented participant line when present.
fn print_active_human(r: &ActiveResult, project_path: Option<&str>) {
    println!();
    println!(
        "{}",
        style(format!(
            "Active Discussions ({} unresolved in window)",
            r.total_unresolved_in_window
        ))
        .bold()
    );
    println!("{}", "\u{2500}".repeat(60));
    print_scope_hint(project_path);
    println!();

    if r.discussions.is_empty() {
        println!(
            "  {}",
            style("No active unresolved discussions in this time window.").dim()
        );
        println!();
        return;
    }

    for disc in &r.discussions {
        // GitLab convention: '!' for MRs, '#' for issues.
        let prefix = if disc.entity_type == "MR" { "!" } else { "#" };
        let participants_str = disc
            .participants
            .iter()
            .map(|p| format!("@{p}"))
            .collect::<Vec<_>>()
            .join(", ");

        println!(
            "  {} {} {} {} notes {}",
            style(format!("{prefix}{}", disc.entity_iid)).cyan(),
            truncate_str(&disc.entity_title, 40),
            style(format_relative_time(disc.last_note_at)).dim(),
            disc.note_count,
            style(&disc.project_path).dim(),
        );
        // Participant line only when at least one participant is known.
        if !participants_str.is_empty() {
            println!("    {}", style(participants_str).dim());
        }
    }
    if r.truncated {
        println!(
            "  {}",
            style("(showing first -n; rerun with a higher --limit)").dim()
        );
    }
    println!();
}
|
|
|
|
/// Human-readable table for "overlap" mode: per-user role (A/R/A+R), MR touch
/// count, last-seen time, and up to five MR refs with a "+N" overflow marker.
fn print_overlap_human(r: &OverlapResult, project_path: Option<&str>) {
    println!();
    println!("{}", style(format!("Overlap for {}", r.path_query)).bold());
    println!("{}", "\u{2500}".repeat(60));
    println!(
        "  {}",
        style(format!(
            "(matching {} {})",
            r.path_match,
            if r.path_match == "exact" {
                "file"
            } else {
                "directory prefix"
            }
        ))
        .dim()
    );
    print_scope_hint(project_path);
    println!();

    if r.users.is_empty() {
        println!(
            "  {}",
            style("No overlapping users found for this path.").dim()
        );
        println!();
        return;
    }

    println!(
        "  {:<16} {:<6} {:>7} {:<12} {}",
        style("Username").bold(),
        style("Role").bold(),
        style("MRs").bold(),
        style("Last Seen").bold(),
        style("MR Refs").bold(),
    );

    for user in &r.users {
        // Show at most five refs inline; the rest collapse to "+N".
        let mr_str = user
            .mr_refs
            .iter()
            .take(5)
            .cloned()
            .collect::<Vec<_>>()
            .join(", ");
        let overflow = if user.mr_refs.len() > 5 {
            format!(" +{}", user.mr_refs.len() - 5)
        } else {
            String::new()
        };

        println!(
            "  {:<16} {:<6} {:>7} {:<12} {}{}",
            style(format!("@{}", user.username)).cyan(),
            format_overlap_role(user),
            user.touch_count,
            format_relative_time(user.last_seen_at),
            mr_str,
            overflow,
        );
    }
    if r.truncated {
        println!(
            "  {}",
            style("(showing first -n; rerun with a higher --limit)").dim()
        );
    }
    println!();
}
|
|
|
|
// ─── Robot JSON Output ───────────────────────────────────────────────────────
|
|
|
|
/// Emit the robot (machine-readable) JSON envelope for a `who` run.
///
/// The payload carries both `input` (raw CLI args as typed) and
/// `resolved_input` (the computed values that actually ran) so a run is
/// reproducible from its own output.
pub fn print_who_json(run: &WhoRun, args: &WhoArgs, elapsed_ms: u64) {
    // Mode tag plus the mode-specific result object.
    let (mode, data) = match &run.result {
        WhoResult::Expert(r) => ("expert", expert_to_json(r)),
        WhoResult::Workload(r) => ("workload", workload_to_json(r)),
        WhoResult::Reviews(r) => ("reviews", reviews_to_json(r)),
        WhoResult::Active(r) => ("active", active_to_json(r)),
        WhoResult::Overlap(r) => ("overlap", overlap_to_json(r)),
    };

    // Raw CLI args -- what the user typed
    let input = serde_json::json!({
        "target": args.target,
        "path": args.path,
        "project": args.project,
        "since": args.since,
        "limit": args.limit,
    });

    // Resolved/computed values -- what actually ran
    let resolved_input = serde_json::json!({
        "mode": run.resolved_input.mode,
        "project_id": run.resolved_input.project_id,
        "project_path": run.resolved_input.project_path,
        "since_ms": run.resolved_input.since_ms,
        "since_iso": run.resolved_input.since_iso,
        "since_mode": run.resolved_input.since_mode,
        "limit": run.resolved_input.limit,
    });

    let output = WhoJsonEnvelope {
        ok: true,
        data: WhoJsonData {
            mode: mode.to_string(),
            input,
            resolved_input,
            result: data,
        },
        meta: RobotMeta { elapsed_ms },
    };

    // Serialization of this all-string-keyed, derive-Serialize tree is not
    // expected to fail; unwrap treats a failure as a programming bug.
    println!("{}", serde_json::to_string(&output).unwrap());
}
|
|
|
|
/// Top-level robot JSON envelope for `lore who` output.
#[derive(Serialize)]
struct WhoJsonEnvelope {
    /// Always `true` here -- this envelope is only built on the success path.
    ok: bool,
    /// Mode tag, inputs, and mode-specific result.
    data: WhoJsonData,
    /// Shared robot metadata (elapsed time).
    meta: RobotMeta,
}
|
|
|
|
/// `data` payload of the robot envelope.
#[derive(Serialize)]
struct WhoJsonData {
    /// Which query mode ran: "expert", "workload", "reviews", "active", "overlap".
    mode: String,
    /// Raw CLI arguments as the user typed them.
    input: serde_json::Value,
    /// Computed values that actually ran (resolved project, window, limit).
    resolved_input: serde_json::Value,
    // Flattened so the mode-specific keys appear directly under `data`
    // rather than nested beneath a "result" key.
    #[serde(flatten)]
    result: serde_json::Value,
}
|
|
|
|
/// Serialize an `ExpertResult` into the robot JSON shape.
/// Timestamps are converted from epoch ms to ISO-8601 strings.
fn expert_to_json(r: &ExpertResult) -> serde_json::Value {
    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "truncated": r.truncated,
        "experts": r.experts.iter().map(|e| serde_json::json!({
            "username": e.username,
            "score": e.score,
            "review_mr_count": e.review_mr_count,
            "review_note_count": e.review_note_count,
            "author_mr_count": e.author_mr_count,
            "last_seen_at": ms_to_iso(e.last_seen_ms),
        })).collect::<Vec<_>>(),
    })
}
|
|
|
|
/// Serialize a `WorkloadResult` into the robot JSON shape: four item lists
/// plus per-list counts ("summary") and truncation flags ("truncation").
fn workload_to_json(r: &WorkloadResult) -> serde_json::Value {
    serde_json::json!({
        "username": r.username,
        "assigned_issues": r.assigned_issues.iter().map(|i| serde_json::json!({
            "iid": i.iid,
            "ref": i.ref_,
            "title": i.title,
            "project_path": i.project_path,
            "updated_at": ms_to_iso(i.updated_at),
        })).collect::<Vec<_>>(),
        "authored_mrs": r.authored_mrs.iter().map(|m| serde_json::json!({
            "iid": m.iid,
            "ref": m.ref_,
            "title": m.title,
            "draft": m.draft,
            "project_path": m.project_path,
            "updated_at": ms_to_iso(m.updated_at),
        })).collect::<Vec<_>>(),
        "reviewing_mrs": r.reviewing_mrs.iter().map(|m| serde_json::json!({
            "iid": m.iid,
            "ref": m.ref_,
            "title": m.title,
            "draft": m.draft,
            "project_path": m.project_path,
            "author_username": m.author_username,
            "updated_at": ms_to_iso(m.updated_at),
        })).collect::<Vec<_>>(),
        "unresolved_discussions": r.unresolved_discussions.iter().map(|d| serde_json::json!({
            "entity_type": d.entity_type,
            "entity_iid": d.entity_iid,
            "ref": d.ref_,
            "entity_title": d.entity_title,
            "project_path": d.project_path,
            "last_note_at": ms_to_iso(d.last_note_at),
        })).collect::<Vec<_>>(),
        // Counts reflect the (possibly truncated) lists above, not DB totals.
        "summary": {
            "assigned_issue_count": r.assigned_issues.len(),
            "authored_mr_count": r.authored_mrs.len(),
            "reviewing_mr_count": r.reviewing_mrs.len(),
            "unresolved_discussion_count": r.unresolved_discussions.len(),
        },
        "truncation": {
            "assigned_issues_truncated": r.assigned_issues_truncated,
            "authored_mrs_truncated": r.authored_mrs_truncated,
            "reviewing_mrs_truncated": r.reviewing_mrs_truncated,
            "unresolved_discussions_truncated": r.unresolved_discussions_truncated,
        }
    })
}
|
|
|
|
/// Serialize a `ReviewsResult` into the robot JSON shape.
/// Percentages are rounded to one decimal place for stable output.
fn reviews_to_json(r: &ReviewsResult) -> serde_json::Value {
    serde_json::json!({
        "username": r.username,
        "total_diffnotes": r.total_diffnotes,
        "categorized_count": r.categorized_count,
        "mrs_reviewed": r.mrs_reviewed,
        "categories": r.categories.iter().map(|c| serde_json::json!({
            "name": c.name,
            "count": c.count,
            // Round to 1 decimal so the JSON matches the human table.
            "percentage": (c.percentage * 10.0).round() / 10.0,
        })).collect::<Vec<_>>(),
    })
}
|
|
|
|
/// Serialize an `ActiveResult` into the robot JSON shape, including the
/// participant truncation bookkeeping computed in `query_active`.
fn active_to_json(r: &ActiveResult) -> serde_json::Value {
    serde_json::json!({
        "total_unresolved_in_window": r.total_unresolved_in_window,
        "truncated": r.truncated,
        "discussions": r.discussions.iter().map(|d| serde_json::json!({
            "discussion_id": d.discussion_id,
            "entity_type": d.entity_type,
            "entity_iid": d.entity_iid,
            "entity_title": d.entity_title,
            "project_path": d.project_path,
            "last_note_at": ms_to_iso(d.last_note_at),
            "note_count": d.note_count,
            "participants": d.participants,
            "participants_total": d.participants_total,
            "participants_truncated": d.participants_truncated,
        })).collect::<Vec<_>>(),
    })
}
|
|
|
|
/// Serialize an `OverlapResult` into the robot JSON shape. The display role
/// ("A"/"R"/"A+R") is included alongside the raw per-role counts.
fn overlap_to_json(r: &OverlapResult) -> serde_json::Value {
    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "truncated": r.truncated,
        "users": r.users.iter().map(|u| serde_json::json!({
            "username": u.username,
            "role": format_overlap_role(u),
            "author_touch_count": u.author_touch_count,
            "review_touch_count": u.review_touch_count,
            "touch_count": u.touch_count,
            "last_seen_at": ms_to_iso(u.last_seen_at),
            "mr_refs": u.mr_refs,
            "mr_refs_total": u.mr_refs_total,
            "mr_refs_truncated": u.mr_refs_truncated,
        })).collect::<Vec<_>>(),
    })
}
|
|
|
|
// ─── Helper Functions ────────────────────────────────────────────────────────
|
|
|
|
fn format_relative_time(ms_epoch: i64) -> String {
|
|
let now = now_ms();
|
|
let diff = now - ms_epoch;
|
|
|
|
if diff < 0 {
|
|
return "in the future".to_string();
|
|
}
|
|
|
|
match diff {
|
|
d if d < 60_000 => "just now".to_string(),
|
|
d if d < 3_600_000 => format!("{} min ago", d / 60_000),
|
|
d if d < 86_400_000 => {
|
|
let n = d / 3_600_000;
|
|
format!("{n} {} ago", if n == 1 { "hour" } else { "hours" })
|
|
}
|
|
d if d < 604_800_000 => {
|
|
let n = d / 86_400_000;
|
|
format!("{n} {} ago", if n == 1 { "day" } else { "days" })
|
|
}
|
|
d if d < 2_592_000_000 => {
|
|
let n = d / 604_800_000;
|
|
format!("{n} {} ago", if n == 1 { "week" } else { "weeks" })
|
|
}
|
|
_ => {
|
|
let n = diff / 2_592_000_000;
|
|
format!("{n} {} ago", if n == 1 { "month" } else { "months" })
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Truncate `s` to at most `max` characters, appending "..." when cut.
///
/// Operates on `char`s (not bytes) so multibyte text is never split inside a
/// code point. Behavior for `max >= 3` is unchanged from before; for
/// `max < 3` the previous code returned "..." (3 chars, exceeding `max`
/// because of `saturating_sub`), which is fixed here by hard-cutting instead.
fn truncate_str(s: &str, max: usize) -> String {
    if s.chars().count() <= max {
        return s.to_owned();
    }
    if max < 3 {
        // No room for an ellipsis: hard cut to exactly `max` chars.
        return s.chars().take(max).collect();
    }
    let head: String = s.chars().take(max - 3).collect();
    format!("{head}...")
}
|
|
|
|
// ─── Tests ───────────────────────────────────────────────────────────────────
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
use crate::core::db::{create_connection, run_migrations};
|
|
use std::path::Path;
|
|
|
|
    // Fresh in-memory SQLite database with the full migration set applied.
    fn setup_test_db() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }
|
|
|
|
    // Fixture: insert a project row. The GitLab id is derived as `id * 100`
    // so local and remote ids never collide in assertions.
    fn insert_project(conn: &Connection, id: i64, path: &str) {
        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
             VALUES (?1, ?2, ?3, ?4)",
            rusqlite::params![
                id,
                id * 100,
                path,
                format!("https://git.example.com/{}", path)
            ],
        )
        .unwrap();
    }
|
|
|
|
    // Fixture: insert a merge request with a synthetic title ("MR {iid}") and
    // current timestamps; gitlab_id is `id * 10`.
    fn insert_mr(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str, state: &str) {
        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, author_username, state, last_seen_at, updated_at)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
            rusqlite::params![
                id,
                id * 10,
                project_id,
                iid,
                format!("MR {iid}"),
                author,
                state,
                now_ms(),
                now_ms()
            ],
        )
        .unwrap();
    }
|
|
|
|
    // Fixture: insert an issue, always in the 'opened' state, with a
    // synthetic title ("Issue {iid}") and current timestamps.
    fn insert_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str) {
        conn.execute(
            "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at)
             VALUES (?1, ?2, ?3, ?4, ?5, 'opened', ?6, ?7, ?8, ?9)",
            rusqlite::params![
                id,
                id * 10,
                project_id,
                iid,
                format!("Issue {iid}"),
                author,
                now_ms(),
                now_ms(),
                now_ms()
            ],
        )
        .unwrap();
    }
|
|
|
|
    // Fixture: insert a discussion attached to either an MR or an issue.
    // The noteable_type is derived from which id is Some; callers are
    // expected to pass exactly one of `mr_id` / `issue_id`.
    fn insert_discussion(
        conn: &Connection,
        id: i64,
        project_id: i64,
        mr_id: Option<i64>,
        issue_id: Option<i64>,
        resolvable: bool,
        resolved: bool,
    ) {
        let noteable_type = if mr_id.is_some() {
            "MergeRequest"
        } else {
            "Issue"
        };
        conn.execute(
            "INSERT INTO discussions (id, gitlab_discussion_id, project_id, merge_request_id, issue_id, noteable_type, resolvable, resolved, last_seen_at, last_note_at)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)",
            rusqlite::params![
                id,
                format!("disc-{id}"),
                project_id,
                mr_id,
                issue_id,
                noteable_type,
                // Schema stores booleans as 0/1 integers.
                i32::from(resolvable),
                i32::from(resolved),
                now_ms(),
                now_ms()
            ],
        )
        .unwrap();
    }
|
|
|
|
    // Fixture: insert a non-system DiffNote positioned on `file_path`
    // (position_new_path), which is what the expert/overlap queries match on.
    #[allow(clippy::too_many_arguments)]
    fn insert_diffnote(
        conn: &Connection,
        id: i64,
        discussion_id: i64,
        project_id: i64,
        author: &str,
        file_path: &str,
        body: &str,
    ) {
        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, note_type, is_system, author_username, body, created_at, updated_at, last_seen_at, position_new_path)
             VALUES (?1, ?2, ?3, ?4, 'DiffNote', 0, ?5, ?6, ?7, ?8, ?9, ?10)",
            rusqlite::params![
                id,
                id * 10,
                discussion_id,
                project_id,
                author,
                body,
                now_ms(),
                now_ms(),
                now_ms(),
                file_path
            ],
        )
        .unwrap();
    }
|
|
|
|
fn insert_assignee(conn: &Connection, issue_id: i64, username: &str) {
|
|
conn.execute(
|
|
"INSERT INTO issue_assignees (issue_id, username) VALUES (?1, ?2)",
|
|
rusqlite::params![issue_id, username],
|
|
)
|
|
.unwrap();
|
|
}
|
|
|
|
#[allow(dead_code)]
|
|
fn insert_reviewer(conn: &Connection, mr_id: i64, username: &str) {
|
|
conn.execute(
|
|
"INSERT INTO mr_reviewers (merge_request_id, username) VALUES (?1, ?2)",
|
|
rusqlite::params![mr_id, username],
|
|
)
|
|
.unwrap();
|
|
}
|
|
|
|
#[test]
fn test_is_file_path_discrimination() {
    // Builds a WhoArgs with only the mode-discriminating fields populated;
    // everything else stays at its neutral value.
    fn args(target: Option<&str>, path: Option<&str>, reviews: bool) -> WhoArgs {
        WhoArgs {
            target: target.map(str::to_string),
            path: path.map(str::to_string),
            active: false,
            overlap: None,
            reviews,
            since: None,
            project: None,
            limit: 20,
        }
    }

    // Contains '/' -> file path
    assert!(matches!(
        resolve_mode(&args(Some("src/auth/"), None, false)).unwrap(),
        WhoMode::Expert { .. }
    ));

    // No '/' -> username
    assert!(matches!(
        resolve_mode(&args(Some("asmith"), None, false)).unwrap(),
        WhoMode::Workload { .. }
    ));

    // With @ prefix -> username (stripped)
    assert!(matches!(
        resolve_mode(&args(Some("@asmith"), None, false)).unwrap(),
        WhoMode::Workload { .. }
    ));

    // --reviews flag -> reviews mode
    assert!(matches!(
        resolve_mode(&args(Some("asmith"), None, true)).unwrap(),
        WhoMode::Reviews { .. }
    ));

    // --path flag -> expert mode (handles root files)
    assert!(matches!(
        resolve_mode(&args(None, Some("README.md"), false)).unwrap(),
        WhoMode::Expert { .. }
    ));

    // --path flag with dotless file -> expert mode
    assert!(matches!(
        resolve_mode(&args(None, Some("Makefile"), false)).unwrap(),
        WhoMode::Expert { .. }
    ));
}
|
|
|
|
#[test]
fn test_build_path_query() {
    let conn = setup_test_db();
    // All cases here are unscoped (no project filter).
    let query = |raw: &str| build_path_query(&conn, raw, None).unwrap();

    // Directory with trailing slash -> prefix
    let pq = query("src/auth/");
    assert!(pq.is_prefix);
    assert_eq!(pq.value, "src/auth/%");

    // Directory without trailing slash (no dot in last segment) -> prefix
    let pq = query("src/auth");
    assert!(pq.is_prefix);
    assert_eq!(pq.value, "src/auth/%");

    // File with extension -> exact
    let pq = query("src/auth/login.rs");
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "src/auth/login.rs");

    // Root file -> exact
    let pq = query("README.md");
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "README.md");

    // Directory with dots in non-leaf segment -> prefix
    let pq = query(".github/workflows/");
    assert!(pq.is_prefix);
    assert_eq!(pq.value, ".github/workflows/%");

    // Versioned directory path -> prefix
    let pq = query("src/v1.2/auth/");
    assert!(pq.is_prefix);
    assert_eq!(pq.value, "src/v1.2/auth/%");

    // Path with LIKE metacharacters -> prefix, escaped
    let pq = query("src/test_files/");
    assert!(pq.is_prefix);
    assert_eq!(pq.value, "src/test\\_files/%");

    // Dotless root file -> exact match (root path without '/')
    let pq = query("Makefile");
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "Makefile");

    let pq = query("LICENSE");
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "LICENSE");

    // Dotless root path with trailing '/' -> directory prefix (explicit override)
    let pq = query("Makefile/");
    assert!(pq.is_prefix);
    assert_eq!(pq.value, "Makefile/%");
}
|
|
|
|
#[test]
fn test_escape_like() {
    // Each LIKE metacharacter must be backslash-escaped; plain text passes through.
    let cases = [
        ("normal/path", "normal/path"),
        ("has_underscore", "has\\_underscore"),
        ("has%percent", "has\\%percent"),
        ("has\\backslash", "has\\\\backslash"),
    ];
    for (input, expected) in cases {
        assert_eq!(escape_like(input), expected, "input: {input:?}");
    }
}
|
|
|
|
#[test]
fn test_build_path_query_exact_does_not_escape() {
    let conn = setup_test_db();
    // Exact matches compare with '=', so '_' must pass through unescaped.
    let pq = build_path_query(&conn, "README_with_underscore.md", None).unwrap();
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "README_with_underscore.md");
}
|
|
|
|
#[test]
fn test_path_flag_dotless_root_file_is_exact() {
    let conn = setup_test_db();
    // --path Makefile / Dockerfile must produce exact matches, not "<name>/%".
    for name in ["Makefile", "Dockerfile"] {
        let pq = build_path_query(&conn, name, None).unwrap();
        assert!(!pq.is_prefix, "{name} should not be a prefix query");
        assert_eq!(pq.value, name);
    }
}
|
|
|
|
#[test]
fn test_expert_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // reviewer_b leaves two notes on one file; reviewer_c one note on another.
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/auth/login.rs", "**suggestion**: use const");
    insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/auth/login.rs", "**question**: why?");
    insert_diffnote(&conn, 3, 1, 1, "reviewer_c", "src/auth/session.rs", "looks good");

    let out = query_expert(&conn, "src/auth/", None, 0, 20).unwrap();
    // Both reviewers plus the MR author count as experts for this directory.
    assert_eq!(out.experts.len(), 3);
    // Two notes beat one: reviewer_b ranks first.
    assert_eq!(out.experts[0].username, "reviewer_b");
}
|
|
|
|
#[test]
fn test_workload_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    // dev_a is assigned an issue authored by someone else and authors one MR.
    insert_issue(&conn, 1, 1, 42, "someone_else");
    insert_assignee(&conn, 1, "dev_a");
    insert_mr(&conn, 1, 1, 100, "dev_a", "opened");

    let workload = query_workload(&conn, "dev_a", None, None, 20).unwrap();
    assert_eq!(workload.assigned_issues.len(), 1);
    assert_eq!(workload.authored_mrs.len(), 1);
}
|
|
|
|
#[test]
fn test_reviews_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // Two prefixed notes plus one uncategorized free-form note.
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/foo.rs", "**suggestion**: refactor");
    insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/bar.rs", "**question**: why?");
    insert_diffnote(&conn, 3, 1, 1, "reviewer_b", "src/baz.rs", "looks good");

    let result = query_reviews(&conn, "reviewer_b", None, 0).unwrap();
    assert_eq!(result.total_diffnotes, 3);
    // Only the two prefixed notes are categorized.
    assert_eq!(result.categorized_count, 2);
    assert_eq!(result.categories.len(), 2);
}
|
|
|
|
#[test]
fn test_active_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // Two notes from the same person: note_count must reach 2 while the
    // participant list still holds just "reviewer_b".
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/foo.rs", "needs work");
    insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/foo.rs", "follow-up");

    let result = query_active(&conn, None, 0, 20).unwrap();
    assert_eq!(result.total_unresolved_in_window, 1);
    assert_eq!(result.discussions.len(), 1);

    let disc = &result.discussions[0];
    assert_eq!(disc.participants, vec!["reviewer_b"]);
    // Regression guard (iteration 4): note_count once counted participants, not notes.
    assert_eq!(disc.note_count, 2);
    assert!(disc.discussion_id > 0);
}
|
|
|
|
#[test]
fn test_overlap_dual_roles() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    // dual_user authors MR !100 and leaves a review note on MR !200,
    // both touching the same path -> should be flagged as author AND reviewer.
    insert_mr(&conn, 1, 1, 100, "dual_user", "opened");
    insert_mr(&conn, 2, 1, 200, "other_author", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_discussion(&conn, 2, 1, Some(2), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "someone", "src/auth/login.rs", "review of dual_user's MR");
    insert_diffnote(&conn, 2, 2, 1, "dual_user", "src/auth/login.rs", "dual_user reviewing other MR");

    let result = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    let dual = result
        .users
        .iter()
        .find(|u| u.username == "dual_user")
        .unwrap();
    assert!(dual.author_touch_count > 0);
    assert!(dual.review_touch_count > 0);
    assert_eq!(format_overlap_role(dual), "A+R");
    // MR refs should be project-qualified
    assert!(dual.mr_refs.iter().any(|r| r.contains("team/backend!")));
}
|
|
|
|
#[test]
fn test_overlap_multi_project_mr_refs() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_project(&conn, 2, "team/frontend");
    // Same iid (100) in two different projects must produce two distinct refs.
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_mr(&conn, 2, 2, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_discussion(&conn, 2, 2, Some(2), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_x", "src/auth/login.rs", "review");
    insert_diffnote(&conn, 2, 2, 2, "reviewer_x", "src/auth/login.rs", "review");

    let overlap = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    let reviewer = overlap
        .users
        .iter()
        .find(|u| u.username == "reviewer_x")
        .unwrap();
    for expected in ["team/backend!100", "team/frontend!100"] {
        assert!(
            reviewer.mr_refs.contains(&expected.to_string()),
            "missing ref {expected}"
        );
    }
}
|
|
|
|
#[test]
fn test_normalize_review_prefix() {
    // Case, trailing colon, and "(non-blocking)" qualifiers are all stripped;
    // "nitpick" folds into the canonical "nit" category.
    let cases = [
        ("suggestion", "suggestion"),
        ("Suggestion:", "suggestion"),
        ("suggestion (non-blocking):", "suggestion"),
        ("Nitpick:", "nit"),
        ("nit (non-blocking):", "nit"),
        ("question", "question"),
        ("TODO:", "todo"),
    ];
    for (input, expected) in cases {
        assert_eq!(normalize_review_prefix(input), expected, "input: {input:?}");
    }
}
|
|
|
|
#[test]
fn test_normalize_repo_path() {
    let cases = [
        // Strips leading ./
        ("./src/foo/", "src/foo/"),
        // Strips leading /
        ("/src/foo/", "src/foo/"),
        // Strips leading ./ recursively
        ("././src/foo", "src/foo"),
        // Converts Windows backslashes when no forward slashes
        ("src\\foo\\bar.rs", "src/foo/bar.rs"),
        // Does NOT convert backslashes when forward slashes present
        ("src/foo\\bar", "src/foo\\bar"),
        // Collapses repeated //
        ("src//foo//bar/", "src/foo/bar/"),
        // Trims whitespace
        (" src/foo/ ", "src/foo/"),
        // Identity for clean paths
        ("src/foo/bar.rs", "src/foo/bar.rs"),
    ];
    for (input, expected) in cases {
        assert_eq!(normalize_repo_path(input), expected, "input: {input:?}");
    }
}
|
|
|
|
#[test]
fn test_lookup_project_path() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    let path = lookup_project_path(&conn, 1).unwrap();
    assert_eq!(path, "team/backend");
}
|
|
|
|
#[test]
fn test_build_path_query_dotless_subdir_file_uses_db_probe() {
    // A dotless file inside a subdirectory (src/Dockerfile) would normally
    // look like a directory; the DB probe recognizes it as a real file.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/Dockerfile", "note");

    let with_data = build_path_query(&conn, "src/Dockerfile", None).unwrap();
    assert!(!with_data.is_prefix);
    assert_eq!(with_data.value, "src/Dockerfile");

    // Without any DB evidence the same input degrades to a prefix match.
    let empty_conn = setup_test_db();
    let without_data = build_path_query(&empty_conn, "src/Dockerfile", None).unwrap();
    assert!(without_data.is_prefix);
    assert_eq!(without_data.value, "src/Dockerfile/%");
}
|
|
|
|
#[test]
fn test_build_path_query_probe_is_project_scoped() {
    // "infra/Makefile" exists as a dotless file only in project 1; a query
    // scoped to project 2 must not treat it as an exact file.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/a");
    insert_project(&conn, 2, "team/b");
    insert_mr(&conn, 1, 1, 10, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "rev", "infra/Makefile", "note");

    let probe = |scope: Option<i64>| build_path_query(&conn, "infra/Makefile", scope).unwrap();

    // Unscoped: the probe finds the file in project 1 -> exact.
    assert!(!probe(None).is_prefix);
    // Scoped to project 2: no data there -> falls back to prefix.
    assert!(probe(Some(2)).is_prefix);
    // Scoped to project 1: data present -> exact.
    assert!(!probe(Some(1)).is_prefix);
}
|
|
|
|
#[test]
fn test_expert_excludes_self_review_notes() {
    // An MR author commenting on their own diff must not earn reviewer credit.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // author_a annotates their own diff; reviewer_b performs an actual review.
    insert_diffnote(&conn, 1, 1, 1, "author_a", "src/auth/login.rs", "clarification");
    insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/auth/login.rs", "looks good");

    let result = query_expert(&conn, "src/auth/", None, 0, 20).unwrap();
    let by_name = |name: &str| result.experts.iter().find(|e| e.username == name).unwrap();

    // author_a is credited as author only, never as their own reviewer.
    let author = by_name("author_a");
    assert_eq!(author.review_mr_count, 0);
    assert!(author.author_mr_count > 0);

    // reviewer_b gets reviewer credit.
    assert!(by_name("reviewer_b").review_mr_count > 0);
}
|
|
|
|
#[test]
fn test_overlap_excludes_self_review_notes() {
    // MR author commenting on their own diff should not inflate reviewer counts
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // author_a comments on their own MR diff (clarification)
    insert_diffnote(&conn, 1, 1, 1, "author_a", "src/auth/login.rs", "clarification");

    let result = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    let u = result.users.iter().find(|u| u.username == "author_a");
    // Should NOT be credited as a reviewer touch. assert_eq! (rather than
    // assert!(.. == 0)) reports the actual count when this regresses.
    assert_eq!(u.map_or(0, |x| x.review_touch_count), 0);
}
|
|
|
|
#[test]
fn test_active_participants_sorted() {
    // Participants must come back alphabetically for deterministic output,
    // regardless of insertion order.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // Deliberately inserted z-before-a.
    insert_diffnote(&conn, 1, 1, 1, "zebra_user", "src/foo.rs", "note 1");
    insert_diffnote(&conn, 2, 1, 1, "alpha_user", "src/foo.rs", "note 2");

    let result = query_active(&conn, None, 0, 20).unwrap();
    assert_eq!(
        result.discussions[0].participants,
        vec!["alpha_user", "zebra_user"]
    );
}
|
|
|
|
#[test]
fn test_expert_truncation() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    // Three reviewers, each on their own MR, all touching the same file.
    for i in 1..=3 {
        insert_mr(&conn, i, 1, 100 + i, &format!("author_{i}"), "opened");
        insert_discussion(&conn, i, 1, Some(i), None, true, false);
        insert_diffnote(&conn, i, i, 1, &format!("reviewer_{i}"), "src/auth/login.rs", "note");
    }

    // A limit below the candidate count flags truncation...
    let capped = query_expert(&conn, "src/auth/", None, 0, 2).unwrap();
    assert!(capped.truncated);
    assert_eq!(capped.experts.len(), 2);

    // ...and a generous limit does not.
    let full = query_expert(&conn, "src/auth/", None, 0, 10).unwrap();
    assert!(!full.truncated);
}
|
|
}
|