Expert mode now surfaces the specific MR references (project/path!iid) that contributed to each expert's score, capped at 50 per user. A new --detail flag adds per-MR breakdowns showing role (Author/Reviewer/both), note count, and last activity timestamp. Scoring weights (author_weight, reviewer_weight, note_bonus) are now configurable via the config file's `scoring` section with validation that rejects negative values. Defaults shift to author_weight=25, reviewer_weight=10, note_bonus=1 — better reflecting that code authorship is a stronger expertise signal than review assignment alone. Path resolution gains suffix matching: typing "login.rs" auto-resolves to "src/auth/login.rs" when unambiguous, with clear disambiguation errors when multiple paths match. Project-scoping (-p) narrows the candidate set. The MAX_MR_REFS_PER_USER constant is promoted to module scope for reuse across expert and overlap modes. Human output shows MR refs inline and detail sub-rows when requested. Robot JSON includes mr_refs, mr_refs_total, mr_refs_truncated, and optional details array. Includes comprehensive tests for suffix resolution, scoring weight configurability, MR ref aggregation across projects, and detail mode. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
3651 lines
126 KiB
Rust
3651 lines
126 KiB
Rust
use console::style;
|
|
use rusqlite::Connection;
|
|
use serde::Serialize;
|
|
use std::collections::{HashMap, HashSet};
|
|
|
|
use crate::Config;
|
|
use crate::cli::WhoArgs;
|
|
use crate::cli::robot::RobotMeta;
|
|
use crate::core::config::ScoringConfig;
|
|
use crate::core::db::create_connection;
|
|
use crate::core::error::{LoreError, Result};
|
|
use crate::core::paths::get_db_path;
|
|
use crate::core::project::resolve_project;
|
|
use crate::core::time::{ms_to_iso, now_ms, parse_since};
|
|
|
|
// ─── Mode Discrimination ────────────────────────────────────────────────────
|
|
|
|
/// Determines which query mode to run based on args.
/// Path variants own their strings because path normalization produces new `String`s.
/// Username variants borrow from args since no normalization is needed.
/// Resolution precedence (path flag > --active > --overlap > positional target)
/// is implemented in `resolve_mode`.
enum WhoMode<'a> {
    /// lore who <file-path> OR lore who --path <path>
    Expert { path: String },
    /// lore who <username>
    Workload { username: &'a str },
    /// lore who <username> --reviews
    Reviews { username: &'a str },
    /// lore who --active
    Active,
    /// lore who --overlap <path>
    Overlap { path: String },
}
|
|
|
|
fn resolve_mode<'a>(args: &'a WhoArgs) -> Result<WhoMode<'a>> {
|
|
// Explicit --path flag always wins (handles root files like README.md,
|
|
// LICENSE, Makefile -- anything without a / that can't be auto-detected)
|
|
if let Some(p) = &args.path {
|
|
return Ok(WhoMode::Expert {
|
|
path: normalize_repo_path(p),
|
|
});
|
|
}
|
|
if args.active {
|
|
return Ok(WhoMode::Active);
|
|
}
|
|
if let Some(path) = &args.overlap {
|
|
return Ok(WhoMode::Overlap {
|
|
path: normalize_repo_path(path),
|
|
});
|
|
}
|
|
if let Some(target) = &args.target {
|
|
let clean = target.strip_prefix('@').unwrap_or(target);
|
|
if args.reviews {
|
|
return Ok(WhoMode::Reviews { username: clean });
|
|
}
|
|
// Disambiguation: if target contains '/', it's a file path.
|
|
// GitLab usernames never contain '/'.
|
|
// Root files (no '/') require --path.
|
|
if clean.contains('/') {
|
|
return Ok(WhoMode::Expert {
|
|
path: normalize_repo_path(clean),
|
|
});
|
|
}
|
|
return Ok(WhoMode::Workload { username: clean });
|
|
}
|
|
Err(LoreError::Other(
|
|
"Provide a username, file path, --active, or --overlap <path>.\n\n\
|
|
Examples:\n \
|
|
lore who src/features/auth/\n \
|
|
lore who @username\n \
|
|
lore who --active\n \
|
|
lore who --overlap src/features/\n \
|
|
lore who --path README.md\n \
|
|
lore who --path Makefile"
|
|
.to_string(),
|
|
))
|
|
}
|
|
|
|
/// Normalize user-supplied repo paths to match stored DiffNote paths.
/// - trims whitespace
/// - strips leading "./" and "/" (repo-relative paths)
/// - converts '\' to '/' when no '/' present (Windows paste)
/// - collapses repeated "//"
fn normalize_repo_path(input: &str) -> String {
    let trimmed = input.trim();
    // Windows backslash normalization (only when no forward slashes present).
    // Owned copy is needed only in this branch; otherwise we keep borrowing.
    let converted;
    let mut rest: &str = if trimmed.contains('\\') && !trimmed.contains('/') {
        converted = trimmed.replace('\\', "/");
        &converted
    } else {
        trimmed
    };
    // Strip leading "./" repeatedly -- strip_prefix re-slices in place instead
    // of reallocating a fresh String on every iteration.
    while let Some(stripped) = rest.strip_prefix("./") {
        rest = stripped;
    }
    // Strip leading /
    let mut s = rest.trim_start_matches('/').to_string();
    // Collapse repeated // (loop: a single replace pass can leave "//" behind
    // when three or more slashes run together).
    while s.contains("//") {
        s = s.replace("//", "/");
    }
    s
}
|
|
|
|
// ─── Result Types ────────────────────────────────────────────────────────────
|
|
|
|
/// Top-level run result: carries resolved inputs + the mode-specific result.
pub struct WhoRun {
    /// Inputs after resolution (mode name, project scope, time window, limit).
    pub resolved_input: WhoResolvedInput,
    /// Mode-specific query output.
    pub result: WhoResult,
}
|
|
|
|
/// Resolved query parameters -- computed once, used for robot JSON reproducibility.
pub struct WhoResolvedInput {
    /// Mode name: "expert" | "workload" | "reviews" | "active" | "overlap".
    pub mode: String,
    /// Project scope from -p, when given and resolved.
    pub project_id: Option<i64>,
    /// `path_with_namespace` of the scoped project, when resolved.
    pub project_path: Option<String>,
    /// Lower bound of the query time window (epoch ms), if any.
    pub since_ms: Option<i64>,
    /// ISO rendering of `since_ms` (via `ms_to_iso`), if any.
    pub since_iso: Option<String>,
    /// "default" (mode default applied), "explicit" (user provided --since), "none" (no window)
    pub since_mode: String,
    /// Row limit requested by the caller (from args).
    pub limit: u16,
}
|
|
|
|
/// Top-level result enum -- one variant per mode.
pub enum WhoResult {
    /// Result for `lore who <path>` / `lore who --path <path>`.
    Expert(ExpertResult),
    /// Result for `lore who <username>`.
    Workload(WorkloadResult),
    /// Result for `lore who <username> --reviews`.
    Reviews(ReviewsResult),
    /// Result for `lore who --active`.
    Active(ActiveResult),
    /// Result for `lore who --overlap <path>`.
    Overlap(OverlapResult),
}
|
|
|
|
// --- Expert ---
|
|
|
|
/// Expert-mode result: users ranked by their scored activity on a path.
pub struct ExpertResult {
    /// The queried path in display form (resolved full path for exact matches,
    /// raw unescaped input for prefix matches).
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    /// Ranked experts, highest score first.
    pub experts: Vec<Expert>,
    /// True when more users matched than the requested limit.
    pub truncated: bool,
}
|
|
|
|
/// One ranked user in expert mode.
pub struct Expert {
    pub username: String,
    /// Weighted score: author MRs * author_weight + reviewer MRs *
    /// reviewer_weight + review notes * note_bonus (see `query_expert`).
    pub score: i64,
    /// Distinct MRs where the user acted as reviewer (diff notes or assignment).
    pub review_mr_count: u32,
    /// Individual DiffNote review comments left by the user.
    pub review_note_count: u32,
    /// Distinct MRs the user authored that touched the path.
    pub author_mr_count: u32,
    /// Most recent activity timestamp across all signals (epoch ms).
    pub last_seen_ms: i64,
    /// Stable MR references like "group/project!123"
    pub mr_refs: Vec<String>,
    /// Count of distinct refs before the `MAX_MR_REFS_PER_USER` cap.
    pub mr_refs_total: u32,
    /// True when `mr_refs` was cut at `MAX_MR_REFS_PER_USER`.
    pub mr_refs_truncated: bool,
    /// Per-MR detail breakdown (only populated when --detail is set)
    pub details: Option<Vec<ExpertMrDetail>>,
}
|
|
|
|
/// Per-MR breakdown row for one expert (--detail mode).
#[derive(Clone)]
pub struct ExpertMrDetail {
    /// Stable reference like "group/project!123".
    pub mr_ref: String,
    /// MR title.
    pub title: String,
    /// "R", "A", or "A+R"
    pub role: String,
    /// DiffNote review comments the user left on this MR.
    pub note_count: u32,
    /// Most recent activity on this MR attributable to the user (epoch ms).
    pub last_activity_ms: i64,
}
|
|
|
|
// --- Workload ---
|
|
|
|
/// Workload-mode result: what a user currently has on their plate.
/// Each list has a paired `_truncated` flag set when more rows matched
/// than the requested limit.
pub struct WorkloadResult {
    pub username: String,
    /// Open issues assigned to the user.
    pub assigned_issues: Vec<WorkloadIssue>,
    /// MRs the user authored.
    pub authored_mrs: Vec<WorkloadMr>,
    /// MRs the user is reviewing.
    pub reviewing_mrs: Vec<WorkloadMr>,
    /// Discussions awaiting resolution that involve the user.
    pub unresolved_discussions: Vec<WorkloadDiscussion>,
    pub assigned_issues_truncated: bool,
    pub authored_mrs_truncated: bool,
    pub reviewing_mrs_truncated: bool,
    pub unresolved_discussions_truncated: bool,
}
|
|
|
|
/// One open issue row in workload mode.
pub struct WorkloadIssue {
    /// Project-local issue number.
    pub iid: i64,
    /// Canonical reference: `group/project#iid`
    pub ref_: String,
    pub title: String,
    /// `path_with_namespace` of the owning project.
    pub project_path: String,
    /// Last update timestamp (epoch ms; compared against --since in SQL).
    pub updated_at: i64,
}
|
|
|
|
/// One MR row in workload mode (used for both authored and reviewing lists).
pub struct WorkloadMr {
    /// Project-local MR number.
    pub iid: i64,
    /// Canonical reference: `group/project!iid`
    pub ref_: String,
    pub title: String,
    /// Whether the MR is marked as a draft.
    pub draft: bool,
    /// `path_with_namespace` of the owning project.
    pub project_path: String,
    /// MR author username, when recorded.
    pub author_username: Option<String>,
    /// Last update timestamp (epoch ms).
    pub updated_at: i64,
}
|
|
|
|
/// One unresolved discussion row in workload mode.
pub struct WorkloadDiscussion {
    /// Entity kind the discussion hangs off (e.g. merge request vs issue).
    pub entity_type: String,
    /// Project-local number of that entity.
    pub entity_iid: i64,
    /// Canonical reference: `group/project!iid` or `group/project#iid`
    pub ref_: String,
    pub entity_title: String,
    pub project_path: String,
    /// Timestamp of the newest note in the discussion (epoch ms).
    pub last_note_at: i64,
}
|
|
|
|
// --- Reviews ---
|
|
|
|
/// Reviews-mode result: a user's review activity summary.
pub struct ReviewsResult {
    pub username: String,
    /// Total diff notes counted for the user in the window.
    pub total_diffnotes: u32,
    /// How many of those notes carry a category.
    pub categorized_count: u32,
    /// Distinct MRs the user reviewed.
    pub mrs_reviewed: u32,
    /// Per-category breakdown.
    pub categories: Vec<ReviewCategory>,
}
|
|
|
|
/// One category bucket in the reviews breakdown.
pub struct ReviewCategory {
    pub name: String,
    /// Notes in this category.
    pub count: u32,
    /// Share of categorized notes — presumably count/categorized_count*100;
    /// TODO confirm against query_reviews (not visible here).
    pub percentage: f64,
}
|
|
|
|
// --- Active ---
|
|
|
|
/// Active-mode result: unresolved discussions with recent activity.
pub struct ActiveResult {
    pub discussions: Vec<ActiveDiscussion>,
    /// Count of unresolved discussions *within the time window*, not total across all time.
    pub total_unresolved_in_window: u32,
    /// True when more discussions matched than the requested limit.
    pub truncated: bool,
}
|
|
|
|
/// One recently-active unresolved discussion.
pub struct ActiveDiscussion {
    pub discussion_id: i64,
    /// Entity kind the discussion hangs off (e.g. merge request vs issue).
    pub entity_type: String,
    /// Project-local number of that entity.
    pub entity_iid: i64,
    pub entity_title: String,
    pub project_path: String,
    /// Timestamp of the newest note in the discussion (epoch ms).
    pub last_note_at: i64,
    /// Notes in the discussion.
    pub note_count: u32,
    /// Participant usernames (possibly capped; see the paired flags below).
    pub participants: Vec<String>,
    /// Total participant count before any cap.
    pub participants_total: u32,
    /// True when `participants` was cut short.
    pub participants_truncated: bool,
}
|
|
|
|
// --- Overlap ---
|
|
|
|
/// Overlap-mode result: users who touched a path recently.
pub struct OverlapResult {
    /// The queried path in display form.
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    pub users: Vec<OverlapUser>,
    /// True when more users matched than the requested limit.
    pub truncated: bool,
}
|
|
|
|
/// One user in overlap mode.
pub struct OverlapUser {
    pub username: String,
    /// Touches attributable to authorship.
    pub author_touch_count: u32,
    /// Touches attributable to review activity.
    pub review_touch_count: u32,
    /// Overall touch count — presumably author + review combined; TODO
    /// confirm against query_overlap (not visible here).
    pub touch_count: u32,
    /// Most recent activity timestamp (epoch ms).
    pub last_seen_at: i64,
    /// Stable MR references like "group/project!123"
    pub mr_refs: Vec<String>,
    /// Count of distinct refs before the `MAX_MR_REFS_PER_USER` cap.
    pub mr_refs_total: u32,
    /// True when `mr_refs` was cut at `MAX_MR_REFS_PER_USER`.
    pub mr_refs_truncated: bool,
}
|
|
|
|
/// Maximum MR references to retain per user in output (shared across modes).
/// Applied after dedup + sort; the per-user `mr_refs_truncated` flag records
/// when this cap was hit.
const MAX_MR_REFS_PER_USER: usize = 50;
|
|
|
|
// ─── Entry Point ─────────────────────────────────────────────────────────────
|
|
|
|
/// Main entry point. Resolves mode + resolved inputs once, then dispatches.
|
|
pub fn run_who(config: &Config, args: &WhoArgs) -> Result<WhoRun> {
|
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
|
let conn = create_connection(&db_path)?;
|
|
|
|
let project_id = args
|
|
.project
|
|
.as_deref()
|
|
.map(|p| resolve_project(&conn, p))
|
|
.transpose()?;
|
|
|
|
let project_path = project_id
|
|
.map(|id| lookup_project_path(&conn, id))
|
|
.transpose()?;
|
|
|
|
let mode = resolve_mode(args)?;
|
|
validate_mode_flags(&mode, args)?;
|
|
|
|
// since_mode semantics:
|
|
// - expert/reviews/active/overlap: default window applies if args.since is None -> "default"
|
|
// - workload: no default window; args.since None => "none"
|
|
let since_mode_for_defaulted = if args.since.is_some() {
|
|
"explicit"
|
|
} else {
|
|
"default"
|
|
};
|
|
let since_mode_for_workload = if args.since.is_some() {
|
|
"explicit"
|
|
} else {
|
|
"none"
|
|
};
|
|
|
|
match mode {
|
|
WhoMode::Expert { path } => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "6m")?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_expert(
|
|
&conn,
|
|
&path,
|
|
project_id,
|
|
since_ms,
|
|
limit,
|
|
&config.scoring,
|
|
args.detail,
|
|
)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "expert".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Expert(result),
|
|
})
|
|
}
|
|
WhoMode::Workload { username } => {
|
|
let since_ms = args
|
|
.since
|
|
.as_deref()
|
|
.map(resolve_since_required)
|
|
.transpose()?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_workload(&conn, username, project_id, since_ms, limit)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "workload".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms,
|
|
since_iso: since_ms.map(ms_to_iso),
|
|
since_mode: since_mode_for_workload.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Workload(result),
|
|
})
|
|
}
|
|
WhoMode::Reviews { username } => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "6m")?;
|
|
let result = query_reviews(&conn, username, project_id, since_ms)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "reviews".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Reviews(result),
|
|
})
|
|
}
|
|
WhoMode::Active => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "7d")?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_active(&conn, project_id, since_ms, limit)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "active".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Active(result),
|
|
})
|
|
}
|
|
WhoMode::Overlap { path } => {
|
|
let since_ms = resolve_since(args.since.as_deref(), "30d")?;
|
|
let limit = usize::from(args.limit);
|
|
let result = query_overlap(&conn, &path, project_id, since_ms, limit)?;
|
|
Ok(WhoRun {
|
|
resolved_input: WhoResolvedInput {
|
|
mode: "overlap".to_string(),
|
|
project_id,
|
|
project_path,
|
|
since_ms: Some(since_ms),
|
|
since_iso: Some(ms_to_iso(since_ms)),
|
|
since_mode: since_mode_for_defaulted.to_string(),
|
|
limit: args.limit,
|
|
},
|
|
result: WhoResult::Overlap(result),
|
|
})
|
|
}
|
|
}
|
|
}
|
|
|
|
fn validate_mode_flags(mode: &WhoMode<'_>, args: &WhoArgs) -> Result<()> {
|
|
if args.detail && !matches!(mode, WhoMode::Expert { .. }) {
|
|
return Err(LoreError::Other(
|
|
"--detail is only supported in expert mode (`lore who --path <path>` or `lore who <path/with/slash>`).".to_string(),
|
|
));
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
|
|
|
/// Look up the project path for a resolved project ID.
|
|
fn lookup_project_path(conn: &Connection, project_id: i64) -> Result<String> {
|
|
conn.query_row(
|
|
"SELECT path_with_namespace FROM projects WHERE id = ?1",
|
|
rusqlite::params![project_id],
|
|
|row| row.get(0),
|
|
)
|
|
.map_err(|e| LoreError::Other(format!("Failed to look up project path: {e}")))
|
|
}
|
|
|
|
/// Parse --since with a default fallback.
|
|
fn resolve_since(input: Option<&str>, default: &str) -> Result<i64> {
|
|
let s = input.unwrap_or(default);
|
|
parse_since(s).ok_or_else(|| {
|
|
LoreError::Other(format!(
|
|
"Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
|
))
|
|
})
|
|
}
|
|
|
|
/// Parse --since without a default (returns error if invalid).
|
|
fn resolve_since_required(input: &str) -> Result<i64> {
|
|
parse_since(input).ok_or_else(|| {
|
|
LoreError::Other(format!(
|
|
"Invalid --since value: '{input}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
|
))
|
|
})
|
|
}
|
|
|
|
// ─── Path Query Construction ─────────────────────────────────────────────────
|
|
|
|
/// Describes how to match a user-supplied path in SQL.
#[derive(Debug)]
struct PathQuery {
    /// The parameter value to bind.
    /// For prefix matches this is already LIKE-escaped and ends with "/%".
    value: String,
    /// If true: use `LIKE value ESCAPE '\'`. If false: use `= value`.
    is_prefix: bool,
}
|
|
|
|
/// Build a path query from a user-supplied path, with project-scoped DB probes.
///
/// Rules:
/// - If the path ends with `/`, it's a directory prefix -> `escaped_path/%` (LIKE)
/// - If the path is a root path (no `/`) and does NOT end with `/`, treat as exact (=)
/// - Else if the last path segment contains `.`, heuristic suggests file (=)
/// - Two-way DB probe (project-scoped): when heuristics are ambiguous,
///   probe the DB to resolve.
/// - Otherwise, treat as directory prefix -> `escaped_path/%` (LIKE)
///
/// Probe order: exact match, then directory prefix, then filename suffix
/// ("login.rs" -> "src/auth/login.rs"). A suffix hit with multiple candidates
/// is a hard `LoreError::Ambiguous` error, not a fallback.
fn build_path_query(conn: &Connection, path: &str, project_id: Option<i64>) -> Result<PathQuery> {
    let trimmed = path.trim_end_matches('/');
    let last_segment = trimmed.rsplit('/').next().unwrap_or(trimmed);
    let is_root = !trimmed.contains('/');
    // A trailing '/' on the raw input is an explicit "this is a directory".
    let forced_dir = path.ends_with('/');
    // Heuristic is now only a fallback; probes decide first when ambiguous.
    let looks_like_file = !forced_dir && (is_root || last_segment.contains('.'));

    // Probe 1: exact file exists in DiffNotes OR mr_file_changes (project-scoped)
    let exact_exists = conn
        .query_row(
            "SELECT 1 FROM notes
             WHERE note_type = 'DiffNote'
               AND is_system = 0
               AND position_new_path = ?1
               AND (?2 IS NULL OR project_id = ?2)
             LIMIT 1",
            rusqlite::params![trimmed, project_id],
            |_| Ok(()),
        )
        .is_ok()
        || conn
            .query_row(
                "SELECT 1 FROM mr_file_changes
                 WHERE new_path = ?1
                   AND (?2 IS NULL OR project_id = ?2)
                 LIMIT 1",
                rusqlite::params![trimmed, project_id],
                |_| Ok(()),
            )
            .is_ok();

    // Probe 2: directory prefix exists in DiffNotes OR mr_file_changes (project-scoped)
    // Skipped when the exact probe already hit (exact wins) or the user forced a dir.
    let prefix_exists = if !forced_dir && !exact_exists {
        let escaped = escape_like(trimmed);
        let pat = format!("{escaped}/%");
        conn.query_row(
            "SELECT 1 FROM notes
             WHERE note_type = 'DiffNote'
               AND is_system = 0
               AND position_new_path LIKE ?1 ESCAPE '\\'
               AND (?2 IS NULL OR project_id = ?2)
             LIMIT 1",
            rusqlite::params![pat, project_id],
            |_| Ok(()),
        )
        .is_ok()
            || conn
                .query_row(
                    "SELECT 1 FROM mr_file_changes
                     WHERE new_path LIKE ?1 ESCAPE '\\'
                       AND (?2 IS NULL OR project_id = ?2)
                     LIMIT 1",
                    rusqlite::params![pat, project_id],
                    |_| Ok(()),
                )
                .is_ok()
    } else {
        false
    };

    // Probe 3: suffix match — user typed a bare filename or partial path that
    // doesn't exist as-is. Search for full paths ending with /input (or equal to input).
    // This handles "login.rs" matching "src/auth/login.rs".
    let suffix_resolved = if !forced_dir && !exact_exists && !prefix_exists && looks_like_file {
        suffix_probe(conn, trimmed, project_id)?
    } else {
        SuffixResult::NotAttempted
    };

    match suffix_resolved {
        // Exactly one candidate: upgrade the bare filename to the full path.
        SuffixResult::Unique(full_path) => Ok(PathQuery {
            value: full_path,
            is_prefix: false,
        }),
        SuffixResult::Ambiguous(candidates) => {
            let list = candidates
                .iter()
                .map(|p| format!("  {p}"))
                .collect::<Vec<_>>()
                .join("\n");
            Err(LoreError::Ambiguous(format!(
                "'{trimmed}' matches multiple paths. Use the full path or -p to scope:\n{list}"
            )))
        }
        SuffixResult::NotAttempted | SuffixResult::NoMatch => {
            // Original logic: exact > prefix > heuristic
            let is_file = if forced_dir {
                false
            } else if exact_exists {
                true
            } else if prefix_exists {
                false
            } else {
                looks_like_file
            };

            if is_file {
                Ok(PathQuery {
                    value: trimmed.to_string(),
                    is_prefix: false,
                })
            } else {
                let escaped = escape_like(trimmed);
                Ok(PathQuery {
                    value: format!("{escaped}/%"),
                    is_prefix: true,
                })
            }
        }
    }
}
|
|
|
|
/// Result of a suffix probe against the DB (see `suffix_probe`).
enum SuffixResult {
    /// Suffix probe was not attempted (conditions not met).
    NotAttempted,
    /// No paths matched the suffix.
    NoMatch,
    /// Exactly one distinct path matched — auto-resolve.
    Unique(String),
    /// Multiple distinct paths matched — user must disambiguate.
    Ambiguous(Vec<String>),
}
|
|
|
|
/// Probe both notes and mr_file_changes for paths ending with the given suffix.
/// Returns up to 11 distinct candidates (enough to detect ambiguity + show a useful list).
fn suffix_probe(conn: &Connection, suffix: &str, project_id: Option<i64>) -> Result<SuffixResult> {
    let escaped = escape_like(suffix);
    // "%/suffix" catches paths *ending* in the segment; the "= ?2" arms below
    // additionally catch a stored path exactly equal to the input.
    let suffix_pat = format!("%/{escaped}");

    let mut stmt = conn.prepare_cached(
        "SELECT DISTINCT full_path FROM (
            SELECT position_new_path AS full_path FROM notes
            WHERE note_type = 'DiffNote'
              AND is_system = 0
              AND (position_new_path LIKE ?1 ESCAPE '\\' OR position_new_path = ?2)
              AND (?3 IS NULL OR project_id = ?3)
            UNION
            SELECT new_path AS full_path FROM mr_file_changes
            WHERE (new_path LIKE ?1 ESCAPE '\\' OR new_path = ?2)
              AND (?3 IS NULL OR project_id = ?3)
         )
         ORDER BY full_path
         LIMIT 11",
    )?;

    let candidates: Vec<String> = stmt
        .query_map(rusqlite::params![suffix_pat, suffix, project_id], |row| {
            row.get(0)
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    match candidates.len() {
        0 => Ok(SuffixResult::NoMatch),
        // unwrap() cannot fail: len() == 1 guarantees a first element.
        1 => Ok(SuffixResult::Unique(candidates.into_iter().next().unwrap())),
        _ => Ok(SuffixResult::Ambiguous(candidates)),
    }
}
|
|
|
|
/// Escape LIKE metacharacters. All queries using this must include `ESCAPE '\'`.
fn escape_like(input: &str) -> String {
    // Single pass: prefix each of '\', '%', '_' with a backslash.
    let mut out = String::with_capacity(input.len());
    for ch in input.chars() {
        if matches!(ch, '\\' | '%' | '_') {
            out.push('\\');
        }
        out.push(ch);
    }
    out
}
|
|
|
|
// ─── Query: Expert Mode ─────────────────────────────────────────────────────
|
|
|
|
/// Rank users by expertise on a path.
///
/// Score per user = author MRs * author_weight + reviewer MRs * reviewer_weight
/// + review notes * note_bonus, with the weights taken from the config
/// `scoring` section and interpolated into the SQL text (they are numeric
/// config values, not user-controlled strings).
#[allow(clippy::too_many_arguments)]
fn query_expert(
    conn: &Connection,
    path: &str,
    project_id: Option<i64>,
    since_ms: i64,
    limit: usize,
    scoring: &ScoringConfig,
    detail: bool,
) -> Result<ExpertResult> {
    let pq = build_path_query(conn, path, project_id)?;
    // Fetch limit+1 rows so truncation is detectable without a COUNT query.
    let limit_plus_one = (limit + 1) as i64;

    // Build SQL with 4 signal sources (UNION ALL), deduplicating via COUNT(DISTINCT mr_id):
    // 1. DiffNote reviewer — left inline review comments (not self-review)
    // 2. DiffNote MR author — authored MR that has DiffNotes on this path
    // 3. File-change author — authored MR that touched this path (mr_file_changes)
    // 4. File-change reviewer — assigned reviewer on MR that touched this path
    // Each branch now JOINs projects to produce mr_ref for aggregation.
    let path_op = if pq.is_prefix {
        "LIKE ?1 ESCAPE '\\'"
    } else {
        "= ?1"
    };
    let author_w = scoring.author_weight;
    let reviewer_w = scoring.reviewer_weight;
    let note_b = scoring.note_bonus;
    let sql = format!(
        "
        WITH signals AS (
            -- 1. DiffNote reviewer (individual notes for note_cnt)
            SELECT
                n.author_username AS username,
                'diffnote_reviewer' AS signal,
                m.id AS mr_id,
                n.id AS note_id,
                n.created_at AS seen_at,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref
            FROM notes n
            JOIN discussions d ON n.discussion_id = d.id
            JOIN merge_requests m ON d.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND n.author_username IS NOT NULL
              AND (m.author_username IS NULL OR n.author_username != m.author_username)
              AND m.state IN ('opened','merged')
              AND n.position_new_path {path_op}
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)

            UNION ALL

            -- 2. DiffNote MR author
            SELECT
                m.author_username AS username,
                'diffnote_author' AS signal,
                m.id AS mr_id,
                NULL AS note_id,
                MAX(n.created_at) AS seen_at,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref
            FROM merge_requests m
            JOIN discussions d ON d.merge_request_id = m.id
            JOIN notes n ON n.discussion_id = d.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND m.author_username IS NOT NULL
              AND m.state IN ('opened','merged')
              AND n.position_new_path {path_op}
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
            GROUP BY m.author_username, m.id

            UNION ALL

            -- 3. MR author via file changes
            SELECT
                m.author_username AS username,
                'file_author' AS signal,
                m.id AS mr_id,
                NULL AS note_id,
                m.updated_at AS seen_at,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE m.author_username IS NOT NULL
              AND m.state IN ('opened','merged')
              AND fc.new_path {path_op}
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)

            UNION ALL

            -- 4. MR reviewer via file changes + mr_reviewers
            SELECT
                r.username AS username,
                'file_reviewer' AS signal,
                m.id AS mr_id,
                NULL AS note_id,
                m.updated_at AS seen_at,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            JOIN mr_reviewers r ON r.merge_request_id = m.id
            WHERE r.username IS NOT NULL
              AND (m.author_username IS NULL OR r.username != m.author_username)
              AND m.state IN ('opened','merged')
              AND fc.new_path {path_op}
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
        )
        SELECT
            username,
            COUNT(DISTINCT CASE WHEN signal IN ('diffnote_reviewer', 'file_reviewer')
                THEN mr_id END) AS review_mr_count,
            COUNT(CASE WHEN signal = 'diffnote_reviewer' THEN note_id END) AS review_note_count,
            COUNT(DISTINCT CASE WHEN signal IN ('diffnote_author', 'file_author')
                THEN mr_id END) AS author_mr_count,
            MAX(seen_at) AS last_seen_at,
            (
                (COUNT(DISTINCT CASE WHEN signal IN ('diffnote_reviewer', 'file_reviewer')
                    THEN mr_id END) * {reviewer_w}) +
                (COUNT(DISTINCT CASE WHEN signal IN ('diffnote_author', 'file_author')
                    THEN mr_id END) * {author_w}) +
                (COUNT(CASE WHEN signal = 'diffnote_reviewer' THEN note_id END) * {note_b})
            ) AS score,
            GROUP_CONCAT(DISTINCT mr_ref) AS mr_refs_csv
        FROM signals
        GROUP BY username
        ORDER BY score DESC, last_seen_at DESC, username ASC
        LIMIT ?4
        "
    );

    let mut stmt = conn.prepare_cached(&sql)?;

    let experts: Vec<Expert> = stmt
        .query_map(
            rusqlite::params![pq.value, since_ms, project_id, limit_plus_one],
            |row| {
                // Column 6 is GROUP_CONCAT's comma-joined distinct refs;
                // NULL when the user has no refs at all.
                let mr_refs_csv: Option<String> = row.get(6)?;
                let mut mr_refs: Vec<String> = mr_refs_csv
                    .as_deref()
                    .map(|csv| csv.split(',').map(|s| s.trim().to_string()).collect())
                    .unwrap_or_default();
                // Sort for stable, diff-friendly output, then cap per user.
                mr_refs.sort();
                let mr_refs_total = mr_refs.len() as u32;
                let mr_refs_truncated = mr_refs.len() > MAX_MR_REFS_PER_USER;
                if mr_refs_truncated {
                    mr_refs.truncate(MAX_MR_REFS_PER_USER);
                }
                Ok(Expert {
                    username: row.get(0)?,
                    review_mr_count: row.get(1)?,
                    review_note_count: row.get(2)?,
                    author_mr_count: row.get(3)?,
                    last_seen_ms: row.get(4)?,
                    score: row.get(5)?,
                    mr_refs,
                    mr_refs_total,
                    mr_refs_truncated,
                    details: None,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // We fetched limit+1 rows; a full fetch means there were more.
    let truncated = experts.len() > limit;
    let mut experts: Vec<Expert> = experts.into_iter().take(limit).collect();

    // Populate per-MR detail when --detail is requested
    if detail && !experts.is_empty() {
        let details_map = query_expert_details(conn, &pq, &experts, since_ms, project_id)?;
        for expert in &mut experts {
            expert.details = details_map.get(&expert.username).cloned();
        }
    }

    Ok(ExpertResult {
        path_query: if pq.is_prefix {
            // Use raw input (unescaped) for display — pq.value has LIKE escaping.
            path.trim_end_matches('/').to_string()
        } else {
            // For exact matches (including suffix-resolved), show the resolved path.
            pq.value.clone()
        },
        path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
        experts,
        truncated,
    })
}
|
|
|
|
/// Query per-MR detail for a set of experts. Returns a map of username -> Vec<ExpertMrDetail>.
///
/// Uses the same four signal branches as `query_expert`, restricted to the
/// given usernames via a dynamically-sized IN clause. The caller guarantees
/// `experts` is non-empty (an empty list would produce `IN ()`).
fn query_expert_details(
    conn: &Connection,
    pq: &PathQuery,
    experts: &[Expert],
    since_ms: i64,
    project_id: Option<i64>,
) -> Result<HashMap<String, Vec<ExpertMrDetail>>> {
    let path_op = if pq.is_prefix {
        "LIKE ?1 ESCAPE '\\'"
    } else {
        "= ?1"
    };

    // Build IN clause for usernames: numbered placeholders starting at ?4
    // (?1..?3 are path / since / project).
    let placeholders: Vec<String> = experts
        .iter()
        .enumerate()
        .map(|(i, _)| format!("?{}", i + 4))
        .collect();
    let in_clause = placeholders.join(",");

    let sql = format!(
        "
        WITH signals AS (
            -- 1. DiffNote reviewer
            SELECT
                n.author_username AS username,
                'reviewer' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                COUNT(*) AS note_count,
                MAX(n.created_at) AS last_activity
            FROM notes n
            JOIN discussions d ON n.discussion_id = d.id
            JOIN merge_requests m ON d.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND n.author_username IS NOT NULL
              AND (m.author_username IS NULL OR n.author_username != m.author_username)
              AND m.state IN ('opened','merged')
              AND n.position_new_path {path_op}
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
              AND n.author_username IN ({in_clause})
            GROUP BY n.author_username, m.id

            UNION ALL

            -- 2. DiffNote MR author
            SELECT
                m.author_username AS username,
                'author' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                MAX(n.created_at) AS last_activity
            FROM merge_requests m
            JOIN discussions d ON d.merge_request_id = m.id
            JOIN notes n ON n.discussion_id = d.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND m.author_username IS NOT NULL
              AND m.state IN ('opened','merged')
              AND n.position_new_path {path_op}
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
              AND m.author_username IN ({in_clause})
            GROUP BY m.author_username, m.id

            UNION ALL

            -- 3. MR author via file changes
            SELECT
                m.author_username AS username,
                'author' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                m.updated_at AS last_activity
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE m.author_username IS NOT NULL
              AND m.state IN ('opened','merged')
              AND fc.new_path {path_op}
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
              AND m.author_username IN ({in_clause})

            UNION ALL

            -- 4. MR reviewer via file changes + mr_reviewers
            SELECT
                r.username AS username,
                'reviewer' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                m.updated_at AS last_activity
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            JOIN mr_reviewers r ON r.merge_request_id = m.id
            WHERE r.username IS NOT NULL
              AND (m.author_username IS NULL OR r.username != m.author_username)
              AND m.state IN ('opened','merged')
              AND fc.new_path {path_op}
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
              AND r.username IN ({in_clause})
        )
        SELECT
            username,
            mr_ref,
            title,
            GROUP_CONCAT(DISTINCT role) AS roles,
            SUM(note_count) AS total_notes,
            MAX(last_activity) AS last_activity
        FROM signals
        GROUP BY username, mr_ref
        ORDER BY username ASC, last_activity DESC
        "
    );

    // prepare() not prepare_cached(): the IN clause varies by expert count,
    // so the SQL shape changes per invocation and caching wastes memory.
    let mut stmt = conn.prepare(&sql)?;

    // Build params: ?1=path, ?2=since_ms, ?3=project_id, ?4..=usernames
    let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
    params.push(Box::new(pq.value.clone()));
    params.push(Box::new(since_ms));
    params.push(Box::new(project_id));
    for expert in experts {
        params.push(Box::new(expert.username.clone()));
    }
    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    // Row tuple: (username, mr_ref, title, roles_csv, note_count, last_activity).
    let rows: Vec<(String, String, String, String, u32, i64)> = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok((
                row.get(0)?,
                row.get(1)?,
                row.get(2)?,
                row.get::<_, String>(3)?,
                row.get(4)?,
                row.get(5)?,
            ))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    let mut map: HashMap<String, Vec<ExpertMrDetail>> = HashMap::new();
    for (username, mr_ref, title, roles_csv, note_count, last_activity) in rows {
        // roles_csv is GROUP_CONCAT over 'author'/'reviewer'; collapse to the
        // compact display codes used by ExpertMrDetail::role.
        let has_author = roles_csv.contains("author");
        let has_reviewer = roles_csv.contains("reviewer");
        let role = match (has_author, has_reviewer) {
            (true, true) => "A+R",
            (true, false) => "A",
            (false, true) => "R",
            _ => "?",
        }
        .to_string();
        map.entry(username).or_default().push(ExpertMrDetail {
            mr_ref,
            title,
            role,
            note_count,
            last_activity_ms: last_activity,
        });
    }

    Ok(map)
}
|
|
|
|
// ─── Query: Workload Mode ───────────────────────────────────────────────────
|
|
|
|
/// Gather the open-work snapshot for a single user across four dimensions:
/// assigned open issues, authored open MRs, open MRs they review, and
/// unresolved discussions they have participated in.
///
/// Unlike the other modes, `since_ms` is optional here: workload is about
/// "what is open now", so the time window only narrows results when the
/// caller supplies one. Each list is fetched with `LIMIT limit + 1` so
/// truncation can be detected without a separate COUNT query.
fn query_workload(
    conn: &Connection,
    username: &str,
    project_id: Option<i64>,
    since_ms: Option<i64>,
    limit: usize,
) -> Result<WorkloadResult> {
    // Fetch one extra row per list: if it arrives, the list was truncated.
    let limit_plus_one = (limit + 1) as i64;

    // Query 1: Open issues assigned to user.
    // ?2/?3 use the "(?n IS NULL OR col = ?n)" idiom so a single cached
    // statement serves both scoped and unscoped invocations.
    let issues_sql = "SELECT i.iid,
            (p.path_with_namespace || '#' || i.iid) AS ref,
            i.title, p.path_with_namespace, i.updated_at
        FROM issues i
        JOIN issue_assignees ia ON ia.issue_id = i.id
        JOIN projects p ON i.project_id = p.id
        WHERE ia.username = ?1
          AND i.state = 'opened'
          AND (?2 IS NULL OR i.project_id = ?2)
          AND (?3 IS NULL OR i.updated_at >= ?3)
        ORDER BY i.updated_at DESC
        LIMIT ?4";

    let mut stmt = conn.prepare_cached(issues_sql)?;
    let assigned_issues: Vec<WorkloadIssue> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadIssue {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    project_path: row.get(3)?,
                    updated_at: row.get(4)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 2: Open MRs authored by the user.
    let authored_sql = "SELECT m.iid,
            (p.path_with_namespace || '!' || m.iid) AS ref,
            m.title, m.draft, p.path_with_namespace, m.updated_at
        FROM merge_requests m
        JOIN projects p ON m.project_id = p.id
        WHERE m.author_username = ?1
          AND m.state = 'opened'
          AND (?2 IS NULL OR m.project_id = ?2)
          AND (?3 IS NULL OR m.updated_at >= ?3)
        ORDER BY m.updated_at DESC
        LIMIT ?4";
    let mut stmt = conn.prepare_cached(authored_sql)?;
    let authored_mrs: Vec<WorkloadMr> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadMr {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    // draft is stored as an integer flag; any nonzero means draft
                    draft: row.get::<_, i32>(3)? != 0,
                    project_path: row.get(4)?,
                    // Author column is irrelevant when the user IS the author.
                    author_username: None,
                    updated_at: row.get(5)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 3: Open MRs where the user is listed as a reviewer.
    // Here the author IS fetched so the output can show "by @author".
    let reviewing_sql = "SELECT m.iid,
            (p.path_with_namespace || '!' || m.iid) AS ref,
            m.title, m.draft, p.path_with_namespace,
            m.author_username, m.updated_at
        FROM merge_requests m
        JOIN mr_reviewers r ON r.merge_request_id = m.id
        JOIN projects p ON m.project_id = p.id
        WHERE r.username = ?1
          AND m.state = 'opened'
          AND (?2 IS NULL OR m.project_id = ?2)
          AND (?3 IS NULL OR m.updated_at >= ?3)
        ORDER BY m.updated_at DESC
        LIMIT ?4";
    let mut stmt = conn.prepare_cached(reviewing_sql)?;
    let reviewing_mrs: Vec<WorkloadMr> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadMr {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    draft: row.get::<_, i32>(3)? != 0,
                    project_path: row.get(4)?,
                    author_username: row.get(5)?,
                    updated_at: row.get(6)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 4: Unresolved discussions where the user wrote at least one
    // non-system note. The CASE picks '!' vs '#' so the ref matches the
    // noteable type (MR vs Issue); COALESCE handles the two LEFT JOINs.
    let disc_sql = "SELECT d.noteable_type,
            COALESCE(i.iid, m.iid) AS entity_iid,
            (p.path_with_namespace ||
             CASE WHEN d.noteable_type = 'MergeRequest' THEN '!' ELSE '#' END ||
             COALESCE(i.iid, m.iid)) AS ref,
            COALESCE(i.title, m.title) AS entity_title,
            p.path_with_namespace,
            d.last_note_at
        FROM discussions d
        JOIN projects p ON d.project_id = p.id
        LEFT JOIN issues i ON d.issue_id = i.id
        LEFT JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE d.resolvable = 1 AND d.resolved = 0
          AND EXISTS (
              SELECT 1 FROM notes n
              WHERE n.discussion_id = d.id
                AND n.author_username = ?1
                AND n.is_system = 0
          )
          AND (?2 IS NULL OR d.project_id = ?2)
          AND (?3 IS NULL OR d.last_note_at >= ?3)
        ORDER BY d.last_note_at DESC
        LIMIT ?4";

    let mut stmt = conn.prepare_cached(disc_sql)?;
    let unresolved_discussions: Vec<WorkloadDiscussion> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                let noteable_type: String = row.get(0)?;
                // Map the raw noteable type to the short display label.
                let entity_type = if noteable_type == "MergeRequest" {
                    "MR"
                } else {
                    "Issue"
                };
                Ok(WorkloadDiscussion {
                    entity_type: entity_type.to_string(),
                    entity_iid: row.get(1)?,
                    ref_: row.get(2)?,
                    entity_title: row.get(3)?,
                    project_path: row.get(4)?,
                    last_note_at: row.get(5)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Truncation detection: the extra (limit+1)-th row signals more data exists.
    let assigned_issues_truncated = assigned_issues.len() > limit;
    let authored_mrs_truncated = authored_mrs.len() > limit;
    let reviewing_mrs_truncated = reviewing_mrs.len() > limit;
    let unresolved_discussions_truncated = unresolved_discussions.len() > limit;

    // Drop the sentinel row(s) so callers only ever see `limit` items.
    let assigned_issues: Vec<WorkloadIssue> = assigned_issues.into_iter().take(limit).collect();
    let authored_mrs: Vec<WorkloadMr> = authored_mrs.into_iter().take(limit).collect();
    let reviewing_mrs: Vec<WorkloadMr> = reviewing_mrs.into_iter().take(limit).collect();
    let unresolved_discussions: Vec<WorkloadDiscussion> =
        unresolved_discussions.into_iter().take(limit).collect();

    Ok(WorkloadResult {
        username: username.to_string(),
        assigned_issues,
        authored_mrs,
        reviewing_mrs,
        unresolved_discussions,
        assigned_issues_truncated,
        authored_mrs_truncated,
        reviewing_mrs_truncated,
        unresolved_discussions_truncated,
    })
}
|
|
|
|
// ─── Query: Reviews Mode ────────────────────────────────────────────────────
|
|
|
|
/// Summarize a user's review behavior: total DiffNotes written on other
/// people's MRs, how many distinct MRs those cover, and a breakdown by
/// "conventional comment" category extracted from `**prefix**`-style bodies.
///
/// Self-reviews are excluded throughout via
/// `(m.author_username IS NULL OR m.author_username != ?1)`.
fn query_reviews(
    conn: &Connection,
    username: &str,
    project_id: Option<i64>,
    since_ms: i64,
) -> Result<ReviewsResult> {
    // Count total DiffNotes by this user on MRs they didn't author
    let total_sql = "SELECT COUNT(*) FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE n.author_username = ?1
          AND n.note_type = 'DiffNote'
          AND n.is_system = 0
          AND (m.author_username IS NULL OR m.author_username != ?1)
          AND n.created_at >= ?2
          AND (?3 IS NULL OR n.project_id = ?3)";

    let total_diffnotes: u32 = conn.query_row(
        total_sql,
        rusqlite::params![username, since_ms, project_id],
        |row| row.get(0),
    )?;

    // Count distinct MRs reviewed (same filters as above, distinct by MR id).
    let mrs_sql = "SELECT COUNT(DISTINCT m.id) FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE n.author_username = ?1
          AND n.note_type = 'DiffNote'
          AND n.is_system = 0
          AND (m.author_username IS NULL OR m.author_username != ?1)
          AND n.created_at >= ?2
          AND (?3 IS NULL OR n.project_id = ?3)";

    let mrs_reviewed: u32 = conn.query_row(
        mrs_sql,
        rusqlite::params![username, since_ms, project_id],
        |row| row.get(0),
    )?;

    // Extract prefixed categories: body starts with **prefix**.
    // SUBSTR(ltrim(body), 3, ...) skips the opening '**' (chars 1-2) and
    // INSTR(..., '**') - 1 takes everything up to the closing '**'.
    // The LIKE '**%**%' guard guarantees a closing '**' exists, so the
    // INSTR result is always >= 1 and the SUBSTR length never goes negative.
    let cat_sql = "SELECT
            SUBSTR(ltrim(n.body), 3, INSTR(SUBSTR(ltrim(n.body), 3), '**') - 1) AS raw_prefix,
            COUNT(*) AS cnt
        FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE n.author_username = ?1
          AND n.note_type = 'DiffNote'
          AND n.is_system = 0
          AND (m.author_username IS NULL OR m.author_username != ?1)
          AND ltrim(n.body) LIKE '**%**%'
          AND n.created_at >= ?2
          AND (?3 IS NULL OR n.project_id = ?3)
        GROUP BY raw_prefix
        ORDER BY cnt DESC";

    let mut stmt = conn.prepare_cached(cat_sql)?;
    let raw_categories: Vec<(String, u32)> = stmt
        .query_map(rusqlite::params![username, since_ms, project_id], |row| {
            Ok((row.get::<_, String>(0)?, row.get(1)?))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Normalize categories: lowercase, strip trailing colon/space,
    // merge nit/nitpick variants, merge (non-blocking) variants.
    // Done Rust-side because SQL GROUP BY sees raw (case/qualifier) variants.
    let mut merged: HashMap<String, u32> = HashMap::new();
    for (raw, count) in &raw_categories {
        let normalized = normalize_review_prefix(raw);
        // Prefixes that normalize to empty (e.g. pure parentheticals) are dropped.
        if !normalized.is_empty() {
            *merged.entry(normalized).or_insert(0) += count;
        }
    }

    // Sum of categorized notes only; percentages below are relative to this,
    // not to total_diffnotes, so they describe the mix of *categorized* notes.
    let categorized_count: u32 = merged.values().sum();

    let mut categories: Vec<ReviewCategory> = merged
        .into_iter()
        .map(|(name, count)| {
            let percentage = if categorized_count > 0 {
                f64::from(count) / f64::from(categorized_count) * 100.0
            } else {
                0.0
            };
            ReviewCategory {
                name,
                count,
                percentage,
            }
        })
        .collect();

    // HashMap iteration order is arbitrary; sort by count for stable display.
    categories.sort_by(|a, b| b.count.cmp(&a.count));

    Ok(ReviewsResult {
        username: username.to_string(),
        total_diffnotes,
        categorized_count,
        mrs_reviewed,
        categories,
    })
}
|
|
|
|
/// Normalize a raw review prefix like "Suggestion (non-blocking):" into "suggestion".
fn normalize_review_prefix(raw: &str) -> String {
    // Lowercase and strip surrounding whitespace plus any trailing colon.
    let lowered = raw.trim().trim_end_matches(':').trim().to_lowercase();

    // Drop a parenthetical qualifier such as "(non-blocking)" and anything after it.
    let base = match lowered.find('(') {
        Some(pos) => lowered[..pos].trim(),
        None => lowered.as_str(),
    };

    // Collapse the nit/nitpick spelling variants into the single "nit" category.
    if base == "nitpick" {
        "nit".to_string()
    } else {
        base.to_string()
    }
}
|
|
|
|
// ─── Query: Active Mode ─────────────────────────────────────────────────────
|
|
|
|
/// Find the most recently active unresolved discussions in the time window,
/// with note counts and (capped) participant lists, plus a total count of
/// unresolved discussions in the window.
///
/// Two static SQL variants (global vs project-scoped) are kept instead of a
/// single `format!`-built string so `prepare_cached` can reuse compiled
/// statements -- the SQL text is the cache key.
fn query_active(
    conn: &Connection,
    project_id: Option<i64>,
    since_ms: i64,
    limit: usize,
) -> Result<ActiveResult> {
    // Fetch one extra row so truncation can be detected without a COUNT.
    let limit_plus_one = (limit + 1) as i64;

    // Total unresolved count -- two static variants
    let total_sql_global = "SELECT COUNT(*) FROM discussions d
        WHERE d.resolvable = 1 AND d.resolved = 0
          AND d.last_note_at >= ?1";
    let total_sql_scoped = "SELECT COUNT(*) FROM discussions d
        WHERE d.resolvable = 1 AND d.resolved = 0
          AND d.last_note_at >= ?1
          AND d.project_id = ?2";

    let total_unresolved_in_window: u32 = match project_id {
        None => conn.query_row(total_sql_global, rusqlite::params![since_ms], |row| {
            row.get(0)
        })?,
        Some(pid) => conn.query_row(total_sql_scoped, rusqlite::params![since_ms, pid], |row| {
            row.get(0)
        })?,
    };

    // Active discussions with context -- two static SQL variants.
    // The `picked` CTE limits first so the note_count/participants CTEs only
    // aggregate over the rows that will actually be shown.
    // GROUP_CONCAT uses X'1F' (ASCII unit separator) as delimiter so usernames
    // containing commas can never corrupt the split on the Rust side.
    let sql_global = "
        WITH picked AS (
            SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
                   d.project_id, d.last_note_at
            FROM discussions d
            WHERE d.resolvable = 1 AND d.resolved = 0
              AND d.last_note_at >= ?1
            ORDER BY d.last_note_at DESC
            LIMIT ?2
        ),
        note_counts AS (
            SELECT
                n.discussion_id,
                COUNT(*) AS note_count
            FROM notes n
            JOIN picked p ON p.id = n.discussion_id
            WHERE n.is_system = 0
            GROUP BY n.discussion_id
        ),
        participants AS (
            SELECT
                x.discussion_id,
                GROUP_CONCAT(x.author_username, X'1F') AS participants
            FROM (
                SELECT DISTINCT n.discussion_id, n.author_username
                FROM notes n
                JOIN picked p ON p.id = n.discussion_id
                WHERE n.is_system = 0 AND n.author_username IS NOT NULL
            ) x
            GROUP BY x.discussion_id
        )
        SELECT
            p.id AS discussion_id,
            p.noteable_type,
            COALESCE(i.iid, m.iid) AS entity_iid,
            COALESCE(i.title, m.title) AS entity_title,
            proj.path_with_namespace,
            p.last_note_at,
            COALESCE(nc.note_count, 0) AS note_count,
            COALESCE(pa.participants, '') AS participants
        FROM picked p
        JOIN projects proj ON p.project_id = proj.id
        LEFT JOIN issues i ON p.issue_id = i.id
        LEFT JOIN merge_requests m ON p.merge_request_id = m.id
        LEFT JOIN note_counts nc ON nc.discussion_id = p.id
        LEFT JOIN participants pa ON pa.discussion_id = p.id
        ORDER BY p.last_note_at DESC
        ";

    // Identical to sql_global except for the extra project filter and the
    // shifted parameter indices (?2 = project, ?3 = limit).
    let sql_scoped = "
        WITH picked AS (
            SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
                   d.project_id, d.last_note_at
            FROM discussions d
            WHERE d.resolvable = 1 AND d.resolved = 0
              AND d.last_note_at >= ?1
              AND d.project_id = ?2
            ORDER BY d.last_note_at DESC
            LIMIT ?3
        ),
        note_counts AS (
            SELECT
                n.discussion_id,
                COUNT(*) AS note_count
            FROM notes n
            JOIN picked p ON p.id = n.discussion_id
            WHERE n.is_system = 0
            GROUP BY n.discussion_id
        ),
        participants AS (
            SELECT
                x.discussion_id,
                GROUP_CONCAT(x.author_username, X'1F') AS participants
            FROM (
                SELECT DISTINCT n.discussion_id, n.author_username
                FROM notes n
                JOIN picked p ON p.id = n.discussion_id
                WHERE n.is_system = 0 AND n.author_username IS NOT NULL
            ) x
            GROUP BY x.discussion_id
        )
        SELECT
            p.id AS discussion_id,
            p.noteable_type,
            COALESCE(i.iid, m.iid) AS entity_iid,
            COALESCE(i.title, m.title) AS entity_title,
            proj.path_with_namespace,
            p.last_note_at,
            COALESCE(nc.note_count, 0) AS note_count,
            COALESCE(pa.participants, '') AS participants
        FROM picked p
        JOIN projects proj ON p.project_id = proj.id
        LEFT JOIN issues i ON p.issue_id = i.id
        LEFT JOIN merge_requests m ON p.merge_request_id = m.id
        LEFT JOIN note_counts nc ON nc.discussion_id = p.id
        LEFT JOIN participants pa ON pa.discussion_id = p.id
        ORDER BY p.last_note_at DESC
        ";

    // Row-mapping closure shared between both variants
    let map_row = |row: &rusqlite::Row| -> rusqlite::Result<ActiveDiscussion> {
        let noteable_type: String = row.get(1)?;
        let entity_type = if noteable_type == "MergeRequest" {
            "MR"
        } else {
            "Issue"
        };
        let participants_csv: Option<String> = row.get(7)?;
        // Sort participants for deterministic output -- GROUP_CONCAT order is undefined
        let mut participants: Vec<String> = participants_csv
            .as_deref()
            .filter(|s| !s.is_empty())
            .map(|csv| csv.split('\x1F').map(String::from).collect())
            .unwrap_or_default();
        participants.sort();

        // Cap the per-discussion participant list; total is recorded
        // before truncation so callers can report "N of M".
        const MAX_PARTICIPANTS: usize = 50;
        let participants_total = participants.len() as u32;
        let participants_truncated = participants.len() > MAX_PARTICIPANTS;
        if participants_truncated {
            participants.truncate(MAX_PARTICIPANTS);
        }

        Ok(ActiveDiscussion {
            discussion_id: row.get(0)?,
            entity_type: entity_type.to_string(),
            entity_iid: row.get(2)?,
            entity_title: row.get(3)?,
            project_path: row.get(4)?,
            last_note_at: row.get(5)?,
            note_count: row.get(6)?,
            participants,
            participants_total,
            participants_truncated,
        })
    };

    // Select variant first, then prepare exactly one statement
    let discussions: Vec<ActiveDiscussion> = match project_id {
        None => {
            let mut stmt = conn.prepare_cached(sql_global)?;
            stmt.query_map(rusqlite::params![since_ms, limit_plus_one], &map_row)?
                .collect::<std::result::Result<Vec<_>, _>>()?
        }
        Some(pid) => {
            let mut stmt = conn.prepare_cached(sql_scoped)?;
            stmt.query_map(rusqlite::params![since_ms, pid, limit_plus_one], &map_row)?
                .collect::<std::result::Result<Vec<_>, _>>()?
        }
    };

    // The (limit+1)-th row, if present, only signals truncation; drop it.
    let truncated = discussions.len() > limit;
    let discussions: Vec<ActiveDiscussion> = discussions.into_iter().take(limit).collect();

    Ok(ActiveResult {
        discussions,
        total_unresolved_in_window,
        truncated,
    })
}
|
|
|
|
// ─── Query: Overlap Mode ────────────────────────────────────────────────────
|
|
|
|
/// Find users whose work overlaps a path: who authored or reviewed MRs
/// touching that file/directory in the time window, with per-user MR refs.
///
/// Four signal sources are UNION ALL'd (mirroring the expert-mode query):
///   1. DiffNote reviewers (commented on a diff of the path)
///   2. Authors of MRs that received DiffNotes on the path
///   3. MR authors via the mr_file_changes table
///   4. MR reviewers via mr_file_changes + mr_reviewers
/// Each source pre-aggregates per user in SQL; the per-role rows are then
/// merged per user on the Rust side.
fn query_overlap(
    conn: &Connection,
    path: &str,
    project_id: Option<i64>,
    since_ms: i64,
    limit: usize,
) -> Result<OverlapResult> {
    // Resolves the user-supplied path (suffix matching, prefix detection)
    // into an exact value or an escaped LIKE pattern.
    let pq = build_path_query(conn, path, project_id)?;

    // Build SQL with 4 signal sources, matching the expert query expansion.
    // Each row produces (username, role, mr_id, mr_ref, seen_at) for Rust-side accumulation.
    // Prefix queries use LIKE with ESCAPE so literal % / _ in pq.value stay literal.
    let path_op = if pq.is_prefix {
        "LIKE ?1 ESCAPE '\\'"
    } else {
        "= ?1"
    };
    // NOTE(review): GROUP_CONCAT here uses the default ',' separator and the
    // Rust side splits on ','. This assumes path_with_namespace never contains
    // a comma (true for GitLab namespaces) -- confirm if refs ever look odd.
    let sql = format!(
        "SELECT username, role, touch_count, last_seen_at, mr_refs FROM (
        -- 1. DiffNote reviewer
        SELECT
            n.author_username AS username,
            'reviewer' AS role,
            COUNT(DISTINCT m.id) AS touch_count,
            MAX(n.created_at) AS last_seen_at,
            GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
        FROM notes n
        JOIN discussions d ON n.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        JOIN projects p ON m.project_id = p.id
        WHERE n.note_type = 'DiffNote'
          AND n.position_new_path {path_op}
          AND n.is_system = 0
          AND n.author_username IS NOT NULL
          AND (m.author_username IS NULL OR n.author_username != m.author_username)
          AND m.state IN ('opened','merged')
          AND n.created_at >= ?2
          AND (?3 IS NULL OR n.project_id = ?3)
        GROUP BY n.author_username

        UNION ALL

        -- 2. DiffNote MR author
        SELECT
            m.author_username AS username,
            'author' AS role,
            COUNT(DISTINCT m.id) AS touch_count,
            MAX(n.created_at) AS last_seen_at,
            GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
        FROM merge_requests m
        JOIN discussions d ON d.merge_request_id = m.id
        JOIN notes n ON n.discussion_id = d.id
        JOIN projects p ON m.project_id = p.id
        WHERE n.note_type = 'DiffNote'
          AND n.position_new_path {path_op}
          AND n.is_system = 0
          AND m.state IN ('opened', 'merged')
          AND m.author_username IS NOT NULL
          AND n.created_at >= ?2
          AND (?3 IS NULL OR n.project_id = ?3)
        GROUP BY m.author_username

        UNION ALL

        -- 3. MR author via file changes
        SELECT
            m.author_username AS username,
            'author' AS role,
            COUNT(DISTINCT m.id) AS touch_count,
            MAX(m.updated_at) AS last_seen_at,
            GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
        FROM mr_file_changes fc
        JOIN merge_requests m ON fc.merge_request_id = m.id
        JOIN projects p ON m.project_id = p.id
        WHERE m.author_username IS NOT NULL
          AND m.state IN ('opened','merged')
          AND fc.new_path {path_op}
          AND m.updated_at >= ?2
          AND (?3 IS NULL OR fc.project_id = ?3)
        GROUP BY m.author_username

        UNION ALL

        -- 4. MR reviewer via file changes + mr_reviewers
        SELECT
            r.username AS username,
            'reviewer' AS role,
            COUNT(DISTINCT m.id) AS touch_count,
            MAX(m.updated_at) AS last_seen_at,
            GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
        FROM mr_file_changes fc
        JOIN merge_requests m ON fc.merge_request_id = m.id
        JOIN projects p ON m.project_id = p.id
        JOIN mr_reviewers r ON r.merge_request_id = m.id
        WHERE r.username IS NOT NULL
          AND (m.author_username IS NULL OR r.username != m.author_username)
          AND m.state IN ('opened','merged')
          AND fc.new_path {path_op}
          AND m.updated_at >= ?2
          AND (?3 IS NULL OR fc.project_id = ?3)
        GROUP BY r.username
        )"
    );

    // prepare_cached is fine here: path_op has only two possible values,
    // so at most two SQL shapes enter the cache.
    let mut stmt = conn.prepare_cached(&sql)?;
    let rows: Vec<(String, String, u32, i64, Option<String>)> = stmt
        .query_map(rusqlite::params![pq.value, since_ms, project_id], |row| {
            Ok((
                row.get(0)?,
                row.get(1)?,
                row.get(2)?,
                row.get(3)?,
                row.get(4)?,
            ))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Internal accumulator uses HashSet for MR refs from the start
    struct OverlapAcc {
        username: String,
        author_touch_count: u32,
        review_touch_count: u32,
        touch_count: u32,
        last_seen_at: i64,
        mr_refs: HashSet<String>,
    }

    // Merge the per-(user, role, source) rows into one accumulator per user.
    // Note: touch_count sums across sources/roles, so an MR where the user
    // appears in several sources contributes to each source's count, while
    // mr_refs stays deduplicated via the HashSet.
    let mut user_map: HashMap<String, OverlapAcc> = HashMap::new();
    for (username, role, count, last_seen, mr_refs_csv) in &rows {
        let mr_refs: Vec<String> = mr_refs_csv
            .as_deref()
            .map(|csv| csv.split(',').map(|s| s.trim().to_string()).collect())
            .unwrap_or_default();

        let entry = user_map
            .entry(username.clone())
            .or_insert_with(|| OverlapAcc {
                username: username.clone(),
                author_touch_count: 0,
                review_touch_count: 0,
                touch_count: 0,
                last_seen_at: 0,
                mr_refs: HashSet::new(),
            });
        entry.touch_count += count;
        // The SQL only ever emits 'author' or 'reviewer' roles.
        if role == "author" {
            entry.author_touch_count += count;
        } else {
            entry.review_touch_count += count;
        }
        if *last_seen > entry.last_seen_at {
            entry.last_seen_at = *last_seen;
        }
        for r in mr_refs {
            entry.mr_refs.insert(r);
        }
    }

    // Convert accumulators to output structs
    let mut users: Vec<OverlapUser> = user_map
        .into_values()
        .map(|a| {
            // Sorted for deterministic output; total captured before the cap.
            let mut mr_refs: Vec<String> = a.mr_refs.into_iter().collect();
            mr_refs.sort();
            let mr_refs_total = mr_refs.len() as u32;
            let mr_refs_truncated = mr_refs.len() > MAX_MR_REFS_PER_USER;
            if mr_refs_truncated {
                mr_refs.truncate(MAX_MR_REFS_PER_USER);
            }
            OverlapUser {
                username: a.username,
                author_touch_count: a.author_touch_count,
                review_touch_count: a.review_touch_count,
                touch_count: a.touch_count,
                last_seen_at: a.last_seen_at,
                mr_refs,
                mr_refs_total,
                mr_refs_truncated,
            }
        })
        .collect();

    // Stable sort with full tie-breakers for deterministic output
    users.sort_by(|a, b| {
        b.touch_count
            .cmp(&a.touch_count)
            .then_with(|| b.last_seen_at.cmp(&a.last_seen_at))
            .then_with(|| a.username.cmp(&b.username))
    });

    let truncated = users.len() > limit;
    users.truncate(limit);

    Ok(OverlapResult {
        // Echo the original (trailing-slash-stripped) input for prefix queries;
        // for exact matches echo the resolved value (suffix resolution may have
        // expanded it).
        path_query: if pq.is_prefix {
            path.trim_end_matches('/').to_string()
        } else {
            pq.value.clone()
        },
        path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
        users,
        truncated,
    })
}
|
|
|
|
/// Format overlap role for display: "A", "R", or "A+R".
|
|
fn format_overlap_role(user: &OverlapUser) -> &'static str {
|
|
match (user.author_touch_count > 0, user.review_touch_count > 0) {
|
|
(true, true) => "A+R",
|
|
(true, false) => "A",
|
|
(false, true) => "R",
|
|
(false, false) => "-",
|
|
}
|
|
}
|
|
|
|
// ─── Human Output ────────────────────────────────────────────────────────────
|
|
|
|
pub fn print_who_human(result: &WhoResult, project_path: Option<&str>) {
|
|
match result {
|
|
WhoResult::Expert(r) => print_expert_human(r, project_path),
|
|
WhoResult::Workload(r) => print_workload_human(r),
|
|
WhoResult::Reviews(r) => print_reviews_human(r),
|
|
WhoResult::Active(r) => print_active_human(r, project_path),
|
|
WhoResult::Overlap(r) => print_overlap_human(r, project_path),
|
|
}
|
|
}
|
|
|
|
/// Print a dim hint when results aggregate across all projects.
|
|
fn print_scope_hint(project_path: Option<&str>) {
|
|
if project_path.is_none() {
|
|
println!(
|
|
" {}",
|
|
style("(aggregated across all projects; use -p to scope)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
/// Render expert-mode results as a human table: one row per expert with
/// score components, up to five contributing MR refs inline, and optional
/// per-MR detail sub-rows when `--detail` populated `expert.details`.
fn print_expert_human(r: &ExpertResult, project_path: Option<&str>) {
    println!();
    println!("{}", style(format!("Experts for {}", r.path_query)).bold());
    // U+2500 box-drawing horizontal rule under the title.
    println!("{}", "\u{2500}".repeat(60));
    println!(
        " {}",
        style(format!(
            "(matching {} {})",
            r.path_match,
            if r.path_match == "exact" {
                "file"
            } else {
                "directory prefix"
            }
        ))
        .dim()
    );
    print_scope_hint(project_path);
    println!();

    if r.experts.is_empty() {
        println!(" {}", style("No experts found for this path.").dim());
        println!();
        return;
    }

    // Header row; widths must mirror the data-row format string below.
    println!(
        " {:<16} {:>6} {:>12} {:>6} {:>12} {} {}",
        style("Username").bold(),
        style("Score").bold(),
        style("Reviewed(MRs)").bold(),
        style("Notes").bold(),
        style("Authored(MRs)").bold(),
        style("Last Seen").bold(),
        style("MR Refs").bold(),
    );

    for expert in &r.experts {
        // Zero counts display as "-" to keep the table scannable.
        let reviews = if expert.review_mr_count > 0 {
            expert.review_mr_count.to_string()
        } else {
            "-".to_string()
        };
        let notes = if expert.review_note_count > 0 {
            expert.review_note_count.to_string()
        } else {
            "-".to_string()
        };
        let authored = if expert.author_mr_count > 0 {
            expert.author_mr_count.to_string()
        } else {
            "-".to_string()
        };
        // Show at most 5 refs inline; the overflow count uses mr_refs_total
        // (the pre-truncation total) rather than the capped list's length.
        let mr_str = expert
            .mr_refs
            .iter()
            .take(5)
            .cloned()
            .collect::<Vec<_>>()
            .join(", ");
        let overflow = if expert.mr_refs_total > 5 {
            format!(" +{}", expert.mr_refs_total - 5)
        } else {
            String::new()
        };
        println!(
            " {:<16} {:>6} {:>12} {:>6} {:>12} {:<12}{}{}",
            style(format!("@{}", expert.username)).cyan(),
            expert.score,
            reviews,
            notes,
            authored,
            format_relative_time(expert.last_seen_ms),
            // Leading space only when there are refs, so empty stays clean.
            if mr_str.is_empty() {
                String::new()
            } else {
                format!(" {mr_str}")
            },
            overflow,
        );

        // Print detail sub-rows when populated
        if let Some(details) = &expert.details {
            // Display cap is independent of the data cap (MAX_MR_REFS_PER_USER).
            const MAX_DETAIL_DISPLAY: usize = 10;
            for d in details.iter().take(MAX_DETAIL_DISPLAY) {
                let notes_str = if d.note_count > 0 {
                    format!("{} notes", d.note_count)
                } else {
                    String::new()
                };
                println!(
                    " {:<3} {:<30} {:>30} {:>10} {}",
                    style(&d.role).dim(),
                    d.mr_ref,
                    truncate_str(&format!("\"{}\"", d.title), 30),
                    notes_str,
                    style(format_relative_time(d.last_activity_ms)).dim(),
                );
            }
            if details.len() > MAX_DETAIL_DISPLAY {
                println!(
                    " {}",
                    style(format!("+{} more", details.len() - MAX_DETAIL_DISPLAY)).dim()
                );
            }
        }
    }
    if r.truncated {
        println!(
            " {}",
            style("(showing first -n; rerun with a higher --limit)").dim()
        );
    }
    println!();
}
|
|
|
|
fn print_workload_human(r: &WorkloadResult) {
|
|
println!();
|
|
println!(
|
|
"{}",
|
|
style(format!("@{} -- Workload Summary", r.username)).bold()
|
|
);
|
|
println!("{}", "\u{2500}".repeat(60));
|
|
|
|
if !r.assigned_issues.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Assigned Issues").bold(),
|
|
r.assigned_issues.len()
|
|
);
|
|
for item in &r.assigned_issues {
|
|
println!(
|
|
" {} {} {}",
|
|
style(&item.ref_).cyan(),
|
|
truncate_str(&item.title, 40),
|
|
style(format_relative_time(item.updated_at)).dim(),
|
|
);
|
|
}
|
|
if r.assigned_issues_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if !r.authored_mrs.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Authored MRs").bold(),
|
|
r.authored_mrs.len()
|
|
);
|
|
for mr in &r.authored_mrs {
|
|
let draft = if mr.draft { " [draft]" } else { "" };
|
|
println!(
|
|
" {} {}{} {}",
|
|
style(&mr.ref_).cyan(),
|
|
truncate_str(&mr.title, 35),
|
|
style(draft).dim(),
|
|
style(format_relative_time(mr.updated_at)).dim(),
|
|
);
|
|
}
|
|
if r.authored_mrs_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if !r.reviewing_mrs.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Reviewing MRs").bold(),
|
|
r.reviewing_mrs.len()
|
|
);
|
|
for mr in &r.reviewing_mrs {
|
|
let author = mr
|
|
.author_username
|
|
.as_deref()
|
|
.map(|a| format!(" by @{a}"))
|
|
.unwrap_or_default();
|
|
println!(
|
|
" {} {}{} {}",
|
|
style(&mr.ref_).cyan(),
|
|
truncate_str(&mr.title, 30),
|
|
style(author).dim(),
|
|
style(format_relative_time(mr.updated_at)).dim(),
|
|
);
|
|
}
|
|
if r.reviewing_mrs_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if !r.unresolved_discussions.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Unresolved Discussions").bold(),
|
|
r.unresolved_discussions.len()
|
|
);
|
|
for disc in &r.unresolved_discussions {
|
|
println!(
|
|
" {} {} {} {}",
|
|
style(&disc.entity_type).dim(),
|
|
style(&disc.ref_).cyan(),
|
|
truncate_str(&disc.entity_title, 35),
|
|
style(format_relative_time(disc.last_note_at)).dim(),
|
|
);
|
|
}
|
|
if r.unresolved_discussions_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if r.assigned_issues.is_empty()
|
|
&& r.authored_mrs.is_empty()
|
|
&& r.reviewing_mrs.is_empty()
|
|
&& r.unresolved_discussions.is_empty()
|
|
{
|
|
println!();
|
|
println!(
|
|
" {}",
|
|
style("No open work items found for this user.").dim()
|
|
);
|
|
}
|
|
|
|
println!();
|
|
}
|
|
|
|
/// Render reviews-mode results: a summary line (total DiffNotes, MRs
/// reviewed, categorized count), a category breakdown table, and a note
/// about notes that lacked the **prefix** convention.
fn print_reviews_human(r: &ReviewsResult) {
    println!();
    println!(
        "{}",
        style(format!("@{} -- Review Patterns", r.username)).bold()
    );
    // U+2500 box-drawing horizontal rule under the title.
    println!("{}", "\u{2500}".repeat(60));
    println!();

    if r.total_diffnotes == 0 {
        println!(
            " {}",
            style("No review comments found for this user.").dim()
        );
        println!();
        return;
    }

    println!(
        " {} DiffNotes across {} MRs ({} categorized)",
        style(r.total_diffnotes).bold(),
        style(r.mrs_reviewed).bold(),
        style(r.categorized_count).bold(),
    );
    println!();

    if !r.categories.is_empty() {
        // Header row; widths mirror the data-row format below.
        println!(
            " {:<16} {:>6} {:>6}",
            style("Category").bold(),
            style("Count").bold(),
            style("%").bold(),
        );

        for cat in &r.categories {
            // Percentage is relative to categorized notes (see query_reviews).
            println!(
                " {:<16} {:>6} {:>5.1}%",
                style(&cat.name).cyan(),
                cat.count,
                cat.percentage,
            );
        }
    }

    // Safe subtraction: categorized notes are a subset of total DiffNotes
    // (same SQL filters plus an extra LIKE guard).
    let uncategorized = r.total_diffnotes - r.categorized_count;
    if uncategorized > 0 {
        println!();
        println!(
            " {} {} uncategorized (no **prefix** convention)",
            style("Note:").dim(),
            uncategorized,
        );
    }

    println!();
}
|
|
|
|
/// Render active-mode results: one line per unresolved discussion (entity
/// ref, title, recency, note count, project) with an indented, dimmed
/// participant list underneath when present.
fn print_active_human(r: &ActiveResult, project_path: Option<&str>) {
    println!();
    println!(
        "{}",
        style(format!(
            "Active Discussions ({} unresolved in window)",
            r.total_unresolved_in_window
        ))
        .bold()
    );
    // U+2500 box-drawing horizontal rule under the title.
    println!("{}", "\u{2500}".repeat(60));
    print_scope_hint(project_path);
    println!();

    if r.discussions.is_empty() {
        println!(
            " {}",
            style("No active unresolved discussions in this time window.").dim()
        );
        println!();
        return;
    }

    for disc in &r.discussions {
        // GitLab ref convention: '!' for merge requests, '#' for issues.
        let prefix = if disc.entity_type == "MR" { "!" } else { "#" };
        let participants_str = disc
            .participants
            .iter()
            .map(|p| format!("@{p}"))
            .collect::<Vec<_>>()
            .join(", ");

        println!(
            " {} {} {} {} notes {}",
            style(format!("{prefix}{}", disc.entity_iid)).cyan(),
            truncate_str(&disc.entity_title, 40),
            style(format_relative_time(disc.last_note_at)).dim(),
            disc.note_count,
            style(&disc.project_path).dim(),
        );
        // Participant sub-line only when at least one participant exists.
        if !participants_str.is_empty() {
            println!(" {}", style(participants_str).dim());
        }
    }
    if r.truncated {
        println!(
            " {}",
            style("(showing first -n; rerun with a higher --limit)").dim()
        );
    }
    println!();
}
|
|
|
|
fn print_overlap_human(r: &OverlapResult, project_path: Option<&str>) {
|
|
println!();
|
|
println!("{}", style(format!("Overlap for {}", r.path_query)).bold());
|
|
println!("{}", "\u{2500}".repeat(60));
|
|
println!(
|
|
" {}",
|
|
style(format!(
|
|
"(matching {} {})",
|
|
r.path_match,
|
|
if r.path_match == "exact" {
|
|
"file"
|
|
} else {
|
|
"directory prefix"
|
|
}
|
|
))
|
|
.dim()
|
|
);
|
|
print_scope_hint(project_path);
|
|
println!();
|
|
|
|
if r.users.is_empty() {
|
|
println!(
|
|
" {}",
|
|
style("No overlapping users found for this path.").dim()
|
|
);
|
|
println!();
|
|
return;
|
|
}
|
|
|
|
println!(
|
|
" {:<16} {:<6} {:>7} {:<12} {}",
|
|
style("Username").bold(),
|
|
style("Role").bold(),
|
|
style("MRs").bold(),
|
|
style("Last Seen").bold(),
|
|
style("MR Refs").bold(),
|
|
);
|
|
|
|
for user in &r.users {
|
|
let mr_str = user
|
|
.mr_refs
|
|
.iter()
|
|
.take(5)
|
|
.cloned()
|
|
.collect::<Vec<_>>()
|
|
.join(", ");
|
|
let overflow = if user.mr_refs.len() > 5 {
|
|
format!(" +{}", user.mr_refs.len() - 5)
|
|
} else {
|
|
String::new()
|
|
};
|
|
|
|
println!(
|
|
" {:<16} {:<6} {:>7} {:<12} {}{}",
|
|
style(format!("@{}", user.username)).cyan(),
|
|
format_overlap_role(user),
|
|
user.touch_count,
|
|
format_relative_time(user.last_seen_at),
|
|
mr_str,
|
|
overflow,
|
|
);
|
|
}
|
|
if r.truncated {
|
|
println!(
|
|
" {}",
|
|
style("(showing first -n; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
println!();
|
|
}
|
|
|
|
// ─── Robot JSON Output ───────────────────────────────────────────────────────
|
|
|
|
/// Emits the robot-JSON envelope for a completed `who` run.
///
/// The envelope carries: the mode string, the raw CLI input, the
/// resolved/computed input, the mode-specific result payload (flattened),
/// and timing metadata. Serialization failure falls back to a hand-built
/// error JSON string rather than panicking.
pub fn print_who_json(run: &WhoRun, args: &WhoArgs, elapsed_ms: u64) {
    let (mode, data) = match &run.result {
        WhoResult::Expert(r) => ("expert", expert_to_json(r)),
        WhoResult::Workload(r) => ("workload", workload_to_json(r)),
        WhoResult::Reviews(r) => ("reviews", reviews_to_json(r)),
        WhoResult::Active(r) => ("active", active_to_json(r)),
        WhoResult::Overlap(r) => ("overlap", overlap_to_json(r)),
    };

    // Raw CLI args -- what the user typed
    let input = serde_json::json!({
        "target": args.target,
        "path": args.path,
        "project": args.project,
        "since": args.since,
        "limit": args.limit,
        "detail": args.detail,
    });

    // Resolved/computed values -- what actually ran
    let resolved_input = serde_json::json!({
        "mode": run.resolved_input.mode,
        "project_id": run.resolved_input.project_id,
        "project_path": run.resolved_input.project_path,
        "since_ms": run.resolved_input.since_ms,
        "since_iso": run.resolved_input.since_iso,
        "since_mode": run.resolved_input.since_mode,
        "limit": run.resolved_input.limit,
    });

    let output = WhoJsonEnvelope {
        ok: true,
        data: WhoJsonData {
            mode: mode.to_string(),
            input,
            resolved_input,
            result: data,
        },
        meta: RobotMeta { elapsed_ms },
    };

    println!(
        "{}",
        serde_json::to_string(&output).unwrap_or_else(|e| {
            // Keep robot consumers parseable even if serialization fails.
            format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
        })
    );
}
|
|
|
|
/// Top-level robot-JSON envelope for `who` output: success flag,
/// mode-specific payload, and timing metadata.
#[derive(Serialize)]
struct WhoJsonEnvelope {
    ok: bool,
    data: WhoJsonData,
    meta: RobotMeta,
}
|
|
|
|
/// Payload inside the robot-JSON envelope.
///
/// `result` is flattened so the mode-specific keys (e.g. `experts`,
/// `users`, `discussions`) appear directly alongside `mode`/`input`.
#[derive(Serialize)]
struct WhoJsonData {
    mode: String,
    // Raw CLI args as typed by the user.
    input: serde_json::Value,
    // Values that actually ran after project/since/mode resolution.
    resolved_input: serde_json::Value,
    #[serde(flatten)]
    result: serde_json::Value,
}
|
|
|
|
/// Serializes an `ExpertResult` into the robot-JSON `result` payload.
///
/// Each expert row carries score components, the contributing MR refs
/// (plus total/truncation markers), and — only when detail mode produced
/// them — a `details` array with per-MR role/note/activity breakdowns.
fn expert_to_json(r: &ExpertResult) -> serde_json::Value {
    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "truncated": r.truncated,
        "experts": r.experts.iter().map(|e| {
            let mut obj = serde_json::json!({
                "username": e.username,
                "score": e.score,
                "review_mr_count": e.review_mr_count,
                "review_note_count": e.review_note_count,
                "author_mr_count": e.author_mr_count,
                "last_seen_at": ms_to_iso(e.last_seen_ms),
                "mr_refs": e.mr_refs,
                "mr_refs_total": e.mr_refs_total,
                "mr_refs_truncated": e.mr_refs_truncated,
            });
            // The details key is omitted entirely (not null) when absent.
            if let Some(details) = &e.details {
                obj["details"] = serde_json::json!(details.iter().map(|d| serde_json::json!({
                    "mr_ref": d.mr_ref,
                    "title": d.title,
                    "role": d.role,
                    "note_count": d.note_count,
                    "last_activity_at": ms_to_iso(d.last_activity_ms),
                })).collect::<Vec<_>>());
            }
            obj
        }).collect::<Vec<_>>(),
    })
}
|
|
|
|
/// Serializes a `WorkloadResult` into the robot-JSON `result` payload:
/// the four item lists (assigned issues, authored MRs, reviewing MRs,
/// unresolved discussions), plus a `summary` of counts and a `truncation`
/// object of per-list truncation flags.
fn workload_to_json(r: &WorkloadResult) -> serde_json::Value {
    serde_json::json!({
        "username": r.username,
        "assigned_issues": r.assigned_issues.iter().map(|i| serde_json::json!({
            "iid": i.iid,
            "ref": i.ref_,
            "title": i.title,
            "project_path": i.project_path,
            "updated_at": ms_to_iso(i.updated_at),
        })).collect::<Vec<_>>(),
        "authored_mrs": r.authored_mrs.iter().map(|m| serde_json::json!({
            "iid": m.iid,
            "ref": m.ref_,
            "title": m.title,
            "draft": m.draft,
            "project_path": m.project_path,
            "updated_at": ms_to_iso(m.updated_at),
        })).collect::<Vec<_>>(),
        "reviewing_mrs": r.reviewing_mrs.iter().map(|m| serde_json::json!({
            "iid": m.iid,
            "ref": m.ref_,
            "title": m.title,
            "draft": m.draft,
            "project_path": m.project_path,
            "author_username": m.author_username,
            "updated_at": ms_to_iso(m.updated_at),
        })).collect::<Vec<_>>(),
        "unresolved_discussions": r.unresolved_discussions.iter().map(|d| serde_json::json!({
            "entity_type": d.entity_type,
            "entity_iid": d.entity_iid,
            "ref": d.ref_,
            "entity_title": d.entity_title,
            "project_path": d.project_path,
            "last_note_at": ms_to_iso(d.last_note_at),
        })).collect::<Vec<_>>(),
        // Counts reflect the (possibly truncated) returned lists.
        "summary": {
            "assigned_issue_count": r.assigned_issues.len(),
            "authored_mr_count": r.authored_mrs.len(),
            "reviewing_mr_count": r.reviewing_mrs.len(),
            "unresolved_discussion_count": r.unresolved_discussions.len(),
        },
        "truncation": {
            "assigned_issues_truncated": r.assigned_issues_truncated,
            "authored_mrs_truncated": r.authored_mrs_truncated,
            "reviewing_mrs_truncated": r.reviewing_mrs_truncated,
            "unresolved_discussions_truncated": r.unresolved_discussions_truncated,
        }
    })
}
|
|
|
|
/// Serializes a `ReviewsResult` into the robot-JSON `result` payload.
/// Category percentages are rounded to one decimal place so the JSON
/// output stays stable across runs.
fn reviews_to_json(r: &ReviewsResult) -> serde_json::Value {
    let categories: Vec<serde_json::Value> = r
        .categories
        .iter()
        .map(|c| {
            serde_json::json!({
                "name": c.name,
                "count": c.count,
                "percentage": (c.percentage * 10.0).round() / 10.0,
            })
        })
        .collect();

    serde_json::json!({
        "username": r.username,
        "total_diffnotes": r.total_diffnotes,
        "categorized_count": r.categorized_count,
        "mrs_reviewed": r.mrs_reviewed,
        "categories": categories,
    })
}
|
|
|
|
/// Serializes an `ActiveResult` into the robot-JSON `result` payload:
/// the window-wide unresolved count, list truncation flag, and per
/// discussion the identifying fields, ISO last-note timestamp, note
/// count, and the (possibly truncated) participant list.
fn active_to_json(r: &ActiveResult) -> serde_json::Value {
    serde_json::json!({
        "total_unresolved_in_window": r.total_unresolved_in_window,
        "truncated": r.truncated,
        "discussions": r.discussions.iter().map(|d| serde_json::json!({
            "discussion_id": d.discussion_id,
            "entity_type": d.entity_type,
            "entity_iid": d.entity_iid,
            "entity_title": d.entity_title,
            "project_path": d.project_path,
            "last_note_at": ms_to_iso(d.last_note_at),
            "note_count": d.note_count,
            "participants": d.participants,
            "participants_total": d.participants_total,
            "participants_truncated": d.participants_truncated,
        })).collect::<Vec<_>>(),
    })
}
|
|
|
|
/// Serializes an `OverlapResult` into the robot-JSON `result` payload.
/// The human-readable role string ("A", "R", "A+R") is included alongside
/// the raw author/review touch counts; MR refs carry total/truncation
/// markers so consumers can tell when the per-user cap was hit.
fn overlap_to_json(r: &OverlapResult) -> serde_json::Value {
    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "truncated": r.truncated,
        "users": r.users.iter().map(|u| serde_json::json!({
            "username": u.username,
            "role": format_overlap_role(u),
            "author_touch_count": u.author_touch_count,
            "review_touch_count": u.review_touch_count,
            "touch_count": u.touch_count,
            "last_seen_at": ms_to_iso(u.last_seen_at),
            "mr_refs": u.mr_refs,
            "mr_refs_total": u.mr_refs_total,
            "mr_refs_truncated": u.mr_refs_truncated,
        })).collect::<Vec<_>>(),
    })
}
|
|
|
|
// ─── Helper Functions ────────────────────────────────────────────────────────
|
|
|
|
fn format_relative_time(ms_epoch: i64) -> String {
|
|
let now = now_ms();
|
|
let diff = now - ms_epoch;
|
|
|
|
if diff < 0 {
|
|
return "in the future".to_string();
|
|
}
|
|
|
|
match diff {
|
|
d if d < 60_000 => "just now".to_string(),
|
|
d if d < 3_600_000 => format!("{} min ago", d / 60_000),
|
|
d if d < 86_400_000 => {
|
|
let n = d / 3_600_000;
|
|
format!("{n} {} ago", if n == 1 { "hour" } else { "hours" })
|
|
}
|
|
d if d < 604_800_000 => {
|
|
let n = d / 86_400_000;
|
|
format!("{n} {} ago", if n == 1 { "day" } else { "days" })
|
|
}
|
|
d if d < 2_592_000_000 => {
|
|
let n = d / 604_800_000;
|
|
format!("{n} {} ago", if n == 1 { "week" } else { "weeks" })
|
|
}
|
|
_ => {
|
|
let n = diff / 2_592_000_000;
|
|
format!("{n} {} ago", if n == 1 { "month" } else { "months" })
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Returns `s` unchanged when it fits within `max` characters; otherwise
/// keeps the first `max - 3` characters and appends "...". Counts Unicode
/// scalar values (chars), not bytes, so multi-byte text is never split.
fn truncate_str(s: &str, max: usize) -> String {
    if s.chars().count() <= max {
        return s.to_owned();
    }
    let keep = max.saturating_sub(3);
    let mut out: String = s.chars().take(keep).collect();
    out.push_str("...");
    out
}
|
|
|
|
// ─── Tests ───────────────────────────────────────────────────────────────────
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
use crate::core::db::{create_connection, run_migrations};
|
|
use std::path::Path;
|
|
|
|
// Creates an in-memory SQLite database with all migrations applied.
fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}
|
|
|
|
// Default scoring weights used by the expert-mode tests.
fn default_scoring() -> ScoringConfig {
    ScoringConfig::default()
}
|
|
|
|
// Inserts a project fixture row; the GitLab project id is derived as
// id*100 and the web URL from the path so ids never collide across rows.
fn insert_project(conn: &Connection, id: i64, path: &str) {
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
         VALUES (?1, ?2, ?3, ?4)",
        rusqlite::params![
            id,
            id * 100,
            path,
            format!("https://git.example.com/{}", path)
        ],
    )
    .unwrap();
}
|
|
|
|
// Inserts a merge-request fixture row with a synthetic gitlab_id (id*10),
// a generated "MR {iid}" title, and both timestamps set to now.
fn insert_mr(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str, state: &str) {
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, author_username, state, last_seen_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
        rusqlite::params![
            id,
            id * 10,
            project_id,
            iid,
            format!("MR {iid}"),
            author,
            state,
            now_ms(),
            now_ms()
        ],
    )
    .unwrap();
}
|
|
|
|
// Inserts an issue fixture row in the 'opened' state with a synthetic
// gitlab_id (id*10) and all timestamps set to now.
fn insert_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str) {
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at)
         VALUES (?1, ?2, ?3, ?4, ?5, 'opened', ?6, ?7, ?8, ?9)",
        rusqlite::params![
            id,
            id * 10,
            project_id,
            iid,
            format!("Issue {iid}"),
            author,
            now_ms(),
            now_ms(),
            now_ms()
        ],
    )
    .unwrap();
}
|
|
|
|
// Inserts a discussion fixture row attached to either an MR or an issue.
// The noteable_type is derived from which id is Some; booleans are stored
// as 0/1 integers per the SQLite schema.
fn insert_discussion(
    conn: &Connection,
    id: i64,
    project_id: i64,
    mr_id: Option<i64>,
    issue_id: Option<i64>,
    resolvable: bool,
    resolved: bool,
) {
    let noteable_type = if mr_id.is_some() {
        "MergeRequest"
    } else {
        "Issue"
    };
    conn.execute(
        "INSERT INTO discussions (id, gitlab_discussion_id, project_id, merge_request_id, issue_id, noteable_type, resolvable, resolved, last_seen_at, last_note_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10)",
        rusqlite::params![
            id,
            format!("disc-{id}"),
            project_id,
            mr_id,
            issue_id,
            noteable_type,
            i32::from(resolvable),
            i32::from(resolved),
            now_ms(),
            now_ms()
        ],
    )
    .unwrap();
}
|
|
|
|
// Inserts a non-system DiffNote fixture anchored at file_path
// (position_new_path), which is what the path-overlap queries match on.
#[allow(clippy::too_many_arguments)]
fn insert_diffnote(
    conn: &Connection,
    id: i64,
    discussion_id: i64,
    project_id: i64,
    author: &str,
    file_path: &str,
    body: &str,
) {
    conn.execute(
        "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, note_type, is_system, author_username, body, created_at, updated_at, last_seen_at, position_new_path)
         VALUES (?1, ?2, ?3, ?4, 'DiffNote', 0, ?5, ?6, ?7, ?8, ?9, ?10)",
        rusqlite::params![
            id,
            id * 10,
            discussion_id,
            project_id,
            author,
            body,
            now_ms(),
            now_ms(),
            now_ms(),
            file_path
        ],
    )
    .unwrap();
}
|
|
|
|
// Links a username to an issue via the issue_assignees join table.
fn insert_assignee(conn: &Connection, issue_id: i64, username: &str) {
    conn.execute(
        "INSERT INTO issue_assignees (issue_id, username) VALUES (?1, ?2)",
        rusqlite::params![issue_id, username],
    )
    .unwrap();
}
|
|
|
|
// Links a username to an MR via the mr_reviewers join table.
fn insert_reviewer(conn: &Connection, mr_id: i64, username: &str) {
    conn.execute(
        "INSERT INTO mr_reviewers (merge_request_id, username) VALUES (?1, ?2)",
        rusqlite::params![mr_id, username],
    )
    .unwrap();
}
|
|
|
|
// Records that an MR touched new_path with the given change_type
// (e.g. "modified") via the mr_file_changes table.
fn insert_file_change(
    conn: &Connection,
    mr_id: i64,
    project_id: i64,
    new_path: &str,
    change_type: &str,
) {
    conn.execute(
        "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type)
         VALUES (?1, ?2, ?3, ?4)",
        rusqlite::params![mr_id, project_id, new_path, change_type],
    )
    .unwrap();
}
|
|
|
|
// Mode discrimination: '/'-bearing targets resolve to expert mode,
// bare (optionally @-prefixed) names to workload, --reviews forces
// reviews mode, and --path always means expert (even for root files).
#[test]
fn test_is_file_path_discrimination() {
    // Builds WhoArgs varying only the fields this test discriminates on.
    fn args(target: Option<&str>, path: Option<&str>, reviews: bool) -> WhoArgs {
        WhoArgs {
            target: target.map(str::to_string),
            path: path.map(str::to_string),
            active: false,
            overlap: None,
            reviews,
            since: None,
            project: None,
            limit: 20,
            detail: false,
            no_detail: false,
        }
    }

    // Contains '/' -> file path -> expert mode
    assert!(matches!(
        resolve_mode(&args(Some("src/auth/"), None, false)).unwrap(),
        WhoMode::Expert { .. }
    ));

    // No '/' -> username -> workload mode
    assert!(matches!(
        resolve_mode(&args(Some("asmith"), None, false)).unwrap(),
        WhoMode::Workload { .. }
    ));

    // '@' prefix is stripped -> still a username
    assert!(matches!(
        resolve_mode(&args(Some("@asmith"), None, false)).unwrap(),
        WhoMode::Workload { .. }
    ));

    // --reviews flag -> reviews mode
    assert!(matches!(
        resolve_mode(&args(Some("asmith"), None, true)).unwrap(),
        WhoMode::Reviews { .. }
    ));

    // --path flag -> expert mode (handles root files)
    assert!(matches!(
        resolve_mode(&args(None, Some("README.md"), false)).unwrap(),
        WhoMode::Expert { .. }
    ));

    // --path flag with dotless file -> expert mode
    assert!(matches!(
        resolve_mode(&args(None, Some("Makefile"), false)).unwrap(),
        WhoMode::Expert { .. }
    ));
}
|
|
|
|
// --detail with a username target resolves to workload mode, where the
// flag is unsupported; validation must fail with an explanatory message.
#[test]
fn test_detail_rejected_outside_expert_mode() {
    let args = WhoArgs {
        target: Some("asmith".to_string()),
        path: None,
        active: false,
        overlap: None,
        reviews: false,
        since: None,
        project: None,
        limit: 20,
        detail: true,
        no_detail: false,
    };
    let mode = resolve_mode(&args).unwrap();
    let err = validate_mode_flags(&mode, &args).unwrap_err();
    let msg = err.to_string();
    assert!(
        msg.contains("--detail is only supported in expert mode"),
        "unexpected error: {msg}"
    );
}
|
|
|
|
// --detail combined with --path resolves to expert mode, where the flag
// is valid, so flag validation must succeed.
#[test]
fn test_detail_allowed_in_expert_mode() {
    let args = WhoArgs {
        target: None,
        path: Some("README.md".to_string()),
        active: false,
        overlap: None,
        reviews: false,
        since: None,
        project: None,
        limit: 20,
        detail: true,
        no_detail: false,
    };
    let mode = resolve_mode(&args).unwrap();
    assert!(validate_mode_flags(&mode, &args).is_ok());
}
|
|
|
|
// Exercises the directory-vs-file heuristics of build_path_query:
// trailing '/' or dotless last segment -> LIKE prefix, extension/root
// file -> exact, and LIKE metacharacters are escaped for prefixes.
#[test]
fn test_build_path_query() {
    let conn = setup_test_db();

    // Directory with trailing slash -> prefix
    let pq = build_path_query(&conn, "src/auth/", None).unwrap();
    assert_eq!(pq.value, "src/auth/%");
    assert!(pq.is_prefix);

    // Directory without trailing slash (no dot in last segment) -> prefix
    let pq = build_path_query(&conn, "src/auth", None).unwrap();
    assert_eq!(pq.value, "src/auth/%");
    assert!(pq.is_prefix);

    // File with extension -> exact
    let pq = build_path_query(&conn, "src/auth/login.rs", None).unwrap();
    assert_eq!(pq.value, "src/auth/login.rs");
    assert!(!pq.is_prefix);

    // Root file -> exact
    let pq = build_path_query(&conn, "README.md", None).unwrap();
    assert_eq!(pq.value, "README.md");
    assert!(!pq.is_prefix);

    // Directory with dots in non-leaf segment -> prefix
    let pq = build_path_query(&conn, ".github/workflows/", None).unwrap();
    assert_eq!(pq.value, ".github/workflows/%");
    assert!(pq.is_prefix);

    // Versioned directory path -> prefix
    let pq = build_path_query(&conn, "src/v1.2/auth/", None).unwrap();
    assert_eq!(pq.value, "src/v1.2/auth/%");
    assert!(pq.is_prefix);

    // Path with LIKE metacharacters -> prefix, escaped
    let pq = build_path_query(&conn, "src/test_files/", None).unwrap();
    assert_eq!(pq.value, "src/test\\_files/%");
    assert!(pq.is_prefix);

    // Dotless root file -> exact match (root path without '/')
    let pq = build_path_query(&conn, "Makefile", None).unwrap();
    assert_eq!(pq.value, "Makefile");
    assert!(!pq.is_prefix);

    let pq = build_path_query(&conn, "LICENSE", None).unwrap();
    assert_eq!(pq.value, "LICENSE");
    assert!(!pq.is_prefix);

    // Dotless root path with trailing '/' -> directory prefix (explicit override)
    let pq = build_path_query(&conn, "Makefile/", None).unwrap();
    assert_eq!(pq.value, "Makefile/%");
    assert!(pq.is_prefix);
}
|
|
|
|
// SQL LIKE metacharacters ('_', '%', '\') must be backslash-escaped.
#[test]
fn test_escape_like() {
    assert_eq!(escape_like("normal/path"), "normal/path");
    assert_eq!(escape_like("has_underscore"), "has\\_underscore");
    assert_eq!(escape_like("has%percent"), "has\\%percent");
    assert_eq!(escape_like("has\\backslash"), "has\\\\backslash");
}
|
|
|
|
// Exact matches compare with '=' rather than LIKE, so metacharacters
// must be left untouched in the query value.
#[test]
fn test_build_path_query_exact_does_not_escape() {
    let conn = setup_test_db();
    // '_' must NOT be escaped for exact match (=).
    let pq = build_path_query(&conn, "README_with_underscore.md", None).unwrap();
    assert_eq!(pq.value, "README_with_underscore.md");
    assert!(!pq.is_prefix);
}
|
|
|
|
// Dotless root-level names passed via --path (Makefile, Dockerfile)
// are treated as files, not directory prefixes.
#[test]
fn test_path_flag_dotless_root_file_is_exact() {
    let conn = setup_test_db();
    // --path Makefile must produce an exact match, not Makefile/%
    let pq = build_path_query(&conn, "Makefile", None).unwrap();
    assert_eq!(pq.value, "Makefile");
    assert!(!pq.is_prefix);

    let pq = build_path_query(&conn, "Dockerfile", None).unwrap();
    assert_eq!(pq.value, "Dockerfile");
    assert!(!pq.is_prefix);
}
|
|
|
|
// One author plus two distinct diff-note reviewers under src/auth/
// should yield three experts, ranked with the author first (authorship
// carries the highest scoring weight).
#[test]
fn test_expert_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(
        &conn,
        1,
        1,
        1,
        "reviewer_b",
        "src/auth/login.rs",
        "**suggestion**: use const",
    );
    insert_diffnote(
        &conn,
        2,
        1,
        1,
        "reviewer_b",
        "src/auth/login.rs",
        "**question**: why?",
    );
    insert_diffnote(
        &conn,
        3,
        1,
        1,
        "reviewer_c",
        "src/auth/session.rs",
        "looks good",
    );

    let result =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), false).unwrap();
    assert_eq!(result.experts.len(), 3); // author_a, reviewer_b, reviewer_c
    assert_eq!(result.experts[0].username, "author_a"); // highest score (authorship dominates)
}
|
|
|
|
// Workload mode must pick up both issue assignment (via issue_assignees)
// and MR authorship for the queried user.
#[test]
fn test_workload_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_issue(&conn, 1, 1, 42, "someone_else");
    insert_assignee(&conn, 1, "dev_a");
    insert_mr(&conn, 1, 1, 100, "dev_a", "opened");

    let result = query_workload(&conn, "dev_a", None, None, 20).unwrap();
    assert_eq!(result.assigned_issues.len(), 1);
    assert_eq!(result.authored_mrs.len(), 1);
}
|
|
|
|
// Reviews mode counts all of a user's diff notes, but only notes with a
// **prefix** convention are categorized; here 2 of 3 carry a prefix.
#[test]
fn test_reviews_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(
        &conn,
        1,
        1,
        1,
        "reviewer_b",
        "src/foo.rs",
        "**suggestion**: refactor",
    );
    insert_diffnote(
        &conn,
        2,
        1,
        1,
        "reviewer_b",
        "src/bar.rs",
        "**question**: why?",
    );
    insert_diffnote(&conn, 3, 1, 1, "reviewer_b", "src/baz.rs", "looks good");

    let result = query_reviews(&conn, "reviewer_b", None, 0).unwrap();
    assert_eq!(result.total_diffnotes, 3);
    assert_eq!(result.categorized_count, 2);
    assert_eq!(result.categories.len(), 2);
}
|
|
|
|
// Active mode: two notes from one participant in one discussion must
// count as note_count == 2 with a single-entry participant list.
#[test]
fn test_active_query() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/foo.rs", "needs work");
    // Second note by same participant -- note_count should be 2, participants still ["reviewer_b"]
    insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/foo.rs", "follow-up");

    let result = query_active(&conn, None, 0, 20).unwrap();
    assert_eq!(result.total_unresolved_in_window, 1);
    assert_eq!(result.discussions.len(), 1);
    assert_eq!(result.discussions[0].participants, vec!["reviewer_b"]);
    // This was a regression in iteration 4: note_count was counting participants, not notes
    assert_eq!(result.discussions[0].note_count, 2);
    assert!(result.discussions[0].discussion_id > 0);
}
|
|
|
|
// A user who authored one MR and reviewed another at the same path must
// show both touch counts, render as "A+R", and carry project-qualified
// MR refs (project/path!iid).
#[test]
fn test_overlap_dual_roles() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    // User is both author of one MR and reviewer of another at same path
    insert_mr(&conn, 1, 1, 100, "dual_user", "opened");
    insert_mr(&conn, 2, 1, 200, "other_author", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_discussion(&conn, 2, 1, Some(2), None, true, false);
    insert_diffnote(
        &conn,
        1,
        1,
        1,
        "someone",
        "src/auth/login.rs",
        "review of dual_user's MR",
    );
    insert_diffnote(
        &conn,
        2,
        2,
        1,
        "dual_user",
        "src/auth/login.rs",
        "dual_user reviewing other MR",
    );

    let result = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    let dual = result
        .users
        .iter()
        .find(|u| u.username == "dual_user")
        .unwrap();
    assert!(dual.author_touch_count > 0);
    assert!(dual.review_touch_count > 0);
    assert_eq!(format_overlap_role(dual), "A+R");
    // MR refs should be project-qualified
    assert!(dual.mr_refs.iter().any(|r| r.contains("team/backend!")));
}
|
|
|
|
// MR refs are project-qualified, so identical iids from different
// projects must produce two distinct refs for the same reviewer.
#[test]
fn test_overlap_multi_project_mr_refs() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_project(&conn, 2, "team/frontend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_mr(&conn, 2, 2, 100, "author_a", "opened"); // Same iid, different project
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_discussion(&conn, 2, 2, Some(2), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_x", "src/auth/login.rs", "review");
    insert_diffnote(&conn, 2, 2, 2, "reviewer_x", "src/auth/login.rs", "review");

    let result = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    let reviewer = result
        .users
        .iter()
        .find(|u| u.username == "reviewer_x")
        .unwrap();
    // Should have two distinct refs despite same iid
    assert!(reviewer.mr_refs.contains(&"team/backend!100".to_string()));
    assert!(reviewer.mr_refs.contains(&"team/frontend!100".to_string()));
}
|
|
|
|
// Prefix normalization: lowercases, strips trailing ':' and
// "(non-blocking)" qualifiers, and canonicalizes "nitpick" to "nit".
#[test]
fn test_normalize_review_prefix() {
    assert_eq!(normalize_review_prefix("suggestion"), "suggestion");
    assert_eq!(normalize_review_prefix("Suggestion:"), "suggestion");
    assert_eq!(
        normalize_review_prefix("suggestion (non-blocking):"),
        "suggestion"
    );
    assert_eq!(normalize_review_prefix("Nitpick:"), "nit");
    assert_eq!(normalize_review_prefix("nit (non-blocking):"), "nit");
    assert_eq!(normalize_review_prefix("question"), "question");
    assert_eq!(normalize_review_prefix("TODO:"), "todo");
}
|
|
|
|
// Path normalization: strips leading './' and '/', converts pure
// backslash paths, collapses '//', and trims surrounding whitespace.
#[test]
fn test_normalize_repo_path() {
    // Strips leading ./
    assert_eq!(normalize_repo_path("./src/foo/"), "src/foo/");
    // Strips leading /
    assert_eq!(normalize_repo_path("/src/foo/"), "src/foo/");
    // Strips leading ./ recursively
    assert_eq!(normalize_repo_path("././src/foo"), "src/foo");
    // Converts Windows backslashes when no forward slashes
    assert_eq!(normalize_repo_path("src\\foo\\bar.rs"), "src/foo/bar.rs");
    // Does NOT convert backslashes when forward slashes present
    assert_eq!(normalize_repo_path("src/foo\\bar"), "src/foo\\bar");
    // Collapses repeated //
    assert_eq!(normalize_repo_path("src//foo//bar/"), "src/foo/bar/");
    // Trims whitespace
    assert_eq!(normalize_repo_path(" src/foo/ "), "src/foo/");
    // Identity for clean paths
    assert_eq!(normalize_repo_path("src/foo/bar.rs"), "src/foo/bar.rs");
}
|
|
|
|
// lookup_project_path resolves an internal project id to its
// path_with_namespace.
#[test]
fn test_lookup_project_path() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    assert_eq!(lookup_project_path(&conn, 1).unwrap(), "team/backend");
}
|
|
|
|
// Dotless file in subdirectory (src/Dockerfile) would normally be
// treated as a directory. The DB probe detects it's actually a file,
// and falls back to a directory prefix when no data exists.
#[test]
fn test_build_path_query_dotless_subdir_file_uses_db_probe() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/Dockerfile", "note");

    let pq = build_path_query(&conn, "src/Dockerfile", None).unwrap();
    assert_eq!(pq.value, "src/Dockerfile");
    assert!(!pq.is_prefix);

    // Same path without DB data -> falls through to prefix
    let conn2 = setup_test_db();
    let pq2 = build_path_query(&conn2, "src/Dockerfile", None).unwrap();
    assert_eq!(pq2.value, "src/Dockerfile/%");
    assert!(pq2.is_prefix);
}
|
|
|
|
// The dotless-file DB probe must respect the optional project scope:
// a file known only in project 1 stays a prefix when scoped to project 2.
#[test]
fn test_build_path_query_probe_is_project_scoped() {
    // Path exists as a dotless file in project 1; project 2 should not
    // treat it as an exact file unless it exists there too.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/a");
    insert_project(&conn, 2, "team/b");
    insert_mr(&conn, 1, 1, 10, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "rev", "infra/Makefile", "note");

    // Unscoped: finds exact match in project 1 -> exact
    let pq_unscoped = build_path_query(&conn, "infra/Makefile", None).unwrap();
    assert!(!pq_unscoped.is_prefix);

    // Scoped to project 2: no data -> falls back to prefix
    let pq_scoped = build_path_query(&conn, "infra/Makefile", Some(2)).unwrap();
    assert!(pq_scoped.is_prefix);

    // Scoped to project 1: finds data -> exact
    let pq_scoped1 = build_path_query(&conn, "infra/Makefile", Some(1)).unwrap();
    assert!(!pq_scoped1.is_prefix);
}
|
|
|
|
// An MR author commenting on their own diff must be credited only as
// an author, never as a reviewer; an outside commenter is a reviewer.
#[test]
fn test_expert_excludes_self_review_notes() {
    // MR author commenting on their own diff should not be counted as reviewer
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // author_a comments on their own MR diff (clarification)
    insert_diffnote(
        &conn,
        1,
        1,
        1,
        "author_a",
        "src/auth/login.rs",
        "clarification",
    );
    // reviewer_b also reviews
    insert_diffnote(
        &conn,
        2,
        1,
        1,
        "reviewer_b",
        "src/auth/login.rs",
        "looks good",
    );

    let result =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), false).unwrap();
    // author_a should appear as author only, not as reviewer
    let author = result
        .experts
        .iter()
        .find(|e| e.username == "author_a")
        .unwrap();
    assert_eq!(author.review_mr_count, 0);
    assert!(author.author_mr_count > 0);

    // reviewer_b should be a reviewer
    let reviewer = result
        .experts
        .iter()
        .find(|e| e.username == "reviewer_b")
        .unwrap();
    assert!(reviewer.review_mr_count > 0);
}
|
|
|
|
// Overlap mode mirrors expert mode's self-review exclusion: an author's
// note on their own MR must not count as a review touch.
#[test]
fn test_overlap_excludes_self_review_notes() {
    // MR author commenting on their own diff should not inflate reviewer counts
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    // author_a comments on their own MR diff (clarification)
    insert_diffnote(
        &conn,
        1,
        1,
        1,
        "author_a",
        "src/auth/login.rs",
        "clarification",
    );

    let result = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    let u = result.users.iter().find(|u| u.username == "author_a");
    // Should NOT be credited as reviewer touch
    assert!(u.map_or(0, |x| x.review_touch_count) == 0);
}
|
|
|
|
// Participant lists are returned alphabetically regardless of note
// insertion order, keeping output deterministic.
#[test]
fn test_active_participants_sorted() {
    // Participants should be sorted alphabetically for deterministic output
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "zebra_user", "src/foo.rs", "note 1");
    insert_diffnote(&conn, 2, 1, 1, "alpha_user", "src/foo.rs", "note 2");

    let result = query_active(&conn, None, 0, 20).unwrap();
    assert_eq!(
        result.discussions[0].participants,
        vec!["alpha_user", "zebra_user"]
    );
}
|
|
|
|
// With 3+ candidate experts, a smaller limit sets truncated=true and
// caps the list; a larger limit returns everything untruncated.
#[test]
fn test_expert_truncation() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    // Create 3 experts
    for i in 1..=3 {
        insert_mr(&conn, i, 1, 100 + i, &format!("author_{i}"), "opened");
        insert_discussion(&conn, i, 1, Some(i), None, true, false);
        insert_diffnote(
            &conn,
            i,
            i,
            1,
            &format!("reviewer_{i}"),
            "src/auth/login.rs",
            "note",
        );
    }

    // limit = 2, should return truncated = true
    let result =
        query_expert(&conn, "src/auth/", None, 0, 2, &default_scoring(), false).unwrap();
    assert!(result.truncated);
    assert_eq!(result.experts.len(), 2);

    // limit = 10, should return truncated = false
    let result =
        query_expert(&conn, "src/auth/", None, 0, 10, &default_scoring(), false).unwrap();
    assert!(!result.truncated);
}
|
|
|
|
// Authorship signal alone (mr_file_changes, zero diff notes) must be
// enough for a user to surface as an expert.
#[test]
fn test_expert_file_changes_only() {
    // MR author should appear even when there are zero DiffNotes
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "file_author", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");

    let result = query_expert(
        &conn,
        "src/auth/login.rs",
        None,
        0,
        20,
        &default_scoring(),
        false,
    )
    .unwrap();
    assert_eq!(result.experts.len(), 1);
    assert_eq!(result.experts[0].username, "file_author");
    assert!(result.experts[0].author_mr_count > 0);
    assert_eq!(result.experts[0].review_mr_count, 0);
}
|
|
|
|
#[test]
fn test_expert_mr_reviewer_via_file_changes() {
    // A reviewer assigned through mr_reviewers must show up when that MR
    // touched the queried file via mr_file_changes.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");
    insert_reviewer(&conn, 1, "assigned_reviewer");

    let res = query_expert(
        &conn,
        "src/auth/login.rs",
        None,
        0,
        20,
        &default_scoring(),
        false,
    )
    .unwrap();
    let found = res
        .experts
        .iter()
        .find(|e| e.username == "assigned_reviewer");
    assert!(found.is_some(), "assigned_reviewer should appear");
    assert!(found.unwrap().review_mr_count > 0);
}
|
|
|
|
#[test]
fn test_expert_deduplicates_across_signals() {
    // A user who is BOTH a DiffNote reviewer AND an mr_reviewers entry for
    // the same MR must be counted once per MR, not twice.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/auth/login.rs", "looks good");
    // The very same user is also an assigned reviewer, and file-change rows exist.
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");
    insert_reviewer(&conn, 1, "reviewer_b");

    let res = query_expert(
        &conn,
        "src/auth/login.rs",
        None,
        0,
        20,
        &default_scoring(),
        false,
    )
    .unwrap();
    let b = res
        .experts
        .iter()
        .find(|e| e.username == "reviewer_b")
        .unwrap();
    // One MR total — deduped across the DiffNote + mr_reviewers signals.
    assert_eq!(b.review_mr_count, 1);
}
|
|
|
|
#[test]
fn test_expert_combined_diffnote_and_file_changes() {
    // DiffNotes on one path plus file_changes on another must both credit the
    // author under a shared directory-prefix query.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    // MR 1 carries DiffNotes on login.rs.
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/auth/login.rs", "note");
    // MR 2 carries only file_changes on session.rs.
    insert_mr(&conn, 2, 1, 200, "author_a", "merged");
    insert_file_change(&conn, 2, 1, "src/auth/session.rs", "added");

    let res =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), false).unwrap();
    let a = res
        .experts
        .iter()
        .find(|e| e.username == "author_a")
        .unwrap();
    // Two authored MRs: one via the DiffNote path, one via file changes.
    assert_eq!(a.author_mr_count, 2);
}
|
|
|
|
#[test]
fn test_expert_file_changes_prefix_match() {
    // A directory-prefix query must pick up mr_file_changes rows under it.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");
    insert_file_change(&conn, 1, 1, "src/auth/session.rs", "added");

    let res =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), false).unwrap();
    assert_eq!(res.path_match, "prefix");
    assert_eq!(res.experts.len(), 1);
    assert_eq!(res.experts[0].username, "author_a");
}
|
|
|
|
#[test]
fn test_overlap_file_changes_only() {
    // Overlap mode must also discover users through mr_file_changes.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");
    insert_reviewer(&conn, 1, "reviewer_x");

    let res = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    let present = |name: &str| res.users.iter().any(|u| u.username == name);
    assert!(present("author_a"), "author_a should appear via file_changes");
    assert!(
        present("reviewer_x"),
        "reviewer_x should appear via mr_reviewers + file_changes"
    );
}
|
|
|
|
#[test]
fn test_build_path_query_resolves_via_file_changes() {
    // The DB probe must detect an exact file match sourced solely from
    // mr_file_changes, with no DiffNotes on the path at all.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/Dockerfile", "modified");

    let pq = build_path_query(&conn, "src/Dockerfile", None).unwrap();
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "src/Dockerfile");
}
|
|
|
|
#[test]
fn test_expert_excludes_self_assigned_reviewer() {
    // An author self-listed in mr_reviewers on their own MR must NOT gain
    // reviewer credit — same principle as the DiffNote self-review exclusion.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");
    insert_reviewer(&conn, 1, "author_a"); // self-assignment
    insert_reviewer(&conn, 1, "real_reviewer"); // legitimate assignment

    let res = query_expert(
        &conn,
        "src/auth/login.rs",
        None,
        0,
        20,
        &default_scoring(),
        false,
    )
    .unwrap();
    let by_name = |name: &str| res.experts.iter().find(|e| e.username == name).unwrap();

    // author_a keeps author credit only.
    let author = by_name("author_a");
    assert_eq!(author.review_mr_count, 0);
    assert!(author.author_mr_count > 0);

    // real_reviewer keeps reviewer credit.
    assert!(by_name("real_reviewer").review_mr_count > 0);
}
|
|
|
|
#[test]
fn test_overlap_excludes_self_assigned_reviewer() {
    // Overlap mode applies the same self-review exclusion via file changes.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");
    insert_reviewer(&conn, 1, "author_a"); // self-assigned

    let res = query_overlap(&conn, "src/auth/", None, 0, 20).unwrap();
    // The author still appears, just with zero reviewer touches.
    let user = res.users.iter().find(|u| u.username == "author_a");
    assert!(user.is_some());
    assert_eq!(user.unwrap().review_touch_count, 0);
}
|
|
|
|
// ─── Suffix / Fuzzy Path Resolution Tests ───────────────────────────────
|
|
|
|
#[test]
fn test_build_path_query_suffix_resolves_bare_filename() {
    // A bare "login.rs" should auto-resolve to the full "src/auth/login.rs".
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");

    let pq = build_path_query(&conn, "login.rs", None).unwrap();
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "src/auth/login.rs");
}
|
|
|
|
#[test]
fn test_build_path_query_suffix_resolves_partial_path() {
    // A partial "auth/login.rs" resolves to the full "src/auth/login.rs".
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");

    let pq = build_path_query(&conn, "auth/login.rs", None).unwrap();
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "src/auth/login.rs");
}
|
|
|
|
#[test]
fn test_build_path_query_suffix_ambiguous_returns_error() {
    // Two files sharing a filename make a bare-name query ambiguous.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/utils.rs", "modified");
    insert_file_change(&conn, 1, 1, "src/db/utils.rs", "modified");

    let msg = build_path_query(&conn, "utils.rs", None)
        .unwrap_err()
        .to_string();
    // The error must enumerate every candidate so the user can disambiguate.
    for candidate in ["src/auth/utils.rs", "src/db/utils.rs"] {
        assert!(msg.contains(candidate), "should list candidates: {msg}");
    }
}
|
|
|
|
#[test]
fn test_build_path_query_suffix_scoped_to_project() {
    // The same filename exists in two projects; only project scoping resolves it.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_project(&conn, 2, "team/frontend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_mr(&conn, 2, 2, 200, "author_b", "merged");
    insert_file_change(&conn, 1, 1, "src/utils.rs", "modified");
    insert_file_change(&conn, 2, 2, "lib/utils.rs", "modified");

    // Without a project scope the query is ambiguous.
    assert!(build_path_query(&conn, "utils.rs", None).is_err());

    // Scoped to project 1 it resolves cleanly.
    let pq = build_path_query(&conn, "utils.rs", Some(1)).unwrap();
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "src/utils.rs");
}
|
|
|
|
#[test]
fn test_build_path_query_suffix_deduplicates_across_sources() {
    // The same path present in both notes AND mr_file_changes is one unique
    // match, not an ambiguity.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "src/auth/login.rs", "modified");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_a", "src/auth/login.rs", "review note");

    let pq = build_path_query(&conn, "login.rs", None).unwrap();
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "src/auth/login.rs");
}
|
|
|
|
#[test]
fn test_build_path_query_exact_match_still_preferred() {
    // When the literal path exists in the DB, suffix matching must not run.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "author_a", "merged");
    insert_file_change(&conn, 1, 1, "README.md", "modified");
    insert_file_change(&conn, 1, 1, "docs/README.md", "modified");

    // "README.md" is an exact hit, so no ambiguity despite docs/README.md.
    let pq = build_path_query(&conn, "README.md", None).unwrap();
    assert!(!pq.is_prefix);
    assert_eq!(pq.value, "README.md");
}
|
|
|
|
#[test]
fn test_expert_scoring_weights_are_configurable() {
    // Default (author-heavy) weights rank the author first; a reviewer-heavy
    // override must flip the ordering.
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 100, "the_author", "merged");
    insert_file_change(&conn, 1, 1, "src/app.rs", "modified");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "the_reviewer", "src/app.rs", "lgtm");

    // Defaults (author=25, reviewer=10) → author on top.
    let default_run =
        query_expert(&conn, "src/app.rs", None, 0, 20, &default_scoring(), false).unwrap();
    assert_eq!(default_run.experts[0].username, "the_author");

    // Flipped weights → reviewer on top.
    let reviewer_heavy = ScoringConfig {
        author_weight: 5,
        reviewer_weight: 30,
        note_bonus: 1,
    };
    let flipped_run =
        query_expert(&conn, "src/app.rs", None, 0, 20, &reviewer_heavy, false).unwrap();
    assert_eq!(flipped_run.experts[0].username, "the_reviewer");
}
|
|
|
|
#[test]
fn test_expert_mr_refs() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 891, "author_a", "merged");
    insert_mr(&conn, 2, 1, 847, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_discussion(&conn, 2, 1, Some(2), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/auth/login.rs", "note1");
    insert_diffnote(&conn, 2, 2, 1, "reviewer_b", "src/auth/login.rs", "note2");

    let res =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), false).unwrap();
    let by_name = |name: &str| res.experts.iter().find(|e| e.username == name).unwrap();

    // Both the reviewer and the author should carry refs to both MRs.
    for name in ["reviewer_b", "author_a"] {
        let expert = by_name(name);
        for mr in ["team/backend!847", "team/backend!891"] {
            assert!(expert.mr_refs.contains(&mr.to_string()));
        }
        assert_eq!(expert.mr_refs_total, 2);
    }
    // Well under the per-user cap, so nothing is truncated.
    assert!(!by_name("reviewer_b").mr_refs_truncated);
}
|
|
|
|
#[test]
fn test_expert_mr_refs_multi_project() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_project(&conn, 2, "team/frontend");
    // Identical iid (100) in two different projects.
    insert_mr(&conn, 1, 1, 100, "author_a", "opened");
    insert_mr(&conn, 2, 2, 100, "author_a", "opened");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_discussion(&conn, 2, 2, Some(2), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_x", "src/auth/login.rs", "review");
    insert_diffnote(&conn, 2, 2, 2, "reviewer_x", "src/auth/login.rs", "review");

    let res =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), false).unwrap();
    let rx = res
        .experts
        .iter()
        .find(|e| e.username == "reviewer_x")
        .unwrap();
    // The project path disambiguates the shared iid into two distinct refs.
    assert_eq!(rx.mr_refs_total, 2);
    assert!(rx.mr_refs.contains(&"team/backend!100".to_string()));
    assert!(rx.mr_refs.contains(&"team/frontend!100".to_string()));
}
|
|
|
|
#[test]
fn test_expert_detail_mode() {
    let conn = setup_test_db();
    insert_project(&conn, 1, "team/backend");
    insert_mr(&conn, 1, 1, 891, "author_a", "merged");
    insert_mr(&conn, 2, 1, 902, "author_a", "merged");
    insert_discussion(&conn, 1, 1, Some(1), None, true, false);
    insert_discussion(&conn, 2, 1, Some(2), None, true, false);
    insert_diffnote(&conn, 1, 1, 1, "reviewer_b", "src/auth/login.rs", "note1");
    insert_diffnote(&conn, 2, 1, 1, "reviewer_b", "src/auth/login.rs", "note2");
    insert_diffnote(&conn, 3, 2, 1, "reviewer_b", "src/auth/session.rs", "note3");

    // detail = false → no per-MR breakdowns at all.
    let plain =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), false).unwrap();
    for expert in &plain.experts {
        assert!(expert.details.is_none());
    }

    // detail = true → breakdowns populated.
    let detailed =
        query_expert(&conn, "src/auth/", None, 0, 20, &default_scoring(), true).unwrap();
    let by_name = |name: &str| {
        detailed
            .experts
            .iter()
            .find(|e| e.username == name)
            .unwrap()
    };

    // reviewer_b's rows carry a reviewer-side role and project-prefixed refs.
    let details = by_name("reviewer_b").details.as_ref().unwrap();
    assert!(!details.is_empty());
    for d in details {
        assert!(
            d.role == "R" || d.role == "A+R",
            "role should be R or A+R, got {}",
            d.role
        );
        assert!(d.mr_ref.starts_with("team/backend!"));
    }

    // author_a's rows carry an author-side role.
    let author_details = by_name("author_a").details.as_ref().unwrap();
    assert!(!author_details.is_empty());
    for d in author_details {
        assert!(
            d.role == "A" || d.role == "A+R",
            "role should be A or A+R, got {}",
            d.role
        );
    }
}
|
|
}
|