Fix two asymmetries between the main expert scoring query
(build_expert_sql_v2) and the auxiliary queries (query_expert_details
and query_overlap):
1. Missing old_path matching: The main scoring query correctly
searches both position_new_path AND position_old_path (via UNION ALL
branches) for DiffNotes, and both new_path AND old_path for
mr_file_changes. However, query_expert_details and query_overlap
only checked position_new_path / fc.new_path. This caused --detail
mode and overlap mode to silently drop activity on renamed files
that the main scoring correctly captured — the score and detail
table wouldn't match.
2. State filter mismatch: The main query uses
m.state IN ('opened','merged','closed') with a closed_mr_multiplier
to downweight closed MRs. The detail and overlap queries used
m.state IN ('opened','merged'), completely excluding closed MRs.
This meant detail couldn't fully explain an expert's score.
Fix: Add OR clauses for old_path in all signal branches of both
queries, and include 'closed' in the state filter. The INDEXED BY
hints are removed from the auxiliary queries (they use OR across path
columns), which is acceptable since these run once per command.
Also imports build_path_query, normalize_repo_path, and PathQuery
from the new core::path_resolver module, removing the previously
duplicated private functions (~261 lines deleted).
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2581 lines
89 KiB
Rust
2581 lines
89 KiB
Rust
use console::style;
|
|
use rusqlite::Connection;
|
|
use serde::Serialize;
|
|
use std::collections::{HashMap, HashSet};
|
|
|
|
use crate::Config;
|
|
use crate::cli::WhoArgs;
|
|
use crate::cli::robot::RobotMeta;
|
|
use crate::core::config::ScoringConfig;
|
|
use crate::core::db::create_connection;
|
|
use crate::core::error::{LoreError, Result};
|
|
use crate::core::path_resolver::{PathQuery, build_path_query, normalize_repo_path};
|
|
#[cfg(test)]
|
|
use crate::core::path_resolver::{SuffixResult, escape_like, suffix_probe};
|
|
use crate::core::paths::get_db_path;
|
|
use crate::core::project::resolve_project;
|
|
use crate::core::time::{ms_to_iso, now_ms, parse_since, parse_since_from};
|
|
|
|
// ─── Mode Discrimination ────────────────────────────────────────────────────
|
|
|
|
/// Determines which query mode to run based on args.
///
/// Path variants own their strings because path normalization produces new `String`s.
/// Username variants borrow from args since no normalization is needed.
enum WhoMode<'a> {
    /// `lore who <file-path>` OR `lore who --path <path>` — expert lookup for a path.
    Expert { path: String },
    /// `lore who <username>` — workload summary for one user.
    Workload { username: &'a str },
    /// `lore who <username> --reviews` — review-activity breakdown for one user.
    Reviews { username: &'a str },
    /// `lore who --active` — recently active unresolved discussions.
    Active,
    /// `lore who --overlap <path>` — users whose activity overlaps on a path.
    Overlap { path: String },
}
|
|
|
|
fn resolve_mode<'a>(args: &'a WhoArgs) -> Result<WhoMode<'a>> {
|
|
// Explicit --path flag always wins (handles root files like README.md,
|
|
// LICENSE, Makefile -- anything without a / that can't be auto-detected)
|
|
if let Some(p) = &args.path {
|
|
return Ok(WhoMode::Expert {
|
|
path: normalize_repo_path(p),
|
|
});
|
|
}
|
|
if args.active {
|
|
return Ok(WhoMode::Active);
|
|
}
|
|
if let Some(path) = &args.overlap {
|
|
return Ok(WhoMode::Overlap {
|
|
path: normalize_repo_path(path),
|
|
});
|
|
}
|
|
if let Some(target) = &args.target {
|
|
let clean = target.strip_prefix('@').unwrap_or(target);
|
|
if args.reviews {
|
|
return Ok(WhoMode::Reviews { username: clean });
|
|
}
|
|
// Disambiguation: if target contains '/', it's a file path.
|
|
// GitLab usernames never contain '/'.
|
|
// Root files (no '/') require --path.
|
|
if clean.contains('/') {
|
|
return Ok(WhoMode::Expert {
|
|
path: normalize_repo_path(clean),
|
|
});
|
|
}
|
|
return Ok(WhoMode::Workload { username: clean });
|
|
}
|
|
Err(LoreError::Other(
|
|
"Provide a username, file path, --active, or --overlap <path>.\n\n\
|
|
Examples:\n \
|
|
lore who src/features/auth/\n \
|
|
lore who @username\n \
|
|
lore who --active\n \
|
|
lore who --overlap src/features/\n \
|
|
lore who --path README.md\n \
|
|
lore who --path Makefile"
|
|
.to_string(),
|
|
))
|
|
}
|
|
|
|
// ─── Result Types ────────────────────────────────────────────────────────────
|
|
|
|
/// Top-level run result: carries resolved inputs + the mode-specific result.
pub struct WhoRun {
    /// Inputs after resolution (mode name, project scope, time window, limit).
    pub resolved_input: WhoResolvedInput,
    /// The mode-specific payload — one [`WhoResult`] variant per mode.
    pub result: WhoResult,
}
|
|
|
|
/// Resolved query parameters -- computed once, used for robot JSON reproducibility.
pub struct WhoResolvedInput {
    /// Mode name: "expert", "workload", "reviews", "active", or "overlap".
    pub mode: String,
    /// Numeric project scope when --project was given, else None.
    pub project_id: Option<i64>,
    /// `path_with_namespace` looked up for `project_id`.
    pub project_path: Option<String>,
    /// Window start in epoch milliseconds; None when no window applies
    /// (workload mode without --since).
    pub since_ms: Option<i64>,
    /// ISO-8601 rendering of `since_ms` for display.
    pub since_iso: Option<String>,
    /// "default" (mode default applied), "explicit" (user provided --since), "none" (no window)
    pub since_mode: String,
    /// Row limit exactly as given on the CLI.
    pub limit: u16,
}
|
|
|
|
/// Top-level result enum -- one variant per mode.
pub enum WhoResult {
    /// Path expert ranking.
    Expert(ExpertResult),
    /// Per-user workload summary.
    Workload(WorkloadResult),
    /// Per-user review-activity breakdown.
    Reviews(ReviewsResult),
    /// Recently active unresolved discussions.
    Active(ActiveResult),
    /// Users overlapping on a path.
    Overlap(OverlapResult),
}
|
|
|
|
// --- Expert ---
|
|
|
|
/// Expert-mode result: ranked experts for a resolved path query.
pub struct ExpertResult {
    /// The queried path as shown to the user (raw input for prefix queries,
    /// resolved path for exact/suffix-resolved queries).
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    /// Experts sorted by decayed score (descending), truncated to the limit.
    pub experts: Vec<Expert>,
    /// True when more experts matched than the limit allowed.
    pub truncated: bool,
}
|
|
|
|
/// One ranked expert row for expert mode.
pub struct Expert {
    pub username: String,
    /// Composite decayed score, rounded to the nearest integer.
    pub score: i64,
    /// Unrounded f64 score (only populated when explain_score is set).
    pub score_raw: Option<f64>,
    /// Per-component score breakdown (only populated when explain_score is set).
    pub components: Option<ScoreComponents>,
    /// Number of distinct MRs with reviewer-side activity by this user.
    pub review_mr_count: u32,
    /// Total DiffNotes counted via note_group signals.
    pub review_note_count: u32,
    /// Number of distinct MRs this user authored that touched the path.
    pub author_mr_count: u32,
    /// Timestamp (epoch ms) of the most recent signal for this user.
    pub last_seen_ms: i64,
    /// Stable MR references like "group/project!123"
    pub mr_refs: Vec<String>,
    /// Total refs before truncation to MAX_MR_REFS_PER_USER.
    pub mr_refs_total: u32,
    pub mr_refs_truncated: bool,
    /// Per-MR detail breakdown (only populated when --detail is set)
    pub details: Option<Vec<ExpertMrDetail>>,
}
|
|
|
|
/// Per-component score breakdown for explain mode.
///
/// Each field is the decayed, state-weighted sum for one signal class;
/// the total score is their sum.
pub struct ScoreComponents {
    /// Authorship signals (diffnote_author + file_author).
    pub author: f64,
    /// Assigned reviewers who also left DiffNotes on the path.
    pub reviewer_participated: f64,
    /// Assigned reviewers with no DiffNotes on the path.
    pub reviewer_assigned: f64,
    /// DiffNote groups, with log2(1+count) diminishing returns per MR.
    pub notes: f64,
}
|
|
|
|
/// Per-MR breakdown row for --detail output.
#[derive(Clone)]
pub struct ExpertMrDetail {
    /// Stable reference like "group/project!123".
    pub mr_ref: String,
    pub title: String,
    /// "R", "A", or "A+R"
    pub role: String,
    /// DiffNotes left by the user on this MR's matched path(s).
    pub note_count: u32,
    /// Most recent activity on this MR (epoch ms).
    pub last_activity_ms: i64,
}
|
|
|
|
// --- Workload ---
|
|
|
|
/// Workload-mode result: what one user currently has on their plate.
pub struct WorkloadResult {
    pub username: String,
    pub assigned_issues: Vec<WorkloadIssue>,
    pub authored_mrs: Vec<WorkloadMr>,
    pub reviewing_mrs: Vec<WorkloadMr>,
    pub unresolved_discussions: Vec<WorkloadDiscussion>,
    // Per-section truncation flags: true when the section hit the row limit.
    pub assigned_issues_truncated: bool,
    pub authored_mrs_truncated: bool,
    pub reviewing_mrs_truncated: bool,
    pub unresolved_discussions_truncated: bool,
}
|
|
|
|
/// One assigned issue in workload output.
pub struct WorkloadIssue {
    pub iid: i64,
    /// Canonical reference: `group/project#iid`
    pub ref_: String,
    pub title: String,
    pub project_path: String,
    // Timestamp — presumably epoch ms like the *_ms fields elsewhere in this
    // module; TODO confirm against query_workload.
    pub updated_at: i64,
}
|
|
|
|
/// One merge request (authored or reviewing) in workload output.
pub struct WorkloadMr {
    pub iid: i64,
    /// Canonical reference: `group/project!iid`
    pub ref_: String,
    pub title: String,
    /// Whether the MR is marked as a draft.
    pub draft: bool,
    pub project_path: String,
    /// MR author; None when the author is unknown in the local DB.
    pub author_username: Option<String>,
    // Timestamp — presumably epoch ms; TODO confirm against query_workload.
    pub updated_at: i64,
}
|
|
|
|
/// One unresolved discussion the user participates in (workload output).
pub struct WorkloadDiscussion {
    /// Entity kind the discussion hangs off (e.g. MR vs issue).
    pub entity_type: String,
    pub entity_iid: i64,
    /// Canonical reference: `group/project!iid` or `group/project#iid`
    pub ref_: String,
    pub entity_title: String,
    pub project_path: String,
    /// Timestamp of the newest note in the discussion.
    pub last_note_at: i64,
}
|
|
|
|
// --- Reviews ---
|
|
|
|
/// Reviews-mode result: a user's review activity broken down by category.
pub struct ReviewsResult {
    pub username: String,
    /// Total DiffNotes authored by the user in the window.
    pub total_diffnotes: u32,
    /// How many of those notes fell into a named category.
    pub categorized_count: u32,
    /// Distinct MRs the user reviewed.
    pub mrs_reviewed: u32,
    pub categories: Vec<ReviewCategory>,
}
|
|
|
|
/// One category bucket in reviews output.
pub struct ReviewCategory {
    pub name: String,
    /// Notes in this category.
    pub count: u32,
    /// Share of categorized notes, as a percentage.
    pub percentage: f64,
}
|
|
|
|
// --- Active ---
|
|
|
|
/// Active-mode result: recently active unresolved discussions.
pub struct ActiveResult {
    pub discussions: Vec<ActiveDiscussion>,
    /// Count of unresolved discussions *within the time window*, not total across all time.
    pub total_unresolved_in_window: u32,
    /// True when more discussions existed than the limit allowed.
    pub truncated: bool,
}
|
|
|
|
/// One unresolved discussion row in active-mode output.
pub struct ActiveDiscussion {
    pub discussion_id: i64,
    /// Entity kind the discussion hangs off (e.g. MR vs issue).
    pub entity_type: String,
    pub entity_iid: i64,
    pub entity_title: String,
    pub project_path: String,
    /// Timestamp of the newest note in the discussion.
    pub last_note_at: i64,
    pub note_count: u32,
    /// Usernames participating, possibly truncated to a display cap.
    pub participants: Vec<String>,
    /// Total participants before truncation.
    pub participants_total: u32,
    pub participants_truncated: bool,
}
|
|
|
|
// --- Overlap ---
|
|
|
|
/// Overlap-mode result: users with recent activity on a path.
pub struct OverlapResult {
    /// The queried path as shown to the user.
    pub path_query: String,
    /// "exact" or "prefix" -- how the path was matched in SQL.
    pub path_match: String,
    pub users: Vec<OverlapUser>,
    /// True when more users matched than the limit allowed.
    pub truncated: bool,
}
|
|
|
|
/// One user row in overlap output.
pub struct OverlapUser {
    pub username: String,
    /// Touches as MR author on the path.
    pub author_touch_count: u32,
    /// Touches as reviewer on the path.
    pub review_touch_count: u32,
    /// Combined touch count (author + review) — presumably; confirm against query_overlap.
    pub touch_count: u32,
    pub last_seen_at: i64,
    /// Stable MR references like "group/project!123"
    pub mr_refs: Vec<String>,
    /// Total refs before truncation to MAX_MR_REFS_PER_USER.
    pub mr_refs_total: u32,
    pub mr_refs_truncated: bool,
}
|
|
|
|
/// Maximum MR references to retain per user in output (shared across modes).
|
|
const MAX_MR_REFS_PER_USER: usize = 50;
|
|
|
|
// ─── Entry Point ─────────────────────────────────────────────────────────────
|
|
|
|
/// Main entry point. Resolves mode + resolved inputs once, then dispatches.
///
/// Opens the database, resolves the optional `--project` scope to an id and
/// path, determines the query mode from args, then runs the mode-specific
/// query. The resolved inputs (mode, project, time window, limit) are
/// captured in [`WhoResolvedInput`] so robot/JSON output can reproduce
/// exactly what was queried.
///
/// Default time windows per mode: expert 24m, reviews 6m, active 7d,
/// overlap 30d; workload has no default window.
pub fn run_who(config: &Config, args: &WhoArgs) -> Result<WhoRun> {
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;

    // Resolve --project (if given) to a numeric id, then to its display path.
    let project_id = args
        .project
        .as_deref()
        .map(|p| resolve_project(&conn, p))
        .transpose()?;

    let project_path = project_id
        .map(|id| lookup_project_path(&conn, id))
        .transpose()?;

    let mode = resolve_mode(args)?;
    validate_mode_flags(&mode, args)?;

    // since_mode semantics:
    // - expert/reviews/active/overlap: default window applies if args.since is None -> "default"
    // - workload: no default window; args.since None => "none"
    let since_mode_for_defaulted = if args.since.is_some() {
        "explicit"
    } else {
        "default"
    };
    let since_mode_for_workload = if args.since.is_some() {
        "explicit"
    } else {
        "none"
    };

    match mode {
        WhoMode::Expert { path } => {
            // Compute as_of first so --since durations are relative to it.
            let as_of_ms = match &args.as_of {
                Some(v) => parse_since(v).ok_or_else(|| {
                    LoreError::Other(format!(
                        "Invalid --as-of value: '{v}'. Use a duration (30d, 6m) or date (2024-01-15)"
                    ))
                })?,
                None => now_ms(),
            };
            // --all-history disables the window entirely (since epoch 0).
            let since_ms = if args.all_history {
                0
            } else {
                resolve_since_from(args.since.as_deref(), "24m", as_of_ms)?
            };
            let limit = usize::from(args.limit);
            let result = query_expert(
                &conn,
                &path,
                project_id,
                since_ms,
                as_of_ms,
                limit,
                &config.scoring,
                args.detail,
                args.explain_score,
                args.include_bots,
            )?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "expert".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Expert(result),
            })
        }
        WhoMode::Workload { username } => {
            // Workload has no default window: only an explicit --since applies.
            let since_ms = args
                .since
                .as_deref()
                .map(resolve_since_required)
                .transpose()?;
            let limit = usize::from(args.limit);
            let result = query_workload(&conn, username, project_id, since_ms, limit)?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "workload".to_string(),
                    project_id,
                    project_path,
                    since_ms,
                    since_iso: since_ms.map(ms_to_iso),
                    since_mode: since_mode_for_workload.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Workload(result),
            })
        }
        WhoMode::Reviews { username } => {
            let since_ms = resolve_since(args.since.as_deref(), "6m")?;
            let result = query_reviews(&conn, username, project_id, since_ms)?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "reviews".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Reviews(result),
            })
        }
        WhoMode::Active => {
            let since_ms = resolve_since(args.since.as_deref(), "7d")?;
            let limit = usize::from(args.limit);
            let result = query_active(&conn, project_id, since_ms, limit)?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "active".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Active(result),
            })
        }
        WhoMode::Overlap { path } => {
            let since_ms = resolve_since(args.since.as_deref(), "30d")?;
            let limit = usize::from(args.limit);
            let result = query_overlap(&conn, &path, project_id, since_ms, limit)?;
            Ok(WhoRun {
                resolved_input: WhoResolvedInput {
                    mode: "overlap".to_string(),
                    project_id,
                    project_path,
                    since_ms: Some(since_ms),
                    since_iso: Some(ms_to_iso(since_ms)),
                    since_mode: since_mode_for_defaulted.to_string(),
                    limit: args.limit,
                },
                result: WhoResult::Overlap(result),
            })
        }
    }
}
|
|
|
|
fn validate_mode_flags(mode: &WhoMode<'_>, args: &WhoArgs) -> Result<()> {
|
|
if args.detail && !matches!(mode, WhoMode::Expert { .. }) {
|
|
return Err(LoreError::Other(
|
|
"--detail is only supported in expert mode (`lore who --path <path>` or `lore who <path/with/slash>`).".to_string(),
|
|
));
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
|
|
|
/// Look up the project path for a resolved project ID.
|
|
fn lookup_project_path(conn: &Connection, project_id: i64) -> Result<String> {
|
|
conn.query_row(
|
|
"SELECT path_with_namespace FROM projects WHERE id = ?1",
|
|
rusqlite::params![project_id],
|
|
|row| row.get(0),
|
|
)
|
|
.map_err(|e| LoreError::Other(format!("Failed to look up project path: {e}")))
|
|
}
|
|
|
|
/// Parse --since with a default fallback.
|
|
fn resolve_since(input: Option<&str>, default: &str) -> Result<i64> {
|
|
let s = input.unwrap_or(default);
|
|
parse_since(s).ok_or_else(|| {
|
|
LoreError::Other(format!(
|
|
"Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
|
))
|
|
})
|
|
}
|
|
|
|
/// Parse --since with a default fallback, relative to a reference timestamp.
|
|
/// Durations (7d, 2w, 6m) are computed from `reference_ms` instead of now.
|
|
fn resolve_since_from(input: Option<&str>, default: &str, reference_ms: i64) -> Result<i64> {
|
|
let s = input.unwrap_or(default);
|
|
parse_since_from(s, reference_ms).ok_or_else(|| {
|
|
LoreError::Other(format!(
|
|
"Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
|
))
|
|
})
|
|
}
|
|
|
|
/// Parse --since without a default (returns error if invalid).
|
|
fn resolve_since_required(input: &str) -> Result<i64> {
|
|
parse_since(input).ok_or_else(|| {
|
|
LoreError::Other(format!(
|
|
"Invalid --since value: '{input}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
|
|
))
|
|
})
|
|
}
|
|
|
|
// ─── Path Query Construction ─────────────────────────────────────────────────
|
|
|
|
// ─── Scoring Helpers ─────────────────────────────────────────────────────────
|
|
|
|
/// Exponential half-life decay: `2^(-days / half_life)`.
///
/// Returns the fraction of an original signal retained after `elapsed_ms`
/// milliseconds under a `half_life_days` decay period: 1.0 at `elapsed = 0`,
/// 0.5 after exactly one half-life, halving again each further period.
///
/// Edge cases: a zero `half_life_days` yields `0.0` (avoids division by
/// zero); a negative `elapsed_ms` (future event) is clamped to zero elapsed
/// time, i.e. full weight 1.0.
fn half_life_decay(elapsed_ms: i64, half_life_days: u32) -> f64 {
    let half_life = f64::from(half_life_days);
    if half_life <= 0.0 {
        // Degenerate config: decay everything to nothing instead of dividing by zero.
        return 0.0;
    }
    // Convert to days, clamping negative elapsed time to zero.
    let elapsed_days = (elapsed_ms as f64 / 86_400_000.0).max(0.0);
    2.0_f64.powf(-elapsed_days / half_life)
}
|
|
|
|
// ─── Query: Expert Mode ─────────────────────────────────────────────────────
|
|
|
|
/// Expert mode: rank users by decayed contribution score for a path.
///
/// Flow:
/// 1. Resolve the path into a `PathQuery` (exact vs prefix match).
/// 2. Run the v2 CTE query (`build_expert_sql_v2`) to fetch raw signal rows.
/// 3. Accumulate rows per user, then score in Rust with per-signal half-life
///    decay. Summation order is made deterministic by sorting on `mr_id`
///    before the f64 accumulation.
/// 4. Sort (score DESC, last-seen DESC, username ASC), truncate to `limit`,
///    attach MR refs, and optionally attach per-MR `--detail` rows.
#[allow(clippy::too_many_arguments)]
fn query_expert(
    conn: &Connection,
    path: &str,
    project_id: Option<i64>,
    since_ms: i64,
    as_of_ms: i64,
    limit: usize,
    scoring: &ScoringConfig,
    detail: bool,
    explain_score: bool,
    include_bots: bool,
) -> Result<ExpertResult> {
    let pq = build_path_query(conn, path, project_id)?;

    let sql = build_expert_sql_v2(pq.is_prefix);
    let mut stmt = conn.prepare_cached(&sql)?;

    // Params: ?1=path, ?2=since_ms, ?3=project_id, ?4=as_of_ms,
    // ?5=closed_mr_multiplier, ?6=reviewer_min_note_chars
    let rows = stmt.query_map(
        rusqlite::params![
            pq.value,
            since_ms,
            project_id,
            as_of_ms,
            scoring.closed_mr_multiplier,
            scoring.reviewer_min_note_chars,
        ],
        |row| {
            Ok(SignalRow {
                username: row.get(0)?,
                signal: row.get(1)?,
                mr_id: row.get(2)?,
                qty: row.get(3)?,
                ts: row.get(4)?,
                state_mult: row.get(5)?,
            })
        },
    )?;

    // Per-user accumulator keyed by username.
    let mut accum: HashMap<String, UserAccum> = HashMap::new();

    for row_result in rows {
        let r = row_result?;
        let entry = accum
            .entry(r.username.clone())
            .or_insert_with(|| UserAccum {
                contributions: Vec::new(),
                last_seen_ms: 0,
                mr_ids_author: HashSet::new(),
                mr_ids_reviewer: HashSet::new(),
                note_count: 0,
            });

        // Track the newest signal timestamp (used as sort tiebreaker and display).
        if r.ts > entry.last_seen_ms {
            entry.last_seen_ms = r.ts;
        }

        // Update the summary sets/counters per signal class.
        match r.signal.as_str() {
            "diffnote_author" | "file_author" => {
                entry.mr_ids_author.insert(r.mr_id);
            }
            "file_reviewer_participated" | "file_reviewer_assigned" => {
                entry.mr_ids_reviewer.insert(r.mr_id);
            }
            "note_group" => {
                entry.note_count += r.qty as u32;
                // DiffNote reviewers are also reviewer activity.
                entry.mr_ids_reviewer.insert(r.mr_id);
            }
            _ => {}
        }

        // Every row also contributes to the decayed score, regardless of class.
        entry.contributions.push(Contribution {
            signal: r.signal,
            mr_id: r.mr_id,
            qty: r.qty,
            ts: r.ts,
            state_mult: r.state_mult,
        });
    }

    // Bot filtering: exclude configured bot usernames (case-insensitive).
    if !include_bots && !scoring.excluded_usernames.is_empty() {
        let excluded: HashSet<String> = scoring
            .excluded_usernames
            .iter()
            .map(|u| u.to_lowercase())
            .collect();
        accum.retain(|username, _| !excluded.contains(&username.to_lowercase()));
    }

    // Compute decayed scores with deterministic ordering.
    let mut scored: Vec<ScoredUser> = accum
        .into_iter()
        .map(|(username, mut ua)| {
            // Sort contributions by mr_id ASC for deterministic f64 summation.
            ua.contributions.sort_by_key(|c| c.mr_id);

            // One accumulator per score component (see ScoreComponents).
            let mut comp_author = 0.0_f64;
            let mut comp_reviewer_participated = 0.0_f64;
            let mut comp_reviewer_assigned = 0.0_f64;
            let mut comp_notes = 0.0_f64;

            for c in &ua.contributions {
                // Age of the signal relative to the as-of point; each signal
                // class decays with its own half-life and weight from config.
                let elapsed = as_of_ms - c.ts;
                match c.signal.as_str() {
                    "diffnote_author" | "file_author" => {
                        let decay = half_life_decay(elapsed, scoring.author_half_life_days);
                        comp_author += scoring.author_weight as f64 * decay * c.state_mult;
                    }
                    "file_reviewer_participated" => {
                        let decay = half_life_decay(elapsed, scoring.reviewer_half_life_days);
                        comp_reviewer_participated +=
                            scoring.reviewer_weight as f64 * decay * c.state_mult;
                    }
                    "file_reviewer_assigned" => {
                        let decay =
                            half_life_decay(elapsed, scoring.reviewer_assignment_half_life_days);
                        comp_reviewer_assigned +=
                            scoring.reviewer_assignment_weight as f64 * decay * c.state_mult;
                    }
                    "note_group" => {
                        let decay = half_life_decay(elapsed, scoring.note_half_life_days);
                        // Diminishing returns: log2(1 + count) per MR.
                        let note_value = (1.0 + c.qty as f64).log2();
                        comp_notes += scoring.note_bonus as f64 * note_value * decay * c.state_mult;
                    }
                    _ => {}
                }
            }

            let raw_score =
                comp_author + comp_reviewer_participated + comp_reviewer_assigned + comp_notes;
            ScoredUser {
                username,
                raw_score,
                components: ScoreComponents {
                    author: comp_author,
                    reviewer_participated: comp_reviewer_participated,
                    reviewer_assigned: comp_reviewer_assigned,
                    notes: comp_notes,
                },
                accum: ua,
            }
        })
        .collect();

    // Sort: raw_score DESC, last_seen DESC, username ASC (deterministic tiebreaker).
    scored.sort_by(|a, b| {
        b.raw_score
            .partial_cmp(&a.raw_score)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| b.accum.last_seen_ms.cmp(&a.accum.last_seen_ms))
            .then_with(|| a.username.cmp(&b.username))
    });

    let truncated = scored.len() > limit;
    scored.truncate(limit);

    // Build Expert structs with MR refs.
    let mut experts: Vec<Expert> = scored
        .into_iter()
        .map(|su| {
            // Refs are sorted for stable output, then capped per user.
            let mut mr_refs = build_mr_refs_for_user(conn, &su.accum);
            mr_refs.sort();
            let mr_refs_total = mr_refs.len() as u32;
            let mr_refs_truncated = mr_refs.len() > MAX_MR_REFS_PER_USER;
            if mr_refs_truncated {
                mr_refs.truncate(MAX_MR_REFS_PER_USER);
            }
            Expert {
                username: su.username,
                score: su.raw_score.round() as i64,
                // Raw score and component breakdown are only surfaced in
                // --explain-score mode.
                score_raw: if explain_score {
                    Some(su.raw_score)
                } else {
                    None
                },
                components: if explain_score {
                    Some(su.components)
                } else {
                    None
                },
                review_mr_count: su.accum.mr_ids_reviewer.len() as u32,
                review_note_count: su.accum.note_count,
                author_mr_count: su.accum.mr_ids_author.len() as u32,
                last_seen_ms: su.accum.last_seen_ms,
                mr_refs,
                mr_refs_total,
                mr_refs_truncated,
                details: None,
            }
        })
        .collect();

    // Populate per-MR detail when --detail is requested
    if detail && !experts.is_empty() {
        let details_map = query_expert_details(conn, &pq, &experts, since_ms, project_id)?;
        for expert in &mut experts {
            expert.details = details_map.get(&expert.username).cloned();
        }
    }

    Ok(ExpertResult {
        path_query: if pq.is_prefix {
            // Use raw input (unescaped) for display — pq.value has LIKE escaping.
            path.trim_end_matches('/').to_string()
        } else {
            // For exact matches (including suffix-resolved), show the resolved path.
            pq.value.clone()
        },
        path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
        experts,
        truncated,
    })
}
|
|
|
|
/// Raw signal row from the v2 CTE query.
struct SignalRow {
    username: String,
    /// Signal class: "diffnote_author", "file_author",
    /// "file_reviewer_participated", "file_reviewer_assigned", or "note_group".
    signal: String,
    mr_id: i64,
    /// Quantity: 1 for MR-level signals, note count for "note_group".
    qty: i64,
    /// Signal timestamp (epoch ms).
    ts: i64,
    /// State multiplier: closed_mr_multiplier for closed MRs, else 1.0.
    state_mult: f64,
}
|
|
|
|
/// Per-user signal accumulator used during Rust-side scoring.
struct UserAccum {
    /// Every signal row seen for this user (scored with decay later).
    contributions: Vec<Contribution>,
    /// Newest signal timestamp observed (epoch ms).
    last_seen_ms: i64,
    /// Distinct MRs with authorship signals.
    mr_ids_author: HashSet<i64>,
    /// Distinct MRs with reviewer-side signals (incl. note_group).
    mr_ids_reviewer: HashSet<i64>,
    /// Total DiffNotes from note_group signals.
    note_count: u32,
}
|
|
|
|
/// A single contribution to a user's score (one signal row).
struct Contribution {
    /// Signal class (see `SignalRow::signal`).
    signal: String,
    mr_id: i64,
    /// 1 for MR-level signals; note count for "note_group".
    qty: i64,
    /// Signal timestamp (epoch ms); decay is computed from `as_of_ms - ts`.
    ts: i64,
    /// State multiplier applied on top of the decayed weight.
    state_mult: f64,
}
|
|
|
|
/// Intermediate scored user before building Expert structs.
struct ScoredUser {
    username: String,
    /// Unrounded composite score (sum of all components).
    raw_score: f64,
    /// Per-component breakdown, surfaced only in explain mode.
    components: ScoreComponents,
    /// The accumulator this score was derived from (kept for counts/refs).
    accum: UserAccum,
}
|
|
|
|
/// Build MR refs (e.g. "group/project!123") for a user from their accumulated MR IDs.
///
/// Best-effort: any SQL failure yields an empty Vec instead of an error —
/// refs are presentational, not part of the score.
fn build_mr_refs_for_user(conn: &Connection, ua: &UserAccum) -> Vec<String> {
    // Union of every MR id seen for this user. As accumulated in
    // `query_expert`, the author/reviewer sets are subsets of the
    // contribution ids, but chaining all three keeps this robust if the
    // accumulation ever changes.
    let all_mr_ids: HashSet<i64> = ua
        .mr_ids_author
        .iter()
        .chain(ua.mr_ids_reviewer.iter())
        .copied()
        .chain(ua.contributions.iter().map(|c| c.mr_id))
        .collect();

    if all_mr_ids.is_empty() {
        return Vec::new();
    }

    // One positional placeholder per id (?1, ?2, ...).
    let placeholders: Vec<String> = (1..=all_mr_ids.len()).map(|i| format!("?{i}")).collect();
    let sql = format!(
        "SELECT p.path_with_namespace || '!' || CAST(m.iid AS TEXT)
         FROM merge_requests m
         JOIN projects p ON m.project_id = p.id
         WHERE m.id IN ({})",
        placeholders.join(",")
    );

    // NOTE(review): prepare (not prepare_cached) — the placeholder count
    // varies per user, so cache hits would be rare anyway.
    let mut stmt = match conn.prepare(&sql) {
        Ok(s) => s,
        Err(_) => return Vec::new(),
    };

    // Bind ids in sorted order for deterministic queries.
    let mut mr_ids_vec: Vec<i64> = all_mr_ids.into_iter().collect();
    mr_ids_vec.sort_unstable();
    let params: Vec<&dyn rusqlite::types::ToSql> = mr_ids_vec
        .iter()
        .map(|id| id as &dyn rusqlite::types::ToSql)
        .collect();

    // Silently drop rows (and the whole query) on error — best-effort.
    stmt.query_map(&*params, |row| row.get::<_, String>(0))
        .map(|rows| rows.filter_map(|r| r.ok()).collect())
        .unwrap_or_default()
}
|
|
|
|
/// Build the CTE-based expert SQL for time-decay scoring (v2).
///
/// Returns raw signal rows `(username, signal, mr_id, qty, ts, state_mult)` that
/// Rust aggregates with per-signal decay and `log2(1+count)` for note groups.
///
/// Both DiffNotes and file changes are matched on the new path AND the old
/// path (two UNION ALL branches each, deduped), so activity on renamed files
/// is captured. Closed MRs are included and downweighted via ?5.
///
/// Parameters: `?1` = path, `?2` = since_ms, `?3` = project_id (nullable),
/// `?4` = as_of_ms, `?5` = closed_mr_multiplier, `?6` = reviewer_min_note_chars
fn build_expert_sql_v2(is_prefix: bool) -> String {
    // Prefix queries use LIKE with explicit escaping (pq.value is pre-escaped
    // by build_path_query); exact queries use plain equality.
    let path_op = if is_prefix {
        "LIKE ?1 ESCAPE '\\'"
    } else {
        "= ?1"
    };
    // INDEXED BY hints for each branch:
    // - new_path branch: idx_notes_diffnote_path_created (existing)
    // - old_path branch: idx_notes_old_path_author (migration 026)
    format!(
        "
    WITH matched_notes_raw AS (
        -- Branch 1: match on position_new_path
        SELECT n.id, n.discussion_id, n.author_username, n.created_at, n.project_id
        FROM notes n INDEXED BY idx_notes_diffnote_path_created
        WHERE n.note_type = 'DiffNote'
          AND n.is_system = 0
          AND n.author_username IS NOT NULL
          AND n.created_at >= ?2
          AND n.created_at < ?4
          AND (?3 IS NULL OR n.project_id = ?3)
          AND n.position_new_path {path_op}
        UNION ALL
        -- Branch 2: match on position_old_path
        SELECT n.id, n.discussion_id, n.author_username, n.created_at, n.project_id
        FROM notes n INDEXED BY idx_notes_old_path_author
        WHERE n.note_type = 'DiffNote'
          AND n.is_system = 0
          AND n.author_username IS NOT NULL
          AND n.created_at >= ?2
          AND n.created_at < ?4
          AND (?3 IS NULL OR n.project_id = ?3)
          AND n.position_old_path IS NOT NULL
          AND n.position_old_path {path_op}
    ),
    matched_notes AS (
        -- Dedup: prevent double-counting when old_path = new_path (no rename)
        SELECT DISTINCT id, discussion_id, author_username, created_at, project_id
        FROM matched_notes_raw
    ),
    matched_file_changes_raw AS (
        -- Branch 1: match on new_path
        SELECT fc.merge_request_id, fc.project_id
        FROM mr_file_changes fc INDEXED BY idx_mfc_new_path_project_mr
        WHERE (?3 IS NULL OR fc.project_id = ?3)
          AND fc.new_path {path_op}
        UNION ALL
        -- Branch 2: match on old_path
        SELECT fc.merge_request_id, fc.project_id
        FROM mr_file_changes fc INDEXED BY idx_mfc_old_path_project_mr
        WHERE (?3 IS NULL OR fc.project_id = ?3)
          AND fc.old_path IS NOT NULL
          AND fc.old_path {path_op}
    ),
    matched_file_changes AS (
        -- Dedup: prevent double-counting when old_path = new_path (no rename)
        SELECT DISTINCT merge_request_id, project_id
        FROM matched_file_changes_raw
    ),
    mr_activity AS (
        -- Centralized state-aware timestamps and state multiplier.
        -- Scoped to MRs matched by file changes to avoid materializing the full MR table.
        SELECT DISTINCT
            m.id AS mr_id,
            m.author_username,
            m.state,
            CASE
                WHEN m.state = 'merged' THEN COALESCE(m.merged_at, m.created_at)
                WHEN m.state = 'closed' THEN COALESCE(m.closed_at, m.created_at)
                ELSE COALESCE(m.updated_at, m.created_at)
            END AS activity_ts,
            CASE WHEN m.state = 'closed' THEN ?5 ELSE 1.0 END AS state_mult
        FROM merge_requests m
        JOIN matched_file_changes mfc ON mfc.merge_request_id = m.id
        WHERE m.state IN ('opened','merged','closed')
    ),
    reviewer_participation AS (
        -- Precompute which (mr_id, username) pairs have substantive DiffNote participation.
        SELECT DISTINCT d.merge_request_id AS mr_id, mn.author_username AS username
        FROM matched_notes mn
        JOIN discussions d ON mn.discussion_id = d.id
        JOIN notes n_body ON mn.id = n_body.id
        WHERE d.merge_request_id IS NOT NULL
          AND LENGTH(TRIM(COALESCE(n_body.body, ''))) >= ?6
    ),
    raw AS (
        -- Signal 1: DiffNote reviewer (individual notes for note_cnt)
        SELECT mn.author_username AS username, 'diffnote_reviewer' AS signal,
               m.id AS mr_id, mn.id AS note_id, mn.created_at AS seen_at,
               CASE WHEN m.state = 'closed' THEN ?5 ELSE 1.0 END AS state_mult
        FROM matched_notes mn
        JOIN discussions d ON mn.discussion_id = d.id
        JOIN merge_requests m ON d.merge_request_id = m.id
        WHERE (m.author_username IS NULL OR mn.author_username != m.author_username)
          AND m.state IN ('opened','merged','closed')

        UNION ALL

        -- Signal 2: DiffNote MR author
        SELECT m.author_username AS username, 'diffnote_author' AS signal,
               m.id AS mr_id, NULL AS note_id, MAX(mn.created_at) AS seen_at,
               CASE WHEN m.state = 'closed' THEN ?5 ELSE 1.0 END AS state_mult
        FROM merge_requests m
        JOIN discussions d ON d.merge_request_id = m.id
        JOIN matched_notes mn ON mn.discussion_id = d.id
        WHERE m.author_username IS NOT NULL
          AND m.state IN ('opened','merged','closed')
        GROUP BY m.author_username, m.id

        UNION ALL

        -- Signal 3: MR author via file changes (uses mr_activity CTE)
        SELECT a.author_username AS username, 'file_author' AS signal,
               a.mr_id, NULL AS note_id,
               a.activity_ts AS seen_at, a.state_mult
        FROM mr_activity a
        WHERE a.author_username IS NOT NULL
          AND a.activity_ts >= ?2
          AND a.activity_ts < ?4

        UNION ALL

        -- Signal 4a: Reviewer participated (in mr_reviewers AND left DiffNotes on path)
        SELECT r.username AS username, 'file_reviewer_participated' AS signal,
               a.mr_id, NULL AS note_id,
               a.activity_ts AS seen_at, a.state_mult
        FROM mr_activity a
        JOIN mr_reviewers r ON r.merge_request_id = a.mr_id
        JOIN reviewer_participation rp ON rp.mr_id = a.mr_id AND rp.username = r.username
        WHERE r.username IS NOT NULL
          AND (a.author_username IS NULL OR r.username != a.author_username)
          AND a.activity_ts >= ?2
          AND a.activity_ts < ?4

        UNION ALL

        -- Signal 4b: Reviewer assigned-only (in mr_reviewers, NO DiffNotes on path)
        SELECT r.username AS username, 'file_reviewer_assigned' AS signal,
               a.mr_id, NULL AS note_id,
               a.activity_ts AS seen_at, a.state_mult
        FROM mr_activity a
        JOIN mr_reviewers r ON r.merge_request_id = a.mr_id
        LEFT JOIN reviewer_participation rp ON rp.mr_id = a.mr_id AND rp.username = r.username
        WHERE rp.username IS NULL
          AND r.username IS NOT NULL
          AND (a.author_username IS NULL OR r.username != a.author_username)
          AND a.activity_ts >= ?2
          AND a.activity_ts < ?4
    ),
    aggregated AS (
        -- MR-level signals: 1 row per (username, signal_class, mr_id) with MAX(ts)
        SELECT username, signal, mr_id, 1 AS qty, MAX(seen_at) AS ts, MAX(state_mult) AS state_mult
        FROM raw WHERE signal != 'diffnote_reviewer'
        GROUP BY username, signal, mr_id
        UNION ALL
        -- Note signals: 1 row per (username, mr_id) with note_count and max_ts
        SELECT username, 'note_group' AS signal, mr_id, COUNT(*) AS qty, MAX(seen_at) AS ts,
               MAX(state_mult) AS state_mult
        FROM raw WHERE signal = 'diffnote_reviewer' AND note_id IS NOT NULL
        GROUP BY username, mr_id
    )
    SELECT username, signal, mr_id, qty, ts, state_mult FROM aggregated WHERE username IS NOT NULL
    "
    )
}
|
|
|
|
/// Query per-MR detail for a set of experts. Returns a map of username -> Vec<ExpertMrDetail>.
///
/// Mirrors the four signal branches of the main expert scoring query so that
/// --detail output can fully explain a score: DiffNote reviewer, DiffNote MR
/// author, MR author via file changes, and assigned reviewer via file changes.
/// Each branch matches BOTH the new and old path columns (renamed files) and
/// includes 'closed' MRs alongside 'opened'/'merged', matching the scoring query.
///
/// Parameters:
///   - `pq`: resolved path query (exact value or escaped LIKE prefix).
///   - `experts`: usernames to report on; drives the dynamic IN clause.
///   - `since_ms`: lower bound (epoch ms) on activity timestamps.
///   - `project_id`: optional project scope; `None` means all projects.
fn query_expert_details(
    conn: &Connection,
    pq: &PathQuery,
    experts: &[Expert],
    since_ms: i64,
    project_id: Option<i64>,
) -> Result<HashMap<String, Vec<ExpertMrDetail>>> {
    // Exact match uses "=", directory prefixes use an escaped LIKE pattern.
    // Both occurrences of {path_op} in a branch bind the same ?1 value.
    let path_op = if pq.is_prefix {
        "LIKE ?1 ESCAPE '\\'"
    } else {
        "= ?1"
    };

    // Build IN clause for usernames; placeholders start at ?4 because
    // ?1=path, ?2=since_ms, ?3=project_id are fixed.
    let placeholders: Vec<String> = experts
        .iter()
        .enumerate()
        .map(|(i, _)| format!("?{}", i + 4))
        .collect();
    let in_clause = placeholders.join(",");

    let sql = format!(
        "
        WITH signals AS (
            -- 1. DiffNote reviewer (matches both new_path and old_path for renamed files)
            SELECT
                n.author_username AS username,
                'reviewer' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                COUNT(*) AS note_count,
                MAX(n.created_at) AS last_activity
            FROM notes n
            JOIN discussions d ON n.discussion_id = d.id
            JOIN merge_requests m ON d.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND n.author_username IS NOT NULL
              AND (m.author_username IS NULL OR n.author_username != m.author_username)
              AND m.state IN ('opened','merged','closed')
              AND (n.position_new_path {path_op}
                   OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
              AND n.author_username IN ({in_clause})
            GROUP BY n.author_username, m.id

            UNION ALL

            -- 2. DiffNote MR author (matches both new_path and old_path for renamed files)
            SELECT
                m.author_username AS username,
                'author' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                MAX(n.created_at) AS last_activity
            FROM merge_requests m
            JOIN discussions d ON d.merge_request_id = m.id
            JOIN notes n ON n.discussion_id = d.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND n.is_system = 0
              AND m.author_username IS NOT NULL
              AND m.state IN ('opened','merged','closed')
              AND (n.position_new_path {path_op}
                   OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
              AND m.author_username IN ({in_clause})
            GROUP BY m.author_username, m.id

            UNION ALL

            -- 3. MR author via file changes (matches both new_path and old_path)
            SELECT
                m.author_username AS username,
                'author' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                m.updated_at AS last_activity
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE m.author_username IS NOT NULL
              AND m.state IN ('opened','merged','closed')
              AND (fc.new_path {path_op}
                   OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
              AND m.author_username IN ({in_clause})

            UNION ALL

            -- 4. MR reviewer via file changes + mr_reviewers (matches both new_path and old_path)
            SELECT
                r.username AS username,
                'reviewer' AS role,
                m.id AS mr_id,
                (p.path_with_namespace || '!' || CAST(m.iid AS TEXT)) AS mr_ref,
                m.title AS title,
                0 AS note_count,
                m.updated_at AS last_activity
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            JOIN mr_reviewers r ON r.merge_request_id = m.id
            WHERE r.username IS NOT NULL
              AND (m.author_username IS NULL OR r.username != m.author_username)
              AND m.state IN ('opened','merged','closed')
              AND (fc.new_path {path_op}
                   OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
              AND r.username IN ({in_clause})
        )
        SELECT
            username,
            mr_ref,
            title,
            GROUP_CONCAT(DISTINCT role) AS roles,
            SUM(note_count) AS total_notes,
            MAX(last_activity) AS last_activity
        FROM signals
        GROUP BY username, mr_ref
        ORDER BY username ASC, last_activity DESC
        "
    );

    // prepare() not prepare_cached(): the IN clause varies by expert count,
    // so the SQL shape changes per invocation and caching wastes memory.
    let mut stmt = conn.prepare(&sql)?;

    // Build params: ?1=path, ?2=since_ms, ?3=project_id, ?4..=usernames
    let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
    params.push(Box::new(pq.value.clone()));
    params.push(Box::new(since_ms));
    params.push(Box::new(project_id));
    for expert in experts {
        params.push(Box::new(expert.username.clone()));
    }
    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    // Row shape: (username, mr_ref, title, roles_csv, total_notes, last_activity).
    let rows: Vec<(String, String, String, String, u32, i64)> = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok((
                row.get(0)?,
                row.get(1)?,
                row.get(2)?,
                row.get::<_, String>(3)?,
                row.get(4)?,
                row.get(5)?,
            ))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    let mut map: HashMap<String, Vec<ExpertMrDetail>> = HashMap::new();
    for (username, mr_ref, title, roles_csv, note_count, last_activity) in rows {
        // roles_csv is GROUP_CONCAT over the literal values 'author'/'reviewer',
        // so substring containment is a safe membership test here.
        let has_author = roles_csv.contains("author");
        let has_reviewer = roles_csv.contains("reviewer");
        let role = match (has_author, has_reviewer) {
            (true, true) => "A+R",
            (true, false) => "A",
            (false, true) => "R",
            _ => "?",
        }
        .to_string();
        // Rows arrive ordered by (username, last_activity DESC), so each
        // user's Vec is already newest-first.
        map.entry(username).or_default().push(ExpertMrDetail {
            mr_ref,
            title,
            role,
            note_count,
            last_activity_ms: last_activity,
        });
    }

    Ok(map)
}
|
|
|
|
// ─── Query: Workload Mode ───────────────────────────────────────────────────
|
|
|
|
/// Query the current workload for a single user: open assigned issues, open
/// authored MRs, open MRs they review, and unresolved discussions they took
/// part in. Each list is fetched with LIMIT limit+1 so truncation can be
/// detected without a second COUNT query.
///
/// Parameters:
///   - `project_id`: optional project scope; `None` means all projects.
///   - `since_ms`: optional lower bound (epoch ms) on updated_at/last_note_at.
///   - `limit`: maximum rows returned per section.
fn query_workload(
    conn: &Connection,
    username: &str,
    project_id: Option<i64>,
    since_ms: Option<i64>,
    limit: usize,
) -> Result<WorkloadResult> {
    // Fetch one extra row per section purely to set the *_truncated flags.
    let limit_plus_one = (limit + 1) as i64;

    // Query 1: Open issues assigned to user
    let issues_sql = "SELECT i.iid,
                (p.path_with_namespace || '#' || i.iid) AS ref,
                i.title, p.path_with_namespace, i.updated_at
         FROM issues i
         JOIN issue_assignees ia ON ia.issue_id = i.id
         JOIN projects p ON i.project_id = p.id
         WHERE ia.username = ?1
           AND i.state = 'opened'
           AND (?2 IS NULL OR i.project_id = ?2)
           AND (?3 IS NULL OR i.updated_at >= ?3)
         ORDER BY i.updated_at DESC
         LIMIT ?4";

    let mut stmt = conn.prepare_cached(issues_sql)?;
    let assigned_issues: Vec<WorkloadIssue> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadIssue {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    project_path: row.get(3)?,
                    updated_at: row.get(4)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 2: Open MRs authored
    let authored_sql = "SELECT m.iid,
                (p.path_with_namespace || '!' || m.iid) AS ref,
                m.title, m.draft, p.path_with_namespace, m.updated_at
         FROM merge_requests m
         JOIN projects p ON m.project_id = p.id
         WHERE m.author_username = ?1
           AND m.state = 'opened'
           AND (?2 IS NULL OR m.project_id = ?2)
           AND (?3 IS NULL OR m.updated_at >= ?3)
         ORDER BY m.updated_at DESC
         LIMIT ?4";
    let mut stmt = conn.prepare_cached(authored_sql)?;
    let authored_mrs: Vec<WorkloadMr> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadMr {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    // draft is stored as an integer flag; nonzero means draft.
                    draft: row.get::<_, i32>(3)? != 0,
                    project_path: row.get(4)?,
                    // Author field is meaningless for MRs the user authored.
                    author_username: None,
                    updated_at: row.get(5)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 3: Open MRs where user is reviewer
    let reviewing_sql = "SELECT m.iid,
                (p.path_with_namespace || '!' || m.iid) AS ref,
                m.title, m.draft, p.path_with_namespace,
                m.author_username, m.updated_at
         FROM merge_requests m
         JOIN mr_reviewers r ON r.merge_request_id = m.id
         JOIN projects p ON m.project_id = p.id
         WHERE r.username = ?1
           AND m.state = 'opened'
           AND (?2 IS NULL OR m.project_id = ?2)
           AND (?3 IS NULL OR m.updated_at >= ?3)
         ORDER BY m.updated_at DESC
         LIMIT ?4";
    let mut stmt = conn.prepare_cached(reviewing_sql)?;
    let reviewing_mrs: Vec<WorkloadMr> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                Ok(WorkloadMr {
                    iid: row.get(0)?,
                    ref_: row.get(1)?,
                    title: row.get(2)?,
                    draft: row.get::<_, i32>(3)? != 0,
                    project_path: row.get(4)?,
                    author_username: row.get(5)?,
                    updated_at: row.get(6)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Query 4: Unresolved discussions where user participated
    // (has at least one non-system note in the discussion).
    let disc_sql = "SELECT d.noteable_type,
                COALESCE(i.iid, m.iid) AS entity_iid,
                (p.path_with_namespace ||
                 CASE WHEN d.noteable_type = 'MergeRequest' THEN '!' ELSE '#' END ||
                 COALESCE(i.iid, m.iid)) AS ref,
                COALESCE(i.title, m.title) AS entity_title,
                p.path_with_namespace,
                d.last_note_at
         FROM discussions d
         JOIN projects p ON d.project_id = p.id
         LEFT JOIN issues i ON d.issue_id = i.id
         LEFT JOIN merge_requests m ON d.merge_request_id = m.id
         WHERE d.resolvable = 1 AND d.resolved = 0
           AND EXISTS (
                SELECT 1 FROM notes n
                WHERE n.discussion_id = d.id
                  AND n.author_username = ?1
                  AND n.is_system = 0
           )
           AND (?2 IS NULL OR d.project_id = ?2)
           AND (?3 IS NULL OR d.last_note_at >= ?3)
         ORDER BY d.last_note_at DESC
         LIMIT ?4";

    let mut stmt = conn.prepare_cached(disc_sql)?;
    let unresolved_discussions: Vec<WorkloadDiscussion> = stmt
        .query_map(
            rusqlite::params![username, project_id, since_ms, limit_plus_one],
            |row| {
                let noteable_type: String = row.get(0)?;
                // Map GitLab's noteable_type to the short display label.
                let entity_type = if noteable_type == "MergeRequest" {
                    "MR"
                } else {
                    "Issue"
                };
                Ok(WorkloadDiscussion {
                    entity_type: entity_type.to_string(),
                    entity_iid: row.get(1)?,
                    ref_: row.get(2)?,
                    entity_title: row.get(3)?,
                    project_path: row.get(4)?,
                    last_note_at: row.get(5)?,
                })
            },
        )?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Truncation detection: more than `limit` rows means the extra sentinel
    // row was hit, so the section is incomplete.
    let assigned_issues_truncated = assigned_issues.len() > limit;
    let authored_mrs_truncated = authored_mrs.len() > limit;
    let reviewing_mrs_truncated = reviewing_mrs.len() > limit;
    let unresolved_discussions_truncated = unresolved_discussions.len() > limit;

    // Drop the sentinel row before returning.
    let assigned_issues: Vec<WorkloadIssue> = assigned_issues.into_iter().take(limit).collect();
    let authored_mrs: Vec<WorkloadMr> = authored_mrs.into_iter().take(limit).collect();
    let reviewing_mrs: Vec<WorkloadMr> = reviewing_mrs.into_iter().take(limit).collect();
    let unresolved_discussions: Vec<WorkloadDiscussion> =
        unresolved_discussions.into_iter().take(limit).collect();

    Ok(WorkloadResult {
        username: username.to_string(),
        assigned_issues,
        authored_mrs,
        reviewing_mrs,
        unresolved_discussions,
        assigned_issues_truncated,
        authored_mrs_truncated,
        reviewing_mrs_truncated,
        unresolved_discussions_truncated,
    })
}
|
|
|
|
// ─── Query: Reviews Mode ────────────────────────────────────────────────────
|
|
|
|
/// Summarize a user's review activity: total DiffNotes written on other
/// people's MRs, distinct MRs reviewed, and a breakdown of conventional
/// `**prefix**` comment categories (suggestion, nit, question, ...).
///
/// Parameters:
///   - `project_id`: optional project scope; `None` means all projects.
///   - `since_ms`: lower bound (epoch ms) on note creation time.
fn query_reviews(
    conn: &Connection,
    username: &str,
    project_id: Option<i64>,
    since_ms: i64,
) -> Result<ReviewsResult> {
    // Force the partial index on DiffNote queries (same rationale as expert mode).
    // COUNT + COUNT(DISTINCT) + category extraction all benefit from 26K DiffNote
    // scan vs 282K notes full scan: measured 25x speedup.
    let total_sql = "SELECT COUNT(*) FROM notes n
         INDEXED BY idx_notes_diffnote_path_created
         JOIN discussions d ON n.discussion_id = d.id
         JOIN merge_requests m ON d.merge_request_id = m.id
         WHERE n.author_username = ?1
           AND n.note_type = 'DiffNote'
           AND n.is_system = 0
           AND (m.author_username IS NULL OR m.author_username != ?1)
           AND n.created_at >= ?2
           AND (?3 IS NULL OR n.project_id = ?3)";

    let total_diffnotes: u32 = conn.query_row(
        total_sql,
        rusqlite::params![username, since_ms, project_id],
        |row| row.get(0),
    )?;

    // Count distinct MRs reviewed
    let mrs_sql = "SELECT COUNT(DISTINCT m.id) FROM notes n
         INDEXED BY idx_notes_diffnote_path_created
         JOIN discussions d ON n.discussion_id = d.id
         JOIN merge_requests m ON d.merge_request_id = m.id
         WHERE n.author_username = ?1
           AND n.note_type = 'DiffNote'
           AND n.is_system = 0
           AND (m.author_username IS NULL OR m.author_username != ?1)
           AND n.created_at >= ?2
           AND (?3 IS NULL OR n.project_id = ?3)";

    let mrs_reviewed: u32 = conn.query_row(
        mrs_sql,
        rusqlite::params![username, since_ms, project_id],
        |row| row.get(0),
    )?;

    // Extract prefixed categories: body starts with **prefix**
    // SUBSTR/INSTR pull out the text between the leading '**' pair.
    let cat_sql = "SELECT
            SUBSTR(ltrim(n.body), 3, INSTR(SUBSTR(ltrim(n.body), 3), '**') - 1) AS raw_prefix,
            COUNT(*) AS cnt
         FROM notes n INDEXED BY idx_notes_diffnote_path_created
         JOIN discussions d ON n.discussion_id = d.id
         JOIN merge_requests m ON d.merge_request_id = m.id
         WHERE n.author_username = ?1
           AND n.note_type = 'DiffNote'
           AND n.is_system = 0
           AND (m.author_username IS NULL OR m.author_username != ?1)
           AND ltrim(n.body) LIKE '**%**%'
           AND n.created_at >= ?2
           AND (?3 IS NULL OR n.project_id = ?3)
         GROUP BY raw_prefix
         ORDER BY cnt DESC";

    let mut stmt = conn.prepare_cached(cat_sql)?;
    let raw_categories: Vec<(String, u32)> = stmt
        .query_map(rusqlite::params![username, since_ms, project_id], |row| {
            Ok((row.get::<_, String>(0)?, row.get(1)?))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Normalize categories: lowercase, strip trailing colon/space,
    // merge nit/nitpick variants, merge (non-blocking) variants
    let mut merged: HashMap<String, u32> = HashMap::new();
    for (raw, count) in &raw_categories {
        let normalized = normalize_review_prefix(raw);
        // Prefixes that normalize to "" (e.g. pure parentheticals) are dropped.
        if !normalized.is_empty() {
            *merged.entry(normalized).or_insert(0) += count;
        }
    }

    let categorized_count: u32 = merged.values().sum();

    // Percentages are relative to categorized notes, not all DiffNotes.
    let mut categories: Vec<ReviewCategory> = merged
        .into_iter()
        .map(|(name, count)| {
            let percentage = if categorized_count > 0 {
                f64::from(count) / f64::from(categorized_count) * 100.0
            } else {
                0.0
            };
            ReviewCategory {
                name,
                count,
                percentage,
            }
        })
        .collect();

    categories.sort_by(|a, b| b.count.cmp(&a.count));

    Ok(ReviewsResult {
        username: username.to_string(),
        total_diffnotes,
        categorized_count,
        mrs_reviewed,
        categories,
    })
}
|
|
|
|
/// Normalize a raw review prefix like "Suggestion (non-blocking):" into "suggestion".
fn normalize_review_prefix(raw: &str) -> String {
    // Lowercase, drop surrounding whitespace, and strip a trailing colon.
    let lowered = raw.trim().trim_end_matches(':').trim().to_lowercase();

    // Drop any parenthetical qualifier such as "(non-blocking)".
    let base = match lowered.find('(') {
        Some(pos) => lowered[..pos].trim(),
        None => lowered.as_str(),
    };

    // Collapse nit/nitpick spelling variants into a single category.
    if base == "nitpick" || base == "nit" {
        "nit".to_string()
    } else {
        base.to_string()
    }
}
|
|
|
|
// ─── Query: Active Mode ─────────────────────────────────────────────────────
|
|
|
|
/// Query the most recently active unresolved discussions, with note counts
/// and (sorted, capped) participant lists. Uses two static SQL variants
/// (global vs project-scoped) so both can be prepared/cached, plus a shared
/// row-mapping closure. Fetches limit+1 rows to detect truncation.
fn query_active(
    conn: &Connection,
    project_id: Option<i64>,
    since_ms: i64,
    limit: usize,
) -> Result<ActiveResult> {
    // One extra row is fetched purely as a truncation sentinel.
    let limit_plus_one = (limit + 1) as i64;

    // Total unresolved count -- two static variants
    let total_sql_global = "SELECT COUNT(*) FROM discussions d
         WHERE d.resolvable = 1 AND d.resolved = 0
           AND d.last_note_at >= ?1";
    let total_sql_scoped = "SELECT COUNT(*) FROM discussions d
         WHERE d.resolvable = 1 AND d.resolved = 0
           AND d.last_note_at >= ?1
           AND d.project_id = ?2";

    let total_unresolved_in_window: u32 = match project_id {
        None => conn.query_row(total_sql_global, rusqlite::params![since_ms], |row| {
            row.get(0)
        })?,
        Some(pid) => conn.query_row(total_sql_scoped, rusqlite::params![since_ms, pid], |row| {
            row.get(0)
        })?,
    };

    // Active discussions with context -- two static SQL variants.
    // `picked` limits the candidate set first so the note_count/participant
    // aggregations only run over the selected discussions.
    let sql_global = "
        WITH picked AS (
            SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
                   d.project_id, d.last_note_at
            FROM discussions d
            WHERE d.resolvable = 1 AND d.resolved = 0
              AND d.last_note_at >= ?1
            ORDER BY d.last_note_at DESC
            LIMIT ?2
        ),
        note_counts AS (
            SELECT
                n.discussion_id,
                COUNT(*) AS note_count
            FROM notes n
            JOIN picked p ON p.id = n.discussion_id
            WHERE n.is_system = 0
            GROUP BY n.discussion_id
        ),
        participants AS (
            SELECT
                x.discussion_id,
                GROUP_CONCAT(x.author_username, X'1F') AS participants
            FROM (
                SELECT DISTINCT n.discussion_id, n.author_username
                FROM notes n
                JOIN picked p ON p.id = n.discussion_id
                WHERE n.is_system = 0 AND n.author_username IS NOT NULL
            ) x
            GROUP BY x.discussion_id
        )
        SELECT
            p.id AS discussion_id,
            p.noteable_type,
            COALESCE(i.iid, m.iid) AS entity_iid,
            COALESCE(i.title, m.title) AS entity_title,
            proj.path_with_namespace,
            p.last_note_at,
            COALESCE(nc.note_count, 0) AS note_count,
            COALESCE(pa.participants, '') AS participants
        FROM picked p
        JOIN projects proj ON p.project_id = proj.id
        LEFT JOIN issues i ON p.issue_id = i.id
        LEFT JOIN merge_requests m ON p.merge_request_id = m.id
        LEFT JOIN note_counts nc ON nc.discussion_id = p.id
        LEFT JOIN participants pa ON pa.discussion_id = p.id
        ORDER BY p.last_note_at DESC
    ";

    // Scoped variant: identical shape, plus a project_id filter in `picked`
    // (parameter positions shift: ?2 = project, ?3 = limit).
    let sql_scoped = "
        WITH picked AS (
            SELECT d.id, d.noteable_type, d.issue_id, d.merge_request_id,
                   d.project_id, d.last_note_at
            FROM discussions d
            WHERE d.resolvable = 1 AND d.resolved = 0
              AND d.last_note_at >= ?1
              AND d.project_id = ?2
            ORDER BY d.last_note_at DESC
            LIMIT ?3
        ),
        note_counts AS (
            SELECT
                n.discussion_id,
                COUNT(*) AS note_count
            FROM notes n
            JOIN picked p ON p.id = n.discussion_id
            WHERE n.is_system = 0
            GROUP BY n.discussion_id
        ),
        participants AS (
            SELECT
                x.discussion_id,
                GROUP_CONCAT(x.author_username, X'1F') AS participants
            FROM (
                SELECT DISTINCT n.discussion_id, n.author_username
                FROM notes n
                JOIN picked p ON p.id = n.discussion_id
                WHERE n.is_system = 0 AND n.author_username IS NOT NULL
            ) x
            GROUP BY x.discussion_id
        )
        SELECT
            p.id AS discussion_id,
            p.noteable_type,
            COALESCE(i.iid, m.iid) AS entity_iid,
            COALESCE(i.title, m.title) AS entity_title,
            proj.path_with_namespace,
            p.last_note_at,
            COALESCE(nc.note_count, 0) AS note_count,
            COALESCE(pa.participants, '') AS participants
        FROM picked p
        JOIN projects proj ON p.project_id = proj.id
        LEFT JOIN issues i ON p.issue_id = i.id
        LEFT JOIN merge_requests m ON p.merge_request_id = m.id
        LEFT JOIN note_counts nc ON nc.discussion_id = p.id
        LEFT JOIN participants pa ON pa.discussion_id = p.id
        ORDER BY p.last_note_at DESC
    ";

    // Row-mapping closure shared between both variants
    let map_row = |row: &rusqlite::Row| -> rusqlite::Result<ActiveDiscussion> {
        let noteable_type: String = row.get(1)?;
        let entity_type = if noteable_type == "MergeRequest" {
            "MR"
        } else {
            "Issue"
        };
        let participants_csv: Option<String> = row.get(7)?;
        // Sort participants for deterministic output -- GROUP_CONCAT order is undefined.
        // The X'1F' (unit separator) delimiter avoids clashes with commas in names.
        let mut participants: Vec<String> = participants_csv
            .as_deref()
            .filter(|s| !s.is_empty())
            .map(|csv| csv.split('\x1F').map(String::from).collect())
            .unwrap_or_default();
        participants.sort();

        // Cap the per-discussion participant list; report the true total.
        const MAX_PARTICIPANTS: usize = 50;
        let participants_total = participants.len() as u32;
        let participants_truncated = participants.len() > MAX_PARTICIPANTS;
        if participants_truncated {
            participants.truncate(MAX_PARTICIPANTS);
        }

        Ok(ActiveDiscussion {
            discussion_id: row.get(0)?,
            entity_type: entity_type.to_string(),
            entity_iid: row.get(2)?,
            entity_title: row.get(3)?,
            project_path: row.get(4)?,
            last_note_at: row.get(5)?,
            note_count: row.get(6)?,
            participants,
            participants_total,
            participants_truncated,
        })
    };

    // Select variant first, then prepare exactly one statement
    let discussions: Vec<ActiveDiscussion> = match project_id {
        None => {
            let mut stmt = conn.prepare_cached(sql_global)?;
            stmt.query_map(rusqlite::params![since_ms, limit_plus_one], &map_row)?
                .collect::<std::result::Result<Vec<_>, _>>()?
        }
        Some(pid) => {
            let mut stmt = conn.prepare_cached(sql_scoped)?;
            stmt.query_map(rusqlite::params![since_ms, pid, limit_plus_one], &map_row)?
                .collect::<std::result::Result<Vec<_>, _>>()?
        }
    };

    // Sentinel row present => results were truncated; drop it before returning.
    let truncated = discussions.len() > limit;
    let discussions: Vec<ActiveDiscussion> = discussions.into_iter().take(limit).collect();

    Ok(ActiveResult {
        discussions,
        total_unresolved_in_window,
        truncated,
    })
}
|
|
|
|
// ─── Query: Overlap Mode ────────────────────────────────────────────────────
|
|
|
|
/// Find users with overlapping activity on a path: who authored or reviewed
/// MRs touching it, how often, and on which MRs. Uses the same four signal
/// sources as the main expert scoring query (DiffNote reviewer, DiffNote MR
/// author, file-change author, file-change reviewer), including old_path
/// matching for renamed files and 'closed' MRs, so overlap output agrees
/// with expert scores.
fn query_overlap(
    conn: &Connection,
    path: &str,
    project_id: Option<i64>,
    since_ms: i64,
    limit: usize,
) -> Result<OverlapResult> {
    // Resolve the user-supplied path to an exact value or a LIKE prefix.
    let pq = build_path_query(conn, path, project_id)?;

    // Build SQL with 4 signal sources, matching the expert query expansion.
    // Each row produces (username, role, mr_id, mr_ref, seen_at) for Rust-side accumulation.
    let path_op = if pq.is_prefix {
        "LIKE ?1 ESCAPE '\\'"
    } else {
        "= ?1"
    };
    // Match both new_path and old_path to capture activity on renamed files.
    // INDEXED BY removed to allow OR across path columns; overlap runs once
    // per command so the minor plan difference is acceptable.
    let sql = format!(
        "SELECT username, role, touch_count, last_seen_at, mr_refs FROM (
            -- 1. DiffNote reviewer (matches both new_path and old_path)
            SELECT
                n.author_username AS username,
                'reviewer' AS role,
                COUNT(DISTINCT m.id) AS touch_count,
                MAX(n.created_at) AS last_seen_at,
                GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
            FROM notes n
            JOIN discussions d ON n.discussion_id = d.id
            JOIN merge_requests m ON d.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND (n.position_new_path {path_op}
                   OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
              AND n.is_system = 0
              AND n.author_username IS NOT NULL
              AND (m.author_username IS NULL OR n.author_username != m.author_username)
              AND m.state IN ('opened','merged','closed')
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
            GROUP BY n.author_username

            UNION ALL

            -- 2. DiffNote MR author (matches both new_path and old_path)
            SELECT
                m.author_username AS username,
                'author' AS role,
                COUNT(DISTINCT m.id) AS touch_count,
                MAX(n.created_at) AS last_seen_at,
                GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
            FROM notes n
            JOIN discussions d ON n.discussion_id = d.id
            JOIN merge_requests m ON d.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE n.note_type = 'DiffNote'
              AND (n.position_new_path {path_op}
                   OR (n.position_old_path IS NOT NULL AND n.position_old_path {path_op}))
              AND n.is_system = 0
              AND m.state IN ('opened','merged','closed')
              AND m.author_username IS NOT NULL
              AND n.created_at >= ?2
              AND (?3 IS NULL OR n.project_id = ?3)
            GROUP BY m.author_username

            UNION ALL

            -- 3. MR author via file changes (matches both new_path and old_path)
            SELECT
                m.author_username AS username,
                'author' AS role,
                COUNT(DISTINCT m.id) AS touch_count,
                MAX(m.updated_at) AS last_seen_at,
                GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            WHERE m.author_username IS NOT NULL
              AND m.state IN ('opened','merged','closed')
              AND (fc.new_path {path_op}
                   OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
            GROUP BY m.author_username

            UNION ALL

            -- 4. MR reviewer via file changes + mr_reviewers (matches both new_path and old_path)
            SELECT
                r.username AS username,
                'reviewer' AS role,
                COUNT(DISTINCT m.id) AS touch_count,
                MAX(m.updated_at) AS last_seen_at,
                GROUP_CONCAT(DISTINCT (p.path_with_namespace || '!' || m.iid)) AS mr_refs
            FROM mr_file_changes fc
            JOIN merge_requests m ON fc.merge_request_id = m.id
            JOIN projects p ON m.project_id = p.id
            JOIN mr_reviewers r ON r.merge_request_id = m.id
            WHERE r.username IS NOT NULL
              AND (m.author_username IS NULL OR r.username != m.author_username)
              AND m.state IN ('opened','merged','closed')
              AND (fc.new_path {path_op}
                   OR (fc.old_path IS NOT NULL AND fc.old_path {path_op}))
              AND m.updated_at >= ?2
              AND (?3 IS NULL OR fc.project_id = ?3)
            GROUP BY r.username
        )"
    );

    let mut stmt = conn.prepare_cached(&sql)?;
    // Row shape: (username, role, touch_count, last_seen_at, mr_refs_csv).
    let rows: Vec<(String, String, u32, i64, Option<String>)> = stmt
        .query_map(rusqlite::params![pq.value, since_ms, project_id], |row| {
            Ok((
                row.get(0)?,
                row.get(1)?,
                row.get(2)?,
                row.get(3)?,
                row.get(4)?,
            ))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    // Internal accumulator uses HashSet for MR refs from the start
    // (the same MR can appear in multiple signal branches).
    struct OverlapAcc {
        username: String,
        author_touch_count: u32,
        review_touch_count: u32,
        touch_count: u32,
        last_seen_at: i64,
        mr_refs: HashSet<String>,
    }

    // Merge the four per-role rows into one accumulator per user.
    let mut user_map: HashMap<String, OverlapAcc> = HashMap::new();
    for (username, role, count, last_seen, mr_refs_csv) in &rows {
        // GROUP_CONCAT uses the default ',' delimiter here.
        let mr_refs: Vec<String> = mr_refs_csv
            .as_deref()
            .map(|csv| csv.split(',').map(|s| s.trim().to_string()).collect())
            .unwrap_or_default();

        let entry = user_map
            .entry(username.clone())
            .or_insert_with(|| OverlapAcc {
                username: username.clone(),
                author_touch_count: 0,
                review_touch_count: 0,
                touch_count: 0,
                last_seen_at: 0,
                mr_refs: HashSet::new(),
            });
        entry.touch_count += count;
        if role == "author" {
            entry.author_touch_count += count;
        } else {
            entry.review_touch_count += count;
        }
        if *last_seen > entry.last_seen_at {
            entry.last_seen_at = *last_seen;
        }
        for r in mr_refs {
            entry.mr_refs.insert(r);
        }
    }

    // Convert accumulators to output structs
    let mut users: Vec<OverlapUser> = user_map
        .into_values()
        .map(|a| {
            // Sort refs for deterministic output, then cap the list.
            let mut mr_refs: Vec<String> = a.mr_refs.into_iter().collect();
            mr_refs.sort();
            let mr_refs_total = mr_refs.len() as u32;
            let mr_refs_truncated = mr_refs.len() > MAX_MR_REFS_PER_USER;
            if mr_refs_truncated {
                mr_refs.truncate(MAX_MR_REFS_PER_USER);
            }
            OverlapUser {
                username: a.username,
                author_touch_count: a.author_touch_count,
                review_touch_count: a.review_touch_count,
                touch_count: a.touch_count,
                last_seen_at: a.last_seen_at,
                mr_refs,
                mr_refs_total,
                mr_refs_truncated,
            }
        })
        .collect();

    // Stable sort with full tie-breakers for deterministic output
    users.sort_by(|a, b| {
        b.touch_count
            .cmp(&a.touch_count)
            .then_with(|| b.last_seen_at.cmp(&a.last_seen_at))
            .then_with(|| a.username.cmp(&b.username))
    });

    let truncated = users.len() > limit;
    users.truncate(limit);

    Ok(OverlapResult {
        // Echo the prefix as typed (sans trailing '/'), or the resolved exact value.
        path_query: if pq.is_prefix {
            path.trim_end_matches('/').to_string()
        } else {
            pq.value.clone()
        },
        path_match: if pq.is_prefix { "prefix" } else { "exact" }.to_string(),
        users,
        truncated,
    })
}
|
|
|
|
/// Format overlap role for display: "A", "R", or "A+R".
|
|
fn format_overlap_role(user: &OverlapUser) -> &'static str {
|
|
match (user.author_touch_count > 0, user.review_touch_count > 0) {
|
|
(true, true) => "A+R",
|
|
(true, false) => "A",
|
|
(false, true) => "R",
|
|
(false, false) => "-",
|
|
}
|
|
}
|
|
|
|
// ─── Human Output ────────────────────────────────────────────────────────────
|
|
|
|
pub fn print_who_human(result: &WhoResult, project_path: Option<&str>) {
|
|
match result {
|
|
WhoResult::Expert(r) => print_expert_human(r, project_path),
|
|
WhoResult::Workload(r) => print_workload_human(r),
|
|
WhoResult::Reviews(r) => print_reviews_human(r),
|
|
WhoResult::Active(r) => print_active_human(r, project_path),
|
|
WhoResult::Overlap(r) => print_overlap_human(r, project_path),
|
|
}
|
|
}
|
|
|
|
/// Print a dim hint when results aggregate across all projects.
|
|
fn print_scope_hint(project_path: Option<&str>) {
|
|
if project_path.is_none() {
|
|
println!(
|
|
" {}",
|
|
style("(aggregated across all projects; use -p to scope)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
/// Print the expert-mode result as a human-readable table: header, per-expert
/// rows (score, reviewed/authored counts, last seen, up to 5 MR refs), and
/// optional per-MR detail sub-rows when `expert.details` is populated.
fn print_expert_human(r: &ExpertResult, project_path: Option<&str>) {
    println!();
    println!("{}", style(format!("Experts for {}", r.path_query)).bold());
    // U+2500 is a box-drawing horizontal bar used as a section rule.
    println!("{}", "\u{2500}".repeat(60));
    println!(
        " {}",
        style(format!(
            "(matching {} {})",
            r.path_match,
            if r.path_match == "exact" {
                "file"
            } else {
                "directory prefix"
            }
        ))
        .dim()
    );
    print_scope_hint(project_path);
    println!();

    if r.experts.is_empty() {
        println!(" {}", style("No experts found for this path.").dim());
        println!();
        return;
    }

    // Column header; widths mirror the row format below.
    println!(
        " {:<16} {:>6} {:>12} {:>6} {:>12} {} {}",
        style("Username").bold(),
        style("Score").bold(),
        style("Reviewed(MRs)").bold(),
        style("Notes").bold(),
        style("Authored(MRs)").bold(),
        style("Last Seen").bold(),
        style("MR Refs").bold(),
    );

    for expert in &r.experts {
        // Zero counts render as "-" to keep the table scannable.
        let reviews = if expert.review_mr_count > 0 {
            expert.review_mr_count.to_string()
        } else {
            "-".to_string()
        };
        let notes = if expert.review_note_count > 0 {
            expert.review_note_count.to_string()
        } else {
            "-".to_string()
        };
        let authored = if expert.author_mr_count > 0 {
            expert.author_mr_count.to_string()
        } else {
            "-".to_string()
        };
        // Show at most 5 MR refs inline; the rest collapse to "+N".
        let mr_str = expert
            .mr_refs
            .iter()
            .take(5)
            .cloned()
            .collect::<Vec<_>>()
            .join(", ");
        let overflow = if expert.mr_refs_total > 5 {
            format!(" +{}", expert.mr_refs_total - 5)
        } else {
            String::new()
        };
        println!(
            " {:<16} {:>6} {:>12} {:>6} {:>12} {:<12}{}{}",
            style(format!("@{}", expert.username)).cyan(),
            expert.score,
            reviews,
            notes,
            authored,
            format_relative_time(expert.last_seen_ms),
            if mr_str.is_empty() {
                String::new()
            } else {
                format!(" {mr_str}")
            },
            overflow,
        );

        // Print detail sub-rows when populated (--detail mode).
        if let Some(details) = &expert.details {
            const MAX_DETAIL_DISPLAY: usize = 10;
            for d in details.iter().take(MAX_DETAIL_DISPLAY) {
                let notes_str = if d.note_count > 0 {
                    format!("{} notes", d.note_count)
                } else {
                    String::new()
                };
                println!(
                    " {:<3} {:<30} {:>30} {:>10} {}",
                    style(&d.role).dim(),
                    d.mr_ref,
                    truncate_str(&format!("\"{}\"", d.title), 30),
                    notes_str,
                    style(format_relative_time(d.last_activity_ms)).dim(),
                );
            }
            if details.len() > MAX_DETAIL_DISPLAY {
                println!(
                    " {}",
                    style(format!("+{} more", details.len() - MAX_DETAIL_DISPLAY)).dim()
                );
            }
        }
    }
    if r.truncated {
        println!(
            " {}",
            style("(showing first -n; rerun with a higher --limit)").dim()
        );
    }
    println!();
}
|
|
|
|
fn print_workload_human(r: &WorkloadResult) {
|
|
println!();
|
|
println!(
|
|
"{}",
|
|
style(format!("@{} -- Workload Summary", r.username)).bold()
|
|
);
|
|
println!("{}", "\u{2500}".repeat(60));
|
|
|
|
if !r.assigned_issues.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Assigned Issues").bold(),
|
|
r.assigned_issues.len()
|
|
);
|
|
for item in &r.assigned_issues {
|
|
println!(
|
|
" {} {} {}",
|
|
style(&item.ref_).cyan(),
|
|
truncate_str(&item.title, 40),
|
|
style(format_relative_time(item.updated_at)).dim(),
|
|
);
|
|
}
|
|
if r.assigned_issues_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if !r.authored_mrs.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Authored MRs").bold(),
|
|
r.authored_mrs.len()
|
|
);
|
|
for mr in &r.authored_mrs {
|
|
let draft = if mr.draft { " [draft]" } else { "" };
|
|
println!(
|
|
" {} {}{} {}",
|
|
style(&mr.ref_).cyan(),
|
|
truncate_str(&mr.title, 35),
|
|
style(draft).dim(),
|
|
style(format_relative_time(mr.updated_at)).dim(),
|
|
);
|
|
}
|
|
if r.authored_mrs_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if !r.reviewing_mrs.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Reviewing MRs").bold(),
|
|
r.reviewing_mrs.len()
|
|
);
|
|
for mr in &r.reviewing_mrs {
|
|
let author = mr
|
|
.author_username
|
|
.as_deref()
|
|
.map(|a| format!(" by @{a}"))
|
|
.unwrap_or_default();
|
|
println!(
|
|
" {} {}{} {}",
|
|
style(&mr.ref_).cyan(),
|
|
truncate_str(&mr.title, 30),
|
|
style(author).dim(),
|
|
style(format_relative_time(mr.updated_at)).dim(),
|
|
);
|
|
}
|
|
if r.reviewing_mrs_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if !r.unresolved_discussions.is_empty() {
|
|
println!();
|
|
println!(
|
|
" {} ({})",
|
|
style("Unresolved Discussions").bold(),
|
|
r.unresolved_discussions.len()
|
|
);
|
|
for disc in &r.unresolved_discussions {
|
|
println!(
|
|
" {} {} {} {}",
|
|
style(&disc.entity_type).dim(),
|
|
style(&disc.ref_).cyan(),
|
|
truncate_str(&disc.entity_title, 35),
|
|
style(format_relative_time(disc.last_note_at)).dim(),
|
|
);
|
|
}
|
|
if r.unresolved_discussions_truncated {
|
|
println!(
|
|
" {}",
|
|
style("(truncated; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
}
|
|
|
|
if r.assigned_issues.is_empty()
|
|
&& r.authored_mrs.is_empty()
|
|
&& r.reviewing_mrs.is_empty()
|
|
&& r.unresolved_discussions.is_empty()
|
|
{
|
|
println!();
|
|
println!(
|
|
" {}",
|
|
style("No open work items found for this user.").dim()
|
|
);
|
|
}
|
|
|
|
println!();
|
|
}
|
|
|
|
fn print_reviews_human(r: &ReviewsResult) {
|
|
println!();
|
|
println!(
|
|
"{}",
|
|
style(format!("@{} -- Review Patterns", r.username)).bold()
|
|
);
|
|
println!("{}", "\u{2500}".repeat(60));
|
|
println!();
|
|
|
|
if r.total_diffnotes == 0 {
|
|
println!(
|
|
" {}",
|
|
style("No review comments found for this user.").dim()
|
|
);
|
|
println!();
|
|
return;
|
|
}
|
|
|
|
println!(
|
|
" {} DiffNotes across {} MRs ({} categorized)",
|
|
style(r.total_diffnotes).bold(),
|
|
style(r.mrs_reviewed).bold(),
|
|
style(r.categorized_count).bold(),
|
|
);
|
|
println!();
|
|
|
|
if !r.categories.is_empty() {
|
|
println!(
|
|
" {:<16} {:>6} {:>6}",
|
|
style("Category").bold(),
|
|
style("Count").bold(),
|
|
style("%").bold(),
|
|
);
|
|
|
|
for cat in &r.categories {
|
|
println!(
|
|
" {:<16} {:>6} {:>5.1}%",
|
|
style(&cat.name).cyan(),
|
|
cat.count,
|
|
cat.percentage,
|
|
);
|
|
}
|
|
}
|
|
|
|
let uncategorized = r.total_diffnotes - r.categorized_count;
|
|
if uncategorized > 0 {
|
|
println!();
|
|
println!(
|
|
" {} {} uncategorized (no **prefix** convention)",
|
|
style("Note:").dim(),
|
|
uncategorized,
|
|
);
|
|
}
|
|
|
|
println!();
|
|
}
|
|
|
|
fn print_active_human(r: &ActiveResult, project_path: Option<&str>) {
|
|
println!();
|
|
println!(
|
|
"{}",
|
|
style(format!(
|
|
"Active Discussions ({} unresolved in window)",
|
|
r.total_unresolved_in_window
|
|
))
|
|
.bold()
|
|
);
|
|
println!("{}", "\u{2500}".repeat(60));
|
|
print_scope_hint(project_path);
|
|
println!();
|
|
|
|
if r.discussions.is_empty() {
|
|
println!(
|
|
" {}",
|
|
style("No active unresolved discussions in this time window.").dim()
|
|
);
|
|
println!();
|
|
return;
|
|
}
|
|
|
|
for disc in &r.discussions {
|
|
let prefix = if disc.entity_type == "MR" { "!" } else { "#" };
|
|
let participants_str = disc
|
|
.participants
|
|
.iter()
|
|
.map(|p| format!("@{p}"))
|
|
.collect::<Vec<_>>()
|
|
.join(", ");
|
|
|
|
println!(
|
|
" {} {} {} {} notes {}",
|
|
style(format!("{prefix}{}", disc.entity_iid)).cyan(),
|
|
truncate_str(&disc.entity_title, 40),
|
|
style(format_relative_time(disc.last_note_at)).dim(),
|
|
disc.note_count,
|
|
style(&disc.project_path).dim(),
|
|
);
|
|
if !participants_str.is_empty() {
|
|
println!(" {}", style(participants_str).dim());
|
|
}
|
|
}
|
|
if r.truncated {
|
|
println!(
|
|
" {}",
|
|
style("(showing first -n; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
println!();
|
|
}
|
|
|
|
fn print_overlap_human(r: &OverlapResult, project_path: Option<&str>) {
|
|
println!();
|
|
println!("{}", style(format!("Overlap for {}", r.path_query)).bold());
|
|
println!("{}", "\u{2500}".repeat(60));
|
|
println!(
|
|
" {}",
|
|
style(format!(
|
|
"(matching {} {})",
|
|
r.path_match,
|
|
if r.path_match == "exact" {
|
|
"file"
|
|
} else {
|
|
"directory prefix"
|
|
}
|
|
))
|
|
.dim()
|
|
);
|
|
print_scope_hint(project_path);
|
|
println!();
|
|
|
|
if r.users.is_empty() {
|
|
println!(
|
|
" {}",
|
|
style("No overlapping users found for this path.").dim()
|
|
);
|
|
println!();
|
|
return;
|
|
}
|
|
|
|
println!(
|
|
" {:<16} {:<6} {:>7} {:<12} {}",
|
|
style("Username").bold(),
|
|
style("Role").bold(),
|
|
style("MRs").bold(),
|
|
style("Last Seen").bold(),
|
|
style("MR Refs").bold(),
|
|
);
|
|
|
|
for user in &r.users {
|
|
let mr_str = user
|
|
.mr_refs
|
|
.iter()
|
|
.take(5)
|
|
.cloned()
|
|
.collect::<Vec<_>>()
|
|
.join(", ");
|
|
let overflow = if user.mr_refs.len() > 5 {
|
|
format!(" +{}", user.mr_refs.len() - 5)
|
|
} else {
|
|
String::new()
|
|
};
|
|
|
|
println!(
|
|
" {:<16} {:<6} {:>7} {:<12} {}{}",
|
|
style(format!("@{}", user.username)).cyan(),
|
|
format_overlap_role(user),
|
|
user.touch_count,
|
|
format_relative_time(user.last_seen_at),
|
|
mr_str,
|
|
overflow,
|
|
);
|
|
}
|
|
if r.truncated {
|
|
println!(
|
|
" {}",
|
|
style("(showing first -n; rerun with a higher --limit)").dim()
|
|
);
|
|
}
|
|
println!();
|
|
}
|
|
|
|
// ─── Robot JSON Output ───────────────────────────────────────────────────────
|
|
|
|
pub fn print_who_json(run: &WhoRun, args: &WhoArgs, elapsed_ms: u64) {
|
|
let (mode, data) = match &run.result {
|
|
WhoResult::Expert(r) => ("expert", expert_to_json(r)),
|
|
WhoResult::Workload(r) => ("workload", workload_to_json(r)),
|
|
WhoResult::Reviews(r) => ("reviews", reviews_to_json(r)),
|
|
WhoResult::Active(r) => ("active", active_to_json(r)),
|
|
WhoResult::Overlap(r) => ("overlap", overlap_to_json(r)),
|
|
};
|
|
|
|
// Raw CLI args -- what the user typed
|
|
let input = serde_json::json!({
|
|
"target": args.target,
|
|
"path": args.path,
|
|
"project": args.project,
|
|
"since": args.since,
|
|
"limit": args.limit,
|
|
"detail": args.detail,
|
|
"as_of": args.as_of,
|
|
"explain_score": args.explain_score,
|
|
"include_bots": args.include_bots,
|
|
"all_history": args.all_history,
|
|
});
|
|
|
|
// Resolved/computed values -- what actually ran
|
|
let resolved_input = serde_json::json!({
|
|
"mode": run.resolved_input.mode,
|
|
"project_id": run.resolved_input.project_id,
|
|
"project_path": run.resolved_input.project_path,
|
|
"since_ms": run.resolved_input.since_ms,
|
|
"since_iso": run.resolved_input.since_iso,
|
|
"since_mode": run.resolved_input.since_mode,
|
|
"limit": run.resolved_input.limit,
|
|
});
|
|
|
|
let output = WhoJsonEnvelope {
|
|
ok: true,
|
|
data: WhoJsonData {
|
|
mode: mode.to_string(),
|
|
input,
|
|
resolved_input,
|
|
result: data,
|
|
},
|
|
meta: RobotMeta { elapsed_ms },
|
|
};
|
|
|
|
let mut value = serde_json::to_value(&output).unwrap_or_else(|e| {
|
|
serde_json::json!({"ok":false,"error":{"code":"INTERNAL_ERROR","message":format!("JSON serialization failed: {e}")}})
|
|
});
|
|
|
|
if let Some(f) = &args.fields {
|
|
let preset_key = format!("who_{mode}");
|
|
let expanded = crate::cli::robot::expand_fields_preset(f, &preset_key);
|
|
// Each who mode uses a different array key; try all possible keys
|
|
for key in &[
|
|
"experts",
|
|
"assigned_issues",
|
|
"authored_mrs",
|
|
"review_mrs",
|
|
"categories",
|
|
"discussions",
|
|
"users",
|
|
] {
|
|
crate::cli::robot::filter_fields(&mut value, key, &expanded);
|
|
}
|
|
}
|
|
|
|
println!("{}", serde_json::to_string(&value).unwrap());
|
|
}
|
|
|
|
/// Top-level robot-mode JSON envelope: `{ "ok": ..., "data": ..., "meta": ... }`.
///
/// Field order matters for anyone diffing raw output; do not reorder.
#[derive(Serialize)]
struct WhoJsonEnvelope {
    // Set to `true` by `print_who_json`; the serialization-failure fallback
    // replaces the whole envelope with an `ok: false` object instead.
    ok: bool,
    // Mode-specific payload plus the echoed raw/resolved input.
    data: WhoJsonData,
    // Shared robot metadata (elapsed wall-clock milliseconds).
    meta: RobotMeta,
}
|
|
|
|
/// `data` payload of the `who` robot output.
#[derive(Serialize)]
struct WhoJsonData {
    // Which sub-mode produced the result: "expert", "workload", "reviews",
    // "active", or "overlap".
    mode: String,
    // Raw CLI arguments exactly as the user typed them.
    input: serde_json::Value,
    // Resolved/computed values that actually drove the query
    // (project id/path, since window, limit).
    resolved_input: serde_json::Value,
    // Mode-specific result object; flattened so its keys appear directly
    // alongside `mode`/`input` rather than nested under "result".
    #[serde(flatten)]
    result: serde_json::Value,
}
|
|
|
|
/// Serialize an `ExpertResult` into the robot-JSON `expert` payload.
///
/// The optional per-expert fields (`score_raw`, `components`, `details`) are
/// only emitted when populated (e.g. under --explain-score / --detail).
fn expert_to_json(r: &ExpertResult) -> serde_json::Value {
    let mut experts: Vec<serde_json::Value> = Vec::with_capacity(r.experts.len());

    for e in &r.experts {
        let mut row = serde_json::json!({
            "username": e.username,
            "score": e.score,
            "review_mr_count": e.review_mr_count,
            "review_note_count": e.review_note_count,
            "author_mr_count": e.author_mr_count,
            "last_seen_at": ms_to_iso(e.last_seen_ms),
            "mr_refs": e.mr_refs,
            "mr_refs_total": e.mr_refs_total,
            "mr_refs_truncated": e.mr_refs_truncated,
        });

        if let Some(raw) = e.score_raw {
            row["score_raw"] = serde_json::json!(raw);
        }
        if let Some(comp) = &e.components {
            row["components"] = serde_json::json!({
                "author": comp.author,
                "reviewer_participated": comp.reviewer_participated,
                "reviewer_assigned": comp.reviewer_assigned,
                "notes": comp.notes,
            });
        }
        if let Some(details) = &e.details {
            let detail_rows: Vec<serde_json::Value> = details
                .iter()
                .map(|d| {
                    serde_json::json!({
                        "mr_ref": d.mr_ref,
                        "title": d.title,
                        "role": d.role,
                        "note_count": d.note_count,
                        "last_activity_at": ms_to_iso(d.last_activity_ms),
                    })
                })
                .collect();
            row["details"] = serde_json::Value::Array(detail_rows);
        }

        experts.push(row);
    }

    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "scoring_model_version": 2,
        "truncated": r.truncated,
        "experts": experts,
    })
}
|
|
|
|
/// Serialize a `WorkloadResult` into the robot-JSON `workload` payload.
///
/// All millisecond timestamps are converted to ISO-8601 strings via
/// `ms_to_iso`. `summary` carries the per-list counts and `truncation`
/// flags which lists were cut off by the row limit.
fn workload_to_json(r: &WorkloadResult) -> serde_json::Value {
    serde_json::json!({
        "username": r.username,
        // Issues currently assigned to the user.
        "assigned_issues": r.assigned_issues.iter().map(|i| serde_json::json!({
            "iid": i.iid,
            "ref": i.ref_,
            "title": i.title,
            "project_path": i.project_path,
            "updated_at": ms_to_iso(i.updated_at),
        })).collect::<Vec<_>>(),
        // MRs the user authored.
        "authored_mrs": r.authored_mrs.iter().map(|m| serde_json::json!({
            "iid": m.iid,
            "ref": m.ref_,
            "title": m.title,
            "draft": m.draft,
            "project_path": m.project_path,
            "updated_at": ms_to_iso(m.updated_at),
        })).collect::<Vec<_>>(),
        // MRs the user reviews; also carries the MR author's username.
        "reviewing_mrs": r.reviewing_mrs.iter().map(|m| serde_json::json!({
            "iid": m.iid,
            "ref": m.ref_,
            "title": m.title,
            "draft": m.draft,
            "project_path": m.project_path,
            "author_username": m.author_username,
            "updated_at": ms_to_iso(m.updated_at),
        })).collect::<Vec<_>>(),
        // Unresolved discussions tied to this user.
        "unresolved_discussions": r.unresolved_discussions.iter().map(|d| serde_json::json!({
            "entity_type": d.entity_type,
            "entity_iid": d.entity_iid,
            "ref": d.ref_,
            "entity_title": d.entity_title,
            "project_path": d.project_path,
            "last_note_at": ms_to_iso(d.last_note_at),
        })).collect::<Vec<_>>(),
        // Counts of the (possibly truncated) lists above.
        "summary": {
            "assigned_issue_count": r.assigned_issues.len(),
            "authored_mr_count": r.authored_mrs.len(),
            "reviewing_mr_count": r.reviewing_mrs.len(),
            "unresolved_discussion_count": r.unresolved_discussions.len(),
        },
        // Which lists hit the row limit and were cut short.
        "truncation": {
            "assigned_issues_truncated": r.assigned_issues_truncated,
            "authored_mrs_truncated": r.authored_mrs_truncated,
            "reviewing_mrs_truncated": r.reviewing_mrs_truncated,
            "unresolved_discussions_truncated": r.unresolved_discussions_truncated,
        }
    })
}
|
|
|
|
/// Serialize a `ReviewsResult` into the robot-JSON `reviews` payload.
///
/// Percentages are rounded to one decimal place for stable output.
fn reviews_to_json(r: &ReviewsResult) -> serde_json::Value {
    let categories: Vec<serde_json::Value> = r
        .categories
        .iter()
        .map(|c| {
            serde_json::json!({
                "name": c.name,
                "count": c.count,
                "percentage": (c.percentage * 10.0).round() / 10.0,
            })
        })
        .collect();

    serde_json::json!({
        "username": r.username,
        "total_diffnotes": r.total_diffnotes,
        "categorized_count": r.categorized_count,
        "mrs_reviewed": r.mrs_reviewed,
        "categories": categories,
    })
}
|
|
|
|
/// Serialize an `ActiveResult` into the robot-JSON `active` payload.
fn active_to_json(r: &ActiveResult) -> serde_json::Value {
    let discussions: Vec<serde_json::Value> = r
        .discussions
        .iter()
        .map(|d| {
            serde_json::json!({
                "discussion_id": d.discussion_id,
                "entity_type": d.entity_type,
                "entity_iid": d.entity_iid,
                "entity_title": d.entity_title,
                "project_path": d.project_path,
                "last_note_at": ms_to_iso(d.last_note_at),
                "note_count": d.note_count,
                "participants": d.participants,
                "participants_total": d.participants_total,
                "participants_truncated": d.participants_truncated,
            })
        })
        .collect();

    serde_json::json!({
        "total_unresolved_in_window": r.total_unresolved_in_window,
        "truncated": r.truncated,
        "discussions": discussions,
    })
}
|
|
|
|
/// Serialize an `OverlapResult` into the robot-JSON `overlap` payload.
fn overlap_to_json(r: &OverlapResult) -> serde_json::Value {
    let users: Vec<serde_json::Value> = r
        .users
        .iter()
        .map(|u| {
            serde_json::json!({
                "username": u.username,
                "role": format_overlap_role(u),
                "author_touch_count": u.author_touch_count,
                "review_touch_count": u.review_touch_count,
                "touch_count": u.touch_count,
                "last_seen_at": ms_to_iso(u.last_seen_at),
                "mr_refs": u.mr_refs,
                "mr_refs_total": u.mr_refs_total,
                "mr_refs_truncated": u.mr_refs_truncated,
            })
        })
        .collect();

    serde_json::json!({
        "path_query": r.path_query,
        "path_match": r.path_match,
        "truncated": r.truncated,
        "users": users,
    })
}
|
|
|
|
// ─── Helper Functions ────────────────────────────────────────────────────────
|
|
|
|
fn format_relative_time(ms_epoch: i64) -> String {
|
|
let now = now_ms();
|
|
let diff = now - ms_epoch;
|
|
|
|
if diff < 0 {
|
|
return "in the future".to_string();
|
|
}
|
|
|
|
match diff {
|
|
d if d < 60_000 => "just now".to_string(),
|
|
d if d < 3_600_000 => format!("{} min ago", d / 60_000),
|
|
d if d < 86_400_000 => {
|
|
let n = d / 3_600_000;
|
|
format!("{n} {} ago", if n == 1 { "hour" } else { "hours" })
|
|
}
|
|
d if d < 604_800_000 => {
|
|
let n = d / 86_400_000;
|
|
format!("{n} {} ago", if n == 1 { "day" } else { "days" })
|
|
}
|
|
d if d < 2_592_000_000 => {
|
|
let n = d / 604_800_000;
|
|
format!("{n} {} ago", if n == 1 { "week" } else { "weeks" })
|
|
}
|
|
_ => {
|
|
let n = diff / 2_592_000_000;
|
|
format!("{n} {} ago", if n == 1 { "month" } else { "months" })
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Truncate `s` to at most `max` characters (Unicode scalar values, not
/// bytes), appending `"..."` when anything was cut.
///
/// Edge fix: for `max <= 3` there is no room for an ellipsis, so the string
/// is hard-cut to `max` characters (previously the result could be the
/// 3-char `"..."`, exceeding `max`). All in-file callers pass 30–40, so
/// their output is unchanged.
fn truncate_str(s: &str, max: usize) -> String {
    if s.chars().count() <= max {
        return s.to_owned();
    }
    if max <= 3 {
        // No room for "..."; never return more than `max` characters.
        return s.chars().take(max).collect();
    }
    let kept: String = s.chars().take(max - 3).collect();
    format!("{kept}...")
}
|
|
|
|
// ─── Tests ───────────────────────────────────────────────────────────────────
|
|
|
|
// Unit tests live in a sibling file (`who_tests.rs`) to keep this
// already-large module readable; compiled only under `cargo test`.
#[cfg(test)]
#[path = "who_tests.rs"]
mod tests;
|