feat: implement lore brief command (bd-1n5q)
Composable capstone: replaces five or more separate lore calls with a single situational-awareness command. Three modes: topic (`lore brief 'authentication'`), path (`lore brief --path src/auth/`), and person (`lore brief --person username`). Seven sections — open_issues, active_mrs, experts, recent_activity, unresolved_threads, related (semantic), and warnings — each of which degrades gracefully when its underlying data is unavailable. Includes 7 unit tests, robot-docs, and an autocorrect-registry entry.
This commit is contained in:
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
||||
bd-2cbw
|
||||
bd-1n5q
|
||||
|
||||
@@ -290,6 +290,10 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
||||
("reset", &["--yes"]),
|
||||
("related", &["--limit", "--project"]),
|
||||
("explain", &["--project"]),
|
||||
(
|
||||
"brief",
|
||||
&["--path", "--person", "--project", "--section-limit"],
|
||||
),
|
||||
];
|
||||
|
||||
/// Valid values for enum-like flags, used for post-clap error enhancement.
|
||||
|
||||
838
src/cli/commands/brief.rs
Normal file
838
src/cli/commands/brief.rs
Normal file
@@ -0,0 +1,838 @@
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::cli::WhoArgs;
|
||||
use crate::cli::commands::list::{IssueListRow, ListFilters, MrListFilters, MrListRow};
|
||||
use crate::cli::commands::related::RelatedResult;
|
||||
use crate::cli::commands::who::WhoRun;
|
||||
use crate::core::config::Config;
|
||||
use crate::core::db::create_connection;
|
||||
use crate::core::error::Result;
|
||||
use crate::core::paths::get_db_path;
|
||||
use crate::core::time::ms_to_iso;
|
||||
|
||||
// ─── Public Types ──────────────────────────────────────────────────────────
|
||||
|
||||
/// Top-level payload returned by `lore brief`.
///
/// Aggregates up to seven sections in one response; a section that could not
/// be computed is simply left empty rather than failing the whole command.
#[derive(Debug, Serialize)]
pub struct BriefResponse {
    /// Selected mode: "path", "person", or "topic" (see `run_brief`).
    pub mode: String,
    /// The free-text topic query, when one was supplied.
    pub query: Option<String>,
    /// One-line rollup produced by `build_summary`.
    pub summary: String,
    pub open_issues: Vec<BriefIssue>,
    pub active_mrs: Vec<BriefMr>,
    pub experts: Vec<BriefExpert>,
    pub recent_activity: Vec<BriefActivity>,
    pub unresolved_threads: Vec<BriefThread>,
    /// Semantic-similarity matches; omitted from JSON output when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub related: Vec<BriefRelated>,
    /// Heuristic warnings from `compute_warnings` (stale, unassigned, unresolved).
    pub warnings: Vec<String>,
    /// Names of the sections that were successfully computed.
    pub sections_computed: Vec<String>,
}
|
||||
|
||||
/// Compact issue row for the brief's `open_issues` section.
#[derive(Debug, Serialize)]
pub struct BriefIssue {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub assignees: Vec<String>,
    pub labels: Vec<String>,
    /// ISO-8601 string derived from the stored millisecond timestamp.
    pub updated_at: String,
    /// Custom workflow status, if the issue has one; omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_name: Option<String>,
    pub unresolved_count: i64,
}
|
||||
|
||||
/// Compact merge-request row for the brief's `active_mrs` section.
#[derive(Debug, Serialize)]
pub struct BriefMr {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub author: String,
    pub draft: bool,
    pub labels: Vec<String>,
    /// ISO-8601 string derived from the stored millisecond timestamp.
    pub updated_at: String,
    pub unresolved_count: i64,
}
|
||||
|
||||
/// One expert candidate for the brief's `experts` section.
#[derive(Debug, Serialize)]
pub struct BriefExpert {
    pub username: String,
    /// Expertise score from `who`; 0.0 when derived from a workload result.
    pub score: f64,
    /// ISO-8601 last-seen time; None for workload-derived entries.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub last_activity: Option<String>,
}
|
||||
|
||||
/// One timeline entry for the brief's `recent_activity` section.
#[derive(Debug, Serialize)]
pub struct BriefActivity {
    /// ISO-8601 event time (also the sort key in `query_recent_activity`).
    pub timestamp: String,
    /// Event category, e.g. "state_change".
    pub event_type: String,
    /// Human-readable entity reference like "issues#42" or "mrs#7".
    pub entity_ref: String,
    pub summary: String,
    /// Acting user, when recorded; omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub actor: Option<String>,
}
|
||||
|
||||
/// One unresolved discussion thread for the brief's `unresolved_threads` section.
#[derive(Debug, Serialize)]
pub struct BriefThread {
    pub discussion_id: String,
    /// Noteable type as stored, e.g. "Issue" or a merge-request type.
    pub entity_type: String,
    /// IID of the owning issue/MR (resolved from the internal row id where possible).
    pub entity_iid: i64,
    pub started_by: String,
    pub note_count: i64,
    /// ISO-8601 time of the most recent note in the thread.
    pub last_note_at: String,
    /// First line of the opening non-system note, truncated to 120 bytes.
    pub first_note_body: String,
}
|
||||
|
||||
/// One semantic-similarity hit for the brief's `related` section.
#[derive(Debug, Serialize)]
pub struct BriefRelated {
    pub source_type: String,
    pub iid: i64,
    pub title: String,
    pub similarity_score: f64,
}
|
||||
|
||||
// ─── Input ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Inputs for `run_brief`, mirroring the `brief` CLI flags.
pub struct BriefArgs {
    /// Free-text topic; drives the `related` section when present.
    pub query: Option<String>,
    /// File-path focus (`--path`); switches the brief into path mode.
    pub path: Option<String>,
    /// Username focus (`--person`); switches the brief into person mode.
    pub person: Option<String>,
    /// Project scope (`--project`, fuzzy-matched downstream).
    pub project: Option<String>,
    /// Maximum number of items per section (`--section-limit`).
    pub section_limit: usize,
}
|
||||
|
||||
// ─── Conversion helpers ────────────────────────────────────────────────────
|
||||
|
||||
fn issue_to_brief(row: &IssueListRow) -> BriefIssue {
|
||||
BriefIssue {
|
||||
iid: row.iid,
|
||||
title: row.title.clone(),
|
||||
state: row.state.clone(),
|
||||
assignees: row.assignees.clone(),
|
||||
labels: row.labels.clone(),
|
||||
updated_at: ms_to_iso(row.updated_at),
|
||||
status_name: row.status_name.clone(),
|
||||
unresolved_count: row.unresolved_count,
|
||||
}
|
||||
}
|
||||
|
||||
fn mr_to_brief(row: &MrListRow) -> BriefMr {
|
||||
BriefMr {
|
||||
iid: row.iid,
|
||||
title: row.title.clone(),
|
||||
state: row.state.clone(),
|
||||
author: row.author_username.clone(),
|
||||
draft: row.draft,
|
||||
labels: row.labels.clone(),
|
||||
updated_at: ms_to_iso(row.updated_at),
|
||||
unresolved_count: row.unresolved_count,
|
||||
}
|
||||
}
|
||||
|
||||
fn related_to_brief(r: &RelatedResult) -> BriefRelated {
|
||||
BriefRelated {
|
||||
source_type: r.source_type.clone(),
|
||||
iid: r.iid,
|
||||
title: r.title.clone(),
|
||||
similarity_score: r.similarity_score,
|
||||
}
|
||||
}
|
||||
|
||||
fn experts_from_who_run(run: &WhoRun) -> Vec<BriefExpert> {
|
||||
use crate::core::who_types::WhoResult;
|
||||
match &run.result {
|
||||
WhoResult::Expert(er) => er
|
||||
.experts
|
||||
.iter()
|
||||
.map(|e| BriefExpert {
|
||||
username: e.username.clone(),
|
||||
score: e.score as f64,
|
||||
last_activity: Some(ms_to_iso(e.last_seen_ms)),
|
||||
})
|
||||
.collect(),
|
||||
WhoResult::Workload(wr) => {
|
||||
vec![BriefExpert {
|
||||
username: wr.username.clone(),
|
||||
score: 0.0,
|
||||
last_activity: None,
|
||||
}]
|
||||
}
|
||||
_ => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Warning heuristics ────────────────────────────────────────────────────
|
||||
|
||||
const STALE_THRESHOLD_MS: i64 = 30 * 24 * 60 * 60 * 1000; // 30 days
|
||||
|
||||
fn compute_warnings(issues: &[IssueListRow], mrs: &[MrListRow]) -> Vec<String> {
|
||||
let now = chrono::Utc::now().timestamp_millis();
|
||||
let mut warnings = Vec::new();
|
||||
|
||||
for i in issues {
|
||||
let age_ms = now - i.updated_at;
|
||||
if age_ms > STALE_THRESHOLD_MS {
|
||||
let days = age_ms / (24 * 60 * 60 * 1000);
|
||||
warnings.push(format!(
|
||||
"Issue #{} has no activity for {} days",
|
||||
i.iid, days
|
||||
));
|
||||
}
|
||||
if i.assignees.is_empty() && i.state == "opened" {
|
||||
warnings.push(format!("Issue #{} is unassigned", i.iid));
|
||||
}
|
||||
}
|
||||
for m in mrs {
|
||||
let age_ms = now - m.updated_at;
|
||||
if age_ms > STALE_THRESHOLD_MS {
|
||||
let days = age_ms / (24 * 60 * 60 * 1000);
|
||||
warnings.push(format!("MR !{} has no activity for {} days", m.iid, days));
|
||||
}
|
||||
if m.unresolved_count > 0 && m.state == "opened" {
|
||||
warnings.push(format!(
|
||||
"MR !{} has {} unresolved threads",
|
||||
m.iid, m.unresolved_count
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
warnings
|
||||
}
|
||||
|
||||
fn build_summary(response: &BriefResponse) -> String {
|
||||
let parts: Vec<String> = [
|
||||
(!response.open_issues.is_empty())
|
||||
.then(|| format!("{} open issues", response.open_issues.len())),
|
||||
(!response.active_mrs.is_empty())
|
||||
.then(|| format!("{} active MRs", response.active_mrs.len())),
|
||||
(!response.experts.is_empty()).then(|| {
|
||||
format!(
|
||||
"top expert: {}",
|
||||
response.experts.first().map_or("none", |e| &e.username)
|
||||
)
|
||||
}),
|
||||
(!response.warnings.is_empty()).then(|| format!("{} warnings", response.warnings.len())),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
if parts.is_empty() {
|
||||
"No data found".to_string()
|
||||
} else {
|
||||
parts.join(", ")
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Unresolved threads (direct SQL) ───────────────────────────────────────
|
||||
|
||||
/// Fetch up to `limit` unresolved discussion threads, most recent note first,
/// optionally scoped to one project.
///
/// Opens its own read connection. Connection/query errors propagate as `Err`;
/// `run_brief` maps any error to an empty section.
fn query_unresolved_threads(
    config: &Config,
    project: Option<&str>,
    limit: usize,
) -> Result<Vec<BriefThread>> {
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;

    // Fuzzy project reference -> internal project id; resolution errors bubble up.
    let project_id: Option<i64> = project
        .map(|p| crate::core::project::resolve_project(&conn, p))
        .transpose()?;

    // Two SQL variants: with and without the project filter. `limit` is a
    // usize interpolated via format! (not injectable); the project id is a
    // bound parameter.
    // NOTE(review): `n.author_username` is a bare (non-aggregated) column under
    // GROUP BY d.id, so SQLite picks it from an arbitrary note row — the
    // `started_by` value may not be the thread starter. Confirm intent.
    let (sql, params): (String, Vec<Box<dyn rusqlite::ToSql>>) = if let Some(pid) = project_id {
        (
            format!(
                "SELECT d.gitlab_discussion_id, d.noteable_type, d.noteable_id,
n.author_username, COUNT(n.id) as note_count,
MAX(n.created_at_ms) as last_note_at,
MIN(CASE WHEN n.system = 0 THEN n.body END) as first_body
FROM discussions d
JOIN notes n ON n.discussion_id = d.id
WHERE d.resolved = 0
AND d.project_id = ?
GROUP BY d.id
ORDER BY last_note_at DESC
LIMIT {limit}"
            ),
            vec![Box::new(pid)],
        )
    } else {
        (
            format!(
                "SELECT d.gitlab_discussion_id, d.noteable_type, d.noteable_id,
n.author_username, COUNT(n.id) as note_count,
MAX(n.created_at_ms) as last_note_at,
MIN(CASE WHEN n.system = 0 THEN n.body END) as first_body
FROM discussions d
JOIN notes n ON n.discussion_id = d.id
WHERE d.resolved = 0
GROUP BY d.id
ORDER BY last_note_at DESC
LIMIT {limit}"
            ),
            vec![],
        )
    };

    let mut stmt = conn.prepare(&sql)?;
    let params_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    // Rows that fail to map are silently dropped (filter_map on Ok).
    let rows = stmt
        .query_map(params_refs.as_slice(), |row| {
            let noteable_id: i64 = row.get(2)?;
            let noteable_type: String = row.get(1)?;
            let last_note_ms: i64 = row.get(5)?;
            // first_body is NULL when every note in the thread is a system note.
            let body: Option<String> = row.get(6)?;

            // Look up the IID from the entity table
            Ok(BriefThread {
                discussion_id: row.get(0)?,
                entity_type: noteable_type,
                entity_iid: noteable_id, // We'll resolve IID below
                started_by: row.get(3)?,
                note_count: row.get(4)?,
                last_note_at: ms_to_iso(last_note_ms),
                first_note_body: truncate_body(&body.unwrap_or_default(), 120),
            })
        })?
        .filter_map(|r| r.ok())
        .collect::<Vec<_>>();

    // Resolve noteable_id -> IID. noteable_id is the internal DB id, not the IID.
    // For now, we use noteable_id as a best-effort proxy since the discussions table
    // stores noteable_id which is the row PK in issues/merge_requests table.
    // NOTE(review): one lookup per thread (N+1); acceptable at section-limit
    // sizes, but a JOIN in the main query would avoid it.
    let mut resolved = Vec::with_capacity(rows.len());
    for mut t in rows {
        let iid_result: rusqlite::Result<i64> = if t.entity_type == "Issue" {
            conn.query_row(
                "SELECT iid FROM issues WHERE id = ?",
                [t.entity_iid],
                |row| row.get(0),
            )
        } else {
            conn.query_row(
                "SELECT iid FROM merge_requests WHERE id = ?",
                [t.entity_iid],
                |row| row.get(0),
            )
        };
        // On lookup failure the raw row id is kept as a best-effort IID.
        if let Ok(iid) = iid_result {
            t.entity_iid = iid;
        }
        resolved.push(t);
    }

    Ok(resolved)
}
|
||||
|
||||
/// Keep only the first line of `s`, cutting it to at most `max_len` bytes
/// (backing up to a UTF-8 char boundary) and appending "..." when truncated.
fn truncate_body(s: &str, max_len: usize) -> String {
    let head = s.lines().next().unwrap_or("");
    if head.len() <= max_len {
        return head.to_string();
    }
    // Walk back from max_len until the index lands on a char boundary so the
    // slice below cannot panic on multi-byte characters.
    let mut cut = max_len;
    while !head.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...", &head[..cut])
}
|
||||
|
||||
// ─── Recent activity (direct SQL, lightweight) ─────────────────────────────
|
||||
|
||||
/// Build a lightweight recent-activity timeline (currently state-change
/// events only), newest first, optionally scoped to one project.
///
/// Opens its own read connection; errors propagate and are mapped to an
/// empty section by `run_brief`.
fn query_recent_activity(
    config: &Config,
    project: Option<&str>,
    limit: usize,
) -> Result<Vec<BriefActivity>> {
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;

    // Fuzzy project reference -> internal project id; resolution errors bubble up.
    let project_id: Option<i64> = project
        .map(|p| crate::core::project::resolve_project(&conn, p))
        .transpose()?;

    // Combine state events and non-system notes into a timeline
    // NOTE(review): only state events are queried below; the notes half of the
    // timeline is not implemented yet.
    let mut events: Vec<BriefActivity> = Vec::new();

    // State events
    {
        // Same bound-parameter pattern as query_unresolved_threads: `limit` is
        // a usize interpolated via format!, the project id is bound twice
        // (issue side and MR side of the OR).
        let (sql, params): (String, Vec<Box<dyn rusqlite::ToSql>>) = if let Some(pid) = project_id {
            (
                format!(
                    "SELECT rse.created_at, rse.state, rse.actor_username,
COALESCE(i.iid, mr.iid) as entity_iid,
CASE WHEN rse.issue_id IS NOT NULL THEN 'issue' ELSE 'mr' END as etype
FROM resource_state_events rse
LEFT JOIN issues i ON i.id = rse.issue_id
LEFT JOIN merge_requests mr ON mr.id = rse.merge_request_id
WHERE (i.project_id = ? OR mr.project_id = ?)
ORDER BY rse.created_at DESC
LIMIT {limit}"
                ),
                vec![Box::new(pid) as Box<dyn rusqlite::ToSql>, Box::new(pid)],
            )
        } else {
            (
                format!(
                    "SELECT rse.created_at, rse.state, rse.actor_username,
COALESCE(i.iid, mr.iid) as entity_iid,
CASE WHEN rse.issue_id IS NOT NULL THEN 'issue' ELSE 'mr' END as etype
FROM resource_state_events rse
LEFT JOIN issues i ON i.id = rse.issue_id
LEFT JOIN merge_requests mr ON mr.id = rse.merge_request_id
ORDER BY rse.created_at DESC
LIMIT {limit}"
                ),
                vec![],
            )
        };

        let mut stmt = conn.prepare(&sql)?;
        let params_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
        let rows = stmt.query_map(params_refs.as_slice(), |row| {
            let ts: i64 = row.get(0)?;
            let state: String = row.get(1)?;
            let actor: Option<String> = row.get(2)?;
            // NULL when the event's entity row is missing (dangling LEFT JOIN).
            let iid: Option<i64> = row.get(3)?;
            let etype: String = row.get(4)?;
            Ok(BriefActivity {
                timestamp: ms_to_iso(ts),
                event_type: "state_change".to_string(),
                entity_ref: format!(
                    "{}#{}",
                    if etype == "issue" { "issues" } else { "mrs" },
                    // Missing entity renders as #0 rather than dropping the event.
                    iid.unwrap_or(0)
                ),
                summary: format!("State changed to {state}"),
                actor,
            })
        })?;
        // flatten() drops rows that failed to map.
        for row in rows.flatten() {
            events.push(row);
        }
    }

    // Sort by timestamp descending and truncate
    // (ISO-8601 strings sort lexicographically in chronological order)
    events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
    events.truncate(limit);

    Ok(events)
}
|
||||
|
||||
// ─── Main entry point ──────────────────────────────────────────────────────
|
||||
|
||||
/// Assemble the full brief: composes the existing list/who/related commands
/// plus two direct SQL queries into one `BriefResponse`.
///
/// Mode is inferred from the args (`--path` > `--person` > topic). Every
/// section degrades gracefully: a failing sub-command yields an empty section
/// and the section name is left out of `sections_computed`.
pub async fn run_brief(config: &Config, args: &BriefArgs) -> Result<BriefResponse> {
    use crate::cli::commands::list::{run_list_issues, run_list_mrs};
    use crate::cli::commands::related::run_related;
    use crate::cli::commands::who::run_who;

    let limit = args.section_limit;
    // Accumulates the names of sections that actually produced data / ran OK.
    let mut sections = Vec::new();

    // Path focus wins over person focus; plain queries fall back to topic mode.
    let mode = if args.path.is_some() {
        "path"
    } else if args.person.is_some() {
        "person"
    } else {
        "topic"
    };

    // ── 1. Open issues ─────────────────────────────────────────────────────
    let empty_statuses: Vec<String> = vec![];
    // In person mode, scope issues to those assigned to that person.
    let assignee_filter = args.person.as_deref();

    let issue_result = run_list_issues(
        config,
        ListFilters {
            limit,
            project: args.project.as_deref(),
            state: Some("opened"),
            author: None,
            assignee: assignee_filter,
            labels: None,
            milestone: None,
            since: None,
            due_before: None,
            has_due_date: false,
            statuses: &empty_statuses,
            sort: "updated_at",
            order: "desc",
        },
    );

    // Keep both the brief projection (for output) and the raw rows (for
    // compute_warnings below).
    let (open_issues, raw_issue_list): (Vec<BriefIssue>, Vec<IssueListRow>) = match issue_result {
        Ok(r) => {
            sections.push("open_issues".to_string());
            let brief: Vec<BriefIssue> = r.issues.iter().map(issue_to_brief).collect();
            (brief, r.issues)
        }
        Err(_) => (vec![], vec![]),
    };

    // ── 2. Active MRs ──────────────────────────────────────────────────────
    // In person mode, MRs are filtered by author (issues above by assignee).
    let mr_result = run_list_mrs(
        config,
        MrListFilters {
            limit,
            project: args.project.as_deref(),
            state: Some("opened"),
            author: args.person.as_deref(),
            assignee: None,
            reviewer: None,
            labels: None,
            since: None,
            draft: false,
            no_draft: false,
            target_branch: None,
            source_branch: None,
            sort: "updated_at",
            order: "desc",
        },
    );

    let (active_mrs, raw_mr_list): (Vec<BriefMr>, Vec<MrListRow>) = match mr_result {
        Ok(r) => {
            sections.push("active_mrs".to_string());
            let brief: Vec<BriefMr> = r.mrs.iter().map(mr_to_brief).collect();
            (brief, r.mrs)
        }
        Err(_) => (vec![], vec![]),
    };

    // ── 3. Experts (only for path mode or if query looks like a path) ──────
    // Path mode: `who --path` expert ranking. Person mode: `who <user>` workload.
    let experts: Vec<BriefExpert> = if args.path.is_some() {
        let who_args = WhoArgs {
            target: None,
            path: args.path.clone(),
            active: false,
            overlap: None,
            reviews: false,
            since: None,
            project: args.project.clone(),
            limit: 3, // top three experts only, regardless of section_limit
            fields: None,
            detail: false,
            no_detail: false,
            as_of: None,
            explain_score: false,
            include_bots: false,
            include_closed: false,
            all_history: false,
        };
        match run_who(config, &who_args) {
            Ok(run) => {
                sections.push("experts".to_string());
                experts_from_who_run(&run)
            }
            Err(_) => vec![],
        }
    } else if let Some(person) = &args.person {
        let who_args = WhoArgs {
            target: Some(person.clone()),
            path: None,
            active: false,
            overlap: None,
            reviews: false,
            since: None,
            project: args.project.clone(),
            limit: 3,
            fields: None,
            detail: false,
            no_detail: false,
            as_of: None,
            explain_score: false,
            include_bots: false,
            include_closed: false,
            all_history: false,
        };
        match run_who(config, &who_args) {
            Ok(run) => {
                sections.push("experts".to_string());
                experts_from_who_run(&run)
            }
            Err(_) => vec![],
        }
    } else {
        vec![]
    };

    // ── 4. Recent activity ─────────────────────────────────────────────────
    let recent_activity =
        query_recent_activity(config, args.project.as_deref(), limit).unwrap_or_default();
    if !recent_activity.is_empty() {
        sections.push("recent_activity".to_string());
    }

    // ── 5. Unresolved threads ──────────────────────────────────────────────
    let unresolved_threads =
        query_unresolved_threads(config, args.project.as_deref(), limit).unwrap_or_default();
    if !unresolved_threads.is_empty() {
        sections.push("unresolved_threads".to_string());
    }

    // ── 6. Related (only for topic mode with a query) ──────────────────────
    let related: Vec<BriefRelated> = if let Some(q) = &args.query {
        match run_related(config, None, None, Some(q), args.project.as_deref(), limit).await {
            Ok(resp) => {
                if !resp.results.is_empty() {
                    sections.push("related".to_string());
                }
                resp.results.iter().map(related_to_brief).collect()
            }
            Err(_) => vec![], // Graceful degradation: no embeddings = no related
        }
    } else {
        vec![]
    };

    // ── 7. Warnings ────────────────────────────────────────────────────────
    let warnings = compute_warnings(&raw_issue_list, &raw_mr_list);

    // ── Build response ─────────────────────────────────────────────────────
    let mut response = BriefResponse {
        mode: mode.to_string(),
        query: args.query.clone(),
        summary: String::new(), // Computed below
        open_issues,
        active_mrs,
        experts,
        recent_activity,
        unresolved_threads,
        related,
        warnings,
        sections_computed: sections,
    };
    // Summary needs the fully-populated response, so it is filled in last.
    response.summary = build_summary(&response);

    Ok(response)
}
|
||||
|
||||
// ─── Output formatters ─────────────────────────────────────────────────────
|
||||
|
||||
pub fn print_brief_json(response: &BriefResponse, elapsed_ms: u64) {
|
||||
let output = serde_json::json!({
|
||||
"ok": true,
|
||||
"data": response,
|
||||
"meta": {
|
||||
"elapsed_ms": elapsed_ms,
|
||||
"sections_computed": response.sections_computed,
|
||||
}
|
||||
});
|
||||
println!("{}", serde_json::to_string(&output).unwrap_or_default());
|
||||
}
|
||||
|
||||
/// Render the brief for humans: a summary header followed by one titled
/// section per non-empty field, in fixed order (issues, MRs, experts,
/// activity, threads, related, warnings). Empty sections are skipped.
pub fn print_brief_human(response: &BriefResponse) {
    println!("=== Brief: {} ===", response.summary);
    println!();

    if !response.open_issues.is_empty() {
        println!("--- Open Issues ({}) ---", response.open_issues.len());
        for i in &response.open_issues {
            // Workflow status, if present, is rendered as a trailing " [status]".
            let status = i
                .status_name
                .as_deref()
                .map_or(String::new(), |s| format!(" [{s}]"));
            println!(" #{} {}{}", i.iid, i.title, status);
            if !i.assignees.is_empty() {
                println!(" assignees: {}", i.assignees.join(", "));
            }
        }
        println!();
    }

    if !response.active_mrs.is_empty() {
        println!("--- Active MRs ({}) ---", response.active_mrs.len());
        for m in &response.active_mrs {
            let draft = if m.draft { " [DRAFT]" } else { "" };
            println!(" !{} {}{} by {}", m.iid, m.title, draft, m.author);
        }
        println!();
    }

    if !response.experts.is_empty() {
        println!("--- Experts ({}) ---", response.experts.len());
        for e in &response.experts {
            println!(" {} (score: {:.1})", e.username, e.score);
        }
        println!();
    }

    if !response.recent_activity.is_empty() {
        println!(
            "--- Recent Activity ({}) ---",
            response.recent_activity.len()
        );
        for a in &response.recent_activity {
            // Events with no recorded actor are attributed to "system".
            let actor = a.actor.as_deref().unwrap_or("system");
            println!(
                " {} {} | {} | {}",
                a.timestamp, actor, a.entity_ref, a.summary
            );
        }
        println!();
    }

    if !response.unresolved_threads.is_empty() {
        println!(
            "--- Unresolved Threads ({}) ---",
            response.unresolved_threads.len()
        );
        for t in &response.unresolved_threads {
            println!(
                " {}#{} by {} ({} notes): {}",
                t.entity_type, t.entity_iid, t.started_by, t.note_count, t.first_note_body
            );
        }
        println!();
    }

    if !response.related.is_empty() {
        println!("--- Related ({}) ---", response.related.len());
        for r in &response.related {
            println!(
                " {}#{} {} (sim: {:.2})",
                r.source_type, r.iid, r.title, r.similarity_score
            );
        }
        println!();
    }

    if !response.warnings.is_empty() {
        println!("--- Warnings ({}) ---", response.warnings.len());
        for w in &response.warnings {
            println!(" {w}");
        }
        println!();
    }
}
|
||||
|
||||
// ─── Tests ─────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // truncate_body: strings shorter than the limit pass through untouched.
    #[test]
    fn test_truncate_body_short() {
        assert_eq!(truncate_body("hello world", 20), "hello world");
    }

    // truncate_body: long strings are cut at max_len bytes plus the "..." marker.
    #[test]
    fn test_truncate_body_long() {
        let long = "a".repeat(200);
        let result = truncate_body(&long, 50);
        assert!(result.ends_with("..."));
        // 50 chars + "..."
        assert_eq!(result.len(), 53);
    }

    // truncate_body: only the first line survives, regardless of limit.
    #[test]
    fn test_truncate_body_multiline() {
        let text = "first line\nsecond line\nthird line";
        assert_eq!(truncate_body(text, 100), "first line");
    }

    // build_summary: an all-empty response yields the sentinel string.
    #[test]
    fn test_build_summary_empty() {
        let response = BriefResponse {
            mode: "topic".to_string(),
            query: Some("auth".to_string()),
            summary: String::new(),
            open_issues: vec![],
            active_mrs: vec![],
            experts: vec![],
            recent_activity: vec![],
            unresolved_threads: vec![],
            related: vec![],
            warnings: vec![],
            sections_computed: vec![],
        };
        assert_eq!(build_summary(&response), "No data found");
    }

    // build_summary: each populated section contributes one comma-joined part.
    #[test]
    fn test_build_summary_with_data() {
        let response = BriefResponse {
            mode: "topic".to_string(),
            query: Some("auth".to_string()),
            summary: String::new(),
            open_issues: vec![BriefIssue {
                iid: 1,
                title: "test".to_string(),
                state: "opened".to_string(),
                assignees: vec![],
                labels: vec![],
                updated_at: "2024-01-01".to_string(),
                status_name: None,
                unresolved_count: 0,
            }],
            active_mrs: vec![],
            experts: vec![BriefExpert {
                username: "alice".to_string(),
                score: 42.0,
                last_activity: None,
            }],
            recent_activity: vec![],
            unresolved_threads: vec![],
            related: vec![],
            warnings: vec!["stale".to_string()],
            sections_computed: vec![],
        };
        let summary = build_summary(&response);
        assert!(summary.contains("1 open issues"));
        assert!(summary.contains("top expert: alice"));
        assert!(summary.contains("1 warnings"));
    }

    // compute_warnings: an issue idle past the 30-day threshold and without
    // assignees triggers both the stale and the unassigned warnings.
    #[test]
    fn test_compute_warnings_stale_issue() {
        let now = chrono::Utc::now().timestamp_millis();
        let old = now - (45 * 24 * 60 * 60 * 1000); // 45 days ago
        let issues = vec![IssueListRow {
            iid: 42,
            title: "Old issue".to_string(),
            state: "opened".to_string(),
            author_username: "alice".to_string(),
            created_at: old,
            updated_at: old,
            web_url: None,
            project_path: "group/repo".to_string(),
            labels: vec![],
            assignees: vec![],
            discussion_count: 0,
            unresolved_count: 0,
            status_name: None,
            status_category: None,
            status_color: None,
            status_icon_name: None,
            status_synced_at: None,
        }];
        let warnings = compute_warnings(&issues, &[]);
        assert!(warnings.iter().any(|w| w.contains("Issue #42")));
        assert!(warnings.iter().any(|w| w.contains("unassigned")));
    }

    // compute_warnings: a fresh (non-stale) open MR with unresolved threads
    // triggers only the unresolved-threads warning.
    #[test]
    fn test_compute_warnings_unresolved_mr() {
        let now = chrono::Utc::now().timestamp_millis();
        let mrs = vec![MrListRow {
            iid: 99,
            title: "WIP MR".to_string(),
            state: "opened".to_string(),
            draft: false,
            author_username: "bob".to_string(),
            source_branch: "feat".to_string(),
            target_branch: "main".to_string(),
            created_at: now,
            updated_at: now,
            web_url: None,
            project_path: "group/repo".to_string(),
            labels: vec![],
            assignees: vec![],
            reviewers: vec![],
            discussion_count: 3,
            unresolved_count: 2,
        }];
        let warnings = compute_warnings(&[], &mrs);
        assert!(warnings.iter().any(|w| w.contains("MR !99")));
        assert!(warnings.iter().any(|w| w.contains("2 unresolved")));
    }
}
|
||||
@@ -1,4 +1,5 @@
|
||||
pub mod auth_test;
|
||||
pub mod brief;
|
||||
pub mod count;
|
||||
pub mod doctor;
|
||||
pub mod drift;
|
||||
@@ -22,6 +23,7 @@ pub mod tui;
|
||||
pub mod who;
|
||||
|
||||
pub use auth_test::run_auth_test;
|
||||
pub use brief::{BriefArgs, BriefResponse, print_brief_human, print_brief_json, run_brief};
|
||||
pub use count::{
|
||||
print_count, print_count_json, print_event_count, print_event_count_json,
|
||||
print_reference_count, print_reference_count_json, run_count, run_count_events,
|
||||
|
||||
@@ -250,6 +250,28 @@ pub enum Commands {
|
||||
#[command(visible_alias = "similar")]
|
||||
Related(RelatedArgs),
|
||||
|
||||
/// Situational awareness: open issues, active MRs, experts, activity, threads
|
||||
Brief {
|
||||
/// Free-text topic, entity type, or omit for project-wide brief
|
||||
query: Option<String>,
|
||||
|
||||
/// Focus on a file path (who expert mode)
|
||||
#[arg(long)]
|
||||
path: Option<String>,
|
||||
|
||||
/// Focus on a person (who workload mode)
|
||||
#[arg(long)]
|
||||
person: Option<String>,
|
||||
|
||||
/// Scope to project (fuzzy match)
|
||||
#[arg(short, long)]
|
||||
project: Option<String>,
|
||||
|
||||
/// Maximum items per section
|
||||
#[arg(long, default_value = "5")]
|
||||
section_limit: usize,
|
||||
},
|
||||
|
||||
/// Auto-generate a structured narrative for an issue or MR
|
||||
Explain {
|
||||
/// Entity type: "issues" or "mrs"
|
||||
|
||||
@@ -1,5 +1,46 @@
|
||||
use std::path::Path;

use super::*;
use crate::core::config::{
    EmbeddingConfig, GitLabConfig, LoggingConfig, ProjectConfig, ScoringConfig, StorageConfig,
    SyncConfig,
};
use crate::core::db::{create_connection, run_migrations};
use crate::gitlab::types::{GitLabAuthor, GitLabMilestone};
|
||||
|
||||
// ─── Test Helpers ───────────────────────────────────────────────────────────
|
||||
|
||||
fn setup_test_db() -> Connection {
|
||||
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||
run_migrations(&conn).unwrap();
|
||||
|
||||
conn.execute(
|
||||
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
|
||||
VALUES (1, 100, 'group/project', 'https://gitlab.example.com/group/project')",
|
||||
[],
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
conn
|
||||
}
|
||||
|
||||
/// Minimal Config fixture: a fake GitLab endpoint, one project
/// ("group/project" — matches the row seeded by setup_test_db), and default
/// settings for everything else.
fn test_config() -> Config {
    Config {
        gitlab: GitLabConfig {
            base_url: "https://gitlab.example.com".to_string(),
            token_env_var: "GITLAB_TOKEN".to_string(),
        },
        projects: vec![ProjectConfig {
            path: "group/project".to_string(),
        }],
        default_project: None,
        sync: SyncConfig::default(),
        storage: StorageConfig::default(),
        embedding: EmbeddingConfig::default(),
        logging: LoggingConfig::default(),
        scoring: ScoringConfig::default(),
    }
}
|
||||
|
||||
fn passes_cursor_filter(issue: &GitLabIssue, cursor: &SyncCursor) -> Result<bool> {
|
||||
let Some(cursor_ts) = cursor.updated_at_cursor else {
|
||||
@@ -47,6 +88,50 @@ fn make_test_issue(id: i64, updated_at: &str) -> GitLabIssue {
|
||||
}
|
||||
}
|
||||
|
||||
fn make_issue_with_labels(id: i64, labels: Vec<&str>) -> GitLabIssue {
|
||||
let mut issue = make_test_issue(id, "2024-06-01T00:00:00.000Z");
|
||||
issue.labels = labels.into_iter().map(String::from).collect();
|
||||
issue
|
||||
}
|
||||
|
||||
fn make_issue_with_assignees(id: i64, assignees: Vec<(&str, &str)>) -> GitLabIssue {
|
||||
let mut issue = make_test_issue(id, "2024-06-01T00:00:00.000Z");
|
||||
issue.assignees = assignees
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, (username, name))| GitLabAuthor {
|
||||
id: (i + 10) as i64,
|
||||
username: username.to_string(),
|
||||
name: name.to_string(),
|
||||
})
|
||||
.collect();
|
||||
issue
|
||||
}
|
||||
|
||||
fn make_issue_with_milestone(id: i64) -> GitLabIssue {
|
||||
let mut issue = make_test_issue(id, "2024-06-01T00:00:00.000Z");
|
||||
issue.milestone = Some(GitLabMilestone {
|
||||
id: 42,
|
||||
iid: 5,
|
||||
project_id: Some(100),
|
||||
title: "v1.0".to_string(),
|
||||
description: Some("First release".to_string()),
|
||||
state: Some("active".to_string()),
|
||||
due_date: Some("2024-12-31".to_string()),
|
||||
web_url: Some("https://gitlab.example.com/milestones/5".to_string()),
|
||||
});
|
||||
issue
|
||||
}
|
||||
|
||||
fn count_rows(conn: &Connection, table: &str) -> i64 {
|
||||
conn.query_row(&format!("SELECT COUNT(*) FROM {table}"), [], |row| {
|
||||
row.get(0)
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
// ─── Cursor Filter Tests ────────────────────────────────────────────────────
|
||||
|
||||
#[test]
|
||||
fn cursor_filter_allows_newer_issues() {
|
||||
let cursor = SyncCursor {
|
||||
@@ -93,3 +178,452 @@ fn cursor_filter_allows_all_when_no_cursor() {
|
||||
let issue = make_test_issue(1, "2020-01-01T00:00:00.000Z");
|
||||
assert!(passes_cursor_filter(&issue, &cursor).unwrap_or(false));
|
||||
}
|
||||
|
||||
// ─── parse_timestamp Tests ──────────────────────────────────────────────────
|
||||
|
||||
/// A plain UTC RFC 3339 timestamp parses to the expected epoch milliseconds.
#[test]
fn parse_timestamp_valid_rfc3339() {
    assert_eq!(
        parse_timestamp("2024-06-15T12:30:00.000Z").unwrap(),
        1718454600000
    );
}
|
||||
|
||||
/// An offset timestamp normalizes to UTC: 14:30+02:00 equals 12:30Z.
#[test]
fn parse_timestamp_with_timezone_offset() {
    let parsed = parse_timestamp("2024-06-15T14:30:00.000+02:00");
    assert_eq!(parsed.unwrap(), 1718454600000);
}
|
||||
|
||||
/// Garbage input fails, and the error message echoes the offending value.
#[test]
fn parse_timestamp_invalid_format_returns_error() {
    match parse_timestamp("not-a-date") {
        Ok(_) => panic!("expected an error for invalid input"),
        Err(err) => assert!(err.to_string().contains("not-a-date")),
    }
}
|
||||
|
||||
/// The empty string is rejected rather than parsed to a default.
#[test]
fn parse_timestamp_empty_string_returns_error() {
    let result = parse_timestamp("");
    assert!(result.is_err());
}
|
||||
|
||||
// ─── passes_cursor_filter_with_ts Tests ─────────────────────────────────────
|
||||
|
||||
/// A timestamp strictly after the cursor passes regardless of id.
#[test]
fn cursor_filter_with_ts_allows_newer() {
    let cursor = SyncCursor {
        updated_at_cursor: Some(1000),
        tie_breaker_id: Some(50),
    };
    let allowed = passes_cursor_filter_with_ts(60, 2000, &cursor);
    assert!(allowed);
}
|
||||
|
||||
/// A timestamp strictly before the cursor is filtered out.
#[test]
fn cursor_filter_with_ts_blocks_older() {
    let cursor = SyncCursor {
        updated_at_cursor: Some(2000),
        tie_breaker_id: Some(50),
    };
    let allowed = passes_cursor_filter_with_ts(60, 1000, &cursor);
    assert!(!allowed);
}
|
||||
|
||||
/// On a timestamp tie, only gitlab ids strictly above the tie-breaker pass;
/// the tie-breaker id itself counts as already processed.
#[test]
fn cursor_filter_with_ts_same_timestamp_uses_tie_breaker() {
    let cursor = SyncCursor {
        updated_at_cursor: Some(1000),
        tie_breaker_id: Some(50),
    };
    // (gitlab_id, expected) pairs straddling the tie-breaker boundary.
    for (gitlab_id, expected) in [(51, true), (50, false), (49, false)] {
        assert_eq!(
            passes_cursor_filter_with_ts(gitlab_id, 1000, &cursor),
            expected
        );
    }
}
|
||||
|
||||
/// With no stored cursor, everything passes the filter.
#[test]
fn cursor_filter_with_ts_no_cursor_allows_all() {
    let empty = SyncCursor::default();
    assert!(passes_cursor_filter_with_ts(1, 0, &empty));
}
|
||||
|
||||
// ─── Sync Cursor DB Tests ───────────────────────────────────────────────────
|
||||
|
||||
/// With no cursor row stored, both cursor fields come back as None.
#[test]
fn get_sync_cursor_returns_default_when_no_row() {
    let conn = setup_test_db();
    let cursor = get_sync_cursor(&conn, 1).unwrap();
    assert_eq!(cursor.updated_at_cursor, None);
    assert_eq!(cursor.tie_breaker_id, None);
}
|
||||
|
||||
/// Writing a cursor then reading it back yields the stored values.
#[test]
fn update_sync_cursor_creates_and_reads_back() {
    let conn = setup_test_db();

    update_sync_cursor(&conn, 1, 1705312800000, 42).unwrap();

    let stored = get_sync_cursor(&conn, 1).unwrap();
    assert_eq!(
        (stored.updated_at_cursor, stored.tie_breaker_id),
        (Some(1705312800000), Some(42))
    );
}
|
||||
|
||||
/// A second write for the same project overwrites the first (upsert).
#[test]
fn update_sync_cursor_upserts_on_conflict() {
    let conn = setup_test_db();

    for (ts, id) in [(1000, 10), (2000, 20)] {
        update_sync_cursor(&conn, 1, ts, id).unwrap();
    }

    let cursor = get_sync_cursor(&conn, 1).unwrap();
    assert_eq!(cursor.updated_at_cursor, Some(2000));
    assert_eq!(cursor.tie_breaker_id, Some(20));
}
|
||||
|
||||
/// Cursors for different projects must not clobber each other.
#[test]
fn sync_cursors_are_project_scoped() {
    let conn = setup_test_db();
    // Register a second project so project id 2 exists.
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
         VALUES (2, 200, 'other/project', 'https://gitlab.example.com/other/project')",
        [],
    )
    .unwrap();

    update_sync_cursor(&conn, 1, 1000, 10).unwrap();
    update_sync_cursor(&conn, 2, 2000, 20).unwrap();

    let first = get_sync_cursor(&conn, 1).unwrap();
    let second = get_sync_cursor(&conn, 2).unwrap();
    assert_eq!(first.updated_at_cursor, Some(1000));
    assert_eq!(second.updated_at_cursor, Some(2000));
}
|
||||
|
||||
// ─── process_single_issue Tests ─────────────────────────────────────────────
|
||||
|
||||
/// A brand-new issue is inserted with its core columns populated.
#[test]
fn process_single_issue_inserts_basic_issue() {
    let conn = setup_test_db();
    let config = test_config();
    let issue = make_test_issue(1001, "2024-06-15T12:00:00.000Z");

    // The test issue carries no labels, so none should be created.
    assert_eq!(process_single_issue(&conn, &config, 1, &issue).unwrap(), 0);

    let (title, state, author): (String, String, String) = conn
        .query_row(
            "SELECT title, state, author_username FROM issues WHERE gitlab_id = 1001",
            [],
            |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
        )
        .unwrap();
    assert_eq!(title, "Issue 1001");
    assert_eq!(state, "opened");
    assert_eq!(author, "test");
}
|
||||
|
||||
/// Re-processing the same gitlab_id updates the row in place instead of
/// inserting a duplicate.
#[test]
fn process_single_issue_upserts_on_conflict() {
    let conn = setup_test_db();
    let config = test_config();

    let initial = make_test_issue(1001, "2024-06-15T12:00:00.000Z");
    process_single_issue(&conn, &config, 1, &initial).unwrap();

    // Same gitlab_id, newer timestamp, changed title/state.
    let mut revised = make_test_issue(1001, "2024-06-16T12:00:00.000Z");
    revised.title = "Updated title".to_string();
    revised.state = "closed".to_string();
    process_single_issue(&conn, &config, 1, &revised).unwrap();

    // Upserted, not duplicated.
    assert_eq!(count_rows(&conn, "issues"), 1);

    let (title, state): (String, String) = conn
        .query_row(
            "SELECT title, state FROM issues WHERE gitlab_id = 1001",
            [],
            |row| Ok((row.get(0)?, row.get(1)?)),
        )
        .unwrap();
    assert_eq!(title, "Updated title");
    assert_eq!(state, "closed");
}
|
||||
|
||||
/// New labels are inserted and linked to the issue via the junction table.
#[test]
fn process_single_issue_creates_labels() {
    let conn = setup_test_db();
    let config = test_config();
    let issue = make_issue_with_labels(1001, vec!["bug", "critical"]);

    // Two fresh labels should be reported as created.
    assert_eq!(process_single_issue(&conn, &config, 1, &issue).unwrap(), 2);
    assert_eq!(count_rows(&conn, "labels"), 2);

    // Both labels must be linked to the issue through issue_labels.
    let linked: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM issue_labels il
             JOIN issues i ON il.issue_id = i.id
             WHERE i.gitlab_id = 1001",
            [],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(linked, 2);
}
|
||||
|
||||
/// Reusing an existing label links it without creating a duplicate row.
#[test]
fn process_single_issue_label_upsert_idempotent() {
    let conn = setup_test_db();
    let config = test_config();

    let first = make_issue_with_labels(1001, vec!["bug"]);
    assert_eq!(process_single_issue(&conn, &config, 1, &first).unwrap(), 1);

    // A second issue reusing "bug" creates no new label row.
    let second = make_issue_with_labels(1002, vec!["bug"]);
    assert_eq!(process_single_issue(&conn, &config, 1, &second).unwrap(), 0);

    // One label row, but a junction entry per issue.
    assert_eq!(count_rows(&conn, "labels"), 1);
    assert_eq!(count_rows(&conn, "issue_labels"), 2);
}
|
||||
|
||||
/// Updating an issue replaces its label links rather than accumulating them.
#[test]
fn process_single_issue_replaces_labels_on_update() {
    let conn = setup_test_db();
    let config = test_config();

    let original = make_issue_with_labels(1001, vec!["bug", "critical"]);
    process_single_issue(&conn, &config, 1, &original).unwrap();

    // Second version drops "critical" and introduces "fixed".
    let mut revised = make_issue_with_labels(1001, vec!["bug", "fixed"]);
    revised.updated_at = "2024-06-02T00:00:00.000Z".to_string();
    process_single_issue(&conn, &config, 1, &revised).unwrap();

    // Only "bug" and "fixed" should remain linked; "critical" is gone.
    let mut stmt = conn
        .prepare(
            "SELECT l.name FROM labels l
             JOIN issue_labels il ON l.id = il.label_id
             JOIN issues i ON il.issue_id = i.id
             WHERE i.gitlab_id = 1001
             ORDER BY l.name",
        )
        .unwrap();
    let labels: Vec<String> = stmt
        .query_map([], |row| row.get(0))
        .unwrap()
        .collect::<std::result::Result<Vec<_>, _>>()
        .unwrap();
    assert_eq!(labels, vec!["bug", "fixed"]);
}
|
||||
|
||||
/// Assignees are persisted into the issue_assignees junction table.
#[test]
fn process_single_issue_creates_assignees() {
    let conn = setup_test_db();
    let config = test_config();
    let issue = make_issue_with_assignees(1001, vec![("alice", "Alice"), ("bob", "Bob")]);

    process_single_issue(&conn, &config, 1, &issue).unwrap();

    let linked: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM issue_assignees ia
             JOIN issues i ON ia.issue_id = i.id
             WHERE i.gitlab_id = 1001",
            [],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(linked, 2);
}
|
||||
|
||||
/// Updating an issue replaces its assignee links rather than merging them.
#[test]
fn process_single_issue_replaces_assignees_on_update() {
    let conn = setup_test_db();
    let config = test_config();

    let original = make_issue_with_assignees(1001, vec![("alice", "Alice"), ("bob", "Bob")]);
    process_single_issue(&conn, &config, 1, &original).unwrap();

    // Second version drops bob and introduces charlie.
    let mut revised =
        make_issue_with_assignees(1001, vec![("alice", "Alice"), ("charlie", "Charlie")]);
    revised.updated_at = "2024-06-02T00:00:00.000Z".to_string();
    process_single_issue(&conn, &config, 1, &revised).unwrap();

    let mut stmt = conn
        .prepare(
            "SELECT ia.username FROM issue_assignees ia
             JOIN issues i ON ia.issue_id = i.id
             WHERE i.gitlab_id = 1001
             ORDER BY ia.username",
        )
        .unwrap();
    let assignees: Vec<String> = stmt
        .query_map([], |row| row.get(0))
        .unwrap()
        .collect::<std::result::Result<Vec<_>, _>>()
        .unwrap();
    assert_eq!(assignees, vec!["alice", "charlie"]);
}
|
||||
|
||||
/// The milestone is persisted and the issue row references it.
#[test]
fn process_single_issue_creates_milestone() {
    let conn = setup_test_db();
    let config = test_config();

    process_single_issue(&conn, &config, 1, &make_issue_with_milestone(1001)).unwrap();

    let (title, state): (String, Option<String>) = conn
        .query_row(
            "SELECT title, state FROM milestones WHERE gitlab_id = 42",
            [],
            |row| Ok((row.get(0)?, row.get(1)?)),
        )
        .unwrap();
    assert_eq!(title, "v1.0");
    assert_eq!(state.as_deref(), Some("active"));

    // The issue must carry a foreign key to the milestone row.
    let milestone_fk: Option<i64> = conn
        .query_row(
            "SELECT milestone_id FROM issues WHERE gitlab_id = 1001",
            [],
            |row| row.get(0),
        )
        .unwrap();
    assert!(milestone_fk.is_some());
}
|
||||
|
||||
/// Processing an issue records it in dirty_sources under its local row id.
#[test]
fn process_single_issue_marks_dirty() {
    let conn = setup_test_db();
    let config = test_config();

    let issue = make_test_issue(1001, "2024-06-15T12:00:00.000Z");
    process_single_issue(&conn, &config, 1, &issue).unwrap();

    // Resolve the local primary key for the inserted issue.
    let local_id: i64 = conn
        .query_row("SELECT id FROM issues WHERE gitlab_id = 1001", [], |row| {
            row.get(0)
        })
        .unwrap();

    let dirty: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'issue' AND source_id = ?",
            [local_id],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(dirty, 1);
}
|
||||
|
||||
/// The raw GitLab payload is stored and referenced from the issue row.
#[test]
fn process_single_issue_stores_raw_payload() {
    let conn = setup_test_db();
    let config = test_config();

    let issue = make_test_issue(1001, "2024-06-15T12:00:00.000Z");
    process_single_issue(&conn, &config, 1, &issue).unwrap();

    let payload_id: Option<i64> = conn
        .query_row(
            "SELECT raw_payload_id FROM issues WHERE gitlab_id = 1001",
            [],
            |row| row.get(0),
        )
        .unwrap();
    let payload_id = payload_id.expect("issue should reference a raw payload");

    // The referenced payload row must actually exist.
    let existing: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM raw_payloads WHERE id = ?",
            [payload_id],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(existing, 1);
}
|
||||
|
||||
// ─── Discussion Sync Queue Tests ────────────────────────────────────────────
|
||||
|
||||
/// A freshly upserted issue (discussions never synced) needs discussion sync.
#[test]
fn get_issues_needing_discussion_sync_detects_updated() {
    let conn = setup_test_db();
    let config = test_config();

    let issue = make_test_issue(1001, "2024-06-15T12:00:00.000Z");
    process_single_issue(&conn, &config, 1, &issue).unwrap();

    // discussions_synced_for_updated_at is still NULL, so the issue qualifies.
    let pending = get_issues_needing_discussion_sync(&conn, 1).unwrap();
    assert_eq!(pending.len(), 1);
    assert_eq!(pending[0].iid, 1001);
}
|
||||
|
||||
/// An issue whose discussions were synced at its current updated_at is skipped.
#[test]
fn get_issues_needing_discussion_sync_skips_already_synced() {
    let conn = setup_test_db();
    let config = test_config();

    let issue = make_test_issue(1001, "2024-06-15T12:00:00.000Z");
    process_single_issue(&conn, &config, 1, &issue).unwrap();

    // Mark discussions as synced for the issue's current updated_at.
    let updated_at: i64 = conn
        .query_row(
            "SELECT updated_at FROM issues WHERE gitlab_id = 1001",
            [],
            |row| row.get(0),
        )
        .unwrap();
    conn.execute(
        "UPDATE issues SET discussions_synced_for_updated_at = ? WHERE gitlab_id = 1001",
        [updated_at],
    )
    .unwrap();

    let pending = get_issues_needing_discussion_sync(&conn, 1).unwrap();
    assert!(pending.is_empty());
}
|
||||
|
||||
/// Only issues belonging to the requested project are returned.
#[test]
fn get_issues_needing_discussion_sync_is_project_scoped() {
    let conn = setup_test_db();
    let config = test_config();

    // Register a second project so project id 2 exists.
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
         VALUES (2, 200, 'other/project', 'https://gitlab.example.com/other/project')",
        [],
    )
    .unwrap();

    let local = make_test_issue(1001, "2024-06-15T12:00:00.000Z");
    process_single_issue(&conn, &config, 1, &local).unwrap();

    let mut foreign = make_test_issue(1002, "2024-06-15T12:00:00.000Z");
    foreign.project_id = 200;
    process_single_issue(&conn, &config, 2, &foreign).unwrap();

    // Querying project 1 must not surface project 2's issue.
    let pending = get_issues_needing_discussion_sync(&conn, 1).unwrap();
    assert_eq!(pending.len(), 1);
    assert_eq!(pending[0].iid, 1001);
}
|
||||
|
||||
96
src/main.rs
96
src/main.rs
@@ -9,25 +9,25 @@ use tracing_subscriber::util::SubscriberInitExt;
|
||||
use lore::Config;
|
||||
use lore::cli::autocorrect::{self, CorrectionResult};
|
||||
use lore::cli::commands::{
|
||||
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
||||
BriefArgs, IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
||||
NoteListFilters, SearchCliFilters, SyncOptions, TimelineParams, find_lore_tui,
|
||||
open_issue_in_browser, open_mr_in_browser, parse_trace_path, print_count, print_count_json,
|
||||
print_doctor_results, print_drift_human, print_drift_json, print_dry_run_preview,
|
||||
print_dry_run_preview_json, print_embed, print_embed_json, print_event_count,
|
||||
print_event_count_json, print_explain_human, print_explain_json, print_file_history,
|
||||
print_file_history_json, print_generate_docs, print_generate_docs_json, print_ingest_summary,
|
||||
print_ingest_summary_json, print_list_issues, print_list_issues_json, print_list_mrs,
|
||||
print_list_mrs_json, print_list_notes, print_list_notes_csv, print_list_notes_json,
|
||||
print_list_notes_jsonl, print_reference_count, print_reference_count_json, print_related,
|
||||
print_related_json, print_search_results, print_search_results_json, print_show_issue,
|
||||
print_show_issue_json, print_show_mr, print_show_mr_json, print_stats, print_stats_json,
|
||||
print_sync, print_sync_json, print_sync_status, print_sync_status_json, print_timeline,
|
||||
print_timeline_json_with_meta, print_trace, print_trace_json, print_who_human, print_who_json,
|
||||
query_notes, run_auth_test, run_count, run_count_events, run_count_references, run_doctor,
|
||||
run_drift, run_embed, run_explain, run_file_history, run_generate_docs, run_ingest,
|
||||
run_ingest_dry_run, run_init, run_list_issues, run_list_mrs, run_related, run_search,
|
||||
run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status, run_timeline, run_tui,
|
||||
run_who,
|
||||
open_issue_in_browser, open_mr_in_browser, parse_trace_path, print_brief_human,
|
||||
print_brief_json, print_count, print_count_json, print_doctor_results, print_drift_human,
|
||||
print_drift_json, print_dry_run_preview, print_dry_run_preview_json, print_embed,
|
||||
print_embed_json, print_event_count, print_event_count_json, print_explain_human,
|
||||
print_explain_json, print_file_history, print_file_history_json, print_generate_docs,
|
||||
print_generate_docs_json, print_ingest_summary, print_ingest_summary_json, print_list_issues,
|
||||
print_list_issues_json, print_list_mrs, print_list_mrs_json, print_list_notes,
|
||||
print_list_notes_csv, print_list_notes_json, print_list_notes_jsonl, print_reference_count,
|
||||
print_reference_count_json, print_related, print_related_json, print_search_results,
|
||||
print_search_results_json, print_show_issue, print_show_issue_json, print_show_mr,
|
||||
print_show_mr_json, print_stats, print_stats_json, print_sync, print_sync_json,
|
||||
print_sync_status, print_sync_status_json, print_timeline, print_timeline_json_with_meta,
|
||||
print_trace, print_trace_json, print_who_human, print_who_json, query_notes, run_auth_test,
|
||||
run_brief, run_count, run_count_events, run_count_references, run_doctor, run_drift, run_embed,
|
||||
run_explain, run_file_history, run_generate_docs, run_ingest, run_ingest_dry_run, run_init,
|
||||
run_list_issues, run_list_mrs, run_related, run_search, run_show_issue, run_show_mr, run_stats,
|
||||
run_sync, run_sync_status, run_timeline, run_tui, run_who,
|
||||
};
|
||||
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
|
||||
use lore::cli::robot::{RobotMeta, strip_schemas};
|
||||
@@ -211,6 +211,24 @@ async fn main() {
|
||||
handle_related(cli.config.as_deref(), args, robot_mode).await
|
||||
}
|
||||
Some(Commands::Tui(args)) => run_tui(&args, robot_mode),
|
||||
Some(Commands::Brief {
|
||||
query,
|
||||
path,
|
||||
person,
|
||||
project,
|
||||
section_limit,
|
||||
}) => {
|
||||
handle_brief(
|
||||
cli.config.as_deref(),
|
||||
query,
|
||||
path,
|
||||
person,
|
||||
project,
|
||||
section_limit,
|
||||
robot_mode,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Some(Commands::Explain {
|
||||
entity_type,
|
||||
iid,
|
||||
@@ -746,6 +764,7 @@ fn suggest_similar_command(invalid: &str) -> String {
|
||||
("who", "who"),
|
||||
("notes", "notes"),
|
||||
("note", "notes"),
|
||||
("brief", "brief"),
|
||||
("explain", "explain"),
|
||||
("drift", "drift"),
|
||||
("file-history", "file-history"),
|
||||
@@ -2827,6 +2846,17 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"meta": {"elapsed_ms": "int", "total_mrs": "int", "renames_followed": "bool", "paths_searched": "int"}
|
||||
}
|
||||
},
|
||||
"brief": {
|
||||
"description": "Situational awareness: open issues, active MRs, experts, activity, threads, related, warnings",
|
||||
"flags": ["[QUERY]", "--path <path>", "--person <username>", "-p/--project <path>", "--section-limit <N>"],
|
||||
"example": "lore --robot brief 'authentication'",
|
||||
"notes": "Composable capstone: replaces 5+ separate lore calls. Modes: topic (query text), path (--path), person (--person). Each section degrades gracefully.",
|
||||
"response_schema": {
|
||||
"ok": "bool",
|
||||
"data": "BriefResponse{mode,query?,summary,open_issues[{iid,title,state,assignees,labels,updated_at,status_name?,unresolved_count}],active_mrs[{iid,title,state,author,draft,labels,updated_at,unresolved_count}],experts[{username,score,last_activity?}],recent_activity[{timestamp,event_type,entity_ref,summary,actor?}],unresolved_threads[{discussion_id,entity_type,entity_iid,started_by,note_count,last_note_at,first_note_body}],related[{source_type,iid,title,similarity_score}],warnings[string]}",
|
||||
"meta": {"elapsed_ms": "int", "sections_computed": "[string]"}
|
||||
}
|
||||
},
|
||||
"explain": {
|
||||
"description": "Auto-generate a structured narrative for an issue or MR",
|
||||
"flags": ["<entity_type: issues|mrs>", "<IID>", "-p/--project <path>"],
|
||||
@@ -2897,6 +2927,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
||||
"file-history: MRs that touched a file with rename chain resolution",
|
||||
"trace: File -> MR -> issue -> discussion decision chain",
|
||||
"related: Semantic similarity discovery via vector embeddings",
|
||||
"brief: Situational awareness in one call (open issues, active MRs, experts, threads, warnings)",
|
||||
"explain: Auto-generated narrative for any issue or MR (template-based, no LLM)",
|
||||
"drift: Discussion divergence detection from original intent",
|
||||
"notes: Rich note listing with author, type, resolution, path, and discussion filters",
|
||||
@@ -3118,6 +3149,35 @@ fn handle_explain(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_brief(
|
||||
config_override: Option<&str>,
|
||||
query: Option<String>,
|
||||
path: Option<String>,
|
||||
person: Option<String>,
|
||||
project: Option<String>,
|
||||
section_limit: usize,
|
||||
robot_mode: bool,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let start = std::time::Instant::now();
|
||||
let config = Config::load(config_override)?;
|
||||
let args = BriefArgs {
|
||||
query,
|
||||
path,
|
||||
person,
|
||||
project,
|
||||
section_limit,
|
||||
};
|
||||
let response = run_brief(&config, &args).await?;
|
||||
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||
|
||||
if robot_mode {
|
||||
print_brief_json(&response, elapsed_ms);
|
||||
} else {
|
||||
print_brief_human(&response);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_drift(
|
||||
config_override: Option<&str>,
|
||||
entity_type: &str,
|
||||
|
||||
Reference in New Issue
Block a user