refactor(structure): reorganize codebase into domain-focused modules

This commit is contained in:
teernisse
2026-03-06 15:22:42 -05:00
parent 4d41d74ea7
commit bf977eca1a
78 changed files with 8704 additions and 6973 deletions

View File

@@ -6,8 +6,8 @@ use crate::Config;
use crate::cli::robot::RobotMeta;
use crate::core::db::create_connection;
use crate::core::error::Result;
use crate::core::events_db::{self, EventCounts};
use crate::core::paths::get_db_path;
use crate::ingestion::storage::events::{EventCounts, count_events};
pub struct CountResult {
pub entity: String,
@@ -208,7 +208,7 @@ struct CountJsonBreakdown {
pub fn run_count_events(config: &Config) -> Result<EventCounts> {
let db_path = get_db_path(config.storage.db_path.as_deref());
let conn = create_connection(&db_path)?;
events_db::count_events(&conn)
count_events(&conn)
}
#[derive(Serialize)]

View File

@@ -0,0 +1,26 @@
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use crate::cli::render::Theme;
use indicatif::{ProgressBar, ProgressStyle};
use rusqlite::Connection;
use serde::Serialize;
use tracing::Instrument;
use crate::Config;
use crate::cli::robot::RobotMeta;
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::lock::{AppLock, LockOptions};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::shutdown::ShutdownSignal;
use crate::gitlab::GitLabClient;
use crate::ingestion::{
IngestMrProjectResult, IngestProjectResult, ProgressEvent, ingest_project_issues_with_progress,
ingest_project_merge_requests_with_progress,
};
include!("run.rs");
include!("render.rs");

View File

@@ -0,0 +1,331 @@
/// Print the per-project one-line summary for an issues sync, followed by
/// discussion and skip detail lines when those counters are non-zero.
fn print_issue_project_summary(path: &str, result: &IngestProjectResult) {
    let label_suffix = match result.labels_created {
        0 => String::new(),
        n => format!(", {} new labels", n),
    };
    println!(
        " {}: {} issues fetched{}",
        Theme::info().render(path),
        result.issues_upserted,
        label_suffix
    );
    if result.issues_synced_discussions > 0 {
        println!(
            " {} issues -> {} discussions, {} notes",
            result.issues_synced_discussions, result.discussions_fetched, result.notes_upserted
        );
    }
    if result.issues_skipped_discussion_sync > 0 {
        println!(
            " {} unchanged issues (discussion sync skipped)",
            Theme::dim().render(&result.issues_skipped_discussion_sync.to_string())
        );
    }
}
/// Print the per-project one-line summary for a merge-request sync, followed
/// by discussion / diff-note / skip detail lines when those counters are
/// non-zero.
fn print_mr_project_summary(path: &str, result: &IngestMrProjectResult) {
    let label_suffix = match result.labels_created {
        0 => String::new(),
        n => format!(", {} new labels", n),
    };
    // Both counts are rendered as soon as either one is non-zero.
    let people_suffix = match (result.assignees_linked, result.reviewers_linked) {
        (0, 0) => String::new(),
        (a, r) => format!(", {} assignees, {} reviewers", a, r),
    };
    println!(
        " {}: {} MRs fetched{}{}",
        Theme::info().render(path),
        result.mrs_upserted,
        label_suffix,
        people_suffix
    );
    if result.mrs_synced_discussions > 0 {
        let diff_suffix = match result.diffnotes_count {
            0 => String::new(),
            n => format!(" ({} diff notes)", n),
        };
        println!(
            " {} MRs -> {} discussions, {} notes{}",
            result.mrs_synced_discussions,
            result.discussions_fetched,
            result.notes_upserted,
            diff_suffix
        );
    }
    if result.mrs_skipped_discussion_sync > 0 {
        println!(
            " {} unchanged MRs (discussion sync skipped)",
            Theme::dim().render(&result.mrs_skipped_discussion_sync.to_string())
        );
    }
}
/// Top-level JSON envelope for robot-mode ingest output
/// (serialized in print_ingest_summary_json).
#[derive(Serialize)]
struct IngestJsonOutput {
    ok: bool,
    data: IngestJsonData,
    meta: RobotMeta,
}
/// Payload of the robot-mode ingest summary.
#[derive(Serialize)]
struct IngestJsonData {
    resource_type: String,
    projects_synced: usize,
    // Exactly one of `issues` / `merge_requests` is Some, chosen by
    // resource_type in print_ingest_summary_json; the other is omitted
    // from the JSON.
    #[serde(skip_serializing_if = "Option::is_none")]
    issues: Option<IngestIssueStats>,
    #[serde(skip_serializing_if = "Option::is_none")]
    merge_requests: Option<IngestMrStats>,
    labels_created: usize,
    discussions_fetched: usize,
    notes_upserted: usize,
    resource_events_fetched: usize,
    resource_events_failed: usize,
    // Omitted entirely when no project reported status enrichment.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    status_enrichment: Vec<StatusEnrichmentJson>,
    status_enrichment_errors: usize,
}
/// Per-project status-enrichment outcome for the robot JSON output.
/// Built field-for-field from `result.status_enrichment_projects` in
/// print_ingest_summary_json; optional fields are dropped when None.
#[derive(Serialize)]
struct StatusEnrichmentJson {
    mode: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    reason: Option<String>,
    seen: usize,
    enriched: usize,
    cleared: usize,
    without_widget: usize,
    partial_errors: usize,
    // First partial error only — presumably to bound output size; the
    // full count is in partial_errors.
    #[serde(skip_serializing_if = "Option::is_none")]
    first_partial_error: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<String>,
}
/// Issue-sync counters for the robot JSON output (populated from the
/// `issues_*` fields of IngestResult).
#[derive(Serialize)]
struct IngestIssueStats {
    fetched: usize,
    upserted: usize,
    synced_discussions: usize,
    skipped_discussion_sync: usize,
}
/// Merge-request-sync counters for the robot JSON output (populated from
/// the `mrs_*` and link fields of IngestResult).
#[derive(Serialize)]
struct IngestMrStats {
    fetched: usize,
    upserted: usize,
    synced_discussions: usize,
    skipped_discussion_sync: usize,
    assignees_linked: usize,
    reviewers_linked: usize,
    diffnotes_count: usize,
}
/// Serialize an ingest run as a single line of robot JSON on stdout.
///
/// Exactly one of the `issues` / `merge_requests` stat objects is emitted,
/// selected by `result.resource_type` ("issues" picks issue stats, anything
/// else merge-request stats). Serialization errors go to stderr.
pub fn print_ingest_summary_json(result: &IngestResult, elapsed_ms: u64) {
    let is_issues = result.resource_type == "issues";
    let issues = is_issues.then(|| IngestIssueStats {
        fetched: result.issues_fetched,
        upserted: result.issues_upserted,
        synced_discussions: result.issues_synced_discussions,
        skipped_discussion_sync: result.issues_skipped_discussion_sync,
    });
    let merge_requests = (!is_issues).then(|| IngestMrStats {
        fetched: result.mrs_fetched,
        upserted: result.mrs_upserted,
        synced_discussions: result.mrs_synced_discussions,
        skipped_discussion_sync: result.mrs_skipped_discussion_sync,
        assignees_linked: result.assignees_linked,
        reviewers_linked: result.reviewers_linked,
        diffnotes_count: result.diffnotes_count,
    });
    let status_enrichment = result
        .status_enrichment_projects
        .iter()
        .map(|p| StatusEnrichmentJson {
            mode: p.mode.clone(),
            reason: p.reason.clone(),
            seen: p.seen,
            enriched: p.enriched,
            cleared: p.cleared,
            without_widget: p.without_widget,
            partial_errors: p.partial_errors,
            first_partial_error: p.first_partial_error.clone(),
            error: p.error.clone(),
        })
        .collect::<Vec<_>>();
    let output = IngestJsonOutput {
        ok: true,
        data: IngestJsonData {
            resource_type: result.resource_type.clone(),
            projects_synced: result.projects_synced,
            issues,
            merge_requests,
            labels_created: result.labels_created,
            discussions_fetched: result.discussions_fetched,
            notes_upserted: result.notes_upserted,
            resource_events_fetched: result.resource_events_fetched,
            resource_events_failed: result.resource_events_failed,
            status_enrichment,
            status_enrichment_errors: result.status_enrichment_errors,
        },
        meta: RobotMeta { elapsed_ms },
    };
    match serde_json::to_string(&output) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}
/// Print the human-readable end-of-run totals: one "Total:" line per
/// resource type, an optional skipped-sync line, and a resource-events
/// line when any events were fetched or failed.
pub fn print_ingest_summary(result: &IngestResult) {
    println!();
    if result.resource_type == "issues" {
        let total = format!(
            "Total: {} issues, {} discussions, {} notes",
            result.issues_upserted, result.discussions_fetched, result.notes_upserted
        );
        println!("{}", Theme::success().render(&total));
        if result.issues_skipped_discussion_sync > 0 {
            let skipped = format!(
                "Skipped discussion sync for {} unchanged issues.",
                result.issues_skipped_discussion_sync
            );
            println!("{}", Theme::dim().render(&skipped));
        }
    } else {
        let diff_suffix = match result.diffnotes_count {
            0 => String::new(),
            n => format!(" ({} diff notes)", n),
        };
        let total = format!(
            "Total: {} MRs, {} discussions, {} notes{}",
            result.mrs_upserted, result.discussions_fetched, result.notes_upserted, diff_suffix
        );
        println!("{}", Theme::success().render(&total));
        if result.mrs_skipped_discussion_sync > 0 {
            let skipped = format!(
                "Skipped discussion sync for {} unchanged MRs.",
                result.mrs_skipped_discussion_sync
            );
            println!("{}", Theme::dim().render(&skipped));
        }
    }
    if result.resource_events_fetched > 0 || result.resource_events_failed > 0 {
        let failed_suffix = match result.resource_events_failed {
            0 => String::new(),
            n => format!(", {} failed", n),
        };
        println!(
            " Resource events: {} fetched{}",
            result.resource_events_fetched, failed_suffix
        );
    }
}
/// Render the human-readable dry-run preview: header, resource type,
/// sync mode, project count, and one entry per project that would sync.
pub fn print_dry_run_preview(preview: &DryRunPreview) {
    println!(
        "{} {}",
        Theme::info().bold().render("Dry Run Preview"),
        Theme::warning().render("(no changes will be made)")
    );
    println!();
    let type_label = if preview.resource_type == "issues" {
        "issues"
    } else {
        "merge requests"
    };
    println!(" Resource type: {}", Theme::bold().render(type_label));
    // Hoisted so the println! call stays a single-argument format.
    let mode_label = if preview.sync_mode == "full" {
        Theme::warning().render("full (all data will be re-fetched)")
    } else {
        Theme::success().render("incremental (only changes since last sync)")
    };
    println!(" Sync mode: {}", mode_label);
    println!(" Projects: {}", preview.projects.len());
    println!();
    println!("{}", Theme::info().bold().render("Projects to sync:"));
    for project in &preview.projects {
        // No stored cursor means the project has never been synced.
        let sync_status = if project.has_cursor {
            Theme::success().render("incremental")
        } else {
            Theme::warning().render("initial sync")
        };
        println!(
            " {} ({})",
            Theme::bold().render(&project.path),
            sync_status
        );
        println!(" Existing {}: {}", type_label, project.existing_count);
        if let Some(last_synced) = project.last_synced.as_ref() {
            println!(" Last synced: {}", last_synced);
        }
    }
}
/// JSON envelope for robot-mode dry-run output; `dry_run` is always true
/// so consumers can distinguish it from a real ingest result.
#[derive(Serialize)]
struct DryRunJsonOutput {
    ok: bool,
    dry_run: bool,
    data: DryRunPreview,
}
/// Emit the dry-run preview as a single line of JSON with `ok` and
/// `dry_run` flags set; serialization errors go to stderr.
pub fn print_dry_run_preview_json(preview: &DryRunPreview) {
    let payload = DryRunJsonOutput {
        ok: true,
        dry_run: true,
        data: preview.clone(),
    };
    match serde_json::to_string(&payload) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}

View File

@@ -1,27 +1,3 @@
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use crate::cli::render::Theme;
use indicatif::{ProgressBar, ProgressStyle};
use rusqlite::Connection;
use serde::Serialize;
use tracing::Instrument;
use crate::Config;
use crate::cli::robot::RobotMeta;
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::lock::{AppLock, LockOptions};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::shutdown::ShutdownSignal;
use crate::gitlab::GitLabClient;
use crate::ingestion::{
IngestMrProjectResult, IngestProjectResult, ProgressEvent, ingest_project_issues_with_progress,
ingest_project_merge_requests_with_progress,
};
#[derive(Default)]
pub struct IngestResult {
pub resource_type: String,
@@ -783,334 +759,3 @@ fn get_projects_to_sync(
Ok(projects)
}
fn print_issue_project_summary(path: &str, result: &IngestProjectResult) {
let labels_str = if result.labels_created > 0 {
format!(", {} new labels", result.labels_created)
} else {
String::new()
};
println!(
" {}: {} issues fetched{}",
Theme::info().render(path),
result.issues_upserted,
labels_str
);
if result.issues_synced_discussions > 0 {
println!(
" {} issues -> {} discussions, {} notes",
result.issues_synced_discussions, result.discussions_fetched, result.notes_upserted
);
}
if result.issues_skipped_discussion_sync > 0 {
println!(
" {} unchanged issues (discussion sync skipped)",
Theme::dim().render(&result.issues_skipped_discussion_sync.to_string())
);
}
}
fn print_mr_project_summary(path: &str, result: &IngestMrProjectResult) {
let labels_str = if result.labels_created > 0 {
format!(", {} new labels", result.labels_created)
} else {
String::new()
};
let assignees_str = if result.assignees_linked > 0 || result.reviewers_linked > 0 {
format!(
", {} assignees, {} reviewers",
result.assignees_linked, result.reviewers_linked
)
} else {
String::new()
};
println!(
" {}: {} MRs fetched{}{}",
Theme::info().render(path),
result.mrs_upserted,
labels_str,
assignees_str
);
if result.mrs_synced_discussions > 0 {
let diffnotes_str = if result.diffnotes_count > 0 {
format!(" ({} diff notes)", result.diffnotes_count)
} else {
String::new()
};
println!(
" {} MRs -> {} discussions, {} notes{}",
result.mrs_synced_discussions,
result.discussions_fetched,
result.notes_upserted,
diffnotes_str
);
}
if result.mrs_skipped_discussion_sync > 0 {
println!(
" {} unchanged MRs (discussion sync skipped)",
Theme::dim().render(&result.mrs_skipped_discussion_sync.to_string())
);
}
}
#[derive(Serialize)]
struct IngestJsonOutput {
ok: bool,
data: IngestJsonData,
meta: RobotMeta,
}
#[derive(Serialize)]
struct IngestJsonData {
resource_type: String,
projects_synced: usize,
#[serde(skip_serializing_if = "Option::is_none")]
issues: Option<IngestIssueStats>,
#[serde(skip_serializing_if = "Option::is_none")]
merge_requests: Option<IngestMrStats>,
labels_created: usize,
discussions_fetched: usize,
notes_upserted: usize,
resource_events_fetched: usize,
resource_events_failed: usize,
#[serde(skip_serializing_if = "Vec::is_empty")]
status_enrichment: Vec<StatusEnrichmentJson>,
status_enrichment_errors: usize,
}
#[derive(Serialize)]
struct StatusEnrichmentJson {
mode: String,
#[serde(skip_serializing_if = "Option::is_none")]
reason: Option<String>,
seen: usize,
enriched: usize,
cleared: usize,
without_widget: usize,
partial_errors: usize,
#[serde(skip_serializing_if = "Option::is_none")]
first_partial_error: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
error: Option<String>,
}
#[derive(Serialize)]
struct IngestIssueStats {
fetched: usize,
upserted: usize,
synced_discussions: usize,
skipped_discussion_sync: usize,
}
#[derive(Serialize)]
struct IngestMrStats {
fetched: usize,
upserted: usize,
synced_discussions: usize,
skipped_discussion_sync: usize,
assignees_linked: usize,
reviewers_linked: usize,
diffnotes_count: usize,
}
pub fn print_ingest_summary_json(result: &IngestResult, elapsed_ms: u64) {
let (issues, merge_requests) = if result.resource_type == "issues" {
(
Some(IngestIssueStats {
fetched: result.issues_fetched,
upserted: result.issues_upserted,
synced_discussions: result.issues_synced_discussions,
skipped_discussion_sync: result.issues_skipped_discussion_sync,
}),
None,
)
} else {
(
None,
Some(IngestMrStats {
fetched: result.mrs_fetched,
upserted: result.mrs_upserted,
synced_discussions: result.mrs_synced_discussions,
skipped_discussion_sync: result.mrs_skipped_discussion_sync,
assignees_linked: result.assignees_linked,
reviewers_linked: result.reviewers_linked,
diffnotes_count: result.diffnotes_count,
}),
)
};
let status_enrichment: Vec<StatusEnrichmentJson> = result
.status_enrichment_projects
.iter()
.map(|p| StatusEnrichmentJson {
mode: p.mode.clone(),
reason: p.reason.clone(),
seen: p.seen,
enriched: p.enriched,
cleared: p.cleared,
without_widget: p.without_widget,
partial_errors: p.partial_errors,
first_partial_error: p.first_partial_error.clone(),
error: p.error.clone(),
})
.collect();
let output = IngestJsonOutput {
ok: true,
data: IngestJsonData {
resource_type: result.resource_type.clone(),
projects_synced: result.projects_synced,
issues,
merge_requests,
labels_created: result.labels_created,
discussions_fetched: result.discussions_fetched,
notes_upserted: result.notes_upserted,
resource_events_fetched: result.resource_events_fetched,
resource_events_failed: result.resource_events_failed,
status_enrichment,
status_enrichment_errors: result.status_enrichment_errors,
},
meta: RobotMeta { elapsed_ms },
};
match serde_json::to_string(&output) {
Ok(json) => println!("{json}"),
Err(e) => eprintln!("Error serializing to JSON: {e}"),
}
}
pub fn print_ingest_summary(result: &IngestResult) {
println!();
if result.resource_type == "issues" {
println!(
"{}",
Theme::success().render(&format!(
"Total: {} issues, {} discussions, {} notes",
result.issues_upserted, result.discussions_fetched, result.notes_upserted
))
);
if result.issues_skipped_discussion_sync > 0 {
println!(
"{}",
Theme::dim().render(&format!(
"Skipped discussion sync for {} unchanged issues.",
result.issues_skipped_discussion_sync
))
);
}
} else {
let diffnotes_str = if result.diffnotes_count > 0 {
format!(" ({} diff notes)", result.diffnotes_count)
} else {
String::new()
};
println!(
"{}",
Theme::success().render(&format!(
"Total: {} MRs, {} discussions, {} notes{}",
result.mrs_upserted,
result.discussions_fetched,
result.notes_upserted,
diffnotes_str
))
);
if result.mrs_skipped_discussion_sync > 0 {
println!(
"{}",
Theme::dim().render(&format!(
"Skipped discussion sync for {} unchanged MRs.",
result.mrs_skipped_discussion_sync
))
);
}
}
if result.resource_events_fetched > 0 || result.resource_events_failed > 0 {
println!(
" Resource events: {} fetched{}",
result.resource_events_fetched,
if result.resource_events_failed > 0 {
format!(", {} failed", result.resource_events_failed)
} else {
String::new()
}
);
}
}
pub fn print_dry_run_preview(preview: &DryRunPreview) {
println!(
"{} {}",
Theme::info().bold().render("Dry Run Preview"),
Theme::warning().render("(no changes will be made)")
);
println!();
let type_label = if preview.resource_type == "issues" {
"issues"
} else {
"merge requests"
};
println!(" Resource type: {}", Theme::bold().render(type_label));
println!(
" Sync mode: {}",
if preview.sync_mode == "full" {
Theme::warning().render("full (all data will be re-fetched)")
} else {
Theme::success().render("incremental (only changes since last sync)")
}
);
println!(" Projects: {}", preview.projects.len());
println!();
println!("{}", Theme::info().bold().render("Projects to sync:"));
for project in &preview.projects {
let sync_status = if !project.has_cursor {
Theme::warning().render("initial sync")
} else {
Theme::success().render("incremental")
};
println!(
" {} ({})",
Theme::bold().render(&project.path),
sync_status
);
println!(" Existing {}: {}", type_label, project.existing_count);
if let Some(ref last_synced) = project.last_synced {
println!(" Last synced: {}", last_synced);
}
}
}
#[derive(Serialize)]
struct DryRunJsonOutput {
ok: bool,
dry_run: bool,
data: DryRunPreview,
}
pub fn print_dry_run_preview_json(preview: &DryRunPreview) {
let output = DryRunJsonOutput {
ok: true,
dry_run: true,
data: preview.clone(),
};
match serde_json::to_string(&output) {
Ok(json) => println!("{json}"),
Err(e) => eprintln!("Error serializing to JSON: {e}"),
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,443 @@
use crate::cli::render::{self, Align, Icons, StyledCell, Table as LoreTable, Theme};
use rusqlite::Connection;
use serde::Serialize;
use crate::Config;
use crate::cli::robot::{expand_fields_preset, filter_fields};
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::{ms_to_iso, parse_since};
use super::render_helpers::{format_assignees, format_discussions};
/// One issue row as read from SQLite by query_issues.
/// Timestamps are epoch milliseconds; IssueListRowJson converts them to
/// ISO strings for robot output.
#[derive(Debug, Serialize)]
pub struct IssueListRow {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub author_username: String,
    pub created_at: i64,
    pub updated_at: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub discussion_count: i64,
    // Resolvable-but-unresolved discussions only (see query_issues SQL).
    pub unresolved_count: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_name: Option<String>,
    // Never serialized, unlike its siblings. NOTE(review): not used by the
    // visible rendering path either — confirm its consumer.
    #[serde(skip_serializing)]
    pub status_category: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_color: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_icon_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_synced_at: Option<i64>,
}
/// Robot-output shape of IssueListRow: same fields, but millisecond
/// timestamps replaced by `*_iso` ISO-8601 strings (via ms_to_iso).
#[derive(Serialize)]
pub struct IssueListRowJson {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub author_username: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub discussion_count: i64,
    pub unresolved_count: i64,
    pub created_at_iso: String,
    pub updated_at_iso: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_name: Option<String>,
    // Kept out of the JSON, mirroring IssueListRow.
    #[serde(skip_serializing)]
    pub status_category: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_color: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_icon_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status_synced_at_iso: Option<String>,
}
impl From<&IssueListRow> for IssueListRowJson {
fn from(row: &IssueListRow) -> Self {
Self {
iid: row.iid,
title: row.title.clone(),
state: row.state.clone(),
author_username: row.author_username.clone(),
labels: row.labels.clone(),
assignees: row.assignees.clone(),
discussion_count: row.discussion_count,
unresolved_count: row.unresolved_count,
created_at_iso: ms_to_iso(row.created_at),
updated_at_iso: ms_to_iso(row.updated_at),
web_url: row.web_url.clone(),
project_path: row.project_path.clone(),
status_name: row.status_name.clone(),
status_category: row.status_category.clone(),
status_color: row.status_color.clone(),
status_icon_name: row.status_icon_name.clone(),
status_synced_at_iso: row.status_synced_at.map(ms_to_iso),
}
}
}
/// Result of an issue list query: the limited page of rows, the total
/// matching count (before LIMIT), and all distinct status names in the DB
/// (filled by run_list_issues, left empty by query_issues).
#[derive(Serialize)]
pub struct ListResult {
    pub issues: Vec<IssueListRow>,
    pub total_count: usize,
    pub available_statuses: Vec<String>,
}
/// Robot-output shape of ListResult; `showing` is the page size actually
/// returned (issues.len()) as opposed to `total_count`.
#[derive(Serialize)]
pub struct ListResultJson {
    pub issues: Vec<IssueListRowJson>,
    pub total_count: usize,
    pub showing: usize,
}
impl From<&ListResult> for ListResultJson {
fn from(result: &ListResult) -> Self {
Self {
issues: result.issues.iter().map(IssueListRowJson::from).collect(),
total_count: result.total_count,
showing: result.issues.len(),
}
}
}
/// Filter and sort options for the issue list query; all references borrow
/// from the parsed CLI arguments. Interpretation lives in query_issues.
pub struct ListFilters<'a> {
    pub limit: usize,
    pub project: Option<&'a str>,
    // "all" disables the state filter.
    pub state: Option<&'a str>,
    // Leading '@' is stripped before matching.
    pub author: Option<&'a str>,
    pub assignee: Option<&'a str>,
    // AND semantics: a matching issue must carry every listed label.
    pub labels: Option<&'a [String]>,
    pub milestone: Option<&'a str>,
    // Relative ("7d", "2w", "1m") or absolute ("YYYY-MM-DD"); parse_since.
    pub since: Option<&'a str>,
    pub due_before: Option<&'a str>,
    pub has_due_date: bool,
    // Case-insensitive; empty slice disables status filtering.
    pub statuses: &'a [String],
    // "created" | "iid" | anything else => updated_at.
    pub sort: &'a str,
    // "asc" => ASC; anything else => DESC.
    pub order: &'a str,
}
/// Open the configured SQLite database, run the filtered issue query, and
/// attach the distinct set of status names present in the database.
pub fn run_list_issues(config: &Config, filters: ListFilters) -> Result<ListResult> {
    let conn = create_connection(&get_db_path(config.storage.db_path.as_deref()))?;
    let mut result = query_issues(&conn, &filters)?;
    result.available_statuses = query_available_statuses(&conn)?;
    Ok(result)
}
/// All distinct, alphabetically ordered `status_name` values across issues.
fn query_available_statuses(conn: &Connection) -> Result<Vec<String>> {
    let mut stmt = conn.prepare(
        "SELECT DISTINCT status_name FROM issues WHERE status_name IS NOT NULL ORDER BY status_name",
    )?;
    let mut statuses = Vec::new();
    for name in stmt.query_map([], |row| row.get::<_, String>(0))? {
        statuses.push(name?);
    }
    Ok(statuses)
}
/// Build and run the filtered, sorted, limited issue list query.
///
/// WHERE clauses and their positional `?` parameters are accumulated in
/// lockstep (`where_clauses` / `params`): push order defines parameter
/// binding order, so the two must never diverge. Returns the page of rows
/// plus the pre-LIMIT total; `available_statuses` is left empty for the
/// caller (run_list_issues) to fill.
fn query_issues(conn: &Connection, filters: &ListFilters) -> Result<ListResult> {
    let mut where_clauses = Vec::new();
    // Boxed so heterogeneous parameter types (i64, String) share one Vec.
    let mut params: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();
    if let Some(project) = filters.project {
        let project_id = resolve_project(conn, project)?;
        where_clauses.push("i.project_id = ?");
        params.push(Box::new(project_id));
    }
    // "all" is the sentinel for "no state filter".
    if let Some(state) = filters.state
        && state != "all"
    {
        where_clauses.push("i.state = ?");
        params.push(Box::new(state.to_string()));
    }
    // Accept "@user" or "user".
    if let Some(author) = filters.author {
        let username = author.strip_prefix('@').unwrap_or(author);
        where_clauses.push("i.author_username = ?");
        params.push(Box::new(username.to_string()));
    }
    if let Some(assignee) = filters.assignee {
        let username = assignee.strip_prefix('@').unwrap_or(assignee);
        where_clauses.push(
            "EXISTS (SELECT 1 FROM issue_assignees ia
                WHERE ia.issue_id = i.id AND ia.username = ?)",
        );
        params.push(Box::new(username.to_string()));
    }
    if let Some(since_str) = filters.since {
        let cutoff_ms = parse_since(since_str).ok_or_else(|| {
            LoreError::Other(format!(
                "Invalid --since value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                since_str
            ))
        })?;
        where_clauses.push("i.updated_at >= ?");
        params.push(Box::new(cutoff_ms));
    }
    // AND semantics: one EXISTS clause (and one parameter) per label.
    if let Some(labels) = filters.labels {
        for label in labels {
            where_clauses.push(
                "EXISTS (SELECT 1 FROM issue_labels il
                    JOIN labels l ON il.label_id = l.id
                    WHERE il.issue_id = i.id AND l.name = ?)",
            );
            params.push(Box::new(label.clone()));
        }
    }
    if let Some(milestone) = filters.milestone {
        where_clauses.push("i.milestone_title = ?");
        params.push(Box::new(milestone.to_string()));
    }
    if let Some(due_before) = filters.due_before {
        where_clauses.push("i.due_date IS NOT NULL AND i.due_date <= ?");
        params.push(Box::new(due_before.to_string()));
    }
    if filters.has_due_date {
        where_clauses.push("i.due_date IS NOT NULL");
    }
    // Declared outside the branch so the &str pushed into `where_clauses`
    // can keep borrowing the formatted String for the rest of the function.
    let status_in_clause;
    if filters.statuses.len() == 1 {
        where_clauses.push("i.status_name = ? COLLATE NOCASE");
        params.push(Box::new(filters.statuses[0].clone()));
    } else if filters.statuses.len() > 1 {
        let placeholders: Vec<&str> = filters.statuses.iter().map(|_| "?").collect();
        status_in_clause = format!(
            "i.status_name COLLATE NOCASE IN ({})",
            placeholders.join(", ")
        );
        where_clauses.push(&status_in_clause);
        for s in filters.statuses {
            params.push(Box::new(s.clone()));
        }
    }
    let where_sql = if where_clauses.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", where_clauses.join(" AND "))
    };
    // Total count uses the same filters/params but no LIMIT.
    let count_sql = format!(
        "SELECT COUNT(*) FROM issues i
        JOIN projects p ON i.project_id = p.id
        {where_sql}"
    );
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let total_count: i64 = conn.query_row(&count_sql, param_refs.as_slice(), |row| row.get(0))?;
    let total_count = total_count as usize;
    // sort/order are matched against known values; anything else falls back
    // to updated_at DESC — never interpolated raw, so no injection risk.
    let sort_column = match filters.sort {
        "created" => "i.created_at",
        "iid" => "i.iid",
        _ => "i.updated_at",
    };
    let order = if filters.order == "asc" {
        "ASC"
    } else {
        "DESC"
    };
    // GROUP_CONCAT uses X'1F' (ASCII unit separator) so label/assignee
    // values containing commas survive; split on '\x1F' below.
    let query_sql = format!(
        "SELECT
        i.iid,
        i.title,
        i.state,
        i.author_username,
        i.created_at,
        i.updated_at,
        i.web_url,
        p.path_with_namespace,
        (SELECT GROUP_CONCAT(l.name, X'1F')
        FROM issue_labels il
        JOIN labels l ON il.label_id = l.id
        WHERE il.issue_id = i.id) AS labels_csv,
        (SELECT GROUP_CONCAT(ia.username, X'1F')
        FROM issue_assignees ia
        WHERE ia.issue_id = i.id) AS assignees_csv,
        (SELECT COUNT(*) FROM discussions d
        WHERE d.issue_id = i.id) AS discussion_count,
        (SELECT COUNT(*) FROM discussions d
        WHERE d.issue_id = i.id AND d.resolvable = 1 AND d.resolved = 0) AS unresolved_count,
        i.status_name,
        i.status_category,
        i.status_color,
        i.status_icon_name,
        i.status_synced_at
        FROM issues i
        JOIN projects p ON i.project_id = p.id
        {where_sql}
        ORDER BY {sort_column} {order}
        LIMIT ?"
    );
    // LIMIT is the final positional parameter, appended after the count
    // query ran (which must not see it).
    params.push(Box::new(filters.limit as i64));
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(&query_sql)?;
    let issues: Vec<IssueListRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // NULL GROUP_CONCAT (no labels/assignees) becomes an empty Vec.
            let labels_csv: Option<String> = row.get(8)?;
            let labels = labels_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();
            let assignees_csv: Option<String> = row.get(9)?;
            let assignees = assignees_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();
            // Column indices must track the SELECT list above.
            Ok(IssueListRow {
                iid: row.get(0)?,
                title: row.get(1)?,
                state: row.get(2)?,
                author_username: row.get(3)?,
                created_at: row.get(4)?,
                updated_at: row.get(5)?,
                web_url: row.get(6)?,
                project_path: row.get(7)?,
                labels,
                assignees,
                discussion_count: row.get(10)?,
                unresolved_count: row.get(11)?,
                status_name: row.get(12)?,
                status_category: row.get(13)?,
                status_color: row.get(14)?,
                status_icon_name: row.get(15)?,
                status_synced_at: row.get(16)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(ListResult {
        issues,
        total_count,
        // Filled by run_list_issues; this layer only answers the query.
        available_statuses: Vec::new(),
    })
}
pub fn print_list_issues(result: &ListResult) {
if result.issues.is_empty() {
println!("No issues found.");
return;
}
println!(
"{} {} of {}\n",
Theme::bold().render("Issues"),
result.issues.len(),
result.total_count
);
let has_any_status = result.issues.iter().any(|i| i.status_name.is_some());
let mut headers = vec!["IID", "Title", "State"];
if has_any_status {
headers.push("Status");
}
headers.extend(["Assignee", "Labels", "Disc", "Updated"]);
let mut table = LoreTable::new().headers(&headers).align(0, Align::Right);
for issue in &result.issues {
let title = render::truncate(&issue.title, 45);
let relative_time = render::format_relative_time_compact(issue.updated_at);
let labels = render::format_labels_bare(&issue.labels, 2);
let assignee = format_assignees(&issue.assignees);
let discussions = format_discussions(issue.discussion_count, issue.unresolved_count);
let (icon, state_style) = if issue.state == "opened" {
(Icons::issue_opened(), Theme::success())
} else {
(Icons::issue_closed(), Theme::dim())
};
let state_cell = StyledCell::styled(format!("{icon} {}", issue.state), state_style);
let mut row = vec![
StyledCell::styled(format!("#{}", issue.iid), Theme::info()),
StyledCell::plain(title),
state_cell,
];
if has_any_status {
match &issue.status_name {
Some(status) => {
row.push(StyledCell::plain(render::style_with_hex(
status,
issue.status_color.as_deref(),
)));
}
None => {
row.push(StyledCell::plain(""));
}
}
}
row.extend([
StyledCell::styled(assignee, Theme::accent()),
StyledCell::styled(labels, Theme::warning()),
discussions,
StyledCell::styled(relative_time, Theme::dim()),
]);
table.add_row(row);
}
println!("{}", table.render());
}
/// Emit the issue list as a single line of robot JSON.
///
/// When `fields` is given, it is expanded via the "issues" preset and used
/// to prune the serialized output before printing. Serialization errors go
/// to stderr. `available_statuses` rides in `meta` so field filtering of
/// `data` cannot drop it.
pub fn print_list_issues_json(result: &ListResult, elapsed_ms: u64, fields: Option<&[String]>) {
    let json_result = ListResultJson::from(result);
    // Bind mutably up front instead of the redundant
    // `let output = ...; let mut output = output;` rebinding.
    let mut output = serde_json::json!({
        "ok": true,
        "data": json_result,
        "meta": {
            "elapsed_ms": elapsed_ms,
            "available_statuses": result.available_statuses,
        },
    });
    if let Some(f) = fields {
        let expanded = expand_fields_preset(f, "issues");
        filter_fields(&mut output, "issues", &expanded);
    }
    match serde_json::to_string(&output) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}
/// Open the first listed issue's `web_url` in the system browser.
///
/// Returns the opened URL, or `None` when the list is empty, the first
/// issue has no URL, or the browser launch fails (error printed to stderr).
pub fn open_issue_in_browser(result: &ListResult) -> Option<String> {
    let url = result.issues.first()?.web_url.as_ref()?;
    if let Err(e) = open::that(url) {
        eprintln!("Failed to open browser: {e}");
        return None;
    }
    println!("Opened: {url}");
    Some(url.clone())
}

View File

@@ -1,6 +1,9 @@
use super::*;
use crate::cli::render;
use crate::core::time::now_ms;
use crate::test_support::{
insert_project as insert_test_project, setup_test_db as setup_note_test_db, test_config,
};
#[test]
fn truncate_leaves_short_strings_alone() {
@@ -82,34 +85,6 @@ fn format_discussions_with_unresolved() {
// Note query layer tests
// -----------------------------------------------------------------------
use std::path::Path;
use crate::core::config::{
Config, EmbeddingConfig, GitLabConfig, LoggingConfig, ProjectConfig, ScoringConfig,
StorageConfig, SyncConfig,
};
use crate::core::db::{create_connection, run_migrations};
fn test_config(default_project: Option<&str>) -> Config {
Config {
gitlab: GitLabConfig {
base_url: "https://gitlab.example.com".to_string(),
token_env_var: "GITLAB_TOKEN".to_string(),
token: None,
username: None,
},
projects: vec![ProjectConfig {
path: "group/project".to_string(),
}],
default_project: default_project.map(String::from),
sync: SyncConfig::default(),
storage: StorageConfig::default(),
embedding: EmbeddingConfig::default(),
logging: LoggingConfig::default(),
scoring: ScoringConfig::default(),
}
}
fn default_note_filters() -> NoteListFilters {
NoteListFilters {
limit: 50,
@@ -132,26 +107,6 @@ fn default_note_filters() -> NoteListFilters {
}
}
fn setup_note_test_db() -> Connection {
let conn = create_connection(Path::new(":memory:")).unwrap();
run_migrations(&conn).unwrap();
conn
}
fn insert_test_project(conn: &Connection, id: i64, path: &str) {
conn.execute(
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
VALUES (?1, ?2, ?3, ?4)",
rusqlite::params![
id,
id * 100,
path,
format!("https://gitlab.example.com/{path}")
],
)
.unwrap();
}
fn insert_test_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, title: &str) {
conn.execute(
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username,

View File

@@ -0,0 +1,28 @@
mod issues;
mod mrs;
mod notes;
mod render_helpers;
pub use issues::{
IssueListRow, IssueListRowJson, ListFilters, ListResult, ListResultJson, open_issue_in_browser,
print_list_issues, print_list_issues_json, run_list_issues,
};
pub use mrs::{
MrListFilters, MrListResult, MrListResultJson, MrListRow, MrListRowJson, open_mr_in_browser,
print_list_mrs, print_list_mrs_json, run_list_mrs,
};
pub use notes::{
NoteListFilters, NoteListResult, NoteListResultJson, NoteListRow, NoteListRowJson,
print_list_notes, print_list_notes_json, query_notes,
};
#[cfg(test)]
use crate::core::path_resolver::escape_like as note_escape_like;
#[cfg(test)]
use render_helpers::{format_discussions, format_note_parent, format_note_type, truncate_body};
#[cfg(test)]
use rusqlite::Connection;
#[cfg(test)]
#[path = "list_tests.rs"]
mod tests;

View File

@@ -0,0 +1,404 @@
use crate::cli::render::{self, Align, Icons, StyledCell, Table as LoreTable, Theme};
use rusqlite::Connection;
use serde::Serialize;
use crate::Config;
use crate::cli::robot::{RobotMeta, expand_fields_preset, filter_fields};
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::{ms_to_iso, parse_since};
use super::render_helpers::{format_branches, format_discussions};
/// One merge-request row as read from the local database by `query_mrs`.
///
/// Timestamps are epoch milliseconds. `labels`, `assignees`, `reviewers`
/// and the discussion counts are filled from correlated subqueries.
#[derive(Debug, Serialize)]
pub struct MrListRow {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    // Epoch ms; converted to ISO-8601 in `MrListRowJson`.
    pub created_at: i64,
    pub updated_at: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    pub discussion_count: i64,
    // Count of resolvable-but-unresolved discussions on this MR.
    pub unresolved_count: i64,
}
/// JSON-facing shape of `MrListRow` for robot (machine-readable) output:
/// timestamps rendered as ISO-8601 strings, `web_url` omitted when absent.
#[derive(Serialize)]
pub struct MrListRowJson {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    pub discussion_count: i64,
    pub unresolved_count: i64,
    // ISO-8601 renderings of the epoch-ms timestamps on `MrListRow`.
    pub created_at_iso: String,
    pub updated_at_iso: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub web_url: Option<String>,
    pub project_path: String,
}
impl From<&MrListRow> for MrListRowJson {
fn from(row: &MrListRow) -> Self {
Self {
iid: row.iid,
title: row.title.clone(),
state: row.state.clone(),
draft: row.draft,
author_username: row.author_username.clone(),
source_branch: row.source_branch.clone(),
target_branch: row.target_branch.clone(),
labels: row.labels.clone(),
assignees: row.assignees.clone(),
reviewers: row.reviewers.clone(),
discussion_count: row.discussion_count,
unresolved_count: row.unresolved_count,
created_at_iso: ms_to_iso(row.created_at),
updated_at_iso: ms_to_iso(row.updated_at),
web_url: row.web_url.clone(),
project_path: row.project_path.clone(),
}
}
}
/// Result of an MR listing query: the rows shown (capped at the limit)
/// plus the uncapped match count.
#[derive(Serialize)]
pub struct MrListResult {
    pub mrs: Vec<MrListRow>,
    pub total_count: usize,
}
/// JSON envelope for `MrListResult`; `showing` is how many of
/// `total_count` matches are actually included.
#[derive(Serialize)]
pub struct MrListResultJson {
    pub mrs: Vec<MrListRowJson>,
    pub total_count: usize,
    pub showing: usize,
}
impl From<&MrListResult> for MrListResultJson {
fn from(result: &MrListResult) -> Self {
Self {
mrs: result.mrs.iter().map(MrListRowJson::from).collect(),
total_count: result.total_count,
showing: result.mrs.len(),
}
}
}
/// Borrowed filter set for `query_mrs`, mirroring the flags of
/// `lore list mrs`. Only `limit`, `sort` and `order` are mandatory.
pub struct MrListFilters<'a> {
    pub limit: usize,
    pub project: Option<&'a str>,
    // "all" disables the state filter entirely (see `query_mrs`).
    pub state: Option<&'a str>,
    // Usernames may be given with or without a leading '@'.
    pub author: Option<&'a str>,
    pub assignee: Option<&'a str>,
    pub reviewer: Option<&'a str>,
    // MRs must carry ALL of these labels (AND semantics).
    pub labels: Option<&'a [String]>,
    pub since: Option<&'a str>,
    // Mutually exclusive draft toggles; `draft` wins if both are set.
    pub draft: bool,
    pub no_draft: bool,
    pub target_branch: Option<&'a str>,
    pub source_branch: Option<&'a str>,
    // "created" | "iid" | anything-else => updated_at; see `query_mrs`.
    pub sort: &'a str,
    pub order: &'a str,
}
/// Open the configured database and run the MR listing query for `filters`.
pub fn run_list_mrs(config: &Config, filters: MrListFilters) -> Result<MrListResult> {
    let conn = create_connection(&get_db_path(config.storage.db_path.as_deref()))?;
    query_mrs(&conn, &filters)
}
/// Assemble and execute the MR listing query described by `filters`.
///
/// WHERE clauses and their bound parameters are appended in lockstep, so
/// positional `?` placeholders always line up with `params`. Two statements
/// run: an uncapped COUNT(*), then the row query limited to `filters.limit`.
fn query_mrs(conn: &Connection, filters: &MrListFilters) -> Result<MrListResult> {
    let mut where_clauses = Vec::new();
    // Boxed trait objects because bound values mix i64 and String.
    let mut params: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();
    if let Some(project) = filters.project {
        let project_id = resolve_project(conn, project)?;
        where_clauses.push("m.project_id = ?");
        params.push(Box::new(project_id));
    }
    // "all" is a sentinel meaning "no state filter".
    if let Some(state) = filters.state
        && state != "all"
    {
        where_clauses.push("m.state = ?");
        params.push(Box::new(state.to_string()));
    }
    if let Some(author) = filters.author {
        // Accept both "user" and "@user".
        let username = author.strip_prefix('@').unwrap_or(author);
        where_clauses.push("m.author_username = ?");
        params.push(Box::new(username.to_string()));
    }
    if let Some(assignee) = filters.assignee {
        let username = assignee.strip_prefix('@').unwrap_or(assignee);
        where_clauses.push(
            "EXISTS (SELECT 1 FROM mr_assignees ma
             WHERE ma.merge_request_id = m.id AND ma.username = ?)",
        );
        params.push(Box::new(username.to_string()));
    }
    if let Some(reviewer) = filters.reviewer {
        let username = reviewer.strip_prefix('@').unwrap_or(reviewer);
        where_clauses.push(
            "EXISTS (SELECT 1 FROM mr_reviewers mr
             WHERE mr.merge_request_id = m.id AND mr.username = ?)",
        );
        params.push(Box::new(username.to_string()));
    }
    if let Some(since_str) = filters.since {
        let cutoff_ms = parse_since(since_str).ok_or_else(|| {
            LoreError::Other(format!(
                "Invalid --since value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                since_str
            ))
        })?;
        where_clauses.push("m.updated_at >= ?");
        params.push(Box::new(cutoff_ms));
    }
    if let Some(labels) = filters.labels {
        // One EXISTS per label: an MR must carry ALL requested labels.
        for label in labels {
            where_clauses.push(
                "EXISTS (SELECT 1 FROM mr_labels ml
                 JOIN labels l ON ml.label_id = l.id
                 WHERE ml.merge_request_id = m.id AND l.name = ?)",
            );
            params.push(Box::new(label.clone()));
        }
    }
    // The draft toggles are mutually exclusive; `draft` wins if both are set.
    if filters.draft {
        where_clauses.push("m.draft = 1");
    } else if filters.no_draft {
        where_clauses.push("m.draft = 0");
    }
    if let Some(target_branch) = filters.target_branch {
        where_clauses.push("m.target_branch = ?");
        params.push(Box::new(target_branch.to_string()));
    }
    if let Some(source_branch) = filters.source_branch {
        where_clauses.push("m.source_branch = ?");
        params.push(Box::new(source_branch.to_string()));
    }
    let where_sql = if where_clauses.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", where_clauses.join(" AND "))
    };
    // Uncapped count of all matches (the row query below is LIMITed).
    let count_sql = format!(
        "SELECT COUNT(*) FROM merge_requests m
         JOIN projects p ON m.project_id = p.id
         {where_sql}"
    );
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let total_count: i64 = conn.query_row(&count_sql, param_refs.as_slice(), |row| row.get(0))?;
    let total_count = total_count as usize;
    // Sort column is whitelisted here, never interpolated from raw input.
    let sort_column = match filters.sort {
        "created" => "m.created_at",
        "iid" => "m.iid",
        _ => "m.updated_at",
    };
    let order = if filters.order == "asc" {
        "ASC"
    } else {
        "DESC"
    };
    // Aggregates use GROUP_CONCAT with X'1F' (ASCII unit separator) so that
    // values containing commas split safely in the mapper below.
    let query_sql = format!(
        "SELECT
            m.iid,
            m.title,
            m.state,
            m.draft,
            m.author_username,
            m.source_branch,
            m.target_branch,
            m.created_at,
            m.updated_at,
            m.web_url,
            p.path_with_namespace,
            (SELECT GROUP_CONCAT(l.name, X'1F')
             FROM mr_labels ml
             JOIN labels l ON ml.label_id = l.id
             WHERE ml.merge_request_id = m.id) AS labels_csv,
            (SELECT GROUP_CONCAT(ma.username, X'1F')
             FROM mr_assignees ma
             WHERE ma.merge_request_id = m.id) AS assignees_csv,
            (SELECT GROUP_CONCAT(mr.username, X'1F')
             FROM mr_reviewers mr
             WHERE mr.merge_request_id = m.id) AS reviewers_csv,
            (SELECT COUNT(*) FROM discussions d
             WHERE d.merge_request_id = m.id) AS discussion_count,
            (SELECT COUNT(*) FROM discussions d
             WHERE d.merge_request_id = m.id AND d.resolvable = 1 AND d.resolved = 0) AS unresolved_count
         FROM merge_requests m
         JOIN projects p ON m.project_id = p.id
         {where_sql}
         ORDER BY {sort_column} {order}
         LIMIT ?"
    );
    // LIMIT is bound last, after the count query already ran with `params`.
    params.push(Box::new(filters.limit as i64));
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(&query_sql)?;
    let mrs: Vec<MrListRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // Split the unit-separator-joined aggregates; NULL => empty list.
            let labels_csv: Option<String> = row.get(11)?;
            let labels = labels_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();
            let assignees_csv: Option<String> = row.get(12)?;
            let assignees = assignees_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();
            let reviewers_csv: Option<String> = row.get(13)?;
            let reviewers = reviewers_csv
                .map(|s| s.split('\x1F').map(String::from).collect())
                .unwrap_or_default();
            // SQLite stores booleans as integers.
            let draft_int: i64 = row.get(3)?;
            Ok(MrListRow {
                iid: row.get(0)?,
                title: row.get(1)?,
                state: row.get(2)?,
                draft: draft_int == 1,
                author_username: row.get(4)?,
                source_branch: row.get(5)?,
                target_branch: row.get(6)?,
                created_at: row.get(7)?,
                updated_at: row.get(8)?,
                web_url: row.get(9)?,
                project_path: row.get(10)?,
                labels,
                assignees,
                reviewers,
                discussion_count: row.get(14)?,
                unresolved_count: row.get(15)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(MrListResult { mrs, total_count })
}
pub fn print_list_mrs(result: &MrListResult) {
if result.mrs.is_empty() {
println!("No merge requests found.");
return;
}
println!(
"{} {} of {}\n",
Theme::bold().render("Merge Requests"),
result.mrs.len(),
result.total_count
);
let mut table = LoreTable::new()
.headers(&[
"IID", "Title", "State", "Author", "Branches", "Disc", "Updated",
])
.align(0, Align::Right);
for mr in &result.mrs {
let title = if mr.draft {
format!("{} {}", Icons::mr_draft(), render::truncate(&mr.title, 42))
} else {
render::truncate(&mr.title, 45)
};
let relative_time = render::format_relative_time_compact(mr.updated_at);
let branches = format_branches(&mr.target_branch, &mr.source_branch, 25);
let discussions = format_discussions(mr.discussion_count, mr.unresolved_count);
let (icon, style) = match mr.state.as_str() {
"opened" => (Icons::mr_opened(), Theme::success()),
"merged" => (Icons::mr_merged(), Theme::accent()),
"closed" => (Icons::mr_closed(), Theme::error()),
"locked" => (Icons::mr_opened(), Theme::warning()),
_ => (Icons::mr_opened(), Theme::dim()),
};
let state_cell = StyledCell::styled(format!("{icon} {}", mr.state), style);
table.add_row(vec![
StyledCell::styled(format!("!{}", mr.iid), Theme::info()),
StyledCell::plain(title),
state_cell,
StyledCell::styled(
format!("@{}", render::truncate(&mr.author_username, 12)),
Theme::accent(),
),
StyledCell::styled(branches, Theme::info()),
discussions,
StyledCell::styled(relative_time, Theme::dim()),
]);
}
println!("{}", table.render());
}
/// Print the MR listing as a single-line JSON envelope (`ok`/`data`/`meta`).
///
/// When `fields` is given, the payload is narrowed to the requested fields
/// (presets expanded first) before serialization.
pub fn print_list_mrs_json(result: &MrListResult, elapsed_ms: u64, fields: Option<&[String]>) {
    let json_result = MrListResultJson::from(result);
    let meta = RobotMeta { elapsed_ms };
    // Bind mutably once instead of rebinding an immutable value afterwards.
    let mut output = serde_json::json!({
        "ok": true,
        "data": json_result,
        "meta": meta,
    });
    if let Some(f) = fields {
        let expanded = expand_fields_preset(f, "mrs");
        filter_fields(&mut output, "mrs", &expanded);
    }
    match serde_json::to_string(&output) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}
/// Open the first MR's web URL in the default browser.
///
/// Returns the opened URL, or `None` when the list is empty, the first MR
/// has no URL, or the browser could not be launched.
pub fn open_mr_in_browser(result: &MrListResult) -> Option<String> {
    let url = result.mrs.first()?.web_url.as_ref()?;
    if let Err(e) = open::that(url) {
        eprintln!("Failed to open browser: {e}");
        return None;
    }
    println!("Opened: {url}");
    Some(url.clone())
}

View File

@@ -0,0 +1,470 @@
use crate::cli::render::{self, Align, StyledCell, Table as LoreTable, Theme};
use rusqlite::Connection;
use serde::Serialize;
use crate::Config;
use crate::cli::robot::{RobotMeta, expand_fields_preset, filter_fields};
use crate::core::error::{LoreError, Result};
use crate::core::path_resolver::escape_like as note_escape_like;
use crate::core::project::resolve_project;
use crate::core::time::{iso_to_ms, ms_to_iso, parse_since};
use super::render_helpers::{
format_note_parent, format_note_path, format_note_type, truncate_body,
};
/// One note row as read from the local database by `query_notes`.
///
/// Timestamps are epoch milliseconds; boolean flags are decoded from SQLite
/// integer columns. `parent_iid`/`parent_title` come from the owning issue
/// or MR (whichever the note's discussion is attached to).
#[derive(Debug, Serialize)]
pub struct NoteListRow {
    pub id: i64,
    pub gitlab_id: i64,
    pub author_username: String,
    pub body: Option<String>,
    pub note_type: Option<String>,
    pub is_system: bool,
    pub created_at: i64,
    pub updated_at: i64,
    // Diff-note anchor: file path and line on the new/old side of the diff.
    pub position_new_path: Option<String>,
    pub position_new_line: Option<i64>,
    pub position_old_path: Option<String>,
    pub position_old_line: Option<i64>,
    pub resolvable: bool,
    pub resolved: bool,
    pub resolved_by: Option<String>,
    // "Issue" or "MergeRequest" per the discussion's noteable_type column.
    pub noteable_type: Option<String>,
    pub parent_iid: Option<i64>,
    pub parent_title: Option<String>,
    pub project_path: String,
}
/// JSON-facing shape of `NoteListRow` for robot output: timestamps rendered
/// as ISO-8601 strings and all absent optionals omitted from the payload.
#[derive(Serialize)]
pub struct NoteListRowJson {
    pub id: i64,
    pub gitlab_id: i64,
    pub author_username: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub note_type: Option<String>,
    pub is_system: bool,
    // ISO-8601 renderings of the epoch-ms timestamps on `NoteListRow`.
    pub created_at_iso: String,
    pub updated_at_iso: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_new_path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_new_line: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_old_path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub position_old_line: Option<i64>,
    pub resolvable: bool,
    pub resolved: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resolved_by: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub noteable_type: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent_iid: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parent_title: Option<String>,
    pub project_path: String,
}
impl From<&NoteListRow> for NoteListRowJson {
fn from(row: &NoteListRow) -> Self {
Self {
id: row.id,
gitlab_id: row.gitlab_id,
author_username: row.author_username.clone(),
body: row.body.clone(),
note_type: row.note_type.clone(),
is_system: row.is_system,
created_at_iso: ms_to_iso(row.created_at),
updated_at_iso: ms_to_iso(row.updated_at),
position_new_path: row.position_new_path.clone(),
position_new_line: row.position_new_line,
position_old_path: row.position_old_path.clone(),
position_old_line: row.position_old_line,
resolvable: row.resolvable,
resolved: row.resolved,
resolved_by: row.resolved_by.clone(),
noteable_type: row.noteable_type.clone(),
parent_iid: row.parent_iid,
parent_title: row.parent_title.clone(),
project_path: row.project_path.clone(),
}
}
}
/// Result of a note listing query: the rows shown (capped at the limit)
/// plus the uncapped match count.
#[derive(Debug)]
pub struct NoteListResult {
    pub notes: Vec<NoteListRow>,
    pub total_count: i64,
}
/// JSON envelope for `NoteListResult`; `showing` is how many of
/// `total_count` matches are actually included.
#[derive(Serialize)]
pub struct NoteListResultJson {
    pub notes: Vec<NoteListRowJson>,
    pub total_count: i64,
    pub showing: usize,
}
impl From<&NoteListResult> for NoteListResultJson {
fn from(result: &NoteListResult) -> Self {
Self {
notes: result.notes.iter().map(NoteListRowJson::from).collect(),
total_count: result.total_count,
showing: result.notes.len(),
}
}
}
/// Owned filter set for `query_notes`, mirroring the flags of
/// `lore list notes`. Only `limit`, `sort` and `order` are mandatory.
pub struct NoteListFilters {
    pub limit: usize,
    pub project: Option<String>,
    // Username may be given with or without a leading '@'.
    pub author: Option<String>,
    pub note_type: Option<String>,
    // System (GitLab-generated) notes are excluded unless this is set.
    pub include_system: bool,
    // Scope to one issue / MR by IID; needs a project context to resolve.
    pub for_issue_iid: Option<i64>,
    pub for_mr_iid: Option<i64>,
    pub note_id: Option<i64>,
    pub gitlab_note_id: Option<i64>,
    pub discussion_id: Option<String>,
    // Relative ("7d") or absolute ("YYYY-MM-DD") time bounds.
    pub since: Option<String>,
    pub until: Option<String>,
    // Trailing '/' means directory-prefix match; otherwise exact path.
    pub path: Option<String>,
    pub contains: Option<String>,
    // "resolved" | "unresolved"; anything else is rejected by `query_notes`.
    pub resolution: Option<String>,
    pub sort: String,
    pub order: String,
}
/// Render a note listing as a human-readable table on stdout.
pub fn print_list_notes(result: &NoteListResult) {
    if result.notes.is_empty() {
        println!("No notes found.");
        return;
    }
    println!(
        "{} {} of {}\n",
        Theme::bold().render("Notes"),
        result.notes.len(),
        result.total_count
    );
    let mut table = LoreTable::new()
        .headers(&[
            "ID",
            "Author",
            "Type",
            "Body",
            "Path:Line",
            "Parent",
            "Created",
        ])
        .align(0, Align::Right);
    for note in &result.notes {
        table.add_row(vec![
            StyledCell::styled(note.gitlab_id.to_string(), Theme::info()),
            StyledCell::styled(
                format!("@{}", render::truncate(&note.author_username, 12)),
                Theme::accent(),
            ),
            StyledCell::plain(format_note_type(note.note_type.as_deref())),
            StyledCell::plain(
                note.body
                    .as_deref()
                    .map_or_else(String::new, |b| truncate_body(b, 60)),
            ),
            StyledCell::plain(format_note_path(
                note.position_new_path.as_deref(),
                note.position_new_line,
            )),
            StyledCell::plain(format_note_parent(
                note.noteable_type.as_deref(),
                note.parent_iid,
            )),
            StyledCell::styled(
                render::format_relative_time_compact(note.created_at),
                Theme::dim(),
            ),
        ]);
    }
    println!("{}", table.render());
}
/// Print the note listing as a single-line JSON envelope (`ok`/`data`/`meta`).
///
/// When `fields` is given, the payload is narrowed to the requested fields
/// (presets expanded first) before serialization.
pub fn print_list_notes_json(result: &NoteListResult, elapsed_ms: u64, fields: Option<&[String]>) {
    let json_result = NoteListResultJson::from(result);
    let meta = RobotMeta { elapsed_ms };
    // Bind mutably once instead of rebinding an immutable value afterwards.
    let mut output = serde_json::json!({
        "ok": true,
        "data": json_result,
        "meta": meta,
    });
    if let Some(f) = fields {
        let expanded = expand_fields_preset(f, "notes");
        filter_fields(&mut output, "notes", &expanded);
    }
    match serde_json::to_string(&output) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}
/// Assemble and execute the note listing query described by `filters`.
///
/// WHERE clauses and bound parameters are appended in lockstep so positional
/// `?` placeholders match. Every note belongs to a discussion; LEFT JOINs
/// attach the parent issue or MR (whichever the discussion references).
/// Runs an uncapped COUNT(*) first, then the row query limited to
/// `filters.limit`. `config` supplies the default project for IID filters.
pub fn query_notes(
    conn: &Connection,
    filters: &NoteListFilters,
    config: &Config,
) -> Result<NoteListResult> {
    let mut where_clauses: Vec<String> = Vec::new();
    // Boxed trait objects because bound values mix i64 and String.
    let mut params: Vec<Box<dyn rusqlite::ToSql>> = Vec::new();
    if let Some(ref project) = filters.project {
        let project_id = resolve_project(conn, project)?;
        where_clauses.push("n.project_id = ?".to_string());
        params.push(Box::new(project_id));
    }
    if let Some(ref author) = filters.author {
        // Accept both "user" and "@user"; match case-insensitively.
        let username = author.strip_prefix('@').unwrap_or(author);
        where_clauses.push("n.author_username = ? COLLATE NOCASE".to_string());
        params.push(Box::new(username.to_string()));
    }
    if let Some(ref note_type) = filters.note_type {
        where_clauses.push("n.note_type = ?".to_string());
        params.push(Box::new(note_type.clone()));
    }
    if !filters.include_system {
        where_clauses.push("n.is_system = 0".to_string());
    }
    // Keep the parsed --since cutoff so it can be validated against --until.
    let since_ms = if let Some(ref since_str) = filters.since {
        let ms = parse_since(since_str).ok_or_else(|| {
            LoreError::Other(format!(
                "Invalid --since value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                since_str
            ))
        })?;
        where_clauses.push("n.created_at >= ?".to_string());
        params.push(Box::new(ms));
        Some(ms)
    } else {
        None
    };
    if let Some(ref until_str) = filters.until {
        // A bare YYYY-MM-DD upper bound is inclusive: extend to end-of-day.
        let until_ms = if until_str.len() == 10
            && until_str.chars().filter(|&c| c == '-').count() == 2
        {
            let iso_full = format!("{until_str}T23:59:59.999Z");
            iso_to_ms(&iso_full).ok_or_else(|| {
                LoreError::Other(format!(
                    "Invalid --until value '{}'. Use YYYY-MM-DD or relative format.",
                    until_str
                ))
            })?
        } else {
            parse_since(until_str).ok_or_else(|| {
                LoreError::Other(format!(
                    "Invalid --until value '{}'. Use relative (7d, 2w, 1m) or absolute (YYYY-MM-DD) format.",
                    until_str
                ))
            })?
        };
        // Reject an inverted window rather than silently returning nothing.
        if let Some(s) = since_ms
            && s > until_ms
        {
            return Err(LoreError::Other(
                "Invalid time window: --since is after --until.".to_string(),
            ));
        }
        where_clauses.push("n.created_at <= ?".to_string());
        params.push(Box::new(until_ms));
    }
    if let Some(ref path) = filters.path {
        // Trailing '/' means directory-prefix match; otherwise exact path.
        if let Some(prefix) = path.strip_suffix('/') {
            let escaped = note_escape_like(prefix);
            where_clauses.push("n.position_new_path LIKE ? ESCAPE '\\'".to_string());
            params.push(Box::new(format!("{escaped}%")));
        } else {
            where_clauses.push("n.position_new_path = ?".to_string());
            params.push(Box::new(path.clone()));
        }
    }
    if let Some(ref contains) = filters.contains {
        // Escape LIKE metacharacters so the user's text matches literally.
        let escaped = note_escape_like(contains);
        where_clauses.push("n.body LIKE ? ESCAPE '\\' COLLATE NOCASE".to_string());
        params.push(Box::new(format!("%{escaped}%")));
    }
    if let Some(ref resolution) = filters.resolution {
        match resolution.as_str() {
            "unresolved" => {
                where_clauses.push("n.resolvable = 1 AND n.resolved = 0".to_string());
            }
            "resolved" => {
                where_clauses.push("n.resolvable = 1 AND n.resolved = 1".to_string());
            }
            other => {
                return Err(LoreError::Other(format!(
                    "Invalid --resolution value '{}'. Use 'resolved' or 'unresolved'.",
                    other
                )));
            }
        }
    }
    // IID filters need a project context (IIDs are only unique per project).
    if let Some(iid) = filters.for_issue_iid {
        let project_str = filters
            .project
            .as_deref()
            .or(config.default_project.as_deref())
            .ok_or_else(|| {
                LoreError::Other(
                    "Cannot filter by issue IID without a project context. Use --project or set defaultProject in config."
                        .to_string(),
                )
            })?;
        let project_id = resolve_project(conn, project_str)?;
        where_clauses.push(
            "d.issue_id = (SELECT id FROM issues WHERE project_id = ? AND iid = ?)".to_string(),
        );
        params.push(Box::new(project_id));
        params.push(Box::new(iid));
    }
    if let Some(iid) = filters.for_mr_iid {
        let project_str = filters
            .project
            .as_deref()
            .or(config.default_project.as_deref())
            .ok_or_else(|| {
                LoreError::Other(
                    "Cannot filter by MR IID without a project context. Use --project or set defaultProject in config."
                        .to_string(),
                )
            })?;
        let project_id = resolve_project(conn, project_str)?;
        where_clauses.push(
            "d.merge_request_id = (SELECT id FROM merge_requests WHERE project_id = ? AND iid = ?)"
                .to_string(),
        );
        params.push(Box::new(project_id));
        params.push(Box::new(iid));
    }
    if let Some(id) = filters.note_id {
        where_clauses.push("n.id = ?".to_string());
        params.push(Box::new(id));
    }
    if let Some(gitlab_id) = filters.gitlab_note_id {
        where_clauses.push("n.gitlab_id = ?".to_string());
        params.push(Box::new(gitlab_id));
    }
    if let Some(ref disc_id) = filters.discussion_id {
        where_clauses.push("d.gitlab_discussion_id = ?".to_string());
        params.push(Box::new(disc_id.clone()));
    }
    let where_sql = if where_clauses.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", where_clauses.join(" AND "))
    };
    // Uncapped count of all matches (the row query below is LIMITed).
    let count_sql = format!(
        "SELECT COUNT(*) FROM notes n
         JOIN discussions d ON n.discussion_id = d.id
         JOIN projects p ON n.project_id = p.id
         LEFT JOIN issues i ON d.issue_id = i.id
         LEFT JOIN merge_requests m ON d.merge_request_id = m.id
         {where_sql}"
    );
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let total_count: i64 = conn.query_row(&count_sql, param_refs.as_slice(), |row| row.get(0))?;
    // Sort column is whitelisted here, never interpolated from raw input.
    let sort_column = match filters.sort.as_str() {
        "updated" => "n.updated_at",
        _ => "n.created_at",
    };
    let order = if filters.order == "asc" {
        "ASC"
    } else {
        "DESC"
    };
    // `n.id` tie-breaker keeps ordering stable for equal timestamps.
    let query_sql = format!(
        "SELECT
            n.id,
            n.gitlab_id,
            n.author_username,
            n.body,
            n.note_type,
            n.is_system,
            n.created_at,
            n.updated_at,
            n.position_new_path,
            n.position_new_line,
            n.position_old_path,
            n.position_old_line,
            n.resolvable,
            n.resolved,
            n.resolved_by,
            d.noteable_type,
            COALESCE(i.iid, m.iid) AS parent_iid,
            COALESCE(i.title, m.title) AS parent_title,
            p.path_with_namespace AS project_path
         FROM notes n
         JOIN discussions d ON n.discussion_id = d.id
         JOIN projects p ON n.project_id = p.id
         LEFT JOIN issues i ON d.issue_id = i.id
         LEFT JOIN merge_requests m ON d.merge_request_id = m.id
         {where_sql}
         ORDER BY {sort_column} {order}, n.id {order}
         LIMIT ?"
    );
    // LIMIT is bound last, after the count query already ran with `params`.
    params.push(Box::new(filters.limit as i64));
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(&query_sql)?;
    let notes: Vec<NoteListRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // SQLite stores booleans as integers.
            let is_system_int: i64 = row.get(5)?;
            let resolvable_int: i64 = row.get(12)?;
            let resolved_int: i64 = row.get(13)?;
            Ok(NoteListRow {
                id: row.get(0)?,
                gitlab_id: row.get(1)?,
                author_username: row.get::<_, Option<String>>(2)?.unwrap_or_default(),
                body: row.get(3)?,
                note_type: row.get(4)?,
                is_system: is_system_int == 1,
                created_at: row.get(6)?,
                updated_at: row.get(7)?,
                position_new_path: row.get(8)?,
                position_new_line: row.get(9)?,
                position_old_path: row.get(10)?,
                position_old_line: row.get(11)?,
                resolvable: resolvable_int == 1,
                resolved: resolved_int == 1,
                resolved_by: row.get(14)?,
                noteable_type: row.get(15)?,
                parent_iid: row.get(16)?,
                parent_title: row.get(17)?,
                project_path: row.get(18)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(NoteListResult { notes, total_count })
}

View File

@@ -0,0 +1,73 @@
use crate::cli::render::{self, StyledCell, Theme};
/// Format up to two assignees as `@name` (truncated to 10 chars each),
/// appending ` +N` for overflow; returns `-` when there are none.
pub(crate) fn format_assignees(assignees: &[String]) -> String {
    const MAX_SHOWN: usize = 2;
    if assignees.is_empty() {
        return "-".to_string();
    }
    let shown = assignees
        .iter()
        .take(MAX_SHOWN)
        .map(|name| format!("@{}", render::truncate(name, 10)))
        .collect::<Vec<_>>()
        .join(", ");
    match assignees.len().saturating_sub(MAX_SHOWN) {
        0 => shown,
        extra => format!("{shown} +{extra}"),
    }
}
pub(crate) fn format_discussions(total: i64, unresolved: i64) -> StyledCell {
if total == 0 {
return StyledCell::plain(String::new());
}
if unresolved > 0 {
let text = format!("{total}/");
let warn = Theme::warning().render(&format!("{unresolved}!"));
StyledCell::plain(format!("{text}{warn}"))
} else {
StyledCell::plain(format!("{total}"))
}
}
/// Render `target <- source` truncated to `max_width` display characters.
pub(crate) fn format_branches(target: &str, source: &str, max_width: usize) -> String {
    render::truncate(&format!("{target} <- {source}"), max_width)
}
/// Truncate `body` to at most `max_len` characters (not bytes), appending
/// `...` only when something was cut; char-based so multibyte text is
/// never split mid-codepoint.
pub(crate) fn truncate_body(body: &str, max_len: usize) -> String {
    let mut chars = body.chars();
    let kept: String = chars.by_ref().take(max_len).collect();
    if chars.next().is_none() {
        body.to_string()
    } else {
        format!("{kept}...")
    }
}
/// Short display label for a note's type; `-` for unknown or absent types.
pub(crate) fn format_note_type(note_type: Option<&str>) -> &'static str {
    if let Some(kind) = note_type {
        if kind == "DiffNote" {
            return "Diff";
        }
        if kind == "DiscussionNote" {
            return "Disc";
        }
    }
    "-"
}
/// Render a `path:line` location; line-less paths print bare, no path → `-`.
pub(crate) fn format_note_path(path: Option<&str>, line: Option<i64>) -> String {
    let Some(p) = path else {
        return "-".to_string();
    };
    match line {
        Some(l) => format!("{p}:{l}"),
        None => p.to_string(),
    }
}
/// Human-readable parent reference: `Issue #N`, `MR !N`, or `-` when the
/// parent kind or IID is missing/unrecognized.
pub(crate) fn format_note_parent(noteable_type: Option<&str>, parent_iid: Option<i64>) -> String {
    if let (Some(kind), Some(iid)) = (noteable_type, parent_iid) {
        match kind {
            "Issue" => return format!("Issue #{iid}"),
            "MergeRequest" => return format!("MR !{iid}"),
            _ => {}
        }
    }
    "-".to_string()
}

View File

@@ -1,32 +1,11 @@
use super::*;
use crate::cli::commands::me::types::{ActivityEventType, AttentionState};
use crate::core::db::{create_connection, run_migrations};
use crate::core::time::now_ms;
use crate::test_support::{insert_project, setup_test_db};
use rusqlite::Connection;
use std::path::Path;
// ─── Helpers ────────────────────────────────────────────────────────────────
/// Open a fresh in-memory SQLite database with all migrations applied.
fn setup_test_db() -> Connection {
    let db = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&db).unwrap();
    db
}
/// Insert a project row; the gitlab id and web URL are derived from `id`/`path`.
fn insert_project(conn: &Connection, id: i64, path: &str) {
    let gitlab_project_id = id * 100;
    let web_url = format!("https://git.example.com/{path}");
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
         VALUES (?1, ?2, ?3, ?4)",
        rusqlite::params![id, gitlab_project_id, path, web_url],
    )
    .unwrap();
}
fn insert_issue(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str) {
insert_issue_with_status(
conn,

View File

@@ -17,7 +17,6 @@ pub mod show;
pub mod stats;
pub mod sync;
pub mod sync_status;
pub mod sync_surgical;
pub mod timeline;
pub mod trace;
pub mod who;
@@ -61,9 +60,8 @@ pub use show::{
run_show_mr,
};
pub use stats::{print_stats, print_stats_json, run_stats};
pub use sync::{SyncOptions, SyncResult, print_sync, print_sync_json, run_sync};
pub use sync::{SyncOptions, SyncResult, print_sync, print_sync_json, run_sync, run_sync_surgical};
pub use sync_status::{print_sync_status, print_sync_status_json, run_sync_status};
pub use sync_surgical::run_sync_surgical;
pub use timeline::{TimelineParams, print_timeline, print_timeline_json_with_meta, run_timeline};
pub use trace::{parse_trace_path, print_trace, print_trace_json};
pub use who::{WhoRun, print_who_human, print_who_json, run_who};

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,310 @@
/// Lightweight reference to a merge request that closes an issue
/// (via a `closes` entity reference).
#[derive(Debug, Clone, Serialize)]
pub struct ClosingMrRef {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub web_url: Option<String>,
}
/// Fully-hydrated issue for `lore show issue`: the base row plus labels,
/// assignees, closing MRs and discussion threads gathered by separate
/// queries in `run_show_issue`.
#[derive(Debug, Serialize)]
pub struct IssueDetail {
    pub id: i64,
    pub iid: i64,
    pub title: String,
    pub description: Option<String>,
    pub state: String,
    pub author_username: String,
    // Epoch milliseconds.
    pub created_at: i64,
    pub updated_at: i64,
    pub closed_at: Option<String>,
    pub confidential: bool,
    pub web_url: Option<String>,
    pub project_path: String,
    // GitLab-style full reference, e.g. "group/project#42".
    pub references_full: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub due_date: Option<String>,
    pub milestone: Option<String>,
    // Count of non-system notes on this issue.
    pub user_notes_count: i64,
    pub merge_requests_count: usize,
    pub closing_merge_requests: Vec<ClosingMrRef>,
    pub discussions: Vec<DiscussionDetail>,
    // Cached GitLab work-item status fields, if ever synced.
    pub status_name: Option<String>,
    pub status_category: Option<String>,
    pub status_color: Option<String>,
    pub status_icon_name: Option<String>,
    pub status_synced_at: Option<i64>,
}
/// One discussion thread on an issue; `individual_note` marks a standalone
/// comment rather than a threaded conversation.
#[derive(Debug, Serialize)]
pub struct DiscussionDetail {
    pub notes: Vec<NoteDetail>,
    pub individual_note: bool,
}
/// One note inside a discussion thread; `created_at` is epoch milliseconds.
#[derive(Debug, Serialize)]
pub struct NoteDetail {
    pub author_username: String,
    pub body: String,
    pub created_at: i64,
    pub is_system: bool,
}
/// Load a single issue by IID (optionally scoped to a project) together with
/// its labels, assignees, closing MRs and discussion threads.
///
/// Errors if the issue is missing, or if the IID is ambiguous across
/// projects and no `project_filter` was given.
pub fn run_show_issue(
    config: &Config,
    iid: i64,
    project_filter: Option<&str>,
) -> Result<IssueDetail> {
    let conn = create_connection(&get_db_path(config.storage.db_path.as_deref()))?;
    let issue = find_issue(&conn, iid, project_filter)?;
    let labels = get_issue_labels(&conn, issue.id)?;
    let assignees = get_issue_assignees(&conn, issue.id)?;
    let closing_merge_requests = get_closing_mrs(&conn, issue.id)?;
    let discussions = get_issue_discussions(&conn, issue.id)?;
    Ok(IssueDetail {
        id: issue.id,
        iid: issue.iid,
        title: issue.title,
        description: issue.description,
        state: issue.state,
        author_username: issue.author_username,
        created_at: issue.created_at,
        updated_at: issue.updated_at,
        closed_at: issue.closed_at,
        confidential: issue.confidential,
        web_url: issue.web_url,
        // GitLab-style full reference, e.g. "group/project#42".
        references_full: format!("{}#{}", issue.project_path, issue.iid),
        project_path: issue.project_path,
        labels,
        assignees,
        due_date: issue.due_date,
        milestone: issue.milestone_title,
        user_notes_count: issue.user_notes_count,
        merge_requests_count: closing_merge_requests.len(),
        closing_merge_requests,
        discussions,
        status_name: issue.status_name,
        status_category: issue.status_category,
        status_color: issue.status_color,
        status_icon_name: issue.status_icon_name,
        status_synced_at: issue.status_synced_at,
    })
}
/// Raw issue row as selected by `find_issue`, before the extra lookups
/// (labels, assignees, MRs, discussions) are attached.
#[derive(Debug)]
struct IssueRow {
    id: i64,
    iid: i64,
    title: String,
    description: Option<String>,
    state: String,
    author_username: String,
    // Epoch milliseconds.
    created_at: i64,
    updated_at: i64,
    closed_at: Option<String>,
    confidential: bool,
    web_url: Option<String>,
    project_path: String,
    due_date: Option<String>,
    milestone_title: Option<String>,
    // Count of non-system notes (computed by a subquery in `find_issue`).
    user_notes_count: i64,
    status_name: Option<String>,
    status_category: Option<String>,
    status_color: Option<String>,
    status_icon_name: Option<String>,
    status_synced_at: Option<i64>,
}
/// Look up an issue by IID, optionally scoped to one project.
///
/// Without a project filter the IID is searched across all projects and may
/// match several issues; exactly one match is required, otherwise this
/// returns `NotFound` (0 matches) or `Ambiguous` (2+ matches, listing the
/// candidate projects).
fn find_issue(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Result<IssueRow> {
    // Pick the scoped or unscoped query; both SELECT the same column list so
    // the positional row mapping below works for either.
    let (sql, params): (&str, Vec<Box<dyn rusqlite::ToSql>>) = match project_filter {
        Some(project) => {
            let project_id = resolve_project(conn, project)?;
            (
                "SELECT i.id, i.iid, i.title, i.description, i.state, i.author_username,
                        i.created_at, i.updated_at, i.closed_at, i.confidential,
                        i.web_url, p.path_with_namespace,
                        i.due_date, i.milestone_title,
                        (SELECT COUNT(*) FROM notes n
                         JOIN discussions d ON n.discussion_id = d.id
                         WHERE d.noteable_type = 'Issue' AND d.issue_id = i.id AND n.is_system = 0) AS user_notes_count,
                        i.status_name, i.status_category, i.status_color,
                        i.status_icon_name, i.status_synced_at
                 FROM issues i
                 JOIN projects p ON i.project_id = p.id
                 WHERE i.iid = ? AND i.project_id = ?",
                vec![Box::new(iid), Box::new(project_id)],
            )
        }
        None => (
            "SELECT i.id, i.iid, i.title, i.description, i.state, i.author_username,
                    i.created_at, i.updated_at, i.closed_at, i.confidential,
                    i.web_url, p.path_with_namespace,
                    i.due_date, i.milestone_title,
                    (SELECT COUNT(*) FROM notes n
                     JOIN discussions d ON n.discussion_id = d.id
                     WHERE d.noteable_type = 'Issue' AND d.issue_id = i.id AND n.is_system = 0) AS user_notes_count,
                    i.status_name, i.status_category, i.status_color,
                    i.status_icon_name, i.status_synced_at
             FROM issues i
             JOIN projects p ON i.project_id = p.id
             WHERE i.iid = ?",
            vec![Box::new(iid)],
        ),
    };
    let param_refs: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(sql)?;
    let issues: Vec<IssueRow> = stmt
        .query_map(param_refs.as_slice(), |row| {
            // SQLite stores booleans as integers.
            let confidential_val: i64 = row.get(9)?;
            Ok(IssueRow {
                id: row.get(0)?,
                iid: row.get(1)?,
                title: row.get(2)?,
                description: row.get(3)?,
                state: row.get(4)?,
                author_username: row.get(5)?,
                created_at: row.get(6)?,
                updated_at: row.get(7)?,
                closed_at: row.get(8)?,
                confidential: confidential_val != 0,
                web_url: row.get(10)?,
                project_path: row.get(11)?,
                due_date: row.get(12)?,
                milestone_title: row.get(13)?,
                user_notes_count: row.get(14)?,
                status_name: row.get(15)?,
                status_category: row.get(16)?,
                status_color: row.get(17)?,
                status_icon_name: row.get(18)?,
                status_synced_at: row.get(19)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    // Exactly one match required; ambiguity reports the candidate projects.
    match issues.len() {
        0 => Err(LoreError::NotFound(format!("Issue #{} not found", iid))),
        1 => Ok(issues.into_iter().next().unwrap()),
        _ => {
            let projects: Vec<String> = issues.iter().map(|i| i.project_path.clone()).collect();
            Err(LoreError::Ambiguous(format!(
                "Issue #{} exists in multiple projects: {}. Use --project to specify.",
                iid,
                projects.join(", ")
            )))
        }
    }
}
/// Return the names of all labels attached to the given issue,
/// sorted alphabetically.
fn get_issue_labels(conn: &Connection, issue_id: i64) -> Result<Vec<String>> {
    let mut statement = conn.prepare(
        "SELECT l.name FROM labels l
         JOIN issue_labels il ON l.id = il.label_id
         WHERE il.issue_id = ?
         ORDER BY l.name",
    )?;
    let rows = statement.query_map([issue_id], |row| row.get::<_, String>(0))?;
    let mut labels = Vec::new();
    for name in rows {
        labels.push(name?);
    }
    Ok(labels)
}
/// Return the usernames assigned to the given issue, sorted alphabetically.
fn get_issue_assignees(conn: &Connection, issue_id: i64) -> Result<Vec<String>> {
    let mut statement = conn.prepare(
        "SELECT username FROM issue_assignees
         WHERE issue_id = ?
         ORDER BY username",
    )?;
    let rows = statement.query_map([issue_id], |row| row.get::<_, String>(0))?;
    let mut assignees = Vec::new();
    for username in rows {
        assignees.push(username?);
    }
    Ok(assignees)
}
/// Merge requests recorded in `entity_references` as closing this issue.
///
/// Only `closes` references sourced from merge requests are returned;
/// other reference types (e.g. `mentioned`) are excluded by the query.
/// Results are ordered by MR iid.
fn get_closing_mrs(conn: &Connection, issue_id: i64) -> Result<Vec<ClosingMrRef>> {
    const SQL: &str = "SELECT mr.iid, mr.title, mr.state, mr.web_url
         FROM entity_references er
         JOIN merge_requests mr ON mr.id = er.source_entity_id
         WHERE er.target_entity_type = 'issue'
         AND er.target_entity_id = ?
         AND er.source_entity_type = 'merge_request'
         AND er.reference_type = 'closes'
         ORDER BY mr.iid";
    let mut statement = conn.prepare(SQL)?;
    let mapped = statement.query_map([issue_id], |row| {
        Ok(ClosingMrRef {
            iid: row.get(0)?,
            title: row.get(1)?,
            state: row.get(2)?,
            web_url: row.get(3)?,
        })
    })?;
    let mut refs = Vec::new();
    for item in mapped {
        refs.push(item?);
    }
    Ok(refs)
}
fn get_issue_discussions(conn: &Connection, issue_id: i64) -> Result<Vec<DiscussionDetail>> {
let mut disc_stmt = conn.prepare(
"SELECT id, individual_note FROM discussions
WHERE issue_id = ?
ORDER BY first_note_at",
)?;
let disc_rows: Vec<(i64, bool)> = disc_stmt
.query_map([issue_id], |row| {
let individual: i64 = row.get(1)?;
Ok((row.get(0)?, individual == 1))
})?
.collect::<std::result::Result<Vec<_>, _>>()?;
let mut note_stmt = conn.prepare(
"SELECT author_username, body, created_at, is_system
FROM notes
WHERE discussion_id = ?
ORDER BY position",
)?;
let mut discussions = Vec::new();
for (disc_id, individual_note) in disc_rows {
let notes: Vec<NoteDetail> = note_stmt
.query_map([disc_id], |row| {
let is_system: i64 = row.get(3)?;
Ok(NoteDetail {
author_username: row.get(0)?,
body: row.get(1)?,
created_at: row.get(2)?,
is_system: is_system == 1,
})
})?
.collect::<std::result::Result<Vec<_>, _>>()?;
let has_user_notes = notes.iter().any(|n| !n.is_system);
if has_user_notes || notes.is_empty() {
discussions.push(DiscussionDetail {
notes,
individual_note,
});
}
}
Ok(discussions)
}

View File

@@ -0,0 +1,19 @@
//! `show` command: look up a single issue or merge request in the local
//! database and render it as rich terminal text or a JSON envelope.
use crate::cli::render::{self, Icons, Theme};
use rusqlite::Connection;
use serde::Serialize;
use crate::Config;
use crate::cli::robot::RobotMeta;
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::ms_to_iso;
// The command implementation is split across sibling files that are spliced
// into this module's scope (they share the imports above).
include!("issue.rs");
include!("mr.rs");
include!("render.rs");
#[cfg(test)]
#[path = "show_tests.rs"]
mod tests;

283
src/cli/commands/show/mr.rs Normal file
View File

@@ -0,0 +1,283 @@
/// Complete merge-request view assembled by `run_show_mr`: the MR row plus
/// its labels, people, and discussion threads.
#[derive(Debug, Serialize)]
pub struct MrDetail {
    pub id: i64,
    pub iid: i64,
    pub title: String,
    pub description: Option<String>,
    pub state: String,
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    // Timestamps are integer milliseconds; converted via `ms_to_iso` only
    // at the JSON boundary.
    pub created_at: i64,
    pub updated_at: i64,
    pub merged_at: Option<i64>,
    pub closed_at: Option<i64>,
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    pub discussions: Vec<MrDiscussionDetail>,
}
/// One discussion thread on a merge request.
#[derive(Debug, Serialize)]
pub struct MrDiscussionDetail {
    pub notes: Vec<MrNoteDetail>,
    // Stored as 0/1 in SQLite; presumably mirrors GitLab's
    // `individual_note` flag — TODO confirm against the ingestion side.
    pub individual_note: bool,
}
/// A single note within an MR discussion.
#[derive(Debug, Serialize)]
pub struct MrNoteDetail {
    pub author_username: String,
    pub body: String,
    pub created_at: i64,
    pub is_system: bool,
    // Present only when the note carries diff-position columns (see
    // `get_mr_discussions`), i.e. for code-review comments.
    pub position: Option<DiffNotePosition>,
}
/// File/line anchor of a diff-attached note.
#[derive(Debug, Clone, Serialize)]
pub struct DiffNotePosition {
    pub old_path: Option<String>,
    pub new_path: Option<String>,
    pub old_line: Option<i64>,
    pub new_line: Option<i64>,
    pub position_type: Option<String>,
}
/// Look up merge request `iid` in the local database and assemble the full
/// detail view (labels, assignees, reviewers, discussion threads).
///
/// `project_filter` disambiguates when the same iid exists in several
/// projects; without it an ambiguous iid is an error (see `find_mr`).
///
/// # Errors
/// Propagates database errors plus `NotFound`/`Ambiguous` from `find_mr`.
pub fn run_show_mr(config: &Config, iid: i64, project_filter: Option<&str>) -> Result<MrDetail> {
    let conn = create_connection(&get_db_path(config.storage.db_path.as_deref()))?;
    let row = find_mr(&conn, iid, project_filter)?;

    // Related collections are keyed by the MR's internal database id.
    let labels = get_mr_labels(&conn, row.id)?;
    let assignees = get_mr_assignees(&conn, row.id)?;
    let reviewers = get_mr_reviewers(&conn, row.id)?;
    let discussions = get_mr_discussions(&conn, row.id)?;

    Ok(MrDetail {
        id: row.id,
        iid: row.iid,
        title: row.title,
        description: row.description,
        state: row.state,
        draft: row.draft,
        author_username: row.author_username,
        source_branch: row.source_branch,
        target_branch: row.target_branch,
        created_at: row.created_at,
        updated_at: row.updated_at,
        merged_at: row.merged_at,
        closed_at: row.closed_at,
        web_url: row.web_url,
        project_path: row.project_path,
        labels,
        assignees,
        reviewers,
        discussions,
    })
}
/// Raw `merge_requests` row joined with the owning project's path, as read
/// by `find_mr`. Internal shape only; `run_show_mr` lifts it into `MrDetail`.
struct MrRow {
    id: i64,
    iid: i64,
    title: String,
    description: Option<String>,
    state: String,
    // Decoded from the 0/1 integer column by the row mapper.
    draft: bool,
    author_username: String,
    source_branch: String,
    target_branch: String,
    created_at: i64,
    updated_at: i64,
    merged_at: Option<i64>,
    closed_at: Option<i64>,
    web_url: Option<String>,
    project_path: String,
}
/// Locate a single merge-request row by iid, optionally scoped to a project.
///
/// # Errors
/// - `LoreError::NotFound` when no MR matches.
/// - `LoreError::Ambiguous` when the iid exists in multiple projects and no
///   `project_filter` was supplied.
fn find_mr(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Result<MrRow> {
    // Shared projection; the project-scoped variant appends one predicate.
    const BASE_SQL: &str = "SELECT m.id, m.iid, m.title, m.description, m.state, m.draft,
                m.author_username, m.source_branch, m.target_branch,
                m.created_at, m.updated_at, m.merged_at, m.closed_at,
                m.web_url, p.path_with_namespace
         FROM merge_requests m
         JOIN projects p ON m.project_id = p.id
         WHERE m.iid = ?";

    // Bind iid first; the project id (when present) binds second, matching
    // the appended predicate's placeholder order.
    let mut params: Vec<Box<dyn rusqlite::ToSql>> = vec![Box::new(iid)];
    let sql = match project_filter {
        Some(project) => {
            params.push(Box::new(resolve_project(conn, project)?));
            format!("{BASE_SQL} AND m.project_id = ?")
        }
        None => BASE_SQL.to_string(),
    };

    let bound: Vec<&dyn rusqlite::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let mut stmt = conn.prepare(&sql)?;
    let matches: Vec<MrRow> = stmt
        .query_map(bound.as_slice(), |row| {
            Ok(MrRow {
                id: row.get(0)?,
                iid: row.get(1)?,
                title: row.get(2)?,
                description: row.get(3)?,
                state: row.get(4)?,
                // SQLite stores the draft flag as a 0/1 integer.
                draft: row.get::<_, i64>(5)? == 1,
                author_username: row.get(6)?,
                source_branch: row.get(7)?,
                target_branch: row.get(8)?,
                created_at: row.get(9)?,
                updated_at: row.get(10)?,
                merged_at: row.get(11)?,
                closed_at: row.get(12)?,
                web_url: row.get(13)?,
                project_path: row.get(14)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;

    if matches.is_empty() {
        return Err(LoreError::NotFound(format!("MR !{} not found", iid)));
    }
    if matches.len() > 1 {
        let projects: Vec<String> = matches.iter().map(|m| m.project_path.clone()).collect();
        return Err(LoreError::Ambiguous(format!(
            "MR !{} exists in multiple projects: {}. Use --project to specify.",
            iid,
            projects.join(", ")
        )));
    }
    Ok(matches
        .into_iter()
        .next()
        .expect("length checked to be exactly 1"))
}
/// Return the names of all labels attached to the given MR,
/// sorted alphabetically.
fn get_mr_labels(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut statement = conn.prepare(
        "SELECT l.name FROM labels l
         JOIN mr_labels ml ON l.id = ml.label_id
         WHERE ml.merge_request_id = ?
         ORDER BY l.name",
    )?;
    let rows = statement.query_map([mr_id], |row| row.get::<_, String>(0))?;
    let mut labels = Vec::new();
    for name in rows {
        labels.push(name?);
    }
    Ok(labels)
}
/// Return the usernames assigned to the given MR, sorted alphabetically.
fn get_mr_assignees(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut statement = conn.prepare(
        "SELECT username FROM mr_assignees
         WHERE merge_request_id = ?
         ORDER BY username",
    )?;
    let rows = statement.query_map([mr_id], |row| row.get::<_, String>(0))?;
    let mut assignees = Vec::new();
    for username in rows {
        assignees.push(username?);
    }
    Ok(assignees)
}
/// Return the usernames requested as reviewers on the given MR,
/// sorted alphabetically.
fn get_mr_reviewers(conn: &Connection, mr_id: i64) -> Result<Vec<String>> {
    let mut statement = conn.prepare(
        "SELECT username FROM mr_reviewers
         WHERE merge_request_id = ?
         ORDER BY username",
    )?;
    let rows = statement.query_map([mr_id], |row| row.get::<_, String>(0))?;
    let mut reviewers = Vec::new();
    for username in rows {
        reviewers.push(username?);
    }
    Ok(reviewers)
}
/// Load the discussion threads for a merge request, ordered by first-note time.
///
/// Each note keeps its optional diff position (the file/line a review
/// comment is anchored to). Threads consisting solely of system notes are
/// dropped; threads with no notes at all are kept.
fn get_mr_discussions(conn: &Connection, mr_id: i64) -> Result<Vec<MrDiscussionDetail>> {
    // Discussion ids plus the "individual note" flag (stored as 0/1).
    let mut disc_stmt = conn.prepare(
        "SELECT id, individual_note FROM discussions
         WHERE merge_request_id = ?
         ORDER BY first_note_at",
    )?;
    let disc_rows: Vec<(i64, bool)> = disc_stmt
        .query_map([mr_id], |row| {
            let individual: i64 = row.get(1)?;
            Ok((row.get(0)?, individual == 1))
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    // One prepared statement, re-executed per discussion below.
    let mut note_stmt = conn.prepare(
        "SELECT author_username, body, created_at, is_system,
         position_old_path, position_new_path, position_old_line,
         position_new_line, position_type
         FROM notes
         WHERE discussion_id = ?
         ORDER BY position",
    )?;
    let mut discussions = Vec::new();
    for (disc_id, individual_note) in disc_rows {
        let notes: Vec<MrNoteDetail> = note_stmt
            .query_map([disc_id], |row| {
                let is_system: i64 = row.get(3)?;
                let old_path: Option<String> = row.get(4)?;
                let new_path: Option<String> = row.get(5)?;
                let old_line: Option<i64> = row.get(6)?;
                let new_line: Option<i64> = row.get(7)?;
                let position_type: Option<String> = row.get(8)?;
                // A position exists only when at least one path or line is
                // set; position_type on its own is not treated as a position.
                let position = if old_path.is_some()
                    || new_path.is_some()
                    || old_line.is_some()
                    || new_line.is_some()
                {
                    Some(DiffNotePosition {
                        old_path,
                        new_path,
                        old_line,
                        new_line,
                        position_type,
                    })
                } else {
                    None
                };
                Ok(MrNoteDetail {
                    author_username: row.get(0)?,
                    body: row.get(1)?,
                    created_at: row.get(2)?,
                    is_system: is_system == 1,
                    position,
                })
            })?
            .collect::<std::result::Result<Vec<_>, _>>()?;
        // Keep human threads; also keep (rare) note-less discussions.
        let has_user_notes = notes.iter().any(|n| !n.is_system);
        if has_user_notes || notes.is_empty() {
            discussions.push(MrDiscussionDetail {
                notes,
                individual_note,
            });
        }
    }
    Ok(discussions)
}

View File

@@ -0,0 +1,580 @@
/// Thin local alias over the shared renderer's date formatting,
/// kept so call sites in this file stay short.
fn format_date(ms: i64) -> String {
    render::format_date(ms)
}
/// Thin local alias over the shared renderer's wrap-and-indent helper.
fn wrap_text(text: &str, width: usize, indent: &str) -> String {
    render::wrap_indent(text, width, indent)
}
/// Render an issue's full detail view as human-readable text on stdout:
/// title, Details section, linked MRs, description, then discussions.
/// System notes are filtered out of the discussion view.
pub fn print_show_issue(issue: &IssueDetail) {
    // Title line
    println!(
        " Issue #{}: {}",
        issue.iid,
        Theme::bold().render(&issue.title),
    );
    // Details section
    println!("{}", render::section_divider("Details"));
    println!(
        " Ref {}",
        Theme::muted().render(&issue.references_full)
    );
    println!(
        " Project {}",
        Theme::info().render(&issue.project_path)
    );
    // Open issues render green; anything else (closed) renders dim.
    let (icon, state_style) = if issue.state == "opened" {
        (Icons::issue_opened(), Theme::success())
    } else {
        (Icons::issue_closed(), Theme::dim())
    };
    println!(
        " State {}",
        state_style.render(&format!("{icon} {}", issue.state))
    );
    // Optional work-item status, tinted with its own hex color when present.
    if let Some(status) = &issue.status_name {
        println!(
            " Status {}",
            render::style_with_hex(status, issue.status_color.as_deref())
        );
    }
    if issue.confidential {
        println!(" {}", Theme::error().bold().render("CONFIDENTIAL"));
    }
    println!(" Author @{}", issue.author_username);
    if !issue.assignees.is_empty() {
        let label = if issue.assignees.len() > 1 {
            "Assignees"
        } else {
            "Assignee"
        };
        // Pad so the value column lines up. Both labels are shorter than
        // 12 chars, so the subtraction cannot underflow.
        println!(
            " {}{} {}",
            label,
            " ".repeat(12 - label.len()),
            issue
                .assignees
                .iter()
                .map(|a| format!("@{a}"))
                .collect::<Vec<_>>()
                .join(", ")
        );
    }
    println!(
        " Created {} ({})",
        format_date(issue.created_at),
        render::format_relative_time_compact(issue.created_at),
    );
    println!(
        " Updated {} ({})",
        format_date(issue.updated_at),
        render::format_relative_time_compact(issue.updated_at),
    );
    // closed_at is already a preformatted string here, unlike the
    // millisecond timestamps above.
    if let Some(closed_at) = &issue.closed_at {
        println!(" Closed {closed_at}");
    }
    if let Some(due) = &issue.due_date {
        println!(" Due {due}");
    }
    if let Some(ms) = &issue.milestone {
        println!(" Milestone {ms}");
    }
    if !issue.labels.is_empty() {
        println!(
            " Labels {}",
            render::format_labels_bare(&issue.labels, issue.labels.len())
        );
    }
    if let Some(url) = &issue.web_url {
        println!(" URL {}", Theme::muted().render(url));
    }
    // Development section: MRs that close this issue, styled by MR state.
    if !issue.closing_merge_requests.is_empty() {
        println!("{}", render::section_divider("Development"));
        for mr in &issue.closing_merge_requests {
            let (mr_icon, mr_style) = match mr.state.as_str() {
                "merged" => (Icons::mr_merged(), Theme::accent()),
                "opened" => (Icons::mr_opened(), Theme::success()),
                "closed" => (Icons::mr_closed(), Theme::error()),
                _ => (Icons::mr_opened(), Theme::dim()),
            };
            println!(
                " {} !{} {} {}",
                mr_style.render(mr_icon),
                mr.iid,
                mr.title,
                mr_style.render(&mr.state),
            );
        }
    }
    // Description section
    println!("{}", render::section_divider("Description"));
    if let Some(desc) = &issue.description {
        let wrapped = wrap_text(desc, 72, " ");
        println!(" {wrapped}");
    } else {
        println!(" {}", Theme::muted().render("(no description)"));
    }
    // Discussions section: only threads containing at least one human note.
    let user_discussions: Vec<&DiscussionDetail> = issue
        .discussions
        .iter()
        .filter(|d| d.notes.iter().any(|n| !n.is_system))
        .collect();
    if user_discussions.is_empty() {
        println!("\n {}", Theme::muted().render("No discussions"));
    } else {
        println!(
            "{}",
            render::section_divider(&format!("Discussions ({})", user_discussions.len()))
        );
        for discussion in user_discussions {
            let user_notes: Vec<&NoteDetail> =
                discussion.notes.iter().filter(|n| !n.is_system).collect();
            // First human note is the thread opener; the rest are replies,
            // indented slightly deeper.
            if let Some(first_note) = user_notes.first() {
                println!(
                    " {} {}",
                    Theme::info().render(&format!("@{}", first_note.author_username)),
                    format_date(first_note.created_at),
                );
                let wrapped = wrap_text(&first_note.body, 68, " ");
                println!(" {wrapped}");
                println!();
                for reply in user_notes.iter().skip(1) {
                    println!(
                        " {} {}",
                        Theme::info().render(&format!("@{}", reply.author_username)),
                        format_date(reply.created_at),
                    );
                    let wrapped = wrap_text(&reply.body, 66, " ");
                    println!(" {wrapped}");
                    println!();
                }
            }
        }
    }
}
/// Render a merge request's full detail view as human-readable text on
/// stdout: title (with draft marker), Details section, description, then
/// discussions. System notes are filtered out of the discussion view.
pub fn print_show_mr(mr: &MrDetail) {
    // Title line; draft MRs get a leading draft icon.
    let draft_prefix = if mr.draft {
        format!("{} ", Icons::mr_draft())
    } else {
        String::new()
    };
    println!(
        " MR !{}: {}{}",
        mr.iid,
        draft_prefix,
        Theme::bold().render(&mr.title),
    );
    // Details section
    println!("{}", render::section_divider("Details"));
    println!(" Project {}", Theme::info().render(&mr.project_path));
    // Icon and color track the MR state.
    let (icon, state_style) = match mr.state.as_str() {
        "opened" => (Icons::mr_opened(), Theme::success()),
        "merged" => (Icons::mr_merged(), Theme::accent()),
        "closed" => (Icons::mr_closed(), Theme::error()),
        _ => (Icons::mr_opened(), Theme::dim()),
    };
    println!(
        " State {}",
        state_style.render(&format!("{icon} {}", mr.state))
    );
    println!(
        " Branches {} -> {}",
        Theme::info().render(&mr.source_branch),
        Theme::warning().render(&mr.target_branch)
    );
    println!(" Author @{}", mr.author_username);
    if !mr.assignees.is_empty() {
        println!(
            " Assignees {}",
            mr.assignees
                .iter()
                .map(|a| format!("@{a}"))
                .collect::<Vec<_>>()
                .join(", ")
        );
    }
    if !mr.reviewers.is_empty() {
        println!(
            " Reviewers {}",
            mr.reviewers
                .iter()
                .map(|r| format!("@{r}"))
                .collect::<Vec<_>>()
                .join(", ")
        );
    }
    println!(
        " Created {} ({})",
        format_date(mr.created_at),
        render::format_relative_time_compact(mr.created_at),
    );
    println!(
        " Updated {} ({})",
        format_date(mr.updated_at),
        render::format_relative_time_compact(mr.updated_at),
    );
    if let Some(merged_at) = mr.merged_at {
        println!(
            " Merged {} ({})",
            format_date(merged_at),
            render::format_relative_time_compact(merged_at),
        );
    }
    if let Some(closed_at) = mr.closed_at {
        println!(
            " Closed {} ({})",
            format_date(closed_at),
            render::format_relative_time_compact(closed_at),
        );
    }
    if !mr.labels.is_empty() {
        println!(
            " Labels {}",
            render::format_labels_bare(&mr.labels, mr.labels.len())
        );
    }
    if let Some(url) = &mr.web_url {
        println!(" URL {}", Theme::muted().render(url));
    }
    // Description section
    println!("{}", render::section_divider("Description"));
    if let Some(desc) = &mr.description {
        let wrapped = wrap_text(desc, 72, " ");
        println!(" {wrapped}");
    } else {
        println!(" {}", Theme::muted().render("(no description)"));
    }
    // Discussions section: only threads containing at least one human note.
    let user_discussions: Vec<&MrDiscussionDetail> = mr
        .discussions
        .iter()
        .filter(|d| d.notes.iter().any(|n| !n.is_system))
        .collect();
    if user_discussions.is_empty() {
        println!("\n {}", Theme::muted().render("No discussions"));
    } else {
        println!(
            "{}",
            render::section_divider(&format!("Discussions ({})", user_discussions.len()))
        );
        for discussion in user_discussions {
            let user_notes: Vec<&MrNoteDetail> =
                discussion.notes.iter().filter(|n| !n.is_system).collect();
            if let Some(first_note) = user_notes.first() {
                // Review comments anchored to a diff print their file/line
                // above the note body.
                if let Some(pos) = &first_note.position {
                    print_diff_position(pos);
                }
                println!(
                    " {} {}",
                    Theme::info().render(&format!("@{}", first_note.author_username)),
                    format_date(first_note.created_at),
                );
                let wrapped = wrap_text(&first_note.body, 68, " ");
                println!(" {wrapped}");
                println!();
                for reply in user_notes.iter().skip(1) {
                    println!(
                        " {} {}",
                        Theme::info().render(&format!("@{}", reply.author_username)),
                        format_date(reply.created_at),
                    );
                    let wrapped = wrap_text(&reply.body, 66, " ");
                    println!(" {wrapped}");
                    println!();
                }
            }
        }
    }
}
/// Print the file/line anchor of a diff-attached note (code review comment).
/// Prints nothing when neither path is set.
///
/// The line suffix renders as:
/// - `:N`     — unchanged line (same number on both sides)
/// - `:O->N`  — line present on both sides with different numbers
/// - `:+N`    — line added in the new revision
/// - `:-O`    — line removed from the old revision
fn print_diff_position(pos: &DiffNotePosition) {
    // Prefer the new path; fall back to the old one (e.g. deleted files).
    let file = pos.new_path.as_ref().or(pos.old_path.as_ref());
    if let Some(file_path) = file {
        let line_str = match (pos.old_line, pos.new_line) {
            (Some(old), Some(new)) if old == new => format!(":{}", new),
            // BUG FIX: this arm previously used ":{}{}", concatenating the
            // two numbers with no separator (old=10/new=12 rendered as
            // ":1012"). Use "->" to match the Branches rendering style.
            (Some(old), Some(new)) => format!(":{}->{}", old, new),
            (None, Some(new)) => format!(":+{}", new),
            (Some(old), None) => format!(":-{}", old),
            (None, None) => String::new(),
        };
        println!(
            " {} {}{}",
            Theme::dim().render("\u{1f4cd}"),
            Theme::warning().render(file_path),
            Theme::dim().render(&line_str)
        );
    }
}
/// JSON-facing projection of `IssueDetail`: timestamps as ISO-8601 strings
/// instead of millisecond integers. Built via `From<&IssueDetail>`.
#[derive(Serialize)]
pub struct IssueDetailJson {
    pub id: i64,
    pub iid: i64,
    pub title: String,
    pub description: Option<String>,
    pub state: String,
    pub author_username: String,
    pub created_at: String,
    pub updated_at: String,
    pub closed_at: Option<String>,
    pub confidential: bool,
    pub web_url: Option<String>,
    pub project_path: String,
    pub references_full: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub due_date: Option<String>,
    pub milestone: Option<String>,
    pub user_notes_count: i64,
    pub merge_requests_count: usize,
    pub closing_merge_requests: Vec<ClosingMrRefJson>,
    pub discussions: Vec<DiscussionDetailJson>,
    pub status_name: Option<String>,
    // NOTE(review): populated by the From impl but never serialized —
    // confirm whether omitting status_category from JSON is intentional.
    #[serde(skip_serializing)]
    pub status_category: Option<String>,
    pub status_color: Option<String>,
    pub status_icon_name: Option<String>,
    pub status_synced_at: Option<String>,
}
/// JSON projection of a closing-MR reference.
#[derive(Serialize)]
pub struct ClosingMrRefJson {
    pub iid: i64,
    pub title: String,
    pub state: String,
    pub web_url: Option<String>,
}
/// JSON projection of one issue discussion thread.
#[derive(Serialize)]
pub struct DiscussionDetailJson {
    pub notes: Vec<NoteDetailJson>,
    pub individual_note: bool,
}
/// JSON projection of a single issue note (created_at as ISO string).
#[derive(Serialize)]
pub struct NoteDetailJson {
    pub author_username: String,
    pub body: String,
    pub created_at: String,
    pub is_system: bool,
}
// Conversion from the internal detail type to its JSON projection:
// millisecond timestamps are formatted as ISO strings, everything else is
// copied field by field.
impl From<&IssueDetail> for IssueDetailJson {
    fn from(issue: &IssueDetail) -> Self {
        Self {
            id: issue.id,
            iid: issue.iid,
            title: issue.title.clone(),
            description: issue.description.clone(),
            state: issue.state.clone(),
            author_username: issue.author_username.clone(),
            created_at: ms_to_iso(issue.created_at),
            updated_at: ms_to_iso(issue.updated_at),
            // Already a string on IssueDetail, so cloned rather than
            // converted like created_at/updated_at.
            closed_at: issue.closed_at.clone(),
            confidential: issue.confidential,
            web_url: issue.web_url.clone(),
            project_path: issue.project_path.clone(),
            references_full: issue.references_full.clone(),
            labels: issue.labels.clone(),
            assignees: issue.assignees.clone(),
            due_date: issue.due_date.clone(),
            milestone: issue.milestone.clone(),
            user_notes_count: issue.user_notes_count,
            merge_requests_count: issue.merge_requests_count,
            closing_merge_requests: issue
                .closing_merge_requests
                .iter()
                .map(|mr| ClosingMrRefJson {
                    iid: mr.iid,
                    title: mr.title.clone(),
                    state: mr.state.clone(),
                    web_url: mr.web_url.clone(),
                })
                .collect(),
            discussions: issue.discussions.iter().map(|d| d.into()).collect(),
            status_name: issue.status_name.clone(),
            status_category: issue.status_category.clone(),
            status_color: issue.status_color.clone(),
            status_icon_name: issue.status_icon_name.clone(),
            status_synced_at: issue.status_synced_at.map(ms_to_iso),
        }
    }
}
impl From<&DiscussionDetail> for DiscussionDetailJson {
    fn from(disc: &DiscussionDetail) -> Self {
        Self {
            notes: disc.notes.iter().map(|n| n.into()).collect(),
            individual_note: disc.individual_note,
        }
    }
}
impl From<&NoteDetail> for NoteDetailJson {
    fn from(note: &NoteDetail) -> Self {
        Self {
            author_username: note.author_username.clone(),
            body: note.body.clone(),
            created_at: ms_to_iso(note.created_at),
            is_system: note.is_system,
        }
    }
}
/// JSON-facing projection of `MrDetail`: timestamps as ISO-8601 strings
/// instead of millisecond integers. Built via `From<&MrDetail>`.
#[derive(Serialize)]
pub struct MrDetailJson {
    pub id: i64,
    pub iid: i64,
    pub title: String,
    pub description: Option<String>,
    pub state: String,
    pub draft: bool,
    pub author_username: String,
    pub source_branch: String,
    pub target_branch: String,
    pub created_at: String,
    pub updated_at: String,
    pub merged_at: Option<String>,
    pub closed_at: Option<String>,
    pub web_url: Option<String>,
    pub project_path: String,
    pub labels: Vec<String>,
    pub assignees: Vec<String>,
    pub reviewers: Vec<String>,
    pub discussions: Vec<MrDiscussionDetailJson>,
}
/// JSON projection of one MR discussion thread.
#[derive(Serialize)]
pub struct MrDiscussionDetailJson {
    pub notes: Vec<MrNoteDetailJson>,
    pub individual_note: bool,
}
/// JSON projection of a single MR note; keeps the optional diff anchor.
#[derive(Serialize)]
pub struct MrNoteDetailJson {
    pub author_username: String,
    pub body: String,
    pub created_at: String,
    pub is_system: bool,
    pub position: Option<DiffNotePosition>,
}
// Conversion from the internal MR detail type to its JSON projection;
// millisecond timestamps become ISO strings.
impl From<&MrDetail> for MrDetailJson {
    fn from(mr: &MrDetail) -> Self {
        Self {
            id: mr.id,
            iid: mr.iid,
            title: mr.title.clone(),
            description: mr.description.clone(),
            state: mr.state.clone(),
            draft: mr.draft,
            author_username: mr.author_username.clone(),
            source_branch: mr.source_branch.clone(),
            target_branch: mr.target_branch.clone(),
            created_at: ms_to_iso(mr.created_at),
            updated_at: ms_to_iso(mr.updated_at),
            merged_at: mr.merged_at.map(ms_to_iso),
            closed_at: mr.closed_at.map(ms_to_iso),
            web_url: mr.web_url.clone(),
            project_path: mr.project_path.clone(),
            labels: mr.labels.clone(),
            assignees: mr.assignees.clone(),
            reviewers: mr.reviewers.clone(),
            discussions: mr.discussions.iter().map(|d| d.into()).collect(),
        }
    }
}
impl From<&MrDiscussionDetail> for MrDiscussionDetailJson {
    fn from(disc: &MrDiscussionDetail) -> Self {
        Self {
            notes: disc.notes.iter().map(|n| n.into()).collect(),
            individual_note: disc.individual_note,
        }
    }
}
impl From<&MrNoteDetail> for MrNoteDetailJson {
    fn from(note: &MrNoteDetail) -> Self {
        Self {
            author_username: note.author_username.clone(),
            body: note.body.clone(),
            created_at: ms_to_iso(note.created_at),
            is_system: note.is_system,
            // DiffNotePosition serializes as-is; clone preserves the anchor.
            position: note.position.clone(),
        }
    }
}
/// Emit the issue detail on stdout as a single-line JSON envelope of the
/// form `{"ok": true, "data": …, "meta": …}`. Serialization failures are
/// reported on stderr instead of panicking.
pub fn print_show_issue_json(issue: &IssueDetail, elapsed_ms: u64) {
    let envelope = serde_json::json!({
        "ok": true,
        "data": IssueDetailJson::from(issue),
        "meta": RobotMeta { elapsed_ms },
    });
    match serde_json::to_string(&envelope) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}
/// Emit the MR detail on stdout as a single-line JSON envelope of the form
/// `{"ok": true, "data": …, "meta": …}`. Serialization failures are
/// reported on stderr instead of panicking.
pub fn print_show_mr_json(mr: &MrDetail, elapsed_ms: u64) {
    let envelope = serde_json::json!({
        "ok": true,
        "data": MrDetailJson::from(mr),
        "meta": RobotMeta { elapsed_ms },
    });
    match serde_json::to_string(&envelope) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing to JSON: {e}"),
    }
}

View File

@@ -0,0 +1,353 @@
use super::*;
use crate::core::db::run_migrations;
use std::path::Path;
/// Fresh in-memory SQLite database with all migrations applied.
fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}
/// Insert the fixture project (id=1, path "group/repo") used by most tests.
fn seed_project(conn: &Connection) {
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
         VALUES (1, 100, 'group/repo', 'https://gitlab.example.com', 1000, 2000)",
        [],
    )
    .unwrap();
}
/// Insert the fixture issue (id=1, iid=10) into project 1, seeding the
/// project first.
fn seed_issue(conn: &Connection) {
    seed_project(conn);
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (1, 200, 10, 1, 'Test issue', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
}
/// Insert a second project (id=2, path "other/repo") for ambiguity tests.
fn seed_second_project(conn: &Connection) {
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
         VALUES (2, 101, 'other/repo', 'https://gitlab.example.com/other', 1000, 2000)",
        [],
    )
    .unwrap();
}
/// Insert one discussion on `issue_id` containing `user_notes` non-system
/// notes followed by `system_notes` system notes.
///
/// The discussion id is derived from the current max so repeated calls add
/// independent discussions; note gitlab_ids are offset per discussion to
/// stay unique.
fn seed_discussion_with_notes(
    conn: &Connection,
    issue_id: i64,
    project_id: i64,
    user_notes: usize,
    system_notes: usize,
) {
    let disc_id: i64 = conn
        .query_row(
            "SELECT COALESCE(MAX(id), 0) + 1 FROM discussions",
            [],
            |r| r.get(0),
        )
        .unwrap();
    conn.execute(
        "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, first_note_at, last_note_at, last_seen_at)
         VALUES (?1, ?2, ?3, ?4, 'Issue', 1000, 2000, 2000)",
        rusqlite::params![disc_id, format!("disc-{}", disc_id), project_id, issue_id],
    )
    .unwrap();
    // User notes occupy positions 0..user_notes.
    for i in 0..user_notes {
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system, position)
             VALUES (?1, ?2, ?3, 'user1', 'comment', 1000, 2000, 2000, 0, ?4)",
            rusqlite::params![1000 + disc_id * 100 + i as i64, disc_id, project_id, i as i64],
        )
        .unwrap();
    }
    // System notes follow, continuing the position sequence.
    for i in 0..system_notes {
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system, position)
             VALUES (?1, ?2, ?3, 'system', 'status changed', 1000, 2000, 2000, 1, ?4)",
            rusqlite::params![2000 + disc_id * 100 + i as i64, disc_id, project_id, (user_notes + i) as i64],
        )
        .unwrap();
    }
}
// --- find_issue tests ---
// Happy path: one matching issue, no project filter needed.
#[test]
fn test_find_issue_basic() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.iid, 10);
    assert_eq!(row.title, "Test issue");
    assert_eq!(row.state, "opened");
    assert_eq!(row.author_username, "author");
    assert_eq!(row.project_path, "group/repo");
}
// An explicit project filter still resolves the issue when it matches.
#[test]
fn test_find_issue_with_project_filter() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let row = find_issue(&conn, 10, Some("group/repo")).unwrap();
    assert_eq!(row.iid, 10);
    assert_eq!(row.project_path, "group/repo");
}
// Unknown iid maps to NotFound.
#[test]
fn test_find_issue_not_found() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let err = find_issue(&conn, 999, None).unwrap_err();
    assert!(matches!(err, LoreError::NotFound(_)));
}
// Filtering by a project that doesn't contain the iid is also NotFound.
#[test]
fn test_find_issue_wrong_project_filter() {
    let conn = setup_test_db();
    seed_issue(&conn);
    seed_second_project(&conn);
    // Issue 10 only exists in project 1, not project 2
    let err = find_issue(&conn, 10, Some("other/repo")).unwrap_err();
    assert!(matches!(err, LoreError::NotFound(_)));
}
// Same iid in two projects without a filter is an Ambiguous error.
#[test]
fn test_find_issue_ambiguous_without_project() {
    let conn = setup_test_db();
    seed_issue(&conn); // issue iid=10 in project 1
    seed_second_project(&conn);
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (2, 201, 10, 2, 'Same iid different project', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    let err = find_issue(&conn, 10, None).unwrap_err();
    assert!(matches!(err, LoreError::Ambiguous(_)));
}
// The project filter disambiguates a duplicated iid.
#[test]
fn test_find_issue_ambiguous_resolved_with_project() {
    let conn = setup_test_db();
    seed_issue(&conn);
    seed_second_project(&conn);
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (2, 201, 10, 2, 'Same iid different project', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    let row = find_issue(&conn, 10, Some("other/repo")).unwrap();
    assert_eq!(row.title, "Same iid different project");
}
// --- user_notes_count subquery tests ---
// No discussions at all: the derived count is zero.
#[test]
fn test_find_issue_user_notes_count_zero() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 0);
}
// System notes are excluded from the count.
#[test]
fn test_find_issue_user_notes_count_excludes_system() {
    let conn = setup_test_db();
    seed_issue(&conn);
    // 2 user notes + 3 system notes = should count only 2
    seed_discussion_with_notes(&conn, 1, 1, 2, 3);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 2);
}
// The count sums user notes across all of an issue's discussions.
#[test]
fn test_find_issue_user_notes_count_across_discussions() {
    let conn = setup_test_db();
    seed_issue(&conn);
    seed_discussion_with_notes(&conn, 1, 1, 3, 0); // 3 user notes
    seed_discussion_with_notes(&conn, 1, 1, 1, 2); // 1 user note + 2 system
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 4);
}
// Notes attached to a different issue must not leak into the count.
#[test]
fn test_find_issue_notes_count_ignores_other_issues() {
    let conn = setup_test_db();
    seed_issue(&conn);
    // Add a second issue
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, author_username,
         created_at, updated_at, last_seen_at)
         VALUES (2, 201, 20, 1, 'Other issue', 'opened', 'author', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    // Notes on issue 2, not issue 1
    seed_discussion_with_notes(&conn, 2, 1, 5, 0);
    let row = find_issue(&conn, 10, None).unwrap();
    assert_eq!(row.user_notes_count, 0); // Issue 10 has no notes
}
// Sanity check that hex styling still produces output after the move.
#[test]
fn test_ansi256_from_rgb() {
    // Moved to render.rs — keeping basic hex sanity check
    let result = render::style_with_hex("test", Some("#ff0000"));
    assert!(!result.is_empty());
}
// --- get_issue_assignees tests ---
// No rows: empty vec, not an error.
#[test]
fn test_get_issue_assignees_empty() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let result = get_issue_assignees(&conn, 1).unwrap();
    assert!(result.is_empty());
}
// A single assignee round-trips as-is.
#[test]
fn test_get_issue_assignees_single() {
    let conn = setup_test_db();
    seed_issue(&conn);
    conn.execute(
        "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'charlie')",
        [],
    )
    .unwrap();
    let result = get_issue_assignees(&conn, 1).unwrap();
    assert_eq!(result, vec!["charlie"]);
}
// Multiple assignees come back in alphabetical order regardless of
// insertion order.
#[test]
fn test_get_issue_assignees_multiple_sorted() {
    let conn = setup_test_db();
    seed_issue(&conn);
    conn.execute(
        "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'bob')",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'alice')",
        [],
    )
    .unwrap();
    let result = get_issue_assignees(&conn, 1).unwrap();
    assert_eq!(result, vec!["alice", "bob"]); // alphabetical
}
// --- get_closing_mrs tests ---
// No entity references: empty vec.
#[test]
fn test_get_closing_mrs_empty() {
    let conn = setup_test_db();
    seed_issue(&conn);
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert!(result.is_empty());
}
// A 'closes' reference from an MR surfaces that MR's details.
#[test]
fn test_get_closing_mrs_single() {
    let conn = setup_test_db();
    seed_issue(&conn);
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (1, 300, 5, 1, 'Fix the bug', 'merged', 'dev', 'fix', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 1, 'issue', 1, 'closes', 'api', 3000)",
        [],
    )
    .unwrap();
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert_eq!(result.len(), 1);
    assert_eq!(result[0].iid, 5);
    assert_eq!(result[0].title, "Fix the bug");
    assert_eq!(result[0].state, "merged");
}
// Only reference_type='closes' counts; 'mentioned' links are excluded.
#[test]
fn test_get_closing_mrs_ignores_mentioned() {
    let conn = setup_test_db();
    seed_issue(&conn);
    // Add a 'mentioned' reference that should be ignored
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (1, 300, 5, 1, 'Some MR', 'opened', 'dev', 'feat', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 1, 'issue', 1, 'mentioned', 'note_parse', 3000)",
        [],
    )
    .unwrap();
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert!(result.is_empty()); // 'mentioned' refs not included
}
// Results are ordered by MR iid, not by reference insertion order.
#[test]
fn test_get_closing_mrs_multiple_sorted() {
    let conn = setup_test_db();
    seed_issue(&conn);
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (1, 300, 8, 1, 'Second fix', 'opened', 'dev', 'fix2', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, author_username,
         source_branch, target_branch, created_at, updated_at, last_seen_at)
         VALUES (2, 301, 5, 1, 'First fix', 'merged', 'dev', 'fix1', 'main', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 1, 'issue', 1, 'closes', 'api', 3000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id,
         target_entity_type, target_entity_id, reference_type, source_method, created_at)
         VALUES (1, 'merge_request', 2, 'issue', 1, 'closes', 'api', 3000)",
        [],
    )
    .unwrap();
    let result = get_closing_mrs(&conn, 1).unwrap();
    assert_eq!(result.len(), 2);
    assert_eq!(result[0].iid, 5); // Lower iid first
    assert_eq!(result[1].iid, 8);
}
#[test]
fn wrap_text_single_line() {
    // Text shorter than the width passes through without a line break.
    let wrapped = wrap_text("hello world", 80, " ");
    assert_eq!(wrapped, "hello world");
}
#[test]
fn wrap_text_multiple_lines() {
    // A width of 10 cannot hold five words, so at least one break appears.
    let wrapped = wrap_text("one two three four five", 10, " ");
    assert!(wrapped.contains('\n'), "expected a line break at width 10");
}
#[test]
fn format_date_extracts_date_part() {
    // 1_705_276_800_000 ms since the epoch is 2024-01-15T00:00:00Z.
    let formatted = format_date(1_705_276_800_000);
    assert!(formatted.starts_with("2024-01-15"));
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,24 @@
//! Sync command module.
//!
//! The implementation is split across sibling files pulled in with `include!`
//! so they share this module's imports: `run.rs` holds the pipeline driver
//! and `render.rs` the human/JSON output helpers. The surgical (per-IID)
//! variant lives in its own real submodule.
pub mod surgical;
pub use surgical::run_sync_surgical;
use crate::cli::render::{self, Icons, Theme, format_number};
use serde::Serialize;
use std::time::Instant;
use tracing::Instrument;
use tracing::{debug, warn};
use crate::Config;
use crate::cli::progress::{format_stage_line, nested_progress, stage_spinner_v2};
use crate::core::error::Result;
use crate::core::metrics::{MetricsLayer, StageTiming};
use crate::core::shutdown::ShutdownSignal;
use super::embed::run_embed;
use super::generate_docs::run_generate_docs;
use super::ingest::{
    DryRunPreview, IngestDisplay, ProjectStatusEnrichment, ProjectSummary, run_ingest,
    run_ingest_dry_run,
};
// Textual includes (not submodules): the included code sees the imports above.
include!("run.rs");
include!("render.rs");

View File

@@ -0,0 +1,533 @@
/// Print the human-readable summary of a sync run: a headline ("Synced",
/// "Sync completed with issues", or "Already up to date"), zero-suppressed
/// detail lines joined with middle dots, a prominent error line, and an
/// optional stage-timing section when `show_timings` is set.
pub fn print_sync(
    result: &SyncResult,
    elapsed: std::time::Duration,
    metrics: Option<&MetricsLayer>,
    show_timings: bool,
) {
    // Shared "<count> <label>" segment formatting (count highlighted, label
    // dimmed); replaces six copies of the identical format! block.
    let count_detail = |count: usize, label: &str| {
        format!(
            "{} {}",
            Theme::info().render(&count.to_string()),
            Theme::dim().render(label)
        )
    };
    let has_data = result.issues_updated > 0
        || result.mrs_updated > 0
        || result.discussions_fetched > 0
        || result.resource_events_fetched > 0
        || result.mr_diffs_fetched > 0
        || result.documents_regenerated > 0
        || result.documents_embedded > 0
        || result.statuses_enriched > 0;
    let has_failures = result.resource_events_failed > 0
        || result.mr_diffs_failed > 0
        || result.status_enrichment_errors > 0
        || result.documents_errored > 0
        || result.embedding_failed > 0;
    if !has_data && !has_failures {
        println!(
            "\n {} ({})\n",
            Theme::dim().render("Already up to date"),
            Theme::timing().render(&format!("{:.1}s", elapsed.as_secs_f64()))
        );
    } else {
        let headline = if has_failures {
            Theme::warning().bold().render("Sync completed with issues")
        } else {
            Theme::success().bold().render("Synced")
        };
        println!(
            "\n {} {} issues and {} MRs in {}",
            headline,
            Theme::info()
                .bold()
                .render(&result.issues_updated.to_string()),
            Theme::info().bold().render(&result.mrs_updated.to_string()),
            Theme::timing().render(&format!("{:.1}s", elapsed.as_secs_f64()))
        );
        // Detail: supporting counts, compact middle-dot format, zero-suppressed
        let mut details: Vec<String> = Vec::new();
        if result.discussions_fetched > 0 {
            details.push(count_detail(result.discussions_fetched, "discussions"));
        }
        if result.resource_events_fetched > 0 {
            details.push(count_detail(result.resource_events_fetched, "events"));
        }
        if result.mr_diffs_fetched > 0 {
            details.push(count_detail(result.mr_diffs_fetched, "diffs"));
        }
        if result.statuses_enriched > 0 {
            details.push(count_detail(result.statuses_enriched, "statuses updated"));
        }
        if !details.is_empty() {
            let sep = Theme::dim().render(" \u{b7} ");
            println!(" {}", details.join(&sep));
        }
        // Documents: regeneration + embedding as a second detail line
        let mut doc_parts: Vec<String> = Vec::new();
        if result.documents_regenerated > 0 {
            doc_parts.push(count_detail(result.documents_regenerated, "docs regenerated"));
        }
        if result.documents_embedded > 0 {
            doc_parts.push(count_detail(result.documents_embedded, "embedded"));
        }
        if result.documents_errored > 0 {
            doc_parts
                .push(Theme::error().render(&format!("{} doc errors", result.documents_errored)));
        }
        if !doc_parts.is_empty() {
            let sep = Theme::dim().render(" \u{b7} ");
            println!(" {}", doc_parts.join(&sep));
        }
        // Errors: visually prominent, only if non-zero
        let mut errors: Vec<String> = Vec::new();
        if result.resource_events_failed > 0 {
            errors.push(format!("{} event failures", result.resource_events_failed));
        }
        if result.mr_diffs_failed > 0 {
            errors.push(format!("{} diff failures", result.mr_diffs_failed));
        }
        if result.status_enrichment_errors > 0 {
            errors.push(format!("{} status errors", result.status_enrichment_errors));
        }
        if result.embedding_failed > 0 {
            errors.push(format!("{} embedding failures", result.embedding_failed));
        }
        if !errors.is_empty() {
            println!(" {}", Theme::error().render(&errors.join(" \u{b7} ")));
        }
        println!();
    }
    if let Some(metrics) = metrics {
        let stages = metrics.extract_timings();
        if should_print_timings(show_timings, &stages) {
            print_timing_summary(&stages);
        }
    }
}
/// Build one indented per-project detail row for the issues stage, with
/// zero-suppressed segments joined by a dimmed middle dot.
fn issue_sub_rows(projects: &[ProjectSummary]) -> Vec<String> {
    let mut rows = Vec::with_capacity(projects.len());
    for summary in projects {
        let noun = if summary.items_upserted == 1 {
            "issue"
        } else {
            "issues"
        };
        let mut segments = vec![format!("{} {}", summary.items_upserted, noun)];
        if summary.discussions_synced > 0 {
            segments.push(format!("{} discussions", summary.discussions_synced));
        }
        // Shown when statuses were either seen or enriched; always reports
        // the enriched count.
        if summary.statuses_seen > 0 || summary.statuses_enriched > 0 {
            segments.push(format!("{} statuses updated", summary.statuses_enriched));
        }
        if summary.events_fetched > 0 {
            segments.push(format!("{} events", summary.events_fetched));
        }
        if summary.status_errors > 0 {
            segments
                .push(Theme::warning().render(&format!("{} status errors", summary.status_errors)));
        }
        if summary.events_failed > 0 {
            segments
                .push(Theme::warning().render(&format!("{} event failures", summary.events_failed)));
        }
        let separator = Theme::dim().render(" \u{b7} ");
        let padded = Theme::muted().render(&format!("{:<30}", summary.path));
        rows.push(format!(" {padded} {}", segments.join(&separator)));
    }
    rows
}
/// Build one indented per-project detail row for the status-enrichment stage.
fn status_sub_rows(projects: &[ProjectStatusEnrichment]) -> Vec<String> {
    let mut rows = Vec::with_capacity(projects.len());
    for enrichment in projects {
        // Partial errors plus one for a top-level error, if any.
        let total_errors = enrichment.partial_errors + usize::from(enrichment.error.is_some());
        let mut segments = vec![format!("{} statuses updated", enrichment.enriched)];
        if enrichment.cleared > 0 {
            segments.push(format!("{} cleared", enrichment.cleared));
        }
        if enrichment.seen > 0 {
            segments.push(format!("{} seen", enrichment.seen));
        }
        if total_errors > 0 {
            segments.push(Theme::warning().render(&format!("{} errors", total_errors)));
        } else if enrichment.mode == "skipped" {
            // The skip annotation only appears when there were no errors.
            let note = match &enrichment.reason {
                Some(reason) => Theme::dim().render(&format!("skipped ({reason})")),
                None => Theme::dim().render("skipped"),
            };
            segments.push(note);
        }
        let separator = Theme::dim().render(" \u{b7} ");
        let padded = Theme::muted().render(&format!("{:<30}", enrichment.path));
        rows.push(format!(" {padded} {}", segments.join(&separator)));
    }
    rows
}
/// Build one indented per-project detail row for the merge-requests stage.
fn mr_sub_rows(projects: &[ProjectSummary]) -> Vec<String> {
    let mut rows = Vec::with_capacity(projects.len());
    for summary in projects {
        let noun = if summary.items_upserted == 1 { "MR" } else { "MRs" };
        let mut segments = vec![format!("{} {}", summary.items_upserted, noun)];
        if summary.discussions_synced > 0 {
            segments.push(format!("{} discussions", summary.discussions_synced));
        }
        if summary.mr_diffs_fetched > 0 {
            segments.push(format!("{} diffs", summary.mr_diffs_fetched));
        }
        if summary.events_fetched > 0 {
            segments.push(format!("{} events", summary.events_fetched));
        }
        if summary.mr_diffs_failed > 0 {
            segments.push(
                Theme::warning().render(&format!("{} diff failures", summary.mr_diffs_failed)),
            );
        }
        if summary.events_failed > 0 {
            segments
                .push(Theme::warning().render(&format!("{} event failures", summary.events_failed)));
        }
        let separator = Theme::dim().render(" \u{b7} ");
        let padded = Theme::muted().render(&format!("{:<30}", summary.path));
        rows.push(format!(" {padded} {}", segments.join(&separator)));
    }
    rows
}
/// Clear the stage spinner and print its final one-line summary.
fn emit_stage_line(
    pb: &indicatif::ProgressBar,
    icon: &str,
    label: &str,
    summary: &str,
    elapsed: std::time::Duration,
) {
    pb.finish_and_clear();
    let line = format_stage_line(icon, label, summary, elapsed);
    print_static_lines(std::slice::from_ref(&line));
}
/// Clear the stage spinner and print its summary line followed by the
/// per-project detail rows.
fn emit_stage_block(
    pb: &indicatif::ProgressBar,
    icon: &str,
    label: &str,
    summary: &str,
    elapsed: std::time::Duration,
    sub_rows: &[String],
) {
    pb.finish_and_clear();
    let lines: Vec<String> = std::iter::once(format_stage_line(icon, label, summary, elapsed))
        .chain(sub_rows.iter().cloned())
        .collect();
    print_static_lines(&lines);
}
/// Print lines to stdout while the shared MultiProgress is suspended, so they
/// land above any active progress bars instead of being overdrawn.
fn print_static_lines(lines: &[String]) {
    crate::cli::progress::multi().suspend(|| {
        lines.iter().for_each(|line| println!("{line}"));
    });
}
/// Timings are printed only when the caller asked for them AND at least one
/// stage was recorded.
fn should_print_timings(show_timings: bool, stages: &[StageTiming]) -> bool {
    !stages.is_empty() && show_timings
}
/// Append " (<n> label, ...)" to `summary` for each non-zero failure count,
/// rendered in the warning style. Leaves `summary` untouched when every
/// count is zero.
fn append_failures(summary: &mut String, failures: &[(&str, usize)]) {
    let mut rendered: Vec<String> = Vec::new();
    for (label, count) in failures {
        if *count > 0 {
            rendered.push(Theme::warning().render(&format!("{count} {label}")));
        }
    }
    if !rendered.is_empty() {
        summary.push_str(&format!(" ({})", rendered.join(", ")));
    }
}
/// Aggregate per-project status-enrichment results into a one-line summary
/// plus a flag saying whether any errors occurred.
fn summarize_status_enrichment(projects: &[ProjectStatusEnrichment]) -> (String, bool) {
    // Single pass over the projects instead of one sum per field.
    let mut enriched = 0usize;
    let mut seen = 0usize;
    let mut cleared = 0usize;
    let mut errors = 0usize;
    let mut skipped = 0usize;
    for project in projects {
        enriched += project.enriched;
        seen += project.seen;
        cleared += project.cleared;
        errors += project.partial_errors + usize::from(project.error.is_some());
        if project.mode == "skipped" {
            skipped += 1;
        }
    }
    let mut parts = vec![format!(
        "{} statuses updated",
        format_number(enriched as i64)
    )];
    if cleared > 0 {
        parts.push(format!("{} cleared", format_number(cleared as i64)));
    }
    if seen > 0 {
        parts.push(format!("{} seen", format_number(seen as i64)));
    }
    if errors > 0 {
        parts.push(format!("{} errors", format_number(errors as i64)));
    } else if projects.is_empty() || skipped == projects.len() {
        // "skipped" only appears when nothing errored and no project ran.
        parts.push("skipped".to_string());
    }
    (parts.join(" \u{b7} "), errors > 0)
}
/// Print a titled divider line separating output sections.
fn section(title: &str) {
    println!("{}", render::section_divider(title));
}
/// Print the "Timing" section. Note that only each stage's sub-stages are
/// printed (starting at depth 1); the top-level wrapper stages themselves
/// are intentionally not rendered.
fn print_timing_summary(stages: &[StageTiming]) {
    section("Timing");
    for sub in stages.iter().flat_map(|stage| stage.sub_stages.iter()) {
        print_stage_line(sub, 1);
    }
}
fn print_stage_line(stage: &StageTiming, depth: usize) {
let indent = " ".repeat(depth);
let name = if let Some(ref project) = stage.project {
format!("{} ({})", stage.name, project)
} else {
stage.name.clone()
};
let pad_width = 30_usize.saturating_sub(indent.len() + name.len());
let dots = Theme::dim().render(&".".repeat(pad_width.max(2)));
let time_str = Theme::bold().render(&format!("{:.1}s", stage.elapsed_ms as f64 / 1000.0));
let mut parts: Vec<String> = Vec::new();
if stage.items_processed > 0 {
parts.push(format!("{} items", stage.items_processed));
}
if stage.errors > 0 {
parts.push(Theme::error().render(&format!("{} errors", stage.errors)));
}
if stage.rate_limit_hits > 0 {
parts.push(Theme::warning().render(&format!("{} rate limits", stage.rate_limit_hits)));
}
if parts.is_empty() {
println!("{indent}{name} {dots} {time_str}");
} else {
let suffix = parts.join(" \u{b7} ");
println!("{indent}{name} {dots} {time_str} ({suffix})");
}
for sub in &stage.sub_stages {
print_stage_line(sub, depth + 1);
}
}
/// Envelope for `print_sync_json`: `{ ok, data, meta }`.
#[derive(Serialize)]
struct SyncJsonOutput<'a> {
    ok: bool,
    // Borrowed sync result, serialized under "data".
    data: &'a SyncResult,
    meta: SyncMeta,
}
/// Run metadata attached to the JSON envelope.
#[derive(Serialize)]
struct SyncMeta {
    run_id: String,
    elapsed_ms: u64,
    // Stage timings; dropped from the JSON entirely when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    stages: Vec<StageTiming>,
}
/// Emit the sync result as a single JSON line (`{ ok, data, meta }`) on
/// stdout; serialization errors go to stderr.
pub fn print_sync_json(result: &SyncResult, elapsed_ms: u64, metrics: Option<&MetricsLayer>) {
    let stages = match metrics {
        Some(layer) => MetricsLayer::extract_timings(layer),
        None => Vec::new(),
    };
    let output = SyncJsonOutput {
        ok: true,
        data: result,
        meta: SyncMeta {
            run_id: result.run_id.clone(),
            elapsed_ms,
            stages,
        },
    };
    match serde_json::to_string(&output) {
        Ok(payload) => println!("{payload}"),
        Err(err) => eprintln!("Error serializing to JSON: {err}"),
    }
}
/// Outcome of a `sync --dry-run`: what each stage would do, with no writes.
#[derive(Debug, Default, Serialize)]
pub struct SyncDryRunResult {
    pub issues_preview: DryRunPreview,
    pub mrs_preview: DryRunPreview,
    // Derived from the --no-docs / --no-embed flags.
    pub would_generate_docs: bool,
    pub would_embed: bool,
}
/// Preview a sync run without making any changes: gather ingest previews for
/// both entity types, print them (JSON in robot mode), and return an empty
/// result so callers observe no mutations.
async fn run_sync_dry_run(config: &Config, options: &SyncOptions) -> Result<SyncResult> {
    // Issues preview first, then MRs — same order the real pipeline runs.
    let dry_result = SyncDryRunResult {
        issues_preview: run_ingest_dry_run(config, "issues", None, options.full)?,
        mrs_preview: run_ingest_dry_run(config, "mrs", None, options.full)?,
        would_generate_docs: !options.no_docs,
        would_embed: !options.no_embed,
    };
    if options.robot_mode {
        print_sync_dry_run_json(&dry_result);
    } else {
        print_sync_dry_run(&dry_result);
    }
    // Preview only — nothing was synced.
    Ok(SyncResult::default())
}
/// Print the human-readable dry-run preview: per-entity sections plus which
/// pipeline stages would run.
pub fn print_sync_dry_run(result: &SyncDryRunResult) {
    println!(
        "\n {} {}",
        Theme::info().bold().render("Dry run"),
        Theme::dim().render("(no changes will be made)")
    );
    print_dry_run_entity("Issues", &result.issues_preview);
    print_dry_run_entity("Merge Requests", &result.mrs_preview);
    // Pipeline stages
    section("Pipeline");
    // A stage that would be skipped is shown dimmed with a "(skip)" suffix.
    let stage_label = |name: &str, would_run: bool| {
        if would_run {
            name.to_string()
        } else {
            Theme::dim().render(&format!("{name} (skip)"))
        }
    };
    let stages = vec![
        stage_label("generate-docs", result.would_generate_docs),
        stage_label("embed", result.would_embed),
    ];
    println!(" {}", stages.join(" \u{b7} "));
}
/// Print one entity section of the dry-run preview: sync mode, project
/// count, and a per-project line with sync status and existing-row count.
fn print_dry_run_entity(label: &str, preview: &DryRunPreview) {
    section(label);
    let mode = if preview.sync_mode == "full" {
        Theme::warning().render("full")
    } else {
        Theme::success().render("incremental")
    };
    println!(" {} \u{b7} {} projects", mode, preview.projects.len());
    for project in &preview.projects {
        // No cursor means this project has never been synced before.
        let sync_status = if project.has_cursor {
            Theme::success().render("incremental")
        } else {
            Theme::warning().render("initial sync")
        };
        if project.existing_count > 0 {
            println!(
                " {} \u{b7} {} \u{b7} {} existing",
                &project.path, sync_status, project.existing_count
            );
        } else {
            println!(" {} \u{b7} {}", &project.path, sync_status);
        }
    }
}
/// Envelope for `print_sync_dry_run_json`: `{ ok, dry_run, data }`.
#[derive(Serialize)]
struct SyncDryRunJsonOutput {
    ok: bool,
    dry_run: bool,
    data: SyncDryRunJsonData,
}
/// Payload: one entry per pipeline stage, in execution order.
#[derive(Serialize)]
struct SyncDryRunJsonData {
    stages: Vec<SyncDryRunStage>,
}
/// One pipeline stage in the dry-run preview.
#[derive(Serialize)]
struct SyncDryRunStage {
    name: String,
    would_run: bool,
    // Ingest stages carry a per-project preview; docs/embed stages do not.
    #[serde(skip_serializing_if = "Option::is_none")]
    preview: Option<DryRunPreview>,
}
pub fn print_sync_dry_run_json(result: &SyncDryRunResult) {
let output = SyncDryRunJsonOutput {
ok: true,
dry_run: true,
data: SyncDryRunJsonData {
stages: vec![
SyncDryRunStage {
name: "ingest_issues".to_string(),
would_run: true,
preview: Some(result.issues_preview.clone()),
},
SyncDryRunStage {
name: "ingest_mrs".to_string(),
would_run: true,
preview: Some(result.mrs_preview.clone()),
},
SyncDryRunStage {
name: "generate_docs".to_string(),
would_run: result.would_generate_docs,
preview: None,
},
SyncDryRunStage {
name: "embed".to_string(),
would_run: result.would_embed,
preview: None,
},
],
},
};
match serde_json::to_string(&output) {
Ok(json) => println!("{json}"),
Err(e) => eprintln!("Error serializing to JSON: {e}"),
}
}
#[cfg(test)]
#[path = "sync_tests.rs"]
mod tests;

View File

@@ -0,0 +1,380 @@
/// CLI-derived options controlling a sync run.
#[derive(Debug, Default)]
pub struct SyncOptions {
    // Full re-sync instead of incremental; also passed to ingest and embed.
    pub full: bool,
    // Forwarded to run_ingest — presumably forces re-processing; see ingest.
    pub force: bool,
    // Skip the embedding stage.
    pub no_embed: bool,
    // Skip document regeneration.
    pub no_docs: bool,
    // NOTE(review): not consulted in run_sync itself — confirm where it applies.
    pub no_events: bool,
    // Machine-readable output: silent ingest display, JSON printing.
    pub robot_mode: bool,
    // Preview the run without making changes.
    pub dry_run: bool,
    // Explicit issue IIDs; non-empty routes to the surgical pipeline.
    pub issue_iids: Vec<u64>,
    // Explicit MR IIDs; non-empty routes to the surgical pipeline.
    pub mr_iids: Vec<u64>,
    // Optional project scope (e.g. "group/repo").
    pub project: Option<String>,
    // Surgical mode: stop after the preflight fetch.
    pub preflight_only: bool,
}
impl SyncOptions {
    /// Upper bound on the combined number of issue + MR IIDs accepted for a
    /// surgical sync.
    pub const MAX_SURGICAL_TARGETS: usize = 100;

    /// A run is "surgical" when any explicit issue or MR IIDs were supplied.
    pub fn is_surgical(&self) -> bool {
        !(self.issue_iids.is_empty() && self.mr_iids.is_empty())
    }
}
/// The explicit IIDs targeted by a surgical sync, echoed in JSON output.
#[derive(Debug, Default, Serialize)]
pub struct SurgicalIids {
    pub issues: Vec<u64>,
    pub merge_requests: Vec<u64>,
}
/// Per-entity outcome of a surgical sync.
#[derive(Debug, Serialize)]
pub struct EntitySyncResult {
    // "issue" or "merge_request" (see sync tests for observed values).
    pub entity_type: String,
    pub iid: u64,
    // e.g. "synced", "skipped_toctou" — observed in the test suite.
    pub outcome: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
    // Why a TOCTOU check skipped this entity, when applicable.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub toctou_reason: Option<String>,
}
/// Aggregated counters for a sync run; serialized for robot-mode output.
#[derive(Debug, Default, Serialize)]
pub struct SyncResult {
    // Tracing run id; emitted separately in the JSON meta, so skipped here.
    #[serde(skip)]
    pub run_id: String,
    pub issues_updated: usize,
    pub mrs_updated: usize,
    pub discussions_fetched: usize,
    pub resource_events_fetched: usize,
    pub resource_events_failed: usize,
    pub mr_diffs_fetched: usize,
    pub mr_diffs_failed: usize,
    pub documents_regenerated: usize,
    pub documents_errored: usize,
    pub documents_embedded: usize,
    pub embedding_failed: usize,
    pub status_enrichment_errors: usize,
    pub statuses_enriched: usize,
    // Surgical-mode fields: present in JSON only when a surgical sync ran.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub surgical_mode: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub surgical_iids: Option<SurgicalIids>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub entity_results: Option<Vec<EntitySyncResult>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub preflight_only: Option<bool>,
    // Per-project breakdowns used only for terminal rendering, never JSON.
    #[serde(skip)]
    pub issue_projects: Vec<ProjectSummary>,
    #[serde(skip)]
    pub mr_projects: Vec<ProjectSummary>,
}
/// Alias for [`Theme::color_icon`] to keep call sites concise.
/// `has_errors` presumably selects the error/warning coloring — see
/// `Theme::color_icon` for the exact behavior.
fn color_icon(icon: &str, has_errors: bool) -> String {
    Theme::color_icon(icon, has_errors)
}
/// Run the standard sync pipeline: ingest issues, ingest merge requests,
/// regenerate documents, then embed them.
///
/// Dispatches to the surgical pipeline when explicit IIDs were provided and
/// to a read-only preview when `options.dry_run` is set. `signal` is checked
/// between stages; on shutdown the partial result accumulated so far is
/// returned. A `run_id` of `None` generates a fresh 8-char id for tracing.
///
/// # Errors
/// Propagates failures from the ingest and docs stages. An embedding failure
/// is logged and skipped rather than aborting the run.
pub async fn run_sync(
    config: &Config,
    options: SyncOptions,
    run_id: Option<&str>,
    signal: &ShutdownSignal,
) -> Result<SyncResult> {
    // Surgical dispatch: if any IIDs specified, route to surgical pipeline
    if options.is_surgical() {
        return run_sync_surgical(config, options, run_id, signal).await;
    }
    // Declared before the match so the borrow of the generated id lives long
    // enough; only the first 8 hex chars are used.
    let generated_id;
    let run_id = match run_id {
        Some(id) => id,
        None => {
            generated_id = uuid::Uuid::new_v4().simple().to_string();
            &generated_id[..8]
        }
    };
    let span = tracing::info_span!("sync", %run_id);
    async move {
        let mut result = SyncResult {
            run_id: run_id.to_string(),
            ..SyncResult::default()
        };
        // Handle dry_run mode - show preview without making any changes
        if options.dry_run {
            return run_sync_dry_run(config, &options).await;
        }
        let ingest_display = if options.robot_mode {
            IngestDisplay::silent()
        } else {
            IngestDisplay::progress_only()
        };
        // ── Stage: Issues ──
        let stage_start = Instant::now();
        let spinner = stage_spinner_v2(Icons::sync(), "Issues", "fetching...", options.robot_mode);
        debug!("Sync: ingesting issues");
        let issues_result = run_ingest(
            config,
            "issues",
            None,
            options.force,
            options.full,
            false, // dry_run - sync has its own dry_run handling
            ingest_display,
            Some(spinner.clone()),
            signal,
        )
        .await?;
        // Fold the ingest counters into the aggregate result.
        result.issues_updated = issues_result.issues_upserted;
        result.discussions_fetched += issues_result.discussions_fetched;
        result.resource_events_fetched += issues_result.resource_events_fetched;
        result.resource_events_failed += issues_result.resource_events_failed;
        result.status_enrichment_errors += issues_result.status_enrichment_errors;
        for sep in &issues_result.status_enrichment_projects {
            result.statuses_enriched += sep.enriched;
        }
        result.issue_projects = issues_result.project_summaries;
        let issues_elapsed = stage_start.elapsed();
        if !options.robot_mode {
            // Status enrichment gets its own line + per-project rows,
            // printed before the Issues stage line.
            let (status_summary, status_has_errors) =
                summarize_status_enrichment(&issues_result.status_enrichment_projects);
            let status_icon = color_icon(
                if status_has_errors {
                    Icons::warning()
                } else {
                    Icons::success()
                },
                status_has_errors,
            );
            let mut status_lines = vec![format_stage_line(
                &status_icon,
                "Status",
                &status_summary,
                issues_elapsed,
            )];
            status_lines.extend(status_sub_rows(&issues_result.status_enrichment_projects));
            print_static_lines(&status_lines);
        }
        let mut issues_summary = format!(
            "{} issues from {} {}",
            format_number(result.issues_updated as i64),
            issues_result.projects_synced,
            if issues_result.projects_synced == 1 { "project" } else { "projects" }
        );
        append_failures(
            &mut issues_summary,
            &[
                ("event failures", issues_result.resource_events_failed),
                ("status errors", issues_result.status_enrichment_errors),
            ],
        );
        let issues_icon = color_icon(
            if issues_result.resource_events_failed > 0 || issues_result.status_enrichment_errors > 0
            {
                Icons::warning()
            } else {
                Icons::success()
            },
            issues_result.resource_events_failed > 0 || issues_result.status_enrichment_errors > 0,
        );
        if options.robot_mode {
            emit_stage_line(&spinner, &issues_icon, "Issues", &issues_summary, issues_elapsed);
        } else {
            let sub_rows = issue_sub_rows(&result.issue_projects);
            emit_stage_block(
                &spinner,
                &issues_icon,
                "Issues",
                &issues_summary,
                issues_elapsed,
                &sub_rows,
            );
        }
        if signal.is_cancelled() {
            debug!("Shutdown requested after issues stage, returning partial sync results");
            return Ok(result);
        }
        // ── Stage: MRs ──
        let stage_start = Instant::now();
        let spinner = stage_spinner_v2(Icons::sync(), "MRs", "fetching...", options.robot_mode);
        debug!("Sync: ingesting merge requests");
        let mrs_result = run_ingest(
            config,
            "mrs",
            None,
            options.force,
            options.full,
            false, // dry_run - sync has its own dry_run handling
            ingest_display,
            Some(spinner.clone()),
            signal,
        )
        .await?;
        result.mrs_updated = mrs_result.mrs_upserted;
        result.discussions_fetched += mrs_result.discussions_fetched;
        result.resource_events_fetched += mrs_result.resource_events_fetched;
        result.resource_events_failed += mrs_result.resource_events_failed;
        result.mr_diffs_fetched += mrs_result.mr_diffs_fetched;
        result.mr_diffs_failed += mrs_result.mr_diffs_failed;
        result.mr_projects = mrs_result.project_summaries;
        let mrs_elapsed = stage_start.elapsed();
        let mut mrs_summary = format!(
            "{} merge requests from {} {}",
            format_number(result.mrs_updated as i64),
            mrs_result.projects_synced,
            if mrs_result.projects_synced == 1 { "project" } else { "projects" }
        );
        append_failures(
            &mut mrs_summary,
            &[
                ("event failures", mrs_result.resource_events_failed),
                ("diff failures", mrs_result.mr_diffs_failed),
            ],
        );
        let mrs_icon = color_icon(
            if mrs_result.resource_events_failed > 0 || mrs_result.mr_diffs_failed > 0 {
                Icons::warning()
            } else {
                Icons::success()
            },
            mrs_result.resource_events_failed > 0 || mrs_result.mr_diffs_failed > 0,
        );
        if options.robot_mode {
            emit_stage_line(&spinner, &mrs_icon, "MRs", &mrs_summary, mrs_elapsed);
        } else {
            let sub_rows = mr_sub_rows(&result.mr_projects);
            emit_stage_block(&spinner, &mrs_icon, "MRs", &mrs_summary, mrs_elapsed, &sub_rows);
        }
        if signal.is_cancelled() {
            debug!("Shutdown requested after MRs stage, returning partial sync results");
            return Ok(result);
        }
        // ── Stage: Docs ──
        if !options.no_docs {
            let stage_start = Instant::now();
            let spinner = stage_spinner_v2(Icons::sync(), "Docs", "generating...", options.robot_mode);
            debug!("Sync: generating documents");
            // Progress callback drives a nested bar once the total is known.
            let docs_bar = nested_progress("Docs", 0, options.robot_mode);
            let docs_bar_clone = docs_bar.clone();
            let docs_cb: Box<dyn Fn(usize, usize)> = Box::new(move |processed, total| {
                if total > 0 {
                    docs_bar_clone.set_length(total as u64);
                    docs_bar_clone.set_position(processed as u64);
                }
            });
            let docs_result = run_generate_docs(config, options.full, None, Some(docs_cb))?;
            result.documents_regenerated = docs_result.regenerated;
            result.documents_errored = docs_result.errored;
            docs_bar.finish_and_clear();
            let mut docs_summary = format!(
                "{} documents generated",
                format_number(result.documents_regenerated as i64),
            );
            append_failures(&mut docs_summary, &[("errors", docs_result.errored)]);
            let docs_icon = color_icon(
                if docs_result.errored > 0 {
                    Icons::warning()
                } else {
                    Icons::success()
                },
                docs_result.errored > 0,
            );
            emit_stage_line(&spinner, &docs_icon, "Docs", &docs_summary, stage_start.elapsed());
        } else {
            debug!("Sync: skipping document generation (--no-docs)");
        }
        // ── Stage: Embed ──
        if !options.no_embed {
            let stage_start = Instant::now();
            let spinner = stage_spinner_v2(Icons::sync(), "Embed", "preparing...", options.robot_mode);
            debug!("Sync: embedding documents");
            let embed_bar = nested_progress("Embed", 0, options.robot_mode);
            let embed_bar_clone = embed_bar.clone();
            let embed_cb: Box<dyn Fn(usize, usize)> = Box::new(move |processed, total| {
                if total > 0 {
                    embed_bar_clone.set_length(total as u64);
                    embed_bar_clone.set_position(processed as u64);
                }
            });
            // Embedding failures do not abort the pipeline: the error is
            // rendered as a skipped stage and logged as a warning.
            match run_embed(config, options.full, false, Some(embed_cb), signal).await {
                Ok(embed_result) => {
                    result.documents_embedded = embed_result.docs_embedded;
                    result.embedding_failed = embed_result.failed;
                    embed_bar.finish_and_clear();
                    let mut embed_summary = format!(
                        "{} chunks embedded",
                        format_number(embed_result.chunks_embedded as i64),
                    );
                    let mut tail_parts = Vec::new();
                    if embed_result.failed > 0 {
                        tail_parts.push(format!("{} failed", embed_result.failed));
                    }
                    if embed_result.skipped > 0 {
                        tail_parts.push(format!("{} skipped", embed_result.skipped));
                    }
                    if !tail_parts.is_empty() {
                        embed_summary.push_str(&format!(" ({})", tail_parts.join(", ")));
                    }
                    let embed_icon = color_icon(
                        if embed_result.failed > 0 {
                            Icons::warning()
                        } else {
                            Icons::success()
                        },
                        embed_result.failed > 0,
                    );
                    emit_stage_line(
                        &spinner,
                        &embed_icon,
                        "Embed",
                        &embed_summary,
                        stage_start.elapsed(),
                    );
                }
                Err(e) => {
                    embed_bar.finish_and_clear();
                    let warn_summary = format!("skipped ({})", e);
                    let warn_icon = color_icon(Icons::warning(), true);
                    emit_stage_line(
                        &spinner,
                        &warn_icon,
                        "Embed",
                        &warn_summary,
                        stage_start.elapsed(),
                    );
                    warn!(error = %e, "Embedding stage failed (Ollama may be unavailable), continuing");
                }
            }
        } else {
            debug!("Sync: skipping embedding (--no-embed)");
        }
        debug!(
            issues = result.issues_updated,
            mrs = result.mrs_updated,
            discussions = result.discussions_fetched,
            resource_events = result.resource_events_fetched,
            resource_events_failed = result.resource_events_failed,
            mr_diffs = result.mr_diffs_fetched,
            mr_diffs_failed = result.mr_diffs_failed,
            docs = result.documents_regenerated,
            embedded = result.documents_embedded,
            "Sync pipeline complete"
        );
        Ok(result)
    }
    .instrument(span)
    .await
}

View File

@@ -12,11 +12,11 @@ use crate::core::lock::{AppLock, LockOptions};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::shutdown::ShutdownSignal;
use crate::core::sync_run::SyncRunRecorder;
use crate::documents::{SourceType, regenerate_dirty_documents_for_sources};
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
use crate::embedding::pipeline::{DEFAULT_EMBED_CONCURRENCY, embed_documents_by_ids};
use crate::gitlab::GitLabClient;
use crate::ingestion::storage::sync_run::SyncRunRecorder;
use crate::ingestion::surgical::{
fetch_dependents_for_issue, fetch_dependents_for_mr, ingest_issue_by_iid, ingest_mr_by_iid,
preflight_fetch,

View File

@@ -0,0 +1,268 @@
use super::*;
fn default_options() -> SyncOptions {
SyncOptions {
full: false,
force: false,
no_embed: false,
no_docs: false,
no_events: false,
robot_mode: false,
dry_run: false,
issue_iids: vec![],
mr_iids: vec![],
project: None,
preflight_only: false,
}
}
#[test]
fn append_failures_skips_zeroes() {
    // All-zero counts must leave the summary untouched.
    let mut text = String::from("base");
    append_failures(&mut text, &[("errors", 0), ("failures", 0)]);
    assert_eq!(text, "base");
}
#[test]
fn append_failures_renders_non_zero_counts() {
    // Non-zero counts are appended after the original text.
    let mut text = String::from("base");
    append_failures(&mut text, &[("errors", 2), ("failures", 1)]);
    for expected in ["base", "2 errors", "1 failures"] {
        assert!(text.contains(expected), "missing {expected:?} in {text:?}");
    }
}
#[test]
fn summarize_status_enrichment_reports_skipped_when_all_skipped() {
    // One project in "skipped" mode with zero counts: the summary should
    // show the zero count plus the "skipped" suffix, with no error flag.
    let projects = vec![ProjectStatusEnrichment {
        path: "vs/typescript-code".to_string(),
        mode: "skipped".to_string(),
        reason: None,
        seen: 0,
        enriched: 0,
        cleared: 0,
        without_widget: 0,
        partial_errors: 0,
        first_partial_error: None,
        error: None,
    }];
    let (summary, has_errors) = summarize_status_enrichment(&projects);
    assert!(summary.contains("0 statuses updated"));
    assert!(summary.contains("skipped"));
    assert!(!has_errors);
}
#[test]
fn summarize_status_enrichment_reports_errors() {
    // 2 partial errors + 1 top-level error should be summed to "3 errors",
    // and the error flag should be set.
    let projects = vec![ProjectStatusEnrichment {
        path: "vs/typescript-code".to_string(),
        mode: "fetched".to_string(),
        reason: None,
        seen: 3,
        enriched: 1,
        cleared: 1,
        without_widget: 0,
        partial_errors: 2,
        first_partial_error: None,
        error: Some("boom".to_string()),
    }];
    let (summary, has_errors) = summarize_status_enrichment(&projects);
    assert!(summary.contains("1 statuses updated"));
    assert!(summary.contains("1 cleared"));
    assert!(summary.contains("3 seen"));
    assert!(summary.contains("3 errors"));
    assert!(has_errors);
}
#[test]
fn should_print_timings_only_when_enabled_and_non_empty() {
    // Printing requires both the flag AND at least one recorded stage.
    let stages = vec![StageTiming {
        name: "x".to_string(),
        elapsed_ms: 10,
        items_processed: 0,
        items_skipped: 0,
        errors: 0,
        rate_limit_hits: 0,
        retries: 0,
        project: None,
        sub_stages: vec![],
    }];
    assert!(should_print_timings(true, &stages));
    assert!(!should_print_timings(false, &stages));
    assert!(!should_print_timings(true, &[]));
}
#[test]
fn issue_sub_rows_include_project_and_statuses() {
    // statuses_seen > 0 is enough to surface the "statuses updated" segment,
    // which reports the enriched count (1), not the seen count (5).
    let rows = issue_sub_rows(&[ProjectSummary {
        path: "vs/typescript-code".to_string(),
        items_upserted: 2,
        discussions_synced: 0,
        events_fetched: 0,
        events_failed: 0,
        statuses_enriched: 1,
        statuses_seen: 5,
        status_errors: 0,
        mr_diffs_fetched: 0,
        mr_diffs_failed: 0,
    }]);
    assert_eq!(rows.len(), 1);
    assert!(rows[0].contains("vs/typescript-code"));
    assert!(rows[0].contains("2 issues"));
    assert!(rows[0].contains("1 statuses updated"));
}
#[test]
fn mr_sub_rows_include_project_and_diff_failures() {
    // MR rows should show the path, pluralized MR count, diff count, and the
    // diff-failure warning segment.
    let rows = mr_sub_rows(&[ProjectSummary {
        path: "vs/python-code".to_string(),
        items_upserted: 3,
        discussions_synced: 0,
        events_fetched: 0,
        events_failed: 0,
        statuses_enriched: 0,
        statuses_seen: 0,
        status_errors: 0,
        mr_diffs_fetched: 4,
        mr_diffs_failed: 1,
    }]);
    assert_eq!(rows.len(), 1);
    assert!(rows[0].contains("vs/python-code"));
    assert!(rows[0].contains("3 MRs"));
    assert!(rows[0].contains("4 diffs"));
    assert!(rows[0].contains("1 diff failures"));
}
#[test]
fn status_sub_rows_include_project_and_skip_reason() {
    // A skipped project with a reason should render "skipped (<reason>)".
    let rows = status_sub_rows(&[ProjectStatusEnrichment {
        path: "vs/python-code".to_string(),
        mode: "skipped".to_string(),
        reason: Some("disabled".to_string()),
        seen: 0,
        enriched: 0,
        cleared: 0,
        without_widget: 0,
        partial_errors: 0,
        first_partial_error: None,
        error: None,
    }]);
    assert_eq!(rows.len(), 1);
    assert!(rows[0].contains("vs/python-code"));
    assert!(rows[0].contains("0 statuses updated"));
    assert!(rows[0].contains("skipped (disabled)"));
}
#[test]
fn is_surgical_with_issues() {
    // A single issue IID is enough to flip into surgical mode.
    let mut opts = default_options();
    opts.issue_iids.push(1);
    assert!(opts.is_surgical());
}
#[test]
fn is_surgical_with_mrs() {
    // A single MR IID is enough to flip into surgical mode.
    let mut opts = default_options();
    opts.mr_iids.push(10);
    assert!(opts.is_surgical());
}
#[test]
fn is_surgical_empty() {
    // No IIDs at all → a normal (non-surgical) sync.
    assert!(!default_options().is_surgical());
}
#[test]
fn max_surgical_targets_is_100() {
    // Guards against accidental changes to the surgical target cap.
    assert_eq!(SyncOptions::MAX_SURGICAL_TARGETS, 100);
}
#[test]
fn sync_result_default_omits_surgical_fields() {
    // None-valued surgical fields must be dropped from the JSON entirely.
    let json = serde_json::to_value(SyncResult::default()).unwrap();
    for field in [
        "surgical_mode",
        "surgical_iids",
        "entity_results",
        "preflight_only",
    ] {
        assert!(json.get(field).is_none(), "{field} should be omitted");
    }
}
#[test]
fn sync_result_with_surgical_fields_serializes_correctly() {
    // When the surgical fields are populated they must all appear in the
    // JSON with their values (including the skipped-TOCTOU outcome).
    let result = SyncResult {
        surgical_mode: Some(true),
        surgical_iids: Some(SurgicalIids {
            issues: vec![7, 42],
            merge_requests: vec![10],
        }),
        entity_results: Some(vec![
            EntitySyncResult {
                entity_type: "issue".to_string(),
                iid: 7,
                outcome: "synced".to_string(),
                error: None,
                toctou_reason: None,
            },
            EntitySyncResult {
                entity_type: "issue".to_string(),
                iid: 42,
                outcome: "skipped_toctou".to_string(),
                error: None,
                toctou_reason: Some("updated_at changed".to_string()),
            },
        ]),
        preflight_only: Some(false),
        ..SyncResult::default()
    };
    let json = serde_json::to_value(&result).unwrap();
    assert_eq!(json["surgical_mode"], true);
    assert_eq!(json["surgical_iids"]["issues"], serde_json::json!([7, 42]));
    assert_eq!(json["entity_results"].as_array().unwrap().len(), 2);
    assert_eq!(json["entity_results"][1]["outcome"], "skipped_toctou");
    assert_eq!(json["preflight_only"], false);
}
#[test]
fn entity_sync_result_omits_none_fields() {
    // Optional fields left as None must be skipped during serialization,
    // while always-present fields still show up.
    let json = serde_json::to_value(EntitySyncResult {
        entity_type: "merge_request".to_string(),
        iid: 10,
        outcome: "synced".to_string(),
        error: None,
        toctou_reason: None,
    })
    .unwrap();

    assert!(json.get("error").is_none());
    assert!(json.get("toctou_reason").is_none());
    assert!(json.get("entity_type").is_some());
}
#[test]
fn is_surgical_with_both_issues_and_mrs() {
    // Mixing issue and MR targets is still surgical mode.
    let mut opts = default_options();
    opts.issue_iids = vec![1, 2];
    opts.mr_iids = vec![10];
    assert!(opts.is_surgical());
}
#[test]
fn is_not_surgical_with_only_project() {
    // Scoping to a project without naming specific IIDs is a full sync.
    let mut opts = default_options();
    opts.project = Some("group/repo".to_string());
    assert!(!opts.is_surgical());
}

View File

@@ -8,13 +8,13 @@ use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::{ms_to_iso, parse_since};
use crate::core::timeline::{
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
use crate::timeline::collect::collect_events;
use crate::timeline::expand::expand_timeline;
use crate::timeline::seed::{seed_timeline, seed_timeline_direct};
use crate::timeline::{
EntityRef, ExpandedEntityRef, TimelineEvent, TimelineEventType, TimelineResult, UnresolvedRef,
};
use crate::core::timeline_collect::collect_events;
use crate::core::timeline_expand::expand_timeline;
use crate::core::timeline_seed::{seed_timeline, seed_timeline_direct};
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
/// Parameters for running the timeline pipeline.
pub struct TimelineParams {

View File

@@ -1,12 +1,5 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;
fn setup_test_db() -> Connection {
let conn = create_connection(Path::new(":memory:")).unwrap();
run_migrations(&conn).unwrap();
conn
}
use crate::test_support::{insert_project, setup_test_db};
fn default_scoring() -> ScoringConfig {
ScoringConfig::default()
@@ -17,20 +10,6 @@ fn test_as_of_ms() -> i64 {
now_ms() + 1000
}
fn insert_project(conn: &Connection, id: i64, path: &str) {
conn.execute(
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url)
VALUES (?1, ?2, ?3, ?4)",
rusqlite::params![
id,
id * 100,
path,
format!("https://git.example.com/{}", path)
],
)
.unwrap();
}
fn insert_mr(conn: &Connection, id: i64, project_id: i64, iid: i64, author: &str, state: &str) {
let ts = now_ms();
conn.execute(