Files
gitlore/src/main.rs
teernisse 35c828ba73 feat(bd-91j1): enhance robot-docs with quick_start and example_output
Add quick_start section with glab equivalents, lore-exclusive features,
and read/write split guidance. Add example_output to issues, mrs, search,
and who commands. Update strip_schemas to also strip example_output in
brief mode. Update beads tracking state.

Closes: bd-91j1
2026-02-12 12:09:44 -05:00

2646 lines
91 KiB
Rust

use clap::Parser;
use console::style;
use dialoguer::{Confirm, Input};
use serde::Serialize;
use strsim::jaro_winkler;
use tracing_subscriber::Layer;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use lore::Config;
use lore::cli::autocorrect::{self, CorrectionResult};
use lore::cli::commands::{
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
SearchCliFilters, SyncOptions, TimelineParams, open_issue_in_browser, open_mr_in_browser,
print_count, print_count_json, print_doctor_results, print_drift_human, print_drift_json,
print_dry_run_preview, print_dry_run_preview_json, print_embed, print_embed_json,
print_event_count, print_event_count_json, print_generate_docs, print_generate_docs_json,
print_ingest_summary, print_ingest_summary_json, print_list_issues, print_list_issues_json,
print_list_mrs, print_list_mrs_json, print_search_results, print_search_results_json,
print_show_issue, print_show_issue_json, print_show_mr, print_show_mr_json, print_stats,
print_stats_json, print_sync, print_sync_json, print_sync_status, print_sync_status_json,
print_timeline, print_timeline_json_with_meta, print_who_human, print_who_json, run_auth_test,
run_count, run_count_events, run_doctor, run_drift, run_embed, run_generate_docs, run_ingest,
run_ingest_dry_run, run_init, run_list_issues, run_list_mrs, run_search, run_show_issue,
run_show_mr, run_stats, run_sync, run_sync_status, run_timeline, run_who,
};
use lore::cli::robot::{RobotMeta, strip_schemas};
use lore::cli::{
Cli, Commands, CountArgs, EmbedArgs, GenerateDocsArgs, IngestArgs, IssuesArgs, MrsArgs,
SearchArgs, StatsArgs, SyncArgs, TimelineArgs, WhoArgs,
};
use lore::core::db::{
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
};
use lore::core::dependent_queue::release_all_locked_jobs;
use lore::core::error::{LoreError, RobotErrorOutput};
use lore::core::logging;
use lore::core::metrics::MetricsLayer;
use lore::core::paths::{get_config_path, get_db_path, get_log_dir};
use lore::core::shutdown::ShutdownSignal;
use lore::core::sync_run::SyncRunRecorder;
/// CLI entry point.
///
/// Startup phases: SIGPIPE reset → early robot-mode detection → argv
/// auto-correction → clap parse → tracing/logging setup (stderr layer in
/// text or JSON per `--log-format`, optional daily-rolling JSON file layer,
/// metrics layer) → NO_COLOR / --color handling → command dispatch.
/// Any handler error is routed through `handle_error`, which prints and
/// exits with the appropriate code.
#[tokio::main]
async fn main() {
    // SAFETY: a single libc signal-disposition call made once at startup,
    // before anything writes to stdout. Restoring SIG_DFL makes the process
    // terminate quietly when its output pipe closes (e.g. `lore ... | head`)
    // instead of panicking on a broken-pipe write error.
    #[cfg(unix)]
    unsafe {
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }
    // Phase 1: Early robot mode detection for structured clap errors
    let robot_mode_early = Cli::detect_robot_mode_from_env();
    // Phase 1.5: Pre-clap arg correction for agent typo tolerance
    let raw_args: Vec<String> = std::env::args().collect();
    let correction_result = autocorrect::correct_args(raw_args, robot_mode_early);
    // Emit correction warnings to stderr (before clap parsing, so they appear
    // even if clap still fails on something else)
    if !correction_result.corrections.is_empty() {
        emit_correction_warnings(&correction_result, robot_mode_early);
    }
    let cli = match Cli::try_parse_from(&correction_result.args) {
        Ok(cli) => cli,
        Err(e) => {
            // Diverges: prints the error (structured JSON in robot mode)
            // and exits the process.
            handle_clap_error(e, robot_mode_early, &correction_result);
        }
    };
    let robot_mode = cli.is_robot_mode();
    // Load only the logging section of the config; a missing or broken
    // config file still yields usable default logging settings.
    let logging_config = lore::Config::load(cli.config.as_deref())
        .map(|c| c.logging)
        .unwrap_or_default();
    let log_dir = get_log_dir(logging_config.log_dir.as_deref());
    if logging_config.file_logging && logging_config.retention_days > 0 {
        logging::cleanup_old_logs(&log_dir, logging_config.retention_days);
    }
    let stderr_filter = logging::build_stderr_filter(cli.verbose, cli.quiet);
    let metrics_layer = MetricsLayer::new();
    let registry = tracing_subscriber::registry();
    // Must stay alive for the whole process: dropping the guard shuts down
    // the non-blocking file-log writer.
    let _file_guard: Option<tracing_appender::non_blocking::WorkerGuard>;
    if cli.log_format == "json" {
        // JSON on stderr; SuspendingWriter pauses log output while
        // interactive progress bars are being drawn.
        let stderr_layer = tracing_subscriber::fmt::layer()
            .json()
            .with_writer(lore::cli::progress::SuspendingWriter)
            .with_filter(stderr_filter);
        if logging_config.file_logging {
            let file_filter = logging::build_file_filter();
            std::fs::create_dir_all(&log_dir).ok();
            let file_appender = tracing_appender::rolling::daily(&log_dir, "lore");
            let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
            _file_guard = Some(guard);
            let file_layer = tracing_subscriber::fmt::layer()
                .json()
                .with_writer(non_blocking)
                .with_filter(file_filter);
            registry
                .with(stderr_layer)
                .with(file_layer)
                .with(metrics_layer.clone())
                .init();
        } else {
            _file_guard = None;
            registry
                .with(stderr_layer)
                .with(metrics_layer.clone())
                .init();
        }
    } else {
        // Human-readable stderr; the file layer (when enabled) stays JSON.
        let stderr_layer = tracing_subscriber::fmt::layer()
            .with_target(false)
            .with_writer(lore::cli::progress::SuspendingWriter)
            .with_filter(stderr_filter);
        if logging_config.file_logging {
            let file_filter = logging::build_file_filter();
            std::fs::create_dir_all(&log_dir).ok();
            let file_appender = tracing_appender::rolling::daily(&log_dir, "lore");
            let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
            _file_guard = Some(guard);
            let file_layer = tracing_subscriber::fmt::layer()
                .json()
                .with_writer(non_blocking)
                .with_filter(file_filter);
            registry
                .with(stderr_layer)
                .with(file_layer)
                .with(metrics_layer.clone())
                .init();
        } else {
            _file_guard = None;
            registry
                .with(stderr_layer)
                .with(metrics_layer.clone())
                .init();
        }
    }
    // I1: Respect NO_COLOR convention (https://no-color.org/)
    if std::env::var("NO_COLOR").is_ok_and(|v| !v.is_empty()) {
        console::set_colors_enabled(false);
    } else {
        match cli.color.as_str() {
            "never" => console::set_colors_enabled(false),
            "always" => console::set_colors_enabled(true),
            "auto" => {}
            other => {
                eprintln!("Warning: unknown color mode '{}', using auto", other);
            }
        }
    }
    let quiet = cli.quiet;
    let result = match cli.command {
        // Phase 2: Handle no-args case - in robot mode, output robot-docs; otherwise show help
        None => {
            if robot_mode {
                handle_robot_docs(robot_mode, false)
            } else {
                use clap::CommandFactory;
                let mut cmd = Cli::command();
                cmd.print_help().ok();
                println!();
                Ok(())
            }
        }
        Some(Commands::Issues(args)) => handle_issues(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Mrs(args)) => handle_mrs(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Search(args)) => {
            handle_search(cli.config.as_deref(), args, robot_mode).await
        }
        Some(Commands::Timeline(args)) => handle_timeline(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Who(args)) => handle_who(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Drift {
            entity_type,
            iid,
            threshold,
            project,
        }) => {
            handle_drift(
                cli.config.as_deref(),
                &entity_type,
                iid,
                threshold,
                project.as_deref(),
                robot_mode,
            )
            .await
        }
        Some(Commands::Stats(args)) => handle_stats(cli.config.as_deref(), args, robot_mode).await,
        Some(Commands::Embed(args)) => handle_embed(cli.config.as_deref(), args, robot_mode).await,
        Some(Commands::Sync(args)) => {
            handle_sync_cmd(cli.config.as_deref(), args, robot_mode, &metrics_layer).await
        }
        Some(Commands::Ingest(args)) => {
            handle_ingest(
                cli.config.as_deref(),
                args,
                robot_mode,
                quiet,
                &metrics_layer,
            )
            .await
        }
        Some(Commands::Count(args)) => handle_count(cli.config.as_deref(), args, robot_mode).await,
        Some(Commands::Status) => handle_sync_status_cmd(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Auth) => handle_auth_test(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Doctor) => handle_doctor(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Version) => handle_version(robot_mode),
        Some(Commands::Completions { shell }) => handle_completions(&shell),
        Some(Commands::Init {
            force,
            non_interactive,
            gitlab_url,
            token_env_var,
            projects,
            default_project,
        }) => {
            handle_init(
                cli.config.as_deref(),
                force,
                non_interactive,
                robot_mode,
                gitlab_url,
                token_env_var,
                projects,
                default_project,
            )
            .await
        }
        Some(Commands::GenerateDocs(args)) => {
            handle_generate_docs(cli.config.as_deref(), args, robot_mode).await
        }
        Some(Commands::Backup) => handle_backup(robot_mode),
        Some(Commands::Reset { yes: _ }) => handle_reset(robot_mode),
        Some(Commands::Migrate) => handle_migrate(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Health) => handle_health(cli.config.as_deref(), robot_mode).await,
        Some(Commands::RobotDocs { brief }) => handle_robot_docs(robot_mode, brief),
        // Deprecated alias for `issues` / `mrs`: warn, then delegate.
        Some(Commands::List {
            entity,
            limit,
            project,
            state,
            author,
            assignee,
            label,
            milestone,
            since,
            due_before,
            has_due_date,
            sort,
            order,
            open,
            draft,
            no_draft,
            reviewer,
            target_branch,
            source_branch,
        }) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore list' is deprecated, use 'lore issues' or 'lore mrs'","successor":"issues / mrs"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    style("warning: 'lore list' is deprecated, use 'lore issues' or 'lore mrs'")
                        .yellow()
                );
            }
            handle_list_compat(
                cli.config.as_deref(),
                &entity,
                limit,
                project.as_deref(),
                state.as_deref(),
                author.as_deref(),
                assignee.as_deref(),
                label.as_deref(),
                milestone.as_deref(),
                since.as_deref(),
                due_before.as_deref(),
                has_due_date,
                &sort,
                &order,
                open,
                robot_mode,
                draft,
                no_draft,
                reviewer.as_deref(),
                target_branch.as_deref(),
                source_branch.as_deref(),
            )
            .await
        }
        // Deprecated alias for `issues <iid>` / `mrs <iid>`: warn, then delegate.
        Some(Commands::Show {
            entity,
            iid,
            project,
        }) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore show' is deprecated, use 'lore {entity}s {iid}'","successor":"{entity}s"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    style(format!(
                        "warning: 'lore show' is deprecated, use 'lore {}s {}'",
                        entity, iid
                    ))
                    .yellow()
                );
            }
            handle_show_compat(
                cli.config.as_deref(),
                &entity,
                iid,
                project.as_deref(),
                robot_mode,
            )
            .await
        }
        // Deprecated alias for `auth`: warn, then delegate.
        Some(Commands::AuthTest) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore auth-test' is deprecated, use 'lore auth'","successor":"auth"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    style("warning: 'lore auth-test' is deprecated, use 'lore auth'").yellow()
                );
            }
            handle_auth_test(cli.config.as_deref(), robot_mode).await
        }
        // Deprecated alias for `status`: warn, then delegate.
        Some(Commands::SyncStatus) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore sync-status' is deprecated, use 'lore status'","successor":"status"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    style("warning: 'lore sync-status' is deprecated, use 'lore status'").yellow()
                );
            }
            handle_sync_status_cmd(cli.config.as_deref(), robot_mode).await
        }
    };
    if let Err(e) = result {
        handle_error(e, robot_mode);
    }
}
/// Minimal robot-mode error envelope, used when the error is not a
/// `LoreError` or when serializing the richer `RobotErrorOutput` fails.
#[derive(Serialize)]
struct FallbackErrorOutput {
    error: FallbackError,
}
/// Inner payload of [`FallbackErrorOutput`]: a stable machine-readable
/// code plus a human-readable message.
#[derive(Serialize)]
struct FallbackError {
    // Always "INTERNAL_ERROR" at the current call sites in this file.
    code: String,
    message: String,
}
/// Print a top-level command error and terminate the process.
///
/// `LoreError` values keep their own exit code (via `exit_code()`) and, in
/// robot mode, are rendered as structured JSON on stderr; any other boxed
/// error is reported as a generic INTERNAL_ERROR and exits with code 1.
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
    if let Some(gi_error) = e.downcast_ref::<LoreError>() {
        if robot_mode {
            let output = RobotErrorOutput::from(gi_error);
            eprintln!(
                "{}",
                serde_json::to_string(&output).unwrap_or_else(|_| {
                    // Rich output failed to serialize: degrade to the minimal
                    // fallback envelope, then to a hard-coded JSON literal.
                    let fallback = FallbackErrorOutput {
                        error: FallbackError {
                            code: "INTERNAL_ERROR".to_string(),
                            message: gi_error.to_string(),
                        },
                    };
                    serde_json::to_string(&fallback)
                        .unwrap_or_else(|_| r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#.to_string())
                })
            );
            std::process::exit(gi_error.exit_code());
        } else {
            eprintln!("{} {}", style("Error:").red(), gi_error);
            // Print an actionable hint when the error type provides one.
            if let Some(suggestion) = gi_error.suggestion() {
                eprintln!("{} {}", style("Hint:").yellow(), suggestion);
            }
            std::process::exit(gi_error.exit_code());
        }
    }
    // Not a LoreError: generic reporting with fixed exit code 1.
    if robot_mode {
        let output = FallbackErrorOutput {
            error: FallbackError {
                code: "INTERNAL_ERROR".to_string(),
                message: e.to_string(),
            },
        };
        eprintln!(
            "{}",
            serde_json::to_string(&output).unwrap_or_else(|_| {
                r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#
                    .to_string()
            })
        );
    } else {
        eprintln!("{} {}", style("Error:").red(), e);
    }
    std::process::exit(1);
}
/// Emit stderr warnings for any corrections applied during Phase 1.5.
///
/// Human mode prints one styled line per correction; robot mode emits a
/// single machine-readable JSON line carrying both the structured
/// corrections and human-readable "teaching" notes.
fn emit_correction_warnings(result: &CorrectionResult, robot_mode: bool) {
    if !robot_mode {
        for correction in &result.corrections {
            eprintln!(
                "{} {}",
                style("Auto-corrected:").yellow(),
                autocorrect::format_teaching_note(correction)
            );
        }
        return;
    }
    #[derive(Serialize)]
    struct CorrectionWarning<'a> {
        warning: CorrectionWarningInner<'a>,
    }
    #[derive(Serialize)]
    struct CorrectionWarningInner<'a> {
        r#type: &'static str,
        corrections: &'a [autocorrect::Correction],
        teaching: Vec<String>,
    }
    // Human-readable restatement of each correction, embedded alongside
    // the structured data so agents can surface it verbatim.
    let teaching: Vec<String> = result
        .corrections
        .iter()
        .map(autocorrect::format_teaching_note)
        .collect();
    let payload = CorrectionWarning {
        warning: CorrectionWarningInner {
            r#type: "ARG_CORRECTED",
            corrections: &result.corrections,
            teaching,
        },
    };
    // Warning emission is best-effort: serialization failure is ignored.
    if let Ok(json) = serde_json::to_string(&payload) {
        eprintln!("{json}");
    }
}
/// Phase 1 & 4: Handle clap parsing errors with structured JSON output in robot mode.
/// Also includes fuzzy command matching and flag-level suggestions.
///
/// Diverges: exits 0 for --help/--version (delegated to clap), exits 2 for
/// robot-mode parse errors, otherwise uses clap's own error exit.
fn handle_clap_error(e: clap::Error, robot_mode: bool, corrections: &CorrectionResult) -> ! {
    use clap::error::ErrorKind;
    // Always let clap handle --help and --version normally (print and exit 0).
    // These are intentional user actions, not errors, even when stdout is redirected.
    if matches!(e.kind(), ErrorKind::DisplayHelp | ErrorKind::DisplayVersion) {
        e.exit()
    }
    if robot_mode {
        let error_code = map_clap_error_kind(e.kind());
        let full_msg = e.to_string();
        // Clap messages span multiple lines; compress the first three into
        // one semicolon-joined string for the JSON payload.
        let message = full_msg
            .lines()
            .take(3)
            .collect::<Vec<_>>()
            .join("; ")
            .trim()
            .to_string();
        // Per-kind enrichment: (suggestion text, applied correction,
        // list of valid values), defaulting to a generic pointer at
        // `lore robot-docs`.
        let (suggestion, correction, valid_values) = match e.kind() {
            // Phase 4: Suggest similar command for unknown subcommands
            ErrorKind::InvalidSubcommand => {
                let suggestion = if let Some(invalid_cmd) = extract_invalid_subcommand(&e) {
                    suggest_similar_command(&invalid_cmd)
                } else {
                    "Run 'lore robot-docs' for valid commands".to_string()
                };
                (suggestion, None, None)
            }
            // Flag-level fuzzy matching for unknown flags
            ErrorKind::UnknownArgument => {
                let invalid_flag = extract_invalid_flag(&e);
                let similar = invalid_flag
                    .as_deref()
                    .and_then(|flag| autocorrect::suggest_similar_flag(flag, &corrections.args));
                let suggestion = if let Some(ref s) = similar {
                    format!("Did you mean '{s}'? Run 'lore robot-docs' for all flags")
                } else {
                    "Run 'lore robot-docs' for valid flags".to_string()
                };
                (suggestion, similar, None)
            }
            // Value-level suggestions for invalid enum values
            ErrorKind::InvalidValue => {
                let (flag, valid_vals) = extract_invalid_value_context(&e);
                let suggestion = if let Some(vals) = &valid_vals {
                    format!(
                        "Valid values: {}. Run 'lore robot-docs' for details",
                        vals.join(", ")
                    )
                } else if let Some(ref f) = flag {
                    if let Some(vals) = autocorrect::valid_values_for_flag(f) {
                        format!("Valid values for {f}: {}", vals.join(", "))
                    } else {
                        "Run 'lore robot-docs' for valid values".to_string()
                    }
                } else {
                    "Run 'lore robot-docs' for valid values".to_string()
                };
                // Also expose the value list itself, falling back to the
                // static autocorrect registry when clap's message lacked one.
                let vals_vec = valid_vals.or_else(|| {
                    flag.as_deref()
                        .and_then(autocorrect::valid_values_for_flag)
                        .map(|v| v.iter().map(|s| (*s).to_string()).collect())
                });
                (suggestion, None, vals_vec)
            }
            _ => (
                "Run 'lore robot-docs' for valid commands".to_string(),
                None,
                None,
            ),
        };
        let output = RobotErrorWithSuggestion {
            error: RobotErrorSuggestionData {
                code: error_code.to_string(),
                message,
                suggestion,
                correction,
                valid_values,
            },
        };
        eprintln!(
            "{}",
            serde_json::to_string(&output).unwrap_or_else(|_| {
                r#"{"error":{"code":"PARSE_ERROR","message":"Parse error"}}"#.to_string()
            })
        );
        // Exit 2: the conventional usage-error status.
        std::process::exit(2);
    } else {
        e.exit()
    }
}
/// Translate a clap parse-error kind into the stable, machine-readable
/// error code used in robot-mode JSON error envelopes.
fn map_clap_error_kind(kind: clap::error::ErrorKind) -> &'static str {
    use clap::error::ErrorKind;
    match kind {
        // Structural problems with the command line itself.
        ErrorKind::InvalidSubcommand => "UNKNOWN_COMMAND",
        ErrorKind::MissingSubcommand => "MISSING_COMMAND",
        ErrorKind::UnknownArgument => "UNKNOWN_FLAG",
        ErrorKind::MissingRequiredArgument => "MISSING_REQUIRED",
        // Problems with a flag's value.
        ErrorKind::InvalidValue | ErrorKind::ValueValidation => "INVALID_VALUE",
        ErrorKind::TooManyValues => "TOO_MANY_VALUES",
        ErrorKind::TooFewValues => "TOO_FEW_VALUES",
        ErrorKind::ArgumentConflict => "ARGUMENT_CONFLICT",
        // --help / --version reaching this path (normally handled earlier).
        ErrorKind::DisplayHelp | ErrorKind::DisplayVersion => "HELP_REQUESTED",
        // Anything else clap may produce now or in future versions.
        _ => "PARSE_ERROR",
    }
}
/// Extract the invalid subcommand from a clap error (Phase 4)
fn extract_invalid_subcommand(e: &clap::Error) -> Option<String> {
// Parse the error message to find the invalid subcommand
// Format is typically: "error: unrecognized subcommand 'foo'"
let msg = e.to_string();
if let Some(start) = msg.find('\'')
&& let Some(end) = msg[start + 1..].find('\'')
{
return Some(msg[start + 1..start + 1 + end].to_string());
}
None
}
/// Extract the invalid flag from a clap UnknownArgument error.
/// Format is typically: "error: unexpected argument '--xyzzy' found"
fn extract_invalid_flag(e: &clap::Error) -> Option<String> {
let msg = e.to_string();
if let Some(start) = msg.find('\'')
&& let Some(end) = msg[start + 1..].find('\'')
{
let value = &msg[start + 1..start + 1 + end];
if value.starts_with('-') {
return Some(value.to_string());
}
}
None
}
/// Extract flag name and valid values from a clap InvalidValue error.
/// Returns (flag_name, valid_values_if_available).
///
/// Clap renders these as:
///   "error: invalid value 'opend' for '--state <STATE>'"
///   "[possible values: opened, closed, merged, locked, all]"
fn extract_invalid_value_context(e: &clap::Error) -> (Option<String>, Option<Vec<String>>) {
    const FOR_MARKER: &str = "for '";
    const VALUES_MARKER: &str = "[possible values: ";
    let rendered = e.to_string();
    // Flag name: the quoted text after "for '", with the "<STATE>"-style
    // value placeholder stripped off.
    let flag = rendered.find(FOR_MARKER).and_then(|pos| {
        let tail = &rendered[pos + FOR_MARKER.len()..];
        let end = tail.find('\'')?;
        let raw = &tail[..end];
        Some(raw.split_whitespace().next().unwrap_or(raw).to_string())
    });
    // Valid values: prefer the list clap embedded in the message; otherwise
    // fall back to the static autocorrect registry keyed by flag name.
    let valid_values = match rendered.find(VALUES_MARKER) {
        Some(pos) => {
            let tail = &rendered[pos + VALUES_MARKER.len()..];
            tail.find(']').map(|end| {
                tail[..end]
                    .split(", ")
                    .map(|v| v.trim().to_string())
                    .collect()
            })
        }
        None => flag
            .as_deref()
            .and_then(autocorrect::valid_values_for_flag)
            .map(|v| v.iter().map(|s| (*s).to_string()).collect()),
    };
    (flag, valid_values)
}
/// Phase 4: Suggest similar command using fuzzy matching
fn suggest_similar_command(invalid: &str) -> String {
const VALID_COMMANDS: &[&str] = &[
"issues",
"mrs",
"search",
"sync",
"ingest",
"count",
"status",
"auth",
"doctor",
"version",
"init",
"stats",
"generate-docs",
"embed",
"migrate",
"health",
"robot-docs",
"completions",
"timeline",
"who",
];
let invalid_lower = invalid.to_lowercase();
// Find the best match using Jaro-Winkler similarity
let best_match = VALID_COMMANDS
.iter()
.map(|cmd| (*cmd, jaro_winkler(&invalid_lower, cmd)))
.max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal));
if let Some((cmd, score)) = best_match
&& score > 0.7
{
return format!(
"Did you mean 'lore {}'? Run 'lore robot-docs' for all commands",
cmd
);
}
"Run 'lore robot-docs' for valid commands".to_string()
}
/// Handle `lore issues`: show a single issue when an IID is given,
/// otherwise list issues matching the requested filters (or open the
/// result in a browser with --open).
fn handle_issues(
    config_override: Option<&str>,
    args: IssuesArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    // Negatable flag pairs: the --no-X form wins over X.
    let ascending = args.asc && !args.no_asc;
    let has_due = args.has_due && !args.no_has_due;
    let open_in_browser = args.open && !args.no_open;
    let order = if ascending { "asc" } else { "desc" };
    match args.iid {
        Some(iid) => {
            // Single-issue detail view.
            let result = run_show_issue(&config, iid, project)?;
            if robot_mode {
                print_show_issue_json(&result, started.elapsed().as_millis() as u64);
            } else {
                print_show_issue(&result);
            }
        }
        None => {
            let state_normalized = args.state.as_deref().map(str::to_lowercase);
            let filters = ListFilters {
                limit: args.limit,
                project,
                state: state_normalized.as_deref(),
                author: args.author.as_deref(),
                assignee: args.assignee.as_deref(),
                labels: args.label.as_deref(),
                milestone: args.milestone.as_deref(),
                since: args.since.as_deref(),
                due_before: args.due_before.as_deref(),
                has_due_date: has_due,
                statuses: &args.status,
                sort: &args.sort,
                order,
            };
            let result = run_list_issues(&config, filters)?;
            if open_in_browser {
                open_issue_in_browser(&result);
            } else if robot_mode {
                print_list_issues_json(
                    &result,
                    started.elapsed().as_millis() as u64,
                    args.fields.as_deref(),
                );
            } else {
                print_list_issues(&result);
            }
        }
    }
    Ok(())
}
/// Handle `lore mrs`: show a single merge request when an IID is given,
/// otherwise list MRs matching the requested filters (or open the result
/// in a browser with --open).
fn handle_mrs(
    config_override: Option<&str>,
    args: MrsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    // Negatable flag pairs: the --no-X form wins over X.
    let ascending = args.asc && !args.no_asc;
    let open_in_browser = args.open && !args.no_open;
    let order = if ascending { "asc" } else { "desc" };
    match args.iid {
        Some(iid) => {
            // Single-MR detail view.
            let result = run_show_mr(&config, iid, project)?;
            if robot_mode {
                print_show_mr_json(&result, started.elapsed().as_millis() as u64);
            } else {
                print_show_mr(&result);
            }
        }
        None => {
            let state_normalized = args.state.as_deref().map(str::to_lowercase);
            let filters = MrListFilters {
                limit: args.limit,
                project,
                state: state_normalized.as_deref(),
                author: args.author.as_deref(),
                assignee: args.assignee.as_deref(),
                reviewer: args.reviewer.as_deref(),
                labels: args.label.as_deref(),
                since: args.since.as_deref(),
                draft: args.draft,
                no_draft: args.no_draft,
                target_branch: args.target.as_deref(),
                source_branch: args.source.as_deref(),
                sort: &args.sort,
                order,
            };
            let result = run_list_mrs(&config, filters)?;
            if open_in_browser {
                open_mr_in_browser(&result);
            } else if robot_mode {
                print_list_mrs_json(
                    &result,
                    started.elapsed().as_millis() as u64,
                    args.fields.as_deref(),
                );
            } else {
                print_list_mrs(&result);
            }
        }
    }
    Ok(())
}
/// Handle `lore ingest`: fetch issues and/or merge requests into the local
/// database, with a --dry-run preview path, a sync-run record, and graceful
/// Ctrl+C handling (first press cancels after the current batch, second
/// press force-exits with code 130).
async fn handle_ingest(
    config_override: Option<&str>,
    args: IngestArgs,
    robot_mode: bool,
    quiet: bool,
    metrics: &MetricsLayer,
) -> Result<(), Box<dyn std::error::Error>> {
    let start = std::time::Instant::now();
    // Negatable flag pairs: the --no-X form wins over X.
    let dry_run = args.dry_run && !args.no_dry_run;
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    let force = args.force && !args.no_force;
    let full = args.full && !args.no_full;
    // Handle dry run mode - show preview without making any changes
    if dry_run {
        match args.entity.as_deref() {
            Some(resource_type) => {
                let preview = run_ingest_dry_run(&config, resource_type, project, full)?;
                if robot_mode {
                    print_dry_run_preview_json(&preview);
                } else {
                    print_dry_run_preview(&preview);
                }
            }
            None => {
                // No entity given: preview both kinds.
                let issues_preview = run_ingest_dry_run(&config, "issues", project, full)?;
                let mrs_preview = run_ingest_dry_run(&config, "mrs", project, full)?;
                if robot_mode {
                    print_combined_dry_run_json(&issues_preview, &mrs_preview);
                } else {
                    print_dry_run_preview(&issues_preview);
                    println!();
                    print_dry_run_preview(&mrs_preview);
                }
            }
        }
        return Ok(());
    }
    // Progress display is suppressed for robots and --quiet.
    let display = if robot_mode || quiet {
        IngestDisplay::silent()
    } else {
        IngestDisplay::interactive()
    };
    let entity_label = args.entity.as_deref().unwrap_or("all");
    let command = format!("ingest:{entity_label}");
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let recorder_conn = create_connection(&db_path)?;
    // Short (8-char) run id for the sync-run record.
    let run_id = uuid::Uuid::new_v4().simple().to_string();
    let run_id_short = &run_id[..8];
    let recorder = SyncRunRecorder::start(&recorder_conn, &command, run_id_short)?;
    // First Ctrl+C requests a graceful stop; a second force-exits with the
    // conventional 130 (128 + SIGINT) status.
    let signal = ShutdownSignal::new();
    let signal_for_handler = signal.clone();
    tokio::spawn(async move {
        let _ = tokio::signal::ctrl_c().await;
        eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
        signal_for_handler.cancel();
        let _ = tokio::signal::ctrl_c().await;
        std::process::exit(130);
    });
    // Run the ingest inside an async block so the outcome can be recorded
    // uniformly below regardless of which path produced it.
    let ingest_result: std::result::Result<(), Box<dyn std::error::Error>> = async {
        match args.entity.as_deref() {
            Some(resource_type) => {
                let result = run_ingest(
                    &config,
                    resource_type,
                    project,
                    force,
                    full,
                    false,
                    display,
                    None,
                    &signal,
                )
                .await?;
                if robot_mode {
                    print_ingest_summary_json(&result, start.elapsed().as_millis() as u64);
                } else {
                    print_ingest_summary(&result);
                }
            }
            None => {
                if !robot_mode && !quiet {
                    println!(
                        "{}",
                        style("Ingesting all content (issues + merge requests)...").blue()
                    );
                    println!();
                }
                // Issues first, then MRs; both share the same shutdown signal.
                let issues_result = run_ingest(
                    &config, "issues", project, force, full, false, display, None, &signal,
                )
                .await?;
                let mrs_result = run_ingest(
                    &config, "mrs", project, force, full, false, display, None, &signal,
                )
                .await?;
                if robot_mode {
                    print_combined_ingest_json(
                        &issues_result,
                        &mrs_result,
                        start.elapsed().as_millis() as u64,
                    );
                } else {
                    print_ingest_summary(&issues_result);
                    print_ingest_summary(&mrs_result);
                }
            }
        }
        Ok(())
    }
    .await;
    // Record the run outcome; lock release and record writes are
    // best-effort (errors deliberately ignored).
    match ingest_result {
        Ok(()) if signal.is_cancelled() => {
            // Completed a batch, then the user cancelled: record as failed
            // but exit successfully since partial data was persisted.
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(
                &recorder_conn,
                "Interrupted by user (Ctrl+C)",
                Some(&stages),
            );
            if !robot_mode {
                eprintln!(
                    "{}",
                    style("Interrupted by Ctrl+C. Partial data has been saved.").yellow()
                );
            }
            Ok(())
        }
        Ok(()) => {
            let stages = metrics.extract_timings();
            let total_items: usize = stages.iter().map(|s| s.items_processed).sum();
            let total_errors: usize = stages.iter().map(|s| s.errors).sum();
            let _ = recorder.succeed(&recorder_conn, &stages, total_items, total_errors);
            Ok(())
        }
        Err(e) => {
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(&recorder_conn, &e.to_string(), Some(&stages));
            Err(e)
        }
    }
}
/// Robot-mode JSON envelope for an `ingest` run that covered both issues
/// and merge requests.
#[derive(Serialize)]
struct CombinedIngestOutput {
    // Always true at the current call site; failures use the error envelope.
    ok: bool,
    data: CombinedIngestData,
    meta: RobotMeta,
}
/// Payload of [`CombinedIngestOutput`]: per-entity-kind ingest statistics.
#[derive(Serialize)]
struct CombinedIngestData {
    // Serialized as "all" at the current call site.
    resource_type: String,
    issues: CombinedIngestEntityStats,
    merge_requests: CombinedIngestEntityStats,
}
/// Ingest counters for a single entity kind (issues or merge requests),
/// copied from the entity-specific `IngestResult` fields.
#[derive(Serialize)]
struct CombinedIngestEntityStats {
    projects_synced: usize,
    fetched: usize,
    upserted: usize,
    labels_created: usize,
    discussions_fetched: usize,
    notes_upserted: usize,
}
/// Print the robot-mode JSON summary for an ingest run that processed
/// both issues and merge requests.
fn print_combined_ingest_json(
    issues: &lore::cli::commands::ingest::IngestResult,
    mrs: &lore::cli::commands::ingest::IngestResult,
    elapsed_ms: u64,
) {
    // Map each entity-specific result onto the shared stats shape.
    let issue_stats = CombinedIngestEntityStats {
        projects_synced: issues.projects_synced,
        fetched: issues.issues_fetched,
        upserted: issues.issues_upserted,
        labels_created: issues.labels_created,
        discussions_fetched: issues.discussions_fetched,
        notes_upserted: issues.notes_upserted,
    };
    let mr_stats = CombinedIngestEntityStats {
        projects_synced: mrs.projects_synced,
        fetched: mrs.mrs_fetched,
        upserted: mrs.mrs_upserted,
        labels_created: mrs.labels_created,
        discussions_fetched: mrs.discussions_fetched,
        notes_upserted: mrs.notes_upserted,
    };
    let payload = CombinedIngestOutput {
        ok: true,
        data: CombinedIngestData {
            resource_type: "all".to_string(),
            issues: issue_stats,
            merge_requests: mr_stats,
        },
        meta: RobotMeta { elapsed_ms },
    };
    // On serialization failure, emit a well-formed error envelope instead.
    let line = serde_json::to_string(&payload).unwrap_or_else(|e| {
        format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
    });
    println!("{line}");
}
/// Robot-mode JSON envelope for a combined (issues + MRs) ingest dry-run.
#[derive(Serialize)]
struct CombinedDryRunOutput {
    // Both flags are always true at the current call site.
    ok: bool,
    dry_run: bool,
    data: CombinedDryRunData,
}
/// Payload of [`CombinedDryRunOutput`]: one preview per entity kind.
#[derive(Serialize)]
struct CombinedDryRunData {
    issues: lore::cli::commands::DryRunPreview,
    merge_requests: lore::cli::commands::DryRunPreview,
}
/// Print the robot-mode JSON envelope for a combined (issues + MRs)
/// ingest dry-run preview.
fn print_combined_dry_run_json(
    issues: &lore::cli::commands::DryRunPreview,
    mrs: &lore::cli::commands::DryRunPreview,
) {
    let payload = CombinedDryRunOutput {
        ok: true,
        dry_run: true,
        data: CombinedDryRunData {
            issues: issues.clone(),
            merge_requests: mrs.clone(),
        },
    };
    // On serialization failure, emit a well-formed error envelope instead.
    let line = serde_json::to_string(&payload).unwrap_or_else(|e| {
        format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
    });
    println!("{line}");
}
/// Handle `lore count`: dispatch to the event-count path for
/// `count events`, or the generic entity counter otherwise.
async fn handle_count(
    config_override: Option<&str>,
    args: CountArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    if args.entity == "events" {
        // Events have their own counting query and output shape.
        let counts = run_count_events(&config)?;
        if robot_mode {
            print_event_count_json(&counts, started.elapsed().as_millis() as u64);
        } else {
            print_event_count(&counts);
        }
    } else {
        let result = run_count(&config, &args.entity, args.for_entity.as_deref())?;
        if robot_mode {
            print_count_json(&result, started.elapsed().as_millis() as u64);
        } else {
            print_count(&result);
        }
    }
    Ok(())
}
/// Handle `lore status`: report the recorded sync-run status, as JSON in
/// robot mode or human-readable text otherwise.
async fn handle_sync_status_cmd(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let status = run_sync_status(&config)?;
    if robot_mode {
        print_sync_status_json(&status, started.elapsed().as_millis() as u64);
    } else {
        print_sync_status(&status);
    }
    Ok(())
}
/// Robot-mode JSON envelope for a successful `init`.
#[derive(Serialize)]
struct InitOutput {
    // Always true at the current call site.
    ok: bool,
    data: InitOutputData,
}
/// Payload of [`InitOutput`]: where configuration was written, the
/// authenticated user, and the validated projects.
#[derive(Serialize)]
struct InitOutputData {
    config_path: String,
    data_dir: String,
    user: InitOutputUser,
    projects: Vec<InitOutputProject>,
    // Omitted from the JSON entirely when no default project was chosen.
    #[serde(skip_serializing_if = "Option::is_none")]
    default_project: Option<String>,
}
/// Authenticated GitLab user echoed back in the init summary.
#[derive(Serialize)]
struct InitOutputUser {
    username: String,
    name: String,
}
/// A configured project echoed back in the init summary.
#[derive(Serialize)]
struct InitOutputProject {
    path: String,
    name: String,
}
fn print_init_json(result: &InitResult) {
let output = InitOutput {
ok: true,
data: InitOutputData {
config_path: result.config_path.clone(),
data_dir: result.data_dir.clone(),
user: InitOutputUser {
username: result.user.username.clone(),
name: result.user.name.clone(),
},
projects: result
.projects
.iter()
.map(|p| InitOutputProject {
path: p.path.clone(),
name: p.name.clone(),
})
.collect(),
default_project: result.default_project.clone(),
},
};
println!(
"{}",
serde_json::to_string(&output).unwrap_or_else(|e| {
format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
})
);
}
#[allow(clippy::too_many_arguments)]
async fn handle_init(
config_override: Option<&str>,
force: bool,
non_interactive: bool,
robot_mode: bool,
gitlab_url_flag: Option<String>,
token_env_var_flag: Option<String>,
projects_flag: Option<String>,
default_project_flag: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
if robot_mode {
let missing: Vec<&str> = [
gitlab_url_flag.is_none().then_some("--gitlab-url"),
token_env_var_flag.is_none().then_some("--token-env-var"),
projects_flag.is_none().then_some("--projects"),
]
.into_iter()
.flatten()
.collect();
if !missing.is_empty() {
let output = RobotErrorWithSuggestion {
error: RobotErrorSuggestionData {
code: "MISSING_FLAGS".to_string(),
message: format!("Robot mode requires flags: {}", missing.join(", ")),
suggestion: "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project".to_string(),
correction: None,
valid_values: None,
},
};
eprintln!("{}", serde_json::to_string(&output)?);
std::process::exit(2);
}
let project_paths: Vec<String> = projects_flag
.unwrap()
.split(',')
.map(|p| p.trim().to_string())
.filter(|p| !p.is_empty())
.collect();
let result = run_init(
InitInputs {
gitlab_url: gitlab_url_flag.unwrap(),
token_env_var: token_env_var_flag.unwrap(),
project_paths,
default_project: default_project_flag.clone(),
},
InitOptions {
config_path: config_override.map(String::from),
force: true,
non_interactive: true,
},
)
.await?;
print_init_json(&result);
return Ok(());
}
let config_path = get_config_path(config_override);
let mut confirmed_overwrite = force;
if config_path.exists() && !force {
if non_interactive {
eprintln!(
"{}",
style(format!(
"Config file exists at {}. Use --force to overwrite.",
config_path.display()
))
.red()
);
std::process::exit(2);
}
let confirm = Confirm::new()
.with_prompt(format!(
"Config file exists at {}. Overwrite?",
config_path.display()
))
.default(false)
.interact()?;
if !confirm {
println!("{}", style("Cancelled.").yellow());
std::process::exit(2);
}
confirmed_overwrite = true;
}
let gitlab_url: String = if let Some(url) = gitlab_url_flag {
url
} else {
Input::new()
.with_prompt("GitLab URL")
.default("https://gitlab.com".to_string())
.validate_with(|input: &String| -> Result<(), &str> {
if url::Url::parse(input).is_ok() {
Ok(())
} else {
Err("Please enter a valid URL")
}
})
.interact_text()?
};
let token_env_var: String = if let Some(var) = token_env_var_flag {
var
} else {
Input::new()
.with_prompt("Token environment variable name")
.default("GITLAB_TOKEN".to_string())
.interact_text()?
};
let project_paths: Vec<String> = if let Some(projects) = projects_flag {
projects
.split(',')
.map(|p| p.trim().to_string())
.filter(|p| !p.is_empty())
.collect()
} else {
let project_paths_input: String = Input::new()
.with_prompt("Project paths (comma-separated, e.g., group/project)")
.validate_with(|input: &String| -> Result<(), &str> {
if input.trim().is_empty() {
Err("Please enter at least one project path")
} else {
Ok(())
}
})
.interact_text()?;
project_paths_input
.split(',')
.map(|p| p.trim().to_string())
.filter(|p| !p.is_empty())
.collect()
};
// Resolve default project: CLI flag, interactive prompt, or None
let default_project = if default_project_flag.is_some() {
default_project_flag
} else if project_paths.len() > 1 && !non_interactive {
let set_default = Confirm::new()
.with_prompt("Set a default project? (used when -p is omitted)")
.default(true)
.interact()?;
if set_default {
let selection = dialoguer::Select::new()
.with_prompt("Default project")
.items(&project_paths)
.default(0)
.interact()?;
Some(project_paths[selection].clone())
} else {
None
}
} else {
None
};
println!("{}", style("\nValidating configuration...").blue());
let result = run_init(
InitInputs {
gitlab_url,
token_env_var,
project_paths,
default_project,
},
InitOptions {
config_path: config_override.map(String::from),
force: confirmed_overwrite,
non_interactive,
},
)
.await?;
println!(
"{}",
style(format!(
"\n✓ Authenticated as @{} ({})",
result.user.username, result.user.name
))
.green()
);
for project in &result.projects {
println!(
"{}",
style(format!("{} ({})", project.path, project.name)).green()
);
}
if let Some(ref dp) = result.default_project {
println!("{}", style(format!("✓ Default project: {dp}")).green());
}
println!(
"{}",
style(format!("\n✓ Config written to {}", result.config_path)).green()
);
println!(
"{}",
style(format!("✓ Database initialized at {}", result.data_dir)).green()
);
println!(
"{}",
style("\nSetup complete! Run 'lore doctor' to verify.").blue()
);
Ok(())
}
/// Robot-mode JSON envelope for the `auth` command.
#[derive(Serialize)]
struct AuthTestOutput {
    // Always true on the success path; failures go through RobotErrorOutput.
    ok: bool,
    data: AuthTestData,
    meta: RobotMeta,
}
/// Payload of a successful authentication check.
#[derive(Serialize)]
struct AuthTestData {
    authenticated: bool,
    username: String,
    name: String,
    // Base URL of the GitLab instance the token authenticated against.
    gitlab_url: String,
}
async fn handle_auth_test(
config_override: Option<&str>,
robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
let start = std::time::Instant::now();
match run_auth_test(config_override).await {
Ok(result) => {
if robot_mode {
let output = AuthTestOutput {
ok: true,
data: AuthTestData {
authenticated: true,
username: result.username.clone(),
name: result.name.clone(),
gitlab_url: result.base_url.clone(),
},
meta: RobotMeta {
elapsed_ms: start.elapsed().as_millis() as u64,
},
};
println!("{}", serde_json::to_string(&output)?);
} else {
println!("Authenticated as @{} ({})", result.username, result.name);
println!("GitLab: {}", result.base_url);
}
Ok(())
}
Err(e) => {
if robot_mode {
let output = RobotErrorOutput::from(&e);
eprintln!(
"{}",
serde_json::to_string(&output).unwrap_or_else(|_| {
let msg = e.to_string().replace('\\', "\\\\").replace('"', "\\\"");
format!(
r#"{{"error":{{"code":"{}","message":"{}"}}}}"#,
e.code(),
msg
)
})
);
} else {
eprintln!("{} {}", style("Error:").red(), e);
if let Some(suggestion) = e.suggestion() {
eprintln!("{} {}", style("Hint:").yellow(), suggestion);
}
}
std::process::exit(e.exit_code());
}
}
}
/// Robot-mode JSON envelope for the `doctor` command.
#[derive(Serialize)]
struct DoctorOutput {
    ok: bool,
    data: DoctorData,
    meta: RobotMeta,
}
/// Outcome of the full environment health check.
#[derive(Serialize)]
struct DoctorData {
    // True only when every individual check passed.
    success: bool,
    checks: lore::cli::commands::DoctorChecks,
}
/// Run the full environment health check and report the results.
///
/// Exits with code 1 when any check failed so scripts can branch on it;
/// otherwise returns `Ok(())`.
async fn handle_doctor(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let report = run_doctor(config_override).await;
    if !robot_mode {
        print_doctor_results(&report);
    } else {
        let payload = DoctorOutput {
            ok: true,
            data: DoctorData {
                success: report.success,
                checks: report.checks,
            },
            meta: RobotMeta {
                elapsed_ms: timer.elapsed().as_millis() as u64,
            },
        };
        println!("{}", serde_json::to_string(&payload)?);
    }
    // Non-zero exit signals that at least one check failed.
    if !report.success {
        std::process::exit(1);
    }
    Ok(())
}
/// Robot-mode JSON envelope for the `version` command.
#[derive(Serialize)]
struct VersionOutput {
    ok: bool,
    data: VersionData,
    meta: RobotMeta,
}
/// Version payload; the git hash is omitted when unavailable at build time.
#[derive(Serialize)]
struct VersionData {
    version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    git_hash: Option<String>,
}
/// Print version information, optionally including the build-time git hash.
///
/// `GIT_HASH` is injected at compile time; an empty value means no git
/// metadata was available when the binary was built.
fn handle_version(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let version = env!("CARGO_PKG_VERSION").to_string();
    let hash = env!("GIT_HASH").to_string();
    // Normalize the empty-string sentinel into an Option up front.
    let hash = if hash.is_empty() { None } else { Some(hash) };
    if robot_mode {
        let payload = VersionOutput {
            ok: true,
            data: VersionData {
                version,
                git_hash: hash,
            },
            meta: RobotMeta {
                elapsed_ms: timer.elapsed().as_millis() as u64,
            },
        };
        println!("{}", serde_json::to_string(&payload)?);
    } else {
        match hash {
            Some(h) => println!("lore version {} ({})", version, h),
            None => println!("lore version {}", version),
        }
    }
    Ok(())
}
/// Generate a shell completion script for `lore` on stdout.
///
/// Supported shells: bash, zsh, fish, powershell, elvish (all variants
/// clap_complete provides). Any other name yields an error that echoes the
/// unsupported shell back to the user.
fn handle_completions(shell: &str) -> Result<(), Box<dyn std::error::Error>> {
    use clap::CommandFactory;
    use clap_complete::{Shell, generate};
    let shell = match shell {
        "bash" => Shell::Bash,
        "zsh" => Shell::Zsh,
        "fish" => Shell::Fish,
        "powershell" => Shell::PowerShell,
        // clap_complete also ships an Elvish generator; expose it too.
        "elvish" => Shell::Elvish,
        other => {
            return Err(format!("Unsupported shell: {other}").into());
        }
    };
    let mut cmd = Cli::command();
    generate(shell, &mut cmd, "lore", &mut std::io::stdout());
    Ok(())
}
/// Stub for the not-yet-implemented `backup` command.
///
/// Emits a NOT_IMPLEMENTED error (JSON in robot mode, styled text
/// otherwise) with a manual-backup workaround, then exits with code 1.
fn handle_backup(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
    if !robot_mode {
        eprintln!(
            "{} The 'backup' command is not yet implemented.",
            style("Error:").red()
        );
    } else {
        let payload = RobotErrorWithSuggestion {
            error: RobotErrorSuggestionData {
                code: "NOT_IMPLEMENTED".to_string(),
                message: "The 'backup' command is not yet implemented.".to_string(),
                suggestion: "Use manual database backup: cp ~/.local/share/lore/lore.db ~/.local/share/lore/lore.db.bak".to_string(),
                correction: None,
                valid_values: None,
            },
        };
        eprintln!("{}", serde_json::to_string(&payload)?);
    }
    std::process::exit(1);
}
/// Stub for the not-yet-implemented `reset` command.
///
/// Emits a NOT_IMPLEMENTED error (JSON in robot mode, styled text
/// otherwise) with a manual-deletion workaround, then exits with code 1.
fn handle_reset(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
    if !robot_mode {
        eprintln!(
            "{} The 'reset' command is not yet implemented.",
            style("Error:").red()
        );
    } else {
        let payload = RobotErrorWithSuggestion {
            error: RobotErrorSuggestionData {
                code: "NOT_IMPLEMENTED".to_string(),
                message: "The 'reset' command is not yet implemented.".to_string(),
                suggestion: "Manually delete the database: rm ~/.local/share/lore/lore.db"
                    .to_string(),
                correction: None,
                valid_values: None,
            },
        };
        eprintln!("{}", serde_json::to_string(&payload)?);
    }
    std::process::exit(1);
}
/// Robot-mode JSON envelope for the `migrate` command.
#[derive(Serialize)]
struct MigrateOutput {
    ok: bool,
    data: MigrateData,
    meta: RobotMeta,
}
/// Schema versions observed before/after running migrations.
#[derive(Serialize)]
struct MigrateData {
    before_version: i32,
    after_version: i32,
    // True when at least one migration was applied (after > before).
    migrated: bool,
}
/// Robot-mode error envelope that carries an actionable suggestion.
#[derive(Serialize)]
struct RobotErrorWithSuggestion {
    error: RobotErrorSuggestionData,
}
/// Error payload: machine-readable code, human message, suggested fix.
#[derive(Serialize)]
struct RobotErrorSuggestionData {
    code: String,
    message: String,
    suggestion: String,
    // Optional autocorrected input (e.g. fuzzy-matched command), omitted when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    correction: Option<String>,
    // Optional list of accepted values for the offending argument.
    #[serde(skip_serializing_if = "Option::is_none")]
    valid_values: Option<Vec<String>>,
}
/// Apply any pending schema migrations to the configured database.
///
/// Exits with code 10 (database error) when the database file does not
/// exist yet; otherwise reports the before/after schema versions.
async fn handle_migrate(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let db_path = get_db_path(config.storage.db_path.as_deref());
    // Nothing to migrate without an existing database file.
    if !db_path.exists() {
        if robot_mode {
            let payload = RobotErrorWithSuggestion {
                error: RobotErrorSuggestionData {
                    code: "DB_ERROR".to_string(),
                    message: format!("Database not found at {}", db_path.display()),
                    suggestion: "Run 'lore init' first".to_string(),
                    correction: None,
                    valid_values: None,
                },
            };
            eprintln!("{}", serde_json::to_string(&payload)?);
        } else {
            eprintln!(
                "{}",
                style(format!("Database not found at {}", db_path.display())).red()
            );
            eprintln!(
                "{}",
                style("Run 'lore init' first to create the database.").yellow()
            );
        }
        std::process::exit(10);
    }
    let conn = create_connection(&db_path)?;
    let before = get_schema_version(&conn);
    if !robot_mode {
        println!(
            "{}",
            style(format!("Current schema version: {}", before)).blue()
        );
    }
    run_migrations(&conn)?;
    let after = get_schema_version(&conn);
    if robot_mode {
        let payload = MigrateOutput {
            ok: true,
            data: MigrateData {
                before_version: before,
                after_version: after,
                migrated: after > before,
            },
            meta: RobotMeta {
                elapsed_ms: timer.elapsed().as_millis() as u64,
            },
        };
        println!("{}", serde_json::to_string(&payload)?);
    } else {
        // Human-readable summary: applied vs already current.
        let summary = if after > before {
            format!("Migrations applied: {} -> {}", before, after)
        } else {
            "Database is already up to date.".to_string()
        };
        println!("{}", style(summary).green());
    }
    Ok(())
}
/// Show document/index statistics, optionally checking and repairing
/// integrity.
///
/// Paired flags resolve with the `--no-*` variant winning; `--repair`
/// implies integrity checking.
async fn handle_stats(
    config_override: Option<&str>,
    args: StatsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let cfg = Config::load(config_override)?;
    let preview_only = args.dry_run && !args.no_dry_run;
    let run_checks = args.repair || (args.check && !args.no_check);
    let report = run_stats(&cfg, run_checks, args.repair, preview_only)?;
    match robot_mode {
        true => print_stats_json(&report, timer.elapsed().as_millis() as u64),
        false => print_stats(&report),
    }
    Ok(())
}
/// Build and print a chronological event timeline for a keyword query.
fn handle_timeline(
    config_override: Option<&str>,
    args: TimelineArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let cfg = Config::load(config_override)?;
    // Resolve -p/--project through the config's default-project fallback.
    let project = cfg
        .effective_project(args.project.as_deref())
        .map(String::from);
    let params = TimelineParams {
        query: args.query,
        project,
        since: args.since,
        depth: args.depth,
        expand_mentions: args.expand_mentions,
        limit: args.limit,
        max_seeds: args.max_seeds,
        max_entities: args.max_entities,
        max_evidence: args.max_evidence,
    };
    let timeline = run_timeline(&cfg, &params)?;
    if !robot_mode {
        print_timeline(&timeline);
    } else {
        print_timeline_json_with_meta(
            &timeline,
            timeline.total_events_before_limit,
            params.depth,
            params.expand_mentions,
            args.fields.as_deref(),
        );
    }
    Ok(())
}
/// Run a search (lexical/hybrid/semantic) over indexed documents.
///
/// Only the literal mode string "raw" opts out of the safe FTS query
/// sanitizer; every other value uses the safe mode.
async fn handle_search(
    config_override: Option<&str>,
    args: SearchArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let cfg = Config::load(config_override)?;
    let want_explain = args.explain && !args.no_explain;
    let fts_mode = if args.fts_mode == "raw" {
        lore::search::FtsQueryMode::Raw
    } else {
        lore::search::FtsQueryMode::Safe
    };
    let filters = SearchCliFilters {
        source_type: args.source_type,
        author: args.author,
        project: cfg
            .effective_project(args.project.as_deref())
            .map(String::from),
        labels: args.label,
        path: args.path,
        since: args.since,
        updated_since: args.updated_since,
        limit: args.limit,
    };
    // Time only the search itself, not config loading.
    let timer = std::time::Instant::now();
    let response = run_search(
        &cfg,
        &args.query,
        filters,
        fts_mode,
        &args.mode,
        want_explain,
    )
    .await?;
    let elapsed_ms = timer.elapsed().as_millis() as u64;
    match robot_mode {
        true => print_search_results_json(&response, elapsed_ms, args.fields.as_deref()),
        false => print_search_results(&response),
    }
    Ok(())
}
/// Regenerate searchable documents from previously ingested data.
async fn handle_generate_docs(
    config_override: Option<&str>,
    args: GenerateDocsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let cfg = Config::load(config_override)?;
    // -p/--project falls back to the configured default project.
    let project = cfg.effective_project(args.project.as_deref());
    let outcome = run_generate_docs(&cfg, args.full, project, None)?;
    match robot_mode {
        true => print_generate_docs_json(&outcome, timer.elapsed().as_millis() as u64),
        false => print_generate_docs(&outcome),
    }
    Ok(())
}
/// Generate vector embeddings for pending documents via Ollama.
///
/// Installs a Ctrl+C handler: the first interrupt requests a graceful stop
/// after the current batch; a second interrupt force-quits with exit code
/// 130 (conventional SIGINT exit).
async fn handle_embed(
    config_override: Option<&str>,
    args: EmbedArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let cfg = Config::load(config_override)?;
    // Paired --no-* flags override their positive counterparts.
    let full = args.full && !args.no_full;
    let retry_failed = args.retry_failed && !args.no_retry_failed;
    let shutdown = ShutdownSignal::new();
    let shutdown_for_handler = shutdown.clone();
    tokio::spawn(async move {
        let _ = tokio::signal::ctrl_c().await;
        eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
        shutdown_for_handler.cancel();
        let _ = tokio::signal::ctrl_c().await;
        std::process::exit(130);
    });
    let outcome = run_embed(&cfg, full, retry_failed, None, &shutdown).await?;
    match robot_mode {
        true => print_embed_json(&outcome, timer.elapsed().as_millis() as u64),
        false => print_embed(&outcome),
    }
    Ok(())
}
/// Run the full sync pipeline (ingest -> generate-docs -> embed), recording
/// the run in the sync-run history and handling Ctrl+C gracefully.
///
/// Paired flags (`--full`/`--no-full`, `--force`/`--no-force`,
/// `--dry-run`/`--no-dry-run`) resolve with the negative flag winning.
/// A dry run skips run recording entirely and only prints the preview.
async fn handle_sync_cmd(
    config_override: Option<&str>,
    args: SyncArgs,
    robot_mode: bool,
    metrics: &MetricsLayer,
) -> Result<(), Box<dyn std::error::Error>> {
    let dry_run = args.dry_run && !args.no_dry_run;
    let mut config = Config::load(config_override)?;
    // CLI switches narrow what the sync fetches before options are built.
    if args.no_events {
        config.sync.fetch_resource_events = false;
    }
    if args.no_file_changes {
        config.sync.fetch_mr_file_changes = false;
    }
    let options = SyncOptions {
        full: args.full && !args.no_full,
        force: args.force && !args.no_force,
        no_embed: args.no_embed,
        no_docs: args.no_docs,
        no_events: args.no_events,
        robot_mode,
        dry_run,
    };
    // For dry_run, skip recording and just show the preview
    if dry_run {
        let signal = ShutdownSignal::new();
        run_sync(&config, options, None, &signal).await?;
        return Ok(());
    }
    // Real run: open a dedicated connection for run bookkeeping and start a
    // recorder entry keyed by a short (8-char) random run id.
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let recorder_conn = create_connection(&db_path)?;
    let run_id = uuid::Uuid::new_v4().simple().to_string();
    let run_id_short = &run_id[..8];
    let recorder = SyncRunRecorder::start(&recorder_conn, "sync", run_id_short)?;
    // First Ctrl+C requests a graceful stop after the current batch; a second
    // Ctrl+C force-quits with exit code 130 (conventional SIGINT exit).
    let signal = ShutdownSignal::new();
    let signal_for_handler = signal.clone();
    tokio::spawn(async move {
        let _ = tokio::signal::ctrl_c().await;
        eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
        signal_for_handler.cancel();
        let _ = tokio::signal::ctrl_c().await;
        std::process::exit(130);
    });
    let start = std::time::Instant::now();
    match run_sync(&config, options, Some(run_id_short), &signal).await {
        // Sync returned Ok but was cancelled mid-way: record the run as
        // failed, release any jobs left locked, and show partial results.
        Ok(result) if signal.is_cancelled() => {
            let elapsed = start.elapsed();
            let stages = metrics.extract_timings();
            let released = release_all_locked_jobs(&recorder_conn).unwrap_or(0);
            let _ = recorder.fail(
                &recorder_conn,
                "Interrupted by user (Ctrl+C)",
                Some(&stages),
            );
            if robot_mode {
                print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
            } else {
                eprintln!();
                eprintln!(
                    "{}",
                    console::style("Interrupted by Ctrl+C. Partial results:").yellow()
                );
                print_sync(&result, elapsed, Some(metrics));
                if released > 0 {
                    eprintln!(
                        "{}",
                        console::style(format!("Released {released} locked jobs")).dim()
                    );
                }
            }
            Ok(())
        }
        // Normal completion: record success with aggregate item/error counts.
        Ok(result) => {
            let elapsed = start.elapsed();
            let stages = metrics.extract_timings();
            let total_items = result.issues_updated
                + result.mrs_updated
                + result.documents_regenerated
                + result.documents_embedded;
            // NOTE(review): only resource-event failures are counted as
            // errors here — confirm other stage failures surface as Err.
            let total_errors = result.resource_events_failed;
            let _ = recorder.succeed(&recorder_conn, &stages, total_items, total_errors);
            if robot_mode {
                print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
            } else {
                print_sync(&result, elapsed, Some(metrics));
            }
            Ok(())
        }
        // Hard failure: release locked jobs, mark the run failed with the
        // error text, then propagate the error to the caller.
        Err(e) => {
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(&recorder_conn, &e.to_string(), Some(&stages));
            Err(e.into())
        }
    }
}
/// Robot-mode JSON envelope for the `health` command.
#[derive(Serialize)]
struct HealthOutput {
    ok: bool,
    data: HealthData,
    meta: RobotMeta,
}
/// Pre-flight health snapshot: config, database, and schema state.
#[derive(Serialize)]
struct HealthData {
    // True only when config exists, the DB exists, and the schema is current.
    healthy: bool,
    config_found: bool,
    db_found: bool,
    schema_current: bool,
    schema_version: i32,
    // Suggested remediation commands (e.g. "lore init"); omitted when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    actions: Vec<String>,
}
/// Quick pre-flight check: config present, database present, schema current.
///
/// Exits with code 19 (health check failed) when unhealthy so scripts can
/// branch on the result; otherwise returns `Ok(())`.
async fn handle_health(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let config_path = get_config_path(config_override);
    let config_found = config_path.exists();
    // Probe DB state only when config exists and loads; any failure along
    // the way leaves the defaults (no DB, version 0, stale schema).
    let mut db_found = false;
    let mut schema_version = 0;
    let mut schema_current = false;
    if config_found {
        if let Ok(cfg) = Config::load(config_override) {
            let db_path = get_db_path(cfg.storage.db_path.as_deref());
            if db_path.exists() {
                db_found = true;
                if let Ok(conn) = create_connection(&db_path) {
                    schema_version = get_schema_version(&conn);
                    schema_current = schema_version >= LATEST_SCHEMA_VERSION;
                }
            }
        }
    }
    let healthy = config_found && db_found && schema_current;
    // Suggest the command that fixes each missing piece.
    let mut actions = Vec::new();
    if !config_found {
        actions.push("lore init".to_string());
    }
    if config_found && !db_found {
        actions.push("lore sync".to_string());
    }
    if db_found && !schema_current {
        actions.push("lore migrate".to_string());
    }
    if robot_mode {
        let payload = HealthOutput {
            ok: true,
            data: HealthData {
                healthy,
                config_found,
                db_found,
                schema_current,
                schema_version,
                actions,
            },
            meta: RobotMeta {
                elapsed_ms: timer.elapsed().as_millis() as u64,
            },
        };
        println!("{}", serde_json::to_string(&payload)?);
    } else {
        let status = |ok: bool| {
            if ok {
                style("pass").green()
            } else {
                style("FAIL").red()
            }
        };
        println!(
            "Config: {} ({})",
            status(config_found),
            config_path.display()
        );
        println!("DB: {}", status(db_found));
        println!("Schema: {} (v{})", status(schema_current), schema_version);
        println!();
        if healthy {
            println!("{}", style("Healthy").green().bold());
        } else {
            println!(
                "{}",
                style("Unhealthy - run 'lore doctor' for details")
                    .red()
                    .bold()
            );
        }
    }
    if !healthy {
        std::process::exit(19);
    }
    Ok(())
}
/// Robot-mode JSON envelope for the `robot-docs` command (no meta/timing).
#[derive(Serialize)]
struct RobotDocsOutput {
    ok: bool,
    data: RobotDocsData,
}
/// Agent self-discovery manifest: commands, schemas, workflows, exit codes.
#[derive(Serialize)]
struct RobotDocsData {
    name: String,
    version: String,
    description: String,
    // How agents enable robot mode (flags, env var, auto-detection).
    activation: RobotDocsActivation,
    quick_start: serde_json::Value,
    commands: serde_json::Value,
    /// Deprecated command aliases (old -> new)
    aliases: serde_json::Value,
    exit_codes: serde_json::Value,
    /// Error codes emitted by clap parse failures
    clap_error_codes: serde_json::Value,
    error_format: String,
    workflows: serde_json::Value,
    config_notes: serde_json::Value,
}
/// Ways robot mode can be activated.
#[derive(Serialize)]
struct RobotDocsActivation {
    flags: Vec<String>,
    env: String,
    auto: String,
}
fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::error::Error>> {
let version = env!("CARGO_PKG_VERSION").to_string();
let commands = serde_json::json!({
"init": {
"description": "Initialize configuration and database",
"flags": ["--force", "--non-interactive", "--gitlab-url <URL>", "--token-env-var <VAR>", "--projects <paths>", "--default-project <path>"],
"robot_flags": ["--gitlab-url", "--token-env-var", "--projects", "--default-project"],
"example": "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project,other/repo --default-project group/project",
"response_schema": {
"ok": "bool",
"data": {"config_path": "string", "data_dir": "string", "user": {"username": "string", "name": "string"}, "projects": "[{path:string, name:string}]", "default_project": "string?"},
"meta": {"elapsed_ms": "int"}
}
},
"health": {
"description": "Quick pre-flight check: config, database, schema version",
"flags": [],
"example": "lore --robot health",
"response_schema": {
"ok": "bool",
"data": {"healthy": "bool", "config_found": "bool", "db_found": "bool", "schema_current": "bool", "schema_version": "int"},
"meta": {"elapsed_ms": "int"}
}
},
"auth": {
"description": "Verify GitLab authentication",
"flags": [],
"example": "lore --robot auth",
"response_schema": {
"ok": "bool",
"data": {"authenticated": "bool", "username": "string", "name": "string", "gitlab_url": "string"},
"meta": {"elapsed_ms": "int"}
}
},
"doctor": {
"description": "Full environment health check (config, auth, DB, Ollama)",
"flags": [],
"example": "lore --robot doctor",
"response_schema": {
"ok": "bool",
"data": {"success": "bool", "checks": "{config:object, auth:object, database:object, ollama:object}"},
"meta": {"elapsed_ms": "int"}
}
},
"ingest": {
"description": "Sync data from GitLab",
"flags": ["--project <path>", "--force", "--no-force", "--full", "--no-full", "--dry-run", "--no-dry-run", "<entity: issues|mrs>"],
"example": "lore --robot ingest issues --project group/repo",
"response_schema": {
"ok": "bool",
"data": {"resource_type": "string", "projects_synced": "int", "issues_fetched?": "int", "mrs_fetched?": "int", "upserted": "int", "labels_created": "int", "discussions_fetched": "int", "notes_upserted": "int"},
"meta": {"elapsed_ms": "int"}
}
},
"sync": {
"description": "Full sync pipeline: ingest -> generate-docs -> embed",
"flags": ["--full", "--no-full", "--force", "--no-force", "--no-embed", "--no-docs", "--no-events", "--dry-run", "--no-dry-run"],
"example": "lore --robot sync",
"response_schema": {
"ok": "bool",
"data": {"issues_updated": "int", "mrs_updated": "int", "documents_regenerated": "int", "documents_embedded": "int", "resource_events_synced": "int", "resource_events_failed": "int"},
"meta": {"elapsed_ms": "int", "stages?": "[{name:string, elapsed_ms:int, items_processed:int}]"}
}
},
"issues": {
"description": "List or show issues",
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "--status <name>", "-p/--project", "-a/--author", "-A/--assignee", "-l/--label", "-m/--milestone", "--since", "--due-before", "--has-due", "--no-has-due", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
"example": "lore --robot issues --state opened --limit 10",
"notes": {
"status_filter": "--status filters by work item status NAME (case-insensitive). Valid values are in meta.available_statuses of any issues list response.",
"status_name": "status_name is the board column label (e.g. 'In review', 'Blocked'). This is the canonical status identifier for filtering."
},
"response_schema": {
"list": {
"ok": "bool",
"data": {"issues": "[{iid:int, title:string, state:string, author_username:string, labels:[string], assignees:[string], discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, status_name:string?}]", "total_count": "int", "showing": "int"},
"meta": {"elapsed_ms": "int", "available_statuses": "[string] — all distinct status names in the database, for use with --status filter"}
},
"show": {
"ok": "bool",
"data": "IssueDetail (full entity with description, discussions, notes, events)",
"meta": {"elapsed_ms": "int"}
}
},
"example_output": {"list": {"ok":true,"data":{"issues":[{"iid":3864,"title":"Switch Health Card","state":"opened","status_name":"In progress","labels":["customer:BNSF"],"assignees":["teernisse"],"discussion_count":12,"updated_at_iso":"2026-02-12T..."}],"total_count":1,"showing":1},"meta":{"elapsed_ms":42}}},
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
},
"mrs": {
"description": "List or show merge requests",
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "-p/--project", "-a/--author", "-A/--assignee", "-r/--reviewer", "-l/--label", "--since", "-d/--draft", "-D/--no-draft", "--target", "--source", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
"example": "lore --robot mrs --state opened",
"response_schema": {
"list": {
"ok": "bool",
"data": {"mrs": "[{iid:int, title:string, state:string, author_username:string, labels:[string], draft:bool, target_branch:string, source_branch:string, discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, reviewers:[string]}]", "total_count": "int", "showing": "int"},
"meta": {"elapsed_ms": "int"}
},
"show": {
"ok": "bool",
"data": "MrDetail (full entity with description, discussions, notes, events)",
"meta": {"elapsed_ms": "int"}
}
},
"example_output": {"list": {"ok":true,"data":{"mrs":[{"iid":200,"title":"Add throw time chart","state":"opened","draft":false,"author_username":"teernisse","target_branch":"main","source_branch":"feat/throw-time","reviewers":["cseiber"],"discussion_count":5,"updated_at_iso":"2026-02-11T..."}],"total_count":1,"showing":1},"meta":{"elapsed_ms":38}}},
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
},
"search": {
"description": "Search indexed documents (lexical, hybrid, semantic)",
"flags": ["<QUERY>", "--mode", "--type", "--author", "-p/--project", "--label", "--path", "--since", "--updated-since", "-n/--limit", "--fields <list>", "--explain", "--no-explain", "--fts-mode"],
"example": "lore --robot search 'authentication bug' --mode hybrid --limit 10",
"response_schema": {
"ok": "bool",
"data": {"results": "[{document_id:int, source_type:string, title:string, snippet:string, score:float, url:string?, author:string?, created_at:string?, updated_at:string?, project_path:string, labels:[string], paths:[string]}]", "total_results": "int", "query": "string", "mode": "string", "warnings": "[string]"},
"meta": {"elapsed_ms": "int"}
},
"example_output": {"ok":true,"data":{"query":"throw time","mode":"hybrid","total_results":3,"results":[{"document_id":42,"source_type":"issue","title":"Switch Health Card","score":0.92,"snippet":"...throw time data from BNSF...","project_path":"vs/typescript-code"}],"warnings":[]},"meta":{"elapsed_ms":85}},
"fields_presets": {"minimal": ["document_id", "title", "source_type", "score"]}
},
"count": {
"description": "Count entities in local database",
"flags": ["<entity: issues|mrs|discussions|notes|events>", "-f/--for <issue|mr>"],
"example": "lore --robot count issues",
"response_schema": {
"ok": "bool",
"data": {"entity": "string", "count": "int", "system_excluded?": "int", "breakdown?": {"opened": "int", "closed": "int", "merged?": "int", "locked?": "int"}},
"meta": {"elapsed_ms": "int"}
}
},
"stats": {
"description": "Show document and index statistics",
"flags": ["--check", "--no-check", "--repair", "--dry-run", "--no-dry-run"],
"example": "lore --robot stats",
"response_schema": {
"ok": "bool",
"data": {"total_documents": "int", "indexed_documents": "int", "embedded_documents": "int", "stale_documents": "int", "integrity?": "object"},
"meta": {"elapsed_ms": "int"}
}
},
"status": {
"description": "Show sync state (cursors, last sync times)",
"flags": [],
"example": "lore --robot status",
"response_schema": {
"ok": "bool",
"data": {"projects": "[{path:string, issues_cursor:string?, mrs_cursor:string?, last_sync:string?}]"},
"meta": {"elapsed_ms": "int"}
}
},
"generate-docs": {
"description": "Generate searchable documents from ingested data",
"flags": ["--full", "-p/--project <path>"],
"example": "lore --robot generate-docs --full",
"response_schema": {
"ok": "bool",
"data": {"generated": "int", "updated": "int", "unchanged": "int", "deleted": "int"},
"meta": {"elapsed_ms": "int"}
}
},
"embed": {
"description": "Generate vector embeddings for documents via Ollama",
"flags": ["--full", "--no-full", "--retry-failed", "--no-retry-failed"],
"example": "lore --robot embed",
"response_schema": {
"ok": "bool",
"data": {"embedded": "int", "skipped": "int", "failed": "int", "total_chunks": "int"},
"meta": {"elapsed_ms": "int"}
}
},
"migrate": {
"description": "Run pending database migrations",
"flags": [],
"example": "lore --robot migrate",
"response_schema": {
"ok": "bool",
"data": {"before_version": "int", "after_version": "int", "migrated": "bool"},
"meta": {"elapsed_ms": "int"}
}
},
"version": {
"description": "Show version information",
"flags": [],
"example": "lore --robot version",
"response_schema": {
"ok": "bool",
"data": {"version": "string", "git_hash?": "string"},
"meta": {"elapsed_ms": "int"}
}
},
"completions": {
"description": "Generate shell completions",
"flags": ["<shell: bash|zsh|fish|powershell>"],
"example": "lore completions bash > ~/.local/share/bash-completion/completions/lore"
},
"timeline": {
"description": "Chronological timeline of events matching a keyword query",
"flags": ["<QUERY>", "-p/--project", "--since <duration>", "--depth <n>", "--expand-mentions", "-n/--limit", "--fields <list>", "--max-seeds", "--max-entities", "--max-evidence"],
"example": "lore --robot timeline '<keyword>' --since 30d",
"response_schema": {
"ok": "bool",
"data": {"entities": "[{type:string, iid:int, title:string, project_path:string}]", "events": "[{timestamp:string, type:string, entity_type:string, entity_iid:int, detail:string}]", "total_events": "int"},
"meta": {"elapsed_ms": "int"}
},
"fields_presets": {"minimal": ["timestamp", "type", "entity_iid", "detail"]}
},
"who": {
"description": "People intelligence: experts, workload, active discussions, overlap, review patterns",
"flags": ["<target>", "--path <path>", "--active", "--overlap <path>", "--reviews", "--since <duration>", "-p/--project", "-n/--limit", "--fields <list>"],
"modes": {
"expert": "lore who <file-path> -- Who knows about this area? (also: --path for root files)",
"workload": "lore who <username> -- What is someone working on?",
"reviews": "lore who <username> --reviews -- Review pattern analysis",
"active": "lore who --active -- Active unresolved discussions",
"overlap": "lore who --overlap <path> -- Who else is touching these files?"
},
"example": "lore --robot who src/features/auth/",
"response_schema": {
"ok": "bool",
"data": {
"mode": "string",
"input": {"target": "string|null", "path": "string|null", "project": "string|null", "since": "string|null", "limit": "int"},
"resolved_input": {"mode": "string", "project_id": "int|null", "project_path": "string|null", "since_ms": "int", "since_iso": "string", "since_mode": "string (default|explicit|none)", "limit": "int"},
"...": "mode-specific fields"
},
"meta": {"elapsed_ms": "int"}
},
"example_output": {"expert": {"ok":true,"data":{"mode":"expert","result":{"experts":[{"username":"teernisse","score":42,"note_count":15,"diff_note_count":8}]}},"meta":{"elapsed_ms":65}}},
"fields_presets": {
"expert_minimal": ["username", "score"],
"workload_minimal": ["entity_type", "iid", "title", "state"],
"active_minimal": ["entity_type", "iid", "title", "participants"]
}
},
"robot-docs": {
"description": "This command (agent self-discovery manifest)",
"flags": ["--brief"],
"example": "lore robot-docs --brief"
}
});
let quick_start = serde_json::json!({
"glab_equivalents": [
{ "glab": "glab issue list", "lore": "lore -J issues -n 50", "note": "Richer: includes labels, status, closing MRs, discussion counts" },
{ "glab": "glab issue view 123", "lore": "lore -J issues 123", "note": "Includes full discussions, work-item status, cross-references" },
{ "glab": "glab issue list -l bug", "lore": "lore -J issues --label bug", "note": "AND logic for multiple --label flags" },
{ "glab": "glab mr list", "lore": "lore -J mrs", "note": "Includes draft status, reviewers, discussion counts" },
{ "glab": "glab mr view 456", "lore": "lore -J mrs 456", "note": "Includes discussions, review threads, source/target branches" },
{ "glab": "glab mr list -s opened", "lore": "lore -J mrs -s opened", "note": "States: opened, merged, closed, locked, all" },
{ "glab": "glab api '/projects/:id/issues'", "lore": "lore -J issues -p project", "note": "Fuzzy project matching (suffix or substring)" }
],
"lore_exclusive": [
"search: FTS5 + vector hybrid search across all entities",
"who: Expert/workload/reviews analysis per file path or person",
"timeline: Chronological event reconstruction across entities",
"stats: Database statistics with document/note/discussion counts",
"count: Entity counts with state breakdowns",
"embed: Generate vector embeddings for semantic search via Ollama"
],
"read_write_split": "lore = ALL reads (issues, MRs, search, who, timeline, intelligence). glab = ALL writes (create, update, approve, merge, CI/CD)."
});
// --brief: strip response_schema and example_output from every command (~60% smaller)
let mut commands = commands;
if brief {
strip_schemas(&mut commands);
}
let exit_codes = serde_json::json!({
"0": "Success",
"1": "Internal error",
"2": "Usage error (invalid flags or arguments)",
"3": "Config invalid",
"4": "Token not set",
"5": "GitLab auth failed",
"6": "Resource not found",
"7": "Rate limited",
"8": "Network error",
"9": "Database locked",
"10": "Database error",
"11": "Migration failed",
"12": "I/O error",
"13": "Transform error",
"14": "Ollama unavailable",
"15": "Ollama model not found",
"16": "Embedding failed",
"17": "Not found",
"18": "Ambiguous match",
"19": "Health check failed",
"20": "Config not found"
});
let workflows = serde_json::json!({
"first_setup": [
"lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project",
"lore --robot doctor",
"lore --robot sync"
],
"daily_sync": [
"lore --robot sync"
],
"search": [
"lore --robot search 'query' --mode hybrid"
],
"pre_flight": [
"lore --robot health"
],
"temporal_intelligence": [
"lore --robot sync",
"lore --robot timeline '<keyword>' --since 30d",
"lore --robot timeline '<keyword>' --depth 2 --expand-mentions"
],
"people_intelligence": [
"lore --robot who src/path/to/feature/",
"lore --robot who @username",
"lore --robot who @username --reviews",
"lore --robot who --active --since 7d",
"lore --robot who --overlap src/path/",
"lore --robot who --path README.md"
]
});
// Phase 3: Deprecated command aliases
let aliases = serde_json::json!({
"list issues": "issues",
"list mrs": "mrs",
"show issue <IID>": "issues <IID>",
"show mr <IID>": "mrs <IID>",
"auth-test": "auth",
"sync-status": "status"
});
// Phase 3: Clap error codes (emitted by handle_clap_error)
let clap_error_codes = serde_json::json!({
"UNKNOWN_COMMAND": "Unrecognized subcommand (includes fuzzy suggestion)",
"UNKNOWN_FLAG": "Unrecognized command-line flag",
"MISSING_REQUIRED": "Required argument not provided",
"INVALID_VALUE": "Invalid value for argument",
"TOO_MANY_VALUES": "Too many values provided",
"TOO_FEW_VALUES": "Too few values provided",
"ARGUMENT_CONFLICT": "Conflicting arguments",
"MISSING_COMMAND": "No subcommand provided (in non-robot mode, shows help)",
"HELP_REQUESTED": "Help or version flag used",
"PARSE_ERROR": "General parse error"
});
let config_notes = serde_json::json!({
"defaultProject": {
"type": "string?",
"description": "Fallback project path used when -p/--project is omitted. Must match a configured project path (exact or suffix). CLI -p always overrides.",
"example": "group/project"
}
});
let output = RobotDocsOutput {
ok: true,
data: RobotDocsData {
name: "lore".to_string(),
version,
description: "Local GitLab data management with semantic search".to_string(),
activation: RobotDocsActivation {
flags: vec!["--robot".to_string(), "-J".to_string(), "--json".to_string()],
env: "LORE_ROBOT=1".to_string(),
auto: "Non-TTY stdout".to_string(),
},
quick_start,
commands,
aliases,
exit_codes,
clap_error_codes,
error_format: "stderr JSON: {\"error\":{\"code\":\"...\",\"message\":\"...\",\"suggestion\":\"...\",\"actions\":[\"...\"]}}".to_string(),
workflows,
config_notes,
},
};
if robot_mode {
println!("{}", serde_json::to_string(&output)?);
} else {
println!("{}", serde_json::to_string_pretty(&output)?);
}
Ok(())
}
/// Entry point for `lore who`: people/expertise analysis for a path or person.
///
/// Loads config (honoring `--config`), falls back to the configured
/// `defaultProject` when `-p/--project` was omitted, runs the analysis,
/// and renders JSON (robot mode) or human-readable output.
fn handle_who(
    config_override: Option<&str>,
    mut args: WhoArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let config = Config::load(config_override)?;
    // CLI -p always wins; only fill in the configured default when absent.
    args.project = args.project.take().or_else(|| config.default_project.clone());
    let run = run_who(&config, &args)?;
    let elapsed_ms = timer.elapsed().as_millis() as u64;
    if robot_mode {
        print_who_json(&run, &args, elapsed_ms);
        return Ok(());
    }
    print_who_human(&run.result, run.resolved_input.project_path.as_deref());
    Ok(())
}
/// Entry point for `lore drift`: semantic-drift check for one issue/MR.
///
/// Resolves the effective project (CLI flag over config default), runs the
/// drift computation against the given `entity_type`/`iid` with `threshold`,
/// and prints JSON (robot mode) or a human summary.
async fn handle_drift(
    config_override: Option<&str>,
    entity_type: &str,
    iid: i64,
    threshold: f32,
    project: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let config = Config::load(config_override)?;
    // `effective_project` applies the configured default when `project` is None.
    let resolved_project = config.effective_project(project);
    let response = run_drift(&config, entity_type, iid, threshold, resolved_project).await?;
    let elapsed_ms = timer.elapsed().as_millis() as u64;
    if robot_mode {
        print_drift_json(&response, elapsed_ms);
    } else {
        print_drift_human(&response);
    }
    Ok(())
}
#[allow(clippy::too_many_arguments)]
/// Deprecated-alias entry point for `lore list <entity>` (issues | mrs).
///
/// Builds the appropriate filter struct from the flat flag set, runs the
/// query, then either opens the first hit in a browser, emits JSON, or
/// prints the human-readable table. Exits with status 1 on an unknown
/// entity name (CLI usage error surfaced to the shell).
async fn handle_list_compat(
    config_override: Option<&str>,
    entity: &str,
    limit: usize,
    project_filter: Option<&str>,
    state_filter: Option<&str>,
    author_filter: Option<&str>,
    assignee_filter: Option<&str>,
    label_filter: Option<&[String]>,
    milestone_filter: Option<&str>,
    since_filter: Option<&str>,
    due_before_filter: Option<&str>,
    has_due_date: bool,
    sort: &str,
    order: &str,
    open_browser: bool,
    json_output: bool,
    draft: bool,
    no_draft: bool,
    reviewer_filter: Option<&str>,
    target_branch_filter: Option<&str>,
    source_branch_filter: Option<&str>,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let config = Config::load(config_override)?;
    // CLI -p overrides; otherwise the configured default project applies.
    let effective_project = config.effective_project(project_filter);
    // State matching downstream is case-insensitive via lowercase normalization.
    let lowered_state = state_filter.map(|s| s.to_lowercase());
    match entity {
        "issues" => {
            let filters = ListFilters {
                limit,
                project: effective_project,
                state: lowered_state.as_deref(),
                author: author_filter,
                assignee: assignee_filter,
                labels: label_filter,
                milestone: milestone_filter,
                since: since_filter,
                due_before: due_before_filter,
                has_due_date,
                statuses: &[],
                sort,
                order,
            };
            let listing = run_list_issues(&config, filters)?;
            if open_browser {
                open_issue_in_browser(&listing);
                return Ok(());
            }
            if json_output {
                print_list_issues_json(&listing, timer.elapsed().as_millis() as u64, None);
            } else {
                print_list_issues(&listing);
            }
            Ok(())
        }
        "mrs" => {
            let filters = MrListFilters {
                limit,
                project: effective_project,
                state: lowered_state.as_deref(),
                author: author_filter,
                assignee: assignee_filter,
                reviewer: reviewer_filter,
                labels: label_filter,
                since: since_filter,
                draft,
                no_draft,
                target_branch: target_branch_filter,
                source_branch: source_branch_filter,
                sort,
                order,
            };
            let listing = run_list_mrs(&config, filters)?;
            if open_browser {
                open_mr_in_browser(&listing);
                return Ok(());
            }
            if json_output {
                print_list_mrs_json(&listing, timer.elapsed().as_millis() as u64, None);
            } else {
                print_list_mrs(&listing);
            }
            Ok(())
        }
        _ => {
            // Unknown entity is a usage error: report and exit non-zero.
            eprintln!("{}", style(format!("Unknown entity: {entity}")).red());
            std::process::exit(1);
        }
    }
}
/// Deprecated-alias entry point for `lore show <entity> <iid>` (issue | mr).
///
/// Resolves the effective project, fetches the single entity by IID, and
/// prints JSON (robot mode) or a human-readable view. Exits with status 1
/// on an unknown entity name.
async fn handle_show_compat(
    config_override: Option<&str>,
    entity: &str,
    iid: i64,
    project_filter: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let timer = std::time::Instant::now();
    let config = Config::load(config_override)?;
    // CLI -p overrides; otherwise the configured default project applies.
    let effective_project = config.effective_project(project_filter);
    match entity {
        "issue" => {
            let shown = run_show_issue(&config, iid, effective_project)?;
            match robot_mode {
                true => print_show_issue_json(&shown, timer.elapsed().as_millis() as u64),
                false => print_show_issue(&shown),
            }
            Ok(())
        }
        "mr" => {
            let shown = run_show_mr(&config, iid, effective_project)?;
            match robot_mode {
                true => print_show_mr_json(&shown, timer.elapsed().as_millis() as u64),
                false => print_show_mr(&shown),
            }
            Ok(())
        }
        _ => {
            // Unknown entity is a usage error: report and exit non-zero.
            eprintln!("{}", style(format!("Unknown entity: {entity}")).red());
            std::process::exit(1);
        }
    }
}