Replace tracing-indicatif with a minimal SuspendingWriter. tracing-indicatif pulled in vt100, arrayvec, and its own indicatif integration layer; instead, coordinate tracing output with progress bars via a global LazyLock MultiProgress. - Add src/cli/progress.rs: a shared MultiProgress singleton (LazyLock) plus a SuspendingWriter that suspends the bars before writing each log line, preventing interleaving/flicker - Wire all progress-bar creation through multi().add() in the sync and ingest commands - Replace IndicatifLayer in main.rs with SuspendingWriter as the writer for tracing-subscriber's fmt layer - Remove tracing-indicatif from Cargo.toml (drops the vt100 and arrayvec transitive deps) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1590 lines
48 KiB
Rust
1590 lines
48 KiB
Rust
//! Gitlore CLI entry point.
|
|
|
|
use clap::Parser;
|
|
use console::style;
|
|
use dialoguer::{Confirm, Input};
|
|
use serde::Serialize;
|
|
use tracing_subscriber::EnvFilter;
|
|
use tracing_subscriber::layer::SubscriberExt;
|
|
use tracing_subscriber::util::SubscriberInitExt;
|
|
|
|
use lore::Config;
|
|
use lore::cli::commands::{
|
|
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
|
SearchCliFilters, SyncOptions, open_issue_in_browser, open_mr_in_browser, print_count,
|
|
print_count_json, print_doctor_results, print_embed, print_embed_json, print_event_count,
|
|
print_event_count_json, print_generate_docs, print_generate_docs_json, print_ingest_summary,
|
|
print_ingest_summary_json, print_list_issues, print_list_issues_json, print_list_mrs,
|
|
print_list_mrs_json, print_search_results, print_search_results_json, print_show_issue,
|
|
print_show_issue_json, print_show_mr, print_show_mr_json, print_stats, print_stats_json,
|
|
print_sync, print_sync_json, print_sync_status, print_sync_status_json, run_auth_test,
|
|
run_count, run_count_events, run_doctor, run_embed, run_generate_docs, run_ingest, run_init,
|
|
run_list_issues, run_list_mrs, run_search, run_show_issue, run_show_mr, run_stats, run_sync,
|
|
run_sync_status,
|
|
};
|
|
use lore::cli::{
|
|
Cli, Commands, CountArgs, EmbedArgs, GenerateDocsArgs, IngestArgs, IssuesArgs, MrsArgs,
|
|
SearchArgs, StatsArgs, SyncArgs,
|
|
};
|
|
use lore::core::db::{
|
|
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
|
|
};
|
|
use lore::core::error::{LoreError, RobotErrorOutput};
|
|
use lore::core::paths::get_config_path;
|
|
use lore::core::paths::get_db_path;
|
|
|
|
/// Gitlore CLI entry point: installs SIGPIPE/log handling, parses the CLI,
/// applies color settings, dispatches to the per-command handler, and routes
/// any resulting error through the shared `handle_error` reporter.
#[tokio::main]
async fn main() {
    // Reset SIGPIPE to default behavior so piping (e.g. `lore issues | head`) doesn't panic
    #[cfg(unix)]
    unsafe {
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }

    // Initialize logging with progress-bar-aware writer.
    // SuspendingWriter suspends the shared MultiProgress before each log line,
    // preventing log output from interleaving with progress bar animations.
    tracing_subscriber::registry()
        .with(
            tracing_subscriber::fmt::layer()
                .with_target(false)
                .with_writer(lore::cli::progress::SuspendingWriter),
        )
        .with(
            // Defaults: `lore=info` for this crate, `warn` for everything else;
            // RUST_LOG (read by from_default_env) can add further directives.
            EnvFilter::from_default_env()
                .add_directive("lore=info".parse().unwrap())
                .add_directive("warn".parse().unwrap()),
        )
        .init();

    let cli = Cli::parse();
    let robot_mode = cli.is_robot_mode();

    // Apply color settings (console crate handles NO_COLOR/CLICOLOR natively in "auto" mode)
    match cli.color.as_str() {
        "never" => console::set_colors_enabled(false),
        "always" => console::set_colors_enabled(true),
        "auto" => {} // console crate handles this natively
        // clap restricts --color to the three values above.
        _ => unreachable!(),
    }

    let quiet = cli.quiet;

    // Dispatch to the matching handler; every handler returns Result so the
    // single error path at the bottom is shared.
    let result = match cli.command {
        Commands::Issues(args) => handle_issues(cli.config.as_deref(), args, robot_mode),
        Commands::Mrs(args) => handle_mrs(cli.config.as_deref(), args, robot_mode),
        Commands::Search(args) => handle_search(cli.config.as_deref(), args, robot_mode).await,
        Commands::Stats(args) => handle_stats(cli.config.as_deref(), args, robot_mode).await,
        Commands::Embed(args) => handle_embed(cli.config.as_deref(), args, robot_mode).await,
        Commands::Sync(args) => handle_sync_cmd(cli.config.as_deref(), args, robot_mode).await,
        Commands::Ingest(args) => {
            handle_ingest(cli.config.as_deref(), args, robot_mode, quiet).await
        }
        Commands::Count(args) => handle_count(cli.config.as_deref(), args, robot_mode).await,
        Commands::Status => handle_sync_status_cmd(cli.config.as_deref(), robot_mode).await,
        Commands::Auth => handle_auth_test(cli.config.as_deref(), robot_mode).await,
        Commands::Doctor => handle_doctor(cli.config.as_deref(), robot_mode).await,
        Commands::Version => handle_version(robot_mode),
        Commands::Completions { shell } => handle_completions(&shell),
        Commands::Init {
            force,
            non_interactive,
            gitlab_url,
            token_env_var,
            projects,
        } => {
            handle_init(
                cli.config.as_deref(),
                force,
                non_interactive,
                robot_mode,
                gitlab_url,
                token_env_var,
                projects,
            )
            .await
        }
        Commands::GenerateDocs(args) => {
            handle_generate_docs(cli.config.as_deref(), args, robot_mode).await
        }
        Commands::Backup => handle_backup(robot_mode),
        Commands::Reset { yes: _ } => handle_reset(robot_mode),
        Commands::Migrate => handle_migrate(cli.config.as_deref(), robot_mode).await,
        Commands::Health => handle_health(cli.config.as_deref(), robot_mode).await,
        Commands::RobotDocs => handle_robot_docs(robot_mode),

        // --- Backward-compat: deprecated aliases ---
        // Each alias warns on stderr (human mode only) and forwards to the
        // replacement handler.
        Commands::List {
            entity,
            limit,
            project,
            state,
            author,
            assignee,
            label,
            milestone,
            since,
            due_before,
            has_due_date,
            sort,
            order,
            open,
            draft,
            no_draft,
            reviewer,
            target_branch,
            source_branch,
        } => {
            if !robot_mode {
                eprintln!(
                    "{}",
                    style("warning: 'lore list' is deprecated, use 'lore issues' or 'lore mrs'")
                        .yellow()
                );
            }
            handle_list_compat(
                cli.config.as_deref(),
                &entity,
                limit,
                project.as_deref(),
                state.as_deref(),
                author.as_deref(),
                assignee.as_deref(),
                label.as_deref(),
                milestone.as_deref(),
                since.as_deref(),
                due_before.as_deref(),
                has_due_date,
                &sort,
                &order,
                open,
                robot_mode,
                draft,
                no_draft,
                reviewer.as_deref(),
                target_branch.as_deref(),
                source_branch.as_deref(),
            )
            .await
        }
        Commands::Show {
            entity,
            iid,
            project,
        } => {
            if !robot_mode {
                eprintln!(
                    "{}",
                    style(format!(
                        "warning: 'lore show' is deprecated, use 'lore {}s {}'",
                        entity, iid
                    ))
                    .yellow()
                );
            }
            handle_show_compat(
                cli.config.as_deref(),
                &entity,
                iid,
                project.as_deref(),
                robot_mode,
            )
            .await
        }
        Commands::AuthTest => {
            if !robot_mode {
                eprintln!(
                    "{}",
                    style("warning: 'lore auth-test' is deprecated, use 'lore auth'").yellow()
                );
            }
            handle_auth_test(cli.config.as_deref(), robot_mode).await
        }
        Commands::SyncStatus => {
            if !robot_mode {
                eprintln!(
                    "{}",
                    style("warning: 'lore sync-status' is deprecated, use 'lore status'").yellow()
                );
            }
            handle_sync_status_cmd(cli.config.as_deref(), robot_mode).await
        }
    };

    // handle_error never returns; it prints (JSON in robot mode) and exits.
    if let Err(e) = result {
        handle_error(e, robot_mode);
    }
}
|
|
|
|
/// Fallback error output for non-LoreError errors in robot mode.
#[derive(Serialize)]
struct FallbackErrorOutput {
    // Nested under an "error" key to mirror the RobotErrorOutput envelope.
    error: FallbackError,
}

#[derive(Serialize)]
struct FallbackError {
    // Machine-readable error code (e.g. "INTERNAL_ERROR", "AUTH_FAILED").
    code: String,
    // Human-readable error description.
    message: String,
}
|
|
|
|
/// Report a command error and terminate the process.
///
/// `LoreError`s get structured robot-mode JSON (with their own exit codes)
/// or a colored human message plus optional hint; any other error falls back
/// to a generic INTERNAL_ERROR envelope and exit code 1. Never returns.
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
    // Try to downcast to LoreError for structured output
    if let Some(gi_error) = e.downcast_ref::<LoreError>() {
        if robot_mode {
            let output = RobotErrorOutput::from(gi_error);
            // Use serde_json for safe serialization; fallback constructs JSON safely
            eprintln!(
                "{}",
                serde_json::to_string(&output).unwrap_or_else(|_| {
                    // Fallback uses serde to ensure proper escaping
                    let fallback = FallbackErrorOutput {
                        error: FallbackError {
                            code: "INTERNAL_ERROR".to_string(),
                            message: gi_error.to_string(),
                        },
                    };
                    // Last resort: a hand-written, known-valid JSON literal.
                    serde_json::to_string(&fallback)
                        .unwrap_or_else(|_| r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#.to_string())
                })
            );
            std::process::exit(gi_error.exit_code());
        } else {
            eprintln!("{} {}", style("Error:").red(), gi_error);
            if let Some(suggestion) = gi_error.suggestion() {
                eprintln!("{} {}", style("Hint:").yellow(), suggestion);
            }
            std::process::exit(gi_error.exit_code());
        }
    }

    // Fallback for non-LoreError errors - use serde for proper JSON escaping
    if robot_mode {
        let output = FallbackErrorOutput {
            error: FallbackError {
                code: "INTERNAL_ERROR".to_string(),
                message: e.to_string(),
            },
        };
        eprintln!(
            "{}",
            serde_json::to_string(&output).unwrap_or_else(|_| {
                r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#
                    .to_string()
            })
        );
    } else {
        eprintln!("{} {}", style("Error:").red(), e);
    }
    // Generic failure exit code for untyped errors.
    std::process::exit(1);
}
|
|
|
|
// ============================================================================
|
|
// Primary command handlers
|
|
// ============================================================================
|
|
|
|
/// Handle `lore issues`: show a single issue when an IID is given,
/// otherwise list issues matching the CLI filters.
fn handle_issues(
    config_override: Option<&str>,
    args: IssuesArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let config = Config::load(config_override)?;
    // Paired --x / --no-x flags: the negative flag always wins.
    let asc = args.asc && !args.no_asc;
    let has_due = args.has_due && !args.no_has_due;
    let open = args.open && !args.no_open;
    let order = if asc { "asc" } else { "desc" };

    if let Some(iid) = args.iid {
        // Show mode
        let result = run_show_issue(&config, iid, args.project.as_deref())?;
        if robot_mode {
            print_show_issue_json(&result);
        } else {
            print_show_issue(&result);
        }
    } else {
        // List mode
        let filters = ListFilters {
            limit: args.limit,
            project: args.project.as_deref(),
            state: args.state.as_deref(),
            author: args.author.as_deref(),
            assignee: args.assignee.as_deref(),
            labels: args.label.as_deref(),
            milestone: args.milestone.as_deref(),
            since: args.since.as_deref(),
            due_before: args.due_before.as_deref(),
            has_due_date: has_due,
            sort: &args.sort,
            order,
        };

        let result = run_list_issues(&config, filters)?;

        // --open takes precedence over the output-format selection.
        if open {
            open_issue_in_browser(&result);
        } else if robot_mode {
            print_list_issues_json(&result);
        } else {
            print_list_issues(&result);
        }
    }

    Ok(())
}
|
|
|
|
/// Handle `lore mrs`: show a single merge request when an IID is given,
/// otherwise list MRs matching the CLI filters.
fn handle_mrs(
    config_override: Option<&str>,
    args: MrsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let config = Config::load(config_override)?;
    // Paired --x / --no-x flags: the negative flag always wins.
    let asc = args.asc && !args.no_asc;
    let open = args.open && !args.no_open;
    let order = if asc { "asc" } else { "desc" };

    if let Some(iid) = args.iid {
        // Show mode
        let result = run_show_mr(&config, iid, args.project.as_deref())?;
        if robot_mode {
            print_show_mr_json(&result);
        } else {
            print_show_mr(&result);
        }
    } else {
        // List mode
        let filters = MrListFilters {
            limit: args.limit,
            project: args.project.as_deref(),
            state: args.state.as_deref(),
            author: args.author.as_deref(),
            assignee: args.assignee.as_deref(),
            reviewer: args.reviewer.as_deref(),
            labels: args.label.as_deref(),
            since: args.since.as_deref(),
            // draft/no_draft are passed through unmodified (both may be set);
            // combining them is left to the filter implementation.
            draft: args.draft,
            no_draft: args.no_draft,
            target_branch: args.target.as_deref(),
            source_branch: args.source.as_deref(),
            sort: &args.sort,
            order,
        };

        let result = run_list_mrs(&config, filters)?;

        // --open takes precedence over the output-format selection.
        if open {
            open_mr_in_browser(&result);
        } else if robot_mode {
            print_list_mrs_json(&result);
        } else {
            print_list_mrs(&result);
        }
    }

    Ok(())
}
|
|
|
|
/// Handle `lore ingest`: pull issues and/or merge requests from GitLab.
///
/// With an explicit entity, ingests just that entity; with none, ingests
/// issues then MRs and (in robot mode) emits one combined JSON summary.
/// Progress display is silenced in robot or quiet mode.
async fn handle_ingest(
    config_override: Option<&str>,
    args: IngestArgs,
    robot_mode: bool,
    quiet: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let config = Config::load(config_override)?;
    let display = if robot_mode || quiet {
        IngestDisplay::silent()
    } else {
        IngestDisplay::interactive()
    };

    // Paired --x / --no-x flags: the negative flag always wins.
    let force = args.force && !args.no_force;
    let full = args.full && !args.no_full;

    match args.entity.as_deref() {
        Some(resource_type) => {
            // Single entity ingest
            let result = run_ingest(
                &config,
                resource_type,
                args.project.as_deref(),
                force,
                full,
                display,
            )
            .await?;

            if robot_mode {
                print_ingest_summary_json(&result);
            } else {
                print_ingest_summary(&result);
            }
        }
        None => {
            // Ingest everything: issues then MRs
            if !robot_mode && !quiet {
                println!(
                    "{}",
                    style("Ingesting all content (issues + merge requests)...").blue()
                );
                println!();
            }

            let issues_result = run_ingest(
                &config,
                "issues",
                args.project.as_deref(),
                force,
                full,
                display,
            )
            .await?;

            let mrs_result = run_ingest(
                &config,
                "mrs",
                args.project.as_deref(),
                force,
                full,
                display,
            )
            .await?;

            // Robot mode merges both runs into a single JSON envelope;
            // human mode prints the two summaries back to back.
            if robot_mode {
                print_combined_ingest_json(&issues_result, &mrs_result);
            } else {
                print_ingest_summary(&issues_result);
                print_ingest_summary(&mrs_result);
            }
        }
    }

    Ok(())
}
|
|
|
|
/// JSON output for combined ingest (issues + mrs).
#[derive(Serialize)]
struct CombinedIngestOutput {
    // Always true on this success path; errors use the error envelopes.
    ok: bool,
    data: CombinedIngestData,
}

#[derive(Serialize)]
struct CombinedIngestData {
    // Always "all" for combined ingests.
    resource_type: String,
    issues: CombinedIngestEntityStats,
    merge_requests: CombinedIngestEntityStats,
}

/// Per-entity counters copied from an `IngestResult`.
#[derive(Serialize)]
struct CombinedIngestEntityStats {
    projects_synced: usize,
    fetched: usize,
    upserted: usize,
    labels_created: usize,
    discussions_fetched: usize,
    notes_upserted: usize,
}
|
|
|
|
fn print_combined_ingest_json(
|
|
issues: &lore::cli::commands::ingest::IngestResult,
|
|
mrs: &lore::cli::commands::ingest::IngestResult,
|
|
) {
|
|
let output = CombinedIngestOutput {
|
|
ok: true,
|
|
data: CombinedIngestData {
|
|
resource_type: "all".to_string(),
|
|
issues: CombinedIngestEntityStats {
|
|
projects_synced: issues.projects_synced,
|
|
fetched: issues.issues_fetched,
|
|
upserted: issues.issues_upserted,
|
|
labels_created: issues.labels_created,
|
|
discussions_fetched: issues.discussions_fetched,
|
|
notes_upserted: issues.notes_upserted,
|
|
},
|
|
merge_requests: CombinedIngestEntityStats {
|
|
projects_synced: mrs.projects_synced,
|
|
fetched: mrs.mrs_fetched,
|
|
upserted: mrs.mrs_upserted,
|
|
labels_created: mrs.labels_created,
|
|
discussions_fetched: mrs.discussions_fetched,
|
|
notes_upserted: mrs.notes_upserted,
|
|
},
|
|
},
|
|
};
|
|
|
|
println!("{}", serde_json::to_string(&output).unwrap());
|
|
}
|
|
|
|
async fn handle_count(
|
|
config_override: Option<&str>,
|
|
args: CountArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
|
|
if args.entity == "events" {
|
|
let counts = run_count_events(&config)?;
|
|
if robot_mode {
|
|
print_event_count_json(&counts);
|
|
} else {
|
|
print_event_count(&counts);
|
|
}
|
|
return Ok(());
|
|
}
|
|
|
|
let result = run_count(&config, &args.entity, args.for_entity.as_deref())?;
|
|
if robot_mode {
|
|
print_count_json(&result);
|
|
} else {
|
|
print_count(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_sync_status_cmd(
|
|
config_override: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
|
|
let result = run_sync_status(&config)?;
|
|
if robot_mode {
|
|
print_sync_status_json(&result);
|
|
} else {
|
|
print_sync_status(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
/// JSON output for init command.
#[derive(Serialize)]
struct InitOutput {
    // Always true on this success path; failures use the error envelopes.
    ok: bool,
    data: InitOutputData,
}

#[derive(Serialize)]
struct InitOutputData {
    // Where the config file was written.
    config_path: String,
    // Where the database lives.
    data_dir: String,
    user: InitOutputUser,
    // Projects that were validated during init.
    projects: Vec<InitOutputProject>,
}

/// The authenticated GitLab user discovered during init.
#[derive(Serialize)]
struct InitOutputUser {
    username: String,
    name: String,
}

/// A validated project (path = namespace/project, name = display name).
#[derive(Serialize)]
struct InitOutputProject {
    path: String,
    name: String,
}
|
|
|
|
fn print_init_json(result: &InitResult) {
|
|
let output = InitOutput {
|
|
ok: true,
|
|
data: InitOutputData {
|
|
config_path: result.config_path.clone(),
|
|
data_dir: result.data_dir.clone(),
|
|
user: InitOutputUser {
|
|
username: result.user.username.clone(),
|
|
name: result.user.name.clone(),
|
|
},
|
|
projects: result
|
|
.projects
|
|
.iter()
|
|
.map(|p| InitOutputProject {
|
|
path: p.path.clone(),
|
|
name: p.name.clone(),
|
|
})
|
|
.collect(),
|
|
},
|
|
};
|
|
println!("{}", serde_json::to_string(&output).unwrap());
|
|
}
|
|
|
|
/// Handle `lore init`: gather GitLab URL, token env var, and project paths,
/// then validate and write the config + database via `run_init`.
///
/// Robot mode requires all three flags (exits 2 when missing) and always
/// forces + skips prompts; human mode prompts interactively for anything
/// not supplied by flags and confirms before overwriting an existing config.
async fn handle_init(
    config_override: Option<&str>,
    force: bool,
    non_interactive: bool,
    robot_mode: bool,
    gitlab_url_flag: Option<String>,
    token_env_var_flag: Option<String>,
    projects_flag: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
    // Robot mode: require all inputs via flags, skip interactive prompts
    if robot_mode {
        // Collect the names of any required-but-absent flags.
        let missing: Vec<&str> = [
            gitlab_url_flag.is_none().then_some("--gitlab-url"),
            token_env_var_flag.is_none().then_some("--token-env-var"),
            projects_flag.is_none().then_some("--projects"),
        ]
        .into_iter()
        .flatten()
        .collect();

        if !missing.is_empty() {
            let output = RobotErrorWithSuggestion {
                error: RobotErrorSuggestionData {
                    code: "MISSING_FLAGS".to_string(),
                    message: format!("Robot mode requires flags: {}", missing.join(", ")),
                    suggestion: "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project".to_string(),
                },
            };
            eprintln!("{}", serde_json::to_string(&output)?);
            std::process::exit(2);
        }

        // Safe to unwrap below: the missing-flags check above guarantees
        // all three flags are Some.
        let project_paths: Vec<String> = projects_flag
            .unwrap()
            .split(',')
            .map(|p| p.trim().to_string())
            .filter(|p| !p.is_empty())
            .collect();

        let result = run_init(
            InitInputs {
                gitlab_url: gitlab_url_flag.unwrap(),
                token_env_var: token_env_var_flag.unwrap(),
                project_paths,
            },
            InitOptions {
                config_path: config_override.map(String::from),
                // Robot mode always overwrites and never prompts.
                force: true,
                non_interactive: true,
            },
        )
        .await?;

        print_init_json(&result);
        return Ok(());
    }

    // Human mode: interactive prompts
    let config_path = get_config_path(config_override);
    let mut confirmed_overwrite = force;

    if config_path.exists() && !force {
        // Without a TTY we can't ask, so refuse unless --force was given.
        if non_interactive {
            eprintln!(
                "{}",
                style(format!(
                    "Config file exists at {}. Use --force to overwrite.",
                    config_path.display()
                ))
                .red()
            );
            std::process::exit(2);
        }

        let confirm = Confirm::new()
            .with_prompt(format!(
                "Config file exists at {}. Overwrite?",
                config_path.display()
            ))
            .default(false)
            .interact()?;

        if !confirm {
            println!("{}", style("Cancelled.").yellow());
            std::process::exit(2);
        }
        confirmed_overwrite = true;
    }

    // Each input prefers its CLI flag; otherwise prompt with validation.
    let gitlab_url: String = if let Some(url) = gitlab_url_flag {
        url
    } else {
        Input::new()
            .with_prompt("GitLab URL")
            .default("https://gitlab.com".to_string())
            .validate_with(|input: &String| -> Result<(), &str> {
                if url::Url::parse(input).is_ok() {
                    Ok(())
                } else {
                    Err("Please enter a valid URL")
                }
            })
            .interact_text()?
    };

    let token_env_var: String = if let Some(var) = token_env_var_flag {
        var
    } else {
        Input::new()
            .with_prompt("Token environment variable name")
            .default("GITLAB_TOKEN".to_string())
            .interact_text()?
    };

    let project_paths: Vec<String> = if let Some(projects) = projects_flag {
        projects
            .split(',')
            .map(|p| p.trim().to_string())
            .filter(|p| !p.is_empty())
            .collect()
    } else {
        let project_paths_input: String = Input::new()
            .with_prompt("Project paths (comma-separated, e.g., group/project)")
            .validate_with(|input: &String| -> Result<(), &str> {
                if input.trim().is_empty() {
                    Err("Please enter at least one project path")
                } else {
                    Ok(())
                }
            })
            .interact_text()?;

        project_paths_input
            .split(',')
            .map(|p| p.trim().to_string())
            .filter(|p| !p.is_empty())
            .collect()
    };

    println!("{}", style("\nValidating configuration...").blue());

    let result = run_init(
        InitInputs {
            gitlab_url,
            token_env_var,
            project_paths,
        },
        InitOptions {
            config_path: config_override.map(String::from),
            force: confirmed_overwrite,
            non_interactive,
        },
    )
    .await?;

    // Success summary: authenticated user, validated projects, paths written.
    println!(
        "{}",
        style(format!(
            "\n✓ Authenticated as @{} ({})",
            result.user.username, result.user.name
        ))
        .green()
    );

    for project in &result.projects {
        println!(
            "{}",
            style(format!("✓ {} ({})", project.path, project.name)).green()
        );
    }

    println!(
        "{}",
        style(format!("\n✓ Config written to {}", result.config_path)).green()
    );
    println!(
        "{}",
        style(format!("✓ Database initialized at {}", result.data_dir)).green()
    );
    println!(
        "{}",
        style("\nSetup complete! Run 'lore doctor' to verify.").blue()
    );

    Ok(())
}
|
|
|
|
/// JSON output for auth-test command.
#[derive(Serialize)]
struct AuthTestOutput {
    // Always true on this success path; auth failures use FallbackErrorOutput.
    ok: bool,
    data: AuthTestData,
}

#[derive(Serialize)]
struct AuthTestData {
    // Always true when this struct is emitted (failure takes the error path).
    authenticated: bool,
    username: String,
    name: String,
    // The base URL of the GitLab instance that was contacted.
    gitlab_url: String,
}
|
|
|
|
/// Handle `lore auth` (and the deprecated `auth-test` alias): verify the
/// configured token against GitLab.
///
/// On failure, prints an AUTH_FAILED error (JSON in robot mode) and exits
/// with code 5 instead of returning an Err.
async fn handle_auth_test(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    match run_auth_test(config_override).await {
        Ok(result) => {
            if robot_mode {
                let output = AuthTestOutput {
                    ok: true,
                    data: AuthTestData {
                        authenticated: true,
                        username: result.username.clone(),
                        name: result.name.clone(),
                        gitlab_url: result.base_url.clone(),
                    },
                };
                println!("{}", serde_json::to_string(&output)?);
            } else {
                println!("Authenticated as @{} ({})", result.username, result.name);
                println!("GitLab: {}", result.base_url);
            }
            Ok(())
        }
        Err(e) => {
            if robot_mode {
                let output = FallbackErrorOutput {
                    error: FallbackError {
                        code: "AUTH_FAILED".to_string(),
                        message: e.to_string(),
                    },
                };
                eprintln!("{}", serde_json::to_string(&output)?);
            } else {
                eprintln!("{}", style(format!("Error: {e}")).red());
            }
            std::process::exit(5); // AUTH_FAILED exit code
        }
    }
}
|
|
|
|
async fn handle_doctor(
|
|
config_override: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let result = run_doctor(config_override).await;
|
|
|
|
if robot_mode {
|
|
println!("{}", serde_json::to_string_pretty(&result)?);
|
|
} else {
|
|
print_doctor_results(&result);
|
|
}
|
|
|
|
if !result.success {
|
|
std::process::exit(1);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// JSON output for version command.
#[derive(Serialize)]
struct VersionOutput {
    // Always true; the version command cannot fail.
    ok: bool,
    data: VersionData,
}

#[derive(Serialize)]
struct VersionData {
    // Crate version from Cargo.toml (CARGO_PKG_VERSION).
    version: String,
    // Build-time git hash; omitted from the JSON entirely when unknown.
    #[serde(skip_serializing_if = "Option::is_none")]
    git_hash: Option<String>,
}
|
|
|
|
fn handle_version(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|
let version = env!("CARGO_PKG_VERSION").to_string();
|
|
let git_hash = env!("GIT_HASH").to_string();
|
|
if robot_mode {
|
|
let output = VersionOutput {
|
|
ok: true,
|
|
data: VersionData {
|
|
version,
|
|
git_hash: if git_hash.is_empty() {
|
|
None
|
|
} else {
|
|
Some(git_hash)
|
|
},
|
|
},
|
|
};
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else if git_hash.is_empty() {
|
|
println!("lore version {}", version);
|
|
} else {
|
|
println!("lore version {} ({})", version, git_hash);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn handle_completions(shell: &str) -> Result<(), Box<dyn std::error::Error>> {
|
|
use clap::CommandFactory;
|
|
use clap_complete::{Shell, generate};
|
|
|
|
let shell = match shell {
|
|
"bash" => Shell::Bash,
|
|
"zsh" => Shell::Zsh,
|
|
"fish" => Shell::Fish,
|
|
"powershell" => Shell::PowerShell,
|
|
_ => unreachable!(),
|
|
};
|
|
|
|
let mut cmd = Cli::command();
|
|
generate(shell, &mut cmd, "lore", &mut std::io::stdout());
|
|
Ok(())
|
|
}
|
|
|
|
fn handle_backup(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|
if robot_mode {
|
|
let output = RobotErrorWithSuggestion {
|
|
error: RobotErrorSuggestionData {
|
|
code: "NOT_IMPLEMENTED".to_string(),
|
|
message: "The 'backup' command is not yet implemented.".to_string(),
|
|
suggestion: "Use manual database backup: cp ~/.local/share/lore/lore.db ~/.local/share/lore/lore.db.bak".to_string(),
|
|
},
|
|
};
|
|
eprintln!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
eprintln!(
|
|
"{} The 'backup' command is not yet implemented.",
|
|
style("Error:").red()
|
|
);
|
|
}
|
|
std::process::exit(1);
|
|
}
|
|
|
|
fn handle_reset(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|
if robot_mode {
|
|
let output = RobotErrorWithSuggestion {
|
|
error: RobotErrorSuggestionData {
|
|
code: "NOT_IMPLEMENTED".to_string(),
|
|
message: "The 'reset' command is not yet implemented.".to_string(),
|
|
suggestion: "Manually delete the database: rm ~/.local/share/lore/lore.db"
|
|
.to_string(),
|
|
},
|
|
};
|
|
eprintln!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
eprintln!(
|
|
"{} The 'reset' command is not yet implemented.",
|
|
style("Error:").red()
|
|
);
|
|
}
|
|
std::process::exit(1);
|
|
}
|
|
|
|
/// JSON output for migrate command.
#[derive(Serialize)]
struct MigrateOutput {
    // Always true on this success path.
    ok: bool,
    data: MigrateData,
}

#[derive(Serialize)]
struct MigrateData {
    // Schema version before running migrations.
    before_version: i32,
    // Schema version after running migrations.
    after_version: i32,
    // True when at least one migration was applied (after > before).
    migrated: bool,
}
|
|
|
|
/// JSON error output with suggestion field.
#[derive(Serialize)]
struct RobotErrorWithSuggestion {
    // Nested under an "error" key to mirror the other error envelopes.
    error: RobotErrorSuggestionData,
}

#[derive(Serialize)]
struct RobotErrorSuggestionData {
    // Machine-readable code (e.g. "NOT_IMPLEMENTED", "MISSING_FLAGS", "DB_ERROR").
    code: String,
    // Human-readable description.
    message: String,
    // A concrete remediation the caller can run.
    suggestion: String,
}
|
|
|
|
/// Handle `lore migrate`: apply any pending database schema migrations.
///
/// Exits with code 10 (DB_ERROR) when the database file does not exist yet;
/// otherwise reports before/after schema versions (JSON in robot mode).
async fn handle_migrate(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let config = Config::load(config_override)?;
    let db_path = get_db_path(config.storage.db_path.as_deref());

    if !db_path.exists() {
        if robot_mode {
            let output = RobotErrorWithSuggestion {
                error: RobotErrorSuggestionData {
                    code: "DB_ERROR".to_string(),
                    message: format!("Database not found at {}", db_path.display()),
                    suggestion: "Run 'lore init' first".to_string(),
                },
            };
            eprintln!("{}", serde_json::to_string(&output)?);
        } else {
            eprintln!(
                "{}",
                style(format!("Database not found at {}", db_path.display())).red()
            );
            eprintln!(
                "{}",
                style("Run 'lore init' first to create the database.").yellow()
            );
        }
        std::process::exit(10); // DB_ERROR exit code
    }

    let conn = create_connection(&db_path)?;
    // Record the pre-migration version so we can report what changed.
    let before_version = get_schema_version(&conn);

    if !robot_mode {
        println!(
            "{}",
            style(format!("Current schema version: {}", before_version)).blue()
        );
    }

    run_migrations(&conn)?;

    let after_version = get_schema_version(&conn);

    if robot_mode {
        let output = MigrateOutput {
            ok: true,
            data: MigrateData {
                before_version,
                after_version,
                migrated: after_version > before_version,
            },
        };
        println!("{}", serde_json::to_string(&output)?);
    } else if after_version > before_version {
        println!(
            "{}",
            style(format!(
                "Migrations applied: {} -> {}",
                before_version, after_version
            ))
            .green()
        );
    } else {
        println!("{}", style("Database is already up to date.").green());
    }

    Ok(())
}
|
|
|
|
async fn handle_stats(
|
|
config_override: Option<&str>,
|
|
args: StatsArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
// Auto-enable --check when --repair is used
|
|
let check = (args.check && !args.no_check) || args.repair;
|
|
let result = run_stats(&config, check, args.repair)?;
|
|
if robot_mode {
|
|
print_stats_json(&result);
|
|
} else {
|
|
print_stats(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
/// Handle `lore search`: run a full-text query with optional filters.
///
/// Wall-clock timing is measured around the search and reported only in the
/// robot-mode JSON output.
async fn handle_search(
    config_override: Option<&str>,
    args: SearchArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let config = Config::load(config_override)?;
    // Paired --explain / --no-explain flags: the negative flag wins.
    let explain = args.explain && !args.no_explain;

    // "raw" passes the query straight to FTS; anything else uses the
    // sanitized Safe mode.
    let fts_mode = match args.fts_mode.as_str() {
        "raw" => lore::search::FtsQueryMode::Raw,
        _ => lore::search::FtsQueryMode::Safe,
    };

    let cli_filters = SearchCliFilters {
        source_type: args.source_type,
        author: args.author,
        project: args.project,
        labels: args.label,
        path: args.path,
        after: args.after,
        updated_after: args.updated_after,
        limit: args.limit,
    };

    let start = std::time::Instant::now();
    let response = run_search(&config, &args.query, cli_filters, fts_mode, explain)?;
    let elapsed_ms = start.elapsed().as_millis() as u64;

    if robot_mode {
        print_search_results_json(&response, elapsed_ms);
    } else {
        print_search_results(&response);
    }
    Ok(())
}
|
|
|
|
async fn handle_generate_docs(
|
|
config_override: Option<&str>,
|
|
args: GenerateDocsArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
|
|
let result = run_generate_docs(&config, args.full, args.project.as_deref())?;
|
|
if robot_mode {
|
|
print_generate_docs_json(&result);
|
|
} else {
|
|
print_generate_docs(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_embed(
|
|
config_override: Option<&str>,
|
|
args: EmbedArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
let full = args.full && !args.no_full;
|
|
let retry_failed = args.retry_failed && !args.no_retry_failed;
|
|
let result = run_embed(&config, full, retry_failed).await?;
|
|
if robot_mode {
|
|
print_embed_json(&result);
|
|
} else {
|
|
print_embed(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
/// Handle `lore sync`: full pipeline (ingest, docs, embeddings, events),
/// individually skippable via the --no-* flags. The total wall-clock time
/// is measured here and passed to the printers.
async fn handle_sync_cmd(
    config_override: Option<&str>,
    args: SyncArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let mut config = Config::load(config_override)?;
    // --no-events both disables event fetching in the config and is passed
    // through in the options below.
    if args.no_events {
        config.sync.fetch_resource_events = false;
    }
    let options = SyncOptions {
        // Paired --x / --no-x flags: the negative flag always wins.
        full: args.full && !args.no_full,
        force: args.force && !args.no_force,
        no_embed: args.no_embed,
        no_docs: args.no_docs,
        no_events: args.no_events,
        robot_mode,
    };

    let start = std::time::Instant::now();
    let result = run_sync(&config, options).await?;
    let elapsed = start.elapsed();

    if robot_mode {
        print_sync_json(&result, elapsed.as_millis() as u64);
    } else {
        print_sync(&result, elapsed);
    }
    Ok(())
}
|
|
|
|
// ============================================================================
|
|
// Health + Robot-docs handlers
|
|
// ============================================================================
|
|
|
|
/// JSON output for health command.
#[derive(Serialize)]
struct HealthOutput {
    /// Always `true` when this struct is emitted: the command itself ran.
    /// The actual health verdict lives in `data.healthy`.
    ok: bool,
    /// Individual check results.
    data: HealthData,
}
|
|
|
|
#[derive(Serialize)]
struct HealthData {
    /// True only when config, database, and schema checks all pass.
    healthy: bool,
    /// Config file exists at the resolved path.
    config_found: bool,
    /// Database file exists on disk.
    db_found: bool,
    /// Schema version is at least the latest known migration version.
    schema_current: bool,
    /// Schema version read from the database (0 when unavailable).
    schema_version: i32,
}
|
|
|
|
async fn handle_health(
|
|
config_override: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config_path = get_config_path(config_override);
|
|
let config_found = config_path.exists();
|
|
|
|
let (db_found, schema_version, schema_current) = if config_found {
|
|
match Config::load(config_override) {
|
|
Ok(config) => {
|
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
|
if db_path.exists() {
|
|
match create_connection(&db_path) {
|
|
Ok(conn) => {
|
|
let version = get_schema_version(&conn);
|
|
(true, version, version >= LATEST_SCHEMA_VERSION)
|
|
}
|
|
Err(_) => (true, 0, false),
|
|
}
|
|
} else {
|
|
(false, 0, false)
|
|
}
|
|
}
|
|
Err(_) => (false, 0, false),
|
|
}
|
|
} else {
|
|
(false, 0, false)
|
|
};
|
|
|
|
let healthy = config_found && db_found && schema_current;
|
|
|
|
if robot_mode {
|
|
let output = HealthOutput {
|
|
ok: true,
|
|
data: HealthData {
|
|
healthy,
|
|
config_found,
|
|
db_found,
|
|
schema_current,
|
|
schema_version,
|
|
},
|
|
};
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
let status = |ok: bool| {
|
|
if ok {
|
|
style("pass").green()
|
|
} else {
|
|
style("FAIL").red()
|
|
}
|
|
};
|
|
println!(
|
|
"Config: {} ({})",
|
|
status(config_found),
|
|
config_path.display()
|
|
);
|
|
println!("DB: {}", status(db_found));
|
|
println!("Schema: {} (v{})", status(schema_current), schema_version);
|
|
println!();
|
|
if healthy {
|
|
println!("{}", style("Healthy").green().bold());
|
|
} else {
|
|
println!(
|
|
"{}",
|
|
style("Unhealthy - run 'lore doctor' for details")
|
|
.red()
|
|
.bold()
|
|
);
|
|
}
|
|
}
|
|
|
|
if !healthy {
|
|
std::process::exit(1);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// JSON output for robot-docs command.
#[derive(Serialize)]
struct RobotDocsOutput {
    /// Always `true` when this struct is emitted: the command itself ran.
    ok: bool,
    /// The self-discovery manifest payload.
    data: RobotDocsData,
}
|
|
|
|
#[derive(Serialize)]
struct RobotDocsData {
    /// Tool name ("lore").
    name: String,
    /// Crate version baked in at compile time.
    version: String,
    /// One-line tool description.
    description: String,
    /// How agents can activate robot (JSON) mode.
    activation: RobotDocsActivation,
    /// Per-command metadata (description, flags, example), as free-form JSON.
    commands: serde_json::Value,
    /// Mapping of process exit codes to their meanings, as free-form JSON.
    exit_codes: serde_json::Value,
    /// Shape of the error JSON emitted on stderr.
    error_format: String,
    /// Canned command sequences for common tasks, as free-form JSON.
    workflows: serde_json::Value,
}
|
|
|
|
#[derive(Serialize)]
struct RobotDocsActivation {
    /// CLI flags that enable robot mode.
    flags: Vec<String>,
    /// Environment variable that enables robot mode.
    env: String,
    /// Condition under which robot mode is auto-enabled.
    auto: String,
}
|
|
|
|
/// Handle the `robot-docs` subcommand: print the agent self-discovery
/// manifest describing activation, every command, exit codes, the error
/// format, and common workflows.
///
/// In robot mode the manifest is printed as compact single-line JSON;
/// otherwise it is pretty-printed for human inspection.
fn handle_robot_docs(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
    // Version is baked in at compile time from Cargo.toml.
    let version = env!("CARGO_PKG_VERSION").to_string();

    // Per-command metadata: description, accepted flags, and a usage example.
    let commands = serde_json::json!({
        "init": {
            "description": "Initialize configuration and database",
            "flags": ["--force", "--non-interactive", "--gitlab-url <URL>", "--token-env-var <VAR>", "--projects <paths>"],
            "robot_flags": ["--gitlab-url", "--token-env-var", "--projects"],
            "example": "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project"
        },
        "health": {
            "description": "Quick pre-flight check: config, database, schema version",
            "flags": [],
            "example": "lore --robot health"
        },
        "auth": {
            "description": "Verify GitLab authentication",
            "flags": [],
            "example": "lore --robot auth"
        },
        "doctor": {
            "description": "Full environment health check (config, auth, DB, Ollama)",
            "flags": [],
            "example": "lore --robot doctor"
        },
        "ingest": {
            "description": "Sync data from GitLab",
            "flags": ["--project <path>", "--force", "--full", "<entity: issues|mrs>"],
            "example": "lore --robot ingest issues --project group/repo"
        },
        "sync": {
            "description": "Full sync pipeline: ingest -> generate-docs -> embed",
            "flags": ["--full", "--force", "--no-embed", "--no-docs"],
            "example": "lore --robot sync"
        },
        "issues": {
            "description": "List or show issues",
            "flags": ["<IID>", "--limit", "--state", "--project", "--author", "--assignee", "--label", "--milestone", "--since", "--due-before", "--has-due", "--sort", "--asc"],
            "example": "lore --robot issues --state opened --limit 10"
        },
        "mrs": {
            "description": "List or show merge requests",
            "flags": ["<IID>", "--limit", "--state", "--project", "--author", "--assignee", "--reviewer", "--label", "--since", "--draft", "--no-draft", "--target", "--source", "--sort", "--asc"],
            "example": "lore --robot mrs --state opened"
        },
        "search": {
            "description": "Search indexed documents (lexical, hybrid, semantic)",
            "flags": ["<QUERY>", "--mode", "--type", "--author", "--project", "--label", "--path", "--after", "--updated-after", "--limit", "--explain", "--fts-mode"],
            "example": "lore --robot search 'authentication bug' --mode hybrid --limit 10"
        },
        "count": {
            "description": "Count entities in local database",
            "flags": ["<entity: issues|mrs|discussions|notes>", "--for <issue|mr>"],
            "example": "lore --robot count issues"
        },
        "stats": {
            "description": "Show document and index statistics",
            "flags": ["--check", "--repair"],
            "example": "lore --robot stats"
        },
        "status": {
            "description": "Show sync state (cursors, last sync times)",
            "flags": [],
            "example": "lore --robot status"
        },
        "generate-docs": {
            "description": "Generate searchable documents from ingested data",
            "flags": ["--full", "--project <path>"],
            "example": "lore --robot generate-docs --full"
        },
        "embed": {
            "description": "Generate vector embeddings for documents via Ollama",
            "flags": ["--full", "--retry-failed"],
            "example": "lore --robot embed"
        },
        "migrate": {
            "description": "Run pending database migrations",
            "flags": [],
            "example": "lore --robot migrate"
        },
        "version": {
            "description": "Show version information",
            "flags": [],
            "example": "lore --robot version"
        },
        "robot-docs": {
            "description": "This command (agent self-discovery manifest)",
            "flags": [],
            "example": "lore robot-docs"
        }
    });

    // Mapping of process exit codes to their meanings.
    let exit_codes = serde_json::json!({
        "0": "Success",
        "1": "Internal error / health check failed / not implemented",
        "2": "Usage error (invalid flags or arguments)",
        "3": "Config invalid",
        "4": "Token not set",
        "5": "GitLab auth failed",
        "6": "Resource not found",
        "7": "Rate limited",
        "8": "Network error",
        "9": "Database locked",
        "10": "Database error",
        "11": "Migration failed",
        "12": "I/O error",
        "13": "Transform error",
        "14": "Ollama unavailable",
        "15": "Ollama model not found",
        "16": "Embedding failed",
        "17": "Not found",
        "18": "Ambiguous match",
        "20": "Config not found"
    });

    // Canned command sequences agents can follow for common tasks.
    let workflows = serde_json::json!({
        "first_setup": [
            "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project",
            "lore --robot doctor",
            "lore --robot sync"
        ],
        "daily_sync": [
            "lore --robot sync"
        ],
        "search": [
            "lore --robot search 'query' --mode hybrid"
        ],
        "pre_flight": [
            "lore --robot health"
        ]
    });

    let output = RobotDocsOutput {
        ok: true,
        data: RobotDocsData {
            name: "lore".to_string(),
            version,
            description: "Local GitLab data management with semantic search".to_string(),
            activation: RobotDocsActivation {
                flags: vec!["--robot".to_string(), "-J".to_string(), "--json".to_string()],
                env: "LORE_ROBOT=1".to_string(),
                auto: "Non-TTY stdout".to_string(),
            },
            commands,
            exit_codes,
            error_format: "stderr JSON: {\"error\":{\"code\":\"...\",\"message\":\"...\",\"suggestion\":\"...\"}}".to_string(),
            workflows,
        },
    };

    // Compact JSON for machines, pretty-printed JSON for humans.
    if robot_mode {
        println!("{}", serde_json::to_string(&output)?);
    } else {
        println!("{}", serde_json::to_string_pretty(&output)?);
    }

    Ok(())
}
|
|
|
|
// ============================================================================
|
|
// Backward-compat handlers (deprecated, delegate to new handlers)
|
|
// ============================================================================
|
|
|
|
#[allow(clippy::too_many_arguments)]
|
|
async fn handle_list_compat(
|
|
config_override: Option<&str>,
|
|
entity: &str,
|
|
limit: usize,
|
|
project_filter: Option<&str>,
|
|
state_filter: Option<&str>,
|
|
author_filter: Option<&str>,
|
|
assignee_filter: Option<&str>,
|
|
label_filter: Option<&[String]>,
|
|
milestone_filter: Option<&str>,
|
|
since_filter: Option<&str>,
|
|
due_before_filter: Option<&str>,
|
|
has_due_date: bool,
|
|
sort: &str,
|
|
order: &str,
|
|
open_browser: bool,
|
|
json_output: bool,
|
|
draft: bool,
|
|
no_draft: bool,
|
|
reviewer_filter: Option<&str>,
|
|
target_branch_filter: Option<&str>,
|
|
source_branch_filter: Option<&str>,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
|
|
match entity {
|
|
"issues" => {
|
|
let filters = ListFilters {
|
|
limit,
|
|
project: project_filter,
|
|
state: state_filter,
|
|
author: author_filter,
|
|
assignee: assignee_filter,
|
|
labels: label_filter,
|
|
milestone: milestone_filter,
|
|
since: since_filter,
|
|
due_before: due_before_filter,
|
|
has_due_date,
|
|
sort,
|
|
order,
|
|
};
|
|
|
|
let result = run_list_issues(&config, filters)?;
|
|
|
|
if open_browser {
|
|
open_issue_in_browser(&result);
|
|
} else if json_output {
|
|
print_list_issues_json(&result);
|
|
} else {
|
|
print_list_issues(&result);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
"mrs" => {
|
|
let filters = MrListFilters {
|
|
limit,
|
|
project: project_filter,
|
|
state: state_filter,
|
|
author: author_filter,
|
|
assignee: assignee_filter,
|
|
reviewer: reviewer_filter,
|
|
labels: label_filter,
|
|
since: since_filter,
|
|
draft,
|
|
no_draft,
|
|
target_branch: target_branch_filter,
|
|
source_branch: source_branch_filter,
|
|
sort,
|
|
order,
|
|
};
|
|
|
|
let result = run_list_mrs(&config, filters)?;
|
|
|
|
if open_browser {
|
|
open_mr_in_browser(&result);
|
|
} else if json_output {
|
|
print_list_mrs_json(&result);
|
|
} else {
|
|
print_list_mrs(&result);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
_ => {
|
|
eprintln!("{}", style(format!("Unknown entity: {entity}")).red());
|
|
std::process::exit(1);
|
|
}
|
|
}
|
|
}
|
|
|
|
async fn handle_show_compat(
|
|
config_override: Option<&str>,
|
|
entity: &str,
|
|
iid: i64,
|
|
project_filter: Option<&str>,
|
|
json: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
|
|
match entity {
|
|
"issue" => {
|
|
let result = run_show_issue(&config, iid, project_filter)?;
|
|
if json {
|
|
print_show_issue_json(&result);
|
|
} else {
|
|
print_show_issue(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
"mr" => {
|
|
let result = run_show_mr(&config, iid, project_filter)?;
|
|
if json {
|
|
print_show_mr_json(&result);
|
|
} else {
|
|
print_show_mr(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
_ => {
|
|
eprintln!("{}", style(format!("Unknown entity: {entity}")).red());
|
|
std::process::exit(1);
|
|
}
|
|
}
|
|
}
|