Implement a personal work dashboard that shows everything relevant to the
configured GitLab user: open issues assigned to them, MRs they authored,
MRs they are reviewing, and a chronological activity feed.
Design decisions:
- Attention state computed from GitLab interaction data (comments, reviews)
with no local state tracking -- purely derived from existing synced data
- Username resolution: --user flag > config.gitlab.username > actionable error
- Project scoping: --project (fuzzy) | --all | default_project | all
- Section filtering: --issues, --mrs, --activity (combinable, default = all)
- Activity feed controlled by --since (default 30d); work item sections
always show all open items regardless of --since
Architecture (src/cli/commands/me/):
- types.rs: MeDashboard, MeSummary, AttentionState data types
- queries.rs: 4 SQL queries (open_issues, authored_mrs, reviewing_mrs,
activity) using existing issue_assignees, mr_reviewers, notes tables
- render_human.rs: colored terminal output with attention state indicators
- render_robot.rs: {ok, data, meta} JSON envelope with field selection
- mod.rs: orchestration (resolve_username, resolve_project_scope, run_me)
- me_tests.rs: comprehensive unit tests covering all query paths
Config additions:
- New optional gitlab.username field in config.json
- Tests for config with/without username
- Existing test configs updated with username: None
CLI wiring:
- MeArgs struct with section filter, since, project, all, user, fields flags
- Autocorrect support for me command flags
- LoreRenderer::try_get() for safe renderer access in me module
- Robot mode field selection presets (me_items, me_activity)
- handle_me() in main.rs command dispatch
Also fixes duplicate assertions in surgical sync tests (removed 6
duplicate assert! lines that were copy-paste artifacts).
Spec: docs/lore-me-spec.md
3356 lines · 121 KiB · Rust
use clap::Parser;
|
|
use dialoguer::{Confirm, Input};
|
|
use serde::Serialize;
|
|
use strsim::jaro_winkler;
|
|
use tracing_subscriber::Layer;
|
|
use tracing_subscriber::layer::SubscriberExt;
|
|
use tracing_subscriber::util::SubscriberInitExt;
|
|
|
|
use lore::Config;
|
|
use lore::cli::autocorrect::{self, CorrectionResult};
|
|
use lore::cli::commands::{
|
|
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
|
NoteListFilters, SearchCliFilters, SyncOptions, TimelineParams, open_issue_in_browser,
|
|
open_mr_in_browser, parse_trace_path, print_count, print_count_json, print_cron_install,
|
|
print_cron_install_json, print_cron_status, print_cron_status_json, print_cron_uninstall,
|
|
print_cron_uninstall_json, print_doctor_results, print_drift_human, print_drift_json,
|
|
print_dry_run_preview, print_dry_run_preview_json, print_embed, print_embed_json,
|
|
print_event_count, print_event_count_json, print_file_history, print_file_history_json,
|
|
print_generate_docs, print_generate_docs_json, print_ingest_summary, print_ingest_summary_json,
|
|
print_list_issues, print_list_issues_json, print_list_mrs, print_list_mrs_json,
|
|
print_list_notes, print_list_notes_json, print_search_results, print_search_results_json,
|
|
print_show_issue, print_show_issue_json, print_show_mr, print_show_mr_json, print_stats,
|
|
print_stats_json, print_sync, print_sync_json, print_sync_status, print_sync_status_json,
|
|
print_timeline, print_timeline_json_with_meta, print_trace, print_trace_json, print_who_human,
|
|
print_who_json, query_notes, run_auth_test, run_count, run_count_events, run_cron_install,
|
|
run_cron_status, run_cron_uninstall, run_doctor, run_drift, run_embed, run_file_history,
|
|
run_generate_docs, run_ingest, run_ingest_dry_run, run_init, run_list_issues, run_list_mrs,
|
|
run_me, run_search, run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status,
|
|
run_timeline, run_token_set, run_token_show, run_who,
|
|
};
|
|
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
|
|
use lore::cli::robot::{RobotMeta, strip_schemas};
|
|
use lore::cli::{
|
|
Cli, Commands, CountArgs, CronAction, CronArgs, EmbedArgs, FileHistoryArgs, GenerateDocsArgs,
|
|
IngestArgs, IssuesArgs, MeArgs, MrsArgs, NotesArgs, SearchArgs, StatsArgs, SyncArgs,
|
|
TimelineArgs, TokenAction, TokenArgs, TraceArgs, WhoArgs,
|
|
};
|
|
use lore::core::db::{
|
|
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
|
|
};
|
|
use lore::core::dependent_queue::release_all_locked_jobs;
|
|
use lore::core::error::{LoreError, RobotErrorOutput};
|
|
use lore::core::logging;
|
|
use lore::core::metrics::MetricsLayer;
|
|
use lore::core::path_resolver::{build_path_query, normalize_repo_path};
|
|
use lore::core::paths::{get_config_path, get_db_path, get_log_dir};
|
|
use lore::core::project::resolve_project;
|
|
use lore::core::shutdown::ShutdownSignal;
|
|
use lore::core::sync_run::SyncRunRecorder;
|
|
use lore::core::trace::run_trace;
|
|
|
|
#[tokio::main]
async fn main() {
    // Restore default SIGPIPE handling so piping stdout into e.g. `head`
    // terminates the process quietly instead of erroring on a broken pipe.
    #[cfg(unix)]
    unsafe {
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }

    // Phase 1: Early robot mode detection for structured clap errors.
    // Must happen before parsing because parse failures themselves need to
    // be emitted as JSON when an agent is driving the CLI.
    let robot_mode_early = Cli::detect_robot_mode_from_env();

    // Phase 1.5: Pre-clap arg correction for agent typo tolerance.
    let raw_args: Vec<String> = std::env::args().collect();
    let correction_result = autocorrect::correct_args(raw_args, robot_mode_early);

    // Emit correction warnings to stderr (before clap parsing, so they appear
    // even if clap still fails on something else)
    if !correction_result.corrections.is_empty() {
        emit_correction_warnings(&correction_result, robot_mode_early);
    }

    // Parse the (possibly corrected) argv; on failure, handle_clap_error
    // diverges (prints a structured or human error and exits).
    let cli = match Cli::try_parse_from(&correction_result.args) {
        Ok(cli) => cli,
        Err(e) => {
            handle_clap_error(e, robot_mode_early, &correction_result);
        }
    };
    let robot_mode = cli.is_robot_mode();

    // Logging config is loaded independently of the main command flow; a
    // broken config falls back to defaults so logging setup never aborts.
    let logging_config = lore::Config::load(cli.config.as_deref())
        .map(|c| c.logging)
        .unwrap_or_default();

    let log_dir = get_log_dir(logging_config.log_dir.as_deref());
    if logging_config.file_logging && logging_config.retention_days > 0 {
        logging::cleanup_old_logs(&log_dir, logging_config.retention_days);
    }

    let stderr_filter = logging::build_stderr_filter(cli.verbose, cli.quiet);
    let metrics_layer = MetricsLayer::new();

    let registry = tracing_subscriber::registry();

    // Keeps the non-blocking file appender's worker alive for the whole run;
    // dropping the guard would stop file log flushing.
    let _file_guard: Option<tracing_appender::non_blocking::WorkerGuard>;

    // Four-way logging setup: {json, human} stderr format x {file logging
    // on, off}. The branches differ only in layer composition; `init()` can
    // only be called once, hence the duplication instead of a builder.
    if cli.log_format == "json" {
        let stderr_layer = tracing_subscriber::fmt::layer()
            .json()
            // NOTE(review): SuspendingWriter presumably pauses progress bars
            // while log lines are written -- confirm in cli::progress.
            .with_writer(lore::cli::progress::SuspendingWriter)
            .with_filter(stderr_filter);

        if logging_config.file_logging {
            let file_filter = logging::build_file_filter();
            std::fs::create_dir_all(&log_dir).ok();
            let file_appender = tracing_appender::rolling::daily(&log_dir, "lore");
            let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
            _file_guard = Some(guard);
            let file_layer = tracing_subscriber::fmt::layer()
                .json()
                .with_writer(non_blocking)
                .with_filter(file_filter);
            registry
                .with(stderr_layer)
                .with(file_layer)
                .with(metrics_layer.clone())
                .init();
        } else {
            _file_guard = None;
            registry
                .with(stderr_layer)
                .with(metrics_layer.clone())
                .init();
        }
    } else {
        let stderr_layer = tracing_subscriber::fmt::layer()
            .event_format(logging::CompactHumanFormat)
            .with_writer(lore::cli::progress::SuspendingWriter)
            .with_filter(stderr_filter);

        if logging_config.file_logging {
            let file_filter = logging::build_file_filter();
            std::fs::create_dir_all(&log_dir).ok();
            let file_appender = tracing_appender::rolling::daily(&log_dir, "lore");
            let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
            _file_guard = Some(guard);
            // File output stays JSON even when stderr is human-formatted.
            let file_layer = tracing_subscriber::fmt::layer()
                .json()
                .with_writer(non_blocking)
                .with_filter(file_filter);
            registry
                .with(stderr_layer)
                .with(file_layer)
                .with(metrics_layer.clone())
                .init();
        } else {
            _file_guard = None;
            registry
                .with(stderr_layer)
                .with(metrics_layer.clone())
                .init();
        }
    }

    // Icon mode is independent of color flags; robot mode still defaults to ASCII.
    let glyphs = GlyphMode::detect(cli.icons.as_deref(), robot_mode);

    // NO_COLOR (when set to a non-empty value) overrides the --color flag,
    // per the no-color.org convention.
    if std::env::var("NO_COLOR").is_ok_and(|v| !v.is_empty()) {
        LoreRenderer::init(ColorMode::Never, glyphs);
        console::set_colors_enabled(false);
    } else {
        match cli.color.as_str() {
            "never" => {
                LoreRenderer::init(ColorMode::Never, glyphs);
                console::set_colors_enabled(false);
            }
            "always" => {
                LoreRenderer::init(ColorMode::Always, glyphs);
                console::set_colors_enabled(true);
            }
            "auto" => {
                LoreRenderer::init(ColorMode::Auto, glyphs);
            }
            // Unknown values degrade gracefully to auto with a warning.
            other => {
                LoreRenderer::init(ColorMode::Auto, glyphs);
                eprintln!("Warning: unknown color mode '{}', using auto", other);
            }
        }
    }

    let quiet = cli.quiet;

    // Command dispatch. Every handler returns Result so errors are funneled
    // through a single handle_error exit path below.
    let result = match cli.command {
        // Phase 2: Handle no-args case - in robot mode, output robot-docs; otherwise show help
        None => {
            if robot_mode {
                handle_robot_docs(robot_mode, false)
            } else {
                use clap::CommandFactory;
                let mut cmd = Cli::command();
                cmd.print_help().ok();
                println!();
                Ok(())
            }
        }
        Some(Commands::Issues(args)) => handle_issues(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Mrs(args)) => handle_mrs(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Notes(args)) => handle_notes(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Search(args)) => {
            handle_search(cli.config.as_deref(), args, robot_mode).await
        }
        Some(Commands::Timeline(args)) => {
            handle_timeline(cli.config.as_deref(), args, robot_mode).await
        }
        Some(Commands::Who(args)) => handle_who(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Me(args)) => handle_me(cli.config.as_deref(), args, robot_mode),
        Some(Commands::FileHistory(args)) => {
            handle_file_history(cli.config.as_deref(), args, robot_mode)
        }
        Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Cron(args)) => handle_cron(cli.config.as_deref(), args, robot_mode),
        Some(Commands::Token(args)) => handle_token(cli.config.as_deref(), args, robot_mode).await,
        Some(Commands::Drift {
            entity_type,
            iid,
            threshold,
            project,
        }) => {
            handle_drift(
                cli.config.as_deref(),
                &entity_type,
                iid,
                threshold,
                project.as_deref(),
                robot_mode,
            )
            .await
        }
        Some(Commands::Stats(args)) => handle_stats(cli.config.as_deref(), args, robot_mode).await,
        Some(Commands::Embed(args)) => handle_embed(cli.config.as_deref(), args, robot_mode).await,
        Some(Commands::Sync(args)) => {
            handle_sync_cmd(cli.config.as_deref(), args, robot_mode, &metrics_layer).await
        }
        Some(Commands::Ingest(args)) => {
            handle_ingest(
                cli.config.as_deref(),
                args,
                robot_mode,
                quiet,
                &metrics_layer,
            )
            .await
        }
        Some(Commands::Count(args)) => handle_count(cli.config.as_deref(), args, robot_mode).await,
        Some(Commands::Status) => handle_sync_status_cmd(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Auth) => handle_auth_test(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Doctor) => handle_doctor(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Version) => handle_version(robot_mode),
        Some(Commands::Completions { shell }) => handle_completions(&shell),
        Some(Commands::Init {
            force,
            non_interactive,
            gitlab_url,
            token_env_var,
            projects,
            default_project,
        }) => {
            handle_init(
                cli.config.as_deref(),
                force,
                non_interactive,
                robot_mode,
                gitlab_url,
                token_env_var,
                projects,
                default_project,
            )
            .await
        }
        Some(Commands::GenerateDocs(args)) => {
            handle_generate_docs(cli.config.as_deref(), args, robot_mode).await
        }
        Some(Commands::Backup) => handle_backup(robot_mode),
        Some(Commands::Reset { yes: _ }) => handle_reset(robot_mode),
        Some(Commands::Migrate) => handle_migrate(cli.config.as_deref(), robot_mode).await,
        Some(Commands::Health) => handle_health(cli.config.as_deref(), robot_mode).await,
        Some(Commands::RobotDocs { brief }) => handle_robot_docs(robot_mode, brief),

        // Deprecated alias: 'lore list' -> 'lore issues' / 'lore mrs'.
        // Warns (JSON in robot mode, colored text otherwise) then delegates.
        Some(Commands::List {
            entity,
            limit,
            project,
            state,
            author,
            assignee,
            label,
            milestone,
            since,
            due_before,
            has_due_date,
            sort,
            order,
            open,
            draft,
            no_draft,
            reviewer,
            target_branch,
            source_branch,
        }) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore list' is deprecated, use 'lore issues' or 'lore mrs'","successor":"issues / mrs"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    Theme::warning().render(
                        "warning: 'lore list' is deprecated, use 'lore issues' or 'lore mrs'"
                    )
                );
            }
            handle_list_compat(
                cli.config.as_deref(),
                &entity,
                limit,
                project.as_deref(),
                state.as_deref(),
                author.as_deref(),
                assignee.as_deref(),
                label.as_deref(),
                milestone.as_deref(),
                since.as_deref(),
                due_before.as_deref(),
                has_due_date,
                &sort,
                &order,
                open,
                robot_mode,
                draft,
                no_draft,
                reviewer.as_deref(),
                target_branch.as_deref(),
                source_branch.as_deref(),
            )
            .await
        }
        // Deprecated alias: 'lore show <entity> <iid>' -> 'lore issues/mrs <iid>'.
        Some(Commands::Show {
            entity,
            iid,
            project,
        }) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore show' is deprecated, use 'lore {entity}s {iid}'","successor":"{entity}s"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    Theme::warning().render(&format!(
                        "warning: 'lore show' is deprecated, use 'lore {}s {}'",
                        entity, iid
                    ))
                );
            }
            handle_show_compat(
                cli.config.as_deref(),
                &entity,
                iid,
                project.as_deref(),
                robot_mode,
            )
            .await
        }
        // Deprecated alias: 'lore auth-test' -> 'lore auth'.
        Some(Commands::AuthTest) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore auth-test' is deprecated, use 'lore auth'","successor":"auth"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    Theme::warning()
                        .render("warning: 'lore auth-test' is deprecated, use 'lore auth'")
                );
            }
            handle_auth_test(cli.config.as_deref(), robot_mode).await
        }
        // Deprecated alias: 'lore sync-status' -> 'lore status'.
        Some(Commands::SyncStatus) => {
            if robot_mode {
                eprintln!(
                    r#"{{"warning":{{"type":"DEPRECATED","message":"'lore sync-status' is deprecated, use 'lore status'","successor":"status"}}}}"#
                );
            } else {
                eprintln!(
                    "{}",
                    Theme::warning()
                        .render("warning: 'lore sync-status' is deprecated, use 'lore status'")
                );
            }
            handle_sync_status_cmd(cli.config.as_deref(), robot_mode).await
        }
    };

    // Single exit path for all handler errors; handle_error never returns.
    if let Err(e) = result {
        handle_error(e, robot_mode);
    }
}
|
|
|
|
/// Last-resort robot-mode error envelope used when a structured
/// `RobotErrorOutput` cannot be produced (or for non-`LoreError` failures).
/// Serializes as `{"error":{"code":...,"message":...}}`.
#[derive(Serialize)]
struct FallbackErrorOutput {
    error: FallbackError,
}
|
|
|
|
/// Payload of [`FallbackErrorOutput`]: a machine-readable code (here always
/// "INTERNAL_ERROR" at the call sites in this file) plus a human-readable
/// message.
#[derive(Serialize)]
struct FallbackError {
    code: String,
    message: String,
}
|
|
|
|
/// Terminal error reporter: prints the error to stderr (JSON envelope in
/// robot mode, themed text otherwise) and exits the process.
///
/// `LoreError`s carry their own exit code, suggestion text and follow-up
/// actions; any other boxed error is reported generically and exits with
/// status 1. Never returns.
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
    // Domain errors get the rich path: structured JSON or suggestion/action
    // rendering, plus a per-error exit code.
    if let Some(gi_error) = e.downcast_ref::<LoreError>() {
        if robot_mode {
            let output = RobotErrorOutput::from(gi_error);
            eprintln!(
                "{}",
                // Serialization failures degrade through two fallbacks so
                // robot consumers always receive *some* valid JSON.
                serde_json::to_string(&output).unwrap_or_else(|_| {
                    let fallback = FallbackErrorOutput {
                        error: FallbackError {
                            code: "INTERNAL_ERROR".to_string(),
                            message: gi_error.to_string(),
                        },
                    };
                    serde_json::to_string(&fallback)
                        .unwrap_or_else(|_| r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#.to_string())
                })
            );
            std::process::exit(gi_error.exit_code());
        } else {
            // Human layout: blank line, "<icon> <message>", then optional
            // suggestion and "-> action" lines.
            eprintln!();
            eprintln!(
                " {} {}",
                Theme::error().render(Icons::error()),
                Theme::error().bold().render(&gi_error.to_string())
            );
            if let Some(suggestion) = gi_error.suggestion() {
                eprintln!();
                eprintln!(" {suggestion}");
            }
            let actions = gi_error.actions();
            if !actions.is_empty() {
                eprintln!();
                for action in &actions {
                    eprintln!(
                        " {} {}",
                        // U+2192 is a rightwards arrow bullet.
                        Theme::dim().render("\u{2192}"),
                        Theme::bold().render(action)
                    );
                }
            }
            eprintln!();
            std::process::exit(gi_error.exit_code());
        }
    }

    // Non-LoreError failures: generic INTERNAL_ERROR report, exit code 1.
    if robot_mode {
        let output = FallbackErrorOutput {
            error: FallbackError {
                code: "INTERNAL_ERROR".to_string(),
                message: e.to_string(),
            },
        };
        eprintln!(
            "{}",
            serde_json::to_string(&output).unwrap_or_else(|_| {
                r#"{"error":{"code":"INTERNAL_ERROR","message":"Serialization failed"}}"#
                    .to_string()
            })
        );
    } else {
        eprintln!();
        eprintln!(
            " {} {}",
            Theme::error().render(Icons::error()),
            Theme::error().bold().render(&e.to_string())
        );
        eprintln!();
    }
    std::process::exit(1);
}
|
|
|
|
/// Emit stderr warnings for any corrections applied during Phase 1.5.
|
|
fn emit_correction_warnings(result: &CorrectionResult, robot_mode: bool) {
|
|
if robot_mode {
|
|
#[derive(Serialize)]
|
|
struct CorrectionWarning<'a> {
|
|
warning: CorrectionWarningInner<'a>,
|
|
}
|
|
#[derive(Serialize)]
|
|
struct CorrectionWarningInner<'a> {
|
|
r#type: &'static str,
|
|
corrections: &'a [autocorrect::Correction],
|
|
teaching: Vec<String>,
|
|
}
|
|
|
|
let teaching: Vec<String> = result
|
|
.corrections
|
|
.iter()
|
|
.map(autocorrect::format_teaching_note)
|
|
.collect();
|
|
|
|
let warning = CorrectionWarning {
|
|
warning: CorrectionWarningInner {
|
|
r#type: "ARG_CORRECTED",
|
|
corrections: &result.corrections,
|
|
teaching,
|
|
},
|
|
};
|
|
if let Ok(json) = serde_json::to_string(&warning) {
|
|
eprintln!("{json}");
|
|
}
|
|
} else {
|
|
for c in &result.corrections {
|
|
eprintln!(
|
|
"{} {}",
|
|
Theme::warning().render("Auto-corrected:"),
|
|
autocorrect::format_teaching_note(c)
|
|
);
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Phase 1 & 4: Handle clap parsing errors with structured JSON output in robot mode.
|
|
/// Also includes fuzzy command matching and flag-level suggestions.
|
|
fn handle_clap_error(e: clap::Error, robot_mode: bool, corrections: &CorrectionResult) -> ! {
|
|
use clap::error::ErrorKind;
|
|
|
|
// Always let clap handle --help and --version normally (print and exit 0).
|
|
// These are intentional user actions, not errors, even when stdout is redirected.
|
|
if matches!(e.kind(), ErrorKind::DisplayHelp | ErrorKind::DisplayVersion) {
|
|
e.exit()
|
|
}
|
|
|
|
if robot_mode {
|
|
let error_code = map_clap_error_kind(e.kind());
|
|
let full_msg = e.to_string();
|
|
let message = full_msg
|
|
.lines()
|
|
.take(3)
|
|
.collect::<Vec<_>>()
|
|
.join("; ")
|
|
.trim()
|
|
.to_string();
|
|
|
|
let (suggestion, correction, valid_values) = match e.kind() {
|
|
// Phase 4: Suggest similar command for unknown subcommands
|
|
ErrorKind::InvalidSubcommand => {
|
|
let suggestion = if let Some(invalid_cmd) = extract_invalid_subcommand(&e) {
|
|
suggest_similar_command(&invalid_cmd)
|
|
} else {
|
|
"Run 'lore robot-docs' for valid commands".to_string()
|
|
};
|
|
(suggestion, None, None)
|
|
}
|
|
// Flag-level fuzzy matching for unknown flags
|
|
ErrorKind::UnknownArgument => {
|
|
let invalid_flag = extract_invalid_flag(&e);
|
|
let similar = invalid_flag
|
|
.as_deref()
|
|
.and_then(|flag| autocorrect::suggest_similar_flag(flag, &corrections.args));
|
|
let suggestion = if let Some(ref s) = similar {
|
|
format!("Did you mean '{s}'? Run 'lore robot-docs' for all flags")
|
|
} else {
|
|
"Run 'lore robot-docs' for valid flags".to_string()
|
|
};
|
|
(suggestion, similar, None)
|
|
}
|
|
// Value-level suggestions for invalid enum values
|
|
ErrorKind::InvalidValue => {
|
|
let (flag, valid_vals) = extract_invalid_value_context(&e);
|
|
let suggestion = if let Some(vals) = &valid_vals {
|
|
format!(
|
|
"Valid values: {}. Run 'lore robot-docs' for details",
|
|
vals.join(", ")
|
|
)
|
|
} else if let Some(ref f) = flag {
|
|
if let Some(vals) = autocorrect::valid_values_for_flag(f) {
|
|
format!("Valid values for {f}: {}", vals.join(", "))
|
|
} else {
|
|
"Run 'lore robot-docs' for valid values".to_string()
|
|
}
|
|
} else {
|
|
"Run 'lore robot-docs' for valid values".to_string()
|
|
};
|
|
let vals_vec = valid_vals.or_else(|| {
|
|
flag.as_deref()
|
|
.and_then(autocorrect::valid_values_for_flag)
|
|
.map(|v| v.iter().map(|s| (*s).to_string()).collect())
|
|
});
|
|
(suggestion, None, vals_vec)
|
|
}
|
|
_ => (
|
|
"Run 'lore robot-docs' for valid commands".to_string(),
|
|
None,
|
|
None,
|
|
),
|
|
};
|
|
|
|
let output = RobotErrorWithSuggestion {
|
|
error: RobotErrorSuggestionData {
|
|
code: error_code.to_string(),
|
|
message,
|
|
suggestion,
|
|
correction,
|
|
valid_values,
|
|
},
|
|
};
|
|
eprintln!(
|
|
"{}",
|
|
serde_json::to_string(&output).unwrap_or_else(|_| {
|
|
r#"{"error":{"code":"PARSE_ERROR","message":"Parse error"}}"#.to_string()
|
|
})
|
|
);
|
|
std::process::exit(2);
|
|
} else {
|
|
e.exit()
|
|
}
|
|
}
|
|
|
|
/// Map clap ErrorKind to semantic error codes
|
|
fn map_clap_error_kind(kind: clap::error::ErrorKind) -> &'static str {
|
|
use clap::error::ErrorKind;
|
|
match kind {
|
|
ErrorKind::InvalidSubcommand => "UNKNOWN_COMMAND",
|
|
ErrorKind::UnknownArgument => "UNKNOWN_FLAG",
|
|
ErrorKind::MissingRequiredArgument => "MISSING_REQUIRED",
|
|
ErrorKind::InvalidValue => "INVALID_VALUE",
|
|
ErrorKind::ValueValidation => "INVALID_VALUE",
|
|
ErrorKind::TooManyValues => "TOO_MANY_VALUES",
|
|
ErrorKind::TooFewValues => "TOO_FEW_VALUES",
|
|
ErrorKind::ArgumentConflict => "ARGUMENT_CONFLICT",
|
|
ErrorKind::MissingSubcommand => "MISSING_COMMAND",
|
|
ErrorKind::DisplayHelp | ErrorKind::DisplayVersion => "HELP_REQUESTED",
|
|
_ => "PARSE_ERROR",
|
|
}
|
|
}
|
|
|
|
/// Extract the invalid subcommand from a clap error (Phase 4)
|
|
fn extract_invalid_subcommand(e: &clap::Error) -> Option<String> {
|
|
// Parse the error message to find the invalid subcommand
|
|
// Format is typically: "error: unrecognized subcommand 'foo'"
|
|
let msg = e.to_string();
|
|
if let Some(start) = msg.find('\'')
|
|
&& let Some(end) = msg[start + 1..].find('\'')
|
|
{
|
|
return Some(msg[start + 1..start + 1 + end].to_string());
|
|
}
|
|
None
|
|
}
|
|
|
|
/// Extract the invalid flag from a clap UnknownArgument error.
|
|
/// Format is typically: "error: unexpected argument '--xyzzy' found"
|
|
fn extract_invalid_flag(e: &clap::Error) -> Option<String> {
|
|
let msg = e.to_string();
|
|
if let Some(start) = msg.find('\'')
|
|
&& let Some(end) = msg[start + 1..].find('\'')
|
|
{
|
|
let value = &msg[start + 1..start + 1 + end];
|
|
if value.starts_with('-') {
|
|
return Some(value.to_string());
|
|
}
|
|
}
|
|
None
|
|
}
|
|
|
|
/// Extract flag name and valid values from a clap InvalidValue error.
|
|
/// Returns (flag_name, valid_values_if_listed_in_error).
|
|
fn extract_invalid_value_context(e: &clap::Error) -> (Option<String>, Option<Vec<String>>) {
|
|
let msg = e.to_string();
|
|
|
|
// Try to find the flag name from "[possible values: ...]" pattern or from the arg info
|
|
// Clap format: "error: invalid value 'opend' for '--state <STATE>'"
|
|
let flag = if let Some(for_pos) = msg.find("for '") {
|
|
let after_for = &msg[for_pos + 5..];
|
|
if let Some(end) = after_for.find('\'') {
|
|
let raw = &after_for[..end];
|
|
// Strip angle-bracket value placeholder: "--state <STATE>" -> "--state"
|
|
Some(raw.split_whitespace().next().unwrap_or(raw).to_string())
|
|
} else {
|
|
None
|
|
}
|
|
} else {
|
|
None
|
|
};
|
|
|
|
// Try to extract possible values from the error message
|
|
// Clap format: "[possible values: opened, closed, merged, locked, all]"
|
|
let valid_values = if let Some(pv_pos) = msg.find("[possible values: ") {
|
|
let after_pv = &msg[pv_pos + 18..];
|
|
after_pv.find(']').map(|end| {
|
|
after_pv[..end]
|
|
.split(", ")
|
|
.map(|s| s.trim().to_string())
|
|
.collect()
|
|
})
|
|
} else {
|
|
// Fall back to our static registry
|
|
flag.as_deref()
|
|
.and_then(autocorrect::valid_values_for_flag)
|
|
.map(|v| v.iter().map(|s| (*s).to_string()).collect())
|
|
};
|
|
|
|
(flag, valid_values)
|
|
}
|
|
|
|
/// Phase 4: Suggest similar command using fuzzy matching
|
|
fn suggest_similar_command(invalid: &str) -> String {
|
|
// Primary commands + common aliases for fuzzy matching
|
|
const VALID_COMMANDS: &[(&str, &str)] = &[
|
|
("issues", "issues"),
|
|
("issue", "issues"),
|
|
("mrs", "mrs"),
|
|
("mr", "mrs"),
|
|
("merge-requests", "mrs"),
|
|
("search", "search"),
|
|
("find", "search"),
|
|
("query", "search"),
|
|
("sync", "sync"),
|
|
("ingest", "ingest"),
|
|
("count", "count"),
|
|
("status", "status"),
|
|
("auth", "auth"),
|
|
("doctor", "doctor"),
|
|
("version", "version"),
|
|
("init", "init"),
|
|
("stats", "stats"),
|
|
("stat", "stats"),
|
|
("generate-docs", "generate-docs"),
|
|
("embed", "embed"),
|
|
("migrate", "migrate"),
|
|
("health", "health"),
|
|
("robot-docs", "robot-docs"),
|
|
("completions", "completions"),
|
|
("timeline", "timeline"),
|
|
("who", "who"),
|
|
("notes", "notes"),
|
|
("note", "notes"),
|
|
("drift", "drift"),
|
|
("file-history", "file-history"),
|
|
("trace", "trace"),
|
|
];
|
|
|
|
let invalid_lower = invalid.to_lowercase();
|
|
|
|
// Find the best match using Jaro-Winkler similarity
|
|
let best_match = VALID_COMMANDS
|
|
.iter()
|
|
.map(|(alias, canonical)| (*canonical, jaro_winkler(&invalid_lower, alias)))
|
|
.max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal));
|
|
|
|
if let Some((cmd, score)) = best_match
|
|
&& score > 0.7
|
|
{
|
|
let example = command_example(cmd);
|
|
return format!(
|
|
"Did you mean 'lore {cmd}'? Example: {example}. Run 'lore robot-docs' for all commands"
|
|
);
|
|
}
|
|
|
|
"Run 'lore robot-docs' for valid commands. Common: issues, mrs, search, sync, timeline, who"
|
|
.to_string()
|
|
}
|
|
|
|
/// Return a contextual usage example for a command.
|
|
fn command_example(cmd: &str) -> &'static str {
|
|
match cmd {
|
|
"issues" => "lore --robot issues -n 10",
|
|
"mrs" => "lore --robot mrs -n 10",
|
|
"search" => "lore --robot search \"auth bug\"",
|
|
"sync" => "lore --robot sync",
|
|
"ingest" => "lore --robot ingest issues",
|
|
"notes" => "lore --robot notes --for-issue 123",
|
|
"count" => "lore --robot count issues",
|
|
"status" => "lore --robot status",
|
|
"stats" => "lore --robot stats",
|
|
"timeline" => "lore --robot timeline \"auth flow\"",
|
|
"who" => "lore --robot who --path src/",
|
|
"health" => "lore --robot health",
|
|
"generate-docs" => "lore --robot generate-docs",
|
|
"embed" => "lore --robot embed",
|
|
"robot-docs" => "lore robot-docs",
|
|
"trace" => "lore --robot trace src/main.rs",
|
|
"init" => "lore init",
|
|
_ => "lore --robot <command>",
|
|
}
|
|
}
|
|
|
|
/// Handle `lore issues`: show a single issue when an IID is given, otherwise
/// list issues filtered by the CLI flags.
///
/// `config_override` is an optional path to an alternate config file.
/// Robot mode switches output to the JSON printers; `--open` instead opens
/// the result in a browser. Errors bubble up to `handle_error` in main.
fn handle_issues(
    config_override: Option<&str>,
    args: IssuesArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Wall-clock start feeds the `meta` timing in robot-mode output.
    let start = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    // Each boolean flag has a negating pair; the --no-* form wins.
    let asc = args.asc && !args.no_asc;
    let has_due = args.has_due && !args.no_has_due;
    let open = args.open && !args.no_open;
    let order = if asc { "asc" } else { "desc" };

    if let Some(iid) = args.iid {
        // Single-issue mode: `lore issues <iid>`.
        let result = run_show_issue(&config, iid, project)?;
        if robot_mode {
            print_show_issue_json(&result, start.elapsed().as_millis() as u64);
        } else {
            print_show_issue(&result);
        }
    } else {
        // List mode: normalize state casing so e.g. --state OPENED matches.
        let state_normalized = args.state.as_deref().map(str::to_lowercase);
        let filters = ListFilters {
            limit: args.limit,
            project,
            state: state_normalized.as_deref(),
            author: args.author.as_deref(),
            assignee: args.assignee.as_deref(),
            labels: args.label.as_deref(),
            milestone: args.milestone.as_deref(),
            since: args.since.as_deref(),
            due_before: args.due_before.as_deref(),
            has_due_date: has_due,
            statuses: &args.status,
            sort: &args.sort,
            order,
        };

        let result = run_list_issues(&config, filters)?;

        // --open takes precedence over robot mode output.
        if open {
            open_issue_in_browser(&result);
        } else if robot_mode {
            print_list_issues_json(
                &result,
                start.elapsed().as_millis() as u64,
                args.fields.as_deref(),
            );
        } else {
            print_list_issues(&result);
        }
    }

    Ok(())
}
|
|
|
|
/// Handle `lore mrs`: show a single merge request when an IID is given,
/// otherwise list MRs filtered by the CLI flags.
///
/// Mirrors `handle_issues` but with MR-specific filters (reviewer, draft,
/// target/source branch). Robot mode switches to JSON printers; `--open`
/// opens the result in a browser instead of printing.
fn handle_mrs(
    config_override: Option<&str>,
    args: MrsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Wall-clock start feeds the `meta` timing in robot-mode output.
    let start = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    // Negating flag pairs: the --no-* form wins.
    let asc = args.asc && !args.no_asc;
    let open = args.open && !args.no_open;
    let order = if asc { "asc" } else { "desc" };

    if let Some(iid) = args.iid {
        // Single-MR mode: `lore mrs <iid>`.
        let result = run_show_mr(&config, iid, project)?;
        if robot_mode {
            print_show_mr_json(&result, start.elapsed().as_millis() as u64);
        } else {
            print_show_mr(&result);
        }
    } else {
        // List mode: normalize state casing so e.g. --state MERGED matches.
        let state_normalized = args.state.as_deref().map(str::to_lowercase);
        let filters = MrListFilters {
            limit: args.limit,
            project,
            state: state_normalized.as_deref(),
            author: args.author.as_deref(),
            assignee: args.assignee.as_deref(),
            reviewer: args.reviewer.as_deref(),
            labels: args.label.as_deref(),
            since: args.since.as_deref(),
            draft: args.draft,
            no_draft: args.no_draft,
            target_branch: args.target.as_deref(),
            source_branch: args.source.as_deref(),
            sort: &args.sort,
            order,
        };

        let result = run_list_mrs(&config, filters)?;

        // --open takes precedence over robot mode output.
        if open {
            open_mr_in_browser(&result);
        } else if robot_mode {
            print_list_mrs_json(
                &result,
                start.elapsed().as_millis() as u64,
                args.fields.as_deref(),
            );
        } else {
            print_list_mrs(&result);
        }
    }

    Ok(())
}
|
|
|
|
fn handle_notes(
|
|
config_override: Option<&str>,
|
|
args: NotesArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
|
let conn = create_connection(&db_path)?;
|
|
|
|
let order = if args.asc { "asc" } else { "desc" };
|
|
let filters = NoteListFilters {
|
|
limit: args.limit,
|
|
project: args.project,
|
|
author: args.author,
|
|
note_type: args.note_type,
|
|
include_system: args.include_system,
|
|
for_issue_iid: args.for_issue,
|
|
for_mr_iid: args.for_mr,
|
|
note_id: args.note_id,
|
|
gitlab_note_id: args.gitlab_note_id,
|
|
discussion_id: args.discussion_id,
|
|
since: args.since,
|
|
until: args.until,
|
|
path: args.path,
|
|
contains: args.contains,
|
|
resolution: args.resolution,
|
|
sort: args.sort,
|
|
order: order.to_string(),
|
|
};
|
|
|
|
let result = query_notes(&conn, &filters, &config)?;
|
|
|
|
if robot_mode {
|
|
print_list_notes_json(
|
|
&result,
|
|
start.elapsed().as_millis() as u64,
|
|
args.fields.as_deref(),
|
|
);
|
|
} else {
|
|
print_list_notes(&result);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Handle the `ingest` command: fetch issues and/or merge requests from
/// GitLab into the local database, recording the run in the sync-run log
/// and honoring Ctrl+C gracefully.
///
/// Flow: optional dry-run preview (no writes, early return) -> set up the
/// display, sync-run recorder, and a two-stage Ctrl+C handler -> run the
/// ingest for one entity type or both -> record success, failure, or
/// interruption against the recorder.
async fn handle_ingest(
    config_override: Option<&str>,
    args: IngestArgs,
    robot_mode: bool,
    quiet: bool,
    metrics: &MetricsLayer,
) -> Result<(), Box<dyn std::error::Error>> {
    let start = std::time::Instant::now();
    // Paired --x/--no-x flags: the negative form wins when both are given.
    let dry_run = args.dry_run && !args.no_dry_run;
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());

    let force = args.force && !args.no_force;
    let full = args.full && !args.no_full;

    // Handle dry run mode - show preview without making any changes
    if dry_run {
        match args.entity.as_deref() {
            Some(resource_type) => {
                let preview = run_ingest_dry_run(&config, resource_type, project, full)?;
                if robot_mode {
                    print_dry_run_preview_json(&preview);
                } else {
                    print_dry_run_preview(&preview);
                }
            }
            None => {
                // No entity flag means preview both issues and MRs.
                let issues_preview = run_ingest_dry_run(&config, "issues", project, full)?;
                let mrs_preview = run_ingest_dry_run(&config, "mrs", project, full)?;
                if robot_mode {
                    print_combined_dry_run_json(&issues_preview, &mrs_preview);
                } else {
                    print_dry_run_preview(&issues_preview);
                    println!();
                    print_dry_run_preview(&mrs_preview);
                }
            }
        }
        return Ok(());
    }

    // Robot/quiet modes suppress all interactive progress output.
    let display = if robot_mode || quiet {
        IngestDisplay::silent()
    } else {
        IngestDisplay::interactive()
    };

    // Record this run in the sync-run log before any network work starts,
    // so a crash mid-ingest still leaves a trace.
    let entity_label = args.entity.as_deref().unwrap_or("all");
    let command = format!("ingest:{entity_label}");
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let recorder_conn = create_connection(&db_path)?;
    let run_id = uuid::Uuid::new_v4().simple().to_string();
    let run_id_short = &run_id[..8];
    let recorder = SyncRunRecorder::start(&recorder_conn, &command, run_id_short)?;

    // Two-stage Ctrl+C: first press requests a graceful stop (current batch
    // finishes, partial data is kept); second press force-exits with 130.
    let signal = ShutdownSignal::new();
    let signal_for_handler = signal.clone();
    tokio::spawn(async move {
        let _ = tokio::signal::ctrl_c().await;
        eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
        signal_for_handler.cancel();
        let _ = tokio::signal::ctrl_c().await;
        std::process::exit(130);
    });

    // The actual ingest runs inside an async block so that every exit path
    // (success, cancellation, error) funnels through the recorder match below.
    let ingest_result: std::result::Result<(), Box<dyn std::error::Error>> = async {
        match args.entity.as_deref() {
            Some(resource_type) => {
                let result = run_ingest(
                    &config,
                    resource_type,
                    project,
                    force,
                    full,
                    false,
                    display,
                    None,
                    &signal,
                )
                .await?;

                if robot_mode {
                    print_ingest_summary_json(&result, start.elapsed().as_millis() as u64);
                } else {
                    print_ingest_summary(&result);
                }
            }
            None => {
                if !robot_mode && !quiet {
                    println!(
                        "{}",
                        Theme::info().render("Ingesting all content (issues + merge requests)...")
                    );
                    println!();
                }

                // Issues first, then MRs; each honors the shared signal.
                let issues_result = run_ingest(
                    &config, "issues", project, force, full, false, display, None, &signal,
                )
                .await?;

                let mrs_result = run_ingest(
                    &config, "mrs", project, force, full, false, display, None, &signal,
                )
                .await?;

                if robot_mode {
                    print_combined_ingest_json(
                        &issues_result,
                        &mrs_result,
                        start.elapsed().as_millis() as u64,
                    );
                } else {
                    print_ingest_summary(&issues_result);
                    print_ingest_summary(&mrs_result);
                }
            }
        }
        Ok(())
    }
    .await;

    // Finalize the sync-run record. Recorder/lock-release failures are
    // deliberately ignored (`let _ =`): bookkeeping must not mask the
    // ingest outcome.
    match ingest_result {
        // Graceful Ctrl+C: ingest returned Ok but the signal fired.
        Ok(()) if signal.is_cancelled() => {
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(
                &recorder_conn,
                "Interrupted by user (Ctrl+C)",
                Some(&stages),
            );
            if !robot_mode {
                eprintln!(
                    "{}",
                    Theme::warning().render("Interrupted by Ctrl+C. Partial data has been saved.")
                );
            }
            Ok(())
        }
        Ok(()) => {
            let stages = metrics.extract_timings();
            let total_items: usize = stages.iter().map(|s| s.items_processed).sum();
            let total_errors: usize = stages.iter().map(|s| s.errors).sum();
            let _ = recorder.succeed(&recorder_conn, &stages, total_items, total_errors);
            if !robot_mode && !quiet {
                eprintln!(
                    "{}",
                    Theme::dim().render("Hint: Run 'lore generate-docs' to update searchable documents, then 'lore embed' for vectors.")
                );
            }
            Ok(())
        }
        Err(e) => {
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(&recorder_conn, &e.to_string(), Some(&stages));
            Err(e)
        }
    }
}
|
|
|
|
/// Robot-mode `{ok, data, meta}` envelope for a combined issues + MRs
/// ingest run. Field order here is the JSON key order.
#[derive(Serialize)]
struct CombinedIngestOutput {
    ok: bool,
    data: CombinedIngestData,
    meta: RobotMeta,
}

/// Payload of `CombinedIngestOutput`: per-entity ingest statistics.
#[derive(Serialize)]
struct CombinedIngestData {
    // Always "all" for combined runs (see print_combined_ingest_json).
    resource_type: String,
    issues: CombinedIngestEntityStats,
    merge_requests: CombinedIngestEntityStats,
}

/// Counters for one entity type (issues or merge requests) within a
/// combined ingest, flattened from `IngestResult`.
#[derive(Serialize)]
struct CombinedIngestEntityStats {
    projects_synced: usize,
    fetched: usize,
    upserted: usize,
    labels_created: usize,
    discussions_fetched: usize,
    notes_upserted: usize,
}
|
|
|
|
fn print_combined_ingest_json(
|
|
issues: &lore::cli::commands::ingest::IngestResult,
|
|
mrs: &lore::cli::commands::ingest::IngestResult,
|
|
elapsed_ms: u64,
|
|
) {
|
|
let output = CombinedIngestOutput {
|
|
ok: true,
|
|
data: CombinedIngestData {
|
|
resource_type: "all".to_string(),
|
|
issues: CombinedIngestEntityStats {
|
|
projects_synced: issues.projects_synced,
|
|
fetched: issues.issues_fetched,
|
|
upserted: issues.issues_upserted,
|
|
labels_created: issues.labels_created,
|
|
discussions_fetched: issues.discussions_fetched,
|
|
notes_upserted: issues.notes_upserted,
|
|
},
|
|
merge_requests: CombinedIngestEntityStats {
|
|
projects_synced: mrs.projects_synced,
|
|
fetched: mrs.mrs_fetched,
|
|
upserted: mrs.mrs_upserted,
|
|
labels_created: mrs.labels_created,
|
|
discussions_fetched: mrs.discussions_fetched,
|
|
notes_upserted: mrs.notes_upserted,
|
|
},
|
|
},
|
|
meta: RobotMeta { elapsed_ms },
|
|
};
|
|
|
|
println!(
|
|
"{}",
|
|
serde_json::to_string(&output).unwrap_or_else(|e| {
|
|
format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
|
|
})
|
|
);
|
|
}
|
|
|
|
/// Robot-mode envelope for a combined dry-run preview. Unlike the regular
/// envelope this carries a top-level `dry_run` marker and no `meta` block.
#[derive(Serialize)]
struct CombinedDryRunOutput {
    ok: bool,
    dry_run: bool,
    data: CombinedDryRunData,
}

/// Payload of `CombinedDryRunOutput`: one preview per entity type.
#[derive(Serialize)]
struct CombinedDryRunData {
    issues: lore::cli::commands::DryRunPreview,
    merge_requests: lore::cli::commands::DryRunPreview,
}
|
|
|
|
fn print_combined_dry_run_json(
|
|
issues: &lore::cli::commands::DryRunPreview,
|
|
mrs: &lore::cli::commands::DryRunPreview,
|
|
) {
|
|
let output = CombinedDryRunOutput {
|
|
ok: true,
|
|
dry_run: true,
|
|
data: CombinedDryRunData {
|
|
issues: issues.clone(),
|
|
merge_requests: mrs.clone(),
|
|
},
|
|
};
|
|
|
|
println!(
|
|
"{}",
|
|
serde_json::to_string(&output).unwrap_or_else(|e| {
|
|
format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
|
|
})
|
|
);
|
|
}
|
|
|
|
async fn handle_count(
|
|
config_override: Option<&str>,
|
|
args: CountArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
|
|
if args.entity == "events" {
|
|
let counts = run_count_events(&config)?;
|
|
if robot_mode {
|
|
print_event_count_json(&counts, start.elapsed().as_millis() as u64);
|
|
} else {
|
|
print_event_count(&counts);
|
|
}
|
|
return Ok(());
|
|
}
|
|
|
|
let result = run_count(&config, &args.entity, args.for_entity.as_deref())?;
|
|
if robot_mode {
|
|
print_count_json(&result, start.elapsed().as_millis() as u64);
|
|
} else {
|
|
print_count(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_sync_status_cmd(
|
|
config_override: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
|
|
let result = run_sync_status(&config)?;
|
|
if robot_mode {
|
|
print_sync_status_json(&result, start.elapsed().as_millis() as u64);
|
|
} else {
|
|
print_sync_status(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
/// Robot-mode envelope for a completed `init` run (no `meta` block).
#[derive(Serialize)]
struct InitOutput {
    ok: bool,
    data: InitOutputData,
}

/// Payload of `InitOutput`: where config/data landed, who authenticated,
/// and which projects were validated.
#[derive(Serialize)]
struct InitOutputData {
    config_path: String,
    data_dir: String,
    user: InitOutputUser,
    projects: Vec<InitOutputProject>,
    // Omitted from the JSON entirely when no default project was chosen.
    #[serde(skip_serializing_if = "Option::is_none")]
    default_project: Option<String>,
}

/// Authenticated GitLab user as reported during init.
#[derive(Serialize)]
struct InitOutputUser {
    username: String,
    name: String,
}

/// A single validated project: its path (e.g. "group/project") and
/// display name.
#[derive(Serialize)]
struct InitOutputProject {
    path: String,
    name: String,
}
|
|
|
|
fn print_init_json(result: &InitResult) {
|
|
let output = InitOutput {
|
|
ok: true,
|
|
data: InitOutputData {
|
|
config_path: result.config_path.clone(),
|
|
data_dir: result.data_dir.clone(),
|
|
user: InitOutputUser {
|
|
username: result.user.username.clone(),
|
|
name: result.user.name.clone(),
|
|
},
|
|
projects: result
|
|
.projects
|
|
.iter()
|
|
.map(|p| InitOutputProject {
|
|
path: p.path.clone(),
|
|
name: p.name.clone(),
|
|
})
|
|
.collect(),
|
|
default_project: result.default_project.clone(),
|
|
},
|
|
};
|
|
println!(
|
|
"{}",
|
|
serde_json::to_string(&output).unwrap_or_else(|e| {
|
|
format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
|
|
})
|
|
);
|
|
}
|
|
|
|
#[allow(clippy::too_many_arguments)]
/// Handle the `init` command: collect GitLab connection settings, validate
/// them against the instance, and write the config file + database.
///
/// Two modes:
/// - Robot mode: all required values must arrive as flags (no prompts);
///   missing flags produce a MISSING_FLAGS JSON error and exit(2).
/// - Interactive: each value falls back to a dialoguer prompt when its
///   flag is absent; overwriting an existing config requires confirmation
///   (or --force), otherwise exit(2).
async fn handle_init(
    config_override: Option<&str>,
    force: bool,
    non_interactive: bool,
    robot_mode: bool,
    gitlab_url_flag: Option<String>,
    token_env_var_flag: Option<String>,
    projects_flag: Option<String>,
    default_project_flag: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
    if robot_mode {
        // Collect the names of any required flags that were not supplied.
        let missing: Vec<&str> = [
            gitlab_url_flag.is_none().then_some("--gitlab-url"),
            token_env_var_flag.is_none().then_some("--token-env-var"),
            projects_flag.is_none().then_some("--projects"),
        ]
        .into_iter()
        .flatten()
        .collect();

        if !missing.is_empty() {
            let output = RobotErrorWithSuggestion {
                error: RobotErrorSuggestionData {
                    code: "MISSING_FLAGS".to_string(),
                    message: format!("Robot mode requires flags: {}", missing.join(", ")),
                    suggestion: "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project".to_string(),
                    correction: None,
                    valid_values: None,
                },
            };
            eprintln!("{}", serde_json::to_string(&output)?);
            std::process::exit(2);
        }

        // Safe: the missing-flags check above guarantees Some for all three.
        let project_paths: Vec<String> = projects_flag
            .unwrap()
            .split(',')
            .map(|p| p.trim().to_string())
            .filter(|p| !p.is_empty())
            .collect();

        // Robot mode always overwrites and never prompts, regardless of
        // the --force/--non-interactive flags.
        let result = run_init(
            InitInputs {
                gitlab_url: gitlab_url_flag.unwrap(),
                token_env_var: token_env_var_flag.unwrap(),
                project_paths,
                default_project: default_project_flag.clone(),
            },
            InitOptions {
                config_path: config_override.map(String::from),
                force: true,
                non_interactive: true,
            },
        )
        .await?;

        print_init_json(&result);
        return Ok(());
    }

    let config_path = get_config_path(config_override);
    let mut confirmed_overwrite = force;

    // Existing config without --force: refuse in non-interactive mode,
    // otherwise ask; declining exits(2).
    if config_path.exists() && !force {
        if non_interactive {
            eprintln!(
                "{}",
                Theme::error().render(&format!(
                    "Config file exists at {}. Use --force to overwrite.",
                    config_path.display()
                ))
            );
            std::process::exit(2);
        }

        let confirm = Confirm::new()
            .with_prompt(format!(
                "Config file exists at {}. Overwrite?",
                config_path.display()
            ))
            .default(false)
            .interact()?;

        if !confirm {
            println!("{}", Theme::warning().render("Cancelled."));
            std::process::exit(2);
        }
        confirmed_overwrite = true;
    }

    // GitLab URL: flag wins, else prompt with URL validation.
    let gitlab_url: String = if let Some(url) = gitlab_url_flag {
        url
    } else {
        Input::new()
            .with_prompt("GitLab URL")
            .default("https://gitlab.com".to_string())
            .validate_with(|input: &String| -> Result<(), &str> {
                if url::Url::parse(input).is_ok() {
                    Ok(())
                } else {
                    Err("Please enter a valid URL")
                }
            })
            .interact_text()?
    };

    // Token env var name: flag wins, else prompt (defaults GITLAB_TOKEN).
    let token_env_var: String = if let Some(var) = token_env_var_flag {
        var
    } else {
        Input::new()
            .with_prompt("Token environment variable name")
            .default("GITLAB_TOKEN".to_string())
            .interact_text()?
    };

    // Project list: flag wins, else prompt; both parse the same
    // comma-separated, trimmed, non-empty format.
    let project_paths: Vec<String> = if let Some(projects) = projects_flag {
        projects
            .split(',')
            .map(|p| p.trim().to_string())
            .filter(|p| !p.is_empty())
            .collect()
    } else {
        let project_paths_input: String = Input::new()
            .with_prompt("Project paths (comma-separated, e.g., group/project)")
            .validate_with(|input: &String| -> Result<(), &str> {
                if input.trim().is_empty() {
                    Err("Please enter at least one project path")
                } else {
                    Ok(())
                }
            })
            .interact_text()?;

        project_paths_input
            .split(',')
            .map(|p| p.trim().to_string())
            .filter(|p| !p.is_empty())
            .collect()
    };

    // Resolve default project: CLI flag, interactive prompt, or None
    // (the prompt is only offered when there is actually a choice:
    // multiple projects and an interactive session).
    let default_project = if default_project_flag.is_some() {
        default_project_flag
    } else if project_paths.len() > 1 && !non_interactive {
        let set_default = Confirm::new()
            .with_prompt("Set a default project? (used when -p is omitted)")
            .default(true)
            .interact()?;

        if set_default {
            let selection = dialoguer::Select::new()
                .with_prompt("Default project")
                .items(&project_paths)
                .default(0)
                .interact()?;
            Some(project_paths[selection].clone())
        } else {
            None
        }
    } else {
        None
    };

    println!("{}", Theme::info().render("Validating configuration..."));

    // run_init authenticates, validates projects, writes the config file,
    // and initializes the database.
    let result = run_init(
        InitInputs {
            gitlab_url,
            token_env_var,
            project_paths,
            default_project,
        },
        InitOptions {
            config_path: config_override.map(String::from),
            force: confirmed_overwrite,
            non_interactive,
        },
    )
    .await?;

    // Human-readable success summary (\u{2713} is a check mark).
    println!(
        "{}",
        Theme::success().render(&format!(
            "\n\u{2713} Authenticated as @{} ({})",
            result.user.username, result.user.name
        ))
    );

    for project in &result.projects {
        println!(
            "{}",
            Theme::success().render(&format!("\u{2713} {} ({})", project.path, project.name))
        );
    }

    if let Some(ref dp) = result.default_project {
        println!(
            "{}",
            Theme::success().render(&format!("\u{2713} Default project: {dp}"))
        );
    }

    println!(
        "{}",
        Theme::success().render(&format!(
            "\n\u{2713} Config written to {}",
            result.config_path
        ))
    );
    println!(
        "{}",
        Theme::success().render(&format!(
            "\u{2713} Database initialized at {}",
            result.data_dir
        ))
    );
    println!(
        "{}",
        Theme::info().render("\nSetup complete! Run 'lore doctor' to verify.")
    );

    Ok(())
}
|
|
|
|
/// Robot-mode `{ok, data, meta}` envelope for a successful `auth test`.
#[derive(Serialize)]
struct AuthTestOutput {
    ok: bool,
    data: AuthTestData,
    meta: RobotMeta,
}

/// Payload of `AuthTestOutput`: who we authenticated as and against which
/// GitLab instance.
#[derive(Serialize)]
struct AuthTestData {
    // Always true on the success path; failures use the error envelope.
    authenticated: bool,
    username: String,
    name: String,
    gitlab_url: String,
}
|
|
|
|
/// Handle the `auth test` command: verify the configured GitLab token.
///
/// On success prints the authenticated identity (JSON envelope in robot
/// mode). On failure renders the error (JSON on stderr in robot mode,
/// themed message + optional hint otherwise) and exits with the error's
/// own exit code — this function only returns Ok on success.
async fn handle_auth_test(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let start = std::time::Instant::now();
    match run_auth_test(config_override).await {
        Ok(result) => {
            if robot_mode {
                let output = AuthTestOutput {
                    ok: true,
                    data: AuthTestData {
                        authenticated: true,
                        username: result.username.clone(),
                        name: result.name.clone(),
                        gitlab_url: result.base_url.clone(),
                    },
                    meta: RobotMeta {
                        elapsed_ms: start.elapsed().as_millis() as u64,
                    },
                };
                println!("{}", serde_json::to_string(&output)?);
            } else {
                println!("Authenticated as @{} ({})", result.username, result.name);
                println!("GitLab: {}", result.base_url);
            }
            Ok(())
        }
        Err(e) => {
            if robot_mode {
                let output = RobotErrorOutput::from(&e);
                // If even the structured error fails to serialize, fall back
                // to a hand-built JSON string with manual escaping of
                // backslashes and quotes in the message.
                eprintln!(
                    "{}",
                    serde_json::to_string(&output).unwrap_or_else(|_| {
                        let msg = e.to_string().replace('\\', "\\\\").replace('"', "\\\"");
                        format!(
                            r#"{{"error":{{"code":"{}","message":"{}"}}}}"#,
                            e.code(),
                            msg
                        )
                    })
                );
            } else {
                eprintln!("{} {}", Theme::error().render("Error:"), e);
                if let Some(suggestion) = e.suggestion() {
                    eprintln!("{} {}", Theme::warning().render("Hint:"), suggestion);
                }
            }
            // Exit code is determined by the error kind, not a fixed value.
            std::process::exit(e.exit_code());
        }
    }
}
|
|
|
|
/// Robot-mode `{ok, data, meta}` envelope for the `doctor` command.
#[derive(Serialize)]
struct DoctorOutput {
    ok: bool,
    data: DoctorData,
    meta: RobotMeta,
}

/// Payload of `DoctorOutput`: overall result plus the individual checks.
/// Note `ok` above reflects envelope delivery; `success` reflects whether
/// the health checks actually passed.
#[derive(Serialize)]
struct DoctorData {
    success: bool,
    checks: lore::cli::commands::DoctorChecks,
}
|
|
|
|
async fn handle_doctor(
|
|
config_override: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let result = run_doctor(config_override).await;
|
|
|
|
if robot_mode {
|
|
let output = DoctorOutput {
|
|
ok: true,
|
|
data: DoctorData {
|
|
success: result.success,
|
|
checks: result.checks,
|
|
},
|
|
meta: RobotMeta {
|
|
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
},
|
|
};
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
print_doctor_results(&result);
|
|
}
|
|
|
|
if !result.success {
|
|
std::process::exit(1);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Robot-mode `{ok, data, meta}` envelope for the `version` command.
#[derive(Serialize)]
struct VersionOutput {
    ok: bool,
    data: VersionData,
    meta: RobotMeta,
}

/// Payload of `VersionOutput`: binary name, crate version, and the git
/// hash baked in at build time (omitted when unavailable).
#[derive(Serialize)]
struct VersionData {
    name: &'static str,
    version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    git_hash: Option<String>,
}
|
|
|
|
fn handle_version(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let version = env!("CARGO_PKG_VERSION").to_string();
|
|
let git_hash = env!("GIT_HASH").to_string();
|
|
if robot_mode {
|
|
let output = VersionOutput {
|
|
ok: true,
|
|
data: VersionData {
|
|
name: "lore",
|
|
version,
|
|
git_hash: if git_hash.is_empty() {
|
|
None
|
|
} else {
|
|
Some(git_hash)
|
|
},
|
|
},
|
|
meta: RobotMeta {
|
|
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
},
|
|
};
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else if git_hash.is_empty() {
|
|
println!("lore version {}", version);
|
|
} else {
|
|
println!("lore version {} ({})", version, git_hash);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn handle_completions(shell: &str) -> Result<(), Box<dyn std::error::Error>> {
|
|
use clap::CommandFactory;
|
|
use clap_complete::{Shell, generate};
|
|
|
|
let shell = match shell {
|
|
"bash" => Shell::Bash,
|
|
"zsh" => Shell::Zsh,
|
|
"fish" => Shell::Fish,
|
|
"powershell" => Shell::PowerShell,
|
|
other => {
|
|
return Err(format!("Unsupported shell: {other}").into());
|
|
}
|
|
};
|
|
|
|
let mut cmd = Cli::command();
|
|
generate(shell, &mut cmd, "lore", &mut std::io::stdout());
|
|
Ok(())
|
|
}
|
|
|
|
fn handle_backup(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|
if robot_mode {
|
|
let output = RobotErrorWithSuggestion {
|
|
error: RobotErrorSuggestionData {
|
|
code: "NOT_IMPLEMENTED".to_string(),
|
|
message: "The 'backup' command is not yet implemented.".to_string(),
|
|
suggestion: "Use manual database backup: cp ~/.local/share/lore/lore.db ~/.local/share/lore/lore.db.bak".to_string(),
|
|
correction: None,
|
|
valid_values: None,
|
|
},
|
|
};
|
|
eprintln!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
eprintln!(
|
|
"{} The 'backup' command is not yet implemented.",
|
|
Theme::error().render("Error:")
|
|
);
|
|
}
|
|
std::process::exit(1);
|
|
}
|
|
|
|
fn handle_reset(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|
if robot_mode {
|
|
let output = RobotErrorWithSuggestion {
|
|
error: RobotErrorSuggestionData {
|
|
code: "NOT_IMPLEMENTED".to_string(),
|
|
message: "The 'reset' command is not yet implemented.".to_string(),
|
|
suggestion: "Manually delete the database: rm ~/.local/share/lore/lore.db"
|
|
.to_string(),
|
|
correction: None,
|
|
valid_values: None,
|
|
},
|
|
};
|
|
eprintln!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
eprintln!(
|
|
"{} The 'reset' command is not yet implemented.",
|
|
Theme::error().render("Error:")
|
|
);
|
|
}
|
|
std::process::exit(1);
|
|
}
|
|
|
|
/// Robot-mode `{ok, data, meta}` envelope for the `migrate` command.
#[derive(Serialize)]
struct MigrateOutput {
    ok: bool,
    data: MigrateData,
    meta: RobotMeta,
}

/// Payload of `MigrateOutput`: schema versions before/after running
/// migrations and whether anything actually changed.
#[derive(Serialize)]
struct MigrateData {
    before_version: i32,
    after_version: i32,
    migrated: bool,
}

/// Robot-mode error envelope that carries an actionable suggestion in
/// addition to code/message.
#[derive(Serialize)]
struct RobotErrorWithSuggestion {
    error: RobotErrorSuggestionData,
}

/// Body of `RobotErrorWithSuggestion`. `correction` and `valid_values`
/// support autocorrect-style errors and are omitted from the JSON when
/// unset.
#[derive(Serialize)]
struct RobotErrorSuggestionData {
    code: String,
    message: String,
    suggestion: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    correction: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    valid_values: Option<Vec<String>>,
}
|
|
|
|
async fn handle_migrate(
|
|
config_override: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
|
|
|
if !db_path.exists() {
|
|
if robot_mode {
|
|
let output = RobotErrorWithSuggestion {
|
|
error: RobotErrorSuggestionData {
|
|
code: "DB_ERROR".to_string(),
|
|
message: format!("Database not found at {}", db_path.display()),
|
|
suggestion: "Run 'lore init' first".to_string(),
|
|
correction: None,
|
|
valid_values: None,
|
|
},
|
|
};
|
|
eprintln!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
eprintln!(
|
|
"{}",
|
|
Theme::error().render(&format!("Database not found at {}", db_path.display()))
|
|
);
|
|
eprintln!(
|
|
"{}",
|
|
Theme::warning().render("Run 'lore init' first to create the database.")
|
|
);
|
|
}
|
|
std::process::exit(10);
|
|
}
|
|
|
|
let conn = create_connection(&db_path)?;
|
|
let before_version = get_schema_version(&conn);
|
|
|
|
if !robot_mode {
|
|
println!(
|
|
"{}",
|
|
Theme::info().render(&format!("Current schema version: {}", before_version))
|
|
);
|
|
}
|
|
|
|
run_migrations(&conn)?;
|
|
|
|
let after_version = get_schema_version(&conn);
|
|
|
|
if robot_mode {
|
|
let output = MigrateOutput {
|
|
ok: true,
|
|
data: MigrateData {
|
|
before_version,
|
|
after_version,
|
|
migrated: after_version > before_version,
|
|
},
|
|
meta: RobotMeta {
|
|
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
},
|
|
};
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else if after_version > before_version {
|
|
println!(
|
|
"{}",
|
|
Theme::success().render(&format!(
|
|
"Migrations applied: {} -> {}",
|
|
before_version, after_version
|
|
))
|
|
);
|
|
} else {
|
|
println!(
|
|
"{}",
|
|
Theme::success().render("Database is already up to date.")
|
|
);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_stats(
|
|
config_override: Option<&str>,
|
|
args: StatsArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let dry_run = args.dry_run && !args.no_dry_run;
|
|
let config = Config::load(config_override)?;
|
|
let check = (args.check && !args.no_check) || args.repair;
|
|
let result = run_stats(&config, check, args.repair, dry_run)?;
|
|
if robot_mode {
|
|
print_stats_json(&result, start.elapsed().as_millis() as u64);
|
|
} else {
|
|
print_stats(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
/// Handle the `file-history` command: show the change history of one file.
///
/// The user-supplied path is normalized and then resolved against the
/// database so bare filenames map to their full repo path, mirroring the
/// resolution used by `trace` and `who`.
fn handle_file_history(
    config_override: Option<&str>,
    args: FileHistoryArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let start = std::time::Instant::now();
    let config = Config::load(config_override)?;

    // -p flag, falling back to the config's default project (if any).
    let project = config
        .effective_project(args.project.as_deref())
        .map(String::from);

    let normalized = normalize_repo_path(&args.path);

    // Resolve bare filenames before querying (same path resolution as trace/who)
    let db_path_tmp = get_db_path(config.storage.db_path.as_deref());
    let conn_tmp = create_connection(&db_path_tmp)?;
    let project_id_tmp = project
        .as_deref()
        .map(|p| resolve_project(&conn_tmp, p))
        .transpose()?;
    let pq = build_path_query(&conn_tmp, &normalized, project_id_tmp)?;
    let resolved_path = if pq.is_prefix {
        // Directory prefix — file-history is file-oriented, pass the raw path.
        // Don't use pq.value which contains LIKE-escaped metacharacters.
        normalized.trim_end_matches('/').to_string()
    } else {
        pq.value
    };

    // NOTE(review): this passes `args.no_follow_renames` un-negated, while
    // handle_trace passes `!args.no_follow_renames` to run_trace — the two
    // callee parameters presumably have opposite polarity; verify against
    // run_file_history's signature.
    let result = run_file_history(
        &config,
        &resolved_path,
        project.as_deref(),
        args.no_follow_renames,
        args.merged,
        args.discussions,
        args.limit,
    )?;

    if robot_mode {
        let elapsed_ms = start.elapsed().as_millis() as u64;
        print_file_history_json(&result, elapsed_ms);
    } else {
        print_file_history(&result);
    }
    Ok(())
}
|
|
|
|
/// Handle the `trace` command: trace a file's history through the synced
/// GitLab data.
///
/// Accepts `path` or `path:line`; a requested line number currently only
/// produces a notice (line-level tracing is not implemented at this tier)
/// and file-level results are shown. Bare filenames are resolved to full
/// repo paths via the database, matching file-history/who.
fn handle_trace(
    config_override: Option<&str>,
    args: TraceArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let start = std::time::Instant::now();
    let config = Config::load(config_override)?;

    // Split an optional ":line" suffix off the path argument.
    let (raw_path, line_requested) = parse_trace_path(&args.path);
    let normalized = normalize_repo_path(&raw_path);

    if line_requested.is_some() && !robot_mode {
        eprintln!(
            "Note: Line-level tracing requires Tier 2 (git blame). Showing file-level results."
        );
    }

    // -p flag, falling back to the config's default project (if any).
    let project = config
        .effective_project(args.project.as_deref())
        .map(String::from);

    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;
    let project_id = project
        .as_deref()
        .map(|p| resolve_project(&conn, p))
        .transpose()?;

    // Resolve bare filenames (e.g. "operators.ts" -> "src/utils/operators.ts")
    let pq = build_path_query(&conn, &normalized, project_id)?;
    let path = if pq.is_prefix {
        // Directory prefix — trace is file-oriented, pass the raw path.
        // Don't use pq.value which contains LIKE-escaped metacharacters.
        normalized.trim_end_matches('/').to_string()
    } else {
        pq.value
    };

    // Note: rename-following is enabled by default; the CLI flag is the
    // negation, hence the `!` here.
    let result = run_trace(
        &conn,
        project_id,
        &path,
        !args.no_follow_renames,
        args.discussions,
        args.limit,
    )?;

    if robot_mode {
        let elapsed_ms = start.elapsed().as_millis() as u64;
        // The requested line (if any) is echoed back in the JSON meta.
        print_trace_json(&result, elapsed_ms, line_requested);
    } else {
        print_trace(&result);
    }
    Ok(())
}
|
|
|
|
async fn handle_timeline(
|
|
config_override: Option<&str>,
|
|
args: TimelineArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
|
|
let params = TimelineParams {
|
|
query: args.query,
|
|
project: config
|
|
.effective_project(args.project.as_deref())
|
|
.map(String::from),
|
|
since: args.since,
|
|
depth: args.depth,
|
|
no_mentions: args.no_mentions,
|
|
limit: args.limit,
|
|
max_seeds: args.max_seeds,
|
|
max_entities: args.max_entities,
|
|
max_evidence: args.max_evidence,
|
|
robot_mode,
|
|
};
|
|
|
|
let result = run_timeline(&config, ¶ms).await?;
|
|
|
|
if robot_mode {
|
|
print_timeline_json_with_meta(
|
|
&result,
|
|
result.total_events_before_limit,
|
|
params.depth,
|
|
!params.no_mentions,
|
|
args.fields.as_deref(),
|
|
);
|
|
} else {
|
|
print_timeline(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_search(
|
|
config_override: Option<&str>,
|
|
args: SearchArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
let explain = args.explain && !args.no_explain;
|
|
|
|
let fts_mode = match args.fts_mode.as_str() {
|
|
"raw" => lore::search::FtsQueryMode::Raw,
|
|
_ => lore::search::FtsQueryMode::Safe,
|
|
};
|
|
|
|
let cli_filters = SearchCliFilters {
|
|
source_type: args.source_type,
|
|
author: args.author,
|
|
project: config
|
|
.effective_project(args.project.as_deref())
|
|
.map(String::from),
|
|
labels: args.label,
|
|
path: args.path,
|
|
since: args.since,
|
|
updated_since: args.updated_since,
|
|
limit: args.limit,
|
|
};
|
|
|
|
let spinner = lore::cli::progress::stage_spinner_v2(
|
|
lore::cli::render::Icons::search(),
|
|
"Search",
|
|
&format!("Searching ({})...", args.mode),
|
|
robot_mode,
|
|
);
|
|
let start = std::time::Instant::now();
|
|
let response = run_search(
|
|
&config,
|
|
&args.query,
|
|
cli_filters,
|
|
fts_mode,
|
|
&args.mode,
|
|
explain,
|
|
)
|
|
.await?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
spinner.finish_and_clear();
|
|
|
|
if robot_mode {
|
|
print_search_results_json(&response, elapsed_ms, args.fields.as_deref());
|
|
} else {
|
|
print_search_results(&response);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_generate_docs(
|
|
config_override: Option<&str>,
|
|
args: GenerateDocsArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
|
|
let project = config.effective_project(args.project.as_deref());
|
|
let result = run_generate_docs(&config, args.full, project, None)?;
|
|
let elapsed = start.elapsed();
|
|
if robot_mode {
|
|
print_generate_docs_json(&result, elapsed.as_millis() as u64);
|
|
} else {
|
|
print_generate_docs(&result);
|
|
if elapsed.as_secs() >= 1 {
|
|
eprintln!(
|
|
"{}",
|
|
Theme::dim().render(&format!(" Done in {:.1}s", elapsed.as_secs_f64()))
|
|
);
|
|
}
|
|
if result.regenerated > 0 {
|
|
eprintln!(
|
|
"{}",
|
|
Theme::dim().render(
|
|
"Hint: Run 'lore embed' to update vector embeddings for changed documents."
|
|
)
|
|
);
|
|
}
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
/// Handle the `embed` command: compute vector embeddings for generated
/// documents, with a live progress bar and graceful Ctrl+C handling.
async fn handle_embed(
    config_override: Option<&str>,
    args: EmbedArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    use std::sync::Arc;
    use std::sync::atomic::{AtomicBool, Ordering};

    let start = std::time::Instant::now();
    let config = Config::load(config_override)?;
    // Paired --x/--no-x flags: the negative form wins when both are given.
    let full = args.full && !args.no_full;
    let retry_failed = args.retry_failed && !args.no_retry_failed;

    // Two-stage Ctrl+C: first press requests a graceful stop (current batch
    // finishes), second press force-exits with 130.
    let signal = ShutdownSignal::new();
    let signal_for_handler = signal.clone();
    tokio::spawn(async move {
        let _ = tokio::signal::ctrl_c().await;
        eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
        signal_for_handler.cancel();
        let _ = tokio::signal::ctrl_c().await;
        std::process::exit(130);
    });

    // Progress bar starts with length 0; the callback supplies the real
    // total on its first invocation. The AtomicBool swap ensures the steady
    // tick is enabled exactly once even if the callback races.
    let embed_bar = lore::cli::progress::nested_progress("Embedding", 0, robot_mode);
    let bar_clone = embed_bar.clone();
    let tick_started = Arc::new(AtomicBool::new(false));
    let tick_clone = Arc::clone(&tick_started);
    let progress_cb: Box<dyn Fn(usize, usize)> = Box::new(move |processed, total| {
        if total > 0 {
            if !tick_clone.swap(true, Ordering::Relaxed) {
                bar_clone.enable_steady_tick(std::time::Duration::from_millis(100));
            }
            bar_clone.set_length(total as u64);
            bar_clone.set_position(processed as u64);
        }
    });

    let result = run_embed(&config, full, retry_failed, Some(progress_cb), &signal).await?;
    embed_bar.finish_and_clear();

    let elapsed = start.elapsed();
    if robot_mode {
        print_embed_json(&result, elapsed.as_millis() as u64);
    } else {
        print_embed(&result);
        // Only mention timing when it is long enough to matter.
        if elapsed.as_secs() >= 1 {
            eprintln!(
                "{}",
                Theme::dim().render(&format!(" Done in {:.1}s", elapsed.as_secs_f64()))
            );
        }
    }
    Ok(())
}
|
|
|
|
/// Handle the `sync` command: the full pipeline (ingest -> docs -> embed) or
/// surgical per-IID sync, with flag validation, optional cron file locking,
/// sync-run recording, and graceful Ctrl+C handling.
///
/// * `config_override` — optional path to an alternate config file.
/// * `args` — parsed `sync` flags (paired `--x` / `--no-x` negation flags).
/// * `robot_mode` — emit the `{ok, data, meta}` JSON envelope instead of
///   human-readable output.
/// * `metrics` — per-stage timing source for the recorder and `--timings`.
async fn handle_sync_cmd(
    config_override: Option<&str>,
    args: SyncArgs,
    robot_mode: bool,
    metrics: &MetricsLayer,
) -> Result<(), Box<dyn std::error::Error>> {
    // `--no-dry-run` wins over `--dry-run` (paired negation flags).
    let dry_run = args.dry_run && !args.no_dry_run;

    // Dedup and sort IIDs so repeated `--issue 7 --issue 7` counts once
    // against the surgical cap and produces deterministic ordering.
    let mut issue_iids = args.issue;
    let mut mr_iids = args.mr;
    issue_iids.sort_unstable();
    issue_iids.dedup();
    mr_iids.sort_unstable();
    mr_iids.dedup();

    // Load config, then apply CLI flags that disable optional fetch phases.
    let mut config = Config::load(config_override)?;
    if args.no_events {
        config.sync.fetch_resource_events = false;
    }
    if args.no_file_changes {
        config.sync.fetch_mr_file_changes = false;
    }
    if args.no_status {
        config.sync.fetch_work_item_status = false;
    }
    let options = SyncOptions {
        full: args.full && !args.no_full,
        force: args.force && !args.no_force,
        no_embed: args.no_embed,
        no_docs: args.no_docs,
        no_events: args.no_events,
        robot_mode,
        dry_run,
        issue_iids,
        mr_iids,
        project: args.project,
        preflight_only: args.preflight_only,
    };

    // Validation: preflight_only requires surgical mode
    if options.preflight_only && !options.is_surgical() {
        return Err("--preflight-only requires --issue or --mr".into());
    }

    // Validation: full + surgical are incompatible
    if options.full && options.is_surgical() {
        return Err("--full and --issue/--mr are incompatible".into());
    }

    // Validation: surgical mode requires a project (via -p or config defaultProject)
    if options.is_surgical()
        && config
            .effective_project(options.project.as_deref())
            .is_none()
    {
        return Err("--issue/--mr requires -p/--project (or set defaultProject in config)".into());
    }

    // Validation: hard cap on total surgical targets
    let total_targets = options.issue_iids.len() + options.mr_iids.len();
    if total_targets > SyncOptions::MAX_SURGICAL_TARGETS {
        return Err(format!(
            "Too many surgical targets ({total_targets}); maximum is {}",
            SyncOptions::MAX_SURGICAL_TARGETS
        )
        .into());
    }

    // Surgical + dry-run → treat as preflight-only
    // (rebind as mutable only now that validation on the immutable view is done)
    let mut options = options;
    if dry_run && options.is_surgical() {
        options.preflight_only = true;
    }

    // Resolve effective project for surgical mode: when -p is not passed but
    // defaultProject is set in config, populate options.project so the surgical
    // orchestrator receives the resolved project path.
    if options.is_surgical() && options.project.is_none() {
        options.project = config.default_project.clone();
    }

    // For non-surgical dry run, skip recording and just show the preview
    if dry_run && !options.is_surgical() {
        let signal = ShutdownSignal::new();
        run_sync(&config, options, None, &signal).await?;
        return Ok(());
    }

    // Acquire file lock if --lock was passed (used by cron to skip overlapping runs).
    // The guard must stay alive for the rest of the function (underscore-prefixed
    // binding, not `_`, so it is not dropped immediately).
    let _sync_lock = if args.lock {
        match lore::core::cron::acquire_sync_lock() {
            Ok(Some(guard)) => Some(guard),
            Ok(None) => {
                // Another sync is running — silently exit (expected for cron)
                tracing::debug!("--lock: another sync is running, skipping");
                return Ok(());
            }
            Err(e) => {
                tracing::warn!(error = %e, "--lock: failed to acquire file lock, skipping sync");
                return Ok(());
            }
        }
    } else {
        None
    };

    // Surgical mode: run_sync_surgical manages its own recorder, signal, and recording.
    // Skip the normal recorder setup and let the dispatch handle everything.
    if options.is_surgical() {
        let signal = ShutdownSignal::new();
        let signal_for_handler = signal.clone();
        // First Ctrl+C requests graceful cancellation; a second forces exit 130
        // (128 + SIGINT by Unix convention).
        tokio::spawn(async move {
            let _ = tokio::signal::ctrl_c().await;
            eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
            signal_for_handler.cancel();
            let _ = tokio::signal::ctrl_c().await;
            std::process::exit(130);
        });

        let start = std::time::Instant::now();
        match run_sync(&config, options, None, &signal).await {
            Ok(result) => {
                let elapsed = start.elapsed();
                if robot_mode {
                    print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
                } else {
                    print_sync(&result, elapsed, Some(metrics), args.timings);
                }
                return Ok(());
            }
            Err(e) => return Err(e.into()),
        }
    }

    // Normal (non-surgical) path: open a recorder connection and register the run
    // under a short (8-char) run id so failures/interruptions are persisted.
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let recorder_conn = create_connection(&db_path)?;
    let run_id = uuid::Uuid::new_v4().simple().to_string();
    let run_id_short = &run_id[..8];
    let recorder = SyncRunRecorder::start(&recorder_conn, "sync", run_id_short)?;

    // Same two-stage Ctrl+C handling as surgical mode above.
    let signal = ShutdownSignal::new();
    let signal_for_handler = signal.clone();
    tokio::spawn(async move {
        let _ = tokio::signal::ctrl_c().await;
        eprintln!("\nInterrupted, finishing current batch... (Ctrl+C again to force quit)");
        signal_for_handler.cancel();
        let _ = tokio::signal::ctrl_c().await;
        std::process::exit(130);
    });

    let start = std::time::Instant::now();
    match run_sync(&config, options, Some(run_id_short), &signal).await {
        // Cancelled mid-run: record as failed, release job locks, but still
        // print the partial results and exit successfully.
        Ok(result) if signal.is_cancelled() => {
            let elapsed = start.elapsed();
            let stages = metrics.extract_timings();
            let released = release_all_locked_jobs(&recorder_conn).unwrap_or(0);
            let _ = recorder.fail(
                &recorder_conn,
                "Interrupted by user (Ctrl+C)",
                Some(&stages),
            );

            if robot_mode {
                print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
            } else {
                eprintln!();
                eprintln!(
                    "{}",
                    Theme::warning().render("Interrupted by Ctrl+C. Partial results:")
                );
                print_sync(&result, elapsed, Some(metrics), args.timings);
                if released > 0 {
                    eprintln!(
                        "{}",
                        Theme::dim().render(&format!("Released {released} locked jobs"))
                    );
                }
            }
            Ok(())
        }
        // Completed: record success with aggregate item/error counts.
        Ok(result) => {
            let elapsed = start.elapsed();
            let stages = metrics.extract_timings();
            let total_items = result.issues_updated
                + result.mrs_updated
                + result.documents_regenerated
                + result.documents_embedded;
            let total_errors = result.resource_events_failed;
            let _ = recorder.succeed(&recorder_conn, &stages, total_items, total_errors);

            if robot_mode {
                print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
            } else {
                print_sync(&result, elapsed, Some(metrics), args.timings);
            }
            Ok(())
        }
        // Failed: best-effort cleanup (ignore secondary errors), then propagate.
        Err(e) => {
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(&recorder_conn, &e.to_string(), Some(&stages));
            Err(e.into())
        }
    }
}
|
|
|
|
fn handle_cron(
|
|
config_override: Option<&str>,
|
|
args: CronArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
|
|
match args.action {
|
|
CronAction::Install { interval } => {
|
|
let result = run_cron_install(interval)?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
if robot_mode {
|
|
print_cron_install_json(&result, elapsed_ms);
|
|
} else {
|
|
print_cron_install(&result);
|
|
}
|
|
// Warn if no stored token — cron runs in a minimal shell with no env vars
|
|
if let Ok(config) = Config::load(config_override)
|
|
&& config
|
|
.gitlab
|
|
.token
|
|
.as_ref()
|
|
.is_none_or(|t| t.trim().is_empty())
|
|
{
|
|
if robot_mode {
|
|
eprintln!(
|
|
"{{\"warning\":\"No stored token found. Cron sync requires a stored token. Run: lore token set\"}}"
|
|
);
|
|
} else {
|
|
eprintln!();
|
|
eprintln!(
|
|
" {} No stored token found. Cron sync requires a stored token.",
|
|
lore::cli::render::Theme::warning()
|
|
.render(lore::cli::render::Icons::warning()),
|
|
);
|
|
eprintln!(" Run: lore token set");
|
|
eprintln!();
|
|
}
|
|
}
|
|
}
|
|
CronAction::Uninstall => {
|
|
let result = run_cron_uninstall()?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
if robot_mode {
|
|
print_cron_uninstall_json(&result, elapsed_ms);
|
|
} else {
|
|
print_cron_uninstall(&result);
|
|
}
|
|
}
|
|
CronAction::Status => {
|
|
let config = Config::load(config_override)?;
|
|
let info = run_cron_status(&config)?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
if robot_mode {
|
|
print_cron_status_json(&info, elapsed_ms);
|
|
} else {
|
|
print_cron_status(&info);
|
|
}
|
|
}
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_token(
|
|
config_override: Option<&str>,
|
|
args: TokenArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
|
|
match args.action {
|
|
TokenAction::Set { token } => {
|
|
let result = run_token_set(config_override, token).await?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
if robot_mode {
|
|
let output = serde_json::json!({
|
|
"ok": true,
|
|
"data": {
|
|
"action": "set",
|
|
"username": result.username,
|
|
"config_path": result.config_path,
|
|
},
|
|
"meta": { "elapsed_ms": elapsed_ms },
|
|
});
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
println!(
|
|
" {} Token stored and validated (authenticated as @{})",
|
|
lore::cli::render::Theme::success().render(lore::cli::render::Icons::success()),
|
|
result.username
|
|
);
|
|
println!(
|
|
" {} {}",
|
|
lore::cli::render::Theme::dim().render("config:"),
|
|
result.config_path
|
|
);
|
|
println!();
|
|
}
|
|
}
|
|
TokenAction::Show { unmask } => {
|
|
let result = run_token_show(config_override, unmask)?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
if robot_mode {
|
|
let output = serde_json::json!({
|
|
"ok": true,
|
|
"data": {
|
|
"token": result.token,
|
|
"source": result.source,
|
|
},
|
|
"meta": { "elapsed_ms": elapsed_ms },
|
|
});
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
println!(
|
|
" {} {}",
|
|
lore::cli::render::Theme::dim().render("token:"),
|
|
result.token
|
|
);
|
|
println!(
|
|
" {} {}",
|
|
lore::cli::render::Theme::dim().render("source:"),
|
|
result.source
|
|
);
|
|
println!();
|
|
}
|
|
}
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Robot-mode envelope for `lore health`: the standard `{ok, data, meta}`
/// JSON shape.
#[derive(Serialize)]
struct HealthOutput {
    // True whenever the command itself executed; the actual health verdict
    // lives in `data.healthy` (handle_health always sets this to true).
    ok: bool,
    // Health check results (config / db / schema state and suggested actions).
    data: HealthData,
    // Timing metadata (elapsed_ms).
    meta: RobotMeta,
}
|
|
|
|
/// Payload of the `lore health` robot response.
#[derive(Serialize)]
struct HealthData {
    // Overall verdict: config found AND db found AND schema current.
    healthy: bool,
    // Config file exists at the resolved path.
    config_found: bool,
    // Database file exists (false if the config failed to load).
    db_found: bool,
    // Schema version is at least LATEST_SCHEMA_VERSION.
    schema_current: bool,
    // Current schema version; 0 when the DB is missing or unreadable.
    schema_version: i32,
    // Suggested remediation commands (e.g. "lore init", "lore sync",
    // "lore migrate"); omitted from JSON when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    actions: Vec<String>,
}
|
|
|
|
async fn handle_health(
|
|
config_override: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config_path = get_config_path(config_override);
|
|
let config_found = config_path.exists();
|
|
|
|
let (db_found, schema_version, schema_current) = if config_found {
|
|
match Config::load(config_override) {
|
|
Ok(config) => {
|
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
|
if db_path.exists() {
|
|
match create_connection(&db_path) {
|
|
Ok(conn) => {
|
|
let version = get_schema_version(&conn);
|
|
(true, version, version >= LATEST_SCHEMA_VERSION)
|
|
}
|
|
Err(_) => (true, 0, false),
|
|
}
|
|
} else {
|
|
(false, 0, false)
|
|
}
|
|
}
|
|
Err(_) => (false, 0, false),
|
|
}
|
|
} else {
|
|
(false, 0, false)
|
|
};
|
|
|
|
let healthy = config_found && db_found && schema_current;
|
|
|
|
let mut actions = Vec::new();
|
|
if !config_found {
|
|
actions.push("lore init".to_string());
|
|
}
|
|
if !db_found && config_found {
|
|
actions.push("lore sync".to_string());
|
|
}
|
|
if db_found && !schema_current {
|
|
actions.push("lore migrate".to_string());
|
|
}
|
|
|
|
if robot_mode {
|
|
let output = HealthOutput {
|
|
ok: true,
|
|
data: HealthData {
|
|
healthy,
|
|
config_found,
|
|
db_found,
|
|
schema_current,
|
|
schema_version,
|
|
actions,
|
|
},
|
|
meta: RobotMeta {
|
|
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
},
|
|
};
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
let status = |ok: bool| {
|
|
if ok {
|
|
Theme::success().render("pass")
|
|
} else {
|
|
Theme::error().render("FAIL")
|
|
}
|
|
};
|
|
println!(
|
|
"Config: {} ({})",
|
|
status(config_found),
|
|
config_path.display()
|
|
);
|
|
println!("DB: {}", status(db_found));
|
|
println!("Schema: {} (v{})", status(schema_current), schema_version);
|
|
println!();
|
|
if healthy {
|
|
println!("{}", Theme::success().bold().render("Healthy"));
|
|
} else {
|
|
println!(
|
|
"{}",
|
|
Theme::error()
|
|
.bold()
|
|
.render("Unhealthy - run 'lore doctor' for details")
|
|
);
|
|
}
|
|
}
|
|
|
|
if !healthy {
|
|
std::process::exit(19);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Top-level envelope for the `robot-docs` self-discovery manifest.
/// Unlike other robot responses it carries no `meta` timing block.
#[derive(Serialize)]
struct RobotDocsOutput {
    // Always true — the manifest is static and cannot fail to build.
    // NOTE(review): set at the construction site; confirm if that changes.
    ok: bool,
    // The full machine-readable CLI manifest.
    data: RobotDocsData,
}
|
|
|
|
/// Machine-readable CLI manifest served by `lore robot-docs`, intended for
/// agent self-discovery of commands, flags, schemas, and conventions.
#[derive(Serialize)]
struct RobotDocsData {
    // Binary name.
    name: String,
    // Crate version (CARGO_PKG_VERSION).
    version: String,
    // One-line tool description.
    description: String,
    // How robot mode is activated (flags, env var, auto-detection).
    activation: RobotDocsActivation,
    // glab-equivalents table and lore-exclusive command list.
    quick_start: serde_json::Value,
    // Per-command descriptions, flags, examples, and response schemas.
    commands: serde_json::Value,
    /// Deprecated command aliases (old -> new)
    aliases: serde_json::Value,
    /// Pre-clap error tolerance: what the CLI auto-corrects
    error_tolerance: serde_json::Value,
    // Process exit code meanings.
    exit_codes: serde_json::Value,
    /// Error codes emitted by clap parse failures
    clap_error_codes: serde_json::Value,
    // Shape of error responses (as a descriptive string).
    error_format: String,
    // Suggested multi-command workflows.
    workflows: serde_json::Value,
    // Notes about config file fields and precedence.
    config_notes: serde_json::Value,
}
|
|
|
|
/// Describes the ways robot (JSON) output mode can be activated.
#[derive(Serialize)]
struct RobotDocsActivation {
    // CLI flags that force robot mode (e.g. --robot / -J).
    flags: Vec<String>,
    // Environment variable that enables robot mode.
    env: String,
    // Description of automatic activation (e.g. non-TTY detection).
    // NOTE(review): exact semantics live at the construction site — confirm.
    auto: String,
}
|
|
|
|
fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|
let version = env!("CARGO_PKG_VERSION").to_string();
|
|
|
|
let commands = serde_json::json!({
|
|
"init": {
|
|
"description": "Initialize configuration and database",
|
|
"flags": ["--force", "--non-interactive", "--gitlab-url <URL>", "--token-env-var <VAR>", "--projects <paths>", "--default-project <path>"],
|
|
"robot_flags": ["--gitlab-url", "--token-env-var", "--projects", "--default-project"],
|
|
"example": "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project,other/repo --default-project group/project",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"config_path": "string", "data_dir": "string", "user": {"username": "string", "name": "string"}, "projects": "[{path:string, name:string}]", "default_project": "string?"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"health": {
|
|
"description": "Quick pre-flight check: config, database, schema version",
|
|
"flags": [],
|
|
"example": "lore --robot health",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"healthy": "bool", "config_found": "bool", "db_found": "bool", "schema_current": "bool", "schema_version": "int"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"auth": {
|
|
"description": "Verify GitLab authentication",
|
|
"flags": [],
|
|
"example": "lore --robot auth",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"authenticated": "bool", "username": "string", "name": "string", "gitlab_url": "string"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"doctor": {
|
|
"description": "Full environment health check (config, auth, DB, Ollama)",
|
|
"flags": [],
|
|
"example": "lore --robot doctor",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"success": "bool", "checks": "{config:object, auth:object, database:object, ollama:object}"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"ingest": {
|
|
"description": "Sync data from GitLab",
|
|
"flags": ["--project <path>", "--force", "--no-force", "--full", "--no-full", "--dry-run", "--no-dry-run", "<entity: issues|mrs>"],
|
|
"example": "lore --robot ingest issues --project group/repo",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"resource_type": "string", "projects_synced": "int", "issues_fetched?": "int", "mrs_fetched?": "int", "upserted": "int", "labels_created": "int", "discussions_fetched": "int", "notes_upserted": "int"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"sync": {
|
|
"description": "Full sync pipeline: ingest -> generate-docs -> embed. Supports surgical per-IID mode.",
|
|
"flags": ["--full", "--no-full", "--force", "--no-force", "--no-embed", "--no-docs", "--no-events", "--no-file-changes", "--no-status", "--dry-run", "--no-dry-run", "-t/--timings", "--lock", "--issue <IID>", "--mr <IID>", "-p/--project <path>", "--preflight-only"],
|
|
"example": "lore --robot sync",
|
|
"surgical_mode": {
|
|
"description": "Sync specific issues or MRs by IID. Runs a scoped pipeline: preflight -> TOCTOU check -> ingest -> dependents -> docs -> embed.",
|
|
"flags": ["--issue <IID> (repeatable)", "--mr <IID> (repeatable)", "-p/--project <path> (required)", "--preflight-only"],
|
|
"examples": [
|
|
"lore --robot sync --issue 7 -p group/project",
|
|
"lore --robot sync --issue 7 --issue 42 --mr 10 -p group/project",
|
|
"lore --robot sync --issue 7 -p group/project --preflight-only"
|
|
],
|
|
"constraints": ["--issue/--mr requires -p/--project (or defaultProject in config)", "--full and --issue/--mr are incompatible", "--preflight-only requires --issue or --mr", "Max 100 total targets"],
|
|
"entity_result_outcomes": ["synced", "skipped_stale", "not_found", "preflight_failed", "error"]
|
|
},
|
|
"response_schema": {
|
|
"normal": {
|
|
"ok": "bool",
|
|
"data": {"issues_updated": "int", "mrs_updated": "int", "documents_regenerated": "int", "documents_embedded": "int", "resource_events_synced": "int", "resource_events_failed": "int"},
|
|
"meta": {"elapsed_ms": "int", "stages?": "[{name:string, elapsed_ms:int, items_processed:int}]"}
|
|
},
|
|
"surgical": {
|
|
"ok": "bool",
|
|
"data": {"surgical_mode": "true", "surgical_iids": "{issues:[int], merge_requests:[int]}", "entity_results": "[{entity_type:string, iid:int, outcome:string, error?:string, toctou_reason?:string}]", "preflight_only?": "bool", "issues_updated": "int", "mrs_updated": "int", "documents_regenerated": "int", "documents_embedded": "int", "discussions_fetched": "int"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
}
|
|
},
|
|
"issues": {
|
|
"description": "List or show issues",
|
|
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "--status <name>", "-p/--project", "-a/--author", "-A/--assignee", "-l/--label", "-m/--milestone", "--since", "--due-before", "--has-due", "--no-has-due", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
|
"example": "lore --robot issues --state opened --limit 10",
|
|
"notes": {
|
|
"status_filter": "--status filters by work item status NAME (case-insensitive). Valid values are in meta.available_statuses of any issues list response.",
|
|
"status_name": "status_name is the board column label (e.g. 'In review', 'Blocked'). This is the canonical status identifier for filtering."
|
|
},
|
|
"response_schema": {
|
|
"list": {
|
|
"ok": "bool",
|
|
"data": {"issues": "[{iid:int, title:string, state:string, author_username:string, labels:[string], assignees:[string], discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, status_name:string?}]", "total_count": "int", "showing": "int"},
|
|
"meta": {"elapsed_ms": "int", "available_statuses": "[string] — all distinct status names in the database, for use with --status filter"}
|
|
},
|
|
"show": {
|
|
"ok": "bool",
|
|
"data": "IssueDetail (full entity with description, discussions, notes, events)",
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"example_output": {"list": {"ok":true,"data":{"issues":[{"iid":3864,"title":"Switch Health Card","state":"opened","status_name":"In progress","labels":["customer:BNSF"],"assignees":["teernisse"],"discussion_count":12,"updated_at_iso":"2026-02-12T..."}],"total_count":1,"showing":1},"meta":{"elapsed_ms":42}}},
|
|
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
|
|
},
|
|
"mrs": {
|
|
"description": "List or show merge requests",
|
|
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "-p/--project", "-a/--author", "-A/--assignee", "-r/--reviewer", "-l/--label", "--since", "-d/--draft", "-D/--no-draft", "--target", "--source", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
|
"example": "lore --robot mrs --state opened",
|
|
"response_schema": {
|
|
"list": {
|
|
"ok": "bool",
|
|
"data": {"mrs": "[{iid:int, title:string, state:string, author_username:string, labels:[string], draft:bool, target_branch:string, source_branch:string, discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, reviewers:[string]}]", "total_count": "int", "showing": "int"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
},
|
|
"show": {
|
|
"ok": "bool",
|
|
"data": "MrDetail (full entity with description, discussions, notes, events)",
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"example_output": {"list": {"ok":true,"data":{"mrs":[{"iid":200,"title":"Add throw time chart","state":"opened","draft":false,"author_username":"teernisse","target_branch":"main","source_branch":"feat/throw-time","reviewers":["cseiber"],"discussion_count":5,"updated_at_iso":"2026-02-11T..."}],"total_count":1,"showing":1},"meta":{"elapsed_ms":38}}},
|
|
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
|
|
},
|
|
"search": {
|
|
"description": "Search indexed documents (lexical, hybrid, semantic)",
|
|
"flags": ["<QUERY>", "--mode", "--type", "--author", "-p/--project", "--label", "--path", "--since", "--updated-since", "-n/--limit", "--fields <list>", "--explain", "--no-explain", "--fts-mode"],
|
|
"example": "lore --robot search 'authentication bug' --mode hybrid --limit 10",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"results": "[{document_id:int, source_type:string, title:string, snippet:string, score:float, url:string?, author:string?, created_at:string?, updated_at:string?, project_path:string, labels:[string], paths:[string]}]", "total_results": "int", "query": "string", "mode": "string", "warnings": "[string]"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
},
|
|
"example_output": {"ok":true,"data":{"query":"throw time","mode":"hybrid","total_results":3,"results":[{"document_id":42,"source_type":"issue","title":"Switch Health Card","score":0.92,"snippet":"...throw time data from BNSF...","project_path":"vs/typescript-code"}],"warnings":[]},"meta":{"elapsed_ms":85}},
|
|
"fields_presets": {"minimal": ["document_id", "title", "source_type", "score"]}
|
|
},
|
|
"count": {
|
|
"description": "Count entities in local database",
|
|
"flags": ["<entity: issues|mrs|discussions|notes|events>", "-f/--for <issue|mr>"],
|
|
"example": "lore --robot count issues",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"entity": "string", "count": "int", "system_excluded?": "int", "breakdown?": {"opened": "int", "closed": "int", "merged?": "int", "locked?": "int"}},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"stats": {
|
|
"description": "Show document and index statistics",
|
|
"flags": ["--check", "--no-check", "--repair", "--dry-run", "--no-dry-run"],
|
|
"example": "lore --robot stats",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"total_documents": "int", "indexed_documents": "int", "embedded_documents": "int", "stale_documents": "int", "integrity?": "object"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"status": {
|
|
"description": "Show sync state (cursors, last sync times)",
|
|
"flags": [],
|
|
"example": "lore --robot status",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"projects": "[{path:string, issues_cursor:string?, mrs_cursor:string?, last_sync:string?}]"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"generate-docs": {
|
|
"description": "Generate searchable documents from ingested data",
|
|
"flags": ["--full", "-p/--project <path>"],
|
|
"example": "lore --robot generate-docs --full",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"generated": "int", "updated": "int", "unchanged": "int", "deleted": "int"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"embed": {
|
|
"description": "Generate vector embeddings for documents via Ollama",
|
|
"flags": ["--full", "--no-full", "--retry-failed", "--no-retry-failed"],
|
|
"example": "lore --robot embed",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"embedded": "int", "skipped": "int", "failed": "int", "total_chunks": "int"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"migrate": {
|
|
"description": "Run pending database migrations",
|
|
"flags": [],
|
|
"example": "lore --robot migrate",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"before_version": "int", "after_version": "int", "migrated": "bool"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"version": {
|
|
"description": "Show version information",
|
|
"flags": [],
|
|
"example": "lore --robot version",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"version": "string", "git_hash?": "string"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"completions": {
|
|
"description": "Generate shell completions",
|
|
"flags": ["<shell: bash|zsh|fish|powershell>"],
|
|
"example": "lore completions bash > ~/.local/share/bash-completion/completions/lore"
|
|
},
|
|
"timeline": {
|
|
"description": "Chronological timeline of events matching a keyword query or entity reference",
|
|
"flags": ["<QUERY>", "-p/--project", "--since <duration>", "--depth <n>", "--no-mentions", "-n/--limit", "--fields <list>", "--max-seeds", "--max-entities", "--max-evidence"],
|
|
"query_syntax": {
|
|
"search": "Any text -> hybrid search seeding (FTS5 + vector)",
|
|
"entity_direct": "issue:N, i:N, mr:N, m:N -> direct entity seeding (no search, no Ollama)"
|
|
},
|
|
"example": "lore --robot timeline issue:42",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"entities": "[{type:string, iid:int, title:string, project_path:string}]", "events": "[{timestamp:string, type:string, entity_type:string, entity_iid:int, detail:string}]", "total_events": "int"},
|
|
"meta": {"elapsed_ms": "int", "search_mode": "string (hybrid|lexical|direct)"}
|
|
},
|
|
"fields_presets": {"minimal": ["timestamp", "type", "entity_iid", "detail"]}
|
|
},
|
|
"who": {
|
|
"description": "People intelligence: experts, workload, active discussions, overlap, review patterns",
|
|
"flags": ["<target>", "--path <path>", "--active", "--overlap <path>", "--reviews", "--since <duration>", "-p/--project", "-n/--limit", "--fields <list>", "--detail", "--no-detail", "--as-of <date>", "--explain-score", "--include-bots", "--include-closed", "--all-history"],
|
|
"modes": {
|
|
"expert": "lore who <file-path> -- Who knows about this area? (also: --path for root files)",
|
|
"workload": "lore who <username> -- What is someone working on?",
|
|
"reviews": "lore who <username> --reviews -- Review pattern analysis",
|
|
"active": "lore who --active -- Active unresolved discussions",
|
|
"overlap": "lore who --overlap <path> -- Who else is touching these files?"
|
|
},
|
|
"example": "lore --robot who src/features/auth/",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {
|
|
"mode": "string",
|
|
"input": {"target": "string|null", "path": "string|null", "project": "string|null", "since": "string|null", "limit": "int"},
|
|
"resolved_input": {"mode": "string", "project_id": "int|null", "project_path": "string|null", "since_ms": "int", "since_iso": "string", "since_mode": "string (default|explicit|none)", "limit": "int"},
|
|
"...": "mode-specific fields"
|
|
},
|
|
"meta": {"elapsed_ms": "int"}
|
|
},
|
|
"example_output": {"expert": {"ok":true,"data":{"mode":"expert","result":{"experts":[{"username":"teernisse","score":42,"note_count":15,"diff_note_count":8}]}},"meta":{"elapsed_ms":65}}},
|
|
"fields_presets": {
|
|
"expert_minimal": ["username", "score"],
|
|
"workload_minimal": ["entity_type", "iid", "title", "state"],
|
|
"active_minimal": ["entity_type", "iid", "title", "participants"]
|
|
}
|
|
},
|
|
"trace": {
|
|
"description": "Trace why code was introduced: file -> MR -> issue -> discussion. Follows rename chains by default.",
|
|
"flags": ["<path>", "-p/--project <path>", "--discussions", "--no-follow-renames", "-n/--limit <N>"],
|
|
"example": "lore --robot trace src/main.rs -p group/repo",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"path": "string", "resolved_paths": "[string]", "trace_chains": "[{mr_iid:int, mr_title:string, mr_state:string, mr_author:string, change_type:string, merged_at_iso:string?, updated_at_iso:string, web_url:string?, issues:[{iid:int, title:string, state:string, reference_type:string, web_url:string?}], discussions:[{discussion_id:string, mr_iid:int, author_username:string, body_snippet:string, path:string, created_at_iso:string}]}]"},
|
|
"meta": {"tier": "string (api_only)", "line_requested": "int?", "elapsed_ms": "int", "total_chains": "int", "renames_followed": "bool"}
|
|
}
|
|
},
|
|
"file-history": {
|
|
"description": "Show MRs that touched a file, with rename chain resolution and optional DiffNote discussions",
|
|
"flags": ["<path>", "-p/--project <path>", "--discussions", "--no-follow-renames", "--merged", "-n/--limit <N>"],
|
|
"example": "lore --robot file-history src/main.rs -p group/repo",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"path": "string", "rename_chain": "[string]?", "merge_requests": "[{iid:int, title:string, state:string, author_username:string, change_type:string, merged_at_iso:string?, updated_at_iso:string, merge_commit_sha:string?, web_url:string?}]", "discussions": "[{discussion_id:string, author_username:string, body_snippet:string, path:string, created_at_iso:string}]?"},
|
|
"meta": {"elapsed_ms": "int", "total_mrs": "int", "renames_followed": "bool", "paths_searched": "int"}
|
|
}
|
|
},
|
|
"drift": {
|
|
"description": "Detect discussion divergence from original issue intent",
|
|
"flags": ["<entity_type: issues>", "<IID>", "--threshold <0.0-1.0>", "-p/--project <path>"],
|
|
"example": "lore --robot drift issues 42 --threshold 0.4",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"entity_type": "string", "iid": "int", "title": "string", "threshold": "float", "divergent_discussions": "[{discussion_id:string, similarity:float, snippet:string}]"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"notes": {
|
|
"description": "List notes from discussions with rich filtering",
|
|
"flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--fields <list|minimal>", "--open"],
|
|
"robot_flags": ["--format json", "--fields minimal"],
|
|
"example": "lore --robot notes --author jdefting --since 1y --format json --fields minimal",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"notes": "[NoteListRowJson]", "total_count": "int", "showing": "int"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"cron": {
|
|
"description": "Manage cron-based automatic syncing (Unix only)",
|
|
"subcommands": {
|
|
"install": {"flags": ["--interval <minutes>"], "default_interval": 8},
|
|
"uninstall": {"flags": []},
|
|
"status": {"flags": []}
|
|
},
|
|
"example": "lore --robot cron status",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"action": "string (install|uninstall|status)", "installed?": "bool", "interval_minutes?": "int", "entry?": "string", "log_path?": "string", "replaced?": "bool", "was_installed?": "bool", "last_run_iso?": "string"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"token": {
|
|
"description": "Manage stored GitLab token",
|
|
"subcommands": {
|
|
"set": {"flags": ["--token <value>"], "note": "Reads from stdin if --token omitted in non-interactive mode"},
|
|
"show": {"flags": ["--unmask"]}
|
|
},
|
|
"example": "lore --robot token show",
|
|
"response_schema": {
|
|
"ok": "bool",
|
|
"data": {"action": "string (set|show)", "token_masked?": "string", "token?": "string", "valid?": "bool", "username?": "string"},
|
|
"meta": {"elapsed_ms": "int"}
|
|
}
|
|
},
|
|
"robot-docs": {
|
|
"description": "This command (agent self-discovery manifest)",
|
|
"flags": ["--brief"],
|
|
"example": "lore robot-docs --brief"
|
|
}
|
|
});
|
|
|
|
let quick_start = serde_json::json!({
|
|
"glab_equivalents": [
|
|
{ "glab": "glab issue list", "lore": "lore -J issues -n 50", "note": "Richer: includes labels, status, closing MRs, discussion counts" },
|
|
{ "glab": "glab issue view 123", "lore": "lore -J issues 123", "note": "Includes full discussions, work-item status, cross-references" },
|
|
{ "glab": "glab issue list -l bug", "lore": "lore -J issues --label bug", "note": "AND logic for multiple --label flags" },
|
|
{ "glab": "glab mr list", "lore": "lore -J mrs", "note": "Includes draft status, reviewers, discussion counts" },
|
|
{ "glab": "glab mr view 456", "lore": "lore -J mrs 456", "note": "Includes discussions, review threads, source/target branches" },
|
|
{ "glab": "glab mr list -s opened", "lore": "lore -J mrs -s opened", "note": "States: opened, merged, closed, locked, all" },
|
|
{ "glab": "glab api '/projects/:id/issues'", "lore": "lore -J issues -p project", "note": "Fuzzy project matching (suffix or substring)" }
|
|
],
|
|
"lore_exclusive": [
|
|
"search: FTS5 + vector hybrid search across all entities",
|
|
"who: Expert/workload/reviews analysis per file path or person",
|
|
"timeline: Chronological event reconstruction across entities",
|
|
"trace: Code provenance chains (file -> MR -> issue -> discussion)",
|
|
"file-history: MR history per file with rename resolution",
|
|
"notes: Rich note listing with author, type, resolution, path, and discussion filters",
|
|
"stats: Database statistics with document/note/discussion counts",
|
|
"count: Entity counts with state breakdowns",
|
|
"embed: Generate vector embeddings for semantic search via Ollama",
|
|
"cron: Automated sync scheduling (Unix)",
|
|
"token: Secure token management with masked display"
|
|
],
|
|
"read_write_split": "lore = ALL reads (issues, MRs, search, who, timeline, intelligence). glab = ALL writes (create, update, approve, merge, CI/CD)."
|
|
});
|
|
|
|
// --brief: strip response_schema and example_output from every command (~60% smaller)
|
|
let mut commands = commands;
|
|
if brief {
|
|
strip_schemas(&mut commands);
|
|
}
|
|
|
|
let exit_codes = serde_json::json!({
|
|
"0": "Success",
|
|
"1": "Internal error",
|
|
"2": "Usage error (invalid flags or arguments)",
|
|
"3": "Config invalid",
|
|
"4": "Token not set",
|
|
"5": "GitLab auth failed",
|
|
"6": "Resource not found",
|
|
"7": "Rate limited",
|
|
"8": "Network error",
|
|
"9": "Database locked",
|
|
"10": "Database error",
|
|
"11": "Migration failed",
|
|
"12": "I/O error",
|
|
"13": "Transform error",
|
|
"14": "Ollama unavailable",
|
|
"15": "Ollama model not found",
|
|
"16": "Embedding failed",
|
|
"17": "Not found",
|
|
"18": "Ambiguous match",
|
|
"19": "Health check failed",
|
|
"20": "Config not found"
|
|
});
|
|
|
|
let workflows = serde_json::json!({
|
|
"first_setup": [
|
|
"lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project",
|
|
"lore --robot doctor",
|
|
"lore --robot sync"
|
|
],
|
|
"daily_sync": [
|
|
"lore --robot sync"
|
|
],
|
|
"search": [
|
|
"lore --robot search 'query' --mode hybrid"
|
|
],
|
|
"pre_flight": [
|
|
"lore --robot health"
|
|
],
|
|
"temporal_intelligence": [
|
|
"lore --robot sync",
|
|
"lore --robot timeline '<keyword>' --since 30d",
|
|
"lore --robot timeline '<keyword>' --depth 2"
|
|
],
|
|
"people_intelligence": [
|
|
"lore --robot who src/path/to/feature/",
|
|
"lore --robot who @username",
|
|
"lore --robot who @username --reviews",
|
|
"lore --robot who --active --since 7d",
|
|
"lore --robot who --overlap src/path/",
|
|
"lore --robot who --path README.md"
|
|
],
|
|
"surgical_sync": [
|
|
"lore --robot sync --issue 7 -p group/project",
|
|
"lore --robot sync --issue 7 --mr 10 -p group/project",
|
|
"lore --robot sync --issue 7 -p group/project --preflight-only"
|
|
]
|
|
});
|
|
|
|
// Phase 3: Deprecated command aliases
|
|
let aliases = serde_json::json!({
|
|
"deprecated_commands": {
|
|
"list issues": "issues",
|
|
"list mrs": "mrs",
|
|
"show issue <IID>": "issues <IID>",
|
|
"show mr <IID>": "mrs <IID>",
|
|
"auth-test": "auth",
|
|
"sync-status": "status"
|
|
},
|
|
"command_aliases": {
|
|
"issue": "issues",
|
|
"mr": "mrs",
|
|
"merge-requests": "mrs",
|
|
"merge-request": "mrs",
|
|
"mergerequests": "mrs",
|
|
"mergerequest": "mrs",
|
|
"generate-docs": "generate-docs",
|
|
"generatedocs": "generate-docs",
|
|
"gendocs": "generate-docs",
|
|
"gen-docs": "generate-docs",
|
|
"robot-docs": "robot-docs",
|
|
"robotdocs": "robot-docs"
|
|
},
|
|
"pre_clap_aliases": {
|
|
"note": "Underscore/no-separator forms auto-corrected before parsing",
|
|
"merge_requests": "mrs",
|
|
"merge_request": "mrs",
|
|
"mergerequests": "mrs",
|
|
"mergerequest": "mrs",
|
|
"generate_docs": "generate-docs",
|
|
"generatedocs": "generate-docs",
|
|
"gendocs": "generate-docs",
|
|
"gen-docs": "generate-docs",
|
|
"robot-docs": "robot-docs",
|
|
"robotdocs": "robot-docs"
|
|
},
|
|
"prefix_matching": "Enabled via infer_subcommands. Unambiguous prefixes work: 'iss' -> issues, 'time' -> timeline, 'sea' -> search"
|
|
});
|
|
|
|
let error_tolerance = serde_json::json!({
|
|
"note": "The CLI auto-corrects common mistakes before parsing. Corrections are applied silently with a teaching note on stderr.",
|
|
"auto_corrections": [
|
|
{"type": "single_dash_long_flag", "example": "-robot -> --robot", "mode": "all"},
|
|
{"type": "case_normalization", "example": "--Robot -> --robot, --State -> --state", "mode": "all"},
|
|
{"type": "flag_prefix", "example": "--proj -> --project (when unambiguous)", "mode": "all"},
|
|
{"type": "fuzzy_flag", "example": "--projct -> --project", "mode": "all (threshold 0.9 in robot, 0.8 in human)"},
|
|
{"type": "subcommand_alias", "example": "merge_requests -> mrs, robotdocs -> robot-docs", "mode": "all"},
|
|
{"type": "value_normalization", "example": "--state Opened -> --state opened", "mode": "all"},
|
|
{"type": "value_fuzzy", "example": "--state opend -> --state opened", "mode": "all"},
|
|
{"type": "prefix_matching", "example": "lore iss -> lore issues, lore time -> lore timeline", "mode": "all (via clap infer_subcommands)"}
|
|
],
|
|
"teaching_notes": "Auto-corrections emit a JSON warning on stderr: {\"warning\":{\"type\":\"ARG_CORRECTED\",\"corrections\":[...],\"teaching\":[...]}}"
|
|
});
|
|
|
|
// Phase 3: Clap error codes (emitted by handle_clap_error)
|
|
let clap_error_codes = serde_json::json!({
|
|
"UNKNOWN_COMMAND": "Unrecognized subcommand (includes fuzzy suggestion)",
|
|
"UNKNOWN_FLAG": "Unrecognized command-line flag",
|
|
"MISSING_REQUIRED": "Required argument not provided",
|
|
"INVALID_VALUE": "Invalid value for argument",
|
|
"TOO_MANY_VALUES": "Too many values provided",
|
|
"TOO_FEW_VALUES": "Too few values provided",
|
|
"ARGUMENT_CONFLICT": "Conflicting arguments",
|
|
"MISSING_COMMAND": "No subcommand provided (in non-robot mode, shows help)",
|
|
"HELP_REQUESTED": "Help or version flag used",
|
|
"PARSE_ERROR": "General parse error"
|
|
});
|
|
|
|
let config_notes = serde_json::json!({
|
|
"defaultProject": {
|
|
"type": "string?",
|
|
"description": "Fallback project path used when -p/--project is omitted. Must match a configured project path (exact or suffix). CLI -p always overrides.",
|
|
"example": "group/project"
|
|
}
|
|
});
|
|
|
|
let output = RobotDocsOutput {
|
|
ok: true,
|
|
data: RobotDocsData {
|
|
name: "lore".to_string(),
|
|
version,
|
|
description: "Local GitLab data management with semantic search".to_string(),
|
|
activation: RobotDocsActivation {
|
|
flags: vec!["--robot".to_string(), "-J".to_string(), "--json".to_string()],
|
|
env: "LORE_ROBOT=1".to_string(),
|
|
auto: "Non-TTY stdout".to_string(),
|
|
},
|
|
quick_start,
|
|
commands,
|
|
aliases,
|
|
error_tolerance,
|
|
exit_codes,
|
|
clap_error_codes,
|
|
error_format: "stderr JSON: {\"error\":{\"code\":\"...\",\"message\":\"...\",\"suggestion\":\"...\",\"actions\":[\"...\"]}}".to_string(),
|
|
workflows,
|
|
config_notes,
|
|
},
|
|
};
|
|
|
|
if robot_mode {
|
|
println!("{}", serde_json::to_string(&output)?);
|
|
} else {
|
|
println!("{}", serde_json::to_string_pretty(&output)?);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
fn handle_who(
|
|
config_override: Option<&str>,
|
|
mut args: WhoArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
if args.project.is_none() {
|
|
args.project = config.default_project.clone();
|
|
}
|
|
let run = run_who(&config, &args)?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
|
|
if robot_mode {
|
|
print_who_json(&run, &args, elapsed_ms);
|
|
} else {
|
|
print_who_human(&run.result, run.resolved_input.project_path.as_deref());
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn handle_me(
|
|
config_override: Option<&str>,
|
|
args: MeArgs,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let config = Config::load(config_override)?;
|
|
run_me(&config, &args, robot_mode)?;
|
|
Ok(())
|
|
}
|
|
|
|
async fn handle_drift(
|
|
config_override: Option<&str>,
|
|
entity_type: &str,
|
|
iid: i64,
|
|
threshold: f32,
|
|
project: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
let effective_project = config.effective_project(project);
|
|
let response = run_drift(&config, entity_type, iid, threshold, effective_project).await?;
|
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
|
|
|
if robot_mode {
|
|
print_drift_json(&response, elapsed_ms);
|
|
} else {
|
|
print_drift_human(&response);
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
#[allow(clippy::too_many_arguments)]
|
|
async fn handle_list_compat(
|
|
config_override: Option<&str>,
|
|
entity: &str,
|
|
limit: usize,
|
|
project_filter: Option<&str>,
|
|
state_filter: Option<&str>,
|
|
author_filter: Option<&str>,
|
|
assignee_filter: Option<&str>,
|
|
label_filter: Option<&[String]>,
|
|
milestone_filter: Option<&str>,
|
|
since_filter: Option<&str>,
|
|
due_before_filter: Option<&str>,
|
|
has_due_date: bool,
|
|
sort: &str,
|
|
order: &str,
|
|
open_browser: bool,
|
|
json_output: bool,
|
|
draft: bool,
|
|
no_draft: bool,
|
|
reviewer_filter: Option<&str>,
|
|
target_branch_filter: Option<&str>,
|
|
source_branch_filter: Option<&str>,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
let project_filter = config.effective_project(project_filter);
|
|
|
|
let state_normalized = state_filter.map(str::to_lowercase);
|
|
match entity {
|
|
"issues" => {
|
|
let filters = ListFilters {
|
|
limit,
|
|
project: project_filter,
|
|
state: state_normalized.as_deref(),
|
|
author: author_filter,
|
|
assignee: assignee_filter,
|
|
labels: label_filter,
|
|
milestone: milestone_filter,
|
|
since: since_filter,
|
|
due_before: due_before_filter,
|
|
has_due_date,
|
|
statuses: &[],
|
|
sort,
|
|
order,
|
|
};
|
|
|
|
let result = run_list_issues(&config, filters)?;
|
|
|
|
if open_browser {
|
|
open_issue_in_browser(&result);
|
|
} else if json_output {
|
|
print_list_issues_json(&result, start.elapsed().as_millis() as u64, None);
|
|
} else {
|
|
print_list_issues(&result);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
"mrs" => {
|
|
let filters = MrListFilters {
|
|
limit,
|
|
project: project_filter,
|
|
state: state_normalized.as_deref(),
|
|
author: author_filter,
|
|
assignee: assignee_filter,
|
|
reviewer: reviewer_filter,
|
|
labels: label_filter,
|
|
since: since_filter,
|
|
draft,
|
|
no_draft,
|
|
target_branch: target_branch_filter,
|
|
source_branch: source_branch_filter,
|
|
sort,
|
|
order,
|
|
};
|
|
|
|
let result = run_list_mrs(&config, filters)?;
|
|
|
|
if open_browser {
|
|
open_mr_in_browser(&result);
|
|
} else if json_output {
|
|
print_list_mrs_json(&result, start.elapsed().as_millis() as u64, None);
|
|
} else {
|
|
print_list_mrs(&result);
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
_ => {
|
|
eprintln!(
|
|
"{}",
|
|
Theme::error().render(&format!("Unknown entity: {entity}"))
|
|
);
|
|
std::process::exit(1);
|
|
}
|
|
}
|
|
}
|
|
|
|
async fn handle_show_compat(
|
|
config_override: Option<&str>,
|
|
entity: &str,
|
|
iid: i64,
|
|
project_filter: Option<&str>,
|
|
robot_mode: bool,
|
|
) -> Result<(), Box<dyn std::error::Error>> {
|
|
let start = std::time::Instant::now();
|
|
let config = Config::load(config_override)?;
|
|
let project_filter = config.effective_project(project_filter);
|
|
|
|
match entity {
|
|
"issue" => {
|
|
let result = run_show_issue(&config, iid, project_filter)?;
|
|
if robot_mode {
|
|
print_show_issue_json(&result, start.elapsed().as_millis() as u64);
|
|
} else {
|
|
print_show_issue(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
"mr" => {
|
|
let result = run_show_mr(&config, iid, project_filter)?;
|
|
if robot_mode {
|
|
print_show_mr_json(&result, start.elapsed().as_millis() as u64);
|
|
} else {
|
|
print_show_mr(&result);
|
|
}
|
|
Ok(())
|
|
}
|
|
_ => {
|
|
eprintln!(
|
|
"{}",
|
|
Theme::error().render(&format!("Unknown entity: {entity}"))
|
|
);
|
|
std::process::exit(1);
|
|
}
|
|
}
|
|
}
|