Files
gitlore/src/app/handlers.rs
teernisse 796b6b7289 fix(core): reduce ollama startup blocking and handle cold starts gracefully
The ensure_ollama() function previously blocked for up to 10 seconds
waiting for Ollama to become reachable after spawning. Cold starts can
take 30-60s, so this often timed out and reported a misleading error.

Now waits only 5 seconds (enough for hot restarts), and if Ollama is
still starting, reports started=true with no error instead of treating
it as a failure. The embed stage runs 60-90s later (after ingestion),
by which time Ollama is ready. The handler log message is updated to
distinguish hot restarts from cold starts still in progress.
2026-03-13 09:59:08 -04:00

2014 lines
61 KiB
Rust

/// Handle the `issues` subcommand: show one issue by IID, or list issues
/// matching the CLI filters, in human or robot (JSON) output.
fn handle_issues(
    config_override: Option<&str>,
    args: IssuesArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    // Paired --flag/--no-flag toggles: the negative form wins.
    let sort_order = if args.asc && !args.no_asc { "asc" } else { "desc" };
    let has_due = args.has_due && !args.no_has_due;
    let want_open = args.open && !args.no_open;

    // Single-issue mode: an explicit IID short-circuits all list filters.
    if let Some(iid) = args.iid {
        let shown = run_show_issue(&config, iid, project)?;
        if robot_mode {
            print_show_issue_json(&shown, started.elapsed().as_millis() as u64);
        } else {
            print_show_issue(&shown);
        }
        return Ok(());
    }

    // List mode: normalize state to lowercase for case-insensitive filtering.
    let state_lower = args.state.as_deref().map(str::to_lowercase);
    let filters = ListFilters {
        limit: args.limit,
        project,
        state: state_lower.as_deref(),
        author: args.author.as_deref(),
        assignee: args.assignee.as_deref(),
        labels: args.label.as_deref(),
        milestone: args.milestone.as_deref(),
        since: args.since.as_deref(),
        due_before: args.due_before.as_deref(),
        has_due_date: has_due,
        statuses: &args.status,
        sort: &args.sort,
        order: sort_order,
    };
    let listing = run_list_issues(&config, filters)?;
    if want_open {
        open_issue_in_browser(&listing);
    } else if robot_mode {
        print_list_issues_json(
            &listing,
            started.elapsed().as_millis() as u64,
            args.fields.as_deref(),
        );
    } else {
        print_list_issues(&listing);
    }
    Ok(())
}
/// Handle the `mrs` subcommand: show one merge request by IID, or list MRs
/// matching the CLI filters, in human or robot (JSON) output.
fn handle_mrs(
    config_override: Option<&str>,
    args: MrsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    // Paired --flag/--no-flag toggles: the negative form wins.
    let sort_order = if args.asc && !args.no_asc { "asc" } else { "desc" };
    let want_open = args.open && !args.no_open;

    // Single-MR mode: an explicit IID short-circuits all list filters.
    if let Some(iid) = args.iid {
        let shown = run_show_mr(&config, iid, project)?;
        if robot_mode {
            print_show_mr_json(&shown, started.elapsed().as_millis() as u64);
        } else {
            print_show_mr(&shown);
        }
        return Ok(());
    }

    // List mode: normalize state to lowercase for case-insensitive filtering.
    let state_lower = args.state.as_deref().map(str::to_lowercase);
    let filters = MrListFilters {
        limit: args.limit,
        project,
        state: state_lower.as_deref(),
        author: args.author.as_deref(),
        assignee: args.assignee.as_deref(),
        reviewer: args.reviewer.as_deref(),
        labels: args.label.as_deref(),
        since: args.since.as_deref(),
        draft: args.draft,
        no_draft: args.no_draft,
        target_branch: args.target.as_deref(),
        source_branch: args.source.as_deref(),
        sort: &args.sort,
        order: sort_order,
    };
    let listing = run_list_mrs(&config, filters)?;
    if want_open {
        open_mr_in_browser(&listing);
    } else if robot_mode {
        print_list_mrs_json(
            &listing,
            started.elapsed().as_millis() as u64,
            args.fields.as_deref(),
        );
    } else {
        print_list_mrs(&listing);
    }
    Ok(())
}
/// Handle the `notes` subcommand: query locally-stored notes/comments with
/// the given filters and print them (JSON in robot mode).
fn handle_notes(
    config_override: Option<&str>,
    args: NotesArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let db = create_connection(&get_db_path(config.storage.db_path.as_deref()))?;
    let order = if args.asc { "asc" } else { "desc" }.to_string();
    // The owned args fields move straight into the query filter struct.
    let filters = NoteListFilters {
        limit: args.limit,
        project: args.project,
        author: args.author,
        note_type: args.note_type,
        include_system: args.include_system,
        for_issue_iid: args.for_issue,
        for_mr_iid: args.for_mr,
        note_id: args.note_id,
        gitlab_note_id: args.gitlab_note_id,
        discussion_id: args.discussion_id,
        since: args.since,
        until: args.until,
        path: args.path,
        contains: args.contains,
        resolution: args.resolution,
        sort: args.sort,
        order,
    };
    let notes = query_notes(&db, &filters, &config)?;
    if robot_mode {
        print_list_notes_json(
            &notes,
            started.elapsed().as_millis() as u64,
            args.fields.as_deref(),
        );
    } else {
        print_list_notes(&notes);
    }
    Ok(())
}
/// Handle the `ingest` subcommand: fetch issues and/or merge requests from
/// GitLab into the local database, recording the sync run and honoring
/// Ctrl+C for cooperative cancellation.
///
/// `args.entity` of `None` means "ingest everything" (issues, then MRs).
/// Dry-run mode prints a preview and returns without touching the database.
async fn handle_ingest(
    config_override: Option<&str>,
    args: IngestArgs,
    robot_mode: bool,
    quiet: bool,
    metrics: &MetricsLayer,
    rt_handle: &asupersync::runtime::RuntimeHandle,
) -> Result<(), Box<dyn std::error::Error>> {
    let start = std::time::Instant::now();
    // Paired --flag/--no-flag toggles: the negative form always wins.
    let dry_run = args.dry_run && !args.no_dry_run;
    let config = Config::load(config_override)?;
    let project = config.effective_project(args.project.as_deref());
    let force = args.force && !args.no_force;
    let full = args.full && !args.no_full;
    // Handle dry run mode - show preview without making any changes
    if dry_run {
        match args.entity.as_deref() {
            Some(resource_type) => {
                let preview = run_ingest_dry_run(&config, resource_type, project, full)?;
                if robot_mode {
                    print_dry_run_preview_json(&preview);
                } else {
                    print_dry_run_preview(&preview);
                }
            }
            None => {
                // No entity given: preview both resource types back to back.
                let issues_preview = run_ingest_dry_run(&config, "issues", project, full)?;
                let mrs_preview = run_ingest_dry_run(&config, "mrs", project, full)?;
                if robot_mode {
                    print_combined_dry_run_json(&issues_preview, &mrs_preview);
                } else {
                    print_dry_run_preview(&issues_preview);
                    println!();
                    print_dry_run_preview(&mrs_preview);
                }
            }
        }
        return Ok(());
    }
    // Progress UI is suppressed for robot/quiet output.
    // NOTE(review): `display` is later passed by value to two run_ingest calls
    // in the combined branch — presumably IngestDisplay is Copy; verify.
    let display = if robot_mode || quiet {
        IngestDisplay::silent()
    } else {
        IngestDisplay::interactive()
    };
    let entity_label = args.entity.as_deref().unwrap_or("all");
    let command = format!("ingest:{entity_label}");
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let recorder_conn = create_connection(&db_path)?;
    // Short run id: the first 8 hex chars of a simple-format v4 UUID.
    let run_id = uuid::Uuid::new_v4().simple().to_string();
    let run_id_short = &run_id[..8];
    let recorder = SyncRunRecorder::start(&recorder_conn, &command, run_id_short)?;
    // Cooperative cancellation: Ctrl+C sets the signal; ingest polls it.
    let signal = ShutdownSignal::new();
    install_ctrl_c_handler(rt_handle, signal.clone());
    // Run the actual ingest in an inner async block so the match below can
    // record success/interrupt/failure uniformly without early returns.
    let ingest_result: std::result::Result<(), Box<dyn std::error::Error>> = async {
        match args.entity.as_deref() {
            Some(resource_type) => {
                let result = run_ingest(
                    &config,
                    resource_type,
                    project,
                    force,
                    full,
                    false,
                    display,
                    None,
                    &signal,
                )
                .await?;
                if robot_mode {
                    print_ingest_summary_json(&result, start.elapsed().as_millis() as u64);
                } else {
                    print_ingest_summary(&result);
                }
            }
            None => {
                if !robot_mode && !quiet {
                    println!(
                        "{}",
                        Theme::info().render("Ingesting all content (issues + merge requests)...")
                    );
                    println!();
                }
                let issues_result = run_ingest(
                    &config, "issues", project, force, full, false, display, None, &signal,
                )
                .await?;
                let mrs_result = run_ingest(
                    &config, "mrs", project, force, full, false, display, None, &signal,
                )
                .await?;
                if robot_mode {
                    print_combined_ingest_json(
                        &issues_result,
                        &mrs_result,
                        start.elapsed().as_millis() as u64,
                    );
                } else {
                    print_ingest_summary(&issues_result);
                    print_ingest_summary(&mrs_result);
                }
            }
        }
        Ok(())
    }
    .await;
    match ingest_result {
        // Finished without error but the signal fired mid-run: record the run
        // as interrupted, release job locks, and keep the partial data.
        Ok(()) if signal.is_cancelled() => {
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(
                &recorder_conn,
                "Interrupted by user (Ctrl+C)",
                Some(&stages),
            );
            if !robot_mode {
                eprintln!(
                    "{}",
                    Theme::warning().render("Interrupted by Ctrl+C. Partial data has been saved.")
                );
            }
            Ok(())
        }
        // Clean success: persist per-stage timings plus item/error totals.
        Ok(()) => {
            let stages = metrics.extract_timings();
            let total_items: usize = stages.iter().map(|s| s.items_processed).sum();
            let total_errors: usize = stages.iter().map(|s| s.errors).sum();
            let _ = recorder.succeed(&recorder_conn, &stages, total_items, total_errors);
            if !robot_mode && !quiet {
                eprintln!(
                    "{}",
                    Theme::dim().render("Hint: Run 'lore generate-docs' to update searchable documents, then 'lore embed' for vectors.")
                );
            }
            Ok(())
        }
        // Failure: release locks and record the error before propagating it.
        Err(e) => {
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(&recorder_conn, &e.to_string(), Some(&stages));
            Err(e)
        }
    }
}
/// Robot-mode JSON envelope for a combined (issues + MRs) ingest run.
#[derive(Serialize)]
struct CombinedIngestOutput {
    ok: bool,
    data: CombinedIngestData,
    meta: RobotMeta,
}
/// Payload of a combined ingest run, one stats block per resource type.
#[derive(Serialize)]
struct CombinedIngestData {
    // Always "all" for combined runs (see print_combined_ingest_json).
    resource_type: String,
    issues: CombinedIngestEntityStats,
    merge_requests: CombinedIngestEntityStats,
}
/// Counters collected for a single resource type's ingest pass.
#[derive(Serialize)]
struct CombinedIngestEntityStats {
    projects_synced: usize,
    fetched: usize,
    upserted: usize,
    labels_created: usize,
    discussions_fetched: usize,
    notes_upserted: usize,
}
/// Emit the robot-mode JSON envelope for a combined (issues + MRs) ingest
/// run, falling back to a hand-built error document if serialization fails.
fn print_combined_ingest_json(
    issues: &lore::cli::commands::ingest::IngestResult,
    mrs: &lore::cli::commands::ingest::IngestResult,
    elapsed_ms: u64,
) {
    let issue_stats = CombinedIngestEntityStats {
        projects_synced: issues.projects_synced,
        fetched: issues.issues_fetched,
        upserted: issues.issues_upserted,
        labels_created: issues.labels_created,
        discussions_fetched: issues.discussions_fetched,
        notes_upserted: issues.notes_upserted,
    };
    let mr_stats = CombinedIngestEntityStats {
        projects_synced: mrs.projects_synced,
        fetched: mrs.mrs_fetched,
        upserted: mrs.mrs_upserted,
        labels_created: mrs.labels_created,
        discussions_fetched: mrs.discussions_fetched,
        notes_upserted: mrs.notes_upserted,
    };
    let payload = CombinedIngestOutput {
        ok: true,
        data: CombinedIngestData {
            resource_type: "all".to_string(),
            issues: issue_stats,
            merge_requests: mr_stats,
        },
        meta: RobotMeta::new(elapsed_ms),
    };
    let rendered = serde_json::to_string(&payload).unwrap_or_else(|e| {
        format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
    });
    println!("{rendered}");
}
/// Robot-mode JSON envelope for a combined (issues + MRs) dry-run preview.
#[derive(Serialize)]
struct CombinedDryRunOutput {
    ok: bool,
    dry_run: bool,
    data: CombinedDryRunData,
}
/// Dry-run preview payload, one preview per resource type.
#[derive(Serialize)]
struct CombinedDryRunData {
    issues: lore::cli::commands::DryRunPreview,
    merge_requests: lore::cli::commands::DryRunPreview,
}
/// Emit the robot-mode JSON envelope for a combined dry-run preview,
/// falling back to a hand-built error document if serialization fails.
fn print_combined_dry_run_json(
    issues: &lore::cli::commands::DryRunPreview,
    mrs: &lore::cli::commands::DryRunPreview,
) {
    let payload = CombinedDryRunOutput {
        ok: true,
        dry_run: true,
        data: CombinedDryRunData {
            issues: issues.clone(),
            merge_requests: mrs.clone(),
        },
    };
    let rendered = serde_json::to_string(&payload).unwrap_or_else(|e| {
        format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
    });
    println!("{rendered}");
}
/// Handle `lore count`: print entity counts, with a dedicated query and
/// output format for the "events" entity.
async fn handle_count(
    config_override: Option<&str>,
    args: CountArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    // Event counts take a separate path with their own printers.
    if args.entity == "events" {
        let event_counts = run_count_events(&config)?;
        if robot_mode {
            print_event_count_json(&event_counts, started.elapsed().as_millis() as u64);
        } else {
            print_event_count(&event_counts);
        }
        return Ok(());
    }
    let counts = run_count(&config, &args.entity, args.for_entity.as_deref())?;
    if robot_mode {
        print_count_json(&counts, started.elapsed().as_millis() as u64);
    } else {
        print_count(&counts);
    }
    Ok(())
}
/// Handle `lore sync-status`: report sync-run status in human or JSON form.
async fn handle_sync_status_cmd(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let status = run_sync_status(&config)?;
    if robot_mode {
        print_sync_status_json(&status, started.elapsed().as_millis() as u64);
    } else {
        print_sync_status(&status);
    }
    Ok(())
}
/// Robot-mode JSON envelope for a completed `init` run.
#[derive(Serialize)]
struct InitOutput {
    ok: bool,
    data: InitOutputData,
}
/// Payload of a successful `init`: where things were written, who we
/// authenticated as, and which projects were registered.
#[derive(Serialize)]
struct InitOutputData {
    config_path: String,
    data_dir: String,
    user: InitOutputUser,
    projects: Vec<InitOutputProject>,
    // Omitted from the JSON when no default project was chosen.
    #[serde(skip_serializing_if = "Option::is_none")]
    default_project: Option<String>,
}
/// Authenticated GitLab user identity.
#[derive(Serialize)]
struct InitOutputUser {
    username: String,
    name: String,
}
/// A registered project's path and display name.
#[derive(Serialize)]
struct InitOutputProject {
    path: String,
    name: String,
}
fn print_init_json(result: &InitResult) {
let output = InitOutput {
ok: true,
data: InitOutputData {
config_path: result.config_path.clone(),
data_dir: result.data_dir.clone(),
user: InitOutputUser {
username: result.user.username.clone(),
name: result.user.name.clone(),
},
projects: result
.projects
.iter()
.map(|p| InitOutputProject {
path: p.path.clone(),
name: p.name.clone(),
})
.collect(),
default_project: result.default_project.clone(),
},
};
println!(
"{}",
serde_json::to_string(&output).unwrap_or_else(|e| {
format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
})
);
}
// ── Refresh JSON types ──
/// Robot-mode JSON envelope for an `init --refresh` run.
#[derive(Serialize)]
struct RefreshOutput {
    ok: bool,
    data: RefreshOutputData,
}
/// Payload of a refresh run: who we authenticated as, which projects were
/// (re-)registered or failed, and any orphan projects found/deleted.
#[derive(Serialize)]
struct RefreshOutputData {
    // Always "refresh" (distinguishes this envelope from plain init).
    mode: &'static str,
    user: InitOutputUser,
    projects_registered: Vec<InitOutputProject>,
    // The empty-vec cases are omitted from the JSON entirely.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    projects_failed: Vec<RefreshOutputFailure>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    orphans_found: Vec<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    orphans_deleted: Vec<String>,
}
/// A project that failed to register, with the error message.
#[derive(Serialize)]
struct RefreshOutputFailure {
    path: String,
    error: String,
}
fn print_refresh_json(result: &RefreshResult) {
let output = RefreshOutput {
ok: true,
data: RefreshOutputData {
mode: "refresh",
user: InitOutputUser {
username: result.user.username.clone(),
name: result.user.name.clone(),
},
projects_registered: result
.projects_registered
.iter()
.map(|p| InitOutputProject {
path: p.path.clone(),
name: p.name.clone(),
})
.collect(),
projects_failed: result
.projects_failed
.iter()
.map(|p| RefreshOutputFailure {
path: p.path.clone(),
error: p.error.clone(),
})
.collect(),
orphans_found: result.orphans_found.clone(),
orphans_deleted: result.orphans_deleted.clone(),
},
};
println!(
"{}",
serde_json::to_string(&output).unwrap_or_else(|e| {
format!(r#"{{"ok":false,"error":{{"code":"INTERNAL_ERROR","message":"JSON serialization failed: {e}"}}}}"#)
})
);
}
/// Handle `lore init --refresh`: re-register the config's projects against
/// GitLab, optionally delete orphan projects from the database, and print a
/// summary (JSON in robot mode, themed text otherwise).
///
/// Orphan deletion is only offered interactively (neither robot nor
/// non-interactive mode); otherwise orphans are reported but kept.
async fn handle_init_refresh(
    config_override: Option<&str>,
    non_interactive: bool,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    /// Grammatical "s" suffix for pluralized count messages.
    fn plural(n: usize) -> &'static str {
        if n == 1 { "" } else { "s" }
    }
    let mut result = run_init_refresh(RefreshOptions {
        config_path: config_override.map(String::from),
        non_interactive,
    })
    .await?;
    // Handle orphan deletion prompt (interactive only)
    let mut orphans_deleted: Vec<String> = Vec::new();
    if !result.orphans_found.is_empty() && !robot_mode && !non_interactive {
        println!(
            "\n{}",
            Theme::warning().render(&format!(
                "Found {} orphan project{} in database (not in config):",
                result.orphans_found.len(),
                plural(result.orphans_found.len())
            ))
        );
        for orphan in &result.orphans_found {
            // `orphan` is already a String — no format! round-trip needed.
            println!(" {}", Theme::muted().render(orphan));
        }
        println!();
        let confirm = Confirm::new()
            .with_prompt(format!(
                "Delete {} orphan project{} from database?",
                result.orphans_found.len(),
                plural(result.orphans_found.len())
            ))
            .default(false)
            .interact()?;
        if confirm {
            let deleted = delete_orphan_projects(config_override, &result.orphans_found)?;
            orphans_deleted = result.orphans_found.clone();
            println!(
                "{}",
                Theme::success().render(&format!(" Deleted {deleted} orphan project(s)"))
            );
        }
    }
    result.orphans_deleted = orphans_deleted;
    if robot_mode {
        print_refresh_json(&result);
        return Ok(());
    }
    // Human output
    println!(
        "\n{}",
        Theme::success().render(&format!(
            "\u{2713} Authenticated as @{} ({})",
            result.user.username, result.user.name
        ))
    );
    if !result.projects_registered.is_empty() {
        println!("\n {}", Theme::bold().render("Projects"));
        for project in &result.projects_registered {
            println!(
                " {} {:<40} registered",
                Theme::success().render("\u{2713}"),
                project.path
            );
        }
    }
    // Iterating an empty vec is a no-op, so no emptiness guard is needed.
    for failure in &result.projects_failed {
        println!(
            " {} {:<40} {}",
            Theme::error().render("\u{2717}"),
            failure.path,
            failure.error
        );
    }
    // Summary: registered / failed / orphans deleted / orphans kept.
    let registered = result.projects_registered.len();
    let failed = result.projects_failed.len();
    // orphans_deleted is always a subset of orphans_found; saturating_sub
    // guards against an underflow panic if that invariant ever breaks.
    let orphans_kept = result
        .orphans_found
        .len()
        .saturating_sub(result.orphans_deleted.len());
    let mut summary_parts: Vec<String> = Vec::new();
    summary_parts.push(format!(
        "{} project{} registered",
        registered,
        plural(registered)
    ));
    if failed > 0 {
        summary_parts.push(format!("{failed} failed"));
    }
    if !result.orphans_deleted.is_empty() {
        summary_parts.push(format!(
            "{} orphan(s) deleted",
            result.orphans_deleted.len()
        ));
    }
    if orphans_kept > 0 {
        summary_parts.push(format!("{orphans_kept} orphan(s) kept"));
    }
    println!(
        "\n{}",
        Theme::info().render(&format!(" {}", summary_parts.join(", ")))
    );
    Ok(())
}
/// Handle `lore init`: create (or overwrite) the config file and register
/// projects, gathering inputs from CLI flags or interactive prompts.
///
/// Modes:
/// - `--refresh` delegates to `handle_init_refresh`.
/// - Robot mode requires `--gitlab-url`, `--token-env-var`, and
///   `--projects`; it emits JSON and never prompts (exit code 2 when a
///   required flag is missing).
/// - Otherwise, any input not supplied as a flag is prompted for.
#[allow(clippy::too_many_arguments)]
async fn handle_init(
    config_override: Option<&str>,
    refresh: bool,
    force: bool,
    non_interactive: bool,
    robot_mode: bool,
    gitlab_url_flag: Option<String>,
    token_env_var_flag: Option<String>,
    projects_flag: Option<String>,
    default_project_flag: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
    /// Split a comma-separated project list, trimming whitespace and
    /// dropping empty entries (e.g. from trailing commas).
    fn split_projects(raw: &str) -> Vec<String> {
        raw.split(',')
            .map(|p| p.trim().to_string())
            .filter(|p| !p.is_empty())
            .collect()
    }
    // ── Handle --refresh mode ──
    if refresh {
        return handle_init_refresh(config_override, non_interactive, robot_mode).await;
    }
    if robot_mode {
        let missing: Vec<&str> = [
            gitlab_url_flag.is_none().then_some("--gitlab-url"),
            token_env_var_flag.is_none().then_some("--token-env-var"),
            projects_flag.is_none().then_some("--projects"),
        ]
        .into_iter()
        .flatten()
        .collect();
        if !missing.is_empty() {
            let output = RobotErrorWithSuggestion {
                error: RobotErrorSuggestionData {
                    code: "MISSING_FLAGS".to_string(),
                    message: format!("Robot mode requires flags: {}", missing.join(", ")),
                    suggestion: "lore --robot init --gitlab-url https://gitlab.com --token-env-var GITLAB_TOKEN --projects group/project".to_string(),
                    correction: None,
                    valid_values: None,
                },
            };
            eprintln!("{}", serde_json::to_string(&output)?);
            std::process::exit(2);
        }
        // The emptiness check above guarantees all three flags are present;
        // destructure instead of expect() so the invariant stays structural
        // (the old expect messages referenced stale line numbers).
        let (Some(gitlab_url), Some(token_env_var), Some(projects)) =
            (gitlab_url_flag, token_env_var_flag, projects_flag)
        else {
            unreachable!("missing required robot-mode flags were rejected above");
        };
        let result = run_init(
            InitInputs {
                gitlab_url,
                token_env_var,
                project_paths: split_projects(&projects),
                default_project: default_project_flag.clone(),
            },
            InitOptions {
                config_path: config_override.map(String::from),
                // Robot mode never prompts, so overwrite is implied.
                force: true,
                non_interactive: true,
            },
        )
        .await?;
        print_init_json(&result);
        return Ok(());
    }
    // ── Interactive / flag-driven mode ──
    let config_path = get_config_path(config_override);
    let mut confirmed_overwrite = force;
    if config_path.exists() && !force {
        if non_interactive {
            eprintln!(
                "{}",
                Theme::error().render(&format!(
                    "Config already exists at {}",
                    config_path.display()
                ))
            );
            eprintln!(
                "{}",
                Theme::info().render(" • Use --refresh to register new projects from config")
            );
            eprintln!(
                "{}",
                Theme::info().render(" • Use --force to overwrite the config file")
            );
            std::process::exit(2);
        }
        println!(
            "{}",
            Theme::warning().render(&format!(
                "Config already exists at {}",
                config_path.display()
            ))
        );
        println!(
            "{}",
            Theme::info().render(" • Use --refresh to register new projects from config")
        );
        println!();
        let confirm = Confirm::new()
            .with_prompt("Overwrite existing config?")
            .default(false)
            .interact()?;
        if !confirm {
            println!("{}", Theme::warning().render("Cancelled."));
            std::process::exit(2);
        }
        confirmed_overwrite = true;
    }
    // Each input falls back to an interactive prompt when its flag is absent.
    let gitlab_url: String = if let Some(url) = gitlab_url_flag {
        url
    } else {
        Input::new()
            .with_prompt("GitLab URL")
            .default("https://gitlab.com".to_string())
            .validate_with(|input: &String| -> Result<(), &str> {
                if url::Url::parse(input).is_ok() {
                    Ok(())
                } else {
                    Err("Please enter a valid URL")
                }
            })
            .interact_text()?
    };
    let token_env_var: String = if let Some(var) = token_env_var_flag {
        var
    } else {
        Input::new()
            .with_prompt("Token environment variable name")
            .default("GITLAB_TOKEN".to_string())
            .interact_text()?
    };
    let project_paths: Vec<String> = if let Some(projects) = projects_flag {
        split_projects(&projects)
    } else {
        let project_paths_input: String = Input::new()
            .with_prompt("Project paths (comma-separated, e.g., group/project)")
            .validate_with(|input: &String| -> Result<(), &str> {
                if input.trim().is_empty() {
                    Err("Please enter at least one project path")
                } else {
                    Ok(())
                }
            })
            .interact_text()?;
        split_projects(&project_paths_input)
    };
    // Resolve default project: CLI flag, interactive prompt, or None
    let default_project = if default_project_flag.is_some() {
        default_project_flag
    } else if project_paths.len() > 1 && !non_interactive {
        let set_default = Confirm::new()
            .with_prompt("Set a default project? (used when -p is omitted)")
            .default(true)
            .interact()?;
        if set_default {
            let selection = dialoguer::Select::new()
                .with_prompt("Default project")
                .items(&project_paths)
                .default(0)
                .interact()?;
            Some(project_paths[selection].clone())
        } else {
            None
        }
    } else {
        None
    };
    println!("{}", Theme::info().render("Validating configuration..."));
    let result = run_init(
        InitInputs {
            gitlab_url,
            token_env_var,
            project_paths,
            default_project,
        },
        InitOptions {
            config_path: config_override.map(String::from),
            force: confirmed_overwrite,
            non_interactive,
        },
    )
    .await?;
    println!(
        "{}",
        Theme::success().render(&format!(
            "\n\u{2713} Authenticated as @{} ({})",
            result.user.username, result.user.name
        ))
    );
    for project in &result.projects {
        println!(
            "{}",
            Theme::success().render(&format!("\u{2713} {} ({})", project.path, project.name))
        );
    }
    if let Some(ref dp) = result.default_project {
        println!(
            "{}",
            Theme::success().render(&format!("\u{2713} Default project: {dp}"))
        );
    }
    println!(
        "{}",
        Theme::success().render(&format!(
            "\n\u{2713} Config written to {}",
            result.config_path
        ))
    );
    println!(
        "{}",
        Theme::success().render(&format!(
            "\u{2713} Database initialized at {}",
            result.data_dir
        ))
    );
    println!(
        "{}",
        Theme::info().render("\nSetup complete! Run 'lore doctor' to verify.")
    );
    Ok(())
}
/// Robot-mode JSON envelope for `lore auth test`.
#[derive(Serialize)]
struct AuthTestOutput {
    ok: bool,
    data: AuthTestData,
    meta: RobotMeta,
}
/// Successful authentication details reported to robot consumers.
#[derive(Serialize)]
struct AuthTestData {
    authenticated: bool,
    username: String,
    name: String,
    gitlab_url: String,
}
/// Handle `lore auth test`: verify the GitLab token works and report the
/// authenticated identity. On failure, prints the error (JSON in robot
/// mode) and exits with the error's mapped exit code.
async fn handle_auth_test(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    // Success path first; the failure path falls through below.
    let e = match run_auth_test(config_override).await {
        Ok(result) => {
            if robot_mode {
                let payload = AuthTestOutput {
                    ok: true,
                    data: AuthTestData {
                        authenticated: true,
                        username: result.username.clone(),
                        name: result.name.clone(),
                        gitlab_url: result.base_url.clone(),
                    },
                    meta: RobotMeta::new(started.elapsed().as_millis() as u64),
                };
                println!("{}", serde_json::to_string(&payload)?);
            } else {
                println!("Authenticated as @{} ({})", result.username, result.name);
                println!("GitLab: {}", result.base_url);
            }
            return Ok(());
        }
        Err(e) => e,
    };
    if robot_mode {
        let payload = RobotErrorOutput::from(&e);
        eprintln!(
            "{}",
            serde_json::to_string(&payload).unwrap_or_else(|_| {
                // Last-resort JSON built by hand with minimal escaping.
                let msg = e.to_string().replace('\\', "\\\\").replace('"', "\\\"");
                format!(
                    r#"{{"error":{{"code":"{}","message":"{}"}}}}"#,
                    e.code(),
                    msg
                )
            })
        );
    } else {
        eprintln!("{} {}", Theme::error().render("Error:"), e);
        if let Some(suggestion) = e.suggestion() {
            eprintln!("{} {}", Theme::warning().render("Hint:"), suggestion);
        }
    }
    std::process::exit(e.exit_code());
}
/// Robot-mode JSON envelope for `lore doctor`.
#[derive(Serialize)]
struct DoctorOutput {
    ok: bool,
    data: DoctorData,
    meta: RobotMeta,
}
/// Overall doctor verdict plus the individual check results.
#[derive(Serialize)]
struct DoctorData {
    success: bool,
    checks: lore::cli::commands::DoctorChecks,
}
/// Handle `lore doctor`: run health checks and report the results.
/// Exits with status 1 when any check fails, for scripting.
async fn handle_doctor(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let report = run_doctor(config_override).await;
    if robot_mode {
        let payload = DoctorOutput {
            ok: true,
            data: DoctorData {
                success: report.success,
                checks: report.checks,
            },
            meta: RobotMeta::new(started.elapsed().as_millis() as u64),
        };
        println!("{}", serde_json::to_string(&payload)?);
    } else {
        print_doctor_results(&report);
    }
    if !report.success {
        std::process::exit(1);
    }
    Ok(())
}
/// Robot-mode JSON envelope for `lore version`.
#[derive(Serialize)]
struct VersionOutput {
    ok: bool,
    data: VersionData,
    meta: RobotMeta,
}
/// Build identity: crate name, version, and optional git hash.
#[derive(Serialize)]
struct VersionData {
    name: &'static str,
    version: String,
    // Omitted when the build had no git hash available.
    #[serde(skip_serializing_if = "Option::is_none")]
    git_hash: Option<String>,
}
/// Handle `lore version`: print the crate version and, when the build
/// embedded one, the git hash (JSON envelope in robot mode).
fn handle_version(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let version = env!("CARGO_PKG_VERSION").to_string();
    // GIT_HASH may be empty when the build had no git metadata.
    let raw_hash = env!("GIT_HASH").to_string();
    let git_hash = if raw_hash.is_empty() {
        None
    } else {
        Some(raw_hash)
    };
    if robot_mode {
        let payload = VersionOutput {
            ok: true,
            data: VersionData {
                name: "lore",
                version,
                git_hash,
            },
            meta: RobotMeta::new(started.elapsed().as_millis() as u64),
        };
        println!("{}", serde_json::to_string(&payload)?);
    } else {
        match git_hash {
            Some(hash) => println!("lore version {} ({})", version, hash),
            None => println!("lore version {}", version),
        }
    }
    Ok(())
}
/// Handle `lore completions <shell>`: write a shell completion script for
/// the CLI to stdout. Errors on unrecognized shell names.
fn handle_completions(shell: &str) -> Result<(), Box<dyn std::error::Error>> {
    use clap::CommandFactory;
    use clap_complete::{Shell, generate};
    // Map the user-supplied name onto clap_complete's Shell enum.
    let target = match shell {
        "bash" => Shell::Bash,
        "zsh" => Shell::Zsh,
        "fish" => Shell::Fish,
        "powershell" => Shell::PowerShell,
        other => return Err(format!("Unsupported shell: {other}").into()),
    };
    let mut command = Cli::command();
    generate(target, &mut command, "lore", &mut std::io::stdout());
    Ok(())
}
/// Placeholder for the unimplemented `backup` command: reports the
/// limitation (with a manual workaround) and exits with status 1.
fn handle_backup(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
    if !robot_mode {
        eprintln!(
            "{} The 'backup' command is not yet implemented.",
            Theme::error().render("Error:")
        );
    } else {
        let payload = RobotErrorWithSuggestion {
            error: RobotErrorSuggestionData {
                code: "NOT_IMPLEMENTED".to_string(),
                message: "The 'backup' command is not yet implemented.".to_string(),
                suggestion: "Use manual database backup: cp ~/.local/share/lore/lore.db ~/.local/share/lore/lore.db.bak".to_string(),
                correction: None,
                valid_values: None,
            },
        };
        eprintln!("{}", serde_json::to_string(&payload)?);
    }
    std::process::exit(1);
}
/// Placeholder for the unimplemented `reset` command: reports the
/// limitation (with a manual workaround) and exits with status 1.
fn handle_reset(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
    if !robot_mode {
        eprintln!(
            "{} The 'reset' command is not yet implemented.",
            Theme::error().render("Error:")
        );
    } else {
        let payload = RobotErrorWithSuggestion {
            error: RobotErrorSuggestionData {
                code: "NOT_IMPLEMENTED".to_string(),
                message: "The 'reset' command is not yet implemented.".to_string(),
                suggestion: "Manually delete the database: rm ~/.local/share/lore/lore.db"
                    .to_string(),
                correction: None,
                valid_values: None,
            },
        };
        eprintln!("{}", serde_json::to_string(&payload)?);
    }
    std::process::exit(1);
}
/// Robot-mode JSON envelope for `lore migrate`.
#[derive(Serialize)]
struct MigrateOutput {
    ok: bool,
    data: MigrateData,
    meta: RobotMeta,
}
/// Schema versions before/after, and whether anything actually changed.
#[derive(Serialize)]
struct MigrateData {
    before_version: i32,
    after_version: i32,
    migrated: bool,
}
/// Robot-mode error envelope that carries an actionable suggestion.
#[derive(Serialize)]
struct RobotErrorWithSuggestion {
    error: RobotErrorSuggestionData,
}
/// Error details: machine-readable code, human message, and a suggested
/// command or fix; correction/valid_values are only present when relevant.
#[derive(Serialize)]
struct RobotErrorSuggestionData {
    code: String,
    message: String,
    suggestion: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    correction: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    valid_values: Option<Vec<String>>,
}
/// Handle `lore migrate`: apply any pending schema migrations.
/// Exits with status 10 when the database file does not exist yet.
async fn handle_migrate(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let db_path = get_db_path(config.storage.db_path.as_deref());
    // A missing database means init has not been run — bail with code 10.
    if !db_path.exists() {
        if robot_mode {
            let payload = RobotErrorWithSuggestion {
                error: RobotErrorSuggestionData {
                    code: "DB_ERROR".to_string(),
                    message: format!("Database not found at {}", db_path.display()),
                    suggestion: "Run 'lore init' first".to_string(),
                    correction: None,
                    valid_values: None,
                },
            };
            eprintln!("{}", serde_json::to_string(&payload)?);
        } else {
            eprintln!(
                "{}",
                Theme::error().render(&format!("Database not found at {}", db_path.display()))
            );
            eprintln!(
                "{}",
                Theme::warning().render("Run 'lore init' first to create the database.")
            );
        }
        std::process::exit(10);
    }
    let db = create_connection(&db_path)?;
    let version_before = get_schema_version(&db);
    if !robot_mode {
        println!(
            "{}",
            Theme::info().render(&format!("Current schema version: {}", version_before))
        );
    }
    run_migrations(&db)?;
    let version_after = get_schema_version(&db);
    let migrated = version_after > version_before;
    if robot_mode {
        let payload = MigrateOutput {
            ok: true,
            data: MigrateData {
                before_version: version_before,
                after_version: version_after,
                migrated,
            },
            meta: RobotMeta::new(started.elapsed().as_millis() as u64),
        };
        println!("{}", serde_json::to_string(&payload)?);
    } else if migrated {
        println!(
            "{}",
            Theme::success().render(&format!(
                "Migrations applied: {} -> {}",
                version_before, version_after
            ))
        );
    } else {
        println!(
            "{}",
            Theme::success().render("Database is already up to date.")
        );
    }
    Ok(())
}
/// Handle `lore stats`: print database statistics, optionally running
/// consistency checks (and repairs, which imply checking).
async fn handle_stats(
    config_override: Option<&str>,
    args: StatsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let dry_run = args.dry_run && !args.no_dry_run;
    // --repair always implies the consistency check.
    let check = args.repair || (args.check && !args.no_check);
    let stats = run_stats(&config, check, args.repair, dry_run)?;
    if robot_mode {
        print_stats_json(&stats, started.elapsed().as_millis() as u64);
    } else {
        print_stats(&stats);
    }
    Ok(())
}
/// Handle `lore file-history`: show change history for one file, resolving
/// bare filenames to full repository paths first.
fn handle_file_history(
    config_override: Option<&str>,
    args: FileHistoryArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let project = config
        .effective_project(args.project.as_deref())
        .map(String::from);
    let normalized = normalize_repo_path(&args.path);
    // Resolve bare filenames before querying (same path resolution as trace/who)
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;
    let project_id = project
        .as_deref()
        .map(|p| resolve_project(&conn, p))
        .transpose()?;
    let path_query = build_path_query(&conn, &normalized, project_id)?;
    // Directory prefixes keep the raw (trimmed) path: file-history is
    // file-oriented, and path_query.value holds LIKE-escaped metacharacters.
    let target_path = if path_query.is_prefix {
        normalized.trim_end_matches('/').to_string()
    } else {
        path_query.value
    };
    let history = run_file_history(
        &config,
        &target_path,
        project.as_deref(),
        args.no_follow_renames,
        args.merged,
        args.discussions,
        args.limit,
    )?;
    if robot_mode {
        print_file_history_json(&history, started.elapsed().as_millis() as u64)?;
    } else {
        print_file_history(&history);
    }
    Ok(())
}
/// Handle `lore trace`: file-level change tracing with optional rename
/// following and discussion expansion.
fn handle_trace(
    config_override: Option<&str>,
    args: TraceArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let started = std::time::Instant::now();
    let config = Config::load(config_override)?;
    let (raw_path, line_requested) = parse_trace_path(&args.path);
    let normalized = normalize_repo_path(&raw_path);
    // A path:line request degrades to file-level output at this tier.
    if line_requested.is_some() && !robot_mode {
        eprintln!(
            "Note: Line-level tracing requires Tier 2 (git blame). Showing file-level results."
        );
    }
    let project = config
        .effective_project(args.project.as_deref())
        .map(String::from);
    let conn = create_connection(&get_db_path(config.storage.db_path.as_deref()))?;
    let project_id = project
        .as_deref()
        .map(|p| resolve_project(&conn, p))
        .transpose()?;
    // Resolve bare filenames (e.g. "operators.ts" -> "src/utils/operators.ts")
    let path_query = build_path_query(&conn, &normalized, project_id)?;
    // Directory prefixes keep the raw (trimmed) path: trace is file-oriented,
    // and path_query.value holds LIKE-escaped metacharacters.
    let target_path = if path_query.is_prefix {
        normalized.trim_end_matches('/').to_string()
    } else {
        path_query.value
    };
    let trace = run_trace(
        &conn,
        project_id,
        &target_path,
        !args.no_follow_renames,
        args.discussions,
        args.limit,
    )?;
    if robot_mode {
        print_trace_json(&trace, started.elapsed().as_millis() as u64, line_requested)?;
    } else {
        print_trace(&trace);
    }
    Ok(())
}
/// Handle `lore timeline`: build an event timeline for a query and print
/// it (JSON with metadata in robot mode).
async fn handle_timeline(
    config_override: Option<&str>,
    args: TimelineArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    let config = Config::load(config_override)?;
    let project = config
        .effective_project(args.project.as_deref())
        .map(String::from);
    let params = TimelineParams {
        query: args.query,
        project,
        since: args.since,
        depth: args.depth,
        no_mentions: args.no_mentions,
        limit: args.limit,
        max_seeds: args.max_seeds,
        max_entities: args.max_entities,
        max_evidence: args.max_evidence,
        robot_mode,
    };
    let timeline = run_timeline(&config, &params).await?;
    if !robot_mode {
        print_timeline(&timeline);
        return Ok(());
    }
    print_timeline_json_with_meta(
        &timeline,
        timeline.total_filtered_events,
        params.depth,
        !params.no_mentions,
        args.fields.as_deref(),
    );
    Ok(())
}
async fn handle_search(
    config_override: Option<&str>,
    args: SearchArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Run a search with CLI filters, showing a spinner while it works.
    let cfg = Config::load(config_override)?;
    let explain = args.explain && !args.no_explain;
    // "raw" passes the query to FTS unmodified; anything else is sanitized.
    let fts_mode = if args.fts_mode.as_str() == "raw" {
        lore::search::FtsQueryMode::Raw
    } else {
        lore::search::FtsQueryMode::Safe
    };
    let project = cfg
        .effective_project(args.project.as_deref())
        .map(String::from);
    let cli_filters = SearchCliFilters {
        source_type: args.source_type,
        author: args.author,
        project,
        labels: args.label,
        path: args.path,
        since: args.since,
        updated_since: args.updated_since,
        limit: args.limit,
    };
    let spinner = lore::cli::progress::stage_spinner_v2(
        lore::cli::render::Icons::search(),
        "Search",
        &format!("Searching ({})...", args.mode),
        robot_mode,
    );
    let started_at = std::time::Instant::now();
    let response = run_search(
        &cfg,
        &args.query,
        cli_filters,
        fts_mode,
        &args.mode,
        explain,
    )
    .await?;
    let elapsed_ms = started_at.elapsed().as_millis() as u64;
    spinner.finish_and_clear();
    if robot_mode {
        print_search_results_json(&response, elapsed_ms, args.fields.as_deref());
    } else {
        print_search_results(&response, explain);
    }
    Ok(())
}
async fn handle_generate_docs(
    config_override: Option<&str>,
    args: GenerateDocsArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Regenerate derived documents, then print results plus follow-up hints.
    let started_at = std::time::Instant::now();
    let cfg = Config::load(config_override)?;
    let project = cfg.effective_project(args.project.as_deref());
    let result = run_generate_docs(&cfg, args.full, project, None)?;
    let elapsed = started_at.elapsed();
    if robot_mode {
        print_generate_docs_json(&result, elapsed.as_millis() as u64);
        return Ok(());
    }
    print_generate_docs(&result);
    // Only surface the duration when it is long enough to matter.
    if elapsed.as_secs() >= 1 {
        eprintln!(
            "{}",
            Theme::dim().render(&format!(" Done in {:.1}s", elapsed.as_secs_f64()))
        );
    }
    // Regenerated docs have stale vector embeddings until 'lore embed' runs.
    if result.regenerated > 0 {
        eprintln!(
            "{}",
            Theme::dim().render(
                "Hint: Run 'lore embed' to update vector embeddings for changed documents."
            )
        );
    }
    Ok(())
}
async fn handle_embed(
    config_override: Option<&str>,
    args: EmbedArgs,
    robot_mode: bool,
    rt_handle: &asupersync::runtime::RuntimeHandle,
) -> Result<(), Box<dyn std::error::Error>> {
    use std::sync::Arc;
    use std::sync::atomic::{AtomicBool, Ordering};
    // Embed pending documents, driving a nested progress bar from callbacks.
    let started_at = std::time::Instant::now();
    let cfg = Config::load(config_override)?;
    let full = args.full && !args.no_full;
    let retry_failed = args.retry_failed && !args.no_retry_failed;
    let signal = ShutdownSignal::new();
    install_ctrl_c_handler(rt_handle, signal.clone());
    let embed_bar = lore::cli::progress::nested_progress("Embedding", 0, robot_mode);
    let bar_for_cb = embed_bar.clone();
    // Flips to true on the first progress report so the steady tick is
    // enabled exactly once.
    let ticking = Arc::new(AtomicBool::new(false));
    let ticking_cb = Arc::clone(&ticking);
    let progress_cb: Box<dyn Fn(usize, usize)> = Box::new(move |processed, total| {
        if total == 0 {
            return;
        }
        // First caller wins the exchange and starts the tick animation.
        if ticking_cb
            .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            bar_for_cb.enable_steady_tick(std::time::Duration::from_millis(100));
        }
        bar_for_cb.set_length(total as u64);
        bar_for_cb.set_position(processed as u64);
    });
    let result = run_embed(&cfg, full, retry_failed, Some(progress_cb), &signal).await?;
    embed_bar.finish_and_clear();
    let elapsed = started_at.elapsed();
    if robot_mode {
        print_embed_json(&result, elapsed.as_millis() as u64);
    } else {
        print_embed(&result);
        if elapsed.as_secs() >= 1 {
            eprintln!(
                "{}",
                Theme::dim().render(&format!(" Done in {:.1}s", elapsed.as_secs_f64()))
            );
        }
    }
    Ok(())
}
// Entry point for `lore sync`. Validates the flag matrix (surgical vs full
// vs dry-run), optionally takes the cron file lock and warms up Ollama,
// then runs the sync and records the outcome via SyncRunRecorder.
async fn handle_sync_cmd(
    config_override: Option<&str>,
    args: SyncArgs,
    robot_mode: bool,
    metrics: &MetricsLayer,
    rt_handle: &asupersync::runtime::RuntimeHandle,
) -> Result<(), Box<dyn std::error::Error>> {
    let dry_run = args.dry_run && !args.no_dry_run;
    // Dedup and sort IIDs
    let mut issue_iids = args.issue;
    let mut mr_iids = args.mr;
    issue_iids.sort_unstable();
    issue_iids.dedup();
    mr_iids.sort_unstable();
    mr_iids.dedup();
    let mut config = Config::load(config_override)?;
    // CLI --no-* flags override the corresponding config fetch toggles.
    if args.no_events {
        config.sync.fetch_resource_events = false;
    }
    if args.no_file_changes {
        config.sync.fetch_mr_file_changes = false;
    }
    if args.no_status {
        config.sync.fetch_work_item_status = false;
    }
    let options = SyncOptions {
        full: args.full && !args.no_full,
        force: args.force && !args.no_force,
        no_embed: args.no_embed,
        no_docs: args.no_docs,
        no_events: args.no_events,
        robot_mode,
        dry_run,
        issue_iids,
        mr_iids,
        project: args.project,
        preflight_only: args.preflight_only,
    };
    // Validation: preflight_only requires surgical mode
    if options.preflight_only && !options.is_surgical() {
        return Err("--preflight-only requires --issue or --mr".into());
    }
    // Validation: full + surgical are incompatible
    if options.full && options.is_surgical() {
        return Err("--full and --issue/--mr are incompatible".into());
    }
    // Validation: surgical mode requires a project (via -p or config defaultProject)
    if options.is_surgical()
        && config
            .effective_project(options.project.as_deref())
            .is_none()
    {
        return Err("--issue/--mr requires -p/--project (or set defaultProject in config)".into());
    }
    // Validation: hard cap on total surgical targets
    let total_targets = options.issue_iids.len() + options.mr_iids.len();
    if total_targets > SyncOptions::MAX_SURGICAL_TARGETS {
        return Err(format!(
            "Too many surgical targets ({total_targets}); maximum is {}",
            SyncOptions::MAX_SURGICAL_TARGETS
        )
        .into());
    }
    // Surgical + dry-run → treat as preflight-only
    let mut options = options;
    if dry_run && options.is_surgical() {
        options.preflight_only = true;
    }
    // Resolve effective project for surgical mode: when -p is not passed but
    // defaultProject is set in config, populate options.project so the surgical
    // orchestrator receives the resolved project path.
    if options.is_surgical() && options.project.is_none() {
        options.project = config.default_project.clone();
    }
    // For non-surgical dry run, skip recording and just show the preview
    if dry_run && !options.is_surgical() {
        let signal = ShutdownSignal::new();
        run_sync(&config, options, None, &signal).await?;
        return Ok(());
    }
    // Acquire file lock if --lock was passed (used by cron to skip overlapping runs)
    // The guard is held in _sync_lock for the rest of the function.
    let _sync_lock = if args.lock {
        match lore::core::cron::acquire_sync_lock() {
            Ok(Some(guard)) => Some(guard),
            Ok(None) => {
                // Another sync is running — silently exit (expected for cron)
                tracing::debug!("--lock: another sync is running, skipping");
                return Ok(());
            }
            Err(e) => {
                tracing::warn!(error = %e, "--lock: failed to acquire file lock, skipping sync");
                return Ok(());
            }
        }
    } else {
        None
    };
    // In cron mode (--lock), ensure Ollama is running for embeddings
    if args.lock {
        let result = lore::core::ollama_mgmt::ensure_ollama(&config.embedding.base_url);
        if !result.installed {
            tracing::warn!(
                "Ollama is not installed — embeddings will be skipped. {}",
                result.install_hint.as_deref().unwrap_or("")
            );
        } else if result.started && result.running {
            tracing::info!("Started ollama serve (was not running)");
        } else if result.started {
            // Spawned but not yet reachable: cold starts can take longer than
            // the startup wait; the embed stage runs later, by which time the
            // server is expected to be up.
            tracing::info!("Spawned ollama serve (cold start in progress, should be ready by embed stage)");
        } else if !result.running {
            tracing::warn!(
                "Failed to start Ollama: {}",
                result.error.as_deref().unwrap_or("unknown error")
            );
        }
    }
    // Surgical mode: run_sync_surgical manages its own recorder, signal, and recording.
    // Skip the normal recorder setup and let the dispatch handle everything.
    if options.is_surgical() {
        let signal = ShutdownSignal::new();
        install_ctrl_c_handler(rt_handle, signal.clone());
        let start = std::time::Instant::now();
        match run_sync(&config, options, None, &signal).await {
            Ok(result) => {
                let elapsed = start.elapsed();
                if robot_mode {
                    print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
                } else {
                    print_sync(&result, elapsed, Some(metrics), args.timings);
                }
                return Ok(());
            }
            Err(e) => return Err(e.into()),
        }
    }
    // Full/incremental sync path: record the run so its outcome is queryable.
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let recorder_conn = create_connection(&db_path)?;
    // 32-hex simple UUID; the first 8 chars identify the run in records/logs.
    let run_id = uuid::Uuid::new_v4().simple().to_string();
    let run_id_short = &run_id[..8];
    let recorder = SyncRunRecorder::start(&recorder_conn, "sync", run_id_short)?;
    let signal = ShutdownSignal::new();
    install_ctrl_c_handler(rt_handle, signal.clone());
    let start = std::time::Instant::now();
    match run_sync(&config, options, Some(run_id_short), &signal).await {
        // Ctrl+C during the run: mark the run failed, release job locks,
        // and still show whatever partial results were produced.
        Ok(result) if signal.is_cancelled() => {
            let elapsed = start.elapsed();
            let stages = metrics.extract_timings();
            let released = release_all_locked_jobs(&recorder_conn).unwrap_or(0);
            let _ = recorder.fail(
                &recorder_conn,
                "Interrupted by user (Ctrl+C)",
                Some(&stages),
            );
            if robot_mode {
                print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
            } else {
                eprintln!();
                eprintln!(
                    "{}",
                    Theme::warning().render("Interrupted by Ctrl+C. Partial results:")
                );
                print_sync(&result, elapsed, Some(metrics), args.timings);
                if released > 0 {
                    eprintln!(
                        "{}",
                        Theme::dim().render(&format!("Released {released} locked jobs"))
                    );
                }
            }
            Ok(())
        }
        Ok(result) => {
            let elapsed = start.elapsed();
            let stages = metrics.extract_timings();
            // Recorded item count aggregates all four update categories.
            let total_items = result.issues_updated
                + result.mrs_updated
                + result.documents_regenerated
                + result.documents_embedded;
            let total_errors = result.resource_events_failed;
            let _ = recorder.succeed(&recorder_conn, &stages, total_items, total_errors);
            if robot_mode {
                print_sync_json(&result, elapsed.as_millis() as u64, Some(metrics));
            } else {
                print_sync(&result, elapsed, Some(metrics), args.timings);
            }
            Ok(())
        }
        Err(e) => {
            // Record the failure and release any jobs the run left locked;
            // best-effort (errors from cleanup are intentionally ignored).
            let stages = metrics.extract_timings();
            let _ = release_all_locked_jobs(&recorder_conn);
            let _ = recorder.fail(&recorder_conn, &e.to_string(), Some(&stages));
            Err(e.into())
        }
    }
}
fn handle_cron(
    config_override: Option<&str>,
    args: CronArgs,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Manage the cron-driven background sync: install, uninstall, or inspect.
    let started_at = std::time::Instant::now();
    match args.action {
        CronAction::Install { interval } => {
            let install = run_cron_install(interval)?;
            let elapsed_ms = started_at.elapsed().as_millis() as u64;
            if robot_mode {
                print_cron_install_json(&install, elapsed_ms);
            } else {
                print_cron_install(&install);
            }
            // Cron runs in a minimal shell with no env vars, so a stored
            // token is mandatory; warn when none is configured.
            if let Ok(config) = Config::load(config_override) {
                let token_missing = config
                    .gitlab
                    .token
                    .as_ref()
                    .is_none_or(|t| t.trim().is_empty());
                if token_missing {
                    if robot_mode {
                        eprintln!(
                            "{{\"warning\":\"No stored token found. Cron sync requires a stored token. Run: lore token set\"}}"
                        );
                    } else {
                        eprintln!();
                        eprintln!(
                            " {} No stored token found. Cron sync requires a stored token.",
                            lore::cli::render::Theme::warning()
                                .render(lore::cli::render::Icons::warning()),
                        );
                        eprintln!(" Run: lore token set");
                        eprintln!();
                    }
                }
            }
        }
        CronAction::Uninstall => {
            let removal = run_cron_uninstall()?;
            let elapsed_ms = started_at.elapsed().as_millis() as u64;
            if robot_mode {
                print_cron_uninstall_json(&removal, elapsed_ms);
            } else {
                print_cron_uninstall(&removal);
            }
        }
        CronAction::Status => {
            let config = Config::load(config_override)?;
            let info = run_cron_status(&config)?;
            let elapsed_ms = started_at.elapsed().as_millis() as u64;
            if robot_mode {
                print_cron_status_json(&info, elapsed_ms);
            } else {
                print_cron_status(&info);
            }
        }
    }
    Ok(())
}
async fn handle_token(
config_override: Option<&str>,
args: TokenArgs,
robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
let start = std::time::Instant::now();
match args.action {
TokenAction::Set { token } => {
let result = run_token_set(config_override, token).await?;
let elapsed_ms = start.elapsed().as_millis() as u64;
if robot_mode {
let output = serde_json::json!({
"ok": true,
"data": {
"action": "set",
"username": result.username,
"config_path": result.config_path,
},
"meta": { "elapsed_ms": elapsed_ms },
});
println!("{}", serde_json::to_string(&output)?);
} else {
println!(
" {} Token stored and validated (authenticated as @{})",
lore::cli::render::Theme::success().render(lore::cli::render::Icons::success()),
result.username
);
println!(
" {} {}",
lore::cli::render::Theme::dim().render("config:"),
result.config_path
);
println!();
}
}
TokenAction::Show { unmask } => {
let result = run_token_show(config_override, unmask)?;
let elapsed_ms = start.elapsed().as_millis() as u64;
if robot_mode {
let output = serde_json::json!({
"ok": true,
"data": {
"token": result.token,
"source": result.source,
},
"meta": { "elapsed_ms": elapsed_ms },
});
println!("{}", serde_json::to_string(&output)?);
} else {
println!(
" {} {}",
lore::cli::render::Theme::dim().render("token:"),
result.token
);
println!(
" {} {}",
lore::cli::render::Theme::dim().render("source:"),
result.source
);
println!();
}
}
}
Ok(())
}
/// Robot-mode JSON envelope for the health command.
#[derive(Serialize)]
struct HealthOutput {
    // Always true when the command itself ran; see data.healthy for status.
    ok: bool,
    data: HealthData,
    meta: RobotMeta,
}
/// Health-check results: per-check flags plus suggested remediation commands.
#[derive(Serialize)]
struct HealthData {
    // True only when config, DB, and schema checks all pass.
    healthy: bool,
    config_found: bool,
    db_found: bool,
    schema_current: bool,
    schema_version: i32,
    // Suggested follow-up commands; omitted from the JSON when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    actions: Vec<String>,
}
async fn handle_health(
    config_override: Option<&str>,
    robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
    // Quick health probe: config present, DB present, schema up to date.
    let started_at = std::time::Instant::now();
    let config_path = get_config_path(config_override);
    let config_found = config_path.exists();
    // Default every DB/schema check to failed; flip flags as checks pass.
    let mut db_found = false;
    let mut schema_version = 0;
    let mut schema_current = false;
    if config_found {
        if let Ok(config) = Config::load(config_override) {
            let db_path = get_db_path(config.storage.db_path.as_deref());
            if db_path.exists() {
                db_found = true;
                if let Ok(conn) = create_connection(&db_path) {
                    schema_version = get_schema_version(&conn);
                    schema_current = schema_version >= LATEST_SCHEMA_VERSION;
                }
            }
        }
    }
    let healthy = config_found && db_found && schema_current;
    // Suggest the command that fixes each failed check.
    let mut actions = Vec::new();
    if !config_found {
        actions.push("lore init".to_string());
    }
    if config_found && !db_found {
        actions.push("lore sync".to_string());
    }
    if db_found && !schema_current {
        actions.push("lore migrate".to_string());
    }
    if robot_mode {
        let output = HealthOutput {
            ok: true,
            data: HealthData {
                healthy,
                config_found,
                db_found,
                schema_current,
                schema_version,
                actions,
            },
            meta: RobotMeta::new(started_at.elapsed().as_millis() as u64),
        };
        println!("{}", serde_json::to_string(&output)?);
    } else {
        let status = |ok: bool| {
            if ok {
                Theme::success().render("pass")
            } else {
                Theme::error().render("FAIL")
            }
        };
        println!(
            "Config: {} ({})",
            status(config_found),
            config_path.display()
        );
        println!("DB: {}", status(db_found));
        println!("Schema: {} (v{})", status(schema_current), schema_version);
        println!();
        let verdict = if healthy {
            Theme::success().bold().render("Healthy")
        } else {
            Theme::error()
                .bold()
                .render("Unhealthy - run 'lore doctor' for details")
        };
        println!("{}", verdict);
    }
    // Non-zero exit so scripts can gate on health without parsing output.
    if !healthy {
        std::process::exit(19);
    }
    Ok(())
}