style: Apply cargo fmt and clippy fixes across codebase

Automated formatting and lint corrections from parallel agent work:

- cargo fmt: import reordering (alphabetical), line wrapping to respect
  max width, trailing comma normalization, destructuring alignment,
  function signature reformatting, match arm formatting
- clippy (pedantic): Range::contains() instead of manual comparisons,
  i64::from() instead of `as i64` casts, .clamp() instead of
  .max().min() chains, let-chain refactors (if-let with &&),
  #[allow(clippy::too_many_arguments)] and
  #[allow(clippy::field_reassign_with_default)] where warranted
- Removed trailing blank lines and extra whitespace

No behavioral changes intended. All existing tests pass without modification.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Taylor Eernisse
2026-02-03 13:01:59 -05:00
parent ff94f24702
commit a50fc78823
42 changed files with 1431 additions and 623 deletions

View File

@@ -3,12 +3,12 @@
use console::style;
use serde::Serialize;
use crate::Config;
use crate::core::db::create_connection;
use crate::core::error::Result;
use crate::core::paths::get_db_path;
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
use crate::embedding::pipeline::embed_documents;
use crate::Config;
/// Result of the embed command.
#[derive(Debug, Default, Serialize)]
@@ -69,10 +69,7 @@ pub async fn run_embed(
/// Print human-readable output.
pub fn print_embed(result: &EmbedCommandResult) {
println!(
"{} Embedding complete",
style("done").green().bold(),
);
println!("{} Embedding complete", style("done").green().bold(),);
println!(" Embedded: {}", result.embedded);
if result.failed > 0 {
println!(" Failed: {}", style(result.failed).red());

View File

@@ -5,12 +5,12 @@ use rusqlite::Connection;
use serde::Serialize;
use tracing::info;
use crate::Config;
use crate::core::db::create_connection;
use crate::core::error::Result;
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::documents::{SourceType, regenerate_dirty_documents};
use crate::Config;
const FULL_MODE_CHUNK_SIZE: i64 = 2000;
@@ -134,7 +134,11 @@ fn seed_dirty(
/// Print human-readable output.
pub fn print_generate_docs(result: &GenerateDocsResult) {
let mode = if result.full_mode { "full" } else { "incremental" };
let mode = if result.full_mode {
"full"
} else {
"incremental"
};
println!(
"{} Document generation complete ({})",
style("done").green().bold(),
@@ -147,10 +151,7 @@ pub fn print_generate_docs(result: &GenerateDocsResult) {
println!(" Regenerated: {}", result.regenerated);
println!(" Unchanged: {}", result.unchanged);
if result.errored > 0 {
println!(
" Errored: {}",
style(result.errored).red()
);
println!(" Errored: {}", style(result.errored).red());
}
}

View File

@@ -22,19 +22,19 @@ pub use count::{
pub use doctor::{print_doctor_results, run_doctor};
pub use embed::{print_embed, print_embed_json, run_embed};
pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs};
pub use stats::{print_stats, print_stats_json, run_stats};
pub use search::{
print_search_results, print_search_results_json, run_search, SearchCliFilters, SearchResponse,
};
pub use ingest::{IngestDisplay, print_ingest_summary, print_ingest_summary_json, run_ingest};
pub use init::{InitInputs, InitOptions, InitResult, run_init};
pub use list::{
ListFilters, MrListFilters, open_issue_in_browser, open_mr_in_browser, print_list_issues,
print_list_issues_json, print_list_mrs, print_list_mrs_json, run_list_issues, run_list_mrs,
};
pub use sync::{print_sync, print_sync_json, run_sync, SyncOptions, SyncResult};
pub use search::{
SearchCliFilters, SearchResponse, print_search_results, print_search_results_json, run_search,
};
pub use show::{
print_show_issue, print_show_issue_json, print_show_mr, print_show_mr_json, run_show_issue,
run_show_mr,
};
pub use stats::{print_stats, print_stats_json, run_stats};
pub use sync::{SyncOptions, SyncResult, print_sync, print_sync_json, run_sync};
pub use sync_status::{print_sync_status, print_sync_status_json, run_sync_status};

View File

@@ -3,6 +3,7 @@
use console::style;
use serde::Serialize;
use crate::Config;
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
@@ -10,10 +11,9 @@ use crate::core::project::resolve_project;
use crate::core::time::{ms_to_iso, parse_since};
use crate::documents::SourceType;
use crate::search::{
apply_filters, get_result_snippet, rank_rrf, search_fts, FtsQueryMode, PathFilter,
SearchFilters,
FtsQueryMode, PathFilter, SearchFilters, apply_filters, get_result_snippet, rank_rrf,
search_fts,
};
use crate::Config;
/// Display-ready search result with all fields hydrated.
#[derive(Debug, Serialize)]
@@ -86,9 +86,7 @@ pub fn run_search(
mode: "lexical".to_string(),
total_results: 0,
results: vec![],
warnings: vec![
"No documents indexed. Run 'lore generate-docs' first.".to_string()
],
warnings: vec!["No documents indexed. Run 'lore generate-docs' first.".to_string()],
});
}
@@ -151,9 +149,9 @@ pub fn run_search(
// Adaptive recall: wider initial fetch when filters applied
let requested = filters.clamp_limit();
let top_k = if filters.has_any_filter() {
(requested * 50).max(200).min(1500)
(requested * 50).clamp(200, 1500)
} else {
(requested * 10).max(50).min(1500)
(requested * 10).clamp(50, 1500)
};
// FTS search
@@ -190,10 +188,8 @@ pub fn run_search(
let hydrated = hydrate_results(&conn, &filtered_ids)?;
// Build display results preserving filter order
let rrf_map: std::collections::HashMap<i64, &crate::search::RrfResult> = ranked
.iter()
.map(|r| (r.document_id, r))
.collect();
let rrf_map: std::collections::HashMap<i64, &crate::search::RrfResult> =
ranked.iter().map(|r| (r.document_id, r)).collect();
let mut results: Vec<SearchResultDisplay> = Vec::with_capacity(hydrated.len());
for row in &hydrated {
@@ -256,16 +252,13 @@ struct HydratedRow {
///
/// Uses json_each() to pass ranked IDs and preserve ordering via ORDER BY j.key.
/// Labels and paths fetched via correlated json_group_array subqueries.
fn hydrate_results(
conn: &rusqlite::Connection,
document_ids: &[i64],
) -> Result<Vec<HydratedRow>> {
fn hydrate_results(conn: &rusqlite::Connection, document_ids: &[i64]) -> Result<Vec<HydratedRow>> {
if document_ids.is_empty() {
return Ok(Vec::new());
}
let ids_json = serde_json::to_string(document_ids)
.map_err(|e| LoreError::Other(e.to_string()))?;
let ids_json =
serde_json::to_string(document_ids).map_err(|e| LoreError::Other(e.to_string()))?;
let sql = r#"
SELECT d.id, d.source_type, d.title, d.url, d.author_username,
@@ -325,10 +318,7 @@ pub fn print_search_results(response: &SearchResponse) {
}
if response.results.is_empty() {
println!(
"No results found for '{}'",
style(&response.query).bold()
);
println!("No results found for '{}'", style(&response.query).bold());
return;
}
@@ -371,17 +361,11 @@ pub fn print_search_results(response: &SearchResponse) {
);
if !result.labels.is_empty() {
println!(
" Labels: {}",
result.labels.join(", ")
);
println!(" Labels: {}", result.labels.join(", "));
}
// Strip HTML tags from snippet for terminal display
let clean_snippet = result
.snippet
.replace("<mark>", "")
.replace("</mark>", "");
let clean_snippet = result.snippet.replace("<mark>", "").replace("</mark>", "");
println!(" {}", style(clean_snippet).dim());
if let Some(ref explain) = result.explain {

View File

@@ -154,10 +154,7 @@ fn find_issue(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Resu
FROM issues i
JOIN projects p ON i.project_id = p.id
WHERE i.iid = ? AND i.project_id = ?",
vec![
Box::new(iid),
Box::new(project_id),
],
vec![Box::new(iid), Box::new(project_id)],
)
}
None => (
@@ -346,10 +343,7 @@ fn find_mr(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Result<
FROM merge_requests m
JOIN projects p ON m.project_id = p.id
WHERE m.iid = ? AND m.project_id = ?",
vec![
Box::new(iid),
Box::new(project_id),
],
vec![Box::new(iid), Box::new(project_id)],
)
}
None => (

View File

@@ -4,10 +4,10 @@ use console::style;
use rusqlite::Connection;
use serde::Serialize;
use crate::Config;
use crate::core::db::create_connection;
use crate::core::error::Result;
use crate::core::paths::get_db_path;
use crate::Config;
/// Result of the stats command.
#[derive(Debug, Default, Serialize)]
@@ -75,11 +75,7 @@ pub struct RepairResult {
}
/// Run the stats command.
pub fn run_stats(
config: &Config,
check: bool,
repair: bool,
) -> Result<StatsResult> {
pub fn run_stats(config: &Config, check: bool, repair: bool) -> Result<StatsResult> {
let db_path = get_db_path(config.storage.db_path.as_deref());
let conn = create_connection(&db_path)?;
@@ -87,14 +83,22 @@ pub fn run_stats(
// Document counts
result.documents.total = count_query(&conn, "SELECT COUNT(*) FROM documents")?;
result.documents.issues =
count_query(&conn, "SELECT COUNT(*) FROM documents WHERE source_type = 'issue'")?;
result.documents.merge_requests =
count_query(&conn, "SELECT COUNT(*) FROM documents WHERE source_type = 'merge_request'")?;
result.documents.discussions =
count_query(&conn, "SELECT COUNT(*) FROM documents WHERE source_type = 'discussion'")?;
result.documents.truncated =
count_query(&conn, "SELECT COUNT(*) FROM documents WHERE is_truncated = 1")?;
result.documents.issues = count_query(
&conn,
"SELECT COUNT(*) FROM documents WHERE source_type = 'issue'",
)?;
result.documents.merge_requests = count_query(
&conn,
"SELECT COUNT(*) FROM documents WHERE source_type = 'merge_request'",
)?;
result.documents.discussions = count_query(
&conn,
"SELECT COUNT(*) FROM documents WHERE source_type = 'discussion'",
)?;
result.documents.truncated = count_query(
&conn,
"SELECT COUNT(*) FROM documents WHERE is_truncated = 1",
)?;
// Embedding stats — skip gracefully if table doesn't exist (Gate A only)
if table_exists(&conn, "embedding_metadata") {
@@ -119,10 +123,14 @@ pub fn run_stats(
result.fts.indexed = count_query(&conn, "SELECT COUNT(*) FROM documents_fts")?;
// Queue stats
result.queues.dirty_sources =
count_query(&conn, "SELECT COUNT(*) FROM dirty_sources WHERE last_error IS NULL")?;
result.queues.dirty_sources_failed =
count_query(&conn, "SELECT COUNT(*) FROM dirty_sources WHERE last_error IS NOT NULL")?;
result.queues.dirty_sources = count_query(
&conn,
"SELECT COUNT(*) FROM dirty_sources WHERE last_error IS NULL",
)?;
result.queues.dirty_sources_failed = count_query(
&conn,
"SELECT COUNT(*) FROM dirty_sources WHERE last_error IS NOT NULL",
)?;
if table_exists(&conn, "pending_discussion_fetches") {
result.queues.pending_discussion_fetches = count_query(
@@ -151,6 +159,7 @@ pub fn run_stats(
}
// Integrity check
#[allow(clippy::field_reassign_with_default)]
if check {
let mut integrity = IntegrityResult::default();
@@ -276,9 +285,7 @@ pub fn run_stats(
}
fn count_query(conn: &Connection, sql: &str) -> Result<i64> {
let count: i64 = conn
.query_row(sql, [], |row| row.get(0))
.unwrap_or(0);
let count: i64 = conn.query_row(sql, [], |row| row.get(0)).unwrap_or(0);
Ok(count)
}
@@ -300,7 +307,10 @@ pub fn print_stats(result: &StatsResult) {
println!(" Merge Requests: {}", result.documents.merge_requests);
println!(" Discussions: {}", result.documents.discussions);
if result.documents.truncated > 0 {
println!(" Truncated: {}", style(result.documents.truncated).yellow());
println!(
" Truncated: {}",
style(result.documents.truncated).yellow()
);
}
println!();
@@ -318,13 +328,13 @@ pub fn print_stats(result: &StatsResult) {
println!();
println!("{}", style("Queues").cyan().bold());
println!(" Dirty sources: {} pending, {} failed",
result.queues.dirty_sources,
result.queues.dirty_sources_failed
println!(
" Dirty sources: {} pending, {} failed",
result.queues.dirty_sources, result.queues.dirty_sources_failed
);
println!(" Discussion fetch: {} pending, {} failed",
result.queues.pending_discussion_fetches,
result.queues.pending_discussion_fetches_failed
println!(
" Discussion fetch: {} pending, {} failed",
result.queues.pending_discussion_fetches, result.queues.pending_discussion_fetches_failed
);
if result.queues.pending_dependent_fetches > 0
|| result.queues.pending_dependent_fetches_failed > 0
@@ -431,10 +441,12 @@ pub fn print_stats_json(result: &StatsResult) {
let output = StatsJsonOutput {
ok: true,
data: StatsResult {
documents: DocumentStats { ..*&result.documents },
embeddings: EmbeddingStats { ..*&result.embeddings },
fts: FtsStats { ..*&result.fts },
queues: QueueStats { ..*&result.queues },
documents: DocumentStats { ..result.documents },
embeddings: EmbeddingStats {
..result.embeddings
},
fts: FtsStats { ..result.fts },
queues: QueueStats { ..result.queues },
integrity: result.integrity.as_ref().map(|i| IntegrityResult {
ok: i.ok,
fts_doc_mismatch: i.fts_doc_mismatch,