3 Commits

Author SHA1 Message Date
teernisse
06889ec85a fix(explain): address review findings — N+1 queries, duplicate decisions, silent errors
1. fetch_open_threads: replace N+1 loop (2 queries per thread) with a
   single query using correlated subqueries for note_count and started_by.
2. extract_key_decisions: track consumed notes so the same note is not
   matched to multiple events, preventing duplicate decision entries.
3. build_timeline_excerpt_from_pipeline: log tracing::warn on seed/collect
   failures instead of silently returning empty timeline.
2026-03-10 16:43:06 -04:00
teernisse
08bda08934 fix(explain): filter out NULL iids in related entities queries
entity_references.target_entity_iid is nullable (unresolved cross-project
refs), and COALESCE(i.iid, mr.iid) returns NULL for orphaned refs.
Both paths caused rusqlite InvalidColumnType errors when fetching i64.
Added IS NOT NULL filters to both outgoing and incoming reference queries.
2026-03-10 15:54:54 -04:00
teernisse
32134ea933 feat(explain): implement lore explain command for auto-generating issue/MR narratives
Adds the full explain command with 7 output sections: entity summary, description,
key decisions (heuristic event-note correlation), activity summary, open threads,
related entities (closing MRs, cross-references), and timeline excerpt (reuses
existing pipeline). Supports --sections filtering, --since time scoping,
--no-timeline, --max-decisions, and robot mode JSON output.

Closes: bd-2i3z, bd-a3j8, bd-wb0b, bd-3q5e, bd-nj7f, bd-9lbr
2026-03-10 15:04:35 -04:00
16 changed files with 2129 additions and 230 deletions

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
bd-2i3z bd-9lbr

View File

@@ -7,6 +7,10 @@ struct FallbackErrorOutput {
struct FallbackError { struct FallbackError {
code: String, code: String,
message: String, message: String,
#[serde(skip_serializing_if = "Option::is_none")]
suggestion: Option<String>,
#[serde(skip_serializing_if = "Vec::is_empty")]
actions: Vec<String>,
} }
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! { fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
@@ -20,6 +24,8 @@ fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
error: FallbackError { error: FallbackError {
code: "INTERNAL_ERROR".to_string(), code: "INTERNAL_ERROR".to_string(),
message: gi_error.to_string(), message: gi_error.to_string(),
suggestion: None,
actions: Vec::new(),
}, },
}; };
serde_json::to_string(&fallback) serde_json::to_string(&fallback)
@@ -59,6 +65,8 @@ fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
error: FallbackError { error: FallbackError {
code: "INTERNAL_ERROR".to_string(), code: "INTERNAL_ERROR".to_string(),
message: e.to_string(), message: e.to_string(),
suggestion: None,
actions: Vec::new(),
}, },
}; };
eprintln!( eprintln!(

View File

@@ -735,7 +735,7 @@ async fn handle_init(
} }
let project_paths: Vec<String> = projects_flag let project_paths: Vec<String> = projects_flag
.unwrap() .expect("validated: checked for None at lines 714-721")
.split(',') .split(',')
.map(|p| p.trim().to_string()) .map(|p| p.trim().to_string())
.filter(|p| !p.is_empty()) .filter(|p| !p.is_empty())
@@ -743,8 +743,10 @@ async fn handle_init(
let result = run_init( let result = run_init(
InitInputs { InitInputs {
gitlab_url: gitlab_url_flag.unwrap(), gitlab_url: gitlab_url_flag
token_env_var: token_env_var_flag.unwrap(), .expect("validated: checked for None at lines 714-721"),
token_env_var: token_env_var_flag
.expect("validated: checked for None at lines 714-721"),
project_paths, project_paths,
default_project: default_project_flag.clone(), default_project: default_project_flag.clone(),
}, },

View File

@@ -316,6 +316,17 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
"meta": {"elapsed_ms": "int"} "meta": {"elapsed_ms": "int"}
} }
}, },
"explain": {
"description": "Auto-generate a structured narrative of an issue or MR",
"flags": ["<entity_type: issues|mrs>", "<IID>", "-p/--project <path>", "--sections <comma-list>", "--no-timeline", "--max-decisions <N>", "--since <period>"],
"valid_sections": ["entity", "description", "key_decisions", "activity", "open_threads", "related", "timeline"],
"example": "lore --robot explain issues 42 --sections key_decisions,activity --since 30d",
"response_schema": {
"ok": "bool",
"data": {"entity": "{type:string, iid:int, title:string, state:string, author:string, assignees:[string], labels:[string], created_at:string, updated_at:string, url:string?, status_name:string?}", "description_excerpt": "string?", "key_decisions": "[{timestamp:string, actor:string, action:string, context_note:string}]?", "activity": "{state_changes:int, label_changes:int, notes:int, first_event:string?, last_event:string?}?", "open_threads": "[{discussion_id:string, started_by:string, started_at:string, note_count:int, last_note_at:string}]?", "related": "{closing_mrs:[{iid:int, title:string, state:string, web_url:string?}], related_issues:[{entity_type:string, iid:int, title:string?, reference_type:string}]}?", "timeline_excerpt": "[{timestamp:string, event_type:string, actor:string?, summary:string}]?"},
"meta": {"elapsed_ms": "int"}
}
},
"notes": { "notes": {
"description": "List notes from discussions with rich filtering", "description": "List notes from discussions with rich filtering",
"flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--fields <list|minimal>", "--open"], "flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--fields <list|minimal>", "--open"],
@@ -449,7 +460,8 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
"17": "Not found", "17": "Not found",
"18": "Ambiguous match", "18": "Ambiguous match",
"19": "Health check failed", "19": "Health check failed",
"20": "Config not found" "20": "Config not found",
"21": "Embeddings not built"
}); });
let workflows = serde_json::json!({ let workflows = serde_json::json!({

View File

@@ -209,6 +209,16 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
], ],
), ),
("drift", &["--threshold", "--project"]), ("drift", &["--threshold", "--project"]),
(
"explain",
&[
"--project",
"--sections",
"--no-timeline",
"--max-decisions",
"--since",
],
),
( (
"notes", "notes",
&[ &[
@@ -388,6 +398,7 @@ const CANONICAL_SUBCOMMANDS: &[&str] = &[
"file-history", "file-history",
"trace", "trace",
"drift", "drift",
"explain",
"related", "related",
"cron", "cron",
"token", "token",

1970
src/cli/commands/explain.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -5,6 +5,7 @@ pub mod cron;
pub mod doctor; pub mod doctor;
pub mod drift; pub mod drift;
pub mod embed; pub mod embed;
pub mod explain;
pub mod file_history; pub mod file_history;
pub mod generate_docs; pub mod generate_docs;
pub mod ingest; pub mod ingest;
@@ -35,6 +36,7 @@ pub use cron::{
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor}; pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift}; pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
pub use embed::{print_embed, print_embed_json, run_embed}; pub use embed::{print_embed, print_embed_json, run_embed};
pub use explain::{handle_explain, print_explain, print_explain_json, run_explain};
pub use file_history::{print_file_history, print_file_history_json, run_file_history}; pub use file_history::{print_file_history, print_file_history_json, run_file_history};
pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs}; pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs};
pub use ingest::{ pub use ingest::{

View File

@@ -277,6 +277,44 @@ pub enum Commands {
/// Trace why code was introduced: file -> MR -> issue -> discussion /// Trace why code was introduced: file -> MR -> issue -> discussion
Trace(TraceArgs), Trace(TraceArgs),
/// Auto-generate a structured narrative of an issue or MR
#[command(after_help = "\x1b[1mExamples:\x1b[0m
lore explain issues 42 # Narrative for issue #42
lore explain mrs 99 -p group/repo # Narrative for MR !99 in specific project
lore -J explain issues 42 # JSON output for automation
lore explain issues 42 --sections key_decisions,open_threads # Specific sections only
lore explain issues 42 --since 30d # Narrative scoped to last 30 days
lore explain issues 42 --no-timeline # Skip timeline (faster)")]
Explain {
/// Entity type: "issues" or "mrs" (singular forms also accepted)
#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]
entity_type: String,
/// Entity IID
iid: i64,
/// Scope to project (fuzzy match)
#[arg(short, long)]
project: Option<String>,
/// Select specific sections (comma-separated)
/// Valid: entity, description, key_decisions, activity, open_threads, related, timeline
#[arg(long, value_delimiter = ',', help_heading = "Output")]
sections: Option<Vec<String>>,
/// Skip timeline excerpt (faster execution)
#[arg(long, help_heading = "Output")]
no_timeline: bool,
/// Maximum key decisions to include
#[arg(long, default_value = "10", help_heading = "Output")]
max_decisions: usize,
/// Time scope for events/notes (e.g. 7d, 2w, 1m, or YYYY-MM-DD)
#[arg(long, help_heading = "Filters")]
since: Option<String>,
},
/// Detect discussion divergence from original intent /// Detect discussion divergence from original intent
#[command(after_help = "\x1b[1mExamples:\x1b[0m #[command(after_help = "\x1b[1mExamples:\x1b[0m
lore drift issues 42 # Check drift on issue #42 lore drift issues 42 # Check drift on issue #42

View File

@@ -28,8 +28,11 @@ pub enum ErrorCode {
OllamaUnavailable, OllamaUnavailable,
OllamaModelNotFound, OllamaModelNotFound,
EmbeddingFailed, EmbeddingFailed,
EmbeddingsNotBuilt,
NotFound, NotFound,
Ambiguous, Ambiguous,
HealthCheckFailed,
UsageError,
SurgicalPreflightFailed, SurgicalPreflightFailed,
} }
@@ -52,8 +55,11 @@ impl std::fmt::Display for ErrorCode {
Self::OllamaUnavailable => "OLLAMA_UNAVAILABLE", Self::OllamaUnavailable => "OLLAMA_UNAVAILABLE",
Self::OllamaModelNotFound => "OLLAMA_MODEL_NOT_FOUND", Self::OllamaModelNotFound => "OLLAMA_MODEL_NOT_FOUND",
Self::EmbeddingFailed => "EMBEDDING_FAILED", Self::EmbeddingFailed => "EMBEDDING_FAILED",
Self::EmbeddingsNotBuilt => "EMBEDDINGS_NOT_BUILT",
Self::NotFound => "NOT_FOUND", Self::NotFound => "NOT_FOUND",
Self::Ambiguous => "AMBIGUOUS", Self::Ambiguous => "AMBIGUOUS",
Self::HealthCheckFailed => "HEALTH_CHECK_FAILED",
Self::UsageError => "USAGE_ERROR",
Self::SurgicalPreflightFailed => "SURGICAL_PREFLIGHT_FAILED", Self::SurgicalPreflightFailed => "SURGICAL_PREFLIGHT_FAILED",
}; };
write!(f, "{code}") write!(f, "{code}")
@@ -79,8 +85,11 @@ impl ErrorCode {
Self::OllamaUnavailable => 14, Self::OllamaUnavailable => 14,
Self::OllamaModelNotFound => 15, Self::OllamaModelNotFound => 15,
Self::EmbeddingFailed => 16, Self::EmbeddingFailed => 16,
Self::EmbeddingsNotBuilt => 21,
Self::NotFound => 17, Self::NotFound => 17,
Self::Ambiguous => 18, Self::Ambiguous => 18,
Self::HealthCheckFailed => 19,
Self::UsageError => 2,
// Shares exit code 6 with GitLabNotFound — same semantic category (resource not found). // Shares exit code 6 with GitLabNotFound — same semantic category (resource not found).
// Robot consumers distinguish via ErrorCode string, not exit code. // Robot consumers distinguish via ErrorCode string, not exit code.
Self::SurgicalPreflightFailed => 6, Self::SurgicalPreflightFailed => 6,
@@ -201,7 +210,7 @@ impl LoreError {
Self::OllamaUnavailable { .. } => ErrorCode::OllamaUnavailable, Self::OllamaUnavailable { .. } => ErrorCode::OllamaUnavailable,
Self::OllamaModelNotFound { .. } => ErrorCode::OllamaModelNotFound, Self::OllamaModelNotFound { .. } => ErrorCode::OllamaModelNotFound,
Self::EmbeddingFailed { .. } => ErrorCode::EmbeddingFailed, Self::EmbeddingFailed { .. } => ErrorCode::EmbeddingFailed,
Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingFailed, Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingsNotBuilt,
Self::SurgicalPreflightFailed { .. } => ErrorCode::SurgicalPreflightFailed, Self::SurgicalPreflightFailed { .. } => ErrorCode::SurgicalPreflightFailed,
} }
} }

View File

@@ -1,70 +0,0 @@
/// Each document owns a contiguous rowid range: rowid = doc_id * 1000 + chunk_idx.
pub const CHUNK_ROWID_MULTIPLIER: i64 = 1000;

/// Pack a `(document_id, chunk_index)` pair into a single FTS rowid.
///
/// The inverse operation is [`decode_rowid`].
///
/// # Panics
/// Panics when `chunk_index` falls outside `[0, CHUNK_ROWID_MULTIPLIER)`,
/// or when the packed value would overflow `i64`.
pub fn encode_rowid(document_id: i64, chunk_index: i64) -> i64 {
    assert!(
        (0..CHUNK_ROWID_MULTIPLIER).contains(&chunk_index),
        "chunk_index {chunk_index} out of range [0, {CHUNK_ROWID_MULTIPLIER})"
    );
    let packed = document_id
        .checked_mul(CHUNK_ROWID_MULTIPLIER)
        .and_then(|base| base.checked_add(chunk_index));
    match packed {
        Some(rowid) => rowid,
        None => {
            panic!("encode_rowid overflow: document_id={document_id}, chunk_index={chunk_index}")
        }
    }
}

/// Recover the `(document_id, chunk_index)` pair from a rowid produced by
/// [`encode_rowid`].
///
/// # Panics
/// Panics when `rowid` is negative — encoded rowids are always non-negative.
pub fn decode_rowid(rowid: i64) -> (i64, i64) {
    assert!(
        rowid >= 0,
        "decode_rowid called with negative rowid: {rowid}"
    );
    (
        rowid / CHUNK_ROWID_MULTIPLIER,
        rowid % CHUNK_ROWID_MULTIPLIER,
    )
}
#[cfg(test)]
mod tests {
    use super::*;

    // Chunk 0 of document 1 sits at the base of document 1's rowid range.
    #[test]
    fn test_encode_single_chunk() {
        assert_eq!(encode_rowid(1, 0), 1000);
    }

    #[test]
    fn test_encode_multi_chunk() {
        assert_eq!(encode_rowid(1, 5), 1005);
    }

    #[test]
    fn test_encode_specific_values() {
        for (doc_id, chunk_idx, expected) in [(42, 0, 42000), (42, 5, 42005)] {
            assert_eq!(encode_rowid(doc_id, chunk_idx), expected);
        }
    }

    #[test]
    fn test_decode_zero_chunk() {
        assert_eq!(decode_rowid(42000), (42, 0));
    }

    // Exhaustive encode/decode roundtrip over representative ids and indices.
    #[test]
    fn test_decode_roundtrip() {
        let doc_ids = [0, 1, 42, 100, 999, 10000];
        let chunk_indices = [0, 1, 5, 99, 999];
        for &doc_id in &doc_ids {
            for &chunk_idx in &chunk_indices {
                let rowid = encode_rowid(doc_id, chunk_idx);
                assert_eq!(
                    decode_rowid(rowid),
                    (doc_id, chunk_idx),
                    "Roundtrip failed for doc_id={doc_id}, chunk_idx={chunk_idx}"
                );
            }
        }
    }

    // Guards against accidental changes to the packing factor, which would
    // corrupt every previously stored rowid.
    #[test]
    fn test_multiplier_value() {
        assert_eq!(CHUNK_ROWID_MULTIPLIER, 1000);
    }
}

View File

@@ -1,107 +0,0 @@
/// Hard upper bound on a single chunk's size, in bytes (actual split points
/// usually land earlier, at a paragraph/sentence/word boundary).
pub const CHUNK_MAX_BYTES: usize = 1_500;
/// Expected embedding vector dimensionality.
/// NOTE(review): not referenced in this module's visible code — presumably
/// consumed by the embedding pipeline; confirm against callers.
pub const EXPECTED_DIMS: usize = 768;
/// Trailing bytes of one chunk repeated at the start of the next, so context
/// that straddles a split point is not lost.
pub const CHUNK_OVERLAP_CHARS: usize = 200;

/// Split `content` into `(chunk_index, text)` pairs of at most
/// `CHUNK_MAX_BYTES` bytes each, with consecutive chunks overlapping by
/// roughly `CHUNK_OVERLAP_CHARS` bytes.
///
/// Split points prefer, in order: paragraph break, sentence break, word
/// break, then a hard cut at a UTF-8 char boundary. Empty input yields no
/// chunks; input that already fits yields a single chunk with index 0.
pub fn split_into_chunks(content: &str) -> Vec<(usize, String)> {
    if content.is_empty() {
        return Vec::new();
    }
    if content.len() <= CHUNK_MAX_BYTES {
        return vec![(0, content.to_string())];
    }
    let mut chunks: Vec<(usize, String)> = Vec::new();
    let mut start = 0;
    let mut chunk_index = 0;
    while start < content.len() {
        let remaining = &content[start..];
        if remaining.len() <= CHUNK_MAX_BYTES {
            // The tail fits entirely — emit it and stop.
            chunks.push((chunk_index, remaining.to_string()));
            break;
        }
        // Clamp the window end to a char boundary so slicing cannot panic
        // mid-codepoint.
        let end = floor_char_boundary(content, start + CHUNK_MAX_BYTES);
        let window = &content[start..end];
        // Best available break within the window: paragraph > sentence >
        // word > hard cut at the window end.
        let split_at = find_paragraph_break(window)
            .or_else(|| find_sentence_break(window))
            .or_else(|| find_word_break(window))
            .unwrap_or(window.len());
        let chunk_text = &content[start..start + split_at];
        chunks.push((chunk_index, chunk_text.to_string()));
        // Advance by the chunk size minus the overlap, but always by at
        // least 1 byte so the loop cannot stall on tiny splits.
        let advance = if split_at > CHUNK_OVERLAP_CHARS {
            split_at - CHUNK_OVERLAP_CHARS
        } else {
            split_at
        }
        .max(1);
        let old_start = start;
        start += advance;
        // Ensure start lands on a char boundary after overlap subtraction
        start = floor_char_boundary(content, start);
        // Guarantee forward progress: multi-byte chars can cause
        // floor_char_boundary to round back to old_start
        if start <= old_start {
            start = old_start
                + content[old_start..]
                    .chars()
                    .next()
                    .map_or(1, |c| c.len_utf8());
        }
        chunk_index += 1;
    }
    chunks
}
/// Find a paragraph break (`"\n\n"`) in `window`, preferring the back third
/// so chunks stay close to the size cap. Returns the index just past the
/// break, or `None` when the window contains no blank line.
fn find_paragraph_break(window: &str) -> Option<usize> {
    let pivot = floor_char_boundary(window, window.len() * 2 / 3);
    if let Some(pos) = window[pivot..].rfind("\n\n") {
        return Some(pivot + pos + 2);
    }
    window[..pivot].rfind("\n\n").map(|pos| pos + 2)
}
/// Find a sentence terminator in `window`, returning the index just past it.
///
/// Pattern priority beats position: `". "` wins over `"? "` wins over `"! "`.
/// Within a pattern, the back half of the window is searched before the
/// front half so chunks stay close to the size cap.
fn find_sentence_break(window: &str) -> Option<usize> {
    let terminators: [&str; 3] = [". ", "? ", "! "];
    let pivot = floor_char_boundary(window, window.len() / 2);
    let in_back_half = terminators.iter().find_map(|pat| {
        window[pivot..]
            .rfind(pat)
            .map(|pos| pivot + pos + pat.len())
    });
    in_back_half.or_else(|| {
        terminators
            .iter()
            .find_map(|pat| window[..pivot].rfind(pat).map(|pos| pos + pat.len()))
    })
}
/// Find a space in `window`, returning the index just past it. The back half
/// is searched before the front half; `None` when the window has no spaces.
fn find_word_break(window: &str) -> Option<usize> {
    let pivot = floor_char_boundary(window, window.len() / 2);
    match window[pivot..].rfind(' ') {
        Some(pos) => Some(pivot + pos + 1),
        None => window[..pivot].rfind(' ').map(|pos| pos + 1),
    }
}
/// Round `idx` down to the nearest UTF-8 char boundary in `s`.
///
/// Indices at or past the end of the string clamp to `s.len()`. Index 0 is
/// always a boundary, so the scan always terminates with a valid result.
fn floor_char_boundary(s: &str, idx: usize) -> usize {
    if idx >= s.len() {
        s.len()
    } else {
        (0..=idx)
            .rev()
            .find(|&i| s.is_char_boundary(i))
            .unwrap_or(0)
    }
}
#[cfg(test)]
#[path = "chunking_tests.rs"]
mod tests;

View File

@@ -53,14 +53,8 @@ pub struct NormalizedNote {
pub position_head_sha: Option<String>, pub position_head_sha: Option<String>,
} }
fn parse_timestamp(ts: &str) -> i64 { fn parse_timestamp(ts: &str) -> Result<i64, String> {
match iso_to_ms(ts) { iso_to_ms_strict(ts)
Some(ms) => ms,
None => {
warn!(timestamp = ts, "Invalid timestamp, defaulting to epoch 0");
0
}
}
} }
pub fn transform_discussion( pub fn transform_discussion(
@@ -133,7 +127,15 @@ pub fn transform_notes(
.notes .notes
.iter() .iter()
.enumerate() .enumerate()
.map(|(idx, note)| transform_single_note(note, local_project_id, idx as i32, now)) .filter_map(|(idx, note)| {
match transform_single_note(note, local_project_id, idx as i32, now) {
Ok(n) => Some(n),
Err(e) => {
warn!(note_id = note.id, error = %e, "Skipping note with invalid timestamp");
None
}
}
})
.collect() .collect()
} }
@@ -142,7 +144,10 @@ fn transform_single_note(
local_project_id: i64, local_project_id: i64,
position: i32, position: i32,
now: i64, now: i64,
) -> NormalizedNote { ) -> Result<NormalizedNote, String> {
let created_at = parse_timestamp(&note.created_at)?;
let updated_at = parse_timestamp(&note.updated_at)?;
let ( let (
position_old_path, position_old_path,
position_new_path, position_new_path,
@@ -156,7 +161,7 @@ fn transform_single_note(
position_head_sha, position_head_sha,
) = extract_position_fields(&note.position); ) = extract_position_fields(&note.position);
NormalizedNote { Ok(NormalizedNote {
gitlab_id: note.id, gitlab_id: note.id,
project_id: local_project_id, project_id: local_project_id,
note_type: note.note_type.clone(), note_type: note.note_type.clone(),
@@ -164,8 +169,8 @@ fn transform_single_note(
author_id: Some(note.author.id), author_id: Some(note.author.id),
author_username: note.author.username.clone(), author_username: note.author.username.clone(),
body: note.body.clone(), body: note.body.clone(),
created_at: parse_timestamp(&note.created_at), created_at,
updated_at: parse_timestamp(&note.updated_at), updated_at,
last_seen_at: now, last_seen_at: now,
position, position,
resolvable: note.resolvable, resolvable: note.resolvable,
@@ -182,7 +187,7 @@ fn transform_single_note(
position_base_sha, position_base_sha,
position_start_sha, position_start_sha,
position_head_sha, position_head_sha,
} })
} }
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]

View File

@@ -13,23 +13,24 @@ use lore::cli::autocorrect::{self, CorrectionResult};
use lore::cli::commands::{ use lore::cli::commands::{
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters, IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
NoteListFilters, RefreshOptions, RefreshResult, SearchCliFilters, SyncOptions, TimelineParams, NoteListFilters, RefreshOptions, RefreshResult, SearchCliFilters, SyncOptions, TimelineParams,
delete_orphan_projects, open_issue_in_browser, open_mr_in_browser, parse_trace_path, delete_orphan_projects, handle_explain, open_issue_in_browser, open_mr_in_browser,
print_count, print_count_json, print_cron_install, print_cron_install_json, print_cron_status, parse_trace_path, print_count, print_count_json, print_cron_install, print_cron_install_json,
print_cron_status_json, print_cron_uninstall, print_cron_uninstall_json, print_doctor_results, print_cron_status, print_cron_status_json, print_cron_uninstall, print_cron_uninstall_json,
print_drift_human, print_drift_json, print_dry_run_preview, print_dry_run_preview_json, print_doctor_results, print_drift_human, print_drift_json, print_dry_run_preview,
print_embed, print_embed_json, print_event_count, print_event_count_json, print_file_history, print_dry_run_preview_json, print_embed, print_embed_json, print_event_count,
print_file_history_json, print_generate_docs, print_generate_docs_json, print_ingest_summary, print_event_count_json, print_file_history, print_file_history_json, print_generate_docs,
print_ingest_summary_json, print_list_issues, print_list_issues_json, print_list_mrs, print_generate_docs_json, print_ingest_summary, print_ingest_summary_json, print_list_issues,
print_list_mrs_json, print_list_notes, print_list_notes_json, print_related_human, print_list_issues_json, print_list_mrs, print_list_mrs_json, print_list_notes,
print_related_json, print_search_results, print_search_results_json, print_show_issue, print_list_notes_json, print_related_human, print_related_json, print_search_results,
print_show_issue_json, print_show_mr, print_show_mr_json, print_stats, print_stats_json, print_search_results_json, print_show_issue, print_show_issue_json, print_show_mr,
print_sync, print_sync_json, print_sync_status, print_sync_status_json, print_timeline, print_show_mr_json, print_stats, print_stats_json, print_sync, print_sync_json,
print_timeline_json_with_meta, print_trace, print_trace_json, print_who_human, print_who_json, print_sync_status, print_sync_status_json, print_timeline, print_timeline_json_with_meta,
query_notes, run_auth_test, run_count, run_count_events, run_cron_install, run_cron_status, print_trace, print_trace_json, print_who_human, print_who_json, query_notes, run_auth_test,
run_cron_uninstall, run_doctor, run_drift, run_embed, run_file_history, run_generate_docs, run_count, run_count_events, run_cron_install, run_cron_status, run_cron_uninstall, run_doctor,
run_ingest, run_ingest_dry_run, run_init, run_init_refresh, run_list_issues, run_list_mrs, run_drift, run_embed, run_file_history, run_generate_docs, run_ingest, run_ingest_dry_run,
run_me, run_related, run_search, run_show_issue, run_show_mr, run_stats, run_sync, run_init, run_init_refresh, run_list_issues, run_list_mrs, run_me, run_related, run_search,
run_sync_status, run_timeline, run_token_set, run_token_show, run_who, run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status, run_timeline, run_token_set,
run_token_show, run_who,
}; };
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme}; use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
use lore::cli::robot::{RobotMeta, strip_schemas}; use lore::cli::robot::{RobotMeta, strip_schemas};
@@ -222,6 +223,25 @@ fn main() {
Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode), Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode),
Some(Commands::Cron(args)) => handle_cron(cli.config.as_deref(), args, robot_mode), Some(Commands::Cron(args)) => handle_cron(cli.config.as_deref(), args, robot_mode),
Some(Commands::Token(args)) => handle_token(cli.config.as_deref(), args, robot_mode).await, Some(Commands::Token(args)) => handle_token(cli.config.as_deref(), args, robot_mode).await,
Some(Commands::Explain {
entity_type,
iid,
project,
sections,
no_timeline,
max_decisions,
since,
}) => handle_explain(
cli.config.as_deref(),
&entity_type,
iid,
project.as_deref(),
sections,
no_timeline,
max_decisions,
since.as_deref(),
robot_mode,
),
Some(Commands::Drift { Some(Commands::Drift {
entity_type, entity_type,
iid, iid,

View File

@@ -119,15 +119,12 @@ pub fn search_fts(
} }
pub fn generate_fallback_snippet(content_text: &str, max_chars: usize) -> String { pub fn generate_fallback_snippet(content_text: &str, max_chars: usize) -> String {
if content_text.chars().count() <= max_chars { // Use char_indices to find the boundary at max_chars in a single pass,
return content_text.to_string(); // short-circuiting early for large strings instead of counting all chars.
} let byte_end = match content_text.char_indices().nth(max_chars) {
Some((i, _)) => i,
let byte_end = content_text None => return content_text.to_string(), // content fits within max_chars
.char_indices() };
.nth(max_chars)
.map(|(i, _)| i)
.unwrap_or(content_text.len());
let truncated = &content_text[..byte_end]; let truncated = &content_text[..byte_end];
if let Some(last_space) = truncated.rfind(' ') { if let Some(last_space) = truncated.rfind(' ') {

View File

@@ -411,7 +411,9 @@ fn round_robin_select_by_discussion(
let mut made_progress = false; let mut made_progress = false;
for (disc_idx, &discussion_id) in discussion_order.iter().enumerate() { for (disc_idx, &discussion_id) in discussion_order.iter().enumerate() {
let notes = by_discussion.get(&discussion_id).unwrap(); let notes = by_discussion
.get(&discussion_id)
.expect("key present: inserted into by_discussion via discussion_order");
let note_idx = indices[disc_idx]; let note_idx = indices[disc_idx];
if note_idx < notes.len() { if note_idx < notes.len() {