Files
gitlore/src/cli/commands/timeline.rs
teernisse f36e900570 feat(cli): add pipeline progress spinners to timeline and search
Adds numbered stage spinners ([1/3], [2/3], [3/3]) to the timeline
pipeline stages (seed, expand, collect) so users see activity during
longer queries. TimelineParams gains a robot_mode field to suppress
spinners in JSON output mode.

Adds a [1/1] spinner to the search command for consistency, using the
shared stage_spinner from cli/progress.

Also refactors wrap_snippet() to delegate to wrap_text() with a 4-line
cap, eliminating the duplicated word-wrapping logic.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 14:56:19 -05:00

559 lines
17 KiB
Rust

use console::{Alignment, pad_str, style};
use serde::Serialize;
use crate::Config;
use crate::cli::progress::stage_spinner;
use crate::core::db::create_connection;
use crate::core::error::{LoreError, Result};
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::{ms_to_iso, parse_since};
use crate::core::timeline::{
EntityRef, ExpandedEntityRef, TimelineEvent, TimelineEventType, TimelineResult, UnresolvedRef,
};
use crate::core::timeline_collect::collect_events;
use crate::core::timeline_expand::expand_timeline;
use crate::core::timeline_seed::seed_timeline;
use crate::embedding::ollama::{OllamaClient, OllamaConfig};
/// Parameters for running the timeline pipeline.
///
/// Collected from CLI flags; the `max_*` limits bound each pipeline stage so
/// a broad query cannot explode the result set.
pub struct TimelineParams {
    /// Free-text query used to seed the timeline (hybrid search, FTS fallback).
    pub query: String,
    /// Optional project selector, resolved via `resolve_project`.
    pub project: Option<String>,
    /// Optional time floor, parsed by `parse_since` (duration like `7d`/`2w`/`6m`
    /// or a date like `2024-01-15`).
    pub since: Option<String>,
    /// Maximum cross-reference expansion depth (EXPAND stage).
    pub depth: u32,
    /// Whether the EXPAND stage also follows mention-style references.
    pub expand_mentions: bool,
    /// Maximum number of events returned by the COLLECT stage.
    pub limit: usize,
    /// Cap on seed entities produced by the SEED stage.
    pub max_seeds: usize,
    /// Cap on total entities after expansion.
    pub max_entities: usize,
    /// Cap on evidence notes gathered during seeding.
    pub max_evidence: usize,
    /// Suppresses progress spinners when emitting JSON (robot) output.
    pub robot_mode: bool,
}
/// Run the full timeline pipeline: SEED -> EXPAND -> COLLECT.
///
/// Stages (each wrapped in a numbered spinner that is suppressed in robot mode):
/// 1. SEED — hybrid search (Ollama embeddings with FTS fallback) finds seed
///    entities, evidence notes, and matched discussions for `params.query`.
/// 2. EXPAND — follows cross-references out of the seed entities up to
///    `params.depth`, capped at `params.max_entities`.
/// 3. COLLECT — gathers events for all entities, truncated to `params.limit`.
///
/// # Errors
/// Fails if the database cannot be opened, `--project` does not resolve,
/// `--since` does not parse, or any pipeline stage returns an error.
pub async fn run_timeline(config: &Config, params: &TimelineParams) -> Result<TimelineResult> {
    let db_path = get_db_path(config.storage.db_path.as_deref());
    let conn = create_connection(&db_path)?;
    // Option<Result<_>> -> Result<Option<_>> so `?` propagates resolution errors
    // while a missing flag stays `None`.
    let project_id = params
        .project
        .as_deref()
        .map(|p| resolve_project(&conn, p))
        .transpose()?;
    // Same transpose pattern: `parse_since` returns Option, so a bad value is
    // turned into a user-facing error here.
    let since_ms = params
        .since
        .as_deref()
        .map(|s| {
            parse_since(s).ok_or_else(|| {
                LoreError::Other(format!(
                    "Invalid --since value: '{s}'. Use a duration (7d, 2w, 6m) or date (2024-01-15)"
                ))
            })
        })
        .transpose()?;
    // Construct OllamaClient for hybrid search (same pattern as run_search)
    let ollama_cfg = &config.embedding;
    let client = OllamaClient::new(OllamaConfig {
        base_url: ollama_cfg.base_url.clone(),
        model: ollama_cfg.model.clone(),
        ..OllamaConfig::default()
    });
    // Stage 1+2: SEED + HYDRATE (hybrid search with FTS fallback)
    let spinner = stage_spinner(1, 3, "Seeding timeline...", params.robot_mode);
    let seed_result = seed_timeline(
        &conn,
        Some(&client),
        &params.query,
        project_id,
        since_ms,
        params.max_seeds,
        params.max_evidence,
    )
    .await?;
    spinner.finish_and_clear();
    // Stage 3: EXPAND
    let spinner = stage_spinner(2, 3, "Expanding cross-references...", params.robot_mode);
    let expand_result = expand_timeline(
        &conn,
        &seed_result.seed_entities,
        params.depth,
        params.expand_mentions,
        params.max_entities,
    )?;
    spinner.finish_and_clear();
    // Stage 4: COLLECT
    let spinner = stage_spinner(3, 3, "Collecting events...", params.robot_mode);
    let (events, total_before_limit) = collect_events(
        &conn,
        &seed_result.seed_entities,
        &expand_result.expanded_entities,
        &seed_result.evidence_notes,
        &seed_result.matched_discussions,
        since_ms,
        params.limit,
    )?;
    spinner.finish_and_clear();
    Ok(TimelineResult {
        query: params.query.clone(),
        search_mode: seed_result.search_mode,
        events,
        total_events_before_limit: total_before_limit,
        seed_entities: seed_result.seed_entities,
        expanded_entities: expand_result.expanded_entities,
        unresolved_references: expand_result.unresolved_references,
    })
}
// ─── Human output ────────────────────────────────────────────────────────────
/// Render timeline as colored human-readable output.
pub fn print_timeline(result: &TimelineResult) {
let entity_count = result.seed_entities.len() + result.expanded_entities.len();
println!();
println!(
"{}",
style(format!(
"Timeline: \"{}\" ({} events across {} entities)",
result.query,
result.events.len(),
entity_count,
))
.bold()
);
println!("{}", "".repeat(60));
println!();
if result.events.is_empty() {
println!(" {}", style("No events found for this query.").dim());
println!();
return;
}
for event in &result.events {
print_timeline_event(event);
}
println!();
println!("{}", "".repeat(60));
print_timeline_footer(result);
}
/// Print one event line, plus an indented snippet for evidence notes or a
/// full inline thread for discussion events.
fn print_timeline_event(event: &TimelineEvent) {
    // Fixed columns: date, 12-wide colored tag, 7-wide entity ref, 50-wide summary.
    let date = format_date(event.timestamp);
    let tag = format_event_tag(&event.event_type);
    let entity_ref = format_entity_ref(&event.entity_type, event.entity_iid);
    let actor = event
        .actor
        .as_deref()
        .map(|a| format!("@{a}"))
        .unwrap_or_default();
    // Events discovered via cross-reference expansion are flagged inline.
    let expanded_marker = if event.is_seed { "" } else { " [expanded]" };
    let summary = truncate_summary(&event.summary, 50);
    // pad_str (console crate) pads the ANSI-styled tag to a visible width of 12.
    let tag_padded = pad_str(&tag, 12, Alignment::Left, None);
    println!("{date} {tag_padded} {entity_ref:7} {summary:50} {actor}{expanded_marker}");
    // Show snippet for evidence notes
    if let TimelineEventType::NoteEvidence { snippet, .. } = &event.event_type
        && !snippet.is_empty()
    {
        for line in wrap_snippet(snippet, 60) {
            println!(
                " \"{}\"",
                style(line).dim()
            );
        }
    }
    // Show full discussion thread
    if let TimelineEventType::DiscussionThread { notes, .. } = &event.event_type {
        // Opening rule is shortened so "── Discussion ──…" totals roughly 60 cells.
        let bar = "\u{2500}".repeat(44);
        println!(" \u{2500}\u{2500} Discussion {bar}");
        for note in notes {
            let note_date = format_date(note.created_at);
            let author = note
                .author
                .as_deref()
                .map(|a| format!("@{a}"))
                .unwrap_or_else(|| "unknown".to_owned());
            println!(" {} ({note_date}):", style(author).bold());
            for line in wrap_text(&note.body, 60) {
                println!(" {line}");
            }
        }
        // Closing rule for the thread block.
        println!(" {}", "\u{2500}".repeat(60));
    }
}
/// Print the summary footer: seed entity refs, then expansion and unresolved
/// counts (each only when non-zero).
fn print_timeline_footer(result: &TimelineResult) {
    let seed_refs: Vec<String> = result
        .seed_entities
        .iter()
        .map(|e| format_entity_ref(&e.entity_type, e.entity_iid))
        .collect();
    println!(" Seed entities: {}", seed_refs.join(", "));
    let expanded = result.expanded_entities.len();
    if expanded > 0 {
        println!(" Expanded: {expanded} entities via cross-references");
    }
    let unresolved = result.unresolved_references.len();
    if unresolved > 0 {
        println!(" Unresolved: {unresolved} external references");
    }
    println!();
}
fn format_event_tag(event_type: &TimelineEventType) -> String {
match event_type {
TimelineEventType::Created => style("CREATED").green().to_string(),
TimelineEventType::StateChanged { state } => match state.as_str() {
"closed" => style("CLOSED").red().to_string(),
"reopened" => style("REOPENED").yellow().to_string(),
_ => style(state.to_uppercase()).dim().to_string(),
},
TimelineEventType::LabelAdded { .. } => style("LABEL+").blue().to_string(),
TimelineEventType::LabelRemoved { .. } => style("LABEL-").blue().to_string(),
TimelineEventType::MilestoneSet { .. } => style("MILESTONE+").magenta().to_string(),
TimelineEventType::MilestoneRemoved { .. } => style("MILESTONE-").magenta().to_string(),
TimelineEventType::Merged => style("MERGED").cyan().to_string(),
TimelineEventType::NoteEvidence { .. } => style("NOTE").dim().to_string(),
TimelineEventType::DiscussionThread { .. } => style("THREAD").yellow().to_string(),
TimelineEventType::CrossReferenced { .. } => style("REF").dim().to_string(),
}
}
/// Render a GitLab-style short reference: `#iid` for issues, `!iid` for
/// merge requests, and `type:iid` for anything else.
fn format_entity_ref(entity_type: &str, iid: i64) -> String {
    let sigil = match entity_type {
        "issue" => "#",
        "merge_request" => "!",
        other => return format!("{other}:{iid}"),
    };
    format!("{sigil}{iid}")
}
/// Extract the calendar-date portion (everything before 'T') of the ISO-8601
/// string produced by `ms_to_iso`; strings without a 'T' pass through whole.
fn format_date(ms: i64) -> String {
    let iso = ms_to_iso(ms);
    match iso.split_once('T') {
        Some((date, _)) => date.to_owned(),
        None => iso,
    }
}
/// Truncate `s` to at most `max` characters, appending "..." when cut.
///
/// Counts Unicode scalar values (`chars`), not bytes, so multibyte text is
/// never split mid-character. Fix: the original computed `max - 3` with
/// unsigned arithmetic, which panics on underflow when `max < 3` and the
/// string is longer than `max`; `saturating_sub` keeps every `max` safe
/// (behavior for `max >= 3` is unchanged).
fn truncate_summary(s: &str, max: usize) -> String {
    if s.chars().count() <= max {
        s.to_owned()
    } else {
        let keep = max.saturating_sub(3);
        let truncated: String = s.chars().take(keep).collect();
        format!("{truncated}...")
    }
}
/// Greedy word-wrap: pack whitespace-separated words onto lines of at most
/// `width` (measured in bytes via `len`, matching the original). A single
/// word longer than `width` occupies a line of its own rather than being
/// split.
fn wrap_text(text: &str, width: usize) -> Vec<String> {
    let mut wrapped: Vec<String> = Vec::new();
    let mut line = String::new();
    for word in text.split_whitespace() {
        if line.is_empty() {
            line.push_str(word);
        } else if line.len() + 1 + word.len() <= width {
            line.push(' ');
            line.push_str(word);
        } else {
            // Flush the full line and start a new one with this word.
            wrapped.push(std::mem::take(&mut line));
            line.push_str(word);
        }
    }
    if !line.is_empty() {
        wrapped.push(line);
    }
    wrapped
}
/// Wrap an evidence snippet like `wrap_text`, but keep at most the first
/// four lines so long notes do not dominate the timeline.
fn wrap_snippet(text: &str, width: usize) -> Vec<String> {
    wrap_text(text, width).into_iter().take(4).collect()
}
// ─── Robot JSON output ───────────────────────────────────────────────────────
/// Render timeline as robot-mode JSON in {ok, data, meta} envelope.
pub fn print_timeline_json_with_meta(
result: &TimelineResult,
total_events_before_limit: usize,
depth: u32,
expand_mentions: bool,
fields: Option<&[String]>,
) {
let output = TimelineJsonEnvelope {
ok: true,
data: TimelineDataJson::from_result(result),
meta: TimelineMetaJson {
search_mode: result.search_mode.clone(),
expansion_depth: depth,
expand_mentions,
total_entities: result.seed_entities.len() + result.expanded_entities.len(),
total_events: total_events_before_limit,
evidence_notes_included: count_evidence_notes(&result.events),
discussion_threads_included: count_discussion_threads(&result.events),
unresolved_references: result.unresolved_references.len(),
showing: result.events.len(),
},
};
let mut value = match serde_json::to_value(&output) {
Ok(v) => v,
Err(e) => {
eprintln!("Error serializing timeline JSON: {e}");
return;
}
};
if let Some(f) = fields {
let expanded = crate::cli::robot::expand_fields_preset(f, "timeline");
crate::cli::robot::filter_fields(&mut value, "events", &expanded);
}
println!("{}", serde_json::to_string(&value).unwrap());
}
/// Top-level `{ok, data, meta}` envelope for robot-mode timeline output.
#[derive(Serialize)]
struct TimelineJsonEnvelope {
    // Hard-coded to `true` by `print_timeline_json_with_meta` (success path).
    ok: bool,
    data: TimelineDataJson,
    meta: TimelineMetaJson,
}
/// `data` payload of the robot envelope: the query echo, entity lists, and
/// the event rows themselves.
#[derive(Serialize)]
struct TimelineDataJson {
    query: String,
    // Number of events actually included (post-limit).
    event_count: usize,
    seed_entities: Vec<EntityJson>,
    expanded_entities: Vec<ExpandedEntityJson>,
    unresolved_references: Vec<UnresolvedRefJson>,
    events: Vec<EventJson>,
}
impl TimelineDataJson {
fn from_result(result: &TimelineResult) -> Self {
Self {
query: result.query.clone(),
event_count: result.events.len(),
seed_entities: result.seed_entities.iter().map(EntityJson::from).collect(),
expanded_entities: result
.expanded_entities
.iter()
.map(ExpandedEntityJson::from)
.collect(),
unresolved_references: result
.unresolved_references
.iter()
.map(UnresolvedRefJson::from)
.collect(),
events: result.events.iter().map(EventJson::from).collect(),
}
}
}
/// JSON wire shape for a single entity reference.
#[derive(Serialize)]
struct EntityJson {
    // Serialized as "type" ("issue", "merge_request", ...).
    #[serde(rename = "type")]
    entity_type: String,
    iid: i64,
    project: String,
}
impl From<&EntityRef> for EntityJson {
fn from(e: &EntityRef) -> Self {
Self {
entity_type: e.entity_type.clone(),
iid: e.entity_iid,
project: e.project_path.clone(),
}
}
}
/// JSON wire shape for an entity discovered during expansion, including the
/// provenance (`via`) of how it was reached.
#[derive(Serialize)]
struct ExpandedEntityJson {
    #[serde(rename = "type")]
    entity_type: String,
    iid: i64,
    project: String,
    // Expansion depth at which this entity was found.
    depth: u32,
    // The entity and reference that led here.
    via: ViaJson,
}
impl From<&ExpandedEntityRef> for ExpandedEntityJson {
fn from(e: &ExpandedEntityRef) -> Self {
Self {
entity_type: e.entity_ref.entity_type.clone(),
iid: e.entity_ref.entity_iid,
project: e.entity_ref.project_path.clone(),
depth: e.depth,
via: ViaJson {
from: EntityJson::from(&e.via_from),
reference_type: e.via_reference_type.clone(),
source_method: e.via_source_method.clone(),
},
}
}
}
/// Provenance of an expanded entity: which entity referenced it and how.
#[derive(Serialize)]
struct ViaJson {
    from: EntityJson,
    reference_type: String,
    source_method: String,
}
/// A cross-reference that could not be resolved to a local entity.
#[derive(Serialize)]
struct UnresolvedRefJson {
    // The local entity that contained the reference.
    source: EntityJson,
    // Target coordinates; project and iid may be unknown.
    target_project: Option<String>,
    target_type: String,
    target_iid: Option<i64>,
    reference_type: String,
}
impl From<&UnresolvedRef> for UnresolvedRefJson {
fn from(r: &UnresolvedRef) -> Self {
Self {
source: EntityJson::from(&r.source),
target_project: r.target_project.clone(),
target_type: r.target_type.clone(),
target_iid: r.target_iid,
reference_type: r.reference_type.clone(),
}
}
}
/// One timeline event row in robot output.
#[derive(Serialize)]
struct EventJson {
    // ISO-8601 string produced by `ms_to_iso`.
    timestamp: String,
    entity_type: String,
    entity_iid: i64,
    project: String,
    // snake_case tag from `event_type_to_json` (e.g. "state_changed").
    event_type: String,
    summary: String,
    actor: Option<String>,
    url: Option<String>,
    // True when the event belongs to a seed entity rather than an expanded one.
    is_seed: bool,
    // Variant-specific payload from `event_type_to_json`.
    details: serde_json::Value,
}
impl From<&TimelineEvent> for EventJson {
fn from(e: &TimelineEvent) -> Self {
let (event_type, details) = event_type_to_json(&e.event_type);
Self {
timestamp: ms_to_iso(e.timestamp),
entity_type: e.entity_type.clone(),
entity_iid: e.entity_iid,
project: e.project_path.clone(),
event_type,
summary: e.summary.clone(),
actor: e.actor.clone(),
url: e.url.clone(),
is_seed: e.is_seed,
details,
}
}
}
/// Map a `TimelineEventType` to its robot-JSON form: a snake_case tag string
/// plus a `details` object carrying the variant's payload.
fn event_type_to_json(event_type: &TimelineEventType) -> (String, serde_json::Value) {
    match event_type {
        // Payload-free variants get an empty details object.
        TimelineEventType::Created => ("created".to_owned(), serde_json::json!({})),
        TimelineEventType::StateChanged { state } => (
            "state_changed".to_owned(),
            serde_json::json!({ "state": state }),
        ),
        TimelineEventType::LabelAdded { label } => (
            "label_added".to_owned(),
            serde_json::json!({ "label": label }),
        ),
        TimelineEventType::LabelRemoved { label } => (
            "label_removed".to_owned(),
            serde_json::json!({ "label": label }),
        ),
        TimelineEventType::MilestoneSet { milestone } => (
            "milestone_set".to_owned(),
            serde_json::json!({ "milestone": milestone }),
        ),
        TimelineEventType::MilestoneRemoved { milestone } => (
            "milestone_removed".to_owned(),
            serde_json::json!({ "milestone": milestone }),
        ),
        TimelineEventType::Merged => ("merged".to_owned(), serde_json::json!({})),
        TimelineEventType::NoteEvidence {
            note_id,
            snippet,
            discussion_id,
        } => (
            "note_evidence".to_owned(),
            serde_json::json!({
                "note_id": note_id,
                "snippet": snippet,
                "discussion_id": discussion_id,
            }),
        ),
        // Threads inline every note: id, author, body, and ISO timestamp.
        TimelineEventType::DiscussionThread {
            discussion_id,
            notes,
        } => (
            "discussion_thread".to_owned(),
            serde_json::json!({
                "discussion_id": discussion_id,
                "note_count": notes.len(),
                "notes": notes.iter().map(|n| serde_json::json!({
                    "note_id": n.note_id,
                    "author": n.author,
                    "body": n.body,
                    "created_at": ms_to_iso(n.created_at),
                })).collect::<Vec<_>>(),
            }),
        ),
        TimelineEventType::CrossReferenced { target } => (
            "cross_referenced".to_owned(),
            serde_json::json!({ "target": target }),
        ),
    }
}
/// `meta` block of the robot envelope: pipeline statistics and settings.
#[derive(Serialize)]
struct TimelineMetaJson {
    // Search mode reported by the SEED stage (e.g. hybrid vs FTS fallback).
    search_mode: String,
    expansion_depth: u32,
    expand_mentions: bool,
    // Seed + expanded entity count.
    total_entities: usize,
    // Event count before the --limit truncation.
    total_events: usize,
    evidence_notes_included: usize,
    discussion_threads_included: usize,
    unresolved_references: usize,
    // Events actually included after the limit.
    showing: usize,
}
/// Count the `NoteEvidence` events for the JSON meta block.
fn count_evidence_notes(events: &[TimelineEvent]) -> usize {
    let mut total = 0;
    for event in events {
        if let TimelineEventType::NoteEvidence { .. } = event.event_type {
            total += 1;
        }
    }
    total
}
/// Count the `DiscussionThread` events for the JSON meta block.
fn count_discussion_threads(events: &[TimelineEvent]) -> usize {
    let mut total = 0;
    for event in events {
        if let TimelineEventType::DiscussionThread { .. } = event.event_type {
            total += 1;
        }
    }
    total
}