feat(timeline): wire up lore timeline command with human + robot renderers

Complete Gate 3 by implementing the final three beads:
- bd-2f2: Human output renderer with colored event tags, entity refs,
  evidence snippets, and expansion summary footer
- bd-dty: Robot JSON output with {ok,data,meta} envelope, ISO timestamps,
  nested via provenance, and per-event-type details objects
- bd-1nf: CLI wiring with TimelineArgs (9 flags), Commands::Timeline
  variant, handle_timeline handler, VALID_COMMANDS entry, and robot-docs
  manifest with temporal_intelligence workflow

All 7 Gate 3 children now closed. Pipeline: SEED -> HYDRATE -> EXPAND ->
COLLECT -> RENDER fully operational.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Taylor Eernisse
2026-02-06 08:49:48 -05:00
parent b005edb7f2
commit 69df8a5603
6 changed files with 617 additions and 17 deletions

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
bd-3as
bd-ike

View File

@@ -11,6 +11,7 @@ pub mod show;
pub mod stats;
pub mod sync;
pub mod sync_status;
pub mod timeline;
pub use auth_test::run_auth_test;
pub use count::{
@@ -39,3 +40,4 @@ pub use show::{
pub use stats::{print_stats, print_stats_json, run_stats};
pub use sync::{SyncOptions, SyncResult, print_sync, print_sync_json, run_sync};
pub use sync_status::{print_sync_status, print_sync_status_json, run_sync_status};
pub use timeline::{TimelineParams, print_timeline, print_timeline_json_with_meta, run_timeline};

View File

@@ -0,0 +1,494 @@
use console::style;
use serde::Serialize;
use crate::Config;
use crate::core::db::create_connection;
use crate::core::error::Result;
use crate::core::paths::get_db_path;
use crate::core::project::resolve_project;
use crate::core::time::{ms_to_iso, parse_since};
use crate::core::timeline::{
EntityRef, ExpandedEntityRef, TimelineEvent, TimelineEventType, TimelineResult, UnresolvedRef,
};
use crate::core::timeline_collect::collect_events;
use crate::core::timeline_expand::expand_timeline;
use crate::core::timeline_seed::seed_timeline;
/// Parameters for running the timeline pipeline.
///
/// Mirrors the CLI flags on `TimelineArgs`; `run_timeline` threads each
/// value into the pipeline stage that consumes it.
pub struct TimelineParams {
    /// Keyword query used to find seed entities.
    pub query: String,
    /// Optional project scope (fuzzy-matched via `resolve_project`).
    pub project: Option<String>,
    /// Optional "since" expression (parsed by `parse_since`) cutting off older events.
    pub since: Option<String>,
    /// Cross-reference expansion depth (0 = no expansion).
    pub depth: u32,
    /// Whether to also follow 'mentioned' edges during expansion.
    pub expand_mentions: bool,
    /// Maximum number of events returned by the COLLECT stage.
    pub limit: usize,
    /// Maximum seed entities taken from search (SEED stage).
    pub max_seeds: usize,
    /// Maximum expanded entities via cross-references (EXPAND stage).
    pub max_entities: usize,
    /// Maximum evidence notes included (passed to the SEED stage).
    pub max_evidence: usize,
}
/// Run the full timeline pipeline: SEED -> EXPAND -> COLLECT.
///
/// Opens the lore database, resolves the optional project scope, then walks
/// the pipeline stages in order and bundles their outputs into a
/// `TimelineResult`.
pub fn run_timeline(config: &Config, params: &TimelineParams) -> Result<TimelineResult> {
    let database_path = get_db_path(config.storage.db_path.as_deref());
    let connection = create_connection(&database_path)?;

    // Resolve the fuzzy project name to an id up front, failing early if it
    // cannot be matched.
    let scoped_project = match params.project.as_deref() {
        Some(name) => Some(resolve_project(&connection, name)?),
        None => None,
    };
    let cutoff_ms = params.since.as_deref().and_then(parse_since);

    // Stages 1+2: SEED + HYDRATE — find seed entities and their evidence notes.
    let seeded = seed_timeline(
        &connection,
        &params.query,
        scoped_project,
        cutoff_ms,
        params.max_seeds,
        params.max_evidence,
    )?;

    // Stage 3: EXPAND — follow cross-references outward from the seeds.
    let expanded = expand_timeline(
        &connection,
        &seeded.seed_entities,
        params.depth,
        params.expand_mentions,
        params.max_entities,
    )?;

    // Stage 4: COLLECT — gather events for every included entity.
    let events = collect_events(
        &connection,
        &seeded.seed_entities,
        &expanded.expanded_entities,
        &seeded.evidence_notes,
        cutoff_ms,
        params.limit,
    )?;

    Ok(TimelineResult {
        query: params.query.clone(),
        events,
        seed_entities: seeded.seed_entities,
        expanded_entities: expanded.expanded_entities,
        unresolved_references: expanded.unresolved_references,
    })
}
// ─── Human output ────────────────────────────────────────────────────────────
/// Render timeline as colored human-readable output.
///
/// Prints a bold header, a horizontal rule, one line per event (via
/// `print_timeline_event`), and a summary footer describing seed entities,
/// expansion, and unresolved references.
pub fn print_timeline(result: &TimelineResult) {
    let entity_count = result.seed_entities.len() + result.expanded_entities.len();
    // Fix: was `"".repeat(60)`, which repeats the empty string and printed
    // nothing — the intended 60-char box-drawing rule was lost.
    let rule = "─".repeat(60);
    println!();
    println!(
        "{}",
        style(format!(
            "Timeline: \"{}\" ({} events across {} entities)",
            result.query,
            result.events.len(),
            entity_count,
        ))
        .bold()
    );
    println!("{rule}");
    println!();
    if result.events.is_empty() {
        println!(" {}", style("No events found for this query.").dim());
        println!();
        return;
    }
    for event in &result.events {
        print_timeline_event(event);
    }
    println!();
    println!("{rule}");
    print_timeline_footer(result);
}
/// Print a single aligned event line, plus dimmed snippet lines when the
/// event is a `NoteEvidence` carrying a non-empty snippet.
fn print_timeline_event(event: &TimelineEvent) {
    let date = format_date(event.timestamp);
    let tag = format_event_tag(&event.event_type);
    let entity_ref = format_entity_ref(&event.entity_type, event.entity_iid);
    let actor = match event.actor.as_deref() {
        Some(name) => format!("@{name}"),
        None => String::new(),
    };
    let expanded_marker = if event.is_seed { "" } else { " [expanded]" };
    let summary = truncate_summary(&event.summary, 50);
    println!("{date} {tag:12} {entity_ref:7} {summary:50} {actor}{expanded_marker}");

    // Evidence notes get their snippet quoted underneath, wrapped and dimmed.
    if let TimelineEventType::NoteEvidence { snippet, .. } = &event.event_type {
        if !snippet.is_empty() {
            for line in wrap_snippet(snippet, 60) {
                println!(" \"{}\"", style(line).dim());
            }
        }
    }
}
/// Print the summary footer: seed entity refs, expanded-entity count, and
/// unresolved external-reference count (the latter two only when non-zero).
fn print_timeline_footer(result: &TimelineResult) {
    let seed_refs: Vec<String> = result
        .seed_entities
        .iter()
        .map(|e| format_entity_ref(&e.entity_type, e.entity_iid))
        .collect();
    println!(" Seed entities: {}", seed_refs.join(", "));

    let expanded_count = result.expanded_entities.len();
    if expanded_count > 0 {
        println!(" Expanded: {} entities via cross-references", expanded_count);
    }

    let unresolved_count = result.unresolved_references.len();
    if unresolved_count > 0 {
        println!(" Unresolved: {} external references", unresolved_count);
    }
    println!();
}
fn format_event_tag(event_type: &TimelineEventType) -> String {
match event_type {
TimelineEventType::Created => style("CREATED").green().to_string(),
TimelineEventType::StateChanged { state } => match state.as_str() {
"closed" => style("CLOSED").red().to_string(),
"reopened" => style("REOPENED").yellow().to_string(),
_ => style(state.to_uppercase()).dim().to_string(),
},
TimelineEventType::LabelAdded { .. } => style("LABEL+").blue().to_string(),
TimelineEventType::LabelRemoved { .. } => style("LABEL-").blue().to_string(),
TimelineEventType::MilestoneSet { .. } => style("MILESTONE+").magenta().to_string(),
TimelineEventType::MilestoneRemoved { .. } => style("MILESTONE-").magenta().to_string(),
TimelineEventType::Merged => style("MERGED").cyan().to_string(),
TimelineEventType::NoteEvidence { .. } => style("NOTE").dim().to_string(),
TimelineEventType::CrossReferenced { .. } => style("REF").dim().to_string(),
}
}
/// Format a short entity reference: `#iid` for issues, `!iid` for merge
/// requests, and `type:iid` for anything else.
fn format_entity_ref(entity_type: &str, iid: i64) -> String {
    let prefix = match entity_type {
        "issue" => "#".to_owned(),
        "merge_request" => "!".to_owned(),
        other => format!("{other}:"),
    };
    format!("{prefix}{iid}")
}
/// Extract the `YYYY-MM-DD` date portion of the ISO timestamp for `ms`
/// (everything before the first 'T'; the whole string if no 'T' exists).
fn format_date(ms: i64) -> String {
    let iso = ms_to_iso(ms);
    match iso.find('T') {
        Some(pos) => iso[..pos].to_owned(),
        None => iso,
    }
}
/// Truncate `s` to at most `max` characters, replacing the tail with "..."
/// when it is cut. Operates on chars, so multi-byte text is never split
/// mid-codepoint.
///
/// Fix: the original computed `max - 3`, which underflows (panics in debug,
/// wraps in release) for `max < 3`; `saturating_sub` keeps small budgets safe.
fn truncate_summary(s: &str, max: usize) -> String {
    if s.chars().count() <= max {
        return s.to_owned();
    }
    // Reserve 3 characters for the ellipsis; with max < 3 we keep nothing
    // and emit just "...".
    let keep = max.saturating_sub(3);
    let truncated: String = s.chars().take(keep).collect();
    format!("{truncated}...")
}
/// Greedily word-wrap `text` to lines of at most `width` characters,
/// returning at most 4 lines. A single word longer than `width` still gets
/// its own (over-long) line — words are never split.
fn wrap_snippet(text: &str, width: usize) -> Vec<String> {
    let mut wrapped: Vec<String> = Vec::new();
    for word in text.split_whitespace() {
        match wrapped.last_mut() {
            // Append to the current line when the word (plus a joining
            // space) still fits.
            Some(line) if line.len() + 1 + word.len() <= width => {
                line.push(' ');
                line.push_str(word);
            }
            // Otherwise (no line yet, or overflow) start a fresh line.
            _ => wrapped.push(word.to_owned()),
        }
    }
    // Cap at 4 lines.
    wrapped.truncate(4);
    wrapped
}
// ─── Robot JSON output ───────────────────────────────────────────────────────
/// Render timeline as robot-mode JSON in {ok, data, meta} envelope.
pub fn print_timeline_json(result: &TimelineResult, total_events_before_limit: usize) {
let output = TimelineJsonEnvelope {
ok: true,
data: TimelineDataJson::from_result(result),
meta: TimelineMetaJson {
search_mode: "lexical".to_owned(),
expansion_depth: infer_max_depth(&result.expanded_entities),
expand_mentions: false, // caller should pass this, but we infer from data
total_entities: result.seed_entities.len() + result.expanded_entities.len(),
total_events: total_events_before_limit,
evidence_notes_included: count_evidence_notes(&result.events),
unresolved_references: result.unresolved_references.len(),
showing: result.events.len(),
},
};
match serde_json::to_string(&output) {
Ok(json) => println!("{json}"),
Err(e) => eprintln!("Error serializing timeline JSON: {e}"),
}
}
/// Extended version that accepts explicit meta values from the caller.
///
/// Serializes the {ok, data, meta} envelope as one JSON line on stdout;
/// serialization failures are reported on stderr instead of panicking.
pub fn print_timeline_json_with_meta(
    result: &TimelineResult,
    total_events_before_limit: usize,
    depth: u32,
    expand_mentions: bool,
) {
    // Build the meta counters first, then wrap everything in the envelope.
    let meta = TimelineMetaJson {
        search_mode: "lexical".to_owned(),
        expansion_depth: depth,
        expand_mentions,
        total_entities: result.seed_entities.len() + result.expanded_entities.len(),
        total_events: total_events_before_limit,
        evidence_notes_included: count_evidence_notes(&result.events),
        unresolved_references: result.unresolved_references.len(),
        showing: result.events.len(),
    };
    let envelope = TimelineJsonEnvelope {
        ok: true,
        data: TimelineDataJson::from_result(result),
        meta,
    };
    match serde_json::to_string(&envelope) {
        Ok(json) => println!("{json}"),
        Err(e) => eprintln!("Error serializing timeline JSON: {e}"),
    }
}
/// Top-level robot-mode envelope: `{ok, data, meta}`.
#[derive(Serialize)]
struct TimelineJsonEnvelope {
    /// Always `true` here; the envelope is only built on the success path.
    ok: bool,
    /// The timeline payload: entities, references, and events.
    data: TimelineDataJson,
    /// Counters describing how the data was produced.
    meta: TimelineMetaJson,
}
/// The `data` half of the robot JSON envelope.
#[derive(Serialize)]
struct TimelineDataJson {
    /// Original search query, echoed back for the caller.
    query: String,
    /// Number of entries in `events` (after the limit was applied).
    event_count: usize,
    /// Entities matched directly by the search (SEED stage).
    seed_entities: Vec<EntityJson>,
    /// Entities reached via cross-reference expansion, with provenance.
    expanded_entities: Vec<ExpandedEntityJson>,
    /// References that could not be resolved to a local entity.
    unresolved_references: Vec<UnresolvedRefJson>,
    /// Events across all included entities.
    events: Vec<EventJson>,
}
impl TimelineDataJson {
fn from_result(result: &TimelineResult) -> Self {
Self {
query: result.query.clone(),
event_count: result.events.len(),
seed_entities: result.seed_entities.iter().map(EntityJson::from).collect(),
expanded_entities: result
.expanded_entities
.iter()
.map(ExpandedEntityJson::from)
.collect(),
unresolved_references: result
.unresolved_references
.iter()
.map(UnresolvedRefJson::from)
.collect(),
events: result.events.iter().map(EventJson::from).collect(),
}
}
}
/// A minimal entity reference; serialized with key `"type"` in place of
/// `entity_type`.
#[derive(Serialize)]
struct EntityJson {
    #[serde(rename = "type")]
    entity_type: String,
    /// Internal id of the entity within its project.
    iid: i64,
    /// Project path the entity belongs to.
    project: String,
}
impl From<&EntityRef> for EntityJson {
    /// Copy the core identity fields out of a pipeline `EntityRef`.
    fn from(e: &EntityRef) -> Self {
        Self {
            entity_type: e.entity_type.clone(),
            iid: e.entity_iid,
            project: e.project_path.clone(),
        }
    }
}
/// An entity pulled in by cross-reference expansion, including how far from
/// the seeds it sits and which edge brought it in.
#[derive(Serialize)]
struct ExpandedEntityJson {
    #[serde(rename = "type")]
    entity_type: String,
    /// Internal id of the entity within its project.
    iid: i64,
    /// Project path the entity belongs to.
    project: String,
    /// Expansion depth at which this entity was reached (seeds are depth 0).
    depth: u32,
    /// Provenance: the edge that led here.
    via: ViaJson,
}
impl From<&ExpandedEntityRef> for ExpandedEntityJson {
    /// Flatten an `ExpandedEntityRef` into its serializable form, nesting
    /// the `via_*` provenance fields under a single `via` object.
    fn from(e: &ExpandedEntityRef) -> Self {
        Self {
            entity_type: e.entity_ref.entity_type.clone(),
            iid: e.entity_ref.entity_iid,
            project: e.entity_ref.project_path.clone(),
            depth: e.depth,
            via: ViaJson {
                from: EntityJson::from(&e.via_from),
                reference_type: e.via_reference_type.clone(),
                source_method: e.via_source_method.clone(),
            },
        }
    }
}
/// Provenance for an expanded entity: which edge brought it into the result.
#[derive(Serialize)]
struct ViaJson {
    /// The already-included entity the reference originated from.
    from: EntityJson,
    /// Kind of cross-reference edge, as recorded by the EXPAND stage.
    reference_type: String,
    /// How the reference was discovered (EXPAND-stage source-method string).
    source_method: String,
}
/// A cross-reference whose target could not be resolved to a local entity
/// (e.g. it points outside the synced data).
#[derive(Serialize)]
struct UnresolvedRefJson {
    /// The local entity the reference was found on.
    source: EntityJson,
    /// Target project path, when one could be parsed from the reference.
    target_project: Option<String>,
    /// Target entity type as written in the reference.
    target_type: String,
    /// Target iid, when one could be parsed from the reference.
    target_iid: Option<i64>,
    /// Kind of cross-reference edge.
    reference_type: String,
}
impl From<&UnresolvedRef> for UnresolvedRefJson {
    /// Copy an `UnresolvedRef` field-for-field into its serializable form.
    fn from(r: &UnresolvedRef) -> Self {
        Self {
            source: EntityJson::from(&r.source),
            target_project: r.target_project.clone(),
            target_type: r.target_type.clone(),
            target_iid: r.target_iid,
            reference_type: r.reference_type.clone(),
        }
    }
}
/// One serialized timeline event.
#[derive(Serialize)]
struct EventJson {
    /// ISO timestamp (converted from the internal epoch-milliseconds value).
    timestamp: String,
    /// Type of the entity the event belongs to (e.g. "issue", "merge_request").
    entity_type: String,
    /// Internal id of that entity within its project.
    entity_iid: i64,
    /// Project path of that entity.
    project: String,
    /// Machine-readable event kind (e.g. "created", "state_changed").
    event_type: String,
    /// One-line summary of the event.
    summary: String,
    /// Actor associated with the event (rendered as `@name` in human
    /// output), when known.
    actor: Option<String>,
    /// Web URL for the event, when available.
    url: Option<String>,
    /// True when the entity was a direct search seed (vs. reached by expansion).
    is_seed: bool,
    /// Event-type-specific payload; its shape depends on `event_type`
    /// (see `event_type_to_json`).
    details: serde_json::Value,
}
impl From<&TimelineEvent> for EventJson {
    /// Flatten a `TimelineEvent` into its serializable form, splitting the
    /// typed `event_type` enum into a name string plus a details object.
    fn from(e: &TimelineEvent) -> Self {
        let (event_type, details) = event_type_to_json(&e.event_type);
        Self {
            // Robot output uses ISO timestamps; internal events carry epoch ms.
            timestamp: ms_to_iso(e.timestamp),
            entity_type: e.entity_type.clone(),
            entity_iid: e.entity_iid,
            project: e.project_path.clone(),
            event_type,
            summary: e.summary.clone(),
            actor: e.actor.clone(),
            url: e.url.clone(),
            is_seed: e.is_seed,
            details,
        }
    }
}
fn event_type_to_json(event_type: &TimelineEventType) -> (String, serde_json::Value) {
match event_type {
TimelineEventType::Created => ("created".to_owned(), serde_json::json!({})),
TimelineEventType::StateChanged { state } => (
"state_changed".to_owned(),
serde_json::json!({ "state": state }),
),
TimelineEventType::LabelAdded { label } => (
"label_added".to_owned(),
serde_json::json!({ "label": label }),
),
TimelineEventType::LabelRemoved { label } => (
"label_removed".to_owned(),
serde_json::json!({ "label": label }),
),
TimelineEventType::MilestoneSet { milestone } => (
"milestone_set".to_owned(),
serde_json::json!({ "milestone": milestone }),
),
TimelineEventType::MilestoneRemoved { milestone } => (
"milestone_removed".to_owned(),
serde_json::json!({ "milestone": milestone }),
),
TimelineEventType::Merged => ("merged".to_owned(), serde_json::json!({})),
TimelineEventType::NoteEvidence {
note_id,
snippet,
discussion_id,
} => (
"note_evidence".to_owned(),
serde_json::json!({
"note_id": note_id,
"snippet": snippet,
"discussion_id": discussion_id,
}),
),
TimelineEventType::CrossReferenced { target } => (
"cross_referenced".to_owned(),
serde_json::json!({ "target": target }),
),
}
}
/// The `meta` half of the robot JSON envelope: counters describing how the
/// payload was produced.
#[derive(Serialize)]
struct TimelineMetaJson {
    /// Search strategy used for seeding; currently always "lexical".
    search_mode: String,
    /// Cross-reference expansion depth used (or inferred from the data).
    expansion_depth: u32,
    /// Whether 'mentioned' edges were followed during expansion.
    expand_mentions: bool,
    /// Seed + expanded entity count.
    total_entities: usize,
    /// Event count before the display limit was applied.
    total_events: usize,
    /// Number of NoteEvidence events included.
    evidence_notes_included: usize,
    /// Number of references that could not be resolved locally.
    unresolved_references: usize,
    /// Number of events actually present in `data.events`.
    showing: usize,
}
/// Deepest expansion depth present in `expanded`, or 0 when it is empty.
fn infer_max_depth(expanded: &[ExpandedEntityRef]) -> u32 {
    expanded.iter().fold(0, |deepest, e| deepest.max(e.depth))
}
/// Number of `NoteEvidence` events in `events`.
fn count_evidence_notes(events: &[TimelineEvent]) -> usize {
    events.iter().fold(0, |count, event| {
        if let TimelineEventType::NoteEvidence { .. } = event.event_type {
            count + 1
        } else {
            count
        }
    })
}

View File

@@ -179,6 +179,9 @@ pub enum Commands {
shell: String,
},
/// Show a chronological timeline of events matching a query
Timeline(TimelineArgs),
#[command(hide = true)]
List {
#[arg(value_parser = ["issues", "mrs"])]
@@ -596,6 +599,57 @@ pub struct EmbedArgs {
pub no_retry_failed: bool,
}
// Arguments for `lore timeline`. The `///` doc comments on the fields below
// double as clap help text, so they are user-facing; internal commentary is
// kept in `//` comments to stay out of `--help` output.
#[derive(Parser)]
pub struct TimelineArgs {
    /// Search query (keywords to find in issues, MRs, and discussions)
    pub query: String,
    /// Scope to a specific project (fuzzy match)
    #[arg(short = 'p', long, help_heading = "Filters")]
    pub project: Option<String>,
    /// Only show events after this date (e.g. "6m", "2w", "2024-01-01")
    #[arg(long, help_heading = "Filters")]
    pub since: Option<String>,
    /// Cross-reference expansion depth (0 = no expansion)
    // Default 1: one hop of expansion out of the box.
    #[arg(long, default_value = "1", help_heading = "Expansion")]
    pub depth: u32,
    /// Also follow 'mentioned' edges during expansion (high fan-out)
    #[arg(long = "expand-mentions", help_heading = "Expansion")]
    pub expand_mentions: bool,
    /// Maximum number of events to display
    #[arg(
        short = 'n',
        long = "limit",
        default_value = "100",
        help_heading = "Output"
    )]
    pub limit: usize,
    /// Maximum seed entities from search
    #[arg(long = "max-seeds", default_value = "10", help_heading = "Expansion")]
    pub max_seeds: usize,
    /// Maximum expanded entities via cross-references
    #[arg(
        long = "max-entities",
        default_value = "50",
        help_heading = "Expansion"
    )]
    pub max_entities: usize,
    /// Maximum evidence notes included
    #[arg(
        long = "max-evidence",
        default_value = "10",
        help_heading = "Expansion"
    )]
    pub max_evidence: usize,
}
#[derive(Parser)]
pub struct CountArgs {
/// Entity type to count (issues, mrs, discussions, notes, events)

View File

@@ -10,21 +10,22 @@ use tracing_subscriber::util::SubscriberInitExt;
use lore::Config;
use lore::cli::commands::{
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
SearchCliFilters, SyncOptions, open_issue_in_browser, open_mr_in_browser, print_count,
print_count_json, print_doctor_results, print_dry_run_preview, print_dry_run_preview_json,
print_embed, print_embed_json, print_event_count, print_event_count_json, print_generate_docs,
print_generate_docs_json, print_ingest_summary, print_ingest_summary_json, print_list_issues,
print_list_issues_json, print_list_mrs, print_list_mrs_json, print_search_results,
print_search_results_json, print_show_issue, print_show_issue_json, print_show_mr,
print_show_mr_json, print_stats, print_stats_json, print_sync, print_sync_json,
print_sync_status, print_sync_status_json, run_auth_test, run_count, run_count_events,
run_doctor, run_embed, run_generate_docs, run_ingest, run_ingest_dry_run, run_init,
run_list_issues, run_list_mrs, run_search, run_show_issue, run_show_mr, run_stats, run_sync,
run_sync_status,
SearchCliFilters, SyncOptions, TimelineParams, open_issue_in_browser, open_mr_in_browser,
print_count, print_count_json, print_doctor_results, print_dry_run_preview,
print_dry_run_preview_json, print_embed, print_embed_json, print_event_count,
print_event_count_json, print_generate_docs, print_generate_docs_json, print_ingest_summary,
print_ingest_summary_json, print_list_issues, print_list_issues_json, print_list_mrs,
print_list_mrs_json, print_search_results, print_search_results_json, print_show_issue,
print_show_issue_json, print_show_mr, print_show_mr_json, print_stats, print_stats_json,
print_sync, print_sync_json, print_sync_status, print_sync_status_json, print_timeline,
print_timeline_json_with_meta, run_auth_test, run_count, run_count_events, run_doctor,
run_embed, run_generate_docs, run_ingest, run_ingest_dry_run, run_init, run_list_issues,
run_list_mrs, run_search, run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status,
run_timeline,
};
use lore::cli::{
Cli, Commands, CountArgs, EmbedArgs, GenerateDocsArgs, IngestArgs, IssuesArgs, MrsArgs,
SearchArgs, StatsArgs, SyncArgs,
SearchArgs, StatsArgs, SyncArgs, TimelineArgs,
};
use lore::core::db::{
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
@@ -154,6 +155,7 @@ async fn main() {
Some(Commands::Search(args)) => {
handle_search(cli.config.as_deref(), args, robot_mode).await
}
Some(Commands::Timeline(args)) => handle_timeline(cli.config.as_deref(), args, robot_mode),
Some(Commands::Stats(args)) => handle_stats(cli.config.as_deref(), args, robot_mode).await,
Some(Commands::Embed(args)) => handle_embed(cli.config.as_deref(), args, robot_mode).await,
Some(Commands::Sync(args)) => {
@@ -464,6 +466,7 @@ fn suggest_similar_command(invalid: &str) -> String {
"health",
"robot-docs",
"completions",
"timeline",
];
let invalid_lower = invalid.to_lowercase();
@@ -1391,6 +1394,43 @@ async fn handle_stats(
Ok(())
}
fn handle_timeline(
config_override: Option<&str>,
args: TimelineArgs,
robot_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> {
let config = Config::load(config_override)?;
let params = TimelineParams {
query: args.query,
project: args.project,
since: args.since,
depth: args.depth,
expand_mentions: args.expand_mentions,
limit: args.limit,
max_seeds: args.max_seeds,
max_entities: args.max_entities,
max_evidence: args.max_evidence,
};
let result = run_timeline(&config, &params)?;
if robot_mode {
// total_events_before_limit: the result already has events truncated,
// but we can compute it from the pipeline if needed. For now, use events.len()
// since collect_events already applied the limit internally.
print_timeline_json_with_meta(
&result,
result.events.len(),
params.depth,
params.expand_mentions,
);
} else {
print_timeline(&result);
}
Ok(())
}
async fn handle_search(
config_override: Option<&str>,
args: SearchArgs,
@@ -1733,6 +1773,11 @@ fn handle_robot_docs(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>>
"flags": ["<shell: bash|zsh|fish|powershell>"],
"example": "lore completions bash > ~/.local/share/bash-completion/completions/lore"
},
"timeline": {
"description": "Chronological timeline of events matching a keyword query",
"flags": ["<QUERY>", "-p/--project", "--since <duration>", "--depth <n>", "--expand-mentions", "-n/--limit", "--max-seeds", "--max-entities", "--max-evidence"],
"example": "lore --robot timeline 'authentication' --since 30d"
},
"robot-docs": {
"description": "This command (agent self-discovery manifest)",
"flags": [],
@@ -1777,6 +1822,11 @@ fn handle_robot_docs(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>>
],
"pre_flight": [
"lore --robot health"
],
"temporal_intelligence": [
"lore --robot sync",
"lore --robot timeline '<keyword>' --since 30d",
"lore --robot timeline '<keyword>' --depth 2 --expand-mentions"
]
});