feat(timeline): upgrade seed phase to hybrid search

Replace FTS-only seed entity discovery with hybrid search (FTS + vector
via RRF), using the same search_hybrid infrastructure as the search
command. Falls back gracefully to FTS-only when Ollama is unavailable.
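
The RRF merge itself lives in search_hybrid and is not visible in this
diff. For reference, a minimal sketch of reciprocal rank fusion over two
ranked lists of document IDs (the rrf_merge helper and the k = 60
convention are illustrative, not this crate's implementation):

    use std::collections::HashMap;

    /// Illustrative RRF: score(d) = sum over lists of 1 / (k + rank),
    /// with 1-based ranks. k = 60 is the conventional default.
    fn rrf_merge(fts: &[i64], vector: &[i64], k: f64) -> Vec<i64> {
        let mut scores: HashMap<i64, f64> = HashMap::new();
        for list in [fts, vector] {
            for (rank, doc_id) in list.iter().enumerate() {
                // `rank` is 0-based, hence the `+ 1.0`.
                *scores.entry(*doc_id).or_insert(0.0) += 1.0 / (k + rank as f64 + 1.0);
            }
        }
        let mut merged: Vec<(i64, f64)> = scores.into_iter().collect();
        merged.sort_by(|a, b| b.1.total_cmp(&a.1));
        merged.into_iter().map(|(id, _)| id).collect()
    }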

Changes:
- seed_timeline() now accepts an OllamaClient and delegates to search_hybrid (see the usage sketch below)
- New resolve_documents_to_entities() replaces find_seed_entities()
- SeedResult gains search_mode field tracking actual mode used
- TimelineResult carries search_mode through to JSON renderer
- run_timeline wires up OllamaClient from config
- handle_timeline made async for the hybrid search await
- Tests updated for new function signatures
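
As a usage sketch, a hypothetical call site after this change -- the
demo driver, connection, base_url, and model values are placeholders,
not taken from this diff:

    use crate::core::timeline_seed::seed_timeline;
    use crate::embedding::ollama::{OllamaClient, OllamaConfig};

    // Hypothetical driver; mirrors the run_timeline wiring in this commit.
    async fn demo(conn: &rusqlite::Connection) -> crate::core::error::Result<()> {
        let client = OllamaClient::new(OllamaConfig {
            base_url: "http://localhost:11434".to_owned(), // stock Ollama default
            model: "nomic-embed-text".to_owned(),          // placeholder model
            ..OllamaConfig::default()
        });

        // Some(&client) enables hybrid seeding; None forces the FTS-only path.
        let seed = seed_timeline(conn, Some(&client), "auth bug", None, None, 50, 10).await?;
        println!("{} seeds via {}", seed.seed_entities.len(), seed.search_mode);
        Ok(())
    }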

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Author: teernisse
Date:   2026-02-13 13:50:14 -05:00
Commit: 4f3ec72923 (parent e6771709f1)

6 changed files with 192 additions and 84 deletions

File 1 of 6

@@ -13,6 +13,7 @@ use crate::core::timeline::{
 use crate::core::timeline_collect::collect_events;
 use crate::core::timeline_expand::expand_timeline;
 use crate::core::timeline_seed::seed_timeline;
+use crate::embedding::ollama::{OllamaClient, OllamaConfig};
 
 /// Parameters for running the timeline pipeline.
 pub struct TimelineParams {
@@ -28,7 +29,7 @@ pub struct TimelineParams {
 }
 
 /// Run the full timeline pipeline: SEED -> EXPAND -> COLLECT.
-pub fn run_timeline(config: &Config, params: &TimelineParams) -> Result<TimelineResult> {
+pub async fn run_timeline(config: &Config, params: &TimelineParams) -> Result<TimelineResult> {
     let db_path = get_db_path(config.storage.db_path.as_deref());
     let conn = create_connection(&db_path)?;
@@ -50,15 +51,25 @@ pub fn run_timeline(config: &Config, params: &TimelineParams) -> Result<Timeline
         })
         .transpose()?;
 
-    // Stage 1+2: SEED + HYDRATE
+    // Construct OllamaClient for hybrid search (same pattern as run_search)
+    let ollama_cfg = &config.embedding;
+    let client = OllamaClient::new(OllamaConfig {
+        base_url: ollama_cfg.base_url.clone(),
+        model: ollama_cfg.model.clone(),
+        ..OllamaConfig::default()
+    });
+
+    // Stage 1+2: SEED + HYDRATE (hybrid search with FTS fallback)
     let seed_result = seed_timeline(
         &conn,
+        Some(&client),
         &params.query,
         project_id,
        since_ms,
         params.max_seeds,
         params.max_evidence,
-    )?;
+    )
+    .await?;
 
     // Stage 3: EXPAND
     let expand_result = expand_timeline(
@@ -81,6 +92,7 @@ pub fn run_timeline(config: &Config, params: &TimelineParams) -> Result<Timeline
     Ok(TimelineResult {
         query: params.query.clone(),
+        search_mode: seed_result.search_mode,
         events,
         total_events_before_limit: total_before_limit,
         seed_entities: seed_result.seed_entities,
@@ -258,7 +270,7 @@ pub fn print_timeline_json_with_meta(
             ok: true,
             data: TimelineDataJson::from_result(result),
             meta: TimelineMetaJson {
-                search_mode: "lexical".to_owned(),
+                search_mode: result.search_mode.clone(),
                 expansion_depth: depth,
                 expand_mentions,
                 total_entities: result.seed_entities.len() + result.expanded_entities.len(),

File 2 of 6

@@ -118,6 +118,8 @@ pub struct UnresolvedRef {
 #[derive(Debug, Clone, Serialize)]
 pub struct TimelineResult {
     pub query: String,
+    /// The search mode actually used for seeding (e.g. "hybrid", "lexical", "lexical (hybrid fallback)").
+    pub search_mode: String,
     pub events: Vec<TimelineEvent>,
     /// Total events before the `--limit` was applied (for meta.total_events vs meta.showing).
     #[serde(skip)]

File 3 of 6

@@ -5,23 +5,28 @@ use tracing::debug;
 use crate::core::error::Result;
 use crate::core::timeline::{EntityRef, TimelineEvent, TimelineEventType, resolve_entity_ref};
-use crate::search::{FtsQueryMode, to_fts_query};
+use crate::embedding::ollama::OllamaClient;
+use crate::search::{FtsQueryMode, SearchFilters, SearchMode, search_hybrid, to_fts_query};
 
 /// Result of the seed + hydrate phases.
 pub struct SeedResult {
     pub seed_entities: Vec<EntityRef>,
     pub evidence_notes: Vec<TimelineEvent>,
+    /// The search mode actually used (hybrid with fallback info).
+    pub search_mode: String,
 }
 
 /// Run the SEED + HYDRATE phases of the timeline pipeline.
 ///
-/// 1. SEED: FTS5 keyword search over documents -> matched document IDs
+/// 1. SEED: Hybrid search (FTS + vector via RRF) over documents -> matched document IDs
 /// 2. HYDRATE: Map document IDs -> source entities + top matched notes as evidence
 ///
+/// When `client` is `None` or Ollama is unavailable, falls back to FTS-only search.
 /// Discussion documents are resolved to their parent entity (issue or MR).
 /// Entities are deduplicated. Evidence notes are capped at `max_evidence`.
-pub fn seed_timeline(
+pub async fn seed_timeline(
     conn: &Connection,
+    client: Option<&OllamaClient>,
     query: &str,
     project_id: Option<i64>,
     since_ms: Option<i64>,
@@ -33,57 +38,110 @@ pub fn seed_timeline(
         return Ok(SeedResult {
             seed_entities: Vec::new(),
             evidence_notes: Vec::new(),
+            search_mode: "lexical".to_owned(),
         });
     }
 
-    let seed_entities = find_seed_entities(conn, &fts_query, project_id, since_ms, max_seeds)?;
+    // Use hybrid search for seed entity discovery (better recall than FTS alone).
+    // search_hybrid gracefully falls back to FTS-only when Ollama is unavailable.
+    let filters = SearchFilters {
+        project_id,
+        updated_since: since_ms,
+        limit: max_seeds.saturating_mul(3),
+        ..SearchFilters::default()
+    };
+    let (hybrid_results, warnings) = search_hybrid(
+        conn,
+        client,
+        query,
+        SearchMode::Hybrid,
+        &filters,
+        FtsQueryMode::Safe,
+    )
+    .await?;
+
+    let search_mode = if warnings
+        .iter()
+        .any(|w| w.contains("falling back") || w.contains("FTS only"))
+    {
+        "lexical (hybrid fallback)".to_owned()
+    } else if client.is_some() && !hybrid_results.is_empty() {
+        "hybrid".to_owned()
+    } else {
+        "lexical".to_owned()
+    };
+    for w in &warnings {
+        debug!(warning = %w, "hybrid search warning during timeline seeding");
+    }
+
+    let seed_entities = resolve_documents_to_entities(
+        conn,
+        &hybrid_results
+            .iter()
+            .map(|r| r.document_id)
+            .collect::<Vec<_>>(),
+        max_seeds,
+    )?;
+
+    // Evidence notes stay FTS-only (supplementary context, not worth a second embedding call)
     let evidence_notes = find_evidence_notes(conn, &fts_query, project_id, since_ms, max_evidence)?;
 
     Ok(SeedResult {
         seed_entities,
         evidence_notes,
+        search_mode,
     })
 }
 
-/// Find seed entities via FTS5 search, resolving discussions to their parent entity.
-fn find_seed_entities(
+/// Resolve a list of document IDs to deduplicated entity refs.
+/// Discussion documents are resolved to their parent entity (issue or MR).
+fn resolve_documents_to_entities(
     conn: &Connection,
-    fts_query: &str,
-    project_id: Option<i64>,
-    since_ms: Option<i64>,
-    max_seeds: usize,
+    document_ids: &[i64],
+    max_entities: usize,
 ) -> Result<Vec<EntityRef>> {
-    let sql = r"
+    if document_ids.is_empty() {
+        return Ok(Vec::new());
+    }
+
+    let placeholders: String = document_ids
+        .iter()
+        .map(|_| "?")
+        .collect::<Vec<_>>()
+        .join(",");
+    let sql = format!(
+        r"
         SELECT d.source_type, d.source_id, d.project_id,
                disc.issue_id, disc.merge_request_id
-        FROM documents_fts
-        JOIN documents d ON d.id = documents_fts.rowid
+        FROM documents d
         LEFT JOIN discussions disc ON disc.id = d.source_id AND d.source_type = 'discussion'
-        WHERE documents_fts MATCH ?1
-          AND (?2 IS NULL OR d.project_id = ?2)
-          AND (?3 IS NULL OR d.updated_at >= ?3)
-        ORDER BY rank
-        LIMIT ?4
-    ";
+        WHERE d.id IN ({placeholders})
+        ORDER BY CASE d.id {order_clause} END
+        ",
+        order_clause = document_ids
+            .iter()
+            .enumerate()
+            .map(|(i, id)| format!("WHEN {id} THEN {i}"))
+            .collect::<Vec<_>>()
+            .join(" "),
+    );
 
-    let mut stmt = conn.prepare(sql)?;
-    let rows = stmt.query_map(
-        rusqlite::params![
-            fts_query,
-            project_id,
-            since_ms,
-            max_seeds.saturating_mul(3) as i64
-        ],
-        |row| {
-            Ok((
-                row.get::<_, String>(0)?,
-                row.get::<_, i64>(1)?,
-                row.get::<_, i64>(2)?,
-                row.get::<_, Option<i64>>(3)?,
-                row.get::<_, Option<i64>>(4)?,
-            ))
-        },
-    )?;
+    let mut stmt = conn.prepare(&sql)?;
+    let params: Vec<&dyn rusqlite::types::ToSql> = document_ids
+        .iter()
+        .map(|id| id as &dyn rusqlite::types::ToSql)
+        .collect();
+    let rows = stmt.query_map(params.as_slice(), |row| {
+        Ok((
+            row.get::<_, String>(0)?,
+            row.get::<_, i64>(1)?,
+            row.get::<_, i64>(2)?,
+            row.get::<_, Option<i64>>(3)?,
+            row.get::<_, Option<i64>>(4)?,
+        ))
+    })?;
 
     let mut seen = HashSet::new();
     let mut entities = Vec::new();
@@ -116,7 +174,7 @@ fn find_seed_entities(
             entities.push(entity_ref);
         }
 
-        if entities.len() >= max_seeds {
+        if entities.len() >= max_entities {
             break;
         }
     }

File 4 of 6

@@ -85,16 +85,18 @@ fn insert_note(
     conn.last_insert_rowid()
 }
 
-#[test]
-fn test_seed_empty_query_returns_empty() {
+#[tokio::test]
+async fn test_seed_empty_query_returns_empty() {
     let conn = setup_test_db();
 
-    let result = seed_timeline(&conn, "", None, None, 50, 10).unwrap();
+    let result = seed_timeline(&conn, None, "", None, None, 50, 10)
+        .await
+        .unwrap();
 
     assert!(result.seed_entities.is_empty());
     assert!(result.evidence_notes.is_empty());
 }
 
-#[test]
-fn test_seed_no_matches_returns_empty() {
+#[tokio::test]
+async fn test_seed_no_matches_returns_empty() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
     let issue_id = insert_test_issue(&conn, project_id, 1);
@@ -106,12 +108,14 @@ fn test_seed_no_matches_returns_empty() {
         "unrelated content here",
     );
 
-    let result = seed_timeline(&conn, "nonexistent_xyzzy_query", None, None, 50, 10).unwrap();
+    let result = seed_timeline(&conn, None, "nonexistent_xyzzy_query", None, None, 50, 10)
+        .await
+        .unwrap();
 
     assert!(result.seed_entities.is_empty());
 }
 
-#[test]
-fn test_seed_finds_issue() {
+#[tokio::test]
+async fn test_seed_finds_issue() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
     let issue_id = insert_test_issue(&conn, project_id, 42);
@@ -123,15 +127,17 @@ fn test_seed_finds_issue() {
         "authentication error in login flow",
     );
 
-    let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
+    let result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
+        .await
+        .unwrap();
 
     assert_eq!(result.seed_entities.len(), 1);
     assert_eq!(result.seed_entities[0].entity_type, "issue");
     assert_eq!(result.seed_entities[0].entity_iid, 42);
     assert_eq!(result.seed_entities[0].project_path, "group/project");
 }
 
-#[test]
-fn test_seed_finds_mr() {
+#[tokio::test]
+async fn test_seed_finds_mr() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
     let mr_id = insert_test_mr(&conn, project_id, 99);
@@ -143,14 +149,16 @@ fn test_seed_finds_mr() {
         "fix authentication bug",
     );
 
-    let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
+    let result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
+        .await
+        .unwrap();
 
     assert_eq!(result.seed_entities.len(), 1);
     assert_eq!(result.seed_entities[0].entity_type, "merge_request");
     assert_eq!(result.seed_entities[0].entity_iid, 99);
 }
 
-#[test]
-fn test_seed_deduplicates_entities() {
+#[tokio::test]
+async fn test_seed_deduplicates_entities() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
     let issue_id = insert_test_issue(&conn, project_id, 10);
@@ -172,14 +180,16 @@ fn test_seed_deduplicates_entities() {
         "authentication error second doc",
     );
 
-    let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
+    let result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
+        .await
+        .unwrap();
 
     // Should deduplicate: both map to the same issue
     assert_eq!(result.seed_entities.len(), 1);
     assert_eq!(result.seed_entities[0].entity_iid, 10);
 }
 
-#[test]
-fn test_seed_resolves_discussion_to_parent() {
+#[tokio::test]
+async fn test_seed_resolves_discussion_to_parent() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
     let issue_id = insert_test_issue(&conn, project_id, 7);
@@ -192,14 +202,16 @@ fn test_seed_resolves_discussion_to_parent() {
         "deployment pipeline failed",
     );
 
-    let result = seed_timeline(&conn, "deployment", None, None, 50, 10).unwrap();
+    let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
+        .await
+        .unwrap();
 
     assert_eq!(result.seed_entities.len(), 1);
     assert_eq!(result.seed_entities[0].entity_type, "issue");
     assert_eq!(result.seed_entities[0].entity_iid, 7);
 }
 
-#[test]
-fn test_seed_evidence_capped() {
+#[tokio::test]
+async fn test_seed_evidence_capped() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
     let issue_id = insert_test_issue(&conn, project_id, 1);
@@ -223,12 +235,14 @@ fn test_seed_evidence_capped() {
         );
     }
 
-    let result = seed_timeline(&conn, "deployment", None, None, 50, 5).unwrap();
+    let result = seed_timeline(&conn, None, "deployment", None, None, 50, 5)
+        .await
+        .unwrap();
 
     assert!(result.evidence_notes.len() <= 5);
 }
 
-#[test]
-fn test_seed_evidence_snippet_truncated() {
+#[tokio::test]
+async fn test_seed_evidence_snippet_truncated() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
     let issue_id = insert_test_issue(&conn, project_id, 1);
@@ -244,7 +258,9 @@ fn test_seed_evidence_snippet_truncated() {
     let long_body = "x".repeat(500);
     insert_note(&conn, disc_id, project_id, &long_body, false);
 
-    let result = seed_timeline(&conn, "deployment", None, None, 50, 10).unwrap();
+    let result = seed_timeline(&conn, None, "deployment", None, None, 50, 10)
+        .await
+        .unwrap();
 
     assert!(!result.evidence_notes.is_empty());
     if let TimelineEventType::NoteEvidence { snippet, .. } = &result.evidence_notes[0].event_type {
         assert!(snippet.chars().count() <= 200);
@@ -253,8 +269,8 @@ fn test_seed_evidence_snippet_truncated() {
     }
 }
 
-#[test]
-fn test_seed_respects_project_filter() {
+#[tokio::test]
+async fn test_seed_respects_project_filter() {
     let conn = setup_test_db();
     let project_id = insert_test_project(&conn);
 
@@ -285,7 +301,17 @@ fn test_seed_respects_project_filter() {
     );
 
     // Filter to project 1 only
-    let result = seed_timeline(&conn, "authentication", Some(project_id), None, 50, 10).unwrap();
+    let result = seed_timeline(
+        &conn,
+        None,
+        "authentication",
+        Some(project_id),
+        None,
+        50,
+        10,
+    )
+    .await
+    .unwrap();
 
     assert_eq!(result.seed_entities.len(), 1);
     assert_eq!(result.seed_entities[0].project_path, "group/project");
 }

File 5 of 6

@@ -179,7 +179,9 @@ async fn main() {
         Some(Commands::Search(args)) => {
             handle_search(cli.config.as_deref(), args, robot_mode).await
         }
-        Some(Commands::Timeline(args)) => handle_timeline(cli.config.as_deref(), args, robot_mode),
+        Some(Commands::Timeline(args)) => {
+            handle_timeline(cli.config.as_deref(), args, robot_mode).await
+        }
         Some(Commands::Who(args)) => handle_who(cli.config.as_deref(), args, robot_mode),
         Some(Commands::Drift {
             entity_type,
@@ -1763,7 +1765,7 @@ async fn handle_stats(
     Ok(())
 }
 
-fn handle_timeline(
+async fn handle_timeline(
     config_override: Option<&str>,
     args: TimelineArgs,
     robot_mode: bool,
@@ -1784,7 +1786,7 @@ fn handle_timeline(
         max_evidence: args.max_evidence,
     };
 
-    let result = run_timeline(&config, &params)?;
+    let result = run_timeline(&config, &params).await?;
 
     if robot_mode {
         print_timeline_json_with_meta(

File 6 of 6

@@ -108,8 +108,8 @@ fn insert_label_event(
 /// Full pipeline: seed -> expand -> collect for a scenario with an issue
 /// that has a closing MR, state changes, and label events.
-#[test]
-fn pipeline_seed_expand_collect_end_to_end() {
+#[tokio::test]
+async fn pipeline_seed_expand_collect_end_to_end() {
     let conn = setup_db();
     let project_id = insert_project(&conn, "group/project");
@@ -149,7 +149,9 @@ fn pipeline_seed_expand_collect_end_to_end() {
     insert_label_event(&conn, project_id, Some(issue_id), "bug", 1500);
 
     // SEED: find entities matching "authentication"
-    let seed_result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
+    let seed_result = seed_timeline(&conn, None, "authentication", None, None, 50, 10)
+        .await
+        .unwrap();
     assert!(
         !seed_result.seed_entities.is_empty(),
         "Seed should find at least one entity"
@@ -213,12 +215,14 @@ fn pipeline_seed_expand_collect_end_to_end() {
 }
 
 /// Verify the pipeline handles an empty FTS result gracefully.
-#[test]
-fn pipeline_empty_query_produces_empty_result() {
+#[tokio::test]
+async fn pipeline_empty_query_produces_empty_result() {
     let conn = setup_db();
     let _project_id = insert_project(&conn, "group/project");
 
-    let seed_result = seed_timeline(&conn, "", None, None, 50, 10).unwrap();
+    let seed_result = seed_timeline(&conn, None, "", None, None, 50, 10)
+        .await
+        .unwrap();
     assert!(seed_result.seed_entities.is_empty());
 
     let expand_result = expand_timeline(&conn, &seed_result.seed_entities, 1, false, 100).unwrap();
@@ -237,8 +241,8 @@ fn pipeline_empty_query_produces_empty_result() {
 }
 
 /// Verify since filter propagates through the full pipeline.
-#[test]
-fn pipeline_since_filter_excludes_old_events() {
+#[tokio::test]
+async fn pipeline_since_filter_excludes_old_events() {
     let conn = setup_db();
     let project_id = insert_project(&conn, "group/project");
@@ -255,7 +259,9 @@ fn pipeline_since_filter_excludes_old_events() {
     insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 2000);
     insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 8000);
 
-    let seed_result = seed_timeline(&conn, "deploy", None, None, 50, 10).unwrap();
+    let seed_result = seed_timeline(&conn, None, "deploy", None, None, 50, 10)
+        .await
+        .unwrap();
     let expand_result = expand_timeline(&conn, &seed_result.seed_entities, 0, false, 100).unwrap();
 
     // Collect with since=5000: should exclude Created(1000) and closed(2000)
@@ -274,8 +280,8 @@ fn pipeline_since_filter_excludes_old_events() {
 }
 
 /// Verify unresolved references use Option<i64> for target_iid.
-#[test]
-fn pipeline_unresolved_refs_have_optional_iid() {
+#[tokio::test]
+async fn pipeline_unresolved_refs_have_optional_iid() {
     let conn = setup_db();
     let project_id = insert_project(&conn, "group/project");
@@ -302,7 +308,9 @@ fn pipeline_unresolved_refs_have_optional_iid() {
     )
     .unwrap();
 
-    let seed_result = seed_timeline(&conn, "cross project", None, None, 50, 10).unwrap();
+    let seed_result = seed_timeline(&conn, None, "cross project", None, None, 50, 10)
+        .await
+        .unwrap();
     let expand_result = expand_timeline(&conn, &seed_result.seed_entities, 1, false, 100).unwrap();
 
     assert_eq!(expand_result.unresolved_references.len(), 2);