- Add HTTP adapter layer (src/http.rs) wrapping asupersync h1 client - Migrate gitlab client, graphql, and ollama to HTTP adapter - Swap entrypoint from #[tokio::main] to RuntimeBuilder::new().block_on() - Rewrite signal handler for asupersync (RuntimeHandle::spawn + ctrl_c()) - Migrate rate limiter sleeps to asupersync::time::sleep(wall_now(), d) - Add asupersync-native HTTP integration tests - Convert timeline_seed_tests to RuntimeBuilder pattern Phases 1-3 of asupersync migration (atomic: code won't compile without all pieces).
554 lines
18 KiB
Rust
554 lines
18 KiB
Rust
use super::*;
|
|
use crate::core::db::{create_connection, run_migrations};
|
|
use std::path::Path;
|
|
|
|
fn setup_test_db() -> Connection {
|
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
|
run_migrations(&conn).unwrap();
|
|
conn
|
|
}
|
|
|
|
fn insert_test_project(conn: &Connection) -> i64 {
|
|
conn.execute(
|
|
"INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
|
|
[],
|
|
)
|
|
.unwrap();
|
|
conn.last_insert_rowid()
|
|
}
|
|
|
|
fn insert_test_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
|
|
conn.execute(
|
|
"INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test issue', 'opened', 'alice', 1000, 2000, 3000)",
|
|
rusqlite::params![iid * 100, project_id, iid],
|
|
)
|
|
.unwrap();
|
|
conn.last_insert_rowid()
|
|
}
|
|
|
|
fn insert_test_mr(conn: &Connection, project_id: i64, iid: i64) -> i64 {
|
|
conn.execute(
|
|
"INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test MR', 'opened', 'bob', 1000, 2000, 3000)",
|
|
rusqlite::params![iid * 100, project_id, iid],
|
|
)
|
|
.unwrap();
|
|
conn.last_insert_rowid()
|
|
}
|
|
|
|
fn insert_document(
|
|
conn: &Connection,
|
|
source_type: &str,
|
|
source_id: i64,
|
|
project_id: i64,
|
|
content: &str,
|
|
) -> i64 {
|
|
conn.execute(
|
|
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) VALUES (?1, ?2, ?3, ?4, ?5)",
|
|
rusqlite::params![source_type, source_id, project_id, content, format!("hash_{source_id}")],
|
|
)
|
|
.unwrap();
|
|
conn.last_insert_rowid()
|
|
}
|
|
|
|
fn insert_discussion(
|
|
conn: &Connection,
|
|
project_id: i64,
|
|
issue_id: Option<i64>,
|
|
mr_id: Option<i64>,
|
|
) -> i64 {
|
|
let noteable_type = if issue_id.is_some() {
|
|
"Issue"
|
|
} else {
|
|
"MergeRequest"
|
|
};
|
|
conn.execute(
|
|
"INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, merge_request_id, noteable_type, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, 0)",
|
|
rusqlite::params![format!("disc_{}", rand::random::<u32>()), project_id, issue_id, mr_id, noteable_type],
|
|
)
|
|
.unwrap();
|
|
conn.last_insert_rowid()
|
|
}
|
|
|
|
fn insert_note(
|
|
conn: &Connection,
|
|
discussion_id: i64,
|
|
project_id: i64,
|
|
body: &str,
|
|
is_system: bool,
|
|
) -> i64 {
|
|
let gitlab_id: i64 = rand::random::<u32>().into();
|
|
conn.execute(
|
|
"INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, author_username, body, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, ?4, 'alice', ?5, 5000, 5000, 5000)",
|
|
rusqlite::params![gitlab_id, discussion_id, project_id, is_system as i32, body],
|
|
)
|
|
.unwrap();
|
|
conn.last_insert_rowid()
|
|
}
|
|
|
|
#[test]
fn test_seed_empty_query_returns_empty() {
    // An empty query string must produce no seeds and no evidence.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let got = seed_timeline(&db, None, "", None, None, 50, 10).await.unwrap();
            assert!(got.seed_entities.is_empty());
            assert!(got.evidence_notes.is_empty());
        });
}
|
|
|
|
#[test]
fn test_seed_no_matches_returns_empty() {
    // A query matching no document text must yield zero seed entities.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 1);
            insert_document(&db, "issue", issue, proj, "unrelated content here");

            let got = seed_timeline(&db, None, "nonexistent_xyzzy_query", None, None, 50, 10)
                .await
                .unwrap();
            assert!(got.seed_entities.is_empty());
        });
}
|
|
|
|
#[test]
fn test_seed_finds_issue() {
    // A matching issue document seeds that issue with its project path.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 42);
            insert_document(&db, "issue", issue, proj, "authentication error in login flow");

            let got = seed_timeline(&db, None, "authentication", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(got.seed_entities.len(), 1);
            let seed = &got.seed_entities[0];
            assert_eq!(seed.entity_type, "issue");
            assert_eq!(seed.entity_iid, 42);
            assert_eq!(seed.project_path, "group/project");
        });
}
|
|
|
|
#[test]
fn test_seed_finds_mr() {
    // A matching merge_request document seeds that MR.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let mr = insert_test_mr(&db, proj, 99);
            insert_document(&db, "merge_request", mr, proj, "fix authentication bug");

            let got = seed_timeline(&db, None, "authentication", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(got.seed_entities.len(), 1);
            let seed = &got.seed_entities[0];
            assert_eq!(seed.entity_type, "merge_request");
            assert_eq!(seed.entity_iid, 99);
        });
}
|
|
|
|
#[test]
fn test_seed_deduplicates_entities() {
    // Two matching documents that resolve to the same issue must collapse
    // into a single seed entity.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 10);

            // First doc references the issue directly, second via a discussion.
            insert_document(&db, "issue", issue, proj, "authentication error first doc");
            let disc = insert_discussion(&db, proj, Some(issue), None);
            insert_document(&db, "discussion", disc, proj, "authentication error second doc");

            let got = seed_timeline(&db, None, "authentication", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(got.seed_entities.len(), 1);
            assert_eq!(got.seed_entities[0].entity_iid, 10);
        });
}
|
|
|
|
#[test]
fn test_seed_resolves_discussion_to_parent() {
    // A matching discussion document seeds its parent issue, not itself.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 7);
            let disc = insert_discussion(&db, proj, Some(issue), None);
            insert_document(&db, "discussion", disc, proj, "deployment pipeline failed");

            let got = seed_timeline(&db, None, "deployment", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(got.seed_entities.len(), 1);
            let seed = &got.seed_entities[0];
            assert_eq!(seed.entity_type, "issue");
            assert_eq!(seed.entity_iid, 7);
        });
}
|
|
|
|
#[test]
fn test_seed_evidence_capped() {
    // With 15 matching notes and an evidence cap of 5, at most 5 evidence
    // notes come back.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 1);

            for n in 0..15 {
                let disc = insert_discussion(&db, proj, Some(issue), None);
                insert_document(&db, "discussion", disc, proj, &format!("deployment issue number {n}"));
                insert_note(&db, disc, proj, &format!("deployment note {n}"), false);
            }

            let got = seed_timeline(&db, None, "deployment", None, None, 50, 5)
                .await
                .unwrap();
            assert!(got.evidence_notes.len() <= 5);
        });
}
|
|
|
|
#[test]
fn test_seed_evidence_snippet_truncated() {
    // A 500-char note body must be truncated to a snippet of at most 200
    // characters in the evidence event.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 1);
            let disc = insert_discussion(&db, proj, Some(issue), None);
            insert_document(&db, "discussion", disc, proj, "deployment configuration");
            insert_note(&db, disc, proj, &"x".repeat(500), false);

            let got = seed_timeline(&db, None, "deployment", None, None, 50, 10)
                .await
                .unwrap();
            assert!(!got.evidence_notes.is_empty());
            match &got.evidence_notes[0].event_type {
                TimelineEventType::NoteEvidence { snippet, .. } => {
                    assert!(snippet.chars().count() <= 200);
                }
                _ => panic!("Expected NoteEvidence"),
            }
        });
}
|
|
|
|
#[test]
fn test_seed_respects_project_filter() {
    // With two projects both matching the query, a project filter must
    // restrict the seeds to the filtered project only.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj1 = insert_test_project(&db);

            // Second project inserted by hand (the helper only seeds project 1).
            db.execute(
                "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (2, 'other/repo', 'https://gitlab.com/other/repo')",
                [],
            )
            .unwrap();
            let proj2 = db.last_insert_rowid();

            let issue1 = insert_test_issue(&db, proj1, 1);
            insert_document(&db, "issue", issue1, proj1, "authentication error");
            let issue2 = insert_test_issue(&db, proj2, 2);
            insert_document(&db, "issue", issue2, proj2, "authentication error");

            // Filter to project 1 only.
            let got = seed_timeline(&db, None, "authentication", Some(proj1), None, 50, 10)
                .await
                .unwrap();
            assert_eq!(got.seed_entities.len(), 1);
            assert_eq!(got.seed_entities[0].project_path, "group/project");
        });
}
|
|
|
|
// ─── Matched discussion tests ───────────────────────────────────────────────
|
|
|
|
#[test]
fn test_seed_captures_matched_discussions_from_discussion_doc() {
    // A matching discussion document must be recorded in
    // `matched_discussions` with its parent issue identified.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 1);
            let disc = insert_discussion(&db, proj, Some(issue), None);
            insert_document(&db, "discussion", disc, proj, "deployment pipeline authentication");

            let got = seed_timeline(&db, None, "deployment", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(got.matched_discussions.len(), 1);
            let hit = &got.matched_discussions[0];
            assert_eq!(hit.discussion_id, disc);
            assert_eq!(hit.entity_type, "issue");
            assert_eq!(hit.entity_id, issue);
        });
}
|
|
|
|
#[test]
fn test_seed_captures_matched_discussions_from_note_doc() {
    // A matching note document resolves upward to its parent discussion.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 1);
            let disc = insert_discussion(&db, proj, Some(issue), None);
            let note = insert_note(&db, disc, proj, "note about deployment", false);
            insert_document(&db, "note", note, proj, "deployment configuration details");

            let got = seed_timeline(&db, None, "deployment", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(
                got.matched_discussions.len(),
                1,
                "Note doc should resolve to parent discussion"
            );
            let hit = &got.matched_discussions[0];
            assert_eq!(hit.discussion_id, disc);
            assert_eq!(hit.entity_type, "issue");
        });
}
|
|
|
|
#[test]
fn test_seed_deduplicates_matched_discussions() {
    // Two matching documents (a discussion doc and a note doc) pointing at
    // the same discussion must produce a single matched_discussions entry.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let issue = insert_test_issue(&db, proj, 1);
            let disc = insert_discussion(&db, proj, Some(issue), None);

            insert_document(&db, "discussion", disc, proj, "deployment pipeline first doc");
            let note = insert_note(&db, disc, proj, "deployment note", false);
            insert_document(&db, "note", note, proj, "deployment pipeline second doc");

            let got = seed_timeline(&db, None, "deployment", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(
                got.matched_discussions.len(),
                1,
                "Same discussion_id from two docs should deduplicate"
            );
        });
}
|
|
|
|
#[test]
fn test_seed_matched_discussions_have_correct_parent_entity() {
    // An MR-attached discussion must report `merge_request` as its parent
    // entity type.
    asupersync::runtime::RuntimeBuilder::new()
        .build()
        .unwrap()
        .block_on(async {
            let db = setup_test_db();
            let proj = insert_test_project(&db);
            let mr = insert_test_mr(&db, proj, 99);
            let disc = insert_discussion(&db, proj, None, Some(mr));
            insert_document(&db, "discussion", disc, proj, "deployment pipeline for merge request");

            let got = seed_timeline(&db, None, "deployment", None, None, 50, 10)
                .await
                .unwrap();
            assert_eq!(got.matched_discussions.len(), 1);
            let hit = &got.matched_discussions[0];
            assert_eq!(hit.entity_type, "merge_request");
            assert_eq!(hit.entity_id, mr);
        });
}
|
|
|
|
// ─── seed_timeline_direct tests ─────────────────────────────────────────────
|
|
|
|
#[test]
fn test_direct_seed_resolves_entity() {
    // Direct seeding by (type, iid) resolves the entity without a search.
    let db = setup_test_db();
    let proj = insert_test_project(&db);
    insert_test_issue(&db, proj, 42);

    let got = seed_timeline_direct(&db, "issue", 42, None).unwrap();
    assert_eq!(got.seed_entities.len(), 1);
    let seed = &got.seed_entities[0];
    assert_eq!(seed.entity_type, "issue");
    assert_eq!(seed.entity_iid, 42);
    assert_eq!(seed.project_path, "group/project");
}
|
|
|
|
#[test]
fn test_direct_seed_gathers_all_discussions() {
    // Direct seeding collects every discussion attached to the entity.
    let db = setup_test_db();
    let proj = insert_test_project(&db);
    let issue = insert_test_issue(&db, proj, 42);

    let expected = [
        insert_discussion(&db, proj, Some(issue), None),
        insert_discussion(&db, proj, Some(issue), None),
        insert_discussion(&db, proj, Some(issue), None),
    ];

    let got = seed_timeline_direct(&db, "issue", 42, None).unwrap();
    assert_eq!(got.matched_discussions.len(), 3);
    for want in expected {
        assert!(got.matched_discussions.iter().any(|d| d.discussion_id == want));
    }
}
|
|
|
|
#[test]
fn test_direct_seed_no_evidence_notes() {
    // Direct seeding has no query to match, so no note evidence is emitted.
    let db = setup_test_db();
    let proj = insert_test_project(&db);
    let issue = insert_test_issue(&db, proj, 42);
    let disc = insert_discussion(&db, proj, Some(issue), None);
    insert_note(&db, disc, proj, "some note body", false);

    let got = seed_timeline_direct(&db, "issue", 42, None).unwrap();
    assert!(
        got.evidence_notes.is_empty(),
        "Direct seeding should not produce evidence notes"
    );
}
|
|
|
|
#[test]
fn test_direct_seed_search_mode_is_direct() {
    // The result must advertise the "direct" search mode.
    let db = setup_test_db();
    let proj = insert_test_project(&db);
    insert_test_issue(&db, proj, 42);

    assert_eq!(
        seed_timeline_direct(&db, "issue", 42, None).unwrap().search_mode,
        "direct"
    );
}
|
|
|
|
#[test]
fn test_direct_seed_not_found() {
    // Asking for an iid that doesn't exist is an error, not an empty result.
    let db = setup_test_db();
    insert_test_project(&db);

    assert!(seed_timeline_direct(&db, "issue", 999, None).is_err());
}
|
|
|
|
#[test]
fn test_direct_seed_mr() {
    // Direct seeding works for merge requests and pulls their discussions.
    let db = setup_test_db();
    let proj = insert_test_project(&db);
    let mr = insert_test_mr(&db, proj, 99);
    let disc = insert_discussion(&db, proj, None, Some(mr));

    let got = seed_timeline_direct(&db, "merge_request", 99, None).unwrap();
    assert_eq!(got.seed_entities.len(), 1);
    let seed = &got.seed_entities[0];
    assert_eq!(seed.entity_type, "merge_request");
    assert_eq!(seed.entity_iid, 99);
    assert_eq!(got.matched_discussions.len(), 1);
    assert_eq!(got.matched_discussions[0].discussion_id, disc);
}
|