refactor: extract unit tests into separate _tests.rs files

Move inline #[cfg(test)] mod tests { ... } blocks from 22 source files
into dedicated _tests.rs companion files, wired via:

    #[cfg(test)]
    #[path = "module_tests.rs"]
    mod tests;

This keeps implementation-focused source files leaner and more scannable
while preserving full access to private items through `use super::*;`.
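
As a minimal sketch of the pattern (the `widget` module and its private
helper are illustrative, not files from this commit):

    // src/core/widget.rs
    pub fn double(n: u32) -> u32 {
        private_helper(n) * 2
    }

    fn private_helper(n: u32) -> u32 {
        n + 1
    }

    #[cfg(test)]
    #[path = "widget_tests.rs"]
    mod tests;

    // src/core/widget_tests.rs
    // Compiled as a child module of `widget`, so `super` is the widget
    // module itself and its private items stay visible.
    use super::*;

    #[test]
    fn private_helper_is_reachable() {
        assert_eq!(private_helper(1), 2);
        assert_eq!(double(1), 4);
    }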
Modules extracted:

- core: db, note_parser, payloads, project, references, sync_run,
  timeline_collect, timeline_expand, timeline_seed
- cli: list (55 tests), who (75 tests)
- documents: extractor (43 tests), regenerator
- embedding: change_detector, chunking
- gitlab: graphql (wiremock async tests; see the sketch after this list),
  transformers/issue
- ingestion: dirty_tracker, discussions, issues, mr_diffs
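
The extracted graphql tests keep their wiremock setup. A sketch of the
shape (the endpoint path, JSON body, and tokio runtime here are
assumptions, not details taken from this diff):

    use wiremock::matchers::{method, path};
    use wiremock::{Mock, MockServer, ResponseTemplate};

    #[tokio::test]
    async fn graphql_query_is_parsed() {
        // Local HTTP server standing in for the GitLab GraphQL endpoint.
        let server = MockServer::start().await;
        Mock::given(method("POST"))
            .and(path("/api/graphql"))
            .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({
                "data": { "project": { "name": "test" } }
            })))
            .mount(&server)
            .await;

        // Point the client under test at server.uri() instead of a real
        // GitLab host, then assert on the parsed response.
    }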
Also adds conflicts_with("explain_score") to the --detail flag in the
who command, so these mutually exclusive flags can no longer be combined.

All 629 unit tests pass. No other behavior changes.
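
A minimal sketch of that flag relationship in clap's builder API (the
surrounding command definition is assumed; only the conflicts_with call
comes from this commit):

    use clap::{Arg, ArgAction, Command};

    let who = Command::new("who")
        .arg(
            Arg::new("explain_score")
                .long("explain-score")
                .action(ArgAction::SetTrue),
        )
        .arg(
            Arg::new("detail")
                .long("detail")
                .action(ArgAction::SetTrue)
                // Reject `--detail --explain-score` at parse time.
                .conflicts_with("explain_score"),
        );

    // try_get_matches_from(["who", "--detail", "--explain-score"]) now
    // fails with an ArgumentConflict error instead of accepting both.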
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
src/core/db.rs (636 lines changed)
@@ -334,637 +334,5 @@ pub fn get_schema_version(conn: &Connection) -> i32 {
}

#[cfg(test)]
mod tests {
    use super::*;

    fn setup_migrated_db() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }

    fn index_exists(conn: &Connection, index_name: &str) -> bool {
        conn.query_row(
            "SELECT COUNT(*) > 0 FROM sqlite_master WHERE type='index' AND name=?1",
            [index_name],
            |row| row.get(0),
        )
        .unwrap_or(false)
    }

    fn column_exists(conn: &Connection, table: &str, column: &str) -> bool {
        let sql = format!("PRAGMA table_info({})", table);
        let mut stmt = conn.prepare(&sql).unwrap();
        let columns: Vec<String> = stmt
            .query_map([], |row| row.get::<_, String>(1))
            .unwrap()
            .filter_map(|r| r.ok())
            .collect();
        columns.contains(&column.to_string())
    }

    #[test]
    fn test_migration_022_indexes_exist() {
        let conn = setup_migrated_db();

        // New indexes from migration 022
        assert!(
            index_exists(&conn, "idx_notes_user_created"),
            "idx_notes_user_created should exist"
        );
        assert!(
            index_exists(&conn, "idx_notes_project_created"),
            "idx_notes_project_created should exist"
        );
        assert!(
            index_exists(&conn, "idx_notes_author_id"),
            "idx_notes_author_id should exist"
        );

        // Discussion JOIN indexes (idx_discussions_issue_id is new;
        // idx_discussions_mr_id already existed from migration 006 but
        // IF NOT EXISTS makes it safe)
        assert!(
            index_exists(&conn, "idx_discussions_issue_id"),
            "idx_discussions_issue_id should exist"
        );
        assert!(
            index_exists(&conn, "idx_discussions_mr_id"),
            "idx_discussions_mr_id should exist"
        );

        // author_id column on notes
        assert!(
            column_exists(&conn, "notes", "author_id"),
            "notes.author_id column should exist"
        );
    }

    // -- Helper: insert a minimal project for FK satisfaction --
    fn insert_test_project(conn: &Connection) -> i64 {
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \
             VALUES (1000, 'test/project', 'https://example.com/test/project')",
            [],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    // -- Helper: insert a minimal issue --
    fn insert_test_issue(conn: &Connection, project_id: i64) -> i64 {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, state, author_username, \
             created_at, updated_at, last_seen_at) \
             VALUES (100, ?1, 1, 'opened', 'alice', 1000, 1000, 1000)",
            [project_id],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    // -- Helper: insert a minimal discussion --
    fn insert_test_discussion(conn: &Connection, project_id: i64, issue_id: i64) -> i64 {
        conn.execute(
            "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, \
             noteable_type, last_seen_at) \
             VALUES ('disc-001', ?1, ?2, 'Issue', 1000)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    // -- Helper: insert a minimal non-system note --
    #[allow(clippy::too_many_arguments)]
    fn insert_test_note(
        conn: &Connection,
        gitlab_id: i64,
        discussion_id: i64,
        project_id: i64,
        is_system: bool,
    ) -> i64 {
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, \
             author_username, body, created_at, updated_at, last_seen_at) \
             VALUES (?1, ?2, ?3, ?4, 'alice', 'note body', 1000, 1000, 1000)",
            rusqlite::params![gitlab_id, discussion_id, project_id, is_system as i32],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    // -- Helper: insert a document --
    fn insert_test_document(
        conn: &Connection,
        source_type: &str,
        source_id: i64,
        project_id: i64,
    ) -> i64 {
        conn.execute(
            "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
             VALUES (?1, ?2, ?3, 'test content', 'hash123')",
            rusqlite::params![source_type, source_id, project_id],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    #[test]
    fn test_migration_024_allows_note_source_type() {
        let conn = setup_migrated_db();
        let pid = insert_test_project(&conn);

        // Should succeed -- 'note' is now allowed
        conn.execute(
            "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
             VALUES ('note', 1, ?1, 'note content', 'hash-note')",
            [pid],
        )
        .expect("INSERT with source_type='note' into documents should succeed");

        // dirty_sources should also accept 'note'
        conn.execute(
            "INSERT INTO dirty_sources (source_type, source_id, queued_at) \
             VALUES ('note', 1, 1000)",
            [],
        )
        .expect("INSERT with source_type='note' into dirty_sources should succeed");
    }

    #[test]
    fn test_migration_024_preserves_existing_data() {
        // Run migrations up to 023 only, insert data, then apply 024
        // Migration 024 is at index 23 (0-based). Use hardcoded index so adding
        // later migrations doesn't silently shift what this test exercises.
        let conn = create_connection(Path::new(":memory:")).unwrap();

        // Apply migrations 001-023 (indices 0..23)
        run_migrations_up_to(&conn, 23);

        let pid = insert_test_project(&conn);

        // Insert a document with existing source_type
        conn.execute(
            "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash, title) \
             VALUES ('issue', 1, ?1, 'issue content', 'hash-issue', 'Test Issue')",
            [pid],
        )
        .unwrap();
        let doc_id: i64 = conn.last_insert_rowid();

        // Insert junction data
        conn.execute(
            "INSERT INTO document_labels (document_id, label_name) VALUES (?1, 'bug')",
            [doc_id],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO document_paths (document_id, path) VALUES (?1, 'src/main.rs')",
            [doc_id],
        )
        .unwrap();

        // Insert dirty_sources row
        conn.execute(
            "INSERT INTO dirty_sources (source_type, source_id, queued_at) VALUES ('issue', 1, 1000)",
            [],
        )
        .unwrap();

        // Now apply migration 024 (index 23) -- the table-rebuild migration
        run_single_migration(&conn, 23);

        // Verify document still exists with correct data
        let (st, content, title): (String, String, String) = conn
            .query_row(
                "SELECT source_type, content_text, title FROM documents WHERE id = ?1",
                [doc_id],
                |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
            )
            .unwrap();
        assert_eq!(st, "issue");
        assert_eq!(content, "issue content");
        assert_eq!(title, "Test Issue");

        // Verify junction data preserved
        let label_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM document_labels WHERE document_id = ?1",
                [doc_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(label_count, 1);

        let path_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM document_paths WHERE document_id = ?1",
                [doc_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(path_count, 1);

        // Verify dirty_sources preserved
        let dirty_count: i64 = conn
            .query_row("SELECT COUNT(*) FROM dirty_sources", [], |row| row.get(0))
            .unwrap();
        assert_eq!(dirty_count, 1);
    }

    #[test]
    fn test_migration_024_fts_triggers_intact() {
        let conn = setup_migrated_db();
        let pid = insert_test_project(&conn);

        // Insert a document after migration -- FTS trigger should fire
        let doc_id = insert_test_document(&conn, "note", 1, pid);

        // Verify FTS entry exists
        let fts_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents_fts WHERE documents_fts MATCH 'test'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert!(fts_count > 0, "FTS trigger should have created an entry");

        // Verify update trigger works
        conn.execute(
            "UPDATE documents SET content_text = 'updated content' WHERE id = ?1",
            [doc_id],
        )
        .unwrap();

        let fts_updated: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents_fts WHERE documents_fts MATCH 'updated'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert!(
            fts_updated > 0,
            "FTS update trigger should reflect new content"
        );

        // Verify delete trigger works
        conn.execute("DELETE FROM documents WHERE id = ?1", [doc_id])
            .unwrap();

        let fts_after_delete: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents_fts WHERE documents_fts MATCH 'updated'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            fts_after_delete, 0,
            "FTS delete trigger should remove the entry"
        );
    }

    #[test]
    fn test_migration_024_row_counts_preserved() {
        let conn = setup_migrated_db();

        // After full migration, tables should exist and be queryable
        let doc_count: i64 = conn
            .query_row("SELECT COUNT(*) FROM documents", [], |row| row.get(0))
            .unwrap();
        assert_eq!(doc_count, 0, "Fresh DB should have 0 documents");

        let dirty_count: i64 = conn
            .query_row("SELECT COUNT(*) FROM dirty_sources", [], |row| row.get(0))
            .unwrap();
        assert_eq!(dirty_count, 0, "Fresh DB should have 0 dirty_sources");
    }

    #[test]
    fn test_migration_024_integrity_checks_pass() {
        let conn = setup_migrated_db();

        // PRAGMA integrity_check
        let integrity: String = conn
            .query_row("PRAGMA integrity_check", [], |row| row.get(0))
            .unwrap();
        assert_eq!(integrity, "ok", "Database integrity check should pass");

        // PRAGMA foreign_key_check (returns rows only if there are violations)
        let fk_violations: i64 = conn
            .query_row("SELECT COUNT(*) FROM pragma_foreign_key_check", [], |row| {
                row.get(0)
            })
            .unwrap();
        assert_eq!(fk_violations, 0, "No foreign key violations should exist");
    }

    #[test]
    fn test_migration_024_note_delete_trigger_cleans_document() {
        let conn = setup_migrated_db();
        let pid = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, pid);
        let disc_id = insert_test_discussion(&conn, pid, issue_id);
        let note_id = insert_test_note(&conn, 200, disc_id, pid, false);

        // Create a document for this note
        insert_test_document(&conn, "note", note_id, pid);

        let doc_before: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
                [note_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(doc_before, 1);

        // Delete the note -- trigger should remove the document
        conn.execute("DELETE FROM notes WHERE id = ?1", [note_id])
            .unwrap();

        let doc_after: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
                [note_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            doc_after, 0,
            "notes_ad_cleanup trigger should delete the document"
        );
    }

    #[test]
    fn test_migration_024_note_system_flip_trigger_cleans_document() {
        let conn = setup_migrated_db();
        let pid = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, pid);
        let disc_id = insert_test_discussion(&conn, pid, issue_id);
        let note_id = insert_test_note(&conn, 201, disc_id, pid, false);

        // Create a document for this note
        insert_test_document(&conn, "note", note_id, pid);

        let doc_before: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
                [note_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(doc_before, 1);

        // Flip is_system from 0 to 1 -- trigger should remove the document
        conn.execute("UPDATE notes SET is_system = 1 WHERE id = ?1", [note_id])
            .unwrap();

        let doc_after: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
                [note_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            doc_after, 0,
            "notes_au_system_cleanup trigger should delete the document"
        );
    }

    #[test]
    fn test_migration_024_system_note_delete_trigger_does_not_fire() {
        let conn = setup_migrated_db();
        let pid = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, pid);
        let disc_id = insert_test_discussion(&conn, pid, issue_id);

        // Insert a system note (is_system = true)
        let note_id = insert_test_note(&conn, 202, disc_id, pid, true);

        // Manually insert a document (shouldn't exist for system notes in practice,
        // but we test the trigger guard)
        insert_test_document(&conn, "note", note_id, pid);

        let doc_before: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
                [note_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(doc_before, 1);

        // Delete system note -- trigger has WHEN old.is_system = 0 so it should NOT fire
        conn.execute("DELETE FROM notes WHERE id = ?1", [note_id])
            .unwrap();

        let doc_after: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
                [note_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            doc_after, 1,
            "notes_ad_cleanup trigger should NOT fire for system notes"
        );
    }

    /// Run migrations only up to version `up_to` (inclusive).
    fn run_migrations_up_to(conn: &Connection, up_to: usize) {
        conn.execute_batch(
            "CREATE TABLE IF NOT EXISTS schema_version ( \
             version INTEGER PRIMARY KEY, applied_at INTEGER NOT NULL, description TEXT);",
        )
        .unwrap();

        for (version_str, sql) in &MIGRATIONS[..up_to] {
            let version: i32 = version_str.parse().unwrap();
            conn.execute_batch(sql).unwrap();
            conn.execute(
                "INSERT OR REPLACE INTO schema_version (version, applied_at, description) \
                 VALUES (?1, strftime('%s', 'now') * 1000, ?2)",
                rusqlite::params![version, version_str],
            )
            .unwrap();
        }
    }

    /// Run a single migration by index (0-based).
    fn run_single_migration(conn: &Connection, index: usize) {
        let (version_str, sql) = MIGRATIONS[index];
        let version: i32 = version_str.parse().unwrap();
        conn.execute_batch(sql).unwrap();
        conn.execute(
            "INSERT OR REPLACE INTO schema_version (version, applied_at, description) \
             VALUES (?1, strftime('%s', 'now') * 1000, ?2)",
            rusqlite::params![version, version_str],
        )
        .unwrap();
    }

    #[test]
    fn test_migration_025_backfills_existing_notes() {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        // Run all migrations through 024 (index 0..24)
        run_migrations_up_to(&conn, 24);

        let pid = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, pid);
        let disc_id = insert_test_discussion(&conn, pid, issue_id);

        // Insert 5 non-system notes
        for i in 1..=5 {
            insert_test_note(&conn, 300 + i, disc_id, pid, false);
        }
        // Insert 2 system notes
        for i in 1..=2 {
            insert_test_note(&conn, 400 + i, disc_id, pid, true);
        }

        // Run migration 025
        run_single_migration(&conn, 24);

        let dirty_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            dirty_count, 5,
            "Migration 025 should backfill 5 non-system notes"
        );

        // Verify system notes were not backfilled
        let system_note_ids: Vec<i64> = {
            let mut stmt = conn
                .prepare(
                    "SELECT source_id FROM dirty_sources WHERE source_type = 'note' ORDER BY source_id",
                )
                .unwrap();
            stmt.query_map([], |row| row.get(0))
                .unwrap()
                .collect::<std::result::Result<Vec<_>, _>>()
                .unwrap()
        };
        // System note ids should not appear
        let all_system_note_ids: Vec<i64> = {
            let mut stmt = conn
                .prepare("SELECT id FROM notes WHERE is_system = 1 ORDER BY id")
                .unwrap();
            stmt.query_map([], |row| row.get(0))
                .unwrap()
                .collect::<std::result::Result<Vec<_>, _>>()
                .unwrap()
        };
        for sys_id in &all_system_note_ids {
            assert!(
                !system_note_ids.contains(sys_id),
                "System note id {} should not be in dirty_sources",
                sys_id
            );
        }
    }

    #[test]
    fn test_migration_025_idempotent_with_existing_documents() {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations_up_to(&conn, 24);

        let pid = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, pid);
        let disc_id = insert_test_discussion(&conn, pid, issue_id);

        // Insert 3 non-system notes
        let note_ids: Vec<i64> = (1..=3)
            .map(|i| insert_test_note(&conn, 500 + i, disc_id, pid, false))
            .collect();

        // Create documents for 2 of 3 notes (simulating already-generated docs)
        insert_test_document(&conn, "note", note_ids[0], pid);
        insert_test_document(&conn, "note", note_ids[1], pid);

        // Run migration 025
        run_single_migration(&conn, 24);

        let dirty_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            dirty_count, 1,
            "Only the note without a document should be backfilled"
        );

        // Verify the correct note was queued
        let queued_id: i64 = conn
            .query_row(
                "SELECT source_id FROM dirty_sources WHERE source_type = 'note'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(queued_id, note_ids[2]);
    }

    #[test]
    fn test_migration_025_skips_notes_already_in_dirty_queue() {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations_up_to(&conn, 24);

        let pid = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, pid);
        let disc_id = insert_test_discussion(&conn, pid, issue_id);

        // Insert 3 non-system notes
        let note_ids: Vec<i64> = (1..=3)
            .map(|i| insert_test_note(&conn, 600 + i, disc_id, pid, false))
            .collect();

        // Pre-queue one note in dirty_sources
        conn.execute(
            "INSERT INTO dirty_sources (source_type, source_id, queued_at) VALUES ('note', ?1, 999)",
            [note_ids[0]],
        )
        .unwrap();

        // Run migration 025
        run_single_migration(&conn, 24);

        let dirty_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            dirty_count, 3,
            "All 3 notes should be in dirty_sources (1 pre-existing + 2 new)"
        );

        // Verify the pre-existing entry preserved its original queued_at
        let original_queued_at: i64 = conn
            .query_row(
                "SELECT queued_at FROM dirty_sources WHERE source_type = 'note' AND source_id = ?1",
                [note_ids[0]],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            original_queued_at, 999,
            "ON CONFLICT DO NOTHING should preserve the original queued_at"
        );
    }
}
#[path = "db_tests.rs"]
mod tests;
src/core/db_tests.rs (new file, 632 lines)

@@ -0,0 +1,632 @@
(File body: the test module moved verbatim out of src/core/db.rs above; `use super::*;` followed by the same helpers and tests, with the `mod tests { ... }` wrapper and one level of indentation removed.)
src/core/note_parser.rs

@@ -234,330 +234,5 @@ fn resolve_cross_project_entity(
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_mentioned_in_mr() {
        let refs = parse_cross_refs("mentioned in !567");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].reference_type, "mentioned");
        assert_eq!(refs[0].target_entity_type, "merge_request");
        assert_eq!(refs[0].target_iid, 567);
        assert!(refs[0].target_project_path.is_none());
    }

    #[test]
    fn test_parse_mentioned_in_issue() {
        let refs = parse_cross_refs("mentioned in #234");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].reference_type, "mentioned");
        assert_eq!(refs[0].target_entity_type, "issue");
        assert_eq!(refs[0].target_iid, 234);
        assert!(refs[0].target_project_path.is_none());
    }

    #[test]
    fn test_parse_mentioned_cross_project() {
        let refs = parse_cross_refs("mentioned in group/repo!789");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].reference_type, "mentioned");
        assert_eq!(refs[0].target_entity_type, "merge_request");
        assert_eq!(refs[0].target_iid, 789);
        assert_eq!(refs[0].target_project_path.as_deref(), Some("group/repo"));
    }

    #[test]
    fn test_parse_mentioned_cross_project_issue() {
        let refs = parse_cross_refs("mentioned in group/repo#123");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].reference_type, "mentioned");
        assert_eq!(refs[0].target_entity_type, "issue");
        assert_eq!(refs[0].target_iid, 123);
        assert_eq!(refs[0].target_project_path.as_deref(), Some("group/repo"));
    }

    #[test]
    fn test_parse_closed_by_mr() {
        let refs = parse_cross_refs("closed by !567");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].reference_type, "closes");
        assert_eq!(refs[0].target_entity_type, "merge_request");
        assert_eq!(refs[0].target_iid, 567);
        assert!(refs[0].target_project_path.is_none());
    }

    #[test]
    fn test_parse_closed_by_cross_project() {
        let refs = parse_cross_refs("closed by group/repo!789");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].reference_type, "closes");
        assert_eq!(refs[0].target_entity_type, "merge_request");
        assert_eq!(refs[0].target_iid, 789);
        assert_eq!(refs[0].target_project_path.as_deref(), Some("group/repo"));
    }

    #[test]
    fn test_parse_multiple_refs() {
        let refs = parse_cross_refs("mentioned in !123 and mentioned in #456");
        assert_eq!(refs.len(), 2);
        assert_eq!(refs[0].target_entity_type, "merge_request");
        assert_eq!(refs[0].target_iid, 123);
        assert_eq!(refs[1].target_entity_type, "issue");
        assert_eq!(refs[1].target_iid, 456);
    }

    #[test]
    fn test_parse_no_refs() {
        let refs = parse_cross_refs("Updated the description");
        assert!(refs.is_empty());
    }

    #[test]
    fn test_parse_non_english_note() {
        let refs = parse_cross_refs("a ajout\u{00e9} l'\u{00e9}tiquette ~bug");
        assert!(refs.is_empty());
    }

    #[test]
    fn test_parse_multi_level_group_path() {
        let refs = parse_cross_refs("mentioned in top/sub/project#123");
        assert_eq!(refs.len(), 1);
        assert_eq!(
            refs[0].target_project_path.as_deref(),
            Some("top/sub/project")
        );
        assert_eq!(refs[0].target_iid, 123);
    }

    #[test]
    fn test_parse_deeply_nested_group_path() {
        let refs = parse_cross_refs("mentioned in a/b/c/d/e!42");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].target_project_path.as_deref(), Some("a/b/c/d/e"));
        assert_eq!(refs[0].target_iid, 42);
    }

    #[test]
    fn test_parse_hyphenated_project_path() {
        let refs = parse_cross_refs("mentioned in my-group/my-project#99");
        assert_eq!(refs.len(), 1);
        assert_eq!(
            refs[0].target_project_path.as_deref(),
            Some("my-group/my-project")
        );
    }

    #[test]
    fn test_parse_dotted_project_path() {
        let refs = parse_cross_refs("mentioned in visiostack.io/backend#123");
        assert_eq!(refs.len(), 1);
        assert_eq!(
            refs[0].target_project_path.as_deref(),
            Some("visiostack.io/backend")
        );
        assert_eq!(refs[0].target_iid, 123);
    }

    #[test]
    fn test_parse_dotted_nested_project_path() {
        let refs = parse_cross_refs("closed by my.org/sub.group/my.project!42");
        assert_eq!(refs.len(), 1);
        assert_eq!(
            refs[0].target_project_path.as_deref(),
            Some("my.org/sub.group/my.project")
        );
        assert_eq!(refs[0].target_entity_type, "merge_request");
        assert_eq!(refs[0].target_iid, 42);
    }

    #[test]
    fn test_parse_self_reference_is_valid() {
        let refs = parse_cross_refs("mentioned in #123");
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0].target_iid, 123);
    }

    #[test]
    fn test_parse_mixed_mentioned_and_closed() {
        let refs = parse_cross_refs("mentioned in !10 and closed by !20");
        assert_eq!(refs.len(), 2);
        assert_eq!(refs[0].reference_type, "mentioned");
        assert_eq!(refs[0].target_iid, 10);
        assert_eq!(refs[1].reference_type, "closes");
        assert_eq!(refs[1].target_iid, 20);
    }

    fn setup_test_db() -> Connection {
        use crate::core::db::{create_connection, run_migrations};

        let conn = create_connection(std::path::Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }

    fn seed_test_data(conn: &Connection) -> i64 {
        let now = now_ms();

        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
             VALUES (1, 100, 'group/test-project', 'https://gitlab.com/group/test-project', ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (10, 1000, 1, 123, 'Test Issue', 'opened', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (11, 1001, 1, 456, 'Another Issue', 'opened', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, source_branch, target_branch, author_username, created_at, updated_at, last_seen_at)
             VALUES (20, 2000, 1, 789, 'Test MR', 'opened', 'feat', 'main', 'dev', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at)
             VALUES (30, 'disc-aaa', 1, 10, 'Issue', ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO discussions (id, gitlab_discussion_id, project_id, merge_request_id, noteable_type, last_seen_at)
             VALUES (31, 'disc-bbb', 1, 20, 'MergeRequest', ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
             VALUES (40, 4000, 30, 1, 1, 'mentioned in !789', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
             VALUES (41, 4001, 31, 1, 1, 'mentioned in #456', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
             VALUES (42, 4002, 30, 1, 0, 'mentioned in !999', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
             VALUES (43, 4003, 30, 1, 1, 'added label ~bug', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
             VALUES (44, 4004, 30, 1, 1, 'mentioned in other/project#999', ?1, ?1, ?1)",
            [now],
        )
        .unwrap();

        1
    }

    #[test]
    fn test_extract_refs_from_system_notes_integration() {
        let conn = setup_test_db();
        let project_id = seed_test_data(&conn);

        let result = extract_refs_from_system_notes(&conn, project_id).unwrap();

        assert_eq!(result.inserted, 2, "Two same-project refs should resolve");
        assert_eq!(
            result.skipped_unresolvable, 1,
            "One cross-project ref should be unresolvable"
        );
        assert_eq!(
            result.parse_failures, 1,
            "One system note has no cross-ref pattern"
        );

        let ref_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM entity_references WHERE project_id = ?1 AND source_method = 'note_parse'",
                [project_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(ref_count, 3, "Should have 3 entity_references rows total");

        let unresolved_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM entity_references WHERE target_entity_id IS NULL AND source_method = 'note_parse'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            unresolved_count, 1,
            "Should have 1 unresolved cross-project ref"
        );

        let (path, iid): (String, i64) = conn
            .query_row(
                "SELECT target_project_path, target_entity_iid FROM entity_references WHERE target_entity_id IS NULL",
                [],
                |row| Ok((row.get(0)?, row.get(1)?)),
            )
            .unwrap();
        assert_eq!(path, "other/project");
        assert_eq!(iid, 999);
    }

    #[test]
    fn test_extract_refs_idempotent() {
        let conn = setup_test_db();
        let project_id = seed_test_data(&conn);

        let result1 = extract_refs_from_system_notes(&conn, project_id).unwrap();
        let result2 = extract_refs_from_system_notes(&conn, project_id).unwrap();

        assert_eq!(result2.inserted, 0);
        assert_eq!(result2.skipped_unresolvable, 0);

        let total: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM entity_references WHERE source_method = 'note_parse'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            total,
            (result1.inserted + result1.skipped_unresolvable) as i64
        );
    }

    #[test]
    fn test_extract_refs_empty_project() {
        let conn = setup_test_db();
        let result = extract_refs_from_system_notes(&conn, 999).unwrap();
        assert_eq!(result.inserted, 0);
        assert_eq!(result.skipped_unresolvable, 0);
        assert_eq!(result.parse_failures, 0);
    }
}
#[path = "note_parser_tests.rs"]
mod tests;
src/core/note_parser_tests.rs (new file, 325 lines)

@@ -0,0 +1,325 @@
use super::*;

#[test]
fn test_parse_mentioned_in_mr() {
    let refs = parse_cross_refs("mentioned in !567");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].reference_type, "mentioned");
    assert_eq!(refs[0].target_entity_type, "merge_request");
    assert_eq!(refs[0].target_iid, 567);
    assert!(refs[0].target_project_path.is_none());
}

#[test]
fn test_parse_mentioned_in_issue() {
    let refs = parse_cross_refs("mentioned in #234");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].reference_type, "mentioned");
    assert_eq!(refs[0].target_entity_type, "issue");
    assert_eq!(refs[0].target_iid, 234);
    assert!(refs[0].target_project_path.is_none());
}

#[test]
fn test_parse_mentioned_cross_project() {
    let refs = parse_cross_refs("mentioned in group/repo!789");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].reference_type, "mentioned");
    assert_eq!(refs[0].target_entity_type, "merge_request");
    assert_eq!(refs[0].target_iid, 789);
    assert_eq!(refs[0].target_project_path.as_deref(), Some("group/repo"));
}

#[test]
fn test_parse_mentioned_cross_project_issue() {
    let refs = parse_cross_refs("mentioned in group/repo#123");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].reference_type, "mentioned");
    assert_eq!(refs[0].target_entity_type, "issue");
    assert_eq!(refs[0].target_iid, 123);
    assert_eq!(refs[0].target_project_path.as_deref(), Some("group/repo"));
}

#[test]
fn test_parse_closed_by_mr() {
    let refs = parse_cross_refs("closed by !567");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].reference_type, "closes");
    assert_eq!(refs[0].target_entity_type, "merge_request");
    assert_eq!(refs[0].target_iid, 567);
    assert!(refs[0].target_project_path.is_none());
}

#[test]
fn test_parse_closed_by_cross_project() {
    let refs = parse_cross_refs("closed by group/repo!789");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].reference_type, "closes");
    assert_eq!(refs[0].target_entity_type, "merge_request");
    assert_eq!(refs[0].target_iid, 789);
    assert_eq!(refs[0].target_project_path.as_deref(), Some("group/repo"));
}

#[test]
fn test_parse_multiple_refs() {
    let refs = parse_cross_refs("mentioned in !123 and mentioned in #456");
    assert_eq!(refs.len(), 2);
    assert_eq!(refs[0].target_entity_type, "merge_request");
    assert_eq!(refs[0].target_iid, 123);
    assert_eq!(refs[1].target_entity_type, "issue");
    assert_eq!(refs[1].target_iid, 456);
}

#[test]
fn test_parse_no_refs() {
    let refs = parse_cross_refs("Updated the description");
    assert!(refs.is_empty());
}

#[test]
fn test_parse_non_english_note() {
    let refs = parse_cross_refs("a ajout\u{00e9} l'\u{00e9}tiquette ~bug");
    assert!(refs.is_empty());
}

#[test]
fn test_parse_multi_level_group_path() {
    let refs = parse_cross_refs("mentioned in top/sub/project#123");
    assert_eq!(refs.len(), 1);
    assert_eq!(
        refs[0].target_project_path.as_deref(),
        Some("top/sub/project")
    );
    assert_eq!(refs[0].target_iid, 123);
}

#[test]
fn test_parse_deeply_nested_group_path() {
    let refs = parse_cross_refs("mentioned in a/b/c/d/e!42");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].target_project_path.as_deref(), Some("a/b/c/d/e"));
    assert_eq!(refs[0].target_iid, 42);
}

#[test]
fn test_parse_hyphenated_project_path() {
    let refs = parse_cross_refs("mentioned in my-group/my-project#99");
    assert_eq!(refs.len(), 1);
    assert_eq!(
        refs[0].target_project_path.as_deref(),
        Some("my-group/my-project")
    );
}

#[test]
fn test_parse_dotted_project_path() {
    let refs = parse_cross_refs("mentioned in visiostack.io/backend#123");
    assert_eq!(refs.len(), 1);
    assert_eq!(
        refs[0].target_project_path.as_deref(),
        Some("visiostack.io/backend")
    );
    assert_eq!(refs[0].target_iid, 123);
}

#[test]
fn test_parse_dotted_nested_project_path() {
    let refs = parse_cross_refs("closed by my.org/sub.group/my.project!42");
    assert_eq!(refs.len(), 1);
    assert_eq!(
        refs[0].target_project_path.as_deref(),
        Some("my.org/sub.group/my.project")
    );
    assert_eq!(refs[0].target_entity_type, "merge_request");
    assert_eq!(refs[0].target_iid, 42);
}

#[test]
fn test_parse_self_reference_is_valid() {
    let refs = parse_cross_refs("mentioned in #123");
    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].target_iid, 123);
}

#[test]
fn test_parse_mixed_mentioned_and_closed() {
    let refs = parse_cross_refs("mentioned in !10 and closed by !20");
    assert_eq!(refs.len(), 2);
    assert_eq!(refs[0].reference_type, "mentioned");
    assert_eq!(refs[0].target_iid, 10);
    assert_eq!(refs[1].reference_type, "closes");
    assert_eq!(refs[1].target_iid, 20);
}

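// The helpers below build the integration fixture: an in-memory SQLite
// database with the full migration set applied, plus seed rows covering each
// extraction case.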
fn setup_test_db() -> Connection {
    use crate::core::db::{create_connection, run_migrations};

    let conn = create_connection(std::path::Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}

fn seed_test_data(conn: &Connection) -> i64 {
    let now = now_ms();

    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
         VALUES (1, 100, 'group/test-project', 'https://gitlab.com/group/test-project', ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
         VALUES (10, 1000, 1, 123, 'Test Issue', 'opened', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
         VALUES (11, 1001, 1, 456, 'Another Issue', 'opened', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, source_branch, target_branch, author_username, created_at, updated_at, last_seen_at)
         VALUES (20, 2000, 1, 789, 'Test MR', 'opened', 'feat', 'main', 'dev', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at)
         VALUES (30, 'disc-aaa', 1, 10, 'Issue', ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO discussions (id, gitlab_discussion_id, project_id, merge_request_id, noteable_type, last_seen_at)
         VALUES (31, 'disc-bbb', 1, 20, 'MergeRequest', ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
         VALUES (40, 4000, 30, 1, 1, 'mentioned in !789', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
         VALUES (41, 4001, 31, 1, 1, 'mentioned in #456', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
         VALUES (42, 4002, 30, 1, 0, 'mentioned in !999', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
         VALUES (43, 4003, 30, 1, 1, 'added label ~bug', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, is_system, body, created_at, updated_at, last_seen_at)
         VALUES (44, 4004, 30, 1, 1, 'mentioned in other/project#999', ?1, ?1, ?1)",
        [now],
    )
    .unwrap();

    1
}

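// Seeded notes: two resolvable same-project refs (notes 40, 41), one
// non-system note that must be ignored (42), one system note with no
// cross-ref pattern (43), and one unresolvable cross-project ref (44).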
#[test]
fn test_extract_refs_from_system_notes_integration() {
    let conn = setup_test_db();
    let project_id = seed_test_data(&conn);

    let result = extract_refs_from_system_notes(&conn, project_id).unwrap();

    assert_eq!(result.inserted, 2, "Two same-project refs should resolve");
    assert_eq!(
        result.skipped_unresolvable, 1,
        "One cross-project ref should be unresolvable"
    );
    assert_eq!(
        result.parse_failures, 1,
        "One system note has no cross-ref pattern"
    );

    let ref_count: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM entity_references WHERE project_id = ?1 AND source_method = 'note_parse'",
            [project_id],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(ref_count, 3, "Should have 3 entity_references rows total");

    let unresolved_count: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM entity_references WHERE target_entity_id IS NULL AND source_method = 'note_parse'",
            [],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(
        unresolved_count, 1,
        "Should have 1 unresolved cross-project ref"
    );

    let (path, iid): (String, i64) = conn
        .query_row(
            "SELECT target_project_path, target_entity_iid FROM entity_references WHERE target_entity_id IS NULL",
            [],
            |row| Ok((row.get(0)?, row.get(1)?)),
        )
        .unwrap();
    assert_eq!(path, "other/project");
    assert_eq!(iid, 999);
}

#[test]
fn test_extract_refs_idempotent() {
    let conn = setup_test_db();
    let project_id = seed_test_data(&conn);

    let result1 = extract_refs_from_system_notes(&conn, project_id).unwrap();
    let result2 = extract_refs_from_system_notes(&conn, project_id).unwrap();

    assert_eq!(result2.inserted, 0);
    assert_eq!(result2.skipped_unresolvable, 0);

    let total: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM entity_references WHERE source_method = 'note_parse'",
            [],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(
        total,
        (result1.inserted + result1.skipped_unresolvable) as i64
    );
}

#[test]
fn test_extract_refs_empty_project() {
    let conn = setup_test_db();
    let result = extract_refs_from_system_notes(&conn, 999).unwrap();
    assert_eq!(result.inserted, 0);
    assert_eq!(result.skipped_unresolvable, 0);
    assert_eq!(result.parse_failures, 0);
}
@@ -95,110 +95,5 @@ pub fn read_payload(conn: &Connection, id: i64) -> Result<Option<serde_json::Val
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::db::create_connection;
    use tempfile::tempdir;

    fn setup_test_db() -> Connection {
        let dir = tempdir().unwrap();
        let db_path = dir.path().join("test.db");
        let conn = create_connection(&db_path).unwrap();

        conn.execute_batch(
            "CREATE TABLE raw_payloads (
                id INTEGER PRIMARY KEY,
                source TEXT NOT NULL,
                project_id INTEGER,
                resource_type TEXT NOT NULL,
                gitlab_id TEXT NOT NULL,
                fetched_at INTEGER NOT NULL,
                content_encoding TEXT NOT NULL DEFAULT 'identity',
                payload_hash TEXT NOT NULL,
                payload BLOB NOT NULL
            );
            CREATE UNIQUE INDEX uq_raw_payloads_dedupe
                ON raw_payloads(project_id, resource_type, gitlab_id, payload_hash);",
        )
        .unwrap();

        conn
    }

    #[test]
    fn test_store_and_read_payload() {
        let conn = setup_test_db();
        let payload = serde_json::json!({"title": "Test Issue", "id": 123});
        let json_bytes = serde_json::to_vec(&payload).unwrap();

        let id = store_payload(
            &conn,
            StorePayloadOptions {
                project_id: Some(1),
                resource_type: "issue",
                gitlab_id: "123",
                json_bytes: &json_bytes,
                compress: false,
            },
        )
        .unwrap();

        let result = read_payload(&conn, id).unwrap().unwrap();
        assert_eq!(result["title"], "Test Issue");
    }

    #[test]
    fn test_compression_roundtrip() {
        let conn = setup_test_db();
        let payload = serde_json::json!({"data": "x".repeat(1000)});
        let json_bytes = serde_json::to_vec(&payload).unwrap();

        let id = store_payload(
            &conn,
            StorePayloadOptions {
                project_id: Some(1),
                resource_type: "issue",
                gitlab_id: "456",
                json_bytes: &json_bytes,
                compress: true,
            },
        )
        .unwrap();

        let result = read_payload(&conn, id).unwrap().unwrap();
        assert_eq!(result["data"], "x".repeat(1000));
    }

    #[test]
    fn test_deduplication() {
        let conn = setup_test_db();
        let payload = serde_json::json!({"id": 789});
        let json_bytes = serde_json::to_vec(&payload).unwrap();

        let id1 = store_payload(
            &conn,
            StorePayloadOptions {
                project_id: Some(1),
                resource_type: "issue",
                gitlab_id: "789",
                json_bytes: &json_bytes,
                compress: false,
            },
        )
        .unwrap();

        let id2 = store_payload(
            &conn,
            StorePayloadOptions {
                project_id: Some(1),
                resource_type: "issue",
                gitlab_id: "789",
                json_bytes: &json_bytes,
                compress: false,
            },
        )
        .unwrap();

        assert_eq!(id1, id2);
    }
}
#[path = "payloads_tests.rs"]
mod tests;
105
src/core/payloads_tests.rs
Normal file
@@ -0,0 +1,105 @@
use super::*;
use crate::core::db::create_connection;
use tempfile::tempdir;

fn setup_test_db() -> Connection {
    let dir = tempdir().unwrap();
    let db_path = dir.path().join("test.db");
    let conn = create_connection(&db_path).unwrap();

    conn.execute_batch(
        "CREATE TABLE raw_payloads (
            id INTEGER PRIMARY KEY,
            source TEXT NOT NULL,
            project_id INTEGER,
            resource_type TEXT NOT NULL,
            gitlab_id TEXT NOT NULL,
            fetched_at INTEGER NOT NULL,
            content_encoding TEXT NOT NULL DEFAULT 'identity',
            payload_hash TEXT NOT NULL,
            payload BLOB NOT NULL
        );
        CREATE UNIQUE INDEX uq_raw_payloads_dedupe
            ON raw_payloads(project_id, resource_type, gitlab_id, payload_hash);",
    )
    .unwrap();

    conn
}

#[test]
fn test_store_and_read_payload() {
    let conn = setup_test_db();
    let payload = serde_json::json!({"title": "Test Issue", "id": 123});
    let json_bytes = serde_json::to_vec(&payload).unwrap();

    let id = store_payload(
        &conn,
        StorePayloadOptions {
            project_id: Some(1),
            resource_type: "issue",
            gitlab_id: "123",
            json_bytes: &json_bytes,
            compress: false,
        },
    )
    .unwrap();

    let result = read_payload(&conn, id).unwrap().unwrap();
    assert_eq!(result["title"], "Test Issue");
}

#[test]
fn test_compression_roundtrip() {
    let conn = setup_test_db();
    let payload = serde_json::json!({"data": "x".repeat(1000)});
    let json_bytes = serde_json::to_vec(&payload).unwrap();

    let id = store_payload(
        &conn,
        StorePayloadOptions {
            project_id: Some(1),
            resource_type: "issue",
            gitlab_id: "456",
            json_bytes: &json_bytes,
            compress: true,
        },
    )
    .unwrap();

    let result = read_payload(&conn, id).unwrap().unwrap();
    assert_eq!(result["data"], "x".repeat(1000));
}

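// Identical (project_id, resource_type, gitlab_id, payload) submissions hit
// the uq_raw_payloads_dedupe unique index, so the second store returns the
// existing row's id rather than inserting a new row.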
#[test]
fn test_deduplication() {
    let conn = setup_test_db();
    let payload = serde_json::json!({"id": 789});
    let json_bytes = serde_json::to_vec(&payload).unwrap();

    let id1 = store_payload(
        &conn,
        StorePayloadOptions {
            project_id: Some(1),
            resource_type: "issue",
            gitlab_id: "789",
            json_bytes: &json_bytes,
            compress: false,
        },
    )
    .unwrap();

    let id2 = store_payload(
        &conn,
        StorePayloadOptions {
            project_id: Some(1),
            resource_type: "issue",
            gitlab_id: "789",
            json_bytes: &json_bytes,
            compress: false,
        },
    )
    .unwrap();

    assert_eq!(id1, id2);
}
@@ -114,161 +114,5 @@ fn escape_like(input: &str) -> String {
}

#[cfg(test)]
mod tests {
    use super::*;

    fn setup_db() -> Connection {
        let conn = Connection::open_in_memory().unwrap();
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL,
                default_branch TEXT,
                web_url TEXT,
                created_at INTEGER,
                updated_at INTEGER,
                raw_payload_id INTEGER
            );
            CREATE INDEX idx_projects_path ON projects(path_with_namespace);
            ",
        )
        .unwrap();
        conn
    }

    fn insert_project(conn: &Connection, id: i64, path: &str) {
        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (?1, ?2, ?3)",
            rusqlite::params![id, id * 100, path],
        )
        .unwrap();
    }

    #[test]
    fn test_exact_match() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/auth-service");
        let id = resolve_project(&conn, "backend/auth-service").unwrap();
        assert_eq!(id, 1);
    }

    #[test]
    fn test_case_insensitive() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/auth-service");
        let id = resolve_project(&conn, "Backend/Auth-Service").unwrap();
        assert_eq!(id, 1);
    }

    #[test]
    fn test_suffix_unambiguous() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/auth-service");
        insert_project(&conn, 2, "frontend/web-ui");
        let id = resolve_project(&conn, "auth-service").unwrap();
        assert_eq!(id, 1);
    }

    #[test]
    fn test_suffix_ambiguous() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/auth-service");
        insert_project(&conn, 2, "frontend/auth-service");
        let err = resolve_project(&conn, "auth-service").unwrap_err();
        let msg = err.to_string();
        assert!(
            msg.contains("ambiguous"),
            "Expected ambiguous error, got: {}",
            msg
        );
        assert!(msg.contains("backend/auth-service"));
        assert!(msg.contains("frontend/auth-service"));
    }

    #[test]
    fn test_substring_unambiguous() {
        let conn = setup_db();
        insert_project(&conn, 1, "vs/python-code");
        insert_project(&conn, 2, "vs/typescript-code");
        let id = resolve_project(&conn, "typescript").unwrap();
        assert_eq!(id, 2);
    }

    #[test]
    fn test_substring_case_insensitive() {
        let conn = setup_db();
        insert_project(&conn, 1, "vs/python-code");
        insert_project(&conn, 2, "vs/typescript-code");
        let id = resolve_project(&conn, "TypeScript").unwrap();
        assert_eq!(id, 2);
    }

    #[test]
    fn test_substring_ambiguous() {
        let conn = setup_db();
        insert_project(&conn, 1, "vs/python-code");
        insert_project(&conn, 2, "vs/typescript-code");
        let err = resolve_project(&conn, "code").unwrap_err();
        let msg = err.to_string();
        assert!(
            msg.contains("ambiguous"),
            "Expected ambiguous error, got: {}",
            msg
        );
        assert!(msg.contains("vs/python-code"));
        assert!(msg.contains("vs/typescript-code"));
    }

    #[test]
    fn test_suffix_preferred_over_substring() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/auth-service");
        insert_project(&conn, 2, "backend/auth-service-v2");
        let id = resolve_project(&conn, "auth-service").unwrap();
        assert_eq!(id, 1);
    }

    #[test]
    fn test_no_match() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/auth-service");
        let err = resolve_project(&conn, "nonexistent").unwrap_err();
        let msg = err.to_string();
        assert!(
            msg.contains("not found"),
            "Expected not found error, got: {}",
            msg
        );
        assert!(msg.contains("backend/auth-service"));
    }

    #[test]
    fn test_empty_projects() {
        let conn = setup_db();
        let err = resolve_project(&conn, "anything").unwrap_err();
        let msg = err.to_string();
        assert!(msg.contains("No projects have been synced"));
    }

    #[test]
    fn test_underscore_not_wildcard() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/my_project");
        insert_project(&conn, 2, "backend/my-project");
        // `_` in user input must not match `-` (LIKE wildcard behavior)
        let id = resolve_project(&conn, "my_project").unwrap();
        assert_eq!(id, 1);
    }

    #[test]
    fn test_percent_not_wildcard() {
        let conn = setup_db();
        insert_project(&conn, 1, "backend/a%b");
        insert_project(&conn, 2, "backend/axyzb");
        // `%` in user input must not match arbitrary strings
        let id = resolve_project(&conn, "a%b").unwrap();
        assert_eq!(id, 1);
    }
}
#[path = "project_tests.rs"]
mod tests;
156
src/core/project_tests.rs
Normal file
@@ -0,0 +1,156 @@
use super::*;

fn setup_db() -> Connection {
    let conn = Connection::open_in_memory().unwrap();
    conn.execute_batch(
        "
        CREATE TABLE projects (
            id INTEGER PRIMARY KEY,
            gitlab_project_id INTEGER UNIQUE NOT NULL,
            path_with_namespace TEXT NOT NULL,
            default_branch TEXT,
            web_url TEXT,
            created_at INTEGER,
            updated_at INTEGER,
            raw_payload_id INTEGER
        );
        CREATE INDEX idx_projects_path ON projects(path_with_namespace);
        ",
    )
    .unwrap();
    conn
}

fn insert_project(conn: &Connection, id: i64, path: &str) {
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (?1, ?2, ?3)",
        rusqlite::params![id, id * 100, path],
    )
    .unwrap();
}

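// Resolution order exercised below: exact path match first, then a unique
// path suffix, then a unique substring; an ambiguous suffix or substring is
// an error that lists the candidate paths.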
#[test]
fn test_exact_match() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/auth-service");
    let id = resolve_project(&conn, "backend/auth-service").unwrap();
    assert_eq!(id, 1);
}

#[test]
fn test_case_insensitive() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/auth-service");
    let id = resolve_project(&conn, "Backend/Auth-Service").unwrap();
    assert_eq!(id, 1);
}

#[test]
fn test_suffix_unambiguous() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/auth-service");
    insert_project(&conn, 2, "frontend/web-ui");
    let id = resolve_project(&conn, "auth-service").unwrap();
    assert_eq!(id, 1);
}

#[test]
fn test_suffix_ambiguous() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/auth-service");
    insert_project(&conn, 2, "frontend/auth-service");
    let err = resolve_project(&conn, "auth-service").unwrap_err();
    let msg = err.to_string();
    assert!(
        msg.contains("ambiguous"),
        "Expected ambiguous error, got: {}",
        msg
    );
    assert!(msg.contains("backend/auth-service"));
    assert!(msg.contains("frontend/auth-service"));
}

#[test]
fn test_substring_unambiguous() {
    let conn = setup_db();
    insert_project(&conn, 1, "vs/python-code");
    insert_project(&conn, 2, "vs/typescript-code");
    let id = resolve_project(&conn, "typescript").unwrap();
    assert_eq!(id, 2);
}

#[test]
fn test_substring_case_insensitive() {
    let conn = setup_db();
    insert_project(&conn, 1, "vs/python-code");
    insert_project(&conn, 2, "vs/typescript-code");
    let id = resolve_project(&conn, "TypeScript").unwrap();
    assert_eq!(id, 2);
}

#[test]
fn test_substring_ambiguous() {
    let conn = setup_db();
    insert_project(&conn, 1, "vs/python-code");
    insert_project(&conn, 2, "vs/typescript-code");
    let err = resolve_project(&conn, "code").unwrap_err();
    let msg = err.to_string();
    assert!(
        msg.contains("ambiguous"),
        "Expected ambiguous error, got: {}",
        msg
    );
    assert!(msg.contains("vs/python-code"));
    assert!(msg.contains("vs/typescript-code"));
}

#[test]
fn test_suffix_preferred_over_substring() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/auth-service");
    insert_project(&conn, 2, "backend/auth-service-v2");
    let id = resolve_project(&conn, "auth-service").unwrap();
    assert_eq!(id, 1);
}

#[test]
fn test_no_match() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/auth-service");
    let err = resolve_project(&conn, "nonexistent").unwrap_err();
    let msg = err.to_string();
    assert!(
        msg.contains("not found"),
        "Expected not found error, got: {}",
        msg
    );
    assert!(msg.contains("backend/auth-service"));
}

#[test]
fn test_empty_projects() {
    let conn = setup_db();
    let err = resolve_project(&conn, "anything").unwrap_err();
    let msg = err.to_string();
    assert!(msg.contains("No projects have been synced"));
}

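// The two tests below pin down escape_like: `_` and `%` typed by the user
// must match literally, not as SQL LIKE wildcards.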
#[test]
fn test_underscore_not_wildcard() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/my_project");
    insert_project(&conn, 2, "backend/my-project");
    // `_` in user input must not match `-` (LIKE wildcard behavior)
    let id = resolve_project(&conn, "my_project").unwrap();
    assert_eq!(id, 1);
}

#[test]
fn test_percent_not_wildcard() {
    let conn = setup_db();
    insert_project(&conn, 1, "backend/a%b");
    insert_project(&conn, 2, "backend/axyzb");
    // `%` in user input must not match arbitrary strings
    let id = resolve_project(&conn, "a%b").unwrap();
    assert_eq!(id, 1);
}
@@ -122,430 +122,5 @@ pub fn count_references_for_source(
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::db::{create_connection, run_migrations};
    use std::path::Path;

    fn setup_test_db() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }

    fn seed_project_issue_mr(conn: &Connection) -> (i64, i64, i64) {
        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
             VALUES (1, 100, 'group/repo', 'https://gitlab.example.com/group/repo', 1000, 2000)",
            [],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at)
             VALUES (1, 200, 10, 1, 'Test issue', 'closed', 1000, 2000, 2000)",
            [],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at, source_branch, target_branch)
             VALUES (1, 300, 5, 1, 'Test MR', 'merged', 1000, 2000, 2000, 'feature', 'main')",
            [],
        )
        .unwrap();

        (1, 1, 1)
    }

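    // seed_project_issue_mr returns (project_id, issue_id, mr_id) as local DB
    // ids; the GitLab-side ids (100, 200, 300) and iids (10, 5) deliberately
    // differ so that id/iid mix-ups fail the assertions.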
    #[test]
    fn test_extract_refs_from_state_events_basic() {
        let conn = setup_test_db();
        let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();

        let count = extract_refs_from_state_events(&conn, project_id).unwrap();
        assert_eq!(count, 1, "Should insert exactly one reference");

        let (src_type, src_id, tgt_type, tgt_id, ref_type, method): (
            String,
            i64,
            String,
            i64,
            String,
            String,
        ) = conn
            .query_row(
                "SELECT source_entity_type, source_entity_id,
                        target_entity_type, target_entity_id,
                        reference_type, source_method
                 FROM entity_references WHERE project_id = ?1",
                [project_id],
                |row| {
                    Ok((
                        row.get(0)?,
                        row.get(1)?,
                        row.get(2)?,
                        row.get(3)?,
                        row.get(4)?,
                        row.get(5)?,
                    ))
                },
            )
            .unwrap();

        assert_eq!(src_type, "merge_request");
        assert_eq!(src_id, mr_id, "Source should be the MR's local DB id");
        assert_eq!(tgt_type, "issue");
        assert_eq!(tgt_id, issue_id, "Target should be the issue's local DB id");
        assert_eq!(ref_type, "closes");
        assert_eq!(method, "api");
    }

    #[test]
    fn test_extract_refs_dedup_with_closes_issues() {
        let conn = setup_test_db();
        let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO entity_references
                 (project_id, source_entity_type, source_entity_id,
                  target_entity_type, target_entity_id,
                  reference_type, source_method, created_at)
             VALUES (?1, 'merge_request', ?2, 'issue', ?3, 'closes', 'api', 3000)",
            rusqlite::params![project_id, mr_id, issue_id],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();

        let count = extract_refs_from_state_events(&conn, project_id).unwrap();
        assert_eq!(count, 0, "Should not insert duplicate reference");

        let total: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM entity_references WHERE project_id = ?1",
                [project_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(total, 1, "Should still have exactly one reference");
    }

    #[test]
    fn test_extract_refs_no_source_mr() {
        let conn = setup_test_db();
        let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (1, ?1, ?2, NULL, 'closed', 3000, NULL)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();

        let count = extract_refs_from_state_events(&conn, project_id).unwrap();
        assert_eq!(count, 0, "Should not create refs when no source MR");
    }

    #[test]
    fn test_extract_refs_mr_not_synced() {
        let conn = setup_test_db();
        let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (2, ?1, ?2, NULL, 'closed', 3000, 999)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();

        let count = extract_refs_from_state_events(&conn, project_id).unwrap();
        assert_eq!(
            count, 0,
            "Should not create ref when MR is not synced locally"
        );
    }

    #[test]
    fn test_extract_refs_idempotent() {
        let conn = setup_test_db();
        let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();

        let count1 = extract_refs_from_state_events(&conn, project_id).unwrap();
        assert_eq!(count1, 1);

        let count2 = extract_refs_from_state_events(&conn, project_id).unwrap();
        assert_eq!(count2, 0, "Second run should insert nothing (idempotent)");
    }

    #[test]
    fn test_extract_refs_multiple_events_same_mr_issue() {
        let conn = setup_test_db();
        let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (2, ?1, ?2, NULL, 'closed', 4000, 5)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();

        let count = extract_refs_from_state_events(&conn, project_id).unwrap();
        assert!(count <= 2, "At most 2 inserts attempted");

        let total: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM entity_references WHERE project_id = ?1",
                [project_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(
            total, 1,
            "Only one unique reference should exist for same MR->issue pair"
        );
    }

    #[test]
    fn test_extract_refs_scoped_to_project() {
        let conn = setup_test_db();
        seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
             VALUES (2, 101, 'group/other', 'https://gitlab.example.com/group/other', 1000, 2000)",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at)
             VALUES (2, 201, 10, 2, 'Other issue', 'closed', 1000, 2000, 2000)",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at, source_branch, target_branch)
             VALUES (2, 301, 5, 2, 'Other MR', 'merged', 1000, 2000, 2000, 'feature', 'main')",
            [],
        )
        .unwrap();

        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (1, 1, 1, NULL, 'closed', 3000, 5)",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO resource_state_events
                 (gitlab_id, project_id, issue_id, merge_request_id, state,
                  created_at, source_merge_request_iid)
             VALUES (2, 2, 2, NULL, 'closed', 3000, 5)",
            [],
        )
        .unwrap();

        let count = extract_refs_from_state_events(&conn, 1).unwrap();
        assert_eq!(count, 1);

        let total: i64 = conn
            .query_row("SELECT COUNT(*) FROM entity_references", [], |row| {
                row.get(0)
            })
            .unwrap();
        assert_eq!(total, 1, "Only project 1 refs should be created");
    }

    #[test]
    fn test_insert_entity_reference_creates_row() {
        let conn = setup_test_db();
        let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

        let ref_ = EntityReference {
            project_id,
            source_entity_type: "merge_request",
            source_entity_id: mr_id,
            target_entity_type: "issue",
            target_entity_id: Some(issue_id),
            target_project_path: None,
            target_entity_iid: None,
            reference_type: "closes",
            source_method: "api",
        };

        let inserted = insert_entity_reference(&conn, &ref_).unwrap();
        assert!(inserted);

        let count = count_references_for_source(&conn, "merge_request", mr_id).unwrap();
        assert_eq!(count, 1);
    }

    #[test]
    fn test_insert_entity_reference_idempotent() {
        let conn = setup_test_db();
        let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

        let ref_ = EntityReference {
            project_id,
            source_entity_type: "merge_request",
            source_entity_id: mr_id,
            target_entity_type: "issue",
            target_entity_id: Some(issue_id),
            target_project_path: None,
            target_entity_iid: None,
            reference_type: "closes",
            source_method: "api",
        };

        let first = insert_entity_reference(&conn, &ref_).unwrap();
        assert!(first);

        let second = insert_entity_reference(&conn, &ref_).unwrap();
        assert!(!second, "Duplicate insert should be ignored");

        let count = count_references_for_source(&conn, "merge_request", mr_id).unwrap();
        assert_eq!(count, 1, "Still just one reference");
    }

    #[test]
    fn test_insert_entity_reference_cross_project_unresolved() {
        let conn = setup_test_db();
        let (project_id, _issue_id, mr_id) = seed_project_issue_mr(&conn);

        let ref_ = EntityReference {
            project_id,
            source_entity_type: "merge_request",
            source_entity_id: mr_id,
            target_entity_type: "issue",
            target_entity_id: None,
            target_project_path: Some("other-group/other-project"),
            target_entity_iid: Some(99),
            reference_type: "closes",
            source_method: "api",
        };

        let inserted = insert_entity_reference(&conn, &ref_).unwrap();
        assert!(inserted);

        let (target_id, target_path, target_iid): (Option<i64>, Option<String>, Option<i64>) = conn
            .query_row(
                "SELECT target_entity_id, target_project_path, target_entity_iid \
                 FROM entity_references WHERE source_entity_id = ?1",
                [mr_id],
                |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
            )
            .unwrap();

        assert!(target_id.is_none());
        assert_eq!(target_path, Some("other-group/other-project".to_string()));
        assert_eq!(target_iid, Some(99));
    }

    #[test]
    fn test_insert_multiple_closes_references() {
        let conn = setup_test_db();
        let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

        conn.execute(
            "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at)
             VALUES (10, 210, 11, ?1, 'Second issue', 'opened', 1000, 2000, 2000)",
            rusqlite::params![project_id],
        )
        .unwrap();
        let issue_id_2 = 10i64;

        for target_id in [issue_id, issue_id_2] {
            let ref_ = EntityReference {
                project_id,
                source_entity_type: "merge_request",
                source_entity_id: mr_id,
                target_entity_type: "issue",
                target_entity_id: Some(target_id),
                target_project_path: None,
                target_entity_iid: None,
                reference_type: "closes",
                source_method: "api",
            };
            insert_entity_reference(&conn, &ref_).unwrap();
        }

        let count = count_references_for_source(&conn, "merge_request", mr_id).unwrap();
        assert_eq!(count, 2);
    }

    #[test]
    fn test_resolve_issue_local_id_found() {
        let conn = setup_test_db();
        let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

        let resolved = resolve_issue_local_id(&conn, project_id, 10).unwrap();
        assert_eq!(resolved, Some(issue_id));
    }

    #[test]
    fn test_resolve_issue_local_id_not_found() {
        let conn = setup_test_db();
        let (project_id, _issue_id, _mr_id) = seed_project_issue_mr(&conn);

        let resolved = resolve_issue_local_id(&conn, project_id, 999).unwrap();
        assert!(resolved.is_none());
    }

    #[test]
    fn test_resolve_project_path_found() {
        let conn = setup_test_db();
        seed_project_issue_mr(&conn);

        let path = resolve_project_path(&conn, 100).unwrap();
        assert_eq!(path, Some("group/repo".to_string()));
    }

    #[test]
    fn test_resolve_project_path_not_found() {
        let conn = setup_test_db();

        let path = resolve_project_path(&conn, 999).unwrap();
        assert!(path.is_none());
    }
}
#[path = "references_tests.rs"]
mod tests;
425
src/core/references_tests.rs
Normal file
@@ -0,0 +1,425 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;

fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}

fn seed_project_issue_mr(conn: &Connection) -> (i64, i64, i64) {
    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
         VALUES (1, 100, 'group/repo', 'https://gitlab.example.com/group/repo', 1000, 2000)",
        [],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at)
         VALUES (1, 200, 10, 1, 'Test issue', 'closed', 1000, 2000, 2000)",
        [],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at, source_branch, target_branch)
         VALUES (1, 300, 5, 1, 'Test MR', 'merged', 1000, 2000, 2000, 'feature', 'main')",
        [],
    )
    .unwrap();

    (1, 1, 1)
}

#[test]
fn test_extract_refs_from_state_events_basic() {
    let conn = setup_test_db();
    let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let count = extract_refs_from_state_events(&conn, project_id).unwrap();
    assert_eq!(count, 1, "Should insert exactly one reference");

    let (src_type, src_id, tgt_type, tgt_id, ref_type, method): (
        String,
        i64,
        String,
        i64,
        String,
        String,
    ) = conn
        .query_row(
            "SELECT source_entity_type, source_entity_id,
                    target_entity_type, target_entity_id,
                    reference_type, source_method
             FROM entity_references WHERE project_id = ?1",
            [project_id],
            |row| {
                Ok((
                    row.get(0)?,
                    row.get(1)?,
                    row.get(2)?,
                    row.get(3)?,
                    row.get(4)?,
                    row.get(5)?,
                ))
            },
        )
        .unwrap();

    assert_eq!(src_type, "merge_request");
    assert_eq!(src_id, mr_id, "Source should be the MR's local DB id");
    assert_eq!(tgt_type, "issue");
    assert_eq!(tgt_id, issue_id, "Target should be the issue's local DB id");
    assert_eq!(ref_type, "closes");
    assert_eq!(method, "api");
}

#[test]
fn test_extract_refs_dedup_with_closes_issues() {
    let conn = setup_test_db();
    let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO entity_references
             (project_id, source_entity_type, source_entity_id,
              target_entity_type, target_entity_id,
              reference_type, source_method, created_at)
         VALUES (?1, 'merge_request', ?2, 'issue', ?3, 'closes', 'api', 3000)",
        rusqlite::params![project_id, mr_id, issue_id],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let count = extract_refs_from_state_events(&conn, project_id).unwrap();
    assert_eq!(count, 0, "Should not insert duplicate reference");

    let total: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM entity_references WHERE project_id = ?1",
            [project_id],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(total, 1, "Should still have exactly one reference");
}

#[test]
fn test_extract_refs_no_source_mr() {
    let conn = setup_test_db();
    let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (1, ?1, ?2, NULL, 'closed', 3000, NULL)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let count = extract_refs_from_state_events(&conn, project_id).unwrap();
    assert_eq!(count, 0, "Should not create refs when no source MR");
}

#[test]
fn test_extract_refs_mr_not_synced() {
    let conn = setup_test_db();
    let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (2, ?1, ?2, NULL, 'closed', 3000, 999)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let count = extract_refs_from_state_events(&conn, project_id).unwrap();
    assert_eq!(
        count, 0,
        "Should not create ref when MR is not synced locally"
    );
}

#[test]
fn test_extract_refs_idempotent() {
    let conn = setup_test_db();
    let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let count1 = extract_refs_from_state_events(&conn, project_id).unwrap();
    assert_eq!(count1, 1);

    let count2 = extract_refs_from_state_events(&conn, project_id).unwrap();
    assert_eq!(count2, 0, "Second run should insert nothing (idempotent)");
}

#[test]
fn test_extract_refs_multiple_events_same_mr_issue() {
    let conn = setup_test_db();
    let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (1, ?1, ?2, NULL, 'closed', 3000, 5)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (2, ?1, ?2, NULL, 'closed', 4000, 5)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let count = extract_refs_from_state_events(&conn, project_id).unwrap();
    assert!(count <= 2, "At most 2 inserts attempted");

    let total: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM entity_references WHERE project_id = ?1",
            [project_id],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(
        total, 1,
        "Only one unique reference should exist for same MR->issue pair"
    );
}

#[test]
fn test_extract_refs_scoped_to_project() {
    let conn = setup_test_db();
    seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url, created_at, updated_at)
         VALUES (2, 101, 'group/other', 'https://gitlab.example.com/group/other', 1000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at)
         VALUES (2, 201, 10, 2, 'Other issue', 'closed', 1000, 2000, 2000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at, source_branch, target_branch)
         VALUES (2, 301, 5, 2, 'Other MR', 'merged', 1000, 2000, 2000, 'feature', 'main')",
        [],
    )
    .unwrap();

    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (1, 1, 1, NULL, 'closed', 3000, 5)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO resource_state_events
             (gitlab_id, project_id, issue_id, merge_request_id, state,
              created_at, source_merge_request_iid)
         VALUES (2, 2, 2, NULL, 'closed', 3000, 5)",
        [],
    )
    .unwrap();

    let count = extract_refs_from_state_events(&conn, 1).unwrap();
    assert_eq!(count, 1);

    let total: i64 = conn
        .query_row("SELECT COUNT(*) FROM entity_references", [], |row| {
            row.get(0)
        })
        .unwrap();
    assert_eq!(total, 1, "Only project 1 refs should be created");
}

#[test]
fn test_insert_entity_reference_creates_row() {
    let conn = setup_test_db();
    let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

    let ref_ = EntityReference {
        project_id,
        source_entity_type: "merge_request",
        source_entity_id: mr_id,
        target_entity_type: "issue",
        target_entity_id: Some(issue_id),
        target_project_path: None,
        target_entity_iid: None,
        reference_type: "closes",
        source_method: "api",
    };

    let inserted = insert_entity_reference(&conn, &ref_).unwrap();
    assert!(inserted);

    let count = count_references_for_source(&conn, "merge_request", mr_id).unwrap();
    assert_eq!(count, 1);
}

#[test]
fn test_insert_entity_reference_idempotent() {
    let conn = setup_test_db();
    let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

    let ref_ = EntityReference {
        project_id,
        source_entity_type: "merge_request",
        source_entity_id: mr_id,
        target_entity_type: "issue",
        target_entity_id: Some(issue_id),
        target_project_path: None,
        target_entity_iid: None,
        reference_type: "closes",
        source_method: "api",
    };

    let first = insert_entity_reference(&conn, &ref_).unwrap();
    assert!(first);

    let second = insert_entity_reference(&conn, &ref_).unwrap();
    assert!(!second, "Duplicate insert should be ignored");

    let count = count_references_for_source(&conn, "merge_request", mr_id).unwrap();
    assert_eq!(count, 1, "Still just one reference");
}

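// Cross-project targets that are not synced locally are stored with
// target_entity_id NULL plus the textual path and iid, leaving them
// resolvable by a later sync (see the next test).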
#[test]
fn test_insert_entity_reference_cross_project_unresolved() {
    let conn = setup_test_db();
    let (project_id, _issue_id, mr_id) = seed_project_issue_mr(&conn);

    let ref_ = EntityReference {
        project_id,
        source_entity_type: "merge_request",
        source_entity_id: mr_id,
        target_entity_type: "issue",
        target_entity_id: None,
        target_project_path: Some("other-group/other-project"),
        target_entity_iid: Some(99),
        reference_type: "closes",
        source_method: "api",
    };

    let inserted = insert_entity_reference(&conn, &ref_).unwrap();
    assert!(inserted);

    let (target_id, target_path, target_iid): (Option<i64>, Option<String>, Option<i64>) = conn
        .query_row(
            "SELECT target_entity_id, target_project_path, target_entity_iid \
             FROM entity_references WHERE source_entity_id = ?1",
            [mr_id],
            |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
        )
        .unwrap();

    assert!(target_id.is_none());
    assert_eq!(target_path, Some("other-group/other-project".to_string()));
    assert_eq!(target_iid, Some(99));
}

#[test]
fn test_insert_multiple_closes_references() {
    let conn = setup_test_db();
    let (project_id, issue_id, mr_id) = seed_project_issue_mr(&conn);

    conn.execute(
        "INSERT INTO issues (id, gitlab_id, iid, project_id, title, state, created_at, updated_at, last_seen_at)
         VALUES (10, 210, 11, ?1, 'Second issue', 'opened', 1000, 2000, 2000)",
        rusqlite::params![project_id],
    )
    .unwrap();
    let issue_id_2 = 10i64;

    for target_id in [issue_id, issue_id_2] {
        let ref_ = EntityReference {
            project_id,
            source_entity_type: "merge_request",
            source_entity_id: mr_id,
            target_entity_type: "issue",
            target_entity_id: Some(target_id),
            target_project_path: None,
            target_entity_iid: None,
            reference_type: "closes",
            source_method: "api",
        };
        insert_entity_reference(&conn, &ref_).unwrap();
    }

    let count = count_references_for_source(&conn, "merge_request", mr_id).unwrap();
    assert_eq!(count, 2);
}

#[test]
fn test_resolve_issue_local_id_found() {
    let conn = setup_test_db();
    let (project_id, issue_id, _mr_id) = seed_project_issue_mr(&conn);

    let resolved = resolve_issue_local_id(&conn, project_id, 10).unwrap();
    assert_eq!(resolved, Some(issue_id));
}

#[test]
fn test_resolve_issue_local_id_not_found() {
    let conn = setup_test_db();
    let (project_id, _issue_id, _mr_id) = seed_project_issue_mr(&conn);

    let resolved = resolve_issue_local_id(&conn, project_id, 999).unwrap();
    assert!(resolved.is_none());
}

#[test]
fn test_resolve_project_path_found() {
    let conn = setup_test_db();
    seed_project_issue_mr(&conn);

    let path = resolve_project_path(&conn, 100).unwrap();
    assert_eq!(path, Some("group/repo".to_string()));
}

#[test]
fn test_resolve_project_path_not_found() {
    let conn = setup_test_db();

    let path = resolve_project_path(&conn, 999).unwrap();
    assert!(path.is_none());
}
@@ -66,153 +66,5 @@ impl SyncRunRecorder {
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::db::{create_connection, run_migrations};
    use std::path::Path;

    fn setup_test_db() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }

    #[test]
    fn test_sync_run_recorder_start() {
        let conn = setup_test_db();
        let recorder = SyncRunRecorder::start(&conn, "sync", "abc12345").unwrap();
        assert!(recorder.row_id > 0);

        let (status, command, run_id): (String, String, String) = conn
            .query_row(
                "SELECT status, command, run_id FROM sync_runs WHERE id = ?1",
                [recorder.row_id],
                |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
            )
            .unwrap();

        assert_eq!(status, "running");
        assert_eq!(command, "sync");
        assert_eq!(run_id, "abc12345");
    }

    #[test]
    fn test_sync_run_recorder_succeed() {
        let conn = setup_test_db();
        let recorder = SyncRunRecorder::start(&conn, "sync", "def67890").unwrap();
        let row_id = recorder.row_id;

        let metrics = vec![StageTiming {
            name: "ingest".to_string(),
            project: None,
            elapsed_ms: 1200,
            items_processed: 50,
            items_skipped: 0,
            errors: 2,
            rate_limit_hits: 0,
            retries: 0,
            sub_stages: vec![],
        }];

        recorder.succeed(&conn, &metrics, 50, 2).unwrap();

        let (status, finished_at, metrics_json, total_items, total_errors): (
            String,
            Option<i64>,
            Option<String>,
            i64,
            i64,
        ) = conn
            .query_row(
                "SELECT status, finished_at, metrics_json, total_items_processed, total_errors
                 FROM sync_runs WHERE id = ?1",
                [row_id],
                |row| {
                    Ok((
                        row.get(0)?,
                        row.get(1)?,
                        row.get(2)?,
                        row.get(3)?,
                        row.get(4)?,
                    ))
                },
            )
            .unwrap();

        assert_eq!(status, "succeeded");
        assert!(finished_at.is_some());
        assert!(metrics_json.is_some());
        assert_eq!(total_items, 50);
        assert_eq!(total_errors, 2);

        let parsed: Vec<StageTiming> = serde_json::from_str(&metrics_json.unwrap()).unwrap();
        assert_eq!(parsed.len(), 1);
        assert_eq!(parsed[0].name, "ingest");
    }

#[test]
|
||||
fn test_sync_run_recorder_fail() {
|
||||
let conn = setup_test_db();
|
||||
let recorder = SyncRunRecorder::start(&conn, "ingest issues", "fail0001").unwrap();
|
||||
let row_id = recorder.row_id;
|
||||
|
||||
recorder.fail(&conn, "GitLab auth failed", None).unwrap();
|
||||
|
||||
let (status, finished_at, error, metrics_json): (
|
||||
String,
|
||||
Option<i64>,
|
||||
Option<String>,
|
||||
Option<String>,
|
||||
) = conn
|
||||
.query_row(
|
||||
"SELECT status, finished_at, error, metrics_json
|
||||
FROM sync_runs WHERE id = ?1",
|
||||
[row_id],
|
||||
|row| Ok((row.get(0)?, row.get(1)?, row.get(2)?, row.get(3)?)),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(status, "failed");
|
||||
assert!(finished_at.is_some());
|
||||
assert_eq!(error.as_deref(), Some("GitLab auth failed"));
|
||||
assert!(metrics_json.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sync_run_recorder_fail_with_partial_metrics() {
|
||||
let conn = setup_test_db();
|
||||
let recorder = SyncRunRecorder::start(&conn, "sync", "part0001").unwrap();
|
||||
let row_id = recorder.row_id;
|
||||
|
||||
let partial_metrics = vec![StageTiming {
|
||||
name: "ingest_issues".to_string(),
|
||||
project: Some("group/repo".to_string()),
|
||||
elapsed_ms: 800,
|
||||
items_processed: 30,
|
||||
items_skipped: 0,
|
||||
errors: 0,
|
||||
rate_limit_hits: 1,
|
||||
retries: 0,
|
||||
sub_stages: vec![],
|
||||
}];
|
||||
|
||||
recorder
|
||||
.fail(&conn, "Embedding failed", Some(&partial_metrics))
|
||||
.unwrap();
|
||||
|
||||
let (status, metrics_json): (String, Option<String>) = conn
|
||||
.query_row(
|
||||
"SELECT status, metrics_json FROM sync_runs WHERE id = ?1",
|
||||
[row_id],
|
||||
|row| Ok((row.get(0)?, row.get(1)?)),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(status, "failed");
|
||||
assert!(metrics_json.is_some());
|
||||
|
||||
let parsed: Vec<StageTiming> = serde_json::from_str(&metrics_json.unwrap()).unwrap();
|
||||
assert_eq!(parsed.len(), 1);
|
||||
assert_eq!(parsed[0].name, "ingest_issues");
|
||||
}
|
||||
}
|
||||
#[path = "sync_run_tests.rs"]
|
||||
mod tests;
|
||||
|
||||
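A minimal sketch of the wiring pattern this hunk introduces, with hypothetical module names for illustration only: `#[path]` resolves relative to the directory of the declaring file, and `use super::*;` in the companion file glob-imports the parent module's items, so even non-`pub` items stay reachable from the tests with no visibility changes.

// src/core/example.rs (hypothetical)
fn private_helper() -> i32 {
    // Not `pub`, yet still reachable from the companion test file.
    41
}

#[cfg(test)]
#[path = "example_tests.rs"]
mod tests;

// src/core/example_tests.rs (hypothetical)
use super::*;

#[test]
fn private_helper_works() {
    assert_eq!(private_helper() + 1, 42);
}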
148
src/core/sync_run_tests.rs
Normal file
@@ -0,0 +1,148 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;

fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}

#[test]
fn test_sync_run_recorder_start() {
    let conn = setup_test_db();
    let recorder = SyncRunRecorder::start(&conn, "sync", "abc12345").unwrap();
    assert!(recorder.row_id > 0);

    let (status, command, run_id): (String, String, String) = conn
        .query_row(
            "SELECT status, command, run_id FROM sync_runs WHERE id = ?1",
            [recorder.row_id],
            |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
        )
        .unwrap();

    assert_eq!(status, "running");
    assert_eq!(command, "sync");
    assert_eq!(run_id, "abc12345");
}

#[test]
fn test_sync_run_recorder_succeed() {
    let conn = setup_test_db();
    let recorder = SyncRunRecorder::start(&conn, "sync", "def67890").unwrap();
    let row_id = recorder.row_id;

    let metrics = vec![StageTiming {
        name: "ingest".to_string(),
        project: None,
        elapsed_ms: 1200,
        items_processed: 50,
        items_skipped: 0,
        errors: 2,
        rate_limit_hits: 0,
        retries: 0,
        sub_stages: vec![],
    }];

    recorder.succeed(&conn, &metrics, 50, 2).unwrap();

    let (status, finished_at, metrics_json, total_items, total_errors): (
        String,
        Option<i64>,
        Option<String>,
        i64,
        i64,
    ) = conn
        .query_row(
            "SELECT status, finished_at, metrics_json, total_items_processed, total_errors
             FROM sync_runs WHERE id = ?1",
            [row_id],
            |row| {
                Ok((
                    row.get(0)?,
                    row.get(1)?,
                    row.get(2)?,
                    row.get(3)?,
                    row.get(4)?,
                ))
            },
        )
        .unwrap();

    assert_eq!(status, "succeeded");
    assert!(finished_at.is_some());
    assert!(metrics_json.is_some());
    assert_eq!(total_items, 50);
    assert_eq!(total_errors, 2);

    let parsed: Vec<StageTiming> = serde_json::from_str(&metrics_json.unwrap()).unwrap();
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].name, "ingest");
}

#[test]
fn test_sync_run_recorder_fail() {
    let conn = setup_test_db();
    let recorder = SyncRunRecorder::start(&conn, "ingest issues", "fail0001").unwrap();
    let row_id = recorder.row_id;

    recorder.fail(&conn, "GitLab auth failed", None).unwrap();

    let (status, finished_at, error, metrics_json): (
        String,
        Option<i64>,
        Option<String>,
        Option<String>,
    ) = conn
        .query_row(
            "SELECT status, finished_at, error, metrics_json
             FROM sync_runs WHERE id = ?1",
            [row_id],
            |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?, row.get(3)?)),
        )
        .unwrap();

    assert_eq!(status, "failed");
    assert!(finished_at.is_some());
    assert_eq!(error.as_deref(), Some("GitLab auth failed"));
    assert!(metrics_json.is_none());
}

#[test]
fn test_sync_run_recorder_fail_with_partial_metrics() {
    let conn = setup_test_db();
    let recorder = SyncRunRecorder::start(&conn, "sync", "part0001").unwrap();
    let row_id = recorder.row_id;

    let partial_metrics = vec![StageTiming {
        name: "ingest_issues".to_string(),
        project: Some("group/repo".to_string()),
        elapsed_ms: 800,
        items_processed: 30,
        items_skipped: 0,
        errors: 0,
        rate_limit_hits: 1,
        retries: 0,
        sub_stages: vec![],
    }];

    recorder
        .fail(&conn, "Embedding failed", Some(&partial_metrics))
        .unwrap();

    let (status, metrics_json): (String, Option<String>) = conn
        .query_row(
            "SELECT status, metrics_json FROM sync_runs WHERE id = ?1",
            [row_id],
            |row| Ok((row.get(0)?, row.get(1)?)),
        )
        .unwrap();

    assert_eq!(status, "failed");
    assert!(metrics_json.is_some());

    let parsed: Vec<StageTiming> = serde_json::from_str(&metrics_json.unwrap()).unwrap();
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].name, "ingest_issues");
}
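Taken together, the four tests above pin down the recorder's lifecycle. A condensed sketch of that flow, using the same calls as the tests (error handling elided; `metrics` built as in test_sync_run_recorder_succeed):

let conn = create_connection(Path::new(":memory:"))?;
run_migrations(&conn)?;

let recorder = SyncRunRecorder::start(&conn, "sync", "abc12345")?; // row status = "running"
// ... run stages, accumulating StageTiming entries into `metrics` ...
recorder.succeed(&conn, &metrics, 50, 2)?; // status = "succeeded", metrics_json populated
// Or, on error, optionally persist whatever metrics were gathered so far:
// recorder.fail(&conn, "GitLab auth failed", Some(&partial_metrics))?;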
@@ -370,326 +370,5 @@ fn entity_id_column(entity: &EntityRef) -> Result<(&'static str, i64)> {
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::db::{create_connection, run_migrations};
    use std::path::Path;

    fn setup_test_db() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }

    fn insert_project(conn: &Connection) -> i64 {
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
            [],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, web_url) VALUES (?1, ?2, ?3, 'Auth bug', 'opened', 'alice', 1000, 2000, 3000, 'https://gitlab.com/group/project/-/issues/1')",
            rusqlite::params![iid * 100, project_id, iid],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_mr(conn: &Connection, project_id: i64, iid: i64, merged_at: Option<i64>) -> i64 {
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, merged_at, merge_user_username, web_url) VALUES (?1, ?2, ?3, 'Fix auth', 'merged', 'bob', 1000, 5000, 6000, ?4, 'charlie', 'https://gitlab.com/group/project/-/merge_requests/10')",
            rusqlite::params![iid * 100, project_id, iid, merged_at],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn make_entity_ref(entity_type: &str, entity_id: i64, iid: i64) -> EntityRef {
        EntityRef {
            entity_type: entity_type.to_owned(),
            entity_id,
            entity_iid: iid,
            project_path: "group/project".to_owned(),
        }
    }

    fn insert_state_event(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        state: &str,
        created_at: i64,
    ) {
        let gitlab_id: i64 = rand::random::<u32>().into();
        conn.execute(
            "INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, merge_request_id, state, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, 'alice', ?6)",
            rusqlite::params![gitlab_id, project_id, issue_id, mr_id, state, created_at],
        )
        .unwrap();
    }

    fn insert_label_event(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        action: &str,
        label_name: Option<&str>,
        created_at: i64,
    ) {
        let gitlab_id: i64 = rand::random::<u32>().into();
        conn.execute(
            "INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, merge_request_id, action, label_name, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, 'alice', ?7)",
            rusqlite::params![gitlab_id, project_id, issue_id, mr_id, action, label_name, created_at],
        )
        .unwrap();
    }

    fn insert_milestone_event(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        action: &str,
        milestone_title: Option<&str>,
        created_at: i64,
    ) {
        let gitlab_id: i64 = rand::random::<u32>().into();
        conn.execute(
            "INSERT INTO resource_milestone_events (gitlab_id, project_id, issue_id, merge_request_id, action, milestone_title, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, 'alice', ?7)",
            rusqlite::params![gitlab_id, project_id, issue_id, mr_id, action, milestone_title, created_at],
        )
        .unwrap();
    }

    #[test]
    fn test_collect_creation_event() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let seeds = vec![make_entity_ref("issue", issue_id, 1)];

        let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
        assert_eq!(events.len(), 1);
        assert!(matches!(events[0].event_type, TimelineEventType::Created));
        assert_eq!(events[0].timestamp, 1000);
        assert_eq!(events[0].actor, Some("alice".to_owned()));
        assert!(events[0].is_seed);
    }

    #[test]
    fn test_collect_state_events() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000);
        insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 4000);

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

        // Created + 2 state changes = 3
        assert_eq!(events.len(), 3);
        assert!(matches!(events[0].event_type, TimelineEventType::Created));
        assert!(matches!(
            events[1].event_type,
            TimelineEventType::StateChanged { ref state } if state == "closed"
        ));
        assert!(matches!(
            events[2].event_type,
            TimelineEventType::StateChanged { ref state } if state == "reopened"
        ));
    }

    #[test]
    fn test_collect_merged_dedup() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let mr_id = insert_mr(&conn, project_id, 10, Some(5000));

        // Also add a state event for 'merged' — this should NOT produce a StateChanged
        insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000);

        let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
        let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

        // Should have Created + Merged (not Created + StateChanged{merged} + Merged)
        let merged_count = events
            .iter()
            .filter(|e| matches!(e.event_type, TimelineEventType::Merged))
            .count();
        let state_merged_count = events
            .iter()
            .filter(|e| matches!(&e.event_type, TimelineEventType::StateChanged { state } if state == "merged"))
            .count();

        assert_eq!(merged_count, 1);
        assert_eq!(state_merged_count, 0);
    }

    #[test]
    fn test_collect_null_label_fallback() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        insert_label_event(&conn, project_id, Some(issue_id), None, "add", None, 2000);

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

        let label_event = events.iter().find(|e| {
            matches!(&e.event_type, TimelineEventType::LabelAdded { label } if label == "[deleted label]")
        });
        assert!(label_event.is_some());
    }

    #[test]
    fn test_collect_null_milestone_fallback() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        insert_milestone_event(&conn, project_id, Some(issue_id), None, "add", None, 2000);

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

        let ms_event = events.iter().find(|e| {
            matches!(&e.event_type, TimelineEventType::MilestoneSet { milestone } if milestone == "[deleted milestone]")
        });
        assert!(ms_event.is_some());
    }

    #[test]
    fn test_collect_since_filter() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000);
        insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 5000);

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];

        // Since 4000: should exclude Created (1000) and closed (3000)
        let (events, _) = collect_events(&conn, &seeds, &[], &[], Some(4000), 100).unwrap();
        assert_eq!(events.len(), 1);
        assert_eq!(events[0].timestamp, 5000);
    }

    #[test]
    fn test_collect_chronological_sort() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let mr_id = insert_mr(&conn, project_id, 10, Some(4000));

        insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000);
        insert_label_event(
            &conn,
            project_id,
            None,
            Some(mr_id),
            "add",
            Some("bug"),
            2000,
        );

        let seeds = vec![
            make_entity_ref("issue", issue_id, 1),
            make_entity_ref("merge_request", mr_id, 10),
        ];
        let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

        // Verify chronological order
        for window in events.windows(2) {
            assert!(window[0].timestamp <= window[1].timestamp);
        }
    }

    #[test]
    fn test_collect_respects_limit() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        for i in 0..20 {
            insert_state_event(
                &conn,
                project_id,
                Some(issue_id),
                None,
                "closed",
                3000 + i * 100,
            );
        }

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let (events, total) = collect_events(&conn, &seeds, &[], &[], None, 5).unwrap();
        assert_eq!(events.len(), 5);
        // 20 state changes + 1 created = 21 total before limit
        assert_eq!(total, 21);
    }

    #[test]
    fn test_collect_evidence_notes_included() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        let evidence = vec![TimelineEvent {
            timestamp: 2500,
            entity_type: "issue".to_owned(),
            entity_id: issue_id,
            entity_iid: 1,
            project_path: "group/project".to_owned(),
            event_type: TimelineEventType::NoteEvidence {
                note_id: 42,
                snippet: "relevant note".to_owned(),
                discussion_id: Some(1),
            },
            summary: "Note by alice".to_owned(),
            actor: Some("alice".to_owned()),
            url: None,
            is_seed: true,
        }];

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let (events, _) = collect_events(&conn, &seeds, &[], &evidence, None, 100).unwrap();

        let note_event = events.iter().find(|e| {
            matches!(
                &e.event_type,
                TimelineEventType::NoteEvidence { note_id, .. } if *note_id == 42
            )
        });
        assert!(note_event.is_some());
    }

    #[test]
    fn test_collect_merged_fallback_to_state_event() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        // MR with merged_at = NULL
        let mr_id = insert_mr(&conn, project_id, 10, None);

        // But has a state event for 'merged'
        insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000);

        let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
        let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

        let merged = events
            .iter()
            .find(|e| matches!(e.event_type, TimelineEventType::Merged));
        assert!(merged.is_some());
        assert_eq!(merged.unwrap().timestamp, 5000);
    }
}
#[path = "timeline_collect_tests.rs"]
mod tests;

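Every call site in these tests uses the same shape, so the collector's contract can be read off directly. A sketch, with parameter roles inferred from the assertions rather than lifted from the implementation:

let seeds = vec![make_entity_ref("issue", issue_id, 1)];
let (events, total) = collect_events(
    &conn,
    &seeds,
    &[],        // additional entities (always empty in these tests)
    &evidence,  // pre-built NoteEvidence events merged into the stream
    Some(4000), // optional `since` cutoff: earlier events are dropped
    5,          // limit: `events` is truncated, `total` counts all matches
)?;
// `events` comes back sorted by timestamp ascending.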
321
src/core/timeline_collect_tests.rs
Normal file
@@ -0,0 +1,321 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;

fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}

fn insert_project(conn: &Connection) -> i64 {
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
        [],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    conn.execute(
        "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, web_url) VALUES (?1, ?2, ?3, 'Auth bug', 'opened', 'alice', 1000, 2000, 3000, 'https://gitlab.com/group/project/-/issues/1')",
        rusqlite::params![iid * 100, project_id, iid],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_mr(conn: &Connection, project_id: i64, iid: i64, merged_at: Option<i64>) -> i64 {
    conn.execute(
        "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, merged_at, merge_user_username, web_url) VALUES (?1, ?2, ?3, 'Fix auth', 'merged', 'bob', 1000, 5000, 6000, ?4, 'charlie', 'https://gitlab.com/group/project/-/merge_requests/10')",
        rusqlite::params![iid * 100, project_id, iid, merged_at],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn make_entity_ref(entity_type: &str, entity_id: i64, iid: i64) -> EntityRef {
    EntityRef {
        entity_type: entity_type.to_owned(),
        entity_id,
        entity_iid: iid,
        project_path: "group/project".to_owned(),
    }
}

fn insert_state_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    state: &str,
    created_at: i64,
) {
    let gitlab_id: i64 = rand::random::<u32>().into();
    conn.execute(
        "INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, merge_request_id, state, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, 'alice', ?6)",
        rusqlite::params![gitlab_id, project_id, issue_id, mr_id, state, created_at],
    )
    .unwrap();
}

fn insert_label_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    action: &str,
    label_name: Option<&str>,
    created_at: i64,
) {
    let gitlab_id: i64 = rand::random::<u32>().into();
    conn.execute(
        "INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, merge_request_id, action, label_name, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, 'alice', ?7)",
        rusqlite::params![gitlab_id, project_id, issue_id, mr_id, action, label_name, created_at],
    )
    .unwrap();
}

fn insert_milestone_event(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
    action: &str,
    milestone_title: Option<&str>,
    created_at: i64,
) {
    let gitlab_id: i64 = rand::random::<u32>().into();
    conn.execute(
        "INSERT INTO resource_milestone_events (gitlab_id, project_id, issue_id, merge_request_id, action, milestone_title, actor_username, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, 'alice', ?7)",
        rusqlite::params![gitlab_id, project_id, issue_id, mr_id, action, milestone_title, created_at],
    )
    .unwrap();
}

#[test]
fn test_collect_creation_event() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let seeds = vec![make_entity_ref("issue", issue_id, 1)];

    let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();
    assert_eq!(events.len(), 1);
    assert!(matches!(events[0].event_type, TimelineEventType::Created));
    assert_eq!(events[0].timestamp, 1000);
    assert_eq!(events[0].actor, Some("alice".to_owned()));
    assert!(events[0].is_seed);
}

#[test]
fn test_collect_state_events() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000);
    insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 4000);

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

    // Created + 2 state changes = 3
    assert_eq!(events.len(), 3);
    assert!(matches!(events[0].event_type, TimelineEventType::Created));
    assert!(matches!(
        events[1].event_type,
        TimelineEventType::StateChanged { ref state } if state == "closed"
    ));
    assert!(matches!(
        events[2].event_type,
        TimelineEventType::StateChanged { ref state } if state == "reopened"
    ));
}

#[test]
fn test_collect_merged_dedup() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let mr_id = insert_mr(&conn, project_id, 10, Some(5000));

    // Also add a state event for 'merged' — this should NOT produce a StateChanged
    insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000);

    let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
    let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

    // Should have Created + Merged (not Created + StateChanged{merged} + Merged)
    let merged_count = events
        .iter()
        .filter(|e| matches!(e.event_type, TimelineEventType::Merged))
        .count();
    let state_merged_count = events
        .iter()
        .filter(|e| matches!(&e.event_type, TimelineEventType::StateChanged { state } if state == "merged"))
        .count();

    assert_eq!(merged_count, 1);
    assert_eq!(state_merged_count, 0);
}

#[test]
fn test_collect_null_label_fallback() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    insert_label_event(&conn, project_id, Some(issue_id), None, "add", None, 2000);

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

    let label_event = events.iter().find(|e| {
        matches!(&e.event_type, TimelineEventType::LabelAdded { label } if label == "[deleted label]")
    });
    assert!(label_event.is_some());
}

#[test]
fn test_collect_null_milestone_fallback() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    insert_milestone_event(&conn, project_id, Some(issue_id), None, "add", None, 2000);

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

    let ms_event = events.iter().find(|e| {
        matches!(&e.event_type, TimelineEventType::MilestoneSet { milestone } if milestone == "[deleted milestone]")
    });
    assert!(ms_event.is_some());
}

#[test]
fn test_collect_since_filter() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000);
    insert_state_event(&conn, project_id, Some(issue_id), None, "reopened", 5000);

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];

    // Since 4000: should exclude Created (1000) and closed (3000)
    let (events, _) = collect_events(&conn, &seeds, &[], &[], Some(4000), 100).unwrap();
    assert_eq!(events.len(), 1);
    assert_eq!(events[0].timestamp, 5000);
}

#[test]
fn test_collect_chronological_sort() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let mr_id = insert_mr(&conn, project_id, 10, Some(4000));

    insert_state_event(&conn, project_id, Some(issue_id), None, "closed", 3000);
    insert_label_event(
        &conn,
        project_id,
        None,
        Some(mr_id),
        "add",
        Some("bug"),
        2000,
    );

    let seeds = vec![
        make_entity_ref("issue", issue_id, 1),
        make_entity_ref("merge_request", mr_id, 10),
    ];
    let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

    // Verify chronological order
    for window in events.windows(2) {
        assert!(window[0].timestamp <= window[1].timestamp);
    }
}

#[test]
fn test_collect_respects_limit() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    for i in 0..20 {
        insert_state_event(
            &conn,
            project_id,
            Some(issue_id),
            None,
            "closed",
            3000 + i * 100,
        );
    }

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let (events, total) = collect_events(&conn, &seeds, &[], &[], None, 5).unwrap();
    assert_eq!(events.len(), 5);
    // 20 state changes + 1 created = 21 total before limit
    assert_eq!(total, 21);
}

#[test]
fn test_collect_evidence_notes_included() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    let evidence = vec![TimelineEvent {
        timestamp: 2500,
        entity_type: "issue".to_owned(),
        entity_id: issue_id,
        entity_iid: 1,
        project_path: "group/project".to_owned(),
        event_type: TimelineEventType::NoteEvidence {
            note_id: 42,
            snippet: "relevant note".to_owned(),
            discussion_id: Some(1),
        },
        summary: "Note by alice".to_owned(),
        actor: Some("alice".to_owned()),
        url: None,
        is_seed: true,
    }];

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let (events, _) = collect_events(&conn, &seeds, &[], &evidence, None, 100).unwrap();

    let note_event = events.iter().find(|e| {
        matches!(
            &e.event_type,
            TimelineEventType::NoteEvidence { note_id, .. } if *note_id == 42
        )
    });
    assert!(note_event.is_some());
}

#[test]
fn test_collect_merged_fallback_to_state_event() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    // MR with merged_at = NULL
    let mr_id = insert_mr(&conn, project_id, 10, None);

    // But has a state event for 'merged'
    insert_state_event(&conn, project_id, None, Some(mr_id), "merged", 5000);

    let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
    let (events, _) = collect_events(&conn, &seeds, &[], &[], None, 100).unwrap();

    let merged = events
        .iter()
        .find(|e| matches!(e.event_type, TimelineEventType::Merged));
    assert!(merged.is_some());
    assert_eq!(merged.unwrap().timestamp, 5000);
}
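The two merged-event tests above pin down a subtle rule: an MR yields exactly one Merged event, timestamped from merged_at when present and otherwise from a 'merged' resource_state_event, and that state event is then suppressed rather than surfacing as a StateChanged. An illustrative stand-alone sketch of the timestamp selection, using local stand-in types rather than the real implementation:

// Illustrative only: local stand-ins for the real rows.
struct Mr {
    merged_at: Option<i64>,
}

struct StateEvent {
    state: &'static str,
    created_at: i64,
}

fn merged_timestamp(mr: &Mr, state_events: &[StateEvent]) -> Option<i64> {
    // Prefer the authoritative merged_at; fall back to the 'merged' state event.
    mr.merged_at.or_else(|| {
        state_events
            .iter()
            .find(|e| e.state == "merged")
            .map(|e| e.created_at)
    })
}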
@@ -248,310 +248,5 @@ fn find_incoming(
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::db::{create_connection, run_migrations};
    use std::path::Path;

    fn setup_test_db() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }

    fn insert_project(conn: &Connection) -> i64 {
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
            [],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test', 'opened', 'alice', 1000, 2000, 3000)",
            rusqlite::params![iid * 100, project_id, iid],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_mr(conn: &Connection, project_id: i64, iid: i64) -> i64 {
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test MR', 'opened', 'bob', 1000, 2000, 3000)",
            rusqlite::params![iid * 100, project_id, iid],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    #[allow(clippy::too_many_arguments)]
    fn insert_ref(
        conn: &Connection,
        project_id: i64,
        source_type: &str,
        source_id: i64,
        target_type: &str,
        target_id: Option<i64>,
        ref_type: &str,
        source_method: &str,
    ) {
        conn.execute(
            "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, reference_type, source_method, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, 1000)",
            rusqlite::params![project_id, source_type, source_id, target_type, target_id, ref_type, source_method],
        )
        .unwrap();
    }

    fn make_entity_ref(entity_type: &str, entity_id: i64, iid: i64) -> EntityRef {
        EntityRef {
            entity_type: entity_type.to_owned(),
            entity_id,
            entity_iid: iid,
            project_path: "group/project".to_owned(),
        }
    }

    #[test]
    fn test_expand_depth_zero() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let seeds = vec![make_entity_ref("issue", issue_id, 1)];

        let result = expand_timeline(&conn, &seeds, 0, false, 100).unwrap();
        assert!(result.expanded_entities.is_empty());
        assert!(result.unresolved_references.is_empty());
    }

    #[test]
    fn test_expand_finds_linked_entity() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let mr_id = insert_mr(&conn, project_id, 10);

        // MR closes issue
        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "closes",
            "api",
        );

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

        assert_eq!(result.expanded_entities.len(), 1);
        assert_eq!(
            result.expanded_entities[0].entity_ref.entity_type,
            "merge_request"
        );
        assert_eq!(result.expanded_entities[0].entity_ref.entity_iid, 10);
        assert_eq!(result.expanded_entities[0].depth, 1);
    }

    #[test]
    fn test_expand_bidirectional() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let mr_id = insert_mr(&conn, project_id, 10);

        // MR closes issue (MR is source, issue is target)
        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "closes",
            "api",
        );

        // Starting from MR should find the issue (outgoing)
        let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
        let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

        assert_eq!(result.expanded_entities.len(), 1);
        assert_eq!(result.expanded_entities[0].entity_ref.entity_type, "issue");
    }

    #[test]
    fn test_expand_respects_max_entities() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        // Create 10 MRs that all close this issue
        for i in 2..=11 {
            let mr_id = insert_mr(&conn, project_id, i);
            insert_ref(
                &conn,
                project_id,
                "merge_request",
                mr_id,
                "issue",
                Some(issue_id),
                "closes",
                "api",
            );
        }

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let result = expand_timeline(&conn, &seeds, 1, false, 3).unwrap();

        assert!(result.expanded_entities.len() <= 3);
    }

    #[test]
    fn test_expand_skips_mentions_by_default() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let mr_id = insert_mr(&conn, project_id, 10);

        // MR mentions issue (should be skipped by default)
        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "mentioned",
            "note_parse",
        );

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();
        assert!(result.expanded_entities.is_empty());
    }

    #[test]
    fn test_expand_includes_mentions_when_flagged() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let mr_id = insert_mr(&conn, project_id, 10);

        // MR mentions issue
        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "mentioned",
            "note_parse",
        );

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let result = expand_timeline(&conn, &seeds, 1, true, 100).unwrap();
        assert_eq!(result.expanded_entities.len(), 1);
    }

    #[test]
    fn test_expand_collects_unresolved() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);

        // Unresolved cross-project reference
        conn.execute(
            "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_project_path, target_entity_iid, reference_type, source_method, created_at) VALUES (?1, 'issue', ?2, 'issue', NULL, 'other/repo', 42, 'closes', 'description_parse', 1000)",
            rusqlite::params![project_id, issue_id],
        )
        .unwrap();

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

        assert!(result.expanded_entities.is_empty());
        assert_eq!(result.unresolved_references.len(), 1);
        assert_eq!(
            result.unresolved_references[0].target_project,
            Some("other/repo".to_owned())
        );
        assert_eq!(result.unresolved_references[0].target_iid, Some(42));
    }

    #[test]
    fn test_expand_tracks_provenance() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let mr_id = insert_mr(&conn, project_id, 10);

        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "closes",
            "api",
        );

        let seeds = vec![make_entity_ref("issue", issue_id, 1)];
        let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

        assert_eq!(result.expanded_entities.len(), 1);
        let expanded = &result.expanded_entities[0];
        assert_eq!(expanded.via_reference_type, "closes");
        assert_eq!(expanded.via_source_method, "api");
        assert_eq!(expanded.via_from.entity_type, "issue");
        assert_eq!(expanded.via_from.entity_id, issue_id);
    }

    #[test]
    fn test_expand_no_duplicates() {
        let conn = setup_test_db();
        let project_id = insert_project(&conn);
        let issue_id = insert_issue(&conn, project_id, 1);
        let mr_id = insert_mr(&conn, project_id, 10);

        // Two references from MR to same issue (different methods)
        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "closes",
            "api",
        );
        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "related",
            "note_parse",
        );

        let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
        let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

        // Should only appear once (first-come wins)
        assert_eq!(result.expanded_entities.len(), 1);
    }

    #[test]
    fn test_expand_empty_seeds() {
        let conn = setup_test_db();
        let result = expand_timeline(&conn, &[], 1, false, 100).unwrap();
        assert!(result.expanded_entities.is_empty());
    }
}
#[path = "timeline_expand_tests.rs"]
mod tests;

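As with the collector, the expander's contract is visible from its call sites. A sketch with parameter roles inferred from the tests above; the inline names are illustrative, not taken from the implementation:

let result = expand_timeline(
    &conn,
    &seeds,
    1,     // depth: 0 disables expansion entirely
    false, // include 'mentioned' references only when true
    100,   // cap on expanded entities
)?;
// result.expanded_entities is deduplicated (first reference wins) and each
// entry carries provenance: via_reference_type, via_source_method, via_from.
// result.unresolved_references collects cross-project targets that could not
// be resolved locally, preserving target_project and target_iid.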
305
src/core/timeline_expand_tests.rs
Normal file
@@ -0,0 +1,305 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;

fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}

fn insert_project(conn: &Connection) -> i64 {
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
        [],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    conn.execute(
        "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test', 'opened', 'alice', 1000, 2000, 3000)",
        rusqlite::params![iid * 100, project_id, iid],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_mr(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    conn.execute(
        "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test MR', 'opened', 'bob', 1000, 2000, 3000)",
        rusqlite::params![iid * 100, project_id, iid],
    )
    .unwrap();
    conn.last_insert_rowid()
}

#[allow(clippy::too_many_arguments)]
fn insert_ref(
    conn: &Connection,
    project_id: i64,
    source_type: &str,
    source_id: i64,
    target_type: &str,
    target_id: Option<i64>,
    ref_type: &str,
    source_method: &str,
) {
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, reference_type, source_method, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, 1000)",
        rusqlite::params![project_id, source_type, source_id, target_type, target_id, ref_type, source_method],
    )
    .unwrap();
}

fn make_entity_ref(entity_type: &str, entity_id: i64, iid: i64) -> EntityRef {
    EntityRef {
        entity_type: entity_type.to_owned(),
        entity_id,
        entity_iid: iid,
        project_path: "group/project".to_owned(),
    }
}

#[test]
fn test_expand_depth_zero() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let seeds = vec![make_entity_ref("issue", issue_id, 1)];

    let result = expand_timeline(&conn, &seeds, 0, false, 100).unwrap();
    assert!(result.expanded_entities.is_empty());
    assert!(result.unresolved_references.is_empty());
}

#[test]
fn test_expand_finds_linked_entity() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let mr_id = insert_mr(&conn, project_id, 10);

    // MR closes issue
    insert_ref(
        &conn,
        project_id,
        "merge_request",
        mr_id,
        "issue",
        Some(issue_id),
        "closes",
        "api",
    );

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

    assert_eq!(result.expanded_entities.len(), 1);
    assert_eq!(
        result.expanded_entities[0].entity_ref.entity_type,
        "merge_request"
    );
    assert_eq!(result.expanded_entities[0].entity_ref.entity_iid, 10);
    assert_eq!(result.expanded_entities[0].depth, 1);
}

#[test]
fn test_expand_bidirectional() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let mr_id = insert_mr(&conn, project_id, 10);

    // MR closes issue (MR is source, issue is target)
    insert_ref(
        &conn,
        project_id,
        "merge_request",
        mr_id,
        "issue",
        Some(issue_id),
        "closes",
        "api",
    );

    // Starting from MR should find the issue (outgoing)
    let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
    let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

    assert_eq!(result.expanded_entities.len(), 1);
    assert_eq!(result.expanded_entities[0].entity_ref.entity_type, "issue");
}

#[test]
fn test_expand_respects_max_entities() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    // Create 10 MRs that all close this issue
    for i in 2..=11 {
        let mr_id = insert_mr(&conn, project_id, i);
        insert_ref(
            &conn,
            project_id,
            "merge_request",
            mr_id,
            "issue",
            Some(issue_id),
            "closes",
            "api",
        );
    }

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let result = expand_timeline(&conn, &seeds, 1, false, 3).unwrap();

    assert!(result.expanded_entities.len() <= 3);
}

#[test]
fn test_expand_skips_mentions_by_default() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let mr_id = insert_mr(&conn, project_id, 10);

    // MR mentions issue (should be skipped by default)
    insert_ref(
        &conn,
        project_id,
        "merge_request",
        mr_id,
        "issue",
        Some(issue_id),
        "mentioned",
        "note_parse",
    );

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();
    assert!(result.expanded_entities.is_empty());
}

#[test]
fn test_expand_includes_mentions_when_flagged() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let mr_id = insert_mr(&conn, project_id, 10);

    // MR mentions issue
    insert_ref(
        &conn,
        project_id,
        "merge_request",
        mr_id,
        "issue",
        Some(issue_id),
        "mentioned",
        "note_parse",
    );

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let result = expand_timeline(&conn, &seeds, 1, true, 100).unwrap();
    assert_eq!(result.expanded_entities.len(), 1);
}

#[test]
fn test_expand_collects_unresolved() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);

    // Unresolved cross-project reference
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_project_path, target_entity_iid, reference_type, source_method, created_at) VALUES (?1, 'issue', ?2, 'issue', NULL, 'other/repo', 42, 'closes', 'description_parse', 1000)",
        rusqlite::params![project_id, issue_id],
    )
    .unwrap();

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

    assert!(result.expanded_entities.is_empty());
    assert_eq!(result.unresolved_references.len(), 1);
    assert_eq!(
        result.unresolved_references[0].target_project,
        Some("other/repo".to_owned())
    );
    assert_eq!(result.unresolved_references[0].target_iid, Some(42));
}

#[test]
fn test_expand_tracks_provenance() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let mr_id = insert_mr(&conn, project_id, 10);

    insert_ref(
        &conn,
        project_id,
        "merge_request",
        mr_id,
        "issue",
        Some(issue_id),
        "closes",
        "api",
    );

    let seeds = vec![make_entity_ref("issue", issue_id, 1)];
    let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

    assert_eq!(result.expanded_entities.len(), 1);
    let expanded = &result.expanded_entities[0];
    assert_eq!(expanded.via_reference_type, "closes");
    assert_eq!(expanded.via_source_method, "api");
    assert_eq!(expanded.via_from.entity_type, "issue");
    assert_eq!(expanded.via_from.entity_id, issue_id);
}

#[test]
fn test_expand_no_duplicates() {
    let conn = setup_test_db();
    let project_id = insert_project(&conn);
    let issue_id = insert_issue(&conn, project_id, 1);
    let mr_id = insert_mr(&conn, project_id, 10);

    // Two references from MR to same issue (different methods)
    insert_ref(
        &conn,
        project_id,
        "merge_request",
        mr_id,
        "issue",
        Some(issue_id),
        "closes",
        "api",
    );
    insert_ref(
        &conn,
        project_id,
        "merge_request",
        mr_id,
        "issue",
        Some(issue_id),
        "related",
        "note_parse",
    );

    let seeds = vec![make_entity_ref("merge_request", mr_id, 10)];
    let result = expand_timeline(&conn, &seeds, 1, false, 100).unwrap();

    // Should only appear once (first-come wins)
    assert_eq!(result.expanded_entities.len(), 1);
}

#[test]
fn test_expand_empty_seeds() {
    let conn = setup_test_db();
    let result = expand_timeline(&conn, &[], 1, false, 100).unwrap();
    assert!(result.expanded_entities.is_empty());
}
@@ -233,320 +233,5 @@ fn truncate_to_chars(s: &str, max_chars: usize) -> String {
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::db::{create_connection, run_migrations};
    use std::path::Path;

    fn setup_test_db() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn
    }

    fn insert_test_project(conn: &Connection) -> i64 {
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
            [],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_test_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test issue', 'opened', 'alice', 1000, 2000, 3000)",
            rusqlite::params![iid * 100, project_id, iid],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_test_mr(conn: &Connection, project_id: i64, iid: i64) -> i64 {
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test MR', 'opened', 'bob', 1000, 2000, 3000)",
            rusqlite::params![iid * 100, project_id, iid],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_document(
        conn: &Connection,
        source_type: &str,
        source_id: i64,
        project_id: i64,
        content: &str,
    ) -> i64 {
        conn.execute(
            "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) VALUES (?1, ?2, ?3, ?4, ?5)",
            rusqlite::params![source_type, source_id, project_id, content, format!("hash_{source_id}")],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_discussion(
        conn: &Connection,
        project_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
    ) -> i64 {
        let noteable_type = if issue_id.is_some() {
            "Issue"
        } else {
            "MergeRequest"
        };
        conn.execute(
            "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, merge_request_id, noteable_type, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, 0)",
            rusqlite::params![format!("disc_{}", rand::random::<u32>()), project_id, issue_id, mr_id, noteable_type],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    fn insert_note(
        conn: &Connection,
        discussion_id: i64,
        project_id: i64,
        body: &str,
        is_system: bool,
    ) -> i64 {
        let gitlab_id: i64 = rand::random::<u32>().into();
        conn.execute(
            "INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, author_username, body, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, ?4, 'alice', ?5, 5000, 5000, 5000)",
            rusqlite::params![gitlab_id, discussion_id, project_id, is_system as i32, body],
        )
        .unwrap();
        conn.last_insert_rowid()
    }

    #[test]
    fn test_seed_empty_query_returns_empty() {
        let conn = setup_test_db();
        let result = seed_timeline(&conn, "", None, None, 50, 10).unwrap();
        assert!(result.seed_entities.is_empty());
        assert!(result.evidence_notes.is_empty());
    }

    #[test]
    fn test_seed_no_matches_returns_empty() {
        let conn = setup_test_db();
        let project_id = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, project_id, 1);
        insert_document(
            &conn,
            "issue",
            issue_id,
            project_id,
            "unrelated content here",
        );

        let result = seed_timeline(&conn, "nonexistent_xyzzy_query", None, None, 50, 10).unwrap();
        assert!(result.seed_entities.is_empty());
    }

    #[test]
    fn test_seed_finds_issue() {
        let conn = setup_test_db();
        let project_id = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, project_id, 42);
        insert_document(
            &conn,
            "issue",
            issue_id,
            project_id,
            "authentication error in login flow",
        );

        let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
        assert_eq!(result.seed_entities.len(), 1);
        assert_eq!(result.seed_entities[0].entity_type, "issue");
        assert_eq!(result.seed_entities[0].entity_iid, 42);
        assert_eq!(result.seed_entities[0].project_path, "group/project");
    }

    #[test]
    fn test_seed_finds_mr() {
        let conn = setup_test_db();
        let project_id = insert_test_project(&conn);
        let mr_id = insert_test_mr(&conn, project_id, 99);
        insert_document(
            &conn,
            "merge_request",
            mr_id,
            project_id,
            "fix authentication bug",
        );

        let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
        assert_eq!(result.seed_entities.len(), 1);
        assert_eq!(result.seed_entities[0].entity_type, "merge_request");
        assert_eq!(result.seed_entities[0].entity_iid, 99);
    }

    #[test]
    fn test_seed_deduplicates_entities() {
        let conn = setup_test_db();
        let project_id = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, project_id, 10);

        // Two documents referencing the same issue
        insert_document(
            &conn,
            "issue",
            issue_id,
            project_id,
            "authentication error first doc",
        );
        let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
        insert_document(
            &conn,
            "discussion",
            disc_id,
            project_id,
            "authentication error second doc",
        );

        let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
        // Should deduplicate: both map to the same issue
        assert_eq!(result.seed_entities.len(), 1);
        assert_eq!(result.seed_entities[0].entity_iid, 10);
    }

    #[test]
    fn test_seed_resolves_discussion_to_parent() {
        let conn = setup_test_db();
        let project_id = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, project_id, 7);
        let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
        insert_document(
            &conn,
            "discussion",
            disc_id,
            project_id,
            "deployment pipeline failed",
        );

        let result = seed_timeline(&conn, "deployment", None, None, 50, 10).unwrap();
        assert_eq!(result.seed_entities.len(), 1);
        assert_eq!(result.seed_entities[0].entity_type, "issue");
        assert_eq!(result.seed_entities[0].entity_iid, 7);
    }

    #[test]
    fn test_seed_evidence_capped() {
        let conn = setup_test_db();
        let project_id = insert_test_project(&conn);
        let issue_id = insert_test_issue(&conn, project_id, 1);

        // Create 15 discussion documents with notes about "deployment"
        for i in 0..15 {
            let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
            insert_document(
                &conn,
                "discussion",
                disc_id,
                project_id,
                &format!("deployment issue number {i}"),
            );
            insert_note(
                &conn,
                disc_id,
                project_id,
                &format!("deployment note {i}"),
                false,
            );
        }

        let result = seed_timeline(&conn, "deployment", None, None, 50, 5).unwrap();
        assert!(result.evidence_notes.len() <= 5);
    }

#[test]
|
||||
fn test_seed_evidence_snippet_truncated() {
|
||||
let conn = setup_test_db();
|
||||
let project_id = insert_test_project(&conn);
|
||||
let issue_id = insert_test_issue(&conn, project_id, 1);
|
||||
let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
|
||||
insert_document(
|
||||
&conn,
|
||||
"discussion",
|
||||
disc_id,
|
||||
project_id,
|
||||
"deployment configuration",
|
||||
);
|
||||
|
||||
let long_body = "x".repeat(500);
|
||||
insert_note(&conn, disc_id, project_id, &long_body, false);
|
||||
|
||||
let result = seed_timeline(&conn, "deployment", None, None, 50, 10).unwrap();
|
||||
assert!(!result.evidence_notes.is_empty());
|
||||
if let TimelineEventType::NoteEvidence { snippet, .. } =
|
||||
&result.evidence_notes[0].event_type
|
||||
{
|
||||
assert!(snippet.chars().count() <= 200);
|
||||
} else {
|
||||
panic!("Expected NoteEvidence");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_respects_project_filter() {
|
||||
let conn = setup_test_db();
|
||||
let project_id = insert_test_project(&conn);
|
||||
|
||||
// Insert a second project
|
||||
conn.execute(
|
||||
"INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (2, 'other/repo', 'https://gitlab.com/other/repo')",
|
||||
[],
|
||||
)
|
||||
.unwrap();
|
||||
let project2_id = conn.last_insert_rowid();
|
||||
|
||||
let issue1_id = insert_test_issue(&conn, project_id, 1);
|
||||
insert_document(
|
||||
&conn,
|
||||
"issue",
|
||||
issue1_id,
|
||||
project_id,
|
||||
"authentication error",
|
||||
);
|
||||
|
||||
let issue2_id = insert_test_issue(&conn, project2_id, 2);
|
||||
insert_document(
|
||||
&conn,
|
||||
"issue",
|
||||
issue2_id,
|
||||
project2_id,
|
||||
"authentication error",
|
||||
);
|
||||
|
||||
// Filter to project 1 only
|
||||
let result =
|
||||
seed_timeline(&conn, "authentication", Some(project_id), None, 50, 10).unwrap();
|
||||
assert_eq!(result.seed_entities.len(), 1);
|
||||
assert_eq!(result.seed_entities[0].project_path, "group/project");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_truncate_to_chars_short() {
|
||||
assert_eq!(truncate_to_chars("hello", 200), "hello");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_truncate_to_chars_long() {
|
||||
let long = "a".repeat(300);
|
||||
let result = truncate_to_chars(&long, 200);
|
||||
assert_eq!(result.chars().count(), 200);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_truncate_to_chars_multibyte() {
|
||||
let s = "\u{1F600}".repeat(300); // emoji
|
||||
let result = truncate_to_chars(&s, 200);
|
||||
assert_eq!(result.chars().count(), 200);
|
||||
// Verify valid UTF-8
|
||||
assert!(std::str::from_utf8(result.as_bytes()).is_ok());
|
||||
}
|
||||
}
|
||||
#[path = "timeline_seed_tests.rs"]
|
||||
mod tests;
|
||||
|
||||
312
src/core/timeline_seed_tests.rs
Normal file
@@ -0,0 +1,312 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;

fn setup_test_db() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    conn
}
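// This file is wired in as a child module via #[path], so the `use super::*;`
// above gives these tests access to timeline_seed's items, private ones
// included.
//
// The helpers below insert the minimal rows each test needs. Discussion and
// note gitlab ids are randomized to dodge collisions on (presumably) UNIQUE
// columns; issue/MR gitlab_id is derived as iid * 100, and the bare integer
// timestamps are arbitrary placeholders.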
fn insert_test_project(conn: &Connection) -> i64 {
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/project', 'https://gitlab.com/group/project')",
        [],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_test_issue(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    conn.execute(
        "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test issue', 'opened', 'alice', 1000, 2000, 3000)",
        rusqlite::params![iid * 100, project_id, iid],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_test_mr(conn: &Connection, project_id: i64, iid: i64) -> i64 {
    conn.execute(
        "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, 'Test MR', 'opened', 'bob', 1000, 2000, 3000)",
        rusqlite::params![iid * 100, project_id, iid],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_document(
    conn: &Connection,
    source_type: &str,
    source_id: i64,
    project_id: i64,
    content: &str,
) -> i64 {
    conn.execute(
        "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) VALUES (?1, ?2, ?3, ?4, ?5)",
        rusqlite::params![source_type, source_id, project_id, content, format!("hash_{source_id}")],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_discussion(
    conn: &Connection,
    project_id: i64,
    issue_id: Option<i64>,
    mr_id: Option<i64>,
) -> i64 {
    let noteable_type = if issue_id.is_some() {
        "Issue"
    } else {
        "MergeRequest"
    };
    conn.execute(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, merge_request_id, noteable_type, last_seen_at) VALUES (?1, ?2, ?3, ?4, ?5, 0)",
        rusqlite::params![format!("disc_{}", rand::random::<u32>()), project_id, issue_id, mr_id, noteable_type],
    )
    .unwrap();
    conn.last_insert_rowid()
}

fn insert_note(
    conn: &Connection,
    discussion_id: i64,
    project_id: i64,
    body: &str,
    is_system: bool,
) -> i64 {
    let gitlab_id: i64 = rand::random::<u32>().into();
    conn.execute(
        "INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, author_username, body, created_at, updated_at, last_seen_at) VALUES (?1, ?2, ?3, ?4, 'alice', ?5, 5000, 5000, 5000)",
        rusqlite::params![gitlab_id, discussion_id, project_id, is_system as i32, body],
    )
    .unwrap();
    conn.last_insert_rowid()
}
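// Call shape exercised below: seed_timeline(conn, query, project_filter,
// fourth_arg, limit_a, limit_b). The fourth argument is always None in these
// tests, so they don't pin down its meaning; the final argument demonstrably
// caps evidence_notes (see test_seed_evidence_capped), and the 50 is
// presumably a seed-entity limit.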
#[test]
fn test_seed_empty_query_returns_empty() {
    let conn = setup_test_db();
    let result = seed_timeline(&conn, "", None, None, 50, 10).unwrap();
    assert!(result.seed_entities.is_empty());
    assert!(result.evidence_notes.is_empty());
}

#[test]
fn test_seed_no_matches_returns_empty() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 1);
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "unrelated content here",
    );

    let result = seed_timeline(&conn, "nonexistent_xyzzy_query", None, None, 50, 10).unwrap();
    assert!(result.seed_entities.is_empty());
}

#[test]
fn test_seed_finds_issue() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 42);
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "authentication error in login flow",
    );

    let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_type, "issue");
    assert_eq!(result.seed_entities[0].entity_iid, 42);
    assert_eq!(result.seed_entities[0].project_path, "group/project");
}

#[test]
fn test_seed_finds_mr() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let mr_id = insert_test_mr(&conn, project_id, 99);
    insert_document(
        &conn,
        "merge_request",
        mr_id,
        project_id,
        "fix authentication bug",
    );

    let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_type, "merge_request");
    assert_eq!(result.seed_entities[0].entity_iid, 99);
}

#[test]
fn test_seed_deduplicates_entities() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 10);

    // Two documents referencing the same issue
    insert_document(
        &conn,
        "issue",
        issue_id,
        project_id,
        "authentication error first doc",
    );
    let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
    insert_document(
        &conn,
        "discussion",
        disc_id,
        project_id,
        "authentication error second doc",
    );

    let result = seed_timeline(&conn, "authentication", None, None, 50, 10).unwrap();
    // Should deduplicate: both map to the same issue
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_iid, 10);
}

#[test]
fn test_seed_resolves_discussion_to_parent() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 7);
    let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
    insert_document(
        &conn,
        "discussion",
        disc_id,
        project_id,
        "deployment pipeline failed",
    );

    let result = seed_timeline(&conn, "deployment", None, None, 50, 10).unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].entity_type, "issue");
    assert_eq!(result.seed_entities[0].entity_iid, 7);
}

#[test]
fn test_seed_evidence_capped() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 1);

    // Create 15 discussion documents with notes about "deployment"
    for i in 0..15 {
        let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
        insert_document(
            &conn,
            "discussion",
            disc_id,
            project_id,
            &format!("deployment issue number {i}"),
        );
        insert_note(
            &conn,
            disc_id,
            project_id,
            &format!("deployment note {i}"),
            false,
        );
    }

    let result = seed_timeline(&conn, "deployment", None, None, 50, 5).unwrap();
    assert!(result.evidence_notes.len() <= 5);
}

#[test]
fn test_seed_evidence_snippet_truncated() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);
    let issue_id = insert_test_issue(&conn, project_id, 1);
    let disc_id = insert_discussion(&conn, project_id, Some(issue_id), None);
    insert_document(
        &conn,
        "discussion",
        disc_id,
        project_id,
        "deployment configuration",
    );

    let long_body = "x".repeat(500);
    insert_note(&conn, disc_id, project_id, &long_body, false);

    let result = seed_timeline(&conn, "deployment", None, None, 50, 10).unwrap();
    assert!(!result.evidence_notes.is_empty());
    if let TimelineEventType::NoteEvidence { snippet, .. } = &result.evidence_notes[0].event_type {
        assert!(snippet.chars().count() <= 200);
    } else {
        panic!("Expected NoteEvidence");
    }
}

#[test]
fn test_seed_respects_project_filter() {
    let conn = setup_test_db();
    let project_id = insert_test_project(&conn);

    // Insert a second project
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (2, 'other/repo', 'https://gitlab.com/other/repo')",
        [],
    )
    .unwrap();
    let project2_id = conn.last_insert_rowid();

    let issue1_id = insert_test_issue(&conn, project_id, 1);
    insert_document(
        &conn,
        "issue",
        issue1_id,
        project_id,
        "authentication error",
    );

    let issue2_id = insert_test_issue(&conn, project2_id, 2);
    insert_document(
        &conn,
        "issue",
        issue2_id,
        project2_id,
        "authentication error",
    );

    // Filter to project 1 only
    let result = seed_timeline(&conn, "authentication", Some(project_id), None, 50, 10).unwrap();
    assert_eq!(result.seed_entities.len(), 1);
    assert_eq!(result.seed_entities[0].project_path, "group/project");
}

#[test]
fn test_truncate_to_chars_short() {
    assert_eq!(truncate_to_chars("hello", 200), "hello");
}

#[test]
fn test_truncate_to_chars_long() {
    let long = "a".repeat(300);
    let result = truncate_to_chars(&long, 200);
    assert_eq!(result.chars().count(), 200);
}

#[test]
fn test_truncate_to_chars_multibyte() {
    let s = "\u{1F600}".repeat(300); // emoji
    let result = truncate_to_chars(&s, 200);
    assert_eq!(result.chars().count(), 200);
    // Verify valid UTF-8
    assert!(std::str::from_utf8(result.as_bytes()).is_ok());
}
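// The three tests above pin down truncate_to_chars' contract: identity for
// short input, a cap counted in chars rather than bytes, and no
// mid-codepoint cuts. A sketch consistent with that contract (not
// necessarily the actual implementation):
//
//     fn truncate_to_chars(s: &str, max: usize) -> String {
//         s.chars().take(max).collect()
//     }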