refactor: extract unit tests into separate _tests.rs files

Move inline #[cfg(test)] mod tests { ... } blocks from 22 source files
into dedicated _tests.rs companion files, wired via:

    #[cfg(test)]
    #[path = "module_tests.rs"]
    mod tests;

This keeps implementation-focused source files leaner and more scannable
while preserving full access to private items through `use super::*;`.

Modules extracted:
  core:      db, note_parser, payloads, project, references, sync_run,
             timeline_collect, timeline_expand, timeline_seed
  cli:       list (55 tests), who (75 tests)
  documents: extractor (43 tests), regenerator
  embedding: change_detector, chunking
  gitlab:    graphql (wiremock async tests), transformers/issue
  ingestion: dirty_tracker, discussions, issues, mr_diffs

Also adds conflicts_with("explain_score") to the --detail flag in the
who command to prevent mutually exclusive flags from being combined.

All 629 unit tests pass. No behavior changes apart from the new
`--detail`/`--explain-score` conflict validation in the who command.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Taylor Eernisse
2026-02-13 10:54:02 -05:00
parent 5c2df3df3b
commit 7e0e6a91f2
43 changed files with 11672 additions and 11942 deletions

View File

@@ -124,173 +124,5 @@ pub fn record_dirty_error(
}
#[cfg(test)]
mod tests {
    use super::*;

    /// In-memory SQLite handle carrying only the `dirty_sources` table and
    /// its backoff index — the minimal schema these tests exercise.
    fn setup_db() -> Connection {
        let db = Connection::open_in_memory().unwrap();
        db.execute_batch(
            "
CREATE TABLE dirty_sources (
source_type TEXT NOT NULL CHECK (source_type IN ('issue','merge_request','discussion','note')),
source_id INTEGER NOT NULL,
queued_at INTEGER NOT NULL,
attempt_count INTEGER NOT NULL DEFAULT 0,
last_attempt_at INTEGER,
last_error TEXT,
next_attempt_at INTEGER,
PRIMARY KEY(source_type, source_id)
);
CREATE INDEX idx_dirty_sources_next_attempt ON dirty_sources(next_attempt_at);
",
        )
        .unwrap();
        db
    }

    /// Total number of queued rows.
    fn row_count(db: &Connection) -> i64 {
        db.query_row("SELECT COUNT(*) FROM dirty_sources", [], |r| r.get(0))
            .unwrap()
    }

    /// `attempt_count` of the row with source_id = 1.
    fn attempts_for_one(db: &Connection) -> i64 {
        db.query_row(
            "SELECT attempt_count FROM dirty_sources WHERE source_id = 1",
            [],
            |r| r.get(0),
        )
        .unwrap()
    }

    // Marking a source dirty inserts exactly one queue row.
    #[test]
    fn test_mark_dirty_inserts() {
        let db = setup_db();
        mark_dirty(&db, SourceType::Issue, 1).unwrap();
        assert_eq!(row_count(&db), 1);
    }

    // The transactional variant inserts once the transaction commits.
    #[test]
    fn test_mark_dirty_tx_inserts() {
        let mut db = setup_db();
        let tx = db.transaction().unwrap();
        mark_dirty_tx(&tx, SourceType::Issue, 1).unwrap();
        tx.commit().unwrap();
        assert_eq!(row_count(&db), 1);
    }

    // Re-queueing after a failure resets both the attempt counter and the
    // backoff window.
    #[test]
    fn test_requeue_resets_backoff() {
        let db = setup_db();
        mark_dirty(&db, SourceType::Issue, 1).unwrap();
        record_dirty_error(&db, SourceType::Issue, 1, "test error").unwrap();
        assert_eq!(attempts_for_one(&db), 1);
        mark_dirty(&db, SourceType::Issue, 1).unwrap();
        assert_eq!(attempts_for_one(&db), 0);
        let next: Option<i64> = db
            .query_row(
                "SELECT next_attempt_at FROM dirty_sources WHERE source_id = 1",
                [],
                |r| r.get(0),
            )
            .unwrap();
        assert!(next.is_none());
    }

    // A row whose retry window is in the future must not be returned.
    #[test]
    fn test_get_respects_backoff() {
        let db = setup_db();
        mark_dirty(&db, SourceType::Issue, 1).unwrap();
        db.execute(
            "UPDATE dirty_sources SET next_attempt_at = 9999999999999 WHERE source_id = 1",
            [],
        )
        .unwrap();
        assert!(get_dirty_sources(&db).unwrap().is_empty());
    }

    // Fetch ordering: id 1 has attempt_count = 2, id 2 has attempt_count = 0,
    // and id 2 is returned first — rows with fewer failed attempts come first.
    #[test]
    fn test_get_orders_by_attempt_count() {
        let db = setup_db();
        mark_dirty(&db, SourceType::Issue, 1).unwrap();
        db.execute(
            "UPDATE dirty_sources SET attempt_count = 2 WHERE source_id = 1",
            [],
        )
        .unwrap();
        mark_dirty(&db, SourceType::Issue, 2).unwrap();
        let rows = get_dirty_sources(&db).unwrap();
        assert_eq!(rows.len(), 2);
        assert_eq!(rows[0].1, 2);
        assert_eq!(rows[1].1, 1);
    }

    // A single fetch is capped at 500 rows even when more are queued.
    #[test]
    fn test_batch_size_500() {
        let db = setup_db();
        for id in 0..600 {
            mark_dirty(&db, SourceType::Issue, id).unwrap();
        }
        assert_eq!(get_dirty_sources(&db).unwrap().len(), 500);
    }

    // Clearing a dirty source removes its queue row.
    #[test]
    fn test_clear_removes() {
        let db = setup_db();
        mark_dirty(&db, SourceType::Issue, 1).unwrap();
        clear_dirty(&db, SourceType::Issue, 1).unwrap();
        assert_eq!(row_count(&db), 0);
    }

    // The 'note' source type round-trips through mark / get / clear.
    #[test]
    fn test_mark_dirty_note_type() {
        let db = setup_db();
        mark_dirty(&db, SourceType::Note, 42).unwrap();
        let rows = get_dirty_sources(&db).unwrap();
        assert_eq!(rows.len(), 1);
        assert_eq!(rows[0].0, SourceType::Note);
        assert_eq!(rows[0].1, 42);
        clear_dirty(&db, SourceType::Note, 42).unwrap();
        assert!(get_dirty_sources(&db).unwrap().is_empty());
    }

    // Repeated fetch-and-clear passes (batched at 500) drain the whole queue.
    #[test]
    fn test_drain_loop() {
        let db = setup_db();
        for id in 0..1200 {
            mark_dirty(&db, SourceType::Issue, id).unwrap();
        }
        let mut drained = 0;
        loop {
            let batch = get_dirty_sources(&db).unwrap();
            if batch.is_empty() {
                break;
            }
            for &(source, id) in &batch {
                clear_dirty(&db, source, id).unwrap();
            }
            drained += batch.len();
        }
        assert_eq!(drained, 1200);
    }
}
// Unit tests live in the sibling `dirty_tracker_tests.rs` file; `#[path]`
// splices it in as the `tests` child module, so the tests retain
// `use super::*;` access to this module's private items.
#[path = "dirty_tracker_tests.rs"]
mod tests;

View File

@@ -0,0 +1,168 @@
use super::*;

/// In-memory SQLite handle carrying only the `dirty_sources` table and
/// its backoff index — the minimal schema these tests exercise.
fn setup_db() -> Connection {
    let db = Connection::open_in_memory().unwrap();
    db.execute_batch(
        "
CREATE TABLE dirty_sources (
source_type TEXT NOT NULL CHECK (source_type IN ('issue','merge_request','discussion','note')),
source_id INTEGER NOT NULL,
queued_at INTEGER NOT NULL,
attempt_count INTEGER NOT NULL DEFAULT 0,
last_attempt_at INTEGER,
last_error TEXT,
next_attempt_at INTEGER,
PRIMARY KEY(source_type, source_id)
);
CREATE INDEX idx_dirty_sources_next_attempt ON dirty_sources(next_attempt_at);
",
    )
    .unwrap();
    db
}

/// Total number of queued rows.
fn row_count(db: &Connection) -> i64 {
    db.query_row("SELECT COUNT(*) FROM dirty_sources", [], |r| r.get(0))
        .unwrap()
}

/// `attempt_count` of the row with source_id = 1.
fn attempts_for_one(db: &Connection) -> i64 {
    db.query_row(
        "SELECT attempt_count FROM dirty_sources WHERE source_id = 1",
        [],
        |r| r.get(0),
    )
    .unwrap()
}

// Marking a source dirty inserts exactly one queue row.
#[test]
fn test_mark_dirty_inserts() {
    let db = setup_db();
    mark_dirty(&db, SourceType::Issue, 1).unwrap();
    assert_eq!(row_count(&db), 1);
}

// The transactional variant inserts once the transaction commits.
#[test]
fn test_mark_dirty_tx_inserts() {
    let mut db = setup_db();
    let tx = db.transaction().unwrap();
    mark_dirty_tx(&tx, SourceType::Issue, 1).unwrap();
    tx.commit().unwrap();
    assert_eq!(row_count(&db), 1);
}

// Re-queueing after a failure resets both the attempt counter and the
// backoff window.
#[test]
fn test_requeue_resets_backoff() {
    let db = setup_db();
    mark_dirty(&db, SourceType::Issue, 1).unwrap();
    record_dirty_error(&db, SourceType::Issue, 1, "test error").unwrap();
    assert_eq!(attempts_for_one(&db), 1);
    mark_dirty(&db, SourceType::Issue, 1).unwrap();
    assert_eq!(attempts_for_one(&db), 0);
    let next: Option<i64> = db
        .query_row(
            "SELECT next_attempt_at FROM dirty_sources WHERE source_id = 1",
            [],
            |r| r.get(0),
        )
        .unwrap();
    assert!(next.is_none());
}

// A row whose retry window is in the future must not be returned.
#[test]
fn test_get_respects_backoff() {
    let db = setup_db();
    mark_dirty(&db, SourceType::Issue, 1).unwrap();
    db.execute(
        "UPDATE dirty_sources SET next_attempt_at = 9999999999999 WHERE source_id = 1",
        [],
    )
    .unwrap();
    assert!(get_dirty_sources(&db).unwrap().is_empty());
}

// Fetch ordering: id 1 has attempt_count = 2, id 2 has attempt_count = 0,
// and id 2 is returned first — rows with fewer failed attempts come first.
#[test]
fn test_get_orders_by_attempt_count() {
    let db = setup_db();
    mark_dirty(&db, SourceType::Issue, 1).unwrap();
    db.execute(
        "UPDATE dirty_sources SET attempt_count = 2 WHERE source_id = 1",
        [],
    )
    .unwrap();
    mark_dirty(&db, SourceType::Issue, 2).unwrap();
    let rows = get_dirty_sources(&db).unwrap();
    assert_eq!(rows.len(), 2);
    assert_eq!(rows[0].1, 2);
    assert_eq!(rows[1].1, 1);
}

// A single fetch is capped at 500 rows even when more are queued.
#[test]
fn test_batch_size_500() {
    let db = setup_db();
    for id in 0..600 {
        mark_dirty(&db, SourceType::Issue, id).unwrap();
    }
    assert_eq!(get_dirty_sources(&db).unwrap().len(), 500);
}

// Clearing a dirty source removes its queue row.
#[test]
fn test_clear_removes() {
    let db = setup_db();
    mark_dirty(&db, SourceType::Issue, 1).unwrap();
    clear_dirty(&db, SourceType::Issue, 1).unwrap();
    assert_eq!(row_count(&db), 0);
}

// The 'note' source type round-trips through mark / get / clear.
#[test]
fn test_mark_dirty_note_type() {
    let db = setup_db();
    mark_dirty(&db, SourceType::Note, 42).unwrap();
    let rows = get_dirty_sources(&db).unwrap();
    assert_eq!(rows.len(), 1);
    assert_eq!(rows[0].0, SourceType::Note);
    assert_eq!(rows[0].1, 42);
    clear_dirty(&db, SourceType::Note, 42).unwrap();
    assert!(get_dirty_sources(&db).unwrap().is_empty());
}

// Repeated fetch-and-clear passes (batched at 500) drain the whole queue.
#[test]
fn test_drain_loop() {
    let db = setup_db();
    for id in 0..1200 {
        mark_dirty(&db, SourceType::Issue, id).unwrap();
    }
    let mut drained = 0;
    loop {
        let batch = get_dirty_sources(&db).unwrap();
        if batch.is_empty() {
            break;
        }
        for &(source, id) in &batch {
            clear_dirty(&db, source, id).unwrap();
        }
        drained += batch.len();
    }
    assert_eq!(drained, 1200);
}

View File

@@ -467,475 +467,5 @@ fn update_issue_sync_timestamp(conn: &Connection, issue_id: i64, updated_at: i64
}
#[cfg(test)]
mod tests {
    //! Unit tests for issue-discussion ingestion: note upsert identity and
    //! semantic-change detection, stale-note sweeping (including cascaded
    //! document / dirty-source cleanup), and dirty-flag propagation when a
    //! parent issue changes.
    use super::*;
    use crate::core::db::{create_connection, run_migrations};
    use crate::gitlab::transformers::NormalizedNote;
    use std::path::Path;

    // A default-constructed result starts with every counter at zero.
    #[test]
    fn result_default_has_zero_counts() {
        let result = IngestDiscussionsResult::default();
        assert_eq!(result.discussions_fetched, 0);
        assert_eq!(result.discussions_upserted, 0);
        assert_eq!(result.notes_upserted, 0);
    }

    /// Migrated in-memory DB seeded with one project (local id 1), one issue
    /// (gitlab_id 100) and one discussion ('disc-1') attached to that issue.
    fn setup() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \
             VALUES (1, 'group/repo', 'https://gitlab.com/group/repo')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO issues (gitlab_id, iid, project_id, title, state, author_username, created_at, updated_at, last_seen_at) \
             VALUES (100, 1, 1, 'Test Issue', 'opened', 'testuser', 1000, 2000, 3000)",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, noteable_type, individual_note, last_seen_at, resolvable, resolved) \
             VALUES ('disc-1', 1, 1, 'Issue', 0, 3000, 0, 0)",
            [],
        )
        .unwrap();
        conn
    }

    /// Local row id of the single discussion inserted by `setup`.
    fn get_discussion_id(conn: &Connection) -> i64 {
        conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
            .unwrap()
    }

    /// Builds a `NormalizedNote` with test defaults; only the fields these
    /// tests vary are parameters, everything else is None/false. Note that
    /// `last_seen_at` mirrors `updated_at`.
    #[allow(clippy::too_many_arguments)]
    fn make_note(
        gitlab_id: i64,
        project_id: i64,
        body: &str,
        note_type: Option<&str>,
        created_at: i64,
        updated_at: i64,
        resolved: bool,
        resolved_by: Option<&str>,
    ) -> NormalizedNote {
        NormalizedNote {
            gitlab_id,
            project_id,
            note_type: note_type.map(String::from),
            is_system: false,
            author_id: None,
            author_username: "testuser".to_string(),
            body: body.to_string(),
            created_at,
            updated_at,
            last_seen_at: updated_at,
            position: 0,
            resolvable: false,
            resolved,
            resolved_by: resolved_by.map(String::from),
            resolved_at: None,
            position_old_path: None,
            position_new_path: None,
            position_old_line: None,
            position_new_line: None,
            position_type: None,
            position_line_range_start: None,
            position_line_range_end: None,
            position_base_sha: None,
            position_start_sha: None,
            position_head_sha: None,
        }
    }

    // Re-upserting the same gitlab_ids must keep the same local row ids.
    #[test]
    fn test_issue_note_upsert_stable_id() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let last_seen_at = 5000;
        let note1 = make_note(1001, 1, "First note", None, 1000, 2000, false, None);
        let note2 = make_note(1002, 1, "Second note", None, 1000, 2000, false, None);
        let out1 = upsert_note_for_issue(&conn, disc_id, &note1, last_seen_at, None).unwrap();
        let out2 = upsert_note_for_issue(&conn, disc_id, &note2, last_seen_at, None).unwrap();
        let id1 = out1.local_note_id;
        let id2 = out2.local_note_id;
        // Re-sync same gitlab_ids
        let out1b = upsert_note_for_issue(&conn, disc_id, &note1, last_seen_at + 1, None).unwrap();
        let out2b = upsert_note_for_issue(&conn, disc_id, &note2, last_seen_at + 1, None).unwrap();
        assert_eq!(id1, out1b.local_note_id);
        assert_eq!(id2, out2b.local_note_id);
    }

    // A body edit (with a newer updated_at) is reported as a semantic change.
    #[test]
    fn test_issue_note_upsert_detects_body_change() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let note = make_note(2001, 1, "Original body", None, 1000, 2000, false, None);
        upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        // make_note already sets updated_at = 3000 from its parameter, so no
        // extra field assignment is needed.
        let changed = make_note(2001, 1, "Updated body", None, 1000, 3000, false, None);
        let outcome = upsert_note_for_issue(&conn, disc_id, &changed, 5001, None).unwrap();
        assert!(outcome.changed_semantics);
    }

    // Re-syncing a byte-identical note is not a semantic change.
    #[test]
    fn test_issue_note_upsert_unchanged_returns_false() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let note = make_note(3001, 1, "Same body", None, 1000, 2000, false, None);
        upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        // Re-sync identical note
        let outcome = upsert_note_for_issue(&conn, disc_id, &note, 5001, None).unwrap();
        assert!(!outcome.changed_semantics);
    }

    // Bumping only updated_at (body unchanged) must not count as semantic.
    #[test]
    fn test_issue_note_upsert_updated_at_only_does_not_mark_semantic_change() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let note = make_note(4001, 1, "Body stays", None, 1000, 2000, false, None);
        upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        // Only change updated_at (non-semantic field); make_note already sets
        // it to 9999 from its parameter.
        let same = make_note(4001, 1, "Body stays", None, 1000, 9999, false, None);
        let outcome = upsert_note_for_issue(&conn, disc_id, &same, 5001, None).unwrap();
        assert!(!outcome.changed_semantics);
    }

    // A note not re-seen at the newer sync timestamp is swept away.
    #[test]
    fn test_issue_note_sweep_removes_stale() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let note1 = make_note(5001, 1, "Keep me", None, 1000, 2000, false, None);
        let note2 = make_note(5002, 1, "Stale me", None, 1000, 2000, false, None);
        upsert_note_for_issue(&conn, disc_id, &note1, 5000, None).unwrap();
        upsert_note_for_issue(&conn, disc_id, &note2, 5000, None).unwrap();
        // Re-sync only note1 with newer timestamp
        upsert_note_for_issue(&conn, disc_id, &note1, 6000, None).unwrap();
        // Sweep should remove note2 (last_seen_at=5000 < 6000)
        let swept = sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
        assert_eq!(swept, 1);
        let count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM notes WHERE discussion_id = ?",
                [disc_id],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(count, 1);
    }

    // The outcome's local_note_id must match the actual DB row id.
    #[test]
    fn test_issue_note_upsert_returns_local_id() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let note = make_note(6001, 1, "Check my ID", None, 1000, 2000, false, None);
        let outcome = upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        // Verify the local_note_id matches what's in the DB
        let db_id: i64 = conn
            .query_row(
                "SELECT id FROM notes WHERE gitlab_id = ?",
                [6001_i64],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(outcome.local_note_id, db_id);
    }

    // author_id, when present on the normalized note, is persisted.
    #[test]
    fn test_issue_note_upsert_captures_author_id() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let mut note = make_note(7001, 1, "With author", None, 1000, 2000, false, None);
        note.author_id = Some(12345);
        upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        let stored: Option<i64> = conn
            .query_row(
                "SELECT author_id FROM notes WHERE gitlab_id = ?",
                [7001_i64],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(stored, Some(12345));
    }

    // A missing author_id is stored as NULL, not an error.
    #[test]
    fn test_note_upsert_author_id_nullable() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let note = make_note(7002, 1, "No author id", None, 1000, 2000, false, None);
        // author_id defaults to None in make_note
        upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        let stored: Option<i64> = conn
            .query_row(
                "SELECT author_id FROM notes WHERE gitlab_id = ?",
                [7002_i64],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(stored, None);
    }

    // A username rename on re-sync must not clobber the stored author_id.
    #[test]
    fn test_note_author_id_survives_username_change() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let mut note = make_note(7003, 1, "Original body", None, 1000, 2000, false, None);
        note.author_id = Some(99999);
        note.author_username = "oldname".to_string();
        upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        // Re-sync with changed username, changed body, same author_id
        let mut updated = make_note(7003, 1, "Updated body", None, 1000, 3000, false, None);
        updated.author_id = Some(99999);
        updated.author_username = "newname".to_string();
        upsert_note_for_issue(&conn, disc_id, &updated, 5001, None).unwrap();
        // author_id must survive the re-sync intact
        let stored_id: Option<i64> = conn
            .query_row(
                "SELECT author_id FROM notes WHERE gitlab_id = ?",
                [7003_i64],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(stored_id, Some(99999));
    }

    /// Inserts a minimal `documents` row pointing at a local note.
    fn insert_note_document(conn: &Connection, note_local_id: i64) {
        conn.execute(
            "INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
             VALUES ('note', ?1, 1, 'note content', 'hash123')",
            [note_local_id],
        )
        .unwrap();
    }

    /// Queues a dirty_sources row for a local note.
    fn insert_note_dirty_source(conn: &Connection, note_local_id: i64) {
        conn.execute(
            "INSERT INTO dirty_sources (source_type, source_id, queued_at) \
             VALUES ('note', ?1, 1000)",
            [note_local_id],
        )
        .unwrap();
    }

    /// Number of documents attached to the given local note.
    fn count_note_documents(conn: &Connection, note_local_id: i64) -> i64 {
        conn.query_row(
            "SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?",
            [note_local_id],
            |row| row.get(0),
        )
        .unwrap()
    }

    /// Number of dirty_sources rows queued for the given local note.
    fn count_note_dirty_sources(conn: &Connection, note_local_id: i64) -> i64 {
        conn.query_row(
            "SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note' AND source_id = ?",
            [note_local_id],
            |row| row.get(0),
        )
        .unwrap()
    }

    // Sweeping a stale note cascades to its document and dirty_source rows,
    // while surviving notes keep theirs.
    #[test]
    fn test_issue_note_sweep_deletes_note_documents_immediately() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        // Insert 3 notes
        let note1 = make_note(9001, 1, "Keep me", None, 1000, 2000, false, None);
        let note2 = make_note(9002, 1, "Keep me too", None, 1000, 2000, false, None);
        let note3 = make_note(9003, 1, "Stale me", None, 1000, 2000, false, None);
        let out1 = upsert_note_for_issue(&conn, disc_id, &note1, 5000, None).unwrap();
        let out2 = upsert_note_for_issue(&conn, disc_id, &note2, 5000, None).unwrap();
        let out3 = upsert_note_for_issue(&conn, disc_id, &note3, 5000, None).unwrap();
        // Add documents for all 3
        insert_note_document(&conn, out1.local_note_id);
        insert_note_document(&conn, out2.local_note_id);
        insert_note_document(&conn, out3.local_note_id);
        // Add dirty_sources for note3
        insert_note_dirty_source(&conn, out3.local_note_id);
        // Re-sync only notes 1 and 2 with newer timestamp
        upsert_note_for_issue(&conn, disc_id, &note1, 6000, None).unwrap();
        upsert_note_for_issue(&conn, disc_id, &note2, 6000, None).unwrap();
        // Sweep should remove note3 and its document + dirty_source
        sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
        // Stale note's document should be gone
        assert_eq!(count_note_documents(&conn, out3.local_note_id), 0);
        assert_eq!(count_note_dirty_sources(&conn, out3.local_note_id), 0);
        // Kept notes' documents should survive
        assert_eq!(count_note_documents(&conn, out1.local_note_id), 1);
        assert_eq!(count_note_documents(&conn, out2.local_note_id), 1);
    }

    // Sweeping a note that never had a document must not error.
    #[test]
    fn test_sweep_deletion_handles_note_without_document() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        let note = make_note(9004, 1, "No doc", None, 1000, 2000, false, None);
        upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        // Don't insert any document -- sweep should still work without error
        let swept = sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
        assert_eq!(swept, 1);
    }

    // All three DELETEs (note, document, dirty_source) land together when the
    // sweep runs inside a transaction.
    #[test]
    fn test_set_based_deletion_atomicity() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        // Insert a stale note with both document and dirty_source
        let note = make_note(9005, 1, "Stale with deps", None, 1000, 2000, false, None);
        let out = upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        insert_note_document(&conn, out.local_note_id);
        insert_note_dirty_source(&conn, out.local_note_id);
        // Verify they exist before sweep
        assert_eq!(count_note_documents(&conn, out.local_note_id), 1);
        assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 1);
        // The sweep function already runs inside a transaction (called from
        // ingest_discussions_for_issue's tx). Simulate by wrapping in a transaction.
        let tx = conn.unchecked_transaction().unwrap();
        sweep_stale_issue_notes(&tx, disc_id, 6000).unwrap();
        tx.commit().unwrap();
        // All three DELETEs must have happened
        assert_eq!(count_note_documents(&conn, out.local_note_id), 0);
        assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 0);
        let note_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM notes WHERE gitlab_id = ?",
                [9005_i64],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(note_count, 0);
    }

    /// Number of dirty_sources rows of type 'note'.
    fn count_dirty_notes(conn: &Connection) -> i64 {
        conn.query_row(
            "SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
            [],
            |row| row.get(0),
        )
        .unwrap()
    }

    // A parent title change re-queues the discussion's user notes (not system
    // notes) via the propagation SQL.
    #[test]
    fn test_parent_title_change_marks_notes_dirty() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        // Insert two user notes and one system note
        let note1 = make_note(10001, 1, "User note 1", None, 1000, 2000, false, None);
        let note2 = make_note(10002, 1, "User note 2", None, 1000, 2000, false, None);
        let mut sys_note = make_note(10003, 1, "System note", None, 1000, 2000, false, None);
        sys_note.is_system = true;
        let out1 = upsert_note_for_issue(&conn, disc_id, &note1, 5000, None).unwrap();
        let out2 = upsert_note_for_issue(&conn, disc_id, &note2, 5000, None).unwrap();
        upsert_note_for_issue(&conn, disc_id, &sys_note, 5000, None).unwrap();
        // Clear any dirty_sources from individual note upserts
        conn.execute("DELETE FROM dirty_sources WHERE source_type = 'note'", [])
            .unwrap();
        assert_eq!(count_dirty_notes(&conn), 0);
        // Simulate parent title change triggering discussion re-ingest:
        // update the issue title, then run the propagation SQL
        conn.execute("UPDATE issues SET title = 'Changed Title' WHERE id = 1", [])
            .unwrap();
        // Run the propagation query (same as in ingestion code)
        conn.execute(
            "INSERT INTO dirty_sources (source_type, source_id, queued_at)
             SELECT 'note', n.id, ?1
             FROM notes n
             WHERE n.discussion_id = ?2 AND n.is_system = 0
             ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
            params![now_ms(), disc_id],
        )
        .unwrap();
        // Both user notes should be dirty, system note should not
        assert_eq!(count_dirty_notes(&conn), 2);
        assert_eq!(count_note_dirty_sources(&conn, out1.local_note_id), 1);
        assert_eq!(count_note_dirty_sources(&conn, out2.local_note_id), 1);
    }

    // A parent metadata (label) change likewise re-queues user notes.
    #[test]
    fn test_parent_label_change_marks_notes_dirty() {
        let conn = setup();
        let disc_id = get_discussion_id(&conn);
        // Insert one user note
        let note = make_note(11001, 1, "User note", None, 1000, 2000, false, None);
        let out = upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
        // Clear dirty_sources
        conn.execute("DELETE FROM dirty_sources WHERE source_type = 'note'", [])
            .unwrap();
        // Simulate label change on parent issue (labels are part of issue metadata)
        conn.execute("UPDATE issues SET updated_at = 9999 WHERE id = 1", [])
            .unwrap();
        // Run propagation query
        conn.execute(
            "INSERT INTO dirty_sources (source_type, source_id, queued_at)
             SELECT 'note', n.id, ?1
             FROM notes n
             WHERE n.discussion_id = ?2 AND n.is_system = 0
             ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
            params![now_ms(), disc_id],
        )
        .unwrap();
        assert_eq!(count_dirty_notes(&conn), 1);
        assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 1);
    }
}
// Unit tests live in the sibling `discussions_tests.rs` file; `#[path]`
// splices it in as the `tests` child module, so the tests retain
// `use super::*;` access to this module's private items.
#[path = "discussions_tests.rs"]
mod tests;

View File

@@ -0,0 +1,470 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use crate::gitlab::transformers::NormalizedNote;
use std::path::Path;
#[test]
fn result_default_has_zero_counts() {
let result = IngestDiscussionsResult::default();
assert_eq!(result.discussions_fetched, 0);
assert_eq!(result.discussions_upserted, 0);
assert_eq!(result.notes_upserted, 0);
}
fn setup() -> Connection {
let conn = create_connection(Path::new(":memory:")).unwrap();
run_migrations(&conn).unwrap();
conn.execute(
"INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \
VALUES (1, 'group/repo', 'https://gitlab.com/group/repo')",
[],
)
.unwrap();
conn.execute(
"INSERT INTO issues (gitlab_id, iid, project_id, title, state, author_username, created_at, updated_at, last_seen_at) \
VALUES (100, 1, 1, 'Test Issue', 'opened', 'testuser', 1000, 2000, 3000)",
[],
)
.unwrap();
conn.execute(
"INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, noteable_type, individual_note, last_seen_at, resolvable, resolved) \
VALUES ('disc-1', 1, 1, 'Issue', 0, 3000, 0, 0)",
[],
)
.unwrap();
conn
}
fn get_discussion_id(conn: &Connection) -> i64 {
conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
.unwrap()
}
#[allow(clippy::too_many_arguments)]
fn make_note(
gitlab_id: i64,
project_id: i64,
body: &str,
note_type: Option<&str>,
created_at: i64,
updated_at: i64,
resolved: bool,
resolved_by: Option<&str>,
) -> NormalizedNote {
NormalizedNote {
gitlab_id,
project_id,
note_type: note_type.map(String::from),
is_system: false,
author_id: None,
author_username: "testuser".to_string(),
body: body.to_string(),
created_at,
updated_at,
last_seen_at: updated_at,
position: 0,
resolvable: false,
resolved,
resolved_by: resolved_by.map(String::from),
resolved_at: None,
position_old_path: None,
position_new_path: None,
position_old_line: None,
position_new_line: None,
position_type: None,
position_line_range_start: None,
position_line_range_end: None,
position_base_sha: None,
position_start_sha: None,
position_head_sha: None,
}
}
#[test]
fn test_issue_note_upsert_stable_id() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let last_seen_at = 5000;
let note1 = make_note(1001, 1, "First note", None, 1000, 2000, false, None);
let note2 = make_note(1002, 1, "Second note", None, 1000, 2000, false, None);
let out1 = upsert_note_for_issue(&conn, disc_id, &note1, last_seen_at, None).unwrap();
let out2 = upsert_note_for_issue(&conn, disc_id, &note2, last_seen_at, None).unwrap();
let id1 = out1.local_note_id;
let id2 = out2.local_note_id;
// Re-sync same gitlab_ids
let out1b = upsert_note_for_issue(&conn, disc_id, &note1, last_seen_at + 1, None).unwrap();
let out2b = upsert_note_for_issue(&conn, disc_id, &note2, last_seen_at + 1, None).unwrap();
assert_eq!(id1, out1b.local_note_id);
assert_eq!(id2, out2b.local_note_id);
}
#[test]
fn test_issue_note_upsert_detects_body_change() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let note = make_note(2001, 1, "Original body", None, 1000, 2000, false, None);
upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
let mut changed = make_note(2001, 1, "Updated body", None, 1000, 3000, false, None);
changed.updated_at = 3000;
let outcome = upsert_note_for_issue(&conn, disc_id, &changed, 5001, None).unwrap();
assert!(outcome.changed_semantics);
}
#[test]
fn test_issue_note_upsert_unchanged_returns_false() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let note = make_note(3001, 1, "Same body", None, 1000, 2000, false, None);
upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
// Re-sync identical note
let outcome = upsert_note_for_issue(&conn, disc_id, &note, 5001, None).unwrap();
assert!(!outcome.changed_semantics);
}
#[test]
fn test_issue_note_upsert_updated_at_only_does_not_mark_semantic_change() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let note = make_note(4001, 1, "Body stays", None, 1000, 2000, false, None);
upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
// Only change updated_at (non-semantic field)
let mut same = make_note(4001, 1, "Body stays", None, 1000, 9999, false, None);
same.updated_at = 9999;
let outcome = upsert_note_for_issue(&conn, disc_id, &same, 5001, None).unwrap();
assert!(!outcome.changed_semantics);
}
#[test]
fn test_issue_note_sweep_removes_stale() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let note1 = make_note(5001, 1, "Keep me", None, 1000, 2000, false, None);
let note2 = make_note(5002, 1, "Stale me", None, 1000, 2000, false, None);
upsert_note_for_issue(&conn, disc_id, &note1, 5000, None).unwrap();
upsert_note_for_issue(&conn, disc_id, &note2, 5000, None).unwrap();
// Re-sync only note1 with newer timestamp
upsert_note_for_issue(&conn, disc_id, &note1, 6000, None).unwrap();
// Sweep should remove note2 (last_seen_at=5000 < 6000)
let swept = sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
assert_eq!(swept, 1);
let count: i64 = conn
.query_row(
"SELECT COUNT(*) FROM notes WHERE discussion_id = ?",
[disc_id],
|row| row.get(0),
)
.unwrap();
assert_eq!(count, 1);
}
#[test]
fn test_issue_note_upsert_returns_local_id() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let note = make_note(6001, 1, "Check my ID", None, 1000, 2000, false, None);
let outcome = upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
// Verify the local_note_id matches what's in the DB
let db_id: i64 = conn
.query_row(
"SELECT id FROM notes WHERE gitlab_id = ?",
[6001_i64],
|row| row.get(0),
)
.unwrap();
assert_eq!(outcome.local_note_id, db_id);
}
#[test]
fn test_issue_note_upsert_captures_author_id() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let mut note = make_note(7001, 1, "With author", None, 1000, 2000, false, None);
note.author_id = Some(12345);
upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
let stored: Option<i64> = conn
.query_row(
"SELECT author_id FROM notes WHERE gitlab_id = ?",
[7001_i64],
|row| row.get(0),
)
.unwrap();
assert_eq!(stored, Some(12345));
}
#[test]
fn test_note_upsert_author_id_nullable() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let note = make_note(7002, 1, "No author id", None, 1000, 2000, false, None);
// author_id defaults to None in make_note
upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
let stored: Option<i64> = conn
.query_row(
"SELECT author_id FROM notes WHERE gitlab_id = ?",
[7002_i64],
|row| row.get(0),
)
.unwrap();
assert_eq!(stored, None);
}
#[test]
fn test_note_author_id_survives_username_change() {
let conn = setup();
let disc_id = get_discussion_id(&conn);
let mut note = make_note(7003, 1, "Original body", None, 1000, 2000, false, None);
note.author_id = Some(99999);
note.author_username = "oldname".to_string();
upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
// Re-sync with changed username, changed body, same author_id
let mut updated = make_note(7003, 1, "Updated body", None, 1000, 3000, false, None);
updated.author_id = Some(99999);
updated.author_username = "newname".to_string();
upsert_note_for_issue(&conn, disc_id, &updated, 5001, None).unwrap();
// author_id must survive the re-sync intact
let stored_id: Option<i64> = conn
.query_row(
"SELECT author_id FROM notes WHERE gitlab_id = ?",
[7003_i64],
|row| row.get(0),
)
.unwrap();
assert_eq!(stored_id, Some(99999));
}
fn insert_note_document(conn: &Connection, note_local_id: i64) {
conn.execute(
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
VALUES ('note', ?1, 1, 'note content', 'hash123')",
[note_local_id],
)
.unwrap();
}
fn insert_note_dirty_source(conn: &Connection, note_local_id: i64) {
conn.execute(
"INSERT INTO dirty_sources (source_type, source_id, queued_at) \
VALUES ('note', ?1, 1000)",
[note_local_id],
)
.unwrap();
}
fn count_note_documents(conn: &Connection, note_local_id: i64) -> i64 {
conn.query_row(
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?",
[note_local_id],
|row| row.get(0),
)
.unwrap()
}
fn count_note_dirty_sources(conn: &Connection, note_local_id: i64) -> i64 {
conn.query_row(
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note' AND source_id = ?",
[note_local_id],
|row| row.get(0),
)
.unwrap()
}
// Scenario: three notes synced at 5000; only two are re-seen at 6000. The
// sweep must delete the unseen note's document and dirty_sources entry in the
// same pass, while leaving the re-seen notes' documents untouched.
#[test]
fn test_issue_note_sweep_deletes_note_documents_immediately() {
    let conn = setup();
    let disc_id = get_discussion_id(&conn);
    // Insert 3 notes
    let note1 = make_note(9001, 1, "Keep me", None, 1000, 2000, false, None);
    let note2 = make_note(9002, 1, "Keep me too", None, 1000, 2000, false, None);
    let note3 = make_note(9003, 1, "Stale me", None, 1000, 2000, false, None);
    let out1 = upsert_note_for_issue(&conn, disc_id, &note1, 5000, None).unwrap();
    let out2 = upsert_note_for_issue(&conn, disc_id, &note2, 5000, None).unwrap();
    let out3 = upsert_note_for_issue(&conn, disc_id, &note3, 5000, None).unwrap();
    // Add documents for all 3
    insert_note_document(&conn, out1.local_note_id);
    insert_note_document(&conn, out2.local_note_id);
    insert_note_document(&conn, out3.local_note_id);
    // Add dirty_sources for note3
    insert_note_dirty_source(&conn, out3.local_note_id);
    // Re-sync only notes 1 and 2 with newer timestamp
    upsert_note_for_issue(&conn, disc_id, &note1, 6000, None).unwrap();
    upsert_note_for_issue(&conn, disc_id, &note2, 6000, None).unwrap();
    // Sweep should remove note3 and its document + dirty_source
    sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
    // Stale note's document should be gone
    assert_eq!(count_note_documents(&conn, out3.local_note_id), 0);
    assert_eq!(count_note_dirty_sources(&conn, out3.local_note_id), 0);
    // Kept notes' documents should survive
    assert_eq!(count_note_documents(&conn, out1.local_note_id), 1);
    assert_eq!(count_note_documents(&conn, out2.local_note_id), 1);
}
// A stale note that never received a documents row must still sweep cleanly
// (the cascade DELETEs simply match nothing).
#[test]
fn test_sweep_deletion_handles_note_without_document() {
    let conn = setup();
    let disc_id = get_discussion_id(&conn);
    let orphan = make_note(9004, 1, "No doc", None, 1000, 2000, false, None);
    upsert_note_for_issue(&conn, disc_id, &orphan, 5000, None).unwrap();
    // Sweep at a later sync timestamp; exactly the one note is stale.
    let swept = sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
    assert_eq!(1, swept);
}
// Wraps the sweep in an explicit transaction and verifies all three DELETEs
// (documents, dirty_sources, notes) land together once it commits.
#[test]
fn test_set_based_deletion_atomicity() {
    let conn = setup();
    let disc_id = get_discussion_id(&conn);
    // Insert a stale note with both document and dirty_source
    let note = make_note(9005, 1, "Stale with deps", None, 1000, 2000, false, None);
    let out = upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
    insert_note_document(&conn, out.local_note_id);
    insert_note_dirty_source(&conn, out.local_note_id);
    // Verify they exist before sweep
    assert_eq!(count_note_documents(&conn, out.local_note_id), 1);
    assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 1);
    // The sweep function already runs inside a transaction (called from
    // ingest_discussions_for_issue's tx). Simulate by wrapping in a transaction.
    let tx = conn.unchecked_transaction().unwrap();
    sweep_stale_issue_notes(&tx, disc_id, 6000).unwrap();
    tx.commit().unwrap();
    // All three DELETEs must have happened
    assert_eq!(count_note_documents(&conn, out.local_note_id), 0);
    assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 0);
    let note_count: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM notes WHERE gitlab_id = ?",
            [9005_i64],
            |row| row.get(0),
        )
        .unwrap();
    assert_eq!(note_count, 0);
}
/// Total number of note entries currently queued in `dirty_sources`.
fn count_dirty_notes(conn: &Connection) -> i64 {
    let sql = "SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'";
    conn.query_row(sql, [], |row| row.get(0)).unwrap()
}
// When a parent issue's title changes, the propagation SQL must re-queue every
// user note in the discussion for re-embedding while skipping system notes.
#[test]
fn test_parent_title_change_marks_notes_dirty() {
    let conn = setup();
    let disc_id = get_discussion_id(&conn);
    // Insert two user notes and one system note
    let note1 = make_note(10001, 1, "User note 1", None, 1000, 2000, false, None);
    let note2 = make_note(10002, 1, "User note 2", None, 1000, 2000, false, None);
    let mut sys_note = make_note(10003, 1, "System note", None, 1000, 2000, false, None);
    sys_note.is_system = true;
    let out1 = upsert_note_for_issue(&conn, disc_id, &note1, 5000, None).unwrap();
    let out2 = upsert_note_for_issue(&conn, disc_id, &note2, 5000, None).unwrap();
    upsert_note_for_issue(&conn, disc_id, &sys_note, 5000, None).unwrap();
    // Clear any dirty_sources from individual note upserts
    conn.execute("DELETE FROM dirty_sources WHERE source_type = 'note'", [])
        .unwrap();
    assert_eq!(count_dirty_notes(&conn), 0);
    // Simulate parent title change triggering discussion re-ingest:
    // update the issue title, then run the propagation SQL
    conn.execute("UPDATE issues SET title = 'Changed Title' WHERE id = 1", [])
        .unwrap();
    // Run the propagation query (same as in ingestion code)
    conn.execute(
        "INSERT INTO dirty_sources (source_type, source_id, queued_at)
         SELECT 'note', n.id, ?1
         FROM notes n
         WHERE n.discussion_id = ?2 AND n.is_system = 0
         ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
        params![now_ms(), disc_id],
    )
    .unwrap();
    // Both user notes should be dirty, system note should not
    assert_eq!(count_dirty_notes(&conn), 2);
    assert_eq!(count_note_dirty_sources(&conn, out1.local_note_id), 1);
    assert_eq!(count_note_dirty_sources(&conn, out2.local_note_id), 1);
}
// Same propagation path as the title-change test, but for label edits: the
// queue must pick up the single user note in the discussion.
#[test]
fn test_parent_label_change_marks_notes_dirty() {
    let conn = setup();
    let disc_id = get_discussion_id(&conn);
    // Insert one user note
    let note = make_note(11001, 1, "User note", None, 1000, 2000, false, None);
    let out = upsert_note_for_issue(&conn, disc_id, &note, 5000, None).unwrap();
    // Clear dirty_sources
    conn.execute("DELETE FROM dirty_sources WHERE source_type = 'note'", [])
        .unwrap();
    // Simulate a label change on the parent issue. Labels are issue metadata,
    // so here the effect is approximated by bumping updated_at, which is what
    // a label edit touches on the issue row.
    conn.execute("UPDATE issues SET updated_at = 9999 WHERE id = 1", [])
        .unwrap();
    // Run propagation query
    conn.execute(
        "INSERT INTO dirty_sources (source_type, source_id, queued_at)
         SELECT 'note', n.id, ?1
         FROM notes n
         WHERE n.discussion_id = ?2 AND n.is_system = 0
         ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
        params![now_ms(), disc_id],
    )
    .unwrap();
    assert_eq!(count_dirty_notes(&conn), 1);
    assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 1);
}

View File

@@ -138,29 +138,6 @@ fn passes_cursor_filter_with_ts(gitlab_id: i64, issue_ts: i64, cursor: &SyncCurs
true
}
/// Decide whether an issue survives the incremental-sync cursor: it passes
/// when updated strictly after the cursor timestamp, or at the exact same
/// instant with an id above the tie-breaker id. Test-only helper.
#[cfg(test)]
fn passes_cursor_filter(issue: &GitLabIssue, cursor: &SyncCursor) -> Result<bool> {
    // No stored cursor means a full sync: everything passes.
    let cursor_ts = match cursor.updated_at_cursor {
        Some(ts) => ts,
        None => return Ok(true),
    };
    let issue_ts = parse_timestamp(&issue.updated_at)?;
    if issue_ts > cursor_ts {
        return Ok(true);
    }
    if issue_ts < cursor_ts {
        return Ok(false);
    }
    // Equal timestamps: the tie-breaker id decides; with no tie-breaker the
    // issue is kept.
    Ok(match cursor.tie_breaker_id {
        Some(cursor_id) => issue.id > cursor_id,
        None => true,
    })
}
fn process_single_issue(
conn: &Connection,
config: &Config,
@@ -423,78 +400,5 @@ fn parse_timestamp(ts: &str) -> Result<i64> {
}
// Unit tests for the incremental-sync cursor filter.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::gitlab::types::GitLabAuthor;

    // Minimal issue fixture: only `id` and `updated_at` are meaningful to the
    // cursor filter; every other field is boilerplate.
    fn make_test_issue(id: i64, updated_at: &str) -> GitLabIssue {
        GitLabIssue {
            id,
            iid: id,
            project_id: 100,
            title: format!("Issue {}", id),
            description: None,
            state: "opened".to_string(),
            created_at: "2024-01-01T00:00:00.000Z".to_string(),
            updated_at: updated_at.to_string(),
            closed_at: None,
            author: GitLabAuthor {
                id: 1,
                username: "test".to_string(),
                name: "Test".to_string(),
            },
            assignees: vec![],
            labels: vec![],
            milestone: None,
            due_date: None,
            web_url: "https://example.com".to_string(),
        }
    }

    // 1705312800000 ms == 2024-01-15T10:00:00Z; issue updated a day later passes.
    #[test]
    fn cursor_filter_allows_newer_issues() {
        let cursor = SyncCursor {
            updated_at_cursor: Some(1705312800000),
            tie_breaker_id: Some(100),
        };
        let issue = make_test_issue(101, "2024-01-16T10:00:00.000Z");
        assert!(passes_cursor_filter(&issue, &cursor).unwrap_or(false));
    }

    // Issue updated a day before the cursor must be filtered out.
    #[test]
    fn cursor_filter_blocks_older_issues() {
        let cursor = SyncCursor {
            updated_at_cursor: Some(1705312800000),
            tie_breaker_id: Some(100),
        };
        let issue = make_test_issue(99, "2024-01-14T10:00:00.000Z");
        assert!(!passes_cursor_filter(&issue, &cursor).unwrap_or(true));
    }

    // All three issues share the cursor timestamp exactly; only ids strictly
    // above the tie-breaker id (100) survive.
    #[test]
    fn cursor_filter_uses_tie_breaker_for_same_timestamp() {
        let cursor = SyncCursor {
            updated_at_cursor: Some(1705312800000),
            tie_breaker_id: Some(100),
        };
        let issue1 = make_test_issue(101, "2024-01-15T10:00:00.000Z");
        assert!(passes_cursor_filter(&issue1, &cursor).unwrap_or(false));
        let issue2 = make_test_issue(100, "2024-01-15T10:00:00.000Z");
        assert!(!passes_cursor_filter(&issue2, &cursor).unwrap_or(true));
        let issue3 = make_test_issue(99, "2024-01-15T10:00:00.000Z");
        assert!(!passes_cursor_filter(&issue3, &cursor).unwrap_or(true));
    }

    // With no stored cursor (default), every issue passes, however old.
    #[test]
    fn cursor_filter_allows_all_when_no_cursor() {
        let cursor = SyncCursor::default();
        let issue = make_test_issue(1, "2020-01-01T00:00:00.000Z");
        assert!(passes_cursor_filter(&issue, &cursor).unwrap_or(false));
    }
}
// Unit tests live in the companion issues_tests.rs. Gate the module behind
// cfg(test) so the test-only code (and its test helpers) is never compiled
// into regular builds.
#[cfg(test)]
#[path = "issues_tests.rs"]
mod tests;

View File

@@ -0,0 +1,95 @@
use super::*;
use crate::gitlab::types::GitLabAuthor;
/// Cursor check exercised by the tests below: an issue passes when it was
/// updated strictly after the cursor, or at the same instant with an id above
/// the tie-breaker.
fn passes_cursor_filter(issue: &GitLabIssue, cursor: &SyncCursor) -> Result<bool> {
    // Absent cursor means nothing has been synced yet: keep everything.
    let cursor_ts = match cursor.updated_at_cursor {
        None => return Ok(true),
        Some(ts) => ts,
    };
    let issue_ts = parse_timestamp(&issue.updated_at)?;
    let keep = match issue_ts.cmp(&cursor_ts) {
        std::cmp::Ordering::Greater => true,
        std::cmp::Ordering::Less => false,
        // Equal timestamps fall back to the id tie-breaker (when present).
        std::cmp::Ordering::Equal => match cursor.tie_breaker_id {
            Some(cursor_id) => issue.id > cursor_id,
            None => true,
        },
    };
    Ok(keep)
}
/// Build a minimal `GitLabIssue` fixture. Only `id` and `updated_at` matter to
/// the cursor filter; the remaining fields carry fixed filler values.
fn make_test_issue(id: i64, updated_at: &str) -> GitLabIssue {
    let author = GitLabAuthor {
        id: 1,
        username: "test".to_string(),
        name: "Test".to_string(),
    };
    GitLabIssue {
        id,
        iid: id,
        project_id: 100,
        author,
        title: format!("Issue {}", id),
        description: None,
        state: "opened".to_string(),
        created_at: "2024-01-01T00:00:00.000Z".to_string(),
        updated_at: updated_at.to_string(),
        closed_at: None,
        assignees: vec![],
        labels: vec![],
        milestone: None,
        due_date: None,
        web_url: "https://example.com".to_string(),
    }
}
// Cursor at 2024-01-15T10:00:00Z (1705312800000 ms); an issue updated a day
// later must pass the filter.
#[test]
fn cursor_filter_allows_newer_issues() {
    let cursor = SyncCursor {
        updated_at_cursor: Some(1705312800000),
        tie_breaker_id: Some(100),
    };
    let newer = make_test_issue(101, "2024-01-16T10:00:00.000Z");
    let keep = passes_cursor_filter(&newer, &cursor).unwrap_or(false);
    assert!(keep);
}
// An issue last touched a day before the cursor timestamp is filtered out.
#[test]
fn cursor_filter_blocks_older_issues() {
    let cursor = SyncCursor {
        updated_at_cursor: Some(1705312800000),
        tie_breaker_id: Some(100),
    };
    let older = make_test_issue(99, "2024-01-14T10:00:00.000Z");
    let keep = passes_cursor_filter(&older, &cursor).unwrap_or(true);
    assert!(!keep);
}
// All three issues share the cursor timestamp exactly (2024-01-15T10:00:00Z);
// only ids strictly above the tie-breaker id (100) survive.
#[test]
fn cursor_filter_uses_tie_breaker_for_same_timestamp() {
    let cursor = SyncCursor {
        updated_at_cursor: Some(1705312800000),
        tie_breaker_id: Some(100),
    };
    let same_ts = "2024-01-15T10:00:00.000Z";
    assert!(passes_cursor_filter(&make_test_issue(101, same_ts), &cursor).unwrap_or(false));
    assert!(!passes_cursor_filter(&make_test_issue(100, same_ts), &cursor).unwrap_or(true));
    assert!(!passes_cursor_filter(&make_test_issue(99, same_ts), &cursor).unwrap_or(true));
}
// With a default (empty) cursor every issue passes, however old.
#[test]
fn cursor_filter_allows_all_when_no_cursor() {
    let ancient = make_test_issue(1, "2020-01-01T00:00:00.000Z");
    assert!(passes_cursor_filter(&ancient, &SyncCursor::default()).unwrap_or(false));
}

View File

@@ -66,207 +66,5 @@ pub fn upsert_mr_file_changes(
}
// Unit tests for MR file-change classification and upserts.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::db::{create_connection, run_migrations};
    use std::path::Path;

    // In-memory DB seeded with one project (local id 1) and one MR (local id 1)
    // for the file-change rows to attach to.
    fn setup() -> Connection {
        let conn = create_connection(Path::new(":memory:")).unwrap();
        run_migrations(&conn).unwrap();
        // Insert a test project
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/repo', 'https://gitlab.com/group/repo')",
            [],
        ).unwrap();
        // Insert a test MR
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, iid, project_id, title, state, draft, source_branch, target_branch, author_username, created_at, updated_at, last_seen_at) \
             VALUES (100, 1, 1, 'Test MR', 'merged', 0, 'feature', 'main', 'testuser', 1000, 2000, 3000)",
            [],
        ).unwrap();
        conn
    }

    // new_file flag => "added".
    #[test]
    fn test_derive_change_type_added() {
        let diff = GitLabMrDiff {
            old_path: String::new(),
            new_path: "src/new.rs".to_string(),
            new_file: true,
            renamed_file: false,
            deleted_file: false,
        };
        assert_eq!(derive_change_type(&diff), "added");
    }

    // renamed_file flag => "renamed".
    #[test]
    fn test_derive_change_type_renamed() {
        let diff = GitLabMrDiff {
            old_path: "src/old.rs".to_string(),
            new_path: "src/new.rs".to_string(),
            new_file: false,
            renamed_file: true,
            deleted_file: false,
        };
        assert_eq!(derive_change_type(&diff), "renamed");
    }

    // deleted_file flag => "deleted".
    #[test]
    fn test_derive_change_type_deleted() {
        let diff = GitLabMrDiff {
            old_path: "src/gone.rs".to_string(),
            new_path: "src/gone.rs".to_string(),
            new_file: false,
            renamed_file: false,
            deleted_file: true,
        };
        assert_eq!(derive_change_type(&diff), "deleted");
    }

    // No flags set => plain "modified".
    #[test]
    fn test_derive_change_type_modified() {
        let diff = GitLabMrDiff {
            old_path: "src/lib.rs".to_string(),
            new_path: "src/lib.rs".to_string(),
            new_file: false,
            renamed_file: false,
            deleted_file: false,
        };
        assert_eq!(derive_change_type(&diff), "modified");
    }

    // Fresh upsert stores one row per diff and reports the insert count.
    #[test]
    fn test_upsert_inserts_file_changes() {
        let conn = setup();
        let diffs = [
            GitLabMrDiff {
                old_path: String::new(),
                new_path: "src/new.rs".to_string(),
                new_file: true,
                renamed_file: false,
                deleted_file: false,
            },
            GitLabMrDiff {
                old_path: "src/lib.rs".to_string(),
                new_path: "src/lib.rs".to_string(),
                new_file: false,
                renamed_file: false,
                deleted_file: false,
            },
        ];
        let inserted = upsert_mr_file_changes(&conn, 1, 1, &diffs).unwrap();
        assert_eq!(inserted, 2);
        let count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM mr_file_changes WHERE merge_request_id = 1",
                [],
                |r| r.get(0),
            )
            .unwrap();
        assert_eq!(count, 2);
    }

    // A second upsert for the same MR replaces the previous rows wholesale
    // rather than accumulating them.
    #[test]
    fn test_upsert_replaces_existing() {
        let conn = setup();
        let diffs_v1 = [GitLabMrDiff {
            old_path: String::new(),
            new_path: "src/old.rs".to_string(),
            new_file: true,
            renamed_file: false,
            deleted_file: false,
        }];
        upsert_mr_file_changes(&conn, 1, 1, &diffs_v1).unwrap();
        let diffs_v2 = [
            GitLabMrDiff {
                old_path: "src/a.rs".to_string(),
                new_path: "src/a.rs".to_string(),
                new_file: false,
                renamed_file: false,
                deleted_file: false,
            },
            GitLabMrDiff {
                old_path: "src/b.rs".to_string(),
                new_path: "src/b.rs".to_string(),
                new_file: false,
                renamed_file: false,
                deleted_file: false,
            },
        ];
        let inserted = upsert_mr_file_changes(&conn, 1, 1, &diffs_v2).unwrap();
        assert_eq!(inserted, 2);
        let count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM mr_file_changes WHERE merge_request_id = 1",
                [],
                |r| r.get(0),
            )
            .unwrap();
        assert_eq!(count, 2);
        // The old "src/old.rs" should be gone
        let old_count: i64 = conn
            .query_row(
                "SELECT COUNT(*) FROM mr_file_changes WHERE new_path = 'src/old.rs'",
                [],
                |r| r.get(0),
            )
            .unwrap();
        assert_eq!(old_count, 0);
    }

    // Renames keep the pre-rename path in old_path next to the change type.
    #[test]
    fn test_renamed_stores_old_path() {
        let conn = setup();
        let diffs = [GitLabMrDiff {
            old_path: "src/old_name.rs".to_string(),
            new_path: "src/new_name.rs".to_string(),
            new_file: false,
            renamed_file: true,
            deleted_file: false,
        }];
        upsert_mr_file_changes(&conn, 1, 1, &diffs).unwrap();
        let (old_path, change_type): (Option<String>, String) = conn
            .query_row(
                "SELECT old_path, change_type FROM mr_file_changes WHERE new_path = 'src/new_name.rs'",
                [],
                |r| Ok((r.get(0)?, r.get(1)?)),
            )
            .unwrap();
        assert_eq!(old_path.as_deref(), Some("src/old_name.rs"));
        assert_eq!(change_type, "renamed");
    }

    // Plain modifications must store NULL for old_path.
    #[test]
    fn test_non_renamed_has_null_old_path() {
        let conn = setup();
        let diffs = [GitLabMrDiff {
            old_path: "src/lib.rs".to_string(),
            new_path: "src/lib.rs".to_string(),
            new_file: false,
            renamed_file: false,
            deleted_file: false,
        }];
        upsert_mr_file_changes(&conn, 1, 1, &diffs).unwrap();
        let old_path: Option<String> = conn
            .query_row(
                "SELECT old_path FROM mr_file_changes WHERE new_path = 'src/lib.rs'",
                [],
                |r| r.get(0),
            )
            .unwrap();
        assert!(old_path.is_none());
    }
}
// Unit tests live in the companion mr_diffs_tests.rs. Gate the module behind
// cfg(test) so the test-only code is never compiled into regular builds.
#[cfg(test)]
#[path = "mr_diffs_tests.rs"]
mod tests;

View File

@@ -0,0 +1,202 @@
use super::*;
use crate::core::db::{create_connection, run_migrations};
use std::path::Path;
/// Open an in-memory database, run migrations, and seed one project
/// (local id 1) plus one merged MR (local id 1) for the file-change
/// upserts under test to attach to.
fn setup() -> Connection {
    let conn = create_connection(Path::new(":memory:")).unwrap();
    run_migrations(&conn).unwrap();
    let seed_project = "INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) VALUES (1, 'group/repo', 'https://gitlab.com/group/repo')";
    conn.execute(seed_project, []).unwrap();
    let seed_mr = "INSERT INTO merge_requests (gitlab_id, iid, project_id, title, state, draft, source_branch, target_branch, author_username, created_at, updated_at, last_seen_at) VALUES (100, 1, 1, 'Test MR', 'merged', 0, 'feature', 'main', 'testuser', 1000, 2000, 3000)";
    conn.execute(seed_mr, []).unwrap();
    conn
}
// A freshly created file: empty old path plus the new_file flag => "added".
#[test]
fn test_derive_change_type_added() {
    let added = GitLabMrDiff {
        old_path: String::new(),
        new_path: "src/new.rs".to_string(),
        new_file: true,
        renamed_file: false,
        deleted_file: false,
    };
    assert_eq!("added", derive_change_type(&added));
}
// Differing paths with the renamed_file flag => "renamed".
#[test]
fn test_derive_change_type_renamed() {
    let renamed = GitLabMrDiff {
        old_path: "src/old.rs".to_string(),
        new_path: "src/new.rs".to_string(),
        new_file: false,
        renamed_file: true,
        deleted_file: false,
    };
    assert_eq!("renamed", derive_change_type(&renamed));
}
// The deleted_file flag => "deleted".
#[test]
fn test_derive_change_type_deleted() {
    let removed = GitLabMrDiff {
        old_path: "src/gone.rs".to_string(),
        new_path: "src/gone.rs".to_string(),
        new_file: false,
        renamed_file: false,
        deleted_file: true,
    };
    assert_eq!("deleted", derive_change_type(&removed));
}
// No flags set at all => plain "modified".
#[test]
fn test_derive_change_type_modified() {
    let touched = GitLabMrDiff {
        old_path: "src/lib.rs".to_string(),
        new_path: "src/lib.rs".to_string(),
        new_file: false,
        renamed_file: false,
        deleted_file: false,
    };
    assert_eq!("modified", derive_change_type(&touched));
}
// Fresh upsert: each diff becomes exactly one mr_file_changes row and the
// returned count matches.
#[test]
fn test_upsert_inserts_file_changes() {
    let conn = setup();
    let added = GitLabMrDiff {
        old_path: String::new(),
        new_path: "src/new.rs".to_string(),
        new_file: true,
        renamed_file: false,
        deleted_file: false,
    };
    let modified = GitLabMrDiff {
        old_path: "src/lib.rs".to_string(),
        new_path: "src/lib.rs".to_string(),
        new_file: false,
        renamed_file: false,
        deleted_file: false,
    };
    let diffs = [added, modified];
    let inserted = upsert_mr_file_changes(&conn, 1, 1, &diffs).unwrap();
    assert_eq!(2, inserted);
    // Both rows must be persisted against the MR.
    let stored: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM mr_file_changes WHERE merge_request_id = 1",
            [],
            |r| r.get(0),
        )
        .unwrap();
    assert_eq!(2, stored);
}
// A second upsert for the same MR must replace its previous file-change rows
// wholesale (no accumulation, no leftovers from the first version).
#[test]
fn test_upsert_replaces_existing() {
    let conn = setup();
    let diffs_v1 = [GitLabMrDiff {
        old_path: String::new(),
        new_path: "src/old.rs".to_string(),
        new_file: true,
        renamed_file: false,
        deleted_file: false,
    }];
    upsert_mr_file_changes(&conn, 1, 1, &diffs_v1).unwrap();
    let diffs_v2 = [
        GitLabMrDiff {
            old_path: "src/a.rs".to_string(),
            new_path: "src/a.rs".to_string(),
            new_file: false,
            renamed_file: false,
            deleted_file: false,
        },
        GitLabMrDiff {
            old_path: "src/b.rs".to_string(),
            new_path: "src/b.rs".to_string(),
            new_file: false,
            renamed_file: false,
            deleted_file: false,
        },
    ];
    let inserted = upsert_mr_file_changes(&conn, 1, 1, &diffs_v2).unwrap();
    assert_eq!(inserted, 2);
    let count: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM mr_file_changes WHERE merge_request_id = 1",
            [],
            |r| r.get(0),
        )
        .unwrap();
    assert_eq!(count, 2);
    // The old "src/old.rs" should be gone
    let old_count: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM mr_file_changes WHERE new_path = 'src/old.rs'",
            [],
            |r| r.get(0),
        )
        .unwrap();
    assert_eq!(old_count, 0);
}
// Renames must keep the pre-rename path in old_path alongside the
// "renamed" change type.
#[test]
fn test_renamed_stores_old_path() {
    let conn = setup();
    let rename = GitLabMrDiff {
        old_path: "src/old_name.rs".to_string(),
        new_path: "src/new_name.rs".to_string(),
        new_file: false,
        renamed_file: true,
        deleted_file: false,
    };
    upsert_mr_file_changes(&conn, 1, 1, &[rename]).unwrap();
    let row: (Option<String>, String) = conn
        .query_row(
            "SELECT old_path, change_type FROM mr_file_changes WHERE new_path = 'src/new_name.rs'",
            [],
            |r| Ok((r.get(0)?, r.get(1)?)),
        )
        .unwrap();
    assert_eq!(row.0.as_deref(), Some("src/old_name.rs"));
    assert_eq!(row.1, "renamed");
}
// A plain modification must leave old_path NULL in the stored row.
#[test]
fn test_non_renamed_has_null_old_path() {
    let conn = setup();
    let modified = GitLabMrDiff {
        old_path: "src/lib.rs".to_string(),
        new_path: "src/lib.rs".to_string(),
        new_file: false,
        renamed_file: false,
        deleted_file: false,
    };
    upsert_mr_file_changes(&conn, 1, 1, &[modified]).unwrap();
    let stored_old_path: Option<String> = conn
        .query_row(
            "SELECT old_path FROM mr_file_changes WHERE new_path = 'src/lib.rs'",
            [],
            |r| r.get(0),
        )
        .unwrap();
    assert!(stored_old_path.is_none());
}