//! Schema-migration tests.
//!
//! Each test applies the SQL files in the crate's `migrations/` directory,
//! in order, to a fresh in-memory SQLite database and then verifies the
//! resulting schema (columns, CHECK constraints, cascading deletes, and the
//! recorded `schema_version`) for migrations 002 and 005.
use rusqlite::Connection;
|
|
use std::path::PathBuf;
|
|
|
|
fn get_migrations_dir() -> PathBuf {
|
|
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("migrations")
|
|
}
|
|
|
|
fn apply_migrations(conn: &Connection, through_version: i32) {
|
|
let migrations_dir = get_migrations_dir();
|
|
|
|
for version in 1..=through_version {
|
|
let _filename = format!("{:03}_*.sql", version);
|
|
let entries: Vec<_> = std::fs::read_dir(&migrations_dir)
|
|
.unwrap()
|
|
.filter_map(|e| e.ok())
|
|
.filter(|e| {
|
|
e.file_name()
|
|
.to_string_lossy()
|
|
.starts_with(&format!("{:03}", version))
|
|
})
|
|
.collect();
|
|
|
|
assert!(!entries.is_empty(), "Migration {} not found", version);
|
|
|
|
let sql = std::fs::read_to_string(entries[0].path()).unwrap();
|
|
conn.execute_batch(&sql)
|
|
.unwrap_or_else(|e| panic!("Migration {} failed: {}", version, e));
|
|
}
|
|
}
|
|
|
|
fn create_test_db() -> Connection {
|
|
let conn = Connection::open_in_memory().unwrap();
|
|
conn.pragma_update(None, "foreign_keys", "ON").unwrap();
|
|
conn
|
|
}
|
|
|
|
/// Migration 002 must create `issues` with its core columns.
#[test]
fn migration_002_creates_issues_table() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    // Field 1 of each PRAGMA table_info row is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(issues)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get(1))
        .unwrap()
        .filter_map(Result::ok)
        .collect();

    let expected = [
        "gitlab_id",
        "project_id",
        "iid",
        "title",
        "state",
        "author_username",
        "discussions_synced_for_updated_at",
    ];
    for name in expected {
        assert!(columns.contains(&name.to_string()));
    }
}
|
|
|
|
/// Migration 002 must create `labels` with its core columns.
#[test]
fn migration_002_creates_labels_table() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    // Field 1 of each PRAGMA table_info row is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(labels)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get(1))
        .unwrap()
        .filter_map(Result::ok)
        .collect();

    for name in ["name", "project_id", "color"] {
        assert!(columns.contains(&name.to_string()));
    }
}
|
|
|
|
/// Migration 002 must create `discussions` with its core columns.
#[test]
fn migration_002_creates_discussions_table() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    // Field 1 of each PRAGMA table_info row is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(discussions)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get(1))
        .unwrap()
        .filter_map(Result::ok)
        .collect();

    let expected = [
        "gitlab_discussion_id",
        "issue_id",
        "noteable_type",
        "individual_note",
        "first_note_at",
        "last_note_at",
    ];
    for name in expected {
        assert!(columns.contains(&name.to_string()));
    }
}
|
|
|
|
/// Migration 002 must create `notes` with its core columns.
#[test]
fn migration_002_creates_notes_table() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    // Field 1 of each PRAGMA table_info row is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(notes)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get(1))
        .unwrap()
        .filter_map(Result::ok)
        .collect();

    let expected = [
        "gitlab_id",
        "discussion_id",
        "note_type",
        "is_system",
        "body",
        "position_old_path",
    ];
    for name in expected {
        assert!(columns.contains(&name.to_string()));
    }
}
|
|
|
|
/// The `issues.state` CHECK constraint accepts 'opened'/'closed' and
/// rejects anything else.
#[test]
fn migration_002_enforces_state_check() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    // Shorthand: run a parameterless statement against the test db.
    let exec = |sql: &str| conn.execute(sql, []);

    exec("INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')")
        .unwrap();

    // Both legal states are accepted...
    exec(
        "INSERT INTO issues (gitlab_id, project_id, iid, state, created_at, updated_at, last_seen_at)
         VALUES (1, 1, 1, 'opened', 1000, 1000, 1000)",
    )
    .unwrap();
    exec(
        "INSERT INTO issues (gitlab_id, project_id, iid, state, created_at, updated_at, last_seen_at)
         VALUES (2, 1, 2, 'closed', 1000, 1000, 1000)",
    )
    .unwrap();

    // ...while any other value must be rejected.
    let rejected = exec(
        "INSERT INTO issues (gitlab_id, project_id, iid, state, created_at, updated_at, last_seen_at)
         VALUES (3, 1, 3, 'invalid', 1000, 1000, 1000)",
    );
    assert!(rejected.is_err());
}
|
|
|
|
/// `discussions.noteable_type` only admits 'Issue', and a discussion
/// cannot be inserted without its issue.
#[test]
fn migration_002_enforces_noteable_type_check() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    // Shorthand: run a parameterless statement against the test db.
    let exec = |sql: &str| conn.execute(sql, []);

    exec("INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')")
        .unwrap();
    exec(
        "INSERT INTO issues (id, gitlab_id, project_id, iid, state, created_at, updated_at, last_seen_at)
         VALUES (1, 1, 1, 1, 'opened', 1000, 1000, 1000)",
    )
    .unwrap();

    // 'Issue' is the accepted noteable_type.
    exec(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at)
         VALUES ('abc123', 1, 1, 'Issue', 1000)",
    )
    .unwrap();

    // Any other noteable_type is rejected.
    let bad_type = exec(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at)
         VALUES ('def456', 1, 1, 'Commit', 1000)",
    );
    assert!(bad_type.is_err());

    // Omitting issue_id must also fail — presumably a NOT NULL constraint;
    // confirm against the 002 migration SQL.
    let missing_issue = exec(
        "INSERT INTO discussions (gitlab_discussion_id, project_id, noteable_type, last_seen_at)
         VALUES ('ghi789', 1, 'Issue', 1000)",
    );
    assert!(missing_issue.is_err());
}
|
|
|
|
/// Deleting a project must cascade through issues, labels, issue_labels,
/// discussions, and notes, leaving no orphan rows.
#[test]
fn migration_002_cascades_on_project_delete() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    // Shorthand for statements expected to succeed.
    let exec = |sql: &str| conn.execute(sql, []).unwrap();

    // One row in every table that hangs off the project.
    exec("INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')");
    exec(
        "INSERT INTO issues (id, gitlab_id, project_id, iid, state, created_at, updated_at, last_seen_at)
         VALUES (1, 1, 1, 1, 'opened', 1000, 1000, 1000)",
    );
    exec("INSERT INTO labels (id, project_id, name) VALUES (1, 1, 'bug')");
    exec("INSERT INTO issue_labels (issue_id, label_id) VALUES (1, 1)");
    exec(
        "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at)
         VALUES (1, 'disc1', 1, 1, 'Issue', 1000)",
    );
    exec(
        "INSERT INTO notes (gitlab_id, discussion_id, project_id, created_at, updated_at, last_seen_at)
         VALUES (1, 1, 1, 1000, 1000, 1000)",
    );

    exec("DELETE FROM projects WHERE id = 1");

    // Every dependent table must now be empty.
    let count = |sql: &str| -> i64 { conn.query_row(sql, [], |r| r.get(0)).unwrap() };
    assert_eq!(count("SELECT COUNT(*) FROM issues"), 0);
    assert_eq!(count("SELECT COUNT(*) FROM labels"), 0);
    assert_eq!(count("SELECT COUNT(*) FROM discussions"), 0);
    assert_eq!(count("SELECT COUNT(*) FROM notes"), 0);
}
|
|
|
|
/// After migration 002 the highest recorded schema version is 2.
#[test]
fn migration_002_updates_schema_version() {
    let conn = create_test_db();
    apply_migrations(&conn, 2);

    let latest: i32 = conn
        .query_row("SELECT MAX(version) FROM schema_version", [], |r| r.get(0))
        .unwrap();
    assert_eq!(latest, 2);
}
|
|
|
|
/// Migration 005 must create `milestones` with its core columns.
#[test]
fn migration_005_creates_milestones_table() {
    let conn = create_test_db();
    apply_migrations(&conn, 5);

    // Field 1 of each PRAGMA table_info row is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(milestones)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get(1))
        .unwrap()
        .filter_map(Result::ok)
        .collect();

    let expected = [
        "id",
        "gitlab_id",
        "project_id",
        "iid",
        "title",
        "description",
        "state",
        "due_date",
        "web_url",
    ];
    for name in expected {
        assert!(columns.contains(&name.to_string()));
    }
}
|
|
|
|
/// Migration 005 must create the `issue_assignees` join table.
#[test]
fn migration_005_creates_issue_assignees_table() {
    let conn = create_test_db();
    apply_migrations(&conn, 5);

    // Field 1 of each PRAGMA table_info row is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(issue_assignees)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get(1))
        .unwrap()
        .filter_map(Result::ok)
        .collect();

    for name in ["issue_id", "username"] {
        assert!(columns.contains(&name.to_string()));
    }
}
|
|
|
|
/// Migration 005 must extend `issues` with due-date and milestone columns.
#[test]
fn migration_005_adds_new_columns_to_issues() {
    let conn = create_test_db();
    apply_migrations(&conn, 5);

    // Field 1 of each PRAGMA table_info row is the column name.
    let mut stmt = conn.prepare("PRAGMA table_info(issues)").unwrap();
    let columns: Vec<String> = stmt
        .query_map([], |row| row.get(1))
        .unwrap()
        .filter_map(Result::ok)
        .collect();

    for name in ["due_date", "milestone_id", "milestone_title"] {
        assert!(columns.contains(&name.to_string()));
    }
}
|
|
|
|
/// Deleting a project must also remove its milestones.
#[test]
fn migration_005_milestones_cascade_on_project_delete() {
    let conn = create_test_db();
    apply_migrations(&conn, 5);

    // Shorthand for statements expected to succeed.
    let exec = |sql: &str| conn.execute(sql, []).unwrap();

    exec("INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')");
    exec("INSERT INTO milestones (id, gitlab_id, project_id, iid, title) VALUES (1, 500, 1, 1, 'v1.0')");

    exec("DELETE FROM projects WHERE id = 1");

    let remaining: i64 = conn
        .query_row("SELECT COUNT(*) FROM milestones", [], |r| r.get(0))
        .unwrap();
    assert_eq!(remaining, 0);
}
|
|
|
|
/// Deleting an issue must also remove its assignee rows.
#[test]
fn migration_005_assignees_cascade_on_issue_delete() {
    let conn = create_test_db();
    apply_migrations(&conn, 5);

    // Shorthand for statements expected to succeed.
    let exec = |sql: &str| conn.execute(sql, []).unwrap();

    exec("INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')");
    exec(
        "INSERT INTO issues (id, gitlab_id, project_id, iid, state, created_at, updated_at, last_seen_at)
         VALUES (1, 1, 1, 1, 'opened', 1000, 1000, 1000)",
    );
    exec("INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'alice')");

    exec("DELETE FROM issues WHERE id = 1");

    let remaining: i64 = conn
        .query_row("SELECT COUNT(*) FROM issue_assignees", [], |r| r.get(0))
        .unwrap();
    assert_eq!(remaining, 0);
}
|
|
|
|
/// After migration 005 the highest recorded schema version is 5.
#[test]
fn migration_005_updates_schema_version() {
    let conn = create_test_db();
    apply_migrations(&conn, 5);

    let latest: i32 = conn
        .query_row("SELECT MAX(version) FROM schema_version", [], |r| r.get(0))
        .unwrap();
    assert_eq!(latest, 5);
}
|