Implement a personal work dashboard that shows everything relevant to the
configured GitLab user: open issues assigned to them, MRs they authored,
MRs they are reviewing, and a chronological activity feed.
Design decisions:
- Attention state computed from GitLab interaction data (comments, reviews)
with no local state tracking -- purely derived from existing synced data
- Username resolution: --user flag > config.gitlab.username > actionable error
- Project scoping: --project (fuzzy) | --all | default_project | all
- Section filtering: --issues, --mrs, --activity (combinable, default = all)
- Activity feed controlled by --since (default 30d); work item sections
always show all open items regardless of --since
Architecture (src/cli/commands/me/):
- types.rs: MeDashboard, MeSummary, AttentionState data types
- queries.rs: 4 SQL queries (open_issues, authored_mrs, reviewing_mrs,
activity) using existing issue_assignees, mr_reviewers, notes tables
- render_human.rs: colored terminal output with attention state indicators
- render_robot.rs: {ok, data, meta} JSON envelope with field selection
- mod.rs: orchestration (resolve_username, resolve_project_scope, run_me)
- me_tests.rs: comprehensive unit tests covering all query paths
Config additions:
- New optional gitlab.username field in config.json
- Tests for config with/without username
- Existing test configs updated with username: None
CLI wiring:
- MeArgs struct with section filter, since, project, all, user, fields flags
- Autocorrect support for me command flags
- LoreRenderer::try_get() for safe renderer access in me module
- Robot mode field selection presets (me_items, me_activity)
- handle_me() in main.rs command dispatch
Also fixes duplicate assertions in surgical sync tests (removed 6
duplicate assert! lines that were copy-paste artifacts).
Spec: docs/lore-me-spec.md
641 lines
21 KiB
Rust
use std::path::Path;
|
|
|
|
use super::*;
|
|
use crate::core::config::{
|
|
Config, EmbeddingConfig, GitLabConfig, LoggingConfig, ProjectConfig, ScoringConfig,
|
|
StorageConfig, SyncConfig,
|
|
};
|
|
use crate::core::db::{create_connection, run_migrations};
|
|
use crate::gitlab::types::{GitLabAuthor, GitLabMergeRequest};
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Test helpers
|
|
// ---------------------------------------------------------------------------
|
|
|
|
fn setup_db() -> rusqlite::Connection {
|
|
let conn = create_connection(Path::new(":memory:")).expect("in-memory DB");
|
|
run_migrations(&conn).expect("migrations");
|
|
conn.execute(
|
|
"INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url)
|
|
VALUES (100, 'group/repo', 'https://example.com/group/repo')",
|
|
[],
|
|
)
|
|
.expect("insert project");
|
|
conn
|
|
}
|
|
|
|
fn test_config() -> Config {
|
|
Config {
|
|
gitlab: GitLabConfig {
|
|
base_url: "https://gitlab.example.com".to_string(),
|
|
token_env_var: "GITLAB_TOKEN".to_string(),
|
|
token: None,
|
|
username: None,
|
|
},
|
|
projects: vec![ProjectConfig {
|
|
path: "group/repo".to_string(),
|
|
}],
|
|
default_project: None,
|
|
sync: SyncConfig::default(),
|
|
storage: StorageConfig::default(),
|
|
embedding: EmbeddingConfig::default(),
|
|
logging: LoggingConfig::default(),
|
|
scoring: ScoringConfig::default(),
|
|
}
|
|
}
|
|
|
|
fn make_test_issue(iid: i64, updated_at: &str) -> GitLabIssue {
|
|
GitLabIssue {
|
|
id: iid * 1000, // unique gitlab_id
|
|
iid,
|
|
project_id: 100,
|
|
title: format!("Test issue {iid}"),
|
|
description: Some("Description".to_string()),
|
|
state: "opened".to_string(),
|
|
created_at: "2026-01-01T00:00:00.000+00:00".to_string(),
|
|
updated_at: updated_at.to_string(),
|
|
closed_at: None,
|
|
author: GitLabAuthor {
|
|
id: 1,
|
|
username: "testuser".to_string(),
|
|
name: "Test User".to_string(),
|
|
},
|
|
assignees: vec![],
|
|
labels: vec![],
|
|
milestone: None,
|
|
due_date: None,
|
|
web_url: format!("https://example.com/group/repo/-/issues/{iid}"),
|
|
}
|
|
}
|
|
|
|
fn make_test_mr(iid: i64, updated_at: &str) -> GitLabMergeRequest {
|
|
GitLabMergeRequest {
|
|
id: iid * 1000,
|
|
iid,
|
|
project_id: 100,
|
|
title: format!("Test MR {iid}"),
|
|
description: Some("MR description".to_string()),
|
|
state: "opened".to_string(),
|
|
draft: false,
|
|
work_in_progress: false,
|
|
source_branch: "feature".to_string(),
|
|
target_branch: "main".to_string(),
|
|
sha: Some("abc123".to_string()),
|
|
references: None,
|
|
detailed_merge_status: None,
|
|
merge_status_legacy: None,
|
|
created_at: "2026-01-01T00:00:00.000+00:00".to_string(),
|
|
updated_at: updated_at.to_string(),
|
|
merged_at: None,
|
|
closed_at: None,
|
|
author: GitLabAuthor {
|
|
id: 1,
|
|
username: "testuser".to_string(),
|
|
name: "Test User".to_string(),
|
|
},
|
|
merge_user: None,
|
|
merged_by: None,
|
|
labels: vec![],
|
|
assignees: vec![],
|
|
reviewers: vec![],
|
|
web_url: format!("https://example.com/group/repo/-/merge_requests/{iid}"),
|
|
merge_commit_sha: None,
|
|
squash_commit_sha: None,
|
|
}
|
|
}
|
|
|
|
fn get_db_updated_at_helper(conn: &rusqlite::Connection, table: &str, iid: i64) -> Option<i64> {
|
|
let sql = match table {
|
|
"issues" => "SELECT updated_at FROM issues WHERE project_id = 1 AND iid = ?1",
|
|
"merge_requests" => {
|
|
"SELECT updated_at FROM merge_requests WHERE project_id = 1 AND iid = ?1"
|
|
}
|
|
_ => return None,
|
|
};
|
|
conn.query_row(sql, [iid], |row| row.get(0)).ok()
|
|
}
|
|
|
|
fn get_dirty_keys(conn: &rusqlite::Connection) -> Vec<(String, i64)> {
|
|
let mut stmt = conn
|
|
.prepare("SELECT source_type, source_id FROM dirty_sources ORDER BY source_type, source_id")
|
|
.expect("prepare dirty_sources query");
|
|
stmt.query_map([], |row| {
|
|
let st: String = row.get(0)?;
|
|
let id: i64 = row.get(1)?;
|
|
Ok((st, id))
|
|
})
|
|
.expect("query dirty_sources")
|
|
.collect::<std::result::Result<Vec<_>, _>>()
|
|
.expect("collect dirty_sources")
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// is_stale unit tests
|
|
// ---------------------------------------------------------------------------
|
|
|
|
#[test]
fn test_is_stale_parses_iso8601() {
    // 2026-02-17T12:00:00.000+00:00 == 1_771_329_600_000 ms — the exact
    // instant stored in the DB value below.
    let verdict = is_stale("2026-02-17T12:00:00.000+00:00", Some(1_771_329_600_000));
    assert!(verdict.is_ok());
    // An equal timestamp counts as stale: there is nothing newer to ingest.
    assert!(verdict.unwrap());
}
|
|
|
|
#[test]
fn test_is_stale_handles_none_db_value() {
    // With no stored row there is nothing to be stale against.
    let verdict = is_stale("2026-02-17T12:00:00.000+00:00", None);
    assert!(verdict.is_ok());
    assert!(!verdict.unwrap(), "no DB row means not stale");
}
|
|
|
|
#[test]
fn test_is_stale_with_z_suffix() {
    // "Z" and "+00:00" denote the same instant; parsing must agree.
    let verdict = is_stale("2026-02-17T12:00:00Z", Some(1_771_329_600_000));
    assert!(verdict.is_ok());
    assert!(verdict.unwrap(), "Z suffix should parse same as +00:00");
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Issue ingestion tests
|
|
// ---------------------------------------------------------------------------
|
|
|
|
#[test]
fn test_ingest_issue_by_iid_upserts_and_marks_dirty() {
    let conn = setup_db();
    let config = test_config();
    let issue = make_test_issue(42, "2026-02-17T12:00:00.000+00:00");

    let outcome = ingest_issue_by_iid(&conn, &config, 1, &issue).unwrap();

    // Fresh data: not skipped, and dirty keys reported to the caller.
    assert!(!outcome.skipped_stale);
    assert!(!outcome.dirty_source_keys.is_empty());

    // The row must have landed in the issues table.
    let db_ts = get_db_updated_at_helper(&conn, "issues", 42);
    assert!(db_ts.is_some(), "issue should exist in DB");

    // And the dirty-tracking table must record an issue entry.
    let dirty = get_dirty_keys(&conn);
    assert!(
        dirty.iter().any(|(t, _)| t == "issue"),
        "dirty_sources should contain an issue entry"
    );
}
|
|
|
|
#[test]
fn test_toctou_skips_stale_issue() {
    let conn = setup_db();
    let config = test_config();
    let issue = make_test_issue(42, "2026-02-17T12:00:00.000+00:00");

    // Initial ingest of fresh data goes through.
    let first = ingest_issue_by_iid(&conn, &config, 1, &issue).unwrap();
    assert!(!first.skipped_stale);

    // Wipe dirty marks so we can tell whether the re-ingest adds any.
    conn.execute("DELETE FROM dirty_sources", []).unwrap();

    // Re-ingesting the identical timestamp must be detected as stale.
    let second = ingest_issue_by_iid(&conn, &config, 1, &issue).unwrap();
    assert!(second.skipped_stale);
    assert!(second.dirty_source_keys.is_empty());

    // The skip must not leave any fresh dirty marks behind.
    let dirty = get_dirty_keys(&conn);
    assert!(dirty.is_empty(), "stale skip should not create dirty marks");
}
|
|
|
|
#[test]
fn test_toctou_allows_newer_issue() {
    let conn = setup_db();
    let config = test_config();

    // Seed the row at T1, then clear dirty marks.
    let seed = make_test_issue(42, "2026-02-17T12:00:00.000+00:00");
    ingest_issue_by_iid(&conn, &config, 1, &seed).unwrap();
    conn.execute("DELETE FROM dirty_sources", []).unwrap();

    // A strictly newer payload (T2) must be accepted, not skipped.
    let newer = make_test_issue(42, "2026-02-17T13:00:00.000+00:00");
    let outcome = ingest_issue_by_iid(&conn, &config, 1, &newer).unwrap();

    assert!(!outcome.skipped_stale);
}
|
|
|
|
#[test]
fn test_ingest_issue_returns_dirty_source_keys() {
    let conn = setup_db();
    let config = test_config();
    let issue = make_test_issue(42, "2026-02-17T12:00:00.000+00:00");

    let outcome = ingest_issue_by_iid(&conn, &config, 1, &issue).unwrap();

    // Exactly one dirty key, typed "issue", pointing at a real local row.
    assert_eq!(outcome.dirty_source_keys.len(), 1);
    let (source_type, local_id) = &outcome.dirty_source_keys[0];
    assert_eq!(source_type.as_str(), "issue");
    assert!(*local_id > 0, "local_id should be positive");
}
|
|
|
|
#[test]
fn test_ingest_issue_updates_existing() {
    let conn = setup_db();
    let config = test_config();

    // First version at 12:00; capture the stored timestamp.
    let v1 = make_test_issue(42, "2026-02-17T12:00:00.000+00:00");
    ingest_issue_by_iid(&conn, &config, 1, &v1).unwrap();
    let before = get_db_updated_at_helper(&conn, "issues", 42).unwrap();

    // A newer version must overwrite the existing row, not be skipped.
    let v2 = make_test_issue(42, "2026-02-17T14:00:00.000+00:00");
    let outcome = ingest_issue_by_iid(&conn, &config, 1, &v2).unwrap();
    assert!(!outcome.skipped_stale);

    let after = get_db_updated_at_helper(&conn, "issues", 42).unwrap();
    assert!(after > before, "DB timestamp should increase after update");
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// MR ingestion tests
|
|
// ---------------------------------------------------------------------------
|
|
|
|
#[test]
fn test_ingest_mr_by_iid_upserts_and_marks_dirty() {
    let conn = setup_db();
    let config = test_config();
    let mr = make_test_mr(101, "2026-02-17T12:00:00.000+00:00");

    let outcome = ingest_mr_by_iid(&conn, &config, 1, &mr).unwrap();

    // Fresh data: not skipped, and dirty keys reported to the caller.
    assert!(!outcome.skipped_stale);
    assert!(!outcome.dirty_source_keys.is_empty());

    // The row must have landed in the merge_requests table.
    let db_ts = get_db_updated_at_helper(&conn, "merge_requests", 101);
    assert!(db_ts.is_some(), "MR should exist in DB");

    // And the dirty-tracking table must record a merge_request entry.
    let dirty = get_dirty_keys(&conn);
    assert!(
        dirty.iter().any(|(t, _)| t == "merge_request"),
        "dirty_sources should contain a merge_request entry"
    );
}
|
|
|
|
#[test]
fn test_toctou_skips_stale_mr() {
    let conn = setup_db();
    let config = test_config();
    let mr = make_test_mr(101, "2026-02-17T12:00:00.000+00:00");

    // Initial ingest of fresh data goes through.
    let first = ingest_mr_by_iid(&conn, &config, 1, &mr).unwrap();
    assert!(!first.skipped_stale);

    // Wipe dirty marks, then re-ingest the identical timestamp: the
    // TOCTOU check must skip it and report no dirty keys.
    conn.execute("DELETE FROM dirty_sources", []).unwrap();
    let second = ingest_mr_by_iid(&conn, &config, 1, &mr).unwrap();
    assert!(second.skipped_stale);
    assert!(second.dirty_source_keys.is_empty());
}
|
|
|
|
#[test]
fn test_toctou_allows_newer_mr() {
    let conn = setup_db();
    let config = test_config();

    // Seed the row at T1, then clear dirty marks.
    let seed = make_test_mr(101, "2026-02-17T12:00:00.000+00:00");
    ingest_mr_by_iid(&conn, &config, 1, &seed).unwrap();
    conn.execute("DELETE FROM dirty_sources", []).unwrap();

    // A strictly newer payload (T2) must be accepted, not skipped.
    let newer = make_test_mr(101, "2026-02-17T13:00:00.000+00:00");
    let outcome = ingest_mr_by_iid(&conn, &config, 1, &newer).unwrap();

    assert!(!outcome.skipped_stale);
}
|
|
|
|
#[test]
fn test_ingest_mr_returns_dirty_source_keys() {
    let conn = setup_db();
    let config = test_config();
    let mr = make_test_mr(101, "2026-02-17T12:00:00.000+00:00");

    let outcome = ingest_mr_by_iid(&conn, &config, 1, &mr).unwrap();

    // Exactly one dirty key, typed "merge_request", with a valid rowid.
    assert_eq!(outcome.dirty_source_keys.len(), 1);
    let (source_type, local_id) = &outcome.dirty_source_keys[0];
    assert_eq!(source_type.as_str(), "merge_request");
    assert!(*local_id > 0);
}
|
|
|
|
#[test]
fn test_ingest_mr_updates_existing() {
    let conn = setup_db();
    let config = test_config();

    // First version at 12:00; capture the stored timestamp.
    let v1 = make_test_mr(101, "2026-02-17T12:00:00.000+00:00");
    ingest_mr_by_iid(&conn, &config, 1, &v1).unwrap();
    let before = get_db_updated_at_helper(&conn, "merge_requests", 101).unwrap();

    // A newer version must overwrite the existing row, not be skipped.
    let v2 = make_test_mr(101, "2026-02-17T14:00:00.000+00:00");
    let outcome = ingest_mr_by_iid(&conn, &config, 1, &v2).unwrap();
    assert!(!outcome.skipped_stale);

    let after = get_db_updated_at_helper(&conn, "merge_requests", 101).unwrap();
    assert!(after > before, "DB timestamp should increase after update");
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Preflight fetch test (wiremock)
|
|
// ---------------------------------------------------------------------------
|
|
|
|
#[tokio::test]
|
|
async fn test_preflight_fetch_returns_issues_and_mrs() {
|
|
use wiremock::matchers::{method, path};
|
|
use wiremock::{Mock, MockServer, ResponseTemplate};
|
|
|
|
let mock_server = MockServer::start().await;
|
|
|
|
// Issue fixture
|
|
let issue_json = serde_json::json!({
|
|
"id": 42000,
|
|
"iid": 42,
|
|
"project_id": 100,
|
|
"title": "Test issue 42",
|
|
"description": "desc",
|
|
"state": "opened",
|
|
"created_at": "2026-01-01T00:00:00.000+00:00",
|
|
"updated_at": "2026-02-17T12:00:00.000+00:00",
|
|
"author": {"id": 1, "username": "testuser", "name": "Test User"},
|
|
"assignees": [],
|
|
"labels": [],
|
|
"web_url": "https://example.com/group/repo/-/issues/42"
|
|
});
|
|
|
|
// MR fixture
|
|
let mr_json = serde_json::json!({
|
|
"id": 101000,
|
|
"iid": 101,
|
|
"project_id": 100,
|
|
"title": "Test MR 101",
|
|
"description": "mr desc",
|
|
"state": "opened",
|
|
"draft": false,
|
|
"work_in_progress": false,
|
|
"source_branch": "feature",
|
|
"target_branch": "main",
|
|
"sha": "abc123",
|
|
"created_at": "2026-01-01T00:00:00.000+00:00",
|
|
"updated_at": "2026-02-17T12:00:00.000+00:00",
|
|
"author": {"id": 1, "username": "testuser", "name": "Test User"},
|
|
"labels": [],
|
|
"assignees": [],
|
|
"reviewers": [],
|
|
"web_url": "https://example.com/group/repo/-/merge_requests/101"
|
|
});
|
|
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/issues/42"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(&issue_json))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/merge_requests/101"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(&mr_json))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
let client = GitLabClient::new(&mock_server.uri(), "test-token", None);
|
|
let targets = vec![
|
|
("issue".to_string(), 42i64),
|
|
("merge_request".to_string(), 101i64),
|
|
];
|
|
|
|
let result = preflight_fetch(&client, 100, &targets).await;
|
|
|
|
assert_eq!(result.issues.len(), 1);
|
|
assert_eq!(result.issues[0].iid, 42);
|
|
assert_eq!(result.merge_requests.len(), 1);
|
|
assert_eq!(result.merge_requests[0].iid, 101);
|
|
assert!(result.failures.is_empty());
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Dependent helper tests (bd-kanh)
|
|
// ---------------------------------------------------------------------------
|
|
|
|
#[tokio::test]
|
|
async fn test_fetch_dependents_for_issue_empty_events() {
|
|
use wiremock::matchers::{method, path};
|
|
use wiremock::{Mock, MockServer, ResponseTemplate};
|
|
|
|
let mock_server = MockServer::start().await;
|
|
let conn = setup_db();
|
|
let config = test_config();
|
|
|
|
// Insert an issue so we have a local_id
|
|
let issue = make_test_issue(42, "2026-02-17T12:00:00.000+00:00");
|
|
ingest_issue_by_iid(&conn, &config, 1, &issue).unwrap();
|
|
let local_id: i64 = conn
|
|
.query_row(
|
|
"SELECT id FROM issues WHERE project_id = 1 AND iid = 42",
|
|
[],
|
|
|row| row.get(0),
|
|
)
|
|
.unwrap();
|
|
|
|
// Mock empty resource event endpoints
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/issues/42/resource_state_events"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/issues/42/resource_label_events"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
Mock::given(method("GET"))
|
|
.and(path(
|
|
"/api/v4/projects/100/issues/42/resource_milestone_events",
|
|
))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
// Mock empty discussions endpoint
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/issues/42/discussions"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
let client = GitLabClient::new(&mock_server.uri(), "test-token", None);
|
|
|
|
let result = fetch_dependents_for_issue(&client, &conn, 1, 100, 42, local_id, &config)
|
|
.await
|
|
.unwrap();
|
|
|
|
assert_eq!(result.resource_events_fetched, 0);
|
|
assert_eq!(result.discussions_fetched, 0);
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn test_fetch_dependents_for_mr_empty_events() {
|
|
use wiremock::matchers::{method, path};
|
|
use wiremock::{Mock, MockServer, ResponseTemplate};
|
|
|
|
let mock_server = MockServer::start().await;
|
|
let conn = setup_db();
|
|
let config = test_config();
|
|
|
|
// Insert an MR so we have a local_id
|
|
let mr = make_test_mr(101, "2026-02-17T12:00:00.000+00:00");
|
|
ingest_mr_by_iid(&conn, &config, 1, &mr).unwrap();
|
|
let local_id: i64 = conn
|
|
.query_row(
|
|
"SELECT id FROM merge_requests WHERE project_id = 1 AND iid = 101",
|
|
[],
|
|
|row| row.get(0),
|
|
)
|
|
.unwrap();
|
|
|
|
// Mock empty resource event endpoints for MR
|
|
Mock::given(method("GET"))
|
|
.and(path(
|
|
"/api/v4/projects/100/merge_requests/101/resource_state_events",
|
|
))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
Mock::given(method("GET"))
|
|
.and(path(
|
|
"/api/v4/projects/100/merge_requests/101/resource_label_events",
|
|
))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
Mock::given(method("GET"))
|
|
.and(path(
|
|
"/api/v4/projects/100/merge_requests/101/resource_milestone_events",
|
|
))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
// Mock empty discussions endpoint for MR
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/merge_requests/101/discussions"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
// Mock empty closes_issues endpoint
|
|
Mock::given(method("GET"))
|
|
.and(path(
|
|
"/api/v4/projects/100/merge_requests/101/closes_issues",
|
|
))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
// Mock empty diffs endpoint
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/merge_requests/101/diffs"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
let client = GitLabClient::new(&mock_server.uri(), "test-token", None);
|
|
|
|
let result = fetch_dependents_for_mr(&client, &conn, 1, 100, 101, local_id, &config)
|
|
.await
|
|
.unwrap();
|
|
|
|
assert_eq!(result.resource_events_fetched, 0);
|
|
assert_eq!(result.discussions_fetched, 0);
|
|
assert_eq!(result.closes_issues_stored, 0);
|
|
assert_eq!(result.file_changes_stored, 0);
|
|
}
|
|
|
|
#[tokio::test]
|
|
async fn test_fetch_dependents_for_mr_with_closes_issues() {
|
|
use wiremock::matchers::{method, path};
|
|
use wiremock::{Mock, MockServer, ResponseTemplate};
|
|
|
|
let mock_server = MockServer::start().await;
|
|
let conn = setup_db();
|
|
let config = test_config();
|
|
|
|
// Insert issue and MR so references can resolve
|
|
let issue = make_test_issue(42, "2026-02-17T12:00:00.000+00:00");
|
|
ingest_issue_by_iid(&conn, &config, 1, &issue).unwrap();
|
|
|
|
let mr = make_test_mr(101, "2026-02-17T12:00:00.000+00:00");
|
|
ingest_mr_by_iid(&conn, &config, 1, &mr).unwrap();
|
|
let mr_local_id: i64 = conn
|
|
.query_row(
|
|
"SELECT id FROM merge_requests WHERE project_id = 1 AND iid = 101",
|
|
[],
|
|
|row| row.get(0),
|
|
)
|
|
.unwrap();
|
|
|
|
// Mock empty resource events
|
|
for endpoint in [
|
|
"resource_state_events",
|
|
"resource_label_events",
|
|
"resource_milestone_events",
|
|
] {
|
|
Mock::given(method("GET"))
|
|
.and(path(format!(
|
|
"/api/v4/projects/100/merge_requests/101/{endpoint}"
|
|
)))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
}
|
|
|
|
// Mock empty discussions
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/merge_requests/101/discussions"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
// Mock closes_issues with one reference
|
|
Mock::given(method("GET"))
|
|
.and(path(
|
|
"/api/v4/projects/100/merge_requests/101/closes_issues",
|
|
))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([
|
|
{
|
|
"id": 42000,
|
|
"iid": 42,
|
|
"project_id": 100,
|
|
"title": "Test issue 42",
|
|
"state": "opened",
|
|
"web_url": "https://example.com/group/repo/-/issues/42"
|
|
}
|
|
])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
// Mock empty diffs
|
|
Mock::given(method("GET"))
|
|
.and(path("/api/v4/projects/100/merge_requests/101/diffs"))
|
|
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([])))
|
|
.mount(&mock_server)
|
|
.await;
|
|
|
|
let client = GitLabClient::new(&mock_server.uri(), "test-token", None);
|
|
|
|
let result = fetch_dependents_for_mr(&client, &conn, 1, 100, 101, mr_local_id, &config)
|
|
.await
|
|
.unwrap();
|
|
|
|
assert_eq!(result.closes_issues_stored, 1);
|
|
}
|