Phase 4 (bd-1df9) — all 5 acceptance criteria met:
- Sync screen with delta ledger (bd-2x2h, bd-y095)
- Doctor screen with health checks (bd-2iqk)
- Stats screen with document counts (bd-2iqk)
- CLI integration: `lore tui` subcommand (bd-26lp)
- CLI integration: `lore sync --tui` flag (bd-3l56)

Phase 5 (bd-3h00) — session persistence + instance lock + text width:
- text_width.rs: Unicode-aware measurement, truncation, padding (16 tests)
- instance_lock.rs: Advisory PID lock with stale recovery (6 tests)
- session.rs: Atomic write + CRC32 checksum + quarantine (9 tests)

Closes: bd-26lp, bd-3h00, bd-3l56, bd-1df9, bd-y095
872 lines
30 KiB
Rust
872 lines
30 KiB
Rust
#![allow(dead_code)]
|
|
|
|
use anyhow::{Context, Result};
|
|
use rusqlite::Connection;
|
|
|
|
use crate::message::{EntityKey, EntityKind, TimelineEvent, TimelineEventKind};
|
|
use crate::state::timeline::TimelineScope;
|
|
|
|
/// Internal filter resolved from a [`TimelineScope`].
///
/// Translates the user-facing scope (which uses `EntityKey` with project_id + iid)
/// into internal DB ids for efficient querying.
enum TimelineFilter {
    /// No filtering — return all events.
    All,
    /// Filter to events for a specific issue (internal DB id).
    Issue(i64),
    /// Filter to events for a specific MR (internal DB id).
    MergeRequest(i64),
    /// Filter to events by a specific actor. Matched against
    /// `author_username` (created events) / `actor_username` (resource events).
    Actor(String),
}
|
|
|
|
/// Resolve a [`TimelineScope`] into a concrete [`TimelineFilter`].
|
|
fn resolve_timeline_scope(conn: &Connection, scope: &TimelineScope) -> Result<TimelineFilter> {
|
|
match scope {
|
|
TimelineScope::All => Ok(TimelineFilter::All),
|
|
TimelineScope::Entity(key) => {
|
|
let (table, kind_label) = match key.kind {
|
|
EntityKind::Issue => ("issues", "issue"),
|
|
EntityKind::MergeRequest => ("merge_requests", "merge request"),
|
|
};
|
|
let sql = format!("SELECT id FROM {table} WHERE project_id = ?1 AND iid = ?2");
|
|
let id: i64 = conn
|
|
.query_row(&sql, rusqlite::params![key.project_id, key.iid], |r| {
|
|
r.get(0)
|
|
})
|
|
.with_context(|| {
|
|
format!(
|
|
"resolving {kind_label} #{} in project {}",
|
|
key.iid, key.project_id
|
|
)
|
|
})?;
|
|
match key.kind {
|
|
EntityKind::Issue => Ok(TimelineFilter::Issue(id)),
|
|
EntityKind::MergeRequest => Ok(TimelineFilter::MergeRequest(id)),
|
|
}
|
|
}
|
|
TimelineScope::Author(name) => Ok(TimelineFilter::Actor(name.clone())),
|
|
}
|
|
}
|
|
|
|
/// Fetch timeline events from raw resource event tables.
|
|
///
|
|
/// Queries `issues`/`merge_requests` for Created events, plus
|
|
/// `resource_state_events`, `resource_label_events`, and
|
|
/// `resource_milestone_events` for lifecycle events. Results are sorted
|
|
/// by timestamp descending (most recent first) and truncated to `limit`.
|
|
pub fn fetch_timeline_events(
|
|
conn: &Connection,
|
|
scope: &TimelineScope,
|
|
limit: usize,
|
|
) -> Result<Vec<TimelineEvent>> {
|
|
let filter = resolve_timeline_scope(conn, scope)?;
|
|
let mut events = Vec::new();
|
|
|
|
// Each collector is given the full limit. After merge-sorting, we truncate
|
|
// to `limit`. Worst case we hold 4*limit events in memory (bounded).
|
|
collect_tl_created_events(conn, &filter, limit, &mut events)?;
|
|
collect_tl_state_events(conn, &filter, limit, &mut events)?;
|
|
collect_tl_label_events(conn, &filter, limit, &mut events)?;
|
|
collect_tl_milestone_events(conn, &filter, limit, &mut events)?;
|
|
|
|
// Sort by timestamp descending (most recent first), with stable tiebreak.
|
|
events.sort_by(|a, b| {
|
|
b.timestamp_ms
|
|
.cmp(&a.timestamp_ms)
|
|
.then_with(|| a.entity_key.kind.cmp(&b.entity_key.kind))
|
|
.then_with(|| a.entity_key.iid.cmp(&b.entity_key.iid))
|
|
});
|
|
|
|
events.truncate(limit);
|
|
Ok(events)
|
|
}
|
|
|
|
/// Collect Created events from issues and merge_requests tables.
|
|
fn collect_tl_created_events(
|
|
conn: &Connection,
|
|
filter: &TimelineFilter,
|
|
limit: usize,
|
|
events: &mut Vec<TimelineEvent>,
|
|
) -> Result<()> {
|
|
// Issue created events.
|
|
if !matches!(filter, TimelineFilter::MergeRequest(_)) {
|
|
let (where_clause, mut params) = match filter {
|
|
TimelineFilter::All => (
|
|
"1=1".to_string(),
|
|
Vec::<Box<dyn rusqlite::types::ToSql>>::new(),
|
|
),
|
|
TimelineFilter::Issue(id) => (
|
|
"i.id = ?1".to_string(),
|
|
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
|
),
|
|
TimelineFilter::Actor(name) => (
|
|
"i.author_username = ?1".to_string(),
|
|
vec![Box::new(name.clone()) as Box<dyn rusqlite::types::ToSql>],
|
|
),
|
|
TimelineFilter::MergeRequest(_) => unreachable!(),
|
|
};
|
|
|
|
let limit_param = params.len() + 1;
|
|
let sql = format!(
|
|
"SELECT i.created_at, i.iid, i.title, i.author_username, i.project_id, p.path_with_namespace
|
|
FROM issues i
|
|
JOIN projects p ON p.id = i.project_id
|
|
WHERE {where_clause}
|
|
ORDER BY i.created_at DESC
|
|
LIMIT ?{limit_param}"
|
|
);
|
|
params.push(Box::new(limit as i64));
|
|
|
|
let mut stmt = conn
|
|
.prepare(&sql)
|
|
.context("preparing issue created query")?;
|
|
let param_refs: Vec<&dyn rusqlite::types::ToSql> =
|
|
params.iter().map(AsRef::as_ref).collect();
|
|
let rows = stmt
|
|
.query_map(param_refs.as_slice(), |row| {
|
|
Ok((
|
|
row.get::<_, i64>(0)?,
|
|
row.get::<_, i64>(1)?,
|
|
row.get::<_, Option<String>>(2)?,
|
|
row.get::<_, Option<String>>(3)?,
|
|
row.get::<_, i64>(4)?,
|
|
row.get::<_, String>(5)?,
|
|
))
|
|
})
|
|
.context("querying issue created events")?;
|
|
|
|
for row in rows {
|
|
let (created_at, iid, title, author, project_id, project_path) =
|
|
row.context("reading issue created row")?;
|
|
let title_str = title.as_deref().unwrap_or("(untitled)");
|
|
events.push(TimelineEvent {
|
|
timestamp_ms: created_at,
|
|
entity_key: EntityKey::issue(project_id, iid),
|
|
event_kind: TimelineEventKind::Created,
|
|
summary: format!("Issue #{iid} created: {title_str}"),
|
|
detail: title,
|
|
actor: author,
|
|
project_path,
|
|
});
|
|
}
|
|
}
|
|
|
|
// MR created events.
|
|
if !matches!(filter, TimelineFilter::Issue(_)) {
|
|
let (where_clause, mut params) = match filter {
|
|
TimelineFilter::All => (
|
|
"1=1".to_string(),
|
|
Vec::<Box<dyn rusqlite::types::ToSql>>::new(),
|
|
),
|
|
TimelineFilter::MergeRequest(id) => (
|
|
"mr.id = ?1".to_string(),
|
|
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
|
),
|
|
TimelineFilter::Actor(name) => (
|
|
"mr.author_username = ?1".to_string(),
|
|
vec![Box::new(name.clone()) as Box<dyn rusqlite::types::ToSql>],
|
|
),
|
|
TimelineFilter::Issue(_) => unreachable!(),
|
|
};
|
|
|
|
let limit_param = params.len() + 1;
|
|
let sql = format!(
|
|
"SELECT mr.created_at, mr.iid, mr.title, mr.author_username, mr.project_id, p.path_with_namespace
|
|
FROM merge_requests mr
|
|
JOIN projects p ON p.id = mr.project_id
|
|
WHERE {where_clause}
|
|
ORDER BY mr.created_at DESC
|
|
LIMIT ?{limit_param}"
|
|
);
|
|
params.push(Box::new(limit as i64));
|
|
|
|
let mut stmt = conn.prepare(&sql).context("preparing MR created query")?;
|
|
let param_refs: Vec<&dyn rusqlite::types::ToSql> =
|
|
params.iter().map(AsRef::as_ref).collect();
|
|
let rows = stmt
|
|
.query_map(param_refs.as_slice(), |row| {
|
|
Ok((
|
|
row.get::<_, i64>(0)?,
|
|
row.get::<_, i64>(1)?,
|
|
row.get::<_, Option<String>>(2)?,
|
|
row.get::<_, Option<String>>(3)?,
|
|
row.get::<_, i64>(4)?,
|
|
row.get::<_, String>(5)?,
|
|
))
|
|
})
|
|
.context("querying MR created events")?;
|
|
|
|
for row in rows {
|
|
let (created_at, iid, title, author, project_id, project_path) =
|
|
row.context("reading MR created row")?;
|
|
let title_str = title.as_deref().unwrap_or("(untitled)");
|
|
events.push(TimelineEvent {
|
|
timestamp_ms: created_at,
|
|
entity_key: EntityKey::mr(project_id, iid),
|
|
event_kind: TimelineEventKind::Created,
|
|
summary: format!("MR !{iid} created: {title_str}"),
|
|
detail: title,
|
|
actor: author,
|
|
project_path,
|
|
});
|
|
}
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Helper: build WHERE clause and params for resource event tables.
|
|
///
|
|
/// Resource event tables have `issue_id` and `merge_request_id` columns
|
|
/// (exactly one is non-NULL per row), plus `actor_username`.
|
|
fn resource_event_where(filter: &TimelineFilter) -> (String, Vec<Box<dyn rusqlite::types::ToSql>>) {
|
|
match filter {
|
|
TimelineFilter::All => ("1=1".to_string(), Vec::new()),
|
|
TimelineFilter::Issue(id) => (
|
|
"e.issue_id = ?1".to_string(),
|
|
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
|
),
|
|
TimelineFilter::MergeRequest(id) => (
|
|
"e.merge_request_id = ?1".to_string(),
|
|
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
|
),
|
|
TimelineFilter::Actor(name) => (
|
|
"e.actor_username = ?1".to_string(),
|
|
vec![Box::new(name.clone()) as Box<dyn rusqlite::types::ToSql>],
|
|
),
|
|
}
|
|
}
|
|
|
|
/// Resolve a resource event row's entity to an EntityKey.
|
|
fn resolve_event_entity(
|
|
issue_id: Option<i64>,
|
|
mr_id: Option<i64>,
|
|
issue_iid: Option<i64>,
|
|
mr_iid: Option<i64>,
|
|
issue_project_id: Option<i64>,
|
|
mr_project_id: Option<i64>,
|
|
) -> Option<(EntityKey, i64)> {
|
|
if let (Some(iid), Some(pid)) = (issue_iid, issue_project_id) {
|
|
Some((EntityKey::issue(pid, iid), pid))
|
|
} else if let (Some(iid), Some(pid)) = (mr_iid, mr_project_id) {
|
|
Some((EntityKey::mr(pid, iid), pid))
|
|
} else {
|
|
// Orphaned event — entity was deleted.
|
|
let _ = (issue_id, mr_id); // suppress unused warnings
|
|
None
|
|
}
|
|
}
|
|
|
|
/// Collect state change events from `resource_state_events`.
|
|
fn collect_tl_state_events(
|
|
conn: &Connection,
|
|
filter: &TimelineFilter,
|
|
limit: usize,
|
|
events: &mut Vec<TimelineEvent>,
|
|
) -> Result<()> {
|
|
let (where_clause, mut params) = resource_event_where(filter);
|
|
let limit_param = params.len() + 1;
|
|
|
|
let sql = format!(
|
|
"SELECT e.created_at, e.state, e.actor_username,
|
|
e.issue_id, e.merge_request_id,
|
|
i.iid, mr.iid, i.project_id, mr.project_id,
|
|
COALESCE(pi.path_with_namespace, pm.path_with_namespace) AS project_path
|
|
FROM resource_state_events e
|
|
LEFT JOIN issues i ON i.id = e.issue_id
|
|
LEFT JOIN merge_requests mr ON mr.id = e.merge_request_id
|
|
LEFT JOIN projects pi ON pi.id = i.project_id
|
|
LEFT JOIN projects pm ON pm.id = mr.project_id
|
|
WHERE {where_clause}
|
|
ORDER BY e.created_at DESC
|
|
LIMIT ?{limit_param}"
|
|
);
|
|
params.push(Box::new(limit as i64));
|
|
|
|
let mut stmt = conn.prepare(&sql).context("preparing state events query")?;
|
|
let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(AsRef::as_ref).collect();
|
|
let rows = stmt
|
|
.query_map(param_refs.as_slice(), |row| {
|
|
Ok((
|
|
row.get::<_, i64>(0)?,
|
|
row.get::<_, String>(1)?,
|
|
row.get::<_, Option<String>>(2)?,
|
|
row.get::<_, Option<i64>>(3)?,
|
|
row.get::<_, Option<i64>>(4)?,
|
|
row.get::<_, Option<i64>>(5)?,
|
|
row.get::<_, Option<i64>>(6)?,
|
|
row.get::<_, Option<i64>>(7)?,
|
|
row.get::<_, Option<i64>>(8)?,
|
|
row.get::<_, Option<String>>(9)?,
|
|
))
|
|
})
|
|
.context("querying state events")?;
|
|
|
|
for row in rows {
|
|
let (
|
|
created_at,
|
|
state,
|
|
actor,
|
|
issue_id,
|
|
mr_id,
|
|
issue_iid,
|
|
mr_iid,
|
|
issue_pid,
|
|
mr_pid,
|
|
project_path,
|
|
) = row.context("reading state event row")?;
|
|
|
|
let Some((entity_key, _pid)) =
|
|
resolve_event_entity(issue_id, mr_id, issue_iid, mr_iid, issue_pid, mr_pid)
|
|
else {
|
|
continue;
|
|
};
|
|
|
|
let (event_kind, summary) = if state == "merged" {
|
|
(
|
|
TimelineEventKind::Merged,
|
|
format!("MR !{} merged", entity_key.iid),
|
|
)
|
|
} else {
|
|
(
|
|
TimelineEventKind::StateChanged,
|
|
format!("State changed to {state}"),
|
|
)
|
|
};
|
|
|
|
events.push(TimelineEvent {
|
|
timestamp_ms: created_at,
|
|
entity_key,
|
|
event_kind,
|
|
summary,
|
|
detail: Some(state),
|
|
actor,
|
|
project_path: project_path.unwrap_or_default(),
|
|
});
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Collect label change events from `resource_label_events`.
|
|
fn collect_tl_label_events(
|
|
conn: &Connection,
|
|
filter: &TimelineFilter,
|
|
limit: usize,
|
|
events: &mut Vec<TimelineEvent>,
|
|
) -> Result<()> {
|
|
let (where_clause, mut params) = resource_event_where(filter);
|
|
let limit_param = params.len() + 1;
|
|
|
|
let sql = format!(
|
|
"SELECT e.created_at, e.action, e.label_name, e.actor_username,
|
|
e.issue_id, e.merge_request_id,
|
|
i.iid, mr.iid, i.project_id, mr.project_id,
|
|
COALESCE(pi.path_with_namespace, pm.path_with_namespace) AS project_path
|
|
FROM resource_label_events e
|
|
LEFT JOIN issues i ON i.id = e.issue_id
|
|
LEFT JOIN merge_requests mr ON mr.id = e.merge_request_id
|
|
LEFT JOIN projects pi ON pi.id = i.project_id
|
|
LEFT JOIN projects pm ON pm.id = mr.project_id
|
|
WHERE {where_clause}
|
|
ORDER BY e.created_at DESC
|
|
LIMIT ?{limit_param}"
|
|
);
|
|
params.push(Box::new(limit as i64));
|
|
|
|
let mut stmt = conn.prepare(&sql).context("preparing label events query")?;
|
|
let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(AsRef::as_ref).collect();
|
|
let rows = stmt
|
|
.query_map(param_refs.as_slice(), |row| {
|
|
Ok((
|
|
row.get::<_, i64>(0)?,
|
|
row.get::<_, String>(1)?,
|
|
row.get::<_, String>(2)?,
|
|
row.get::<_, Option<String>>(3)?,
|
|
row.get::<_, Option<i64>>(4)?,
|
|
row.get::<_, Option<i64>>(5)?,
|
|
row.get::<_, Option<i64>>(6)?,
|
|
row.get::<_, Option<i64>>(7)?,
|
|
row.get::<_, Option<i64>>(8)?,
|
|
row.get::<_, Option<i64>>(9)?,
|
|
row.get::<_, Option<String>>(10)?,
|
|
))
|
|
})
|
|
.context("querying label events")?;
|
|
|
|
for row in rows {
|
|
let (
|
|
created_at,
|
|
action,
|
|
label_name,
|
|
actor,
|
|
issue_id,
|
|
mr_id,
|
|
issue_iid,
|
|
mr_iid,
|
|
issue_pid,
|
|
mr_pid,
|
|
project_path,
|
|
) = row.context("reading label event row")?;
|
|
|
|
let Some((entity_key, _pid)) =
|
|
resolve_event_entity(issue_id, mr_id, issue_iid, mr_iid, issue_pid, mr_pid)
|
|
else {
|
|
continue;
|
|
};
|
|
|
|
let (event_kind, summary) = match action.as_str() {
|
|
"add" => (
|
|
TimelineEventKind::LabelAdded,
|
|
format!("Label added: {label_name}"),
|
|
),
|
|
"remove" => (
|
|
TimelineEventKind::LabelRemoved,
|
|
format!("Label removed: {label_name}"),
|
|
),
|
|
_ => continue,
|
|
};
|
|
|
|
events.push(TimelineEvent {
|
|
timestamp_ms: created_at,
|
|
entity_key,
|
|
event_kind,
|
|
summary,
|
|
detail: Some(label_name),
|
|
actor,
|
|
project_path: project_path.unwrap_or_default(),
|
|
});
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Collect milestone change events from `resource_milestone_events`.
|
|
fn collect_tl_milestone_events(
|
|
conn: &Connection,
|
|
filter: &TimelineFilter,
|
|
limit: usize,
|
|
events: &mut Vec<TimelineEvent>,
|
|
) -> Result<()> {
|
|
let (where_clause, mut params) = resource_event_where(filter);
|
|
let limit_param = params.len() + 1;
|
|
|
|
let sql = format!(
|
|
"SELECT e.created_at, e.action, e.milestone_title, e.actor_username,
|
|
e.issue_id, e.merge_request_id,
|
|
i.iid, mr.iid, i.project_id, mr.project_id,
|
|
COALESCE(pi.path_with_namespace, pm.path_with_namespace) AS project_path
|
|
FROM resource_milestone_events e
|
|
LEFT JOIN issues i ON i.id = e.issue_id
|
|
LEFT JOIN merge_requests mr ON mr.id = e.merge_request_id
|
|
LEFT JOIN projects pi ON pi.id = i.project_id
|
|
LEFT JOIN projects pm ON pm.id = mr.project_id
|
|
WHERE {where_clause}
|
|
ORDER BY e.created_at DESC
|
|
LIMIT ?{limit_param}"
|
|
);
|
|
params.push(Box::new(limit as i64));
|
|
|
|
let mut stmt = conn
|
|
.prepare(&sql)
|
|
.context("preparing milestone events query")?;
|
|
let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(AsRef::as_ref).collect();
|
|
let rows = stmt
|
|
.query_map(param_refs.as_slice(), |row| {
|
|
Ok((
|
|
row.get::<_, i64>(0)?,
|
|
row.get::<_, String>(1)?,
|
|
row.get::<_, String>(2)?,
|
|
row.get::<_, Option<String>>(3)?,
|
|
row.get::<_, Option<i64>>(4)?,
|
|
row.get::<_, Option<i64>>(5)?,
|
|
row.get::<_, Option<i64>>(6)?,
|
|
row.get::<_, Option<i64>>(7)?,
|
|
row.get::<_, Option<i64>>(8)?,
|
|
row.get::<_, Option<i64>>(9)?,
|
|
row.get::<_, Option<String>>(10)?,
|
|
))
|
|
})
|
|
.context("querying milestone events")?;
|
|
|
|
for row in rows {
|
|
let (
|
|
created_at,
|
|
action,
|
|
milestone_title,
|
|
actor,
|
|
issue_id,
|
|
mr_id,
|
|
issue_iid,
|
|
mr_iid,
|
|
issue_pid,
|
|
mr_pid,
|
|
project_path,
|
|
) = row.context("reading milestone event row")?;
|
|
|
|
let Some((entity_key, _pid)) =
|
|
resolve_event_entity(issue_id, mr_id, issue_iid, mr_iid, issue_pid, mr_pid)
|
|
else {
|
|
continue;
|
|
};
|
|
|
|
let (event_kind, summary) = match action.as_str() {
|
|
"add" => (
|
|
TimelineEventKind::MilestoneSet,
|
|
format!("Milestone set: {milestone_title}"),
|
|
),
|
|
"remove" => (
|
|
TimelineEventKind::MilestoneRemoved,
|
|
format!("Milestone removed: {milestone_title}"),
|
|
),
|
|
_ => continue,
|
|
};
|
|
|
|
events.push(TimelineEvent {
|
|
timestamp_ms: created_at,
|
|
entity_key,
|
|
event_kind,
|
|
summary,
|
|
detail: Some(milestone_title),
|
|
actor,
|
|
project_path: project_path.unwrap_or_default(),
|
|
});
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
|
|
    /// Create the minimal schema needed for timeline queries.
    ///
    /// Models only `projects`, `issues`, and `merge_requests` — the tables
    /// the Created-event queries read from.
    fn create_dashboard_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT NOT NULL,
                author_username TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_username TEXT,
                created_at INTEGER,
                updated_at INTEGER,
                last_seen_at INTEGER NOT NULL
            );
            ",
        )
        .expect("create dashboard schema");
    }
|
|
|
|
    /// Insert a test issue into project 1.
    ///
    /// `gitlab_id` is derived as `iid * 100` to stay unique per issue;
    /// created_at/updated_at/last_seen_at all reuse `updated_at`.
    /// Note: `author_username` is deliberately left NULL.
    fn insert_issue(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
            rusqlite::params![iid * 100, iid, format!("Issue {iid}"), state, updated_at],
        )
        .expect("insert issue");
    }
|
|
|
|
    /// Insert a test MR into project 1.
    ///
    /// `gitlab_id` is derived as `iid * 100 + 50` so it never collides
    /// with ids produced by `insert_issue`. `author_username` stays NULL.
    fn insert_mr(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
            rusqlite::params![iid * 100 + 50, iid, format!("MR {iid}"), state, updated_at],
        )
        .expect("insert mr");
    }
|
|
|
|
    /// Add resource event tables to an existing schema.
    ///
    /// Creates the three `resource_*_events` tables the timeline
    /// collectors read (state, label, milestone). `issue_id` and
    /// `merge_request_id` are both nullable; the production contract is
    /// that exactly one is non-NULL per row.
    fn add_resource_event_tables(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE IF NOT EXISTS resource_state_events (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                state TEXT NOT NULL,
                actor_gitlab_id INTEGER,
                actor_username TEXT,
                created_at INTEGER NOT NULL,
                source_commit TEXT,
                source_merge_request_iid INTEGER
            );
            CREATE TABLE IF NOT EXISTS resource_label_events (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                action TEXT NOT NULL,
                label_name TEXT NOT NULL,
                actor_gitlab_id INTEGER,
                actor_username TEXT,
                created_at INTEGER NOT NULL
            );
            CREATE TABLE IF NOT EXISTS resource_milestone_events (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                action TEXT NOT NULL,
                milestone_title TEXT NOT NULL,
                milestone_id INTEGER,
                actor_gitlab_id INTEGER,
                actor_username TEXT,
                created_at INTEGER NOT NULL
            );
            ",
        )
        .expect("create resource event tables");
    }
|
|
|
|
    /// Create a full timeline test schema (dashboard schema + resource events).
    ///
    /// Also inserts project id=1 ('group/project'), which the insert
    /// helpers reference via the hard-coded `project_id = 1`.
    fn create_timeline_schema(conn: &Connection) {
        create_dashboard_schema(conn);
        add_resource_event_tables(conn);
        // Insert a project for test entities.
        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')",
            [],
        )
        .expect("insert test project");
    }
|
|
|
|
    /// Insert a row into `resource_state_events` for project 1.
    ///
    /// Callers pass exactly one of `issue_id` / `mr_id` as `Some` to
    /// mimic real data; both values are bound through as-is.
    fn insert_state_event(
        conn: &Connection,
        gitlab_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        state: &str,
        actor: &str,
        created_at: i64,
    ) {
        conn.execute(
            "INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, merge_request_id, state, actor_username, created_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?6)",
            rusqlite::params![gitlab_id, issue_id, mr_id, state, actor, created_at],
        )
        .expect("insert state event");
    }
|
|
|
|
    /// Insert a row into `resource_label_events` for project 1.
    ///
    /// `action` should be "add" or "remove" — the only values the
    /// label collector surfaces. Exactly one of `issue_id` / `mr_id`
    /// should be `Some`.
    #[allow(clippy::too_many_arguments)]
    fn insert_label_event(
        conn: &Connection,
        gitlab_id: i64,
        issue_id: Option<i64>,
        mr_id: Option<i64>,
        action: &str,
        label: &str,
        actor: &str,
        created_at: i64,
    ) {
        conn.execute(
            "INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, merge_request_id, action, label_name, actor_username, created_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?6, ?7)",
            rusqlite::params![gitlab_id, issue_id, mr_id, action, label, actor, created_at],
        )
        .expect("insert label event");
    }
|
|
|
|
    /// An Entity scope must restrict results to that one issue's events.
    #[test]
    fn test_fetch_timeline_scoped() {
        let conn = Connection::open_in_memory().unwrap();
        create_timeline_schema(&conn);

        // Create two issues.
        let now = 1_700_000_000_000_i64;
        insert_issue(&conn, 1, "opened", now - 100_000);
        insert_issue(&conn, 2, "opened", now - 50_000);

        // Get internal IDs (resource events reference the DB id, not iid).
        let issue1_id: i64 = conn
            .query_row("SELECT id FROM issues WHERE iid = 1", [], |r| r.get(0))
            .unwrap();
        let issue2_id: i64 = conn
            .query_row("SELECT id FROM issues WHERE iid = 2", [], |r| r.get(0))
            .unwrap();

        // State events: issue 1 closed, issue 2 label added.
        insert_state_event(&conn, 1, Some(issue1_id), None, "closed", "alice", now - 80_000);
        insert_label_event(&conn, 2, Some(issue2_id), None, "add", "bug", "bob", now - 30_000);

        // Fetch scoped to issue 1.
        let scope = TimelineScope::Entity(EntityKey::issue(1, 1));
        let events = fetch_timeline_events(&conn, &scope, 100).unwrap();

        // Should only have issue 1's events: Created + StateChanged.
        assert_eq!(events.len(), 2);
        for event in &events {
            assert_eq!(event.entity_key.iid, 1, "All events should be for issue #1");
        }
        // Most recent first.
        assert!(events[0].timestamp_ms >= events[1].timestamp_ms);
    }
|
|
|
|
#[test]
|
|
fn test_fetch_timeline_all_scope() {
|
|
let conn = Connection::open_in_memory().unwrap();
|
|
create_timeline_schema(&conn);
|
|
|
|
let now = 1_700_000_000_000_i64;
|
|
insert_issue(&conn, 1, "opened", now - 100_000);
|
|
insert_issue(&conn, 2, "opened", now - 50_000);
|
|
|
|
let events = fetch_timeline_events(&conn, &TimelineScope::All, 100).unwrap();
|
|
|
|
// Should have Created events for both issues.
|
|
assert_eq!(events.len(), 2);
|
|
}
|
|
|
|
    /// An Author scope returns only events attributed to that actor.
    #[test]
    fn test_fetch_timeline_author_scope() {
        let conn = Connection::open_in_memory().unwrap();
        create_timeline_schema(&conn);

        let now = 1_700_000_000_000_i64;
        insert_issue(&conn, 1, "opened", now - 100_000); // default: no author_username in insert_issue

        let issue1_id: i64 = conn
            .query_row("SELECT id FROM issues WHERE iid = 1", [], |r| r.get(0))
            .unwrap();

        // State events by different actors.
        insert_state_event(&conn, 1, Some(issue1_id), None, "closed", "alice", now - 80_000);
        insert_state_event(&conn, 2, Some(issue1_id), None, "reopened", "bob", now - 60_000);

        let scope = TimelineScope::Author("alice".into());
        let events = fetch_timeline_events(&conn, &scope, 100).unwrap();

        // Should only get alice's state event (Created events don't have author set via insert_issue).
        assert!(events.iter().all(|e| e.actor.as_deref() == Some("alice")));
    }
|
|
|
|
#[test]
|
|
fn test_fetch_timeline_respects_limit() {
|
|
let conn = Connection::open_in_memory().unwrap();
|
|
create_timeline_schema(&conn);
|
|
|
|
let now = 1_700_000_000_000_i64;
|
|
for i in 1..=10 {
|
|
insert_issue(&conn, i, "opened", now - (i * 10_000));
|
|
}
|
|
|
|
let events = fetch_timeline_events(&conn, &TimelineScope::All, 3).unwrap();
|
|
assert_eq!(events.len(), 3);
|
|
}
|
|
|
|
#[test]
|
|
fn test_fetch_timeline_sorted_most_recent_first() {
|
|
let conn = Connection::open_in_memory().unwrap();
|
|
create_timeline_schema(&conn);
|
|
|
|
let now = 1_700_000_000_000_i64;
|
|
insert_issue(&conn, 1, "opened", now - 200_000);
|
|
insert_issue(&conn, 2, "opened", now - 100_000);
|
|
insert_issue(&conn, 3, "opened", now - 300_000);
|
|
|
|
let events = fetch_timeline_events(&conn, &TimelineScope::All, 100).unwrap();
|
|
|
|
for window in events.windows(2) {
|
|
assert!(
|
|
window[0].timestamp_ms >= window[1].timestamp_ms,
|
|
"Events should be sorted most-recent-first"
|
|
);
|
|
}
|
|
}
|
|
|
|
    /// A state event with state "merged" maps to `TimelineEventKind::Merged`
    /// with the MR-specific summary wording.
    #[test]
    fn test_fetch_timeline_state_merged_is_merged_kind() {
        let conn = Connection::open_in_memory().unwrap();
        create_timeline_schema(&conn);

        let now = 1_700_000_000_000_i64;
        insert_mr(&conn, 1, "merged", now - 100_000);

        // Resolve the MR's internal DB id for the event's foreign key.
        let mr_id: i64 = conn
            .query_row("SELECT id FROM merge_requests WHERE iid = 1", [], |r| {
                r.get(0)
            })
            .unwrap();

        insert_state_event(&conn, 1, None, Some(mr_id), "merged", "alice", now - 50_000);

        let scope = TimelineScope::Entity(EntityKey::mr(1, 1));
        let events = fetch_timeline_events(&conn, &scope, 100).unwrap();

        // Exactly one Merged event with the expected summary.
        let merged_events: Vec<_> = events
            .iter()
            .filter(|e| e.event_kind == TimelineEventKind::Merged)
            .collect();
        assert_eq!(merged_events.len(), 1);
        assert_eq!(merged_events[0].summary, "MR !1 merged");
    }
|
|
|
|
#[test]
|
|
fn test_fetch_timeline_empty_db() {
|
|
let conn = Connection::open_in_memory().unwrap();
|
|
create_timeline_schema(&conn);
|
|
|
|
let events = fetch_timeline_events(&conn, &TimelineScope::All, 100).unwrap();
|
|
assert!(events.is_empty());
|
|
}
|
|
}
|