feat(tui): Phase 3 power features — Who, Search, Timeline, Trace, File History screens
Complete TUI Phase 3 implementation with all 5 power feature screens: - Who screen: 5 modes (expert/workload/reviews/active/overlap) with mode tabs, input bar, result rendering, and hint bar - Search screen: full-text search with result list and scoring display - Timeline screen: chronological event feed with time-relative display - Trace screen: file provenance chains with expand/collapse, rename tracking, and linked issues/discussions - File History screen: per-file MR timeline with rename chain display and discussion snippets Also includes: - Command palette overlay (fuzzy search) - Bootstrap screen (initial sync flow) - Action layer split from monolithic action.rs to per-screen modules - Entity and render cache infrastructure - Shared who_types module in core crate - All screens wired into view/mod.rs dispatch - 597 tests passing, clippy clean (pedantic + nursery), fmt clean
This commit is contained in:
File diff suppressed because it is too large
Load Diff
298
crates/lore-tui/src/action/bootstrap.rs
Normal file
298
crates/lore-tui/src/action/bootstrap.rs
Normal file
@@ -0,0 +1,298 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::state::bootstrap::{DataReadiness, SchemaCheck};
|
||||
|
||||
/// Minimum schema version required by this TUI version.
///
/// Compared against the `schema_version` table by [`check_schema_version`];
/// databases below this version are reported as [`SchemaCheck::Incompatible`].
pub const MINIMUM_SCHEMA_VERSION: i32 = 20;
|
||||
/// Check the schema version of the database.
|
||||
///
|
||||
/// Returns [`SchemaCheck::NoDB`] if the `schema_version` table doesn't exist,
|
||||
/// [`SchemaCheck::Incompatible`] if the version is below the minimum,
|
||||
/// or [`SchemaCheck::Compatible`] if all is well.
|
||||
pub fn check_schema_version(conn: &Connection, minimum: i32) -> SchemaCheck {
|
||||
// Check if schema_version table exists.
|
||||
let table_exists: bool = conn
|
||||
.query_row(
|
||||
"SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='schema_version'",
|
||||
[],
|
||||
|r| r.get::<_, i64>(0),
|
||||
)
|
||||
.map(|c| c > 0)
|
||||
.unwrap_or(false);
|
||||
|
||||
if !table_exists {
|
||||
return SchemaCheck::NoDB;
|
||||
}
|
||||
|
||||
// Read the current version.
|
||||
match conn.query_row("SELECT version FROM schema_version LIMIT 1", [], |r| {
|
||||
r.get::<_, i32>(0)
|
||||
}) {
|
||||
Ok(version) if version >= minimum => SchemaCheck::Compatible { version },
|
||||
Ok(found) => SchemaCheck::Incompatible { found, minimum },
|
||||
Err(_) => SchemaCheck::NoDB,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check whether the database has enough data to skip the bootstrap screen.
|
||||
///
|
||||
/// Counts issues, merge requests, and search documents. The `documents` table
|
||||
/// may not exist on older schemas, so its absence is treated as "no documents."
|
||||
pub fn check_data_readiness(conn: &Connection) -> Result<DataReadiness> {
|
||||
let has_issues: bool = conn
|
||||
.query_row("SELECT EXISTS(SELECT 1 FROM issues LIMIT 1)", [], |r| {
|
||||
r.get(0)
|
||||
})
|
||||
.context("checking issues")?;
|
||||
|
||||
let has_mrs: bool = conn
|
||||
.query_row(
|
||||
"SELECT EXISTS(SELECT 1 FROM merge_requests LIMIT 1)",
|
||||
[],
|
||||
|r| r.get(0),
|
||||
)
|
||||
.context("checking merge requests")?;
|
||||
|
||||
// documents table may not exist yet (created by generate-docs).
|
||||
let has_documents: bool = conn
|
||||
.query_row("SELECT EXISTS(SELECT 1 FROM documents LIMIT 1)", [], |r| {
|
||||
r.get(0)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
let schema_version = conn
|
||||
.query_row("SELECT version FROM schema_version LIMIT 1", [], |r| {
|
||||
r.get::<_, i32>(0)
|
||||
})
|
||||
.unwrap_or(0);
|
||||
|
||||
Ok(DataReadiness {
|
||||
has_issues,
|
||||
has_mrs,
|
||||
has_documents,
|
||||
schema_version,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Create the minimal schema needed for bootstrap / data-readiness queries.
    ///
    /// Deliberately omits `schema_version` so each test controls its presence
    /// and contents individually.
    fn create_dashboard_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT NOT NULL,
                author_username TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_username TEXT,
                created_at INTEGER,
                updated_at INTEGER,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                noteable_type TEXT NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE documents (
                id INTEGER PRIMARY KEY,
                source_type TEXT NOT NULL,
                source_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                content_text TEXT NOT NULL,
                content_hash TEXT NOT NULL
            );
            CREATE TABLE embedding_metadata (
                document_id INTEGER NOT NULL,
                chunk_index INTEGER NOT NULL DEFAULT 0,
                model TEXT NOT NULL,
                dims INTEGER NOT NULL,
                document_hash TEXT NOT NULL,
                chunk_hash TEXT NOT NULL,
                created_at INTEGER NOT NULL,
                PRIMARY KEY(document_id, chunk_index)
            );
            CREATE TABLE sync_runs (
                id INTEGER PRIMARY KEY,
                started_at INTEGER NOT NULL,
                heartbeat_at INTEGER NOT NULL,
                finished_at INTEGER,
                status TEXT NOT NULL,
                command TEXT NOT NULL,
                error TEXT
            );
            ",
        )
        .expect("create dashboard schema");
    }

    /// Insert a minimal issue row; `gitlab_id` is derived from `iid` so rows
    /// stay unique, and one timestamp fills created/updated/last_seen.
    fn insert_issue(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
            rusqlite::params![iid * 100, iid, format!("Issue {iid}"), state, updated_at],
        )
        .expect("insert issue");
    }

    /// Insert a minimal MR row; the `+ 50` offset keeps `gitlab_id` from
    /// colliding with issue ids generated by `insert_issue`.
    fn insert_mr(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
            rusqlite::params![iid * 100 + 50, iid, format!("MR {iid}"), state, updated_at],
        )
        .expect("insert mr");
    }

    /// TDD anchor test from bead spec.
    #[test]
    fn test_schema_preflight_rejects_old() {
        let conn = Connection::open_in_memory().unwrap();
        conn.execute_batch(
            "CREATE TABLE schema_version (version INTEGER);
             INSERT INTO schema_version (version) VALUES (1);",
        )
        .unwrap();

        let result = check_schema_version(&conn, 20);
        // Both the found version and the required minimum are reported back.
        assert!(matches!(
            result,
            SchemaCheck::Incompatible {
                found: 1,
                minimum: 20
            }
        ));
    }

    #[test]
    fn test_schema_preflight_accepts_compatible() {
        let conn = Connection::open_in_memory().unwrap();
        conn.execute_batch(
            "CREATE TABLE schema_version (version INTEGER);
             INSERT INTO schema_version (version) VALUES (26);",
        )
        .unwrap();

        let result = check_schema_version(&conn, 20);
        assert!(matches!(result, SchemaCheck::Compatible { version: 26 }));
    }

    // Boundary case: version exactly equal to the minimum is compatible.
    #[test]
    fn test_schema_preflight_exact_minimum() {
        let conn = Connection::open_in_memory().unwrap();
        conn.execute_batch(
            "CREATE TABLE schema_version (version INTEGER);
             INSERT INTO schema_version (version) VALUES (20);",
        )
        .unwrap();

        let result = check_schema_version(&conn, 20);
        assert!(matches!(result, SchemaCheck::Compatible { version: 20 }));
    }

    // No schema_version table at all -> NoDB.
    #[test]
    fn test_schema_preflight_no_db() {
        let conn = Connection::open_in_memory().unwrap();
        let result = check_schema_version(&conn, 20);
        assert!(matches!(result, SchemaCheck::NoDB));
    }

    // Table exists but has no rows -> also NoDB (the version read fails).
    #[test]
    fn test_schema_preflight_empty_schema_version_table() {
        let conn = Connection::open_in_memory().unwrap();
        conn.execute_batch("CREATE TABLE schema_version (version INTEGER);")
            .unwrap();

        let result = check_schema_version(&conn, 20);
        assert!(matches!(result, SchemaCheck::NoDB));
    }

    #[test]
    fn test_check_data_readiness_empty() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);
        conn.execute_batch(
            "CREATE TABLE schema_version (version INTEGER);
             INSERT INTO schema_version (version) VALUES (26);",
        )
        .unwrap();

        let readiness = check_data_readiness(&conn).unwrap();
        assert!(!readiness.has_issues);
        assert!(!readiness.has_mrs);
        assert!(!readiness.has_documents);
        assert_eq!(readiness.schema_version, 26);
        assert!(!readiness.has_any_data());
    }

    #[test]
    fn test_check_data_readiness_with_data() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);
        conn.execute_batch(
            "CREATE TABLE schema_version (version INTEGER);
             INSERT INTO schema_version (version) VALUES (26);",
        )
        .unwrap();

        insert_issue(&conn, 1, "opened", 1_700_000_000_000);
        insert_mr(&conn, 1, "merged", 1_700_000_000_000);

        let readiness = check_data_readiness(&conn).unwrap();
        assert!(readiness.has_issues);
        assert!(readiness.has_mrs);
        assert!(!readiness.has_documents);
        assert_eq!(readiness.schema_version, 26);
        assert!(readiness.has_any_data());
    }

    // The documents probe swallows errors, so a missing table must not fail.
    #[test]
    fn test_check_data_readiness_documents_table_missing() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);
        // No documents table — should still work.

        let readiness = check_data_readiness(&conn).unwrap();
        assert!(!readiness.has_documents);
    }
}
|
||||
485
crates/lore-tui/src/action/dashboard.rs
Normal file
485
crates/lore-tui/src/action/dashboard.rs
Normal file
@@ -0,0 +1,485 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::clock::Clock;
|
||||
use crate::state::dashboard::{
|
||||
DashboardData, EntityCounts, LastSyncInfo, ProjectSyncInfo, RecentActivityItem,
|
||||
};
|
||||
|
||||
/// Fetch all data for the dashboard screen.
|
||||
///
|
||||
/// Runs aggregation queries for entity counts, per-project sync freshness,
|
||||
/// recent activity, and the last sync run summary.
|
||||
pub fn fetch_dashboard(conn: &Connection, clock: &dyn Clock) -> Result<DashboardData> {
|
||||
let counts = fetch_entity_counts(conn)?;
|
||||
let projects = fetch_project_sync_info(conn, clock)?;
|
||||
let recent = fetch_recent_activity(conn, clock)?;
|
||||
let last_sync = fetch_last_sync(conn)?;
|
||||
|
||||
Ok(DashboardData {
|
||||
counts,
|
||||
projects,
|
||||
recent,
|
||||
last_sync,
|
||||
})
|
||||
}
|
||||
|
||||
/// Count all entities in the database.
|
||||
fn fetch_entity_counts(conn: &Connection) -> Result<EntityCounts> {
|
||||
let issues_total: i64 = conn
|
||||
.query_row("SELECT COUNT(*) FROM issues", [], |r| r.get(0))
|
||||
.context("counting issues")?;
|
||||
|
||||
let issues_open: i64 = conn
|
||||
.query_row(
|
||||
"SELECT COUNT(*) FROM issues WHERE state = 'opened'",
|
||||
[],
|
||||
|r| r.get(0),
|
||||
)
|
||||
.context("counting open issues")?;
|
||||
|
||||
let mrs_total: i64 = conn
|
||||
.query_row("SELECT COUNT(*) FROM merge_requests", [], |r| r.get(0))
|
||||
.context("counting merge requests")?;
|
||||
|
||||
let mrs_open: i64 = conn
|
||||
.query_row(
|
||||
"SELECT COUNT(*) FROM merge_requests WHERE state = 'opened'",
|
||||
[],
|
||||
|r| r.get(0),
|
||||
)
|
||||
.context("counting open merge requests")?;
|
||||
|
||||
let discussions: i64 = conn
|
||||
.query_row("SELECT COUNT(*) FROM discussions", [], |r| r.get(0))
|
||||
.context("counting discussions")?;
|
||||
|
||||
let notes_total: i64 = conn
|
||||
.query_row("SELECT COUNT(*) FROM notes", [], |r| r.get(0))
|
||||
.context("counting notes")?;
|
||||
|
||||
let notes_system: i64 = conn
|
||||
.query_row("SELECT COUNT(*) FROM notes WHERE is_system = 1", [], |r| {
|
||||
r.get(0)
|
||||
})
|
||||
.context("counting system notes")?;
|
||||
|
||||
let notes_system_pct = if notes_total > 0 {
|
||||
u8::try_from(notes_system * 100 / notes_total).unwrap_or(100)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let documents: i64 = conn
|
||||
.query_row("SELECT COUNT(*) FROM documents", [], |r| r.get(0))
|
||||
.context("counting documents")?;
|
||||
|
||||
let embeddings: i64 = conn
|
||||
.query_row("SELECT COUNT(*) FROM embedding_metadata", [], |r| r.get(0))
|
||||
.context("counting embeddings")?;
|
||||
|
||||
#[allow(clippy::cast_sign_loss)] // SQL COUNT(*) is always >= 0
|
||||
Ok(EntityCounts {
|
||||
issues_open: issues_open as u64,
|
||||
issues_total: issues_total as u64,
|
||||
mrs_open: mrs_open as u64,
|
||||
mrs_total: mrs_total as u64,
|
||||
discussions: discussions as u64,
|
||||
notes_total: notes_total as u64,
|
||||
notes_system_pct,
|
||||
documents: documents as u64,
|
||||
embeddings: embeddings as u64,
|
||||
})
|
||||
}
|
||||
|
||||
/// Per-project sync freshness based on the most recent sync_runs entry.
|
||||
fn fetch_project_sync_info(conn: &Connection, clock: &dyn Clock) -> Result<Vec<ProjectSyncInfo>> {
|
||||
let now_ms = clock.now_ms();
|
||||
|
||||
let mut stmt = conn
|
||||
.prepare(
|
||||
"SELECT p.path_with_namespace,
|
||||
MAX(sr.finished_at) as last_sync_ms
|
||||
FROM projects p
|
||||
LEFT JOIN sync_runs sr ON sr.status = 'succeeded'
|
||||
AND sr.finished_at IS NOT NULL
|
||||
GROUP BY p.id
|
||||
ORDER BY p.path_with_namespace",
|
||||
)
|
||||
.context("preparing project sync query")?;
|
||||
|
||||
let rows = stmt
|
||||
.query_map([], |row| {
|
||||
let path: String = row.get(0)?;
|
||||
let last_sync_ms: Option<i64> = row.get(1)?;
|
||||
Ok((path, last_sync_ms))
|
||||
})
|
||||
.context("querying project sync info")?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for row in rows {
|
||||
let (path, last_sync_ms) = row.context("reading project sync row")?;
|
||||
let minutes_since_sync = match last_sync_ms {
|
||||
Some(ms) => {
|
||||
let elapsed_ms = now_ms.saturating_sub(ms);
|
||||
u64::try_from(elapsed_ms / 60_000).unwrap_or(u64::MAX)
|
||||
}
|
||||
None => u64::MAX, // Never synced.
|
||||
};
|
||||
result.push(ProjectSyncInfo {
|
||||
path,
|
||||
minutes_since_sync,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Recent activity: the 20 most recently updated issues and MRs.
|
||||
fn fetch_recent_activity(conn: &Connection, clock: &dyn Clock) -> Result<Vec<RecentActivityItem>> {
|
||||
let now_ms = clock.now_ms();
|
||||
|
||||
let mut stmt = conn
|
||||
.prepare(
|
||||
"SELECT entity_type, iid, title, state, updated_at FROM (
|
||||
SELECT 'issue' AS entity_type, iid, title, state, updated_at
|
||||
FROM issues
|
||||
UNION ALL
|
||||
SELECT 'mr' AS entity_type, iid, title, state, updated_at
|
||||
FROM merge_requests
|
||||
)
|
||||
ORDER BY updated_at DESC
|
||||
LIMIT 20",
|
||||
)
|
||||
.context("preparing recent activity query")?;
|
||||
|
||||
let rows = stmt
|
||||
.query_map([], |row| {
|
||||
let entity_type: String = row.get(0)?;
|
||||
let iid: i64 = row.get(1)?;
|
||||
let title: String = row.get::<_, Option<String>>(2)?.unwrap_or_default();
|
||||
let state: String = row.get::<_, Option<String>>(3)?.unwrap_or_default();
|
||||
let updated_at: i64 = row.get(4)?;
|
||||
Ok((entity_type, iid, title, state, updated_at))
|
||||
})
|
||||
.context("querying recent activity")?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for row in rows {
|
||||
let (entity_type, iid, title, state, updated_at) =
|
||||
row.context("reading recent activity row")?;
|
||||
let elapsed_ms = now_ms.saturating_sub(updated_at);
|
||||
let minutes_ago = u64::try_from(elapsed_ms / 60_000).unwrap_or(u64::MAX);
|
||||
result.push(RecentActivityItem {
|
||||
entity_type,
|
||||
iid: iid as u64,
|
||||
title,
|
||||
state,
|
||||
minutes_ago,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// The most recent sync run summary.
|
||||
fn fetch_last_sync(conn: &Connection) -> Result<Option<LastSyncInfo>> {
|
||||
let result = conn.query_row(
|
||||
"SELECT status, finished_at, command, error
|
||||
FROM sync_runs
|
||||
ORDER BY id DESC
|
||||
LIMIT 1",
|
||||
[],
|
||||
|row| {
|
||||
Ok(LastSyncInfo {
|
||||
status: row.get(0)?,
|
||||
finished_at: row.get(1)?,
|
||||
command: row.get(2)?,
|
||||
error: row.get(3)?,
|
||||
})
|
||||
},
|
||||
);
|
||||
|
||||
match result {
|
||||
Ok(info) => Ok(Some(info)),
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
|
||||
Err(e) => Err(e).context("querying last sync run"),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::clock::FakeClock;

    /// Create the minimal schema needed for dashboard queries.
    ///
    /// Mirrors the production tables the dashboard aggregates read; no
    /// `schema_version` table is created because these queries don't need it.
    fn create_dashboard_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT NOT NULL,
                author_username TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_username TEXT,
                created_at INTEGER,
                updated_at INTEGER,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                noteable_type TEXT NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE documents (
                id INTEGER PRIMARY KEY,
                source_type TEXT NOT NULL,
                source_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                content_text TEXT NOT NULL,
                content_hash TEXT NOT NULL
            );
            CREATE TABLE embedding_metadata (
                document_id INTEGER NOT NULL,
                chunk_index INTEGER NOT NULL DEFAULT 0,
                model TEXT NOT NULL,
                dims INTEGER NOT NULL,
                document_hash TEXT NOT NULL,
                chunk_hash TEXT NOT NULL,
                created_at INTEGER NOT NULL,
                PRIMARY KEY(document_id, chunk_index)
            );
            CREATE TABLE sync_runs (
                id INTEGER PRIMARY KEY,
                started_at INTEGER NOT NULL,
                heartbeat_at INTEGER NOT NULL,
                finished_at INTEGER,
                status TEXT NOT NULL,
                command TEXT NOT NULL,
                error TEXT
            );
            ",
        )
        .expect("create dashboard schema");
    }

    /// Insert a test issue.
    ///
    /// `gitlab_id` is derived from `iid` for uniqueness; one timestamp fills
    /// created/updated/last_seen.
    fn insert_issue(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
            rusqlite::params![iid * 100, iid, format!("Issue {iid}"), state, updated_at],
        )
        .expect("insert issue");
    }

    /// Insert a test MR.
    ///
    /// The `+ 50` offset keeps `gitlab_id` from colliding with issue rows.
    fn insert_mr(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
        conn.execute(
            "INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
            rusqlite::params![iid * 100 + 50, iid, format!("MR {iid}"), state, updated_at],
        )
        .expect("insert mr");
    }

    #[test]
    fn test_fetch_dashboard_counts() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        // 5 issues: 3 open, 2 closed.
        let now_ms = 1_700_000_000_000_i64;
        insert_issue(&conn, 1, "opened", now_ms - 10_000);
        insert_issue(&conn, 2, "opened", now_ms - 20_000);
        insert_issue(&conn, 3, "opened", now_ms - 30_000);
        insert_issue(&conn, 4, "closed", now_ms - 40_000);
        insert_issue(&conn, 5, "closed", now_ms - 50_000);

        let clock = FakeClock::from_ms(now_ms);
        let data = fetch_dashboard(&conn, &clock).unwrap();

        assert_eq!(data.counts.issues_open, 3);
        assert_eq!(data.counts.issues_total, 5);
    }

    #[test]
    fn test_fetch_dashboard_mr_counts() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        // Only 'opened' MRs count as open; merged and closed do not.
        let now_ms = 1_700_000_000_000_i64;
        insert_mr(&conn, 1, "opened", now_ms);
        insert_mr(&conn, 2, "merged", now_ms);
        insert_mr(&conn, 3, "opened", now_ms);
        insert_mr(&conn, 4, "closed", now_ms);

        let clock = FakeClock::from_ms(now_ms);
        let data = fetch_dashboard(&conn, &clock).unwrap();

        assert_eq!(data.counts.mrs_open, 2);
        assert_eq!(data.counts.mrs_total, 4);
    }

    // Every aggregate must be well-defined on a schema with zero rows.
    #[test]
    fn test_fetch_dashboard_empty_database() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        let clock = FakeClock::from_ms(1_700_000_000_000);
        let data = fetch_dashboard(&conn, &clock).unwrap();

        assert_eq!(data.counts.issues_open, 0);
        assert_eq!(data.counts.issues_total, 0);
        assert_eq!(data.counts.mrs_open, 0);
        assert_eq!(data.counts.mrs_total, 0);
        assert_eq!(data.counts.notes_system_pct, 0);
        assert!(data.projects.is_empty());
        assert!(data.recent.is_empty());
        assert!(data.last_sync.is_none());
    }

    #[test]
    fn test_fetch_dashboard_notes_system_pct() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        // 4 notes: 1 system, 3 user -> 25% system.
        for i in 0..4 {
            conn.execute(
                "INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, created_at, updated_at, last_seen_at)
                 VALUES (?1, 1, 1, ?2, 1000, 1000, 1000)",
                rusqlite::params![i, if i == 0 { 1 } else { 0 }],
            )
            .unwrap();
        }

        let clock = FakeClock::from_ms(1_700_000_000_000);
        let data = fetch_dashboard(&conn, &clock).unwrap();

        assert_eq!(data.counts.notes_total, 4);
        assert_eq!(data.counts.notes_system_pct, 25);
    }

    #[test]
    fn test_fetch_dashboard_project_sync_info() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace) VALUES (1, 'group/alpha')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace) VALUES (2, 'group/beta')",
            [],
        )
        .unwrap();

        // Sync ran 30 minutes ago. sync_runs is global (no project_id),
        // so all projects see the same last-sync time.
        let now_ms = 1_700_000_000_000_i64;
        conn.execute(
            "INSERT INTO sync_runs (started_at, heartbeat_at, finished_at, status, command)
             VALUES (?1, ?1, ?2, 'succeeded', 'sync')",
            [now_ms - 30 * 60_000, now_ms - 30 * 60_000],
        )
        .unwrap();

        let clock = FakeClock::from_ms(now_ms);
        let data = fetch_dashboard(&conn, &clock).unwrap();

        // Projects come back alphabetically (ORDER BY path_with_namespace).
        assert_eq!(data.projects.len(), 2);
        assert_eq!(data.projects[0].path, "group/alpha");
        assert_eq!(data.projects[0].minutes_since_sync, 30);
        assert_eq!(data.projects[1].path, "group/beta");
        assert_eq!(data.projects[1].minutes_since_sync, 30); // Same: sync_runs is global.
    }

    #[test]
    fn test_fetch_dashboard_recent_activity_ordered() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        let now_ms = 1_700_000_000_000_i64;
        insert_issue(&conn, 1, "opened", now_ms - 60_000); // 1 min ago
        insert_mr(&conn, 1, "merged", now_ms - 120_000); // 2 min ago
        insert_issue(&conn, 2, "closed", now_ms - 180_000); // 3 min ago

        let clock = FakeClock::from_ms(now_ms);
        let data = fetch_dashboard(&conn, &clock).unwrap();

        // Most recently updated first, issues and MRs interleaved.
        assert_eq!(data.recent.len(), 3);
        assert_eq!(data.recent[0].entity_type, "issue");
        assert_eq!(data.recent[0].iid, 1);
        assert_eq!(data.recent[0].minutes_ago, 1);
        assert_eq!(data.recent[1].entity_type, "mr");
        assert_eq!(data.recent[1].minutes_ago, 2);
        assert_eq!(data.recent[2].entity_type, "issue");
        assert_eq!(data.recent[2].minutes_ago, 3);
    }

    #[test]
    fn test_fetch_dashboard_last_sync() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        // Two runs inserted: an older failed one, then a newer success.
        // Only the newest (highest id) should be reported.
        let now_ms = 1_700_000_000_000_i64;
        conn.execute(
            "INSERT INTO sync_runs (started_at, heartbeat_at, finished_at, status, command, error)
             VALUES (?1, ?1, ?2, 'failed', 'sync', 'network timeout')",
            [now_ms - 60_000, now_ms - 50_000],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO sync_runs (started_at, heartbeat_at, finished_at, status, command)
             VALUES (?1, ?1, ?2, 'succeeded', 'sync')",
            [now_ms - 30_000, now_ms - 20_000],
        )
        .unwrap();

        let clock = FakeClock::from_ms(now_ms);
        let data = fetch_dashboard(&conn, &clock).unwrap();

        let sync = data.last_sync.unwrap();
        assert_eq!(sync.status, "succeeded");
        assert_eq!(sync.command, "sync");
        assert!(sync.error.is_none());
    }
}
|
||||
383
crates/lore-tui/src/action/file_history.rs
Normal file
383
crates/lore-tui/src/action/file_history.rs
Normal file
@@ -0,0 +1,383 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! File History screen actions — query MRs that touched a file path.
|
||||
//!
|
||||
//! Wraps the SQL queries from `lore::cli::commands::file_history` but uses
|
||||
//! an injected `Connection` (TUI manages its own DB connection).
|
||||
|
||||
use anyhow::Result;
|
||||
use rusqlite::Connection;
|
||||
|
||||
use lore::core::file_history::resolve_rename_chain;
|
||||
|
||||
use crate::state::file_history::{FileDiscussion, FileHistoryMr, FileHistoryResult};
|
||||
|
||||
/// Maximum rename chain BFS depth.
///
/// Passed as the hop limit to `resolve_rename_chain` so path expansion
/// stays bounded.
const MAX_RENAME_HOPS: usize = 10;

/// Default result limit.
///
/// Bound as the SQL `LIMIT` in [`fetch_file_history`].
const DEFAULT_LIMIT: usize = 50;
|
||||
|
||||
/// Fetch file history: MRs that touched a file path, with optional rename resolution.
///
/// * `project_id` — optional project scope; rename resolution requires it.
/// * `path` — the file path to look up (matched against `mfc.new_path`).
/// * `follow_renames` — when true (and project-scoped), search every path in
///   the file's rename chain, not just the given one.
/// * `merged_only` — restrict results to MRs in state `'merged'`.
/// * `include_discussions` — also fetch DiffNote discussions on these paths.
pub fn fetch_file_history(
    conn: &Connection,
    project_id: Option<i64>,
    path: &str,
    follow_renames: bool,
    merged_only: bool,
    include_discussions: bool,
) -> Result<FileHistoryResult> {
    // Resolve rename chain unless disabled.
    let (all_paths, renames_followed) = if !follow_renames {
        (vec![path.to_string()], false)
    } else if let Some(pid) = project_id {
        let chain = resolve_rename_chain(conn, pid, path, MAX_RENAME_HOPS)?;
        // Only report "followed" if the chain actually grew beyond the input.
        let followed = chain.len() > 1;
        (chain, followed)
    } else {
        // Without project scope, can't resolve renames.
        (vec![path.to_string()], false)
    };

    let paths_searched = all_paths.len();

    // Build IN clause placeholders. Numbering starts at ?2 because ?1 is
    // reserved for the project filter (and is bound even when unused).
    let placeholders: Vec<String> = (0..all_paths.len())
        .map(|i| format!("?{}", i + 2))
        .collect();
    let in_clause = placeholders.join(", ");

    let merged_filter = if merged_only {
        " AND mr.state = 'merged'"
    } else {
        ""
    };

    let project_filter = if project_id.is_some() {
        "AND mfc.project_id = ?1"
    } else {
        ""
    };

    // LIMIT placeholder index: after ?1 (project) and ?2..?(len+1) (paths).
    let limit_param = all_paths.len() + 2;
    let sql = format!(
        "SELECT DISTINCT \
         mr.iid, mr.title, mr.state, mr.author_username, \
         mfc.change_type, mr.merged_at, mr.updated_at, mr.merge_commit_sha \
         FROM mr_file_changes mfc \
         JOIN merge_requests mr ON mr.id = mfc.merge_request_id \
         WHERE mfc.new_path IN ({in_clause}) {project_filter} {merged_filter} \
         ORDER BY COALESCE(mr.merged_at, mr.updated_at) DESC \
         LIMIT ?{limit_param}"
    );

    let mut stmt = conn.prepare(&sql)?;

    // Bind: ?1=project_id, ?2..?N+1=paths, ?N+2=limit.
    // ?1 is bound to 0 when unscoped; harmless since the filter is absent then.
    let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
    params.push(Box::new(project_id.unwrap_or(0)));
    for p in &all_paths {
        params.push(Box::new(p.clone()));
    }
    params.push(Box::new(DEFAULT_LIMIT as i64));

    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();

    let merge_requests: Vec<FileHistoryMr> = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok(FileHistoryMr {
                iid: row.get(0)?,
                title: row.get(1)?,
                state: row.get(2)?,
                author_username: row.get(3)?,
                change_type: row.get(4)?,
                merged_at_ms: row.get(5)?,
                updated_at_ms: row.get::<_, i64>(6)?,
                merge_commit_sha: row.get(7)?,
            })
        })?
        // Row-level mapping errors are dropped (best-effort collection).
        .filter_map(std::result::Result::ok)
        .collect();

    // NOTE(review): this is the number of *returned* rows, which is capped at
    // DEFAULT_LIMIT by the SQL — not the total matching count.
    let total_mrs = merge_requests.len();

    // Optionally fetch DiffNote discussions.
    let discussions = if include_discussions && !merge_requests.is_empty() {
        fetch_file_discussions(conn, &all_paths, project_id)?
    } else {
        Vec::new()
    };

    Ok(FileHistoryResult {
        path: path.to_string(),
        rename_chain: all_paths,
        renames_followed,
        merge_requests,
        discussions,
        total_mrs,
        paths_searched,
    })
}
|
||||
|
||||
/// Fetch DiffNote discussions referencing the given file paths.
|
||||
fn fetch_file_discussions(
|
||||
conn: &Connection,
|
||||
paths: &[String],
|
||||
project_id: Option<i64>,
|
||||
) -> Result<Vec<FileDiscussion>> {
|
||||
let placeholders: Vec<String> = (0..paths.len()).map(|i| format!("?{}", i + 2)).collect();
|
||||
let in_clause = placeholders.join(", ");
|
||||
|
||||
let project_filter = if project_id.is_some() {
|
||||
"AND d.project_id = ?1"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
|
||||
let sql = format!(
|
||||
"SELECT d.gitlab_discussion_id, n.author_username, n.body, n.new_path, n.created_at \
|
||||
FROM notes n \
|
||||
JOIN discussions d ON d.id = n.discussion_id \
|
||||
WHERE n.new_path IN ({in_clause}) {project_filter} \
|
||||
AND n.is_system = 0 \
|
||||
ORDER BY n.created_at DESC \
|
||||
LIMIT 50"
|
||||
);
|
||||
|
||||
let mut stmt = conn.prepare(&sql)?;
|
||||
|
||||
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
||||
params.push(Box::new(project_id.unwrap_or(0)));
|
||||
for p in paths {
|
||||
params.push(Box::new(p.clone()));
|
||||
}
|
||||
|
||||
let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
|
||||
|
||||
let discussions: Vec<FileDiscussion> = stmt
|
||||
.query_map(param_refs.as_slice(), |row| {
|
||||
let body: String = row.get(2)?;
|
||||
let snippet = if body.len() > 200 {
|
||||
format!("{}...", &body[..body.floor_char_boundary(200)])
|
||||
} else {
|
||||
body
|
||||
};
|
||||
Ok(FileDiscussion {
|
||||
discussion_id: row.get(0)?,
|
||||
author_username: row.get(1)?,
|
||||
body_snippet: snippet,
|
||||
path: row.get(3)?,
|
||||
created_at_ms: row.get(4)?,
|
||||
})
|
||||
})?
|
||||
.filter_map(std::result::Result::ok)
|
||||
.collect();
|
||||
|
||||
Ok(discussions)
|
||||
}
|
||||
|
||||
/// Fetch distinct file paths from mr_file_changes for autocomplete.
|
||||
pub fn fetch_file_history_paths(conn: &Connection, project_id: Option<i64>) -> Result<Vec<String>> {
|
||||
let sql = if project_id.is_some() {
|
||||
"SELECT DISTINCT new_path FROM mr_file_changes WHERE project_id = ?1 ORDER BY new_path LIMIT 5000"
|
||||
} else {
|
||||
"SELECT DISTINCT new_path FROM mr_file_changes ORDER BY new_path LIMIT 5000"
|
||||
};
|
||||
|
||||
let mut stmt = conn.prepare(sql)?;
|
||||
let paths: Vec<String> = if let Some(pid) = project_id {
|
||||
stmt.query_map([pid], |row| row.get(0))?
|
||||
.filter_map(std::result::Result::ok)
|
||||
.collect()
|
||||
} else {
|
||||
stmt.query_map([], |row| row.get(0))?
|
||||
.filter_map(std::result::Result::ok)
|
||||
.collect()
|
||||
};
|
||||
|
||||
Ok(paths)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Minimal schema for file history queries.
    ///
    /// Mirrors only the tables/columns that `fetch_file_history` and
    /// `fetch_file_discussions` actually read — not the full production
    /// schema.
    fn create_file_history_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_id INTEGER,
                author_username TEXT,
                draft INTEGER NOT NULL DEFAULT 0,
                created_at INTEGER,
                updated_at INTEGER,
                merged_at INTEGER,
                merge_commit_sha TEXT,
                web_url TEXT,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE mr_file_changes (
                id INTEGER PRIMARY KEY,
                merge_request_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                new_path TEXT NOT NULL,
                old_path TEXT,
                change_type TEXT NOT NULL
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                noteable_type TEXT NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                note_type TEXT,
                new_path TEXT,
                old_path TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            ",
        )
        .expect("create file history schema");
    }

    // An empty database should yield an empty result, not an error.
    #[test]
    fn test_fetch_file_history_empty_db() {
        let conn = Connection::open_in_memory().unwrap();
        create_file_history_schema(&conn);

        let result = fetch_file_history(&conn, None, "src/lib.rs", false, false, false).unwrap();
        assert!(result.merge_requests.is_empty());
        assert_eq!(result.total_mrs, 0);
        assert_eq!(result.path, "src/lib.rs");
    }

    // A single MR touching the queried path is returned with its metadata.
    #[test]
    fn test_fetch_file_history_returns_mrs() {
        let conn = Connection::open_in_memory().unwrap();
        create_file_history_schema(&conn);

        // Insert project, MR, and file change.
        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'grp/repo')",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, author_username, updated_at, last_seen_at) \
             VALUES (1, 1000, 1, 42, 'Fix auth', 'merged', 'alice', 1700000000000, 1700000000000)",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) \
             VALUES (1, 1, 'src/auth.rs', 'modified')",
            [],
        )
        .unwrap();

        let result =
            fetch_file_history(&conn, Some(1), "src/auth.rs", false, false, false).unwrap();
        assert_eq!(result.merge_requests.len(), 1);
        assert_eq!(result.merge_requests[0].iid, 42);
        assert_eq!(result.merge_requests[0].title, "Fix auth");
        assert_eq!(result.merge_requests[0].change_type, "modified");
    }

    // The merged_only flag (5th argument) filters out non-merged MRs.
    #[test]
    fn test_fetch_file_history_merged_only() {
        let conn = Connection::open_in_memory().unwrap();
        create_file_history_schema(&conn);

        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'grp/repo')",
            [],
        ).unwrap();
        // Merged MR.
        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, author_username, updated_at, last_seen_at) \
             VALUES (1, 1000, 1, 42, 'Merged MR', 'merged', 'alice', 1700000000000, 1700000000000)",
            [],
        ).unwrap();
        // Open MR.
        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, author_username, updated_at, last_seen_at) \
             VALUES (2, 1001, 1, 43, 'Open MR', 'opened', 'bob', 1700000000000, 1700000000000)",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (1, 1, 'src/lib.rs', 'modified')",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (2, 1, 'src/lib.rs', 'modified')",
            [],
        ).unwrap();

        // Without merged_only: both MRs.
        let all = fetch_file_history(&conn, Some(1), "src/lib.rs", false, false, false).unwrap();
        assert_eq!(all.merge_requests.len(), 2);

        // With merged_only: only the merged one.
        let merged = fetch_file_history(&conn, Some(1), "src/lib.rs", false, true, false).unwrap();
        assert_eq!(merged.merge_requests.len(), 1);
        assert_eq!(merged.merge_requests[0].state, "merged");
    }

    // Path autocomplete on an empty table returns an empty list.
    #[test]
    fn test_fetch_file_history_paths_empty() {
        let conn = Connection::open_in_memory().unwrap();
        create_file_history_schema(&conn);

        let paths = fetch_file_history_paths(&conn, None).unwrap();
        assert!(paths.is_empty());
    }

    // Duplicate paths are collapsed by DISTINCT and results are sorted.
    #[test]
    fn test_fetch_file_history_paths_returns_distinct() {
        let conn = Connection::open_in_memory().unwrap();
        create_file_history_schema(&conn);

        conn.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (1, 1, 'src/a.rs', 'modified')",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (2, 1, 'src/a.rs', 'modified')",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (3, 1, 'src/b.rs', 'added')",
            [],
        ).unwrap();

        let paths = fetch_file_history_paths(&conn, None).unwrap();
        assert_eq!(paths, vec!["src/a.rs", "src/b.rs"]);
    }
}
|
||||
611
crates/lore-tui/src/action/issue_detail.rs
Normal file
611
crates/lore-tui/src/action/issue_detail.rs
Normal file
@@ -0,0 +1,611 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::message::EntityKey;
|
||||
use crate::state::issue_detail::{IssueDetailData, IssueMetadata};
|
||||
use crate::view::common::cross_ref::{CrossRef, CrossRefKind};
|
||||
use crate::view::common::discussion_tree::{DiscussionNode, NoteNode};
|
||||
|
||||
/// Fetch issue metadata and cross-references (Phase 1 load).
|
||||
///
|
||||
/// Runs inside a single read transaction for snapshot consistency.
|
||||
/// Returns metadata + cross-refs; discussions are loaded separately.
|
||||
pub fn fetch_issue_detail(conn: &Connection, key: &EntityKey) -> Result<IssueDetailData> {
|
||||
let metadata = fetch_issue_metadata(conn, key)?;
|
||||
let cross_refs = fetch_issue_cross_refs(conn, key)?;
|
||||
Ok(IssueDetailData {
|
||||
metadata,
|
||||
cross_refs,
|
||||
})
|
||||
}
|
||||
|
||||
/// Fetch issue metadata from the local DB.
///
/// Resolves the issue by `(project_id, iid)`, then fills in assignees and
/// labels with two follow-up queries. Errors (via `anyhow` context) when
/// the issue does not exist.
///
/// Note: `discussion_count` counts all discussions on the issue, including
/// system-note-only ones.
fn fetch_issue_metadata(conn: &Connection, key: &EntityKey) -> Result<IssueMetadata> {
    // Single-row query: nullable text columns are COALESCE'd to '' so the
    // closure can read them as plain Strings.
    let row = conn
        .query_row(
            "SELECT i.iid, p.path_with_namespace, i.title,
                    COALESCE(i.description, ''), i.state, i.author_username,
                    COALESCE(i.milestone_title, ''),
                    i.due_date, i.created_at, i.updated_at,
                    COALESCE(i.web_url, ''),
                    (SELECT COUNT(*) FROM discussions d
                     WHERE d.issue_id = i.id AND d.noteable_type = 'Issue')
             FROM issues i
             JOIN projects p ON p.id = i.project_id
             WHERE i.project_id = ?1 AND i.iid = ?2",
            rusqlite::params![key.project_id, key.iid],
            |row| {
                Ok(IssueMetadata {
                    iid: row.get(0)?,
                    project_path: row.get(1)?,
                    title: row.get(2)?,
                    description: row.get(3)?,
                    state: row.get(4)?,
                    author: row.get::<_, Option<String>>(5)?.unwrap_or_default(),
                    assignees: Vec::new(), // Fetched separately below.
                    labels: Vec::new(),    // Fetched separately below.
                    // Empty string (from COALESCE) means "no milestone".
                    milestone: {
                        let m: String = row.get(6)?;
                        if m.is_empty() { None } else { Some(m) }
                    },
                    due_date: row.get(7)?,
                    created_at: row.get(8)?,
                    updated_at: row.get(9)?,
                    web_url: row.get(10)?,
                    discussion_count: row.get::<_, i64>(11)? as usize,
                })
            },
        )
        .context("fetching issue metadata")?;

    // Fetch assignees.
    let mut assignees_stmt = conn
        .prepare("SELECT username FROM issue_assignees WHERE issue_id = (SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2)")
        .context("preparing assignees query")?;
    let assignees: Vec<String> = assignees_stmt
        .query_map(rusqlite::params![key.project_id, key.iid], |r| r.get(0))
        .context("fetching assignees")?
        .collect::<std::result::Result<Vec<_>, _>>()
        .context("reading assignee row")?;

    // Fetch labels.
    let mut labels_stmt = conn
        .prepare(
            "SELECT l.name FROM issue_labels il
             JOIN labels l ON l.id = il.label_id
             WHERE il.issue_id = (SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2)
             ORDER BY l.name",
        )
        .context("preparing labels query")?;
    let labels: Vec<String> = labels_stmt
        .query_map(rusqlite::params![key.project_id, key.iid], |r| r.get(0))
        .context("fetching labels")?
        .collect::<std::result::Result<Vec<_>, _>>()
        .context("reading label row")?;

    // Struct-update: keep everything from the first query, splice in the
    // assignees and labels fetched above.
    Ok(IssueMetadata {
        assignees,
        labels,
        ..row
    })
}
|
||||
|
||||
/// Fetch cross-references for an issue from the entity_references table.
///
/// Each row yields a [`CrossRef`] whose kind is derived from
/// `(reference_type, target_entity_type)`; the label falls back to a
/// `path#iid` / `path!iid` form when the target's title is unknown locally.
/// Refs with no local `target_entity_id` are marked non-navigable.
fn fetch_issue_cross_refs(conn: &Connection, key: &EntityKey) -> Result<Vec<CrossRef>> {
    let mut stmt = conn
        .prepare(
            // entity_title / target_project_id are resolved inline via
            // correlated subqueries against the issues/merge_requests tables.
            //
            // NOTE(review): the target_project_id CASE unions the issues and
            // merge_requests lookups and takes LIMIT 1 — if an issue row and
            // an MR row ever share the same primary-key id, an MR target
            // could pick up the issue's project_id. Presumably ids are
            // disjoint or collisions are acceptable here — confirm.
            "SELECT er.reference_type, er.target_entity_type, er.target_entity_id,
                    er.target_entity_iid, er.target_project_path,
                    CASE
                        WHEN er.target_entity_type = 'issue'
                        THEN (SELECT title FROM issues WHERE id = er.target_entity_id)
                        WHEN er.target_entity_type = 'merge_request'
                        THEN (SELECT title FROM merge_requests WHERE id = er.target_entity_id)
                        ELSE NULL
                    END as entity_title,
                    CASE
                        WHEN er.target_entity_id IS NOT NULL
                        THEN (SELECT project_id FROM issues WHERE id = er.target_entity_id
                              UNION ALL
                              SELECT project_id FROM merge_requests WHERE id = er.target_entity_id
                              LIMIT 1)
                        ELSE NULL
                    END as target_project_id
             FROM entity_references er
             WHERE er.source_entity_type = 'issue'
               AND er.source_entity_id = (SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2)
             ORDER BY er.reference_type, er.target_entity_iid",
        )
        .context("preparing cross-ref query")?;

    let refs = stmt
        .query_map(rusqlite::params![key.project_id, key.iid], |row| {
            let ref_type: String = row.get(0)?;
            let target_type: String = row.get(1)?;
            let target_id: Option<i64> = row.get(2)?;
            let target_iid: Option<i64> = row.get(3)?;
            let target_path: Option<String> = row.get(4)?;
            let title: Option<String> = row.get(5)?;
            let target_project_id: Option<i64> = row.get(6)?;

            // Map (reference_type, target_type) to the UI kind; anything
            // unrecognized degrades to MentionedIn.
            let kind = match (ref_type.as_str(), target_type.as_str()) {
                ("closes", "merge_request") => CrossRefKind::ClosingMr,
                ("related", "issue") => CrossRefKind::RelatedIssue,
                _ => CrossRefKind::MentionedIn,
            };

            // Fall back to the source issue's project when the target's
            // project could not be resolved locally.
            let iid = target_iid.unwrap_or(0);
            let project_id = target_project_id.unwrap_or(key.project_id);

            let entity_key = match target_type.as_str() {
                "merge_request" => EntityKey::mr(project_id, iid),
                _ => EntityKey::issue(project_id, iid),
            };

            // Prefer the target's title; otherwise build a GitLab-style
            // short reference (`!iid` for MRs, `#iid` for issues, with an
            // optional cross-project path prefix).
            let label = title.unwrap_or_else(|| {
                let prefix = if target_type == "merge_request" {
                    "!"
                } else {
                    "#"
                };
                let path = target_path.unwrap_or_default();
                if path.is_empty() {
                    format!("{prefix}{iid}")
                } else {
                    format!("{path}{prefix}{iid}")
                }
            });

            // Only refs resolved to a local row can be navigated to.
            let navigable = target_id.is_some();

            Ok(CrossRef {
                kind,
                entity_key,
                label,
                navigable,
            })
        })
        .context("fetching cross-refs")?
        .collect::<std::result::Result<Vec<_>, _>>()
        .context("reading cross-ref row")?;

    Ok(refs)
}
|
||||
|
||||
/// Fetch discussions for an issue (Phase 2 async load).
///
/// Returns `DiscussionNode` tree suitable for the discussion tree widget.
///
/// Discussions are ordered by their first note's timestamp; notes within a
/// discussion by `position` then `created_at`. Errors when the issue key
/// does not resolve to a local row.
pub fn fetch_issue_discussions(conn: &Connection, key: &EntityKey) -> Result<Vec<DiscussionNode>> {
    // Resolve (project_id, iid) -> internal issue row id once up front.
    let issue_id: i64 = conn
        .query_row(
            "SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2",
            rusqlite::params![key.project_id, key.iid],
            |r| r.get(0),
        )
        .context("looking up issue id")?;

    let mut disc_stmt = conn
        .prepare(
            "SELECT d.id, d.gitlab_discussion_id, d.resolvable, d.resolved
             FROM discussions d
             WHERE d.issue_id = ?1 AND d.noteable_type = 'Issue'
             ORDER BY d.first_note_at ASC, d.id ASC",
        )
        .context("preparing discussions query")?;

    // Prepared once and re-executed per discussion in the loop below
    // (one query per discussion, against the local SQLite file).
    let mut note_stmt = conn
        .prepare(
            "SELECT n.author_username, n.body, n.created_at, n.is_system,
                    n.note_type, n.position_new_path, n.position_new_line
             FROM notes n
             WHERE n.discussion_id = ?1
             ORDER BY n.position ASC, n.created_at ASC",
        )
        .context("preparing notes query")?;

    // Materialize the discussion rows first so disc_stmt's borrow ends
    // before note_stmt is executed inside the loop.
    let disc_rows: Vec<_> = disc_stmt
        .query_map(rusqlite::params![issue_id], |row| {
            Ok((
                row.get::<_, i64>(0)?,    // id
                row.get::<_, String>(1)?, // gitlab_discussion_id
                row.get::<_, bool>(2)?,   // resolvable
                row.get::<_, bool>(3)?,   // resolved
            ))
        })
        .context("fetching discussions")?
        .collect::<std::result::Result<Vec<_>, _>>()
        .context("reading discussion row")?;

    let mut discussions = Vec::new();
    for (disc_db_id, discussion_id, resolvable, resolved) in disc_rows {
        let notes: Vec<NoteNode> = note_stmt
            .query_map(rusqlite::params![disc_db_id], |row| {
                Ok(NoteNode {
                    author: row.get::<_, Option<String>>(0)?.unwrap_or_default(),
                    body: row.get::<_, Option<String>>(1)?.unwrap_or_default(),
                    created_at: row.get(2)?,
                    is_system: row.get(3)?,
                    // A note is a diff note iff its note_type column is
                    // exactly 'DiffNote'.
                    is_diff_note: row.get::<_, Option<String>>(4)?.as_deref() == Some("DiffNote"),
                    diff_file_path: row.get(5)?,
                    diff_new_line: row.get(6)?,
                })
            })
            .context("fetching notes")?
            .collect::<std::result::Result<Vec<_>, _>>()
            .context("reading note row")?;

        discussions.push(DiscussionNode {
            discussion_id,
            notes,
            resolvable,
            resolved,
        });
    }

    Ok(discussions)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Minimal schema covering the tables read by the issue-detail queries
    /// (issues, assignees, labels, discussions/notes, entity_references,
    /// merge_requests).
    fn create_issue_detail_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT NOT NULL,
                description TEXT,
                state TEXT NOT NULL DEFAULT 'opened',
                author_username TEXT,
                milestone_title TEXT,
                due_date TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                web_url TEXT,
                UNIQUE(project_id, iid)
            );
            CREATE TABLE issue_assignees (
                issue_id INTEGER NOT NULL,
                username TEXT NOT NULL,
                UNIQUE(issue_id, username)
            );
            CREATE TABLE labels (
                id INTEGER PRIMARY KEY,
                project_id INTEGER NOT NULL,
                name TEXT NOT NULL
            );
            CREATE TABLE issue_labels (
                issue_id INTEGER NOT NULL,
                label_id INTEGER NOT NULL,
                UNIQUE(issue_id, label_id)
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                noteable_type TEXT NOT NULL,
                resolvable INTEGER NOT NULL DEFAULT 0,
                resolved INTEGER NOT NULL DEFAULT 0,
                first_note_at INTEGER
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                note_type TEXT,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                position INTEGER,
                position_new_path TEXT,
                position_new_line INTEGER
            );
            CREATE TABLE entity_references (
                id INTEGER PRIMARY KEY,
                project_id INTEGER NOT NULL,
                source_entity_type TEXT NOT NULL,
                source_entity_id INTEGER NOT NULL,
                target_entity_type TEXT NOT NULL,
                target_entity_id INTEGER,
                target_project_path TEXT,
                target_entity_iid INTEGER,
                reference_type TEXT NOT NULL,
                source_method TEXT NOT NULL DEFAULT 'api',
                created_at INTEGER NOT NULL DEFAULT 0
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT NOT NULL,
                state TEXT NOT NULL DEFAULT 'opened',
                UNIQUE(project_id, iid)
            );
            ",
        )
        .unwrap();
    }

    /// Shared fixture: one issue (#42) with two assignees, two labels,
    /// a two-note discussion, a system-note discussion, and a closing MR
    /// cross-reference.
    fn setup_issue_detail_data(conn: &Connection) {
        // Project.
        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')",
            [],
        )
        .unwrap();

        // Issue.
        conn.execute(
            "INSERT INTO issues (id, gitlab_id, project_id, iid, title, description, state, author_username, milestone_title, due_date, created_at, updated_at, web_url)
             VALUES (1, 1000, 1, 42, 'Fix authentication flow', 'Detailed description here', 'opened', 'alice', 'v1.0', '2026-03-01', 1700000000000, 1700000060000, 'https://gitlab.com/group/project/-/issues/42')",
            [],
        )
        .unwrap();

        // Assignees.
        conn.execute(
            "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'bob')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'charlie')",
            [],
        )
        .unwrap();

        // Labels.
        conn.execute(
            "INSERT INTO labels (id, project_id, name) VALUES (1, 1, 'backend')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO labels (id, project_id, name) VALUES (2, 1, 'urgent')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO issue_labels (issue_id, label_id) VALUES (1, 1)",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO issue_labels (issue_id, label_id) VALUES (1, 2)",
            [],
        )
        .unwrap();

        // Discussions + notes.
        conn.execute(
            "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, resolvable, resolved, first_note_at)
             VALUES (1, 'disc-aaa', 1, 1, 'Issue', 0, 0, 1700000010000)",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, position, is_system, note_type)
             VALUES (1, 10001, 1, 1, 'alice', 'This looks good overall', 1700000010000, 1700000010000, 0, 0, 'DiscussionNote')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, position, is_system, note_type)
             VALUES (2, 10002, 1, 1, 'bob', 'Agreed, but see my comment below', 1700000020000, 1700000020000, 1, 0, 'DiscussionNote')",
            [],
        )
        .unwrap();

        // System note discussion.
        conn.execute(
            "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, first_note_at)
             VALUES (2, 'disc-bbb', 1, 1, 'Issue', 1700000030000)",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, position, is_system, note_type)
             VALUES (3, 10003, 2, 1, 'system', 'changed the description', 1700000030000, 1700000030000, 0, 1, NULL)",
            [],
        )
        .unwrap();

        // Closing MR cross-ref.
        conn.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state)
             VALUES (1, 2000, 1, 10, 'Fix auth MR', 'opened')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_entity_iid, reference_type)
             VALUES (1, 'issue', 1, 'merge_request', 1, 10, 'closes')",
            [],
        )
        .unwrap();
    }

    // Scalar metadata columns come back as inserted.
    #[test]
    fn test_fetch_issue_detail_basic() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let data = fetch_issue_detail(&conn, &key).unwrap();

        assert_eq!(data.metadata.iid, 42);
        assert_eq!(data.metadata.title, "Fix authentication flow");
        assert_eq!(data.metadata.state, "opened");
        assert_eq!(data.metadata.author, "alice");
        assert_eq!(data.metadata.project_path, "group/project");
        assert_eq!(data.metadata.milestone, Some("v1.0".to_string()));
        assert_eq!(data.metadata.due_date, Some("2026-03-01".to_string()));
        assert_eq!(
            data.metadata.web_url,
            "https://gitlab.com/group/project/-/issues/42"
        );
    }

    #[test]
    fn test_fetch_issue_detail_assignees() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let data = fetch_issue_detail(&conn, &key).unwrap();

        assert_eq!(data.metadata.assignees.len(), 2);
        assert!(data.metadata.assignees.contains(&"bob".to_string()));
        assert!(data.metadata.assignees.contains(&"charlie".to_string()));
    }

    // Labels are returned sorted by name (ORDER BY l.name).
    #[test]
    fn test_fetch_issue_detail_labels() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let data = fetch_issue_detail(&conn, &key).unwrap();

        assert_eq!(data.metadata.labels, vec!["backend", "urgent"]);
    }

    // A 'closes'/'merge_request' ref maps to ClosingMr with the MR's title.
    #[test]
    fn test_fetch_issue_detail_cross_refs() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let data = fetch_issue_detail(&conn, &key).unwrap();

        assert_eq!(data.cross_refs.len(), 1);
        assert_eq!(data.cross_refs[0].kind, CrossRefKind::ClosingMr);
        assert_eq!(data.cross_refs[0].entity_key, EntityKey::mr(1, 10));
        assert_eq!(data.cross_refs[0].label, "Fix auth MR");
        assert!(data.cross_refs[0].navigable);
    }

    // Both discussions (regular + system-note) are counted.
    #[test]
    fn test_fetch_issue_detail_discussion_count() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let data = fetch_issue_detail(&conn, &key).unwrap();

        assert_eq!(data.metadata.discussion_count, 2);
    }

    #[test]
    fn test_fetch_issue_discussions_basic() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let discussions = fetch_issue_discussions(&conn, &key).unwrap();

        assert_eq!(discussions.len(), 2);
    }

    #[test]
    fn test_fetch_issue_discussions_notes() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let discussions = fetch_issue_discussions(&conn, &key).unwrap();

        // First discussion has 2 notes.
        assert_eq!(discussions[0].notes.len(), 2);
        assert_eq!(discussions[0].notes[0].author, "alice");
        assert_eq!(discussions[0].notes[0].body, "This looks good overall");
        assert_eq!(discussions[0].notes[1].author, "bob");
        assert!(!discussions[0].notes[0].is_system);
    }

    #[test]
    fn test_fetch_issue_discussions_system_note() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let discussions = fetch_issue_discussions(&conn, &key).unwrap();

        // Second discussion is a system note.
        assert_eq!(discussions[1].notes.len(), 1);
        assert!(discussions[1].notes[0].is_system);
        assert_eq!(discussions[1].notes[0].body, "changed the description");
    }

    #[test]
    fn test_fetch_issue_discussions_ordering() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 42);
        let discussions = fetch_issue_discussions(&conn, &key).unwrap();

        // Ordered by first_note_at.
        assert_eq!(discussions[0].discussion_id, "disc-aaa");
        assert_eq!(discussions[1].discussion_id, "disc-bbb");
    }

    // A missing iid surfaces as an error (query_row finds no row).
    #[test]
    fn test_fetch_issue_detail_not_found() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);
        setup_issue_detail_data(&conn);

        let key = EntityKey::issue(1, 999);
        let result = fetch_issue_detail(&conn, &key);
        assert!(result.is_err());
    }

    // NULL description is COALESCE'd to the empty string.
    #[test]
    fn test_fetch_issue_detail_no_description() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_detail_schema(&conn);

        conn.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')",
            [],
        )
        .unwrap();
        conn.execute(
            "INSERT INTO issues (id, gitlab_id, project_id, iid, title, description, state, created_at, updated_at)
             VALUES (1, 1000, 1, 1, 'No desc', NULL, 'opened', 0, 0)",
            [],
        )
        .unwrap();

        let key = EntityKey::issue(1, 1);
        let data = fetch_issue_detail(&conn, &key).unwrap();
        assert_eq!(data.metadata.description, "");
    }
}
|
||||
532
crates/lore-tui/src/action/issue_list.rs
Normal file
532
crates/lore-tui/src/action/issue_list.rs
Normal file
@@ -0,0 +1,532 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::state::issue_list::{
|
||||
IssueCursor, IssueFilter, IssueListPage, IssueListRow, SortField, SortOrder,
|
||||
};
|
||||
|
||||
/// Page size for issue list queries.
|
||||
const ISSUE_PAGE_SIZE: usize = 50;
|
||||
|
||||
/// Fetch a page of issues matching the given filter and sort.
///
/// Uses keyset pagination: when `cursor` is `Some`, returns rows after
/// (less-than for DESC, greater-than for ASC) the cursor boundary.
/// When `snapshot_fence` is `Some`, limits results to rows updated_at <= fence
/// to prevent newly synced items from shifting the page window.
///
/// Returns up to [`ISSUE_PAGE_SIZE`] rows plus a `next_cursor` when more
/// rows exist, and the total (unpaged) match count.
///
/// # Errors
/// Fails if any of the count/data queries cannot be prepared or read.
pub fn fetch_issue_list(
    conn: &Connection,
    filter: &IssueFilter,
    sort_field: SortField,
    sort_order: SortOrder,
    cursor: Option<&IssueCursor>,
    snapshot_fence: Option<i64>,
) -> Result<IssueListPage> {
    // -- Build dynamic WHERE conditions and params --------------------------
    // Conditions and params are pushed in lockstep so positional `?`
    // placeholders line up with the boxed values.
    let mut conditions: Vec<String> = Vec::new();
    let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();

    // Filter: project_id
    if let Some(pid) = filter.project_id {
        conditions.push("i.project_id = ?".into());
        params.push(Box::new(pid));
    }

    // Filter: state
    if let Some(ref state) = filter.state {
        conditions.push("i.state = ?".into());
        params.push(Box::new(state.clone()));
    }

    // Filter: author
    if let Some(ref author) = filter.author {
        conditions.push("i.author_username = ?".into());
        params.push(Box::new(author.clone()));
    }

    // Filter: label (via join)
    // A separate aliased join (fil/fl) is used for filtering so it does not
    // interfere with the LEFT JOIN (il/l) used below for label aggregation.
    let label_join = if let Some(ref label) = filter.label {
        conditions.push("fl.name = ?".into());
        params.push(Box::new(label.clone()));
        "JOIN issue_labels fil ON fil.issue_id = i.id \
         JOIN labels fl ON fl.id = fil.label_id"
    } else {
        ""
    };

    // Filter: free_text (LIKE on title)
    if let Some(ref text) = filter.free_text {
        conditions.push("i.title LIKE ?".into());
        params.push(Box::new(format!("%{text}%")));
    }

    // Snapshot fence
    if let Some(fence) = snapshot_fence {
        conditions.push("i.updated_at <= ?".into());
        params.push(Box::new(fence));
    }

    // -- Count query (before cursor filter) ---------------------------------
    // The count intentionally excludes the cursor condition so total_count
    // reflects the full filtered set, not the remaining pages.
    let where_clause = if conditions.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", conditions.join(" AND "))
    };

    let count_sql = format!(
        "SELECT COUNT(DISTINCT i.id) FROM issues i \
         JOIN projects p ON p.id = i.project_id \
         {label_join} {where_clause}"
    );
    let count_params: Vec<&dyn rusqlite::types::ToSql> =
        params.iter().map(|b| b.as_ref()).collect();

    let total_count: i64 = conn
        .query_row(&count_sql, count_params.as_slice(), |r| r.get(0))
        .context("counting issues for list")?;

    // -- Keyset cursor condition -------------------------------------------
    // Row-value comparison `(col, iid) < (?, ?)` requires SQLite >= 3.15.
    let (sort_col, sort_dir) = sort_column_and_dir(sort_field, sort_order);
    let cursor_op = if sort_dir == "DESC" { "<" } else { ">" };

    if let Some(c) = cursor {
        // NOTE(review): the cursor always binds (updated_at, iid) while
        // sort_col follows sort_field — keyset pagination is only correct
        // when sorting by updated_at. Confirm callers never pass a cursor
        // together with another sort field.
        conditions.push(format!("({sort_col}, i.iid) {cursor_op} (?, ?)"));
        params.push(Box::new(c.updated_at));
        params.push(Box::new(c.iid));
    }

    // -- Data query ---------------------------------------------------------
    let where_clause_full = if conditions.is_empty() {
        String::new()
    } else {
        format!("WHERE {}", conditions.join(" AND "))
    };

    let data_sql = format!(
        "SELECT p.path_with_namespace, i.iid, i.title, i.state, \
         i.author_username, i.updated_at, \
         GROUP_CONCAT(DISTINCT l.name) AS label_names \
         FROM issues i \
         JOIN projects p ON p.id = i.project_id \
         {label_join} \
         LEFT JOIN issue_labels il ON il.issue_id = i.id \
         LEFT JOIN labels l ON l.id = il.label_id \
         {where_clause_full} \
         GROUP BY i.id \
         ORDER BY {sort_col} {sort_dir}, i.iid {sort_dir} \
         LIMIT ?"
    );

    // +1 to detect if there's a next page
    let fetch_limit = (ISSUE_PAGE_SIZE + 1) as i64;
    params.push(Box::new(fetch_limit));

    let all_params: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|b| b.as_ref()).collect();

    let mut stmt = conn
        .prepare(&data_sql)
        .context("preparing issue list query")?;

    let rows_result = stmt
        .query_map(all_params.as_slice(), |row| {
            let project_path: String = row.get(0)?;
            let iid: i64 = row.get(1)?;
            // Nullable text columns default to "" for display.
            let title: String = row.get::<_, Option<String>>(2)?.unwrap_or_default();
            let state: String = row.get::<_, Option<String>>(3)?.unwrap_or_default();
            let author: String = row.get::<_, Option<String>>(4)?.unwrap_or_default();
            let updated_at: i64 = row.get(5)?;
            let label_names: Option<String> = row.get(6)?;

            // NOTE(review): splitting GROUP_CONCAT output on ',' breaks if a
            // label name itself contains a comma — confirm label names are
            // comma-free upstream.
            let labels = label_names
                .map(|s| s.split(',').map(String::from).collect())
                .unwrap_or_default();

            Ok(IssueListRow {
                project_path,
                iid,
                title,
                state,
                author,
                labels,
                updated_at,
            })
        })
        .context("querying issue list")?;

    let mut rows: Vec<IssueListRow> = Vec::new();
    for row in rows_result {
        rows.push(row.context("reading issue list row")?);
    }

    // Determine next cursor from the last row (if we got more than page size)
    let has_next = rows.len() > ISSUE_PAGE_SIZE;
    if has_next {
        rows.truncate(ISSUE_PAGE_SIZE);
    }

    let next_cursor = if has_next {
        rows.last().map(|r| IssueCursor {
            updated_at: r.updated_at,
            iid: r.iid,
        })
    } else {
        None
    };

    // COUNT(*) is never negative, so the sign-loss cast is safe.
    #[allow(clippy::cast_sign_loss)]
    Ok(IssueListPage {
        rows,
        next_cursor,
        total_count: total_count as u64,
    })
}
|
||||
|
||||
/// Map sort field + order to SQL column name and direction keyword.
|
||||
fn sort_column_and_dir(field: SortField, order: SortOrder) -> (&'static str, &'static str) {
|
||||
let col = match field {
|
||||
SortField::UpdatedAt => "i.updated_at",
|
||||
SortField::Iid => "i.iid",
|
||||
SortField::Title => "i.title",
|
||||
SortField::State => "i.state",
|
||||
SortField::Author => "i.author_username",
|
||||
};
|
||||
let dir = match order {
|
||||
SortOrder::Desc => "DESC",
|
||||
SortOrder::Asc => "ASC",
|
||||
};
|
||||
(col, dir)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Create the minimal schema needed for issue list queries.
    fn create_issue_list_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT NOT NULL,
                author_username TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE labels (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER,
                project_id INTEGER NOT NULL,
                name TEXT NOT NULL,
                color TEXT,
                description TEXT
            );
            CREATE TABLE issue_labels (
                issue_id INTEGER NOT NULL,
                label_id INTEGER NOT NULL,
                PRIMARY KEY(issue_id, label_id)
            );
            ",
        )
        .expect("create issue list schema");
    }

    /// Insert a test issue with an author.
    /// created_at/updated_at/last_seen_at all share the `updated_at` value.
    fn insert_issue_full(conn: &Connection, iid: i64, state: &str, author: &str, updated_at: i64) {
        conn.execute(
            "INSERT INTO issues (gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at)
             VALUES (?1, 1, ?2, ?3, ?4, ?5, ?6, ?6, ?6)",
            rusqlite::params![
                iid * 100,
                iid,
                format!("Issue {iid}"),
                state,
                author,
                updated_at
            ],
        )
        .expect("insert issue full");
    }

    /// Attach a label to an issue.
    ///
    /// NOTE(review): `labels.name` has no UNIQUE constraint in this schema,
    /// so `INSERT OR IGNORE` cannot deduplicate by name; a repeated name
    /// inserts a duplicate row and the lookup below binds to the first one.
    /// Harmless for these fixtures, but confirm it matches the real schema.
    fn attach_label(conn: &Connection, issue_iid: i64, label_name: &str) {
        // Find issue id.
        let issue_id: i64 = conn
            .query_row("SELECT id FROM issues WHERE iid = ?", [issue_iid], |r| {
                r.get(0)
            })
            .expect("find issue");

        // Ensure label exists.
        conn.execute(
            "INSERT OR IGNORE INTO labels (project_id, name) VALUES (1, ?)",
            [label_name],
        )
        .expect("insert label");
        let label_id: i64 = conn
            .query_row("SELECT id FROM labels WHERE name = ?", [label_name], |r| {
                r.get(0)
            })
            .expect("find label");

        conn.execute(
            "INSERT INTO issue_labels (issue_id, label_id) VALUES (?, ?)",
            [issue_id, label_id],
        )
        .expect("attach label");
    }

    /// Standard fixture: one project, five issues (descending recency by
    /// iid), and a few labels spread over issues 1, 2, and 4.
    fn setup_issue_list_data(conn: &Connection) {
        let base = 1_700_000_000_000_i64;
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace) VALUES (1, 'group/project')",
            [],
        )
        .unwrap();

        insert_issue_full(conn, 1, "opened", "alice", base - 10_000);
        insert_issue_full(conn, 2, "opened", "bob", base - 20_000);
        insert_issue_full(conn, 3, "closed", "alice", base - 30_000);
        insert_issue_full(conn, 4, "opened", "charlie", base - 40_000);
        insert_issue_full(conn, 5, "closed", "bob", base - 50_000);

        attach_label(conn, 1, "bug");
        attach_label(conn, 1, "critical");
        attach_label(conn, 2, "feature");
        attach_label(conn, 4, "bug");
    }

    // Unfiltered fetch returns every row, newest first, with no next page.
    #[test]
    fn test_fetch_issue_list_basic() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let filter = IssueFilter::default();
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            None,
        )
        .unwrap();

        assert_eq!(page.total_count, 5);
        assert_eq!(page.rows.len(), 5);
        // Newest first.
        assert_eq!(page.rows[0].iid, 1);
        assert_eq!(page.rows[4].iid, 5);
        assert!(page.next_cursor.is_none());
    }

    // State filter restricts both the rows and the total count.
    #[test]
    fn test_fetch_issue_list_filter_state() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let filter = IssueFilter {
            state: Some("opened".into()),
            ..Default::default()
        };
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            None,
        )
        .unwrap();

        assert_eq!(page.total_count, 3);
        assert_eq!(page.rows.len(), 3);
        assert!(page.rows.iter().all(|r| r.state == "opened"));
    }

    // Author filter matches on author_username exactly.
    #[test]
    fn test_fetch_issue_list_filter_author() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let filter = IssueFilter {
            author: Some("alice".into()),
            ..Default::default()
        };
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            None,
        )
        .unwrap();

        assert_eq!(page.total_count, 2);
        assert_eq!(page.rows.len(), 2);
        assert!(page.rows.iter().all(|r| r.author == "alice"));
    }

    // Label filter goes through the filtering join (fil/fl).
    #[test]
    fn test_fetch_issue_list_filter_label() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let filter = IssueFilter {
            label: Some("bug".into()),
            ..Default::default()
        };
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            None,
        )
        .unwrap();

        assert_eq!(page.total_count, 2); // issues 1 and 4
        assert_eq!(page.rows.len(), 2);
    }

    // GROUP_CONCAT aggregation yields each row's full label set.
    #[test]
    fn test_fetch_issue_list_labels_aggregated() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let filter = IssueFilter::default();
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            None,
        )
        .unwrap();

        // Issue 1 has labels "bug" and "critical".
        let issue1 = page.rows.iter().find(|r| r.iid == 1).unwrap();
        assert_eq!(issue1.labels.len(), 2);
        assert!(issue1.labels.contains(&"bug".to_string()));
        assert!(issue1.labels.contains(&"critical".to_string()));

        // Issue 5 has no labels.
        let issue5 = page.rows.iter().find(|r| r.iid == 5).unwrap();
        assert!(issue5.labels.is_empty());
    }

    // Ascending sort reverses the order.
    #[test]
    fn test_fetch_issue_list_sort_ascending() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let filter = IssueFilter::default();
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Asc,
            None,
            None,
        )
        .unwrap();

        // Oldest first.
        assert_eq!(page.rows[0].iid, 5);
        assert_eq!(page.rows[4].iid, 1);
    }

    // Snapshot fence excludes rows updated after the fence timestamp.
    #[test]
    fn test_fetch_issue_list_snapshot_fence() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let base = 1_700_000_000_000_i64;
        // Fence at base-25000: should exclude issues 1 (at base-10000) and 2 (at base-20000).
        let fence = base - 25_000;
        let filter = IssueFilter::default();
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            Some(fence),
        )
        .unwrap();

        assert_eq!(page.total_count, 3);
        assert_eq!(page.rows.len(), 3);
        assert!(page.rows.iter().all(|r| r.updated_at <= fence));
    }

    // Empty table: zero rows, zero count, no cursor.
    #[test]
    fn test_fetch_issue_list_empty() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        conn.execute(
            "INSERT INTO projects (gitlab_project_id, path_with_namespace) VALUES (1, 'g/p')",
            [],
        )
        .unwrap();

        let page = fetch_issue_list(
            &conn,
            &IssueFilter::default(),
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            None,
        )
        .unwrap();

        assert_eq!(page.total_count, 0);
        assert!(page.rows.is_empty());
        assert!(page.next_cursor.is_none());
    }

    // free_text is a LIKE on title with wildcards added on both sides.
    #[test]
    fn test_fetch_issue_list_free_text() {
        let conn = Connection::open_in_memory().unwrap();
        create_issue_list_schema(&conn);
        setup_issue_list_data(&conn);

        let filter = IssueFilter {
            free_text: Some("Issue 3".into()),
            ..Default::default()
        };
        let page = fetch_issue_list(
            &conn,
            &filter,
            SortField::UpdatedAt,
            SortOrder::Desc,
            None,
            None,
        )
        .unwrap();

        assert_eq!(page.total_count, 1);
        assert_eq!(page.rows[0].iid, 3);
    }
}
|
||||
29
crates/lore-tui/src/action/mod.rs
Normal file
29
crates/lore-tui/src/action/mod.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
//! Action layer — pure data-fetching functions for TUI screens.
//!
//! Actions query the local SQLite database and return data structs.
//! They never touch terminal state, never spawn tasks, and use injected
//! [`Clock`] for time calculations (deterministic tests).

// One module per screen; each exposes its own `fetch_*` entry points.
mod bootstrap;
mod dashboard;
mod file_history;
mod issue_detail;
mod issue_list;
mod mr_detail;
mod mr_list;
mod search;
mod timeline;
mod trace;
mod who;

// Flatten the per-screen modules into a single `action::` namespace so
// callers don't need to know which screen module a fetcher lives in.
// NOTE(review): glob re-exports rely on public item names staying unique
// across the screen modules — a collision becomes a compile error here.
pub use bootstrap::*;
pub use dashboard::*;
pub use file_history::*;
pub use issue_detail::*;
pub use issue_list::*;
pub use mr_detail::*;
pub use mr_list::*;
pub use search::*;
pub use timeline::*;
pub use trace::*;
pub use who::*;
||||
694
crates/lore-tui/src/action/mr_detail.rs
Normal file
694
crates/lore-tui/src/action/mr_detail.rs
Normal file
@@ -0,0 +1,694 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::message::EntityKey;
|
||||
use crate::state::mr_detail::{FileChange, FileChangeType, MrDetailData, MrMetadata};
|
||||
use crate::view::common::cross_ref::{CrossRef, CrossRefKind};
|
||||
use crate::view::common::discussion_tree::{DiscussionNode, NoteNode};
|
||||
|
||||
/// Fetch MR metadata + cross-refs + file changes (Phase 1 composite).
|
||||
pub fn fetch_mr_detail(conn: &Connection, key: &EntityKey) -> Result<MrDetailData> {
|
||||
let metadata = fetch_mr_metadata(conn, key)?;
|
||||
let cross_refs = fetch_mr_cross_refs(conn, key)?;
|
||||
let file_changes = fetch_mr_file_changes(conn, key)?;
|
||||
Ok(MrDetailData {
|
||||
metadata,
|
||||
cross_refs,
|
||||
file_changes,
|
||||
})
|
||||
}
|
||||
|
||||
/// Fetch MR metadata from the local DB.
|
||||
fn fetch_mr_metadata(conn: &Connection, key: &EntityKey) -> Result<MrMetadata> {
|
||||
let row = conn
|
||||
.query_row(
|
||||
"SELECT m.iid, p.path_with_namespace, m.title,
|
||||
COALESCE(m.description, ''), m.state, m.draft,
|
||||
m.author_username, m.source_branch, m.target_branch,
|
||||
COALESCE(m.detailed_merge_status, ''),
|
||||
m.created_at, m.updated_at, m.merged_at,
|
||||
COALESCE(m.web_url, ''),
|
||||
(SELECT COUNT(*) FROM discussions d WHERE d.merge_request_id = m.id) AS disc_count,
|
||||
(SELECT COUNT(*) FROM mr_file_changes fc WHERE fc.merge_request_id = m.id) AS fc_count
|
||||
FROM merge_requests m
|
||||
JOIN projects p ON p.id = m.project_id
|
||||
WHERE m.project_id = ?1 AND m.iid = ?2",
|
||||
rusqlite::params![key.project_id, key.iid],
|
||||
|row| {
|
||||
Ok(MrMetadata {
|
||||
iid: row.get(0)?,
|
||||
project_path: row.get(1)?,
|
||||
title: row.get::<_, Option<String>>(2)?.unwrap_or_default(),
|
||||
description: row.get(3)?,
|
||||
state: row.get::<_, Option<String>>(4)?.unwrap_or_default(),
|
||||
draft: row.get(5)?,
|
||||
author: row.get::<_, Option<String>>(6)?.unwrap_or_default(),
|
||||
assignees: Vec::new(),
|
||||
reviewers: Vec::new(),
|
||||
labels: Vec::new(),
|
||||
source_branch: row.get::<_, Option<String>>(7)?.unwrap_or_default(),
|
||||
target_branch: row.get::<_, Option<String>>(8)?.unwrap_or_default(),
|
||||
merge_status: row.get(9)?,
|
||||
created_at: row.get(10)?,
|
||||
updated_at: row.get(11)?,
|
||||
merged_at: row.get(12)?,
|
||||
web_url: row.get(13)?,
|
||||
discussion_count: row.get::<_, i64>(14)? as usize,
|
||||
file_change_count: row.get::<_, i64>(15)? as usize,
|
||||
})
|
||||
},
|
||||
)
|
||||
.context("fetching MR metadata")?;
|
||||
|
||||
// Fetch assignees.
|
||||
let mut assignees_stmt = conn
|
||||
.prepare(
|
||||
"SELECT username FROM mr_assignees
|
||||
WHERE merge_request_id = (
|
||||
SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2
|
||||
)
|
||||
ORDER BY username",
|
||||
)
|
||||
.context("preparing assignees query")?;
|
||||
let assignees: Vec<String> = assignees_stmt
|
||||
.query_map(rusqlite::params![key.project_id, key.iid], |row| row.get(0))
|
||||
.context("fetching assignees")?
|
||||
.collect::<std::result::Result<Vec<_>, _>>()
|
||||
.context("reading assignee row")?;
|
||||
|
||||
// Fetch reviewers.
|
||||
let mut reviewers_stmt = conn
|
||||
.prepare(
|
||||
"SELECT username FROM mr_reviewers
|
||||
WHERE merge_request_id = (
|
||||
SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2
|
||||
)
|
||||
ORDER BY username",
|
||||
)
|
||||
.context("preparing reviewers query")?;
|
||||
let reviewers: Vec<String> = reviewers_stmt
|
||||
.query_map(rusqlite::params![key.project_id, key.iid], |row| row.get(0))
|
||||
.context("fetching reviewers")?
|
||||
.collect::<std::result::Result<Vec<_>, _>>()
|
||||
.context("reading reviewer row")?;
|
||||
|
||||
// Fetch labels.
|
||||
let mut labels_stmt = conn
|
||||
.prepare(
|
||||
"SELECT l.name FROM mr_labels ml
|
||||
JOIN labels l ON ml.label_id = l.id
|
||||
WHERE ml.merge_request_id = (
|
||||
SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2
|
||||
)
|
||||
ORDER BY l.name",
|
||||
)
|
||||
.context("preparing labels query")?;
|
||||
let labels: Vec<String> = labels_stmt
|
||||
.query_map(rusqlite::params![key.project_id, key.iid], |row| row.get(0))
|
||||
.context("fetching labels")?
|
||||
.collect::<std::result::Result<Vec<_>, _>>()
|
||||
.context("reading label row")?;
|
||||
|
||||
let mut result = row;
|
||||
result.assignees = assignees;
|
||||
result.reviewers = reviewers;
|
||||
result.labels = labels;
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Fetch cross-references for an MR.
///
/// Reads `entity_references` rows whose source is this MR and resolves
/// each target's title and owning project id so the TUI can render a
/// label and decide whether the reference is navigable.
///
/// # Errors
/// Fails if the query cannot be prepared or a row cannot be read.
fn fetch_mr_cross_refs(conn: &Connection, key: &EntityKey) -> Result<Vec<CrossRef>> {
    let mut stmt = conn
        .prepare(
            "SELECT er.reference_type, er.target_entity_type,
                    er.target_entity_id, er.target_entity_iid,
                    er.target_project_path,
                    CASE
                        WHEN er.target_entity_type = 'issue'
                        THEN (SELECT title FROM issues WHERE id = er.target_entity_id)
                        WHEN er.target_entity_type = 'merge_request'
                        THEN (SELECT title FROM merge_requests WHERE id = er.target_entity_id)
                        ELSE NULL
                    END as entity_title,
                    CASE
                        WHEN er.target_entity_id IS NOT NULL
                        THEN (SELECT project_id FROM issues WHERE id = er.target_entity_id
                              UNION ALL
                              SELECT project_id FROM merge_requests WHERE id = er.target_entity_id
                              LIMIT 1)
                        ELSE NULL
                    END as target_project_id
             FROM entity_references er
             WHERE er.source_entity_type = 'merge_request'
             AND er.source_entity_id = (SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2)
             ORDER BY er.reference_type, er.target_entity_iid",
        )
        .context("preparing MR cross-refs query")?;

    // NOTE(review): target_entity_id is resolved against both issues and
    // merge_requests by raw primary key; if an issue and an MR ever share
    // the same id value, UNION ALL + LIMIT 1 picks whichever table yields a
    // row first — confirm ids are disjoint across the two tables.
    let refs: Vec<CrossRef> = stmt
        .query_map(rusqlite::params![key.project_id, key.iid], |row| {
            let ref_type: String = row.get(0)?;
            let target_type: String = row.get(1)?;
            let _target_id: Option<i64> = row.get(2)?;
            let target_iid: Option<i64> = row.get(3)?;
            let target_path: Option<String> = row.get(4)?;
            let title: Option<String> = row.get(5)?;
            let target_project_id: Option<i64> = row.get(6)?;

            // Classify the reference; anything unrecognized renders as a
            // plain mention.
            let kind = match (ref_type.as_str(), target_type.as_str()) {
                ("closes", "issue") => CrossRefKind::ClosingMr,
                ("related", "issue") => CrossRefKind::RelatedIssue,
                _ => CrossRefKind::MentionedIn,
            };

            // Fall back to the source MR's project when the target project
            // could not be resolved (non-navigable in that case).
            let iid = target_iid.unwrap_or(0);
            let project_id = target_project_id.unwrap_or(key.project_id);

            let entity_key = match target_type.as_str() {
                "merge_request" => EntityKey::mr(project_id, iid),
                _ => EntityKey::issue(project_id, iid),
            };

            // Prefer the resolved title; otherwise synthesize a short
            // GitLab-style reference like "group/project!42" or "#7".
            let label = title.unwrap_or_else(|| {
                let prefix = if target_type == "merge_request" {
                    "!"
                } else {
                    "#"
                };
                let path = target_path.clone().unwrap_or_default();
                if path.is_empty() {
                    format!("{prefix}{iid}")
                } else {
                    format!("{path}{prefix}{iid}")
                }
            });

            Ok(CrossRef {
                kind,
                entity_key,
                label,
                // Only targets we could resolve locally are navigable.
                navigable: target_project_id.is_some(),
            })
        })
        .context("fetching MR cross-refs")?
        .collect::<std::result::Result<Vec<_>, _>>()
        .context("reading cross-ref row")?;

    Ok(refs)
}
|
||||
|
||||
/// Fetch file changes for an MR.
|
||||
fn fetch_mr_file_changes(conn: &Connection, key: &EntityKey) -> Result<Vec<FileChange>> {
|
||||
let mut stmt = conn
|
||||
.prepare(
|
||||
"SELECT fc.old_path, fc.new_path, fc.change_type
|
||||
FROM mr_file_changes fc
|
||||
WHERE fc.merge_request_id = (
|
||||
SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2
|
||||
)
|
||||
ORDER BY fc.new_path",
|
||||
)
|
||||
.context("preparing file changes query")?;
|
||||
|
||||
let changes: Vec<FileChange> = stmt
|
||||
.query_map(rusqlite::params![key.project_id, key.iid], |row| {
|
||||
Ok(FileChange {
|
||||
old_path: row.get(0)?,
|
||||
new_path: row.get(1)?,
|
||||
change_type: FileChangeType::parse_db(&row.get::<_, String>(2).unwrap_or_default()),
|
||||
})
|
||||
})
|
||||
.context("fetching file changes")?
|
||||
.collect::<std::result::Result<Vec<_>, _>>()
|
||||
.context("reading file change row")?;
|
||||
|
||||
Ok(changes)
|
||||
}
|
||||
|
||||
/// Fetch discussions for an MR (Phase 2 async load).
///
/// Loads every discussion thread attached to the MR (oldest first) and,
/// for each thread, its notes ordered by position then creation time.
///
/// # Errors
/// Fails if the MR does not exist locally or any query cannot be read.
pub fn fetch_mr_discussions(conn: &Connection, key: &EntityKey) -> Result<Vec<DiscussionNode>> {
    // Resolve the MR's internal id once; every query below keys on it.
    let mr_id: i64 = conn
        .query_row(
            "SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2",
            rusqlite::params![key.project_id, key.iid],
            |row| row.get(0),
        )
        .context("looking up MR id for discussions")?;

    let mut disc_stmt = conn
        .prepare(
            "SELECT d.id, d.gitlab_discussion_id, d.resolvable, d.resolved
             FROM discussions d
             WHERE d.merge_request_id = ?1
             ORDER BY d.first_note_at ASC",
        )
        .context("preparing MR discussions query")?;

    // Prepared once, executed once per discussion in the loop below.
    let mut note_stmt = conn
        .prepare(
            "SELECT n.author_username, n.body, n.created_at, n.is_system,
                    n.note_type, n.position_new_path, n.position_new_line
             FROM notes n
             WHERE n.discussion_id = ?1
             ORDER BY n.position ASC, n.created_at ASC",
        )
        .context("preparing MR notes query")?;

    // Materialize discussion headers first so disc_stmt is no longer
    // borrowed while note_stmt runs.
    let disc_rows: Vec<_> = disc_stmt
        .query_map(rusqlite::params![mr_id], |row| {
            Ok((
                row.get::<_, i64>(0)?,    // id
                row.get::<_, String>(1)?, // gitlab_discussion_id
                row.get::<_, bool>(2)?,   // resolvable
                row.get::<_, bool>(3)?,   // resolved
            ))
        })
        .context("fetching MR discussions")?
        .collect::<std::result::Result<Vec<_>, _>>()
        .context("reading discussion row")?;

    let mut discussions = Vec::new();
    for (disc_db_id, discussion_id, resolvable, resolved) in disc_rows {
        let notes: Vec<NoteNode> = note_stmt
            .query_map(rusqlite::params![disc_db_id], |row| {
                Ok(NoteNode {
                    // Author/body may be NULL (e.g. deleted users);
                    // default to "" for display.
                    author: row.get::<_, Option<String>>(0)?.unwrap_or_default(),
                    body: row.get::<_, Option<String>>(1)?.unwrap_or_default(),
                    created_at: row.get(2)?,
                    is_system: row.get(3)?,
                    // Only GitLab "DiffNote" notes carry diff positions.
                    is_diff_note: row.get::<_, Option<String>>(4)?.as_deref() == Some("DiffNote"),
                    diff_file_path: row.get(5)?,
                    diff_new_line: row.get(6)?,
                })
            })
            .context("fetching notes")?
            .collect::<std::result::Result<Vec<_>, _>>()
            .context("reading note row")?;

        discussions.push(DiscussionNode {
            discussion_id,
            notes,
            resolvable,
            resolved,
        });
    }

    Ok(discussions)
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
    /// Create the base test schema shared by issue- and MR-detail tests:
    /// projects, issues (+assignees/labels), discussions, notes,
    /// entity_references, and a minimal merge_requests table.
    /// Kept deliberately smaller than the production schema — only the
    /// columns the detail queries touch.
    fn create_issue_detail_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT NOT NULL,
                description TEXT,
                state TEXT NOT NULL DEFAULT 'opened',
                author_username TEXT,
                milestone_title TEXT,
                due_date TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                web_url TEXT,
                UNIQUE(project_id, iid)
            );
            CREATE TABLE issue_assignees (
                issue_id INTEGER NOT NULL,
                username TEXT NOT NULL,
                UNIQUE(issue_id, username)
            );
            CREATE TABLE labels (
                id INTEGER PRIMARY KEY,
                project_id INTEGER NOT NULL,
                name TEXT NOT NULL
            );
            CREATE TABLE issue_labels (
                issue_id INTEGER NOT NULL,
                label_id INTEGER NOT NULL,
                UNIQUE(issue_id, label_id)
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                noteable_type TEXT NOT NULL,
                resolvable INTEGER NOT NULL DEFAULT 0,
                resolved INTEGER NOT NULL DEFAULT 0,
                first_note_at INTEGER
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                note_type TEXT,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                position INTEGER,
                position_new_path TEXT,
                position_new_line INTEGER
            );
            CREATE TABLE entity_references (
                id INTEGER PRIMARY KEY,
                project_id INTEGER NOT NULL,
                source_entity_type TEXT NOT NULL,
                source_entity_id INTEGER NOT NULL,
                target_entity_type TEXT NOT NULL,
                target_entity_id INTEGER,
                target_project_path TEXT,
                target_entity_iid INTEGER,
                reference_type TEXT NOT NULL,
                source_method TEXT NOT NULL DEFAULT 'api',
                created_at INTEGER NOT NULL DEFAULT 0
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT NOT NULL,
                state TEXT NOT NULL DEFAULT 'opened',
                UNIQUE(project_id, iid)
            );
            ",
        )
        .unwrap();
    }
|
||||
|
||||
    /// Extend the base schema with everything the MR-detail queries need:
    /// extra merge_requests columns plus the assignee/reviewer/label join
    /// tables and mr_file_changes.
    fn create_mr_detail_schema(conn: &Connection) {
        create_issue_detail_schema(conn);
        // Add MR-specific columns and tables on top of the base schema.
        conn.execute_batch(
            "
            -- Add columns to merge_requests that the detail query needs.
            ALTER TABLE merge_requests ADD COLUMN description TEXT;
            ALTER TABLE merge_requests ADD COLUMN draft INTEGER NOT NULL DEFAULT 0;
            ALTER TABLE merge_requests ADD COLUMN author_username TEXT;
            ALTER TABLE merge_requests ADD COLUMN source_branch TEXT;
            ALTER TABLE merge_requests ADD COLUMN target_branch TEXT;
            ALTER TABLE merge_requests ADD COLUMN detailed_merge_status TEXT;
            ALTER TABLE merge_requests ADD COLUMN created_at INTEGER;
            ALTER TABLE merge_requests ADD COLUMN updated_at INTEGER;
            ALTER TABLE merge_requests ADD COLUMN merged_at INTEGER;
            ALTER TABLE merge_requests ADD COLUMN web_url TEXT;

            CREATE TABLE mr_assignees (
                merge_request_id INTEGER NOT NULL,
                username TEXT NOT NULL,
                UNIQUE(merge_request_id, username)
            );
            CREATE TABLE mr_reviewers (
                merge_request_id INTEGER NOT NULL,
                username TEXT NOT NULL,
                UNIQUE(merge_request_id, username)
            );
            CREATE TABLE mr_labels (
                merge_request_id INTEGER NOT NULL,
                label_id INTEGER NOT NULL,
                UNIQUE(merge_request_id, label_id)
            );
            CREATE TABLE mr_file_changes (
                id INTEGER PRIMARY KEY,
                merge_request_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                old_path TEXT,
                new_path TEXT NOT NULL,
                change_type TEXT NOT NULL
            );
            ",
        )
        .unwrap();
    }
|
||||
|
||||
/// Seed a representative MR (project 1, iid 10) with an assignee, a
/// reviewer, a label, three file changes (modified/added/renamed), one
/// resolvable diff discussion, and a `closes` cross-reference to an issue.
fn setup_mr_detail_data(conn: &Connection) {
    // Project (if not already inserted).
    conn.execute(
        "INSERT OR IGNORE INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')",
        [],
    )
    .unwrap();

    // MR.
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, description, state, draft, author_username, source_branch, target_branch, detailed_merge_status, created_at, updated_at, merged_at, web_url)
         VALUES (1, 2000, 1, 10, 'Fix auth flow', 'MR description', 'opened', 0, 'alice', 'fix-auth', 'main', 'mergeable', 1700000000000, 1700000060000, NULL, 'https://gitlab.com/group/project/-/merge_requests/10')",
        [],
    )
    .unwrap();

    // Assignees.
    conn.execute(
        "INSERT INTO mr_assignees (merge_request_id, username) VALUES (1, 'bob')",
        [],
    )
    .unwrap();

    // Reviewers.
    conn.execute(
        "INSERT INTO mr_reviewers (merge_request_id, username) VALUES (1, 'carol')",
        [],
    )
    .unwrap();

    // Labels.
    conn.execute(
        "INSERT OR IGNORE INTO labels (id, project_id, name) VALUES (10, 1, 'backend')",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO mr_labels (merge_request_id, label_id) VALUES (1, 10)",
        [],
    )
    .unwrap();

    // File changes: one modified, one added, one renamed.
    conn.execute(
        "INSERT INTO mr_file_changes (merge_request_id, project_id, old_path, new_path, change_type)
         VALUES (1, 1, NULL, 'src/auth.rs', 'modified')",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO mr_file_changes (merge_request_id, project_id, old_path, new_path, change_type)
         VALUES (1, 1, NULL, 'src/lib.rs', 'added')",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO mr_file_changes (merge_request_id, project_id, old_path, new_path, change_type)
         VALUES (1, 1, 'src/old.rs', 'src/new.rs', 'renamed')",
        [],
    )
    .unwrap();

    // Discussion with a note (a DiffNote anchored at src/auth.rs:42).
    conn.execute(
        "INSERT INTO discussions (id, gitlab_discussion_id, project_id, merge_request_id, noteable_type, resolvable, resolved, first_note_at)
         VALUES (1, 'mr_disc_1', 1, 1, 'MergeRequest', 1, 0, 1700000010000)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, note_type, is_system, author_username, body, created_at, updated_at, position, position_new_path, position_new_line)
         VALUES (1, 5001, 1, 1, 'DiffNote', 0, 'alice', 'Please fix this', 1700000010000, 1700000010000, 0, 'src/auth.rs', 42)",
        [],
    )
    .unwrap();

    // Cross-reference (MR closes issue).
    conn.execute(
        "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at)
         VALUES (1, 1000, 1, 5, 'Auth bug', 'opened', 0, 0)",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_project_path, target_entity_iid, reference_type, source_method)
         VALUES (1, 'merge_request', 1, 'issue', 1, 'group/project', 5, 'closes', 'api')",
        [],
    )
    .unwrap();
}
|
||||
|
||||
/// Every scalar metadata field of a fetched MR detail matches the seeded row.
#[test]
fn test_fetch_mr_detail_basic_metadata() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&db);
    setup_mr_detail_data(&db);

    let detail = fetch_mr_detail(&db, &EntityKey::mr(1, 10)).unwrap();
    let meta = &detail.metadata;

    assert_eq!(meta.iid, 10);
    assert_eq!(meta.title, "Fix auth flow");
    assert_eq!(meta.description, "MR description");
    assert_eq!(meta.state, "opened");
    assert!(!meta.draft);
    assert_eq!(meta.author, "alice");
    assert_eq!(meta.source_branch, "fix-auth");
    assert_eq!(meta.target_branch, "main");
    assert_eq!(meta.merge_status, "mergeable");
    assert!(meta.merged_at.is_none());
    assert_eq!(
        meta.web_url,
        "https://gitlab.com/group/project/-/merge_requests/10"
    );
}
|
||||
|
||||
/// Assignees, reviewers, and labels are all collected onto the metadata.
#[test]
fn test_fetch_mr_detail_assignees_reviewers_labels() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&db);
    setup_mr_detail_data(&db);

    let detail = fetch_mr_detail(&db, &EntityKey::mr(1, 10)).unwrap();

    assert_eq!(detail.metadata.assignees, vec!["bob"]);
    assert_eq!(detail.metadata.reviewers, vec!["carol"]);
    assert_eq!(detail.metadata.labels, vec!["backend"]);
}
|
||||
|
||||
/// File changes come back ordered by `new_path`, with the change type and
/// (for renames) the old path preserved, and the count mirrored in metadata.
#[test]
fn test_fetch_mr_detail_file_changes() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&conn);
    setup_mr_detail_data(&conn);

    let key = EntityKey::mr(1, 10);
    let data = fetch_mr_detail(&conn, &key).unwrap();

    assert_eq!(data.file_changes.len(), 3);
    assert_eq!(data.metadata.file_change_count, 3);

    // Ordered by new_path.
    assert_eq!(data.file_changes[0].new_path, "src/auth.rs");
    assert_eq!(data.file_changes[0].change_type, FileChangeType::Modified);

    assert_eq!(data.file_changes[1].new_path, "src/lib.rs");
    assert_eq!(data.file_changes[1].change_type, FileChangeType::Added);

    // The rename keeps its origin path.
    assert_eq!(data.file_changes[2].new_path, "src/new.rs");
    assert_eq!(data.file_changes[2].change_type, FileChangeType::Renamed);
    assert_eq!(data.file_changes[2].old_path.as_deref(), Some("src/old.rs"));
}
|
||||
|
||||
/// The seeded `closes` reference surfaces as a `ClosingMr` cross-ref
/// labelled with the target issue's title.
#[test]
fn test_fetch_mr_detail_cross_refs() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&db);
    setup_mr_detail_data(&db);

    let detail = fetch_mr_detail(&db, &EntityKey::mr(1, 10)).unwrap();
    let refs = &detail.cross_refs;

    assert_eq!(refs.len(), 1);
    assert_eq!(refs[0].kind, CrossRefKind::ClosingMr);
    assert_eq!(refs[0].label, "Auth bug");
}
|
||||
|
||||
/// Discussions on the MR come back with their resolution flags and notes,
/// including diff-note position data (file path and new-side line number).
#[test]
fn test_fetch_mr_discussions() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&conn);
    setup_mr_detail_data(&conn);

    let key = EntityKey::mr(1, 10);
    let discussions = fetch_mr_discussions(&conn, &key).unwrap();

    assert_eq!(discussions.len(), 1);
    assert_eq!(discussions[0].discussion_id, "mr_disc_1");
    assert!(discussions[0].resolvable);
    assert!(!discussions[0].resolved);
    assert_eq!(discussions[0].notes.len(), 1);
    assert_eq!(discussions[0].notes[0].author, "alice");
    assert_eq!(discussions[0].notes[0].body, "Please fix this");
    // The seeded note is a DiffNote, so the position fields are populated.
    assert!(discussions[0].notes[0].is_diff_note);
    assert_eq!(
        discussions[0].notes[0].diff_file_path.as_deref(),
        Some("src/auth.rs")
    );
    assert_eq!(discussions[0].notes[0].diff_new_line, Some(42));
}
|
||||
|
||||
/// Fetching an MR iid that does not exist is an error, not an empty result.
#[test]
fn test_fetch_mr_detail_not_found() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&db);

    // Insert project but no MR.
    db.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')",
        [],
    )
    .unwrap();

    assert!(fetch_mr_detail(&db, &EntityKey::mr(1, 99)).is_err());
}
|
||||
|
||||
/// An MR with no recorded file changes yields an empty change list and a
/// zero change count rather than an error.
#[test]
fn test_fetch_mr_detail_no_file_changes() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&conn);

    conn.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')",
        [],
    )
    .unwrap();
    conn.execute(
        "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, web_url)
         VALUES (1, 2000, 1, 10, 'Empty MR', 'opened', 0, 0, '')",
        [],
    )
    .unwrap();

    let key = EntityKey::mr(1, 10);
    let data = fetch_mr_detail(&conn, &key).unwrap();
    assert!(data.file_changes.is_empty());
    assert_eq!(data.metadata.file_change_count, 0);
}
|
||||
|
||||
/// The `draft` flag round-trips from the database into the metadata.
#[test]
fn test_fetch_mr_detail_draft() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_detail_schema(&db);

    db.execute(
        "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')",
        [],
    )
    .unwrap();
    db.execute(
        "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, draft, created_at, updated_at, web_url)
         VALUES (1, 2000, 1, 10, 'Draft: WIP', 'opened', 1, 0, 0, '')",
        [],
    )
    .unwrap();

    let detail = fetch_mr_detail(&db, &EntityKey::mr(1, 10)).unwrap();
    assert!(detail.metadata.draft);
}
|
||||
}
|
||||
629
crates/lore-tui/src/action/mr_list.rs
Normal file
629
crates/lore-tui/src/action/mr_list.rs
Normal file
@@ -0,0 +1,629 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::state::mr_list::{MrCursor, MrFilter, MrListPage, MrListRow, MrSortField, MrSortOrder};
|
||||
|
||||
/// Page size for MR list queries: rows returned per keyset page (one extra
/// row is fetched internally to detect whether a next page exists).
const MR_PAGE_SIZE: usize = 50;
|
||||
|
||||
/// Fetch a page of merge requests matching the given filter and sort.
|
||||
///
|
||||
/// Uses keyset pagination and snapshot fence — same pattern as issues.
|
||||
pub fn fetch_mr_list(
|
||||
conn: &Connection,
|
||||
filter: &MrFilter,
|
||||
sort_field: MrSortField,
|
||||
sort_order: MrSortOrder,
|
||||
cursor: Option<&MrCursor>,
|
||||
snapshot_fence: Option<i64>,
|
||||
) -> Result<MrListPage> {
|
||||
// -- Build dynamic WHERE conditions and params --------------------------
|
||||
let mut conditions: Vec<String> = Vec::new();
|
||||
let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
|
||||
|
||||
if let Some(pid) = filter.project_id {
|
||||
conditions.push("m.project_id = ?".into());
|
||||
params.push(Box::new(pid));
|
||||
}
|
||||
|
||||
if let Some(ref state) = filter.state {
|
||||
conditions.push("m.state = ?".into());
|
||||
params.push(Box::new(state.clone()));
|
||||
}
|
||||
|
||||
if let Some(ref author) = filter.author {
|
||||
conditions.push("m.author_username = ?".into());
|
||||
params.push(Box::new(author.clone()));
|
||||
}
|
||||
|
||||
if let Some(draft) = filter.draft {
|
||||
conditions.push("m.draft = ?".into());
|
||||
params.push(Box::new(i64::from(draft)));
|
||||
}
|
||||
|
||||
if let Some(ref target) = filter.target_branch {
|
||||
conditions.push("m.target_branch = ?".into());
|
||||
params.push(Box::new(target.clone()));
|
||||
}
|
||||
|
||||
if let Some(ref source) = filter.source_branch {
|
||||
conditions.push("m.source_branch = ?".into());
|
||||
params.push(Box::new(source.clone()));
|
||||
}
|
||||
|
||||
// Filter: reviewer (via join on mr_reviewers)
|
||||
let reviewer_join = if let Some(ref reviewer) = filter.reviewer {
|
||||
conditions.push("rv.username = ?".into());
|
||||
params.push(Box::new(reviewer.clone()));
|
||||
"JOIN mr_reviewers rv ON rv.merge_request_id = m.id"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
|
||||
// Filter: label (via join on mr_labels + labels)
|
||||
let label_join = if let Some(ref label) = filter.label {
|
||||
conditions.push("fl.name = ?".into());
|
||||
params.push(Box::new(label.clone()));
|
||||
"JOIN mr_labels fil ON fil.merge_request_id = m.id \
|
||||
JOIN labels fl ON fl.id = fil.label_id"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
|
||||
// Filter: free_text (LIKE on title)
|
||||
if let Some(ref text) = filter.free_text {
|
||||
conditions.push("m.title LIKE ?".into());
|
||||
params.push(Box::new(format!("%{text}%")));
|
||||
}
|
||||
|
||||
// Snapshot fence
|
||||
if let Some(fence) = snapshot_fence {
|
||||
conditions.push("m.updated_at <= ?".into());
|
||||
params.push(Box::new(fence));
|
||||
}
|
||||
|
||||
// -- Count query (before cursor filter) ---------------------------------
|
||||
let where_clause = if conditions.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("WHERE {}", conditions.join(" AND "))
|
||||
};
|
||||
|
||||
let count_sql = format!(
|
||||
"SELECT COUNT(DISTINCT m.id) FROM merge_requests m \
|
||||
JOIN projects p ON p.id = m.project_id \
|
||||
{reviewer_join} {label_join} {where_clause}"
|
||||
);
|
||||
let count_params: Vec<&dyn rusqlite::types::ToSql> =
|
||||
params.iter().map(|b| b.as_ref()).collect();
|
||||
|
||||
let total_count: i64 = conn
|
||||
.query_row(&count_sql, count_params.as_slice(), |r| r.get(0))
|
||||
.context("counting MRs for list")?;
|
||||
|
||||
// -- Keyset cursor condition -------------------------------------------
|
||||
let (sort_col, sort_dir) = mr_sort_column_and_dir(sort_field, sort_order);
|
||||
let cursor_op = if sort_dir == "DESC" { "<" } else { ">" };
|
||||
|
||||
if let Some(c) = cursor {
|
||||
conditions.push(format!("({sort_col}, m.iid) {cursor_op} (?, ?)"));
|
||||
params.push(Box::new(c.updated_at));
|
||||
params.push(Box::new(c.iid));
|
||||
}
|
||||
|
||||
// -- Data query ---------------------------------------------------------
|
||||
let where_clause_full = if conditions.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("WHERE {}", conditions.join(" AND "))
|
||||
};
|
||||
|
||||
let data_sql = format!(
|
||||
"SELECT p.path_with_namespace, m.iid, m.title, m.state, \
|
||||
m.author_username, m.target_branch, m.updated_at, m.draft, \
|
||||
GROUP_CONCAT(DISTINCT l.name) AS label_names \
|
||||
FROM merge_requests m \
|
||||
JOIN projects p ON p.id = m.project_id \
|
||||
{reviewer_join} \
|
||||
{label_join} \
|
||||
LEFT JOIN mr_labels ml ON ml.merge_request_id = m.id \
|
||||
LEFT JOIN labels l ON l.id = ml.label_id \
|
||||
{where_clause_full} \
|
||||
GROUP BY m.id \
|
||||
ORDER BY {sort_col} {sort_dir}, m.iid {sort_dir} \
|
||||
LIMIT ?"
|
||||
);
|
||||
|
||||
let fetch_limit = (MR_PAGE_SIZE + 1) as i64;
|
||||
params.push(Box::new(fetch_limit));
|
||||
|
||||
let all_params: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|b| b.as_ref()).collect();
|
||||
|
||||
let mut stmt = conn.prepare(&data_sql).context("preparing MR list query")?;
|
||||
|
||||
let rows_result = stmt
|
||||
.query_map(all_params.as_slice(), |row| {
|
||||
let project_path: String = row.get(0)?;
|
||||
let iid: i64 = row.get(1)?;
|
||||
let title: String = row.get::<_, Option<String>>(2)?.unwrap_or_default();
|
||||
let state: String = row.get::<_, Option<String>>(3)?.unwrap_or_default();
|
||||
let author: String = row.get::<_, Option<String>>(4)?.unwrap_or_default();
|
||||
let target_branch: String = row.get::<_, Option<String>>(5)?.unwrap_or_default();
|
||||
let updated_at: i64 = row.get(6)?;
|
||||
let draft_int: i64 = row.get(7)?;
|
||||
let label_names: Option<String> = row.get(8)?;
|
||||
|
||||
let labels = label_names
|
||||
.map(|s| s.split(',').map(String::from).collect())
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(MrListRow {
|
||||
project_path,
|
||||
iid,
|
||||
title,
|
||||
state,
|
||||
author,
|
||||
target_branch,
|
||||
labels,
|
||||
updated_at,
|
||||
draft: draft_int != 0,
|
||||
})
|
||||
})
|
||||
.context("querying MR list")?;
|
||||
|
||||
let mut rows: Vec<MrListRow> = Vec::new();
|
||||
for row in rows_result {
|
||||
rows.push(row.context("reading MR list row")?);
|
||||
}
|
||||
|
||||
let has_next = rows.len() > MR_PAGE_SIZE;
|
||||
if has_next {
|
||||
rows.truncate(MR_PAGE_SIZE);
|
||||
}
|
||||
|
||||
let next_cursor = if has_next {
|
||||
rows.last().map(|r| MrCursor {
|
||||
updated_at: r.updated_at,
|
||||
iid: r.iid,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
#[allow(clippy::cast_sign_loss)]
|
||||
Ok(MrListPage {
|
||||
rows,
|
||||
next_cursor,
|
||||
total_count: total_count as u64,
|
||||
})
|
||||
}
|
||||
|
||||
/// Map MR sort field + order to SQL column name and direction keyword.
|
||||
fn mr_sort_column_and_dir(field: MrSortField, order: MrSortOrder) -> (&'static str, &'static str) {
|
||||
let col = match field {
|
||||
MrSortField::UpdatedAt => "m.updated_at",
|
||||
MrSortField::Iid => "m.iid",
|
||||
MrSortField::Title => "m.title",
|
||||
MrSortField::State => "m.state",
|
||||
MrSortField::Author => "m.author_username",
|
||||
MrSortField::TargetBranch => "m.target_branch",
|
||||
};
|
||||
let dir = match order {
|
||||
MrSortOrder::Desc => "DESC",
|
||||
MrSortOrder::Asc => "ASC",
|
||||
};
|
||||
(col, dir)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
/// Create the schema needed for MR list tests: projects, merge_requests
/// (with the columns the list query reads), labels, and the mr_labels /
/// mr_reviewers join tables.
fn create_mr_list_schema(conn: &Connection) {
    conn.execute_batch(
        "
        CREATE TABLE projects (
            id INTEGER PRIMARY KEY,
            gitlab_project_id INTEGER UNIQUE NOT NULL,
            path_with_namespace TEXT NOT NULL
        );
        CREATE TABLE merge_requests (
            id INTEGER PRIMARY KEY,
            gitlab_id INTEGER UNIQUE NOT NULL,
            project_id INTEGER NOT NULL,
            iid INTEGER NOT NULL,
            title TEXT,
            state TEXT,
            author_username TEXT,
            created_at INTEGER,
            updated_at INTEGER,
            last_seen_at INTEGER NOT NULL,
            draft INTEGER NOT NULL DEFAULT 0,
            target_branch TEXT,
            source_branch TEXT
        );
        CREATE TABLE labels (
            id INTEGER PRIMARY KEY,
            gitlab_id INTEGER,
            project_id INTEGER NOT NULL,
            name TEXT NOT NULL,
            color TEXT,
            description TEXT
        );
        CREATE TABLE mr_labels (
            merge_request_id INTEGER NOT NULL,
            label_id INTEGER NOT NULL,
            PRIMARY KEY(merge_request_id, label_id)
        );
        CREATE TABLE mr_reviewers (
            merge_request_id INTEGER NOT NULL,
            username TEXT NOT NULL,
            PRIMARY KEY(merge_request_id, username)
        );
        ",
    )
    .expect("create MR list schema");
}
|
||||
|
||||
/// Insert a test MR with full fields.
///
/// `gitlab_id` is derived from the iid (`iid * 100 + 50`) to satisfy the
/// UNIQUE constraint, the title is `"MR {iid}"`, and `updated_at` is also
/// bound for `created_at` and `last_seen_at` (parameter `?8` used three
/// times).
fn insert_mr_full(
    conn: &Connection,
    iid: i64,
    state: &str,
    author: &str,
    target_branch: &str,
    draft: bool,
    updated_at: i64,
) {
    conn.execute(
        "INSERT INTO merge_requests \
         (gitlab_id, project_id, iid, title, state, author_username, \
         target_branch, draft, created_at, updated_at, last_seen_at) \
         VALUES (?1, 1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?8, ?8)",
        rusqlite::params![
            iid * 100 + 50,
            iid,
            format!("MR {iid}"),
            state,
            author,
            target_branch,
            i64::from(draft),
            updated_at,
        ],
    )
    .expect("insert mr full");
}
|
||||
|
||||
/// Attach a label to an MR.
|
||||
fn attach_mr_label(conn: &Connection, mr_iid: i64, label_name: &str) {
|
||||
let mr_id: i64 = conn
|
||||
.query_row(
|
||||
"SELECT id FROM merge_requests WHERE iid = ?",
|
||||
[mr_iid],
|
||||
|r| r.get(0),
|
||||
)
|
||||
.expect("find mr");
|
||||
|
||||
conn.execute(
|
||||
"INSERT OR IGNORE INTO labels (project_id, name) VALUES (1, ?)",
|
||||
[label_name],
|
||||
)
|
||||
.expect("insert label");
|
||||
let label_id: i64 = conn
|
||||
.query_row("SELECT id FROM labels WHERE name = ?", [label_name], |r| {
|
||||
r.get(0)
|
||||
})
|
||||
.expect("find label");
|
||||
|
||||
conn.execute(
|
||||
"INSERT INTO mr_labels (merge_request_id, label_id) VALUES (?, ?)",
|
||||
[mr_id, label_id],
|
||||
)
|
||||
.expect("attach mr label");
|
||||
}
|
||||
|
||||
/// Add a reviewer to an MR.
|
||||
fn add_mr_reviewer(conn: &Connection, mr_iid: i64, username: &str) {
|
||||
let mr_id: i64 = conn
|
||||
.query_row(
|
||||
"SELECT id FROM merge_requests WHERE iid = ?",
|
||||
[mr_iid],
|
||||
|r| r.get(0),
|
||||
)
|
||||
.expect("find mr");
|
||||
|
||||
conn.execute(
|
||||
"INSERT INTO mr_reviewers (merge_request_id, username) VALUES (?, ?)",
|
||||
rusqlite::params![mr_id, username],
|
||||
)
|
||||
.expect("add mr reviewer");
|
||||
}
|
||||
|
||||
/// Seed five MRs in project 1 with a spread of states, authors, branches,
/// draft flags, labels, and reviewers:
///
/// | iid | state  | author  | target  | draft | labels          | reviewer |
/// |-----|--------|---------|---------|-------|-----------------|----------|
/// | 1   | opened | alice   | main    | no    | backend, urgent | diana    |
/// | 2   | opened | bob     | main    | yes   | frontend        | diana    |
/// | 3   | merged | alice   | develop | no    | —               | edward   |
/// | 4   | opened | charlie | main    | yes   | backend         | —        |
/// | 5   | closed | bob     | release | no    | —               | —        |
///
/// `updated_at` decreases with iid, so descending updated-at order yields
/// iids 1..5.
fn setup_mr_list_data(conn: &Connection) {
    let base = 1_700_000_000_000_i64;
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace) VALUES (1, 'group/project')",
        [],
    )
    .unwrap();

    insert_mr_full(conn, 1, "opened", "alice", "main", false, base - 10_000);
    insert_mr_full(conn, 2, "opened", "bob", "main", true, base - 20_000);
    insert_mr_full(conn, 3, "merged", "alice", "develop", false, base - 30_000);
    insert_mr_full(conn, 4, "opened", "charlie", "main", true, base - 40_000);
    insert_mr_full(conn, 5, "closed", "bob", "release", false, base - 50_000);

    attach_mr_label(conn, 1, "backend");
    attach_mr_label(conn, 1, "urgent");
    attach_mr_label(conn, 2, "frontend");
    attach_mr_label(conn, 4, "backend");

    add_mr_reviewer(conn, 1, "diana");
    add_mr_reviewer(conn, 2, "diana");
    add_mr_reviewer(conn, 3, "edward");
}
|
||||
|
||||
/// With no filter, all five seeded MRs come back newest-first in a single
/// page with no next-page cursor.
#[test]
fn test_fetch_mr_list_basic() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&db);
    setup_mr_list_data(&db);

    let page = fetch_mr_list(
        &db,
        &MrFilter::default(),
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 5);
    assert_eq!(page.rows.len(), 5);
    assert_eq!(page.rows[0].iid, 1); // newest first
    assert_eq!(page.rows[4].iid, 5);
    assert!(page.next_cursor.is_none());
}
|
||||
|
||||
/// Filtering on state narrows the results to matching MRs only.
#[test]
fn test_fetch_mr_list_filter_state() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&db);
    setup_mr_list_data(&db);

    let opened_only = MrFilter {
        state: Some("opened".into()),
        ..Default::default()
    };
    let page = fetch_mr_list(
        &db,
        &opened_only,
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 3);
    assert!(page.rows.iter().all(|row| row.state == "opened"));
}
|
||||
|
||||
/// Filtering on the draft flag returns only draft MRs (iids 2 and 4).
#[test]
fn test_fetch_mr_list_filter_draft() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&conn);
    setup_mr_list_data(&conn);

    let filter = MrFilter {
        draft: Some(true),
        ..Default::default()
    };
    let page = fetch_mr_list(
        &conn,
        &filter,
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 2); // MRs 2 and 4
    assert!(page.rows.iter().all(|r| r.draft));
}
|
||||
|
||||
/// Filtering on target branch returns only MRs targeting that branch.
#[test]
fn test_fetch_mr_list_filter_target_branch() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&conn);
    setup_mr_list_data(&conn);

    let filter = MrFilter {
        target_branch: Some("main".into()),
        ..Default::default()
    };
    let page = fetch_mr_list(
        &conn,
        &filter,
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 3); // MRs 1, 2, 4
    assert!(page.rows.iter().all(|r| r.target_branch == "main"));
}
|
||||
|
||||
/// Filtering on reviewer joins through `mr_reviewers`; diana reviews
/// MRs 1 and 2.
#[test]
fn test_fetch_mr_list_filter_reviewer() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&db);
    setup_mr_list_data(&db);

    let by_reviewer = MrFilter {
        reviewer: Some("diana".into()),
        ..Default::default()
    };
    let page = fetch_mr_list(
        &db,
        &by_reviewer,
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 2); // MRs 1 and 2
}
|
||||
|
||||
/// Filtering on a label returns only MRs carrying that label.
#[test]
fn test_fetch_mr_list_filter_label() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&conn);
    setup_mr_list_data(&conn);

    let filter = MrFilter {
        label: Some("backend".into()),
        ..Default::default()
    };
    let page = fetch_mr_list(
        &conn,
        &filter,
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 2); // MRs 1 and 4
}
|
||||
|
||||
/// Labels are aggregated per MR row; an unlabelled MR yields an empty list.
#[test]
fn test_fetch_mr_list_labels_aggregated() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&db);
    setup_mr_list_data(&db);

    let page = fetch_mr_list(
        &db,
        &MrFilter::default(),
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    let labelled = page.rows.iter().find(|row| row.iid == 1).unwrap();
    assert_eq!(labelled.labels.len(), 2);
    assert!(labelled.labels.contains(&"backend".to_string()));
    assert!(labelled.labels.contains(&"urgent".to_string()));

    let unlabelled = page.rows.iter().find(|row| row.iid == 5).unwrap();
    assert!(unlabelled.labels.is_empty());
}
|
||||
|
||||
/// Ascending sort reverses the default order: oldest-updated MR first.
#[test]
fn test_fetch_mr_list_sort_ascending() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&conn);
    setup_mr_list_data(&conn);

    let filter = MrFilter::default();
    let page = fetch_mr_list(
        &conn,
        &filter,
        MrSortField::UpdatedAt,
        MrSortOrder::Asc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.rows[0].iid, 5); // oldest first
    assert_eq!(page.rows[4].iid, 1);
}
|
||||
|
||||
/// A snapshot fence hides rows updated after the fence timestamp; with the
/// fence between MR 2 and MR 3, only MRs 3-5 remain.
#[test]
fn test_fetch_mr_list_snapshot_fence() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&db);
    setup_mr_list_data(&db);

    let fence = 1_700_000_000_000_i64 - 25_000;
    let page = fetch_mr_list(
        &db,
        &MrFilter::default(),
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        Some(fence),
    )
    .unwrap();

    assert_eq!(page.total_count, 3);
    assert!(page.rows.iter().all(|row| row.updated_at <= fence));
}
|
||||
|
||||
/// An empty merge_requests table yields an empty page with zero count and
/// no next-page cursor.
#[test]
fn test_fetch_mr_list_empty() {
    let conn = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&conn);
    conn.execute(
        "INSERT INTO projects (gitlab_project_id, path_with_namespace) VALUES (1, 'g/p')",
        [],
    )
    .unwrap();

    let page = fetch_mr_list(
        &conn,
        &MrFilter::default(),
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 0);
    assert!(page.rows.is_empty());
    assert!(page.next_cursor.is_none());
}
|
||||
|
||||
/// Free-text filtering does a substring (LIKE) match on the title.
#[test]
fn test_fetch_mr_list_free_text() {
    let db = Connection::open_in_memory().unwrap();
    create_mr_list_schema(&db);
    setup_mr_list_data(&db);

    let text_filter = MrFilter {
        free_text: Some("MR 3".into()),
        ..Default::default()
    };
    let page = fetch_mr_list(
        &db,
        &text_filter,
        MrSortField::UpdatedAt,
        MrSortOrder::Desc,
        None,
        None,
    )
    .unwrap();

    assert_eq!(page.total_count, 1);
    assert_eq!(page.rows[0].iid, 3);
}
|
||||
}
|
||||
361
crates/lore-tui/src/action/search.rs
Normal file
361
crates/lore-tui/src/action/search.rs
Normal file
@@ -0,0 +1,361 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::message::{EntityKey, EntityKind, SearchMode, SearchResult};
|
||||
use crate::state::search::SearchCapabilities;
|
||||
|
||||
/// Probe the database to detect available search indexes.
///
/// Checks for FTS5 documents and embedding metadata. Returns capabilities
/// that the UI uses to gate available search modes.
///
/// Probe failures (e.g. a missing table) are treated as "capability
/// absent" via `unwrap_or` fallbacks rather than propagated as errors, so
/// this never fails on a partially-initialized database.
pub fn fetch_search_capabilities(conn: &Connection) -> Result<SearchCapabilities> {
    // FTS: check if documents_fts has rows via the docsize shadow table
    // (B-tree, not virtual table scan).
    let has_fts = conn
        .query_row(
            "SELECT EXISTS(SELECT 1 FROM documents_fts_docsize LIMIT 1)",
            [],
            |r| r.get::<_, bool>(0),
        )
        .unwrap_or(false);

    // Embeddings: count rows in embedding_metadata.
    let embedding_count: i64 = conn
        .query_row("SELECT COUNT(*) FROM embedding_metadata", [], |r| r.get(0))
        .unwrap_or(0);

    let has_embeddings = embedding_count > 0;

    // Coverage: embeddings / documents percentage.
    let doc_count: i64 = conn
        .query_row("SELECT COUNT(*) FROM documents", [], |r| r.get(0))
        .unwrap_or(0);

    // NOTE(review): presumably at most one embedding row per document —
    // the `.min(100.0)` clamp guards the ratio if that doesn't hold.
    let embedding_coverage_pct = if doc_count > 0 {
        (embedding_count as f32 / doc_count as f32 * 100.0).min(100.0)
    } else {
        0.0
    };

    Ok(SearchCapabilities {
        has_fts,
        has_embeddings,
        embedding_coverage_pct,
    })
}
|
||||
|
||||
/// Execute a search query against the local database.
|
||||
///
|
||||
/// Dispatches to the correct search backend based on mode:
|
||||
/// - Lexical: FTS5 only (documents_fts)
|
||||
/// - Hybrid: FTS5 + vector merge via RRF
|
||||
/// - Semantic: vector cosine similarity only
|
||||
///
|
||||
/// Returns results sorted by score descending.
|
||||
pub fn execute_search(
|
||||
conn: &Connection,
|
||||
query: &str,
|
||||
mode: SearchMode,
|
||||
limit: usize,
|
||||
) -> Result<Vec<SearchResult>> {
|
||||
if query.trim().is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
match mode {
|
||||
SearchMode::Lexical => execute_fts_search(conn, query, limit),
|
||||
SearchMode::Hybrid | SearchMode::Semantic => {
|
||||
// Hybrid and Semantic require the full search pipeline from the
|
||||
// core crate (async, Ollama client). For now, fall back to FTS
|
||||
// for Hybrid and return empty for Semantic-only.
|
||||
// TODO: Wire up async search dispatch when core search is integrated.
|
||||
if mode == SearchMode::Hybrid {
|
||||
execute_fts_search(conn, query, limit)
|
||||
} else {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// FTS5 full-text search against the documents table.
|
||||
fn execute_fts_search(conn: &Connection, query: &str, limit: usize) -> Result<Vec<SearchResult>> {
|
||||
// Sanitize the query for FTS5 (escape special chars, wrap terms in quotes).
|
||||
let safe_query = sanitize_fts_query(query);
|
||||
if safe_query.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
// Resolve project_path via JOIN through projects table.
|
||||
// Resolve iid via JOIN through the source entity table (issues or merge_requests).
|
||||
// snippet column 1 = content_text (column 0 is title).
|
||||
let mut stmt = conn
|
||||
.prepare(
|
||||
"SELECT d.source_type, d.source_id, d.title, d.project_id,
|
||||
p.path_with_namespace,
|
||||
snippet(documents_fts, 1, '>>>', '<<<', '...', 32) AS snip,
|
||||
bm25(documents_fts) AS score,
|
||||
COALESCE(i.iid, mr.iid) AS entity_iid
|
||||
FROM documents_fts
|
||||
JOIN documents d ON documents_fts.rowid = d.id
|
||||
JOIN projects p ON p.id = d.project_id
|
||||
LEFT JOIN issues i ON d.source_type = 'issue' AND i.id = d.source_id
|
||||
LEFT JOIN merge_requests mr ON d.source_type = 'merge_request' AND mr.id = d.source_id
|
||||
WHERE documents_fts MATCH ?1
|
||||
ORDER BY score
|
||||
LIMIT ?2",
|
||||
)
|
||||
.context("preparing FTS search query")?;
|
||||
|
||||
let rows = stmt
|
||||
.query_map(rusqlite::params![safe_query, limit as i64], |row| {
|
||||
let source_type: String = row.get(0)?;
|
||||
let _source_id: i64 = row.get(1)?;
|
||||
let title: String = row.get::<_, Option<String>>(2)?.unwrap_or_default();
|
||||
let project_id: i64 = row.get(3)?;
|
||||
let project_path: String = row.get::<_, Option<String>>(4)?.unwrap_or_default();
|
||||
let snippet: String = row.get::<_, Option<String>>(5)?.unwrap_or_default();
|
||||
let score: f64 = row.get(6)?;
|
||||
let entity_iid: Option<i64> = row.get(7)?;
|
||||
Ok((
|
||||
source_type,
|
||||
project_id,
|
||||
title,
|
||||
project_path,
|
||||
snippet,
|
||||
score,
|
||||
entity_iid,
|
||||
))
|
||||
})
|
||||
.context("executing FTS search")?;
|
||||
|
||||
let mut results = Vec::new();
|
||||
for row in rows {
|
||||
let (source_type, project_id, title, project_path, snippet, score, entity_iid) =
|
||||
row.context("reading FTS search row")?;
|
||||
|
||||
let kind = match source_type.as_str() {
|
||||
"issue" => EntityKind::Issue,
|
||||
"merge_request" | "mr" => EntityKind::MergeRequest,
|
||||
_ => continue, // Skip unknown source types (discussion, note).
|
||||
};
|
||||
|
||||
// Skip if we couldn't resolve the entity's iid (orphaned document).
|
||||
let Some(iid) = entity_iid else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let key = EntityKey {
|
||||
project_id,
|
||||
iid,
|
||||
kind,
|
||||
};
|
||||
|
||||
results.push(SearchResult {
|
||||
key,
|
||||
title,
|
||||
score: score.abs(), // bm25 returns negative scores; lower = better.
|
||||
snippet,
|
||||
project_path,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
/// Sanitize a user query for FTS5 MATCH syntax.
///
/// Each whitespace-separated term is stripped of any embedded double
/// quotes and re-wrapped in quotes, so user-typed FTS5 operators
/// (AND, OR, NOT, *, etc.) are treated as literal terms instead of
/// being interpreted as query syntax.
fn sanitize_fts_query(query: &str) -> String {
    let mut quoted_terms: Vec<String> = Vec::new();
    for raw_term in query.split_whitespace() {
        // Drop pre-existing quotes so nesting can't break the syntax,
        // then re-quote whatever is left.
        let bare: String = raw_term.chars().filter(|&c| c != '"').collect();
        if !bare.is_empty() {
            quoted_terms.push(format!("\"{bare}\""));
        }
    }
    quoted_terms.join(" ")
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Create the minimal schema needed for search queries.
    ///
    /// Mirrors only the columns the queries in this module actually touch;
    /// it is intentionally smaller than the real migration schema.
    fn create_dashboard_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT NOT NULL,
                author_username TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_username TEXT,
                created_at INTEGER,
                updated_at INTEGER,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                noteable_type TEXT NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE documents (
                id INTEGER PRIMARY KEY,
                source_type TEXT NOT NULL,
                source_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                content_text TEXT NOT NULL,
                content_hash TEXT NOT NULL
            );
            CREATE TABLE embedding_metadata (
                document_id INTEGER NOT NULL,
                chunk_index INTEGER NOT NULL DEFAULT 0,
                model TEXT NOT NULL,
                dims INTEGER NOT NULL,
                document_hash TEXT NOT NULL,
                chunk_hash TEXT NOT NULL,
                created_at INTEGER NOT NULL,
                PRIMARY KEY(document_id, chunk_index)
            );
            CREATE TABLE sync_runs (
                id INTEGER PRIMARY KEY,
                started_at INTEGER NOT NULL,
                heartbeat_at INTEGER NOT NULL,
                finished_at INTEGER,
                status TEXT NOT NULL,
                command TEXT NOT NULL,
                error TEXT
            );
            ",
        )
        .expect("create dashboard schema");
    }

    #[test]
    fn test_sanitize_fts_query_wraps_terms() {
        // Plain terms get individually quoted.
        let result = sanitize_fts_query("hello world");
        assert_eq!(result, r#""hello" "world""#);
    }

    #[test]
    fn test_sanitize_fts_query_strips_quotes() {
        // Pre-quoted input round-trips: quotes are stripped then re-added.
        let result = sanitize_fts_query(r#""hello" "world""#);
        assert_eq!(result, r#""hello" "world""#);
    }

    #[test]
    fn test_sanitize_fts_query_empty() {
        // Empty and whitespace-only queries sanitize to the empty string.
        assert_eq!(sanitize_fts_query(""), "");
        assert_eq!(sanitize_fts_query("   "), "");
    }

    #[test]
    fn test_sanitize_fts_query_special_chars() {
        // FTS5 operators should be safely wrapped in quotes.
        let result = sanitize_fts_query("NOT AND OR");
        assert_eq!(result, r#""NOT" "AND" "OR""#);
    }

    #[test]
    fn test_fetch_search_capabilities_no_tables() {
        // Base schema without documents_fts or embeddings: no index at all.
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);

        let caps = fetch_search_capabilities(&conn).unwrap();
        assert!(!caps.has_fts);
        assert!(!caps.has_embeddings);
        assert!(!caps.has_any_index());
    }

    #[test]
    fn test_fetch_search_capabilities_with_fts() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);
        // Create FTS table and its shadow table.
        conn.execute_batch(
            "CREATE VIRTUAL TABLE documents_fts USING fts5(content);
             INSERT INTO documents_fts(content) VALUES ('test document');",
        )
        .unwrap();

        let caps = fetch_search_capabilities(&conn).unwrap();
        assert!(caps.has_fts);
        assert!(!caps.has_embeddings);
    }

    #[test]
    fn test_fetch_search_capabilities_with_embeddings() {
        let conn = Connection::open_in_memory().unwrap();
        create_dashboard_schema(&conn);
        // Insert a document so coverage calculation works.
        conn.execute_batch(
            "INSERT INTO documents(id, source_type, source_id, project_id, content_text, content_hash)
             VALUES (1, 'issue', 1, 1, 'body text', 'abc');
             INSERT INTO embedding_metadata(document_id, chunk_index, model, dims, document_hash, chunk_hash, created_at)
             VALUES (1, 0, 'test', 384, 'abc', 'def', 1700000000);",
        )
        .unwrap();

        let caps = fetch_search_capabilities(&conn).unwrap();
        assert!(!caps.has_fts);
        assert!(caps.has_embeddings);
        // One embedded document out of one: coverage must be non-zero.
        assert!(caps.embedding_coverage_pct > 0.0);
    }

    #[test]
    fn test_execute_search_empty_query_returns_empty() {
        // Empty queries short-circuit before touching the DB, so no schema needed.
        let conn = Connection::open_in_memory().unwrap();
        let results = execute_search(&conn, "", SearchMode::Lexical, 10).unwrap();
        assert!(results.is_empty());
    }

    #[test]
    fn test_execute_search_whitespace_only_returns_empty() {
        // Whitespace-only queries are treated the same as empty ones.
        let conn = Connection::open_in_memory().unwrap();
        let results = execute_search(&conn, "   ", SearchMode::Lexical, 10).unwrap();
        assert!(results.is_empty());
    }
}
|
||||
845
crates/lore-tui/src/action/timeline.rs
Normal file
845
crates/lore-tui/src/action/timeline.rs
Normal file
@@ -0,0 +1,845 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use rusqlite::Connection;
|
||||
|
||||
use crate::message::{EntityKey, EntityKind, TimelineEvent, TimelineEventKind};
|
||||
use crate::state::timeline::TimelineScope;
|
||||
|
||||
/// Internal filter resolved from a [`TimelineScope`].
///
/// Translates the user-facing scope (which uses `EntityKey` with project_id + iid)
/// into internal DB ids for efficient querying.
enum TimelineFilter {
    /// No filtering — return all events.
    All,
    /// Filter to events for a specific issue (internal DB id).
    Issue(i64),
    /// Filter to events for a specific MR (internal DB id).
    MergeRequest(i64),
    /// Filter to events by a specific actor.
    ///
    /// Matched against `author_username` on entity tables and
    /// `actor_username` on the resource event tables.
    Actor(String),
}
|
||||
|
||||
/// Resolve a [`TimelineScope`] into a concrete [`TimelineFilter`].
|
||||
fn resolve_timeline_scope(conn: &Connection, scope: &TimelineScope) -> Result<TimelineFilter> {
|
||||
match scope {
|
||||
TimelineScope::All => Ok(TimelineFilter::All),
|
||||
TimelineScope::Entity(key) => {
|
||||
let (table, kind_label) = match key.kind {
|
||||
EntityKind::Issue => ("issues", "issue"),
|
||||
EntityKind::MergeRequest => ("merge_requests", "merge request"),
|
||||
};
|
||||
let sql = format!("SELECT id FROM {table} WHERE project_id = ?1 AND iid = ?2");
|
||||
let id: i64 = conn
|
||||
.query_row(&sql, rusqlite::params![key.project_id, key.iid], |r| {
|
||||
r.get(0)
|
||||
})
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"resolving {kind_label} #{} in project {}",
|
||||
key.iid, key.project_id
|
||||
)
|
||||
})?;
|
||||
match key.kind {
|
||||
EntityKind::Issue => Ok(TimelineFilter::Issue(id)),
|
||||
EntityKind::MergeRequest => Ok(TimelineFilter::MergeRequest(id)),
|
||||
}
|
||||
}
|
||||
TimelineScope::Author(name) => Ok(TimelineFilter::Actor(name.clone())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch timeline events from raw resource event tables.
|
||||
///
|
||||
/// Queries `issues`/`merge_requests` for Created events, plus
|
||||
/// `resource_state_events`, `resource_label_events`, and
|
||||
/// `resource_milestone_events` for lifecycle events. Results are sorted
|
||||
/// by timestamp descending (most recent first) and truncated to `limit`.
|
||||
pub fn fetch_timeline_events(
|
||||
conn: &Connection,
|
||||
scope: &TimelineScope,
|
||||
limit: usize,
|
||||
) -> Result<Vec<TimelineEvent>> {
|
||||
let filter = resolve_timeline_scope(conn, scope)?;
|
||||
let mut events = Vec::new();
|
||||
|
||||
collect_tl_created_events(conn, &filter, &mut events)?;
|
||||
collect_tl_state_events(conn, &filter, &mut events)?;
|
||||
collect_tl_label_events(conn, &filter, &mut events)?;
|
||||
collect_tl_milestone_events(conn, &filter, &mut events)?;
|
||||
|
||||
// Sort by timestamp descending (most recent first), with stable tiebreak.
|
||||
events.sort_by(|a, b| {
|
||||
b.timestamp_ms
|
||||
.cmp(&a.timestamp_ms)
|
||||
.then_with(|| a.entity_key.kind.cmp(&b.entity_key.kind))
|
||||
.then_with(|| a.entity_key.iid.cmp(&b.entity_key.iid))
|
||||
});
|
||||
|
||||
events.truncate(limit);
|
||||
Ok(events)
|
||||
}
|
||||
|
||||
/// Collect Created events from issues and merge_requests tables.
|
||||
fn collect_tl_created_events(
|
||||
conn: &Connection,
|
||||
filter: &TimelineFilter,
|
||||
events: &mut Vec<TimelineEvent>,
|
||||
) -> Result<()> {
|
||||
// Issue created events.
|
||||
if !matches!(filter, TimelineFilter::MergeRequest(_)) {
|
||||
let (where_clause, params) = match filter {
|
||||
TimelineFilter::All => (
|
||||
"1=1".to_string(),
|
||||
Vec::<Box<dyn rusqlite::types::ToSql>>::new(),
|
||||
),
|
||||
TimelineFilter::Issue(id) => (
|
||||
"i.id = ?1".to_string(),
|
||||
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
||||
),
|
||||
TimelineFilter::Actor(name) => (
|
||||
"i.author_username = ?1".to_string(),
|
||||
vec![Box::new(name.clone()) as Box<dyn rusqlite::types::ToSql>],
|
||||
),
|
||||
TimelineFilter::MergeRequest(_) => unreachable!(),
|
||||
};
|
||||
|
||||
let sql = format!(
|
||||
"SELECT i.created_at, i.iid, i.title, i.author_username, i.project_id, p.path_with_namespace
|
||||
FROM issues i
|
||||
JOIN projects p ON p.id = i.project_id
|
||||
WHERE {where_clause}"
|
||||
);
|
||||
|
||||
let mut stmt = conn
|
||||
.prepare(&sql)
|
||||
.context("preparing issue created query")?;
|
||||
let param_refs: Vec<&dyn rusqlite::types::ToSql> =
|
||||
params.iter().map(AsRef::as_ref).collect();
|
||||
let rows = stmt
|
||||
.query_map(param_refs.as_slice(), |row| {
|
||||
Ok((
|
||||
row.get::<_, i64>(0)?,
|
||||
row.get::<_, i64>(1)?,
|
||||
row.get::<_, Option<String>>(2)?,
|
||||
row.get::<_, Option<String>>(3)?,
|
||||
row.get::<_, i64>(4)?,
|
||||
row.get::<_, String>(5)?,
|
||||
))
|
||||
})
|
||||
.context("querying issue created events")?;
|
||||
|
||||
for row in rows {
|
||||
let (created_at, iid, title, author, project_id, project_path) =
|
||||
row.context("reading issue created row")?;
|
||||
let title_str = title.as_deref().unwrap_or("(untitled)");
|
||||
events.push(TimelineEvent {
|
||||
timestamp_ms: created_at,
|
||||
entity_key: EntityKey::issue(project_id, iid),
|
||||
event_kind: TimelineEventKind::Created,
|
||||
summary: format!("Issue #{iid} created: {title_str}"),
|
||||
detail: title,
|
||||
actor: author,
|
||||
project_path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// MR created events.
|
||||
if !matches!(filter, TimelineFilter::Issue(_)) {
|
||||
let (where_clause, params) = match filter {
|
||||
TimelineFilter::All => (
|
||||
"1=1".to_string(),
|
||||
Vec::<Box<dyn rusqlite::types::ToSql>>::new(),
|
||||
),
|
||||
TimelineFilter::MergeRequest(id) => (
|
||||
"mr.id = ?1".to_string(),
|
||||
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
||||
),
|
||||
TimelineFilter::Actor(name) => (
|
||||
"mr.author_username = ?1".to_string(),
|
||||
vec![Box::new(name.clone()) as Box<dyn rusqlite::types::ToSql>],
|
||||
),
|
||||
TimelineFilter::Issue(_) => unreachable!(),
|
||||
};
|
||||
|
||||
let sql = format!(
|
||||
"SELECT mr.created_at, mr.iid, mr.title, mr.author_username, mr.project_id, p.path_with_namespace
|
||||
FROM merge_requests mr
|
||||
JOIN projects p ON p.id = mr.project_id
|
||||
WHERE {where_clause}"
|
||||
);
|
||||
|
||||
let mut stmt = conn.prepare(&sql).context("preparing MR created query")?;
|
||||
let param_refs: Vec<&dyn rusqlite::types::ToSql> =
|
||||
params.iter().map(AsRef::as_ref).collect();
|
||||
let rows = stmt
|
||||
.query_map(param_refs.as_slice(), |row| {
|
||||
Ok((
|
||||
row.get::<_, i64>(0)?,
|
||||
row.get::<_, i64>(1)?,
|
||||
row.get::<_, Option<String>>(2)?,
|
||||
row.get::<_, Option<String>>(3)?,
|
||||
row.get::<_, i64>(4)?,
|
||||
row.get::<_, String>(5)?,
|
||||
))
|
||||
})
|
||||
.context("querying MR created events")?;
|
||||
|
||||
for row in rows {
|
||||
let (created_at, iid, title, author, project_id, project_path) =
|
||||
row.context("reading MR created row")?;
|
||||
let title_str = title.as_deref().unwrap_or("(untitled)");
|
||||
events.push(TimelineEvent {
|
||||
timestamp_ms: created_at,
|
||||
entity_key: EntityKey::mr(project_id, iid),
|
||||
event_kind: TimelineEventKind::Created,
|
||||
summary: format!("MR !{iid} created: {title_str}"),
|
||||
detail: title,
|
||||
actor: author,
|
||||
project_path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Helper: build WHERE clause and params for resource event tables.
|
||||
///
|
||||
/// Resource event tables have `issue_id` and `merge_request_id` columns
|
||||
/// (exactly one is non-NULL per row), plus `actor_username`.
|
||||
fn resource_event_where(filter: &TimelineFilter) -> (String, Vec<Box<dyn rusqlite::types::ToSql>>) {
|
||||
match filter {
|
||||
TimelineFilter::All => ("1=1".to_string(), Vec::new()),
|
||||
TimelineFilter::Issue(id) => (
|
||||
"e.issue_id = ?1".to_string(),
|
||||
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
||||
),
|
||||
TimelineFilter::MergeRequest(id) => (
|
||||
"e.merge_request_id = ?1".to_string(),
|
||||
vec![Box::new(*id) as Box<dyn rusqlite::types::ToSql>],
|
||||
),
|
||||
TimelineFilter::Actor(name) => (
|
||||
"e.actor_username = ?1".to_string(),
|
||||
vec![Box::new(name.clone()) as Box<dyn rusqlite::types::ToSql>],
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a resource event row's entity to an EntityKey.
|
||||
fn resolve_event_entity(
|
||||
issue_id: Option<i64>,
|
||||
mr_id: Option<i64>,
|
||||
issue_iid: Option<i64>,
|
||||
mr_iid: Option<i64>,
|
||||
issue_project_id: Option<i64>,
|
||||
mr_project_id: Option<i64>,
|
||||
) -> Option<(EntityKey, i64)> {
|
||||
if let (Some(iid), Some(pid)) = (issue_iid, issue_project_id) {
|
||||
Some((EntityKey::issue(pid, iid), pid))
|
||||
} else if let (Some(iid), Some(pid)) = (mr_iid, mr_project_id) {
|
||||
Some((EntityKey::mr(pid, iid), pid))
|
||||
} else {
|
||||
// Orphaned event — entity was deleted.
|
||||
let _ = (issue_id, mr_id); // suppress unused warnings
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Collect state change events from `resource_state_events`.
///
/// Each row records one state transition (e.g. closed, reopened, merged)
/// for either an issue or an MR (exactly one FK is non-NULL). "merged"
/// is surfaced as its own event kind; every other state becomes a generic
/// StateChanged event. Orphaned rows (entity deleted) are skipped.
fn collect_tl_state_events(
    conn: &Connection,
    filter: &TimelineFilter,
    events: &mut Vec<TimelineEvent>,
) -> Result<()> {
    let (where_clause, params) = resource_event_where(filter);

    // LEFT JOINs resolve iid/project for whichever entity the event targets;
    // COALESCE picks the project path from whichever join matched.
    // NOTE: the destructuring below is positional — keep column order in sync.
    let sql = format!(
        "SELECT e.created_at, e.state, e.actor_username,
                e.issue_id, e.merge_request_id,
                i.iid, mr.iid, i.project_id, mr.project_id,
                COALESCE(pi.path_with_namespace, pm.path_with_namespace) AS project_path
         FROM resource_state_events e
         LEFT JOIN issues i ON i.id = e.issue_id
         LEFT JOIN merge_requests mr ON mr.id = e.merge_request_id
         LEFT JOIN projects pi ON pi.id = i.project_id
         LEFT JOIN projects pm ON pm.id = mr.project_id
         WHERE {where_clause}"
    );

    let mut stmt = conn.prepare(&sql).context("preparing state events query")?;
    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(AsRef::as_ref).collect();
    let rows = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok((
                row.get::<_, i64>(0)?,
                row.get::<_, String>(1)?,
                row.get::<_, Option<String>>(2)?,
                row.get::<_, Option<i64>>(3)?,
                row.get::<_, Option<i64>>(4)?,
                row.get::<_, Option<i64>>(5)?,
                row.get::<_, Option<i64>>(6)?,
                row.get::<_, Option<i64>>(7)?,
                row.get::<_, Option<i64>>(8)?,
                row.get::<_, Option<String>>(9)?,
            ))
        })
        .context("querying state events")?;

    for row in rows {
        let (
            created_at,
            state,
            actor,
            issue_id,
            mr_id,
            issue_iid,
            mr_iid,
            issue_pid,
            mr_pid,
            project_path,
        ) = row.context("reading state event row")?;

        // Skip events whose entity row no longer exists.
        let Some((entity_key, _pid)) =
            resolve_event_entity(issue_id, mr_id, issue_iid, mr_iid, issue_pid, mr_pid)
        else {
            continue;
        };

        // "merged" gets a dedicated event kind; anything else is generic.
        let (event_kind, summary) = if state == "merged" {
            (
                TimelineEventKind::Merged,
                format!("MR !{} merged", entity_key.iid),
            )
        } else {
            (
                TimelineEventKind::StateChanged,
                format!("State changed to {state}"),
            )
        };

        events.push(TimelineEvent {
            timestamp_ms: created_at,
            entity_key,
            event_kind,
            summary,
            detail: Some(state),
            actor,
            project_path: project_path.unwrap_or_default(),
        });
    }

    Ok(())
}
|
||||
|
||||
/// Collect label change events from `resource_label_events`.
///
/// Each row records a single label being added to or removed from an issue
/// or MR. Rows with an unrecognized action, or whose entity row was
/// deleted, are skipped.
fn collect_tl_label_events(
    conn: &Connection,
    filter: &TimelineFilter,
    events: &mut Vec<TimelineEvent>,
) -> Result<()> {
    let (where_clause, params) = resource_event_where(filter);

    // Same join/COALESCE shape as the state events query; destructuring
    // below is positional — keep column order in sync.
    let sql = format!(
        "SELECT e.created_at, e.action, e.label_name, e.actor_username,
                e.issue_id, e.merge_request_id,
                i.iid, mr.iid, i.project_id, mr.project_id,
                COALESCE(pi.path_with_namespace, pm.path_with_namespace) AS project_path
         FROM resource_label_events e
         LEFT JOIN issues i ON i.id = e.issue_id
         LEFT JOIN merge_requests mr ON mr.id = e.merge_request_id
         LEFT JOIN projects pi ON pi.id = i.project_id
         LEFT JOIN projects pm ON pm.id = mr.project_id
         WHERE {where_clause}"
    );

    let mut stmt = conn.prepare(&sql).context("preparing label events query")?;
    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(AsRef::as_ref).collect();
    let rows = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok((
                row.get::<_, i64>(0)?,
                row.get::<_, String>(1)?,
                row.get::<_, String>(2)?,
                row.get::<_, Option<String>>(3)?,
                row.get::<_, Option<i64>>(4)?,
                row.get::<_, Option<i64>>(5)?,
                row.get::<_, Option<i64>>(6)?,
                row.get::<_, Option<i64>>(7)?,
                row.get::<_, Option<i64>>(8)?,
                row.get::<_, Option<i64>>(9)?,
                row.get::<_, Option<String>>(10)?,
            ))
        })
        .context("querying label events")?;

    for row in rows {
        let (
            created_at,
            action,
            label_name,
            actor,
            issue_id,
            mr_id,
            issue_iid,
            mr_iid,
            issue_pid,
            mr_pid,
            project_path,
        ) = row.context("reading label event row")?;

        // Skip events whose entity row no longer exists.
        let Some((entity_key, _pid)) =
            resolve_event_entity(issue_id, mr_id, issue_iid, mr_iid, issue_pid, mr_pid)
        else {
            continue;
        };

        // GitLab reports label changes as "add"/"remove"; ignore others.
        let (event_kind, summary) = match action.as_str() {
            "add" => (
                TimelineEventKind::LabelAdded,
                format!("Label added: {label_name}"),
            ),
            "remove" => (
                TimelineEventKind::LabelRemoved,
                format!("Label removed: {label_name}"),
            ),
            _ => continue,
        };

        events.push(TimelineEvent {
            timestamp_ms: created_at,
            entity_key,
            event_kind,
            summary,
            detail: Some(label_name),
            actor,
            project_path: project_path.unwrap_or_default(),
        });
    }

    Ok(())
}
|
||||
|
||||
/// Collect milestone change events from `resource_milestone_events`.
|
||||
fn collect_tl_milestone_events(
|
||||
conn: &Connection,
|
||||
filter: &TimelineFilter,
|
||||
events: &mut Vec<TimelineEvent>,
|
||||
) -> Result<()> {
|
||||
let (where_clause, params) = resource_event_where(filter);
|
||||
|
||||
let sql = format!(
|
||||
"SELECT e.created_at, e.action, e.milestone_title, e.actor_username,
|
||||
e.issue_id, e.merge_request_id,
|
||||
i.iid, mr.iid, i.project_id, mr.project_id,
|
||||
COALESCE(pi.path_with_namespace, pm.path_with_namespace) AS project_path
|
||||
FROM resource_milestone_events e
|
||||
LEFT JOIN issues i ON i.id = e.issue_id
|
||||
LEFT JOIN merge_requests mr ON mr.id = e.merge_request_id
|
||||
LEFT JOIN projects pi ON pi.id = i.project_id
|
||||
LEFT JOIN projects pm ON pm.id = mr.project_id
|
||||
WHERE {where_clause}"
|
||||
);
|
||||
|
||||
let mut stmt = conn
|
||||
.prepare(&sql)
|
||||
.context("preparing milestone events query")?;
|
||||
let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(AsRef::as_ref).collect();
|
||||
let rows = stmt
|
||||
.query_map(param_refs.as_slice(), |row| {
|
||||
Ok((
|
||||
row.get::<_, i64>(0)?,
|
||||
row.get::<_, String>(1)?,
|
||||
row.get::<_, String>(2)?,
|
||||
row.get::<_, Option<String>>(3)?,
|
||||
row.get::<_, Option<i64>>(4)?,
|
||||
row.get::<_, Option<i64>>(5)?,
|
||||
row.get::<_, Option<i64>>(6)?,
|
||||
row.get::<_, Option<i64>>(7)?,
|
||||
row.get::<_, Option<i64>>(8)?,
|
||||
row.get::<_, Option<i64>>(9)?,
|
||||
row.get::<_, Option<String>>(10)?,
|
||||
))
|
||||
})
|
||||
.context("querying milestone events")?;
|
||||
|
||||
for row in rows {
|
||||
let (
|
||||
created_at,
|
||||
action,
|
||||
milestone_title,
|
||||
actor,
|
||||
issue_id,
|
||||
mr_id,
|
||||
issue_iid,
|
||||
mr_iid,
|
||||
issue_pid,
|
||||
mr_pid,
|
||||
project_path,
|
||||
) = row.context("reading milestone event row")?;
|
||||
|
||||
let Some((entity_key, _pid)) =
|
||||
resolve_event_entity(issue_id, mr_id, issue_iid, mr_iid, issue_pid, mr_pid)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let (event_kind, summary) = match action.as_str() {
|
||||
"add" => (
|
||||
TimelineEventKind::MilestoneSet,
|
||||
format!("Milestone set: {milestone_title}"),
|
||||
),
|
||||
"remove" => (
|
||||
TimelineEventKind::MilestoneRemoved,
|
||||
format!("Milestone removed: {milestone_title}"),
|
||||
),
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
events.push(TimelineEvent {
|
||||
timestamp_ms: created_at,
|
||||
entity_key,
|
||||
event_kind,
|
||||
summary,
|
||||
detail: Some(milestone_title),
|
||||
actor,
|
||||
project_path: project_path.unwrap_or_default(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
/// Create the minimal schema needed for timeline queries.
|
||||
fn create_dashboard_schema(conn: &Connection) {
|
||||
conn.execute_batch(
|
||||
"
|
||||
CREATE TABLE projects (
|
||||
id INTEGER PRIMARY KEY,
|
||||
gitlab_project_id INTEGER UNIQUE NOT NULL,
|
||||
path_with_namespace TEXT NOT NULL
|
||||
);
|
||||
CREATE TABLE issues (
|
||||
id INTEGER PRIMARY KEY,
|
||||
gitlab_id INTEGER UNIQUE NOT NULL,
|
||||
project_id INTEGER NOT NULL,
|
||||
iid INTEGER NOT NULL,
|
||||
title TEXT,
|
||||
state TEXT NOT NULL,
|
||||
author_username TEXT,
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
last_seen_at INTEGER NOT NULL
|
||||
);
|
||||
CREATE TABLE merge_requests (
|
||||
id INTEGER PRIMARY KEY,
|
||||
gitlab_id INTEGER UNIQUE NOT NULL,
|
||||
project_id INTEGER NOT NULL,
|
||||
iid INTEGER NOT NULL,
|
||||
title TEXT,
|
||||
state TEXT,
|
||||
author_username TEXT,
|
||||
created_at INTEGER,
|
||||
updated_at INTEGER,
|
||||
last_seen_at INTEGER NOT NULL
|
||||
);
|
||||
",
|
||||
)
|
||||
.expect("create dashboard schema");
|
||||
}
|
||||
|
||||
fn insert_issue(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
|
||||
conn.execute(
|
||||
"INSERT INTO issues (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
|
||||
VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
|
||||
rusqlite::params![iid * 100, iid, format!("Issue {iid}"), state, updated_at],
|
||||
)
|
||||
.expect("insert issue");
|
||||
}
|
||||
|
||||
fn insert_mr(conn: &Connection, iid: i64, state: &str, updated_at: i64) {
|
||||
conn.execute(
|
||||
"INSERT INTO merge_requests (gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at)
|
||||
VALUES (?1, 1, ?2, ?3, ?4, ?5, ?5, ?5)",
|
||||
rusqlite::params![iid * 100 + 50, iid, format!("MR {iid}"), state, updated_at],
|
||||
)
|
||||
.expect("insert mr");
|
||||
}
|
||||
|
||||
/// Add resource event tables to an existing schema.
|
||||
fn add_resource_event_tables(conn: &Connection) {
|
||||
conn.execute_batch(
|
||||
"
|
||||
CREATE TABLE IF NOT EXISTS resource_state_events (
|
||||
id INTEGER PRIMARY KEY,
|
||||
gitlab_id INTEGER NOT NULL,
|
||||
project_id INTEGER NOT NULL,
|
||||
issue_id INTEGER,
|
||||
merge_request_id INTEGER,
|
||||
state TEXT NOT NULL,
|
||||
actor_gitlab_id INTEGER,
|
||||
actor_username TEXT,
|
||||
created_at INTEGER NOT NULL,
|
||||
source_commit TEXT,
|
||||
source_merge_request_iid INTEGER
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS resource_label_events (
|
||||
id INTEGER PRIMARY KEY,
|
||||
gitlab_id INTEGER NOT NULL,
|
||||
project_id INTEGER NOT NULL,
|
||||
issue_id INTEGER,
|
||||
merge_request_id INTEGER,
|
||||
action TEXT NOT NULL,
|
||||
label_name TEXT NOT NULL,
|
||||
actor_gitlab_id INTEGER,
|
||||
actor_username TEXT,
|
||||
created_at INTEGER NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS resource_milestone_events (
|
||||
id INTEGER PRIMARY KEY,
|
||||
gitlab_id INTEGER NOT NULL,
|
||||
project_id INTEGER NOT NULL,
|
||||
issue_id INTEGER,
|
||||
merge_request_id INTEGER,
|
||||
action TEXT NOT NULL,
|
||||
milestone_title TEXT NOT NULL,
|
||||
milestone_id INTEGER,
|
||||
actor_gitlab_id INTEGER,
|
||||
actor_username TEXT,
|
||||
created_at INTEGER NOT NULL
|
||||
);
|
||||
",
|
||||
)
|
||||
.expect("create resource event tables");
|
||||
}
|
||||
|
||||
/// Create a full timeline test schema (dashboard schema + resource events).
|
||||
fn create_timeline_schema(conn: &Connection) {
|
||||
create_dashboard_schema(conn);
|
||||
add_resource_event_tables(conn);
|
||||
// Insert a project for test entities.
|
||||
conn.execute(
|
||||
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')",
|
||||
[],
|
||||
)
|
||||
.expect("insert test project");
|
||||
}
|
||||
|
||||
fn insert_state_event(
|
||||
conn: &Connection,
|
||||
gitlab_id: i64,
|
||||
issue_id: Option<i64>,
|
||||
mr_id: Option<i64>,
|
||||
state: &str,
|
||||
actor: &str,
|
||||
created_at: i64,
|
||||
) {
|
||||
conn.execute(
|
||||
"INSERT INTO resource_state_events (gitlab_id, project_id, issue_id, merge_request_id, state, actor_username, created_at)
|
||||
VALUES (?1, 1, ?2, ?3, ?4, ?5, ?6)",
|
||||
rusqlite::params![gitlab_id, issue_id, mr_id, state, actor, created_at],
|
||||
)
|
||||
.expect("insert state event");
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn insert_label_event(
|
||||
conn: &Connection,
|
||||
gitlab_id: i64,
|
||||
issue_id: Option<i64>,
|
||||
mr_id: Option<i64>,
|
||||
action: &str,
|
||||
label: &str,
|
||||
actor: &str,
|
||||
created_at: i64,
|
||||
) {
|
||||
conn.execute(
|
||||
"INSERT INTO resource_label_events (gitlab_id, project_id, issue_id, merge_request_id, action, label_name, actor_username, created_at)
|
||||
VALUES (?1, 1, ?2, ?3, ?4, ?5, ?6, ?7)",
|
||||
rusqlite::params![gitlab_id, issue_id, mr_id, action, label, actor, created_at],
|
||||
)
|
||||
.expect("insert label event");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_timeline_scoped() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_timeline_schema(&conn);
|
||||
|
||||
// Create two issues.
|
||||
let now = 1_700_000_000_000_i64;
|
||||
insert_issue(&conn, 1, "opened", now - 100_000);
|
||||
insert_issue(&conn, 2, "opened", now - 50_000);
|
||||
|
||||
// Get internal IDs.
|
||||
let issue1_id: i64 = conn
|
||||
.query_row("SELECT id FROM issues WHERE iid = 1", [], |r| r.get(0))
|
||||
.unwrap();
|
||||
let issue2_id: i64 = conn
|
||||
.query_row("SELECT id FROM issues WHERE iid = 2", [], |r| r.get(0))
|
||||
.unwrap();
|
||||
|
||||
// State events: issue 1 closed, issue 2 label added.
|
||||
insert_state_event(
|
||||
&conn,
|
||||
1,
|
||||
Some(issue1_id),
|
||||
None,
|
||||
"closed",
|
||||
"alice",
|
||||
now - 80_000,
|
||||
);
|
||||
insert_label_event(
|
||||
&conn,
|
||||
2,
|
||||
Some(issue2_id),
|
||||
None,
|
||||
"add",
|
||||
"bug",
|
||||
"bob",
|
||||
now - 30_000,
|
||||
);
|
||||
|
||||
// Fetch scoped to issue 1.
|
||||
let scope = TimelineScope::Entity(EntityKey::issue(1, 1));
|
||||
let events = fetch_timeline_events(&conn, &scope, 100).unwrap();
|
||||
|
||||
// Should only have issue 1's events: Created + StateChanged.
|
||||
assert_eq!(events.len(), 2);
|
||||
for event in &events {
|
||||
assert_eq!(event.entity_key.iid, 1, "All events should be for issue #1");
|
||||
}
|
||||
// Most recent first.
|
||||
assert!(events[0].timestamp_ms >= events[1].timestamp_ms);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_timeline_all_scope() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_timeline_schema(&conn);
|
||||
|
||||
let now = 1_700_000_000_000_i64;
|
||||
insert_issue(&conn, 1, "opened", now - 100_000);
|
||||
insert_issue(&conn, 2, "opened", now - 50_000);
|
||||
|
||||
let events = fetch_timeline_events(&conn, &TimelineScope::All, 100).unwrap();
|
||||
|
||||
// Should have Created events for both issues.
|
||||
assert_eq!(events.len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_timeline_author_scope() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_timeline_schema(&conn);
|
||||
|
||||
let now = 1_700_000_000_000_i64;
|
||||
insert_issue(&conn, 1, "opened", now - 100_000); // default: no author_username in insert_issue
|
||||
|
||||
let issue1_id: i64 = conn
|
||||
.query_row("SELECT id FROM issues WHERE iid = 1", [], |r| r.get(0))
|
||||
.unwrap();
|
||||
|
||||
// State events by different actors.
|
||||
insert_state_event(
|
||||
&conn,
|
||||
1,
|
||||
Some(issue1_id),
|
||||
None,
|
||||
"closed",
|
||||
"alice",
|
||||
now - 80_000,
|
||||
);
|
||||
insert_state_event(
|
||||
&conn,
|
||||
2,
|
||||
Some(issue1_id),
|
||||
None,
|
||||
"reopened",
|
||||
"bob",
|
||||
now - 60_000,
|
||||
);
|
||||
|
||||
let scope = TimelineScope::Author("alice".into());
|
||||
let events = fetch_timeline_events(&conn, &scope, 100).unwrap();
|
||||
|
||||
// Should only get alice's state event (Created events don't have author set via insert_issue).
|
||||
assert!(events.iter().all(|e| e.actor.as_deref() == Some("alice")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_timeline_respects_limit() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_timeline_schema(&conn);
|
||||
|
||||
let now = 1_700_000_000_000_i64;
|
||||
for i in 1..=10 {
|
||||
insert_issue(&conn, i, "opened", now - (i * 10_000));
|
||||
}
|
||||
|
||||
let events = fetch_timeline_events(&conn, &TimelineScope::All, 3).unwrap();
|
||||
assert_eq!(events.len(), 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_timeline_sorted_most_recent_first() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_timeline_schema(&conn);
|
||||
|
||||
let now = 1_700_000_000_000_i64;
|
||||
insert_issue(&conn, 1, "opened", now - 200_000);
|
||||
insert_issue(&conn, 2, "opened", now - 100_000);
|
||||
insert_issue(&conn, 3, "opened", now - 300_000);
|
||||
|
||||
let events = fetch_timeline_events(&conn, &TimelineScope::All, 100).unwrap();
|
||||
|
||||
for window in events.windows(2) {
|
||||
assert!(
|
||||
window[0].timestamp_ms >= window[1].timestamp_ms,
|
||||
"Events should be sorted most-recent-first"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_timeline_state_merged_is_merged_kind() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_timeline_schema(&conn);
|
||||
|
||||
let now = 1_700_000_000_000_i64;
|
||||
insert_mr(&conn, 1, "merged", now - 100_000);
|
||||
|
||||
let mr_id: i64 = conn
|
||||
.query_row("SELECT id FROM merge_requests WHERE iid = 1", [], |r| {
|
||||
r.get(0)
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
insert_state_event(&conn, 1, None, Some(mr_id), "merged", "alice", now - 50_000);
|
||||
|
||||
let scope = TimelineScope::Entity(EntityKey::mr(1, 1));
|
||||
let events = fetch_timeline_events(&conn, &scope, 100).unwrap();
|
||||
|
||||
let merged_events: Vec<_> = events
|
||||
.iter()
|
||||
.filter(|e| e.event_kind == TimelineEventKind::Merged)
|
||||
.collect();
|
||||
assert_eq!(merged_events.len(), 1);
|
||||
assert_eq!(merged_events[0].summary, "MR !1 merged");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_timeline_empty_db() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_timeline_schema(&conn);
|
||||
|
||||
let events = fetch_timeline_events(&conn, &TimelineScope::All, 100).unwrap();
|
||||
assert!(events.is_empty());
|
||||
}
|
||||
}
|
||||
234
crates/lore-tui/src/action/trace.rs
Normal file
234
crates/lore-tui/src/action/trace.rs
Normal file
@@ -0,0 +1,234 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! Trace screen actions — fetch file provenance chains from the local database.
|
||||
//!
|
||||
//! Wraps `run_trace()` from `lore::core::trace` and provides an autocomplete
|
||||
//! path query for the input field.
|
||||
|
||||
use anyhow::Result;
|
||||
use rusqlite::Connection;
|
||||
|
||||
use lore::core::trace::{self, TraceResult};
|
||||
|
||||
/// Default limit for trace chain results in TUI queries.
|
||||
const DEFAULT_LIMIT: usize = 50;
|
||||
|
||||
/// Fetch trace chains for a file path.
|
||||
///
|
||||
/// Wraps [`trace::run_trace()`] with TUI defaults.
|
||||
pub fn fetch_trace(
|
||||
conn: &Connection,
|
||||
project_id: Option<i64>,
|
||||
path: &str,
|
||||
follow_renames: bool,
|
||||
include_discussions: bool,
|
||||
) -> Result<TraceResult> {
|
||||
Ok(trace::run_trace(
|
||||
conn,
|
||||
project_id,
|
||||
path,
|
||||
follow_renames,
|
||||
include_discussions,
|
||||
DEFAULT_LIMIT,
|
||||
)?)
|
||||
}
|
||||
|
||||
/// Fetch known file paths from `mr_file_changes` for autocomplete.
|
||||
///
|
||||
/// Returns distinct `new_path` values scoped to the given project (or all
|
||||
/// projects if `None`), sorted alphabetically.
|
||||
pub fn fetch_known_paths(conn: &Connection, project_id: Option<i64>) -> Result<Vec<String>> {
|
||||
let mut paths = if let Some(pid) = project_id {
|
||||
let mut stmt = conn.prepare(
|
||||
"SELECT DISTINCT new_path FROM mr_file_changes WHERE project_id = ?1 ORDER BY new_path",
|
||||
)?;
|
||||
let rows = stmt.query_map([pid], |row| row.get::<_, String>(0))?;
|
||||
rows.filter_map(Result::ok).collect::<Vec<_>>()
|
||||
} else {
|
||||
let mut stmt =
|
||||
conn.prepare("SELECT DISTINCT new_path FROM mr_file_changes ORDER BY new_path")?;
|
||||
let rows = stmt.query_map([], |row| row.get::<_, String>(0))?;
|
||||
rows.filter_map(Result::ok).collect::<Vec<_>>()
|
||||
};
|
||||
paths.sort();
|
||||
paths.dedup();
|
||||
Ok(paths)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
    /// Minimal schema for trace queries.
    ///
    /// Mirrors only the subset of the production schema that `run_trace()`
    /// touches: projects, merge requests, per-MR file changes, cross-entity
    /// references, issues, discussions, and notes — plus the two path-lookup
    /// indexes the tracer relies on. Panics on failure (test-only helper).
    fn create_trace_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_username TEXT,
                draft INTEGER NOT NULL DEFAULT 0,
                created_at INTEGER,
                updated_at INTEGER,
                merged_at INTEGER,
                closed_at INTEGER,
                web_url TEXT,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE mr_file_changes (
                id INTEGER PRIMARY KEY,
                merge_request_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                new_path TEXT NOT NULL,
                old_path TEXT,
                change_type TEXT NOT NULL
            );
            CREATE TABLE entity_references (
                id INTEGER PRIMARY KEY,
                source_entity_type TEXT NOT NULL,
                source_entity_id INTEGER NOT NULL,
                target_entity_type TEXT NOT NULL,
                target_entity_id INTEGER,
                target_iid INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                reference_type TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT NOT NULL,
                author_username TEXT,
                web_url TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                noteable_type TEXT NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                resolvable INTEGER NOT NULL DEFAULT 0,
                resolved INTEGER NOT NULL DEFAULT 0,
                last_note_at INTEGER NOT NULL DEFAULT 0,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                note_type TEXT,
                position_new_path TEXT,
                position_old_path TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE INDEX idx_mfc_new_path_project_mr
                ON mr_file_changes(new_path, project_id, merge_request_id);
            CREATE INDEX idx_mfc_old_path_project_mr
                ON mr_file_changes(old_path, project_id, merge_request_id);
            ",
        )
        .expect("create trace schema");
    }
|
||||
|
||||
#[test]
|
||||
fn test_fetch_trace_empty_db() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_trace_schema(&conn);
|
||||
|
||||
let result = fetch_trace(&conn, None, "src/main.rs", true, true).unwrap();
|
||||
assert!(result.trace_chains.is_empty());
|
||||
assert_eq!(result.total_chains, 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_trace_with_mr() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_trace_schema(&conn);
|
||||
|
||||
// Insert a project, MR, and file change.
|
||||
conn.execute_batch(
|
||||
"
|
||||
INSERT INTO projects(id, gitlab_project_id, path_with_namespace)
|
||||
VALUES (1, 100, 'group/project');
|
||||
INSERT INTO merge_requests(id, gitlab_id, project_id, iid, title, state, author_username, updated_at, last_seen_at)
|
||||
VALUES (1, 200, 1, 42, 'Add main.rs', 'merged', 'alice', 1700000000000, 1700000000000);
|
||||
INSERT INTO mr_file_changes(id, merge_request_id, project_id, new_path, change_type)
|
||||
VALUES (1, 1, 1, 'src/main.rs', 'added');
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let result = fetch_trace(&conn, Some(1), "src/main.rs", true, false).unwrap();
|
||||
assert_eq!(result.trace_chains.len(), 1);
|
||||
assert_eq!(result.trace_chains[0].mr_iid, 42);
|
||||
assert_eq!(result.trace_chains[0].mr_author, "alice");
|
||||
assert_eq!(result.trace_chains[0].change_type, "added");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_known_paths_empty() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_trace_schema(&conn);
|
||||
|
||||
let paths = fetch_known_paths(&conn, None).unwrap();
|
||||
assert!(paths.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_known_paths_with_data() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_trace_schema(&conn);
|
||||
|
||||
conn.execute_batch(
|
||||
"
|
||||
INSERT INTO mr_file_changes(id, merge_request_id, project_id, new_path, change_type)
|
||||
VALUES (1, 1, 1, 'src/b.rs', 'added'),
|
||||
(2, 1, 1, 'src/a.rs', 'modified'),
|
||||
(3, 2, 1, 'src/b.rs', 'modified');
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let paths = fetch_known_paths(&conn, None).unwrap();
|
||||
assert_eq!(paths, vec!["src/a.rs", "src/b.rs"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_known_paths_scoped_to_project() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_trace_schema(&conn);
|
||||
|
||||
conn.execute_batch(
|
||||
"
|
||||
INSERT INTO mr_file_changes(id, merge_request_id, project_id, new_path, change_type)
|
||||
VALUES (1, 1, 1, 'src/a.rs', 'added'),
|
||||
(2, 2, 2, 'src/b.rs', 'added');
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let paths = fetch_known_paths(&conn, Some(1)).unwrap();
|
||||
assert_eq!(paths, vec!["src/a.rs"]);
|
||||
}
|
||||
}
|
||||
285
crates/lore-tui/src/action/who.rs
Normal file
285
crates/lore-tui/src/action/who.rs
Normal file
@@ -0,0 +1,285 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! Who screen actions — fetch people-intelligence data from the local database.
|
||||
//!
|
||||
//! Each function wraps a `query_*` function from `lore::cli::commands::who`
|
||||
//! and returns the appropriate [`WhoResult`] variant.
|
||||
|
||||
use anyhow::Result;
|
||||
use rusqlite::Connection;
|
||||
|
||||
use lore::cli::commands::who;
|
||||
use lore::core::config::ScoringConfig;
|
||||
use lore::core::who_types::WhoResult;
|
||||
|
||||
/// Default limit for result rows in TUI who queries.
|
||||
const DEFAULT_LIMIT: usize = 20;
|
||||
|
||||
/// Default time window: 6 months in milliseconds.
|
||||
const SIX_MONTHS_MS: i64 = 180 * 24 * 60 * 60 * 1000;
|
||||
|
||||
/// Fetch expert results for a file path.
|
||||
pub fn fetch_who_expert(
|
||||
conn: &Connection,
|
||||
path: &str,
|
||||
project_id: Option<i64>,
|
||||
scoring: &ScoringConfig,
|
||||
now_ms: i64,
|
||||
) -> Result<WhoResult> {
|
||||
let since_ms = now_ms - SIX_MONTHS_MS;
|
||||
let result = who::query_expert(
|
||||
conn,
|
||||
path,
|
||||
project_id,
|
||||
since_ms,
|
||||
now_ms,
|
||||
DEFAULT_LIMIT,
|
||||
scoring,
|
||||
false, // detail
|
||||
false, // explain_score
|
||||
false, // include_bots
|
||||
)?;
|
||||
Ok(WhoResult::Expert(result))
|
||||
}
|
||||
|
||||
/// Fetch workload summary for a username.
|
||||
pub fn fetch_who_workload(
|
||||
conn: &Connection,
|
||||
username: &str,
|
||||
project_id: Option<i64>,
|
||||
include_closed: bool,
|
||||
) -> Result<WhoResult> {
|
||||
let result = who::query_workload(
|
||||
conn,
|
||||
username,
|
||||
project_id,
|
||||
None, // since_ms — show all for workload
|
||||
DEFAULT_LIMIT,
|
||||
include_closed,
|
||||
)?;
|
||||
Ok(WhoResult::Workload(result))
|
||||
}
|
||||
|
||||
/// Fetch review activity breakdown for a username.
|
||||
pub fn fetch_who_reviews(
|
||||
conn: &Connection,
|
||||
username: &str,
|
||||
project_id: Option<i64>,
|
||||
now_ms: i64,
|
||||
) -> Result<WhoResult> {
|
||||
let since_ms = now_ms - SIX_MONTHS_MS;
|
||||
let result = who::query_reviews(conn, username, project_id, since_ms)?;
|
||||
Ok(WhoResult::Reviews(result))
|
||||
}
|
||||
|
||||
/// Fetch recent active (unresolved) discussions.
|
||||
pub fn fetch_who_active(
|
||||
conn: &Connection,
|
||||
project_id: Option<i64>,
|
||||
include_closed: bool,
|
||||
now_ms: i64,
|
||||
) -> Result<WhoResult> {
|
||||
// Active mode default window: 7 days.
|
||||
let seven_days_ms: i64 = 7 * 24 * 60 * 60 * 1000;
|
||||
let since_ms = now_ms - seven_days_ms;
|
||||
let result = who::query_active(conn, project_id, since_ms, DEFAULT_LIMIT, include_closed)?;
|
||||
Ok(WhoResult::Active(result))
|
||||
}
|
||||
|
||||
/// Fetch overlap (shared file knowledge) for a path.
|
||||
pub fn fetch_who_overlap(
|
||||
conn: &Connection,
|
||||
path: &str,
|
||||
project_id: Option<i64>,
|
||||
now_ms: i64,
|
||||
) -> Result<WhoResult> {
|
||||
let since_ms = now_ms - SIX_MONTHS_MS;
|
||||
let result = who::query_overlap(conn, path, project_id, since_ms, DEFAULT_LIMIT)?;
|
||||
Ok(WhoResult::Overlap(result))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
    /// Minimal schema for who queries (matches the real DB schema).
    ///
    /// Covers every table the five who modes read — projects, issues, MRs,
    /// assignees, reviewers, file changes, discussions, notes — plus the
    /// partial DiffNote indexes and the path-lookup indexes the queries use.
    /// Panics on failure (test-only helper).
    fn create_who_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE issues (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT NOT NULL,
                author_username TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_username TEXT,
                draft INTEGER NOT NULL DEFAULT 0,
                created_at INTEGER,
                updated_at INTEGER,
                merged_at INTEGER,
                closed_at INTEGER,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE issue_assignees (
                issue_id INTEGER NOT NULL,
                username TEXT NOT NULL,
                PRIMARY KEY(issue_id, username)
            );
            CREATE TABLE mr_reviewers (
                merge_request_id INTEGER NOT NULL,
                username TEXT NOT NULL,
                PRIMARY KEY(merge_request_id, username)
            );
            CREATE TABLE mr_file_changes (
                id INTEGER PRIMARY KEY,
                merge_request_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                new_path TEXT NOT NULL,
                old_path TEXT,
                change_type TEXT NOT NULL
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                noteable_type TEXT NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                resolvable INTEGER NOT NULL DEFAULT 0,
                resolved INTEGER NOT NULL DEFAULT 0,
                last_note_at INTEGER NOT NULL DEFAULT 0,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                note_type TEXT,
                position_new_path TEXT,
                position_old_path TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            -- Indexes needed by who queries
            CREATE INDEX idx_notes_diffnote_path_created
                ON notes(position_new_path, created_at)
                WHERE note_type = 'DiffNote' AND is_system = 0;
            CREATE INDEX idx_notes_old_path_author
                ON notes(position_old_path, author_username)
                WHERE note_type = 'DiffNote' AND is_system = 0;
            CREATE INDEX idx_mfc_new_path_project_mr
                ON mr_file_changes(new_path, project_id, merge_request_id);
            CREATE INDEX idx_mfc_old_path_project_mr
                ON mr_file_changes(old_path, project_id, merge_request_id);
            ",
        )
        .expect("create who schema");
    }
|
||||
|
||||
    /// Scoring configuration used by expert queries in these tests — just
    /// the crate defaults.
    fn default_scoring() -> ScoringConfig {
        ScoringConfig::default()
    }
|
||||
|
||||
    /// Fixed "now" so all window arithmetic is deterministic across runs.
    fn now_ms() -> i64 {
        1_700_000_000_000 // Fixed timestamp for deterministic tests.
    }
|
||||
|
||||
#[test]
|
||||
fn test_fetch_who_expert_empty_db_returns_empty() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_who_schema(&conn);
|
||||
|
||||
let result = fetch_who_expert(&conn, "src/", None, &default_scoring(), now_ms()).unwrap();
|
||||
match result {
|
||||
WhoResult::Expert(expert) => {
|
||||
assert!(expert.experts.is_empty());
|
||||
assert!(!expert.truncated);
|
||||
}
|
||||
_ => panic!("Expected Expert variant"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_who_workload_empty_db_returns_empty() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_who_schema(&conn);
|
||||
|
||||
let result = fetch_who_workload(&conn, "alice", None, false).unwrap();
|
||||
match result {
|
||||
WhoResult::Workload(wl) => {
|
||||
assert_eq!(wl.username, "alice");
|
||||
assert!(wl.assigned_issues.is_empty());
|
||||
assert!(wl.authored_mrs.is_empty());
|
||||
}
|
||||
_ => panic!("Expected Workload variant"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_who_reviews_empty_db_returns_empty() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_who_schema(&conn);
|
||||
|
||||
let result = fetch_who_reviews(&conn, "alice", None, now_ms()).unwrap();
|
||||
match result {
|
||||
WhoResult::Reviews(rev) => {
|
||||
assert_eq!(rev.username, "alice");
|
||||
assert_eq!(rev.total_diffnotes, 0);
|
||||
}
|
||||
_ => panic!("Expected Reviews variant"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_who_active_empty_db_returns_empty() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_who_schema(&conn);
|
||||
|
||||
let result = fetch_who_active(&conn, None, false, now_ms()).unwrap();
|
||||
match result {
|
||||
WhoResult::Active(active) => {
|
||||
assert!(active.discussions.is_empty());
|
||||
assert_eq!(active.total_unresolved_in_window, 0);
|
||||
}
|
||||
_ => panic!("Expected Active variant"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_who_overlap_empty_db_returns_empty() {
|
||||
let conn = Connection::open_in_memory().unwrap();
|
||||
create_who_schema(&conn);
|
||||
|
||||
let result = fetch_who_overlap(&conn, "src/", None, now_ms()).unwrap();
|
||||
match result {
|
||||
WhoResult::Overlap(overlap) => {
|
||||
assert!(overlap.users.is_empty());
|
||||
assert!(!overlap.truncated);
|
||||
}
|
||||
_ => panic!("Expected Overlap variant"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -139,6 +139,39 @@ fn test_g_then_i_navigates_to_issues() {
|
||||
assert!(app.navigation.is_at(&Screen::IssueList));
|
||||
}
|
||||
|
||||
#[test]
fn test_g_then_s_on_bootstrap_starts_sync_in_place() {
    let mut app = test_app();
    app.update(Msg::NavigateTo(Screen::Bootstrap));

    // 'g' then 's': on the Bootstrap screen this starts a sync in place
    // instead of navigating to the Sync screen.
    app.update(Msg::RawEvent(Event::Key(KeyEvent::new(KeyCode::Char('g')))));
    app.update(Msg::RawEvent(Event::Key(KeyEvent::new(KeyCode::Char('s')))));

    assert!(app.navigation.is_at(&Screen::Bootstrap));
    assert!(app.state.bootstrap.sync_started);
    assert!(matches!(app.input_mode, InputMode::Normal));
}
|
||||
|
||||
#[test]
fn test_g_then_s_from_dashboard_navigates_to_sync_screen() {
    let mut app = test_app();

    // 'g' then 's' from the default (Dashboard) screen navigates to Sync.
    app.update(Msg::RawEvent(Event::Key(KeyEvent::new(KeyCode::Char('g')))));
    app.update(Msg::RawEvent(Event::Key(KeyEvent::new(KeyCode::Char('s')))));

    assert!(app.navigation.is_at(&Screen::Sync));
}
|
||||
|
||||
#[test]
|
||||
fn test_go_prefix_timeout_cancels() {
|
||||
let clock = FakeClock::new(chrono::Utc::now());
|
||||
@@ -328,3 +361,19 @@ fn test_default_is_new() {
|
||||
assert!(app.navigation.is_at(&Screen::Dashboard));
|
||||
assert!(matches!(app.input_mode, InputMode::Normal));
|
||||
}
|
||||
|
||||
#[test]
fn test_sync_completed_from_bootstrap_resets_navigation_and_state() {
    let mut app = test_app();

    // Enter Bootstrap and start a sync.
    app.update(Msg::NavigateTo(Screen::Bootstrap));
    app.update(Msg::SyncStarted);
    assert!(app.navigation.is_at(&Screen::Bootstrap));
    assert!(app.state.bootstrap.sync_started);

    // Completing the sync replaces the nav history with a fresh Dashboard.
    app.update(Msg::SyncCompleted { elapsed_ms: 1234 });

    assert!(app.navigation.is_at(&Screen::Dashboard));
    assert_eq!(app.navigation.depth(), 1);
    assert!(!app.state.bootstrap.sync_started);
}
|
||||
|
||||
@@ -125,13 +125,44 @@ impl LoreApp {
|
||||
}
|
||||
|
||||
/// Handle keys in Palette mode.
|
||||
fn handle_palette_mode_key(&mut self, key: &KeyEvent, _screen: &Screen) -> Cmd<Msg> {
|
||||
if key.code == KeyCode::Escape {
|
||||
self.input_mode = InputMode::Normal;
|
||||
return Cmd::none();
|
||||
fn handle_palette_mode_key(&mut self, key: &KeyEvent, screen: &Screen) -> Cmd<Msg> {
|
||||
match key.code {
|
||||
KeyCode::Escape => {
|
||||
self.state.command_palette.close();
|
||||
self.input_mode = InputMode::Normal;
|
||||
Cmd::none()
|
||||
}
|
||||
KeyCode::Enter => {
|
||||
if let Some(cmd_id) = self.state.command_palette.selected_command_id() {
|
||||
self.state.command_palette.close();
|
||||
self.input_mode = InputMode::Normal;
|
||||
self.execute_command(cmd_id, screen)
|
||||
} else {
|
||||
Cmd::none()
|
||||
}
|
||||
}
|
||||
KeyCode::Up => {
|
||||
self.state.command_palette.select_prev();
|
||||
Cmd::none()
|
||||
}
|
||||
KeyCode::Down => {
|
||||
self.state.command_palette.select_next();
|
||||
Cmd::none()
|
||||
}
|
||||
KeyCode::Backspace => {
|
||||
self.state
|
||||
.command_palette
|
||||
.delete_back(&self.command_registry, screen);
|
||||
Cmd::none()
|
||||
}
|
||||
KeyCode::Char(c) => {
|
||||
self.state
|
||||
.command_palette
|
||||
.insert_char(c, &self.command_registry, screen);
|
||||
Cmd::none()
|
||||
}
|
||||
_ => Cmd::none(),
|
||||
}
|
||||
// Palette key dispatch will be expanded in the palette widget phase.
|
||||
Cmd::none()
|
||||
}
|
||||
|
||||
/// Handle the second key of a g-prefix sequence.
|
||||
@@ -153,7 +184,7 @@ impl LoreApp {
|
||||
}
|
||||
|
||||
/// Execute a command by ID.
|
||||
fn execute_command(&mut self, id: &str, _screen: &Screen) -> Cmd<Msg> {
|
||||
fn execute_command(&mut self, id: &str, screen: &Screen) -> Cmd<Msg> {
|
||||
match id {
|
||||
"quit" => Cmd::quit(),
|
||||
"go_back" => {
|
||||
@@ -166,7 +197,10 @@ impl LoreApp {
|
||||
}
|
||||
"command_palette" => {
|
||||
self.input_mode = InputMode::Palette;
|
||||
self.state.command_palette.query_focused = true;
|
||||
let screen = self.navigation.current().clone();
|
||||
self.state
|
||||
.command_palette
|
||||
.open(&self.command_registry, &screen);
|
||||
Cmd::none()
|
||||
}
|
||||
"open_in_browser" => {
|
||||
@@ -183,7 +217,16 @@ impl LoreApp {
|
||||
"go_search" => self.navigate_to(Screen::Search),
|
||||
"go_timeline" => self.navigate_to(Screen::Timeline),
|
||||
"go_who" => self.navigate_to(Screen::Who),
|
||||
"go_sync" => self.navigate_to(Screen::Sync),
|
||||
"go_file_history" => self.navigate_to(Screen::FileHistory),
|
||||
"go_trace" => self.navigate_to(Screen::Trace),
|
||||
"go_sync" => {
|
||||
if screen == &Screen::Bootstrap {
|
||||
self.state.bootstrap.sync_started = true;
|
||||
Cmd::none()
|
||||
} else {
|
||||
self.navigate_to(Screen::Sync)
|
||||
}
|
||||
}
|
||||
"jump_back" => {
|
||||
self.navigation.jump_back();
|
||||
Cmd::none()
|
||||
@@ -239,11 +282,7 @@ impl LoreApp {
|
||||
pub(crate) fn handle_msg(&mut self, msg: Msg) -> Cmd<Msg> {
|
||||
// Record in crash context.
|
||||
self.crash_context.push(CrashEvent::MsgDispatched {
|
||||
msg_name: format!("{msg:?}")
|
||||
.split('(')
|
||||
.next()
|
||||
.unwrap_or("?")
|
||||
.to_string(),
|
||||
msg_name: msg.variant_name().to_string(),
|
||||
screen: self.navigation.current().label().to_string(),
|
||||
});
|
||||
|
||||
@@ -351,16 +390,24 @@ impl LoreApp {
|
||||
Cmd::none()
|
||||
}
|
||||
Msg::DiscussionsLoaded {
|
||||
generation,
|
||||
generation: _,
|
||||
key,
|
||||
discussions,
|
||||
} => {
|
||||
let screen = Screen::IssueDetail(key.clone());
|
||||
if self
|
||||
.supervisor
|
||||
.is_current(&TaskKey::LoadScreen(screen.clone()), generation)
|
||||
{
|
||||
self.state.issue_detail.apply_discussions(discussions);
|
||||
// Progressive hydration: the parent detail task already called
|
||||
// supervisor.complete(), so is_current() would return false.
|
||||
// Instead, check that the detail state still expects this key.
|
||||
match key.kind {
|
||||
crate::message::EntityKind::Issue => {
|
||||
if self.state.issue_detail.current_key.as_ref() == Some(&key) {
|
||||
self.state.issue_detail.apply_discussions(discussions);
|
||||
}
|
||||
}
|
||||
crate::message::EntityKind::MergeRequest => {
|
||||
if self.state.mr_detail.current_key.as_ref() == Some(&key) {
|
||||
self.state.mr_detail.apply_discussions(discussions);
|
||||
}
|
||||
}
|
||||
}
|
||||
Cmd::none()
|
||||
}
|
||||
@@ -384,6 +431,86 @@ impl LoreApp {
|
||||
Cmd::none()
|
||||
}
|
||||
|
||||
// --- Sync lifecycle (Bootstrap auto-transition) ---
|
||||
Msg::SyncStarted => {
|
||||
if *self.navigation.current() == Screen::Bootstrap {
|
||||
self.state.bootstrap.sync_started = true;
|
||||
}
|
||||
Cmd::none()
|
||||
}
|
||||
Msg::SyncCompleted { .. } => {
|
||||
// If we came from Bootstrap, replace nav history with Dashboard.
|
||||
if *self.navigation.current() == Screen::Bootstrap {
|
||||
self.state.bootstrap.sync_started = false;
|
||||
self.navigation.reset_to(Screen::Dashboard);
|
||||
|
||||
// Trigger a fresh dashboard load without preserving Bootstrap in history.
|
||||
let dashboard = Screen::Dashboard;
|
||||
let load_state = if self.state.load_state.was_visited(&dashboard) {
|
||||
LoadState::Refreshing
|
||||
} else {
|
||||
LoadState::LoadingInitial
|
||||
};
|
||||
self.state.set_loading(dashboard.clone(), load_state);
|
||||
let _handle = self.supervisor.submit(TaskKey::LoadScreen(dashboard));
|
||||
}
|
||||
Cmd::none()
|
||||
}
|
||||
|
||||
// --- Who screen ---
|
||||
Msg::WhoResultLoaded { generation, result } => {
|
||||
if self
|
||||
.supervisor
|
||||
.is_current(&TaskKey::LoadScreen(Screen::Who), generation)
|
||||
{
|
||||
self.state.who.apply_results(generation, *result);
|
||||
self.state.set_loading(Screen::Who, LoadState::Idle);
|
||||
self.supervisor
|
||||
.complete(&TaskKey::LoadScreen(Screen::Who), generation);
|
||||
}
|
||||
Cmd::none()
|
||||
}
|
||||
Msg::WhoModeChanged => {
|
||||
// Mode tab changed — view will re-render from state.
|
||||
Cmd::none()
|
||||
}
|
||||
|
||||
// --- File History screen ---
|
||||
Msg::FileHistoryLoaded { generation, result } => {
|
||||
if self
|
||||
.supervisor
|
||||
.is_current(&TaskKey::LoadScreen(Screen::FileHistory), generation)
|
||||
{
|
||||
self.state.file_history.apply_results(generation, *result);
|
||||
self.state.set_loading(Screen::FileHistory, LoadState::Idle);
|
||||
self.supervisor
|
||||
.complete(&TaskKey::LoadScreen(Screen::FileHistory), generation);
|
||||
}
|
||||
Cmd::none()
|
||||
}
|
||||
Msg::FileHistoryKnownPathsLoaded { paths } => {
|
||||
self.state.file_history.known_paths = paths;
|
||||
Cmd::none()
|
||||
}
|
||||
|
||||
// --- Trace screen ---
|
||||
Msg::TraceResultLoaded { generation, result } => {
|
||||
if self
|
||||
.supervisor
|
||||
.is_current(&TaskKey::LoadScreen(Screen::Trace), generation)
|
||||
{
|
||||
self.state.trace.apply_result(generation, *result);
|
||||
self.state.set_loading(Screen::Trace, LoadState::Idle);
|
||||
self.supervisor
|
||||
.complete(&TaskKey::LoadScreen(Screen::Trace), generation);
|
||||
}
|
||||
Cmd::none()
|
||||
}
|
||||
Msg::TraceKnownPathsLoaded { paths } => {
|
||||
self.state.trace.known_paths = paths;
|
||||
Cmd::none()
|
||||
}
|
||||
|
||||
// All other message variants: no-op for now.
|
||||
// Future phases will fill these in as screens are implemented.
|
||||
_ => Cmd::none(),
|
||||
|
||||
232
crates/lore-tui/src/entity_cache.rs
Normal file
232
crates/lore-tui/src/entity_cache.rs
Normal file
@@ -0,0 +1,232 @@
|
||||
//! Bounded LRU entity cache for near-instant detail view reopens.
|
||||
//!
|
||||
//! Caches `IssueDetail` / `MrDetail` payloads keyed on [`EntityKey`].
|
||||
//! Tick-based LRU eviction keeps the most-recently-accessed entries alive
|
||||
//! while bounding memory usage. Selective invalidation removes only
|
||||
//! stale entries after a sync, rather than flushing the whole cache.
|
||||
//!
|
||||
//! Single-threaded (TUI event loop) — no `Arc`/`Mutex` needed.
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::message::EntityKey;
|
||||
|
||||
/// Default entity cache capacity (sufficient for drill-in/out workflows).
|
||||
const DEFAULT_CAPACITY: usize = 64;
|
||||
|
||||
/// Bounded LRU cache keyed on [`EntityKey`].
|
||||
///
|
||||
/// Each entry stores its value alongside a monotonic tick recording the
|
||||
/// last access time. On capacity overflow, the entry with the lowest
|
||||
/// tick (least recently used) is evicted.
|
||||
pub struct EntityCache<V> {
|
||||
entries: HashMap<EntityKey, (V, u64)>,
|
||||
capacity: usize,
|
||||
tick: u64,
|
||||
}
|
||||
|
||||
impl<V> EntityCache<V> {
|
||||
/// Create a new cache with the default capacity (64).
|
||||
#[must_use]
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
entries: HashMap::with_capacity(DEFAULT_CAPACITY),
|
||||
capacity: DEFAULT_CAPACITY,
|
||||
tick: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new cache with the given capacity.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if `capacity` is zero.
|
||||
#[must_use]
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
assert!(capacity > 0, "EntityCache capacity must be > 0");
|
||||
Self {
|
||||
entries: HashMap::with_capacity(capacity),
|
||||
capacity,
|
||||
tick: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Look up an entry, bumping its access tick to keep it alive.
|
||||
pub fn get(&mut self, key: &EntityKey) -> Option<&V> {
|
||||
self.tick += 1;
|
||||
let tick = self.tick;
|
||||
self.entries.get_mut(key).map(|(val, t)| {
|
||||
*t = tick;
|
||||
&*val
|
||||
})
|
||||
}
|
||||
|
||||
/// Insert an entry, evicting the least-recently-accessed entry if at capacity.
|
||||
pub fn put(&mut self, key: EntityKey, value: V) {
|
||||
self.tick += 1;
|
||||
let tick = self.tick;
|
||||
|
||||
// If key already exists, just update in place.
|
||||
if let Some(entry) = self.entries.get_mut(&key) {
|
||||
*entry = (value, tick);
|
||||
return;
|
||||
}
|
||||
|
||||
// Evict LRU if at capacity.
|
||||
if self.entries.len() >= self.capacity {
|
||||
if let Some(lru_key) = self
|
||||
.entries
|
||||
.iter()
|
||||
.min_by_key(|(_, (_, t))| *t)
|
||||
.map(|(k, _)| k.clone())
|
||||
{
|
||||
self.entries.remove(&lru_key);
|
||||
}
|
||||
}
|
||||
|
||||
self.entries.insert(key, (value, tick));
|
||||
}
|
||||
|
||||
/// Remove only the specified keys, leaving all other entries intact.
|
||||
pub fn invalidate(&mut self, keys: &[EntityKey]) {
|
||||
for key in keys {
|
||||
self.entries.remove(key);
|
||||
}
|
||||
}
|
||||
|
||||
/// Number of entries currently cached.
|
||||
#[must_use]
|
||||
pub fn len(&self) -> usize {
|
||||
self.entries.len()
|
||||
}
|
||||
|
||||
/// Whether the cache is empty.
|
||||
#[must_use]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.entries.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl<V> Default for EntityCache<V> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for [`EntityCache`]: hit/miss, LRU eviction order,
    //! overwrite, selective invalidation, capacity bounds, and key-kind
    //! separation.

    use super::*;
    use crate::message::EntityKey;

    // Shorthand key constructors — all test keys live in project id 1.
    fn issue(iid: i64) -> EntityKey {
        EntityKey::issue(1, iid)
    }

    fn mr(iid: i64) -> EntityKey {
        EntityKey::mr(1, iid)
    }

    #[test]
    fn test_get_returns_recently_put_item() {
        let mut cache = EntityCache::with_capacity(4);
        cache.put(issue(1), "issue-1");
        assert_eq!(cache.get(&issue(1)), Some(&"issue-1"));
    }

    #[test]
    fn test_get_returns_none_for_missing_key() {
        let mut cache: EntityCache<&str> = EntityCache::with_capacity(4);
        assert_eq!(cache.get(&issue(99)), None);
    }

    #[test]
    fn test_lru_eviction_removes_least_recently_used() {
        let mut cache = EntityCache::with_capacity(3);
        cache.put(issue(1), "a"); // tick 1
        cache.put(issue(2), "b"); // tick 2
        cache.put(issue(3), "c"); // tick 3

        // Access issue(1) to bump its tick above issue(2).
        cache.get(&issue(1)); // tick 4 -> issue(1) now most recent

        // Insert a 4th item: should evict issue(2) (tick 2, lowest).
        cache.put(issue(4), "d"); // tick 5

        assert_eq!(cache.get(&issue(1)), Some(&"a"), "issue(1) should survive (recently accessed)");
        assert_eq!(cache.get(&issue(2)), None, "issue(2) should be evicted (LRU)");
        assert_eq!(cache.get(&issue(3)), Some(&"c"), "issue(3) should survive");
        assert_eq!(cache.get(&issue(4)), Some(&"d"), "issue(4) just inserted");
    }

    #[test]
    fn test_put_overwrites_existing_key() {
        let mut cache = EntityCache::with_capacity(4);
        cache.put(issue(1), "v1");
        cache.put(issue(1), "v2");
        assert_eq!(cache.get(&issue(1)), Some(&"v2"));
        // Overwrite must not grow the cache.
        assert_eq!(cache.len(), 1);
    }

    #[test]
    fn test_invalidate_removes_only_specified_keys() {
        let mut cache = EntityCache::with_capacity(8);
        cache.put(issue(1), "a");
        cache.put(issue(2), "b");
        cache.put(mr(3), "c");
        cache.put(mr(4), "d");

        cache.invalidate(&[issue(2), mr(4)]);

        assert_eq!(cache.get(&issue(1)), Some(&"a"), "issue(1) not invalidated");
        assert_eq!(cache.get(&issue(2)), None, "issue(2) was invalidated");
        assert_eq!(cache.get(&mr(3)), Some(&"c"), "mr(3) not invalidated");
        assert_eq!(cache.get(&mr(4)), None, "mr(4) was invalidated");
    }

    #[test]
    fn test_invalidate_with_nonexistent_keys_is_noop() {
        let mut cache = EntityCache::with_capacity(4);
        cache.put(issue(1), "a");
        cache.invalidate(&[issue(99), mr(99)]);
        assert_eq!(cache.len(), 1);
    }

    #[test]
    fn test_default_capacity_is_64() {
        let cache: EntityCache<String> = EntityCache::new();
        assert_eq!(cache.capacity, DEFAULT_CAPACITY);
        assert_eq!(cache.capacity, 64);
    }

    #[test]
    fn test_len_and_is_empty() {
        let mut cache = EntityCache::with_capacity(4);
        assert!(cache.is_empty());
        assert_eq!(cache.len(), 0);

        cache.put(issue(1), "a");
        assert!(!cache.is_empty());
        assert_eq!(cache.len(), 1);
    }

    #[test]
    #[should_panic(expected = "capacity must be > 0")]
    fn test_zero_capacity_panics() {
        let _: EntityCache<String> = EntityCache::with_capacity(0);
    }

    #[test]
    fn test_mixed_entity_kinds() {
        let mut cache = EntityCache::with_capacity(4);
        // Same iid, different kinds — should be separate entries.
        cache.put(issue(42), "issue-42");
        cache.put(mr(42), "mr-42");

        assert_eq!(cache.get(&issue(42)), Some(&"issue-42"));
        assert_eq!(cache.get(&mr(42)), Some(&"mr-42"));
        assert_eq!(cache.len(), 2);
    }
}
|
||||
@@ -31,6 +31,10 @@ pub mod view; // View layer: render_screen + common widgets (bd-26f2)
|
||||
pub mod action; // Data-fetching actions for TUI screens (bd-35g5+)
|
||||
pub mod filter_dsl; // Filter DSL tokenizer for list screen filter bars (bd-18qs)
|
||||
|
||||
// Phase 4 modules.
|
||||
pub mod entity_cache; // Bounded LRU entity cache for detail view reopens (bd-2og9)
|
||||
pub mod render_cache; // Bounded render cache for expensive per-frame computations (bd-2og9)
|
||||
|
||||
/// Options controlling how the TUI launches.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LaunchOptions {
|
||||
@@ -52,6 +56,14 @@ pub struct LaunchOptions {
|
||||
///
|
||||
/// Loads config from `options.config_path` (or default location),
|
||||
/// opens the database read-only, and enters the FrankenTUI event loop.
|
||||
///
|
||||
/// ## Preflight sequence
|
||||
///
|
||||
/// 1. **Schema preflight** — validate the database schema version before
|
||||
/// creating the app. If incompatible, print an actionable error and exit
|
||||
/// with a non-zero code.
|
||||
/// 2. **Data readiness** — check whether the database has any entity data.
|
||||
/// If empty, start on the Bootstrap screen; otherwise start on Dashboard.
|
||||
pub fn launch_tui(options: LaunchOptions) -> Result<()> {
|
||||
let _options = options;
|
||||
// Phase 1 will wire this to LoreApp + App::fullscreen().run()
|
||||
@@ -59,6 +71,30 @@ pub fn launch_tui(options: LaunchOptions) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Run the schema preflight check.
///
/// Returns `Ok(())` if the schema is compatible, or an error with an
/// actionable message if it's not. The caller should exit non-zero on error.
///
/// # Errors
/// Fails with a user-facing message when no database / `schema_version`
/// table exists, or when the schema version is below
/// `action::MINIMUM_SCHEMA_VERSION`.
pub fn schema_preflight(conn: &rusqlite::Connection) -> Result<()> {
    use state::bootstrap::SchemaCheck;

    match action::check_schema_version(conn, action::MINIMUM_SCHEMA_VERSION) {
        // Any compatible version is fine; the exact number is irrelevant here.
        SchemaCheck::Compatible { .. } => Ok(()),
        SchemaCheck::NoDB => {
            anyhow::bail!(
                "No lore database found.\n\
                Run 'lore init' to create a config, then 'lore sync' to fetch data."
            );
        }
        SchemaCheck::Incompatible { found, minimum } => {
            anyhow::bail!(
                "Database schema version {found} is too old (minimum: {minimum}).\n\
                Run 'lore migrate' to upgrade, or 'lore sync' to rebuild."
            );
        }
    }
}
|
||||
|
||||
/// Launch the TUI with an initial sync pass.
|
||||
///
|
||||
/// Runs `lore sync` in the background while displaying a progress screen,
|
||||
|
||||
@@ -18,7 +18,7 @@ use ftui::Event;
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Distinguishes issue vs merge request in an [`EntityKey`].
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub enum EntityKind {
|
||||
Issue,
|
||||
MergeRequest,
|
||||
@@ -84,6 +84,8 @@ pub enum Screen {
|
||||
Search,
|
||||
Timeline,
|
||||
Who,
|
||||
Trace,
|
||||
FileHistory,
|
||||
Sync,
|
||||
Stats,
|
||||
Doctor,
|
||||
@@ -103,6 +105,8 @@ impl Screen {
|
||||
Self::Search => "Search",
|
||||
Self::Timeline => "Timeline",
|
||||
Self::Who => "Who",
|
||||
Self::Trace => "Trace",
|
||||
Self::FileHistory => "File History",
|
||||
Self::Sync => "Sync",
|
||||
Self::Stats => "Stats",
|
||||
Self::Doctor => "Doctor",
|
||||
@@ -285,6 +289,24 @@ pub enum Msg {
|
||||
},
|
||||
WhoModeChanged,
|
||||
|
||||
// --- Trace ---
|
||||
TraceResultLoaded {
|
||||
generation: u64,
|
||||
result: Box<lore::core::trace::TraceResult>,
|
||||
},
|
||||
TraceKnownPathsLoaded {
|
||||
paths: Vec<String>,
|
||||
},
|
||||
|
||||
// --- File History ---
|
||||
FileHistoryLoaded {
|
||||
generation: u64,
|
||||
result: Box<crate::state::file_history::FileHistoryResult>,
|
||||
},
|
||||
FileHistoryKnownPathsLoaded {
|
||||
paths: Vec<String>,
|
||||
},
|
||||
|
||||
// --- Sync ---
|
||||
SyncStarted,
|
||||
SyncProgress {
|
||||
@@ -332,6 +354,72 @@ pub enum Msg {
|
||||
Quit,
|
||||
}
|
||||
|
||||
impl Msg {
    /// Return the variant name as a static string without formatting payload.
    ///
    /// Used by crash context to cheaply record which message was dispatched.
    /// Intentionally exhaustive (no `_` catch-all): adding a `Msg` variant
    /// forces this table to be updated at compile time.
    pub fn variant_name(&self) -> &'static str {
        match self {
            Self::RawEvent(_) => "RawEvent",
            Self::Tick => "Tick",
            Self::Resize { .. } => "Resize",
            Self::NavigateTo(_) => "NavigateTo",
            Self::GoBack => "GoBack",
            Self::GoForward => "GoForward",
            Self::GoHome => "GoHome",
            Self::JumpBack(_) => "JumpBack",
            Self::JumpForward(_) => "JumpForward",
            Self::OpenCommandPalette => "OpenCommandPalette",
            Self::CloseCommandPalette => "CloseCommandPalette",
            Self::CommandPaletteInput(_) => "CommandPaletteInput",
            Self::CommandPaletteSelect(_) => "CommandPaletteSelect",
            Self::IssueListLoaded { .. } => "IssueListLoaded",
            Self::IssueListFilterChanged(_) => "IssueListFilterChanged",
            Self::IssueListSortChanged => "IssueListSortChanged",
            Self::IssueSelected(_) => "IssueSelected",
            Self::MrListLoaded { .. } => "MrListLoaded",
            Self::MrListFilterChanged(_) => "MrListFilterChanged",
            Self::MrSelected(_) => "MrSelected",
            Self::IssueDetailLoaded { .. } => "IssueDetailLoaded",
            Self::MrDetailLoaded { .. } => "MrDetailLoaded",
            Self::DiscussionsLoaded { .. } => "DiscussionsLoaded",
            Self::SearchQueryChanged(_) => "SearchQueryChanged",
            Self::SearchRequestStarted { .. } => "SearchRequestStarted",
            Self::SearchExecuted { .. } => "SearchExecuted",
            Self::SearchResultSelected(_) => "SearchResultSelected",
            Self::SearchModeChanged => "SearchModeChanged",
            Self::SearchCapabilitiesLoaded => "SearchCapabilitiesLoaded",
            Self::TimelineLoaded { .. } => "TimelineLoaded",
            Self::TimelineEntitySelected(_) => "TimelineEntitySelected",
            Self::WhoResultLoaded { .. } => "WhoResultLoaded",
            Self::WhoModeChanged => "WhoModeChanged",
            Self::TraceResultLoaded { .. } => "TraceResultLoaded",
            Self::TraceKnownPathsLoaded { .. } => "TraceKnownPathsLoaded",
            Self::FileHistoryLoaded { .. } => "FileHistoryLoaded",
            Self::FileHistoryKnownPathsLoaded { .. } => "FileHistoryKnownPathsLoaded",
            Self::SyncStarted => "SyncStarted",
            Self::SyncProgress { .. } => "SyncProgress",
            Self::SyncProgressBatch { .. } => "SyncProgressBatch",
            Self::SyncLogLine(_) => "SyncLogLine",
            Self::SyncBackpressureDrop => "SyncBackpressureDrop",
            Self::SyncCompleted { .. } => "SyncCompleted",
            Self::SyncCancelled => "SyncCancelled",
            Self::SyncFailed(_) => "SyncFailed",
            Self::SyncStreamStats { .. } => "SyncStreamStats",
            Self::SearchDebounceArmed { .. } => "SearchDebounceArmed",
            Self::SearchDebounceFired { .. } => "SearchDebounceFired",
            Self::DashboardLoaded { .. } => "DashboardLoaded",
            Self::Error(_) => "Error",
            Self::ShowHelp => "ShowHelp",
            Self::ShowCliEquivalent => "ShowCliEquivalent",
            Self::OpenInBrowser => "OpenInBrowser",
            Self::BlurTextInput => "BlurTextInput",
            Self::ScrollToTopCurrentScreen => "ScrollToTopCurrentScreen",
            Self::Quit => "Quit",
        }
    }
}
|
||||
|
||||
/// Convert terminal events into messages.
|
||||
///
|
||||
/// FrankenTUI requires `From<Event>` on the message type so the runtime
|
||||
@@ -373,26 +461,130 @@ pub struct Discussion {
|
||||
pub notes: Vec<String>,
|
||||
}
|
||||
|
||||
/// Placeholder for a search result.
|
||||
// ---------------------------------------------------------------------------
|
||||
// SearchMode
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Which backend index a search query is executed against.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum SearchMode {
    /// FTS5 only — fast, always available if documents are indexed.
    #[default]
    Lexical,
    /// FTS5 + vector RRF merge — best quality when embeddings exist.
    Hybrid,
    /// Vector-only cosine similarity — requires Ollama embeddings.
    Semantic,
}

impl SearchMode {
    /// Short label shown by the mode indicator in the query bar.
    #[must_use]
    pub fn label(self) -> &'static str {
        match self {
            Self::Semantic => "Vec",
            Self::Hybrid => "Hybrid",
            Self::Lexical => "FTS",
        }
    }

    /// Advance to the next mode in the Lexical → Hybrid → Semantic cycle
    /// (wrapping back to Lexical).
    #[must_use]
    pub fn next(self) -> Self {
        match self {
            Self::Semantic => Self::Lexical,
            Self::Hybrid => Self::Semantic,
            Self::Lexical => Self::Hybrid,
        }
    }
}

impl std::fmt::Display for SearchMode {
    /// Display delegates to [`SearchMode::label`].
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.label())
    }
}
|
||||
|
||||
/// A search result from the local database.
#[derive(Debug, Clone)]
pub struct SearchResult {
    // Entity (issue or MR) this hit resolves to; used for navigation.
    pub key: EntityKey,
    // Title of the matched entity, shown in the result list.
    pub title: String,
    // Backend-reported relevance score.
    // NOTE(review): assumes higher = more relevant — confirm against the backend.
    pub score: f64,
    // Matched-text excerpt (presumably produced by the search backend — confirm producer).
    pub snippet: String,
    // Project path for display (e.g., "group/project").
    pub project_path: String,
}
|
||||
|
||||
/// Placeholder for a timeline event.
|
||||
// ---------------------------------------------------------------------------
|
||||
// TimelineEventKind
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Event kind for color coding in the TUI timeline.
///
/// Derived from raw resource event tables in the local database.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TimelineEventKind {
    /// Entity was created.
    Created,
    /// State changed (opened/closed/reopened/locked).
    StateChanged,
    /// Label added to entity.
    LabelAdded,
    /// Label removed from entity.
    LabelRemoved,
    /// Milestone set on entity.
    MilestoneSet,
    /// Milestone removed from entity.
    MilestoneRemoved,
    /// Merge request was merged.
    Merged,
}

impl TimelineEventKind {
    /// Short display label for the event kind badge.
    ///
    /// Label/milestone changes use `+`/`-` prefixes to distinguish
    /// add from remove in minimal width.
    #[must_use]
    pub fn label(self) -> &'static str {
        match self {
            Self::Merged => "Merged",
            Self::MilestoneRemoved => "-Mile",
            Self::MilestoneSet => "+Mile",
            Self::LabelRemoved => "-Label",
            Self::LabelAdded => "+Label",
            Self::StateChanged => "State",
            Self::Created => "Created",
        }
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// TimelineEvent
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// A timeline event for TUI display.
///
/// Produced by [`crate::action::fetch_timeline_events`] from raw
/// resource event tables. Contains enough data for the view to
/// render color-coded events with navigable entity references.
#[derive(Debug, Clone)]
pub struct TimelineEvent {
    // Timestamp as a string — presumably pre-formatted for display; confirm producer.
    pub timestamp: String,
    // Free-form event text.
    // NOTE(review): overlaps with `summary` below — confirm which one the view renders.
    pub description: String,
    /// Epoch milliseconds (UTC).
    pub timestamp_ms: i64,
    /// Entity this event belongs to (for navigation).
    pub entity_key: EntityKey,
    /// Event kind for color coding.
    pub event_kind: TimelineEventKind,
    /// Human-readable summary (e.g., "State changed to closed").
    pub summary: String,
    /// Optional detail text (e.g., label name, new state value).
    pub detail: Option<String>,
    /// Who performed the action.
    pub actor: Option<String>,
    /// Project path for display (e.g., "group/project").
    pub project_path: String,
}
|
||||
|
||||
/// Placeholder for who/people intelligence result.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct WhoResult {
|
||||
pub experts: Vec<String>,
|
||||
}
|
||||
// WhoResult is re-exported from the lore core crate.
|
||||
pub use lore::core::who_types::WhoResult;
|
||||
|
||||
// DashboardData moved to crate::state::dashboard (enriched with
|
||||
// EntityCounts, ProjectSyncInfo, RecentActivityItem, LastSyncInfo).
|
||||
@@ -500,4 +692,49 @@ mod tests {
|
||||
let msg = Msg::from(Event::Focus(true));
|
||||
assert!(matches!(msg, Msg::RawEvent(Event::Focus(true))));
|
||||
}
|
||||
|
||||
    // -- SearchMode tests --

    #[test]
    fn test_search_mode_labels() {
        assert_eq!(SearchMode::Lexical.label(), "FTS");
        assert_eq!(SearchMode::Hybrid.label(), "Hybrid");
        assert_eq!(SearchMode::Semantic.label(), "Vec");
    }

    #[test]
    fn test_search_mode_next_cycles() {
        // Full wrap-around: Lexical -> Hybrid -> Semantic -> Lexical.
        assert_eq!(SearchMode::Lexical.next(), SearchMode::Hybrid);
        assert_eq!(SearchMode::Hybrid.next(), SearchMode::Semantic);
        assert_eq!(SearchMode::Semantic.next(), SearchMode::Lexical);
    }

    #[test]
    fn test_search_mode_display() {
        // Display delegates to label().
        assert_eq!(format!("{}", SearchMode::Lexical), "FTS");
        assert_eq!(format!("{}", SearchMode::Hybrid), "Hybrid");
        assert_eq!(format!("{}", SearchMode::Semantic), "Vec");
    }

    #[test]
    fn test_search_mode_default_is_lexical() {
        assert_eq!(SearchMode::default(), SearchMode::Lexical);
    }

    // -- TimelineEventKind tests --

    #[test]
    fn test_timeline_event_kind_labels() {
        assert_eq!(TimelineEventKind::Created.label(), "Created");
        assert_eq!(TimelineEventKind::StateChanged.label(), "State");
        assert_eq!(TimelineEventKind::LabelAdded.label(), "+Label");
        assert_eq!(TimelineEventKind::LabelRemoved.label(), "-Label");
        assert_eq!(TimelineEventKind::MilestoneSet.label(), "+Mile");
        assert_eq!(TimelineEventKind::MilestoneRemoved.label(), "-Mile");
        assert_eq!(TimelineEventKind::Merged.label(), "Merged");
    }

    #[test]
    fn test_timeline_event_kind_equality() {
        assert_eq!(TimelineEventKind::Created, TimelineEventKind::Created);
        assert_ne!(TimelineEventKind::Created, TimelineEventKind::Merged);
    }
||||
}
|
||||
|
||||
252
crates/lore-tui/src/render_cache.rs
Normal file
252
crates/lore-tui/src/render_cache.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
//! Bounded render cache for expensive per-frame computations.
|
||||
//!
|
||||
//! Caches pre-computed render artifacts (markdown to styled text, discussion
|
||||
//! tree layout, issue body rendering) keyed on `(content_hash, terminal_width)`.
|
||||
//! Width is part of the key because line wrapping changes with terminal size.
|
||||
//!
|
||||
//! Invalidation strategies:
|
||||
//! - **Width change** (`invalidate_width`): purge entries not matching current width
|
||||
//! - **Theme change** (`invalidate_all`): full clear (colors changed)
|
||||
//!
|
||||
//! Single-threaded (TUI event loop) — no `Arc`/`Mutex` needed.
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Default render cache capacity.
const DEFAULT_CAPACITY: usize = 256;

/// Cache key: content identity + terminal width that produced the render.
///
/// Width is part of the key because line wrapping changes with terminal size.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct RenderCacheKey {
    /// Hash of the source content (e.g., `DefaultHasher` or FxHash of text).
    pub content_hash: u64,
    /// Terminal width at the time of rendering.
    pub terminal_width: u16,
}

impl RenderCacheKey {
    /// Create a new render cache key from a content hash and a width.
    #[must_use]
    pub fn new(content_hash: u64, terminal_width: u16) -> Self {
        Self {
            content_hash,
            terminal_width,
        }
    }
}

/// Bounded cache for pre-computed render artifacts.
///
/// Capacity-bounded with oldest-first eviction: each `put` stamps the
/// entry with a monotonic tick, and when the cache is full the entry
/// with the lowest stamp is dropped. Lookups do NOT refresh stamps —
/// this is deliberately simpler than full LRU because render cache
/// hits tend to be ephemeral: the current frame's renders matter most.
pub struct RenderCache<V> {
    entries: HashMap<RenderCacheKey, (V, u64)>,
    capacity: usize,
    tick: u64,
}

impl<V> RenderCache<V> {
    /// Create a new cache with the default capacity (256).
    #[must_use]
    pub fn new() -> Self {
        // Delegate so capacity validation lives in one place.
        Self::with_capacity(DEFAULT_CAPACITY)
    }

    /// Create a new cache bounded at `capacity` entries.
    ///
    /// # Panics
    /// Panics if `capacity` is zero.
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Self {
        assert!(capacity > 0, "RenderCache capacity must be > 0");
        Self {
            entries: HashMap::with_capacity(capacity),
            capacity,
            tick: 0,
        }
    }

    /// Look up a cached render artifact (does not refresh its stamp).
    pub fn get(&self, key: &RenderCacheKey) -> Option<&V> {
        self.entries.get(key).map(|(artifact, _)| artifact)
    }

    /// Insert a render artifact, evicting the oldest entry if at capacity.
    pub fn put(&mut self, key: RenderCacheKey, value: V) {
        self.tick += 1;
        let stamp = self.tick;

        // Re-inserting an existing key just refreshes value and stamp.
        if let Some(slot) = self.entries.get_mut(&key) {
            *slot = (value, stamp);
            return;
        }

        // Full and the key is new: drop the stalest entry first.
        if self.entries.len() >= self.capacity {
            let stalest = self
                .entries
                .iter()
                .min_by_key(|(_, (_, t))| *t)
                .map(|(k, _)| *k);
            if let Some(stalest) = stalest {
                self.entries.remove(&stalest);
            }
        }

        self.entries.insert(key, (value, stamp));
    }

    /// Remove entries NOT matching the given width (terminal resize).
    ///
    /// After a resize, only entries rendered at the new width are still valid.
    pub fn invalidate_width(&mut self, keep_width: u16) {
        self.entries.retain(|k, _| k.terminal_width == keep_width);
    }

    /// Clear the entire cache (theme change — all colors invalidated).
    pub fn invalidate_all(&mut self) {
        self.entries.clear();
    }

    /// Number of entries currently cached.
    #[must_use]
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// Whether the cache is empty.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
}

impl<V> Default for RenderCache<V> {
    fn default() -> Self {
        Self::new()
    }
}
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for [`RenderCache`]: hit/miss, width-scoped keys,
    //! overwrite, oldest-first eviction, width/theme invalidation, and
    //! capacity bounds.

    use super::*;

    // Shorthand key constructor.
    fn key(hash: u64, width: u16) -> RenderCacheKey {
        RenderCacheKey::new(hash, width)
    }

    #[test]
    fn test_get_returns_recently_put_item() {
        let mut cache = RenderCache::with_capacity(4);
        cache.put(key(100, 80), "rendered-a");
        assert_eq!(cache.get(&key(100, 80)), Some(&"rendered-a"));
    }

    #[test]
    fn test_get_returns_none_for_missing_key() {
        let cache: RenderCache<&str> = RenderCache::with_capacity(4);
        assert_eq!(cache.get(&key(100, 80)), None);
    }

    #[test]
    fn test_same_hash_different_width_are_separate() {
        // Width is part of the key: same content at two widths coexists.
        let mut cache = RenderCache::with_capacity(4);
        cache.put(key(100, 80), "wide");
        cache.put(key(100, 40), "narrow");

        assert_eq!(cache.get(&key(100, 80)), Some(&"wide"));
        assert_eq!(cache.get(&key(100, 40)), Some(&"narrow"));
        assert_eq!(cache.len(), 2);
    }

    #[test]
    fn test_put_overwrites_existing_key() {
        let mut cache = RenderCache::with_capacity(4);
        cache.put(key(100, 80), "v1");
        cache.put(key(100, 80), "v2");
        assert_eq!(cache.get(&key(100, 80)), Some(&"v2"));
        // Overwrite must not grow the cache.
        assert_eq!(cache.len(), 1);
    }

    #[test]
    fn test_eviction_at_capacity() {
        let mut cache = RenderCache::with_capacity(2);
        cache.put(key(1, 80), "a"); // tick 1
        cache.put(key(2, 80), "b"); // tick 2
        cache.put(key(3, 80), "c"); // tick 3 -> evicts key(1) (tick 1, oldest)

        assert_eq!(cache.get(&key(1, 80)), None, "oldest should be evicted");
        assert_eq!(cache.get(&key(2, 80)), Some(&"b"));
        assert_eq!(cache.get(&key(3, 80)), Some(&"c"));
    }

    #[test]
    fn test_invalidate_width_removes_non_matching() {
        let mut cache = RenderCache::with_capacity(8);
        cache.put(key(1, 80), "a");
        cache.put(key(2, 80), "b");
        cache.put(key(3, 120), "c");
        cache.put(key(4, 40), "d");

        cache.invalidate_width(80);

        assert_eq!(cache.get(&key(1, 80)), Some(&"a"), "width=80 kept");
        assert_eq!(cache.get(&key(2, 80)), Some(&"b"), "width=80 kept");
        assert_eq!(cache.get(&key(3, 120)), None, "width=120 removed");
        assert_eq!(cache.get(&key(4, 40)), None, "width=40 removed");
        assert_eq!(cache.len(), 2);
    }

    #[test]
    fn test_invalidate_all_clears_everything() {
        let mut cache = RenderCache::with_capacity(8);
        cache.put(key(1, 80), "a");
        cache.put(key(2, 120), "b");
        cache.put(key(3, 40), "c");

        cache.invalidate_all();

        assert!(cache.is_empty());
        assert_eq!(cache.len(), 0);
    }

    #[test]
    fn test_default_capacity_is_256() {
        let cache: RenderCache<String> = RenderCache::new();
        assert_eq!(cache.capacity, DEFAULT_CAPACITY);
        assert_eq!(cache.capacity, 256);
    }

    #[test]
    fn test_len_and_is_empty() {
        let mut cache = RenderCache::with_capacity(4);
        assert!(cache.is_empty());

        cache.put(key(1, 80), "a");
        assert!(!cache.is_empty());
        assert_eq!(cache.len(), 1);
    }

    #[test]
    #[should_panic(expected = "capacity must be > 0")]
    fn test_zero_capacity_panics() {
        let _: RenderCache<String> = RenderCache::with_capacity(0);
    }

    #[test]
    fn test_invalidate_width_on_empty_cache_is_noop() {
        let mut cache: RenderCache<&str> = RenderCache::with_capacity(4);
        cache.invalidate_width(80);
        assert!(cache.is_empty());
    }
}
|
||||
160
crates/lore-tui/src/state/bootstrap.rs
Normal file
160
crates/lore-tui/src/state/bootstrap.rs
Normal file
@@ -0,0 +1,160 @@
|
||||
#![allow(dead_code)] // Phase 2.5: consumed by Bootstrap screen
|
||||
|
||||
//! Bootstrap screen state.
|
||||
//!
|
||||
//! Handles first-launch and empty-database scenarios. The schema
|
||||
//! preflight runs before the TUI event loop; the bootstrap screen
|
||||
//! guides users to sync when no data is available.
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// DataReadiness
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Result of checking whether the database has enough data to show the TUI.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DataReadiness {
    /// Database has at least one issue.
    pub has_issues: bool,
    /// Database has at least one merge request.
    pub has_mrs: bool,
    /// Database has at least one search document.
    pub has_documents: bool,
    /// Current schema version from the schema_version table.
    pub schema_version: i32,
}

impl DataReadiness {
    /// Whether the database has any entity data at all.
    ///
    /// Only issues and merge requests count; search documents alone
    /// do not make the database "ready".
    #[must_use]
    pub fn has_any_data(&self) -> bool {
        self.has_issues || self.has_mrs
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// SchemaCheck
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Result of schema version validation.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SchemaCheck {
    /// Schema is at or above the minimum required version.
    Compatible {
        /// The detected schema version.
        version: i32,
    },
    /// No database or no schema_version table found.
    NoDB,
    /// Schema exists but is too old for this TUI version.
    Incompatible {
        /// The schema version actually found in the database.
        found: i32,
        /// The minimum version this TUI build requires.
        minimum: i32,
    },
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// BootstrapState
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// State for the Bootstrap screen.
///
/// Shown on first launch or when the database is empty; guides the user
/// toward running an initial sync.
#[derive(Debug, Default)]
pub struct BootstrapState {
    /// Result of the data readiness check (`None` until the check completes).
    pub readiness: Option<DataReadiness>,
    /// Whether the user has initiated a sync from the bootstrap screen.
    pub sync_started: bool,
}
|
||||
|
||||
impl BootstrapState {
|
||||
/// Apply a data readiness result.
|
||||
pub fn apply_readiness(&mut self, readiness: DataReadiness) {
|
||||
self.readiness = Some(readiness);
|
||||
}
|
||||
|
||||
/// Whether we have data (and should auto-transition to Dashboard).
|
||||
#[must_use]
|
||||
pub fn should_transition_to_dashboard(&self) -> bool {
|
||||
self.readiness
|
||||
.as_ref()
|
||||
.is_some_and(DataReadiness::has_any_data)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // `has_any_data` must depend only on issues/MRs, never on documents.
    #[test]
    fn test_data_readiness_has_any_data() {
        let empty = DataReadiness {
            has_issues: false,
            has_mrs: false,
            has_documents: false,
            schema_version: 26,
        };
        assert!(!empty.has_any_data());

        let with_issues = DataReadiness {
            has_issues: true,
            ..empty.clone()
        };
        assert!(with_issues.has_any_data());

        let with_mrs = DataReadiness {
            has_mrs: true,
            ..empty
        };
        assert!(with_mrs.has_any_data());
    }

    // Smoke-test that each SchemaCheck variant constructs and matches.
    #[test]
    fn test_schema_check_variants() {
        let compat = SchemaCheck::Compatible { version: 26 };
        assert!(matches!(compat, SchemaCheck::Compatible { version: 26 }));

        let no_db = SchemaCheck::NoDB;
        assert!(matches!(no_db, SchemaCheck::NoDB));

        let incompat = SchemaCheck::Incompatible {
            found: 10,
            minimum: 20,
        };
        assert!(matches!(
            incompat,
            SchemaCheck::Incompatible {
                found: 10,
                minimum: 20
            }
        ));
    }

    // A fresh state has no readiness info and must not auto-transition.
    #[test]
    fn test_bootstrap_state_default() {
        let state = BootstrapState::default();
        assert!(state.readiness.is_none());
        assert!(!state.sync_started);
        assert!(!state.should_transition_to_dashboard());
    }

    // An empty database keeps the user on the bootstrap screen.
    #[test]
    fn test_bootstrap_state_apply_readiness_empty() {
        let mut state = BootstrapState::default();
        state.apply_readiness(DataReadiness {
            has_issues: false,
            has_mrs: false,
            has_documents: false,
            schema_version: 26,
        });
        assert!(!state.should_transition_to_dashboard());
    }

    // Any entity data (here: issues) triggers the dashboard transition.
    #[test]
    fn test_bootstrap_state_apply_readiness_with_data() {
        let mut state = BootstrapState::default();
        state.apply_readiness(DataReadiness {
            has_issues: true,
            has_mrs: false,
            has_documents: false,
            schema_version: 26,
        });
        assert!(state.should_transition_to_dashboard());
    }
}
|
||||
@@ -1,11 +1,304 @@
|
||||
#![allow(dead_code)]
|
||||
//! Command palette state and fuzzy matching.
|
||||
//!
|
||||
//! The command palette is a modal overlay (Ctrl+P) that provides fuzzy-match
|
||||
//! access to all commands. Populated from [`CommandRegistry::palette_entries`].
|
||||
|
||||
//! Command palette state.
|
||||
use crate::commands::{CommandId, CommandRegistry};
|
||||
use crate::message::Screen;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PaletteEntry
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// A single entry in the filtered palette list.
///
/// A render-ready snapshot of a registry command: the id for execution plus
/// pre-formatted display strings.
#[derive(Debug, Clone)]
pub struct PaletteEntry {
    /// Command ID for execution.
    pub id: CommandId,
    /// Human-readable label.
    pub label: &'static str,
    /// Keybinding display string (e.g., "g i"); `None` when unbound.
    pub keybinding: Option<String>,
    /// Help text / description.
    pub help_text: &'static str,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// CommandPaletteState
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// State for the command palette overlay.
#[derive(Debug, Default)]
pub struct CommandPaletteState {
    /// Current query text.
    pub query: String,
    /// Whether the query input is focused. This also doubles as the
    /// open/closed flag for the overlay (see `is_open`).
    pub query_focused: bool,
    /// Cursor position within the query string (byte offset).
    pub cursor: usize,
    /// Index of the currently selected entry in `filtered`.
    pub selected_index: usize,
    /// Filtered and scored palette entries.
    pub filtered: Vec<PaletteEntry>,
}
|
||||
|
||||
impl CommandPaletteState {
|
||||
/// Open the palette: reset query, focus input, populate with all commands.
|
||||
pub fn open(&mut self, registry: &CommandRegistry, screen: &Screen) {
|
||||
self.query.clear();
|
||||
self.cursor = 0;
|
||||
self.query_focused = true;
|
||||
self.selected_index = 0;
|
||||
self.refilter(registry, screen);
|
||||
}
|
||||
|
||||
/// Close the palette: unfocus and clear state.
|
||||
pub fn close(&mut self) {
|
||||
self.query_focused = false;
|
||||
self.query.clear();
|
||||
self.cursor = 0;
|
||||
self.selected_index = 0;
|
||||
self.filtered.clear();
|
||||
}
|
||||
|
||||
/// Insert a character at the cursor position.
|
||||
pub fn insert_char(&mut self, c: char, registry: &CommandRegistry, screen: &Screen) {
|
||||
self.query.insert(self.cursor, c);
|
||||
self.cursor += c.len_utf8();
|
||||
self.selected_index = 0;
|
||||
self.refilter(registry, screen);
|
||||
}
|
||||
|
||||
/// Delete the character before the cursor.
|
||||
pub fn delete_back(&mut self, registry: &CommandRegistry, screen: &Screen) {
|
||||
if self.cursor > 0 {
|
||||
// Find the previous character boundary.
|
||||
let prev = self.query[..self.cursor]
|
||||
.char_indices()
|
||||
.next_back()
|
||||
.map_or(0, |(i, _)| i);
|
||||
self.query.drain(prev..self.cursor);
|
||||
self.cursor = prev;
|
||||
self.selected_index = 0;
|
||||
self.refilter(registry, screen);
|
||||
}
|
||||
}
|
||||
|
||||
/// Move selection up by one.
|
||||
pub fn select_prev(&mut self) {
|
||||
self.selected_index = self.selected_index.saturating_sub(1);
|
||||
}
|
||||
|
||||
/// Move selection down by one.
|
||||
pub fn select_next(&mut self) {
|
||||
if !self.filtered.is_empty() {
|
||||
self.selected_index = (self.selected_index + 1).min(self.filtered.len() - 1);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the currently selected entry's command ID.
|
||||
#[must_use]
|
||||
pub fn selected_command_id(&self) -> Option<CommandId> {
|
||||
self.filtered.get(self.selected_index).map(|e| e.id)
|
||||
}
|
||||
|
||||
/// Whether the palette is visible/active.
|
||||
#[must_use]
|
||||
pub fn is_open(&self) -> bool {
|
||||
self.query_focused
|
||||
}
|
||||
|
||||
/// Recompute the filtered list from the registry.
|
||||
fn refilter(&mut self, registry: &CommandRegistry, screen: &Screen) {
|
||||
let entries = registry.palette_entries(screen);
|
||||
let query_lower = self.query.to_lowercase();
|
||||
|
||||
self.filtered = entries
|
||||
.into_iter()
|
||||
.filter(|cmd| {
|
||||
if query_lower.is_empty() {
|
||||
return true;
|
||||
}
|
||||
fuzzy_match(&query_lower, cmd.label) || fuzzy_match(&query_lower, cmd.help_text)
|
||||
})
|
||||
.map(|cmd| PaletteEntry {
|
||||
id: cmd.id,
|
||||
label: cmd.label,
|
||||
keybinding: cmd.keybinding.as_ref().map(|kb| kb.display()),
|
||||
help_text: cmd.help_text,
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Clamp selection.
|
||||
if !self.filtered.is_empty() {
|
||||
self.selected_index = self.selected_index.min(self.filtered.len() - 1);
|
||||
} else {
|
||||
self.selected_index = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Fuzzy matching
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Subsequence fuzzy match: every character in `query` must appear in `text`
/// in order, case-insensitive.
///
/// Both sides are lowercased. The previous version lowercased only `text`,
/// so an uppercase character in `query` could never match, contradicting the
/// documented case-insensitivity. Callers that pre-lowercase the query (as
/// `refilter` does) are unaffected.
fn fuzzy_match(query: &str, text: &str) -> bool {
    let text_lower = text.to_lowercase();
    let mut text_chars = text_lower.chars();
    // `any` advances the shared iterator, enforcing in-order matching.
    for qc in query.chars().flat_map(char::to_lowercase) {
        if !text_chars.any(|tc| tc == qc) {
            return false;
        }
    }
    true
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::commands::build_registry;

    #[test]
    fn test_fuzzy_match_exact() {
        assert!(fuzzy_match("quit", "Quit"));
    }

    // Non-contiguous subsequences must match.
    #[test]
    fn test_fuzzy_match_subsequence() {
        assert!(fuzzy_match("gi", "Go to Issues"));
        assert!(fuzzy_match("iss", "Go to Issues"));
    }

    #[test]
    fn test_fuzzy_match_case_insensitive() {
        assert!(fuzzy_match("help", "Show keybinding help overlay"));
    }

    #[test]
    fn test_fuzzy_match_no_match() {
        assert!(!fuzzy_match("xyz", "Quit"));
    }

    // An empty query matches everything (used to show the full list).
    #[test]
    fn test_fuzzy_match_empty_query() {
        assert!(fuzzy_match("", "anything"));
    }

    #[test]
    fn test_palette_open_populates_all() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);

        assert!(state.query_focused);
        assert!(state.query.is_empty());
        assert!(!state.filtered.is_empty());
        // All palette-eligible commands for Dashboard should be present.
        let palette_count = registry.palette_entries(&Screen::Dashboard).len();
        assert_eq!(state.filtered.len(), palette_count);
    }

    #[test]
    fn test_palette_filter_narrows_results() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);

        let all_count = state.filtered.len();
        state.insert_char('i', &registry, &Screen::Dashboard);
        state.insert_char('s', &registry, &Screen::Dashboard);
        state.insert_char('s', &registry, &Screen::Dashboard);

        // "iss" should match "Go to Issues" but not most other commands.
        assert!(state.filtered.len() < all_count);
        assert!(state.filtered.iter().any(|e| e.label == "Go to Issues"));
    }

    #[test]
    fn test_palette_delete_back_widens_results() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);

        state.insert_char('q', &registry, &Screen::Dashboard);
        let narrow_count = state.filtered.len();
        state.delete_back(&registry, &Screen::Dashboard);
        // After deleting, query is empty — should show all commands again.
        assert!(state.filtered.len() > narrow_count);
    }

    // Up/down navigation is clamped at both ends of the list.
    #[test]
    fn test_palette_select_navigation() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);

        assert_eq!(state.selected_index, 0);
        state.select_next();
        assert_eq!(state.selected_index, 1);
        state.select_next();
        assert_eq!(state.selected_index, 2);
        state.select_prev();
        assert_eq!(state.selected_index, 1);
        state.select_prev();
        assert_eq!(state.selected_index, 0);
        state.select_prev(); // Should not go below 0.
        assert_eq!(state.selected_index, 0);
    }

    #[test]
    fn test_palette_selected_command_id() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);

        assert!(state.selected_command_id().is_some());
    }

    // Closing the palette must reset every piece of transient state.
    #[test]
    fn test_palette_close_resets() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);
        state.insert_char('q', &registry, &Screen::Dashboard);
        state.select_next();

        state.close();
        assert!(!state.query_focused);
        assert!(state.query.is_empty());
        assert_eq!(state.selected_index, 0);
        assert!(state.filtered.is_empty());
    }

    // A non-matching query leaves the list empty and no selectable command.
    #[test]
    fn test_palette_empty_query_no_match_returns_empty() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);

        // Type something that matches nothing.
        for c in "zzzzzz".chars() {
            state.insert_char(c, &registry, &Screen::Dashboard);
        }
        assert!(state.filtered.is_empty());
        assert!(state.selected_command_id().is_none());
    }

    #[test]
    fn test_palette_keybinding_display() {
        let registry = build_registry();
        let mut state = CommandPaletteState::default();
        state.open(&registry, &Screen::Dashboard);

        // "Quit" should have keybinding "q".
        let quit_entry = state.filtered.iter().find(|e| e.id == "quit");
        assert!(quit_entry.is_some());
        assert_eq!(quit_entry.unwrap().keybinding.as_deref(), Some("q"));
    }
}
|
||||
|
||||
364
crates/lore-tui/src/state/file_history.rs
Normal file
364
crates/lore-tui/src/state/file_history.rs
Normal file
@@ -0,0 +1,364 @@
|
||||
//! File History screen state — per-file MR timeline with rename tracking.
|
||||
//!
|
||||
//! Shows which MRs touched a file over time, resolving renames via BFS.
|
||||
//! Users enter a file path, toggle options (follow renames, merged only,
|
||||
//! show discussions), and browse a chronological MR list.
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// FileHistoryState
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// State for the File History screen.
#[derive(Debug, Default)]
pub struct FileHistoryState {
    /// User-entered file path.
    pub path_input: String,
    /// Cursor position within `path_input` (byte offset).
    pub path_cursor: usize,
    /// Whether the path input field has keyboard focus.
    pub path_focused: bool,

    /// The most recent result (None until first query).
    pub result: Option<FileHistoryResult>,

    /// Index of the currently selected MR in the result list.
    pub selected_mr_index: usize,
    /// Vertical scroll offset for the MR list.
    pub scroll_offset: u16,

    /// Whether to follow rename chains. The derived `Default` starts this at
    /// `false`; the user toggles it on.
    pub follow_renames: bool,
    /// Whether to show only merged MRs (default false).
    pub merged_only: bool,
    /// Whether to show inline discussion snippets (default false).
    pub show_discussions: bool,

    /// Cached list of known file paths for autocomplete.
    pub known_paths: Vec<String>,
    /// Filtered autocomplete matches for current input.
    pub autocomplete_matches: Vec<String>,
    /// Currently highlighted autocomplete suggestion index.
    pub autocomplete_index: usize,

    /// Monotonic generation counter for stale-response detection.
    pub generation: u64,
    /// Whether a query is currently in-flight.
    pub loading: bool,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Result types (local to TUI — avoids coupling to CLI command structs)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Full result of a file-history query.
#[derive(Debug)]
pub struct FileHistoryResult {
    /// The queried file path.
    pub path: String,
    /// Resolved rename chain (may be just the original path).
    pub rename_chain: Vec<String>,
    /// Whether renames were actually followed.
    pub renames_followed: bool,
    /// MRs that touched any path in the rename chain.
    pub merge_requests: Vec<FileHistoryMr>,
    /// DiffNote discussion snippets on the file (when requested).
    pub discussions: Vec<FileDiscussion>,
    /// Total MR count (may exceed displayed count if limited).
    pub total_mrs: usize,
    /// Number of distinct file paths searched.
    pub paths_searched: usize,
}
|
||||
|
||||
/// A single MR that touched the file.
#[derive(Debug)]
pub struct FileHistoryMr {
    /// Project-local MR identifier (iid).
    pub iid: i64,
    /// MR title.
    pub title: String,
    /// "merged", "opened", or "closed".
    pub state: String,
    /// Username of the MR author.
    pub author_username: String,
    /// "added", "modified", "deleted", or "renamed".
    pub change_type: String,
    /// Merge timestamp in epoch milliseconds; `None` if not merged.
    pub merged_at_ms: Option<i64>,
    /// Last-updated timestamp in epoch milliseconds.
    pub updated_at_ms: i64,
    /// Merge commit SHA, when available.
    pub merge_commit_sha: Option<String>,
}
|
||||
|
||||
/// A DiffNote discussion snippet on the file.
#[derive(Debug)]
pub struct FileDiscussion {
    /// Identifier of the discussion thread.
    pub discussion_id: String,
    /// Username of the note author.
    pub author_username: String,
    /// Truncated note body for inline display.
    pub body_snippet: String,
    /// File path the note is attached to.
    pub path: String,
    /// Creation timestamp in epoch milliseconds.
    pub created_at_ms: i64,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// State methods
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
impl FileHistoryState {
|
||||
/// Enter the screen: focus the path input.
|
||||
pub fn enter(&mut self) {
|
||||
self.path_focused = true;
|
||||
self.path_cursor = self.path_input.len();
|
||||
}
|
||||
|
||||
/// Leave the screen: blur all inputs.
|
||||
pub fn leave(&mut self) {
|
||||
self.path_focused = false;
|
||||
}
|
||||
|
||||
/// Whether any text input has focus.
|
||||
#[must_use]
|
||||
pub fn has_text_focus(&self) -> bool {
|
||||
self.path_focused
|
||||
}
|
||||
|
||||
/// Blur all inputs.
|
||||
pub fn blur(&mut self) {
|
||||
self.path_focused = false;
|
||||
}
|
||||
|
||||
/// Submit the current path (trigger a query).
|
||||
/// Returns the generation for stale detection.
|
||||
pub fn submit(&mut self) -> u64 {
|
||||
self.loading = true;
|
||||
self.bump_generation()
|
||||
}
|
||||
|
||||
/// Apply query results if generation matches.
|
||||
pub fn apply_results(&mut self, generation: u64, result: FileHistoryResult) {
|
||||
if generation != self.generation {
|
||||
return; // Stale response — discard.
|
||||
}
|
||||
self.result = Some(result);
|
||||
self.loading = false;
|
||||
self.selected_mr_index = 0;
|
||||
self.scroll_offset = 0;
|
||||
}
|
||||
|
||||
/// Toggle follow_renames. Returns new generation for re-query.
|
||||
pub fn toggle_follow_renames(&mut self) -> u64 {
|
||||
self.follow_renames = !self.follow_renames;
|
||||
self.bump_generation()
|
||||
}
|
||||
|
||||
/// Toggle merged_only. Returns new generation for re-query.
|
||||
pub fn toggle_merged_only(&mut self) -> u64 {
|
||||
self.merged_only = !self.merged_only;
|
||||
self.bump_generation()
|
||||
}
|
||||
|
||||
/// Toggle show_discussions. Returns new generation for re-query.
|
||||
pub fn toggle_show_discussions(&mut self) -> u64 {
|
||||
self.show_discussions = !self.show_discussions;
|
||||
self.bump_generation()
|
||||
}
|
||||
|
||||
// --- Input field operations ---
|
||||
|
||||
/// Insert a char at cursor.
|
||||
pub fn insert_char(&mut self, c: char) {
|
||||
if self.path_focused {
|
||||
self.path_input.insert(self.path_cursor, c);
|
||||
self.path_cursor += c.len_utf8();
|
||||
}
|
||||
}
|
||||
|
||||
/// Delete the char before cursor.
|
||||
pub fn delete_char_before_cursor(&mut self) {
|
||||
if self.path_focused && self.path_cursor > 0 {
|
||||
let prev = prev_char_boundary(&self.path_input, self.path_cursor);
|
||||
self.path_input.drain(prev..self.path_cursor);
|
||||
self.path_cursor = prev;
|
||||
}
|
||||
}
|
||||
|
||||
/// Move cursor left.
|
||||
pub fn cursor_left(&mut self) {
|
||||
if self.path_focused && self.path_cursor > 0 {
|
||||
self.path_cursor = prev_char_boundary(&self.path_input, self.path_cursor);
|
||||
}
|
||||
}
|
||||
|
||||
/// Move cursor right.
|
||||
pub fn cursor_right(&mut self) {
|
||||
if self.path_focused && self.path_cursor < self.path_input.len() {
|
||||
self.path_cursor = next_char_boundary(&self.path_input, self.path_cursor);
|
||||
}
|
||||
}
|
||||
|
||||
// --- Selection navigation ---
|
||||
|
||||
/// Move selection up.
|
||||
pub fn select_prev(&mut self) {
|
||||
self.selected_mr_index = self.selected_mr_index.saturating_sub(1);
|
||||
}
|
||||
|
||||
/// Move selection down (bounded by result count).
|
||||
pub fn select_next(&mut self, result_count: usize) {
|
||||
if result_count > 0 {
|
||||
self.selected_mr_index = (self.selected_mr_index + 1).min(result_count - 1);
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensure the selected row is visible within the viewport.
|
||||
pub fn ensure_visible(&mut self, viewport_height: usize) {
|
||||
if viewport_height == 0 {
|
||||
return;
|
||||
}
|
||||
let offset = self.scroll_offset as usize;
|
||||
if self.selected_mr_index < offset {
|
||||
self.scroll_offset = self.selected_mr_index as u16;
|
||||
} else if self.selected_mr_index >= offset + viewport_height {
|
||||
self.scroll_offset = (self.selected_mr_index - viewport_height + 1) as u16;
|
||||
}
|
||||
}
|
||||
|
||||
// --- Internal ---
|
||||
|
||||
fn bump_generation(&mut self) -> u64 {
|
||||
self.generation += 1;
|
||||
self.generation
|
||||
}
|
||||
}
|
||||
|
||||
/// Byte offset of the nearest char boundary strictly before `pos`
/// (0 when the cursor is already at the start).
fn prev_char_boundary(s: &str, pos: usize) -> usize {
    // Byte 0 is always a boundary, so the fallback only fires for pos == 0.
    (0..pos)
        .rev()
        .find(|&candidate| s.is_char_boundary(candidate))
        .unwrap_or(0)
}
|
||||
|
||||
/// Find the byte offset of the next char boundary after `pos`, clamped to
/// `s.len()` so the result is always a valid slice index.
fn next_char_boundary(s: &str, pos: usize) -> usize {
    // Guard: at or past the end there is no next boundary. Without this the
    // function returned `pos + 1`, which is out of range for slicing
    // (callers currently guard `pos < len`, but be safe regardless).
    if pos >= s.len() {
        return s.len();
    }
    let mut i = pos + 1;
    while i < s.len() && !s.is_char_boundary(i) {
        i += 1;
    }
    i
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Derived Default: everything empty/false/zero.
    #[test]
    fn test_default_state() {
        let state = FileHistoryState::default();
        assert!(state.path_input.is_empty());
        assert!(!state.path_focused);
        assert!(state.result.is_none());
        assert!(!state.follow_renames); // Default false, toggled on by user
        assert!(!state.merged_only);
        assert!(!state.show_discussions);
        assert_eq!(state.generation, 0);
    }

    // `enter` focuses the input and puts the cursor at the end of the path.
    #[test]
    fn test_enter_focuses_path() {
        let mut state = FileHistoryState {
            path_input: "src/lib.rs".into(),
            ..FileHistoryState::default()
        };
        state.enter();
        assert!(state.path_focused);
        assert_eq!(state.path_cursor, 10);
    }

    #[test]
    fn test_submit_bumps_generation() {
        let mut state = FileHistoryState::default();
        let generation = state.submit();
        assert_eq!(generation, 1);
        assert!(state.loading);
    }

    // A response carrying an outdated generation must be ignored.
    #[test]
    fn test_stale_response_discarded() {
        let mut state = FileHistoryState::default();
        let stale_gen = state.submit();
        // Bump again (user toggled an option).
        let _new_gen = state.toggle_merged_only();
        // Stale result arrives.
        state.apply_results(
            stale_gen,
            FileHistoryResult {
                path: "src/lib.rs".into(),
                rename_chain: vec!["src/lib.rs".into()],
                renames_followed: false,
                merge_requests: vec![],
                discussions: vec![],
                total_mrs: 0,
                paths_searched: 1,
            },
        );
        assert!(state.result.is_none()); // Discarded.
    }

    // Every option toggle flips its flag and advances the generation.
    #[test]
    fn test_toggle_options_bump_generation() {
        let mut state = FileHistoryState::default();
        let g1 = state.toggle_follow_renames();
        assert_eq!(g1, 1);
        assert!(state.follow_renames);

        let g2 = state.toggle_merged_only();
        assert_eq!(g2, 2);
        assert!(state.merged_only);

        let g3 = state.toggle_show_discussions();
        assert_eq!(g3, 3);
        assert!(state.show_discussions);
    }

    #[test]
    fn test_insert_and_delete_char() {
        let mut state = FileHistoryState {
            path_focused: true,
            ..FileHistoryState::default()
        };
        state.insert_char('s');
        state.insert_char('r');
        state.insert_char('c');
        assert_eq!(state.path_input, "src");
        assert_eq!(state.path_cursor, 3);

        state.delete_char_before_cursor();
        assert_eq!(state.path_input, "sr");
        assert_eq!(state.path_cursor, 2);
    }

    // Selection navigation is clamped at index 0.
    #[test]
    fn test_select_prev_next() {
        let mut state = FileHistoryState::default();
        state.select_next(5);
        assert_eq!(state.selected_mr_index, 1);
        state.select_next(5);
        assert_eq!(state.selected_mr_index, 2);
        state.select_prev();
        assert_eq!(state.selected_mr_index, 1);
        state.select_prev();
        assert_eq!(state.selected_mr_index, 0);
        state.select_prev(); // Should not underflow.
        assert_eq!(state.selected_mr_index, 0);
    }

    // Selecting below the viewport scrolls so the row is the last visible.
    #[test]
    fn test_ensure_visible() {
        let mut state = FileHistoryState {
            selected_mr_index: 15,
            ..FileHistoryState::default()
        };
        state.ensure_visible(5);
        assert_eq!(state.scroll_offset, 11); // 15 - 5 + 1
    }
}
|
||||
@@ -13,8 +13,10 @@
|
||||
//! [`LoreApp`](crate::app::LoreApp) which dispatches through the
|
||||
//! [`TaskSupervisor`](crate::task_supervisor::TaskSupervisor).
|
||||
|
||||
pub mod bootstrap;
|
||||
pub mod command_palette;
|
||||
pub mod dashboard;
|
||||
pub mod file_history;
|
||||
pub mod issue_detail;
|
||||
pub mod issue_list;
|
||||
pub mod mr_detail;
|
||||
@@ -22,6 +24,7 @@ pub mod mr_list;
|
||||
pub mod search;
|
||||
pub mod sync;
|
||||
pub mod timeline;
|
||||
pub mod trace;
|
||||
pub mod who;
|
||||
|
||||
use std::collections::{HashMap, HashSet};
|
||||
@@ -29,8 +32,10 @@ use std::collections::{HashMap, HashSet};
|
||||
use crate::message::Screen;
|
||||
|
||||
// Re-export screen states for convenience.
|
||||
pub use bootstrap::BootstrapState;
|
||||
pub use command_palette::CommandPaletteState;
|
||||
pub use dashboard::DashboardState;
|
||||
pub use file_history::FileHistoryState;
|
||||
pub use issue_detail::IssueDetailState;
|
||||
pub use issue_list::IssueListState;
|
||||
pub use mr_detail::MrDetailState;
|
||||
@@ -38,6 +43,7 @@ pub use mr_list::MrListState;
|
||||
pub use search::SearchState;
|
||||
pub use sync::SyncState;
|
||||
pub use timeline::TimelineState;
|
||||
pub use trace::TraceState;
|
||||
pub use who::WhoState;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -163,6 +169,7 @@ pub struct ScopeContext {
|
||||
#[derive(Debug, Default)]
|
||||
pub struct AppState {
|
||||
// Per-screen states.
|
||||
pub bootstrap: BootstrapState,
|
||||
pub dashboard: DashboardState,
|
||||
pub issue_list: IssueListState,
|
||||
pub issue_detail: IssueDetailState,
|
||||
@@ -171,6 +178,8 @@ pub struct AppState {
|
||||
pub search: SearchState,
|
||||
pub timeline: TimelineState,
|
||||
pub who: WhoState,
|
||||
pub trace: TraceState,
|
||||
pub file_history: FileHistoryState,
|
||||
pub sync: SyncState,
|
||||
pub command_palette: CommandPaletteState,
|
||||
|
||||
@@ -205,6 +214,9 @@ impl AppState {
|
||||
|| self.mr_list.filter_focused
|
||||
|| self.search.query_focused
|
||||
|| self.command_palette.query_focused
|
||||
|| self.who.has_text_focus()
|
||||
|| self.trace.has_text_focus()
|
||||
|| self.file_history.has_text_focus()
|
||||
}
|
||||
|
||||
/// Remove focus from all text inputs.
|
||||
@@ -213,6 +225,9 @@ impl AppState {
|
||||
self.mr_list.filter_focused = false;
|
||||
self.search.query_focused = false;
|
||||
self.command_palette.query_focused = false;
|
||||
self.who.blur();
|
||||
self.trace.blur();
|
||||
self.file_history.blur();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,569 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! Search screen state.
|
||||
//! Search screen state — query input, mode selection, capability detection.
|
||||
//!
|
||||
//! The search screen supports three modes ([`SearchMode`]): Lexical (FTS5),
|
||||
//! Hybrid (FTS+vector RRF), and Semantic (vector-only). Available modes are
|
||||
//! gated by [`SearchCapabilities`], which probes the database on screen entry.
|
||||
|
||||
use crate::message::SearchResult;
|
||||
use crate::message::{SearchMode, SearchResult};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// SearchCapabilities
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// What search indexes are available in the local database.
///
/// Detected once on screen entry by probing FTS and embedding tables.
/// Used to gate which [`SearchMode`] values are selectable.
// `Eq` is deliberately not derived: `embedding_coverage_pct` is an `f32`.
#[derive(Debug, Clone, PartialEq)]
pub struct SearchCapabilities {
    /// FTS5 `documents_fts` table has rows.
    pub has_fts: bool,
    /// `embedding_metadata` table has rows.
    pub has_embeddings: bool,
    /// Percentage of documents that have embeddings (0.0–100.0).
    pub embedding_coverage_pct: f32,
}
|
||||
|
||||
impl Default for SearchCapabilities {
    /// No indexes detected; coverage zero.
    // NOTE(review): this is exactly what `#[derive(Default)]` would produce
    // (false/false/0.0) — presumably written out for explicitness; confirm
    // before replacing with the derive.
    fn default() -> Self {
        Self {
            has_fts: false,
            has_embeddings: false,
            embedding_coverage_pct: 0.0,
        }
    }
}
||||
|
||||
impl SearchCapabilities {
|
||||
/// Whether the given mode is usable with these capabilities.
|
||||
#[must_use]
|
||||
pub fn supports_mode(&self, mode: SearchMode) -> bool {
|
||||
match mode {
|
||||
SearchMode::Lexical => self.has_fts,
|
||||
SearchMode::Hybrid => self.has_fts && self.has_embeddings,
|
||||
SearchMode::Semantic => self.has_embeddings,
|
||||
}
|
||||
}
|
||||
|
||||
/// The best default mode given current capabilities.
|
||||
#[must_use]
|
||||
pub fn best_default_mode(&self) -> SearchMode {
|
||||
if self.has_fts && self.has_embeddings {
|
||||
SearchMode::Hybrid
|
||||
} else if self.has_fts {
|
||||
SearchMode::Lexical
|
||||
} else if self.has_embeddings {
|
||||
SearchMode::Semantic
|
||||
} else {
|
||||
SearchMode::Lexical // Fallback; UI will show "no indexes" message
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether any search index is available at all.
|
||||
#[must_use]
|
||||
pub fn has_any_index(&self) -> bool {
|
||||
self.has_fts || self.has_embeddings
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// SearchState
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// State for the search screen.
#[derive(Debug, Default)]
pub struct SearchState {
    /// Current query text.
    pub query: String,
    /// Whether the query input has keyboard focus.
    pub query_focused: bool,
    /// Cursor position within the query string (byte offset).
    pub cursor: usize,
    /// Active search mode.
    pub mode: SearchMode,
    /// Available search capabilities (detected on screen entry).
    pub capabilities: SearchCapabilities,
    /// Current result set.
    pub results: Vec<SearchResult>,
    /// Index of the selected result in the list.
    pub selected_index: usize,
    /// Monotonic generation counter for stale-response detection.
    pub generation: u64,
    /// Whether a search request is in-flight.
    pub loading: bool,
}
|
||||
|
||||
impl SearchState {
|
||||
/// Enter the search screen: focus query, detect capabilities.
|
||||
pub fn enter(&mut self, capabilities: SearchCapabilities) {
|
||||
self.query_focused = true;
|
||||
self.cursor = self.query.len();
|
||||
self.capabilities = capabilities;
|
||||
// Pick the best mode for detected capabilities.
|
||||
if !self.capabilities.supports_mode(self.mode) {
|
||||
self.mode = self.capabilities.best_default_mode();
|
||||
}
|
||||
}
|
||||
|
||||
/// Leave the search screen: blur focus.
|
||||
pub fn leave(&mut self) {
|
||||
self.query_focused = false;
|
||||
}
|
||||
|
||||
/// Focus the query input.
|
||||
pub fn focus_query(&mut self) {
|
||||
self.query_focused = true;
|
||||
self.cursor = self.query.len();
|
||||
}
|
||||
|
||||
/// Blur the query input.
|
||||
pub fn blur_query(&mut self) {
|
||||
self.query_focused = false;
|
||||
}
|
||||
|
||||
/// Insert a character at the cursor position.
|
||||
///
|
||||
/// Returns the new generation (caller should arm debounce timer).
|
||||
pub fn insert_char(&mut self, c: char) -> u64 {
|
||||
self.query.insert(self.cursor, c);
|
||||
self.cursor += c.len_utf8();
|
||||
self.generation += 1;
|
||||
self.generation
|
||||
}
|
||||
|
||||
/// Delete the character before the cursor (backspace).
|
||||
///
|
||||
/// Returns the new generation if changed, or `None` if cursor was at start.
|
||||
pub fn delete_back(&mut self) -> Option<u64> {
|
||||
if self.cursor == 0 {
|
||||
return None;
|
||||
}
|
||||
let prev = self.query[..self.cursor]
|
||||
.char_indices()
|
||||
.next_back()
|
||||
.map_or(0, |(i, _)| i);
|
||||
self.query.drain(prev..self.cursor);
|
||||
self.cursor = prev;
|
||||
self.generation += 1;
|
||||
Some(self.generation)
|
||||
}
|
||||
|
||||
/// Move cursor left by one character.
|
||||
pub fn cursor_left(&mut self) {
|
||||
if self.cursor > 0 {
|
||||
self.cursor = self.query[..self.cursor]
|
||||
.char_indices()
|
||||
.next_back()
|
||||
.map_or(0, |(i, _)| i);
|
||||
}
|
||||
}
|
||||
|
||||
/// Move cursor right by one character.
|
||||
pub fn cursor_right(&mut self) {
|
||||
if self.cursor < self.query.len() {
|
||||
self.cursor = self.query[self.cursor..]
|
||||
.chars()
|
||||
.next()
|
||||
.map_or(self.query.len(), |ch| self.cursor + ch.len_utf8());
|
||||
}
|
||||
}
|
||||
|
||||
/// Move cursor to the start of the query.
|
||||
pub fn cursor_home(&mut self) {
|
||||
self.cursor = 0;
|
||||
}
|
||||
|
||||
/// Move cursor to the end of the query.
|
||||
pub fn cursor_end(&mut self) {
|
||||
self.cursor = self.query.len();
|
||||
}
|
||||
|
||||
/// Cycle to the next available search mode (skip unsupported modes).
|
||||
pub fn cycle_mode(&mut self) {
|
||||
let start = self.mode;
|
||||
let mut candidate = start.next();
|
||||
// Cycle through at most 3 modes to find a supported one.
|
||||
for _ in 0..3 {
|
||||
if self.capabilities.supports_mode(candidate) {
|
||||
self.mode = candidate;
|
||||
return;
|
||||
}
|
||||
candidate = candidate.next();
|
||||
}
|
||||
// No supported mode found (shouldn't happen if has_any_index is true).
|
||||
}
|
||||
|
||||
/// Apply search results from an async response.
|
||||
///
|
||||
/// Only applies if the generation matches (stale guard).
|
||||
pub fn apply_results(&mut self, generation: u64, results: Vec<SearchResult>) {
|
||||
if generation != self.generation {
|
||||
return; // Stale response — discard.
|
||||
}
|
||||
self.results = results;
|
||||
self.selected_index = 0;
|
||||
self.loading = false;
|
||||
}
|
||||
|
||||
/// Move selection up in the results list.
|
||||
pub fn select_prev(&mut self) {
|
||||
self.selected_index = self.selected_index.saturating_sub(1);
|
||||
}
|
||||
|
||||
/// Move selection down in the results list.
|
||||
pub fn select_next(&mut self) {
|
||||
if !self.results.is_empty() {
|
||||
self.selected_index = (self.selected_index + 1).min(self.results.len() - 1);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the currently selected result, if any.
|
||||
#[must_use]
|
||||
pub fn selected_result(&self) -> Option<&SearchResult> {
|
||||
self.results.get(self.selected_index)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Unit tests for `SearchCapabilities` and `SearchState`.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::message::{EntityKey, SearchMode};

    // Capabilities fixture: FTS index only, no embeddings.
    fn fts_only() -> SearchCapabilities {
        SearchCapabilities {
            has_fts: true,
            has_embeddings: false,
            embedding_coverage_pct: 0.0,
        }
    }

    // Capabilities fixture: both FTS and embeddings present.
    fn full_caps() -> SearchCapabilities {
        SearchCapabilities {
            has_fts: true,
            has_embeddings: true,
            embedding_coverage_pct: 85.0,
        }
    }

    // Capabilities fixture: embeddings only, no FTS.
    fn embeddings_only() -> SearchCapabilities {
        SearchCapabilities {
            has_fts: false,
            has_embeddings: true,
            embedding_coverage_pct: 100.0,
        }
    }

    // Capabilities fixture: no indexes at all (struct default).
    fn no_indexes() -> SearchCapabilities {
        SearchCapabilities::default()
    }

    // Minimal search result pointing at issue `iid` in project 1.
    fn sample_result(iid: i64) -> SearchResult {
        SearchResult {
            key: EntityKey::issue(1, iid),
            title: format!("Issue #{iid}"),
            score: 0.95,
            snippet: "matched text here".into(),
            project_path: "group/project".into(),
        }
    }

    // -- SearchCapabilities tests --

    #[test]
    fn test_capabilities_supports_mode_fts_only() {
        let caps = fts_only();
        assert!(caps.supports_mode(SearchMode::Lexical));
        assert!(!caps.supports_mode(SearchMode::Hybrid));
        assert!(!caps.supports_mode(SearchMode::Semantic));
    }

    #[test]
    fn test_capabilities_supports_mode_full() {
        let caps = full_caps();
        assert!(caps.supports_mode(SearchMode::Lexical));
        assert!(caps.supports_mode(SearchMode::Hybrid));
        assert!(caps.supports_mode(SearchMode::Semantic));
    }

    #[test]
    fn test_capabilities_supports_mode_embeddings_only() {
        let caps = embeddings_only();
        assert!(!caps.supports_mode(SearchMode::Lexical));
        assert!(!caps.supports_mode(SearchMode::Hybrid));
        assert!(caps.supports_mode(SearchMode::Semantic));
    }

    #[test]
    fn test_capabilities_best_default_hybrid_when_both() {
        assert_eq!(full_caps().best_default_mode(), SearchMode::Hybrid);
    }

    #[test]
    fn test_capabilities_best_default_lexical_when_fts_only() {
        assert_eq!(fts_only().best_default_mode(), SearchMode::Lexical);
    }

    #[test]
    fn test_capabilities_best_default_semantic_when_embeddings_only() {
        assert_eq!(embeddings_only().best_default_mode(), SearchMode::Semantic);
    }

    #[test]
    fn test_capabilities_best_default_lexical_when_none() {
        assert_eq!(no_indexes().best_default_mode(), SearchMode::Lexical);
    }

    #[test]
    fn test_capabilities_has_any_index() {
        assert!(fts_only().has_any_index());
        assert!(full_caps().has_any_index());
        assert!(embeddings_only().has_any_index());
        assert!(!no_indexes().has_any_index());
    }

    // -- SearchState tests --

    #[test]
    fn test_enter_focuses_and_preserves_supported_mode() {
        let mut state = SearchState::default();
        // Default mode is Lexical, which full_caps supports — preserved.
        state.enter(full_caps());
        assert!(state.query_focused);
        assert_eq!(state.mode, SearchMode::Lexical);
    }

    #[test]
    fn test_enter_preserves_mode_if_supported() {
        let mut state = SearchState {
            mode: SearchMode::Lexical,
            ..SearchState::default()
        };
        state.enter(full_caps());
        // Lexical is supported by full_caps, so it stays.
        assert_eq!(state.mode, SearchMode::Lexical);
    }

    #[test]
    fn test_enter_overrides_unsupported_mode() {
        let mut state = SearchState {
            mode: SearchMode::Hybrid,
            ..SearchState::default()
        };
        state.enter(fts_only());
        // Hybrid requires embeddings, so fallback to Lexical.
        assert_eq!(state.mode, SearchMode::Lexical);
    }

    #[test]
    fn test_insert_char_and_cursor() {
        let mut state = SearchState::default();
        let generation1 = state.insert_char('h');
        let generation2 = state.insert_char('i');
        assert_eq!(state.query, "hi");
        assert_eq!(state.cursor, 2);
        // Each insertion bumps the generation exactly once.
        assert_eq!(generation1, 1);
        assert_eq!(generation2, 2);
    }

    #[test]
    fn test_delete_back() {
        let mut state = SearchState::default();
        state.insert_char('a');
        state.insert_char('b');
        state.insert_char('c');

        let generation = state.delete_back();
        assert!(generation.is_some());
        assert_eq!(state.query, "ab");
        assert_eq!(state.cursor, 2);
    }

    #[test]
    fn test_delete_back_at_start_returns_none() {
        let mut state = SearchState::default();
        state.insert_char('a');
        state.cursor = 0;
        // Backspace at position 0 is a no-op and reports no change.
        assert!(state.delete_back().is_none());
        assert_eq!(state.query, "a");
    }

    #[test]
    fn test_cursor_movement() {
        let mut state = SearchState::default();
        state.insert_char('a');
        state.insert_char('b');
        state.insert_char('c');
        assert_eq!(state.cursor, 3);

        state.cursor_left();
        assert_eq!(state.cursor, 2);
        state.cursor_left();
        assert_eq!(state.cursor, 1);
        state.cursor_right();
        assert_eq!(state.cursor, 2);
        state.cursor_home();
        assert_eq!(state.cursor, 0);
        state.cursor_end();
        assert_eq!(state.cursor, 3);
    }

    #[test]
    fn test_cursor_left_at_start_is_noop() {
        let mut state = SearchState::default();
        state.cursor_left();
        assert_eq!(state.cursor, 0);
    }

    #[test]
    fn test_cursor_right_at_end_is_noop() {
        let mut state = SearchState::default();
        state.insert_char('x');
        state.cursor_right();
        assert_eq!(state.cursor, 1);
    }

    #[test]
    fn test_cycle_mode_full_caps() {
        let mut state = SearchState {
            capabilities: full_caps(),
            mode: SearchMode::Lexical,
            ..SearchState::default()
        };

        // All three modes supported: cycles Lexical → Hybrid → Semantic → Lexical.
        state.cycle_mode();
        assert_eq!(state.mode, SearchMode::Hybrid);
        state.cycle_mode();
        assert_eq!(state.mode, SearchMode::Semantic);
        state.cycle_mode();
        assert_eq!(state.mode, SearchMode::Lexical);
    }

    #[test]
    fn test_cycle_mode_fts_only_stays_lexical() {
        let mut state = SearchState {
            capabilities: fts_only(),
            mode: SearchMode::Lexical,
            ..SearchState::default()
        };

        state.cycle_mode();
        // Hybrid and Semantic unsupported, wraps back to Lexical.
        assert_eq!(state.mode, SearchMode::Lexical);
    }

    #[test]
    fn test_cycle_mode_embeddings_only() {
        let mut state = SearchState {
            capabilities: embeddings_only(),
            mode: SearchMode::Semantic,
            ..SearchState::default()
        };

        state.cycle_mode();
        // Lexical and Hybrid unsupported, wraps back to Semantic.
        assert_eq!(state.mode, SearchMode::Semantic);
    }

    #[test]
    fn test_apply_results_matching_generation() {
        let mut state = SearchState::default();
        let generation = state.insert_char('q');

        let results = vec![sample_result(1), sample_result(2)];
        state.apply_results(generation, results);

        assert_eq!(state.results.len(), 2);
        assert_eq!(state.selected_index, 0);
        assert!(!state.loading);
    }

    #[test]
    fn test_apply_results_stale_generation_discarded() {
        let mut state = SearchState::default();
        state.insert_char('q'); // gen=1
        state.insert_char('u'); // gen=2

        let stale_results = vec![sample_result(99)];
        state.apply_results(1, stale_results); // gen 1 is stale

        assert!(state.results.is_empty());
    }

    #[test]
    fn test_select_prev_next() {
        let mut state = SearchState {
            results: vec![sample_result(1), sample_result(2), sample_result(3)],
            ..SearchState::default()
        };

        assert_eq!(state.selected_index, 0);
        state.select_next();
        assert_eq!(state.selected_index, 1);
        state.select_next();
        assert_eq!(state.selected_index, 2);
        state.select_next(); // Clamps at end.
        assert_eq!(state.selected_index, 2);
        state.select_prev();
        assert_eq!(state.selected_index, 1);
        state.select_prev();
        assert_eq!(state.selected_index, 0);
        state.select_prev(); // Clamps at start.
        assert_eq!(state.selected_index, 0);
    }

    #[test]
    fn test_selected_result() {
        let mut state = SearchState::default();
        assert!(state.selected_result().is_none());

        state.results = vec![sample_result(42)];
        let result = state.selected_result().unwrap();
        assert_eq!(result.key.iid, 42);
    }

    #[test]
    fn test_leave_blurs_focus() {
        let mut state = SearchState::default();
        state.enter(fts_only());
        assert!(state.query_focused);
        state.leave();
        assert!(!state.query_focused);
    }

    #[test]
    fn test_focus_query_moves_cursor_to_end() {
        let mut state = SearchState {
            query: "hello".into(),
            cursor: 0,
            ..SearchState::default()
        };
        state.focus_query();
        assert!(state.query_focused);
        assert_eq!(state.cursor, 5);
    }

    #[test]
    fn test_unicode_cursor_handling() {
        let mut state = SearchState::default();
        // Insert a multi-byte character; cursor is tracked in bytes.
        state.insert_char('田');
        assert_eq!(state.cursor, 3); // 田 is 3 bytes in UTF-8
        state.insert_char('中');
        assert_eq!(state.cursor, 6);

        state.cursor_left();
        assert_eq!(state.cursor, 3);
        state.cursor_right();
        assert_eq!(state.cursor, 6);

        state.delete_back();
        assert_eq!(state.query, "田");
        assert_eq!(state.cursor, 3);
    }
}
|
||||
|
||||
@@ -1,12 +1,271 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! Timeline screen state.
|
||||
//! Timeline screen state — event stream, scope filtering, navigation.
|
||||
//!
|
||||
//! The timeline displays a chronological event stream from resource event
|
||||
//! tables. Events can be scoped to a specific entity, author, or shown
|
||||
//! globally. [`TimelineScope`] gates the query; [`TimelineState`] manages
|
||||
//! the scroll position, selected event, and generation counter for
|
||||
//! stale-response detection.
|
||||
|
||||
use crate::message::TimelineEvent;
|
||||
use crate::message::{EntityKey, TimelineEvent};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// TimelineScope
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Scope filter for the timeline event query.
///
/// Gates which events are fetched: everything, one entity's events, or
/// one author's events. The default scope is [`TimelineScope::All`].
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum TimelineScope {
    /// All events across all entities.
    #[default]
    All,
    /// Events for a specific entity (issue or MR).
    Entity(EntityKey),
    /// Events by a specific actor.
    Author(String),
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// TimelineState
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// State for the timeline screen.
///
/// Tracks the loaded event list, the active [`TimelineScope`], list
/// selection/scroll position, and a generation counter used to discard
/// stale async responses.
#[derive(Debug, Default)]
pub struct TimelineState {
    /// Loaded timeline events (sorted by timestamp, most recent first).
    pub events: Vec<TimelineEvent>,
    /// Active scope filter.
    pub scope: TimelineScope,
    /// Index of the selected event in the list.
    pub selected_index: usize,
    /// Scroll offset for the visible window.
    pub scroll_offset: usize,
    /// Monotonic generation counter for stale-response detection.
    pub generation: u64,
    /// Whether a fetch is in-flight.
    pub loading: bool,
}
|
||||
|
||||
impl TimelineState {
|
||||
/// Enter the timeline screen. Bumps generation for fresh data.
|
||||
pub fn enter(&mut self) -> u64 {
|
||||
self.generation += 1;
|
||||
self.loading = true;
|
||||
self.generation
|
||||
}
|
||||
|
||||
/// Set the scope filter and bump generation.
|
||||
///
|
||||
/// Returns the new generation (caller should trigger a re-fetch).
|
||||
pub fn set_scope(&mut self, scope: TimelineScope) -> u64 {
|
||||
self.scope = scope;
|
||||
self.generation += 1;
|
||||
self.loading = true;
|
||||
self.generation
|
||||
}
|
||||
|
||||
/// Apply timeline events from an async response.
|
||||
///
|
||||
/// Only applies if the generation matches (stale guard).
|
||||
pub fn apply_results(&mut self, generation: u64, events: Vec<TimelineEvent>) {
|
||||
if generation != self.generation {
|
||||
return; // Stale response — discard.
|
||||
}
|
||||
self.events = events;
|
||||
self.selected_index = 0;
|
||||
self.scroll_offset = 0;
|
||||
self.loading = false;
|
||||
}
|
||||
|
||||
/// Move selection up in the event list.
|
||||
pub fn select_prev(&mut self) {
|
||||
self.selected_index = self.selected_index.saturating_sub(1);
|
||||
}
|
||||
|
||||
/// Move selection down in the event list.
|
||||
pub fn select_next(&mut self) {
|
||||
if !self.events.is_empty() {
|
||||
self.selected_index = (self.selected_index + 1).min(self.events.len() - 1);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the currently selected event, if any.
|
||||
#[must_use]
|
||||
pub fn selected_event(&self) -> Option<&TimelineEvent> {
|
||||
self.events.get(self.selected_index)
|
||||
}
|
||||
|
||||
/// Ensure the selected index is visible given the viewport height.
|
||||
///
|
||||
/// Adjusts `scroll_offset` so the selected item is within the
|
||||
/// visible window.
|
||||
pub fn ensure_visible(&mut self, viewport_height: usize) {
|
||||
if viewport_height == 0 {
|
||||
return;
|
||||
}
|
||||
if self.selected_index < self.scroll_offset {
|
||||
self.scroll_offset = self.selected_index;
|
||||
} else if self.selected_index >= self.scroll_offset + viewport_height {
|
||||
self.scroll_offset = self.selected_index - viewport_height + 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Unit tests for `TimelineScope` and `TimelineState`.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::message::TimelineEventKind;

    // Minimal event fixture: issue `iid` in project 1 at `timestamp_ms`.
    fn sample_event(timestamp_ms: i64, iid: i64) -> TimelineEvent {
        TimelineEvent {
            timestamp_ms,
            entity_key: EntityKey::issue(1, iid),
            event_kind: TimelineEventKind::Created,
            summary: format!("Issue #{iid} created"),
            detail: None,
            actor: Some("alice".into()),
            project_path: "group/project".into(),
        }
    }

    #[test]
    fn test_timeline_scope_default_is_all() {
        assert_eq!(TimelineScope::default(), TimelineScope::All);
    }

    #[test]
    fn test_enter_bumps_generation() {
        let mut state = TimelineState::default();
        let generation = state.enter();
        assert_eq!(generation, 1);
        assert!(state.loading);
    }

    #[test]
    fn test_set_scope_bumps_generation() {
        let mut state = TimelineState::default();
        let gen1 = state.set_scope(TimelineScope::Author("bob".into()));
        assert_eq!(gen1, 1);
        assert_eq!(state.scope, TimelineScope::Author("bob".into()));

        let gen2 = state.set_scope(TimelineScope::All);
        assert_eq!(gen2, 2);
    }

    #[test]
    fn test_apply_results_matching_generation() {
        let mut state = TimelineState::default();
        let generation = state.enter();

        let events = vec![sample_event(3000, 1), sample_event(2000, 2)];
        state.apply_results(generation, events);

        assert_eq!(state.events.len(), 2);
        assert_eq!(state.selected_index, 0);
        assert!(!state.loading);
    }

    #[test]
    fn test_apply_results_stale_generation_discarded() {
        let mut state = TimelineState::default();
        state.enter(); // gen=1
        let _gen2 = state.enter(); // gen=2

        let stale_events = vec![sample_event(1000, 99)];
        state.apply_results(1, stale_events); // gen 1 is stale

        assert!(state.events.is_empty());
    }

    #[test]
    fn test_select_prev_next() {
        let mut state = TimelineState {
            events: vec![
                sample_event(3000, 1),
                sample_event(2000, 2),
                sample_event(1000, 3),
            ],
            ..TimelineState::default()
        };

        assert_eq!(state.selected_index, 0);
        state.select_next();
        assert_eq!(state.selected_index, 1);
        state.select_next();
        assert_eq!(state.selected_index, 2);
        state.select_next(); // Clamps at end.
        assert_eq!(state.selected_index, 2);
        state.select_prev();
        assert_eq!(state.selected_index, 1);
        state.select_prev();
        assert_eq!(state.selected_index, 0);
        state.select_prev(); // Clamps at start.
        assert_eq!(state.selected_index, 0);
    }

    #[test]
    fn test_selected_event() {
        let mut state = TimelineState::default();
        assert!(state.selected_event().is_none());

        state.events = vec![sample_event(3000, 42)];
        let event = state.selected_event().unwrap();
        assert_eq!(event.entity_key.iid, 42);
    }

    #[test]
    fn test_ensure_visible_scrolls_down() {
        let mut state = TimelineState {
            events: vec![
                sample_event(5000, 1),
                sample_event(4000, 2),
                sample_event(3000, 3),
                sample_event(2000, 4),
                sample_event(1000, 5),
            ],
            selected_index: 4,
            scroll_offset: 0,
            ..TimelineState::default()
        };
        state.ensure_visible(3);
        assert_eq!(state.scroll_offset, 2); // 4 - 3 + 1 = 2
    }

    #[test]
    fn test_ensure_visible_scrolls_up() {
        let mut state = TimelineState {
            events: vec![
                sample_event(5000, 1),
                sample_event(4000, 2),
                sample_event(3000, 3),
            ],
            selected_index: 0,
            scroll_offset: 2,
            ..TimelineState::default()
        };
        state.ensure_visible(3);
        assert_eq!(state.scroll_offset, 0);
    }

    #[test]
    fn test_ensure_visible_zero_viewport() {
        let mut state = TimelineState {
            scroll_offset: 5,
            ..TimelineState::default()
        };
        state.ensure_visible(0);
        assert_eq!(state.scroll_offset, 5); // Unchanged.
    }

    #[test]
    fn test_select_next_on_empty_is_noop() {
        let mut state = TimelineState::default();
        state.select_next();
        assert_eq!(state.selected_index, 0);
    }
}
|
||||
|
||||
556
crates/lore-tui/src/state/trace.rs
Normal file
556
crates/lore-tui/src/state/trace.rs
Normal file
@@ -0,0 +1,556 @@
|
||||
//! Trace screen state — file → MR → issue chain drill-down.
|
||||
//!
|
||||
//! Users enter a file path, and the trace query resolves rename chains,
|
||||
//! finds MRs that touched the file, links issues via entity_references,
|
||||
//! and extracts DiffNote discussions. Each result chain can be
|
||||
//! expanded/collapsed independently.
|
||||
|
||||
use std::collections::HashSet;
|
||||
|
||||
use lore::core::trace::TraceResult;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// TraceState
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// State for the Trace screen.
///
/// Holds the path input editor, the latest [`TraceResult`], per-chain
/// expand/collapse state, query toggles, autocomplete state, and a
/// generation counter guarding against stale async responses.
#[derive(Debug, Default)]
pub struct TraceState {
    /// User-entered file path (with optional :line suffix).
    pub path_input: String,
    /// Cursor position within `path_input` (character index — the editing
    /// helpers resolve it to a byte offset via `char_indices`).
    pub path_cursor: usize,
    /// Whether the path input field has keyboard focus.
    pub path_focused: bool,

    /// Parsed line filter from `:N` suffix (stored but not yet used for highlighting).
    pub line_filter: Option<u32>,

    /// The most recent trace result (None until first query).
    pub result: Option<TraceResult>,

    /// Index of the currently selected chain in the trace result.
    pub selected_chain_index: usize,
    /// Set of chain indices that are currently expanded.
    pub expanded_chains: HashSet<usize>,

    /// Whether to follow rename chains in the query (default true).
    pub follow_renames: bool,
    /// Whether to include DiffNote discussions (default true).
    pub include_discussions: bool,

    /// Vertical scroll offset for the chain list.
    pub scroll_offset: u16,

    /// Cached list of known file paths for autocomplete.
    pub known_paths: Vec<String>,
    /// Filtered autocomplete matches for current input.
    pub autocomplete_matches: Vec<String>,
    /// Currently highlighted autocomplete suggestion index.
    pub autocomplete_index: usize,

    /// Generation counter for stale response guard.
    pub generation: u64,
    /// Whether a query is in flight.
    pub loading: bool,
}
|
||||
|
||||
impl TraceState {
|
||||
/// Initialize defaults for a fresh Trace screen entry.
|
||||
pub fn enter(&mut self) {
|
||||
self.path_focused = true;
|
||||
self.follow_renames = true;
|
||||
self.include_discussions = true;
|
||||
}
|
||||
|
||||
/// Clean up when leaving the Trace screen.
|
||||
pub fn leave(&mut self) {
|
||||
self.path_focused = false;
|
||||
}
|
||||
|
||||
/// Submit the current path input as a trace query.
|
||||
///
|
||||
/// Bumps generation, parses the :line suffix, and returns the
|
||||
/// new generation if the path is non-empty.
|
||||
pub fn submit(&mut self) -> Option<u64> {
|
||||
let trimmed = self.path_input.trim();
|
||||
if trimmed.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let (path, line) = lore::cli::commands::trace::parse_trace_path(trimmed);
|
||||
self.path_input = path;
|
||||
self.path_cursor = self.path_input.len();
|
||||
self.line_filter = line;
|
||||
|
||||
self.generation += 1;
|
||||
self.loading = true;
|
||||
self.selected_chain_index = 0;
|
||||
self.expanded_chains.clear();
|
||||
self.scroll_offset = 0;
|
||||
self.path_focused = false;
|
||||
self.autocomplete_matches.clear();
|
||||
|
||||
Some(self.generation)
|
||||
}
|
||||
|
||||
/// Apply a trace result, guarded by generation counter.
|
||||
pub fn apply_result(&mut self, generation: u64, result: TraceResult) {
|
||||
if generation != self.generation {
|
||||
return; // Stale response — discard.
|
||||
}
|
||||
self.result = Some(result);
|
||||
self.loading = false;
|
||||
}
|
||||
|
||||
/// Toggle the expand/collapse state of the selected chain.
|
||||
pub fn toggle_expand(&mut self) {
|
||||
if self.expanded_chains.contains(&self.selected_chain_index) {
|
||||
self.expanded_chains.remove(&self.selected_chain_index);
|
||||
} else {
|
||||
self.expanded_chains.insert(self.selected_chain_index);
|
||||
}
|
||||
}
|
||||
|
||||
/// Toggle follow_renames and bump generation (triggers re-fetch).
|
||||
pub fn toggle_follow_renames(&mut self) -> Option<u64> {
|
||||
self.follow_renames = !self.follow_renames;
|
||||
self.requery()
|
||||
}
|
||||
|
||||
/// Toggle include_discussions and bump generation (triggers re-fetch).
|
||||
pub fn toggle_include_discussions(&mut self) -> Option<u64> {
|
||||
self.include_discussions = !self.include_discussions;
|
||||
self.requery()
|
||||
}
|
||||
|
||||
/// Re-query with current settings if path is non-empty.
|
||||
fn requery(&mut self) -> Option<u64> {
|
||||
if self.path_input.trim().is_empty() {
|
||||
return None;
|
||||
}
|
||||
self.generation += 1;
|
||||
self.loading = true;
|
||||
self.selected_chain_index = 0;
|
||||
self.expanded_chains.clear();
|
||||
self.scroll_offset = 0;
|
||||
Some(self.generation)
|
||||
}
|
||||
|
||||
/// Select the previous chain.
|
||||
pub fn select_prev(&mut self) {
|
||||
if self.selected_chain_index > 0 {
|
||||
self.selected_chain_index -= 1;
|
||||
self.ensure_visible();
|
||||
}
|
||||
}
|
||||
|
||||
/// Select the next chain.
|
||||
pub fn select_next(&mut self) {
|
||||
let max = self.chain_count().saturating_sub(1);
|
||||
if self.selected_chain_index < max {
|
||||
self.selected_chain_index += 1;
|
||||
self.ensure_visible();
|
||||
}
|
||||
}
|
||||
|
||||
/// Number of trace chains in the current result.
|
||||
fn chain_count(&self) -> usize {
|
||||
self.result.as_ref().map_or(0, |r| r.trace_chains.len())
|
||||
}
|
||||
|
||||
/// Ensure the selected chain is visible within the scroll viewport.
|
||||
fn ensure_visible(&mut self) {
|
||||
let idx = self.selected_chain_index as u16;
|
||||
if idx < self.scroll_offset {
|
||||
self.scroll_offset = idx;
|
||||
}
|
||||
// Rough viewport — exact height adjusted in render.
|
||||
}
|
||||
|
||||
/// Whether the text input has focus.
|
||||
#[must_use]
|
||||
pub fn has_text_focus(&self) -> bool {
|
||||
self.path_focused
|
||||
}
|
||||
|
||||
/// Remove focus from all text inputs.
|
||||
pub fn blur(&mut self) {
|
||||
self.path_focused = false;
|
||||
self.autocomplete_matches.clear();
|
||||
}
|
||||
|
||||
/// Focus the path input.
|
||||
pub fn focus_input(&mut self) {
|
||||
self.path_focused = true;
|
||||
self.update_autocomplete();
|
||||
}
|
||||
|
||||
// --- Text editing helpers ---
|
||||
|
||||
/// Insert a character at the cursor position.
|
||||
pub fn insert_char(&mut self, ch: char) {
|
||||
let byte_pos = self
|
||||
.path_input
|
||||
.char_indices()
|
||||
.nth(self.path_cursor)
|
||||
.map_or(self.path_input.len(), |(i, _)| i);
|
||||
self.path_input.insert(byte_pos, ch);
|
||||
self.path_cursor += 1;
|
||||
self.update_autocomplete();
|
||||
}
|
||||
|
||||
/// Delete the character before the cursor.
|
||||
pub fn delete_char_before_cursor(&mut self) {
|
||||
if self.path_cursor == 0 {
|
||||
return;
|
||||
}
|
||||
self.path_cursor -= 1;
|
||||
let byte_pos = self
|
||||
.path_input
|
||||
.char_indices()
|
||||
.nth(self.path_cursor)
|
||||
.map_or(self.path_input.len(), |(i, _)| i);
|
||||
let end = self
|
||||
.path_input
|
||||
.char_indices()
|
||||
.nth(self.path_cursor + 1)
|
||||
.map_or(self.path_input.len(), |(i, _)| i);
|
||||
self.path_input.drain(byte_pos..end);
|
||||
self.update_autocomplete();
|
||||
}
|
||||
|
||||
/// Move cursor left.
|
||||
pub fn cursor_left(&mut self) {
|
||||
self.path_cursor = self.path_cursor.saturating_sub(1);
|
||||
}
|
||||
|
||||
/// Move cursor right.
|
||||
pub fn cursor_right(&mut self) {
|
||||
let max = self.path_input.chars().count();
|
||||
if self.path_cursor < max {
|
||||
self.path_cursor += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// --- Autocomplete ---
|
||||
|
||||
/// Update autocomplete matches based on current input.
|
||||
pub fn update_autocomplete(&mut self) {
|
||||
let input_lower = self.path_input.to_lowercase();
|
||||
if input_lower.is_empty() {
|
||||
self.autocomplete_matches.clear();
|
||||
self.autocomplete_index = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
self.autocomplete_matches = self
|
||||
.known_paths
|
||||
.iter()
|
||||
.filter(|p| p.to_lowercase().contains(&input_lower))
|
||||
.take(10) // Limit visible suggestions.
|
||||
.cloned()
|
||||
.collect();
|
||||
self.autocomplete_index = 0;
|
||||
}
|
||||
|
||||
/// Cycle to the next autocomplete suggestion.
|
||||
pub fn autocomplete_next(&mut self) {
|
||||
if self.autocomplete_matches.is_empty() {
|
||||
return;
|
||||
}
|
||||
self.autocomplete_index = (self.autocomplete_index + 1) % self.autocomplete_matches.len();
|
||||
}
|
||||
|
||||
/// Accept the current autocomplete suggestion into the path input.
|
||||
pub fn accept_autocomplete(&mut self) {
|
||||
if let Some(match_) = self.autocomplete_matches.get(self.autocomplete_index) {
|
||||
self.path_input = match_.clone();
|
||||
self.path_cursor = self.path_input.chars().count();
|
||||
self.autocomplete_matches.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Unit tests for `TraceState`: defaults, submit/apply generation
// handling, expand/toggle behavior, selection clamping, text editing,
// and autocomplete.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_trace_state_default() {
        let state = TraceState::default();
        assert!(state.path_input.is_empty());
        assert!(!state.path_focused);
        assert!(!state.follow_renames); // Default false, enter() sets true.
        assert!(state.result.is_none());
        assert_eq!(state.generation, 0);
    }

    #[test]
    fn test_trace_state_enter_sets_defaults() {
        let mut state = TraceState::default();
        state.enter();
        assert!(state.path_focused);
        assert!(state.follow_renames);
        assert!(state.include_discussions);
    }

    #[test]
    fn test_submit_empty_returns_none() {
        let mut state = TraceState::default();
        assert!(state.submit().is_none());
        assert_eq!(state.generation, 0);
    }

    #[test]
    fn test_submit_with_path_bumps_generation() {
        let mut state = TraceState {
            path_input: "src/main.rs".into(),
            ..TraceState::default()
        };
        let generation = state.submit();
        assert_eq!(generation, Some(1));
        assert_eq!(state.generation, 1);
        assert!(state.loading);
        assert!(!state.path_focused);
    }

    #[test]
    fn test_submit_parses_line_suffix() {
        // "path:NN" syntax splits into the bare path plus a line filter.
        let mut state = TraceState {
            path_input: "src/main.rs:42".into(),
            ..TraceState::default()
        };
        state.submit();
        assert_eq!(state.path_input, "src/main.rs");
        assert_eq!(state.line_filter, Some(42));
    }

    #[test]
    fn test_apply_result_matching_generation() {
        let mut state = TraceState {
            path_input: "src/lib.rs".into(),
            ..TraceState::default()
        };
        state.submit(); // generation = 1

        let result = TraceResult {
            path: "src/lib.rs".into(),
            resolved_paths: vec![],
            renames_followed: false,
            trace_chains: vec![],
            total_chains: 0,
        };

        state.apply_result(1, result);
        assert!(state.result.is_some());
        assert!(!state.loading);
    }

    #[test]
    fn test_apply_result_stale_generation_discarded() {
        let mut state = TraceState {
            path_input: "src/lib.rs".into(),
            ..TraceState::default()
        };
        state.submit(); // generation = 1
        state.path_input = "src/other.rs".into();
        state.submit(); // generation = 2

        let stale_result = TraceResult {
            path: "src/lib.rs".into(),
            resolved_paths: vec![],
            renames_followed: false,
            trace_chains: vec![],
            total_chains: 0,
        };

        state.apply_result(1, stale_result); // Stale — should be discarded.
        assert!(state.result.is_none());
        assert!(state.loading); // Still loading.
    }

    #[test]
    fn test_toggle_expand() {
        let mut state = TraceState {
            selected_chain_index: 2,
            ..TraceState::default()
        };

        state.toggle_expand();
        assert!(state.expanded_chains.contains(&2));

        state.toggle_expand();
        assert!(!state.expanded_chains.contains(&2));
    }

    #[test]
    fn test_toggle_follow_renames_requeues() {
        let mut state = TraceState {
            path_input: "src/main.rs".into(),
            path_focused: true,
            follow_renames: true,
            include_discussions: true,
            ..TraceState::default()
        };
        assert!(state.follow_renames);

        let generation = state.toggle_follow_renames();
        assert!(!state.follow_renames);
        assert_eq!(generation, Some(1));
        assert!(state.loading);
    }

    #[test]
    fn test_toggle_include_discussions_requeues() {
        let mut state = TraceState {
            path_input: "src/main.rs".into(),
            path_focused: true,
            follow_renames: true,
            include_discussions: true,
            ..TraceState::default()
        };
        assert!(state.include_discussions);

        let generation = state.toggle_include_discussions();
        assert!(!state.include_discussions);
        assert_eq!(generation, Some(1));
    }

    #[test]
    fn test_select_prev_next() {
        // Two-chain fixture so selection can move and clamp at both ends.
        let mut state = TraceState {
            result: Some(TraceResult {
                path: "x".into(),
                resolved_paths: vec![],
                renames_followed: false,
                trace_chains: vec![
                    lore::core::trace::TraceChain {
                        mr_iid: 1,
                        mr_title: "a".into(),
                        mr_state: "merged".into(),
                        mr_author: "x".into(),
                        change_type: "modified".into(),
                        merged_at_iso: None,
                        updated_at_iso: "2024-01-01".into(),
                        web_url: None,
                        issues: vec![],
                        discussions: vec![],
                    },
                    lore::core::trace::TraceChain {
                        mr_iid: 2,
                        mr_title: "b".into(),
                        mr_state: "merged".into(),
                        mr_author: "y".into(),
                        change_type: "added".into(),
                        merged_at_iso: None,
                        updated_at_iso: "2024-01-02".into(),
                        web_url: None,
                        issues: vec![],
                        discussions: vec![],
                    },
                ],
                total_chains: 2,
            }),
            ..TraceState::default()
        };

        assert_eq!(state.selected_chain_index, 0);
        state.select_next();
        assert_eq!(state.selected_chain_index, 1);
        state.select_next(); // Clamped at max.
        assert_eq!(state.selected_chain_index, 1);
        state.select_prev();
        assert_eq!(state.selected_chain_index, 0);
        state.select_prev(); // Clamped at 0.
        assert_eq!(state.selected_chain_index, 0);
    }

    #[test]
    fn test_insert_char_and_delete() {
        let mut state = TraceState::default();
        state.insert_char('a');
        state.insert_char('b');
        state.insert_char('c');
        assert_eq!(state.path_input, "abc");
        assert_eq!(state.path_cursor, 3);

        state.delete_char_before_cursor();
        assert_eq!(state.path_input, "ab");
        assert_eq!(state.path_cursor, 2);
    }

    #[test]
    fn test_autocomplete_filters() {
        let mut state = TraceState {
            known_paths: vec!["src/a.rs".into(), "src/b.rs".into(), "lib/c.rs".into()],
            path_input: "src/".into(),
            ..TraceState::default()
        };
        state.update_autocomplete();
        assert_eq!(state.autocomplete_matches.len(), 2);
        assert!(state.autocomplete_matches.contains(&"src/a.rs".to_string()));
        assert!(state.autocomplete_matches.contains(&"src/b.rs".to_string()));
    }

    #[test]
    fn test_autocomplete_next_cycles() {
        let mut state = TraceState {
            known_paths: vec!["a.rs".into(), "ab.rs".into()],
            path_input: "a".into(),
            ..TraceState::default()
        };
        state.update_autocomplete();
        assert_eq!(state.autocomplete_matches.len(), 2);
        assert_eq!(state.autocomplete_index, 0);

        state.autocomplete_next();
        assert_eq!(state.autocomplete_index, 1);

        state.autocomplete_next();
        assert_eq!(state.autocomplete_index, 0); // Wrapped.
    }

    #[test]
    fn test_accept_autocomplete() {
        let mut state = TraceState {
            known_paths: vec!["src/main.rs".into()],
            path_input: "src/".into(),
            ..TraceState::default()
        };
        state.update_autocomplete();
        assert_eq!(state.autocomplete_matches.len(), 1);

        state.accept_autocomplete();
        assert_eq!(state.path_input, "src/main.rs");
        assert!(state.autocomplete_matches.is_empty());
    }

    #[test]
    fn test_has_text_focus() {
        let state = TraceState::default();
        assert!(!state.has_text_focus());
        let state = TraceState {
            path_focused: true,
            ..TraceState::default()
        };
        assert!(state.has_text_focus());
    }

    #[test]
    fn test_blur_clears_focus_and_autocomplete() {
        let mut state = TraceState {
            path_focused: true,
            autocomplete_matches: vec!["a".into()],
            ..TraceState::default()
        };

        state.blur();
        assert!(!state.path_focused);
        assert!(state.autocomplete_matches.is_empty());
    }
}
|
||||
@@ -1,12 +1,516 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! Who (people intelligence) screen state.
|
||||
//!
|
||||
//! Manages 5 query modes (Expert, Workload, Reviews, Active, Overlap),
|
||||
//! input fields (path or username depending on mode), and result display.
|
||||
|
||||
use crate::message::WhoResult;
|
||||
use lore::core::who_types::WhoResult;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// WhoMode
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// The 5 query modes for the who screen.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum WhoMode {
    /// File-path expertise scores.
    #[default]
    Expert,
    /// Issue/MR assignment workload for a username.
    Workload,
    /// Review activity breakdown for a username.
    Reviews,
    /// Recent unresolved discussions (no input needed).
    Active,
    /// Shared file knowledge between contributors.
    Overlap,
}

impl WhoMode {
    /// All modes, in tab order.
    pub const ALL: [Self; 5] = [
        Self::Expert,
        Self::Workload,
        Self::Reviews,
        Self::Active,
        Self::Overlap,
    ];

    /// Short label used when rendering the mode tabs.
    #[must_use]
    pub fn label(self) -> &'static str {
        match self {
            Self::Expert => "Expert",
            Self::Workload => "Workload",
            Self::Reviews => "Reviews",
            Self::Active => "Active",
            Self::Overlap => "Overlap",
        }
    }

    /// True for modes that query by file path.
    #[must_use]
    pub fn needs_path(self) -> bool {
        matches!(self, Self::Expert | Self::Overlap)
    }

    /// True for modes that query by username.
    #[must_use]
    pub fn needs_username(self) -> bool {
        matches!(self, Self::Workload | Self::Reviews)
    }

    /// True when the `include_closed` toggle changes this mode's query.
    #[must_use]
    pub fn affected_by_include_closed(self) -> bool {
        matches!(self, Self::Workload | Self::Active)
    }

    /// The mode after this one in tab order, wrapping at the end.
    #[must_use]
    pub fn next(self) -> Self {
        let here = Self::ALL
            .iter()
            .position(|mode| *mode == self)
            .unwrap_or(0); // Every variant appears in ALL; fallback is unreachable.
        Self::ALL[(here + 1) % Self::ALL.len()]
    }

    /// Mode from a 1-based number key (1=Expert, 2=Workload, ..., 5=Overlap).
    #[must_use]
    pub fn from_number(n: u8) -> Option<Self> {
        // n == 0 fails checked_sub; n > 5 falls off the end of ALL.
        let slot = usize::from(n).checked_sub(1)?;
        Self::ALL.get(slot).copied()
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// WhoState
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// State for the who/people screen.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct WhoState {
|
||||
/// Active query mode.
|
||||
pub mode: WhoMode,
|
||||
/// Current result (if any).
|
||||
pub result: Option<WhoResult>,
|
||||
pub scroll_offset: u16,
|
||||
|
||||
// Input fields.
|
||||
/// Path input text (used by Expert and Overlap modes).
|
||||
pub path: String,
|
||||
/// Cursor position within path string (byte offset).
|
||||
pub path_cursor: usize,
|
||||
/// Whether the path input has focus.
|
||||
pub path_focused: bool,
|
||||
|
||||
/// Username input text (used by Workload and Reviews modes).
|
||||
pub username: String,
|
||||
/// Cursor position within username string (byte offset).
|
||||
pub username_cursor: usize,
|
||||
/// Whether the username input has focus.
|
||||
pub username_focused: bool,
|
||||
|
||||
/// Toggle: include closed entities in Workload/Active queries.
|
||||
pub include_closed: bool,
|
||||
|
||||
// Result navigation.
|
||||
/// Index of the selected row in the result list.
|
||||
pub selected_index: usize,
|
||||
/// Vertical scroll offset for the result area.
|
||||
pub scroll_offset: usize,
|
||||
|
||||
// Async coordination.
|
||||
/// Monotonic generation counter for stale-response detection.
|
||||
pub generation: u64,
|
||||
/// Whether a query is in-flight.
|
||||
pub loading: bool,
|
||||
}
|
||||
|
||||
impl WhoState {
|
||||
/// Enter the who screen: focus the appropriate input.
|
||||
pub fn enter(&mut self) {
|
||||
self.focus_input_for_mode();
|
||||
}
|
||||
|
||||
/// Leave the who screen: blur all inputs.
|
||||
pub fn leave(&mut self) {
|
||||
self.path_focused = false;
|
||||
self.username_focused = false;
|
||||
}
|
||||
|
||||
/// Switch to a different mode. Clears result and resets selection.
|
||||
/// Returns the new generation for stale detection.
|
||||
pub fn set_mode(&mut self, mode: WhoMode) -> u64 {
|
||||
if self.mode == mode {
|
||||
return self.generation;
|
||||
}
|
||||
self.mode = mode;
|
||||
self.result = None;
|
||||
self.selected_index = 0;
|
||||
self.scroll_offset = 0;
|
||||
self.focus_input_for_mode();
|
||||
self.bump_generation()
|
||||
}
|
||||
|
||||
/// Toggle include_closed. Returns new generation if the mode is affected.
|
||||
pub fn toggle_include_closed(&mut self) -> Option<u64> {
|
||||
self.include_closed = !self.include_closed;
|
||||
if self.mode.affected_by_include_closed() {
|
||||
Some(self.bump_generation())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply query results if generation matches.
|
||||
pub fn apply_results(&mut self, generation: u64, result: WhoResult) {
|
||||
if generation != self.generation {
|
||||
return; // Stale response — discard.
|
||||
}
|
||||
self.result = Some(result);
|
||||
self.loading = false;
|
||||
self.selected_index = 0;
|
||||
self.scroll_offset = 0;
|
||||
}
|
||||
|
||||
/// Submit the current input (trigger a query).
|
||||
/// Returns the generation for the new query.
|
||||
pub fn submit(&mut self) -> u64 {
|
||||
self.loading = true;
|
||||
self.bump_generation()
|
||||
}
|
||||
|
||||
// --- Input field operations ---
|
||||
|
||||
/// Insert a char at cursor in the active input field.
|
||||
pub fn insert_char(&mut self, c: char) {
|
||||
if self.path_focused {
|
||||
self.path.insert(self.path_cursor, c);
|
||||
self.path_cursor += c.len_utf8();
|
||||
} else if self.username_focused {
|
||||
self.username.insert(self.username_cursor, c);
|
||||
self.username_cursor += c.len_utf8();
|
||||
}
|
||||
}
|
||||
|
||||
/// Delete the char before cursor in the active input field.
|
||||
pub fn delete_char_before_cursor(&mut self) {
|
||||
if self.path_focused && self.path_cursor > 0 {
|
||||
let prev = prev_char_boundary(&self.path, self.path_cursor);
|
||||
self.path.drain(prev..self.path_cursor);
|
||||
self.path_cursor = prev;
|
||||
} else if self.username_focused && self.username_cursor > 0 {
|
||||
let prev = prev_char_boundary(&self.username, self.username_cursor);
|
||||
self.username.drain(prev..self.username_cursor);
|
||||
self.username_cursor = prev;
|
||||
}
|
||||
}
|
||||
|
||||
/// Move cursor left in the active input.
|
||||
pub fn cursor_left(&mut self) {
|
||||
if self.path_focused && self.path_cursor > 0 {
|
||||
self.path_cursor = prev_char_boundary(&self.path, self.path_cursor);
|
||||
} else if self.username_focused && self.username_cursor > 0 {
|
||||
self.username_cursor = prev_char_boundary(&self.username, self.username_cursor);
|
||||
}
|
||||
}
|
||||
|
||||
/// Move cursor right in the active input.
|
||||
pub fn cursor_right(&mut self) {
|
||||
if self.path_focused && self.path_cursor < self.path.len() {
|
||||
self.path_cursor = next_char_boundary(&self.path, self.path_cursor);
|
||||
} else if self.username_focused && self.username_cursor < self.username.len() {
|
||||
self.username_cursor = next_char_boundary(&self.username, self.username_cursor);
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether any input field has focus.
|
||||
#[must_use]
|
||||
pub fn has_text_focus(&self) -> bool {
|
||||
self.path_focused || self.username_focused
|
||||
}
|
||||
|
||||
/// Blur all inputs.
|
||||
pub fn blur(&mut self) {
|
||||
self.path_focused = false;
|
||||
self.username_focused = false;
|
||||
}
|
||||
|
||||
/// Focus the appropriate input for the current mode.
|
||||
pub fn focus_input_for_mode(&mut self) {
|
||||
self.path_focused = self.mode.needs_path();
|
||||
self.username_focused = self.mode.needs_username();
|
||||
// Place cursor at end of text.
|
||||
if self.path_focused {
|
||||
self.path_cursor = self.path.len();
|
||||
}
|
||||
if self.username_focused {
|
||||
self.username_cursor = self.username.len();
|
||||
}
|
||||
}
|
||||
|
||||
// --- Selection navigation ---
|
||||
|
||||
/// Move selection up.
|
||||
pub fn select_prev(&mut self) {
|
||||
self.selected_index = self.selected_index.saturating_sub(1);
|
||||
}
|
||||
|
||||
/// Move selection down (bounded by result count).
|
||||
pub fn select_next(&mut self, result_count: usize) {
|
||||
if result_count > 0 {
|
||||
self.selected_index = (self.selected_index + 1).min(result_count - 1);
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensure the selected row is visible within the viewport.
|
||||
pub fn ensure_visible(&mut self, viewport_height: usize) {
|
||||
if viewport_height == 0 {
|
||||
return;
|
||||
}
|
||||
if self.selected_index < self.scroll_offset {
|
||||
self.scroll_offset = self.selected_index;
|
||||
} else if self.selected_index >= self.scroll_offset + viewport_height {
|
||||
self.scroll_offset = self.selected_index - viewport_height + 1;
|
||||
}
|
||||
}
|
||||
|
||||
// --- Internal ---
|
||||
|
||||
fn bump_generation(&mut self) -> u64 {
|
||||
self.generation += 1;
|
||||
self.generation
|
||||
}
|
||||
}
|
||||
|
||||
/// Byte offset of the nearest char boundary strictly before `pos`
/// (0 when `pos` is already at the start of the string).
fn prev_char_boundary(s: &str, pos: usize) -> usize {
    // Offset 0 is always a boundary, so the scan always terminates.
    (0..pos)
        .rev()
        .find(|&i| s.is_char_boundary(i))
        .unwrap_or(0)
}
|
||||
|
||||
/// Byte offset of the next char boundary strictly after `pos`,
/// clamped to `s.len()`.
///
/// The clamp guards the `pos >= s.len()` case, where the unguarded
/// scan would return `pos + 1` — an out-of-range offset that panics
/// if used to slice `s`. (Current callers check `cursor < len` first,
/// but the invariant now holds regardless.)
fn next_char_boundary(s: &str, pos: usize) -> usize {
    if pos >= s.len() {
        return s.len();
    }
    let mut i = pos + 1;
    while i < s.len() && !s.is_char_boundary(i) {
        i += 1;
    }
    i
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Unit tests for `WhoMode` (labels, input requirements, cycling,
// number keys) and `WhoState` (mode switching, generation guard,
// input editing, selection/scroll behavior).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_who_mode_defaults_to_expert() {
        assert_eq!(WhoMode::default(), WhoMode::Expert);
    }

    #[test]
    fn test_who_mode_labels() {
        assert_eq!(WhoMode::Expert.label(), "Expert");
        assert_eq!(WhoMode::Active.label(), "Active");
        assert_eq!(WhoMode::Overlap.label(), "Overlap");
    }

    #[test]
    fn test_who_mode_needs_path() {
        assert!(WhoMode::Expert.needs_path());
        assert!(WhoMode::Overlap.needs_path());
        assert!(!WhoMode::Workload.needs_path());
        assert!(!WhoMode::Reviews.needs_path());
        assert!(!WhoMode::Active.needs_path());
    }

    #[test]
    fn test_who_mode_needs_username() {
        assert!(WhoMode::Workload.needs_username());
        assert!(WhoMode::Reviews.needs_username());
        assert!(!WhoMode::Expert.needs_username());
        assert!(!WhoMode::Active.needs_username());
    }

    #[test]
    fn test_who_mode_next_cycles() {
        // Five steps through five modes lands back at the start.
        let start = WhoMode::Expert;
        let m = start.next().next().next().next().next();
        assert_eq!(m, start);
    }

    #[test]
    fn test_who_mode_from_number() {
        assert_eq!(WhoMode::from_number(1), Some(WhoMode::Expert));
        assert_eq!(WhoMode::from_number(5), Some(WhoMode::Overlap));
        assert_eq!(WhoMode::from_number(0), None);
        assert_eq!(WhoMode::from_number(6), None);
    }

    #[test]
    fn test_who_state_default() {
        let state = WhoState::default();
        assert_eq!(state.mode, WhoMode::Expert);
        assert!(state.result.is_none());
        assert!(!state.include_closed);
        assert_eq!(state.generation, 0);
    }

    #[test]
    fn test_set_mode_bumps_generation() {
        let mut state = WhoState::default();
        let generation = state.set_mode(WhoMode::Workload);
        assert_eq!(generation, 1);
        assert_eq!(state.mode, WhoMode::Workload);
        assert!(state.result.is_none());
        assert!(state.username_focused);
        assert!(!state.path_focused);
    }

    #[test]
    fn test_set_mode_same_does_not_bump() {
        let mut state = WhoState::default();
        let generation = state.set_mode(WhoMode::Expert);
        assert_eq!(generation, 0); // No bump for same mode.
    }

    #[test]
    fn test_toggle_include_closed_returns_gen_for_affected_modes() {
        let state = &mut WhoState {
            mode: WhoMode::Workload,
            ..WhoState::default()
        };
        let generation = state.toggle_include_closed();
        assert!(generation.is_some());
        assert!(state.include_closed);
    }

    #[test]
    fn test_toggle_include_closed_returns_none_for_unaffected_modes() {
        let state = &mut WhoState {
            mode: WhoMode::Expert,
            ..WhoState::default()
        };
        let generation = state.toggle_include_closed();
        assert!(generation.is_none());
        assert!(state.include_closed);
    }

    #[test]
    fn test_stale_response_guard() {
        let mut state = WhoState::default();
        let stale_gen = state.submit();
        // Bump generation again (simulating user changed mode).
        let _new_gen = state.set_mode(WhoMode::Active);
        // Old response arrives — should be discarded.
        state.apply_results(
            stale_gen,
            WhoResult::Active(lore::core::who_types::ActiveResult {
                discussions: vec![],
                total_unresolved_in_window: 0,
                truncated: false,
            }),
        );
        assert!(state.result.is_none()); // Stale, discarded.
    }

    #[test]
    fn test_insert_and_delete_char() {
        let mut state = WhoState {
            path_focused: true,
            ..WhoState::default()
        };
        state.insert_char('s');
        state.insert_char('r');
        state.insert_char('c');
        assert_eq!(state.path, "src");
        assert_eq!(state.path_cursor, 3);

        state.delete_char_before_cursor();
        assert_eq!(state.path, "sr");
        assert_eq!(state.path_cursor, 2);
    }

    #[test]
    fn test_cursor_movement() {
        let mut state = WhoState {
            username_focused: true,
            username: "alice".into(),
            username_cursor: 5,
            ..WhoState::default()
        };

        state.cursor_left();
        assert_eq!(state.username_cursor, 4);
        state.cursor_right();
        assert_eq!(state.username_cursor, 5);
        // Right at end is clamped.
        state.cursor_right();
        assert_eq!(state.username_cursor, 5);
    }

    #[test]
    fn test_select_prev_next() {
        let mut state = WhoState::default();
        state.select_next(5);
        assert_eq!(state.selected_index, 1);
        state.select_next(5);
        assert_eq!(state.selected_index, 2);
        state.select_prev();
        assert_eq!(state.selected_index, 1);
        state.select_prev();
        assert_eq!(state.selected_index, 0);
        state.select_prev(); // Should not underflow.
        assert_eq!(state.selected_index, 0);
    }

    #[test]
    fn test_ensure_visible() {
        let mut state = WhoState {
            selected_index: 15,
            ..WhoState::default()
        };
        state.ensure_visible(5);
        assert_eq!(state.scroll_offset, 11); // 15 - 5 + 1
    }

    #[test]
    fn test_enter_focuses_correct_input() {
        let mut state = WhoState {
            mode: WhoMode::Expert,
            ..WhoState::default()
        };
        state.enter();
        assert!(state.path_focused);
        assert!(!state.username_focused);

        state.mode = WhoMode::Reviews;
        state.enter();
        assert!(!state.path_focused);
        // Reviews needs username.
        // focus_input_for_mode is called in enter().
    }

    #[test]
    fn test_affected_by_include_closed() {
        assert!(WhoMode::Workload.affected_by_include_closed());
        assert!(WhoMode::Active.affected_by_include_closed());
        assert!(!WhoMode::Expert.affected_by_include_closed());
        assert!(!WhoMode::Reviews.affected_by_include_closed());
        assert!(!WhoMode::Overlap.affected_by_include_closed());
    }
}
|
||||
|
||||
134
crates/lore-tui/src/view/bootstrap.rs
Normal file
134
crates/lore-tui/src/view/bootstrap.rs
Normal file
@@ -0,0 +1,134 @@
|
||||
#![allow(dead_code)] // Phase 2.5: consumed by render_screen dispatch
|
||||
|
||||
//! Bootstrap screen view.
|
||||
//!
|
||||
//! Shown when the database has no entity data. Guides users to run
|
||||
//! a sync to populate the database.
|
||||
|
||||
use ftui::core::geometry::Rect;
|
||||
use ftui::render::cell::{Cell, PackedRgba};
|
||||
use ftui::render::drawing::Draw;
|
||||
use ftui::render::frame::Frame;
|
||||
|
||||
use crate::state::bootstrap::BootstrapState;
|
||||
|
||||
// Colors (Flexoki palette).
|
||||
const TEXT: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx
|
||||
const MUTED: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2
|
||||
const ACCENT: PackedRgba = PackedRgba::rgb(0xDA, 0x70, 0x2C); // orange
|
||||
|
||||
/// Render the bootstrap screen.
|
||||
///
|
||||
/// Centers a message in the content area, guiding the user to start a sync.
|
||||
/// When a sync is in progress, shows a "syncing" message instead.
|
||||
pub fn render_bootstrap(frame: &mut Frame<'_>, state: &BootstrapState, area: Rect) {
|
||||
if area.width < 10 || area.height < 5 {
|
||||
return;
|
||||
}
|
||||
|
||||
let center_y = area.y + area.height / 2;
|
||||
let max_x = area.x.saturating_add(area.width);
|
||||
|
||||
// Title.
|
||||
let title = "No data found";
|
||||
let title_x = area.x + area.width.saturating_sub(title.len() as u16) / 2;
|
||||
frame.print_text_clipped(
|
||||
title_x,
|
||||
center_y.saturating_sub(2),
|
||||
title,
|
||||
Cell {
|
||||
fg: ACCENT,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
|
||||
if state.sync_started {
|
||||
// Sync in progress.
|
||||
let msg = "Syncing data from GitLab...";
|
||||
let msg_x = area.x + area.width.saturating_sub(msg.len() as u16) / 2;
|
||||
frame.print_text_clipped(
|
||||
msg_x,
|
||||
center_y,
|
||||
msg,
|
||||
Cell {
|
||||
fg: TEXT,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
} else {
|
||||
// Prompt user to start sync.
|
||||
let msg = "Run sync to get started.";
|
||||
let msg_x = area.x + area.width.saturating_sub(msg.len() as u16) / 2;
|
||||
frame.print_text_clipped(
|
||||
msg_x,
|
||||
center_y,
|
||||
msg,
|
||||
Cell {
|
||||
fg: TEXT,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
|
||||
let hint = "Press 'g' then 's' to start sync, or 'q' to quit.";
|
||||
let hint_x = area.x + area.width.saturating_sub(hint.len() as u16) / 2;
|
||||
frame.print_text_clipped(
|
||||
hint_x,
|
||||
center_y + 2,
|
||||
hint,
|
||||
Cell {
|
||||
fg: MUTED,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Smoke tests for `render_bootstrap`: the renderer must not panic for
// normal areas, with a sync in progress, or for degenerate tiny areas.
#[cfg(test)]
mod tests {
    use super::*;
    use ftui::render::grapheme_pool::GraphemePool;

    // Builds a Frame of the given size over a fresh GraphemePool and
    // runs `$body` with it bound to `$frame`.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    #[test]
    fn test_render_bootstrap_no_panic() {
        with_frame!(80, 24, |frame| {
            let state = BootstrapState::default();
            render_bootstrap(&mut frame, &state, Rect::new(0, 1, 80, 22));
        });
    }

    #[test]
    fn test_render_bootstrap_sync_started() {
        with_frame!(80, 24, |frame| {
            let state = BootstrapState {
                sync_started: true,
                ..Default::default()
            };
            render_bootstrap(&mut frame, &state, Rect::new(0, 1, 80, 22));
        });
    }

    #[test]
    fn test_render_bootstrap_tiny_area_noop() {
        with_frame!(8, 3, |frame| {
            let state = BootstrapState::default();
            render_bootstrap(&mut frame, &state, Rect::new(0, 0, 8, 3));
            // Should not panic — early return for tiny areas.
        });
    }
}
|
||||
389
crates/lore-tui/src/view/command_palette.rs
Normal file
389
crates/lore-tui/src/view/command_palette.rs
Normal file
@@ -0,0 +1,389 @@
|
||||
//! Command palette overlay — modal fuzzy-match command picker.
|
||||
//!
|
||||
//! Renders a centered modal with a query input at the top and a scrollable
|
||||
//! list of matching commands below. Keybinding hints are right-aligned.
|
||||
|
||||
use ftui::core::geometry::Rect;
|
||||
use ftui::render::cell::Cell;
|
||||
use ftui::render::drawing::{BorderChars, Draw};
|
||||
use ftui::render::frame::Frame;
|
||||
|
||||
use crate::state::command_palette::CommandPaletteState;
|
||||
|
||||
use super::{ACCENT, BG_SURFACE, BORDER, TEXT, TEXT_MUTED};
|
||||
|
||||
/// Width of `text` in terminal cells, saturating at `u16::MAX`.
///
/// NOTE(review): counts `char`s, so combining marks and wide (CJK)
/// glyphs are approximated — confirm this is acceptable for palette
/// command names, which appear to be ASCII.
fn text_cell_width(text: &str) -> u16 {
    u16::try_from(text.chars().count()).unwrap_or(u16::MAX)
}
|
||||
|
||||
/// Terminal-cell offset of `cursor` (a byte index into `query`),
/// snapped down to the nearest char boundary and clamped to the end.
fn cursor_cell_offset(query: &str, cursor: usize) -> u16 {
    let mut end = cursor.min(query.len());
    while end > 0 && !query.is_char_boundary(end) {
        end -= 1;
    }
    // Inline char count with the same saturation rule as `text_cell_width`.
    u16::try_from(query[..end].chars().count()).unwrap_or(u16::MAX)
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// render_command_palette
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the command palette overlay centered on the screen.
///
/// Only renders if `state.is_open()`. The modal is 60% width, 50% height,
/// capped at 60x20. Inside the modal: a `> ` query prompt (placeholder
/// text when the query is empty), a block cursor, a separator, and a
/// scrollable result list with right-aligned keybinding hints. Returns
/// early whenever the terminal or inner area is too small to draw into.
pub fn render_command_palette(frame: &mut Frame<'_>, state: &CommandPaletteState, area: Rect) {
    if !state.is_open() {
        return;
    }
    if area.height < 5 || area.width < 20 {
        return;
    }

    // Modal dimensions: 60% of screen, capped.
    let modal_width = (area.width * 3 / 5).clamp(30, 60);
    let modal_height = (area.height / 2).clamp(6, 20);

    let modal_x = area.x + (area.width.saturating_sub(modal_width)) / 2;
    let modal_y = area.y + (area.height.saturating_sub(modal_height)) / 2;
    let modal_rect = Rect::new(modal_x, modal_y, modal_width, modal_height);

    // Clear background.
    let bg_cell = Cell {
        fg: TEXT,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    for y in modal_rect.y..modal_rect.bottom() {
        for x in modal_rect.x..modal_rect.right() {
            frame.buffer.set(x, y, bg_cell);
        }
    }

    // Border.
    let border_cell = Cell {
        fg: BORDER,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    frame.draw_border(modal_rect, BorderChars::ROUNDED, border_cell);

    // Title (centered on the top border row; ASCII, so `len()` == cells).
    let title = " Command Palette ";
    let title_x = modal_x + (modal_width.saturating_sub(title.len() as u16)) / 2;
    let title_cell = Cell {
        fg: ACCENT,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    frame.print_text_clipped(title_x, modal_y, title, title_cell, modal_rect.right());

    // Inner content area (inside border; 2-cell horizontal padding).
    let inner = Rect::new(
        modal_x + 2,
        modal_y + 1,
        modal_width.saturating_sub(4),
        modal_height.saturating_sub(2),
    );
    if inner.width < 4 || inner.height < 2 {
        return;
    }

    // --- Query input line ---
    let query_y = inner.y;
    let prompt = "> ";
    let prompt_cell = Cell {
        fg: ACCENT,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    let query_start =
        frame.print_text_clipped(inner.x, query_y, prompt, prompt_cell, inner.right());

    // Empty query shows a muted placeholder instead of the live text.
    let query_display = if state.query.is_empty() {
        "Type to filter..."
    } else {
        &state.query
    };
    let query_fg = if state.query.is_empty() {
        TEXT_MUTED
    } else {
        TEXT
    };
    let q_cell = Cell {
        fg: query_fg,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    frame.print_text_clipped(query_start, query_y, query_display, q_cell, inner.right());

    // Cursor indicator (if query focused and not showing placeholder).
    // `state.cursor` is a byte index; `cursor_cell_offset` converts it to
    // a cell column, clamped back to a char boundary.
    if !state.query.is_empty() {
        let cursor_x = query_start.saturating_add(cursor_cell_offset(&state.query, state.cursor));
        if cursor_x < inner.right() {
            let cursor_cell = Cell {
                fg: BG_SURFACE,
                bg: TEXT,
                ..Cell::default()
            };
            // Draw cursor block. If at end of text, draw a space.
            let cursor_char = state
                .query
                .get(state.cursor..)
                .and_then(|s| s.chars().next())
                .unwrap_or(' ');
            frame.print_text_clipped(
                cursor_x,
                query_y,
                &cursor_char.to_string(),
                cursor_cell,
                inner.right(),
            );
        }
    }

    // --- Separator ---
    let sep_y = query_y + 1;
    if sep_y >= inner.bottom() {
        return;
    }
    let sep_cell = Cell {
        fg: BORDER,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    let sep_line = "─".repeat(inner.width as usize);
    frame.print_text_clipped(inner.x, sep_y, &sep_line, sep_cell, inner.right());

    // --- Results list ---
    let list_y = sep_y + 1;
    let list_height = inner.bottom().saturating_sub(list_y) as usize;
    if list_height == 0 {
        return;
    }

    if state.filtered.is_empty() {
        let msg = if state.query.is_empty() {
            "No commands available"
        } else {
            "No matching commands"
        };
        let msg_cell = Cell {
            fg: TEXT_MUTED,
            bg: BG_SURFACE,
            ..Cell::default()
        };
        frame.print_text_clipped(inner.x, list_y, msg, msg_cell, inner.right());
        return;
    }

    // Scroll so the selected item is always visible: keep the selection on
    // the last visible row once it would move past the window.
    let scroll_offset = if state.selected_index >= list_height {
        state.selected_index - list_height + 1
    } else {
        0
    };

    let normal_cell = Cell {
        fg: TEXT,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    let selected_cell = Cell {
        fg: BG_SURFACE,
        bg: ACCENT,
        ..Cell::default()
    };
    let key_cell = Cell {
        fg: TEXT_MUTED,
        bg: BG_SURFACE,
        ..Cell::default()
    };
    let key_selected_cell = Cell {
        fg: BG_SURFACE,
        bg: ACCENT,
        ..Cell::default()
    };

    // `enumerate` runs after `skip`, so `i` is window-relative; the
    // absolute index of a row is `i + scroll_offset`.
    for (i, entry) in state
        .filtered
        .iter()
        .skip(scroll_offset)
        .enumerate()
        .take(list_height)
    {
        let y = list_y + i as u16;
        let is_selected = i + scroll_offset == state.selected_index;

        let (label_style, kb_style) = if is_selected {
            (selected_cell, key_selected_cell)
        } else {
            (normal_cell, key_cell)
        };

        // Fill row background for selected item.
        if is_selected {
            for x in inner.x..inner.right() {
                frame.buffer.set(x, y, selected_cell);
            }
        }

        // Label (left-aligned).
        frame.print_text_clipped(inner.x, y, entry.label, label_style, inner.right());

        // Keybinding (right-aligned); skipped when it would collide with
        // the label (needs at least one cell of gap).
        if let Some(ref kb) = entry.keybinding {
            let kb_width = text_cell_width(kb);
            let kb_x = inner.right().saturating_sub(kb_width);
            if kb_x > inner.x + text_cell_width(entry.label).saturating_add(1) {
                frame.print_text_clipped(kb_x, y, kb, kb_style, inner.right());
            }
        }
    }

    // Scroll indicator (bottom-right of the modal border row), shown only
    // when there are more results than visible rows.
    if state.filtered.len() > list_height {
        let indicator = format!(
            " {}/{} ",
            (scroll_offset + list_height).min(state.filtered.len()),
            state.filtered.len()
        );
        let ind_x = modal_rect
            .right()
            .saturating_sub(indicator.len() as u16 + 1);
        let ind_y = modal_rect.bottom().saturating_sub(1);
        let ind_cell = Cell {
            fg: TEXT_MUTED,
            bg: BG_SURFACE,
            ..Cell::default()
        };
        frame.print_text_clipped(ind_x, ind_y, &indicator, ind_cell, modal_rect.right());
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::commands::build_registry;
    use crate::message::Screen;
    use crate::state::command_palette::CommandPaletteState;
    use ftui::render::grapheme_pool::GraphemePool;

    /// Build a `Frame` backed by a local `GraphemePool` and run `$body`
    /// with it bound to `$frame`.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    /// A closed palette must render nothing at all.
    #[test]
    fn test_render_palette_closed_is_noop() {
        with_frame!(80, 24, |frame| {
            let state = CommandPaletteState::default();
            render_command_palette(&mut frame, &state, Rect::new(0, 0, 80, 24));
            // No content rendered when palette is closed.
        });
    }

    /// Opening against the real command registry renders visible content.
    #[test]
    fn test_render_palette_open_no_panic() {
        with_frame!(80, 24, |frame| {
            let registry = build_registry();
            let mut state = CommandPaletteState::default();
            state.open(&registry, &Screen::Dashboard);
            render_command_palette(&mut frame, &state, Rect::new(0, 0, 80, 24));

            // Should have rendered content in center area.
            let has_content = (25..55u16).any(|x| {
                (8..16u16).any(|y| {
                    let cell = frame.buffer.get(x, y).unwrap();
                    !cell.is_empty()
                })
            });
            assert!(has_content, "Expected palette overlay in center area");
        });
    }

    /// Typing a query character must not panic the render path.
    #[test]
    fn test_render_palette_with_query() {
        with_frame!(80, 24, |frame| {
            let registry = build_registry();
            let mut state = CommandPaletteState::default();
            state.open(&registry, &Screen::Dashboard);
            state.insert_char('q', &registry, &Screen::Dashboard);
            render_command_palette(&mut frame, &state, Rect::new(0, 0, 80, 24));
        });
    }

    /// The cursor column must advance by chars, not bytes: a single
    /// multi-byte scalar ('é') should place the cursor one cell after the
    /// query start, not two.
    #[test]
    fn test_render_palette_unicode_cursor_uses_char_offset() {
        with_frame!(80, 24, |frame| {
            let registry = build_registry();
            let mut state = CommandPaletteState::default();
            state.open(&registry, &Screen::Dashboard);
            state.insert_char('é', &registry, &Screen::Dashboard);
            render_command_palette(&mut frame, &state, Rect::new(0, 0, 80, 24));

            // Recompute the modal geometry the renderer uses.
            let area = Rect::new(0, 0, 80, 24);
            let modal_width = (area.width * 3 / 5).clamp(30, 60);
            let modal_height = (area.height / 2).clamp(6, 20);
            let modal_x = area.x + (area.width.saturating_sub(modal_width)) / 2;
            let modal_y = area.y + (area.height.saturating_sub(modal_height)) / 2;
            let inner = Rect::new(
                modal_x + 2,
                modal_y + 1,
                modal_width.saturating_sub(4),
                modal_height.saturating_sub(2),
            );

            // Prompt "> " is two cells; one unicode scalar should place cursor at +1.
            let query_y = inner.y;
            let cursor_x = inner.x + 3;
            let cell = frame
                .buffer
                .get(cursor_x, query_y)
                .expect("cursor position must be in bounds");
            assert_eq!(cell.bg, TEXT);
        });
    }

    /// Moving the selection must not panic the render path.
    #[test]
    fn test_render_palette_with_selection() {
        with_frame!(80, 24, |frame| {
            let registry = build_registry();
            let mut state = CommandPaletteState::default();
            state.open(&registry, &Screen::Dashboard);
            state.select_next();
            state.select_next();
            render_command_palette(&mut frame, &state, Rect::new(0, 0, 80, 24));
        });
    }

    /// A terminal below the 20x5 minimum must be a no-op, not a panic.
    #[test]
    fn test_render_palette_tiny_terminal_noop() {
        with_frame!(15, 4, |frame| {
            let registry = build_registry();
            let mut state = CommandPaletteState::default();
            state.open(&registry, &Screen::Dashboard);
            render_command_palette(&mut frame, &state, Rect::new(0, 0, 15, 4));
        });
    }

    /// A query that filters everything out takes the empty-results branch.
    #[test]
    fn test_render_palette_no_results() {
        with_frame!(80, 24, |frame| {
            let registry = build_registry();
            let mut state = CommandPaletteState::default();
            state.open(&registry, &Screen::Dashboard);
            for c in "zzzzzz".chars() {
                state.insert_char(c, &registry, &Screen::Dashboard);
            }
            render_command_palette(&mut frame, &state, Rect::new(0, 0, 80, 24));
        });
    }
}
|
||||
@@ -181,13 +181,16 @@ pub fn render_cross_refs(
|
||||
// Spacing
|
||||
x = frame.print_text_clipped(x, y, " ", badge_style, max_x);
|
||||
|
||||
// Entity prefix + label
|
||||
// Entity prefix + label — derive sigil from entity kind, not ref kind.
|
||||
let prefix = match cr.kind {
|
||||
CrossRefKind::ClosingMr | CrossRefKind::MentionedIn => {
|
||||
format!("!{} ", cr.entity_key.iid)
|
||||
}
|
||||
CrossRefKind::RelatedIssue => {
|
||||
format!("#{} ", cr.entity_key.iid)
|
||||
CrossRefKind::ClosingMr => format!("!{} ", cr.entity_key.iid),
|
||||
CrossRefKind::RelatedIssue => format!("#{} ", cr.entity_key.iid),
|
||||
CrossRefKind::MentionedIn => {
|
||||
let sigil = match cr.entity_key.kind {
|
||||
crate::message::EntityKind::MergeRequest => "!",
|
||||
crate::message::EntityKind::Issue => "#",
|
||||
};
|
||||
format!("{sigil}{} ", cr.entity_key.iid)
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
578
crates/lore-tui/src/view/file_history.rs
Normal file
578
crates/lore-tui/src/view/file_history.rs
Normal file
@@ -0,0 +1,578 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! File History view — renders per-file MR timeline with rename chains.
|
||||
//!
|
||||
//! Layout:
|
||||
//! ```text
|
||||
//! +-----------------------------------+
|
||||
//! | Path: [src/lib.rs_] [R] [M] [D] | <- path input + option toggles
|
||||
//! | Rename chain: a.rs -> b.rs -> ... | <- shown when renames followed
|
||||
//! | 5 merge requests across 2 paths | <- summary line
|
||||
//! +-----------------------------------+
|
||||
//! | > !42 Fix auth @alice modified ... | <- MR list (selected = >)
|
||||
//! | !39 Refactor @bob renamed ... |
|
||||
//! | @carol: "This looks off..." | <- inline discussion (if toggled)
|
||||
//! +-----------------------------------+
|
||||
//! | r:renames m:merged d:discussions | <- hint bar
|
||||
//! +-----------------------------------+
|
||||
//! ```
|
||||
|
||||
use ftui::render::cell::{Cell, PackedRgba};
|
||||
use ftui::render::drawing::Draw;
|
||||
use ftui::render::frame::Frame;
|
||||
|
||||
use crate::state::file_history::{FileHistoryResult, FileHistoryState};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Colors (Flexoki palette)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const TEXT: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx
|
||||
const TEXT_MUTED: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2
|
||||
const BG_SURFACE: PackedRgba = PackedRgba::rgb(0x28, 0x28, 0x24); // bg-2
|
||||
const ACCENT: PackedRgba = PackedRgba::rgb(0xDA, 0x70, 0x2C); // orange
|
||||
const GREEN: PackedRgba = PackedRgba::rgb(0x87, 0x9A, 0x39); // green
|
||||
const CYAN: PackedRgba = PackedRgba::rgb(0x3A, 0xA9, 0x9F); // cyan
|
||||
const YELLOW: PackedRgba = PackedRgba::rgb(0xD0, 0xA2, 0x15); // yellow
|
||||
const RED: PackedRgba = PackedRgba::rgb(0xAF, 0x3A, 0x29); // red
|
||||
const SELECTION_BG: PackedRgba = PackedRgba::rgb(0x34, 0x34, 0x31); // bg-3
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public entry point
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the File History screen.
///
/// Layout is drawn top-to-bottom: path input, option toggles, then either
/// a loading line, an empty-state prompt, or — when a result is present —
/// the optional rename chain, a summary line, the MR list, and a one-row
/// hint bar pinned to the bottom of `area`. Each early `return` below
/// fires when the remaining vertical space runs out or when there is
/// nothing further to draw.
pub fn render_file_history(
    frame: &mut Frame<'_>,
    state: &FileHistoryState,
    area: ftui::core::geometry::Rect,
) {
    if area.width < 10 || area.height < 3 {
        return; // Terminal too small.
    }

    let x = area.x;
    let max_x = area.right();
    let width = area.width;
    // `y` advances as each row of chrome is drawn.
    let mut y = area.y;

    // --- Path input bar ---
    render_path_input(frame, state, x, y, width);
    y += 1;

    // Below 5 rows there is only room for the input bar itself.
    if area.height < 5 {
        return;
    }

    // --- Option toggles indicator ---
    render_toggle_indicators(frame, state, x, y, width);
    y += 1;

    // --- Loading indicator (replaces all result content) ---
    if state.loading {
        render_loading(frame, x, y, max_x);
        return;
    }

    // No result yet: prompt the user and stop.
    let Some(result) = &state.result else {
        render_empty_state(frame, x, y, max_x);
        return;
    };

    // --- Rename chain (only when renames were followed and the chain
    //     actually contains more than the queried path) ---
    if result.renames_followed && result.rename_chain.len() > 1 {
        render_rename_chain(frame, &result.rename_chain, x, y, max_x);
        y += 1;
    }

    // --- Summary line ---
    render_summary(frame, result, x, y, max_x);
    y += 1;

    if result.merge_requests.is_empty() {
        render_no_results(frame, x, y, max_x);
        return;
    }

    // Reserve 1 row for hint bar at the bottom.
    let hint_y = area.bottom().saturating_sub(1);
    let list_height = hint_y.saturating_sub(y) as usize;

    if list_height == 0 {
        return;
    }

    // --- MR list ---
    render_mr_list(frame, result, state, x, y, width, list_height);

    // --- Hint bar ---
    render_hint_bar(frame, x, hint_y, max_x);
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Components
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn render_path_input(frame: &mut Frame<'_>, state: &FileHistoryState, x: u16, y: u16, width: u16) {
|
||||
let max_x = x + width;
|
||||
let label = "Path: ";
|
||||
let label_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_label = frame.print_text_clipped(x, y, label, label_style, max_x);
|
||||
|
||||
// Input text.
|
||||
let input_style = Cell {
|
||||
fg: if state.path_focused { TEXT } else { TEXT_MUTED },
|
||||
..Cell::default()
|
||||
};
|
||||
let display_text = if state.path_input.is_empty() && !state.path_focused {
|
||||
"type a file path..."
|
||||
} else {
|
||||
&state.path_input
|
||||
};
|
||||
frame.print_text_clipped(after_label, y, display_text, input_style, max_x);
|
||||
|
||||
// Cursor indicator.
|
||||
if state.path_focused {
|
||||
let cursor_x = after_label + state.path_cursor as u16;
|
||||
if cursor_x < max_x {
|
||||
let cursor_cell = Cell {
|
||||
fg: PackedRgba::rgb(0x10, 0x0F, 0x0F), // dark bg
|
||||
bg: TEXT,
|
||||
..Cell::default()
|
||||
};
|
||||
let ch = state
|
||||
.path_input
|
||||
.get(state.path_cursor..)
|
||||
.and_then(|s| s.chars().next())
|
||||
.unwrap_or(' ');
|
||||
frame.print_text_clipped(cursor_x, y, &ch.to_string(), cursor_cell, max_x);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render_toggle_indicators(
|
||||
frame: &mut Frame<'_>,
|
||||
state: &FileHistoryState,
|
||||
x: u16,
|
||||
y: u16,
|
||||
width: u16,
|
||||
) {
|
||||
let max_x = x + width;
|
||||
|
||||
let on_style = Cell {
|
||||
fg: GREEN,
|
||||
..Cell::default()
|
||||
};
|
||||
let off_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
let renames_tag = if state.follow_renames {
|
||||
"[renames:on]"
|
||||
} else {
|
||||
"[renames:off]"
|
||||
};
|
||||
let merged_tag = if state.merged_only {
|
||||
"[merged:on]"
|
||||
} else {
|
||||
"[merged:off]"
|
||||
};
|
||||
let disc_tag = if state.show_discussions {
|
||||
"[disc:on]"
|
||||
} else {
|
||||
"[disc:off]"
|
||||
};
|
||||
|
||||
let renames_style = if state.follow_renames {
|
||||
on_style
|
||||
} else {
|
||||
off_style
|
||||
};
|
||||
let merged_style = if state.merged_only {
|
||||
on_style
|
||||
} else {
|
||||
off_style
|
||||
};
|
||||
let disc_style = if state.show_discussions {
|
||||
on_style
|
||||
} else {
|
||||
off_style
|
||||
};
|
||||
|
||||
let after_r = frame.print_text_clipped(x + 1, y, renames_tag, renames_style, max_x);
|
||||
let after_m = frame.print_text_clipped(after_r + 1, y, merged_tag, merged_style, max_x);
|
||||
frame.print_text_clipped(after_m + 1, y, disc_tag, disc_style, max_x);
|
||||
}
|
||||
|
||||
fn render_rename_chain(frame: &mut Frame<'_>, chain: &[String], x: u16, y: u16, max_x: u16) {
|
||||
let label_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
let chain_style = Cell {
|
||||
fg: CYAN,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
let after_label = frame.print_text_clipped(x + 1, y, "Renames: ", label_style, max_x);
|
||||
let chain_str = chain.join(" -> ");
|
||||
frame.print_text_clipped(after_label, y, &chain_str, chain_style, max_x);
|
||||
}
|
||||
|
||||
fn render_summary(frame: &mut Frame<'_>, result: &FileHistoryResult, x: u16, y: u16, max_x: u16) {
|
||||
let summary = if result.paths_searched > 1 {
|
||||
format!(
|
||||
"{} merge request{} across {} paths",
|
||||
result.total_mrs,
|
||||
if result.total_mrs == 1 { "" } else { "s" },
|
||||
result.paths_searched,
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"{} merge request{}",
|
||||
result.total_mrs,
|
||||
if result.total_mrs == 1 { "" } else { "s" },
|
||||
)
|
||||
};
|
||||
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x + 1, y, &summary, style, max_x);
|
||||
}
|
||||
|
||||
/// Render the scrollable MR list: one row per visible merge request
/// (state icon, `!iid`, truncated title, `@author` + change type), with
/// the selected row highlighted, followed — when toggled on — by a
/// discussion block in any rows left over.
fn render_mr_list(
    frame: &mut Frame<'_>,
    result: &FileHistoryResult,
    state: &FileHistoryState,
    x: u16,
    start_y: u16,
    width: u16,
    height: usize,
) {
    let max_x = x + width;
    let offset = state.scroll_offset as usize;

    // `enumerate` runs after `skip`, so `i` is window-relative; the
    // absolute row index is `offset + i`.
    for (i, mr) in result
        .merge_requests
        .iter()
        .skip(offset)
        .enumerate()
        .take(height)
    {
        let y = start_y + i as u16;
        let row_idx = offset + i;
        let selected = row_idx == state.selected_mr_index;

        // Selection background (painted first; text cells re-set it below).
        if selected {
            let bg_cell = Cell {
                bg: SELECTION_BG,
                ..Cell::default()
            };
            for col in x..max_x {
                frame.buffer.set(col, y, bg_cell);
            }
        }

        // State icon: M=merged, O=opened, C=closed, ?=anything else.
        let (icon, icon_color) = match mr.state.as_str() {
            "merged" => ("M", GREEN),
            "opened" => ("O", YELLOW),
            "closed" => ("C", RED),
            _ => ("?", TEXT_MUTED),
        };
        let prefix = if selected { "> " } else { "  " };
        let sel_bg = if selected { SELECTION_BG } else { BG_SURFACE };

        let prefix_style = Cell {
            fg: ACCENT,
            bg: sel_bg,
            ..Cell::default()
        };
        let after_prefix = frame.print_text_clipped(x, y, prefix, prefix_style, max_x);

        let icon_style = Cell {
            fg: icon_color,
            bg: sel_bg,
            ..Cell::default()
        };
        let after_icon = frame.print_text_clipped(after_prefix, y, icon, icon_style, max_x);

        // !iid
        let iid_str = format!(" !{}", mr.iid);
        let ref_style = Cell {
            fg: ACCENT,
            bg: sel_bg,
            ..Cell::default()
        };
        let after_iid = frame.print_text_clipped(after_icon, y, &iid_str, ref_style, max_x);

        // Title (truncated).
        let title = truncate_str(&mr.title, 35);
        let title_style = Cell {
            fg: TEXT,
            bg: sel_bg,
            ..Cell::default()
        };
        let after_title = frame.print_text_clipped(after_iid + 1, y, &title, title_style, max_x);

        // @author + change_type
        let meta = format!(
            "@{} {}",
            truncate_str(&mr.author_username, 12),
            mr.change_type
        );
        let meta_style = Cell {
            fg: TEXT_MUTED,
            bg: sel_bg,
            ..Cell::default()
        };
        frame.print_text_clipped(after_title + 1, y, &meta, meta_style, max_x);
    }

    // Inline discussion snippets (rendered beneath MRs when toggled on).
    // For simplicity, discussions are shown as a separate block after the MR list
    // in this initial implementation. Full inline rendering (grouped by MR) is
    // a follow-up enhancement.
    //
    // NOTE(review): the start row uses the TOTAL MR count capped at `height`,
    // not the count actually drawn (`len - offset` capped). When the list is
    // scrolled to near its end, the discussion block may not sit directly
    // under the last visible row — confirm this is intended.
    if state.show_discussions && !result.discussions.is_empty() {
        let disc_start_y = start_y + result.merge_requests.len().min(height) as u16;
        let remaining = height.saturating_sub(result.merge_requests.len().min(height));
        render_discussions(frame, result, x, disc_start_y, max_x, remaining);
    }
}
|
||||
|
||||
fn render_discussions(
|
||||
frame: &mut Frame<'_>,
|
||||
result: &FileHistoryResult,
|
||||
x: u16,
|
||||
start_y: u16,
|
||||
max_x: u16,
|
||||
max_rows: usize,
|
||||
) {
|
||||
if max_rows == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let sep_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x + 1, start_y, "-- discussions --", sep_style, max_x);
|
||||
|
||||
let disc_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
let author_style = Cell {
|
||||
fg: CYAN,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
for (i, disc) in result
|
||||
.discussions
|
||||
.iter()
|
||||
.enumerate()
|
||||
.take(max_rows.saturating_sub(1))
|
||||
{
|
||||
let y = start_y + 1 + i as u16;
|
||||
let after_author = frame.print_text_clipped(
|
||||
x + 2,
|
||||
y,
|
||||
&format!("@{}: ", disc.author_username),
|
||||
author_style,
|
||||
max_x,
|
||||
);
|
||||
let snippet = truncate_str(&disc.body_snippet, 60);
|
||||
frame.print_text_clipped(after_author, y, &snippet, disc_style, max_x);
|
||||
}
|
||||
}
|
||||
|
||||
fn render_loading(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: ACCENT,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x + 1, y, "Loading file history...", style, max_x);
|
||||
}
|
||||
|
||||
fn render_empty_state(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(
|
||||
x + 1,
|
||||
y,
|
||||
"Enter a file path and press Enter to search.",
|
||||
style,
|
||||
max_x,
|
||||
);
|
||||
}
|
||||
|
||||
fn render_no_results(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x + 1, y, "No MRs found for this file.", style, max_x);
|
||||
frame.print_text_clipped(
|
||||
x + 1,
|
||||
y + 1,
|
||||
"Hint: Ensure 'lore sync' has fetched MR file changes.",
|
||||
style,
|
||||
max_x,
|
||||
);
|
||||
}
|
||||
|
||||
fn render_hint_bar(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
// Fill background.
|
||||
for col in x..max_x {
|
||||
frame.buffer.set(col, y, style);
|
||||
}
|
||||
|
||||
let hints = "/:path r:renames m:merged d:discussions Enter:open MR q:back";
|
||||
frame.print_text_clipped(x + 1, y, hints, style, max_x);
|
||||
}
|
||||
|
||||
/// Truncate a string to at most `max_chars` display characters, replacing
/// the tail with a single `…` when truncation occurs.
///
/// Counts Unicode scalars (`char`s), so multi-byte text is never split
/// mid-character. Fix: `max_chars == 0` previously returned "…" (one
/// character, exceeding the stated limit); it now returns the empty string.
fn truncate_str(s: &str, max_chars: usize) -> String {
    if s.chars().count() <= max_chars {
        s.to_string()
    } else if max_chars == 0 {
        String::new()
    } else {
        // Keep max_chars - 1 characters so the ellipsis fits in the budget.
        let truncated: String = s.chars().take(max_chars - 1).collect();
        format!("{truncated}…")
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::state::file_history::{FileHistoryMr, FileHistoryResult, FileHistoryState};
    use ftui::render::grapheme_pool::GraphemePool;

    /// Build a `Frame` backed by a local `GraphemePool` and run `$body`
    /// with it bound to `$frame`.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    /// Shorthand for a `Rect` anchored at the origin.
    fn test_area(w: u16, h: u16) -> ftui::core::geometry::Rect {
        ftui::core::geometry::Rect {
            x: 0,
            y: 0,
            width: w,
            height: h,
        }
    }

    /// Default (empty) state renders without panicking.
    #[test]
    fn test_render_empty_no_panic() {
        with_frame!(80, 24, |frame| {
            let state = FileHistoryState::default();
            render_file_history(&mut frame, &state, test_area(80, 24));
        });
    }

    /// An area below the 10x3 minimum must be a no-op, not a panic.
    #[test]
    fn test_render_tiny_terminal_noop() {
        with_frame!(5, 2, |frame| {
            let state = FileHistoryState::default();
            render_file_history(&mut frame, &state, test_area(5, 2));
        });
    }

    /// The loading branch renders without panicking.
    #[test]
    fn test_render_loading() {
        with_frame!(80, 24, |frame| {
            let state = FileHistoryState {
                loading: true,
                ..FileHistoryState::default()
            };
            render_file_history(&mut frame, &state, test_area(80, 24));
        });
    }

    /// A populated result (merged + opened MRs) renders the full list path.
    #[test]
    fn test_render_with_results() {
        with_frame!(100, 30, |frame| {
            let state = FileHistoryState {
                result: Some(FileHistoryResult {
                    path: "src/lib.rs".into(),
                    rename_chain: vec!["src/lib.rs".into()],
                    renames_followed: false,
                    merge_requests: vec![
                        FileHistoryMr {
                            iid: 42,
                            title: "Fix authentication flow".into(),
                            state: "merged".into(),
                            author_username: "alice".into(),
                            change_type: "modified".into(),
                            merged_at_ms: Some(1_700_000_000_000),
                            updated_at_ms: 1_700_000_000_000,
                            merge_commit_sha: Some("abc123".into()),
                        },
                        FileHistoryMr {
                            iid: 39,
                            title: "Refactor module structure".into(),
                            state: "opened".into(),
                            author_username: "bob".into(),
                            change_type: "renamed".into(),
                            merged_at_ms: None,
                            updated_at_ms: 1_699_000_000_000,
                            merge_commit_sha: None,
                        },
                    ],
                    discussions: vec![],
                    total_mrs: 2,
                    paths_searched: 1,
                }),
                ..FileHistoryState::default()
            };
            render_file_history(&mut frame, &state, test_area(100, 30));
        });
    }

    /// A followed rename chain (>1 entry) exercises the chain row.
    #[test]
    fn test_render_with_rename_chain() {
        with_frame!(80, 24, |frame| {
            let state = FileHistoryState {
                result: Some(FileHistoryResult {
                    path: "src/old.rs".into(),
                    rename_chain: vec!["src/old.rs".into(), "src/new.rs".into()],
                    renames_followed: true,
                    merge_requests: vec![],
                    discussions: vec![],
                    total_mrs: 0,
                    paths_searched: 2,
                }),
                ..FileHistoryState::default()
            };
            render_file_history(&mut frame, &state, test_area(80, 24));
        });
    }

    /// `truncate_str` keeps short strings, appends an ellipsis within the
    /// character budget, and handles the empty string.
    #[test]
    fn test_truncate_str() {
        assert_eq!(truncate_str("hello", 10), "hello");
        assert_eq!(truncate_str("hello world", 5), "hell…");
        assert_eq!(truncate_str("", 5), "");
    }
}
|
||||
@@ -295,7 +295,7 @@ fn render_metadata_row(
|
||||
if !meta.labels.is_empty() {
|
||||
cx = frame.print_text_clipped(cx, y, " | ", muted_style, max_x);
|
||||
let labels_text = meta.labels.join(", ");
|
||||
let _ = frame.print_text_clipped(cx, y, &labels_text, muted_style, max_x);
|
||||
cx = frame.print_text_clipped(cx, y, &labels_text, muted_style, max_x);
|
||||
}
|
||||
|
||||
if !meta.assignees.is_empty() {
|
||||
|
||||
@@ -6,28 +6,43 @@
|
||||
//! It composes the layout: breadcrumb bar, screen content area, status
|
||||
//! bar, and optional overlays (help, error toast).
|
||||
|
||||
pub mod bootstrap;
|
||||
pub mod command_palette;
|
||||
pub mod common;
|
||||
pub mod dashboard;
|
||||
pub mod file_history;
|
||||
pub mod issue_detail;
|
||||
pub mod issue_list;
|
||||
pub mod mr_detail;
|
||||
pub mod mr_list;
|
||||
pub mod search;
|
||||
pub mod timeline;
|
||||
pub mod trace;
|
||||
pub mod who;
|
||||
|
||||
use ftui::layout::{Constraint, Flex};
|
||||
use ftui::render::cell::PackedRgba;
|
||||
use ftui::render::cell::{Cell, PackedRgba};
|
||||
use ftui::render::drawing::Draw;
|
||||
use ftui::render::frame::Frame;
|
||||
|
||||
use crate::app::LoreApp;
|
||||
use crate::message::Screen;
|
||||
|
||||
use bootstrap::render_bootstrap;
|
||||
use command_palette::render_command_palette;
|
||||
use common::{
|
||||
render_breadcrumb, render_error_toast, render_help_overlay, render_loading, render_status_bar,
|
||||
};
|
||||
use dashboard::render_dashboard;
|
||||
use file_history::render_file_history;
|
||||
use issue_detail::render_issue_detail;
|
||||
use issue_list::render_issue_list;
|
||||
use mr_detail::render_mr_detail;
|
||||
use mr_list::render_mr_list;
|
||||
use search::render_search;
|
||||
use timeline::render_timeline;
|
||||
use trace::render_trace;
|
||||
use who::render_who;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Colors (hardcoded Flexoki palette — will use Theme in Phase 2)
|
||||
@@ -41,6 +56,41 @@ const ERROR_BG: PackedRgba = PackedRgba::rgb(0xAF, 0x3A, 0x29); // red
|
||||
const ERROR_FG: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx
|
||||
const BORDER: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2
|
||||
|
||||
fn render_sync_placeholder(frame: &mut Frame<'_>, area: ftui::core::geometry::Rect) {
|
||||
if area.width < 10 || area.height < 5 {
|
||||
return;
|
||||
}
|
||||
|
||||
let max_x = area.right();
|
||||
let center_y = area.y + area.height / 2;
|
||||
|
||||
let title = "Sync";
|
||||
let title_x = area.x + area.width.saturating_sub(title.len() as u16) / 2;
|
||||
frame.print_text_clipped(
|
||||
title_x,
|
||||
center_y.saturating_sub(1),
|
||||
title,
|
||||
Cell {
|
||||
fg: ACCENT,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
|
||||
let body = "Run `lore sync` in another terminal.";
|
||||
let body_x = area.x + area.width.saturating_sub(body.len() as u16) / 2;
|
||||
frame.print_text_clipped(
|
||||
body_x,
|
||||
center_y + 1,
|
||||
body,
|
||||
Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// render_screen
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -91,7 +141,11 @@ pub fn render_screen(frame: &mut Frame<'_>, app: &LoreApp) {
|
||||
render_loading(frame, content_area, load_state, TEXT, TEXT_MUTED, 0);
|
||||
|
||||
// Per-screen content dispatch (other screens wired in later phases).
|
||||
if screen == &Screen::Dashboard {
|
||||
if screen == &Screen::Bootstrap {
|
||||
render_bootstrap(frame, &app.state.bootstrap, content_area);
|
||||
} else if screen == &Screen::Sync {
|
||||
render_sync_placeholder(frame, content_area);
|
||||
} else if screen == &Screen::Dashboard {
|
||||
render_dashboard(frame, &app.state.dashboard, content_area);
|
||||
} else if screen == &Screen::IssueList {
|
||||
render_issue_list(frame, &app.state.issue_list, content_area);
|
||||
@@ -101,6 +155,16 @@ pub fn render_screen(frame: &mut Frame<'_>, app: &LoreApp) {
|
||||
render_issue_detail(frame, &app.state.issue_detail, content_area, &*app.clock);
|
||||
} else if matches!(screen, Screen::MrDetail(_)) {
|
||||
render_mr_detail(frame, &app.state.mr_detail, content_area, &*app.clock);
|
||||
} else if screen == &Screen::Search {
|
||||
render_search(frame, &app.state.search, content_area);
|
||||
} else if screen == &Screen::Timeline {
|
||||
render_timeline(frame, &app.state.timeline, content_area, &*app.clock);
|
||||
} else if screen == &Screen::Who {
|
||||
render_who(frame, &app.state.who, content_area);
|
||||
} else if screen == &Screen::FileHistory {
|
||||
render_file_history(frame, &app.state.file_history, content_area);
|
||||
} else if screen == &Screen::Trace {
|
||||
render_trace(frame, &app.state.trace, content_area);
|
||||
}
|
||||
|
||||
// --- Status bar ---
|
||||
@@ -122,6 +186,9 @@ pub fn render_screen(frame: &mut Frame<'_>, app: &LoreApp) {
|
||||
render_error_toast(frame, bounds, error_msg, ERROR_BG, ERROR_FG);
|
||||
}
|
||||
|
||||
// Command palette overlay.
|
||||
render_command_palette(frame, &app.state.command_palette, bounds);
|
||||
|
||||
// Help overlay.
|
||||
if app.state.show_help {
|
||||
render_help_overlay(
|
||||
@@ -199,4 +266,21 @@ mod tests {
|
||||
render_screen(&mut frame, &app);
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_screen_sync_has_content() {
|
||||
with_frame!(80, 24, |frame| {
|
||||
let mut app = LoreApp::new();
|
||||
app.navigation.push(Screen::Sync);
|
||||
render_screen(&mut frame, &app);
|
||||
|
||||
let has_content = (20..60u16).any(|x| {
|
||||
(8..16u16).any(|y| frame.buffer.get(x, y).is_some_and(|cell| !cell.is_empty()))
|
||||
});
|
||||
assert!(
|
||||
has_content,
|
||||
"Expected sync placeholder content in center area"
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -203,16 +203,20 @@ fn render_metadata_row(
|
||||
|
||||
/// Render tab bar: `[Overview] [Files (3)] [Discussions (2)]`.
|
||||
fn render_tab_bar(frame: &mut Frame<'_>, state: &MrDetailState, x: u16, y: u16, max_x: u16) -> u16 {
|
||||
// Use metadata counts before async data loads to avoid showing 0.
|
||||
let disc_count = if state.discussions_loaded {
|
||||
state.discussions.len()
|
||||
} else {
|
||||
state.metadata.as_ref().map_or(0, |m| m.discussion_count)
|
||||
};
|
||||
|
||||
let tabs = [
|
||||
(MrTab::Overview, "Overview".to_string()),
|
||||
(
|
||||
MrTab::Files,
|
||||
format!("Files ({})", state.file_changes.len()),
|
||||
),
|
||||
(
|
||||
MrTab::Discussions,
|
||||
format!("Discussions ({})", state.discussions.len()),
|
||||
),
|
||||
(MrTab::Discussions, format!("Discussions ({disc_count})")),
|
||||
];
|
||||
|
||||
let mut cx = x;
|
||||
|
||||
492
crates/lore-tui/src/view/search.rs
Normal file
492
crates/lore-tui/src/view/search.rs
Normal file
@@ -0,0 +1,492 @@
|
||||
#![allow(dead_code)] // Phase 3: consumed by view/mod.rs screen dispatch
|
||||
|
||||
//! Search screen view — query bar, mode indicator, and results list.
|
||||
//!
|
||||
//! Layout:
|
||||
//! ```text
|
||||
//! +--[ FTS ]--- Search ──────────────────────+
|
||||
//! | > query text here_ |
|
||||
//! +───────────────────────────────────────────+
|
||||
//! | #42 Fix login bug group/proj |
|
||||
//! | !99 Add retry logic group/proj |
|
||||
//! | #10 Update docs other/repo |
|
||||
//! +───────────────────────────────────────────+
|
||||
//! | Tab: mode /: focus j/k: nav Enter: go |
|
||||
//! +───────────────────────────────────────────+
|
||||
//! ```
|
||||
|
||||
use ftui::core::geometry::Rect;
|
||||
use ftui::render::cell::Cell;
|
||||
use ftui::render::drawing::Draw;
|
||||
|
||||
/// Count display-width columns for a string (char count, not byte count).
|
||||
fn text_cell_width(text: &str) -> u16 {
|
||||
text.chars().count().min(u16::MAX as usize) as u16
|
||||
}
|
||||
|
||||
/// Convert a byte-offset cursor position to a display-column offset.
|
||||
fn cursor_cell_offset(query: &str, cursor: usize) -> u16 {
|
||||
let mut idx = cursor.min(query.len());
|
||||
while idx > 0 && !query.is_char_boundary(idx) {
|
||||
idx -= 1;
|
||||
}
|
||||
text_cell_width(&query[..idx])
|
||||
}
|
||||
use ftui::render::frame::Frame;
|
||||
|
||||
use crate::message::EntityKind;
|
||||
use crate::state::search::SearchState;
|
||||
|
||||
use super::{ACCENT, BG_SURFACE, BORDER, TEXT, TEXT_MUTED};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// render_search
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the search screen.
|
||||
///
|
||||
/// Composes: mode indicator + query bar (row 0), separator (row 1),
|
||||
/// results list (fill), and a hint bar at the bottom.
|
||||
pub fn render_search(frame: &mut Frame<'_>, state: &SearchState, area: Rect) {
|
||||
if area.height < 4 || area.width < 20 {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut y = area.y;
|
||||
let max_x = area.right();
|
||||
|
||||
// -- Mode indicator + query bar ------------------------------------------
|
||||
y = render_query_bar(frame, state, area.x, y, area.width, max_x);
|
||||
|
||||
// -- Separator -----------------------------------------------------------
|
||||
if y >= area.bottom() {
|
||||
return;
|
||||
}
|
||||
let sep_cell = Cell {
|
||||
fg: BORDER,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
let sep_line = "─".repeat(area.width as usize);
|
||||
frame.print_text_clipped(area.x, y, &sep_line, sep_cell, max_x);
|
||||
y += 1;
|
||||
|
||||
// -- No-index warning ----------------------------------------------------
|
||||
if !state.capabilities.has_any_index() {
|
||||
if y >= area.bottom() {
|
||||
return;
|
||||
}
|
||||
let warn_cell = Cell {
|
||||
fg: ACCENT,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(area.x + 1, y, "No search indexes found.", warn_cell, max_x);
|
||||
y += 1;
|
||||
if y < area.bottom() {
|
||||
let hint_cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(
|
||||
area.x + 1,
|
||||
y,
|
||||
"Run: lore generate-docs && lore embed",
|
||||
hint_cell,
|
||||
max_x,
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// -- Results list --------------------------------------------------------
|
||||
let bottom_hint_row = area.bottom().saturating_sub(1);
|
||||
let list_bottom = bottom_hint_row;
|
||||
let list_height = list_bottom.saturating_sub(y) as usize;
|
||||
|
||||
if list_height == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
if state.results.is_empty() {
|
||||
render_empty_state(frame, state, area.x + 1, y, max_x);
|
||||
} else {
|
||||
render_result_list(frame, state, area.x, y, area.width, list_height);
|
||||
}
|
||||
|
||||
// -- Bottom hint bar -----------------------------------------------------
|
||||
if bottom_hint_row < area.bottom() {
|
||||
render_hint_bar(frame, state, area.x, bottom_hint_row, max_x);
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Query bar
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the mode badge and query input. Returns the next y position.
|
||||
fn render_query_bar(
|
||||
frame: &mut Frame<'_>,
|
||||
state: &SearchState,
|
||||
x: u16,
|
||||
y: u16,
|
||||
width: u16,
|
||||
max_x: u16,
|
||||
) -> u16 {
|
||||
// Mode badge: [ FTS ] or [ Hybrid ] or [ Vec ]
|
||||
let mode_label = format!("[ {} ]", state.mode.label());
|
||||
let mode_cell = Cell {
|
||||
fg: ACCENT,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_mode = frame.print_text_clipped(x, y, &mode_label, mode_cell, max_x);
|
||||
|
||||
// Space separator.
|
||||
let after_sep = frame.print_text_clipped(
|
||||
after_mode,
|
||||
y,
|
||||
" ",
|
||||
Cell {
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
|
||||
// Prompt.
|
||||
let prompt = "> ";
|
||||
let prompt_cell = Cell {
|
||||
fg: ACCENT,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_prompt = frame.print_text_clipped(after_sep, y, prompt, prompt_cell, max_x);
|
||||
|
||||
// Query text (or placeholder).
|
||||
let (display_text, text_fg) = if state.query.is_empty() {
|
||||
("Type to search...", TEXT_MUTED)
|
||||
} else {
|
||||
(state.query.as_str(), TEXT)
|
||||
};
|
||||
let text_cell = Cell {
|
||||
fg: text_fg,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(after_prompt, y, display_text, text_cell, max_x);
|
||||
|
||||
// Cursor (only when focused and has query text).
|
||||
if state.query_focused && !state.query.is_empty() {
|
||||
let cursor_x = after_prompt + cursor_cell_offset(&state.query, state.cursor);
|
||||
if cursor_x < max_x {
|
||||
let cursor_cell = Cell {
|
||||
fg: BG_SURFACE,
|
||||
bg: TEXT,
|
||||
..Cell::default()
|
||||
};
|
||||
let cursor_char = state
|
||||
.query
|
||||
.get(state.cursor..)
|
||||
.and_then(|s| s.chars().next())
|
||||
.unwrap_or(' ');
|
||||
frame.print_text_clipped(cursor_x, y, &cursor_char.to_string(), cursor_cell, max_x);
|
||||
}
|
||||
}
|
||||
|
||||
// Loading indicator (right-aligned).
|
||||
if state.loading {
|
||||
let loading_text = " searching... ";
|
||||
let loading_x = (x + width).saturating_sub(loading_text.len() as u16);
|
||||
let loading_cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(loading_x, y, loading_text, loading_cell, max_x);
|
||||
}
|
||||
|
||||
y + 1
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Empty state
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Show a message when there are no results.
|
||||
fn render_empty_state(frame: &mut Frame<'_>, state: &SearchState, x: u16, y: u16, max_x: u16) {
|
||||
let msg = if state.query.is_empty() {
|
||||
"Enter a search query above"
|
||||
} else {
|
||||
"No results found"
|
||||
};
|
||||
let cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x, y, msg, cell, max_x);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Result list
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the scrollable list of search results.
|
||||
fn render_result_list(
|
||||
frame: &mut Frame<'_>,
|
||||
state: &SearchState,
|
||||
x: u16,
|
||||
start_y: u16,
|
||||
width: u16,
|
||||
list_height: usize,
|
||||
) {
|
||||
let max_x = x + width;
|
||||
|
||||
// Scroll so selected item is always visible.
|
||||
let scroll_offset = if state.selected_index >= list_height {
|
||||
state.selected_index - list_height + 1
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let normal = Cell {
|
||||
fg: TEXT,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
let selected = Cell {
|
||||
fg: BG_SURFACE,
|
||||
bg: ACCENT,
|
||||
..Cell::default()
|
||||
};
|
||||
let muted = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
let muted_selected = Cell {
|
||||
fg: BG_SURFACE,
|
||||
bg: ACCENT,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
for (i, result) in state
|
||||
.results
|
||||
.iter()
|
||||
.skip(scroll_offset)
|
||||
.enumerate()
|
||||
.take(list_height)
|
||||
{
|
||||
let y = start_y + i as u16;
|
||||
let is_selected = i + scroll_offset == state.selected_index;
|
||||
|
||||
let (label_style, detail_style) = if is_selected {
|
||||
(selected, muted_selected)
|
||||
} else {
|
||||
(normal, muted)
|
||||
};
|
||||
|
||||
// Fill row background for selected item.
|
||||
if is_selected {
|
||||
for col in x..max_x {
|
||||
frame.buffer.set(col, y, selected);
|
||||
}
|
||||
}
|
||||
|
||||
// Entity prefix: # for issues, ! for MRs.
|
||||
let prefix = match result.key.kind {
|
||||
EntityKind::Issue => "#",
|
||||
EntityKind::MergeRequest => "!",
|
||||
};
|
||||
let iid_str = format!("{}{}", prefix, result.key.iid);
|
||||
let after_iid = frame.print_text_clipped(x + 1, y, &iid_str, label_style, max_x);
|
||||
|
||||
// Title.
|
||||
let after_title =
|
||||
frame.print_text_clipped(after_iid + 1, y, &result.title, label_style, max_x);
|
||||
|
||||
// Project path (right-aligned).
|
||||
let path_width = result.project_path.len() as u16 + 2;
|
||||
let path_x = max_x.saturating_sub(path_width);
|
||||
if path_x > after_title + 1 {
|
||||
frame.print_text_clipped(path_x, y, &result.project_path, detail_style, max_x);
|
||||
}
|
||||
}
|
||||
|
||||
// Scroll indicator (overlaid on last visible row when results overflow).
|
||||
if state.results.len() > list_height && list_height > 0 {
|
||||
let indicator = format!(
|
||||
" {}/{} ",
|
||||
(scroll_offset + list_height).min(state.results.len()),
|
||||
state.results.len()
|
||||
);
|
||||
let ind_x = max_x.saturating_sub(indicator.len() as u16);
|
||||
let ind_y = start_y + list_height.saturating_sub(1) as u16;
|
||||
let ind_cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(ind_x, ind_y, &indicator, ind_cell, max_x);
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Hint bar
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render keybinding hints at the bottom of the search screen.
|
||||
fn render_hint_bar(frame: &mut Frame<'_>, state: &SearchState, x: u16, y: u16, max_x: u16) {
|
||||
let hint_cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
let hints = if state.query_focused {
|
||||
"Tab: mode Esc: blur Enter: search"
|
||||
} else {
|
||||
"Tab: mode /: focus j/k: nav Enter: open"
|
||||
};
|
||||
|
||||
frame.print_text_clipped(x + 1, y, hints, hint_cell, max_x);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::message::{EntityKey, SearchResult};
|
||||
use crate::state::search::{SearchCapabilities, SearchState};
|
||||
use ftui::render::grapheme_pool::GraphemePool;
|
||||
|
||||
macro_rules! with_frame {
|
||||
($width:expr, $height:expr, |$frame:ident| $body:block) => {{
|
||||
let mut pool = GraphemePool::new();
|
||||
let mut $frame = Frame::new($width, $height, &mut pool);
|
||||
$body
|
||||
}};
|
||||
}
|
||||
|
||||
fn fts_caps() -> SearchCapabilities {
|
||||
SearchCapabilities {
|
||||
has_fts: true,
|
||||
has_embeddings: false,
|
||||
embedding_coverage_pct: 0.0,
|
||||
}
|
||||
}
|
||||
|
||||
fn sample_results(count: usize) -> Vec<SearchResult> {
|
||||
(0..count)
|
||||
.map(|i| SearchResult {
|
||||
key: EntityKey::issue(1, (i + 1) as i64),
|
||||
title: format!("Result {}", i + 1),
|
||||
score: 1.0 - (i as f64 * 0.1),
|
||||
snippet: "matched text".into(),
|
||||
project_path: "group/project".into(),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_empty_no_panic() {
|
||||
with_frame!(80, 24, |frame| {
|
||||
let state = SearchState::default();
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 80, 24));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_with_capabilities_no_panic() {
|
||||
with_frame!(80, 24, |frame| {
|
||||
let mut state = SearchState::default();
|
||||
state.enter(fts_caps());
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 80, 24));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_with_results_no_panic() {
|
||||
with_frame!(100, 30, |frame| {
|
||||
let mut state = SearchState::default();
|
||||
state.enter(fts_caps());
|
||||
state.results = sample_results(5);
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 100, 30));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_with_query_no_panic() {
|
||||
with_frame!(80, 24, |frame| {
|
||||
let mut state = SearchState::default();
|
||||
state.enter(fts_caps());
|
||||
state.insert_char('h');
|
||||
state.insert_char('e');
|
||||
state.insert_char('l');
|
||||
state.insert_char('l');
|
||||
state.insert_char('o');
|
||||
state.results = sample_results(3);
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 80, 24));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_with_selection_no_panic() {
|
||||
with_frame!(80, 24, |frame| {
|
||||
let mut state = SearchState::default();
|
||||
state.enter(fts_caps());
|
||||
state.results = sample_results(10);
|
||||
state.select_next();
|
||||
state.select_next();
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 80, 24));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_tiny_terminal_noop() {
|
||||
with_frame!(15, 3, |frame| {
|
||||
let mut state = SearchState::default();
|
||||
state.enter(fts_caps());
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 15, 3));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_no_indexes_warning() {
|
||||
with_frame!(80, 24, |frame| {
|
||||
let state = SearchState::default();
|
||||
// capabilities are default (no indexes)
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 80, 24));
|
||||
// Should show "No search indexes found" without panicking.
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_loading_indicator() {
|
||||
with_frame!(80, 24, |frame| {
|
||||
let mut state = SearchState::default();
|
||||
state.enter(fts_caps());
|
||||
state.loading = true;
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 80, 24));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_render_search_scrollable_results() {
|
||||
with_frame!(80, 10, |frame| {
|
||||
let mut state = SearchState::default();
|
||||
state.enter(fts_caps());
|
||||
state.results = sample_results(20);
|
||||
// Select item near the bottom to trigger scrolling.
|
||||
for _ in 0..15 {
|
||||
state.select_next();
|
||||
}
|
||||
render_search(&mut frame, &state, Rect::new(0, 0, 80, 10));
|
||||
});
|
||||
}
|
||||
}
|
||||
449
crates/lore-tui/src/view/timeline.rs
Normal file
449
crates/lore-tui/src/view/timeline.rs
Normal file
@@ -0,0 +1,449 @@
|
||||
#![allow(dead_code)] // Phase 3: consumed by view/mod.rs screen dispatch
|
||||
|
||||
//! Timeline screen view — chronological event stream with color-coded types.
|
||||
//!
|
||||
//! Layout:
|
||||
//! ```text
|
||||
//! +─── Timeline ──────────────────────────────+
|
||||
//! | 3h ago #42 Created: Fix login bug |
|
||||
//! | 2h ago #42 State changed to closed |
|
||||
//! | 1h ago !99 Label added: backend |
|
||||
//! | 30m ago !99 Merged |
|
||||
//! +───────────────────────────────────────────+
|
||||
//! | j/k: nav Enter: open q: back |
|
||||
//! +───────────────────────────────────────────+
|
||||
//! ```
|
||||
|
||||
use ftui::core::geometry::Rect;
|
||||
use ftui::render::cell::{Cell, PackedRgba};
|
||||
use ftui::render::drawing::Draw;
|
||||
use ftui::render::frame::Frame;
|
||||
|
||||
use crate::clock::Clock;
|
||||
use crate::message::TimelineEventKind;
|
||||
use crate::state::timeline::TimelineState;
|
||||
use crate::view::common::discussion_tree::format_relative_time;
|
||||
|
||||
use super::{ACCENT, BG_SURFACE, BORDER, TEXT, TEXT_MUTED};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Colors for event kinds (Flexoki palette)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const GREEN: PackedRgba = PackedRgba::rgb(0x87, 0x9A, 0x39); // Created
|
||||
const YELLOW: PackedRgba = PackedRgba::rgb(0xD0, 0xA2, 0x15); // StateChanged
|
||||
const RED: PackedRgba = PackedRgba::rgb(0xAF, 0x3A, 0x29); // Closed (via StateChanged)
|
||||
const PURPLE: PackedRgba = PackedRgba::rgb(0x8B, 0x7E, 0xC8); // Merged
|
||||
const CYAN: PackedRgba = PackedRgba::rgb(0x3A, 0xA9, 0x9F); // Label
|
||||
const SELECTED_FG: PackedRgba = PackedRgba::rgb(0x10, 0x0F, 0x0F); // bg (dark)
|
||||
|
||||
/// Map event kind to its display color.
|
||||
fn event_color(kind: TimelineEventKind, detail: Option<&str>) -> PackedRgba {
|
||||
match kind {
|
||||
TimelineEventKind::Created => GREEN,
|
||||
TimelineEventKind::StateChanged => {
|
||||
if detail == Some("closed") {
|
||||
RED
|
||||
} else {
|
||||
YELLOW
|
||||
}
|
||||
}
|
||||
TimelineEventKind::LabelAdded | TimelineEventKind::LabelRemoved => CYAN,
|
||||
TimelineEventKind::MilestoneSet | TimelineEventKind::MilestoneRemoved => ACCENT,
|
||||
TimelineEventKind::Merged => PURPLE,
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// render_timeline
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the timeline screen.
|
||||
///
|
||||
/// Composes: scope header (row 0), separator (row 1),
|
||||
/// event list (fill), and a hint bar at the bottom.
|
||||
pub fn render_timeline(
|
||||
frame: &mut Frame<'_>,
|
||||
state: &TimelineState,
|
||||
area: Rect,
|
||||
clock: &dyn Clock,
|
||||
) {
|
||||
if area.height < 4 || area.width < 20 {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut y = area.y;
|
||||
let max_x = area.right();
|
||||
|
||||
// -- Scope header --
|
||||
let scope_label = match &state.scope {
|
||||
crate::state::timeline::TimelineScope::All => "All events".to_string(),
|
||||
crate::state::timeline::TimelineScope::Entity(key) => {
|
||||
let sigil = match key.kind {
|
||||
crate::message::EntityKind::Issue => "#",
|
||||
crate::message::EntityKind::MergeRequest => "!",
|
||||
};
|
||||
format!("Entity {sigil}{}", key.iid)
|
||||
}
|
||||
crate::state::timeline::TimelineScope::Author(name) => format!("Author: {name}"),
|
||||
};
|
||||
|
||||
let header = format!("Timeline: {scope_label}");
|
||||
let header_cell = Cell {
|
||||
fg: ACCENT,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(area.x, y, &header, header_cell, max_x);
|
||||
y += 1;
|
||||
|
||||
// -- Separator --
|
||||
if y >= area.bottom() {
|
||||
return;
|
||||
}
|
||||
let sep_cell = Cell {
|
||||
fg: BORDER,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
let sep_line = "─".repeat(area.width as usize);
|
||||
frame.print_text_clipped(area.x, y, &sep_line, sep_cell, max_x);
|
||||
y += 1;
|
||||
|
||||
// -- Event list --
|
||||
let bottom_hint_row = area.bottom().saturating_sub(1);
|
||||
let list_height = bottom_hint_row.saturating_sub(y) as usize;
|
||||
|
||||
if list_height == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
if state.events.is_empty() {
|
||||
render_empty_state(frame, state, area.x + 1, y, max_x);
|
||||
} else {
|
||||
render_event_list(frame, state, area.x, y, area.width, list_height, clock);
|
||||
}
|
||||
|
||||
// -- Hint bar --
|
||||
if bottom_hint_row < area.bottom() {
|
||||
render_hint_bar(frame, area.x, bottom_hint_row, max_x);
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Empty state
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn render_empty_state(frame: &mut Frame<'_>, state: &TimelineState, x: u16, y: u16, max_x: u16) {
|
||||
let msg = if state.loading {
|
||||
"Loading timeline..."
|
||||
} else {
|
||||
"No timeline events found"
|
||||
};
|
||||
let cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x, y, msg, cell, max_x);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Event list
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the scrollable list of timeline events.
|
||||
fn render_event_list(
|
||||
frame: &mut Frame<'_>,
|
||||
state: &TimelineState,
|
||||
x: u16,
|
||||
start_y: u16,
|
||||
width: u16,
|
||||
list_height: usize,
|
||||
clock: &dyn Clock,
|
||||
) {
|
||||
let max_x = x + width;
|
||||
|
||||
// Scroll so selected item is always visible.
|
||||
let scroll_offset = if state.selected_index >= list_height {
|
||||
state.selected_index - list_height + 1
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let selected_cell = Cell {
|
||||
fg: SELECTED_FG,
|
||||
bg: ACCENT,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
for (i, event) in state
|
||||
.events
|
||||
.iter()
|
||||
.skip(scroll_offset)
|
||||
.enumerate()
|
||||
.take(list_height)
|
||||
{
|
||||
let y = start_y + i as u16;
|
||||
let is_selected = i + scroll_offset == state.selected_index;
|
||||
|
||||
let kind_color = event_color(event.event_kind, event.detail.as_deref());
|
||||
|
||||
// Fill row background for selected item.
|
||||
if is_selected {
|
||||
for col in x..max_x {
|
||||
frame.buffer.set(col, y, selected_cell);
|
||||
}
|
||||
}
|
||||
|
||||
let mut cx = x + 1;
|
||||
|
||||
// Timestamp gutter (right-aligned in ~10 chars).
|
||||
let time_str = format_relative_time(event.timestamp_ms, clock);
|
||||
let time_width = 10u16;
|
||||
let time_x = cx + time_width.saturating_sub(time_str.len() as u16);
|
||||
let time_cell = if is_selected {
|
||||
selected_cell
|
||||
} else {
|
||||
Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
}
|
||||
};
|
||||
frame.print_text_clipped(time_x, y, &time_str, time_cell, cx + time_width);
|
||||
cx += time_width + 1;
|
||||
|
||||
// Entity prefix: #42 or !99
|
||||
let prefix = match event.entity_key.kind {
|
||||
crate::message::EntityKind::Issue => "#",
|
||||
crate::message::EntityKind::MergeRequest => "!",
|
||||
};
|
||||
let entity_str = format!("{prefix}{}", event.entity_key.iid);
|
||||
let entity_cell = if is_selected {
|
||||
selected_cell
|
||||
} else {
|
||||
Cell {
|
||||
fg: kind_color,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
}
|
||||
};
|
||||
let after_entity = frame.print_text_clipped(cx, y, &entity_str, entity_cell, max_x);
|
||||
cx = after_entity + 1;
|
||||
|
||||
// Event kind badge.
|
||||
let badge = event.event_kind.label();
|
||||
let badge_cell = if is_selected {
|
||||
selected_cell
|
||||
} else {
|
||||
Cell {
|
||||
fg: kind_color,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
}
|
||||
};
|
||||
let after_badge = frame.print_text_clipped(cx, y, badge, badge_cell, max_x);
|
||||
cx = after_badge + 1;
|
||||
|
||||
// Summary text.
|
||||
let summary_cell = if is_selected {
|
||||
selected_cell
|
||||
} else {
|
||||
Cell {
|
||||
fg: TEXT,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
}
|
||||
};
|
||||
frame.print_text_clipped(cx, y, &event.summary, summary_cell, max_x);
|
||||
|
||||
// Actor (right-aligned) if there's room.
|
||||
if let Some(ref actor) = event.actor {
|
||||
let actor_str = format!(" {actor} ");
|
||||
let actor_width = actor_str.len() as u16;
|
||||
let actor_x = max_x.saturating_sub(actor_width);
|
||||
if actor_x > cx + 5 {
|
||||
let actor_cell = if is_selected {
|
||||
selected_cell
|
||||
} else {
|
||||
Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
}
|
||||
};
|
||||
frame.print_text_clipped(actor_x, y, &actor_str, actor_cell, max_x);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Scroll indicator (overlaid on last visible row when events overflow).
|
||||
if state.events.len() > list_height && list_height > 0 {
|
||||
let indicator = format!(
|
||||
" {}/{} ",
|
||||
(scroll_offset + list_height).min(state.events.len()),
|
||||
state.events.len()
|
||||
);
|
||||
let ind_x = max_x.saturating_sub(indicator.len() as u16);
|
||||
let ind_y = start_y + list_height.saturating_sub(1) as u16;
|
||||
let ind_cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(ind_x, ind_y, &indicator, ind_cell, max_x);
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Hint bar
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn render_hint_bar(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let hint_cell = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
let hints = "j/k: nav Enter: open q: back";
|
||||
frame.print_text_clipped(x + 1, y, hints, hint_cell, max_x);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::clock::FakeClock;
    use crate::message::{EntityKey, TimelineEvent, TimelineEventKind};
    use crate::state::timeline::TimelineState;
    use ftui::render::grapheme_pool::GraphemePool;

    // Builds a frame backed by a fresh grapheme pool and runs `$body` with
    // `$frame` bound to it. Keeps the pool alive for the frame's lifetime.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    // Synthetic event fixture: an issue event in project 1 with a fixed
    // actor and project path; only timestamp, iid, and kind vary.
    fn sample_event(timestamp_ms: i64, iid: i64, kind: TimelineEventKind) -> TimelineEvent {
        TimelineEvent {
            timestamp_ms,
            entity_key: EntityKey::issue(1, iid),
            event_kind: kind,
            summary: format!("Event for #{iid}"),
            detail: None,
            actor: Some("alice".into()),
            project_path: "group/project".into(),
        }
    }

    // Fixed "now" (just after the fixture timestamps) so relative-time
    // rendering is deterministic.
    fn test_clock() -> FakeClock {
        FakeClock::from_ms(1_700_000_100_000)
    }

    // Smoke test: rendering a default (empty) state must not panic.
    #[test]
    fn test_render_timeline_empty_no_panic() {
        with_frame!(80, 24, |frame| {
            let state = TimelineState::default();
            let clock = test_clock();
            render_timeline(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
        });
    }

    #[test]
    fn test_render_timeline_with_events_no_panic() {
        with_frame!(100, 30, |frame| {
            let state = TimelineState {
                events: vec![
                    sample_event(1_700_000_000_000, 1, TimelineEventKind::Created),
                    sample_event(1_700_000_050_000, 2, TimelineEventKind::StateChanged),
                    sample_event(1_700_000_080_000, 3, TimelineEventKind::Merged),
                ],
                ..TimelineState::default()
            };
            let clock = test_clock();
            render_timeline(&mut frame, &state, Rect::new(0, 0, 100, 30), &clock);
        });
    }

    #[test]
    fn test_render_timeline_with_selection_no_panic() {
        with_frame!(80, 24, |frame| {
            let state = TimelineState {
                events: vec![
                    sample_event(1_700_000_000_000, 1, TimelineEventKind::Created),
                    sample_event(1_700_000_050_000, 2, TimelineEventKind::LabelAdded),
                ],
                selected_index: 1,
                ..TimelineState::default()
            };
            let clock = test_clock();
            render_timeline(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
        });
    }

    // A terminal below the minimum size must be a silent no-op.
    #[test]
    fn test_render_timeline_tiny_terminal_noop() {
        with_frame!(15, 3, |frame| {
            let state = TimelineState::default();
            let clock = test_clock();
            render_timeline(&mut frame, &state, Rect::new(0, 0, 15, 3), &clock);
        });
    }

    #[test]
    fn test_render_timeline_loading_state() {
        with_frame!(80, 24, |frame| {
            let state = TimelineState {
                loading: true,
                ..TimelineState::default()
            };
            let clock = test_clock();
            render_timeline(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
        });
    }

    // More events (20) than visible rows (10-row frame) with a selection
    // past the fold exercises the scroll-offset path.
    #[test]
    fn test_render_timeline_scrollable_events() {
        with_frame!(80, 10, |frame| {
            let state = TimelineState {
                events: (0..20)
                    .map(|i| {
                        sample_event(
                            1_700_000_000_000 + i * 10_000,
                            i + 1,
                            TimelineEventKind::Created,
                        )
                    })
                    .collect(),
                selected_index: 15,
                ..TimelineState::default()
            };
            let clock = test_clock();
            render_timeline(&mut frame, &state, Rect::new(0, 0, 80, 10), &clock);
        });
    }

    #[test]
    fn test_event_color_created_is_green() {
        assert_eq!(event_color(TimelineEventKind::Created, None), GREEN);
    }

    #[test]
    fn test_event_color_closed_is_red() {
        assert_eq!(
            event_color(TimelineEventKind::StateChanged, Some("closed")),
            RED
        );
    }

    #[test]
    fn test_event_color_merged_is_purple() {
        assert_eq!(event_color(TimelineEventKind::Merged, None), PURPLE);
    }
}
|
||||
627
crates/lore-tui/src/view/trace.rs
Normal file
627
crates/lore-tui/src/view/trace.rs
Normal file
@@ -0,0 +1,627 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
//! Trace view — file → MR → issue chain drill-down.
|
||||
//!
|
||||
//! Layout:
|
||||
//! ```text
|
||||
//! +-----------------------------------+
|
||||
//! | Path: [src/main.rs_] [R] [D] | <- path input + toggles
|
||||
//! | Renames: old.rs -> new.rs | <- shown when renames followed
|
||||
//! | 3 trace chains | <- summary
|
||||
//! +-----------------------------------+
|
||||
//! | > M !42 Fix auth @alice modified | <- collapsed chain (selected)
|
||||
//! | O !39 Refactor @bob renamed | <- collapsed chain
|
||||
//! | M !35 Init @carol added | <- expanded chain header
|
||||
//! | #12 Bug: login broken (close) | <- linked issue
|
||||
//! | @dave: "This path needs..." | <- discussion snippet
|
||||
//! +-----------------------------------+
|
||||
//! | Enter:expand r:renames d:disc | <- hint bar
|
||||
//! +-----------------------------------+
|
||||
//! ```
|
||||
|
||||
use ftui::render::cell::{Cell, PackedRgba};
|
||||
use ftui::render::drawing::Draw;
|
||||
use ftui::render::frame::Frame;
|
||||
|
||||
use crate::state::trace::TraceState;
|
||||
use lore::core::trace::TraceResult;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Colors (Flexoki palette)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const TEXT: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx
|
||||
const TEXT_MUTED: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2
|
||||
const BG_SURFACE: PackedRgba = PackedRgba::rgb(0x28, 0x28, 0x24); // bg-2
|
||||
const ACCENT: PackedRgba = PackedRgba::rgb(0xDA, 0x70, 0x2C); // orange
|
||||
const GREEN: PackedRgba = PackedRgba::rgb(0x87, 0x9A, 0x39); // green
|
||||
const CYAN: PackedRgba = PackedRgba::rgb(0x3A, 0xA9, 0x9F); // cyan
|
||||
const YELLOW: PackedRgba = PackedRgba::rgb(0xD0, 0xA2, 0x15); // yellow
|
||||
const RED: PackedRgba = PackedRgba::rgb(0xAF, 0x3A, 0x29); // red
|
||||
const PURPLE: PackedRgba = PackedRgba::rgb(0x8B, 0x7E, 0xC8); // purple
|
||||
const SELECTION_BG: PackedRgba = PackedRgba::rgb(0x34, 0x34, 0x31); // bg-3
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public entry point
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render the Trace screen.
|
||||
pub fn render_trace(frame: &mut Frame<'_>, state: &TraceState, area: ftui::core::geometry::Rect) {
|
||||
if area.width < 10 || area.height < 3 {
|
||||
return;
|
||||
}
|
||||
|
||||
let x = area.x;
|
||||
let max_x = area.right();
|
||||
let width = area.width;
|
||||
let mut y = area.y;
|
||||
|
||||
// --- Path input ---
|
||||
render_path_input(frame, state, x, y, width);
|
||||
y += 1;
|
||||
|
||||
if area.height < 5 {
|
||||
return;
|
||||
}
|
||||
|
||||
// --- Toggle indicators ---
|
||||
render_toggle_indicators(frame, state, x, y, width);
|
||||
y += 1;
|
||||
|
||||
// --- Loading ---
|
||||
if state.loading {
|
||||
render_loading(frame, x, y, max_x);
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(result) = &state.result else {
|
||||
render_empty_state(frame, x, y, max_x);
|
||||
return;
|
||||
};
|
||||
|
||||
// --- Rename chain ---
|
||||
if result.renames_followed && result.resolved_paths.len() > 1 {
|
||||
render_rename_chain(frame, &result.resolved_paths, x, y, max_x);
|
||||
y += 1;
|
||||
}
|
||||
|
||||
// --- Summary ---
|
||||
render_summary(frame, result, x, y, max_x);
|
||||
y += 1;
|
||||
|
||||
if result.trace_chains.is_empty() {
|
||||
render_no_results(frame, x, y, max_x);
|
||||
return;
|
||||
}
|
||||
|
||||
// Reserve hint bar.
|
||||
let hint_y = area.bottom().saturating_sub(1);
|
||||
let list_height = hint_y.saturating_sub(y) as usize;
|
||||
|
||||
if list_height == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
// --- Chain list ---
|
||||
render_chain_list(frame, result, state, x, y, width, list_height);
|
||||
|
||||
// --- Hint bar ---
|
||||
render_hint_bar(frame, x, hint_y, max_x);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Components
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn render_path_input(frame: &mut Frame<'_>, state: &TraceState, x: u16, y: u16, width: u16) {
|
||||
let max_x = x + width;
|
||||
let label = "Path: ";
|
||||
let label_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_label = frame.print_text_clipped(x, y, label, label_style, max_x);
|
||||
|
||||
let input_style = Cell {
|
||||
fg: if state.path_focused { TEXT } else { TEXT_MUTED },
|
||||
..Cell::default()
|
||||
};
|
||||
let display_text = if state.path_input.is_empty() && !state.path_focused {
|
||||
"type a file path..."
|
||||
} else {
|
||||
&state.path_input
|
||||
};
|
||||
frame.print_text_clipped(after_label, y, display_text, input_style, max_x);
|
||||
|
||||
// Cursor.
|
||||
if state.path_focused {
|
||||
let cursor_x = after_label + state.path_cursor as u16;
|
||||
if cursor_x < max_x {
|
||||
let cursor_cell = Cell {
|
||||
fg: PackedRgba::rgb(0x10, 0x0F, 0x0F),
|
||||
bg: TEXT,
|
||||
..Cell::default()
|
||||
};
|
||||
let ch = state
|
||||
.path_input
|
||||
.chars()
|
||||
.nth(state.path_cursor)
|
||||
.unwrap_or(' ');
|
||||
frame.print_text_clipped(cursor_x, y, &ch.to_string(), cursor_cell, max_x);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render_toggle_indicators(frame: &mut Frame<'_>, state: &TraceState, x: u16, y: u16, width: u16) {
|
||||
let max_x = x + width;
|
||||
|
||||
let on_style = Cell {
|
||||
fg: GREEN,
|
||||
..Cell::default()
|
||||
};
|
||||
let off_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
let renames_tag = if state.follow_renames {
|
||||
"[renames:on]"
|
||||
} else {
|
||||
"[renames:off]"
|
||||
};
|
||||
let disc_tag = if state.include_discussions {
|
||||
"[disc:on]"
|
||||
} else {
|
||||
"[disc:off]"
|
||||
};
|
||||
|
||||
let renames_style = if state.follow_renames {
|
||||
on_style
|
||||
} else {
|
||||
off_style
|
||||
};
|
||||
let disc_style = if state.include_discussions {
|
||||
on_style
|
||||
} else {
|
||||
off_style
|
||||
};
|
||||
|
||||
let after_r = frame.print_text_clipped(x + 1, y, renames_tag, renames_style, max_x);
|
||||
frame.print_text_clipped(after_r + 1, y, disc_tag, disc_style, max_x);
|
||||
}
|
||||
|
||||
fn render_rename_chain(frame: &mut Frame<'_>, paths: &[String], x: u16, y: u16, max_x: u16) {
|
||||
let label_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
let chain_style = Cell {
|
||||
fg: CYAN,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
let after_label = frame.print_text_clipped(x + 1, y, "Renames: ", label_style, max_x);
|
||||
|
||||
// For long chains, show first 2 + "..." + last.
|
||||
let chain_str = if paths.len() > 5 {
|
||||
let first_two = paths[..2].join(" -> ");
|
||||
let last = &paths[paths.len() - 1];
|
||||
format!("{first_two} -> ... ({} more) -> {last}", paths.len() - 3)
|
||||
} else {
|
||||
paths.join(" -> ")
|
||||
};
|
||||
frame.print_text_clipped(after_label, y, &chain_str, chain_style, max_x);
|
||||
}
|
||||
|
||||
fn render_summary(frame: &mut Frame<'_>, result: &TraceResult, x: u16, y: u16, max_x: u16) {
|
||||
let summary = format!(
|
||||
"{} trace chain{}",
|
||||
result.total_chains,
|
||||
if result.total_chains == 1 { "" } else { "s" },
|
||||
);
|
||||
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x + 1, y, &summary, style, max_x);
|
||||
}
|
||||
|
||||
fn render_chain_list(
|
||||
frame: &mut Frame<'_>,
|
||||
result: &TraceResult,
|
||||
state: &TraceState,
|
||||
x: u16,
|
||||
start_y: u16,
|
||||
width: u16,
|
||||
height: usize,
|
||||
) {
|
||||
let max_x = x + width;
|
||||
let mut row = 0;
|
||||
|
||||
for (chain_idx, chain) in result.trace_chains.iter().enumerate() {
|
||||
if row >= height {
|
||||
break;
|
||||
}
|
||||
|
||||
let y = start_y + row as u16;
|
||||
let selected = chain_idx == state.selected_chain_index;
|
||||
let expanded = state.expanded_chains.contains(&chain_idx);
|
||||
|
||||
// Selection background.
|
||||
if selected {
|
||||
let bg_cell = Cell {
|
||||
bg: SELECTION_BG,
|
||||
..Cell::default()
|
||||
};
|
||||
for col in x..max_x {
|
||||
frame.buffer.set(col, y, bg_cell);
|
||||
}
|
||||
}
|
||||
|
||||
let sel_bg = if selected { SELECTION_BG } else { BG_SURFACE };
|
||||
|
||||
// Expand indicator.
|
||||
let expand_icon = if expanded { "v " } else { "> " };
|
||||
let prefix = if selected { expand_icon } else { " " };
|
||||
let prefix_style = Cell {
|
||||
fg: ACCENT,
|
||||
bg: sel_bg,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_prefix = frame.print_text_clipped(x, y, prefix, prefix_style, max_x);
|
||||
|
||||
// State icon.
|
||||
let (icon, icon_color) = match chain.mr_state.as_str() {
|
||||
"merged" => ("M", PURPLE),
|
||||
"opened" => ("O", GREEN),
|
||||
"closed" => ("C", RED),
|
||||
_ => ("?", TEXT_MUTED),
|
||||
};
|
||||
let icon_style = Cell {
|
||||
fg: icon_color,
|
||||
bg: sel_bg,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_icon = frame.print_text_clipped(after_prefix, y, icon, icon_style, max_x);
|
||||
|
||||
// !iid
|
||||
let iid_str = format!(" !{}", chain.mr_iid);
|
||||
let ref_style = Cell {
|
||||
fg: ACCENT,
|
||||
bg: sel_bg,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_iid = frame.print_text_clipped(after_icon, y, &iid_str, ref_style, max_x);
|
||||
|
||||
// Title.
|
||||
let title = truncate_str(&chain.mr_title, 30);
|
||||
let title_style = Cell {
|
||||
fg: TEXT,
|
||||
bg: sel_bg,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_title = frame.print_text_clipped(after_iid + 1, y, &title, title_style, max_x);
|
||||
|
||||
// @author + change_type
|
||||
let meta = format!(
|
||||
"@{} {}",
|
||||
truncate_str(&chain.mr_author, 12),
|
||||
chain.change_type
|
||||
);
|
||||
let meta_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: sel_bg,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(after_title + 1, y, &meta, meta_style, max_x);
|
||||
|
||||
row += 1;
|
||||
|
||||
// Expanded content: linked issues + discussions.
|
||||
if expanded {
|
||||
// Issues.
|
||||
for issue in &chain.issues {
|
||||
if row >= height {
|
||||
break;
|
||||
}
|
||||
let iy = start_y + row as u16;
|
||||
|
||||
let issue_icon = match issue.state.as_str() {
|
||||
"opened" => "O",
|
||||
"closed" => "C",
|
||||
_ => "?",
|
||||
};
|
||||
let issue_icon_color = match issue.state.as_str() {
|
||||
"opened" => GREEN,
|
||||
"closed" => RED,
|
||||
_ => TEXT_MUTED,
|
||||
};
|
||||
|
||||
let indent_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_indent = frame.print_text_clipped(
|
||||
x + 4,
|
||||
iy,
|
||||
issue_icon,
|
||||
Cell {
|
||||
fg: issue_icon_color,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
|
||||
let issue_ref = format!(" #{} ", issue.iid);
|
||||
let issue_ref_style = Cell {
|
||||
fg: YELLOW,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_ref =
|
||||
frame.print_text_clipped(after_indent, iy, &issue_ref, issue_ref_style, max_x);
|
||||
|
||||
let issue_title = truncate_str(&issue.title, 40);
|
||||
let _ = indent_style; // suppress unused
|
||||
frame.print_text_clipped(
|
||||
after_ref,
|
||||
iy,
|
||||
&issue_title,
|
||||
Cell {
|
||||
fg: TEXT,
|
||||
..Cell::default()
|
||||
},
|
||||
max_x,
|
||||
);
|
||||
|
||||
row += 1;
|
||||
}
|
||||
|
||||
// Discussions.
|
||||
for disc in &chain.discussions {
|
||||
if row >= height {
|
||||
break;
|
||||
}
|
||||
let dy = start_y + row as u16;
|
||||
|
||||
let author = format!("@{}: ", truncate_str(&disc.author_username, 12));
|
||||
let author_style = Cell {
|
||||
fg: CYAN,
|
||||
..Cell::default()
|
||||
};
|
||||
let after_author =
|
||||
frame.print_text_clipped(x + 4, dy, &author, author_style, max_x);
|
||||
|
||||
let snippet = truncate_str(&disc.body, 60);
|
||||
let snippet_style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(after_author, dy, &snippet, snippet_style, max_x);
|
||||
|
||||
row += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render_loading(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: ACCENT,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x + 1, y, "Tracing file provenance...", style, max_x);
|
||||
}
|
||||
|
||||
fn render_empty_state(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(
|
||||
x + 1,
|
||||
y,
|
||||
"Enter a file path and press Enter to trace.",
|
||||
style,
|
||||
max_x,
|
||||
);
|
||||
}
|
||||
|
||||
fn render_no_results(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
..Cell::default()
|
||||
};
|
||||
frame.print_text_clipped(x + 1, y, "No trace chains found.", style, max_x);
|
||||
frame.print_text_clipped(
|
||||
x + 1,
|
||||
y + 1,
|
||||
"Hint: Run 'lore sync' to fetch MR file changes.",
|
||||
style,
|
||||
max_x,
|
||||
);
|
||||
}
|
||||
|
||||
fn render_hint_bar(frame: &mut Frame<'_>, x: u16, y: u16, max_x: u16) {
|
||||
let style = Cell {
|
||||
fg: TEXT_MUTED,
|
||||
bg: BG_SURFACE,
|
||||
..Cell::default()
|
||||
};
|
||||
|
||||
for col in x..max_x {
|
||||
frame.buffer.set(col, y, style);
|
||||
}
|
||||
|
||||
let hints = "/:path Enter:expand r:renames d:discussions q:back";
|
||||
frame.print_text_clipped(x + 1, y, hints, style, max_x);
|
||||
}
|
||||
|
||||
/// Truncate `s` to at most `max_chars` display characters, appending a
/// single `…` when anything was cut.
///
/// Counts `char`s rather than bytes, so multi-byte UTF-8 is never split
/// mid-codepoint. NOTE(review): grapheme clusters spanning multiple
/// `char`s (e.g. emoji with modifiers) may still be split — acceptable
/// for coarse TUI truncation.
///
/// Fixed edge case: the original returned "…" (one char) for
/// `max_chars == 0`, exceeding the stated budget; now returns "".
fn truncate_str(s: &str, max_chars: usize) -> String {
    if max_chars == 0 {
        return String::new();
    }
    if s.chars().count() <= max_chars {
        s.to_string()
    } else {
        // Keep max_chars - 1 chars so the ellipsis fits within the budget.
        let truncated: String = s.chars().take(max_chars - 1).collect();
        format!("{truncated}…")
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;
    use crate::state::trace::TraceState;
    use ftui::render::grapheme_pool::GraphemePool;
    use lore::core::trace::{TraceChain, TraceResult};

    // Builds a frame backed by a fresh grapheme pool and runs `$body` with
    // `$frame` bound to it.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    // Rect anchored at the origin with the given size.
    fn test_area(w: u16, h: u16) -> ftui::core::geometry::Rect {
        ftui::core::geometry::Rect {
            x: 0,
            y: 0,
            width: w,
            height: h,
        }
    }

    // Minimal chain fixture with no linked issues or discussions; only
    // iid, title, and MR state vary.
    fn sample_chain(iid: i64, title: &str, state: &str) -> TraceChain {
        TraceChain {
            mr_iid: iid,
            mr_title: title.into(),
            mr_state: state.into(),
            mr_author: "alice".into(),
            change_type: "modified".into(),
            merged_at_iso: None,
            updated_at_iso: "2024-01-01".into(),
            web_url: None,
            issues: vec![],
            discussions: vec![],
        }
    }

    // Smoke test: default state must render without panicking.
    #[test]
    fn test_render_empty_no_panic() {
        with_frame!(80, 24, |frame| {
            let state = TraceState::default();
            render_trace(&mut frame, &state, test_area(80, 24));
        });
    }

    // Below the minimum size the renderer must be a silent no-op.
    #[test]
    fn test_render_tiny_terminal_noop() {
        with_frame!(5, 2, |frame| {
            let state = TraceState::default();
            render_trace(&mut frame, &state, test_area(5, 2));
        });
    }

    #[test]
    fn test_render_loading() {
        with_frame!(80, 24, |frame| {
            let state = TraceState {
                loading: true,
                ..TraceState::default()
            };
            render_trace(&mut frame, &state, test_area(80, 24));
        });
    }

    #[test]
    fn test_render_with_chains() {
        with_frame!(100, 30, |frame| {
            let state = TraceState {
                result: Some(TraceResult {
                    path: "src/main.rs".into(),
                    resolved_paths: vec!["src/main.rs".into()],
                    renames_followed: false,
                    trace_chains: vec![
                        sample_chain(42, "Fix auth flow", "merged"),
                        sample_chain(39, "Refactor modules", "opened"),
                    ],
                    total_chains: 2,
                }),
                ..TraceState::default()
            };
            render_trace(&mut frame, &state, test_area(100, 30));
        });
    }

    // Expanded chain 0 with one linked issue and one discussion snippet —
    // exercises the expanded-content rendering path.
    #[test]
    fn test_render_expanded_chain() {
        with_frame!(100, 30, |frame| {
            let state = TraceState {
                expanded_chains: HashSet::from([0]),
                result: Some(TraceResult {
                    path: "src/main.rs".into(),
                    resolved_paths: vec!["src/main.rs".into()],
                    renames_followed: false,
                    trace_chains: vec![TraceChain {
                        mr_iid: 42,
                        mr_title: "Fix auth".into(),
                        mr_state: "merged".into(),
                        mr_author: "alice".into(),
                        change_type: "modified".into(),
                        merged_at_iso: None,
                        updated_at_iso: "2024-01-01".into(),
                        web_url: None,
                        issues: vec![lore::core::trace::TraceIssue {
                            iid: 12,
                            title: "Login broken".into(),
                            state: "closed".into(),
                            reference_type: "closes".into(),
                            web_url: None,
                        }],
                        discussions: vec![lore::core::trace::TraceDiscussion {
                            discussion_id: "abc".into(),
                            mr_iid: 42,
                            author_username: "bob".into(),
                            body: "This path needs review".into(),
                            path: "src/main.rs".into(),
                            created_at_iso: "2024-01-01".into(),
                        }],
                    }],
                    total_chains: 1,
                }),
                ..TraceState::default()
            };
            render_trace(&mut frame, &state, test_area(100, 30));
        });
    }

    // Renames followed with two resolved paths — exercises the rename
    // chain line (and the no-results message, since chains are empty).
    #[test]
    fn test_render_with_rename_chain() {
        with_frame!(80, 24, |frame| {
            let state = TraceState {
                result: Some(TraceResult {
                    path: "src/old.rs".into(),
                    resolved_paths: vec!["src/old.rs".into(), "src/new.rs".into()],
                    renames_followed: true,
                    trace_chains: vec![],
                    total_chains: 0,
                }),
                ..TraceState::default()
            };
            render_trace(&mut frame, &state, test_area(80, 24));
        });
    }

    #[test]
    fn test_truncate_str() {
        assert_eq!(truncate_str("hello", 10), "hello");
        assert_eq!(truncate_str("hello world", 5), "hell…");
        assert_eq!(truncate_str("", 5), "");
    }
}
|
||||
1049
crates/lore-tui/src/view/who.rs
Normal file
1049
crates/lore-tui/src/view/who.rs
Normal file
File diff suppressed because it is too large
Load Diff
636
crates/lore-tui/tests/vertical_slice.rs
Normal file
636
crates/lore-tui/tests/vertical_slice.rs
Normal file
@@ -0,0 +1,636 @@
|
||||
//! Vertical slice integration tests for TUI Phase 2.
|
||||
//!
|
||||
//! Validates that core screens work together end-to-end with synthetic
|
||||
//! data flows, navigation preserves state, stale results are dropped,
|
||||
//! and input mode is always recoverable.
|
||||
|
||||
use ftui::render::frame::Frame;
|
||||
use ftui::render::grapheme_pool::GraphemePool;
|
||||
use ftui::{Cmd, Event, KeyCode, KeyEvent, Model, Modifiers};
|
||||
|
||||
use lore_tui::app::LoreApp;
|
||||
use lore_tui::clock::FakeClock;
|
||||
use lore_tui::message::{EntityKey, InputMode, Msg, Screen};
|
||||
use lore_tui::state::dashboard::{DashboardData, EntityCounts, LastSyncInfo};
|
||||
use lore_tui::state::issue_detail::{IssueDetailData, IssueMetadata};
|
||||
use lore_tui::state::issue_list::{IssueListPage, IssueListRow};
|
||||
use lore_tui::state::mr_detail::MrDetailData;
|
||||
use lore_tui::state::mr_list::{MrListPage, MrListRow};
|
||||
use lore_tui::task_supervisor::TaskKey;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn test_app() -> LoreApp {
|
||||
let mut app = LoreApp::new();
|
||||
app.clock = Box::new(FakeClock::new(chrono::Utc::now()));
|
||||
app
|
||||
}
|
||||
|
||||
/// Fixture: dashboard payload with fixed entity counts, no projects or
/// recent items, and a successful last sync.
fn synthetic_dashboard_data() -> DashboardData {
    DashboardData {
        counts: EntityCounts {
            issues_total: 10,
            issues_open: 5,
            mrs_total: 8,
            mrs_open: 3,
            discussions: 15,
            notes_total: 50,
            notes_system_pct: 20,
            documents: 20,
            embeddings: 100,
        },
        projects: vec![],
        recent: vec![],
        last_sync: Some(LastSyncInfo {
            status: "succeeded".into(),
            finished_at: Some(1_700_000_000_000),
            command: "sync".into(),
            error: None,
        }),
    }
}
|
||||
|
||||
/// Fixture: a two-row issue page in "group/project" — #1 opened by alice
/// (labelled "backend"), #2 closed by bob. No further pages.
fn synthetic_issue_list_page() -> IssueListPage {
    IssueListPage {
        rows: vec![
            IssueListRow {
                project_path: "group/project".into(),
                iid: 1,
                title: "First issue".into(),
                state: "opened".into(),
                author: "alice".into(),
                labels: vec!["backend".into()],
                updated_at: 1_700_000_000_000,
            },
            IssueListRow {
                project_path: "group/project".into(),
                iid: 2,
                title: "Second issue".into(),
                state: "closed".into(),
                author: "bob".into(),
                labels: vec![],
                updated_at: 1_700_000_010_000,
            },
        ],
        next_cursor: None,
        total_count: 2,
    }
}
|
||||
|
||||
/// Fixture: detail payload matching issue #1 from
/// `synthetic_issue_list_page` (same project/title/state/author).
fn synthetic_issue_detail() -> IssueDetailData {
    IssueDetailData {
        metadata: IssueMetadata {
            iid: 1,
            project_path: "group/project".into(),
            title: "First issue".into(),
            description: "Test description".into(),
            state: "opened".into(),
            author: "alice".into(),
            assignees: vec!["bob".into()],
            labels: vec!["backend".into()],
            milestone: None,
            due_date: None,
            created_at: 1_700_000_000_000,
            updated_at: 1_700_000_060_000,
            web_url: "https://gitlab.com/group/project/-/issues/1".into(),
            discussion_count: 2,
        },
        cross_refs: vec![],
    }
}
|
||||
|
||||
/// Fixture: a single-row MR page — !10 "Fix auth", opened by alice,
/// targeting main, not a draft. No further pages.
fn synthetic_mr_list_page() -> MrListPage {
    MrListPage {
        rows: vec![MrListRow {
            project_path: "group/project".into(),
            iid: 10,
            title: "Fix auth".into(),
            state: "opened".into(),
            author: "alice".into(),
            labels: vec![],
            updated_at: 1_700_000_000_000,
            draft: false,
            target_branch: "main".into(),
        }],
        next_cursor: None,
        total_count: 1,
    }
}
|
||||
|
||||
/// Fixture: detail payload matching MR !10 from `synthetic_mr_list_page`
/// (same project/title/state/author/target branch).
fn synthetic_mr_detail() -> MrDetailData {
    MrDetailData {
        metadata: lore_tui::state::mr_detail::MrMetadata {
            iid: 10,
            project_path: "group/project".into(),
            title: "Fix auth".into(),
            description: "MR description".into(),
            state: "opened".into(),
            draft: false,
            author: "alice".into(),
            assignees: vec!["bob".into()],
            reviewers: vec!["carol".into()],
            labels: vec![],
            source_branch: "fix-auth".into(),
            target_branch: "main".into(),
            merge_status: "mergeable".into(),
            created_at: 1_700_000_000_000,
            updated_at: 1_700_000_060_000,
            merged_at: None,
            web_url: "https://gitlab.com/group/project/-/merge_requests/10".into(),
            discussion_count: 1,
            file_change_count: 2,
        },
        cross_refs: vec![],
        file_changes: vec![],
    }
}
|
||||
|
||||
/// Inject dashboard data with matching generation.
|
||||
fn load_dashboard(app: &mut LoreApp) {
|
||||
let generation = app
|
||||
.supervisor
|
||||
.submit(TaskKey::LoadScreen(Screen::Dashboard))
|
||||
.generation;
|
||||
app.update(Msg::DashboardLoaded {
|
||||
generation,
|
||||
data: Box::new(synthetic_dashboard_data()),
|
||||
});
|
||||
}
|
||||
|
||||
/// Navigate to issue list and inject data.
|
||||
fn navigate_and_load_issue_list(app: &mut LoreApp) {
|
||||
app.update(Msg::NavigateTo(Screen::IssueList));
|
||||
let generation = app
|
||||
.supervisor
|
||||
.submit(TaskKey::LoadScreen(Screen::IssueList))
|
||||
.generation;
|
||||
app.update(Msg::IssueListLoaded {
|
||||
generation,
|
||||
page: synthetic_issue_list_page(),
|
||||
});
|
||||
}
|
||||
|
||||
/// Navigate to issue detail and inject data.
|
||||
fn navigate_and_load_issue_detail(app: &mut LoreApp, key: EntityKey) {
|
||||
let screen = Screen::IssueDetail(key.clone());
|
||||
app.update(Msg::NavigateTo(screen.clone()));
|
||||
let generation = app
|
||||
.supervisor
|
||||
.submit(TaskKey::LoadScreen(screen))
|
||||
.generation;
|
||||
app.update(Msg::IssueDetailLoaded {
|
||||
generation,
|
||||
key,
|
||||
data: Box::new(synthetic_issue_detail()),
|
||||
});
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Nav flow tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// TDD anchor: Dashboard -> IssueList -> IssueDetail -> Esc -> IssueList,
/// verifies cursor position is preserved on back-navigation.
// NOTE(review): the doc mentions cursor preservation but only row data is
// asserted below — consider also asserting the list cursor/selection.
#[test]
fn test_dashboard_to_issue_detail_roundtrip() {
    let mut app = test_app();
    assert!(app.navigation.is_at(&Screen::Dashboard));

    // Navigate to IssueList and load data.
    navigate_and_load_issue_list(&mut app);
    assert!(app.navigation.is_at(&Screen::IssueList));
    assert_eq!(app.state.issue_list.rows.len(), 2);

    // Navigate to IssueDetail for issue #1.
    let issue_key = EntityKey::issue(1, 1);
    navigate_and_load_issue_detail(&mut app, issue_key);
    assert!(matches!(app.navigation.current(), Screen::IssueDetail(_)));

    // Go back — should return to IssueList with data preserved.
    app.update(Msg::GoBack);
    assert!(app.navigation.is_at(&Screen::IssueList));
    // Data should still be there (state preserved on navigation).
    assert_eq!(app.state.issue_list.rows.len(), 2);
}
|
||||
|
||||
/// Navigate Dashboard -> IssueList -> MrList -> MrDetail -> Home.
// Each load follows the submit-then-deliver protocol: the generation from
// `supervisor.submit` must be echoed back in the loaded message or the
// result is dropped as stale.
#[test]
fn test_full_nav_flow_home() {
    let mut app = test_app();

    // Issue list.
    navigate_and_load_issue_list(&mut app);
    assert!(app.navigation.is_at(&Screen::IssueList));

    // MR list.
    app.update(Msg::NavigateTo(Screen::MrList));
    let generation = app
        .supervisor
        .submit(TaskKey::LoadScreen(Screen::MrList))
        .generation;
    app.update(Msg::MrListLoaded {
        generation,
        page: synthetic_mr_list_page(),
    });
    assert!(app.navigation.is_at(&Screen::MrList));

    // MR detail.
    let mr_key = EntityKey::mr(1, 10);
    let mr_screen = Screen::MrDetail(mr_key.clone());
    app.update(Msg::NavigateTo(mr_screen.clone()));
    let generation = app
        .supervisor
        .submit(TaskKey::LoadScreen(mr_screen))
        .generation;
    app.update(Msg::MrDetailLoaded {
        generation,
        key: mr_key,
        data: Box::new(synthetic_mr_detail()),
    });
    assert!(matches!(app.navigation.current(), Screen::MrDetail(_)));

    // Go home.
    app.update(Msg::GoHome);
    assert!(app.navigation.is_at(&Screen::Dashboard));
}
|
||||
|
||||
/// Verify back-navigation preserves issue list data and MR list data.
// Screen states live side by side in `app.state`; navigating away must
// not clear the previous screen's loaded rows.
#[test]
fn test_state_preserved_on_back_navigation() {
    let mut app = test_app();

    // Load issue list.
    navigate_and_load_issue_list(&mut app);
    assert_eq!(app.state.issue_list.rows.len(), 2);

    // Navigate to MR list.
    app.update(Msg::NavigateTo(Screen::MrList));
    let generation = app
        .supervisor
        .submit(TaskKey::LoadScreen(Screen::MrList))
        .generation;
    app.update(Msg::MrListLoaded {
        generation,
        page: synthetic_mr_list_page(),
    });

    // Both states should be populated.
    assert_eq!(app.state.issue_list.rows.len(), 2);
    assert_eq!(app.state.mr_list.rows.len(), 1);

    // Go back to issue list — data should still be there.
    app.update(Msg::GoBack);
    assert!(app.navigation.is_at(&Screen::IssueList));
    assert_eq!(app.state.issue_list.rows.len(), 2);
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Stale result guard
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Rapidly navigate between screens, injecting out-of-order results.
/// Stale results should be silently dropped.
#[test]
fn test_stale_result_guard_rapid_navigation() {
    let mut app = test_app();

    // First visit to IssueList — remember the task generation it spawned.
    app.update(Msg::NavigateTo(Screen::IssueList));
    let stale_gen = app
        .supervisor
        .submit(TaskKey::LoadScreen(Screen::IssueList))
        .generation;

    // Bounce away and return, producing a newer generation.
    app.update(Msg::GoBack);
    app.update(Msg::NavigateTo(Screen::IssueList));
    let fresh_gen = app
        .supervisor
        .submit(TaskKey::LoadScreen(Screen::IssueList))
        .generation;

    // Deliver the result for the superseded generation; it must be ignored.
    app.update(Msg::IssueListLoaded {
        generation: stale_gen,
        page: IssueListPage {
            rows: vec![IssueListRow {
                project_path: "g/p".into(),
                iid: 999,
                title: "stale".into(),
                state: "opened".into(),
                author: "x".into(),
                labels: vec![],
                updated_at: 0,
            }],
            next_cursor: None,
            total_count: 1,
        },
    });
    assert!(
        app.state.issue_list.rows.is_empty(),
        "stale result should be dropped"
    );

    // The current generation's result lands normally.
    app.update(Msg::IssueListLoaded {
        generation: fresh_gen,
        page: synthetic_issue_list_page(),
    });
    assert_eq!(app.state.issue_list.rows.len(), 2);
    assert_eq!(app.state.issue_list.rows[0].title, "First issue");
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Input mode fuzz (stuck-input check)
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Fuzz 1000 random key sequences and verify:
/// 1. No panics
/// 2. InputMode is always recoverable via Esc + Ctrl+C
/// 3. Final state is consistent
#[test]
fn test_input_mode_fuzz_no_stuck_state() {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    let mut app = test_app();

    // Pool of key codes the fuzzer draws from; the pick for each round is a
    // deterministic function of the round counter, so runs are reproducible.
    let keys = [
        KeyCode::Char('g'),
        KeyCode::Char('i'),
        KeyCode::Char('m'),
        KeyCode::Char('h'),
        KeyCode::Char('s'),
        KeyCode::Char('q'),
        KeyCode::Char('p'),
        KeyCode::Char('/'),
        KeyCode::Char('?'),
        KeyCode::Tab,
        KeyCode::BackTab,
        KeyCode::Escape,
        KeyCode::Enter,
        KeyCode::Up,
        KeyCode::Down,
        KeyCode::Left,
        KeyCode::Right,
        KeyCode::Home,
        KeyCode::End,
    ];

    // NONE is repeated to bias the draw toward unmodified keys.
    let modifiers_set = [
        Modifiers::NONE,
        Modifiers::SHIFT,
        Modifiers::CTRL,
        Modifiers::NONE,
        Modifiers::NONE,
    ];

    for round in 0..1000_u64 {
        // Hash the round counter to derive the key + modifier indices.
        let mut hasher = DefaultHasher::new();
        round.hash(&mut hasher);
        let digest = hasher.finish();

        let code = keys[(digest as usize) % keys.len()];
        let modifiers = modifiers_set[((digest >> 16) as usize) % modifiers_set.len()];

        // Ctrl+C would quit the app, so never feed it during the fuzz loop.
        // (Defensive: 'c' is not in `keys` above, but guard against future edits.)
        if code == KeyCode::Char('c') && modifiers.contains(Modifiers::CTRL) {
            continue;
        }
        // Plain 'q' quits from Normal mode; skip it there so the test keeps running.
        if code == KeyCode::Char('q')
            && modifiers == Modifiers::NONE
            && matches!(app.input_mode, InputMode::Normal)
        {
            continue;
        }

        let event = if modifiers == Modifiers::NONE {
            KeyEvent::new(code)
        } else {
            KeyEvent::new(code).with_modifiers(modifiers)
        };

        let command = app.update(Msg::RawEvent(Event::Key(event)));
        // 'q' delivered in a non-Normal mode can still fall through to Quit;
        // when that happens, start over with a fresh app and keep fuzzing.
        if matches!(command, Cmd::Quit) {
            app = test_app();
        }
    }

    // Recovery invariant: Esc always lands in Normal mode
    // (GoPrefix → Normal, Text → Normal, Palette → Normal; Normal stays Normal).
    app.update(Msg::RawEvent(Event::Key(KeyEvent::new(KeyCode::Escape))));
    assert!(
        matches!(app.input_mode, InputMode::Normal),
        "Esc should always recover to Normal mode, got: {:?}",
        app.input_mode
    );

    // Quit invariant: Ctrl+C always exits, whatever state we ended up in.
    let ctrl_c = KeyEvent::new(KeyCode::Char('c')).with_modifiers(Modifiers::CTRL);
    let command = app.update(Msg::RawEvent(Event::Key(ctrl_c)));
    assert!(matches!(command, Cmd::Quit));
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Bootstrap → Dashboard transition
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Bootstrap screen should auto-transition to Dashboard when sync completes.
#[test]
fn test_bootstrap_to_dashboard_after_sync() {
    let mut app = test_app();

    // Land on the Bootstrap screen; no sync has been kicked off yet.
    app.update(Msg::NavigateTo(Screen::Bootstrap));
    assert!(app.navigation.is_at(&Screen::Bootstrap));
    assert!(!app.state.bootstrap.sync_started);

    // The 'g' then 's' key sequence starts the sync.
    app.update(Msg::RawEvent(Event::Key(KeyEvent::new(KeyCode::Char('g')))));
    app.update(Msg::RawEvent(Event::Key(KeyEvent::new(KeyCode::Char('s')))));
    assert!(app.state.bootstrap.sync_started);

    // Completion while on Bootstrap must move us to the Dashboard.
    app.update(Msg::SyncCompleted { elapsed_ms: 5000 });
    assert!(
        app.navigation.is_at(&Screen::Dashboard),
        "Should auto-transition to Dashboard after sync completes on Bootstrap"
    );
}
|
||||
|
||||
/// SyncCompleted on non-Bootstrap screen should NOT navigate.
#[test]
fn test_sync_completed_does_not_navigate_from_other_screens() {
    let mut app = test_app();

    // Park on a screen other than Bootstrap.
    app.update(Msg::NavigateTo(Screen::IssueList));
    assert!(app.navigation.is_at(&Screen::IssueList));

    // A sync-completion message must leave navigation untouched here.
    app.update(Msg::SyncCompleted { elapsed_ms: 3000 });
    assert!(
        app.navigation.is_at(&Screen::IssueList),
        "SyncCompleted should not navigate when not on Bootstrap"
    );
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Render all screens (no-panic check)
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Render every screen variant to verify no panics with synthetic data.
#[test]
fn test_render_all_screens_no_panic() {
    let mut pool = GraphemePool::new();

    // Seed the app with data for the dashboard and both list screens.
    let mut app = test_app();
    load_dashboard(&mut app);
    navigate_and_load_issue_list(&mut app);
    app.update(Msg::GoBack);

    app.update(Msg::NavigateTo(Screen::MrList));
    let gen_id = app
        .supervisor
        .submit(TaskKey::LoadScreen(Screen::MrList))
        .generation;
    app.update(Msg::MrListLoaded {
        generation: gen_id,
        page: synthetic_mr_list_page(),
    });
    app.update(Msg::GoBack);

    // Walk each top-level screen and render a frame on it.
    for screen in [
        Screen::Dashboard,
        Screen::IssueList,
        Screen::MrList,
        Screen::Bootstrap,
    ] {
        app.update(Msg::NavigateTo(screen));
        let mut frame = Frame::new(80, 24, &mut pool);
        app.view(&mut frame);
    }

    // Issue detail screen.
    navigate_and_load_issue_detail(&mut app, EntityKey::issue(1, 1));
    {
        let mut frame = Frame::new(80, 24, &mut pool);
        app.view(&mut frame);
    }
    app.update(Msg::GoBack);

    // MR detail screen.
    let key = EntityKey::mr(1, 10);
    let detail = Screen::MrDetail(key.clone());
    app.update(Msg::NavigateTo(detail.clone()));
    let gen_id = app
        .supervisor
        .submit(TaskKey::LoadScreen(detail))
        .generation;
    app.update(Msg::MrDetailLoaded {
        generation: gen_id,
        key,
        data: Box::new(synthetic_mr_detail()),
    });
    {
        let mut frame = Frame::new(80, 24, &mut pool);
        app.view(&mut frame);
    }
}
|
||||
|
||||
/// Render at various terminal sizes to catch layout panics.
#[test]
fn test_render_various_sizes_no_panic() {
    let mut pool = GraphemePool::new();
    let app = test_app();

    // From a roomy terminal all the way down to a degenerate 3x3 grid.
    let sizes: [(u16, u16); 5] = [
        (80, 24),  // Standard
        (120, 40), // Large
        (40, 12),  // Small
        (20, 5),   // Very small
        (3, 3),    // Minimum
    ];

    for &(width, height) in &sizes {
        let mut frame = Frame::new(width, height, &mut pool);
        app.view(&mut frame);
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Navigation depth stress
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Navigate deep and verify back-navigation works correctly.
#[test]
fn test_deep_navigation_and_unwind() {
    let mut app = test_app();

    // Push 10 screens: 5 rounds of IssueList -> IssueDetail.
    for i in 0..5 {
        app.update(Msg::NavigateTo(Screen::IssueList));
        app.update(Msg::NavigateTo(Screen::IssueDetail(EntityKey::issue(1, i + 1))));
    }
    assert!(matches!(app.navigation.current(), Screen::IssueDetail(_)));

    // GoBack repeatedly; 20 steps is more than enough to unwind the stack.
    for _ in 0..20 {
        app.update(Msg::GoBack);
        if app.navigation.is_at(&Screen::Dashboard) {
            break;
        }
    }

    assert!(
        app.navigation.is_at(&Screen::Dashboard),
        "Should eventually reach Dashboard"
    );
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Performance (smoke test — real benchmarks need criterion)
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Verify that 100 update() + view() cycles complete quickly.
/// This is a smoke test, not a precise benchmark.
#[test]
fn test_update_view_cycle_performance_smoke() {
    let mut pool = GraphemePool::new();
    let mut app = test_app();
    load_dashboard(&mut app);

    let started = std::time::Instant::now();
    for _ in 0..100 {
        app.update(Msg::Tick);
        let mut frame = Frame::new(80, 24, &mut pool);
        app.view(&mut frame);
    }
    let elapsed = started.elapsed();

    // Generous bound: a typical machine finishes in < 10ms, so anything
    // approaching a full second indicates a pathological update/render path.
    assert!(
        elapsed.as_millis() < 1000,
        "100 update+view cycles took {}ms — too slow",
        elapsed.as_millis()
    );
}
|
||||
Reference in New Issue
Block a user