Files
gitlore/crates/lore-tui/src/action/file_history.rs
teernisse 146eb61623 feat(tui): Phase 4 completion + Phase 5 session/lock/text-width
Phase 4 (bd-1df9) — all 5 acceptance criteria met:
- Sync screen with delta ledger (bd-2x2h, bd-y095)
- Doctor screen with health checks (bd-2iqk)
- Stats screen with document counts (bd-2iqk)
- CLI integration: lore tui subcommand (bd-26lp)
- CLI integration: lore sync --tui flag (bd-3l56)

Phase 5 (bd-3h00) — session persistence + instance lock + text width:
- text_width.rs: Unicode-aware measurement, truncation, padding (16 tests)
- instance_lock.rs: Advisory PID lock with stale recovery (6 tests)
- session.rs: Atomic write + CRC32 checksum + quarantine (9 tests)

Closes: bd-26lp, bd-3h00, bd-3l56, bd-1df9, bd-y095
2026-02-18 23:51:54 -05:00

380 lines
13 KiB
Rust

#![allow(dead_code)]
//! File History screen actions — query MRs that touched a file path.
//!
//! Wraps the SQL queries from `lore::cli::commands::file_history` but uses
//! an injected `Connection` (TUI manages its own DB connection).
use anyhow::Result;
use rusqlite::Connection;
use lore::core::file_history::resolve_rename_chain;
use crate::state::file_history::{FileDiscussion, FileHistoryMr, FileHistoryResult};
/// Maximum rename chain BFS depth passed to `resolve_rename_chain`; bounds
/// how many rename hops are followed when expanding a path's history.
const MAX_RENAME_HOPS: usize = 10;
/// Default result limit bound into the MR history query's `LIMIT` clause.
const DEFAULT_LIMIT: usize = 50;
/// Fetch file history: MRs that touched a file path, with optional rename resolution.
/// Fetch file history: MRs that touched a file path, with optional rename resolution.
///
/// Returns up to `DEFAULT_LIMIT` matching MRs ordered by merge (or update)
/// time, newest first, along with the rename chain that was searched and,
/// when `include_discussions` is set, any DiffNote discussions on those paths.
pub fn fetch_file_history(
    conn: &Connection,
    project_id: Option<i64>,
    path: &str,
    follow_renames: bool,
    merged_only: bool,
    include_discussions: bool,
) -> Result<FileHistoryResult> {
    // Rename resolution requires a project scope; when the caller opted out
    // or no project id is available, search only the literal path.
    let (all_paths, renames_followed) = match project_id {
        Some(pid) if follow_renames => {
            let chain = resolve_rename_chain(conn, pid, path, MAX_RENAME_HOPS)?;
            let followed = chain.len() > 1;
            (chain, followed)
        }
        _ => (vec![path.to_string()], false),
    };
    let paths_searched = all_paths.len();

    // Positional placeholder layout: ?1 = project id, ?2..=?(N+1) = paths,
    // ?(N+2) = limit.
    let in_clause = (0..all_paths.len())
        .map(|i| format!("?{}", i + 2))
        .collect::<Vec<String>>()
        .join(", ");
    let merged_filter = if merged_only {
        " AND mr.state = 'merged'"
    } else {
        ""
    };
    let project_filter = if project_id.is_some() {
        "AND mfc.project_id = ?1"
    } else {
        ""
    };
    let limit_param = all_paths.len() + 2;
    let sql = format!(
        "SELECT DISTINCT \
         mr.iid, mr.title, mr.state, mr.author_username, \
         mfc.change_type, mr.merged_at, mr.updated_at, mr.merge_commit_sha \
         FROM mr_file_changes mfc \
         JOIN merge_requests mr ON mr.id = mfc.merge_request_id \
         WHERE mfc.new_path IN ({in_clause}) {project_filter} {merged_filter} \
         ORDER BY COALESCE(mr.merged_at, mr.updated_at) DESC \
         LIMIT ?{limit_param}"
    );
    let mut stmt = conn.prepare(&sql)?;

    // Bind in placeholder order. When unscoped, ?1 is still bound (to a dummy
    // 0) so the parameter count matches, but the SQL never references it.
    let mut params: Vec<Box<dyn rusqlite::types::ToSql>> =
        Vec::with_capacity(all_paths.len() + 2);
    params.push(Box::new(project_id.unwrap_or(0)));
    for p in &all_paths {
        params.push(Box::new(p.clone()));
    }
    params.push(Box::new(DEFAULT_LIMIT as i64));
    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|b| &**b).collect();

    let merge_requests = stmt
        .query_map(param_refs.as_slice(), |row| {
            Ok(FileHistoryMr {
                iid: row.get(0)?,
                title: row.get(1)?,
                state: row.get(2)?,
                author_username: row.get(3)?,
                change_type: row.get(4)?,
                merged_at_ms: row.get(5)?,
                updated_at_ms: row.get::<_, i64>(6)?,
                merge_commit_sha: row.get(7)?,
            })
        })?
        .collect::<std::result::Result<Vec<FileHistoryMr>, _>>()?;
    let total_mrs = merge_requests.len();

    // Discussions are fetched only on request, and only when at least one MR
    // matched (an empty MR list makes the discussion query pointless).
    let discussions = if !include_discussions || merge_requests.is_empty() {
        Vec::new()
    } else {
        fetch_file_discussions(conn, &all_paths, project_id)?
    };

    Ok(FileHistoryResult {
        path: path.to_string(),
        rename_chain: all_paths,
        renames_followed,
        merge_requests,
        discussions,
        total_mrs,
        paths_searched,
    })
}
/// Fetch DiffNote discussions referencing the given file paths.
/// Fetch DiffNote discussions referencing the given file paths.
///
/// Returns up to 50 non-system notes whose diff position matches one of
/// `paths`, newest first. Note bodies longer than 200 bytes are truncated to
/// a snippet ending on a UTF-8 character boundary, with "..." appended.
fn fetch_file_discussions(
    conn: &Connection,
    paths: &[String],
    project_id: Option<i64>,
) -> Result<Vec<FileDiscussion>> {
    // Placeholder layout mirrors fetch_file_history: ?1 = project id,
    // ?2..=?(N+1) = paths.
    let placeholders: Vec<String> = (0..paths.len()).map(|i| format!("?{}", i + 2)).collect();
    let in_clause = placeholders.join(", ");
    let project_filter = if project_id.is_some() {
        "AND d.project_id = ?1"
    } else {
        ""
    };
    let sql = format!(
        "SELECT d.gitlab_discussion_id, n.author_username, n.body, n.position_new_path, n.created_at \
         FROM notes n \
         JOIN discussions d ON d.id = n.discussion_id \
         WHERE n.position_new_path IN ({in_clause}) {project_filter} \
         AND n.is_system = 0 \
         ORDER BY n.created_at DESC \
         LIMIT 50"
    );
    let mut stmt = conn.prepare(&sql)?;
    let mut params: Vec<Box<dyn rusqlite::types::ToSql>> = Vec::new();
    // ?1 is bound even when unscoped; the SQL simply never references it then.
    params.push(Box::new(project_id.unwrap_or(0)));
    for p in paths {
        params.push(Box::new(p.clone()));
    }
    let param_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
    let discussions: Vec<FileDiscussion> = stmt
        .query_map(param_refs.as_slice(), |row| {
            let body: String = row.get(2)?;
            // Truncate long bodies to ~200 bytes, backing off to the nearest
            // char boundary so multi-byte UTF-8 is never split. This is a
            // stable replacement for `str::floor_char_boundary`, which is a
            // nightly-only unstable API (feature `round_char_boundary`).
            let snippet = if body.len() > 200 {
                let mut end = 200;
                while !body.is_char_boundary(end) {
                    end -= 1;
                }
                format!("{}...", &body[..end])
            } else {
                body
            };
            Ok(FileDiscussion {
                discussion_id: row.get(0)?,
                author_username: row.get(1)?,
                body_snippet: snippet,
                path: row.get(3)?,
                created_at_ms: row.get(4)?,
            })
        })?
        .collect::<std::result::Result<Vec<_>, _>>()?;
    Ok(discussions)
}
/// Fetch distinct file paths from mr_file_changes for autocomplete.
/// Fetch distinct file paths from mr_file_changes for autocomplete.
///
/// Scoped to `project_id` when provided; capped at 5000 rows to keep the
/// autocomplete candidate list bounded.
pub fn fetch_file_history_paths(conn: &Connection, project_id: Option<i64>) -> Result<Vec<String>> {
    match project_id {
        Some(pid) => {
            let mut stmt = conn.prepare(
                "SELECT DISTINCT new_path FROM mr_file_changes WHERE project_id = ?1 ORDER BY new_path LIMIT 5000",
            )?;
            let rows = stmt
                .query_map([pid], |row| row.get(0))?
                .collect::<std::result::Result<Vec<String>, _>>()?;
            Ok(rows)
        }
        None => {
            let mut stmt = conn.prepare(
                "SELECT DISTINCT new_path FROM mr_file_changes ORDER BY new_path LIMIT 5000",
            )?;
            let rows = stmt
                .query_map([], |row| row.get(0))?
                .collect::<std::result::Result<Vec<String>, _>>()?;
            Ok(rows)
        }
    }
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;

    /// Create just enough schema (in an in-memory DB) for the file-history
    /// queries to run.
    fn create_file_history_schema(conn: &Connection) {
        conn.execute_batch(
            "
            CREATE TABLE projects (
                id INTEGER PRIMARY KEY,
                gitlab_project_id INTEGER UNIQUE NOT NULL,
                path_with_namespace TEXT NOT NULL
            );
            CREATE TABLE merge_requests (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                project_id INTEGER NOT NULL,
                iid INTEGER NOT NULL,
                title TEXT,
                state TEXT,
                author_id INTEGER,
                author_username TEXT,
                draft INTEGER NOT NULL DEFAULT 0,
                created_at INTEGER,
                updated_at INTEGER,
                merged_at INTEGER,
                merge_commit_sha TEXT,
                web_url TEXT,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE mr_file_changes (
                id INTEGER PRIMARY KEY,
                merge_request_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                new_path TEXT NOT NULL,
                old_path TEXT,
                change_type TEXT NOT NULL
            );
            CREATE TABLE discussions (
                id INTEGER PRIMARY KEY,
                gitlab_discussion_id TEXT NOT NULL,
                project_id INTEGER NOT NULL,
                noteable_type TEXT NOT NULL,
                issue_id INTEGER,
                merge_request_id INTEGER,
                last_seen_at INTEGER NOT NULL
            );
            CREATE TABLE notes (
                id INTEGER PRIMARY KEY,
                gitlab_id INTEGER UNIQUE NOT NULL,
                discussion_id INTEGER NOT NULL,
                project_id INTEGER NOT NULL,
                is_system INTEGER NOT NULL DEFAULT 0,
                author_username TEXT,
                body TEXT,
                note_type TEXT,
                position_new_path TEXT,
                position_old_path TEXT,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL,
                last_seen_at INTEGER NOT NULL
            );
            ",
        )
        .expect("create file history schema");
    }

    #[test]
    fn test_fetch_file_history_empty_db() {
        let db = Connection::open_in_memory().unwrap();
        create_file_history_schema(&db);

        let res = fetch_file_history(&db, None, "src/lib.rs", false, false, false).unwrap();

        // An empty DB yields an empty result, but the queried path echoes back.
        assert_eq!(res.path, "src/lib.rs");
        assert_eq!(res.total_mrs, 0);
        assert!(res.merge_requests.is_empty());
    }

    #[test]
    fn test_fetch_file_history_returns_mrs() {
        let db = Connection::open_in_memory().unwrap();
        create_file_history_schema(&db);

        // Seed one project with one merged MR touching one file.
        db.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'grp/repo')",
            [],
        ).unwrap();
        db.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, author_username, updated_at, last_seen_at) \
             VALUES (1, 1000, 1, 42, 'Fix auth', 'merged', 'alice', 1700000000000, 1700000000000)",
            [],
        ).unwrap();
        db.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) \
             VALUES (1, 1, 'src/auth.rs', 'modified')",
            [],
        )
        .unwrap();

        let res = fetch_file_history(&db, Some(1), "src/auth.rs", false, false, false).unwrap();

        assert_eq!(res.merge_requests.len(), 1);
        let mr = &res.merge_requests[0];
        assert_eq!(mr.iid, 42);
        assert_eq!(mr.title, "Fix auth");
        assert_eq!(mr.change_type, "modified");
    }

    #[test]
    fn test_fetch_file_history_merged_only() {
        let db = Connection::open_in_memory().unwrap();
        create_file_history_schema(&db);

        db.execute(
            "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'grp/repo')",
            [],
        ).unwrap();
        // One merged and one open MR, both touching the same file.
        db.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, author_username, updated_at, last_seen_at) \
             VALUES (1, 1000, 1, 42, 'Merged MR', 'merged', 'alice', 1700000000000, 1700000000000)",
            [],
        ).unwrap();
        db.execute(
            "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, author_username, updated_at, last_seen_at) \
             VALUES (2, 1001, 1, 43, 'Open MR', 'opened', 'bob', 1700000000000, 1700000000000)",
            [],
        ).unwrap();
        db.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (1, 1, 'src/lib.rs', 'modified')",
            [],
        ).unwrap();
        db.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (2, 1, 'src/lib.rs', 'modified')",
            [],
        ).unwrap();

        // merged_only = false: both MRs come back.
        let all = fetch_file_history(&db, Some(1), "src/lib.rs", false, false, false).unwrap();
        assert_eq!(all.merge_requests.len(), 2);

        // merged_only = true: only the merged MR survives the filter.
        let merged = fetch_file_history(&db, Some(1), "src/lib.rs", false, true, false).unwrap();
        assert_eq!(merged.merge_requests.len(), 1);
        assert_eq!(merged.merge_requests[0].state, "merged");
    }

    #[test]
    fn test_fetch_file_history_paths_empty() {
        let db = Connection::open_in_memory().unwrap();
        create_file_history_schema(&db);
        assert!(fetch_file_history_paths(&db, None).unwrap().is_empty());
    }

    #[test]
    fn test_fetch_file_history_paths_returns_distinct() {
        let db = Connection::open_in_memory().unwrap();
        create_file_history_schema(&db);

        // 'src/a.rs' appears twice; DISTINCT must collapse it to one entry.
        db.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (1, 1, 'src/a.rs', 'modified')",
            [],
        ).unwrap();
        db.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (2, 1, 'src/a.rs', 'modified')",
            [],
        ).unwrap();
        db.execute(
            "INSERT INTO mr_file_changes (merge_request_id, project_id, new_path, change_type) VALUES (3, 1, 'src/b.rs', 'added')",
            [],
        ).unwrap();

        let paths = fetch_file_history_paths(&db, None).unwrap();
        assert_eq!(paths, vec!["src/a.rs", "src/b.rs"]);
    }
}