Compare commits
5 Commits
perf-audit
...
a887e8375a
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a887e8375a | ||
|
|
eb98595251 | ||
|
|
d224a88738 | ||
|
|
ad4dd6e855 | ||
|
|
83cd16c918 |
312
.beads/.br_history/issues.20260212_211122.jsonl
Normal file
312
.beads/.br_history/issues.20260212_211122.jsonl
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
|||||||
bd-2kop
|
bd-2cbw
|
||||||
|
|||||||
25
.gitignore
vendored
25
.gitignore
vendored
@@ -1,11 +1,6 @@
|
|||||||
# Dependencies
|
# Rust build output
|
||||||
node_modules/
|
/target
|
||||||
|
**/target/
|
||||||
# Build output
|
|
||||||
dist/
|
|
||||||
|
|
||||||
# Test coverage
|
|
||||||
coverage/
|
|
||||||
|
|
||||||
# IDE
|
# IDE
|
||||||
.idea/
|
.idea/
|
||||||
@@ -25,14 +20,11 @@ Thumbs.db
|
|||||||
|
|
||||||
# Logs
|
# Logs
|
||||||
*.log
|
*.log
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
|
|
||||||
# Local config files
|
# Local config files
|
||||||
lore.config.json
|
lore.config.json
|
||||||
|
|
||||||
# beads
|
# beads viewer cache
|
||||||
.bv/
|
.bv/
|
||||||
|
|
||||||
# SQLite databases (local development)
|
# SQLite databases (local development)
|
||||||
@@ -40,7 +32,8 @@ lore.config.json
|
|||||||
*.db-wal
|
*.db-wal
|
||||||
*.db-shm
|
*.db-shm
|
||||||
|
|
||||||
|
# Profiling / benchmarks
|
||||||
# Added by cargo
|
perf.data
|
||||||
|
perf.data.old
|
||||||
/target
|
flamegraph.svg
|
||||||
|
*.profraw
|
||||||
|
|||||||
2
Cargo.lock
generated
2
Cargo.lock
generated
@@ -1106,7 +1106,7 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lore"
|
name = "lore"
|
||||||
version = "0.6.2"
|
version = "0.7.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-stream",
|
"async-stream",
|
||||||
"chrono",
|
"chrono",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "lore"
|
name = "lore"
|
||||||
version = "0.6.2"
|
version = "0.7.0"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
description = "Gitlore - Local GitLab data management with semantic search"
|
description = "Gitlore - Local GitLab data management with semantic search"
|
||||||
authors = ["Taylor Eernisse"]
|
authors = ["Taylor Eernisse"]
|
||||||
|
|||||||
3171
crates/lore-tui/Cargo.lock
generated
Normal file
3171
crates/lore-tui/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
46
crates/lore-tui/Cargo.toml
Normal file
46
crates/lore-tui/Cargo.toml
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
[package]
|
||||||
|
name = "lore-tui"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
description = "Terminal UI for Gitlore — local GitLab data explorer"
|
||||||
|
authors = ["Taylor Eernisse"]
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "lore-tui"
|
||||||
|
path = "src/main.rs"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
# FrankenTUI (Elm-architecture TUI framework)
|
||||||
|
ftui = "0.1.1"
|
||||||
|
|
||||||
|
# Lore library (config, db, ingestion, search, etc.)
|
||||||
|
lore = { path = "../.." }
|
||||||
|
|
||||||
|
# CLI
|
||||||
|
clap = { version = "4", features = ["derive", "env"] }
|
||||||
|
|
||||||
|
# Error handling
|
||||||
|
anyhow = "1"
|
||||||
|
|
||||||
|
# Time
|
||||||
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
|
||||||
|
# Paths
|
||||||
|
dirs = "6"
|
||||||
|
|
||||||
|
# Database (read-only queries from TUI)
|
||||||
|
rusqlite = { version = "0.38", features = ["bundled"] }
|
||||||
|
|
||||||
|
# Terminal (crossterm for raw mode + event reading, used by ftui runtime)
|
||||||
|
crossterm = "0.28"
|
||||||
|
|
||||||
|
# Serialization (crash context NDJSON dumps)
|
||||||
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
serde_json = "1"
|
||||||
|
|
||||||
|
# Regex (used by safety module for PII/secret redaction)
|
||||||
|
regex = "1"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
tempfile = "3"
|
||||||
4
crates/lore-tui/rust-toolchain.toml
Normal file
4
crates/lore-tui/rust-toolchain.toml
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
[toolchain]
|
||||||
|
channel = "nightly-2026-02-08"
|
||||||
|
profile = "minimal"
|
||||||
|
components = ["rustfmt", "clippy"]
|
||||||
712
crates/lore-tui/src/app.rs
Normal file
712
crates/lore-tui/src/app.rs
Normal file
@@ -0,0 +1,712 @@
|
|||||||
|
#![allow(dead_code)] // Phase 1: methods consumed as screens are implemented
|
||||||
|
|
||||||
|
//! Full FrankenTUI Model implementation for the lore TUI.
|
||||||
|
//!
|
||||||
|
//! LoreApp is the central coordinator: it owns all state, dispatches
|
||||||
|
//! messages through a 5-stage key pipeline, records crash context
|
||||||
|
//! breadcrumbs, manages async tasks via the supervisor, and routes
|
||||||
|
//! view() to per-screen render functions.
|
||||||
|
|
||||||
|
use chrono::TimeDelta;
|
||||||
|
use ftui::{Cmd, Event, Frame, KeyCode, KeyEvent, Model, Modifiers};
|
||||||
|
|
||||||
|
use crate::clock::{Clock, SystemClock};
|
||||||
|
use crate::commands::{CommandRegistry, build_registry};
|
||||||
|
use crate::crash_context::{CrashContext, CrashEvent};
|
||||||
|
use crate::db::DbManager;
|
||||||
|
use crate::message::{InputMode, Msg, Screen};
|
||||||
|
use crate::navigation::NavigationStack;
|
||||||
|
use crate::state::{AppState, LoadState};
|
||||||
|
use crate::task_supervisor::{TaskKey, TaskSupervisor};
|
||||||
|
|
||||||
|
/// Timeout for the g-prefix key sequence.
|
||||||
|
const GO_PREFIX_TIMEOUT: TimeDelta = TimeDelta::milliseconds(500);
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// LoreApp
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Root model for the lore TUI.
|
||||||
|
///
|
||||||
|
/// Owns all state and implements the FrankenTUI Model trait. The
|
||||||
|
/// update() method is the single entry point for all state transitions.
|
||||||
|
pub struct LoreApp {
|
||||||
|
pub state: AppState,
|
||||||
|
pub navigation: NavigationStack,
|
||||||
|
pub supervisor: TaskSupervisor,
|
||||||
|
pub crash_context: CrashContext,
|
||||||
|
pub command_registry: CommandRegistry,
|
||||||
|
pub input_mode: InputMode,
|
||||||
|
pub clock: Box<dyn Clock>,
|
||||||
|
pub db: Option<DbManager>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LoreApp {
|
||||||
|
/// Create a new LoreApp with default state.
|
||||||
|
///
|
||||||
|
/// Uses a real system clock and no DB connection (set separately).
|
||||||
|
#[must_use]
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
state: AppState::default(),
|
||||||
|
navigation: NavigationStack::new(),
|
||||||
|
supervisor: TaskSupervisor::new(),
|
||||||
|
crash_context: CrashContext::new(),
|
||||||
|
command_registry: build_registry(),
|
||||||
|
input_mode: InputMode::Normal,
|
||||||
|
clock: Box::new(SystemClock),
|
||||||
|
db: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a LoreApp for testing with a custom clock.
|
||||||
|
#[cfg(test)]
|
||||||
|
fn with_clock(clock: Box<dyn Clock>) -> Self {
|
||||||
|
Self {
|
||||||
|
clock,
|
||||||
|
..Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
// Key dispatch
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Normalize terminal key variants for cross-terminal consistency.
|
||||||
|
fn normalize_key(key: &mut KeyEvent) {
|
||||||
|
// BackTab -> Shift+Tab canonical form.
|
||||||
|
if key.code == KeyCode::BackTab {
|
||||||
|
key.code = KeyCode::Tab;
|
||||||
|
key.modifiers |= Modifiers::SHIFT;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// 5-stage key dispatch pipeline.
|
||||||
|
///
|
||||||
|
/// Returns the Cmd to execute (Quit, None, or a task command).
|
||||||
|
fn interpret_key(&mut self, mut key: KeyEvent) -> Cmd<Msg> {
|
||||||
|
Self::normalize_key(&mut key);
|
||||||
|
|
||||||
|
let screen = self.navigation.current().clone();
|
||||||
|
|
||||||
|
// Record key press in crash context.
|
||||||
|
self.crash_context.push(CrashEvent::KeyPress {
|
||||||
|
key: format!("{:?}", key.code),
|
||||||
|
mode: format!("{:?}", self.input_mode),
|
||||||
|
screen: screen.label().to_string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- Stage 1: Quit check ---
|
||||||
|
// Ctrl+C always quits regardless of mode.
|
||||||
|
if key.code == KeyCode::Char('c') && key.modifiers.contains(Modifiers::CTRL) {
|
||||||
|
return Cmd::quit();
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Stage 2: InputMode routing ---
|
||||||
|
match &self.input_mode {
|
||||||
|
InputMode::Text => {
|
||||||
|
return self.handle_text_mode_key(&key, &screen);
|
||||||
|
}
|
||||||
|
InputMode::Palette => {
|
||||||
|
return self.handle_palette_mode_key(&key, &screen);
|
||||||
|
}
|
||||||
|
InputMode::GoPrefix { started_at } => {
|
||||||
|
let elapsed = self.clock.now().signed_duration_since(*started_at);
|
||||||
|
if elapsed > GO_PREFIX_TIMEOUT {
|
||||||
|
// Timeout expired — cancel prefix and re-process as normal.
|
||||||
|
self.input_mode = InputMode::Normal;
|
||||||
|
} else {
|
||||||
|
return self.handle_go_prefix_key(&key, &screen);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
InputMode::Normal => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Stage 3: Global shortcuts (Normal mode) ---
|
||||||
|
// 'q' quits.
|
||||||
|
if key.code == KeyCode::Char('q') && key.modifiers == Modifiers::NONE {
|
||||||
|
return Cmd::quit();
|
||||||
|
}
|
||||||
|
|
||||||
|
// 'g' starts prefix sequence.
|
||||||
|
if self
|
||||||
|
.command_registry
|
||||||
|
.is_sequence_starter(&key.code, &key.modifiers)
|
||||||
|
{
|
||||||
|
self.input_mode = InputMode::GoPrefix {
|
||||||
|
started_at: self.clock.now(),
|
||||||
|
};
|
||||||
|
return Cmd::none();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Registry-based single-key lookup.
|
||||||
|
if let Some(cmd_def) =
|
||||||
|
self.command_registry
|
||||||
|
.lookup_key(&key.code, &key.modifiers, &screen, &self.input_mode)
|
||||||
|
{
|
||||||
|
return self.execute_command(cmd_def.id, &screen);
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Stage 4: Screen-local keys ---
|
||||||
|
// Delegated to AppState::interpret_screen_key in future phases.
|
||||||
|
|
||||||
|
// --- Stage 5: Fallback (unhandled) ---
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle keys in Text input mode.
|
||||||
|
///
|
||||||
|
/// Only Esc and Ctrl+P pass through; everything else is consumed by
|
||||||
|
/// the focused text widget (handled in future phases).
|
||||||
|
fn handle_text_mode_key(&mut self, key: &KeyEvent, screen: &Screen) -> Cmd<Msg> {
|
||||||
|
// Esc blurs the text input.
|
||||||
|
if key.code == KeyCode::Escape {
|
||||||
|
self.state.blur_text_focus();
|
||||||
|
self.input_mode = InputMode::Normal;
|
||||||
|
return Cmd::none();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ctrl+P opens palette even in text mode.
|
||||||
|
if let Some(cmd_def) =
|
||||||
|
self.command_registry
|
||||||
|
.lookup_key(&key.code, &key.modifiers, screen, &InputMode::Text)
|
||||||
|
{
|
||||||
|
return self.execute_command(cmd_def.id, screen);
|
||||||
|
}
|
||||||
|
|
||||||
|
// All other keys consumed by text widget (future).
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle keys in Palette mode.
|
||||||
|
fn handle_palette_mode_key(&mut self, key: &KeyEvent, _screen: &Screen) -> Cmd<Msg> {
|
||||||
|
if key.code == KeyCode::Escape {
|
||||||
|
self.input_mode = InputMode::Normal;
|
||||||
|
return Cmd::none();
|
||||||
|
}
|
||||||
|
// Palette key dispatch will be expanded in the palette widget phase.
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle the second key of a g-prefix sequence.
|
||||||
|
fn handle_go_prefix_key(&mut self, key: &KeyEvent, screen: &Screen) -> Cmd<Msg> {
|
||||||
|
self.input_mode = InputMode::Normal;
|
||||||
|
|
||||||
|
if let Some(cmd_def) = self.command_registry.complete_sequence(
|
||||||
|
&KeyCode::Char('g'),
|
||||||
|
&Modifiers::NONE,
|
||||||
|
&key.code,
|
||||||
|
&key.modifiers,
|
||||||
|
screen,
|
||||||
|
) {
|
||||||
|
return self.execute_command(cmd_def.id, screen);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Invalid second key — cancel prefix silently.
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Execute a command by ID.
|
||||||
|
fn execute_command(&mut self, id: &str, _screen: &Screen) -> Cmd<Msg> {
|
||||||
|
match id {
|
||||||
|
"quit" => Cmd::quit(),
|
||||||
|
"go_back" => {
|
||||||
|
self.navigation.pop();
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
"show_help" => {
|
||||||
|
self.state.show_help = !self.state.show_help;
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
"command_palette" => {
|
||||||
|
self.input_mode = InputMode::Palette;
|
||||||
|
self.state.command_palette.query_focused = true;
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
"open_in_browser" => {
|
||||||
|
// Will dispatch OpenInBrowser msg in future phase.
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
"show_cli" => {
|
||||||
|
// Will show CLI equivalent in future phase.
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
"go_home" => self.navigate_to(Screen::Dashboard),
|
||||||
|
"go_issues" => self.navigate_to(Screen::IssueList),
|
||||||
|
"go_mrs" => self.navigate_to(Screen::MrList),
|
||||||
|
"go_search" => self.navigate_to(Screen::Search),
|
||||||
|
"go_timeline" => self.navigate_to(Screen::Timeline),
|
||||||
|
"go_who" => self.navigate_to(Screen::Who),
|
||||||
|
"go_sync" => self.navigate_to(Screen::Sync),
|
||||||
|
"jump_back" => {
|
||||||
|
self.navigation.jump_back();
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
"jump_forward" => {
|
||||||
|
self.navigation.jump_forward();
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
"move_down" | "move_up" | "select_item" | "focus_filter" | "scroll_to_top" => {
|
||||||
|
// Screen-specific actions — delegated in future phases.
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
_ => Cmd::none(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
// Navigation helpers
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Navigate to a screen, pushing the nav stack and starting a data load.
|
||||||
|
fn navigate_to(&mut self, screen: Screen) -> Cmd<Msg> {
|
||||||
|
let screen_label = screen.label().to_string();
|
||||||
|
let current_label = self.navigation.current().label().to_string();
|
||||||
|
|
||||||
|
self.crash_context.push(CrashEvent::StateTransition {
|
||||||
|
from: current_label,
|
||||||
|
to: screen_label,
|
||||||
|
});
|
||||||
|
|
||||||
|
self.navigation.push(screen.clone());
|
||||||
|
self.state
|
||||||
|
.set_loading(screen.clone(), LoadState::Refreshing);
|
||||||
|
|
||||||
|
// Spawn supervised task for data loading (placeholder — actual DB
|
||||||
|
// query dispatch comes in Phase 2 screen implementations).
|
||||||
|
let _handle = self.supervisor.submit(TaskKey::LoadScreen(screen));
|
||||||
|
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
// Message dispatch (non-key)
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Handle non-key messages.
|
||||||
|
fn handle_msg(&mut self, msg: Msg) -> Cmd<Msg> {
|
||||||
|
// Record in crash context.
|
||||||
|
self.crash_context.push(CrashEvent::MsgDispatched {
|
||||||
|
msg_name: format!("{msg:?}")
|
||||||
|
.split('(')
|
||||||
|
.next()
|
||||||
|
.unwrap_or("?")
|
||||||
|
.to_string(),
|
||||||
|
screen: self.navigation.current().label().to_string(),
|
||||||
|
});
|
||||||
|
|
||||||
|
match msg {
|
||||||
|
Msg::Quit => Cmd::quit(),
|
||||||
|
|
||||||
|
// --- Navigation ---
|
||||||
|
Msg::NavigateTo(screen) => self.navigate_to(screen),
|
||||||
|
Msg::GoBack => {
|
||||||
|
self.navigation.pop();
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
Msg::GoForward => {
|
||||||
|
self.navigation.go_forward();
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
Msg::GoHome => self.navigate_to(Screen::Dashboard),
|
||||||
|
Msg::JumpBack(_) => {
|
||||||
|
self.navigation.jump_back();
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
Msg::JumpForward(_) => {
|
||||||
|
self.navigation.jump_forward();
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Error ---
|
||||||
|
Msg::Error(err) => {
|
||||||
|
self.state.set_error(err.to_string());
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Help / UI ---
|
||||||
|
Msg::ShowHelp => {
|
||||||
|
self.state.show_help = !self.state.show_help;
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
Msg::BlurTextInput => {
|
||||||
|
self.state.blur_text_focus();
|
||||||
|
self.input_mode = InputMode::Normal;
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Terminal ---
|
||||||
|
Msg::Resize { width, height } => {
|
||||||
|
self.state.terminal_size = (width, height);
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
Msg::Tick => Cmd::none(),
|
||||||
|
|
||||||
|
// --- Loaded results (stale guard) ---
|
||||||
|
Msg::IssueListLoaded { generation, rows } => {
|
||||||
|
if self
|
||||||
|
.supervisor
|
||||||
|
.is_current(&TaskKey::LoadScreen(Screen::IssueList), generation)
|
||||||
|
{
|
||||||
|
self.state.issue_list.rows = rows;
|
||||||
|
self.state.set_loading(Screen::IssueList, LoadState::Idle);
|
||||||
|
self.supervisor
|
||||||
|
.complete(&TaskKey::LoadScreen(Screen::IssueList), generation);
|
||||||
|
}
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
Msg::MrListLoaded { generation, rows } => {
|
||||||
|
if self
|
||||||
|
.supervisor
|
||||||
|
.is_current(&TaskKey::LoadScreen(Screen::MrList), generation)
|
||||||
|
{
|
||||||
|
self.state.mr_list.rows = rows;
|
||||||
|
self.state.set_loading(Screen::MrList, LoadState::Idle);
|
||||||
|
self.supervisor
|
||||||
|
.complete(&TaskKey::LoadScreen(Screen::MrList), generation);
|
||||||
|
}
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
Msg::DashboardLoaded { generation, data } => {
|
||||||
|
if self
|
||||||
|
.supervisor
|
||||||
|
.is_current(&TaskKey::LoadScreen(Screen::Dashboard), generation)
|
||||||
|
{
|
||||||
|
self.state.dashboard.issue_count = data.issue_count;
|
||||||
|
self.state.dashboard.mr_count = data.mr_count;
|
||||||
|
self.state.set_loading(Screen::Dashboard, LoadState::Idle);
|
||||||
|
self.supervisor
|
||||||
|
.complete(&TaskKey::LoadScreen(Screen::Dashboard), generation);
|
||||||
|
}
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
// All other message variants: no-op for now.
|
||||||
|
// Future phases will fill these in as screens are implemented.
|
||||||
|
_ => Cmd::none(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for LoreApp {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Model for LoreApp {
|
||||||
|
type Message = Msg;
|
||||||
|
|
||||||
|
fn init(&mut self) -> Cmd<Self::Message> {
|
||||||
|
// Install crash context panic hook.
|
||||||
|
CrashContext::install_panic_hook(&self.crash_context);
|
||||||
|
CrashContext::prune_crash_files();
|
||||||
|
|
||||||
|
// Navigate to dashboard (will trigger data load in future phase).
|
||||||
|
Cmd::none()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(&mut self, msg: Self::Message) -> Cmd<Self::Message> {
|
||||||
|
// Route raw key events through the 5-stage pipeline.
|
||||||
|
if let Msg::RawEvent(Event::Key(key)) = msg {
|
||||||
|
return self.interpret_key(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Everything else goes through message dispatch.
|
||||||
|
self.handle_msg(msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn view(&self, frame: &mut Frame) {
|
||||||
|
crate::view::render_screen(frame, self);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify that `App::fullscreen(LoreApp::new()).run()` compiles.
|
||||||
|
#[cfg(test)]
|
||||||
|
fn _assert_app_fullscreen_compiles() {
|
||||||
|
fn _inner() {
|
||||||
|
use ftui::App;
|
||||||
|
let _app_builder = App::fullscreen(LoreApp::new());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify that `App::inline(LoreApp::new(), 12).run()` compiles.
|
||||||
|
#[cfg(test)]
|
||||||
|
fn _assert_app_inline_compiles() {
|
||||||
|
fn _inner() {
|
||||||
|
use ftui::App;
|
||||||
|
let _app_builder = App::inline(LoreApp::new(), 12);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::clock::FakeClock;
|
||||||
|
|
||||||
|
fn test_app() -> LoreApp {
|
||||||
|
LoreApp::with_clock(Box::new(FakeClock::new(chrono::Utc::now())))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_lore_app_init_returns_none() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let cmd = app.init();
|
||||||
|
assert!(matches!(cmd, Cmd::None));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_lore_app_quit_returns_quit_cmd() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let cmd = app.update(Msg::Quit);
|
||||||
|
assert!(matches!(cmd, Cmd::Quit));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_lore_app_tick_returns_none() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let cmd = app.update(Msg::Tick);
|
||||||
|
assert!(matches!(cmd, Cmd::None));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_lore_app_navigate_to_updates_nav_stack() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let cmd = app.update(Msg::NavigateTo(Screen::IssueList));
|
||||||
|
assert!(matches!(cmd, Cmd::None));
|
||||||
|
assert!(app.navigation.is_at(&Screen::IssueList));
|
||||||
|
assert_eq!(app.navigation.depth(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_lore_app_go_back() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.update(Msg::NavigateTo(Screen::IssueList));
|
||||||
|
app.update(Msg::GoBack);
|
||||||
|
assert!(app.navigation.is_at(&Screen::Dashboard));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_lore_app_go_forward() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.update(Msg::NavigateTo(Screen::IssueList));
|
||||||
|
app.update(Msg::GoBack);
|
||||||
|
app.update(Msg::GoForward);
|
||||||
|
assert!(app.navigation.is_at(&Screen::IssueList));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ctrl_c_always_quits() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let key = KeyEvent::new(KeyCode::Char('c')).with_modifiers(Modifiers::CTRL);
|
||||||
|
let cmd = app.update(Msg::RawEvent(Event::Key(key)));
|
||||||
|
assert!(matches!(cmd, Cmd::Quit));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_q_key_quits_in_normal_mode() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let key = KeyEvent::new(KeyCode::Char('q'));
|
||||||
|
let cmd = app.update(Msg::RawEvent(Event::Key(key)));
|
||||||
|
assert!(matches!(cmd, Cmd::Quit));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_q_key_blocked_in_text_mode() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.input_mode = InputMode::Text;
|
||||||
|
let key = KeyEvent::new(KeyCode::Char('q'));
|
||||||
|
let cmd = app.update(Msg::RawEvent(Event::Key(key)));
|
||||||
|
// q in text mode should NOT quit.
|
||||||
|
assert!(matches!(cmd, Cmd::None));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_esc_blurs_text_mode() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.input_mode = InputMode::Text;
|
||||||
|
app.state.search.query_focused = true;
|
||||||
|
|
||||||
|
let key = KeyEvent::new(KeyCode::Escape);
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key)));
|
||||||
|
|
||||||
|
assert!(matches!(app.input_mode, InputMode::Normal));
|
||||||
|
assert!(!app.state.has_text_focus());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_g_prefix_enters_go_mode() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let key = KeyEvent::new(KeyCode::Char('g'));
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key)));
|
||||||
|
assert!(matches!(app.input_mode, InputMode::GoPrefix { .. }));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_g_then_i_navigates_to_issues() {
|
||||||
|
let mut app = test_app();
|
||||||
|
|
||||||
|
// First key: 'g'
|
||||||
|
let key_g = KeyEvent::new(KeyCode::Char('g'));
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key_g)));
|
||||||
|
|
||||||
|
// Second key: 'i'
|
||||||
|
let key_i = KeyEvent::new(KeyCode::Char('i'));
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key_i)));
|
||||||
|
|
||||||
|
assert!(app.navigation.is_at(&Screen::IssueList));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_go_prefix_timeout_cancels() {
|
||||||
|
let clock = FakeClock::new(chrono::Utc::now());
|
||||||
|
let mut app = LoreApp::with_clock(Box::new(clock.clone()));
|
||||||
|
|
||||||
|
// Press 'g'.
|
||||||
|
let key_g = KeyEvent::new(KeyCode::Char('g'));
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key_g)));
|
||||||
|
assert!(matches!(app.input_mode, InputMode::GoPrefix { .. }));
|
||||||
|
|
||||||
|
// Advance clock past timeout.
|
||||||
|
clock.advance(TimeDelta::milliseconds(600));
|
||||||
|
|
||||||
|
// Press 'i' after timeout — should NOT navigate to issues.
|
||||||
|
let key_i = KeyEvent::new(KeyCode::Char('i'));
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key_i)));
|
||||||
|
|
||||||
|
// Should still be at Dashboard (no navigation happened).
|
||||||
|
assert!(app.navigation.is_at(&Screen::Dashboard));
|
||||||
|
assert!(matches!(app.input_mode, InputMode::Normal));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_show_help_toggles() {
|
||||||
|
let mut app = test_app();
|
||||||
|
assert!(!app.state.show_help);
|
||||||
|
|
||||||
|
app.update(Msg::ShowHelp);
|
||||||
|
assert!(app.state.show_help);
|
||||||
|
|
||||||
|
app.update(Msg::ShowHelp);
|
||||||
|
assert!(!app.state.show_help);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_error_msg_sets_toast() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.update(Msg::Error(crate::message::AppError::DbBusy));
|
||||||
|
assert!(app.state.error_toast.is_some());
|
||||||
|
assert!(app.state.error_toast.as_ref().unwrap().contains("busy"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_resize_updates_terminal_size() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.update(Msg::Resize {
|
||||||
|
width: 120,
|
||||||
|
height: 40,
|
||||||
|
});
|
||||||
|
assert_eq!(app.state.terminal_size, (120, 40));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_stale_result_dropped() {
|
||||||
|
let mut app = test_app();
|
||||||
|
|
||||||
|
// Submit two tasks for IssueList — second supersedes first.
|
||||||
|
let gen1 = app
|
||||||
|
.supervisor
|
||||||
|
.submit(TaskKey::LoadScreen(Screen::IssueList))
|
||||||
|
.generation;
|
||||||
|
let gen2 = app
|
||||||
|
.supervisor
|
||||||
|
.submit(TaskKey::LoadScreen(Screen::IssueList))
|
||||||
|
.generation;
|
||||||
|
|
||||||
|
// Stale result with gen1 should be ignored.
|
||||||
|
app.update(Msg::IssueListLoaded {
|
||||||
|
generation: gen1,
|
||||||
|
rows: vec![crate::message::IssueRow {
|
||||||
|
key: crate::message::EntityKey::issue(1, 1),
|
||||||
|
title: "stale".into(),
|
||||||
|
state: "opened".into(),
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
assert!(app.state.issue_list.rows.is_empty());
|
||||||
|
|
||||||
|
// Current result with gen2 should be applied.
|
||||||
|
app.update(Msg::IssueListLoaded {
|
||||||
|
generation: gen2,
|
||||||
|
rows: vec![crate::message::IssueRow {
|
||||||
|
key: crate::message::EntityKey::issue(1, 2),
|
||||||
|
title: "fresh".into(),
|
||||||
|
state: "opened".into(),
|
||||||
|
}],
|
||||||
|
});
|
||||||
|
assert_eq!(app.state.issue_list.rows.len(), 1);
|
||||||
|
assert_eq!(app.state.issue_list.rows[0].title, "fresh");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_crash_context_records_events() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.update(Msg::Tick);
|
||||||
|
app.update(Msg::NavigateTo(Screen::IssueList));
|
||||||
|
|
||||||
|
// Should have recorded at least 2 events.
|
||||||
|
assert!(app.crash_context.len() >= 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_navigate_sets_loading_state() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.update(Msg::NavigateTo(Screen::IssueList));
|
||||||
|
assert_eq!(
|
||||||
|
*app.state.load_state.get(&Screen::IssueList),
|
||||||
|
LoadState::Refreshing
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_command_palette_opens_from_ctrl_p() {
|
||||||
|
let mut app = test_app();
|
||||||
|
let key = KeyEvent::new(KeyCode::Char('p')).with_modifiers(Modifiers::CTRL);
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key)));
|
||||||
|
assert!(matches!(app.input_mode, InputMode::Palette));
|
||||||
|
assert!(app.state.command_palette.query_focused);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_esc_closes_palette() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.input_mode = InputMode::Palette;
|
||||||
|
|
||||||
|
let key = KeyEvent::new(KeyCode::Escape);
|
||||||
|
app.update(Msg::RawEvent(Event::Key(key)));
|
||||||
|
|
||||||
|
assert!(matches!(app.input_mode, InputMode::Normal));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_blur_text_input_msg() {
|
||||||
|
let mut app = test_app();
|
||||||
|
app.input_mode = InputMode::Text;
|
||||||
|
app.state.search.query_focused = true;
|
||||||
|
|
||||||
|
app.update(Msg::BlurTextInput);
|
||||||
|
|
||||||
|
assert!(matches!(app.input_mode, InputMode::Normal));
|
||||||
|
assert!(!app.state.has_text_focus());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_default_is_new() {
|
||||||
|
let app = LoreApp::default();
|
||||||
|
assert!(app.navigation.is_at(&Screen::Dashboard));
|
||||||
|
assert!(matches!(app.input_mode, InputMode::Normal));
|
||||||
|
}
|
||||||
|
}
|
||||||
151
crates/lore-tui/src/clock.rs
Normal file
151
crates/lore-tui/src/clock.rs
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
//! Injected clock for deterministic time in tests and consistent frame timestamps.
|
||||||
|
//!
|
||||||
|
//! All relative-time rendering (e.g., "3h ago") uses [`Clock::now()`] rather
|
||||||
|
//! than wall-clock time directly. This enables:
|
||||||
|
//! - Deterministic snapshot tests via [`FakeClock`]
|
||||||
|
//! - Consistent timestamps within a single frame render pass
|
||||||
|
|
||||||
|
use std::sync::{Arc, Mutex};
|
||||||
|
|
||||||
|
use chrono::{DateTime, TimeDelta, Utc};
|
||||||
|
|
||||||
|
/// Trait for obtaining the current time.
|
||||||
|
///
|
||||||
|
/// Inject via `Arc<dyn Clock>` to allow swapping between real and fake clocks.
|
||||||
|
pub trait Clock: Send + Sync {
|
||||||
|
/// Returns the current time.
|
||||||
|
fn now(&self) -> DateTime<Utc>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// SystemClock
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Real wall-clock time via `chrono::Utc::now()`.
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
|
pub struct SystemClock;
|
||||||
|
|
||||||
|
impl Clock for SystemClock {
|
||||||
|
fn now(&self) -> DateTime<Utc> {
|
||||||
|
Utc::now()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// FakeClock
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// A controllable clock for tests. Returns a frozen time that can be
|
||||||
|
/// advanced or set explicitly.
|
||||||
|
///
|
||||||
|
/// `FakeClock` is `Clone` (shares the inner `Arc`) and `Send + Sync`
|
||||||
|
/// for use across `Cmd::task` threads.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct FakeClock {
|
||||||
|
inner: Arc<Mutex<DateTime<Utc>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FakeClock {
|
||||||
|
/// Create a fake clock frozen at the given time.
|
||||||
|
#[must_use]
|
||||||
|
pub fn new(time: DateTime<Utc>) -> Self {
|
||||||
|
Self {
|
||||||
|
inner: Arc::new(Mutex::new(time)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Advance the clock by `duration`. Uses `checked_add` to handle overflow
|
||||||
|
/// gracefully — if the addition would overflow, the time is not changed.
|
||||||
|
pub fn advance(&self, duration: TimeDelta) {
|
||||||
|
let mut guard = self.inner.lock().expect("FakeClock mutex poisoned");
|
||||||
|
if let Some(advanced) = guard.checked_add_signed(duration) {
|
||||||
|
*guard = advanced;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the clock to an exact time.
|
||||||
|
pub fn set(&self, time: DateTime<Utc>) {
|
||||||
|
let mut guard = self.inner.lock().expect("FakeClock mutex poisoned");
|
||||||
|
*guard = time;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Clock for FakeClock {
|
||||||
|
fn now(&self) -> DateTime<Utc> {
|
||||||
|
*self.inner.lock().expect("FakeClock mutex poisoned")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    // `Datelike` was previously imported at the bottom of the module,
    // after the tests that used it; keep all imports together up top.
    use chrono::{Datelike, TimeZone};

    /// Fixed reference instant shared by the tests: 2026-02-12 12:00:00 UTC.
    fn fixed_time() -> DateTime<Utc> {
        Utc.with_ymd_and_hms(2026, 2, 12, 12, 0, 0).unwrap()
    }

    #[test]
    fn test_fake_clock_frozen() {
        let clock = FakeClock::new(fixed_time());
        let t1 = clock.now();
        let t2 = clock.now();
        assert_eq!(t1, t2);
        assert_eq!(t1, fixed_time());
    }

    #[test]
    fn test_fake_clock_advance() {
        let clock = FakeClock::new(fixed_time());
        clock.advance(TimeDelta::hours(3));
        let expected = Utc.with_ymd_and_hms(2026, 2, 12, 15, 0, 0).unwrap();
        assert_eq!(clock.now(), expected);
    }

    #[test]
    fn test_fake_clock_set() {
        let clock = FakeClock::new(fixed_time());
        let new_time = Utc.with_ymd_and_hms(2030, 1, 1, 0, 0, 0).unwrap();
        clock.set(new_time);
        assert_eq!(clock.now(), new_time);
    }

    #[test]
    fn test_fake_clock_clone_shares_state() {
        let clock1 = FakeClock::new(fixed_time());
        let clock2 = clock1.clone();
        clock1.advance(TimeDelta::minutes(30));
        // Both clones see the advanced time.
        assert_eq!(clock1.now(), clock2.now());
    }

    #[test]
    fn test_system_clock_returns_reasonable_time() {
        let clock = SystemClock;
        let now = clock.now();
        // Sanity: time should be after 2025.
        assert!(now.year() >= 2025);
    }

    #[test]
    fn test_fake_clock_is_send_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<FakeClock>();
        assert_send_sync::<SystemClock>();
    }

    #[test]
    fn test_clock_trait_object_works() {
        let fake: Arc<dyn Clock> = Arc::new(FakeClock::new(fixed_time()));
        assert_eq!(fake.now(), fixed_time());

        let real: Arc<dyn Clock> = Arc::new(SystemClock);
        let _ = real.now(); // Just verify it doesn't panic.
    }
}
|
||||||
807
crates/lore-tui/src/commands.rs
Normal file
807
crates/lore-tui/src/commands.rs
Normal file
@@ -0,0 +1,807 @@
|
|||||||
|
#![allow(dead_code)] // Phase 1: consumed by LoreApp in bd-6pmy
|
||||||
|
|
||||||
|
//! Command registry — single source of truth for all TUI actions.
|
||||||
|
//!
|
||||||
|
//! Every keybinding, palette entry, help text, CLI equivalent, and
|
||||||
|
//! status hint is generated from [`CommandRegistry`]. No hardcoded
|
||||||
|
//! duplicate maps exist in view/state modules.
|
||||||
|
//!
|
||||||
|
//! Supports single-key and two-key sequences (g-prefix vim bindings).
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use ftui::{KeyCode, Modifiers};
|
||||||
|
|
||||||
|
use crate::message::{InputMode, Screen};
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Key formatting
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Format a key code + modifiers as a human-readable string.
|
||||||
|
fn format_key(code: KeyCode, modifiers: Modifiers) -> String {
|
||||||
|
let mut parts = Vec::new();
|
||||||
|
if modifiers.contains(Modifiers::CTRL) {
|
||||||
|
parts.push("Ctrl");
|
||||||
|
}
|
||||||
|
if modifiers.contains(Modifiers::ALT) {
|
||||||
|
parts.push("Alt");
|
||||||
|
}
|
||||||
|
if modifiers.contains(Modifiers::SHIFT) {
|
||||||
|
parts.push("Shift");
|
||||||
|
}
|
||||||
|
let key_name = match code {
|
||||||
|
KeyCode::Char(c) => c.to_string(),
|
||||||
|
KeyCode::Enter => "Enter".to_string(),
|
||||||
|
KeyCode::Escape => "Esc".to_string(),
|
||||||
|
KeyCode::Tab => "Tab".to_string(),
|
||||||
|
KeyCode::Backspace => "Backspace".to_string(),
|
||||||
|
KeyCode::Delete => "Del".to_string(),
|
||||||
|
KeyCode::Up => "Up".to_string(),
|
||||||
|
KeyCode::Down => "Down".to_string(),
|
||||||
|
KeyCode::Left => "Left".to_string(),
|
||||||
|
KeyCode::Right => "Right".to_string(),
|
||||||
|
KeyCode::Home => "Home".to_string(),
|
||||||
|
KeyCode::End => "End".to_string(),
|
||||||
|
KeyCode::PageUp => "PgUp".to_string(),
|
||||||
|
KeyCode::PageDown => "PgDn".to_string(),
|
||||||
|
KeyCode::F(n) => format!("F{n}"),
|
||||||
|
_ => "?".to_string(),
|
||||||
|
};
|
||||||
|
parts.push(&key_name);
|
||||||
|
// We need to own the joined string.
|
||||||
|
let joined: String = parts.join("+");
|
||||||
|
joined
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// KeyCombo
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// A keybinding: either a single key or a two-key sequence.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||||
|
pub enum KeyCombo {
|
||||||
|
/// Single key press (e.g., `q`, `Esc`, `Ctrl+P`).
|
||||||
|
Single { code: KeyCode, modifiers: Modifiers },
|
||||||
|
/// Two-key sequence (e.g., `g` then `i` for go-to-issues).
|
||||||
|
Sequence {
|
||||||
|
first_code: KeyCode,
|
||||||
|
first_modifiers: Modifiers,
|
||||||
|
second_code: KeyCode,
|
||||||
|
second_modifiers: Modifiers,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl KeyCombo {
|
||||||
|
/// Convenience: single key with no modifiers.
|
||||||
|
#[must_use]
|
||||||
|
pub const fn key(code: KeyCode) -> Self {
|
||||||
|
Self::Single {
|
||||||
|
code,
|
||||||
|
modifiers: Modifiers::NONE,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convenience: single key with Ctrl modifier.
|
||||||
|
#[must_use]
|
||||||
|
pub const fn ctrl(code: KeyCode) -> Self {
|
||||||
|
Self::Single {
|
||||||
|
code,
|
||||||
|
modifiers: Modifiers::CTRL,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convenience: g-prefix sequence (g + char).
|
||||||
|
#[must_use]
|
||||||
|
pub const fn g_then(c: char) -> Self {
|
||||||
|
Self::Sequence {
|
||||||
|
first_code: KeyCode::Char('g'),
|
||||||
|
first_modifiers: Modifiers::NONE,
|
||||||
|
second_code: KeyCode::Char(c),
|
||||||
|
second_modifiers: Modifiers::NONE,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Human-readable display string for this key combo.
|
||||||
|
#[must_use]
|
||||||
|
pub fn display(&self) -> String {
|
||||||
|
match self {
|
||||||
|
Self::Single { code, modifiers } => format_key(*code, *modifiers),
|
||||||
|
Self::Sequence {
|
||||||
|
first_code,
|
||||||
|
first_modifiers,
|
||||||
|
second_code,
|
||||||
|
second_modifiers,
|
||||||
|
} => {
|
||||||
|
let first = format_key(*first_code, *first_modifiers);
|
||||||
|
let second = format_key(*second_code, *second_modifiers);
|
||||||
|
format!("{first} {second}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether this combo starts with the given key.
|
||||||
|
#[must_use]
|
||||||
|
pub fn starts_with(&self, code: &KeyCode, modifiers: &Modifiers) -> bool {
|
||||||
|
match self {
|
||||||
|
Self::Single {
|
||||||
|
code: c,
|
||||||
|
modifiers: m,
|
||||||
|
} => c == code && m == modifiers,
|
||||||
|
Self::Sequence {
|
||||||
|
first_code,
|
||||||
|
first_modifiers,
|
||||||
|
..
|
||||||
|
} => first_code == code && first_modifiers == modifiers,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// ScreenFilter
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Specifies which screens a command is available on.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub enum ScreenFilter {
|
||||||
|
/// Available on all screens.
|
||||||
|
Global,
|
||||||
|
/// Available only on specific screens.
|
||||||
|
Only(Vec<Screen>),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScreenFilter {
|
||||||
|
/// Whether the command is available on the given screen.
|
||||||
|
#[must_use]
|
||||||
|
pub fn matches(&self, screen: &Screen) -> bool {
|
||||||
|
match self {
|
||||||
|
Self::Global => true,
|
||||||
|
Self::Only(screens) => screens.contains(screen),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// CommandDef
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Unique command identifier (stable string key, e.g. "quit").
pub type CommandId = &'static str;

/// A registered command with its keybinding, help text, and scope.
///
/// Instances are created only in `build_registry`; every UI surface
/// (keymap, palette, help overlay, status bar) derives from these fields.
#[derive(Debug, Clone)]
pub struct CommandDef {
    /// Unique identifier (e.g., "quit", "go_issues").
    pub id: CommandId,
    /// Human-readable label for palette and help overlay.
    pub label: &'static str,
    /// Keybinding (if any). `None` means palette-only.
    pub keybinding: Option<KeyCombo>,
    /// Equivalent `lore` CLI command (for "Show CLI equivalent" feature).
    pub cli_equivalent: Option<&'static str>,
    /// Description for help overlay.
    pub help_text: &'static str,
    /// Short hint for status bar (e.g., "q:quit"); empty string = no hint.
    pub status_hint: &'static str,
    /// Which screens this command is available on.
    pub available_in: ScreenFilter,
    /// Whether this command works in Text input mode.
    pub available_in_text_mode: bool,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// CommandRegistry
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Single source of truth for all TUI commands.
|
||||||
|
///
|
||||||
|
/// Built once at startup via [`build_registry`]. Provides O(1) lookup
|
||||||
|
/// by keybinding and per-screen filtering.
|
||||||
|
pub struct CommandRegistry {
|
||||||
|
commands: Vec<CommandDef>,
|
||||||
|
/// Single-key -> command IDs that start with this key.
|
||||||
|
by_single_key: HashMap<(KeyCode, Modifiers), Vec<usize>>,
|
||||||
|
/// Full sequence -> command index (for two-key combos).
|
||||||
|
by_sequence: HashMap<KeyCombo, usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CommandRegistry {
|
||||||
|
/// Look up a command by a single key press on a given screen and input mode.
|
||||||
|
///
|
||||||
|
/// Returns `None` if no matching command is found. For sequence starters
|
||||||
|
/// (like 'g'), returns `None` — use [`is_sequence_starter`] to detect
|
||||||
|
/// that case.
|
||||||
|
#[must_use]
|
||||||
|
pub fn lookup_key(
|
||||||
|
&self,
|
||||||
|
code: &KeyCode,
|
||||||
|
modifiers: &Modifiers,
|
||||||
|
screen: &Screen,
|
||||||
|
mode: &InputMode,
|
||||||
|
) -> Option<&CommandDef> {
|
||||||
|
let is_text = matches!(mode, InputMode::Text);
|
||||||
|
let key = (*code, *modifiers);
|
||||||
|
|
||||||
|
let indices = self.by_single_key.get(&key)?;
|
||||||
|
for &idx in indices {
|
||||||
|
let cmd = &self.commands[idx];
|
||||||
|
if !cmd.available_in.matches(screen) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if is_text && !cmd.available_in_text_mode {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// Only match Single combos here, not sequence starters.
|
||||||
|
if let Some(KeyCombo::Single { .. }) = &cmd.keybinding {
|
||||||
|
return Some(cmd);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Complete a two-key sequence.
|
||||||
|
///
|
||||||
|
/// Called after the first key of a sequence is detected (e.g., after 'g').
|
||||||
|
#[must_use]
|
||||||
|
pub fn complete_sequence(
|
||||||
|
&self,
|
||||||
|
first_code: &KeyCode,
|
||||||
|
first_modifiers: &Modifiers,
|
||||||
|
second_code: &KeyCode,
|
||||||
|
second_modifiers: &Modifiers,
|
||||||
|
screen: &Screen,
|
||||||
|
) -> Option<&CommandDef> {
|
||||||
|
let combo = KeyCombo::Sequence {
|
||||||
|
first_code: *first_code,
|
||||||
|
first_modifiers: *first_modifiers,
|
||||||
|
second_code: *second_code,
|
||||||
|
second_modifiers: *second_modifiers,
|
||||||
|
};
|
||||||
|
let &idx = self.by_sequence.get(&combo)?;
|
||||||
|
let cmd = &self.commands[idx];
|
||||||
|
if cmd.available_in.matches(screen) {
|
||||||
|
Some(cmd)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether a key starts a multi-key sequence (e.g., 'g').
|
||||||
|
#[must_use]
|
||||||
|
pub fn is_sequence_starter(&self, code: &KeyCode, modifiers: &Modifiers) -> bool {
|
||||||
|
self.by_sequence
|
||||||
|
.keys()
|
||||||
|
.any(|combo| combo.starts_with(code, modifiers))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Commands available for the command palette on a given screen.
|
||||||
|
///
|
||||||
|
/// Returned sorted by label.
|
||||||
|
#[must_use]
|
||||||
|
pub fn palette_entries(&self, screen: &Screen) -> Vec<&CommandDef> {
|
||||||
|
let mut entries: Vec<&CommandDef> = self
|
||||||
|
.commands
|
||||||
|
.iter()
|
||||||
|
.filter(|c| c.available_in.matches(screen))
|
||||||
|
.collect();
|
||||||
|
entries.sort_by_key(|c| c.label);
|
||||||
|
entries
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Commands for the help overlay on a given screen.
|
||||||
|
#[must_use]
|
||||||
|
pub fn help_entries(&self, screen: &Screen) -> Vec<&CommandDef> {
|
||||||
|
self.commands
|
||||||
|
.iter()
|
||||||
|
.filter(|c| c.available_in.matches(screen))
|
||||||
|
.filter(|c| c.keybinding.is_some())
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Status bar hints for the current screen.
|
||||||
|
#[must_use]
|
||||||
|
pub fn status_hints(&self, screen: &Screen) -> Vec<&str> {
|
||||||
|
self.commands
|
||||||
|
.iter()
|
||||||
|
.filter(|c| c.available_in.matches(screen))
|
||||||
|
.filter(|c| !c.status_hint.is_empty())
|
||||||
|
.map(|c| c.status_hint)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Total number of registered commands.
|
||||||
|
#[must_use]
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.commands.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether the registry has no commands.
|
||||||
|
#[must_use]
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.commands.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// build_registry
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Build the command registry with all TUI commands.
///
/// This is the single source of truth — every keybinding, help text,
/// and palette entry originates here. Edit the table below to add or
/// change a command; `build_from_defs` derives all lookup indexes.
#[must_use]
pub fn build_registry() -> CommandRegistry {
    let commands = vec![
        // --- Global commands ---
        CommandDef {
            id: "quit",
            label: "Quit",
            keybinding: Some(KeyCombo::key(KeyCode::Char('q'))),
            cli_equivalent: None,
            help_text: "Exit the TUI",
            status_hint: "q:quit",
            available_in: ScreenFilter::Global,
            // 'q' must type a literal character in text fields.
            available_in_text_mode: false,
        },
        CommandDef {
            id: "go_back",
            label: "Go Back",
            keybinding: Some(KeyCombo::key(KeyCode::Escape)),
            cli_equivalent: None,
            help_text: "Go back to previous screen",
            status_hint: "esc:back",
            available_in: ScreenFilter::Global,
            // Esc also exits text mode, so it stays live there.
            available_in_text_mode: true,
        },
        CommandDef {
            id: "show_help",
            label: "Help",
            keybinding: Some(KeyCombo::key(KeyCode::Char('?'))),
            cli_equivalent: None,
            help_text: "Show keybinding help overlay",
            status_hint: "?:help",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "command_palette",
            label: "Command Palette",
            keybinding: Some(KeyCombo::ctrl(KeyCode::Char('p'))),
            cli_equivalent: None,
            help_text: "Open command palette",
            status_hint: "C-p:palette",
            available_in: ScreenFilter::Global,
            available_in_text_mode: true,
        },
        CommandDef {
            id: "open_in_browser",
            label: "Open in Browser",
            keybinding: Some(KeyCombo::key(KeyCode::Char('o'))),
            cli_equivalent: None,
            help_text: "Open current entity in browser",
            status_hint: "o:browser",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "show_cli",
            label: "Show CLI Equivalent",
            keybinding: Some(KeyCombo::key(KeyCode::Char('!'))),
            cli_equivalent: None,
            help_text: "Show equivalent lore CLI command",
            // Empty hint: not shown in the status bar.
            status_hint: "",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        // --- Navigation: g-prefix sequences ---
        CommandDef {
            id: "go_home",
            label: "Go to Dashboard",
            keybinding: Some(KeyCombo::g_then('h')),
            cli_equivalent: None,
            help_text: "Jump to dashboard",
            status_hint: "gh:home",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "go_issues",
            label: "Go to Issues",
            keybinding: Some(KeyCombo::g_then('i')),
            cli_equivalent: Some("lore issues"),
            help_text: "Jump to issue list",
            status_hint: "gi:issues",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "go_mrs",
            label: "Go to Merge Requests",
            keybinding: Some(KeyCombo::g_then('m')),
            cli_equivalent: Some("lore mrs"),
            help_text: "Jump to MR list",
            status_hint: "gm:mrs",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "go_search",
            label: "Go to Search",
            keybinding: Some(KeyCombo::g_then('/')),
            cli_equivalent: Some("lore search"),
            help_text: "Jump to search",
            status_hint: "g/:search",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "go_timeline",
            label: "Go to Timeline",
            keybinding: Some(KeyCombo::g_then('t')),
            cli_equivalent: Some("lore timeline"),
            help_text: "Jump to timeline",
            status_hint: "gt:timeline",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "go_who",
            label: "Go to Who",
            keybinding: Some(KeyCombo::g_then('w')),
            cli_equivalent: Some("lore who"),
            help_text: "Jump to people intelligence",
            status_hint: "gw:who",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "go_sync",
            label: "Go to Sync",
            keybinding: Some(KeyCombo::g_then('s')),
            cli_equivalent: Some("lore sync"),
            help_text: "Jump to sync status",
            status_hint: "gs:sync",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        // --- Vim-style jump list ---
        CommandDef {
            id: "jump_back",
            label: "Jump Back",
            keybinding: Some(KeyCombo::ctrl(KeyCode::Char('o'))),
            cli_equivalent: None,
            help_text: "Jump backward through visited detail views",
            status_hint: "C-o:jump back",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        CommandDef {
            id: "jump_forward",
            label: "Jump Forward",
            keybinding: Some(KeyCombo::ctrl(KeyCode::Char('i'))),
            cli_equivalent: None,
            help_text: "Jump forward through visited detail views",
            status_hint: "",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
        // --- List navigation ---
        CommandDef {
            id: "move_down",
            label: "Move Down",
            keybinding: Some(KeyCombo::key(KeyCode::Char('j'))),
            cli_equivalent: None,
            help_text: "Move cursor down",
            status_hint: "j:down",
            // List-style screens only; 'j' has no meaning on Dashboard.
            available_in: ScreenFilter::Only(vec![
                Screen::IssueList,
                Screen::MrList,
                Screen::Search,
                Screen::Timeline,
            ]),
            available_in_text_mode: false,
        },
        CommandDef {
            id: "move_up",
            label: "Move Up",
            keybinding: Some(KeyCombo::key(KeyCode::Char('k'))),
            cli_equivalent: None,
            help_text: "Move cursor up",
            status_hint: "k:up",
            available_in: ScreenFilter::Only(vec![
                Screen::IssueList,
                Screen::MrList,
                Screen::Search,
                Screen::Timeline,
            ]),
            available_in_text_mode: false,
        },
        CommandDef {
            id: "select_item",
            label: "Select",
            keybinding: Some(KeyCombo::key(KeyCode::Enter)),
            cli_equivalent: None,
            help_text: "Open selected item",
            status_hint: "enter:open",
            available_in: ScreenFilter::Only(vec![
                Screen::IssueList,
                Screen::MrList,
                Screen::Search,
            ]),
            available_in_text_mode: false,
        },
        // --- Filter ---
        CommandDef {
            id: "focus_filter",
            label: "Filter",
            keybinding: Some(KeyCombo::key(KeyCode::Char('/'))),
            cli_equivalent: None,
            help_text: "Focus the filter input",
            status_hint: "/:filter",
            available_in: ScreenFilter::Only(vec![Screen::IssueList, Screen::MrList]),
            available_in_text_mode: false,
        },
        // --- Scroll ---
        CommandDef {
            id: "scroll_to_top",
            label: "Scroll to Top",
            keybinding: Some(KeyCombo::g_then('g')),
            cli_equivalent: None,
            help_text: "Scroll to the top of the current view",
            status_hint: "",
            available_in: ScreenFilter::Global,
            available_in_text_mode: false,
        },
    ];

    build_from_defs(commands)
}
|
||||||
|
|
||||||
|
/// Build index maps from a list of command definitions.
|
||||||
|
fn build_from_defs(commands: Vec<CommandDef>) -> CommandRegistry {
|
||||||
|
let mut by_single_key: HashMap<(KeyCode, Modifiers), Vec<usize>> = HashMap::new();
|
||||||
|
let mut by_sequence: HashMap<KeyCombo, usize> = HashMap::new();
|
||||||
|
|
||||||
|
for (idx, cmd) in commands.iter().enumerate() {
|
||||||
|
if let Some(combo) = &cmd.keybinding {
|
||||||
|
match combo {
|
||||||
|
KeyCombo::Single { code, modifiers } => {
|
||||||
|
by_single_key
|
||||||
|
.entry((*code, *modifiers))
|
||||||
|
.or_default()
|
||||||
|
.push(idx);
|
||||||
|
}
|
||||||
|
KeyCombo::Sequence { .. } => {
|
||||||
|
by_sequence.insert(combo.clone(), idx);
|
||||||
|
// Also index the first key so is_sequence_starter works via by_single_key.
|
||||||
|
if let KeyCombo::Sequence {
|
||||||
|
first_code,
|
||||||
|
first_modifiers,
|
||||||
|
..
|
||||||
|
} = combo
|
||||||
|
{
|
||||||
|
by_single_key
|
||||||
|
.entry((*first_code, *first_modifiers))
|
||||||
|
.or_default()
|
||||||
|
.push(idx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
CommandRegistry {
|
||||||
|
commands,
|
||||||
|
by_single_key,
|
||||||
|
by_sequence,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// Unit tests for the command registry: lookup by key, text-mode gating,
// two-key sequence handling, and the derived palette/help/status views.
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Utc;

    #[test]
    fn test_registry_builds_successfully() {
        let reg = build_registry();
        assert!(!reg.is_empty());
        // Guard against commands being accidentally dropped from the table.
        assert!(reg.len() >= 15);
    }

    #[test]
    fn test_registry_lookup_quit() {
        let reg = build_registry();
        let cmd = reg.lookup_key(
            &KeyCode::Char('q'),
            &Modifiers::NONE,
            &Screen::Dashboard,
            &InputMode::Normal,
        );
        assert!(cmd.is_some());
        assert_eq!(cmd.unwrap().id, "quit");
    }

    #[test]
    fn test_registry_lookup_quit_blocked_in_text_mode() {
        // 'q' is not available_in_text_mode, so it must not match while typing.
        let reg = build_registry();
        let cmd = reg.lookup_key(
            &KeyCode::Char('q'),
            &Modifiers::NONE,
            &Screen::Dashboard,
            &InputMode::Text,
        );
        assert!(cmd.is_none());
    }

    #[test]
    fn test_registry_esc_works_in_text_mode() {
        let reg = build_registry();
        let cmd = reg.lookup_key(
            &KeyCode::Escape,
            &Modifiers::NONE,
            &Screen::IssueList,
            &InputMode::Text,
        );
        assert!(cmd.is_some());
        assert_eq!(cmd.unwrap().id, "go_back");
    }

    #[test]
    fn test_registry_ctrl_p_works_in_text_mode() {
        let reg = build_registry();
        let cmd = reg.lookup_key(
            &KeyCode::Char('p'),
            &Modifiers::CTRL,
            &Screen::Search,
            &InputMode::Text,
        );
        assert!(cmd.is_some());
        assert_eq!(cmd.unwrap().id, "command_palette");
    }

    #[test]
    fn test_g_is_sequence_starter() {
        let reg = build_registry();
        assert!(reg.is_sequence_starter(&KeyCode::Char('g'), &Modifiers::NONE));
        assert!(!reg.is_sequence_starter(&KeyCode::Char('x'), &Modifiers::NONE));
    }

    #[test]
    fn test_complete_sequence_gi() {
        let reg = build_registry();
        let cmd = reg.complete_sequence(
            &KeyCode::Char('g'),
            &Modifiers::NONE,
            &KeyCode::Char('i'),
            &Modifiers::NONE,
            &Screen::Dashboard,
        );
        assert!(cmd.is_some());
        assert_eq!(cmd.unwrap().id, "go_issues");
    }

    #[test]
    fn test_complete_sequence_invalid_second_key() {
        // 'g' followed by an unbound key resolves to no command.
        let reg = build_registry();
        let cmd = reg.complete_sequence(
            &KeyCode::Char('g'),
            &Modifiers::NONE,
            &KeyCode::Char('x'),
            &Modifiers::NONE,
            &Screen::Dashboard,
        );
        assert!(cmd.is_none());
    }

    #[test]
    fn test_screen_specific_command() {
        let reg = build_registry();
        // 'j' (move_down) should work on IssueList
        let cmd = reg.lookup_key(
            &KeyCode::Char('j'),
            &Modifiers::NONE,
            &Screen::IssueList,
            &InputMode::Normal,
        );
        assert!(cmd.is_some());
        assert_eq!(cmd.unwrap().id, "move_down");

        // 'j' should NOT match on Dashboard (move_down is list-only).
        let cmd = reg.lookup_key(
            &KeyCode::Char('j'),
            &Modifiers::NONE,
            &Screen::Dashboard,
            &InputMode::Normal,
        );
        assert!(cmd.is_none());
    }

    #[test]
    fn test_palette_entries_sorted_by_label() {
        let reg = build_registry();
        let entries = reg.palette_entries(&Screen::Dashboard);
        let labels: Vec<&str> = entries.iter().map(|c| c.label).collect();
        let mut sorted = labels.clone();
        sorted.sort();
        assert_eq!(labels, sorted);
    }

    #[test]
    fn test_help_entries_only_include_keybindings() {
        let reg = build_registry();
        let entries = reg.help_entries(&Screen::Dashboard);
        for entry in &entries {
            assert!(
                entry.keybinding.is_some(),
                "help entry without keybinding: {}",
                entry.id
            );
        }
    }

    #[test]
    fn test_status_hints_non_empty() {
        let reg = build_registry();
        let hints = reg.status_hints(&Screen::Dashboard);
        assert!(!hints.is_empty());
        // All returned hints should be non-empty strings.
        for hint in &hints {
            assert!(!hint.is_empty());
        }
    }

    #[test]
    fn test_cli_equivalents_populated() {
        let reg = build_registry();
        let with_cli: Vec<&CommandDef> = reg
            .commands
            .iter()
            .filter(|c| c.cli_equivalent.is_some())
            .collect();
        assert!(
            with_cli.len() >= 5,
            "expected at least 5 commands with cli_equivalent, got {}",
            with_cli.len()
        );
    }

    #[test]
    fn test_go_prefix_timeout_detection() {
        let reg = build_registry();
        // Simulate GoPrefix mode entering: 'g' detected as sequence starter.
        assert!(reg.is_sequence_starter(&KeyCode::Char('g'), &Modifiers::NONE));

        // Simulate InputMode::GoPrefix with timeout check.
        let started = Utc::now();
        let mode = InputMode::GoPrefix {
            started_at: started,
        };
        // In GoPrefix mode, normal lookup should still work for non-sequence keys.
        let cmd = reg.lookup_key(
            &KeyCode::Char('q'),
            &Modifiers::NONE,
            &Screen::Dashboard,
            &mode,
        );
        assert!(cmd.is_some());
        assert_eq!(cmd.unwrap().id, "quit");
    }

    #[test]
    fn test_all_commands_have_nonempty_help() {
        let reg = build_registry();
        for cmd in &reg.commands {
            assert!(
                !cmd.help_text.is_empty(),
                "command {} has empty help_text",
                cmd.id
            );
        }
    }
}
|
||||||
443
crates/lore-tui/src/crash_context.rs
Normal file
443
crates/lore-tui/src/crash_context.rs
Normal file
@@ -0,0 +1,443 @@
|
|||||||
|
#![allow(dead_code)] // Phase 1: consumed by LoreApp in bd-6pmy
|
||||||
|
|
||||||
|
//! Ring buffer of recent app events for post-mortem crash diagnostics.
|
||||||
|
//!
|
||||||
|
//! The TUI pushes every key press, message dispatch, and state transition
|
||||||
|
//! into [`CrashContext`]. On panic the installed hook dumps the last 2000
|
||||||
|
//! events to `~/.local/share/lore/crash-<timestamp>.json` as NDJSON.
|
||||||
|
//!
|
||||||
|
//! Retention: only the 5 most recent crash files are kept.
|
||||||
|
|
||||||
|
use std::collections::VecDeque;
|
||||||
|
use std::io::{self, BufWriter, Write};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
/// Maximum number of events retained in the ring buffer.
|
||||||
|
const MAX_EVENTS: usize = 2000;
|
||||||
|
|
||||||
|
/// Maximum number of crash files to keep on disk.
|
||||||
|
const MAX_CRASH_FILES: usize = 5;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// CrashEvent
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// A single event recorded for crash diagnostics.
|
||||||
|
#[derive(Debug, Clone, Serialize)]
|
||||||
|
#[serde(tag = "type")]
|
||||||
|
pub enum CrashEvent {
|
||||||
|
/// A key was pressed.
|
||||||
|
KeyPress {
|
||||||
|
key: String,
|
||||||
|
mode: String,
|
||||||
|
screen: String,
|
||||||
|
},
|
||||||
|
/// A message was dispatched through update().
|
||||||
|
MsgDispatched { msg_name: String, screen: String },
|
||||||
|
/// Navigation changed screens.
|
||||||
|
StateTransition { from: String, to: String },
|
||||||
|
/// An error occurred.
|
||||||
|
Error { message: String },
|
||||||
|
/// Catch-all for ad-hoc diagnostic breadcrumbs.
|
||||||
|
Custom { tag: String, detail: String },
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// CrashContext
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Ring buffer of recent app events for panic diagnostics.
|
||||||
|
///
|
||||||
|
/// Holds at most [`MAX_EVENTS`] entries. When full, the oldest event
|
||||||
|
/// is evicted on each push.
|
||||||
|
pub struct CrashContext {
|
||||||
|
events: VecDeque<CrashEvent>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CrashContext {
|
||||||
|
/// Create an empty crash context with pre-allocated capacity.
|
||||||
|
#[must_use]
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
events: VecDeque::with_capacity(MAX_EVENTS),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Record an event. Evicts the oldest when the buffer is full.
|
||||||
|
pub fn push(&mut self, event: CrashEvent) {
|
||||||
|
if self.events.len() == MAX_EVENTS {
|
||||||
|
self.events.pop_front();
|
||||||
|
}
|
||||||
|
self.events.push_back(event);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Number of events currently stored.
|
||||||
|
#[must_use]
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.events.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether the buffer is empty.
|
||||||
|
#[must_use]
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.events.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over stored events (oldest first).
|
||||||
|
pub fn iter(&self) -> impl Iterator<Item = &CrashEvent> {
|
||||||
|
self.events.iter()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Dump all events to a file as newline-delimited JSON.
|
||||||
|
///
|
||||||
|
/// Creates parent directories if they don't exist.
|
||||||
|
/// Returns `Ok(())` on success, `Err` on I/O failure.
|
||||||
|
pub fn dump_to_file(&self, path: &Path) -> io::Result<()> {
|
||||||
|
if let Some(parent) = path.parent() {
|
||||||
|
std::fs::create_dir_all(parent)?;
|
||||||
|
}
|
||||||
|
let file = std::fs::File::create(path)?;
|
||||||
|
let mut writer = BufWriter::new(file);
|
||||||
|
for event in &self.events {
|
||||||
|
match serde_json::to_string(event) {
|
||||||
|
Ok(json) => {
|
||||||
|
writeln!(writer, "{json}")?;
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
// Fallback to debug format if serialization fails.
|
||||||
|
writeln!(
|
||||||
|
writer,
|
||||||
|
"{{\"type\":\"SerializationError\",\"debug\":\"{event:?}\"}}"
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
writer.flush()?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Default crash directory: `~/.local/share/lore/`.
|
||||||
|
#[must_use]
|
||||||
|
pub fn crash_dir() -> Option<PathBuf> {
|
||||||
|
dirs::data_local_dir().map(|d| d.join("lore"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate a timestamped crash file path.
|
||||||
|
#[must_use]
|
||||||
|
pub fn crash_file_path() -> Option<PathBuf> {
|
||||||
|
let dir = Self::crash_dir()?;
|
||||||
|
let timestamp = chrono::Utc::now().format("%Y%m%d-%H%M%S%.3f");
|
||||||
|
Some(dir.join(format!("crash-{timestamp}.json")))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove old crash files, keeping only the most recent [`MAX_CRASH_FILES`].
|
||||||
|
///
|
||||||
|
/// Best-effort: silently ignores I/O errors on individual deletions.
|
||||||
|
pub fn prune_crash_files() {
|
||||||
|
let Some(dir) = Self::crash_dir() else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
let Ok(entries) = std::fs::read_dir(&dir) else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut crash_files: Vec<PathBuf> = entries
|
||||||
|
.filter_map(Result::ok)
|
||||||
|
.map(|e| e.path())
|
||||||
|
.filter(|p| {
|
||||||
|
p.file_name()
|
||||||
|
.and_then(|n| n.to_str())
|
||||||
|
.is_some_and(|n| n.starts_with("crash-") && n.ends_with(".json"))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Sort ascending by filename (timestamps sort lexicographically).
|
||||||
|
crash_files.sort();
|
||||||
|
|
||||||
|
if crash_files.len() > MAX_CRASH_FILES {
|
||||||
|
let to_remove = crash_files.len() - MAX_CRASH_FILES;
|
||||||
|
for path in &crash_files[..to_remove] {
|
||||||
|
let _ = std::fs::remove_file(path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Install a panic hook that dumps the crash context to disk.
|
||||||
|
///
|
||||||
|
/// Captures the current events via a snapshot. The hook chains with
|
||||||
|
/// the default panic handler so backtraces are still printed.
|
||||||
|
pub fn install_panic_hook(ctx: &Self) {
|
||||||
|
let snapshot: Vec<CrashEvent> = ctx.events.iter().cloned().collect();
|
||||||
|
let prev_hook = std::panic::take_hook();
|
||||||
|
|
||||||
|
std::panic::set_hook(Box::new(move |info| {
|
||||||
|
// Best-effort dump — never panic inside the panic hook.
|
||||||
|
if let Some(path) = Self::crash_file_path() {
|
||||||
|
let mut dump = CrashContext::new();
|
||||||
|
for event in &snapshot {
|
||||||
|
dump.push(event.clone());
|
||||||
|
}
|
||||||
|
// Add the panic info itself as the final event.
|
||||||
|
dump.push(CrashEvent::Error {
|
||||||
|
message: format!("{info}"),
|
||||||
|
});
|
||||||
|
let _ = dump.dump_to_file(&path);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Chain to the previous hook (prints backtrace, etc.).
|
||||||
|
prev_hook(info);
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for CrashContext {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::io::BufRead;
|
||||||
|
|
||||||
|
/// Helper: create a numbered Custom event.
|
||||||
|
fn event(n: usize) -> CrashEvent {
|
||||||
|
CrashEvent::Custom {
|
||||||
|
tag: "test".into(),
|
||||||
|
detail: format!("event-{n}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ring_buffer_evicts_oldest() {
|
||||||
|
let mut ctx = CrashContext::new();
|
||||||
|
for i in 0..2500 {
|
||||||
|
ctx.push(event(i));
|
||||||
|
}
|
||||||
|
assert_eq!(ctx.len(), MAX_EVENTS);
|
||||||
|
|
||||||
|
// First retained event should be #500 (0..499 evicted).
|
||||||
|
let first = ctx.iter().next().unwrap();
|
||||||
|
match first {
|
||||||
|
CrashEvent::Custom { detail, .. } => assert_eq!(detail, "event-500"),
|
||||||
|
other => panic!("unexpected variant: {other:?}"),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Last retained event should be #2499.
|
||||||
|
let last = ctx.iter().last().unwrap();
|
||||||
|
match last {
|
||||||
|
CrashEvent::Custom { detail, .. } => assert_eq!(detail, "event-2499"),
|
||||||
|
other => panic!("unexpected variant: {other:?}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_new_is_empty() {
|
||||||
|
let ctx = CrashContext::new();
|
||||||
|
assert!(ctx.is_empty());
|
||||||
|
assert_eq!(ctx.len(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_push_increments_len() {
|
||||||
|
let mut ctx = CrashContext::new();
|
||||||
|
ctx.push(event(1));
|
||||||
|
ctx.push(event(2));
|
||||||
|
assert_eq!(ctx.len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_push_does_not_evict_below_capacity() {
|
||||||
|
let mut ctx = CrashContext::new();
|
||||||
|
for i in 0..MAX_EVENTS {
|
||||||
|
ctx.push(event(i));
|
||||||
|
}
|
||||||
|
assert_eq!(ctx.len(), MAX_EVENTS);
|
||||||
|
|
||||||
|
// First should still be event-0.
|
||||||
|
match ctx.iter().next().unwrap() {
|
||||||
|
CrashEvent::Custom { detail, .. } => assert_eq!(detail, "event-0"),
|
||||||
|
other => panic!("unexpected: {other:?}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_dump_to_file_writes_ndjson() {
|
||||||
|
let mut ctx = CrashContext::new();
|
||||||
|
ctx.push(CrashEvent::KeyPress {
|
||||||
|
key: "j".into(),
|
||||||
|
mode: "Normal".into(),
|
||||||
|
screen: "Dashboard".into(),
|
||||||
|
});
|
||||||
|
ctx.push(CrashEvent::MsgDispatched {
|
||||||
|
msg_name: "NavigateTo".into(),
|
||||||
|
screen: "Dashboard".into(),
|
||||||
|
});
|
||||||
|
ctx.push(CrashEvent::StateTransition {
|
||||||
|
from: "Dashboard".into(),
|
||||||
|
to: "IssueList".into(),
|
||||||
|
});
|
||||||
|
ctx.push(CrashEvent::Error {
|
||||||
|
message: "db busy".into(),
|
||||||
|
});
|
||||||
|
ctx.push(CrashEvent::Custom {
|
||||||
|
tag: "test".into(),
|
||||||
|
detail: "hello".into(),
|
||||||
|
});
|
||||||
|
|
||||||
|
let dir = tempfile::tempdir().unwrap();
|
||||||
|
let path = dir.path().join("test-crash.json");
|
||||||
|
ctx.dump_to_file(&path).unwrap();
|
||||||
|
|
||||||
|
// Verify: each line is valid JSON, total lines == 5.
|
||||||
|
let file = std::fs::File::open(&path).unwrap();
|
||||||
|
let reader = io::BufReader::new(file);
|
||||||
|
let lines: Vec<String> = reader.lines().map(Result::unwrap).collect();
|
||||||
|
assert_eq!(lines.len(), 5);
|
||||||
|
|
||||||
|
// Each line must parse as JSON.
|
||||||
|
for line in &lines {
|
||||||
|
let val: serde_json::Value = serde_json::from_str(line).unwrap();
|
||||||
|
assert!(val.get("type").is_some(), "missing 'type' field: {line}");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Spot check first line: KeyPress with correct fields.
|
||||||
|
let first: serde_json::Value = serde_json::from_str(&lines[0]).unwrap();
|
||||||
|
assert_eq!(first["type"], "KeyPress");
|
||||||
|
assert_eq!(first["key"], "j");
|
||||||
|
assert_eq!(first["mode"], "Normal");
|
||||||
|
assert_eq!(first["screen"], "Dashboard");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_dump_creates_parent_directories() {
|
||||||
|
let dir = tempfile::tempdir().unwrap();
|
||||||
|
let nested = dir.path().join("a").join("b").join("c").join("crash.json");
|
||||||
|
|
||||||
|
let mut ctx = CrashContext::new();
|
||||||
|
ctx.push(event(1));
|
||||||
|
ctx.dump_to_file(&nested).unwrap();
|
||||||
|
|
||||||
|
assert!(nested.exists());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_dump_empty_context_creates_empty_file() {
|
||||||
|
let dir = tempfile::tempdir().unwrap();
|
||||||
|
let path = dir.path().join("empty.json");
|
||||||
|
|
||||||
|
let ctx = CrashContext::new();
|
||||||
|
ctx.dump_to_file(&path).unwrap();
|
||||||
|
|
||||||
|
let content = std::fs::read_to_string(&path).unwrap();
|
||||||
|
assert!(content.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_prune_keeps_newest_files() {
|
||||||
|
let dir = tempfile::tempdir().unwrap();
|
||||||
|
let crash_dir = dir.path();
|
||||||
|
|
||||||
|
// Create 8 crash files with ordered timestamps.
|
||||||
|
let filenames: Vec<String> = (0..8)
|
||||||
|
.map(|i| format!("crash-2026010{i}-120000.000.json"))
|
||||||
|
.collect();
|
||||||
|
for name in &filenames {
|
||||||
|
std::fs::write(crash_dir.join(name), "{}").unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prune, pointing at our temp dir.
|
||||||
|
prune_crash_files_in(crash_dir);
|
||||||
|
|
||||||
|
let remaining: Vec<String> = std::fs::read_dir(crash_dir)
|
||||||
|
.unwrap()
|
||||||
|
.filter_map(Result::ok)
|
||||||
|
.map(|e| e.file_name().to_string_lossy().into_owned())
|
||||||
|
.filter(|n| n.starts_with("crash-") && n.ends_with(".json"))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
assert_eq!(remaining.len(), MAX_CRASH_FILES);
|
||||||
|
// Oldest 3 should be gone.
|
||||||
|
for name in filenames.iter().take(3) {
|
||||||
|
assert!(!remaining.contains(name));
|
||||||
|
}
|
||||||
|
// Newest 5 should remain.
|
||||||
|
for name in filenames.iter().skip(3) {
|
||||||
|
assert!(remaining.contains(name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_all_event_variants_serialize() {
|
||||||
|
let events = vec![
|
||||||
|
CrashEvent::KeyPress {
|
||||||
|
key: "q".into(),
|
||||||
|
mode: "Normal".into(),
|
||||||
|
screen: "Dashboard".into(),
|
||||||
|
},
|
||||||
|
CrashEvent::MsgDispatched {
|
||||||
|
msg_name: "Quit".into(),
|
||||||
|
screen: "Dashboard".into(),
|
||||||
|
},
|
||||||
|
CrashEvent::StateTransition {
|
||||||
|
from: "Dashboard".into(),
|
||||||
|
to: "IssueList".into(),
|
||||||
|
},
|
||||||
|
CrashEvent::Error {
|
||||||
|
message: "oops".into(),
|
||||||
|
},
|
||||||
|
CrashEvent::Custom {
|
||||||
|
tag: "debug".into(),
|
||||||
|
detail: "trace".into(),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
for event in events {
|
||||||
|
let json = serde_json::to_string(&event).unwrap();
|
||||||
|
let parsed: serde_json::Value = serde_json::from_str(&json).unwrap();
|
||||||
|
assert!(parsed.get("type").is_some());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_default_is_new() {
|
||||||
|
let ctx = CrashContext::default();
|
||||||
|
assert!(ctx.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
// Test helper: prune files in a specific directory (not the real path).
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
|
||||||
|
fn prune_crash_files_in(dir: &Path) {
|
||||||
|
let Ok(entries) = std::fs::read_dir(dir) else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut crash_files: Vec<PathBuf> = entries
|
||||||
|
.filter_map(Result::ok)
|
||||||
|
.map(|e| e.path())
|
||||||
|
.filter(|p| {
|
||||||
|
p.file_name()
|
||||||
|
.and_then(|n| n.to_str())
|
||||||
|
.is_some_and(|n| n.starts_with("crash-") && n.ends_with(".json"))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
crash_files.sort();
|
||||||
|
|
||||||
|
if crash_files.len() > MAX_CRASH_FILES {
|
||||||
|
let to_remove = crash_files.len() - MAX_CRASH_FILES;
|
||||||
|
for path in &crash_files[..to_remove] {
|
||||||
|
let _ = std::fs::remove_file(path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
270
crates/lore-tui/src/db.rs
Normal file
270
crates/lore-tui/src/db.rs
Normal file
@@ -0,0 +1,270 @@
|
|||||||
|
#![allow(dead_code)] // Phase 0: types defined now, consumed in Phase 1+
|
||||||
|
|
||||||
|
//! Database access layer for the TUI.
|
||||||
|
//!
|
||||||
|
//! Provides a read pool (3 connections, round-robin) plus a dedicated writer
|
||||||
|
//! connection. All connections use WAL mode and busy_timeout for concurrency.
|
||||||
|
//!
|
||||||
|
//! The TUI operates read-heavy: parallel queries for dashboard, list views,
|
||||||
|
//! and prefetch. Writes are rare (TUI-local state: scroll positions, bookmarks).
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
use std::sync::Mutex;
|
||||||
|
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||||
|
|
||||||
|
use anyhow::{Context, Result};
|
||||||
|
use rusqlite::Connection;
|
||||||
|
|
||||||
|
/// Number of reader connections in the pool.
|
||||||
|
const READER_COUNT: usize = 3;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// DbManager
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Manages a pool of read-only connections plus a dedicated writer.
|
||||||
|
///
|
||||||
|
/// Designed for `Arc<DbManager>` sharing across FrankenTUI's `Cmd::task`
|
||||||
|
/// background threads. Each reader is individually `Mutex`-protected so
|
||||||
|
/// concurrent tasks can query different readers without blocking.
|
||||||
|
pub struct DbManager {
|
||||||
|
readers: Vec<Mutex<Connection>>,
|
||||||
|
writer: Mutex<Connection>,
|
||||||
|
next_reader: AtomicUsize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DbManager {
|
||||||
|
/// Open a database at `path` with 3 reader + 1 writer connections.
|
||||||
|
///
|
||||||
|
/// All connections get WAL mode, 5000ms busy_timeout, and foreign keys.
|
||||||
|
/// Reader connections additionally set `query_only = ON` as a safety guard.
|
||||||
|
pub fn open(path: &Path) -> Result<Self> {
|
||||||
|
let mut readers = Vec::with_capacity(READER_COUNT);
|
||||||
|
for i in 0..READER_COUNT {
|
||||||
|
let conn =
|
||||||
|
open_connection(path).with_context(|| format!("opening reader connection {i}"))?;
|
||||||
|
conn.pragma_update(None, "query_only", "ON")
|
||||||
|
.context("setting query_only on reader")?;
|
||||||
|
readers.push(Mutex::new(conn));
|
||||||
|
}
|
||||||
|
|
||||||
|
let writer = open_connection(path).context("opening writer connection")?;
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
readers,
|
||||||
|
writer: Mutex::new(writer),
|
||||||
|
next_reader: AtomicUsize::new(0),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Execute a read-only query against the pool.
|
||||||
|
///
|
||||||
|
/// Selects the next reader via round-robin. The connection is borrowed
|
||||||
|
/// for the duration of `f` and cannot leak outside.
|
||||||
|
pub fn with_reader<F, T>(&self, f: F) -> Result<T>
|
||||||
|
where
|
||||||
|
F: FnOnce(&Connection) -> Result<T>,
|
||||||
|
{
|
||||||
|
let idx = self.next_reader.fetch_add(1, Ordering::Relaxed) % READER_COUNT;
|
||||||
|
let conn = self.readers[idx].lock().expect("reader mutex poisoned");
|
||||||
|
f(&conn)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Execute a write operation against the dedicated writer.
|
||||||
|
///
|
||||||
|
/// Serialized via a single `Mutex`. The TUI writes infrequently
|
||||||
|
/// (bookmarks, scroll state) so contention is negligible.
|
||||||
|
pub fn with_writer<F, T>(&self, f: F) -> Result<T>
|
||||||
|
where
|
||||||
|
F: FnOnce(&Connection) -> Result<T>,
|
||||||
|
{
|
||||||
|
let conn = self.writer.lock().expect("writer mutex poisoned");
|
||||||
|
f(&conn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Connection setup
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Open a single SQLite connection with TUI-appropriate pragmas.
|
||||||
|
///
|
||||||
|
/// Mirrors lore's `create_connection` pragmas (WAL, busy_timeout, etc.)
|
||||||
|
/// but skips the sqlite-vec extension registration — the TUI reads standard
|
||||||
|
/// tables only, never vec0 virtual tables.
|
||||||
|
fn open_connection(path: &Path) -> Result<Connection> {
|
||||||
|
let conn = Connection::open(path).context("opening SQLite database")?;
|
||||||
|
|
||||||
|
conn.pragma_update(None, "journal_mode", "WAL")?;
|
||||||
|
conn.pragma_update(None, "synchronous", "NORMAL")?;
|
||||||
|
conn.pragma_update(None, "foreign_keys", "ON")?;
|
||||||
|
conn.pragma_update(None, "busy_timeout", 5000)?;
|
||||||
|
conn.pragma_update(None, "temp_store", "MEMORY")?;
|
||||||
|
|
||||||
|
Ok(conn)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
/// Create a temporary database file for testing.
|
||||||
|
///
|
||||||
|
/// Uses an atomic counter + thread ID to guarantee unique paths even
|
||||||
|
/// when tests run in parallel.
|
||||||
|
fn test_db_path() -> std::path::PathBuf {
|
||||||
|
use std::sync::atomic::AtomicU64;
|
||||||
|
static COUNTER: AtomicU64 = AtomicU64::new(0);
|
||||||
|
let n = COUNTER.fetch_add(1, Ordering::Relaxed);
|
||||||
|
let dir = std::env::temp_dir().join("lore-tui-tests");
|
||||||
|
std::fs::create_dir_all(&dir).expect("create test dir");
|
||||||
|
dir.join(format!(
|
||||||
|
"test-{}-{:?}-{n}.db",
|
||||||
|
std::process::id(),
|
||||||
|
std::thread::current().id(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_test_table(conn: &Connection) {
|
||||||
|
conn.execute_batch(
|
||||||
|
"CREATE TABLE IF NOT EXISTS test_items (id INTEGER PRIMARY KEY, name TEXT);",
|
||||||
|
)
|
||||||
|
.expect("create test table");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_dbmanager_opens_successfully() {
|
||||||
|
let path = test_db_path();
|
||||||
|
let db = DbManager::open(&path).expect("open");
|
||||||
|
// Writer creates the test table
|
||||||
|
db.with_writer(|conn| {
|
||||||
|
create_test_table(conn);
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
.expect("create table via writer");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_reader_is_query_only() {
|
||||||
|
let path = test_db_path();
|
||||||
|
let db = DbManager::open(&path).expect("open");
|
||||||
|
|
||||||
|
// Create table via writer first
|
||||||
|
db.with_writer(|conn| {
|
||||||
|
create_test_table(conn);
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Attempt INSERT via reader — should fail
|
||||||
|
let result = db.with_reader(|conn| {
|
||||||
|
conn.execute("INSERT INTO test_items (name) VALUES ('boom')", [])
|
||||||
|
.map_err(|e| anyhow::anyhow!(e))?;
|
||||||
|
Ok(())
|
||||||
|
});
|
||||||
|
assert!(result.is_err(), "reader should reject writes");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_writer_allows_mutations() {
|
||||||
|
let path = test_db_path();
|
||||||
|
let db = DbManager::open(&path).expect("open");
|
||||||
|
|
||||||
|
db.with_writer(|conn| {
|
||||||
|
create_test_table(conn);
|
||||||
|
conn.execute("INSERT INTO test_items (name) VALUES ('hello')", [])?;
|
||||||
|
let count: i64 = conn.query_row("SELECT COUNT(*) FROM test_items", [], |r| r.get(0))?;
|
||||||
|
assert_eq!(count, 1);
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
.expect("writer should allow mutations");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_round_robin_rotates_readers() {
|
||||||
|
let path = test_db_path();
|
||||||
|
let db = DbManager::open(&path).expect("open");
|
||||||
|
|
||||||
|
// Call with_reader 6 times — should cycle through readers 0,1,2,0,1,2
|
||||||
|
for expected_cycle in 0..2 {
|
||||||
|
for expected_idx in 0..READER_COUNT {
|
||||||
|
let current = db.next_reader.load(Ordering::Relaxed);
|
||||||
|
assert_eq!(
|
||||||
|
current % READER_COUNT,
|
||||||
|
(expected_cycle * READER_COUNT + expected_idx) % READER_COUNT,
|
||||||
|
);
|
||||||
|
db.with_reader(|_conn| Ok(())).unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_reader_can_read_writer_data() {
|
||||||
|
let path = test_db_path();
|
||||||
|
let db = DbManager::open(&path).expect("open");
|
||||||
|
|
||||||
|
db.with_writer(|conn| {
|
||||||
|
create_test_table(conn);
|
||||||
|
conn.execute("INSERT INTO test_items (name) VALUES ('visible')", [])?;
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let name: String = db
|
||||||
|
.with_reader(|conn| {
|
||||||
|
let n: String =
|
||||||
|
conn.query_row("SELECT name FROM test_items WHERE id = 1", [], |r| r.get(0))?;
|
||||||
|
Ok(n)
|
||||||
|
})
|
||||||
|
.expect("reader should see writer's data");
|
||||||
|
|
||||||
|
assert_eq!(name, "visible");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_dbmanager_is_send_sync() {
|
||||||
|
fn assert_send_sync<T: Send + Sync>() {}
|
||||||
|
assert_send_sync::<DbManager>();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_concurrent_reads() {
|
||||||
|
let path = test_db_path();
|
||||||
|
let db = Arc::new(DbManager::open(&path).expect("open"));
|
||||||
|
|
||||||
|
db.with_writer(|conn| {
|
||||||
|
create_test_table(conn);
|
||||||
|
for i in 0..10 {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO test_items (name) VALUES (?1)",
|
||||||
|
[format!("item-{i}")],
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let mut handles = Vec::new();
|
||||||
|
for _ in 0..6 {
|
||||||
|
let db = Arc::clone(&db);
|
||||||
|
handles.push(std::thread::spawn(move || {
|
||||||
|
db.with_reader(|conn| {
|
||||||
|
let count: i64 =
|
||||||
|
conn.query_row("SELECT COUNT(*) FROM test_items", [], |r| r.get(0))?;
|
||||||
|
assert_eq!(count, 10);
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
.expect("concurrent read should succeed");
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
for h in handles {
|
||||||
|
h.join().expect("thread should not panic");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
66
crates/lore-tui/src/lib.rs
Normal file
66
crates/lore-tui/src/lib.rs
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
#![forbid(unsafe_code)]
|
||||||
|
|
||||||
|
//! Gitlore TUI — terminal interface for exploring GitLab data locally.
|
||||||
|
//!
|
||||||
|
//! Built on FrankenTUI (Elm architecture): Model, update, view.
|
||||||
|
//! The `lore` CLI spawns `lore-tui` via PATH lookup at runtime.
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
|
||||||
|
// Phase 0 modules.
|
||||||
|
pub mod clock; // Clock trait: SystemClock + FakeClock (bd-2lg6)
|
||||||
|
pub mod message; // Msg, Screen, EntityKey, AppError, InputMode (bd-c9gk)
|
||||||
|
|
||||||
|
pub mod safety; // Terminal safety: sanitize + URL policy + redact (bd-3ir1)
|
||||||
|
|
||||||
|
pub mod db; // DbManager: read pool + dedicated writer (bd-2kop)
|
||||||
|
pub mod theme; // Flexoki theme: build_theme, state_color, label_style (bd-5ofk)
|
||||||
|
|
||||||
|
pub mod app; // LoreApp Model trait impl (Phase 0 proof: bd-2emv, full: bd-6pmy)
|
||||||
|
|
||||||
|
// Phase 1 modules.
|
||||||
|
pub mod commands; // CommandRegistry: keybindings, help, palette (bd-38lb)
|
||||||
|
pub mod crash_context; // CrashContext ring buffer + panic hook (bd-2fr7)
|
||||||
|
pub mod navigation; // NavigationStack: back/forward/jump list (bd-1qpp)
|
||||||
|
pub mod state; // AppState, LoadState, ScreenIntent, per-screen states (bd-1v9m)
|
||||||
|
pub mod task_supervisor; // TaskSupervisor: dedup + cancel + generation IDs (bd-3le2)
|
||||||
|
pub mod view; // View layer: render_screen + common widgets (bd-26f2)
|
||||||
|
|
||||||
|
/// Options controlling how the TUI launches.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct LaunchOptions {
|
||||||
|
/// Path to lore config file.
|
||||||
|
pub config_path: Option<String>,
|
||||||
|
/// Run a background sync before displaying data.
|
||||||
|
pub sync_on_start: bool,
|
||||||
|
/// Clear cached TUI state and start fresh.
|
||||||
|
pub fresh: bool,
|
||||||
|
/// Render backend: "crossterm" or "native".
|
||||||
|
pub render_mode: String,
|
||||||
|
/// Use ASCII-only box drawing characters.
|
||||||
|
pub ascii: bool,
|
||||||
|
/// Disable alternate screen (render inline).
|
||||||
|
pub no_alt_screen: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Launch the TUI in browse mode (no sync).
|
||||||
|
///
|
||||||
|
/// Loads config from `options.config_path` (or default location),
|
||||||
|
/// opens the database read-only, and enters the FrankenTUI event loop.
|
||||||
|
pub fn launch_tui(options: LaunchOptions) -> Result<()> {
|
||||||
|
let _options = options;
|
||||||
|
// Phase 1 will wire this to LoreApp + App::fullscreen().run()
|
||||||
|
eprintln!("lore-tui: browse mode not yet implemented (Phase 1)");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Launch the TUI with an initial sync pass.
|
||||||
|
///
|
||||||
|
/// Runs `lore sync` in the background while displaying a progress screen,
|
||||||
|
/// then transitions to browse mode once sync completes.
|
||||||
|
pub fn launch_sync_tui(options: LaunchOptions) -> Result<()> {
|
||||||
|
let _options = options;
|
||||||
|
// Phase 2 will implement the sync progress screen
|
||||||
|
eprintln!("lore-tui: sync mode not yet implemented (Phase 2)");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
53
crates/lore-tui/src/main.rs
Normal file
53
crates/lore-tui/src/main.rs
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
#![forbid(unsafe_code)]
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use clap::Parser;
|
||||||
|
use lore_tui::LaunchOptions;
|
||||||
|
|
||||||
|
/// Terminal UI for Gitlore — explore GitLab issues, MRs, and search locally.
#[derive(Parser, Debug)]
#[command(name = "lore-tui", version, about)]
struct TuiCli {
    // NOTE: the `///` doc comments below double as clap `--help` text —
    // editing them changes the CLI's user-visible help output.
    /// Path to lore config file (default: ~/.config/lore/config.json).
    #[arg(short, long, env = "LORE_CONFIG_PATH")]
    config: Option<String>,

    /// Run a sync before launching the TUI.
    #[arg(long)]
    sync: bool,

    /// Clear cached state and start fresh.
    #[arg(long)]
    fresh: bool,

    /// Render mode: "crossterm" (default) or "native".
    // Stringly-typed on purpose at this phase; validated downstream.
    #[arg(long, default_value = "crossterm")]
    render_mode: String,

    /// Use ASCII-only drawing characters (no Unicode box drawing).
    #[arg(long)]
    ascii: bool,

    /// Disable alternate screen (render inline).
    #[arg(long)]
    no_alt_screen: bool,
}
|
||||||
|
|
||||||
|
fn main() -> Result<()> {
|
||||||
|
let cli = TuiCli::parse();
|
||||||
|
|
||||||
|
let options = LaunchOptions {
|
||||||
|
config_path: cli.config,
|
||||||
|
sync_on_start: cli.sync,
|
||||||
|
fresh: cli.fresh,
|
||||||
|
render_mode: cli.render_mode,
|
||||||
|
ascii: cli.ascii,
|
||||||
|
no_alt_screen: cli.no_alt_screen,
|
||||||
|
};
|
||||||
|
|
||||||
|
if options.sync_on_start {
|
||||||
|
lore_tui::launch_sync_tui(options)
|
||||||
|
} else {
|
||||||
|
lore_tui::launch_tui(options)
|
||||||
|
}
|
||||||
|
}
|
||||||
523
crates/lore-tui/src/message.rs
Normal file
523
crates/lore-tui/src/message.rs
Normal file
@@ -0,0 +1,523 @@
|
|||||||
|
#![allow(dead_code)] // Phase 0: types defined now, consumed in Phase 1+
|
||||||
|
|
||||||
|
//! Core types for the lore-tui Elm architecture.
|
||||||
|
//!
|
||||||
|
//! - [`Msg`] — every user action and async result flows through this enum.
|
||||||
|
//! - [`Screen`] — navigation targets.
|
||||||
|
//! - [`EntityKey`] — safe cross-project entity identity.
|
||||||
|
//! - [`AppError`] — structured error display in the TUI.
|
||||||
|
//! - [`InputMode`] — controls key dispatch routing.
|
||||||
|
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use ftui::Event;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// EntityKind
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Distinguishes issue vs merge request in an [`EntityKey`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EntityKind {
    // Rendered with a `#` sigil by `EntityKey`'s Display impl.
    Issue,
    // Rendered with a `!` sigil by `EntityKey`'s Display impl.
    MergeRequest,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// EntityKey
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Uniquely identifies an entity (issue or MR) across projects.
///
/// Bare `iid` is unsafe in multi-project datasets — equality requires
/// project_id + iid + kind.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct EntityKey {
    // Numeric project id (presumably GitLab's project id — confirm with sync layer).
    pub project_id: i64,
    // Project-scoped number; issue vs MR meaning disambiguated by `kind`.
    pub iid: i64,
    pub kind: EntityKind,
}
|
||||||
|
|
||||||
|
impl EntityKey {
|
||||||
|
#[must_use]
|
||||||
|
pub fn issue(project_id: i64, iid: i64) -> Self {
|
||||||
|
Self {
|
||||||
|
project_id,
|
||||||
|
iid,
|
||||||
|
kind: EntityKind::Issue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn mr(project_id: i64, iid: i64) -> Self {
|
||||||
|
Self {
|
||||||
|
project_id,
|
||||||
|
iid,
|
||||||
|
kind: EntityKind::MergeRequest,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for EntityKey {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
let prefix = match self.kind {
|
||||||
|
EntityKind::Issue => "#",
|
||||||
|
EntityKind::MergeRequest => "!",
|
||||||
|
};
|
||||||
|
write!(f, "p{}:{}{}", self.project_id, prefix, self.iid)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Screen
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Navigation targets within the TUI.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Screen {
    Dashboard,
    IssueList,
    // Detail variants carry the key of the entity they show; these are the
    // only variants `is_detail_or_entity` matches (and hence the only ones
    // recorded in the navigation jump list).
    IssueDetail(EntityKey),
    MrList,
    MrDetail(EntityKey),
    Search,
    Timeline,
    Who,
    Sync,
    Stats,
    Doctor,
    Bootstrap,
}
|
||||||
|
|
||||||
|
impl Screen {
|
||||||
|
/// Human-readable label for breadcrumbs and status bar.
|
||||||
|
#[must_use]
|
||||||
|
pub fn label(&self) -> &str {
|
||||||
|
match self {
|
||||||
|
Self::Dashboard => "Dashboard",
|
||||||
|
Self::IssueList => "Issues",
|
||||||
|
Self::IssueDetail(_) => "Issue",
|
||||||
|
Self::MrList => "Merge Requests",
|
||||||
|
Self::MrDetail(_) => "Merge Request",
|
||||||
|
Self::Search => "Search",
|
||||||
|
Self::Timeline => "Timeline",
|
||||||
|
Self::Who => "Who",
|
||||||
|
Self::Sync => "Sync",
|
||||||
|
Self::Stats => "Stats",
|
||||||
|
Self::Doctor => "Doctor",
|
||||||
|
Self::Bootstrap => "Bootstrap",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether this screen shows a specific entity detail view.
|
||||||
|
#[must_use]
|
||||||
|
pub fn is_detail_or_entity(&self) -> bool {
|
||||||
|
matches!(self, Self::IssueDetail(_) | Self::MrDetail(_))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// AppError
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Structured error types for user-facing display in the TUI.
// NOTE(review): consider `#[non_exhaustive]` if this ever becomes public API.
#[derive(Debug, Clone)]
pub enum AppError {
    /// Database is busy (WAL contention).
    DbBusy,
    /// Database corruption detected.
    DbCorruption(String),
    /// GitLab rate-limited; retry after N seconds (if header present).
    NetworkRateLimited { retry_after_secs: Option<u64> },
    /// Network unavailable.
    NetworkUnavailable,
    /// GitLab authentication failed.
    AuthFailed,
    /// Data parsing error.
    ParseError(String),
    /// Internal / unexpected error.
    Internal(String),
}
|
||||||
|
|
||||||
|
impl fmt::Display for AppError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::DbBusy => write!(f, "Database is busy — another process holds the lock"),
|
||||||
|
Self::DbCorruption(detail) => write!(f, "Database corruption: {detail}"),
|
||||||
|
Self::NetworkRateLimited {
|
||||||
|
retry_after_secs: Some(secs),
|
||||||
|
} => write!(f, "Rate limited by GitLab — retry in {secs}s"),
|
||||||
|
Self::NetworkRateLimited {
|
||||||
|
retry_after_secs: None,
|
||||||
|
} => write!(f, "Rate limited by GitLab — try again shortly"),
|
||||||
|
Self::NetworkUnavailable => write!(f, "Network unavailable — working offline"),
|
||||||
|
Self::AuthFailed => write!(f, "GitLab authentication failed — check your token"),
|
||||||
|
Self::ParseError(detail) => write!(f, "Parse error: {detail}"),
|
||||||
|
Self::Internal(detail) => write!(f, "Internal error: {detail}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// InputMode
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Controls how keystrokes are routed through the key dispatch pipeline.
#[derive(Debug, Clone, Default)]
pub enum InputMode {
    /// Standard navigation mode — keys dispatch to screen-specific handlers.
    #[default]
    Normal,
    /// Text input focused (filter bar, search box).
    Text,
    /// Command palette is open.
    Palette,
    /// "g" prefix pressed — waiting for second key (500ms timeout).
    // `started_at` lets the update loop expire the prefix once the window passes.
    GoPrefix { started_at: DateTime<Utc> },
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Msg
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Every user action and async result flows through this enum.
///
/// Generation fields (`generation: u64`) on async result variants enable
/// stale-response detection: if the generation doesn't match the current
/// request generation, the result is silently dropped.
// Large payloads (detail/who/dashboard data) are boxed so the enum itself
// stays small; list results use Vec, which is already pointer-sized.
#[derive(Debug)]
pub enum Msg {
    // --- Terminal events ---
    /// Raw terminal event (key, mouse, paste, focus, clipboard).
    RawEvent(Event),
    /// Periodic tick from runtime subscription.
    Tick,
    /// Terminal resized.
    Resize {
        width: u16,
        height: u16,
    },

    // --- Navigation ---
    /// Navigate to a specific screen.
    NavigateTo(Screen),
    /// Go back in navigation history.
    GoBack,
    /// Go forward in navigation history.
    GoForward,
    /// Jump to the dashboard.
    GoHome,
    /// Jump back N screens in history.
    JumpBack(usize),
    /// Jump forward N screens in history.
    JumpForward(usize),

    // --- Command palette ---
    OpenCommandPalette,
    CloseCommandPalette,
    CommandPaletteInput(String),
    CommandPaletteSelect(String),

    // --- Issue list ---
    IssueListLoaded {
        generation: u64,
        rows: Vec<IssueRow>,
    },
    IssueListFilterChanged(String),
    IssueListSortChanged,
    IssueSelected(EntityKey),

    // --- MR list ---
    MrListLoaded {
        generation: u64,
        rows: Vec<MrRow>,
    },
    MrListFilterChanged(String),
    MrSelected(EntityKey),

    // --- Issue detail ---
    IssueDetailLoaded {
        generation: u64,
        key: EntityKey,
        detail: Box<IssueDetail>,
    },

    // --- MR detail ---
    MrDetailLoaded {
        generation: u64,
        key: EntityKey,
        detail: Box<MrDetail>,
    },

    // --- Discussions (shared by issue + MR detail) ---
    DiscussionsLoaded {
        generation: u64,
        discussions: Vec<Discussion>,
    },

    // --- Search ---
    SearchQueryChanged(String),
    SearchRequestStarted {
        generation: u64,
        query: String,
    },
    SearchExecuted {
        generation: u64,
        results: Vec<SearchResult>,
    },
    SearchResultSelected(EntityKey),
    SearchModeChanged,
    SearchCapabilitiesLoaded,

    // --- Timeline ---
    TimelineLoaded {
        generation: u64,
        events: Vec<TimelineEvent>,
    },
    TimelineEntitySelected(EntityKey),

    // --- Who (people) ---
    WhoResultLoaded {
        generation: u64,
        result: Box<WhoResult>,
    },
    WhoModeChanged,

    // --- Sync ---
    SyncStarted,
    SyncProgress {
        stage: String,
        current: u64,
        total: u64,
    },
    SyncProgressBatch {
        stage: String,
        batch_size: u64,
    },
    SyncLogLine(String),
    SyncBackpressureDrop,
    SyncCompleted {
        elapsed_ms: u64,
    },
    SyncCancelled,
    SyncFailed(String),
    SyncStreamStats {
        bytes: u64,
        items: u64,
    },

    // --- Search debounce ---
    SearchDebounceArmed {
        generation: u64,
    },
    SearchDebounceFired {
        generation: u64,
    },

    // --- Dashboard ---
    DashboardLoaded {
        generation: u64,
        data: Box<DashboardData>,
    },

    // --- Global actions ---
    Error(AppError),
    ShowHelp,
    ShowCliEquivalent,
    OpenInBrowser,
    BlurTextInput,
    ScrollToTopCurrentScreen,
    Quit,
}
|
||||||
|
|
||||||
|
/// Convert terminal events into messages.
|
||||||
|
///
|
||||||
|
/// FrankenTUI requires `From<Event>` on the message type so the runtime
|
||||||
|
/// can inject terminal events into the model's update loop.
|
||||||
|
impl From<Event> for Msg {
|
||||||
|
fn from(event: Event) -> Self {
|
||||||
|
match event {
|
||||||
|
Event::Resize { width, height } => Self::Resize { width, height },
|
||||||
|
Event::Tick => Self::Tick,
|
||||||
|
other => Self::RawEvent(other),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Placeholder data types (will be fleshed out in Phase 1+)
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Placeholder for an issue row in list views.
#[derive(Debug, Clone)]
pub struct IssueRow {
    pub key: EntityKey,
    pub title: String,
    pub state: String,
}

/// Placeholder for a merge request row in list views.
#[derive(Debug, Clone)]
pub struct MrRow {
    pub key: EntityKey,
    pub title: String,
    pub state: String,
    pub draft: bool,
}

/// Placeholder for issue detail payload.
#[derive(Debug, Clone)]
pub struct IssueDetail {
    pub key: EntityKey,
    pub title: String,
    pub description: String,
}

/// Placeholder for MR detail payload.
#[derive(Debug, Clone)]
pub struct MrDetail {
    pub key: EntityKey,
    pub title: String,
    pub description: String,
}

/// Placeholder for a discussion thread.
#[derive(Debug, Clone)]
pub struct Discussion {
    pub id: String,
    pub notes: Vec<String>,
}

/// Placeholder for a search result.
#[derive(Debug, Clone)]
pub struct SearchResult {
    pub key: EntityKey,
    pub title: String,
    // Presumably higher score = more relevant — confirm when search lands.
    pub score: f64,
}

/// Placeholder for a timeline event.
#[derive(Debug, Clone)]
pub struct TimelineEvent {
    // NOTE(review): timestamp kept as String in Phase 0; likely becomes a
    // chrono type later — confirm.
    pub timestamp: String,
    pub description: String,
}

/// Placeholder for who/people intelligence result.
#[derive(Debug, Clone)]
pub struct WhoResult {
    pub experts: Vec<String>,
}

/// Placeholder for dashboard summary data.
#[derive(Debug, Clone)]
pub struct DashboardData {
    pub issue_count: u64,
    pub mr_count: u64,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    // Pure data-type tests: no terminal or async runtime required.
    use super::*;

    #[test]
    fn test_entity_key_equality() {
        assert_eq!(EntityKey::issue(1, 42), EntityKey::issue(1, 42));
        assert_ne!(EntityKey::issue(1, 42), EntityKey::mr(1, 42));
    }

    #[test]
    fn test_entity_key_different_projects() {
        // Same iid in different projects must NOT compare equal.
        assert_ne!(EntityKey::issue(1, 42), EntityKey::issue(2, 42));
    }

    #[test]
    fn test_entity_key_display() {
        assert_eq!(EntityKey::issue(5, 123).to_string(), "p5:#123");
        assert_eq!(EntityKey::mr(5, 456).to_string(), "p5:!456");
    }

    #[test]
    fn test_entity_key_hash_is_usable_in_collections() {
        use std::collections::HashSet;
        let mut set = HashSet::new();
        set.insert(EntityKey::issue(1, 1));
        set.insert(EntityKey::issue(1, 1)); // duplicate
        set.insert(EntityKey::mr(1, 1));
        assert_eq!(set.len(), 2);
    }

    #[test]
    fn test_screen_labels() {
        assert_eq!(Screen::Dashboard.label(), "Dashboard");
        assert_eq!(Screen::IssueList.label(), "Issues");
        assert_eq!(Screen::MrList.label(), "Merge Requests");
        assert_eq!(Screen::Search.label(), "Search");
    }

    #[test]
    fn test_screen_is_detail_or_entity() {
        assert!(Screen::IssueDetail(EntityKey::issue(1, 1)).is_detail_or_entity());
        assert!(Screen::MrDetail(EntityKey::mr(1, 1)).is_detail_or_entity());
        assert!(!Screen::Dashboard.is_detail_or_entity());
        assert!(!Screen::IssueList.is_detail_or_entity());
        assert!(!Screen::Search.is_detail_or_entity());
    }

    #[test]
    fn test_app_error_display() {
        let err = AppError::DbBusy;
        assert!(err.to_string().contains("busy"));

        let err = AppError::NetworkRateLimited {
            retry_after_secs: Some(30),
        };
        assert!(err.to_string().contains("30s"));

        let err = AppError::NetworkRateLimited {
            retry_after_secs: None,
        };
        assert!(err.to_string().contains("shortly"));

        let err = AppError::AuthFailed;
        assert!(err.to_string().contains("token"));
    }

    #[test]
    fn test_input_mode_default_is_normal() {
        assert!(matches!(InputMode::default(), InputMode::Normal));
    }

    #[test]
    fn test_msg_from_event_resize() {
        let event = Event::Resize {
            width: 80,
            height: 24,
        };
        let msg = Msg::from(event);
        assert!(matches!(
            msg,
            Msg::Resize {
                width: 80,
                height: 24
            }
        ));
    }

    #[test]
    fn test_msg_from_event_tick() {
        let msg = Msg::from(Event::Tick);
        assert!(matches!(msg, Msg::Tick));
    }

    #[test]
    fn test_msg_from_event_focus_wraps_raw() {
        // Non-Resize/Tick events must pass through as RawEvent.
        let msg = Msg::from(Event::Focus(true));
        assert!(matches!(msg, Msg::RawEvent(Event::Focus(true))));
    }
}
|
||||||
339
crates/lore-tui/src/navigation.rs
Normal file
339
crates/lore-tui/src/navigation.rs
Normal file
@@ -0,0 +1,339 @@
|
|||||||
|
#![allow(dead_code)] // Phase 1: consumed by LoreApp in bd-6pmy
|
||||||
|
|
||||||
|
//! Browser-like navigation stack with vim-style jump list.
|
||||||
|
//!
|
||||||
|
//! Supports back/forward (browser), jump back/forward (vim Ctrl+O/Ctrl+I),
|
||||||
|
//! and breadcrumb generation. State is preserved when navigating away —
|
||||||
|
//! screens are never cleared on pop.
|
||||||
|
|
||||||
|
use crate::message::Screen;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// NavigationStack
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Browser-like navigation with back/forward stacks and a vim jump list.
///
/// The jump list only records "significant" hops — detail views and
/// cross-references — skipping list/dashboard screens that users
/// visit briefly during drilling.
pub struct NavigationStack {
    // Screens behind `current`; last element is the most recently left.
    back_stack: Vec<Screen>,
    // The screen currently displayed.
    current: Screen,
    // Screens popped via back; last element is the next "forward" target.
    forward_stack: Vec<Screen>,
    // Detail/entity screens recorded for vim-style Ctrl+O / Ctrl+I jumps.
    jump_list: Vec<Screen>,
    // Cursor into `jump_list`; equals `jump_list.len()` when at the newest entry.
    jump_index: usize,
}
|
||||||
|
|
||||||
|
impl NavigationStack {
    /// Create a new stack starting at the Dashboard.
    #[must_use]
    pub fn new() -> Self {
        Self {
            back_stack: Vec::new(),
            current: Screen::Dashboard,
            forward_stack: Vec::new(),
            jump_list: Vec::new(),
            jump_index: 0,
        }
    }

    /// The currently displayed screen.
    #[must_use]
    pub fn current(&self) -> &Screen {
        &self.current
    }

    /// Whether the current screen matches the given screen.
    #[must_use]
    pub fn is_at(&self, screen: &Screen) -> bool {
        &self.current == screen
    }

    /// Navigate to a new screen.
    ///
    /// Pushes current to back_stack, clears forward_stack (browser behavior),
    /// and records detail hops in the jump list.
    pub fn push(&mut self, screen: Screen) {
        // `replace` swaps in the new screen and hands back the old one
        // without cloning.
        let old = std::mem::replace(&mut self.current, screen);
        self.back_stack.push(old);
        self.forward_stack.clear();

        // Record significant hops in jump list (vim behavior):
        // truncate any forward entries beyond jump_index, then append.
        if self.current.is_detail_or_entity() {
            self.jump_list.truncate(self.jump_index);
            self.jump_list.push(self.current.clone());
            self.jump_index = self.jump_list.len();
        }
    }

    /// Go back to the previous screen.
    ///
    /// Returns `None` at root (can't pop past the initial screen).
    pub fn pop(&mut self) -> Option<&Screen> {
        let prev = self.back_stack.pop()?;
        let old = std::mem::replace(&mut self.current, prev);
        self.forward_stack.push(old);
        Some(&self.current)
    }

    /// Go forward (redo a pop).
    ///
    /// Returns `None` if there's nothing to go forward to.
    pub fn go_forward(&mut self) -> Option<&Screen> {
        let next = self.forward_stack.pop()?;
        let old = std::mem::replace(&mut self.current, next);
        self.back_stack.push(old);
        Some(&self.current)
    }

    /// Jump backward through the jump list (vim Ctrl+O).
    ///
    /// Only visits detail/entity screens.
    // NOTE(review): jumping only returns the target screen — it does not
    // update `current` or the back/forward stacks; the caller is expected
    // to navigate there. Confirm intended in the Phase 1 wiring.
    pub fn jump_back(&mut self) -> Option<&Screen> {
        if self.jump_index == 0 {
            return None;
        }
        self.jump_index -= 1;
        self.jump_list.get(self.jump_index)
    }

    /// Jump forward through the jump list (vim Ctrl+I).
    pub fn jump_forward(&mut self) -> Option<&Screen> {
        if self.jump_index >= self.jump_list.len() {
            return None;
        }
        // Return the entry at the cursor, then advance past it.
        let screen = self.jump_list.get(self.jump_index)?;
        self.jump_index += 1;
        Some(screen)
    }

    /// Reset to a single screen, clearing all history.
    pub fn reset_to(&mut self, screen: Screen) {
        self.current = screen;
        self.back_stack.clear();
        self.forward_stack.clear();
        self.jump_list.clear();
        self.jump_index = 0;
    }

    /// Breadcrumb labels for the current navigation path.
    ///
    /// Returns the back stack labels plus the current screen label.
    #[must_use]
    pub fn breadcrumbs(&self) -> Vec<&str> {
        self.back_stack
            .iter()
            .chain(std::iter::once(&self.current))
            .map(Screen::label)
            .collect()
    }

    /// Navigation depth (1 = at root, 2 = one push deep, etc.).
    #[must_use]
    pub fn depth(&self) -> usize {
        self.back_stack.len() + 1
    }

    /// Whether there's anything to go back to.
    #[must_use]
    pub fn can_go_back(&self) -> bool {
        !self.back_stack.is_empty()
    }

    /// Whether there's anything to go forward to.
    #[must_use]
    pub fn can_go_forward(&self) -> bool {
        !self.forward_stack.is_empty()
    }
}
|
||||||
|
|
||||||
|
impl Default for NavigationStack {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    // Same-module tests may inspect private fields (e.g. `jump_list`) directly.
    use super::*;
    use crate::message::EntityKey;

    #[test]
    fn test_new_starts_at_dashboard() {
        let nav = NavigationStack::new();
        assert!(nav.is_at(&Screen::Dashboard));
        assert_eq!(nav.depth(), 1);
    }

    #[test]
    fn test_push_pop_preserves_order() {
        let mut nav = NavigationStack::new();
        nav.push(Screen::IssueList);
        nav.push(Screen::IssueDetail(EntityKey::issue(1, 42)));

        assert!(nav.is_at(&Screen::IssueDetail(EntityKey::issue(1, 42))));
        assert_eq!(nav.depth(), 3);

        nav.pop();
        assert!(nav.is_at(&Screen::IssueList));

        nav.pop();
        assert!(nav.is_at(&Screen::Dashboard));
    }

    #[test]
    fn test_pop_at_root_returns_none() {
        let mut nav = NavigationStack::new();
        assert!(nav.pop().is_none());
        assert!(nav.is_at(&Screen::Dashboard));
    }

    #[test]
    fn test_forward_stack_cleared_on_new_push() {
        let mut nav = NavigationStack::new();
        nav.push(Screen::IssueList);
        nav.push(Screen::Search);
        nav.pop(); // back to IssueList, Search in forward
        assert!(nav.can_go_forward());

        nav.push(Screen::Timeline); // new push clears forward
        assert!(!nav.can_go_forward());
    }

    #[test]
    fn test_go_forward_restores() {
        let mut nav = NavigationStack::new();
        nav.push(Screen::IssueList);
        nav.push(Screen::Search);
        nav.pop(); // back to IssueList

        let screen = nav.go_forward();
        assert!(screen.is_some());
        assert!(nav.is_at(&Screen::Search));
    }

    #[test]
    fn test_go_forward_returns_none_when_empty() {
        let mut nav = NavigationStack::new();
        assert!(nav.go_forward().is_none());
    }

    #[test]
    fn test_jump_list_skips_list_screens() {
        let mut nav = NavigationStack::new();
        nav.push(Screen::IssueList); // not a detail — skip
        nav.push(Screen::IssueDetail(EntityKey::issue(1, 1))); // detail — record
        nav.push(Screen::MrList); // not a detail — skip
        nav.push(Screen::MrDetail(EntityKey::mr(1, 2))); // detail — record

        assert_eq!(nav.jump_list.len(), 2);
    }

    #[test]
    fn test_jump_back_and_forward() {
        let mut nav = NavigationStack::new();
        let issue = Screen::IssueDetail(EntityKey::issue(1, 1));
        let mr = Screen::MrDetail(EntityKey::mr(1, 2));

        nav.push(Screen::IssueList);
        nav.push(issue.clone());
        nav.push(Screen::MrList);
        nav.push(mr.clone());

        // jump_index is at 2 (past the end of 2 items)
        let prev = nav.jump_back();
        assert_eq!(prev, Some(&mr));

        let prev = nav.jump_back();
        assert_eq!(prev, Some(&issue));

        // at beginning
        assert!(nav.jump_back().is_none());

        // forward
        let next = nav.jump_forward();
        assert_eq!(next, Some(&issue));

        let next = nav.jump_forward();
        assert_eq!(next, Some(&mr));

        // at end
        assert!(nav.jump_forward().is_none());
    }

    #[test]
    fn test_jump_list_truncates_on_new_push() {
        let mut nav = NavigationStack::new();
        nav.push(Screen::IssueDetail(EntityKey::issue(1, 1)));
        nav.push(Screen::IssueDetail(EntityKey::issue(1, 2)));
        nav.push(Screen::IssueDetail(EntityKey::issue(1, 3)));

        // jump back twice
        nav.jump_back();
        nav.jump_back();
        // jump_index = 1, pointing at issue 2

        // new detail push truncates forward entries
        nav.push(Screen::MrDetail(EntityKey::mr(1, 99)));

        // should have issue(1,1) and mr(1,99), not issue(1,2) or issue(1,3)
        assert_eq!(nav.jump_list.len(), 2);
        assert_eq!(nav.jump_list[1], Screen::MrDetail(EntityKey::mr(1, 99)));
    }

    #[test]
    fn test_reset_clears_all_history() {
        let mut nav = NavigationStack::new();
        nav.push(Screen::IssueList);
        nav.push(Screen::Search);
        nav.push(Screen::IssueDetail(EntityKey::issue(1, 1)));

        nav.reset_to(Screen::Dashboard);

        assert!(nav.is_at(&Screen::Dashboard));
        assert_eq!(nav.depth(), 1);
        assert!(!nav.can_go_back());
        assert!(!nav.can_go_forward());
        assert!(nav.jump_list.is_empty());
    }

    #[test]
    fn test_breadcrumbs_reflect_stack() {
        let mut nav = NavigationStack::new();
        assert_eq!(nav.breadcrumbs(), vec!["Dashboard"]);

        nav.push(Screen::IssueList);
        assert_eq!(nav.breadcrumbs(), vec!["Dashboard", "Issues"]);

        nav.push(Screen::IssueDetail(EntityKey::issue(1, 42)));
        assert_eq!(nav.breadcrumbs(), vec!["Dashboard", "Issues", "Issue"]);
    }

    #[test]
    fn test_default_is_new() {
        let nav = NavigationStack::default();
        assert!(nav.is_at(&Screen::Dashboard));
        assert_eq!(nav.depth(), 1);
    }

    #[test]
    fn test_can_go_back_and_forward() {
        let mut nav = NavigationStack::new();
        assert!(!nav.can_go_back());
        assert!(!nav.can_go_forward());

        nav.push(Screen::IssueList);
        assert!(nav.can_go_back());
        assert!(!nav.can_go_forward());

        nav.pop();
        assert!(!nav.can_go_back());
        assert!(nav.can_go_forward());
    }
}
|
||||||
587
crates/lore-tui/src/safety.rs
Normal file
587
crates/lore-tui/src/safety.rs
Normal file
@@ -0,0 +1,587 @@
|
|||||||
|
//! Terminal safety: sanitize untrusted text, URL policy, credential redaction.
|
||||||
|
//!
|
||||||
|
//! GitLab content can contain ANSI escapes, bidi overrides, OSC hyperlinks,
|
||||||
|
//! and C1 control codes that could corrupt terminal rendering. This module
|
||||||
|
//! strips dangerous sequences while preserving a safe SGR subset for readability.
|
||||||
|
|
||||||
|
use std::fmt::Write;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// UrlPolicy
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Controls how OSC 8 hyperlinks in input are handled.
///
/// OSC 8 lets content attach an arbitrary URL to displayed link text, so
/// untrusted input should use [`UrlPolicy::Strip`] (the default) or
/// [`UrlPolicy::Footnote`]; reserve [`UrlPolicy::Passthrough`] for content
/// the application already trusts.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum UrlPolicy {
    /// Remove OSC 8 hyperlinks entirely, keeping only the link text.
    #[default]
    Strip,
    /// Convert hyperlinks to numbered footnotes: `text [1]` with URL list appended.
    Footnote,
    /// Pass hyperlinks through unchanged (only for trusted content).
    Passthrough,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// RedactPattern
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Common patterns for PII/secret redaction.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct RedactPattern {
|
||||||
|
patterns: Vec<regex::Regex>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RedactPattern {
|
||||||
|
/// Create a default set of redaction patterns (tokens, emails, etc.).
|
||||||
|
#[must_use]
|
||||||
|
pub fn defaults() -> Self {
|
||||||
|
let patterns = vec![
|
||||||
|
// GitLab personal access tokens
|
||||||
|
regex::Regex::new(r"glpat-[A-Za-z0-9_\-]{20,}").expect("valid regex"),
|
||||||
|
// Generic bearer/API tokens (long hex or base64-ish strings after common prefixes)
|
||||||
|
regex::Regex::new(r"(?i)(token|bearer|api[_-]?key)[\s:=]+\S{8,}").expect("valid regex"),
|
||||||
|
// Email addresses
|
||||||
|
regex::Regex::new(r"[a-zA-Z0-9._%+\-]+@[a-zA-Z0-9.\-]+\.[a-zA-Z]{2,}")
|
||||||
|
.expect("valid regex"),
|
||||||
|
];
|
||||||
|
Self { patterns }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply all redaction patterns to the input string.
|
||||||
|
#[must_use]
|
||||||
|
pub fn redact(&self, input: &str) -> String {
|
||||||
|
let mut result = input.to_string();
|
||||||
|
for pattern in &self.patterns {
|
||||||
|
result = pattern.replace_all(&result, "[REDACTED]").into_owned();
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// sanitize_for_terminal
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Sanitize untrusted text for safe terminal display.
|
||||||
|
///
|
||||||
|
/// - Strips C1 control codes (0x80-0x9F)
|
||||||
|
/// - Strips OSC sequences (ESC ] ... ST)
|
||||||
|
/// - Strips cursor movement CSI sequences (CSI n A/B/C/D/E/F/G/H/J/K)
|
||||||
|
/// - Strips bidi overrides (U+202A-U+202E, U+2066-U+2069)
|
||||||
|
/// - Preserves safe SGR subset (bold, italic, underline, reset, standard colors)
|
||||||
|
///
|
||||||
|
/// `url_policy` controls handling of OSC 8 hyperlinks.
|
||||||
|
#[must_use]
|
||||||
|
pub fn sanitize_for_terminal(input: &str, url_policy: UrlPolicy) -> String {
|
||||||
|
let mut output = String::with_capacity(input.len());
|
||||||
|
let mut footnotes: Vec<String> = Vec::new();
|
||||||
|
let chars: Vec<char> = input.chars().collect();
|
||||||
|
let len = chars.len();
|
||||||
|
let mut i = 0;
|
||||||
|
|
||||||
|
while i < len {
|
||||||
|
let ch = chars[i];
|
||||||
|
|
||||||
|
// --- Bidi overrides ---
|
||||||
|
if is_bidi_override(ch) {
|
||||||
|
i += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- C1 control codes (U+0080-U+009F) ---
|
||||||
|
if ('\u{0080}'..='\u{009F}').contains(&ch) {
|
||||||
|
i += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- C0 control codes except tab, newline, carriage return ---
|
||||||
|
if ch.is_ascii_control() && ch != '\t' && ch != '\n' && ch != '\r' && ch != '\x1B' {
|
||||||
|
i += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- ESC sequences ---
|
||||||
|
if ch == '\x1B' {
|
||||||
|
if i + 1 < len {
|
||||||
|
match chars[i + 1] {
|
||||||
|
// CSI sequence: ESC [
|
||||||
|
'[' => {
|
||||||
|
let (consumed, safe_seq) = parse_csi(&chars, i);
|
||||||
|
if let Some(seq) = safe_seq {
|
||||||
|
output.push_str(&seq);
|
||||||
|
}
|
||||||
|
i += consumed;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// OSC sequence: ESC ]
|
||||||
|
']' => {
|
||||||
|
let (consumed, link_text, link_url) = parse_osc(&chars, i);
|
||||||
|
match url_policy {
|
||||||
|
UrlPolicy::Strip => {
|
||||||
|
if let Some(text) = link_text {
|
||||||
|
output.push_str(&text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
UrlPolicy::Footnote => {
|
||||||
|
if let (Some(text), Some(url)) = (link_text, link_url) {
|
||||||
|
footnotes.push(url);
|
||||||
|
let _ = write!(output, "{text} [{n}]", n = footnotes.len());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
UrlPolicy::Passthrough => {
|
||||||
|
// Reproduce the raw OSC sequence
|
||||||
|
for &ch_raw in &chars[i..len.min(i + consumed)] {
|
||||||
|
output.push(ch_raw);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
i += consumed;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
// Unknown ESC sequence — skip ESC + next char
|
||||||
|
i += 2;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Trailing ESC at end of input
|
||||||
|
i += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Normal character ---
|
||||||
|
output.push(ch);
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append footnotes if any
|
||||||
|
if !footnotes.is_empty() {
|
||||||
|
output.push('\n');
|
||||||
|
for (idx, url) in footnotes.iter().enumerate() {
|
||||||
|
let _ = write!(output, "\n[{}] {url}", idx + 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
output
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Bidi check
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// True for Unicode bidirectional control characters that can visually
/// reorder text: explicit embeddings/overrides (U+202A..U+202E: LRE, RLE,
/// PDF, LRO, RLO) and isolates (U+2066..U+2069: LRI, RLI, FSI, PDI).
fn is_bidi_override(ch: char) -> bool {
    matches!(ch, '\u{202A}'..='\u{202E}' | '\u{2066}'..='\u{2069}')
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// CSI parser
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Parse a CSI sequence starting at `chars[start]` (which should be ESC).
|
||||||
|
///
|
||||||
|
/// Returns `(chars_consumed, Option<safe_sequence_string>)`.
|
||||||
|
/// If the CSI is a safe SGR, returns the full sequence string to preserve.
|
||||||
|
/// Otherwise returns None (strip it).
|
||||||
|
fn parse_csi(chars: &[char], start: usize) -> (usize, Option<String>) {
|
||||||
|
// Minimum: ESC [ <final_byte>
|
||||||
|
debug_assert!(chars[start] == '\x1B');
|
||||||
|
debug_assert!(start + 1 < chars.len() && chars[start + 1] == '[');
|
||||||
|
|
||||||
|
let mut i = start + 2; // skip ESC [
|
||||||
|
let len = chars.len();
|
||||||
|
|
||||||
|
// Collect parameter bytes (0x30-0x3F) and intermediate bytes (0x20-0x2F)
|
||||||
|
let param_start = i;
|
||||||
|
while i < len && (chars[i] as u32) >= 0x20 && (chars[i] as u32) <= 0x3F {
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect intermediate bytes
|
||||||
|
while i < len && (chars[i] as u32) >= 0x20 && (chars[i] as u32) <= 0x2F {
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Final byte (0x40-0x7E)
|
||||||
|
if i >= len || (chars[i] as u32) < 0x40 || (chars[i] as u32) > 0x7E {
|
||||||
|
// Malformed — consume what we've seen and strip
|
||||||
|
return (i.saturating_sub(start).max(2), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let final_byte = chars[i];
|
||||||
|
let consumed = i + 1 - start;
|
||||||
|
|
||||||
|
// Only preserve SGR sequences (final byte 'm')
|
||||||
|
if final_byte == 'm' {
|
||||||
|
let param_str: String = chars[param_start..i].iter().collect();
|
||||||
|
if is_safe_sgr(¶m_str) {
|
||||||
|
let full_seq: String = chars[start..start + consumed].iter().collect();
|
||||||
|
return (consumed, Some(full_seq));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Anything else (cursor movement A-H, erase J/K, etc.) is stripped
|
||||||
|
(consumed, None)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if all SGR parameters in a sequence are in the safe subset.
|
||||||
|
///
|
||||||
|
/// Safe: 0 (reset), 1 (bold), 3 (italic), 4 (underline), 22 (normal intensity),
|
||||||
|
/// 23 (not italic), 24 (not underline), 39 (default fg), 49 (default bg),
|
||||||
|
/// 30-37 (standard fg), 40-47 (standard bg), 90-97 (bright fg), 100-107 (bright bg).
|
||||||
|
fn is_safe_sgr(params: &str) -> bool {
|
||||||
|
if params.is_empty() {
|
||||||
|
return true; // ESC[m is reset
|
||||||
|
}
|
||||||
|
|
||||||
|
for param in params.split(';') {
|
||||||
|
let param = param.trim();
|
||||||
|
if param.is_empty() {
|
||||||
|
continue; // treat empty as 0
|
||||||
|
}
|
||||||
|
let Ok(n) = param.parse::<u32>() else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !is_safe_sgr_code(n) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
/// True when `n` is an SGR code in the safe display subset: reset (0),
/// bold/italic/underline and their resets (1, 3, 4, 22, 23, 24), default
/// colors (39, 49), and the standard/bright 16-color palettes
/// (30-37, 40-47, 90-97, 100-107).
fn is_safe_sgr_code(n: u32) -> bool {
    const ATTRIBUTES: [u32; 9] = [0, 1, 3, 4, 22, 23, 24, 39, 49];
    ATTRIBUTES.contains(&n)
        || (30..=37).contains(&n) // standard foreground colors
        || (40..=47).contains(&n) // standard background colors
        || (90..=97).contains(&n) // bright foreground colors
        || (100..=107).contains(&n) // bright background colors
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// OSC parser
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Parse an OSC sequence starting at `chars[start]` (ESC ]).
///
/// Returns `(chars_consumed, link_text, link_url)`.
/// For OSC 8 hyperlinks: `ESC ] 8 ; params ; url ST text ESC ] 8 ; ; ST`
/// For other OSC: consumed without extracting link data.
fn parse_osc(chars: &[char], start: usize) -> (usize, Option<String>, Option<String>) {
    debug_assert!(chars[start] == '\x1B');
    debug_assert!(start + 1 < chars.len() && chars[start + 1] == ']');

    let len = chars.len();
    let i = start + 2; // skip ESC ]

    // Find ST (String Terminator): ESC \ or BEL (0x07).
    // `find_st` returns (index_of_ST, chars_consumed_from_`i`_through_ST).
    let osc_end = find_st(chars, i);

    // Check if this is OSC 8 (hyperlink)
    if i < len && chars[i] == '8' && i + 1 < len && chars[i + 1] == ';' {
        // OSC 8 hyperlink: ESC ] 8 ; params ; url ST ... ESC ] 8 ; ; ST
        // `osc_content` is the payload of the *opening* OSC only ("8;params;url").
        let osc_content: String = chars[i..osc_end.0].iter().collect();
        let first_consumed = osc_end.1;

        // Extract URL from "8;params;url" (None when the url part is empty)
        let url = extract_osc8_url(&osc_content);

        // Now find the link text (between first ST and second OSC 8)
        let after_first_st = start + 2 + first_consumed;
        let mut text = String::new();
        let mut j = after_first_st;

        // Collect text until we hit the closing OSC 8 or end of input
        while j < len {
            if j + 1 < len && chars[j] == '\x1B' && chars[j + 1] == ']' {
                // Found another OSC — this should be the closing OSC 8
                // (we do not verify its payload; any OSC terminates the link).
                let close_end = find_st(chars, j + 2);
                return (
                    // Total span: from `start` through the closing ST, i.e.
                    // (j + 2 + close_end.1) - start.
                    j + close_end.1 - start + 2,
                    Some(text),
                    url.map(String::from),
                );
            }
            text.push(chars[j]);
            j += 1;
        }

        // Reached end without closing OSC 8 — consume everything scanned.
        return (j - start, Some(text), url.map(String::from));
    }

    // Non-OSC-8: just consume and strip.
    // Consumed = 2 (for ESC ]) + content-plus-ST length; the
    // `(start + 2 - start)` term is simply 2 written against the indices.
    (osc_end.1 + (start + 2 - start), None, None)
}
|
||||||
|
|
||||||
|
/// Find the String Terminator (ST) for an OSC sequence.
/// ST is either ESC \ (two chars) or BEL (0x07).
/// Returns `(content_end_index, total_consumed_from_content_start)`, where
/// the consumed count includes the terminator itself (1 char for BEL,
/// 2 chars for ESC \). Unterminated input consumes everything remaining.
fn find_st(chars: &[char], from: usize) -> (usize, usize) {
    let mut idx = from;
    loop {
        match chars.get(idx) {
            // Unterminated — consume everything.
            None => return (chars.len(), chars.len() - from),
            // BEL terminator: one char.
            Some('\x07') => return (idx, idx - from + 1),
            // ESC \ terminator: two chars.
            Some('\x1B') if chars.get(idx + 1) == Some(&'\\') => {
                return (idx, idx - from + 2);
            }
            Some(_) => idx += 1,
        }
    }
}
|
||||||
|
|
||||||
|
/// Extract URL from OSC 8 content `"8;params;url"`.
///
/// Returns `None` when the content is not an OSC 8 payload or the URL part
/// is empty. The URL is everything after the second `;` (so URLs containing
/// `;` are preserved intact).
fn extract_osc8_url(content: &str) -> Option<&str> {
    let rest = content.strip_prefix("8;")?;
    // Split off the params field; the remainder is the URL.
    let (_params, url) = rest.split_once(';')?;
    (!url.is_empty()).then_some(url)
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// Unit tests pinning the sanitizer's stripping/preservation behavior, URL
// policies, redaction patterns, and panic-freedom on arbitrary input.
#[cfg(test)]
mod tests {
    use super::*;

    // --- CSI / cursor movement ---

    #[test]
    fn test_strips_cursor_movement() {
        // CSI 5A = cursor up 5
        let input = "before\x1B[5Aafter";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, "beforeafter");
    }

    #[test]
    fn test_strips_cursor_movement_all_directions() {
        for dir in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] {
            let input = format!("x\x1B[3{dir}y");
            let result = sanitize_for_terminal(&input, UrlPolicy::Strip);
            assert_eq!(result, "xy", "failed for direction {dir}");
        }
    }

    #[test]
    fn test_strips_erase_sequences() {
        // CSI 2J = erase display
        let input = "before\x1B[2Jafter";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, "beforeafter");
    }

    // --- SGR preservation ---

    #[test]
    fn test_preserves_bold_italic_underline_reset() {
        let input = "\x1B[1mbold\x1B[0m \x1B[3mitalic\x1B[0m \x1B[4munderline\x1B[0m";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, input);
    }

    #[test]
    fn test_preserves_standard_colors() {
        // Red foreground, green background
        let input = "\x1B[31mred\x1B[42m on green\x1B[0m";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, input);
    }

    #[test]
    fn test_preserves_bright_colors() {
        let input = "\x1B[91mbright red\x1B[0m";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, input);
    }

    #[test]
    fn test_preserves_combined_safe_sgr() {
        // Bold + red foreground in one sequence
        let input = "\x1B[1;31mbold red\x1B[0m";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, input);
    }

    #[test]
    fn test_strips_unsafe_sgr() {
        // SGR 8 = hidden text (not in safe list)
        let input = "\x1B[8mhidden\x1B[0m";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        // SGR 8 stripped, SGR 0 preserved
        assert_eq!(result, "hidden\x1B[0m");
    }

    // --- C1 control codes ---

    #[test]
    fn test_strips_c1_control_codes() {
        // U+008D = Reverse Index, U+009B = CSI (8-bit)
        let input = format!("before{}middle{}after", '\u{008D}', '\u{009B}');
        let result = sanitize_for_terminal(&input, UrlPolicy::Strip);
        assert_eq!(result, "beforemiddleafter");
    }

    // --- Bidi overrides ---

    #[test]
    fn test_strips_bidi_overrides() {
        let input = format!(
            "normal{}reversed{}end",
            '\u{202E}', // RLO
            '\u{202C}' // PDF
        );
        let result = sanitize_for_terminal(&input, UrlPolicy::Strip);
        assert_eq!(result, "normalreversedend");
    }

    #[test]
    fn test_strips_all_bidi_chars() {
        // Covers every embedding/override/isolate control the sanitizer strips.
        let bidi_chars = [
            '\u{202A}', '\u{202B}', '\u{202C}', '\u{202D}', '\u{202E}', '\u{2066}', '\u{2067}',
            '\u{2068}', '\u{2069}',
        ];
        for ch in bidi_chars {
            let input = format!("a{ch}b");
            let result = sanitize_for_terminal(&input, UrlPolicy::Strip);
            assert_eq!(result, "ab", "failed for U+{:04X}", ch as u32);
        }
    }

    // --- OSC sequences ---

    #[test]
    fn test_strips_osc_sequences() {
        // OSC 0 (set title): ESC ] 0 ; title BEL
        let input = "before\x1B]0;My Title\x07after";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, "beforeafter");
    }

    // --- OSC 8 hyperlinks ---

    #[test]
    fn test_url_policy_strip() {
        // OSC 8 hyperlink: ESC]8;;url ST text ESC]8;; ST
        let input = "click \x1B]8;;https://example.com\x07here\x1B]8;;\x07 done";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, "click here done");
    }

    #[test]
    fn test_url_policy_footnote() {
        // Link text gets a numbered marker; the URL list is appended at the end.
        let input = "click \x1B]8;;https://example.com\x07here\x1B]8;;\x07 done";
        let result = sanitize_for_terminal(input, UrlPolicy::Footnote);
        assert!(result.contains("here [1]"));
        assert!(result.contains("[1] https://example.com"));
    }

    // --- Redaction ---

    #[test]
    fn test_redact_gitlab_token() {
        let redactor = RedactPattern::defaults();
        let input = "My token is glpat-AbCdEfGhIjKlMnOpQrStUvWx";
        let result = redactor.redact(input);
        assert_eq!(result, "My token is [REDACTED]");
    }

    #[test]
    fn test_redact_email() {
        let redactor = RedactPattern::defaults();
        let input = "Contact user@example.com for details";
        let result = redactor.redact(input);
        assert_eq!(result, "Contact [REDACTED] for details");
    }

    #[test]
    fn test_redact_bearer_token() {
        let redactor = RedactPattern::defaults();
        let input = "Authorization: Bearer eyJhbGciOiJSUzI1NiIsInR5cCI";
        let result = redactor.redact(input);
        assert!(result.contains("[REDACTED]"));
        assert!(!result.contains("eyJ"));
    }

    // --- Edge cases ---

    #[test]
    fn test_empty_input() {
        assert_eq!(sanitize_for_terminal("", UrlPolicy::Strip), "");
    }

    #[test]
    fn test_safe_content_passthrough() {
        // Tab, newline, and CR are the only C0 controls allowed through.
        let input = "Hello, world! This is normal text.\nWith newlines\tand tabs.";
        assert_eq!(sanitize_for_terminal(input, UrlPolicy::Strip), input);
    }

    #[test]
    fn test_trailing_esc() {
        let input = "text\x1B";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, "text");
    }

    #[test]
    fn test_malformed_csi_does_not_eat_text() {
        // ESC [ without a valid final byte before next printable
        let input = "a\x1B[b";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        // The malformed CSI is consumed but shouldn't eat "b" as text
        // ESC[ is start, 'b' is final byte (0x62 is in 0x40-0x7E range)
        // So this is CSI with final byte 'b' (cursor back) — gets stripped
        assert_eq!(result, "a");
    }

    #[test]
    fn test_utf8_adjacent_to_escapes() {
        // Multi-byte chars must survive intact next to preserved SGR sequences.
        let input = "\x1B[1m日本語\x1B[0m text";
        let result = sanitize_for_terminal(input, UrlPolicy::Strip);
        assert_eq!(result, "\x1B[1m日本語\x1B[0m text");
    }

    #[test]
    fn test_fuzz_no_panic() {
        // 1000 random-ish byte sequences — must not panic
        for seed in 0u16..1000 {
            let mut bytes = Vec::new();
            for j in 0..50 {
                bytes.push(((seed.wrapping_mul(31).wrapping_add(j)) & 0xFF) as u8);
            }
            // Best-effort UTF-8
            let input = String::from_utf8_lossy(&bytes);
            let _ = sanitize_for_terminal(&input, UrlPolicy::Strip);
        }
    }
}
|
||||||
11
crates/lore-tui/src/state/command_palette.rs
Normal file
11
crates/lore-tui/src/state/command_palette.rs
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
#![allow(dead_code)]

//! Command palette state.

/// State for the command palette overlay.
#[derive(Debug, Default)]
pub struct CommandPaletteState {
    // Text typed into the palette's query input.
    pub query: String,
    // Whether the query input has keyboard focus (typed keys go to it).
    pub query_focused: bool,
    // Index of the currently highlighted entry in the palette's list.
    pub selected_index: usize,
}
|
||||||
10
crates/lore-tui/src/state/dashboard.rs
Normal file
10
crates/lore-tui/src/state/dashboard.rs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
#![allow(dead_code)]

//! Dashboard screen state.

/// State for the dashboard summary screen.
#[derive(Debug, Default)]
pub struct DashboardState {
    // Total issue count shown on the dashboard summary.
    pub issue_count: u64,
    // Total merge-request count shown on the dashboard summary.
    pub mr_count: u64,
}
|
||||||
14
crates/lore-tui/src/state/issue_detail.rs
Normal file
14
crates/lore-tui/src/state/issue_detail.rs
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
#![allow(dead_code)]

//! Issue detail screen state.

use crate::message::{Discussion, EntityKey, IssueDetail};

/// State for the issue detail screen.
#[derive(Debug, Default)]
pub struct IssueDetailState {
    // Key of the issue being displayed; None before any issue is opened.
    pub key: Option<EntityKey>,
    // Loaded detail payload; None until data arrives.
    pub detail: Option<IssueDetail>,
    // Discussion threads attached to the issue.
    pub discussions: Vec<Discussion>,
    // Vertical scroll position within the detail view.
    pub scroll_offset: u16,
}
|
||||||
14
crates/lore-tui/src/state/issue_list.rs
Normal file
14
crates/lore-tui/src/state/issue_list.rs
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
#![allow(dead_code)]

//! Issue list screen state.

use crate::message::IssueRow;

/// State for the issue list screen.
#[derive(Debug, Default)]
pub struct IssueListState {
    // Rows currently displayed in the list.
    pub rows: Vec<IssueRow>,
    // Text of the filter input.
    pub filter: String,
    // Whether the filter input has keyboard focus.
    pub filter_focused: bool,
    // Index of the currently selected row.
    pub selected_index: usize,
}
|
||||||
335
crates/lore-tui/src/state/mod.rs
Normal file
335
crates/lore-tui/src/state/mod.rs
Normal file
@@ -0,0 +1,335 @@
|
|||||||
|
#![allow(dead_code)] // Phase 1: consumed by LoreApp in bd-6pmy
|
||||||
|
|
||||||
|
//! Top-level state composition for the TUI.
|
||||||
|
//!
|
||||||
|
//! Each screen has its own state struct. State is preserved when
|
||||||
|
//! navigating away — screens are never cleared on pop.
|
||||||
|
//!
|
||||||
|
//! [`LoadState`] enables stale-while-revalidate: screens show the last
|
||||||
|
//! available data during a refresh, with a spinner indicating the load.
|
||||||
|
//!
|
||||||
|
//! [`ScreenIntent`] is the pure return type from state handlers — they
|
||||||
|
//! never spawn async tasks directly. The intent is interpreted by
|
||||||
|
//! [`LoreApp`](crate::app::LoreApp) which dispatches through the
|
||||||
|
//! [`TaskSupervisor`](crate::task_supervisor::TaskSupervisor).
|
||||||
|
|
||||||
|
pub mod command_palette;
|
||||||
|
pub mod dashboard;
|
||||||
|
pub mod issue_detail;
|
||||||
|
pub mod issue_list;
|
||||||
|
pub mod mr_detail;
|
||||||
|
pub mod mr_list;
|
||||||
|
pub mod search;
|
||||||
|
pub mod sync;
|
||||||
|
pub mod timeline;
|
||||||
|
pub mod who;
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use crate::message::Screen;
|
||||||
|
|
||||||
|
// Re-export screen states for convenience.
|
||||||
|
pub use command_palette::CommandPaletteState;
|
||||||
|
pub use dashboard::DashboardState;
|
||||||
|
pub use issue_detail::IssueDetailState;
|
||||||
|
pub use issue_list::IssueListState;
|
||||||
|
pub use mr_detail::MrDetailState;
|
||||||
|
pub use mr_list::MrListState;
|
||||||
|
pub use search::SearchState;
|
||||||
|
pub use sync::SyncState;
|
||||||
|
pub use timeline::TimelineState;
|
||||||
|
pub use who::WhoState;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// LoadState
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Loading state for a screen's data.
///
/// Enables stale-while-revalidate: screens render their last data while
/// showing a spinner when `Refreshing`.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum LoadState {
    /// No load in progress, data is current (or screen was never loaded).
    #[default]
    Idle,
    /// First load — no data to show yet, display a full-screen spinner.
    LoadingInitial,
    /// Background refresh — show existing data with a spinner indicator.
    Refreshing,
    /// Load failed — display the error alongside any stale data.
    Error(String),
}

impl LoadState {
    /// Returns `true` while a load (initial or background refresh) is in
    /// flight; `Idle` and `Error` are both settled states.
    #[must_use]
    pub fn is_loading(&self) -> bool {
        // Exhaustive match (no catch-all) so adding a variant forces a review here.
        match self {
            Self::LoadingInitial | Self::Refreshing => true,
            Self::Idle | Self::Error(_) => false,
        }
    }
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// ScreenLoadStateMap
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Tracks per-screen load state.
|
||||||
|
///
|
||||||
|
/// Returns [`LoadState::Idle`] for screens that haven't been tracked.
|
||||||
|
/// Automatically removes entries set to `Idle` to prevent unbounded growth.
|
||||||
|
#[derive(Debug, Default)]
|
||||||
|
pub struct ScreenLoadStateMap {
|
||||||
|
map: HashMap<Screen, LoadState>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScreenLoadStateMap {
|
||||||
|
/// Get the load state for a screen (defaults to `Idle`).
|
||||||
|
#[must_use]
|
||||||
|
pub fn get(&self, screen: &Screen) -> &LoadState {
|
||||||
|
static IDLE: LoadState = LoadState::Idle;
|
||||||
|
self.map.get(screen).unwrap_or(&IDLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the load state for a screen.
|
||||||
|
///
|
||||||
|
/// Setting to `Idle` removes the entry to prevent map growth.
|
||||||
|
pub fn set(&mut self, screen: Screen, state: LoadState) {
|
||||||
|
if state == LoadState::Idle {
|
||||||
|
self.map.remove(&screen);
|
||||||
|
} else {
|
||||||
|
self.map.insert(screen, state);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether any screen is currently loading.
|
||||||
|
#[must_use]
|
||||||
|
pub fn any_loading(&self) -> bool {
|
||||||
|
self.map.values().any(LoadState::is_loading)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// ScreenIntent
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Pure return type from screen state handlers.
///
/// State handlers must never spawn async work directly — they return
/// an intent that [`LoreApp`] interprets and dispatches through the
/// [`TaskSupervisor`]. Keeping handlers pure makes them unit-testable
/// without a runtime.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ScreenIntent {
    /// No action needed.
    None,
    /// Navigate to a new screen.
    Navigate(Screen),
    /// Screen data needs re-querying (e.g., filter changed).
    RequeryNeeded(Screen),
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// ScopeContext
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Global scope filters applied across all screens.
///
/// When a project filter is active, all data queries scope to that
/// project. The TUI shows the active scope in the status bar.
#[derive(Debug, Default)]
pub struct ScopeContext {
    /// Active project filter (project_id); `None` means unscoped.
    pub project_id: Option<i64>,
    /// Human-readable project name for display.
    // NOTE(review): presumably kept in sync with `project_id` by whoever sets
    // the scope — the struct itself does not enforce that pairing.
    pub project_name: Option<String>,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// AppState
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Top-level state composition for the TUI.
///
/// Each field holds one screen's state. State is preserved when
/// navigating away and restored on return.
#[derive(Debug, Default)]
pub struct AppState {
    // Per-screen states (one field per screen; never cleared on navigation).
    pub dashboard: DashboardState,
    pub issue_list: IssueListState,
    pub issue_detail: IssueDetailState,
    pub mr_list: MrListState,
    pub mr_detail: MrDetailState,
    pub search: SearchState,
    pub timeline: TimelineState,
    pub who: WhoState,
    pub sync: SyncState,
    pub command_palette: CommandPaletteState,

    // Cross-cutting state.
    // Global project scope applied to all data queries.
    pub global_scope: ScopeContext,
    // Per-screen loading indicators (stale-while-revalidate).
    pub load_state: ScreenLoadStateMap,
    // Transient error message shown as a toast; None when dismissed.
    pub error_toast: Option<String>,
    // Whether the help overlay is visible.
    pub show_help: bool,
    // Last known terminal dimensions as (width, height) — TODO confirm order
    // against the code that sets it.
    pub terminal_size: (u16, u16),
}
|
||||||
|
|
||||||
|
impl AppState {
|
||||||
|
/// Set a screen's load state.
|
||||||
|
pub fn set_loading(&mut self, screen: Screen, state: LoadState) {
|
||||||
|
self.load_state.set(screen, state);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the global error toast.
|
||||||
|
pub fn set_error(&mut self, msg: String) {
|
||||||
|
self.error_toast = Some(msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Clear the global error toast.
|
||||||
|
pub fn clear_error(&mut self) {
|
||||||
|
self.error_toast = None;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether any text input is currently focused.
|
||||||
|
#[must_use]
|
||||||
|
pub fn has_text_focus(&self) -> bool {
|
||||||
|
self.issue_list.filter_focused
|
||||||
|
|| self.mr_list.filter_focused
|
||||||
|
|| self.search.query_focused
|
||||||
|
|| self.command_palette.query_focused
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove focus from all text inputs.
|
||||||
|
pub fn blur_text_focus(&mut self) {
|
||||||
|
self.issue_list.filter_focused = false;
|
||||||
|
self.mr_list.filter_focused = false;
|
||||||
|
self.search.query_focused = false;
|
||||||
|
self.command_palette.query_focused = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // A fresh map stores nothing, so every screen reads as Idle.
    #[test]
    fn test_load_state_default_idle() {
        let map = ScreenLoadStateMap::default();
        assert_eq!(*map.get(&Screen::Dashboard), LoadState::Idle);
        assert_eq!(*map.get(&Screen::IssueList), LoadState::Idle);
    }

    // Setting one screen's state does not affect other screens.
    #[test]
    fn test_load_state_set_and_get() {
        let mut map = ScreenLoadStateMap::default();
        map.set(Screen::Dashboard, LoadState::LoadingInitial);
        assert_eq!(*map.get(&Screen::Dashboard), LoadState::LoadingInitial);
        assert_eq!(*map.get(&Screen::IssueList), LoadState::Idle);
    }

    // Idle is the implicit default, so setting Idle drops the map entry.
    #[test]
    fn test_load_state_set_idle_removes_entry() {
        let mut map = ScreenLoadStateMap::default();
        map.set(Screen::Dashboard, LoadState::Refreshing);
        assert_eq!(map.map.len(), 1);

        map.set(Screen::Dashboard, LoadState::Idle);
        assert_eq!(map.map.len(), 0);
        assert_eq!(*map.get(&Screen::Dashboard), LoadState::Idle);
    }

    // any_loading is true only while some screen is in a loading variant;
    // an Error state does not count as loading.
    #[test]
    fn test_any_loading() {
        let mut map = ScreenLoadStateMap::default();
        assert!(!map.any_loading());

        map.set(Screen::Dashboard, LoadState::LoadingInitial);
        assert!(map.any_loading());

        map.set(Screen::Dashboard, LoadState::Error("oops".into()));
        assert!(!map.any_loading());
    }

    // Only LoadingInitial and Refreshing report is_loading() == true.
    #[test]
    fn test_load_state_is_loading() {
        assert!(!LoadState::Idle.is_loading());
        assert!(LoadState::LoadingInitial.is_loading());
        assert!(LoadState::Refreshing.is_loading());
        assert!(!LoadState::Error("x".into()).is_loading());
    }

    // Default AppState: help hidden, no toast, zeroed terminal size.
    #[test]
    fn test_app_state_default_compiles() {
        let state = AppState::default();
        assert!(!state.show_help);
        assert!(state.error_toast.is_none());
        assert_eq!(state.terminal_size, (0, 0));
    }

    // set_error stores the toast text; clear_error removes it.
    #[test]
    fn test_app_state_set_error_and_clear() {
        let mut state = AppState::default();
        state.set_error("db busy".into());
        assert_eq!(state.error_toast.as_deref(), Some("db busy"));

        state.clear_error();
        assert!(state.error_toast.is_none());
    }

    // A single focused input is enough to make has_text_focus true.
    #[test]
    fn test_app_state_has_text_focus() {
        let mut state = AppState::default();
        assert!(!state.has_text_focus());

        state.search.query_focused = true;
        assert!(state.has_text_focus());
    }

    // blur_text_focus clears every focus flag in one call.
    #[test]
    fn test_app_state_blur_text_focus() {
        let mut state = AppState::default();
        state.issue_list.filter_focused = true;
        state.mr_list.filter_focused = true;
        state.search.query_focused = true;
        state.command_palette.query_focused = true;

        state.blur_text_focus();

        assert!(!state.has_text_focus());
        assert!(!state.issue_list.filter_focused);
        assert!(!state.mr_list.filter_focused);
        assert!(!state.search.query_focused);
        assert!(!state.command_palette.query_focused);
    }

    // set_loading delegates to the per-screen load-state map.
    #[test]
    fn test_app_state_set_loading() {
        let mut state = AppState::default();
        state.set_loading(Screen::IssueList, LoadState::Refreshing);
        assert_eq!(
            *state.load_state.get(&Screen::IssueList),
            LoadState::Refreshing
        );
    }

    // ScreenIntent variants compare structurally by value.
    #[test]
    fn test_screen_intent_variants() {
        let none = ScreenIntent::None;
        let nav = ScreenIntent::Navigate(Screen::IssueList);
        let requery = ScreenIntent::RequeryNeeded(Screen::Search);

        assert_eq!(none, ScreenIntent::None);
        assert_eq!(nav, ScreenIntent::Navigate(Screen::IssueList));
        assert_eq!(requery, ScreenIntent::RequeryNeeded(Screen::Search));
    }

    // Default scope applies no project filter.
    #[test]
    fn test_scope_context_default() {
        let scope = ScopeContext::default();
        assert!(scope.project_id.is_none());
        assert!(scope.project_name.is_none());
    }
}
|
||||||
14
crates/lore-tui/src/state/mr_detail.rs
Normal file
14
crates/lore-tui/src/state/mr_detail.rs
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
//! Merge request detail screen state.
|
||||||
|
|
||||||
|
use crate::message::{Discussion, EntityKey, MrDetail};
|
||||||
|
|
||||||
|
/// State for the MR detail screen.
#[derive(Debug, Default)]
pub struct MrDetailState {
    /// Identity of the MR currently shown, if any.
    pub key: Option<EntityKey>,
    /// Loaded detail payload, if any.
    pub detail: Option<MrDetail>,
    /// Discussion threads attached to the MR.
    pub discussions: Vec<Discussion>,
    /// Vertical scroll position within the rendered detail.
    pub scroll_offset: u16,
}
|
||||||
14
crates/lore-tui/src/state/mr_list.rs
Normal file
14
crates/lore-tui/src/state/mr_list.rs
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
//! Merge request list screen state.
|
||||||
|
|
||||||
|
use crate::message::MrRow;
|
||||||
|
|
||||||
|
/// State for the MR list screen.
#[derive(Debug, Default)]
pub struct MrListState {
    /// Rows currently displayed in the list.
    pub rows: Vec<MrRow>,
    /// Current filter input text.
    pub filter: String,
    /// Whether the filter input has keyboard focus.
    pub filter_focused: bool,
    /// Index of the selected row within `rows`.
    pub selected_index: usize,
}
|
||||||
14
crates/lore-tui/src/state/search.rs
Normal file
14
crates/lore-tui/src/state/search.rs
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
//! Search screen state.
|
||||||
|
|
||||||
|
use crate::message::SearchResult;
|
||||||
|
|
||||||
|
/// State for the search screen.
#[derive(Debug, Default)]
pub struct SearchState {
    /// Current search query text.
    pub query: String,
    /// Whether the query input has keyboard focus.
    pub query_focused: bool,
    /// Results for the last executed query.
    pub results: Vec<SearchResult>,
    /// Index of the selected result within `results`.
    pub selected_index: usize,
}
|
||||||
15
crates/lore-tui/src/state/sync.rs
Normal file
15
crates/lore-tui/src/state/sync.rs
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
//! Sync screen state.
|
||||||
|
|
||||||
|
/// State for the sync progress/summary screen.
#[derive(Debug, Default)]
pub struct SyncState {
    /// Name of the current sync stage.
    pub stage: String,
    /// Progress counter within the current stage.
    pub current: u64,
    /// Total units of work for the current stage.
    pub total: u64,
    /// Accumulated log output lines.
    pub log_lines: Vec<String>,
    /// Whether the sync has finished.
    pub completed: bool,
    /// Total elapsed time in milliseconds, once known.
    pub elapsed_ms: Option<u64>,
    /// Error message if the sync failed.
    pub error: Option<String>,
}
|
||||||
12
crates/lore-tui/src/state/timeline.rs
Normal file
12
crates/lore-tui/src/state/timeline.rs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
//! Timeline screen state.
|
||||||
|
|
||||||
|
use crate::message::TimelineEvent;
|
||||||
|
|
||||||
|
/// State for the timeline screen.
#[derive(Debug, Default)]
pub struct TimelineState {
    /// Events to render, in display order.
    pub events: Vec<TimelineEvent>,
    /// Vertical scroll position within the rendered timeline.
    pub scroll_offset: u16,
}
|
||||||
12
crates/lore-tui/src/state/who.rs
Normal file
12
crates/lore-tui/src/state/who.rs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
#![allow(dead_code)]
|
||||||
|
|
||||||
|
//! Who (people intelligence) screen state.
|
||||||
|
|
||||||
|
use crate::message::WhoResult;
|
||||||
|
|
||||||
|
/// State for the who/people screen.
#[derive(Debug, Default)]
pub struct WhoState {
    /// Result of the last who-query, if any.
    pub result: Option<WhoResult>,
    /// Vertical scroll position within the rendered result.
    pub scroll_offset: u16,
}
|
||||||
380
crates/lore-tui/src/task_supervisor.rs
Normal file
380
crates/lore-tui/src/task_supervisor.rs
Normal file
@@ -0,0 +1,380 @@
|
|||||||
|
#![allow(dead_code)] // Phase 1: consumed by LoreApp in bd-6pmy
|
||||||
|
|
||||||
|
//! Centralized background task management with dedup and cancellation.
|
||||||
|
//!
|
||||||
|
//! All background work (DB queries, sync, search) flows through
|
||||||
|
//! [`TaskSupervisor`]. Submitting a task with a key that already has an
|
||||||
|
//! active handle cancels the previous task via its [`CancelToken`] and
|
||||||
|
//! bumps the generation counter.
|
||||||
|
//!
|
||||||
|
//! Generation IDs enable stale-result detection: when an async result
|
||||||
|
//! arrives, [`is_current`] checks whether the result's generation
|
||||||
|
//! matches the latest submission for that key.
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
|
||||||
|
|
||||||
|
use crate::message::Screen;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// TaskKey
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Deduplication key for background tasks.
///
/// Two tasks with the same key cannot run concurrently — submitting a
/// new task with an existing key cancels the previous one.
// Hash + Eq are derived so the key can index the supervisor's HashMap.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TaskKey {
    /// Load data for a specific screen.
    LoadScreen(Screen),
    /// Global search query.
    Search,
    /// Sync stream (only one at a time).
    SyncStream,
    /// Re-query after filter change on a specific screen.
    FilterRequery(Screen),
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// TaskPriority
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Priority levels for task scheduling.
///
/// Lower numeric value = higher priority.
// Ord is derived, so Input < Navigation < Background follows from the
// explicit discriminants below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum TaskPriority {
    /// User-initiated input (highest priority).
    Input = 0,
    /// Navigation-triggered data load.
    Navigation = 1,
    /// Background refresh / prefetch (lowest priority).
    Background = 2,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// CancelToken
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Thread-safe cooperative cancellation flag.
///
/// Background tasks poll [`is_cancelled`] periodically and exit early
/// when it returns `true`.
#[derive(Debug)]
pub struct CancelToken {
    // One-way flag: set by cancel(), read by is_cancelled(); never reset.
    cancelled: AtomicBool,
}
|
||||||
|
|
||||||
|
impl CancelToken {
|
||||||
|
/// Create a new, non-cancelled token.
|
||||||
|
#[must_use]
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
cancelled: AtomicBool::new(false),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Signal cancellation.
|
||||||
|
pub fn cancel(&self) {
|
||||||
|
self.cancelled.store(true, Ordering::Relaxed);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check whether cancellation has been requested.
|
||||||
|
#[must_use]
|
||||||
|
pub fn is_cancelled(&self) -> bool {
|
||||||
|
self.cancelled.load(Ordering::Relaxed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for CancelToken {
    // Defers to new(): a fresh, non-cancelled token.
    fn default() -> Self {
        Self::new()
    }
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// InterruptHandle
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Opaque handle for interrupting a rusqlite operation.
///
/// Wraps the rusqlite `InterruptHandle` so the supervisor can cancel
/// long-running queries. This is only set for tasks that lease a reader
/// connection from [`DbManager`](crate::db::DbManager).
pub struct InterruptHandle {
    // The wrapped rusqlite handle; exposed only via interrupt().
    handle: rusqlite::InterruptHandle,
}
|
||||||
|
|
||||||
|
impl std::fmt::Debug for InterruptHandle {
    // Manual impl: formats as `InterruptHandle { .. }` without exposing
    // (or requiring Debug on) the wrapped rusqlite handle.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("InterruptHandle").finish_non_exhaustive()
    }
}
|
||||||
|
|
||||||
|
impl InterruptHandle {
    /// Wrap a rusqlite interrupt handle.
    #[must_use]
    pub fn new(handle: rusqlite::InterruptHandle) -> Self {
        Self { handle }
    }

    /// Interrupt the associated SQLite operation.
    // Delegates directly to rusqlite; safe to call from another thread
    // than the one running the query (per rusqlite's API — TODO confirm
    // version-specific guarantees).
    pub fn interrupt(&self) {
        self.handle.interrupt();
    }
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// TaskHandle
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Handle returned when a task is submitted.
///
/// Callers use this to pass the generation ID into async work so
/// results can be tagged and checked for staleness.
#[derive(Debug)]
pub struct TaskHandle {
    /// Dedup key for this task.
    pub key: TaskKey,
    /// Monotonically increasing generation for stale detection.
    pub generation: u64,
    /// Cooperative cancellation token (shared with the supervisor).
    pub cancel: Arc<CancelToken>,
    /// Optional SQLite interrupt handle for long queries.
    // Always None at submit time; populated later by the task owner.
    pub interrupt: Option<InterruptHandle>,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// TaskSupervisor
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Manages background tasks with deduplication and cancellation.
///
/// Only one task per [`TaskKey`] can be active. Submitting a new task
/// with an existing key cancels the previous one (via its cancel token
/// and optional interrupt handle) before registering the new handle.
pub struct TaskSupervisor {
    /// Active tasks, at most one per key.
    active: HashMap<TaskKey, TaskHandle>,
    /// Source of generation IDs; starts at 1 and only increases.
    next_generation: AtomicU64,
}
|
||||||
|
|
||||||
|
impl TaskSupervisor {
|
||||||
|
/// Create a new supervisor with no active tasks.
|
||||||
|
#[must_use]
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
active: HashMap::new(),
|
||||||
|
next_generation: AtomicU64::new(1),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Submit a new task, cancelling any existing task with the same key.
|
||||||
|
///
|
||||||
|
/// Returns a [`TaskHandle`] with a fresh generation ID and a shared
|
||||||
|
/// cancel token. The caller clones the `Arc<CancelToken>` and passes
|
||||||
|
/// it into the async work.
|
||||||
|
pub fn submit(&mut self, key: TaskKey) -> &TaskHandle {
|
||||||
|
// Cancel existing task with this key, if any.
|
||||||
|
if let Some(old) = self.active.remove(&key) {
|
||||||
|
old.cancel.cancel();
|
||||||
|
if let Some(interrupt) = &old.interrupt {
|
||||||
|
interrupt.interrupt();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let generation = self.next_generation.fetch_add(1, Ordering::Relaxed);
|
||||||
|
let cancel = Arc::new(CancelToken::new());
|
||||||
|
|
||||||
|
let handle = TaskHandle {
|
||||||
|
key: key.clone(),
|
||||||
|
generation,
|
||||||
|
cancel,
|
||||||
|
interrupt: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
self.active.insert(key.clone(), handle);
|
||||||
|
self.active.get(&key).expect("just inserted")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check whether a generation is current for a given key.
|
||||||
|
///
|
||||||
|
/// Returns `true` only if the key has an active handle with the
|
||||||
|
/// specified generation.
|
||||||
|
#[must_use]
|
||||||
|
pub fn is_current(&self, key: &TaskKey, generation: u64) -> bool {
|
||||||
|
self.active
|
||||||
|
.get(key)
|
||||||
|
.is_some_and(|h| h.generation == generation)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mark a task as complete, removing its handle.
|
||||||
|
///
|
||||||
|
/// Only removes the handle if the generation matches the active one.
|
||||||
|
/// This prevents a late-arriving completion from removing a newer
|
||||||
|
/// task's handle.
|
||||||
|
pub fn complete(&mut self, key: &TaskKey, generation: u64) {
|
||||||
|
if self.is_current(key, generation) {
|
||||||
|
self.active.remove(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Cancel all active tasks.
|
||||||
|
///
|
||||||
|
/// Used during shutdown to ensure background work stops promptly.
|
||||||
|
pub fn cancel_all(&mut self) {
|
||||||
|
for (_, handle) in self.active.drain() {
|
||||||
|
handle.cancel.cancel();
|
||||||
|
if let Some(interrupt) = &handle.interrupt {
|
||||||
|
interrupt.interrupt();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Number of currently active tasks.
|
||||||
|
#[must_use]
|
||||||
|
pub fn active_count(&self) -> usize {
|
||||||
|
self.active.len()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for TaskSupervisor {
    // Defers to new(): empty supervisor, generation counter at 1.
    fn default() -> Self {
        Self::new()
    }
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Resubmitting a key cancels the old task and bumps the generation.
    #[test]
    fn test_submit_cancels_previous() {
        let mut sup = TaskSupervisor::new();

        let gen1 = sup.submit(TaskKey::Search).generation;
        let cancel1 = sup.active.get(&TaskKey::Search).unwrap().cancel.clone();

        let gen2 = sup.submit(TaskKey::Search).generation;

        // First task's token should be cancelled.
        assert!(cancel1.is_cancelled());
        // Second task should have a different (higher) generation.
        assert!(gen2 > gen1);
        // Only one active task for this key.
        assert_eq!(sup.active_count(), 1);
    }

    // Only the latest submission's generation counts as current.
    #[test]
    fn test_is_current_after_supersede() {
        let mut sup = TaskSupervisor::new();

        let gen1 = sup.submit(TaskKey::Search).generation;
        let gen2 = sup.submit(TaskKey::Search).generation;

        assert!(!sup.is_current(&TaskKey::Search, gen1));
        assert!(sup.is_current(&TaskKey::Search, gen2));
    }

    // Completing with the live generation removes the handle.
    #[test]
    fn test_complete_removes_handle() {
        let mut sup = TaskSupervisor::new();
        let generation = sup.submit(TaskKey::Search).generation;

        assert_eq!(sup.active_count(), 1);
        sup.complete(&TaskKey::Search, generation);
        assert_eq!(sup.active_count(), 0);
    }

    // A stale completion must not evict the newer task's handle.
    #[test]
    fn test_complete_ignores_stale() {
        let mut sup = TaskSupervisor::new();

        let gen1 = sup.submit(TaskKey::Search).generation;
        let gen2 = sup.submit(TaskKey::Search).generation;

        // Completing with old generation should NOT remove the newer handle.
        sup.complete(&TaskKey::Search, gen1);
        assert_eq!(sup.active_count(), 1);
        assert!(sup.is_current(&TaskKey::Search, gen2));
    }

    // Generations increase across submissions regardless of key.
    #[test]
    fn test_generation_monotonic() {
        let mut sup = TaskSupervisor::new();

        let g1 = sup.submit(TaskKey::Search).generation;
        let g2 = sup.submit(TaskKey::SyncStream).generation;
        let g3 = sup.submit(TaskKey::Search).generation;

        assert!(g1 < g2);
        assert!(g2 < g3);
    }

    // Distinct keys do not dedup against each other.
    #[test]
    fn test_different_keys_coexist() {
        let mut sup = TaskSupervisor::new();

        sup.submit(TaskKey::Search);
        sup.submit(TaskKey::SyncStream);
        sup.submit(TaskKey::LoadScreen(Screen::Dashboard));

        assert_eq!(sup.active_count(), 3);
    }

    // cancel_all cancels every token and clears the active map.
    #[test]
    fn test_cancel_all() {
        let mut sup = TaskSupervisor::new();

        let cancel_search = {
            sup.submit(TaskKey::Search);
            sup.active.get(&TaskKey::Search).unwrap().cancel.clone()
        };
        let cancel_sync = {
            sup.submit(TaskKey::SyncStream);
            sup.active.get(&TaskKey::SyncStream).unwrap().cancel.clone()
        };

        sup.cancel_all();

        assert!(cancel_search.is_cancelled());
        assert!(cancel_sync.is_cancelled());
        assert_eq!(sup.active_count(), 0);
    }

    // A fresh token is not cancelled; cancel() flips it permanently.
    #[test]
    fn test_cancel_token_default_is_not_cancelled() {
        let token = CancelToken::new();
        assert!(!token.is_cancelled());
        token.cancel();
        assert!(token.is_cancelled());
    }

    // Compile-time check: tokens can cross thread boundaries.
    #[test]
    fn test_cancel_token_is_send_sync() {
        fn assert_send_sync<T: Send + Sync>() {}
        assert_send_sync::<CancelToken>();
        assert_send_sync::<Arc<CancelToken>>();
    }

    // Default::default() yields an empty supervisor.
    #[test]
    fn test_task_supervisor_default() {
        let sup = TaskSupervisor::default();
        assert_eq!(sup.active_count(), 0);
    }

    // FilterRequery keys for different screens are independent.
    #[test]
    fn test_filter_requery_key_distinct_per_screen() {
        let mut sup = TaskSupervisor::new();

        sup.submit(TaskKey::FilterRequery(Screen::IssueList));
        sup.submit(TaskKey::FilterRequery(Screen::MrList));

        assert_eq!(sup.active_count(), 2);
    }
}
|
||||||
251
crates/lore-tui/src/theme.rs
Normal file
251
crates/lore-tui/src/theme.rs
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
#![allow(dead_code)] // Phase 0: types defined now, consumed in Phase 1+
|
||||||
|
|
||||||
|
//! Flexoki-based theme for the lore TUI.
|
||||||
|
//!
|
||||||
|
//! Uses FrankenTUI's `AdaptiveColor::adaptive(light, dark)` for automatic
|
||||||
|
//! light/dark mode switching. The palette is [Flexoki](https://stephango.com/flexoki)
|
||||||
|
//! by Steph Ango, designed in Oklab perceptual color space for balanced contrast.
|
||||||
|
|
||||||
|
use ftui::{AdaptiveColor, Color, PackedRgba, Style, Theme};
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Flexoki palette constants
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
// Base tones
// Neutral ramp from lightest (PAPER) to darkest (BLACK); numeric
// suffixes follow the published Flexoki scale.
const PAPER: Color = Color::rgb(0xFF, 0xFC, 0xF0);
const BASE_50: Color = Color::rgb(0xF2, 0xF0, 0xE5);
const BASE_100: Color = Color::rgb(0xE6, 0xE4, 0xD9);
const BASE_200: Color = Color::rgb(0xCE, 0xCD, 0xC3);
const BASE_300: Color = Color::rgb(0xB7, 0xB5, 0xAC);
const BASE_400: Color = Color::rgb(0x9F, 0x9D, 0x96);
const BASE_500: Color = Color::rgb(0x87, 0x85, 0x80);
const BASE_600: Color = Color::rgb(0x6F, 0x6E, 0x69);
const BASE_700: Color = Color::rgb(0x57, 0x56, 0x53);
const BASE_800: Color = Color::rgb(0x40, 0x3E, 0x3C);
const BASE_850: Color = Color::rgb(0x34, 0x33, 0x31);
const BASE_900: Color = Color::rgb(0x28, 0x27, 0x26);
const BLACK: Color = Color::rgb(0x10, 0x0F, 0x0F);

// Accent colors — light-600 (for light mode)
const RED_600: Color = Color::rgb(0xAF, 0x30, 0x29);
const ORANGE_600: Color = Color::rgb(0xBC, 0x52, 0x15);
const YELLOW_600: Color = Color::rgb(0xAD, 0x83, 0x01);
const GREEN_600: Color = Color::rgb(0x66, 0x80, 0x0B);
const CYAN_600: Color = Color::rgb(0x24, 0x83, 0x7B);
const BLUE_600: Color = Color::rgb(0x20, 0x5E, 0xA6);
const PURPLE_600: Color = Color::rgb(0x5E, 0x40, 0x9D);

// Accent colors — dark-400 (for dark mode)
// Note: MAGENTA_400 has no light-600 counterpart above; it is used for
// dark-mode-only accents (e.g. event/state colors).
const RED_400: Color = Color::rgb(0xD1, 0x4D, 0x41);
const ORANGE_400: Color = Color::rgb(0xDA, 0x70, 0x2C);
const YELLOW_400: Color = Color::rgb(0xD0, 0xA2, 0x15);
const GREEN_400: Color = Color::rgb(0x87, 0x9A, 0x39);
const CYAN_400: Color = Color::rgb(0x3A, 0xA9, 0x9F);
const BLUE_400: Color = Color::rgb(0x43, 0x85, 0xBE);
const PURPLE_400: Color = Color::rgb(0x8B, 0x7E, 0xC8);
const MAGENTA_400: Color = Color::rgb(0xCE, 0x5D, 0x97);

// Muted fallback as PackedRgba (for Style::fg)
// Same value as BASE_500, but as the packed type Style::fg expects.
const MUTED_PACKED: PackedRgba = PackedRgba::rgb(0x87, 0x85, 0x80);
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// build_theme
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Build the lore TUI theme with Flexoki adaptive colors.
///
/// Each of the 19 semantic slots gets an `AdaptiveColor::adaptive(light, dark)`
/// pair. FrankenTUI detects the terminal background and resolves accordingly.
#[must_use]
pub fn build_theme() -> Theme {
    Theme::builder()
        // Brand / accent slots.
        .primary(AdaptiveColor::adaptive(BLUE_600, BLUE_400))
        .secondary(AdaptiveColor::adaptive(CYAN_600, CYAN_400))
        .accent(AdaptiveColor::adaptive(PURPLE_600, PURPLE_400))
        // Surfaces, light-to-dark.
        .background(AdaptiveColor::adaptive(PAPER, BLACK))
        .surface(AdaptiveColor::adaptive(BASE_50, BASE_900))
        .overlay(AdaptiveColor::adaptive(BASE_100, BASE_850))
        // Text hierarchy; BASE_500 is the midpoint, readable in both modes.
        .text(AdaptiveColor::adaptive(BASE_700, BASE_200))
        .text_muted(AdaptiveColor::adaptive(BASE_500, BASE_500))
        .text_subtle(AdaptiveColor::adaptive(BASE_400, BASE_600))
        // Status slots.
        .success(AdaptiveColor::adaptive(GREEN_600, GREEN_400))
        .warning(AdaptiveColor::adaptive(YELLOW_600, YELLOW_400))
        .error(AdaptiveColor::adaptive(RED_600, RED_400))
        .info(AdaptiveColor::adaptive(BLUE_600, BLUE_400))
        // Chrome: borders, selection, scrollbars.
        .border(AdaptiveColor::adaptive(BASE_300, BASE_700))
        .border_focused(AdaptiveColor::adaptive(BLUE_600, BLUE_400))
        .selection_bg(AdaptiveColor::adaptive(BASE_100, BASE_800))
        .selection_fg(AdaptiveColor::adaptive(BASE_700, BASE_100))
        .scrollbar_track(AdaptiveColor::adaptive(BASE_50, BASE_900))
        .scrollbar_thumb(AdaptiveColor::adaptive(BASE_300, BASE_700))
        .build()
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// State colors
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Map a GitLab entity state to a display color.
|
||||||
|
///
|
||||||
|
/// Returns fixed (non-adaptive) colors — state indicators should be
|
||||||
|
/// consistent regardless of light/dark mode.
|
||||||
|
#[must_use]
|
||||||
|
pub fn state_color(state: &str) -> Color {
|
||||||
|
match state {
|
||||||
|
"opened" => GREEN_400,
|
||||||
|
"closed" => RED_400,
|
||||||
|
"merged" => PURPLE_400,
|
||||||
|
"locked" => YELLOW_400,
|
||||||
|
_ => BASE_500,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Event type colors
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Map a timeline event type to a display color.
|
||||||
|
#[must_use]
|
||||||
|
pub fn event_color(event_type: &str) -> Color {
|
||||||
|
match event_type {
|
||||||
|
"created" => GREEN_400,
|
||||||
|
"updated" => BLUE_400,
|
||||||
|
"closed" => RED_400,
|
||||||
|
"merged" => PURPLE_400,
|
||||||
|
"commented" => CYAN_400,
|
||||||
|
"labeled" => ORANGE_400,
|
||||||
|
"milestoned" => YELLOW_400,
|
||||||
|
_ => BASE_500,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Label styling
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Convert a GitLab label hex color (e.g., "#FF0000" or "FF0000") to a Style.
|
||||||
|
///
|
||||||
|
/// Falls back to muted text color if the hex string is invalid.
|
||||||
|
#[must_use]
|
||||||
|
pub fn label_style(hex_color: &str) -> Style {
|
||||||
|
let packed = parse_hex_to_packed(hex_color).unwrap_or(MUTED_PACKED);
|
||||||
|
Style::default().fg(packed)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a hex color string like "#RRGGBB" or "RRGGBB" into a `PackedRgba`.
|
||||||
|
fn parse_hex_to_packed(s: &str) -> Option<PackedRgba> {
|
||||||
|
let hex = s.strip_prefix('#').unwrap_or(s);
|
||||||
|
if hex.len() != 6 {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let r = u8::from_str_radix(&hex[0..2], 16).ok()?;
|
||||||
|
let g = u8::from_str_radix(&hex[2..4], 16).ok()?;
|
||||||
|
let b = u8::from_str_radix(&hex[4..6], 16).ok()?;
|
||||||
|
Some(PackedRgba::rgb(r, g, b))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Tests for theme construction, semantic state/event colors, and
    //! hex-color parsing for labels.
    use super::*;

    #[test]
    fn test_build_theme_compiles() {
        let theme = build_theme();
        // Resolve for dark mode — primary should be Blue-400
        let resolved = theme.resolve(true);
        assert_eq!(resolved.primary, BLUE_400);
    }

    #[test]
    fn test_build_theme_light_mode() {
        // Light mode swaps to the darker Blue-600 primary for contrast.
        let theme = build_theme();
        let resolved = theme.resolve(false);
        assert_eq!(resolved.primary, BLUE_600);
    }

    #[test]
    fn test_build_theme_all_slots_differ_between_modes() {
        let theme = build_theme();
        let dark = theme.resolve(true);
        let light = theme.resolve(false);
        // Background should differ (Paper vs Black)
        assert_ne!(dark.background, light.background);
        // Text should differ
        assert_ne!(dark.text, light.text);
    }

    #[test]
    fn test_state_color_opened_is_green() {
        assert_eq!(state_color("opened"), GREEN_400);
    }

    #[test]
    fn test_state_color_closed_is_red() {
        assert_eq!(state_color("closed"), RED_400);
    }

    #[test]
    fn test_state_color_merged_is_purple() {
        assert_eq!(state_color("merged"), PURPLE_400);
    }

    #[test]
    fn test_state_color_unknown_returns_muted() {
        // Unrecognized states fall back to the muted base color.
        assert_eq!(state_color("unknown"), BASE_500);
    }

    #[test]
    fn test_event_color_created_is_green() {
        assert_eq!(event_color("created"), GREEN_400);
    }

    #[test]
    fn test_event_color_unknown_returns_muted() {
        assert_eq!(event_color("whatever"), BASE_500);
    }

    #[test]
    fn test_label_style_valid_hex_with_hash() {
        let style = label_style("#FF0000");
        assert_eq!(style.fg, Some(PackedRgba::rgb(0xFF, 0x00, 0x00)));
    }

    #[test]
    fn test_label_style_valid_hex_without_hash() {
        // The leading '#' is optional.
        let style = label_style("00FF00");
        assert_eq!(style.fg, Some(PackedRgba::rgb(0x00, 0xFF, 0x00)));
    }

    #[test]
    fn test_label_style_lowercase_hex() {
        // Hex digits are case-insensitive.
        let style = label_style("#ff0000");
        assert_eq!(style.fg, Some(PackedRgba::rgb(0xFF, 0x00, 0x00)));
    }

    #[test]
    fn test_label_style_invalid_hex_fallback() {
        let style = label_style("invalid");
        assert_eq!(style.fg, Some(MUTED_PACKED));
    }

    #[test]
    fn test_label_style_empty_fallback() {
        let style = label_style("");
        assert_eq!(style.fg, Some(MUTED_PACKED));
    }

    #[test]
    fn test_parse_hex_short_string() {
        // Three-digit shorthand (#FFF) is intentionally unsupported.
        assert!(parse_hex_to_packed("#FFF").is_none());
    }

    #[test]
    fn test_parse_hex_non_hex_chars() {
        assert!(parse_hex_to_packed("#GGHHII").is_none());
    }
}
|
||||||
208
crates/lore-tui/src/view/common/breadcrumb.rs
Normal file
208
crates/lore-tui/src/view/common/breadcrumb.rs
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
//! Navigation breadcrumb trail ("Dashboard > Issues > #42").
|
||||||
|
|
||||||
|
use ftui::core::geometry::Rect;
|
||||||
|
use ftui::render::cell::{Cell, PackedRgba};
|
||||||
|
use ftui::render::drawing::Draw;
|
||||||
|
use ftui::render::frame::Frame;
|
||||||
|
|
||||||
|
use crate::navigation::NavigationStack;
|
||||||
|
|
||||||
|
/// Render the navigation breadcrumb trail.
///
/// Shows "Dashboard > Issues > Issue" with " > " separators. When the
/// trail exceeds the available width, entries are truncated from the left
/// with a leading "...".
///
/// Crumb text is drawn in `text_color`; separators (and the ellipsis) are
/// drawn in `muted_color`. All output is clipped to `area`.
pub fn render_breadcrumb(
    frame: &mut Frame<'_>,
    area: Rect,
    nav: &NavigationStack,
    text_color: PackedRgba,
    muted_color: PackedRgba,
) {
    // Nothing useful fits in a zero-height or ultra-narrow (< 3 cols) area.
    if area.height == 0 || area.width < 3 {
        return;
    }

    let crumbs = nav.breadcrumbs();
    let separator = " > ";

    // Build the full breadcrumb string and calculate width.
    // NOTE(review): width is measured in bytes (`len()`), which over-counts
    // non-ASCII crumb names — presumably fine while crumbs are ASCII screen
    // names; confirm if localized titles are ever introduced.
    let full: String = crumbs.join(separator);
    let max_width = area.width as usize;

    let display = if full.len() <= max_width {
        full
    } else {
        // Truncate from the left: show "... > last_crumbs"
        truncate_breadcrumb_left(&crumbs, separator, max_width)
    };

    // Cell templates: `base` for crumb text, `muted` for separators/ellipsis.
    let base = Cell {
        fg: text_color,
        ..Cell::default()
    };
    let muted = Cell {
        fg: muted_color,
        ..Cell::default()
    };

    // Render each segment with separators in muted color.
    let mut x = area.x;
    let max_x = area.x.saturating_add(area.width);

    if let Some(rest) = display.strip_prefix("...") {
        // Render ellipsis in muted, then the rest
        x = frame.print_text_clipped(x, area.y, "...", muted, max_x);
        if !rest.is_empty() {
            render_crumb_segments(frame, x, area.y, rest, separator, base, muted, max_x);
        }
    } else {
        render_crumb_segments(frame, x, area.y, &display, separator, base, muted, max_x);
    }
}
|
||||||
|
|
||||||
|
/// Render breadcrumb text with separators in muted color.
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
fn render_crumb_segments(
|
||||||
|
frame: &mut Frame<'_>,
|
||||||
|
start_x: u16,
|
||||||
|
y: u16,
|
||||||
|
text: &str,
|
||||||
|
separator: &str,
|
||||||
|
base: Cell,
|
||||||
|
muted: Cell,
|
||||||
|
max_x: u16,
|
||||||
|
) {
|
||||||
|
let mut x = start_x;
|
||||||
|
let parts: Vec<&str> = text.split(separator).collect();
|
||||||
|
|
||||||
|
for (i, part) in parts.iter().enumerate() {
|
||||||
|
if i > 0 {
|
||||||
|
x = frame.print_text_clipped(x, y, separator, muted, max_x);
|
||||||
|
}
|
||||||
|
x = frame.print_text_clipped(x, y, part, base, max_x);
|
||||||
|
if x >= max_x {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Truncate a breadcrumb trail from the left so it fits in `max_width`.
///
/// Drops leading crumbs one at a time, replacing them with "..." until the
/// remaining trail fits. Falls back to `"...<last>"`, and finally to a bare
/// `"..."` when even the current screen's name is too wide.
fn truncate_breadcrumb_left(crumbs: &[&str], separator: &str, max_width: usize) -> String {
    const ELLIPSIS: &str = "...";

    // Keep progressively fewer crumbs from the right until one fits.
    for skip in 1..crumbs.len() {
        let tail = crumbs[skip..].join(separator);
        let candidate = format!("{ELLIPSIS}{separator}{tail}");
        if candidate.len() <= max_width {
            return candidate;
        }
    }

    // Last resort: ellipsis glued to the current screen's name.
    let last = crumbs.last().copied().unwrap_or("");
    if ELLIPSIS.len() + last.len() <= max_width {
        format!("{ELLIPSIS}{last}")
    } else {
        // Truly tiny terminal: just the ellipsis.
        ELLIPSIS.to_string()
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Buffer-level assertions for breadcrumb rendering and truncation.
    use super::*;
    use crate::message::Screen;
    use crate::navigation::NavigationStack;
    use ftui::render::grapheme_pool::GraphemePool;

    // Build a fresh Frame of the given size and run `$body` with it bound.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    fn white() -> PackedRgba {
        PackedRgba::rgb(0xFF, 0xFF, 0xFF)
    }

    fn gray() -> PackedRgba {
        PackedRgba::rgb(0x80, 0x80, 0x80)
    }

    #[test]
    fn test_breadcrumb_single_screen() {
        with_frame!(80, 1, |frame| {
            // A fresh stack starts on the Dashboard screen.
            let nav = NavigationStack::new();
            render_breadcrumb(&mut frame, Rect::new(0, 0, 80, 1), &nav, white(), gray());

            let cell = frame.buffer.get(0, 0).unwrap();
            assert!(
                cell.content.as_char() == Some('D'),
                "Expected 'D' at (0,0), got {:?}",
                cell.content.as_char()
            );
        });
    }

    #[test]
    fn test_breadcrumb_multi_screen() {
        with_frame!(80, 1, |frame| {
            let mut nav = NavigationStack::new();
            nav.push(Screen::IssueList);
            render_breadcrumb(&mut frame, Rect::new(0, 0, 80, 1), &nav, white(), gray());

            let d = frame.buffer.get(0, 0).unwrap();
            assert_eq!(d.content.as_char(), Some('D'));

            // "Dashboard > Issues" = 'I' at 12
            let i_cell = frame.buffer.get(12, 0).unwrap();
            assert_eq!(i_cell.content.as_char(), Some('I'));
        });
    }

    #[test]
    fn test_breadcrumb_truncation() {
        let crumbs = vec!["Dashboard", "Issues", "Issue"];
        let result = truncate_breadcrumb_left(&crumbs, " > ", 20);
        assert!(
            result.starts_with("..."),
            "Expected ellipsis prefix, got: {result}"
        );
        assert!(result.len() <= 20, "Result too long: {result}");
    }

    #[test]
    fn test_breadcrumb_zero_height_noop() {
        // Zero-height area must be a silent no-op (must not panic).
        with_frame!(80, 1, |frame| {
            let nav = NavigationStack::new();
            render_breadcrumb(&mut frame, Rect::new(0, 0, 80, 0), &nav, white(), gray());
        });
    }

    #[test]
    fn test_truncate_breadcrumb_fits() {
        let crumbs = vec!["A", "B"];
        let result = truncate_breadcrumb_left(&crumbs, " > ", 100);
        assert!(result.contains("..."), "Should always add ellipsis");
    }

    #[test]
    fn test_truncate_breadcrumb_single_entry() {
        let crumbs = vec!["Dashboard"];
        let result = truncate_breadcrumb_left(&crumbs, " > ", 5);
        assert_eq!(result, "...");
    }

    #[test]
    fn test_truncate_breadcrumb_shows_last_entries() {
        let crumbs = vec!["Dashboard", "Issues", "Issue Detail"];
        let result = truncate_breadcrumb_left(&crumbs, " > ", 30);
        assert!(result.starts_with("..."));
        // The rightmost (current) crumb must survive truncation.
        assert!(result.contains("Issue Detail"));
    }
}
|
||||||
124
crates/lore-tui/src/view/common/error_toast.rs
Normal file
124
crates/lore-tui/src/view/common/error_toast.rs
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
//! Floating error toast at bottom-right.
|
||||||
|
|
||||||
|
use ftui::core::geometry::Rect;
|
||||||
|
use ftui::render::cell::{Cell, PackedRgba};
|
||||||
|
use ftui::render::drawing::Draw;
|
||||||
|
use ftui::render::frame::Frame;
|
||||||
|
|
||||||
|
/// Render a floating error toast at the bottom-right of the area.
|
||||||
|
///
|
||||||
|
/// The toast has a colored background and truncates long messages.
|
||||||
|
pub fn render_error_toast(
|
||||||
|
frame: &mut Frame<'_>,
|
||||||
|
area: Rect,
|
||||||
|
msg: &str,
|
||||||
|
error_bg: PackedRgba,
|
||||||
|
error_fg: PackedRgba,
|
||||||
|
) {
|
||||||
|
if area.height < 3 || area.width < 10 || msg.is_empty() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Toast dimensions: message + padding, max 60 chars or half screen.
|
||||||
|
let max_toast_width = (area.width / 2).clamp(20, 60);
|
||||||
|
let toast_text = if msg.len() as u16 > max_toast_width.saturating_sub(4) {
|
||||||
|
let trunc_len = max_toast_width.saturating_sub(7) as usize;
|
||||||
|
format!(" {}... ", &msg[..trunc_len.min(msg.len())])
|
||||||
|
} else {
|
||||||
|
format!(" {msg} ")
|
||||||
|
};
|
||||||
|
let toast_width = toast_text.len() as u16;
|
||||||
|
let toast_height: u16 = 1;
|
||||||
|
|
||||||
|
// Position: bottom-right with 1-cell margin.
|
||||||
|
let x = area.right().saturating_sub(toast_width + 1);
|
||||||
|
let y = area.bottom().saturating_sub(toast_height + 1);
|
||||||
|
|
||||||
|
let toast_rect = Rect::new(x, y, toast_width, toast_height);
|
||||||
|
|
||||||
|
// Fill background.
|
||||||
|
let bg_cell = Cell {
|
||||||
|
bg: error_bg,
|
||||||
|
..Cell::default()
|
||||||
|
};
|
||||||
|
frame.draw_rect_filled(toast_rect, bg_cell);
|
||||||
|
|
||||||
|
// Render text.
|
||||||
|
let text_cell = Cell {
|
||||||
|
fg: error_fg,
|
||||||
|
bg: error_bg,
|
||||||
|
..Cell::default()
|
||||||
|
};
|
||||||
|
frame.print_text_clipped(x, y, &toast_text, text_cell, area.right());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Buffer-level assertions for the error toast.
    use super::*;
    use ftui::render::grapheme_pool::GraphemePool;

    // Build a fresh Frame of the given size and run `$body` with it bound.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    fn white() -> PackedRgba {
        PackedRgba::rgb(0xFF, 0xFF, 0xFF)
    }

    fn red_bg() -> PackedRgba {
        PackedRgba::rgb(0xFF, 0x00, 0x00)
    }

    #[test]
    fn test_error_toast_renders() {
        with_frame!(80, 24, |frame| {
            render_error_toast(
                &mut frame,
                Rect::new(0, 0, 80, 24),
                "Database is busy",
                red_bg(),
                white(),
            );

            // Toast row sits one cell above the bottom edge (24 - 1 - 1).
            let y = 22u16;
            let has_content = (40..80u16).any(|x| {
                let cell = frame.buffer.get(x, y).unwrap();
                !cell.is_empty()
            });
            assert!(has_content, "Expected error toast at bottom-right");
        });
    }

    #[test]
    fn test_error_toast_empty_message_noop() {
        with_frame!(80, 24, |frame| {
            render_error_toast(&mut frame, Rect::new(0, 0, 80, 24), "", red_bg(), white());

            // Scan the whole buffer: nothing should have been drawn.
            let has_content = (0..80u16).any(|x| {
                (0..24u16).any(|y| {
                    let cell = frame.buffer.get(x, y).unwrap();
                    !cell.is_empty()
                })
            });
            assert!(!has_content, "Empty message should render nothing");
        });
    }

    #[test]
    fn test_error_toast_truncates_long_message() {
        // Smoke test: a 200-char message must render without panicking.
        with_frame!(80, 24, |frame| {
            let long_msg = "A".repeat(200);
            render_error_toast(
                &mut frame,
                Rect::new(0, 0, 80, 24),
                &long_msg,
                red_bg(),
                white(),
            );
        });
    }
}
|
||||||
173
crates/lore-tui/src/view/common/help_overlay.rs
Normal file
173
crates/lore-tui/src/view/common/help_overlay.rs
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
//! Centered modal listing keybindings for the current screen.
|
||||||
|
|
||||||
|
use ftui::core::geometry::Rect;
|
||||||
|
use ftui::render::cell::{Cell, PackedRgba};
|
||||||
|
use ftui::render::drawing::Draw;
|
||||||
|
use ftui::render::frame::Frame;
|
||||||
|
|
||||||
|
use crate::commands::CommandRegistry;
|
||||||
|
use crate::message::Screen;
|
||||||
|
|
||||||
|
/// Render a centered help overlay listing keybindings for the current screen.
///
/// The overlay is a bordered modal that lists all commands from the
/// registry that are available on the current screen.
///
/// `scroll_offset` selects the first visible entry; a "(shown/total)"
/// indicator is drawn in the bottom-right corner when entries overflow.
#[allow(clippy::too_many_arguments)]
pub fn render_help_overlay(
    frame: &mut Frame<'_>,
    area: Rect,
    registry: &CommandRegistry,
    screen: &Screen,
    border_color: PackedRgba,
    text_color: PackedRgba,
    muted_color: PackedRgba,
    scroll_offset: usize,
) {
    // Too small for a useful modal — skip entirely.
    if area.height < 5 || area.width < 20 {
        return;
    }

    // Overlay dimensions: 60% of screen, capped.
    let overlay_width = (area.width * 3 / 5).clamp(30, 70);
    let overlay_height = (area.height * 3 / 5).clamp(8, 30);

    // Center the modal within `area`.
    let overlay_x = area.x + (area.width.saturating_sub(overlay_width)) / 2;
    let overlay_y = area.y + (area.height.saturating_sub(overlay_height)) / 2;
    let overlay_rect = Rect::new(overlay_x, overlay_y, overlay_width, overlay_height);

    // Draw border.
    let border_cell = Cell {
        fg: border_color,
        ..Cell::default()
    };
    frame.draw_border(
        overlay_rect,
        ftui::render::drawing::BorderChars::ROUNDED,
        border_cell,
    );

    // Title, centered on the top border row.
    let title = " Help (? to close) ";
    let title_x = overlay_x + (overlay_width.saturating_sub(title.len() as u16)) / 2;
    let title_cell = Cell {
        fg: border_color,
        ..Cell::default()
    };
    frame.print_text_clipped(title_x, overlay_y, title, title_cell, overlay_rect.right());

    // Inner content area (inside border): 2-col / 1-row inset.
    let inner = Rect::new(
        overlay_x + 2,
        overlay_y + 1,
        overlay_width.saturating_sub(4),
        overlay_height.saturating_sub(2),
    );

    // Get commands for this screen.
    let commands = registry.help_entries(screen);
    let visible_lines = inner.height as usize;

    // Key labels in the normal text color, descriptions muted.
    let key_cell = Cell {
        fg: text_color,
        ..Cell::default()
    };
    let desc_cell = Cell {
        fg: muted_color,
        ..Cell::default()
    };

    // One command per line, starting at `scroll_offset`.
    for (i, cmd) in commands.iter().skip(scroll_offset).enumerate() {
        if i >= visible_lines {
            break;
        }
        let y = inner.y + i as u16;

        // Key binding label (left). Commands without a binding get an empty label.
        let key_label = cmd
            .keybinding
            .as_ref()
            .map_or_else(String::new, |kb| kb.display());
        let label_end = frame.print_text_clipped(inner.x, y, &key_label, key_cell, inner.right());

        // Spacer + description (right).
        let desc_x = label_end.saturating_add(2);
        if desc_x < inner.right() {
            frame.print_text_clipped(desc_x, y, cmd.help_text, desc_cell, inner.right());
        }
    }

    // Scroll indicator if needed (entries remain below the visible window).
    if commands.len() > visible_lines + scroll_offset {
        let indicator = format!("({}/{})", scroll_offset + visible_lines, commands.len());
        let ind_x = inner.right().saturating_sub(indicator.len() as u16);
        let ind_y = overlay_rect.bottom().saturating_sub(1);
        frame.print_text_clipped(ind_x, ind_y, &indicator, desc_cell, overlay_rect.right());
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Smoke tests for the help overlay modal.
    use super::*;
    use crate::commands::build_registry;
    use crate::message::Screen;
    use ftui::render::grapheme_pool::GraphemePool;

    // Build a fresh Frame of the given size and run `$body` with it bound.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    fn white() -> PackedRgba {
        PackedRgba::rgb(0xFF, 0xFF, 0xFF)
    }

    fn gray() -> PackedRgba {
        PackedRgba::rgb(0x80, 0x80, 0x80)
    }

    #[test]
    fn test_help_overlay_renders_border() {
        with_frame!(80, 24, |frame| {
            let registry = build_registry();
            render_help_overlay(
                &mut frame,
                Rect::new(0, 0, 80, 24),
                &registry,
                &Screen::Dashboard,
                gray(),
                white(),
                gray(),
                0,
            );

            // The overlay should have non-empty cells in the center area.
            let has_content = (20..60u16).any(|x| {
                (8..16u16).any(|y| {
                    let cell = frame.buffer.get(x, y).unwrap();
                    !cell.is_empty()
                })
            });
            assert!(has_content, "Expected help overlay in center area");
        });
    }

    #[test]
    fn test_help_overlay_tiny_terminal_noop() {
        // Below the 20x5 minimum the overlay must be a silent no-op.
        with_frame!(15, 4, |frame| {
            let registry = build_registry();
            render_help_overlay(
                &mut frame,
                Rect::new(0, 0, 15, 4),
                &registry,
                &Screen::Dashboard,
                gray(),
                white(),
                gray(),
                0,
            );
        });
    }
}
|
||||||
179
crates/lore-tui/src/view/common/loading.rs
Normal file
179
crates/lore-tui/src/view/common/loading.rs
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
//! Loading spinner indicators (full-screen and corner).
|
||||||
|
|
||||||
|
use ftui::core::geometry::Rect;
|
||||||
|
use ftui::render::cell::{Cell, PackedRgba};
|
||||||
|
use ftui::render::drawing::Draw;
|
||||||
|
use ftui::render::frame::Frame;
|
||||||
|
|
||||||
|
use crate::state::LoadState;
|
||||||
|
|
||||||
|
/// Frames of the braille loading animation, in display order.
const SPINNER_FRAMES: &[char] = &['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];

/// Map a monotonically increasing tick counter onto a spinner frame.
///
/// The animation wraps around every `SPINNER_FRAMES.len()` ticks.
#[must_use]
pub(crate) fn spinner_char(tick: u64) -> char {
    let frame_count = SPINNER_FRAMES.len();
    let index = (tick as usize) % frame_count;
    SPINNER_FRAMES[index]
}
|
||||||
|
|
||||||
|
/// Render a loading indicator.
|
||||||
|
///
|
||||||
|
/// - `LoadingInitial`: centered full-screen spinner with "Loading..."
|
||||||
|
/// - `Refreshing`: subtle spinner in top-right corner
|
||||||
|
/// - Other states: no-op
|
||||||
|
pub fn render_loading(
|
||||||
|
frame: &mut Frame<'_>,
|
||||||
|
area: Rect,
|
||||||
|
load_state: &LoadState,
|
||||||
|
text_color: PackedRgba,
|
||||||
|
muted_color: PackedRgba,
|
||||||
|
tick: u64,
|
||||||
|
) {
|
||||||
|
match load_state {
|
||||||
|
LoadState::LoadingInitial => {
|
||||||
|
render_centered_spinner(frame, area, "Loading...", text_color, tick);
|
||||||
|
}
|
||||||
|
LoadState::Refreshing => {
|
||||||
|
render_corner_spinner(frame, area, muted_color, tick);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Render a centered spinner with message.
|
||||||
|
fn render_centered_spinner(
|
||||||
|
frame: &mut Frame<'_>,
|
||||||
|
area: Rect,
|
||||||
|
message: &str,
|
||||||
|
color: PackedRgba,
|
||||||
|
tick: u64,
|
||||||
|
) {
|
||||||
|
if area.height == 0 || area.width < 5 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let spinner = spinner_char(tick);
|
||||||
|
let text = format!("{spinner} {message}");
|
||||||
|
let text_len = text.len() as u16;
|
||||||
|
|
||||||
|
// Center horizontally and vertically.
|
||||||
|
let x = area
|
||||||
|
.x
|
||||||
|
.saturating_add(area.width.saturating_sub(text_len) / 2);
|
||||||
|
let y = area.y.saturating_add(area.height / 2);
|
||||||
|
|
||||||
|
let cell = Cell {
|
||||||
|
fg: color,
|
||||||
|
..Cell::default()
|
||||||
|
};
|
||||||
|
frame.print_text_clipped(x, y, &text, cell, area.right());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Render a subtle spinner in the top-right corner.
|
||||||
|
fn render_corner_spinner(frame: &mut Frame<'_>, area: Rect, color: PackedRgba, tick: u64) {
|
||||||
|
if area.width < 2 || area.height == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let spinner = spinner_char(tick);
|
||||||
|
let x = area.right().saturating_sub(2);
|
||||||
|
let y = area.y;
|
||||||
|
|
||||||
|
let cell = Cell {
|
||||||
|
fg: color,
|
||||||
|
..Cell::default()
|
||||||
|
};
|
||||||
|
frame.print_text_clipped(x, y, &spinner.to_string(), cell, area.right());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Buffer-level assertions for loading indicators and spinner cycling.
    use super::*;
    use ftui::render::grapheme_pool::GraphemePool;

    // Build a fresh Frame of the given size and run `$body` with it bound.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    fn white() -> PackedRgba {
        PackedRgba::rgb(0xFF, 0xFF, 0xFF)
    }

    fn gray() -> PackedRgba {
        PackedRgba::rgb(0x80, 0x80, 0x80)
    }

    #[test]
    fn test_loading_initial_renders_spinner() {
        with_frame!(80, 24, |frame| {
            render_loading(
                &mut frame,
                Rect::new(0, 0, 80, 24),
                &LoadState::LoadingInitial,
                white(),
                gray(),
                0,
            );

            // Centered spinner lands on the middle row (height / 2).
            let center_y = 12u16;
            let has_content = (0..80u16).any(|x| {
                let cell = frame.buffer.get(x, center_y).unwrap();
                !cell.is_empty()
            });
            assert!(has_content, "Expected loading spinner at center row");
        });
    }

    #[test]
    fn test_loading_refreshing_renders_corner() {
        with_frame!(80, 24, |frame| {
            render_loading(
                &mut frame,
                Rect::new(0, 0, 80, 24),
                &LoadState::Refreshing,
                white(),
                gray(),
                0,
            );

            // Corner spinner is drawn at right() - 2 = column 78, row 0.
            let cell = frame.buffer.get(78, 0).unwrap();
            assert!(!cell.is_empty(), "Expected corner spinner");
        });
    }

    #[test]
    fn test_loading_idle_noop() {
        with_frame!(80, 24, |frame| {
            render_loading(
                &mut frame,
                Rect::new(0, 0, 80, 24),
                &LoadState::Idle,
                white(),
                gray(),
                0,
            );

            // Scan the whole buffer: Idle must not draw anything.
            let has_content = (0..80u16).any(|x| {
                (0..24u16).any(|y| {
                    let cell = frame.buffer.get(x, y).unwrap();
                    !cell.is_empty()
                })
            });
            assert!(!has_content, "Idle state should render nothing");
        });
    }

    #[test]
    fn test_spinner_animation_cycles() {
        let frame0 = spinner_char(0);
        let frame1 = spinner_char(1);
        let frame_wrap = spinner_char(SPINNER_FRAMES.len() as u64);

        assert_ne!(frame0, frame1, "Adjacent frames should differ");
        assert_eq!(frame0, frame_wrap, "Should wrap around");
    }
}
|
||||||
17
crates/lore-tui/src/view/common/mod.rs
Normal file
17
crates/lore-tui/src/view/common/mod.rs
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
//! Common widgets shared across all TUI screens.
|
||||||
|
//!
|
||||||
|
//! Each widget is a pure rendering function — writes directly into the
|
||||||
|
//! [`Frame`] buffer using ftui's `Draw` trait. No state mutation,
|
||||||
|
//! no side effects.
|
||||||
|
|
||||||
|
mod breadcrumb;
|
||||||
|
mod error_toast;
|
||||||
|
mod help_overlay;
|
||||||
|
mod loading;
|
||||||
|
mod status_bar;
|
||||||
|
|
||||||
|
pub use breadcrumb::render_breadcrumb;
|
||||||
|
pub use error_toast::render_error_toast;
|
||||||
|
pub use help_overlay::render_help_overlay;
|
||||||
|
pub use loading::render_loading;
|
||||||
|
pub use status_bar::render_status_bar;
|
||||||
173
crates/lore-tui/src/view/common/status_bar.rs
Normal file
173
crates/lore-tui/src/view/common/status_bar.rs
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
//! Bottom status bar with key hints and mode indicator.
|
||||||
|
|
||||||
|
use ftui::core::geometry::Rect;
|
||||||
|
use ftui::render::cell::{Cell, PackedRgba};
|
||||||
|
use ftui::render::drawing::Draw;
|
||||||
|
use ftui::render::frame::Frame;
|
||||||
|
|
||||||
|
use crate::commands::CommandRegistry;
|
||||||
|
use crate::message::{InputMode, Screen};
|
||||||
|
|
||||||
|
/// Render the bottom status bar with key hints and mode indicator.
///
/// Layout: `[mode] ─── [key hints]`
///
/// Key hints are sourced from the [`CommandRegistry`] filtered to the
/// current screen, showing only the most important bindings.
#[allow(clippy::too_many_arguments)]
pub fn render_status_bar(
    frame: &mut Frame<'_>,
    area: Rect,
    registry: &CommandRegistry,
    screen: &Screen,
    mode: &InputMode,
    bar_bg: PackedRgba,
    text_color: PackedRgba,
    accent_color: PackedRgba,
) {
    // Too narrow to show even the mode label — skip entirely.
    if area.height == 0 || area.width < 5 {
        return;
    }

    // Fill the bar background.
    let bg_cell = Cell {
        bg: bar_bg,
        ..Cell::default()
    };
    frame.draw_rect_filled(area, bg_cell);

    // Human-readable label for the active input mode.
    let mode_label = match mode {
        InputMode::Normal => "NORMAL",
        InputMode::Text => "INPUT",
        InputMode::Palette => "PALETTE",
        InputMode::GoPrefix { .. } => "g...",
    };

    // Left side: mode indicator (accent color, 1-cell left margin).
    let mode_cell = Cell {
        fg: accent_color,
        bg: bar_bg,
        ..Cell::default()
    };
    // `x` tracks the write cursor; print_text_clipped returns the next column.
    let mut x = frame.print_text_clipped(
        area.x.saturating_add(1),
        area.y,
        mode_label,
        mode_cell,
        area.right(),
    );

    // Spacer.
    x = x.saturating_add(2);

    // Right side: key hints from registry (formatted as "key:action").
    let hints = registry.status_hints(screen);
    let hint_cell = Cell {
        fg: text_color,
        bg: bar_bg,
        ..Cell::default()
    };
    let key_cell = Cell {
        fg: accent_color,
        bg: bar_bg,
        ..Cell::default()
    };

    for hint in &hints {
        // Stop emitting hints once we reach the right edge.
        if x >= area.right().saturating_sub(1) {
            break;
        }
        // Split "q:quit" into key part and description part.
        if let Some((key_part, desc_part)) = hint.split_once(':') {
            x = frame.print_text_clipped(x, area.y, key_part, key_cell, area.right());
            x = frame.print_text_clipped(x, area.y, ":", hint_cell, area.right());
            x = frame.print_text_clipped(x, area.y, desc_part, hint_cell, area.right());
        } else {
            // No ':' in the hint — render it verbatim in the hint color.
            x = frame.print_text_clipped(x, area.y, hint, hint_cell, area.right());
        }
        // Two-cell gap between hints.
        x = x.saturating_add(2);
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    //! Buffer-level assertions for the bottom status bar.
    use super::*;
    use crate::commands::build_registry;
    use crate::message::Screen;
    use ftui::render::grapheme_pool::GraphemePool;

    // Build a fresh Frame of the given size and run `$body` with it bound.
    macro_rules! with_frame {
        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
            let mut pool = GraphemePool::new();
            let mut $frame = Frame::new($width, $height, &mut pool);
            $body
        }};
    }

    fn white() -> PackedRgba {
        PackedRgba::rgb(0xFF, 0xFF, 0xFF)
    }

    fn gray() -> PackedRgba {
        PackedRgba::rgb(0x80, 0x80, 0x80)
    }

    #[test]
    fn test_status_bar_renders_mode() {
        with_frame!(80, 1, |frame| {
            let registry = build_registry();
            render_status_bar(
                &mut frame,
                Rect::new(0, 0, 80, 1),
                &registry,
                &Screen::Dashboard,
                &InputMode::Normal,
                gray(),
                white(),
                white(),
            );

            // "NORMAL" starts at column 1 (one-cell left margin).
            let n_cell = frame.buffer.get(1, 0).unwrap();
            assert_eq!(n_cell.content.as_char(), Some('N'));
        });
    }

    #[test]
    fn test_status_bar_text_mode() {
        with_frame!(80, 1, |frame| {
            let registry = build_registry();
            render_status_bar(
                &mut frame,
                Rect::new(0, 0, 80, 1),
                &registry,
                &Screen::Search,
                &InputMode::Text,
                gray(),
                white(),
                white(),
            );

            // Text mode shows "INPUT" — 'I' at column 1.
            let i_cell = frame.buffer.get(1, 0).unwrap();
            assert_eq!(i_cell.content.as_char(), Some('I'));
        });
    }

    #[test]
    fn test_status_bar_narrow_terminal() {
        // Below the 5-column minimum the bar must not draw anything.
        with_frame!(4, 1, |frame| {
            let registry = build_registry();
            render_status_bar(
                &mut frame,
                Rect::new(0, 0, 4, 1),
                &registry,
                &Screen::Dashboard,
                &InputMode::Normal,
                gray(),
                white(),
                white(),
            );
            let cell = frame.buffer.get(0, 0).unwrap();
            assert!(cell.is_empty());
        });
    }
}
|
||||||
185
crates/lore-tui/src/view/mod.rs
Normal file
185
crates/lore-tui/src/view/mod.rs
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
#![allow(dead_code)] // Phase 1: screen content renders added in Phase 2+
|
||||||
|
|
||||||
|
//! Top-level view dispatch for the lore TUI.
|
||||||
|
//!
|
||||||
|
//! [`render_screen`] is the entry point called from `LoreApp::view()`.
|
||||||
|
//! It composes the layout: breadcrumb bar, screen content area, status
|
||||||
|
//! bar, and optional overlays (help, error toast).
|
||||||
|
|
||||||
|
pub mod common;
|
||||||
|
|
||||||
|
use ftui::layout::{Constraint, Flex};
|
||||||
|
use ftui::render::cell::PackedRgba;
|
||||||
|
use ftui::render::frame::Frame;
|
||||||
|
|
||||||
|
use crate::app::LoreApp;
|
||||||
|
|
||||||
|
use common::{
|
||||||
|
render_breadcrumb, render_error_toast, render_help_overlay, render_loading, render_status_bar,
|
||||||
|
};
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Colors (hardcoded Flexoki palette — will use Theme in Phase 2)
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
const TEXT: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx
|
||||||
|
const TEXT_MUTED: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2
|
||||||
|
const BG_SURFACE: PackedRgba = PackedRgba::rgb(0x28, 0x28, 0x24); // bg-2
|
||||||
|
const ACCENT: PackedRgba = PackedRgba::rgb(0xDA, 0x70, 0x2C); // orange
|
||||||
|
const ERROR_BG: PackedRgba = PackedRgba::rgb(0xAF, 0x3A, 0x29); // red
|
||||||
|
const ERROR_FG: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx
|
||||||
|
const BORDER: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// render_screen
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Top-level view dispatch: composes breadcrumb + content + status bar + overlays.
|
||||||
|
///
|
||||||
|
/// Called from `LoreApp::view()`. The layout is:
|
||||||
|
/// ```text
|
||||||
|
/// +-----------------------------------+
|
||||||
|
/// | Breadcrumb (1 row) |
|
||||||
|
/// +-----------------------------------+
|
||||||
|
/// | |
|
||||||
|
/// | Screen content (fill) |
|
||||||
|
/// | |
|
||||||
|
/// +-----------------------------------+
|
||||||
|
/// | Status bar (1 row) |
|
||||||
|
/// +-----------------------------------+
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// Overlays (help, error toast) render on top of existing content.
|
||||||
|
pub fn render_screen(frame: &mut Frame<'_>, app: &LoreApp) {
|
||||||
|
let bounds = frame.bounds();
|
||||||
|
if bounds.width < 3 || bounds.height < 3 {
|
||||||
|
return; // Terminal too small to render anything useful.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Split vertically: breadcrumb (1) | content (fill) | status bar (1).
|
||||||
|
let regions = Flex::vertical()
|
||||||
|
.constraints([
|
||||||
|
Constraint::Fixed(1), // breadcrumb
|
||||||
|
Constraint::Fill, // content
|
||||||
|
Constraint::Fixed(1), // status bar
|
||||||
|
])
|
||||||
|
.split(bounds);
|
||||||
|
|
||||||
|
let breadcrumb_area = regions[0];
|
||||||
|
let content_area = regions[1];
|
||||||
|
let status_area = regions[2];
|
||||||
|
|
||||||
|
let screen = app.navigation.current();
|
||||||
|
|
||||||
|
// --- Breadcrumb ---
|
||||||
|
render_breadcrumb(frame, breadcrumb_area, &app.navigation, TEXT, TEXT_MUTED);
|
||||||
|
|
||||||
|
// --- Screen content ---
|
||||||
|
let load_state = app.state.load_state.get(screen);
|
||||||
|
// tick=0 placeholder — animation wired up when Msg::Tick increments a counter.
|
||||||
|
render_loading(frame, content_area, load_state, TEXT, TEXT_MUTED, 0);
|
||||||
|
|
||||||
|
// Per-screen content dispatch (Phase 2+).
|
||||||
|
// match screen {
|
||||||
|
// Screen::Dashboard => ...,
|
||||||
|
// Screen::IssueList => ...,
|
||||||
|
// ...
|
||||||
|
// }
|
||||||
|
|
||||||
|
// --- Status bar ---
|
||||||
|
render_status_bar(
|
||||||
|
frame,
|
||||||
|
status_area,
|
||||||
|
&app.command_registry,
|
||||||
|
screen,
|
||||||
|
&app.input_mode,
|
||||||
|
BG_SURFACE,
|
||||||
|
TEXT,
|
||||||
|
ACCENT,
|
||||||
|
);
|
||||||
|
|
||||||
|
// --- Overlays (render last, on top of everything) ---
|
||||||
|
|
||||||
|
// Error toast.
|
||||||
|
if let Some(ref error_msg) = app.state.error_toast {
|
||||||
|
render_error_toast(frame, bounds, error_msg, ERROR_BG, ERROR_FG);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Help overlay.
|
||||||
|
if app.state.show_help {
|
||||||
|
render_help_overlay(
|
||||||
|
frame,
|
||||||
|
bounds,
|
||||||
|
&app.command_registry,
|
||||||
|
screen,
|
||||||
|
BORDER,
|
||||||
|
TEXT,
|
||||||
|
TEXT_MUTED,
|
||||||
|
0, // scroll_offset — tracked in future phase
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::app::LoreApp;
|
||||||
|
use ftui::render::grapheme_pool::GraphemePool;
|
||||||
|
|
||||||
|
macro_rules! with_frame {
|
||||||
|
($width:expr, $height:expr, |$frame:ident| $body:block) => {{
|
||||||
|
let mut pool = GraphemePool::new();
|
||||||
|
let mut $frame = Frame::new($width, $height, &mut pool);
|
||||||
|
$body
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_render_screen_does_not_panic() {
|
||||||
|
with_frame!(80, 24, |frame| {
|
||||||
|
let app = LoreApp::new();
|
||||||
|
render_screen(&mut frame, &app);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_render_screen_tiny_terminal_noop() {
|
||||||
|
with_frame!(2, 2, |frame| {
|
||||||
|
let app = LoreApp::new();
|
||||||
|
render_screen(&mut frame, &app);
|
||||||
|
// Should not panic — early return for tiny terminals.
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_render_screen_with_error_toast() {
|
||||||
|
with_frame!(80, 24, |frame| {
|
||||||
|
let mut app = LoreApp::new();
|
||||||
|
app.state.set_error("test error".into());
|
||||||
|
render_screen(&mut frame, &app);
|
||||||
|
// Should render without panicking.
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_render_screen_with_help_overlay() {
|
||||||
|
with_frame!(80, 24, |frame| {
|
||||||
|
let mut app = LoreApp::new();
|
||||||
|
app.state.show_help = true;
|
||||||
|
render_screen(&mut frame, &app);
|
||||||
|
// Should render without panicking.
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_render_screen_narrow_terminal() {
|
||||||
|
with_frame!(20, 5, |frame| {
|
||||||
|
let app = LoreApp::new();
|
||||||
|
render_screen(&mut frame, &app);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
21
migrations/022_notes_query_index.sql
Normal file
21
migrations/022_notes_query_index.sql
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
-- Migration 022: Composite query indexes for notes + author_id column
|
||||||
|
-- Optimizes author-scoped and project-scoped date-range queries on notes.
|
||||||
|
-- Adds discussion JOIN indexes and immutable author identity column.
|
||||||
|
|
||||||
|
-- Composite index for author-scoped queries (who command, notes --author)
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_notes_user_created
|
||||||
|
ON notes(project_id, author_username COLLATE NOCASE, created_at DESC, id DESC)
|
||||||
|
WHERE is_system = 0;
|
||||||
|
|
||||||
|
-- Composite index for project-scoped date-range queries
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_notes_project_created
|
||||||
|
ON notes(project_id, created_at DESC, id DESC)
|
||||||
|
WHERE is_system = 0;
|
||||||
|
|
||||||
|
-- Discussion JOIN indexes
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_discussions_issue_id ON discussions(issue_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_discussions_mr_id ON discussions(merge_request_id);
|
||||||
|
|
||||||
|
-- Immutable author identity column (GitLab numeric user ID)
|
||||||
|
ALTER TABLE notes ADD COLUMN author_id INTEGER;
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_notes_author_id ON notes(author_id) WHERE author_id IS NOT NULL;
|
||||||
153
migrations/024_note_documents.sql
Normal file
153
migrations/024_note_documents.sql
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
-- Migration 024: Add 'note' source_type to documents and dirty_sources
|
||||||
|
-- SQLite does not support ALTER CONSTRAINT, so we use the table-rebuild pattern.
|
||||||
|
|
||||||
|
-- ============================================================
|
||||||
|
-- 1. Rebuild dirty_sources with updated CHECK constraint
|
||||||
|
-- ============================================================
|
||||||
|
|
||||||
|
CREATE TABLE dirty_sources_new (
|
||||||
|
source_type TEXT NOT NULL CHECK (source_type IN ('issue','merge_request','discussion','note')),
|
||||||
|
source_id INTEGER NOT NULL,
|
||||||
|
queued_at INTEGER NOT NULL,
|
||||||
|
attempt_count INTEGER NOT NULL DEFAULT 0,
|
||||||
|
last_attempt_at INTEGER,
|
||||||
|
last_error TEXT,
|
||||||
|
next_attempt_at INTEGER,
|
||||||
|
PRIMARY KEY(source_type, source_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO dirty_sources_new SELECT * FROM dirty_sources;
|
||||||
|
DROP TABLE dirty_sources;
|
||||||
|
ALTER TABLE dirty_sources_new RENAME TO dirty_sources;
|
||||||
|
CREATE INDEX idx_dirty_sources_next_attempt ON dirty_sources(next_attempt_at);
|
||||||
|
|
||||||
|
-- ============================================================
|
||||||
|
-- 2. Rebuild documents with updated CHECK constraint
|
||||||
|
-- ============================================================
|
||||||
|
|
||||||
|
-- 2a. Backup junction table data
|
||||||
|
CREATE TEMP TABLE _doc_labels_backup AS SELECT * FROM document_labels;
|
||||||
|
CREATE TEMP TABLE _doc_paths_backup AS SELECT * FROM document_paths;
|
||||||
|
|
||||||
|
-- 2b. Drop all triggers that reference documents
|
||||||
|
DROP TRIGGER IF EXISTS documents_ai;
|
||||||
|
DROP TRIGGER IF EXISTS documents_ad;
|
||||||
|
DROP TRIGGER IF EXISTS documents_au;
|
||||||
|
DROP TRIGGER IF EXISTS documents_embeddings_ad;
|
||||||
|
|
||||||
|
-- 2c. Drop junction tables (they have FK references to documents)
|
||||||
|
DROP TABLE IF EXISTS document_labels;
|
||||||
|
DROP TABLE IF EXISTS document_paths;
|
||||||
|
|
||||||
|
-- 2d. Create new documents table with 'note' in CHECK constraint
|
||||||
|
CREATE TABLE documents_new (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
source_type TEXT NOT NULL CHECK (source_type IN ('issue','merge_request','discussion','note')),
|
||||||
|
source_id INTEGER NOT NULL,
|
||||||
|
project_id INTEGER NOT NULL REFERENCES projects(id),
|
||||||
|
author_username TEXT,
|
||||||
|
label_names TEXT,
|
||||||
|
created_at INTEGER,
|
||||||
|
updated_at INTEGER,
|
||||||
|
url TEXT,
|
||||||
|
title TEXT,
|
||||||
|
content_text TEXT NOT NULL,
|
||||||
|
content_hash TEXT NOT NULL,
|
||||||
|
labels_hash TEXT NOT NULL DEFAULT '',
|
||||||
|
paths_hash TEXT NOT NULL DEFAULT '',
|
||||||
|
is_truncated INTEGER NOT NULL DEFAULT 0,
|
||||||
|
truncated_reason TEXT CHECK (
|
||||||
|
truncated_reason IN (
|
||||||
|
'token_limit_middle_drop','single_note_oversized','first_last_oversized',
|
||||||
|
'hard_cap_oversized'
|
||||||
|
)
|
||||||
|
OR truncated_reason IS NULL
|
||||||
|
),
|
||||||
|
UNIQUE(source_type, source_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- 2e. Copy all existing data
|
||||||
|
INSERT INTO documents_new SELECT * FROM documents;
|
||||||
|
|
||||||
|
-- 2f. Swap tables
|
||||||
|
DROP TABLE documents;
|
||||||
|
ALTER TABLE documents_new RENAME TO documents;
|
||||||
|
|
||||||
|
-- 2g. Recreate all indexes on documents
|
||||||
|
CREATE INDEX idx_documents_project_updated ON documents(project_id, updated_at);
|
||||||
|
CREATE INDEX idx_documents_author ON documents(author_username);
|
||||||
|
CREATE INDEX idx_documents_source ON documents(source_type, source_id);
|
||||||
|
CREATE INDEX idx_documents_hash ON documents(content_hash);
|
||||||
|
|
||||||
|
-- 2h. Recreate junction tables
|
||||||
|
CREATE TABLE document_labels (
|
||||||
|
document_id INTEGER NOT NULL REFERENCES documents(id) ON DELETE CASCADE,
|
||||||
|
label_name TEXT NOT NULL,
|
||||||
|
PRIMARY KEY(document_id, label_name)
|
||||||
|
) WITHOUT ROWID;
|
||||||
|
CREATE INDEX idx_document_labels_label ON document_labels(label_name);
|
||||||
|
|
||||||
|
CREATE TABLE document_paths (
|
||||||
|
document_id INTEGER NOT NULL REFERENCES documents(id) ON DELETE CASCADE,
|
||||||
|
path TEXT NOT NULL,
|
||||||
|
PRIMARY KEY(document_id, path)
|
||||||
|
) WITHOUT ROWID;
|
||||||
|
CREATE INDEX idx_document_paths_path ON document_paths(path);
|
||||||
|
|
||||||
|
-- 2i. Restore junction table data from backups
|
||||||
|
INSERT INTO document_labels SELECT * FROM _doc_labels_backup;
|
||||||
|
INSERT INTO document_paths SELECT * FROM _doc_paths_backup;
|
||||||
|
|
||||||
|
-- 2j. Recreate FTS triggers (from migration 008)
|
||||||
|
CREATE TRIGGER documents_ai AFTER INSERT ON documents BEGIN
|
||||||
|
INSERT INTO documents_fts(rowid, title, content_text)
|
||||||
|
VALUES (new.id, COALESCE(new.title, ''), new.content_text);
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER documents_ad AFTER DELETE ON documents BEGIN
|
||||||
|
INSERT INTO documents_fts(documents_fts, rowid, title, content_text)
|
||||||
|
VALUES('delete', old.id, COALESCE(old.title, ''), old.content_text);
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER documents_au AFTER UPDATE ON documents
|
||||||
|
WHEN old.title IS NOT new.title OR old.content_text != new.content_text
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO documents_fts(documents_fts, rowid, title, content_text)
|
||||||
|
VALUES('delete', old.id, COALESCE(old.title, ''), old.content_text);
|
||||||
|
INSERT INTO documents_fts(rowid, title, content_text)
|
||||||
|
VALUES (new.id, COALESCE(new.title, ''), new.content_text);
|
||||||
|
END;
|
||||||
|
|
||||||
|
-- 2k. Recreate embeddings cleanup trigger (from migration 009)
|
||||||
|
CREATE TRIGGER documents_embeddings_ad AFTER DELETE ON documents BEGIN
|
||||||
|
DELETE FROM embeddings
|
||||||
|
WHERE rowid >= old.id * 1000
|
||||||
|
AND rowid < (old.id + 1) * 1000;
|
||||||
|
END;
|
||||||
|
|
||||||
|
-- 2l. Rebuild FTS index to ensure consistency after table swap
|
||||||
|
INSERT INTO documents_fts(documents_fts) VALUES('rebuild');
|
||||||
|
|
||||||
|
-- ============================================================
|
||||||
|
-- 3. Defense triggers: clean up documents when notes are
|
||||||
|
-- deleted or flipped to system notes
|
||||||
|
-- ============================================================
|
||||||
|
|
||||||
|
CREATE TRIGGER notes_ad_cleanup AFTER DELETE ON notes
|
||||||
|
WHEN old.is_system = 0
|
||||||
|
BEGIN
|
||||||
|
DELETE FROM documents WHERE source_type = 'note' AND source_id = old.id;
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER notes_au_system_cleanup AFTER UPDATE OF is_system ON notes
|
||||||
|
WHEN NEW.is_system = 1 AND OLD.is_system = 0
|
||||||
|
BEGIN
|
||||||
|
DELETE FROM documents WHERE source_type = 'note' AND source_id = OLD.id;
|
||||||
|
END;
|
||||||
|
|
||||||
|
-- ============================================================
|
||||||
|
-- 4. Drop temp backup tables
|
||||||
|
-- ============================================================
|
||||||
|
|
||||||
|
DROP TABLE IF EXISTS _doc_labels_backup;
|
||||||
|
DROP TABLE IF EXISTS _doc_paths_backup;
|
||||||
8
migrations/025_note_dirty_backfill.sql
Normal file
8
migrations/025_note_dirty_backfill.sql
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
-- Backfill existing non-system notes into dirty queue for document generation.
|
||||||
|
-- Only seeds notes that don't already have documents and aren't already queued.
|
||||||
|
INSERT INTO dirty_sources (source_type, source_id, queued_at)
|
||||||
|
SELECT 'note', n.id, CAST(strftime('%s', 'now') AS INTEGER) * 1000
|
||||||
|
FROM notes n
|
||||||
|
LEFT JOIN documents d ON d.source_type = 'note' AND d.source_id = n.id
|
||||||
|
WHERE n.is_system = 0 AND d.id IS NULL
|
||||||
|
ON CONFLICT(source_type, source_id) DO NOTHING;
|
||||||
@@ -186,6 +186,31 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
],
|
],
|
||||||
),
|
),
|
||||||
("drift", &["--threshold", "--project"]),
|
("drift", &["--threshold", "--project"]),
|
||||||
|
(
|
||||||
|
"notes",
|
||||||
|
&[
|
||||||
|
"--limit",
|
||||||
|
"--fields",
|
||||||
|
"--format",
|
||||||
|
"--author",
|
||||||
|
"--note-type",
|
||||||
|
"--contains",
|
||||||
|
"--note-id",
|
||||||
|
"--gitlab-note-id",
|
||||||
|
"--discussion-id",
|
||||||
|
"--include-system",
|
||||||
|
"--for-issue",
|
||||||
|
"--for-mr",
|
||||||
|
"--project",
|
||||||
|
"--since",
|
||||||
|
"--until",
|
||||||
|
"--path",
|
||||||
|
"--resolution",
|
||||||
|
"--sort",
|
||||||
|
"--asc",
|
||||||
|
"--open",
|
||||||
|
],
|
||||||
|
),
|
||||||
(
|
(
|
||||||
"init",
|
"init",
|
||||||
&[
|
&[
|
||||||
|
|||||||
@@ -39,6 +39,7 @@ pub fn run_generate_docs(
|
|||||||
result.seeded += seed_dirty(&conn, SourceType::Issue, project_filter)?;
|
result.seeded += seed_dirty(&conn, SourceType::Issue, project_filter)?;
|
||||||
result.seeded += seed_dirty(&conn, SourceType::MergeRequest, project_filter)?;
|
result.seeded += seed_dirty(&conn, SourceType::MergeRequest, project_filter)?;
|
||||||
result.seeded += seed_dirty(&conn, SourceType::Discussion, project_filter)?;
|
result.seeded += seed_dirty(&conn, SourceType::Discussion, project_filter)?;
|
||||||
|
result.seeded += seed_dirty_notes(&conn, project_filter)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let regen =
|
let regen =
|
||||||
@@ -67,6 +68,10 @@ fn seed_dirty(
|
|||||||
SourceType::Issue => "issues",
|
SourceType::Issue => "issues",
|
||||||
SourceType::MergeRequest => "merge_requests",
|
SourceType::MergeRequest => "merge_requests",
|
||||||
SourceType::Discussion => "discussions",
|
SourceType::Discussion => "discussions",
|
||||||
|
SourceType::Note => {
|
||||||
|
// NOTE-2E will implement seed_dirty_notes separately (needs is_system filter)
|
||||||
|
unreachable!("Note seeding handled by seed_dirty_notes, not seed_dirty")
|
||||||
|
}
|
||||||
};
|
};
|
||||||
let type_str = source_type.as_str();
|
let type_str = source_type.as_str();
|
||||||
let now = chrono::Utc::now().timestamp_millis();
|
let now = chrono::Utc::now().timestamp_millis();
|
||||||
@@ -125,6 +130,55 @@ fn seed_dirty(
|
|||||||
Ok(total_seeded)
|
Ok(total_seeded)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn seed_dirty_notes(conn: &Connection, project_filter: Option<&str>) -> Result<usize> {
|
||||||
|
let now = chrono::Utc::now().timestamp_millis();
|
||||||
|
let mut total_seeded: usize = 0;
|
||||||
|
let mut last_id: i64 = 0;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let inserted = if let Some(project) = project_filter {
|
||||||
|
let project_id = resolve_project(conn, project)?;
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at, attempt_count, last_attempt_at, last_error, next_attempt_at)
|
||||||
|
SELECT 'note', id, ?1, 0, NULL, NULL, NULL
|
||||||
|
FROM notes WHERE id > ?2 AND project_id = ?3 AND is_system = 0 ORDER BY id LIMIT ?4
|
||||||
|
ON CONFLICT(source_type, source_id) DO NOTHING",
|
||||||
|
rusqlite::params![now, last_id, project_id, FULL_MODE_CHUNK_SIZE],
|
||||||
|
)?
|
||||||
|
} else {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at, attempt_count, last_attempt_at, last_error, next_attempt_at)
|
||||||
|
SELECT 'note', id, ?1, 0, NULL, NULL, NULL
|
||||||
|
FROM notes WHERE id > ?2 AND is_system = 0 ORDER BY id LIMIT ?3
|
||||||
|
ON CONFLICT(source_type, source_id) DO NOTHING",
|
||||||
|
rusqlite::params![now, last_id, FULL_MODE_CHUNK_SIZE],
|
||||||
|
)?
|
||||||
|
};
|
||||||
|
|
||||||
|
if inserted == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
let max_id: i64 = conn.query_row(
|
||||||
|
"SELECT MAX(id) FROM (SELECT id FROM notes WHERE id > ?1 AND is_system = 0 ORDER BY id LIMIT ?2)",
|
||||||
|
rusqlite::params![last_id, FULL_MODE_CHUNK_SIZE],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
total_seeded += inserted;
|
||||||
|
last_id = max_id;
|
||||||
|
}
|
||||||
|
|
||||||
|
info!(
|
||||||
|
source_type = "note",
|
||||||
|
seeded = total_seeded,
|
||||||
|
"Seeded dirty_sources"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(total_seeded)
|
||||||
|
}
|
||||||
|
|
||||||
pub fn print_generate_docs(result: &GenerateDocsResult) {
|
pub fn print_generate_docs(result: &GenerateDocsResult) {
|
||||||
let mode = if result.full_mode {
|
let mode = if result.full_mode {
|
||||||
"full"
|
"full"
|
||||||
@@ -186,3 +240,81 @@ pub fn print_generate_docs_json(result: &GenerateDocsResult, elapsed_ms: u64) {
|
|||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output).unwrap());
|
println!("{}", serde_json::to_string(&output).unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use crate::core::db::{create_connection, run_migrations};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn setup_db() -> Connection {
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
run_migrations(&conn).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO projects (id, gitlab_project_id, path_with_namespace, web_url) VALUES (1, 100, 'group/project', 'https://gitlab.com/group/project')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at) VALUES (1, 10, 1, 1, 'Test', 'opened', 1000, 2000, 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (1, 'disc_1', 1, 1, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn
|
||||||
|
}
|
||||||
|
|
||||||
|
fn insert_note(conn: &Connection, id: i64, gitlab_id: i64, is_system: bool) {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (?1, ?2, 1, 1, 'alice', 'note body', 1000, 2000, 3000, ?3)",
|
||||||
|
rusqlite::params![id, gitlab_id, is_system as i32],
|
||||||
|
).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_full_seed_includes_notes() {
|
||||||
|
let conn = setup_db();
|
||||||
|
insert_note(&conn, 1, 101, false);
|
||||||
|
insert_note(&conn, 2, 102, false);
|
||||||
|
insert_note(&conn, 3, 103, false);
|
||||||
|
insert_note(&conn, 4, 104, true); // system note — should be excluded
|
||||||
|
|
||||||
|
let seeded = seed_dirty_notes(&conn, None).unwrap();
|
||||||
|
assert_eq!(seeded, 3);
|
||||||
|
|
||||||
|
let count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(count, 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_count_stable_after_second_generate_docs_full() {
|
||||||
|
let conn = setup_db();
|
||||||
|
insert_note(&conn, 1, 101, false);
|
||||||
|
insert_note(&conn, 2, 102, false);
|
||||||
|
|
||||||
|
let first = seed_dirty_notes(&conn, None).unwrap();
|
||||||
|
assert_eq!(first, 2);
|
||||||
|
|
||||||
|
// Second run should be idempotent (ON CONFLICT DO NOTHING)
|
||||||
|
let second = seed_dirty_notes(&conn, None).unwrap();
|
||||||
|
assert_eq!(second, 0);
|
||||||
|
|
||||||
|
let count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(count, 2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -30,8 +30,10 @@ pub use ingest::{
|
|||||||
};
|
};
|
||||||
pub use init::{InitInputs, InitOptions, InitResult, run_init};
|
pub use init::{InitInputs, InitOptions, InitResult, run_init};
|
||||||
pub use list::{
|
pub use list::{
|
||||||
ListFilters, MrListFilters, open_issue_in_browser, open_mr_in_browser, print_list_issues,
|
ListFilters, MrListFilters, NoteListFilters, open_issue_in_browser, open_mr_in_browser,
|
||||||
print_list_issues_json, print_list_mrs, print_list_mrs_json, run_list_issues, run_list_mrs,
|
print_list_issues, print_list_issues_json, print_list_mrs, print_list_mrs_json,
|
||||||
|
print_list_notes, print_list_notes_csv, print_list_notes_json, print_list_notes_jsonl,
|
||||||
|
query_notes, run_list_issues, run_list_mrs,
|
||||||
};
|
};
|
||||||
pub use search::{
|
pub use search::{
|
||||||
SearchCliFilters, SearchResponse, print_search_results, print_search_results_json, run_search,
|
SearchCliFilters, SearchResponse, print_search_results, print_search_results_json, run_search,
|
||||||
|
|||||||
@@ -334,6 +334,7 @@ pub fn print_search_results(response: &SearchResponse) {
|
|||||||
"issue" => "Issue",
|
"issue" => "Issue",
|
||||||
"merge_request" => "MR",
|
"merge_request" => "MR",
|
||||||
"discussion" => "Discussion",
|
"discussion" => "Discussion",
|
||||||
|
"note" => "Note",
|
||||||
_ => &result.source_type,
|
_ => &result.source_type,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -13,6 +13,20 @@ use crate::core::paths::get_db_path;
|
|||||||
use crate::core::project::resolve_project;
|
use crate::core::project::resolve_project;
|
||||||
use crate::core::time::{ms_to_iso, now_ms, parse_since};
|
use crate::core::time::{ms_to_iso, now_ms, parse_since};
|
||||||
|
|
||||||
|
// ─── Decay Math ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Exponential half-life decay: R = 2^(-t/h)
|
||||||
|
/// Returns 1.0 at elapsed=0, 0.5 at elapsed=half_life, 0.0 if half_life=0.
|
||||||
|
#[allow(dead_code)] // Used by bd-13q8 (decay aggregation)
|
||||||
|
fn half_life_decay(elapsed_ms: i64, half_life_days: u32) -> f64 {
|
||||||
|
let days = (elapsed_ms as f64 / 86_400_000.0).max(0.0);
|
||||||
|
let hl = f64::from(half_life_days);
|
||||||
|
if hl <= 0.0 {
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
2.0_f64.powf(-days / hl)
|
||||||
|
}
|
||||||
|
|
||||||
// ─── Mode Discrimination ────────────────────────────────────────────────────
|
// ─── Mode Discrimination ────────────────────────────────────────────────────
|
||||||
|
|
||||||
/// Determines which query mode to run based on args.
|
/// Determines which query mode to run based on args.
|
||||||
@@ -3568,6 +3582,7 @@ mod tests {
|
|||||||
author_weight: 5,
|
author_weight: 5,
|
||||||
reviewer_weight: 30,
|
reviewer_weight: 30,
|
||||||
note_bonus: 1,
|
note_bonus: 1,
|
||||||
|
..ScoringConfig::default()
|
||||||
};
|
};
|
||||||
let result = query_expert(&conn, "src/app.rs", None, 0, 20, &flipped, false).unwrap();
|
let result = query_expert(&conn, "src/app.rs", None, 0, 20, &flipped, false).unwrap();
|
||||||
assert_eq!(result.experts[0].username, "the_reviewer");
|
assert_eq!(result.experts[0].username, "the_reviewer");
|
||||||
@@ -3690,4 +3705,38 @@ mod tests {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ─── half_life_decay tests ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_half_life_decay_math() {
|
||||||
|
let hl_180 = 180;
|
||||||
|
// At t=0, full retention
|
||||||
|
assert!((half_life_decay(0, hl_180) - 1.0).abs() < f64::EPSILON);
|
||||||
|
// At t=half_life, exactly 0.5
|
||||||
|
let one_hl_ms = 180 * 86_400_000_i64;
|
||||||
|
assert!((half_life_decay(one_hl_ms, hl_180) - 0.5).abs() < 1e-10);
|
||||||
|
// At t=2*half_life, exactly 0.25
|
||||||
|
assert!((half_life_decay(2 * one_hl_ms, hl_180) - 0.25).abs() < 1e-10);
|
||||||
|
// Negative elapsed clamped to 0 -> 1.0
|
||||||
|
assert!((half_life_decay(-1000, hl_180) - 1.0).abs() < f64::EPSILON);
|
||||||
|
// Zero half-life -> 0.0 (div-by-zero guard)
|
||||||
|
assert!((half_life_decay(86_400_000, 0)).abs() < f64::EPSILON);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_score_monotonicity_by_age() {
|
||||||
|
let mut seed: u64 = 42;
|
||||||
|
let hl = 90_u32;
|
||||||
|
for _ in 0..50 {
|
||||||
|
seed = seed.wrapping_mul(6_364_136_223_846_793_005).wrapping_add(1);
|
||||||
|
let newer_ms = (seed % 100_000_000) as i64;
|
||||||
|
seed = seed.wrapping_mul(6_364_136_223_846_793_005).wrapping_add(1);
|
||||||
|
let older_ms = newer_ms + (seed % 500_000_000) as i64;
|
||||||
|
assert!(
|
||||||
|
half_life_decay(older_ms, hl) <= half_life_decay(newer_ms, hl),
|
||||||
|
"Monotonicity violated: decay({older_ms}) > decay({newer_ms})"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
114
src/cli/mod.rs
114
src/cli/mod.rs
@@ -112,6 +112,9 @@ pub enum Commands {
|
|||||||
/// List or show merge requests
|
/// List or show merge requests
|
||||||
Mrs(MrsArgs),
|
Mrs(MrsArgs),
|
||||||
|
|
||||||
|
/// List notes from discussions
|
||||||
|
Notes(NotesArgs),
|
||||||
|
|
||||||
/// Ingest data from GitLab
|
/// Ingest data from GitLab
|
||||||
Ingest(IngestArgs),
|
Ingest(IngestArgs),
|
||||||
|
|
||||||
@@ -489,6 +492,113 @@ pub struct MrsArgs {
|
|||||||
pub no_open: bool,
|
pub no_open: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore notes # List 50 most recent notes
|
||||||
|
lore notes --author alice --since 7d # Notes by alice in last 7 days
|
||||||
|
lore notes --for-issue 42 -p group/repo # Notes on issue #42
|
||||||
|
lore notes --path src/ --resolution unresolved # Unresolved diff notes in src/")]
|
||||||
|
pub struct NotesArgs {
|
||||||
|
/// Maximum results
|
||||||
|
#[arg(
|
||||||
|
short = 'n',
|
||||||
|
long = "limit",
|
||||||
|
default_value = "50",
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub limit: usize,
|
||||||
|
|
||||||
|
/// Select output fields (comma-separated, or 'minimal' preset: id,author_username,body,created_at_iso)
|
||||||
|
#[arg(long, help_heading = "Output", value_delimiter = ',')]
|
||||||
|
pub fields: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Output format (table, json, jsonl, csv)
|
||||||
|
#[arg(
|
||||||
|
long,
|
||||||
|
default_value = "table",
|
||||||
|
value_parser = ["table", "json", "jsonl", "csv"],
|
||||||
|
help_heading = "Output"
|
||||||
|
)]
|
||||||
|
pub format: String,
|
||||||
|
|
||||||
|
/// Filter by author username
|
||||||
|
#[arg(short = 'a', long, help_heading = "Filters")]
|
||||||
|
pub author: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by note type (DiffNote, DiscussionNote)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub note_type: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by body text (substring match)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub contains: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by internal note ID
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub note_id: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter by GitLab note ID
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub gitlab_note_id: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter by discussion ID
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub discussion_id: Option<String>,
|
||||||
|
|
||||||
|
/// Include system notes (excluded by default)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub include_system: bool,
|
||||||
|
|
||||||
|
/// Filter to notes on a specific issue IID (requires --project or default_project)
|
||||||
|
#[arg(long, conflicts_with = "for_mr", help_heading = "Filters")]
|
||||||
|
pub for_issue: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter to notes on a specific MR IID (requires --project or default_project)
|
||||||
|
#[arg(long, conflicts_with = "for_issue", help_heading = "Filters")]
|
||||||
|
pub for_mr: Option<i64>,
|
||||||
|
|
||||||
|
/// Filter by project path
|
||||||
|
#[arg(short = 'p', long, help_heading = "Filters")]
|
||||||
|
pub project: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by time (7d, 2w, 1m, or YYYY-MM-DD)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub since: Option<String>,
|
||||||
|
|
||||||
|
/// Filter until date (YYYY-MM-DD, inclusive end-of-day)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub until: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by file path (exact match or prefix with trailing /)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
pub path: Option<String>,
|
||||||
|
|
||||||
|
/// Filter by resolution status (any, unresolved, resolved)
|
||||||
|
#[arg(
|
||||||
|
long,
|
||||||
|
value_parser = ["any", "unresolved", "resolved"],
|
||||||
|
help_heading = "Filters"
|
||||||
|
)]
|
||||||
|
pub resolution: Option<String>,
|
||||||
|
|
||||||
|
/// Sort field (created, updated)
|
||||||
|
#[arg(
|
||||||
|
long,
|
||||||
|
value_parser = ["created", "updated"],
|
||||||
|
default_value = "created",
|
||||||
|
help_heading = "Sorting"
|
||||||
|
)]
|
||||||
|
pub sort: String,
|
||||||
|
|
||||||
|
/// Sort ascending (default: descending)
|
||||||
|
#[arg(long, help_heading = "Sorting")]
|
||||||
|
pub asc: bool,
|
||||||
|
|
||||||
|
/// Open first matching item in browser
|
||||||
|
#[arg(long, help_heading = "Actions")]
|
||||||
|
pub open: bool,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
pub struct IngestArgs {
|
pub struct IngestArgs {
|
||||||
/// Entity to ingest (issues, mrs). Omit to ingest everything
|
/// Entity to ingest (issues, mrs). Omit to ingest everything
|
||||||
@@ -556,8 +666,8 @@ pub struct SearchArgs {
|
|||||||
#[arg(long, default_value = "hybrid", value_parser = ["lexical", "hybrid", "semantic"], help_heading = "Mode")]
|
#[arg(long, default_value = "hybrid", value_parser = ["lexical", "hybrid", "semantic"], help_heading = "Mode")]
|
||||||
pub mode: String,
|
pub mode: String,
|
||||||
|
|
||||||
/// Filter by source type (issue, mr, discussion)
|
/// Filter by source type (issue, mr, discussion, note)
|
||||||
#[arg(long = "type", value_name = "TYPE", value_parser = ["issue", "mr", "discussion"], help_heading = "Filters")]
|
#[arg(long = "type", value_name = "TYPE", value_parser = ["issue", "mr", "discussion", "note"], help_heading = "Filters")]
|
||||||
pub source_type: Option<String>,
|
pub source_type: Option<String>,
|
||||||
|
|
||||||
/// Filter by author username
|
/// Filter by author username
|
||||||
|
|||||||
@@ -64,6 +64,10 @@ pub fn expand_fields_preset(fields: &[String], entity: &str) -> Vec<String> {
|
|||||||
.iter()
|
.iter()
|
||||||
.map(|s| (*s).to_string())
|
.map(|s| (*s).to_string())
|
||||||
.collect(),
|
.collect(),
|
||||||
|
"notes" => ["id", "author_username", "body", "created_at_iso"]
|
||||||
|
.iter()
|
||||||
|
.map(|s| (*s).to_string())
|
||||||
|
.collect(),
|
||||||
_ => fields.to_vec(),
|
_ => fields.to_vec(),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@@ -82,3 +86,25 @@ pub fn strip_schemas(commands: &mut serde_json::Value) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_expand_fields_preset_notes() {
|
||||||
|
let fields = vec!["minimal".to_string()];
|
||||||
|
let expanded = expand_fields_preset(&fields, "notes");
|
||||||
|
assert_eq!(
|
||||||
|
expanded,
|
||||||
|
["id", "author_username", "body", "created_at_iso"]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_expand_fields_preset_passthrough() {
|
||||||
|
let fields = vec!["id".to_string(), "body".to_string()];
|
||||||
|
let expanded = expand_fields_preset(&fields, "notes");
|
||||||
|
assert_eq!(expanded, ["id", "body"]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -164,6 +164,38 @@ pub struct ScoringConfig {
|
|||||||
/// Bonus points per individual inline review comment (DiffNote).
|
/// Bonus points per individual inline review comment (DiffNote).
|
||||||
#[serde(rename = "noteBonus")]
|
#[serde(rename = "noteBonus")]
|
||||||
pub note_bonus: i64,
|
pub note_bonus: i64,
|
||||||
|
|
||||||
|
/// Points for being assigned as reviewer (without substantive notes).
|
||||||
|
#[serde(rename = "reviewerAssignmentWeight")]
|
||||||
|
pub reviewer_assignment_weight: i64,
|
||||||
|
|
||||||
|
/// Half-life in days for author contribution decay.
|
||||||
|
#[serde(rename = "authorHalfLifeDays")]
|
||||||
|
pub author_half_life_days: u32,
|
||||||
|
|
||||||
|
/// Half-life in days for reviewer participation decay.
|
||||||
|
#[serde(rename = "reviewerHalfLifeDays")]
|
||||||
|
pub reviewer_half_life_days: u32,
|
||||||
|
|
||||||
|
/// Half-life in days for reviewer-assignment-only decay.
|
||||||
|
#[serde(rename = "reviewerAssignmentHalfLifeDays")]
|
||||||
|
pub reviewer_assignment_half_life_days: u32,
|
||||||
|
|
||||||
|
/// Half-life in days for note/comment decay.
|
||||||
|
#[serde(rename = "noteHalfLifeDays")]
|
||||||
|
pub note_half_life_days: u32,
|
||||||
|
|
||||||
|
/// Multiplier applied to closed (not merged) MRs.
|
||||||
|
#[serde(rename = "closedMrMultiplier")]
|
||||||
|
pub closed_mr_multiplier: f64,
|
||||||
|
|
||||||
|
/// Minimum character count for a reviewer note to be "substantive".
|
||||||
|
#[serde(rename = "reviewerMinNoteChars")]
|
||||||
|
pub reviewer_min_note_chars: u32,
|
||||||
|
|
||||||
|
/// Usernames to exclude from scoring (e.g. bots).
|
||||||
|
#[serde(rename = "excludedUsernames")]
|
||||||
|
pub excluded_usernames: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for ScoringConfig {
|
impl Default for ScoringConfig {
|
||||||
@@ -172,6 +204,14 @@ impl Default for ScoringConfig {
|
|||||||
author_weight: 25,
|
author_weight: 25,
|
||||||
reviewer_weight: 10,
|
reviewer_weight: 10,
|
||||||
note_bonus: 1,
|
note_bonus: 1,
|
||||||
|
reviewer_assignment_weight: 3,
|
||||||
|
author_half_life_days: 180,
|
||||||
|
reviewer_half_life_days: 90,
|
||||||
|
reviewer_assignment_half_life_days: 45,
|
||||||
|
note_half_life_days: 45,
|
||||||
|
closed_mr_multiplier: 0.5,
|
||||||
|
reviewer_min_note_chars: 20,
|
||||||
|
excluded_usernames: Vec::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -287,6 +327,51 @@ fn validate_scoring(scoring: &ScoringConfig) -> Result<()> {
|
|||||||
details: "scoring.noteBonus must be >= 0".to_string(),
|
details: "scoring.noteBonus must be >= 0".to_string(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
if scoring.reviewer_assignment_weight < 0 {
|
||||||
|
return Err(LoreError::ConfigInvalid {
|
||||||
|
details: "scoring.reviewerAssignmentWeight must be >= 0".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (field, value) in [
|
||||||
|
("authorHalfLifeDays", scoring.author_half_life_days),
|
||||||
|
("reviewerHalfLifeDays", scoring.reviewer_half_life_days),
|
||||||
|
(
|
||||||
|
"reviewerAssignmentHalfLifeDays",
|
||||||
|
scoring.reviewer_assignment_half_life_days,
|
||||||
|
),
|
||||||
|
("noteHalfLifeDays", scoring.note_half_life_days),
|
||||||
|
] {
|
||||||
|
if value == 0 || value > 3650 {
|
||||||
|
return Err(LoreError::ConfigInvalid {
|
||||||
|
details: format!("scoring.{field} must be > 0 and <= 3650"),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !scoring.closed_mr_multiplier.is_finite()
|
||||||
|
|| scoring.closed_mr_multiplier <= 0.0
|
||||||
|
|| scoring.closed_mr_multiplier > 1.0
|
||||||
|
{
|
||||||
|
return Err(LoreError::ConfigInvalid {
|
||||||
|
details: "scoring.closedMrMultiplier must be finite and in (0.0, 1.0]".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if scoring.reviewer_min_note_chars > 4096 {
|
||||||
|
return Err(LoreError::ConfigInvalid {
|
||||||
|
details: "scoring.reviewerMinNoteChars must be <= 4096".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for username in &scoring.excluded_usernames {
|
||||||
|
if username.is_empty() {
|
||||||
|
return Err(LoreError::ConfigInvalid {
|
||||||
|
details: "scoring.excludedUsernames entries must be non-empty".to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -543,6 +628,78 @@ mod tests {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_config_validation_rejects_zero_half_life() {
|
||||||
|
let mut cfg = ScoringConfig::default();
|
||||||
|
assert!(validate_scoring(&cfg).is_ok());
|
||||||
|
cfg.author_half_life_days = 0;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.author_half_life_days = 180;
|
||||||
|
cfg.reviewer_half_life_days = 0;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.reviewer_half_life_days = 90;
|
||||||
|
cfg.closed_mr_multiplier = 0.0;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.closed_mr_multiplier = 1.5;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.closed_mr_multiplier = 1.0;
|
||||||
|
assert!(validate_scoring(&cfg).is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_config_validation_rejects_absurd_half_life() {
|
||||||
|
let mut cfg = ScoringConfig::default();
|
||||||
|
cfg.author_half_life_days = 5000; // > 3650 cap
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.author_half_life_days = 3650; // boundary: valid
|
||||||
|
assert!(validate_scoring(&cfg).is_ok());
|
||||||
|
cfg.reviewer_min_note_chars = 5000; // > 4096 cap
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.reviewer_min_note_chars = 4096; // boundary: valid
|
||||||
|
assert!(validate_scoring(&cfg).is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_config_validation_rejects_nan_multiplier() {
|
||||||
|
let mut cfg = ScoringConfig::default();
|
||||||
|
cfg.closed_mr_multiplier = f64::NAN;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.closed_mr_multiplier = f64::INFINITY;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
cfg.closed_mr_multiplier = f64::NEG_INFINITY;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_config_validation_rejects_negative_assignment_weight() {
|
||||||
|
let mut cfg = ScoringConfig::default();
|
||||||
|
cfg.reviewer_assignment_weight = -1;
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_config_validation_rejects_empty_excluded_username() {
|
||||||
|
let mut cfg = ScoringConfig::default();
|
||||||
|
cfg.excluded_usernames = vec!["".to_string()];
|
||||||
|
assert!(validate_scoring(&cfg).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_scoring_config_defaults() {
|
||||||
|
let cfg = ScoringConfig::default();
|
||||||
|
assert_eq!(cfg.author_weight, 25);
|
||||||
|
assert_eq!(cfg.reviewer_weight, 10);
|
||||||
|
assert_eq!(cfg.note_bonus, 1);
|
||||||
|
assert_eq!(cfg.reviewer_assignment_weight, 3);
|
||||||
|
assert_eq!(cfg.author_half_life_days, 180);
|
||||||
|
assert_eq!(cfg.reviewer_half_life_days, 90);
|
||||||
|
assert_eq!(cfg.reviewer_assignment_half_life_days, 45);
|
||||||
|
assert_eq!(cfg.note_half_life_days, 45);
|
||||||
|
assert!((cfg.closed_mr_multiplier - 0.5).abs() < f64::EPSILON);
|
||||||
|
assert_eq!(cfg.reviewer_min_note_chars, 20);
|
||||||
|
assert!(cfg.excluded_usernames.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_minimal_config_includes_default_project_when_set() {
|
fn test_minimal_config_includes_default_project_when_set() {
|
||||||
let config = MinimalConfig {
|
let config = MinimalConfig {
|
||||||
|
|||||||
648
src/core/db.rs
648
src/core/db.rs
@@ -69,10 +69,22 @@ const MIGRATIONS: &[(&str, &str)] = &[
|
|||||||
"021",
|
"021",
|
||||||
include_str!("../../migrations/021_work_item_status.sql"),
|
include_str!("../../migrations/021_work_item_status.sql"),
|
||||||
),
|
),
|
||||||
|
(
|
||||||
|
"022",
|
||||||
|
include_str!("../../migrations/022_notes_query_index.sql"),
|
||||||
|
),
|
||||||
(
|
(
|
||||||
"023",
|
"023",
|
||||||
include_str!("../../migrations/023_issue_detail_fields.sql"),
|
include_str!("../../migrations/023_issue_detail_fields.sql"),
|
||||||
),
|
),
|
||||||
|
(
|
||||||
|
"024",
|
||||||
|
include_str!("../../migrations/024_note_documents.sql"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"025",
|
||||||
|
include_str!("../../migrations/025_note_dirty_backfill.sql"),
|
||||||
|
),
|
||||||
];
|
];
|
||||||
|
|
||||||
pub fn create_connection(db_path: &Path) -> Result<Connection> {
|
pub fn create_connection(db_path: &Path) -> Result<Connection> {
|
||||||
@@ -316,3 +328,639 @@ pub fn get_schema_version(conn: &Connection) -> i32 {
|
|||||||
)
|
)
|
||||||
.unwrap_or(0)
|
.unwrap_or(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn setup_migrated_db() -> Connection {
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
run_migrations(&conn).unwrap();
|
||||||
|
conn
|
||||||
|
}
|
||||||
|
|
||||||
|
fn index_exists(conn: &Connection, index_name: &str) -> bool {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT COUNT(*) > 0 FROM sqlite_master WHERE type='index' AND name=?1",
|
||||||
|
[index_name],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn column_exists(conn: &Connection, table: &str, column: &str) -> bool {
|
||||||
|
let sql = format!("PRAGMA table_info({})", table);
|
||||||
|
let mut stmt = conn.prepare(&sql).unwrap();
|
||||||
|
let columns: Vec<String> = stmt
|
||||||
|
.query_map([], |row| row.get::<_, String>(1))
|
||||||
|
.unwrap()
|
||||||
|
.filter_map(|r| r.ok())
|
||||||
|
.collect();
|
||||||
|
columns.contains(&column.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_022_indexes_exist() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
|
||||||
|
// New indexes from migration 022
|
||||||
|
assert!(
|
||||||
|
index_exists(&conn, "idx_notes_user_created"),
|
||||||
|
"idx_notes_user_created should exist"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
index_exists(&conn, "idx_notes_project_created"),
|
||||||
|
"idx_notes_project_created should exist"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
index_exists(&conn, "idx_notes_author_id"),
|
||||||
|
"idx_notes_author_id should exist"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Discussion JOIN indexes (idx_discussions_issue_id is new;
|
||||||
|
// idx_discussions_mr_id already existed from migration 006 but
|
||||||
|
// IF NOT EXISTS makes it safe)
|
||||||
|
assert!(
|
||||||
|
index_exists(&conn, "idx_discussions_issue_id"),
|
||||||
|
"idx_discussions_issue_id should exist"
|
||||||
|
);
|
||||||
|
assert!(
|
||||||
|
index_exists(&conn, "idx_discussions_mr_id"),
|
||||||
|
"idx_discussions_mr_id should exist"
|
||||||
|
);
|
||||||
|
|
||||||
|
// author_id column on notes
|
||||||
|
assert!(
|
||||||
|
column_exists(&conn, "notes", "author_id"),
|
||||||
|
"notes.author_id column should exist"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Helper: insert a minimal project for FK satisfaction --
|
||||||
|
fn insert_test_project(conn: &Connection) -> i64 {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \
|
||||||
|
VALUES (1000, 'test/project', 'https://example.com/test/project')",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.last_insert_rowid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Helper: insert a minimal issue --
|
||||||
|
fn insert_test_issue(conn: &Connection, project_id: i64) -> i64 {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (gitlab_id, project_id, iid, state, author_username, \
|
||||||
|
created_at, updated_at, last_seen_at) \
|
||||||
|
VALUES (100, ?1, 1, 'opened', 'alice', 1000, 1000, 1000)",
|
||||||
|
[project_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.last_insert_rowid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Helper: insert a minimal discussion --
|
||||||
|
fn insert_test_discussion(conn: &Connection, project_id: i64, issue_id: i64) -> i64 {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, \
|
||||||
|
noteable_type, last_seen_at) \
|
||||||
|
VALUES ('disc-001', ?1, ?2, 'Issue', 1000)",
|
||||||
|
rusqlite::params![project_id, issue_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.last_insert_rowid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Helper: insert a minimal non-system note --
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
fn insert_test_note(
|
||||||
|
conn: &Connection,
|
||||||
|
gitlab_id: i64,
|
||||||
|
discussion_id: i64,
|
||||||
|
project_id: i64,
|
||||||
|
is_system: bool,
|
||||||
|
) -> i64 {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (gitlab_id, discussion_id, project_id, is_system, \
|
||||||
|
author_username, body, created_at, updated_at, last_seen_at) \
|
||||||
|
VALUES (?1, ?2, ?3, ?4, 'alice', 'note body', 1000, 1000, 1000)",
|
||||||
|
rusqlite::params![gitlab_id, discussion_id, project_id, is_system as i32],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.last_insert_rowid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Helper: insert a document --
|
||||||
|
fn insert_test_document(
|
||||||
|
conn: &Connection,
|
||||||
|
source_type: &str,
|
||||||
|
source_id: i64,
|
||||||
|
project_id: i64,
|
||||||
|
) -> i64 {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
|
||||||
|
VALUES (?1, ?2, ?3, 'test content', 'hash123')",
|
||||||
|
rusqlite::params![source_type, source_id, project_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.last_insert_rowid()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_allows_note_source_type() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
|
||||||
|
// Should succeed — 'note' is now allowed
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
|
||||||
|
VALUES ('note', 1, ?1, 'note content', 'hash-note')",
|
||||||
|
[pid],
|
||||||
|
)
|
||||||
|
.expect("INSERT with source_type='note' into documents should succeed");
|
||||||
|
|
||||||
|
// dirty_sources should also accept 'note'
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at) \
|
||||||
|
VALUES ('note', 1, 1000)",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.expect("INSERT with source_type='note' into dirty_sources should succeed");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_preserves_existing_data() {
|
||||||
|
// Run migrations up to 023 only, insert data, then apply 024
|
||||||
|
// Migration 024 is at index 23 (0-based). Use hardcoded index so adding
|
||||||
|
// later migrations doesn't silently shift what this test exercises.
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
|
||||||
|
// Apply migrations 001-023 (indices 0..23)
|
||||||
|
run_migrations_up_to(&conn, 23);
|
||||||
|
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
|
||||||
|
// Insert a document with existing source_type
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash, title) \
|
||||||
|
VALUES ('issue', 1, ?1, 'issue content', 'hash-issue', 'Test Issue')",
|
||||||
|
[pid],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
let doc_id: i64 = conn.last_insert_rowid();
|
||||||
|
|
||||||
|
// Insert junction data
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO document_labels (document_id, label_name) VALUES (?1, 'bug')",
|
||||||
|
[doc_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO document_paths (document_id, path) VALUES (?1, 'src/main.rs')",
|
||||||
|
[doc_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Insert dirty_sources row
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at) VALUES ('issue', 1, 1000)",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Now apply migration 024 (index 23) — the table-rebuild migration
|
||||||
|
run_single_migration(&conn, 23);
|
||||||
|
|
||||||
|
// Verify document still exists with correct data
|
||||||
|
let (st, content, title): (String, String, String) = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT source_type, content_text, title FROM documents WHERE id = ?1",
|
||||||
|
[doc_id],
|
||||||
|
|row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(st, "issue");
|
||||||
|
assert_eq!(content, "issue content");
|
||||||
|
assert_eq!(title, "Test Issue");
|
||||||
|
|
||||||
|
// Verify junction data preserved
|
||||||
|
let label_count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM document_labels WHERE document_id = ?1",
|
||||||
|
[doc_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(label_count, 1);
|
||||||
|
|
||||||
|
let path_count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM document_paths WHERE document_id = ?1",
|
||||||
|
[doc_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(path_count, 1);
|
||||||
|
|
||||||
|
// Verify dirty_sources preserved
|
||||||
|
let dirty_count: i64 = conn
|
||||||
|
.query_row("SELECT COUNT(*) FROM dirty_sources", [], |row| row.get(0))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(dirty_count, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_fts_triggers_intact() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
|
||||||
|
// Insert a document after migration — FTS trigger should fire
|
||||||
|
let doc_id = insert_test_document(&conn, "note", 1, pid);
|
||||||
|
|
||||||
|
// Verify FTS entry exists
|
||||||
|
let fts_count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents_fts WHERE documents_fts MATCH 'test'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert!(fts_count > 0, "FTS trigger should have created an entry");
|
||||||
|
|
||||||
|
// Verify update trigger works
|
||||||
|
conn.execute(
|
||||||
|
"UPDATE documents SET content_text = 'updated content' WHERE id = ?1",
|
||||||
|
[doc_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let fts_updated: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents_fts WHERE documents_fts MATCH 'updated'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert!(
|
||||||
|
fts_updated > 0,
|
||||||
|
"FTS update trigger should reflect new content"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify delete trigger works
|
||||||
|
conn.execute("DELETE FROM documents WHERE id = ?1", [doc_id])
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let fts_after_delete: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents_fts WHERE documents_fts MATCH 'updated'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
fts_after_delete, 0,
|
||||||
|
"FTS delete trigger should remove the entry"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_row_counts_preserved() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
|
||||||
|
// After full migration, tables should exist and be queryable
|
||||||
|
let doc_count: i64 = conn
|
||||||
|
.query_row("SELECT COUNT(*) FROM documents", [], |row| row.get(0))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(doc_count, 0, "Fresh DB should have 0 documents");
|
||||||
|
|
||||||
|
let dirty_count: i64 = conn
|
||||||
|
.query_row("SELECT COUNT(*) FROM dirty_sources", [], |row| row.get(0))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(dirty_count, 0, "Fresh DB should have 0 dirty_sources");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_integrity_checks_pass() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
|
||||||
|
// PRAGMA integrity_check
|
||||||
|
let integrity: String = conn
|
||||||
|
.query_row("PRAGMA integrity_check", [], |row| row.get(0))
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(integrity, "ok", "Database integrity check should pass");
|
||||||
|
|
||||||
|
// PRAGMA foreign_key_check (returns rows only if there are violations)
|
||||||
|
let fk_violations: i64 = conn
|
||||||
|
.query_row("SELECT COUNT(*) FROM pragma_foreign_key_check", [], |row| {
|
||||||
|
row.get(0)
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(fk_violations, 0, "No foreign key violations should exist");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_note_delete_trigger_cleans_document() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
let issue_id = insert_test_issue(&conn, pid);
|
||||||
|
let disc_id = insert_test_discussion(&conn, pid, issue_id);
|
||||||
|
let note_id = insert_test_note(&conn, 200, disc_id, pid, false);
|
||||||
|
|
||||||
|
// Create a document for this note
|
||||||
|
insert_test_document(&conn, "note", note_id, pid);
|
||||||
|
|
||||||
|
let doc_before: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
|
||||||
|
[note_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(doc_before, 1);
|
||||||
|
|
||||||
|
// Delete the note — trigger should remove the document
|
||||||
|
conn.execute("DELETE FROM notes WHERE id = ?1", [note_id])
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let doc_after: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
|
||||||
|
[note_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
doc_after, 0,
|
||||||
|
"notes_ad_cleanup trigger should delete the document"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_note_system_flip_trigger_cleans_document() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
let issue_id = insert_test_issue(&conn, pid);
|
||||||
|
let disc_id = insert_test_discussion(&conn, pid, issue_id);
|
||||||
|
let note_id = insert_test_note(&conn, 201, disc_id, pid, false);
|
||||||
|
|
||||||
|
// Create a document for this note
|
||||||
|
insert_test_document(&conn, "note", note_id, pid);
|
||||||
|
|
||||||
|
let doc_before: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
|
||||||
|
[note_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(doc_before, 1);
|
||||||
|
|
||||||
|
// Flip is_system from 0 to 1 — trigger should remove the document
|
||||||
|
conn.execute("UPDATE notes SET is_system = 1 WHERE id = ?1", [note_id])
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let doc_after: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
|
||||||
|
[note_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
doc_after, 0,
|
||||||
|
"notes_au_system_cleanup trigger should delete the document"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_024_system_note_delete_trigger_does_not_fire() {
|
||||||
|
let conn = setup_migrated_db();
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
let issue_id = insert_test_issue(&conn, pid);
|
||||||
|
let disc_id = insert_test_discussion(&conn, pid, issue_id);
|
||||||
|
|
||||||
|
// Insert a system note (is_system = true)
|
||||||
|
let note_id = insert_test_note(&conn, 202, disc_id, pid, true);
|
||||||
|
|
||||||
|
// Manually insert a document (shouldn't exist for system notes in practice,
|
||||||
|
// but we test the trigger guard)
|
||||||
|
insert_test_document(&conn, "note", note_id, pid);
|
||||||
|
|
||||||
|
let doc_before: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
|
||||||
|
[note_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(doc_before, 1);
|
||||||
|
|
||||||
|
// Delete system note — trigger has WHEN old.is_system = 0 so it should NOT fire
|
||||||
|
conn.execute("DELETE FROM notes WHERE id = ?1", [note_id])
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let doc_after: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?1",
|
||||||
|
[note_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
doc_after, 1,
|
||||||
|
"notes_ad_cleanup trigger should NOT fire for system notes"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run migrations only up to version `up_to` (inclusive).
|
||||||
|
fn run_migrations_up_to(conn: &Connection, up_to: usize) {
|
||||||
|
conn.execute_batch(
|
||||||
|
"CREATE TABLE IF NOT EXISTS schema_version ( \
|
||||||
|
version INTEGER PRIMARY KEY, applied_at INTEGER NOT NULL, description TEXT);",
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
for (version_str, sql) in &MIGRATIONS[..up_to] {
|
||||||
|
let version: i32 = version_str.parse().unwrap();
|
||||||
|
conn.execute_batch(sql).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT OR REPLACE INTO schema_version (version, applied_at, description) \
|
||||||
|
VALUES (?1, strftime('%s', 'now') * 1000, ?2)",
|
||||||
|
rusqlite::params![version, version_str],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run a single migration by index (0-based).
|
||||||
|
fn run_single_migration(conn: &Connection, index: usize) {
|
||||||
|
let (version_str, sql) = MIGRATIONS[index];
|
||||||
|
let version: i32 = version_str.parse().unwrap();
|
||||||
|
conn.execute_batch(sql).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT OR REPLACE INTO schema_version (version, applied_at, description) \
|
||||||
|
VALUES (?1, strftime('%s', 'now') * 1000, ?2)",
|
||||||
|
rusqlite::params![version, version_str],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_025_backfills_existing_notes() {
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
// Run all migrations through 024 (index 0..24)
|
||||||
|
run_migrations_up_to(&conn, 24);
|
||||||
|
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
let issue_id = insert_test_issue(&conn, pid);
|
||||||
|
let disc_id = insert_test_discussion(&conn, pid, issue_id);
|
||||||
|
|
||||||
|
// Insert 5 non-system notes
|
||||||
|
for i in 1..=5 {
|
||||||
|
insert_test_note(&conn, 300 + i, disc_id, pid, false);
|
||||||
|
}
|
||||||
|
// Insert 2 system notes
|
||||||
|
for i in 1..=2 {
|
||||||
|
insert_test_note(&conn, 400 + i, disc_id, pid, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run migration 025
|
||||||
|
run_single_migration(&conn, 24);
|
||||||
|
|
||||||
|
let dirty_count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
dirty_count, 5,
|
||||||
|
"Migration 025 should backfill 5 non-system notes"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify system notes were not backfilled
|
||||||
|
let system_note_ids: Vec<i64> = {
|
||||||
|
let mut stmt = conn
|
||||||
|
.prepare(
|
||||||
|
"SELECT source_id FROM dirty_sources WHERE source_type = 'note' ORDER BY source_id",
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
stmt.query_map([], |row| row.get(0))
|
||||||
|
.unwrap()
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()
|
||||||
|
.unwrap()
|
||||||
|
};
|
||||||
|
// System note ids should not appear
|
||||||
|
let all_system_note_ids: Vec<i64> = {
|
||||||
|
let mut stmt = conn
|
||||||
|
.prepare("SELECT id FROM notes WHERE is_system = 1 ORDER BY id")
|
||||||
|
.unwrap();
|
||||||
|
stmt.query_map([], |row| row.get(0))
|
||||||
|
.unwrap()
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()
|
||||||
|
.unwrap()
|
||||||
|
};
|
||||||
|
for sys_id in &all_system_note_ids {
|
||||||
|
assert!(
|
||||||
|
!system_note_ids.contains(sys_id),
|
||||||
|
"System note id {} should not be in dirty_sources",
|
||||||
|
sys_id
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_025_idempotent_with_existing_documents() {
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
run_migrations_up_to(&conn, 24);
|
||||||
|
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
let issue_id = insert_test_issue(&conn, pid);
|
||||||
|
let disc_id = insert_test_discussion(&conn, pid, issue_id);
|
||||||
|
|
||||||
|
// Insert 3 non-system notes
|
||||||
|
let note_ids: Vec<i64> = (1..=3)
|
||||||
|
.map(|i| insert_test_note(&conn, 500 + i, disc_id, pid, false))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Create documents for 2 of 3 notes (simulating already-generated docs)
|
||||||
|
insert_test_document(&conn, "note", note_ids[0], pid);
|
||||||
|
insert_test_document(&conn, "note", note_ids[1], pid);
|
||||||
|
|
||||||
|
// Run migration 025
|
||||||
|
run_single_migration(&conn, 24);
|
||||||
|
|
||||||
|
let dirty_count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
dirty_count, 1,
|
||||||
|
"Only the note without a document should be backfilled"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the correct note was queued
|
||||||
|
let queued_id: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT source_id FROM dirty_sources WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(queued_id, note_ids[2]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_migration_025_skips_notes_already_in_dirty_queue() {
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
run_migrations_up_to(&conn, 24);
|
||||||
|
|
||||||
|
let pid = insert_test_project(&conn);
|
||||||
|
let issue_id = insert_test_issue(&conn, pid);
|
||||||
|
let disc_id = insert_test_discussion(&conn, pid, issue_id);
|
||||||
|
|
||||||
|
// Insert 3 non-system notes
|
||||||
|
let note_ids: Vec<i64> = (1..=3)
|
||||||
|
.map(|i| insert_test_note(&conn, 600 + i, disc_id, pid, false))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Pre-queue one note in dirty_sources
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at) VALUES ('note', ?1, 999)",
|
||||||
|
[note_ids[0]],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Run migration 025
|
||||||
|
run_single_migration(&conn, 24);
|
||||||
|
|
||||||
|
let dirty_count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
dirty_count, 3,
|
||||||
|
"All 3 notes should be in dirty_sources (1 pre-existing + 2 new)"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the pre-existing entry preserved its original queued_at
|
||||||
|
let original_queued_at: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT queued_at FROM dirty_sources WHERE source_type = 'note' AND source_id = ?1",
|
||||||
|
[note_ids[0]],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
original_queued_at, 999,
|
||||||
|
"ON CONFLICT DO NOTHING should preserve the original queued_at"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -2,13 +2,14 @@ use chrono::DateTime;
|
|||||||
use rusqlite::Connection;
|
use rusqlite::Connection;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
use std::collections::BTreeSet;
|
use std::collections::{BTreeSet, HashMap};
|
||||||
use std::fmt::Write as _;
|
use std::fmt::Write as _;
|
||||||
|
|
||||||
use super::truncation::{
|
use super::truncation::{
|
||||||
MAX_DISCUSSION_BYTES, NoteContent, truncate_discussion, truncate_hard_cap,
|
MAX_DISCUSSION_BYTES, NoteContent, truncate_discussion, truncate_hard_cap,
|
||||||
};
|
};
|
||||||
use crate::core::error::Result;
|
use crate::core::error::Result;
|
||||||
|
use crate::core::time::ms_to_iso;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
#[serde(rename_all = "snake_case")]
|
#[serde(rename_all = "snake_case")]
|
||||||
@@ -16,6 +17,7 @@ pub enum SourceType {
|
|||||||
Issue,
|
Issue,
|
||||||
MergeRequest,
|
MergeRequest,
|
||||||
Discussion,
|
Discussion,
|
||||||
|
Note,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SourceType {
|
impl SourceType {
|
||||||
@@ -24,6 +26,7 @@ impl SourceType {
|
|||||||
Self::Issue => "issue",
|
Self::Issue => "issue",
|
||||||
Self::MergeRequest => "merge_request",
|
Self::MergeRequest => "merge_request",
|
||||||
Self::Discussion => "discussion",
|
Self::Discussion => "discussion",
|
||||||
|
Self::Note => "note",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -32,6 +35,7 @@ impl SourceType {
|
|||||||
"issue" | "issues" => Some(Self::Issue),
|
"issue" | "issues" => Some(Self::Issue),
|
||||||
"mr" | "mrs" | "merge_request" | "merge_requests" => Some(Self::MergeRequest),
|
"mr" | "mrs" | "merge_request" | "merge_requests" => Some(Self::MergeRequest),
|
||||||
"discussion" | "discussions" => Some(Self::Discussion),
|
"discussion" | "discussions" => Some(Self::Discussion),
|
||||||
|
"note" | "notes" => Some(Self::Note),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -515,6 +519,521 @@ pub fn extract_discussion_document(
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn extract_note_document(conn: &Connection, note_id: i64) -> Result<Option<DocumentData>> {
|
||||||
|
let row = conn.query_row(
|
||||||
|
"SELECT n.id, n.gitlab_id, n.author_username, n.body, n.note_type, n.is_system,
|
||||||
|
n.created_at, n.updated_at, n.position_new_path, n.position_new_line,
|
||||||
|
n.position_old_path, n.position_old_line, n.resolvable, n.resolved, n.resolved_by,
|
||||||
|
d.noteable_type, d.issue_id, d.merge_request_id,
|
||||||
|
p.path_with_namespace, p.id AS project_id
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN projects p ON n.project_id = p.id
|
||||||
|
WHERE n.id = ?1",
|
||||||
|
rusqlite::params![note_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, i64>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
row.get::<_, Option<String>>(3)?,
|
||||||
|
row.get::<_, Option<String>>(4)?,
|
||||||
|
row.get::<_, bool>(5)?,
|
||||||
|
row.get::<_, i64>(6)?,
|
||||||
|
row.get::<_, i64>(7)?,
|
||||||
|
row.get::<_, Option<String>>(8)?,
|
||||||
|
row.get::<_, Option<i64>>(9)?,
|
||||||
|
row.get::<_, Option<String>>(10)?,
|
||||||
|
row.get::<_, Option<i64>>(11)?,
|
||||||
|
row.get::<_, bool>(12)?,
|
||||||
|
row.get::<_, bool>(13)?,
|
||||||
|
row.get::<_, Option<String>>(14)?,
|
||||||
|
row.get::<_, String>(15)?,
|
||||||
|
row.get::<_, Option<i64>>(16)?,
|
||||||
|
row.get::<_, Option<i64>>(17)?,
|
||||||
|
row.get::<_, String>(18)?,
|
||||||
|
row.get::<_, i64>(19)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let (
|
||||||
|
_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
is_system,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
_position_old_line,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
_resolved_by,
|
||||||
|
noteable_type,
|
||||||
|
issue_id,
|
||||||
|
merge_request_id,
|
||||||
|
path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
) = match row {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
|
||||||
|
if is_system {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let (parent_iid, parent_title, parent_web_url, parent_type_label, labels) =
|
||||||
|
match noteable_type.as_str() {
|
||||||
|
"Issue" => {
|
||||||
|
let parent_id = match issue_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT i.iid, i.title, i.web_url FROM issues i WHERE i.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM issue_labels il
|
||||||
|
JOIN labels l ON l.id = il.label_id
|
||||||
|
WHERE il.issue_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
(iid, title, web_url, "Issue", labels)
|
||||||
|
}
|
||||||
|
"MergeRequest" => {
|
||||||
|
let parent_id = match merge_request_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT m.iid, m.title, m.web_url FROM merge_requests m WHERE m.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM mr_labels ml
|
||||||
|
JOIN labels l ON l.id = ml.label_id
|
||||||
|
WHERE ml.merge_request_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
(iid, title, web_url, "MergeRequest", labels)
|
||||||
|
}
|
||||||
|
_ => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
build_note_document(
|
||||||
|
note_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
parent_iid,
|
||||||
|
parent_title.as_deref(),
|
||||||
|
parent_web_url.as_deref(),
|
||||||
|
&labels,
|
||||||
|
parent_type_label,
|
||||||
|
&path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct ParentMetadata {
|
||||||
|
pub iid: i64,
|
||||||
|
pub title: Option<String>,
|
||||||
|
pub web_url: Option<String>,
|
||||||
|
pub labels: Vec<String>,
|
||||||
|
pub project_path: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct ParentMetadataCache {
|
||||||
|
cache: HashMap<(String, i64), Option<ParentMetadata>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for ParentMetadataCache {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ParentMetadataCache {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
cache: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_or_fetch(
|
||||||
|
&mut self,
|
||||||
|
conn: &Connection,
|
||||||
|
noteable_type: &str,
|
||||||
|
parent_id: i64,
|
||||||
|
project_path: &str,
|
||||||
|
) -> Result<Option<&ParentMetadata>> {
|
||||||
|
let key = (noteable_type.to_string(), parent_id);
|
||||||
|
if !self.cache.contains_key(&key) {
|
||||||
|
let meta = fetch_parent_metadata(conn, noteable_type, parent_id, project_path)?;
|
||||||
|
self.cache.insert(key.clone(), meta);
|
||||||
|
}
|
||||||
|
Ok(self.cache.get(&key).and_then(|m| m.as_ref()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fetch_parent_metadata(
|
||||||
|
conn: &Connection,
|
||||||
|
noteable_type: &str,
|
||||||
|
parent_id: i64,
|
||||||
|
project_path: &str,
|
||||||
|
) -> Result<Option<ParentMetadata>> {
|
||||||
|
match noteable_type {
|
||||||
|
"Issue" => {
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT i.iid, i.title, i.web_url FROM issues i WHERE i.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM issue_labels il
|
||||||
|
JOIN labels l ON l.id = il.label_id
|
||||||
|
WHERE il.issue_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
Ok(Some(ParentMetadata {
|
||||||
|
iid,
|
||||||
|
title,
|
||||||
|
web_url,
|
||||||
|
labels,
|
||||||
|
project_path: project_path.to_string(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
"MergeRequest" => {
|
||||||
|
let parent = conn.query_row(
|
||||||
|
"SELECT m.iid, m.title, m.web_url FROM merge_requests m WHERE m.id = ?1",
|
||||||
|
rusqlite::params![parent_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, Option<String>>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
let (iid, title, web_url) = match parent {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
let mut label_stmt = conn.prepare_cached(
|
||||||
|
"SELECT l.name FROM mr_labels ml
|
||||||
|
JOIN labels l ON l.id = ml.label_id
|
||||||
|
WHERE ml.merge_request_id = ?1
|
||||||
|
ORDER BY l.name",
|
||||||
|
)?;
|
||||||
|
let labels: Vec<String> = label_stmt
|
||||||
|
.query_map(rusqlite::params![parent_id], |row| row.get(0))?
|
||||||
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
Ok(Some(ParentMetadata {
|
||||||
|
iid,
|
||||||
|
title,
|
||||||
|
web_url,
|
||||||
|
labels,
|
||||||
|
project_path: project_path.to_string(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
_ => Ok(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn extract_note_document_cached(
|
||||||
|
conn: &Connection,
|
||||||
|
note_id: i64,
|
||||||
|
cache: &mut ParentMetadataCache,
|
||||||
|
) -> Result<Option<DocumentData>> {
|
||||||
|
let row = conn.query_row(
|
||||||
|
"SELECT n.id, n.gitlab_id, n.author_username, n.body, n.note_type, n.is_system,
|
||||||
|
n.created_at, n.updated_at, n.position_new_path, n.position_new_line,
|
||||||
|
n.position_old_path, n.position_old_line, n.resolvable, n.resolved, n.resolved_by,
|
||||||
|
d.noteable_type, d.issue_id, d.merge_request_id,
|
||||||
|
p.path_with_namespace, p.id AS project_id
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
JOIN projects p ON n.project_id = p.id
|
||||||
|
WHERE n.id = ?1",
|
||||||
|
rusqlite::params![note_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, i64>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
row.get::<_, Option<String>>(3)?,
|
||||||
|
row.get::<_, Option<String>>(4)?,
|
||||||
|
row.get::<_, bool>(5)?,
|
||||||
|
row.get::<_, i64>(6)?,
|
||||||
|
row.get::<_, i64>(7)?,
|
||||||
|
row.get::<_, Option<String>>(8)?,
|
||||||
|
row.get::<_, Option<i64>>(9)?,
|
||||||
|
row.get::<_, Option<String>>(10)?,
|
||||||
|
row.get::<_, Option<i64>>(11)?,
|
||||||
|
row.get::<_, bool>(12)?,
|
||||||
|
row.get::<_, bool>(13)?,
|
||||||
|
row.get::<_, Option<String>>(14)?,
|
||||||
|
row.get::<_, String>(15)?,
|
||||||
|
row.get::<_, Option<i64>>(16)?,
|
||||||
|
row.get::<_, Option<i64>>(17)?,
|
||||||
|
row.get::<_, String>(18)?,
|
||||||
|
row.get::<_, i64>(19)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
let (
|
||||||
|
_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
is_system,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
_position_old_line,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
_resolved_by,
|
||||||
|
noteable_type,
|
||||||
|
issue_id,
|
||||||
|
merge_request_id,
|
||||||
|
path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
) = match row {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
|
||||||
|
if is_system {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let parent_id = match noteable_type.as_str() {
|
||||||
|
"Issue" => match issue_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
},
|
||||||
|
"MergeRequest" => match merge_request_id {
|
||||||
|
Some(pid) => pid,
|
||||||
|
None => return Ok(None),
|
||||||
|
},
|
||||||
|
_ => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let parent = cache.get_or_fetch(conn, ¬eable_type, parent_id, &path_with_namespace)?;
|
||||||
|
let parent = match parent {
|
||||||
|
Some(p) => p,
|
||||||
|
None => return Ok(None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let parent_iid = parent.iid;
|
||||||
|
let parent_title = parent.title.as_deref();
|
||||||
|
let parent_web_url = parent.web_url.as_deref();
|
||||||
|
let labels = parent.labels.clone();
|
||||||
|
let parent_type_label = noteable_type.as_str();
|
||||||
|
|
||||||
|
build_note_document(
|
||||||
|
note_id,
|
||||||
|
gitlab_id,
|
||||||
|
author_username,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
position_new_path,
|
||||||
|
position_new_line,
|
||||||
|
position_old_path,
|
||||||
|
resolvable,
|
||||||
|
resolved,
|
||||||
|
parent_iid,
|
||||||
|
parent_title,
|
||||||
|
parent_web_url,
|
||||||
|
&labels,
|
||||||
|
parent_type_label,
|
||||||
|
&path_with_namespace,
|
||||||
|
project_id,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
fn build_note_document(
|
||||||
|
note_id: i64,
|
||||||
|
gitlab_id: i64,
|
||||||
|
author_username: Option<String>,
|
||||||
|
body: Option<String>,
|
||||||
|
note_type: Option<String>,
|
||||||
|
created_at: i64,
|
||||||
|
updated_at: i64,
|
||||||
|
position_new_path: Option<String>,
|
||||||
|
position_new_line: Option<i64>,
|
||||||
|
position_old_path: Option<String>,
|
||||||
|
resolvable: bool,
|
||||||
|
resolved: bool,
|
||||||
|
parent_iid: i64,
|
||||||
|
parent_title: Option<&str>,
|
||||||
|
parent_web_url: Option<&str>,
|
||||||
|
labels: &[String],
|
||||||
|
parent_type_label: &str,
|
||||||
|
path_with_namespace: &str,
|
||||||
|
project_id: i64,
|
||||||
|
) -> Result<Option<DocumentData>> {
|
||||||
|
let mut path_set = BTreeSet::new();
|
||||||
|
if let Some(ref p) = position_old_path
|
||||||
|
&& !p.is_empty()
|
||||||
|
{
|
||||||
|
path_set.insert(p.clone());
|
||||||
|
}
|
||||||
|
if let Some(ref p) = position_new_path
|
||||||
|
&& !p.is_empty()
|
||||||
|
{
|
||||||
|
path_set.insert(p.clone());
|
||||||
|
}
|
||||||
|
let paths: Vec<String> = path_set.into_iter().collect();
|
||||||
|
|
||||||
|
let url = parent_web_url.map(|wu| format!("{}#note_{}", wu, gitlab_id));
|
||||||
|
|
||||||
|
let display_title = parent_title.unwrap_or("(untitled)");
|
||||||
|
let display_note_type = note_type.as_deref().unwrap_or("Note");
|
||||||
|
let display_author = author_username.as_deref().unwrap_or("unknown");
|
||||||
|
let parent_prefix = if parent_type_label == "Issue" {
|
||||||
|
format!("Issue #{}", parent_iid)
|
||||||
|
} else {
|
||||||
|
format!("MR !{}", parent_iid)
|
||||||
|
};
|
||||||
|
|
||||||
|
let title = format!(
|
||||||
|
"Note by @{} on {}: {}",
|
||||||
|
display_author, parent_prefix, display_title
|
||||||
|
);
|
||||||
|
|
||||||
|
let labels_csv = labels.join(", ");
|
||||||
|
|
||||||
|
let mut content = String::new();
|
||||||
|
let _ = writeln!(content, "[[Note]]");
|
||||||
|
let _ = writeln!(content, "source_type: note");
|
||||||
|
let _ = writeln!(content, "note_gitlab_id: {}", gitlab_id);
|
||||||
|
let _ = writeln!(content, "project: {}", path_with_namespace);
|
||||||
|
let _ = writeln!(content, "parent_type: {}", parent_type_label);
|
||||||
|
let _ = writeln!(content, "parent_iid: {}", parent_iid);
|
||||||
|
let _ = writeln!(content, "parent_title: {}", display_title);
|
||||||
|
let _ = writeln!(content, "note_type: {}", display_note_type);
|
||||||
|
let _ = writeln!(content, "author: @{}", display_author);
|
||||||
|
let _ = writeln!(content, "created_at: {}", ms_to_iso(created_at));
|
||||||
|
if resolvable {
|
||||||
|
let _ = writeln!(content, "resolved: {}", resolved);
|
||||||
|
}
|
||||||
|
if display_note_type == "DiffNote"
|
||||||
|
&& let Some(ref p) = position_new_path
|
||||||
|
{
|
||||||
|
if let Some(line) = position_new_line {
|
||||||
|
let _ = writeln!(content, "path: {}:{}", p, line);
|
||||||
|
} else {
|
||||||
|
let _ = writeln!(content, "path: {}", p);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !labels.is_empty() {
|
||||||
|
let _ = writeln!(content, "labels: {}", labels_csv);
|
||||||
|
}
|
||||||
|
if let Some(ref u) = url {
|
||||||
|
let _ = writeln!(content, "url: {}", u);
|
||||||
|
}
|
||||||
|
|
||||||
|
content.push_str("\n--- Body ---\n\n");
|
||||||
|
content.push_str(body.as_deref().unwrap_or(""));
|
||||||
|
|
||||||
|
let labels_hash = compute_list_hash(labels);
|
||||||
|
let paths_hash = compute_list_hash(&paths);
|
||||||
|
|
||||||
|
let hard_cap = truncate_hard_cap(&content);
|
||||||
|
let content_hash = compute_content_hash(&hard_cap.content);
|
||||||
|
|
||||||
|
Ok(Some(DocumentData {
|
||||||
|
source_type: SourceType::Note,
|
||||||
|
source_id: note_id,
|
||||||
|
project_id,
|
||||||
|
author_username,
|
||||||
|
labels: labels.to_vec(),
|
||||||
|
paths,
|
||||||
|
labels_hash,
|
||||||
|
paths_hash,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
url,
|
||||||
|
title: Some(title),
|
||||||
|
content_text: hard_cap.content,
|
||||||
|
content_hash,
|
||||||
|
is_truncated: hard_cap.is_truncated,
|
||||||
|
truncated_reason: hard_cap.reason.map(|r| r.as_str().to_string()),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
@@ -545,6 +1064,26 @@ mod tests {
|
|||||||
assert_eq!(SourceType::parse("ISSUE"), Some(SourceType::Issue));
|
assert_eq!(SourceType::parse("ISSUE"), Some(SourceType::Issue));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_source_type_parse_note() {
|
||||||
|
assert_eq!(SourceType::parse("note"), Some(SourceType::Note));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_source_type_note_as_str() {
|
||||||
|
assert_eq!(SourceType::Note.as_str(), "note");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_source_type_note_display() {
|
||||||
|
assert_eq!(format!("{}", SourceType::Note), "note");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_source_type_parse_notes_alias() {
|
||||||
|
assert_eq!(SourceType::parse("notes"), Some(SourceType::Note));
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_source_type_as_str() {
|
fn test_source_type_as_str() {
|
||||||
assert_eq!(SourceType::Issue.as_str(), "issue");
|
assert_eq!(SourceType::Issue.as_str(), "issue");
|
||||||
@@ -1449,4 +1988,354 @@ mod tests {
|
|||||||
let result = extract_discussion_document(&conn, 1).unwrap();
|
let result = extract_discussion_document(&conn, 1).unwrap();
|
||||||
assert!(result.is_none());
|
assert!(result.is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
fn insert_note_with_type(
|
||||||
|
conn: &Connection,
|
||||||
|
id: i64,
|
||||||
|
gitlab_id: i64,
|
||||||
|
discussion_id: i64,
|
||||||
|
author: Option<&str>,
|
||||||
|
body: Option<&str>,
|
||||||
|
created_at: i64,
|
||||||
|
is_system: bool,
|
||||||
|
old_path: Option<&str>,
|
||||||
|
new_path: Option<&str>,
|
||||||
|
old_line: Option<i64>,
|
||||||
|
new_line: Option<i64>,
|
||||||
|
note_type: Option<&str>,
|
||||||
|
resolvable: bool,
|
||||||
|
resolved: bool,
|
||||||
|
) {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system, position_old_path, position_new_path, position_old_line, position_new_line, note_type, resolvable, resolved) VALUES (?1, ?2, ?3, 1, ?4, ?5, ?6, ?6, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14)",
|
||||||
|
rusqlite::params![id, gitlab_id, discussion_id, author, body, created_at, is_system as i32, old_path, new_path, old_line, new_line, note_type, resolvable as i32, resolved as i32],
|
||||||
|
).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_basic_format() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
42,
|
||||||
|
Some("Fix login bug"),
|
||||||
|
Some("desc"),
|
||||||
|
"opened",
|
||||||
|
Some("johndoe"),
|
||||||
|
Some("https://gitlab.example.com/group/project-one/-/issues/42"),
|
||||||
|
);
|
||||||
|
insert_discussion(&conn, 1, "Issue", Some(1), None);
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
12345,
|
||||||
|
1,
|
||||||
|
Some("alice"),
|
||||||
|
Some("This looks like a race condition"),
|
||||||
|
1710460800000,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert_eq!(doc.source_type, SourceType::Note);
|
||||||
|
assert_eq!(doc.source_id, 1);
|
||||||
|
assert_eq!(doc.project_id, 1);
|
||||||
|
assert_eq!(doc.author_username, Some("alice".to_string()));
|
||||||
|
assert!(doc.content_text.contains("[[Note]]"));
|
||||||
|
assert!(doc.content_text.contains("source_type: note"));
|
||||||
|
assert!(doc.content_text.contains("note_gitlab_id: 12345"));
|
||||||
|
assert!(doc.content_text.contains("project: group/project-one"));
|
||||||
|
assert!(doc.content_text.contains("parent_type: Issue"));
|
||||||
|
assert!(doc.content_text.contains("parent_iid: 42"));
|
||||||
|
assert!(doc.content_text.contains("parent_title: Fix login bug"));
|
||||||
|
assert!(doc.content_text.contains("author: @alice"));
|
||||||
|
assert!(doc.content_text.contains("--- Body ---"));
|
||||||
|
assert!(
|
||||||
|
doc.content_text
|
||||||
|
.contains("This looks like a race condition")
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
doc.title,
|
||||||
|
Some("Note by @alice on Issue #42: Fix login bug".to_string())
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
doc.url,
|
||||||
|
Some("https://gitlab.example.com/group/project-one/-/issues/42#note_12345".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_diffnote_with_path() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
10,
|
||||||
|
Some("Refactor auth"),
|
||||||
|
Some("desc"),
|
||||||
|
"opened",
|
||||||
|
None,
|
||||||
|
Some("https://gitlab.example.com/group/project-one/-/issues/10"),
|
||||||
|
);
|
||||||
|
insert_discussion(&conn, 1, "Issue", Some(1), None);
|
||||||
|
insert_note_with_type(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
555,
|
||||||
|
1,
|
||||||
|
Some("bob"),
|
||||||
|
Some("Unused variable here"),
|
||||||
|
1000,
|
||||||
|
false,
|
||||||
|
Some("src/old_auth.rs"),
|
||||||
|
Some("src/auth.rs"),
|
||||||
|
Some(10),
|
||||||
|
Some(25),
|
||||||
|
Some("DiffNote"),
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert!(doc.content_text.contains("note_type: DiffNote"));
|
||||||
|
assert!(doc.content_text.contains("path: src/auth.rs:25"));
|
||||||
|
assert!(doc.content_text.contains("resolved: false"));
|
||||||
|
assert_eq!(doc.paths, vec!["src/auth.rs", "src/old_auth.rs"]);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_inherits_parent_labels() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
10,
|
||||||
|
Some("Test"),
|
||||||
|
Some("desc"),
|
||||||
|
"opened",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
insert_label(&conn, 1, "backend");
|
||||||
|
insert_label(&conn, 2, "api");
|
||||||
|
link_issue_label(&conn, 1, 1);
|
||||||
|
link_issue_label(&conn, 1, 2);
|
||||||
|
insert_discussion(&conn, 1, "Issue", Some(1), None);
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
1,
|
||||||
|
Some("alice"),
|
||||||
|
Some("Note body"),
|
||||||
|
1000,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert_eq!(doc.labels, vec!["api", "backend"]);
|
||||||
|
assert!(doc.content_text.contains("labels: api, backend"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_mr_parent() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_mr(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
456,
|
||||||
|
Some("JWT Auth"),
|
||||||
|
Some("desc"),
|
||||||
|
Some("opened"),
|
||||||
|
Some("johndoe"),
|
||||||
|
Some("feature/jwt"),
|
||||||
|
Some("main"),
|
||||||
|
Some("https://gitlab.example.com/group/project-one/-/merge_requests/456"),
|
||||||
|
);
|
||||||
|
insert_discussion(&conn, 1, "MergeRequest", None, Some(1));
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
200,
|
||||||
|
1,
|
||||||
|
Some("reviewer"),
|
||||||
|
Some("Needs tests"),
|
||||||
|
1000,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert!(doc.content_text.contains("parent_type: MergeRequest"));
|
||||||
|
assert!(doc.content_text.contains("parent_iid: 456"));
|
||||||
|
assert_eq!(
|
||||||
|
doc.title,
|
||||||
|
Some("Note by @reviewer on MR !456: JWT Auth".to_string())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_system_note_returns_none() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
10,
|
||||||
|
Some("Test"),
|
||||||
|
Some("desc"),
|
||||||
|
"opened",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
insert_discussion(&conn, 1, "Issue", Some(1), None);
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
1,
|
||||||
|
Some("bot"),
|
||||||
|
Some("assigned to @alice"),
|
||||||
|
1000,
|
||||||
|
true,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = extract_note_document(&conn, 1).unwrap();
|
||||||
|
assert!(result.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_not_found() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
let result = extract_note_document(&conn, 999).unwrap();
|
||||||
|
assert!(result.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_orphaned_discussion() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_discussion(&conn, 1, "Issue", None, None);
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
1,
|
||||||
|
Some("alice"),
|
||||||
|
Some("Comment"),
|
||||||
|
1000,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let result = extract_note_document(&conn, 1).unwrap();
|
||||||
|
assert!(result.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_hash_deterministic() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
10,
|
||||||
|
Some("Test"),
|
||||||
|
Some("desc"),
|
||||||
|
"opened",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
insert_discussion(&conn, 1, "Issue", Some(1), None);
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
1,
|
||||||
|
Some("alice"),
|
||||||
|
Some("Comment"),
|
||||||
|
1000,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc1 = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
let doc2 = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert_eq!(doc1.content_hash, doc2.content_hash);
|
||||||
|
assert_eq!(doc1.labels_hash, doc2.labels_hash);
|
||||||
|
assert_eq!(doc1.paths_hash, doc2.paths_hash);
|
||||||
|
assert_eq!(doc1.content_hash.len(), 64);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_empty_body() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
10,
|
||||||
|
Some("Test"),
|
||||||
|
Some("desc"),
|
||||||
|
"opened",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
insert_discussion(&conn, 1, "Issue", Some(1), None);
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
1,
|
||||||
|
Some("alice"),
|
||||||
|
Some(""),
|
||||||
|
1000,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert!(doc.content_text.contains("--- Body ---\n\n"));
|
||||||
|
assert!(!doc.is_truncated);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_document_null_body() {
|
||||||
|
let conn = setup_discussion_test_db();
|
||||||
|
insert_issue(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
10,
|
||||||
|
Some("Test"),
|
||||||
|
Some("desc"),
|
||||||
|
"opened",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
insert_discussion(&conn, 1, "Issue", Some(1), None);
|
||||||
|
insert_note(
|
||||||
|
&conn,
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
1,
|
||||||
|
Some("alice"),
|
||||||
|
None,
|
||||||
|
1000,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let doc = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
assert!(doc.content_text.contains("--- Body ---\n\n"));
|
||||||
|
assert!(doc.content_text.ends_with("--- Body ---\n\n"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,8 +3,9 @@ mod regenerator;
|
|||||||
mod truncation;
|
mod truncation;
|
||||||
|
|
||||||
pub use extractor::{
|
pub use extractor::{
|
||||||
DocumentData, SourceType, compute_content_hash, compute_list_hash, extract_discussion_document,
|
DocumentData, ParentMetadataCache, SourceType, compute_content_hash, compute_list_hash,
|
||||||
extract_issue_document, extract_mr_document,
|
extract_discussion_document, extract_issue_document, extract_mr_document,
|
||||||
|
extract_note_document, extract_note_document_cached,
|
||||||
};
|
};
|
||||||
pub use regenerator::{RegenerateResult, regenerate_dirty_documents};
|
pub use regenerator::{RegenerateResult, regenerate_dirty_documents};
|
||||||
pub use truncation::{
|
pub use truncation::{
|
||||||
|
|||||||
@@ -4,8 +4,8 @@ use tracing::{debug, instrument, warn};
|
|||||||
|
|
||||||
use crate::core::error::Result;
|
use crate::core::error::Result;
|
||||||
use crate::documents::{
|
use crate::documents::{
|
||||||
DocumentData, SourceType, extract_discussion_document, extract_issue_document,
|
DocumentData, ParentMetadataCache, SourceType, extract_discussion_document,
|
||||||
extract_mr_document,
|
extract_issue_document, extract_mr_document, extract_note_document_cached,
|
||||||
};
|
};
|
||||||
use crate::ingestion::dirty_tracker::{clear_dirty, get_dirty_sources, record_dirty_error};
|
use crate::ingestion::dirty_tracker::{clear_dirty, get_dirty_sources, record_dirty_error};
|
||||||
|
|
||||||
@@ -27,6 +27,7 @@ pub fn regenerate_dirty_documents(
|
|||||||
let mut result = RegenerateResult::default();
|
let mut result = RegenerateResult::default();
|
||||||
|
|
||||||
let mut estimated_total: usize = 0;
|
let mut estimated_total: usize = 0;
|
||||||
|
let mut cache = ParentMetadataCache::new();
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
let dirty = get_dirty_sources(conn)?;
|
let dirty = get_dirty_sources(conn)?;
|
||||||
@@ -41,7 +42,7 @@ pub fn regenerate_dirty_documents(
|
|||||||
estimated_total = estimated_total.max(processed_so_far + remaining);
|
estimated_total = estimated_total.max(processed_so_far + remaining);
|
||||||
|
|
||||||
for (source_type, source_id) in &dirty {
|
for (source_type, source_id) in &dirty {
|
||||||
match regenerate_one(conn, *source_type, *source_id) {
|
match regenerate_one(conn, *source_type, *source_id, &mut cache) {
|
||||||
Ok(changed) => {
|
Ok(changed) => {
|
||||||
if changed {
|
if changed {
|
||||||
result.regenerated += 1;
|
result.regenerated += 1;
|
||||||
@@ -83,11 +84,17 @@ pub fn regenerate_dirty_documents(
|
|||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn regenerate_one(conn: &Connection, source_type: SourceType, source_id: i64) -> Result<bool> {
|
fn regenerate_one(
|
||||||
|
conn: &Connection,
|
||||||
|
source_type: SourceType,
|
||||||
|
source_id: i64,
|
||||||
|
cache: &mut ParentMetadataCache,
|
||||||
|
) -> Result<bool> {
|
||||||
let doc = match source_type {
|
let doc = match source_type {
|
||||||
SourceType::Issue => extract_issue_document(conn, source_id)?,
|
SourceType::Issue => extract_issue_document(conn, source_id)?,
|
||||||
SourceType::MergeRequest => extract_mr_document(conn, source_id)?,
|
SourceType::MergeRequest => extract_mr_document(conn, source_id)?,
|
||||||
SourceType::Discussion => extract_discussion_document(conn, source_id)?,
|
SourceType::Discussion => extract_discussion_document(conn, source_id)?,
|
||||||
|
SourceType::Note => extract_note_document_cached(conn, source_id, cache)?,
|
||||||
};
|
};
|
||||||
|
|
||||||
let Some(doc) = doc else {
|
let Some(doc) = doc else {
|
||||||
@@ -122,11 +129,7 @@ fn upsert_document_inner(conn: &Connection, doc: &DocumentData) -> Result<bool>
|
|||||||
)
|
)
|
||||||
.optional()?;
|
.optional()?;
|
||||||
|
|
||||||
let content_changed = match &existing {
|
// Fast path: if all three hashes match, nothing changed at all.
|
||||||
Some((_, old_content_hash, _, _)) => old_content_hash != &doc.content_hash,
|
|
||||||
None => true,
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some((_, ref old_content_hash, ref old_labels_hash, ref old_paths_hash)) = existing
|
if let Some((_, ref old_content_hash, ref old_labels_hash, ref old_paths_hash)) = existing
|
||||||
&& old_content_hash == &doc.content_hash
|
&& old_content_hash == &doc.content_hash
|
||||||
&& old_labels_hash == &doc.labels_hash
|
&& old_labels_hash == &doc.labels_hash
|
||||||
@@ -134,6 +137,7 @@ fn upsert_document_inner(conn: &Connection, doc: &DocumentData) -> Result<bool>
|
|||||||
{
|
{
|
||||||
return Ok(false);
|
return Ok(false);
|
||||||
}
|
}
|
||||||
|
// Past this point at least one hash differs, so the document will be updated.
|
||||||
|
|
||||||
let labels_json = serde_json::to_string(&doc.labels).unwrap_or_else(|_| "[]".to_string());
|
let labels_json = serde_json::to_string(&doc.labels).unwrap_or_else(|_| "[]".to_string());
|
||||||
|
|
||||||
@@ -243,7 +247,8 @@ fn upsert_document_inner(conn: &Connection, doc: &DocumentData) -> Result<bool>
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(content_changed)
|
// We passed the triple-hash fast path, so at least one hash differs.
|
||||||
|
Ok(true)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn delete_document(conn: &Connection, source_type: SourceType, source_id: i64) -> Result<()> {
|
fn delete_document(conn: &Connection, source_type: SourceType, source_id: i64) -> Result<()> {
|
||||||
@@ -473,4 +478,316 @@ mod tests {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(label_count, 1);
|
assert_eq!(label_count, 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn setup_note_db() -> Connection {
|
||||||
|
let conn = setup_db();
|
||||||
|
conn.execute_batch(
|
||||||
|
"
|
||||||
|
CREATE TABLE merge_requests (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
gitlab_id INTEGER UNIQUE NOT NULL,
|
||||||
|
project_id INTEGER NOT NULL REFERENCES projects(id),
|
||||||
|
iid INTEGER NOT NULL,
|
||||||
|
title TEXT,
|
||||||
|
description TEXT,
|
||||||
|
state TEXT,
|
||||||
|
draft INTEGER NOT NULL DEFAULT 0,
|
||||||
|
author_username TEXT,
|
||||||
|
source_branch TEXT,
|
||||||
|
target_branch TEXT,
|
||||||
|
head_sha TEXT,
|
||||||
|
references_short TEXT,
|
||||||
|
references_full TEXT,
|
||||||
|
detailed_merge_status TEXT,
|
||||||
|
merge_user_username TEXT,
|
||||||
|
created_at INTEGER,
|
||||||
|
updated_at INTEGER,
|
||||||
|
merged_at INTEGER,
|
||||||
|
closed_at INTEGER,
|
||||||
|
last_seen_at INTEGER NOT NULL,
|
||||||
|
discussions_synced_for_updated_at INTEGER,
|
||||||
|
discussions_sync_last_attempt_at INTEGER,
|
||||||
|
discussions_sync_attempts INTEGER DEFAULT 0,
|
||||||
|
discussions_sync_last_error TEXT,
|
||||||
|
resource_events_synced_for_updated_at INTEGER,
|
||||||
|
web_url TEXT,
|
||||||
|
raw_payload_id INTEGER
|
||||||
|
);
|
||||||
|
CREATE TABLE mr_labels (
|
||||||
|
merge_request_id INTEGER REFERENCES merge_requests(id),
|
||||||
|
label_id INTEGER REFERENCES labels(id),
|
||||||
|
PRIMARY KEY(merge_request_id, label_id)
|
||||||
|
);
|
||||||
|
CREATE TABLE discussions (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
gitlab_discussion_id TEXT NOT NULL,
|
||||||
|
project_id INTEGER NOT NULL REFERENCES projects(id),
|
||||||
|
issue_id INTEGER REFERENCES issues(id),
|
||||||
|
merge_request_id INTEGER,
|
||||||
|
noteable_type TEXT NOT NULL,
|
||||||
|
individual_note INTEGER NOT NULL DEFAULT 0,
|
||||||
|
first_note_at INTEGER,
|
||||||
|
last_note_at INTEGER,
|
||||||
|
last_seen_at INTEGER NOT NULL,
|
||||||
|
resolvable INTEGER NOT NULL DEFAULT 0,
|
||||||
|
resolved INTEGER NOT NULL DEFAULT 0
|
||||||
|
);
|
||||||
|
CREATE TABLE notes (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
gitlab_id INTEGER UNIQUE NOT NULL,
|
||||||
|
discussion_id INTEGER NOT NULL REFERENCES discussions(id),
|
||||||
|
project_id INTEGER NOT NULL REFERENCES projects(id),
|
||||||
|
note_type TEXT,
|
||||||
|
is_system INTEGER NOT NULL DEFAULT 0,
|
||||||
|
author_username TEXT,
|
||||||
|
body TEXT,
|
||||||
|
created_at INTEGER NOT NULL,
|
||||||
|
updated_at INTEGER NOT NULL,
|
||||||
|
last_seen_at INTEGER NOT NULL,
|
||||||
|
position INTEGER,
|
||||||
|
resolvable INTEGER NOT NULL DEFAULT 0,
|
||||||
|
resolved INTEGER NOT NULL DEFAULT 0,
|
||||||
|
resolved_by TEXT,
|
||||||
|
resolved_at INTEGER,
|
||||||
|
position_old_path TEXT,
|
||||||
|
position_new_path TEXT,
|
||||||
|
position_old_line INTEGER,
|
||||||
|
position_new_line INTEGER,
|
||||||
|
raw_payload_id INTEGER
|
||||||
|
);
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_regenerate_note_document() {
|
||||||
|
let conn = setup_note_db();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, web_url) VALUES (1, 10, 1, 42, 'Test Issue', 'opened', 'alice', 1000, 2000, 3000, 'https://example.com/issues/42')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (1, 'disc_1', 1, 1, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (1, 100, 1, 1, 'bob', 'This is a note', 1000, 2000, 3000, 0)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
mark_dirty(&conn, SourceType::Note, 1).unwrap();
|
||||||
|
let result = regenerate_dirty_documents(&conn, None).unwrap();
|
||||||
|
assert_eq!(result.regenerated, 1);
|
||||||
|
assert_eq!(result.unchanged, 0);
|
||||||
|
assert_eq!(result.errored, 0);
|
||||||
|
|
||||||
|
let (source_type, content): (String, String) = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT source_type, content_text FROM documents WHERE source_id = 1",
|
||||||
|
[],
|
||||||
|
|r| Ok((r.get(0)?, r.get(1)?)),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(source_type, "note");
|
||||||
|
assert!(content.contains("[[Note]]"));
|
||||||
|
assert!(content.contains("author: @bob"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_regenerate_note_system_note_deletes() {
|
||||||
|
let conn = setup_note_db();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at) VALUES (1, 10, 1, 42, 'Test', 'opened', 1000, 2000, 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (1, 'disc_1', 1, 1, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (1, 100, 1, 1, 'bot', 'assigned to @alice', 1000, 2000, 3000, 1)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
// Pre-insert a document for this note (simulating a previously-generated doc)
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) VALUES ('note', 1, 1, 'old content', 'oldhash')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
mark_dirty(&conn, SourceType::Note, 1).unwrap();
|
||||||
|
let result = regenerate_dirty_documents(&conn, None).unwrap();
|
||||||
|
assert_eq!(result.regenerated, 1);
|
||||||
|
|
||||||
|
let count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|r| r.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(count, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_regenerate_note_unchanged() {
|
||||||
|
let conn = setup_note_db();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at, web_url) VALUES (1, 10, 1, 42, 'Test', 'opened', 1000, 2000, 3000, 'https://example.com/issues/42')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (1, 'disc_1', 1, 1, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (1, 100, 1, 1, 'bob', 'Some note', 1000, 2000, 3000, 0)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
mark_dirty(&conn, SourceType::Note, 1).unwrap();
|
||||||
|
let r1 = regenerate_dirty_documents(&conn, None).unwrap();
|
||||||
|
assert_eq!(r1.regenerated, 1);
|
||||||
|
|
||||||
|
mark_dirty(&conn, SourceType::Note, 1).unwrap();
|
||||||
|
let r2 = regenerate_dirty_documents(&conn, None).unwrap();
|
||||||
|
assert_eq!(r2.unchanged, 1);
|
||||||
|
assert_eq!(r2.regenerated, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_regeneration_batch_uses_cache() {
|
||||||
|
let conn = setup_note_db();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, web_url) VALUES (1, 10, 1, 42, 'Shared Issue', 'opened', 'alice', 1000, 2000, 3000, 'https://example.com/issues/42')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (1, 'disc_1', 1, 1, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
for i in 1..=10 {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (?1, ?2, 1, 1, 'bob', ?3, 1000, 2000, 3000, 0)",
|
||||||
|
rusqlite::params![i, i * 100, format!("Note body {}", i)],
|
||||||
|
).unwrap();
|
||||||
|
mark_dirty(&conn, SourceType::Note, i).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = regenerate_dirty_documents(&conn, None).unwrap();
|
||||||
|
assert_eq!(result.regenerated, 10);
|
||||||
|
assert_eq!(result.errored, 0);
|
||||||
|
|
||||||
|
let count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|r| r.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(count, 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_regeneration_cache_consistent_with_direct_extraction() {
|
||||||
|
let conn = setup_note_db();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, author_username, created_at, updated_at, last_seen_at, web_url) VALUES (1, 10, 1, 42, 'Consistency Check', 'opened', 'alice', 1000, 2000, 3000, 'https://example.com/issues/42')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO labels (id, project_id, name) VALUES (1, 1, 'backend')",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issue_labels (issue_id, label_id) VALUES (1, 1)",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (1, 'disc_1', 1, 1, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (1, 100, 1, 1, 'bob', 'Some content', 1000, 2000, 3000, 0)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
use crate::documents::extract_note_document;
|
||||||
|
let direct = extract_note_document(&conn, 1).unwrap().unwrap();
|
||||||
|
|
||||||
|
let mut cache = ParentMetadataCache::new();
|
||||||
|
let cached = extract_note_document_cached(&conn, 1, &mut cache)
|
||||||
|
.unwrap()
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(direct.content_text, cached.content_text);
|
||||||
|
assert_eq!(direct.content_hash, cached.content_hash);
|
||||||
|
assert_eq!(direct.labels, cached.labels);
|
||||||
|
assert_eq!(direct.labels_hash, cached.labels_hash);
|
||||||
|
assert_eq!(direct.paths_hash, cached.paths_hash);
|
||||||
|
assert_eq!(direct.title, cached.title);
|
||||||
|
assert_eq!(direct.url, cached.url);
|
||||||
|
assert_eq!(direct.author_username, cached.author_username);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_regeneration_cache_invalidates_across_parents() {
|
||||||
|
let conn = setup_note_db();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at, web_url) VALUES (1, 10, 1, 42, 'Issue Alpha', 'opened', 1000, 2000, 3000, 'https://example.com/issues/42')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, last_seen_at, web_url) VALUES (2, 20, 1, 99, 'Issue Beta', 'opened', 1000, 2000, 3000, 'https://example.com/issues/99')",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (1, 'disc_1', 1, 1, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, last_seen_at) VALUES (2, 'disc_2', 1, 2, 'Issue', 3000)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (1, 100, 1, 1, 'bob', 'Alpha note', 1000, 2000, 3000, 0)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, last_seen_at, is_system) VALUES (2, 200, 2, 1, 'alice', 'Beta note', 1000, 2000, 3000, 0)",
|
||||||
|
[],
|
||||||
|
).unwrap();
|
||||||
|
|
||||||
|
mark_dirty(&conn, SourceType::Note, 1).unwrap();
|
||||||
|
mark_dirty(&conn, SourceType::Note, 2).unwrap();
|
||||||
|
|
||||||
|
let result = regenerate_dirty_documents(&conn, None).unwrap();
|
||||||
|
assert_eq!(result.regenerated, 2);
|
||||||
|
assert_eq!(result.errored, 0);
|
||||||
|
|
||||||
|
let alpha_content: String = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT content_text FROM documents WHERE source_type = 'note' AND source_id = 1",
|
||||||
|
[],
|
||||||
|
|r| r.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
let beta_content: String = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT content_text FROM documents WHERE source_type = 'note' AND source_id = 2",
|
||||||
|
[],
|
||||||
|
|r| r.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert!(alpha_content.contains("parent_iid: 42"));
|
||||||
|
assert!(alpha_content.contains("parent_title: Issue Alpha"));
|
||||||
|
assert!(beta_content.contains("parent_iid: 99"));
|
||||||
|
assert!(beta_content.contains("parent_title: Issue Beta"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ pub struct NormalizedNote {
|
|||||||
pub project_id: i64,
|
pub project_id: i64,
|
||||||
pub note_type: Option<String>,
|
pub note_type: Option<String>,
|
||||||
pub is_system: bool,
|
pub is_system: bool,
|
||||||
|
pub author_id: Option<i64>,
|
||||||
pub author_username: String,
|
pub author_username: String,
|
||||||
pub body: String,
|
pub body: String,
|
||||||
pub created_at: i64,
|
pub created_at: i64,
|
||||||
@@ -160,6 +161,7 @@ fn transform_single_note(
|
|||||||
project_id: local_project_id,
|
project_id: local_project_id,
|
||||||
note_type: note.note_type.clone(),
|
note_type: note.note_type.clone(),
|
||||||
is_system: note.system,
|
is_system: note.system,
|
||||||
|
author_id: Some(note.author.id),
|
||||||
author_username: note.author.username.clone(),
|
author_username: note.author.username.clone(),
|
||||||
body: note.body.clone(),
|
body: note.body.clone(),
|
||||||
created_at: parse_timestamp(¬e.created_at),
|
created_at: parse_timestamp(¬e.created_at),
|
||||||
@@ -265,6 +267,7 @@ fn transform_single_note_strict(
|
|||||||
project_id: local_project_id,
|
project_id: local_project_id,
|
||||||
note_type: note.note_type.clone(),
|
note_type: note.note_type.clone(),
|
||||||
is_system: note.system,
|
is_system: note.system,
|
||||||
|
author_id: Some(note.author.id),
|
||||||
author_username: note.author.username.clone(),
|
author_username: note.author.username.clone(),
|
||||||
body: note.body.clone(),
|
body: note.body.clone(),
|
||||||
created_at,
|
created_at,
|
||||||
|
|||||||
@@ -131,7 +131,7 @@ mod tests {
|
|||||||
let conn = Connection::open_in_memory().unwrap();
|
let conn = Connection::open_in_memory().unwrap();
|
||||||
conn.execute_batch("
|
conn.execute_batch("
|
||||||
CREATE TABLE dirty_sources (
|
CREATE TABLE dirty_sources (
|
||||||
source_type TEXT NOT NULL CHECK (source_type IN ('issue','merge_request','discussion')),
|
source_type TEXT NOT NULL CHECK (source_type IN ('issue','merge_request','discussion','note')),
|
||||||
source_id INTEGER NOT NULL,
|
source_id INTEGER NOT NULL,
|
||||||
queued_at INTEGER NOT NULL,
|
queued_at INTEGER NOT NULL,
|
||||||
attempt_count INTEGER NOT NULL DEFAULT 0,
|
attempt_count INTEGER NOT NULL DEFAULT 0,
|
||||||
@@ -258,6 +258,21 @@ mod tests {
|
|||||||
assert_eq!(count, 0);
|
assert_eq!(count, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mark_dirty_note_type() {
|
||||||
|
let conn = setup_db();
|
||||||
|
mark_dirty(&conn, SourceType::Note, 42).unwrap();
|
||||||
|
|
||||||
|
let results = get_dirty_sources(&conn).unwrap();
|
||||||
|
assert_eq!(results.len(), 1);
|
||||||
|
assert_eq!(results[0].0, SourceType::Note);
|
||||||
|
assert_eq!(results[0].1, 42);
|
||||||
|
|
||||||
|
clear_dirty(&conn, SourceType::Note, 42).unwrap();
|
||||||
|
let results = get_dirty_sources(&conn).unwrap();
|
||||||
|
assert!(results.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_drain_loop() {
|
fn test_drain_loop() {
|
||||||
let conn = setup_db();
|
let conn = setup_db();
|
||||||
|
|||||||
@@ -1,17 +1,26 @@
|
|||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
use rusqlite::Connection;
|
use rusqlite::{Connection, params};
|
||||||
use tracing::{debug, warn};
|
use tracing::{debug, warn};
|
||||||
|
|
||||||
use crate::Config;
|
use crate::Config;
|
||||||
use crate::core::error::Result;
|
use crate::core::error::Result;
|
||||||
use crate::core::payloads::{StorePayloadOptions, store_payload};
|
use crate::core::payloads::{StorePayloadOptions, store_payload};
|
||||||
|
use crate::core::time::now_ms;
|
||||||
use crate::documents::SourceType;
|
use crate::documents::SourceType;
|
||||||
use crate::gitlab::GitLabClient;
|
use crate::gitlab::GitLabClient;
|
||||||
use crate::gitlab::transformers::{NoteableRef, transform_discussion, transform_notes};
|
use crate::gitlab::transformers::{
|
||||||
|
NormalizedNote, NoteableRef, transform_discussion, transform_notes,
|
||||||
|
};
|
||||||
use crate::ingestion::dirty_tracker;
|
use crate::ingestion::dirty_tracker;
|
||||||
|
|
||||||
use super::issues::IssueForDiscussionSync;
|
use super::issues::IssueForDiscussionSync;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct NoteUpsertOutcome {
|
||||||
|
pub local_note_id: i64,
|
||||||
|
pub changed_semantics: bool,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug, Default)]
|
||||||
pub struct IngestDiscussionsResult {
|
pub struct IngestDiscussionsResult {
|
||||||
pub discussions_fetched: usize,
|
pub discussions_fetched: usize,
|
||||||
@@ -80,6 +89,8 @@ async fn ingest_discussions_for_issue(
|
|||||||
let mut seen_discussion_ids: Vec<String> = Vec::new();
|
let mut seen_discussion_ids: Vec<String> = Vec::new();
|
||||||
let mut pagination_error: Option<crate::core::error::LoreError> = None;
|
let mut pagination_error: Option<crate::core::error::LoreError> = None;
|
||||||
|
|
||||||
|
let run_seen_at = now_ms();
|
||||||
|
|
||||||
while let Some(disc_result) = discussions_stream.next().await {
|
while let Some(disc_result) = discussions_stream.next().await {
|
||||||
let gitlab_discussion = match disc_result {
|
let gitlab_discussion = match disc_result {
|
||||||
Ok(d) => d,
|
Ok(d) => d,
|
||||||
@@ -126,18 +137,29 @@ async fn ingest_discussions_for_issue(
|
|||||||
|
|
||||||
dirty_tracker::mark_dirty_tx(&tx, SourceType::Discussion, local_discussion_id)?;
|
dirty_tracker::mark_dirty_tx(&tx, SourceType::Discussion, local_discussion_id)?;
|
||||||
|
|
||||||
|
// Mark child note documents dirty (they inherit parent metadata)
|
||||||
|
tx.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at)
|
||||||
|
SELECT 'note', n.id, ?1
|
||||||
|
FROM notes n
|
||||||
|
WHERE n.discussion_id = ?2 AND n.is_system = 0
|
||||||
|
ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
|
||||||
|
params![now_ms(), local_discussion_id],
|
||||||
|
)?;
|
||||||
|
|
||||||
let notes = transform_notes(&gitlab_discussion, local_project_id);
|
let notes = transform_notes(&gitlab_discussion, local_project_id);
|
||||||
let notes_count = notes.len();
|
let notes_count = notes.len();
|
||||||
|
|
||||||
tx.execute(
|
|
||||||
"DELETE FROM notes WHERE discussion_id = ?",
|
|
||||||
[local_discussion_id],
|
|
||||||
)?;
|
|
||||||
|
|
||||||
for note in notes {
|
for note in notes {
|
||||||
insert_note(&tx, local_discussion_id, ¬e, None)?;
|
let outcome =
|
||||||
|
upsert_note_for_issue(&tx, local_discussion_id, ¬e, run_seen_at, None)?;
|
||||||
|
if !note.is_system && outcome.changed_semantics {
|
||||||
|
dirty_tracker::mark_dirty_tx(&tx, SourceType::Note, outcome.local_note_id)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
sweep_stale_issue_notes(&tx, local_discussion_id, run_seen_at)?;
|
||||||
|
|
||||||
tx.commit()?;
|
tx.commit()?;
|
||||||
|
|
||||||
result.discussions_upserted += 1;
|
result.discussions_upserted += 1;
|
||||||
@@ -198,38 +220,182 @@ fn upsert_discussion(
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_note(
|
fn upsert_note_for_issue(
|
||||||
conn: &Connection,
|
conn: &Connection,
|
||||||
discussion_id: i64,
|
discussion_id: i64,
|
||||||
note: &crate::gitlab::transformers::NormalizedNote,
|
note: &NormalizedNote,
|
||||||
|
last_seen_at: i64,
|
||||||
payload_id: Option<i64>,
|
payload_id: Option<i64>,
|
||||||
) -> Result<()> {
|
) -> Result<NoteUpsertOutcome> {
|
||||||
|
// Pre-read for semantic change detection
|
||||||
|
let existing = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT id, body, note_type, resolved, resolved_by,
|
||||||
|
position_old_path, position_new_path, position_old_line, position_new_line,
|
||||||
|
position_type, position_line_range_start, position_line_range_end,
|
||||||
|
position_base_sha, position_start_sha, position_head_sha
|
||||||
|
FROM notes WHERE gitlab_id = ?",
|
||||||
|
params![note.gitlab_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, String>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
row.get::<_, bool>(3)?,
|
||||||
|
row.get::<_, Option<String>>(4)?,
|
||||||
|
row.get::<_, Option<String>>(5)?,
|
||||||
|
row.get::<_, Option<String>>(6)?,
|
||||||
|
row.get::<_, Option<i32>>(7)?,
|
||||||
|
row.get::<_, Option<i32>>(8)?,
|
||||||
|
row.get::<_, Option<String>>(9)?,
|
||||||
|
row.get::<_, Option<i32>>(10)?,
|
||||||
|
row.get::<_, Option<i32>>(11)?,
|
||||||
|
row.get::<_, Option<String>>(12)?,
|
||||||
|
row.get::<_, Option<String>>(13)?,
|
||||||
|
row.get::<_, Option<String>>(14)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.ok();
|
||||||
|
|
||||||
|
let changed_semantics = match &existing {
|
||||||
|
Some((
|
||||||
|
_id,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
resolved,
|
||||||
|
resolved_by,
|
||||||
|
pos_old_path,
|
||||||
|
pos_new_path,
|
||||||
|
pos_old_line,
|
||||||
|
pos_new_line,
|
||||||
|
pos_type,
|
||||||
|
pos_range_start,
|
||||||
|
pos_range_end,
|
||||||
|
pos_base_sha,
|
||||||
|
pos_start_sha,
|
||||||
|
pos_head_sha,
|
||||||
|
)) => {
|
||||||
|
*body != note.body
|
||||||
|
|| *note_type != note.note_type
|
||||||
|
|| *resolved != note.resolved
|
||||||
|
|| *resolved_by != note.resolved_by
|
||||||
|
|| *pos_old_path != note.position_old_path
|
||||||
|
|| *pos_new_path != note.position_new_path
|
||||||
|
|| *pos_old_line != note.position_old_line
|
||||||
|
|| *pos_new_line != note.position_new_line
|
||||||
|
|| *pos_type != note.position_type
|
||||||
|
|| *pos_range_start != note.position_line_range_start
|
||||||
|
|| *pos_range_end != note.position_line_range_end
|
||||||
|
|| *pos_base_sha != note.position_base_sha
|
||||||
|
|| *pos_start_sha != note.position_start_sha
|
||||||
|
|| *pos_head_sha != note.position_head_sha
|
||||||
|
}
|
||||||
|
None => true,
|
||||||
|
};
|
||||||
|
|
||||||
conn.execute(
|
conn.execute(
|
||||||
"INSERT INTO notes (
|
"INSERT INTO notes (
|
||||||
gitlab_id, discussion_id, project_id, note_type, is_system,
|
gitlab_id, discussion_id, project_id, note_type, is_system,
|
||||||
author_username, body, created_at, updated_at, last_seen_at,
|
author_id, author_username, body, created_at, updated_at, last_seen_at,
|
||||||
position, resolvable, resolved, resolved_by, resolved_at, raw_payload_id
|
position, resolvable, resolved, resolved_by, resolved_at,
|
||||||
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16)",
|
position_old_path, position_new_path, position_old_line, position_new_line,
|
||||||
(
|
position_type, position_line_range_start, position_line_range_end,
|
||||||
|
position_base_sha, position_start_sha, position_head_sha,
|
||||||
|
raw_payload_id
|
||||||
|
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, ?18, ?19, ?20, ?21, ?22, ?23, ?24, ?25, ?26, ?27)
|
||||||
|
ON CONFLICT(gitlab_id) DO UPDATE SET
|
||||||
|
body = excluded.body,
|
||||||
|
note_type = excluded.note_type,
|
||||||
|
author_id = excluded.author_id,
|
||||||
|
updated_at = excluded.updated_at,
|
||||||
|
last_seen_at = excluded.last_seen_at,
|
||||||
|
resolvable = excluded.resolvable,
|
||||||
|
resolved = excluded.resolved,
|
||||||
|
resolved_by = excluded.resolved_by,
|
||||||
|
resolved_at = excluded.resolved_at,
|
||||||
|
position_old_path = excluded.position_old_path,
|
||||||
|
position_new_path = excluded.position_new_path,
|
||||||
|
position_old_line = excluded.position_old_line,
|
||||||
|
position_new_line = excluded.position_new_line,
|
||||||
|
position_type = excluded.position_type,
|
||||||
|
position_line_range_start = excluded.position_line_range_start,
|
||||||
|
position_line_range_end = excluded.position_line_range_end,
|
||||||
|
position_base_sha = excluded.position_base_sha,
|
||||||
|
position_start_sha = excluded.position_start_sha,
|
||||||
|
position_head_sha = excluded.position_head_sha,
|
||||||
|
raw_payload_id = COALESCE(excluded.raw_payload_id, raw_payload_id)",
|
||||||
|
params![
|
||||||
note.gitlab_id,
|
note.gitlab_id,
|
||||||
discussion_id,
|
discussion_id,
|
||||||
note.project_id,
|
note.project_id,
|
||||||
¬e.note_type,
|
¬e.note_type,
|
||||||
note.is_system,
|
note.is_system,
|
||||||
|
note.author_id,
|
||||||
¬e.author_username,
|
¬e.author_username,
|
||||||
¬e.body,
|
¬e.body,
|
||||||
note.created_at,
|
note.created_at,
|
||||||
note.updated_at,
|
note.updated_at,
|
||||||
note.last_seen_at,
|
last_seen_at,
|
||||||
note.position,
|
note.position,
|
||||||
note.resolvable,
|
note.resolvable,
|
||||||
note.resolved,
|
note.resolved,
|
||||||
¬e.resolved_by,
|
¬e.resolved_by,
|
||||||
note.resolved_at,
|
note.resolved_at,
|
||||||
|
¬e.position_old_path,
|
||||||
|
¬e.position_new_path,
|
||||||
|
note.position_old_line,
|
||||||
|
note.position_new_line,
|
||||||
|
¬e.position_type,
|
||||||
|
note.position_line_range_start,
|
||||||
|
note.position_line_range_end,
|
||||||
|
¬e.position_base_sha,
|
||||||
|
¬e.position_start_sha,
|
||||||
|
¬e.position_head_sha,
|
||||||
payload_id,
|
payload_id,
|
||||||
),
|
],
|
||||||
)?;
|
)?;
|
||||||
Ok(())
|
|
||||||
|
let local_note_id: i64 = conn.query_row(
|
||||||
|
"SELECT id FROM notes WHERE gitlab_id = ?",
|
||||||
|
params![note.gitlab_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(NoteUpsertOutcome {
|
||||||
|
local_note_id,
|
||||||
|
changed_semantics,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sweep_stale_issue_notes(
|
||||||
|
conn: &Connection,
|
||||||
|
discussion_id: i64,
|
||||||
|
last_seen_at: i64,
|
||||||
|
) -> Result<usize> {
|
||||||
|
// Step 1: Delete note documents for stale notes
|
||||||
|
conn.execute(
|
||||||
|
"DELETE FROM documents WHERE source_type = 'note' AND source_id IN
|
||||||
|
(SELECT id FROM notes WHERE discussion_id = ?1 AND last_seen_at < ?2 AND is_system = 0)",
|
||||||
|
params![discussion_id, last_seen_at],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Step 2: Delete dirty_sources entries for stale notes
|
||||||
|
conn.execute(
|
||||||
|
"DELETE FROM dirty_sources WHERE source_type = 'note' AND source_id IN
|
||||||
|
(SELECT id FROM notes WHERE discussion_id = ?1 AND last_seen_at < ?2 AND is_system = 0)",
|
||||||
|
params![discussion_id, last_seen_at],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Step 3: Delete the stale notes themselves
|
||||||
|
let deleted = conn.execute(
|
||||||
|
"DELETE FROM notes WHERE discussion_id = ?1 AND last_seen_at < ?2",
|
||||||
|
params![discussion_id, last_seen_at],
|
||||||
|
)?;
|
||||||
|
if deleted > 0 {
|
||||||
|
debug!(discussion_id, deleted, "Swept stale issue notes");
|
||||||
|
}
|
||||||
|
Ok(deleted)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remove_stale_discussions(
|
fn remove_stale_discussions(
|
||||||
@@ -303,6 +469,9 @@ fn update_issue_sync_timestamp(conn: &Connection, issue_id: i64, updated_at: i64
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
use crate::core::db::{create_connection, run_migrations};
|
||||||
|
use crate::gitlab::transformers::NormalizedNote;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn result_default_has_zero_counts() {
|
fn result_default_has_zero_counts() {
|
||||||
@@ -311,4 +480,462 @@ mod tests {
|
|||||||
assert_eq!(result.discussions_upserted, 0);
|
assert_eq!(result.discussions_upserted, 0);
|
||||||
assert_eq!(result.notes_upserted, 0);
|
assert_eq!(result.notes_upserted, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn setup() -> Connection {
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
run_migrations(&conn).unwrap();
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \
|
||||||
|
VALUES (1, 'group/repo', 'https://gitlab.com/group/repo')",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO issues (gitlab_id, iid, project_id, title, state, author_username, created_at, updated_at, last_seen_at) \
|
||||||
|
VALUES (100, 1, 1, 'Test Issue', 'opened', 'testuser', 1000, 2000, 3000)",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (gitlab_discussion_id, project_id, issue_id, noteable_type, individual_note, last_seen_at, resolvable, resolved) \
|
||||||
|
VALUES ('disc-1', 1, 1, 'Issue', 0, 3000, 0, 0)",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
conn
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_discussion_id(conn: &Connection) -> i64 {
|
||||||
|
conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
fn make_note(
|
||||||
|
gitlab_id: i64,
|
||||||
|
project_id: i64,
|
||||||
|
body: &str,
|
||||||
|
note_type: Option<&str>,
|
||||||
|
created_at: i64,
|
||||||
|
updated_at: i64,
|
||||||
|
resolved: bool,
|
||||||
|
resolved_by: Option<&str>,
|
||||||
|
) -> NormalizedNote {
|
||||||
|
NormalizedNote {
|
||||||
|
gitlab_id,
|
||||||
|
project_id,
|
||||||
|
note_type: note_type.map(String::from),
|
||||||
|
is_system: false,
|
||||||
|
author_id: None,
|
||||||
|
author_username: "testuser".to_string(),
|
||||||
|
body: body.to_string(),
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
last_seen_at: updated_at,
|
||||||
|
position: 0,
|
||||||
|
resolvable: false,
|
||||||
|
resolved,
|
||||||
|
resolved_by: resolved_by.map(String::from),
|
||||||
|
resolved_at: None,
|
||||||
|
position_old_path: None,
|
||||||
|
position_new_path: None,
|
||||||
|
position_old_line: None,
|
||||||
|
position_new_line: None,
|
||||||
|
position_type: None,
|
||||||
|
position_line_range_start: None,
|
||||||
|
position_line_range_end: None,
|
||||||
|
position_base_sha: None,
|
||||||
|
position_start_sha: None,
|
||||||
|
position_head_sha: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_upsert_stable_id() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
let last_seen_at = 5000;
|
||||||
|
|
||||||
|
let note1 = make_note(1001, 1, "First note", None, 1000, 2000, false, None);
|
||||||
|
let note2 = make_note(1002, 1, "Second note", None, 1000, 2000, false, None);
|
||||||
|
|
||||||
|
let out1 = upsert_note_for_issue(&conn, disc_id, ¬e1, last_seen_at, None).unwrap();
|
||||||
|
let out2 = upsert_note_for_issue(&conn, disc_id, ¬e2, last_seen_at, None).unwrap();
|
||||||
|
let id1 = out1.local_note_id;
|
||||||
|
let id2 = out2.local_note_id;
|
||||||
|
|
||||||
|
// Re-sync same gitlab_ids
|
||||||
|
let out1b = upsert_note_for_issue(&conn, disc_id, ¬e1, last_seen_at + 1, None).unwrap();
|
||||||
|
let out2b = upsert_note_for_issue(&conn, disc_id, ¬e2, last_seen_at + 1, None).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(id1, out1b.local_note_id);
|
||||||
|
assert_eq!(id2, out2b.local_note_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_upsert_detects_body_change() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let note = make_note(2001, 1, "Original body", None, 1000, 2000, false, None);
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
let mut changed = make_note(2001, 1, "Updated body", None, 1000, 3000, false, None);
|
||||||
|
changed.updated_at = 3000;
|
||||||
|
let outcome = upsert_note_for_issue(&conn, disc_id, &changed, 5001, None).unwrap();
|
||||||
|
assert!(outcome.changed_semantics);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_upsert_unchanged_returns_false() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let note = make_note(3001, 1, "Same body", None, 1000, 2000, false, None);
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Re-sync identical note
|
||||||
|
let outcome = upsert_note_for_issue(&conn, disc_id, ¬e, 5001, None).unwrap();
|
||||||
|
assert!(!outcome.changed_semantics);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_upsert_updated_at_only_does_not_mark_semantic_change() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let note = make_note(4001, 1, "Body stays", None, 1000, 2000, false, None);
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Only change updated_at (non-semantic field)
|
||||||
|
let mut same = make_note(4001, 1, "Body stays", None, 1000, 9999, false, None);
|
||||||
|
same.updated_at = 9999;
|
||||||
|
let outcome = upsert_note_for_issue(&conn, disc_id, &same, 5001, None).unwrap();
|
||||||
|
assert!(!outcome.changed_semantics);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_sweep_removes_stale() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let note1 = make_note(5001, 1, "Keep me", None, 1000, 2000, false, None);
|
||||||
|
let note2 = make_note(5002, 1, "Stale me", None, 1000, 2000, false, None);
|
||||||
|
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e1, 5000, None).unwrap();
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e2, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Re-sync only note1 with newer timestamp
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e1, 6000, None).unwrap();
|
||||||
|
|
||||||
|
// Sweep should remove note2 (last_seen_at=5000 < 6000)
|
||||||
|
let swept = sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
|
||||||
|
assert_eq!(swept, 1);
|
||||||
|
|
||||||
|
let count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM notes WHERE discussion_id = ?",
|
||||||
|
[disc_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(count, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_upsert_returns_local_id() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let note = make_note(6001, 1, "Check my ID", None, 1000, 2000, false, None);
|
||||||
|
let outcome = upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Verify the local_note_id matches what's in the DB
|
||||||
|
let db_id: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT id FROM notes WHERE gitlab_id = ?",
|
||||||
|
[6001_i64],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(outcome.local_note_id, db_id);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_upsert_captures_author_id() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let mut note = make_note(7001, 1, "With author", None, 1000, 2000, false, None);
|
||||||
|
note.author_id = Some(12345);
|
||||||
|
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
let stored: Option<i64> = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT author_id FROM notes WHERE gitlab_id = ?",
|
||||||
|
[7001_i64],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(stored, Some(12345));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_upsert_author_id_nullable() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let note = make_note(7002, 1, "No author id", None, 1000, 2000, false, None);
|
||||||
|
// author_id defaults to None in make_note
|
||||||
|
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
let stored: Option<i64> = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT author_id FROM notes WHERE gitlab_id = ?",
|
||||||
|
[7002_i64],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(stored, None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_note_author_id_survives_username_change() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let mut note = make_note(7003, 1, "Original body", None, 1000, 2000, false, None);
|
||||||
|
note.author_id = Some(99999);
|
||||||
|
note.author_username = "oldname".to_string();
|
||||||
|
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Re-sync with changed username, changed body, same author_id
|
||||||
|
let mut updated = make_note(7003, 1, "Updated body", None, 1000, 3000, false, None);
|
||||||
|
updated.author_id = Some(99999);
|
||||||
|
updated.author_username = "newname".to_string();
|
||||||
|
|
||||||
|
upsert_note_for_issue(&conn, disc_id, &updated, 5001, None).unwrap();
|
||||||
|
|
||||||
|
// author_id must survive the re-sync intact
|
||||||
|
let stored_id: Option<i64> = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT author_id FROM notes WHERE gitlab_id = ?",
|
||||||
|
[7003_i64],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(stored_id, Some(99999));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn insert_note_document(conn: &Connection, note_local_id: i64) {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
|
||||||
|
VALUES ('note', ?1, 1, 'note content', 'hash123')",
|
||||||
|
[note_local_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn insert_note_dirty_source(conn: &Connection, note_local_id: i64) {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at) \
|
||||||
|
VALUES ('note', ?1, 1000)",
|
||||||
|
[note_local_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn count_note_documents(conn: &Connection, note_local_id: i64) -> i64 {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?",
|
||||||
|
[note_local_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn count_note_dirty_sources(conn: &Connection, note_local_id: i64) -> i64 {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note' AND source_id = ?",
|
||||||
|
[note_local_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_issue_note_sweep_deletes_note_documents_immediately() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
// Insert 3 notes
|
||||||
|
let note1 = make_note(9001, 1, "Keep me", None, 1000, 2000, false, None);
|
||||||
|
let note2 = make_note(9002, 1, "Keep me too", None, 1000, 2000, false, None);
|
||||||
|
let note3 = make_note(9003, 1, "Stale me", None, 1000, 2000, false, None);
|
||||||
|
|
||||||
|
let out1 = upsert_note_for_issue(&conn, disc_id, ¬e1, 5000, None).unwrap();
|
||||||
|
let out2 = upsert_note_for_issue(&conn, disc_id, ¬e2, 5000, None).unwrap();
|
||||||
|
let out3 = upsert_note_for_issue(&conn, disc_id, ¬e3, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Add documents for all 3
|
||||||
|
insert_note_document(&conn, out1.local_note_id);
|
||||||
|
insert_note_document(&conn, out2.local_note_id);
|
||||||
|
insert_note_document(&conn, out3.local_note_id);
|
||||||
|
|
||||||
|
// Add dirty_sources for note3
|
||||||
|
insert_note_dirty_source(&conn, out3.local_note_id);
|
||||||
|
|
||||||
|
// Re-sync only notes 1 and 2 with newer timestamp
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e1, 6000, None).unwrap();
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e2, 6000, None).unwrap();
|
||||||
|
|
||||||
|
// Sweep should remove note3 and its document + dirty_source
|
||||||
|
sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
|
||||||
|
|
||||||
|
// Stale note's document should be gone
|
||||||
|
assert_eq!(count_note_documents(&conn, out3.local_note_id), 0);
|
||||||
|
assert_eq!(count_note_dirty_sources(&conn, out3.local_note_id), 0);
|
||||||
|
|
||||||
|
// Kept notes' documents should survive
|
||||||
|
assert_eq!(count_note_documents(&conn, out1.local_note_id), 1);
|
||||||
|
assert_eq!(count_note_documents(&conn, out2.local_note_id), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sweep_deletion_handles_note_without_document() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
let note = make_note(9004, 1, "No doc", None, 1000, 2000, false, None);
|
||||||
|
upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Don't insert any document -- sweep should still work without error
|
||||||
|
let swept = sweep_stale_issue_notes(&conn, disc_id, 6000).unwrap();
|
||||||
|
assert_eq!(swept, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_set_based_deletion_atomicity() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
// Insert a stale note with both document and dirty_source
|
||||||
|
let note = make_note(9005, 1, "Stale with deps", None, 1000, 2000, false, None);
|
||||||
|
let out = upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
insert_note_document(&conn, out.local_note_id);
|
||||||
|
insert_note_dirty_source(&conn, out.local_note_id);
|
||||||
|
|
||||||
|
// Verify they exist before sweep
|
||||||
|
assert_eq!(count_note_documents(&conn, out.local_note_id), 1);
|
||||||
|
assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 1);
|
||||||
|
|
||||||
|
// The sweep function already runs inside a transaction (called from
|
||||||
|
// ingest_discussions_for_issue's tx). Simulate by wrapping in a transaction.
|
||||||
|
let tx = conn.unchecked_transaction().unwrap();
|
||||||
|
sweep_stale_issue_notes(&tx, disc_id, 6000).unwrap();
|
||||||
|
tx.commit().unwrap();
|
||||||
|
|
||||||
|
// All three DELETEs must have happened
|
||||||
|
assert_eq!(count_note_documents(&conn, out.local_note_id), 0);
|
||||||
|
assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 0);
|
||||||
|
|
||||||
|
let note_count: i64 = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT COUNT(*) FROM notes WHERE gitlab_id = ?",
|
||||||
|
[9005_i64],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(note_count, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn count_dirty_notes(conn: &Connection) -> i64 {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT COUNT(*) FROM dirty_sources WHERE source_type = 'note'",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parent_title_change_marks_notes_dirty() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
// Insert two user notes and one system note
|
||||||
|
let note1 = make_note(10001, 1, "User note 1", None, 1000, 2000, false, None);
|
||||||
|
let note2 = make_note(10002, 1, "User note 2", None, 1000, 2000, false, None);
|
||||||
|
let mut sys_note = make_note(10003, 1, "System note", None, 1000, 2000, false, None);
|
||||||
|
sys_note.is_system = true;
|
||||||
|
|
||||||
|
let out1 = upsert_note_for_issue(&conn, disc_id, ¬e1, 5000, None).unwrap();
|
||||||
|
let out2 = upsert_note_for_issue(&conn, disc_id, ¬e2, 5000, None).unwrap();
|
||||||
|
upsert_note_for_issue(&conn, disc_id, &sys_note, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Clear any dirty_sources from individual note upserts
|
||||||
|
conn.execute("DELETE FROM dirty_sources WHERE source_type = 'note'", [])
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(count_dirty_notes(&conn), 0);
|
||||||
|
|
||||||
|
// Simulate parent title change triggering discussion re-ingest:
|
||||||
|
// update the issue title, then run the propagation SQL
|
||||||
|
conn.execute("UPDATE issues SET title = 'Changed Title' WHERE id = 1", [])
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Run the propagation query (same as in ingestion code)
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at)
|
||||||
|
SELECT 'note', n.id, ?1
|
||||||
|
FROM notes n
|
||||||
|
WHERE n.discussion_id = ?2 AND n.is_system = 0
|
||||||
|
ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
|
||||||
|
params![now_ms(), disc_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Both user notes should be dirty, system note should not
|
||||||
|
assert_eq!(count_dirty_notes(&conn), 2);
|
||||||
|
assert_eq!(count_note_dirty_sources(&conn, out1.local_note_id), 1);
|
||||||
|
assert_eq!(count_note_dirty_sources(&conn, out2.local_note_id), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parent_label_change_marks_notes_dirty() {
|
||||||
|
let conn = setup();
|
||||||
|
let disc_id = get_discussion_id(&conn);
|
||||||
|
|
||||||
|
// Insert one user note
|
||||||
|
let note = make_note(11001, 1, "User note", None, 1000, 2000, false, None);
|
||||||
|
let out = upsert_note_for_issue(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Clear dirty_sources
|
||||||
|
conn.execute("DELETE FROM dirty_sources WHERE source_type = 'note'", [])
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Simulate label change on parent issue (labels are part of issue metadata)
|
||||||
|
conn.execute("UPDATE issues SET updated_at = 9999 WHERE id = 1", [])
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Run propagation query
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at)
|
||||||
|
SELECT 'note', n.id, ?1
|
||||||
|
FROM notes n
|
||||||
|
WHERE n.discussion_id = ?2 AND n.is_system = 0
|
||||||
|
ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
|
||||||
|
params![now_ms(), disc_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
assert_eq!(count_dirty_notes(&conn), 1);
|
||||||
|
assert_eq!(count_note_dirty_sources(&conn, out.local_note_id), 1);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ use crate::gitlab::transformers::{
|
|||||||
};
|
};
|
||||||
use crate::gitlab::types::GitLabDiscussion;
|
use crate::gitlab::types::GitLabDiscussion;
|
||||||
use crate::ingestion::dirty_tracker;
|
use crate::ingestion::dirty_tracker;
|
||||||
|
use crate::ingestion::discussions::NoteUpsertOutcome;
|
||||||
|
|
||||||
use super::merge_requests::MrForDiscussionSync;
|
use super::merge_requests::MrForDiscussionSync;
|
||||||
|
|
||||||
@@ -161,6 +162,16 @@ pub fn write_prefetched_mr_discussions(
|
|||||||
|
|
||||||
dirty_tracker::mark_dirty_tx(&tx, SourceType::Discussion, local_discussion_id)?;
|
dirty_tracker::mark_dirty_tx(&tx, SourceType::Discussion, local_discussion_id)?;
|
||||||
|
|
||||||
|
// Mark child note documents dirty (they inherit parent metadata)
|
||||||
|
tx.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at)
|
||||||
|
SELECT 'note', n.id, ?1
|
||||||
|
FROM notes n
|
||||||
|
WHERE n.discussion_id = ?2 AND n.is_system = 0
|
||||||
|
ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
|
||||||
|
params![now_ms(), local_discussion_id],
|
||||||
|
)?;
|
||||||
|
|
||||||
for note in &disc.notes {
|
for note in &disc.notes {
|
||||||
let should_store_payload = !note.is_system
|
let should_store_payload = !note.is_system
|
||||||
|| note.position_new_path.is_some()
|
|| note.position_new_path.is_some()
|
||||||
@@ -187,7 +198,11 @@ pub fn write_prefetched_mr_discussions(
|
|||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
upsert_note(&tx, local_discussion_id, note, run_seen_at, note_payload_id)?;
|
let outcome =
|
||||||
|
upsert_note(&tx, local_discussion_id, note, run_seen_at, note_payload_id)?;
|
||||||
|
if !note.is_system && outcome.changed_semantics {
|
||||||
|
dirty_tracker::mark_dirty_tx(&tx, SourceType::Note, outcome.local_note_id)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tx.commit()?;
|
tx.commit()?;
|
||||||
@@ -361,6 +376,16 @@ async fn ingest_discussions_for_mr(
|
|||||||
|
|
||||||
dirty_tracker::mark_dirty_tx(&tx, SourceType::Discussion, local_discussion_id)?;
|
dirty_tracker::mark_dirty_tx(&tx, SourceType::Discussion, local_discussion_id)?;
|
||||||
|
|
||||||
|
// Mark child note documents dirty (they inherit parent metadata)
|
||||||
|
tx.execute(
|
||||||
|
"INSERT INTO dirty_sources (source_type, source_id, queued_at)
|
||||||
|
SELECT 'note', n.id, ?1
|
||||||
|
FROM notes n
|
||||||
|
WHERE n.discussion_id = ?2 AND n.is_system = 0
|
||||||
|
ON CONFLICT(source_type, source_id) DO UPDATE SET queued_at = excluded.queued_at, attempt_count = 0",
|
||||||
|
params![now_ms(), local_discussion_id],
|
||||||
|
)?;
|
||||||
|
|
||||||
for note in ¬es {
|
for note in ¬es {
|
||||||
let should_store_payload = !note.is_system
|
let should_store_payload = !note.is_system
|
||||||
|| note.position_new_path.is_some()
|
|| note.position_new_path.is_some()
|
||||||
@@ -390,7 +415,11 @@ async fn ingest_discussions_for_mr(
|
|||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
upsert_note(&tx, local_discussion_id, note, run_seen_at, note_payload_id)?;
|
let outcome =
|
||||||
|
upsert_note(&tx, local_discussion_id, note, run_seen_at, note_payload_id)?;
|
||||||
|
if !note.is_system && outcome.changed_semantics {
|
||||||
|
dirty_tracker::mark_dirty_tx(&tx, SourceType::Note, outcome.local_note_id)?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tx.commit()?;
|
tx.commit()?;
|
||||||
@@ -473,19 +502,87 @@ fn upsert_note(
|
|||||||
note: &NormalizedNote,
|
note: &NormalizedNote,
|
||||||
last_seen_at: i64,
|
last_seen_at: i64,
|
||||||
payload_id: Option<i64>,
|
payload_id: Option<i64>,
|
||||||
) -> Result<()> {
|
) -> Result<NoteUpsertOutcome> {
|
||||||
|
// Pre-read for semantic change detection
|
||||||
|
let existing = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT id, body, note_type, resolved, resolved_by,
|
||||||
|
position_old_path, position_new_path, position_old_line, position_new_line,
|
||||||
|
position_type, position_line_range_start, position_line_range_end,
|
||||||
|
position_base_sha, position_start_sha, position_head_sha
|
||||||
|
FROM notes WHERE gitlab_id = ?",
|
||||||
|
params![note.gitlab_id],
|
||||||
|
|row| {
|
||||||
|
Ok((
|
||||||
|
row.get::<_, i64>(0)?,
|
||||||
|
row.get::<_, String>(1)?,
|
||||||
|
row.get::<_, Option<String>>(2)?,
|
||||||
|
row.get::<_, bool>(3)?,
|
||||||
|
row.get::<_, Option<String>>(4)?,
|
||||||
|
row.get::<_, Option<String>>(5)?,
|
||||||
|
row.get::<_, Option<String>>(6)?,
|
||||||
|
row.get::<_, Option<i32>>(7)?,
|
||||||
|
row.get::<_, Option<i32>>(8)?,
|
||||||
|
row.get::<_, Option<String>>(9)?,
|
||||||
|
row.get::<_, Option<i32>>(10)?,
|
||||||
|
row.get::<_, Option<i32>>(11)?,
|
||||||
|
row.get::<_, Option<String>>(12)?,
|
||||||
|
row.get::<_, Option<String>>(13)?,
|
||||||
|
row.get::<_, Option<String>>(14)?,
|
||||||
|
))
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.ok();
|
||||||
|
|
||||||
|
let changed_semantics = match &existing {
|
||||||
|
Some((
|
||||||
|
_id,
|
||||||
|
body,
|
||||||
|
note_type,
|
||||||
|
resolved,
|
||||||
|
resolved_by,
|
||||||
|
pos_old_path,
|
||||||
|
pos_new_path,
|
||||||
|
pos_old_line,
|
||||||
|
pos_new_line,
|
||||||
|
pos_type,
|
||||||
|
pos_range_start,
|
||||||
|
pos_range_end,
|
||||||
|
pos_base_sha,
|
||||||
|
pos_start_sha,
|
||||||
|
pos_head_sha,
|
||||||
|
)) => {
|
||||||
|
*body != note.body
|
||||||
|
|| *note_type != note.note_type
|
||||||
|
|| *resolved != note.resolved
|
||||||
|
|| *resolved_by != note.resolved_by
|
||||||
|
|| *pos_old_path != note.position_old_path
|
||||||
|
|| *pos_new_path != note.position_new_path
|
||||||
|
|| *pos_old_line != note.position_old_line
|
||||||
|
|| *pos_new_line != note.position_new_line
|
||||||
|
|| *pos_type != note.position_type
|
||||||
|
|| *pos_range_start != note.position_line_range_start
|
||||||
|
|| *pos_range_end != note.position_line_range_end
|
||||||
|
|| *pos_base_sha != note.position_base_sha
|
||||||
|
|| *pos_start_sha != note.position_start_sha
|
||||||
|
|| *pos_head_sha != note.position_head_sha
|
||||||
|
}
|
||||||
|
None => true,
|
||||||
|
};
|
||||||
|
|
||||||
conn.execute(
|
conn.execute(
|
||||||
"INSERT INTO notes (
|
"INSERT INTO notes (
|
||||||
gitlab_id, discussion_id, project_id, note_type, is_system,
|
gitlab_id, discussion_id, project_id, note_type, is_system,
|
||||||
author_username, body, created_at, updated_at, last_seen_at,
|
author_id, author_username, body, created_at, updated_at, last_seen_at,
|
||||||
position, resolvable, resolved, resolved_by, resolved_at,
|
position, resolvable, resolved, resolved_by, resolved_at,
|
||||||
position_old_path, position_new_path, position_old_line, position_new_line,
|
position_old_path, position_new_path, position_old_line, position_new_line,
|
||||||
position_type, position_line_range_start, position_line_range_end,
|
position_type, position_line_range_start, position_line_range_end,
|
||||||
position_base_sha, position_start_sha, position_head_sha,
|
position_base_sha, position_start_sha, position_head_sha,
|
||||||
raw_payload_id
|
raw_payload_id
|
||||||
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, ?18, ?19, ?20, ?21, ?22, ?23, ?24, ?25, ?26)
|
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16, ?17, ?18, ?19, ?20, ?21, ?22, ?23, ?24, ?25, ?26, ?27)
|
||||||
ON CONFLICT(gitlab_id) DO UPDATE SET
|
ON CONFLICT(gitlab_id) DO UPDATE SET
|
||||||
note_type = excluded.note_type,
|
note_type = excluded.note_type,
|
||||||
|
author_id = excluded.author_id,
|
||||||
body = excluded.body,
|
body = excluded.body,
|
||||||
updated_at = excluded.updated_at,
|
updated_at = excluded.updated_at,
|
||||||
last_seen_at = excluded.last_seen_at,
|
last_seen_at = excluded.last_seen_at,
|
||||||
@@ -510,6 +607,7 @@ fn upsert_note(
|
|||||||
note.project_id,
|
note.project_id,
|
||||||
¬e.note_type,
|
¬e.note_type,
|
||||||
note.is_system,
|
note.is_system,
|
||||||
|
note.author_id,
|
||||||
¬e.author_username,
|
¬e.author_username,
|
||||||
¬e.body,
|
¬e.body,
|
||||||
note.created_at,
|
note.created_at,
|
||||||
@@ -533,7 +631,17 @@ fn upsert_note(
|
|||||||
payload_id,
|
payload_id,
|
||||||
],
|
],
|
||||||
)?;
|
)?;
|
||||||
Ok(())
|
|
||||||
|
let local_note_id: i64 = conn.query_row(
|
||||||
|
"SELECT id FROM notes WHERE gitlab_id = ?",
|
||||||
|
params![note.gitlab_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(NoteUpsertOutcome {
|
||||||
|
local_note_id,
|
||||||
|
changed_semantics,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn sweep_stale_discussions(conn: &Connection, local_mr_id: i64, run_seen_at: i64) -> Result<usize> {
|
fn sweep_stale_discussions(conn: &Connection, local_mr_id: i64, run_seen_at: i64) -> Result<usize> {
|
||||||
@@ -554,13 +662,36 @@ fn sweep_stale_notes(
|
|||||||
local_mr_id: i64,
|
local_mr_id: i64,
|
||||||
run_seen_at: i64,
|
run_seen_at: i64,
|
||||||
) -> Result<usize> {
|
) -> Result<usize> {
|
||||||
|
// Step 1: Delete note documents for stale notes
|
||||||
|
conn.execute(
|
||||||
|
"DELETE FROM documents WHERE source_type = 'note' AND source_id IN
|
||||||
|
(SELECT id FROM notes
|
||||||
|
WHERE project_id = ?1
|
||||||
|
AND discussion_id IN (SELECT id FROM discussions WHERE merge_request_id = ?2)
|
||||||
|
AND last_seen_at < ?3
|
||||||
|
AND is_system = 0)",
|
||||||
|
params![local_project_id, local_mr_id, run_seen_at],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Step 2: Delete dirty_sources entries for stale notes
|
||||||
|
conn.execute(
|
||||||
|
"DELETE FROM dirty_sources WHERE source_type = 'note' AND source_id IN
|
||||||
|
(SELECT id FROM notes
|
||||||
|
WHERE project_id = ?1
|
||||||
|
AND discussion_id IN (SELECT id FROM discussions WHERE merge_request_id = ?2)
|
||||||
|
AND last_seen_at < ?3
|
||||||
|
AND is_system = 0)",
|
||||||
|
params![local_project_id, local_mr_id, run_seen_at],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Step 3: Delete the stale notes themselves
|
||||||
let deleted = conn.execute(
|
let deleted = conn.execute(
|
||||||
"DELETE FROM notes
|
"DELETE FROM notes
|
||||||
WHERE project_id = ?
|
WHERE project_id = ?1
|
||||||
AND discussion_id IN (
|
AND discussion_id IN (
|
||||||
SELECT id FROM discussions WHERE merge_request_id = ?
|
SELECT id FROM discussions WHERE merge_request_id = ?2
|
||||||
)
|
)
|
||||||
AND last_seen_at < ?",
|
AND last_seen_at < ?3",
|
||||||
params![local_project_id, local_mr_id, run_seen_at],
|
params![local_project_id, local_mr_id, run_seen_at],
|
||||||
)?;
|
)?;
|
||||||
if deleted > 0 {
|
if deleted > 0 {
|
||||||
@@ -604,6 +735,8 @@ fn clear_sync_health_error(conn: &Connection, local_mr_id: i64) -> Result<()> {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
use crate::core::db::{create_connection, run_migrations};
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn result_default_has_zero_counts() {
|
fn result_default_has_zero_counts() {
|
||||||
@@ -621,4 +754,153 @@ mod tests {
|
|||||||
let result = IngestMrDiscussionsResult::default();
|
let result = IngestMrDiscussionsResult::default();
|
||||||
assert!(!result.pagination_succeeded);
|
assert!(!result.pagination_succeeded);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn setup_mr() -> Connection {
|
||||||
|
let conn = create_connection(Path::new(":memory:")).unwrap();
|
||||||
|
run_migrations(&conn).unwrap();
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO projects (gitlab_project_id, path_with_namespace, web_url) \
|
||||||
|
VALUES (1, 'group/repo', 'https://gitlab.com/group/repo')",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO merge_requests (gitlab_id, iid, project_id, title, state, \
|
||||||
|
author_username, source_branch, target_branch, created_at, updated_at, last_seen_at) \
|
||||||
|
VALUES (200, 1, 1, 'Test MR', 'opened', 'testuser', 'feat', 'main', 1000, 2000, 3000)",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO discussions (gitlab_discussion_id, project_id, merge_request_id, noteable_type, \
|
||||||
|
individual_note, last_seen_at, resolvable, resolved) \
|
||||||
|
VALUES ('mr-disc-1', 1, 1, 'MergeRequest', 0, 3000, 0, 0)",
|
||||||
|
[],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
conn
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_mr_discussion_id(conn: &Connection) -> i64 {
|
||||||
|
conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
fn make_mr_note(
|
||||||
|
gitlab_id: i64,
|
||||||
|
project_id: i64,
|
||||||
|
body: &str,
|
||||||
|
note_type: Option<&str>,
|
||||||
|
created_at: i64,
|
||||||
|
updated_at: i64,
|
||||||
|
resolved: bool,
|
||||||
|
resolved_by: Option<&str>,
|
||||||
|
) -> NormalizedNote {
|
||||||
|
NormalizedNote {
|
||||||
|
gitlab_id,
|
||||||
|
project_id,
|
||||||
|
note_type: note_type.map(String::from),
|
||||||
|
is_system: false,
|
||||||
|
author_id: None,
|
||||||
|
author_username: "testuser".to_string(),
|
||||||
|
body: body.to_string(),
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
last_seen_at: updated_at,
|
||||||
|
position: 0,
|
||||||
|
resolvable: false,
|
||||||
|
resolved,
|
||||||
|
resolved_by: resolved_by.map(String::from),
|
||||||
|
resolved_at: None,
|
||||||
|
position_old_path: None,
|
||||||
|
position_new_path: None,
|
||||||
|
position_old_line: None,
|
||||||
|
position_new_line: None,
|
||||||
|
position_type: None,
|
||||||
|
position_line_range_start: None,
|
||||||
|
position_line_range_end: None,
|
||||||
|
position_base_sha: None,
|
||||||
|
position_start_sha: None,
|
||||||
|
position_head_sha: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mr_note_upsert_captures_author_id() {
|
||||||
|
let conn = setup_mr();
|
||||||
|
let disc_id = get_mr_discussion_id(&conn);
|
||||||
|
|
||||||
|
let mut note = make_mr_note(8001, 1, "MR note", None, 1000, 2000, false, None);
|
||||||
|
note.author_id = Some(12345);
|
||||||
|
|
||||||
|
upsert_note(&conn, disc_id, ¬e, 5000, None).unwrap();
|
||||||
|
|
||||||
|
let stored: Option<i64> = conn
|
||||||
|
.query_row(
|
||||||
|
"SELECT author_id FROM notes WHERE gitlab_id = ?",
|
||||||
|
[8001_i64],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
assert_eq!(stored, Some(12345));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn insert_note_document(conn: &Connection, note_local_id: i64) {
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO documents (source_type, source_id, project_id, content_text, content_hash) \
|
||||||
|
VALUES ('note', ?1, 1, 'note content', 'hash123')",
|
||||||
|
[note_local_id],
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn count_note_documents(conn: &Connection, note_local_id: i64) -> i64 {
|
||||||
|
conn.query_row(
|
||||||
|
"SELECT COUNT(*) FROM documents WHERE source_type = 'note' AND source_id = ?",
|
||||||
|
[note_local_id],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_mr_note_sweep_deletes_note_documents_immediately() {
|
||||||
|
let conn = setup_mr();
|
||||||
|
let disc_id = get_mr_discussion_id(&conn);
|
||||||
|
let local_project_id = 1;
|
||||||
|
let local_mr_id = 1;
|
||||||
|
|
||||||
|
// Insert 3 notes
|
||||||
|
let note1 = make_mr_note(8101, 1, "Keep", None, 1000, 2000, false, None);
|
||||||
|
let note2 = make_mr_note(8102, 1, "Keep too", None, 1000, 2000, false, None);
|
||||||
|
let note3 = make_mr_note(8103, 1, "Stale", None, 1000, 2000, false, None);
|
||||||
|
|
||||||
|
let out1 = upsert_note(&conn, disc_id, ¬e1, 5000, None).unwrap();
|
||||||
|
let out2 = upsert_note(&conn, disc_id, ¬e2, 5000, None).unwrap();
|
||||||
|
let out3 = upsert_note(&conn, disc_id, ¬e3, 5000, None).unwrap();
|
||||||
|
|
||||||
|
// Add documents for all 3
|
||||||
|
insert_note_document(&conn, out1.local_note_id);
|
||||||
|
insert_note_document(&conn, out2.local_note_id);
|
||||||
|
insert_note_document(&conn, out3.local_note_id);
|
||||||
|
|
||||||
|
// Re-sync only notes 1 and 2
|
||||||
|
upsert_note(&conn, disc_id, ¬e1, 6000, None).unwrap();
|
||||||
|
upsert_note(&conn, disc_id, ¬e2, 6000, None).unwrap();
|
||||||
|
|
||||||
|
// Sweep stale notes
|
||||||
|
sweep_stale_notes(&conn, local_project_id, local_mr_id, 6000).unwrap();
|
||||||
|
|
||||||
|
// Stale note's document should be gone
|
||||||
|
assert_eq!(count_note_documents(&conn, out3.local_note_id), 0);
|
||||||
|
|
||||||
|
// Kept notes' documents should survive
|
||||||
|
assert_eq!(count_note_documents(&conn, out1.local_note_id), 1);
|
||||||
|
assert_eq!(count_note_documents(&conn, out2.local_note_id), 1);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
94
src/main.rs
94
src/main.rs
@@ -11,23 +11,25 @@ use lore::Config;
|
|||||||
use lore::cli::autocorrect::{self, CorrectionResult};
|
use lore::cli::autocorrect::{self, CorrectionResult};
|
||||||
use lore::cli::commands::{
|
use lore::cli::commands::{
|
||||||
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
||||||
SearchCliFilters, SyncOptions, TimelineParams, open_issue_in_browser, open_mr_in_browser,
|
NoteListFilters, SearchCliFilters, SyncOptions, TimelineParams, open_issue_in_browser,
|
||||||
print_count, print_count_json, print_doctor_results, print_drift_human, print_drift_json,
|
open_mr_in_browser, print_count, print_count_json, print_doctor_results, print_drift_human,
|
||||||
print_dry_run_preview, print_dry_run_preview_json, print_embed, print_embed_json,
|
print_drift_json, print_dry_run_preview, print_dry_run_preview_json, print_embed,
|
||||||
print_event_count, print_event_count_json, print_generate_docs, print_generate_docs_json,
|
print_embed_json, print_event_count, print_event_count_json, print_generate_docs,
|
||||||
print_ingest_summary, print_ingest_summary_json, print_list_issues, print_list_issues_json,
|
print_generate_docs_json, print_ingest_summary, print_ingest_summary_json, print_list_issues,
|
||||||
print_list_mrs, print_list_mrs_json, print_search_results, print_search_results_json,
|
print_list_issues_json, print_list_mrs, print_list_mrs_json, print_list_notes,
|
||||||
print_show_issue, print_show_issue_json, print_show_mr, print_show_mr_json, print_stats,
|
print_list_notes_csv, print_list_notes_json, print_list_notes_jsonl, print_search_results,
|
||||||
print_stats_json, print_sync, print_sync_json, print_sync_status, print_sync_status_json,
|
print_search_results_json, print_show_issue, print_show_issue_json, print_show_mr,
|
||||||
print_timeline, print_timeline_json_with_meta, print_who_human, print_who_json, run_auth_test,
|
print_show_mr_json, print_stats, print_stats_json, print_sync, print_sync_json,
|
||||||
run_count, run_count_events, run_doctor, run_drift, run_embed, run_generate_docs, run_ingest,
|
print_sync_status, print_sync_status_json, print_timeline, print_timeline_json_with_meta,
|
||||||
run_ingest_dry_run, run_init, run_list_issues, run_list_mrs, run_search, run_show_issue,
|
print_who_human, print_who_json, query_notes, run_auth_test, run_count, run_count_events,
|
||||||
run_show_mr, run_stats, run_sync, run_sync_status, run_timeline, run_who,
|
run_doctor, run_drift, run_embed, run_generate_docs, run_ingest, run_ingest_dry_run, run_init,
|
||||||
|
run_list_issues, run_list_mrs, run_search, run_show_issue, run_show_mr, run_stats, run_sync,
|
||||||
|
run_sync_status, run_timeline, run_who,
|
||||||
};
|
};
|
||||||
use lore::cli::robot::{RobotMeta, strip_schemas};
|
use lore::cli::robot::{RobotMeta, strip_schemas};
|
||||||
use lore::cli::{
|
use lore::cli::{
|
||||||
Cli, Commands, CountArgs, EmbedArgs, GenerateDocsArgs, IngestArgs, IssuesArgs, MrsArgs,
|
Cli, Commands, CountArgs, EmbedArgs, GenerateDocsArgs, IngestArgs, IssuesArgs, MrsArgs,
|
||||||
SearchArgs, StatsArgs, SyncArgs, TimelineArgs, WhoArgs,
|
NotesArgs, SearchArgs, StatsArgs, SyncArgs, TimelineArgs, WhoArgs,
|
||||||
};
|
};
|
||||||
use lore::core::db::{
|
use lore::core::db::{
|
||||||
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
|
LATEST_SCHEMA_VERSION, create_connection, get_schema_version, run_migrations,
|
||||||
@@ -173,6 +175,7 @@ async fn main() {
|
|||||||
}
|
}
|
||||||
Some(Commands::Issues(args)) => handle_issues(cli.config.as_deref(), args, robot_mode),
|
Some(Commands::Issues(args)) => handle_issues(cli.config.as_deref(), args, robot_mode),
|
||||||
Some(Commands::Mrs(args)) => handle_mrs(cli.config.as_deref(), args, robot_mode),
|
Some(Commands::Mrs(args)) => handle_mrs(cli.config.as_deref(), args, robot_mode),
|
||||||
|
Some(Commands::Notes(args)) => handle_notes(cli.config.as_deref(), args, robot_mode),
|
||||||
Some(Commands::Search(args)) => {
|
Some(Commands::Search(args)) => {
|
||||||
handle_search(cli.config.as_deref(), args, robot_mode).await
|
handle_search(cli.config.as_deref(), args, robot_mode).await
|
||||||
}
|
}
|
||||||
@@ -801,6 +804,59 @@ fn handle_mrs(
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn handle_notes(
|
||||||
|
config_override: Option<&str>,
|
||||||
|
args: NotesArgs,
|
||||||
|
robot_mode: bool,
|
||||||
|
) -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let start = std::time::Instant::now();
|
||||||
|
let config = Config::load(config_override)?;
|
||||||
|
let db_path = get_db_path(config.storage.db_path.as_deref());
|
||||||
|
let conn = create_connection(&db_path)?;
|
||||||
|
|
||||||
|
let order = if args.asc { "asc" } else { "desc" };
|
||||||
|
let filters = NoteListFilters {
|
||||||
|
limit: args.limit,
|
||||||
|
project: args.project,
|
||||||
|
author: args.author,
|
||||||
|
note_type: args.note_type,
|
||||||
|
include_system: args.include_system,
|
||||||
|
for_issue_iid: args.for_issue,
|
||||||
|
for_mr_iid: args.for_mr,
|
||||||
|
note_id: args.note_id,
|
||||||
|
gitlab_note_id: args.gitlab_note_id,
|
||||||
|
discussion_id: args.discussion_id,
|
||||||
|
since: args.since,
|
||||||
|
until: args.until,
|
||||||
|
path: args.path,
|
||||||
|
contains: args.contains,
|
||||||
|
resolution: args.resolution,
|
||||||
|
sort: args.sort,
|
||||||
|
order: order.to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = query_notes(&conn, &filters, &config)?;
|
||||||
|
|
||||||
|
let format = if robot_mode && args.format == "table" {
|
||||||
|
"json"
|
||||||
|
} else {
|
||||||
|
&args.format
|
||||||
|
};
|
||||||
|
|
||||||
|
match format {
|
||||||
|
"json" => print_list_notes_json(
|
||||||
|
&result,
|
||||||
|
start.elapsed().as_millis() as u64,
|
||||||
|
args.fields.as_deref(),
|
||||||
|
),
|
||||||
|
"jsonl" => print_list_notes_jsonl(&result),
|
||||||
|
"csv" => print_list_notes_csv(&result),
|
||||||
|
_ => print_list_notes(&result),
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
async fn handle_ingest(
|
async fn handle_ingest(
|
||||||
config_override: Option<&str>,
|
config_override: Option<&str>,
|
||||||
args: IngestArgs,
|
args: IngestArgs,
|
||||||
@@ -2317,6 +2373,17 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"active_minimal": ["entity_type", "iid", "title", "participants"]
|
"active_minimal": ["entity_type", "iid", "title", "participants"]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"notes": {
|
||||||
|
"description": "List notes from discussions with rich filtering",
|
||||||
|
"flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--format <table|json|jsonl|csv>", "--fields <list|minimal>", "--open"],
|
||||||
|
"robot_flags": ["--format json", "--fields minimal"],
|
||||||
|
"example": "lore --robot notes --author jdefting --since 1y --format json --fields minimal",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"notes": "[NoteListRowJson]", "total_count": "int", "showing": "int"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
"robot-docs": {
|
"robot-docs": {
|
||||||
"description": "This command (agent self-discovery manifest)",
|
"description": "This command (agent self-discovery manifest)",
|
||||||
"flags": ["--brief"],
|
"flags": ["--brief"],
|
||||||
@@ -2338,6 +2405,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"search: FTS5 + vector hybrid search across all entities",
|
"search: FTS5 + vector hybrid search across all entities",
|
||||||
"who: Expert/workload/reviews analysis per file path or person",
|
"who: Expert/workload/reviews analysis per file path or person",
|
||||||
"timeline: Chronological event reconstruction across entities",
|
"timeline: Chronological event reconstruction across entities",
|
||||||
|
"notes: Rich note listing with author, type, resolution, path, and discussion filters",
|
||||||
"stats: Database statistics with document/note/discussion counts",
|
"stats: Database statistics with document/note/discussion counts",
|
||||||
"count: Entity counts with state breakdowns",
|
"count: Entity counts with state breakdowns",
|
||||||
"embed: Generate vector embeddings for semantic search via Ollama"
|
"embed: Generate vector embeddings for semantic search via Ollama"
|
||||||
|
|||||||
Reference in New Issue
Block a user