From 050e00345a21e03d18e25e0367f4d60b8f0bc681 Mon Sep 17 00:00:00 2001 From: teernisse Date: Wed, 18 Feb 2026 15:03:30 -0500 Subject: [PATCH] =?UTF-8?q?feat(tui):=20Phase=202=20detail=20screens=20?= =?UTF-8?q?=E2=80=94=20Issue=20Detail,=20MR=20Detail,=20discussion=20tree,?= =?UTF-8?q?=20cross-refs?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implements the remaining Phase 2 Core Screens: - Discussion tree widget (view/common/discussion_tree.rs): DiscussionNode/NoteNode types, expand/collapse state, visual row flattening, format_relative_time with Clock trait - Cross-reference widget (view/common/cross_ref.rs): CrossRefKind enum, navigable refs, badge rendering ([MR]/[REL]/[REF]) - Issue Detail (state + action + view): progressive hydration (metadata Phase 1, discussions Phase 2), section cycling, description scroll, sanitized GitLab content - MR Detail (state + action + view): tab bar (Overview/Files/Discussions), file changes with change type indicators, branch info, draft/merge status, diff note support - Message + update wiring: IssueDetailLoaded, MrDetailLoaded, DiscussionsLoaded handlers with TaskSupervisor stale-result guards Closes bd-1d6z, bd-8ab7, bd-3t1b, bd-1cl9 (Phase 2 epic). 389 tests passing, clippy clean, fmt clean. 
--- .beads/issues.jsonl | 12 +- crates/lore-tui/src/action.rs | 1207 +++++++++++++++++ crates/lore-tui/src/app/update.rs | 52 + crates/lore-tui/src/message.rs | 7 +- crates/lore-tui/src/state/issue_detail.rs | 282 +++- crates/lore-tui/src/state/mr_detail.rs | 385 +++++- crates/lore-tui/src/view/common/cross_ref.rs | 410 ++++++ .../src/view/common/discussion_tree.rs | 979 +++++++++++++ crates/lore-tui/src/view/common/mod.rs | 7 + crates/lore-tui/src/view/issue_detail.rs | 626 +++++++++ crates/lore-tui/src/view/mod.rs | 8 + crates/lore-tui/src/view/mr_detail.rs | 635 +++++++++ 12 files changed, 4589 insertions(+), 21 deletions(-) create mode 100644 crates/lore-tui/src/view/common/cross_ref.rs create mode 100644 crates/lore-tui/src/view/common/discussion_tree.rs create mode 100644 crates/lore-tui/src/view/issue_detail.rs create mode 100644 crates/lore-tui/src/view/mr_detail.rs diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl index eb53a18..da33ad6 100644 --- a/.beads/issues.jsonl +++ b/.beads/issues.jsonl @@ -28,9 +28,9 @@ {"id":"bd-1cb","title":"[CP0] gi doctor command - health checks","description":"## Background\n\ndoctor is the primary diagnostic command. It checks all system components and reports their status. Supports JSON output for scripting and CI integration. Must degrade gracefully - warn about optional components (Ollama) without failing.\n\nReference: docs/prd/checkpoint-0.md section \"gi doctor\"\n\n## Approach\n\n**src/cli/commands/doctor.ts:**\n\nPerforms 5 checks:\n1. **Config**: Load and validate config file\n2. **Database**: Open DB, verify pragmas, check schema version\n3. **GitLab**: Auth with token, verify connectivity\n4. **Projects**: Count configured vs resolved in DB\n5. 
**Ollama**: Ping embedding endpoint (optional - warn if unavailable)\n\n**DoctorResult interface:**\n```typescript\ninterface DoctorResult {\n success: boolean; // All required checks passed\n checks: {\n config: { status: 'ok' | 'error'; path?: string; error?: string };\n database: { status: 'ok' | 'error'; path?: string; schemaVersion?: number; error?: string };\n gitlab: { status: 'ok' | 'error'; url?: string; username?: string; error?: string };\n projects: { status: 'ok' | 'error'; configured?: number; resolved?: number; error?: string };\n ollama: { status: 'ok' | 'warning' | 'error'; url?: string; model?: string; error?: string };\n };\n}\n```\n\n**Human-readable output (default):**\n```\ngi doctor\n\n Config ✓ Loaded from ~/.config/gi/config.json\n Database ✓ ~/.local/share/gi/data.db (schema v1)\n GitLab ✓ https://gitlab.example.com (authenticated as @johndoe)\n Projects ✓ 2 configured, 2 resolved\n Ollama ⚠ Not running (semantic search unavailable)\n\nStatus: Ready (lexical search available, semantic search requires Ollama)\n```\n\n**JSON output (--json flag):**\nOutputs DoctorResult as JSON to stdout\n\n## Acceptance Criteria\n\n- [ ] Config check: shows path and validation status\n- [ ] Database check: shows path, schema version, pragma verification\n- [ ] GitLab check: shows URL and authenticated username\n- [ ] Projects check: shows configured count and resolved count\n- [ ] Ollama check: warns if not running, doesn't fail overall\n- [ ] success=true only if config, database, gitlab, projects all ok\n- [ ] --json outputs valid JSON matching DoctorResult interface\n- [ ] Exit 0 if success=true, exit 1 if any required check fails\n- [ ] Colors and symbols in human output (✓, ⚠, ✗)\n\n## Files\n\nCREATE:\n- src/cli/commands/doctor.ts\n- src/types/doctor.ts (DoctorResult interface)\n\n## TDD Loop\n\nN/A - diagnostic command, verify with manual testing:\n\n```bash\n# All good\ngi doctor\n\n# JSON output\ngi doctor --json | jq .\n\n# With missing Ollama\n# 
(just don't run Ollama - should show warning)\n\n# With bad config\nmv ~/.config/gi/config.json ~/.config/gi/config.json.bak\ngi doctor # should show config error\n```\n\n## Edge Cases\n\n- Ollama timeout should be short (2s) - don't block on slow network\n- Ollama 404 (wrong model) vs connection refused (not running)\n- Database file exists but wrong schema version\n- Projects in config but not in database (init not run)\n- Token valid for user but project access revoked","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-24T16:09:51.435540Z","created_by":"tayloreernisse","updated_at":"2026-01-25T03:30:24.921206Z","closed_at":"2026-01-25T03:30:24.921041Z","close_reason":"done","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-1cb","depends_on_id":"bd-13b","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1cb","depends_on_id":"bd-1l1","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1cb","depends_on_id":"bd-3ng","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1cb","depends_on_id":"bd-epj","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-1cj0","title":"Epic: TUI Phase 0 — Toolchain Gate","description":"## Background\nPhase 0 is the hard gate for the TUI implementation. It validates that FrankenTUI (nightly Rust) can coexist with the stable lore workspace, that core infrastructure types compile and pass basic tests, and that terminal compatibility meets the bar. 
If Phase 0 fails, we evaluate alternatives before proceeding.\n\n## Acceptance Criteria\n- [ ] crates/lore-tui/ scaffold exists with Cargo.toml, rust-toolchain.toml, main.rs, lib.rs\n- [ ] cargo +stable check --workspace --all-targets passes for root workspace (lore-tui EXCLUDED)\n- [ ] cargo +nightly check --manifest-path crates/lore-tui/Cargo.toml --all-targets passes\n- [ ] FrankenTUI Model trait skeleton compiles and renders a hello-world frame\n- [ ] DbManager, Clock, safety, and core type modules compile with tests\n- [ ] Terminal compat smoke test passes in iTerm2 and tmux\n\n## Scope\nAll Phase 0 tasks are blockers for Phase 1 (Foundation).","status":"closed","priority":1,"issue_type":"epic","created_at":"2026-02-12T16:52:50.687401Z","created_by":"tayloreernisse","updated_at":"2026-02-12T20:07:27.208080Z","closed_at":"2026-02-12T20:07:27.208030Z","close_reason":"All Phase 0 children complete: scaffold (bd-3ddw), core types (bd-c9gk), clock (bd-2lg6), safety (bd-3ir1), DbManager (bd-2kop), theme (bd-5ofk), FrankenTUI integration (bd-2emv). 68 tests, quality gate green.","compaction_level":0,"original_size":0,"labels":["TUI"]} {"id":"bd-1cjx","title":"lore drift: detect discussion divergence from original intent","description":"## Background\nDetect when a discussion thread has evolved away from the original issue description. Surfaces hidden scope creep. 
No existing tool does this — not GitLab, Jira, Linear, or any CLI.\n\n## Current Infrastructure (Verified 2026-02-12)\n- Embeddings: nomic-embed-text model, 768 dimensions, stored in embedding_metadata + vec0 tables\n- OllamaClient::embed_batch() at src/embedding/ollama.rs:103 — batch embedding\n- notes table: 282K rows with body, author, created_at, is_system, discussion_id\n- issues table: description column contains original intent text\n- CHUNK_MAX_BYTES = 1500 bytes for embedding input\n- No `strip_markdown()` utility exists in the codebase — must be written (see Edge Cases)\n\n## Dependencies\nThis command is standalone. It only requires:\n- OllamaClient (already shipped at src/embedding/ollama.rs) for embedding computation\n- notes + discussions tables (already in DB since migration 001/004)\n- issues table (already in DB since migration 002)\n\nNo dependency on hybrid search (bd-1ksf) or per-note search (bd-2l3s). Drift embeds on-the-fly.\n\n## Algorithm\n\n### Step 1: Embed issue description\n```rust\nlet desc_text = issue.description.unwrap_or_default();\nif desc_text.len() < 20 {\n // Too short for meaningful drift analysis\n return Ok(DriftResponse::no_drift(\"Description too short for analysis\"));\n}\nlet desc_embedding = client.embed_batch(&[&desc_text]).await?[0].clone();\n```\n\n### Step 2: Get non-system notes chronologically\n```sql\nSELECT n.id, n.body, n.author_username, n.created_at\nFROM notes n\nJOIN discussions d ON n.discussion_id = d.id\nWHERE d.noteable_type = 'Issue' AND d.noteable_id = ?\n AND n.is_system = 0\n AND LENGTH(n.body) >= 20\nORDER BY n.created_at ASC\nLIMIT 200 -- cap for performance\n```\n\n### Step 3: Embed each note\n```rust\nlet note_texts: Vec<&str> = notes.iter().map(|n| n.body.as_str()).collect();\n// Batch in groups of 32 (BATCH_SIZE from embedding pipeline)\nlet note_embeddings = client.embed_batch(¬e_texts).await?;\n```\n\n### Step 4: Compute cosine similarity curve\n```rust\n/// Cosine similarity between two 
embedding vectors.\n/// Returns value in [-1, 1] range; higher = more similar.\n/// Place in src/embedding/similarity.rs for reuse by related (bd-8con) and drift.\npub fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {\n debug_assert_eq!(a.len(), b.len(), \"embedding dimensions must match\");\n let dot: f32 = a.iter().zip(b).map(|(x, y)| x * y).sum();\n let norm_a: f32 = a.iter().map(|x| x * x).sum::().sqrt();\n let norm_b: f32 = b.iter().map(|x| x * x).sum::().sqrt();\n if norm_a == 0.0 || norm_b == 0.0 {\n return 0.0; // zero vector = no similarity\n }\n dot / (norm_a * norm_b)\n}\n\nlet similarity_curve: Vec = notes.iter().zip(¬e_embeddings)\n .enumerate()\n .map(|(i, (note, emb))| SimilarityPoint {\n note_index: i,\n note_id: note.id,\n similarity: cosine_similarity(&desc_embedding, emb),\n author: note.author.clone(),\n created_at: note.created_at.clone(),\n })\n .collect();\n```\n\n### Step 5: Detect drift via sliding window\n```rust\nconst DEFAULT_THRESHOLD: f32 = 0.4;\nconst WINDOW_SIZE: usize = 3;\n\nfn detect_drift(curve: &[SimilarityPoint], threshold: f32) -> Option<&SimilarityPoint> {\n if curve.len() < WINDOW_SIZE {\n return None; // need minimum 3 notes for window\n }\n for window in curve.windows(WINDOW_SIZE) {\n let avg: f32 = window.iter().map(|p| p.similarity).sum::() / WINDOW_SIZE as f32;\n if avg < threshold {\n return Some(&window[0]); // first note in drifting window\n }\n }\n None\n}\n```\n\n### Step 6: Extract drift topics (simple term frequency v1)\n```rust\n/// Simple markdown stripping for embedding quality.\n/// Remove code blocks (```...```), inline code (`...`), links [text](url),\n/// block quotes (> ...), and HTML tags (<...>).\n/// This function must be written — no existing utility in the codebase.\nfn strip_markdown(text: &str) -> String {\n // Phase 1: Remove fenced code blocks (```...```)\n let re_code_block = regex::Regex::new(r\"(?s)```.*?```\").unwrap();\n let text = re_code_block.replace_all(text, \"\");\n // Phase 2: 
Remove inline code (`...`)\n let re_inline = regex::Regex::new(r\"`[^`]+`\").unwrap();\n let text = re_inline.replace_all(&text, \"\");\n // Phase 3: Remove markdown links, keep text: [text](url) -> text\n let re_link = regex::Regex::new(r\"\\[([^\\]]+)\\]\\([^)]+\\)\").unwrap();\n let text = re_link.replace_all(&text, \"$1\");\n // Phase 4: Remove block quotes\n let text = text.lines()\n .filter(|l| !l.trim_start().starts_with('>'))\n .collect::>()\n .join(\"\\n\");\n // Phase 5: Remove HTML tags\n let re_html = regex::Regex::new(r\"<[^>]+>\").unwrap();\n re_html.replace_all(&text, \"\").to_string()\n}\n\nfn extract_drift_topics(\n notes_after_drift: &[Note],\n description_words: &HashSet,\n) -> Vec {\n let stopwords: HashSet<&str> = [\n \"the\", \"a\", \"an\", \"is\", \"are\", \"was\", \"were\", \"be\", \"been\", \"being\",\n \"have\", \"has\", \"had\", \"do\", \"does\", \"did\", \"will\", \"would\", \"could\",\n \"should\", \"may\", \"might\", \"shall\", \"can\", \"need\", \"dare\", \"ought\",\n \"used\", \"to\", \"of\", \"in\", \"for\", \"on\", \"with\", \"at\", \"by\", \"from\",\n \"as\", \"into\", \"through\", \"during\", \"before\", \"after\", \"above\", \"below\",\n \"between\", \"out\", \"off\", \"over\", \"under\", \"again\", \"further\", \"then\",\n \"once\", \"here\", \"there\", \"when\", \"where\", \"why\", \"how\", \"all\", \"each\",\n \"every\", \"both\", \"few\", \"more\", \"most\", \"other\", \"some\", \"such\", \"no\",\n \"nor\", \"not\", \"only\", \"own\", \"same\", \"so\", \"than\", \"too\", \"very\",\n \"just\", \"because\", \"but\", \"and\", \"or\", \"if\", \"while\", \"that\", \"this\",\n \"these\", \"those\", \"it\", \"its\", \"they\", \"them\", \"their\", \"we\", \"our\",\n \"you\", \"your\", \"he\", \"she\", \"his\", \"her\", \"what\", \"which\", \"who\",\n ].into_iter().collect();\n\n let mut term_freq: HashMap = HashMap::new();\n for note in notes_after_drift {\n let body = strip_markdown(¬e.body);\n for word in body.split_whitespace() 
{\n let word = word.to_lowercase()\n .trim_matches(|c: char| !c.is_alphanumeric())\n .to_string();\n if word.len() >= 3\n && !stopwords.contains(word.as_str())\n && !description_words.contains(&word)\n {\n *term_freq.entry(word).or_default() += 1;\n }\n }\n }\n\n let mut ranked: Vec<_> = term_freq.into_iter().collect();\n ranked.sort_by(|a, b| b.1.cmp(&a.1));\n ranked.into_iter().take(3).map(|(word, _)| word).collect()\n}\n```\n\nNOTE: The `regex` crate is likely already a dependency (check Cargo.toml). If not, add it. Consider compiling regexes once with `lazy_static!` or `std::sync::LazyLock` instead of in-function `Regex::new()`.\n\n## Robot Mode Output Schema\n```json\n{\n \"ok\": true,\n \"data\": {\n \"entity\": { \"type\": \"issue\", \"iid\": 3864, \"title\": \"...\" },\n \"drift_detected\": true,\n \"threshold\": 0.4,\n \"drift_point\": {\n \"note_index\": 12,\n \"note_id\": 456,\n \"author\": \"devname\",\n \"created_at\": \"2026-01-20T...\",\n \"similarity\": 0.32\n },\n \"drift_topics\": [\"ingestion\", \"maintenance\", \"lubrication\"],\n \"similarity_curve\": [\n { \"note_index\": 0, \"similarity\": 0.91, \"author\": \"...\", \"created_at\": \"...\" },\n { \"note_index\": 1, \"similarity\": 0.85, \"author\": \"...\", \"created_at\": \"...\" }\n ],\n \"recommendation\": \"Consider splitting: notes after #12 discuss ingestion, maintenance, lubrication -- topics not in original description\"\n },\n \"meta\": { \"elapsed_ms\": 1500, \"notes_analyzed\": 25, \"description_tokens\": 150 }\n}\n```\n\n## Clap Registration\n```rust\n// In src/main.rs Commands enum, add:\nDrift {\n /// Entity type: \"issues\" (MRs not supported in v1)\n entity_type: String,\n /// Entity IID\n iid: i64,\n /// Similarity threshold for drift detection (0.0-1.0, default 0.4)\n #[arg(long, default_value = \"0.4\")]\n threshold: f32,\n /// Scope to project (fuzzy match)\n #[arg(short, long)]\n project: Option,\n},\n```\n\n## TDD Loop\nRED: Tests in src/cli/commands/drift.rs:\n- 
test_cosine_similarity_identical: same vector -> 1.0\n- test_cosine_similarity_orthogonal: orthogonal vectors -> 0.0\n- test_cosine_similarity_zero_vector: zero vector -> 0.0 (not NaN)\n- test_drift_detected_when_notes_diverge: mock embeddings where first 5 notes are similar (>0.8) to desc, last 5 are dissimilar (<0.3), assert drift_detected=true\n- test_no_drift_on_consistent_discussion: all notes similar to desc (>0.6), assert drift_detected=false\n- test_drift_point_is_first_divergent: assert drift_point.note_index is the first note in the first sub-threshold window\n- test_drift_topics_exclude_original_terms: terms from description body should NOT appear in drift_topics\n- test_single_note: assert drift_detected=false (need min 3 notes)\n- test_empty_description: assert response with \"Description too short for analysis\" message\n- test_strip_markdown_code_blocks: verify fenced code blocks removed\n- test_strip_markdown_preserves_text: verify plain text preserved\n\nGREEN: Implement drift command with cosine_similarity + sliding window + topic extraction\n\nVERIFY:\n```bash\ncargo test drift:: && cargo clippy --all-targets -- -D warnings\ncargo run --release -- -J drift issues 3864 | jq '.data.drift_detected'\n```\n\n## Acceptance Criteria\n- [ ] lore drift issues N computes similarity curve between description and notes\n- [ ] Drift detected when sliding window of 3 notes averages below threshold\n- [ ] Drift topics extracted from divergent notes (top 3 terms not in description)\n- [ ] --threshold flag to adjust sensitivity (default 0.4)\n- [ ] Robot mode returns structured analysis with similarity_curve array\n- [ ] Human mode shows visual indication (similarity bar or sparkline per note)\n- [ ] Suggests splitting when drift detected\n- [ ] Performance: <2s for issue with 100 notes (mostly embedding time)\n- [ ] Command registered in main.rs and robot-docs\n- [ ] cosine_similarity function has its own unit tests\n- [ ] strip_markdown function has its own 
unit tests\n\n## Edge Cases\n- Empty description: return early with message \"Description too short for analysis\"\n- Single note: drift_detected = false, similarity_curve has 1 entry\n- Very short notes (<20 chars): filtered out in SQL query\n- All notes by same author: still valid analysis (self-drift is real)\n- Notes that are mostly quotes/code blocks: strip_markdown before embedding (remove ``` blocks, > quotes)\n- Issue with 500+ notes: SQL LIMIT 200 on notes, note in meta that analysis is partial\n- Ollama unavailable: exit code 14 with message (drift requires embedding computation)\n- No stored note embeddings: always embed on-the-fly (drift needs to compare against description, not stored embeddings)\n- Embedding dimension mismatch: assert desc and note embeddings have same length (768 for nomic-embed-text)\n- Regex compilation: use LazyLock or lazy_static to avoid recompiling regexes on every call\n\n## Files to Create/Modify\n- NEW: src/cli/commands/drift.rs (main command implementation)\n- NEW: src/embedding/similarity.rs (cosine_similarity utility, reusable by bd-8con)\n- src/embedding/mod.rs (export similarity module)\n- src/cli/commands/mod.rs (add pub mod drift; re-export)\n- src/main.rs (register Drift subcommand in Commands enum, add handle_drift fn)","status":"closed","priority":3,"issue_type":"feature","created_at":"2026-02-12T15:47:40.232427Z","created_by":"tayloreernisse","updated_at":"2026-02-12T16:49:02.922951Z","closed_at":"2026-02-12T16:49:02.922901Z","close_reason":"Drift detection command implemented: cosine similarity curve, sliding window, topic extraction, human+robot output","compaction_level":0,"original_size":0,"labels":["cli-imp","intelligence"],"dependencies":[{"issue_id":"bd-1cjx","depends_on_id":"bd-13lp","type":"parent-child","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} -{"id":"bd-1cl9","title":"Epic: TUI Phase 2 — Core Screens","description":"## Background\nPhase 2 implements the five core screens: Dashboard, 
Issue List, Issue Detail, MR List, and MR Detail. These screens cover the primary read workflows. Each screen has a state struct, view function, and action query bridge. The entity table and filter bar widgets are shared across list screens.\n\n## Acceptance Criteria\n- [ ] Dashboard renders project overview with stats, recent activity, sync status\n- [ ] Issue List supports keyset pagination, filtering, sorting, and Quick Peek\n- [ ] Issue Detail shows progressive hydration (metadata, discussions, cross-refs)\n- [ ] MR List mirrors Issue List patterns with MR-specific columns\n- [ ] MR Detail shows file changes, diff discussions, and general discussions\n- [ ] All screens use TaskSupervisor for data loading with stale-result guards\n- [ ] Navigation between screens preserves state\n\n## Scope\nBlocked by Phase 1 (Foundation). Blocks Phase 2.5 (Vertical Slice Gate).","status":"open","priority":1,"issue_type":"epic","created_at":"2026-02-12T16:57:23.090933Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:51.135521Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-1cl9","depends_on_id":"bd-2tr4","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} +{"id":"bd-1cl9","title":"Epic: TUI Phase 2 — Core Screens","description":"## Background\nPhase 2 implements the five core screens: Dashboard, Issue List, Issue Detail, MR List, and MR Detail. These screens cover the primary read workflows. Each screen has a state struct, view function, and action query bridge. 
The entity table and filter bar widgets are shared across list screens.\n\n## Acceptance Criteria\n- [ ] Dashboard renders project overview with stats, recent activity, sync status\n- [ ] Issue List supports keyset pagination, filtering, sorting, and Quick Peek\n- [ ] Issue Detail shows progressive hydration (metadata, discussions, cross-refs)\n- [ ] MR List mirrors Issue List patterns with MR-specific columns\n- [ ] MR Detail shows file changes, diff discussions, and general discussions\n- [ ] All screens use TaskSupervisor for data loading with stale-result guards\n- [ ] Navigation between screens preserves state\n\n## Scope\nBlocked by Phase 1 (Foundation). Blocks Phase 2.5 (Vertical Slice Gate).","status":"closed","priority":1,"issue_type":"epic","created_at":"2026-02-12T16:57:23.090933Z","created_by":"tayloreernisse","updated_at":"2026-02-18T20:36:50.923129Z","closed_at":"2026-02-18T20:36:50.923019Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-1cl9","depends_on_id":"bd-2tr4","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-1d5","title":"[CP1] GitLab client pagination methods","description":"Add async generator methods for paginated GitLab API calls.\n\nMethods to add to src/gitlab/client.ts:\n- paginateIssues(gitlabProjectId, updatedAfter?) 
→ AsyncGenerator\n- paginateIssueDiscussions(gitlabProjectId, issueIid) → AsyncGenerator\n- requestWithHeaders(path) → { data: T, headers: Headers }\n\nImplementation:\n- Use scope=all, state=all for issues\n- Order by updated_at ASC\n- Follow X-Next-Page header until empty/absent\n- Apply cursor rewind (subtract cursorRewindSeconds) for tuple semantics\n- Fall back to empty-page detection if headers missing\n\nFiles: src/gitlab/client.ts\nTests: tests/unit/pagination.test.ts\nDone when: Pagination handles multiple pages and respects cursors","status":"tombstone","priority":2,"issue_type":"task","created_at":"2026-01-25T15:19:43.069869Z","created_by":"tayloreernisse","updated_at":"2026-01-25T15:21:35.156881Z","closed_at":"2026-01-25T15:21:35.156881Z","deleted_at":"2026-01-25T15:21:35.156877Z","deleted_by":"tayloreernisse","delete_reason":"delete","original_type":"task","compaction_level":0,"original_size":0} -{"id":"bd-1d6z","title":"Implement discussion tree + cross-reference widgets","description":"## Background\nThe discussion tree renders threaded conversations from GitLab issues/MRs using FrankenTUI's Tree widget. Cross-references show linked entities (closing MRs, related issues) as navigable links. 
Both are used in Issue Detail and MR Detail views.\n\n## Approach\nDiscussion Tree (view/common/discussion_tree.rs):\n- Wraps ftui Tree widget with TreePersistState for expand/collapse persistence\n- Tree structure: top-level discussions as roots, notes within discussion as children\n- Each node renders: author, timestamp (relative via Clock), note body (sanitized)\n- System notes rendered with muted style\n- Diff notes show file path + line reference\n- Keyboard: j/k navigate, Enter expand/collapse, Space toggle thread\n- Expand-on-demand: thread bodies loaded only when expanded (progressive hydration phase 3)\n\nCross-Reference (view/common/cross_ref.rs):\n- CrossRefWidget: renders list of entity references with type icon and navigable links\n- CrossRef struct: kind (ClosingMR, RelatedIssue, MentionedIn), entity_key (EntityKey), label (String)\n- Enter on a cross-ref navigates to that entity (pushes nav stack)\n- Renders as: \"Closing MR !42: Fix authentication flow\" with colored kind indicator\n\n## Acceptance Criteria\n- [ ] Discussion tree renders top-level discussions as expandable nodes\n- [ ] Notes within discussion shown as children with indentation\n- [ ] System notes visually distinguished (muted color)\n- [ ] Diff notes show file path context\n- [ ] Timestamps use injected Clock for deterministic rendering\n- [ ] All note text sanitized via sanitize_for_terminal()\n- [ ] Cross-references render with entity type icons\n- [ ] Enter on cross-ref navigates to entity detail\n- [ ] Tree state persists across navigation (expand/collapse remembered)\n\n## Files\n- CREATE: crates/lore-tui/src/view/common/discussion_tree.rs\n- CREATE: crates/lore-tui/src/view/common/cross_ref.rs\n\n## TDD Anchor\nRED: Write test_cross_ref_entity_key that creates a CrossRef with EntityKey::mr(1, 42), asserts kind and key are correct.\nGREEN: Implement CrossRef struct.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_cross_ref\n\n## Edge Cases\n- Deeply nested 
discussions (rare in GitLab but possible): limit indent depth to 4 levels\n- Very long note bodies: wrap text within tree node area\n- Empty discussions (resolved with no notes): show \"[resolved]\" indicator\n- Cross-references to entities not in local DB: show as non-navigable text\n\n## Dependency Context\nUses sanitize_for_terminal() from \"Implement terminal safety module\" task.\nUses Clock for timestamps from \"Implement Clock trait\" task.\nUses EntityKey, Screen from \"Implement core types\" task.\nUses NavigationStack from \"Implement NavigationStack\" task.","status":"open","priority":2,"issue_type":"task","created_at":"2026-02-12T16:58:49.765694Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:28.589883Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-1d6z","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1d6z","depends_on_id":"bd-2lg6","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1d6z","depends_on_id":"bd-3ir1","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} +{"id":"bd-1d6z","title":"Implement discussion tree + cross-reference widgets","description":"## Background\nThe discussion tree renders threaded conversations from GitLab issues/MRs using FrankenTUI's Tree widget. Cross-references show linked entities (closing MRs, related issues) as navigable links. 
Both are used in Issue Detail and MR Detail views.\n\n## Approach\nDiscussion Tree (view/common/discussion_tree.rs):\n- Wraps ftui Tree widget with TreePersistState for expand/collapse persistence\n- Tree structure: top-level discussions as roots, notes within discussion as children\n- Each node renders: author, timestamp (relative via Clock), note body (sanitized)\n- System notes rendered with muted style\n- Diff notes show file path + line reference\n- Keyboard: j/k navigate, Enter expand/collapse, Space toggle thread\n- Expand-on-demand: thread bodies loaded only when expanded (progressive hydration phase 3)\n\nCross-Reference (view/common/cross_ref.rs):\n- CrossRefWidget: renders list of entity references with type icon and navigable links\n- CrossRef struct: kind (ClosingMR, RelatedIssue, MentionedIn), entity_key (EntityKey), label (String)\n- Enter on a cross-ref navigates to that entity (pushes nav stack)\n- Renders as: \"Closing MR !42: Fix authentication flow\" with colored kind indicator\n\n## Acceptance Criteria\n- [ ] Discussion tree renders top-level discussions as expandable nodes\n- [ ] Notes within discussion shown as children with indentation\n- [ ] System notes visually distinguished (muted color)\n- [ ] Diff notes show file path context\n- [ ] Timestamps use injected Clock for deterministic rendering\n- [ ] All note text sanitized via sanitize_for_terminal()\n- [ ] Cross-references render with entity type icons\n- [ ] Enter on cross-ref navigates to entity detail\n- [ ] Tree state persists across navigation (expand/collapse remembered)\n\n## Files\n- CREATE: crates/lore-tui/src/view/common/discussion_tree.rs\n- CREATE: crates/lore-tui/src/view/common/cross_ref.rs\n\n## TDD Anchor\nRED: Write test_cross_ref_entity_key that creates a CrossRef with EntityKey::mr(1, 42), asserts kind and key are correct.\nGREEN: Implement CrossRef struct.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_cross_ref\n\n## Edge Cases\n- Deeply nested 
discussions (rare in GitLab but possible): limit indent depth to 4 levels\n- Very long note bodies: wrap text within tree node area\n- Empty discussions (resolved with no notes): show \"[resolved]\" indicator\n- Cross-references to entities not in local DB: show as non-navigable text\n\n## Dependency Context\nUses sanitize_for_terminal() from \"Implement terminal safety module\" task.\nUses Clock for timestamps from \"Implement Clock trait\" task.\nUses EntityKey, Screen from \"Implement core types\" task.\nUses NavigationStack from \"Implement NavigationStack\" task.","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:58:49.765694Z","created_by":"tayloreernisse","updated_at":"2026-02-18T20:17:02.460355Z","closed_at":"2026-02-18T20:17:02.460206Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-1d6z","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1d6z","depends_on_id":"bd-2lg6","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1d6z","depends_on_id":"bd-3ir1","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-1df9","title":"Epic: TUI Phase 4 — Operations","description":"## Background\nPhase 4 adds operational screens: Sync (real-time progress + post-sync summary), Doctor/Stats (health checks), and CLI integration (lore tui command for binary delegation). 
The Sync screen is the most complex — it needs real-time streaming progress with backpressure handling.\n\n## Acceptance Criteria\n- [ ] Sync screen shows real-time progress during sync with per-lane indicators\n- [ ] Sync summary shows exact changed entities after completion\n- [ ] Doctor screen shows environment health checks\n- [ ] Stats screen shows database statistics\n- [ ] CLI integration: lore tui launches lore-tui binary via runtime delegation","status":"open","priority":1,"issue_type":"epic","created_at":"2026-02-12T17:01:44.603447Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:51.361318Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-1df9","depends_on_id":"bd-nwux","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-1elx","title":"Implement run_embed_for_document_ids scoped embedding","description":"## Background\n\nCurrently `embed_documents()` in `src/embedding/pipeline.rs` uses `find_pending_documents()` to discover ALL documents that need embedding (no existing embedding, changed content_hash, or model mismatch). The surgical sync pipeline needs a scoped variant that only embeds specific document IDs — the ones returned by the scoped doc regeneration step (bd-hs6j).\n\nThe existing `embed_page()` private function handles the actual embedding work for a batch of `PendingDocument` structs. It calls `split_into_chunks`, sends batches to the OllamaClient, and writes embeddings + metadata to the DB. 
The scoped function can reuse this by constructing `PendingDocument` structs from the provided document IDs.\n\nKey types:\n- `PendingDocument { document_id: i64, content_text: String, content_hash: String }` (from `change_detector.rs`)\n- `EmbedResult { chunks_embedded, docs_embedded, failed, skipped }` (pipeline.rs:21)\n- `OllamaClient` for the actual embedding API calls\n- `ShutdownSignal` for cancellation support\n\n## Approach\n\nAdd `embed_documents_by_ids()` to `src/embedding/pipeline.rs`:\n\n```rust\npub struct EmbedForIdsResult {\n pub chunks_embedded: usize,\n pub docs_embedded: usize,\n pub failed: usize,\n pub skipped: usize,\n}\n\npub async fn embed_documents_by_ids(\n conn: &Connection,\n client: &OllamaClient,\n model_name: &str,\n concurrency: usize,\n document_ids: &[i64],\n signal: &ShutdownSignal,\n) -> Result\n```\n\nImplementation:\n1. If `document_ids` is empty, return immediately with zero counts.\n2. Load `PendingDocument` structs for the specified IDs. Query: `SELECT id, content_text, content_hash FROM documents WHERE id IN (...)`. Filter out documents that already have current embeddings (same content_hash, model, dims, chunk_max_bytes) — reuse the LEFT JOIN logic from `find_pending_documents` but with `WHERE d.id IN (?)` instead of `WHERE d.id > ?`.\n3. If no documents need embedding after filtering, return with skipped=len.\n4. Chunk into pages of `DB_PAGE_SIZE` (500).\n5. For each page, call `embed_page()` (reuse existing private function) within a SAVEPOINT.\n6. Handle cancellation via `signal.is_cancelled()` between pages.\n\nAlternative simpler approach: load all specified doc IDs into a temp table or use a parameterized IN clause, then let `embed_page` process them. 
Since the list is typically small (1-5 documents for surgical sync), a single page call suffices.\n\nExport from `src/embedding/mod.rs` if not already pub.\n\n## Acceptance Criteria\n\n- [ ] `embed_documents_by_ids` only embeds the specified document IDs, not all pending documents\n- [ ] Documents already embedded with current content_hash + model are skipped (not re-embedded)\n- [ ] Empty document_ids input returns immediately with zero counts\n- [ ] Cancellation via ShutdownSignal is respected between pages\n- [ ] SAVEPOINT/ROLLBACK semantics match existing `embed_documents` for data integrity\n- [ ] Ollama errors for individual documents are counted as failed, not fatal\n- [ ] Function is pub for use by orchestration (bd-1i4i)\n\n## Files\n\n- `src/embedding/pipeline.rs` (add new function + result struct)\n- `src/embedding/mod.rs` (export if needed)\n\n## TDD Anchor\n\nTests in `src/embedding/pipeline_tests.rs` (or new `src/embedding/scoped_embed_tests.rs`):\n\n```rust\n#[tokio::test]\nasync fn test_embed_by_ids_only_embeds_specified_docs() {\n let conn = setup_test_db_with_documents();\n let mock = MockServer::start().await;\n setup_ollama_mock(&mock).await;\n let client = OllamaClient::new(&mock.uri());\n\n // Insert 2 documents: A (id=1) and B (id=2)\n insert_test_document(&conn, 1, \"Content A\", \"hash_a\");\n insert_test_document(&conn, 2, \"Content B\", \"hash_b\");\n\n let signal = ShutdownSignal::new();\n let result = embed_documents_by_ids(\n &conn, &client, \"nomic-embed-text\", 1,\n &[1], // Only embed doc 1\n &signal,\n ).await.unwrap();\n\n assert_eq!(result.docs_embedded, 1);\n // Verify doc 1 has embeddings\n let count: i64 = conn.query_row(\n \"SELECT COUNT(*) FROM embedding_metadata WHERE document_id = 1\",\n [], |r| r.get(0),\n ).unwrap();\n assert!(count > 0);\n // Verify doc 2 has NO embeddings\n let count_b: i64 = conn.query_row(\n \"SELECT COUNT(*) FROM embedding_metadata WHERE document_id = 2\",\n [], |r| r.get(0),\n ).unwrap();\n 
assert_eq!(count_b, 0);\n}\n\n#[tokio::test]\nasync fn test_embed_by_ids_skips_already_embedded() {\n let conn = setup_test_db_with_documents();\n let mock = MockServer::start().await;\n setup_ollama_mock(&mock).await;\n let client = OllamaClient::new(&mock.uri());\n\n insert_test_document(&conn, 1, \"Content A\", \"hash_a\");\n let signal = ShutdownSignal::new();\n\n // Embed once\n embed_documents_by_ids(&conn, &client, \"nomic-embed-text\", 1, &[1], &signal).await.unwrap();\n // Embed again with same hash — should skip\n let result = embed_documents_by_ids(\n &conn, &client, \"nomic-embed-text\", 1, &[1], &signal,\n ).await.unwrap();\n assert_eq!(result.docs_embedded, 0);\n assert_eq!(result.skipped, 1);\n}\n\n#[tokio::test]\nasync fn test_embed_by_ids_empty_input() {\n let conn = setup_test_db_with_documents();\n let mock = MockServer::start().await;\n let client = OllamaClient::new(&mock.uri());\n let signal = ShutdownSignal::new();\n\n let result = embed_documents_by_ids(\n &conn, &client, \"nomic-embed-text\", 1, &[], &signal,\n ).await.unwrap();\n assert_eq!(result.docs_embedded, 0);\n assert_eq!(result.chunks_embedded, 0);\n}\n\n#[tokio::test]\nasync fn test_embed_by_ids_respects_cancellation() {\n let conn = setup_test_db_with_documents();\n let mock = MockServer::start().await;\n // Use delayed response to allow cancellation\n setup_slow_ollama_mock(&mock).await;\n let client = OllamaClient::new(&mock.uri());\n\n insert_test_document(&conn, 1, \"Content A\", \"hash_a\");\n let signal = ShutdownSignal::new();\n signal.cancel(); // Pre-cancel\n\n let result = embed_documents_by_ids(\n &conn, &client, \"nomic-embed-text\", 1, &[1], &signal,\n ).await.unwrap();\n assert_eq!(result.docs_embedded, 0);\n}\n```\n\n## Edge Cases\n\n- Document ID that does not exist in the documents table: query returns no rows, skipped silently.\n- Document with empty `content_text`: `split_into_chunks` may return 0 chunks, counted as skipped.\n- Ollama server unreachable: 
returns `OllamaUnavailable` error. Must not leave partial embeddings (SAVEPOINT rollback).\n- Very long document (>1500 bytes): gets chunked into multiple chunks by `split_into_chunks`. All chunks for one document must be embedded atomically.\n- Document already has embeddings but with different model: content_hash check passes but model mismatch detected — should re-embed.\n- Concurrent calls with overlapping document_ids: SAVEPOINT isolation prevents conflicts, last writer wins on embedding_metadata upsert.\n\n## Dependency Context\n\n- **Blocked by bd-hs6j**: Gets `document_ids` from scoped doc regeneration output\n- **Blocks bd-1i4i**: Orchestration function calls this as the final step of surgical sync\n- **Blocks bd-3jqx**: Integration tests verify embed isolation (only surgical docs get embedded)\n- **Uses existing internals**: `embed_page`, `PendingDocument`, `split_into_chunks`, `OllamaClient`, `ShutdownSignal`","status":"open","priority":2,"issue_type":"task","created_at":"2026-02-17T19:16:43.680009Z","created_by":"tayloreernisse","updated_at":"2026-02-17T20:05:18.735382Z","compaction_level":0,"original_size":0,"labels":["surgical-sync"],"dependencies":[{"issue_id":"bd-1elx","depends_on_id":"bd-1i4i","type":"blocks","created_at":"2026-02-18T17:42:00Z","created_by":"import"},{"issue_id":"bd-1elx","depends_on_id":"bd-3jqx","type":"blocks","created_at":"2026-02-18T17:42:00Z","created_by":"import"}]} {"id":"bd-1ep","title":"Wire resource event fetching into sync pipeline","description":"## Background\nAfter issue/MR primary ingestion and discussion fetch, changed entities need resource_events jobs enqueued and drained. 
This is the integration point that connects the queue (bd-tir), API client (bd-sqw), DB upserts (bd-1uc), and config flag (bd-2e8).\n\n## Approach\nModify the sync pipeline to add two new phases after discussion sync:\n\n**Phase 1 — Enqueue during ingestion:**\nIn src/ingestion/orchestrator.rs, after each entity upsert (issue or MR), call:\n```rust\nif config.sync.fetch_resource_events {\n enqueue_job(conn, project_id, \"issue\", iid, local_id, \"resource_events\", None)?;\n}\n// For MRs, also enqueue mr_closes_issues (always) and mr_diffs (when fetchMrFileChanges)\n```\n\nThe \"changed entity\" detection uses the existing dirty tracker: if an entity was inserted or updated during this sync run, it gets enqueued. On --full sync, all entities are enqueued.\n\n**Phase 2 — Drain dependent queue:**\nAdd a new drain step in src/cli/commands/sync.rs (or new src/core/drain.rs), called after discussion sync:\n```rust\npub async fn drain_dependent_queue(\n conn: &Connection,\n client: &GitLabClient,\n config: &Config,\n progress: Option,\n) -> Result\n```\n\nFlow:\n1. reclaim_stale_locks(conn, config.sync.stale_lock_minutes)\n2. Loop: claim_jobs(conn, \"resource_events\", batch_size=10)\n3. For each job:\n a. Fetch 3 event types via client (fetch_issue_state_events etc.)\n b. Store via upsert functions (upsert_state_events etc.)\n c. complete_job(conn, job.id) on success\n d. fail_job(conn, job.id, error_msg) on failure\n4. Report progress: \"Fetching resource events... [N/M]\"\n5. 
Repeat until no more claimable jobs\n\n**Progress reporting:**\nAdd new ProgressEvent variants:\n```rust\nResourceEventsFetchStart { total: usize },\nResourceEventsFetchProgress { completed: usize, total: usize },\nResourceEventsFetchComplete { fetched: usize, failed: usize },\n```\n\n## Acceptance Criteria\n- [ ] Full sync enqueues resource_events jobs for all issues and MRs\n- [ ] Incremental sync only enqueues for entities changed since last sync\n- [ ] --no-events prevents enqueueing resource_events jobs\n- [ ] Drain step fetches all 3 event types per entity\n- [ ] Successful fetches stored and job completed\n- [ ] Failed fetches recorded with error, job retried on next sync\n- [ ] Stale locks reclaimed at drain start\n- [ ] Progress displayed: \"Fetching resource events... [N/M]\"\n- [ ] Robot mode progress suppressed (quiet mode)\n\n## Files\n- src/ingestion/orchestrator.rs (add enqueue calls during upsert)\n- src/cli/commands/sync.rs (add drain step after discussions)\n- src/core/drain.rs (new, optional — or inline in sync.rs)\n\n## TDD Loop\nRED: tests/sync_pipeline_tests.rs (or extend existing):\n- `test_sync_enqueues_resource_events_for_changed_entities` - mock sync, verify jobs enqueued\n- `test_sync_no_events_flag_skips_enqueue` - verify no jobs when flag false\n- `test_drain_completes_jobs_on_success` - mock API responses, verify jobs deleted\n- `test_drain_fails_jobs_on_error` - mock API failure, verify job attempts incremented\n\nNote: Full pipeline integration tests may need mock HTTP server. 
Start with unit tests on enqueue/drain logic using the real DB with mock API responses.\n\nGREEN: Implement enqueue hooks + drain step\n\nVERIFY: `cargo test sync -- --nocapture && cargo build`\n\n## Edge Cases\n- Entity deleted between enqueue and drain: API returns 404, fail_job with \"entity not found\" (retry won't help but backoff caps it)\n- Rate limiting during drain: GitLabRateLimited error should fail_job with retry (transient)\n- Network error during drain: GitLabNetworkError should fail_job with retry\n- Multiple sync runs competing: locked_at prevents double-processing; stale lock reclaim handles crashes\n- Drain should have a max iterations guard to prevent infinite loop if jobs keep failing and being retried within the same run","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-02T21:31:57.334527Z","created_by":"tayloreernisse","updated_at":"2026-02-03T17:46:51.336138Z","closed_at":"2026-02-03T17:46:51.336077Z","close_reason":"Implemented: enqueue + drain resource events in orchestrator, wired counts through ingest→sync pipeline, added progress events, 4 new tests, all 209 tests pass","compaction_level":0,"original_size":0,"labels":["gate-1","phase-b","pipeline"],"dependencies":[{"issue_id":"bd-1ep","depends_on_id":"bd-1uc","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1ep","depends_on_id":"bd-2e8","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1ep","depends_on_id":"bd-2zl","type":"parent-child","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1ep","depends_on_id":"bd-sqw","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-1ep","depends_on_id":"bd-tir","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} @@ -147,7 +147,7 @@ {"id":"bd-2iqk","title":"Implement Doctor + Stats screens","description":"## Background\nDoctor shows environment health checks 
(config, auth, DB, Ollama). Stats shows database statistics (entity counts, index sizes, FTS coverage). Both are informational screens using ftui JsonView or simple table layouts.\n\n## Approach\nState:\n- DoctorState: checks (Vec), overall_status (Healthy|Warning|Error)\n- StatsState: entity_stats (EntityStats), index_stats (IndexStats), fts_stats (FtsStats)\n\nAction:\n- run_doctor(config, conn) -> Vec: reuses existing lore doctor logic\n- fetch_stats(conn) -> StatsData: reuses existing lore stats logic\n\nView:\n- Doctor: vertical list of health checks with pass/fail/warn indicators\n- Stats: table of entity counts, index sizes, FTS document count, embedding coverage\n\n## Acceptance Criteria\n- [ ] Doctor shows config, auth, DB, and Ollama health status\n- [ ] Stats shows entity counts matching lore --robot stats output\n- [ ] Both screens accessible via navigation (gd for Doctor)\n- [ ] Health check results color-coded: green pass, yellow warn, red fail\n\n## Files\n- CREATE: crates/lore-tui/src/state/doctor.rs\n- CREATE: crates/lore-tui/src/state/stats.rs\n- CREATE: crates/lore-tui/src/view/doctor.rs\n- CREATE: crates/lore-tui/src/view/stats.rs\n- MODIFY: crates/lore-tui/src/action.rs (add run_doctor, fetch_stats)\n\n## TDD Anchor\nRED: Write test_fetch_stats_counts that creates DB with known data, asserts fetch_stats returns correct counts.\nGREEN: Implement fetch_stats with COUNT queries.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fetch_stats\n\n## Edge Cases\n- Ollama not running: Doctor shows warning, not error (optional dependency)\n- Very large databases: stats queries should be fast (use shadow tables for FTS count)\n\n## Dependency Context\nUses existing doctor and stats logic from lore CLI commands.\nUses DbManager from \"Implement DbManager\" 
task.","status":"open","priority":3,"issue_type":"task","created_at":"2026-02-12T17:02:21.744226Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:34.357165Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-2iqk","depends_on_id":"bd-1df9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-2iqk","depends_on_id":"bd-2x2h","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-2jzn","title":"Migration 021: Add status columns to issues table","description":"## Background\nGitLab issues have work item status (To do, In progress, Done, Won't do, Duplicate) only available via GraphQL. We need 5 nullable columns on the issues table to store this data after enrichment. The status_synced_at column tracks when enrichment last wrote/cleared each row (ms epoch UTC).\n\n## Approach\nCreate a new SQL migration file and register it in the MIGRATIONS array. SQLite ALTER TABLE ADD COLUMN is non-destructive — existing rows get NULL defaults. 
Add a compound index for --status filter performance.\n\n## Files\n- migrations/021_work_item_status.sql (NEW)\n- src/core/db.rs (add entry to MIGRATIONS array)\n\n## Implementation\n\nmigrations/021_work_item_status.sql:\n ALTER TABLE issues ADD COLUMN status_name TEXT;\n ALTER TABLE issues ADD COLUMN status_category TEXT;\n ALTER TABLE issues ADD COLUMN status_color TEXT;\n ALTER TABLE issues ADD COLUMN status_icon_name TEXT;\n ALTER TABLE issues ADD COLUMN status_synced_at INTEGER;\n CREATE INDEX IF NOT EXISTS idx_issues_project_status_name ON issues(project_id, status_name);\n\nIn src/core/db.rs, add as last entry in MIGRATIONS array:\n (\"021\", include_str!(\"../../migrations/021_work_item_status.sql\")),\nLATEST_SCHEMA_VERSION is computed as MIGRATIONS.len() as i32 — auto-becomes 21.\n\n## Acceptance Criteria\n- [ ] Migration file exists at migrations/021_work_item_status.sql\n- [ ] MIGRATIONS array has 21 entries ending with (\"021\", ...)\n- [ ] In-memory DB: PRAGMA table_info(issues) includes all 5 new columns\n- [ ] In-memory DB: PRAGMA index_list(issues) includes idx_issues_project_status_name\n- [ ] Existing rows have NULL for all 5 new columns\n- [ ] cargo check --all-targets passes\n\n## TDD Loop\nRED: test_migration_021_adds_columns, test_migration_021_adds_index\n Pattern: create_connection(Path::new(\":memory:\")) + run_migrations(&conn), then PRAGMA queries\nGREEN: Create SQL file + register in MIGRATIONS\nVERIFY: cargo test test_migration_021\n\n## Edge Cases\n- Migration has 5 columns (including status_synced_at INTEGER), not 4\n- Test project insert uses gitlab_project_id, path_with_namespace, web_url (no name/last_seen_at)","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-11T06:41:40.806320Z","created_by":"tayloreernisse","updated_at":"2026-02-11T07:21:33.414434Z","closed_at":"2026-02-11T07:21:33.414387Z","close_reason":"Implemented by agent swarm — all quality gates pass (595 tests, 0 
failures)","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-2jzn","depends_on_id":"bd-2y79","type":"parent-child","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-2kop","title":"Implement DbManager (read pool + dedicated writer)","description":"## Background\nThe TUI needs concurrent database access: multiple read queries can run in parallel (e.g., loading dashboard stats while prefetching issue list), but writes must be serialized. The DbManager provides a read pool (3 connections, round-robin) plus a dedicated writer connection, accessed via closures.\n\nThe database uses WAL mode with 5000ms busy_timeout (already configured in lore's create_connection). WAL allows concurrent readers + single writer. The TUI is self-contained — it does NOT detect or react to external CLI sync operations. If someone runs lore sync externally while the TUI is open, WAL prevents conflicts and the TUI's natural re-query on navigation handles stale data implicitly.\n\n## Approach\nCreate `crates/lore-tui/src/db.rs`:\n\n```rust\npub struct DbManager {\n readers: Vec, // 3 connections, WAL mode\n writer: Connection, // dedicated writer\n next_reader: AtomicUsize, // round-robin index\n}\n```\n\n- `DbManager::open(path: &Path) -> Result` — opens 4 connections (3 read + 1 write), all with WAL + busy_timeout via lore::core::db::create_connection\n- `with_reader(&self, f: F) -> Result where F: FnOnce(&Connection) -> Result` — closure-based read access, round-robin selection\n- `with_writer(&self, f: F) -> Result where F: FnOnce(&Connection) -> Result` — closure-based write access (serialized)\n- Reader connections set `PRAGMA query_only = ON` as a safety guard\n- All connections reuse lore's `create_connection()` which sets WAL + busy_timeout + foreign_keys\n\nThe DbManager is created once at app startup and shared (via Arc) across all screen states and action tasks.\n\n## Acceptance Criteria\n- [ ] DbManager opens 3 reader + 1 writer 
connection\n- [ ] Readers use round-robin selection via AtomicUsize\n- [ ] Reader connections have query_only = ON\n- [ ] Writer connection allows INSERT/UPDATE/DELETE\n- [ ] with_reader and with_writer use closure-based access (no connection leaking)\n- [ ] All connections use WAL mode and 5000ms busy_timeout\n- [ ] DbManager is Send + Sync (can be shared via Arc across async tasks)\n- [ ] Unit test: concurrent reads don't block each other\n- [ ] Unit test: write through reader connection fails (query_only guard)\n\n## Files\n- CREATE: crates/lore-tui/src/db.rs\n- MODIFY: crates/lore-tui/src/lib.rs (add pub mod db)\n\n## TDD Anchor\nRED: Write `test_reader_is_query_only` that opens a DbManager on an in-memory DB, attempts an INSERT via with_reader, and asserts it fails.\nGREEN: Implement DbManager with query_only pragma on readers.\nVERIFY: cargo test -p lore-tui db -- --nocapture\n\nAdditional tests:\n- test_writer_allows_mutations\n- test_round_robin_rotates_readers\n- test_dbmanager_is_send_sync (compile-time assert)\n- test_concurrent_reads (spawn threads, all complete without blocking)\n\n## Edge Cases\n- Database file doesn't exist — create_connection handles this (creates new DB)\n- Database locked by external process — busy_timeout handles retry\n- Connection pool exhaustion — not possible with closure-based access (connection is borrowed, not taken)\n- AtomicUsize overflow — wraps around, which is fine for round-robin (modulo 3)\n\n## Dependency Context\nDepends on bd-3ddw (scaffold) for the crate to exist. Uses lore::core::db::create_connection for connection setup. All screen action modules depend on DbManager for data access.","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:53:59.708214Z","created_by":"tayloreernisse","updated_at":"2026-02-12T19:59:21.852517Z","closed_at":"2026-02-12T19:59:21.852405Z","close_reason":"Implemented DbManager: 3 reader pool (query_only, round-robin) + 1 writer, Mutex-wrapped for Send+Sync. 
7 tests passing, clippy clean.","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-2kop","depends_on_id":"bd-3ddw","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} -{"id":"bd-2kr0","title":"Implement MR List (state + action + view)","description":"## Background\nThe MR List mirrors the Issue List pattern with MR-specific columns (target branch, source branch, draft status, reviewer). Same keyset pagination, snapshot fence, and filter bar DSL.\n\n## Approach\nState (state/mr_list.rs):\n- MrListState: same structure as IssueListState but with MrFilter and MrListRow, plus snapshot_upper_updated_at, filter_hash, peek_visible, peek_content\n- MrFilter: state, author, reviewer, target_branch, source_branch, label, draft (bool), free_text, project_id\n- MrListRow: project_path, iid, title, state, author, reviewer, target_branch, labels, updated_at, draft\n- MrCursor: updated_at, iid for keyset pagination\n- handle_key(): j/k scroll, J/K page, Enter select, / focus filter, Tab sort, g+g top, G bottom, r refresh, Space toggle Quick Peek\n\n**Snapshot fence:** Same pattern as Issue List — store snapshot_upper_updated_at on first load and refresh, filter subsequent pages. Explicit refresh (r) resets.\n\n**filter_hash:** Same pattern as Issue List — filter change resets cursor to page 1.\n\n**Quick Peek (Space key):**\n- Space toggles right-side preview pane (40% width) showing selected MR detail\n- Preview loads asynchronously via TaskSupervisor\n- j/k updates preview for newly selected row\n- Narrow terminals (<100 cols): peek replaces list\n\nAction (action.rs):\n- fetch_mrs(conn, filter, cursor, page_size, clock, snapshot_fence) -> Result: keyset query against merge_requests table. 
Uses idx_mrs_list_default index.\n- fetch_mr_peek(conn, entity_key) -> Result: loads MR detail for Quick Peek preview\n\nView (view/mr_list.rs):\n- render_mr_list(frame, state, area, theme): FilterBar + EntityTable with MR columns\n- When peek_visible: split area horizontally — list (60%) | peek preview (40%)\n- Columns: IID, Title (flex), State, Author, Target, Labels, Updated, Draft indicator\n- Draft MRs shown with muted style and [WIP] tag\n\n## Acceptance Criteria\n- [ ] Keyset pagination works for MR list (same pattern as issues)\n- [ ] Browse snapshot fence prevents rows shifting during concurrent sync\n- [ ] Explicit refresh (r) resets snapshot fence\n- [ ] filter_hash resets cursor on filter change\n- [ ] MR-specific filter fields: draft, reviewer, target_branch, source_branch\n- [ ] Draft MRs visually distinguished with [WIP] indicator\n- [ ] State filter supports: opened, merged, closed, locked, all\n- [ ] Columns: IID, Title, State, Author, Target Branch, Labels, Updated\n- [ ] Enter navigates to MrDetail, Esc returns with state preserved\n- [ ] Space toggles Quick Peek right-side preview pane\n- [ ] Quick Peek loads MR detail asynchronously\n- [ ] j/k in peek mode updates preview for newly selected row\n- [ ] Narrow terminal (<100 cols): peek replaces list\n\n## Files\n- MODIFY: crates/lore-tui/src/state/mr_list.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_mrs, fetch_mr_peek)\n- CREATE: crates/lore-tui/src/view/mr_list.rs\n\n## TDD Anchor\nRED: Write test_fetch_mrs_draft_filter in action.rs that inserts 5 MRs (3 draft, 2 not), calls fetch_mrs with draft=true filter, asserts 3 results.\nGREEN: Implement fetch_mrs with draft filter.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fetch_mrs\n\nAdditional tests:\n- test_mr_snapshot_fence: verify fence excludes newer rows\n- test_mr_filter_hash_reset: verify filter change resets cursor\n\n## Edge Cases\n- MR state \"locked\" is rare but must be handled in 
filter and display\n- Very long branch names: truncate with ellipsis\n- MRs with no reviewer: show \"-\" in reviewer column\n- Quick Peek on empty list: no-op\n- Rapid j/k with peek open: debounce peek loads\n\n## Dependency Context\nUses EntityTable and FilterBar from \"Implement entity table + filter bar widgets\" (bd-18qs).\nUses same keyset pagination pattern from \"Implement Issue List\" (bd-3ei1).\nUses MrListState from \"Implement AppState composition\" (bd-1v9m).\nUses TaskSupervisor for load management from \"Implement TaskSupervisor\" (bd-3le2).\nRequires idx_mrs_list_default index from \"Add required TUI indexes\" (bd-3pm2).","status":"in_progress","priority":2,"issue_type":"task","created_at":"2026-02-12T16:59:24.070743Z","created_by":"tayloreernisse","updated_at":"2026-02-18T19:38:12.922553Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-2kr0","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-2kr0","depends_on_id":"bd-3ei1","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-2kr0","depends_on_id":"bd-3pm2","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} +{"id":"bd-2kr0","title":"Implement MR List (state + action + view)","description":"## Background\nThe MR List mirrors the Issue List pattern with MR-specific columns (target branch, source branch, draft status, reviewer). 
Same keyset pagination, snapshot fence, and filter bar DSL.\n\n## Approach\nState (state/mr_list.rs):\n- MrListState: same structure as IssueListState but with MrFilter and MrListRow, plus snapshot_upper_updated_at, filter_hash, peek_visible, peek_content\n- MrFilter: state, author, reviewer, target_branch, source_branch, label, draft (bool), free_text, project_id\n- MrListRow: project_path, iid, title, state, author, reviewer, target_branch, labels, updated_at, draft\n- MrCursor: updated_at, iid for keyset pagination\n- handle_key(): j/k scroll, J/K page, Enter select, / focus filter, Tab sort, g+g top, G bottom, r refresh, Space toggle Quick Peek\n\n**Snapshot fence:** Same pattern as Issue List — store snapshot_upper_updated_at on first load and refresh, filter subsequent pages. Explicit refresh (r) resets.\n\n**filter_hash:** Same pattern as Issue List — filter change resets cursor to page 1.\n\n**Quick Peek (Space key):**\n- Space toggles right-side preview pane (40% width) showing selected MR detail\n- Preview loads asynchronously via TaskSupervisor\n- j/k updates preview for newly selected row\n- Narrow terminals (<100 cols): peek replaces list\n\nAction (action.rs):\n- fetch_mrs(conn, filter, cursor, page_size, clock, snapshot_fence) -> Result: keyset query against merge_requests table. 
Uses idx_mrs_list_default index.\n- fetch_mr_peek(conn, entity_key) -> Result: loads MR detail for Quick Peek preview\n\nView (view/mr_list.rs):\n- render_mr_list(frame, state, area, theme): FilterBar + EntityTable with MR columns\n- When peek_visible: split area horizontally — list (60%) | peek preview (40%)\n- Columns: IID, Title (flex), State, Author, Target, Labels, Updated, Draft indicator\n- Draft MRs shown with muted style and [WIP] tag\n\n## Acceptance Criteria\n- [ ] Keyset pagination works for MR list (same pattern as issues)\n- [ ] Browse snapshot fence prevents rows shifting during concurrent sync\n- [ ] Explicit refresh (r) resets snapshot fence\n- [ ] filter_hash resets cursor on filter change\n- [ ] MR-specific filter fields: draft, reviewer, target_branch, source_branch\n- [ ] Draft MRs visually distinguished with [WIP] indicator\n- [ ] State filter supports: opened, merged, closed, locked, all\n- [ ] Columns: IID, Title, State, Author, Target Branch, Labels, Updated\n- [ ] Enter navigates to MrDetail, Esc returns with state preserved\n- [ ] Space toggles Quick Peek right-side preview pane\n- [ ] Quick Peek loads MR detail asynchronously\n- [ ] j/k in peek mode updates preview for newly selected row\n- [ ] Narrow terminal (<100 cols): peek replaces list\n\n## Files\n- MODIFY: crates/lore-tui/src/state/mr_list.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_mrs, fetch_mr_peek)\n- CREATE: crates/lore-tui/src/view/mr_list.rs\n\n## TDD Anchor\nRED: Write test_fetch_mrs_draft_filter in action.rs that inserts 5 MRs (3 draft, 2 not), calls fetch_mrs with draft=true filter, asserts 3 results.\nGREEN: Implement fetch_mrs with draft filter.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fetch_mrs\n\nAdditional tests:\n- test_mr_snapshot_fence: verify fence excludes newer rows\n- test_mr_filter_hash_reset: verify filter change resets cursor\n\n## Edge Cases\n- MR state \"locked\" is rare but must be handled in 
filter and display\n- Very long branch names: truncate with ellipsis\n- MRs with no reviewer: show \"-\" in reviewer column\n- Quick Peek on empty list: no-op\n- Rapid j/k with peek open: debounce peek loads\n\n## Dependency Context\nUses EntityTable and FilterBar from \"Implement entity table + filter bar widgets\" (bd-18qs).\nUses same keyset pagination pattern from \"Implement Issue List\" (bd-3ei1).\nUses MrListState from \"Implement AppState composition\" (bd-1v9m).\nUses TaskSupervisor for load management from \"Implement TaskSupervisor\" (bd-3le2).\nRequires idx_mrs_list_default index from \"Add required TUI indexes\" (bd-3pm2).","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:59:24.070743Z","created_by":"tayloreernisse","updated_at":"2026-02-18T20:36:57.718716Z","closed_at":"2026-02-18T20:36:57.718613Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-2kr0","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-2kr0","depends_on_id":"bd-3ei1","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-2kr0","depends_on_id":"bd-3pm2","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-2l3s","title":"Per-note search: search individual comments at note granularity","description":"## Background\nMost knowledge in a GitLab project is buried in discussion threads. Current lore search operates at document level (one doc per issue/MR/discussion). An agent searching for \"we decided to use Redis\" only finds the parent issue, not the specific comment where that decision was stated.\n\n## Current State (Verified 2026-02-12)\n- documents table (migration 007): source_type, source_id, project_id, author_username, label_names, content_text, content_hash, etc. 
NO source_note_id column.\n- source_type values: 'issue', 'merge_request', 'discussion' — discussion docs concatenate all notes into one text blob\n- notes table: 282K rows with individual note body, author, timestamps, is_system flag\n- discussions table: links notes to their parent entity (noteable_type, noteable_id)\n- FTS5 index (documents_fts): operates on coarse document-level text\n- Document generation: src/documents/extractor.rs extracts issue/MR/discussion documents\n- Document regeneration: src/documents/regenerator.rs handles dirty document refresh\n- PRD exists: docs/prd-per-note-search.md with 5 feedback iterations\n\n## Approach\n\n### Schema (Migration 022)\nThis bead owns migration 022. bd-2g50 (data gaps) ships after this and uses migration 023.\n\n```sql\n-- migrations/022_note_documents.sql\nALTER TABLE documents ADD COLUMN source_note_id INTEGER REFERENCES notes(id);\nCREATE INDEX idx_documents_source_note ON documents(source_note_id) WHERE source_note_id IS NOT NULL;\n```\n- source_note_id = NULL for existing entity-level documents (backwards compatible)\n- source_note_id = notes.id for new note-level documents\n\nWire into src/core/db.rs MIGRATIONS array as entry (\"022\", include_str!(\"../../migrations/022_note_documents.sql\")). 
LATEST_SCHEMA_VERSION auto-updates since it's `MIGRATIONS.len() as i32`.\n\n### Document Generation (src/documents/extractor.rs)\nAdd a new extraction function alongside existing `extract_issue_document()` (line 85), `extract_mr_document()` (line 186), `extract_discussion_document()` (line 302):\n\n```rust\npub fn extract_note_documents(\n conn: &Connection,\n project_id: i64,\n) -> Result> {\n // SELECT n.id, n.body, n.author_username, n.created_at, n.updated_at,\n // d.noteable_type, d.noteable_id\n // FROM notes n\n // JOIN discussions d ON n.discussion_id = d.id\n // WHERE n.is_system = 0\n // AND LENGTH(n.body) >= 50\n // AND d.project_id = ?1\n // AND n.id NOT IN (SELECT source_note_id FROM documents WHERE source_note_id IS NOT NULL)\n\n // For each qualifying note:\n // - source_type = 'note'\n // - source_id = note.id (the note's local DB id)\n // - source_note_id = note.id\n // - title = format!(\"Re: {}\", parent_entity_title)\n // - author_username = note.author_username\n // - content_text = note.body\n // - content_hash = sha256(note.body) for deduplication\n}\n```\n\nMinimum note length (50 chars) filters out \"+1\", \"LGTM\", emoji-only notes. is_system=0 filters automated state change notes.\n\nNOTE: The documents table CHECK constraint for source_type needs updating — currently enforces `CHECK (source_type IN ('issue','merge_request','discussion'))`. Migration 022 must also:\n```sql\n-- Drop and recreate the CHECK constraint is not supported in SQLite ALTER TABLE.\n-- Instead, the check is only on INSERT, so we need to handle this:\n-- Option A: Don't add 'note' to CHECK — just insert with source_type='note' and let\n-- SQLite ignore the CHECK on ALTER (it won't — CHECK is enforced).\n-- Option B: Use source_type='discussion' for note docs (semantically wrong).\n-- Option C: Recreate the table (heavy migration).\n-- RECOMMENDED: Use a new migration that drops the CHECK constraint entirely.\n-- SQLite doesn't support ALTER TABLE ... 
DROP CONSTRAINT, so:\n-- CREATE TABLE documents_new (... without CHECK ...);\n-- INSERT INTO documents_new SELECT * FROM documents;\n-- DROP TABLE documents;\n-- ALTER TABLE documents_new RENAME TO documents;\n-- Recreate indexes and triggers.\n-- This is the only correct approach. The CHECK constraint is in migration 007.\n```\n\n### Search Integration\nAdd --granularity flag to search command:\n\n```rust\n// In SearchCliFilters or SearchFilters (src/search/filters.rs:15)\npub granularity: Option, // note | entity (default)\n\n// In FTS query construction (src/search/fts.rs)\n// When granularity = note:\n// AND d.source_note_id IS NOT NULL\n// When granularity = entity (or default):\n// AND d.source_note_id IS NULL (existing behavior)\n```\n\n### Robot Mode Output (note granularity)\n```json\n{\n \"source_type\": \"note\",\n \"title\": \"Re: Switch Health Card\",\n \"parent_type\": \"issue\",\n \"parent_iid\": 3864,\n \"parent_title\": \"Switch Health Card (Throw Times)\",\n \"note_author\": \"teernisse\",\n \"note_created_at\": \"2026-02-01T...\",\n \"discussion_id\": \"abc123\",\n \"snippet\": \"...decided to use once-per-day ingestion from BNSF...\",\n \"score\": 0.87\n}\n```\n\nJoin path for note metadata:\n```sql\nSELECT d.source_note_id, n.author_username, n.created_at,\n disc.gitlab_discussion_id,\n CASE disc.noteable_type\n WHEN 'Issue' THEN 'issue'\n WHEN 'MergeRequest' THEN 'merge_request'\n END as parent_type,\n disc.noteable_id\nFROM documents d\nJOIN notes n ON d.source_note_id = n.id\nJOIN discussions disc ON n.discussion_id = disc.id\nWHERE d.source_note_id IS NOT NULL AND d.id IN (...)\n```\n\n## TDD Loop\nRED: Tests in src/documents/extractor.rs (or new test file):\n- test_note_document_generation: insert issue + discussion + 3 notes (one 10 chars, one 60 chars, one 200 chars), run extract_note_documents, assert 2 note-level documents created (>= 50 chars only)\n- test_note_document_skips_system_notes: insert system note (is_system=1) with 
100-char body, assert no document generated\n- test_note_document_content_hash_dedup: insert note, generate doc, re-run, assert no duplicate created\n- test_note_document_parent_title: assert generated doc title starts with \"Re: \"\n\nTests in src/cli/commands/search.rs:\n- test_search_granularity_note_filter: with note docs in DB, --granularity note returns only note results\n- test_search_granularity_entity_default: default behavior unchanged, does NOT return note docs\n\nGREEN: Add migration, update extractor, add --granularity flag to search\n\nVERIFY:\n```bash\ncargo test note_document && cargo test search_granularity\ncargo clippy --all-targets -- -D warnings\ncargo run --release -- -J search 'ingestion' --granularity note | jq '.data.results[0].parent_iid'\n```\n\n## Acceptance Criteria\n- [ ] Migration 022 adds source_note_id to documents table (nullable, indexed, FK to notes)\n- [ ] Migration 022 handles the source_type CHECK constraint (allows 'note' as valid value)\n- [ ] extract_note_documents creates note-level docs for notes >= 50 chars, non-system\n- [ ] Content hash deduplication prevents duplicate note documents\n- [ ] lore search --granularity note returns note-level results with parent context\n- [ ] lore search (no flag) returns entity-level results only (backwards compatible)\n- [ ] Robot mode includes parent_type, parent_iid, parent_title, note_author, note_created_at\n- [ ] Performance: note-level FTS search across expanded index completes in <200ms\n- [ ] Embedding pipeline handles note-level documents (embed individually, same as entity docs)\n- [ ] lore stats shows note document count separately from entity document count\n\n## Edge Cases\n- Note with only markdown formatting (no text after stripping): skip (LENGTH(body) >= 50 handles most)\n- Note body is a quote of another note (duplicated text): deduplicate via content_hash\n- Very long note (>32KB): apply same truncation as entity documents (src/documents/truncation.rs)\n- Discussion 
with 100+ notes: each becomes its own document (correct behavior)\n- Deleted notes (if tracked): should not generate documents\n- Notes on confidential issues: inherit visibility (future concern, not blocking)\n- source_type CHECK constraint: migration MUST handle this — SQLite enforces CHECK on INSERT, so inserting source_type='note' will fail without updating the constraint\n\n## Files to Modify\n- NEW: migrations/022_note_documents.sql (schema change + CHECK constraint update)\n- src/core/db.rs (wire migration 022 into MIGRATIONS array)\n- src/documents/extractor.rs (add extract_note_documents function)\n- src/documents/mod.rs (export new function)\n- src/search/fts.rs (add granularity filter to FTS queries)\n- src/search/filters.rs (add granularity to SearchFilters at line 15)\n- src/cli/commands/search.rs (--granularity flag, note metadata in SearchResultDisplay)\n- src/cli/commands/stats.rs (show note document count)","status":"closed","priority":1,"issue_type":"feature","created_at":"2026-02-12T15:45:35.465446Z","created_by":"tayloreernisse","updated_at":"2026-02-12T16:55:56.774523Z","closed_at":"2026-02-12T16:55:56.774470Z","close_reason":"Replaced by granular beads broken out from docs/prd-per-note-search.md","compaction_level":0,"original_size":0,"labels":["cli-imp","search"],"dependencies":[{"issue_id":"bd-2l3s","depends_on_id":"bd-13lp","type":"parent-child","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-2l3s","depends_on_id":"bd-2g50","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-2ldg","title":"WHO: Mode resolution, path helpers, run_who entry point","description":"## Background\n\nCore scaffolding that all 5 query modes depend on. 
Defines the mode discrimination logic, path normalization, path-to-SQL translation (with project-scoped DB probes), time resolution, and the run_who() entry point that dispatches to query functions.\n\n## Approach\n\n### WhoMode enum\n```rust\nenum WhoMode<'a> {\n Expert { path: String }, // owns String (normalization produces new strings)\n Workload { username: &'a str }, // borrows from args\n Reviews { username: &'a str },\n Active,\n Overlap { path: String },\n}\n```\n\n### resolve_mode() discrimination rules:\n1. --path flag always wins -> Expert\n2. --active -> Active\n3. --overlap -> Overlap\n4. positional target with --reviews -> Reviews\n5. positional target containing '/' -> Expert (username never contains /)\n6. positional target without '/' -> Workload (strip @ prefix)\n7. No args -> error with usage examples\n\n### normalize_repo_path(): strips ./, leading /, collapses //, converts \\ to / (Windows paste, only when no / present), trims whitespace\n\n### PathQuery + build_path_query(conn, path, project_id):\n- Struct: `{ value: String, is_prefix: bool }`\n- Trailing / forces directory prefix\n- Root path (no /) without trailing / -> exact match (handles Makefile, LICENSE via --path)\n- Last segment contains . -> heuristic: file (exact)\n- **Two-way DB probe** (project-scoped): when heuristics are ambiguous, probe DB:\n - Probe 1: exact path exists? `SELECT 1 FROM notes WHERE note_type='DiffNote' AND is_system=0 AND position_new_path = ?1 AND (?2 IS NULL OR project_id = ?2) LIMIT 1`\n - Probe 2 (only if exact miss, not forced-dir): prefix exists?\n - Decision: forced_dir -> prefix; exact_exists -> exact; prefix_exists -> prefix; else heuristic\n- **CRITICAL**: escape_like() is ONLY called for prefix (LIKE) matches. 
For exact matches (=), use raw path — LIKE metacharacters (_, %) are not special in = comparisons.\n\n### Result types: WhoRun, WhoResolvedInput (since_mode tri-state: \"default\"/\"explicit\"/\"none\"), WhoResult enum, all 5 mode-specific result structs (see plan Step 2 \"Result Types\")\n\n### run_who() entry: resolve project -> resolve mode -> resolve since -> dispatch to query_* -> return WhoRun\n\n### since_mode semantics:\n- Expert/Reviews/Active/Overlap: default window applies if --since absent -> \"default\"\n- Workload: no default window; --since absent -> \"none\"\n- Any mode with explicit --since -> \"explicit\"\n\n## Files\n\n- `src/cli/commands/who.rs` — all code in this file\n\n## TDD Loop\n\nRED:\n```\ntest_is_file_path_discrimination — resolve_mode for paths/usernames/@/--reviews/--path\ntest_build_path_query — directory/file/root/dotted/underscore/dotless\ntest_build_path_query_exact_does_not_escape — _ in exact path stays raw\ntest_path_flag_dotless_root_file_is_exact — Makefile/Dockerfile via --path\ntest_build_path_query_dotless_subdir_file_uses_db_probe — src/Dockerfile with/without DB data\ntest_build_path_query_probe_is_project_scoped — data in proj 1, query proj 2\ntest_escape_like — normal/underscore/percent/backslash\ntest_normalize_repo_path — ./ / \\\\ // whitespace identity\ntest_lookup_project_path — basic round-trip\n```\n\nGREEN: Implement all functions. 
Query functions can be stubs (todo!()) for now.\nVERIFY: `cargo test -- who`\n\n## Acceptance Criteria\n\n- [ ] resolve_mode correctly discriminates all 7 cases (see tests)\n- [ ] build_path_query returns exact for files, prefix for dirs\n- [ ] build_path_query DB probe is project-scoped (cross-project isolation)\n- [ ] escape_like escapes %, _, \\ correctly\n- [ ] normalize_repo_path handles ./, /, \\\\, //, whitespace\n- [ ] WhoResolvedInput.since_mode is \"none\" for Workload without --since\n\n## Edge Cases\n\n- Dotless files in subdirectories (src/Dockerfile, infra/Makefile) — DB probe catches these, heuristic alone would misclassify as directory\n- Windows path paste (src\\foo\\bar.rs) — convert \\ to / only when no / present\n- LIKE metacharacters in filenames (README_with_underscore.md) — must NOT be escaped for exact match\n- Root files without / (README.md, LICENSE, Makefile) — must use --path flag, positional would treat as username","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-08T02:40:11.209288Z","created_by":"tayloreernisse","updated_at":"2026-02-08T04:10:29.595703Z","closed_at":"2026-02-08T04:10:29.595666Z","close_reason":"Implemented by agent team: migration 017, CLI skeleton, all 5 query modes, human+robot output, 20 tests. All quality gates pass.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-2ldg","depends_on_id":"bd-2rk9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-2lg6","title":"Implement Clock trait (SystemClock + FakeClock)","description":"## Background\nAll relative-time rendering (e.g., \"3h ago\" labels) must use an injected Clock, not wall-clock time. This ensures deterministic snapshot tests and consistent timestamps within a single frame. 
FakeClock lets tests control time precisely.\n\n## Approach\nCreate crates/lore-tui/src/clock.rs with:\n- Clock trait: fn now(&self) -> chrono::DateTime\n- SystemClock: impl Clock using chrono::Utc::now()\n- FakeClock: wraps Arc>>, impl Clock returning the frozen value. Methods: new(fixed_time), advance(duration), set(time)\n- Both cloneable (SystemClock is Copy, FakeClock shares Arc)\n\n## Acceptance Criteria\n- [ ] Clock trait with now() method\n- [ ] SystemClock returns real wall-clock time\n- [ ] FakeClock returns frozen time, advance() moves it forward\n- [ ] FakeClock is Clone (shared Arc)\n- [ ] Tests pass: frozen clock returns same time on repeated calls\n- [ ] Tests pass: advance() moves time forward by exact duration\n\n## Files\n- CREATE: crates/lore-tui/src/clock.rs\n\n## TDD Anchor\nRED: Write test_fake_clock_frozen that creates FakeClock at a fixed time, calls now() twice, asserts both return the same value.\nGREEN: Implement FakeClock with Arc>.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fake_clock\n\n## Edge Cases\n- FakeClock must be Send+Sync for use across Cmd::task threads\n- advance() must handle chrono overflow gracefully (use checked_add)","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:54:11.756415Z","created_by":"tayloreernisse","updated_at":"2026-02-12T19:48:39.169147Z","closed_at":"2026-02-12T19:48:39.169096Z","close_reason":"Clock trait + SystemClock + FakeClock with 7 tests: frozen time, advance, set, clone-shares-state, Send+Sync, trait object. Clippy clean.","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-2lg6","depends_on_id":"bd-3ddw","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} @@ -212,7 +212,7 @@ {"id":"bd-3cjp","title":"NOTE-2I: Batch parent metadata cache for note regeneration","description":"## Background\nextract_note_document() (from NOTE-2C) fetches parent entity metadata per note via SQL queries. 
During initial backfill of ~8K notes, this creates N+1 amplification — 50 notes on same MR = 50 identical parent lookups. This is a performance optimization for batch regeneration only.\n\n## Approach\n1. Add ParentMetadataCache struct in src/documents/extractor.rs:\n pub struct ParentMetadataCache {\n cache: HashMap<(String, i64), ParentMetadata>,\n }\n Key: (noteable_type: String, parent_local_id: i64)\n ParentMetadata struct: { iid: i64, title: String, web_url: String, labels: Vec, project_path: String }\n\n Methods:\n - pub fn new() -> Self\n - pub fn get_or_fetch(&mut self, conn: &Connection, noteable_type: &str, parent_id: i64) -> Result>\n get_or_fetch uses HashMap entry API: on miss, fetches from DB (same queries as extract_note_document), caches, returns ref.\n\n2. Add pub fn extract_note_document_cached(conn: &Connection, note_id: i64, cache: &mut ParentMetadataCache) -> Result>:\n Same logic as extract_note_document but calls cache.get_or_fetch() instead of inline parent queries. The uncached version remains for single-note use.\n\n3. Update batch regeneration loop in src/documents/regenerator.rs. The main regeneration loop is in regenerate_dirty_documents() (top of file, ~line 20). It processes dirty entries one at a time via regenerate_one() (line 86). For batch cache to work:\n - Create ParentMetadataCache before the loop\n - In the SourceType::Note arm of regenerate_one, pass the cache through\n - This requires either making regenerate_one() take an optional cache parameter, or restructuring to handle Note specially in the loop body.\n\n Cleanest approach: Add cache: &mut Option parameter to regenerate_one(). Initialize as Some(ParentMetadataCache::new()) before the loop. Only SourceType::Note uses it. 
Other types ignore it.\n\n Cache is created fresh per regenerate_dirty_documents() call — no cross-invocation persistence.\n\n## Files\n- MODIFY: src/documents/extractor.rs (add ParentMetadataCache struct + extract_note_document_cached)\n- MODIFY: src/documents/regenerator.rs (add cache parameter to regenerate_one, use in batch loop)\n- MODIFY: src/documents/mod.rs (export ParentMetadataCache if needed externally)\n\n## TDD Anchor\nRED: test_note_regeneration_batch_uses_cache — insert project, issue, 10 notes on same issue, mark all dirty, regenerate all, assert all 10 documents created correctly.\nGREEN: Implement ParentMetadataCache and extract_note_document_cached.\nVERIFY: cargo test note_regeneration_batch -- --nocapture\nTests: test_note_regeneration_cache_consistent_with_direct_extraction (cached output == uncached output), test_note_regeneration_cache_invalidates_across_parents (notes from different parents get correct metadata)\n\n## Acceptance Criteria\n- [ ] ParentMetadataCache reduces DB queries during batch regeneration (10 notes on 1 parent = 1 parent fetch, not 10)\n- [ ] Cached extraction produces identical DocumentData output to uncached\n- [ ] Cache keyed per (noteable_type, parent_id) — no cross-parent leakage\n- [ ] Cache scoped to single regenerate_dirty_documents call — no persistence or invalidation complexity\n- [ ] All 3 tests pass\n\n## Dependency Context\n- Depends on NOTE-2C (bd-18yh): extract_note_document function must exist to create the cached variant\n\n## Edge Cases\n- Parent deleted between cache creation and lookup: get_or_fetch returns None, extract_note_document_cached returns None (same as uncached)\n- Very large batch (10K+ notes): cache grows but is bounded by number of unique parents (typically <100 issues/MRs)\n- Cache miss for orphaned discussion: cached None result prevents repeated failed 
lookups","status":"closed","priority":3,"issue_type":"task","created_at":"2026-02-12T17:03:00.515490Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:13:15.870738Z","closed_at":"2026-02-12T18:13:15.870693Z","close_reason":"Implemented by agent swarm","compaction_level":0,"original_size":0,"labels":["per-note","search"]} {"id":"bd-3ddw","title":"Create lore-tui crate scaffold","description":"## Background\nThe TUI is implemented as a separate binary crate (crates/lore-tui/) that uses nightly Rust for FrankenTUI. It is EXCLUDED from the root workspace to keep nightly-only deps isolated. The lore CLI spawns lore-tui at runtime via binary delegation (PATH lookup) — zero compile-time dependency from lore to lore-tui. lore-tui depends on lore as a library (src/lib.rs exists and exports all modules).\n\nFrankenTUI is published on crates.io as ftui (0.1.1), ftui-core, ftui-runtime, ftui-render, ftui-style. Use crates.io versions. Local clone exists at ~/projects/FrankenTUI/ for reference.\n\n## Approach\nCreate the crate directory structure:\n- crates/lore-tui/Cargo.toml with dependencies:\n - ftui = \"0.1.1\" (crates.io) and related ftui-* crates\n - lore = { path = \"../..\" } (library dependency for Config, db, ingestion, etc.)\n - clap, anyhow, chrono, dirs, rusqlite (bundled), crossterm\n- crates/lore-tui/rust-toolchain.toml pinning nightly-2026-02-08\n- crates/lore-tui/src/main.rs — binary entry point with TuiCli struct (clap Parser) supporting --config, --sync, --fresh, --render-mode, --ascii, --no-alt-screen\n- crates/lore-tui/src/lib.rs — public API: launch_tui(), launch_sync_tui(), LaunchOptions struct, module declarations\n- Root Cargo.toml: verify lore-tui is NOT in [workspace] members\n\n## Acceptance Criteria\n- [ ] crates/lore-tui/Cargo.toml exists with ftui (crates.io) and lore (path dep) dependencies\n- [ ] crates/lore-tui/rust-toolchain.toml pins nightly-2026-02-08\n- [ ] crates/lore-tui/src/main.rs compiles with clap CLI args\n- [ ] 
crates/lore-tui/src/lib.rs declares all module stubs and exports LaunchOptions, launch_tui, launch_sync_tui\n- [ ] cargo +stable check --workspace --all-targets passes (lore-tui excluded)\n- [ ] cargo +nightly check --manifest-path crates/lore-tui/Cargo.toml --all-targets passes\n- [ ] Root Cargo.toml does NOT include lore-tui in workspace members\n\n## Files\n- CREATE: crates/lore-tui/Cargo.toml\n- CREATE: crates/lore-tui/rust-toolchain.toml\n- CREATE: crates/lore-tui/src/main.rs\n- CREATE: crates/lore-tui/src/lib.rs\n- VERIFY: Cargo.toml (root — confirm lore-tui NOT in members)\n\n## TDD Anchor\nRED: Write a shell test that runs cargo +nightly check --manifest-path crates/lore-tui/Cargo.toml and asserts exit 0.\nGREEN: Create the full crate scaffold with all deps.\nVERIFY: cargo +stable check --workspace --all-targets && cargo +nightly check --manifest-path crates/lore-tui/Cargo.toml\n\n## Edge Cases\n- ftui crates may require specific nightly features — pin exact nightly date\n- Path dependency to lore means lore-tui sees lore's edition 2024 — verify compat\n- rusqlite bundled feature pulls in cc build — may need nightly-compatible cc version\n- If ftui 0.1.1 has breaking changes vs PRD assumptions, check ~/projects/FrankenTUI/ for latest API\n\n## Dependency Context\nRoot task — no dependencies. All other Phase 0 tasks depend on this scaffold existing.","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:53:10.859837Z","created_by":"tayloreernisse","updated_at":"2026-02-12T19:43:49.635086Z","closed_at":"2026-02-12T19:43:49.635040Z","close_reason":"Scaffold created and compiles: Cargo.toml, rust-toolchain.toml, main.rs, lib.rs all passing cargo check + clippy + fmt","compaction_level":0,"original_size":0,"labels":["TUI"]} {"id":"bd-3dum","title":"Orchestrator: status enrichment phase with transactional writes","description":"## Background\nThe orchestrator controls the sync pipeline. 
Status enrichment is a new Phase 1.5 that runs after issue ingestion but before discussion sync. It must be non-fatal — errors skip enrichment but don't crash the sync.\n\n## Approach\nAdd enrichment phase to ingest_project_issues_with_progress. Use client.graphql_client() factory. Look up project path from DB via .optional()? for non-fatal failure. Transactional writes via enrich_issue_statuses_txn() with two phases: clear stale, then apply new.\n\n## Files\n- src/ingestion/orchestrator.rs (enrichment phase + txn helper + IngestProjectResult fields + ProgressEvent variants)\n- src/cli/commands/ingest.rs (add match arms for new ProgressEvent variants)\n\n## Implementation\n\nIngestProjectResult new fields:\n statuses_enriched: usize, statuses_cleared: usize, statuses_seen: usize,\n statuses_without_widget: usize, partial_error_count: usize,\n first_partial_error: Option, status_enrichment_error: Option,\n status_enrichment_mode: String, status_unsupported_reason: Option\n Default: all 0/None/\"\" as appropriate\n\nProgressEvent new variants:\n StatusEnrichmentComplete { enriched: usize, cleared: usize }\n StatusEnrichmentSkipped\n\nPhase 1.5 logic (after ingest_issues, before discussion sync):\n 1. Check config.sync.fetch_work_item_status && !signal.is_cancelled()\n 2. If false: set mode=\"skipped\", emit StatusEnrichmentSkipped\n 3. Look up project path: conn.query_row(\"SELECT path_with_namespace FROM projects WHERE id = ?1\", [project_id], |r| r.get(0)).optional()?\n 4. If None: warn, set status_enrichment_error=\"project_path_missing\", emit StatusEnrichmentComplete{0,0}\n 5. Create graphql_client via client.graphql_client()\n 6. Call fetch_issue_statuses(&graphql_client, &project_path).await\n 7. On Ok: map unsupported_reason to mode/reason, call enrich_issue_statuses_txn(), set counters\n 8. On Err: warn, set status_enrichment_error, mode=\"fetched\"\n 9. 
Emit StatusEnrichmentComplete\n\nenrich_issue_statuses_txn(conn, project_id, statuses, all_fetched_iids, now_ms) -> Result<(usize, usize)>:\n Uses conn.unchecked_transaction() (conn is &Connection not &mut)\n Phase 1 (clear): UPDATE issues SET status_*=NULL, status_synced_at=now_ms WHERE project_id=? AND iid=? AND status_name IS NOT NULL — for IIDs in all_fetched_iids but NOT in statuses\n Phase 2 (apply): UPDATE issues SET status_name=?, status_category=?, status_color=?, status_icon_name=?, status_synced_at=now_ms WHERE project_id=? AND iid=?\n tx.commit(), return (enriched, cleared)\n\nIn src/cli/commands/ingest.rs progress callback, add arms:\n ProgressEvent::StatusEnrichmentComplete { enriched, cleared } => { ... }\n ProgressEvent::StatusEnrichmentSkipped => { ... }\n\n## Acceptance Criteria\n- [ ] Enrichment runs after ingest_issues, before discussion sync\n- [ ] Gated by config.sync.fetch_work_item_status\n- [ ] Project path missing -> skipped with error=\"project_path_missing\", sync continues\n- [ ] enrich_issue_statuses_txn correctly UPDATEs status columns + status_synced_at\n- [ ] Stale status cleared: issue in all_fetched_iids but not statuses -> NULL + synced_at set\n- [ ] Transaction rollback on failure: no partial updates\n- [ ] Idempotent: running twice with same data produces same result\n- [ ] GraphQL error: logged, enrichment_error captured, sync continues\n- [ ] ingest.rs compiles with new ProgressEvent arms\n- [ ] cargo check --all-targets passes\n\n## TDD Loop\nRED: test_enrich_issue_statuses_txn, test_enrich_skips_unknown_iids, test_enrich_clears_removed_status, test_enrich_transaction_rolls_back_on_failure, test_enrich_idempotent_across_two_runs, test_enrich_sets_synced_at_on_clear, test_enrichment_error_captured_in_result, test_project_path_missing_skips_enrichment\n Tests use in-memory DB with migration 021 applied\nGREEN: Implement enrichment phase + txn helper + result fields + progress arms\nVERIFY: cargo test enrich && cargo test 
orchestrator\n\n## Edge Cases\n- unchecked_transaction() needed because conn is &Connection not &mut Connection\n- .optional()? requires use rusqlite::OptionalExtension\n- status_synced_at is set on BOTH clear and apply operations (not NULL on clear)\n- Clear SQL has WHERE status_name IS NOT NULL to avoid counting already-cleared rows\n- Progress callback match must be updated in SAME batch as enum change (compile error otherwise)\n- status_enrichment_mode must be set in ALL code paths (fetched/unsupported/skipped)","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-11T06:42:11.254917Z","created_by":"tayloreernisse","updated_at":"2026-02-11T07:21:33.419310Z","closed_at":"2026-02-11T07:21:33.419268Z","close_reason":"Implemented by agent swarm — all quality gates pass (595 tests, 0 failures)","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-3dum","depends_on_id":"bd-1gvg","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3dum","depends_on_id":"bd-2jzn","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3dum","depends_on_id":"bd-2y79","type":"parent-child","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} -{"id":"bd-3ei1","title":"Implement Issue List (state + action + view)","description":"## Background\nThe Issue List is the primary browse interface for issues. It uses keyset pagination (not OFFSET) for deterministic cross-page traversal under concurrent sync writes. 
A browse snapshot fence preserves stable ordering until explicit refresh.\n\n## Approach\nState (state/issue_list.rs):\n- IssueListState: window (Vec), total_count, selected_index, scroll_offset, next_cursor (Option), prev_cursor (Option), prefetch_in_flight (bool), filter (IssueFilter), filter_input (TextInput), filter_focused (bool), sort_field (SortField), sort_order (SortOrder), snapshot_upper_updated_at (Option), filter_hash (u64), peek_visible (bool), peek_content (Option)\n- IssueCursor: updated_at (i64), iid (i64) — boundary values for keyset pagination\n- IssueFilter: state (Option), author (Option), assignee (Option), label (Option), milestone (Option), status (Option), free_text (Option), project_id (Option)\n- IssueListRow: project_path, iid, title, state, author, assignee, labels, updated_at, status_name, status_icon\n- handle_key(): j/k scroll, J/K page, Enter select, / focus filter, Tab sort, g+g top, G bottom, r refresh, Space toggle Quick Peek\n- scroll_to_top(), apply_filter(), set_sort(), toggle_peek()\n\n**Snapshot fence:** On first load and on explicit refresh (r), store snapshot_upper_updated_at = MAX(updated_at) from result set. Subsequent page fetches add WHERE updated_at <= snapshot_upper_updated_at to prevent rows from shifting as sync inserts new data. Explicit refresh (r) resets the fence.\n\n**filter_hash:** Compute a hash of the current filter state. When filter changes (new hash != old hash), reset cursor to page 1 and clear snapshot fence. This prevents stale pagination after filter changes.\n\n**Prefetch:** When scroll position reaches 80% of current window, trigger background prefetch of next page via TaskSupervisor. 
Prefetched data appended to window when user scrolls past current page boundary.\n\n**Quick Peek (Space key):**\n- Space toggles a right-side preview pane (40% width) showing the currently selected issue's detail\n- Preview content loads asynchronously via TaskSupervisor\n- Cursor movement (j/k) updates the preview for the newly selected row\n- Esc or Space again closes the peek pane\n- On narrow terminals (<100 cols), peek replaces the list instead of side-by-side\n\nAction (action.rs):\n- fetch_issues(conn, filter, cursor, page_size, clock, snapshot_fence) -> Result: keyset pagination query with WHERE (updated_at, iid) < (cursor.updated_at, cursor.iid) AND updated_at <= snapshot_fence ORDER BY updated_at DESC, iid DESC LIMIT page_size+1 (extra row detects has_next). Uses idx_issues_list_default index.\n- fetch_issue_peek(conn, entity_key) -> Result: loads issue detail for Quick Peek preview\n- IssueListPage: rows, next_cursor, prev_cursor, total_count\n\nView (view/issue_list.rs):\n- render_issue_list(frame, state, area, theme): FilterBar at top, EntityTable below, status bar at bottom\n- When peek_visible: split area horizontally — list (60%) | peek preview (40%)\n- Columns: IID, Title (flex), State, Author, Labels, Updated, Status\n\n## Acceptance Criteria\n- [ ] Keyset pagination fetches pages without OFFSET\n- [ ] Next/prev page navigation preserves deterministic ordering\n- [ ] Browse snapshot fence (snapshot_upper_updated_at) prevents rows from shifting during concurrent sync\n- [ ] Explicit refresh (r) resets snapshot fence and re-queries from first page\n- [ ] filter_hash tracks filter state; filter change resets cursor to page 1\n- [ ] Prefetch triggers at 80% scroll position via TaskSupervisor\n- [ ] Filter bar accepts DSL tokens and triggers re-query via ScreenIntent::RequeryNeeded\n- [ ] j/k scrolls within current page, J/K loads next/prev page\n- [ ] Enter navigates to IssueDetail(EntityKey), Esc returns to list with cursor preserved\n- [ ] Tab 
cycles sort column, sort indicator shown\n- [ ] Total count displayed in status area\n- [ ] Space toggles Quick Peek right-side preview pane\n- [ ] Quick Peek loads issue detail asynchronously\n- [ ] j/k in peek mode updates preview for newly selected row\n- [ ] Narrow terminal (<100 cols): peek replaces list instead of split view\n\n## Files\n- MODIFY: crates/lore-tui/src/state/issue_list.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_issues, fetch_issue_peek)\n- CREATE: crates/lore-tui/src/view/issue_list.rs\n\n## TDD Anchor\nRED: Write test_keyset_pagination in action.rs that inserts 30 issues, fetches page 1 (size 10), then fetches page 2 using returned cursor, asserts no overlap between pages.\nGREEN: Implement keyset pagination query.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_keyset_pagination\n\nAdditional tests:\n- test_snapshot_fence_excludes_newer_rows: insert row with updated_at > fence, assert not in results\n- test_filter_change_resets_cursor: change filter, verify cursor reset to None\n- test_prefetch_triggered_at_80pct: scroll to 80% of window, verify prefetch_in_flight set\n\n## Edge Cases\n- Multi-project datasets: cursor must include project_id scope from global ScopeContext\n- Issues with identical updated_at: keyset tiebreaker on iid ensures deterministic ordering\n- Empty result set: show \"No issues match your filter\" message, not empty table\n- Filter changes must reset cursor to first page (not continue from mid-pagination)\n- Quick Peek on empty list: no-op (don't show empty pane)\n- Rapid j/k with peek open: debounce peek loads to avoid flooding TaskSupervisor\n\n## Dependency Context\nUses EntityTable and FilterBar from \"Implement entity table + filter bar widgets\" (bd-18qs).\nUses AppState, IssueListState, ScreenIntent from \"Implement AppState composition\" (bd-1v9m).\nUses TaskSupervisor for load management and prefetch from \"Implement TaskSupervisor\" (bd-3le2).\nUses DbManager 
from \"Implement DbManager\" (bd-2kop).\nRequires idx_issues_list_default index from \"Add required TUI indexes\" (bd-3pm2).","status":"in_progress","priority":2,"issue_type":"task","created_at":"2026-02-12T16:58:31.401233Z","created_by":"tayloreernisse","updated_at":"2026-02-18T19:31:08.594688Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-3ei1","depends_on_id":"bd-18qs","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3ei1","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3ei1","depends_on_id":"bd-35g5","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3ei1","depends_on_id":"bd-3pm2","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} +{"id":"bd-3ei1","title":"Implement Issue List (state + action + view)","description":"## Background\nThe Issue List is the primary browse interface for issues. It uses keyset pagination (not OFFSET) for deterministic cross-page traversal under concurrent sync writes. 
A browse snapshot fence preserves stable ordering until explicit refresh.\n\n## Approach\nState (state/issue_list.rs):\n- IssueListState: window (Vec), total_count, selected_index, scroll_offset, next_cursor (Option), prev_cursor (Option), prefetch_in_flight (bool), filter (IssueFilter), filter_input (TextInput), filter_focused (bool), sort_field (SortField), sort_order (SortOrder), snapshot_upper_updated_at (Option), filter_hash (u64), peek_visible (bool), peek_content (Option)\n- IssueCursor: updated_at (i64), iid (i64) — boundary values for keyset pagination\n- IssueFilter: state (Option), author (Option), assignee (Option), label (Option), milestone (Option), status (Option), free_text (Option), project_id (Option)\n- IssueListRow: project_path, iid, title, state, author, assignee, labels, updated_at, status_name, status_icon\n- handle_key(): j/k scroll, J/K page, Enter select, / focus filter, Tab sort, g+g top, G bottom, r refresh, Space toggle Quick Peek\n- scroll_to_top(), apply_filter(), set_sort(), toggle_peek()\n\n**Snapshot fence:** On first load and on explicit refresh (r), store snapshot_upper_updated_at = MAX(updated_at) from result set. Subsequent page fetches add WHERE updated_at <= snapshot_upper_updated_at to prevent rows from shifting as sync inserts new data. Explicit refresh (r) resets the fence.\n\n**filter_hash:** Compute a hash of the current filter state. When filter changes (new hash != old hash), reset cursor to page 1 and clear snapshot fence. This prevents stale pagination after filter changes.\n\n**Prefetch:** When scroll position reaches 80% of current window, trigger background prefetch of next page via TaskSupervisor. 
Prefetched data appended to window when user scrolls past current page boundary.\n\n**Quick Peek (Space key):**\n- Space toggles a right-side preview pane (40% width) showing the currently selected issue's detail\n- Preview content loads asynchronously via TaskSupervisor\n- Cursor movement (j/k) updates the preview for the newly selected row\n- Esc or Space again closes the peek pane\n- On narrow terminals (<100 cols), peek replaces the list instead of side-by-side\n\nAction (action.rs):\n- fetch_issues(conn, filter, cursor, page_size, clock, snapshot_fence) -> Result: keyset pagination query with WHERE (updated_at, iid) < (cursor.updated_at, cursor.iid) AND updated_at <= snapshot_fence ORDER BY updated_at DESC, iid DESC LIMIT page_size+1 (extra row detects has_next). Uses idx_issues_list_default index.\n- fetch_issue_peek(conn, entity_key) -> Result: loads issue detail for Quick Peek preview\n- IssueListPage: rows, next_cursor, prev_cursor, total_count\n\nView (view/issue_list.rs):\n- render_issue_list(frame, state, area, theme): FilterBar at top, EntityTable below, status bar at bottom\n- When peek_visible: split area horizontally — list (60%) | peek preview (40%)\n- Columns: IID, Title (flex), State, Author, Labels, Updated, Status\n\n## Acceptance Criteria\n- [ ] Keyset pagination fetches pages without OFFSET\n- [ ] Next/prev page navigation preserves deterministic ordering\n- [ ] Browse snapshot fence (snapshot_upper_updated_at) prevents rows from shifting during concurrent sync\n- [ ] Explicit refresh (r) resets snapshot fence and re-queries from first page\n- [ ] filter_hash tracks filter state; filter change resets cursor to page 1\n- [ ] Prefetch triggers at 80% scroll position via TaskSupervisor\n- [ ] Filter bar accepts DSL tokens and triggers re-query via ScreenIntent::RequeryNeeded\n- [ ] j/k scrolls within current page, J/K loads next/prev page\n- [ ] Enter navigates to IssueDetail(EntityKey), Esc returns to list with cursor preserved\n- [ ] Tab 
cycles sort column, sort indicator shown\n- [ ] Total count displayed in status area\n- [ ] Space toggles Quick Peek right-side preview pane\n- [ ] Quick Peek loads issue detail asynchronously\n- [ ] j/k in peek mode updates preview for newly selected row\n- [ ] Narrow terminal (<100 cols): peek replaces list instead of split view\n\n## Files\n- MODIFY: crates/lore-tui/src/state/issue_list.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_issues, fetch_issue_peek)\n- CREATE: crates/lore-tui/src/view/issue_list.rs\n\n## TDD Anchor\nRED: Write test_keyset_pagination in action.rs that inserts 30 issues, fetches page 1 (size 10), then fetches page 2 using returned cursor, asserts no overlap between pages.\nGREEN: Implement keyset pagination query.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_keyset_pagination\n\nAdditional tests:\n- test_snapshot_fence_excludes_newer_rows: insert row with updated_at > fence, assert not in results\n- test_filter_change_resets_cursor: change filter, verify cursor reset to None\n- test_prefetch_triggered_at_80pct: scroll to 80% of window, verify prefetch_in_flight set\n\n## Edge Cases\n- Multi-project datasets: cursor must include project_id scope from global ScopeContext\n- Issues with identical updated_at: keyset tiebreaker on iid ensures deterministic ordering\n- Empty result set: show \"No issues match your filter\" message, not empty table\n- Filter changes must reset cursor to first page (not continue from mid-pagination)\n- Quick Peek on empty list: no-op (don't show empty pane)\n- Rapid j/k with peek open: debounce peek loads to avoid flooding TaskSupervisor\n\n## Dependency Context\nUses EntityTable and FilterBar from \"Implement entity table + filter bar widgets\" (bd-18qs).\nUses AppState, IssueListState, ScreenIntent from \"Implement AppState composition\" (bd-1v9m).\nUses TaskSupervisor for load management and prefetch from \"Implement TaskSupervisor\" (bd-3le2).\nUses DbManager 
from \"Implement DbManager\" (bd-2kop).\nRequires idx_issues_list_default index from \"Add required TUI indexes\" (bd-3pm2).","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:58:31.401233Z","created_by":"tayloreernisse","updated_at":"2026-02-18T20:36:57.589379Z","closed_at":"2026-02-18T20:36:57.589243Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-3ei1","depends_on_id":"bd-18qs","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3ei1","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3ei1","depends_on_id":"bd-35g5","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3ei1","depends_on_id":"bd-3pm2","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-3eis","title":"Implement property tests for navigation invariants","description":"## Background\nProperty-based tests verify navigation invariants hold for all possible sequences of push/pop/forward/jump/reset operations. 
Uses proptest or quickcheck for automated input generation.\n\n## Approach\n- Property: stack depth always >= 1 (Dashboard is always reachable)\n- Property: after push(X), current() == X\n- Property: after push(X) then pop(), current() returns to previous\n- Property: forward_stack cleared after any push (browser semantics)\n- Property: jump_list only contains detail/entity screens\n- Property: reset_to(X) clears all history, current() == X\n- Property: breadcrumbs length == back_stack.len() + 1\n- Arbitrary sequence of operations should never panic\n\n## Acceptance Criteria\n- [ ] All 7 navigation properties hold for 10000 generated test cases\n- [ ] No panic for any sequence of operations\n- [ ] Proptest shrinking finds minimal counterexamples on failure\n\n## Files\n- CREATE: crates/lore-tui/tests/nav_property_tests.rs\n\n## TDD Anchor\nRED: Write proptest that generates random sequences of push/pop/forward/reset, asserts stack depth >= 1 after every operation.\nGREEN: Ensure NavigationStack maintains invariant (pop returns None at root).\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml nav_property\n\n## Dependency Context\nUses NavigationStack from \"Implement NavigationStack\" task.\nUses Screen enum from \"Implement core types\" task.","status":"open","priority":2,"issue_type":"task","created_at":"2026-02-12T17:04:53.366767Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:38.381515Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-3eis","depends_on_id":"bd-1b6k","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3eis","depends_on_id":"bd-3fjk","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3eis","depends_on_id":"bd-nu0d","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-3er","title":"OBSERV Epic: Phase 3 - Performance Metrics Collection","description":"StageTiming struct, 
custom MetricsLayer tracing subscriber layer, span-to-metrics extraction, robot JSON enrichment with meta.stages, human-readable timing summary.\n\nDepends on: Phase 2 (spans must exist to extract timing from)\nUnblocks: Phase 4 (sync history needs Vec to store)\n\nFiles: src/core/metrics.rs (new), src/cli/commands/sync.rs, src/cli/commands/ingest.rs, src/main.rs\n\nAcceptance criteria (PRD Section 6.3):\n- lore --robot sync includes meta.run_id and meta.stages array\n- Each stage has name, elapsed_ms, items_processed\n- Top-level stages have sub_stages arrays\n- Interactive sync prints timing summary table\n- Zero-value fields omitted from JSON","status":"closed","priority":2,"issue_type":"epic","created_at":"2026-02-04T15:53:27.415566Z","created_by":"tayloreernisse","updated_at":"2026-02-04T17:32:56.743477Z","closed_at":"2026-02-04T17:32:56.743430Z","close_reason":"All Phase 3 tasks complete: StageTiming struct, MetricsLayer, span field recording, robot JSON enrichment with stages, and human-readable timing summary","compaction_level":0,"original_size":0,"labels":["observability"],"dependencies":[{"issue_id":"bd-3er","depends_on_id":"bd-2ni","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} {"id":"bd-3eu","title":"Implement hybrid search with adaptive recall","description":"## Background\nHybrid search is the top-level search orchestrator that combines FTS5 lexical results with sqlite-vec semantic results via RRF ranking. It supports three modes (Lexical, Semantic, Hybrid) and implements adaptive recall (wider initial fetch when filters are applied) and graceful degradation (falls back to FTS when Ollama is unavailable). 
All modes use RRF for consistent --explain output.\n\n## Approach\nCreate `src/search/hybrid.rs` per PRD Section 5.3.\n\n**Key types:**\n```rust\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\npub enum SearchMode {\n Hybrid, // Vector + FTS with RRF\n Lexical, // FTS only\n Semantic, // Vector only\n}\n\nimpl SearchMode {\n pub fn from_str(s: &str) -> Option {\n match s.to_lowercase().as_str() {\n \"hybrid\" => Some(Self::Hybrid),\n \"lexical\" | \"fts\" => Some(Self::Lexical),\n \"semantic\" | \"vector\" => Some(Self::Semantic),\n _ => None,\n }\n }\n\n pub fn as_str(&self) -> &'static str {\n match self {\n Self::Hybrid => \"hybrid\",\n Self::Lexical => \"lexical\",\n Self::Semantic => \"semantic\",\n }\n }\n}\n\npub struct HybridResult {\n pub document_id: i64,\n pub score: f64, // Normalized RRF score (0-1)\n pub vector_rank: Option,\n pub fts_rank: Option,\n pub rrf_score: f64, // Raw RRF score\n}\n```\n\n**Core function (ASYNC, PRD-exact signature):**\n```rust\npub async fn search_hybrid(\n conn: &Connection,\n client: Option<&OllamaClient>, // None if Ollama unavailable\n ollama_base_url: Option<&str>, // For actionable error messages\n query: &str,\n mode: SearchMode,\n filters: &SearchFilters,\n fts_mode: FtsQueryMode,\n) -> Result<(Vec, Vec)>\n```\n\n**IMPORTANT — client is `Option<&OllamaClient>`:** This enables graceful degradation. When Ollama is unavailable, the caller passes `None` and hybrid mode falls back to FTS-only with a warning. 
The `ollama_base_url` is separate so error messages can include it even when client is None.\n\n**Adaptive recall constants (PRD Section 5.3):**\n```rust\nconst BASE_RECALL_MIN: usize = 50;\nconst FILTERED_RECALL_MIN: usize = 200;\nconst RECALL_CAP: usize = 1500;\n```\n\n**Recall formula:**\n```rust\nlet requested = filters.clamp_limit();\nlet top_k = if filters.has_any_filter() {\n (requested * 50).max(FILTERED_RECALL_MIN).min(RECALL_CAP)\n} else {\n (requested * 10).max(BASE_RECALL_MIN).min(RECALL_CAP)\n};\n```\n\n**Mode behavior:**\n- **Lexical:** FTS only -> rank_rrf with empty vector list (single-list RRF)\n- **Semantic:** Vector only -> requires client (error if None) -> rank_rrf with empty FTS list\n- **Hybrid:** Both FTS + vector -> rank_rrf with both lists\n- **Hybrid with client=None:** Graceful degradation to Lexical with warning, NOT error\n\n**Graceful degradation logic:**\n```rust\nSearchMode::Hybrid => {\n let fts_results = search_fts(conn, query, top_k, fts_mode)?;\n let fts_tuples: Vec<_> = fts_results.iter().map(|r| (r.document_id, r.rank)).collect();\n\n match client {\n Some(client) => {\n let query_embedding = client.embed_batch(vec\\![query.to_string()]).await?;\n let embedding = query_embedding.into_iter().next().unwrap();\n let vec_results = search_vector(conn, &embedding, top_k)?;\n let vec_tuples: Vec<_> = vec_results.iter().map(|r| (r.document_id, r.distance)).collect();\n let ranked = rank_rrf(&vec_tuples, &fts_tuples);\n // ... map to HybridResult\n Ok((results, warnings))\n }\n None => {\n warnings.push(\"Ollama unavailable, falling back to lexical search\".into());\n let ranked = rank_rrf(&[], &fts_tuples);\n // ... 
map to HybridResult\n Ok((results, warnings))\n }\n }\n}\n```\n\n## Acceptance Criteria\n- [ ] Function is `async` (per PRD — Ollama client methods are async)\n- [ ] Signature takes `client: Option<&OllamaClient>` (not required)\n- [ ] Signature takes `ollama_base_url: Option<&str>` for actionable error messages\n- [ ] Returns `(Vec, Vec)` — results + warnings\n- [ ] Lexical mode: FTS-only results ranked via RRF (single list)\n- [ ] Semantic mode: vector-only results ranked via RRF; error if client is None\n- [ ] Hybrid mode: both FTS + vector results merged via RRF\n- [ ] Graceful degradation: client=None in Hybrid falls back to FTS with warning (not error)\n- [ ] Adaptive recall: unfiltered max(50, limit*10), filtered max(200, limit*50), capped 1500\n- [ ] All modes produce consistent --explain output (vector_rank, fts_rank, rrf_score)\n- [ ] SearchMode::from_str accepts aliases: \"fts\" for Lexical, \"vector\" for Semantic\n- [ ] `cargo build` succeeds\n\n## Files\n- `src/search/hybrid.rs` — new file\n- `src/search/mod.rs` — add `pub use hybrid::{search_hybrid, HybridResult, SearchMode};`\n\n## TDD Loop\nRED: Tests (some integration, some unit):\n- `test_lexical_mode` — FTS results only\n- `test_semantic_mode` — vector results only\n- `test_hybrid_mode` — both lists merged\n- `test_graceful_degradation` — None client falls back to FTS with warning in warnings vec\n- `test_adaptive_recall_unfiltered` — recall = max(50, limit*10)\n- `test_adaptive_recall_filtered` — recall = max(200, limit*50)\n- `test_recall_cap` — never exceeds 1500\n- `test_search_mode_from_str` — \"hybrid\", \"lexical\", \"fts\", \"semantic\", \"vector\", invalid\nGREEN: Implement search_hybrid\nVERIFY: `cargo test hybrid`\n\n## Edge Cases\n- Both FTS and vector return zero results: empty output (not error)\n- FTS returns results but vector returns empty: RRF still works (single-list)\n- Very high limit (100) with filters: recall = min(5000, 1500) = 1500\n- Semantic mode with client=None: 
error (OllamaUnavailable), not degradation\n- Semantic mode with 0% coverage: return LoreError::EmbeddingsNotBuilt","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-30T15:26:50.343002Z","created_by":"tayloreernisse","updated_at":"2026-01-30T17:56:16.631748Z","closed_at":"2026-01-30T17:56:16.631682Z","close_reason":"Implemented hybrid search with 3 modes (lexical/semantic/hybrid), graceful degradation when Ollama unavailable, adaptive recall (50-1500), RRF fusion. 6 tests pass.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-3eu","depends_on_id":"bd-1k1","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3eu","depends_on_id":"bd-335","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3eu","depends_on_id":"bd-3ez","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"},{"issue_id":"bd-3eu","depends_on_id":"bd-bjo","type":"blocks","created_at":"2026-02-12T19:34:39Z","created_by":"import"}]} @@ -250,7 +250,7 @@ {"id":"bd-3rl","title":"Epic: Gate C - Sync MVP","description":"## Background\nGate C adds the sync orchestrator and queue infrastructure that makes the search pipeline incremental and self-maintaining. It introduces dirty source tracking (change detection during ingestion), the discussion fetch queue, and the unified lore sync command that orchestrates the full pipeline. Gate C also adds integrity checks and repair paths.\n\n## Gate C Deliverables\n1. Orchestrated lore sync command with incremental doc regen + re-embedding\n2. Integrity checks + repair paths for FTS/embeddings consistency\n\n## Bead Dependencies (execution order, after Gate A)\n1. **bd-mem** — Shared backoff utility (no deps, shared with Gate B)\n2. **bd-38q** — Dirty source tracking (blocked by bd-36p, bd-hrs, bd-mem)\n3. **bd-1je** — Discussion queue (blocked by bd-hrs, bd-mem)\n4. 
**bd-1i2** — Integrate dirty tracking into ingestion (blocked by bd-38q)\n5. **bd-1x6** — Sync CLI (blocked by bd-38q, bd-1je, bd-1i2, bd-3qs, bd-2sx)\n\n## Acceptance Criteria\n- [ ] `lore sync` runs full pipeline: ingest -> generate-docs -> embed\n- [ ] `lore sync --full` does full re-sync + regeneration\n- [ ] `lore sync --no-embed` skips embedding stage\n- [ ] Dirty tracking: upserted entities automatically marked for regeneration\n- [ ] Queue draining: dirty_sources fully drained in bounded batch loop\n- [ ] Backoff: failed items use exponential backoff with jitter\n- [ ] `lore stats --check` detects inconsistencies\n- [ ] `lore stats --repair` fixes FTS/embedding inconsistencies","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-30T15:25:13.494698Z","created_by":"tayloreernisse","updated_at":"2026-01-30T18:05:52.121666Z","closed_at":"2026-01-30T18:05:52.121619Z","close_reason":"All Gate C sub-beads complete: backoff utility, dirty tracking, discussion queue, ingestion integration, sync CLI, stats CLI","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-3rl","depends_on_id":"bd-1x6","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3rl","depends_on_id":"bd-pr1","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-3sez","title":"Create surgical.rs core module with preflight fetch, ingest functions, and TOCTOU guards","description":"## Background\n\nThe surgical sync pipeline needs a core module (`src/ingestion/surgical.rs`) that fetches a single issue or MR by IID from GitLab and ingests it into the local SQLite database. This replaces the bulk pagination path (`ingest_issues`/`ingest_merge_requests`) for targeted, on-demand sync of specific entities.\n\nKey constraints:\n- `process_single_issue` (issues.rs:143) and `process_single_mr` (merge_requests.rs:144) are private functions. 
This bead wraps them with pub(crate) entry points that add TOCTOU guard logic and dirty marking.\n- `updated_at` is a `String` (ISO 8601) in `GitLabIssue`/`GitLabMergeRequest` but stored as `INTEGER` (ms-epoch) in the DB. The TOCTOU guard must parse the ISO string to ms-epoch for comparison.\n- `ProcessMrResult` (merge_requests.rs:138) is a private struct. The MR ingest wrapper returns its own result type or re-exports the needed fields.\n- `SyncRunRecorder` has `succeed()` and `fail()` that consume `self`. Not needed here since surgical.rs is called from the orchestrator which owns the recorder.\n\n## Approach\n\nCreate `src/ingestion/surgical.rs` with:\n\n1. **`preflight_fetch`** (async): Takes `&GitLabClient`, `gitlab_project_id`, and a list of `(entity_type, iid)` targets. Calls `client.get_issue_by_iid()` and `client.get_mr_by_iid()` (from bd-159p). Returns `PreflightResult { issues: Vec, merge_requests: Vec, failures: Vec }`.\n\n2. **`ingest_issue_by_iid`** (sync): Takes `&Connection`, `&Config`, `project_id`, `&GitLabIssue`. Applies TOCTOU guard (compare payload `updated_at` parsed to ms-epoch vs DB `updated_at`), then calls `process_single_issue` (requires making it `pub(crate)` in bd-1sc6), marks dirty via `dirty_tracker::mark_dirty(conn, SourceType::Issue, local_issue_id)`, and returns `IngestIssueResult { upserted: bool, labels_created: usize, skipped_stale: bool, dirty_source_keys: Vec<(SourceType, i64)> }`.\n\n3. **`ingest_mr_by_iid`** (sync): Same pattern for MRs. Calls `process_single_mr` (requires `pub(crate)` in bd-1sc6), returns `IngestMrResult { upserted: bool, labels_created: usize, assignees_linked: usize, reviewers_linked: usize, skipped_stale: bool, dirty_source_keys: Vec<(SourceType, i64)> }`.\n\n4. **TOCTOU guard**: `fn is_stale(payload_updated_at: &str, db_updated_at_ms: Option) -> Result`. Parses ISO 8601 string to ms-epoch using `chrono::DateTime::parse_from_rfc3339`. 
Returns `true` if `payload_ms <= db_ms` (payload is same age or older than what we already have).\n\nWire the module in `src/ingestion/mod.rs`.\n\n## Acceptance Criteria\n\n- [ ] `preflight_fetch` calls GitLabClient by-IID methods and collects successes + failures\n- [ ] `ingest_issue_by_iid` wraps `process_single_issue` with TOCTOU guard and dirty marking\n- [ ] `ingest_mr_by_iid` wraps `process_single_mr` with TOCTOU guard and dirty marking\n- [ ] TOCTOU guard correctly parses ISO 8601 String to ms-epoch for comparison with DB i64\n- [ ] Stale payloads (payload updated_at <= DB updated_at) are skipped, not ingested\n- [ ] `dirty_source_keys` returned include the `(SourceType, source_id)` tuples for downstream scoped doc regen\n- [ ] Module registered in `src/ingestion/mod.rs`\n- [ ] All tests from bd-x8oq pass\n\n## Files\n\n- `src/ingestion/surgical.rs` (NEW)\n- `src/ingestion/mod.rs` (add `pub(crate) mod surgical;`)\n- `src/ingestion/issues.rs` (change `process_single_issue` to `pub(crate)` — done in bd-1sc6)\n- `src/ingestion/merge_requests.rs` (change `process_single_mr` and `ProcessMrResult` to `pub(crate)` — done in bd-1sc6)\n\n## TDD Anchor\n\nTests live in bd-x8oq (`src/ingestion/surgical_tests.rs`), referenced via `#[cfg(test)] #[path = \"surgical_tests.rs\"] mod tests;` in surgical.rs. 
Key tests that validate this bead:\n\n- `test_ingest_issue_by_iid_upserts_and_marks_dirty` — verifies full issue ingest path + dirty marking\n- `test_ingest_mr_by_iid_upserts_and_marks_dirty` — verifies full MR ingest path + dirty marking\n- `test_toctou_skips_stale_issue` — inserts issue with updated_at=T1, calls ingest with payload updated_at=T1, asserts skipped_stale=true\n- `test_toctou_skips_stale_mr` — same for MRs\n- `test_toctou_allows_newer_issue` — payload T2 > DB T1, asserts upserted=true\n- `test_is_stale_parses_iso8601` — unit test for the ISO 8601 to ms-epoch parsing\n- `test_is_stale_handles_none_db_value` — first ingest (no existing row), should return false (not stale)\n- `test_preflight_fetch_returns_issues_and_mrs` — wiremock test for successful preflight\n- `test_preflight_fetch_collects_failures` — wiremock 404 returns failure, not error\n\n## Edge Cases\n\n- ISO 8601 with timezone offset (GitLab returns `+00:00` not `Z`) must parse correctly\n- First-ever ingest of an IID: no existing DB row, TOCTOU guard must treat as \"not stale\" (db_updated_at is None)\n- GitLab returns 404 for a deleted issue/MR during preflight: failure, not hard error\n- Concurrent surgical syncs for same IID: `process_single_issue` uses `unchecked_transaction()` with UPSERT, so last-writer-wins is safe\n- `process_single_mr` returns `ProcessMrResult` which is private: either make it `pub(crate)` in bd-1sc6 or replicate needed fields\n\n## Dependency Context\n\n- **Blocked by bd-159p**: `get_issue_by_iid` and `get_mr_by_iid` on GitLabClient (preflight needs these)\n- **Blocked by bd-1sc6**: Visibility changes to `process_single_issue`, `process_single_mr`, `ProcessMrResult` (must be `pub(crate)`)\n- **Blocks bd-1i4i**: Orchestration function calls `preflight_fetch` + `ingest_issue_by_iid` / `ingest_mr_by_iid`\n- **Blocks bd-kanh**: Dependent helpers are called after ingest to fetch discussions, resource events, etc.\n- **Blocks bd-wcja**: SyncResult surgical fields 
depend on return types from this module\n- **Co-depends with bd-x8oq**: Tests for this code live in that bead's test file","status":"open","priority":2,"issue_type":"task","created_at":"2026-02-17T19:14:19.449695Z","created_by":"tayloreernisse","updated_at":"2026-02-17T20:02:01.692160Z","compaction_level":0,"original_size":0,"labels":["surgical-sync"],"dependencies":[{"issue_id":"bd-3sez","depends_on_id":"bd-1i4i","type":"blocks","created_at":"2026-02-18T17:42:00Z","created_by":"import"},{"issue_id":"bd-3sez","depends_on_id":"bd-3jqx","type":"blocks","created_at":"2026-02-18T17:42:00Z","created_by":"import"},{"issue_id":"bd-3sez","depends_on_id":"bd-kanh","type":"blocks","created_at":"2026-02-18T17:42:00Z","created_by":"import"},{"issue_id":"bd-3sez","depends_on_id":"bd-wcja","type":"blocks","created_at":"2026-02-18T17:42:00Z","created_by":"import"},{"issue_id":"bd-3sez","depends_on_id":"bd-x8oq","type":"blocks","created_at":"2026-02-18T17:42:00Z","created_by":"import"}]} {"id":"bd-3sh","title":"Add 'lore count events' command with robot mode","description":"## Background\nNeed to verify event ingestion and report counts by type. The existing count command (src/cli/commands/count.rs) handles issues, mrs, discussions, notes with both human and robot output. This adds 'events' as a new count subcommand.\n\n## Approach\nExtend the existing count command in src/cli/commands/count.rs:\n\n1. Add CountTarget::Events variant (or string match) in the count dispatcher\n2. Query each event table with GROUP BY entity type:\n```sql\nSELECT \n CASE WHEN issue_id IS NOT NULL THEN 'issue' ELSE 'merge_request' END as entity_type,\n COUNT(*) as count\nFROM resource_state_events\nGROUP BY entity_type;\n-- (repeat for label and milestone events)\n```\n\n3. Human output: table format\n```\nEvent Type Issues MRs Total\nState events 1,234 567 1,801\nLabel events 2,345 890 3,235\nMilestone events 456 123 579\nTotal 4,035 1,580 5,615\n```\n\n4. 
Robot JSON:\n```json\n{\n \"ok\": true,\n \"data\": {\n \"state_events\": {\"issue\": 1234, \"merge_request\": 567, \"total\": 1801},\n \"label_events\": {\"issue\": 2345, \"merge_request\": 890, \"total\": 3235},\n \"milestone_events\": {\"issue\": 456, \"merge_request\": 123, \"total\": 579},\n \"total\": 5615\n }\n}\n```\n\n5. Register in CLI: add \"events\" to count's entity_type argument in src/cli/mod.rs\n\n## Acceptance Criteria\n- [ ] `lore count events` shows correct counts by event type and entity type\n- [ ] Robot JSON matches the schema above\n- [ ] Works with empty tables (all zeros)\n- [ ] Does not error if migration 011 hasn't been applied (graceful degradation or \"no event tables\" message)\n\n## Files\n- src/cli/commands/count.rs (add events counting logic)\n- src/cli/mod.rs (add \"events\" to count's accepted entity types)\n\n## TDD Loop\nRED: tests/count_tests.rs (or extend existing):\n- `test_count_events_empty_tables` - verify all zeros on fresh DB\n- `test_count_events_with_data` - seed state + label events, verify correct counts\n- `test_count_events_robot_json` - verify JSON structure\n\nGREEN: Add the events branch to count command\n\nVERIFY: `cargo test count -- --nocapture`\n\n## Edge Cases\n- Tables don't exist if user hasn't run migrate — check table existence first or catch the error\n- COUNT with GROUP BY returns no rows for empty tables — need to handle missing entity types as 0","status":"closed","priority":3,"issue_type":"task","created_at":"2026-02-02T21:31:57.379702Z","created_by":"tayloreernisse","updated_at":"2026-02-03T16:21:21.408874Z","closed_at":"2026-02-03T16:21:21.408806Z","close_reason":"Added 'events' to count CLI parser, run_count_events function, print_event_count (table format) and print_event_count_json (structured JSON). 
Wired into handle_count in main.rs.","compaction_level":0,"original_size":0,"labels":["cli","gate-1","phase-b"],"dependencies":[{"issue_id":"bd-3sh","depends_on_id":"bd-2zl","type":"parent-child","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3sh","depends_on_id":"bd-hu3","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} -{"id":"bd-3t1b","title":"Implement MR Detail (state + action + view)","description":"## Background\nThe MR Detail shows a single merge request with file changes, diff discussions (position-specific comments), and general discussions. Same progressive hydration pattern as Issue Detail. MR detail has additional sections: file change list and diff-context notes.\n\n## Approach\nState (state/mr_detail.rs):\n- MrDetailState: current_key (Option), metadata (Option), discussions (Vec), diff_discussions (Vec), file_changes (Vec), cross_refs (Vec), tree_state (TreePersistState), scroll_offset, active_tab (MrTab: Overview|Files|Discussions)\n- MrMetadata: iid, title, description, state, author, reviewer, assignee, labels, target_branch, source_branch, created_at, updated_at, web_url, draft, merge_status\n- FileChange: old_path, new_path, change_type (added/modified/deleted/renamed), diff_line_count\n- DiffDiscussion: file_path, old_line, new_line, notes (Vec)\n\nAction (action.rs):\n- fetch_mr_detail(conn, key, clock) -> Result: uses with_read_snapshot\n\nView (view/mr_detail.rs):\n- render_mr_detail(frame, state, area, theme): header, tab bar (Overview|Files|Discussions), tab content\n- Overview tab: description + cross-refs\n- Files tab: file change list with change type indicators (+/-/~)\n- Discussions tab: general discussions + diff discussions grouped by file\n\n## Acceptance Criteria\n- [ ] MR metadata loads in Phase 1\n- [ ] Tab navigation between Overview, Files, Discussions\n- [ ] File changes list shows change type and line count\n- [ ] Diff discussions grouped by file path\n- [ ] General 
discussions rendered in tree widget\n- [ ] Cross-references navigable (related issues, etc.)\n- [ ] All text sanitized via sanitize_for_terminal()\n- [ ] Esc returns to MR List with state preserved\n\n## Files\n- MODIFY: crates/lore-tui/src/state/mr_detail.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_mr_detail)\n- CREATE: crates/lore-tui/src/view/mr_detail.rs\n\n## TDD Anchor\nRED: Write test_fetch_mr_detail in action.rs that inserts an MR with 3 file changes, calls fetch_mr_detail, asserts 3 files returned.\nGREEN: Implement fetch_mr_detail with file change query.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fetch_mr_detail\n\n## Edge Cases\n- MR with no file changes (draft MR created without pushes): show \"No file changes\" message\n- Diff discussions referencing deleted files: show file path with strikethrough style\n- Very large MRs (hundreds of files): paginate file list, don't load all at once\n\n## Dependency Context\nUses discussion tree and cross-ref widgets from \"Implement discussion tree + cross-reference widgets\" task.\nUses same patterns as \"Implement Issue Detail\" task.\nUses MrDetailState from \"Implement AppState composition\" task.","status":"open","priority":2,"issue_type":"task","created_at":"2026-02-12T16:59:38.427124Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:28.423643Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-3t1b","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3t1b","depends_on_id":"bd-1d6z","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3t1b","depends_on_id":"bd-2kr0","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} +{"id":"bd-3t1b","title":"Implement MR Detail (state + action + view)","description":"## Background\nThe MR Detail shows a single merge request with file changes, 
diff discussions (position-specific comments), and general discussions. Same progressive hydration pattern as Issue Detail. MR detail has additional sections: file change list and diff-context notes.\n\n## Approach\nState (state/mr_detail.rs):\n- MrDetailState: current_key (Option), metadata (Option), discussions (Vec), diff_discussions (Vec), file_changes (Vec), cross_refs (Vec), tree_state (TreePersistState), scroll_offset, active_tab (MrTab: Overview|Files|Discussions)\n- MrMetadata: iid, title, description, state, author, reviewer, assignee, labels, target_branch, source_branch, created_at, updated_at, web_url, draft, merge_status\n- FileChange: old_path, new_path, change_type (added/modified/deleted/renamed), diff_line_count\n- DiffDiscussion: file_path, old_line, new_line, notes (Vec)\n\nAction (action.rs):\n- fetch_mr_detail(conn, key, clock) -> Result: uses with_read_snapshot\n\nView (view/mr_detail.rs):\n- render_mr_detail(frame, state, area, theme): header, tab bar (Overview|Files|Discussions), tab content\n- Overview tab: description + cross-refs\n- Files tab: file change list with change type indicators (+/-/~)\n- Discussions tab: general discussions + diff discussions grouped by file\n\n## Acceptance Criteria\n- [ ] MR metadata loads in Phase 1\n- [ ] Tab navigation between Overview, Files, Discussions\n- [ ] File changes list shows change type and line count\n- [ ] Diff discussions grouped by file path\n- [ ] General discussions rendered in tree widget\n- [ ] Cross-references navigable (related issues, etc.)\n- [ ] All text sanitized via sanitize_for_terminal()\n- [ ] Esc returns to MR List with state preserved\n\n## Files\n- MODIFY: crates/lore-tui/src/state/mr_detail.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_mr_detail)\n- CREATE: crates/lore-tui/src/view/mr_detail.rs\n\n## TDD Anchor\nRED: Write test_fetch_mr_detail in action.rs that inserts an MR with 3 file changes, calls fetch_mr_detail, asserts 3 files 
returned.\nGREEN: Implement fetch_mr_detail with file change query.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fetch_mr_detail\n\n## Edge Cases\n- MR with no file changes (draft MR created without pushes): show \"No file changes\" message\n- Diff discussions referencing deleted files: show file path with strikethrough style\n- Very large MRs (hundreds of files): paginate file list, don't load all at once\n\n## Dependency Context\nUses discussion tree and cross-ref widgets from \"Implement discussion tree + cross-reference widgets\" task.\nUses same patterns as \"Implement Issue Detail\" task.\nUses MrDetailState from \"Implement AppState composition\" task.","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:59:38.427124Z","created_by":"tayloreernisse","updated_at":"2026-02-18T20:36:38.457188Z","closed_at":"2026-02-18T20:36:38.457090Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-3t1b","depends_on_id":"bd-1d6z","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3t1b","depends_on_id":"bd-2kr0","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-3t6r","title":"Epic: TUI Phase 5 — Polish","description":"## Background\nPhase 5 adds polish features: responsive breakpoints for all screens, session state persistence (resume where you left off), single-instance locking, entity/render caches for performance, text width handling for Unicode, snapshot tests, and terminal compatibility test matrix.\n\n## Acceptance Criteria\n- [ ] All screens adapt to terminal width with responsive breakpoints\n- [ ] Session state persisted and restored on relaunch\n- [ ] Single-instance lock prevents concurrent TUI launches\n- [ ] Entity cache enables near-instant detail view reopens\n- [ ] Snapshot tests produce deterministic output with FakeClock\n- [ ] Terminal compat verified across iTerm2, tmux, Alacritty, 
kitty","status":"open","priority":1,"issue_type":"epic","created_at":"2026-02-12T17:02:47.178645Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:51.435708Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-3t6r","depends_on_id":"bd-1df9","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-3ty8","title":"Implement Bootstrap screen + schema preflight","description":"## Background\nThe Bootstrap screen handles first-launch and incompatible-database scenarios. Before entering the TUI event loop, a schema preflight check validates the database is compatible. If not, an actionable error is shown. The Bootstrap screen also guides users through initial sync if the database is empty.\n\n## Approach\n- Schema preflight in lib.rs: check schema version before creating LoreApp. If incompatible, print error with lore migrate suggestion and exit non-zero.\n- Bootstrap screen (Screen::Bootstrap): shown when database has zero issues/MRs. Shows: \"No data found. 
Run sync to get started.\" with option to start sync inline.\n- State: BootstrapState { has_data: bool, schema_ok: bool, config_valid: bool }\n- Action: check_data_readiness(conn) -> DataReadiness { has_issues: bool, has_mrs: bool, has_documents: bool, schema_version: i32 }\n\n## Acceptance Criteria\n- [ ] Schema preflight yields actionable error for incompatible DB versions\n- [ ] Bootstrap screen shown when database is empty\n- [ ] Bootstrap guides user to start sync\n- [ ] After sync completes, Bootstrap auto-transitions to Dashboard\n- [ ] Non-zero exit code on schema incompatibility\n\n## Files\n- CREATE: crates/lore-tui/src/state/bootstrap.rs\n- CREATE: crates/lore-tui/src/view/bootstrap.rs\n- MODIFY: crates/lore-tui/src/lib.rs (add schema preflight check)\n- MODIFY: crates/lore-tui/src/action.rs (add check_data_readiness)\n\n## TDD Anchor\nRED: Write test_schema_preflight_rejects_old that creates DB at schema version 1, asserts preflight returns error.\nGREEN: Implement schema version check.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_schema_preflight\n\n## Edge Cases\n- Database file doesn't exist: create it, then show Bootstrap\n- Database locked by another process: show DbBusy error with suggestion\n- Config file missing: show error with lore init suggestion","status":"open","priority":2,"issue_type":"task","created_at":"2026-02-12T17:00:02.185699Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:28.671769Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-3ty8","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3ty8","depends_on_id":"bd-6pmy","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-3vqk","title":"OBSERV: Add rate_limit_hits and retries counters to StageTiming","description":"## Background\nMetricsLayer counts span timing but doesn't yet count rate-limit hits and 
retries. These counters complete the observability picture, showing HOW MUCH time was spent waiting vs. working.\n\n## Approach\n### src/core/metrics.rs - StageTiming struct\n\nAdd two new fields:\n```rust\n#[derive(Debug, Clone, Serialize)]\npub struct StageTiming {\n // ... existing fields ...\n #[serde(skip_serializing_if = \"is_zero\")]\n pub rate_limit_hits: usize,\n #[serde(skip_serializing_if = \"is_zero\")]\n pub retries: usize,\n}\n```\n\n### src/core/metrics.rs - MetricsLayer\n\nThe structured log events from bd-12ae use info!() with specific fields (status_code=429, \"Rate limited, retrying\"). MetricsLayer needs to count these events within each span.\n\nAdd to SpanData:\n```rust\nstruct SpanData {\n // ... existing fields ...\n rate_limit_hits: usize,\n retries: usize,\n}\n```\n\nAdd on_event() to MetricsLayer:\n```rust\nfn on_event(&self, event: &tracing::Event<'_>, ctx: Context<'_, S>) {\n // Check if event message contains rate-limit or retry indicators\n // Increment counters on the current span\n if let Some(span_ref) = ctx.event_span(event) {\n let id = span_ref.id();\n if let Some(data) = self.spans.lock().unwrap().get_mut(&id.into_u64()) {\n let mut visitor = EventVisitor::default();\n event.record(&mut visitor);\n\n if visitor.status_code == Some(429) {\n data.rate_limit_hits += 1;\n }\n if visitor.is_retry {\n data.retries += 1;\n }\n }\n }\n}\n```\n\nThe EventVisitor checks for status_code=429 and message containing \"retrying\" to classify events.\n\nOn span close, propagate counts to parent (bubble up):\n```rust\nfn on_close(&self, id: Id, _ctx: Context<'_, S>) {\n if let Some(data) = self.spans.lock().unwrap().remove(&id.into_u64()) {\n let timing = StageTiming {\n // ... existing fields ...\n rate_limit_hits: data.rate_limit_hits,\n retries: data.retries,\n };\n // ... 
push to completed\n }\n}\n```\n\n## Acceptance Criteria\n- [ ] StageTiming has rate_limit_hits and retries fields\n- [ ] Fields omitted when zero in JSON serialization\n- [ ] MetricsLayer counts 429 events as rate_limit_hits\n- [ ] MetricsLayer counts retry events as retries\n- [ ] Counts bubble up to parent spans in extract_timings()\n- [ ] Rate limit counts appear in metrics_json stored in sync_runs\n- [ ] cargo clippy --all-targets -- -D warnings passes\n\n## Files\n- src/core/metrics.rs (add fields to StageTiming, add on_event to MetricsLayer, add EventVisitor)\n\n## TDD Loop\nRED:\n - test_stage_timing_rate_limit_counts: simulate 3 rate-limit events, extract, assert rate_limit_hits=3\n - test_stage_timing_retry_counts: simulate 2 retries, extract, assert retries=2\n - test_rate_limit_fields_omitted_when_zero: StageTiming with zero counts, serialize, assert no keys\nGREEN: Add fields to StageTiming, implement on_event in MetricsLayer\nVERIFY: cargo test && cargo clippy --all-targets -- -D warnings\n\n## Edge Cases\n- Events outside any span: ctx.event_span() returns None. Skip counting. This shouldn't happen in practice since all GitLab calls happen within stage spans.\n- Event classification: rely on structured fields (status_code=429) not message text. More reliable and less fragile.\n- Count bubbling: parent stage should aggregate child counts. 
In extract_timings(), sum children's counts into parent.","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-04T15:55:02.523778Z","created_by":"tayloreernisse","updated_at":"2026-02-04T17:25:25.456758Z","closed_at":"2026-02-04T17:25:25.456708Z","close_reason":"Implemented rate_limit_hits and retries counters in StageTiming with skip_serializing_if for zero values","compaction_level":0,"original_size":0,"labels":["observability"],"dependencies":[{"issue_id":"bd-3vqk","depends_on_id":"bd-12ae","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3vqk","depends_on_id":"bd-1o4h","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-3vqk","depends_on_id":"bd-3pk","type":"parent-child","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} @@ -259,7 +259,7 @@ {"id":"bd-5ta","title":"Add GitLab MR types to types.rs","description":"## Background\nGitLab API types for merge requests. These structs define how we deserialize GitLab API responses. 
Must handle deprecated field aliases for backward compatibility with older GitLab instances.\n\n## Approach\nAdd new structs to `src/gitlab/types.rs`:\n- `GitLabMergeRequest` - Main MR struct with all fields\n- `GitLabReviewer` - Reviewer with optional approval state\n- `GitLabReferences` - Short and full reference strings\n\nUse serde `#[serde(alias = \"...\")]` for deprecated field fallbacks.\n\n## Files\n- `src/gitlab/types.rs` - Add new structs after existing GitLabIssue\n- `tests/fixtures/gitlab_merge_request.json` - Test fixture\n\n## Acceptance Criteria\n- [ ] `GitLabMergeRequest` struct exists with all fields from PRD\n- [ ] `detailed_merge_status` field exists (non-deprecated)\n- [ ] `#[serde(alias = \"merge_status\")]` on `merge_status_legacy` for fallback\n- [ ] `merge_user` field exists (non-deprecated)\n- [ ] `merged_by` field exists for fallback\n- [ ] `draft` and `work_in_progress` both exist (draft preferred, WIP fallback)\n- [ ] `sha` field maps to `head_sha` in transformer\n- [ ] `references: Option` for short/full refs\n- [ ] `state: String` supports \"opened\", \"merged\", \"closed\", \"locked\"\n- [ ] Fixture deserializes without error\n- [ ] `cargo test` passes\n\n## TDD Loop\nRED: Add test that deserializes fixture -> struct not found\nGREEN: Add GitLabMergeRequest, GitLabReviewer, GitLabReferences structs\nVERIFY: `cargo test gitlab_types`\n\n## Struct Definitions (from PRD)\n```rust\n#[derive(Debug, Clone, Deserialize)]\npub struct GitLabMergeRequest {\n pub id: i64,\n pub iid: i64,\n pub project_id: i64,\n pub title: String,\n pub description: Option,\n pub state: String, // \"opened\" | \"merged\" | \"closed\" | \"locked\"\n #[serde(default)]\n pub draft: bool,\n #[serde(default)]\n pub work_in_progress: bool, // Deprecated fallback\n pub source_branch: String,\n pub target_branch: String,\n pub sha: Option, // head_sha\n pub references: Option,\n pub detailed_merge_status: Option,\n #[serde(alias = \"merge_status\")]\n pub 
merge_status_legacy: Option,\n pub created_at: String,\n pub updated_at: String,\n pub merged_at: Option,\n pub closed_at: Option,\n pub author: GitLabAuthor,\n pub merge_user: Option,\n pub merged_by: Option,\n #[serde(default)]\n pub labels: Vec,\n #[serde(default)]\n pub assignees: Vec,\n #[serde(default)]\n pub reviewers: Vec,\n pub web_url: String,\n}\n\n#[derive(Debug, Clone, Deserialize)]\npub struct GitLabReferences {\n pub short: String, // e.g. \"\\!123\"\n pub full: String, // e.g. \"group/project\\!123\"\n}\n\n#[derive(Debug, Clone, Deserialize)]\npub struct GitLabReviewer {\n pub id: i64,\n pub username: String,\n pub name: String,\n}\n```\n\n## Test Fixture (create tests/fixtures/gitlab_merge_request.json)\n```json\n{\n \"id\": 12345,\n \"iid\": 42,\n \"project_id\": 100,\n \"title\": \"Add user authentication\",\n \"description\": \"Implements JWT auth flow\",\n \"state\": \"merged\",\n \"draft\": false,\n \"work_in_progress\": false,\n \"source_branch\": \"feature/auth\",\n \"target_branch\": \"main\",\n \"sha\": \"abc123def456\",\n \"references\": { \"short\": \"\\!42\", \"full\": \"group/project\\!42\" },\n \"detailed_merge_status\": \"mergeable\",\n \"merge_status\": \"can_be_merged\",\n \"created_at\": \"2024-01-15T10:00:00Z\",\n \"updated_at\": \"2024-01-20T14:30:00Z\",\n \"merged_at\": \"2024-01-20T14:30:00Z\",\n \"closed_at\": null,\n \"author\": { \"id\": 1, \"username\": \"johndoe\", \"name\": \"John Doe\" },\n \"merge_user\": { \"id\": 2, \"username\": \"janedoe\", \"name\": \"Jane Doe\" },\n \"merged_by\": { \"id\": 2, \"username\": \"janedoe\", \"name\": \"Jane Doe\" },\n \"labels\": [\"enhancement\", \"auth\"],\n \"assignees\": [{ \"id\": 3, \"username\": \"bob\", \"name\": \"Bob Smith\" }],\n \"reviewers\": [{ \"id\": 4, \"username\": \"alice\", \"name\": \"Alice Wong\" }],\n \"web_url\": \"https://gitlab.example.com/group/project/-/merge_requests/42\"\n}\n```\n\n## Edge Cases\n- `locked` state is transitional (merge in progress) - 
rare but valid\n- Some older instances may not return `detailed_merge_status`\n- Some older instances may not return `merge_user` (use `merged_by` fallback)\n- `work_in_progress` is deprecated but still returned by some instances","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-26T22:06:40.498088Z","created_by":"tayloreernisse","updated_at":"2026-01-27T00:08:35.520229Z","closed_at":"2026-01-27T00:08:35.520167Z","close_reason":"Added GitLabMergeRequest, GitLabReviewer, GitLabReferences structs. Updated GitLabNotePosition with position_type, line_range, and SHA triplet fields. All 23 type tests passing.","compaction_level":0,"original_size":0,"dependencies":[{"issue_id":"bd-5ta","depends_on_id":"bd-3ir","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-6pmy","title":"Implement LoreApp Model trait (full update/view skeleton)","description":"## Background\nLoreApp is the central Model implementation for FrankenTUI's Elm Architecture. It owns all state (AppState), the navigation stack, task supervisor, db manager, clock, config, and crash context. The update() method is the single entry point for all state transitions, implementing a 5-stage key dispatch pipeline. The view() method routes to per-screen render functions.\n\n## Approach\nExpand crates/lore-tui/src/app.rs:\n- LoreApp struct fields: config (Config), db (DbManager), state (AppState), navigation (NavigationStack), supervisor (TaskSupervisor), clock (Box), input_mode (InputMode), command_registry (CommandRegistry), crash_context (CrashContext)\n- init() -> Cmd: install crash_context panic hook, return Cmd::task that loads dashboard data\n- update(msg: Msg) -> Option>: push CrashEvent to crash_context FIRST, then full dispatch with 5-stage interpret_key pipeline:\n 1. Quit check (q in Normal mode, Ctrl+C always)\n 2. InputMode routing (Text->delegate to text widget, Palette->delegate to palette, GoPrefix->check timeout+destination)\n 3. 
Global shortcuts (H=Home, Esc=back, Ctrl+P=palette, g=prefix, Ctrl+O/I=jump)\n 4. Screen-local keys (delegate to AppState::interpret_screen_key)\n 5. Fallback (unhandled key, no-op)\n\n**Key normalization pass in interpret_key():**\nBefore the 5-stage pipeline, normalize terminal key variants:\n- Backspace variants: map Delete/Backspace to canonical Backspace\n- Alt key variants: map Meta+key to Alt+key\n- Shift+Tab: map BackTab to Shift+Tab\n- This ensures consistent behavior across terminals (iTerm2, Alacritty, Terminal.app, tmux)\n\n- For non-key messages: match on Msg variants, update state, optionally return Cmd::task for async work\n- Stale result guard: check supervisor.is_current() before applying *Loaded results\n- view(frame): match navigation.current() to dispatch to per-screen view functions (stub initially)\n- subscriptions(): tick timer (250ms for spinner animation), debounce timers\n\n## Acceptance Criteria\n- [ ] LoreApp struct compiles with all required fields including crash_context\n- [ ] init() installs panic hook and returns a Cmd that triggers dashboard load\n- [ ] update() pushes CrashEvent to crash_context before dispatching\n- [ ] update() handles Msg::Quit by returning None\n- [ ] update() handles NavigateTo by pushing nav stack and spawning load_screen\n- [ ] update() handles GoBack by popping nav stack\n- [ ] interpret_key normalizes Backspace/Alt/Shift+Tab variants before dispatch\n- [ ] interpret_key 5-stage pipeline dispatches correctly per InputMode\n- [ ] GoPrefix times out after 500ms (checked via clock.now())\n- [ ] Stale results dropped: IssueListLoaded with old generation ignored\n- [ ] view() routes to correct screen render function based on navigation.current()\n- [ ] subscriptions() returns tick timer\n\n## Files\n- MODIFY: crates/lore-tui/src/app.rs (expand from minimal to full implementation)\n\n## TDD Anchor\nRED: Write test_quit_returns_none that creates LoreApp (with FakeClock, in-memory DB), calls update(Msg::Quit), 
asserts it returns None.\nGREEN: Implement update() with Quit match arm.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_quit\n\nAdditional tests:\n- test_navigate_to_pushes_stack: update(NavigateTo(IssueList)) changes navigation.current()\n- test_go_back_pops_stack: after push, GoBack returns to previous screen\n- test_stale_result_dropped: IssueListLoaded with old generation doesn't update state\n- test_go_prefix_timeout: GoPrefix cancels after 500ms (using FakeClock)\n- test_key_normalization_backspace: both Delete and Backspace map to canonical Backspace\n- test_crash_context_records_events: after update(), crash_context.events.len() increases\n\n## Edge Cases\n- update() must handle rapid-fire messages without blocking (no long computations in update)\n- Ctrl+C must always quit regardless of InputMode (safety escape)\n- GoPrefix must cancel on any non-destination key, not just on timeout\n- Text mode must pass Esc through to blur text input first, then Normal mode handles Esc for navigation\n- Key normalization must handle unknown/exotic key codes gracefully (pass through unchanged)\n\n## Dependency Context\nUses DbManager from \"Implement DbManager\" (bd-2kop).\nUses Clock/FakeClock from \"Implement Clock trait\" (bd-2lg6).\nUses Msg, Screen, InputMode from \"Implement core types\" (bd-c9gk).\nUses NavigationStack from \"Implement NavigationStack\" (bd-1qpp).\nUses TaskSupervisor from \"Implement TaskSupervisor\" (bd-3le2).\nUses CrashContext from \"Implement crash_context ring buffer\" (bd-2fr7).\nUses CommandRegistry from \"Implement CommandRegistry\" (bd-38lb).\nUses AppState from \"Implement AppState composition\" (bd-1v9m).","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:55:27.130909Z","created_by":"tayloreernisse","updated_at":"2026-02-12T20:52:30.228655Z","closed_at":"2026-02-12T20:52:30.228596Z","close_reason":"LoreApp full Model impl: struct with all fields, 5-stage key dispatch, navigate_to, 
handle_msg with stale guard, 22 app tests. Fixed crossterm→ftui type migration.","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-6pmy","depends_on_id":"bd-1qpp","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-6pmy","depends_on_id":"bd-1v9m","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-6pmy","depends_on_id":"bd-2emv","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-6pmy","depends_on_id":"bd-2fr7","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-6pmy","depends_on_id":"bd-2kop","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-6pmy","depends_on_id":"bd-2lg6","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-6pmy","depends_on_id":"bd-38lb","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-6pmy","depends_on_id":"bd-3le2","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-88m","title":"[CP1] Issue ingestion module","description":"Fetch and store issues with cursor-based incremental sync.\n\n## Module\nsrc/ingestion/issues.rs\n\n## Key Structs\n\n### IngestIssuesResult\n- fetched: usize\n- upserted: usize\n- labels_created: usize\n- issues_needing_discussion_sync: Vec\n\n### IssueForDiscussionSync\n- local_issue_id: i64\n- iid: i64\n- updated_at: i64\n\n## Main Function\npub async fn ingest_issues(conn, client, config, project_id, gitlab_project_id) -> Result\n\n## Logic\n1. Get current cursor from sync_cursors (updated_at_cursor, tie_breaker_id)\n2. Paginate through issues updated after cursor with cursor_rewind_seconds\n3. Apply local filtering for tuple cursor semantics:\n - Skip if issue.updated_at < cursor_updated_at\n - Skip if issue.updated_at == cursor_updated_at AND issue.id <= cursor_gitlab_id\n4. 
For each issue passing filter:\n - Begin transaction\n - Store raw payload (compressed)\n - Transform and upsert issue\n - Clear existing label links (DELETE FROM issue_labels)\n - Extract and upsert labels\n - Link issue to labels via junction\n - Commit transaction\n - Track for discussion sync eligibility\n5. Incremental cursor update every 100 issues\n6. Final cursor update\n7. Determine issues needing discussion sync: where updated_at > discussions_synced_for_updated_at\n\n## Helper Functions\n- get_cursor(conn, project_id) -> (Option, Option)\n- get_discussions_synced_at(conn, issue_id) -> Option\n- upsert_issue(conn, issue, payload_id) -> usize\n- get_local_issue_id(conn, gitlab_id) -> i64\n- clear_issue_labels(conn, issue_id)\n- upsert_label(conn, label) -> bool\n- get_label_id(conn, project_id, name) -> i64\n- link_issue_label(conn, issue_id, label_id)\n- update_cursor(conn, project_id, resource_type, updated_at, gitlab_id)\n\nFiles: src/ingestion/mod.rs, src/ingestion/issues.rs\nTests: tests/issue_ingestion_tests.rs\nDone when: Issues, labels, issue_labels populated correctly with resumable cursor","status":"tombstone","priority":2,"issue_type":"task","created_at":"2026-01-25T16:57:35.655708Z","created_by":"tayloreernisse","updated_at":"2026-01-25T17:02:01.806982Z","closed_at":"2026-01-25T17:02:01.806982Z","deleted_at":"2026-01-25T17:02:01.806977Z","deleted_by":"tayloreernisse","delete_reason":"recreating with correct deps","original_type":"task","compaction_level":0,"original_size":0} -{"id":"bd-8ab7","title":"Implement Issue Detail (state + action + view)","description":"## Background\nThe Issue Detail screen shows a single issue with progressive hydration: Phase 1 loads metadata (fast), Phase 2 loads discussions asynchronously, Phase 3 loads thread bodies on expand. 
All subqueries run inside a single read transaction for snapshot consistency.\n\n## Approach\nState (state/issue_detail.rs):\n- IssueDetailState: current_key (Option), metadata (Option), discussions (Vec), discussions_loaded (bool), cross_refs (Vec), tree_state (TreePersistState), scroll_offset (usize)\n- IssueMetadata: iid, title, description, state, author, assignee, labels, milestone, created_at, updated_at, web_url, status_name, status_icon, closing_mr_iids, related_issue_iids\n- handle_key(): j/k scroll, Enter expand discussion thread, d open description, x cross-refs, o open in browser, t scoped timeline, Esc back to list\n\nAction (action.rs):\n- fetch_issue_detail(conn, key, clock) -> Result: uses with_read_snapshot for snapshot consistency. Fetches metadata, discussion count, cross-refs in single transaction.\n- fetch_discussions(conn, key) -> Result, LoreError>: loads discussions for the issue, separate async call (Phase 2 of hydration)\n\nView (view/issue_detail.rs):\n- render_issue_detail(frame, state, area, theme): header (IID, title, state badge, labels), description (markdown rendered with sanitization), discussions (tree widget), cross-references section\n- Header: \"Issue #42 — Fix auth flow [opened]\" with colored state badge\n- Description: rendered markdown, scrollable\n- Discussions: loaded async, shown with spinner until ready\n- Cross-refs: closing MRs, related issues as navigable links\n\n## Acceptance Criteria\n- [ ] Metadata loads in Phase 1 (p95 < 75ms on M-tier)\n- [ ] Discussions load async in Phase 2 (spinner shown while loading)\n- [ ] All detail subqueries run inside single read transaction (snapshot consistency)\n- [ ] Description text sanitized via sanitize_for_terminal()\n- [ ] Discussion tree renders with expand/collapse\n- [ ] Cross-references navigable via Enter\n- [ ] Esc returns to Issue List with cursor position preserved\n- [ ] Open in browser (o) uses classify_safe_url before launching\n- [ ] Scoped timeline (t) navigates 
to Timeline filtered for this entity\n\n## Files\n- MODIFY: crates/lore-tui/src/state/issue_detail.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_issue_detail, fetch_discussions)\n- CREATE: crates/lore-tui/src/view/issue_detail.rs\n\n## TDD Anchor\nRED: Write test_fetch_issue_detail_snapshot in action.rs that inserts an issue with 2 discussions, calls fetch_issue_detail, asserts metadata and discussion count are correct.\nGREEN: Implement fetch_issue_detail with read transaction.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fetch_issue_detail\n\n## Edge Cases\n- Issue with no description: show placeholder \"[No description]\"\n- Issue with hundreds of discussions: paginate or lazy-load beyond first 50\n- Cross-refs to entities not in local DB: show as text-only (not navigable)\n- Issue description with embedded images: show [image] placeholder (no inline rendering)\n- Entity cache (future): near-instant reopen during Enter/Esc drill workflows\n\n## Dependency Context\nUses discussion tree and cross-ref widgets from \"Implement discussion tree + cross-reference widgets\" task.\nUses EntityKey, Msg from \"Implement core types\" task.\nUses with_read_snapshot from DbManager from \"Implement DbManager\" task.\nUses sanitize_for_terminal from \"Implement terminal safety module\" task.\nUses Clock for timestamps from \"Implement Clock trait\" 
task.","status":"open","priority":2,"issue_type":"task","created_at":"2026-02-12T16:59:10.081146Z","created_by":"tayloreernisse","updated_at":"2026-02-12T18:11:28.338916Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-8ab7","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-8ab7","depends_on_id":"bd-1d6z","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-8ab7","depends_on_id":"bd-3ei1","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} +{"id":"bd-8ab7","title":"Implement Issue Detail (state + action + view)","description":"## Background\nThe Issue Detail screen shows a single issue with progressive hydration: Phase 1 loads metadata (fast), Phase 2 loads discussions asynchronously, Phase 3 loads thread bodies on expand. All subqueries run inside a single read transaction for snapshot consistency.\n\n## Approach\nState (state/issue_detail.rs):\n- IssueDetailState: current_key (Option), metadata (Option), discussions (Vec), discussions_loaded (bool), cross_refs (Vec), tree_state (TreePersistState), scroll_offset (usize)\n- IssueMetadata: iid, title, description, state, author, assignee, labels, milestone, created_at, updated_at, web_url, status_name, status_icon, closing_mr_iids, related_issue_iids\n- handle_key(): j/k scroll, Enter expand discussion thread, d open description, x cross-refs, o open in browser, t scoped timeline, Esc back to list\n\nAction (action.rs):\n- fetch_issue_detail(conn, key, clock) -> Result: uses with_read_snapshot for snapshot consistency. 
Fetches metadata, discussion count, cross-refs in single transaction.\n- fetch_discussions(conn, key) -> Result, LoreError>: loads discussions for the issue, separate async call (Phase 2 of hydration)\n\nView (view/issue_detail.rs):\n- render_issue_detail(frame, state, area, theme): header (IID, title, state badge, labels), description (markdown rendered with sanitization), discussions (tree widget), cross-references section\n- Header: \"Issue #42 — Fix auth flow [opened]\" with colored state badge\n- Description: rendered markdown, scrollable\n- Discussions: loaded async, shown with spinner until ready\n- Cross-refs: closing MRs, related issues as navigable links\n\n## Acceptance Criteria\n- [ ] Metadata loads in Phase 1 (p95 < 75ms on M-tier)\n- [ ] Discussions load async in Phase 2 (spinner shown while loading)\n- [ ] All detail subqueries run inside single read transaction (snapshot consistency)\n- [ ] Description text sanitized via sanitize_for_terminal()\n- [ ] Discussion tree renders with expand/collapse\n- [ ] Cross-references navigable via Enter\n- [ ] Esc returns to Issue List with cursor position preserved\n- [ ] Open in browser (o) uses classify_safe_url before launching\n- [ ] Scoped timeline (t) navigates to Timeline filtered for this entity\n\n## Files\n- MODIFY: crates/lore-tui/src/state/issue_detail.rs (expand from stub)\n- MODIFY: crates/lore-tui/src/action.rs (add fetch_issue_detail, fetch_discussions)\n- CREATE: crates/lore-tui/src/view/issue_detail.rs\n\n## TDD Anchor\nRED: Write test_fetch_issue_detail_snapshot in action.rs that inserts an issue with 2 discussions, calls fetch_issue_detail, asserts metadata and discussion count are correct.\nGREEN: Implement fetch_issue_detail with read transaction.\nVERIFY: cargo test --manifest-path crates/lore-tui/Cargo.toml test_fetch_issue_detail\n\n## Edge Cases\n- Issue with no description: show placeholder \"[No description]\"\n- Issue with hundreds of discussions: paginate or lazy-load beyond first 
50\n- Cross-refs to entities not in local DB: show as text-only (not navigable)\n- Issue description with embedded images: show [image] placeholder (no inline rendering)\n- Entity cache (future): near-instant reopen during Enter/Esc drill workflows\n\n## Dependency Context\nUses discussion tree and cross-ref widgets from \"Implement discussion tree + cross-reference widgets\" task.\nUses EntityKey, Msg from \"Implement core types\" task.\nUses with_read_snapshot from DbManager from \"Implement DbManager\" task.\nUses sanitize_for_terminal from \"Implement terminal safety module\" task.\nUses Clock for timestamps from \"Implement Clock trait\" task.","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-12T16:59:10.081146Z","created_by":"tayloreernisse","updated_at":"2026-02-18T20:17:02.568850Z","closed_at":"2026-02-18T20:17:02.568729Z","compaction_level":0,"original_size":0,"labels":["TUI"],"dependencies":[{"issue_id":"bd-8ab7","depends_on_id":"bd-1cl9","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-8ab7","depends_on_id":"bd-1d6z","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-8ab7","depends_on_id":"bd-3ei1","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-8con","title":"lore related: semantic similarity discovery","description":"## Background\nGiven any entity or free text, find semantically related entities using vector embeddings. No other GitLab tool does this — glab, GitLab Advanced Search, and even paid tiers are keyword-only. 
This finds conceptual connections humans miss.\n\n## Current Infrastructure (Verified 2026-02-12)\n- sqlite-vec extension loaded via sqlite3_vec_init in src/core/db.rs:84\n- Embeddings stored in: embedding_metadata table (chunk info) + vec0 virtual table named `embeddings` (vectors)\n- Migration 009 creates embedding infrastructure\n- search_vector() at src/search/vector.rs:43 — works with sqlite-vec KNN queries\n- OllamaClient::embed_batch() at src/embedding/ollama.rs:103 — batch embedding\n- Model: nomic-embed-text, 768 dimensions, context_length=2048 tokens (~1500 bytes)\n- 61K documents in DB, embedding coverage TBD\n\n### sqlite-vec Distance Metric\nThe `embeddings` virtual table is `vec0(embedding float[768])`. sqlite-vec's MATCH query returns L2 (Euclidean) distance by default. Lower distance = more similar. The `search_vector()` function returns `VectorResult { document_id: i64, distance: f64 }`.\n\n## Approach\n\n### Entity Mode: lore related issues N\n1. Look up document for issue N:\n```sql\nSELECT d.id, d.content_text\nFROM documents d\nJOIN issues i ON d.source_type = 'issue' AND d.source_id = i.id\nWHERE i.iid = ?1 AND i.project_id = (SELECT id FROM projects WHERE ...)\n```\nNOTE: `documents.source_id` is the internal DB id from the source table (issues.id), NOT the GitLab IID. See migration 007 comment: `source_id INTEGER NOT NULL -- local DB id in the source table`.\n\n2. Get its embedding: Look up via embedding_metadata which maps document_id -> rowid in the vec0 table:\n```sql\nSELECT em.rowid\nFROM embedding_metadata em\nWHERE em.document_id = ?1\nLIMIT 1 -- use first chunk's embedding as representative\n```\nThen extract the embedding vector from the vec0 table to use as the KNN query.\n\nAlternatively, embed the document's content_text on-the-fly via OllamaClient (simpler, more robust):\n```rust\nlet embedding = client.embed_batch(&[&doc.content_text]).await?[0].clone();\n```\n\n3. 
Call search_vector(conn, &embedding, limit * 2) for KNN — multiply limit to have room after filtering self\n4. Exclude self (filter out source document_id from results)\n5. Hydrate results: join documents -> issues/mrs/discussions for title, url, labels, author\n6. Compute shared_labels: parse `documents.label_names` (JSON array string) for both source and each result, intersect\n7. Return ranked list\n\n### Query Mode: lore related 'free text'\n1. Embed query via OllamaClient::embed_batch(&[query_text])\n2. Call search_vector(conn, &query_embedding, limit)\n3. Hydrate and return (same as entity mode minus self-exclusion)\n\n### Key Design Decision\nThis is intentionally SIMPLER than hybrid search. No FTS, no RRF. Pure vector similarity. The point is conceptual relatedness, not keyword matching.\n\n### Distance to Similarity Score Conversion\nsqlite-vec returns L2 (Euclidean) distance. Convert to 0-1 similarity:\n```rust\n/// Convert L2 distance to a 0-1 similarity score.\n/// Uses inverse relationship: closer (lower distance) = higher similarity.\n/// The +1 prevents division by zero and ensures score is in (0, 1].\nfn distance_to_similarity(distance: f64) -> f64 {\n 1.0 / (1.0 + distance)\n}\n```\nFor normalized embeddings (which nomic-embed-text produces), L2 distance ranges roughly 0-2. 
This formula maps:\n- distance 0.0 -> similarity 1.0 (identical)\n- distance 1.0 -> similarity 0.5\n- distance 2.0 -> similarity 0.33\n\n### Label Extraction for shared_labels\n```rust\nfn parse_label_names(label_names_json: &Option) -> HashSet {\n label_names_json\n .as_deref()\n .and_then(|s| serde_json::from_str::>(s).ok())\n .unwrap_or_default()\n .into_iter()\n .collect()\n}\n\nlet source_labels = parse_label_names(&source_doc.label_names);\nlet result_labels = parse_label_names(&result_doc.label_names);\nlet shared: Vec = source_labels.intersection(&result_labels).cloned().collect();\n```\n\n## Function Signatures\n\n```rust\n// New: src/cli/commands/related.rs\npub struct RelatedArgs {\n pub entity_type: Option, // \"issues\" or \"mrs\"\n pub entity_iid: Option,\n pub query: Option, // free text mode\n pub project: Option,\n pub limit: Option,\n}\n\npub async fn run_related(\n config: &Config,\n args: RelatedArgs,\n) -> Result\n\n// Reuse from src/search/vector.rs:43\npub fn search_vector(\n conn: &Connection,\n query_embedding: &[f32],\n limit: usize,\n) -> Result>\n// VectorResult { document_id: i64, distance: f64 }\n\n// Reuse from src/embedding/ollama.rs:103\npub async fn embed_batch(&self, texts: &[&str]) -> Result>>\n```\n\n## Robot Mode Output Schema\n```json\n{\n \"ok\": true,\n \"data\": {\n \"source\": { \"type\": \"issue\", \"iid\": 3864, \"title\": \"...\" },\n \"query\": \"switch throw time...\",\n \"results\": [{\n \"source_type\": \"issue\",\n \"iid\": 3800,\n \"title\": \"Rail Break Card\",\n \"url\": \"...\",\n \"similarity_score\": 0.87,\n \"shared_labels\": [\"customer:BNSF\"],\n \"shared_authors\": [],\n \"project_path\": \"vs/typescript-code\"\n }]\n },\n \"meta\": { \"elapsed_ms\": 42, \"mode\": \"entity\", \"embedding_dims\": 768, \"distance_metric\": \"l2\" }\n}\n```\n\n## Clap Registration\n```rust\n// In src/main.rs Commands enum, add:\nRelated {\n /// Entity type (\"issues\" or \"mrs\") or free text query\n query_or_type: String,\n 
/// Entity IID (when first arg is entity type)\n iid: Option,\n /// Maximum results\n #[arg(short = 'n', long, default_value = \"10\")]\n limit: usize,\n /// Scope to project (fuzzy match)\n #[arg(short, long)]\n project: Option,\n},\n```\n\n## TDD Loop\nRED: Tests in src/cli/commands/related.rs:\n- test_related_entity_excludes_self: insert doc + embedding for issue, query related, assert source doc not in results\n- test_related_shared_labels: insert 2 docs with overlapping labels (JSON in label_names), assert shared_labels computed correctly\n- test_related_empty_embeddings: no embeddings in DB, assert exit code 14 with helpful error\n- test_related_query_mode: embed free text via mock, assert results returned\n- test_related_similarity_score_range: all scores between 0.0 and 1.0\n- test_distance_to_similarity: unit test the conversion function (0.0->1.0, 1.0->0.5, large->~0.0)\n\nGREEN: Implement related command using search_vector + hydration\n\nVERIFY:\n```bash\ncargo test related:: && cargo clippy --all-targets -- -D warnings\ncargo run --release -- -J related issues 3864 -n 5 | jq '.data.results[0].similarity_score'\n```\n\n## Acceptance Criteria\n- [ ] lore related issues N returns top-K semantically similar entities\n- [ ] lore related mrs N works for merge requests\n- [ ] lore related 'free text' works as concept search (requires Ollama)\n- [ ] Results exclude the input entity itself\n- [ ] similarity_score is 0-1 range (higher = more similar), converted from L2 distance\n- [ ] Robot mode includes shared_labels (from documents.label_names JSON), shared_authors per result\n- [ ] Human mode shows ranked list with titles, scores, common labels\n- [ ] No embeddings in DB: exit code 14 with message \"Run 'lore embed' first\"\n- [ ] Ollama unavailable (query mode only): exit code 14 with suggestion\n- [ ] Performance: <1s for 61K documents\n- [ ] Command registered in main.rs and robot-docs\n\n## Edge Cases\n- Entity has no embedding (added after last lore 
embed): embed its content_text on-the-fly via OllamaClient, or exit 14 if Ollama unavailable\n- All results have very low similarity (<0.3): include warning \"No strongly related entities found\"\n- Entity is a discussion (not issue/MR): should still work (documents table has discussion docs)\n- Multiple documents per entity (discussion docs): use the entity-level document, not discussion subdocs\n- Free text query very short (1-2 words): may produce noisy results, add warning\n- Entity not found in DB: exit code 17 with suggestion to sync\n- Ambiguous project: exit code 18 with suggestion to use -p flag\n- documents.label_names may be NULL or invalid JSON — parse_label_names handles both gracefully\n\n## Dependency Context\n- **bd-1ksf (hybrid search)**: BLOCKER. Shares OllamaClient infrastructure. Also ensures async search.rs patterns are established. Related reuses the same vector search infrastructure.\n\n## Files to Create/Modify\n- NEW: src/cli/commands/related.rs\n- src/cli/commands/mod.rs (add pub mod related; re-export)\n- src/main.rs (register Related subcommand in Commands enum, add handle_related fn)\n- Reuse: search_vector() from src/search/vector.rs, OllamaClient from src/embedding/ollama.rs","status":"open","priority":2,"issue_type":"feature","created_at":"2026-02-12T15:46:58.665923Z","created_by":"tayloreernisse","updated_at":"2026-02-12T16:31:35.489138Z","compaction_level":0,"original_size":0,"labels":["cli-imp","intelligence","search"],"dependencies":[{"issue_id":"bd-8con","depends_on_id":"bd-13lp","type":"parent-child","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-8con","depends_on_id":"bd-1ksf","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-8t4","title":"Extract cross-references from resource_state_events","description":"## Background\nresource_state_events includes source_merge_request (with iid) for 'closed by MR' events. 
After state events are stored (Gate 1), post-processing extracts these into entity_references for the cross-reference graph.\n\n## Approach\nCreate src/core/references.rs (new module) or add to events_db.rs:\n\n```rust\n/// Extract cross-references from stored state events and insert into entity_references.\n/// Looks for state events with source_merge_request_id IS NOT NULL (meaning \"closed by MR\").\n/// \n/// Directionality: source = MR (that caused the close), target = issue (that was closed)\npub fn extract_refs_from_state_events(\n conn: &Connection,\n project_id: i64,\n) -> Result // returns count of new references inserted\n```\n\nSQL logic:\n```sql\nINSERT OR IGNORE INTO entity_references (\n source_entity_type, source_entity_id,\n target_entity_type, target_entity_id,\n reference_type, source_method, created_at\n)\nSELECT\n 'merge_request',\n mr.id,\n 'issue',\n rse.issue_id,\n 'closes',\n 'api_state_event',\n rse.created_at\nFROM resource_state_events rse\nJOIN merge_requests mr ON mr.project_id = rse.project_id AND mr.iid = rse.source_merge_request_id\nWHERE rse.source_merge_request_id IS NOT NULL\n AND rse.issue_id IS NOT NULL\n AND rse.project_id = ?1;\n```\n\nKey: source_merge_request_id stores the MR iid, so we JOIN on merge_requests.iid to get the local DB id.\n\nRegister in src/core/mod.rs: `pub mod references;`\n\nCall this after drain_dependent_queue in the sync pipeline (after all state events are stored).\n\n## Acceptance Criteria\n- [ ] State events with source_merge_request_id produce 'closes' references\n- [ ] Source = MR (resolved by iid), target = issue\n- [ ] source_method = 'api_state_event'\n- [ ] INSERT OR IGNORE prevents duplicates with api_closes_issues data\n- [ ] Returns count of newly inserted references\n- [ ] No-op when no state events have source_merge_request_id\n\n## Files\n- src/core/references.rs (new)\n- src/core/mod.rs (add `pub mod references;`)\n- src/cli/commands/sync.rs (call after drain step)\n\n## TDD Loop\nRED: 
tests/references_tests.rs:\n- `test_extract_refs_from_state_events_basic` - seed a \"closed\" state event with source_merge_request_id, verify entity_reference created\n- `test_extract_refs_dedup_with_closes_issues` - insert ref from closes_issues API first, verify state event extraction doesn't duplicate\n- `test_extract_refs_no_source_mr` - state events without source_merge_request_id produce no refs\n\nSetup: create_test_db with migrations 001-011, seed project + issue + MR + state events.\n\nGREEN: Implement extract_refs_from_state_events\n\nVERIFY: `cargo test references -- --nocapture`\n\n## Edge Cases\n- source_merge_request_id may reference an MR not synced locally (cross-project close) — the JOIN will produce no match, which is correct behavior (ref simply not created)\n- Multiple state events can reference the same MR for the same issue (reopen + re-close) — INSERT OR IGNORE handles dedup\n- The merge_requests table might not have the MR yet if sync is still running — call this after all dependent fetches complete","status":"closed","priority":2,"issue_type":"task","created_at":"2026-02-02T21:32:33.619606Z","created_by":"tayloreernisse","updated_at":"2026-02-04T20:13:28.219791Z","closed_at":"2026-02-04T20:13:28.219633Z","compaction_level":0,"original_size":0,"labels":["extraction","gate-2","phase-b"],"dependencies":[{"issue_id":"bd-8t4","depends_on_id":"bd-1ep","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-8t4","depends_on_id":"bd-1se","type":"parent-child","created_at":"2026-02-12T19:34:59Z","created_by":"import"},{"issue_id":"bd-8t4","depends_on_id":"bd-hu3","type":"blocks","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} {"id":"bd-91j1","title":"Comprehensive robot-docs as agent bootstrap","description":"## Background\nAgents reach for glab because they already know it from training data. lore robot-docs exists but is not comprehensive enough to serve as a zero-training bootstrap. 
An agent encountering lore for the first time should be able to use any command correctly after reading robot-docs output alone.\n\n## Current State (Verified 2026-02-12)\n- `handle_robot_docs()` at src/main.rs:2069\n- Called at no-args in robot mode (main.rs:165) and via Commands::RobotDocs { brief } (main.rs:229)\n- Current output top-level keys: name, version, description, activation, commands, aliases, exit_codes, clap_error_codes, error_format, workflows\n- Missing: response_schema per command, example_output per command, quick_start section, glab equivalence table\n- --brief flag exists but returns shorter version of same structure\n- main.rs is 2579 lines total\n\n## Current robot-docs Output Structure\n```json\n{\n \"name\": \"lore\",\n \"version\": \"0.6.1\",\n \"description\": \"...\",\n \"activation\": { \"flags\": [\"--robot\", \"-J\"], \"env\": \"LORE_ROBOT=1\", \"auto_detect\": \"non-TTY\" },\n \"commands\": [{ \"name\": \"...\", \"description\": \"...\", \"flags\": [...], \"example\": \"...\" }],\n \"aliases\": { ... },\n \"exit_codes\": { ... },\n \"clap_error_codes\": { ... },\n \"error_format\": { ... },\n \"workflows\": { ... }\n}\n```\n\n## Approach\n\n### 1. 
Add quick_start section\nTop-level key with glab-to-lore translation and lore-exclusive feature summary:\n```json\n\"quick_start\": {\n \"glab_equivalents\": [\n { \"glab\": \"glab issue list\", \"lore\": \"lore -J issues -n 50\", \"note\": \"Richer: includes labels, status, closing MRs\" },\n { \"glab\": \"glab issue view 123\", \"lore\": \"lore -J issues 123\", \"note\": \"Includes discussions, work-item status\" },\n { \"glab\": \"glab mr list\", \"lore\": \"lore -J mrs\", \"note\": \"Includes draft status, reviewers\" },\n { \"glab\": \"glab mr view 456\", \"lore\": \"lore -J mrs 456\", \"note\": \"Includes discussions, file changes\" },\n { \"glab\": \"glab api '/projects/:id/issues'\", \"lore\": \"lore -J issues -p project\", \"note\": \"Fuzzy project matching\" }\n ],\n \"lore_exclusive\": [\n \"search: FTS5 + vector hybrid search across all entities\",\n \"who: Expert/workload/reviews analysis per file path or person\",\n \"timeline: Chronological event reconstruction across entities\",\n \"stats: Database statistics with document/note/discussion counts\",\n \"count: Entity counts with state breakdowns\"\n ]\n}\n```\n\n### 2. Add response_schema per command\nFor each command in the commands array, add a `response_schema` field showing the JSON shape:\n```json\n{\n \"name\": \"issues\",\n \"response_schema\": {\n \"ok\": \"boolean\",\n \"data\": { \"type\": \"array|object\", \"fields\": [\"iid\", \"title\", \"state\", \"...\"] },\n \"meta\": { \"elapsed_ms\": \"integer\" }\n }\n}\n```\nCommands with multiple output shapes (list vs detail) need both documented.\n\n### 3. Add example_output per command\nRealistic truncated JSON for each command. Keep each example under 500 bytes.\n\n### 4. Token budget enforcement\n- --brief mode: ONLY quick_start + command names + invocation syntax. Target <4000 tokens (~16000 bytes).\n- Full mode: everything. 
Target <12000 tokens (~48000 bytes).\n- Measure with: `cargo run --release -- --robot robot-docs --brief | wc -c`\n\n## TDD Loop\nRED: Tests in src/main.rs or new src/cli/commands/robot_docs.rs:\n- test_robot_docs_has_quick_start: parse output JSON, assert quick_start.glab_equivalents array has >= 5 entries\n- test_robot_docs_brief_size: --brief output < 16000 bytes\n- test_robot_docs_full_size: full output < 48000 bytes\n- test_robot_docs_has_response_schemas: every command entry has response_schema key\n- test_robot_docs_commands_complete: assert all registered commands appear (issues, mrs, search, who, timeline, count, stats, sync, embed, doctor, health, ingest, generate-docs, show)\n\nGREEN: Add quick_start, response_schema, example_output to robot-docs output\n\nVERIFY:\n```bash\ncargo test robot_docs && cargo clippy --all-targets -- -D warnings\ncargo run --release -- --robot robot-docs | jq '.quick_start.glab_equivalents | length'\n# Should return >= 5\ncargo run --release -- --robot robot-docs --brief | wc -c\n# Should be < 16000\n```\n\n## Acceptance Criteria\n- [ ] robot-docs JSON has quick_start.glab_equivalents array with >= 5 entries\n- [ ] robot-docs JSON has quick_start.lore_exclusive array\n- [ ] Every command entry has response_schema showing the JSON shape\n- [ ] Every command entry has example_output with realistic truncated data\n- [ ] --brief output is under 16000 bytes (~4000 tokens)\n- [ ] Full output is under 48000 bytes (~12000 tokens)\n- [ ] An agent reading ONLY robot-docs can correctly invoke any lore command\n- [ ] cargo test passes with new robot_docs tests\n\n## Edge Cases\n- Commands with multiple output shapes (e.g., issues list vs issues detail via iid) need both schemas documented\n- --fields flag changes output shape -- document the effect in the response_schema\n- robot-docs output must be stable across versions (agents may cache it)\n- Version field should match Cargo.toml version\n\n## Files to Modify\n- src/main.rs fn 
handle_robot_docs() (~line 2069) — add quick_start section, response_schema, example_output\n- Consider extracting to src/cli/commands/robot_docs.rs if the function exceeds 200 lines","status":"closed","priority":1,"issue_type":"task","created_at":"2026-02-12T15:44:40.495479Z","created_by":"tayloreernisse","updated_at":"2026-02-12T16:49:01.043915Z","closed_at":"2026-02-12T16:49:01.043832Z","close_reason":"Robot-docs enhanced with quick_start (glab equivalents, lore exclusives, read/write split) and example_output for issues/mrs/search/who","compaction_level":0,"original_size":0,"labels":["cli","cli-imp","robot-mode"],"dependencies":[{"issue_id":"bd-91j1","depends_on_id":"bd-13lp","type":"parent-child","created_at":"2026-02-12T19:34:59Z","created_by":"import"}]} diff --git a/crates/lore-tui/src/action.rs b/crates/lore-tui/src/action.rs index 245e76a..beb471d 100644 --- a/crates/lore-tui/src/action.rs +++ b/crates/lore-tui/src/action.rs @@ -16,6 +16,7 @@ use crate::state::dashboard::{ use crate::state::issue_list::{ IssueCursor, IssueFilter, IssueListPage, IssueListRow, SortField, SortOrder, }; +use crate::state::mr_detail::{FileChange, FileChangeType, MrDetailData, MrMetadata}; use crate::state::mr_list::{MrCursor, MrFilter, MrListPage, MrListRow, MrSortField, MrSortOrder}; // --------------------------------------------------------------------------- @@ -635,6 +636,544 @@ fn mr_sort_column_and_dir(field: MrSortField, order: MrSortOrder) -> (&'static s (col, dir) } +// --------------------------------------------------------------------------- +// Issue Detail +// --------------------------------------------------------------------------- + +use crate::message::EntityKey; +use crate::state::issue_detail::{IssueDetailData, IssueMetadata}; +use crate::view::common::cross_ref::{CrossRef, CrossRefKind}; +use crate::view::common::discussion_tree::{DiscussionNode, NoteNode}; + +/// Fetch issue metadata and cross-references (Phase 1 load). 
+/// +/// Runs inside a single read transaction for snapshot consistency. +/// Returns metadata + cross-refs; discussions are loaded separately. +pub fn fetch_issue_detail(conn: &Connection, key: &EntityKey) -> Result { + let metadata = fetch_issue_metadata(conn, key)?; + let cross_refs = fetch_issue_cross_refs(conn, key)?; + Ok(IssueDetailData { + metadata, + cross_refs, + }) +} + +/// Fetch issue metadata from the local DB. +fn fetch_issue_metadata(conn: &Connection, key: &EntityKey) -> Result { + let row = conn + .query_row( + "SELECT i.iid, p.path_with_namespace, i.title, + COALESCE(i.description, ''), i.state, i.author_username, + COALESCE(i.milestone_title, ''), + i.due_date, i.created_at, i.updated_at, + COALESCE(i.web_url, ''), + (SELECT COUNT(*) FROM discussions d + WHERE d.issue_id = i.id AND d.noteable_type = 'Issue') + FROM issues i + JOIN projects p ON p.id = i.project_id + WHERE i.project_id = ?1 AND i.iid = ?2", + rusqlite::params![key.project_id, key.iid], + |row| { + Ok(IssueMetadata { + iid: row.get(0)?, + project_path: row.get(1)?, + title: row.get(2)?, + description: row.get(3)?, + state: row.get(4)?, + author: row.get::<_, Option>(5)?.unwrap_or_default(), + assignees: Vec::new(), // Fetched separately below. + labels: Vec::new(), // Fetched separately below. + milestone: { + let m: String = row.get(6)?; + if m.is_empty() { None } else { Some(m) } + }, + due_date: row.get(7)?, + created_at: row.get(8)?, + updated_at: row.get(9)?, + web_url: row.get(10)?, + discussion_count: row.get::<_, i64>(11)? as usize, + }) + }, + ) + .context("fetching issue metadata")?; + + // Fetch assignees. + let mut assignees_stmt = conn + .prepare("SELECT username FROM issue_assignees WHERE issue_id = (SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2)") + .context("preparing assignees query")?; + let assignees: Vec = assignees_stmt + .query_map(rusqlite::params![key.project_id, key.iid], |r| r.get(0)) + .context("fetching assignees")? 
+ .filter_map(Result::ok) + .collect(); + + // Fetch labels. + let mut labels_stmt = conn + .prepare( + "SELECT l.name FROM issue_labels il + JOIN labels l ON l.id = il.label_id + WHERE il.issue_id = (SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2) + ORDER BY l.name", + ) + .context("preparing labels query")?; + let labels: Vec = labels_stmt + .query_map(rusqlite::params![key.project_id, key.iid], |r| r.get(0)) + .context("fetching labels")? + .filter_map(Result::ok) + .collect(); + + Ok(IssueMetadata { + assignees, + labels, + ..row + }) +} + +/// Fetch cross-references for an issue from the entity_references table. +fn fetch_issue_cross_refs(conn: &Connection, key: &EntityKey) -> Result> { + let mut stmt = conn + .prepare( + "SELECT er.reference_type, er.target_entity_type, er.target_entity_id, + er.target_entity_iid, er.target_project_path, + CASE + WHEN er.target_entity_type = 'issue' + THEN (SELECT title FROM issues WHERE id = er.target_entity_id) + WHEN er.target_entity_type = 'merge_request' + THEN (SELECT title FROM merge_requests WHERE id = er.target_entity_id) + ELSE NULL + END as entity_title, + CASE + WHEN er.target_entity_id IS NOT NULL + THEN (SELECT project_id FROM issues WHERE id = er.target_entity_id + UNION ALL + SELECT project_id FROM merge_requests WHERE id = er.target_entity_id + LIMIT 1) + ELSE NULL + END as target_project_id + FROM entity_references er + WHERE er.source_entity_type = 'issue' + AND er.source_entity_id = (SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2) + ORDER BY er.reference_type, er.target_entity_iid", + ) + .context("preparing cross-ref query")?; + + let refs = stmt + .query_map(rusqlite::params![key.project_id, key.iid], |row| { + let ref_type: String = row.get(0)?; + let target_type: String = row.get(1)?; + let target_id: Option = row.get(2)?; + let target_iid: Option = row.get(3)?; + let target_path: Option = row.get(4)?; + let title: Option = row.get(5)?; + let target_project_id: Option = row.get(6)?; 
+ + let kind = match (ref_type.as_str(), target_type.as_str()) { + ("closes", "merge_request") => CrossRefKind::ClosingMr, + ("related", "issue") => CrossRefKind::RelatedIssue, + _ => CrossRefKind::MentionedIn, + }; + + let iid = target_iid.unwrap_or(0); + let project_id = target_project_id.unwrap_or(key.project_id); + + let entity_key = match target_type.as_str() { + "merge_request" => EntityKey::mr(project_id, iid), + _ => EntityKey::issue(project_id, iid), + }; + + let label = title.unwrap_or_else(|| { + let prefix = if target_type == "merge_request" { + "!" + } else { + "#" + }; + let path = target_path.unwrap_or_default(); + if path.is_empty() { + format!("{prefix}{iid}") + } else { + format!("{path}{prefix}{iid}") + } + }); + + let navigable = target_id.is_some(); + + Ok(CrossRef { + kind, + entity_key, + label, + navigable, + }) + }) + .context("fetching cross-refs")? + .filter_map(Result::ok) + .collect(); + + Ok(refs) +} + +/// Fetch discussions for an issue (Phase 2 async load). +/// +/// Returns `DiscussionNode` tree suitable for the discussion tree widget. 
+pub fn fetch_issue_discussions(conn: &Connection, key: &EntityKey) -> Result> { + let issue_id: i64 = conn + .query_row( + "SELECT id FROM issues WHERE project_id = ?1 AND iid = ?2", + rusqlite::params![key.project_id, key.iid], + |r| r.get(0), + ) + .context("looking up issue id")?; + + let mut disc_stmt = conn + .prepare( + "SELECT d.id, d.gitlab_discussion_id, d.resolvable, d.resolved + FROM discussions d + WHERE d.issue_id = ?1 AND d.noteable_type = 'Issue' + ORDER BY d.first_note_at ASC, d.id ASC", + ) + .context("preparing discussions query")?; + + let mut note_stmt = conn + .prepare( + "SELECT n.author_username, n.body, n.created_at, n.is_system, + n.note_type, n.position_new_path, n.position_new_line + FROM notes n + WHERE n.discussion_id = ?1 + ORDER BY n.position ASC, n.created_at ASC", + ) + .context("preparing notes query")?; + + let discussions: Vec = disc_stmt + .query_map(rusqlite::params![issue_id], |row| { + Ok(( + row.get::<_, i64>(0)?, // id + row.get::<_, String>(1)?, // gitlab_discussion_id + row.get::<_, bool>(2)?, // resolvable + row.get::<_, bool>(3)?, // resolved + )) + }) + .context("fetching discussions")? 
+ .filter_map(Result::ok) + .map(|(disc_db_id, discussion_id, resolvable, resolved)| { + let notes: Vec = note_stmt + .query_map(rusqlite::params![disc_db_id], |row| { + Ok(NoteNode { + author: row.get::<_, Option>(0)?.unwrap_or_default(), + body: row.get::<_, Option>(1)?.unwrap_or_default(), + created_at: row.get(2)?, + is_system: row.get(3)?, + is_diff_note: row.get::<_, Option>(4)?.as_deref() + == Some("DiffNote"), + diff_file_path: row.get(5)?, + diff_new_line: row.get(6)?, + }) + }) + .map(|rows| rows.filter_map(Result::ok).collect()) + .unwrap_or_default(); + + DiscussionNode { + discussion_id, + notes, + resolvable, + resolved, + } + }) + .collect(); + + Ok(discussions) +} + +// --------------------------------------------------------------------------- +// MR Detail +// --------------------------------------------------------------------------- + +/// Fetch MR metadata + cross-refs + file changes (Phase 1 composite). +pub fn fetch_mr_detail(conn: &Connection, key: &EntityKey) -> Result { + let metadata = fetch_mr_metadata(conn, key)?; + let cross_refs = fetch_mr_cross_refs(conn, key)?; + let file_changes = fetch_mr_file_changes(conn, key)?; + Ok(MrDetailData { + metadata, + cross_refs, + file_changes, + }) +} + +/// Fetch MR metadata from the local DB. 
+fn fetch_mr_metadata(conn: &Connection, key: &EntityKey) -> Result { + let row = conn + .query_row( + "SELECT m.iid, p.path_with_namespace, m.title, + COALESCE(m.description, ''), m.state, m.draft, + m.author_username, m.source_branch, m.target_branch, + COALESCE(m.detailed_merge_status, ''), + m.created_at, m.updated_at, m.merged_at, + COALESCE(m.web_url, ''), + (SELECT COUNT(*) FROM discussions d WHERE d.merge_request_id = m.id) AS disc_count, + (SELECT COUNT(*) FROM mr_file_changes fc WHERE fc.merge_request_id = m.id) AS fc_count + FROM merge_requests m + JOIN projects p ON p.id = m.project_id + WHERE m.project_id = ?1 AND m.iid = ?2", + rusqlite::params![key.project_id, key.iid], + |row| { + Ok(MrMetadata { + iid: row.get(0)?, + project_path: row.get(1)?, + title: row.get::<_, Option>(2)?.unwrap_or_default(), + description: row.get(3)?, + state: row.get::<_, Option>(4)?.unwrap_or_default(), + draft: row.get(5)?, + author: row.get::<_, Option>(6)?.unwrap_or_default(), + assignees: Vec::new(), + reviewers: Vec::new(), + labels: Vec::new(), + source_branch: row.get::<_, Option>(7)?.unwrap_or_default(), + target_branch: row.get::<_, Option>(8)?.unwrap_or_default(), + merge_status: row.get(9)?, + created_at: row.get(10)?, + updated_at: row.get(11)?, + merged_at: row.get(12)?, + web_url: row.get(13)?, + discussion_count: row.get::<_, i64>(14)? as usize, + file_change_count: row.get::<_, i64>(15)? as usize, + }) + }, + ) + .context("fetching MR metadata")?; + + // Fetch assignees. + let mut assignees_stmt = conn + .prepare( + "SELECT username FROM mr_assignees + WHERE merge_request_id = ( + SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2 + ) + ORDER BY username", + ) + .context("preparing assignees query")?; + let assignees: Vec = assignees_stmt + .query_map(rusqlite::params![key.project_id, key.iid], |row| row.get(0)) + .context("fetching assignees")? + .filter_map(Result::ok) + .collect(); + + // Fetch reviewers. 
+ let mut reviewers_stmt = conn + .prepare( + "SELECT username FROM mr_reviewers + WHERE merge_request_id = ( + SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2 + ) + ORDER BY username", + ) + .context("preparing reviewers query")?; + let reviewers: Vec = reviewers_stmt + .query_map(rusqlite::params![key.project_id, key.iid], |row| row.get(0)) + .context("fetching reviewers")? + .filter_map(Result::ok) + .collect(); + + // Fetch labels. + let mut labels_stmt = conn + .prepare( + "SELECT l.name FROM mr_labels ml + JOIN labels l ON ml.label_id = l.id + WHERE ml.merge_request_id = ( + SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2 + ) + ORDER BY l.name", + ) + .context("preparing labels query")?; + let labels: Vec = labels_stmt + .query_map(rusqlite::params![key.project_id, key.iid], |row| row.get(0)) + .context("fetching labels")? + .filter_map(Result::ok) + .collect(); + + let mut result = row; + result.assignees = assignees; + result.reviewers = reviewers; + result.labels = labels; + Ok(result) +} + +/// Fetch cross-references for an MR. 
+fn fetch_mr_cross_refs(conn: &Connection, key: &EntityKey) -> Result> { + let mut stmt = conn + .prepare( + "SELECT er.reference_type, er.target_entity_type, + er.target_entity_id, er.target_entity_iid, + er.target_project_path, + CASE + WHEN er.target_entity_type = 'issue' + THEN (SELECT title FROM issues WHERE id = er.target_entity_id) + WHEN er.target_entity_type = 'merge_request' + THEN (SELECT title FROM merge_requests WHERE id = er.target_entity_id) + ELSE NULL + END as entity_title, + CASE + WHEN er.target_entity_id IS NOT NULL + THEN (SELECT project_id FROM issues WHERE id = er.target_entity_id + UNION ALL + SELECT project_id FROM merge_requests WHERE id = er.target_entity_id + LIMIT 1) + ELSE NULL + END as target_project_id + FROM entity_references er + WHERE er.source_entity_type = 'merge_request' + AND er.source_entity_id = (SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2) + ORDER BY er.reference_type, er.target_entity_iid", + ) + .context("preparing MR cross-refs query")?; + + let refs: Vec = stmt + .query_map(rusqlite::params![key.project_id, key.iid], |row| { + let ref_type: String = row.get(0)?; + let target_type: String = row.get(1)?; + let _target_id: Option = row.get(2)?; + let target_iid: Option = row.get(3)?; + let target_path: Option = row.get(4)?; + let title: Option = row.get(5)?; + let target_project_id: Option = row.get(6)?; + + let kind = match (ref_type.as_str(), target_type.as_str()) { + ("closes", "issue") => CrossRefKind::ClosingMr, + ("related", "issue") => CrossRefKind::RelatedIssue, + _ => CrossRefKind::MentionedIn, + }; + + let iid = target_iid.unwrap_or(0); + let project_id = target_project_id.unwrap_or(key.project_id); + + let entity_key = match target_type.as_str() { + "merge_request" => EntityKey::mr(project_id, iid), + _ => EntityKey::issue(project_id, iid), + }; + + let label = title.unwrap_or_else(|| { + let prefix = if target_type == "merge_request" { + "!" 
+ } else { + "#" + }; + let path = target_path.clone().unwrap_or_default(); + if path.is_empty() { + format!("{prefix}{iid}") + } else { + format!("{path}{prefix}{iid}") + } + }); + + Ok(CrossRef { + kind, + entity_key, + label, + navigable: target_project_id.is_some(), + }) + }) + .context("fetching MR cross-refs")? + .filter_map(Result::ok) + .collect(); + + Ok(refs) +} + +/// Fetch file changes for an MR. +fn fetch_mr_file_changes(conn: &Connection, key: &EntityKey) -> Result> { + let mut stmt = conn + .prepare( + "SELECT fc.old_path, fc.new_path, fc.change_type + FROM mr_file_changes fc + WHERE fc.merge_request_id = ( + SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2 + ) + ORDER BY fc.new_path", + ) + .context("preparing file changes query")?; + + let changes: Vec = stmt + .query_map(rusqlite::params![key.project_id, key.iid], |row| { + Ok(FileChange { + old_path: row.get(0)?, + new_path: row.get(1)?, + change_type: FileChangeType::parse_db(&row.get::<_, String>(2).unwrap_or_default()), + }) + }) + .context("fetching file changes")? + .filter_map(Result::ok) + .collect(); + + Ok(changes) +} + +/// Fetch discussions for an MR (Phase 2 async load). 
+pub fn fetch_mr_discussions(conn: &Connection, key: &EntityKey) -> Result> { + let mr_id: i64 = conn + .query_row( + "SELECT id FROM merge_requests WHERE project_id = ?1 AND iid = ?2", + rusqlite::params![key.project_id, key.iid], + |row| row.get(0), + ) + .context("looking up MR id for discussions")?; + + let mut disc_stmt = conn + .prepare( + "SELECT d.id, d.gitlab_discussion_id, d.resolvable, d.resolved + FROM discussions d + WHERE d.merge_request_id = ?1 + ORDER BY d.first_note_at ASC", + ) + .context("preparing MR discussions query")?; + + let mut note_stmt = conn + .prepare( + "SELECT n.author_username, n.body, n.created_at, n.is_system, + n.note_type, n.position_new_path, n.position_new_line + FROM notes n + WHERE n.discussion_id = ?1 + ORDER BY n.position ASC, n.created_at ASC", + ) + .context("preparing MR notes query")?; + + let discussions: Vec = disc_stmt + .query_map(rusqlite::params![mr_id], |row| { + Ok(( + row.get::<_, i64>(0)?, // id + row.get::<_, String>(1)?, // gitlab_discussion_id + row.get::<_, bool>(2)?, // resolvable + row.get::<_, bool>(3)?, // resolved + )) + }) + .context("fetching MR discussions")? 
+ .filter_map(Result::ok) + .map(|(disc_db_id, discussion_id, resolvable, resolved)| { + let notes: Vec = note_stmt + .query_map(rusqlite::params![disc_db_id], |row| { + Ok(NoteNode { + author: row.get::<_, Option>(0)?.unwrap_or_default(), + body: row.get::<_, Option>(1)?.unwrap_or_default(), + created_at: row.get(2)?, + is_system: row.get(3)?, + is_diff_note: row.get::<_, Option>(4)?.as_deref() + == Some("DiffNote"), + diff_file_path: row.get(5)?, + diff_new_line: row.get(6)?, + }) + }) + .map(|rows| rows.filter_map(Result::ok).collect()) + .unwrap_or_default(); + + DiscussionNode { + discussion_id, + notes, + resolvable, + resolved, + } + }) + .collect(); + + Ok(discussions) +} + // --------------------------------------------------------------------------- // Tests // --------------------------------------------------------------------------- @@ -1625,4 +2164,672 @@ mod tests { assert_eq!(page.total_count, 1); assert_eq!(page.rows[0].iid, 3); } + + // ----------------------------------------------------------------------- + // Issue Detail helpers + // ----------------------------------------------------------------------- + + fn create_issue_detail_schema(conn: &Connection) { + conn.execute_batch( + " + CREATE TABLE projects ( + id INTEGER PRIMARY KEY, + gitlab_project_id INTEGER UNIQUE NOT NULL, + path_with_namespace TEXT NOT NULL + ); + CREATE TABLE issues ( + id INTEGER PRIMARY KEY, + gitlab_id INTEGER UNIQUE NOT NULL, + project_id INTEGER NOT NULL, + iid INTEGER NOT NULL, + title TEXT NOT NULL, + description TEXT, + state TEXT NOT NULL DEFAULT 'opened', + author_username TEXT, + milestone_title TEXT, + due_date TEXT, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + web_url TEXT, + UNIQUE(project_id, iid) + ); + CREATE TABLE issue_assignees ( + issue_id INTEGER NOT NULL, + username TEXT NOT NULL, + UNIQUE(issue_id, username) + ); + CREATE TABLE labels ( + id INTEGER PRIMARY KEY, + project_id INTEGER NOT NULL, + name TEXT NOT NULL + ); + CREATE 
TABLE issue_labels ( + issue_id INTEGER NOT NULL, + label_id INTEGER NOT NULL, + UNIQUE(issue_id, label_id) + ); + CREATE TABLE discussions ( + id INTEGER PRIMARY KEY, + gitlab_discussion_id TEXT NOT NULL, + project_id INTEGER NOT NULL, + issue_id INTEGER, + merge_request_id INTEGER, + noteable_type TEXT NOT NULL, + resolvable INTEGER NOT NULL DEFAULT 0, + resolved INTEGER NOT NULL DEFAULT 0, + first_note_at INTEGER + ); + CREATE TABLE notes ( + id INTEGER PRIMARY KEY, + gitlab_id INTEGER UNIQUE NOT NULL, + discussion_id INTEGER NOT NULL, + project_id INTEGER NOT NULL, + note_type TEXT, + is_system INTEGER NOT NULL DEFAULT 0, + author_username TEXT, + body TEXT, + created_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + position INTEGER, + position_new_path TEXT, + position_new_line INTEGER + ); + CREATE TABLE entity_references ( + id INTEGER PRIMARY KEY, + project_id INTEGER NOT NULL, + source_entity_type TEXT NOT NULL, + source_entity_id INTEGER NOT NULL, + target_entity_type TEXT NOT NULL, + target_entity_id INTEGER, + target_project_path TEXT, + target_entity_iid INTEGER, + reference_type TEXT NOT NULL, + source_method TEXT NOT NULL DEFAULT 'api', + created_at INTEGER NOT NULL DEFAULT 0 + ); + CREATE TABLE merge_requests ( + id INTEGER PRIMARY KEY, + gitlab_id INTEGER UNIQUE NOT NULL, + project_id INTEGER NOT NULL, + iid INTEGER NOT NULL, + title TEXT NOT NULL, + state TEXT NOT NULL DEFAULT 'opened', + UNIQUE(project_id, iid) + ); + ", + ) + .unwrap(); + } + + fn setup_issue_detail_data(conn: &Connection) { + // Project. + conn.execute( + "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')", + [], + ) + .unwrap(); + + // Issue. 
+ conn.execute( + "INSERT INTO issues (id, gitlab_id, project_id, iid, title, description, state, author_username, milestone_title, due_date, created_at, updated_at, web_url) + VALUES (1, 1000, 1, 42, 'Fix authentication flow', 'Detailed description here', 'opened', 'alice', 'v1.0', '2026-03-01', 1700000000000, 1700000060000, 'https://gitlab.com/group/project/-/issues/42')", + [], + ) + .unwrap(); + + // Assignees. + conn.execute( + "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'bob')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO issue_assignees (issue_id, username) VALUES (1, 'charlie')", + [], + ) + .unwrap(); + + // Labels. + conn.execute( + "INSERT INTO labels (id, project_id, name) VALUES (1, 1, 'backend')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO labels (id, project_id, name) VALUES (2, 1, 'urgent')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO issue_labels (issue_id, label_id) VALUES (1, 1)", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO issue_labels (issue_id, label_id) VALUES (1, 2)", + [], + ) + .unwrap(); + + // Discussions + notes. + conn.execute( + "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, resolvable, resolved, first_note_at) + VALUES (1, 'disc-aaa', 1, 1, 'Issue', 0, 0, 1700000010000)", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, position, is_system, note_type) + VALUES (1, 10001, 1, 1, 'alice', 'This looks good overall', 1700000010000, 1700000010000, 0, 0, 'DiscussionNote')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, position, is_system, note_type) + VALUES (2, 10002, 1, 1, 'bob', 'Agreed, but see my comment below', 1700000020000, 1700000020000, 1, 0, 'DiscussionNote')", + [], + ) + .unwrap(); + + // System note discussion. 
+ conn.execute( + "INSERT INTO discussions (id, gitlab_discussion_id, project_id, issue_id, noteable_type, first_note_at) + VALUES (2, 'disc-bbb', 1, 1, 'Issue', 1700000030000)", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, author_username, body, created_at, updated_at, position, is_system, note_type) + VALUES (3, 10003, 2, 1, 'system', 'changed the description', 1700000030000, 1700000030000, 0, 1, NULL)", + [], + ) + .unwrap(); + + // Closing MR cross-ref. + conn.execute( + "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state) + VALUES (1, 2000, 1, 10, 'Fix auth MR', 'opened')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_entity_iid, reference_type) + VALUES (1, 'issue', 1, 'merge_request', 1, 10, 'closes')", + [], + ) + .unwrap(); + } + + // ----------------------------------------------------------------------- + // Issue Detail tests + // ----------------------------------------------------------------------- + + #[test] + fn test_fetch_issue_detail_basic() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let data = fetch_issue_detail(&conn, &key).unwrap(); + + assert_eq!(data.metadata.iid, 42); + assert_eq!(data.metadata.title, "Fix authentication flow"); + assert_eq!(data.metadata.state, "opened"); + assert_eq!(data.metadata.author, "alice"); + assert_eq!(data.metadata.project_path, "group/project"); + assert_eq!(data.metadata.milestone, Some("v1.0".to_string())); + assert_eq!(data.metadata.due_date, Some("2026-03-01".to_string())); + assert_eq!( + data.metadata.web_url, + "https://gitlab.com/group/project/-/issues/42" + ); + } + + #[test] + fn test_fetch_issue_detail_assignees() { + let conn = Connection::open_in_memory().unwrap(); + 
create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let data = fetch_issue_detail(&conn, &key).unwrap(); + + assert_eq!(data.metadata.assignees.len(), 2); + assert!(data.metadata.assignees.contains(&"bob".to_string())); + assert!(data.metadata.assignees.contains(&"charlie".to_string())); + } + + #[test] + fn test_fetch_issue_detail_labels() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let data = fetch_issue_detail(&conn, &key).unwrap(); + + assert_eq!(data.metadata.labels, vec!["backend", "urgent"]); + } + + #[test] + fn test_fetch_issue_detail_cross_refs() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let data = fetch_issue_detail(&conn, &key).unwrap(); + + assert_eq!(data.cross_refs.len(), 1); + assert_eq!(data.cross_refs[0].kind, CrossRefKind::ClosingMr); + assert_eq!(data.cross_refs[0].entity_key, EntityKey::mr(1, 10)); + assert_eq!(data.cross_refs[0].label, "Fix auth MR"); + assert!(data.cross_refs[0].navigable); + } + + #[test] + fn test_fetch_issue_detail_discussion_count() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let data = fetch_issue_detail(&conn, &key).unwrap(); + + assert_eq!(data.metadata.discussion_count, 2); + } + + #[test] + fn test_fetch_issue_discussions_basic() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let discussions = fetch_issue_discussions(&conn, &key).unwrap(); + + assert_eq!(discussions.len(), 2); + } + + #[test] + fn test_fetch_issue_discussions_notes() { + let conn = 
Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let discussions = fetch_issue_discussions(&conn, &key).unwrap(); + + // First discussion has 2 notes. + assert_eq!(discussions[0].notes.len(), 2); + assert_eq!(discussions[0].notes[0].author, "alice"); + assert_eq!(discussions[0].notes[0].body, "This looks good overall"); + assert_eq!(discussions[0].notes[1].author, "bob"); + assert!(!discussions[0].notes[0].is_system); + } + + #[test] + fn test_fetch_issue_discussions_system_note() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let discussions = fetch_issue_discussions(&conn, &key).unwrap(); + + // Second discussion is a system note. + assert_eq!(discussions[1].notes.len(), 1); + assert!(discussions[1].notes[0].is_system); + assert_eq!(discussions[1].notes[0].body, "changed the description"); + } + + #[test] + fn test_fetch_issue_discussions_ordering() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 42); + let discussions = fetch_issue_discussions(&conn, &key).unwrap(); + + // Ordered by first_note_at. 
+ assert_eq!(discussions[0].discussion_id, "disc-aaa"); + assert_eq!(discussions[1].discussion_id, "disc-bbb"); + } + + #[test] + fn test_fetch_issue_detail_not_found() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + setup_issue_detail_data(&conn); + + let key = EntityKey::issue(1, 999); + let result = fetch_issue_detail(&conn, &key); + assert!(result.is_err()); + } + + #[test] + fn test_fetch_issue_detail_no_description() { + let conn = Connection::open_in_memory().unwrap(); + create_issue_detail_schema(&conn); + + conn.execute( + "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO issues (id, gitlab_id, project_id, iid, title, description, state, created_at, updated_at) + VALUES (1, 1000, 1, 1, 'No desc', NULL, 'opened', 0, 0)", + [], + ) + .unwrap(); + + let key = EntityKey::issue(1, 1); + let data = fetch_issue_detail(&conn, &key).unwrap(); + assert_eq!(data.metadata.description, ""); + } + + // ----------------------------------------------------------------------- + // MR Detail Tests + // ----------------------------------------------------------------------- + + fn create_mr_detail_schema(conn: &Connection) { + create_issue_detail_schema(conn); + // Add MR-specific columns and tables on top of the base schema. + conn.execute_batch( + " + -- Add columns to merge_requests that the detail query needs. 
+ ALTER TABLE merge_requests ADD COLUMN description TEXT; + ALTER TABLE merge_requests ADD COLUMN draft INTEGER NOT NULL DEFAULT 0; + ALTER TABLE merge_requests ADD COLUMN author_username TEXT; + ALTER TABLE merge_requests ADD COLUMN source_branch TEXT; + ALTER TABLE merge_requests ADD COLUMN target_branch TEXT; + ALTER TABLE merge_requests ADD COLUMN detailed_merge_status TEXT; + ALTER TABLE merge_requests ADD COLUMN created_at INTEGER; + ALTER TABLE merge_requests ADD COLUMN updated_at INTEGER; + ALTER TABLE merge_requests ADD COLUMN merged_at INTEGER; + ALTER TABLE merge_requests ADD COLUMN web_url TEXT; + + CREATE TABLE mr_assignees ( + merge_request_id INTEGER NOT NULL, + username TEXT NOT NULL, + UNIQUE(merge_request_id, username) + ); + CREATE TABLE mr_reviewers ( + merge_request_id INTEGER NOT NULL, + username TEXT NOT NULL, + UNIQUE(merge_request_id, username) + ); + CREATE TABLE mr_labels ( + merge_request_id INTEGER NOT NULL, + label_id INTEGER NOT NULL, + UNIQUE(merge_request_id, label_id) + ); + CREATE TABLE mr_file_changes ( + id INTEGER PRIMARY KEY, + merge_request_id INTEGER NOT NULL, + project_id INTEGER NOT NULL, + old_path TEXT, + new_path TEXT NOT NULL, + change_type TEXT NOT NULL + ); + ", + ) + .unwrap(); + } + + fn setup_mr_detail_data(conn: &Connection) { + // Project (if not already inserted). + conn.execute( + "INSERT OR IGNORE INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'group/project')", + [], + ) + .unwrap(); + + // MR. + conn.execute( + "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, description, state, draft, author_username, source_branch, target_branch, detailed_merge_status, created_at, updated_at, merged_at, web_url) + VALUES (1, 2000, 1, 10, 'Fix auth flow', 'MR description', 'opened', 0, 'alice', 'fix-auth', 'main', 'mergeable', 1700000000000, 1700000060000, NULL, 'https://gitlab.com/group/project/-/merge_requests/10')", + [], + ) + .unwrap(); + + // Assignees. 
+ conn.execute( + "INSERT INTO mr_assignees (merge_request_id, username) VALUES (1, 'bob')", + [], + ) + .unwrap(); + + // Reviewers. + conn.execute( + "INSERT INTO mr_reviewers (merge_request_id, username) VALUES (1, 'carol')", + [], + ) + .unwrap(); + + // Labels. + conn.execute( + "INSERT OR IGNORE INTO labels (id, project_id, name) VALUES (10, 1, 'backend')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO mr_labels (merge_request_id, label_id) VALUES (1, 10)", + [], + ) + .unwrap(); + + // File changes. + conn.execute( + "INSERT INTO mr_file_changes (merge_request_id, project_id, old_path, new_path, change_type) + VALUES (1, 1, NULL, 'src/auth.rs', 'modified')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO mr_file_changes (merge_request_id, project_id, old_path, new_path, change_type) + VALUES (1, 1, NULL, 'src/lib.rs', 'added')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO mr_file_changes (merge_request_id, project_id, old_path, new_path, change_type) + VALUES (1, 1, 'src/old.rs', 'src/new.rs', 'renamed')", + [], + ) + .unwrap(); + + // Discussion with a note. + conn.execute( + "INSERT INTO discussions (id, gitlab_discussion_id, project_id, merge_request_id, noteable_type, resolvable, resolved, first_note_at) + VALUES (1, 'mr_disc_1', 1, 1, 'MergeRequest', 1, 0, 1700000010000)", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO notes (id, gitlab_id, discussion_id, project_id, note_type, is_system, author_username, body, created_at, updated_at, position, position_new_path, position_new_line) + VALUES (1, 5001, 1, 1, 'DiffNote', 0, 'alice', 'Please fix this', 1700000010000, 1700000010000, 0, 'src/auth.rs', 42)", + [], + ) + .unwrap(); + + // Cross-reference (MR closes issue). 
+ conn.execute( + "INSERT INTO issues (id, gitlab_id, project_id, iid, title, state, created_at, updated_at) + VALUES (1, 1000, 1, 5, 'Auth bug', 'opened', 0, 0)", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO entity_references (project_id, source_entity_type, source_entity_id, target_entity_type, target_entity_id, target_project_path, target_entity_iid, reference_type, source_method) + VALUES (1, 'merge_request', 1, 'issue', 1, 'group/project', 5, 'closes', 'api')", + [], + ) + .unwrap(); + } + + #[test] + fn test_fetch_mr_detail_basic_metadata() { + let conn = Connection::open_in_memory().unwrap(); + create_mr_detail_schema(&conn); + setup_mr_detail_data(&conn); + + let key = EntityKey::mr(1, 10); + let data = fetch_mr_detail(&conn, &key).unwrap(); + + assert_eq!(data.metadata.iid, 10); + assert_eq!(data.metadata.title, "Fix auth flow"); + assert_eq!(data.metadata.description, "MR description"); + assert_eq!(data.metadata.state, "opened"); + assert!(!data.metadata.draft); + assert_eq!(data.metadata.author, "alice"); + assert_eq!(data.metadata.source_branch, "fix-auth"); + assert_eq!(data.metadata.target_branch, "main"); + assert_eq!(data.metadata.merge_status, "mergeable"); + assert!(data.metadata.merged_at.is_none()); + assert_eq!( + data.metadata.web_url, + "https://gitlab.com/group/project/-/merge_requests/10" + ); + } + + #[test] + fn test_fetch_mr_detail_assignees_reviewers_labels() { + let conn = Connection::open_in_memory().unwrap(); + create_mr_detail_schema(&conn); + setup_mr_detail_data(&conn); + + let key = EntityKey::mr(1, 10); + let data = fetch_mr_detail(&conn, &key).unwrap(); + + assert_eq!(data.metadata.assignees, vec!["bob"]); + assert_eq!(data.metadata.reviewers, vec!["carol"]); + assert_eq!(data.metadata.labels, vec!["backend"]); + } + + #[test] + fn test_fetch_mr_detail_file_changes() { + let conn = Connection::open_in_memory().unwrap(); + create_mr_detail_schema(&conn); + setup_mr_detail_data(&conn); + + let key = EntityKey::mr(1, 
10); + let data = fetch_mr_detail(&conn, &key).unwrap(); + + assert_eq!(data.file_changes.len(), 3); + assert_eq!(data.metadata.file_change_count, 3); + + // Ordered by new_path. + assert_eq!(data.file_changes[0].new_path, "src/auth.rs"); + assert_eq!(data.file_changes[0].change_type, FileChangeType::Modified); + + assert_eq!(data.file_changes[1].new_path, "src/lib.rs"); + assert_eq!(data.file_changes[1].change_type, FileChangeType::Added); + + assert_eq!(data.file_changes[2].new_path, "src/new.rs"); + assert_eq!(data.file_changes[2].change_type, FileChangeType::Renamed); + assert_eq!(data.file_changes[2].old_path.as_deref(), Some("src/old.rs")); + } + + #[test] + fn test_fetch_mr_detail_cross_refs() { + let conn = Connection::open_in_memory().unwrap(); + create_mr_detail_schema(&conn); + setup_mr_detail_data(&conn); + + let key = EntityKey::mr(1, 10); + let data = fetch_mr_detail(&conn, &key).unwrap(); + + assert_eq!(data.cross_refs.len(), 1); + assert_eq!(data.cross_refs[0].kind, CrossRefKind::ClosingMr); + assert_eq!(data.cross_refs[0].label, "Auth bug"); + } + + #[test] + fn test_fetch_mr_discussions() { + let conn = Connection::open_in_memory().unwrap(); + create_mr_detail_schema(&conn); + setup_mr_detail_data(&conn); + + let key = EntityKey::mr(1, 10); + let discussions = fetch_mr_discussions(&conn, &key).unwrap(); + + assert_eq!(discussions.len(), 1); + assert_eq!(discussions[0].discussion_id, "mr_disc_1"); + assert!(discussions[0].resolvable); + assert!(!discussions[0].resolved); + assert_eq!(discussions[0].notes.len(), 1); + assert_eq!(discussions[0].notes[0].author, "alice"); + assert_eq!(discussions[0].notes[0].body, "Please fix this"); + assert!(discussions[0].notes[0].is_diff_note); + assert_eq!( + discussions[0].notes[0].diff_file_path.as_deref(), + Some("src/auth.rs") + ); + assert_eq!(discussions[0].notes[0].diff_new_line, Some(42)); + } + + #[test] + fn test_fetch_mr_detail_not_found() { + let conn = Connection::open_in_memory().unwrap(); + 
create_mr_detail_schema(&conn); + + // Insert project but no MR. + conn.execute( + "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')", + [], + ) + .unwrap(); + + let key = EntityKey::mr(1, 99); + assert!(fetch_mr_detail(&conn, &key).is_err()); + } + + #[test] + fn test_fetch_mr_detail_no_file_changes() { + let conn = Connection::open_in_memory().unwrap(); + create_mr_detail_schema(&conn); + + conn.execute( + "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, created_at, updated_at, web_url) + VALUES (1, 2000, 1, 10, 'Empty MR', 'opened', 0, 0, '')", + [], + ) + .unwrap(); + + let key = EntityKey::mr(1, 10); + let data = fetch_mr_detail(&conn, &key).unwrap(); + assert!(data.file_changes.is_empty()); + assert_eq!(data.metadata.file_change_count, 0); + } + + #[test] + fn test_fetch_mr_detail_draft() { + let conn = Connection::open_in_memory().unwrap(); + create_mr_detail_schema(&conn); + + conn.execute( + "INSERT INTO projects (id, gitlab_project_id, path_with_namespace) VALUES (1, 100, 'g/p')", + [], + ) + .unwrap(); + conn.execute( + "INSERT INTO merge_requests (id, gitlab_id, project_id, iid, title, state, draft, created_at, updated_at, web_url) + VALUES (1, 2000, 1, 10, 'Draft: WIP', 'opened', 1, 0, 0, '')", + [], + ) + .unwrap(); + + let key = EntityKey::mr(1, 10); + let data = fetch_mr_detail(&conn, &key).unwrap(); + assert!(data.metadata.draft); + } } diff --git a/crates/lore-tui/src/app/update.rs b/crates/lore-tui/src/app/update.rs index c4f8fad..52f6e63 100644 --- a/crates/lore-tui/src/app/update.rs +++ b/crates/lore-tui/src/app/update.rs @@ -332,6 +332,58 @@ impl LoreApp { Cmd::none() } + // --- Issue detail --- + Msg::IssueDetailLoaded { + generation, + key, + data, + } => { + let screen = Screen::IssueDetail(key.clone()); + if self + .supervisor + 
.is_current(&TaskKey::LoadScreen(screen.clone()), generation) + { + self.state.issue_detail.apply_metadata(*data); + self.state.set_loading(screen.clone(), LoadState::Idle); + self.supervisor + .complete(&TaskKey::LoadScreen(screen), generation); + } + Cmd::none() + } + Msg::DiscussionsLoaded { + generation, + key, + discussions, + } => { + let screen = Screen::IssueDetail(key.clone()); + if self + .supervisor + .is_current(&TaskKey::LoadScreen(screen.clone()), generation) + { + self.state.issue_detail.apply_discussions(discussions); + } + Cmd::none() + } + + // --- MR detail --- + Msg::MrDetailLoaded { + generation, + key, + data, + } => { + let screen = Screen::MrDetail(key.clone()); + if self + .supervisor + .is_current(&TaskKey::LoadScreen(screen.clone()), generation) + { + self.state.mr_detail.apply_metadata(*data); + self.state.set_loading(screen.clone(), LoadState::Idle); + self.supervisor + .complete(&TaskKey::LoadScreen(screen), generation); + } + Cmd::none() + } + // All other message variants: no-op for now. // Future phases will fill these in as screens are implemented. 
_ => Cmd::none(), diff --git a/crates/lore-tui/src/message.rs b/crates/lore-tui/src/message.rs index a9d97c3..c54a6b0 100644 --- a/crates/lore-tui/src/message.rs +++ b/crates/lore-tui/src/message.rs @@ -240,20 +240,21 @@ pub enum Msg { IssueDetailLoaded { generation: u64, key: EntityKey, - detail: Box, + data: Box, }, // --- MR detail --- MrDetailLoaded { generation: u64, key: EntityKey, - detail: Box, + data: Box, }, // --- Discussions (shared by issue + MR detail) --- DiscussionsLoaded { generation: u64, - discussions: Vec, + key: EntityKey, + discussions: Vec, }, // --- Search --- diff --git a/crates/lore-tui/src/state/issue_detail.rs b/crates/lore-tui/src/state/issue_detail.rs index f367a53..6e3e1be 100644 --- a/crates/lore-tui/src/state/issue_detail.rs +++ b/crates/lore-tui/src/state/issue_detail.rs @@ -1,14 +1,284 @@ -#![allow(dead_code)] +#![allow(dead_code)] // Phase 2: consumed by Issue Detail screen //! Issue detail screen state. +//! +//! Holds metadata, discussions, cross-references, and UI state for +//! viewing a single issue. Supports progressive hydration: metadata +//! loads first, discussions load async in a second phase. -use crate::message::{Discussion, EntityKey, IssueDetail}; +use crate::message::EntityKey; +use crate::view::common::cross_ref::{CrossRef, CrossRefState}; +use crate::view::common::discussion_tree::{DiscussionNode, DiscussionTreeState}; + +// --------------------------------------------------------------------------- +// IssueMetadata +// --------------------------------------------------------------------------- + +/// Full metadata for a single issue, fetched from the local DB. +#[derive(Debug, Clone)] +pub struct IssueMetadata { + /// Issue IID (project-scoped). + pub iid: i64, + /// Project path (e.g., "group/project"). + pub project_path: String, + /// Issue title. + pub title: String, + /// Issue description (markdown). + pub description: String, + /// Current state: "opened" or "closed". 
+ pub state: String, + /// Author username. + pub author: String, + /// Assigned usernames. + pub assignees: Vec, + /// Label names. + pub labels: Vec, + /// Milestone title (if set). + pub milestone: Option, + /// Due date (if set, "YYYY-MM-DD"). + pub due_date: Option, + /// Created timestamp (ms epoch). + pub created_at: i64, + /// Updated timestamp (ms epoch). + pub updated_at: i64, + /// GitLab web URL for "open in browser". + pub web_url: String, + /// Discussion count (for display before discussions load). + pub discussion_count: usize, +} + +// --------------------------------------------------------------------------- +// IssueDetailData +// --------------------------------------------------------------------------- + +/// Bundle returned by the metadata fetch action. +/// +/// Metadata + cross-refs load in Phase 1 (fast). Discussions load +/// separately in Phase 2. +#[derive(Debug, Clone)] +pub struct IssueDetailData { + pub metadata: IssueMetadata, + pub cross_refs: Vec, +} + +// --------------------------------------------------------------------------- +// DetailSection +// --------------------------------------------------------------------------- + +/// Which section of the detail view has keyboard focus. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum DetailSection { + /// Description area (scrollable text). + #[default] + Description, + /// Discussion tree. + Discussions, + /// Cross-references list. + CrossRefs, +} + +impl DetailSection { + /// Cycle to the next section. + #[must_use] + pub fn next(self) -> Self { + match self { + Self::Description => Self::Discussions, + Self::Discussions => Self::CrossRefs, + Self::CrossRefs => Self::Description, + } + } + + /// Cycle to the previous section. 
+    #[must_use]
+    pub fn prev(self) -> Self {
+        match self {
+            Self::Description => Self::CrossRefs,
+            Self::Discussions => Self::Description,
+            Self::CrossRefs => Self::Discussions,
+        }
+    }
+}
+
+// ---------------------------------------------------------------------------
+// IssueDetailState
+// ---------------------------------------------------------------------------
 
 /// State for the issue detail screen.
 #[derive(Debug, Default)]
 pub struct IssueDetailState {
-    pub key: Option<EntityKey>,
-    pub detail: Option<IssueDetail>,
-    pub discussions: Vec<Discussion>,
-    pub scroll_offset: u16,
+    /// Entity key for the currently displayed issue.
+    pub current_key: Option<EntityKey>,
+    /// Issue metadata (Phase 1 load).
+    pub metadata: Option<IssueMetadata>,
+    /// Discussion nodes (Phase 2 async load).
+    pub discussions: Vec<DiscussionNode>,
+    /// Whether discussions have finished loading.
+    pub discussions_loaded: bool,
+    /// Cross-references (loaded with metadata in Phase 1).
+    pub cross_refs: Vec<CrossRef>,
+    /// Discussion tree UI state (expand/collapse, selection).
+    pub tree_state: DiscussionTreeState,
+    /// Cross-reference list UI state.
+    pub cross_ref_state: CrossRefState,
+    /// Description scroll offset.
+    pub description_scroll: usize,
+    /// Active section for keyboard focus.
+    pub active_section: DetailSection,
+}
+
+impl IssueDetailState {
+    /// Reset state for a new issue.
+    pub fn load_new(&mut self, key: EntityKey) {
+        self.current_key = Some(key);
+        self.metadata = None;
+        self.discussions.clear();
+        self.discussions_loaded = false;
+        self.cross_refs.clear();
+        self.tree_state = DiscussionTreeState::default();
+        self.cross_ref_state = CrossRefState::default();
+        self.description_scroll = 0;
+        self.active_section = DetailSection::Description;
+    }
+
+    /// Apply Phase 1 data (metadata + cross-refs).
+    pub fn apply_metadata(&mut self, data: IssueDetailData) {
+        self.metadata = Some(data.metadata);
+        self.cross_refs = data.cross_refs;
+    }
+
+    /// Apply Phase 2 data (discussions).
+ pub fn apply_discussions(&mut self, discussions: Vec) { + self.discussions = discussions; + self.discussions_loaded = true; + } + + /// Whether we have metadata loaded for the current key. + #[must_use] + pub fn has_metadata(&self) -> bool { + self.metadata.is_some() + } + + /// Cycle to the next section. + pub fn next_section(&mut self) { + self.active_section = self.active_section.next(); + } + + /// Cycle to the previous section. + pub fn prev_section(&mut self) { + self.active_section = self.active_section.prev(); + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use crate::view::common::cross_ref::CrossRefKind; + + #[test] + fn test_issue_detail_state_default() { + let state = IssueDetailState::default(); + assert!(state.current_key.is_none()); + assert!(state.metadata.is_none()); + assert!(state.discussions.is_empty()); + assert!(!state.discussions_loaded); + assert!(state.cross_refs.is_empty()); + assert_eq!(state.active_section, DetailSection::Description); + } + + #[test] + fn test_load_new_resets_state() { + let mut state = IssueDetailState { + discussions_loaded: true, + description_scroll: 10, + active_section: DetailSection::CrossRefs, + ..IssueDetailState::default() + }; + + state.load_new(EntityKey::issue(1, 42)); + assert_eq!(state.current_key, Some(EntityKey::issue(1, 42))); + assert!(state.metadata.is_none()); + assert!(!state.discussions_loaded); + assert_eq!(state.description_scroll, 0); + assert_eq!(state.active_section, DetailSection::Description); + } + + #[test] + fn test_apply_metadata() { + let mut state = IssueDetailState::default(); + state.load_new(EntityKey::issue(1, 42)); + + let data = IssueDetailData { + metadata: IssueMetadata { + iid: 42, + project_path: "group/proj".into(), + title: "Fix auth".into(), + description: "Description here".into(), + state: 
"opened".into(), + author: "alice".into(), + assignees: vec!["bob".into()], + labels: vec!["backend".into()], + milestone: Some("v1.0".into()), + due_date: None, + created_at: 1_700_000_000_000, + updated_at: 1_700_000_060_000, + web_url: "https://gitlab.com/group/proj/-/issues/42".into(), + discussion_count: 3, + }, + cross_refs: vec![CrossRef { + kind: CrossRefKind::ClosingMr, + entity_key: EntityKey::mr(1, 10), + label: "Fix auth MR".into(), + navigable: true, + }], + }; + + state.apply_metadata(data); + assert!(state.has_metadata()); + assert_eq!(state.metadata.as_ref().unwrap().iid, 42); + assert_eq!(state.cross_refs.len(), 1); + } + + #[test] + fn test_apply_discussions() { + let mut state = IssueDetailState::default(); + assert!(!state.discussions_loaded); + + let discussions = vec![DiscussionNode { + discussion_id: "d1".into(), + notes: vec![], + resolvable: false, + resolved: false, + }]; + + state.apply_discussions(discussions); + assert!(state.discussions_loaded); + assert_eq!(state.discussions.len(), 1); + } + + #[test] + fn test_detail_section_cycling() { + let section = DetailSection::Description; + assert_eq!(section.next(), DetailSection::Discussions); + assert_eq!(section.next().next(), DetailSection::CrossRefs); + assert_eq!(section.next().next().next(), DetailSection::Description); + + assert_eq!(section.prev(), DetailSection::CrossRefs); + assert_eq!(section.prev().prev(), DetailSection::Discussions); + } + + #[test] + fn test_section_next_prev_round_trip() { + let mut state = IssueDetailState::default(); + assert_eq!(state.active_section, DetailSection::Description); + + state.next_section(); + assert_eq!(state.active_section, DetailSection::Discussions); + + state.prev_section(); + assert_eq!(state.active_section, DetailSection::Description); + } } diff --git a/crates/lore-tui/src/state/mr_detail.rs b/crates/lore-tui/src/state/mr_detail.rs index 43b5fb3..95b0574 100644 --- a/crates/lore-tui/src/state/mr_detail.rs +++ 
b/crates/lore-tui/src/state/mr_detail.rs @@ -1,14 +1,387 @@ -#![allow(dead_code)] +#![allow(dead_code)] // Phase 2: consumed by MR Detail screen //! Merge request detail screen state. +//! +//! Holds MR metadata, file changes, discussions, cross-references, +//! and UI state. Supports progressive hydration identical to +//! Issue Detail: metadata loads first, discussions load async. -use crate::message::{Discussion, EntityKey, MrDetail}; +use crate::message::EntityKey; +use crate::view::common::cross_ref::{CrossRef, CrossRefState}; +use crate::view::common::discussion_tree::{DiscussionNode, DiscussionTreeState}; + +// --------------------------------------------------------------------------- +// MrMetadata +// --------------------------------------------------------------------------- + +/// Full metadata for a single merge request, fetched from the local DB. +#[derive(Debug, Clone)] +pub struct MrMetadata { + /// MR IID (project-scoped). + pub iid: i64, + /// Project path (e.g., "group/project"). + pub project_path: String, + /// MR title. + pub title: String, + /// MR description (markdown). + pub description: String, + /// Current state: "opened", "merged", "closed", "locked". + pub state: String, + /// Whether this is a draft/WIP MR. + pub draft: bool, + /// Author username. + pub author: String, + /// Assigned usernames. + pub assignees: Vec, + /// Reviewer usernames. + pub reviewers: Vec, + /// Label names. + pub labels: Vec, + /// Source branch name. + pub source_branch: String, + /// Target branch name. + pub target_branch: String, + /// Detailed merge status (e.g., "mergeable", "checking"). + pub merge_status: String, + /// Created timestamp (ms epoch). + pub created_at: i64, + /// Updated timestamp (ms epoch). + pub updated_at: i64, + /// Merged timestamp (ms epoch), if merged. + pub merged_at: Option, + /// GitLab web URL. + pub web_url: String, + /// Discussion count (for display before discussions load). 
+ pub discussion_count: usize, + /// File change count. + pub file_change_count: usize, +} + +// --------------------------------------------------------------------------- +// FileChange +// --------------------------------------------------------------------------- + +/// A file changed in the merge request. +#[derive(Debug, Clone)] +pub struct FileChange { + /// Previous file path (if renamed). + pub old_path: Option, + /// New/current file path. + pub new_path: String, + /// Type of change. + pub change_type: FileChangeType, +} + +/// The type of file change in an MR. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FileChangeType { + Added, + Modified, + Deleted, + Renamed, +} + +impl FileChangeType { + /// Short icon for display. + #[must_use] + pub const fn icon(&self) -> &str { + match self { + Self::Added => "+", + Self::Modified => "~", + Self::Deleted => "-", + Self::Renamed => "R", + } + } + + /// Parse from DB string. + #[must_use] + pub fn parse_db(s: &str) -> Self { + match s { + "added" => Self::Added, + "deleted" => Self::Deleted, + "renamed" => Self::Renamed, + _ => Self::Modified, + } + } +} + +// --------------------------------------------------------------------------- +// MrDetailData +// --------------------------------------------------------------------------- + +/// Bundle returned by the metadata fetch action. +/// +/// Metadata + cross-refs + file changes load in Phase 1 (fast). +/// Discussions load separately in Phase 2. +#[derive(Debug, Clone)] +pub struct MrDetailData { + pub metadata: MrMetadata, + pub cross_refs: Vec, + pub file_changes: Vec, +} + +// --------------------------------------------------------------------------- +// MrTab +// --------------------------------------------------------------------------- + +/// Active tab in the MR detail view. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum MrTab { + /// Overview: description + cross-refs. + #[default] + Overview, + /// File changes list. 
+ Files, + /// Discussions (general + diff). + Discussions, +} + +impl MrTab { + /// Cycle to the next tab. + #[must_use] + pub fn next(self) -> Self { + match self { + Self::Overview => Self::Files, + Self::Files => Self::Discussions, + Self::Discussions => Self::Overview, + } + } + + /// Cycle to the previous tab. + #[must_use] + pub fn prev(self) -> Self { + match self { + Self::Overview => Self::Discussions, + Self::Files => Self::Overview, + Self::Discussions => Self::Files, + } + } + + /// Human-readable label. + #[must_use] + pub const fn label(&self) -> &str { + match self { + Self::Overview => "Overview", + Self::Files => "Files", + Self::Discussions => "Discussions", + } + } +} + +// --------------------------------------------------------------------------- +// MrDetailState +// --------------------------------------------------------------------------- /// State for the MR detail screen. #[derive(Debug, Default)] pub struct MrDetailState { - pub key: Option, - pub detail: Option, - pub discussions: Vec, - pub scroll_offset: u16, + /// Entity key for the currently displayed MR. + pub current_key: Option, + /// MR metadata (Phase 1 load). + pub metadata: Option, + /// File changes (loaded with metadata in Phase 1). + pub file_changes: Vec, + /// Discussion nodes (Phase 2 async load). + pub discussions: Vec, + /// Whether discussions have finished loading. + pub discussions_loaded: bool, + /// Cross-references (loaded with metadata in Phase 1). + pub cross_refs: Vec, + /// Discussion tree UI state. + pub tree_state: DiscussionTreeState, + /// Cross-reference list UI state. + pub cross_ref_state: CrossRefState, + /// Description scroll offset. + pub description_scroll: usize, + /// File list selected index. + pub file_selected: usize, + /// File list scroll offset. + pub file_scroll: usize, + /// Active tab. + pub active_tab: MrTab, +} + +impl MrDetailState { + /// Reset state for a new MR. 
+ pub fn load_new(&mut self, key: EntityKey) { + self.current_key = Some(key); + self.metadata = None; + self.file_changes.clear(); + self.discussions.clear(); + self.discussions_loaded = false; + self.cross_refs.clear(); + self.tree_state = DiscussionTreeState::default(); + self.cross_ref_state = CrossRefState::default(); + self.description_scroll = 0; + self.file_selected = 0; + self.file_scroll = 0; + self.active_tab = MrTab::Overview; + } + + /// Apply Phase 1 data (metadata + cross-refs + file changes). + pub fn apply_metadata(&mut self, data: MrDetailData) { + self.metadata = Some(data.metadata); + self.cross_refs = data.cross_refs; + self.file_changes = data.file_changes; + } + + /// Apply Phase 2 data (discussions). + pub fn apply_discussions(&mut self, discussions: Vec) { + self.discussions = discussions; + self.discussions_loaded = true; + } + + /// Whether we have metadata loaded. + #[must_use] + pub fn has_metadata(&self) -> bool { + self.metadata.is_some() + } + + /// Switch to the next tab. + pub fn next_tab(&mut self) { + self.active_tab = self.active_tab.next(); + } + + /// Switch to the previous tab. 
+ pub fn prev_tab(&mut self) { + self.active_tab = self.active_tab.prev(); + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use crate::view::common::cross_ref::CrossRefKind; + + #[test] + fn test_mr_detail_state_default() { + let state = MrDetailState::default(); + assert!(state.current_key.is_none()); + assert!(state.metadata.is_none()); + assert!(state.discussions.is_empty()); + assert!(!state.discussions_loaded); + assert!(state.file_changes.is_empty()); + assert_eq!(state.active_tab, MrTab::Overview); + } + + #[test] + fn test_load_new_resets_state() { + let mut state = MrDetailState { + discussions_loaded: true, + description_scroll: 10, + active_tab: MrTab::Files, + ..MrDetailState::default() + }; + + state.load_new(EntityKey::mr(1, 42)); + assert_eq!(state.current_key, Some(EntityKey::mr(1, 42))); + assert!(state.metadata.is_none()); + assert!(!state.discussions_loaded); + assert_eq!(state.description_scroll, 0); + assert_eq!(state.active_tab, MrTab::Overview); + } + + #[test] + fn test_apply_metadata() { + let mut state = MrDetailState::default(); + state.load_new(EntityKey::mr(1, 42)); + + let data = MrDetailData { + metadata: MrMetadata { + iid: 42, + project_path: "group/proj".into(), + title: "Fix auth".into(), + description: "MR description".into(), + state: "opened".into(), + draft: false, + author: "alice".into(), + assignees: vec!["bob".into()], + reviewers: vec!["carol".into()], + labels: vec!["backend".into()], + source_branch: "fix-auth".into(), + target_branch: "main".into(), + merge_status: "mergeable".into(), + created_at: 1_700_000_000_000, + updated_at: 1_700_000_060_000, + merged_at: None, + web_url: "https://gitlab.com/group/proj/-/merge_requests/42".into(), + discussion_count: 2, + file_change_count: 3, + }, + cross_refs: vec![CrossRef { + kind: 
CrossRefKind::RelatedIssue, + entity_key: EntityKey::issue(1, 10), + label: "Related issue".into(), + navigable: true, + }], + file_changes: vec![FileChange { + old_path: None, + new_path: "src/auth.rs".into(), + change_type: FileChangeType::Modified, + }], + }; + + state.apply_metadata(data); + assert!(state.has_metadata()); + assert_eq!(state.metadata.as_ref().unwrap().iid, 42); + assert_eq!(state.cross_refs.len(), 1); + assert_eq!(state.file_changes.len(), 1); + } + + #[test] + fn test_tab_cycling() { + let tab = MrTab::Overview; + assert_eq!(tab.next(), MrTab::Files); + assert_eq!(tab.next().next(), MrTab::Discussions); + assert_eq!(tab.next().next().next(), MrTab::Overview); + + assert_eq!(tab.prev(), MrTab::Discussions); + assert_eq!(tab.prev().prev(), MrTab::Files); + } + + #[test] + fn test_tab_labels() { + assert_eq!(MrTab::Overview.label(), "Overview"); + assert_eq!(MrTab::Files.label(), "Files"); + assert_eq!(MrTab::Discussions.label(), "Discussions"); + } + + #[test] + fn test_file_change_type_icon() { + assert_eq!(FileChangeType::Added.icon(), "+"); + assert_eq!(FileChangeType::Modified.icon(), "~"); + assert_eq!(FileChangeType::Deleted.icon(), "-"); + assert_eq!(FileChangeType::Renamed.icon(), "R"); + } + + #[test] + fn test_file_change_type_parse_db() { + assert_eq!(FileChangeType::parse_db("added"), FileChangeType::Added); + assert_eq!(FileChangeType::parse_db("deleted"), FileChangeType::Deleted); + assert_eq!(FileChangeType::parse_db("renamed"), FileChangeType::Renamed); + assert_eq!( + FileChangeType::parse_db("modified"), + FileChangeType::Modified + ); + assert_eq!( + FileChangeType::parse_db("unknown"), + FileChangeType::Modified + ); + } + + #[test] + fn test_next_prev_tab_on_state() { + let mut state = MrDetailState::default(); + assert_eq!(state.active_tab, MrTab::Overview); + + state.next_tab(); + assert_eq!(state.active_tab, MrTab::Files); + + state.prev_tab(); + assert_eq!(state.active_tab, MrTab::Overview); + } } diff --git 
a/crates/lore-tui/src/view/common/cross_ref.rs b/crates/lore-tui/src/view/common/cross_ref.rs new file mode 100644 index 0000000..6add2c8 --- /dev/null +++ b/crates/lore-tui/src/view/common/cross_ref.rs @@ -0,0 +1,410 @@ +#![allow(dead_code)] // Phase 2: consumed by Issue Detail + MR Detail screens + +//! Cross-reference widget for entity detail screens. +//! +//! Renders a list of linked entities (closing MRs, related issues, mentions) +//! as navigable items. Used in both Issue Detail and MR Detail views. + +use std::fmt; + +use ftui::core::geometry::Rect; +use ftui::render::cell::{Cell, PackedRgba}; +use ftui::render::drawing::Draw; +use ftui::render::frame::Frame; + +use crate::message::EntityKey; + +// --------------------------------------------------------------------------- +// CrossRefKind +// --------------------------------------------------------------------------- + +/// The relationship type between two entities. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CrossRefKind { + /// MR that closes this issue when merged. + ClosingMr, + /// Issue related via GitLab link. + RelatedIssue, + /// Entity mentioned in a note or description. + MentionedIn, +} + +impl CrossRefKind { + /// Short icon/prefix for display. + #[must_use] + pub const fn icon(&self) -> &str { + match self { + Self::ClosingMr => "MR", + Self::RelatedIssue => "REL", + Self::MentionedIn => "REF", + } + } +} + +impl fmt::Display for CrossRefKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::ClosingMr => write!(f, "Closing MR"), + Self::RelatedIssue => write!(f, "Related Issue"), + Self::MentionedIn => write!(f, "Mentioned In"), + } + } +} + +// --------------------------------------------------------------------------- +// CrossRef +// --------------------------------------------------------------------------- + +/// A single cross-reference to another entity. +#[derive(Debug, Clone)] +pub struct CrossRef { + /// Relationship type. 
+ pub kind: CrossRefKind, + /// Target entity identity. + pub entity_key: EntityKey, + /// Human-readable label (e.g., "Fix authentication flow"). + pub label: String, + /// Whether this ref points to an entity in the local DB (navigable). + pub navigable: bool, +} + +// --------------------------------------------------------------------------- +// CrossRefState +// --------------------------------------------------------------------------- + +/// Rendering state for the cross-reference list. +#[derive(Debug, Clone, Default)] +pub struct CrossRefState { + /// Index of the selected cross-reference. + pub selected: usize, + /// First visible item index. + pub scroll_offset: usize, +} + +impl CrossRefState { + /// Move selection down. + pub fn select_next(&mut self, total: usize) { + if total > 0 && self.selected < total - 1 { + self.selected += 1; + } + } + + /// Move selection up. + pub fn select_prev(&mut self) { + self.selected = self.selected.saturating_sub(1); + } +} + +// --------------------------------------------------------------------------- +// Colors +// --------------------------------------------------------------------------- + +/// Color scheme for cross-reference rendering. +pub struct CrossRefColors { + /// Foreground for the kind icon/badge. + pub kind_fg: PackedRgba, + /// Foreground for the label text. + pub label_fg: PackedRgba, + /// Muted foreground for non-navigable refs. + pub muted_fg: PackedRgba, + /// Selected item foreground. + pub selected_fg: PackedRgba, + /// Selected item background. + pub selected_bg: PackedRgba, +} + +// --------------------------------------------------------------------------- +// Render +// --------------------------------------------------------------------------- + +/// Render a list of cross-references within the given area. +/// +/// Returns the number of rows consumed. 
+/// +/// Layout per row: +/// ```text +/// [MR] !42 Fix authentication flow +/// [REL] #15 Related auth issue +/// [REF] !99 Mentioned in pipeline MR +/// ``` +pub fn render_cross_refs( + frame: &mut Frame<'_>, + refs: &[CrossRef], + state: &CrossRefState, + area: Rect, + colors: &CrossRefColors, +) -> u16 { + if refs.is_empty() || area.height == 0 || area.width < 10 { + return 0; + } + + let max_x = area.x.saturating_add(area.width); + let visible_count = (area.height as usize).min(refs.len().saturating_sub(state.scroll_offset)); + + for i in 0..visible_count { + let idx = state.scroll_offset + i; + let Some(cr) = refs.get(idx) else { break }; + + let y = area.y + i as u16; + let is_selected = idx == state.selected; + + // Background fill for selected row. + if is_selected { + frame.draw_rect_filled( + Rect::new(area.x, y, area.width, 1), + Cell { + fg: colors.selected_fg, + bg: colors.selected_bg, + ..Cell::default() + }, + ); + } + + let mut x = area.x; + + // Kind badge: [MR], [REL], [REF] + let badge = format!("[{}]", cr.kind.icon()); + let badge_style = if is_selected { + Cell { + fg: colors.selected_fg, + bg: colors.selected_bg, + ..Cell::default() + } + } else { + Cell { + fg: colors.kind_fg, + ..Cell::default() + } + }; + x = frame.print_text_clipped(x, y, &badge, badge_style, max_x); + + // Spacing + x = frame.print_text_clipped(x, y, " ", badge_style, max_x); + + // Entity prefix + label + let prefix = match cr.kind { + CrossRefKind::ClosingMr | CrossRefKind::MentionedIn => { + format!("!{} ", cr.entity_key.iid) + } + CrossRefKind::RelatedIssue => { + format!("#{} ", cr.entity_key.iid) + } + }; + + let label_style = if is_selected { + Cell { + fg: colors.selected_fg, + bg: colors.selected_bg, + ..Cell::default() + } + } else if cr.navigable { + Cell { + fg: colors.label_fg, + ..Cell::default() + } + } else { + Cell { + fg: colors.muted_fg, + ..Cell::default() + } + }; + + x = frame.print_text_clipped(x, y, &prefix, label_style, max_x); + let _ = 
frame.print_text_clipped(x, y, &cr.label, label_style, max_x); + } + + visible_count as u16 +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use ftui::render::grapheme_pool::GraphemePool; + + macro_rules! with_frame { + ($width:expr, $height:expr, |$frame:ident| $body:block) => {{ + let mut pool = GraphemePool::new(); + let mut $frame = Frame::new($width, $height, &mut pool); + $body + }}; + } + + fn sample_refs() -> Vec { + vec![ + CrossRef { + kind: CrossRefKind::ClosingMr, + entity_key: EntityKey::mr(1, 42), + label: "Fix authentication flow".into(), + navigable: true, + }, + CrossRef { + kind: CrossRefKind::RelatedIssue, + entity_key: EntityKey::issue(1, 15), + label: "Related auth issue".into(), + navigable: true, + }, + CrossRef { + kind: CrossRefKind::MentionedIn, + entity_key: EntityKey::mr(2, 99), + label: "Pipeline improvements".into(), + navigable: false, + }, + ] + } + + fn test_colors() -> CrossRefColors { + CrossRefColors { + kind_fg: PackedRgba::rgb(0xDA, 0x70, 0x2C), + label_fg: PackedRgba::rgb(0xCE, 0xCD, 0xC3), + muted_fg: PackedRgba::rgb(0x87, 0x87, 0x80), + selected_fg: PackedRgba::rgb(0x10, 0x0F, 0x0F), + selected_bg: PackedRgba::rgb(0xCE, 0xCD, 0xC3), + } + } + + // TDD anchor test from bead spec. 
+ #[test] + fn test_cross_ref_entity_key() { + let cr = CrossRef { + kind: CrossRefKind::ClosingMr, + entity_key: EntityKey::mr(1, 42), + label: "Fix auth".into(), + navigable: true, + }; + assert_eq!(cr.kind, CrossRefKind::ClosingMr); + assert_eq!(cr.entity_key, EntityKey::mr(1, 42)); + } + + #[test] + fn test_cross_ref_kind_display() { + assert_eq!(CrossRefKind::ClosingMr.to_string(), "Closing MR"); + assert_eq!(CrossRefKind::RelatedIssue.to_string(), "Related Issue"); + assert_eq!(CrossRefKind::MentionedIn.to_string(), "Mentioned In"); + } + + #[test] + fn test_cross_ref_kind_icon() { + assert_eq!(CrossRefKind::ClosingMr.icon(), "MR"); + assert_eq!(CrossRefKind::RelatedIssue.icon(), "REL"); + assert_eq!(CrossRefKind::MentionedIn.icon(), "REF"); + } + + #[test] + fn test_cross_ref_state_navigation() { + let mut state = CrossRefState::default(); + assert_eq!(state.selected, 0); + + state.select_next(3); + assert_eq!(state.selected, 1); + + state.select_next(3); + assert_eq!(state.selected, 2); + + // Can't go past end. + state.select_next(3); + assert_eq!(state.selected, 2); + + state.select_prev(); + assert_eq!(state.selected, 1); + + state.select_prev(); + assert_eq!(state.selected, 0); + + // Can't go before start. 
+ state.select_prev(); + assert_eq!(state.selected, 0); + } + + #[test] + fn test_render_cross_refs_no_panic() { + with_frame!(80, 10, |frame| { + let refs = sample_refs(); + let state = CrossRefState::default(); + let rows = render_cross_refs( + &mut frame, + &refs, + &state, + Rect::new(0, 0, 80, 10), + &test_colors(), + ); + assert_eq!(rows, 3); + }); + } + + #[test] + fn test_render_cross_refs_empty() { + with_frame!(80, 10, |frame| { + let state = CrossRefState::default(); + let rows = render_cross_refs( + &mut frame, + &[], + &state, + Rect::new(0, 0, 80, 10), + &test_colors(), + ); + assert_eq!(rows, 0); + }); + } + + #[test] + fn test_render_cross_refs_tiny_area() { + with_frame!(5, 1, |frame| { + let refs = sample_refs(); + let state = CrossRefState::default(); + let rows = render_cross_refs( + &mut frame, + &refs, + &state, + Rect::new(0, 0, 5, 1), + &test_colors(), + ); + // Too narrow (< 10), should bail. + assert_eq!(rows, 0); + }); + } + + #[test] + fn test_render_cross_refs_with_scroll() { + with_frame!(80, 2, |frame| { + let refs = sample_refs(); + let state = CrossRefState { + selected: 2, + scroll_offset: 1, + }; + let rows = render_cross_refs( + &mut frame, + &refs, + &state, + Rect::new(0, 0, 80, 2), + &test_colors(), + ); + // 2 visible (indices 1 and 2). 
+ assert_eq!(rows, 2); + }); + } + + #[test] + fn test_render_cross_refs_non_navigable() { + with_frame!(80, 5, |frame| { + let refs = vec![CrossRef { + kind: CrossRefKind::MentionedIn, + entity_key: EntityKey::mr(2, 99), + label: "Non-local entity".into(), + navigable: false, + }]; + let state = CrossRefState::default(); + let rows = render_cross_refs( + &mut frame, + &refs, + &state, + Rect::new(0, 0, 80, 5), + &test_colors(), + ); + assert_eq!(rows, 1); + }); + } +} diff --git a/crates/lore-tui/src/view/common/discussion_tree.rs b/crates/lore-tui/src/view/common/discussion_tree.rs new file mode 100644 index 0000000..7178cd4 --- /dev/null +++ b/crates/lore-tui/src/view/common/discussion_tree.rs @@ -0,0 +1,979 @@ +#![allow(dead_code)] // Phase 2: consumed by Issue Detail + MR Detail screens + +//! Discussion tree widget for entity detail screens. +//! +//! Renders threaded conversations from GitLab issues/MRs. Discussions are +//! top-level expandable nodes, with notes as children. Supports expand/collapse +//! persistence, system note styling, and diff note file path rendering. + +use std::collections::HashSet; + +use ftui::core::geometry::Rect; +use ftui::render::cell::{Cell, PackedRgba}; +use ftui::render::drawing::Draw; +use ftui::render::frame::Frame; + +use crate::clock::Clock; +use crate::safety::{UrlPolicy, sanitize_for_terminal}; + +// --------------------------------------------------------------------------- +// Data types +// --------------------------------------------------------------------------- + +/// A single discussion thread (top-level node). +#[derive(Debug, Clone)] +pub struct DiscussionNode { + /// GitLab discussion ID (used as expand/collapse key). + pub discussion_id: String, + /// Notes within this discussion, ordered by position. + pub notes: Vec, + /// Whether this discussion is resolvable (MR discussions only). + pub resolvable: bool, + /// Whether this discussion has been resolved. 
+ pub resolved: bool, +} + +impl DiscussionNode { + /// Summary line for collapsed display. + fn summary(&self) -> String { + let first = self.notes.first(); + let author = first.map_or("unknown", |n| n.author.as_str()); + let note_count = self.notes.len(); + let resolved_tag = if self.resolved { " [resolved]" } else { "" }; + + if note_count == 1 { + format!("{author}{resolved_tag}") + } else { + format!("{author} ({note_count} notes){resolved_tag}") + } + } + + /// First line of the first note body, sanitized and truncated. + fn preview(&self, max_chars: usize) -> String { + self.notes + .first() + .and_then(|n| n.body.lines().next()) + .map(|line| { + let sanitized = sanitize_for_terminal(line, UrlPolicy::Strip); + if sanitized.len() > max_chars { + let trunc = max_chars.saturating_sub(3); + // Find the last valid char boundary at or before `trunc` + // to avoid panicking on multi-byte UTF-8 (emoji, CJK). + let safe_end = sanitized + .char_indices() + .take_while(|&(i, _)| i <= trunc) + .last() + .map_or(0, |(i, c)| i + c.len_utf8()); + format!("{}...", &sanitized[..safe_end]) + } else { + sanitized + } + }) + .unwrap_or_default() + } +} + +/// A single note within a discussion. +#[derive(Debug, Clone)] +pub struct NoteNode { + /// Author username. + pub author: String, + /// Note body (markdown text from GitLab). + pub body: String, + /// Creation timestamp in milliseconds since epoch. + pub created_at: i64, + /// Whether this is a system-generated note. + pub is_system: bool, + /// Whether this is a diff/code review note. + pub is_diff_note: bool, + /// File path for diff notes. + pub diff_file_path: Option, + /// New line number for diff notes. + pub diff_new_line: Option, +} + +// --------------------------------------------------------------------------- +// State +// --------------------------------------------------------------------------- + +/// Rendering state for the discussion tree. 
+#[derive(Debug, Clone, Default)] +pub struct DiscussionTreeState { + /// Index of the selected discussion (0-based). + pub selected: usize, + /// First visible row index for scrolling. + pub scroll_offset: usize, + /// Set of expanded discussion IDs. + pub expanded: HashSet, +} + +impl DiscussionTreeState { + /// Move selection down. + pub fn select_next(&mut self, total: usize) { + if total > 0 && self.selected < total - 1 { + self.selected += 1; + } + } + + /// Move selection up. + pub fn select_prev(&mut self) { + self.selected = self.selected.saturating_sub(1); + } + + /// Toggle expand/collapse for the selected discussion. + pub fn toggle_selected(&mut self, discussions: &[DiscussionNode]) { + if let Some(d) = discussions.get(self.selected) { + let id = &d.discussion_id; + if self.expanded.contains(id) { + self.expanded.remove(id); + } else { + self.expanded.insert(id.clone()); + } + } + } + + /// Whether a discussion is expanded. + #[must_use] + pub fn is_expanded(&self, discussion_id: &str) -> bool { + self.expanded.contains(discussion_id) + } +} + +// --------------------------------------------------------------------------- +// Colors +// --------------------------------------------------------------------------- + +/// Color scheme for discussion tree rendering. +pub struct DiscussionTreeColors { + /// Author name foreground. + pub author_fg: PackedRgba, + /// Timestamp foreground. + pub timestamp_fg: PackedRgba, + /// Note body foreground. + pub body_fg: PackedRgba, + /// System note foreground (muted). + pub system_fg: PackedRgba, + /// Diff file path foreground. + pub diff_path_fg: PackedRgba, + /// Resolved indicator foreground. + pub resolved_fg: PackedRgba, + /// Tree guide characters. + pub guide_fg: PackedRgba, + /// Selected discussion background. + pub selected_fg: PackedRgba, + /// Selected discussion background. + pub selected_bg: PackedRgba, + /// Expand/collapse indicator. 
+ pub expand_fg: PackedRgba, +} + +// --------------------------------------------------------------------------- +// Relative time formatting +// --------------------------------------------------------------------------- + +/// Format a timestamp as a human-readable relative time string. +/// +/// Uses the provided `Clock` for deterministic rendering in tests. +#[must_use] +pub fn format_relative_time(epoch_ms: i64, clock: &dyn Clock) -> String { + let now_ms = clock.now_ms(); + let diff_ms = now_ms.saturating_sub(epoch_ms); + + if diff_ms < 0 { + return "just now".to_string(); + } + + let seconds = diff_ms / 1_000; + let minutes = seconds / 60; + let hours = minutes / 60; + let days = hours / 24; + let weeks = days / 7; + let months = days / 30; + + if seconds < 60 { + "just now".to_string() + } else if minutes < 60 { + format!("{minutes}m ago") + } else if hours < 24 { + format!("{hours}h ago") + } else if days < 7 { + format!("{days}d ago") + } else if weeks < 4 { + format!("{weeks}w ago") + } else { + format!("{months}mo ago") + } +} + +// --------------------------------------------------------------------------- +// Render +// --------------------------------------------------------------------------- + +/// Maximum indent depth for nested content (notes within discussions). +const INDENT: u16 = 4; + +/// Render a discussion tree within the given area. +/// +/// Returns the number of rows consumed. +/// +/// Layout: +/// ```text +/// > alice (3 notes) [resolved] <- collapsed discussion +/// First line of note body preview... +/// +/// v bob (2 notes) <- expanded discussion +/// | bob · 3h ago +/// | This is the first note body... +/// | +/// | alice · 1h ago <- diff note +/// | diff src/auth.rs:42 +/// | Code review comment about... 
/// ```
///
/// The body never draws past `area` — rows beyond the bottom edge are
/// dropped, and each text print is clipped at the right edge.
pub fn render_discussion_tree(
    frame: &mut Frame<'_>,
    discussions: &[DiscussionNode],
    state: &DiscussionTreeState,
    area: Rect,
    colors: &DiscussionTreeColors,
    clock: &dyn Clock,
) -> u16 {
    // Bail on degenerate areas; < 15 columns can't fit the guide + any
    // meaningful content.
    if discussions.is_empty() || area.height == 0 || area.width < 15 {
        return 0;
    }

    let max_x = area.x.saturating_add(area.width);
    let mut y = area.y;
    let y_max = area.y.saturating_add(area.height);

    // Pre-compute all visual rows to support scroll offset.
    let rows = compute_visual_rows_with_clock(
        discussions,
        state,
        max_x.saturating_sub(area.x) as usize,
        clock,
    );

    // Apply scroll offset.
    let visible_rows = rows
        .iter()
        .skip(state.scroll_offset)
        .take(area.height as usize);

    for row in visible_rows {
        if y >= y_max {
            break;
        }

        match row {
            // Discussion header: "> summary" collapsed / "v summary" expanded,
            // with an inline body preview when collapsed.
            VisualRow::DiscussionHeader {
                disc_idx,
                expanded,
                summary,
                preview,
            } => {
                let is_selected = *disc_idx == state.selected;

                // Background fill for selected.
                if is_selected {
                    frame.draw_rect_filled(
                        Rect::new(area.x, y, area.width, 1),
                        Cell {
                            fg: colors.selected_fg,
                            bg: colors.selected_bg,
                            ..Cell::default()
                        },
                    );
                }

                let style = if is_selected {
                    Cell {
                        fg: colors.selected_fg,
                        bg: colors.selected_bg,
                        ..Cell::default()
                    }
                } else {
                    Cell {
                        fg: colors.author_fg,
                        ..Cell::default()
                    }
                };

                let indicator = if *expanded { "v " } else { "> " };
                // print_text_clipped returns the x after the printed text;
                // thread it through so segments append left to right.
                let mut x = frame.print_text_clipped(area.x, y, indicator, style, max_x);
                x = frame.print_text_clipped(x, y, summary, style, max_x);

                // Show preview on same line for collapsed.
                if !expanded && !preview.is_empty() {
                    let preview_style = if is_selected {
                        style
                    } else {
                        Cell {
                            fg: colors.timestamp_fg,
                            ..Cell::default()
                        }
                    };
                    x = frame.print_text_clipped(x, y, " - ", preview_style, max_x);
                    let _ = frame.print_text_clipped(x, y, preview, preview_style, max_x);
                }

                y += 1;
            }

            // "author · 3h ago" line inside an expanded discussion.
            VisualRow::NoteHeader {
                author,
                relative_time,
                is_system,
                ..
            } => {
                let style = if *is_system {
                    Cell {
                        fg: colors.system_fg,
                        ..Cell::default()
                    }
                } else {
                    Cell {
                        fg: colors.author_fg,
                        ..Cell::default()
                    }
                };

                let guide_style = Cell {
                    fg: colors.guide_fg,
                    ..Cell::default()
                };

                // Ensure content starts at the INDENT column even if the
                // guide prefix printed short.
                let indent_x = area.x.saturating_add(INDENT);
                let mut x = frame.print_text_clipped(area.x, y, " | ", guide_style, max_x);
                x = frame.print_text_clipped(x.max(indent_x), y, author, style, max_x);

                let time_style = Cell {
                    fg: colors.timestamp_fg,
                    ..Cell::default()
                };
                x = frame.print_text_clipped(x, y, " · ", time_style, max_x);
                let _ = frame.print_text_clipped(x, y, relative_time, time_style, max_x);

                y += 1;
            }

            // "diff path:line" location line for a diff note.
            VisualRow::DiffPath { file_path, line } => {
                let guide_style = Cell {
                    fg: colors.guide_fg,
                    ..Cell::default()
                };
                let path_style = Cell {
                    fg: colors.diff_path_fg,
                    ..Cell::default()
                };

                let mut x = frame.print_text_clipped(area.x, y, " | ", guide_style, max_x);
                let indent_x = area.x.saturating_add(INDENT);
                x = x.max(indent_x);

                let location = match line {
                    Some(l) => format!("diff {file_path}:{l}"),
                    None => format!("diff {file_path}"),
                };
                let _ = frame.print_text_clipped(x, y, &location, path_style, max_x);

                y += 1;
            }

            // One wrapped/split line of a note body.
            VisualRow::BodyLine { text, is_system } => {
                let guide_style = Cell {
                    fg: colors.guide_fg,
                    ..Cell::default()
                };
                let body_style = if *is_system {
                    Cell {
                        fg: colors.system_fg,
                        ..Cell::default()
                    }
                } else {
                    Cell {
                        fg: colors.body_fg,
                        ..Cell::default()
                    }
                };

                let mut x = frame.print_text_clipped(area.x, y, " | ", guide_style, max_x);
                let indent_x = area.x.saturating_add(INDENT);
                x = x.max(indent_x);
                let _ = frame.print_text_clipped(x, y, text, body_style, max_x);

                y += 1;
            }

            // Blank guide-only row between notes of the same discussion.
            VisualRow::Separator => {
                let guide_style = Cell {
                    fg: colors.guide_fg,
                    ..Cell::default()
                };
                let _ = frame.print_text_clipped(area.x, y, " |", guide_style, max_x);
                y += 1;
            }
        }
    }

    // Rows actually consumed (y never moved past y_max, so this fits u16).
    y.saturating_sub(area.y)
}
+ +// --------------------------------------------------------------------------- +// Visual row computation +// --------------------------------------------------------------------------- + +/// Pre-computed visual row for the discussion tree. +/// +/// We flatten the tree into rows to support scroll offset correctly. +#[derive(Debug)] +enum VisualRow { + /// Discussion header (collapsed or expanded). + DiscussionHeader { + disc_idx: usize, + expanded: bool, + summary: String, + preview: String, + }, + /// Note author + timestamp line. + NoteHeader { + author: String, + relative_time: String, + is_system: bool, + }, + /// Diff note file path line. + DiffPath { + file_path: String, + line: Option, + }, + /// Note body text line. + BodyLine { text: String, is_system: bool }, + /// Blank separator between notes. + Separator, +} + +/// Maximum body lines shown per note to prevent one huge note from +/// consuming the entire viewport. +const MAX_BODY_LINES: usize = 10; + +/// Compute visual rows with relative timestamps from the clock. 
+fn compute_visual_rows_with_clock( + discussions: &[DiscussionNode], + state: &DiscussionTreeState, + available_width: usize, + clock: &dyn Clock, +) -> Vec { + let mut rows = Vec::new(); + let preview_max = available_width.saturating_sub(40).max(20); + + for (idx, disc) in discussions.iter().enumerate() { + let expanded = state.is_expanded(&disc.discussion_id); + + rows.push(VisualRow::DiscussionHeader { + disc_idx: idx, + expanded, + summary: disc.summary(), + preview: if expanded { + String::new() + } else { + disc.preview(preview_max) + }, + }); + + if expanded { + for (note_idx, note) in disc.notes.iter().enumerate() { + if note_idx > 0 { + rows.push(VisualRow::Separator); + } + + rows.push(VisualRow::NoteHeader { + author: note.author.clone(), + relative_time: format_relative_time(note.created_at, clock), + is_system: note.is_system, + }); + + if note.is_diff_note + && let Some(ref path) = note.diff_file_path + { + rows.push(VisualRow::DiffPath { + file_path: path.clone(), + line: note.diff_new_line, + }); + } + + let sanitized = sanitize_for_terminal(¬e.body, UrlPolicy::Strip); + for (line_idx, line) in sanitized.lines().enumerate() { + if line_idx >= MAX_BODY_LINES { + rows.push(VisualRow::BodyLine { + text: "...".to_string(), + is_system: note.is_system, + }); + break; + } + rows.push(VisualRow::BodyLine { + text: line.to_string(), + is_system: note.is_system, + }); + } + } + } + } + + rows +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use crate::clock::FakeClock; + use ftui::render::grapheme_pool::GraphemePool; + + macro_rules! 
with_frame { + ($width:expr, $height:expr, |$frame:ident| $body:block) => {{ + let mut pool = GraphemePool::new(); + let mut $frame = Frame::new($width, $height, &mut pool); + $body + }}; + } + + fn sample_note(author: &str, body: &str, created_at: i64) -> NoteNode { + NoteNode { + author: author.into(), + body: body.into(), + created_at, + is_system: false, + is_diff_note: false, + diff_file_path: None, + diff_new_line: None, + } + } + + fn system_note(body: &str, created_at: i64) -> NoteNode { + NoteNode { + author: "system".into(), + body: body.into(), + created_at, + is_system: true, + is_diff_note: false, + diff_file_path: None, + diff_new_line: None, + } + } + + fn diff_note(author: &str, body: &str, path: &str, line: i64, created_at: i64) -> NoteNode { + NoteNode { + author: author.into(), + body: body.into(), + created_at, + is_system: false, + is_diff_note: true, + diff_file_path: Some(path.into()), + diff_new_line: Some(line), + } + } + + fn sample_discussions() -> Vec { + vec![ + DiscussionNode { + discussion_id: "disc-1".into(), + notes: vec![ + sample_note("alice", "This looks good overall", 1_700_000_000_000), + sample_note("bob", "Agreed, but one concern", 1_700_000_060_000), + ], + resolvable: false, + resolved: false, + }, + DiscussionNode { + discussion_id: "disc-2".into(), + notes: vec![diff_note( + "charlie", + "This function needs error handling", + "src/auth.rs", + 42, + 1_700_000_120_000, + )], + resolvable: true, + resolved: true, + }, + DiscussionNode { + discussion_id: "disc-3".into(), + notes: vec![system_note("changed the description", 1_700_000_180_000)], + resolvable: false, + resolved: false, + }, + ] + } + + fn test_colors() -> DiscussionTreeColors { + DiscussionTreeColors { + author_fg: PackedRgba::rgb(0xCE, 0xCD, 0xC3), + timestamp_fg: PackedRgba::rgb(0x87, 0x87, 0x80), + body_fg: PackedRgba::rgb(0xCE, 0xCD, 0xC3), + system_fg: PackedRgba::rgb(0x6F, 0x6E, 0x69), + diff_path_fg: PackedRgba::rgb(0x87, 0x96, 0x6B), + resolved_fg: 
PackedRgba::rgb(0x87, 0x96, 0x6B), + guide_fg: PackedRgba::rgb(0x40, 0x40, 0x3C), + selected_fg: PackedRgba::rgb(0x10, 0x0F, 0x0F), + selected_bg: PackedRgba::rgb(0xCE, 0xCD, 0xC3), + expand_fg: PackedRgba::rgb(0xDA, 0x70, 0x2C), + } + } + + // Clock set to 1h after the last sample note. + fn test_clock() -> FakeClock { + FakeClock::from_ms(1_700_000_180_000 + 3_600_000) + } + + #[test] + fn test_format_relative_time_just_now() { + let clock = FakeClock::from_ms(1_000_000); + assert_eq!(format_relative_time(1_000_000, &clock), "just now"); + assert_eq!(format_relative_time(999_990, &clock), "just now"); + } + + #[test] + fn test_format_relative_time_minutes() { + let clock = FakeClock::from_ms(1_000_000 + 5 * 60 * 1_000); + assert_eq!(format_relative_time(1_000_000, &clock), "5m ago"); + } + + #[test] + fn test_format_relative_time_hours() { + let clock = FakeClock::from_ms(1_000_000 + 3 * 3_600 * 1_000); + assert_eq!(format_relative_time(1_000_000, &clock), "3h ago"); + } + + #[test] + fn test_format_relative_time_days() { + let clock = FakeClock::from_ms(1_000_000 + 2 * 86_400 * 1_000); + assert_eq!(format_relative_time(1_000_000, &clock), "2d ago"); + } + + #[test] + fn test_format_relative_time_weeks() { + let clock = FakeClock::from_ms(1_000_000 + 14 * 86_400 * 1_000); + assert_eq!(format_relative_time(1_000_000, &clock), "2w ago"); + } + + #[test] + fn test_format_relative_time_months() { + let clock = FakeClock::from_ms(1_000_000 + 60 * 86_400 * 1_000); + assert_eq!(format_relative_time(1_000_000, &clock), "2mo ago"); + } + + #[test] + fn test_discussion_node_summary() { + let disc = DiscussionNode { + discussion_id: "d1".into(), + notes: vec![ + sample_note("alice", "body", 0), + sample_note("bob", "reply", 1000), + ], + resolvable: false, + resolved: false, + }; + assert_eq!(disc.summary(), "alice (2 notes)"); + } + + #[test] + fn test_discussion_node_summary_single() { + let disc = DiscussionNode { + discussion_id: "d1".into(), + notes: 
vec![sample_note("alice", "body", 0)], + resolvable: false, + resolved: false, + }; + assert_eq!(disc.summary(), "alice"); + } + + #[test] + fn test_discussion_node_summary_resolved() { + let disc = DiscussionNode { + discussion_id: "d1".into(), + notes: vec![sample_note("alice", "body", 0)], + resolvable: true, + resolved: true, + }; + assert_eq!(disc.summary(), "alice [resolved]"); + } + + #[test] + fn test_discussion_node_preview() { + let disc = DiscussionNode { + discussion_id: "d1".into(), + notes: vec![sample_note("alice", "First line\nSecond line", 0)], + resolvable: false, + resolved: false, + }; + assert_eq!(disc.preview(50), "First line"); + } + + #[test] + fn test_discussion_tree_state_navigation() { + let mut state = DiscussionTreeState::default(); + assert_eq!(state.selected, 0); + + state.select_next(3); + assert_eq!(state.selected, 1); + + state.select_next(3); + assert_eq!(state.selected, 2); + + state.select_next(3); + assert_eq!(state.selected, 2); + + state.select_prev(); + assert_eq!(state.selected, 1); + + state.select_prev(); + assert_eq!(state.selected, 0); + + state.select_prev(); + assert_eq!(state.selected, 0); + } + + #[test] + fn test_discussion_tree_state_toggle() { + let discussions = sample_discussions(); + let mut state = DiscussionTreeState::default(); + + assert!(!state.is_expanded("disc-1")); + + state.toggle_selected(&discussions); + assert!(state.is_expanded("disc-1")); + + state.toggle_selected(&discussions); + assert!(!state.is_expanded("disc-1")); + } + + #[test] + fn test_render_discussion_tree_collapsed_no_panic() { + with_frame!(80, 20, |frame| { + let discussions = sample_discussions(); + let state = DiscussionTreeState::default(); + let clock = test_clock(); + let rows = render_discussion_tree( + &mut frame, + &discussions, + &state, + Rect::new(0, 0, 80, 20), + &test_colors(), + &clock, + ); + // 3 discussions, all collapsed = 3 rows. 
+ assert_eq!(rows, 3); + }); + } + + #[test] + fn test_render_discussion_tree_expanded_no_panic() { + with_frame!(80, 30, |frame| { + let discussions = sample_discussions(); + let mut state = DiscussionTreeState::default(); + state.expanded.insert("disc-1".into()); + let clock = test_clock(); + let rows = render_discussion_tree( + &mut frame, + &discussions, + &state, + Rect::new(0, 0, 80, 30), + &test_colors(), + &clock, + ); + // disc-1 expanded: header + 2 notes (each: header + body line) + separator between + // = 1 + (1+1) + 1 + (1+1) = 6 rows from disc-1 + // disc-2 collapsed: 1 row + // disc-3 collapsed: 1 row + // Total: 8 + assert!(rows >= 6); // At least disc-1 content + 2 collapsed. + }); + } + + #[test] + fn test_render_discussion_tree_empty() { + with_frame!(80, 20, |frame| { + let state = DiscussionTreeState::default(); + let clock = test_clock(); + let rows = render_discussion_tree( + &mut frame, + &[], + &state, + Rect::new(0, 0, 80, 20), + &test_colors(), + &clock, + ); + assert_eq!(rows, 0); + }); + } + + #[test] + fn test_render_discussion_tree_tiny_area() { + with_frame!(10, 2, |frame| { + let discussions = sample_discussions(); + let state = DiscussionTreeState::default(); + let clock = test_clock(); + let rows = render_discussion_tree( + &mut frame, + &discussions, + &state, + Rect::new(0, 0, 10, 2), + &test_colors(), + &clock, + ); + // Too narrow (< 15), should bail. 
+ assert_eq!(rows, 0); + }); + } + + #[test] + fn test_render_discussion_tree_with_diff_note() { + with_frame!(80, 30, |frame| { + let discussions = vec![DiscussionNode { + discussion_id: "diff-disc".into(), + notes: vec![diff_note( + "reviewer", + "Add error handling here", + "src/main.rs", + 42, + 1_700_000_000_000, + )], + resolvable: true, + resolved: false, + }]; + let mut state = DiscussionTreeState::default(); + state.expanded.insert("diff-disc".into()); + let clock = test_clock(); + let rows = render_discussion_tree( + &mut frame, + &discussions, + &state, + Rect::new(0, 0, 80, 30), + &test_colors(), + &clock, + ); + // header + note header + diff path + body line = 4 + assert!(rows >= 3); + }); + } + + #[test] + fn test_render_discussion_tree_system_note() { + with_frame!(80, 20, |frame| { + let discussions = vec![DiscussionNode { + discussion_id: "sys-disc".into(), + notes: vec![system_note("changed the description", 1_700_000_000_000)], + resolvable: false, + resolved: false, + }]; + let mut state = DiscussionTreeState::default(); + state.expanded.insert("sys-disc".into()); + let clock = test_clock(); + let rows = render_discussion_tree( + &mut frame, + &discussions, + &state, + Rect::new(0, 0, 80, 20), + &test_colors(), + &clock, + ); + assert!(rows >= 2); + }); + } + + #[test] + fn test_compute_visual_rows_collapsed() { + let discussions = sample_discussions(); + let state = DiscussionTreeState::default(); + let clock = test_clock(); + let rows = compute_visual_rows_with_clock(&discussions, &state, 80, &clock); + + // 3 collapsed headers. + assert_eq!(rows.len(), 3); + assert!(matches!( + rows[0], + VisualRow::DiscussionHeader { + expanded: false, + .. 
+ } + )); + } + + #[test] + fn test_compute_visual_rows_expanded() { + let discussions = sample_discussions(); + let mut state = DiscussionTreeState::default(); + state.expanded.insert("disc-1".into()); + let clock = test_clock(); + let rows = compute_visual_rows_with_clock(&discussions, &state, 80, &clock); + + // disc-1: header + note1 (header + body) + separator + note2 (header + body) = 6 + // disc-2: 1 header + // disc-3: 1 header + // Total: 8 + assert!(rows.len() >= 6); + assert!(matches!( + rows[0], + VisualRow::DiscussionHeader { expanded: true, .. } + )); + } + + #[test] + fn test_long_body_truncation() { + let long_body = (0..20) + .map(|i| format!("Line {i} of a very long discussion note")) + .collect::>() + .join("\n"); + + let discussions = vec![DiscussionNode { + discussion_id: "long".into(), + notes: vec![sample_note("alice", &long_body, 1_700_000_000_000)], + resolvable: false, + resolved: false, + }]; + let mut state = DiscussionTreeState::default(); + state.expanded.insert("long".into()); + let clock = test_clock(); + let rows = compute_visual_rows_with_clock(&discussions, &state, 80, &clock); + + // Header + note header + MAX_BODY_LINES + 1 ("...") = 1 + 1 + 10 + 1 = 13 + let body_lines: Vec<_> = rows + .iter() + .filter(|r| matches!(r, VisualRow::BodyLine { .. })) + .collect(); + // Should cap at MAX_BODY_LINES + 1 (for the "..." truncation line). + assert!(body_lines.len() <= MAX_BODY_LINES + 1); + } + + #[test] + fn test_preview_multibyte_utf8_no_panic() { + // Emoji are 4 bytes each. Truncating at a byte boundary that falls + // inside a multi-byte char must not panic. + let disc = DiscussionNode { + discussion_id: "d-utf8".into(), + notes: vec![sample_note( + "alice", + "Hello 🌍🌎🌏 world of emoji 🎉🎊🎈", + 0, + )], + resolvable: false, + resolved: false, + }; + // max_chars=10 would land inside the first emoji's bytes. + let preview = disc.preview(10); + assert!(preview.ends_with("...")); + assert!(preview.len() <= 20); // char-bounded + "..." 
+ + // Edge: max_chars smaller than a single multi-byte char. + let disc2 = DiscussionNode { + discussion_id: "d-utf8-2".into(), + notes: vec![sample_note("bob", "🌍🌎🌏", 0)], + resolvable: false, + resolved: false, + }; + let preview2 = disc2.preview(3); + assert!(preview2.ends_with("...")); + } +} diff --git a/crates/lore-tui/src/view/common/mod.rs b/crates/lore-tui/src/view/common/mod.rs index c3ce5bc..4939159 100644 --- a/crates/lore-tui/src/view/common/mod.rs +++ b/crates/lore-tui/src/view/common/mod.rs @@ -5,6 +5,8 @@ //! no side effects. mod breadcrumb; +pub mod cross_ref; +pub mod discussion_tree; pub mod entity_table; mod error_toast; pub mod filter_bar; @@ -13,6 +15,11 @@ mod loading; mod status_bar; pub use breadcrumb::render_breadcrumb; +pub use cross_ref::{CrossRef, CrossRefColors, CrossRefKind, CrossRefState, render_cross_refs}; +pub use discussion_tree::{ + DiscussionNode, DiscussionTreeColors, DiscussionTreeState, NoteNode, format_relative_time, + render_discussion_tree, +}; pub use entity_table::{ColumnDef, EntityTableState, TableColors, TableRow, render_entity_table}; pub use error_toast::render_error_toast; pub use filter_bar::{FilterBarColors, FilterBarState, render_filter_bar}; diff --git a/crates/lore-tui/src/view/issue_detail.rs b/crates/lore-tui/src/view/issue_detail.rs new file mode 100644 index 0000000..959e542 --- /dev/null +++ b/crates/lore-tui/src/view/issue_detail.rs @@ -0,0 +1,626 @@ +#![allow(dead_code)] // Phase 2: consumed by view/mod.rs screen dispatch + +//! Issue detail screen view. +//! +//! Composes metadata header, description, discussion tree, and +//! cross-references into a scrollable detail layout. Supports +//! progressive hydration: metadata renders immediately while +//! discussions load async in Phase 2. 
+ +use ftui::core::geometry::Rect; +use ftui::render::cell::{Cell, PackedRgba}; +use ftui::render::drawing::Draw; +use ftui::render::frame::Frame; + +use crate::clock::Clock; +use crate::safety::{UrlPolicy, sanitize_for_terminal}; +use crate::state::issue_detail::{DetailSection, IssueDetailState, IssueMetadata}; +use crate::view::common::cross_ref::{CrossRefColors, render_cross_refs}; +use crate::view::common::discussion_tree::{DiscussionTreeColors, render_discussion_tree}; + +// --------------------------------------------------------------------------- +// Colors (Flexoki palette — will use injected Theme in a later phase) +// --------------------------------------------------------------------------- + +const TEXT: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx +const TEXT_MUTED: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2 +const ACCENT: PackedRgba = PackedRgba::rgb(0xDA, 0x70, 0x2C); // orange +const GREEN: PackedRgba = PackedRgba::rgb(0x87, 0x9A, 0x39); // green +const RED: PackedRgba = PackedRgba::rgb(0xAF, 0x3A, 0x29); // red +const CYAN: PackedRgba = PackedRgba::rgb(0x3A, 0xA9, 0x9F); // cyan +const BG_SURFACE: PackedRgba = PackedRgba::rgb(0x28, 0x28, 0x24); // bg-2 +const BORDER: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80); // tx-2 +const SELECTED_FG: PackedRgba = PackedRgba::rgb(0x10, 0x0F, 0x0F); // bg +const SELECTED_BG: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3); // tx + +// --------------------------------------------------------------------------- +// Color constructors +// --------------------------------------------------------------------------- + +fn discussion_colors() -> DiscussionTreeColors { + DiscussionTreeColors { + author_fg: CYAN, + timestamp_fg: TEXT_MUTED, + body_fg: TEXT, + system_fg: TEXT_MUTED, + diff_path_fg: GREEN, + resolved_fg: TEXT_MUTED, + guide_fg: BORDER, + selected_fg: SELECTED_FG, + selected_bg: SELECTED_BG, + expand_fg: ACCENT, + } +} + +fn cross_ref_colors() -> CrossRefColors { + CrossRefColors { 
+ kind_fg: ACCENT, + label_fg: TEXT, + muted_fg: TEXT_MUTED, + selected_fg: SELECTED_FG, + selected_bg: SELECTED_BG, + } +} + +// --------------------------------------------------------------------------- +// Render +// --------------------------------------------------------------------------- + +/// Render the full issue detail screen. +/// +/// Layout: +/// ```text +/// Row 0: #42 Fix authentication flow (title bar) +/// Row 1: opened | alice | backend, security (metadata row) +/// Row 2: Milestone: v1.0 | Due: 2026-03-01 (optional) +/// Row 3: ─────────────────────────────────── (separator) +/// Row 4..N: Description text... (scrollable) +/// ─────────────────────────────────── (separator) +/// Discussions (3) (section header) +/// ▶ alice: Fixed the login flow... (collapsed) +/// ▼ bob: I think we should also... (expanded) +/// bob: body line 1... +/// ─────────────────────────────────── (separator) +/// Cross References (section header) +/// [MR] !10 Fix authentication MR +/// ``` +pub fn render_issue_detail( + frame: &mut Frame<'_>, + state: &IssueDetailState, + area: Rect, + clock: &dyn Clock, +) { + if area.height < 3 || area.width < 10 { + return; + } + + let Some(ref meta) = state.metadata else { + // No metadata yet — the loading spinner handles this. + return; + }; + + let max_x = area.x.saturating_add(area.width); + let mut y = area.y; + + // --- Title bar --- + y = render_title_bar(frame, meta, area.x, y, max_x); + + // --- Metadata row --- + y = render_metadata_row(frame, meta, area.x, y, max_x); + + // --- Optional milestone / due date row --- + if meta.milestone.is_some() || meta.due_date.is_some() { + y = render_milestone_row(frame, meta, area.x, y, max_x); + } + + // --- Separator --- + y = render_separator(frame, area.x, y, area.width); + + let bottom = area.y.saturating_add(area.height); + if y >= bottom { + return; + } + + // Remaining space is split between description, discussions, and cross-refs. 
+ let remaining = bottom.saturating_sub(y); + + // Compute section heights based on content. + let desc_lines = count_description_lines(meta, area.width); + let disc_count = state.discussions.len(); + let xref_count = state.cross_refs.len(); + + let (desc_h, disc_h, xref_h) = allocate_sections(remaining, desc_lines, disc_count, xref_count); + + // --- Description section --- + if desc_h > 0 { + let desc_area = Rect::new(area.x, y, area.width, desc_h); + let is_focused = state.active_section == DetailSection::Description; + render_description(frame, meta, state.description_scroll, desc_area, is_focused); + y += desc_h; + } + + // --- Separator before discussions --- + if (disc_h > 0 || xref_h > 0) && y < bottom { + y = render_separator(frame, area.x, y, area.width); + } + + // --- Discussions section --- + if disc_h > 0 && y < bottom { + let header_h = 1; + let is_focused = state.active_section == DetailSection::Discussions; + + // Section header. + render_section_header( + frame, + &format!("Discussions ({})", state.discussions.len()), + area.x, + y, + max_x, + is_focused, + ); + y += header_h; + + if !state.discussions_loaded { + // Still loading. 
+ let style = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let _ = frame.print_text_clipped(area.x + 1, y, "Loading discussions...", style, max_x); + y += 1; + } else if state.discussions.is_empty() { + let style = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let _ = frame.print_text_clipped(area.x + 1, y, "No discussions", style, max_x); + y += 1; + } else { + let tree_height = disc_h.saturating_sub(header_h); + if tree_height > 0 { + let tree_area = Rect::new(area.x, y, area.width, tree_height); + let rendered = render_discussion_tree( + frame, + &state.discussions, + &state.tree_state, + tree_area, + &discussion_colors(), + clock, + ); + y += rendered; + } + } + } + + // --- Separator before cross-refs --- + if xref_h > 0 && y < bottom { + y = render_separator(frame, area.x, y, area.width); + } + + // --- Cross-references section --- + if xref_h > 0 && y < bottom { + let is_focused = state.active_section == DetailSection::CrossRefs; + + render_section_header( + frame, + &format!("Cross References ({})", state.cross_refs.len()), + area.x, + y, + max_x, + is_focused, + ); + y += 1; + + if state.cross_refs.is_empty() { + let style = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let _ = frame.print_text_clipped(area.x + 1, y, "No cross-references", style, max_x); + } else { + let refs_height = xref_h.saturating_sub(1); // minus header + if refs_height > 0 { + let refs_area = Rect::new(area.x, y, area.width, refs_height); + let _ = render_cross_refs( + frame, + &state.cross_refs, + &state.cross_ref_state, + refs_area, + &cross_ref_colors(), + ); + } + } + } +} + +// --------------------------------------------------------------------------- +// Sub-renderers +// --------------------------------------------------------------------------- + +/// Render the issue title bar: `#42 Fix authentication flow` +fn render_title_bar( + frame: &mut Frame<'_>, + meta: &IssueMetadata, + x: u16, + y: u16, + max_x: u16, +) -> u16 { + let iid_text = format!("#{} ", 
meta.iid); + let iid_style = Cell { + fg: ACCENT, + ..Cell::default() + }; + let title_style = Cell { + fg: TEXT, + ..Cell::default() + }; + + let cx = frame.print_text_clipped(x, y, &iid_text, iid_style, max_x); + let safe_title = sanitize_for_terminal(&meta.title, UrlPolicy::Strip); + let _ = frame.print_text_clipped(cx, y, &safe_title, title_style, max_x); + + y + 1 +} + +/// Render the metadata row: `opened | alice | backend, security` +fn render_metadata_row( + frame: &mut Frame<'_>, + meta: &IssueMetadata, + x: u16, + y: u16, + max_x: u16, +) -> u16 { + let state_fg = match meta.state.as_str() { + "opened" => GREEN, + "closed" => RED, + _ => TEXT_MUTED, + }; + let state_style = Cell { + fg: state_fg, + ..Cell::default() + }; + let muted_style = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let author_style = Cell { + fg: CYAN, + ..Cell::default() + }; + + let mut cx = frame.print_text_clipped(x, y, &meta.state, state_style, max_x); + cx = frame.print_text_clipped(cx, y, " | ", muted_style, max_x); + cx = frame.print_text_clipped(cx, y, &meta.author, author_style, max_x); + + if !meta.labels.is_empty() { + cx = frame.print_text_clipped(cx, y, " | ", muted_style, max_x); + let labels_text = meta.labels.join(", "); + let _ = frame.print_text_clipped(cx, y, &labels_text, muted_style, max_x); + } + + if !meta.assignees.is_empty() { + cx = frame.print_text_clipped(cx, y, " | ", muted_style, max_x); + let assignees_text = format!("-> {}", meta.assignees.join(", ")); + let _ = frame.print_text_clipped(cx, y, &assignees_text, muted_style, max_x); + } + + y + 1 +} + +/// Render optional milestone / due date row. 
+fn render_milestone_row( + frame: &mut Frame<'_>, + meta: &IssueMetadata, + x: u16, + y: u16, + max_x: u16, +) -> u16 { + let muted = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let mut cx = x; + + if let Some(ref ms) = meta.milestone { + cx = frame.print_text_clipped(cx, y, "Milestone: ", muted, max_x); + let val_style = Cell { + fg: TEXT, + ..Cell::default() + }; + cx = frame.print_text_clipped(cx, y, ms, val_style, max_x); + } + + if let Some(ref due) = meta.due_date { + if cx > x { + cx = frame.print_text_clipped(cx, y, " | ", muted, max_x); + } + cx = frame.print_text_clipped(cx, y, "Due: ", muted, max_x); + let val_style = Cell { + fg: TEXT, + ..Cell::default() + }; + let _ = frame.print_text_clipped(cx, y, due, val_style, max_x); + } + + y + 1 +} + +/// Render a horizontal separator line. +fn render_separator(frame: &mut Frame<'_>, x: u16, y: u16, width: u16) -> u16 { + let sep_style = Cell { + fg: BORDER, + ..Cell::default() + }; + let line: String = "\u{2500}".repeat(width as usize); + let _ = frame.print_text_clipped(x, y, &line, sep_style, x.saturating_add(width)); + y + 1 +} + +/// Render a section header with focus indicator. +fn render_section_header( + frame: &mut Frame<'_>, + label: &str, + x: u16, + y: u16, + max_x: u16, + is_focused: bool, +) { + if is_focused { + let style = Cell { + fg: SELECTED_FG, + bg: SELECTED_BG, + ..Cell::default() + }; + // Fill the row with selected background. + frame.draw_rect_filled(Rect::new(x, y, max_x.saturating_sub(x), 1), style); + let _ = frame.print_text_clipped(x, y, label, style, max_x); + } else { + let style = Cell { + fg: ACCENT, + ..Cell::default() + }; + let _ = frame.print_text_clipped(x, y, label, style, max_x); + } +} + +/// Render the description section. 
+fn render_description( + frame: &mut Frame<'_>, + meta: &IssueMetadata, + scroll: usize, + area: Rect, + _is_focused: bool, +) { + let safe_desc = sanitize_for_terminal(&meta.description, UrlPolicy::Strip); + let lines: Vec<&str> = safe_desc.lines().collect(); + + let text_style = Cell { + fg: TEXT, + ..Cell::default() + }; + let max_x = area.x.saturating_add(area.width); + + for (i, line) in lines + .iter() + .skip(scroll) + .take(area.height as usize) + .enumerate() + { + let y = area.y + i as u16; + let _ = frame.print_text_clipped(area.x, y, line, text_style, max_x); + } +} + +/// Count the number of visible description lines for layout allocation. +fn count_description_lines(meta: &IssueMetadata, _width: u16) -> usize { + if meta.description.is_empty() { + return 0; + } + // Rough estimate: count newlines. Proper word-wrap would need unicode width. + meta.description.lines().count().max(1) +} + +/// Allocate vertical space between description, discussions, and cross-refs. +/// +/// Priority: description gets min(content, 40%), discussions get most of the +/// remaining space, cross-refs get a fixed portion at the bottom. +fn allocate_sections( + available: u16, + desc_lines: usize, + _disc_count: usize, + xref_count: usize, +) -> (u16, u16, u16) { + if available == 0 { + return (0, 0, 0); + } + + let total = available as usize; + + // Cross-refs: 1 header + count, max 25% of space. + let xref_need = if xref_count > 0 { + (1 + xref_count).min(total / 4) + } else { + 0 + }; + + let after_xref = total.saturating_sub(xref_need); + + // Description: up to 40% of remaining, but at least the content lines. + let desc_max = after_xref * 2 / 5; + let desc_alloc = desc_lines.min(desc_max).min(after_xref); + + // Discussions: everything else. 
+    let disc_alloc = after_xref.saturating_sub(desc_alloc);
+
+    (desc_alloc as u16, disc_alloc as u16, xref_need as u16)
+}
+
+// ---------------------------------------------------------------------------
+// Tests
+// ---------------------------------------------------------------------------
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::clock::FakeClock;
+    use crate::message::EntityKey;
+    use crate::state::issue_detail::{IssueDetailData, IssueMetadata};
+    use crate::view::common::cross_ref::{CrossRef, CrossRefKind};
+    use crate::view::common::discussion_tree::{DiscussionNode, NoteNode};
+    use ftui::render::grapheme_pool::GraphemePool;
+
+    // Builds a throwaway Frame backed by a local GraphemePool so render
+    // functions can be exercised without a real terminal.
+    macro_rules! with_frame {
+        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
+            let mut pool = GraphemePool::new();
+            let mut $frame = Frame::new($width, $height, &mut pool);
+            $body
+        }};
+    }
+
+    // Canonical issue fixture. The description has exactly 5 lines; the
+    // `test_count_description_lines` assertion below depends on that.
+    fn sample_metadata() -> IssueMetadata {
+        IssueMetadata {
+            iid: 42,
+            project_path: "group/project".into(),
+            title: "Fix authentication flow".into(),
+            description: "The login page has a bug.\nSteps to reproduce:\n1. Go to /login\n2. Enter credentials\n3. Click submit".into(),
+            state: "opened".into(),
+            author: "alice".into(),
+            assignees: vec!["bob".into()],
+            labels: vec!["backend".into(), "security".into()],
+            milestone: Some("v1.0".into()),
+            due_date: Some("2026-03-01".into()),
+            created_at: 1_700_000_000_000,
+            updated_at: 1_700_000_060_000,
+            web_url: "https://gitlab.com/group/project/-/issues/42".into(),
+            discussion_count: 2,
+        }
+    }
+
+    // Fixture with metadata (Phase 1 hydration) applied but no discussions,
+    // mirroring the state right after IssueDetailLoaded.
+    fn sample_state_with_metadata() -> IssueDetailState {
+        let mut state = IssueDetailState::default();
+        state.load_new(EntityKey::issue(1, 42));
+        state.apply_metadata(IssueDetailData {
+            metadata: sample_metadata(),
+            cross_refs: vec![CrossRef {
+                kind: CrossRefKind::ClosingMr,
+                entity_key: EntityKey::mr(1, 10),
+                label: "Fix auth MR".into(),
+                navigable: true,
+            }],
+        });
+        state
+    }
+
+    #[test]
+    fn test_render_issue_detail_no_metadata_no_panic() {
+        with_frame!(80, 24, |frame| {
+            let state = IssueDetailState::default();
+            let clock = FakeClock::from_ms(1_700_000_000_000);
+            render_issue_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_issue_detail_with_metadata_no_panic() {
+        with_frame!(80, 24, |frame| {
+            let state = sample_state_with_metadata();
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_issue_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_issue_detail_tiny_area() {
+        with_frame!(5, 2, |frame| {
+            let state = sample_state_with_metadata();
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_issue_detail(&mut frame, &state, Rect::new(0, 0, 5, 2), &clock);
+            // Should bail early, no panic.
+        });
+    }
+
+    #[test]
+    fn test_render_issue_detail_with_discussions() {
+        with_frame!(80, 40, |frame| {
+            let mut state = sample_state_with_metadata();
+            state.apply_discussions(vec![DiscussionNode {
+                discussion_id: "d1".into(),
+                notes: vec![NoteNode {
+                    author: "alice".into(),
+                    body: "I found the bug".into(),
+                    created_at: 1_700_000_000_000,
+                    is_system: false,
+                    is_diff_note: false,
+                    diff_file_path: None,
+                    diff_new_line: None,
+                }],
+                resolvable: false,
+                resolved: false,
+            }]);
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_issue_detail(&mut frame, &state, Rect::new(0, 0, 80, 40), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_issue_detail_discussions_loading() {
+        with_frame!(80, 24, |frame| {
+            let state = sample_state_with_metadata();
+            // discussions_loaded is false by default after load_new.
+            assert!(!state.discussions_loaded);
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_issue_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_issue_detail_narrow_terminal() {
+        with_frame!(30, 10, |frame| {
+            let state = sample_state_with_metadata();
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_issue_detail(&mut frame, &state, Rect::new(0, 0, 30, 10), &clock);
+        });
+    }
+
+    #[test]
+    fn test_allocate_sections_empty() {
+        assert_eq!(allocate_sections(0, 5, 3, 2), (0, 0, 0));
+    }
+
+    #[test]
+    fn test_allocate_sections_balanced() {
+        // All three sections present: allocation must be exhaustive.
+        let (d, disc, x) = allocate_sections(20, 5, 3, 2);
+        assert!(d > 0);
+        assert!(disc > 0);
+        assert!(x > 0);
+        assert_eq!(d + disc + x, 20);
+    }
+
+    #[test]
+    fn test_allocate_sections_no_xrefs() {
+        let (d, disc, x) = allocate_sections(20, 5, 3, 0);
+        assert_eq!(x, 0);
+        assert_eq!(d + disc, 20);
+    }
+
+    #[test]
+    fn test_allocate_sections_no_discussions() {
+        // Discussions ignore their count today; the remainder still goes there.
+        let (d, disc, x) = allocate_sections(20, 5, 0, 2);
+        assert!(d > 0);
+        assert_eq!(d + disc + x, 20);
+    }
+
+    #[test]
+    fn test_count_description_lines() {
+        let meta = sample_metadata();
+        let lines = count_description_lines(&meta, 80);
+        assert_eq!(lines, 5); // 5 lines in the sample description
+    }
+
+    #[test]
+    fn test_count_description_lines_empty() {
+        let mut meta = sample_metadata();
+        meta.description = String::new();
+        assert_eq!(count_description_lines(&meta, 80), 0);
+    }
+}
diff --git a/crates/lore-tui/src/view/mod.rs b/crates/lore-tui/src/view/mod.rs
index f2c1839..75c3075 100644
--- a/crates/lore-tui/src/view/mod.rs
+++ b/crates/lore-tui/src/view/mod.rs
@@ -8,7 +8,9 @@
 pub mod common;
 pub mod dashboard;
+pub mod issue_detail;
 pub mod issue_list;
+pub mod mr_detail;
 pub mod mr_list;
 
 use ftui::layout::{Constraint, Flex};
@@ -22,7 +24,9 @@ use common::{
     render_breadcrumb, render_error_toast, render_help_overlay, render_loading, render_status_bar,
 };
 use dashboard::render_dashboard;
+use issue_detail::render_issue_detail;
 use issue_list::render_issue_list;
+use mr_detail::render_mr_detail;
 use mr_list::render_mr_list;
 
 // ---------------------------------------------------------------------------
@@ -93,6 +97,10 @@ pub fn render_screen(frame: &mut Frame<'_>, app: &LoreApp) {
         render_issue_list(frame, &app.state.issue_list, content_area);
     } else if screen == &Screen::MrList {
         render_mr_list(frame, &app.state.mr_list, content_area);
+    } else if matches!(screen, Screen::IssueDetail(_)) {
+        render_issue_detail(frame, &app.state.issue_detail, content_area, &*app.clock);
+    } else if matches!(screen, Screen::MrDetail(_)) {
+        render_mr_detail(frame, &app.state.mr_detail, content_area, &*app.clock);
     }
 
     // --- Status bar ---
diff --git a/crates/lore-tui/src/view/mr_detail.rs b/crates/lore-tui/src/view/mr_detail.rs
new file mode 100644
index 0000000..7d6e425
--- /dev/null
+++ b/crates/lore-tui/src/view/mr_detail.rs
@@ -0,0 +1,635 @@
+#![allow(dead_code)] // Phase 2: consumed by view/mod.rs screen dispatch
+
+//! Merge request detail screen view.
+//!
+//! Composes metadata header, tab bar (Overview / Files / Discussions),
+//! and tab content. Supports progressive hydration: metadata + file
+//! changes render immediately while discussions load async.
+
+use ftui::core::geometry::Rect;
+use ftui::render::cell::{Cell, PackedRgba};
+use ftui::render::drawing::Draw;
+use ftui::render::frame::Frame;
+
+use crate::clock::Clock;
+use crate::safety::{UrlPolicy, sanitize_for_terminal};
+use crate::state::mr_detail::{FileChangeType, MrDetailState, MrMetadata, MrTab};
+use crate::view::common::cross_ref::{CrossRefColors, render_cross_refs};
+use crate::view::common::discussion_tree::{DiscussionTreeColors, render_discussion_tree};
+
+// ---------------------------------------------------------------------------
+// Colors (Flexoki palette)
+// ---------------------------------------------------------------------------
+
+// Selection inverts fg/bg: SELECTED_BG reuses the TEXT tone, SELECTED_FG is
+// near-black. NOTE(review): BORDER shares TEXT_MUTED's value (0x878780) —
+// confirm that is intentional rather than a copy-paste.
+const TEXT: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3);
+const TEXT_MUTED: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80);
+const ACCENT: PackedRgba = PackedRgba::rgb(0xDA, 0x70, 0x2C);
+const GREEN: PackedRgba = PackedRgba::rgb(0x87, 0x9A, 0x39);
+const RED: PackedRgba = PackedRgba::rgb(0xAF, 0x3A, 0x29);
+const CYAN: PackedRgba = PackedRgba::rgb(0x3A, 0xA9, 0x9F);
+const YELLOW: PackedRgba = PackedRgba::rgb(0xD0, 0xA2, 0x15);
+const BORDER: PackedRgba = PackedRgba::rgb(0x87, 0x87, 0x80);
+const SELECTED_FG: PackedRgba = PackedRgba::rgb(0x10, 0x0F, 0x0F);
+const SELECTED_BG: PackedRgba = PackedRgba::rgb(0xCE, 0xCD, 0xC3);
+
+// ---------------------------------------------------------------------------
+// Color constructors
+// ---------------------------------------------------------------------------
+
+// Maps the screen palette onto the shared discussion-tree widget's colors.
+fn discussion_colors() -> DiscussionTreeColors {
+    DiscussionTreeColors {
+        author_fg: CYAN,
+        timestamp_fg: TEXT_MUTED,
+        body_fg: TEXT,
+        system_fg: TEXT_MUTED,
+        diff_path_fg: GREEN,
+        resolved_fg: TEXT_MUTED,
+        guide_fg: BORDER,
+        selected_fg: SELECTED_FG,
+        selected_bg: SELECTED_BG,
+        expand_fg: ACCENT,
+    }
+}
+
+// Maps the screen palette onto the shared cross-reference widget's colors.
+fn cross_ref_colors() -> CrossRefColors {
+    CrossRefColors {
+        kind_fg: ACCENT,
+        label_fg: TEXT,
+        muted_fg: TEXT_MUTED,
+        selected_fg: SELECTED_FG,
+        selected_bg: SELECTED_BG,
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Render
+// ---------------------------------------------------------------------------
+
+/// Render the complete MR detail screen into `area`.
+///
+/// Layout:
+/// ```text
+/// Row 0:  !10 Fix auth flow                    (title bar)
+/// Row 1:  opened | alice | fix-auth -> main    (metadata row)
+/// Row 2:  [Overview] [Files (3)] [Discussions] (tab bar)
+/// Row 3:  ──────────────────────────────────── (separator)
+/// Row 4..N: Tab-specific content
+/// ```
+///
+/// Draws nothing until metadata has been hydrated, and bails outright on
+/// areas too small to hold the four chrome rows.
+pub fn render_mr_detail(
+    frame: &mut Frame<'_>,
+    state: &MrDetailState,
+    area: Rect,
+    clock: &dyn Clock,
+) {
+    // Not enough room for title + metadata + tabs + separator? Draw nothing.
+    if area.width < 10 || area.height < 4 {
+        return;
+    }
+
+    // Metadata is Phase 1 of hydration; without it there is nothing to show.
+    let meta = match state.metadata.as_ref() {
+        Some(m) => m,
+        None => return,
+    };
+
+    let right_edge = area.x.saturating_add(area.width);
+    let bottom = area.y.saturating_add(area.height);
+
+    // Fixed chrome: each sub-renderer paints one row and returns the next.
+    let mut row = render_title_bar(frame, meta, area.x, area.y, right_edge);
+    row = render_metadata_row(frame, meta, area.x, row, right_edge);
+    row = render_tab_bar(frame, state, area.x, row, right_edge);
+    row = render_separator(frame, area.x, row, area.width);
+
+    if row >= bottom {
+        return;
+    }
+    let content_area = Rect::new(area.x, row, area.width, bottom.saturating_sub(row));
+
+    match state.active_tab {
+        MrTab::Overview => render_overview_tab(frame, state, meta, content_area, clock),
+        MrTab::Files => render_files_tab(frame, state, content_area),
+        MrTab::Discussions => render_discussions_tab(frame, state, content_area, clock),
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Sub-renderers
+// ---------------------------------------------------------------------------
+
+/// Render `!10 Fix auth flow` (or `!10 [Draft] Fix auth flow`).
+fn render_title_bar(frame: &mut Frame<'_>, meta: &MrMetadata, x: u16, y: u16, max_x: u16) -> u16 { + let iid_text = format!("!{} ", meta.iid); + let iid_style = Cell { + fg: ACCENT, + ..Cell::default() + }; + let mut cx = frame.print_text_clipped(x, y, &iid_text, iid_style, max_x); + + if meta.draft { + let draft_style = Cell { + fg: YELLOW, + ..Cell::default() + }; + cx = frame.print_text_clipped(cx, y, "[Draft] ", draft_style, max_x); + } + + let title_style = Cell { + fg: TEXT, + ..Cell::default() + }; + let safe_title = sanitize_for_terminal(&meta.title, UrlPolicy::Strip); + let _ = frame.print_text_clipped(cx, y, &safe_title, title_style, max_x); + + y + 1 +} + +/// Render `opened | alice | fix-auth -> main | mergeable`. +fn render_metadata_row( + frame: &mut Frame<'_>, + meta: &MrMetadata, + x: u16, + y: u16, + max_x: u16, +) -> u16 { + let state_fg = match meta.state.as_str() { + "opened" => GREEN, + "merged" => CYAN, + "closed" => RED, + _ => TEXT_MUTED, + }; + let state_style = Cell { + fg: state_fg, + ..Cell::default() + }; + let muted = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let author_style = Cell { + fg: CYAN, + ..Cell::default() + }; + + let mut cx = frame.print_text_clipped(x, y, &meta.state, state_style, max_x); + cx = frame.print_text_clipped(cx, y, " | ", muted, max_x); + cx = frame.print_text_clipped(cx, y, &meta.author, author_style, max_x); + cx = frame.print_text_clipped(cx, y, " | ", muted, max_x); + + let branch_text = format!("{} -> {}", meta.source_branch, meta.target_branch); + cx = frame.print_text_clipped(cx, y, &branch_text, muted, max_x); + + if !meta.merge_status.is_empty() { + cx = frame.print_text_clipped(cx, y, " | ", muted, max_x); + let status_fg = if meta.merge_status == "mergeable" { + GREEN + } else { + YELLOW + }; + let status_style = Cell { + fg: status_fg, + ..Cell::default() + }; + let _ = frame.print_text_clipped(cx, y, &meta.merge_status, status_style, max_x); + } + + y + 1 +} + +/// Render tab bar: 
`[Overview] [Files (3)] [Discussions (2)]`. +fn render_tab_bar(frame: &mut Frame<'_>, state: &MrDetailState, x: u16, y: u16, max_x: u16) -> u16 { + let tabs = [ + (MrTab::Overview, "Overview".to_string()), + ( + MrTab::Files, + format!("Files ({})", state.file_changes.len()), + ), + ( + MrTab::Discussions, + format!("Discussions ({})", state.discussions.len()), + ), + ]; + + let mut cx = x; + for (tab, label) in &tabs { + if *tab == state.active_tab { + let style = Cell { + fg: SELECTED_FG, + bg: SELECTED_BG, + ..Cell::default() + }; + let text = format!(" {label} "); + cx = frame.print_text_clipped(cx, y, &text, style, max_x); + } else { + let style = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let text = format!(" {label} "); + cx = frame.print_text_clipped(cx, y, &text, style, max_x); + } + // Tab separator. + let sep = Cell { + fg: BORDER, + ..Cell::default() + }; + cx = frame.print_text_clipped(cx, y, " ", sep, max_x); + } + + y + 1 +} + +/// Render horizontal separator. +fn render_separator(frame: &mut Frame<'_>, x: u16, y: u16, width: u16) -> u16 { + let sep_style = Cell { + fg: BORDER, + ..Cell::default() + }; + let line: String = "\u{2500}".repeat(width as usize); + let _ = frame.print_text_clipped(x, y, &line, sep_style, x.saturating_add(width)); + y + 1 +} + +// --------------------------------------------------------------------------- +// Tab content renderers +// --------------------------------------------------------------------------- + +/// Overview tab: description + cross-references. 
+fn render_overview_tab( + frame: &mut Frame<'_>, + state: &MrDetailState, + meta: &MrMetadata, + area: Rect, + _clock: &dyn Clock, +) { + let max_x = area.x.saturating_add(area.width); + let mut y = area.y; + let bottom = area.y.saturating_add(area.height); + + // --- Description --- + let safe_desc = sanitize_for_terminal(&meta.description, UrlPolicy::Strip); + let lines: Vec<&str> = safe_desc.lines().collect(); + let text_style = Cell { + fg: TEXT, + ..Cell::default() + }; + + for line in lines + .iter() + .skip(state.description_scroll) + .take((bottom.saturating_sub(y)) as usize) + { + let _ = frame.print_text_clipped(area.x, y, line, text_style, max_x); + y += 1; + } + + if y >= bottom { + return; + } + + // --- Separator --- + y = render_separator(frame, area.x, y, area.width); + if y >= bottom { + return; + } + + // --- Cross-references --- + if !state.cross_refs.is_empty() { + let header_style = Cell { + fg: ACCENT, + ..Cell::default() + }; + let header = format!("Cross References ({})", state.cross_refs.len()); + let _ = frame.print_text_clipped(area.x, y, &header, header_style, max_x); + y += 1; + + if y < bottom { + let refs_area = Rect::new(area.x, y, area.width, bottom.saturating_sub(y)); + let _ = render_cross_refs( + frame, + &state.cross_refs, + &state.cross_ref_state, + refs_area, + &cross_ref_colors(), + ); + } + } +} + +/// Files tab: list of changed files with change type indicators. 
+fn render_files_tab(frame: &mut Frame<'_>, state: &MrDetailState, area: Rect) { + let max_x = area.x.saturating_add(area.width); + let mut y = area.y; + let bottom = area.y.saturating_add(area.height); + + if state.file_changes.is_empty() { + let style = Cell { + fg: TEXT_MUTED, + ..Cell::default() + }; + let _ = frame.print_text_clipped(area.x + 1, y, "No file changes", style, max_x); + return; + } + + for (i, fc) in state + .file_changes + .iter() + .skip(state.file_scroll) + .take((bottom.saturating_sub(y)) as usize) + .enumerate() + { + let is_selected = i + state.file_scroll == state.file_selected; + + let (fg, bg) = if is_selected { + (SELECTED_FG, SELECTED_BG) + } else { + (TEXT, PackedRgba::TRANSPARENT) + }; + + if is_selected { + let sel_cell = Cell { + fg, + bg, + ..Cell::default() + }; + frame.draw_rect_filled(Rect::new(area.x, y, area.width, 1), sel_cell); + } + + // Change type icon. + let icon_fg = match fc.change_type { + FileChangeType::Added => GREEN, + FileChangeType::Deleted => RED, + FileChangeType::Modified => YELLOW, + FileChangeType::Renamed => CYAN, + }; + let icon_style = Cell { + fg: if is_selected { fg } else { icon_fg }, + bg, + ..Cell::default() + }; + let mut cx = frame.print_text_clipped(area.x, y, fc.change_type.icon(), icon_style, max_x); + cx = frame.print_text_clipped(cx, y, " ", icon_style, max_x); + + // File path. + let path_style = Cell { + fg, + bg, + ..Cell::default() + }; + let display_path = if fc.change_type == FileChangeType::Renamed { + if let Some(ref old) = fc.old_path { + format!("{old} -> {}", fc.new_path) + } else { + fc.new_path.clone() + } + } else { + fc.new_path.clone() + }; + let _ = frame.print_text_clipped(cx, y, &display_path, path_style, max_x); + + y += 1; + } +} + +/// Discussions tab: all discussions rendered via the tree widget. 
+fn render_discussions_tab(
+    frame: &mut Frame<'_>,
+    state: &MrDetailState,
+    area: Rect,
+    clock: &dyn Clock,
+) {
+    let max_x = area.x.saturating_add(area.width);
+
+    // Progressive hydration: discussions arrive after metadata, so show a
+    // placeholder until the async load flips `discussions_loaded`.
+    if !state.discussions_loaded {
+        let style = Cell {
+            fg: TEXT_MUTED,
+            ..Cell::default()
+        };
+        let _ =
+            frame.print_text_clipped(area.x + 1, area.y, "Loading discussions...", style, max_x);
+        return;
+    }
+
+    if state.discussions.is_empty() {
+        let style = Cell {
+            fg: TEXT_MUTED,
+            ..Cell::default()
+        };
+        let _ = frame.print_text_clipped(area.x + 1, area.y, "No discussions", style, max_x);
+        return;
+    }
+
+    // Loaded and non-empty: delegate to the shared discussion-tree widget.
+    let _ = render_discussion_tree(
+        frame,
+        &state.discussions,
+        &state.tree_state,
+        area,
+        &discussion_colors(),
+        clock,
+    );
+}
+
+// ---------------------------------------------------------------------------
+// Tests
+// ---------------------------------------------------------------------------
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::clock::FakeClock;
+    use crate::message::EntityKey;
+    use crate::state::mr_detail::{FileChange, FileChangeType, MrDetailData, MrMetadata, MrTab};
+    use crate::view::common::cross_ref::{CrossRef, CrossRefKind};
+    use crate::view::common::discussion_tree::{DiscussionNode, NoteNode};
+    use ftui::render::grapheme_pool::GraphemePool;
+
+    // Builds a throwaway Frame backed by a local GraphemePool.
+    macro_rules! with_frame {
+        ($width:expr, $height:expr, |$frame:ident| $body:block) => {{
+            let mut pool = GraphemePool::new();
+            let mut $frame = Frame::new($width, $height, &mut pool);
+            $body
+        }};
+    }
+
+    // Canonical MR fixture; `sample_mr_state` applies 2 file changes,
+    // matching the `file_change_count: 2` declared here.
+    fn sample_mr_metadata() -> MrMetadata {
+        MrMetadata {
+            iid: 10,
+            project_path: "group/project".into(),
+            title: "Fix authentication flow".into(),
+            description: "This MR fixes the login bug.\nSee issue #42.".into(),
+            state: "opened".into(),
+            draft: false,
+            author: "alice".into(),
+            assignees: vec!["bob".into()],
+            reviewers: vec!["carol".into()],
+            labels: vec!["backend".into()],
+            source_branch: "fix-auth".into(),
+            target_branch: "main".into(),
+            merge_status: "mergeable".into(),
+            created_at: 1_700_000_000_000,
+            updated_at: 1_700_000_060_000,
+            merged_at: None,
+            web_url: "https://gitlab.com/group/project/-/merge_requests/10".into(),
+            discussion_count: 1,
+            file_change_count: 2,
+        }
+    }
+
+    fn sample_mr_state() -> MrDetailState {
+        let mut state = MrDetailState::default();
+        state.load_new(EntityKey::mr(1, 10));
+        state.apply_metadata(MrDetailData {
+            metadata: sample_mr_metadata(),
+            cross_refs: vec![CrossRef {
+                kind: CrossRefKind::ClosingMr,
+                entity_key: EntityKey::issue(1, 42),
+                label: "Auth bug".into(),
+                navigable: true,
+            }],
+            file_changes: vec![
+                FileChange {
+                    old_path: None,
+                    new_path: "src/auth.rs".into(),
+                    change_type: FileChangeType::Modified,
+                },
+                FileChange {
+                    old_path: None,
+                    new_path: "src/lib.rs".into(),
+                    change_type: FileChangeType::Added,
+                },
+            ],
+        });
+        state
+    }
+
+    #[test]
+    fn test_render_mr_detail_no_metadata() {
+        with_frame!(80, 24, |frame| {
+            let state = MrDetailState::default();
+            let clock = FakeClock::from_ms(1_700_000_000_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_mr_detail_overview_tab() {
+        with_frame!(80, 24, |frame| {
+            let state = sample_mr_state();
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_mr_detail_files_tab() {
+        with_frame!(80, 24, |frame| {
+            let mut state = sample_mr_state();
+            state.active_tab = MrTab::Files;
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_mr_detail_discussions_tab_loading() {
+        with_frame!(80, 24, |frame| {
+            let mut state = sample_mr_state();
+            state.active_tab = MrTab::Discussions;
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_mr_detail_discussions_tab_with_data() {
+        with_frame!(80, 30, |frame| {
+            let mut state = sample_mr_state();
+            state.active_tab = MrTab::Discussions;
+            state.apply_discussions(vec![DiscussionNode {
+                discussion_id: "d1".into(),
+                notes: vec![NoteNode {
+                    author: "alice".into(),
+                    body: "Looks good".into(),
+                    created_at: 1_700_000_020_000,
+                    is_system: false,
+                    is_diff_note: true,
+                    diff_file_path: Some("src/auth.rs".into()),
+                    diff_new_line: Some(42),
+                }],
+                resolvable: true,
+                resolved: false,
+            }]);
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 30), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_mr_detail_draft() {
+        with_frame!(80, 24, |frame| {
+            let mut state = sample_mr_state();
+            state.metadata.as_mut().unwrap().draft = true;
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_mr_detail_tiny_area() {
+        with_frame!(5, 3, |frame| {
+            let state = sample_mr_state();
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 5, 3), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_mr_detail_narrow_terminal() {
+        with_frame!(30, 10, |frame| {
+            let state = sample_mr_state();
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 30, 10), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_files_empty() {
+        with_frame!(80, 24, |frame| {
+            let mut state = MrDetailState::default();
+            state.load_new(EntityKey::mr(1, 10));
+            // NOTE(review): the metadata fixture still reports
+            // file_change_count: 2 while the list below is empty — fine for
+            // render-path coverage, but the fixture is internally inconsistent.
+            state.apply_metadata(MrDetailData {
+                metadata: sample_mr_metadata(),
+                cross_refs: vec![],
+                file_changes: vec![],
+            });
+            state.active_tab = MrTab::Files;
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+
+    #[test]
+    fn test_render_files_with_rename() {
+        with_frame!(80, 24, |frame| {
+            let mut state = MrDetailState::default();
+            state.load_new(EntityKey::mr(1, 10));
+            state.apply_metadata(MrDetailData {
+                metadata: sample_mr_metadata(),
+                cross_refs: vec![],
+                file_changes: vec![FileChange {
+                    old_path: Some("src/old.rs".into()),
+                    new_path: "src/new.rs".into(),
+                    change_type: FileChangeType::Renamed,
+                }],
+            });
+            state.active_tab = MrTab::Files;
+            let clock = FakeClock::from_ms(1_700_000_060_000);
+            render_mr_detail(&mut frame, &state, Rect::new(0, 0, 80, 24), &clock);
+        });
+    }
+}