Compare commits
13 Commits
d94bcbfbe7
...
robot-meta
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
60075cd400 | ||
|
|
ddab186315 | ||
|
|
d6d1686f8e | ||
|
|
5c44ee91fb | ||
|
|
6aff96d32f | ||
|
|
06889ec85a | ||
|
|
08bda08934 | ||
|
|
32134ea933 | ||
|
|
16cc58b17f | ||
|
|
a10d870863 | ||
|
|
59088af2ab | ||
|
|
ace9c8bf17 | ||
|
|
cab8c540da |
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
|||||||
bd-23xb
|
bd-9lbr
|
||||||
|
|||||||
2
Cargo.lock
generated
2
Cargo.lock
generated
@@ -1324,7 +1324,7 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lore"
|
name = "lore"
|
||||||
version = "0.9.2"
|
version = "0.9.4"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"asupersync",
|
"asupersync",
|
||||||
"async-stream",
|
"async-stream",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "lore"
|
name = "lore"
|
||||||
version = "0.9.2"
|
version = "0.9.4"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
description = "Gitlore - Local GitLab data management with semantic search"
|
description = "Gitlore - Local GitLab data management with semantic search"
|
||||||
authors = ["Taylor Eernisse"]
|
authors = ["Taylor Eernisse"]
|
||||||
|
|||||||
@@ -37,11 +37,10 @@
|
|||||||
| 29 | *help* | — | — | — | (clap built-in) |
|
| 29 | *help* | — | — | — | (clap built-in) |
|
||||||
| | **Hidden/deprecated:** | | | | |
|
| | **Hidden/deprecated:** | | | | |
|
||||||
| 30 | `list` | — | `<ENTITY>` | 14 | deprecated, use issues/mrs |
|
| 30 | `list` | — | `<ENTITY>` | 14 | deprecated, use issues/mrs |
|
||||||
| 31 | `show` | — | `<ENTITY> <IID>` | 1 | deprecated, use issues/mrs |
|
| 31 | `auth-test` | — | — | 0 | deprecated, use auth |
|
||||||
| 32 | `auth-test` | — | — | 0 | deprecated, use auth |
|
| 32 | `sync-status` | — | — | 0 | deprecated, use status |
|
||||||
| 33 | `sync-status` | — | — | 0 | deprecated, use status |
|
| 33 | `backup` | — | — | 0 | Stub (not implemented) |
|
||||||
| 34 | `backup` | — | — | 0 | Stub (not implemented) |
|
| 34 | `reset` | — | — | 1 | Stub (not implemented) |
|
||||||
| 35 | `reset` | — | — | 1 | Stub (not implemented) |
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
1. **Make `gitlab_note_id` explicit in all note-level payloads without breaking existing consumers**
|
1. **Make `gitlab_note_id` explicit in all note-level payloads without breaking existing consumers**
|
||||||
Rationale: Your Bridge Contract already requires `gitlab_note_id`, but current plan keeps `gitlab_id` only in `notes` list while adding `gitlab_note_id` only in `show`. That forces agents to special-case commands. Add `gitlab_note_id` as an alias field everywhere note-level data appears, while keeping `gitlab_id` for compatibility.
|
Rationale: Your Bridge Contract already requires `gitlab_note_id`, but current plan keeps `gitlab_id` only in `notes` list while adding `gitlab_note_id` only in detail views. That forces agents to special-case commands. Add `gitlab_note_id` as an alias field everywhere note-level data appears, while keeping `gitlab_id` for compatibility.
|
||||||
|
|
||||||
```diff
|
```diff
|
||||||
@@ Bridge Contract (Cross-Cutting)
|
@@ Bridge Contract (Cross-Cutting)
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ construct API calls without a separate project-ID lookup, even after path change
|
|||||||
**Back-compat rule**: Note payloads in the `notes` list command continue exposing `gitlab_id`
|
**Back-compat rule**: Note payloads in the `notes` list command continue exposing `gitlab_id`
|
||||||
for existing consumers, but **MUST also** expose `gitlab_note_id` with the same value. This
|
for existing consumers, but **MUST also** expose `gitlab_note_id` with the same value. This
|
||||||
ensures agents can use a single field name (`gitlab_note_id`) across all commands — `notes`,
|
ensures agents can use a single field name (`gitlab_note_id`) across all commands — `notes`,
|
||||||
`show`, and `discussions --include-notes` — without special-casing by command.
|
`issues <IID>`/`mrs <IID>`, and `discussions --include-notes` — without special-casing by command.
|
||||||
|
|
||||||
This contract exists so agents can deterministically construct `glab api` write calls without
|
This contract exists so agents can deterministically construct `glab api` write calls without
|
||||||
cross-referencing multiple commands. Each workstream below must satisfy these fields in its
|
cross-referencing multiple commands. Each workstream below must satisfy these fields in its
|
||||||
|
|||||||
@@ -107,12 +107,12 @@ Each criterion is independently testable. Implementation is complete when ALL pa
|
|||||||
|
|
||||||
### AC-7: Show Issue Display (E2E)
|
### AC-7: Show Issue Display (E2E)
|
||||||
|
|
||||||
**Human (`lore show issue 123`):**
|
**Human (`lore issues 123`):**
|
||||||
- [ ] New line after "State": `Status: In progress` (colored by `status_color` hex → nearest terminal color)
|
- [ ] New line after "State": `Status: In progress` (colored by `status_color` hex → nearest terminal color)
|
||||||
- [ ] Status line only shown when `status_name IS NOT NULL`
|
- [ ] Status line only shown when `status_name IS NOT NULL`
|
||||||
- [ ] Category shown in parens when available, lowercased: `Status: In progress (in_progress)`
|
- [ ] Category shown in parens when available, lowercased: `Status: In progress (in_progress)`
|
||||||
|
|
||||||
**Robot (`lore --robot show issue 123`):**
|
**Robot (`lore --robot issues 123`):**
|
||||||
- [ ] JSON includes `status_name`, `status_category`, `status_color`, `status_icon_name`, `status_synced_at` fields
|
- [ ] JSON includes `status_name`, `status_category`, `status_color`, `status_icon_name`, `status_synced_at` fields
|
||||||
- [ ] Fields are `null` (not absent) when status not available
|
- [ ] Fields are `null` (not absent) when status not available
|
||||||
- [ ] `status_synced_at` is integer (ms epoch UTC) or `null` — enables freshness checks by consumers
|
- [ ] `status_synced_at` is integer (ms epoch UTC) or `null` — enables freshness checks by consumers
|
||||||
|
|||||||
701
specs/SPEC_explain.md
Normal file
701
specs/SPEC_explain.md
Normal file
@@ -0,0 +1,701 @@
|
|||||||
|
# Spec: lore explain — Auto-Generated Issue/MR Narratives
|
||||||
|
|
||||||
|
**Bead:** bd-9lbr
|
||||||
|
**Created:** 2026-03-10
|
||||||
|
|
||||||
|
## Spec Status
|
||||||
|
| Section | Status | Notes |
|
||||||
|
|---------|--------|-------|
|
||||||
|
| Objective | complete | |
|
||||||
|
| Tech Stack | complete | |
|
||||||
|
| Project Structure | complete | |
|
||||||
|
| Commands | complete | |
|
||||||
|
| Code Style | complete | UX-audited: after_help, --sections, --since, --no-timeline, --max-decisions, singular types |
|
||||||
|
| Boundaries | complete | |
|
||||||
|
| Testing Strategy | complete | 13 test cases (7 original + 5 UX flags + 1 singular type) |
|
||||||
|
| Git Workflow | complete | jj-first |
|
||||||
|
| User Journeys | complete | 3 journeys covering agent, human, pipeline use |
|
||||||
|
| Architecture | complete | ExplainParams + section filtering + time scoping |
|
||||||
|
| Success Criteria | complete | 15 criteria (10 original + 5 UX flags) |
|
||||||
|
| Non-Goals | complete | |
|
||||||
|
| Tasks | complete | 5 tasks across 3 phases, all updated for UX flags |
|
||||||
|
|
||||||
|
**Definition of Complete:** All sections `complete`, Open Questions empty,
|
||||||
|
every user journey has tasks, every task has TDD workflow and acceptance criteria.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Quick Reference
|
||||||
|
- [Entity Detail] (Architecture): reuse show/ query patterns (private — copy, don't import)
|
||||||
|
- [Timeline] (Architecture): import `crate::timeline::seed::seed_timeline_direct` + `collect_events`
|
||||||
|
- [Events] (Architecture): new inline queries against resource_state_events/resource_label_events
|
||||||
|
- [References] (Architecture): new query against entity_references table
|
||||||
|
- [Discussions] (Architecture): adapted from show/ patterns, add resolved/resolvable filter
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Open Questions (Resolve Before Implementation)
|
||||||
|
<!-- All resolved -->
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Objective
|
||||||
|
|
||||||
|
**Goal:** Add `lore explain issues N` / `lore explain mrs N` to auto-generate structured narratives of what happened on an issue or MR.
|
||||||
|
|
||||||
|
**Problem:** Understanding the full story of an issue/MR requires reading dozens of notes, cross-referencing state changes, checking related entities, and piecing together a timeline. This is time-consuming for humans and nearly impossible for AI agents without custom orchestration.
|
||||||
|
|
||||||
|
**Success metrics:**
|
||||||
|
- Produces a complete narrative in <500ms for an issue with 50 notes
|
||||||
|
- All 7 sections populated (entity, description_excerpt, key_decisions, activity, open_threads, related, timeline_excerpt)
|
||||||
|
- Works fully offline (no API calls, no LLM)
|
||||||
|
- Deterministic and reproducible (same input = same output)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tech Stack & Constraints
|
||||||
|
|
||||||
|
| Layer | Technology | Version |
|
||||||
|
|-------|-----------|---------|
|
||||||
|
| Language | Rust | nightly-2026-03-01 (rust-toolchain.toml) |
|
||||||
|
| Framework | clap (derive) | As in Cargo.toml |
|
||||||
|
| Database | SQLite via rusqlite | Bundled |
|
||||||
|
| Testing | cargo test | Inline #[cfg(test)] |
|
||||||
|
| Async | asupersync | 0.2 |
|
||||||
|
|
||||||
|
**Constraints:**
|
||||||
|
- No LLM dependency — template-based, deterministic
|
||||||
|
- No network calls — all data from local SQLite
|
||||||
|
- Performance: <500ms for 50-note entity
|
||||||
|
- Unsafe code forbidden (`#![forbid(unsafe_code)]`)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/cli/commands/
|
||||||
|
explain.rs # NEW: command module (queries, heuristic, result types)
|
||||||
|
src/cli/
|
||||||
|
mod.rs # EDIT: add Explain variant to Commands enum
|
||||||
|
src/app/
|
||||||
|
handlers.rs # EDIT: add handle_explain dispatch
|
||||||
|
robot_docs.rs # EDIT: register explain in robot-docs manifest
|
||||||
|
src/main.rs # EDIT: add Explain match arm
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build
|
||||||
|
cargo check --all-targets
|
||||||
|
|
||||||
|
# Test
|
||||||
|
cargo test explain
|
||||||
|
|
||||||
|
# Lint
|
||||||
|
cargo clippy --all-targets -- -D warnings
|
||||||
|
|
||||||
|
# Format
|
||||||
|
cargo fmt --check
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Code Style
|
||||||
|
|
||||||
|
**Command registration (from cli/mod.rs):**
|
||||||
|
```rust
|
||||||
|
/// Auto-generate a structured narrative of an issue or MR
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore explain issues 42 # Narrative for issue #42
|
||||||
|
lore explain mrs 99 -p group/repo # Narrative for MR !99 in specific project
|
||||||
|
lore -J explain issues 42 # JSON output for automation
|
||||||
|
lore explain issues 42 --sections key_decisions,open_threads # Specific sections only
|
||||||
|
lore explain issues 42 --since 30d # Narrative scoped to last 30 days
|
||||||
|
lore explain issues 42 --no-timeline # Skip timeline (faster)")]
|
||||||
|
Explain {
|
||||||
|
/// Entity type: "issues" or "mrs" (singular forms also accepted)
|
||||||
|
#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]
|
||||||
|
entity_type: String,
|
||||||
|
|
||||||
|
/// Entity IID
|
||||||
|
iid: i64,
|
||||||
|
|
||||||
|
/// Scope to project (fuzzy match)
|
||||||
|
#[arg(short, long)]
|
||||||
|
project: Option<String>,
|
||||||
|
|
||||||
|
/// Select specific sections (comma-separated)
|
||||||
|
/// Valid: entity, description, key_decisions, activity, open_threads, related, timeline
|
||||||
|
#[arg(long, value_delimiter = ',', help_heading = "Output")]
|
||||||
|
sections: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Skip timeline excerpt (faster execution)
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
no_timeline: bool,
|
||||||
|
|
||||||
|
/// Maximum key decisions to include
|
||||||
|
#[arg(long, default_value = "10", help_heading = "Output")]
|
||||||
|
max_decisions: usize,
|
||||||
|
|
||||||
|
/// Time scope for events/notes (e.g. 7d, 2w, 1m, or YYYY-MM-DD)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
since: Option<String>,
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
**Entity type normalization:** The handler must normalize singular forms: `"issue"` -> `"issues"`, `"mr"` -> `"mrs"`. This prevents common typos from causing errors.
|
||||||
|
|
||||||
|
**Query pattern (from show/issue.rs):**
|
||||||
|
```rust
|
||||||
|
fn find_issue(conn: &Connection, iid: i64, project_filter: Option<&str>) -> Result<IssueRow> {
|
||||||
|
let project_id = resolve_project(conn, project_filter)?;
|
||||||
|
let mut stmt = conn.prepare_cached("SELECT ... FROM issues WHERE iid = ?1 AND project_id = ?2")?;
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Robot mode output (from cli/robot.rs):**
|
||||||
|
```rust
|
||||||
|
let response = serde_json::json!({
|
||||||
|
"ok": true,
|
||||||
|
"data": result,
|
||||||
|
"meta": { "elapsed_ms": elapsed.as_millis() }
|
||||||
|
});
|
||||||
|
println!("{}", serde_json::to_string(&response)?);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Boundaries
|
||||||
|
|
||||||
|
### Always (autonomous)
|
||||||
|
- Run `cargo test explain` and `cargo clippy` after every code change
|
||||||
|
- Follow existing query patterns from show/issue.rs and show/mr.rs
|
||||||
|
- Use `resolve_project()` for project resolution (fuzzy match)
|
||||||
|
- Cap key_decisions at `--max-decisions` (default 10), timeline_excerpt at 20 events
|
||||||
|
- Normalize singular entity types (`issue` -> `issues`, `mr` -> `mrs`)
|
||||||
|
- Respect `--sections` filter: omit unselected sections from output (both robot and human)
|
||||||
|
- Respect `--since` filter: scope events/notes queries with `created_at >= ?` threshold
|
||||||
|
|
||||||
|
### Ask First (needs approval)
|
||||||
|
- Adding new dependencies to Cargo.toml
|
||||||
|
- Modifying existing query functions in show/ or timeline/
|
||||||
|
- Changing the entity_references table schema
|
||||||
|
|
||||||
|
### Never (hard stops)
|
||||||
|
- No LLM calls — explain must be deterministic
|
||||||
|
- No API/network calls — fully offline
|
||||||
|
- No new database migrations — use existing schema only
|
||||||
|
- Do not modify show/ or timeline/ modules (copy patterns instead)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing Strategy (TDD — Red-Green)
|
||||||
|
|
||||||
|
**Methodology:** Test-Driven Development. Write tests first, confirm red, implement, confirm green.
|
||||||
|
|
||||||
|
**Framework:** cargo test, inline `#[cfg(test)]`
|
||||||
|
**Location:** `src/cli/commands/explain.rs` (inline test module)
|
||||||
|
|
||||||
|
**Test categories:**
|
||||||
|
- Unit tests: key-decisions heuristic, activity counting, description truncation
|
||||||
|
- Integration tests: full explain pipeline with in-memory DB
|
||||||
|
|
||||||
|
**User journey test mapping:**
|
||||||
|
| Journey | Test | Scenarios |
|
||||||
|
|---------|------|-----------|
|
||||||
|
| UJ-1: Agent explains issue | test_explain_issue_basic | All 7 sections present, robot JSON valid |
|
||||||
|
| UJ-1: Agent explains MR | test_explain_mr | entity.type = "merge_request", merged_at included |
|
||||||
|
| UJ-1: Singular entity type | test_explain_singular_entity_type | `"issue"` normalizes to `"issues"` |
|
||||||
|
| UJ-1: Section filtering | test_explain_sections_filter_robot | Only selected sections in output |
|
||||||
|
| UJ-1: No-timeline flag | test_explain_no_timeline_flag | timeline_excerpt is None |
|
||||||
|
| UJ-2: Human reads narrative | (human render tested manually) | Headers, indentation, color |
|
||||||
|
| UJ-3: Key decisions | test_explain_key_decision_heuristic | Note within 60min of state change by same actor |
|
||||||
|
| UJ-3: No false decisions | test_explain_key_decision_ignores_unrelated_notes | Different author's note excluded |
|
||||||
|
| UJ-3: Max decisions cap | test_explain_max_decisions | Respects `--max-decisions` parameter |
|
||||||
|
| UJ-3: Since scopes events | test_explain_since_scopes_events | Only recent events included |
|
||||||
|
| UJ-3: Open threads | test_explain_open_threads | Only unresolved discussions in output |
|
||||||
|
| UJ-3: Edge case | test_explain_no_notes | Empty sections, no panic |
|
||||||
|
| UJ-3: Activity counts | test_explain_activity_counts | Correct state/label/note counts |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Git Workflow
|
||||||
|
|
||||||
|
- **jj-first** — all VCS via jj, not git
|
||||||
|
- **Commit format:** `feat(explain): <description>`
|
||||||
|
- **No branches** — commit in place, use jj bookmarks to push
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## User Journeys (Prioritized)
|
||||||
|
|
||||||
|
### P1 — Critical
|
||||||
|
- **UJ-1: Agent queries issue/MR narrative**
|
||||||
|
- Actor: AI agent (via robot mode)
|
||||||
|
- Flow: `lore -J explain issues 42` → JSON with 7 sections → agent parses and acts
|
||||||
|
- Error paths: Issue not found (exit 17), ambiguous project (exit 18)
|
||||||
|
- Implemented by: Task 1, 2, 3, 4
|
||||||
|
|
||||||
|
### P2 — Important
|
||||||
|
- **UJ-2: Human reads explain output**
|
||||||
|
- Actor: Developer at terminal
|
||||||
|
- Flow: `lore explain issues 42` → formatted narrative with headers, colors, indentation
|
||||||
|
- Error paths: Same as UJ-1 but with human-readable error messages
|
||||||
|
- Implemented by: Task 5
|
||||||
|
|
||||||
|
### P3 — Nice to Have
|
||||||
|
- **UJ-3: Agent uses key-decisions to understand context**
|
||||||
|
- Actor: AI agent making decisions
|
||||||
|
- Flow: Parse `key_decisions` array → understand who decided what and when → inform action
|
||||||
|
- Error paths: No key decisions found (empty array, not error)
|
||||||
|
- Implemented by: Task 3
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architecture / Data Model
|
||||||
|
|
||||||
|
### Data Assembly Pipeline (sync, no async needed)
|
||||||
|
|
||||||
|
```
|
||||||
|
1. RESOLVE → resolve_project() + find entity by IID
|
||||||
|
2. PARSE → normalize entity_type, parse --since, validate --sections
|
||||||
|
3. DETAIL → entity metadata (title, state, author, labels, assignees, status)
|
||||||
|
4. EVENTS → resource_state_events + resource_label_events (optionally --since scoped)
|
||||||
|
5. NOTES → non-system notes via discussions join (optionally --since scoped)
|
||||||
|
6. HEURISTIC → key_decisions = events correlated with notes by same actor within 60min
|
||||||
|
7. THREADS → discussions WHERE resolvable=1 AND resolved=0
|
||||||
|
8. REFERENCES → entity_references (both directions: source and target)
|
||||||
|
9. TIMELINE → seed_timeline_direct + collect_events (capped at 20, skip if --no-timeline)
|
||||||
|
10. FILTER → apply --sections filter: drop unselected sections before serialization
|
||||||
|
11. ASSEMBLE → combine into ExplainResult
|
||||||
|
```
|
||||||
|
|
||||||
|
**Section filtering:** When `--sections` is provided, only the listed sections are populated.
|
||||||
|
Unselected sections are set to their zero-value (`None`, empty vec, etc.) and omitted
|
||||||
|
from robot JSON via `#[serde(skip_serializing_if = "...")]`. The `entity` section is always
|
||||||
|
included (needed for identification). Human mode skips rendering unselected sections.
|
||||||
|
|
||||||
|
**Time scoping:** When `--since` is provided, parse it using `crate::core::time::parse_since()`
|
||||||
|
(same function used by timeline, me, file-history). Add `AND created_at >= ?` to events
|
||||||
|
and notes queries. The entity header, references, and open threads are NOT time-scoped
|
||||||
|
(they represent current state, not historical events).
|
||||||
|
|
||||||
|
### Key Types
|
||||||
|
|
||||||
|
```rust
|
||||||
|
/// Parameters controlling explain behavior.
|
||||||
|
pub struct ExplainParams {
|
||||||
|
pub entity_type: String, // "issues" or "mrs" (already normalized)
|
||||||
|
pub iid: i64,
|
||||||
|
pub project: Option<String>,
|
||||||
|
pub sections: Option<Vec<String>>, // None = all sections
|
||||||
|
pub no_timeline: bool,
|
||||||
|
pub max_decisions: usize, // default 10
|
||||||
|
pub since: Option<i64>, // ms epoch threshold from --since parsing
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct ExplainResult {
|
||||||
|
pub entity: EntitySummary,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub description_excerpt: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub key_decisions: Option<Vec<KeyDecision>>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub activity: Option<ActivitySummary>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub open_threads: Option<Vec<OpenThread>>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub related: Option<RelatedEntities>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub timeline_excerpt: Option<Vec<TimelineEventSummary>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct EntitySummary {
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
pub entity_type: String, // "issue" or "merge_request"
|
||||||
|
pub iid: i64,
|
||||||
|
pub title: String,
|
||||||
|
pub state: String,
|
||||||
|
pub author: String,
|
||||||
|
pub assignees: Vec<String>,
|
||||||
|
pub labels: Vec<String>,
|
||||||
|
pub created_at: String, // ISO 8601
|
||||||
|
pub updated_at: String, // ISO 8601
|
||||||
|
pub url: Option<String>,
|
||||||
|
pub status_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub merged_at: Option<String>, // ISO 8601; MRs only (None for issues) — required by UJ-1 MR test
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct KeyDecision {
|
||||||
|
pub timestamp: String, // ISO 8601
|
||||||
|
pub actor: String,
|
||||||
|
pub action: String, // "state: opened -> closed" or "label: +bug"
|
||||||
|
pub context_note: String, // truncated to 500 chars
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct ActivitySummary {
|
||||||
|
pub state_changes: usize,
|
||||||
|
pub label_changes: usize,
|
||||||
|
pub notes: usize, // non-system only
|
||||||
|
pub first_event: Option<String>, // ISO 8601
|
||||||
|
pub last_event: Option<String>, // ISO 8601
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct OpenThread {
|
||||||
|
pub discussion_id: String,
|
||||||
|
pub started_by: String,
|
||||||
|
pub started_at: String, // ISO 8601
|
||||||
|
pub note_count: usize,
|
||||||
|
pub last_note_at: String, // ISO 8601
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct RelatedEntities {
|
||||||
|
pub closing_mrs: Vec<ClosingMrInfo>,
|
||||||
|
pub related_issues: Vec<RelatedEntityInfo>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
pub struct TimelineEventSummary {
|
||||||
|
pub timestamp: String, // ISO 8601
|
||||||
|
pub event_type: String,
|
||||||
|
pub actor: Option<String>,
|
||||||
|
pub summary: String,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Decisions Heuristic
|
||||||
|
|
||||||
|
The heuristic identifies notes that explain WHY state/label changes were made:
|
||||||
|
|
||||||
|
1. Collect all `resource_state_events` and `resource_label_events` for the entity
|
||||||
|
2. Merge into unified chronological list with (timestamp, actor, description)
|
||||||
|
3. For each event, find the FIRST non-system note by the SAME actor within 60 minutes AFTER the event
|
||||||
|
4. Pair them as a `KeyDecision`
|
||||||
|
5. Cap at `params.max_decisions` (default 10)
|
||||||
|
|
||||||
|
**SQL for state events:**
|
||||||
|
```sql
|
||||||
|
SELECT state, actor_username, created_at
|
||||||
|
FROM resource_state_events
|
||||||
|
WHERE issue_id = ?1 -- or merge_request_id = ?1
|
||||||
|
AND (?2 IS NULL OR created_at >= ?2) -- --since filter
|
||||||
|
ORDER BY created_at ASC
|
||||||
|
```
|
||||||
|
|
||||||
|
**SQL for label events:**
|
||||||
|
```sql
|
||||||
|
SELECT action, label_name, actor_username, created_at
|
||||||
|
FROM resource_label_events
|
||||||
|
WHERE issue_id = ?1 -- or merge_request_id = ?1
|
||||||
|
AND (?2 IS NULL OR created_at >= ?2) -- --since filter
|
||||||
|
ORDER BY created_at ASC
|
||||||
|
```
|
||||||
|
|
||||||
|
**SQL for non-system notes (for correlation):**
|
||||||
|
```sql
|
||||||
|
SELECT n.body, n.author_username, n.created_at
|
||||||
|
FROM notes n
|
||||||
|
JOIN discussions d ON n.discussion_id = d.id
|
||||||
|
WHERE d.noteable_type = ?1 AND d.issue_id = ?2 -- or d.merge_request_id
|
||||||
|
AND n.is_system = 0
|
||||||
|
AND (?3 IS NULL OR n.created_at >= ?3) -- --since filter
|
||||||
|
ORDER BY n.created_at ASC
|
||||||
|
```
|
||||||
|
|
||||||
|
**Entity ID resolution:** The `discussions` table uses `issue_id` / `merge_request_id` columns (CHECK constraint: exactly one non-NULL). The `resource_state_events` and `resource_label_events` tables use the same pattern.
|
||||||
|
|
||||||
|
### Cross-References Query
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Outgoing references (this entity references others)
|
||||||
|
SELECT target_entity_type, target_entity_id, target_project_path,
|
||||||
|
target_entity_iid, reference_type, source_method
|
||||||
|
FROM entity_references
|
||||||
|
WHERE source_entity_type = ?1 AND source_entity_id = ?2
|
||||||
|
|
||||||
|
-- Incoming references (others reference this entity)
|
||||||
|
SELECT source_entity_type, source_entity_id,
|
||||||
|
reference_type, source_method
|
||||||
|
FROM entity_references
|
||||||
|
WHERE target_entity_type = ?1 AND target_entity_id = ?2
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** For closing MRs, reuse the pattern from show/issue.rs `get_closing_mrs()` which queries entity_references with `reference_type = 'closes'`.
|
||||||
|
|
||||||
|
### Open Threads Query
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT d.gitlab_discussion_id, d.first_note_at, d.last_note_at
|
||||||
|
FROM discussions d
|
||||||
|
WHERE d.issue_id = ?1 -- or d.merge_request_id
|
||||||
|
AND d.resolvable = 1
|
||||||
|
AND d.resolved = 0
|
||||||
|
ORDER BY d.last_note_at DESC
|
||||||
|
```
|
||||||
|
|
||||||
|
Then for each discussion, fetch the first note's author:
|
||||||
|
```sql
|
||||||
|
SELECT author_username, created_at
|
||||||
|
FROM notes
|
||||||
|
WHERE discussion_id = ?1
|
||||||
|
ORDER BY created_at ASC
|
||||||
|
LIMIT 1
|
||||||
|
```
|
||||||
|
|
||||||
|
And count notes per discussion:
|
||||||
|
```sql
|
||||||
|
SELECT COUNT(*) FROM notes WHERE discussion_id = ?1 AND is_system = 0
|
||||||
|
```
|
||||||
|
|
||||||
|
### Robot Mode Output Schema
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"ok": true,
|
||||||
|
"data": {
|
||||||
|
"entity": {
|
||||||
|
"type": "issue", "iid": 3864, "title": "...", "state": "opened",
|
||||||
|
"author": "teernisse", "assignees": ["teernisse"],
|
||||||
|
"labels": ["customer:BNSF"], "created_at": "2026-01-10T...",
|
||||||
|
"updated_at": "2026-02-12T...", "url": "...", "status_name": "In progress"
|
||||||
|
},
|
||||||
|
"description_excerpt": "First 500 chars...",
|
||||||
|
"key_decisions": [{
|
||||||
|
"timestamp": "2026-01-15T...",
|
||||||
|
"actor": "teernisse",
|
||||||
|
"action": "state: opened -> closed",
|
||||||
|
"context_note": "Closing this out — superseded by the new integration plan..."
|
||||||
|
}],
|
||||||
|
"activity": {
|
||||||
|
"state_changes": 3, "label_changes": 5, "notes": 42,
|
||||||
|
"first_event": "2026-01-10T...", "last_event": "2026-02-12T..."
|
||||||
|
},
|
||||||
|
"open_threads": [{
|
||||||
|
"discussion_id": "abc123",
|
||||||
|
"started_by": "cseiber",
|
||||||
|
"started_at": "2026-02-01T...",
|
||||||
|
"note_count": 5,
|
||||||
|
"last_note_at": "2026-02-10T..."
|
||||||
|
}],
|
||||||
|
"related": {
|
||||||
|
"closing_mrs": [{ "iid": 200, "title": "...", "state": "merged" }],
|
||||||
|
"related_issues": [{ "iid": 3800, "title": "Rail Break Card", "type": "related" }]
|
||||||
|
},
|
||||||
|
"timeline_excerpt": [
|
||||||
|
{ "timestamp": "...", "event_type": "state_changed", "actor": "teernisse", "summary": "State changed to closed" }
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"meta": { "elapsed_ms": 350 }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
| # | Criterion | Input | Expected Output |
|
||||||
|
|---|-----------|-------|----------------|
|
||||||
|
| 1 | Issue explain produces all 7 sections | `lore -J explain issues N` | JSON with entity, description_excerpt, key_decisions, activity, open_threads, related, timeline_excerpt |
|
||||||
|
| 2 | MR explain produces all 7 sections | `lore -J explain mrs N` | Same shape, entity.type = "merge_request" |
|
||||||
|
| 3 | Key decisions captures correlated notes | State change + note by same actor within 60min | KeyDecision with action + context_note |
|
||||||
|
| 4 | Key decisions ignores unrelated notes | Note by different author near state change | Not in key_decisions array |
|
||||||
|
| 5 | Open threads filters correctly | 2 discussions: 1 resolved, 1 unresolved | Only unresolved in open_threads |
|
||||||
|
| 6 | Activity counts are accurate | 3 state events, 2 label events, 10 notes | Matching counts in activity section |
|
||||||
|
| 7 | Performance | Issue with 50 notes | <500ms |
|
||||||
|
| 8 | Entity not found | Non-existent IID | Exit code 17, suggestion to sync |
|
||||||
|
| 9 | Ambiguous project | IID exists in multiple projects, no -p | Exit code 18, suggestion to use -p |
|
||||||
|
| 10 | Human render | `lore explain issues N` (no -J) | Formatted narrative with headers |
|
||||||
|
| 11 | Singular entity type accepted | `lore explain issue 42` | Same as `lore explain issues 42` |
|
||||||
|
| 12 | Section filtering works | `--sections key_decisions,activity` | Only those 2 sections + entity in JSON |
|
||||||
|
| 13 | No-timeline skips timeline | `--no-timeline` | timeline_excerpt absent, faster execution |
|
||||||
|
| 14 | Max-decisions caps output | `--max-decisions 3` | At most 3 key_decisions |
|
||||||
|
| 15 | Since scopes events/notes | `--since 30d` | Only events/notes from last 30 days in activity, key_decisions |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Non-Goals
|
||||||
|
|
||||||
|
- **No LLM summarization** — This is template-based v1. LLM enhancement is a separate future feature.
|
||||||
|
- **No new database migrations** — Uses existing schema (resource_state_events, resource_label_events, discussions, notes, entity_references tables all exist).
|
||||||
|
- **No modification of show/ or timeline/ modules** — Copy patterns, don't refactor existing code. If we later want to share code, that's a separate refactoring bead.
|
||||||
|
- **No interactive mode** — Output only, no prompts or follow-up questions.
|
||||||
|
- **No MR diff analysis** — No file-level change summaries. Use `file-history` or `trace` for that.
|
||||||
|
- **No assignee/reviewer history** — Activity summary counts events but doesn't track assignment changes over time.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Tasks
|
||||||
|
|
||||||
|
### Phase 1: Setup & Registration
|
||||||
|
|
||||||
|
- [ ] **Task 1:** Register explain command in CLI and wire dispatch
|
||||||
|
- **Implements:** Infrastructure (UJ-1, UJ-2 prerequisite)
|
||||||
|
- **Files:** `src/cli/mod.rs`, `src/cli/commands/mod.rs`, `src/main.rs`, `src/app/handlers.rs`, NEW `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Nothing
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_issue_basic` in explain.rs: insert a minimal issue + project + 1 discussion + 1 note + 1 state event into in-memory DB, call `run_explain()` with default ExplainParams, assert all 7 top-level sections present in result
|
||||||
|
2. Write `test_explain_mr` in explain.rs: insert MR with merged_at, call `run_explain()`, assert `entity.type == "merge_request"` and merged_at is populated
|
||||||
|
3. Write `test_explain_singular_entity_type`: call with `entity_type: "issue"`, assert it resolves same as `"issues"`
|
||||||
|
4. Run tests — all must FAIL (red)
|
||||||
|
5. Implement: Explain variant in Commands enum (with all flags: `--sections`, `--no-timeline`, `--max-decisions`, `--since`, singular entity type acceptance), handle_explain in handlers.rs (normalize entity_type, parse --since, build ExplainParams), skeleton `run_explain()` in explain.rs
|
||||||
|
6. Run tests — all must PASS (green)
|
||||||
|
- **Acceptance:** `cargo test explain::tests::test_explain_issue_basic`, `test_explain_mr`, and `test_explain_singular_entity_type` pass. Command registered in CLI help with after_help examples block.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Use inline args pattern (like Drift) with all flags from Code Style section
|
||||||
|
- `entity_type` validated by `#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]`
|
||||||
|
- Normalize in handler: `"issue"` -> `"issues"`, `"mr"` -> `"mrs"`
|
||||||
|
- Parse `--since` using `crate::core::time::parse_since()` — returns ms epoch threshold
|
||||||
|
- Validate `--sections` values against allowed set: `["entity", "description", "key_decisions", "activity", "open_threads", "related", "timeline"]`
|
||||||
|
- Copy the `find_issue`/`find_mr` and `get_*` query patterns from show/issue.rs and show/mr.rs — they're private functions so can't be imported
|
||||||
|
- Use `resolve_project()` from `crate::core::project` for project resolution
|
||||||
|
- Use `ms_to_iso()` from `crate::core::time` for timestamp conversion
|
||||||
|
|
||||||
|
### Phase 2: Core Logic
|
||||||
|
|
||||||
|
- [ ] **Task 2:** Implement key-decisions heuristic
|
||||||
|
- **Implements:** UJ-3
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_key_decision_heuristic`: insert state change event at T, insert note by SAME author at T+30min, call `extract_key_decisions()`, assert 1 decision with correct action + context_note
|
||||||
|
2. Write `test_explain_key_decision_ignores_unrelated_notes`: insert state change by alice, insert note by bob at T+30min, assert 0 decisions
|
||||||
|
3. Write `test_explain_key_decision_label_event`: insert label add event + correlated note, assert decision.action starts with "label: +"
|
||||||
|
4. Write `test_explain_max_decisions`: insert 5 correlated event+note pairs, call with `max_decisions: 3`, assert exactly 3 decisions returned
|
||||||
|
5. Write `test_explain_since_scopes_events`: insert event at T-60d and event at T-10d, call with `since: Some(T-30d)`, assert only recent event appears
|
||||||
|
6. Run tests — all must FAIL (red)
|
||||||
|
7. Implement `extract_key_decisions()` function:
|
||||||
|
- Query resource_state_events and resource_label_events for entity (with optional `--since` filter)
|
||||||
|
- Merge into unified chronological list
|
||||||
|
- For each event, find first non-system note by same actor within 60min (notes also `--since` filtered)
|
||||||
|
- Cap at `params.max_decisions`
|
||||||
|
8. Run tests — all must PASS (green)
|
||||||
|
- **Acceptance:** All 5 tests pass. Heuristic correctly correlates events with explanatory notes. `--max-decisions` and `--since` respected.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- State events query: `SELECT state, actor_username, created_at FROM resource_state_events WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) ORDER BY created_at`
|
||||||
|
- Label events query: `SELECT action, label_name, actor_username, created_at FROM resource_label_events WHERE {id_col} = ?1 AND (?2 IS NULL OR created_at >= ?2) ORDER BY created_at`
|
||||||
|
- Notes query: `SELECT n.body, n.author_username, n.created_at FROM notes n JOIN discussions d ON n.discussion_id = d.id WHERE d.{id_col} = ?1 AND n.is_system = 0 AND (?2 IS NULL OR n.created_at >= ?2) ORDER BY n.created_at`
|
||||||
|
- The `{id_col}` is either `issue_id` or `merge_request_id` based on entity_type
|
||||||
|
- Pass `params.since` (Option<i64>) as the `?2` parameter — NULL means no filter
|
||||||
|
- Use `crate::core::time::ms_to_iso()` for timestamp conversion in output
|
||||||
|
- Truncate context_note to 500 chars using `crate::cli::render::truncate()` or a local helper
|
||||||
|
|
||||||
|
- [ ] **Task 3:** Implement open threads, activity summary, and cross-references
|
||||||
|
- **Implements:** UJ-1
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_open_threads`: insert 2 discussions (1 with resolved=0 resolvable=1, 1 with resolved=1 resolvable=1), assert only unresolved appears in open_threads
|
||||||
|
2. Write `test_explain_activity_counts`: insert 3 state events + 2 label events + 10 non-system notes, assert activity.state_changes=3, label_changes=2, notes=10
|
||||||
|
3. Write `test_explain_no_notes`: insert issue with zero notes and zero events, assert empty key_decisions, empty open_threads, activity all zeros, description_excerpt = "(no description)" if description is NULL
|
||||||
|
4. Run tests — all must FAIL (red)
|
||||||
|
5. Implement:
|
||||||
|
- `fetch_open_threads()`: query discussions WHERE resolvable=1 AND resolved=0, fetch first note author + note count per thread
|
||||||
|
- `build_activity_summary()`: count state events, label events, non-system notes, find min/max timestamps
|
||||||
|
- `fetch_related_entities()`: query entity_references in both directions (source and target)
|
||||||
|
- Description excerpt: first 500 chars of description, or "(no description)" if NULL
|
||||||
|
6. Run tests — all must PASS (green)
|
||||||
|
- **Acceptance:** All 3 tests pass. Open threads correctly filtered. Activity counts accurate. Empty entity handled gracefully.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Open threads query: `SELECT d.gitlab_discussion_id, d.first_note_at, d.last_note_at FROM discussions d WHERE d.{id_col} = ?1 AND d.resolvable = 1 AND d.resolved = 0 ORDER BY d.last_note_at DESC`
|
||||||
|
- For first note author: `SELECT author_username FROM notes WHERE discussion_id = ?1 ORDER BY created_at ASC LIMIT 1`
|
||||||
|
- For note count: `SELECT COUNT(*) FROM notes WHERE discussion_id = ?1 AND is_system = 0`
|
||||||
|
- Cross-references: both outgoing and incoming from entity_references table
|
||||||
|
- For closing MRs, reuse the query pattern from show/issue.rs `get_closing_mrs()`
|
||||||
|
|
||||||
|
- [ ] **Task 4:** Wire timeline excerpt using existing pipeline
|
||||||
|
- **Implements:** UJ-1
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`
|
||||||
|
- **Depends on:** Task 1
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_timeline_excerpt`: insert issue + state events + notes, call run_explain() with `no_timeline: false`, assert timeline_excerpt is Some and non-empty and capped at 20 events
|
||||||
|
2. Write `test_explain_no_timeline_flag`: call run_explain() with `no_timeline: true`, assert timeline_excerpt is None
|
||||||
|
3. Run tests — both must FAIL (red)
|
||||||
|
4. Implement: when `!params.no_timeline` and `--sections` includes "timeline" (or is None), call `seed_timeline_direct()` with entity type + IID, then `collect_events()`, convert first 20 TimelineEvents into TimelineEventSummary structs. Otherwise set timeline_excerpt to None.
|
||||||
|
5. Run tests — both must PASS (green)
|
||||||
|
- **Acceptance:** Timeline excerpt present with max 20 events when enabled. Skipped entirely when `--no-timeline`. Uses existing timeline pipeline (no reimplementation).
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Import: `use crate::timeline::seed::seed_timeline_direct;` and `use crate::timeline::collect::collect_events;`
|
||||||
|
- `seed_timeline_direct()` takes `(conn, entity_type, iid, project_id)` — verify exact signature before implementing
|
||||||
|
- `collect_events()` returns `Vec<TimelineEvent>` — map to simplified `TimelineEventSummary` (timestamp, event_type string, actor, summary)
|
||||||
|
- Timeline pipeline uses `EntityRef` struct from `crate::timeline::types` — needs entity's local DB id and project_path
|
||||||
|
- Cap at 20 events: `events.truncate(20)` after collection
|
||||||
|
- `--no-timeline` takes precedence over `--sections timeline` (if both specified, skip timeline)
|
||||||
|
|
||||||
|
### Phase 3: Output Rendering
|
||||||
|
|
||||||
|
- [ ] **Task 5:** Robot mode JSON output and human-readable rendering
|
||||||
|
- **Implements:** UJ-1, UJ-2
|
||||||
|
- **Files:** `src/cli/commands/explain.rs`, `src/app/robot_docs.rs`
|
||||||
|
- **Depends on:** Task 1, 2, 3, 4
|
||||||
|
- **Test-first:**
|
||||||
|
1. Write `test_explain_robot_output_shape`: call run_explain() with all sections, serialize to JSON, assert all 7 top-level keys present
|
||||||
|
2. Write `test_explain_sections_filter_robot`: call run_explain() with `sections: Some(vec!["key_decisions", "activity"])`, serialize, assert only `entity` + `key_decisions` + `activity` keys present (entity always included), assert `description_excerpt`, `open_threads`, `related`, `timeline_excerpt` are absent
|
||||||
|
3. Run tests — both must FAIL (red)
|
||||||
|
4. Implement:
|
||||||
|
- Robot mode: `print_explain_json()` wrapping ExplainResult in `{"ok": true, "data": ..., "meta": {...}}` envelope. `#[serde(skip_serializing_if = "Option::is_none")]` on optional sections handles filtering automatically.
|
||||||
|
- Human mode: `print_explain()` with section headers, colored output, indented key decisions, truncated descriptions. Check `params.sections` before rendering each section.
|
||||||
|
- Register in robot-docs manifest (include `--sections`, `--no-timeline`, `--max-decisions`, `--since` flags)
|
||||||
|
5. Run tests — both must PASS (green)
|
||||||
|
- **Acceptance:** Robot JSON matches schema. Section filtering works in both robot and human mode. Command appears in `lore robot-docs`.
|
||||||
|
- **Implementation notes:**
|
||||||
|
- Robot envelope: use `serde_json::json!()` with `RobotMeta` from `crate::cli::robot`
|
||||||
|
- Human rendering: use `Theme::bold()`, `Icons`, `render::truncate()` from `crate::cli::render`
|
||||||
|
- Follow timeline.rs rendering pattern: header with entity info -> separator line -> sections
|
||||||
|
- Register in robot_docs.rs following the existing pattern for other commands
|
||||||
|
- Section filtering: the `run_explain()` function should already return None for unselected sections. The serializer skips them. Human renderer checks `is_some()` before rendering.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Corrections from Original Bead
|
||||||
|
|
||||||
|
The bead (bd-9lbr) was created before a codebase rearchitecture. Key corrections:
|
||||||
|
|
||||||
|
1. **`src/core/events_db.rs` does not exist** — Event storage is in `src/ingestion/storage/events.rs` (insert only). Event queries are inline in `timeline/collect.rs`. Explain needs its own inline queries.
|
||||||
|
|
||||||
|
2. **`ResourceStateEvent` / `ResourceLabelEvent` structs don't exist** — The timeline queries raw rows directly. Explain should define lightweight local structs or use tuples.
|
||||||
|
|
||||||
|
3. **`run_show_issue()` / `run_show_mr()` are private** — They live in `include!()` files inside show/mod.rs. Cannot be imported. Copy the query patterns instead.
|
||||||
|
|
||||||
|
4. **bd-2g50 blocker is CLOSED** — `IssueDetail` already has `closed_at`, `references_full`, `user_notes_count`, `confidential`. No blocker.
|
||||||
|
|
||||||
|
5. **Clap registration pattern** — The bead shows args directly on the enum variant, which is correct for explain's simple args (matches Drift, Related pattern). No need for a separate ExplainArgs struct.
|
||||||
|
|
||||||
|
6. **entity_references has no fetch query** — Only `insert_entity_reference()` and `count_references_for_source()` exist. Explain needs a new SELECT query (inline in explain.rs).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Session Log
|
||||||
|
|
||||||
|
### Session 1 — 2026-03-10
|
||||||
|
- Read bead bd-9lbr thoroughly — exceptionally detailed but written before rearchitecture
|
||||||
|
- Verified infrastructure: show/ (private functions, copy patterns), timeline/ (importable pipeline), events (inline SQL, no typed structs), xref (no fetch query), discussions (resolvable/resolved confirmed in migration 028)
|
||||||
|
- Discovered bd-2g50 blocker is CLOSED — no dependency
|
||||||
|
- Decided: two positional args (`lore explain issues N`) over single query syntax
|
||||||
|
- Decided: formalize + gap-fill approach (bead is thorough, just needs updating)
|
||||||
|
- Documented 6 corrections from original bead to current codebase state
|
||||||
|
- Drafted complete spec with 5 tasks across 3 phases
|
||||||
|
|
||||||
|
### Session 1b — 2026-03-10 (CLI UX Audit)
|
||||||
|
- Audited full CLI surface (30+ commands) against explain's proposed UX
|
||||||
|
- Identified 8 improvements, user selected 6 to incorporate:
|
||||||
|
1. **after_help examples block** — every other lore command has this, explain was missing it
|
||||||
|
2. **--sections flag** — robot token efficiency, skip unselected sections entirely
|
||||||
|
4. **Singular entity type tolerance** — accept `issue`/`mr` alongside `issues`/`mrs`
|
||||||
|
5. **--no-timeline flag** — skip heaviest section for faster execution
|
||||||
|
7. **--max-decisions N flag** — user control over key_decisions cap (default 10)
|
||||||
|
8. **--since flag** — time-scope events/notes for long-lived entities
|
||||||
|
- Skipped: #3 (command aliases ex/narrative), #6 (#42/!99 shorthand)
|
||||||
|
- Updated: Code Style, Boundaries, Architecture (ExplainParams + ExplainResult types, section filtering, time scoping, SQL queries), Success Criteria (+5 new), Testing Strategy (+5 new tests), all 5 Tasks
|
||||||
|
- ExplainResult sections now `Option<T>` with `skip_serializing_if` for section filtering
|
||||||
|
- All sections remain complete — spec is ready for implementation
|
||||||
@@ -7,6 +7,10 @@ struct FallbackErrorOutput {
|
|||||||
struct FallbackError {
|
struct FallbackError {
|
||||||
code: String,
|
code: String,
|
||||||
message: String,
|
message: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
suggestion: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
actions: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
|
fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
|
||||||
@@ -20,6 +24,8 @@ fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
|
|||||||
error: FallbackError {
|
error: FallbackError {
|
||||||
code: "INTERNAL_ERROR".to_string(),
|
code: "INTERNAL_ERROR".to_string(),
|
||||||
message: gi_error.to_string(),
|
message: gi_error.to_string(),
|
||||||
|
suggestion: None,
|
||||||
|
actions: Vec::new(),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
serde_json::to_string(&fallback)
|
serde_json::to_string(&fallback)
|
||||||
@@ -59,6 +65,8 @@ fn handle_error(e: Box<dyn std::error::Error>, robot_mode: bool) -> ! {
|
|||||||
error: FallbackError {
|
error: FallbackError {
|
||||||
code: "INTERNAL_ERROR".to_string(),
|
code: "INTERNAL_ERROR".to_string(),
|
||||||
message: e.to_string(),
|
message: e.to_string(),
|
||||||
|
suggestion: None,
|
||||||
|
actions: Vec::new(),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
eprintln!(
|
eprintln!(
|
||||||
|
|||||||
@@ -361,7 +361,7 @@ fn print_combined_ingest_json(
|
|||||||
notes_upserted: mrs.notes_upserted,
|
notes_upserted: mrs.notes_upserted,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
@@ -735,7 +735,7 @@ async fn handle_init(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let project_paths: Vec<String> = projects_flag
|
let project_paths: Vec<String> = projects_flag
|
||||||
.unwrap()
|
.expect("validated: checked for None at lines 714-721")
|
||||||
.split(',')
|
.split(',')
|
||||||
.map(|p| p.trim().to_string())
|
.map(|p| p.trim().to_string())
|
||||||
.filter(|p| !p.is_empty())
|
.filter(|p| !p.is_empty())
|
||||||
@@ -743,8 +743,10 @@ async fn handle_init(
|
|||||||
|
|
||||||
let result = run_init(
|
let result = run_init(
|
||||||
InitInputs {
|
InitInputs {
|
||||||
gitlab_url: gitlab_url_flag.unwrap(),
|
gitlab_url: gitlab_url_flag
|
||||||
token_env_var: token_env_var_flag.unwrap(),
|
.expect("validated: checked for None at lines 714-721"),
|
||||||
|
token_env_var: token_env_var_flag
|
||||||
|
.expect("validated: checked for None at lines 714-721"),
|
||||||
project_paths,
|
project_paths,
|
||||||
default_project: default_project_flag.clone(),
|
default_project: default_project_flag.clone(),
|
||||||
},
|
},
|
||||||
@@ -973,9 +975,7 @@ async fn handle_auth_test(
|
|||||||
name: result.name.clone(),
|
name: result.name.clone(),
|
||||||
gitlab_url: result.base_url.clone(),
|
gitlab_url: result.base_url.clone(),
|
||||||
},
|
},
|
||||||
meta: RobotMeta {
|
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output)?);
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
} else {
|
} else {
|
||||||
@@ -1036,9 +1036,7 @@ async fn handle_doctor(
|
|||||||
success: result.success,
|
success: result.success,
|
||||||
checks: result.checks,
|
checks: result.checks,
|
||||||
},
|
},
|
||||||
meta: RobotMeta {
|
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output)?);
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
} else {
|
} else {
|
||||||
@@ -1083,9 +1081,7 @@ fn handle_version(robot_mode: bool) -> Result<(), Box<dyn std::error::Error>> {
|
|||||||
Some(git_hash)
|
Some(git_hash)
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
meta: RobotMeta {
|
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output)?);
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
} else if git_hash.is_empty() {
|
} else if git_hash.is_empty() {
|
||||||
@@ -1243,9 +1239,7 @@ async fn handle_migrate(
|
|||||||
after_version,
|
after_version,
|
||||||
migrated: after_version > before_version,
|
migrated: after_version > before_version,
|
||||||
},
|
},
|
||||||
meta: RobotMeta {
|
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output)?);
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
} else if after_version > before_version {
|
} else if after_version > before_version {
|
||||||
@@ -1326,7 +1320,7 @@ fn handle_file_history(
|
|||||||
|
|
||||||
if robot_mode {
|
if robot_mode {
|
||||||
let elapsed_ms = start.elapsed().as_millis() as u64;
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
print_file_history_json(&result, elapsed_ms);
|
print_file_history_json(&result, elapsed_ms)?;
|
||||||
} else {
|
} else {
|
||||||
print_file_history(&result);
|
print_file_history(&result);
|
||||||
}
|
}
|
||||||
@@ -1382,7 +1376,7 @@ fn handle_trace(
|
|||||||
|
|
||||||
if robot_mode {
|
if robot_mode {
|
||||||
let elapsed_ms = start.elapsed().as_millis() as u64;
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
print_trace_json(&result, elapsed_ms, line_requested);
|
print_trace_json(&result, elapsed_ms, line_requested)?;
|
||||||
} else {
|
} else {
|
||||||
print_trace(&result);
|
print_trace(&result);
|
||||||
}
|
}
|
||||||
@@ -1960,9 +1954,7 @@ async fn handle_health(
|
|||||||
schema_version,
|
schema_version,
|
||||||
actions,
|
actions,
|
||||||
},
|
},
|
||||||
meta: RobotMeta {
|
meta: RobotMeta::new(start.elapsed().as_millis() as u64),
|
||||||
elapsed_ms: start.elapsed().as_millis() as u64,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
println!("{}", serde_json::to_string(&output)?);
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -115,7 +115,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"issues": {
|
"issues": {
|
||||||
"description": "List or show issues",
|
"description": "List issues, or view detail with <IID>",
|
||||||
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "--status <name>", "-p/--project", "-a/--author", "-A/--assignee", "-l/--label", "-m/--milestone", "--since", "--due-before", "--has-due", "--no-has-due", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "--status <name>", "-p/--project", "-a/--author", "-A/--assignee", "-l/--label", "-m/--milestone", "--since", "--due-before", "--has-due", "--no-has-due", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
||||||
"example": "lore --robot issues --state opened --limit 10",
|
"example": "lore --robot issues --state opened --limit 10",
|
||||||
"notes": {
|
"notes": {
|
||||||
@@ -128,7 +128,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"data": {"issues": "[{iid:int, title:string, state:string, author_username:string, labels:[string], assignees:[string], discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, status_name:string?}]", "total_count": "int", "showing": "int"},
|
"data": {"issues": "[{iid:int, title:string, state:string, author_username:string, labels:[string], assignees:[string], discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, status_name:string?}]", "total_count": "int", "showing": "int"},
|
||||||
"meta": {"elapsed_ms": "int", "available_statuses": "[string] — all distinct status names in the database, for use with --status filter"}
|
"meta": {"elapsed_ms": "int", "available_statuses": "[string] — all distinct status names in the database, for use with --status filter"}
|
||||||
},
|
},
|
||||||
"show": {
|
"detail": {
|
||||||
"ok": "bool",
|
"ok": "bool",
|
||||||
"data": "IssueDetail (full entity with description, discussions, notes, events)",
|
"data": "IssueDetail (full entity with description, discussions, notes, events)",
|
||||||
"meta": {"elapsed_ms": "int"}
|
"meta": {"elapsed_ms": "int"}
|
||||||
@@ -138,7 +138,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
|
"fields_presets": {"minimal": ["iid", "title", "state", "updated_at_iso"]}
|
||||||
},
|
},
|
||||||
"mrs": {
|
"mrs": {
|
||||||
"description": "List or show merge requests",
|
"description": "List merge requests, or view detail with <IID>",
|
||||||
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "-p/--project", "-a/--author", "-A/--assignee", "-r/--reviewer", "-l/--label", "--since", "-d/--draft", "-D/--no-draft", "--target", "--source", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
"flags": ["<IID>", "-n/--limit", "--fields <list>", "-s/--state", "-p/--project", "-a/--author", "-A/--assignee", "-r/--reviewer", "-l/--label", "--since", "-d/--draft", "-D/--no-draft", "--target", "--source", "--sort", "--asc", "--no-asc", "-o/--open", "--no-open"],
|
||||||
"example": "lore --robot mrs --state opened",
|
"example": "lore --robot mrs --state opened",
|
||||||
"response_schema": {
|
"response_schema": {
|
||||||
@@ -147,7 +147,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"data": {"mrs": "[{iid:int, title:string, state:string, author_username:string, labels:[string], draft:bool, target_branch:string, source_branch:string, discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, reviewers:[string]}]", "total_count": "int", "showing": "int"},
|
"data": {"mrs": "[{iid:int, title:string, state:string, author_username:string, labels:[string], draft:bool, target_branch:string, source_branch:string, discussion_count:int, unresolved_count:int, created_at_iso:string, updated_at_iso:string, web_url:string?, project_path:string, reviewers:[string]}]", "total_count": "int", "showing": "int"},
|
||||||
"meta": {"elapsed_ms": "int"}
|
"meta": {"elapsed_ms": "int"}
|
||||||
},
|
},
|
||||||
"show": {
|
"detail": {
|
||||||
"ok": "bool",
|
"ok": "bool",
|
||||||
"data": "MrDetail (full entity with description, discussions, notes, events)",
|
"data": "MrDetail (full entity with description, discussions, notes, events)",
|
||||||
"meta": {"elapsed_ms": "int"}
|
"meta": {"elapsed_ms": "int"}
|
||||||
@@ -316,6 +316,17 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"meta": {"elapsed_ms": "int"}
|
"meta": {"elapsed_ms": "int"}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"explain": {
|
||||||
|
"description": "Auto-generate a structured narrative of an issue or MR",
|
||||||
|
"flags": ["<entity_type: issues|mrs>", "<IID>", "-p/--project <path>", "--sections <comma-list>", "--no-timeline", "--max-decisions <N>", "--since <period>"],
|
||||||
|
"valid_sections": ["entity", "description", "key_decisions", "activity", "open_threads", "related", "timeline"],
|
||||||
|
"example": "lore --robot explain issues 42 --sections key_decisions,activity --since 30d",
|
||||||
|
"response_schema": {
|
||||||
|
"ok": "bool",
|
||||||
|
"data": {"entity": "{type:string, iid:int, title:string, state:string, author:string, assignees:[string], labels:[string], created_at:string, updated_at:string, url:string?, status_name:string?}", "description_excerpt": "string?", "key_decisions": "[{timestamp:string, actor:string, action:string, context_note:string}]?", "activity": "{state_changes:int, label_changes:int, notes:int, first_event:string?, last_event:string?}?", "open_threads": "[{discussion_id:string, started_by:string, started_at:string, note_count:int, last_note_at:string}]?", "related": "{closing_mrs:[{iid:int, title:string, state:string, web_url:string?}], related_issues:[{entity_type:string, iid:int, title:string?, reference_type:string}]}?", "timeline_excerpt": "[{timestamp:string, event_type:string, actor:string?, summary:string}]?"},
|
||||||
|
"meta": {"elapsed_ms": "int"}
|
||||||
|
}
|
||||||
|
},
|
||||||
"notes": {
|
"notes": {
|
||||||
"description": "List notes from discussions with rich filtering",
|
"description": "List notes from discussions with rich filtering",
|
||||||
"flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--fields <list|minimal>", "--open"],
|
"flags": ["--limit/-n <N>", "--author/-a <username>", "--note-type <type>", "--contains <text>", "--for-issue <iid>", "--for-mr <iid>", "-p/--project <path>", "--since <period>", "--until <period>", "--path <filepath>", "--resolution <any|unresolved|resolved>", "--sort <created|updated>", "--asc", "--include-system", "--note-id <id>", "--gitlab-note-id <id>", "--discussion-id <id>", "--fields <list|minimal>", "--open"],
|
||||||
@@ -371,7 +382,7 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"mentioned_in": "[{entity_type:string, project:string, iid:int, title:string, state:string, attention_state:string, attention_reason:string, updated_at_iso:string, web_url:string?}]",
|
"mentioned_in": "[{entity_type:string, project:string, iid:int, title:string, state:string, attention_state:string, attention_reason:string, updated_at_iso:string, web_url:string?}]",
|
||||||
"activity": "[{timestamp_iso:string, event_type:string, entity_type:string, entity_iid:int, project:string, actor:string?, is_own:bool, summary:string, body_preview:string?}]"
|
"activity": "[{timestamp_iso:string, event_type:string, entity_type:string, entity_iid:int, project:string, actor:string?, is_own:bool, summary:string, body_preview:string?}]"
|
||||||
},
|
},
|
||||||
"meta": {"elapsed_ms": "int"}
|
"meta": {"elapsed_ms": "int", "gitlab_base_url": "string (GitLab instance URL for constructing entity links: {base_url}/{project}/-/issues/{iid})"}
|
||||||
},
|
},
|
||||||
"fields_presets": {
|
"fields_presets": {
|
||||||
"me_items_minimal": ["iid", "title", "attention_state", "attention_reason", "updated_at_iso"],
|
"me_items_minimal": ["iid", "title", "attention_state", "attention_reason", "updated_at_iso"],
|
||||||
@@ -385,7 +396,8 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"since_default": "1d for activity feed",
|
"since_default": "1d for activity feed",
|
||||||
"issue_filter": "Only In Progress / In Review status issues shown",
|
"issue_filter": "Only In Progress / In Review status issues shown",
|
||||||
"since_last_check": "Cursor-based inbox showing events since last run. Null on first run (no cursor yet). Groups events by entity (issue/MR). Sources: others' comments on your items, @mentions, assignment/review-request notes. Cursor auto-advances after each run. Use --reset-cursor to clear.",
|
"since_last_check": "Cursor-based inbox showing events since last run. Null on first run (no cursor yet). Groups events by entity (issue/MR). Sources: others' comments on your items, @mentions, assignment/review-request notes. Cursor auto-advances after each run. Use --reset-cursor to clear.",
|
||||||
"cursor_persistence": "Stored per user in ~/.local/share/lore/me_cursor_<username>.json. --project filters display only for since-last-check; cursor still advances for all projects for that user."
|
"cursor_persistence": "Stored per user in ~/.local/share/lore/me_cursor_<username>.json. --project filters display only for since-last-check; cursor still advances for all projects for that user.",
|
||||||
|
"url_construction": "Use meta.gitlab_base_url + project + entity_type + iid to build links: {gitlab_base_url}/{project}/-/{issues|merge_requests}/{iid}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"robot-docs": {
|
"robot-docs": {
|
||||||
@@ -449,7 +461,8 @@ fn handle_robot_docs(robot_mode: bool, brief: bool) -> Result<(), Box<dyn std::e
|
|||||||
"17": "Not found",
|
"17": "Not found",
|
||||||
"18": "Ambiguous match",
|
"18": "Ambiguous match",
|
||||||
"19": "Health check failed",
|
"19": "Health check failed",
|
||||||
"20": "Config not found"
|
"20": "Config not found",
|
||||||
|
"21": "Embeddings not built"
|
||||||
});
|
});
|
||||||
|
|
||||||
let workflows = serde_json::json!({
|
let workflows = serde_json::json!({
|
||||||
@@ -780,42 +793,3 @@ async fn handle_list_compat(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn handle_show_compat(
|
|
||||||
config_override: Option<&str>,
|
|
||||||
entity: &str,
|
|
||||||
iid: i64,
|
|
||||||
project_filter: Option<&str>,
|
|
||||||
robot_mode: bool,
|
|
||||||
) -> Result<(), Box<dyn std::error::Error>> {
|
|
||||||
let start = std::time::Instant::now();
|
|
||||||
let config = Config::load(config_override)?;
|
|
||||||
let project_filter = config.effective_project(project_filter);
|
|
||||||
|
|
||||||
match entity {
|
|
||||||
"issue" => {
|
|
||||||
let result = run_show_issue(&config, iid, project_filter)?;
|
|
||||||
if robot_mode {
|
|
||||||
print_show_issue_json(&result, start.elapsed().as_millis() as u64);
|
|
||||||
} else {
|
|
||||||
print_show_issue(&result);
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
"mr" => {
|
|
||||||
let result = run_show_mr(&config, iid, project_filter)?;
|
|
||||||
if robot_mode {
|
|
||||||
print_show_mr_json(&result, start.elapsed().as_millis() as u64);
|
|
||||||
} else {
|
|
||||||
print_show_mr(&result);
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
eprintln!(
|
|
||||||
"{}",
|
|
||||||
Theme::error().render(&format!("Unknown entity: {entity}"))
|
|
||||||
);
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -209,6 +209,16 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
],
|
],
|
||||||
),
|
),
|
||||||
("drift", &["--threshold", "--project"]),
|
("drift", &["--threshold", "--project"]),
|
||||||
|
(
|
||||||
|
"explain",
|
||||||
|
&[
|
||||||
|
"--project",
|
||||||
|
"--sections",
|
||||||
|
"--no-timeline",
|
||||||
|
"--max-decisions",
|
||||||
|
"--since",
|
||||||
|
],
|
||||||
|
),
|
||||||
(
|
(
|
||||||
"notes",
|
"notes",
|
||||||
&[
|
&[
|
||||||
@@ -290,7 +300,6 @@ const COMMAND_FLAGS: &[(&str, &[&str])] = &[
|
|||||||
"--source-branch",
|
"--source-branch",
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
("show", &["--project"]),
|
|
||||||
("reset", &["--yes"]),
|
("reset", &["--yes"]),
|
||||||
(
|
(
|
||||||
"me",
|
"me",
|
||||||
@@ -389,6 +398,7 @@ const CANONICAL_SUBCOMMANDS: &[&str] = &[
|
|||||||
"file-history",
|
"file-history",
|
||||||
"trace",
|
"trace",
|
||||||
"drift",
|
"drift",
|
||||||
|
"explain",
|
||||||
"related",
|
"related",
|
||||||
"cron",
|
"cron",
|
||||||
"token",
|
"token",
|
||||||
@@ -396,7 +406,6 @@ const CANONICAL_SUBCOMMANDS: &[&str] = &[
|
|||||||
"backup",
|
"backup",
|
||||||
"reset",
|
"reset",
|
||||||
"list",
|
"list",
|
||||||
"show",
|
|
||||||
"auth-test",
|
"auth-test",
|
||||||
"sync-status",
|
"sync-status",
|
||||||
];
|
];
|
||||||
|
|||||||
@@ -254,7 +254,7 @@ pub fn print_event_count_json(counts: &EventCounts, elapsed_ms: u64) {
|
|||||||
},
|
},
|
||||||
total: counts.total(),
|
total: counts.total(),
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
@@ -325,7 +325,7 @@ pub fn print_count_json(result: &CountResult, elapsed_ms: u64) {
|
|||||||
system_excluded: result.system_count,
|
system_excluded: result.system_count,
|
||||||
breakdown,
|
breakdown,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ pub fn print_cron_install_json(result: &CronInstallResult, elapsed_ms: u64) {
|
|||||||
log_path: result.log_path.display().to_string(),
|
log_path: result.log_path.display().to_string(),
|
||||||
replaced: result.replaced,
|
replaced: result.replaced,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
if let Ok(json) = serde_json::to_string(&output) {
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
println!("{json}");
|
println!("{json}");
|
||||||
@@ -128,7 +128,7 @@ pub fn print_cron_uninstall_json(result: &CronUninstallResult, elapsed_ms: u64)
|
|||||||
action: "uninstall",
|
action: "uninstall",
|
||||||
was_installed: result.was_installed,
|
was_installed: result.was_installed,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
if let Ok(json) = serde_json::to_string(&output) {
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
println!("{json}");
|
println!("{json}");
|
||||||
@@ -284,7 +284,7 @@ pub fn print_cron_status_json(info: &CronStatusInfo, elapsed_ms: u64) {
|
|||||||
last_sync_at: info.last_sync.as_ref().map(|s| s.started_at_iso.clone()),
|
last_sync_at: info.last_sync.as_ref().map(|s| s.started_at_iso.clone()),
|
||||||
last_sync_status: info.last_sync.as_ref().map(|s| s.status.clone()),
|
last_sync_status: info.last_sync.as_ref().map(|s| s.status.clone()),
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
if let Ok(json) = serde_json::to_string(&output) {
|
if let Ok(json) = serde_json::to_string(&output) {
|
||||||
println!("{json}");
|
println!("{json}");
|
||||||
|
|||||||
@@ -468,7 +468,7 @@ pub fn print_drift_human(response: &DriftResponse) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_drift_json(response: &DriftResponse, elapsed_ms: u64) {
|
pub fn print_drift_json(response: &DriftResponse, elapsed_ms: u64) {
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": response,
|
"data": response,
|
||||||
|
|||||||
@@ -135,7 +135,7 @@ pub fn print_embed_json(result: &EmbedCommandResult, elapsed_ms: u64) {
|
|||||||
let output = EmbedJsonOutput {
|
let output = EmbedJsonOutput {
|
||||||
ok: true,
|
ok: true,
|
||||||
data: result,
|
data: result,
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
Ok(json) => println!("{json}"),
|
Ok(json) => println!("{json}"),
|
||||||
|
|||||||
1977
src/cli/commands/explain.rs
Normal file
1977
src/cli/commands/explain.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -5,7 +5,7 @@ use tracing::info;
|
|||||||
use crate::Config;
|
use crate::Config;
|
||||||
use crate::cli::render::{self, Icons, Theme};
|
use crate::cli::render::{self, Icons, Theme};
|
||||||
use crate::core::db::create_connection;
|
use crate::core::db::create_connection;
|
||||||
use crate::core::error::Result;
|
use crate::core::error::{LoreError, Result};
|
||||||
use crate::core::file_history::resolve_rename_chain;
|
use crate::core::file_history::resolve_rename_chain;
|
||||||
use crate::core::paths::get_db_path;
|
use crate::core::paths::get_db_path;
|
||||||
use crate::core::project::resolve_project;
|
use crate::core::project::resolve_project;
|
||||||
@@ -391,7 +391,7 @@ pub fn print_file_history(result: &FileHistoryResult) {
|
|||||||
|
|
||||||
// ── Robot (JSON) output ─────────────────────────────────────────────────────
|
// ── Robot (JSON) output ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) {
|
pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) -> Result<()> {
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": {
|
"data": {
|
||||||
@@ -409,5 +409,10 @@ pub fn print_file_history_json(result: &FileHistoryResult, elapsed_ms: u64) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap_or_default());
|
println!(
|
||||||
|
"{}",
|
||||||
|
serde_json::to_string(&output)
|
||||||
|
.map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))?
|
||||||
|
);
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -257,7 +257,7 @@ pub fn print_generate_docs_json(result: &GenerateDocsResult, elapsed_ms: u64) {
|
|||||||
unchanged: result.unchanged,
|
unchanged: result.unchanged,
|
||||||
errored: result.errored,
|
errored: result.errored,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
Ok(json) => println!("{json}"),
|
Ok(json) => println!("{json}"),
|
||||||
|
|||||||
@@ -191,7 +191,7 @@ pub fn print_ingest_summary_json(result: &IngestResult, elapsed_ms: u64) {
|
|||||||
status_enrichment,
|
status_enrichment,
|
||||||
status_enrichment_errors: result.status_enrichment_errors,
|
status_enrichment_errors: result.status_enrichment_errors,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
|
|||||||
@@ -370,7 +370,7 @@ pub fn print_list_mrs(result: &MrListResult) {
|
|||||||
|
|
||||||
pub fn print_list_mrs_json(result: &MrListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
pub fn print_list_mrs_json(result: &MrListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
||||||
let json_result = MrListResultJson::from(result);
|
let json_result = MrListResultJson::from(result);
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": json_result,
|
"data": json_result,
|
||||||
|
|||||||
@@ -193,7 +193,7 @@ pub fn print_list_notes(result: &NoteListResult) {
|
|||||||
|
|
||||||
pub fn print_list_notes_json(result: &NoteListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
pub fn print_list_notes_json(result: &NoteListResult, elapsed_ms: u64, fields: Option<&[String]>) {
|
||||||
let json_result = NoteListResultJson::from(result);
|
let json_result = NoteListResultJson::from(result);
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": json_result,
|
"data": json_result,
|
||||||
|
|||||||
@@ -247,7 +247,7 @@ pub fn run_me(config: &Config, args: &MeArgs, robot_mode: bool) -> Result<()> {
|
|||||||
|
|
||||||
if robot_mode {
|
if robot_mode {
|
||||||
let fields = args.fields.as_deref();
|
let fields = args.fields.as_deref();
|
||||||
render_robot::print_me_json(&dashboard, elapsed_ms, fields)?;
|
render_robot::print_me_json(&dashboard, elapsed_ms, fields, &config.gitlab.base_url)?;
|
||||||
} else if show_all {
|
} else if show_all {
|
||||||
render_human::print_me_dashboard(&dashboard, single_project);
|
render_human::print_me_dashboard(&dashboard, single_project);
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -15,11 +15,12 @@ pub fn print_me_json(
|
|||||||
dashboard: &MeDashboard,
|
dashboard: &MeDashboard,
|
||||||
elapsed_ms: u64,
|
elapsed_ms: u64,
|
||||||
fields: Option<&[String]>,
|
fields: Option<&[String]>,
|
||||||
|
gitlab_base_url: &str,
|
||||||
) -> crate::core::error::Result<()> {
|
) -> crate::core::error::Result<()> {
|
||||||
let envelope = MeJsonEnvelope {
|
let envelope = MeJsonEnvelope {
|
||||||
ok: true,
|
ok: true,
|
||||||
data: MeDataJson::from_dashboard(dashboard),
|
data: MeDataJson::from_dashboard(dashboard),
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::with_base_url(elapsed_ms, gitlab_base_url),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut value = serde_json::to_value(&envelope)
|
let mut value = serde_json::to_value(&envelope)
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ pub mod cron;
|
|||||||
pub mod doctor;
|
pub mod doctor;
|
||||||
pub mod drift;
|
pub mod drift;
|
||||||
pub mod embed;
|
pub mod embed;
|
||||||
|
pub mod explain;
|
||||||
pub mod file_history;
|
pub mod file_history;
|
||||||
pub mod generate_docs;
|
pub mod generate_docs;
|
||||||
pub mod ingest;
|
pub mod ingest;
|
||||||
@@ -35,6 +36,7 @@ pub use cron::{
|
|||||||
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
|
pub use doctor::{DoctorChecks, print_doctor_results, run_doctor};
|
||||||
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
|
pub use drift::{DriftResponse, print_drift_human, print_drift_json, run_drift};
|
||||||
pub use embed::{print_embed, print_embed_json, run_embed};
|
pub use embed::{print_embed, print_embed_json, run_embed};
|
||||||
|
pub use explain::{handle_explain, print_explain, print_explain_json, run_explain};
|
||||||
pub use file_history::{print_file_history, print_file_history_json, run_file_history};
|
pub use file_history::{print_file_history, print_file_history_json, run_file_history};
|
||||||
pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs};
|
pub use generate_docs::{print_generate_docs, print_generate_docs_json, run_generate_docs};
|
||||||
pub use ingest::{
|
pub use ingest::{
|
||||||
|
|||||||
@@ -558,7 +558,7 @@ pub fn print_related_human(response: &RelatedResponse) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_related_json(response: &RelatedResponse, elapsed_ms: u64) {
|
pub fn print_related_json(response: &RelatedResponse, elapsed_ms: u64) {
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": response,
|
"data": response,
|
||||||
|
|||||||
@@ -44,6 +44,7 @@ pub struct DiscussionDetail {
|
|||||||
|
|
||||||
#[derive(Debug, Serialize)]
|
#[derive(Debug, Serialize)]
|
||||||
pub struct NoteDetail {
|
pub struct NoteDetail {
|
||||||
|
pub gitlab_id: i64,
|
||||||
pub author_username: String,
|
pub author_username: String,
|
||||||
pub body: String,
|
pub body: String,
|
||||||
pub created_at: i64,
|
pub created_at: i64,
|
||||||
@@ -277,7 +278,7 @@ fn get_issue_discussions(conn: &Connection, issue_id: i64) -> Result<Vec<Discuss
|
|||||||
.collect::<std::result::Result<Vec<_>, _>>()?;
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
let mut note_stmt = conn.prepare(
|
let mut note_stmt = conn.prepare(
|
||||||
"SELECT author_username, body, created_at, is_system
|
"SELECT gitlab_id, author_username, body, created_at, is_system
|
||||||
FROM notes
|
FROM notes
|
||||||
WHERE discussion_id = ?
|
WHERE discussion_id = ?
|
||||||
ORDER BY position",
|
ORDER BY position",
|
||||||
@@ -287,11 +288,12 @@ fn get_issue_discussions(conn: &Connection, issue_id: i64) -> Result<Vec<Discuss
|
|||||||
for (disc_id, individual_note) in disc_rows {
|
for (disc_id, individual_note) in disc_rows {
|
||||||
let notes: Vec<NoteDetail> = note_stmt
|
let notes: Vec<NoteDetail> = note_stmt
|
||||||
.query_map([disc_id], |row| {
|
.query_map([disc_id], |row| {
|
||||||
let is_system: i64 = row.get(3)?;
|
let is_system: i64 = row.get(4)?;
|
||||||
Ok(NoteDetail {
|
Ok(NoteDetail {
|
||||||
author_username: row.get(0)?,
|
gitlab_id: row.get(0)?,
|
||||||
body: row.get(1)?,
|
author_username: row.get(1)?,
|
||||||
created_at: row.get(2)?,
|
body: row.get(2)?,
|
||||||
|
created_at: row.get(3)?,
|
||||||
is_system: is_system == 1,
|
is_system: is_system == 1,
|
||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ pub struct MrDiscussionDetail {
|
|||||||
|
|
||||||
#[derive(Debug, Serialize)]
|
#[derive(Debug, Serialize)]
|
||||||
pub struct MrNoteDetail {
|
pub struct MrNoteDetail {
|
||||||
|
pub gitlab_id: i64,
|
||||||
pub author_username: String,
|
pub author_username: String,
|
||||||
pub body: String,
|
pub body: String,
|
||||||
pub created_at: i64,
|
pub created_at: i64,
|
||||||
@@ -224,7 +225,7 @@ fn get_mr_discussions(conn: &Connection, mr_id: i64) -> Result<Vec<MrDiscussionD
|
|||||||
.collect::<std::result::Result<Vec<_>, _>>()?;
|
.collect::<std::result::Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
let mut note_stmt = conn.prepare(
|
let mut note_stmt = conn.prepare(
|
||||||
"SELECT author_username, body, created_at, is_system,
|
"SELECT gitlab_id, author_username, body, created_at, is_system,
|
||||||
position_old_path, position_new_path, position_old_line,
|
position_old_path, position_new_path, position_old_line,
|
||||||
position_new_line, position_type
|
position_new_line, position_type
|
||||||
FROM notes
|
FROM notes
|
||||||
@@ -236,12 +237,12 @@ fn get_mr_discussions(conn: &Connection, mr_id: i64) -> Result<Vec<MrDiscussionD
|
|||||||
for (disc_id, individual_note) in disc_rows {
|
for (disc_id, individual_note) in disc_rows {
|
||||||
let notes: Vec<MrNoteDetail> = note_stmt
|
let notes: Vec<MrNoteDetail> = note_stmt
|
||||||
.query_map([disc_id], |row| {
|
.query_map([disc_id], |row| {
|
||||||
let is_system: i64 = row.get(3)?;
|
let is_system: i64 = row.get(4)?;
|
||||||
let old_path: Option<String> = row.get(4)?;
|
let old_path: Option<String> = row.get(5)?;
|
||||||
let new_path: Option<String> = row.get(5)?;
|
let new_path: Option<String> = row.get(6)?;
|
||||||
let old_line: Option<i64> = row.get(6)?;
|
let old_line: Option<i64> = row.get(7)?;
|
||||||
let new_line: Option<i64> = row.get(7)?;
|
let new_line: Option<i64> = row.get(8)?;
|
||||||
let position_type: Option<String> = row.get(8)?;
|
let position_type: Option<String> = row.get(9)?;
|
||||||
|
|
||||||
let position = if old_path.is_some()
|
let position = if old_path.is_some()
|
||||||
|| new_path.is_some()
|
|| new_path.is_some()
|
||||||
@@ -260,9 +261,10 @@ fn get_mr_discussions(conn: &Connection, mr_id: i64) -> Result<Vec<MrDiscussionD
|
|||||||
};
|
};
|
||||||
|
|
||||||
Ok(MrNoteDetail {
|
Ok(MrNoteDetail {
|
||||||
author_username: row.get(0)?,
|
gitlab_id: row.get(0)?,
|
||||||
body: row.get(1)?,
|
author_username: row.get(1)?,
|
||||||
created_at: row.get(2)?,
|
body: row.get(2)?,
|
||||||
|
created_at: row.get(3)?,
|
||||||
is_system: is_system == 1,
|
is_system: is_system == 1,
|
||||||
position,
|
position,
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -398,6 +398,7 @@ pub struct DiscussionDetailJson {
|
|||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
pub struct NoteDetailJson {
|
pub struct NoteDetailJson {
|
||||||
|
pub gitlab_id: i64,
|
||||||
pub author_username: String,
|
pub author_username: String,
|
||||||
pub body: String,
|
pub body: String,
|
||||||
pub created_at: String,
|
pub created_at: String,
|
||||||
@@ -458,6 +459,7 @@ impl From<&DiscussionDetail> for DiscussionDetailJson {
|
|||||||
impl From<&NoteDetail> for NoteDetailJson {
|
impl From<&NoteDetail> for NoteDetailJson {
|
||||||
fn from(note: &NoteDetail) -> Self {
|
fn from(note: &NoteDetail) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
gitlab_id: note.gitlab_id,
|
||||||
author_username: note.author_username.clone(),
|
author_username: note.author_username.clone(),
|
||||||
body: note.body.clone(),
|
body: note.body.clone(),
|
||||||
created_at: ms_to_iso(note.created_at),
|
created_at: ms_to_iso(note.created_at),
|
||||||
@@ -497,6 +499,7 @@ pub struct MrDiscussionDetailJson {
|
|||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
pub struct MrNoteDetailJson {
|
pub struct MrNoteDetailJson {
|
||||||
|
pub gitlab_id: i64,
|
||||||
pub author_username: String,
|
pub author_username: String,
|
||||||
pub body: String,
|
pub body: String,
|
||||||
pub created_at: String,
|
pub created_at: String,
|
||||||
@@ -542,6 +545,7 @@ impl From<&MrDiscussionDetail> for MrDiscussionDetailJson {
|
|||||||
impl From<&MrNoteDetail> for MrNoteDetailJson {
|
impl From<&MrNoteDetail> for MrNoteDetailJson {
|
||||||
fn from(note: &MrNoteDetail) -> Self {
|
fn from(note: &MrNoteDetail) -> Self {
|
||||||
Self {
|
Self {
|
||||||
|
gitlab_id: note.gitlab_id,
|
||||||
author_username: note.author_username.clone(),
|
author_username: note.author_username.clone(),
|
||||||
body: note.body.clone(),
|
body: note.body.clone(),
|
||||||
created_at: ms_to_iso(note.created_at),
|
created_at: ms_to_iso(note.created_at),
|
||||||
@@ -553,7 +557,7 @@ impl From<&MrNoteDetail> for MrNoteDetailJson {
|
|||||||
|
|
||||||
pub fn print_show_issue_json(issue: &IssueDetail, elapsed_ms: u64) {
|
pub fn print_show_issue_json(issue: &IssueDetail, elapsed_ms: u64) {
|
||||||
let json_result = IssueDetailJson::from(issue);
|
let json_result = IssueDetailJson::from(issue);
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": json_result,
|
"data": json_result,
|
||||||
@@ -567,7 +571,7 @@ pub fn print_show_issue_json(issue: &IssueDetail, elapsed_ms: u64) {
|
|||||||
|
|
||||||
pub fn print_show_mr_json(mr: &MrDetail, elapsed_ms: u64) {
|
pub fn print_show_mr_json(mr: &MrDetail, elapsed_ms: u64) {
|
||||||
let json_result = MrDetailJson::from(mr);
|
let json_result = MrDetailJson::from(mr);
|
||||||
let meta = RobotMeta { elapsed_ms };
|
let meta = RobotMeta::new(elapsed_ms);
|
||||||
let output = serde_json::json!({
|
let output = serde_json::json!({
|
||||||
"ok": true,
|
"ok": true,
|
||||||
"data": json_result,
|
"data": json_result,
|
||||||
|
|||||||
@@ -583,7 +583,7 @@ pub fn print_stats_json(result: &StatsResult, elapsed_ms: u64) {
|
|||||||
}),
|
}),
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
Ok(json) => println!("{json}"),
|
Ok(json) => println!("{json}"),
|
||||||
|
|||||||
@@ -313,7 +313,7 @@ pub fn print_sync_status_json(result: &SyncStatusResult, elapsed_ms: u64) {
|
|||||||
system_notes: result.summary.system_note_count,
|
system_notes: result.summary.system_note_count,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
match serde_json::to_string(&output) {
|
match serde_json::to_string(&output) {
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
use crate::cli::render::{Icons, Theme};
|
use crate::cli::render::{Icons, Theme};
|
||||||
|
use crate::core::error::{LoreError, Result};
|
||||||
use crate::core::trace::{TraceChain, TraceResult};
|
use crate::core::trace::{TraceChain, TraceResult};
|
||||||
|
|
||||||
/// Parse a path with optional `:line` suffix.
|
/// Parse a path with optional `:line` suffix.
|
||||||
@@ -152,7 +153,11 @@ fn truncate_body(body: &str, max: usize) -> String {
|
|||||||
format!("{}...", &body[..boundary])
|
format!("{}...", &body[..boundary])
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_trace_json(result: &TraceResult, elapsed_ms: u64, line_requested: Option<u32>) {
|
pub fn print_trace_json(
|
||||||
|
result: &TraceResult,
|
||||||
|
elapsed_ms: u64,
|
||||||
|
line_requested: Option<u32>,
|
||||||
|
) -> Result<()> {
|
||||||
// Truncate discussion bodies for token efficiency in robot mode
|
// Truncate discussion bodies for token efficiency in robot mode
|
||||||
let chains: Vec<serde_json::Value> = result
|
let chains: Vec<serde_json::Value> = result
|
||||||
.trace_chains
|
.trace_chains
|
||||||
@@ -205,7 +210,12 @@ pub fn print_trace_json(result: &TraceResult, elapsed_ms: u64, line_requested: O
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
println!("{}", serde_json::to_string(&output).unwrap_or_default());
|
println!(
|
||||||
|
"{}",
|
||||||
|
serde_json::to_string(&output)
|
||||||
|
.map_err(|e| LoreError::Other(format!("JSON serialization failed: {e}")))?
|
||||||
|
);
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -376,7 +376,7 @@ pub fn print_who_json(run: &WhoRun, args: &WhoArgs, elapsed_ms: u64) {
|
|||||||
resolved_input,
|
resolved_input,
|
||||||
result: data,
|
result: data,
|
||||||
},
|
},
|
||||||
meta: RobotMeta { elapsed_ms },
|
meta: RobotMeta::new(elapsed_ms),
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut value = serde_json::to_value(&output).unwrap_or_else(|e| {
|
let mut value = serde_json::to_value(&output).unwrap_or_else(|e| {
|
||||||
|
|||||||
@@ -277,6 +277,44 @@ pub enum Commands {
|
|||||||
/// Trace why code was introduced: file -> MR -> issue -> discussion
|
/// Trace why code was introduced: file -> MR -> issue -> discussion
|
||||||
Trace(TraceArgs),
|
Trace(TraceArgs),
|
||||||
|
|
||||||
|
/// Auto-generate a structured narrative of an issue or MR
|
||||||
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
|
lore explain issues 42 # Narrative for issue #42
|
||||||
|
lore explain mrs 99 -p group/repo # Narrative for MR !99 in specific project
|
||||||
|
lore -J explain issues 42 # JSON output for automation
|
||||||
|
lore explain issues 42 --sections key_decisions,open_threads # Specific sections only
|
||||||
|
lore explain issues 42 --since 30d # Narrative scoped to last 30 days
|
||||||
|
lore explain issues 42 --no-timeline # Skip timeline (faster)")]
|
||||||
|
Explain {
|
||||||
|
/// Entity type: "issues" or "mrs" (singular forms also accepted)
|
||||||
|
#[arg(value_parser = ["issues", "mrs", "issue", "mr"])]
|
||||||
|
entity_type: String,
|
||||||
|
|
||||||
|
/// Entity IID
|
||||||
|
iid: i64,
|
||||||
|
|
||||||
|
/// Scope to project (fuzzy match)
|
||||||
|
#[arg(short, long)]
|
||||||
|
project: Option<String>,
|
||||||
|
|
||||||
|
/// Select specific sections (comma-separated)
|
||||||
|
/// Valid: entity, description, key_decisions, activity, open_threads, related, timeline
|
||||||
|
#[arg(long, value_delimiter = ',', help_heading = "Output")]
|
||||||
|
sections: Option<Vec<String>>,
|
||||||
|
|
||||||
|
/// Skip timeline excerpt (faster execution)
|
||||||
|
#[arg(long, help_heading = "Output")]
|
||||||
|
no_timeline: bool,
|
||||||
|
|
||||||
|
/// Maximum key decisions to include
|
||||||
|
#[arg(long, default_value = "10", help_heading = "Output")]
|
||||||
|
max_decisions: usize,
|
||||||
|
|
||||||
|
/// Time scope for events/notes (e.g. 7d, 2w, 1m, or YYYY-MM-DD)
|
||||||
|
#[arg(long, help_heading = "Filters")]
|
||||||
|
since: Option<String>,
|
||||||
|
},
|
||||||
|
|
||||||
/// Detect discussion divergence from original intent
|
/// Detect discussion divergence from original intent
|
||||||
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
#[command(after_help = "\x1b[1mExamples:\x1b[0m
|
||||||
lore drift issues 42 # Check drift on issue #42
|
lore drift issues 42 # Check drift on issue #42
|
||||||
@@ -381,17 +419,6 @@ pub enum Commands {
|
|||||||
source_branch: Option<String>,
|
source_branch: Option<String>,
|
||||||
},
|
},
|
||||||
|
|
||||||
#[command(hide = true)]
|
|
||||||
Show {
|
|
||||||
#[arg(value_parser = ["issue", "mr"])]
|
|
||||||
entity: String,
|
|
||||||
|
|
||||||
iid: i64,
|
|
||||||
|
|
||||||
#[arg(long)]
|
|
||||||
project: Option<String>,
|
|
||||||
},
|
|
||||||
|
|
||||||
#[command(hide = true, name = "auth-test")]
|
#[command(hide = true, name = "auth-test")]
|
||||||
AuthTest,
|
AuthTest,
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,26 @@ use serde::Serialize;
|
|||||||
#[derive(Debug, Serialize)]
|
#[derive(Debug, Serialize)]
|
||||||
pub struct RobotMeta {
|
pub struct RobotMeta {
|
||||||
pub elapsed_ms: u64,
|
pub elapsed_ms: u64,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub gitlab_base_url: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RobotMeta {
|
||||||
|
/// Standard meta with timing only.
|
||||||
|
pub fn new(elapsed_ms: u64) -> Self {
|
||||||
|
Self {
|
||||||
|
elapsed_ms,
|
||||||
|
gitlab_base_url: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Meta with GitLab base URL for URL construction by consumers.
|
||||||
|
pub fn with_base_url(elapsed_ms: u64, base_url: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
elapsed_ms,
|
||||||
|
gitlab_base_url: Some(base_url.trim_end_matches('/').to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Filter JSON object fields in-place for `--fields` support.
|
/// Filter JSON object fields in-place for `--fields` support.
|
||||||
@@ -133,4 +153,27 @@ mod tests {
|
|||||||
let expanded = expand_fields_preset(&fields, "notes");
|
let expanded = expand_fields_preset(&fields, "notes");
|
||||||
assert_eq!(expanded, ["id", "body"]);
|
assert_eq!(expanded, ["id", "body"]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn meta_new_omits_base_url() {
|
||||||
|
let meta = RobotMeta::new(42);
|
||||||
|
let json = serde_json::to_value(&meta).unwrap();
|
||||||
|
assert_eq!(json["elapsed_ms"], 42);
|
||||||
|
assert!(json.get("gitlab_base_url").is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn meta_with_base_url_includes_it() {
|
||||||
|
let meta = RobotMeta::with_base_url(99, "https://gitlab.example.com");
|
||||||
|
let json = serde_json::to_value(&meta).unwrap();
|
||||||
|
assert_eq!(json["elapsed_ms"], 99);
|
||||||
|
assert_eq!(json["gitlab_base_url"], "https://gitlab.example.com");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn meta_with_base_url_strips_trailing_slash() {
|
||||||
|
let meta = RobotMeta::with_base_url(0, "https://gitlab.example.com/");
|
||||||
|
let json = serde_json::to_value(&meta).unwrap();
|
||||||
|
assert_eq!(json["gitlab_base_url"], "https://gitlab.example.com");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,8 +28,11 @@ pub enum ErrorCode {
|
|||||||
OllamaUnavailable,
|
OllamaUnavailable,
|
||||||
OllamaModelNotFound,
|
OllamaModelNotFound,
|
||||||
EmbeddingFailed,
|
EmbeddingFailed,
|
||||||
|
EmbeddingsNotBuilt,
|
||||||
NotFound,
|
NotFound,
|
||||||
Ambiguous,
|
Ambiguous,
|
||||||
|
HealthCheckFailed,
|
||||||
|
UsageError,
|
||||||
SurgicalPreflightFailed,
|
SurgicalPreflightFailed,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -52,8 +55,11 @@ impl std::fmt::Display for ErrorCode {
|
|||||||
Self::OllamaUnavailable => "OLLAMA_UNAVAILABLE",
|
Self::OllamaUnavailable => "OLLAMA_UNAVAILABLE",
|
||||||
Self::OllamaModelNotFound => "OLLAMA_MODEL_NOT_FOUND",
|
Self::OllamaModelNotFound => "OLLAMA_MODEL_NOT_FOUND",
|
||||||
Self::EmbeddingFailed => "EMBEDDING_FAILED",
|
Self::EmbeddingFailed => "EMBEDDING_FAILED",
|
||||||
|
Self::EmbeddingsNotBuilt => "EMBEDDINGS_NOT_BUILT",
|
||||||
Self::NotFound => "NOT_FOUND",
|
Self::NotFound => "NOT_FOUND",
|
||||||
Self::Ambiguous => "AMBIGUOUS",
|
Self::Ambiguous => "AMBIGUOUS",
|
||||||
|
Self::HealthCheckFailed => "HEALTH_CHECK_FAILED",
|
||||||
|
Self::UsageError => "USAGE_ERROR",
|
||||||
Self::SurgicalPreflightFailed => "SURGICAL_PREFLIGHT_FAILED",
|
Self::SurgicalPreflightFailed => "SURGICAL_PREFLIGHT_FAILED",
|
||||||
};
|
};
|
||||||
write!(f, "{code}")
|
write!(f, "{code}")
|
||||||
@@ -79,8 +85,11 @@ impl ErrorCode {
|
|||||||
Self::OllamaUnavailable => 14,
|
Self::OllamaUnavailable => 14,
|
||||||
Self::OllamaModelNotFound => 15,
|
Self::OllamaModelNotFound => 15,
|
||||||
Self::EmbeddingFailed => 16,
|
Self::EmbeddingFailed => 16,
|
||||||
|
Self::EmbeddingsNotBuilt => 21,
|
||||||
Self::NotFound => 17,
|
Self::NotFound => 17,
|
||||||
Self::Ambiguous => 18,
|
Self::Ambiguous => 18,
|
||||||
|
Self::HealthCheckFailed => 19,
|
||||||
|
Self::UsageError => 2,
|
||||||
// Shares exit code 6 with GitLabNotFound — same semantic category (resource not found).
|
// Shares exit code 6 with GitLabNotFound — same semantic category (resource not found).
|
||||||
// Robot consumers distinguish via ErrorCode string, not exit code.
|
// Robot consumers distinguish via ErrorCode string, not exit code.
|
||||||
Self::SurgicalPreflightFailed => 6,
|
Self::SurgicalPreflightFailed => 6,
|
||||||
@@ -201,7 +210,7 @@ impl LoreError {
|
|||||||
Self::OllamaUnavailable { .. } => ErrorCode::OllamaUnavailable,
|
Self::OllamaUnavailable { .. } => ErrorCode::OllamaUnavailable,
|
||||||
Self::OllamaModelNotFound { .. } => ErrorCode::OllamaModelNotFound,
|
Self::OllamaModelNotFound { .. } => ErrorCode::OllamaModelNotFound,
|
||||||
Self::EmbeddingFailed { .. } => ErrorCode::EmbeddingFailed,
|
Self::EmbeddingFailed { .. } => ErrorCode::EmbeddingFailed,
|
||||||
Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingFailed,
|
Self::EmbeddingsNotBuilt => ErrorCode::EmbeddingsNotBuilt,
|
||||||
Self::SurgicalPreflightFailed { .. } => ErrorCode::SurgicalPreflightFailed,
|
Self::SurgicalPreflightFailed { .. } => ErrorCode::SurgicalPreflightFailed,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,70 +0,0 @@
|
|||||||
pub const CHUNK_ROWID_MULTIPLIER: i64 = 1000;
|
|
||||||
|
|
||||||
pub fn encode_rowid(document_id: i64, chunk_index: i64) -> i64 {
|
|
||||||
assert!(
|
|
||||||
(0..CHUNK_ROWID_MULTIPLIER).contains(&chunk_index),
|
|
||||||
"chunk_index {chunk_index} out of range [0, {CHUNK_ROWID_MULTIPLIER})"
|
|
||||||
);
|
|
||||||
document_id
|
|
||||||
.checked_mul(CHUNK_ROWID_MULTIPLIER)
|
|
||||||
.and_then(|v| v.checked_add(chunk_index))
|
|
||||||
.unwrap_or_else(|| {
|
|
||||||
panic!("encode_rowid overflow: document_id={document_id}, chunk_index={chunk_index}")
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn decode_rowid(rowid: i64) -> (i64, i64) {
|
|
||||||
assert!(
|
|
||||||
rowid >= 0,
|
|
||||||
"decode_rowid called with negative rowid: {rowid}"
|
|
||||||
);
|
|
||||||
let document_id = rowid / CHUNK_ROWID_MULTIPLIER;
|
|
||||||
let chunk_index = rowid % CHUNK_ROWID_MULTIPLIER;
|
|
||||||
(document_id, chunk_index)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encode_single_chunk() {
|
|
||||||
assert_eq!(encode_rowid(1, 0), 1000);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encode_multi_chunk() {
|
|
||||||
assert_eq!(encode_rowid(1, 5), 1005);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encode_specific_values() {
|
|
||||||
assert_eq!(encode_rowid(42, 0), 42000);
|
|
||||||
assert_eq!(encode_rowid(42, 5), 42005);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_decode_zero_chunk() {
|
|
||||||
assert_eq!(decode_rowid(42000), (42, 0));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_decode_roundtrip() {
|
|
||||||
for doc_id in [0, 1, 42, 100, 999, 10000] {
|
|
||||||
for chunk_idx in [0, 1, 5, 99, 999] {
|
|
||||||
let rowid = encode_rowid(doc_id, chunk_idx);
|
|
||||||
let (decoded_doc, decoded_chunk) = decode_rowid(rowid);
|
|
||||||
assert_eq!(
|
|
||||||
(decoded_doc, decoded_chunk),
|
|
||||||
(doc_id, chunk_idx),
|
|
||||||
"Roundtrip failed for doc_id={doc_id}, chunk_idx={chunk_idx}"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_multiplier_value() {
|
|
||||||
assert_eq!(CHUNK_ROWID_MULTIPLIER, 1000);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
pub const CHUNK_MAX_BYTES: usize = 1_500;
|
|
||||||
|
|
||||||
pub const EXPECTED_DIMS: usize = 768;
|
|
||||||
|
|
||||||
pub const CHUNK_OVERLAP_CHARS: usize = 200;
|
|
||||||
|
|
||||||
pub fn split_into_chunks(content: &str) -> Vec<(usize, String)> {
|
|
||||||
if content.is_empty() {
|
|
||||||
return Vec::new();
|
|
||||||
}
|
|
||||||
|
|
||||||
if content.len() <= CHUNK_MAX_BYTES {
|
|
||||||
return vec![(0, content.to_string())];
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut chunks: Vec<(usize, String)> = Vec::new();
|
|
||||||
let mut start = 0;
|
|
||||||
let mut chunk_index = 0;
|
|
||||||
|
|
||||||
while start < content.len() {
|
|
||||||
let remaining = &content[start..];
|
|
||||||
if remaining.len() <= CHUNK_MAX_BYTES {
|
|
||||||
chunks.push((chunk_index, remaining.to_string()));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let end = floor_char_boundary(content, start + CHUNK_MAX_BYTES);
|
|
||||||
let window = &content[start..end];
|
|
||||||
|
|
||||||
let split_at = find_paragraph_break(window)
|
|
||||||
.or_else(|| find_sentence_break(window))
|
|
||||||
.or_else(|| find_word_break(window))
|
|
||||||
.unwrap_or(window.len());
|
|
||||||
|
|
||||||
let chunk_text = &content[start..start + split_at];
|
|
||||||
chunks.push((chunk_index, chunk_text.to_string()));
|
|
||||||
|
|
||||||
let advance = if split_at > CHUNK_OVERLAP_CHARS {
|
|
||||||
split_at - CHUNK_OVERLAP_CHARS
|
|
||||||
} else {
|
|
||||||
split_at
|
|
||||||
}
|
|
||||||
.max(1);
|
|
||||||
let old_start = start;
|
|
||||||
start += advance;
|
|
||||||
// Ensure start lands on a char boundary after overlap subtraction
|
|
||||||
start = floor_char_boundary(content, start);
|
|
||||||
// Guarantee forward progress: multi-byte chars can cause
|
|
||||||
// floor_char_boundary to round back to old_start
|
|
||||||
if start <= old_start {
|
|
||||||
start = old_start
|
|
||||||
+ content[old_start..]
|
|
||||||
.chars()
|
|
||||||
.next()
|
|
||||||
.map_or(1, |c| c.len_utf8());
|
|
||||||
}
|
|
||||||
chunk_index += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
chunks
|
|
||||||
}
|
|
||||||
|
|
||||||
fn find_paragraph_break(window: &str) -> Option<usize> {
|
|
||||||
let search_start = floor_char_boundary(window, window.len() * 2 / 3);
|
|
||||||
window[search_start..]
|
|
||||||
.rfind("\n\n")
|
|
||||||
.map(|pos| search_start + pos + 2)
|
|
||||||
.or_else(|| window[..search_start].rfind("\n\n").map(|pos| pos + 2))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn find_sentence_break(window: &str) -> Option<usize> {
|
|
||||||
let search_start = floor_char_boundary(window, window.len() / 2);
|
|
||||||
for pat in &[". ", "? ", "! "] {
|
|
||||||
if let Some(pos) = window[search_start..].rfind(pat) {
|
|
||||||
return Some(search_start + pos + pat.len());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for pat in &[". ", "? ", "! "] {
|
|
||||||
if let Some(pos) = window[..search_start].rfind(pat) {
|
|
||||||
return Some(pos + pat.len());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
fn find_word_break(window: &str) -> Option<usize> {
|
|
||||||
let search_start = floor_char_boundary(window, window.len() / 2);
|
|
||||||
window[search_start..]
|
|
||||||
.rfind(' ')
|
|
||||||
.map(|pos| search_start + pos + 1)
|
|
||||||
.or_else(|| window[..search_start].rfind(' ').map(|pos| pos + 1))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn floor_char_boundary(s: &str, idx: usize) -> usize {
|
|
||||||
if idx >= s.len() {
|
|
||||||
return s.len();
|
|
||||||
}
|
|
||||||
let mut i = idx;
|
|
||||||
while i > 0 && !s.is_char_boundary(i) {
|
|
||||||
i -= 1;
|
|
||||||
}
|
|
||||||
i
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
#[path = "chunking_tests.rs"]
|
|
||||||
mod tests;
|
|
||||||
@@ -53,14 +53,8 @@ pub struct NormalizedNote {
|
|||||||
pub position_head_sha: Option<String>,
|
pub position_head_sha: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_timestamp(ts: &str) -> i64 {
|
fn parse_timestamp(ts: &str) -> Result<i64, String> {
|
||||||
match iso_to_ms(ts) {
|
iso_to_ms_strict(ts)
|
||||||
Some(ms) => ms,
|
|
||||||
None => {
|
|
||||||
warn!(timestamp = ts, "Invalid timestamp, defaulting to epoch 0");
|
|
||||||
0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn transform_discussion(
|
pub fn transform_discussion(
|
||||||
@@ -133,7 +127,15 @@ pub fn transform_notes(
|
|||||||
.notes
|
.notes
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(idx, note)| transform_single_note(note, local_project_id, idx as i32, now))
|
.filter_map(|(idx, note)| {
|
||||||
|
match transform_single_note(note, local_project_id, idx as i32, now) {
|
||||||
|
Ok(n) => Some(n),
|
||||||
|
Err(e) => {
|
||||||
|
warn!(note_id = note.id, error = %e, "Skipping note with invalid timestamp");
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -142,7 +144,10 @@ fn transform_single_note(
|
|||||||
local_project_id: i64,
|
local_project_id: i64,
|
||||||
position: i32,
|
position: i32,
|
||||||
now: i64,
|
now: i64,
|
||||||
) -> NormalizedNote {
|
) -> Result<NormalizedNote, String> {
|
||||||
|
let created_at = parse_timestamp(¬e.created_at)?;
|
||||||
|
let updated_at = parse_timestamp(¬e.updated_at)?;
|
||||||
|
|
||||||
let (
|
let (
|
||||||
position_old_path,
|
position_old_path,
|
||||||
position_new_path,
|
position_new_path,
|
||||||
@@ -156,7 +161,7 @@ fn transform_single_note(
|
|||||||
position_head_sha,
|
position_head_sha,
|
||||||
) = extract_position_fields(¬e.position);
|
) = extract_position_fields(¬e.position);
|
||||||
|
|
||||||
NormalizedNote {
|
Ok(NormalizedNote {
|
||||||
gitlab_id: note.id,
|
gitlab_id: note.id,
|
||||||
project_id: local_project_id,
|
project_id: local_project_id,
|
||||||
note_type: note.note_type.clone(),
|
note_type: note.note_type.clone(),
|
||||||
@@ -164,8 +169,8 @@ fn transform_single_note(
|
|||||||
author_id: Some(note.author.id),
|
author_id: Some(note.author.id),
|
||||||
author_username: note.author.username.clone(),
|
author_username: note.author.username.clone(),
|
||||||
body: note.body.clone(),
|
body: note.body.clone(),
|
||||||
created_at: parse_timestamp(¬e.created_at),
|
created_at,
|
||||||
updated_at: parse_timestamp(¬e.updated_at),
|
updated_at,
|
||||||
last_seen_at: now,
|
last_seen_at: now,
|
||||||
position,
|
position,
|
||||||
resolvable: note.resolvable,
|
resolvable: note.resolvable,
|
||||||
@@ -182,7 +187,7 @@ fn transform_single_note(
|
|||||||
position_base_sha,
|
position_base_sha,
|
||||||
position_start_sha,
|
position_start_sha,
|
||||||
position_head_sha,
|
position_head_sha,
|
||||||
}
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::type_complexity)]
|
#[allow(clippy::type_complexity)]
|
||||||
|
|||||||
@@ -40,8 +40,12 @@ fn setup() -> Connection {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn get_discussion_id(conn: &Connection) -> i64 {
|
fn get_discussion_id(conn: &Connection) -> i64 {
|
||||||
conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
|
conn.query_row(
|
||||||
.unwrap()
|
"SELECT id FROM discussions ORDER BY id LIMIT 1",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
|||||||
@@ -786,8 +786,12 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn get_mr_discussion_id(conn: &Connection) -> i64 {
|
fn get_mr_discussion_id(conn: &Connection) -> i64 {
|
||||||
conn.query_row("SELECT id FROM discussions LIMIT 1", [], |row| row.get(0))
|
conn.query_row(
|
||||||
.unwrap()
|
"SELECT id FROM discussions ORDER BY id LIMIT 1",
|
||||||
|
[],
|
||||||
|
|row| row.get(0),
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
|
|||||||
@@ -242,14 +242,16 @@ mod tests {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let project_id: i64 = conn
|
let project_id: i64 = conn
|
||||||
.query_row("SELECT id FROM projects LIMIT 1", [], |row| row.get(0))
|
.query_row("SELECT id FROM projects ORDER BY id LIMIT 1", [], |row| {
|
||||||
|
row.get(0)
|
||||||
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
enqueue_job(&conn, project_id, "issue", 42, 100, "resource_events", None).unwrap();
|
enqueue_job(&conn, project_id, "issue", 42, 100, "resource_events", None).unwrap();
|
||||||
|
|
||||||
let job_id: i64 = conn
|
let job_id: i64 = conn
|
||||||
.query_row(
|
.query_row(
|
||||||
"SELECT id FROM pending_dependent_fetches LIMIT 1",
|
"SELECT id FROM pending_dependent_fetches ORDER BY id LIMIT 1",
|
||||||
[],
|
[],
|
||||||
|row| row.get(0),
|
|row| row.get(0),
|
||||||
)
|
)
|
||||||
@@ -301,7 +303,9 @@ mod tests {
|
|||||||
let (conn, _job_id) = setup_db_with_job();
|
let (conn, _job_id) = setup_db_with_job();
|
||||||
|
|
||||||
let project_id: i64 = conn
|
let project_id: i64 = conn
|
||||||
.query_row("SELECT id FROM projects LIMIT 1", [], |row| row.get(0))
|
.query_row("SELECT id FROM projects ORDER BY id LIMIT 1", [], |row| {
|
||||||
|
row.get(0)
|
||||||
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let jobs = claim_jobs(&conn, "resource_events", project_id, 10).unwrap();
|
let jobs = claim_jobs(&conn, "resource_events", project_id, 10).unwrap();
|
||||||
assert_eq!(jobs.len(), 1);
|
assert_eq!(jobs.len(), 1);
|
||||||
|
|||||||
81
src/main.rs
81
src/main.rs
@@ -13,23 +13,24 @@ use lore::cli::autocorrect::{self, CorrectionResult};
|
|||||||
use lore::cli::commands::{
|
use lore::cli::commands::{
|
||||||
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
IngestDisplay, InitInputs, InitOptions, InitResult, ListFilters, MrListFilters,
|
||||||
NoteListFilters, RefreshOptions, RefreshResult, SearchCliFilters, SyncOptions, TimelineParams,
|
NoteListFilters, RefreshOptions, RefreshResult, SearchCliFilters, SyncOptions, TimelineParams,
|
||||||
delete_orphan_projects, open_issue_in_browser, open_mr_in_browser, parse_trace_path,
|
delete_orphan_projects, handle_explain, open_issue_in_browser, open_mr_in_browser,
|
||||||
print_count, print_count_json, print_cron_install, print_cron_install_json, print_cron_status,
|
parse_trace_path, print_count, print_count_json, print_cron_install, print_cron_install_json,
|
||||||
print_cron_status_json, print_cron_uninstall, print_cron_uninstall_json, print_doctor_results,
|
print_cron_status, print_cron_status_json, print_cron_uninstall, print_cron_uninstall_json,
|
||||||
print_drift_human, print_drift_json, print_dry_run_preview, print_dry_run_preview_json,
|
print_doctor_results, print_drift_human, print_drift_json, print_dry_run_preview,
|
||||||
print_embed, print_embed_json, print_event_count, print_event_count_json, print_file_history,
|
print_dry_run_preview_json, print_embed, print_embed_json, print_event_count,
|
||||||
print_file_history_json, print_generate_docs, print_generate_docs_json, print_ingest_summary,
|
print_event_count_json, print_file_history, print_file_history_json, print_generate_docs,
|
||||||
print_ingest_summary_json, print_list_issues, print_list_issues_json, print_list_mrs,
|
print_generate_docs_json, print_ingest_summary, print_ingest_summary_json, print_list_issues,
|
||||||
print_list_mrs_json, print_list_notes, print_list_notes_json, print_related_human,
|
print_list_issues_json, print_list_mrs, print_list_mrs_json, print_list_notes,
|
||||||
print_related_json, print_search_results, print_search_results_json, print_show_issue,
|
print_list_notes_json, print_related_human, print_related_json, print_search_results,
|
||||||
print_show_issue_json, print_show_mr, print_show_mr_json, print_stats, print_stats_json,
|
print_search_results_json, print_show_issue, print_show_issue_json, print_show_mr,
|
||||||
print_sync, print_sync_json, print_sync_status, print_sync_status_json, print_timeline,
|
print_show_mr_json, print_stats, print_stats_json, print_sync, print_sync_json,
|
||||||
print_timeline_json_with_meta, print_trace, print_trace_json, print_who_human, print_who_json,
|
print_sync_status, print_sync_status_json, print_timeline, print_timeline_json_with_meta,
|
||||||
query_notes, run_auth_test, run_count, run_count_events, run_cron_install, run_cron_status,
|
print_trace, print_trace_json, print_who_human, print_who_json, query_notes, run_auth_test,
|
||||||
run_cron_uninstall, run_doctor, run_drift, run_embed, run_file_history, run_generate_docs,
|
run_count, run_count_events, run_cron_install, run_cron_status, run_cron_uninstall, run_doctor,
|
||||||
run_ingest, run_ingest_dry_run, run_init, run_init_refresh, run_list_issues, run_list_mrs,
|
run_drift, run_embed, run_file_history, run_generate_docs, run_ingest, run_ingest_dry_run,
|
||||||
run_me, run_related, run_search, run_show_issue, run_show_mr, run_stats, run_sync,
|
run_init, run_init_refresh, run_list_issues, run_list_mrs, run_me, run_related, run_search,
|
||||||
run_sync_status, run_timeline, run_token_set, run_token_show, run_who,
|
run_show_issue, run_show_mr, run_stats, run_sync, run_sync_status, run_timeline, run_token_set,
|
||||||
|
run_token_show, run_who,
|
||||||
};
|
};
|
||||||
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
|
use lore::cli::render::{ColorMode, GlyphMode, Icons, LoreRenderer, Theme};
|
||||||
use lore::cli::robot::{RobotMeta, strip_schemas};
|
use lore::cli::robot::{RobotMeta, strip_schemas};
|
||||||
@@ -222,6 +223,25 @@ fn main() {
|
|||||||
Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode),
|
Some(Commands::Trace(args)) => handle_trace(cli.config.as_deref(), args, robot_mode),
|
||||||
Some(Commands::Cron(args)) => handle_cron(cli.config.as_deref(), args, robot_mode),
|
Some(Commands::Cron(args)) => handle_cron(cli.config.as_deref(), args, robot_mode),
|
||||||
Some(Commands::Token(args)) => handle_token(cli.config.as_deref(), args, robot_mode).await,
|
Some(Commands::Token(args)) => handle_token(cli.config.as_deref(), args, robot_mode).await,
|
||||||
|
Some(Commands::Explain {
|
||||||
|
entity_type,
|
||||||
|
iid,
|
||||||
|
project,
|
||||||
|
sections,
|
||||||
|
no_timeline,
|
||||||
|
max_decisions,
|
||||||
|
since,
|
||||||
|
}) => handle_explain(
|
||||||
|
cli.config.as_deref(),
|
||||||
|
&entity_type,
|
||||||
|
iid,
|
||||||
|
project.as_deref(),
|
||||||
|
sections,
|
||||||
|
no_timeline,
|
||||||
|
max_decisions,
|
||||||
|
since.as_deref(),
|
||||||
|
robot_mode,
|
||||||
|
),
|
||||||
Some(Commands::Drift {
|
Some(Commands::Drift {
|
||||||
entity_type,
|
entity_type,
|
||||||
iid,
|
iid,
|
||||||
@@ -365,33 +385,6 @@ fn main() {
|
|||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
Some(Commands::Show {
|
|
||||||
entity,
|
|
||||||
iid,
|
|
||||||
project,
|
|
||||||
}) => {
|
|
||||||
if robot_mode {
|
|
||||||
eprintln!(
|
|
||||||
r#"{{"warning":{{"type":"DEPRECATED","message":"'lore show' is deprecated, use 'lore {entity}s {iid}'","successor":"{entity}s"}}}}"#
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
eprintln!(
|
|
||||||
"{}",
|
|
||||||
Theme::warning().render(&format!(
|
|
||||||
"warning: 'lore show' is deprecated, use 'lore {}s {}'",
|
|
||||||
entity, iid
|
|
||||||
))
|
|
||||||
);
|
|
||||||
}
|
|
||||||
handle_show_compat(
|
|
||||||
cli.config.as_deref(),
|
|
||||||
&entity,
|
|
||||||
iid,
|
|
||||||
project.as_deref(),
|
|
||||||
robot_mode,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
Some(Commands::AuthTest) => {
|
Some(Commands::AuthTest) => {
|
||||||
if robot_mode {
|
if robot_mode {
|
||||||
eprintln!(
|
eprintln!(
|
||||||
|
|||||||
@@ -119,15 +119,12 @@ pub fn search_fts(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn generate_fallback_snippet(content_text: &str, max_chars: usize) -> String {
|
pub fn generate_fallback_snippet(content_text: &str, max_chars: usize) -> String {
|
||||||
if content_text.chars().count() <= max_chars {
|
// Use char_indices to find the boundary at max_chars in a single pass,
|
||||||
return content_text.to_string();
|
// short-circuiting early for large strings instead of counting all chars.
|
||||||
}
|
let byte_end = match content_text.char_indices().nth(max_chars) {
|
||||||
|
Some((i, _)) => i,
|
||||||
let byte_end = content_text
|
None => return content_text.to_string(), // content fits within max_chars
|
||||||
.char_indices()
|
};
|
||||||
.nth(max_chars)
|
|
||||||
.map(|(i, _)| i)
|
|
||||||
.unwrap_or(content_text.len());
|
|
||||||
let truncated = &content_text[..byte_end];
|
let truncated = &content_text[..byte_end];
|
||||||
|
|
||||||
if let Some(last_space) = truncated.rfind(' ') {
|
if let Some(last_space) = truncated.rfind(' ') {
|
||||||
|
|||||||
@@ -411,7 +411,9 @@ fn round_robin_select_by_discussion(
|
|||||||
let mut made_progress = false;
|
let mut made_progress = false;
|
||||||
|
|
||||||
for (disc_idx, &discussion_id) in discussion_order.iter().enumerate() {
|
for (disc_idx, &discussion_id) in discussion_order.iter().enumerate() {
|
||||||
let notes = by_discussion.get(&discussion_id).unwrap();
|
let notes = by_discussion
|
||||||
|
.get(&discussion_id)
|
||||||
|
.expect("key present: inserted into by_discussion via discussion_order");
|
||||||
let note_idx = indices[disc_idx];
|
let note_idx = indices[disc_idx];
|
||||||
|
|
||||||
if note_idx < notes.len() {
|
if note_idx < notes.len() {
|
||||||
|
|||||||
Reference in New Issue
Block a user